Beispiel #1
0
 def cron_generate_ddmrp_warnings(self, automatic=False):
     """Cron entry point: (re)generate DDMRP warnings for every buffer.

     :param automatic: True when launched by the scheduler; a failure on a
         single buffer is then logged and skipped instead of aborting the
         whole run.
     :return: True
     """
     # Do not commit when running inside tests (Odoo's test framework flags
     # the current thread with ``testing``).
     # BUGFIX: threading.currentThread() is a deprecated alias removed in
     # Python 3.12; use threading.current_thread() instead.
     auto_commit = not getattr(threading.current_thread(), "testing", False)
     buffer_ids = self.search([]).ids
     i = 0
     j = len(buffer_ids)
     for buffer_chunk_ids in split_every(self.CRON_DDMRP_CHUNKS,
                                         buffer_ids):
         for b in self.browse(buffer_chunk_ids).exists():
             try:
                 i += 1
                 _logger.debug(
                     "ddmrp cron_generate_ddmrp_warnings: {}. ({}/{})".
                     format(b.name, i, j))
                 if automatic:
                     # Isolate each buffer in a savepoint so one failure
                     # does not roll back the rest of the chunk.
                     with self.env.cr.savepoint():
                         b._generate_ddmrp_warnings()
                 else:
                     b._generate_ddmrp_warnings()
             except Exception:
                 _logger.exception("Fail to compute Warnings for buffer %s",
                                   b.name)
                 if not automatic:
                     raise
         # Commit between chunks so long cron runs survive interruption.
         if auto_commit:
             self._cr.commit()  # pylint: disable=E8102
     return True
Beispiel #2
0
    def run_scheduler(self, use_new_cursor=False, company_id=False):
        ''' Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
        and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompanies and access rights issues. '''
        super(ProcurementOrder, self).run_scheduler(use_new_cursor=use_new_cursor, company_id=company_id)
        try:
            if use_new_cursor:
                # Work on a dedicated cursor so progress can be committed in
                # batches without touching the caller's transaction.
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))  # TDE FIXME

            # Minimum stock rules
            self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

            # Search all confirmed stock_moves and try to assign them
            confirmed_moves = self.env['stock.move'].search([('state', '=', 'confirmed')], limit=None, order='priority desc, date_expected asc')
            for moves_chunk in split_every(100, confirmed_moves.ids):
                # TDE CLEANME: muf muf
                self.env['stock.move'].browse(moves_chunk).action_assign()
                if use_new_cursor:
                    # Commit every 100 moves so a crash does not lose the
                    # reservations already made.
                    self._cr.commit()
            if use_new_cursor:
                self._cr.commit()
        finally:
            if use_new_cursor:
                try:
                    self._cr.close()
                except Exception:
                    # Best effort: the cursor may already be closed.
                    pass
        return {}
Beispiel #3
0
    def woo_sync_customers(self, customers):
        """
        Generate queue & queue lines for customer sync process.

        Customers are split into queues of at most 100 lines each.

        :param customers: List of dict (WooCommerce customer payloads).
        :return: The last created queue record; an empty recordset when
            ``customers`` is empty.
        """
        woo_sync_customer_obj = self.env['woo.customer.data.queue.ept']
        woo_sync_customer_data = self.env['woo.customer.data.queue.line.ept']
        # BUGFIX: start from an empty recordset so the return value is
        # defined even when no customer was received (the original code
        # raised NameError on ``return queue`` in that case).
        queue = woo_sync_customer_obj.browse()

        for customer_queue in split_every(100, customers):
            queue = woo_sync_customer_obj.create(
                {"woo_instance_id": self.woo_instance_id.id})

            for customer in customer_queue:
                billing = customer.get('billing')
                # Build a fresh dict per line instead of mutating a single
                # shared dict across iterations (avoids value leakage if a
                # key is ever added conditionally).
                sync_vals = {
                    'woo_instance_id': self.woo_instance_id.id,
                    'queue_id': queue.id,
                    'last_process_date': datetime.now(),
                    'woo_synced_data': json.dumps(customer),
                    'woo_synced_data_id': customer.get('id'),
                    # ``or ''`` guards against None first/last names, which
                    # previously raised TypeError on concatenation.
                    'name': ((billing.get('first_name') or '') +
                             (billing.get('last_name') or ''))
                            if billing else '',
                }
                woo_sync_customer_data.create(sync_vals)
        return queue
Beispiel #4
0
    def create_customer_data_queues(self, customer_data):
        """
        Create customer data queues (125 records per queue) from customer
        data and push a bus notification to the user for every queue made.

        @author: Maulik Barad on Date 09-Sep-2020.
        @param customer_data: Data of Customer.
        @return: list of created queue ids.
        """
        queue_ids = []
        queue_model = self.env["shopify.customer.data.queue.ept"]
        queue_line_model = self.env[
            "shopify.customer.data.queue.line.ept"]
        bus_model = self.env["bus.bus"]

        if len(customer_data) > 0:
            for data_chunk in split_every(125, customer_data):
                queue = queue_model.create_customer_queue(
                    self.shopify_instance_id, "import_process")
                queue_line_model.shopify_create_multi_queue(queue, data_chunk)

                message = "Customer Queue created {}".format(queue.name)
                channel = (self._cr.dbname, "res.partner",
                           self.env.user.partner_id.id)
                notification = {
                    "type": "simple_notification",
                    "title": "Shopify Notification",
                    "message": message,
                    "sticky": False,
                    "warning": True,
                }
                bus_model.sendone(channel, notification)
                _logger.info(message)

                queue_ids.append(queue.id)
            # Persist all queues before the (possibly long) import goes on.
            self._cr.commit()
        return queue_ids
Beispiel #5
0
    def run_scheduler(self, use_new_cursor=False, company_id=False):
        ''' Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
        and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompanies and access rights issues. '''
        super(ProcurementOrder,
              self).run_scheduler(use_new_cursor=use_new_cursor,
                                  company_id=company_id)
        try:
            if use_new_cursor:
                # Work on a dedicated cursor so progress can be committed in
                # batches without touching the caller's transaction.
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))  # TDE FIXME

            # Minimum stock rules
            self.sudo()._procure_orderpoint_confirm(
                use_new_cursor=use_new_cursor, company_id=company_id)

            # Search all confirmed stock_moves and try to assign them
            confirmed_moves = self.env['stock.move'].search(
                [('state', '=', 'confirmed')],
                limit=None,
                order='priority desc, date_expected asc')
            for moves_chunk in split_every(100, confirmed_moves.ids):
                # TDE CLEANME: muf muf
                self.env['stock.move'].browse(moves_chunk).action_assign()
                if use_new_cursor:
                    # Commit every 100 moves so a crash does not lose the
                    # reservations already made.
                    self._cr.commit()
            if use_new_cursor:
                self._cr.commit()
        finally:
            if use_new_cursor:
                try:
                    self._cr.close()
                except Exception:
                    # Best effort: the cursor may already be closed.
                    pass
        return {}
Beispiel #6
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Run the scheduler workload: orderpoints, move reservation,
        exception re-processing and quant merging.

        :param use_new_cursor: when True, commit progress in batches on the
            dedicated cursor opened by the caller.
        :param company_id: optional company to restrict processing to.
        """
        # Minimum stock rules
        self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

        # Search all confirmed stock_moves and try to assign them
        confirmed_moves = self.env['stock.move'].search([('state', '=', 'confirmed')], limit=None, order='priority desc, date_expected asc')
        for moves_chunk in split_every(100, confirmed_moves.ids):
            self.env['stock.move'].browse(moves_chunk)._action_assign()
            if use_new_cursor:
                # Commit every 100 moves so a crash does not lose progress.
                self._cr.commit()

        # Re-run the procurement for moves currently in exception.
        exception_moves = self.env['stock.move'].search(self._get_exceptions_domain())
        for move in exception_moves:
            values = move._prepare_procurement_values()
            try:
                # Savepoint: one failing procurement must not roll back the
                # ones already processed in this transaction.
                with self._cr.savepoint():
                    origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or move.picking_id.name or "/")
                    self.run(move.product_id, move.product_uom_qty, move.product_uom, move.location_id, move.rule_id and move.rule_id.name or "/", origin, values)
            except UserError as error:
                # Surface the problem as a next activity instead of failing.
                self.env['procurement.rule']._log_next_activity(move.product_id, error.name)
        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants
        self.env['stock.quant']._merge_quants()
Beispiel #7
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Run the scheduler workload: orderpoints, move reservation and
        quant housekeeping, committing in batches on a dedicated cursor."""
        # Minimum stock rules.
        orderpoint_domain = self._get_orderpoint_domain(company_id=company_id)
        orderpoint_model = self.env['stock.warehouse.orderpoint']
        orderpoints = orderpoint_model.search(orderpoint_domain)
        orderpoints.sudo()._procure_orderpoint_confirm(
            use_new_cursor=use_new_cursor,
            company_id=company_id,
            raise_user_error=False)

        # Reserve pending moves, most urgent first, 100 at a time.
        move_model = self.env['stock.move']
        assign_domain = self._get_moves_to_assign_domain()
        move_ids = move_model.search(
            assign_domain, limit=None,
            order='priority desc, date_expected asc').ids
        for chunk in split_every(100, move_ids):
            move_model.browse(chunk)._action_assign()
            if use_new_cursor:
                self._cr.commit()

        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants.
        self.env['stock.quant']._quant_tasks()

        if use_new_cursor:
            self._cr.commit()
Beispiel #8
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Run the scheduler workload: orderpoints, move reservation,
        exception re-processing and quant merging.

        :param use_new_cursor: when True, commit progress in batches on the
            dedicated cursor opened by the caller.
        :param company_id: optional company to restrict processing to.
        """
        # Minimum stock rules
        self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

        # Search all confirmed stock_moves and try to assign them
        confirmed_moves = self.env['stock.move'].search([('state', '=', 'confirmed'), ('product_uom_qty', '!=', 0.0)], limit=None, order='priority desc, date_expected asc')
        for moves_chunk in split_every(100, confirmed_moves.ids):
            self.env['stock.move'].browse(moves_chunk)._action_assign()
            if use_new_cursor:
                # Commit every 100 moves so a crash does not lose progress.
                self._cr.commit()

        # Re-run the procurement for moves currently in exception.
        exception_moves = self.env['stock.move'].search(self._get_exceptions_domain())
        for move in exception_moves:
            values = move._prepare_procurement_values()
            try:
                # Savepoint: one failing procurement must not roll back the
                # ones already processed in this transaction.
                with self._cr.savepoint():
                    origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or move.picking_id.name or "/")
                    self.run(move.product_id, move.product_uom_qty, move.product_uom, move.location_id, move.rule_id and move.rule_id.name or "/", origin, values)
            except UserError as error:
                # Surface the problem as a next activity instead of failing.
                self.env['procurement.rule']._log_next_activity(move.product_id, error.name)
        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants
        self.env['stock.quant']._merge_quants()
Beispiel #9
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Run the scheduler workload: orderpoints, move reservation, quant
        tasks and cyclic inventories.

        :param use_new_cursor: when True, commit progress in batches on the
            dedicated cursor opened by the caller.
        :param company_id: company to restrict processing to.
        """
        # Minimum stock rules
        domain = self._get_orderpoint_domain(company_id=company_id)
        orderpoints = self.env['stock.warehouse.orderpoint'].search(domain)
        # ensure that qty_* which depends on datetime.now() are correctly
        # recomputed
        orderpoints.sudo()._compute_qty_to_order()
        orderpoints.sudo()._procure_orderpoint_confirm(
            use_new_cursor=use_new_cursor,
            company_id=company_id,
            raise_user_error=False)
        if use_new_cursor:
            self._cr.commit()

        # Search all confirmed stock_moves and try to assign them
        domain = self._get_moves_to_assign_domain(company_id)
        moves_to_assign = self.env['stock.move'].search(
            domain, limit=None, order='priority desc, date asc')
        for moves_chunk in split_every(100, moves_to_assign.ids):
            self.env['stock.move'].browse(moves_chunk).sudo()._action_assign()
            if use_new_cursor:
                # Commit every 100 moves so a crash does not lose progress.
                self._cr.commit()

        # Merge duplicated quants
        self.env['stock.quant']._quant_tasks()

        if use_new_cursor:
            self._cr.commit()

        # Run cyclic inventories
        self.env['stock.inventory']._run_inventory_tasks(company_id)

        if use_new_cursor:
            self._cr.commit()
Beispiel #10
0
    def action_done(self):
        """Extend the standard done action: afterwards, try to reserve all
        pending moves matching the company-wide assign domain and clean up
        quants.

        :return: result of the parent ``action_done``.
        """
        res = super().action_done()
        # Search all confirmed stock_moves and try to assign them
        domain = self.env['procurement.group']._get_moves_to_assign_domain(
            self.company_id.id)

        #domain.append(('raw_material_production_id', '!=', False))
        moves_to_assign = self.env['stock.move'].search(
            domain, limit=None, order='priority desc, date_expected asc')
        for moves_chunk in split_every(100, moves_to_assign.ids):
            self.env['stock.move'].browse(moves_chunk)._action_assign()

        # COMPUTE LAUNCH ACTION ASSIGN FOR PRODUCTION MOVES ONLY LUPEON COMPANY:
        # Not needed because it is already done globally, but this would be
        # the code to search production moves, product by product
        # if self.company_id and not self.company_id.cost_sheet_sale:
        #     for product in self.move_line_ids.mapped('product_id'):
        #         domain = [
        #             ('product_id', '=', product.id),
        #             ('company_id', '=', self.company_id.id),
        #             ('state', 'in', ['confirmed', 'partially_available']),
        #             ('product_uom_qty', '!=', 0.0),
        #             ('raw_material_production_id', '!=', False)
        #         ]
        #         moves_to_assign = self.env['stock.move'].search(domain, limit=None,
        #             order='priority desc, date_expected asc')
        #         for moves_chunk in split_every(100, moves_to_assign.ids):
        #             self.env['stock.move'].browse(moves_chunk)._action_assign()

        # Merge duplicated quants
        self.env['stock.quant']._merge_quants()
        self.env['stock.quant']._unlink_zero_quants()
        return res
Beispiel #11
0
 def _notify_send(self, body, subject, recipients, **mail_values):
     """Create outgoing mail.mail records for ``recipients``, 50 at a time.

     :param body: HTML body of the outgoing mails.
     :param subject: subject of the outgoing mails.
     :param recipients: partner recordset to notify.
     :param mail_values: extra creation values; may contain
         ``mail_message_id`` used to recover the originating message.
     :return: tuple (created mail.mail recordset, number of recipients).
     """
     emails = self.env['mail.mail']
     recipients_nbr = len(recipients)
     for email_chunk in split_every(50, recipients.ids):
         # TDE FIXME: missing message parameter. So we will find mail_message_id
         # in the mail_values and browse it. It should already be in the
         # cache so should not impact performances.
         mail_message_id = mail_values.get('mail_message_id')
         message = self.env['mail.message'].browse(
             mail_message_id) if mail_message_id else None
         # Record the message belongs to, when resolvable; False otherwise.
         tig = self.env[message.model].browse(
             message.res_id
         ) if message and message.model and message.res_id else False
         recipient_values = self.env[
             'mail.thread']._notify_email_recipients_on_records(message,
                                                                email_chunk,
                                                                records=tig)
         create_values = {
             'body_html': body,
             'subject': subject,
         }
         create_values.update(mail_values)
         create_values.update(recipient_values)
         emails |= self.env['mail.mail'].create(create_values)
     return emails, recipients_nbr
 def migrate(self):
     """Migrate the attachments in ``self`` to the configured storage.

     Attachments are processed in batches (``migration_batch_size`` context
     key, default 100), each batch on its own cursor so that a batch commits
     atomically and a failure rolls back only the current batch.
     """
     storage_location = self._storage().upper()
     batch_size = self.env.context.get('migration_batch_size', 100)
     batches_to_migrate = math.ceil(len(self) / batch_size)
     for batch_index, sub_ids in enumerate(split_every(
             batch_size, self.ids)):
         with api.Environment.manage():
             with registry(self.env.cr.dbname).cursor() as batch_cr:
                 batch_env = api.Environment(batch_cr, self.env.uid,
                                             self.env.context.copy())
                 attachment_records = batch_env['ir.attachment'].browse(
                     sub_ids)
                 batch_records_count = len(attachment_records)
                 try:
                     for index, attach in enumerate(attachment_records):
                         _logger.info(
                             "Migrate Attachment %s of %s to %s [Batch %s of %s]",
                             index + 1, batch_records_count,
                             storage_location, batch_index + 1,
                             batches_to_migrate)
                         # Re-writing ``datas`` makes the storage layer
                         # persist the payload at the new location.
                         attach.with_context(migration=True).write(
                             {'datas': attach.datas})
                 except:
                     # Bare except is deliberate here: roll back the batch
                     # on ANY error (incl. KeyboardInterrupt) and re-raise.
                     batch_cr.rollback()
                     raise
                 else:
                     batch_cr.commit()
Beispiel #13
0
    def run_scheduler(self, use_new_cursor=False, company_id=False):
        """ Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
        and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompanies and access rights issues. """
        try:
            if use_new_cursor:
                # Work on a dedicated cursor so progress can be committed in
                # batches without touching the caller's transaction.
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))  # TDE FIXME

            # Minimum stock rules
            self.sudo()._procure_orderpoint_confirm(
                use_new_cursor=use_new_cursor, company_id=company_id)

            # Search all confirmed stock_moves and try to assign them
            confirmed_moves = self.env['stock.move'].search(
                [('state', '=', 'confirmed')],
                limit=None,
                order='priority desc, date_expected asc')
            for moves_chunk in split_every(100, confirmed_moves.ids):
                self.env['stock.move'].browse(moves_chunk).action_assign()
                if use_new_cursor:
                    # Commit every 100 moves so a crash does not lose the
                    # reservations already made.
                    self._cr.commit()

            # Re-run the procurement for moves currently in exception.
            exception_moves = self.env['stock.move'].search(
                self._get_exceptions_domain())
            for move in exception_moves:
                values = move._prepare_procurement_values()
                try:
                    # Savepoint: one failing procurement must not roll back
                    # the ones already processed in this transaction.
                    with self._cr.savepoint():
                        origin = (move.group_id and (move.group_id.name + ":")
                                  or "") + (move.rule_id and move.rule_id.name
                                            or move.origin
                                            or move.picking_id.name or "/")
                        self.run(move.product_id, move.product_uom_qty,
                                 move.product_uom, move.location_id,
                                 move.rule_id and move.rule_id.name or "/",
                                 origin, values)
                except UserError as error:
                    # Surface the problem as an activity instead of failing.
                    self.env['procurement.rule']._log_next_activity(
                        move.product_id, error.name)

            if use_new_cursor:
                self._cr.commit()
        finally:
            if use_new_cursor:
                try:
                    self._cr.close()
                except Exception:
                    # Best effort: the cursor may already be closed.
                    pass
        return {}
    def _update_default_prices(self, product_ids):
        """Recompute default purchase and sale prices for storable products.

        :param product_ids: ids of the product records to refresh.
        """
        SPLIT = 500
        search_domain = [
            ('id', 'in', product_ids),
            ('type', '=', 'product'),
        ]

        # Purchasable or saleable products: refresh the purchase price.
        products = self.search(
            search_domain + [
                '|',
                ('purchase_ok', '=', True),
                ('sale_ok', '=', True),
            ]
        )
        self._update_prices_in_batches(
            products, 'update_default_purchase_price', SPLIT)

        # Saleable products: refresh the sale price.
        products = self.search(search_domain + [
            ('sale_ok', '=', True),
        ])
        self._update_prices_in_batches(
            products, 'update_default_sell_price', SPLIT)

    def _update_prices_in_batches(self, products, method_name, split):
        """Call ``method_name`` on ``products`` in chunks of ``split``
        records, committing after each chunk so a long run can be resumed.

        Factored out of ``_update_default_prices``, which duplicated this
        loop twice.
        """
        total = len(products.ids)
        idx = 0
        for ids in pb(list(split_every(split, products.ids))):
            _logger.info(
                'Processing (%d -> %d)/%d', idx,
                min(idx + split, total), total
            )
            idx += split
            getattr(self.browse(ids), method_name)()
            self.env.cr.commit()
Beispiel #15
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Scheduler workload: orderpoints, move reservation, quant merge."""
        # Minimum stock rules.
        self.sudo()._procure_orderpoint_confirm(
            use_new_cursor=use_new_cursor, company_id=company_id)

        # Reserve every confirmed move with a non-zero quantity, most urgent
        # first, 100 at a time.
        move_model = self.env['stock.move']
        move_ids = move_model.search(
            [('state', '=', 'confirmed'), ('product_uom_qty', '!=', 0.0)],
            limit=None,
            order='priority desc, date_expected asc').ids
        for chunk in split_every(100, move_ids):
            move_model.browse(chunk)._action_assign()
            if use_new_cursor:
                self._cr.commit()

        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants.
        self.env['stock.quant']._merge_quants()
Beispiel #16
0
    def _procure_orderpoint_confirm(self, use_new_cursor=False, company_id=None):
        """ Create procurements based on orderpoints.
        :param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing
            1000 orderpoints.
            This is appropriate for batch jobs only.
        """
        self = self.with_company(company_id)
        domain = self._get_orderpoint_domain(company_id=company_id)
        # search_read with only 'id' avoids prefetching full orderpoint rows;
        # records are browsed batch by batch below instead.
        orderpoints_noprefetch = self.env['stock.warehouse.orderpoint'].search_read(
            domain, fields=['id'], order=self._procurement_from_orderpoint_get_order())
        orderpoints_noprefetch = [orderpoint['id'] for orderpoint in orderpoints_noprefetch]

        for orderpoints_batch in split_every(1000, orderpoints_noprefetch):
            if use_new_cursor:
                # NOTE(review): a fresh cursor is opened for every batch; the
                # commit/close of the previous cursor is not visible in this
                # excerpt (the loop body looks truncated) — confirm upstream.
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))
Beispiel #17
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Run the scheduler workload: orderpoints, move reservation and
        quant merging.

        :param use_new_cursor: when True, commit progress in batches on the
            dedicated cursor opened by the caller.
        :param company_id: optional company to restrict processing to.
        """
        # Minimum stock rules
        self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

        # Search all confirmed stock_moves and try to assign them
        confirmed_moves = self.env['stock.move'].search([('state', '=', 'confirmed'), ('product_uom_qty', '!=', 0.0)], limit=None, order='priority desc, date_expected asc')
        for moves_chunk in split_every(100, confirmed_moves.ids):
            self.env['stock.move'].browse(moves_chunk)._action_assign()
            if use_new_cursor:
                # Commit every 100 moves so a crash does not lose progress.
                self._cr.commit()

        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants
        self.env['stock.quant']._merge_quants()
Beispiel #18
0
    def _notify_send(self, body, subject, recipients, **mail_values):
        """Create outgoing mail.mail records for ``recipients``, 50 at a
        time, propagating the originating message's CC/BCC settings.

        :param body: HTML body of the outgoing mails.
        :param subject: subject of the outgoing mails.
        :param recipients: partner recordset to notify.
        :param mail_values: extra creation values; may contain
            ``mail_message_id`` used to recover the originating message.
        :return: tuple (created mail.mail recordset, number of recipients).
        """
        emails = self.env['mail.mail']
        recipients_nbr = len(recipients)
        for email_chunk in split_every(50, recipients.ids):
            # TDE FIXME: missing message parameter. So we will find mail_message_id
            # in the mail_values and browse it. It should already be in the
            # cache so should not impact performances.
            mail_message_id = mail_values.get('mail_message_id')
            message = self.env['mail.message'].browse(
                mail_message_id) if mail_message_id else None
            if message and message.model and message.res_id and message.model in self.env and hasattr(
                    self.env[message.model], 'message_get_recipient_values'):
                tig = self.env[message.model].browse(message.res_id)
                recipient_values = tig.message_get_recipient_values(
                    notif_message=message, recipient_ids=email_chunk)
            else:
                recipient_values = self.env[
                    'mail.thread'].message_get_recipient_values(
                        notif_message=None, recipient_ids=email_chunk)
            # BUGFIX: the original dereferenced ``message.cc_visible`` /
            # ``message.bcc_visible`` even when no mail_message_id was given
            # (``message`` is None), raising AttributeError. Default both
            # flags to False in that case.
            cc_visible = bool(message and message.cc_visible)
            bcc_visible = bool(message and message.bcc_visible)
            create_values = {
                'body_html': body,
                'subject': subject,
                'cc_visible': cc_visible,
                'bcc_visible': bcc_visible,
            }
            # Set Partner CC & BCC Value In Recipient CC & BCC
            if cc_visible:
                create_values.update({
                    'recipient_cc_ids':
                    [(4, pccid.id) for pccid in message.partner_cc_ids]
                })
            else:
                create_values.update({'recipient_cc_ids': []})

            if bcc_visible:
                create_values.update({
                    'recipient_bcc_ids':
                    [(4, pbccid.id) for pbccid in message.partner_bcc_ids]
                })
            else:
                create_values.update({'recipient_bcc_ids': []})

            create_values.update(mail_values)
            create_values.update(recipient_values)
            emails |= self.env['mail.mail'].create(create_values)
        return emails, recipients_nbr
Beispiel #19
0
    def _notify_send(self, body, subject, recipients, **mail_values):
        """Create outgoing mail.mail records for ``recipients``, 50 at a
        time; for helpdesk tickets, append the original request to the body.

        :param body: HTML body of the outgoing mails.
        :param subject: subject of the outgoing mails.
        :param recipients: partner recordset to notify.
        :param mail_values: extra creation values; may contain
            ``mail_message_id`` used to recover the originating message.
        :return: tuple (created mail.mail recordset, number of recipients).
        """
        emails = self.env['mail.mail']
        recipients_nbr = len(recipients)
        # ``mail_message_id`` does not change per chunk: resolve it once.
        mail_message_id = mail_values.get('mail_message_id')
        message = self.env['mail.message'].browse(
            mail_message_id) if mail_message_id else None
        # BUGFIXES vs the original: (1) the history suffix was re-appended to
        # ``body`` for EVERY chunk of 50 recipients; (2) ``history_body`` was
        # referenced even when no history message existed (NameError); (3)
        # the inner loop variable shadowed ``message``; (4) ``message.model``
        # was dereferenced when ``message`` is None. All fixed below by
        # computing the suffix once, with a guarded, separately-named loop.
        if message and message.model == "helpdesk.support":
            history_messages = self.env['mail.message'].search([
                ("res_id", "=", message.res_id),
                ("message_type", "in", ('email', 'comment'))
            ])
            history_body = ""
            if history_messages:
                message_nr = 1
                for history_message in history_messages:
                    # Only the second message of the thread is quoted.
                    if message_nr == 2:
                        history_body += "<br><br>From: " + str(
                            history_message.email_from
                        ) + '<br><br><div style="padding-left:50px;width:100%">' + str(
                            history_message.body) + "</div>"
                    message_nr = message_nr + 1
            body += "<div>" + history_body + "</div>"
        for email_chunk in split_every(50, recipients.ids):
            if message and message.model and message.res_id and message.model in self.env and hasattr(
                    self.env[message.model], 'message_get_recipient_values'):
                tig = self.env[message.model].browse(message.res_id)
                recipient_values = tig.message_get_recipient_values(
                    notif_message=message, recipient_ids=email_chunk)
            else:
                recipient_values = self.env[
                    'mail.thread'].message_get_recipient_values(
                        notif_message=None, recipient_ids=email_chunk)
            create_values = {
                'body_html': body,
                'subject': subject,
            }
            create_values.update(mail_values)
            create_values.update(recipient_values)

            emails |= self.env['mail.mail'].create(create_values)
        return emails, recipients_nbr
Beispiel #20
0
    def sync_shopify_customers(self):
        """This method used to sync the customers data from Shopify to Odoo.

        Customers modified since the last import date (or all of them on the
        first run) are fetched and split into queues of 150 records each.

        @param : self
        @author: Angel Patel @Emipro Technologies Pvt. Ltd on date 23/10/2019.
        :Task ID: 157065
        :return: list of created queue ids, or False when Shopify returned
            no customer.
        """
        self.shopify_instance_id.connect_in_shopify()
        if not self.shopify_instance_id.shopify_last_date_customer_import:
            customer_ids = shopify.Customer().search(limit=200)
            _logger.info("Imported first 200 Customers.")
            if len(customer_ids) >= 200:
                customer_ids = self.shopify_list_all_customer(customer_ids)
        else:
            customer_ids = shopify.Customer().find(
                updated_at_min=self.shopify_instance_id.shopify_last_date_customer_import)
            if len(customer_ids) >= 200:
                customer_ids = self.shopify_list_all_customer(customer_ids)
        if customer_ids:
            self.shopify_instance_id.shopify_last_date_customer_import = datetime.now()
        if not customer_ids:
            _logger.info(
                'Customers not found in result while the import customers from Shopify')
            return False
        _logger.info('Synced Customers len {}'.format(len(customer_ids)))
        customer_queue_list = []
        data_queue = self.env['shopify.customer.data.queue.ept']

        # split_every() yields a single chunk when there are 150 records or
        # fewer, so the original duplicated "<= 150" branch (and the
        # redundant ``len(customer_ids) > 0`` guard — we returned above when
        # empty) is unnecessary.
        for customer_id_chunk in split_every(150, customer_ids):
            customer_queue_id = data_queue.shopify_create_customer_queue(
                self.shopify_instance_id, "import_process")
            customer_queue = self.shopify_create_multi_queue(
                customer_queue_id, customer_id_chunk)
            customer_queue_list.append(customer_queue.id)
        return customer_queue_list
Beispiel #21
0
    def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
        """Scheduler workload: orderpoints, move reservation, quant tasks."""
        # Minimum stock rules.
        self.sudo()._procure_orderpoint_confirm(
            use_new_cursor=use_new_cursor, company_id=company_id)

        # Try to reserve every move matching the "to assign" domain, most
        # urgent first, committing between chunks on a dedicated cursor.
        moves = self.env['stock.move'].search(
            self._get_moves_to_assign_domain(),
            limit=None, order='priority desc, date_expected asc')
        for batch in split_every(100, moves.ids):
            self.env['stock.move'].browse(batch)._action_assign()
            if use_new_cursor:
                self._cr.commit()

        if use_new_cursor:
            self._cr.commit()

        # Merge duplicated quants.
        self.env['stock.quant']._quant_tasks()
Beispiel #22
0
 def _notify_send(self, body, subject, recipients, **mail_values):
     """Create mail.mail records for ``recipients``, 50 at a time.

     :return: tuple (created mail.mail recordset, number of recipients).
     """
     mails = self.env['mail.mail']
     total = len(recipients)
     for chunk_ids in split_every(50, recipients.ids):
         # TDE FIXME: missing message parameter. So we will find mail_message_id
         # in the mail_values and browse it. It should already be in the
         # cache so should not impact performances.
         msg_id = mail_values.get('mail_message_id')
         msg = None
         if msg_id:
             msg = self.env['mail.message'].browse(msg_id)
         record = False
         if msg and msg.model and msg.res_id:
             record = self.env[msg.model].browse(msg.res_id)
         recipient_values = self.env[
             'mail.thread']._notify_email_recipients_on_records(
                 msg, chunk_ids, records=record)
         vals = {
             'body_html': body,
             'subject': subject,
         }
         vals.update(mail_values)
         vals.update(recipient_values)
         mails |= self.env['mail.mail'].create(vals)
     return mails, total
Beispiel #23
0
    def run_scheduler(self, use_new_cursor=False, company_id=False):
        """ Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
        and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompanies and access rights issues. """
        # NOTE(review): despite the docstring mentioning "(super method)", no
        # super() call is made here -- confirm whether this override is meant
        # to replace the parent logic entirely.
        try:
            if use_new_cursor:
                # Batch-job mode: work on a dedicated cursor so we can commit
                # per chunk without touching the caller's transaction.
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))  # TDE FIXME

            # Minimum stock rules
            self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

            # Search all confirmed stock_moves and try to assign them
            # (most urgent first, reserving in batches of 100 ids).
            confirmed_moves = self.env['stock.move'].search([('state', '=', 'confirmed')], limit=None, order='priority desc, date_expected asc')
            for moves_chunk in split_every(100, confirmed_moves.ids):
                self.env['stock.move'].browse(moves_chunk)._action_assign()
                if use_new_cursor:
                    self._cr.commit()

            # Re-run procurement for moves in exception, one by one, inside a
            # savepoint so one failing move does not abort the whole batch.
            exception_moves = self.env['stock.move'].search(self._get_exceptions_domain())
            for move in exception_moves:
                values = move._prepare_procurement_values()
                try:
                    with self._cr.savepoint():
                        origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or move.picking_id.name or "/")
                        self.run(move.product_id, move.product_uom_qty, move.product_uom, move.location_id, move.rule_id and move.rule_id.name or "/", origin, values)
                except UserError as error:
                    # Business error: record an activity instead of failing.
                    self.env['procurement.rule']._log_next_activity(move.product_id, error.name)

            if use_new_cursor:
                self._cr.commit()
        finally:
            if use_new_cursor:
                # Best-effort close of the dedicated cursor; a close failure
                # must not mask an exception from the body.
                try:
                    self._cr.close()
                except Exception:
                    pass
        return {}
Beispiel #24
0
    def _notify(self,
                message,
                rdata,
                record,
                force_send=False,
                send_after_commit=True,
                model_description=False,
                mail_auto_delete=True):
        """ Method to send email linked to notified messages. The recipients are
        the recordset on which this method is called.

        :param message: mail.message record to notify;
        :param rdata: recipient data (see mail.message _notify);
        :param record: optional record on which the message was posted;
        :param force_send: tells whether to send notification emails within the
          current transaction or to use the email queue;
        :param send_after_commit: if force_send, tells whether to send emails after
          the transaction has been committed using a post-commit hook;
        :param model_description: optional data used in notification process (see
          notification templates);
        :param mail_auto_delete: delete notification emails once sent;
        """
        if not rdata:
            return True

        # Cetmix. Check context: the custom signature placement only applies
        # to the quote/forward wizard modes; anything else uses the parent.
        if not self._context.get("default_wizard_mode",
                                 False) in ['quote', 'forward']:
            return super(PRTPartner,
                         self)._notify(message=message,
                                       rdata=rdata,
                                       record=record,
                                       force_send=force_send,
                                       send_after_commit=send_after_commit,
                                       model_description=model_description,
                                       mail_auto_delete=mail_auto_delete)

        # Get signature location
        signature_location = self._context.get("signature_location", False)

        # After quote ('a'): the default rendering already places the
        # signature after the quoted text, so the parent behavior is fine.
        if signature_location == 'a':
            return super(PRTPartner,
                         self)._notify(message=message,
                                       rdata=rdata,
                                       record=record,
                                       force_send=force_send,
                                       send_after_commit=send_after_commit,
                                       model_description=model_description,
                                       mail_auto_delete=mail_auto_delete)

        base_template_ctx = self._notify_prepare_template_context(
            message, record, model_description=model_description)
        # Cetmix. Get signature: popped so the layout template does not
        # render it a second time; we insert it manually below.
        signature = base_template_ctx.pop("signature", False)
        template_xmlid = message.layout if message.layout else 'mail.message_notification_email'
        try:
            base_template = self.env.ref(template_xmlid,
                                         raise_if_not_found=True).with_context(
                                             lang=base_template_ctx['lang'])
        except ValueError:
            _logger.warning(
                'QWeb template %s not found when sending notification emails. Sending without layouting.'
                % (template_xmlid))
            base_template = False

        # prepare notification mail values
        base_mail_values = {
            'mail_message_id': message.id,
            'mail_server_id': message.mail_server_id.id,
            'auto_delete': mail_auto_delete,
            'references': message.parent_id.message_id if message.parent_id else False,
        }
        if record:
            base_mail_values.update(self.env['mail.thread'].
                                    _notify_specific_email_values_on_records(
                                        message, records=record))

        # classify recipients: actions / no action
        recipients = self.env[
            'mail.thread']._notify_classify_recipients_on_records(
                message, rdata, records=record)

        Mail = self.env['mail.mail'].sudo()
        emails = self.env['mail.mail'].sudo()
        email_pids = set()
        recipients_max = 50  # above this, always go through the email queue
        for group_tpl_values in [
                group for group in recipients.values() if group['recipients']
        ]:
            # generate notification email content
            template_ctx = {**base_template_ctx, **group_tpl_values}
            if base_template:
                mail_body = base_template.render(template_ctx,
                                                 engine='ir.qweb',
                                                 minimal_qcontext=True)
            else:
                # Fix: the layout template was not found -- fall back to the
                # raw message body as the warning above promises, instead of
                # crashing on False.render().
                mail_body = message.body
            mail_body = self.env['mail.thread']._replace_local_links(mail_body)
            mail_subject = message.subject or (message.record_name and
                                               'Re: %s' % message.record_name)

            # Cetmix. Put signature before quote
            if signature_location == 'b' and signature:
                quote_index = mail_body.find("<blockquote")
                if quote_index != -1:
                    mail_body = "%s%s%s" % (mail_body[:quote_index], signature,
                                            mail_body[quote_index:]
                                            )  # legacy mode
                else:
                    # Fix: str.find() returns -1 (truthy) when no quote block
                    # exists; the old truthiness check spliced the signature
                    # just before the last character. Append it at the end.
                    mail_body = "%s%s" % (mail_body, signature)

            # send email
            for email_chunk in split_every(50, group_tpl_values['recipients']):
                recipient_values = self.env[
                    'mail.thread']._notify_email_recipients_on_records(
                        message, email_chunk, records=record)
                create_values = {
                    'body_html': mail_body,
                    'subject': mail_subject,
                }
                create_values.update(base_mail_values)
                create_values.update(recipient_values)
                recipient_ids = [
                    r[1] for r in create_values.get('recipient_ids', [])
                ]
                email = Mail.create(create_values)

                if email and recipient_ids:
                    # Flag the matching notifications as handled by email so
                    # they are not duplicated in the Inbox.
                    notifications = self.env['mail.notification'].sudo(
                    ).search([('mail_message_id', '=',
                               email.mail_message_id.id),
                              ('res_partner_id', 'in', list(recipient_ids))])
                    notifications.write({
                        'notification_type': 'email',
                        'mail_id': email.id,
                        'is_read':
                        True,  # handle by email discards Inbox notification
                        'notification_status': 'ready',
                    })

                emails |= email
                email_pids.update(recipient_ids)

        # NOTE:
        #   1. for more than 50 followers, use the queue system
        #   2. do not send emails immediately if the registry is not loaded,
        #      to prevent sending email during a simple update of the database
        #      using the command-line.
        test_mode = getattr(threading.currentThread(), 'testing', False)
        if force_send and len(emails) < recipients_max and \
                (not self.pool._init or test_mode):
            email_ids = emails.ids
            dbname = self.env.cr.dbname
            _context = self._context

            def send_notifications():
                # Post-commit hook: runs in a fresh cursor/environment so the
                # actual send happens outside the committed transaction.
                db_registry = registry(dbname)
                with api.Environment.manage(), db_registry.cursor() as cr:
                    env = api.Environment(cr, SUPERUSER_ID, _context)
                    env['mail.mail'].browse(email_ids).send()

            # unless asked specifically, send emails after the transaction to
            # avoid side effects due to emails being sent while the transaction fails
            if not test_mode and send_after_commit:
                self._cr.after('commit', send_notifications)
            else:
                emails.send()

        return True
Beispiel #25
0
    def _compute_sheet_tasks(self,
                             use_new_cursor=False,
                             active_id=False,
                             from_date=False,
                             to_date=False,
                             credit_note=False,
                             employee_ids=None):
        """Create and compute payslips for *employee_ids* inside a run.

        :param use_new_cursor: if set, commit after each payslip so a long
            batch survives partial failures (batch-job mode)
        :param active_id: id of the ``hr.payslip.run`` the slips belong to
        :param from_date: payslip period start
        :param to_date: payslip period end
        :param credit_note: flag propagated to every created payslip
        :param employee_ids: list of ``hr.employee`` ids to process
            (defaults to no employees)
        """
        # Fix: avoid a mutable default argument ([]) shared across calls.
        if employee_ids is None:
            employee_ids = []
        intercompany_uid = None
        context = self._context.copy()
        # browse() on a single id yields at most one run; use its company to
        # pick the intercompany user and force the company in context.
        for run in self.env['hr.payslip.run'].browse(active_id):
            company = run.company_id
            intercompany_uid = company.intercompany_user_id and company.intercompany_user_id.id or False
            context['force_company'] = company.id

        payslipModel = self.env['hr.payslip']
        payslips = []
        # Chunk size 1: create (and optionally commit) one employee at a time.
        for employees_chunk in split_every(1, employee_ids):
            _logger.info('--- Nomina Procesando Employees %s', employees_chunk)
            for employee in self.env['hr.employee'].browse(employees_chunk):
                slip_data = payslipModel.onchange_employee_id(
                    from_date, to_date, employee.id, contract_id=False)
                res = {
                    'employee_id': employee.id,
                    'name': slip_data['value'].get('name'),
                    'struct_id': slip_data['value'].get('struct_id'),
                    'contract_id': slip_data['value'].get('contract_id'),
                    'payslip_run_id': active_id,
                    'input_line_ids': [
                        (0, 0, x)
                        for x in slip_data['value'].get('input_line_ids')
                    ],
                    'worked_days_line_ids': [
                        (0, 0, x)
                        for x in slip_data['value'].get('worked_days_line_ids')
                    ],
                    'date_from': from_date,
                    'date_to': to_date,
                    'credit_note': credit_note,
                    'company_id': employee.company_id.id,
                    'cfdi_source_sncf':
                        slip_data['value'].get('cfdi_source_sncf'),
                    'cfdi_amount_sncf':
                        slip_data['value'].get('cfdi_amount_sncf'),
                    'cfdi_tipo_nomina':
                        slip_data['value'].get('cfdi_tipo_nomina'),
                    'cfdi_tipo_nomina_especial':
                        slip_data['value'].get('cfdi_tipo_nomina_especial'),
                }
                payslip_id = payslipModel.create(res)
                if use_new_cursor:
                    self._cr.commit()
                payslips.append(payslip_id.id)

        _logger.info('--- Nomina payslips %s ', len(payslips))
        # Compute each created slip; failures are logged and skipped so the
        # remaining slips still get computed (best-effort batch).
        for slip_chunk in split_every(1, payslips):
            _logger.info('--- Nomina payslip_ids %s ', slip_chunk)
            try:
                payslipModel.with_context(
                    slip_chunk=True).browse(slip_chunk).with_context(
                        context).sudo(intercompany_uid).compute_sheet()
                if use_new_cursor:
                    self._cr.commit()
            except Exception as e:
                _logger.info('------ Error al crear la Nomina %s ' % (e))
        if use_new_cursor:
            self._cr.commit()
Beispiel #26
0
    def _procure_orderpoint_confirm(self,
                                    use_new_cursor=False,
                                    company_id=None):
        """ Create procurements based on orderpoints.
        :param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing
            1000 orderpoints.
            This is appropriate for batch jobs only.
        """
        self = self.with_company(company_id)
        domain = self._get_orderpoint_domain(company_id=company_id)
        # search_read on ids only: avoid prefetching full orderpoint records
        # for what may be a very large result set.
        orderpoints_noprefetch = self.env[
            'stock.warehouse.orderpoint'].search_read(
                domain,
                fields=['id'],
                order=self._procurement_from_orderpoint_get_order())
        orderpoints_noprefetch = [
            orderpoint['id'] for orderpoint in orderpoints_noprefetch
        ]

        # Process orderpoints 1000 at a time; in batch-job mode each batch
        # gets its own cursor committed at the end.
        for orderpoints_batch in split_every(1000, orderpoints_noprefetch):
            if use_new_cursor:
                cr = registry(self._cr.dbname).cursor()
                self = self.with_env(self.env(cr=cr))
            orderpoints_batch = self.env['stock.warehouse.orderpoint'].browse(
                orderpoints_batch)
            orderpoints_exceptions = []
            # Retry loop: on ProcurementException the failed orderpoints are
            # removed from the batch and the remainder is attempted again.
            while orderpoints_batch:

                # Calculate groups that can be executed together
                # (orderpoints sharing the same product context can reuse one
                # _product_available computation).
                orderpoints_contexts = defaultdict(
                    lambda: self.env['stock.warehouse.orderpoint'])

                procurements = []
                for orderpoint in orderpoints_batch:
                    orderpoint_context = orderpoint._get_product_context()
                    # frozendict so the merged context is hashable and can
                    # serve as a defaultdict key.
                    product_context = frozendict({
                        **self.env.context,
                        **orderpoint_context
                    })
                    orderpoints_contexts[product_context] |= orderpoint

                for orderpoint_context, orderpoints_by_context in orderpoints_contexts.items(
                ):
                    substract_quantity = orderpoints_by_context._quantity_in_progress(
                    )
                    product_quantity = orderpoints_by_context.product_id.with_context(
                        orderpoint_context)._product_available()

                    for orderpoint in orderpoints_by_context:
                        op_product_virtual = product_quantity[
                            orderpoint.product_id.id]['virtual_available']
                        if op_product_virtual is None:
                            continue
                        # Below the minimum: compute how much to procure.
                        if float_compare(op_product_virtual,
                                         orderpoint.product_min_qty,
                                         precision_rounding=orderpoint.
                                         product_uom.rounding) <= 0:
                            qty = max(orderpoint.product_min_qty, orderpoint.
                                      product_max_qty) - op_product_virtual
                            remainder = orderpoint.qty_multiple > 0 and qty % orderpoint.qty_multiple or 0.0

                            # Round the quantity up to the next multiple of
                            # qty_multiple when a remainder exists.
                            if float_compare(remainder,
                                             0.0,
                                             precision_rounding=orderpoint.
                                             product_uom.rounding) > 0:
                                qty += orderpoint.qty_multiple - remainder

                            if float_compare(qty,
                                             0.0,
                                             precision_rounding=orderpoint.
                                             product_uom.rounding) < 0:
                                continue

                            # Deduct quantities already being procured.
                            qty -= substract_quantity[orderpoint.id]
                            qty_rounded = float_round(
                                qty,
                                precision_rounding=orderpoint.product_uom.
                                rounding)
                            if qty_rounded > 0:
                                date = datetime.combine(
                                    orderpoint_context.get('to_date'),
                                    time.min)
                                values = orderpoint._prepare_procurement_values(
                                    qty_rounded, date=date)
                                procurements.append(
                                    self.env['procurement.group'].Procurement(
                                        orderpoint.product_id, qty_rounded,
                                        orderpoint.product_uom,
                                        orderpoint.location_id,
                                        orderpoint.name, orderpoint.name,
                                        orderpoint.company_id, values))

                # Run all procurements inside a savepoint so a failure rolls
                # back the whole attempt for this batch.
                try:
                    with self.env.cr.savepoint():
                        self.env['procurement.group'].with_context(
                            from_orderpoint=True).run(procurements,
                                                      raise_user_error=False)
                except ProcurementException as errors:
                    # Collect failed orderpoints, drop them from the batch and
                    # retry the remainder in the next while iteration.
                    for procurement, error_msg in errors.procurement_exceptions:
                        orderpoints_exceptions += [
                            (procurement.values.get('orderpoint_id'),
                             error_msg)
                        ]
                    failed_orderpoints = self.env[
                        'stock.warehouse.orderpoint'].concat(
                            *[o[0] for o in orderpoints_exceptions])
                    if not failed_orderpoints:
                        _logger.error('Unable to process orderpoints')
                        break
                    orderpoints_batch -= failed_orderpoints

                except OperationalError:
                    # DB-level error (e.g. serialization failure): in batch
                    # mode roll back and retry; otherwise let it propagate.
                    if use_new_cursor:
                        cr.rollback()
                        continue
                    else:
                        raise
                else:
                    orderpoints_batch._post_process_scheduler()
                    break

            # Log an activity on product template for failed orderpoints.
            for orderpoint, error_msg in orderpoints_exceptions:
                # Skip if an identical warning activity already exists.
                existing_activity = self.env['mail.activity'].search([
                    ('res_id', '=', orderpoint.product_id.product_tmpl_id.id),
                    ('res_model_id', '=',
                     self.env.ref('product.model_product_template').id),
                    ('note', '=', error_msg)
                ])
                if not existing_activity:
                    orderpoint.product_id.product_tmpl_id.activity_schedule(
                        'mail.mail_activity_data_warning',
                        note=error_msg,
                        user_id=orderpoint.product_id.responsible_id.id
                        or SUPERUSER_ID,
                    )

            if use_new_cursor:
                cr.commit()
                cr.close()

        return {}
Beispiel #27
0
    def _notify(self, message, rdata, record, force_send=False, send_after_commit=True, model_description=False, mail_auto_delete=True):
        """ Method to send email linked to notified messages. The recipients are
        the recordset on which this method is called.

        :param message: mail.message record to notify;
        :param rdata: recipient data (see mail.message _notify);
        :param record: optional record on which the message was posted;
        :param force_send: tells whether to send notification emails within the
          current transaction or to use the email queue;
        :param send_after_commit: if force_send, tells whether to send emails after
          the transaction has been committed using a post-commit hook;
        :param model_description: optional data used in notification process (see
          notification templates);
        :param mail_auto_delete: delete notification emails once sent;
        """
        if not rdata:
            return True

        base_template_ctx = self._notify_prepare_template_context(message, record, model_description=model_description)
        template_xmlid = message.layout if message.layout else 'mail.message_notification_email'
        try:
            base_template = self.env.ref(template_xmlid, raise_if_not_found=True).with_context(lang=base_template_ctx['lang'])
        except ValueError:
            _logger.warning('QWeb template %s not found when sending notification emails. Sending without layouting.' % (template_xmlid))
            base_template = False

        # prepare notification mail values
        base_mail_values = {
            'mail_message_id': message.id,
            'mail_server_id': message.mail_server_id.id,
            'auto_delete': mail_auto_delete,
            'references': message.parent_id.message_id if message.parent_id else False
        }
        if record:
            base_mail_values.update(self.env['mail.thread']._notify_specific_email_values_on_records(message, records=record))

        # classify recipients: actions / no action
        recipients = self.env['mail.thread']._notify_classify_recipients_on_records(message, rdata, records=record)

        Mail = self.env['mail.mail'].sudo()
        emails = self.env['mail.mail'].sudo()
        email_pids = set()
        recipients_max = 50  # above this, always go through the email queue
        for group_tpl_values in [group for group in recipients.values() if group['recipients']]:
            # generate notification email content
            template_ctx = {**base_template_ctx, **group_tpl_values}
            if base_template:
                mail_body = base_template.render(template_ctx, engine='ir.qweb', minimal_qcontext=True)
            else:
                # Fix: fall back to the plain message body when the layout
                # template is missing, as the warning above promises, instead
                # of crashing on False.render().
                mail_body = message.body
            mail_body = self.env['mail.thread']._replace_local_links(mail_body)
            mail_subject = message.subject or (message.record_name and 'Re: %s' % message.record_name)

            # send email
            for email_chunk in split_every(50, group_tpl_values['recipients']):
                recipient_values = self.env['mail.thread']._notify_email_recipients_on_records(message, email_chunk, records=record)
                create_values = {
                    'body_html': mail_body,
                    'subject': mail_subject,
                }
                create_values.update(base_mail_values)
                create_values.update(recipient_values)
                recipient_ids = [r[1] for r in create_values.get('recipient_ids', [])]
                email = Mail.create(create_values)

                if email and recipient_ids:
                    # Mark matching notifications as handled by email so they
                    # are not shown again in the Inbox.
                    notifications = self.env['mail.notification'].sudo().search([
                        ('mail_message_id', '=', email.mail_message_id.id),
                        ('res_partner_id', 'in', list(recipient_ids))
                    ])
                    notifications.write({
                        'is_email': True,
                        'mail_id': email.id,
                        'is_read': True,  # handle by email discards Inbox notification
                        'email_status': 'ready',
                    })

                emails |= email
                email_pids.update(recipient_ids)

        # NOTE:
        #   1. for more than 50 followers, use the queue system
        #   2. do not send emails immediately if the registry is not loaded,
        #      to prevent sending email during a simple update of the database
        #      using the command-line.
        test_mode = getattr(threading.currentThread(), 'testing', False)
        if force_send and len(emails) < recipients_max and \
                (not self.pool._init or test_mode):
            email_ids = emails.ids
            dbname = self.env.cr.dbname
            _context = self._context

            def send_notifications():
                # Post-commit hook: use a fresh cursor/environment so the
                # actual send happens outside the committed transaction.
                db_registry = registry(dbname)
                with api.Environment.manage(), db_registry.cursor() as cr:
                    env = api.Environment(cr, SUPERUSER_ID, _context)
                    env['mail.mail'].browse(email_ids).send()

            # unless asked specifically, send emails after the transaction to
            # avoid side effects due to emails being sent while the transaction fails
            if not test_mode and send_after_commit:
                self._cr.after('commit', send_notifications)
            else:
                emails.send()

        return True
Beispiel #28
0
    def _notify_record_by_email(self,
                                message,
                                recipients_data,
                                msg_vals=False,
                                model_description=False,
                                mail_auto_delete=True,
                                check_existing=False,
                                force_send=True,
                                send_after_commit=True,
                                **kwargs):
        """ Method to send email linked to notified messages.

        :param message: mail.message record to notify;
        :param recipients_data: see ``_notify_thread``;
        :param msg_vals: see ``_notify_thread``;

        :param model_description: model description used in email notification process
          (computed if not given);
        :param mail_auto_delete: delete notification emails once sent;
        :param check_existing: check for existing notifications to update based on
          mailed recipient, otherwise create new notifications;

        :param force_send: send emails directly instead of using queue;
        :param send_after_commit: if force_send, tells whether to send emails after
          the transaction has been committed using a post-commit hook;
        """
        partners_data = [
            r for r in recipients_data['partners'] if r['notif'] == 'email'
        ]
        if not partners_data:
            return True

        model = msg_vals.get('model') if msg_vals else message.model
        model_name = model_description or (
            self.with_lang().env['ir.model']._get(model).display_name
            if model else False)  # one query for display name
        recipients_groups_data = self._notify_classify_recipients(
            partners_data, model_name)

        if not recipients_groups_data:
            return True
        force_send = self.env.context.get('mail_notify_force_send', force_send)

        template_values = self._notify_prepare_template_context(
            message, msg_vals,
            model_description=model_description)  # 10 queries

        email_layout_xmlid = msg_vals.get(
            'email_layout_xmlid') if msg_vals else message.email_layout_xmlid
        template_xmlid = email_layout_xmlid if email_layout_xmlid else 'mail.message_notification_email'
        try:
            base_template = self.env.ref(
                template_xmlid, raise_if_not_found=True).with_context(
                    lang=template_values['lang'])  # 1 query
        except ValueError:
            _logger.warning(
                'QWeb template %s not found when sending notification emails. Sending without layouting.'
                % (template_xmlid))
            base_template = False

        mail_subject = message.subject or (message.record_name
                                           and 'Re: %s' % message.record_name
                                           )  # in cache, no queries
        # prepare notification mail values
        base_mail_values = {
            'mail_message_id':
            message.id,
            'mail_server_id':
            message.mail_server_id.
            id,  # 2 query, check acces + read, may be useless, Falsy, when will it be used?
            'auto_delete':
            mail_auto_delete,
            # due to ir.rule, user have no right to access parent message if message is not published
            'references':
            message.parent_id.sudo().message_id
            if message.parent_id else False,
            'subject':
            mail_subject,
        }
        headers = self._notify_email_headers()
        if headers:
            base_mail_values['headers'] = headers

        Mail = self.env['mail.mail'].sudo()
        emails = self.env['mail.mail'].sudo()

        # loop on groups (customer, portal, user,  ... + model specific like group_sale_salesman)
        notif_create_values = []
        recipients_max = 50
        for recipients_group_data in recipients_groups_data:
            # generate notification email content
            recipients_ids = recipients_group_data.pop('recipients')
            render_values = {**template_values, **recipients_group_data}
            # {company, is_discussion, lang, message, model_description, record, record_name, signature, subtype, tracking_values, website_url}
            # {actions, button_access, has_button_access, recipients}

            if base_template:
                mail_body = base_template.render(render_values,
                                                 engine='ir.qweb',
                                                 minimal_qcontext=True)
            else:
                mail_body = message.body
            mail_body = self._replace_local_links(mail_body)

            # create email
            for recipients_ids_chunk in split_every(recipients_max,
                                                    recipients_ids):
                recipient_values = self._notify_email_recipient_values(
                    recipients_ids_chunk)
                email_to = recipient_values['email_to']
                recipient_ids = recipient_values['recipient_ids']

                create_values = {
                    'body_html': mail_body,
                    'subject': mail_subject,
                    'recipient_ids': [(4, pid) for pid in recipient_ids],
                    'email_bcc': message.email_bcc,
                    'email_cc': message.email_cc,
                    'email_to': message.email_to,
                    'cc_recipient_ids': message.cc_recipient_ids,
                    'bcc_recipient_ids': message.bcc_recipient_ids,
                }
                if email_to:
                    create_values['email_to'] = email_to
                create_values.update(
                    base_mail_values
                )  # mail_message_id, mail_server_id, auto_delete, references, headers
                email = Mail.create(create_values)

                if email and recipient_ids:
                    tocreate_recipient_ids = list(recipient_ids)
                    if check_existing:
                        existing_notifications = self.env[
                            'mail.notification'].sudo().search([
                                ('mail_message_id', '=', message.id),
                                ('notification_type', '=', 'email'),
                                ('res_partner_id', 'in',
                                 tocreate_recipient_ids)
                            ])
                        if existing_notifications:
                            tocreate_recipient_ids = [
                                rid for rid in recipient_ids
                                if rid not in existing_notifications.mapped(
                                    'res_partner_id.id')
                            ]
                            existing_notifications.write({
                                'notification_status':
                                'ready',
                                'mail_id':
                                email.id,
                            })
                    notif_create_values += [
                        {
                            'mail_message_id': message.id,
                            'res_partner_id': recipient_id,
                            'notification_type': 'email',
                            'mail_id': email.id,
                            'is_read': True,  # discard Inbox notification
                            'notification_status': 'ready',
                        } for recipient_id in tocreate_recipient_ids
                    ]
                emails |= email

        if notif_create_values:
            self.env['mail.notification'].sudo().create(notif_create_values)

        # NOTE:
        #   1. for more than 50 followers, use the queue system
        #   2. do not send emails immediately if the registry is not loaded,
        #      to prevent sending email during a simple update of the database
        #      using the command-line.
        test_mode = getattr(threading.currentThread(), 'testing', False)
        if force_send and len(emails) < recipients_max and (not self.pool._init
                                                            or test_mode):
            # unless asked specifically, send emails after the transaction to
            # avoid side effects due to emails being sent while the transaction fails
            if not test_mode and send_after_commit:
                email_ids = emails.ids
                dbname = self.env.cr.dbname
                _context = self._context

                def send_notifications():
                    db_registry = registry(dbname)
                    with api.Environment.manage(), db_registry.cursor() as cr:
                        env = api.Environment(cr, SUPERUSER_ID, _context)
                        env['mail.mail'].browse(email_ids).send()

                self._cr.after('commit', send_notifications)
            else:
                emails.send()

        return True
Beispiel #29
0
    def prepare_data_import_stock_new_api(self):
        """Fetch current stock levels from WooCommerce for exported products.

        Queries the WooCommerce REST API in SKU batches of 100 and builds a
        list of ``{'product_qty': <float>, 'product_id': <product.product>}``
        dicts for storable products whose stock is managed in Woo.  Problems
        (bad HTTP response, unknown SKU, unexpected exception) are recorded
        as ``common.log.lines.ept`` records attached to a single
        ``common.log.book.ept`` instead of raising.

        :return: List of dict
        @author: Pragnadeep Pitroda @Emipro Technologies Pvt. Ltd 16-Nov-2019
        :Task id: 156886
        """
        common_log_obj = self.env["common.log.book.ept"]
        woo_product = self.env['woo.product.product.ept']
        common_log_line_obj = self.env["common.log.lines.ept"]
        model = "woo.product.product.ept"
        model_id = common_log_line_obj.get_model_id(model)
        instance = self.woo_instance_id
        wcapi = instance.woo_connect()
        products_stock = []
        log_line_ids = []
        try:
            woo_products = woo_product.search([
                ('exported_in_woo', '=', True),
                ('woo_instance_id', '=', instance.id),
            ])
            skus = woo_products.mapped('default_code')
            # Restrict the payload to the fields we actually consume.
            product_fields = 'id,name,sku,manage_stock,stock_quantity'
            for sku_chunk in split_every(100, skus):
                res = wcapi.get("products",
                                params={
                                    'sku': ",".join(sku_chunk),
                                    '_fields': product_fields,
                                    'per_page': 100,
                                })
                if res.status_code not in [200, 201]:
                    log_id = common_log_line_obj.create({
                        'model_id': model_id,
                        'message':
                        'Import Stock for products has not proper '
                        'response.\n Response %s' % (res.content)
                    })
                    log_line_ids.append(log_id.id)
                    # BUGFIX: previously res.json() was still called on the
                    # error response; a non-JSON body would raise and abort
                    # all remaining SKU chunks. Log and skip this chunk.
                    continue

                for res_product in res.json():
                    product = woo_products.filtered(
                        lambda x: x.default_code == res_product.get('sku'))
                    if not product:
                        log_id = common_log_line_obj.create({
                            'model_id': model_id,
                            'message':
                            'Import Stock for product %s does not '
                            'exist in odoo' % (res_product.get('sku')),
                        })
                        log_line_ids.append(log_id.id)
                        continue
                    # Only storable products with Woo-managed, non-zero
                    # stock are candidates for the inventory import.
                    if (res_product.get('manage_stock')
                            and res_product.get('stock_quantity')
                            and product.product_id.type == 'product'):
                        products_stock.append({
                            'product_qty': res_product.get('stock_quantity'),
                            'product_id': product.product_id,
                        })

        except Exception as e:
            # Deliberate catch-all: network/API failures must be logged,
            # not propagated (method returns whatever was gathered so far).
            log_id = common_log_line_obj.create({
                'model_id': model_id,
                'message':
                'Import Stock for products not perform.\n Error %s' % (e),
            })
            log_line_ids.append(log_id.id)
        if log_line_ids:
            # Attach all collected log lines to one import log book.
            common_log_id = common_log_obj.create({
                'type': 'import',
                'module': 'woocommerce_ept',
                'woo_instance_id': instance.id,
                'active': True,
            })
            common_log_line_obj.browse(log_line_ids).write(
                {'log_line_id': common_log_id and common_log_id.id or False})
        return products_stock
Beispiel #30
0
    def _notify_record_by_email(self,
                                message,
                                recipients_data,
                                msg_vals=False,
                                model_description=False,
                                mail_auto_delete=True,
                                check_existing=False,
                                force_send=True,
                                send_after_commit=True,
                                **kwargs):
        """Send notification emails for ``message``.

        Near-verbatim copy of Odoo's generic
        ``mail.thread._notify_record_by_email`` — must keep an eye on
        upstream changes.  The Cetmix additions (marked with ``# Cetmix``
        comments) control the signature placement via context keys:

        * ``default_wizard_mode`` not in ``quote``/``forward`` -> delegate
          to super (standard behaviour);
        * ``signature_location == "a"`` -> regular location, delegate to
          super;
        * ``signature_location == "b"`` -> insert the signature before the
          quoted text in the rendered body;
        * other truthy value -> signature removed from the rendered body.
        """

        # Cetmix. Sent from Messages Easy composer?
        if not self._context.get("default_wizard_mode",
                                 False) in ["quote", "forward"]:
            return super(MailThread, self)._notify_record_by_email(
                message, recipients_data, msg_vals, model_description,
                mail_auto_delete, check_existing, force_send,
                send_after_commit, **kwargs)
        # Cetmix. Get signature location
        signature_location = self._context.get("signature_location", False)
        if signature_location == "a":  # Regular signature location
            return super(MailThread, self)._notify_record_by_email(
                message, recipients_data, msg_vals, model_description,
                mail_auto_delete, check_existing, force_send,
                send_after_commit, **kwargs)

        # Keep only the partners that must be notified by email.
        partners_data = [
            r for r in recipients_data["partners"] if r["notif"] == "email"
        ]
        if not partners_data:
            return True

        model = msg_vals.get("model") if msg_vals else message.model
        model_name = model_description or (
            self._fallback_lang().env["ir.model"]._get(model).display_name
            if model else False)  # one query for display name
        recipients_groups_data = self._notify_classify_recipients(
            partners_data, model_name)

        if not recipients_groups_data:
            return True
        force_send = self.env.context.get("mail_notify_force_send", force_send)

        template_values = self._notify_prepare_template_context(
            message, msg_vals,
            model_description=model_description)  # 10 queries
        # Cetmix. Replace signature
        if signature_location:  # Remove signature, we don't need it in values
            signature = template_values.pop("signature", False)
        else:
            signature = False

        # Resolve the notification layout template (custom or default).
        email_layout_xmlid = (msg_vals.get("email_layout_xmlid")
                              if msg_vals else message.email_layout_xmlid)
        template_xmlid = (email_layout_xmlid if email_layout_xmlid else
                          "mail.message_notification_email")
        try:
            base_template = self.env.ref(
                template_xmlid, raise_if_not_found=True).with_context(
                    lang=template_values["lang"])  # 1 query
        except ValueError:
            # Missing layout is not fatal: fall back to the raw body.
            _logger.warning(
                "QWeb template %s not found when sending notification emails."
                " Sending without layout." % (template_xmlid))
            base_template = False

        mail_subject = message.subject or (message.record_name
                                           and "Re: %s" % message.record_name
                                           )  # in cache, no queries
        # prepare notification mail values
        base_mail_values = {
            "mail_message_id":
            message.id,
            "mail_server_id":
            message.mail_server_id.id,
            # 2 query, check acces + read, may be useless, Falsy, when will it be used?
            "auto_delete":
            mail_auto_delete,
            # due to ir.rule, user have no right to access parent message
            # if message is not published
            "references":
            message.parent_id.sudo().message_id
            if message.parent_id else False,
            "subject":
            mail_subject,
        }
        base_mail_values = self._notify_by_email_add_values(base_mail_values)

        Mail = self.env["mail.mail"].sudo()
        emails = self.env["mail.mail"].sudo()

        # Loop over recipient groups (customer, portal, user, ...);
        # notifications are created in one batch at the end.
        notif_create_values = []
        recipients_max = 50
        for recipients_group_data in recipients_groups_data:
            # generate notification email content
            recipients_ids = recipients_group_data.pop("recipients")
            render_values = {**template_values, **recipients_group_data}

            if base_template:
                mail_body = base_template._render(render_values,
                                                  engine="ir.qweb",
                                                  minimal_qcontext=True)
            else:
                mail_body = message.body

            # Cetmix. Put signature before quote?
            if signature and signature_location == "b":
                quote_index = mail_body.find(b"<blockquote")
                # NOTE(review): bytes.find() returns -1 when no quote is
                # found, which is truthy — the signature would then be
                # spliced in before the last byte; an index of 0 would be
                # skipped entirely. Confirm intended behaviour.
                if quote_index:
                    mail_body = (mail_body[:quote_index] +
                                 signature.encode("utf-8") +
                                 mail_body[quote_index:])

            mail_body = self.env["mail.render.mixin"]._replace_local_links(
                mail_body)

            # create email
            for recipients_ids_chunk in split_every(recipients_max,
                                                    recipients_ids):
                recipient_values = self._notify_email_recipient_values(
                    recipients_ids_chunk)
                email_to = recipient_values["email_to"]
                recipient_ids = recipient_values["recipient_ids"]

                create_values = {
                    "body_html": mail_body,
                    "subject": mail_subject,
                    "recipient_ids": [(4, pid) for pid in recipient_ids],
                }
                if email_to:
                    create_values["email_to"] = email_to
                create_values.update(
                    base_mail_values
                )  # mail_message_id, mail_server_id, auto_delete, references, headers
                email = Mail.create(create_values)

                if email and recipient_ids:
                    tocreate_recipient_ids = list(recipient_ids)
                    if check_existing:
                        # Reuse existing email notifications for this
                        # message instead of creating duplicates.
                        existing_notifications = (
                            self.env["mail.notification"].sudo().search([
                                ("mail_message_id", "=", message.id),
                                ("notification_type", "=", "email"),
                                ("res_partner_id", "in",
                                 tocreate_recipient_ids),
                            ]))
                        if existing_notifications:
                            tocreate_recipient_ids = [
                                rid for rid in recipient_ids
                                if rid not in existing_notifications.mapped(
                                    "res_partner_id.id")
                            ]
                            existing_notifications.write({
                                "notification_status":
                                "ready",
                                "mail_id":
                                email.id
                            })
                    notif_create_values += [
                        {
                            "mail_message_id": message.id,
                            "res_partner_id": recipient_id,
                            "notification_type": "email",
                            "mail_id": email.id,
                            "is_read": True,  # discard Inbox notification
                            "notification_status": "ready",
                        } for recipient_id in tocreate_recipient_ids
                    ]
                emails |= email

        if notif_create_values:
            self.env["mail.notification"].sudo().create(notif_create_values)

        # NOTE:
        #   1. for more than 50 followers, use the queue system
        #   2. do not send emails immediately if the registry is not loaded,
        #      to prevent sending email during a simple update of the database
        #      using the command-line.
        test_mode = getattr(threading.currentThread(), "testing", False)
        if (force_send and len(emails) < recipients_max
                and (not self.pool._init or test_mode)):
            # unless asked specifically, send emails after the transaction to
            # avoid side effects due to emails being sent while the transaction fails
            if not test_mode and send_after_commit:
                email_ids = emails.ids
                dbname = self.env.cr.dbname
                _context = self._context

                # Registered as a post-commit hook: runs with a fresh
                # cursor after the current transaction commits.
                @self.env.cr.postcommit.add
                def send_notifications():
                    db_registry = registry(dbname)
                    with api.Environment.manage(), db_registry.cursor() as cr:
                        env = api.Environment(cr, SUPERUSER_ID, _context)
                        env["mail.mail"].browse(email_ids).send()

            else:
                emails.send()

        return True
Beispiel #31
0
# Sample of the language-list output shown above:
# [('sq_AL', 'Albanian / Shqip'),
#  ('am_ET', 'Amharic / አምሃርኛ'),
#  ('ar_SY', 'Arabic / الْعَرَبيّة'),
#  ('eu_ES', 'Basque / Euskara'),
#  ('bs_BA', 'Bosnian / bosanski jezik'),
#
#  ..]

# human_size() renders a byte count as a human-readable string.
misc.human_size(1024 * 10)  # -> 10.00 Kb
misc.human_size(1024 * 10000)  # -> 9.77 Mb
misc.human_size(1024 * 100000000)  # -> 95.37 Gb

# split_every() yields the iterable in fixed-size tuples.
for chunk in misc.split_every(n=2, iterable=['a', 'b', 'c', 'd']):
    print(chunk)
    # ('a', 'b')
    # ('c', 'd')
misc.groupby([{
    'first_name': 'Maris',
    'last_name': 'Riediger'
}, {
    'first_name': 'Katya',
    'last_name': 'Nikitko'
}, {
    'first_name': 'Zhenya',
    'last_name': '1'
}, {
    'first_name': 'Zhenya',