Example #1
def open_xml(file_text, _type=None, parser=False,
             avoid_unicode_error=True, repair=True, print_error=True):
    if parser is False:
        parser = etree.XMLParser(recover=True)
    node = None
    try:
        node = etree.XML(file_text, parser=parser)
    except Exception as e:
        if avoid_unicode_error and 'Unicode strings with ' \
                                   'encoding declaration are ' \
                                   'not supported.' in str(e):
            try:
                declaration_end = file_text.index('?>') + 2
                node = etree.XML(file_text[declaration_end:],
                                 parser=parser)
            except Exception as e2:
                logger.error(format_exception(e2))
                node = None
        if node is None:
            raise Warning(format_exception(e))
    if repair:
        if _type is None:
            _type = (node.xpath("//*[local-name() = 'ControlReference']"
                                "/*[local-name() = 'Type']") or
                     node.xpath("//ControlReference/Type"))[0].text.lower()
        err = validate_xml(_type.lower(), node, print_error)
        if err:
            try:
                node = repair_xml_file(node, _type.lower(),
                                       print_error=print_error)
            except Exception as e:
                raise Warning('{0}: {1}'.format(err,
                                                format_exception(e)))
    return node
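The avoid_unicode_error branch above exists because lxml refuses to parse a unicode string that still carries an encoding declaration. A minimal standalone sketch of that behaviour and of the two usual workarounds (illustrative only, not part of the example above):

from lxml import etree

text = u'<?xml version="1.0" encoding="UTF-8"?><root><child/></root>'
try:
    etree.XML(text)
except ValueError:
    # Workaround 1: strip the declaration, as open_xml() does above.
    node = etree.XML(text[text.index('?>') + 2:])
    # Workaround 2: encode the unicode string to bytes before parsing.
    node = etree.XML(text.encode('utf-8'))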
Example #2
def open_xml(file_text,
             _type=None,
             print_error=True,
             repair=True,
             parser=None):
    try:
        node = etree.XML(str(file_text), parser=parser)
    except Exception as e:
        raise Warning(format_exception(e))
    if repair:
        if _type is None:
            _type = (node.xpath(
                "//*[local-name() = 'ControlReference']/*[local-name() = 'Type']"
            ) or node.xpath("//ControlReference/Type"))[0].text.lower()
        err = validate_xml(_type.lower(), node, print_error)
        if err:
            try:
                node = repair_xml_file(node, _type.lower())
            except Exception as e:
                raise Warning('{0}: {1}'.format(err, format_exception(e)))
    return node
Example #3
    def _print_pdf_to_pcl(self, cr, uid, internal_path, context=None):
        ''' Gets a PDF and converts it to a PCL. Everything is done using system calls.
            It is highly heuristic and is intended only as a _temporary_ solution.
        '''
        if context is None:
            context = {}

        # Gets the parameters that we need to do the conversion.
        printer_name = self.get_param('invoice_pcl_printer_name',
                                      required=True)
        printer_output_file = self.get_param('invoice_pcl_printer_destination',
                                             required=True)
        silent_mode = self.get_param('invoice_pcl_printer_silent_printing',
                                     required=False)

        try:
            # Launches the command to print the file.
            # This prints to the file indicated by variable 'printer_output_file'.
            if silent_mode:
                commands = ['lp', '-s', '-d', printer_name, internal_path]
            else:
                commands = ['lp', '-d', printer_name, internal_path]
            output = subprocess.check_output(commands)

            # Gets the name of the job sent.
            # It parses a string of the form: xxx xxx JOB_NAME   (xx xxxx)
            job_name = output.split('(')[0].strip().split(' ')[-1]

            # Keeps waiting for the job to complete.
            job_is_completed = False
            while not job_is_completed:
                output = subprocess.check_output(
                    ['lpstat', '-W', 'not-completed'])
                if len(output) == 0:
                    job_is_completed = True
                else:
                    job_lines = output.split('\n')
                    job_was_found = False
                    for job_line in job_lines:
                        if (job_line.strip() != '') and (job_line.split(' ')[0]
                                                         == job_name):
                            job_was_found = True
                    if not job_was_found:
                        job_is_completed = True

        except Exception as e:
            raise Exception(
                'There was a problem while converting the PDF to a PCL: {0}'.
                format(format_exception(e)))

        return printer_output_file
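The job name above is extracted from the reply of lp; assuming the typical CUPS response of the form "request id is <printer>-<job> (1 file(s))", the parsing step boils down to:

# Hypothetical lp reply; the printer and job names are made up for illustration.
output = 'request id is Invoices-PCL-482 (1 file(s))'
job_name = output.split('(')[0].strip().split(' ')[-1]
assert job_name == 'Invoices-PCL-482'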
Example #4
    def _process_event(self,
                       cr,
                       uid,
                       ids,
                       func,
                       event_code,
                       warehouse_id,
                       context=None):
        ''' If something goes wrong, an issue is logged against the current warehouse.
            The function passed as the argument 'func' is the one that must log an issue for each
            event that yielded an error.
        '''
        if context is None:
            context = {}

        project_issue_obj = self.pool.get('project.issue')
        stock_event_obj = self.pool.get('stock.event')

        event_ids = stock_event_obj.search(
            cr,
            uid, [
                ('warehouse_id', '=', warehouse_id),
                ('event_code', '=', event_code),
                ('state', '=', EVENT_STATE_DRAFT),
                ('error', '=', False),
            ],
            context=context)

        try:
            processed_event_ids = func(cr, uid, ids, event_ids, ctx=context)
        except Exception as e:
            error_message = "Warehouse with ID={0}: Error on event {1}, over events {2}: {3}".format(
                warehouse_id, event_code, event_ids, format_exception(e))
            logger.error(error_message)
            project_issue_obj.create_issue(cr,
                                           uid,
                                           'stock.warehouse',
                                           warehouse_id,
                                           error_message,
                                           context=context)
            raise

        logger.debug("{0} Event {1}: processed {2} of {3} events".format(
            warehouse_id, event_code, len(processed_event_ids),
            len(event_ids)))
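The function passed as func must accept the cursor, user, connector ids and the event ids, and return the ids of the events it actually processed. A hypothetical callback with the expected shape (names are illustrative):

    def _process_some_event(self, cr, uid, ids, event_ids, ctx=None):
        # Hypothetical callback: return the IDs of the events that were
        # processed; it is expected to log an issue for each failing event.
        processed_event_ids = []
        for event_id in event_ids:
            processed_event_ids.append(event_id)
        return processed_event_ids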
Example #5
def validate_xml(schema_name, xml_node, print_error=True):
    global schemas
    if schema_name not in schemas:
        try:
            with codecs.open(schema_paths[schema_name], 'r', 'UTF-8') as f:
                schemas[schema_name] = etree.XMLSchema(etree.parse(f))
        except Exception as e:
            err = "Schema {0} not found:\n{1}".format(schema_name, format_exception(e))
            if print_error:
                logger.error(err)
            return err
    if not schemas[schema_name].validate(xml_node):
        if print_error:
            logger.error("[{0}] Error validating node: {1}".format(schema_name, schemas[schema_name].error_log.last_error))
            logger.error('-' * 15)
            logger.error(xml_to_string(xml_node))
            logger.error('-' * 15)
        return schemas[schema_name].error_log.last_error
    else:
        return None
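validate_xml relies on the standard lxml schema API (XMLSchema, validate and error_log). A self-contained sketch of the same flow, independent of the schemas/schema_paths globals used above:

from lxml import etree

schema = etree.XMLSchema(etree.XML(
    '<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">'
    '  <xs:element name="root" type="xs:string"/>'
    '</xs:schema>'))

good_node = etree.XML('<root>ok</root>')
bad_node = etree.XML('<unexpected/>')
assert schema.validate(good_node)
if not schema.validate(bad_node):
    # Same attribute that the function above returns and logs.
    print(schema.error_log.last_error)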
Example #6
    def connection_get_files(self, cr, uid, ids, context=None):
        ''' We GET the files from the server. However, since the server may have a counter set on
            each file to limit the number of times it can be downloaded, and some versions of
            paramiko somehow open a file to check its size when listing it, we download each file
            just once and put it in a temporary folder.
                We download *all* the files from the output folder, from *all* the clients, into
            the temporary folder. Then the files are moved into the archiving folder, taking into
            account the filename template set for each client.
        '''
        if not isinstance(ids, list):
            ids = [ids]
        if context is None:
            context = {}

        connect_obj = self.pool.get('stock.connect')
        file_obj = self.pool.get('stock.connect.file')
        project_issue_obj = self.pool.get('project.issue')

        for connection in connect_obj.browse(cr, uid, ids, context):

            # We check that the folder where the downloaded files will be archived exists;
            # otherwise we raise an exception.
            if not os.path.exists(connection.local_archive_input_dir):
                error_message = _(
                    'Folder {0} does not exist on the local machine, and it is needed to archive the downloaded files.'
                ).format(connection.local_archive_input_dir)
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect',
                                               connection.id,
                                               error_message,
                                               context=context)
                raise orm.except_orm(_('Error'), error_message)

            if not os.path.exists(connection.local_archive_input_dir_temporal):
                error_message = _(
                    'Folder {0} does not exist on the local machine, and it is needed to archive the downloaded files.'
                ).format(connection.local_archive_input_dir_temporal)
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect',
                                               connection.id,
                                               error_message,
                                               context=context)
                raise orm.except_orm(_('Error'), error_message)

            if self._name == 'stock.connect' and connection.type:
                pool = self.pool['stock.connect.{0}'.format(connection.type)]
                pool.connection_get_files(cr, uid, connection.id, context)
            else:
                logger.debug("Standard get files behaviour")

                # Tests the connection. If the test fails, it logs an issue.
                # If 'show_errors' is set in the context, then no exception is
                # raised (this is intended for when downloading the files is the
                # first step of processing files and events and submitting new files).
                try:
                    connection.connect_transport_id.test_connection()
                except Exception as e:
                    connection.log_issue(_ISSUE_NO_CONNECTION, exception=e)
                    project_issue_obj.create_issue(cr,
                                                   uid,
                                                   'stock.connect',
                                                   connection.id,
                                                   format_exception(e),
                                                   context=context)
                    if context.get('show_errors', False):
                        logger.error(format_exception(e))
                        break
                    else:
                        raise

                # Connects to the server and downloads all the files which are there.
                try:
                    pattern = re.compile(connection.remote_file_template)

                    mutex.acquire()
                    con = connection.connect_transport_id.create_connection()
                    con.open()

                    # Lists all the files in the remote incoming folder.
                    list_result = con.list(connection.remote_input_dir)
                    for path in list_result:
                        _name = path.split('/')[-1]
                        if _name:
                            # Each file is downloaded to a local temporary folder.
                            # We do this so that we can empty the remote folder in a safe way,
                            # by keeping a copy of the processed files.
                            remote_file_path = os.path.join(
                                connection.remote_input_dir,
                                _name).replace('//', '/')
                            temporal_local_file_path = os.path.join(
                                connection.local_archive_input_dir_temporal,
                                _name).replace('//', '/')
                            con.get(remote_file_path, temporal_local_file_path)

                            # Once we have copied it to the local folder used to archive the files,
                            # we delete it from the server, but only if it was actually copied locally.
                            if os.path.exists(temporal_local_file_path):
                                con.remove(remote_file_path)

                    # Moves to the archiving folder all the files in the temporary folder
                    # which are already in the database and which match the pattern.
                    #     We do not create the file in the database first because, in that
                    # case, a rollback could leave files that were already archived on disk
                    # missing from the database.
                    for file_name in os.listdir(
                            connection.local_archive_input_dir_temporal):
                        if pattern.match(file_name) and \
                           file_obj.search(cr, uid, [('name', '=', file_name),
                                                     ('stock_connect_id', '=', connection.id),
                                                     ('input', '=', True),
                                                     ], count=True, context=context):
                            temporal_local_file_path = os.path.join(
                                connection.local_archive_input_dir_temporal,
                                file_name).replace('//', '/')
                            destination_file_path = os.path.join(
                                connection.local_archive_input_dir,
                                file_name).replace('//', '/')
                            shutil.copy2(temporal_local_file_path,
                                         destination_file_path)
                            if os.path.exists(destination_file_path):
                                os.remove(temporal_local_file_path)

                    # Stores the files from the temporary folder into the database,
                    # but only if they are not there already and they match the name pattern.
                    for file_name in os.listdir(
                            connection.local_archive_input_dir_temporal):
                        if pattern.match(file_name) and \
                           not file_obj.search(cr, uid, [('name', '=', file_name),
                                                         ('stock_connect_id', '=', connection.id),
                                                         ('input', '=', True),
                                                         ], count=True, context=context):
                            temporal_local_file_path = os.path.join(
                                connection.local_archive_input_dir_temporal,
                                file_name).replace('//', '/')
                            with open(temporal_local_file_path, 'r') as f:
                                file_obj.create(
                                    cr, uid, {
                                        'name': file_name,
                                        'content': f.read(),
                                        'input': True,
                                        'stock_connect_id': connection.id,
                                    }, context)

                except Exception as e:
                    # If there is an exception here, we do re-raise, because the error
                    # might otherwise leave our database corrupted.
                    project_issue_obj.create_issue(cr,
                                                   uid,
                                                   'stock.connect',
                                                   connection.id,
                                                   format_exception(e),
                                                   context=context)
                    raise

                finally:
                    con.close()
                    mutex.release()

        return True
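The archiving step above deliberately copies first and deletes the source only once the copy verifiably exists, so that a failed copy never loses a file. A reduced sketch of that idiom (paths are hypothetical):

import os
import shutil

def archive_file(temporal_path, archive_dir):
    # Copy first; remove the source only once the destination exists.
    destination = os.path.join(archive_dir, os.path.basename(temporal_path))
    shutil.copy2(temporal_path, destination)
    if os.path.exists(destination):
        os.remove(temporal_path)
    return destination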
Example #7
    def process_file_tree(self,
                          cr,
                          uid,
                          ids,
                          context=None,
                          file_id=None,
                          function=None):
        if context is None:
            context = {}
        if not isinstance(ids, list):
            ids = [ids]

        file_obj = self.pool.get('stock.connect.file')
        project_issue_obj = self.pool.get('project.issue')

        if file_id is None:
            r = True
            for _id in ids:
                file_ids = file_obj.search(cr,
                                           uid, [
                                               ('stock_connect_id', '=', _id),
                                               ('parent_file_id', '=', False),
                                           ],
                                           context=context)
                for file_id in file_ids:
                    try:
                        r = self.process_file_tree(cr,
                                                   uid,
                                                   ids,
                                                   context,
                                                   file_id=file_id,
                                                   function=function) and r
                    except Exception as e:
                        raise
            # We return a boolean indicating if there was any error
            # Upper code may do a rollback, process all errors, re-try, etc.
            return r

        else:
            new_cr = self.pool.cursor()
            try:
                file_record = file_obj.browse(cr, uid, file_id, context)
                if file_record.error or file_record.state == FILE_STATE_DRAFT:
                    # errors are returned, and draft files stop the process
                    return False
                elif file_record.state == FILE_STATE_READY:
                    # If there is no error and the file is ready, we process it
                    for subfile in file_record.child_file_ids:
                        if not self.process_file_tree(cr,
                                                      uid,
                                                      ids,
                                                      context,
                                                      file_id=subfile.id,
                                                      function=function):
                            # Errors are sent back
                            return False
                    for subatt in file_record.attachments:
                        if not self.process_file(cr,
                                                 uid,
                                                 ids,
                                                 context,
                                                 file_id=file_id,
                                                 att_id=subatt.id,
                                                 function=function):
                            return False
                    if not self.process_file(
                            cr, uid, ids, context, file_id, function=function):
                        return False
                else:
                    return True
                # Now, we set the file as processed
                file_record.write({'state': FILE_STATE_DONE, 'error': False})

            except Exception as e:
                # Logs a software issue and stores the cause of the error.
                error_message = _(
                    'An exception occurred with stock.connect.file with ID={0}: {1}'
                ).format(file_record.id, format_exception(e))
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect.file',
                                               file_record.id,
                                               error_message,
                                               context=context)
                file_obj.write(new_cr,
                               uid,
                               file_record.id, {
                                   'error': True,
                                   'info': error_message
                               },
                               context=context)
                raise

            finally:
                new_cr.commit()
                new_cr.close()

        # When we get here, everything is fine
        return True
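process_file_tree walks the file tree depth-first: children (and attachments) are processed before their parent, and any failure short-circuits the whole branch. The bare pattern, stripped of the ORM plumbing, looks roughly like this:

def process_tree(node, process):
    # Process the children first; a failure anywhere aborts the branch.
    for child in node.get('children', []):
        if not process_tree(child, process):
            return False
    # Only then process the node itself.
    return process(node)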
Example #8
    def _process_stock_picking_assigned(self,
                                        cr,
                                        uid,
                                        ids,
                                        event_ids,
                                        ctx=None):
        if ctx is None:
            ctx = {}
        ctx['check_date_ready_for_export'] = True

        if isinstance(ids, list) and len(ids) > 1:
            ret = []
            for x in ids:
                ret.extend(
                    self._process_stock_picking_assigned(cr,
                                                         uid,
                                                         x,
                                                         event_ids,
                                                         ctx=ctx))
            return ret

        conf_data = self.pool.get('configuration.data').get(cr,
                                                            uid, [],
                                                            context=ctx)
        today = datetime.today()
        env = [self.pool, cr, uid]

        if ctx:
            context = ctx.copy()
        else:
            context = {}

        if 'stock_connect_id' not in context:
            context['stock_connect_id'] = ids[0]
        if 'yc_min_number_attachments' not in context:
            context['yc_min_number_attachments'] = 2

        wab_factory = get_factory(env, 'wab', context=context)
        wbl_factory = get_factory(env, 'wbl', context=context)
        picking_obj = self.pool['stock.picking']
        stock_event_obj = self.pool['stock.event']
        file_obj = self.pool['stock.connect.file']
        stock_connect_obj = self.pool['stock.connect']
        project_issue_obj = self.pool['project.issue']

        ret = []
        this = self.pool['stock.connect'].browse(cr, uid, ids[0], context)
        stock_events_ignored = []  # Stores the events to ignore.
        for event_id in event_ids:

            error_message = None

            event = stock_event_obj.browse(cr, uid, event_id, context=context)
            picking_id = event.res_id
            picking = picking_obj.browse(cr, uid, picking_id, context)

            if conf_data.yc_ignore_events_until_process_date:
                if not (picking.process_date):
                    logger.debug(
                        "Recomputing process_date for picking {0}".format(
                            picking.name))
                    self.env.add_todo(picking_obj._fields['process_date'],
                                      picking)
                    picking_obj.recompute()
                if not (picking.process_date) or datetime.strptime(
                        picking.process_date,
                        DEFAULT_SERVER_DATETIME_FORMAT) >= today:
                    event.write({"info": "Ignored until process date is met."})
                    continue

            # Back-orders are never processed
            if picking.do_not_send_to_warehouse or (picking.state !=
                                                    'assigned'):
                stock_events_ignored.append(event)
                event.write({"info": "Ignored until ready to be sent."})
                continue

            #picking_type = None

            factory = None

            if picking.sale_id and picking.type in ['outgoing', None]:
                factory = wab_factory
            elif picking.purchase_id and picking.type in ['incoming', None]:
                factory = wbl_factory
            else:
                factory = None
            context['warehouse_id'] = event.warehouse_id.id

            try:
                new_cr = self.pool.cursor()
                if not factory:
                    raise Warning(
                        _('This stock.picking cannot be processed: it has neither a related purchase order nor a related sale order'
                          ))

                related_items = factory.get_related_items(picking_id)
                related_files = []

                product_ids = []
                if self.is_type_enabled(cr,
                                        uid,
                                        this.id,
                                        'art',
                                        context=context):
                    for product_id in related_items.get(
                            'product.product', False) or []:
                        msg = None
                        res = self.get_last_file_for_record(cr,
                                                            uid,
                                                            this.id,
                                                            'product.product',
                                                            product_id,
                                                            _type='art',
                                                            context=context)
                        if not res:
                            msg = 'Missing'
                            product_ids.append(product_id)
                        else:
                            if not res.server_ack or res.state != 'done':
                                msg = 'Pending'
                            elif this.yc_hours_between_art_files:
                                delta = timedelta(
                                    hours=this.yc_hours_between_art_files)
                                filedate = datetime.strptime(
                                    res.create_date,
                                    DEFAULT_SERVER_DATETIME_FORMAT)
                                if filedate + delta < datetime.today():
                                    msg = 'Out-of-date'
                                    product_ids.append(product_id)
                        # If there is a pending file, we mark it
                        if msg:
                            related_files.append(
                                ('product.product', product_id, msg))

                    # Here we create the missing documents on which we have a dependency
                    if this.yc_enable_art_ondemand:
                        if this.yc_enable_art_multifile:
                            for p in product_ids:
                                self._process_art_file(cr,
                                                       uid,
                                                       this.id,
                                                       missing_product_ids=[p],
                                                       context=context)
                        elif product_ids:
                            self._process_art_file(
                                cr,
                                uid,
                                this.id,
                                missing_product_ids=product_ids,
                                context=context)

                if related_files:
                    msg = "There are missing files that must be processed before: {0}".format(
                        related_files)
                    event.write({'info': msg})
                    logger.info(msg)
                else:
                    picking_id = factory.generate_files([('id', '=',
                                                          picking_id)])
                    if picking_id:
                        ret.append(event)

            # TODO: Rewrite this.
            except Warning as w:
                error_message = _(
                    'Warning while processing event on stock.picking with ID {0}: {1}'
                ).format(picking_id, format_exception(w))
                if context.get('yc_print_errors', True):
                    logger.error(error_message)
                with api.Environment.manage():
                    project_issue_obj.create_issue(new_cr,
                                                   uid,
                                                   'stock.event',
                                                   event_id,
                                                   error_message,
                                                   context=context)

                    stock_event_obj.write(new_cr,
                                          uid,
                                          event.id, {
                                              'error': True,
                                              'info': error_message
                                          },
                                          context=context)

            # TODO: Rewrite this.

            except Exception as e:
                error_message = _(
                    'Exception while processing event on stock.picking with ID {0}: {1}'
                ).format(picking_id, format_exception(e))
                stock_connect_obj.log_issue(new_cr,
                                            uid,
                                            ids,
                                            error_message,
                                            event_id=event_id,
                                            context=context,
                                            exception=e,
                                            log_issue_no_format=True)
                logger.error(error_message)
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.event',
                                               event_id,
                                               error_message,
                                               context=context)

                uid_exception, context_exception = uid, context
                with api.Environment.manage():
                    stock_connect_obj.log_issue(new_cr,
                                                uid,
                                                ids,
                                                error_message,
                                                event_id=event_id,
                                                context=context,
                                                exception=e,
                                                log_issue_no_format=True)

                    self.env = api.Environment(new_cr, uid_exception,
                                               context_exception)
                    event.write({'error': True, 'info': error_message})

                raise e

            finally:
                new_cr.commit()
                new_cr.close()

        # Sets as done all those events which were correctly processed.
        for event_correctly_processed in ret:
            event_correctly_processed.write({
                'state': EVENT_STATE_DONE,
                'info': ''
            })

        # Sets as ignored the events which must be ignored.
        for event_to_ignore in stock_events_ignored:
            event_to_ignore.write({'state': EVENT_STATE_IGNORED, 'info': ''})

        del ctx['check_date_ready_for_export']
        return [x.id for x in ret]
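The process-date check above compares the stored string against today using the server datetime format; a minimal sketch of that comparison (the format string is the usual OpenERP server format, and the date value is made up):

from datetime import datetime

DEFAULT_SERVER_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
process_date = '2014-06-01 08:30:00'  # hypothetical value of picking.process_date
ready_to_send = (datetime.strptime(process_date, DEFAULT_SERVER_DATETIME_FORMAT)
                 < datetime.today())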
Example #9
    def _process_file(self, cr, uid, ids, xml_type, context=None):
        env = [self.pool, cr, uid]
        this = self._this(cr, uid, ids, context)
        if not this.is_type_enabled(xml_type):
            return
        file_ids = this._find_yc_import_file(xml_type.upper())
        if not file_ids:
            return

        factory = get_factory(env, xml_type.lower(), context=context)
        file_obj = self.pool.get('stock.connect.file')
        stock_connect = self.pool.get('stock.connect').browse(
            cr, uid, ids[0], context)
        project_issue_obj = self.pool.get('project.issue')

        for _file in file_obj.browse(cr, uid, file_ids, context):
            if _file.state != 'draft' or _file.error:
                if stock_connect.log_about_already_existing_files:
                    logger.info('Ignoring {0} file {1}#{2}'.format(
                        xml_type, _file.id, _file.name))
                continue
            error = None

            new_cr = self.pool.cursor()
            try:
                if factory.import_file(_file.content):
                    _file.write({
                        'type': xml_type.lower(),
                        'state': 'done',
                        'info': ''
                    })
                else:
                    error = 'Not success'

            except Warning as w:
                error = '{0} {1}'.format(
                    _('Warning: Error while processing file.'),
                    format_exception(w))
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect.file',
                                               _file.id,
                                               error,
                                               context=context)
                if context.get('yc_print_errors', True):
                    logger.error(error)
                _file.write({'error': True, 'info': error}, context=context)

            except Exception as e:
                error = '{0} {1}'.format(
                    _('Exception: Error while processing file.'),
                    format_exception(e))
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect.file',
                                               _file.id,
                                               error,
                                               context=context)
                logger.error(error)
                file_obj.write(new_cr,
                               uid,
                               _file.id, {
                                   'error': True,
                                   'info': error
                               },
                               context=context)
                print 'error>>>' * 5
                print _file.content
                print '<<<error' * 5
                raise e

            finally:
                new_cr.commit()
                new_cr.close()

            if error:
                _file.write({'error': True, 'info': error})
Example #10
    def cron_send_invoices_to_partner(self, cr, uid, context=None):
        ''' Sends the invoices to the partner, by email.
        '''
        if context is None:
            context = {}

        # Gets the email template to use to send the invoices to the partner.
        # If no email template is indicated, then no invoices are sent.
        configuration_data = self.pool.get('configuration.data').get(cr, uid, None, context)
        invoice_to_partner_email_template = configuration_data.invoice_to_partner_email_template_id

        if invoice_to_partner_email_template:
            account_invoice_obj = self.pool.get('account.invoice')
            ir_attachment_obj = self.pool.get('ir.attachment')
            mail_template_obj = self.pool.get("email.template")
            mail_mail_obj = self.pool.get('mail.mail')
            project_issue_obj = self.pool.get('project.issue')

            # List of invoices which were successfully sent to the partners.
            successfully_sent_invoice_ids = []

            # Looks for all the invoices which are pending to be sent to the partners.
            account_invoice_ids = account_invoice_obj.search(cr, uid, [('send_invoice_to_partner', '=', 'to_send')], context=context)
            for account_invoice_id in account_invoice_ids:

                # Gets the name of the attachment of the invoice.
                file_name = account_invoice_obj.get_file_name(cr, uid, account_invoice_id, context=context)

                # Gets the ir.attachment
                ir_attachment_id = ir_attachment_obj.search(cr, uid, [('res_model', '=', 'account.invoice'),
                                                                      ('res_id', '=', account_invoice_id),
                                                                      ('name', '=', file_name)], context=context)

                # Generates the email from the template and adds the attachment.
                try:
                    values = mail_template_obj.generate_email(cr, uid, invoice_to_partner_email_template.id, account_invoice_id, context=context)
                    msg_id = mail_mail_obj.create(cr, uid, values, context=context)
                    mail_mail_obj.write(cr, uid, msg_id, {'attachment_ids': [(6, 0, ir_attachment_id)]}, context=context)
                    successfully_sent_invoice_ids.append(account_invoice_id)

                except Exception as e:
                    issue_ids = project_issue_obj.find_resource_issues(cr, uid, 'account.invoice', account_invoice_id, tags=['partner'], create=True, reopen=True, context=context)
                    error_message = _('Account.invoice with ID={0} could not be sent to the partner: {1}').format(account_invoice_id, format_exception(e))
                    for issue_id in issue_ids:
                        project_issue_obj.message_post(cr, uid, issue_id, error_message, context=context)

            # Only those invoices correctly sent are marked as sent.
            self.write(cr, uid, successfully_sent_invoice_ids, {'send_invoice_to_partner': 'sent'}, context=context)

        return True
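The (6, 0, ids) triplet written to attachment_ids above is the ORM command that replaces the full set of linked records with exactly the given IDs; a minimal illustration (the attachment IDs are made up):

ir_attachment_id = [101, 102]  # hypothetical IDs returned by the search above
values = {'attachment_ids': [(6, 0, ir_attachment_id)]}
# mail_mail_obj.write(cr, uid, msg_id, values, context=context)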
Example #11
    def generate_files(self, domain=None, ignore_product_ids=None, force_product_ids=None, multifile=False):
        self.base_priority = -1
        if not multifile:
            # If called in the standard way, we delegate to the typical creation
            self.ignore_product_ids = ignore_product_ids
            self.force_product_ids = force_product_ids
            return xml_abstract_factory.generate_files(self, domain=domain)
        else:
            self.ignore_product_ids = None
            self.force_product_ids = None

        product_domain = ['&',
                          ('id', 'not in', ignore_product_ids or []),
                          ('id', 'in', force_product_ids),
                          ]

        if not force_product_ids:
            product_domain = [product_domain[1]]
        else:
            self.base_priority = 1

        products_to_export = self.pool.get('product.product').search(self.cr, self.uid, product_domain, context=self.context)
        logger.debug("Exporting {0} files for {1} products".format(self._factory_name, len(products_to_export)))
        self.main_file_id = None
        sender = self.get_param('sender', required=True)
        table_model = self.pool[self._table]
        # search_domain = []#[('xml_export_state', '=', 'draft')]
        # For each object that matches the domain, we create its xml file
        object_ids = table_model.search(self.cr, self.uid, domain, context=self.context)
        for _object in table_model.browse(self.cr, self.uid, object_ids, context=self.context):
            main_file_name = self.get_main_file_name(_object)
            if not main_file_name:
                raise Warning(_('Missing filename for main object {0} {1}#{2}').format(_object.name, self._table, _object.id))
            for product_id in products_to_export:
                try:
                    object_id = _object.id
                    # We generate the final filename, according to the task with ID=2922
                    object_filename = "{sender}_{factory_name}_{name}_sub{sub}.xml".format(sender=sender,
                                                                                           factory_name=self._factory_name,
                                                                                           name=export_filename(main_file_name, self.context),
                                                                                           sub=product_id)

                    logger.debug("Exporting xml for {2} {0} into file {1}".format(object_id, object_filename, self._table))
                    # The name of the main xml is appended to each related file
                    self.context['filename_prefix'] = "{0}_".format(object_filename[:-4])
                    # The XML root is generated
                    xml_node = self.generate_root_element(_object, domain=[('id', '=', product_id)])
                    if xml_node is None:
                        continue
                    xml_output = xml_to_string(xml_node, remove_ns=True)
                    # The associated files are copied
                    self.main_file_id = None
                    self.save_file(xml_output, object_filename, main=True, binary=False, record_id=product_id, model='product.product')
                    self.mark_as_exported(_object.id)
                except Warning as e:
                    logger.error("Exception exporting into xml {0}: {1}".format(object_id, format_exception(e)))
                finally:
                    if 'filename_prefix' in self.context:
                        del self.context['filename_prefix']
        return True
Example #12
    def _send_item(self,
                   cr,
                   uid,
                   ids,
                   server,
                   xml_node,
                   action,
                   schema_name=None,
                   context=None):
        """
        @param server: connection proxy to outside server
        @param xml_node: lxml xml element to send
        @param action: SOAP action to query
        @param schema_name: schema to validate the node against before sending, if set.
        @return: xml_return_message, error_message
        """
        try:
            if schema_name:
                r = validate_xml(schema_name,
                                 xml_node,
                                 print_error=bool(server.config.debug))
                if r:
                    return None, r
            xml_kargs = {'pretty_print': True, 'xml_declaration': False}
            this = self.__this(cr, uid, ids, context)
            root = create_root('{{{soapenv}}}Envelope')
            ns = schema_namespaces['soapenv']
            # soapenv:Header
            xml_header = create_element('Header', ns=ns)
            body = create_element('Body', ns=ns)
            body.append(xml_node)
            root.append(xml_header)
            root.append(body)
            out_data = xml_to_string(root, **xml_kargs)
            parser = etree.XMLParser(remove_blank_text=True)
            out_data = xml_to_string(open_xml(out_data,
                                              repair=False,
                                              parser=parser),
                                     pretty_print=True)

            if this.yc_soapsec_key_path:
                out_data = self._sign_xml(out_data,
                                          keyfile=this.yc_soapsec_key_path,
                                          certfile=this.yc_soapsec_cert_path)

            with _soap_debug(this, action) as soap_debug:
                soap_debug.write('SENDING', out_data)
                r, namespace = server.transport.call(
                    server.proxy,
                    out_data,
                    server.namespace,
                    action,
                    encoding=server.encoding,
                    http_proxy=server.http_proxy,
                    config=server.config,
                    timeout=server.timeout)
                soap_debug.write('RECEIVING', r)
            response = etree.fromstring(r)
            body = response.xpath('//soapenv:Body',
                                  namespaces=schema_namespaces)[0]
            fault = body.xpath('soapenv:Fault', namespaces=schema_namespaces)
            if fault:
                return fault[0], xml_to_string(fault[0]).replace(
                    '&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
            return body[0], None
        except Exception as e:
            return None, format_exception(e)
        return None, 'Unknown error'
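The envelope assembled in _send_item follows the usual SOAP 1.1 layout (an Envelope containing a Header and a Body). A self-contained lxml sketch of the same structure, without the create_root/create_element helpers used above (the payload element is illustrative):

from lxml import etree

SOAPENV = 'http://schemas.xmlsoap.org/soap/envelope/'
envelope = etree.Element('{%s}Envelope' % SOAPENV, nsmap={'soapenv': SOAPENV})
envelope.append(etree.Element('{%s}Header' % SOAPENV))
body = etree.SubElement(envelope, '{%s}Body' % SOAPENV)
body.append(etree.XML('<Payload>example</Payload>'))
print(etree.tostring(envelope, pretty_print=True))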
Example #13
    def _process_gen_response(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        if not isinstance(ids, list):
            ids = [ids]

        stock_connect_file_obj = self.pool.get('stock.connect.file')

        stock_connect = self.browse(cr, uid, ids[0], context=context)

        domain = [('stock_connect_id', '=', stock_connect.id),
                  ('model', '=', 'stock.connect.file'),
                  ('state', '=', 'draft'), ('type', '=', False),
                  ('input', '=', True), ('error', '=', False)]
        gen_file_ids = stock_connect_file_obj.search(cr,
                                                     uid,
                                                     domain,
                                                     context=context)
        for gen_file in stock_connect_file_obj.browse(cr,
                                                      uid,
                                                      gen_file_ids,
                                                      context=context):
            # Find the elements and mark them
            if 'GEN_Response>' in gen_file.content:
                gen_file.write({'type': 'gen', 'state': 'ready'})
        domain[2] = ('state', '=', 'ready')
        domain[3] = ('type', '=', 'gen')

        def __read_file(xml_node):
            return {
                'type': nspath(xml_node, 'gen:MessageType')[0].text,
                'ref': nspath(xml_node, 'gen:Reference')[0].text,
                'status_text': nspath(xml_node, 'gen:StatusText')[0].text,
                'status_code': int(nspath(xml_node, 'gen:StatusCode')[0].text),
                'status_type': nspath(xml_node, 'gen:StatusType')[0].text,
            }

        server = self._get_proxy(cr, uid, ids, context=context)
        gen_file_ids = stock_connect_file_obj.search(cr,
                                                     uid,
                                                     domain,
                                                     order='id ASC',
                                                     context=context)
        for gen_file in stock_connect_file_obj.browse(cr,
                                                      uid,
                                                      gen_file_ids,
                                                      context=context):
            # Process all the files
            try:
                original_file = stock_connect_file_obj.browse(
                    cr, uid, gen_file.res_id, context)
                if original_file.server_ack:
                    # Only process if required
                    msg = _("Ignoring Response on behalf old ACK on file")
                    logger.warning(msg)
                    gen_file.write({
                        'error': False,
                        'state': 'cancel',
                        'info': msg
                    })
                    continue
                xml = open_xml(gen_file.content, repair=False)
                values = __read_file(xml)
                if values['status_type'] != 'S' or values['status_code'] > 100:
                    # Don't re-process errors
                    raise Exception(_("Error on Status response"),
                                    values['status_text'])
                elif values['status_code'] == 100:
                    # If it was finished, don't reprocess
                    gen_file.write({
                        'state': 'done',
                        'info': values['status_text'],
                        'internal_index': values['status_code']
                    })
                    original_file.write({'server_ack': True})
                else:
                    # Send a request
                    ns = schema_namespaces['gen_req']
                    xml_req = create_root('{{{gen_req}}}GEN_STATUS')
                    xml_req.append(
                        self._get_control_reference(cr,
                                                    uid,
                                                    ids,
                                                    ns,
                                                    values['type'],
                                                    context=context))
                    xml_req.append(
                        create_element('Reference', text=values['ref'], ns=ns))
                    xml_ret, err_ret = self._send_item(
                        cr,
                        uid,
                        ids,
                        server,
                        xml_req,
                        action='GetInsertArticleMasterDataStatus',
                        schema_name='gen_req',
                        context=context)
                    if err_ret:
                        # Write errors on file
                        gen_file.write({'error': True, 'info': err_ret})
                    else:
                        values_ret = __read_file(xml_ret)
                        if values_ret['status_type'] != 'S' or values_ret[
                                'status_code'] > 100:
                            # Strange status codes are errors
                            gen_file.write({
                                'error':
                                True,
                                'info':
                                xml_to_string(xml_ret),
                                'internal_index':
                                values_ret['status_code']
                            })
                        elif values_ret['status_code'] < 100:
                            # Modify the file if pending, avoiding excess of inputs
                            gen_file.write({
                                'content':
                                xml_to_string(xml_ret),
                                'internal_index':
                                values_ret['status_code']
                            })
                        else:
                            # Propagate end
                            gen_file.write({
                                'state':
                                'done',
                                'content':
                                xml_to_string(xml_ret),
                                'info':
                                values_ret['status_text'],
                                'internal_index':
                                values_ret['status_code']
                            })
                            original_file.write({'server_ack': True})
            except Exception as e:
                logger.error(format_exception(e))
                gen_file.write({'error': True, 'info': format_exception(e)})
Example #14
    def _send_xml_on_soap(self,
                          cr,
                          uid,
                          ids,
                          server,
                          function,
                          action,
                          _type,
                          context=None):
        if context is None:
            context = {}
        file_obj = self.pool.get('stock.connect.file')
        file_ids = file_obj.search(cr,
                                   uid, [('priority', '>=', 0),
                                         ('type', '=', _type),
                                         ('stock_connect_id', '=', ids),
                                         ('input', '=', False),
                                         ('state', '=', 'ready'),
                                         ('parent_file_id', '=', False),
                                         ('error', '=', False)],
                                   context=context)
        file_obj.lock_file(cr, uid, file_ids, context=context)
        for file_id in file_ids:
            err = False
            limit = context['limit_files']
            rets = []
            filename_index = None
            if limit[0] and limit[0] <= limit[1]:
                break
            try:
                cr.execute("SAVEPOINT soap_send_file;")
                _file = file_obj.browse(cr, uid, file_id, context)
                filename_index = _file.internal_index
                xml_root = function(cr, uid, ids, file_id, context=context)
                if not isinstance(xml_root, list):
                    xml_root = [xml_root]
                    filename_index = None
                for xml_part in xml_root:
                    ret, err = self._send_item(cr,
                                               uid,
                                               ids,
                                               server,
                                               xml_part,
                                               action,
                                               schema_name=_type,
                                               context=context)
                    if err:
                        break
                    rets.append(ret)
                cr.execute("RELEASE SAVEPOINT soap_send_file;")
            except Exception as e:
                cr.execute("ROLLBACK TO SAVEPOINT soap_send_file;")
                err = format_exception(e)
            if err:
                self._save_error(cr, uid, ids, err, context)
            else:
                _file = file_obj.browse(cr, uid, file_id, context=context)
                if _file.internal_index <= 0:
                    if _type in _NEED_ACK:
                        _file.write({
                            'internal_index': 0,
                            'state': 'done',
                            'server_ack': False
                        })
                    else:
                        _file.write({'internal_index': 0, 'state': 'done'})
                pos_no = 0
                for ret in rets:
                    pos_no += 1
                    if filename_index is None:
                        name = 'RESPONSE_{0}'.format(_file.name)
                    else:
                        name = 'RESPONSE_sub{0}_{1}'.format(
                            filename_index + pos_no, _file.name)
                    self._save_soap_return(cr, uid, ids, file_id, name, ret,
                                           action, context)
        file_obj.unlock_file(cr, uid, file_ids, context=context)
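Each file above is sent inside its own SQL savepoint, so one failing file only rolls back its own work and the loop can continue with the next one. The bare pattern looks like this (cr is an open database cursor, work() stands for any fallible step):

def send_with_savepoint(cr, work):
    # Isolate one unit of work; a failure only undoes this unit.
    cr.execute("SAVEPOINT soap_send_file;")
    try:
        result = work()
        cr.execute("RELEASE SAVEPOINT soap_send_file;")
        return result, None
    except Exception as e:
        cr.execute("ROLLBACK TO SAVEPOINT soap_send_file;")
        return None, str(e)  # or format_exception(e), as in the code above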
Example #15
    def import_file(self, file_text):

        configuration_data = self.pool.get('configuration.data').get(
            self.cr, self.uid, [])

        logger.debug("Processing WAR file")
        self.success = True
        self.errors = []

        stock_obj = self.pool.get("stock.picking")
        partner_obj = self.pool.get('res.partner')
        stock_move_obj = self.pool.get('stock.move')
        product_obj = self.pool.get('product.product')
        connection_obj = self.pool.get('stock.connect')

        # Gets the warehouse of the YellowCube.
        warehouse = connection_obj.browse(
            self.cr, self.uid, self.connection_id,
            context=self.context).warehouse_ids[0]

        xml = open_xml(file_text, _type='war', print_error=self.print_errors)
        if nspath(xml, '//warr:WAR_List'):
            i = 0
            self.cr.execute("SAVEPOINT yellowcube_war_xml_factory__WARList;")
            for x in nspath(xml, '//warr:WAR_List/warr:WAR'):
                i += 1
                # First, we try to check the records
                try:
                    text = xml_to_string(x)
                    self.import_file(text)
                except Warning as w:
                    self.cr.execute(
                        "ROLLBACK TO SAVEPOINT yellowcube_war_xml_factory__WARList;"
                    )
                    print 'error>>>' * 5
                    print text
                    print '<<<error' * 5
                    raise Warning('Error on sub WAR file number {0}'.format(i),
                                  format_exception(w))
            self.cr.execute(
                "RELEASE SAVEPOINT yellowcube_war_xml_factory__WARList;")
            return True

        validate_xml('war', xml, print_error=False)

        order_header = nspath(xml, "//warr:CustomerOrderHeader")[0]

        customer_order_no = nspath(order_header,
                                   "warr:CustomerOrderNo")[0].text
        stock_ids = stock_obj.search(
            self.cr,
            self.uid,
            [('yellowcube_customer_order_no', '=', customer_order_no),
             ('state', 'in', ['confirmed', 'assigned'])],
            context=self.context)

        # Checks if the stock.picking exists. Otherwise, logs an issue and continues with the next one.
        self._check(
            warehouse,
            len(stock_ids) > 0,
            _("There is not any stock.picking with CustomerOrderNo ={0} in state confirmed or assigned."
              ).format(customer_order_no))
        if not self.success:
            raise Warning(
                'There were some errors in the WAR file: {0}'.format(
                    '\n'.join(self.errors)))

        # Gets the stock picking out associated to this WAR.
        picking_out = stock_obj.browse(self.cr,
                                       self.uid,
                                       stock_ids,
                                       context=self.context)[0]

        # Saves BookingVoucherID and BookingVoucherYear on the stock.move
        goods_issue_header = nspath(xml, "//warr:GoodsIssueHeader")[0]
        booking_voucher_id = nspath(goods_issue_header,
                                    "warr:BookingVoucherID")[0].text
        booking_voucher_year = nspath(goods_issue_header,
                                      "warr:BookingVoucherYear")[0].text
        # TODO: Put this at the end, like in the WBA.
        #         for move_line in picking_out.move_lines:
        #             stock_move_obj.write(self.cr, self.uid, move_line.id, {'booking_voucher_id': booking_voucher_id,
        #                                                                     'booking_voucher_year': booking_voucher_year,
        #                                                                     }, self.context)

        # Validates DepositorNo against the system's parameter. If it does not match, it aborts and logs an issue.
        depositor_no = nspath(goods_issue_header, "warr:DepositorNo")[0].text
        expected_depositor_no = self.get_param('depositor_no', required=True)
        self._check(
            warehouse, expected_depositor_no,
            _("Variable YC DepositorNo is not defined in the configuration data."
              ))
        self._check(
            warehouse, depositor_no == expected_depositor_no,
            _("Configuration variable YC DepositorNo does not match with that of tag 'DepositorNo'"
              ))

        # <YCDeliveryNo>
        yellowcube_delivery_no = nspath(order_header,
                                        "warr:YCDeliveryNo")[0].text
        if yellowcube_delivery_no and picking_out.yellowcube_delivery_no and picking_out.yellowcube_delivery_no != yellowcube_delivery_no:
            self.post_issue(
                warehouse,
                _('YCDeliveryNo {0} does not match its current value {1} in the stock picking.'
                  ).format(yellowcube_delivery_no,
                           picking_out.yellowcube_delivery_no),
                create=True,
                reopen=True)

        if picking_out.yellowcube_delivery_no != yellowcube_delivery_no:
            stock_obj.write(self.cr,
                            self.uid, [picking_out.id],
                            {'yellowcube_delivery_no': yellowcube_delivery_no},
                            context=self.context)

        # <YCDeliveryDate>
        yellowcube_delivery_date = nspath(order_header,
                                          "warr:YCDeliveryDate")[0].text
        if yellowcube_delivery_date and picking_out.yellowcube_delivery_date and picking_out.yellowcube_delivery_date != yellowcube_delivery_date:
            self.post_issue(
                warehouse,
                _('YCDeliveryDate {0} does not match its current value {1} in the stock picking.'
                  ).format(yellowcube_delivery_date,
                           picking_out.yellowcube_delivery_date),
                create=True,
                reopen=True)

        if picking_out.yellowcube_delivery_date != yellowcube_delivery_date:
            stock_obj.write(
                self.cr,
                self.uid, [picking_out.id],
                {'yellowcube_delivery_date': yellowcube_delivery_date},
                context=self.context)

        # <PartnerReference>
        partner_reference = nspath(order_header, "warr:PartnerReference")
        if partner_reference:
            partner_reference = partner_reference[0].text
            if picking_out.partner_id.ref:
                self._check(
                    warehouse, picking_out.partner_id.ref == partner_reference,
                    _('PartnerReference does not match its current value in the stock picking.'
                      ))
            else:
                partner_obj.write(self.cr,
                                  self.uid,
                                  picking_out.partner_id.id,
                                  {'ref': partner_reference},
                                  context=self.context)

        # <PostalShipmentNo>
        carrier_tracking_ref = nspath(order_header,
                                      "warr:PostalShipmentNo")[0].text
        stock_obj.write(self.cr,
                        self.uid, [picking_out.id],
                        {'carrier_tracking_ref': carrier_tracking_ref},
                        context=self.context)

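        # Builds a 1-based position table over the picking's move lines,
        # ordered by id; the CustomerOrderPosNo values read from the WAR
        # below are matched against this ordering.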
        partials = {}
        id_table = {}
        i = 1
        for line in sorted([x.id for x in picking_out.move_lines]):
            id_table[i] = line
            i += 1

        for order_move in nspath(xml, "//warr:CustomerOrderDetail"):
            partial = {}

            pos_no = int(nspath(order_move, "warr:CustomerOrderPosNo")[0].text)

            # Gets the stock.move associated to this line.
            move_line = None
            for line in picking_out.move_lines:
                if line.id == id_table[pos_no]:
                    move_line = line
                    break

            # Checks that the line exists.
            self._check(
                picking_out, move_line is not None,
                _('CustomerOrderPosNo={0}: Mismatch with stock picking line number'
                  ).format(pos_no))
            if not self.success:
                raise Warning('Error parsing WAR file: {0}'.format('\n'.join(
                    self.errors)))

            partials[move_line if V8
                     else "move{0}".format(move_line.id)] = partial

            # Caches the product of the stock.move.
            product_id = move_line.product_id.id
            partial['product_id'] = product_id
            product = product_obj.browse(self.cr, self.uid, product_id,
                                         self.context)

            # <YCArticleNo>
            yc_article_no = nspath(order_move, "warr:YCArticleNo")[0].text
            if product.yc_YCArticleNo:
                self._check(
                    picking_out, product.yc_YCArticleNo == yc_article_no,
                    _('Product {0} (id={1}): YCArticleNo does not match the yc_YCArticleNo of the product.'
                      ).format(product.name, product_id))
            else:
                product_obj.write(self.cr, self.uid, product_id,
                                  {'yc_YCArticleNo': yc_article_no},
                                  self.context)

            # <ArticleNo>
            article_no = nspath(order_move, "warr:ArticleNo")
            if article_no:
                article_no = article_no[0].text
                self._check(
                    picking_out, product.default_code == article_no,
                    _('Product {0} (id={1}): ArticleNo does not match with default_code.'
                      ).format(product.name, product_id))

            # <EAN>
            ean = nspath(order_move, "warr:EAN")
            if ean:
                ean = ean[0].text
                if product.ean13:
                    self._check(
                        picking_out, product.ean13 == ean,
                        _('Product {0} (id={1}): EAN does not match with ean13.'
                          ).format(product.name, product_id))
                else:
                    product_obj.write(self.cr, self.uid, product_id,
                                      {'ean13': ean}, self.context)

            # <Lot>
            lot = nspath(order_move, "warr:Lot")
            if lot:
                lot = lot[0].text

                # Searches for that lot in the system.
                lot_ids = self.pool.get('stock.production.lot').search(
                    self.cr,
                    self.uid, [('name', '=', lot),
                               ('product_id', '=', product_id)],
                    context=self.context)
                if not lot_ids:
                    self._check(
                        warehouse, False,
                        _('Lot {0} for product {1} (id={2}) does not exist in the system'
                          ).format(lot, product.name, product_id))
                elif getattr(move_line,
                             'restrict_lot_id' if V8 else 'prodlot_id'):
                    if self._check(
                            picking_out,
                            getattr(move_line, 'restrict_lot_id'
                                    if V8 else 'prodlot_id').name == lot,
                            _('Product {0} (id={1}): Lot does not match the lot indicated in the original stock.move.'
                              ).format(product.name, product_id)):
                        partial['restrict_lot_id'
                                if V8 else 'prodlot_id'] = lot_ids[0]

            if product.track_outgoing:
                self._check(
                    warehouse, lot,
                    _("The WAR file must contain a lot, otherwise the stock.move can not be updated for product {0}"
                      .format(product.name)))

            # <Plant>
            plant = nspath(order_move, "warr:Plant")[0].text
            current_plant = self.get_param('plant_id', required=True)
            if current_plant:
                self._check(
                    picking_out, current_plant == plant,
                    _('Product {0} (id={1}): Plant does not match with the value of the configuration parameter YC PlantID.'
                      ).format(product.name, product_id))
            else:
                configuration_data.write(self.cr, self.uid,
                                         configuration_data.id,
                                         {'yc_plant_id': plant}, self.context)

            #  <QuantityUOM>
            quantity_uom = float(
                nspath(order_move, "warr:QuantityUOM")[0].text)
            self._check(
                picking_out, move_line.product_qty >= quantity_uom,
                _('Product {0} (id={1}): QuantityUOM is greater than that of the stock.move.'
                  ).format(product.name, product_id))
            partial['product_qty'] = quantity_uom

            # <QuantityISO>
            quantity_iso = nspath(order_move,
                                  "warr:QuantityUOM")[0].attrib['QuantityISO']
            uom_iso_list = self.pool.get('product.uom').search(
                self.cr,
                self.uid, [('uom_iso', '=', quantity_iso)],
                context=self.context)
            if len(uom_iso_list) > 0 and move_line.product_uom and (
                    quantity_iso != move_line.product_uom.uom_iso):
                self._check(
                    picking_out, False,
                    _('Product {0} (id={1}): Attribute QuantityISO does not match the ISO code indicated in the original stock.move.'
                      ).format(product.name, product_id))
            else:
                if not move_line.product_uom:
                    product_uom = uom_iso_list[0]
                    partial['product_uom'] = product_uom
                else:
                    self._check(
                        picking_out,
                        move_line.product_uom.uom_iso == quantity_iso,
                        _('Product {0} (id={1}): Attribute QuantityISO does not match that of the stock.move.'
                          ).format(product.name, product_id))
                    partial['product_uom'] = move_line.product_uom.id

            # Checks <StorageLocation> and <StockType>
            # Notes: Check together with StorageLocation against location_id on stock.move - alarm if wrong.
            #        If free type (' ', '0', 'F') use the StorageLocation, otherwise location YBLK.
            storage_location = nspath(order_move,
                                      "warr:StorageLocation")[0].text
            stock_type = nspath(order_move, "warr:StockType")
            if move_line.location_id or move_line.location_dest_id:
                location_names = []
                if move_line.location_id:
                    location_names.append(move_line.location_id.name)
                if move_line.location_dest_id:
                    location_names.append(move_line.location_dest_id.name)

                if stock_type:
                    # If there exists the tag <StockType>, then we follow the rules.
                    stock_type = stock_type[0].text
                    if stock_type not in ('X', 'S', '2', '3', '0', 'F', ' '):
                        self._check(
                            picking_out, False,
                            _("Product {0} (id={1}): StockType had value '{2}', which is not allowed."
                              ).format(product.name, product_id, stock_type))
                    elif stock_type in ('0', 'F', ' '):
                        self._check(
                            picking_out, storage_location in location_names,
                            _('Product {0} (id={1}): StorageLocation {2} and StockType {3} do not match with the location indicated in the stock.move {4}'
                              ).format(product.name, product_id,
                                       storage_location, stock_type,
                                       location_names))
                    else:
                        self._check(
                            picking_out, 'YBLK' in location_names,
                            _("Product {0} (id={1}): StorageLocation must be 'YBLK' since StockType is not a free type."
                              ).format(product.name, product_id))
                else:
                    # If <StockType> does not exist, it just checks that the values match.
                    if storage_location not in location_names:
                        self._check(
                            picking_out, False,
                            _('Product {0} (id={1}): StorageLocation {2} does not match with the location indicated in the stock.move {3}'
                              ).format(product.name, product_id,
                                       storage_location, location_names))
            else:
                self._check(
                    picking_out, False,
                    _('Product {0} (id={1}): The stock move does not have a location_id.'
                      ).format(product.name, product_id))

            # <Serial Numbers>
            serial_numbers = nspath(order_move, "warr:SerialNumbers")
            if serial_numbers:
                serial_numbers = serial_numbers[0].text
                if move_line.serial_number_scanned:
                    self._check(
                        picking_out,
                        move_line.serial_number_scanned == serial_numbers,
                        _('Product {0} (id={1}): SerialNumbers does not match the serial_number_scanned indicated in the original stock.move.'
                          ).format(product.name, product_id))
                else:
                    stock_move_obj.write(
                        self.cr, self.uid, move_line.id,
                        {'serial_number_scanned': serial_numbers},
                        self.context)

        if self.success:
            picking_id = picking_out.id

            picking_out.message_post(
                _('Imported WAR file BookingVoucherID={0} BookingVoucherYear={1}'
                  ).format(booking_voucher_id, booking_voucher_year))
            if V8:
                for move in partials:
                    vals = partials[move]
                    new_move_id = stock_move_obj.split(
                        self.cr,
                        self.uid,
                        move,
                        vals['product_qty'],
                        restrict_lot_id=vals.get('restrict_lot_id', False),
                        context=self.context)
                    stock_move_obj.action_done(self.cr,
                                               self.uid, [new_move_id],
                                               context=self.context)
            else:
                backorder_id, picking_id = picking_out.wrapper_do_partial(
                    partials)

                # Pickings created by WARs as backorders are not sent to the warehouse, by default.
                if backorder_id:
                    stock_obj.write(self.cr,
                                    self.uid,
                                    backorder_id, {
                                        'do_not_send_to_warehouse': True,
                                    },
                                    context=self.context)

            picking_to_deliver = stock_obj.browse(self.cr,
                                                  self.uid,
                                                  picking_id,
                                                  context=self.context)
            picking_to_deliver.action_done()
            picking_to_deliver.set_stock_moves_done()

            # Stores the values associated to BookingVoucherId and BookingVoucherYear, for reference.
            move_ids = [move.id for move in picking_out.move_lines]
            stock_move_obj.write(
                self.cr, self.uid, move_ids, {
                    'yc_booking_voucher_id': booking_voucher_id,
                    'yc_booking_voucher_year': booking_voucher_year,
                }, self.context)
            self.mark_record(picking_out.id,
                             'stock.picking' if V8 else 'stock.picking.out')

            # The message is sent ONLY if we had success.
#             picking_out.message_post(body=_("""Your order has been shipped through {0} and it can be tracked in the next link:\
#                                             <br/>\
#                                             <a href='https://www.post.ch/swisspost-tracking?formattedParcelCodes={1}'>Track&Trace</a>\
#                                             """).format(picking_out.carrier_id.name, urllib.quote(picking_out.carrier_tracking_ref)),
#                                      type='comment',
#                                      subtype="mail.mt_comment",
#                                      context=self.context,
#                                      partner_ids=picking_out.carrier_id and [picking_out.carrier_id.partner_id.id] or [])
        else:
            raise Warning(
                'There were some errors in the WAR file: {0}'.format(
                    '\n'.join(self.errors)))

        return True
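When the payload carries a WAR_List wrapper, the importer above simply re-enters itself once per child element, with a savepoint around the whole batch. A stripped-down sketch of that dispatch idea, using lxml directly and hypothetical names (this is not the module's own API):

from lxml import etree


def import_document(xml_text, import_single):
    """Import a document that is either a single WAR or a WAR_List wrapper.

    `import_single` is a hypothetical callback that imports one record
    serialized back to text.
    """
    root = etree.fromstring(xml_text)
    # Namespace-agnostic lookup, mirroring the local-name() xpaths used
    # elsewhere in these examples.
    children = root.xpath("//*[local-name() = 'WAR_List']"
                          "/*[local-name() = 'WAR']")
    if not children:
        return import_single(xml_text)
    for position, child in enumerate(children, start=1):
        try:
            import_single(etree.tostring(child))
        except Exception as e:
            raise Warning('Error on sub WAR file number {0}: {1}'
                          .format(position, e))
    return True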
Example #16
0
    def import_file(self, file_text):
        logger.debug("Processing WBA file")
        self.success = True
        self.errors = []

        # Caches the pools.
        product_obj = self.pool.get('product.product')
        stock_obj = self.pool.get('stock.picking')
        stock_move_obj = self.pool.get('stock.move')
        warehouse_obj = self.pool.get('stock.warehouse')
        purchase_order_obj = self.pool.get('purchase.order')
        config_param_obj = self.pool.get('ir.config_parameter')
        connection_obj = self.pool.get('stock.connect')
        stock_production_lot_obj = self.pool.get('stock.production.lot')

        warehouse_id = connection_obj.browse(self.cr, self.uid, self.connection_id, context=self.context).warehouse_ids[0].id
        warehouse = warehouse_obj.browse(self.cr, self.uid, warehouse_id, context=self.context)

        xml = open_xml(file_text, _type='wba', print_error=self.print_errors)
        if nspath(xml, '//wba:WBA_List'):
            i = 0
            self.cr.execute("SAVEPOINT yellowcube_wba_xml_factory__WBAList;")
            for x in nspath(xml, '//wba:WBA_List/wba:WBA'):
                i += 1
                # First, we try to check the records
                try:
                    text = xml_to_string(x)
                    self.import_file(text)
                except Warning as w:
                    self.cr.execute("ROLLBACK TO SAVEPOINT yellowcube_wba_xml_factory__WBAList;")
                    print 'error>>>' * 5
                    print text
                    print '<<<error' * 5
                    raise Warning('Error on sub WBA file number {0}'.format(i), format_exception(w))
            self.cr.execute("RELEASE SAVEPOINT yellowcube_wba_xml_factory__WBAList;")
            return True

        validate_xml('wba', xml, print_error=False)

        imports = []

        # Gets the timestamp.
        timestamp_postgres = self.str_date_to_postgres(nspath(xml, "//wba:Timestamp")[0].text)

        # Header fields (under <GoodsReceiptHeader>)
        header = nspath(xml, "//wba:GoodsReceiptHeader")[0]

        # <BookingVoucherID> and <BookingVoucherYear>.
        booking_voucher_id = nspath(header, "wba:BookingVoucherID")[0].text
        booking_voucher_year = nspath(header, "wba:BookingVoucherYear")[0].text
        supplier_order_no = nspath(header, "wba:SupplierOrderNo")[0].text  # This is the stock.picking's ID.
        picking_in_ids = stock_obj.search(self.cr, self.uid, [('yellowcube_customer_order_no', '=', supplier_order_no),
                                                              ('state', 'not in', ['cancel', 'done']),
                                                              ], context=self.context)

        # Checks if the stock.picking exists. Otherwise, logs an issue and continues with the next one.
        self._check(warehouse, len(picking_in_ids) > 0, _("There is no stock.picking with SupplierOrderNo (id)={0}").format(supplier_order_no))
        if not self.success:
            raise Warning('There were some errors in the WBA file.', self.errors)

        # Gets the stock picking in associated to this purchase order.
        picking_in = stock_obj.browse(self.cr, self.uid, picking_in_ids[0], self.context)

        # <SupplierNo>.
        # We first check if the supplier has a supplier number, and if that's the case we
        # compare against it. Otherwise, we compare against the default supplier number
        # set for the connector.
        supplier_no = nspath(header, "wba:SupplierNo")[0].text
        if picking_in.partner_id.supplier and picking_in.partner_id.yc_supplier_no:
            yc_supplier_no = picking_in.partner_id.yc_supplier_no
            self._check(warehouse, yc_supplier_no == supplier_no, _("Configuration variable YC SupplierNo does not match with that of tag 'SupplierNo' on the supplier."))
        else:
            yc_supplier_no = self.get_param('supplier_no', required=True)
            self._check(warehouse, yc_supplier_no, _("Configuration variable YC SupplierNo is not defined in the system."))
            self._check(warehouse, yc_supplier_no == supplier_no, _("Configuration variable YC SupplierNo does not match with that of tag 'SupplierNo' on the connector."))

        # <SupplierOrderNo>.
        stock_picking_in_count = stock_obj.search(self.cr, self.uid, [('yellowcube_customer_order_no', '=', supplier_order_no)], context=self.context, count=True)
        self._check(warehouse, stock_picking_in_count > 0, _("Stock picking in with ID={0} does not exist in the system, thus can not be processed in the WBA.").format(supplier_order_no))

        id_table = {}
        last_posno = 0
        # Update missing values
        for line in picking_in.move_lines:
            if line.yc_posno:
                if line.yc_posno > last_posno:
                    last_posno = line.yc_posno
            id_table[line.id] = line
        for line_id in sorted([x for x in id_table]):
            line = id_table[line_id]
            if not line.yc_posno:
                last_posno += 1
                line.yc_posno = last_posno
                line.write({'yc_posno': last_posno})
        # Refresh the record
        picking_in = stock_obj.browse(self.cr, self.uid, picking_in_ids[0], self.context)

        for article in nspath(xml, "//wba:GoodsReceiptList/wba:GoodsReceiptDetail"):
            partials = {}
            partial = {}

            # <SupplierOrderPosNo>
            pos_no = int(nspath(article, "wba:SupplierOrderPosNo")[0].text)

            # Gets the stock.move associated to this line.
            move_line = None
            for line in picking_in.move_lines:
                if line.yc_posno == pos_no:
                    move_line = line
                    break

            self._check(picking_in, move_line is not None, _('Mismatch with stock picking line number {0}/{1}').format(pos_no, [x.yc_posno for x in picking_in.move_lines]))
            if not self.success:
                raise Warning('Error parsing WBA file', self.errors)

            partials[move_line if V8 else "move{0}".format(move_line.id)] = partial

            partial['delivery_date'] = timestamp_postgres

            # Caches the product of the stock.move.
            product_id = move_line.product_id.id
            partial['product_id'] = product_id
            product = product_obj.browse(self.cr, self.uid, product_id, self.context)

            # <YCArticleNo>
            yc_article_no = nspath(article, "wba:YCArticleNo")[0].text
            if not product.yc_YCArticleNo:
                product_obj.write(self.cr, self.uid, product_id, {'yc_YCArticleNo': yc_article_no}, self.context)
                product.message_post(_('Product {0} with ID={1} did not have a YCArticleNo, so it was created with value {2}').format(product.name, product_id, yc_article_no))
            else:
                # If the product already had a YCArticleNo, then we check if the values match.
                self._check(warehouse, product.yc_YCArticleNo == yc_article_no, _("The 'YCArticleNo' does not match with the field 'YCArticleNo' of the product."))

            # <ArticleNo>
            article_no = nspath(article, "wba:ArticleNo")
            if article_no:
                article_no = article_no[0].text
                if not product.default_code:
                    product_obj.write(self.cr, self.uid, product_id, {'default_code': article_no}, self.context)
                    product.message_post(_('Product {0} with ID={1} did not have a default_code, so it was created with value {2}').format(product.name, product_id, article_no))
                else:
                    # If the product already has an ArticleNo (field 'default_code' in Odoo), then we check if the values match.
                    self._check(warehouse, product.default_code == article_no,
                                '{0} [{1}!={2}]'.format(_("The 'ArticleNo' does not match with the field 'default_code' of the product."), product.default_code, article_no))

            # <EAN>
            ean = nspath(article, "wba:EAN")
            if ean:
                ean = ean[0].text
                if not product.ean13:
                    product_obj.write(self.cr, self.uid, product_id, {'ean13': ean}, self.context)
                    product.message_post(_('Product {0} with ID={1} did not have an ean13 code, so it was created with value {2}').format(product.name, product_id, ean))
                else:
                    # If the product already has an EAN (field 'ean13' in Odoo) then we check if both values match.
                    self._check(warehouse, product.ean13 == ean, _("The 'EAN' does not match with the field 'ean13' of the product."))

            # <Lot>
            lot_search_domain = [('product_id', '=', product_id)]
            lot = nspath(article, 'wba:Lot')
            if lot:
                lot = lot[0].text
                lot_search_domain.append(('name', '=', lot))

            # <YCLot>
            yc_lot = nspath(article, 'wba:YCLot')
            if yc_lot:
                yc_lot = yc_lot[0].text
#                 lot_search_domain.append(('yellowcube_lot', '=', yc_lot))

            # If a lot was indicated but it does not exist in the system, create it.
            lot_ids = stock_production_lot_obj.search(self.cr, self.uid, lot_search_domain, context=self.context)
            if lot and (not lot_ids):
                lot_id_ = stock_production_lot_obj.create(self.cr, self.uid, {'name': lot,
                                                                              'yellowcube_lot': yc_lot or False,
                                                                              'product_id': product_id,
                                                                              'date': timestamp_postgres},
                                                          self.context)
                lot_ids = [lot_id_]
                lot = stock_production_lot_obj.browse(self.cr, self.uid, lot_id_, self.context)
                lot.message_post(_('Stock.production.lot {0} with ID={1} did not exist, so it was created by VoucherID {2}').format(lot.name, lot.id, booking_voucher_id))

            lot = None
            if lot_ids:
                lot = stock_production_lot_obj.browse(self.cr, self.uid, lot_ids[0], self.context)
            # If <YCLot> exists but the stock production lot does not have it, stores it. If it has it, checks.
            if yc_lot and lot:
                if not lot.yellowcube_lot:
                    stock_production_lot_obj.write(self.cr, self.uid, lot.id, {'yellowcube_lot': yc_lot}, self.context)
                    lot.message_post(_('Stock.production.lot {0} with ID={1} did not have a yellowcube_lot, so it was set with value {2}').format(lot.name, lot.id, yc_lot))
                else:
                    self._check(warehouse, lot.yellowcube_lot == yc_lot, _("YCLot in the WBA does not match with the value of the stock.production.lot"))

            if lot_ids:
                partial['restrict_lot_id' if V8 else 'prodlot_id'] = lot_ids[0]

            if product.track_incoming:
                self._check(warehouse, lot is not None, _("The WBA file must contain a lot, otherwise the stock.move can not be updated for product {0}".format(product.name)))

            # <Plant>
            plant = nspath(article, 'wba:Plant')[0].text
            current_plant = self.get_param('plant_id', required=True)
            if current_plant:
                self._check(warehouse, current_plant == plant, _('Plant does not match with the value of the configuration parameter YC PlantID.'))
            else:
                self.set_param('plant_id', plant)

            #  <QuantityUOM>
            quantity_uom = float(nspath(article, "wba:QuantityUOM")[0].text)
            self._check(picking_in, move_line.product_qty >= quantity_uom, _('Product {0}: QuantityUOM is greater than that of the stock.move.').format(product.name))
            partial['product_qty'] = quantity_uom

            # <QuantityISO>
            quantity_iso = nspath(article, "wba:QuantityUOM")[0].attrib['QuantityISO']
            uom_iso_list = self.pool.get('product.uom').search(self.cr, self.uid, [('uom_iso', '=', quantity_iso)], context=self.context)
            if len(uom_iso_list) > 0 and move_line.product_uom and (quantity_iso != move_line.product_uom.uom_iso):
                self._check(picking_in, False, _('Product {0}: Attribute QuantityISO does not match the ISO code indicated in the original stock.move.').format(product.name))
            else:
                if not move_line.product_uom:
                    product_uom = uom_iso_list[0]
                    partial['product_uom'] = product_uom
                else:
                    self._check(warehouse, move_line.product_uom.uom_iso == quantity_iso, _('Product {0}: Attribute QuantityISO does not match that of the stock.move.').format(product.name))
                    partial['product_uom'] = move_line.product_uom.id

            # Checks <StorageLocation> and <StockType>
            # Notes: Check together with StorageLocation against location_id on stock.move - alarm if wrong.
            #        If free type (' ', '0', 'F') use the StorageLocation, otherwise location YBLK.
            storage_location = nspath(article, "wba:StorageLocation")[0].text
            stock_type = nspath(article, "wba:StockType")
            if move_line.location_id:
                if stock_type:
                    # If there exists the tag <StockType>, then we follow the rules.
                    stock_type = stock_type[0].text

                    if stock_type not in ('X', 'S', '2', '3', '0', 'F', ' '):
                        self._check(picking_in, False, _("Product {0}: StockType had value '{1}', which is not allowed.").format(product.name, stock_type), self.context)
                    elif stock_type in ('0', 'F', ' '):
                        if move_line.location_dest_id.name != storage_location:
                            self._check(picking_in,
                                        False,
                                        _('Product {0}: StorageLocation {1} does not match with the location indicated in the stock.move {2}.').format(product.name,
                                                                                                                                                       storage_location,
                                                                                                                                                       move_line.location_dest_id.name))
                    else:
                        if move_line.location_dest_id.name != 'YBLK':
                            self._check(picking_in, False, _("Product {0}: StorageLocation must be 'YBLK' since StockType is not a free type.").format(product.name))
                else:
                    # If <StockType> does not exist, it just checks that the values match.
                    if move_line.location_dest_id.name != storage_location:
                        self._check(picking_in,
                                    False,
                                    _('Product {0}: StorageLocation {1} does not match with the location indicated in the stock.move {2}.').format(product.name,
                                                                                                                                                   storage_location,
                                                                                                                                                   move_line.location_dest_id.name))
            else:
                self._check(picking_in, False, _('Product {0}: The stock move does not have a location_id.').format(product.name))

            # <EndOfDeliveryFlag>
            if self.success:
                end_of_delivery_flag = nspath(article, "wba:EndOfDeliveryFlag")[0].text
                complete_move_ids = []
                if V8:
                    for move in partials:
                        vals = partials[move]
                        new_move_id = stock_move_obj.split(self.cr,
                                                           self.uid,
                                                           move,
                                                           vals['product_qty'],
                                                           restrict_lot_id=vals.get('restrict_lot_id', False),
                                                           context=self.context)
                        stock_move_obj.action_done(self.cr, self.uid, [new_move_id], context=self.context)
                        complete_move_ids.append(new_move_id)
                else:
                    complete_move_ids = picking_in.do_partial(partials)

                if end_of_delivery_flag == '1':  # delivery is completed.
                    number_of_pending_moves = stock_move_obj.search(self.cr, self.uid, [('picking_id', '=', picking_in.id),
                                                                                        ('state', 'in', ('draft', 'waiting', 'confirmed', 'assigned')),
                                                                                        ], context=self.context, count=True)
                    if number_of_pending_moves > 0:
                        pass  # They don't want this alarm __for the moment__.
                        #self.post_issue(picking_in, _('Tag EndOfDeliveryFlag was set, but there exists some stock move which are not in state finish nor cancelled.'))
                    else:
                        picking_in.action_done()  # Closes the picking.
                # Moves may have been deleted in the process, so we iterate
                # only over those which are kept.
                move_ids = stock_move_obj.search(self.cr, self.uid, [('id', 'in', complete_move_ids)], context=self.context)
                stock_move_obj.write(self.cr, self.uid, move_ids, {'yc_booking_voucher_id': booking_voucher_id,
                                                                   'yc_booking_voucher_year': booking_voucher_year,
                                                                   }, self.context)

        if self.success:
            self.mark_record(picking_in.id, 'stock.picking' if V8 else 'stock.picking.in')
            # Only confirm when received the end of delivery flag
            # picking_in.action_done()
        else:
            raise Warning('There were some errors in the WBA file', self.errors)

        return True
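Matching <SupplierOrderPosNo> back to a concrete stock.move relies on every move line carrying a yc_posno; the importer above assigns sequential numbers on the fly to lines that are missing one. A self-contained sketch of that backfill step, with plain objects standing in for ORM records (only the attribute names come from the code above, the rest is hypothetical):

def backfill_positions(move_lines):
    """Give every move line a stable, unique yc_posno.

    Existing numbers are kept; missing ones are assigned in ascending
    id order, continuing after the highest number already used.
    """
    last_posno = max([line.yc_posno for line in move_lines
                      if line.yc_posno] or [0])
    table = {}
    for line in sorted(move_lines, key=lambda l: l.id):
        if not line.yc_posno:
            last_posno += 1
            line.yc_posno = last_posno
        table[line.yc_posno] = line
    return table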
Example #17
0
    def import_file(self, file_text, only_check=False):
        self.success = True
        self.errors = []
        xml = open_xml(file_text, _type='bur', print_error=self.print_errors)
        if nspath(xml, '//bur:BUR_List'):
            i = 0
            for x in nspath(xml, '//bur:BUR_List/bur:BUR'):
                i += 1
                # First, we try to check the records
                try:
                    text = xml_to_string(x)
                    self.import_file(text, only_check=True)
                except Warning as w:
                    print 'error>>>' * 5
                    print text
                    print '<<<error' * 5
                    raise Warning('Error on sub BUR file number {0}'.format(i),
                                  format_exception(w))
            for x in nspath(xml, '//bur:BUR_List/bur:BUR'):
                self.import_file(xml_to_string(x), only_check=False)
            return True

        validate_xml('bur', xml, print_error=False)
        imports = []
        product_obj = self.pool.get("product.product")
        lot_obj = self.pool.get('stock.production.lot')
        connection_obj = self.pool.get('stock.connect')
        mapping_bur_transactiontypes_obj = self.pool.get(
            'mapping_bur_transactiontypes')
        location_obj = self.pool.get('stock.location')

        # Gets the warehouse of the YellowCube.
        warehouse = connection_obj.browse(
            self.cr, self.uid, self.connection_id,
            context=self.context).warehouse_ids[0]
        # Header fields (under <GoodsReceiptHeader>)
        header = nspath(xml, "//bur:GoodsMovementsHeader")[0]

        # <BookingVoucherID> and <BookingVoucherYear>.
        # TODO: Check or save the value
        booking_voucher_id = nspath(header, "bur:BookingVoucherID")[0].text
        # TODO: Check or save the value
        booking_voucher_year = nspath(header, "bur:BookingVoucherYear")[0].text
        depositor_no = nspath(header, "bur:DepositorNo")[0].text

        self._check(warehouse, depositor_no == self.get_param('depositor_no'),
                    _('Invalid DepositorNo'))

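        # Collects each <BookingDetail> into a plain dict (element) and
        # appends it to imports; the corresponding stock.move records are
        # only created further below, once all checks have passed.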
        for article in nspath(xml, "//bur:BookingList/bur:BookingDetail"):
            partial_success = True
            element = {}

            # YCArticleNo
            element['yc_YCArticleNo'] = nspath(article,
                                               "bur:YCArticleNo")[0].text
            search_domain = [("yc_YCArticleNo", "=", element['yc_YCArticleNo'])
                             ]
            # ArticleNo
            article_no = nspath(article, "bur:ArticleNo")
            if len(article_no) > 0:
                # ArticleNo: only set in the dictionary when needed for the search (this avoids overwriting it)
                element['default_code'] = article_no[0].text
                search_domain = [("default_code", "=", element['default_code'])
                                 ]
            ids = product_obj.search(self.cr,
                                     self.uid,
                                     search_domain,
                                     context=self.context)
            if len(ids) > 0:
                element['id'] = ids[0]
            else:
                element['id'] = -1
            imports.append(element)
            if not self._check(
                    warehouse,
                    len(ids) == 1,
                    _('Invalid search domain {0}').format(search_domain)):
                continue

            product = product_obj.browse(self.cr,
                                         self.uid,
                                         ids,
                                         context=self.context)[0]
            # EAN
            ean13 = nspath(article, 'bur:EAN')
            if ean13:
                element['ean13'] = ean13[0].text
                if product.ean13:
                    partial_success &= self._check(
                        product, product.ean13 == element['ean13'],
                        _('Product EAN13 {0} differs from XML EAN {1}').format(
                            product.ean13, element['ean13']))
            # BVPosNo
            # TODO: Check or save the value
            bv_pos_no = nspath(article, 'bur:BVPosNo')[0].text

            # Plant
            plant = nspath(article, 'bur:Plant')[0].text
            partial_success &= self._check(product,
                                           plant == self.get_param('plant_id'),
                                           _('Mismatching PlantID'))

            # MovePlant
            # TODO: Check or save
            move_plant = nspath(article, 'bur:MovePlant')

            # StorageLocation
            source_location = nspath(article, 'bur:StorageLocation')[0].text

            # MoveStorageLocation
            destination_location = nspath(article, "bur:MoveStorageLocation")
            if destination_location:
                destination_location = destination_location[0].text
            else:
                destination_location = False

            # TransactionType
            transaction_type = nspath(
                article, "bur:TransactionType"
            )[0].text  # Mandatory field, so won't fail.

            # We now determine the origin and destination locations based on the fields
            # StorageLocation, MoveStorageLocation, and TransactionType.
            # Have a look at task with ID=3725 for the algorithm which is copied below:
            # IF StorageLocation is recognized as a valid location in Odoo
            # AND MoveStorageLocation is recognized as a valid location in Odoo
            # THEN use those
            if location_obj.search(self.cr, self.uid, [('name', '=', source_location)], context=self.context, count=True) and \
               destination_location and \
               location_obj.search(self.cr, self.uid, [('name', '=', destination_location)], context=self.context, count=True):
                element['location'] = source_location
                element['destination'] = destination_location
            else:
                # ELSE look up the TransactionType given in the BUR in the configured TransactionType list,
                # and if found and the locations are valid, use them.
                is_mapped, mapped_origin_location, mapped_destination_location = \
                    mapping_bur_transactiontypes_obj.get_mapping(self.cr, self.uid, [], transaction_type, context=self.context)
                if is_mapped and mapped_origin_location and mapped_destination_location:
                    element['location'] = mapped_origin_location.name
                    element['destination'] = mapped_destination_location.name

                else:
                    # ELSE create an issue and stop processing the BUR. After resolving the TransactionType mapping, the import can be restarted.
                    self.success = False  # We know now that we had no success.
                    error_message = _(
                        'Error when importing BUR: StorageLocation and/or MoveStorageLocation were not defined or incorrect, AND '
                        'no correct mapping was defined for TransactionType={0}'
                    ).format(transaction_type)
                    self.post_issue(warehouse, error_message)

            # YCLot
            # TODO: check
            yc_lot = nspath(article, 'bur:YCLot')
            if yc_lot:
                element['yellowcube_lot'] = yc_lot[0].text

            # Lot
            lot = nspath(article, 'bur:Lot')
            if len(lot) > 0:
                element['lot'] = lot[0].text

                lot_id = lot_obj.search(self.cr,
                                        self.uid,
                                        [('product_id', '=', product.id),
                                         ('name', '=', element['lot'])],
                                        context=self.context)
                if not self._check(
                        product,
                        len(lot_id) <= 1,
                        _('Impossible to find a unique lot {0}'.format(
                            element['lot']))):
                    continue
                if not lot_id:
                    values = {'product_id': product.id, 'name': element['lot']}
                    production_date = nspath(article, "bur:ProductionDate")
                    if production_date:
                        values['date'] = self.str_date_to_postgres(
                            production_date[0].text)
                    lot_use_date = nspath(article, "bur:BestBeforeDate")
                    if lot_use_date:
                        values['use_date'] = self.str_date_to_postgres(
                            lot_use_date[0].text)
                    if only_check:
                        lot_id = None
                    else:
                        lot_id = [
                            lot_obj.create(self.cr,
                                           self.uid,
                                           values,
                                           context=self.context)
                        ]
                if lot_id is None and only_check:
                    lot = None
                else:
                    lot = lot_obj.browse(self.cr,
                                         self.uid,
                                         lot_id,
                                         context=self.context)[0]

            # StockType
            element['stock_type'] = nspath(article, 'bur:StockType')[0].text

            # Quantity
            element['qty_available'] = nspath(article,
                                              "bur:QuantityUOM")[0].text

            # QuantityUOM
            qty_uom = nspath(article,
                             "bur:QuantityUOM")[0].attrib['QuantityISO']
            qty_uom_ids = self.pool.get('product.uom').search(
                self.cr,
                self.uid, [('uom_iso', '=', qty_uom)],
                context=self.context)

            partial_success &= self._check(
                product, qty_uom_ids,
                _('There is no Unit of Measure with ISO code {0}.'.
                  format(qty_uom)))
            if partial_success:
                element['qty_uom_id'] = qty_uom_ids[0]

            write_on_lot = {}
            # BestBeforeDate
            lot_use_date = nspath(article, "bur:BestBeforeDate")
            element['lot_use_date'] = False
            if len(lot_use_date) > 0:
                lot_use_date = lot_use_date[0].text
                element['lot_use_date'] = self.str_date_to_postgres(
                    lot_use_date)
                if lot is None:
                    self._check(product, only_check,
                                _('The lot may not exist in a two-step file'))
                else:
                    if not lot.use_date:
                        write_on_lot['use_date'] = element['lot_use_date']
                    else:
                        partial_success &= self._check(
                            product,
                            self.keep_only_date(
                                lot.use_date) == element['lot_use_date'],
                            _('Mismatch with lot best before date'))

            # ProductionDate
            production_date = nspath(article, "bur:ProductionDate")
            element['date'] = False
            if production_date:
                lot_date = production_date[0].text
                element['lot_date'] = self.str_date_to_postgres(lot_date)
                if not lot.date:
                    write_on_lot['date'] = element['lot_date']
                else:
                    partial_success &= self._check(
                        product,
                        self.keep_only_date(lot.date) == element['lot_date'],
                        _('Mismatch with lot fabrication date'))

            if write_on_lot and partial_success:
                lot.write(write_on_lot)

            element['name'] = "BUR-{0}-{1}".format(
                nspath(xml, "//bur:ControlReference/bur:Timestamp")[0].text,
                nspath(article, "bur:BVPosNo")[0].text)

        # print imports
        if not self.context.get('force_import', False):
            bad_imports = [x['yc_YCArticleNo'] for x in imports if x['id'] < 0]
            if len(bad_imports) > 0:
                raise Exception(
                    "Invalid XML Elements: {0}".format(bad_imports))

        if not self.success:
            raise Warning(
                'There were errors in the import process. See the import log thread.',
                self.errors)

        if only_check:
            # Everything was OK, and it could be imported in a second step.
            return True

        stock_move_pool = self.pool.get("stock.move")
        for article in imports:
            _id = article['id']
            self.mark_record(_id, 'product.product')
            _lot = False
            if 'lot' in article:
                _lot = self.pool.get('stock.production.lot').search(
                    self.cr,
                    self.uid, [('name', '=', article['lot']),
                               ('product_id', '=', _id)],
                    context=self.context)
                if len(_lot) > 0:
                    _lot = _lot[0]
                else:
                    _lot = self.pool.get('stock.production.lot').create(
                        self.cr,
                        self.uid, {
                            'name': article['lot'],
                            'product_id': _id,
                            'date': article.get('lot_date', None),
                            'use_date': article.get('lot_use_date', None)
                        },
                        context=self.context)

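            # Maps BUR locations onto warehouse locations: a non-free stock
            # type on the destination side resolves to the blocked location,
            # 'YROD' maps to the input location, 'YAFS' to the main stock
            # location, and anything else falls back to the generic
            # inventory-loss location.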
            def loc(is_input, element, warehouse):
                key = 'location' if is_input else 'destination'
                if (not is_input) and element['stock_type'] not in ['', '0', 'F']:
                    return warehouse.lot_blocked_id.id
                if element[key] == 'YROD':
                    return warehouse.lot_input_id.id
                if element[key] == 'YAFS':
                    return warehouse.lot_stock_id.id
                return self.pool['ir.model.data'].get_object_reference(
                    self.cr, self.uid, 'stock', 'location_inventory')[1]

            stock_move_id = stock_move_pool.create(
                self.cr,
                self.uid, {
                    'name':
                    article['name'],
                    'product_id':
                    _id,
                    'location_id':
                    loc(True, article, warehouse),
                    'location_dest_id':
                    loc(False, article, warehouse),
                    'product_uom_qty' if V8 else 'product_qty':
                    article['qty_available'],
                    'product_uom':
                    article['qty_uom_id'],
                    'state':
                    'done',
                    'restrict_lot_id' if V8 else 'prodlot_id':
                    _lot,
                    'origin':
                    'YellowCube',
                    'type':
                    'internal',
                    'yc_booking_voucher_id':
                    booking_voucher_id,
                    'yc_booking_voucher_year':
                    booking_voucher_year,
                },
                context=self.context)
            self.mark_record(_id, 'product.product')

        return True
    def _process_art_file(self,
                          cr,
                          uid,
                          ids,
                          missing_product_ids=None,
                          context=None):
        if isinstance(ids, list):
            ids = ids[0]
        project_issue_obj = self.pool.get('project.issue')
        this = self.pool['stock.connect'].browse(cr, uid, ids, context)
        if not self.is_type_enabled(cr, uid, ids, 'art', context=context):
            return
        ctx = context.copy()
        env = [self.pool, cr, uid]
        limit_date = datetime.now() - timedelta(
            hours=this.yc_hours_between_art_files)
        created_art_products = []
        for _file in this.stock_connect_file_ids:
            if _file.type == 'art' and _file.input == False:
                if (_file.state in [FILE_STATE_READY, FILE_STATE_DRAFT]
                        or datetime.strptime(_file.create_date,
                                             DEFAULT_SERVER_DATETIME_FORMAT) >
                        limit_date or _file.server_ack is False):
                    if _file.model == 'stock.location':
                        complete = True
                        for p in missing_product_ids or []:
                            if ',product.product:{0},'.format(
                                    p) not in _file.related_ids:
                                complete = False
                                break
                        if complete or not this.yc_enable_art_ondemand:
                            logger.info(
                                "ART file already exists, and was ready to submit."
                            )
                            return
                    elif _file.model == 'product.product':
                        created_art_products.append(_file.res_id)
        art_factory = get_factory(env, 'art', context=ctx)
        if not this.yc_enable_art_multifile:
            created_art_products = None
        elif this.yc_enable_art_ondemand and not missing_product_ids:
            logger.info(
                "ART on demand activated. Not creating nothing automatically.")
            return
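        # A dedicated cursor is opened for each warehouse so that the issue
        # logged for an unexpected exception is committed even if the main
        # transaction is rolled back afterwards.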
        for warehouse in this.warehouse_ids:
            new_cr = self.pool.cursor()
            try:
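                # Only the stock location of this warehouse is exported;
                # products already covered by pending ART files are skipped,
                # while the explicitly requested ones are forced into the
                # export.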
                art_factory.generate_files(
                    [('id', '=', warehouse.lot_stock_id.id)],
                    ignore_product_ids=created_art_products,
                    force_product_ids=missing_product_ids,
                    multifile=this.yc_enable_art_multifile)
            except Warning as w:
                ctx['warning'] = format_exception(w)
                self.pool.get('stock.connect').log_issue(
                    cr,
                    uid,
                    ids,
                    _('Error while processing ART file:\n{warning}'),
                    context=ctx,
                    exception=w)
                if context.get('yc_print_errors', True):
                    logger.error(format_exception(w))

            except Exception as e:
                error = '{0}\n{1}'.format(_('Error while processing ART file'),
                                          format_exception(e))
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect.file',
                                               warehouse.id,
                                               error,
                                               context=context)
                logger.error('Exception: {0}'.format(error))
                self.pool.get('stock.connect').log_issue(
                    new_cr,
                    uid,
                    ids,
                    _('Error while processing ART file'),
                    context=context,
                    exception=e)
                raise e

            finally:
                new_cr.commit()
                new_cr.close()

            # Right now, there is no difference between warehouses, so it is enough to create the ART file once.
            break
Пример #19
0
    def generate_files(self, domain=None):
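        """Export one XML file per record of ``self._table`` matching ``domain``.

        For every matching record the XML root is generated, written to a file
        named ``<sender>_<factory_name>_<name>.xml`` and stored through
        ``save_file``; related export files are attached as well, converting to
        PCL those that must be submitted in that format, and the record is
        finally marked as exported.
        """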
        logger.debug("Exporting {0} files".format(self._factory_name))
        self.main_file_id = None
        sender = self.get_param('sender', required=True)
        table_model = self.pool[self._table]
        # search_domain = []#[('xml_export_state', '=', 'draft')]
        # For each object that matches the domain, we create its xml file
        for object_id in table_model.search(
                self.cr, self.uid, domain, context=self.context
        ):  # TODO: Check that self.context contains the yc_language set.
            try:
                _object = table_model.browse(self.cr,
                                             self.uid,
                                             object_id,
                                             context=self.context)

                # We generate the final filename, according to task with ID=2922
                main_file_name = self.get_main_file_name(_object)
                if not main_file_name:
                    raise Warning(
                        _('Missing filename for main object {0} {1}#{2}').
                        format(_object.name, self._table, _object.id))
                object_filename = "{sender}_{factory_name}_{name}.xml".format(
                    sender=sender,
                    factory_name=self._factory_name,
                    name=export_filename(main_file_name, self.context))

                logger.debug("Exporting xml for {2} {0} into file {1}".format(
                    object_id, object_filename, self._table))
                # The name of the main XML is appended to each related file
                self.context['filename_prefix'] = "{0}_".format(
                    object_filename[:-4])
                # The XML root is generated
                self.processed_items = []
                xml_node = self.generate_root_element(_object)
                if xml_node is not None:
                    xml_node.append(
                        etree.Comment("Model: {0} ID: {1} Name: {2}".format(
                            self._table, _object.id, _object.name)))
                    xml_output = xml_to_string(xml_node, remove_ns=True)
                    # The associated files are copied
                    self.save_file(xml_output,
                                   object_filename,
                                   main=True,
                                   binary=False,
                                   record_id=_object.id)
                    export_files = self.get_export_files(_object)
                    logger.debug("Exporting files {0}".format(export_files))
                    for name in export_files:
                        src = export_files[name]

                        if self._file_is_pcl(src):
                            # If the file must be submitted as PCL, then it generates the PCL
                            logger.debug(
                                "PCL conversion: PCL creation for {0} STARTED."
                                .format(src))
                            pcl_output_path = self._print_pdf_to_pcl(
                                self.cr, self.uid, src, self.context)
                            logger.debug(
                                "PCL conversion: PCL creation for {0} FINISHED"
                                .format(src))
                            data = None
                            with open(pcl_output_path, 'rb') as f:
                                data = f.read()
                            self.save_file(data, name)
                        else:
                            data = None
                            with open(src, 'rb') as f:
                                data = f.read()
                            self.save_file(data, name)

                    self.mark_as_exported(_object.id)
            except Exception as e:
                logger.error("Exception exporting into xml {0}: {1}".format(
                    object_id, format_exception(e)))
                raise
            finally:
                if 'filename_prefix' in self.context:
                    del self.context['filename_prefix']
        return True

    def _process_bar_file(self, cr, uid, ids, context):
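        """Import pending BAR files received from YellowCube.

        Each draft BAR file is imported through the 'bar' factory; the products
        and lots reported back in the context are stamped with
        ``yc_last_bar_update``, and any failure is logged as an issue and
        written on the file itself.
        """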
        this = self._this(cr, uid, ids, context)
        env = [self.pool, cr, uid]
        if not this.is_type_enabled('bar'):
            return
        bar_file_ids = this._find_yc_import_file('BAR')
        if not bar_file_ids:
            return

        project_issue_obj = self.pool.get('project.issue')
        file_obj = self.pool.get('stock.connect.file')
        product_obj = self.pool.get('product.product')
        stock_production_lot_obj = self.pool.get('stock.production.lot')
        stock_connect = self.pool.get('stock.connect').browse(
            cr, uid, ids[0], context)

        for bar_file in file_obj.browse(cr, uid, bar_file_ids, context):
            if bar_file.state != 'draft' or bar_file.error:
                if stock_connect.log_about_already_existing_files:
                    logger.info('Ignoring bar file {0}#{1}'.format(
                        bar_file.id, bar_file.name))
                continue
            error = None

            new_cr = self.pool.cursor()
            try:
                ctx = context.copy()
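                # The BAR factory is expected to fill these context keys with
                # the ids of the products and lots it imported; they are used
                # below to stamp the records with the import timestamp.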
                ctx['imported_products'] = None
                ctx['imported_lots'] = None
                bar_factory = get_factory(env, 'bar', context=ctx)
                if bar_factory.import_file(bar_file.content):
                    bar_file.write({
                        'type': 'bar',
                        'state': 'done',
                        'info': str(ctx['imported_products'])
                    })
                    if ctx['imported_products']:
                        product_obj.write(
                            cr,
                            uid,
                            ctx['imported_products'], {
                                'yc_last_bar_update':
                                datetime.now().strftime(
                                    DEFAULT_SERVER_DATETIME_FORMAT)
                            },
                            context=ctx)
                    if ctx['imported_lots']:
                        stock_production_lot_obj.write(
                            cr,
                            uid,
                            ctx['imported_lots'], {
                                'yc_last_bar_update':
                                datetime.now().strftime(
                                    DEFAULT_SERVER_DATETIME_FORMAT)
                            },
                            context=ctx)
                else:
                    error = 'Import did not succeed'

            except Warning as w:
                self.pool.get('stock.connect').log_issue(
                    cr,
                    uid,
                    ids,
                    _('Error while processing BAR file'),
                    file_id=bar_file.id,
                    context=context,
                    exception=w)
                project_issue_obj.create_issue(
                    cr,
                    uid,
                    'stock.connect.file',
                    bar_file.id,
                    _('Error while processing BAR file'),
                    context=context)
                if context.get('yc_print_errors', True):
                    logger.error(format_exception(w))
                error = format_exception(w)

            except Exception as e:
                error = format_exception(e)
                project_issue_obj.create_issue(cr,
                                               uid,
                                               'stock.connect.file',
                                               bar_file.id,
                                               error,
                                               context=context)
                logger.error('Exception: {0}'.format(error))
                if file_obj.search(new_cr,
                                   uid, [('id', '=', bar_file.id)],
                                   context=context):
                    file_obj.write(new_cr,
                                   uid,
                                   bar_file.id, {
                                       'error': True,
                                       'info': error
                                   },
                                   context=context)
                else:
                    logger.error(
                        "Cannot log error on unsaved BAR file: {0}".format(
                            error))
                raise e

            finally:
                new_cr.commit()
                new_cr.close()

            if error:
                bar_file.write({'error': True, 'info': error})