def check_xsd_rules(self, cr, uid, ids, context=None):
    """Validate ART XML generation for the given products against the XSD.

    Builds the article element for each product in ``ids`` with
    ``raise_error=True`` so any XSD violation surfaces as an exception.
    If every product validates, an ``except_osv`` is raised *on purpose*:
    this method is meant to be bound to a check button, and raising shows
    the "Everything was fine." popup while rolling back the transaction.

    :param cr: database cursor.
    :param uid: current user id.
    :param ids: product ids to validate.
    :param context: optional context dict, forwarded to browse/factory.
    :raises osv.except_osv: always — either the factory raises on an XSD
        violation, or the final success confirmation is raised here.
    """
    cr.execute("SELECT code FROM res_lang WHERE active")
    languages = [x[0] for x in cr.fetchall()]
    for product in self.browse(cr, uid, ids, context=context):
        # NOTE(review): mixes the new-API environment (self.env) with an
        # old-API signature — presumably this model runs under the 8.0+
        # compatibility layer; confirm.
        art_factory = get_factory(self.env, "art", context=context)
        art_factory._generate_article_element(product, languages,
                                              raise_error=True)
    # Deliberate: report success as a popup and roll back side effects.
    # (The previous trailing "return True" was unreachable and removed.)
    raise osv.except_osv('XSD validation', 'Everything was fine.')
def _process_stock_picking_assigned(self, cr, uid, ids, event_ids, ctx=None):
    """Process 'assigned' stock.picking events for the given stock.connect.

    For each event: skips pickings whose process date is not yet reached
    (when configured), skips back-orders / non-assigned pickings, ensures
    the dependent ART files exist (creating them on demand when enabled),
    and finally asks the WAB (sales/outgoing) or WBL (purchase/incoming)
    factory to generate the export files. Successfully processed events
    are set to done; skipped ones to ignored.

    :param ids: stock.connect id(s); a list with more than one id is
        processed recursively, one connection at a time.
    :param event_ids: stock.event ids to process.
    :param ctx: optional context; mutated (see note at the end).
    :return: list of stock.event ids that were correctly processed.
    """
    if ctx is None:
        ctx = {}
    # Flag read downstream (e.g. by date computations) to restrict to
    # export-ready records; removed again before returning.
    ctx['check_date_ready_for_export'] = True
    if isinstance(ids, list) and len(ids) > 1:
        # Fan out: one recursive call per connection, accumulating ids.
        # NOTE(review): the recursive call passes a bare int as `ids`,
        # but below `ids[0]` is subscripted — this only works if callers
        # always pass a list; confirm against call sites.
        ret = []
        for x in ids:
            ret.extend(
                self._process_stock_picking_assigned(cr, uid, x, event_ids,
                                                     ctx=ctx))
        return ret
    conf_data = self.pool.get('configuration.data').get(cr, uid, [],
                                                        context=ctx)
    today = datetime.today()
    env = [self.pool, cr, uid]
    # Work on a copy so per-picking keys (warehouse_id, defaults below)
    # do not leak into the caller's ctx.
    if ctx:
        context = ctx.copy()
    else:
        context = {}
    if 'stock_connect_id' not in context:
        context['stock_connect_id'] = ids[0]
    if 'yc_min_number_attachments' not in context:
        # Default minimum attachment count — meaning defined by the
        # consumers of this key; TODO confirm.
        context['yc_min_number_attachments'] = 2
    wab_factory = get_factory(env, 'wab', context=context)
    wbl_factory = get_factory(env, 'wbl', context=context)
    picking_obj = self.pool['stock.picking']
    stock_event_obj = self.pool['stock.event']
    file_obj = self.pool['stock.connect.file']
    stock_connect_obj = self.pool['stock.connect']
    project_issue_obj = self.pool['project.issue']
    ret = []  # Events processed correctly (marked done at the end).
    this = self.pool['stock.connect'].browse(cr, uid, ids[0], context)
    stock_events_ignored = []  # Stores the events to ignore.
    for event_id in event_ids:
        error_message = None
        event = stock_event_obj.browse(cr, uid, event_id, context=context)
        picking_id = event.res_id
        picking = picking_obj.browse(cr, uid, picking_id, context)
        if conf_data.yc_ignore_events_until_process_date:
            if not (picking.process_date):
                # Force recomputation of the (presumably computed/stored)
                # process_date field before testing it.
                logger.debug(
                    "Recomputing process_date for picking {0}".format(
                        picking.name))
                self.env.add_todo(picking_obj._fields['process_date'],
                                  picking)
                picking_obj.recompute()
            # Still no date, or date in the future: leave the event
            # pending and explain why in its info field.
            if not (picking.process_date) or datetime.strptime(
                    picking.process_date,
                    DEFAULT_SERVER_DATETIME_FORMAT) >= today:
                event.write({"info": "Ignored until process date is met."})
                continue
        # Back-orders are never processed
        if picking.do_not_send_to_warehouse or (picking.state != 'assigned'):
            stock_events_ignored.append(event)
            event.write({"info": "Ignored until ready to be sent."})
            continue
        #picking_type = None
        # Pick the factory by direction: sales → WAB, purchases → WBL.
        factory = None
        if picking.sale_id and picking.type in ['outgoing', None]:
            factory = wab_factory
        elif picking.purchase_id and picking.type in ['incoming', None]:
            factory = wbl_factory
        else:
            factory = None
        context['warehouse_id'] = event.warehouse_id.id
        try:
            # Separate cursor so error bookkeeping survives a rollback of
            # the main transaction; committed/closed in `finally`.
            new_cr = self.pool.cursor()
            if not factory:
                raise Warning(
                    _('This stock.picking cannot be processed, it neither has a purchase or a sale order related'
                      ))
            related_items = factory.get_related_items(picking_id)
            related_files = []   # (model, id, reason) of blocking files.
            product_ids = []     # Products needing a (new) ART file.
            if self.is_type_enabled(cr, uid, this.id, 'art',
                                    context=context):
                for product_id in related_items.get(
                        'product.product', False) or []:
                    msg = None
                    res = self.get_last_file_for_record(cr, uid, this.id,
                                                        'product.product',
                                                        product_id,
                                                        _type='art',
                                                        context=context)
                    if not res:
                        msg = 'Missing'
                        product_ids.append(product_id)
                    else:
                        if not res.server_ack or res.state != 'done':
                            # File exists but not yet acknowledged/done.
                            msg = 'Pending'
                        elif this.yc_hours_between_art_files:
                            # Acknowledged file may be stale: older than
                            # the configured refresh window.
                            delta = timedelta(
                                hours=this.yc_hours_between_art_files)
                            filedate = datetime.strptime(
                                res.create_date,
                                DEFAULT_SERVER_DATETIME_FORMAT)
                            if filedate + delta < datetime.today():
                                msg = 'Out-of-date'
                                product_ids.append(product_id)
                    # If there is a pending file, we mark it
                    if msg:
                        related_files.append(
                            ('product.product', product_id, msg))
            # Here we create the missing documents that we have a dependency
            if this.yc_enable_art_ondemand:
                if this.yc_enable_art_multifile:
                    # One ART file per product.
                    for p in product_ids:
                        self._process_art_file(cr, uid, this.id,
                                               missing_product_ids=[p],
                                               context=context)
                elif product_ids:
                    # Single ART file covering all missing products.
                    self._process_art_file(
                        cr, uid, this.id,
                        missing_product_ids=product_ids,
                        context=context)
            if related_files:
                # Dependencies outstanding: record on the event and retry
                # on a later run (event stays pending).
                msg = "There are missing files that must be processed before: {0}".format(
                    related_files)
                event.write({'info': msg})
                logger.info(msg)
            else:
                picking_id = factory.generate_files([('id', '=',
                                                      picking_id)])
                if picking_id:
                    ret.append(event)
        # TODO: Rewrite this.
        except Warning as w:
            # Business-level problem: log it on the event via the side
            # cursor and carry on with the remaining events.
            error_message = _(
                'Warning while processing event on stock.picking with ID {0}: {1}'
            ).format(picking_id, format_exception(w))
            if context.get('yc_print_errors', True):
                logger.error(error_message)
            with api.Environment.manage():
                project_issue_obj.create_issue(new_cr, uid, 'stock.event',
                                               event_id, error_message,
                                               context=context)
                stock_event_obj.write(new_cr, uid, event.id, {
                    'error': True,
                    'info': error_message
                }, context=context)
        # TODO: Rewrite this.
        except Exception as e:
            # Unexpected failure: persist the issue on the side cursor,
            # then re-raise (aborts the whole run).
            error_message = _(
                'Exception while processing event on stock.picking with ID {0}: {1}'
            ).format(picking_id, format_exception(e))
            # NOTE(review): log_issue is called twice with identical
            # arguments (here and inside the manage() block below) —
            # looks like an accidental duplicate; confirm before removing.
            stock_connect_obj.log_issue(new_cr, uid, ids, error_message,
                                        event_id=event_id, context=context,
                                        exception=e,
                                        log_issue_no_format=True)
            logger.error(error_message)
            project_issue_obj.create_issue(cr, uid, 'stock.event',
                                           event_id, error_message,
                                           context=context)
            uid_exception, context_exception = uid, context
            with api.Environment.manage():
                stock_connect_obj.log_issue(new_cr, uid, ids,
                                            error_message,
                                            event_id=event_id,
                                            context=context, exception=e,
                                            log_issue_no_format=True)
                # NOTE(review): rebinding self.env to the side cursor so
                # subsequent writes survive the rollback — presumably; a
                # global env swap like this is fragile, confirm intent.
                self.env = api.Environment(new_cr, uid_exception,
                                           context_exception)
            event.write({'error': True, 'info': error_message})
            raise e
        finally:
            # NOTE(review): if self.pool.cursor() itself raised, new_cr
            # is unbound here and this raises NameError — TODO confirm.
            new_cr.commit()
            new_cr.close()
    # Sets as done all those events which were correctly processed.
    for event_correctly_processed in ret:
        event_correctly_processed.write({
            'state': EVENT_STATE_DONE,
            'info': ''
        })
    # Sets as ignored the events which are must be ignored.
    for event_to_ignore in stock_events_ignored:
        event_to_ignore.write({'state': EVENT_STATE_IGNORED, 'info': ''})
    # Undo the flag set at the top; note this mutates the caller's dict.
    del ctx['check_date_ready_for_export']
    return [x.id for x in ret]
def _process_bar_file(self, cr, uid, ids, context):
    """Import pending BAR files for the given stock.connect record.

    Finds unprocessed ('draft', non-error) BAR import files, feeds their
    content to the BAR factory, and stamps the imported products and lots
    with the import time. Errors are persisted on the file record; a
    separate cursor is used so that persistence survives a rollback of
    the main transaction on hard failures.

    :param ids: stock.connect id(s); only ids[0] is browsed.
    :param context: required context dict (copied per file).
    """
    this = self._this(cr, uid, ids, context)
    env = [self.pool, cr, uid]
    if not this.is_type_enabled('bar'):
        return
    bar_file_ids = this._find_yc_import_file('BAR')
    if not bar_file_ids:
        return
    project_issue_obj = self.pool.get('project.issue')
    file_obj = self.pool.get('stock.connect.file')
    product_obj = self.pool.get('product.product')
    stock_production_lot_obj = self.pool.get('stock.production.lot')
    stock_connect = self.pool.get('stock.connect').browse(
        cr, uid, ids[0], context)
    for bar_file in file_obj.browse(cr, uid, bar_file_ids, context):
        # Already processed or previously errored: skip (optionally log).
        if bar_file.state != 'draft' or bar_file.error:
            if stock_connect.log_about_already_existing_files:
                logger.info('Ignoring bar file {0}#{1}'.format(
                    bar_file.id, bar_file.name))
            continue
        error = None
        # Side cursor for error bookkeeping; committed/closed in finally.
        new_cr = self.pool.cursor()
        try:
            # The factory reports what it imported through these two
            # context keys, which it is expected to fill in.
            ctx = context.copy()
            ctx['imported_products'] = None
            ctx['imported_lots'] = None
            bar_factory = get_factory(env, 'bar', context=ctx)
            if bar_factory.import_file(bar_file.content):
                bar_file.write({
                    'type': 'bar',
                    'state': 'done',
                    'info': str(ctx['imported_products'])
                })
                # Stamp imported records with the BAR update time.
                if ctx['imported_products']:
                    product_obj.write(
                        cr, uid, ctx['imported_products'], {
                            'yc_last_bar_update': datetime.now().strftime(
                                DEFAULT_SERVER_DATETIME_FORMAT)
                        }, context=ctx)
                if ctx['imported_lots']:
                    stock_production_lot_obj.write(
                        cr, uid, ctx['imported_lots'], {
                            'yc_last_bar_update': datetime.now().strftime(
                                DEFAULT_SERVER_DATETIME_FORMAT)
                        }, context=ctx)
            else:
                error = 'Not success'
        except Warning as w:
            # Business-level failure: record the issue, keep processing
            # the remaining files (no re-raise).
            self.pool.get('stock.connect').log_issue(
                cr, uid, ids, _('Error while processing BAR file'),
                file_id=bar_file.id, context=context, exception=w)
            project_issue_obj.create_issue(
                cr, uid, 'stock.connect.file', bar_file.id,
                _('Error while processing BAR file'), context=context)
            if context.get('yc_print_errors', True):
                logger.error(format_exception(w))
            error = format_exception(w)
        except Exception as e:
            # Hard failure: persist the error on the side cursor (the
            # main cursor will likely be rolled back) and re-raise.
            error = format_exception(e)
            project_issue_obj.create_issue(cr, uid, 'stock.connect.file',
                                           bar_file.id, error,
                                           context=context)
            logger.error('Exception: {0}'.format(error))
            # The file record may not exist in the side cursor's view if
            # it was created in the current (uncommitted) transaction.
            if file_obj.search(new_cr, uid, [('id', '=', bar_file.id)],
                               context=context):
                file_obj.write(new_cr, uid, bar_file.id, {
                    'error': True,
                    'info': error
                }, context=context)
            else:
                logger.error(
                    "Impossible to log error on unsaved BAR file!!! {0}".
                    format(error))
            raise e
        finally:
            new_cr.commit()
            new_cr.close()
        # Soft failure ('Not success' / Warning): mark on the main cursor.
        if error:
            bar_file.write({'error': True, 'info': error})
def _process_file(self, cr, uid, ids, xml_type, context=None):
    """Import pending files of the given type for a stock.connect record.

    Generic counterpart of ``_process_bar_file``: finds unprocessed
    ('draft', non-error) import files of ``xml_type``, feeds each one's
    content to the matching factory, marks successes done, and persists
    failures on the file record. A separate cursor is used on hard
    failures so the error survives the main transaction's rollback.

    :param ids: stock.connect id(s); only ids[0] is browsed.
    :param xml_type: file type code (e.g. 'wba'); upper-cased for lookup,
        lower-cased for the factory and the stored file type.
    :param context: optional context dict.
    :raises Exception: re-raises any non-Warning error after logging it.
    """
    if context is None:
        # Guard: context is dereferenced below (context.get), so honor
        # the declared default instead of crashing on None.
        context = {}
    env = [self.pool, cr, uid]
    this = self._this(cr, uid, ids, context)
    if not this.is_type_enabled(xml_type):
        return
    file_ids = this._find_yc_import_file(xml_type.upper())
    if not file_ids:
        return
    factory = get_factory(env, xml_type.lower(), context=context)
    file_obj = self.pool.get('stock.connect.file')
    stock_connect = self.pool.get('stock.connect').browse(
        cr, uid, ids[0], context)
    project_issue_obj = self.pool.get('project.issue')
    for _file in file_obj.browse(cr, uid, file_ids, context):
        # Already processed or previously errored: skip (optionally log).
        if _file.state != 'draft' or _file.error:
            if stock_connect.log_about_already_existing_files:
                logger.info('Ignoring {0} file {1}#{2}'.format(
                    xml_type, _file.id, _file.name))
            continue
        error = None
        # Side cursor for error bookkeeping; committed/closed in finally.
        new_cr = self.pool.cursor()
        try:
            if factory.import_file(_file.content):
                _file.write({
                    'type': xml_type.lower(),
                    'state': 'done',
                    'info': ''
                })
            else:
                error = 'Not success'
        except Warning as w:
            # Business-level failure: record it and continue with the
            # remaining files (no re-raise).
            error = '{0} {1}'.format(
                _('Warning: Error while processing file.'),
                format_exception(w))
            project_issue_obj.create_issue(cr, uid, 'stock.connect.file',
                                           _file.id, error,
                                           context=context)
            if context.get('yc_print_errors', True):
                logger.error(error)
            _file.write({'error': True, 'info': error}, context=context)
        except Exception as e:
            # Hard failure: persist on the side cursor and re-raise.
            error = '{0} {1}'.format(
                _('Exception: Error while processing file.'),
                format_exception(e))
            project_issue_obj.create_issue(cr, uid, 'stock.connect.file',
                                           _file.id, error,
                                           context=context)
            logger.error(error)
            file_obj.write(new_cr, uid, _file.id, {
                'error': True,
                'info': error
            }, context=context)
            # Was a bare Python-2 `print` debug dump of the file content;
            # routed through the logger so it reaches the log files.
            logger.error('Content of failing {0} file {1}#{2}:\n{3}'.format(
                xml_type, _file.id, _file.name, _file.content))
            raise e
        finally:
            new_cr.commit()
            new_cr.close()
        # Soft failure ('Not success' / Warning): mark on the main cursor.
        if error:
            _file.write({'error': True, 'info': error})
def _process_art_file(self, cr, uid, ids, missing_product_ids=None,
                      context=None):
    """Generate the outgoing ART (article master data) file(s).

    Skips generation when a recent-enough or still-pending ART file
    already covers the request; otherwise asks the ART factory to
    generate files for the first warehouse's stock location (one pass is
    enough — warehouses are currently not differentiated).

    :param ids: stock.connect id, or a list whose first element is used.
    :param missing_product_ids: optional product ids that must be covered
        by the generated file(s) (on-demand mode).
    :param context: optional context dict.
    :raises Exception: re-raises any non-Warning generation error.
    """
    if isinstance(ids, list):
        ids = ids[0]
    if context is None:
        # Guard: context is copied below, so honor the declared default
        # instead of crashing on None.
        context = {}
    project_issue_obj = self.pool.get('project.issue')
    this = self.pool['stock.connect'].browse(cr, uid, ids, context)
    if not self.is_type_enabled(cr, uid, ids, 'art', context=context):
        return
    ctx = context.copy()
    env = [self.pool, cr, uid]
    # Files created after this instant are considered fresh.
    limit_date = datetime.now() - timedelta(
        hours=this.yc_hours_between_art_files)
    created_art_products = []  # Products already covered by recent files.
    for _file in this.stock_connect_file_ids:
        # Only outgoing ART files are relevant here.
        if _file.type == 'art' and not _file.input:
            # Pending, fresh, or not yet acknowledged by the server.
            if (_file.state in [FILE_STATE_READY, FILE_STATE_DRAFT]
                    or datetime.strptime(
                        _file.create_date,
                        DEFAULT_SERVER_DATETIME_FORMAT) > limit_date
                    or _file.server_ack is False):
                if _file.model == 'stock.location':
                    # A location-wide file: check it covers every
                    # requested product (related_ids is a string of
                    # ',model:id,' tokens — TODO confirm format).
                    complete = True
                    for p in missing_product_ids or []:
                        if ',product.product:{0},'.format(
                                p) not in _file.related_ids:
                            complete = False
                            break
                    if complete or not this.yc_enable_art_ondemand:
                        logger.info(
                            "ART file already exists, and was ready to submit."
                        )
                        return
                elif _file.model == 'product.product':
                    created_art_products.append(_file.res_id)
    art_factory = get_factory(env, 'art', context=ctx)
    if not this.yc_enable_art_multifile:
        # Single-file mode: never exclude products.
        created_art_products = None
    elif this.yc_enable_art_ondemand and not missing_product_ids:
        # On-demand mode only generates when something is explicitly
        # requested. (Message grammar fixed: was "Not creating nothing".)
        logger.info(
            "ART on demand activated. Not creating anything automatically.")
        return
    for warehouse in this.warehouse_ids:
        # Side cursor for error bookkeeping; committed/closed in finally.
        new_cr = self.pool.cursor()
        try:
            art_factory.generate_files(
                [('id', '=', warehouse.lot_stock_id.id)],
                ignore_product_ids=created_art_products,
                force_product_ids=missing_product_ids,
                multifile=this.yc_enable_art_multifile)
        except Warning as w:
            # Business-level failure: record and continue (no re-raise).
            ctx['warning'] = format_exception(w)
            self.pool.get('stock.connect').log_issue(
                cr, uid, ids,
                _('Error while processing ART file:\n{warning}'),
                context=ctx, exception=w)
            if context.get('yc_print_errors', True):
                logger.error(format_exception(w))
        except Exception as e:
            # Hard failure: persist via the side cursor and re-raise.
            error = '{0}\n{1}'.format(_('Error while processing ART file'),
                                      format_exception(e))
            # NOTE(review): warehouse.id is passed as the res_id of a
            # 'stock.connect.file' issue — looks like a model/id
            # mismatch; confirm before changing.
            project_issue_obj.create_issue(cr, uid, 'stock.connect.file',
                                           warehouse.id, error,
                                           context=context)
            logger.error('Exception: {0}'.format(error))
            self.pool.get('stock.connect').log_issue(
                new_cr, uid, ids, _('Error while processing ART file'),
                context=context, exception=e)
            raise e
        finally:
            new_cr.commit()
            new_cr.close()
        # Right now, there is no difference between warehouses, so it is
        # enough to create the ART file once.
        break