def display_articles_differences(self):
    comparison_result = self.exact_comparison_result
    msg = []
    if len(comparison_result) > 0:
        msg.append(html_reports.p_message(self.status))
        for label, differences in comparison_result:
            diff = [differences[0], differences[1]]
            diff = ' => '.join([d for d in diff if d is not None])
            msg.append(html_reports.tag('p', diff))
    return ''.join(msg)

def report_articles_merging_conflicts(self):
    if not hasattr(self, '_report_articles_merging_conflicts'):
        merging_errors = []
        if len(self.docs_merger.titaut_conflicts) + len(
                self.docs_merger.name_order_conflicts) > 0:
            keys = list(self.docs_merger.titaut_conflicts.keys()) + list(
                self.docs_merger.name_order_conflicts.keys())
            keys = sorted(list(set(keys)))
            merging_errors = [
                html_reports.p_message(
                    validation_status.STATUS_BLOCKING_ERROR + ': ' +
                    _('Unable to update because the registered article data and the package article data do not match. '))
            ]
            articles = self.docs_merger.articles
            registered_articles = self.docs_merger.registered_articles
            # build one sheet per conflicting document, showing its data next
            # to the articles it conflicts with (title/author and name/order)
            for name in keys:
                labels = [
                    name,
                    _('title/author conflicts'),
                    _('name/order conflicts'),
                ]
                values = [
                    article_data_reports.display_article_data_to_compare(
                        articles.get(name))
                ]
                articles_in_conflict = []
                for reg_name, art in self.docs_merger.titaut_conflicts.get(
                        name, {}).items():
                    articles_in_conflict.append(
                        article_data_reports.display_article_data_to_compare(
                            art))
                values.append(''.join(articles_in_conflict))
                articles_in_conflict = []
                for pkg_name, art in self.docs_merger.name_order_conflicts.get(
                        name, {}).items():
                    articles_in_conflict.append(
                        article_data_reports.display_article_data_to_compare(
                            art))
                values.append(''.join(articles_in_conflict))
                merging_errors.append(
                    html_reports.sheet(
                        labels,
                        [html_reports.label_values(labels, values)],
                        table_style='dbstatus',
                        html_cell_content=labels))
        self._report_articles_merging_conflicts = ''.join(merging_errors)
    return self._report_articles_merging_conflicts

def spf_message(self):
    if not self.sps_pkg_info:
        return ""
    ftp = ""
    if self.sps_pkg_info.get("server"):
        ftp = _("(FTP: {} | User: {})").format(
            self.sps_pkg_info.get("server"),
            self.sps_pkg_info.get("user", ''))
    return html_reports.p_message(
        _("[INFO] {} is available for SPF {}").format(
            self.sps_pkg_info.get("file"), ftp))

def _report(self, blocking_error, pkg):
    msg = html_reports.p_message(blocking_error or "")
    if not blocking_error:
        msg = self.pkg_namer.report()
        img_reports = ImagesOriginReport(
            self.enhancer.images_origin,
            self.pkg_namer.href_replacements,
            pkg.package_folder.path)
        html_reports.save(
            self.FILES.sgmxml_outputs.images_report_filename,
            '',
            img_reports.report())
    fs_utils.write_file(
        self.FILES.sgmxml_outputs.mkp2xml_report_filename, msg)

def invalid_xml_report(self):
    r = ''
    if len(self.invalid_xml_name_items) > 0:
        r += html_reports.tag(
            'div',
            html_reports.p_message(
                _('{status}: invalid XML files. ').format(
                    status=validation_status.STATUS_BLOCKING_ERROR)))
        r += html_reports.tag(
            'div',
            html_reports.format_list(
                '', 'ol', self.invalid_xml_name_items, 'issue-problem'))
    return r

def spf_message(self):
    if not self.sps_pkg_info:
        return ""
    result = False
    if self.sps_pkg_info.get("server"):
        result = _("FTP: {} | User: {}").format(
            self.sps_pkg_info.get("server"),
            self.sps_pkg_info.get("user", ''))
    elif self.sps_pkg_info.get("file"):
        result = os.path.isfile(self.sps_pkg_info.get("file"))
    return html_reports.p_message(
        _("[INFO] {} is available for SPF ({})").format(
            self.sps_pkg_info.get("file"), result))

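# Illustrative note (assumption, inferred only from the keys read in the two
# spf_message variants above): ``sps_pkg_info`` is treated as a dict with
# optional "file", "server" and "user" entries, e.g. a hypothetical
# {"file": "package.zip", "server": "ftp.example.org", "user": "jdoe"}.
# When "server" is present the message reports the FTP location and user;
# this second variant otherwise reports whether the package file exists
# locally via os.path.isfile.
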
def conclusion_message(self):
    if hasattr(self, '_conclusion_message'):
        return self._conclusion_message
    text = ''.join(self.error_messages)
    app_site = self.web_app_site or _('scielo web site')
    result = _('updated/published on {app_site}').format(app_site=app_site)
    conclusion = self.conclusion
    action = _('will be') if conclusion.get("update") else _('will not be')
    text = u'{status}: {issueid} {action} {reason}'.format(
        issueid=self.acron_issue_label,
        action=action + " " + result,
        **conclusion)
    converted = "{}: {}/{}".format(
        _('converted'), self.total_converted, self.accepted_articles)
    self._conclusion_message = (
        html_reports.p_message(converted, False) +
        html_reports.p_message(text, False) +
        self.spf_message)
    return self._conclusion_message

def report_missing_required_issue_data(self):
    if not hasattr(self, '_report_missing_required_issue_data'):
        r = ''
        for label, items in self.group.missing_required_data.items():
            r += html_reports.tag(
                'div',
                html_reports.p_message(
                    _('{status}: missing {label} in: ').format(
                        status=validation_status.STATUS_BLOCKING_ERROR,
                        label=label)))
            r += html_reports.tag(
                'div',
                html_reports.format_list('', 'ol', items, 'issue-problem'))
        self._report_missing_required_issue_data = r
    return self._report_missing_required_issue_data

def display_article_data_to_compare(_article):
    r = ''
    style = 'excluded' if _article.is_ex_aop else None
    status = (
        validation_status.STATUS_INFO + ': ' +
        _('This article is an ex-aop article. ') +
        _('Order of ex-aop is reserved, it is not allowed to reuse it for other article. ')
        if _article.is_ex_aop else '')
    r += html_reports.p_message(status)
    if _article.creation_date_display is None:
        r += html_reports.p_message(_('package'))
    else:
        r += html_reports.p_message(_('registered article'))
    r += html_reports.tag('p', _article.xml_name, 'article-title')
    r += html_reports.tag(
        'p', html_reports.tag('strong', _article.order), 'fpage')
    r += display_article_metadata(_article, '<br/>')
    if _article.creation_date_display is not None:
        r += '<hr/>' + html_reports.display_label_value(
            _('creation date'), _article.creation_date_display, 'p')
        r += html_reports.display_label_value(
            _('last update date'), _article.last_update_display, 'p')
    return html_reports.tag('div', r, style)

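# Usage sketch (hypothetical, based only on the attributes read above):
# ``display_article_data_to_compare`` expects an article object exposing
# is_ex_aop, creation_date_display, xml_name, order and last_update_display,
# and returns an HTML <div> summarizing those fields, e.g.
#     html = display_article_data_to_compare(articles.get(name))
# as called from report_articles_merging_conflicts above.
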
def report_issue_data_duplicated_values(self):
    if not hasattr(self, '_report_issue_data_duplicated_values'):
        parts = []
        for label, values in self.group.duplicated_values.items():
            status = self.group.ERROR_LEVEL_FOR_UNIQUE_VALUES[label]
            _m = _('Unique value for {label} is required for all the documents in the package').format(
                label=label)
            parts.append(html_reports.p_message(status + ': ' + _m))
            for value, xml_files in values.items():
                parts.append(
                    html_reports.format_list(
                        _('found {label}="{value}" in:').format(
                            label=label, value=value),
                        'ul', xml_files, 'issue-problem'))
        self._report_issue_data_duplicated_values = ''.join(parts)
    return self._report_issue_data_duplicated_values

def report_issue_data_conflicting_values(self):
    if not hasattr(self, '_report_issue_data_conflicting_values'):
        parts = []
        for label, values in self.group.conflicting_values.items():
            _status = validation_status.STATUS_BLOCKING_ERROR
            if self.group.is_rolling_pass or self.group.is_aop_issue:
                _status = validation_status.STATUS_WARNING
            elif label == 'license':
                _status = validation_status.STATUS_WARNING
            _m = _('{status}: same value for {label} is required for all the documents in the package. ').format(
                status=_status, label=label)
            parts.append(html_reports.p_message(_m))
            parts.append(
                html_reports.tag(
                    'div', html_reports.format_html_data(values),
                    'issue-problem'))
        self._report_issue_data_conflicting_values = ''.join(parts)
    return self._report_issue_data_conflicting_values

def display_item(self, item):
    return html_reports.p_message(item, False)