def validate_doc_export(doc_id, rules, timestamps=None):
    doc_info = get_doc_info(doc_id)
    doc_name = doc_info[0]
    doc_corpus = doc_info[1]
    doc_content = get_doc_content(doc_id)
    ether_doc_name = "gd_" + doc_corpus + "_" + doc_name

    if not timestamps:
        timestamps = get_timestamps(ether_url)
    last_edit = int(timestamps[ether_doc_name])

    # Serve the cached report if the sheet hasn't changed since the last run
    if last_edit <= int(cache.get_timestamp(doc_id, "export")):
        return cache.get_report(doc_id, "export")

    socialcalc = get_socialcalc(ether_url, ether_doc_name)

    report = ""
    export_rule_fired = False
    for rule in rules:
        if not rule.applies(doc_name, doc_corpus):
            continue
        export_rule_fired = True
        res = rule.validate(socialcalc, doc_id)
        report += res

    if not export_rule_fired:
        report = "<strong>No applicable export schemas</strong><br>"
    elif report:
        report = "<strong>Export problems:</strong><br>" + report
    else:
        report = "<strong>Export is valid</strong><br>"

    cache.cache_timestamped_validation_result(doc_id, "export", report, last_edit)
    return report

def validate_all_export(docs):
    reports = {}
    rules = [ExportValidator(x) for x in get_export_rules()]
    # Fetch all sheet timestamps once and share them across documents
    timestamps = get_timestamps(ether_url)
    for doc in docs:
        doc_id, doc_name, corpus, doc_mode, doc_schema, validation, timestamp = doc
        # Only spreadsheet ("ether") documents have export validation
        if doc_mode != "ether":
            continue
        reports[doc_id] = validate_doc_export(doc_id, rules, timestamps=timestamps)
    return json.dumps(reports)

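# Hypothetical call sketch (illustrative values only, not part of the original
# module): each `docs` row must match the 7-field unpacking above; rows whose
# doc_mode is not "ether" are skipped.
def _validate_all_export_demo():
    docs = [
        (42, "doc1", "corpusA", "ether", "schema.xml", "", "0"),   # validated
        (43, "doc2", "corpusA", "github", "schema.xml", "", "0"),  # skipped
    ]
    return validate_all_export(docs)
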
def validate_all_export_bulk(docs):
    cached_reports = {}
    reports = []
    rules = [BulkExportValidator(x) for x in get_export_rules()]
    timestamps = get_timestamps(ether_url)

    doc_ids = []
    for doc in docs:
        doc_id, doc_name, doc_corpus, doc_mode, doc_schema, validation, timestamp = doc
        if doc_mode != "ether":
            continue
        ether_doc_name = "gd_" + doc_corpus + "_" + doc_name
        last_edit = int(timestamps[ether_doc_name])
        # Reuse the cached report if the sheet hasn't changed since the last run
        if last_edit <= int(cache.get_timestamp(doc_id, "export")):
            cached_reports[doc_id] = cache.get_report(doc_id, "export")
            continue
        doc_ids.append(doc_id)

    # Each bulk rule validates all stale documents in one pass and returns a
    # dict mapping doc_id -> report fragment
    for rule in rules:
        report, fired = rule.validate(doc_ids)
        if fired:
            reports.append(report)

    def merge_dicts(dictlist):
        # Concatenate each document's report fragments across all rule dicts
        keys = set().union(*dictlist)
        return {k: "".join(d.get(k, '') for d in dictlist) for k in keys}

    reports = merge_dicts([cached_reports] + reports)

    for doc_id in doc_ids:
        if doc_id not in reports:
            reports[doc_id] = "No applicable export schemas"

    # Cache the fresh reports; cached entries are already up to date
    for doc_id, report in reports.items():
        if doc_id in cached_reports:
            continue
        doc_name, doc_corpus, _, _, _, _, _ = get_doc_info(doc_id)
        ether_doc_name = "gd_" + doc_corpus + "_" + doc_name
        last_edit = int(timestamps[ether_doc_name])
        cache.cache_timestamped_validation_result(doc_id, "export", report, last_edit)

    return json.dumps(reports)

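# Minimal self-check sketch (hypothetical data, not part of the original
# module) of the per-key merge semantics in merge_dicts above: fragments for
# the same doc_id are concatenated in list order, so cached reports (passed
# first) precede any fresh rule output.
def _merge_dicts_demo():
    dictlist = [{1: "row 2 bad<br>"}, {1: "row 5 bad<br>", 2: "ok<br>"}]
    keys = set().union(*dictlist)
    merged = {k: "".join(d.get(k, '') for d in dictlist) for k in keys}
    assert merged == {1: "row 2 bad<br>row 5 bad<br>", 2: "ok<br>"}
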
def validate_doc_ether(doc_id, rules, timestamps=None, editor=False):
    doc_info = get_doc_info(doc_id)
    doc_name = doc_info[0]
    doc_corpus = doc_info[1]
    ether_doc_name = "gd_" + doc_corpus + "_" + doc_name

    if not timestamps:
        timestamps = get_timestamps(ether_url)
    last_edit = int(timestamps[ether_doc_name])

    # Serve the cached report if the sheet hasn't changed since the last run
    if last_edit <= int(cache.get_timestamp(doc_id, "ether")):
        return cache.get_report(doc_id, "ether")

    socialcalc = get_socialcalc(ether_url, ether_doc_name)
    parsed_ether = parse_ether(socialcalc, doc_id=doc_id)

    report = ''
    cells = []
    ether_rule_fired = False
    for rule in rules:
        if not rule.applies(doc_name, doc_corpus):
            continue
        ether_rule_fired = True
        res = rule.validate(parsed_ether)
        if len(res['tooltip']) > 0:
            # Wrap the fragment in a hoverable tooltip, dropping its trailing
            # line-break markup (last 5 chars) before appending the icon
            report += ('<div class="tooltip">' + res['report'][:-5]
                       + ' <i class="fa fa-ellipsis-h"></i>'
                       + '<span class="msg">' + res['tooltip'] + '</span>'
                       + '</div>')
        else:
            report += res['report']
        cells += res['cells']

    if not ether_rule_fired:
        report = "<strong>No applicable spreadsheet validation rules</strong><br>"
    elif report:
        report = "<strong>Spreadsheet Problems:</strong><br>" + report
    else:
        report = "<strong>Spreadsheet is valid</strong><br>"

    cache.cache_timestamped_validation_result(doc_id, "ether", report, last_edit)

    if editor:
        # In editor mode, also flag the offending cells in the live spreadsheet
        highlight_cells(cells, ether_url, ether_doc_name)
    return report
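
# Minimal sketch (hypothetical rule result, not part of the original module)
# of the tooltip wrapping in validate_doc_ether: a fragment with tooltip text
# is wrapped in a hoverable div, with its last five characters (assumed here
# to be a trailing "<br/>") removed before the ellipsis icon is appended.
def _tooltip_wrap_demo():
    res = {'report': 'Row 3: bad value<br/>', 'tooltip': 'expected: a or b'}
    fragment = ('<div class="tooltip">' + res['report'][:-5]
                + ' <i class="fa fa-ellipsis-h"></i>'
                + '<span class="msg">' + res['tooltip'] + '</span></div>')
    assert fragment == ('<div class="tooltip">Row 3: bad value'
                        ' <i class="fa fa-ellipsis-h"></i>'
                        '<span class="msg">expected: a or b</span></div>')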