def calculate_completion(self, translation):
    """Return translated and total (root) string counts for a translation file."""
    if translation['mtime']:
        translated_count = self.count_strings(translation)
    else:
        # This is a virtual file
        translated_count = 0

    uid, _ = get_uid_and_muids(translation["path"])
    root_lang = get_child_property_value(translation, "root_lang")
    root_edition = get_child_property_value(translation, "root_edition")

    # Record which root properties could not be resolved.
    missing = []
    if not root_lang:
        missing.append("root lang")
    if not root_edition:
        missing.append("root edition")

    try:
        root_entry = get_matching_entry(uid, ["root", root_lang, root_edition])
        root_count = self.count_strings(root_entry)
        total_count = max(root_count, translated_count)
    except NoMatchingEntry:
        # Fall back to the translated count when no root entry exists.
        total_count = translated_count
        problemsLog.add(file=translation['path'],
                        msg="Root entry could not be determined")

    return {"_translated": translated_count, "_root": total_count}
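# Illustrative only (made-up counts, not real data): for a translation file
# with 120 translated segments whose matching root edition has 152 segments,
# calculate_completion returns
#     {"_translated": 120, "_root": 152}
# and simply {"_translated": 120, "_root": 120} when no root entry is found.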
def build_rules(publications):
    """Build per-GitHub-user permission rules from the publications mapping."""
    result = {}
    result['_paths'] = {}
    for pub_id, entry in publications.items():
        try:
            source_path = source_url_to_path(entry['source_url'])
        except ValueError as e:
            problemsLog.add(
                file=publications_file_name,
                msg=f"In {entry['publication_number']}: {e.args[0]}")
            # Without a valid source path there is nothing to grant.
            continue

        github_ids = [entry.get('author_github_handle')]
        result['_paths'][source_path] = True
        for collaborator in entry.get("collaborator", []):
            github_ids.append(collaborator.get('author_github_handle'))

        if not any(github_ids) and 'parent_publication' not in entry:
            problemsLog.add(
                file=publications_file_name,
                msg=f"Publication {pub_id} has no author or collaborator")

        for github_id in github_ids:
            if not github_id:
                continue
            if github_id not in result:
                result[github_id] = {
                    Permission.EDIT: [],
                    Permission.SUGGEST: [],
                    Permission.VIEW: ["*"]
                }
            if source_path not in result[github_id][Permission.EDIT]:
                result[github_id][Permission.EDIT].append(source_path)
    return result
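# Sketch of the returned structure (hypothetical handle and path, not real
# project data): a publication authored by GitHub user "example-author" whose
# source_url resolves to "translation/en/example" would produce roughly:
#
#     {
#         "_paths": {"translation/en/example": True},
#         "example-author": {
#             Permission.EDIT: ["translation/en/example"],
#             Permission.SUGGEST: [],
#             Permission.VIEW: ["*"],
#         },
#     }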
def yield_strings(self, files):
    """Yield (muids, doc) pairs for every segment in the given JSON files."""
    for file in files:
        if file.suffix != '.json':
            continue
        if '_' not in file.name:
            logging.error(f'Invalid filename: {file}')
            problemsLog.add(file=str(file.relative_to(WORKING_DIR)),
                            msg='Not a valid filename: "_" missing')
            continue
        uid, muids = file.stem.split("_")
        if not uid:
            continue
        with file.open("r") as f:
            try:
                data = json.load(f)
            except json.JSONDecodeError as e:
                logging.error(f'Error loading file: {file}')
                problemsLog.add(file=str(file.relative_to(WORKING_DIR)),
                                msg=f'JSON Decode Error on line {e.lineno}')
                continue
        for segment_id, string in data.items():
            # "~" is a placeholder entry, not a real segment.
            if segment_id == "~":
                continue
            yield (
                muids,
                {
                    "_key": self.legalize_key(segment_id),
                    "segment_id": segment_id,
                    "string": string,
                    "muids": muids,
                },
            )
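# Hypothetical example of the yielded shape: for a file named
# "dn1_translation-en-example.json" containing {"dn1:1.1": "Thus have I heard."},
# yield_strings would produce one tuple:
#
#     ("translation-en-example",
#      {"_key": <legalized "dn1:1.1">,
#       "segment_id": "dn1:1.1",
#       "string": "Thus have I heard.",
#       "muids": "translation-en-example"})
#
# (Filename and content are illustrative; the "_key" value depends on
# self.legalize_key.)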
def validate_permissions(rules=None):
    """Report permission paths that do not match any file under WORKING_DIR."""
    if not rules:
        rules = get_rules()
    files = WORKING_DIR.glob('**/*.json')
    files = [
        str(file.relative_to(WORKING_DIR))
        for file in files
        if not any(part for part in file.parts if part.startswith('.'))
    ]
    for user, user_permissions in rules.items():
        if user.startswith('_'):
            continue  # Not a valid Github ID, used for bilara
        for paths in user_permissions.values():
            for path in paths:
                if path == '*':
                    continue
                for file in files:
                    if file.startswith(path):
                        break
                else:
                    # No file matched this path prefix.
                    problemsLog.add(file=publications_file_name,
                                    msg=f"No files match path: {path}")
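# Typical call sites (a sketch, assuming a `publications` mapping as passed to
# build_rules above): validate either the rules already on disk or a freshly
# built mapping, so that permission paths matching no files are reported to
# problemsLog.
#
#     validate_permissions()                            # loads rules via get_rules()
#     validate_permissions(build_rules(publications))   # validate a new mapping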