def import_projects():
  """Imports all project and ref config sets that are stored in Gitiles.

  Logs errors, does not raise them: a broken or unresolvable project must
  not prevent the remaining projects from being imported.
  """
  cfg = get_gitiles_config()
  for project in projects.get_projects():
    loc = project.config_location
    if loc.storage_type != GITILES_LOCATION_TYPE:
      continue
    try:
      location = gitiles.Location.parse_resolve(loc.url)
    except ValueError:
      logging.exception(
          'Invalid project location: %s', project.config_location)
      continue
    except net.AuthError as ex:
      # BUG FIX: Exception.message was removed in Python 3 (deprecated by
      # PEP 352); format the exception object itself instead.
      logging.error(
          'Could not resolve %s due to permissions: %s',
          project.config_location, ex)
      continue
    with _log_import_error('projects/%s' % project.id):
      import_project(project.id, location)

    # Import refs of the project. Each ref gets its own config set rooted
    # at the ref's configured path (or the service-wide default).
    for ref in projects.get_refs(project.id):
      assert ref.name
      assert ref.name.startswith('refs/'), ref.name
      ref_location = location._replace(
          treeish=ref.name,
          path=ref.config_path or cfg.ref_config_default_path,
      )
      ref_cs = 'projects/%s/%s' % (project.id, ref.name)
      with _log_import_error(ref_cs):
        _import_config_set(ref_cs, ref_location)
def get_projects():
  """Returns list of projects with metadata and repo info.

  Projects without repo information are omitted; this can happen due to
  eventual consistency. Does not check access. Caches results in memcache
  for 1 min.
  """
  all_projects = projects.get_projects()
  ids = [proj.id for proj in all_projects]
  # Kick off both lookups concurrently, then block on both at once.
  repos_future = projects.get_repos_async(ids)
  metadata_future = projects.get_metadata_async(ids)
  ndb.Future.wait_all([repos_future, metadata_future])
  repos = repos_future.get_result()
  metadata = metadata_future.get_result()

  out = []
  for proj in all_projects:
    repo_type, repo_url = repos.get(proj.id, (None, None))
    if repo_type is None:
      # Repo info has not propagated yet; skip this project for now.
      continue
    meta = metadata.get(proj.id)
    out.append(Project(
        id=proj.id,
        name=(meta.name if meta and meta.name else None),
        repo_type=repo_type,
        repo_url=repo_url,
    ))
  return out
def _add_virtual_project_files(uid_index, muid_index, file_index, subtree,
                               meta_definitions):
    """Registers virtual (not-yet-existing) translation files for each project.

    For every project returned by get_projects(), walks the project's root
    JSON files and synthesizes a corresponding translation file entry
    (mtime=None marks it as virtual), updating:
      - uid_index: uid -> set of translation stems,
      - muid_index: muid -> set of translation stems,
      - file_index: translation stem -> file object dict,
      - subtree: nested dict mirroring the directory layout.
    Existing (real) entries in subtree are never overwritten.
    """
    for project_id, entry in get_projects().items():
        root_path = entry['root_path']
        translation_path = entry['translation_path']
        translation_muids = entry['translation_muids']
        root_parent_dir = (WORKING_DIR / root_path)
        files = list(root_parent_dir.glob('**/*.json'))
        print(
            f'Creating project for {project_id}/{translation_muids} with {len(files)} files'
        )
        # Sort with bilarasortkey so entries are created in canonical order.
        for file in sorted(files, key=lambda f: bilarasortkey(str(f))):
            # Mirror the root file's position under the translation path.
            parent_dir = pathlib.Path(
                translation_path) / file.parent.relative_to(root_parent_dir)
            # Filenames follow the '<uid>_<muids>' convention; assumes exactly
            # one underscore in the stem — TODO confirm for all projects.
            uid, _ = file.stem.split('_')
            translation_stem = f'{uid}_{translation_muids}'
            virtual_file = parent_dir / (translation_stem + '.json')
            # Attach metadata for each muid component that has a definition.
            meta = {
                part: meta_definitions[part]
                for part in translation_muids.split('-')
                if part in meta_definitions
            }
            # mtime=None flags this entry as virtual (no file on disk yet).
            obj = {
                "uid": uid,
                "path": str(virtual_file),
                "mtime": None,
                "_meta": meta
            }
            if uid not in uid_index:
                uid_index[uid] = set()
            uid_index[uid].add(translation_stem)
            file_index[translation_stem] = obj
            for muid in translation_muids.split('-'):
                if muid not in muid_index:
                    muid_index[muid] = set()
                muid_index[muid].add(translation_stem)
            # Descend/create the directory-shaped nested dicts in subtree.
            parent_obj = subtree
            for part in parent_dir.parts:
                if part not in parent_obj:
                    parent_obj[part] = {}
                parent_obj = parent_obj[part]
            if translation_stem not in parent_obj:
                # Don't clobber real entries
                parent_obj[translation_stem] = obj
            # Create virtual comment file entry
            _add_virtual_comment_file(uid, translation_muids,
                                      WORKING_DIR / virtual_file, uid_index,
                                      muid_index, file_index, meta_definitions)
def import_projects():
  """Imports project configs that are stored in Gitiles.

  Errors are logged per project and do not abort the remaining imports.
  """
  for project in projects.get_projects():
    if project.config_storage_type != service_config_pb2.Project.GITILES:
      continue
    try:
      location = gitiles.Location.parse_resolve(project.config_location)
    except ValueError:
      logging.exception(
          'Invalid project location: %s', project.config_location)
      continue
    try:
      import_project(project.id, location)
    except Exception:
      # Dropped the unused `as ex` binding: logging.exception already
      # records the active exception and traceback. Broad catch is
      # intentional — one broken project must not block the others.
      logging.exception('Could not import project %s', project.id)
def _project_and_ref_config_sets():
  """Returns a list of project and ref config sets stored in Gitiles."""
  projs = projects.get_projects()
  refs_by_project = projects.get_refs([p.id for p in projs])

  config_sets = []
  for proj in projs:
    config_sets.append('projects/%s' % proj.id)
    # Each ref of the project gets its own config set.
    for ref in refs_by_project[proj.id] or []:
      assert ref.name
      assert ref.name.startswith('refs/'), ref.name
      config_sets.append('projects/%s/%s' % (proj.id, ref.name))
  return config_sets
def test_get_projects(self):
  # get_projects() should parse the stored ProjectsCfg text proto.
  storage.get_latest.return_value = '''
    projects {
      id: "chromium"
      config_storage_type: GITILES
      config_location: "http://localhost"
    }
  '''
  chromium = service_config_pb2.Project(
      id='chromium',
      config_storage_type=service_config_pb2.Project.GITILES,
      config_location='http://localhost')
  expected = service_config_pb2.ProjectsCfg(projects=[chromium])
  self.assertEqual(projects.get_projects(), expected.projects)
def test_get_projects(self):
  # get_projects() should parse the stored ProjectsCfg text proto.
  storage.get_latest_async.return_value.set_result('''
    projects {
      id: "chromium"
      config_location {
        storage_type: GITILES
        url: "http://localhost"
      }
    }
  ''')
  chromium = service_config_pb2.Project(
      id='chromium',
      config_location=service_config_pb2.ConfigSetLocation(
          storage_type=service_config_pb2.ConfigSetLocation.GITILES,
          url='http://localhost'))
  expected = service_config_pb2.ProjectsCfg(projects=[chromium])
  self.assertEqual(projects.get_projects(), expected.projects)
def test_get_projects(self):
  # get_projects() should parse the stored ProjectsCfg text proto.
  storage.get_latest_async.return_value.set_result('''
    projects {
      id: "chromium"
      config_location {
        storage_type: GITILES
        url: "http://localhost"
      }
    }
  ''')
  location = service_config_pb2.ConfigSetLocation(
      storage_type=service_config_pb2.ConfigSetLocation.GITILES,
      url='http://localhost')
  expected = service_config_pb2.ProjectsCfg(projects=[
      service_config_pb2.Project(id='chromium', config_location=location),
  ])
  self.assertEqual(projects.get_projects(), expected.projects)
def test_get_projects(self):
  # get_projects() should parse the stored ProjectsCfg text proto.
  self.mock_latest_config(storage.get_self_config_set(), '''
    projects {
      id: "chromium"
      gitiles_location {
        repo: "https://localhost"
        ref: "refs/heads/main"
      }
    }
  ''')
  gitiles_loc = service_config_pb2.GitilesLocation(
      repo='https://localhost',
      ref='refs/heads/main',
  )
  expected = service_config_pb2.ProjectsCfg(projects=[
      service_config_pb2.Project(id='chromium', gitiles_location=gitiles_loc),
  ])
  self.assertEqual(projects.get_projects(), expected.projects)
def import_projects():
  """Imports project configs that are stored in Gitiles.

  Errors are logged per project and do not abort the remaining imports.
  """
  for project in projects.get_projects():
    loc = project.config_location
    if loc.storage_type != service_config_pb2.ConfigSetLocation.GITILES:
      continue
    try:
      location = gitiles.Location.parse_resolve(loc.url)
    except ValueError:
      logging.exception(
          'Invalid project location: %s', project.config_location)
      continue
    except net.AuthError as ex:
      # BUG FIX: Exception.message was removed in Python 3 (deprecated by
      # PEP 352); format the exception object itself instead.
      logging.error(
          'Could not resolve %s due to permissions: %s',
          project.config_location, ex)
      continue
    try:
      import_project(project.id, location)
    except Exception:
      # Dropped the unused `as ex` binding: logging.exception already
      # records the active exception and traceback.
      logging.exception('Could not import project %s', project.id)
def get_projects():
  """Returns list of projects with metadata and repo info.

  Projects that have no repo information yet are skipped; this can happen
  due to eventual consistency. Caches results in main memory for 10 min.
  """
  result = []
  for proj in projects.get_projects():
    repo_type, repo_url = projects.get_repo(proj.id)
    if repo_type is not None:
      metadata = projects.get_metadata(proj.id)
      result.append(
          Project(
              id=proj.id,
              name=metadata.name or None,
              repo_type=repo_type,
              repo_url=repo_url,
          ))
    # else: repo info not yet consistent — drop the project for now.
  return result
def generate_translation_mapping(root_files):
    """Maps each root file to its available per-language translation files.

    Args:
        root_files: mapping whose keys are root file paths (pathlib.Path
            objects with a '<uid>_<muids>' stem); only the keys are used.

    Returns:
        dict mapping each root file path to a dict of
        {language code: virtual translation file path}.
    """
    langs_seen = set()
    # NOTE(review): `db` and `langs_seen` are computed but unused in this
    # function — confirm whether get_db() is needed for side effects or
    # whether both can be removed.
    db = get_db()
    translation_mapping = {}
    for project_id, entry in get_projects().items():
        root_path = entry['root_path']
        # Only site projects rooted at root/en/site participate.
        if root_path != 'root/en/site':
            continue
        translation_path = entry['translation_path']
        translation_muids = entry['translation_muids']
        # translation_muids looks like 'translation-<lang>-...': the second
        # dash-separated component is the language code.
        lang = translation_muids.split('-')[1]
        langs_seen.add(lang)
        parent_dir = pathlib.Path(translation_path)
        for file in sorted(root_files.keys(),
                           key=lambda f: bilarasortkey(str(f))):
            uid, _ = file.stem.split('_')
            translation_stem = f'{uid}_{translation_muids}'
            translation_file = parent_dir / (translation_stem + '.json')
            if file not in translation_mapping:
                translation_mapping[file] = {}
            translation_mapping[file][lang] = translation_file
    # BUG FIX: the mapping was built but never returned, making the
    # function useless to callers.
    return translation_mapping
def import_projects():
  """Imports all project and ref config sets that are stored in Gitiles.

  Logs errors, does not raise them: a broken or unresolvable project must
  not prevent the remaining projects from being imported.
  """
  cfg = get_gitiles_config()
  for project in projects.get_projects():
    loc = project.config_location
    if loc.storage_type != GITILES_LOCATION_TYPE:
      continue
    try:
      location = gitiles.Location.parse_resolve(loc.url)
    except ValueError:
      logging.exception(
          'Invalid project location: %s', project.config_location)
      continue
    except net.AuthError as ex:
      # BUG FIX: Exception.message was removed in Python 3 (deprecated by
      # PEP 352); format the exception object itself instead.
      logging.error(
          'Could not resolve %s due to permissions: %s',
          project.config_location, ex)
      continue
    with _log_import_error('projects/%s' % project.id):
      import_project(project.id, location)

    # Import refs of the project. Each ref gets its own config set rooted
    # at the ref's configured path (or the service-wide default).
    for ref in projects.get_refs(project.id):
      assert ref.name
      assert ref.name.startswith('refs/'), ref.name
      ref_location = location._replace(
          treeish=ref.name,
          path=ref.config_path or cfg.ref_config_default_path,
      )
      ref_cs = 'projects/%s/%s' % (project.id, ref.name)
      with _log_import_error(ref_cs):
        _import_config_set(ref_cs, ref_location)
def setup_model(self):
    """Marks the root item as loaded and attaches every project under it."""
    self.root_item.loaded = True
    for project in projects.get_projects():
        self.root_item.addChild(project)
def projects():
    """Renders the projects listing page."""
    return render_template('projects.html', _projects=get_projects())