def _create_db(self) -> Tuple[JSONs, str]:
    """
    Write the hardcoded portal integrations database to S3.

    :return: The newly written DB together with its version string.
    """
    default_catalog = config.default_catalog
    repository = RepositoryPlugin.load(default_catalog).create(default_catalog)
    portal_db = self.demultiplex(repository.portal_db())
    new_version = self._write_db(portal_db, None)
    return portal_db, new_version
def main(argv):
    """
    Manage (create or remove) DSS bundle-event subscriptions for the
    indexer lambda across all configured catalogs that use the DSS
    repository plugin.
    """
    configure_script_logging(logger)
    import argparse
    parser = argparse.ArgumentParser(
        description='Subscribe indexer lambda to bundle events from DSS')
    parser.add_argument('--unsubscribe',
                        '-U',
                        dest='subscribe',
                        action='store_false',
                        default=True)
    parser.add_argument(
        '--personal', '-p', dest='shared', action='store_false', default=True,
        help=
        "Do not use the shared credentials of the Google service account that represents the "
        "current deployment, but instead use personal credentials for authenticating to the DSS. "
        "When specifying this option you will need to a) run `hca dss login` prior to running "
        "this script or b) set GOOGLE_APPLICATION_CREDENTIALS to point to another service "
        "account's credentials. Note that this implies that the resulting DSS subscription will "
        "be owned by a) you or b) the other service account and that only a) you or b) someone "
        "in possession of those credentials can modify the subscription in the future. This is "
        "typically not what you'd want.")
    args = parser.parse_args(argv)
    client = azul.dss.client()
    for catalog in config.catalogs:
        plugin = RepositoryPlugin.load(catalog)
        # Only DSS-backed catalogs carry subscriptions; skip the rest.
        if not isinstance(plugin, dss.Plugin):
            continue
        if args.shared:
            # Authenticate as the deployment's indexer service account so
            # the subscription is owned by the deployment, not a person.
            with aws.service_account_credentials(
                    config.ServiceAccount.indexer):
                subscription.manage_subscriptions(plugin,
                                                  client,
                                                  subscribe=args.subscribe)
        else:
            subscription.manage_subscriptions(plugin,
                                              client,
                                              subscribe=args.subscribe)
def main(argv):
    """
    Fetch a bundle from the repository of the default catalog and save it
    as canned test data in the indexer test data directory.
    """
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=AzulArgumentHelpFormatter)
    catalog = config.default_catalog
    plugin = RepositoryPlugin.load(catalog).create(catalog)
    # If the plugin exposes exactly one source, default to it so the user
    # need not spell it out; otherwise the source must be given explicitly.
    if len(plugin.sources) == 1:
        source_kwargs = {'default': str(one(plugin.sources))}
    else:
        source_kwargs = {'required': True}
    parser.add_argument('--source',
                        '-s',
                        **source_kwargs,
                        help='The repository source containing the bundle')
    parser.add_argument('--uuid',
                        '-b',
                        required=True,
                        help='The UUID of the bundle to can.')
    parser.add_argument('--version',
                        '-v',
                        help='The version of the bundle to can (default: the latest version).')
    parser.add_argument('--output-dir',
                        '-O',
                        default=os.path.join(config.project_root,
                                             'test', 'indexer', 'data'),
                        help='The path to the output directory (default: %(default)s).')
    args = parser.parse_args(argv)
    canned_bundle = fetch_bundle(args.source, args.uuid, args.version)
    save_bundle(canned_bundle, args.output_dir)
def repository_plugin(self, catalog: CatalogName) -> RepositoryPlugin:
    """
    Instantiate the repository plugin configured for the given catalog.
    """
    plugin_cls = RepositoryPlugin.load(catalog)
    return plugin_cls.create(catalog)
def plugin_for(catalog):
    """
    Return an instance of the repository plugin for the given catalog.
    """
    plugin_cls = RepositoryPlugin.load(catalog)
    return plugin_cls.create(catalog)
def plugin_db(self) -> JSONs:
    """
    The portal integrations DB as provided by the default catalog's
    repository plugin.
    """
    # Resolve the plugin lazily so that the mock catalog's repository
    # plugin is the one actually used.
    default_catalog = config.default_catalog
    repository = RepositoryPlugin.load(default_catalog).create(default_catalog)
    return repository.portal_db()
def default_db(self) -> JSONs:
    """
    The demultiplexed portal integrations DB of the default catalog's
    repository plugin.
    """
    # FIXME: Parameterize PortalService instances with current catalog
    #        https://github.com/DataBiosphere/azul/issues/2716
    default_catalog = config.default_catalog
    repository = RepositoryPlugin.load(default_catalog).create(default_catalog)
    return self.demultiplex(repository.portal_db())
def repository_plugin(self) -> RepositoryPlugin:
    """
    The repository plugin instance for this object's catalog.
    """
    # Read `self.catalog` once and reuse the value for load and create.
    current_catalog = self.catalog
    return RepositoryPlugin.load(current_catalog).create(current_catalog)