def cleanup_pubsub_topics(project_names):
  """Delete old pubsub topics and subscriptions.

  Removes every jobs/high-end-jobs topic (and its subscriptions) that is not
  expected for the current set of projects and is not one of the unmanaged
  queues declared in the pubsub.queues config.

  Args:
    project_names: Iterable of project names whose queues should be kept.
  """
  client = pubsub.PubSubClient()
  application_id = utils.get_application_id()

  expected_topics = set()
  for platform in PUBSUB_PLATFORMS:
    # Feed the generator directly into update(); no need for a temporary list.
    expected_topics.update(
        untrusted.queue_name(project, platform) for project in project_names)

  pubsub_config = local_config.Config('pubsub.queues')
  # Use a set so the per-topic membership test below is O(1) instead of O(n).
  unmanaged_queues = {
      queue['name'] for queue in pubsub_config.get('resources')
  }

  for topic in client.list_topics(pubsub.project_name(application_id)):
    _, name = pubsub.parse_name(topic)

    if not name.startswith((tasks.JOBS_PREFIX, tasks.HIGH_END_JOBS_PREFIX)):
      # Some topic created by another service, ignore.
      continue

    if name in unmanaged_queues or name in expected_topics:
      # Either explicitly unmanaged or still expected for a live project.
      continue

    # Subscriptions must be deleted before their topic.
    for subscription in client.list_topic_subscriptions(topic):
      client.delete_subscription(subscription)

    client.delete_topic(topic)
def setUp(self):
  """Point CONFIG_DIR_OVERRIDE at the bundled test config directory."""
  test_helpers.patch_environ(self)
  data_dir = os.path.join(os.path.dirname(__file__), 'local_config_data')
  self.configs_directory = data_dir
  environment.set_value('CONFIG_DIR_OVERRIDE', data_dir)
  self.config = local_config.Config()
def setup_pubsub(project):
  """Set up pubsub topics and subscriptions."""
  client = pubsub.PubSubClient()
  config = local_config.Config('pubsub.queues')
  # Each configured resource gets a topic plus a same-named subscription.
  for resource in config.get('resources'):
    queue_name = resource['name']
    create_pubsub_topic(client, project, queue_name)
    create_pubsub_subscription(client, project, queue_name, queue_name)
def test_root_validation(self):
  """Test root validation."""
  # The default root and every fully-formed dotted path are accepted.
  _ = local_config.Config()
  for good_root in ('a', 'aa', 'aa.bb', 'aa.bb.cc', 'aa.bb.cc.dd'):
    _ = local_config.Config(good_root)

  # Paths whose final component is truncated are rejected.
  for bad_root in ('aa.b', 'aa.bb.c', 'aa.bb.cc.d'):
    with self.assertRaises(errors.BadConfigError):
      _ = local_config.Config(bad_root)

  # A path deeper than the config tree raises a different error.
  with self.assertRaises(errors.InvalidConfigKey):
    _ = local_config.Config('aa.bb.cc.dd.ee')
def get(self):
  """Handle a cron job."""
  backup_bucket = local_config.Config(
      local_config.PROJECT_PATH).get('backup.bucket')
  if not backup_bucket:
    logs.log('No backup bucket is set, skipping.')
    return

  # Export every datastore kind except internal ones and explicit exclusions.
  kinds = [
      kind for kind in ndb.Model._kind_map  # pylint: disable=protected-access
      if (not kind.startswith('_') and kind not in EXCLUDED_MODELS)
  ]

  app_id = utils.get_application_id()
  timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S')
  output_url_prefix = (
      'gs://{backup_bucket}/datastore-backups/{timestamp}'.format(
          backup_bucket=backup_bucket, timestamp=timestamp))
  body = {
      'output_url_prefix': output_url_prefix,
      'entity_filter': {
          'kinds': kinds
      }
  }

  try:
    request = _datastore_client().projects().export(
        projectId=app_id, body=body)
    response = request.execute()
  except googleapiclient.errors.HttpError as e:
    message = 'Datastore export failed.'
    status_code = e.resp.status
    logs.log_error(message, error=str(e))
  else:
    message = 'Datastore export succeeded.'
    status_code = 200
    logs.log(message, response=response)

  # Report the outcome to the caller either way.
  self.response.headers['Content-Type'] = 'text/plain'
  self.response.out.write(message)
  self.response.set_status(status_code)
def test_get_with_non_existent_configs_directory(self):
  """Test with non-existent configs directory."""
  environment.set_value('CONFIG_DIR_OVERRIDE', 'non-existent')
  # Both construction and lookup stay inside the context: the error may be
  # raised by Config() itself or by get() — either satisfies the assertion.
  with self.assertRaises(errors.BadConfigError):
    local_config.Config().get('foo')
def test_with_cache(self):
  """Test that we invoke _search_key once with caching enabled."""
  config = local_config.Config()
  # Repeated lookups of the same key should be served from the cache.
  for _attempt in range(10):
    self.assertEqual('value', config.get('a.b.c'))
  self.assertEqual(1, self.mock._search_key.call_count)  # pylint: disable=protected-access
def _project_configs():
  """Return the full GCE clusters configuration."""
  clusters_config = local_config.Config(local_config.GCE_CLUSTERS_PATH)
  return clusters_config.get()
def _get_project_ids():
  """Return the GCE project IDs."""
  clusters = local_config.Config(local_config.GCE_CLUSTERS_PATH).get()
  return list(clusters.keys())