def setUp(self):
    # Builds an UpdateScheduler backed by a mocked Cloud Scheduler client so
    # the tests never reach the real scheduling service.
    config = configs.ExecutorConfig(
        gcp_project_id='google.com:datcom-data',
        scheduler_location='us-central1',
        github_auth_username='******',
        github_auth_access_token='access-token',
        dashboard_oauth_client_id='dashboard',
        importer_oauth_client_id='importer',
        email_account='@google',
        email_token='token')
    self.scheduler = update_scheduler.UpdateScheduler(
        utils.SchedulerClientMock(), None, config, None)

def main(_):
    """Runs the local executor."""
    config = configs.ExecutorConfig(
        github_repo_name=FLAGS.repo_name,
        github_repo_owner_username=FLAGS.owner_username,
        github_auth_username=FLAGS.username,
        github_auth_access_token=FLAGS.access_token)
    executor = import_executor.ImportExecutor(
        uploader=file_uploader.LocalFileUploader(output_dir=FLAGS.output_dir),
        github=github_api.GitHubRepoAPI(config.github_repo_owner_username,
                                        config.github_repo_name),
        config=config)
    results = executor.execute_imports_on_update(FLAGS.import_name)
    print(results)

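# A hypothetical invocation of this local executor, assuming the absl flags
# referenced above (--repo_name, --owner_username, --username, --access_token,
# --import_name, --output_dir) and an illustrative script name; every value
# below is a placeholder, not taken from the repository.
#
#   python local_executor.py \
#       --repo_name=<github-repo-name> \
#       --owner_username=<github-repo-owner> \
#       --username=<github-auth-username> \
#       --access_token=<github-auth-access-token> \
#       --import_name=<absolute-import-name> \
#       --output_dir=/tmp/import-executor-output
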
def schedule_crons():
    """Endpoint for scheduling cron jobs for updating imports upon GitHub commits."""
    task_info = flask.request.get_json(force=True)
    if 'COMMIT_SHA' not in task_info:
        return 'COMMIT_SHA not found'
    task_configs = task_info.get('configs', {})
    config = configs.ExecutorConfig(**task_configs)
    import_scheduler = update_scheduler.UpdateScheduler(
        client=scheduler.CloudSchedulerClient(),
        github=github_api.GitHubRepoAPI(
            repo_owner_username=config.github_repo_owner_username,
            repo_name=config.github_repo_name,
            auth_username=config.github_auth_username,
            auth_access_token=config.github_auth_access_token),
        config=config)
    return dataclasses.asdict(
        import_scheduler.schedule_on_commit(task_info['COMMIT_SHA']))

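# A minimal sketch of the JSON body this endpoint expects, inferred from the
# handler above: 'COMMIT_SHA' is required and 'configs' optionally overrides
# ExecutorConfig fields. The values are placeholders, not real entries from
# the repository.
_EXAMPLE_SCHEDULE_CRONS_REQUEST = {
    'COMMIT_SHA': '0123456789abcdef0123456789abcdef01234567',
    'configs': {
        'github_repo_owner_username': 'example-owner',
        'github_repo_name': 'example-repo',
    },
}
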
def scheduled_updates():
    """Endpoint for updating imports."""
    task_info = flask.request.get_json(force=True)
    if 'absolute_import_name' not in task_info:
        return {'error': 'absolute_import_name not found'}
    task_configs = task_info.get('configs', {})
    config = configs.ExecutorConfig(**task_configs)
    executor = import_executor.ImportExecutor(
        uploader=file_uploader.GCSFileUploader(
            project_id=config.gcs_project_id,
            bucket_name=config.storage_prod_bucket_name),
        github=github_api.GitHubRepoAPI(
            repo_owner_username=config.github_repo_owner_username,
            repo_name=config.github_repo_name,
            auth_username=config.github_auth_username,
            auth_access_token=config.github_auth_access_token),
        dashboard=dashboard_api.DashboardAPI(config.dashboard_oauth_client_id),
        config=config)
    result = executor.execute_imports_on_update(
        task_info['absolute_import_name'])
    return dataclasses.asdict(result)

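# A minimal sketch of the JSON body this endpoint expects, inferred from the
# handler above: 'absolute_import_name' is required and 'configs' optionally
# overrides ExecutorConfig fields. The import name and bucket value are
# placeholders.
_EXAMPLE_SCHEDULED_UPDATES_REQUEST = {
    'absolute_import_name': '<absolute-import-name>',
    'configs': {
        'storage_prod_bucket_name': 'example-prod-bucket',
    },
}
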
def execute_imports():
    """Endpoint for executing imports on GitHub commits.

    Logs to the import progress dashboard.
    """
    task_info = flask.request.get_json(force=True)
    if 'COMMIT_SHA' not in task_info:
        return {'error': 'COMMIT_SHA not found'}
    commit_sha = task_info['COMMIT_SHA']
    repo_name = task_info.get('REPO_NAME')
    branch_name = task_info.get('BRANCH_NAME')
    pr_number = task_info.get('PR_NUMBER')
    task_configs = task_info.get('configs', {})
    config = configs.ExecutorConfig(**task_configs)
    executor = import_executor.ImportExecutor(
        # Executor output is staged in the dev bucket under the executor
        # output prefix.
        uploader=file_uploader.GCSFileUploader(
            project_id=config.gcs_project_id,
            bucket_name=config.storage_dev_bucket_name,
            path_prefix=config.storage_executor_output_prefix),
        github=github_api.GitHubRepoAPI(
            repo_owner_username=config.github_repo_owner_username,
            repo_name=config.github_repo_name,
            auth_username=config.github_auth_username,
            auth_access_token=config.github_auth_access_token),
        config=config,
        dashboard=dashboard_api.DashboardAPI(config.dashboard_oauth_client_id),
        notifier=email_notifier.EmailNotifier(config.email_account,
                                              config.email_token),
        # The import service is pointed at unresolved MCF in the dev bucket
        # and writes resolved MCF to the importer bucket.
        importer=import_service.ImportServiceClient(
            project_id=config.gcs_project_id,
            executor_output_prefix=config.storage_executor_output_prefix,
            importer_output_prefix=config.storage_importer_output_prefix,
            unresolved_mcf_bucket_name=config.storage_dev_bucket_name,
            resolved_mcf_bucket_name=config.storage_importer_bucket_name))
    result = executor.execute_imports_on_commit(commit_sha=commit_sha,
                                                repo_name=repo_name,
                                                branch_name=branch_name,
                                                pr_number=pr_number)
    return dataclasses.asdict(result)

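# A minimal sketch of the JSON body this endpoint expects, inferred from the
# handler above: 'COMMIT_SHA' is required, while 'REPO_NAME', 'BRANCH_NAME',
# 'PR_NUMBER', and 'configs' are optional. All values are placeholders.
_EXAMPLE_EXECUTE_IMPORTS_REQUEST = {
    'COMMIT_SHA': '0123456789abcdef0123456789abcdef01234567',
    'REPO_NAME': 'example-repo',
    'BRANCH_NAME': 'example-branch',
    'PR_NUMBER': '<pr-number>',
    'configs': {
        'email_account': 'notifier@example.com',
        'email_token': '<token>',
    },
}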