def export_dos_opportunities(
    client, logger, stage: str, output_dir, dry_run: bool = False
):
    output_dir = Path(output_dir)
    if not output_dir.exists():
        logger.info(f"Creating {output_dir} directory")
        output_dir.mkdir(parents=True)

    latest_framework_slug = get_latest_dos_framework(client)

    communications_bucket = S3(get_bucket_name(stage, "communications"))
    reports_bucket = S3(get_bucket_name(stage, "reports"))

    logger.info("Exporting DOS opportunity data to CSV")

    # Get the data
    rows = get_brief_data(client, logger, include_buyer_user_details=True)

    # Construct CSV for admins
    write_rows_to_csv(rows, output_dir / "opportunity-data-for-admins.csv", logger)
    # Construct public CSV (filter out buyer details)
    write_rows_to_csv(
        [
            OrderedDict((k, v) for k, v in row.items() if k in DOS_OPPORTUNITY_HEADERS)
            for row in rows
        ],
        output_dir / DOWNLOAD_FILE_NAME,
        logger
    )

    # Upload admin CSV to reports bucket
    upload_file_to_s3(
        output_dir / "opportunity-data-for-admins.csv",
        reports_bucket,
        f"{latest_framework_slug}/reports/{DOWNLOAD_FILE_NAME}",
        DOWNLOAD_FILE_NAME,
        public=False,
        dry_run=dry_run,
        logger=logger
    )

    # Upload public CSV to S3
    upload_file_to_s3(
        output_dir / DOWNLOAD_FILE_NAME,
        communications_bucket,
        f"{latest_framework_slug}/communications/data/{DOWNLOAD_FILE_NAME}",
        DOWNLOAD_FILE_NAME,
        public=True,
        dry_run=dry_run,
        logger=logger
    )
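# Usage sketch (an assumption, not from the source): a dry-run invocation of
# export_dos_opportunities. The DataAPIClient name and the stage/auth helpers
# mirror the construction pattern visible in the draft-service upload script
# below; "preview" and "data" are illustrative values.
import logging

logger = logging.getLogger("export-dos-opportunities")
client = DataAPIClient(
    base_url=get_api_endpoint_from_stage("preview"),
    auth_token=get_auth_token('api', "preview"),
)
# With dry_run=True the uploads should be logged rather than performed,
# assuming upload_file_to_s3 honours its dry_run flag as the signature suggests.
export_dos_opportunities(client, logger, "preview", "data", dry_run=True)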
def test_get_bucket_name_returns_none_for_invalid_bucket_category(self):
    assert get_bucket_name('local', 'bananas') is None

def test_get_bucket_name_returns_none_for_invalid_stage(self):
    assert get_bucket_name('xanadu', 'agreements') is None

def test_get_bucket_name_for_agreements_documents(self, stage, expected_bucket_name):
    assert get_bucket_name(stage, 'agreements') == expected_bucket_name
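# A sketch of the behaviour these tests pin down (an assumption, not the repo's
# implementation): return None for unknown stages or categories, otherwise build
# the conventional name, e.g. ('preview', 'reports') ->
# 'digitalmarketplace-reports-preview-preview', matching the naming comment in
# the report-generation script below. VALID_STAGES and VALID_BUCKET_CATEGORIES
# are hypothetical constants; callers special-case 'local' themselves.
def get_bucket_name(stage, bucket_category):
    VALID_STAGES = {'local', 'preview', 'staging', 'production'}
    VALID_BUCKET_CATEGORIES = {'agreements', 'communications', 'reports', 'submissions'}
    if stage not in VALID_STAGES or bucket_category not in VALID_BUCKET_CATEGORIES:
        return None
    return f"digitalmarketplace-{bucket_category}-{stage}-{stage}"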
if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    framework_slug = arguments['<framework_slug>']
    local_directory = arguments['<local_documents_directory>']
    bucket_category = arguments['--bucket_category']
    file_type = arguments['--file_type']
    tsv_path = arguments['--tsv-path']
    dry_run = arguments['--dry-run']

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, bucket_category))

    supplier_name_dict = get_supplier_name_dict_from_tsv(tsv_path)

    if not os.path.exists(local_directory):
        print(f"Local directory {local_directory} not found. Aborting upload.")
        exit(1)

    for path in get_all_files_of_type(local_directory, file_type):
        try:
            upload_file(
                bucket, dry_run, path, framework_slug, bucket_category,
                supplier_name_dict=supplier_name_dict
            )
        # The excerpt is truncated here; this minimal handler (an assumption)
        # is added only so the try block parses.
        except ValueError as e:
            print(f"Skipping {path}: {e}")
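# A minimal sketch (an assumption) of what get_supplier_name_dict_from_tsv
# could look like: a two-column TSV of supplier ID and supplier name loaded
# into a dict keyed by ID. The actual column layout is not shown in this
# excerpt.
import csv

def get_supplier_name_dict_from_tsv(tsv_path):
    if tsv_path is None:
        return None
    with open(tsv_path, newline='') as f:
        # Each row is assumed to be: supplier_id <TAB> supplier_name
        return {row[0]: row[1] for row in csv.reader(f, delimiter='\t')}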
if report_type not in ['users', 'suppliers']:
    logger.error('Please specify users or suppliers to be exported.')
    sys.exit(1)

if not os.path.exists(output_dir):
    logger.info("Creating {} directory".format(output_dir))
    os.makedirs(output_dir)

if dry_run:
    bucket = None
else:
    if stage == 'local':
        bucket = S3('digitalmarketplace-dev-uploads')
    else:
        # e.g. preview would give 'digitalmarketplace-reports-preview-preview'
        bucket = S3(get_bucket_name(stage, "reports"))

ok = generate_csv_and_upload_to_s3(
    bucket,
    framework_slug,
    report_type,
    output_dir,
    data_api_client,
    dry_run=dry_run,
    user_research_opted_in=user_research_opted_in,
    logger=logger,
)

if not ok:
    sys.exit(1)
content_loader = ContentLoader(content_path)
if framework['isESignatureSupported']:
    content_loader.load_messages(framework_slug, ['e-signature'])
    contract_title = content_loader.get_message(
        framework_slug, 'e-signature', 'framework_contract_title')
else:
    contract_title = 'Framework Agreement'

dry_run = arguments['--dry-run']

dm_notify_client = arguments.get("--notify-key") and scripts_notify_client(
    arguments["--notify-key"], logger=logger)

if dry_run:
    bucket = None
else:
    bucket = S3(get_bucket_name(stage, "agreements"))

failure_count = 0

for file_path in get_all_files_of_type(document_directory, "pdf"):
    try:
        upload_counterpart_file(
            bucket,
            framework,
            file_path,
            dry_run,
            data_api_client,
            contract_title,
            dm_notify_client=dm_notify_client,
            notify_template_id=arguments.get("--notify-template-id"),
            notify_fail_early=False,
        )
    # The excerpt is truncated here; this minimal completion (an assumption)
    # closes the call and counts failures, matching the failure_count
    # initialised above.
    except Exception:
        failure_count += 1
# (Start of this excerpt is truncated; the client construction is inferred
# from the data_api_client usage below, and DataAPIClient is an assumption.)
data_api_client = DataAPIClient(
    base_url=get_api_endpoint_from_stage(stage),
    auth_token=get_auth_token('api', stage))

local_directory = arguments['--folder']
file_format = arguments.get('--file-format', 'pdf')

# Check framework status
framework = data_api_client.get_framework(framework_slug)
framework_status = framework['frameworks']['status']
if framework_status not in ['open']:
    print(
        f"Cannot update services for framework {framework_slug} in status '{framework_status}'"
    )
    exit(1)

# Check folder exists
if local_directory and not os.path.exists(local_directory):
    print(f"Local directory {local_directory} not found. Aborting upload.")
    exit(1)

# Setup S3 stuff
bucket_category = "submissions"
bucket = None if dry_run else S3(get_bucket_name(stage, bucket_category))
try:
    assets_path = get_assets_endpoint_from_stage(stage)
except NotImplementedError:
    assets_path = "http://localhost"

upload_draft_service_pdfs_from_folder(
    bucket, bucket_category, assets_path, local_directory,
    data_api_client, framework_slug, file_format, dry_run)
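# get_all_files_of_type is used by several of the scripts above; a plausible
# implementation (a sketch under assumptions, not the repo's own) walks the
# directory tree and yields paths with the requested extension, e.g.
# get_all_files_of_type(local_directory, "pdf").
import os

def get_all_files_of_type(local_directory, file_type):
    for root, _subdirs, files in os.walk(local_directory):
        for filename in files:
            if filename.lower().endswith(f".{file_type}"):
                yield os.path.join(root, filename)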