# Example #1
# 0
def test_get_bucket_name_for_agreements_documents():
    """The agreements bucket name embeds the stage name twice."""
    expected_by_stage = {
        'dev': 'digitalmarketplace-agreements-dev-dev',
        'preview': 'digitalmarketplace-agreements-preview-preview',
        'staging': 'digitalmarketplace-agreements-staging-staging',
    }
    for stage, expected in expected_by_stage.items():
        assert get_bucket_name(stage, 'agreements') == expected
"""

import sys
sys.path.insert(0, '.')

from dmscripts.env import get_api_endpoint_from_stage
from dmscripts.bulk_upload_documents import get_bucket_name, get_all_files_of_type
from dmscripts.upload_counterpart_agreements import upload_counterpart_file
from docopt import docopt
from dmapiclient import DataAPIClient

from dmutils.s3 import S3


if __name__ == '__main__':
    # CLI options are declared in the module docstring (docopt convention).
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    client = DataAPIClient(get_api_endpoint_from_stage(stage), arguments['<api_token>'])
    framework_slug = arguments['<framework_slug>']
    document_directory = arguments['<documents_directory>']
    dry_run = arguments['--dry-run']

    # A dry run never touches S3, so no bucket client is created.
    bucket = None if dry_run else S3(get_bucket_name(stage, "agreements"))

    # Upload each PDF found in the documents directory.
    for file_path in get_all_files_of_type(document_directory, "pdf"):
        upload_counterpart_file(bucket, framework_slug, file_path, dry_run, client)
)

from docopt import docopt

from dmutils.s3 import S3


if __name__ == "__main__":
    # CLI options are declared in the module docstring (docopt convention).
    arguments = docopt(__doc__)

    stage = arguments["<stage>"]
    framework_slug = arguments["<framework_slug>"]
    local_directory = arguments["<local_documents_directory>"]
    bucket_category = arguments["--bucket_category"]
    file_type = arguments["--file_type"]
    tsv_path = arguments["<tsv_path>"]
    dry_run = arguments["--dry-run"]

    # A dry run never touches S3, so no bucket client is created.
    bucket = None if dry_run else S3(get_bucket_name(stage, bucket_category))

    # The TSV maps supplier ids to names; needed to build upload paths.
    supplier_name_dict = get_supplier_name_dict_from_tsv(tsv_path)

    for file_path in get_all_files_of_type(local_directory, file_type):
        try:
            upload_file(
                bucket, dry_run, file_path, framework_slug, bucket_category,
                supplier_name_dict=supplier_name_dict,
            )
        except ValueError as error:
            # A file that cannot be matched is skipped; the run continues.
            print("SKIPPING: {}".format(error))
            continue
def test_get_bucket_name_for_agreements_documents():
    """Bucket names for agreements documents repeat the stage name."""
    cases = [
        ('dev', 'digitalmarketplace-agreements-dev-dev'),
        ('preview', 'digitalmarketplace-agreements-preview-preview'),
        ('staging', 'digitalmarketplace-agreements-staging-staging'),
    ]
    for stage, expected in cases:
        assert get_bucket_name(stage, 'agreements') == expected
        )

    stage = arguments['<stage>']
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage),
                                    get_auth_token('api', stage))
    framework = data_api_client.get_framework(
        arguments['<framework_slug>'])["frameworks"]
    document_directory = arguments['<documents_directory>']
    dry_run = arguments['--dry-run']
    dm_notify_client = arguments.get("--notify-key") and scripts_notify_client(
        arguments["--notify-key"], logger=logger)

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, "agreements"))

    failure_count = 0

    for file_path in get_all_files_of_type(document_directory, "pdf"):
        try:
            upload_counterpart_file(
                bucket,
                framework,
                file_path,
                dry_run,
                data_api_client,
                dm_notify_client=dm_notify_client,
                notify_template_id=arguments.get("--notify-template-id"),
                notify_fail_early=False,
                logger=logger,
    # Only the two known report types can be exported.
    if report_type not in ('users', 'suppliers'):
        logger.error('Please specify users or suppliers to be exported.')
        sys.exit(1)

    # Make sure the local output directory exists before writing CSVs.
    if not os.path.exists(output_dir):
        logger.info("Creating {} directory".format(output_dir))
        os.makedirs(output_dir)

    if dry_run:
        bucket = None
    elif stage == 'local':
        # Local runs share the dev uploads bucket.
        bucket = S3('digitalmarketplace-dev-uploads')
    else:
        # e.g. preview gives 'digitalmarketplace-reports-preview-preview'
        bucket = S3(get_bucket_name(stage, "reports"))

    ok = generate_csv_and_upload_to_s3(
        bucket,
        framework_slug,
        report_type,
        output_dir,
        data_api_client,
        dry_run=dry_run,
        user_research_opted_in=user_research_opted_in,
        logger=logger,
    )

    # Non-zero exit signals failure to the calling shell/CI job.
    if not ok:
        sys.exit(1)
# Example #7
# 0
    framework_slug = arguments['<framework_slug>']
    local_directory = arguments['<local_documents_directory>']
    bucket_category = arguments['--bucket_category']
    document_category = arguments['<document_category>']
    document_type = arguments['--document_type']
    dry_run = arguments['--dry-run']

    # Only these categories may be uploaded through this script.
    document_categories = ['result-letter', 'framework-agreement']

    if document_category not in document_categories:
        print('Document needs to be one of {}'.format(document_categories))
        # Signed/countersigned agreement paths live in the database now,
        # so point users of the old categories at the reason explicitly.
        if document_category in ('signed-framework-agreement',
                                 'countersigned-framework-agreement'):
            print('Signed and countersigned agreement paths now need to be stored in database so cannot be uploaded '
                  'using this script.')
        sys.exit(1)

    # A dry run never touches S3, so no bucket client is created.
    bucket = None if dry_run else S3(get_bucket_name(stage, bucket_category))

    for file_path in get_all_files_of_type(local_directory, document_type):
        try:
            upload_file(bucket, dry_run, file_path, framework_slug,
                        bucket_category, document_category, document_type)
        except ValueError as error:
            # A file that cannot be matched is skipped; the run continues.
            print("SKIPPING: {}".format(error))
            continue