import os
import shutil
import tempfile

from dmscripts.bulk_upload_documents import get_all_files_of_type


def test_get_all_files_of_type_for_flat_folder():
    # Create a throwaway folder containing two PDFs, then check both are found.
    temp_folder_path = tempfile.mkdtemp()
    try:
        open(os.path.join(temp_folder_path, 'test1.pdf'), 'w+').close()
        open(os.path.join(temp_folder_path, 'test2.pdf'), 'w+').close()
        assert len(list(get_all_files_of_type(temp_folder_path, 'pdf'))) == 2
    finally:
        shutil.rmtree(temp_folder_path)  # clean up even if the assertion fails
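get_all_files_of_type comes from dmscripts.bulk_upload_documents, and the test above only covers a flat folder. A minimal sketch of the behaviour these scripts rely on, recursively yielding paths with a matching extension, could look like this (the real implementation may differ):

import os

def get_all_files_of_type(local_directory, file_type):
    # Walk the tree and yield the path of every file with the given extension.
    for root, _subdirs, filenames in os.walk(local_directory):
        for filename in filenames:
            if filename.endswith('.{}'.format(file_type)):
                yield os.path.join(root, filename)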
Example #2
"""

import sys
sys.path.insert(0, '.')

from dmscripts.env import get_api_endpoint_from_stage
from dmscripts.bulk_upload_documents import get_bucket_name, get_all_files_of_type
from dmscripts.upload_counterpart_agreements import upload_counterpart_file
from docopt import docopt
from dmapiclient import DataAPIClient

from dmutils.s3 import S3


if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    client = DataAPIClient(get_api_endpoint_from_stage(stage), arguments['<api_token>'])
    framework_slug = arguments['<framework_slug>']
    document_directory = arguments['<documents_directory>']
    dry_run = arguments['--dry-run']

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, "agreements"))

    for file_path in get_all_files_of_type(document_directory, "pdf"):
        upload_counterpart_file(bucket, framework_slug, file_path, dry_run, client)
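docopt matches the command line against the usage string and returns a plain dict keyed by the pattern's elements, which is exactly what the script reads above. For example (the stage, token, and framework slug here are made up):

from docopt import docopt

usage = """Usage:
    upload-counterpart-agreements.py <stage> <api_token> <framework_slug> <documents_directory> [--dry-run]
"""

args = docopt(usage, argv=['preview', 'my-token', 'g-cloud-8', './agreements', '--dry-run'])
assert args['<stage>'] == 'preview'
assert args['--dry-run'] is True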
from dmscripts.bulk_upload_documents import (
    get_bucket_name,
    get_all_files_of_type,
    get_supplier_name_dict_from_tsv,
    upload_file,
)

from docopt import docopt

from dmutils.s3 import S3


if __name__ == "__main__":
    arguments = docopt(__doc__)

    stage = arguments["<stage>"]
    framework_slug = arguments["<framework_slug>"]
    local_directory = arguments["<local_documents_directory>"]
    bucket_category = arguments["--bucket_category"]
    file_type = arguments["--file_type"]
    tsv_path = arguments["<tsv_path>"]
    dry_run = arguments["--dry-run"]

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, bucket_category))

    supplier_name_dict = get_supplier_name_dict_from_tsv(tsv_path)
    for path in get_all_files_of_type(local_directory, file_type):
        try:
            upload_file(bucket, dry_run, path, framework_slug, bucket_category, supplier_name_dict=supplier_name_dict)
        except ValueError as e:
            print("SKIPPING: {}".format(e))
            continue
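upload_file resolves supplier names from the mapping returned by get_supplier_name_dict_from_tsv. The TSV layout is not shown in these excerpts; a minimal sketch, assuming a two-column supplier_id/supplier_name file, might be:

import csv

def get_supplier_name_dict_from_tsv(tsv_path):
    # Assumed layout: one supplier per row, "supplier_id<TAB>supplier_name".
    if tsv_path is None:
        return None
    with open(tsv_path) as tsv_file:
        return {row[0]: row[1]
                for row in csv.reader(tsv_file, delimiter='\t')
                if len(row) >= 2}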
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage),
                                    get_auth_token('api', stage))
    framework = data_api_client.get_framework(
        arguments['<framework_slug>'])["frameworks"]
    document_directory = arguments['<documents_directory>']
    dry_run = arguments['--dry-run']
    dm_notify_client = arguments.get("--notify-key") and scripts_notify_client(
        arguments["--notify-key"], logger=logger)

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, "agreements"))

    failure_count = 0

    for file_path in get_all_files_of_type(document_directory, "pdf"):
        try:
            upload_counterpart_file(
                bucket,
                framework,
                file_path,
                dry_run,
                data_api_client,
                dm_notify_client=dm_notify_client,
                notify_template_id=arguments.get("--notify-template-id"),
                notify_fail_early=False,
                logger=logger,
            )
        except (OSError, IOError, S3ResponseError, EmailError, APIError):
            # upload_counterpart_file has already logged these, so just count the failure
            failure_count += 1
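The excerpt ends at the upload loop; presumably failure_count then feeds the script's exit status. A plausible closing step, assuming the truncated preamble imported sys and set up logger (this is not shown in the original), would be:

    if failure_count:
        logger.error("{} counterpart agreement uploads failed".format(failure_count))
        sys.exit(1)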
    stage = arguments['<stage>']
    framework_slug = arguments['<framework_slug>']
    local_directory = arguments['<local_documents_directory>']
    bucket_category = arguments['--bucket_category']
    document_category = arguments['<document_category>']
    document_type = arguments['--document_type']
    dry_run = arguments['--dry-run']

    document_categories = ['result-letter',
                           'framework-agreement',
                           'signed-framework-agreement',
                           'countersigned-framework-agreement']

    if document_category not in document_categories:
        print('Document needs to be one of {}'.format(document_categories))
        sys.exit(1)

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, bucket_category))

    for path in get_all_files_of_type(local_directory, document_type):
        try:
            upload_file(bucket, dry_run, path, framework_slug, bucket_category,
                        document_category, document_type)
        except ValueError as e:
            print("SKIPPING: {}".format(e))
            continue
Example #7
    framework_slug = arguments['<framework_slug>']
    local_directory = arguments['<local_documents_directory>']
    bucket_category = arguments['--bucket_category']
    document_category = arguments['<document_category>']
    document_type = arguments['--document_type']
    dry_run = arguments['--dry-run']

    document_categories = ['result-letter', 'framework-agreement']

    if document_category not in document_categories:
        print('Document needs to be one of {}'.format(document_categories))
        if document_category in ('signed-framework-agreement',
                                 'countersigned-framework-agreement'):
            print(
                'Signed and countersigned agreement paths now need to be stored in the database, '
                'so they cannot be uploaded using this script.')
        sys.exit(1)

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, bucket_category))

    for path in get_all_files_of_type(local_directory, document_type):
        try:
            upload_file(bucket, dry_run, path, framework_slug, bucket_category,
                        document_category, document_type)
        except ValueError as e:
            print("SKIPPING: {}".format(e))
            continue
from docopt import docopt

from dmscripts.bulk_upload_documents import (
    get_bucket_name,
    get_all_files_of_type,
    get_supplier_name_dict_from_tsv,
    upload_file,
)
from dmutils.s3 import S3

if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    framework_slug = arguments['<framework_slug>']
    local_directory = arguments['<local_documents_directory>']
    bucket_category = arguments['--bucket_category']
    file_type = arguments['--file_type']
    tsv_path = arguments['<tsv_path>']
    dry_run = arguments['--dry-run']

    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, bucket_category))

    supplier_name_dict = get_supplier_name_dict_from_tsv(tsv_path)
    for path in get_all_files_of_type(local_directory, file_type):
        try:
            upload_file(bucket,
                        dry_run,
                        path,
                        framework_slug,
                        bucket_category,
                        supplier_name_dict=supplier_name_dict)
        except ValueError as e:
            print("SKIPPING: {}".format(e))
            continue
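All of these scripts share the same dry-run convention: pass bucket=None and let the upload helper log instead of uploading. A hypothetical helper illustrating just that convention (upload_file's real signature and key layout live in dmscripts.bulk_upload_documents and may differ; S3.save taking a key and a file object is an assumption about the dmutils wrapper):

import os

def upload_file_sketch(bucket, dry_run, path, framework_slug, bucket_category):
    # Hypothetical destination key; the real scheme lives in dmscripts.
    upload_path = '{}/{}/{}'.format(framework_slug, bucket_category, os.path.basename(path))
    if dry_run:
        print('[Dry-run] would upload {} to {}'.format(path, upload_path))
    else:
        with open(path, 'rb') as source_file:
            bucket.save(upload_path, source_file)  # assumed dmutils S3 interface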