def main(stage, framework_slug, api_token, user, supplier_ids=None):
    agreements_bucket_name = 'digitalmarketplace-agreements-{0}-{0}'.format(stage)
    agreements_bucket = S3(agreements_bucket_name)

    api_client = DataAPIClient(
        get_api_endpoint_from_stage(stage, 'api'),
        api_token
    )

    if supplier_ids is not None:
        supplier_ids = [int(supplier_id.strip()) for supplier_id in supplier_ids.split(',')]

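    # Every supplier on the framework that has returned a signed agreement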
    suppliers = api_client.find_framework_suppliers(framework_slug, agreement_returned=True)['supplierFrameworks']

    if supplier_ids is not None:
        missing_supplier_ids = set(supplier_ids) - set(supplier['supplierId'] for supplier in suppliers)
        if missing_supplier_ids:
            raise Exception("Invalid supplier IDs: {}".format(', '.join(str(x) for x in missing_supplier_ids)))
    else:
        supplier_ids = set(supplier['supplierId'] for supplier in suppliers)

    for supplier_id in supplier_ids:
        logger.info("Resetting agreement returned flag for supplier {supplier_id}",
                    extra={'supplier_id': supplier_id})
        api_client.unset_framework_agreement_returned(supplier_id, framework_slug, user)

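    # Find the signed agreement documents belonging to the affected suppliers so they can be deleted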
    signed_agreements = [
        document for document in agreements_bucket.list('{}/agreements/'.format(framework_slug))
        if match_signed_agreements(supplier_ids, document['path'])
    ]

    for document in signed_agreements:
        logger.info("Deleting {path}", extra={'path': document['path']})
        agreements_bucket.delete_key(document['path'])
import sys
sys.path.insert(0, '.')

import getpass

from docopt import docopt

from dmapiclient import DataAPIClient

from dmscripts.env import get_api_endpoint_from_stage
from dmscripts.logging import configure_logger
from dmscripts.framework_utils import set_framework_result


logger = configure_logger()

if __name__ == '__main__':
    arguments = docopt(__doc__)

    framework_slug = arguments['<framework>']
    data_api_url = get_api_endpoint_from_stage(arguments['<stage>'], 'api')
    client = DataAPIClient(data_api_url, arguments['--api-token'])
    user = getpass.getuser()
    result = bool(arguments['--pass'])

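    # Read supplier IDs from the file, one per line, skipping blank lines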
    with open(arguments['<ids_file>'], 'r') as f:
        supplier_ids = [line.strip() for line in f if line.strip()]

    for supplier_id in supplier_ids:
        logger.info(set_framework_result(client, framework_slug, supplier_id, result, user))
    logger.info("DONE")
"""
import sys
sys.path.insert(0, '.')

from docopt import docopt
from dmscripts.env import get_api_endpoint_from_stage
from dmscripts.export_dos_suppliers import export_suppliers
from dmapiclient import DataAPIClient
from dmutils.content_loader import ContentLoader


if __name__ == '__main__':
    arguments = docopt(__doc__)

    STAGE = arguments['<stage>']
    API_TOKEN = arguments['<api_token>']
    CONTENT_PATH = arguments['<content_path>']
    OUTPUT_DIR = arguments['<output_dir>']

    client = DataAPIClient(get_api_endpoint_from_stage(STAGE), API_TOKEN)
    content_loader = ContentLoader(CONTENT_PATH)

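    # If a supplier ID file is given, restrict the export to the IDs it lists (one per line)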
    supplier_id_file = arguments['<supplier_id_file>']
    if supplier_id_file:
        with open(arguments['<supplier_id_file>'], 'r') as f:
            supplier_ids = [int(line.strip()) for line in f if line.strip()]
    else:
        supplier_ids = None

    export_suppliers(client, content_loader, OUTPUT_DIR, supplier_ids)
            copy_document(draft_document_path, live_document_path)
            document_updates[document_key] = get_live_asset_url(live_document_path)

    if dry_run:
        print("    > not updating document URLs {}".format(document_updates))
    else:
        client.update_service(service_id, document_updates, 'Moving documents to live bucket')
        print("    > document URLs updated")


if __name__ == '__main__':
    arguments = docopt(__doc__)

    STAGE = arguments['<stage>']
    api_url = get_api_endpoint_from_stage(STAGE)

    client = DataAPIClient(api_url, arguments['<api_token>'])
    DRAFT_BUCKET = S3(arguments['<draft_bucket>'])
    DOCUMENTS_BUCKET = S3(arguments['<documents_bucket>'])
    DRY_RUN = arguments['--dry-run']
    copy_document = document_copier(DRAFT_BUCKET, DOCUMENTS_BUCKET, DRY_RUN)

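    # Publish each supplier's submitted draft services, copying their documents to the live bucket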
    suppliers = find_suppliers_on_framework(client, FRAMEWORK_SLUG)

    for supplier in suppliers:
        print("Migrating drafts for supplier {} - {}".format(supplier['supplierId'], supplier['supplierName']))
        draft_services = find_submitted_draft_services(client, supplier, FRAMEWORK_SLUG)

        for draft_service in draft_services:
            make_draft_service_live(client, copy_document, draft_service, FRAMEWORK_SLUG, DRY_RUN)
"""

import sys
sys.path.insert(0, '.')

from dmscripts.env import get_api_endpoint_from_stage
from dmscripts.bulk_upload_documents import get_bucket_name, get_all_files_of_type
from dmscripts.upload_counterpart_agreements import upload_counterpart_file
from docopt import docopt
from dmapiclient import DataAPIClient

from dmutils.s3 import S3


if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    client = DataAPIClient(get_api_endpoint_from_stage(stage), arguments['<api_token>'])
    framework_slug = arguments['<framework_slug>']
    document_directory = arguments['<documents_directory>']
    dry_run = arguments['--dry-run']

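    # A dry run never uploads anything, so skip creating the S3 bucket client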
    if dry_run:
        bucket = None
    else:
        bucket = S3(get_bucket_name(stage, "agreements"))

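    # Upload a counterpart agreement for every PDF found in the documents directory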
    for file_path in get_all_files_of_type(document_directory, "pdf"):
        upload_counterpart_file(bucket, framework_slug, file_path, dry_run, client)
import getpass
import json


from dmscripts.env import get_api_endpoint_from_stage
from dmapiclient import DataAPIClient
from dmscripts.mark_definite_framework_results import mark_definite_framework_results
from dmscripts.logging import configure_logger, INFO as loglevel_INFO, DEBUG as loglevel_DEBUG


if __name__ == "__main__":
    from docopt import docopt
    args = docopt(__doc__)

    client = DataAPIClient(get_api_endpoint_from_stage(args["<stage>"], "api"), args["<api_token>"])
    updated_by = args["--updated-by"] or getpass.getuser()

    with open(args["<declaration_definite_pass_schema_path>"], "r") as f:
        declaration_definite_pass_schema = json.load(f)

    declaration_baseline_schema = \
        (declaration_definite_pass_schema.get("definitions") or {}).get("baseline")

    if args["<draft_service_schema_path>"]:
        with open(args["<draft_service_schema_path>"], "r") as f:
            service_schema = json.load(f)
    else:
        service_schema = None

    configure_logger({"script": loglevel_DEBUG if args["--verbose"] else loglevel_INFO})

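    # Apply definite pass/fail results using the declaration schema (and draft service schema, if given)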
    mark_definite_framework_results(
        client,
Usage:
    scripts/oneoff/generate-g-cloud-8-master-csv.py <stage> <auth_token>

Example:
    scripts/oneoff/generate-g-cloud-8-master-csv.py preview myToken
"""
from docopt import docopt

from dmapiclient import DataAPIClient

# add cwd to pythonpath
import sys
sys.path.insert(0, '.')

from dmscripts.generate_framework_master_csv import GenerateMasterCSV
from dmscripts.env import get_api_endpoint_from_stage

if __name__ == "__main__":
    arguments = docopt(__doc__)

    client = DataAPIClient(
        base_url=get_api_endpoint_from_stage(arguments['<stage>']),
        auth_token=arguments['<auth_token>'],
    )
    csv_builder = GenerateMasterCSV(
        client=client,
        target_framework_slug='g-cloud-8'
    )
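    # Build the master CSV contents for the framework, then write them out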
    csv_builder.populate_output()
    csv_builder.write_csv()
        )
        client.register_framework_agreement_returned(
            supplier_id,
            'g-cloud-7',
            'script: fix incorrect extension'
        )


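# Agreements bucket names repeat the stage, e.g. 'digitalmarketplace-agreements-preview-preview'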
def get_bucket_name(stage):
    return 'digitalmarketplace-agreements-{0}-{0}'.format(stage)

if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    api_token = arguments['<api_token>']
    download_directory = arguments['<download_directory>']
    dry_run = arguments['--dry-run']

    api_url = get_api_endpoint_from_stage(stage)

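    # On a dry run neither the API nor S3 is touched, so skip creating both clients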
    if dry_run:
        client = None
        bucket = None
    else:
        client = DataAPIClient(api_url, api_token)
        bucket = S3(get_bucket_name(stage))

    for path in get_all_pdfs(download_directory):
        handle_path(client, bucket, dry_run, path)
#!/usr/bin/env python
"""

Usage:
    set-search-alias.py <stage> <search_api_token> <alias> <index>
"""

import sys
from docopt import docopt

sys.path.insert(0, '.')
from dmscripts.env import get_api_endpoint_from_stage
from dmapiclient import SearchAPIClient


if __name__ == '__main__':
    arguments = docopt(__doc__)

    search_api_url = get_api_endpoint_from_stage(arguments['<stage>'], 'search-api')
    search_api_token = arguments['<search_api_token>']

    client = SearchAPIClient(search_api_url, search_api_token)
    client.set_alias(arguments['<alias>'], arguments['<index>'])
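# Check each supplier's services on the framework against the bad-words list; results go to output_dir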
def main(stage, data_api_token, bad_words_path, framework_slug, output_dir):
    api_url = get_api_endpoint_from_stage(stage)
    client = DataAPIClient(api_url, data_api_token)
    bad_words = get_bad_words(bad_words_path)
    suppliers = get_suppliers(client, framework_slug)
    check_services_with_bad_words(output_dir, framework_slug, client, suppliers, bad_words)