def update_suppliers(data_api_endpoint, data_api_token, updates_path, updated_by):
    """Apply supplier updates loaded from a file via the Data API.

    :param data_api_endpoint: base URL of the Data API
    :param data_api_token: auth token for the Data API
    :param updates_path: path to the file of supplier update records; each
        record must contain an 'id' key identifying the supplier
    :param updated_by: audit identity recorded against each update
    """
    api_client = DataAPIClient(data_api_endpoint, data_api_token)
    for supplier_update in load_updates(updates_path):
        print("Updating {}".format(supplier_update))
        supplier_id = supplier_update.pop('id')
        api_client.update_supplier(supplier_id, supplier_update, updated_by)
def suppliers_on_framework(data_api_url, data_api_token, questions, framework_slug='g-cloud-7'):
    """Write a CSV of supplier declarations to stdout.

    Iterates through all suppliers, fetches each supplier's declaration for
    ``framework_slug``, and writes one CSV row per supplier that has started
    a declaration.

    :param data_api_url: base URL of the Data API
    :param data_api_token: auth token for the Data API
    :param questions: declaration questions used to build the header and rows
    :param framework_slug: framework to fetch declarations for (defaults to
        'g-cloud-7', the previously hard-coded value, for compatibility)
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers(questions))
    for supplier in client.find_suppliers_iter():
        try:
            declaration = client.get_supplier_declaration(
                supplier['id'], framework_slug)['declaration']
            if not declaration:
                continue
            status = declaration['status']
            # NOTE: the original iterated this with a loop variable named
            # `declaration`, shadowing the declaration dict above; renamed.
            processed_answers = process_supplier_declaration(
                declaration, questions)
            supplier_row = [
                supplier['id'],
                supplier['name'],
                supplier.get('dunsNumber', ""),
                status,
            ]
            supplier_row.extend(processed_answers)
            try:
                writer.writerow(supplier_row)
            except UnicodeEncodeError:
                # stdout may not accept non-ASCII text; fall back to
                # explicitly UTF-8-encoding any string fields.
                writer.writerow([
                    field.encode('utf-8') if hasattr(field, 'encode') else field
                    for field in supplier_row
                ])
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                raise
        except KeyError:
            # declaration is missing an expected key; skip this supplier
            pass
def suppliers_lot_count(data_api_url, data_api_token):
    """Write a CSV of per-lot draft-service counts for every supplier to stdout.

    Iterates through all suppliers, fetches each supplier's draft services,
    and writes one row per supplier that has drafts, with submitted and
    not-submitted counts for each G-Cloud lot.

    :param data_api_url: base URL of the Data API
    :param data_api_token: auth token for the Data API
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers())
    for supplier in client.find_suppliers_iter():
        try:
            drafts = list(client.find_draft_services_iter(supplier['id']))
            if drafts:
                aggregations = aggregate(drafts)
                supplier_row = [
                    supplier['id'],
                    supplier['name'],
                    supplier.get('dunsNumber', ""),
                ]
                # One submitted/not-submitted pair per lot, in a fixed order
                # matching the previous hand-written column sequence.
                for lot_slug in ('iaas', 'paas', 'saas', 'scs'):
                    lot_aggregation = aggregations.get(lot_slug, {})
                    supplier_row.append(submitted_count(lot_aggregation))
                    supplier_row.append(not_submitted_count(lot_aggregation))
                writer.writerow(supplier_row)
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                raise
def clients_in_shell(stage):
    """Create Data and Search API clients for ``stage`` and drop into IPython.

    For 'development' the dummy local token is used; for any other stage the
    real auth tokens are retrieved.
    """
    print('Retrieving credentials...')
    api_token = 'myToken'
    search_api_token = 'myToken'
    if stage != 'development':
        # BUG FIX: a trailing comma here previously made api_token a 1-tuple
        # of the token instead of the token string itself.
        api_token = get_auth_token('api', stage)
        search_api_token = get_auth_token('search_api', stage)

    print('Creating clients...')
    data = DataAPIClient(get_api_endpoint_from_stage(stage), api_token)  # noqa
    search = SearchAPIClient(get_api_endpoint_from_stage(stage, app='search-api'),
                             search_api_token)  # noqa

    print('Dropping into shell...')
    IPython.embed()
def clients_in_shell(stage, api_url, api_token, search_api_url, search_api_token):
    """Build Data and Search API clients — using explicit URLs/tokens where
    given, otherwise the stage defaults — and drop into an IPython shell."""
    print('Retrieving credentials...')
    if not api_token:
        api_token = get_auth_token('api', stage)
    if not search_api_token:
        search_api_token = get_auth_token('search_api', stage)

    print('Creating clients...')
    data_endpoint = api_url or get_api_endpoint_from_stage(stage)
    data = DataAPIClient(data_endpoint, api_token)  # noqa
    search_endpoint = search_api_url or get_api_endpoint_from_stage(stage, app='search-api')
    search = SearchAPIClient(search_endpoint, search_api_token)  # noqa

    print('Dropping into shell...')
    IPython.embed()
def get_dmp_supplier_data(self, framework=None, duns_number=None, from_declaration=False): "return the DMp data for a given DUNS number and initialises the DMp client if None" # TODO: error handling if self.data_api_client is None: self.data_api_client = DataAPIClient( base_url=get_api_endpoint_from_stage(self.stage), auth_token=get_auth_token('api', self.stage)) if duns_number is not None: return self.data_api_client.find_suppliers(duns_number=duns_number) elif framework is not None: # TODO: use iter instead -> digitalmarketplace-apiclient/blob/master/dmapiclient/data.py#L119 # TODO: check pagination if from_declaration: return self.data_api_client.find_framework_suppliers(framework) return self.data_api_client.find_suppliers(framework=framework)
def inject_framework_dates(stage):
    """Update every framework in FRAMEWORKS_AND_DATES with its date data on ``stage``.

    Failures are reported per framework and do not stop the remaining updates.
    """
    if stage != 'development':
        data_api_token = get_auth_token('api', stage)
    else:
        data_api_token = 'myToken'
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), data_api_token)

    updated_by = (
        f'{getpass.getuser()} - '
        f'digitalmarketplace-scripts/scripts/'
        f'oneoff/inject-framework-dates.py'
    )
    for framework_slug, framework_data in FRAMEWORKS_AND_DATES.items():
        print(f'Injecting dates for {framework_slug}: {framework_data}')
        try:
            data_api_client.update_framework(
                framework_slug=framework_slug,
                data=framework_data,
                user=updated_by,
            )
        except Exception as e:
            # Best-effort: report the failure and carry on with the rest.
            print(f'Failed with {e} on {framework_slug}. Data: {framework_data}')
def reset_supplier_declaration(stage, framework_slug, reason, email, supplier_id):
    """Clear a supplier's declaration (and its prefill source) on a framework.

    The supplier may be identified either by ``supplier_id`` or by the
    ``email`` of one of its users; when both are given they must agree.
    Exits the process with a non-zero status code on any failure.

    :param stage: environment to run against ('development' uses a dummy token)
    :param framework_slug: framework whose declaration should be reset
    :param reason: recorded in the audit trail alongside the current user
    :param email: optional user email address used to resolve the supplier
    :param supplier_id: optional supplier id
    """
    data_api_token = get_auth_token('api', stage) if stage != 'development' else 'myToken'
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), data_api_token)

    if email:
        user = data_api_client.get_user(email_address=email)
        if not user:
            print(f'No user found for email address `{email}`')
            exit(1)

        user_supplier_id = user['users']['supplier']['supplierId']
        if user_supplier_id and supplier_id and user_supplier_id != supplier_id:
            # BUG FIX: the first supplierId in this message previously
            # interpolated `supplier_id`, so both IDs shown were identical
            # and the message never revealed the actual mismatch.
            print(f'Email address provided does not match with supplier provided. Email address `{email}` is '
                  f'associated with supplierId `{user_supplier_id}`. Script was called with supplierId '
                  f'`{supplier_id}`.')
            exit(2)

        supplier_id = user_supplier_id

    try:
        supplier_framework = data_api_client.get_supplier_framework_info(
            supplier_id=supplier_id, framework_slug=framework_slug)
        print(f"Current supplier declaration: {supplier_framework['frameworkInterest']['declaration']}")
    except HTTPError:
        print(f'No supplier framework found for supplierId `{supplier_id}` on framework `{framework_slug}`.')
        exit(3)

    if not supplier_framework:
        print(f'No supplier framework/interest record found for supplierId `{supplier_id}` on framework '
              f'`{framework_slug}`.')
        exit(4)

    data_api_client.set_supplier_declaration(
        supplier_id=supplier_id,
        framework_slug=framework_slug,
        declaration={},
        user=f'{getpass.getuser()} - {reason}')
    data_api_client.set_supplier_framework_prefill_declaration(
        supplier_id=supplier_id,
        framework_slug=framework_slug,
        prefill_declaration_from_framework_slug=None,
        user=f'{getpass.getuser()} - {reason}')
    print(f'Supplier declaration for supplierId `{supplier_id}` on framework `{framework_slug}` has been reset.')
def main():
    """Write a CSV mapping each published service ID to its top-level search categories."""
    args = docopt(__doc__)

    frameworks_repo = Path(args["--frameworks-repo"]).resolve()
    framework_slug = args["<framework_slug>"]
    stage = args["<stage>"]
    lot = args["<lot>"]

    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), get_auth_token('api', stage))

    content_loader = ContentLoader(frameworks_repo)
    content_loader.load_manifest(framework_slug, "services", "services_search_filters")
    manifest = content_loader.get_manifest(framework_slug, "services_search_filters")

    # FIXME there isn't a uniform way to get the lots from the framework
    # content repo, hard code for G-Cloud for now
    framework_lots = [
        {"name": "Cloud hosting", "slug": "cloud-hosting"},
        {"name": "Cloud software", "slug": "cloud-software"},
        {"name": "Cloud support", "slug": "cloud-support"},
    ]

    writer = csv.writer(sys.stdout)
    writer.writerow(['serviceId', 'topLevelCategories'])
    for service in data_api_client.find_services_iter(framework=framework_slug, status='published', lot=lot):
        service_categories = service.get('serviceCategories') or []
        top_level_categories = []
        category_filters = filters_for_lot(service['lot'], manifest, framework_lots)['categories']['filters']
        for category_filter in category_filters:
            # NOTE: the original shadowed the loop variable `f` inside this
            # comprehension; renamed for clarity.
            children = (
                [child['label'] for child in category_filter['children']]
                if category_filter.get('children') else []
            )
            # A service belongs to a top-level category when it lists any of
            # that category's child categories.
            if any(item in service_categories for item in children):
                top_level_categories.append(category_filter['label'])

        writer.writerow([service['id'], '; '.join(top_level_categories)])
supplier_id=supplier['id'], supplier={'tradingStatus': mapped_trading_status}, user=f'{getpass.getuser()} (migrate trading status script)', ) success_counter += 1 except HTTPError as e: print( f"{prefix}Error updating supplier {supplier['id']}: {e.message}" ) failure_counter += 1 if i % 100 == 0: print(f'{prefix}{i} suppliers processed ...') print(f'{prefix}Finished processing {i} suppliers.') print(f"{prefix}Succssfully updated: {success_counter}") print(f"{prefix}Failed to update: {failure_counter}") if __name__ == '__main__': arguments = docopt(__doc__) stage = arguments['<stage>'] dry_run = arguments['--dry-run'] api_url = get_api_endpoint_from_stage(stage) migrate_trading_statuses( DataAPIClient(api_url, get_auth_token('api', stage)), dry_run)
if __name__ == '__main__': arguments = docopt(__doc__) supplier_ids = get_supplier_ids_from_args(arguments) STAGE = arguments['<stage>'] FRAMEWORK_SLUG = arguments['<framework>'] GOVUK_NOTIFY_API_KEY = arguments['<notify_api_key>'] GOVUK_NOTIFY_TEMPLATE_ID = arguments['<notify_template_id>'] CONTENT_PATH = arguments['<content_path>'] DRY_RUN = arguments['--dry-run'] content_loader = ContentLoader(CONTENT_PATH) content_loader.load_messages(FRAMEWORK_SLUG, ['e-signature']) mail_client = scripts_notify_client(GOVUK_NOTIFY_API_KEY, logger=logger) api_client = DataAPIClient(base_url=get_api_endpoint_from_stage(STAGE), auth_token=get_auth_token('api', STAGE)) context_helper = SuccessfulSupplierContextForNotify( api_client, FRAMEWORK_SLUG, supplier_ids=supplier_ids, logger=logger) context_helper.populate_data() context_data = context_helper.get_users_personalisations() framework = api_client.get_framework(FRAMEWORK_SLUG).get('frameworks') prefix = "[Dry Run] " if DRY_RUN else "" # Add in any framework-specific dates etc here extra_template_context = { "contract_title": content_loader.get_message(FRAMEWORK_SLUG, 'e-signature', 'framework_contract_title'), "intentionToAwardAt_dateformat":
service_id)) except Exception as e: if e.message == "Cannot re-publish a submitted service": print(u" > Draft {} already published".format(draft['id'])) else: print(u" > ERROR MIGRATING DRAFT {} - {}".format( draft['id'], e.message)) if __name__ == "__main__": arguments = docopt(__doc__) STAGE = arguments['<stage>'] DRY_RUN = arguments['--dry-run'] FRAMEWORK_SLUG = arguments['<framework_slug>'] api_url = get_api_endpoint_from_stage(STAGE) client = DataAPIClient(api_url, get_auth_token('api', STAGE)) print("Finding suppliers...") suppliers = find_suppliers_on_framework(client, FRAMEWORK_SLUG) print("Migrating drafts...") for supplier in suppliers: print(u"Migrating drafts for supplier {} - {}".format( supplier['supplierId'], supplier['supplierName'])) draft_services = get_submitted_drafts(client, FRAMEWORK_SLUG, supplier['supplierId']) for draft_service in draft_services: make_draft_service_live(client, draft_service, DRY_RUN)
("supplier_name", record["supplier"]["name"]), ("supplier_declaration_name", record["declaration"].get("supplierRegisteredName", "")), ("status", "PASSED" if record["onFramework"] else "FAILED"), ] return row + make_fields_from_content_questions(questions, record) return inner if __name__ == '__main__': arguments = docopt(__doc__) STAGE = arguments['<stage>'] CONTENT_PATH = arguments['<content_path>'] FRAMEWORK_SLUG = arguments['<framework_slug>'] client = DataAPIClient(get_api_endpoint_from_stage(STAGE), get_auth_token('api', STAGE)) content_loader = ContentLoader(CONTENT_PATH) content_loader.load_manifest(FRAMEWORK_SLUG, "services", "edit_submission") content_manifest = content_loader.get_manifest(FRAMEWORK_SLUG, "edit_submission") records = find_all_participants(client) write_csv_with_make_row( records, make_row(content_manifest), "output/{}-user-research-participants.csv".format(FRAMEWORK_SLUG))
output_dir = arguments['<output-dir>'] stage = arguments['<stage>'] framework_slug = arguments['<framework_slug>'] filename = "{}-how-application-looked-at-close-{}-{}.csv".format( framework_slug, stage, datetime.utcnow().strftime("%Y-%m-%d_%H.%M-") ) # Create output directory if it doesn't already exist if not os.path.exists(output_dir): os.makedirs(output_dir) client = DataAPIClient( base_url=get_api_endpoint_from_stage(stage), auth_token=arguments['<auth_token>'], ) csv_builder = GenerateFrameworkApplicationsCSV( client=client, target_framework_slug=framework_slug ) if arguments.get('<exclude_suppliers>') is not None: # updates the generator with any IDs the user wants excluded csv_builder.excluded_supplier_ids = [int(n) for n in arguments['<exclude_suppliers>'].split(',')] csv_builder.populate_output() with open(os.path.join(output_dir, filename), 'w') as csvfile: csv_builder.write_csv(outfile=csvfile)
from dmapiclient import DataAPIClient from dmutils.env_helpers import get_api_endpoint_from_stage from docopt import docopt sys.path.insert(0, ".") from dmscripts.helpers.auth_helpers import get_auth_token from dmscripts.helpers.updated_by_helpers import get_user if __name__ == "__main__": args = docopt(__doc__) api_client = DataAPIClient( get_api_endpoint_from_stage(args["<stage>"]), get_auth_token("api", args["<stage>"]), user=get_user(), ) with open(args["<input_file>"]) as input_file: services = list(csv.DictReader(input_file)) missing_services = [s for s in services if not s["serviceId"]] for service in missing_services: name = service["Service Name"].replace(".csv", "")[:100] supplier_id = service["Supplier ID"] lot = { "Software": "cloud-software", "Support": "cloud-support", }.get(service["Lot"])
def update_suppliers(data_api_endpoint, data_api_token, users_path):
    """Create a user account for every record loaded from ``users_path``.

    NOTE(review): despite the name, this function creates users rather than
    updating suppliers — consider renaming together with its call sites.
    """
    api_client = DataAPIClient(data_api_endpoint, data_api_token)
    for user_record in load_users(users_path):
        print("Adding {}".format(user_record))
        api_client.create_user(user_record)
for supplier in client.find_suppliers_iter():
    if supplier.get('registrationCountry') == OLD_COUNTRY:
        if not dry_run:
            try:
                client.update_supplier(
                    supplier['id'],
                    {'registrationCountry': NEW_COUNTRY},
                    'rename supplier registered country script',
                )
                success_counter += 1
            except HTTPError as e:
                print("Error updating supplier {}: {}".format(
                    supplier['id'], e.message))
                # BUG FIX: was `failure_counter += 0`, so failures were
                # never actually counted.
                failure_counter += 1

# BUG FIX: corrected "Succssfully" typo in the summary message.
print("{}Successfully updated {}".format('Dry run - ' if dry_run else '', success_counter))
print("{}Failed to update {}".format('Dry run - ' if dry_run else '', failure_counter))

if __name__ == '__main__':
    arguments = docopt(__doc__)

    stage = arguments['<stage>']
    dry_run = arguments['--dry-run']

    api_url = get_api_endpoint_from_stage(stage)

    rename_country(DataAPIClient(api_url, get_auth_token('api', stage)), dry_run)
'digital-outcomes': "5c92c78a78" if stage == "production" else "f0077c516d", 'user-research-participants': "34ebe0bffa" if stage == "production" else "d35601203b", }, 'digital-outcomes-and-specialists-4': { 'digital-specialists': "29d06d5201" if stage == "production" else "07c21f0451", 'digital-outcomes': "4360debc5a" if stage == "production" else "f0077c516d", 'user-research-participants': "2538f8a0f1" if stage == "production" else "d35601203b", }, } lots = [{ 'lot_slug': lot_slug, 'list_id': list_ids[framework_slug][lot_slug], 'framework_slug': framework_slug } for lot_slug in LOT_SLUGS] api_url = get_api_endpoint_from_stage(stage) data_api_client = DataAPIClient(api_url, get_auth_token('api', stage)) dm_mailchimp_client = DMMailChimpClient(arguments['<mailchimp_username>'], arguments['<mailchimp_api_key>'], logger, retries=3) for lot_data in lots: main(data_api_client, dm_mailchimp_client, lot_data, logger)
def get_all_drafts(client): pool = ThreadPool(25) return itertools.chain.from_iterable( pool.imap_unordered(create_draft_getter(client), client.find_suppliers_iter())) if __name__ == "__main__": arguments = docopt(__doc__) stage = arguments['<stage>'] api_token = arguments['<token>'] dry_run = arguments['--dry-run'] api_url = get_api_endpoint_from_stage(stage) client = DataAPIClient(api_url, api_token) counter = 0 for draft in get_all_drafts(client): update = {} if "outcomesLocations" in draft: update["locations"] = draft["outcomesLocations"] update["outcomesLocations"] = None if "recruitLocations" in draft: if update: raise ValueError( "draft {} has both outcomesLocations and recruitLocations". format(draft["id"])) update["locations"] = draft["recruitLocations"] update["recruitLocations"] = None
#!/usr/bin/env python
"""
Our test supplier in production is on the DOS 4 framework. However, its service has no declaration, which breaks a few small things. Remove the supplier from DOS 4.
"""
import sys

sys.path.insert(0, ".")

from dmapiclient import DataAPIClient
from dmscripts.helpers.auth_helpers import get_auth_token
from dmutils.env_helpers import get_api_endpoint_from_stage

# Constants for the one-off removal: the test supplier's id, the framework
# to remove it from, and the on-framework result to set.
DMP_TEST_SUPPLIER = 577184
FRAMEWORK = "digital-outcomes-and-specialists-4"
SHOULD_BE_ON_FRAMEWORK = False

data = DataAPIClient(
    get_api_endpoint_from_stage("production"),
    get_auth_token("api", "production"),
)

data.set_framework_result(
    DMP_TEST_SUPPLIER,
    FRAMEWORK,
    SHOULD_BE_ON_FRAMEWORK,
    user="******",
)
from docopt import docopt from dmscripts.export_framework_applicant_details import get_csv_rows from dmscripts.helpers.auth_helpers import get_auth_token from dmscripts.helpers.framework_helpers import find_suppliers_with_details_and_draft_service_counts from dmscripts.helpers.supplier_data_helpers import get_supplier_ids_from_file from dmscripts.generate_framework_agreement_signature_pages import ( render_html_for_suppliers_awaiting_countersignature, render_pdf_for_each_html_page) from dmapiclient import DataAPIClient from dmutils.env_helpers import get_api_endpoint_from_stage if __name__ == '__main__': args = docopt(__doc__) framework_slug = args['<framework_slug>'] client = DataAPIClient(get_api_endpoint_from_stage(args['<stage>']), get_auth_token('api', args['<stage>'])) framework = client.get_framework(framework_slug)['frameworks'] framework_lot_slugs = tuple([ lot['slug'] for lot in client.get_framework(framework_slug)['frameworks']['lots'] ]) supplier_id_file = args['<supplier_id_file>'] supplier_ids = get_supplier_ids_from_file(supplier_id_file) html_dir = tempfile.mkdtemp() records = find_suppliers_with_details_and_draft_service_counts( client, framework_slug, supplier_ids) headers, rows = get_csv_rows(records, framework_slug, framework_lot_slugs,
args = parser.parse_args() stage = args.stage.lower() user = get_user() print(f"Setting user to '{user}'...") print('Retrieving credentials...') api_token = args.api_token or get_auth_token('api', stage) search_api_token = args.search_api_token or get_auth_token( 'search_api', stage) print('Creating clients...') data = DataAPIClient( base_url=args.api_url or get_api_endpoint_from_stage(stage), auth_token=api_token, user=user, ) search = SearchAPIClient( base_url=args.search_api_url or get_api_endpoint_from_stage(stage, app='search-api'), auth_token=search_api_token, user=user, ) if args.read_only: data = ReadOnlyDataAPIClient(data) ipython_config = Config() ipython_config.TerminalInteractiveShell.prompts_class = DMEnvironmentPrompt( stage, args.read_only)
Usage: scripts/get_active_users_csv.py <stage> Example scripts/get_active_users_csv.py preview > output.csv """ import csv import sys sys.path.insert(0, '.') from dmscripts.helpers.auth_helpers import get_auth_token from dmutils.env_helpers import get_api_endpoint_from_stage from docopt import docopt from dmapiclient import DataAPIClient if __name__ == '__main__': arguments = docopt(__doc__) stage = arguments['<stage>'] data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), get_auth_token('api', stage)) writer = csv.writer(sys.stdout) writer.writerow(['email address']) for user in filter( lambda u: u['active'], data_api_client.find_users_iter(personal_data_removed=False)): writer.writerow([user['emailAddress']])
""" import sys sys.path.insert(0, '.') from os import environ from sys import exit from dmapiclient import DataAPIClient from dmscripts.helpers.auth_helpers import get_auth_token from dmutils.env_helpers import get_api_endpoint_from_stage if __name__ == "__main__": data_api_client = DataAPIClient( get_api_endpoint_from_stage(environ["STAGE"].lower()), get_auth_token('api', environ["STAGE"].lower()), ) email_address = environ["ACCOUNT_EMAIL"] user = data_api_client.get_user(email_address=email_address) if not user: print(f"User {email_address!r} not found") exit(2) if not data_api_client.update_user_password( user["users"]["id"], environ["ACCOUNT_PASSWORD"], "set-dm-password-by-email.py", ): print(f"Failed to set password for {email_address!r}") exit(3)
logger = logging.getLogger("script")

from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers import logging_helpers
from dmutils.env_helpers import get_api_endpoint_from_stage
from dmscripts.data_retention_remove_supplier_declarations import remove_unsuccessful_supplier_declarations

if __name__ == "__main__":
    arguments = docopt(__doc__)

    # Get script arguments
    stage = arguments['<stage>']
    dry_run = arguments['--dry-run']
    framework = arguments['<framework-slug>']
    verbose = arguments['--verbose']
    user = arguments['<user>'] or getpass.getuser()

    # Set defaults, instantiate clients
    api_client_log_level = logging.INFO if verbose else logging.WARN
    logging_helpers.configure_logger({"dmapiclient": api_client_log_level})
    data_api_client = DataAPIClient(
        base_url=get_api_endpoint_from_stage(stage),
        auth_token=get_auth_token('api', stage),
    )
    remove_unsuccessful_supplier_declarations(
        data_api_client=data_api_client,
        logger=logger,
        dry_run=dry_run,
        framework_slug=framework,
        user=user,
    )
if __name__ == "__main__":
    args = docopt(__doc__)
    logging.basicConfig(level=logging.INFO)

    FT_JOB_NAMES = ["functional-tests-preview", "functional-tests-staging"]
    API_USER = os.getenv("DM_JENKINS_API_USER")
    API_TOKEN = os.getenv("DM_JENKINS_API_TOKEN")
    OUTPUT_FILE = args.get("<file>") or "functional_test_report.csv"

    auth = HTTPBasicAuth(API_USER, API_TOKEN)

    # Use staging to get the framework dates because it'll be the same as production
    api_client = DataAPIClient(get_api_endpoint_from_stage("staging"),
                               get_auth_token("api", "staging"))
    frameworks = api_client.find_frameworks()["frameworks"]

    build_data = []
    for job in FT_JOB_NAMES:
        for build in get_job_build_data(job, auth):
            build_data.append(format_build(job, build, frameworks))

    if not build_data:
        # BUG FIX: `build_data[0]` below raised IndexError when no builds
        # were found; exit cleanly instead of writing an empty report.
        logging.warning("No build data found; not writing a report")
        raise SystemExit(0)

    logging.info(f"Writing report to {OUTPUT_FILE}")
    headers = build_data[0].keys()
    with open(OUTPUT_FILE, "w") as f:
        writer = csv.DictWriter(f, headers)
        writer.writeheader()
        writer.writerows(build_data)
from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers import logging_helpers
from dmscripts.notify_suppliers_of_framework_application_event import \
    notify_suppliers_of_framework_application_event

if __name__ == "__main__":
    arguments = docopt(__doc__)

    logger = logging_helpers.configure_logger({"dmapiclient": logging.INFO})

    # Resume a previous notification run when a run id was supplied.
    if arguments.get("--resume-run-id"):
        run_id = UUID(arguments["--resume-run-id"])
    else:
        run_id = None

    failure_count = notify_suppliers_of_framework_application_event(
        data_api_client=DataAPIClient(
            base_url=get_api_endpoint_from_stage(arguments["<stage>"], "api"),
            auth_token=get_auth_token("api", arguments["<stage>"]),
        ),
        notify_client=scripts_notify_client(arguments['<govuk_notify_api_key>'], logger=logger),
        notify_template_id=arguments['<govuk_notify_template_id>'],
        framework_slug=arguments["<framework_slug>"],
        stage=arguments["<stage>"],
        dry_run=arguments["--dry-run"],
        logger=logger,
        run_id=run_id,
    )

    if failure_count:
        logger.error("Failed sending {failure_count} messages", extra={"failure_count": failure_count})
document_updates[document_key] = get_live_asset_url( live_document_path) if dry_run: print(" > not updating document URLs {}".format(document_updates)) else: client.update_service(service_id, document_updates, 'Moving documents to live bucket') print(" > document URLs updated") if __name__ == '__main__': arguments = docopt(__doc__) STAGE = arguments['<stage>'] api_url = arguments['<api_url>'] client = DataAPIClient(api_url, arguments['<api_token>']) DRY_RUN = arguments['--dry-run'] suppliers = find_suppliers_on_framework(client, FRAMEWORK_SLUG) for supplier in suppliers: print("Migrating drafts for supplier {} - {}".format( supplier['supplierId'], supplier['supplierName'])) draft_services = find_submitted_draft_services(client, supplier, FRAMEWORK_SLUG) for draft_service in draft_services: make_draft_service_live(client, copy_document, draft_service, FRAMEWORK_SLUG, DRY_RUN)
for lot in lots: lot.update({"list_id": "096e52cebb"}) # Override list id if arguments.get("--list_id"): for lot in lots: lot.update({"list_id": arguments["--list_id"]}) # Override lot if arguments.get("--lot_slug"): lots = [ lot for lot in lots if lot["lot_slug"] == arguments["--lot_slug"] ] api_url = get_api_endpoint_from_stage(arguments['<stage>']) data_api_client = DataAPIClient( api_url, get_auth_token('api', arguments['<stage>'])) dm_mailchimp_client = DMMailChimpClient(arguments['<mailchimp_username>'], arguments['<mailchimp_api_key>'], logger) for lot_data in lots: ok = main( data_api_client=data_api_client, mailchimp_client=dm_mailchimp_client, lot_data=lot_data, number_of_days=number_of_days, framework_slug=framework_slug, ) if not ok: sys.exit(1)
if args.env not in ['dev', 'development', 'local', 'preview', 'staging']: print( "This script can only be run against dev/preview/staging environments." ) sys.exit(1) args.lots = args.lots.lower().split(',') if set(args.lots) - set(LOTS_WHITELIST): print( "This script only allows the following lots: {}. If you need other lots, please add them to the " "whitelist (this is just a sanity-check against typos).".format( LOTS_WHITELIST)) sys.exit(1) data_api_url = get_api_endpoint_from_stage(args.env, 'api') data_api_client = DataAPIClient(data_api_url, args.data_api_token) search_api_url = get_api_endpoint_from_stage(args.env, 'search-api') search_api_client = SearchAPIClient(search_api_url, args.search_api_token) services_generated = 0 gcloud_service_faker = JsonSchemaGCloudServiceFaker() email_address = "*****@*****.**" identity = 'generate-g-cloud-services script ({})'.format( getpass.getuser()) filepath_declaration_validator = 'schemas/{}.declaration.json'.format( args.new_slug) # 0) Store the current framework state so that it can be restored at the end. current_framework_state = data_api_client.get_framework( args.new_slug)['frameworks']['status']