def update_suppliers(data_api_endpoint, data_api_token, updates_path, updated_by):
    """Apply each supplier update loaded from `updates_path` via the data API.

    Each update record must contain an ``id`` key identifying the supplier;
    the remaining keys are sent as the update payload.

    :param data_api_endpoint: base URL of the data API
    :param data_api_token: auth token for the data API
    :param updates_path: path to the updates file understood by `load_updates`
    :param updated_by: identity recorded against each update for auditing
    """
    client = DataAPIClient(data_api_endpoint, data_api_token)

    for update in load_updates(updates_path):
        print("Updating {}".format(update))
        supplier_id = update.pop('id')
        client.update_supplier(supplier_id, update, updated_by)
def main(stage, framework_slug, api_token, user, supplier_ids=None):
    """Unset the 'agreement returned' flag for suppliers on a framework and
    delete their signed agreement files from the agreements S3 bucket.

    :param stage: deployment stage, used to derive the bucket name and API URL
    :param framework_slug: framework whose agreements are being reset
    :param api_token: auth token for the data API
    :param user: identity recorded against the API updates
    :param supplier_ids: optional comma-separated string of supplier IDs;
        if omitted, every supplier with a returned agreement is reset
    """
    # Bucket naming convention repeats the stage twice, e.g.
    # 'digitalmarketplace-agreements-preview-preview'.
    agreements_bucket_name = 'digitalmarketplace-agreements-{0}-{0}'.format(stage)
    agreements_bucket = S3(agreements_bucket_name)

    api_client = DataAPIClient(
        get_api_endpoint_from_stage(stage, 'api'),
        api_token
    )

    if supplier_ids is not None:
        supplier_ids = [int(supplier_id.strip()) for supplier_id in supplier_ids.split(',')]

    # Only suppliers who have actually returned an agreement are candidates.
    suppliers = api_client.find_framework_suppliers(framework_slug, agreement_returned=True)['supplierFrameworks']

    if supplier_ids is not None:
        # Fail fast if any requested supplier has no returned agreement.
        missing_supplier_ids = set(supplier_ids) - set(supplier['supplierId'] for supplier in suppliers)
        if missing_supplier_ids:
            raise Exception("Invalid supplier IDs: {}".format(', '.join(str(x) for x in missing_supplier_ids)))
    else:
        supplier_ids = set(supplier['supplierId'] for supplier in suppliers)

    for supplier_id in supplier_ids:
        logger.info("Resetting agreement returned flag for supplier {supplier_id}",
                    extra={'supplier_id': supplier_id})
        api_client.unset_framework_agreement_returned(supplier_id, framework_slug, user)

    # Delete the uploaded signed-agreement documents for the affected suppliers.
    signed_agreements = filter(
        lambda x: match_signed_agreements(supplier_ids, x['path']),
        agreements_bucket.list('{}/agreements/'.format(framework_slug))
    )

    for document in signed_agreements:
        logger.info("Deleting {path}", extra={'path': document['path']})
        agreements_bucket.delete_key(document['path'])
def suppliers_on_framework(data_api_url, data_api_token, questions):
    """Write a CSV of G-Cloud 7 supplier declarations to stdout.

    - iterates through all suppliers
    - for each supplier hits the declaration API to recover the answers
    - builds a CSV row per supplier

    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    :param questions: declaration questions used for the header row and to
        process each supplier's declaration answers
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers(questions))

    for supplier in client.find_suppliers_iter():
        try:
            declaration = client.get_supplier_declaration(
                supplier['id'], 'g-cloud-7')['declaration']
            if not declaration:
                continue

            status = declaration['status']
            processed_declaration = process_supplier_declaration(declaration, questions)

            row = [
                supplier['id'],
                supplier['name'],
                supplier.get('dunsNumber', ""),
                status,
            ]
            # Previously this appended in a loop whose variable re-bound
            # (shadowed) `declaration`; extend() avoids the shadowing.
            row.extend(processed_declaration)

            try:
                writer.writerow(row)
            except UnicodeEncodeError:
                # Fall back to UTF-8 encoded bytes for non-ASCII fields
                # (legacy Python 2 csv behaviour).
                writer.writerow([
                    field.encode('utf-8') if hasattr(field, 'encode') else field
                    for field in row
                ])
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                raise e
        except KeyError:
            # declaration present but missing an expected key; skip the row
            pass
def suppliers_lot_count(data_api_url, data_api_token):
    """Write a CSV of per-lot draft service counts for every supplier to stdout.

    - iterates through all suppliers
    - for each supplier hits the draft API to recover the services
    - builds a CSV row per supplier with submitted / not-submitted counts
      for each G-Cloud lot

    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers())

    for supplier in client.find_suppliers_iter():
        try:
            # list() instead of an append loop (same contents, one pass)
            drafts = list(client.find_draft_services_iter(supplier['id']))
            if drafts:
                aggregations = aggregate(drafts)
                supplier_row = [
                    supplier['id'],
                    supplier['name'],
                    supplier.get('dunsNumber', ""),
                ]
                # One submitted/not-submitted pair per lot, in a fixed order
                # matching the header row.
                for lot in ('iaas', 'paas', 'saas', 'scs'):
                    lot_aggregation = aggregations.get(lot, {})
                    supplier_row.append(submitted_count(lot_aggregation))
                    supplier_row.append(not_submitted_count(lot_aggregation))
                writer.writerow(supplier_row)
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                raise e
def remove_dos4_answers(api_client: "DataAPIClient", draft: dict, developer_email: str) -> dict:
    """Blank out every leftover DOS4-era answer on a draft service.

    Sends one draft-service update setting each ``*PriceMin`` and
    ``*AccessibleApplications`` field to ``None``.

    :param api_client: data API client used to perform the update
    :param draft: the draft service being cleaned up
    :param developer_email: recorded as the 'updated by' identity
    :return: the API response from updating the draft service
    """
    print(f"Supplier id: {draft['supplierId']}")
    print(f"Draft id: {draft['id']}")

    price_min_roles = (
        "agileCoach", "businessAnalyst", "communicationsManager",
        "contentDesigner", "dataArchitect", "dataEngineer", "dataScientist",
        "deliveryManager", "designer", "developer", "performanceAnalyst",
        "portfolioManager", "productManager", "programmeManager",
        "qualityAssurance", "securityConsultant", "serviceManager",
        "technicalArchitect", "userResearcher", "webOperations",
    )
    accessible_application_roles = (
        "developer", "contentDesigner", "qualityAssurance",
        "technicalArchitect", "webOperations", "serviceManager", "designer",
    )
    # dict.fromkeys defaults every value to None, matching the old literal.
    update = dict.fromkeys(
        [f"{role}PriceMin" for role in price_min_roles]
        + [f"{role}AccessibleApplications" for role in accessible_application_roles]
    )
    return api_client.update_draft_service(draft['id'], update, developer_email)
def get_affected_drafts_services(api_client: "DataAPIClient") -> "List[dict]":
    """Return the not-submitted DOS5 drafts that still carry a DOS4 answer."""
    candidate_drafts = api_client.find_draft_services_by_framework_iter(
        'digital-outcomes-and-specialists-5', status='not-submitted')
    # Keep only drafts the DOS4-answer predicate flags.
    return list(filter(draft_service_contains_dos4_answer, candidate_drafts))
def get_supplier_ids_not_signed(api_client: "DataAPIClient", framework_slug: str) -> "List[int]":
    """
    Get a list of supplier IDs who have at least one successful lot entry but have not signed
    the framework agreement
    """
    supplier_frameworks = api_client.find_framework_suppliers_iter(
        framework_slug, agreement_returned=False, with_declarations=False)
    return [sf["supplierId"] for sf in supplier_frameworks if sf["onFramework"]]
def get_dmp_supplier_data(self, framework=None, duns_number=None, from_declaration=False):
    """Return Digital Marketplace supplier data for a DUNS number or framework,
    lazily initialising the data API client on first use.

    Returns None when neither `duns_number` nor `framework` is given.
    """
    # TODO: error handling
    if self.data_api_client is None:
        self.data_api_client = DataAPIClient(
            base_url=get_api_endpoint_from_stage(self.stage),
            auth_token=get_auth_token('api', self.stage))

    # DUNS lookup takes precedence over a framework-wide query.
    if duns_number is not None:
        return self.data_api_client.find_suppliers(duns_number=duns_number)
    if framework is not None:
        # TODO: use iter instead -> digitalmarketplace-apiclient/blob/master/dmapiclient/data.py#L119
        # TODO: check pagination
        if from_declaration:
            return self.data_api_client.find_framework_suppliers(framework)
        return self.data_api_client.find_suppliers(framework=framework)
def suppliers_lot_count(data_api_url, data_api_token):
    """Write a CSV of per-lot draft service counts for every supplier to stdout.

    - iterates through all suppliers
    - for each supplier hits the draft API to recover the services
    - builds a CSV row per supplier with submitted / not-submitted counts
      per G-Cloud lot

    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers())

    for supplier in client.find_suppliers_iter():
        try:
            drafts = [draft for draft in client.find_draft_services_iter(supplier['id'])]
            if not drafts:
                continue
            aggregations = aggregate(drafts)
            row = [supplier['id'], supplier['name'], supplier.get('dunsNumber', "")]
            # Submitted/not-submitted pair per lot, in header order.
            for lot_slug in ('iaas', 'paas', 'saas', 'scs'):
                lot_counts = aggregations.get(lot_slug, {})
                row.append(submitted_count(lot_counts))
                row.append(not_submitted_count(lot_counts))
            writer.writerow(row)
        except HTTPError as e:
            if e.status_code != 404:
                raise e
            # not all suppliers make a declaration so this is fine
def inject_framework_dates(stage):
    """Push the framework dates in FRAMEWORKS_AND_DATES to the API for `stage`,
    reporting per-framework failures without aborting the run."""
    # development has no real credentials store, so use the local dummy token
    data_api_token = get_auth_token(
        'api', stage) if stage != 'development' else 'myToken'
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), data_api_token)

    for framework_slug, framework_data in FRAMEWORKS_AND_DATES.items():
        print(f'Injecting dates for {framework_slug}: {framework_data}')
        try:
            data_api_client.update_framework(
                framework_slug=framework_slug,
                data=framework_data,
                # audit trail: record who ran this one-off script and from where
                user=f'{getpass.getuser()} - '
                f'digitalmarketplace-scripts/scripts/'
                f'oneoff/inject-framework-dates.py')
        except Exception as e:
            # best-effort: report and continue with the remaining frameworks
            print(
                f'Failed with {e} on {framework_slug}. Data: {framework_data}')
def get_supplier_ids_signed(api_client: "DataAPIClient", framework_slug: str) -> "List[int]":
    """
    Get a list of supplier IDs who are on `framework_slug` and have signed the framework agreement
    """
    signed_supplier_ids = []
    for supplier in api_client.find_framework_suppliers_iter(
            framework_slug, agreement_returned=True, with_declarations=False):
        if supplier["onFramework"]:
            signed_supplier_ids.append(supplier["supplierId"])
    return signed_supplier_ids
def get_framework_for_reuse(
    supplier_id: int,
    client: DataAPIClient,
    exclude_framework_slugs: Optional[Container[str]] = None,
) -> Optional[dict]:
    """Given a list of declarations find the most suitable for reuse.

    :param supplier_id: supplier whose declarations we are inspecting
    :param client: data client
    :param exclude_framework_slugs: list of framework slugs to exclude from results
    :return: framework
    """
    # Map framework slug -> supplier-framework record, keeping only frameworks
    # the supplier got onto and where declaration reuse is not explicitly
    # disallowed (absent/None counts as allowed).
    declarations = {
        sf['frameworkSlug']: sf
        for sf in client.find_supplier_declarations(supplier_id)
        ['frameworkInterest']
        if sf['onFramework'] and sf.get('allowDeclarationReuse') is not False
    }
    # Walk all frameworks in reuse-preference order and return the first one
    # the supplier has a reusable declaration for (and that isn't excluded);
    # None if there is no match.
    return next(
        (framework for framework in order_frameworks_for_reuse(
            client.find_frameworks()['frameworks'])
         if framework['slug'] in declarations and framework['slug'] not in (
             exclude_framework_slugs or ())),
        None)
def main():
    """Emit a CSV mapping each published service in a lot to the top-level
    search-filter categories it belongs to."""
    args = docopt(__doc__)

    frameworks_repo = Path(args["--frameworks-repo"]).resolve()
    framework_slug = args["<framework_slug>"]
    stage = args["<stage>"]
    lot = args["<lot>"]

    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), get_auth_token('api', stage))

    content_loader = ContentLoader(frameworks_repo)
    content_loader.load_manifest(framework_slug, "services", "services_search_filters")
    manifest = content_loader.get_manifest(framework_slug, "services_search_filters")

    # FIXME there isn't a uniform way to get the lots from the framework
    # content repo, hard code for G-Cloud for now
    framework_lots = [
        {"name": "Cloud hosting", "slug": "cloud-hosting"},
        {"name": "Cloud software", "slug": "cloud-software"},
        {"name": "Cloud support", "slug": "cloud-support"},
    ]

    writer = csv.writer(sys.stdout)

    # do the thing
    writer.writerow(['serviceId', 'topLevelCategories'])
    for service in data_api_client.find_services_iter(framework=framework_slug, status='published', lot=lot):
        service_categories = service['serviceCategories'] if service.get('serviceCategories') else []
        top_level_categories = []
        for f in filters_for_lot(service['lot'], manifest, framework_lots)['categories']['filters']:
            # NOTE(review): the comprehension variable `f` shadows the outer
            # loop's `f` — safe in Python 3 (comprehensions have their own
            # scope, and the iterable `f['children']` is evaluated in the
            # enclosing scope first) but easy to misread.
            children = [f['label'] for f in f['children']] if f.get('children') else []
            # A service belongs to a top-level category if any of its
            # categories matches one of that category's child filters.
            if any(item in service_categories for item in children):
                top_level_categories.append(f['label'])
        writer.writerow([service['id'], '; '.join(top_level_categories)])
def clients_in_shell(stage):
    """Create data and search API clients for `stage` and drop into an
    interactive IPython shell with them bound as `data` and `search`.

    The development stage uses a dummy token; any other stage fetches real
    tokens from the credentials store.
    """
    print('Retrieving credentials...')

    api_token = 'myToken'
    search_api_token = 'myToken'
    if stage != 'development':
        # Bug fix: a trailing comma previously made `api_token` a 1-tuple
        # instead of a string.
        api_token = get_auth_token('api', stage)
        search_api_token = get_auth_token('search_api', stage)

    print('Creating clients...')
    data = DataAPIClient(get_api_endpoint_from_stage(stage), api_token)  # noqa
    search = SearchAPIClient(get_api_endpoint_from_stage(stage, app='search-api'), search_api_token)  # noqa

    print('Dropping into shell...')
    IPython.embed()
def clients_in_shell(stage, api_url, api_token, search_api_url, search_api_token):
    """Build data and search API clients — using the supplied URLs/tokens or
    stage-derived defaults — and drop into an interactive IPython shell."""
    print('Retrieving credentials...')
    if not api_token:
        api_token = get_auth_token('api', stage)
    if not search_api_token:
        search_api_token = get_auth_token('search_api', stage)

    print('Creating clients...')
    if not api_url:
        api_url = get_api_endpoint_from_stage(stage)
    data = DataAPIClient(api_url, api_token)  # noqa
    if not search_api_url:
        search_api_url = get_api_endpoint_from_stage(stage, app='search-api')
    search = SearchAPIClient(search_api_url, search_api_token)  # noqa

    print('Dropping into shell...')
    IPython.embed()
def reset_supplier_declaration(stage, framework_slug, reason, email, supplier_id):
    """Blank a supplier's declaration (and its prefill source) for a framework.

    The supplier can be identified either directly by `supplier_id` or by the
    `email` of one of its users; if both are given they must agree.

    :param stage: deployment stage the API calls target
    :param framework_slug: framework whose declaration is reset
    :param reason: recorded alongside the operator's username for auditing
    :param email: optional user email used to look up the supplier
    :param supplier_id: optional supplier ID
    """
    data_api_token = get_auth_token('api', stage) if stage != 'development' else 'myToken'
    data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), data_api_token)

    if email:
        user = data_api_client.get_user(email_address=email)
        if not user:
            print(f'No user found for email address `{email}`')
            exit(1)
        user_supplier_id = user['users']['supplier']['supplierId']
        if user_supplier_id and supplier_id and user_supplier_id != supplier_id:
            # Bug fix: this message previously printed `supplier_id` in both
            # places; the first should be the supplier the email belongs to.
            print(f'Email address provided does not match with supplier provided. Email address `{email}` is '
                  f'associated with supplierId `{user_supplier_id}`. Script was called with supplierId '
                  f'`{supplier_id}`.')
            exit(2)
        supplier_id = user_supplier_id

    try:
        supplier_framework = data_api_client.get_supplier_framework_info(supplier_id=supplier_id,
                                                                         framework_slug=framework_slug)
        print(f"Current supplier declaration: {supplier_framework['frameworkInterest']['declaration']}")
    except HTTPError:
        print(f'No supplier framework found for supplierId `{supplier_id}` on framework `{framework_slug}`.')
        exit(3)
    if not supplier_framework:
        print(f'No supplier framework/interest record found for supplierId `{supplier_id}` on framework '
              f'`{framework_slug}`.')
        exit(4)

    data_api_client.set_supplier_declaration(supplier_id=supplier_id, framework_slug=framework_slug,
                                             declaration={}, user=f'{getpass.getuser()} - {reason}')
    # Also clear the prefill source so the next declaration starts truly blank.
    data_api_client.set_supplier_framework_prefill_declaration(supplier_id=supplier_id,
                                                               framework_slug=framework_slug,
                                                               prefill_declaration_from_framework_slug=None,
                                                               user=f'{getpass.getuser()} - {reason}')
    print(f'Supplier declaration for supplierId `{supplier_id}` on framework `{framework_slug}` has been reset.')
def count_g12_recovery_drafts_by_status(data_api_client: DataAPIClient, supplier_id: int) -> Tuple[int, int]:
    """
    Counts the number of a supplier's G12 recovery draft services which are either 'submitted'
    or 'not-submitted'

    Returns
    -------
    A tuple (not_submitted_count, submitted_count)
    """
    # Set for O(1) membership tests (and to preserve the previous
    # set-intersection dedupe semantics).
    g12_recovery_draft_ids = set(get_g12_recovery_draft_ids())

    # Single pass over the drafts instead of materialising a list and
    # building two throwaway sets to intersect.
    not_submitted_ids = set()
    submitted_ids = set()
    for draft in data_api_client.find_draft_services_iter(supplier_id=supplier_id, framework="g-cloud-12"):
        if draft["id"] not in g12_recovery_draft_ids:
            continue
        if draft["status"] == "not-submitted":
            not_submitted_ids.add(draft["id"])
        elif draft["status"] == "submitted":
            submitted_ids.add(draft["id"])

    return len(not_submitted_ids), len(submitted_ids)
supplier_id): download_path = get_agreement_document_path(framework_slug, supplier_id, SIGNED_AGREEMENT_PREFIX) files = bucket.list(download_path) return files.pop() if files else None def get_bucket_name(stage): return 'digitalmarketplace-agreements-{0}-{0}'.format(stage) if __name__ == '__main__': arguments = docopt(__doc__) data_api_url = get_api_endpoint_from_stage(arguments['<stage>'], 'api') client = DataAPIClient(data_api_url, get_auth_token('api', arguments['<stage>'])) FRAMEWORKS = ['g-cloud-7', 'g-cloud-8', 'digital-outcomes-and-specialists'] BUCKET_NAME = get_bucket_name(arguments['<stage>']) BUCKET = s3.S3(BUCKET_NAME) print("STARTED AT {}".format(time.strftime('%X %x %Z'))) for framework_slug in FRAMEWORKS: # Get all supplier frameworks who have returned their agreement supplier_frameworks = client.find_framework_suppliers( framework_slug=framework_slug, agreement_returned=True)['supplierFrameworks'] for supplier_framework in supplier_frameworks: print("======================") print("Supplier ID: {}, Agreement ID: {}".format( supplier_framework['supplierId'],
Usage: scripts/get_active_users_csv.py <stage> Example scripts/get_active_users_csv.py preview > output.csv """ import csv import sys sys.path.insert(0, '.') from dmscripts.helpers.auth_helpers import get_auth_token from dmutils.env_helpers import get_api_endpoint_from_stage from docopt import docopt from dmapiclient import DataAPIClient if __name__ == '__main__': arguments = docopt(__doc__) stage = arguments['<stage>'] data_api_client = DataAPIClient(get_api_endpoint_from_stage(stage), get_auth_token('api', stage)) writer = csv.writer(sys.stdout) writer.writerow(['email address']) for user in filter( lambda u: u['active'], data_api_client.find_users_iter(personal_data_removed=False)): writer.writerow([user['emailAddress']])
supplier_id, SIGNED_AGREEMENT_PREFIX ) files = bucket.list(download_path) return files.pop() if files else None def get_bucket_name(stage): return 'digitalmarketplace-agreements-{0}-{0}'.format(stage) if __name__ == '__main__': arguments = docopt(__doc__) data_api_url = get_api_endpoint_from_stage(arguments['<stage>'], 'api') client = DataAPIClient(data_api_url, arguments['<api_token>']) FRAMEWORKS = ['g-cloud-7', 'g-cloud-8', 'digital-outcomes-and-specialists'] BUCKET_NAME = get_bucket_name(arguments['<stage>']) BUCKET = s3.S3(BUCKET_NAME) print("STARTED AT {}".format(time.strftime('%X %x %Z'))) for framework_slug in FRAMEWORKS: # Get all supplier frameworks who have returned their agreement supplier_frameworks = client.find_framework_suppliers( framework_slug=framework_slug, agreement_returned=True)['supplierFrameworks'] for supplier_framework in supplier_frameworks: print("======================") print "Supplier ID: {}, Agreement ID: {}".format( supplier_framework['supplierId'], supplier_framework['agreementId'])
from docopt import docopt from dmscripts.export_framework_applicant_details import get_csv_rows from dmscripts.helpers.auth_helpers import get_auth_token from dmscripts.helpers.framework_helpers import find_suppliers_with_details_and_draft_service_counts from dmscripts.helpers.supplier_data_helpers import get_supplier_ids_from_file from dmscripts.generate_framework_agreement_signature_pages import ( render_html_for_suppliers_awaiting_countersignature, render_pdf_for_each_html_page) from dmapiclient import DataAPIClient from dmutils.env_helpers import get_api_endpoint_from_stage if __name__ == '__main__': args = docopt(__doc__) framework_slug = args['<framework_slug>'] client = DataAPIClient(get_api_endpoint_from_stage(args['<stage>']), get_auth_token('api', args['<stage>'])) framework = client.get_framework(framework_slug)['frameworks'] framework_lot_slugs = tuple([ lot['slug'] for lot in client.get_framework(framework_slug)['frameworks']['lots'] ]) supplier_id_file = args['<supplier_id_file>'] supplier_ids = get_supplier_ids_from_file(supplier_id_file) html_dir = tempfile.mkdtemp() records = find_suppliers_with_details_and_draft_service_counts( client, framework_slug, supplier_ids) headers, rows = get_csv_rows(records, framework_slug, framework_lot_slugs,
supplier_id=supplier['id'], supplier={'tradingStatus': mapped_trading_status}, user=f'{getpass.getuser()} (migrate trading status script)', ) success_counter += 1 except HTTPError as e: print( f"{prefix}Error updating supplier {supplier['id']}: {e.message}" ) failure_counter += 1 if i % 100 == 0: print(f'{prefix}{i} suppliers processed ...') print(f'{prefix}Finished processing {i} suppliers.') print(f"{prefix}Succssfully updated: {success_counter}") print(f"{prefix}Failed to update: {failure_counter}") if __name__ == '__main__': arguments = docopt(__doc__) stage = arguments['<stage>'] dry_run = arguments['--dry-run'] api_url = get_api_endpoint_from_stage(stage) migrate_trading_statuses( DataAPIClient(api_url, get_auth_token('api', stage)), dry_run)
if __name__ == '__main__': arguments = docopt(__doc__) supplier_ids = get_supplier_ids_from_args(arguments) STAGE = arguments['<stage>'] FRAMEWORK_SLUG = arguments['<framework>'] GOVUK_NOTIFY_API_KEY = arguments['<notify_api_key>'] GOVUK_NOTIFY_TEMPLATE_ID = arguments['<notify_template_id>'] CONTENT_PATH = arguments['<content_path>'] DRY_RUN = arguments['--dry-run'] content_loader = ContentLoader(CONTENT_PATH) content_loader.load_messages(FRAMEWORK_SLUG, ['e-signature']) mail_client = scripts_notify_client(GOVUK_NOTIFY_API_KEY, logger=logger) api_client = DataAPIClient(base_url=get_api_endpoint_from_stage(STAGE), auth_token=get_auth_token('api', STAGE)) context_helper = SuccessfulSupplierContextForNotify( api_client, FRAMEWORK_SLUG, supplier_ids=supplier_ids, logger=logger) context_helper.populate_data() context_data = context_helper.get_users_personalisations() framework = api_client.get_framework(FRAMEWORK_SLUG).get('frameworks') prefix = "[Dry Run] " if DRY_RUN else "" # Add in any framework-specific dates etc here extra_template_context = { "contract_title": content_loader.get_message(FRAMEWORK_SLUG, 'e-signature', 'framework_contract_title'), "intentionToAwardAt_dateformat":
def update_suppliers(data_api_endpoint, data_api_token, users_path):
    """Create a user account for each record loaded from `users_path`.

    :param data_api_endpoint: base URL of the data API
    :param data_api_token: auth token for the data API
    :param users_path: path to the users file understood by `load_users`
    """
    # NOTE(review): despite its name this function creates *users*, not
    # suppliers — presumably copied from a similar supplier script; consider
    # renaming (left as-is here because callers reference this name).
    client = DataAPIClient(data_api_endpoint, data_api_token)

    for user in load_users(users_path):
        print("Adding {}".format(user))
        client.create_user(user)
dry_run = args["--dry-run"] verbose = args["--verbose"] if args["--threads"]: map_impl = ThreadPool(int(args["--threads"])).imap else: map_impl = map logger = configure_logger({ "dmapiclient.base": logging.WARNING, "framework_helpers": logging.DEBUG if verbose >= 2 else logging.WARNING, "script": logging.DEBUG if verbose else logging.INFO, }) logger.debug(f"connecting to api on {stage}") client = DataAPIClient( get_api_endpoint_from_stage(args["<stage>"]), get_auth_token("api", args["<stage>"]), ) logger.debug(f"fetching lots for framework '{framework_slug}'") framework = client.get_framework(framework_slug)["frameworks"] suppliers = find_suppliers(client, framework, supplier_ids, map_impl, dry_run) # create a temporary directory for the HTML files with tempfile.TemporaryDirectory() as html_dir: # create signature pages in HTML using Jinja templates from agreements repo logger.debug(f"generating HTML signature pages") render_html_for_successful_suppliers( suppliers, framework, agreements_dir, html_dir, dry_run) # convert HTML to PDF (this uses wkhtmltopdf under-the-hood)
service_id)) except Exception as e: if e.message == "Cannot re-publish a submitted service": print(u" > Draft {} already published".format(draft['id'])) else: print(u" > ERROR MIGRATING DRAFT {} - {}".format( draft['id'], e.message)) if __name__ == "__main__": arguments = docopt(__doc__) STAGE = arguments['<stage>'] DRY_RUN = arguments['--dry-run'] FRAMEWORK_SLUG = arguments['<framework_slug>'] api_url = get_api_endpoint_from_stage(STAGE) client = DataAPIClient(api_url, get_auth_token('api', STAGE)) print("Finding suppliers...") suppliers = find_suppliers_on_framework(client, FRAMEWORK_SLUG) print("Migrating drafts...") for supplier in suppliers: print(u"Migrating drafts for supplier {} - {}".format( supplier['supplierId'], supplier['supplierName'])) draft_services = get_submitted_drafts(client, FRAMEWORK_SLUG, supplier['supplierId']) for draft_service in draft_services: make_draft_service_live(client, draft_service, DRY_RUN)
("supplier_name", record["supplier"]["name"]), ("supplier_declaration_name", record["declaration"].get("supplierRegisteredName", "")), ("status", "PASSED" if record["onFramework"] else "FAILED"), ] return row + make_fields_from_content_questions(questions, record) return inner if __name__ == '__main__': arguments = docopt(__doc__) STAGE = arguments['<stage>'] CONTENT_PATH = arguments['<content_path>'] FRAMEWORK_SLUG = arguments['<framework_slug>'] client = DataAPIClient(get_api_endpoint_from_stage(STAGE), get_auth_token('api', STAGE)) content_loader = ContentLoader(CONTENT_PATH) content_loader.load_manifest(FRAMEWORK_SLUG, "services", "edit_submission") content_manifest = content_loader.get_manifest(FRAMEWORK_SLUG, "edit_submission") records = find_all_participants(client) write_csv_with_make_row( records, make_row(content_manifest), "output/{}-user-research-participants.csv".format(FRAMEWORK_SLUG))
# Build a CSV report of Jenkins functional-test build results, annotated with
# framework data fetched from the staging API.
if __name__ == "__main__":
    args = docopt(__doc__)

    logging.basicConfig(level=logging.INFO)

    FT_JOB_NAMES = ["functional-tests-preview", "functional-tests-staging"]
    API_USER = os.getenv("DM_JENKINS_API_USER")
    API_TOKEN = os.getenv("DM_JENKINS_API_TOKEN")
    OUTPUT_FILE = args.get("<file>") or "functional_test_report.csv"

    # Jenkins API credentials come from the environment.
    auth = HTTPBasicAuth(API_USER, API_TOKEN)

    # Use staging to get the framework dates because it'll be the same as production
    api_client = DataAPIClient(get_api_endpoint_from_stage("staging"), get_auth_token("api", "staging"))
    frameworks = api_client.find_frameworks()["frameworks"]

    build_data = []
    for job in FT_JOB_NAMES:
        for build in get_job_build_data(job, auth):
            build_data.append(format_build(job, build, frameworks))

    logging.info(f"Writing report to {OUTPUT_FILE}")
    # NOTE(review): assumes at least one build was found — build_data[0]
    # raises IndexError otherwise.
    headers = build_data[0].keys()
    with open(OUTPUT_FILE, "w") as f:
        writer = csv.DictWriter(f, headers)
        writer.writeheader()
        writer.writerows(build_data)
pool = ThreadPool(25) return itertools.chain.from_iterable(pool.imap_unordered( create_draft_getter(client), client.find_suppliers_iter() )) if __name__ == "__main__": arguments = docopt(__doc__) stage = arguments['<stage>'] api_token = arguments['<token>'] dry_run = arguments['--dry-run'] api_url = get_api_endpoint_from_stage(stage) client = DataAPIClient(api_url, api_token) counter = 0 for draft in get_all_drafts(client): update = {} if "outcomesLocations" in draft: update["locations"] = draft["outcomesLocations"] update["outcomesLocations"] = None if "recruitLocations" in draft: if update: raise ValueError("draft {} has both outcomesLocations and recruitLocations".format(draft["id"])) update["locations"] = draft["recruitLocations"] update["recruitLocations"] = None if update: counter += 1
from dmapiclient import DataAPIClient from dmutils.env_helpers import get_api_endpoint_from_stage from docopt import docopt sys.path.insert(0, ".") from dmscripts.helpers.auth_helpers import get_auth_token from dmscripts.helpers.updated_by_helpers import get_user if __name__ == "__main__": args = docopt(__doc__) api_client = DataAPIClient( get_api_endpoint_from_stage(args["<stage>"]), get_auth_token("api", args["<stage>"]), user=get_user(), ) with open(args["<input_file>"]) as input_file: services = list(csv.DictReader(input_file)) missing_services = [s for s in services if not s["serviceId"]] for service in missing_services: name = service["Service Name"].replace(".csv", "")[:100] supplier_id = service["Supplier ID"] lot = { "Software": "cloud-software", "Support": "cloud-support", }.get(service["Lot"])
logger = logging.getLogger("script")

from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers import logging_helpers
from dmutils.env_helpers import get_api_endpoint_from_stage
from dmscripts.data_retention_remove_supplier_declarations import remove_unsuccessful_supplier_declarations

# Data-retention entry point: strip declarations from unsuccessful suppliers
# on the given framework (supports a dry run).
if __name__ == "__main__":
    arguments = docopt(__doc__)

    # Get script arguments
    stage = arguments['<stage>']
    dry_run = arguments['--dry-run']
    framework = arguments['<framework-slug>']
    verbose = arguments['--verbose']
    # Fall back to the operating-system username for the audit trail.
    user = arguments['<user>'] or getpass.getuser()

    # Set defaults, instantiate clients
    logging_helpers.configure_logger({"dmapiclient": logging.INFO} if verbose else {"dmapiclient": logging.WARN})
    data_api_client = DataAPIClient(
        base_url=get_api_endpoint_from_stage(stage),
        auth_token=get_auth_token('api', stage))
    remove_unsuccessful_supplier_declarations(
        data_api_client=data_api_client,
        logger=logger,
        dry_run=dry_run,
        framework_slug=framework,
        user=user)
import json
import sys
sys.path.insert(0, '.')

from docopt import docopt

from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers.supplier_data_helpers import get_supplier_ids_from_file
from dmscripts.export_framework_results_reasons import export_suppliers
from dmapiclient import DataAPIClient
from dmcontent.content_loader import ContentLoader
from dmutils.env_helpers import get_api_endpoint_from_stage

# Export per-supplier framework results (pass/fail reasons) for the given
# framework, optionally restricted to supplier IDs listed in a file.
if __name__ == '__main__':
    args = docopt(__doc__)

    client = DataAPIClient(get_api_endpoint_from_stage(args['<stage>']), get_auth_token('api', args['<stage>']))
    content_loader = ContentLoader(args['<content_path>'])

    # Bug fix: the schema file was opened without `with`, leaking the handle.
    with open(args["<declaration_schema_path>"], "r") as schema_file:
        declaration_definite_pass_schema = json.load(schema_file)

    # The optional `definitions.baseline` sub-schema describes the weaker
    # "baseline" (discretionary) pass.
    declaration_baseline_schema = (
        declaration_definite_pass_schema.get("definitions") or {}).get("baseline")

    supplier_id_file = args['<supplier_id_file>']
    supplier_ids = get_supplier_ids_from_file(supplier_id_file)

    export_suppliers(client, args['<framework_slug>'], content_loader, args['<output_dir>'],
                     declaration_definite_pass_schema, declaration_baseline_schema, supplier_ids)
from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers import logging_helpers
from dmscripts.notify_suppliers_of_framework_application_event import \
    notify_suppliers_of_framework_application_event

# Send a GOV.UK Notify email about a framework application event to suppliers,
# optionally resuming an interrupted previous run.
if __name__ == "__main__":
    arguments = docopt(__doc__)

    logger = logging_helpers.configure_logger({"dmapiclient": logging.INFO})

    # Reuse a previous run's id so already-notified suppliers are skipped.
    run_id = None if not arguments.get("--resume-run-id") else UUID(
        arguments["--resume-run-id"])

    failure_count = notify_suppliers_of_framework_application_event(
        data_api_client=DataAPIClient(
            base_url=get_api_endpoint_from_stage(arguments["<stage>"], "api"),
            auth_token=get_auth_token("api", arguments["<stage>"]),
        ),
        notify_client=scripts_notify_client(
            arguments['<govuk_notify_api_key>'], logger=logger),
        notify_template_id=arguments['<govuk_notify_template_id>'],
        framework_slug=arguments["<framework_slug>"],
        stage=arguments["<stage>"],
        dry_run=arguments["--dry-run"],
        logger=logger,
        run_id=run_id,
    )

    if failure_count:
        logger.error("Failed sending {failure_count} messages", extra={"failure_count": failure_count})
""" import sys sys.path.insert(0, '.') from os import environ from sys import exit from dmapiclient import DataAPIClient from dmscripts.helpers.auth_helpers import get_auth_token from dmutils.env_helpers import get_api_endpoint_from_stage if __name__ == "__main__": data_api_client = DataAPIClient( get_api_endpoint_from_stage(environ["STAGE"].lower()), get_auth_token('api', environ["STAGE"].lower()), ) email_address = environ["ACCOUNT_EMAIL"] user = data_api_client.get_user(email_address=email_address) if not user: print(f"User {email_address!r} not found") exit(2) if not data_api_client.update_user_password( user["users"]["id"], environ["ACCOUNT_PASSWORD"], "set-dm-password-by-email.py", ): print(f"Failed to set password for {email_address!r}") exit(3)
for lot in lots: lot.update({"list_id": "096e52cebb"}) # Override list id if arguments.get("--list_id"): for lot in lots: lot.update({"list_id": arguments["--list_id"]}) # Override lot if arguments.get("--lot_slug"): lots = [ lot for lot in lots if lot["lot_slug"] == arguments["--lot_slug"] ] api_url = get_api_endpoint_from_stage(arguments['<stage>']) data_api_client = DataAPIClient( api_url, get_auth_token('api', arguments['<stage>'])) dm_mailchimp_client = DMMailChimpClient(arguments['<mailchimp_username>'], arguments['<mailchimp_api_key>'], logger) for lot_data in lots: ok = main( data_api_client=data_api_client, mailchimp_client=dm_mailchimp_client, lot_data=lot_data, number_of_days=number_of_days, framework_slug=framework_slug, ) if not ok: sys.exit(1)
create_draft_getter(client), client.find_suppliers_iter() )) def get_location(location): return NEW_LOCATION if location == OLD_LOCATION else location if __name__ == '__main__': arguments = docopt(__doc__) stage = arguments['<stage>'] api_token = arguments['<api_token>'] dry_run = arguments['--dry-run'] api_url = get_api_endpoint_from_stage(stage) client = DataAPIClient(api_url, api_token) counter = 0 for draft in get_all_drafts(client): for location_key in get_location_keys(draft): draft[location_key] = [ get_location(location) for location in draft[location_key] ] counter += 1 if not dry_run: client.update_draft_service(draft['id'], draft, "script", list(draft.keys())) print("Updated {}".format(counter))