def suppliers_on_framework(data_api_url, data_api_token, questions):
    """
    Generate the CSV
     - takes the data api details
     - iterates through all suppliers
     - foreach supplier hits the declaration API to recover the answers
     - builds CSV row for each supplier
    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    :param questions: question keys used for the CSV header and row order
    :return: None (rows are written to stdout)
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers(questions))
    for supplier in client.find_suppliers_iter():
        try:
            declaration = client.get_supplier_declaration(
                supplier['id'], 'g-cloud-7')['declaration']
            if not declaration:
                # empty declaration body: nothing to report for this supplier
                continue
            status = declaration['status']
            processed_declaration = process_supplier_declaration(
                declaration, questions)
            row = [
                supplier['id'],
                supplier['name'],
                supplier.get('dunsNumber', ""),
                status,
            ]
            # FIX: extend() replaces an inner `for declaration in ...` loop
            # that shadowed the outer `declaration` variable.
            row.extend(processed_declaration)
            try:
                writer.writerow(row)
            except UnicodeEncodeError:
                # Python 2 csv cannot write non-ASCII unicode directly;
                # retry with each text field explicitly UTF-8 encoded.
                writer.writerow([
                    field.encode('utf-8') if hasattr(field, 'encode')
                    else field
                    for field in row
                ])
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                # FIX: bare raise preserves the original traceback
                # (was `raise e`)
                raise
        except KeyError:
            # declaration missing an expected key (e.g. 'status') -- skip
            pass
def suppliers_lot_count(data_api_url, data_api_token):
    """
    Generate the CSV
     - takes the data api details
     - iterates through all suppliers
     - foreach supplier hits the draft API to recover the services
     - builds CSV row for each supplier

    NOTE(review): an identical definition of this function appears later
    in this file and overrides this one at import time -- one of the two
    should be removed.

    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    :return: None (rows are written to stdout)
    """
    client = DataAPIClient(data_api_url, data_api_token)
    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(headers())
    for supplier in client.find_suppliers_iter():
        try:
            # FIX: list(...) replaces a manual append loop
            drafts = list(client.find_draft_services_iter(supplier['id']))
            if drafts:
                aggregations = aggregate(drafts)
                supplier_row = [
                    supplier['id'],
                    supplier['name'],
                    supplier.get('dunsNumber', ""),
                ]
                # FIX: one loop over the four lots replaces eight
                # copy-pasted submitted/not-submitted append pairs;
                # column order is unchanged.
                for lot in ('iaas', 'paas', 'saas', 'scs'):
                    lot_aggregation = aggregations.get(lot, {})
                    supplier_row.append(submitted_count(lot_aggregation))
                    supplier_row.append(not_submitted_count(lot_aggregation))
                writer.writerow(supplier_row)
        except HTTPError as e:
            if e.status_code == 404:
                # not all suppliers make a declaration so this is fine
                pass
            else:
                # FIX: bare raise preserves the original traceback
                raise
def suppliers_lot_count(data_api_url, data_api_token):
    """
    Generate the CSV
     - takes the data api details
     - iterates through all suppliers
     - foreach supplier hits the draft API to recover the services
     - builds CSV row for each supplier
    :param data_api_url: base URL of the data API
    :param data_api_token: auth token for the data API
    :return: None (rows are written to stdout)
    """
    client = DataAPIClient(data_api_url, data_api_token)
    out = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    out.writerow(headers())
    for supplier in client.find_suppliers_iter():
        try:
            draft_services = [
                ds for ds in client.find_draft_services_iter(supplier['id'])
            ]
            if not draft_services:
                # supplier has no drafts at all: emit no row
                continue
            totals = aggregate(draft_services)
            record = [
                supplier['id'],
                supplier['name'],
                supplier.get('dunsNumber', ""),
            ]
            # submitted / not-submitted counts for each lot, in a fixed order
            for lot_slug in ('iaas', 'paas', 'saas', 'scs'):
                per_lot = totals.get(lot_slug, {})
                record.extend([
                    submitted_count(per_lot),
                    not_submitted_count(per_lot),
                ])
            out.writerow(record)
        except HTTPError as http_error:
            # not all suppliers make a declaration so a 404 is fine
            if http_error.status_code != 404:
                raise http_error
# Script entry point: walk all suppliers and find, per supplier, the first
# framework (in the given order) it is on. NOTE(review): the loop body
# appears to continue beyond this excerpt (`user` and `dry_run` are bound
# but not yet used, and `supplier_framework` is computed then dropped) --
# the remainder of the script is not visible here.
if __name__ == '__main__':
    arguments = docopt(__doc__)
    configure_logger({"script": loglevel_INFO})
    client = DataAPIClient(get_api_endpoint_from_stage(arguments['<stage>']),
                           arguments['<data_api_token>'])
    # fall back to the OS-level username when no <user> argument was given
    user = arguments['<user>'] or getpass.getuser()
    # NOTE(review): if "<framework_slugs>" is absent/empty this stays
    # falsy (None/''), and the tuple(...) below would raise TypeError when
    # iterating it -- presumably docopt guarantees the argument; confirm.
    framework_slugs = arguments.get("<framework_slugs>") and arguments["<framework_slugs>"].split(",")
    dry_run = bool(arguments.get("--dry-run"))
    # resolve each slug to its full framework record via the API
    frameworks = tuple(client.get_framework(framework_slug)["frameworks"]
                       for framework_slug in framework_slugs)
    logger.info("Inspecting framework declarations in this order: %s",
                ", ".join(fw["slug"] for fw in frameworks))
    for supplier in client.find_suppliers_iter():
        logger.info("Processing supplier %s", supplier["id"])
        # Skip suppliers already processed: having a tradingStatus plus an
        # empty address2 on the first contact record is treated as the
        # "already done" marker -- presumably set by a later step of this
        # script; verify against the rest of the file.
        if supplier.get("tradingStatus") and (supplier["contactInformation"][0]).get("address2") == "":
            logger.info("  already done: {}".format(supplier["id"]))
            continue
        try:
            # First framework (in `frameworks` order) where this supplier
            # has interest and onFramework is truthy; 404s from the info
            # lookup are converted to None by _catch_404_none and skipped.
            supplier_framework = next(
                sfr["frameworkInterest"]
                for sfr in (
                    _catch_404_none(
                        lambda: client.get_supplier_framework_info(supplier["id"], framework["slug"])
                    ) for framework in frameworks
                ) if sfr and sfr["frameworkInterest"]["onFramework"]
            )
        except StopIteration:
            # generator exhausted without a match: supplier is on none of
            # the requested frameworks
            logger.info("Supplier %s: not on any relevant frameworks", supplier["id"])
            supplier_framework = None