def test_session_reset(self):
    """Verify reset_session() leaves the session usable for reads (GET)
    as well as writes (POST / DELETE)."""
    session = ApiSession(api.controller_ip, api.username, api.password,
                         verify=False, api_version=api.api_version)
    # GET must succeed both before and after a reset.
    response = session.get('pool', params={'fields': 'name'})
    assert response.status_code == 200
    session.reset_session()
    response = session.get('pool', params={'fields': 'name'})
    assert response.status_code == 200
    # Create an object, reset again, then delete it via the fresh session.
    response = session.post('pool', data={'name': 'test-reset'})
    assert response.status_code == 201
    session.reset_session()
    response = session.delete_by_name('pool', 'test-reset')
    assert response.status_code == 204
def test_session_connected(self):
    """`connected` becomes True only after the first request when lazy
    authentication is enabled, and immediately when it is disabled."""
    creds = dict(controller_ip=login_info["controller_ip"],
                 username=login_info.get("username", "admin"),
                 password=login_info.get("password", "fr3sca$%^"))
    ApiSession.clear_cached_sessions()
    lazy = ApiSession(lazy_authentication=True, **creds)
    assert not lazy.connected
    lazy.get('pool')  # first request triggers the deferred login
    assert lazy.connected
    ApiSession.clear_cached_sessions()
    eager = ApiSession(lazy_authentication=False, **creds)
    assert eager.connected
def test_lazy_authentication(self):
    """keystone_token is populated only after the first request in lazy
    mode, but right away when authentication is eager."""
    creds = dict(controller_ip=login_info["controller_ip"],
                 username=login_info.get("username", "admin"),
                 password=login_info.get("password", "avi123"))
    ApiSession.clear_cached_sessions()
    lazy = ApiSession(lazy_authentication=True, **creds)
    assert not lazy.keystone_token
    lazy.get('pool')  # first request forces the deferred login
    assert lazy.keystone_token
    ApiSession.clear_cached_sessions()
    eager = ApiSession(lazy_authentication=False, **creds)
    assert eager.keystone_token
class AviUsage:
    """Wrapper around the Avi controller metrics API for querying
    service-engine core usage statistics."""

    def __init__(self, avi_ip, avi_user, avi_pswd, avi_version):
        # Authenticates with the Avi controller API and caches the session.
        self.avi_api = ApiSession(avi_ip, avi_user, avi_pswd,
                                  api_version=avi_version)

    def get_max_core_usage(self, sdate, edate, limit):
        """Query controller_stats.max_num_se_cores between *sdate* and
        *edate* (up to *limit* samples, one per day).

        Returns a tuple ``(max_usage, min_usage, min_ts, max_ts)`` taken
        from the series header statistics, or ``None`` when the query
        returned no data points (same as the original implicit fall-through).
        """
        metrics_api = "/analytics/metrics/controller"
        params = {
            'metric_id': 'controller_stats.max_num_se_cores',
            'step': 86400,  # one sample per day
            'start': sdate,
            'stop': edate,
            'limit': limit,
            'pad_missing_data': 'false'
        }
        ret = self.avi_api.get(path=metrics_api, params=params)
        # FIX: the original re-parsed ret.json() on every access (5 times);
        # decode the response body exactly once.
        body = ret.json()
        if body['count'] > 0:
            stats = body['results'][0]['series'][0]['header']['statistics']
            return (stats['max'], stats['min'],
                    stats['min_ts'], stats['max_ts'])
        return None  # no samples in the requested window
def test_session_reset(self):
    """reset_session() must yield a session that can still read, create
    and delete objects (credentials-object flavour)."""
    creds = api.avi_credentials
    session = ApiSession(controller_ip=creds.controller,
                         username=creds.username,
                         password=creds.password,
                         verify=False,
                         api_version=creds.api_version,
                         data_log=api.data_log)
    # Read works before and after the first reset.
    rsp = session.get('pool', params={'fields': 'name'})
    assert rsp.status_code == 200
    session.reset_session()
    rsp = session.get('pool', params={'fields': 'name'})
    assert rsp.status_code == 200
    # Create, reset again, then clean up through the fresh session.
    rsp = session.post('pool', data={'name': 'test-reset'})
    assert rsp.status_code == 201
    session.reset_session()
    rsp = session.delete_by_name('pool', 'test-reset')
    assert rsp.status_code == 204
def test_context_sharing(self):
    """A session built from another session's context (session_id and
    csrftoken) must end up sharing that exact context."""
    first = ApiSession(controller_ip=login_info.get('controller_ip'),
                       username=login_info.get('username'),
                       password=login_info.get('password'),
                       lazy_authentication=False)
    shared_context = first.get_context()
    first.clear_cached_sessions()
    second = ApiSession(controller_ip=login_info.get('controller_ip'),
                        username=login_info.get('username'),
                        password=login_info.get('password'),
                        session_id=shared_context['session_id'],
                        csrftoken=shared_context['csrftoken'],
                        lazy_authentication=True)
    second.get('pool')  # request goes out with the injected context
    assert second.get_context() == shared_context
def enable_disable_vs():
    """Disable and then re-enable every virtual service stuck in the
    OPER_INITIALIZING state.

    FIX: the original used Python 2 ``print`` statements, which are a
    syntax error under Python 3 (the rest of this code base uses the
    ``print()`` function).  The four duplicated tenant-conditional PATCH
    branches are also folded into one code path.
    """
    global session
    session = ApiSession('localhost', 'admin', 'admin', api_version='18.2.6')
    session.tenant = '*'
    vs_obj_list = session.get(
        'virtualservice?join_subresources=runtime&page_size=-1').json(
        )['results']
    # Collect every VS whose operational state is stuck initializing.
    vs_oper_init_list = [
        vs for vs in vs_obj_list
        if vs['runtime']['oper_status']['state'] == 'OPER_INITIALIZING']
    for v in vs_oper_init_list:
        t_uuid = get_vs_tenant_uuid(v['tenant_ref'])
        t_name = get_tenant_name(t_uuid)
        v_uuid = get_vs_tenant_uuid(v['url'])
        v_name = get_vs_name(v_uuid)
        # Non-admin tenants must be named explicitly on the PATCH call.
        patch_kwargs = {'api_version': '18.2.6'}
        if t_name != 'admin':
            patch_kwargs['tenant'] = t_name
        # Bounce the VS: disable, wait a second, then enable.
        for action, payload in (('Disabling', {'enabled': False}),
                                ('Enabling', {'enabled': True})):
            print("%s the VS %s\n" % (action, v_name))
            resp = session.patch('virtualservice/' + v['uuid'],
                                 data={'replace': payload}, **patch_kwargs)
            print(resp)
            time.sleep(1)
    print("All the Vses have been Disabled/Enabled Successfully\n")
def test_user_login(self):
    """Change the account password, verify login with the new password,
    then restore the original password after disabling the password
    strength check so the old (possibly weak) password is accepted."""
    api1 = ApiSession(controller_ip=login_info.get('controller_ip'),
                      username=login_info.get('username'),
                      password=login_info.get('password'),
                      lazy_authentication=False)
    user_info = gSAMPLE_CONFIG["Passwords"]
    original_password = login_info.get('password')
    new_password = "******"
    user_info['password'] = new_password
    user_info['old_password'] = original_password
    res = api1.put('useraccount', data=json.dumps(user_info))
    assert res.status_code == 200
    api1.clear_cached_sessions()
    # New session must authenticate with the changed password.
    api2 = ApiSession(controller_ip=login_info.get('controller_ip'),
                      username=login_info.get('username'),
                      password=new_password,
                      lazy_authentication=False)
    res = api2.get('pool')
    assert res.status_code in [200, 204]
    # FIX: the original chained assignment
    #   data = r['portal_configuration']['password_strength_check'] = False
    # bound `data` to the bare value False and PUT that instead of the
    # updated systemconfiguration object.
    resp = api2.get('systemconfiguration', tenant='admin')
    sysconfig = resp.json()
    sysconfig['portal_configuration']['password_strength_check'] = False
    sysresp = api2.put('systemconfiguration', data=sysconfig, tenant='admin')
    assert sysresp.status_code == 200
    # Swap back to the original password.
    old_password = user_info['password']
    changed_password = original_password
    user_info['password'] = original_password
    user_info['old_password'] = old_password
    result = api2.put('useraccount', user_info)
    assert result.status_code == 200
    res = api2.get('pool')
    assert res.status_code in [200, 204]
    api2.clear_cached_sessions()
    # Final session proves the restored password works.
    api3 = ApiSession(controller_ip=login_info.get('controller_ip'),
                      username=login_info.get('username'),
                      password=changed_password,
                      lazy_authentication=False)
    res = api3.get('pool')
    assert res.status_code in [200, 204]
def main():
    """Page through all GSLB services (1000 per page) and print each page
    of results until the API stops returning a 'next' link."""
    parser = argparse.ArgumentParser(
        description="AVISDK based Script query gslb services on multiple pages ")
    parser.add_argument("-u", "--username", required=True, help="Login username")
    parser.add_argument("-p", "--password", required=True, help="Login password")
    parser.add_argument("-c", "--controller", required=True,
                        help="Controller IP address")
    parser.add_argument("-a", "--api_version", required=False,
                        help="Api Version Name")
    parser.add_argument("-t", "--tenant", required=False,
                        help="Tenant, if left blank Admin is selected")
    args = parser.parse_args()

    user = args.username
    controller = args.controller
    api_version = str(args.api_version if args.api_version else '17.2.14')
    tenant = str(args.tenant if args.tenant else 'admin')
    password = args.password

    session = ApiSession(controller, user, password, tenant=tenant,
                         api_version=api_version)
    page = 1
    while True:
        resp = session.get("gslbservice?page_size=1000&page=" + str(page))
        # FIX: the original tested `status_code in range(200, 299)`, which
        # excludes 299, and its error branch had no `break`, so any API
        # failure looped forever re-issuing the same failing request.
        if 200 <= resp.status_code < 300:
            json_data = json.loads(resp.text)
            print(json_data['results'])
            if 'next' in json_data:
                page += 1
            else:
                print("End of entries")
                break
        else:
            print("Error: %s" % resp.text)
            break
def test_basic_auth(self):
    """Create pool, vsvip and virtual service through a session that
    authenticates via a Basic auth header, then clean everything up."""
    basic_vs_cfg = gSAMPLE_CONFIG["BasicVS"]
    vs_obj = basic_vs_cfg["vs_obj"]
    headers = {
        'X-Avi-Version': login_info.get("api_version", gapi_version),
        'Authorization': 'Basic YWRtaW46YXZpMTIzJCU='
    }
    aviapi = ApiSession(controller_ip=login_info.get('controller_ip'),
                        username=login_info.get('username'),
                        api_version=login_info.get("api_version", gapi_version),
                        user_hdrs=headers)
    resp = aviapi.post('pool', data=json.dumps(basic_vs_cfg["pool_obj"]),
                       api_version=login_info.get("api_version"))
    assert resp.status_code in (200, 201)
    resp = aviapi.post('vsvip', data=json.dumps(basic_vs_cfg["vsvip_obj"]),
                       api_version=login_info.get("api_version"))
    assert resp.status_code in (200, 201)
    vs_obj["vsvip_ref"] = api.get_obj_ref(resp.json())
    resp = aviapi.post('virtualservice', data=json.dumps(vs_obj),
                       api_version=login_info.get("api_version"))
    # FIX: `print(resp.json)` printed the bound method object instead of
    # the decoded response body.
    print(resp.json())
    assert resp.status_code in (200, 201)
    pool_name = gSAMPLE_CONFIG["BasicVS"]["pool_obj"]["name"]
    vsvip_name = gSAMPLE_CONFIG["BasicVS"]["vsvip_obj"]["name"]
    resp = aviapi.get('virtualservice', tenant='admin',
                      api_version=login_info.get("api_version"))
    assert resp.json()['count'] >= 1
    assert resp.status_code in (200, 204)
    # Tear everything down in dependency order: VS, pool, vsvip.
    resp = aviapi.delete_by_name('virtualservice', vs_obj['name'],
                                 api_version=login_info.get("api_version"))
    assert resp.status_code in (200, 204)
    resp = aviapi.delete_by_name("pool", pool_name,
                                 api_version=login_info.get("api_version"))
    assert resp.status_code in (200, 204)
    resp = aviapi.delete_by_name("vsvip", vsvip_name,
                                 api_version=login_info.get("api_version"))
    assert resp.status_code in (200, 204)
#!/usr/bin/env python
# Author: Manmeet Singh
# Lists every virtual service with real-time metrics enabled and every
# virtual service with client insights enabled, plus totals.
from avi.sdk.avi_api import ApiSession

# NOTE(review): credentials are hard-coded; consider argparse or env vars.
session = ApiSession('10.79.111.0', 'admin', 'Avi12345!', api_version='18.2.6')
session.tenant = '*'

rt_vs = []
ci_vs = []
vs_obj_list = session.get(
    'virtualservice?join_subresources=runtime&page_size=-1').json()['results']
for vs in vs_obj_list:
    # FIX: a VS without an analytics_policy (or without the nested keys)
    # previously raised KeyError; treat missing keys as "disabled".
    policy = vs.get('analytics_policy', {})
    if policy.get('metrics_realtime_update', {}).get('enabled'):
        rt_vs.append(vs['name'])
    if policy.get('client_insights') in ('PASSIVE', 'ACTIVE'):
        ci_vs.append(vs['name'])

print('Total VS : ' + str(len(vs_obj_list)))
print('\n')
print('==============REALTIME METRICS==============')
print(rt_vs)
print('Number of VS with Real time Metrics are: ' + str(len(rt_vs)))
print('\n')
print('==============CLIENT INSIGHTS==============')
print(ci_vs)
print('Number of VS with Client Insights are: ' + str(len(ci_vs)))
def analyze_logs(api_session: ApiSession, start_date: datetime.datetime,
                 end_date: datetime.datetime, page_size: int, delay: float,
                 stats, args: dict):
    """Fetch all application logs for a VS between start_date and end_date
    in adaptive time windows, feed each entry to process_applog_entry, and
    finally print/aggregate the collected statistics.

    Returns True when at least as many entries were fetched as the API's
    initial count hint, False otherwise.

    FIX: the final "Download incomplete" message previously printed
    ``num_to_fetch`` (the count hint of the *last* time window) instead of
    ``total_to_fetch`` (the overall expectation the return value is
    actually compared against).
    """
    tenant = args.tenant
    vs_name = args.vs_name
    fields = args.fields
    top_n = args.top_n
    obfuscate_ips = not args.no_ip_obfuscation
    use_dns = args.dns

    uas = collections.defaultdict(DictOfInts)
    waf_rules = collections.defaultdict(int)
    # { "ARGS:foo" => { "count": 137, "values" => { "val1:" 3, "val2": 66 } } }
    match_elements = {}
    waf_hits = 0
    waf_elements = 0

    file_writer = ApiResponseWriter(args.logapiresponses)
    applog_writer = AppLogWriter(args.jsonfile, obfuscate_ips)

    path = "/analytics/logs/"
    num_fetched = 0
    total_to_fetch = -1
    fixed_options = "virtualservice={}&type=1&page_size={}".format(vs_name,
                                                                   page_size)
    fixed_options += "&udf=true&nf=true&orderby=report_timestamp"
    if fields:
        fixed_options += "&cols={}".format(fields)

    # Adaptive window: grow when a window is empty, shrink when it overflows.
    minutes_tofetch = 8
    max_minutes = 64
    min_minutes = 1
    orig_start_date = start_date
    while start_date < end_date:
        upper_end = min(start_date + datetime.timedelta(minutes=minutes_tofetch),
                        end_date)
        # make request for logs [start_date, upper_end]:
        time_options = "&start={}&end={}".format(
            # The API claims to expect ISO 8601 format, but rejects the
            # request if we use it (it cannot handle time zones):
            # start_date.isoformat(), end_date.isoformat()
            # The following works:
            "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(start_date),
            "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(upper_end)
        )
        query_options = fixed_options + time_options
        logging.debug("Query String: '{}'".format(query_options))

        result = api_session.get(path, tenant=tenant, params=query_options)
        file_writer.save_api_response(result.text)
        j = result.json()
        num_to_fetch = j['count']  # this is not 100% accurate, more a hint
        if total_to_fetch == -1:
            total_to_fetch = num_to_fetch
        if num_to_fetch == 0:
            # Empty window: advance and widen the window (up to max_minutes).
            start_date = upper_end
            if minutes_tofetch < max_minutes:
                minutes_tofetch *= 2
                logging.debug("Increased time window to {} minutes".format(
                    minutes_tofetch))
            continue
        if len(j['results']) == 0:
            # count > 0 but no rows returned: the API is misbehaving.
            applog_writer.close()
            raise LogResponseException(num_fetched)
        for applog in j['results']:
            hits, elements = process_applog_entry(applog, uas, stats,
                                                  waf_rules, match_elements)
            waf_hits += hits
            waf_elements += elements
            applog_writer.save_applog_chunk(applog)
        logging.debug(" First time stamp:{},\n last time stamp: {}".format(
            j['results'][0]['report_timestamp'],
            j['results'][-1]['report_timestamp']))
        num_fetched += len(j['results'])
        remaining_time = end_date - upper_end
        r_minutes = ceil(remaining_time.days * 24 * 60 +
                         remaining_time.seconds / 60)
        r_percent = ceil(100 * remaining_time / (end_date - orig_start_date))
        # Print one line of info to indicate progress, even if verbose is off:
        logging.info("Fetched {:8d} AppLog entries, {:4d} minutes remaining ({:2d}%)".
                     format(num_fetched, r_minutes, r_percent))
        if num_to_fetch > len(j['results']) and minutes_tofetch > min_minutes:
            minutes_tofetch /= 2
            logging.debug("Decreased time window to {} minutes".format(
                minutes_tofetch))
        # if there's a 'next' hint in the response, use it
        # note that due to a current limitation, for page_size = 10k, there
        # is never a 'next' link.
        while 'next' in j:
            # fetch page after page, update num_fetched, save result
            # (renamed from `next`, which shadowed the builtin)
            next_link = j['next']
            qs = next_link.split('?')[1]
            logging.debug("New query string from 'next': {}".format(qs))
            if delay > 0:
                sleep(delay)
            result = api_session.get(path, tenant=tenant, params=qs)
            file_writer.save_api_response(result.text)
            j = result.json()
            for applog in j['results']:
                hits, elements = process_applog_entry(applog, uas, stats,
                                                      waf_rules, match_elements)
                waf_hits += hits
                waf_elements += elements
                applog_writer.save_applog_chunk(applog)
            num_fetched += len(j['results'])
            logging.debug("Newly fetched: {}, total: {}".format(
                len(j['results']), num_fetched))
        logging.debug("No 'next' link, to fetch: {}, fetched: {}".format(
            num_to_fetch, num_fetched))
        # To avoid fetching the same us twice, we increment by 1 us (and risk
        # missing logs if there is more than 1 per us)
        start_date = j['results'][-1]['report_timestamp']
        start_date = datetime.datetime.fromisoformat(start_date) + \
            datetime.timedelta(microseconds=1)
        logging.debug("New start date: {}".format(start_date.isoformat()))
        if delay > 0:
            sleep(delay)

    if num_fetched >= total_to_fetch:
        print("Done")
    else:
        print("Download incomplete: Expected {}, got {} log lines".format(
            total_to_fetch, num_fetched))
    applog_writer.close()
    file_writer.close()
    if num_fetched > 0:
        statfile = args.outfile
        if statfile:
            if not statfile.endswith(".gz"):
                statfile += ".gz"
            out_fd = gzip.open(statfile, "xt")
        else:
            out_fd = sys.stdout
        show_results(top_n, obfuscate_ips, use_dns, num_fetched, uas, stats,
                     waf_rules, match_elements, waf_hits, waf_elements, out_fd)
    return num_fetched >= total_to_fetch
def get_crt(user, password, tenant, api_version, account_key, csr,
            CA=DEFAULT_CA, disable_check=False,
            directory_url=DEFAULT_DIRECTORY_URL, contact=None):
    """Obtain a certificate for *csr* via the ACME (RFC 8555) http-01
    challenge, serving the challenge response through an Avi virtual
    service instead of a filesystem webroot.

    For each domain in the CSR, the matching VS (found via its VSVIP fqdn)
    temporarily gets an HTTP security policy that answers the
    ``/.well-known/acme-challenge/<token>`` URL, plus a port-80 service if
    the VS does not already listen on 80.  Both are removed again in a
    ``finally`` block.  Returns the signed certificate PEM; raises on any
    challenge, order or OpenSSL failure.  A fresh ACME account key is
    generated and written to *account_key* on every call.
    """
    directory, acct_headers, alg, jwk = None, None, None, None  # global variables

    # helper functions - base64 encode for jose spec
    def _b64(b):
        return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")

    # helper function - run external commands
    def _cmd(cmd_list, stdin=None, cmd_input=None, err_msg="Command Line Error"):
        proc = subprocess.Popen(cmd_list, stdin=stdin, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate(cmd_input)
        if proc.returncode != 0:
            raise IOError("{0}\n{1}".format(err_msg, err))
        return out

    # helper function - make request and automatically parse json response
    def _do_request(url, data=None, err_msg="Error", depth=0):
        try:
            resp = urlopen(
                Request(url, data=data, headers={
                    "Content-Type": "application/jose+json",
                    "User-Agent": "acme-tiny"
                }))
            resp_data, code, headers = resp.read().decode(
                "utf8"), resp.getcode(), resp.headers
        except IOError as e:
            resp_data = e.read().decode("utf8") if hasattr(e, "read") else str(e)
            code, headers = getattr(e, "code", None), {}
        try:
            resp_data = json.loads(resp_data)  # try to parse json results
        except ValueError:
            pass  # ignore json parsing errors
        if depth < 100 and code == 400 and resp_data[
                'type'] == "urn:ietf:params:acme:error:badNonce":
            raise IndexError(resp_data)  # allow 100 retrys for bad nonces
        if code not in [200, 201, 204]:
            raise ValueError(
                "{0}:\nUrl: {1}\nData: {2}\nResponse Code: {3}\nResponse: {4}".
                format(err_msg, url, data, code, resp_data))
        return resp_data, code, headers

    # helper function - make signed requests (JWS per RFC 8555 section 6.2)
    def _send_signed_request(url, payload, err_msg, depth=0):
        payload64 = "" if payload is None else _b64(
            json.dumps(payload).encode('utf8'))
        new_nonce = _do_request(directory['newNonce'])[2]['Replay-Nonce']
        protected = {"url": url, "alg": alg, "nonce": new_nonce}
        # Before account creation we sign with the raw jwk; afterwards with
        # the account URL ("kid").
        protected.update({"jwk": jwk} if acct_headers is None else
                         {"kid": acct_headers['Location']})
        protected64 = _b64(json.dumps(protected).encode('utf8'))
        protected_input = "{0}.{1}".format(protected64, payload64).encode('utf8')
        out = _cmd(["openssl", "dgst", "-sha256", "-sign", account_key],
                   stdin=subprocess.PIPE, cmd_input=protected_input,
                   err_msg="OpenSSL Error")
        data = json.dumps({
            "protected": protected64,
            "payload": payload64,
            "signature": _b64(out)
        })
        try:
            return _do_request(url, data=data.encode('utf8'), err_msg=err_msg,
                               depth=depth)
        except IndexError:  # retry bad nonces (they raise IndexError)
            return _send_signed_request(url, payload, err_msg, depth=(depth + 1))

    # helper function - poll until complete
    def _poll_until_not(url, pending_statuses, err_msg):
        result, t0 = None, time.time()
        while result is None or result['status'] in pending_statuses:
            assert (time.time() - t0 < 3600), "Polling timeout"  # 1 hour timeout
            time.sleep(0 if result is None else 2)
            result, _, _ = _send_signed_request(url, None, err_msg)
        return result

    # Session to the local Avi controller used to mutate the VS config.
    session = ApiSession('localhost', user, password, tenant=tenant,
                         api_version=api_version)

    # NOTE(review): this overwrites any existing account key on every run —
    # each invocation registers a brand new ACME account.
    log.info("Generating account key...")
    out = _cmd(["openssl", "genrsa", "4096"], err_msg="OpenSSL Error")
    with open(account_key, 'w') as f:
        f.write(out.decode("utf-8"))

    # parse account key to get public key
    log.info("Parsing account key...")
    out = _cmd(["openssl", "rsa", "-in", account_key, "-noout", "-text"],
               err_msg="OpenSSL Error")
    pub_pattern = r"modulus:[\s]+?00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)"
    pub_hex, pub_exp = re.search(pub_pattern, out.decode('utf8'),
                                 re.MULTILINE | re.DOTALL).groups()
    pub_exp = "{0:x}".format(int(pub_exp))
    pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
    alg = "RS256"
    jwk = {
        "e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
        "kty": "RSA",
        "n": _b64(binascii.unhexlify(
            re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))),
    }
    accountkey_json = json.dumps(jwk, sort_keys=True, separators=(',', ':'))
    thumbprint = _b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())

    # find domains: CN plus all DNS subjectAltNames from the CSR
    log.info("Parsing CSR...")
    out = _cmd(["openssl", "req", "-in", csr, "-noout", "-text"],
               err_msg="Error loading {0}".format(csr))
    domains = set([])
    common_name = re.search(r"Subject:.*? CN\s?=\s?([^\s,;/]+)",
                            out.decode('utf8'))
    if common_name is not None:
        domains.add(common_name.group(1))
    subject_alt_names = re.search(
        r"X509v3 Subject Alternative Name: (?:critical)?\n +([^\n]+)\n",
        out.decode('utf8'), re.MULTILINE | re.DOTALL)
    if subject_alt_names is not None:
        for san in subject_alt_names.group(1).split(", "):
            if san.startswith("DNS:"):
                domains.add(san[4:])
    log.info("Found domains: {0}".format(", ".join(domains)))

    # get the ACME directory of urls
    log.info("Getting directory...")
    directory_url = CA + "/directory" if CA != DEFAULT_CA else directory_url  # backwards compatibility with deprecated CA kwarg
    directory, _, _ = _do_request(directory_url,
                                  err_msg="Error getting directory")
    log.info("Directory found!")

    # create account, update contact details (if any), and set the global key identifier
    log.info("Registering account...")
    reg_payload = {"termsOfServiceAgreed": True}
    account, code, acct_headers = _send_signed_request(directory['newAccount'],
                                                       reg_payload,
                                                       "Error registering")
    log.info("Registered!" if code == 201 else "Already registered!")
    if contact is not None:
        account, _, _ = _send_signed_request(acct_headers['Location'],
                                             {"contact": contact},
                                             "Error updating contact details")
        log.info("Updated contact details:\n{0}".format("\n".join(
            account['contact'])))

    # create a new order
    log.info("Creating new order...")
    order_payload = {
        "identifiers": [{
            "type": "dns",
            "value": d
        } for d in domains]
    }
    order, _, order_headers = _send_signed_request(directory['newOrder'],
                                                   order_payload,
                                                   "Error creating new order")
    log.info("Order created!")

    # get the authorizations that need to be completed
    for auth_url in order['authorizations']:
        authorization, _, _ = _send_signed_request(auth_url, None,
                                                   "Error getting challenges")
        domain = authorization['identifier']['value']
        log.info("Verifying {0}...".format(domain))

        # find the http-01 challenge and write the challenge file
        challenge = [
            c for c in authorization['challenges'] if c['type'] == "http-01"
        ][0]
        token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
        keyauthorization = "{0}.{1}".format(token, thumbprint)

        # Update vs: locate the VSVIP by fqdn, then the VS using that VSVIP.
        rsp = session.get("vsvip/?search=(fqdn,{})".format(domain)).json()
        if rsp["count"] == 0:
            raise Exception(
                "Could not find a VSVIP with fqdn = {}".format(domain))
        vsvip_uuid = rsp["results"][0]["uuid"]
        rsp = session.get(
            "virtualservice?search=(vsvip_ref,{})".format(vsvip_uuid)).json()
        if rsp['count'] == 0:
            raise Exception(
                "Could not find a VS with common name = {}".format(domain))
        vs_uuid = rsp["results"][0]["uuid"]
        log.info("Found vs {} with fqdn {}".format(vs_uuid, domain))

        # Check if the vs is servering on port 80
        # NOTE(review): compares service["port"] against the string "80";
        # if the API returns ports as integers this never matches — confirm.
        serving_on_port_80 = False
        service_on_port_80_data = None
        for service in rsp["results"][0]["services"]:
            if service["port"] == "80":
                serving_on_port_80 = True
                break

        # create HTTP policy that serves the key authorization for the
        # ACME challenge path on port 80
        httppolicy_data = {
            "name": (domain + "LetsEncryptHTTPpolicy"),
            "http_security_policy": {
                "rules": [{
                    "name": "Rule 1",
                    "index": 1,
                    "enable": True,
                    "match": {
                        "vs_port": {
                            "match_criteria": "IS_IN",
                            "ports": [80]
                        },
                        "path": {
                            "match_criteria": "CONTAINS",
                            "match_case": "SENSITIVE",
                            "match_str": [".well-known/acme-challenge/{}".format(token)]
                        }
                    },
                    "action": {
                        "action": "HTTP_SECURITY_ACTION_SEND_RESPONSE",
                        "status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200",
                        "file": {
                            "content_type": "text/plain",
                            "file_content": keyauthorization
                        }
                    }
                }]
            },
            "is_internal_policy": False
        }
        rsp = session.post("httppolicyset", data=httppolicy_data).json()
        httppolicy_uuid = rsp["uuid"]
        log.info("Created HTTP policy with uuid {}".format(httppolicy_uuid))
        patch_data = {
            "add": {
                "http_policies": [{
                    "http_policy_set_ref":
                    "/api/httppolicyset/{}".format(httppolicy_uuid),
                    "index": 1000001
                }]
            }
        }
        if not serving_on_port_80:
            # Add to port to virtualservice
            service_on_port_80_data = {
                "enable_http2": False,
                "enable_ssl": False,
                "port": 80,
                "port_range_end": 80
            }
            patch_data["add"]["services"] = [service_on_port_80_data]
        rsp = session.patch("virtualservice/{}".format(vs_uuid), patch_data)

        exception_occured = None
        try:
            # check that the file is in place
            try:
                wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(
                    domain, token)
                assert (disable_check
                        or _do_request(wellknown_url)[0] == keyauthorization)
            except (AssertionError, ValueError) as e:
                raise ValueError(
                    "Wrote file to {0}, but couldn't download {1}: {2}".format(
                        'wellknown_path', 'wellknown_url', e))

            # say the challenge is done
            _send_signed_request(
                challenge['url'], {},
                "Error submitting challenges: {0}".format(domain))
            authorization = _poll_until_not(
                auth_url, ["pending"],
                "Error checking challenge status for {0}".format(domain))
            if authorization['status'] != "valid":
                raise ValueError("Challenge did not pass for {0}: {1}".format(
                    domain, authorization))
        except Exception as e:
            exception_occured = str(e)
        finally:
            # Update the vs: always remove the temporary policy (and the
            # temporary port-80 service) even when the challenge failed.
            patch_data = {
                "delete": {
                    "http_policies": [{
                        "http_policy_set_ref":
                        "/api/httppolicyset/{}".format(httppolicy_uuid),
                        "index": 1000001
                    }]
                }
            }
            if not serving_on_port_80:
                patch_data["delete"]["services"] = [service_on_port_80_data]
            rsp = session.patch("virtualservice/{}".format(vs_uuid), patch_data)
            rsp = session.delete("httppolicyset/{}".format(httppolicy_uuid))

        if exception_occured:
            log.error(exception_occured)
            raise Exception(exception_occured)

        log.info("{0} verified!".format(domain))

    # finalize the order with the csr
    log.info("Signing certificate...")
    csr_der = _cmd(["openssl", "req", "-in", csr, "-outform", "DER"],
                   err_msg="DER Export Error")
    _send_signed_request(order['finalize'], {"csr": _b64(csr_der)},
                         "Error finalizing order")

    # poll the order to monitor when it's done
    order = _poll_until_not(order_headers['Location'],
                            ["pending", "processing"],
                            "Error checking order status")
    if order['status'] != "valid":
        raise ValueError("Order failed: {0}".format(order))

    # download the certificate
    certificate_pem, _, _ = _send_signed_request(
        order['certificate'], None, "Certificate download failed")
    log.info("Certificate signed!")
    return certificate_pem