def put(self): """This method handles the PUT requests to add agents to the Cloud Verifier. Currently, only agents resources are available for PUTing, i.e. /agents. All other PUT uri's will return errors. """ rest_params = config.get_restful_params(self.request.uri) if rest_params is None: config.echo_json_response( self, 405, "Not Implemented: Use /agents/ interface") return if "agents" not in rest_params: config.echo_json_response(self, 400, "uri not supported") logger.warning('PUT returning 400 response. uri not supported: %s', self.request.path) return agent_id = rest_params["agents"] # let Tenant do dirty work of reactivating agent mytenant = tenant.Tenant() mytenant.agent_uuid = agent_id mytenant.do_cvreactivate() config.echo_json_response(self, 200, "Success")
def delete(self): """This method handles the DELETE requests to remove agents from the Cloud Verifier. Currently, only agents resources are available for DELETEing, i.e. /agents. All other DELETE uri's will return errors. agents requests require a single agent_id parameter which identifies the agent to be deleted. """ rest_params = config.get_restful_params(self.request.uri) if rest_params is None: config.echo_json_response( self, 405, "Not Implemented: Use /agents/ interface") return if "agents" not in rest_params: config.echo_json_response(self, 400, "uri not supported") logger.warning( 'DELETE returning 400 response. uri not supported: %s', self.request.path) return agent_id = rest_params["agents"] # let Tenant do dirty work of deleting agent mytenant = tenant.Tenant() mytenant.agent_uuid = agent_id mytenant.do_cvdelete() config.echo_json_response(self, 200, "Success")
def put(self): """This method handles the PUT requests to add agents to the Cloud Verifier. Currently, only agents resources are available for PUTing, i.e. /agents. All other PUT uri's will return errors. """ rest_params = web_util.get_restful_params(self.request.uri) if rest_params is None: web_util.echo_json_response( self, 405, "Not Implemented: Use /agents/ interface") return if "agents" not in rest_params: web_util.echo_json_response(self, 400, "uri not supported") logger.warning('PUT returning 400 response. uri not supported: %s', self.request.path) return agent_id = rest_params["agents"] # If the agent ID is not valid (wrong set of characters), just # do nothing. if not validators.valid_agent_id(agent_id): web_util.echo_json_response(self, 400, "agent_id not not valid") logger.error("PUT received an invalid agent ID: %s", agent_id) return # let Tenant do dirty work of reactivating agent mytenant = tenant.Tenant() mytenant.agent_uuid = agent_id mytenant.do_cvreactivate() web_util.echo_json_response(self, 200, "Success")
def setUpModule():
    try:
        env = os.environ.copy()
        env['PATH'] = env['PATH'] + ":/usr/local/bin"
        # Run init_tpm_server and tpm_serverd (start fresh)
        its = subprocess.Popen(["init_tpm_server"], shell=False, env=env)
        its.wait()
        tsd = subprocess.Popen(["tpm_serverd"], shell=False, env=env)
        tsd.wait()
    except Exception as e:
        print("WARNING: Restarting TPM emulator failed!")

    # Note: the following is required as abrmd is failing to reconnect to MSSIM, once
    # MSSIM is killed and restarted. If this proves to be an actual bug and is
    # fixed upstream, the following dbus restart call can be removed.
    try:
        sysbus = dbus.SystemBus()
        systemd1 = sysbus.get_object('org.freedesktop.systemd1', '/org/freedesktop/systemd1')
        manager = dbus.Interface(systemd1, 'org.freedesktop.systemd1.Manager')
        # If the systemd service exists, let's restart it.
        for service in sysbus.list_names():
            if "com.intel.tss2.Tabrmd" in service:
                print("Found dbus service:", str(service))
                try:
                    print("Restarting tpm2-abrmd.service.")
                    job = manager.RestartUnit('tpm2-abrmd.service', 'fail')
                except dbus.exceptions.DBusException as e:
                    print(e)
    except Exception as e:
        print("Non systemd agent detected, no tpm2-abrmd restart required.")

    try:
        # Start with a clean slate for this test
        fileRemove(common.WORK_DIR + "/tpmdata.yaml")
        fileRemove(common.WORK_DIR + "/cv_data.sqlite")
        fileRemove(common.WORK_DIR + "/reg_data.sqlite")
        shutil.rmtree(common.WORK_DIR + "/cv_ca", True)
    except Exception as e:
        print("WARNING: Cleanup of TPM files failed!")

    # CV must be run first to create CA and certs!
    launch_cloudverifier()
    launch_registrar()
    # launch_cloudagent()

    # get the tpm object
    global tpm
    try:
        tpm = tpm_obj.getTPM(need_hw_tpm=True)
    except Exception as e:
        print("Error: %s" % e)

    # Make the Tenant do a lot of set-up work for us
    global tenant_templ
    tenant_templ = tenant.Tenant()
    tenant_templ.cloudagent_ip = "localhost"
    tenant_templ.agent_uuid = config.get('cloud_agent', 'agent_uuid')
    tenant_templ.registrar_boot_port = config.get('registrar', 'registrar_port')
    tenant_templ.registrar_tls_boot_port = config.get('registrar', 'registrar_tls_port')
def setUpModule():
    try:
        env = os.environ.copy()
        env["PATH"] = env["PATH"] + ":/usr/local/bin"
        # Run init_tpm_server and tpm_serverd (start fresh)
        with subprocess.Popen(["init_tpm_server"], shell=False, env=env) as its:
            its.wait()
        with subprocess.Popen(["tpm_serverd"], shell=False, env=env) as tsd:
            tsd.wait()
    except Exception:
        print("WARNING: Restarting TPM emulator failed!")

    # Note: the following is required as abrmd is failing to reconnect to MSSIM, once
    # MSSIM is killed and restarted. If this proves to be an actual bug and is
    # fixed upstream, the following dbus restart call can be removed.
    try:
        sysbus = dbus.SystemBus()
        systemd1 = sysbus.get_object("org.freedesktop.systemd1", "/org/freedesktop/systemd1")
        manager = dbus.Interface(systemd1, "org.freedesktop.systemd1.Manager")
        # If the systemd service exists, let's restart it.
        for service in sysbus.list_names():
            if "com.intel.tss2.Tabrmd" in service:
                print("Found dbus service:", str(service))
                try:
                    print("Restarting tpm2-abrmd.service.")
                    manager.RestartUnit("tpm2-abrmd.service", "fail")
                except dbus.exceptions.DBusException as e:
                    print(e)
    except Exception:
        print("Non systemd agent detected, no tpm2-abrmd restart required.")

    try:
        # Start with a clean slate for this test
        fileRemove(config.WORK_DIR + "/tpmdata.yaml")
        fileRemove(config.WORK_DIR + "/cv_data.sqlite")
        fileRemove(config.WORK_DIR + "/reg_data.sqlite")
        shutil.rmtree(config.WORK_DIR + "/cv_ca", True)
    except Exception:
        print("WARNING: Cleanup of TPM files failed!")

    # CV must be run first to create CA and certs!
    launch_cloudverifier()
    launch_registrar()
    # launch_cloudagent()

    # Make the Tenant do a lot of set-up work for us
    global tenant_templ
    tenant_templ = tenant.Tenant()
    tenant_templ.agent_uuid = config.get("cloud_agent", "agent_uuid")
    tenant_templ.cloudagent_ip = "localhost"
    tenant_templ.cloudagent_port = config.get("cloud_agent", "cloudagent_port")
    tenant_templ.verifier_ip = config.get("cloud_verifier", "cloudverifier_ip")
    tenant_templ.verifier_port = config.get("cloud_verifier", "cloudverifier_port")
    tenant_templ.registrar_ip = config.get("registrar", "registrar_ip")
    tenant_templ.registrar_boot_port = config.get("registrar", "registrar_port")
    tenant_templ.registrar_tls_boot_port = config.get("registrar", "registrar_tls_port")
    tenant_templ.registrar_base_url = f"{tenant_templ.registrar_ip}:{tenant_templ.registrar_boot_port}"
    tenant_templ.registrar_base_tls_url = f"{tenant_templ.registrar_ip}:{tenant_templ.registrar_tls_boot_port}"
    tenant_templ.agent_base_url = f"{tenant_templ.cloudagent_ip}:{tenant_templ.cloudagent_port}"
    tenant_templ.supported_version = "2.0"
    # Set up TLS
    tenant_templ.cert, tenant_templ.agent_cert, _ = tenant_templ.get_tls_context()
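Once setUpModule() has populated tenant_templ, individual tests can reuse that connection data directly. The sketch below shows one hypothetical test along those lines; the RequestsClient call signature, the versioned /v2.0/agents/... path, and the cert/verify keyword arguments are assumptions about how that wrapper is used, not something this snippet defines.

from keylime.requests_client import RequestsClient


def test_registrar_reachable():
    # Hypothetical sketch: ask the registrar (over its TLS port) about the test
    # agent using the data prepared in setUpModule(). The URL layout and the
    # keyword arguments passed to get() are assumptions.
    client = RequestsClient(tenant_templ.registrar_base_tls_url, True)
    response = client.get(
        f"/v{tenant_templ.supported_version}/agents/{tenant_templ.agent_uuid}",
        cert=tenant_templ.cert, verify=False)
    # 404 is acceptable here: it only means the agent has not registered yet.
    assert response.status_code in (200, 404)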
def post(self): """This method handles the POST requests to add agents to the Cloud Verifier. Currently, only agents resources are available for POSTing, i.e. /agents. All other POST uri's will return errors. agents requests require a yaml block sent in the body """ rest_params = config.get_restful_params(self.request.uri) if rest_params is None: config.echo_json_response( self, 405, "Not Implemented: Use /agents/ interface") return if "agents" not in rest_params: config.echo_json_response(self, 400, "uri not supported") logger.warning( 'POST returning 400 response. uri not supported: %s', self.request.path) return agent_id = rest_params["agents"] # Parse payload files (base64 data-uri) if self.get_argument("ptype", Agent_Init_Types.FILE, True) == Agent_Init_Types.FILE: keyfile = None payload = None data = { 'data': parse_data_uri(self.get_argument("file_data", None, True)) } ca_dir = None incl_dir = None ca_dir_pw = None elif self.get_argument("ptype", Agent_Init_Types.FILE, True) == Agent_Init_Types.KEYFILE: keyfile = { 'data': parse_data_uri(self.get_argument("keyfile_data", None, True)), } payload = { 'data': parse_data_uri(self.get_argument("file_data", None, True)) } data = None ca_dir = None incl_dir = None ca_dir_pw = None elif self.get_argument("ptype", Agent_Init_Types.FILE, True) == Agent_Init_Types.CA_DIR: keyfile = None payload = None data = None incl_dir = { 'data': parse_data_uri( self.get_argument("include_dir_data", None, True)), 'name': self.get_argument("include_dir_name", "", True).splitlines() } ca_dir = self.get_argument("ca_dir", 'default', True) if ca_dir == "": ca_dir = 'default' ca_dir_pw = self.get_argument("ca_dir_pw", 'default', True) if ca_dir_pw == "": ca_dir_pw = 'default' else: config.echo_json_response(self, 400, "invalid payload type chosen") logger.warning('POST returning 400 response. malformed query') return # Pull in user-defined v/TPM policies tpm_policy = self.get_argument("tpm_policy", "", True) if tpm_policy == "": tpm_policy = None vtpm_policy = self.get_argument("vtpm_policy", "", True) if vtpm_policy == "": vtpm_policy = None # Pull in allowlist allowlist = None a_list_data = self.get_argument("a_list_data", None, True) if a_list_data != "": allowlist_str = parse_data_uri(a_list_data) if allowlist_str is not None: allowlist = allowlist_str[0].splitlines() # Pull in IMA exclude list ima_exclude = None e_list_data = self.get_argument("e_list_data", None, True) if e_list_data != "": ima_exclude_str = parse_data_uri(e_list_data) if ima_exclude_str is not None: ima_exclude = ima_exclude_str[0].splitlines() # Build args to give to Tenant's init_add method args = { 'agent_ip': self.get_argument("agent_ip", None, True), 'file': data, 'keyfile': keyfile, 'payload': payload, 'ca_dir': ca_dir, 'incl_dir': incl_dir, 'ca_dir_pw': ca_dir_pw, 'tpm_policy': tpm_policy, 'vtpm_policy': vtpm_policy, 'allowlist': allowlist, 'ima_exclude': ima_exclude, } # let Tenant do dirty work of adding agent try: mytenant = tenant.Tenant() mytenant.agent_uuid = agent_id mytenant.init_add(args) mytenant.preloop() mytenant.do_cv() mytenant.do_quote() except Exception as e: logger.exception(e) logger.warning('POST returning 500 response. Tenant error: %s', e) config.echo_json_response(self, 500, "Request failure", str(e)) return config.echo_json_response(self, 200, "Success")
import ssl
import traceback
import sys

import simplejson as json

import tornado.ioloop
import tornado.web

from keylime.requests_client import RequestsClient
from keylime.common import states
from keylime import config
from keylime import keylime_logging
from keylime import tenant

logger = keylime_logging.init_logging('tenant_webapp')

tenant_templ = tenant.Tenant()
my_cert, my_priv_key = tenant_templ.get_tls_context()
cert = (my_cert, my_priv_key)
if config.getboolean('general', "enable_tls"):
    tls_enabled = True
else:
    tls_enabled = False
    cert = ""
    logger.warning(
        "Warning: TLS is currently disabled, keys will be sent in the clear! This should only be used for testing."
    )

verifier_ip = config.get('cloud_verifier', 'cloudverifier_ip')
verifier_port = config.get('cloud_verifier', 'cloudverifier_port')
verifier_base_url = f'{verifier_ip}:{verifier_port}'
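These module-level values (cert, tls_enabled, verifier_base_url) are what the webapp uses when querying the verifier. The helper below is a minimal sketch of that pattern, assuming RequestsClient takes a base URL plus a tls_enabled flag and forwards standard requests keyword arguments, and that the verifier wraps its answer in a 'results' field; none of those details are defined by this snippet.

def get_agent_state(agent_id):
    # Hypothetical helper: query the verifier for one agent's state using the
    # connection data prepared above. The RequestsClient interface and the
    # 'results' envelope in the response body are assumptions.
    verifier = RequestsClient(verifier_base_url, tls_enabled)
    response = verifier.get(f'/agents/{agent_id}', cert=cert, verify=False)
    if response.status_code != 200:
        logger.warning('Verifier returned %s for agent %s',
                       response.status_code, agent_id)
        return None
    return response.json().get('results', {})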