class KeychainCredentialService(CRUDService):

    class Config:
        datastore = "system.keychaincredential"
        cli_namespace = "system.keychain_credential"

    @accepts(Dict(
        "keychain_credential_create",
        Str("name", required=True),
        Str("type", required=True),
        Dict("attributes", additional_attrs=True, required=True, private=True),
        register=True,
    ))
    async def do_create(self, data):
        """
        Create a Keychain Credential

        Create a Keychain Credential of any type.
        Every Keychain Credential has a `name` which is used to distinguish it from others.
        The following `type`s are supported:
         * `SSH_KEY_PAIR`
           Its `attributes` are:
           * `private_key`
           * `public_key` (which can be omitted and thus automatically derived from the private key)
           At least one attribute is required.

         * `SSH_CREDENTIALS`
           Its `attributes` are:
           * `host`
           * `port` (default 22)
           * `username` (default root)
           * `private_key` (Keychain Credential ID)
           * `remote_host_key` (you can use `keychaincredential.remote_ssh_host_key_scan` to discover it)
           * `cipher`: one of `STANDARD`, `FAST`, or `DISABLED` (the last requires special support from both the
             SSH server and client)
           * `connect_timeout` (default 10)

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.create",
              "params": [{
                  "name": "Work SSH connection",
                  "type": "SSH_CREDENTIALS",
                  "attributes": {
                      "host": "work.freenas.org",
                      "private_key": 12,
                      "remote_host_key": "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMn1VjdSMatGnxbOsrneKyai+dh6d4Hm"
                  }
              }]
          }
        """
        await self._validate("keychain_credential_create", data)

        data["id"] = await self.middleware.call(
            "datastore.insert",
            self._config.datastore,
            data,
        )
        return data

    @accepts(Int("id"), Patch(
        "keychain_credential_create",
        "keychain_credential_update",
        ("attr", {"update": True}),
        ("rm", {"name": "type"}),
    ))
    async def do_update(self, id, data):
        """
        Update a Keychain Credential with specific `id`

        Please note that you can't change `type`.

        Also, you must specify the full `attributes` value.

        See the documentation for the `create` method for information on payload contents.

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.update",
              "params": [
                  13,
                  {
                      "name": "Work SSH connection",
                      "attributes": {
                          "host": "work.ixsystems.com",
                          "private_key": 12,
                          "remote_host_key": "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMn1VjdSMatGnxbOsrneKyai+dh6d4Hm"
                      }
                  }
              ]
          }
        """
        old = await self._get_instance(id)

        new = old.copy()
        new.update(data)

        await self._validate("keychain_credential_update", new, id)

        await self.middleware.call(
            "datastore.update",
            self._config.datastore,
            id,
            new,
        )

        if new["type"] in ["SSH_KEY_PAIR", "SSH_CREDENTIALS"]:
            await self.middleware.call("zettarepl.update_tasks")

        return new

    @accepts(Int("id"), Dict("options", Bool("cascade", default=False)))
    async def do_delete(self, id, options):
        """
        Delete Keychain Credential with specific `id`

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.delete",
              "params": [
                  13
              ]
          }
        """
        instance = await self._get_instance(id)

        for delegate in TYPES[instance["type"]].used_by_delegates:
            delegate = delegate(self.middleware)
            for row in await delegate.query(instance["id"]):
                if not options["cascade"]:
                    raise CallError("This credential is used and no cascade option is specified")

                await delegate.unbind(row)

        await self.middleware.call(
            "datastore.delete",
            self._config.datastore,
            id,
        )

    @accepts(Int("id"))
    async def used_by(self, id):
        """
        Returns a list of objects that use this credential.
        """
        instance = await self._get_instance(id)

        result = []
        for delegate in TYPES[instance["type"]].used_by_delegates:
            delegate = delegate(self.middleware)
            for row in await delegate.query(instance["id"]):
                result.append({
                    "title": await delegate.get_title(row),
                    "unbind_method": delegate.unbind_method.value,
                })
                if isinstance(delegate, OtherKeychainCredentialKeychainCredentialUsedByDelegate):
                    result.extend(await self.middleware.call("keychaincredential.used_by", row["id"]))
        return result

    async def _validate(self, schema_name, data, id=None):
        verrors = ValidationErrors()

        await self._ensure_unique(verrors, schema_name, "name", data["name"], id)

        if data["type"] not in TYPES:
            verrors.add(f"{schema_name}.type", "Invalid type")
            raise verrors
        else:
            type = TYPES[data["type"]]

            attributes_verrors = validate_attributes(type.credentials_schema, data)
            verrors.add_child(f"{schema_name}.attributes", attributes_verrors)

        if verrors:
            raise verrors

        await type.validate_and_pre_save(self.middleware, verrors, f"{schema_name}.attributes", data["attributes"])

        if verrors:
            raise verrors

    @private
    @accepts(Int("id"), Str("type"))
    async def get_of_type(self, id, type):
        try:
            credential = await self.middleware.call("keychaincredential.query", [["id", "=", id]], {"get": True})
        except IndexError:
            raise CallError("Credential does not exist", errno.ENOENT)
        else:
            if credential["type"] != type:
                raise CallError(f"Credential is not of type {type}", errno.EINVAL)

            if not credential["attributes"]:
                raise CallError(f"Decrypting credential {credential['name']} failed", errno.EFAULT)

            return credential

    @accepts()
    def generate_ssh_key_pair(self):
        """
        Generate a public/private key pair

        Generate a public/private key pair (useful for the `SSH_KEY_PAIR` type)

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.generate_ssh_key_pair",
              "params": []
          }
        """
        key = os.path.join("/tmp", "".join(random.choice(string.ascii_letters) for _ in range(32)))
        if os.path.exists(key):
            os.unlink(key)
        if os.path.exists(f"{key}.pub"):
            os.unlink(f"{key}.pub")
        try:
            subprocess.check_call(["ssh-keygen", "-t", "rsa", "-f", key, "-N", "", "-q"])
            with open(key) as f:
                private_key = f.read()
            with open(f"{key}.pub") as f:
                public_key = f.read()
        finally:
            if os.path.exists(key):
                os.unlink(key)
            if os.path.exists(f"{key}.pub"):
                os.unlink(f"{key}.pub")

        return {
            "private_key": private_key,
            "public_key": public_key,
        }

    @accepts(Dict(
        "keychain_remote_ssh_host_key_scan",
        Str("host", required=True, empty=False),
        Str("port", default=22),
        Int("connect_timeout", default=10),
    ))
    async def remote_ssh_host_key_scan(self, data):
        """
        Discover a remote host key

        Discover a remote host key (useful for `SSH_CREDENTIALS`)

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.remote_ssh_host_key_scan",
              "params": [{
                  "host": "work.freenas.org"
              }]
          }
        """
        proc = await run(
            ["ssh-keyscan", "-p", str(data["port"]), "-T", str(data["connect_timeout"]), data["host"]],
            check=False, encoding="utf8",
        )
        if proc.returncode == 0:
            if proc.stdout:
                try:
                    return process_ssh_keyscan_output(proc.stdout)
                except Exception:
                    raise CallError(f"ssh-keyscan failed: {proc.stdout + proc.stderr}") from None
            elif proc.stderr:
                raise CallError(f"ssh-keyscan failed: {proc.stderr}")
            else:
                raise CallError("SSH timeout")
        else:
            raise CallError(f"ssh-keyscan failed: {proc.stdout + proc.stderr}")

    @accepts(Dict(
        "keychain_remote_ssh_semiautomatic_setup",
        Str("name", required=True),
        Str("url", required=True, validators=[URL()]),
        Str("token", private=True),
        Str("password", private=True),
        Str("username", default="root"),
        Int("private_key", required=True),
        Str("cipher", enum=["STANDARD", "FAST", "DISABLED"], default="STANDARD"),
        Int("connect_timeout", default=10),
    ))
    def remote_ssh_semiautomatic_setup(self, data):
        """
        Perform semi-automatic SSH connection setup with another FreeNAS machine

        Perform semi-automatic SSH connection setup with another FreeNAS machine. It creates an `SSH_CREDENTIALS`
        credential with the specified `name` that can be used to connect to the FreeNAS machine with the specified
        `url` and temporary auth `token`. The other FreeNAS machine adds the public key of the given `private_key`
        keypair to `username`'s authorized keys. Other `SSH_CREDENTIALS` attributes such as `cipher` and
        `connect_timeout` can be specified as well.

        .. examples(websocket)::

          :::javascript
          {
              "id": "6841f242-840a-11e6-a437-00e04d680384",
              "msg": "method",
              "method": "keychaincredential.remote_ssh_semiautomatic_setup",
              "params": [{
                  "name": "Work SSH connection",
                  "url": "https://work.freenas.org",
                  "token": "8c8d5fd1-f749-4429-b379-9c186db4f834",
                  "private_key": 12
              }]
          }
        """
        replication_key = self.middleware.run_coroutine(
            get_ssh_key_pair_with_private_key(self.middleware, data["private_key"]))

        try:
            client = Client(os.path.join(re.sub("^http", "ws", data["url"]), "websocket"))
        except Exception as e:
            raise CallError(f"Unable to connect to remote system: {e}")

        with client as c:
            if data.get("token"):
                if not c.call("auth.token", data["token"]):
                    raise CallError("Invalid token")
            elif data.get("password"):
                if not c.call("auth.login", "root", data["password"]):
                    raise CallError("Invalid password")
            else:
                raise CallError("You should specify either the remote system password or a temporary authentication token")

            try:
                response = c.call("replication.pair", {
                    "hostname": "any-host",
                    "public-key": replication_key["attributes"]["public_key"],
                    "user": data["username"],
                })
            except Exception as e:
                raise CallError(f"Semi-automatic SSH connection setup failed: {e!r}")

        return self.middleware.call_sync("keychaincredential.create", {
            "name": data["name"],
            "type": "SSH_CREDENTIALS",
            "attributes": {
                "host": urllib.parse.urlparse(data["url"]).hostname,
                "port": response["ssh_port"],
                "username": data["username"],
                "private_key": replication_key["id"],
                "remote_host_key": process_ssh_keyscan_output(response["ssh_hostkey"]),
                "cipher": data["cipher"],
                "connect_timeout": data["connect_timeout"],
            }
        })

    @private
    @accepts(Dict(
        "keychain_ssh_pair",
        Str("remote_hostname", required=True),
        Str("username", default="root"),
        Str("public_key", required=True),
    ))
    async def ssh_pair(self, data):
        """
        Receives a public key, stores it so that SSH connections using it are accepted, and returns
        the pertinent SSH data of this machine.
        """
        service = await self.middleware.call("service.query", [("service", "=", "ssh")], {"get": True})
        ssh = await self.middleware.call("ssh.config")
        try:
            user = await self.middleware.call("user.query", [("username", "=", data["username"])], {"get": True})
        except IndexError:
            raise CallError(f"User {data['username']} does not exist")

        if user["home"].startswith("/nonexistent") or not os.path.exists(user["home"]):
            raise CallError(f"Home directory {user['home']} does not exist", errno.ENOENT)

        # Make sure SSH is enabled
        if not service["enable"]:
            await self.middleware.call("service.update", "ssh", {"enable": True})
            await self.middleware.call("service.start", "ssh")

            # This might be the first time the service is being enabled,
            # which will then result in new host keys we need to grab
            ssh = await self.middleware.call("ssh.config")

        # If the .ssh dir does not exist, create it
        dotsshdir = os.path.join(user["home"], ".ssh")
        if not os.path.exists(dotsshdir):
            os.mkdir(dotsshdir)
            os.chown(dotsshdir, user["uid"], user["group"]["bsdgrp_gid"])

        # Write the public key to the user's authorized_keys for SSH
        authorized_keys_file = f"{dotsshdir}/authorized_keys"
        with open(authorized_keys_file, "a+") as f:
            f.seek(0)
            if data["public_key"] not in f.read():
                f.write("\n" + data["public_key"] + "\n")

        ssh_hostkey = "{0} {1}\n{0} {2}\n{0} {3}\n".format(
            data["remote_hostname"],
            base64.b64decode(ssh["host_rsa_key_pub"].encode()).decode(),
            base64.b64decode(ssh["host_ecdsa_key_pub"].encode()).decode(),
            base64.b64decode(ssh["host_ed25519_key_pub"].encode()).decode(),
        )

        return {
            "port": ssh["tcpport"],
            "host_key": ssh_hostkey,
        }
class GlusterEventsdService(Service):

    class Config:
        namespace = 'gluster.eventsd'
        cli_namespace = 'service.gluster.eventsd'

    def format_cmd(self, data, delete=False):
        cmd = ['gluster-eventsapi', 'webhook-add' if not delete else 'webhook-del']

        # need to add the url as the next param
        cmd.append(data['url'])

        if not delete:
            # add bearer_token
            if data.get('bearer_token'):
                cmd.append('-t')
                cmd.append(data['bearer_token'])
            # add secret
            if data.get('secret'):
                cmd.append('-s')
                cmd.append(data['secret'])

        return cmd

    def run_cmd(self, cmd):
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        out, err = proc.communicate()
        # the `out` variable is formatted PrettyTable() output
        # which is gross and not useful, so instead of trying
        # to battle that and manipulate it the way we need,
        # just raise CallError on failure, otherwise return 'SUCCESS'
        if proc.returncode != 0:
            raise CallError(err.strip())
        return 'SUCCESS'

    @accepts(Dict(
        'webhook_create',
        Str('url', required=True, validators=[URL()]),
        Str('bearer_token', required=False),
        Str('secret', required=False),
    ))
    @job(lock=EVENTSD_LOCK)
    def create(self, job, data):
        """
        Add `url` webhook that will be called with a POST request
        that includes the event that was triggered along with the
        relevant data.

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        `bearer_token` is a bearer token
        `secret` is a secret used to add a JWT bearer token
        """
        verrors = ValidationErrors()
        add_it = result = None

        # get the current webhooks
        cw = self.middleware.call_sync('gluster.eventsd.webhooks')
        if data['url'] not in list(cw['webhooks']):
            add_it = True
            # there doesn't seem to be an upper limit on the amount
            # of webhook endpoints that can be added to the daemon
            # so place an arbitrary limit of 5
            # (excluding the local middlewared webhook)
            if len(cw['webhooks']) >= 5 and data['url'] != LOCAL_WEBHOOK_URL:
                verrors.add(
                    f'webhook_create.{data["url"]}',
                    'Maximum number of webhooks has been met. '
                    'Delete one or more and try again.')

        verrors.check()

        if add_it:
            cmd = self.format_cmd(data)
            result = self.run_cmd(cmd)

        return result

    @accepts(Dict(
        'webhook_delete',
        Str('url', required=True, validators=[URL()]),
    ))
    @job(lock=EVENTSD_LOCK)
    def delete(self, job, data):
        """
        Delete `url` webhook

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        """
        result = None

        # get the current webhooks
        cw = self.middleware.call_sync('gluster.eventsd.webhooks')
        if data['url'] in list(cw['webhooks']):
            cmd = self.format_cmd(data, delete=True)
            result = self.run_cmd(cmd)

        return result

    @accepts()
    def webhooks(self):
        """
        List the current webhooks (if any)
        """
        result = {'webhooks': {}}
        with contextlib.suppress(FileNotFoundError):
            with open(WEBHOOKS_FILE, 'r') as f:
                result['webhooks'] = json.load(f)

        return result

    @accepts()
    @job(lock=EVENTSD_LOCK)
    def sync(self, job):
        """
        Sync the webhooks config file to all peers in the
        trusted storage pool
        """
        proc = subprocess.Popen(
            ['gluster-eventsapi', 'sync', '--json'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        out, err = proc.communicate()
        if proc.returncode == 0:
            return json.loads(out.strip())['output']
        else:
            raise CallError(err.strip())

    @private
    @job(lock=EVENTSD_LOCK)
    def init(self, job):
        """
        Initializes the webhook directory and config file
        if they don't exist.
        """
        webhook_file = pathlib.Path(WEBHOOKS_FILE)

        # check if the glusterd dataset exists
        glusterd_dir = webhook_file.parent.parent

        new_file = False
        if glusterd_dir.exists() and glusterd_dir.is_mount():
            try:
                # make sure glusterd_dir/events subdir exists
                webhook_file.parent.mkdir(exist_ok=True)
                # now create the webhook file
                if not webhook_file.exists():
                    webhook_file.touch()
                    new_file = True
            except Exception as e:
                raise CallError(f'Failed creating {webhook_file} with error: {e}')
        else:
            raise CallError(f'{glusterd_dir} does not exist or is not mounted')

        init_data = {}
        if not new_file:
            # need to know if webhooks have already been added
            # to the file so we can act accordingly
            with webhook_file.open('r') as f:
                init_data = json.load(f)

        # don't add the local URL if it's already there
        if not init_data.get(LOCAL_WEBHOOK_URL):
            # make sure to add the local api endpoint to the file
            local_url = {LOCAL_WEBHOOK_URL: {'token': '', 'secret': ''}}
            init_data.update(local_url)

        # finally write it out
        try:
            with webhook_file.open('w') as f:
                f.write(json.dumps(init_data))
        except Exception as e:
            raise CallError(f'Failed writing to {webhook_file} with error: {e}')

        return init_data
class GlusterEventsdService(Service):

    class Config:
        namespace = 'gluster.eventsd'
        cli_namespace = 'service.gluster.eventsd'

    @private
    def format_cmd(self, data, delete=False):
        cmd = ['gluster-eventsapi']
        cmd.append('webhook-add' if not delete else 'webhook-del')

        # need to add the url as the next param
        cmd.append(data['url'])

        if not delete:
            # add bearer_token
            if data.get('bearer_token'):
                cmd.append('-t')
                cmd.append(data['bearer_token'])
            # add secret
            if data.get('secret'):
                cmd.append('-s')
                cmd.append(data['secret'])

        return cmd

    @private
    def run_cmd(self, cmd):
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        out, err = proc.communicate()
        # the `out` variable is formatted PrettyTable() output
        # which is gross and not useful, so instead of trying
        # to battle that and manipulate it the way we need,
        # just raise CallError on failure, otherwise return 'SUCCESS'
        if proc.returncode != 0:
            raise CallError(err.strip())
        return 'SUCCESS'

    @accepts(Dict(
        'webhook_create',
        Str('url', required=True, validators=[URL()]),
        Str('bearer_token', required=False),
        Str('secret', required=False),
    ))
    @job(lock=EVENTSD_CRE_OR_DEL)
    def create(self, job, data):
        """
        Add `url` webhook that will be called with a JSON formatted POST request
        that includes the event that was triggered along with the relevant data.

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        `bearer_token` is a bearer token
        `secret` is a secret used to encode the JWT message

        NOTE: this webhook will be synchronized to all peers
        in the trusted storage pool.
        """
        verrors = ValidationErrors()
        result = None

        cw = self.middleware.call_sync('gluster.eventsd.webhooks')
        if data['url'] not in list(cw['webhooks']):
            # there doesn't seem to be an upper limit on the amount
            # of webhook endpoints that can be added to the daemon
            # so place an arbitrary limit of 5 (for now)
            if len(cw['webhooks']) >= 5:
                verrors.add(
                    f'webhook_create.{data["url"]}',
                    'Maximum number of webhooks has been met. '
                    'Delete one or more and try again.')

            verrors.check()

            cmd = self.format_cmd(data)
            result = self.run_cmd(cmd)

            # sync the file across to all other peers
            job = self.middleware.call_sync('gluster.eventsd.sync')
            job.wait_sync()

        return result

    @accepts(Dict(
        'webhook_delete',
        Str('url', required=True, validators=[URL()]),
    ))
    @job(lock=EVENTSD_CRE_OR_DEL)
    def delete(self, job, data):
        """
        Delete `url` webhook

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        """
        result = None

        # get the current webhooks
        cw = self.middleware.call_sync('gluster.eventsd.webhooks')
        if data['url'] in list(cw['webhooks']):
            cmd = self.format_cmd(data, delete=True)
            result = self.run_cmd(cmd)

            # sync the file across to all other peers
            job = self.middleware.call_sync('gluster.eventsd.sync')
            job.wait_sync()

        return result

    @accepts()
    def webhooks(self):
        """
        List the current webhooks (if any)
        """
        result = {'webhooks': {}}
        with contextlib.suppress(FileNotFoundError, json.decoder.JSONDecodeError):
            with open(WEBHOOKS_FILE, 'r') as f:
                result['webhooks'] = json.load(f)

        return result

    @accepts()
    @job(lock='EVENTSD_SYNC')
    async def sync(self, job):
        """
        Sync the webhooks config file to all peers in the
        trusted storage pool
        """
        cp = await run(
            ['gluster-eventsapi', 'sync', '--json'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False,
        )
        if cp.returncode == 0:
            return json.loads(cp.stdout.strip())['output']
        else:
            raise CallError(cp.stderr.strip())

    @private
    @job(lock='eventsd_init')
    def init(self, job):
        """
        Initializes the webhook directory and config file
        if they don't exist.
        """
        webhook_file = pathlib.Path(WEBHOOKS_FILE)
        glusterd_dir = webhook_file.parent.parent

        # check if the glusterd dataset exists
        if glusterd_dir.exists() and glusterd_dir.is_mount():
            try:
                # make sure glusterd_dir/events subdir exists
                webhook_file.parent.mkdir(exist_ok=True)

                # create the webhooks file if it doesn't exist
                webhook_file.touch(exist_ok=True)

                # make sure the file is initialized to an empty
                # json formatted file since the 3rd party binary
                # expects it to be
                if not webhook_file.read_text():
                    webhook_file.write_text('{}')
            except Exception as e:
                raise CallError(f'Failed creating {webhook_file} with error: {e}')
        else:
            raise CallError(f'{glusterd_dir} does not exist or is not mounted')

        # at one point we tried to have glustereventsd send
        # event messages locally to us but that proved to be
        # fraught with errors because of upstream issues.
        # It's pretty clear that the glustereventsd code
        # isn't tested (or used) often because the JWT
        # implementation is very much broken, so now we remove
        # the localhost api endpoint (if it's there)
        data = {'url': LOCAL_WEBHOOK_URL}
        if self.middleware.call_sync('service.started', 'glustereventsd'):
            self.middleware.call_sync('gluster.eventsd.delete', data)
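
# --- Illustration (not part of the service above) ---------------------------
# What format_cmd() builds for the two operations, using placeholder values;
# this mirrors the `gluster-eventsapi webhook-add` / `webhook-del` invocations
# that run_cmd() then executes. `self` is unused by format_cmd(), so a None
# stand-in is passed for illustration.
if __name__ == "__main__":
    svc = None  # stand-in; format_cmd() does not touch `self`

    add = GlusterEventsdService.format_cmd(
        svc, {"url": "http://192.168.1.50/endpoint", "bearer_token": "example-token"})
    # -> ["gluster-eventsapi", "webhook-add", "http://192.168.1.50/endpoint", "-t", "example-token"]

    delete = GlusterEventsdService.format_cmd(
        svc, {"url": "http://192.168.1.50/endpoint"}, delete=True)
    # -> ["gluster-eventsapi", "webhook-del", "http://192.168.1.50/endpoint"]

    print(add, delete, sep="\n")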
class GlusterEventsdService(CRUDService):

    class Config:
        namespace = 'gluster.eventsd'

    WORKDIR = '/var/lib/glusterd'
    WEBHOOKS_FILE = WORKDIR + '/events/webhooks.json'
    EVENTSD_LOCK = 'gluster_eventsd_lock'

    def format_cmd(self, data, delete=False):
        cmd = ['gluster-eventsapi', 'webhook-add' if not delete else 'webhook-del']

        # need to add the url as the next param
        cmd.append(data['url'])

        if not delete:
            # add bearer_token
            if data.get('bearer_token'):
                cmd.append('-t')
                cmd.append(data['bearer_token'])
            # add secret
            if data.get('secret'):
                cmd.append('-s')
                cmd.append(data['secret'])

        return cmd

    def run_cmd(self, cmd):
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        out, err = proc.communicate()
        # the `out` variable is formatted PrettyTable() output
        # which is gross and not useful, so instead of trying
        # to battle that and manipulate it the way we need,
        # just raise CallError on failure, otherwise return 'SUCCESS'
        if proc.returncode != 0:
            raise CallError(err.strip())
        return 'SUCCESS'

    @accepts(Dict(
        'webhook_create',
        Str('url', required=True, validators=[URL()]),
        Str('bearer_token', required=False),
        Str('secret', required=False),
    ))
    @job(lock=EVENTSD_LOCK)
    def do_create(self, job, data):
        """
        Add `url` webhook that will be called with a POST request
        that includes the event that was triggered along with the
        relevant data.

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        `bearer_token` is a bearer token
        `secret` is a secret used to add a JWT bearer token
        """
        verrors = ValidationErrors()

        # there doesn't seem to be an upper limit on the amount
        # of webhook endpoints that can be added to the daemon
        # so place an arbitrary limit of 5 for now
        if len(self.middleware.call_sync('gluster.eventsd.webhooks')['webhooks']) >= 5:
            verrors.add(
                f'webhook_create.{data["url"]}',
                'Maximum number of webhooks has been met. '
                'Delete one or more and try again.'
            )

        verrors.check()

        cmd = self.format_cmd(data)
        return self.run_cmd(cmd)

    @accepts(Dict(
        'webhook_delete',
        Str('url', required=True, validators=[URL()]),
    ))
    @job(lock=EVENTSD_LOCK)
    def do_delete(self, job, data):
        """
        Delete `url` webhook

        `url` is an http address (i.e. http://192.168.1.50/endpoint)
        """
        cmd = self.format_cmd(data, delete=True)
        return self.run_cmd(cmd)

    @accepts()
    def webhooks(self):
        """
        List the current webhooks (if any)
        """
        result = {'webhooks': {}}
        with contextlib.suppress(FileNotFoundError):
            with open(self.WEBHOOKS_FILE, 'r') as f:
                result['webhooks'] = json.load(f)

        return result

    @accepts()
    @job(lock=EVENTSD_LOCK)
    def sync(self, job):
        """
        Sync the webhooks config file to all peers in the
        trusted storage pool
        """
        proc = subprocess.Popen(
            ['gluster-eventsapi', 'sync', '--json'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        out, err = proc.communicate()
        if proc.returncode == 0:
            return json.loads(out.strip())['output']
        else:
            raise CallError(err.strip())
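
# --- Usage sketch (not part of the service above) ---------------------------
# A hypothetical example of managing webhooks through the middleware client.
# create/delete are jobs, so the call blocks on job completion via `job=True`
# (assuming a client version that supports that keyword); the URL and token
# are placeholders.
if __name__ == "__main__":
    from middlewared.client import Client

    with Client() as c:
        # register a webhook endpoint (the job returns 'SUCCESS' on success)
        c.call("gluster.eventsd.create", {
            "url": "http://192.168.1.50/endpoint",
            "bearer_token": "example-token",
        }, job=True)

        # inspect what is currently registered
        print(c.call("gluster.eventsd.webhooks"))

        # remove it again
        c.call("gluster.eventsd.delete", {"url": "http://192.168.1.50/endpoint"}, job=True)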