# NOTE(review): collapsed chunk, reformatted. Begins with two provider methods of an
# enclosing class whose header is not visible; ends mid-verify() — TODO locate the rest.
    @generator
    def get_temp_pubkeys(self):
        # Yields entries from the module-level temp_pubkeys collection.
        for k in temp_pubkeys:
            yield k

    @private
    def remove_temp_pubkey(self, key):
        # Best-effort removal: a missing key is silently ignored.
        try:
            temp_pubkeys.remove(key)
        except ValueError:
            pass


@description('Exchanges SSH keys with remote FreeNAS machine')
@accepts(
    h.all_of(h.ref('peer'),
             h.required('type', 'credentials'), h.forbidden('name')))
class FreeNASPeerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Exchanging SSH keys with remote host'

    def describe(self, peer, initial_credentials):
        # Names the task after the remote address taken from the peer credentials.
        return TaskDescription('Exchanging SSH keys with the remote {name}', name=q.get(peer, 'credentials.address', ''))

    def verify(self, peer, initial_credentials):
        credentials = peer['credentials']
        remote = credentials.get('address')
        username = initial_credentials.get('username')
        password = initial_credentials.get('password')
        # [chunk ends mid-method]
# NOTE(review): collapsed chunk, reformatted. Ends inside AlertFilterCreateTask.run()
# mid-dict literal — TODO locate the rest.
@description('Provides access to the alerts filters')
class AlertsFiltersProvider(Provider):
    @query('alert-filter')
    @generator
    def query(self, filter=None, params=None):
        # Streams alert filter records from the datastore.
        return self.datastore.query_stream(
            'alert.filters', *(filter or []), **(params or {})
        )


@description("Creates an Alert Filter")
@accepts(h.all_of(
    h.ref('alert-filter'),
    h.required('id')
))
class AlertFilterCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating alert filter'

    def describe(self, alertfilter):
        return TaskDescription('Creating alert filter {name}', name=alertfilter.get('name', '') if alertfilter else '')

    def verify(self, alertfilter):
        # No resources claimed during verification.
        return []

    def run(self, alertfilter):
        id = self.datastore.insert('alert.filters', alertfilter)
        normalize(alertfilter, {
        # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Starts with a run() method of an
# enclosing task class whose header is not visible; ends at DirectoryServiceCreateTask.run().
    def run(self, updated_params):
        node = ConfigNode('directory', self.configstore)
        node.update(updated_params)
        self.emit_event('directoryservice.changed', {'operation': 'update'})
        try:
            self.dispatcher.call_sync('dscached.management.reload_config')
        except RpcException as e:
            # Surface dscached reload failures as a task error.
            raise TaskException(
                errno.ENXIO,
                'Cannot reconfigure directory services: {0}'.format(str(e)))


@accepts(h.ref('Directory'), h.required('name', 'type'), h.forbidden('immutable'))
@returns(str)
class DirectoryServiceCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating a directory"

    def describe(self, directory):
        return TaskDescription("Creating directory {name}", name=directory.get('name', ''))

    def verify(self, directory):
        return ['system']

    def run(self, directory):
        # [chunk ends before the method body]
# NOTE(review): collapsed chunk, reformatted. Begins with the tail of a try/finally
# from a preceding definition (vSphere session teardown).
        }
    finally:
        connect.Disconnect(si)


class VMwareDatasetsProvider(Provider):
    @generator
    @query('VmwareDataset')
    def query(self, filter=None, params=None):
        # Streams vmware.datasets records from the datastore.
        return self.datastore.query_stream('vmware.datasets', *(filter or []), **(params or {}))


@accepts(
    h.all_of(h.ref('VmwareDataset'),
             h.required('name', 'dataset', 'datastore', 'peer')))
class VMWareDatasetCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating VMware dataset mapping"

    def describe(self, dataset):
        return TaskDescription("Creating VMware datastore mapping for {name}", name=dataset['datastore'])

    def verify(self, dataset):
        return ['system']

    def run(self, dataset):
        # Supply defaults for the optional VM-filter fields.
        normalize(dataset, {'vm_filter_op': 'NONE', 'vm_filter_entries': []})
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Ends inside NTPServerCreateTask.verify()
# mid-except — TODO locate the rest.
from lib.system import system, SubprocessException

logger = logging.getLogger('NTPPlugin')


@description("Provides access to NTP Servers configuration")
class NTPServersProvider(Provider):
    @query('ntp-server')
    def query(self, filter=None, params=None):
        return self.datastore.query('ntpservers', *(filter or []), **(params or {}))


@description("Adds new NTP Server")
@accepts(h.all_of(
    h.ref('ntp-server'),
    h.required('address'),
), bool)
class NTPServerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating NTP Server"

    def describe(self, ntp, force=False):
        return TaskDescription("Creating NTP Server {name}", name=ntp['address'])

    def verify(self, ntp, force=False):
        errors = ValidationException()
        # Probe the server with ntpdate -q to confirm reachability before accepting it.
        try:
            system('ntpdate', '-q', ntp['address'])
        except SubprocessException:
        # [chunk ends mid-handler]
# NOTE(review): collapsed chunk, reformatted. Begins mid-try of a preceding rsyncd
# configure task; ends inside RsyncdModuleCreateTask.verify().
        node.update(rsyncd)
        self.dispatcher.call_sync('etcd.generation.generate_group', 'rsyncd')
        self.dispatcher.dispatch_event('service.rsyncd.changed', {
            'operation': 'updated',
            'ids': None,
        })
    except RpcException as e:
        raise TaskException(
            errno.ENXIO, 'Cannot reconfigure Rsyncd: {0}'.format(str(e))
        )


@description("Create a rsync module in the system")
@accepts(h.all_of(
    h.ref('rsyncd-module'),
    h.required('name', 'path'),
))
class RsyncdModuleCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Adding rsync module'

    def describe(self, rsyncmod):
        return TaskDescription('Adding rsync module {name}', name=rsyncmod.get('name', '') or '')

    def verify(self, rsyncmod):
        errors = ValidationException()
        # Module names may not contain '/' or ']'.
        # NOTE(review): 'backet' in the user-visible message looks like a typo for
        # 'bracket' — fix in a behavior-changing pass, not here.
        if re.search(r'[/\]]', rsyncmod['name']):
            errors.add((0, 'name'), 'The name cannot contain slash or a closing square backet.')
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Begins inside a provider method of an
# enclosing class; ends inside CreateShareTask.verify().
        return self.dispatcher.call_sync('shares.{0}.get_connected_clients'.format(share['type']), share_name)

    @description("Get shares dependent on provided volume")
    @accepts(str)
    @returns(h.array('share'))
    def get_dependencies(self, volume):
        # Shares whose 'target' is the given volume.
        return self.query([
            ('target', '=', volume)
        ])


@description("Creates new share")
@accepts(h.all_of(
    h.ref('share'),
    h.required('name', 'type', 'target', 'properties'),
    h.forbidden('id')
))
class CreateShareTask(Task):
    def verify(self, share, skip_dataset=False):
        # Reject unknown share types and missing target volumes up front.
        if not self.dispatcher.call_sync('shares.supported_types').get(share['type']):
            raise VerifyException(errno.ENXIO, 'Unknown sharing type {0}'.format(share['type']))

        if not self.dispatcher.call_sync('volumes.query', [('name', '=', share['target'])], {'single': True}):
            raise VerifyException(errno.ENXIO, 'Volume {0} doesn\'t exist'.format(share['target']))

        if self.datastore.exists(
            'shares',
            ('type', '=', share['type']),
            ('name', '=', share['name'])
        ):
        # [chunk ends mid-statement]
# NOTE(review): collapsed chunk, reformatted. Begins inside a GID-allocation helper of
# an enclosing class; ends at a dangling @classmethod decorator.
        gid = None
        # First free GID in [start_gid, end_gid).
        # NOTE(review): `if not gid` would also reject gid == 0 — presumably the range
        # never includes 0; confirm against caller.
        for i in range(start_gid, end_gid):
            if not self.datastore.exists('groups', ('gid', '=', i)):
                gid = i
                break

        if not gid:
            raise RpcException(errno.ENOSPC, 'No free GIDs available')

        return gid


@description("Create an user in the system")
@accepts(h.all_of(
    h.ref('user'),
    h.required('username'),
    h.forbidden('builtin'),
    h.object(properties={'password': {'type': ['string', 'null']}}),
    h.any_of(
        h.required('password'),
        h.required('unixhash', 'nthash'),
        h.required('password_disabled')
    )
))
class UserCreateTask(Task):
    def __init__(self, dispatcher, datastore):
        super(UserCreateTask, self).__init__(dispatcher, datastore)
        self.id = None               # id of the created user, set by run()
        self.created_group = False   # whether run() had to create a primary group

    @classmethod
    # [chunk ends mid-decorator]
# NOTE(review): collapsed chunk, reformatted. Ends inside CreateFakeDisk.run().
from freenas.dispatcher.rpc import SchemaHelper as h
from freenas.utils import normalize


@description('Provides information about simulated disks')
class FakeDisksProvider(Provider):
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream('simulator.disks', *(filter or []), **(params or {}))


@description("Creates a Simulated Fake Disk with the parameters provided")
@accepts(
    h.all_of(
        h.ref('SimulatorDisk'),
        h.required('id')
    )
)
class CreateFakeDisk(Task):
    @classmethod
    def early_describe(cls):
        return "Creating simulated disk"

    def describe(self, disk):
        return TaskDescription("Creating simulated disk {name}", name=disk.get('path', '') if disk else '')

    def verify(self, disk):
        return ['system-dataset']

    def run(self, disk):
        # Default backing-file location inside the system dataset's 'simulator' directory.
        defpath = os.path.join(self.dispatcher.call_sync('system_dataset.request_directory', 'simulator'), disk['id'])
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Begins inside a path-resolution helper of
# an enclosing definition; ends inside CreateShareTask.verify().
        if type == 'ZVOL':
            return os.path.dirname(os.path.join(root, path))
        if type == 'DIRECTORY':
            return path
        if type == 'FILE':
            return os.path.dirname(path)
        raise RpcException(errno.EINVAL, 'Invalid share target type {0}'.format(type))


@description("Creates new share")
@accepts(h.all_of(
    h.ref('share'),
    h.required('name', 'type', 'target_type', 'target_path', 'properties')
))
class CreateShareTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating share"

    def describe(self, share):
        return TaskDescription("Creating share {name}", name=share.get('name') if share else '')

    def verify(self, share):
        if not self.dispatcher.call_sync('share.supported_types').get(share['type']):
            raise VerifyException(errno.ENXIO, 'Unknown sharing type {0}'.format(share['type']))

        share_path = self.dispatcher.call_sync('share.expand_path', share['target_path'], share['target_type'])
        if share['target_type'] != 'FILE':
        # [chunk ends mid-statement]
# NOTE(review): collapsed chunk, reformatted. Begins inside a describe() of an enclosing
# task class; ends inside DirectoryServiceCreateTask.run() mid-call.
        return TaskDescription(self.early_describe())

    def verify(self, updated_params):
        return ["system"]

    def run(self, updated_params):
        node = ConfigNode("directory", self.configstore)
        node.update(updated_params)
        try:
            self.dispatcher.call_sync("dscached.management.reload_config")
        except RpcException as e:
            raise TaskException(errno.ENXIO, "Cannot reconfigure directory services: {0}".format(str(e)))


@accepts(h.ref("directory"), h.required("name", "type"), h.forbidden("immutable"))
@returns(str)
class DirectoryServiceCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating a directory"

    def describe(self, directory):
        # NOTE(review): sibling variants use directory.get('name', '') — this one raises
        # KeyError if 'name' is absent; schema requires 'name', so presumably safe.
        return TaskDescription("Creating directory {name}", name=directory["name"])

    def verify(self, directory):
        return ["system"]

    def run(self, directory):
        try:
            params = self.dispatcher.call_sync(
            # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Begins inside a disk-erase task of an
# enclosing class; ends inside DiskConfigureTask.verify().
        os.close(fd)

    def get_status(self):
        # Progress is fraction of media already erased; 0 until the erase starts.
        if not self.started:
            return TaskStatus(0, 'Erasing disk...')
        return TaskStatus((self.mediasize - self.remaining) / float(self.mediasize), 'Erasing disk...')


@description("Configures online disk parameters")
@accepts(
    str,
    h.all_of(
        h.ref('disk'),
        h.no(h.required('name', 'serial', 'path', 'id', 'mediasize', 'status', 'description'))
    )
)
class DiskConfigureTask(Task):
    def verify(self, id, updated_fields):
        disk = self.datastore.get_by_id('disks', id)
        errors = []

        if not disk:
            raise VerifyException(errno.ENOENT, 'Disk {0} not found'.format(id))

        # Only online disks can be reconfigured.
        if not self.dispatcher.call_sync('disks.is_online', disk['path']):
            raise VerifyException(errno.EINVAL, 'Cannot configure offline disk')

        if not disk['status']['smart_capable']:
            if 'smart' in updated_fields:
            # [chunk ends mid-statement]
# NOTE(review): collapsed chunk, reformatted. Begins inside an extend() callback of a
# preceding query; ends inside ISCSITargetCreateTask.run() mid-call.
            sessions)
        obj['status'] = {
            'connected': ses.connected if ses else False,
            'status': ses.reason if ses else 'Unknown'
        }
        return obj

    return self.datastore.query_stream('iscsi_initiator.targets', *(filter or []), callback=extend, **(params or {}))


@description("Creates a new iSCSI initiator target")
@accepts(h.all_of(h.ref('IscsiTarget'), h.required('address', 'name')))
class ISCSITargetCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating iSCSI initiator target"

    def describe(self, target):
        return TaskDescription("Creating iSCSI initiator target {name}", name=target['name'])

    def verify(self, target):
        return ['system']

    def run(self, target):
        normalize(
            target, {
            # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Begins mid-try of a preceding rsyncd
# configure task; ends inside verify() at a truncated errors.add() call.
        node.update(rsyncd)
        self.dispatcher.call_sync('etcd.generation.generate_group', 'rsyncd')
        self.dispatcher.dispatch_event('service.rsyncd.changed', {
            'operation': 'updated',
            'ids': None,
        })
    except RpcException as e:
        raise TaskException(
            errno.ENXIO, 'Cannot reconfigure Rsyncd: {0}'.format(str(e)))


@description("Create a rsync module in the system")
@accepts(h.all_of(
    h.ref('RsyncdModule'),
    h.required('name', 'path'),
))
class RsyncdModuleCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Adding rsync module'

    def describe(self, rsyncmod):
        return TaskDescription('Adding rsync module {name}', name=rsyncmod.get('name', '') or '')

    def verify(self, rsyncmod):
        errors = ValidationException()
        # Module names may not contain '/' or ']'.
        if re.search(r'[/\]]', rsyncmod['name']):
            errors.add(
            # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Begins mid-try of a preceding
# reachability probe returning {'state', 'rtt'}.
            )
            delta = datetime.now() - start_time
        # NOTE(review): bare except swallows everything (including KeyboardInterrupt);
        # narrow this in a behavior-changing pass.
        except:
            return {'state': 'OFFLINE', 'rtt': None}
        finally:
            if si:
                connect.Disconnect(si)

        return {'state': 'ONLINE', 'rtt': delta.total_seconds()}


@private
@description('Creates a VMware peer entry')
@accepts(h.all_of(
    h.ref('Peer'),
    h.required('type', 'credentials')
))
class VMwarePeerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating VMware peer entry'

    def describe(self, peer, initial_credentials):
        return TaskDescription('Creating VMware peer entry {name}', name=peer.get('name', ''))

    def verify(self, peer, initial_credentials):
        # Only peers explicitly typed 'vmware' are accepted by this task.
        if peer.get('type') != 'vmware':
            raise VerifyException(errno.EINVAL, 'Peer type must be selected as VMware')

        return ['system']
# NOTE(review): collapsed chunk, reformatted. Begins inside a provider method of an
# enclosing class; ends inside OpenVpnConfigureTask.verify().
    except FileNotFoundError:
        raise RpcException(errno.ENOENT, 'Client config file not available. '
                                         'Please configure OpenVPN server first.')

    @returns(str)
    def provide_tls_auth_key(self):
        node = ConfigNode('service.openvpn', self.configstore).__getstate__()
        return node['tls_auth']


@description('Creates OpenVPN config file')
@accepts(
    h.all_of(
        h.ref('service-openvpn'),
        h.required('dev', 'ca', 'cert', 'key', 'cipher', 'port', 'proto')
    ))
class OpenVpnConfigureTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Configuring OpenVPN service'

    def describe(self, openvpn):
        return TaskDescription('Configuring OpenVPN service')

    def verify(self, openvpn):
        # Device must look like tapN/tunN.
        interface_pattern = '(tap|tun)[0-9]'
        node = ConfigNode('service.openvpn', self.configstore).__getstate__()
        node.update(openvpn)

        if not re.search(interface_pattern, node['dev']):
        # [chunk ends mid-statement]
# NOTE(review): collapsed chunk, reformatted. Begins inside a GID-allocation helper of
# an enclosing class.
        gid = None
        # First free GID in [start_gid, end_gid).
        for i in range(start_gid, end_gid):
            if not self.datastore.exists('groups', ('gid', '=', i)):
                gid = i
                break

        if not gid:
            raise RpcException(errno.ENOSPC, 'No free GIDs available')

        return gid


@description("Create an user in the system")
@accepts(
    h.all_of(
        h.ref('User'),
        h.required('username'),
        h.forbidden('builtin'),
        h.object(properties={'password': {
            'type': ['password', 'null']
        }}),
        h.any_of(h.required('password'),
                 h.required('unixhash', 'nthash'),
                 h.required('password_disabled'))))
class UserCreateTask(Task):
    def __init__(self, dispatcher):
        super(UserCreateTask, self).__init__(dispatcher)
        self.id = None               # id of the created user, set by run()
        self.created_group = False   # whether run() had to create a primary group

    @classmethod
    def early_describe(cls):
        return "Creating user"
# NOTE(review): collapsed chunk, reformatted. Begins inside ImportiSCSIShareTask (header
# not visible); ends inside CreateISCSITargetTask.run().
    def early_describe(cls):
        return "Importing iSCSI share"

    def describe(self, share):
        return TaskDescription("Importing iSCSI share {name}", name=share.get('name', '') if share else '')

    def verify(self, share):
        return super(ImportiSCSIShareTask, self).verify(share)

    def run(self, share):
        return super(ImportiSCSIShareTask, self).run(share)


@accepts(h.all_of(
    h.ref('share-iscsi-target'),
    h.required('id')
))
@description('Creates iSCSI share target')
class CreateISCSITargetTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating iSCSI share target'

    def describe(self, target):
        return TaskDescription('Creating iSCSI share target {name}', name=target.get('id', '') if target else '')

    def verify(self, target):
        return ['system']

    def run(self, target):
        for i in target.get('extents', []):
        # [chunk ends mid-loop]
self.dispatcher.call_sync('etcd.generation.generate_group', 'rsyncd') self.dispatcher.call_sync('services.apply_state', 'rsyncd', True) self.dispatcher.dispatch_event('service.rsyncd.changed', { 'operation': 'updated', 'ids': None, }) except RpcException as e: raise TaskException( errno.ENXIO, 'Cannot reconfigure Rsyncd: {0}'.format(str(e)) ) @description("Create a rsync module in the system") @accepts(h.all_of( h.ref('rsyncd-module'), h.required('name', 'path'), )) class RsyncdModuleCreateTask(Task): def describe(self, rsyncmod): return 'Adding rsync module' def verify(self, rsyncmod): errors = [] if re.search(r'[/\]]', rsyncmod['name']): errors.append('name', errno.EINVAL, 'The name cannot contain slash or a closing square backet.') if errors: raise ValidationException(errors) return ['system']
# NOTE(review): collapsed chunk, reformatted. Begins inside a cache-flush task of an
# enclosing class; ends at DirectoryServiceCreateTask.run() with no body.
    def describe(self):
        return TaskDescription(self.early_describe())

    def verify(self):
        return ['system']

    def run(self):
        try:
            self.dispatcher.call_sync('dscached.management.flush_cache')
        except RpcException as e:
            # Propagate the RPC error code to the task layer.
            raise TaskException(e.code, f'Cannot flush cache: {e}')


@accepts(
    h.ref('Directory'),
    h.required('name', 'type'),
    h.forbidden('immutable')
)
@returns(str)
class DirectoryServiceCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating a directory"

    def describe(self, directory):
        return TaskDescription("Creating directory {name}", name=directory.get('name', ''))

    def verify(self, directory):
        return ['system']

    def run(self, directory):
        # [chunk ends before the method body]
# NOTE(review): collapsed chunk, reformatted. Begins inside ImportiSCSIShareTask (header
# not visible); ends inside CreateISCSITargetTask.run().
    def early_describe(cls):
        return "Importing iSCSI share"

    def describe(self, share):
        return TaskDescription("Importing iSCSI share {name}", name=share.get('name', '') if share else '')

    def verify(self, share):
        return super(ImportiSCSIShareTask, self).verify(share)

    def run(self, share):
        return super(ImportiSCSIShareTask, self).run(share)


@accepts(h.all_of(
    h.ref('ShareIscsiTarget'),
    h.required('id')
))
@description('Creates iSCSI share target')
class CreateISCSITargetTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating iSCSI share target'

    def describe(self, target):
        return TaskDescription('Creating iSCSI share target {name}', name=target.get('id', '') if target else '')

    def verify(self, target):
        return ['system']

    def run(self, target):
        for i in target.get('extents', []):
        # [chunk ends mid-loop]
# NOTE(review): collapsed chunk, reformatted. Begins inside a backup provider class
# (header not visible); ends inside CreateBackupTask.run().
    @description("Returns list of supported backup providers")
    @accepts()
    @returns(h.ref('backup-providers'))
    def supported_providers(self):
        # Collect plugins whose metadata declares type == 'backup', keyed by method.
        result = {}
        for p in list(self.dispatcher.plugins.values()):
            if p.metadata and p.metadata.get('type') == 'backup':
                result[p.metadata['method']] = {}
        return result


@accepts(h.all_of(
    h.ref('backup'),
    h.required('name', 'provider', 'dataset')
))
@description('Creates a backup task')
class CreateBackupTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating backup task'

    def describe(self, backup):
        return TaskDescription('Creating backup task {name}', name=backup.get('name', '') if backup else '')

    def verify(self, backup):
        return ['system']

    def run(self, backup):
        if 'id' in backup and self.datastore.exists('backup', ('id', '=', backup['id'])):
        # [chunk ends mid-statement]
# NOTE(review): collapsed chunk, reformatted. Begins with the tail of a try/finally
# from a preceding definition; ends inside VMWareDatasetCreateTask.run() mid-dict.
            'virtual_machines': vms
        }
    finally:
        connect.Disconnect(si)


class VMwareDatasetsProvider(Provider):
    @generator
    @query('vmware-dataset')
    def query(self, filter=None, params=None):
        return self.datastore.query_stream('vmware.datasets', *(filter or []), **(params or {}))


@accepts(h.all_of(
    h.ref('vmware-dataset'),
    h.required('name', 'dataset', 'datastore', 'peer')
))
class VMWareDatasetCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating VMware dataset mapping"

    def describe(self, dataset):
        return TaskDescription("Creating VMware datastore mapping for {name}", name=dataset['datastore'])

    def verify(self, dataset):
        return ['system']

    def run(self, dataset):
        normalize(dataset, {
            'vm_filter_op': 'NONE',
        # [chunk ends mid-dict]
# Clear out DNS settings node['dns.addresses'] = [] node['dns.search'] = [] try: for code, message in self.dispatcher.call_sync('networkd.configuration.configure_network', timeout=60): self.add_warning(TaskWarning(code, message)) self.dispatcher.call_sync('etcd.generation.generate_group', 'network') except RpcException as e: raise TaskException(errno.ENXIO, 'Cannot reconfigure interface: {0}'.format(str(e))) @accepts(h.all_of( h.ref('network-interface'), h.required('type'), h.forbidden('id', 'status') )) @returns(str) @description('Creates network interface') class CreateInterfaceTask(Task): @classmethod def early_describe(cls): return "Creating network interface" def describe(self, iface): return TaskDescription("Creating {name} network interface", name=iface['type']) def verify(self, iface): return ['system']
# NOTE(review): collapsed chunk, reformatted. Ends inside KerberosKeytabCreateTask.run()
# mid-event payload.
@accepts(str)
class KerberosRealmDeleteTask(Task):
    def verify(self, id):
        return ['system']

    def run(self, id):
        self.datastore.delete('kerberos.realms', id)
        self.dispatcher.dispatch_event('kerberos.realm.changed', {
            'operation': 'delete',
            'ids': [id]
        })


@accepts(h.all_of(h.ref('kerberos-keytab'), h.required('name', 'keytab')))
class KerberosKeytabCreateTask(Task):
    def verify(self, keytab):
        return ['system']

    def run(self, keytab):
        # Keytab names must be unique.
        if self.datastore.exists('kerberos.keytabs', ('name', '=', keytab['name'])):
            raise TaskException(
                errno.EEXIST, 'Keytab {0} already exists'.format(keytab['name']))

        id = self.datastore.insert('kerberos.keytabs', keytab)
        # Regenerate the on-disk keytab after the datastore insert.
        generate_keytab(self.datastore)
        self.dispatcher.dispatch_event('kerberos.keytab.changed', {
            'operation': 'create',
        # [chunk ends mid-dict]
# NOTE(review): collapsed chunk, reformatted. Ends inside CreateFakeDisk.run() mid-dict.
from task import Task, TaskStatus, Provider, TaskException, VerifyException
from freenas.dispatcher.rpc import RpcException, description, accepts, returns, private
from freenas.dispatcher.rpc import SchemaHelper as h
from freenas.utils import normalize


class FakeDisksProvider(Provider):
    def query(self, filter=None, params=None):
        return self.datastore.query('simulator.disks', *(filter or []), **(params or {}))


@description("Creates a Simulated Fake Disk with the parameters provided")
@accepts(
    h.all_of(
        h.ref('simulator-disk'),
        h.required('id')
    )
)
class CreateFakeDisk(Task):
    def verify(self, disk):
        return ['system']

    def run(self, disk):
        # Default backing-file location inside the system dataset's 'simulator' directory.
        defpath = os.path.join(self.dispatcher.call_sync('system_dataset.request_directory', 'simulator'), disk['id'])
        # Fill in defaults for unspecified simulated-disk attributes.
        normalize(disk, {
            'vendor': 'FreeNAS',
            'path': defpath,
            'model': 'Virtual Disk',
            'serial': self.dispatcher.call_sync('share.iscsi.generate_serial'),
            'block_size': 512,
            'rpm': '7200',
        # [chunk ends mid-dict]
# NOTE(review): collapsed chunk, reformatted. Begins mid-call inside a preceding query
# method; ends inside CreateBackupTask.run() mid-call.
        stream=True,
        **(params or {}))

    @description("Returns list of supported backup providers")
    @accepts()
    @returns(h.ref('backup-providers'))
    def supported_providers(self):
        # Collect plugins whose metadata declares type == 'backup', keyed by method.
        result = {}
        for p in list(self.dispatcher.plugins.values()):
            if p.metadata and p.metadata.get('type') == 'backup':
                result[p.metadata['method']] = {}
        return result


@accepts(h.all_of(h.ref('backup'), h.required('name', 'provider', 'dataset')))
@description('Creates a backup task')
class CreateBackupTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating backup task'

    def describe(self, backup):
        return TaskDescription('Creating backup task {name}', name=backup.get('name', '') if backup else '')

    def verify(self, backup):
        return ['system']

    def run(self, backup):
        if 'id' in backup and self.datastore.exists('backup',
        # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Begins mid-except of a preceding helper;
# ends inside TunableCreateTask.verify().
    except SubprocessException as e:
        # sysctl module compatibility
        raise OSError(str(e.err))


@description("Provides access to OS tunables")
class TunablesProvider(Provider):
    @query('tunable')
    def query(self, filter=None, params=None):
        return self.datastore.query('tunables', *(filter or []), **(params or {}))


@description("Adds Tunable")
@accepts(h.all_of(
    h.ref('tunable'),
    h.required('var', 'value', 'type'),
))
class TunableCreateTask(Task):
    def describe(self, tunable):
        return "Creating Tunable {0}".format(tunable['var'])

    def verify(self, tunable):
        errors = ValidationException()

        # Tunable variables must be unique.
        if self.datastore.exists('tunables', ('var', '=', tunable['var'])):
            errors.add((1, 'var'), 'This variable already exists.', code=errno.EEXIST)

        # Quote characters would break the generated config.
        if '"' in tunable['value'] or "'" in tunable['value']:
            errors.add((1, 'value'), 'Quotes are not allowed')
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Begins inside an extend() callback of a
# preceding query; ends inside ISCSITargetCreateTask.run() mid-dict.
        }
        return obj

    return self.datastore.query_stream(
        'iscsi_initiator.targets', *(filter or []), callback=extend, **(params or {})
    )


@description("Creates a new iSCSI initiator target")
@accepts(h.all_of(
    h.ref('IscsiTarget'),
    h.required('address', 'name')
))
class ISCSITargetCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating iSCSI initiator target"

    def describe(self, target):
        return TaskDescription("Creating iSCSI initiator target {name}", name=target['name'])

    def verify(self, target):
        return ['system']

    def run(self, target):
        normalize(target, {
            'enabled': True,
        # [chunk ends mid-dict]
# NOTE(review): collapsed chunk, reformatted. Begins inside ImportiSCSIShareTask (header
# not visible); ends inside CreateISCSITargetTask.run().
    @classmethod
    def early_describe(cls):
        return "Importing iSCSI share"

    def describe(self, share):
        return TaskDescription("Importing iSCSI share {name}", name=share.get('name', '') if share else '')

    def verify(self, share):
        return super(ImportiSCSIShareTask, self).verify(share)

    def run(self, share):
        return super(ImportiSCSIShareTask, self).run(share)


@accepts(h.all_of(h.ref('ShareIscsiTarget'), h.required('id')))
@description('Creates iSCSI share target')
class CreateISCSITargetTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating iSCSI share target'

    def describe(self, target):
        return TaskDescription('Creating iSCSI share target {name}', name=target.get('id', '') if target else '')

    def verify(self, target):
        return ['system']

    def run(self, target):
        for i in target.get('extents', []):
        # [chunk ends mid-loop]
# NOTE(review): collapsed chunk, reformatted. Begins inside an extend() callback of a
# preceding query; ends inside ISCSITargetCreateTask.run() mid-dict.
        }
        return obj

    return self.datastore.query_stream(
        'iscsi_initiator.targets', *(filter or []), callback=extend, **(params or {})
    )


@description("Creates a new iSCSI initiator target")
@accepts(h.all_of(
    h.ref('iscsi-target'),
    h.required('address', 'name')
))
class ISCSITargetCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating iSCSI initiator target"

    def describe(self, target):
        return TaskDescription("Creating iSCSI initiator target {name}", name=target['name'])

    def verify(self, target):
        return ['system']

    def run(self, target):
        normalize(target, {
            'enabled': True,
        # [chunk ends mid-dict]
# NOTE(review): collapsed chunk, reformatted. Begins inside a network configure task of
# an enclosing class.
        try:
            # configure_network streams (code, message) warnings back to us.
            for code, message in self.dispatcher.call_sync(
                    'networkd.configuration.configure_network', timeout=60):
                self.add_warning(TaskWarning(code, message))
            self.dispatcher.call_sync('etcd.generation.generate_group', 'network')
        except RpcException as e:
            raise TaskException(
                errno.ENXIO, 'Cannot reconfigure interface: {0}'.format(str(e)))


@accepts(
    h.all_of(h.ref('NetworkInterface'),
             h.required('type'), h.forbidden('id', 'status')))
@returns(str)
@description('Creates network interface')
class CreateInterfaceTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating network interface"

    def describe(self, iface):
        return TaskDescription("Creating {name} network interface", name=iface['type'])

    def verify(self, iface):
        return ['system']
def join(self, id): self.dispatcher.call_sync('dsd.configuration.join_activedirectory', id) def enable(self, id): self.dispatcher.call_sync('dsd.configuration.enable', id) def disable(self, id): self.dispatcher.call_sync('dsd.configuration.disable', id) @description("Create directory service") @accepts( h.all_of( h.ref('directoryservice'), h.required('name', 'domain'), h.forbidden('id') ) ) class DirectoryServiceCreateTask(Task): def verify(self, directoryservice): dstypes = self.dispatcher.call_sync('dsd.configuration.get_supported_directories') type = directoryservice['type'] if type not in dstypes: raise VerifyException(errno.ENXIO, 'Unknown directory service type {0}'.format(directoryservice[type])) directoryservices = self.dispatcher.call_sync('dsd.configuration.get_directory_services') for ds in directoryservices: if ds['type'] == type: raise VerifyException(errno.EEXIST, 'THERE CAN ONLY BE ONE!')
# NOTE(review): collapsed chunk, reformatted. Begins inside a crypto provider's query
# method (header not visible); ends inside CertificateCreateTask.verify().
        return q.query(
            self.datastore.query_stream('crypto.certificates', callback=extend),
            *(filter or []),
            stream=True,
            **(params or {})
        )

    @accepts()
    @returns(h.object())
    def get_country_codes(self):
        return COUNTRY_CODES


@accepts(h.all_of(
    h.ref('crypto-certificate'),
    h.required('type', 'name', 'country', 'state', 'city', 'organization', 'email', 'common'),
    h.forbidden('certificate_path', 'privatekey_path'),
))
@description('Creates a certificate')
class CertificateCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating certificate"

    def describe(self, certificate):
        return TaskDescription("Creating certificate {name}", name=certificate['name'])

    def verify(self, certificate):
        # Default optional flags; NOTE(review): False as the default for
        # 'signing_ca_name' (a name field) looks odd — confirm downstream usage.
        certificate['selfsigned'] = certificate.get('selfsigned', False)
        certificate['signing_ca_name'] = certificate.get('signing_ca_name', False)
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Ends inside NTPServerCreateTask.run().
logger = logging.getLogger('NTPPlugin')


@description("Provides access to NTP Servers configuration")
class NTPServersProvider(Provider):
    @query('ntp-server')
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream('ntpservers', *(filter or []), **(params or {}))


@description("Adds new NTP Server")
@accepts(h.all_of(
    h.ref('ntp-server'),
    h.required('address'),
), bool)
class NTPServerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating NTP Server"

    def describe(self, ntp, force=False):
        return TaskDescription("Creating NTP Server {name}", name=ntp['address'])

    def verify(self, ntp, force=False):
        return ['system']

    def run(self, ntp, force=False):
        # Server addresses must be unique.
        if self.datastore.exists('ntpservers', ('address', '=', ntp['address'])):
            raise TaskException(errno.ENXIO, 'NTP Server with given address already exists')
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Begins inside a key-reading provider
# method (header not visible); ends inside PeerCreateTask.verify().
            with open(key_path) as f:
                keys.append(f.read())
        except FileNotFoundError:
            raise RpcException(errno.ENOENT, 'Key file {0} not found'.format(key_path))

        # NOTE(review): [i for i in keys] is a plain copy — list(keys) would do.
        return [i for i in keys]

    @private
    def credentials_types(self):
        return ['replication', 'ssh', 'amazon-s3']


@description('Creates a peer entry')
@accepts(h.all_of(
    h.ref('peer'),
    h.required('name', 'address', 'type', 'credentials')
))
class PeerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating peer entry'

    def describe(self, peer):
        return TaskDescription('Creating peer entry {name}', name=peer.get('name', ''))

    def verify(self, peer):
        if 'name' not in peer:
            raise VerifyException(errno.EINVAL, 'Name has to be specified')

        if 'address' not in peer:
            raise VerifyException(errno.EINVAL, 'Address has to be specified')
        # [chunk ends here]
# NOTE(review): collapsed chunk, reformatted. Ends inside AlertFilterCreateTask.run()
# mid-dict literal.
@description('Provides access to the alerts filters')
class AlertsFiltersProvider(Provider):
    @query('AlertFilter')
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream(
            'alert.filters', *(filter or []), **(params or {})
        )


@description("Creates an Alert Filter")
@accepts(h.all_of(
    h.ref('AlertFilter'),
    h.required('id', 'emitter', 'parameters')
))
class AlertFilterCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Creating alert filter'

    def describe(self, alertfilter):
        return TaskDescription('Creating alert filter {name}', name=alertfilter.get('name', '') if alertfilter else '')

    def verify(self, alertfilter):
        # No resources claimed during verification.
        return []

    def run(self, alertfilter):
        id = self.datastore.insert('alert.filters', alertfilter)
        normalize(alertfilter, {
        # [chunk ends mid-call]
# NOTE(review): collapsed chunk, reformatted. Begins inside a configure task's run()
# (header not visible); ends at DirectoryServiceCreateTask.run() with no body.
        node = ConfigNode('directory', self.configstore)
        node.update(updated_params)
        self.dispatcher.emit_event('directoryservice.changed', {
            'operation': 'update'
        })
        try:
            self.dispatcher.call_sync('dscached.management.reload_config')
        except RpcException as e:
            raise TaskException(errno.ENXIO, 'Cannot reconfigure directory services: {0}'.format(str(e)))


@accepts(
    h.ref('Directory'),
    h.required('name', 'type'),
    h.forbidden('immutable')
)
@returns(str)
class DirectoryServiceCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return "Creating a directory"

    def describe(self, directory):
        return TaskDescription("Creating directory {name}", name=directory.get('name', ''))

    def verify(self, directory):
        return ['system']

    def run(self, directory):
        # [chunk ends before the method body]
# NOTE(review): collapsed chunk, reformatted. Begins inside a GID-allocation helper of
# an enclosing class; ends at a dangling @classmethod decorator.
        gid = None
        # First free GID in [start_gid, end_gid).
        for i in range(start_gid, end_gid):
            if not self.datastore.exists('groups', ('gid', '=', i)):
                gid = i
                break

        if not gid:
            raise RpcException(errno.ENOSPC, 'No free GIDs available')

        return gid


@description("Create an user in the system")
@accepts(h.all_of(
    h.ref('User'),
    h.required('username'),
    h.forbidden('builtin'),
    h.object(properties={'password': {'type': ['password', 'null']}}),
    h.any_of(
        h.required('password'),
        h.required('unixhash', 'nthash'),
        h.required('password_disabled')
    )
))
class UserCreateTask(Task):
    def __init__(self, dispatcher):
        super(UserCreateTask, self).__init__(dispatcher)
        self.id = None               # id of the created user, set by run()
        self.created_group = False   # whether run() had to create a primary group

    @classmethod
    # [chunk ends mid-decorator]
# NOTE(review): collapsed chunk, reformatted. Begins with provider methods of an
# enclosing class (header not visible); ends mid-verify().
    def get_temp_pubkeys(self):
        # Yields entries from the module-level temp_pubkeys collection.
        for k in temp_pubkeys:
            yield k

    @private
    def remove_temp_pubkey(self, key):
        # Best-effort removal: a missing key is silently ignored.
        try:
            temp_pubkeys.remove(key)
        except ValueError:
            pass


@description('Exchanges SSH keys with remote FreeNAS machine')
@accepts(h.all_of(
    h.ref('peer'),
    h.required('type', 'credentials'),
    h.forbidden('name')
))
class FreeNASPeerCreateTask(Task):
    @classmethod
    def early_describe(cls):
        return 'Exchanging SSH keys with remote host'

    def describe(self, peer, initial_credentials):
        return TaskDescription('Exchanging SSH keys with the remote {name}', name=q.get(peer, 'credentials.address', ''))

    def verify(self, peer, initial_credentials):
        credentials = peer['credentials']
        remote = credentials.get('address')
        username = initial_credentials.get('username')
        password = initial_credentials.get('password')
        # [chunk ends mid-method]
# NOTE(review): these methods belong to ImportiSCSIShareTask, whose class
# header lies outside this chunk; verify/run delegate to the base import task.
@classmethod
def early_describe(cls):
    return "Importing iSCSI share"

def describe(self, share):
    return TaskDescription("Importing iSCSI share {name}", name=share.get('name', '') if share else '')

def verify(self, share):
    return super(ImportiSCSIShareTask, self).verify(share)

def run(self, share):
    return super(ImportiSCSIShareTask, self).run(share)


@accepts(h.all_of(h.ref('share-iscsi-target'), h.required('id')))
@description('Creates iSCSI share target')
class CreateISCSITargetTask(Task):
    """Creates an iSCSI target record (keyed by 'id')."""

    @classmethod
    def early_describe(cls):
        return 'Creating iSCSI share target'

    def describe(self, target):
        return TaskDescription('Creating iSCSI share target {name}', name=target.get('id', '') if target else '')

    def verify(self, target):
        return ['system']

    def run(self, target):
        # NOTE(review): body truncated past this chunk.
        for i in target.get('extents', []):
# NOTE(review): tail of an enclosing method truncated above.
return True


@description('Provides information about calendar tasks')
class CalendarTasksProvider(Provider):
    """Read-only access to scheduler-managed calendar tasks."""

    @query('CalendarTask')
    @generator
    def query(self, filter=None, params=None):
        # Filtering/pagination is applied client-side over the scheduler's list.
        return q.query(self.dispatcher.call_sync('scheduler.management.query'), *(filter or []), stream=True, **(params or {}))


@accepts(
    h.all_of(h.ref('CalendarTask'), h.required('name'), h.no(h.required('status'))))
@returns(str)
@description('Creates a calendar task')
class CreateCalendarTask(Task):
    """Registers a new calendar task with the scheduler; returns its id."""

    @classmethod
    def early_describe(cls):
        return "Creating calendar task"

    def describe(self, task):
        return TaskDescription("Creating calendar task {name}", name=task['name'])

    def verify(self, task):
        return ['system']
    # NOTE(review): run() truncated past this chunk.
# Load and dump private key to make sure its in desired format # This is code ported from 9.3 and must be reviewed as it may very well be useless certificate['privatekey'] = export_privatekey(certificate['privatekey']) if certificate.get('csr'): certificate['csr_path'] = os.path.join( cert_path, '{0}.csr'.format(certificate['name'])) return certificate return self.datastore.query('crypto.certificates', *(filter or []), callback=extend, **(params or {})) @accepts(h.all_of( h.ref('crypto-certificate'), h.required('signedby', 'name', 'country', 'state', 'city', 'organization', 'email', 'common'), )) class CertificateInternalCreateTask(Task): def verify(self, certificate): errors = [] if self.datastore.exists('crypto.certificates', ('name', '=', certificate['name'])): errors.append(('name', errno.EEXIST, 'Certificate with given name already exists')) if not self.datastore.exists('crypto.certificates', ('id', '=', certificate['signedby'])): errors.append(('signedby', errno.EEXIST, 'Signing certificate does not exist')) if '"' in certificate['name']: errors.append( ('name', errno.EINVAL, 'You cannot issue a certificate with a `"` in its name'))
@accepts(str)
class DeleteISCSITargetTask(Task):
    """Deletes an iSCSI target and regenerates/reloads the ctl configuration."""

    def verify(self, id):
        if not self.datastore.exists("iscsi.targets", ("id", "=", id)):
            raise VerifyException(errno.ENOENT, "Target {0} does not exist".format(id))
        return ["service:ctl"]

    def run(self, id):
        self.datastore.delete("iscsi.targets", id)
        # Regenerate ctl config files and reload the service so the target is dropped.
        self.dispatcher.call_sync("etcd.generation.generate_group", "ctl")
        self.dispatcher.call_sync("services.reload", "ctl")
        self.dispatcher.dispatch_event("iscsi.target.changed", {"operation": "delete", "ids": [id]})


@accepts(h.all_of(h.ref("iscsi-auth-group"), h.required("type")))
class CreateISCSIAuthGroupTask(Task):
    """Creates an iSCSI auth group with defaulted optional fields."""

    def verify(self, auth_group):
        return ["service:ctl"]

    def run(self, auth_group):
        # NOTE(review): method may continue past this chunk.
        normalize(
            auth_group,
            {
                "id": self.datastore.collection_get_next_pkey("iscsi.auth", "ag"),
                "users": None,
                "initiators": None,
                "networks": None,
            },
        )
# NOTE(review): continuation of a raise RpcException(...) truncated above —
# maps HTTP request failures to RpcException and rejects error payloads.
    'Connection failed: {0}'.format(str(e)))
except requests.Timeout as e:
    raise RpcException(errno.ETIMEDOUT, 'Connection timed out: {0}'.format(str(e)))

if 'error' in data:
    raise RpcException(errno.EINVAL, data['message'])

return data


@description("Submits a new support ticket")
@accepts(
    h.all_of(
        h.ref('SupportTicket'),
        h.required('subject', 'description', 'category', 'type', 'username', 'password')))
class SupportSubmitTask(Task):
    """Files a support ticket with the remote ticketing service."""

    @classmethod
    def early_describe(cls):
        return 'Submitting ticket'

    def describe(self, ticket):
        return TaskDescription('Submitting ticket')

    def verify(self, ticket):
        return ['system']

    def run(self, ticket):
        # NOTE(review): body truncated past this chunk.
        try:
            version = self.dispatcher.call_sync('system.info.version')
            # e.g. "FreeNAS-9.x-..." -> "freenas-9.x"
            project_name = '-'.join(version.split('-')[:2]).lower()
# NOTE(review): tail of a provider query() truncated above.
return q.query(self.datastore.query_stream('crypto.certificates', callback=extend), *(filter or []), stream=True, **(params or {}))

@accepts()
@returns(h.object())
def get_country_codes(self):
    """Returns the mapping of supported country codes."""
    return COUNTRY_CODES


@accepts(
    h.all_of(
        h.ref('CryptoCertificate'),
        h.required('type', 'name', 'country', 'state', 'city', 'organization', 'email', 'common'),
        h.forbidden('certificate_path', 'privatekey_path'),
    ))
@description('Creates a certificate')
class CertificateCreateTask(Task):
    """Creates a certificate of the requested type."""

    @classmethod
    def early_describe(cls):
        return "Creating certificate"

    def describe(self, certificate):
        return TaskDescription("Creating certificate {name}", name=certificate['name'])

    def verify(self, certificate):
        # NOTE(review): method truncated past this chunk; defaults are filled in here.
        certificate['selfsigned'] = certificate.get('selfsigned', False)
        certificate['signing_ca_name'] = certificate.get(
@accepts(str)
class KerberosRealmDeleteTask(Task):
    """Deletes a Kerberos realm entry by id."""

    def verify(self, id):
        return ['system']

    def run(self, id):
        self.datastore.delete('kerberos.realms', id)
        self.dispatcher.dispatch_event('kerberos.realm.changed', {
            'operation': 'delete',
            'ids': [id]
        })


@accepts(h.all_of(
    h.ref('KerberosKeytab'),
    h.required('name', 'keytab')
))
class KerberosKeytabCreateTask(Task):
    """Stores a Kerberos keytab and regenerates the system keytab file."""

    def verify(self, keytab):
        return ['system']

    def run(self, keytab):
        # NOTE(review): method may continue past this chunk.
        if self.datastore.exists('kerberos.keytabs', ('name', '=', keytab['name'])):
            raise TaskException(errno.EEXIST, 'Keytab {0} already exists'.format(keytab['name']))

        id = self.datastore.insert('kerberos.keytabs', keytab)
        # Rebuild the merged system keytab from all stored entries.
        generate_keytab(self.datastore)
        self.dispatcher.dispatch_event('kerberos.keytab.changed', {
            'operation': 'create',
            'ids': [id]
        })
# NOTE(review): continuation of a q.query(...) call truncated above.
    *(filter or []), stream=True, **(params or {}))

@returns(h.array(str))
def peer_types(self):
    """Lists the subtype of every loaded 'peering' plugin."""
    result = []
    for p in self.dispatcher.plugins.values():
        if p.metadata and p.metadata.get('type') == 'peering':
            result.append(p.metadata.get('subtype'))

    return result


@description('Creates a peer entry')
@accepts(h.all_of(h.ref('Peer'), h.required('type', 'credentials')), h.ref('PeerInitialCredentials'))
class PeerCreateTask(Task):
    """Creates a peer entry after validating its type against loaded plugins."""

    @classmethod
    def early_describe(cls):
        return 'Creating peer entry'

    def describe(self, peer, initial_credentials=None):
        return TaskDescription('Creating peer entry {name}', name=peer.get('name', ''))

    def verify(self, peer, initial_credentials=None):
        # NOTE(review): method truncated past this chunk.
        if peer.get('type') not in self.dispatcher.call_sync(
                'peer.peer_types'):
            raise VerifyException(
                errno.EINVAL, 'Unknown peer type {0}'.format(peer.get('type')))
@description("Returns list of supported backup providers") @accepts() @returns(h.ref('BackupProviders')) def supported_providers(self): result = {} for p in list(self.dispatcher.plugins.values()): if p.metadata and p.metadata.get('type') == 'backup': result[p.metadata['method']] = {} return result @accepts(h.all_of( h.ref('Backup'), h.required('name', 'provider', 'dataset') )) @description('Creates a backup task') class CreateBackupTask(Task): @classmethod def early_describe(cls): return 'Creating backup task' def describe(self, backup): return TaskDescription('Creating backup task {name}', name=backup.get('name', '') if backup else '') def verify(self, backup): return ['system'] def run(self, backup): if 'id' in backup and self.datastore.exists('backup', ('id', '=', backup['id'])):
@description('Provides access to the alerts filters')
class AlertsFiltersProvider(Provider):
    """Query access to stored alert filters."""

    @query('alert-filter')
    def query(self, filter=None, params=None):
        # Non-streaming datastore.query() variant (no @generator here).
        return self.datastore.query(
            'alert.filters', *(filter or []), **(params or {})
        )


@description("Creates an Alert Filter")
@accepts(h.all_of(
    h.ref('alert-filter'),
    h.required('id')
))
class AlertFilterCreateTask(Task):
    """Persists a new alert filter."""

    @classmethod
    def early_describe(cls):
        return 'Creating alert filter'

    def describe(self, alertfilter):
        return TaskDescription('Creating alert filter {name}', name=alertfilter.get('name', '') if alertfilter else '')

    def verify(self, alertfilter):
        return []

    def run(self, alertfilter):
        # NOTE(review): body truncated past this chunk.
        id = self.datastore.insert('alert.filters', alertfilter)
# NOTE(review): tail of a collect() generator inside an emitters query(),
# truncated above — yields each emitter plugin with its current config.
config = self.dispatcher.call_sync(
    'alert.emitter.{0}.get_config'.format(
        p.metadata['name']))
yield {
    'id': p.metadata['id'],
    'name': p.metadata['name'],
    'config': config
}

return q.query(collect(), *(filter or []), **(params or {}))


@description("Creates an Alert Filter")
@accepts(
    h.all_of(h.ref('AlertFilter'), h.forbidden('id'), h.required('emitter', 'parameters')))
class AlertFilterCreateTask(Task):
    """Creates an alert filter and appends it to the configured filter order."""

    @classmethod
    def early_describe(cls):
        return 'Creating alert filter'

    def describe(self, alertfilter):
        return TaskDescription('Creating alert filter')

    def verify(self, alertfilter):
        return ['system']

    def run(self, alertfilter):
        # NOTE(review): body truncated past this chunk.
        normalize(alertfilter, {'clazz': None, 'predicates': []})
        order = self.configstore.get('alert.filter.order')
# NOTE(review): tail of a share-target path resolver truncated above — the
# first return belongs to a branch whose condition is outside this chunk;
# 'type' here shadows the builtin (pre-existing).
return os.path.dirname(os.path.join(root, path))
if type == 'DIRECTORY':
    return path
if type == 'FILE':
    return os.path.dirname(path)
raise RpcException(errno.EINVAL, 'Invalid share target type {0}'.format(type))


@description("Creates new share")
@accepts(
    h.all_of(
        h.ref('Share'),
        h.required('name', 'type', 'target_type', 'target_path', 'properties')
    ),
    h.one_of(
        h.ref('VolumeDatasetProperties'),
        None
    ),
    bool
)
class CreateShareTask(Task):
    """Creates a share; optional dataset properties and service enablement."""

    @classmethod
    def early_describe(cls):
        return "Creating share"

    def describe(self, share, dataset_properties=None, enable_service=False):
        return TaskDescription("Creating share {name}", name=share.get('name') if share else '')
    # NOTE(review): remaining methods truncated past this chunk.
class AlertsProvider(Provider):
    """Query, dismiss and emit access to alerts stored in the datastore."""

    @query('Alert')
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream('alerts', *(filter or []), **(params or {}))

    @private
    @accepts(str, str)
    @returns(h.one_of(h.ref('Alert'), None))
    def get_active_alert(self, cls, target):
        # Single active alert of class *cls* for *target*, or None.
        return self.datastore.query('alerts', ('clazz', '=', cls), ('target', '=', target), ('active', '=', True), single=True)

    @description("Dismisses an alert")
    def dismiss(self, id: int) -> None:
        alert = self.datastore.get_by_id('alerts', id)
        if not alert:
            raise RpcException(errno.ENOENT, 'Alert {0} not found'.format(id))

        if alert['dismissed']:
            raise RpcException(errno.ENOENT, 'Alert {0} is already dismissed'.format(id))

        if alert['one_shot']:
            # One-shot alerts are also deactivated when dismissed.
            alert['active'] = False

        alert.update({'dismissed': True, 'dismissed_at': datetime.utcnow()})
        self.datastore.update('alerts', id, alert)
        self.dispatcher.dispatch_event('alert.changed', {
            'operation': 'update',
            'ids': [id]
        })

    @description("Dismisses/Deletes all alerts from the database")
    def dismiss_all(self) -> None:
        alert_list = self.query([('dismissed', '=', False)])
        alert_ids = []
        for alert in alert_list:
            alert.update({
                'dismissed': True,
                'dismissed_at': datetime.utcnow()
            })
            self.datastore.update('alerts', alert['id'], alert)
            alert_ids.append(alert['id'])

        if alert_ids:
            # NOTE(review): 'ids' is [alert_ids] — a *nested* list, unlike the
            # flat list sent by dismiss(); confirm consumers expect this.
            self.dispatcher.dispatch_event('alert.changed', {
                'operation': 'update',
                'ids': [alert_ids]
            })

    @private
    @description("Emits an event for the provided alert")
    @accepts(h.all_of(h.ref('Alert'), h.required('clazz')))
    @returns(int)
    def emit(self, alert):
        # NOTE(review): method truncated past this chunk.
        cls = self.datastore.get_by_id('alert.classes', alert['clazz'])
        if not cls:
            raise RpcException(
                errno.ENOENT, 'Alert class {0} not found'.format(alert['clazz']))

        # Fill in defaults without clobbering caller-supplied values.
        normalize(
            alert, {
                'when': datetime.utcnow(),
                'dismissed': False,
                'active': True,
                'one_shot': False,
                'severity': cls['severity']
            })

        alert.update({
            'type': cls['type'],
            'subtype': cls['subtype'],
            'send_count': 0
        })

        id = self.datastore.insert('alerts', alert)
# NOTE(review): tail of AlertsProvider.emit() truncated above — announces the
# new alert and forwards it to alertd (queueing if alertd is not up yet).
self.dispatcher.dispatch_event('alert.changed', {
    'operation': 'create',
    'ids': [id]
})

try:
    self.dispatcher.call_sync('alertd.alert.emit', id)
except RpcException as err:
    if err.code == errno.ENOENT:
        # Alertd didn't start yet. Add alert to the pending queue
        pending_alerts.append(id)
    else:
        raise

return id

@private
@description("Cancels already scheduled alert")
def cancel(self, id: int) -> int:
    """Marks an active alert cancelled and notifies alertd (or queues the cancel)."""
    alert = self.datastore.get_by_id('alerts', id)
    if not alert:
        raise RpcException(errno.ENOENT, 'Alert {0} not found'.format(id))

    if not alert['active']:
        raise RpcException(errno.ENOENT, 'Alert {0} is already cancelled'.format(id))

    alert.update({'active': False, 'cancelled_at': datetime.utcnow()})
    self.datastore.update('alerts', id, alert)
    self.dispatcher.dispatch_event('alert.changed', {
        'operation': 'update',
        'ids': [id]
    })

    try:
        self.dispatcher.call_sync('alertd.alert.cancel', id)
    except RpcException as err:
        if err.code == errno.ENOENT:
            # Alertd didn't start yet. Add alert to the pending queue
            pending_cancels.append(id)
        else:
            raise

    return id

@description("Returns list of registered alerts")
def get_alert_classes(self) -> List[AlertClass]:
    """All alert class definitions from the datastore."""
    return self.datastore.query('alert.classes')

@description("Returns list of registered alert severities")
def get_alert_severities(self) -> Set[AlertSeverity]:
    """Distinct severities across all registered alert classes."""
    alert_classes = self.get_alert_classes()
    return {alert_class['severity'] for alert_class in alert_classes}
# NOTE(review): tail of an enclosing error handler truncated above.
raise OSError(str(e.err))


@description("Provides access to OS tunables")
class TunablesProvider(Provider):
    """Streaming query access to stored OS tunables."""

    @query('tunable')
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream('tunables', *(filter or []), **(params or {}))


@description("Adds Tunable")
@accepts(h.all_of(
    h.ref('tunable'),
    h.required('var', 'value', 'type'),
))
class TunableCreateTask(Task):
    """Creates a tunable after validating its value."""

    @classmethod
    def early_describe(cls):
        return "Creating Tunable"

    def describe(self, tunable):
        return TaskDescription("Creating Tunable {name}", name=tunable.get('var', '') if tunable else '')

    def verify(self, tunable):
        # NOTE(review): method truncated past this chunk; quotes are rejected
        # presumably because values land in shell-parsed config files — confirm.
        errors = ValidationException()
        if '"' in tunable['value'] or "'" in tunable['value']:
# NOTE(review): tail of an extend() callback plus the enclosing query()'s
# return, truncated above.
obj['schedule'] = None
return obj

return q.query(
    self.dispatcher.call_sync('scheduler.management.query'),
    *(filter or []),
    stream=True,
    **(params or {}),
    callback=extend
)


@accepts(
    h.all_of(
        h.ref('CalendarTask'),
        h.required('name'),
        h.no(h.required('status'))
    )
)
@returns(str)
@description('Creates a calendar task')
class CreateCalendarTask(Task):
    """Registers a new calendar task with the scheduler; returns its id."""

    @classmethod
    def early_describe(cls):
        return "Creating calendar task"

    def describe(self, task):
        return TaskDescription("Creating calendar task {name}", name=task['name'])

    def verify(self, task):
        return ['system']
    # NOTE(review): run() truncated past this chunk.
# NOTE(review): tail of an iSCSI target delete task truncated above.
    return ['service:ctl']

def run(self, id):
    self.datastore.delete('iscsi.targets', id)
    # Regenerate ctl config files and reload the service so the target is dropped.
    self.dispatcher.call_sync('etcd.generation.generate_group', 'ctl')
    self.dispatcher.call_sync('service.reload', 'ctl')
    self.dispatcher.dispatch_event('iscsi.target.changed', {
        'operation': 'delete',
        'ids': [id]
    })


@accepts(
    h.all_of(
        h.ref('share-iscsi-auth'),
        h.required('type')
    )
)
@description('Creates iSCSI auth group')
class CreateISCSIAuthGroupTask(Task):
    """Creates an iSCSI auth group."""

    @classmethod
    def early_describe(cls):
        return 'Creating iSCSI auth group'

    def describe(self, auth_group):
        return TaskDescription('Creating iSCSI auth group {name}', name=auth_group.get('id', ''))

    def verify(self, auth_group):
        return ['service:ctl']

    def run(self, auth_group):
        # NOTE(review): body truncated past this chunk.
from freenas.dispatcher.rpc import RpcException, description, accepts, returns
from freenas.dispatcher.rpc import SchemaHelper as h
from task import Provider, Task, VerifyException, TaskException, query
from lib.system import system, SubprocessException


class CalendarTasksProvider(Provider):
    """Pass-through query to the scheduler's task list."""

    @query('calendar-task')
    def query(self, filter=None, params=None):
        # Filter/params are forwarded to the scheduler rather than applied here.
        return self.dispatcher.call_sync('scheduler.management.query', filter, params)


@accepts(
    h.all_of(
        h.ref('calendar-task'),
        h.no(h.required('status'))
    )
)
@returns(str)
class CreateCalendarTask(Task):
    """Adds a calendar task via the scheduler; returns its id."""

    def verify(self, task):
        return ['system']

    def run(self, task):
        # NOTE(review): body truncated past this chunk.
        try:
            tid = self.dispatcher.call_sync('scheduler.management.add', task)
        except RpcException:
            # NOTE(review): re-raising unchanged makes this try/except a no-op.
            raise

        self.dispatcher.dispatch_event('calendar_task.changed', {
            'operation': 'create',
# NOTE(review): tail of an HTTP helper truncated above — maps request failures
# to RpcException and rejects error payloads.
except requests.ConnectionError as e:
    raise RpcException(errno.ENOTCONN, 'Connection failed: {0}'.format(str(e)))
except requests.Timeout as e:
    raise RpcException(errno.ETIMEDOUT, 'Connection timed out: {0}'.format(str(e)))

if 'error' in data:
    raise RpcException(errno.EINVAL, data['message'])

return data


@description("Submits a new support ticket")
@accepts(
    h.all_of(
        h.ref('support-ticket'),
        h.required('subject', 'description', 'category', 'type', 'username', 'password'))
)
class SupportSubmitTask(Task):
    """Files a support ticket with the remote ticketing service."""

    @classmethod
    def early_describe(cls):
        return 'Submitting ticket'

    def describe(self, ticket):
        return TaskDescription('Submitting ticket')

    def verify(self, ticket):
        return ['system']

    def run(self, ticket):
        # NOTE(review): body truncated past this chunk.
        try:
            version = self.dispatcher.call_sync('system.info.version')
@description('Provides access to the alerts filters')
class AlertsFiltersProvider(Provider):
    """Streaming query access to stored alert filters."""

    @query('alert-filter')
    @generator
    def query(self, filter=None, params=None):
        return self.datastore.query_stream(
            'alert.filters', *(filter or []), **(params or {})
        )


@description("Creates an Alert Filter")
@accepts(h.all_of(
    h.ref('alert-filter'),
    h.required('id', 'emitter', 'parameters')
))
class AlertFilterCreateTask(Task):
    """Persists a new alert filter."""

    @classmethod
    def early_describe(cls):
        return 'Creating alert filter'

    def describe(self, alertfilter):
        return TaskDescription('Creating alert filter {name}', name=alertfilter.get('name', '') if alertfilter else '')

    def verify(self, alertfilter):
        return []

    def run(self, alertfilter):
        # NOTE(review): body truncated past this chunk.
        id = self.datastore.insert('alert.filters', alertfilter)
        normalize(alertfilter, {