Example #1
class SmartService(SystemServiceService):
    class Config:
        service = "smartd"
        service_model = "smart"
        datastore_extend = "smart.smart_extend"
        datastore_prefix = "smart_"

    @private
    async def smart_extend(self, smart):
        smart["powermode"] = smart["powermode"].upper()
        smart["email"] = smart["email"].split(",")
        return smart

    @accepts(
        Dict('smart_update',
             Int('interval'),
             Str('powermode', enum=['NEVER', 'SLEEP', 'STANDBY', 'IDLE']),
             Int('difference'),
             Int('informational'),
             Int('critical'),
             List('email',
                  validators=[Unique()],
                  items=[Str('email', validators=[Email()])]),
             update=True))
    async def do_update(self, data):
        """
        Update SMART Service Configuration.

        `interval` is an integer value in minutes which defines how often smartd activates to check if any tests
        are configured to run.

        `critical`, `informational` and `difference` are integer values on which alerts for SMART are configured if
        the disk's temperature crosses the assigned threshold for each respective attribute. They default to 0, which
        indicates they are disabled.

        Email of log level LOG_CRIT is issued when disk temperature crosses `critical`.

        Email of log level LOG_INFO is issued when disk temperature crosses `informational`.

        If the temperature of a disk changes by `difference` degrees Celsius since the last report, SMART reports this.

        `email` is a list of valid emails to receive SMART alerts.
        """
        old = await self.config()

        new = old.copy()
        new.update(data)

        new["powermode"] = new["powermode"].lower()
        new["email"] = ",".join([email.strip() for email in new["email"]])

        await self._update_service(old, new)

        if new["powermode"] != old["powermode"]:
            await self.middleware.call("service.restart", "collectd",
                                       {"onetime": False})

        await self.smart_extend(new)

        return new
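
A minimal caller-side sketch of the method above, assuming the middlewared websocket client (`middlewared.client.Client`) is available on the host; the values shown are illustrative, not recommendations:

# Hypothetical usage sketch: invoke smart.update through the middleware client.
# Assumes a local middlewared instance and that the client package is installed.
from middlewared.client import Client

with Client() as c:
    config = c.call('smart.update', {
        'interval': 30,                  # smartd checks for scheduled tests every 30 minutes
        'powermode': 'NEVER',            # always check, regardless of disk power state
        'critical': 50,                  # LOG_CRIT email above 50 degrees Celsius
        'informational': 40,             # LOG_INFO email above 40 degrees Celsius
        'difference': 0,                 # 0 disables the temperature-change alert
        'email': ['admin@example.com'],
    })
    print(config['powermode'])           # the returned config is already extended: 'NEVER'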
Example #2
class SNMPService(SystemServiceService):
    class Config:
        service = 'snmp'
        datastore_prefix = 'snmp_'

    @accepts(
        Dict('snmp_update',
             Str('location'),
             Str('contact',
                 validators=[Or(Email(), Match(r'^[-_a-zA-Z0-9\s]*$'))]),
             Bool('traps'),
             Bool('v3'),
             Str('community',
                 validators=[Match(r'^[-_.a-zA-Z0-9\s]*$')],
                 default='public'),
             Str('v3_username'),
             Str('v3_authtype', enum=['', 'MD5', 'SHA']),
             Str('v3_password'),
             Str('v3_privproto', enum=[None, 'AES', 'DES'], null=True),
             Str('v3_privpassphrase'),
             Int('loglevel', validators=[Range(min=0, max=7)]),
             Str('options'),
             update=True))
    async def do_update(self, data):
        old = await self.config()

        new = old.copy()
        new.update(data)

        verrors = ValidationErrors()

        if not new['v3'] and not new['community']:
            verrors.add('snmp_update.community',
                        'This field is required when SNMPv3 is disabled')

        if new['v3_authtype'] and not new['v3_password']:
            verrors.add(
                'snmp_update.v3_password',
                'This field is required when SNMPv3 auth type is specified',
            )

        if new['v3_password'] and len(new['v3_password']) < 8:
            verrors.add('snmp_update.v3_password',
                        'Password must contain at least 8 characters')

        if new['v3_privproto'] and not new['v3_privpassphrase']:
            verrors.add(
                'snmp_update.v3_privpassphrase',
                'This field is required when SNMPv3 private protocol is specified',
            )

        if verrors:
            raise verrors

        await self._update_service(old, new)

        return new
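
When any of the checks above fail, the accumulated `ValidationErrors` is raised and propagates to the caller. A minimal sketch, assuming the middlewared client re-exports `ValidationErrors` as it does in the project's test suite:

# Hypothetical usage sketch for snmp.update. With 'v3' enabled, the validation
# above requires 'v3_password' (at least 8 characters) once 'v3_authtype' is set,
# and 'v3_privpassphrase' once 'v3_privproto' is set.
from middlewared.client import Client, ValidationErrors

with Client() as c:
    try:
        c.call('snmp.update', {
            'v3': True,
            'v3_username': 'monitor',
            'v3_authtype': 'SHA',
            'v3_password': 'correct-horse-battery',
            'location': 'rack 12',
            'contact': 'noc@example.com',
        })
    except ValidationErrors as e:
        # Each error carries its schema path, e.g. 'snmp_update.v3_password'.
        print(e)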
Example #3
class SNMPService(SystemServiceService):
    class Config:
        service = "snmp"
        datastore_prefix = "snmp_"

    @accepts(
        Dict(
            'snmp_update',
            Str('location'),
            Str('contact',
                validators=[Or(Email(), Match(r'^[-_a-zA-Z0-9\s]+$'))]),
            Bool('traps'),
            Bool('v3'),
            Str('community', validators=[Match(r'^[-_.a-zA-Z0-9\s]*$')]),
            Str('v3_username'),
            Str('v3_authtype', enum=['', 'MD5', 'SHA']),
            Str('v3_password'),
            Str('v3_privproto', enum=[None, 'AES', 'DES']),
            Str('v3_privpassphrase'),
            Int('loglevel', validators=[Range(min=0, max=7)]),
            Str('options'),
        ))
    async def update(self, data):
        old = await self.config()

        new = old.copy()
        new.update(data)

        verrors = ValidationErrors()

        if not data["v3"] and not data["community"]:
            verrors.add("snmp_update.community",
                        "This field is required when SNMPv3 is disabled")

        if data["v3_authtype"] and not data["v3_password"]:
            verrors.add(
                "snmp_update.v3_password",
                "This field is requires when SNMPv3 auth type is specified")

        if data["v3_password"] and len(data["v3_password"]) < 8:
            verrors.add("snmp_update.v3_password",
                        "Password must contain at least 8 characters")

        if data["v3_privproto"] and not data["v3_privpassphrase"]:
            verrors.add(
                "snmp_update.v3_privpassphrase",
                "This field is requires when SNMPv3 private protocol is specified"
            )

        if verrors:
            raise verrors

        await self._update_service(old, new)

        return new
Example #4
File: smart.py Project: jiangge/freenas
class SmartService(SystemServiceService):
    class Config:
        service = "smartd"
        service_model = "smart"
        datastore_extend = "smart.smart_extend"
        datastore_prefix = "smart_"

    @private
    async def smart_extend(self, smart):
        smart["powermode"] = smart["powermode"].upper()
        smart["email"] = smart["email"].split(",")
        return smart

    @accepts(
        Dict('smart_update',
             Int('interval'),
             Str('powermode', enum=['NEVER', 'SLEEP', 'STANDBY', 'IDLE']),
             Int('difference'),
             Int('informational'),
             Int('critical'),
             List('email',
                  validators=[Unique()],
                  items=[Str('email', validators=[Email()])]),
             update=True))
    async def do_update(self, data):
        old = await self.config()

        new = old.copy()
        new.update(data)

        new["powermode"] = new["powermode"].lower()
        new["email"] = ",".join([email.strip() for email in new["email"]])

        await self._update_service(old, new)

        if new["powermode"] != old["powermode"]:
            await self.middleware.call("service.restart", "collectd",
                                       {"onetime": False})

        await self.smart_extend(new)

        return new
Example #5
class SmartService(SystemServiceService):
    class Config:
        service = "smartd"
        service_model = "smart"
        datastore_extend = "smart.smart_extend"
        datastore_prefix = "smart_"

    @private
    async def smart_extend(self, smart):
        smart["powermode"] = smart["powermode"].upper()
        smart["email"] = list(filter(None, re.split(r"\s+", smart["email"])))
        return smart

    @accepts(
        Dict(
            'smart_update',
            Int('interval'),
            Str('powermode', enum=['NEVER', 'SLEEP', 'STANDBY', 'IDLE']),
            Int('difference'),
            Int('informational'),
            Int('critical'),
            List('email', items=[Str('email', validators=[Email()])]),
        ))
    async def update(self, data):
        old = await self.config()

        new = old.copy()
        new.update(data)

        new["powermode"] = new["powermode"].lower()
        new["email"] = " ".join(new["email"])

        await self._update_service(old, new)

        await self.smart_extend(new)

        return new
Example #6
class UPSService(SystemServiceService):
    DRIVERS_AVAILABLE = set(os.listdir(DRIVER_BIN_DIR))

    class Config:
        datastore = 'services.ups'
        datastore_prefix = 'ups_'
        datastore_extend = 'ups.ups_config_extend'
        service = 'ups'
        service_verb = 'restart'

    @private
    async def ups_config_extend(self, data):
        data['mode'] = data['mode'].upper()
        data['shutdown'] = data['shutdown'].upper()
        data['toemail'] = [v for v in data['toemail'].split(';') if v]
        return data

    @accepts()
    async def port_choices(self):
        ports = [x for x in glob.glob('/dev/cua*') if x.find('.') == -1]
        ports.extend(glob.glob('/dev/ugen*'))
        ports.extend(glob.glob('/dev/uhid*'))
        return ports

    @accepts()
    def driver_choices(self):
        ups_choices = {}
        if os.path.exists("/conf/base/etc/local/nut/driver.list"):
            with open('/conf/base/etc/local/nut/driver.list', 'rb') as f:
                d = f.read().decode('utf8', 'ignore')
            r = io.StringIO()
            for line in re.sub(r'[ \t]+', ' ', d, flags=re.M).split('\n'):
                r.write(line.strip() + '\n')
            r.seek(0)
            reader = csv.reader(r, delimiter=' ', quotechar='"')
            for row in reader:
                if len(row) == 0 or row[0].startswith('#'):
                    continue
                if row[-2] == '#':
                    last = -3
                else:
                    last = -1
                driver = row[last].split()[0]
                if driver not in self.DRIVERS_AVAILABLE:
                    continue
                if row[last].find(' (experimental)') != -1:
                    row[last] = row[last].replace(' (experimental)',
                                                  '').strip()
                for i, field in enumerate(list(row)):
                    row[i] = field
                ups_choices['$'.join(
                    [row[last],
                     row[3]])] = '%s (%s)' % (' '.join(row[0:last]), row[last])
        return ups_choices

    @private
    async def validate_data(self, data, schema):
        verrors = ValidationErrors()

        driver = data.get('driver')
        if driver:
            if driver not in (
                    await self.middleware.call('ups.driver_choices')).keys():
                verrors.add(
                    f'{schema}.driver',
                    'Driver selected does not match local machine\'s driver list'
                )

        identifier = data['identifier']
        if identifier:
            if not re.search(r'^[a-z0-9\.\-_]+$', identifier, re.I):
                verrors.add(f'{schema}.identifier',
                            'Use alphanumeric characters, ".", "-" and "_"')

        for field in [
                field for field in ['monpwd', 'monuser'] if data.get(field)
        ]:
            if re.search(r'[ #]', data[field], re.I):
                verrors.add(f'{schema}.{field}',
                            'Spaces or number signs are not allowed')

        mode = data.get('mode')
        if mode:
            if mode == 'MASTER':
                if not data.get('port'):
                    verrors.add(f'{schema}.port', 'This field is required')
            else:
                if not data.get('remotehost'):
                    verrors.add(f'{schema}.remotehost',
                                'This field is required')

        to_emails = data.get('toemail')
        if to_emails:
            data['toemail'] = ';'.join(to_emails)
        else:
            data['toemail'] = ''

        data['mode'] = data['mode'].lower()
        data['shutdown'] = data['shutdown'].lower()

        return verrors, data

    @accepts(
        Dict(
            'ups_update',
            Bool('emailnotify'),
            Bool('powerdown'),
            Bool('rmonitor'),
            Int('nocommwarntime'),
            Int('remoteport'),
            Int('shutdowntimer'),
            Str('description'),
            Str('driver'),
            Str('extrausers'),
            Str('identifier'),
            Str('mode', enum=['MASTER', 'SLAVE']),
            Str('monpwd'),
            Str('monuser'),
            Str('options'),
            Str('optionsupsd'),
            Str('port'),
            Str('remotehost'),
            Str('shutdown', enum=['LOWBATT', 'BATT']),
            Str('shutdowncmd'),
            Str('subject'),
            List('toemail', items=[Str('email', validators=[Email()])]),
        ))
    async def do_update(self, data):
        config = await self.config()
        old_config = config.copy()
        config.update(data)
        verrors, config = await self.validate_data(config, 'ups_update')
        if verrors:
            raise verrors

        old_config['mode'] = old_config['mode'].lower()
        old_config['shutdown'] = old_config['shutdown'].lower()
        old_config['toemail'] = ';'.join(
            old_config['toemail']) if old_config['toemail'] else ''

        if len(set(old_config.items()) ^ set(config.items())) > 0:
            await self._update_service(old_config, config)

        return await self.config()
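
A minimal caller-side sketch for the UPS service above, under the same client assumption. `validate_data` requires `port` in MASTER mode and `remotehost` in SLAVE mode, and `driver` must be one of the keys returned by `ups.driver_choices`:

# Hypothetical usage sketch for ups.update; names and values are illustrative.
from middlewared.client import Client

with Client() as c:
    drivers = c.call('ups.driver_choices')   # mapping of 'driver$...' keys to descriptions
    ports = c.call('ups.port_choices')       # e.g. ['/dev/ugen0.2', '/dev/uhid0']

    c.call('ups.update', {
        'mode': 'MASTER',                    # MASTER requires 'port'; SLAVE requires 'remotehost'
        'identifier': 'ups0',                # alphanumeric plus '.', '-', '_'
        'driver': next(iter(drivers)),       # pick any available driver for the sketch
        'port': ports[0] if ports else '/dev/ugen0.2',
        'toemail': ['admin@example.com'],
        'emailnotify': True,
    })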
Example #7
class UserService(CRUDService):
    class Config:
        datastore = 'account.bsdusers'
        datastore_extend = 'user.user_extend'
        datastore_prefix = 'bsdusr_'

    @private
    async def user_extend(self, user):

        # Normalize email, empty is really null
        if user['email'] == '':
            user['email'] = None

        # Get group membership
        user['groups'] = [
            gm['group']['id'] for gm in await self.middleware.call(
                'datastore.query', 'account.bsdgroupmembership', [(
                    'user', '=', user['id'])], {'prefix': 'bsdgrpmember_'})
        ]

        # Get authorized keys
        keysfile = f'{user["home"]}/.ssh/authorized_keys'
        user['sshpubkey'] = None
        if os.path.exists(keysfile):
            try:
                with open(keysfile, 'r') as f:
                    user['sshpubkey'] = f.read()
            except Exception:
                pass
        return user

    @private
    async def user_compress(self, user):
        if 'local' in user:
            user.pop('local')
        if 'id_type_both' in user:
            user.pop('id_type_both')
        return user

    @filterable
    async def query(self, filters=None, options=None):
        """
        Query users with `query-filters` and `query-options`. As a performance optimization, only local users
        will be queried by default.

        Users from directory services such as NIS, LDAP, or Active Directory will be included in query results
        if the option `{'extra': {'search_dscache': True}}` is specified.
        """
        if not filters:
            filters = []

        options = options or {}
        options['extend'] = self._config.datastore_extend
        options['extend_context'] = self._config.datastore_extend_context
        options['prefix'] = self._config.datastore_prefix

        datastore_options = options.copy()
        datastore_options.pop('count', None)
        datastore_options.pop('get', None)

        extra = options.get('extra', {})
        dssearch = extra.pop('search_dscache', False)

        if dssearch:
            return await self.middleware.call('dscache.query', 'USERS',
                                              filters, options)

        result = await self.middleware.call('datastore.query',
                                            self._config.datastore, [],
                                            datastore_options)
        for entry in result:
            entry.update({'local': True, 'id_type_both': False})
        return await self.middleware.run_in_thread(filter_list, result,
                                                   filters, options)

    @accepts(
        Dict(
            'user_create',
            Int('uid'),
            Str('username', required=True, max_length=16),
            Int('group'),
            Bool('group_create', default=False),
            Str('home', default='/nonexistent'),
            Str('home_mode', default='755'),
            Str('shell', default='/bin/csh' if IS_FREEBSD else '/usr/bin/zsh'),
            Str('full_name', required=True),
            Str('email', validators=[Email()], null=True, default=None),
            Str('password', private=True),
            Bool('password_disabled', default=False),
            Bool('locked', default=False),
            Bool('microsoft_account', default=False),
            Bool('smb', default=True),
            Bool('sudo', default=False),
            Str('sshpubkey', null=True, max_length=None),
            List('groups', default=[]),
            Dict('attributes', additional_attrs=True),
            register=True,
        ))
    async def do_create(self, data):
        """
        Create a new user.

        If `uid` is not provided it is automatically filled with the next one available.

        `group` is required if `group_create` is false.

        `password` is required if `password_disabled` is false.

        Available choices for `shell` can be retrieved with `user.shell_choices`.

        `attributes` is a general-purpose object for storing arbitrary user information.

        `smb` specifies whether the user should be allowed access to SMB shares. The user
        will also be added to the `builtin_users` group automatically.
        """
        verrors = ValidationErrors()

        if (not data.get('group') and not data.get('group_create')) or (
                data.get('group') is not None and data.get('group_create')):
            verrors.add(
                'user_create.group',
                f'Enter either a group name or create a new group to '
                'continue.', errno.EINVAL)

        await self.__common_validation(verrors, data, 'user_create')

        if data.get('sshpubkey') and not data['home'].startswith('/mnt'):
            verrors.add(
                'user_create.sshpubkey',
                'The home directory is not writable. Leave this field blank.')

        verrors.check()

        groups = data.pop('groups')
        create = data.pop('group_create')

        if create:
            group = await self.middleware.call(
                'group.query', [('group', '=', data['username'])])
            if group:
                group = group[0]
            else:
                group = await self.middleware.call('group.create', {
                    'name': data['username'],
                    'smb': False
                })
                group = (await self.middleware.call('group.query',
                                                    [('id', '=', group)]))[0]
            data['group'] = group['id']
        else:
            group = await self.middleware.call('group.query',
                                               [('id', '=', data['group'])])
            if not group:
                raise CallError(f'Group {data["group"]} not found')
            group = group[0]

        if data['smb']:
            groups.append(
                (await self.middleware.call('group.query',
                                            [('group', '=', 'builtin_users')],
                                            {'get': True}))['id'])

        # Is this a new directory or not? Let's not nuke existing directories,
        # e.g. /, /root, /mnt/tank/my-dataset, etc ;).
        new_homedir = False
        home_mode = data.pop('home_mode')
        if data['home'] and data['home'] != '/nonexistent':
            try:
                try:
                    os.makedirs(data['home'], mode=int(home_mode, 8))
                    new_homedir = True
                    await self.middleware.call(
                        'filesystem.setperm', {
                            'path': data['home'],
                            'mode': home_mode,
                            'uid': data['uid'],
                            'gid': group['gid'],
                            'options': {
                                'stripacl': True
                            }
                        })
                except FileExistsError:
                    if not os.path.isdir(data['home']):
                        raise CallError(
                            'Path for home directory already '
                            'exists and is not a directory', errno.EEXIST)

                    # If it exists, ensure the user is owner.
                    await self.middleware.call(
                        'filesystem.chown', {
                            'path': data['home'],
                            'uid': data['uid'],
                            'gid': group['gid'],
                        })
                except OSError as oe:
                    raise CallError('Failed to create the home directory '
                                    f'({data["home"]}) for user: {oe}')
            except Exception:
                if new_homedir:
                    shutil.rmtree(data['home'])
                raise

        if not data.get('uid'):
            data['uid'] = await self.get_next_uid()

        pk = None  # Make sure pk exists to rollback in case of an error
        data = await self.user_compress(data)
        try:
            await self.__set_password(data)
            sshpubkey = data.pop('sshpubkey',
                                 None)  # datastore does not have sshpubkey

            pk = await self.middleware.call('datastore.insert',
                                            'account.bsdusers', data,
                                            {'prefix': 'bsdusr_'})

            await self.__set_groups(pk, groups)

        except Exception:
            if pk is not None:
                await self.middleware.call('datastore.delete',
                                           'account.bsdusers', pk)
            if new_homedir:
                # Be as atomic as possible when creating the user if
                # commands failed to execute cleanly.
                shutil.rmtree(data['home'])
            raise

        await self.middleware.call('service.reload', 'user')

        if data['smb']:
            await self.__set_smbpasswd(data['username'])

        if os.path.exists(data['home']):
            for f in os.listdir(SKEL_PATH):
                if f.startswith('dot'):
                    dest_file = os.path.join(data['home'], f[3:])
                else:
                    dest_file = os.path.join(data['home'], f)
                if not os.path.exists(dest_file):
                    shutil.copyfile(os.path.join(SKEL_PATH, f), dest_file)
                    await self.middleware.call(
                        'filesystem.chown', {
                            'path': dest_file,
                            'uid': data['uid'],
                            'gid': group['gid'],
                            'options': {
                                'recursive': True
                            }
                        })

            data['sshpubkey'] = sshpubkey
            try:
                await self.update_sshpubkey(data['home'], data, group['group'])
            except PermissionError as e:
                self.logger.warn('Failed to update authorized keys',
                                 exc_info=True)
                raise CallError(f'Failed to update authorized keys: {e}')

        return pk

    @accepts(
        Int('id'),
        Patch(
            'user_create',
            'user_update',
            ('attr', {
                'update': True
            }),
            ('rm', {
                'name': 'group_create'
            }),
        ),
    )
    async def do_update(self, pk, data):
        """
        Update attributes of an existing user.
        """

        user = await self._get_instance(pk)

        verrors = ValidationErrors()

        if 'group' in data:
            group = await self.middleware.call('datastore.query',
                                               'account.bsdgroups',
                                               [('id', '=', data['group'])])
            if not group:
                verrors.add('user_update.group',
                            f'Group {data["group"]} not found', errno.ENOENT)
            group = group[0]
        else:
            group = user['group']
            user['group'] = group['id']

        await self.__common_validation(verrors, data, 'user_update', pk=pk)

        home = data.get('home') or user['home']
        has_home = home != '/nonexistent'
        # root user (uid 0) is an exception to the rule
        if data.get('sshpubkey'
                    ) and not home.startswith('/mnt') and user['uid'] != 0:
            verrors.add('user_update.sshpubkey',
                        'Home directory is not writable, leave this blank')

        # Do not allow attributes to be changed for builtin user
        if user['builtin']:
            for i in ('group', 'home', 'home_mode', 'uid', 'username'):
                if i in data:
                    verrors.add(f'user_update.{i}',
                                'This attribute cannot be changed')

        verrors.check()

        must_change_pdb_entry = False
        for k in ('username', 'password', 'locked'):
            new_val = data.get(k)
            old_val = user.get(k)
            if new_val is not None and old_val != new_val:
                if k == 'username':
                    try:
                        await self.middleware.call("smb.remove_passdb_user",
                                                   old_val)
                    except Exception:
                        self.logger.debug(
                            "Failed to remove passdb entry for user [%s]",
                            old_val,
                            exc_info=True)

                must_change_pdb_entry = True

        # Copy the home directory if it changed
        if (has_home and 'home' in data and data['home'] != user['home']
                and not data['home'].startswith(f'{user["home"]}/')):
            home_copy = True
            home_old = user['home']
        else:
            home_copy = False

        # After this point user dict has values from data
        user.update(data)

        if home_copy and not os.path.isdir(user['home']):
            try:
                os.makedirs(user['home'])
                await self.middleware.call(
                    'filesystem.chown', {
                        'path': user['home'],
                        'uid': user['uid'],
                        'gid': group['bsdgrp_gid'],
                    })
            except OSError:
                self.logger.warn('Failed to chown homedir', exc_info=True)
            if not os.path.isdir(user['home']):
                raise CallError(f'{user["home"]} is not a directory')

        home_mode = user.pop('home_mode', None)
        if user['builtin']:
            home_mode = None

        def set_home_mode():
            if home_mode is not None:
                try:
                    # Strip ACL before chmod. This is required when aclmode = restricted
                    setfacl = subprocess.run(
                        ['/bin/setfacl', '-b', user['home']], check=False)
                    if setfacl.returncode != 0 and setfacl.stderr:
                        self.logger.debug('Failed to strip ACL: %s',
                                          setfacl.stderr.decode())
                    os.chmod(user['home'], int(home_mode, 8))
                except OSError:
                    self.logger.warn('Failed to set homedir mode',
                                     exc_info=True)

        try:
            update_sshpubkey_args = [
                home_old if home_copy else user['home'],
                user,
                group['bsdgrp_group'],
            ]
            await self.update_sshpubkey(*update_sshpubkey_args)
        except PermissionError as e:
            self.logger.warn('Failed to update authorized keys', exc_info=True)
            raise CallError(f'Failed to update authorized keys: {e}')
        else:
            if user['uid'] == 0:
                if await self.middleware.call('failover.licensed'):
                    try:
                        await self.middleware.call('failover.call_remote',
                                                   'user.update_sshpubkey',
                                                   update_sshpubkey_args)
                    except Exception:
                        self.logger.error(
                            'Failed to sync root ssh pubkey to standby node',
                            exc_info=True)

        if home_copy:

            def do_home_copy():
                try:
                    command = f"/bin/cp -a {shlex.quote(home_old) + '/'} {shlex.quote(user['home'] + '/')}"
                    subprocess.run(
                        ["/usr/bin/su", "-", user["username"], "-c", command],
                        check=True)
                except subprocess.CalledProcessError as e:
                    self.logger.warn(f"Failed to copy homedir: {e}")
                set_home_mode()

            asyncio.ensure_future(self.middleware.run_in_thread(do_home_copy))
        elif has_home:
            asyncio.ensure_future(self.middleware.run_in_thread(set_home_mode))

        user.pop('sshpubkey', None)
        await self.__set_password(user)

        if 'groups' in user:
            groups = user.pop('groups')
            await self.__set_groups(pk, groups)

        user = await self.user_compress(user)
        await self.middleware.call('datastore.update', 'account.bsdusers', pk,
                                   user, {'prefix': 'bsdusr_'})

        await self.middleware.call('service.reload', 'user')
        if user['smb'] and must_change_pdb_entry:
            await self.__set_smbpasswd(user['username'])

        return pk

    @accepts(Int('id'), Dict('options', Bool('delete_group', default=True)))
    async def do_delete(self, pk, options=None):
        """
        Delete user `id`.

        The `delete_group` option deletes the user primary group if it is not being used by
        any other user.
        """

        user = await self._get_instance(pk)

        if user['builtin']:
            raise CallError('Cannot delete a built-in user', errno.EINVAL)

        if options['delete_group'] and not user['group']['bsdgrp_builtin']:
            count = await self.middleware.call(
                'datastore.query', 'account.bsdgroupmembership',
                [('group', '=', user['group']['id'])], {
                    'prefix': 'bsdgrpmember_',
                    'count': True
                })
            count2 = await self.middleware.call(
                'datastore.query', 'account.bsdusers',
                [('group', '=', user['group']['id']), ('id', '!=', pk)], {
                    'prefix': 'bsdusr_',
                    'count': True
                })
            if count == 0 and count2 == 0:
                try:
                    await self.middleware.call('group.delete',
                                               user['group']['id'])
                except Exception:
                    self.logger.warn(
                        f'Failed to delete primary group of {user["username"]}',
                        exc_info=True)

        if user['smb']:
            await run('smbpasswd', '-x', user['username'], check=False)

        # TODO: add a hook in CIFS service
        cifs = await self.middleware.call('datastore.query', 'services.cifs',
                                          [], {'prefix': 'cifs_srv_'})
        if cifs:
            cifs = cifs[0]
            if cifs['guest'] == user['username']:
                await self.middleware.call('datastore.update', 'services.cifs',
                                           cifs['id'], {'guest': 'nobody'},
                                           {'prefix': 'cifs_srv_'})

        await self.middleware.call('datastore.delete', 'account.bsdusers', pk)
        await self.middleware.call('service.reload', 'user')

        return pk

    @accepts(Int('user_id', default=None, null=True))
    def shell_choices(self, user_id=None):
        """
        Return the available shell choices to be used in `user.create` and `user.update`.

        If `user_id` is provided, the shell choices are filtered to those the given user is permitted to use.
        """
        user = self.middleware.call_sync('user.get_instance',
                                         user_id) if user_id else None
        with open('/etc/shells', 'r') as f:
            shells = [x.rstrip() for x in f.readlines() if x.startswith('/')]
        return {
            shell: os.path.basename(shell)
            for shell in (shells + ['/usr/sbin/nologin'])
            if 'netcli' not in shell or (user and user['username'] == 'root')
        }

    @accepts(
        Dict('get_user_obj', Str('username', default=None),
             Int('uid', default=None)))
    async def get_user_obj(self, data):
        """
        Return a dictionary containing information from struct passwd for the user specified by either
        username or uid. Bypasses the user cache.
        """
        return await self.middleware.call('dscache.get_uncached_user',
                                          data['username'], data['uid'])

    @item_method
    @accepts(
        Int('id'),
        Str('key'),
        Any('value'),
    )
    async def set_attribute(self, pk, key, value):
        """
        Set user general purpose `attributes` dictionary `key` to `value`.

        e.g. Setting key="foo" value="bar" will result in {"attributes": {"foo": "bar"}}
        """
        user = await self._get_instance(pk)

        user['attributes'][key] = value

        await self.middleware.call('datastore.update', 'account.bsdusers', pk,
                                   {'attributes': user['attributes']},
                                   {'prefix': 'bsdusr_'})

        return True

    @item_method
    @accepts(
        Int('id'),
        Str('key'),
    )
    async def pop_attribute(self, pk, key):
        """
        Remove user general purpose `attributes` dictionary `key`.
        """
        user = await self._get_instance(pk)

        if key in user['attributes']:
            user['attributes'].pop(key)

            await self.middleware.call('datastore.update', 'account.bsdusers',
                                       pk, {'attributes': user['attributes']},
                                       {'prefix': 'bsdusr_'})
            return True
        else:
            return False

    @accepts()
    async def get_next_uid(self):
        """
        Get the next available/free uid.
        """
        last_uid = 999
        for i in await self.middleware.call('datastore.query',
                                            'account.bsdusers',
                                            [('builtin', '=', False)], {
                                                'order_by': ['uid'],
                                                'prefix': 'bsdusr_'
                                            }):
            # If the difference between the last uid and the current one is
            # bigger than 1, it means we have a gap and can use it.
            if i['uid'] - last_uid > 1:
                return last_uid + 1
            last_uid = i['uid']
        return last_uid + 1

    @no_auth_required
    @accepts()
    async def has_root_password(self):
        """
        Return whether the root user has a valid password set.

        This is used when the system is installed without a password and must be set on
        first use/login.
        """
        return (await self.middleware.call(
            'datastore.query', 'account.bsdusers', [
                ('bsdusr_username', '=', 'root')
            ], {'get': True}))['bsdusr_unixhash'] != '*'

    @no_auth_required
    @accepts(Str('password'),
             Dict(
                 'options',
                 Dict(
                     'ec2',
                     Str('instance_id', required=True),
                 ),
                 update=True,
             ))
    @pass_app()
    async def set_root_password(self, app, password, options):
        """
        Set password for root user if it is not already set.
        """
        if not app.authenticated:
            if await self.middleware.call('user.has_root_password'):
                raise CallError(
                    'You cannot call this method anonymously if root already has a password',
                    errno.EACCES)

            if await self.middleware.call('system.environment') == 'EC2':
                if 'ec2' not in options:
                    raise CallError(
                        'You need to specify instance ID when setting initial root password on EC2 instance',
                        errno.EACCES,
                    )

                if options['ec2']['instance_id'] != await self.middleware.call(
                        'ec2.instance_id'):
                    raise CallError('Incorrect EC2 instance ID', errno.EACCES)

        root = await self.middleware.call('user.query',
                                          [('username', '=', 'root')],
                                          {'get': True})
        await self.middleware.call('user.update', root['id'],
                                   {'password': password})

    async def __common_validation(self, verrors, data, schema, pk=None):

        exclude_filter = [('id', '!=', pk)] if pk else []

        if 'username' in data:
            pw_checkname(verrors, f'{schema}.username', data['username'])

            if await self.middleware.call(
                    'datastore.query', 'account.bsdusers',
                [('username', '=', data['username'])] + exclude_filter,
                {'prefix': 'bsdusr_'}):
                verrors.add(
                    f'{schema}.username',
                    f'The username "{data["username"]}" already exists.',
                    errno.EEXIST)
            if data.get('smb'):
                smb_users = await self.middleware.call(
                    'datastore.query', 'account.bsdusers',
                    [('smb', '=', True)] + exclude_filter,
                    {'prefix': 'bsdusr_'})

                if any(
                        filter(
                            lambda x: data['username'].casefold() == x[
                                'username'].casefold(), smb_users)):
                    verrors.add(
                        f'{schema}.smb',
                        f'Username "{data["username"]}" conflicts with existing SMB user. Note that SMB '
                        f'usernames are case-insensitive.',
                        errno.EEXIST,
                    )

        password = data.get('password')
        if password and '?' in password:
            # See bug #4098
            verrors.add(
                f'{schema}.password',
                'An SMB issue prevents creating passwords containing a '
                'question mark (?).', errno.EINVAL)
        elif not pk and not password and not data.get('password_disabled'):
            verrors.add(f'{schema}.password', 'Password is required')
        elif data.get('password_disabled') and password:
            verrors.add(
                f'{schema}.password_disabled',
                'Leave "Password" blank when "Disable password login" is checked.'
            )

        if 'home' in data:
            if ':' in data['home']:
                verrors.add(f'{schema}.home',
                            '"Home Directory" cannot contain colons (:).')
            if data['home'] != '/nonexistent':
                if not data['home'].startswith('/mnt/'):
                    verrors.add(
                        f'{schema}.home',
                        '"Home Directory" must begin with /mnt/ or set to '
                        '/nonexistent.')
                elif not any(
                        data['home'] == i['path']
                        or data['home'].startswith(i['path'] + '/')
                        for i in await self.middleware.call('pool.query')):
                    verrors.add(
                        f'{schema}.home',
                        f'The path for the home directory "({data["home"]})" '
                        'must include a volume or dataset.')
                elif await self.middleware.call('filesystem.path_is_encrypted',
                                                data['home']):
                    verrors.add(
                        f'{schema}.home',
                        'Path component for "Home Directory" is currently encrypted and locked'
                    )

        if 'home_mode' in data:
            try:
                o = int(data['home_mode'], 8)
                assert o & 0o777 == o
            except (AssertionError, ValueError, TypeError):
                verrors.add(
                    f'{schema}.home_mode',
                    'Please provide a valid value for home_mode attribute')

        if 'groups' in data:
            groups = data.get('groups') or []
            if groups and len(groups) > 64:
                verrors.add(
                    f'{schema}.groups',
                    'A user cannot belong to more than 64 auxiliary groups.')

        if 'full_name' in data and ':' in data['full_name']:
            verrors.add(f'{schema}.full_name',
                        'The ":" character is not allowed in a "Full Name".')

        if 'shell' in data and data['shell'] not in await self.middleware.call(
                'user.shell_choices', pk):
            verrors.add(f'{schema}.shell', 'Please select a valid shell.')

    async def __set_password(self, data):
        if 'password' not in data:
            return
        password = data.pop('password')
        if password:
            data['unixhash'] = crypted_password(password)
            # See http://samba.org.ru/samba/docs/man/manpages/smbpasswd.5.html
            data[
                'smbhash'] = f'{data["username"]}:{data["uid"]}:{"X" * 32}:{nt_password(password)}:[U         ]:LCT-{int(time.time()):X}:'
        else:
            data['unixhash'] = '*'
            data['smbhash'] = '*'
        return password

    async def __set_smbpasswd(self, username):
        """
        This method will update or create an entry in samba's passdb.tdb file.
        Update will only happen if the account's nt_password has changed or
        if the account's 'locked' state has changed. Samba's passdb python
        library will raise an exception if a corresponding Unix user does not
        exist. That is why setting the password is split into two steps.
        """
        await self.middleware.call('smb.update_passdb_user', username)

    async def __set_groups(self, pk, groups):

        groups = set(groups)
        existing_ids = set()
        for gm in await self.middleware.call('datastore.query',
                                             'account.bsdgroupmembership',
                                             [('user', '=', pk)],
                                             {'prefix': 'bsdgrpmember_'}):
            if gm['id'] not in groups:
                await self.middleware.call('datastore.delete',
                                           'account.bsdgroupmembership',
                                           gm['id'])
            else:
                existing_ids.add(gm['id'])

        for _id in groups - existing_ids:
            group = await self.middleware.call('datastore.query',
                                               'account.bsdgroups',
                                               [('id', '=', _id)],
                                               {'prefix': 'bsdgrp_'})
            if not group:
                raise CallError(f'Group {_id} not found', errno.ENOENT)
            await self.middleware.call('datastore.insert',
                                       'account.bsdgroupmembership', {
                                           'group': _id,
                                           'user': pk
                                       }, {'prefix': 'bsdgrpmember_'})

    @private
    async def update_sshpubkey(self, homedir, user, group):
        if 'sshpubkey' not in user:
            return
        if not os.path.isdir(homedir):
            return

        sshpath = f'{homedir}/.ssh'
        keysfile = f'{sshpath}/authorized_keys'
        gid = -1

        pubkey = user.get('sshpubkey') or ''
        pubkey = pubkey.strip()
        if pubkey == '':
            try:
                os.unlink(keysfile)
            except OSError:
                pass
            return

        oldpubkey = ''
        try:
            with open(keysfile, 'r') as f:
                oldpubkey = f.read().strip()
        except Exception:
            pass

        if pubkey == oldpubkey:
            return

        if not os.path.isdir(sshpath):
            os.mkdir(sshpath, mode=0o700)
        if not os.path.isdir(sshpath):
            raise CallError(f'{sshpath} is not a directory')

        # Make extra sure to enforce correct mode on .ssh directory.
        # stripping the ACL will allow subsequent chmod calls to succeed even if
        # dataset aclmode is restricted.
        try:
            gid = (await self.middleware.call('group.get_group_obj',
                                              {'groupname': group}))['gr_gid']
        except Exception:
            # leaving gid at -1 avoids altering the GID value.
            self.logger.debug("Failed to convert %s to gid",
                              group,
                              exc_info=True)

        await self.middleware.call(
            'filesystem.setperm', {
                'path': sshpath,
                'mode': str(700),
                'uid': user['uid'],
                'gid': gid,
                'options': {
                    'recursive': True,
                    'stripacl': True
                }
            })

        with open(keysfile, 'w') as f:
            f.write(pubkey)
            f.write('\n')
        await self.middleware.call('filesystem.setperm', {
            'path': keysfile,
            'mode': str(600)
        })
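
A minimal caller-side sketch for `user.create` above, under the same client assumption. Omitting `uid` lets `get_next_uid` pick the next free one, and `group_create` avoids having to pass `group`:

# Hypothetical usage sketch for user.create; the pool name 'tank' and all other
# values are examples. 'home' must live under /mnt/ unless it is '/nonexistent'.
from middlewared.client import Client

with Client() as c:
    pk = c.call('user.create', {
        'username': 'jdoe',
        'full_name': 'Jane Doe',
        'group_create': True,                 # create a matching primary group
        'home': '/mnt/tank/home/jdoe',
        'shell': '/bin/csh',
        'password': 'a-long-example-password',
        'smb': True,                          # also joins builtin_users
    })
    user = c.call('user.query', [('id', '=', pk)], {'get': True})
    print(user['username'], user['uid'])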
Example #8
class UserService(CRUDService):
    class Config:
        datastore = 'account.bsdusers'
        datastore_extend = 'user.user_extend'
        datastore_prefix = 'bsdusr_'

    @private
    async def user_extend(self, user):

        # Get group membership
        user['groups'] = [
            gm['group']['id'] for gm in await self.middleware.call(
                'datastore.query', 'account.bsdgroupmembership', [(
                    'user', '=', user['id'])], {'prefix': 'bsdgrpmember_'})
        ]

        # Get authorized keys
        keysfile = f'{user["home"]}/.ssh/authorized_keys'
        user['sshpubkey'] = None
        if os.path.exists(keysfile):
            try:
                with open(keysfile, 'r') as f:
                    user['sshpubkey'] = f.read()
            except Exception:
                pass
        return user

    @accepts(
        Dict(
            'user_create',
            Int('uid'),
            Str('username', required=True),
            Int('group'),
            Bool('group_create', default=False),
            Str('home', default='/nonexistent'),
            Str('home_mode', default='755'),
            Str('shell', default='/bin/csh'),
            Str('full_name', required=True),
            Str('email', validators=[Email()], null=True, default=None),
            Str('password', private=True),
            Bool('password_disabled', default=False),
            Bool('locked', default=False),
            Bool('microsoft_account', default=False),
            Bool('sudo', default=False),
            Str('sshpubkey', null=True),
            List('groups', default=[]),
            Dict('attributes', additional_attrs=True),
            register=True,
        ))
    async def do_create(self, data):
        """
        Create a new user.

        If `uid` is not provided it is automatically filled with the next one available.

        `group` is required if `group_create` is false.

        `password` is required if `password_disabled` is false.

        Available choices for `shell` can be retrieved with `user.shell_choices`.

        `attributes` is a general-purpose object for storing arbitrary user information.
        """
        verrors = ValidationErrors()

        if (not data.get('group') and not data.get('group_create')) or (
                data.get('group') is not None and data.get('group_create')):
            verrors.add(
                'user_create.group',
                f'Enter either a group name or create a new group to '
                'continue.', errno.EINVAL)

        await self.__common_validation(verrors, data, 'user_create')

        if data.get('sshpubkey') and not data['home'].startswith('/mnt'):
            verrors.add(
                'user_create.sshpubkey',
                'The home directory is not writable. Leave this field blank.')

        verrors.check()

        groups = data.pop('groups')
        create = data.pop('group_create')

        if create:
            group = await self.middleware.call(
                'group.query', [('group', '=', data['username'])])
            if group:
                group = group[0]
            else:
                group = await self.middleware.call('group.create',
                                                   {'name': data['username']})
                group = (await self.middleware.call('group.query',
                                                    [('id', '=', group)]))[0]
            data['group'] = group['id']
        else:
            group = await self.middleware.call('group.query',
                                               [('id', '=', data['group'])])
            if not group:
                raise CallError(f'Group {data["group"]} not found')
            group = group[0]

        # Is this a new directory or not? Let's not nuke existing directories,
        # e.g. /, /root, /mnt/tank/my-dataset, etc ;).
        new_homedir = False
        home_mode = data.pop('home_mode')
        if data['home'] and data['home'] != '/nonexistent':
            try:
                try:
                    os.makedirs(data['home'], mode=int(home_mode, 8))
                    new_homedir = True
                    os.chown(data['home'], data['uid'], group['gid'])
                except FileExistsError:
                    if not os.path.isdir(data['home']):
                        raise CallError(
                            'Path for home directory already '
                            'exists and is not a directory', errno.EEXIST)

                    # If it exists, ensure the user is owner
                    os.chown(data['home'], data['uid'], group['gid'])
                except OSError as oe:
                    raise CallError('Failed to create the home directory '
                                    f'({data["home"]}) for user: {oe}')
                if os.stat(data['home']).st_dev == os.stat('/mnt').st_dev:
                    raise CallError(
                        f'The path for the home directory "({data["home"]})" '
                        'must include a volume or dataset.')
            except Exception:
                if new_homedir:
                    shutil.rmtree(data['home'])
                raise

        if not data.get('uid'):
            data['uid'] = await self.get_next_uid()

        pk = None  # Make sure pk exists to rollback in case of an error
        try:
            password = await self.__set_password(data)
            sshpubkey = data.pop('sshpubkey',
                                 None)  # datastore does not have sshpubkey

            pk = await self.middleware.call('datastore.insert',
                                            'account.bsdusers', data,
                                            {'prefix': 'bsdusr_'})

            await self.__set_groups(pk, groups)

        except Exception:
            if pk is not None:
                await self.middleware.call('datastore.delete',
                                           'account.bsdusers', pk)
            if new_homedir:
                # Be as atomic as possible when creating the user if
                # commands failed to execute cleanly.
                shutil.rmtree(data['home'])
            raise

        await self.middleware.call('service.reload', 'user')

        await self.__set_smbpasswd(data['username'], password)

        if os.path.exists(data['home']):
            for f in os.listdir(SKEL_PATH):
                if f.startswith('dot'):
                    dest_file = os.path.join(data['home'], f[3:])
                else:
                    dest_file = os.path.join(data['home'], f)
                if not os.path.exists(dest_file):
                    shutil.copyfile(os.path.join(SKEL_PATH, f), dest_file)
                    os.chown(dest_file, data['uid'], group['gid'])

            data['sshpubkey'] = sshpubkey
            try:
                await self.__update_sshpubkey(data['home'], data,
                                              group['group'])
            except PermissionError as e:
                self.logger.warn('Failed to update authorized keys',
                                 exc_info=True)
                raise CallError(f'Failed to update authorized keys: {e}')

        return pk

    @accepts(
        Int('id'),
        Patch(
            'user_create',
            'user_update',
            ('attr', {
                'update': True
            }),
            ('rm', {
                'name': 'group_create'
            }),
        ),
    )
    async def do_update(self, pk, data):

        user = await self._get_instance(pk)

        verrors = ValidationErrors()

        if 'group' in data:
            group = await self.middleware.call('datastore.query',
                                               'account.bsdgroups',
                                               [('id', '=', data['group'])])
            if not group:
                verrors.add('user_update.group',
                            f'Group {data["group"]} not found', errno.ENOENT)
            group = group[0]
        else:
            group = user['group']
            user['group'] = group['id']

        await self.__common_validation(verrors, data, 'user_update', pk=pk)

        home = data.get('home') or user['home']
        # root user (uid 0) is an exception to the rule
        if data.get('sshpubkey'
                    ) and not home.startswith('/mnt') and user['uid'] != 0:
            verrors.add('user_update.sshpubkey',
                        'Home directory is not writable, leave this blank')

        # Do not allow attributes to be changed for builtin user
        if user['builtin']:
            for i in ('group', 'home', 'home_mode', 'uid', 'username'):
                if i in data:
                    verrors.add(f'user_update.{i}',
                                'This attribute cannot be changed')

        verrors.check()

        # Copy the home directory if it changed
        if ('home' in data
                and data['home'] not in (user['home'], '/nonexistent')
                and not data['home'].startswith(f'{user["home"]}/')):
            home_copy = True
            home_old = user['home']
        else:
            home_copy = False

        # After this point user dict has values from data
        user.update(data)

        if home_copy and not os.path.isdir(user['home']):
            try:
                os.makedirs(user['home'])
                os.chown(user['home'], user['uid'], group['bsdgrp_gid'])
            except OSError:
                self.logger.warn('Failed to chown homedir', exc_info=True)
            if not os.path.isdir(user['home']):
                raise CallError(f'{user["home"]} is not a directory')

        home_mode = user.pop('home_mode', None)
        if user['builtin']:
            home_mode = None

        def set_home_mode():
            if home_mode is not None:
                try:
                    os.chmod(user['home'], int(home_mode, 8))
                except OSError:
                    self.logger.warn('Failed to set homedir mode',
                                     exc_info=True)

        try:
            await self.__update_sshpubkey(
                home_old if home_copy else user['home'],
                user,
                group['bsdgrp_group'],
            )
        except PermissionError as e:
            self.logger.warn('Failed to update authorized keys', exc_info=True)
            raise CallError(f'Failed to update authorized keys: {e}')

        if home_copy:

            def do_home_copy():
                try:
                    subprocess.run(
                        f"/usr/bin/su - {user['username']} -c '/bin/cp -a {home_old}/ {user['home']}/'",
                        shell=True,
                        check=True)
                except subprocess.CalledProcessError as e:
                    self.logger.warn(f"Failed to copy homedir: {e}")
                set_home_mode()

            asyncio.ensure_future(self.middleware.run_in_thread(do_home_copy))
        else:
            set_home_mode()

        user.pop('sshpubkey', None)
        password = await self.__set_password(user)

        if 'groups' in user:
            groups = user.pop('groups')
            await self.__set_groups(pk, groups)

        await self.middleware.call('datastore.update', 'account.bsdusers', pk,
                                   user, {'prefix': 'bsdusr_'})

        await self.middleware.call('service.reload', 'user')

        await self.__set_smbpasswd(user['username'], password)

        return pk

    @accepts(Int('id'), Dict('options', Bool('delete_group', default=True)))
    async def do_delete(self, pk, options=None):
        """
        Delete user `id`.

        The `delete_group` option deletes the user primary group if it is not being used by
        any other user.
        """

        user = await self._get_instance(pk)

        if user['builtin']:
            raise CallError('Cannot delete a built-in user', errno.EINVAL)

        if options['delete_group'] and not user['group']['bsdgrp_builtin']:
            count = await self.middleware.call(
                'datastore.query', 'account.bsdgroupmembership',
                [('group', '=', user['group']['id'])], {
                    'prefix': 'bsdgrpmember_',
                    'count': True
                })
            count2 = await self.middleware.call(
                'datastore.query', 'account.bsdusers',
                [('group', '=', user['group']['id']), ('id', '!=', pk)], {
                    'prefix': 'bsdusr_',
                    'count': True
                })
            if count == 0 and count2 == 0:
                try:
                    await self.middleware.call('group.delete',
                                               user['group']['id'])
                except Exception:
                    self.logger.warn(
                        f'Failed to delete primary group of {user["username"]}',
                        exc_info=True)

        await run('smbpasswd', '-x', user['username'], check=False)

        # TODO: add a hook in CIFS service
        cifs = await self.middleware.call('datastore.query', 'services.cifs',
                                          [], {'prefix': 'cifs_srv_'})
        if cifs:
            cifs = cifs[0]
            if cifs['guest'] == user['username']:
                await self.middleware.call('datastore.update', 'services.cifs',
                                           cifs['id'], {'guest': 'nobody'},
                                           {'prefix': 'cifs_srv_'})

        await self.middleware.call('datastore.delete', 'account.bsdusers', pk)
        await self.middleware.call('service.reload', 'user')

        return pk

    @accepts()
    def shell_choices(self):
        """
        Return the available shell choices to be used in `user.create` and `user.update`.
        """
        with open('/etc/shells', 'r') as f:
            shells = [x.rstrip() for x in f.readlines() if x.startswith('/')]
        return {
            shell: os.path.basename(shell)
            for shell in shells + ['/usr/sbin/nologin']
        }

    @item_method
    @accepts(
        Int('id'),
        Str('key'),
        Any('value'),
    )
    async def set_attribute(self, pk, key, value):
        """
        Set user general purpose `attributes` dictionary `key` to `value`.

        e.g. Setting key="foo" value="bar" will result in {"attributes": {"foo": "bar"}}
        """
        user = await self._get_instance(pk)

        user['attributes'][key] = value

        await self.middleware.call('datastore.update', 'account.bsdusers', pk,
                                   {'attributes': user['attributes']},
                                   {'prefix': 'bsdusr_'})

        return True

    @item_method
    @accepts(
        Int('id'),
        Str('key'),
    )
    async def pop_attribute(self, pk, key):
        """
        Remove user general purpose `attributes` dictionary `key`.
        """
        user = await self._get_instance(pk)

        if key in user['attributes']:
            user['attributes'].pop(key)

            await self.middleware.call('datastore.update', 'account.bsdusers',
                                       pk, {'attributes': user['attributes']},
                                       {'prefix': 'bsdusr_'})
            return True
        else:
            return False

    @accepts()
    async def get_next_uid(self):
        """
        Get the next available/free uid.
        """
        last_uid = 999
        for i in await self.middleware.call('datastore.query',
                                            'account.bsdusers',
                                            [('builtin', '=', False)], {
                                                'order_by': ['uid'],
                                                'prefix': 'bsdusr_'
                                            }):
            # If the difference between the last uid and the current one is
            # bigger than 1, it means we have a gap and can use it.
            if i['uid'] - last_uid > 1:
                return last_uid + 1
            last_uid = i['uid']
        return last_uid + 1
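
    # Illustrative note (not part of the original source): with builtin users
    # excluded and existing uids [1000, 1001, 1005], the loop above finds the
    # gap after 1001 and returns 1002; with no gaps it returns the highest
    # uid + 1, and with no non-builtin users at all it returns 1000.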

    @no_auth_required
    @accepts()
    async def has_root_password(self):
        """
        Return whether the root user has a valid password set.

        This is used when the system is installed without a password and must be set on
        first use/login.
        """
        return (await self.middleware.call(
            'datastore.query', 'account.bsdusers', [
                ('bsdusr_username', '=', 'root')
            ], {'get': True}))['bsdusr_unixhash'] != '*'

    @no_auth_required
    @accepts(Str('password'))
    @pass_app
    async def set_root_password(self, app, password):
        """
        Set password for root user if it is not already set.
        """
        if not app.authenticated and await self.middleware.call(
                'user.has_root_password'):
            raise CallError(
                'You cannot call this method anonymously if root already has a password',
                errno.EACCES)

        root = await self.middleware.call('user.query',
                                          [('username', '=', 'root')],
                                          {'get': True})
        await self.middleware.call('user.update', root['id'],
                                   {'password': password})

    async def __common_validation(self, verrors, data, schema, pk=None):

        exclude_filter = [('id', '!=', pk)] if pk else []

        if 'username' in data:
            pw_checkname(verrors, f'{schema}.username', data['username'])

            if await self.middleware.call(
                    'datastore.query', 'account.bsdusers',
                [('username', '=', data['username'])] + exclude_filter,
                {'prefix': 'bsdusr_'}):
                verrors.add(
                    f'{schema}.username',
                    f'The username "{data["username"]}" already exists.',
                    errno.EEXIST)

        password = data.get('password')
        if password and '?' in password:
            # See bug #4098
            verrors.add(
                f'{schema}.password',
                'An SMB issue prevents creating passwords containing a '
                'question mark (?).', errno.EINVAL)
        elif not pk and not password and not data.get('password_disabled'):
            verrors.add(f'{schema}.password', 'Password is required')
        elif data.get('password_disabled') and password:
            verrors.add(
                f'{schema}.password_disabled',
                'Leave "Password" blank when "Disable password login" is checked.'
            )

        if 'home' in data:
            if ':' in data['home']:
                verrors.add(f'{schema}.home',
                            '"Home Directory" cannot contain colons (:).')
            if not data['home'].startswith(
                    '/mnt/') and data['home'] != '/nonexistent':
                verrors.add(
                    f'{schema}.home',
                    '"Home Directory" must begin with /mnt/ or set to '
                    '/nonexistent.')

        if 'home_mode' in data:
            try:
                o = int(data['home_mode'], 8)
                assert o & 0o777 == o
            except (AssertionError, ValueError, TypeError):
                verrors.add(
                    f'{schema}.home_mode',
                    'Please provide a valid value for home_mode attribute')

        if 'groups' in data:
            groups = data.get('groups') or []
            if groups and len(groups) > 64:
                verrors.add(
                    f'{schema}.groups',
                    'A user cannot belong to more than 64 auxiliary groups.')

        if 'full_name' in data and ':' in data['full_name']:
            verrors.add(f'{schema}.full_name',
                        'The ":" character is not allowed in a "Full Name".')

    async def __set_password(self, data):
        if 'password' not in data:
            return
        password = data.pop('password')
        if password:
            data['unixhash'] = crypted_password(password)
            # See http://samba.org.ru/samba/docs/man/manpages/smbpasswd.5.html
            data[
                'smbhash'] = f'{data["username"]}:{data["uid"]}:{"X" * 32}:{nt_password(password)}:[U          ]:LCT-{int(time.time()):X}:'
        else:
            data['unixhash'] = '*'
            data['smbhash'] = '*'
        return password

    async def __set_smbpasswd(self, username, password):
        """
        Currently the way we set Samba passwords is by using smbpasswd,
        and that can only happen after the user exists in master.passwd.
        That is the reason we have two methods/steps to set the password.
        """
        if not password:
            return
        proc = await Popen(['smbpasswd', '-D', '0', '-s', '-a', username],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           stdin=subprocess.PIPE)
        await proc.communicate(input=f'{password}\n{password}\n'.encode())

    async def __set_groups(self, pk, groups):

        groups = set(groups)
        existing_ids = set()
        for gm in await self.middleware.call('datastore.query',
                                             'account.bsdgroupmembership',
                                             [('user', '=', pk)],
                                             {'prefix': 'bsdgrpmember_'}):
            if gm['group']['id'] not in groups:
                await self.middleware.call('datastore.delete',
                                           'account.bsdgroupmembership',
                                           gm['id'])
            else:
                existing_ids.add(gm['group']['id'])

        for _id in groups - existing_ids:
            group = await self.middleware.call('datastore.query',
                                               'account.bsdgroups',
                                               [('id', '=', _id)],
                                               {'prefix': 'bsdgrp_'})
            if not group:
                raise CallError(f'Group {_id} not found', errno.ENOENT)
            await self.middleware.call('datastore.insert',
                                       'account.bsdgroupmembership', {
                                           'group': _id,
                                           'user': pk
                                       }, {'prefix': 'bsdgrpmember_'})

    async def __update_sshpubkey(self, homedir, user, group):
        if 'sshpubkey' not in user:
            return
        if not os.path.isdir(homedir):
            return

        sshpath = f'{homedir}/.ssh'
        keysfile = f'{sshpath}/authorized_keys'

        pubkey = user.get('sshpubkey') or ''
        pubkey = pubkey.strip()
        if pubkey == '':
            try:
                os.unlink(keysfile)
            except OSError:
                pass
            return

        oldpubkey = ''
        try:
            with open(keysfile, 'r') as f:
                oldpubkey = f.read().strip()
        except Exception:
            pass

        if pubkey == oldpubkey:
            return

        if not os.path.isdir(sshpath):
            os.mkdir(sshpath, mode=0o700)
        if not os.path.isdir(sshpath):
            raise CallError(f'{sshpath} is not a directory')
        with open(keysfile, 'w') as f:
            f.write(pubkey)
            f.write('\n')
        os.chmod(keysfile, 0o600)
        await run('/usr/sbin/chown',
                  '-R',
                  f'{user["username"]}:{group}',
                  sshpath,
                  check=False)
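
A usage sketch (not part of the original source): the user.* methods above are reached through the middleware, the same way the plugin itself calls 'user.update' inside set_root_password. Here 'middleware' stands in for the handle a plugin accesses as self.middleware, and the user id 42 is made up for illustration.

async def example_user_calls(middleware):
    # Hypothetical helper; the id and field values are illustrative only.
    # Update an existing user's full name and home directory permissions.
    await middleware.call('user.update', 42, {
        'full_name': 'Example User',
        'home_mode': '755',
    })
    # Store an arbitrary key/value in the user's general purpose attributes.
    await middleware.call('user.set_attribute', 42, 'theme', 'dark')
    # Ask for the next free uid before creating a new account.
    return await middleware.call('user.get_next_uid')
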
Example #9
0
File: ups.py Project: neojiangtw/freenas
class UPSService(SystemServiceService):
    DRIVERS_AVAILABLE = set(os.listdir(DRIVER_BIN_DIR))

    class Config:
        datastore = 'services.ups'
        datastore_prefix = 'ups_'
        datastore_extend = 'ups.ups_config_extend'
        service = 'ups'
        service_verb = 'restart'

    @private
    async def ups_config_extend(self, data):
        data['mode'] = data['mode'].upper()
        data['shutdown'] = data['shutdown'].upper()
        data['toemail'] = [v for v in data['toemail'].split(';') if v]
        host = 'localhost' if data['mode'] == 'MASTER' else data['remotehost']
        data['complete_identifier'] = f'{data["identifier"]}@{host}:{data["remoteport"]}'
        return data

    @accepts()
    async def port_choices(self):
        ports = [x for x in glob.glob('/dev/cua*') if x.find('.') == -1]
        ports.extend(glob.glob('/dev/ugen*'))
        ports.extend(glob.glob('/dev/uhid*'))
        ports.append('auto')
        return ports

    @accepts()
    def driver_choices(self):
        """
        Returns choices of UPS drivers supported by the system.
        """
        ups_choices = {}
        if os.path.exists("/conf/base/etc/local/nut/driver.list"):
            with open('/conf/base/etc/local/nut/driver.list', 'rb') as f:
                d = f.read().decode('utf8', 'ignore')
            r = io.StringIO()
            for line in re.sub(r'[ \t]+', ' ', d, flags=re.M).split('\n'):
                r.write(line.strip() + '\n')
            r.seek(0)
            reader = csv.reader(r, delimiter=' ', quotechar='"')
            for row in reader:
                if len(row) == 0 or row[0].startswith('#'):
                    continue
                if row[-2] == '#':
                    last = -3
                else:
                    last = -1
                driver_str = row[last]
                driver_annotation = ''
                m = re.match(r'(.+) \((.+)\)', driver_str)  # "blazer_usb (USB ID 0665:5161)"
                if m:
                    driver_str, driver_annotation = m.group(1), m.group(2)
                for driver in driver_str.split(' or '):  # can be "blazer_ser or blazer_usb"
                    driver = driver.strip()
                    if driver not in self.DRIVERS_AVAILABLE:
                        continue
                    for i, field in enumerate(list(row)):
                        row[i] = field
                    ups_choices['$'.join([driver, row[3]])] = '%s (%s)' % (
                        ' '.join(filter(None, row[0:last])),
                        ', '.join(filter(None, [driver, driver_annotation]))
                    )
        return ups_choices

    @private
    async def validate_data(self, data, schema):
        verrors = ValidationErrors()

        driver = data.get('driver')
        if driver:
            if driver not in (await self.middleware.call('ups.driver_choices')).keys():
                verrors.add(
                    f'{schema}.driver',
                    'Driver selected does not match local machine\'s driver list'
                )

        identifier = data['identifier']
        if identifier:
            if not re.search(r'^[a-z0-9\.\-_]+$', identifier, re.I):
                verrors.add(
                    f'{schema}.identifier',
                    'Use alphanumeric characters, ".", "-" and "_"'
                )

        for field in [field for field in ['monpwd', 'monuser'] if data.get(field)]:
            if re.search(r'[ #]', data[field], re.I):
                verrors.add(
                    f'{schema}.{field}',
                    'Spaces or number signs are not allowed'
                )

        mode = data.get('mode')
        if mode == 'MASTER':
            for field in filter(
                lambda f: not data[f],
                ['port', 'driver']
            ):
                verrors.add(
                    f'{schema}.{field}',
                    'This field is required'
                )
        else:
            if not data.get('remotehost'):
                verrors.add(
                    f'{schema}.remotehost',
                    'This field is required'
                )

        to_emails = data.get('toemail')
        if to_emails:
            data['toemail'] = ';'.join(to_emails)
        else:
            data['toemail'] = ''

        data['mode'] = data['mode'].lower()
        data['shutdown'] = data['shutdown'].lower()

        return verrors, data

    @accepts(
        Dict(
            'ups_update',
            Bool('emailnotify'),
            Bool('powerdown'),
            Bool('rmonitor'),
            Int('nocommwarntime', null=True),
            Int('remoteport', validators=[Port()]),
            Int('shutdowntimer'),
            Int('hostsync', validators=[Range(min=0)]),
            Str('description'),
            Str('driver'),
            Str('extrausers', max_length=None),
            Str('identifier', empty=False),
            Str('mode', enum=['MASTER', 'SLAVE']),
            Str('monpwd', empty=False),
            Str('monuser', empty=False),
            Str('options', max_length=None),
            Str('optionsupsd', max_length=None),
            Str('port'),
            Str('remotehost'),
            Str('shutdown', enum=['LOWBATT', 'BATT']),
            Str('shutdowncmd', null=True),
            Str('subject'),
            List('toemail', items=[Str('email', validators=[Email()])]),
            update=True
        )
    )
    async def do_update(self, data):
        """
        Update UPS Service Configuration.

        `emailnotify` when enabled, sends out notifications of different UPS events via email.

        `powerdown` when enabled, sets UPS to power off after shutting down the system.

        `nocommwarntime` is a value in seconds which makes the UPS Service wait the specified number of seconds
        before alerting that it cannot reach the configured UPS.

        `shutdowntimer` is a value in seconds which tells the Service to wait the specified number of seconds for the
        UPS before initiating a shutdown. This only applies when `shutdown` is set to "BATT".

        `shutdowncmd` is the command which is executed to initiate a shutdown. It defaults to "poweroff".

        `toemail` is a list of valid email addresses to which notification emails are sent.
        """
        config = await self.config()
        config.pop('complete_identifier')
        old_config = config.copy()
        config.update(data)
        verrors, config = await self.validate_data(config, 'ups_update')
        if verrors:
            raise verrors

        old_config['mode'] = old_config['mode'].lower()
        old_config['shutdown'] = old_config['shutdown'].lower()
        old_config['toemail'] = ';'.join(old_config['toemail']) if old_config['toemail'] else ''

        if len(set(old_config.items()) ^ set(config.items())) > 0:
            if config['identifier'] != old_config['identifier']:
                await self.dismiss_alerts()

            await self._update_service(old_config, config)

        return await self.config()

    @private
    async def alerts_mapping(self):
        return {
            'LOWBATT': 'UPSBatteryLow',
            'COMMBAD': 'UPSCommbad',
            'COMMOK': 'UPSCommok',
            'ONBATT': 'UPSOnBattery',
            'ONLINE': 'UPSOnline',
            'REPLBATT': 'UPSReplbatt'
        }

    @private
    async def dismiss_alerts(self):
        config = await self.config()

        for alert in (await self.alerts_mapping()).values():
            await self.middleware.call('alert.oneshot_delete', alert, {'ups': config['identifier']})

    @private
    @accepts(
        Str('notify_type')
    )
    async def upssched_event(self, notify_type):
        config = await self.config()
        upsc_identifier = config['complete_identifier']
        if notify_type.lower() == 'shutdown':
            # Before we start FSD with upsmon, lets ensure that ups is not ONLINE (OL).
            # There are cases where battery/charger issues can result in ups.status being "OL LB" at the
            # same time. This will ensure that we don't initiate a shutdown if ups is OL.
            stats_output = (
                await run(
                    '/usr/local/bin/upsc', upsc_identifier,
                    check=False
                )
            ).stdout

            ups_status = re.findall(
                fr'ups.status: (.*)',
                '' if not stats_output else stats_output.decode()
            )
            if ups_status and 'ol' in ups_status[0].lower():
                self.middleware.logger.debug(
                    f'Shutdown not initiated as ups.status ({ups_status[0]}) indicates '
                    f'{config["identifier"]} is ONLINE (OL).'
                )
            else:
                syslog.syslog(syslog.LOG_NOTICE, 'upssched-cmd "issuing shutdown"')
                await run('/usr/local/sbin/upsmon', '-c', 'fsd', check=False)
        elif 'notify' in notify_type.lower():
            # notify_type is expected to be of the following format
            # NOTIFY-EVENT i.e NOTIFY-LOWBATT
            notify_type = notify_type.split('-')[-1]

            # We would like to send alerts for the following events
            alert_mapping = await self.alerts_mapping()

            await self.dismiss_alerts()

            if notify_type in alert_mapping:
                await self.middleware.call(
                    'alert.oneshot_create', alert_mapping[notify_type], {'ups': config['identifier']}
                )

            if config['emailnotify']:
                # Email user with the notification event and details
                # We send the email in the following format ( inclusive line breaks )

                # NOTIFICATION: 'LOWBATT'
                # UPS: 'ups'
                #
                # Statistics recovered:
                #
                # 1) Battery charge (percent)
                # battery.charge: 5
                #
                # 2) Remaining battery level when UPS switches to LB (percent)
                # battery.charge.low: 10
                #
                # 3) Battery runtime (seconds)
                # battery.runtime: 1860
                #
                # 4) Remaining battery runtime when UPS switches to LB (seconds)
                # battery.runtime.low: 900

                ups_name = config['identifier']
                hostname = (await self.middleware.call('system.info'))['hostname']
                current_time = datetime.datetime.now(tz=dateutil.tz.tzlocal()).strftime('%a %b %d %H:%M:%S %Z %Y')
                ups_subject = config['subject'].replace('%d', current_time).replace('%h', hostname)
                body = f'NOTIFICATION: {notify_type!r}<br>UPS: {ups_name!r}<br><br>'

                # Let's gather following stats
                data_points = {
                    'battery.charge': 'Battery charge (percent)',
                    'battery.charge.low': 'Battery level remaining (percent) when UPS switches to Low Battery (LB)',
                    'battery.charge.status': 'Battery charge status',
                    'battery.runtime': 'Battery runtime (seconds)',
                    'battery.runtime.low': 'Battery runtime remaining (seconds) when UPS switches to Low Battery (LB)',
                    'battery.runtime.restart': 'Minimum battery runtime (seconds) to allow UPS restart after power-off',
                }

                stats_output = (
                    await run('/usr/local/bin/upsc', upsc_identifier, check=False)
                ).stdout
                recovered_stats = re.findall(
                    fr'({"|".join(data_points)}): (.*)',
                    '' if not stats_output else stats_output.decode()
                )

                if recovered_stats:
                    body += 'Statistics recovered:<br><br>'
                    # recovered_stats is expected to be a list in this format
                    # [('battery.charge', '5'), ('battery.charge.low', '10'), ('battery.runtime', '1860')]
                    for index, stat in enumerate(recovered_stats):
                        body += f'{index + 1}) {data_points[stat[0]]}<br>  {stat[0]}: {stat[1]}<br><br>'

                else:
                    body += 'Statistics could not be recovered<br>'

                # Subject and body defined, send email
                job = await self.middleware.call(
                    'mail.send', {
                        'subject': ups_subject,
                        'text': body,
                        'to': config['toemail']
                    }
                )

                await job.wait()
                if job.error:
                    self.middleware.logger.debug(f'Failed to send UPS status email: {job.error}')

        else:
            self.middleware.logger.debug(f'Unrecognized UPS notification event: {notify_type}')
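
A usage sketch (not part of the original source) for the ups.update call documented above, using the same hypothetical 'middleware' handle. SLAVE mode only requires 'remotehost'; MASTER mode would additionally need 'port' and 'driver' (a key from ups.driver_choices()). All values are illustrative.

async def example_ups_update(middleware):
    # Hypothetical call; the fields follow the 'ups_update' schema above.
    return await middleware.call('ups.update', {
        'mode': 'SLAVE',
        'remotehost': '192.168.0.10',
        'identifier': 'ups',
        'shutdown': 'LOWBATT',
        'emailnotify': True,
        'toemail': ['admin@example.com'],
    })
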
Example #10
0
File: support.py Project: zeroyou/freenas
class SupportService(ConfigService):

    class Config:
        datastore = 'system.support'

    @accepts(Dict(
        'support_update',
        Bool('enabled', null=True),
        Str('name'),
        Str('title'),
        Str('email'),
        Str('phone'),
        Str('secondary_name'),
        Str('secondary_title'),
        Str('secondary_email'),
        Str('secondary_phone'),
        update=True
    ))
    async def do_update(self, data):
        """
        Update Proactive Support settings.
        """

        config_data = await self.config()
        config_data.update(data)

        verrors = ValidationErrors()
        if config_data['enabled']:
            for key in ['name', 'title', 'email', 'phone']:
                for prefix in ['', 'secondary_']:
                    field = prefix + key
                    if not config_data[field]:
                        verrors.add(f'support_update.{field}', 'This field is required')
        if verrors:
            raise verrors

        await self.middleware.call(
            'datastore.update',
            self._config.datastore,
            config_data['id'],
            config_data,
        )

        return await self.config()

    @accepts()
    async def is_available(self):
        """
        Returns whether Proactive Support is available for this product type and current license.
        """

        if not await self.middleware.call('system.is_enterprise'):
            return False

        license = (await self.middleware.call('system.info'))['license']
        if license is None:
            return False

        return license['contract_type'] in ['SILVER', 'GOLD']

    @accepts()
    async def is_available_and_enabled(self):
        """
        Returns whether Proactive Support is available and enabled.
        """

        return await self.is_available() and (await self.config())['enabled']

    @accepts()
    async def fields(self):
        """
        Returns list of pairs of field names and field titles for Proactive Support.
        """

        return (
            ("name", "Contact Name"),
            ("title", "Contact Title"),
            ("email", "Contact E-mail"),
            ("phone", "Contact Phone"),
            ("secondary_name", "Secondary Contact Name"),
            ("secondary_title", "Secondary Contact Title"),
            ("secondary_email", "Secondary Contact E-mail"),
            ("secondary_phone", "Secondary Contact Phone"),
        )

    @accepts(
        Str('username'),
        Str('password'),
    )
    def fetch_categories(self, username, password):
        """
        Fetch all the categories available for `username` using `password`.
        Returns a dict with the category name as a key and id as value.
        """

        self.middleware.call_sync('network.general.will_perform_activity', 'support')

        sw_name = 'freenas' if not self.middleware.call_sync('system.is_enterprise') else 'truenas'
        try:
            r = requests.post(
                f'https://{ADDRESS}/{sw_name}/api/v1.0/categories',
                data=json.dumps({
                    'user': username,
                    'password': password,
                }),
                headers={'Content-Type': 'application/json'},
                timeout=10,
            )
            data = r.json()
        except simplejson.JSONDecodeError:
            self.logger.debug(f'Failed to decode ticket categories response: {r.text}')
            raise CallError('Invalid proxy server response', errno.EBADMSG)
        except requests.ConnectionError as e:
            raise CallError(f'Connection error {e}', errno.EBADF)
        except requests.Timeout:
            raise CallError('Connection time out', errno.ETIMEDOUT)

        if 'error' in data:
            raise CallError(data['message'], errno.EINVAL)

        return data

    @accepts(Dict(
        'new_ticket',
        Str('title', required=True, max_length=None),
        Str('body', required=True, max_length=None),
        Str('category', required=True),
        Bool('attach_debug', default=False),
        Str('username', private=True),
        Str('password', private=True),
        Str('type', enum=['BUG', 'FEATURE']),
        Str('criticality'),
        Str('environment', max_length=None),
        Str('phone'),
        Str('name'),
        Str('email', validators=[Email()]),
        List('cc', items=[Str('email', validators=[Email()])])
    ))
    @job()
    async def new_ticket(self, job, data):
        """
        Creates a new ticket for support.
        This is done using the support proxy API.
        For FreeNAS it will be created on Redmine and for TrueNAS on SupportSuite.

        For FreeNAS `criticality`, `environment`, `phone`, `name` and `email` attributes are not required.
        For TrueNAS `username`, `password` and `type` attributes are not required.
        """

        await self.middleware.call('network.general.will_perform_activity', 'support')

        job.set_progress(1, 'Gathering data')

        sw_name = 'freenas' if not await self.middleware.call('system.is_enterprise') else 'truenas'

        if sw_name == 'freenas':
            required_attrs = ('type', 'username', 'password')
        else:
            required_attrs = ('phone', 'name', 'email', 'criticality', 'environment')
            data['serial'] = (await self.middleware.call('system.dmidecode_info'))['system-serial-number']
            license = (await self.middleware.call('system.info'))['license']
            if license:
                data['company'] = license['customer_name']
            else:
                data['company'] = 'Unknown'

        for i in required_attrs:
            if i not in data:
                raise CallError(f'{i} is required', errno.EINVAL)

        data['version'] = (await self.middleware.call('system.version')).split('-', 1)[-1]
        if 'username' in data:
            data['user'] = data.pop('username')
        debug = data.pop('attach_debug')

        type_ = data.get('type')
        if type_:
            data['type'] = type_.lower()

        job.set_progress(20, 'Submitting ticket')

        try:
            r = await self.middleware.run_in_thread(lambda: requests.post(
                f'https://{ADDRESS}/{sw_name}/api/v1.0/ticket',
                data=json.dumps(data),
                headers={'Content-Type': 'application/json'},
                timeout=10,
            ))
            result = r.json()
        except simplejson.JSONDecodeError:
            self.logger.debug(f'Failed to decode ticket creation response: {r.text}')
            raise CallError('Invalid proxy server response', errno.EBADMSG)
        except requests.ConnectionError as e:
            raise CallError(f'Connection error {e}', errno.EBADF)
        except requests.Timeout:
            raise CallError('Connection time out', errno.ETIMEDOUT)

        if r.status_code != 200:
            self.logger.debug('Support Ticket failed (%d): %s', r.status_code, r.text)
            raise CallError('Ticket creation failed, try again later.', errno.EINVAL)

        if result['error']:
            raise CallError(result['message'], errno.EINVAL)

        ticket = result.get('ticketnum')
        url = result.get('message')
        if not ticket:
            raise CallError('New ticket number was not returned', errno.EINVAL)
        job.set_progress(50, f'Ticket created: {ticket}', extra={'ticket': ticket})

        if debug:
            job.set_progress(60, 'Generating debug file')

            debug_job = await self.middleware.call(
                'system.debug', pipes=Pipes(output=self.middleware.pipe()),
            )

            if await self.middleware.call('system.is_enterprise') and await self.middleware.call('failover.licensed'):
                debug_name = 'debug-{}.tar'.format(time.strftime('%Y%m%d%H%M%S'))
            else:
                debug_name = 'debug-{}-{}.txz'.format(
                    socket.gethostname().split('.')[0],
                    time.strftime('%Y%m%d%H%M%S'),
                )

            job.set_progress(80, 'Attaching debug file')

            t = {
                'ticket': ticket,
                'filename': debug_name,
            }
            if 'user' in data:
                t['username'] = data['user']
            if 'password' in data:
                t['password'] = data['password']
            tjob = await self.middleware.call(
                'support.attach_ticket', t, pipes=Pipes(input=self.middleware.pipe()),
            )

            def copy():
                try:
                    rbytes = 0
                    while True:
                        r = debug_job.pipes.output.r.read(1048576)
                        if r == b'':
                            break
                        rbytes += len(r)
                        if rbytes > 20971520:
                            raise CallError('Debug too large to attach', errno.EFBIG)
                        tjob.pipes.input.w.write(r)
                finally:
                    tjob.pipes.input.w.close()

            await self.middleware.run_in_thread(copy)

            await debug_job.wait()
            await tjob.wait()
        else:
            job.set_progress(100)

        return {
            'ticket': ticket,
            'url': url,
        }

    @accepts(Dict(
        'attach_ticket',
        Int('ticket', required=True),
        Str('filename', required=True, max_length=None),
        Str('username', private=True),
        Str('password', private=True),
    ))
    @job(pipes=["input"])
    async def attach_ticket(self, job, data):
        """
        Method to attach a file to an existing ticket.
        """

        await self.middleware.call('network.general.will_perform_activity', 'support')

        sw_name = 'freenas' if not await self.middleware.call('system.is_enterprise') else 'truenas'

        if 'username' in data:
            data['user'] = data.pop('username')
        data['ticketnum'] = data.pop('ticket')
        filename = data.pop('filename')

        try:
            r = await self.middleware.run_in_thread(lambda: requests.post(
                f'https://{ADDRESS}/{sw_name}/api/v1.0/ticket/attachment',
                data=data,
                timeout=300,
                files={'file': (filename, job.pipes.input.r)},
            ))
            data = r.json()
        except simplejson.JSONDecodeError:
            self.logger.debug(f'Failed to decode ticket attachment response: {r.text}')
            raise CallError('Invalid proxy server response', errno.EBADMSG)
        except requests.ConnectionError as e:
            raise CallError(f'Connection error {e}', errno.EBADF)
        except requests.Timeout:
            raise CallError('Connection time out', errno.ETIMEDOUT)

        if data['error']:
            raise CallError(data['message'], errno.EINVAL)
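
A usage sketch (not part of the original source): support.new_ticket is a job, so a caller gets a job handle back and waits on it, mirroring how the service itself waits on 'mail.send' and 'support.attach_ticket'. Credentials, category and ticket text below are placeholders, and the required attributes shown are the FreeNAS ones ('type', 'username', 'password').

async def example_new_ticket(middleware):
    # Hypothetical job call; field names come from the 'new_ticket' schema above.
    job = await middleware.call('support.new_ticket', {
        'title': 'Example ticket',
        'body': 'Describe the problem here.',
        'category': 'placeholder-category',  # value obtained via support.fetch_categories()
        'type': 'BUG',
        'username': 'proxy-user',            # placeholder credentials
        'password': 'proxy-password',
        'attach_debug': False,
    })
    await job.wait()
    # On success the method returns the new 'ticket' number and its 'url';
    # job.error is set if the submission failed.
    return job
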
Example #11
0
File: snmp.py Project: bmhughes/freenas
class SNMPService(SystemServiceService):

    class Config:
        service = 'snmp'
        datastore_prefix = 'snmp_'
        cli_namespace = 'service.snmp'

    ENTRY = Dict(
        'snmp_entry',
        Str('location', required=True),
        Str('contact', required=True, validators=[Or(Email(), Match(r'^[-_a-zA-Z0-9\s]*$'))]),
        Bool('traps', required=True),
        Bool('v3', required=True),
        Str('community', validators=[Match(r'^[-_.a-zA-Z0-9\s]*$')], default='public', required=True),
        Str('v3_username', max_length=20, required=True),
        Str('v3_authtype', enum=['', 'MD5', 'SHA'], required=True),
        Str('v3_password', required=True),
        Str('v3_privproto', enum=[None, 'AES', 'DES'], null=True, required=True),
        Str('v3_privpassphrase', required=True, null=True),
        Int('loglevel', validators=[Range(min=0, max=7)], required=True),
        Str('options', max_length=None, required=True),
        Bool('zilstat', required=True),
        Bool('iftop', required=True),
        Int('id', required=True),
    )

    async def do_update(self, data):
        """
        Update SNMP Service Configuration.

        `v3` when set enables SNMP version 3.

        `v3_username`, `v3_authtype`, `v3_password`, `v3_privproto` and `v3_privpassphrase` are only used when `v3`
        is enabled.
        """
        old = await self.config()

        new = old.copy()
        new.update(data)

        verrors = ValidationErrors()

        if not new['v3'] and not new['community']:
            verrors.add('snmp_update.community', 'This field is required when SNMPv3 is disabled')

        if new['v3_authtype'] and not new['v3_password']:
            verrors.add(
                'snmp_update.v3_password',
                'This field is required when SNMPv3 auth type is specified',
            )

        if new['v3_password'] and len(new['v3_password']) < 8:
            verrors.add('snmp_update.v3_password', 'Password must contain at least 8 characters')

        if new['v3_privproto'] and not new['v3_privpassphrase']:
            verrors.add(
                'snmp_update.v3_privpassphrase',
                'This field is required when SNMPv3 private protocol is specified',
            )

        if verrors:
            raise verrors

        await self._update_service(old, new)

        return await self.config()
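
A usage sketch (not part of the original source) for snmp.update, satisfying the validation above: 'community' is required while 'v3' is disabled, and a 'v3_password' would need at least 8 characters if an auth type were set. The same hypothetical 'middleware' handle is assumed and all values are illustrative.

async def example_snmp_update(middleware):
    # Hypothetical call against the 'snmp_entry' schema shown above.
    return await middleware.call('snmp.update', {
        'location': 'Rack 12, server room',
        'contact': 'admin@example.com',
        'community': 'public',
        'traps': False,
        'v3': False,
        'loglevel': 3,
        'options': '',
    })
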
Example #12
0
class UPSService(SystemServiceService):
    DRIVERS_AVAILABLE = set(os.listdir(DRIVER_BIN_DIR))

    class Config:
        datastore = 'services.ups'
        datastore_prefix = 'ups_'
        datastore_extend = 'ups.ups_config_extend'
        service = 'ups'
        service_verb = 'restart'

    @private
    async def ups_config_extend(self, data):
        data['mode'] = data['mode'].upper()
        data['shutdown'] = data['shutdown'].upper()
        data['toemail'] = [v for v in data['toemail'].split(';') if v]
        return data

    @accepts()
    async def port_choices(self):
        ports = [x for x in glob.glob('/dev/cua*') if x.find('.') == -1]
        ports.extend(glob.glob('/dev/ugen*'))
        ports.extend(glob.glob('/dev/uhid*'))
        return ports

    @accepts()
    def driver_choices(self):
        ups_choices = {}
        if os.path.exists("/conf/base/etc/local/nut/driver.list"):
            with open('/conf/base/etc/local/nut/driver.list', 'rb') as f:
                d = f.read().decode('utf8', 'ignore')
            r = io.StringIO()
            for line in re.sub(r'[ \t]+', ' ', d, flags=re.M).split('\n'):
                r.write(line.strip() + '\n')
            r.seek(0)
            reader = csv.reader(r, delimiter=' ', quotechar='"')
            for row in reader:
                if len(row) == 0 or row[0].startswith('#'):
                    continue
                if row[-2] == '#':
                    last = -3
                else:
                    last = -1
                driver_str = row[last]
                driver_annotation = ''
                m = re.match(r'(.+) \((.+)\)',
                             driver_str)  # "blazer_usb (USB ID 0665:5161)"
                if m:
                    driver_str, driver_annotation = m.group(1), m.group(2)
                for driver in driver_str.split(
                        ' or '):  # can be "blazer_ser or blazer_usb"
                    driver = driver.strip()
                    if driver not in self.DRIVERS_AVAILABLE:
                        continue
                    for i, field in enumerate(list(row)):
                        row[i] = field
                    ups_choices['$'.join(
                        [driver, row[3]])] = '%s (%s)' % (' '.join(
                            filter(None, row[0:last])), ', '.join(
                                filter(None, [driver, driver_annotation])))
        return ups_choices

    @private
    async def validate_data(self, data, schema):
        verrors = ValidationErrors()

        driver = data.get('driver')
        if driver:
            if driver not in (
                    await self.middleware.call('ups.driver_choices')).keys():
                verrors.add(
                    f'{schema}.driver',
                    'Driver selected does not match local machine\'s driver list'
                )

        identifier = data['identifier']
        if identifier:
            if not re.search(r'^[a-z0-9\.\-_]+$', identifier, re.I):
                verrors.add(f'{schema}.identifier',
                            'Use alphanumeric characters, ".", "-" and "_"')

        for field in [
                field for field in ['monpwd', 'monuser'] if data.get(field)
        ]:
            if re.search(r'[ #]', data[field], re.I):
                verrors.add(f'{schema}.{field}',
                            'Spaces or number signs are not allowed')

        mode = data.get('mode')
        if mode:
            if mode == 'MASTER':
                if not data.get('port'):
                    verrors.add(f'{schema}.port', 'This field is required')
            else:
                if not data.get('remotehost'):
                    verrors.add(f'{schema}.remotehost',
                                'This field is required')

        to_emails = data.get('toemail')
        if to_emails:
            data['toemail'] = ';'.join(to_emails)
        else:
            data['toemail'] = ''

        data['mode'] = data['mode'].lower()
        data['shutdown'] = data['shutdown'].lower()

        return verrors, data

    @accepts(
        Dict('ups_update',
             Bool('emailnotify'),
             Bool('powerdown'),
             Bool('rmonitor'),
             Int('nocommwarntime', null=True),
             Int('remoteport'),
             Int('shutdowntimer'),
             Int('hostsync', validators=[Range(min=0)]),
             Str('description'),
             Str('driver'),
             Str('extrausers'),
             Str('identifier'),
             Str('mode', enum=['MASTER', 'SLAVE']),
             Str('monpwd'),
             Str('monuser'),
             Str('options'),
             Str('optionsupsd'),
             Str('port'),
             Str('remotehost'),
             Str('shutdown', enum=['LOWBATT', 'BATT']),
             Str('shutdowncmd'),
             Str('subject'),
             List('toemail', items=[Str('email', validators=[Email()])]),
             update=True))
    async def do_update(self, data):
        config = await self.config()
        old_config = config.copy()
        config.update(data)
        verrors, config = await self.validate_data(config, 'ups_update')
        if verrors:
            raise verrors

        old_config['mode'] = old_config['mode'].lower()
        old_config['shutdown'] = old_config['shutdown'].lower()
        old_config['toemail'] = ';'.join(
            old_config['toemail']) if old_config['toemail'] else ''

        if len(set(old_config.items()) ^ set(config.items())) > 0:
            await self._update_service(old_config, config)

        return await self.config()

    @private
    @accepts(Str('notify_type'))
    async def upssched_event(self, notify_type):
        if notify_type.lower() == 'shutdown':
            syslog.syslog(syslog.LOG_INFO, 'upssched-cmd "issuing shutdown"')
            await run('/usr/local/sbin/upsmon', '-c', 'fsd', check=False)
        elif notify_type.lower() in ('email', 'commbad', 'commok'):
            config = await self.config()
            if config['emailnotify']:
                # Email user with the notification event and details
                # We send the email in the following format ( inclusive line breaks )

                # NOTIFICATION: 'EMAIL'
                # UPS: 'ups'
                #
                # Statistics recovered:
                #
                # 1) Battery charge (percent)
                # battery.charge: 5
                #
                # 2) Remaining battery level when UPS switches to LB (percent)
                # battery.charge.low: 10
                #
                # 3) Battery runtime (seconds)
                # battery.runtime: 1860
                #
                # 4) Remaining battery runtime when UPS switches to LB (seconds)
                # battery.runtime.low: 900

                ups_name = config['identifier']
                hostname = (await
                            self.middleware.call('system.info'))['hostname']
                current_time = datetime.datetime.now(tz=dateutil.tz.tzlocal(
                )).strftime('%a %b %d %H:%M:%S %Z %Y')
                ups_subject = config['subject'].replace('%d',
                                                        current_time).replace(
                                                            '%h', hostname)
                body = f'NOTIFICATION: {notify_type!r}<br>UPS: {ups_name!r}<br><br>'

                # Let's gather following stats
                data_points = {
                    'battery.charge':
                    'Battery charge (percent)',
                    'battery.charge.low':
                    'Battery level remaining (percent) when UPS switches to Low Battery (LB)',
                    'battery.charge.status':
                    'Battery charge status',
                    'battery.runtime':
                    'Battery runtime (seconds)',
                    'battery.runtime.low':
                    'Battery runtime remaining (seconds) when UPS switches to Low Battery (LB)',
                    'battery.runtime.restart':
                    'Minimum battery runtime (seconds) to allow UPS restart after power-off',
                }

                stats_output = (await run(
                    '/usr/local/bin/upsc',
                    f'{ups_name}@localhost:{config["remoteport"]}',
                    check=False)).stdout
                recovered_stats = re.findall(
                    fr'({"|".join(data_points)}): (.*)',
                    '' if not stats_output else stats_output.decode())

                if recovered_stats:
                    body += 'Statistics recovered:<br><br>'
                    # recovered_stats is expected to be a list in this format
                    # [('battery.charge', '5'), ('battery.charge.low', '10'), ('battery.runtime', '1860')]
                    for index, stat in enumerate(recovered_stats):
                        body += f'{index + 1}) {data_points[stat[0]]}<br>  {stat[0]}: {stat[1]}<br><br>'

                else:
                    body += 'Statistics could not be recovered<br>'

                # Subject and body defined, send email
                job = await self.middleware.call('mail.send', {
                    'subject': ups_subject,
                    'text': body,
                    'to': config['toemail']
                })

                await job.wait()
                if job.error:
                    self.middleware.logger.debug(
                        f'Failed to send UPS status email: {job.error}')

        else:
            self.middleware.logger.debug(
                f'Unrecognized UPS notification event: {notify_type}')
Example #13
0
class CertificateService(CRUDService):
    class Config:
        datastore = 'system.certificate'
        datastore_extend = 'certificate.cert_extend'
        datastore_prefix = 'cert_'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.map_functions = {
            'CERTIFICATE_CREATE_INTERNAL': self.__create_internal,
            'CERTIFICATE_CREATE_IMPORTED': self.__create_imported_certificate,
            'CERTIFICATE_CREATE_IMPORTED_CSR': self.__create_imported_csr,
            'CERTIFICATE_CREATE': self.__create_certificate,
            'CERTIFICATE_CREATE_CSR': self.__create_csr
        }

    @private
    async def cert_extend(self, cert):
        """Extend certificate with some useful attributes."""

        if cert.get('signedby'):

            # We query for signedby again to make sure its keys do not have the "cert_" prefix and it has gone through
            # the cert_extend method

            cert['signedby'] = await self.middleware.call(
                'datastore.query', 'system.certificateauthority',
                [('id', '=', cert['signedby']['id'])], {
                    'prefix': 'cert_',
                    'extend': 'certificate.cert_extend',
                    'get': True
                })

        # convert san to list
        cert['san'] = (cert.pop('san', '') or '').split()
        if cert['serial'] is not None:
            cert['serial'] = int(cert['serial'])

        if cert['type'] in (CA_TYPE_EXISTING, CA_TYPE_INTERNAL,
                            CA_TYPE_INTERMEDIATE):
            root_path = CERT_CA_ROOT_PATH
        else:
            root_path = CERT_ROOT_PATH
        cert['root_path'] = root_path
        cert['certificate_path'] = os.path.join(root_path,
                                                '{0}.crt'.format(cert['name']))
        cert['privatekey_path'] = os.path.join(root_path,
                                               '{0}.key'.format(cert['name']))
        cert['csr_path'] = os.path.join(root_path,
                                        '{0}.csr'.format(cert['name']))

        def cert_issuer(cert):
            issuer = None
            if cert['type'] in (CA_TYPE_EXISTING, CERT_TYPE_EXISTING):
                issuer = "external"
            elif cert['type'] == CA_TYPE_INTERNAL:
                issuer = "self-signed"
            elif cert['type'] in (CERT_TYPE_INTERNAL, CA_TYPE_INTERMEDIATE):
                issuer = cert['signedby']
            elif cert['type'] == CERT_TYPE_CSR:
                issuer = "external - signature pending"
            return issuer

        cert['issuer'] = cert_issuer(cert)

        cert['chain_list'] = []
        if cert['chain']:
            certs = RE_CERTIFICATE.findall(cert['certificate'])
        else:
            certs = [cert['certificate']]
            signing_CA = cert['issuer']
            # Recursively get all internal/intermediate certificates
            # FIXME: NONE HAS BEEN ADDED IN THE FOLLOWING CHECK FOR CSR'S WHICH HAVE BEEN SIGNED BY A CA
            while signing_CA not in [
                    "external", "self-signed", "external - signature pending",
                    None
            ]:
                certs.append(signing_CA['certificate'])
                signing_CA['issuer'] = cert_issuer(signing_CA)
                signing_CA = signing_CA['issuer']

        cert_obj = None
        try:
            for c in certs:
                # XXX Why load certificate if we are going to dump it right after?
                # Maybe just to verify its integrity?
                # Logic copied from freenasUI
                if c:
                    cert_obj = crypto.load_certificate(crypto.FILETYPE_PEM, c)
                    cert['chain_list'].append(
                        crypto.dump_certificate(crypto.FILETYPE_PEM,
                                                cert_obj).decode())
        except Exception:
            self.logger.debug('Failed to load certificate {0}'.format(
                cert['name']),
                              exc_info=True)

        try:
            if cert['privatekey']:
                key_obj = crypto.load_privatekey(crypto.FILETYPE_PEM,
                                                 cert['privatekey'])
                cert['privatekey'] = crypto.dump_privatekey(
                    crypto.FILETYPE_PEM, key_obj).decode()
        except Exception:
            self.logger.debug('Failed to load privatekey {0}'.format(
                cert['name']),
                              exc_info=True)

        try:
            if cert['CSR']:
                csr_obj = crypto.load_certificate_request(
                    crypto.FILETYPE_PEM, cert['CSR'])
                cert['CSR'] = crypto.dump_certificate_request(
                    crypto.FILETYPE_PEM, csr_obj).decode()
        except Exception:
            self.logger.debug('Failed to load csr {0}'.format(cert['name']),
                              exc_info=True)

        cert['internal'] = 'NO' if cert['type'] in (
            CA_TYPE_EXISTING, CERT_TYPE_EXISTING) else 'YES'

        obj = None
        # date not applicable for CSR
        cert['from'] = None
        cert['until'] = None
        if cert['type'] == CERT_TYPE_CSR:
            obj = csr_obj
        elif cert_obj:
            obj = crypto.load_certificate(crypto.FILETYPE_PEM,
                                          cert['certificate'])
            notBefore = obj.get_notBefore()
            t1 = dateutil.parser.parse(notBefore)
            t2 = t1.astimezone(dateutil.tz.tzutc())
            cert['from'] = t2.ctime()

            notAfter = obj.get_notAfter()
            t1 = dateutil.parser.parse(notAfter)
            t2 = t1.astimezone(dateutil.tz.tzutc())
            cert['until'] = t2.ctime()

        if obj:
            cert['DN'] = '/' + '/'.join([
                '%s=%s' % (c[0].decode(), c[1].decode())
                for c in obj.get_subject().get_components()
            ])

        return cert

    # HELPER METHODS

    @private
    def create_self_signed_cert(self):
        key = generate_key(2048)
        cert = crypto.X509()
        cert.get_subject().C = 'US'
        cert.get_subject().O = 'iXsystems'
        cert.get_subject().CN = 'localhost'
        cert.set_serial_number(1)

        cert.get_subject().emailAddress = '*****@*****.**'

        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(3600 * (60 * 60 * 24))

        cert.set_issuer(cert.get_subject())
        cert.set_version(2)
        cert.set_pubkey(key)
        cert.sign(key, 'SHA256')
        return cert, key

    @private
    @accepts(Str('hostname', required=True), Int('port', required=True))
    def get_host_certificates_thumbprint(self, hostname, port):
        try:
            conn = ssl.create_connection((hostname, port))
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            sock = context.wrap_socket(conn, server_hostname=hostname)
            certificate = ssl.DER_cert_to_PEM_cert(sock.getpeercert(True))
            return self.fingerprint(certificate)
        except ConnectionRefusedError:
            return ''
        except socket.gaierror:
            return ''

    @private
    @accepts(Str('certificate', required=True))
    def load_certificate(self, certificate):
        try:
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, certificate)
        except crypto.Error:
            return {}
        else:
            cert_info = self.get_x509_subject(cert)
            cert_info['serial'] = cert.get_serial_number()

            signature_algorithm = cert.get_signature_algorithm().decode()
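            # Signature algorithm names look like 'sha256WithRSAEncryption';
            # keep only the digest portion before 'With'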
            m = re.match('^(.+)[Ww]ith', signature_algorithm)
            if m:
                cert_info['digest_algorithm'] = m.group(1).upper()

            return cert_info

    @private
    def get_x509_subject(self, obj):
        return {
            'country': obj.get_subject().C,
            'state': obj.get_subject().ST,
            'city': obj.get_subject().L,
            'organization': obj.get_subject().O,
            'organizational_unit': obj.get_subject().OU,
            'common': obj.get_subject().CN,
            'san': obj.get_subject().subjectAltName,
            'email': obj.get_subject().emailAddress,
        }

    @private
    @accepts(Str('csr', required=True))
    def load_certificate_request(self, csr):
        try:
            csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, csr)
        except crypto.Error:
            return {}
        else:
            return self.get_x509_subject(csr)

    @private
    async def get_fingerprint_of_cert(self, certificate_id):
        certificate_list = await self.query(filters=[('id', '=',
                                                      certificate_id)])
        if len(certificate_list) == 0:
            return None
        else:
            return await self.middleware.run_in_thread(
                self.fingerprint, certificate_list[0]['certificate'])

    @private
    @accepts(Str('cert_certificate', required=True))
    def fingerprint(self, cert_certificate):
        # Get the SHA1 fingerprint of the given PEM certificate
        try:
            certificate = crypto.load_certificate(crypto.FILETYPE_PEM,
                                                  cert_certificate)
        except Exception:
            return None
        else:
            return certificate.digest('sha1').decode()

    @private
    async def san_to_string(self, san_list):
        # TODO: ADD MORE TYPES WRT RFC'S
        san_string = ''
        ip_validator = IpAddress()
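        # SAN entries that parse as IP addresses are emitted as 'IP:' values;
        # everything else is treated as a DNS name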
        for count, san in enumerate(san_list or []):
            try:
                ip_validator(san)
            except ValueError:
                san_string += f'DNS: {san}, '
            else:
                san_string += f'IP: {san}, '
        return san_string[:-2] if san_list else ''

    @private
    @accepts(
        Dict('certificate_cert_info',
             Int('key_length'),
             Int('serial', required=False),
             Int('lifetime', required=True),
             Str('country', required=True),
             Str('state', required=True),
             Str('city', required=True),
             Str('organization', required=True),
             Str('organizational_unit'),
             Str('common', required=True),
             Str('email', validators=[Email()], required=True),
             Str('digest_algorithm',
                 enum=['SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512']),
             List('san', items=[Str('san')]),
             register=True))
    def create_certificate(self, cert_info):

        cert_info['san'] = self.middleware.call_sync(
            'certificate.san_to_string', cert_info.pop('san', []))

        cert = crypto.X509()
        cert.get_subject().C = cert_info['country']
        cert.get_subject().ST = cert_info['state']
        cert.get_subject().L = cert_info['city']
        cert.get_subject().O = cert_info['organization']
        if cert_info.get('organizational_unit'):
            cert.get_subject().OU = cert_info['organizational_unit']
        cert.get_subject().CN = cert_info['common']
        # Add Subject Alternative Name(s) in addition to the CN

        if cert_info['san']:
            cert.add_extensions([
                crypto.X509Extension(b"subjectAltName", False,
                                     cert_info['san'].encode())
            ])
            cert.get_subject().subjectAltName = cert_info['san']
        cert.get_subject().emailAddress = cert_info['email']

        serial = cert_info.get('serial')
        if serial is not None:
            cert.set_serial_number(serial)

        cert.gmtime_adj_notBefore(0)
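        # `lifetime` is given in days; gmtime_adj_notAfter() expects seconds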
        cert.gmtime_adj_notAfter(cert_info['lifetime'] * (60 * 60 * 24))

        cert.set_issuer(cert.get_subject())
        # Setting it to '2' actually results in a v3 cert
        # openssl's cert x509 versions are zero-indexed!
        # see: https://www.ietf.org/rfc/rfc3280.txt
        cert.set_version(2)
        return cert

    @private
    @accepts(
        Patch('certificate_cert_info', 'certificate_signing_request',
              ('rm', {
                  'name': 'lifetime'
              })))
    def create_certificate_signing_request(self, cert_info):

        cert_info['san'] = self.middleware.call_sync(
            'certificate.san_to_string', cert_info.pop('san', []))

        key = generate_key(cert_info['key_length'])

        req = crypto.X509Req()
        req.get_subject().C = cert_info['country']
        req.get_subject().ST = cert_info['state']
        req.get_subject().L = cert_info['city']
        req.get_subject().O = cert_info['organization']
        if cert_info.get('organizational_unit'):
            req.get_subject().OU = cert_info['organizational_unit']
        req.get_subject().CN = cert_info['common']

        if cert_info['san']:
            req.add_extensions([
                crypto.X509Extension(b"subjectAltName", False,
                                     cert_info['san'].encode())
            ])
            req.get_subject().subjectAltName = cert_info['san']
        req.get_subject().emailAddress = cert_info['email']

        req.set_pubkey(key)
        req.sign(key, cert_info['digest_algorithm'])

        return (req, key)

    @private
    async def validate_common_attributes(self, data, schema_name):
        verrors = ValidationErrors()

        await _validate_common_attributes(self.middleware, data, verrors,
                                          schema_name)

        return verrors

    # CREATE METHODS FOR CREATING CERTIFICATES
    # "do_create" IS CALLED FIRST AND THEN BASED ON THE TYPE OF THE CERTIFICATE WHICH IS TO BE CREATED THE
    # APPROPRIATE METHOD IS CALLED
    # FOLLOWING TYPES ARE SUPPORTED
    # CREATE_TYPE ( STRING )          - METHOD CALLED
    # CERTIFICATE_CREATE_INTERNAL     - __create_internal
    # CERTIFICATE_CREATE_IMPORTED     - __create_imported_certificate
    # CERTIFICATE_CREATE_IMPORTED_CSR - __create_imported_csr
    # CERTIFICATE_CREATE              - __create_certificate
    # CERTIFICATE_CREATE_CSR          - __create_csr

    @accepts(
        Dict('certificate_create',
             Int('csr_id'),
             Int('signedby'),
             Int('key_length'),
             Int('type'),
             Int('lifetime'),
             Int('serial', validators=[Range(min=1)]),
             Str('certificate'),
             Str('city'),
             Str('common'),
             Str('country'),
             Str('CSR'),
             Str('email', validators=[Email()]),
             Str('name', required=True),
             Str('organization'),
             Str('organizational_unit'),
             Str('passphrase'),
             Str('privatekey'),
             Str('state'),
             Str('create_type',
                 enum=[
                     'CERTIFICATE_CREATE_INTERNAL',
                     'CERTIFICATE_CREATE_IMPORTED', 'CERTIFICATE_CREATE',
                     'CERTIFICATE_CREATE_CSR',
                     'CERTIFICATE_CREATE_IMPORTED_CSR'
                 ],
                 required=True),
             Str('digest_algorithm',
                 enum=['SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512']),
             List('san', items=[Str('san')]),
             register=True))
    async def do_create(self, data):
        if not data.get('san'):
            data.pop('san', None)

        verrors = await self.validate_common_attributes(
            data, 'certificate_create')

        await validate_cert_name(self.middleware, data['name'],
                                 self._config.datastore, verrors,
                                 'certificate_create.name')

        if verrors:
            raise verrors

        # TODO: ENFORCE THAT THE RIGHT PARAMETERS GO TO THE NEXT CREATE FUNCTION

        data = await self.middleware.run_in_thread(
            self.map_functions[data.pop('create_type')], data)

        data['san'] = ' '.join(data.pop('san', []) or [])

        pk = await self.middleware.call(
            'datastore.insert', self._config.datastore, data,
            {'prefix': self._config.datastore_prefix})

        await self.middleware.call('service.start', 'ix-ssl',
                                   {'onetime': False})

        return await self._get_instance(pk)

    @accepts(
        Patch('certificate_create_internal', 'certificate_create_csr',
              ('rm', {
                  'name': 'signedby'
              }), ('rm', {
                  'name': 'lifetime'
              })))
    def __create_csr(self, data):
        # no signedby, lifetime attributes required
        cert_info = get_cert_info_from_data(data)

        data['type'] = CERT_TYPE_CSR

        req, key = self.create_certificate_signing_request(cert_info)

        data['CSR'] = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        data['privatekey'] = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)

        return data

    @accepts(
        Dict('create_imported_csr', Str('CSR', required=True), Str('name'),
             Str('privatekey', required=True), Str('passphrase')))
    def __create_imported_csr(self, data):

        data['type'] = CERT_TYPE_CSR

        for k, v in self.load_certificate_request(data['CSR']).items():
            data[k] = v

        if 'passphrase' in data:
            data['privatekey'] = export_private_key(data['privatekey'],
                                                    data['passphrase'])

        data.pop('passphrase', None)

        return data

    @accepts(
        Patch('certificate_create', 'create_certificate',
              ('edit', _set_required('certificate')),
              ('edit', _set_required('privatekey')),
              ('edit', _set_required('type')), ('rm', {
                  'name': 'create_type'
              })))
    def __create_certificate(self, data):

        for k, v in self.load_certificate(data['certificate']).items():
            data[k] = v

        return data

    @accepts(
        Dict('certificate_create_imported', Int('csr_id'),
             Str('certificate', required=True), Str('name'), Str('passphrase'),
             Str('privatekey')))
    def __create_imported_certificate(self, data):
        verrors = ValidationErrors()

        csr_id = data.pop('csr_id', None)
        if csr_id:
            csr_obj = self.middleware.call_sync(
                'certificate.query',
                [['id', '=', csr_id], ['CSR', '!=', None]])
            if not csr_obj:
                verrors.add('certificate_create.csr_id',
                            f'No CSR exists with id {csr_id}')
            else:
                data['privatekey'] = csr_obj[0]['privatekey']
                data.pop('passphrase', None)
        elif not data.get('privatekey'):
            verrors.add(
                'certificate_create.privatekey',
                'Private key is required when importing a certificate')

        if verrors:
            raise verrors

        data['type'] = CERT_TYPE_EXISTING

        data = self.__create_certificate(data)

        data['chain'] = True if len(RE_CERTIFICATE.findall(
            data['certificate'])) > 1 else False

        if 'passphrase' in data:
            data['privatekey'] = export_private_key(data['privatekey'],
                                                    data['passphrase'])

        data.pop('passphrase', None)

        return data

    @accepts(
        Patch('certificate_create',
              'certificate_create_internal',
              ('edit', _set_required('key_length')),
              ('edit', _set_required('digest_algorithm')),
              ('edit', _set_required('lifetime')),
              ('edit', _set_required('country')),
              ('edit', _set_required('state')),
              ('edit', _set_required('city')),
              ('edit', _set_required('organization')),
              ('edit', _set_required('email')),
              ('edit', _set_required('common')),
              ('edit', _set_required('signedby')), ('rm', {
                  'name': 'create_type'
              }),
              register=True))
    def __create_internal(self, data):

        cert_info = get_cert_info_from_data(data)
        data['type'] = CERT_TYPE_INTERNAL

        signing_cert = self.middleware.call_sync(
            'certificateauthority.query', [('id', '=', data['signedby'])],
            {'get': True})

        public_key = generate_key(data['key_length'])
        signkey = load_private_key(signing_cert['privatekey'])

        cert = self.middleware.call_sync('certificate.create_certificate',
                                         cert_info)
        cert.set_pubkey(public_key)
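        # Use the signing CA's subject as the issuer and add a subjectKeyIdentifier
        # extension derived from the new certificate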
        cacert = crypto.load_certificate(crypto.FILETYPE_PEM,
                                         signing_cert['certificate'])
        cert.set_issuer(cacert.get_subject())
        cert.add_extensions([
            crypto.X509Extension(b"subjectKeyIdentifier",
                                 False,
                                 b"hash",
                                 subject=cert),
        ])

        cert_serial = self.middleware.call_sync(
            'certificateauthority.get_serial_for_certificate',
            data['signedby'])

        cert.set_serial_number(cert_serial)
        cert.sign(signkey, data['digest_algorithm'])

        data['certificate'] = crypto.dump_certificate(crypto.FILETYPE_PEM,
                                                      cert)
        data['privatekey'] = crypto.dump_privatekey(crypto.FILETYPE_PEM,
                                                    public_key)
        data['serial'] = cert_serial

        return data

    @accepts(Int('id', required=True), Dict('certificate_update', Str('name')))
    async def do_update(self, id, data):
        old = await self._get_instance(id)
        # signedby is changed back to integer from a dict
        old['signedby'] = old['signedby']['id'] if old.get(
            'signedby') else None

        new = old.copy()

        new.update(data)

        if new['name'] != old['name']:

            verrors = ValidationErrors()

            await validate_cert_name(self.middleware, data['name'],
                                     self._config.datastore, verrors,
                                     'certificate_update.name')

            if verrors:
                raise verrors

            new['san'] = ' '.join(new.pop('san', []) or [])

            await self.middleware.call(
                'datastore.update', self._config.datastore, id, new,
                {'prefix': self._config.datastore_prefix})

            await self.middleware.call('service.start', 'ix-ssl',
                                       {'onetime': False})

        return await self._get_instance(id)

    @accepts(Int('id'))
    async def do_delete(self, id):
        certificate = await self._get_instance(id)

        if (await self.middleware.call('system.general.config')
            )['ui_certificate']['id'] == id:
            verrors = ValidationErrors()

            verrors.add(
                'certificate_delete.id',
                'The selected certificate is in use by the system HTTPS server; please select a different one'
            )

            raise verrors

        response = await self.middleware.call('datastore.delete',
                                              self._config.datastore, id)

        await self.middleware.call('service.start', 'ix-ssl',
                                   {'onetime': False})

        sentinel = f'/tmp/alert_invalidcert_{certificate["name"]}'
        if os.path.exists(sentinel):
            os.unlink(sentinel)
            await self.middleware.call('alert.process_alerts')

        return response
Example #14
class CryptoKeyService(Service):
    class Config:
        private = True

    def normalize_san(self, san_list):
        return normalize_san(san_list)

    def generate_self_signed_certificate(self):
        return generate_self_signed_certificate()

    @accepts(
        Dict('certificate_cert_info',
             Int('key_length'),
             Int('serial', required=False, null=True),
             Int('lifetime', required=True),
             Str('ca_certificate', required=False, max_length=None),
             Str('ca_privatekey', required=False, max_length=None),
             Str('key_type', required=False),
             Str('ec_curve', required=False),
             Str('country', required=True),
             Str('state', required=True),
             Str('city', required=True),
             Str('organization', required=True),
             Str('organizational_unit'),
             Str('common', null=True),
             Str('email', validators=[Email()], required=True),
             Str('digest_algorithm',
                 enum=['SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512']),
             List('san', items=[Str('san')], required=True, empty=False),
             Dict('cert_extensions',
                  Dict('BasicConstraints', Bool('ca', default=False),
                       Bool('enabled', default=False),
                       Int('path_length', null=True, default=None),
                       Bool('extension_critical', default=False)),
                  Dict('AuthorityKeyIdentifier',
                       Bool('authority_cert_issuer', default=False),
                       Bool('enabled', default=False),
                       Bool('extension_critical', default=False)),
                  Dict('ExtendedKeyUsage',
                       List('usages', items=[Str('usage', enum=EKU_OIDS)]),
                       Bool('enabled', default=False),
                       Bool('extension_critical', default=False)),
                  Dict('KeyUsage', Bool('enabled', default=False),
                       Bool('digital_signature', default=False),
                       Bool('content_commitment', default=False),
                       Bool('key_encipherment', default=False),
                       Bool('data_encipherment', default=False),
                       Bool('key_agreement', default=False),
                       Bool('key_cert_sign', default=False),
                       Bool('crl_sign', default=False),
                       Bool('encipher_only', default=False),
                       Bool('decipher_only', default=False),
                       Bool('extension_critical', default=False)),
                  register=True),
             register=True))
    def generate_certificate(self, data):
        return generate_certificate(data)

    @accepts(Ref('certificate_cert_info'))
    def generate_self_signed_ca(self, data):
        return self.generate_certificate_authority(data)

    @accepts(Ref('certificate_cert_info'))
    def generate_certificate_authority(self, data):
        return generate_certificate_authority(data)

    @accepts(
        Dict('sign_csr', Str('ca_certificate', required=True, max_length=None),
             Str('ca_privatekey', required=True, max_length=None),
             Str('csr', required=True, max_length=None),
             Str('csr_privatekey', required=True, max_length=None),
             Int('serial', required=True),
             Str('digest_algorithm', default='SHA256'),
             Ref('cert_extensions')))
    def sign_csr_with_ca(self, data):
        csr_data = load_certificate_request(data['csr'])
        ca_data = load_certificate(data['ca_certificate'])
        ca_key = load_private_key(data['ca_privatekey'])
        csr_key = load_private_key(data['csr_privatekey'])
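        # Build the new certificate with the CSR's subject and the CA's issuer fields,
        # carrying over the CSR's SAN entries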
        new_cert = generate_builder({
            'crypto_subject_name':
            {k: csr_data.get(v)
             for k, v in CERT_BACKEND_MAPPINGS.items()},
            'crypto_issuer_name':
            {k: ca_data.get(v)
             for k, v in CERT_BACKEND_MAPPINGS.items()},
            'serial': data['serial'],
            'san': normalize_san(csr_data.get('san'))
        })

        new_cert = add_extensions(
            new_cert, data.get('cert_extensions'), csr_key,
            x509.load_pem_x509_certificate(data['ca_certificate'].encode(),
                                           default_backend()))

        new_cert = new_cert.sign(ca_key,
                                 retrieve_signing_algorithm(data, ca_key),
                                 default_backend())

        return new_cert.public_bytes(serialization.Encoding.PEM).decode()
Example #15
class SupportService(ConfigService):

    class Config:
        datastore = 'system.support'
        cli_namespace = 'system.support'

    ENTRY = Dict(
        'support_entry',
        Bool('enabled', null=True, required=True),
        Str('name', required=True),
        Str('title', required=True),
        Str('email', required=True),
        Str('phone', required=True),
        Str('secondary_name', required=True),
        Str('secondary_title', required=True),
        Str('secondary_email', required=True),
        Str('secondary_phone', required=True),
        Int('id', required=True),
    )

    async def do_update(self, data):
        """
        Update Proactive Support settings.
        """

        config_data = await self.config()
        config_data.update(data)

        verrors = ValidationErrors()
        if config_data['enabled']:
            for key in ['name', 'title', 'email', 'phone']:
                for prefix in ['', 'secondary_']:
                    field = prefix + key
                    if not config_data[field]:
                        verrors.add(f'support_update.{field}', 'This field is required')
        if verrors:
            raise verrors

        await self.middleware.call(
            'datastore.update',
            self._config.datastore,
            config_data['id'],
            config_data,
        )

        return await self.config()

    @accepts()
    @returns(Bool('proactive_support_is_available'))
    async def is_available(self):
        """
        Returns whether Proactive Support is available for this product type and current license.
        """

        if not await self.middleware.call('system.is_enterprise'):
            return False

        license = await self.middleware.call('system.license')
        if license is None:
            return False

        return license['contract_type'] in ['SILVER', 'GOLD']

    @accepts()
    @returns(Bool('proactive_support_is_available_and_enabled'))
    async def is_available_and_enabled(self):
        """
        Returns whether Proactive Support is available and enabled.
        """

        return await self.is_available() and (await self.config())['enabled']

    @accepts()
    @returns(List('support_fields', items=[List('support_field', items=[Str('field')])]))
    async def fields(self):
        """
        Returns list of pairs of field names and field titles for Proactive Support.
        """
        return [
            ['name', 'Contact Name'],
            ['title', 'Contact Title'],
            ['email', 'Contact E-mail'],
            ['phone', 'Contact Phone'],
            ['secondary_name', 'Secondary Contact Name'],
            ['secondary_title', 'Secondary Contact Title'],
            ['secondary_email', 'Secondary Contact E-mail'],
            ['secondary_phone', 'Secondary Contact Phone'],
        ]

    @accepts(Str('token'))
    @returns(Dict(additional_attrs=True, example={'API': '11008', 'WebUI': '10004'}))
    async def fetch_categories(self, token):
        """
        Fetch issue categories using access token `token`.
        Returns a dict with the category name as the key and its id as the value.
        """

        await self.middleware.call('network.general.will_perform_activity', 'support')

        sw_name = 'freenas' if not await self.middleware.call('system.is_enterprise') else 'truenas'
        data = await post(
            f'https://{ADDRESS}/{sw_name}/api/v1.0/categories',
            data=json.dumps({
                'token': token,
            }),
        )

        if 'error' in data:
            raise CallError(data['message'], errno.EINVAL)

        return data

    @accepts(Dict(
        'new_ticket',
        Str('title', required=True, max_length=None),
        Str('body', required=True, max_length=None),
        Str('category', required=True),
        Bool('attach_debug', default=False),
        Str('token', private=True),
        Str('type', enum=['BUG', 'FEATURE']),
        Str('criticality'),
        Str('environment', max_length=None),
        Str('phone'),
        Str('name'),
        Str('email', validators=[Email()]),
        List('cc', items=[Str('email', validators=[Email()])])
    ))
    @returns(Dict(
        'new_ticket_response',
        Int('ticket', null=True),
        Str('url', null=True),
        Bool('has_debug'),
        register=True
    ))
    @job()
    async def new_ticket(self, job, data):
        """
        Creates a new ticket for support.
        This is done using the support proxy API.
        For TrueNAS SCALE it will be created on JIRA and for TrueNAS SCALE Enterprise on Salesforce.

        For SCALE `criticality`, `environment`, `phone`, `name` and `email` attributes are not required.
        For SCALE Enterprise `token` and `type` attributes are not required.
        """

        await self.middleware.call('network.general.will_perform_activity', 'support')

        job.set_progress(1, 'Gathering data')

        sw_name = 'freenas' if not await self.middleware.call('system.is_enterprise') else 'truenas'

        if sw_name == 'freenas':
            required_attrs = ('type', 'token')
        else:
            required_attrs = ('phone', 'name', 'email', 'criticality', 'environment')
            data['serial'] = (await self.middleware.call('system.dmidecode_info'))['system-serial-number']
            license = await self.middleware.call('system.license')
            if license:
                data['company'] = license['customer_name']
            else:
                data['company'] = 'Unknown'

        for i in required_attrs:
            if i not in data:
                raise CallError(f'{i} is required', errno.EINVAL)

        data['version'] = (await self.middleware.call('system.version')).split('-', 1)[-1]
        debug = data.pop('attach_debug')

        type_ = data.get('type')
        if type_:
            data['type'] = type_.lower()

        job.set_progress(20, 'Submitting ticket')

        result = await post(
            f'https://{ADDRESS}/{sw_name}/api/v1.0/ticket',
            data=json.dumps(data),
        )
        if result['error']:
            raise CallError(result['message'], errno.EINVAL)

        ticket = result.get('ticketnum')
        url = result.get('message')
        if not ticket:
            raise CallError('New ticket number was not returned by the server', errno.EINVAL)
        job.set_progress(50, f'Ticket created: {ticket}', extra={'ticket': ticket})

        has_debug = False
        if debug:
            job.set_progress(60, 'Generating debug file')

            debug_job = await self.middleware.call(
                'system.debug', pipes=Pipes(output=self.middleware.pipe()),
            )

            if await self.middleware.call('failover.licensed'):
                debug_name = 'debug-{}.tar'.format(time.strftime('%Y%m%d%H%M%S'))
            else:
                debug_name = 'debug-{}-{}.txz'.format(
                    (await self.middleware.call('system.hostname')).split('.')[0],
                    time.strftime('%Y%m%d%H%M%S'),
                )

            with tempfile.NamedTemporaryFile("w+b") as f:
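                # Stream the debug job's output into the temporary file, giving up
                # (has_debug stays False) if it exceeds DEBUG_MAX_SIZE MiB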
                def copy1():
                    nonlocal has_debug
                    try:
                        rbytes = 0
                        while True:
                            r = debug_job.pipes.output.r.read(1048576)
                            if r == b'':
                                break

                            rbytes += len(r)
                            if rbytes > DEBUG_MAX_SIZE * 1048576:
                                return

                            f.write(r)

                        f.seek(0)
                        has_debug = True
                    finally:
                        debug_job.pipes.output.r.read()

                await self.middleware.run_in_thread(copy1)
                await debug_job.wait()

                if has_debug:
                    job.set_progress(80, 'Attaching debug file')

                    t = {
                        'ticket': ticket,
                        'filename': debug_name,
                    }
                    if 'token' in data:
                        t['token'] = data['token']
                    tjob = await self.middleware.call(
                        'support.attach_ticket', t, pipes=Pipes(input=self.middleware.pipe()),
                    )

                    def copy2():
                        try:
                            shutil.copyfileobj(f, tjob.pipes.input.w)
                        finally:
                            tjob.pipes.input.w.close()

                    await self.middleware.run_in_thread(copy2)
                    await tjob.wait()
        else:
            job.set_progress(100)

        return {
            'ticket': ticket,
            'url': url,
            'has_debug': has_debug,
        }

    @accepts(Dict(
        'attach_ticket',
        Int('ticket', required=True),
        Str('filename', required=True, max_length=None),
        Str('token', private=True),
    ))
    @returns()
    @job(pipes=["input"])
    def attach_ticket(self, job, data):
        """
        Attach a file to an existing ticket.
        """

        self.middleware.call_sync('network.general.will_perform_activity', 'support')

        sw_name = 'freenas' if not self.middleware.call_sync('system.is_enterprise') else 'truenas'

        data['ticketnum'] = data.pop('ticket')
        filename = data.pop('filename')

        try:
            r = requests.post(
                f'https://{ADDRESS}/{sw_name}/api/v1.0/ticket/attachment',
                data=data,
                timeout=300,
                files={'file': (filename, job.pipes.input.r)},
            )
        except requests.ConnectionError as e:
            raise CallError(f'Connection error {e}', errno.EBADF)
        except requests.Timeout:
            raise CallError('Connection timed out', errno.ETIMEDOUT)

        try:
            data = r.json()
        except simplejson.JSONDecodeError:
            self.logger.debug(f'Failed to decode ticket attachment response: {r.text}')
            raise CallError('Invalid proxy server response', errno.EBADMSG)

        if data['error']:
            raise CallError(data['message'], errno.EINVAL)

    @accepts()
    @returns(Int())
    async def attach_ticket_max_size(self):
        """
        Returns the maximum uploaded file size for `support.attach_ticket`.
        """
        return DEBUG_MAX_SIZE
Example #16
File: mail.py  Project: yaplej/freenas
class MailService(ConfigService):

    oauth_access_token = None
    oauth_access_token_expires_at = None

    class Config:
        datastore = 'system.email'
        datastore_prefix = 'em_'
        datastore_extend = 'mail.mail_extend'
        cli_namespace = 'system.mail'

    ENTRY = Dict(
        'mail_entry',
        Str('fromemail', validators=[Email()], required=True),
        Str('fromname', required=True),
        Str('outgoingserver', required=True),
        Int('port', required=True),
        Str('security', enum=['PLAIN', 'SSL', 'TLS'], required=True),
        Bool('smtp', required=True),
        Str('user', null=True, required=True),
        Str('pass', private=True, null=True, required=True),
        Dict(
            'oauth',
            Str('client_id'),
            Str('client_secret'),
            Str('refresh_token'),
            null=True,
            private=True,
            required=True,
        ),
        Int('id', required=True),
    )

    @private
    async def mail_extend(self, cfg):
        if cfg['security']:
            cfg['security'] = cfg['security'].upper()
        return cfg

    @accepts(
        Patch('mail_entry',
              'mail_update', ('rm', {
                  'name': 'id'
              }), ('replace',
                   Dict(
                       'oauth',
                       Str('client_id', required=True),
                       Str('client_secret', required=True),
                       Str('refresh_token', required=True),
                       null=True,
                       private=True,
                   )), ('attr', {
                       'update': True
                   }),
              register=True))
    async def do_update(self, data):
        """
        Update Mail Service Configuration.

        `fromemail` is the sending address the mail server will use for outgoing emails.

        `outgoingserver` is the hostname or IP address of the SMTP server used for sending email.

        `security` is the type of encryption desired.

        `smtp` is a boolean value which, when set, indicates that SMTP authentication is enabled and that `user`/`pass`
        become required attributes.
        """
        config = await self.config()

        new = config.copy()
        new.update(data)
        new['security'] = new['security'].lower()  # Django Model compatibility

        verrors = ValidationErrors()

        if new['smtp'] and new['user'] == '':
            verrors.add(
                'mail_update.user',
                'This field is required when SMTP authentication is enabled',
            )

        if new['oauth']:
            if new['fromemail']:
                verrors.add('mail_update.fromemail',
                            'This field cannot be used with GMail')
            if new['fromname']:
                verrors.add('mail_update.fromname',
                            'This field cannot be used with GMail')

        self.__password_verify(new['pass'], 'mail_update.pass', verrors)

        if verrors:
            raise verrors

        await self.middleware.call('datastore.update', 'system.email',
                                   config['id'], new, {'prefix': 'em_'})

        await self.middleware.call('mail.gmail_initialize')

        return await self.config()

    def __password_verify(self, password, schema, verrors=None):
        if not password:
            return
        if verrors is None:
            verrors = ValidationErrors()
        # FIXME: smtplib does not support non-ascii password yet
        # https://github.com/python/cpython/pull/8938
        try:
            password.encode('ascii')
        except UnicodeEncodeError:
            verrors.add(
                schema,
                'Only plain text characters (7-bit ASCII) are allowed in passwords. '
                'UTF or composed characters are not allowed.')
        return verrors

    @accepts(
        Dict(
            'mail_message',
            Str('subject', required=True),
            Str('text', max_length=None),
            Str('html', null=True, max_length=None),
            List('to', items=[Str('email')]),
            List('cc', items=[Str('email')]),
            Int('interval', null=True),
            Str('channel', null=True),
            Int('timeout', default=300),
            Bool('attachments', default=False),
            Bool('queue', default=True),
            Dict('extra_headers', additional_attrs=True),
            register=True,
        ), Ref('mail_update'))
    @returns(Bool('successfully_sent'))
    @job(pipes=['input'], check_pipes=False)
    def send(self, job, message, config):
        """
        Sends mail using configured mail settings.

        `text` will be formatted to HTML using Markdown and rendered with the default E-Mail template.
        You can supply your own HTML using `html`. If `html` is null, no HTML MIME part will be added to the E-Mail.

        If `attachments` is true, a list of dicts with the following structure is required
        via HTTP upload:
          - headers(list)
            - name(str)
            - value(str)
            - params(dict)
          - content (str)

        [
         {
          "headers": [
           {
            "name": "Content-Transfer-Encoding",
            "value": "base64"
           },
           {
            "name": "Content-Type",
            "value": "application/octet-stream",
            "params": {
             "name": "test.txt"
            }
           }
          ],
          "content": "dGVzdAo="
         }
        ]
        """

        product_name = self.middleware.call_sync('system.product_name')

        gc = self.middleware.call_sync('datastore.config',
                                       'network.globalconfiguration')

        hostname = f'{gc["gc_hostname"]}.{gc["gc_domain"]}'

        message['subject'] = f'{product_name} {hostname}: {message["subject"]}'

        add_html = True
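        # An explicit null `html` means the caller wants a plain-text-only message;
        # skip adding an HTML MIME part later on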
        if 'html' in message and message['html'] is None:
            message.pop('html')
            add_html = False

        if 'text' not in message:
            if 'html' not in message:
                verrors = ValidationErrors()
                verrors.add('mail_message.text',
                            'Text is required when HTML is not set')
                verrors.check()

            message['text'] = html2text.html2text(message['html'])

        if add_html and 'html' not in message:
            lookup = TemplateLookup(directories=[
                os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             '../assets/templates')
            ],
                                    module_directory="/tmp/mako/templates")

            tmpl = lookup.get_template('mail.html')

            message['html'] = tmpl.render(
                body=html.escape(message['text']).replace('\n', '<br>\n'))

        return self.send_raw(job, message, config)

    @accepts(Ref('mail_message'), Ref('mail_update'))
    @job(pipes=['input'], check_pipes=False)
    @private
    def send_raw(self, job, message, config):
        config = dict(self.middleware.call_sync('mail.config'), **config)

        if config['fromname']:
            from_addr = Header(config['fromname'], 'utf-8')
            try:
                config['fromemail'].encode('ascii')
            except UnicodeEncodeError:
                from_addr.append(f'<{config["fromemail"]}>', 'utf-8')
            else:
                from_addr.append(f'<{config["fromemail"]}>', 'ascii')
        else:
            try:
                config['fromemail'].encode('ascii')
            except UnicodeEncodeError:
                from_addr = Header(config['fromemail'], 'utf-8')
            else:
                from_addr = Header(config['fromemail'], 'ascii')

        interval = message.get('interval')
        if interval is None:
            interval = timedelta()
        else:
            interval = timedelta(seconds=interval)

        sw_name = self.middleware.call_sync('system.version').split('-', 1)[0]

        channel = message.get('channel')
        if not channel:
            channel = sw_name.lower()
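        # Rate-limit messages per channel: only send if `interval` has elapsed
        # since the channel file's mtime was last updated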
        if interval > timedelta():
            channelfile = '/tmp/.msg.%s' % (channel)
            last_update = datetime.now() - interval
            try:
                last_update = datetime.fromtimestamp(
                    os.stat(channelfile).st_mtime)
            except OSError:
                pass
            timediff = datetime.now() - last_update
            if (timediff >= interval) or (timediff < timedelta()):
                # Make sure mtime is modified
                # We could use os.utime but this is simpler!
                with open(channelfile, 'w') as f:
                    f.write('!')
            else:
                raise CallError(
                    'This message was already sent in the given interval')

        verrors = self.__password_verify(config['pass'], 'mail-config.pass')
        if verrors:
            raise verrors
        to = message.get('to')
        if not to:
            to = [
                self.middleware.call_sync('user.query',
                                          [('username', '=', 'root')],
                                          {'get': True})['email']
            ]
            if not to[0]:
                raise CallError('Email address for root is not configured')

        if message.get('attachments'):
            job.check_pipe("input")

            def read_json():
                f = job.pipes.input.r
                data = b''
                i = 0
                while True:
                    read = f.read(1048576)  # 1MiB
                    if read == b'':
                        break
                    data += read
                    i += 1
                    if i > 50:
                        raise ValueError(
                            'Attachments bigger than 50MB are not allowed yet')
                if data == b'':
                    return None
                return json.loads(data)

            attachments = read_json()
        else:
            attachments = None

        if 'html' in message or attachments:
            msg = MIMEMultipart()
            msg.preamble = 'This is a multi-part message in MIME format.'
            if 'html' in message:
                msg2 = MIMEMultipart('alternative')
                msg2.attach(
                    MIMEText(message['text'], 'plain', _charset='utf-8'))
                msg2.attach(MIMEText(message['html'], 'html',
                                     _charset='utf-8'))
                msg.attach(msg2)
            if attachments:
                for attachment in attachments:
                    m = Message()
                    m.set_payload(attachment['content'])
                    for header in attachment.get('headers'):
                        m.add_header(header['name'], header['value'],
                                     **(header.get('params') or {}))
                    msg.attach(m)
        else:
            msg = MIMEText(message['text'], _charset='utf-8')

        msg['Subject'] = message['subject']

        msg['From'] = from_addr
        msg['To'] = ', '.join(to)
        if message.get('cc'):
            msg['Cc'] = ', '.join(message.get('cc'))
        msg['Date'] = formatdate()

        local_hostname = self.middleware.call_sync('system.hostname')

        msg['Message-ID'] = "<%s-%s.%s@%s>" % (
            sw_name.lower(), datetime.utcnow().strftime("%Y%m%d.%H%M%S.%f"),
            base64.urlsafe_b64encode(os.urandom(3)), local_hostname)

        extra_headers = message.get('extra_headers') or {}
        for key, val in list(extra_headers.items()):
            # We already have "Content-Type: multipart/mixed" and setting "Content-Type: text/plain" like some scripts
            # do will break the Python email module.
            if key.lower() == "content-type":
                continue

            if key in msg:
                msg.replace_header(key, val)
            else:
                msg[key] = val

        syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_MAIL)
        try:
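            # OAuth-configured (Gmail) accounts are sent through mail.gmail_send;
            # otherwise the message goes directly over SMTP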
            if config['oauth']:
                self.middleware.call_sync('mail.gmail_send', msg, config)
            else:
                server = self._get_smtp_server(config,
                                               message['timeout'],
                                               local_hostname=local_hostname)
                # NOTE: Don't do this.
                #
                # If smtplib.SMTP* tells you to run connect() first, it's because the
                # mailserver it tried connecting to via the outgoing server argument
                # was unreachable and it tried to connect to 'localhost' and barfed.
                # This is because FreeNAS doesn't run a full MTA.
                # else:
                #    server.connect()
                headers = '\n'.join([f'{k}: {v}' for k, v in msg._headers])
                syslog.syslog(f"sending mail to {', '.join(to)}\n{headers}")
                server.sendmail(from_addr.encode(), to, msg.as_string())
                server.quit()
        except Exception as e:
            # Don't spam syslog with these messages. They should only end up in the
            # test-email pane.
            # We are only interested in ValueError, not subclasses.
            if e.__class__ is ValueError:
                raise CallError(str(e))
            syslog.syslog(f'Failed to send email to {", ".join(to)}: {str(e)}')
            if isinstance(e, smtplib.SMTPAuthenticationError):
                raise CallError(
                    f'Authentication error ({e.smtp_code}): {e.smtp_error}',
                    errno.EAUTH if osc.IS_FREEBSD else errno.EPERM)
            self.logger.warn('Failed to send email: %s', str(e), exc_info=True)
            if message['queue']:
                with MailQueue() as mq:
                    mq.append(msg)
            raise CallError(f'Failed to send email: {e}')
        return True

    def _get_smtp_server(self, config, timeout=300, local_hostname=None):
        self.middleware.call_sync('network.general.will_perform_activity',
                                  'mail')

        if local_hostname is None:
            local_hostname = self.middleware.call_sync('system.hostname')

        if not config['outgoingserver'] or not config['port']:
            # See NOTE below.
            raise ValueError('you must provide an outgoing mailserver and mail'
                             ' server port when sending mail')
        if config['security'] == 'SSL':
            server = smtplib.SMTP_SSL(config['outgoingserver'],
                                      config['port'],
                                      timeout=timeout,
                                      local_hostname=local_hostname)
        else:
            server = smtplib.SMTP(config['outgoingserver'],
                                  config['port'],
                                  timeout=timeout,
                                  local_hostname=local_hostname)
            if config['security'] == 'TLS':
                server.starttls()
        if config['smtp']:
            server.login(config['user'], config['pass'])

        return server

    @periodic(600, run_on_start=False)
    @private
    def send_mail_queue(self):
        with MailQueue() as mq:
            for queue in list(mq.queue):
                try:
                    config = self.middleware.call_sync('mail.config')
                    if config['oauth']:
                        self.middleware.call_sync('mail.gmail_send',
                                                  queue.message, config)
                    else:
                        server = self._get_smtp_server(config)
                        server.sendmail(queue.message['From'].encode(),
                                        queue.message['To'].split(', '),
                                        queue.message.as_string())
                        server.quit()
                except Exception:
                    self.logger.debug('Sending message from queue failed',
                                      exc_info=True)
                    queue.attempts += 1
                    if queue.attempts >= mq.MAX_ATTEMPTS:
                        mq.queue.remove(queue)
                else:
                    mq.queue.remove(queue)
Example #17
class CertificateService(CRUDService):
    class Config:
        datastore = 'system.certificate'
        datastore_extend = 'certificate.cert_extend'
        datastore_extend_context = 'certificate.cert_extend_context'
        datastore_prefix = 'cert_'
        cli_namespace = 'system.certificate'

    ENTRY = CERT_ENTRY

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.map_functions = {
            'CERTIFICATE_CREATE_INTERNAL': self.create_internal,
            'CERTIFICATE_CREATE_IMPORTED': self.create_imported_certificate,
            'CERTIFICATE_CREATE_IMPORTED_CSR': self.create_imported_csr,
            'CERTIFICATE_CREATE_CSR': self.create_csr,
            'CERTIFICATE_CREATE_ACME': self.create_acme_certificate,
        }

    @private
    def cert_extend_context(self, rows, extra):
        context = {
            'cas': {
                c['id']: c
                for c in self.middleware.call_sync(
                    'certificateauthority.query')
            },
        }
        return context

    @private
    def cert_extend(self, cert, context):
        if cert['signedby']:
            cert['signedby'] = context['cas'][cert['signedby']['id']]

        normalize_cert_attrs(cert)
        return cert

    @private
    async def cert_services_validation(self,
                                       id,
                                       schema_name,
                                       raise_verrors=True):
        # General method to check certificate health wrt usage in services
        cert = await self.middleware.call('certificate.query',
                                          [['id', '=', id]])
        verrors = ValidationErrors()
        valid_key_size = {'EC': 28, 'RSA': 2048}
        if cert:
            cert = cert[0]
            if cert['cert_type'] != 'CERTIFICATE' or cert['cert_type_CSR']:
                verrors.add(
                    schema_name,
                    'Selected certificate id is not a valid certificate')
            elif not cert.get('fingerprint'):
                verrors.add(schema_name,
                            f'{cert["name"]} certificate is malformed')

            if not cert['privatekey']:
                verrors.add(
                    schema_name,
                    'Selected certificate does not have a private key')
            elif not cert['key_length']:
                verrors.add(schema_name,
                            'Failed to parse certificate\'s private key')
            elif cert['key_length'] < valid_key_size[cert['key_type']]:
                verrors.add(
                    schema_name,
                    f'{cert["name"]}\'s private key size is less then {valid_key_size[cert["key_type"]]} bits'
                )

            if cert['until'] and datetime.datetime.strptime(
                    cert['until'],
                    '%a %b  %d %H:%M:%S %Y') < datetime.datetime.now():
                verrors.add(
                    schema_name,
                    f'{cert["name"]!r} has expired (it was valid until {cert["until"]!r})'
                )

            if cert['digest_algorithm'] in ['MD5', 'SHA1']:
                verrors.add(
                    schema_name,
                    'Please use a certificate whose digest algorithm has at least 112 security bits'
                )

            if cert['revoked']:
                verrors.add(schema_name, 'This certificate is revoked')
        else:
            verrors.add(schema_name,
                        f'No Certificate found with the provided id: {id}')

        if raise_verrors:
            verrors.check()
        else:
            return verrors

    @private
    async def validate_common_attributes(self, data, schema_name):
        verrors = ValidationErrors()

        await _validate_common_attributes(self.middleware, data, verrors,
                                          schema_name)

        return verrors

    # CREATE METHODS FOR CREATING CERTIFICATES
    # "do_create" IS CALLED FIRST AND THEN BASED ON THE TYPE OF THE CERTIFICATE WHICH IS TO BE CREATED THE
    # APPROPRIATE METHOD IS CALLED
    # FOLLOWING TYPES ARE SUPPORTED
    # CREATE_TYPE ( STRING )          - METHOD CALLED
    # CERTIFICATE_CREATE_INTERNAL     - create_internal
    # CERTIFICATE_CREATE_IMPORTED     - create_imported_certificate
    # CERTIFICATE_CREATE_IMPORTED_CSR - create_imported_csr
    # CERTIFICATE_CREATE_CSR          - create_csr
    # CERTIFICATE_CREATE_ACME         - create_acme_certificate

    @accepts(
        Dict('certificate_create',
             Bool('tos'),
             Dict('dns_mapping', additional_attrs=True),
             Int('csr_id'),
             Int('signedby'),
             Int('key_length', enum=[2048, 4096]),
             Int('renew_days'),
             Int('type'),
             Int('lifetime'),
             Int('serial', validators=[Range(min=1)]),
             Str('acme_directory_uri'),
             Str('certificate', max_length=None),
             Str('city'),
             Str('common', max_length=None, null=True),
             Str('country'),
             Str('CSR', max_length=None),
             Str('ec_curve', enum=EC_CURVES, default=EC_CURVE_DEFAULT),
             Str('email', validators=[Email()]),
             Str('key_type', enum=['RSA', 'EC'], default='RSA'),
             Str('name', required=True),
             Str('organization'),
             Str('organizational_unit'),
             Str('passphrase'),
             Str('privatekey', max_length=None),
             Str('state'),
             Str('create_type',
                 enum=[
                     'CERTIFICATE_CREATE_INTERNAL',
                     'CERTIFICATE_CREATE_IMPORTED', 'CERTIFICATE_CREATE_CSR',
                     'CERTIFICATE_CREATE_IMPORTED_CSR',
                     'CERTIFICATE_CREATE_ACME'
                 ],
                 required=True),
             Str('digest_algorithm',
                 enum=['SHA224', 'SHA256', 'SHA384', 'SHA512']),
             List('san', items=[Str('san')]),
             Ref('cert_extensions'),
             register=True))
    @job(lock='cert_create')
    async def do_create(self, job, data):
        """
        Create a new Certificate

        Certificates are classified under the following types; the keyword to pass as the `create_type`
        attribute in order to create the respective type of certificate is listed next to each type.

        1) Internal Certificate                 -  CERTIFICATE_CREATE_INTERNAL

        2) Imported Certificate                 -  CERTIFICATE_CREATE_IMPORTED

        3) Certificate Signing Request          -  CERTIFICATE_CREATE_CSR

        4) Imported Certificate Signing Request -  CERTIFICATE_CREATE_IMPORTED_CSR

        5) ACME Certificate                     -  CERTIFICATE_CREATE_ACME

        By default, created certs use RSA keys. If an Elliptic Curve Key is desired, it can be specified with the
        `key_type` attribute. If the `ec_curve` attribute is not specified for the Elliptic Curve Key, it defaults
        to the "BrainpoolP384R1" curve.

        A type is selected by the Certificate Service based on `create_type`. The rest of the values in `data` are
        validated accordingly and finally a certificate is made based on the selected type.

        `cert_extensions` can be specified to set X509v3 extensions.

        .. examples(websocket)::

          Create an ACME based certificate

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.create",
                "params": [{
                    "tos": true,
                    "csr_id": 1,
                    "acme_directory_uri": "https://acme-staging-v02.api.letsencrypt.org/directory",
                    "name": "acme_certificate",
                    "dns_mapping": {
                        "domain1.com": "1"
                    },
                    "create_type": "CERTIFICATE_CREATE_ACME"
                }]
            }

          Create an Imported Certificate Signing Request

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.create",
                "params": [{
                    "name": "csr",
                    "CSR": "CSR string",
                    "privatekey": "Private key string",
                    "create_type": "CERTIFICATE_CREATE_IMPORTED_CSR"
                }]
            }

          Create an Internal Certificate

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.create",
                "params": [{
                    "name": "internal_cert",
                    "key_length": 2048,
                    "lifetime": 3600,
                    "city": "Nashville",
                    "common": "domain1.com",
                    "country": "US",
                    "email": "*****@*****.**",
                    "organization": "iXsystems",
                    "state": "Tennessee",
                    "digest_algorithm": "SHA256",
                    "signedby": 4,
                    "create_type": "CERTIFICATE_CREATE_INTERNAL"
                }]
            }
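
          Create a Certificate Signing Request backed by an EC key (an illustrative example; field values
          are placeholders and any curve from the supported `ec_curve` choices may be used)

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.create",
                "params": [{
                    "name": "ec_csr",
                    "key_type": "EC",
                    "ec_curve": "BrainpoolP384R1",
                    "city": "Nashville",
                    "common": "domain1.com",
                    "country": "US",
                    "email": "*****@*****.**",
                    "organization": "iXsystems",
                    "state": "Tennessee",
                    "san": ["domain1.com"],
                    "digest_algorithm": "SHA256",
                    "create_type": "CERTIFICATE_CREATE_CSR"
                }]
            }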
        """
        if not data.get('dns_mapping'):
            data.pop('dns_mapping')  # Remove the empty default dict added by the schema

        create_type = data.pop('create_type')
        if create_type in ('CERTIFICATE_CREATE_IMPORTED_CSR',
                           'CERTIFICATE_CREATE_ACME',
                           'CERTIFICATE_CREATE_IMPORTED'):
            for key in ('key_length', 'key_type', 'ec_curve'):
                data.pop(key, None)

        verrors = await self.validate_common_attributes(
            data, 'certificate_create')

        await validate_cert_name(self.middleware, data['name'],
                                 self._config.datastore, verrors,
                                 'certificate_create.name')

        if verrors:
            raise verrors

        job.set_progress(10, 'Initial validation complete')

        if create_type in (
                'CERTIFICATE_CREATE_IMPORTED_CSR',
                'CERTIFICATE_CREATE_ACME',
                'CERTIFICATE_CREATE_IMPORTED',
        ):
            # We add dictionaries/lists by default, so we need to explicitly remove them
            data.pop('cert_extensions')
            data.pop('san')

        if create_type == 'CERTIFICATE_CREATE_ACME':
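            # create_acme_certificate is a synchronous method (it blocks while
            # talking to the ACME server), so it is executed in a thread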
            data = await self.middleware.run_in_thread(
                self.map_functions[create_type], job, data)
        else:
            data = await self.map_functions[create_type](job, data)

        data = {
            k: v
            for k, v in data.items() if k in [
                'name', 'certificate', 'CSR', 'privatekey', 'type', 'signedby',
                'acme', 'acme_uri', 'domains_authenticators', 'renew_days'
            ]
        }

        pk = await self.middleware.call(
            'datastore.insert', self._config.datastore, data,
            {'prefix': self._config.datastore_prefix})

        await self.middleware.call('service.start', 'ssl')

        job.set_progress(100, 'Certificate created successfully')

        return await self.get_instance(pk)
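
    # A minimal usage sketch (not part of the original service): how a client might
    # invoke the job-based `certificate.create` method through the middleware
    # websocket client. `middlewared.client.Client` and the `job=True` keyword are
    # assumed here; adjust to the client API of your middleware version.
    #
    #     from middlewared.client import Client
    #
    #     with Client() as c:
    #         cert = c.call('certificate.create', {
    #             'name': 'internal_cert',
    #             'signedby': 4,
    #             'create_type': 'CERTIFICATE_CREATE_INTERNAL',
    #             # ... plus the remaining required fields (country, state, city, etc.)
    #         }, job=True)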

    @accepts(
        Dict('acme_create',
             Bool('tos', default=False),
             Int('csr_id', required=True),
             Int('renew_days', default=10, validators=[Range(min=1)]),
             Str('acme_directory_uri', required=True),
             Str('name', required=True),
             Dict('dns_mapping', additional_attrs=True, required=True)))
    @private
    @skip_arg(count=1)
    def create_acme_certificate(self, job, data):

        csr_data = self.middleware.call_sync('certificate.get_instance',
                                             data['csr_id'])
        verrors = ValidationErrors()
        email = (self.middleware.call_sync('user.query',
                                           [['uid', '=', 0]]))[0]['email']
        if not email:
            verrors.add(
                'name',
                'Please configure an email address for "root" user which will be used with the ACME Server.'
            )
        verrors.check()

        if not data['acme_directory_uri'].endswith('/'):
            data['acme_directory_uri'] += '/'

        final_order = self.middleware.call_sync('acme.issue_certificate', job,
                                                25, data, csr_data)

        job.set_progress(95, 'Final order received from ACME server')

        cert_dict = {
            'acme': self.middleware.call_sync(
                'acme.registration.query',
                [['directory', '=', data['acme_directory_uri']]]
            )[0]['id'],
            'acme_uri': final_order.uri,
            'certificate': final_order.fullchain_pem,
            'CSR': csr_data['CSR'],
            'privatekey': csr_data['privatekey'],
            'name': data['name'],
            'type': CERT_TYPE_EXISTING,
            'domains_authenticators': data['dns_mapping'],
            'renew_days': data['renew_days']
        }

        return cert_dict

    @accepts(
        Patch('certificate_create_internal', 'certificate_create_csr',
              ('rm', {'name': 'signedby'}),
              ('rm', {'name': 'lifetime'})))
    @private
    @skip_arg(count=1)
    async def create_csr(self, job, data):
        # signedby and lifetime attributes are not required for a CSR
        verrors = ValidationErrors()
        cert_info = get_cert_info_from_data(data)
        cert_info['cert_extensions'] = data['cert_extensions']

        if cert_info['cert_extensions']['AuthorityKeyIdentifier']['enabled']:
            verrors.add('cert_extensions.AuthorityKeyIdentifier.enabled',
                        'This extension is not valid for a CSR')

        verrors.check()

        data['type'] = CERT_TYPE_CSR
        req, key = generate_certificate_signing_request(cert_info)

        job.set_progress(80)

        data['CSR'] = req
        data['privatekey'] = key

        job.set_progress(90, 'Finalizing changes')

        return data

    @accepts(
        Dict('create_imported_csr',
             Str('CSR', required=True, max_length=None, empty=False),
             Str('name'),
             Str('privatekey', required=True, max_length=None, empty=False),
             Str('passphrase')))
    @private
    @skip_arg(count=1)
    async def create_imported_csr(self, job, data):

        # TODO: Should we validate that the CSR matches the private key?

        data['type'] = CERT_TYPE_CSR

        job.set_progress(80)

        if 'passphrase' in data:
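            # Assumption: export_private_key() re-exports the supplied key without
            # the passphrase, so the stored private key is not passphrase-protected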
            data['privatekey'] = export_private_key(data['privatekey'],
                                                    data['passphrase'])

        job.set_progress(90, 'Finalizing changes')

        return data

    @accepts(
        Dict('certificate_create_imported',
             Int('csr_id'),
             Str('certificate', required=True, max_length=None),
             Str('name'),
             Str('passphrase'),
             Str('privatekey', max_length=None)))
    @private
    @skip_arg(count=1)
    async def create_imported_certificate(self, job, data):
        verrors = ValidationErrors()

        csr_id = data.pop('csr_id', None)
        if csr_id:
            csr_obj = await self.query(
                [['id', '=', csr_id], ['type', '=', CERT_TYPE_CSR]],
                {'get': True})

            data['privatekey'] = csr_obj['privatekey']
            data.pop('passphrase', None)
        elif not data.get('privatekey'):
            verrors.add(
                'certificate_create.privatekey',
                'Private key is required when importing a certificate')

        if verrors:
            raise verrors

        job.set_progress(50, 'Validation complete')

        data['type'] = CERT_TYPE_EXISTING

        if 'passphrase' in data:
            data['privatekey'] = export_private_key(data['privatekey'],
                                                    data['passphrase'])

        return data

    @accepts(
        Patch('certificate_create',
              'certificate_create_internal',
              ('edit', _set_required('lifetime')),
              ('edit', _set_required('country')),
              ('edit', _set_required('state')),
              ('edit', _set_required('city')),
              ('edit', _set_required('organization')),
              ('edit', _set_required('email')),
              ('edit', _set_required('san')),
              ('edit', _set_required('signedby')),
              ('rm', {'name': 'create_type'}),
              register=True))
    @private
    @skip_arg(count=1)
    async def create_internal(self, job, data):

        cert_info = get_cert_info_from_data(data)
        data['type'] = CERT_TYPE_INTERNAL

        signing_cert = await self.middleware.call(
            'certificateauthority.query', [('id', '=', data['signedby'])],
            {'get': True})

        cert_serial = await self.middleware.call(
            'certificateauthority.get_serial_for_certificate',
            data['signedby'])

        cert_info.update({
            'ca_privatekey': signing_cert['privatekey'],
            'ca_certificate': signing_cert['certificate'],
            'serial': cert_serial,
            'cert_extensions': data['cert_extensions']
        })

        cert, key = await self.middleware.call(
            'cryptokey.generate_certificate', cert_info)

        data['certificate'] = cert
        data['privatekey'] = key

        job.set_progress(90, 'Finalizing changes')

        return data

    @accepts(Int('id', required=True),
             Dict('certificate_update', Bool('revoked'), Str('name')))
    @job(lock='cert_update')
    async def do_update(self, job, id, data):
        """
        Update certificate of `id`

        Only the `name` and `revoked` attributes can be updated.

        When `revoked` is enabled, the certificate with the specified `id` is revoked and, if it belongs to a CA
        chain which exists on this system, its serial number is added to the CA's certificate revocation list.

        .. examples(websocket)::

          Update a certificate of `id`

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.update",
                "params": [
                    1,
                    {
                        "name": "updated_name"
                    }
                ]
            }
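
          Revoke a certificate of `id` (an illustrative example; assumes the certificate's signing CA
          is present on the system so that revocation is allowed)

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.update",
                "params": [
                    1,
                    {
                        "revoked": true
                    }
                ]
            }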
        """
        old = await self.get_instance(id)
        # signedby is changed back to integer from a dict
        old['signedby'] = old['signedby']['id'] if old.get('signedby') else None
        if old.get('acme'):
            old['acme'] = old['acme']['id']

        new = old.copy()

        new.update(data)

        if any(new[k] != old[k] for k in ('name', 'revoked')):

            verrors = ValidationErrors()

            if new['name'] != old['name']:
                await validate_cert_name(self.middleware, new['name'],
                                         self._config.datastore, verrors,
                                         'certificate_update.name')

            if new['revoked'] and new['cert_type_CSR']:
                verrors.add('certificate_update.revoked',
                            'A CSR cannot be marked as revoked.')
            elif new['revoked'] and not old['revoked'] and not new['can_be_revoked']:
                verrors.add(
                    'certificate_update.revoked',
                    'Only certificates which have their signing CA present on the system can be revoked'
                )
            elif old['revoked'] and not new['revoked']:
                verrors.add(
                    'certificate_update.revoked',
                    'Certificate has already been revoked and this cannot be reversed'
                )

            verrors.check()

            if old['revoked'] != new['revoked'] and new['revoked']:
                revoked = {'revoked_date': datetime.datetime.utcnow()}
            else:
                revoked = {}

            await self.middleware.call(
                'datastore.update', self._config.datastore, id, {
                    'name': new['name'],
                    **revoked
                }, {'prefix': self._config.datastore_prefix})

            await self.middleware.call('service.start', 'ssl')

        job.set_progress(90, 'Finalizing changes')

        return await self.get_instance(id)

    @private
    async def delete_domains_authenticator(self, auth_id):
        # Delete provided auth_id from all ACME based certs domains_authenticators
        for cert in await self.query([['acme', '!=', None]]):
            if auth_id in cert['domains_authenticators'].values():
                await self.middleware.call(
                    'datastore.update', self._config.datastore, cert['id'], {
                        'domains_authenticators': {
                            k: v
                            for k, v in cert['domains_authenticators'].items()
                            if v != auth_id
                        }
                    }, {'prefix': self._config.datastore_prefix})

    @accepts(Int('id'), Bool('force', default=False))
    @job(lock='cert_delete')
    def do_delete(self, job, id, force):
        """
        Delete certificate of `id`.

        If the certificate is an ACME-based certificate, the certificate service will try to
        revoke it by updating its status with the ACME server; if that fails, an exception is raised
        and the certificate is not deleted from the system. However, if `force` is set to True, the certificate is
        deleted from the system even if an error occurs while revoking it with the ACME server.

        .. examples(websocket)::

          Delete certificate of `id`

            :::javascript
            {
                "id": "6841f242-840a-11e6-a437-00e04d680384",
                "msg": "method",
                "method": "certificate.delete",
                "params": [
                    1,
                    true
                ]
            }
        """
        check_dependencies(self.middleware, 'CERT', id)

        certificate = self.middleware.call_sync('certificate.get_instance', id)

        if certificate.get('acme'):
            client, key = self.middleware.call_sync(
                'acme.get_acme_client_and_key',
                certificate['acme']['directory'], True)

            try:
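                # Revocation reason code 0 corresponds to "unspecified" (RFC 5280)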
                client.revoke(
                    jose.ComparableX509(
                        crypto.load_certificate(crypto.FILETYPE_PEM,
                                                certificate['certificate'])),
                    0)
            except (errors.ClientError, messages.Error) as e:
                if not force:
                    raise CallError(f'Failed to revoke certificate: {e}')

        response = self.middleware.call_sync('datastore.delete',
                                             self._config.datastore, id)

        self.middleware.call_sync('service.start', 'ssl')

        job.set_progress(100)
        return response