Example 1
def merge_pillar_metadata():
    """Merge Salt pillar metadata with the minion list from DeepSea.

    Returns a list of per-host dicts: minions known to DeepSea are merged
    with their pillar metadata entry (matched on the short hostname),
    hosts present only in the metadata are appended as-is, and empty
    'addresses' values are stripped from every result.
    """
    def get_hostname(fqdn):
        # Pillar metadata is keyed by short hostname, not by FQDN.
        return fqdn.split('.')[0]

    metadata = all_metadata()
    try:
        minions = DeepSea.instance().get_minions()
    except rest_client.RequestException:
        # Best effort: if DeepSea is unreachable, fall back to
        # metadata-only results instead of failing the whole call.
        logger.exception('failed to get minions')
        minions = []

    ret = []
    for minion in minions:
        minion_hostname = get_hostname(minion['hostname'])
        if minion_hostname in metadata:
            # Both `metadata[minion_hostname]` and `minion` contain a
            # "hostname" key. Use the one from `minion`, as it contains
            # the FQDN instead of just the short name.
            ret.append(aggregate_dict(metadata[minion_hostname], minion))
            del metadata[minion_hostname]
        else:
            ret.append(minion)

    # Append hosts that appear only in the pillar metadata. The original
    # loop (`for metadata in metadata.values()`) shadowed the `metadata`
    # dict with its own loop variable; extend() avoids that hazard.
    ret.extend(metadata.values())

    for host in ret:
        # Drop empty address lists so consumers only see the key when
        # there is actual data behind it.
        if 'addresses' in host and not host['addresses']:
            del host['addresses']

    return ret
Example 2
    def get_minions(self):
        """Return all Salt minions known to this master, with key status.

        Combines the master's key lists (accepted / unaccepted / rejected /
        denied) with each minion's pillar data, joined on 'hostname'.
        """
        keys = self.key_list()

        # Map salt-key list names to the status value exposed to callers.
        status_by_list = {
            'minions_pre': 'unaccepted',
            'minions_rejected': 'rejected',
            'minions_denied': 'denied',
            'minions': 'accepted',
        }

        key_aggr = []
        # Iterate `keys` (not the mapping) to preserve the original
        # ordering and skip any unrecognized key lists.
        for key in keys:
            if key not in status_by_list:
                continue
            key_status = status_by_list[key]
            key_aggr.extend([{
                'hostname': hostname,
                'key_status': key_status
            } for hostname in keys[key]])

        out = self.pillar_items()
        # .items() instead of the Python-2-only .iteritems(): it works on
        # both Python 2 and 3, keeping this method portable.
        minions = [
            aggregate_dict(data, hostname=hostname)
            for (hostname, data) in out.items()
        ]
        minions = zip_by_keys(('hostname', key_aggr), ('hostname', minions))
        return minions
Example 3
    def _do_scrub(self, request, object_list):
        """Trigger a scrub (or deep scrub) on every minion in *object_list*
        and return the aggregated per-minion results."""
        deep_scrub = get_request_data(request).get('deep-scrub', False)
        if not object_list:
            logger.info('scrub triggered without minions')

        # Collect the scrub result of each minion.
        results = [minion.scrub(deep_scrub=deep_scrub)
                   for minion in object_list]

        command = "deep-scrub" if deep_scrub else "scrub"
        return Response({
            'command': command,
            'result': aggregate_dict(*results)
        }, status=status.HTTP_200_OK)
Example 4
 def get_all_objects(context, query):
     """Return every OSD of the cluster as a list of CephOsd instances.

     Joins four monitor-API views of the OSDs (tree, dump, pg stats and
     metadata) on their OSD id before building the model objects.
     """
     assert context is not None
     api = RadosMixin.mon_api(context.fsid)
     osd_tree = api.osd_list()  # key=id
     osd_dump_data = api.osd_dump()['osds']  # key=osd
     pg_dump_data = api.pg_dump()['osd_stats']  # key=osd
     osd_metadata = api.osd_metadata()  # key=id
     zipped_data = zip_by_keys(('id', osd_tree), ('osd', osd_dump_data),
                               ('osd', pg_dump_data), ('id', osd_metadata))
     return [
         CephOsd(**CephOsd.make_model_args(
             aggregate_dict(data,
                            # dict.get replaces the manual
                            # "x if 'in' in data else 0" double lookup.
                            in_state=data.get('in', 0),
                            cluster_id=context.fsid)))
         for data in zipped_data
     ]
Example 5
    def get_all_objects(context, query):
        """Return every RBD image of every pool as CephRbd instances."""
        assert context is not None
        api = RadosMixin.rbd_api(context.fsid)

        pools = CephPool.objects.all()
        # All (image name, pool) pairs across all pools; a nested
        # generator expression replaces chain.from_iterable over a
        # nested genexp — same laziness and order, less machinery.
        rbd_name_pools = (
            (image, pool)
            for pool in pools
            for image in api.list(pool.name)
        )

        # Comprehension instead of the manual append loop.
        rbds = [
            aggregate_dict(name=image_name,
                           pool_id=pool.id,
                           id=CephRbd.make_key(pool, image_name))
            for (image_name, pool) in rbd_name_pools
        ]

        return [CephRbd(**CephRbd.make_model_args(rbd)) for rbd in rbds]