Example 1
 def test_optimize_one_step(self):
     pg_num = 2048
     size = 3
     a = Ceph().constructor([
         'optimize',
         '--no-multithread',
         '--replication-count',
         str(size),
         '--pool',
         '3',
         '--pg-num',
         str(pg_num),
         '--pgp-num',
         str(pg_num),
         '--rule',
         'data',
         '--choose-args',
         'optimize',
         '--step',
         '64',
     ])
     c = Crush(backward_compatibility=True)
     c.parse('tests/test_optimize_small_cluster.json')
     crushmap = c.get_crushmap()
     (count, crushmap) = a.optimize(crushmap)
     assert 240 == count
Example 2
 def test_optimize_step(self):
     pg_num = 2048
     size = 3
     a = Ceph().constructor([
         'optimize',
         '--no-multithread',
         '--replication-count',
         str(size),
         '--pool',
         '3',
         '--pg-num',
         str(pg_num),
         '--pgp-num',
         str(pg_num),
         '--rule',
         'data',
         '--choose-args',
         'optimize',
         '--step',
         '64',
     ])
     c = Crush(backward_compatibility=True)
     c.parse('tests/test_optimize_small_cluster.json')
     crushmap = c.get_crushmap()
     converged = False
     for i in range(20):
         (count, crushmap) = a.optimize(crushmap)
         if count <= 0:
             converged = True
             break
         print("moved " + str(count) + " values")
     assert converged
Example 3
 def analyze(self):
     self.pre_sanity_check_args()
     c = Crush(backward_compatibility=self.args.backward_compatibility)
     c.parse(self.main.convert_to_crushmap(self.args.crushmap))
     self.post_sanity_check_args()
     (take, failure_domain) = c.rule_get_take_failure_domain(self.args.rule)
     d = self.run_simulation(c, take, failure_domain)
     worst = self.analyze_failures(c, take, failure_domain)
     return (d, worst, failure_domain)
Example 4
 def test_map(self):
     crushmap = self.build_crushmap()
     c = Crush(verbose=1)
     assert c.parse(crushmap)
     assert len(
         c.map(rule="data",
               value=1234,
               replication_count=1,
               weights={},
               choose_args=[])) == 1
Example 5
def run_crush():
    value = sys.argv[1]
    rcount = sys.argv[2]
    crushmap = open("./config/crushmap.json", "r").read()

    c = Crush()
    c.parse(json.loads(crushmap))
    devices = c.map(rule="data",
                    value=int(value),
                    replication_count=int(rcount))
    return devices
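The snippet above assumes `import sys`, `import json`, and `from crush import Crush` at the top of the script. A minimal, hypothetical driver for it might be:

if __name__ == "__main__":
    # e.g. `python map_object.py 1234 3` prints the devices chosen for value 1234
    print(run_crush())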
Example 6
    def run_compare(self):
        self.pre_sanity_check_args()
        self.set_origin_crushmap(self.args.origin)
        self.set_destination_crushmap(self.args.destination)
        self.post_sanity_check_args()

        if self.args.origin_weights:
            with open(self.args.origin_weights) as f_ow:
                self.orig_weights = Crush.parse_weights_file(f_ow)
        if self.args.destination_weights:
            with open(self.args.destination_weights) as f_dw:
                self.dest_weights = Crush.parse_weights_file(f_dw)

        self.compare()
Example 7
    def analyze_failures(self, c, take, failure_domain):
        if failure_domain == 0:  # failure domain == device is a border case
            return None
        root = c.find_bucket(take)
        worst = pd.DataFrame()
        available_buckets = c.collect_buckets_by_type([root], failure_domain)
        if len(available_buckets) <= self.args.replication_count:
            log.error("there are not enough " + failure_domain +
                      " to sustain failure")
            return None
        for may_fail in available_buckets:
            f = Crush(verbose=self.args.debug,
                      backward_compatibility=self.args.backward_compatibility)
            f.crushmap = copy.deepcopy(c.get_crushmap())
            root = f.find_bucket(take)
            f.filter(lambda x: x.get('name') != may_fail.get('name'), root)
            f.parse(f.crushmap)
            try:
                a = self.run_simulation(f, take, failure_domain)
                a['~over filled %~'] = a['~over/under filled %~']
                a = a[['~type~', '~over filled %~']]
                worst = pd.concat([worst,
                                   a]).groupby(['~type~']).max().reset_index()
            except BadMapping:
                log.error("mapping failed when removing {}".format(may_fail))

        return worst.set_index('~type~')
Example 8
class Root(RestController):
    config = Config()
    crush = Crush()
    doc = Doc()
    mon = Mon()
    osd = Osd()
    pool = Pool()
    request = Request()
    server = Server()

    @expose(template='json')
    def get(self, **kwargs):
        """
        Show the basic information for the REST API
        This includes values like api version or auth method
        """
        return {
            'api_version':
            1,
            'auth':
            'Use "ceph tell mgr restful create_key <key>" to create a key pair, '
            'pass it as HTTP Basic auth to authenticate',
            'doc':
            'See /doc endpoint',
            'info':
            "Ceph Manager RESTful API server",
        }
Example 9
    def test_collect_dataframe(self):
        tree = {
            'name':
            'rack0',
            'type':
            'rack',
            'id':
            -1,
            'children': [
                {
                    'name': 'host0',
                    'type': 'host',
                    'id': -2,
                    'children': [
                        {
                            'name': 'osd.3',
                            'id': 3
                        },
                    ]
                },
                {
                    'name': 'host1',
                    'type': 'host',
                    'id': -3,
                    'children': [
                        {
                            'name': 'osd.4',
                            'id': 4
                        },
                    ]
                },
            ]
        }
        c = Crush(verbose=1)
        c.parse({"trees": [tree]})
        d = Analyze.collect_dataframe(c, tree)
        expected = """\
        ~id~  ~weight~  ~type~   rack   host device
~name~                                             
rack0     -1       1.0    rack  rack0    NaN    NaN
host0     -2       1.0    host  rack0  host0    NaN
osd.3      3       1.0  device  rack0  host0  osd.3
host1     -3       1.0    host  rack0  host1    NaN
osd.4      4       1.0  device  rack0  host1  osd.4\
""" # noqa trailing whitespaces are expected
        assert expected == str(d)
Example 10
    def run_optimize(self, p, rule_name, crushmap, with_positions=True):
        pd.set_option('display.max_rows', None)
        pd.set_option('display.width', 160)

        p.extend(['--rule', rule_name])
        a = Ceph().constructor([
            'analyze',
        ] + p)

        c = Crush(backward_compatibility=True)
        c.parse(crushmap)
        (take, failure_domain) = c.rule_get_take_failure_domain(rule_name)
        crushmap = c.get_crushmap()
        crushmap['choose_args'] = {
            "optimize": [],
        }

        d = a.run_simulation(c, take, failure_domain)
        if d['~overweight~'].any():
            raise ValueError(
                'no way to optimize when there is an overweight item')
        print(str(d))
        print(a._format_report(d, 'device'))
        print(a._format_report(d, failure_domain))
        print(a.analyze_failures(c, take, failure_domain))

        p.extend(['--choose-args', 'optimize'])

        pool = Pool()
        children = [c.find_bucket(take)]
        while len(children) > 0:
            a = [(p, crushmap, item, with_positions) for item in children]
            # 'o' is defined elsewhere in the original module (not shown in this fragment)
            r = pool.map(o, a)
            #            r = map(o, a)
            choose_args = filter(None, r)
            crushmap['choose_args']['optimize'].extend(choose_args)
            nc = []
            for item in children:
                nc.extend(item.get('children', []))
            # fail if all children are not of the same type
            children = nc

        pprint.pprint(crushmap)
        c.parse(crushmap)
        a = Ceph().constructor([
            'analyze',
        ] + p)
        d = a.run_simulation(c, take, failure_domain)
        print(a._format_report(d, 'device'))
        print(a._format_report(d, failure_domain))
        print(a.analyze_failures(c, take, failure_domain))
Example 11
    def define_crushmaps_1(self):
        crushmap = self.define_crushmap_10()
        pprint(crushmap)
        c1 = Crush()
        c1.parse(crushmap)

        m2 = copy.deepcopy(crushmap)
        del m2['trees'][0]['children'][2]['children'][1]
        c2 = Crush()
        c2.parse(m2)

        return (c1, c2)
Example 12
    def define_crushmaps_2(self):
        crushmap = {
            "trees": [{
                "type": "root",
                "id": -1,
                "name": "dc1",
                "children": [],
            }],
            "rules": {
                "firstn": [["take", "dc1"],
                           ["chooseleaf", "firstn", 0, "type", "host"],
                           ["emit"]],
                "indep": [["take", "dc1"],
                          ["chooseleaf", "indep", 0, "type", "host"],
                          ["emit"]],
            }
        }
        crushmap['trees'][0]['children'].extend([{
            "type":
            "host",
            "id":
            -(i + 2),
            "name":
            "host%d" % i,
            "children": [
                {
                    "id": (2 * i),
                    "name": "device%02d" % (2 * i),
                    "weight": 5.0
                },
                {
                    "id": (2 * i + 1),
                    "name": "device%02d" % (2 * i + 1),
                    "weight": 5.0
                },
            ],
        } for i in range(0, 5)])
        first = crushmap['trees'][0]['children'][0]['children']
        first[0]['weight'] = 0.5
        first[1]['weight'] = 0.5
        pprint(crushmap)
        c1 = Crush(verbose=1)
        c1.parse(crushmap)

        m2 = copy.deepcopy(crushmap)
        del m2['trees'][0]['children'][4]
        c2 = Crush(verbose=1)
        c2.parse(m2)

        return (c1, c2)
Example 13
 def test_filter_real(self):
     name = 'cloud6-1429'
     c = Crush()
     c.parse('tests/test_crush_filter.json')
     crushmap = c.get_crushmap()
     optimize = sorted(crushmap['choose_args']['optimize'],
                       key=lambda v: v['bucket_id'])
     assert 3 == len(optimize)
     assert -1 == optimize[2]['bucket_id']
     assert 7 == len(optimize[2]['weight_set'][0])
     bucket = c.find_bucket(name)
     assert name == bucket['name']
     c.filter(lambda x: x.get('name') != name, crushmap['trees'][0])
     optimize = crushmap['choose_args']['optimize']
     assert 2 == len(optimize)
     assert -1 == optimize[1]['bucket_id']
     assert 6 == len(optimize[1]['weight_set'][0])
     assert c.find_bucket(name) is None
Example 14
    def test_parse_weights_file(self):

        # Test Simple weights file
        weights = Crush.parse_weights_file(open("tests/ceph/weights.json"))
        assert weights == {"osd.0": 0.0, "osd.2": 0.5}

        # Test OSDMap
        weights = Crush.parse_weights_file(open("tests/ceph/osdmap.json"))
        assert weights == {"osd.0": 1.0, "osd.1": 0.95, "osd.2": 1.0}

        with pytest.raises(AssertionError):
            Crush.parse_weights_file(open("tests/ceph/weights-notfloat.json"))
        with pytest.raises(AssertionError):
            Crush.parse_weights_file(open("tests/ceph/osdmap-invalid.json"))
        with pytest.raises(AssertionError):
            Crush.parse_weights_file(open("tests/sample-ceph-crushmap.txt"))
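Judging from the assertions above, the simple weights file is a flat JSON object mapping device names to weights. A minimal sketch, assuming Crush.parse_weights_file accepts any readable file-like object (the in-memory file below stands in for tests/ceph/weights.json):

import io

from crush import Crush

# hypothetical in-memory equivalent of the simple weights file
weights_file = io.StringIO('{"osd.0": 0.0, "osd.2": 0.5}')
assert Crush.parse_weights_file(weights_file) == {"osd.0": 0.0, "osd.2": 0.5}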
Example 15
 def test_convert_to_crushmap(self, caplog):
     crushmap = {}
     assert crushmap == Crush._convert_to_crushmap(crushmap)
     crushmap = Crush._convert_to_crushmap("tests/sample-crushmap.json")
     assert 'trees' in crushmap
     crushmap = Crush._convert_to_crushmap("tests/sample-ceph-crushmap.txt")
     assert 'trees' in crushmap
     crushmap = Crush._convert_to_crushmap(
         "tests/sample-ceph-crushmap.crush")
     assert 'trees' in crushmap
     crushmap = Crush._convert_to_crushmap(
         "tests/sample-ceph-crushmap.json")
     assert 'trees' in crushmap
     with pytest.raises(ValueError) as e:
         crushmap = Crush._convert_to_crushmap(
             "tests/sample-bugous-crushmap.json")
     assert "Expecting property name" in str(e.value)
Example 16
 def test_collect_buckets_by_type(self):
     children = [{
         'type': 'host',
         'name': 'host0',
     }, {
         'type':
         'rack',
         'children': [{
             'name': 'host1',
             'type': 'host',
         }, {
             'name': 'host2',
             'type': 'other',
         }],
     }]
     expected = [{
         'name': 'host0',
         'type': 'host'
     }, {
         'name': 'host1',
         'type': 'host'
     }]
     assert expected == Crush.collect_buckets_by_type(children, 'host')
Example 17
         [
            "chooseleaf",
            "firstn",
            0,
            "type",
            "host"
         ],
         [
            "emit"
         ]
      ]
   }
}
"""

crush = Crush()
crush.parse(json.loads(crushmap))


@app.route('/testlog', methods=['GET'])
def testlog():
    app.logger.error("A warning")
    return ""


def find_part_for_user_cart(id):
    # crush code: hash the cart id into a stable integer value
    # (.encode() assumed so hashlib accepts bytes under Python 3)
    itemToBeSent = int(hashlib.sha1(str(id).encode()).hexdigest(), 16) % (10**8)
    arr = crush.map(rule="CartRule", value=itemToBeSent, replication_count=1)
    # the trailing digit of the chosen device name is the partition number
    val = arr[0][-1:]
    return int(val)
Example 18
 def test_get_item_by_(self):
     crushmap = self.build_crushmap()
     c = Crush(verbose=1)
     assert c.parse(crushmap)
     assert c.get_item_by_id(-2)['name'] == 'host0'
     assert c.get_item_by_name('host0')['id'] == -2
Example 19
 def set_destination_crushmap(self, destination):
     self.args.choose_args = self.args.destination_choose_args
     d = Crush(backward_compatibility=self.args.backward_compatibility)
     d.parse(self.main.convert_to_crushmap(destination))
     self.set_destination(d)
Example 20
    def optimize(self, crushmap):
        c = Crush(backward_compatibility=self.args.backward_compatibility)
        c.parse(crushmap)
        crushmap = c.get_crushmap()
        if 'choose_args' not in crushmap:
            crushmap['choose_args'] = {}
            c.parse(crushmap)
        if self.args.choose_args not in crushmap['choose_args']:
            crushmap['choose_args'][self.args.choose_args] = []
            c.parse(crushmap)
        (take, failure_domain) = c.rule_get_take_failure_domain(self.args.rule)

        parser = analyze.Analyze.get_parser()
        self.main.hook_analyze_args(parser)
        p = self.main.get_trimmed_argv(parser, self.args)
        a = self.main.clone().constructor(['analyze'] + p)

        if self.args.multithread:
            from multiprocessing import Pool
            pool = Pool()
        children = [c.find_bucket(take)]
        total_count = 0
        over_step = False
        n = self.main.value_name()
        while not over_step and len(children) > 0:
            a = [(self, p, c.get_crushmap(), item) for item in children]
            if self.args.multithread:
                r = list(pool.map(top_optimize, a))
            else:
                r = list(map(top_optimize, a))
            for i in range(len(children)):
                if r[i] is None:
                    continue
                (count, choose_arg) = r[i]
                total_count += count
                c.update_choose_args(self.args.choose_args, [choose_arg])
                log.info(children[i]['name'] + " weights updated with " + str(choose_arg))
                if self.args.step and count > 0:
                    log.warning(children[i]['name'] + " will swap " +
                                str(count) + " " + n)
                over_step = self.args.step and total_count > self.args.step
                if over_step:
                    break
            nc = []
            for item in children:
                nc.extend(item.get('children', []))
            # fail if all children are not of the same type
            children = nc
        return (total_count, c.get_crushmap())
Example 21
    def test_filter_basic(self):
        root = {
            'name':
            'root',
            'id':
            -5,
            'children': [
                {
                    'name': 'bucket2',
                    'id': -3
                },
                {
                    'name': 'bucket1',
                    'id': -2,
                    'children': [{
                        'id': 1
                    }, {
                        'id': 2
                    }, {
                        'id': 4
                    }]
                },
                {
                    'name': 'bucket3',
                    'id': -1,
                    'children': [{
                        'id': 5
                    }, {
                        'id': 6
                    }, {
                        'id': 7
                    }]
                },
            ]
        }
        expected_root = {
            'name':
            'root',
            'id':
            -5,
            'children': [
                {
                    'name': 'bucket2',
                    'id': -3
                },
                {
                    'name': 'bucket1',
                    'id': -2,
                    'children': [{
                        'id': 1
                    }]
                },
                {
                    'name': 'bucket3',
                    'id': -1,
                    'children': [{
                        'id': 5
                    }, {
                        'id': 7
                    }]
                },
            ]
        }
        choose_args = [
            {
                'bucket_id': -2,
                'ids': [11, 12, 14],
                'weight_set': [[11.0, 12.0, 14.0]]
            },
            {
                'bucket_id': -1,
                'ids': [15, 16, 17]
            },
        ]
        expected_choose_args = [
            {
                'bucket_id': -2,
                'ids': [11],
                'weight_set': [[11.0]]
            },
            {
                'bucket_id': -1,
                'ids': [15, 17]
            },
        ]
        c = Crush()
        c.crushmap = {}
        c.crushmap['trees'] = [root]
        c.crushmap['choose_args'] = {"one": choose_args}

        def fun(x):
            if x.get('id') in (2, 4, 6):
                return False
            return True

        c.filter(fun, root)
        assert expected_root == root
        assert expected_choose_args == choose_args
Example 22
 def convert_to_crushmap(self, crushmap):
     c = Crush(verbose=self.args.debug,
               backward_compatibility=self.args.backward_compatibility)
     c.parse(crushmap)
     return c.get_crushmap()
Example 23
    def test_merge_split_choose_args(self):
        c = Crush()
        split = {
            'choose_args': {
                'a': [
                    {
                        'bucket_id': -3
                    },
                    {
                        'bucket_id': -2
                    },
                    {
                        'bucket_id': -1
                    },
                ],
                'b': [
                    {
                        'bucket_id': -1
                    },
                ]
            },
            'trees': [
                {
                    'id': -1,
                    'children': [{
                        'id': -3,
                        'children': [{
                            'id': -4
                        }]
                    }],
                },
                {
                    'id': -2
                },
            ]
        }
        merged = {
            'trees': [{
                'id':
                -1,
                'children': [{
                    'id': -3,
                    'children': [{
                        'id': -4
                    }],
                    'choose_args': {
                        'a': {
                            'bucket_id': -3
                        }
                    },
                }],
                'choose_args': {
                    'a': {
                        'bucket_id': -1
                    },
                    'b': {
                        'bucket_id': -1
                    }
                },
            }, {
                'id': -2,
                'choose_args': {
                    'a': {
                        'bucket_id': -2
                    }
                }
            }]
        }

        c.crushmap = copy.deepcopy(split)
        assert c._merge_choose_args()
        assert merged == c.crushmap
        # do nothing if no choose_args
        assert c._merge_choose_args() is False
        c._split_choose_args()
        assert split == c.crushmap
Example 24
         [
            "chooseleaf",
            "firstn",
            0,
            "type",
            "host"
         ],
         [
            "emit"
         ]
      ]
   }
}
"""

crush = Crush()
crush.parse(simplejson.loads(crushmap))


def find_part_for_inv(item):
    # crush code: hash the inventory item into a stable integer value
    # (str()/.encode() assumed so hashlib accepts bytes under Python 3)
    itemToBeSent = int(hashlib.sha1(str(item).encode()).hexdigest(), 16) % (10**8)
    arr = crush.map(rule="InvRule", value=itemToBeSent, replication_count=1)
    # the trailing digit of the chosen device name is the partition number
    val = arr[0][-1:]
    return int(val)


wb2 = load_workbook('sample.xlsx')
worksheet1 = wb2['Sheet1']  # one way to load a worksheet

items = []
Example 25
    def optimize_replica(self, p, origin_crushmap,
                         crushmap, bucket,
                         replication_count, choose_arg_position):
        a = self.main.clone().constructor(['analyze'] + p)
        a.args.replication_count = replication_count

        parser = compare.Compare.get_parser()
        self.main.hook_compare_args(parser)
        cp = self.main.get_trimmed_argv(parser, self.args)
        compare_instance = self.main.clone().constructor(['compare'] + cp)
        compare_instance.args.replication_count = replication_count
        compare_instance.set_origin_crushmap(origin_crushmap)

        choose_arg = self.get_choose_arg(crushmap, bucket)
        self.set_choose_arg_position(choose_arg, bucket, choose_arg_position)
        id2weight = collections.OrderedDict()
        for pos in range(len(bucket['children'])):
            v = choose_arg['weight_set'][choose_arg_position][pos]
            id2weight[bucket['children'][pos]['id']] = v

        log.info(bucket['name'] + " optimizing replica " + str(replication_count) + " " +
                 str(list(id2weight.values())))
        log.debug(bucket['name'] + " optimizing replica " + str(replication_count) + " " +
                  str(dict(id2weight)))
        c = Crush(backward_compatibility=self.args.backward_compatibility)
        c.parse(crushmap)

        (take, failure_domain) = c.rule_get_take_failure_domain(a.args.rule)
        #
        # initial simulation
        #
        i = a.run_simulation(c, take, failure_domain)
        i = i.reset_index()
        s = i['~name~'] == 'KKKK'  # init to False, there must be a better way
        for item in bucket['children']:
            s |= (i['~name~'] == item['name'])

        previous_delta = None
        improve_tolerance = 10
        no_improvement = 0
        max_iterations = 1000
        from_to_count = 0
        best_weights = list(id2weight.values())
        n = self.main.value_name()
        for iterations in range(max_iterations):
            choose_arg['weight_set'][choose_arg_position] = list(id2weight.values())
            c.parse(crushmap)
            z = a.run_simulation(c, take, failure_domain)
            z = z.reset_index()
            d = z[s].copy()
            d['~delta~'] = d['~' + n + '~'] - d['~expected~']
            d['~delta%~'] = d['~delta~'] / d['~expected~']
            delta = d['~delta~'].abs().sum()
            if previous_delta is not None:
                if previous_delta < delta:
                    no_improvement += 1
                else:
                    previous_delta = delta
                    best_weights = list(id2weight.values())
                    no_improvement = 0
                if no_improvement >= improve_tolerance:
                    log.info("stop because " + str(no_improvement) + " tries")
                    break
            else:
                best_weights = list(id2weight.values())
                previous_delta = delta
            if delta == 0:
                log.info("stop because the distribution is perfect")
                break
            log.info(bucket['name'] + " delta " + str(delta))
            if self.args.step and no_improvement == 0:
                compare_instance.set_destination(c)
                (from_to, in_out) = compare_instance.compare_bucket(bucket)
                from_to_count = sum(map(lambda x: sum(x.values()), from_to.values()))
                in_out_count = sum(map(lambda x: sum(x.values()), in_out.values()))
                log.debug("moved from_to " + str(from_to_count) +
                          " in_out " + str(in_out_count))
                if from_to_count > self.args.step:
                    log.info("stopped because moved " + str(from_to_count) +
                             " --step " + str(self.args.step))
                    break
            d = d.sort_values('~delta~', ascending=False)
            if d.iloc[0]['~delta~'] <= 0 or d.iloc[-1]['~delta~'] >= 0:
                log.info("stop because [" + str(d.iloc[0]['~delta~']) + "," +
                         str(d.iloc[-1]['~delta~']) + "]")
                break
            # there should not be a need to keep the sum of the weights to the same value, they
            # are only used locally for placement and have no impact on the upper weights
            # nor are they derived from the weights from below *HOWEVER* in case of a failure
            # the weights need to be as close as possible from the target weight to limit
            # the negative impact
            shift = int(id2weight[d.iloc[0]['~id~']] * min(0.01, abs(d.iloc[0]['~delta%~'])))
            if shift <= 0:
                log.info("stop because shift is zero")
                break
            log.debug("shift from " + str(d.iloc[0]['~id~']) +
                      " to " + str(d.iloc[-1]['~id~']))
            id2weight[d.iloc[0]['~id~']] -= shift
            id2weight[d.iloc[-1]['~id~']] += shift

        choose_arg['weight_set'][choose_arg_position] = best_weights
        c.parse(crushmap)
        compare_instance.set_destination(c)
        (from_to, in_out) = compare_instance.compare_bucket(bucket)
        from_to_count = sum(map(lambda x: sum(x.values()), from_to.values()))

        if iterations >= max_iterations - 1:
            log.info("stopped after " + str(iterations))
        log.info(bucket['name'] + " replica " + str(replication_count) + " optimized")
        log.info(bucket['name'] + " weights " + str(choose_arg['weight_set'][choose_arg_position]))
        return from_to_count
Example 26
 def analyze_crushmap(self, crushmap):
     c = Crush(backward_compatibility=self.args.backward_compatibility)
     c.parse(crushmap)
     (take, failure_domain) = c.rule_get_take_failure_domain(self.args.rule)
     return self.run_simulation(c, take, failure_domain)
Example 27
    def parse_report(self, report):
        if report['health']['overall_status'] != 'HEALTH_OK':
            raise HealthError(
                "expected health overall_status == HEALTH_OK but got " +
                report['health']['overall_status'] + " instead")

        v = report['version'].split('.')
        if v[0] == "0":
            if v[1] == '94':
                version = 'h'
            elif v[1] == '87':
                version = 'g'
            elif v[1] == '80':
                version = 'f'
        else:
            version = chr(ord('a') + int(v[0]) - 1)

        crushmap = CephCrushmapConverter().parse_ceph(
            report['crushmap'], version=version, recover_choose_args=False)
        mappings = collections.defaultdict(lambda: {})
        for pg_stat in report['pgmap']['pg_stats']:
            mappings[pg_stat['pgid']] = pg_stat['acting']

        ruleset2name = {}
        for rule in crushmap['private']['rules']:
            ruleset2name[rule['ruleset']] = rule['rule_name']

        c = LibCrush(backward_compatibility=True)
        c.parse(crushmap)

        name2id = {}

        def collect_items(children):
            for child in children:
                if 'id' in child:
                    name2id[child['name']] = child['id']
                collect_items(child.get('children', []))

        collect_items(crushmap['trees'])

        weights = Crush.parse_osdmap_weights(report['osdmap'])

        for osd in report['osdmap']["osds"]:
            if osd["primary_affinity"] != 1.0:
                raise UnsupportedError("osd." + str(osd["osd"]) +
                                       " primary affinity is != 1.0")

        failed_mapping = False
        for pool in report['osdmap']['pools']:
            if pool['type'] != 1:
                raise UnsupportedError(
                    "pool " + pool['pool_name'] + " is type " +
                    str(pool['type']) +
                    " is not supported, only type == 1 (replicated)")
            if pool['object_hash'] != 2:
                raise UnsupportedError(
                    "pool " + pool['pool_name'] + " object_hash " +
                    str(pool['object_hash']) +
                    " is not supported, only object_hash == 2 (rjenkins)")
            if pool['flags_names'] != 'hashpspool':
                raise UnsupportedError("pool " + pool['pool_name'] +
                                       " has flags_names " + "'" +
                                       str(pool['flags_names']) + "'" +
                                       " is no supported, only hashpspool")
            ruleset = pool['crush_ruleset']
            if str(ruleset) in crushmap.get('choose_args', {}):
                choose_args = str(ruleset)
            else:
                choose_args = None
            rule = ruleset2name[ruleset]
            size = pool['size']
            log.info("verifying pool {} pg_num {} pgp_num {}".format(
                pool['pool'], pool['pg_num'], pool['pg_placement_num']))
            values = LibCrush().ceph_pool_pps(pool['pool'], pool['pg_num'],
                                              pool['pg_placement_num'])
            kwargs = {
                "rule": str(rule),
                "replication_count": size,
            }
            if choose_args:
                kwargs["choose_args"] = choose_args
            if weights:
                kwargs["weights"] = weights
            for (name, pps) in values.items():
                if name not in mappings:
                    failed_mapping = True
                    log.error(name + " is not in pgmap")
                    continue
                kwargs["value"] = pps
                mapped = c.map(**kwargs)
                osds = [name2id[x] for x in mapped]
                if osds != mappings[name]:
                    failed_mapping = True
                    log.error("{} map to {} instead of {}".format(
                        name, osds, mappings[name]))
                    continue
        if failed_mapping:
            raise MappingError(
                "some mapping failed, please file a bug at "
                "http://libcrush.org/main/python-crush/issues/new")

        crushmap = CephCrushmapConverter().parse_ceph(report['crushmap'],
                                                      version=version,
                                                      recover_choose_args=True)
        crushmap['private']['pools'] = report['osdmap']['pools']
        crushmap['private']['version'] = version

        return crushmap
Example 28
class Gateway():

    logger = logging.getLogger('gatewaylogger')
    logger.setLevel(logging.DEBUG)
    ch = logging.FileHandler('gateway.log', 'w')
    formatter = logging.Formatter(
        '[%(asctime)s] %(message)s %(funcName)s:%(lineno)d')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    zk = None
    constants = Constants()
    crush_object = Crush()

    def __init__(self):
        super().__init__()
        try:
            Gateway.zk = KazooClient(hosts='172.25.0.101:2181')
            Gateway.zk.start()
            # print(Gateway.constants.SERVER_PREFIX + Gateway.constants.MESSAGE_CONNECTED + "with 127.0.0.1:2181")
            Gateway.zk.add_listener(self.connection_listener)
            ChildrenWatch(Gateway.zk,
                          '/nodes',
                          func=Gateway.handle_dbnodes_change)
        except Exception as e:
            Gateway.print_error(e)
        self.add_myself_to_zookeeper()
        self.dbnodes = []

    @staticmethod
    def print_error(e):
        print(Gateway.constants.ERROR_PREFIX + e.__str__())

    def add_myself_to_zookeeper(self):
        hostname = socket.gethostname()
        ip = socket.gethostbyname(hostname)
        try:
            node_data = {'ip': ip}
            Gateway.zk.ensure_path("/gateways")
            Gateway.zk.create("/gateways/gateway",
                              str.encode(json.dumps(node_data)),
                              ephemeral=True,
                              sequence=True)
            print('Added a gateway node to zookeeper')
        except Exception as e:
            Gateway.print_error(e)

    def connection_listener(self, state):

        if state == KazooState.LOST:
            print('session lost')
        elif state == KazooState.SUSPENDED:
            print('session suspended')
        else:
            print('running in state {}'.format(state))

    @staticmethod
    def handle_dbnodes_change(children):
        # print('Nodes cluster changed, current cluster configuration:')
        # for node in children:
        # 	data,stat = Gateway.zk.get('/nodes/{}'.format(node))
        # 	print('Node: {}'.format(node))
        # 	print('Data: {}'.format(json.loads(data.decode())))
        # 	print('Data version: {}'.format(stat.version))
        # 	print('Data length: {}'.format(stat.data_length))
        crush_map_children = []
        for i in range(len(children)):
            crush_map_children.append(
                Gateway.constants.CRUSH_MAP_CHILDREN_NODE_FMT.format(
                    i, -2 - i, i, children[i]))
        crush_map = json.loads(
            Gateway.constants.CRUSH_MAP_FMT.format(
                ','.join(crush_map_children)))
        # crush_map =	Gateway.constants.DEFAULT_CRUSH_MAP
        print(crush_map)
        if len(crush_map['trees'][0]['children']) == 0:
            return
        Gateway.crush_object.parse(crush_map)
        Gateway.zk.ensure_path('/crush_map')
        Gateway.zk.set('/crush_map', str.encode(json.dumps(crush_map)))
Example 29
         "children": [
          { "id": 2, "name": "device2", "weight": 1.0 },
          { "id": 3, "name": "device3", "weight": 2.0 }
         ]
        },
        {
         "type": "host", "name": "host2", "id": -4,
         "children": [
          { "id": 4, "name": "device4", "weight": 1.0 },
          { "id": 5, "name": "device5", "weight": 2.0 }
         ]
        }
      ]
    }
  ],
  "rules": {
    "data": [
      [ "take", "dc1" ],
      [ "chooseleaf", "firstn", 0, "type", "host" ],
      [ "emit" ]
    ]
  }
}

"""

c = Crush()
c.parse(json.loads(crushmap))
print(c.map(rule="data", value=1234, replication_count=1))
print(c.map(rule="data", value=1234, replication_count=2))
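For reference, map() returns the chosen device names as a list. A hedged sketch of the shape of the two results above (the actual names depend on the CRUSH hash of value=1234, so the ones in the comments are placeholders):

one = c.map(rule="data", value=1234, replication_count=1)  # e.g. ['device3']
two = c.map(rule="data", value=1234, replication_count=2)  # e.g. ['device3', 'device5']
assert len(one) == 1  # one device per requested replica
assert len(two) == 2  # chooseleaf ... type host places each replica on a distinct host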
Example 30
 def crushmap_to_file(self, crushmap):
     c = Crush(verbose=self.args.debug,
               backward_compatibility=self.args.backward_compatibility)
     c.parse(crushmap)
     c.to_file(self.args.out_path)