Example #1
 def test_stringify_key(self):
     items = [(b('bytes'), 'bytes'),
              (1.0, '1.0'),
              (10, '10'),
              (True, 'true'),
              (False, 'false'),
              (None, 'null'),
              (long_type(100), '100')]
     for k, expect in items:
         self.assertEqual(
             json.loads(json.dumpsJSON({k: expect})),
             {expect: expect})
         self.assertEqual(
             json.loads(json.dumpsJSON({k: expect}, sort_keys=True)),
             {expect: expect})
     self.assertRaises(TypeError, json.dumpsJSON, {json: 1})
     for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]:
         for sort_keys in [False, True]:
             v0 = dict(v)
             v0[json] = 1
             v1 = dict((as_text_type(key), val) for (key, val) in v.items())
             self.assertEqual(
                 json.loads(json.dumpsJSON(v0, skipkeys=True, sort_keys=sort_keys)),
                 v1)
             self.assertEqual(
                 json.loads(json.dumpsJSON({'': v0}, skipkeys=True, sort_keys=sort_keys)),
                 {'': v1})
             self.assertEqual(
                 json.loads(json.dumpsJSON([v0], skipkeys=True, sort_keys=sort_keys)),
                 [v1])
    def test_separators(self):
        h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
             {'nifty': 87}, {'field': 'yes', 'morefield': False} ]

        expect = textwrap.dedent("""\
        [
          [
            "blorpie"
          ] ,
          [
            "whoops"
          ] ,
          [] ,
          "d-shtaeou" ,
          "d-nthiouh" ,
          "i-vhbjkhnth" ,
          {
            "nifty" : 87
          } ,
          {
            "field" : "yes" ,
            "morefield" : false
          }
        ]""")


        d1 = json.dumpsJSON(h)
        d2 = json.dumpsJSON(h, indent='  ', sort_keys=True, separators=(' ,', ' : '))

        h1 = json.loads(d1)
        h2 = json.loads(d2)

        self.assertEqual(h1, h)
        self.assertEqual(h2, h)
        self.assertEqual(d2, expect)
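
A note on the options exercised above: separators is an (item_separator, key_separator) pair, so (' ,', ' : ') puts a space before every comma and around every colon, which is exactly the shape of the expected output. A minimal sketch with the standard-library json module, shown for comparison since the dumpsJSON wrapper used in these tests accepts the same keywords:

import json  # stdlib shown for comparison; hjson.dumpsJSON takes the same keywords

doc = {'field': 'yes', 'nifty': 87}
# With indent set, the item separator is emitted before each newline,
# so lines end with " ," just like the expect block above.
print(json.dumps(doc, indent='  ', sort_keys=True, separators=(' ,', ' : ')))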
Example #3
    def check(self, name, file, inputCr):
        text = self.load(file, inputCr)
        shouldFail = name[0:4] == "fail"

        try:
            data = hjson.loads(text)
            self.assertFalse(shouldFail)

            text1 = hjson.dumpsJSON(data)
            hjson1 = hjson.dumps(data, ensure_ascii=False)
            result = hjson.loads(self.load(name + "_result.json", inputCr))
            text2 = hjson.dumpsJSON(result)
            hjson2 = self.load(name + "_result.hjson", False)

            if self.verma > 2 or self.vermi > 6:
                # final check fails on py2.6 because of string formatting issues
                self.assertEqual(text2, text1)
                self.assertEqual(hjson2, hjson1)

            # dbg
            # with open(name + "_dbg1.txt", "w") as tmp: tmp.write(hjson1.encode("utf-8"))
            # with open(name + "_dbg2.txt", "w") as tmp: tmp.write(hjson2.encode("utf-8"))


        except hjson.HjsonDecodeError as e:
            self.assertTrue(shouldFail)
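
The check harness above relies on a load helper that is not shown. A plausible reconstruction, assuming it reads a UTF-8 test asset and, when the inputCr flag is set, forces CRLF line endings (self.assets_dir is a hypothetical attribute):

import codecs
import os

def load(self, name, cr):
    # Hypothetical sketch: read a test asset; when cr is set, normalize
    # line endings to CRLF so both input styles get exercised.
    path = os.path.join(self.assets_dir, name)
    with codecs.open(path, 'r', 'utf-8') as f:
        text = f.read()
    return text.replace('\n', '\r\n') if cr else text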
Example #4
 def test_empty_objects(self):
     s = "{}"
     self.assertEqual(json.loads(s), eval(s))
     s = "[]"
     self.assertEqual(json.loads(s), eval(s))
     s = '""'
     self.assertEqual(json.loads(s), eval(s))
Example #5
    def check(self, name, file, inputCr):
        text = self.load(file, inputCr)
        shouldFail = name[0:4] == "fail"

        try:
            data = hjson.loads(text)
            self.assertFalse(shouldFail, file)

            text1 = hjson.dumpsJSON(data)
            hjson1 = hjson.dumps(data, ensure_ascii=False)
            result = hjson.loads(self.load(name + "_result.json", inputCr))
            text2 = hjson.dumpsJSON(result)
            hjson2 = self.load(name + "_result.hjson", False)

            # dbg
            # with open(name + "_dbg1.txt", "w") as tmp: tmp.write(hjson1.encode("utf-8"))
            # with open(name + "_dbg2.txt", "w") as tmp: tmp.write(hjson2.encode("utf-8"))
            # with codecs.open(name + "_dbg3.txt", 'w', 'utf-8') as tmp: hjson.dump(data, tmp)

            if self.verma > 2 or self.vermi > 6:
                # final check fails on py2.6 because of string formatting issues
                self.assertEqual(text2, text1, file)
                self.assertEqual(hjson2, hjson1, file)

        except hjson.HjsonDecodeError as e:
            if not shouldFail:
                self.fail("raised error on parsing %s: %r" % (file, e))
 def test_empty_objects(self):
     s = '{}'
     self.assertEqual(json.loads(s), eval(s))
     s = '[]'
     self.assertEqual(json.loads(s), eval(s))
     s = '""'
     self.assertEqual(json.loads(s), eval(s))
 def test_ints(self):
     for opts in self.options:
         for val, expect in self.values:
             self.assertEqual(
                 val,
                 json.loads(json.dumpsJSON(val)))
             self.assertEqual(
                 expect,
                 json.loads(json.dumpsJSON(val, **opts)))
 def test_failures(self):
     for idx, doc in enumerate(JSONDOCS):
         idx = idx + 1
         try:
             json.loads(doc)
         except json.HjsonDecodeError:
             pass
         else:
             self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
 def test_ints(self):
     for val, expect in self.values:
         self.assertEqual(
             val,
             json.loads(json.dumpsJSON(val)))
         self.assertEqual(
             expect,
             json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)),
             )
 def test_dict_keys(self):
     for val, _ in self.values:
         expect = {str(val): 'value'}
         val = {val: 'value'}
         self.assertEqual(
             expect,
             json.loads(json.dumpsJSON(val)))
         self.assertEqual(
             expect,
             json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))
 def test_lists(self):
     for val, expect in self.values:
         val = [val, val]
         expect = [expect, expect]
         self.assertEqual(
             val,
             json.loads(json.dumpsJSON(val)))
         self.assertEqual(
             expect,
             json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))
 def test_dicts(self):
     for val, expect in self.values:
         val = {'k': val}
         expect = {'k': expect}
         self.assertEqual(
             val,
             json.loads(json.dumpsJSON(val)))
         self.assertEqual(
             expect,
             json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))
Example #13
 def test_object_pairs_hook(self):
     s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
     p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4), ("qrt", 5), ("pad", 6), ("hoy", 7)]
     self.assertEqual(json.loads(s), eval(s))
     self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
     self.assertEqual(json.load(StringIO(s), object_pairs_hook=lambda x: x), p)
     od = json.loads(s, object_pairs_hook=OrderedDict)
     self.assertEqual(od, OrderedDict(p))
     self.assertEqual(type(od), OrderedDict)
     # the object_pairs_hook takes priority over the object_hook
     self.assertEqual(json.loads(s, object_pairs_hook=OrderedDict, object_hook=lambda x: None), OrderedDict(p))
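
Because object_pairs_hook receives the raw (key, value) pairs before they are collapsed into a dict, it can also be used to reject duplicate keys that a plain dict would silently overwrite. A small sketch (the no_dupes helper is illustrative, not part of the library):

import hjson

def no_dupes(pairs):
    # The hook sees every pair in document order, including repeated keys.
    result = {}
    for key, value in pairs:
        if key in result:
            raise ValueError('duplicate key: %r' % key)
        result[key] = value
    return result

hjson.loads('{"a": 1, "b": 2}', object_pairs_hook=no_dupes)    # ok
# hjson.loads('{"a": 1, "a": 2}', object_pairs_hook=no_dupes)  # ValueError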
Example #14
 def test_scan_error(self):
     err = None
     for t in (u, b):
         try:
             json.loads(t('{"asdf": "'))
         except json.HjsonDecodeError:
             err = sys.exc_info()[1]
         else:
             self.fail('Expected HjsonDecodeError')
         self.assertEqual(err.lineno, 1)
         self.assertEqual(err.colno, 10)
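
The u and b callables iterated over here come from the test suite's Python 2/3 compatibility shim: they coerce the same literal to text and to bytes respectively. Roughly, on Python 3 (a reconstruction, not the shim's exact code):

u = lambda s: s                    # text stays text
b = lambda s: s.encode('latin1')   # bytes variant of the same literal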
 def test_dict_keys(self):
     for opts in self.options:
         for val, _ in self.values:
             expect = {str(val): 'value'}
             val = {val: 'value'}
             self.assertEqual(
                 expect,
                 json.loads(json.dumpsJSON(val)))
             self.assertEqual(
                 expect,
                 json.loads(json.dumpsJSON(val, **opts)))
 def test_dicts(self):
     for opts in self.options:
         for val, expect in self.values:
             val = {'k': val}
             expect = {'k': expect}
             self.assertEqual(
                 val,
                 json.loads(json.dumpsJSON(val)))
             self.assertEqual(
                 expect,
                 json.loads(json.dumpsJSON(val, **opts)))
 def test_lists(self):
     for opts in self.options:
         for val, expect in self.values:
             val = [val, val]
             expect = [expect, expect]
             self.assertEqual(
                 val,
                 json.loads(json.dumpsJSON(val)))
             self.assertEqual(
                 expect,
                 json.loads(json.dumpsJSON(val, **opts)))
Example #18
 def test_decode_error(self):
     err = None
     try:
         json.loads('{}\na\nb')
     except json.HjsonDecodeError:
         err = sys.exc_info()[1]
     else:
         self.fail('Expected HjsonDecodeError')
     self.assertEqual(err.lineno, 2)
     self.assertEqual(err.colno, 1)
     self.assertEqual(err.endlineno, 3)
     self.assertEqual(err.endcolno, 2)
 def test_array_decoder_issue46(self):
     # http://code.google.com/p/simplejson/issues/detail?id=46
     for doc in [u'[,]', '[,]']:
         try:
             json.loads(doc)
         except json.HjsonDecodeError:
             pass
         except Exception:
             e = sys.exc_info()[1]
             self.fail("Unexpected exception raised %r %s" % (e, e))
         else:
             self.fail("Unexpected success parsing '[,]'")
 def test_namedtuple_dumps(self):
     for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
         d = v._asdict()
         self.assertEqual(d, json.loads(json.dumpsJSON(v)))
         self.assertEqual(
             d,
             json.loads(json.dumpsJSON(v, namedtuple_as_object=True)))
         self.assertEqual(d, json.loads(json.dumpsJSON(v, tuple_as_array=False)))
         self.assertEqual(
             d,
             json.loads(json.dumpsJSON(v, namedtuple_as_object=True,
                                   tuple_as_array=False)))
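
The fixtures these namedtuple tests assume are easy to reconstruct: Value and Point are plain namedtuples, and the Duck* variants are duck-typed stand-ins that merely expose _asdict(). A sketch under that assumption (the real suite's classes may differ in detail):

from collections import namedtuple

Value = namedtuple('Value', ['value'])
Point = namedtuple('Point', ['x', 'y'])

class DuckValue(object):
    # Not a tuple subclass, but quacks like a namedtuple via _asdict.
    def __init__(self, *args):
        self.value = Value(*args)

    def _asdict(self):
        return self.value._asdict()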
 def test_object_pairs_hook_with_unicode(self):
     s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
     p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4),
          (u"qrt", 5), (u"pad", 6), (u"hoy", 7)]
     self.assertEqual(json.loads(s), eval(s))
     self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
     od = json.loads(s, object_pairs_hook=json.OrderedDict)
     self.assertEqual(od, json.OrderedDict(p))
     self.assertEqual(type(od), json.OrderedDict)
     # the object_pairs_hook takes priority over the object_hook
     self.assertEqual(json.loads(s,
                                 object_pairs_hook=json.OrderedDict,
                                 object_hook=lambda x: None),
                      json.OrderedDict(p))
Example #22
    def test_error_is_pickable(self):
        err = None
        try:
            json.loads('{}\na\nb')
        except json.HjsonDecodeError:
            err = sys.exc_info()[1]
        else:
            self.fail('Expected HjsonDecodeError')
        s = pickle.dumps(err)
        e = pickle.loads(s)

        self.assertEqual(err.msg, e.msg)
        self.assertEqual(err.doc, e.doc)
        self.assertEqual(err.pos, e.pos)
        self.assertEqual(err.end, e.end)
Example #23
def read_config(config_filename):
    """Read the expected system configuration from the config file."""

    config = None
    with open(config_filename, 'r') as config_file:
        config = hjson.loads(config_file.read())

    config_checks = []

    for config_check_hjson in config:
        expected = None
        if config_check_hjson['type'] == 'exact match':
            expected = config_check_hjson['expected_stdout']
        elif config_check_hjson['type'] == 'regex match':
            expected = config_check_hjson['expected_regex']
        else:
            sys.exit("Expected comparison string does not match 'type' field.")

        config_check = ConfigCheck(
            command=config_check_hjson['command'],
            comparison_type=config_check_hjson['type'],
            expected=expected,
            fix=config_check_hjson['fix'],
            case_sensitive=(config_check_hjson['case_sensitive'] == 'true'),
            description=config_check_hjson['description'],
            confidence=config_check_hjson['confidence'])
        config_checks.append(config_check)

    return config_checks
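
For reference, a minimal Hjson document that read_config would accept, with the field names taken from the accesses above (the concrete command and values are illustrative only):

import hjson

sample = '''
[
  {
    description: Firewall is enabled
    command: defaults read /Library/Preferences/com.apple.alf globalstate
    type: exact match
    # quoted so they stay strings: bare 1 and true would parse as number/bool
    expected_stdout: "1"
    case_sensitive: "true"
    fix: defaults write /Library/Preferences/com.apple.alf globalstate -int 1
    confidence: required
  }
]
'''
checks = hjson.loads(sample)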
Example #24
    def test_indent(self):
        h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
             'i-vhbjkhnth',
             {'nifty': 87}, {'field': 'yes', 'morefield': False} ]

        expect = textwrap.dedent("""\
        [
        \t[
        \t\t"blorpie"
        \t],
        \t[
        \t\t"whoops"
        \t],
        \t[],
        \t"d-shtaeou",
        \t"d-nthiouh",
        \t"i-vhbjkhnth",
        \t{
        \t\t"nifty": 87
        \t},
        \t{
        \t\t"field": "yes",
        \t\t"morefield": false
        \t}
        ]""")


        d1 = json.dumpsJSON(h)
        d2 = json.dumpsJSON(h, indent='\t', sort_keys=True, separators=(',', ': '))
        d3 = json.dumpsJSON(h, indent='  ', sort_keys=True, separators=(',', ': '))
        d4 = json.dumpsJSON(h, indent=2, sort_keys=True, separators=(',', ': '))

        h1 = json.loads(d1)
        h2 = json.loads(d2)
        h3 = json.loads(d3)
        h4 = json.loads(d4)

        self.assertEqual(h1, h)
        self.assertEqual(h2, h)
        self.assertEqual(h3, h)
        self.assertEqual(h4, h)
        self.assertEqual(d3, expect.replace('\t', '  '))
        self.assertEqual(d4, expect.replace('\t', '  '))
        # NOTE: Python 2.4 textwrap.dedent converts tabs to spaces,
        #       so the following is expected to fail. Python 2.4 is not a
        #       supported platform in hjson 2.1.0+.
        self.assertEqual(d2, expect)
 def test_namedtuple_dumps_false(self):
     for v in [Value(1), Point(1, 2)]:
         l = list(v)
         self.assertEqual(
             l,
             json.loads(json.dumpsJSON(v, namedtuple_as_object=False)))
         self.assertRaises(TypeError, json.dumpsJSON, v,
             tuple_as_array=False, namedtuple_as_object=False)
Example #26
 def test_sort_keys(self):
     # https://github.com/simplejson/simplejson/issues/106
     for num_keys in range(2, 32):
         p = dict((str(x), x) for x in range(num_keys))
         sio = StringIO()
         json.dumpJSON(p, sio, sort_keys=True)
         self.assertEqual(sio.getvalue(), json.dumpsJSON(p, sort_keys=True))
         self.assertEqual(json.loads(sio.getvalue()), p)
 def assertRoundTrip(self, obj, other, for_json=True):
     if for_json is None:
         # None will use the default
         s = json.dumpsJSON(obj)
     else:
         s = json.dumpsJSON(obj, for_json=for_json)
     self.assertEqual(
         json.loads(s),
         other)
 def test_namedtuple_dump_false(self):
     for v in [Value(1), Point(1, 2)]:
         l = list(v)
         sio = StringIO()
         json.dumpJSON(v, sio, namedtuple_as_object=False)
         self.assertEqual(
             l,
             json.loads(sio.getvalue()))
         self.assertRaises(TypeError, json.dumpJSON, v, StringIO(),
             tuple_as_array=False, namedtuple_as_object=False)
 def test_namedtuple_dump(self):
     for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
         d = v._asdict()
         sio = StringIO()
         json.dumpJSON(v, sio)
         self.assertEqual(d, json.loads(sio.getvalue()))
         sio = StringIO()
         json.dumpJSON(v, sio, namedtuple_as_object=True)
         self.assertEqual(
             d,
             json.loads(sio.getvalue()))
         sio = StringIO()
         json.dumpJSON(v, sio, tuple_as_array=False)
         self.assertEqual(d, json.loads(sio.getvalue()))
         sio = StringIO()
         json.dumpJSON(v, sio, namedtuple_as_object=True,
                   tuple_as_array=False)
         self.assertEqual(
             d,
             json.loads(sio.getvalue()))
def convert(hjson_filename):
    """Convert HJson file on disk to JSON format and write to disk."""
    assert hjson_filename.endswith('.hjson')
    json_filename = hjson_filename.replace('.hjson', '.json')

    with open(json_filename, 'w') as json_out:
        with open(hjson_filename, 'r') as hjson_in:
            config = hjson.loads(hjson_in.read())
            config = [const.JSON_WARNING] + config
            json_format = hjson.dumpsJSON(config)
            json_out.write(json_format)
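
convert is the whole Hjson-to-JSON story in miniature: hjson.loads accepts the relaxed input syntax, hjson.dumpsJSON emits strict JSON. A tiny round-trip sketch:

import hjson

data = hjson.loads('''
{
  # comments and bare keys are legal Hjson
  key: value
}
''')
print(hjson.dumpsJSON(data))  # -> {"key": "value"}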
 def test_ints(self):
     for val, expect in self.values:
         self.assertEqual(val, json.loads(json.dumpsJSON(val)))
         self.assertEqual(
             expect, json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)),
         )
 def test_float(self):
     rval = json.loads('1', parse_int=float)
     self.assertTrue(isinstance(rval, float))
     self.assertEqual(rval, 1.0)
 def test_decoder_optimizations(self):
     # Several optimizations were made that skip over calls to
     # the whitespace regex, so this test is designed to try and
     # exercise the uncommon cases. The array cases are already covered.
     rval = json.loads('{   "key"    :    "value"    ,  "k":"v"    }')
     self.assertEqual(rval, {"key":"value", "k":"v"})
 def test_ints(self):
     for opts in self.options:
         for val, expect in self.values:
             self.assertEqual(val, json.loads(json.dumpsJSON(val)))
             self.assertEqual(expect,
                              json.loads(json.dumpsJSON(val, **opts)))
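
These int_as_string_bitcount tests exercise simplejson-style big-integer handling: integers too wide for the given bit count are serialized as JSON strings, so consumers limited to 53-bit floats (JavaScript, notably) do not silently lose precision. Roughly, leaving the exact boundary to the library:

import hjson

print(hjson.dumpsJSON(123))                                 # 123
print(hjson.dumpsJSON(2 ** 40, int_as_string_bitcount=31))  # "1099511627776"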
Example #35
    def parse_layout(self, layout):
        # Wrap this in a dictionary so hjson will parse KLE raw data
        layout = '{"layout": [' + layout + ']}'
        layout = hjson.loads(layout)['layout']

        # Initialize our state machine
        current_key = self.key_skel.copy()
        current_row = Decimal(0)
        current_col = Decimal(0)
        current_x = 0
        current_y = self.key_width / 2

        if isinstance(layout[0], dict):
            self.attrs(layout[0])
            layout = layout[1:]

        for row_num, row in enumerate(layout):
            self.append([])

            # Process the current row
            for key in row:
                if isinstance(key, dict):
                    if 'w' in key and key['w'] != Decimal(1):
                        current_key['width'] = Decimal(key['w'])
                    if 'w2' in key and 'h2' in key and key[
                            'w2'] == 1.5 and key['h2'] == 1:
                        # FIXME: ISO Key uses these params: {x:0.25,w:1.25,h:2,w2:1.5,h2:1,x2:-0.25}
                        current_key['isoenter'] = True
                    if 'h' in key and key['h'] != Decimal(1):
                        current_key['height'] = Decimal(key['h'])
                    if 'a' in key:
                        current_key['label_style'] = self.key_skel[
                            'label_style'] = int(key['a'])
                        if current_key['label_style'] < 0:
                            current_key['label_style'] = 0
                        elif current_key['label_style'] > 9:
                            current_key['label_style'] = 9
                    if 'f' in key:
                        font_size = int(key['f'])
                        if font_size > 9:
                            font_size = 9
                        elif font_size < 1:
                            font_size = 1
                        current_key['label_size'] = self.key_skel[
                            'label_size'] = font_size
                    if 'p' in key:
                        current_key['keycap_profile'] = self.key_skel[
                            'keycap_profile'] = key['p']
                    if 'c' in key:
                        current_key['keycap_color'] = self.key_skel[
                            'keycap_color'] = key['c']
                    if 't' in key:
                        # FIXME: Need to do better validation, plus figure out how to support multiple colors
                        if '\n' in key['t']:
                            key['t'] = key['t'].split('\n')[0]
                        if key['t'] == "0":
                            key['t'] = "#000000"
                        current_key['label_color'] = self.key_skel[
                            'label_color'] = key['t']
                    if 'x' in key:
                        current_col += Decimal(key['x'])
                        current_x += Decimal(key['x']) * self.key_width
                    if 'y' in key:
                        current_row += Decimal(key['y'])
                        current_y += Decimal(key['y']) * self.key_width
                    if 'd' in key:
                        current_key['decal'] = True

                else:
                    current_key['name'] = key
                    current_key['row'] = current_row
                    current_key['column'] = current_col

                    # Determine the X center
                    x_center = (current_key['width'] * self.key_width) / 2
                    current_x += x_center
                    current_key['x'] = current_x
                    current_x += x_center

                    # Determine the Y center
                    y_center = (current_key['height'] * self.key_width) / 2
                    y_offset = y_center - (self.key_width / 2)
                    current_key['y'] = (current_y + y_offset)

                    # Tend to our row/col count
                    current_col += current_key['width']
                    if current_col > self.columns:
                        self.columns = current_col

                    # Invert the y-axis if necessary
                    if self.invert_y:
                        current_key['y'] = -current_key['y']

                    # Store this key
                    self[-1].append(current_key)
                    current_key = self.key_skel.copy()

            # Move to the next row
            current_x = 0
            current_y += self.key_width
            current_col = Decimal(0)
            current_row += Decimal(1)
            if current_row > self.rows:
                self.rows = Decimal(current_row)
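
parse_layout leans on Hjson's tolerance for bare keys: keyboard-layout-editor "raw data" rows such as {a:7} are not valid JSON, but wrapping them in a layout array, as the first two lines of the method do, lets hjson.loads digest them directly. A small sketch:

import hjson

raw = '[{a:7,w:1.5},"Tab","Q"],[{x:0.5},"A"]'  # KLE raw data, not valid JSON
rows = hjson.loads('{"layout": [' + raw + ']}')['layout']
# rows == [[{'a': 7, 'w': 1.5}, 'Tab', 'Q'], [{'x': 0.5}, 'A']]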
Example #36
def main():
    """Generate the Occamy system and all corresponding configuration files."""
    parser = argparse.ArgumentParser(prog="clustergen")
    parser.add_argument("--cfg",
                        "-c",
                        metavar="file",
                        type=argparse.FileType('r'),
                        required=True,
                        help="A cluster configuration file")
    parser.add_argument("--outdir",
                        "-o",
                        type=pathlib.Path,
                        required=True,
                        help="Target directory.")
    parser.add_argument("--top-sv",
                        metavar="TOP_SV",
                        help="Name of top-level file (output).")
    parser.add_argument("--soc-sv",
                        metavar="TOP_SYNC_SV",
                        help="Name of synchronous SoC file (output).")
    parser.add_argument("--pkg-sv",
                        metavar="PKG_SV",
                        help="Name of top-level package file (output)")
    parser.add_argument("--quadrant-s1",
                        metavar="QUADRANT_S1",
                        help="Name of S1 quadrant template file (output)")
    parser.add_argument(
        "--quadrant-s1-ctrl",
        metavar="QUADRANT_S1_CTL",
        help="Name of S1 quadrant controller template file (output)")
    parser.add_argument("--xilinx-sv",
                        metavar="XILINX_SV",
                        help="Name of the Xilinx wrapper file (output).")
    parser.add_argument("--testharness-sv",
                        metavar="TESTHARNESS_SV",
                        help="Name of the testharness wrapper file (output).")
    parser.add_argument("--cva6-sv",
                        metavar="CVA6_SV",
                        help="Name of the CVA6 wrapper file (output).")
    parser.add_argument("--chip",
                        metavar="CHIP_TOP",
                        help="(Optional) Chip Top-level")
    parser.add_argument("--hbm-ctrl",
                        metavar="HBM_CTRL",
                        help="(Optional) HBM controller")
    parser.add_argument("--bootdata",
                        metavar="BOOTDATA",
                        help="Name of the bootdata file (output)")
    parser.add_argument("--cheader",
                        metavar="CHEADER",
                        help="Name of the cheader file (output)")
    parser.add_argument("--csv",
                        metavar="CSV",
                        help="Name of the csv file (output)")

    parser.add_argument("--graph", "-g", metavar="DOT")
    parser.add_argument("--memories", "-m", action="store_true")
    parser.add_argument("--wrapper", "-w", action="store_true")
    parser.add_argument("--am-cheader", "-D", metavar="ADDRMAP_CHEADER")
    parser.add_argument("--am-csv", "-aml", metavar="ADDRMAP_CSV")
    parser.add_argument("--dts", metavar="DTS", help="System's device tree.")
    parser.add_argument("--name",
                        metavar="NAME",
                        default=DEFAULT_NAME,
                        help="System's name.")

    parser.add_argument("-v",
                        "--verbose",
                        help="increase output verbosity",
                        action="store_true")

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # Read HJSON description of System.
    with args.cfg as file:
        try:
            srcfull = file.read()
            obj = hjson.loads(srcfull, use_decimal=True)
            obj = JsonRef.replace_refs(obj)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    # If name argument provided, change config
    if args.name != DEFAULT_NAME:
        obj["cluster"]["name"] = args.name + "_cluster"
        # occamy.cfg["cluster"]["name"] = args.name

    occamy = Occamy(obj)

    # Arguments.
    nr_s1_quadrants = occamy.cfg["nr_s1_quadrant"]
    nr_s1_clusters = occamy.cfg["s1_quadrant"]["nr_clusters"]
    is_remote_quadrant = occamy.cfg["is_remote_quadrant"]
    # Iterate over Hives to get the number of cores.
    nr_cluster_cores = len([
        core for hive in occamy.cfg["cluster"]["hives"]
        for core in hive["cores"]
    ])

    if not args.outdir.is_dir():
        exit("Out directory is not a valid path.")

    outdir = args.outdir
    outdir.mkdir(parents=True, exist_ok=True)

    if args.wrapper:
        with open(outdir / f"{args.name}_cluster_wrapper.sv", "w") as f:
            f.write(occamy.render_wrapper())

    if args.memories:
        with open(outdir / f"{args.name}_memories.json", "w") as f:
            f.write(occamy.cluster.memory_cfg())

    ####################
    # Address Map (AM) #
    ####################
    # Create the address map.
    am = solder.AddrMap()
    # Create a device tree object.
    dts = device_tree.DeviceTree()

    # Toplevel crossbar address map
    am_soc_narrow_xbar = am.new_node("soc_narrow_xbar")
    am_soc_wide_xbar = am.new_node("soc_wide_xbar")

    # Quadrant pre-crossbar address map
    am_quadrant_pre_xbar = list()
    for i in range(nr_s1_quadrants):
        am_quadrant_pre_xbar.append(
            am.new_node("am_quadrant_pre_xbar_{}".format(i)))

    # Quadrant inter crossbar address map:
    am_quadrant_inter_xbar = am.new_node("am_quadrant_inter_xbar")

    # HBM crossbar address map
    am_hbm_xbar = am.new_node("am_hbm_xbar")

    # Quadrant crossbar address map
    am_wide_xbar_quadrant_s1 = list()
    am_narrow_xbar_quadrant_s1 = list()
    for i in range(nr_s1_quadrants):
        am_wide_xbar_quadrant_s1.append(
            am.new_node("wide_xbar_quadrant_s1_{}".format(i)))
        am_narrow_xbar_quadrant_s1.append(
            am.new_node("narrow_xbar_quadrant_s1_{}".format(i)))

    # Peripheral crossbar address map
    am_soc_axi_lite_periph_xbar = am.new_node("soc_axi_lite_periph_xbar")
    am_soc_regbus_periph_xbar = am.new_node("soc_periph_regbus_xbar")
    am_hbm_cfg_xbar = am.new_node("hbm_cfg_xbar")

    ############################
    # AM: Periph AXI Lite XBar #
    ############################
    nr_axi_lite_peripherals = len(
        occamy.cfg["peripherals"]["axi_lite_peripherals"])
    am_axi_lite_peripherals = []

    for p in range(nr_axi_lite_peripherals):
        am_axi_lite_peripherals.append(
            am.new_leaf(
                occamy.cfg["peripherals"]["axi_lite_peripherals"][p]["name"],
                occamy.cfg["peripherals"]["axi_lite_peripherals"][p]["length"],
                occamy.cfg["peripherals"]["axi_lite_peripherals"][p]
                ["address"]).attach_to(am_soc_axi_lite_periph_xbar))
        # add debug module to devicetree
        if occamy.cfg["peripherals"]["axi_lite_peripherals"][p][
                "name"] == "debug":
            dts.add_device("debug", "riscv,debug-013",
                           am_axi_lite_peripherals[p], [
                               "interrupts-extended = <&CPU0_intc 65535>",
                               "reg-names = \"control\""
                           ])

    ##########################
    # AM: Periph Regbus XBar #
    ##########################
    nr_regbus_peripherals = len(
        occamy.cfg["peripherals"]["regbus_peripherals"])
    am_regbus_peripherals = []

    for p in range(nr_regbus_peripherals):
        am_regbus_peripherals.append(
            am.new_leaf(
                occamy.cfg["peripherals"]["regbus_peripherals"][p]["name"],
                occamy.cfg["peripherals"]["regbus_peripherals"][p]["length"],
                occamy.cfg["peripherals"]["regbus_peripherals"][p]
                ["address"]).attach_to(am_soc_regbus_periph_xbar))
        # add uart to devicetree
        if occamy.cfg["peripherals"]["regbus_peripherals"][p][
                "name"] == "uart":
            dts.add_device(
                "serial", "lowrisc,serial", am_regbus_peripherals[p], [
                    "clock-frequency = <50000000>", "current-speed = <115200>",
                    "interrupt-parent = <&PLIC0>", "interrupts = <1>"
                ])
        # add plic to devicetree
        elif occamy.cfg["peripherals"]["regbus_peripherals"][p][
                "name"] == "plic":
            dts.add_plic([0], am_regbus_peripherals[p])

    # add bootrom separately
    am_bootrom = am.new_leaf("bootrom",
                             occamy.cfg["peripherals"]["rom"]["length"],
                             occamy.cfg["peripherals"]["rom"]
                             ["address"]).attach_to(am_soc_regbus_periph_xbar)

    # add clint separately
    am_clint = am.new_leaf("clint",
                           occamy.cfg["peripherals"]["clint"]["length"],
                           occamy.cfg["peripherals"]["clint"]
                           ["address"]).attach_to(am_soc_regbus_periph_xbar)

    # add clint to devicetree
    dts.add_clint([0], am_clint)

    ##################
    # AM: SPM / PCIE #
    ##################
    # Connect PCIE to Wide AXI
    am_pcie = am.new_leaf(
        "pcie", occamy.cfg["pcie"]["length"], occamy.cfg["pcie"]["address_io"],
        occamy.cfg["pcie"]["address_mm"]).attach_to(am_soc_narrow_xbar)

    # Connect SPM to Narrow AXI
    am_spm = am.new_leaf(
        "spm", occamy.cfg["spm"]["length"],
        occamy.cfg["spm"]["address"]).attach_to(am_soc_narrow_xbar)

    ############
    # AM: IDMA #
    ############
    am_sys_idma_cfg = am.new_leaf(
        "sys_idma_cfg", occamy.cfg["sys_idma_cfg"]["length"],
        occamy.cfg["sys_idma_cfg"]["address"]).attach_to(am_soc_narrow_xbar)

    ###########
    # AM: HBI #
    ###########
    am_hbi = am.new_leaf("hbi", occamy.cfg["hbi"]["length"],
                         occamy.cfg["hbi"]["address"])
    am_soc_wide_xbar.attach(am_hbi)
    am_soc_narrow_xbar.attach(am_hbi)

    ###########
    # AM: RMQ #
    ###########
    # Add a remote quadrant port
    nr_remote_quadrants = len(occamy.cfg["remote_quadrants"])
    nr_remote_cores = 0
    rmq_cluster_cnt = 0
    am_remote_quadrants = list()
    for i, rq in enumerate(occamy.cfg["remote_quadrants"]):
        node = am.new_node("rmq_{}".format(i))
        am_remote_quadrants.append(node)
        alen = rq["nr_clusters"] * 0x40000
        addr = 0x10000000 + (nr_s1_clusters * nr_s1_quadrants +
                             rmq_cluster_cnt) * 0x40000
        leaf = am.new_leaf("rmq_{}".format(i), alen, addr)
        node.attach(leaf)
        node.attach_to(am_soc_narrow_xbar)
        node.attach_to(am_quadrant_inter_xbar)
        nr_remote_cores += rq["nr_clusters"] * rq["nr_cluster_cores"]
        rmq_cluster_cnt += rq["nr_clusters"]
        # remote quadrant control
        alen = occamy.cfg["s1_quadrant"]["cfg_base_offset"]
        addr = occamy.cfg["s1_quadrant"]["cfg_base_addr"] + (
            i + nr_s1_quadrants) * alen
        leaf = am.new_leaf("rmq_{}_cfg".format(i), alen, addr)
        node.attach(leaf)
        node.attach_to(am_soc_narrow_xbar)

    ###########
    # AM: HBM #
    ###########
    am_hbm = list()

    hbm_base_address_0 = occamy.cfg["hbm"]["address_0"]
    hbm_base_address_1 = occamy.cfg["hbm"]["address_1"]

    nr_hbm_channels = occamy.cfg["hbm"]["nr_channels_total"]
    nr_channels_base_0 = occamy.cfg["hbm"]["nr_channels_address_0"]

    hbm_channel_size = occamy.cfg["hbm"]["channel_size"]

    for i in range(nr_hbm_channels):
        bases = list()
        # Map first channels on both base addresses
        if i < nr_channels_base_0:
            bases.append(hbm_base_address_0 + i * hbm_channel_size)
        # Map all channels on second base address
        bases.append(hbm_base_address_1 + i * hbm_channel_size)
        # create address map
        am_hbm.append(
            am.new_leaf("hbm_{}".format(i), hbm_channel_size,
                        *bases).attach_to(am_hbm_xbar))

    dts.add_memory(am_hbm[0])

    ##############################
    # AM: Quadrants and Clusters #
    ##############################
    cluster_base_offset = occamy.cfg["cluster"]["cluster_base_offset"]
    cluster_tcdm_size = occamy.cfg["cluster"]["tcdm"][
        "size"] * 1024  # config is in KiB
    cluster_periph_size = occamy.cfg["cluster"]["cluster_periph_size"] * 1024
    cluster_zero_mem_size = occamy.cfg["cluster"]["zero_mem_size"] * 1024

    # assert memory region allocation
    error_str = "ERROR: cluster peripherals, zero memory and tcdm do not fit into the allocated memory region!!!"
    assert (cluster_tcdm_size + cluster_periph_size +
            cluster_zero_mem_size) <= cluster_base_offset, error_str

    cluster_base_addr = occamy.cfg["cluster"]["cluster_base_addr"]
    quadrant_size = cluster_base_offset * nr_s1_clusters

    for i in range(nr_s1_quadrants):
        cluster_i_start_addr = cluster_base_addr + i * quadrant_size

        am_clusters = list()
        for j in range(nr_s1_clusters):
            bases_cluster = list()
            bases_cluster.append(cluster_i_start_addr +
                                 j * cluster_base_offset + 0)
            am_clusters.append(
                am.new_leaf("quadrant_{}_cluster_{}_tcdm".format(i, j),
                            cluster_tcdm_size, *bases_cluster).attach_to(
                                am_wide_xbar_quadrant_s1[i]).attach_to(
                                    am_narrow_xbar_quadrant_s1[i]))

            bases_cluster = list()
            bases_cluster.append(cluster_i_start_addr +
                                 j * cluster_base_offset + cluster_tcdm_size)
            am_clusters.append(
                am.new_leaf("quadrant_{}_cluster_{}_periph".format(i, j),
                            cluster_periph_size, *bases_cluster).attach_to(
                                am_wide_xbar_quadrant_s1[i]).attach_to(
                                    am_narrow_xbar_quadrant_s1[i]))

            bases_cluster = list()
            bases_cluster.append(cluster_i_start_addr +
                                 j * cluster_base_offset + cluster_tcdm_size +
                                 cluster_periph_size)
            am_clusters.append(
                am.new_leaf("quadrant_{}_cluster_{}_zero_mem".format(i, j),
                            cluster_zero_mem_size, *bases_cluster).attach_to(
                                am_wide_xbar_quadrant_s1[i]).attach_to(
                                    am_narrow_xbar_quadrant_s1[i]))

        am.new_leaf(
            "quad_{}_cfg".format(i),
            occamy.cfg["s1_quadrant"]["cfg_base_offset"],
            occamy.cfg["s1_quadrant"]["cfg_base_addr"] +
            i * occamy.cfg["s1_quadrant"]["cfg_base_offset"]).attach_to(
                am_narrow_xbar_quadrant_s1[i]).attach_to(am_soc_narrow_xbar)

    #################
    # AM: Crossbars #
    #################
    # Connect quadrants AXI xbar
    for i in range(nr_s1_quadrants):
        am_narrow_xbar_quadrant_s1[i].attach(am_wide_xbar_quadrant_s1[i])
        am_wide_xbar_quadrant_s1[i].attach(am_quadrant_pre_xbar[i])
        am_soc_narrow_xbar.attach(am_narrow_xbar_quadrant_s1[i])
        am_quadrant_inter_xbar.attach(am_wide_xbar_quadrant_s1[i])

    # Connect quadrant inter xbar
    am_soc_wide_xbar.attach(am_quadrant_inter_xbar)
    am_quadrant_inter_xbar.attach(am_soc_wide_xbar)
    for i in range(nr_s1_quadrants):
        am_quadrant_pre_xbar[i].attach(am_quadrant_inter_xbar)

    # Connect HBM xbar masters (memory slaves already attached)
    am_soc_wide_xbar.attach(am_hbm_xbar)
    for i in range(nr_s1_quadrants):
        am_quadrant_pre_xbar[i].attach(am_hbm_xbar)

    # Connect narrow xbar
    am_soc_narrow_xbar.attach(am_soc_axi_lite_periph_xbar)
    am_soc_narrow_xbar.attach(am_soc_regbus_periph_xbar)
    am_soc_narrow_xbar.attach(am_soc_wide_xbar)

    am_soc_axi_lite_periph_xbar.attach(am_soc_narrow_xbar)

    # Connect wide xbar
    am_soc_wide_xbar.attach(am_soc_narrow_xbar)

    # Connect HBM config xbar to regbus xbar
    am_soc_regbus_periph_xbar.attach(am_hbm_cfg_xbar)

    #######################
    # SoC Peripheral Xbar #
    #######################
    # AXI-Lite
    soc_axi_lite_periph_xbar = solder.AxiLiteXbar(
        48,
        64,
        name="soc_axi_lite_periph_xbar",
        clk="clk_periph_i",
        rst="rst_periph_ni",
        node=am_soc_axi_lite_periph_xbar)

    soc_axi_lite_periph_xbar.add_input("soc")
    soc_axi_lite_periph_xbar.add_output_entry("soc", am_soc_narrow_xbar)

    # connect AXI lite peripherals
    for p in range(nr_axi_lite_peripherals):
        soc_axi_lite_periph_xbar.add_input(
            occamy.cfg["peripherals"]["axi_lite_peripherals"][p]["name"])
        soc_axi_lite_periph_xbar.add_output_entry(
            occamy.cfg["peripherals"]["axi_lite_peripherals"][p]["name"],
            am_axi_lite_peripherals[p])

    ###############
    # HBM control #
    ###############
    hbm_cfg_xbar = solder.RegBusXbar(
        48,
        32,
        context="hbm_ctrl",
        name="hbm_cfg_xbar",
        # Use internal clock and reset
        clk="cfg_clk",
        rst="cfg_rst_n",
        node=am_hbm_cfg_xbar)

    for name, region in occamy.cfg["hbm"]["cfg_regions"].items():
        leaf = am.new_leaf(f"hbm_cfg_{name}", region["length"],
                           region["address"]).attach_to(am_hbm_cfg_xbar)
        hbm_cfg_xbar.add_output_entry(name, leaf)

    hbm_cfg_xbar.add_input("cfg")

    ##########
    # RegBus #
    ##########
    soc_regbus_periph_xbar = solder.RegBusXbar(48,
                                               32,
                                               name="soc_regbus_periph_xbar",
                                               clk="clk_periph_i",
                                               rst="rst_periph_ni",
                                               node=am_soc_regbus_periph_xbar)

    soc_regbus_periph_xbar.add_input("soc")

    # connect Regbus peripherals
    for p in range(nr_regbus_peripherals):
        soc_regbus_periph_xbar.add_output_entry(
            occamy.cfg["peripherals"]["regbus_peripherals"][p]["name"],
            am_regbus_peripherals[p])

    # add bootrom and clint separately
    soc_regbus_periph_xbar.add_output_entry("bootrom", am_bootrom)
    soc_regbus_periph_xbar.add_output_entry("clint", am_clint)

    # add hbm cfg xbar separately
    soc_regbus_periph_xbar.add_output_entry("hbm_cfg", am_hbm_cfg_xbar)

    ##################
    # SoC Wide Xbars #
    ##################

    # Quadrant pre xbars
    # Each connects one quadrant master to the HBM and quadrant xbars
    quadrant_pre_xbars = list()
    for i in range(nr_s1_quadrants):
        quadrant_pre_xbar = solder.AxiXbar(
            48,
            512,
            occamy.cfg["pre_xbar_slv_id_width_no_rocache"] +
            (1 if occamy.cfg["s1_quadrant"].get("ro_cache_cfg") else 0),
            name="quadrant_pre_xbar_{}".format(i),
            clk="clk_i",
            rst="rst_ni",
            max_slv_trans=occamy.cfg["quadrant_pre_xbar"]["max_slv_trans"],
            max_mst_trans=occamy.cfg["quadrant_pre_xbar"]["max_mst_trans"],
            fall_through=occamy.cfg["quadrant_pre_xbar"]["fall_through"],
            no_loopback=True,
            atop_support=False,
            context="soc",
            node=am_quadrant_pre_xbar[i])

        # Default port:
        quadrant_pre_xbar.add_output_entry("quadrant_inter_xbar",
                                           am_quadrant_inter_xbar)
        quadrant_pre_xbar.add_output_entry("hbm_xbar", am_hbm_xbar)
        quadrant_pre_xbar.add_input("quadrant")

        quadrant_pre_xbars.append(quadrant_pre_xbar)

    # Quadrant inter xbar
    # Connects all quadrant pre xbars to all quadrants, with additional wide xbar M/S pair
    quadrant_inter_xbar = solder.AxiXbar(
        48,
        512,
        quadrant_pre_xbars[0].iw_out(),
        name="quadrant_inter_xbar",
        clk="clk_i",
        rst="rst_ni",
        max_slv_trans=occamy.cfg["quadrant_inter_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["quadrant_inter_xbar"]["max_mst_trans"],
        fall_through=occamy.cfg["quadrant_inter_xbar"]["fall_through"],
        no_loopback=True,
        atop_support=False,
        context="soc",
        node=am_quadrant_inter_xbar)

    # Default port: soc wide xbar
    quadrant_inter_xbar.add_output_entry("wide_xbar", am_soc_wide_xbar)
    quadrant_inter_xbar.add_input("wide_xbar")
    for i in range(nr_s1_quadrants):
        # Default route passes HBI through quadrant 0
        # --> mask this route, forcing it through default wide xbar
        quadrant_inter_xbar.add_output_entry("quadrant_{}".format(i),
                                             am_wide_xbar_quadrant_s1[i])
        quadrant_inter_xbar.add_input("quadrant_{}".format(i))
    for i, rq in enumerate(occamy.cfg["remote_quadrants"]):
        quadrant_inter_xbar.add_input("rmq_{}".format(i))
        quadrant_inter_xbar.add_output_entry("rmq_{}".format(i),
                                             am_remote_quadrants[i])
    # Connection from remote
    if is_remote_quadrant:
        quadrant_inter_xbar.add_output("remote", [])
        quadrant_inter_xbar.add_input("remote")

    hbm_xbar = solder.AxiXbar(
        48,
        512,
        quadrant_pre_xbars[0].iw_out(),
        name="hbm_xbar",
        clk="clk_i",
        rst="rst_ni",
        max_slv_trans=occamy.cfg["hbm_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["hbm_xbar"]["max_mst_trans"],
        fall_through=occamy.cfg["hbm_xbar"]["fall_through"],
        no_loopback=True,
        atop_support=False,
        context="soc",
        node=am_hbm_xbar)

    # Default port: HBM 0
    for i in range(nr_hbm_channels):
        hbm_xbar.add_output_entry("hbm_{}".format(i), am_hbm[i])
    for i in range(nr_s1_quadrants):
        hbm_xbar.add_input("quadrant_{}".format(i))
    hbm_xbar.add_input("wide_xbar")

    soc_wide_xbar = solder.AxiXbar(
        48,
        512,
        # This is the cleanest solution minimizing ID width conversions
        quadrant_pre_xbars[0].iw,
        name="soc_wide_xbar",
        clk="clk_i",
        rst="rst_ni",
        max_slv_trans=occamy.cfg["wide_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["wide_xbar"]["max_mst_trans"],
        fall_through=occamy.cfg["wide_xbar"]["fall_through"],
        no_loopback=True,
        atop_support=False,
        context="soc",
        node=am_soc_wide_xbar)

    # Default port: HBI (always escalate "upwards" in hierarchy -> off-chip)
    if not is_remote_quadrant:
        soc_wide_xbar.add_output_entry("hbi", am_hbi)
    soc_wide_xbar.add_output_entry("hbm_xbar", am_hbm_xbar)
    soc_wide_xbar.add_output_entry("quadrant_inter_xbar",
                                   am_quadrant_inter_xbar)
    soc_wide_xbar.add_output_entry("soc_narrow", am_soc_narrow_xbar)
    soc_wide_xbar.add_input("hbi")
    soc_wide_xbar.add_input("quadrant_inter_xbar")
    soc_wide_xbar.add_input("soc_narrow")
    soc_wide_xbar.add_input("sys_idma_mst")

    ###################
    # SoC Narrow Xbar #
    ###################
    soc_narrow_xbar = solder.AxiXbar(
        48,
        64,
        occamy.cfg["narrow_xbar_slv_id_width"],
        name="soc_narrow_xbar",
        clk="clk_i",
        rst="rst_ni",
        max_slv_trans=occamy.cfg["narrow_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["narrow_xbar"]["max_mst_trans"],
        fall_through=occamy.cfg["narrow_xbar"]["fall_through"],
        no_loopback=True,
        context="soc",
        node=am_soc_narrow_xbar)

    for i in range(nr_s1_quadrants):
        soc_narrow_xbar.add_output_symbolic_multi(
            "s1_quadrant_{}".format(i),
            [("s1_quadrant_base_addr", "S1QuadrantAddressSpace"),
             ("s1_quadrant_cfg_base_addr", "S1QuadrantCfgAddressSpace")])
        soc_narrow_xbar.add_input("s1_quadrant_{}".format(i))

    soc_narrow_xbar.add_input("cva6")
    soc_narrow_xbar.add_input("soc_wide")
    soc_narrow_xbar.add_input("periph")
    soc_narrow_xbar.add_input("pcie")
    soc_narrow_xbar.add_input("hbi")
    dts.add_cpu("eth,ariane")

    # Default port: wide xbar
    soc_narrow_xbar.add_output_entry("soc_wide", am_soc_wide_xbar)
    if not is_remote_quadrant:
        soc_narrow_xbar.add_output_entry("hbi", am_hbi)
    soc_narrow_xbar.add_output_entry("periph", am_soc_axi_lite_periph_xbar)
    soc_narrow_xbar.add_output_entry("spm", am_spm)
    soc_narrow_xbar.add_output_entry("sys_idma_cfg", am_sys_idma_cfg)
    soc_narrow_xbar.add_output_entry("regbus_periph",
                                     am_soc_regbus_periph_xbar)
    soc_narrow_xbar.add_output_entry("pcie", am_pcie)
    for i, rq in enumerate(occamy.cfg["remote_quadrants"]):
        soc_narrow_xbar.add_input("rmq_{}".format(i))
        soc_narrow_xbar.add_output_entry("rmq_{}".format(i),
                                         am_remote_quadrants[i])
    # Connection from remote
    if is_remote_quadrant:
        soc_narrow_xbar.add_output("remote", [])
        soc_narrow_xbar.add_input("remote")

    ##########################
    # S1 Quadrant controller #
    ##########################

    # We need 3 "crossbars", which are really simple muxes and demuxes
    quadrant_s1_ctrl_xbars = dict()
    for name, (iw, lm) in {
            'soc_to_quad':
        (soc_narrow_xbar.iw_out(), "axi_pkg::CUT_SLV_PORTS"),
            'quad_to_soc': (soc_narrow_xbar.iw, "axi_pkg::CUT_MST_PORTS"),
    }.items():
        # Reuse (preserve) narrow Xbar IDs and max transactions
        quadrant_s1_ctrl_xbars[name] = solder.AxiXbar(
            48,
            64,
            iw,
            name="quadrant_s1_ctrl_{}_xbar".format(name),
            clk="clk_i",
            rst="rst_ni",
            max_slv_trans=occamy.cfg["narrow_xbar"]["max_slv_trans"],
            max_mst_trans=occamy.cfg["narrow_xbar"]["max_mst_trans"],
            fall_through=occamy.cfg["narrow_xbar"]["fall_through"],
            latency_mode=lm,
            context="quadrant_s1_ctrl")

    for name in ['soc_to_quad', 'quad_to_soc']:
        quadrant_s1_ctrl_xbars[name].add_output("out", [])
        quadrant_s1_ctrl_xbars[name].add_input("in")
        quadrant_s1_ctrl_xbars[name].add_output_symbolic(
            "internal", "internal_xbar_base_addr", "S1QuadrantCfgAddressSpace")

    # AXI Lite mux to combine register requests
    quadrant_s1_ctrl_mux = solder.AxiLiteXbar(
        48,
        32,
        name="quadrant_s1_ctrl_mux",
        clk="clk_i",
        rst="rst_ni",
        max_slv_trans=occamy.cfg["narrow_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["narrow_xbar"]["max_mst_trans"],
        fall_through=False,
        latency_mode="axi_pkg::CUT_MST_PORTS",
        context="quadrant_s1_ctrl")

    quadrant_s1_ctrl_mux.add_output("out", [(0, (1 << 48) - 1)])
    quadrant_s1_ctrl_mux.add_input("soc")
    quadrant_s1_ctrl_mux.add_input("quad")

    ################
    # S1 Quadrants #
    ################
    # Dummy entries to generate associated types.
    wide_xbar_quadrant_s1 = solder.AxiXbar(
        48,
        512,
        occamy.cfg["s1_quadrant"]["wide_xbar_slv_id_width"],
        name="wide_xbar_quadrant_s1",
        clk="clk_quadrant",
        rst="rst_quadrant_n",
        max_slv_trans=occamy.cfg["s1_quadrant"]["wide_xbar"]["max_slv_trans"],
        max_mst_trans=occamy.cfg["s1_quadrant"]["wide_xbar"]["max_mst_trans"],
        fall_through=occamy.cfg["s1_quadrant"]["wide_xbar"]["fall_through"],
        no_loopback=True,
        atop_support=False,
        context="quadrant_s1",
        node=am_wide_xbar_quadrant_s1[0])

    narrow_xbar_quadrant_s1 = solder.AxiXbar(
        48,
        64,
        occamy.cfg["s1_quadrant"]["narrow_xbar_slv_id_width"],
        name="narrow_xbar_quadrant_s1",
        clk="clk_quadrant",
        rst="rst_quadrant_n",
        max_slv_trans=occamy.cfg["s1_quadrant"]["narrow_xbar"]
        ["max_slv_trans"],
        max_mst_trans=occamy.cfg["s1_quadrant"]["narrow_xbar"]
        ["max_mst_trans"],
        fall_through=occamy.cfg["s1_quadrant"]["narrow_xbar"]["fall_through"],
        no_loopback=True,
        context="quadrant_s1")

    wide_xbar_quadrant_s1.add_output("top", [])
    wide_xbar_quadrant_s1.add_input("top")

    narrow_xbar_quadrant_s1.add_output("top", [])
    narrow_xbar_quadrant_s1.add_input("top")

    for i in range(nr_s1_clusters):
        wide_xbar_quadrant_s1.add_output_symbolic("cluster_{}".format(i),
                                                  "cluster_base_addr",
                                                  "ClusterAddressSpace")

        wide_xbar_quadrant_s1.add_input("cluster_{}".format(i))
        narrow_xbar_quadrant_s1.add_output_symbolic("cluster_{}".format(i),
                                                    "cluster_base_addr",
                                                    "ClusterAddressSpace")
        narrow_xbar_quadrant_s1.add_input("cluster_{}".format(i))

    # remote downstream mux
    rmq_mux = [None] * max(nr_remote_quadrants, 1 if is_remote_quadrant else 0)
    rmq_demux = [None] * max(nr_remote_quadrants,
                             1 if is_remote_quadrant else 0)
    for i in range(max(nr_remote_quadrants, 1 if is_remote_quadrant else 0)):
        rmq_mux[i] = solder.AxiMux(48,
                                   512,
                                   4,
                                   max_w_trans=occamy.cfg["txns"]["rmq"],
                                   context="xilinx_wrapper",
                                   name="rmq_mux_{}".format(i),
                                   clk="clk_i",
                                   rst="rst_ni")
        rmq_mux[i].add_input("narrow")
        rmq_mux[i].add_input("wide")
        rmq_demux[i] = solder.AxiDemux(48,
                                       512,
                                       5,
                                       "rmq_demux_awsel[{}]".format(i),
                                       "rmq_demux_arsel[{}]".format(i),
                                       max_trans=occamy.cfg["txns"]["rmq"],
                                       look_bits=3,
                                       context="xilinx_wrapper",
                                       name="rmq_demux_{}".format(i),
                                       clk="clk_i",
                                       rst="rst_ni")
        rmq_demux[i].add_output("narrow")
        rmq_demux[i].add_output("wide")

    # Generate the Verilog code.
    solder.render()

    ###############
    # HBM APB CTL #
    ###############
    if is_remote_quadrant:
        apb_hbm_cfg = None
    else:
        apb_hbm_cfg = solder.ApbBus(clk=soc_regbus_periph_xbar.clk,
                                    rst=soc_regbus_periph_xbar.rst,
                                    aw=soc_regbus_periph_xbar.aw,
                                    dw=soc_regbus_periph_xbar.dw,
                                    name="apb_hbm_cfg")

    kwargs = {
        "solder": solder,
        "util": util,
        "args": args,
        "name": args.name,
        "soc_narrow_xbar": soc_narrow_xbar,
        "soc_wide_xbar": soc_wide_xbar,
        "quadrant_pre_xbars": quadrant_pre_xbars,
        "hbm_xbar": hbm_xbar,
        "quadrant_inter_xbar": quadrant_inter_xbar,
        "quadrant_s1_ctrl_xbars": quadrant_s1_ctrl_xbars,
        "quadrant_s1_ctrl_mux": quadrant_s1_ctrl_mux,
        "wide_xbar_quadrant_s1": wide_xbar_quadrant_s1,
        "narrow_xbar_quadrant_s1": narrow_xbar_quadrant_s1,
        "soc_regbus_periph_xbar": soc_regbus_periph_xbar,
        "hbm_cfg_xbar": hbm_cfg_xbar,
        "apb_hbm_cfg": apb_hbm_cfg,
        "cfg": occamy.cfg,
        "cores": (nr_s1_quadrants * nr_s1_clusters * nr_cluster_cores +
                  nr_remote_cores + 1),
        "lcl_cores": (nr_s1_quadrants * nr_s1_clusters * nr_cluster_cores +
                      (0 if is_remote_quadrant else 1)),
        "remote_quadrants": occamy.cfg["remote_quadrants"],
        "is_remote_quadrant": occamy.cfg["is_remote_quadrant"],
        "nr_s1_quadrants": nr_s1_quadrants,
        "nr_remote_quadrants": nr_remote_quadrants,
        "nr_s1_clusters": nr_s1_clusters,
        "nr_cluster_cores": nr_cluster_cores,
        "hbm_channel_size": hbm_channel_size,
        "nr_hbm_channels": nr_hbm_channels,
        "rmq_mux": rmq_mux,
        "rmq_demux": rmq_demux,
    }

    # Emit the code.
    #############
    # Top-Level #
    #############
    write_template(args.top_sv,
                   outdir,
                   fname="{}_top.sv".format(args.name),
                   module=solder.code_module['default'],
                   soc_periph_xbar=soc_axi_lite_periph_xbar,
                   **kwargs)

    ###########################
    # SoC (fully synchronous) #
    ###########################
    write_template(args.soc_sv,
                   outdir,
                   module=solder.code_module['soc'],
                   soc_periph_xbar=soc_axi_lite_periph_xbar,
                   **kwargs)

    ##########################
    # S1 Quadrant controller #
    ##########################
    write_template(args.quadrant_s1_ctrl,
                   outdir,
                   module=solder.code_module['quadrant_s1_ctrl'],
                   **kwargs)

    ###############
    # S1 Quadrant #
    ###############
    if nr_s1_quadrants > 0:
        write_template(args.quadrant_s1,
                       outdir,
                       fname="{}_quadrant_s1.sv".format(args.name),
                       module=solder.code_module['quadrant_s1'],
                       **kwargs)
    else:
        tpl_path = args.quadrant_s1
        if tpl_path:
            tpl_path = pathlib.Path(tpl_path).absolute()
            if tpl_path.exists():
                print(outdir, args.name)
                with open("{}/{}_quadrant_s1.sv".format(outdir, args.name),
                          'w') as f:
                    f.write("// no quadrants in this design")

    ##################
    # Xilinx Wrapper #
    ##################
    has_rmq_code = nr_remote_quadrants > 0 or is_remote_quadrant
    write_template(
        args.xilinx_sv,
        outdir,
        fname="{}_xilinx.sv".format(args.name),
        module=solder.code_module['xilinx_wrapper'] if has_rmq_code else "",
        **kwargs)
    ###########
    # Package #
    ###########
    write_template(args.pkg_sv, outdir, **kwargs, package=solder.code_package)

    ###############
    # Testharness #
    ###############
    write_template(args.testharness_sv, outdir, **kwargs)

    ################
    # CVA6 Wrapper #
    ################
    write_template(args.cva6_sv, outdir, **kwargs)

    ###################
    # Generic CHEADER #
    ###################
    write_template(args.cheader, outdir, **kwargs)

    ###################
    # ADDRMAP CHEADER #
    ###################
    if args.am_cheader:
        with open(args.am_cheader, "w") as file:
            file.write(am.print_cheader())

    ###############
    # ADDRMAP CSV #
    ###############
    if args.am_csv:
        with open(args.am_csv, 'w', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            am.print_csv(csv_writer)

    ###############
    # HBM control #
    ###############
    write_template(args.hbm_ctrl,
                   outdir,
                   module=solder.code_module['hbm_ctrl'],
                   **kwargs)

    ############
    # CHIP TOP #
    ############
    write_template(args.chip, outdir, **kwargs)

    ############
    # BOOTDATA #
    ############
    write_template(args.bootdata, outdir, **kwargs)

    #######
    # DTS #
    #######
    # TODO(niwis, zarubaf): We probably need to think about generating a
    # couple of different systems here. I can think of at least three in that
    # context:
    # 1. RTL sim
    # 2. FPGA
    # 3. (ASIC) in the private wrapper repo
    # I think we have all the necessary ingredients for this. What is missing:
    # - Create a second (/third) configuration file.
    # - Generate the RTL into dedicated directories.
    # - (Manually) adapt the `Bender.yml` to include the appropriate files.
    htif = dts.add_node("htif", "ucb,htif0")
    dts.add_chosen("stdout-path = \"{}\";".format(htif))

    if args.dts:
        # TODO(zarubaf): Figure out whether there are any requirements on the
        # model and compatibility.
        dts_str = dts.emit("eth,occamy-dev", "eth,occamy")
        with open(args.dts, "w") as file:
            file.write(dts_str)
        # Compile to DTB and save to a file with `.dtb` extension.
        with open(pathlib.Path(args.dts).with_suffix(".dtb"), "wb") as file:
            # Pass argv as a plain list; the previous shell=True form handed
            # the file argument to the shell instead of to dtc.
            run(["dtc", args.dts], stdout=file)

    # Emit the address map as a dot file if requested.
    if args.graph:
        with open(args.graph, "w") as file:
            file.write(am.render_graphviz())
Exemple #37
0
def mapTemplate(template):
    # Reconstructed header: this example is truncated here; the body clearly
    # builds a dict from the given template's fields.
    d = {}
    d['flvHeight'] = template['flvHeight']
    d['flvWidth'] = template['flvWidth']
    d['walkingPeopleFlvPath'] = template['walkingPeopleFlvPath']
    d['thumbnail'] = template['thumbnail']
    d['nodes'] = mapNodes(template['nodes'])
    return d


# Get a virtual room
url = "https://demo.6connexlocal.com/controlpanel/canvas-trendy/virtual-builder/room/22"
session = requests.Session()
session.auth = ('*****@*****.**', 'qwertyui1!')
session.verify = 'cert.pem'
response = session.get(url)

virtualroom = hjson.loads(response.text)

# Mapping of virtual room json output to input needed by creation endpoint
creationInput = {}
creationInput['settings'] = virtualroom['settings']
# Override the identifying settings fields to avoid colliding with the source room:
creationInput['settings']['description'] = 'NewBooth'
creationInput['settings']['name'] = 'NewBooth'
creationInput['settings']['locationKey'] = 'NewBooth'

creationInput['template'] = mapTemplate(virtualroom['template'])

print(json.dumps(creationInput, indent=2, sort_keys=False))

# Create a virtual room using the creation endpoint
curl = "https://demo.6connexlocal.com/controlpanel/canvas-trendy/virtual-builder/"
Exemple #38
0
def main():
    format = 'hjson'
    verbose = 0

    parser = argparse.ArgumentParser(
        prog="regtool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESC)
    parser.add_argument('input',
                        nargs='?',
                        metavar='file',
                        type=argparse.FileType('r'),
                        default=sys.stdin,
                        help='input file in Hjson format')
    parser.add_argument('-d',
                        action='store_true',
                        help='Output register documentation (html)')
    parser.add_argument('--cdefines',
                        '-D',
                        action='store_true',
                        help='Output C defines header')
    parser.add_argument('--ctdefines',
                        '-T',
                        action='store_true',
                        help='Output C defines header (Titan style)')
    parser.add_argument('--doc',
                        action='store_true',
                        help='Output source file documentation (gfm)')
    parser.add_argument('-j',
                        action='store_true',
                        help='Output as formatted JSON')
    parser.add_argument('-c', action='store_true', help='Output as compact JSON')
    parser.add_argument('-r',
                        action='store_true',
                        help='Output as SystemVerilog RTL')
    parser.add_argument('-s',
                        action='store_true',
                        help='Output as UVM Register class')
    parser.add_argument('--outdir', '-t',
                        help='Target directory for generated RTL; '
                             'tool uses ../rtl if blank.')
    parser.add_argument('--outfile',
                        '-o',
                        type=argparse.FileType('w'),
                        default=sys.stdout,
                        help='Target filename for json, html, gfm.')
    parser.add_argument('--verbose',
                        '-v',
                        action='store_true',
                        help='Verbose and run validate twice')
    parser.add_argument('--param',
                        '-p',
                        type=str,
                        default="",
                        help='''Change the Parameter values.
                                Only integer value is supported.
                                You can add multiple param arguments.

                                  Format: ParamA=ValA;ParamB=ValB
                                  ''')
    parser.add_argument('--version',
                        '-V',
                        action='store_true',
                        help='Show version')
    parser.add_argument('--novalidate',
                        action='store_true',
                        help='Skip validate, just output json')

    args = parser.parse_args()

    if args.version:
        version.show_and_exit(__file__, ["Hjson", "Mako"])

    verbose = args.verbose

    if args.j: format = 'json'
    elif args.c: format = 'compact'
    elif args.d: format = 'html'
    elif args.doc: format = 'doc'
    elif args.r: format = 'rtl'
    elif args.s: format = 'dv'
    elif args.cdefines: format = 'cdh'
    elif args.ctdefines: format = 'cth'

    if (verbose):
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    outfile = args.outfile

    infile = args.input

    params = args.param.split(';')
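    # e.g. --param 'ParamA=1;ParamB=2' yields ['ParamA=1', 'ParamB=2'],
    # matching the Format shown in the --param help text above.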

    if format == 'rtl':
        if args.outdir:
            outdir = args.outdir
        elif infile != sys.stdin:
            outdir = str(PurePath(infile.name).parents[1].joinpath("rtl"))
        else:
            # Using sys.stdin: there is no input path to derive an output
            # directory from, so exit rather than continue with outdir unbound.
            log.error("-r option cannot be used with pipe or stdin")
            sys.exit(1)
    elif format == 'dv':
        if args.outdir:
            outdir = args.outdir
        elif infile != sys.stdin:
            outdir = str(PurePath(infile.name).parents[1].joinpath("dv"))
        else:
            log.error("-s option cannot be used with pipe or stdin")
            sys.exit(1)
    else:
        # Other formats write to a file, not a directory.
        outdir = "."

    if format == 'doc':
        with outfile:
            gen_selfdoc.document(outfile)
        exit(0)

    with infile:
        try:
            srcfull = infile.read()
            obj = hjson.loads(srcfull,
                              use_decimal=True,
                              object_pairs_hook=validate.checking_dict)
        except ValueError as err:
            raise SystemExit(err)

    if args.novalidate:
        with outfile:
            gen_json.gen_json(obj, outfile, format)
            outfile.write('\n')
    elif (validate.validate(obj, params=params) == 0):
        if (verbose):
            log.info("Second validate pass (should show added optional keys)")
            validate.validate(obj, params=params)

        if format == 'rtl':
            gen_rtl.gen_rtl(obj, outdir)
            return 0
        if format == 'dv':
            gen_dv.gen_dv(obj, outdir)
            return 0

        src_lic = None
        src_copy = ''
        found_spdx = None
        found_lunder = None
        copy = re.compile(r'.*(copyright.*)|(.*\(c\).*)', re.IGNORECASE)
        spdx = re.compile(r'.*(SPDX-License-Identifier:.+)')
        lunder = re.compile(r'.*(Licensed under.+)', re.IGNORECASE)
        for line in srcfull.splitlines():
            mat = copy.match(line)
            if mat is not None:
                # Either alternative of the regex may have matched; the old
                # `+= mat.group(1)` raised TypeError on "(c)"-style lines.
                src_copy += mat.group(1) or mat.group(2) or ''
            mat = spdx.match(line)
            if mat is not None:
                found_spdx = mat.group(1)
            mat = lunder.match(line)
            if mat is not None:
                found_lunder = mat.group(1)
        if found_lunder:
            src_lic = found_lunder
        if found_spdx:
            # found_lunder may be absent, in which case src_lic is still None.
            src_lic = (src_lic + '\n' + found_spdx) if src_lic else found_spdx

        with outfile:
            if format == 'html':
                gen_html.gen_html(obj, outfile)
            elif format == 'cdh':
                gen_cheader.gen_cdefines(obj, outfile, src_lic, src_copy)
            elif format == 'cth':
                gen_ctheader.gen_cdefines(obj, outfile, src_lic, src_copy)
            else:
                gen_json.gen_json(obj, outfile, format)

            outfile.write('\n')
Exemple #39
0
def getPlayerRoomID(id: int):
    return players[id]["room"]


def getPlayerRoom(id: int) -> dict:
    return rooms[getPlayerRoomID(id)]


def setPlayerRoom(id: int, roomID: str) -> dict:
    players[id]["room"] = roomID
    return rooms[roomID]


# structure defining the rooms in the game. Try adding more rooms to the game!
rooms = hjson.loads(Path("data/rooms.hjson").read_text())
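# For illustration only: a rooms.hjson entry is assumed to look roughly like
# the following (key names are guesses, not taken from this example):
#
#   Tavern: {
#     description: You're in a cozy tavern warmed by an open fire.
#     exits: { outside: Outside }
#   }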

# stores the players in the game
players = {}

# start the server
mud = MudServer()


def processNewPlayers():
    for id in mud.get_new_players():
        # add the new player to the dictionary, noting that they've not been
        # named yet.
        # The dictionary key is the player's id number. We set their room to
        # None initially until they have entered a name
        # Try adding more player stats - level, gold, inventory, etc
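        # A hedged sketch of what this loop body likely does, reconstructed
        # from the comment above; send_message is assumed to be part of the
        # MudServer API alongside get_new_players.
        players[id] = {"name": None, "room": None}
        mud.send_message(id, "What is your name?")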
 def test_parse(self):
     # test in/out equivalence and parsing
     res = json.loads(JSON)
     out = json.dumpsJSON(res)
     self.assertEqual(res, json.loads(out))
def get_prices(instanceType, OS, AMI, performance):

    req = urllib.request.Request(
        AMI,
        data=None,
        headers={
            'User-Agent':
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:73.0) Gecko/20100101 Firefox/73.0'
        })

    page = urllib.request.urlopen(req)
    page_content = page.read().decode('utf-8')
    # Slice out the CDATA block that carries the embedded marketplace state,
    # then the product id and the `awsmpInitialState` object inside it.
    cd_start = page_content.find('/*<![CDATA[*/') + 13
    cd = page_content[cd_start:page_content.find('/*]]>*/', cd_start)]
    pid_start = cd.find('productId: "') + 12
    productId = cd[pid_start:cd.find('"', pid_start)]
    state_start = cd.find('var awsmpInitialState = ') + 24
    hjson_string = cd[state_start:cd.find('};', state_start) + 1]
    hjson_dict = hjson.loads(hjson_string)

    regions = hjson_dict['offerIon']['offer']['pricing']['regions']
    for region in regions:
        rows = (regions[region][productId]['hourly']['displayElements']
                ['properties']['rows'])
        for row in rows:
            if row['instanceType'] == instanceType:
                hourly_rate = float(row['totalRate'])
                instanceRegion = instanceType + region
                try:
                    # Each results row is [instanceRegion, instanceType,
                    # region, windows_hourly, linux_hourly, windows_100M,
                    # linux_100M]; look the row up by its first entry.
                    idx = [row[0] for row in results].index(instanceRegion)
                    if OS == 'Windows':
                        results[idx][3] = hourly_rate
                        results[idx][5] = round(
                            (100000000 / performance) * (hourly_rate / 3600),
                            3)
                    else:
                        results[idx][4] = hourly_rate
                        results[idx][6] = round(
                            (100000000 / performance) * (hourly_rate / 3600),
                            3)
                except ValueError:
                    # First sighting of this instance/region pair: append.
                    if OS == 'Windows':
                        results.append([
                            instanceRegion, instanceType, region, hourly_rate,
                            '',
                            round((100000000 / performance) *
                                  (hourly_rate / 3600), 3), ''
                        ])
                    else:
                        results.append([
                            instanceRegion, instanceType, region, '',
                            hourly_rate, '',
                            round((100000000 / performance) *
                                  (hourly_rate / 3600), 3)
                        ])
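# Hypothetical usage sketch; the instance type, listing URL, and performance
# figure are made up, and `results` is assumed to be a module-level list that
# get_prices appends to or updates in place:
#
#   results = []
#   get_prices('m5.large', 'Linux',
#              'https://aws.amazon.com/marketplace/pp/prodview-example', 1000)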
Exemple #42
0
def main(argv):
    parser = argparse.ArgumentParser(prog="vendor", description=DESC)
    parser.add_argument(
        '--update',
        '-U',
        dest='update',
        action='store_true',
        help='Update locked version of repository with upstream changes')
    parser.add_argument('--refresh-patches',
                        action='store_true',
                        help='Refresh the patches from the patch repository')
    parser.add_argument('--commit',
                        '-c',
                        action='store_true',
                        help='Commit the changes')
    parser.add_argument('desc_file',
                        metavar='file',
                        type=argparse.FileType('r', encoding='UTF-8'),
                        help='vendoring description file (*.vendor.hjson)')
    parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    args = parser.parse_args()

    global verbose
    verbose = args.verbose
    if (verbose):
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    desc_file_path = Path(args.desc_file.name).resolve()
    vendor_file_base_dir = desc_file_path.parent

    # Precondition: Ensure description file matches our naming rules
    if not str(desc_file_path).endswith('.vendor.hjson'):
        log.fatal("Description file names must have a .vendor.hjson suffix.")
        raise SystemExit(1)

    # Precondition: Check for a clean working directory when commit is requested
    if args.commit:
        if not git_is_clean_workdir(vendor_file_base_dir):
            log.fatal("A clean git working directory is required for "
                      "--commit/-c. git stash your changes and try again.")
            raise SystemExit(1)

    # Load description file
    try:
        desc = hjson.loads(args.desc_file.read(), use_decimal=True)
    except ValueError as err:
        raise SystemExit(err)
    desc['_base_dir'] = vendor_file_base_dir

    desc_file_stem = desc_file_path.name.rsplit('.', 2)[0]
    lock_file_path = desc_file_path.with_name(desc_file_stem + '.lock.hjson')
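    # e.g. "ip.vendor.hjson" -> stem "ip" -> lock file "ip.lock.hjson"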

    # Importing may use the lock file's upstream information, so make a copy
    # now which we can overwrite with the upstream information from the lock
    # file.
    import_desc = desc.copy()

    # Load lock file contents (if possible)
    try:
        with open(str(lock_file_path), 'r') as f:
            lock = hjson.loads(f.read(), use_decimal=True)

        # Use lock file information for import
        if not args.update:
            import_desc['upstream'] = lock['upstream'].copy()
    except FileNotFoundError:
        lock = None
        if not args.update:
            log.warning("Updating upstream repo as lock file %s not found.",
                        str(lock_file_path))
            args.update = True

    if args.refresh_patches:
        refresh_patches(import_desc)

    clone_dir = Path(tempfile.mkdtemp())
    try:
        # clone upstream repository
        upstream_new_rev = clone_git_repo(import_desc['upstream']['url'],
                                          clone_dir,
                                          rev=import_desc['upstream']['rev'])

        if not args.update:
            if upstream_new_rev != lock['upstream']['rev']:
                log.fatal(
                    "Revision mismatch. Unable to re-clone locked version of repository."
                )
                log.fatal("Attempted revision: %s",
                          import_desc['upstream']['rev'])
                log.fatal("Re-cloned revision: %s", upstream_new_rev)
                raise SystemExit(1)

        upstream_only_subdir = ''
        clone_subdir = clone_dir
        if 'only_subdir' in import_desc['upstream']:
            upstream_only_subdir = import_desc['upstream']['only_subdir']
            clone_subdir = clone_dir / upstream_only_subdir
            if not clone_subdir.is_dir():
                log.fatal("subdir '%s' does not exist in repo",
                          upstream_only_subdir)
                raise SystemExit(1)

        # apply patches to upstream sources
        if 'patch_dir' in import_desc:
            patches = path_resolve(import_desc['patch_dir'],
                                   vendor_file_base_dir).glob('*.patch')
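            # sorted() below makes application order deterministic, e.g.
            # 0001-foo.patch applies before 0002-bar.patch.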
            for patch in sorted(patches):
                log.info("Applying patch %s" % str(patch))
                apply_patch(clone_subdir, patch)

        # import selected (patched) files from upstream repo
        exclude_files = []
        if 'exclude_from_upstream' in import_desc:
            exclude_files += import_desc['exclude_from_upstream']
        exclude_files += EXCLUDE_ALWAYS

        import_from_upstream(
            clone_subdir,
            path_resolve(import_desc['target_dir'], vendor_file_base_dir),
            exclude_files)

        # get shortlog
        get_shortlog = bool(args.update)
        if lock is None:
            get_shortlog = False
            log.warning("No lock file %s: unable to summarize changes.",
                        str(lock_file_path))
        elif lock['upstream']['url'] != import_desc['upstream']['url']:
            get_shortlog = False
            log.warning(
                "The repository URL changed since the last run. Unable to get log of changes."
            )

        shortlog = None
        if get_shortlog:
            shortlog = produce_shortlog(clone_subdir, lock['upstream']['rev'],
                                        upstream_new_rev)

            # Ensure fully-qualified issue/PR references for GitHub repos
            gh_repo_info = github_parse_url(import_desc['upstream']['url'])
            if gh_repo_info:
                shortlog = github_qualify_references(shortlog, gh_repo_info[0],
                                                     gh_repo_info[1])

            log.info("Changes since the last import:\n" +
                     format_list_to_str(shortlog))

        # write lock file
        if args.update:
            lock = {}
            lock['upstream'] = import_desc['upstream'].copy()
            lock['upstream']['rev'] = upstream_new_rev
            with open(str(lock_file_path), 'w', encoding='UTF-8') as f:
                f.write(LOCK_FILE_HEADER)
                hjson.dump(lock, f)
                f.write("\n")
                log.info("Wrote lock file %s", str(lock_file_path))

        # Commit changes
        if args.commit:
            sha_short = git_get_short_rev(clone_subdir, upstream_new_rev)

            repo_info = github_parse_url(import_desc['upstream']['url'])
            if repo_info is not None:
                sha_short = "%s/%s@%s" % (repo_info[0], repo_info[1],
                                          sha_short)

            commit_msg_subject = 'Update %s to %s' % (import_desc['name'],
                                                      sha_short)
            subdir_msg = ' '
            if upstream_only_subdir:
                subdir_msg = ' subdir %s in ' % upstream_only_subdir
            intro = 'Update code from%supstream repository %s to revision %s' % (
                subdir_msg, import_desc['upstream']['url'], upstream_new_rev)
            commit_msg_body = textwrap.fill(intro, width=70)

            if shortlog:
                commit_msg_body += "\n\n"
                commit_msg_body += format_list_to_str(shortlog, width=70)

            commit_msg = commit_msg_subject + "\n\n" + commit_msg_body

            commit_paths = []
            commit_paths.append(
                path_resolve(import_desc['target_dir'], vendor_file_base_dir))
            if args.refresh_patches:
                commit_paths.append(
                    path_resolve(import_desc['patch_dir'],
                                 vendor_file_base_dir))
            commit_paths.append(lock_file_path)

            git_add_commit(vendor_file_base_dir, commit_paths, commit_msg)

    finally:
        shutil.rmtree(str(clone_dir), ignore_errors=True)

    log.info('Import finished')
 def _load_rules(mapping: str) -> OrderedDict:
     return hjson.loads(mapping)
 def test_ints_outside_range_fails(self):
     self.assertNotEqual(
         str(1 << 15),
         json.loads(json.dumpsJSON(1 << 15, int_as_string_bitcount=16)),
     )
Exemple #45
0
 def __init__(self, handle):
     data = hjson.loads(handle.read(), use_decimal=True)
     self.upstream = get_field(handle.name, 'at top-level', data, 'upstream',
                               constructor=lambda data: Upstream(handle.name, data))
Exemple #46
0
 def test_ints(self):
     for num in [1, long_type(1), 1 << 32, 1 << 64]:
         self.assertEqual(json.dumpsJSON(num), str(num))
         self.assertEqual(int(json.dumpsJSON(num)), num)
         self.assertEqual(json.loads(json.dumpsJSON(num)), num)
         self.assertEqual(json.loads(text_type(json.dumpsJSON(num))), num)
Exemple #47
0
 def test_degenerates_ignore(self):
     for f in (PosInf, NegInf, NaN):
         self.assertEqual(json.loads(json.dumpsJSON(f)), None)
Exemple #48
0
    def __init__(self, handle, desc_overrides):

        # Ensure description file matches our naming rules (otherwise we don't
        # know the name for the lockfile). This regex checks that we have the
        # right suffix and a nonempty name; the trailing $ keeps e.g.
        # "x.vendor.hjson.bak" from slipping through.
        if not re.match(r'.+\.vendor\.hjson$', handle.name):
            raise ValueError(
                "Description file names must have a .vendor.hjson suffix.")

        data = hjson.loads(handle.read(), use_decimal=True)
        where = 'at top-level'

        self.apply_overrides(data, desc_overrides)

        path = Path(handle.name)

        def take_path(p):
            return path.parent / p

        self.path = path
        self.name = get_field(path, where, data, 'name', expected_type=str)
        self.target_dir = get_field(path,
                                    where,
                                    data,
                                    'target_dir',
                                    expected_type=str,
                                    constructor=take_path)
        self.upstream = get_field(
            path,
            where,
            data,
            'upstream',
            constructor=lambda data: Upstream(path, data))
        self.patch_dir = get_field(path,
                                   where,
                                   data,
                                   'patch_dir',
                                   optional=True,
                                   expected_type=str,
                                   constructor=take_path)
        self.patch_repo = get_field(
            path,
            where,
            data,
            'patch_repo',
            optional=True,
            constructor=lambda data: PatchRepo(path, data))
        self.exclude_from_upstream = (get_field(path,
                                                where,
                                                data,
                                                'exclude_from_upstream',
                                                optional=True,
                                                expected_type=list) or [])
        self.mapping = get_field(
            path,
            where,
            data,
            'mapping',
            optional=True,
            expected_type=list,
            constructor=lambda data: Mapping.make(path, data))

        # Add default exclusions
        self.exclude_from_upstream += EXCLUDE_ALWAYS

        # It doesn't make sense to define a patch_repo, but not a patch_dir
        # (where should we put the patches that we get?)
        if self.patch_repo is not None and self.patch_dir is None:
            raise JsonError(path, 'Has patch_repo but not patch_dir.')

        # We don't currently support a patch_repo and a mapping (just because
        # we haven't written the code to generate the patches across subdirs
        # yet). Tracked in issue #2317.
        if self.patch_repo is not None and self.mapping is not None:
            raise JsonError(
                path, "vendor.py doesn't currently support patch_repo "
                "and mapping at the same time (see issue #2317).")

        # If a patch_dir is defined and there is no mapping, we will look in
        # that directory for patches and apply them in (the only) directory
        # that we copy stuff into.
        #
        # If there is a mapping, check that there is a patch_dir if and only
        # if at least one mapping entry uses it.
        if self.mapping is not None:
            if self.patch_dir is not None:
                if not self.mapping.has_patch_dir():
                    raise JsonError(
                        path, 'Has patch_dir, but no mapping item uses it.')
            else:
                if self.mapping.has_patch_dir():
                    raise JsonError(
                        path, 'Has a mapping item with a patch directory, '
                        'but there is no global patch_dir key.')

        # Check that exclude_from_upstream really is a list of strings. Most of
        # this type-checking is in the constructors for field types, but we
        # don't have a "ExcludeList" class, so have to do it explicitly here.
        for efu in self.exclude_from_upstream:
            if not isinstance(efu, str):
                raise JsonError(
                    path,
                    'exclude_from_upstream has entry {}, which is not a string.'
                    .format(efu))
 def test_empty_strings(self):
     self.assertEqual(json.loads('""'), "")
     self.assertEqual(json.loads(u'""'), u"")
     self.assertEqual(json.loads('[""]'), [""])
     self.assertEqual(json.loads(u'[""]'), [u""])
Exemple #50
0
 def json_data(self):
     "Returns the json_text as parsed json_data"
     return hjson.loads(self.json_text)
Exemple #51
0
 def test_constants(self):
     for c in [None, True, False]:
         self.assertTrue(json.loads(json.dumpsJSON(c)) is c)
         self.assertTrue(json.loads(json.dumpsJSON([c]))[0] is c)
         self.assertTrue(json.loads(json.dumpsJSON({'a': c}))['a'] is c)
Exemple #52
0
    def parse_product(self, response):
        product = response.meta['product']
        product['_subitem'] = True

        if self._parse_no_longer_available(response):
            product['no_longer_available'] = True
            return product
        else:
            product['no_longer_available'] = False

        cond_set(
            product, 'title',
            response.xpath(
                "//h1[contains(@class, 'product-title')]/text()").extract())
        brand = response.xpath("//h2[@itemprop='brand']/text()").extract()
        brand = ["".join(brand).strip()]
        cond_set(product, 'brand', brand)

        cond_set(
            product, 'image_url',
            response.xpath("//div[@class='product_mainimg']/img/@src |"
                           "//img[@id='mainImage']/@src").extract())

        cond_set(
            product, 'price',
            response.xpath("//div[@class='pricingReg']"
                           "/span[@itemprop='price']/text()").extract())

        reseller_id_regex = r"/(\d+)"
        reseller_id = re.findall(reseller_id_regex, response.url)
        reseller_id = reseller_id[0] if reseller_id else None
        cond_set_value(product, 'reseller_id', reseller_id)

        if product.get('price', None):
            if '$' not in product['price']:
                self.log('Unknown currency at %r' % response.url)
            else:
                product['price'] = Price(price=product['price'].replace(
                    ',', '').replace('$', '').strip(),
                                         priceCurrency='USD')

        if not product.get('price'):
            price = response.xpath(
                "//div[@class='pricingReg']"
                "/span[@itemprop='price']/text() |"
                "//div[contains(@class, 'pricingReg')]/span[@itemprop='price']"
            ).re(FLOATING_POINT_RGEX)
            if price:
                product["price"] = Price(priceCurrency="USD", price=price[0])

        try:
            product['model'] = response.css(
                '.product_details.modelNo ::text').extract()[0].replace(
                    'Model', '').replace('#', '').strip()
        except IndexError:
            pass

        internet_no = response.css('#product_internet_number ::text').extract()
        if internet_no:
            internet_no = internet_no[0]

        upc = is_empty(re.findall(r"ItemUPC='(\d+)'", response.body))
        if upc:
            product["upc"] = upc

        upc = response.xpath("//upc/text()").re(r'\d+')
        if upc:
            product["upc"] = upc[0]

        desc = response.xpath("//div[@id='product_description']"
                              "/div[contains(@class,'main_description')]"
                              "/descendant::*[text()]/text() |"
                              "//div[contains(@class, 'main_description')] |"
                              "//div[@id='product_description']").extract()
        desc = " ".join(l.strip() for l in desc if len(l.strip()) > 0)
        product['description'] = desc

        product['locale'] = "en-US"

        metadata = response.xpath(
            "//script[contains(text(),'PRODUCT_METADATA_JSON')]"
            "/text()").re('var PRODUCT_METADATA_JSON = (.*);')
        skus = []
        if metadata:
            metadata = metadata[0]
            jsmeta = hjson.loads(metadata)
            try:
                skus = [jsmeta["attributeDefinition"]["defaultSku"]]
                response.meta['skus'] = skus
                metaname = jsmeta['attributeDefinition']['attributeListing'][
                    0]['label']
                response.meta['attributelabel'] = metaname
            except (KeyError, IndexError):
                self.log("Incomplete data from Javascript.", DEBUG)

        certona_payload = self._gen_payload(response)

        if certona_payload:
            new_meta = response.meta.copy()
            new_meta['product'] = product
            new_meta['handle_httpstatus_list'] = [404, 415]
            new_meta['internet_no'] = internet_no
            headers = {
                'Proxy-Connection': 'keep-alive',
                'Content-Type': 'application/json'
            }
            return Request(
                self.RECOMMENDED_URL,
                callback=self._parse_related_products,
                headers=headers,
                body=json.dumps(certona_payload),
                method="POST",
                meta=new_meta,
                priority=1000,
            )

        if internet_no:
            return Request(
                url=self.REVIEWS_URL % internet_no,
                callback=self.parse_buyer_reviews,
                meta={"product": product},
                dont_filter=True,
            )

        return self._gen_variants_requests(response, product, skus,
                                           internet_no)
 def test_decimal(self):
     rval = json.loads('1.1', parse_float=decimal.Decimal)
     self.assertTrue(isinstance(rval, decimal.Decimal))
     self.assertEqual(rval, decimal.Decimal('1.1'))
Exemple #54
0
def main():
    verbose = 0

    parser = argparse.ArgumentParser(
        prog="regtool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESC)
    parser.add_argument('input',
                        nargs='?',
                        metavar='file',
                        type=argparse.FileType('r'),
                        default=sys.stdin,
                        help='input file in Hjson format')
    parser.add_argument('-d',
                        action='store_true',
                        help='Output register documentation (html)')
    parser.add_argument('--cdefines',
                        '-D',
                        action='store_true',
                        help='Output C defines header')
    parser.add_argument('--doc',
                        action='store_true',
                        help='Output source file documentation (gfm)')
    parser.add_argument('-j',
                        action='store_true',
                        help='Output as formatted JSON')
    parser.add_argument('-c', action='store_true', help='Output as compact JSON')
    parser.add_argument('-r',
                        action='store_true',
                        help='Output as SystemVerilog RTL')
    parser.add_argument('-s',
                        action='store_true',
                        help='Output as UVM Register class')
    parser.add_argument('-f',
                        action='store_true',
                        help='Output as FPV CSR rw assertion module')
    parser.add_argument('--outdir',
                        '-t',
                        help='Target directory for generated RTL; '
                        'tool uses ../rtl if blank.')
    parser.add_argument('--dv-base-prefix',
                        default='dv_base',
                        help='Prefix for the DV register classes from which '
                        'the register models are derived.')
    parser.add_argument('--outfile',
                        '-o',
                        type=argparse.FileType('w'),
                        default=sys.stdout,
                        help='Target filename for json, html, gfm.')
    parser.add_argument('--verbose',
                        '-v',
                        action='store_true',
                        help='Verbose and run validate twice')
    parser.add_argument('--param',
                        '-p',
                        type=str,
                        default="",
                        help='''Change the Parameter values.
                                Only integer value is supported.
                                You can add multiple param arguments.

                                  Format: ParamA=ValA;ParamB=ValB
                                  ''')
    parser.add_argument('--version',
                        '-V',
                        action='store_true',
                        help='Show version')
    parser.add_argument('--novalidate',
                        action='store_true',
                        help='Skip validate, just output json')

    args = parser.parse_args()

    if args.version:
        version.show_and_exit(__file__, ["Hjson", "Mako"])

    verbose = args.verbose
    if (verbose):
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    # Entries are pairs of the form (arg, (format, dirspec)).
    #
    # arg is the name of the argument that selects the format. format is the
    # name of the format. dirspec is None if the output is a single file; if
    # the output needs a directory, it is a default path relative to the source
    # file (used when --outdir is not given).
    arg_to_format = [('j', ('json', None)), ('c', ('compact', None)),
                     ('d', ('html', None)), ('doc', ('doc', None)),
                     ('r', ('rtl', 'rtl')), ('s', ('dv', 'dv')),
                     ('f', ('fpv', 'fpv/vip')), ('cdefines', ('cdh', None))]
    format = None
    dirspec = None
    for arg_name, spec in arg_to_format:
        if getattr(args, arg_name):
            if format is not None:
                log.error('Multiple output formats specified on '
                          'command line ({} and {}).'.format(format, spec[0]))
                sys.exit(1)
            format, dirspec = spec
    if format is None:
        format = 'hjson'

    infile = args.input
    params = args.param.split(';')

    # Define either outfile or outdir (but not both), depending on the output
    # format.
    outfile = None
    outdir = None
    if dirspec is None:
        if args.outdir is not None:
            log.error('The {} format expects an output file, '
                      'not an output directory.'.format(format))
            sys.exit(1)

        outfile = args.outfile
    else:
        if args.outfile is not sys.stdout:
            log.error('The {} format expects an output directory, '
                      'not an output file.'.format(format))
            sys.exit(1)

        if args.outdir is not None:
            outdir = args.outdir
        elif infile is not sys.stdin:
            outdir = str(PurePath(infile.name).parents[1].joinpath(dirspec))
        else:
            # We're using sys.stdin, so can't infer an output directory name
            log.error(
                'The {} format writes to an output directory, which '
                'cannot be inferred automatically if the input comes '
                'from stdin. Use --outdir to specify it manually.'.format(
                    format))
            sys.exit(1)

    if format == 'doc':
        with outfile:
            gen_selfdoc.document(outfile)
        exit(0)

    try:
        srcfull = infile.read()
        obj = hjson.loads(srcfull,
                          use_decimal=True,
                          object_pairs_hook=validate.checking_dict)
    except ValueError as err:
        raise SystemExit(err)

    if args.novalidate:
        with outfile:
            gen_json.gen_json(obj, outfile, format)
            outfile.write('\n')
    elif (validate.validate(obj, params=params) == 0):
        if (verbose):
            log.info("Second validate pass (should show added optional keys)")
            validate.validate(obj, params=params)

        if format == 'rtl':
            return gen_rtl.gen_rtl(obj, outdir)
        if format == 'dv':
            return gen_dv.gen_dv(obj, args.dv_base_prefix, outdir)
        if format == 'fpv':
            return gen_fpv.gen_fpv(obj, outdir)
        src_lic = None
        src_copy = ''
        found_spdx = None
        found_lunder = None
        copy = re.compile(r'.*(copyright.*)|(.*\(c\).*)', re.IGNORECASE)
        spdx = re.compile(r'.*(SPDX-License-Identifier:.+)')
        lunder = re.compile(r'.*(Licensed under.+)', re.IGNORECASE)
        for line in srcfull.splitlines():
            mat = copy.match(line)
            if mat is not None:
                # Either alternative of the regex may have matched; the old
                # `+= mat.group(1)` raised TypeError on "(c)"-style lines.
                src_copy += mat.group(1) or mat.group(2) or ''
            mat = spdx.match(line)
            if mat is not None:
                found_spdx = mat.group(1)
            mat = lunder.match(line)
            if mat is not None:
                found_lunder = mat.group(1)
        if found_lunder:
            src_lic = found_lunder
        if found_spdx:
            # found_lunder may be absent, in which case src_lic is still None.
            src_lic = (src_lic + '\n' + found_spdx) if src_lic else found_spdx

        with outfile:
            if format == 'html':
                return gen_html.gen_html(obj, outfile)
            elif format == 'cdh':
                return gen_cheader.gen_cdefines(obj, outfile, src_lic, src_copy)

            # Only the json/compact formats reach this point; the early
            # returns above previously made the newline write unreachable.
            gen_json.gen_json(obj, outfile, format)
            outfile.write('\n')
Exemple #55
0
def generate_alert_handler(top, out_path):
    # default values
    esc_cnt_dw = 32
    accu_cnt_dw = 16
    async_on = "'0"
    # leave this constant
    n_classes = 4

    topname = top["name"]

    # check if there are any params to be passed through reggen and placed into
    # the generated package
    ip_list_in_top = [x["name"].lower() for x in top["module"]]
    ah_idx = ip_list_in_top.index("alert_handler")
    if 'localparam' in top['module'][ah_idx]:
        if 'EscCntDw' in top['module'][ah_idx]['localparam']:
            esc_cnt_dw = int(top['module'][ah_idx]['localparam']['EscCntDw'])
        if 'AccuCntDw' in top['module'][ah_idx]['localparam']:
            accu_cnt_dw = int(top['module'][ah_idx]['localparam']['AccuCntDw'])
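    # For illustration (hypothetical values): a matching entry in the
    # top-level hjson could look like
    #   { name: "alert_handler", localparam: { EscCntDw: 32, AccuCntDw: 16 } }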

    if esc_cnt_dw < 1:
        log.error("EscCntDw must be larger than 0")
    if accu_cnt_dw < 1:
        log.error("AccuCntDw must be larger than 0")

    # Count number of alerts
    n_alerts = sum([x["width"] if "width" in x else 1 for x in top["alert"]])

    if n_alerts < 1:
        # set number of alerts to 1 such that the config is still valid
        # that input will be tied off
        n_alerts = 1
        log.warning("no alerts are defined in the system")
    else:
        async_on = ""
        for alert in top['alert']:
            for k in range(alert['width']):
                async_on = str(alert['async']) + async_on
        async_on = ("%d'b" % n_alerts) + async_on

    log.info("alert handler parameterization:")
    log.info("NAlerts   = %d" % n_alerts)
    log.info("EscCntDw  = %d" % esc_cnt_dw)
    log.info("AccuCntDw = %d" % accu_cnt_dw)
    log.info("AsyncOn   = %s" % async_on)

    # Define target path
    rtl_path = out_path / 'ip/alert_handler/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    doc_path = out_path / 'ip/alert_handler/data/autogen'
    doc_path.mkdir(parents=True, exist_ok=True)
    dv_path = out_path / 'ip/alert_handler/dv'
    dv_path.mkdir(parents=True, exist_ok=True)

    # Generating the IP top module script is not generalized yet, so topgen
    # reads template files from the alert_handler directory directly.
    tpl_path = Path(__file__).resolve().parent / '../hw/ip/alert_handler/data'
    hjson_tpl_path = tpl_path / 'alert_handler.hjson.tpl'
    dv_tpl_path = tpl_path / 'alert_handler_env_pkg__params.sv.tpl'

    # Generate Register Package and RTLs
    # Start as an empty string so the "out == ''" error check below fires
    # if the template render throws.
    out = ""
    with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
        hjson_tpl = Template(fin.read())
        try:
            out = hjson_tpl.render(n_alerts=n_alerts,
                                   esc_cnt_dw=esc_cnt_dw,
                                   accu_cnt_dw=accu_cnt_dw,
                                   async_on=async_on,
                                   n_classes=n_classes)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("alert_handler hjson: %s" % out)

    if out == "":
        log.error("Cannot generate alert_handler config file")
        return

    hjson_gen_path = doc_path / "alert_handler.hjson"
    gencmd = (
        "// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson --alert-handler-only "
        "-o hw/top_{topname}/\n\n".format(topname=topname))
    with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
        fout.write(genhdr + gencmd + out)

    # Generate register RTLs (currently using shell execute)
    # TODO: More secure way to generate RTL
    hjson_obj = hjson.loads(out,
                            use_decimal=True,
                            object_pairs_hook=validate.checking_dict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))

    # generate testbench for alert_handler
    with dv_tpl_path.open(mode='r', encoding='UTF-8') as fin:
        dv_tpl = Template(fin.read())
        try:
            out = dv_tpl.render(n_alerts=n_alerts, async_on=async_on)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("ALERT_HANDLER DV: %s" % out)
        if out == "":
            log.error("Cannot generate dv alert_handler parameter file")
            return

        dv_gen_path = dv_path / 'alert_handler_env_pkg__params.sv'
        with dv_gen_path.open(mode='w', encoding='UTF-8') as fout:
            fout.write(genhdr + gencmd + out)
    def _populate_from_js(self, response, product):
        """
        Gets data out of JS script straight from html body
        """

        self._JS_PROD_IMG_RE = re.compile(r'walmartData\.graphicsEnlargedURLS\s+=\s+([^;]*\])', re.DOTALL)
        meta = response.meta.copy()
        reqs = meta.get('reqs', [])

        # Extract base product info from JS
        data = is_empty(
            re.findall(
                r'productPurchaseCartridgeData\["\d+"\]\s*=\s*(\{(.|\n)*?\});',
                response.body_as_unicode().encode('utf-8')
            )
        )

        if data:
            data = list(data)[0]
            data = data.decode('utf-8').replace(' :', ':')
            try:
                product_data = hjson.loads(data, object_pairs_hook=dict)
            except ValueError:
                self.log("Impossible to get product data from JS %r." % response.url, WARNING)
                return
        else:
            self.log("No JS for product info matched in %r." % response.url, WARNING)
            return

        product_data['baseProdInfo'] = product_data['variantDataRaw'][0]

        # Set product sku
        try:
            sku_id = is_empty(product_data['baseProdInfo']['sku_id'])
            response.meta['sku_id'] = sku_id

        except (ValueError, KeyError):
            self.log("Impossible to get sku id - %r." % response.url, WARNING)

        # Set product UPC
        try:
            upc = is_empty(product_data['baseProdInfo']['upc_nbr'])
            cond_set_value(
                product, 'upc', upc, conv=unicode
            )
        except (ValueError, KeyError):
            self.log("Impossible to get UPC" % response.url, WARNING)  # Not really a UPC.

        # Set brand
        try:
            brand = is_empty(product_data['baseProdInfo']['brand_name_en'])
            cond_set_value(product, 'brand', brand)
        except (ValueError, KeyError):
            self.log("Impossible to get brand - %r" % response.url, WARNING)

        # No brand - trying to get it from product title
        if not product.get("brand"):
            brand = product.get("title")
            cond_set(
                product, 'brand', (guess_brand_from_first_words(brand.strip()),)
            )

        # Set if special price
        try:
            special_price = product_data['baseProdInfo']['price_store_was_price']
            online_status = product_data['baseProdInfo']['online_status'][0]

            if online_status != u'90':
                cond_set_value(product, 'special_pricing', True)
            else:
                cond_set_value(product, 'special_pricing', False)
        except (ValueError, KeyError):
            cond_set_value(product, 'special_pricing', False)

        # Set variants
        number_of_variants = product_data.get('numberOfVariants', 0)
        data_variants = product_data['variantDataRaw']
        skus = []
        if number_of_variants:
            try:
                variants = {}

                for var in data_variants:
                    variant = dict()
                    properties = dict()

                    sku_id = is_empty(
                        var.get('sku_id', ''),
                        ''
                    )
                    skus.append(sku_id)

                    price = var.get('price_store_price')
                    if price:
                        price = is_empty(price, None)
                        price = price.replace(',', '')
                        price = format(float(price), '.2f')
                    variant['price'] = price

                    color = is_empty(var.get('variantKey_en_Colour', []))
                    size = is_empty(var.get('variantKey_en_Size', []))
                    waist_size = is_empty(
                        var.get('variantKey_en_Waist_size_-_inches'), [])
                    if size:
                        properties['size'] = size
                    if color:
                        properties['color'] = color
                    if waist_size:
                        properties['waist_size'] = waist_size
                    variant['properties'] = properties

                    variants[sku_id] = variant
            except (KeyError, ValueError):
                variants = []

        else:
            skus = [sku_id]
            variants = []

        product['variants'] = variants
        response.meta['skus'] = skus

        # Set product images urls
        image = re.findall(self._JS_PROD_IMG_RE, response.body_as_unicode())
        if image:
            try:
                image = image[1]
            except IndexError:
                image = image[0]

            try:
                image = image.decode('utf-8').replace(' :', ':').replace('//', 'http://')
                data_image = hjson.loads(image, object_pairs_hook=dict)
                image_urls = [value['enlargedURL'] for k, value in enumerate(data_image)]
                if image_urls and isinstance(image_urls, (tuple, list)):
                    image_urls = [image_urls[0]]
                cond_set(product, 'image_url', image_urls)
            except (ValueError, KeyError):
                self.log("Impossible to set image urls in %r." % response.url, WARNING)

        else:
            self.log("No JS for product image matched in %r." % response.url, WARNING)

        if reqs:
            return self.send_next_request(reqs, response)

        return product
Exemple #57
0
def generate_plic(top, out_path):
    topname = top["name"]
    # Count number of interrupts
    # Interrupt source 0 is tied to 0 to conform RISC-V PLIC spec.
    # So, total number of interrupts are the number of entries in the list + 1
    src = sum([x["width"] if "width" in x else 1
               for x in top["interrupt"]]) + 1

    # Target and priority: Currently fixed
    target = int(top["num_cores"], 0) if "num_cores" in top else 1
    prio = 3

    # Define target path
    #   rtl: rv_plic.sv & rv_plic_reg_pkg.sv & rv_plic_reg_top.sv
    #   data: rv_plic.hjson
    rtl_path = out_path / 'ip/rv_plic/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    doc_path = out_path / 'ip/rv_plic/data/autogen'
    doc_path.mkdir(parents=True, exist_ok=True)
    hjson_path = out_path / 'ip/rv_plic/data/autogen'
    hjson_path.mkdir(parents=True, exist_ok=True)

    # Generating the IP top module script is not generalized yet, so topgen
    # reads template files from the rv_plic directory directly. Once the IP
    # top generation tool is placed in util/, we can import it as a library.
    tpl_path = Path(__file__).resolve().parent / '../hw/ip/rv_plic/data'
    hjson_tpl_path = tpl_path / 'rv_plic.hjson.tpl'
    rtl_tpl_path = tpl_path / 'rv_plic.sv.tpl'

    # Generate Register Package and RTLs
    # Start as an empty string so the "out == ''" error check below fires
    # if the template render throws.
    out = ""
    with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
        hjson_tpl = Template(fin.read())
        try:
            out = hjson_tpl.render(src=src, target=target, prio=prio)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("RV_PLIC hjson: %s" % out)

    if out == "":
        log.error("Cannot generate interrupt controller config file")
        return

    hjson_gen_path = hjson_path / "rv_plic.hjson"
    gencmd = (
        "// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson --plic-only "
        "-o hw/top_{topname}/\n\n".format(topname=topname))
    with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
        fout.write(genhdr + gencmd + out)

    # Generate register RTLs (currently using shell execute)
    # TODO: More secure way to generate RTL
    hjson_obj = hjson.loads(out,
                            use_decimal=True,
                            object_pairs_hook=OrderedDict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))

    # Generate RV_PLIC Top Module
    with rtl_tpl_path.open(mode='r', encoding='UTF-8') as fin:
        rtl_tpl = Template(fin.read())
        try:
            out = rtl_tpl.render(src=src, target=target, prio=prio)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("RV_PLIC RTL: %s" % out)

    if out == "":
        log.error("Cannot generate interrupt controller RTL")
        return

    rtl_gen_path = rtl_path / "rv_plic.sv"
    with rtl_gen_path.open(mode='w', encoding='UTF-8') as fout:
        fout.write(genhdr + gencmd + out)
Exemple #58
0
def generate_pinmux_and_padctrl(top, out_path):
    topname = top["name"]
    # MIO Pads
    n_mio_pads = top["pinmux"]["num_mio"]
    if n_mio_pads <= 0:
        # TODO: add support for no MIO case
        log.error("Topgen currently does not support generation of a top " +
                  "without a pinmux.")
        return

    if "padctrl" not in top:
        # TODO: add support for no MIO case
        log.error("Topgen does currently not support generation of a top " +
                  "without a padctrl instance.")
        return

    # Get number of wakeup detectors
    if "num_wkup_detect" in top["pinmux"]:
        num_wkup_detect = top["pinmux"]["num_wkup_detect"]
    else:
        num_wkup_detect = 1

    if num_wkup_detect <= 0:
        # TODO: add support for no wakeup counter case
        log.error("Topgen currently does not support generation of a top " +
                  "without wakeup detectors.")
        return

    if "wkup_cnt_width" in top["pinmux"]:
        wkup_cnt_width = top["pinmux"]["wkup_cnt_width"]
    else:
        wkup_cnt_width = 8

    if wkup_cnt_width <= 1:
        log.error("Wakeup counter width must be greater than or equal to 2.")
        return

    # Total inputs/outputs
    # Validation ensures that the width field is present.
    num_mio_inputs = sum([x["width"] for x in top["pinmux"]["inputs"]])
    num_mio_outputs = sum([x["width"] for x in top["pinmux"]["outputs"]])
    num_mio_inouts = sum([x["width"] for x in top["pinmux"]["inouts"]])

    num_dio_inputs = sum([
        x["width"] if x["type"] == "input" else 0 for x in top["pinmux"]["dio"]
    ])
    num_dio_outputs = sum([
        x["width"] if x["type"] == "output" else 0
        for x in top["pinmux"]["dio"]
    ])
    num_dio_inouts = sum([
        x["width"] if x["type"] == "inout" else 0 for x in top["pinmux"]["dio"]
    ])

    n_mio_periph_in = num_mio_inouts + num_mio_inputs
    n_mio_periph_out = num_mio_inouts + num_mio_outputs
    n_dio_periph_in = num_dio_inouts + num_dio_inputs
    n_dio_periph_out = num_dio_inouts + num_dio_outputs
    n_dio_pads = num_dio_inouts + num_dio_inputs + num_dio_outputs

    if n_dio_pads <= 0:
        # TODO: add support for no DIO case
        log.error("Topgen currently does not support generation of a top " +
                  "without DIOs.")
        return

    log.info("Generating pinmux with following info from hjson:")
    log.info("num_mio_inputs:  %d" % num_mio_inputs)
    log.info("num_mio_outputs: %d" % num_mio_outputs)
    log.info("num_mio_inouts:  %d" % num_mio_inouts)
    log.info("num_dio_inputs:  %d" % num_dio_inputs)
    log.info("num_dio_outputs: %d" % num_dio_outputs)
    log.info("num_dio_inouts:  %d" % num_dio_inouts)
    log.info("num_wkup_detect: %d" % num_wkup_detect)
    log.info("wkup_cnt_width:  %d" % wkup_cnt_width)
    log.info("This translates to:")
    log.info("n_mio_periph_in:  %d" % n_mio_periph_in)
    log.info("n_mio_periph_out: %d" % n_mio_periph_out)
    log.info("n_dio_periph_in:  %d" % n_dio_periph_in)
    log.info("n_dio_periph_out: %d" % n_dio_periph_out)
    log.info("n_dio_pads:       %d" % n_dio_pads)

    # Target path
    #   rtl: pinmux_reg_pkg.sv & pinmux_reg_top.sv
    #   data: pinmux.hjson
    rtl_path = out_path / 'ip/pinmux/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    data_path = out_path / 'ip/pinmux/data/autogen'
    data_path.mkdir(parents=True, exist_ok=True)

    # Template path
    tpl_path = Path(
        __file__).resolve().parent / '../hw/ip/pinmux/data/pinmux.hjson.tpl'

    # Generate register package and RTLs
    gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
              "-o hw/top_{topname}/\n\n".format(topname=topname))

    hjson_gen_path = data_path / "pinmux.hjson"

    out = ""  # default so a failed render is caught by the check below
    with tpl_path.open(mode='r', encoding='UTF-8') as fin:
        hjson_tpl = Template(fin.read())
        try:
            # TODO: pass in information about always-on peripherals
            # TODO: pass in information on which DIOs can be selected
            # as wakeup signals
            # TODO: pass in signal names such that we can introduce
            # named enums for select signals
            out = hjson_tpl.render(
                n_mio_periph_in=n_mio_periph_in,
                n_mio_periph_out=n_mio_periph_out,
                n_mio_pads=n_mio_pads,
                # each DIO has in, out and oe wires
                # some of these have to be tied off in the
                # top, depending on the type.
                n_dio_periph_in=n_dio_pads,
                n_dio_periph_out=n_dio_pads,
                n_dio_pads=n_dio_pads,
                n_wkup_detect=num_wkup_detect,
                wkup_cnt_width=wkup_cnt_width)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("PINMUX HJSON: %s" % out)

    if out == "":
        log.error("Cannot generate pinmux HJSON")
        return

    with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
        fout.write(genhdr + gencmd + out)

    hjson_obj = hjson.loads(out,
                            use_decimal=True,
                            object_pairs_hook=validate.checking_dict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))

    # Target path
    #   rtl: padctrl_reg_pkg.sv & padctrl_reg_top.sv
    #   data: padctrl.hjson
    rtl_path = out_path / 'ip/padctrl/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    data_path = out_path / 'ip/padctrl/data/autogen'
    data_path.mkdir(parents=True, exist_ok=True)

    # Template path
    tpl_path = Path(
        __file__).resolve().parent / '../hw/ip/padctrl/data/padctrl.hjson.tpl'

    # Generate register package and RTLs
    gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
              "-o hw/top_{topname}/\n\n".format(topname=topname))

    hjson_gen_path = data_path / "padctrl.hjson"

    out = ""  # default so a failed render is caught by the check below
    with tpl_path.open(mode='r', encoding='UTF-8') as fin:
        hjson_tpl = Template(fin.read())
        try:
            out = hjson_tpl.render(n_mio_pads=n_mio_pads,
                                   n_dio_pads=n_dio_pads,
                                   attr_dw=10)
        except:  # noqa: E722
            log.error(exceptions.text_error_template().render())
        log.info("PADCTRL HJSON: %s" % out)

    if out == "":
        log.error("Cannot generate padctrl HJSON")
        return

    with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
        fout.write(genhdr + gencmd + out)

    hjson_obj = hjson.loads(out,
                            use_decimal=True,
                            object_pairs_hook=validate.checking_dict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))
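Both generators above use the same Mako render-with-fallback pattern. A self-contained sketch of just that pattern (Template and text_error_template are the real mako APIs; the template text and parameter values are stand-ins):

import logging as log

from mako import exceptions
from mako.template import Template

tpl = Template("pads: ${n_mio_pads + n_dio_pads}")

out = ""
try:
    out = tpl.render(n_mio_pads=32, n_dio_pads=15)
except:  # noqa: E722
    # text_error_template() formats the traceback with template line
    # numbers, which is far easier to read than a raw Python stack.
    log.error(exceptions.text_error_template().render())

if out == "":
    log.error("Cannot render template")
else:
    log.info("Rendered: %s", out)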
Exemple #59
0
 def from_text(txt: str, param_defaults: List[Tuple[str, str]],
               where: str) -> 'IpBlock':
     '''Load an IpBlock from an hjson description in txt'''
     return IpBlock.from_raw(param_defaults,
                             hjson.loads(txt, use_decimal=True), where)
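IpBlock.from_raw is not shown here; a self-contained sketch of the same parse-then-construct pattern, with a made-up Block class standing in for IpBlock:

from typing import Any, Dict

import hjson

class Block:
    def __init__(self, raw: Dict[str, Any], where: str):
        # `where` labels the source in error messages, as in from_text above
        if 'name' not in raw:
            raise ValueError('{}: missing "name" key'.format(where))
        self.name = raw['name']

    @classmethod
    def from_text(cls, txt: str, where: str) -> 'Block':
        '''Parse the hjson text once, then hand the raw dict to the constructor'''
        return cls(hjson.loads(txt, use_decimal=True), where)

blk = Block.from_text('''
{
  name: uart
}
''', 'uart.hjson')
assert blk.name == 'uart'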
Exemple #60
0
 def _load(self):
     confstr = agog.serverman.Storage().open(('profile', 'userconfig.hjson'))
     config = hjson.loads(confstr)
     return config
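agog.serverman.Storage is project-specific; against a plain file, the same load step reduces to something like this (the config path is hypothetical):

from pathlib import Path

import hjson

def load_config(path):
    '''Read an hjson user config and return it as a dict.'''
    return hjson.loads(path.read_text(encoding='utf-8'))

config = load_config(Path('profile/userconfig.hjson'))  # hypothetical path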