Example #1
def main(opts, args):
    diff=False
    get=False
    push=False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        elif o in ('-d', '--diff'):
            diff=True
        elif o in ('-p', '--push'):
            if get:
                print("You can't both get and push. Choose one!")
                usage()
                sys.exit(2)
            push=True
        elif o in ('-g', '--get'):
            if push:
                print("You can't both get and push. Choose one!")
                usage()
                sys.exit(2)
            get=True
        else:
            assert False, "unhandled option {}".format(o)


    with open('sync.json') as fd:
        config = json.load(fd)
    site = connect(config)
    docs = get_mediawiki_files_from_disk(config)
    compare_site_and_disk(config, diff, site, docs, push, get)
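The opts and args arguments are produced by the caller, which is not shown here. A minimal driver sketch (the short/long option strings are assumptions inferred from the handlers above, not taken from the original project):

import getopt
import sys

if __name__ == '__main__':
    try:
        # '-h/-d/-p/-g' mirror the long options handled in main() above.
        opts, args = getopt.getopt(sys.argv[1:], 'hdpg', ['help', 'diff', 'push', 'get'])
    except getopt.GetoptError as err:
        # Unknown option: report it and exit with the conventional usage code.
        print(err)
        sys.exit(2)
    main(opts, args)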
Example #2
File: hgeo.py Project: akoon/OldPipeline
def _ginfo(filename):
    try:
        fp = open(filename, "r")
    except:
        print "Unable to open", filename
        return
    _Verbose("Loading %s" % filename)
    fdata = hjson.load(fp)
    _Verbose("Done Loading %s" % filename)
    d = Detail()
    d.loadJSON(fdata)
    print "=" * 10, filename, "=" * 10
    print "%12d Points" % d.pointCount()
    print "%12d Vertices" % d.vertexCount()
    print "%12d Primitives" % d.primitiveCount()
    print "-" * 5, "Attributes", "-" * 5
    _ginfoAttributes("Point", d.PointAttributes)
    _ginfoAttributes("Vertex", d.VertexAttributes)
    _ginfoAttributes("Primitive", d.PrimitiveAttributes)
    _ginfoAttributes("Global", d.GlobalAttributes)
    _ginfoGroups("Point", d.PointGroups)
    _ginfoGroups("Vertex", d.VertexGroups)
    _ginfoGroups("Primitive", d.PrimitiveGroups)
    _ginfoPrimitives(d.Primitives)
    _dumpPrimitive(d, 0)
Example #3
def main():
    # Configuration loading
    config_location = os.path.dirname(sys.argv[0]) + "/" + "auth02mozdef.json"
    with open(config_location) as fd:
        config = DotDict(hjson.load(fd))

    if config is None:
        print("No configuration file 'auth02mozdef.json' found.")
        sys.exit(1)

    headers = {"Authorization": "Bearer {}".format(config.auth0.token), "Accept": "application/json"}

    fromid = load_state(config.state_file)
    # Auth0 will interpret a 0 state as an error on our hosted instance, but will accept an empty parameter "as if it was 0"
    if fromid == 0 or fromid == "0":
        fromid = ""
    totals = 1
    start = 0
    length = 0

    # Fetch until we've gotten all messages
    while totals > start + length:
        (totals, start, length, lastid) = fetch_auth0_logs(config, headers, fromid)

        if totals == -1:
            if fromid == lastid:
                # We got everything, we're done!
                break
        fromid = lastid

    save_state(config.state_file, lastid)
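load_state and save_state are not part of this snippet. A minimal file-backed sketch of what such helpers could look like (hypothetical, not the project's actual implementation):

def load_state(state_file):
    # Return the last processed id, or 0 when no state has been saved yet.
    try:
        with open(state_file) as fd:
            return fd.read().strip()
    except IOError:
        return 0


def save_state(state_file, state):
    # Persist the last processed id for the next run.
    with open(state_file, 'w') as fd:
        fd.write(str(state))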
Example #4
def main():

    # Parse the command-line options.
    try:
        options = docopt.docopt(__doc__)
    except docopt.DocoptLanguageError:
        print('Dumb chronicler. Contact Doomy.')
        exit(1)

    # Load the chronicle file.
    try:
        raw = hjson.load(open(options['FILE']))
        jsonschema.validate(raw, schema)
        chronicle = Chronicle(raw)
    except FileNotFoundError:
        print('No chronicle to read.')
        exit(1)
    except hjson.HjsonDecodeError as e:
        print('This chronicle can\'t be deciphered.')
        print('L%d, C%d: %s' % (e.lineno, e.colno, e.msg))
        exit(1)
    except jsonschema.ValidationError as e: 
        print('This chronicle isn\'t correctly engraved.')
        print('%s: %s' % (list(e.path), e.message))
        exit(1)

    # Get the filters to apply.
    filters = []
    if options['--filters'] is not None:
        for raw in options['--filters'].split(','):
            filters.append(Filter(raw))

    # Get the games matching the filters.
    games = chronicle.filter(filters)

    # Early exit if there is no game.
    if len(games) == 0:
        print('There is no battle like this.')
        exit(0)

    # Aggregate the games in groups.
    groups = options['--groups'].split(',')
    grouper = operator.itemgetter(groups)
    results = []
    totalPlayed = 0
    totalWon = 0
    for key, games in itertools.groupby(sorted(games, key=grouper), grouper):
        played = 0
        won = 0
        for game in games:
            played += 1
            totalPlayed += 1
            won += int(game['result.victory'])
            totalWon += int(game['result.victory'])
        results.append(key + [played, won, math.trunc(won/played*100)])
    results.append(['TOTAL'] + ['' for i in groups[1:]] + [totalPlayed, totalWon, math.trunc(totalWon/totalPlayed*100)])

    # Print the results.
    headers = groups + ['played', 'won', '%']
    print(tabulate.tabulate(results, headers, tablefmt='psql'))
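docopt builds its parser from the module docstring, which is not shown. A hypothetical sketch of the kind of usage string this code implies (the real docstring presumably supplies a default for --groups, since the code calls .split(',') on it unconditionally):

"""chronicle

Usage:
    chronicle FILE [--filters=<filters>] [--groups=<groups>]

Options:
    --filters=<filters>  Comma-separated list of filters to apply.
    --groups=<groups>    Comma-separated list of keys to group by.
"""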
Example #5
def main():
    debug('Debug mode on')

    singleteam = None
    try:
        optlist, args = getopt.getopt(sys.argv[1:], 'ht:')
    except getopt.GetoptError as err:
        sys.stderr.write(str(err) + '\n')
        usage()
        sys.exit(1)
    for o, a in optlist:
        if o == '-h':
            usage()
            sys.exit(0)
        elif o == '-t':
            singleteam = a

    with open('vuln2bugs.json') as fd:
        config = json.load(fd)

    teams = config['teamsetup']

    # Note that the pyes library returns DotDicts which are addressable like mydict['hi'] and mydict.hi
    for team in teams:
        if singleteam is not None and team != singleteam:
            continue
        if 'name' not in teams[team]:
            teams[team]['name'] = team
        debug('Processing team: {} using filter {}'.format(team, teams[team]['filter']))
        teamvulns = TeamVulns(config, team)
        processor = VulnProcessor(config, teamvulns, team)
        debug('{} assets affected by vulnerabilities with the selected filter.'.format(len(teamvulns.assets)))
        bug_type_flat(config, team, teamvulns, processor)
Example #6
def main():
    # Argument parsing
    parser = ArgumentParser(description=DESCRIPTION)
    parser.add_argument("-c", "--configfile", help="path to the config file", default="config.hjson")
    parser.add_argument("-o", "--output", help="path to the output file", default="output.csv")
    args = parser.parse_args()

    # Configuration parsing
    config = hjson.load(open(args.configfile))

    # Create the asyncio loop
    loop = asyncio.get_event_loop()

    try:
        df = pd.DataFrame(columns=["timestamp"] + config["targets"])
        for i in range(len(config["targets"])):
            asyncio.async(awsum(config, i, df).start(loop))
    # Except if a value was missing in the config file
    except awsum.InvalidConfigFile as err_msg:
        print("Error: IncorrectConfigFile: %s" % err_msg)

    # Adding signal handlers
    loop.add_signal_handler(getattr(signal, "SIGINT"), exit_sigcatch, "SIGINT", loop)
    loop.add_signal_handler(getattr(signal, "SIGTERM"), exit_sigcatch, "SIGTERM", loop)
    loop.run_forever()
    df = df.groupby(by="timestamp").sum()
    df.to_csv(args.output)
    print(df)
    sys.exit(0)
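Note that asyncio.async was deprecated in Python 3.4.4 in favor of asyncio.ensure_future, and async became a reserved keyword in Python 3.7, so the scheduling line above is a syntax error on current interpreters. The modern equivalent would be:

# Equivalent scheduling call on Python 3.4.4+:
asyncio.ensure_future(awsum(config, i, df).start(loop))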
Example #7
def _ginfo(filename):
    try:
        fp = open(filename, 'r')
    except:
        print('Unable to open', filename)
        return
    _Verbose('Loading %s' % filename)
    fdata = hjson.load(fp)
    _Verbose('Done Loading %s' % filename)
    d = Detail()
    d.loadJSON(fdata)
    print('='*10, filename, '='*10)
    print('%12d Points' % d.pointCount())
    print('%12d Vertices' % d.vertexCount())
    print('%12d Primitives' % d.primitiveCount())
    print('-'*5, 'Attributes', '-'*5)
    _ginfoAttributes('Point', d.PointAttributes)
    _ginfoAttributes('Vertex', d.VertexAttributes)
    _ginfoAttributes('Primitive', d.PrimitiveAttributes)
    _ginfoAttributes('Global', d.GlobalAttributes)
    _ginfoGroups('Point', d.PointGroups)
    _ginfoGroups('Vertex', d.VertexGroups)
    _ginfoGroups('Primitive', d.PrimitiveGroups)
    _ginfoPrimitives(d.Primitives)
    _dumpPrimitive(d, 0)
Example #8
 def _load_test_case_exception(self):
     with open(self.path) as f:
         result = hjson.load(f, 'utf8')
         if not result:
             return None
         if type(result) not in [dict, OrderedDict]:
             raise Exception("Malformed test case, only dict is expected")
         return result
Example #9
File: cli.py Project: bjinwright/Zappa
 def load_settings_file(self, settings_file="zappa_settings.json"):
     try:
         with open(settings_file) as json_file:
             self.zappa_settings = json.load(json_file)
     except Exception as e: # pragma: no cover
         print("Problem parsing settings file.")
         print(e)
         sys.exit(1) # pragma: no cover
Example #10
File: lists.py Project: DasFranck/ConDeBot
def load_lists(lists_path):
    if (os.path.isfile(lists_path)):
        with open(lists_path, encoding="utf8") as lists_file:
            try:
                return hjson.load(lists_file)
            except:
                return None
    else:
        return []
Example #11
    def loadFromJson(self,conf=""):
        if not conf == "":
            print '#> ',conf
            fp = open(conf, 'r')

            config_map = hjson.load(fp)
            for k,v in config_map.iteritems():
                if k=='import':
                    self.loadFromJson(v)
                self._CONFIG[k] = v
            fp.close()
Example #12
 def test_object_pairs_hook(self):
     s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
     p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4), ("qrt", 5), ("pad", 6), ("hoy", 7)]
     self.assertEqual(json.loads(s), eval(s))
     self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
     self.assertEqual(json.load(StringIO(s), object_pairs_hook=lambda x: x), p)
     od = json.loads(s, object_pairs_hook=OrderedDict)
     self.assertEqual(od, OrderedDict(p))
     self.assertEqual(type(od), OrderedDict)
     # the object_pairs_hook takes priority over the object_hook
     self.assertEqual(json.loads(s, object_pairs_hook=OrderedDict, object_hook=lambda x: None), OrderedDict(p))
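In the hjson test suite, json is conventionally an alias for the hjson package itself. A standalone sketch of the same object_pairs_hook behaviour (plain JSON input, which hjson also accepts):

import hjson
from collections import OrderedDict
from io import StringIO

od = hjson.load(StringIO('{"b": 2, "a": 1}'), object_pairs_hook=OrderedDict)
assert list(od) == ['b', 'a']  # key order is preserved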
Example #13
def main():
    with open('rra2json.json') as fd:
        config = json.load(fd)
        rra2jsonconfig = config['rra2json']
        authconfig = config['oauth2']
        rrajson_skel = config['rrajson']
        data_levels = config['data_levels']
        risk_levels = config['risk_levels']


    #Disable debugging messages by assigning a null/none function, if configured to do so.
    if rra2jsonconfig['debug'] != 'true':
        debug = lambda x: None
    else:
        debug = globals()['debug']

    gc = gspread_authorize(authconfig['client_email'], authconfig['private_key'], authconfig['spread_scope'])

    if not gc:
        fatal('Authorization failed')

    # Looking at the XML feed is the only way to get sheet document title for some reason.
    sheets = get_sheet_titles(gc)
    # Do not traverse sheets manually, it's very slow due to the API delays.
    # Opening all at once, including potentially non-useful sheets, is a zillion times faster as it's a single API call.
    gsheets = gc.openall()
    for s in gsheets:
        rra_version = detect_version(gc, s)
        if rra_version is not None:
            #virtual function pointer
            try:
                parse_rra = globals()["parse_rra_{}".format(rra_version)]
            except (KeyError, UnboundLocalError) as e:
                # If this error is reached, you want to add a new parse_rra_... function that will parse the new format!
                debug("Unsupported RRA version {}. rra2json needs to add explicit support before it can be parsed. Skipping RRA {} - id {}.".format(rra_version, sheets[s.id], s.id))
                continue

            try:
                rrajsondoc = parse_rra(gc, s, sheets[s.id], rra_version, DotDict(dict(copy.deepcopy(rrajson_skel))), list(data_levels),
                        list(risk_levels))
                if rrajsondoc is None:
                    debug('Document {} ({}) could not be parsed and is probably not an RRA'.format(sheets[s.id], s.id))
                    continue
            except:
                import traceback
                traceback.print_exc()
                debug('Exception occurred while parsing RRA {} - id {}'.format(sheets[s.id], s.id))
            else:
                post_rra_to_mozdef(config['mozdef'], rrajsondoc)

            debug('Parsed {}: {}'.format(sheets[s.id], rra_version))
        else:
            debug('Document {} ({}) could not be parsed and is probably not an RRA (no version detected)'.format(sheets[s.id], s.id))
Example #14
def init():
  global logstash_client

  try:  
    with open(LOGSTASH_CONF) as data_file:    
      config = _merge(dict(DEFAULT_CONFIG), hjson.load(data_file))
  
    logstash_client = Client(servers = config['network']['servers'], 
                             ssl_certificate = config['network']['ssl-ca'],
                             window_size = config['network']['window-size'],
                             timeout = config['network']['timeout'])
    return True
  except:
    traceback.print_exc()
    print("Error was a {0}".format(_fullname(sys.exc_info()[1])))
    raise
Example #15
def main():
    #Configuration loading
    with open('auth02mozdef.json') as fd:
        config = DotDict(hjson.load(fd))

    if config is None:
        print("No configuration file 'auth02mozdef.json' found.")
        sys.exit(1)

    headers = {'Authorization': 'Bearer {}'.format(config.auth0.token),
            'Accept': 'application/json'}

    fromid = load_state(config.state_file)

    r = requests.get('{url}?take={reqnr}&sort=date:1&per_page={reqnr}&include_totals=true&from={fromid}'.format(
        url=config.auth0.url,
        reqnr=config.auth0.reqnr,
        fromid=fromid),
        headers=headers)

    #If we fail here, auth0 is not responding to us the way we expected it
    if (not r.ok):
        raise Exception(r.url, r.reason, r.status_code, r.json())
    ret = r.json()

    #Process all new auth0 log msgs, normalize and send them to mozdef
    for msg in ret:
        mozmsg = mozdef.MozDefEvent(config.mozdef.url)
        if config.DEBUG:
            mozmsg.set_send_to_syslog(True, only_syslog=True)
        mozmsg.source = config.auth0.url
        mozmsg.tags = ['auth0']
        msg = DotDict(msg)
        lastid = msg._id

        #Fill in mozdef msg fields from the auth0 msg
        try:
            mozmsg = process_msg(mozmsg, msg)
        except KeyError as e:
            #if this happens the msg was malformed in some way
            mozmsg.details['error'] = 'true'
            mozmsg.details['errormsg'] = e
            mozmsg.summary = 'Failed to parse auth0 message'
        mozmsg.send()

    save_state(config.state_file, lastid)
Example #16
def createObjectFromTemplate(tpl,tpl_type):
    tpl_file = os.path.join(tpldir, tpl_type + '.' + tpl + '.hjson')

    print "read tlp_file %s" % tpl_file
    with open(tpl_file,'r') as f:
        tpl_dict = hjson.load(f)

    content = str(tpl_dict['tpl-text'])
    for k,v in tpl_dict['cfg'].iteritems():
        if v != '':
            seStr = k
            #repStr = eval(v)  # implicit use of cfg,the mad solution :-)
            repStr = getValFromCfg(v)  # implicit use of cfg,the mad solution :-)

            print "replace(%s = %s)" % (k,repStr)
            #conten = content.replace('@@'+seStr+'@@',repStr)
            content = content.replace(seStr, repStr)
    return content
Example #17
    def __init__(self):
        '''
        sends geomodel alert to SSO dashboard
        '''
        self.alert_classname = 'AlertGeomodel'

        config_file_path = os.path.join(os.path.dirname(__file__), 'dashboard_geomodel.json')
        json_obj = {}
        with open(config_file_path, "r") as fd:
            try:
                json_obj = hjson.load(fd)
            except ValueError:
                logger.error("FAILED to open the configuration file" + str(config_file_path))
        self.config = json_obj

        self.connect_db()

        self.registration = 'geomodel'
        self.priority = 1
Example #18
def main():
    format = 'hjson'
    args = []
    for arg in sys.argv[1:]:
        if arg == '-h' or arg == '--help':
            showerr(HELP)
            return
        elif arg == '-j': format = 'json'
        elif arg == '-c': format = 'compact'
        elif arg == '-V' or arg == '--version':
            showerr('Hjson ' + pkg_resources.require("Hjson")[0].version)
            return

        elif arg[0] == '-':
            showerr(HELP)
            raise SystemExit('unknown option ' + arg)
        else:
            args.append(arg)

    outfile = sys.stdout
    if len(args) == 0:
        infile = sys.stdin
    elif len(args) == 1:
        infile = open(args[0], 'r')
    else:
        showerr(HELP)
        raise SystemExit('unknown options')

    with infile:
        try:
            obj = hjson.load(infile, use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    with outfile:
        if format == 'json':
            hjson.dumpJSON(obj, outfile, use_decimal=True, indent='  ')
        elif format == 'compact':
            hjson.dumpJSON(obj, outfile, use_decimal=True, separators=(',', ':'))
        else:
            hjson.dump(obj, outfile, use_decimal=True)

        outfile.write('\n')
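For reference, the three branches map onto hjson's two serializers: hjson.dump emits Hjson, while hjson.dumpJSON emits JSON. A minimal in-memory sketch of the same round trip using the string variants:

import hjson

obj = hjson.loads('{"key": "value"}')
print(hjson.dumps(obj))                             # Hjson output
print(hjson.dumpsJSON(obj, indent='  '))            # formatted JSON
print(hjson.dumpsJSON(obj, separators=(',', ':')))  # compact JSON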
Example #19
 def serve(self, message, args, drink):
     """
     Check if the coffee is for someone else
     (And if the sender didn't forget the recipient)
     """
     with open(self.COFFEE_FILE_PATH, 'r', encoding="utf8") as quotes_file:
         quotes = hjson.load(quotes_file)
         if ('>' in args):
             index = args.index('>') + 1
             return ("Here {}, that's your {}.\n{}".format(
                 " ".join(args[index:]),
                 drink,
                 random.choice(quotes[drink]))
             )
         else:
             return ("Here {}, that's your {}.\n{}".format(
                 message.author.mention,
                 drink,
                 random.choice(quotes[drink]))
             )
Example #20
def to_html(filename):
    with open(filename) as input_file:
        thoughts_dict = hjson.load(input_file)

    toc_html = [toc_header]
    for section_name, section in thoughts_dict.items():
        toc_html.append(section_html(section_name))

        for subsection_name, subsection in section.items():
            toc_html.append(subsection_html(subsection_name))

            for i, (entry_name, date) in enumerate(subsection.items()):
                link = link_for_entry(section_name, subsection_name, i + 1)
                toc_html.append(entry_html(entry_name, link, date))

        toc_html.append(section_end_html())

    toc_html.append(toc_footer)

    return bs('\n'.join(toc_html), "html.parser").prettify(formatter="html")
Example #21
 def __init__(self, stream=None, key_transform=identity, value_transform=identity):
     if stream is None:
         stream = pkg_resources.resource_stream(__name__, "data/motifs.hjson")
     data = hjson.load(stream)
     motif_classes = set()
     motif_categories = set()
     for motif in data:
         name = motif['name']
         motif_class = motif['class']
         motif_category = motif['category']
         motif_structure = glycoct.loads(motif['glycoct'])
         motif_structure.motif_name = name
         motif_structure.motif_class = motif_class
         motif_structure.motif_category = motif_category
         motif_structure.is_core_motif = motif["core_motif"]
         self[name] = motif_structure
         motif_classes.add(motif_class)
         motif_categories.add(motif_category)
     self._category_map = {}
     self._class_map = {}
     self.motif_classes = motif_classes
     self.motif_categories = motif_categories
Example #22
File: tool.py Project: adribaena/hjson-py
def main():
    todo = "-h"
    args = []
    for arg in sys.argv[1:]:
        if arg[0] == '-':
            todo = arg
        else:
            args.append(arg)

    if len(args) == 0:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(args) == 1:
        infile = open(args[0], 'r')
        outfile = sys.stdout
    elif len(args) == 2:
        infile = open(args[0], 'r')
        outfile = open(args[1], 'w')
    else:
        raise SystemExit(sys.argv[0] + " {-h|-j|-c} [infile [outfile]]")

    with infile:
        try:
            obj = hjson.load(infile, use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])

    with outfile:
        if todo == '-j':
            hjson.dumpJSON(obj, outfile, use_decimal=True, indent="  ")
        elif todo == '-c':
            hjson.dumpJSON(obj, outfile, use_decimal=True, separators=(',', ':'))
        else:
            hjson.dump(obj, outfile, use_decimal=True)

        outfile.write('\n')
Example #23
 def load_discord_msg(self):
     with open("modules/standard/discord/discord.msg",
               mode="r",
               encoding="utf-8") as f:
         return hjson.load(f)
Example #24
    def __init__(self, abilitySets, abilityData, settings):
        self.abilitySets = abilitySets # BP groups, icons, etc.
        self.abilityData = abilityData # Stats, names, etc.

        # Settings for the skill shuffler
        self.weaponShuffle = settings['skills-weapons']
        self.divineSeparately = settings['skills-one-divine']
        self.advancedSeparately = settings['skills-separate']

        # Use for filtering groups, reorganizing data, adding descriptions, etc.
        with open(get_filename('json/ability.json'),'r') as file:
            self.abilityJSON = hjson.load(file)
        with open(get_filename('json/capture.json'),'r') as file:
            self.captureJSON = hjson.load(file)
        # For checking if job allows for use of that weapon
        with open(get_filename('json/weapons.json'),'r') as file:
            self.jobWeapons = hjson.load(file)

        # Group all the data needed about each set
        self.sets = {}
        self.candidates = {}
        self.capture = {}
        self.modRatio = {}
        self.ratioPercentChange = {}
        for key, data in self.abilitySets.table.items():
            # Get ability names
            abilities = self.abilitySets.getAbilityNames(key)
            # Store data in appropriate dictionary
            if abilities[0] in self.abilityJSON:
                self.sets[key] = {
                    'Job': self.abilityJSON[abilities[0]]['Job'],
                    'Divine': self.abilityJSON[abilities[0]]['Divine'],
                    'Advanced': self.abilityJSON[abilities[0]]['Advanced'],
                    'Weapon': self.abilityJSON[abilities[0]]['Weapon'],
                    'Element': self.abilityJSON[abilities[0]]['Element'],
                    'Priority': self.abilityJSON[abilities[0]]['Priority'],
                    'Swap': None,
                }
                self.candidates[key] = {
                    'Key': key,
                    'Weapon': self.abilityJSON[abilities[0]]['Weapon'],
                    'Element': self.abilityJSON[abilities[0]]['Element'],
                    'Data': data,
                }
                # Store whether or not to mod ratio for job skills
                for ability in abilities:
                    self.modRatio[ability] = self.abilityJSON[ability]['ModRatio']
                # Print/Spoiler logs only
                self.ratioPercentChange[key] = 0
            elif abilities[0] in self.captureJSON:
                self.capture[key] = {
                    'Weapon': self.captureJSON[abilities[0]]['Weapon'],
                    'Element': self.captureJSON[abilities[0]]['Element'],
                    'Icon': self.captureJSON[abilities[0]]['Icon'],
                }
            else:
                # E.g. Exclude Attack, Run, Concoct skills. 
                continue

        # RUNE to ABILITY -- used for identifying ability data to update ratios
        self.runeToAbility = {
            'BT_ABI_397': 'BT_ABI_FIRE',
            'BT_ABI_401': 'BT_ABI_ICE',
            'BT_ABI_405': 'BT_ABI_THUNDER',
            'BT_ABI_409': 'BT_ABI_WIND',
            'BT_ABI_413': 'BT_ABI_LIGHT',
            'BT_ABI_417': 'BT_ABI_DARK',
        }
Example #25
 def _get_evaluation_expect(self):
     path = self.path + '.eval'
     if not os.path.exists(path):
         return None
     with open(path, 'rb') as f:
         return hjson.load(f, 'utf8')
Example #26
 def __loadObj__ (self, path):
   result = None
   fp = codecs.open(path, mode='r', encoding='utf-8')
   result = hjson.load(fp)
   return result
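The handle opened with codecs.open above is never closed. A with-block variant with the same behaviour (sketch only):

def __loadObj__(self, path):
    with codecs.open(path, mode='r', encoding='utf-8') as fp:
        return hjson.load(fp)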
Example #27
def _load_config_template(hjson_config_template):
    config_dict = None
    with open(hjson_config_template, "r") as hjson_file:
        config_dict = hjson.load(hjson_file)
    return config_dict
Example #28
    def _gen_results(self):
        # '''
        # The function is called after the regression has completed. It looks
        # for a regr_results.hjson file with aggregated results from the
        # synthesis run. The hjson needs to have the following (potentially
        # empty) fields
        #
        # results = {
        #     "tool": "dc",
        #     "top" : <name of toplevel>,
        #
        #     "messages": {
        #         "flow_errors"      : [],
        #         "flow_warnings"    : [],
        #         "analyze_errors"   : [],
        #         "analyze_warnings" : [],
        #         "elab_errors"      : [],
        #         "elab_warnings"    : [],
        #         "compile_errors"   : [],
        #         "compile_warnings" : [],
        #     },
        #
        #     "timing": {
        #         # per timing group (usually a clock domain)
        #         # in nano seconds
        #         <group>  : {
        #             "tns"    : <value>,
        #             "wns"    : <value>,
        #             "period" : <value>,
        #         ...
        #         }
        #     },
        #
        #     "area": {
        #         # gate equivalent of a NAND2 gate
        #         "ge"     : <value>,
        #
        #         # summary report, in GE
        #         "comb"   : <value>,
        #         "buf"    : <value>,
        #         "reg"    : <value>,
        #         "macro"  : <value>,
        #         "total"  : <value>,
        #
        #         # hierarchical report of first submodule level
        #         "instances" : {
        #             <name> : {
        #               "comb"  : <value>,
        #               "buf"   : <value>,
        #               "reg"   : <value>,
        #               "macro" : <value>,
        #               "total" : <value>,
        #             },
        #             ...
        #         },
        #     },
        #
        #     "power": {
        #         "net"  : <value>,
        #         "int"  : <value>,
        #         "leak" : <value>,
        #     },
        #
        #     "units": {
        #         "voltage"     : <value>,
        #         "capacitance" : <value>,
        #         "time"        : <value>,
        #         "dynamic"     : <value>,
        #         "static"      : <value>,
        #     }
        # }
        #
        # note that if this is a primary config, the results will
        # be generated using the _gen_results_summary function
        # '''

        def _create_entry(val, norm=1.0, total=None, perctag="%"):
            """
            Create normalized entry with an optional
            percentage appended in brackets.
            """
            if val is not None and norm is not None:
                if total is not None:
                    perc = float(val) / float(total) * 100.0
                    entry = "%2.1f %s" % (perc, perctag)
                else:
                    value = float(val) / norm
                    entry = "%2.1f" % (value)
            else:
                entry = "--"

            return entry
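        # Example with hypothetical values: _create_entry(500, 1000.0)
        # returns "0.5", while _create_entry(500, 1000.0, total=2000)
        # returns "25.0 %".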

        self.result = {}

        # Generate results table for runs.
        results_str = "## " + self.results_title + "\n\n"
        results_str += "### " + self.timestamp_long + "\n"
        if self.revision_string:
            results_str += "### " + self.revision_string + "\n"
        results_str += "### Synthesis Tool: " + self.tool.upper() + "\n\n"

        # TODO: extend this to support multiple build modes
        for mode in self.build_modes:

            # results_str += "## Build Mode: " + mode.name + "\n\n"

            result_data = Path(
                subst_wildcards(self.build_dir, {"build_mode": mode.name}) +
                '/results.hjson')
            log.info("looking for result data file at %s", result_data)

            try:
                with result_data.open() as results_file:
                    self.result = hjson.load(results_file, use_decimal=True)
            except IOError as err:
                log.warning("%s", err)
                self.result = {
                    "messages": {
                        "flow_errors": ["IOError: %s" % err],
                        "flow_warnings": [],
                        "analyze_errors": [],
                        "analyze_warnings": [],
                        "elab_errors": [],
                        "elab_warnings": [],
                        "compile_errors": [],
                        "compile_warnings": [],
                    },
                }

            # Message summary
            # results_str += "### Tool Message Summary\n\n"
            if "messages" in self.result:

                header = [
                    "Build Mode", "Flow Warnings", "Flow Errors",
                    "Analyze Warnings", "Analyze Errors", "Elab Warnings",
                    "Elab Errors", "Compile Warnings", "Compile Errors"
                ]
                colalign = ("left", ) + ("center", ) * (len(header) - 1)
                table = [header]

                messages = self.result["messages"]
                table.append([
                    mode.name,
                    str(len(messages["flow_warnings"])) + " W ",
                    str(len(messages["flow_errors"])) + " E ",
                    str(len(messages["analyze_warnings"])) + " W ",
                    str(len(messages["analyze_errors"])) + " E ",
                    str(len(messages["elab_warnings"])) + " W ",
                    str(len(messages["elab_errors"])) + " E ",
                    str(len(messages["compile_warnings"])) + " W ",
                    str(len(messages["compile_errors"])) + " E ",
                ])

                if len(table) > 1:
                    results_str += tabulate(table,
                                            headers="firstrow",
                                            tablefmt="pipe",
                                            colalign=colalign) + "\n\n"
                else:
                    results_str += "No messages found\n\n"
            else:
                results_str += "No messages found\n\n"

            # Hierarchical Area report
            results_str += "### Circuit Complexity in [kGE]\n\n"
            if "area" in self.result:

                header = [
                    "Instance", "Comb ", "Buf/Inv", "Regs", "Macros", "Total",
                    "Total [%]"
                ]
                colalign = ("left", ) + ("center", ) * (len(header) - 1)
                table = [header]

                # print top-level summary first
                row = ["**" + self.result["top"] + "**"]
                try:
                    kge = float(self.result["area"]["ge"]) * 1000.0

                    for field in ["comb", "buf", "reg", "macro", "total"]:
                        row += [
                            "**" +
                            _create_entry(self.result["area"][field], kge) +
                            "**"
                        ]

                    row += ["**--**"]
                    table.append(row)

                    # go through submodules
                    for name in self.result["area"]["instances"].keys():
                        if name == self.result["top"]:
                            continue
                        row = [name]
                        for field in ["comb", "buf", "reg", "macro", "total"]:
                            row += [
                                _create_entry(
                                    self.result["area"]["instances"][name]
                                    [field], kge)
                            ]

                        # add percentage  of total
                        row += [
                            _create_entry(
                                self.result["area"]["instances"][name][field],
                                kge, self.result["area"]["total"], "%u")
                        ]

                        table.append(row)

                except TypeError:
                    results_str += "Gate equivalent is not properly defined\n\n"

                if len(table) > 1:
                    results_str += tabulate(table,
                                            headers="firstrow",
                                            tablefmt="pipe",
                                            colalign=colalign) + "\n\n"
                else:
                    results_str += "No area report found\n\n"
            else:
                results_str += "No area report found\n\n"

            # Timing report
            results_str += "### Timing in [ns]\n\n"
            if "timing" in self.result and "units" in self.result:

                header = ["Clock", "Period", "WNS", "TNS"]
                colalign = ("left", ) + ("center", ) * (len(header) - 1)
                table = [header]

                for clock in self.result["timing"].keys():
                    row = [clock]
                    row += [
                        _create_entry(
                            self.result["timing"][clock]["period"],
                            1.0E-09 / float(self.result["units"]["time"])),
                        _create_entry(
                            self.result["timing"][clock]["wns"], 1.0E-09 /
                            float(self.result["units"]["time"])) + " EN",
                        _create_entry(
                            self.result["timing"][clock]["tns"], 1.0E-09 /
                            float(self.result["units"]["time"])) + " EN"
                    ]
                    table.append(row)

                if len(table) > 1:
                    results_str += tabulate(table,
                                            headers="firstrow",
                                            tablefmt="pipe",
                                            colalign=colalign) + "\n\n"
                else:
                    results_str += "No timing report found\n\n"
            else:
                results_str += "No timing report found\n\n"

            # Power report
            results_str += "### Power Estimates in [mW]\n\n"
            if "power" in self.result and "units" in self.result:

                header = ["Network", "Internal", "Leakage", "Total"]
                colalign = ("center", ) * len(header)
                table = [header]

                try:
                    self.result["power"]["net"]

                    power = [
                        float(self.result["power"]["net"]) *
                        float(self.result["units"]["dynamic"]),
                        float(self.result["power"]["int"]) *
                        float(self.result["units"]["dynamic"]),
                        float(self.result["power"]["leak"]) *
                        float(self.result["units"]["static"])
                    ]

                    total_power = sum(power)

                    row = [_create_entry(power[0], 1.0E-3) + " / " +
                           _create_entry(power[0], 1.0E-3, total_power),
                           _create_entry(power[1], 1.0E-3) + " / " +
                           _create_entry(power[1], 1.0E-3, total_power),
                           _create_entry(power[2], 1.0E-3) + " / " +
                           _create_entry(power[2], 1.0E-3, total_power),
                           _create_entry(total_power, 1.0E-3)]

                    table.append(row)
                # in case fp values are NoneType
                except TypeError:
                    results_str += "No power report found\n\n"

                if len(table) > 1:
                    results_str += tabulate(table,
                                            headers="firstrow",
                                            tablefmt="pipe",
                                            colalign=colalign) + "\n\n"
            else:
                results_str += "No power report found\n\n"

            # Append detailed messages if they exist
            # Note that these messages are omitted in publication mode
            hdr_key_pairs = [("Flow Warnings", "flow_warnings"),
                             ("Flow Errors", "flow_errors"),
                             ("Analyze Warnings", "analyze_warnings"),
                             ("Analyze Errors", "analyze_errors"),
                             ("Elab Warnings", "elab_warnings"),
                             ("Elab Errors", "elab_errors"),
                             ("Compile Warnings", "compile_warnings"),
                             ("Compile Errors", "compile_errors")]

            # Synthesis fails if any warning or error message has occurred
            self.errors_seen = False
            fail_msgs = ""
            for _, key in hdr_key_pairs:
                if key in self.result['messages']:
                    if self.result['messages'].get(key):
                        self.errors_seen = True
                        break

            if self.errors_seen:
                fail_msgs += "\n### Errors and Warnings for Build Mode `'" + mode.name + "'`\n"
                for hdr, key in hdr_key_pairs:
                    msgs = self.result['messages'].get(key)
                    fail_msgs += print_msg_list("#### " + hdr, msgs, self.max_msg_count)

            # the email and published reports will default to self.results_md if they are
            # empty. in case they need to be sanitized, override them and do not append
            # detailed messages.
            if self.sanitize_email_results:
                self.email_results_md = results_str
            if self.sanitize_publish_results:
                self.publish_results_md = results_str

            # locally generated result always contains all details
            self.results_md = results_str + fail_msgs

            # TODO: add support for pie / bar charts for area splits and
            # QoR history

        # Write results to the scratch area
        self.results_file = self.scratch_path + "/results_" + self.timestamp + ".md"
        log.info("Detailed results are available at %s", self.results_file)
        with open(self.results_file, 'w') as f:
            f.write(self.results_md)

        return self.results_md
Example #29
 def test_strip_bom(self):
     content = u"\u3053\u3093\u306b\u3061\u308f"
     json_doc = codecs.BOM_UTF8 + b(json.dumpsJSON(content))
     self.assertEqual(json.load(BytesIO(json_doc)), content)
     for doc in json_doc, json_doc.decode('utf8'):
         self.assertEqual(json.loads(doc), content)
Example #30
import unittest
import hjson
import itertools

from glypy.structure import named_structures, constants, monosaccharide, substituent, glycan
from glypy.composition import structure_composition, Composition, composition_transform
from glypy.io import glycoct

from common import StringIO, load, pickle

monosaccharide_structures = hjson.load(
    open("./glypy/structure/data/monosaccharides.hjson"))

wiki_masses = {
    "Iduronic Acid": 194.04,
    "Bacillosamine": 162.10,
    "Allose": 180.06,
}

ReducedEnd = monosaccharide.ReducedEnd


class MonosaccharideTests(unittest.TestCase):
    _file_path = "./test_data/glycoct.txt"
    glycan = iter(glycoct.read(_file_path)).next()

    def test_from_glycoct(self):
        s = self.glycan.root.to_glycoct()
        b = StringIO(s)
        g = iter(glycoct.read(b)).next()
        self.assertEqual(g.root.to_glycoct(), s)
Example #31
def main():
    parser = argparse.ArgumentParser(prog="tlgen")
    parser.add_argument('--topcfg',
                        '-t',
                        metavar='file',
                        type=argparse.FileType('r'),
                        help="`top_cfg.hjson` file.")
    parser.add_argument('--doc',
                        '-d',
                        action='store_true',
                        help='Generate self HTML document in stdout')
    parser.add_argument(
        '--outdir',
        '-o',
        help=
        "Target directory. tlgen needs 'rtl/' and 'dv/' directory under the target dir"
    )
    parser.add_argument('--ip-path',
                        default="",
                        help='''
        Additional path to generated rtl/ or dv/ folders: outdir/ip_path/rtl
        Only needed when there are multiple xbar in outdir''')
    parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')

    args = parser.parse_args()

    if args.verbose:
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    if args.doc:
        # Generate Doc and return
        sys.stdout.write(tlgen.selfdoc(heading=3, cmd='tlgen.py --doc'))
        return

    # Check if topcfg defined
    if not args.topcfg or not args.outdir:
        log.error("--topcfg option is mandatory to generate codes.")

    # Check if outdir exists. If not, show error and exit
    if not Path(args.outdir).is_dir():
        log.error("'--outdir' should point to writable directory")

    # Load contents of top_cfg
    # Skip this part and use internal structure at this time
    try:
        obj = hjson.load(args.topcfg, use_decimal=True)
    except ValueError:
        raise SystemExit(sys.exc_info()[1])

    log.info(obj)

    xbar = tlgen.validate(obj)
    xbar.ip_path = args.ip_path

    if not tlgen.elaborate(xbar):
        log.error("Elaboration failed." + repr(xbar))

    # Generate
    out_rtl, out_pkg, out_core = tlgen.generate(xbar)

    rtl_path = Path(args.outdir) / args.ip_path / 'rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    dv_path = Path(args.outdir) / args.ip_path / 'dv/autogen'
    dv_path.mkdir(parents=True, exist_ok=True)

    rtl_filename = "xbar_%s.sv" % (xbar.name)
    rtl_filepath = rtl_path / rtl_filename
    with rtl_filepath.open(mode='w', encoding='UTF-8') as fout:
        fout.write(out_rtl)

    pkg_filename = "tl_%s_pkg.sv" % (xbar.name)
    pkg_filepath = rtl_path / pkg_filename
    with pkg_filepath.open(mode='w', encoding='UTF-8') as fout:
        fout.write(out_pkg)

    core_filename = "xbar_%s.core" % (xbar.name)
    core_filepath = rtl_path / core_filename
    with core_filepath.open(mode='w', encoding='UTF-8') as fout:
        fout.write(out_core)

    # generate TB
    tlgen.generate_tb(xbar, dv_path)
Example #32
File: log_reader.py Project: superplay1/c3
import argparse
import hjson
import numpy as np
from c3.utils.utils import num3str
from rich.console import Console
from rich.table import Table


parser = argparse.ArgumentParser()
parser.add_argument("log_file")
args = parser.parse_args()

log = None

try:
    with open(args.log_file) as file:
        log = hjson.load(file)
except FileNotFoundError:
    print("Logfile not found.")

if log:
    opt_map = log["opt_map"]
    optim_status = log["optim_status"]
    units = log["units"]
    params = optim_status["params"]
    grads = optim_status["gradient"]

    print(f"Optimization reached {optim_status['goal']:0.3g} at {optim_status['time']}\n")
    table = Table(show_header=True, header_style="bold magenta")
    table.add_column("Parameter")
    table.add_column("Value", justify="right")
    table.add_column("Gradient", justify="right")
Example #33
def main():

    current = Path(__file__).parent.absolute()

    hjson_tpl = Template(filename=str(current / '../data/rstmgr.hjson.tpl'))
    rtl_tpl   = Template(filename=str(current / '../data/rstmgr.sv.tpl'))
    pkg_tpl   = Template(filename=str(current / '../data/rstmgr_pkg.sv.tpl'))

    hjson_out = current / '../data/rstmgr.hjson'
    rtl_out   = current / '../rtl/rstmgr.sv'
    pkg_out   = current / '../rtl/rstmgr_pkg.sv'

    cfgpath   = current / '../../../top_earlgrey/data/autogen/top_earlgrey.gen.hjson'

    try:
        with open(cfgpath, 'r') as cfg:
            topcfg = hjson.load(cfg, use_decimal=True, object_pairs_hook=OrderedDict)
    except ValueError:
        log.error("{} not found".format(cfgpath))
        raise SystemExit(sys.exc_info()[1])

    # Parameters needed for generation
    clks = []
    output_rsts = OrderedDict()
    sw_rsts = OrderedDict()
    leaf_rsts = OrderedDict()

    # unique clocks
    for rst in topcfg["resets"]["nodes"]:
        if rst['type'] != "ext" and rst['clk'] not in clks:
            clks.append(rst['clk'])

    # resets sent to reset struct
    output_rsts = [rst for rst in topcfg["resets"]["nodes"] if rst['type'] == "top"]

    # sw controlled resets
    sw_rsts = [rst for rst in topcfg["resets"]["nodes"] if 'sw' in rst and rst['sw'] == 1]

    # leaf resets
    leaf_rsts = [rst for rst in topcfg["resets"]["nodes"] if rst['gen']]

    log.info("output resets {}".format(output_rsts))
    log.info("software resets {}".format(sw_rsts))
    log.info("leaf resets {}".format(leaf_rsts))

    # Number of reset requests
    n_rstreqs = len(topcfg["reset_requests"])

    # generate hjson
    hjson_out.write_text(
         hjson_tpl.render(clks=clks,
                          power_domains=topcfg['power']['domains'],
                          num_rstreqs=n_rstreqs,
                          sw_rsts=sw_rsts,
                          output_rsts=output_rsts,
                          leaf_rsts=leaf_rsts,
                          export_rsts=topcfg['exported_rsts']))

    # generate rtl package
    pkg_out.write_text(
        pkg_tpl.render(clks=clks,
                       power_domains=topcfg['power']['domains'],
                       num_rstreqs=n_rstreqs,
                       sw_rsts=sw_rsts,
                       output_rsts=output_rsts,
                       leaf_rsts=leaf_rsts,
                       export_rsts=topcfg['exported_rsts']))

    # generate top level
    rtl_out.write_text(
         rtl_tpl.render(clks=clks,
                        power_domains=topcfg['power']['domains'],
                        num_rstreqs=n_rstreqs,
                        sw_rsts=sw_rsts,
                        output_rsts=output_rsts,
                        leaf_rsts=leaf_rsts,
                        export_rsts=topcfg['exported_rsts']))
Example #34
def main():

    # Display INFO log messages and up.
    log.basicConfig(level=log.INFO, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        prog="fpga_size_reduce",
        description=wrapped_docstring(),
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--toppath',
                        '-t',
                        default='.',
                        help='provide top path.')

    parser.add_argument('--target',
                        '-g',
                        default='nexysvideo',
                        choices=['nexysvideo'],
                        help='fpga reduction target')

    parser.add_argument('--build',
                        '-b',
                        default=False,
                        action='store_true',
                        help='Build ROM based on reduced design')

    args = parser.parse_args()

    # Get path to top-level directory
    top_path = args.toppath
    top_hjson = top_path + '/hw/top_earlgrey/data/top_earlgrey.hjson'
    orig_hjson = top_hjson + '.orig'

    # Modify hjson to change flash size
    with open(top_hjson, "r") as hjson_file:
        cfg = hjson.load(hjson_file, use_decimal=True)

    # write out original version reformatted
    with open(orig_hjson, "w") as hjson_file:
        hjson_file.write(orighdr + hjson.dumps(cfg))

    # update value based on target selection
    globals()["_{}_reduce".format(args.target)](cfg)

    # write back updated hjson
    with open(top_hjson, "w") as hjson_file:
        hjson_file.write(genhdr + hjson.dumps(cfg))

    # Regenerate auto-generated files
    print("Regenerating all auto-generated files...")
    cmd = ["make", "-C", top_path + "/hw"]
    try:
        subprocess.run(cmd,
                       check=True,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       universal_newlines=True)

    except subprocess.CalledProcessError as e:
        log.error("Failed to regenerate auto-generated files: " + str(e))
        log.error(e.stdout)
        sys.exit(1)

    # Regenerate boot ROM
    if (args.build):
        log.info("Regenerating boot ROM...")
        cmd = [
            "ninja", "-C", top_path + "/build-out",
            "sw/device/lib/testing/test_rom/test_rom_export_fpga_nexysvideo"
        ]
        try:
            subprocess.run(cmd,
                           check=True,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           universal_newlines=True)

        except subprocess.CalledProcessError as e:
            log.error("Failed to regenerate boot ROM: " + str(e))
            log.error(e.stdout)
            sys.exit(1)

    return 0
Example #35
def load_configuration_from_path(path, apply=True):
    cfg = hjson.load(open(path))
    if apply:
        process(cfg)
    return cfg
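The file handle opened inline above is never closed explicitly. A context-manager variant with the same behaviour (sketch only):

def load_configuration_from_path(path, apply=True):
    with open(path) as fp:
        cfg = hjson.load(fp)
    if apply:
        process(cfg)
    return cfg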
Example #36
    def _gen_results(self):
        # '''
        # The function is called after the regression has completed. It looks
        # for a regr_results.hjson file with aggregated results from the lint run.
        # The hjson needs to have the following (potentially empty) fields
        #
        # {
        #   tool: ""
        #   errors: []
        #   warnings: []
        #   lint_errors: []
        #   lint_warning: []
        #   lint_infos: []
        # }
        #
        # where each entry is a string representing a lint message. This allows
        # to reuse the same LintCfg class with different tools since just the
        # parsing script that transforms the tool output into the hjson above
        # needs to be adapted.
        #
        # note that if this is a master config, the results will
        # be generated using the _gen_results_summary function
        # '''

        # Generate results table for runs.
        results_str = "## " + self.results_title + "\n\n"
        results_str += "### " + self.timestamp_long + "\n"
        results_str += "### Lint Tool: " + self.tool.upper() + "\n\n"

        header = [
            "Build Mode", "Tool Warnings", "Tool Errors", "Lint Warnings",
            "Lint Errors"
        ]
        colalign = ("center", ) * len(header)
        table = [header]

        # aggregated counts
        self.result_summary["warnings"] = []
        self.result_summary["errors"] = []
        self.result_summary["lint_warnings"] = []
        self.result_summary["lint_errors"] = []

        fail_msgs = ""
        for mode in self.build_modes:

            result_data = Path(
                subst_wildcards(self.build_dir, {"build_mode": mode.name}) +
                '/results.hjson')
            log.info("looking for result data file at %s", result_data)

            try:
                with open(result_data, "r") as results_file:
                    self.result = hjson.load(results_file, use_decimal=True)
            except IOError as err:
                log.warning("%s", err)
                self.result = {
                    "tool": "",
                    "errors": ["IOError: %s" % err],
                    "warnings": [],
                    "lint_errors": [],
                    "lint_warnings": [],
                    "lint_infos": []
                }
            if self.result:
                table.append([
                    mode.name,
                    str(len(self.result["warnings"])) + " W ",
                    str(len(self.result["errors"])) + " E",
                    # We currently do not publish the info count:
                    # len(self.result["lint_infos"]),
                    str(len(self.result["lint_warnings"])) + " W",
                    str(len(self.result["lint_errors"])) + " E"
                ])
            else:
                self.result = {
                    "tool": "",
                    "errors": [],
                    "warnings": [],
                    "lint_errors": [],
                    "lint_warnings": [],
                    "lint_infos": []
                }

            self.result_summary["warnings"] += self.result["warnings"]
            self.result_summary["errors"] += self.result["errors"]
            self.result_summary["lint_warnings"] += self.result[
                "lint_warnings"]
            self.result_summary["lint_errors"] += self.result["lint_errors"]

            # Append detailed messages if they exist
            hdr_key_pairs = [("Tool Warnings", "warnings"),
                             ("Tool Errors", "errors"),
                             ("Lint Warnings", "lint_warnings"),
                             ("Lint Errors", "lint_errors")]

            has_msg = False
            for _, key in hdr_key_pairs:
                if key in self.result:
                    has_msg = True
                    break

            if has_msg:
                fail_msgs += "\n### Errors and Warnings for Build Mode `'" + mode.name + "'`\n"
                for hdr, key in hdr_key_pairs:
                    msgs = self.result.get(key)
                    fail_msgs += print_msg_list("#### " + hdr, msgs,
                                                self.max_msg_count)

        if len(table) > 1:
            self.results_md = results_str + tabulate(
                table, headers="firstrow", tablefmt="pipe",
                colalign=colalign) + "\n" + fail_msgs
        else:
            self.results_md = results_str + "\nNo results to display.\n"

        # Write results to the scratch area
        self.results_file = self.scratch_path + "/results_" + self.timestamp + ".md"
        with open(self.results_file, 'w') as f:
            f.write(self.results_md)

        log.info("[results page]: [%s] [%s]", self.name, results_file)
        return self.results_md
Example #37
File: topgen.py Project: poena/opentitan
def generate_rstmgr(topcfg, out_path):
    log.info("Generating rstmgr")

    # Define target path
    rtl_path = out_path / 'ip/rstmgr/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    doc_path = out_path / 'ip/rstmgr/data/autogen'
    doc_path.mkdir(parents=True, exist_ok=True)
    tpl_path = out_path / '../ip/rstmgr/data'

    # Read template files from ip directory.
    tpls = []
    outputs = []
    names = ['rstmgr.hjson', 'rstmgr.sv', 'rstmgr_pkg.sv']

    for x in names:
        tpls.append(tpl_path / Path(x + ".tpl"))
        if "hjson" in x:
            outputs.append(doc_path / Path(x))
        else:
            outputs.append(rtl_path / Path(x))

    # Parameters needed for generation
    clks = []
    output_rsts = OrderedDict()
    sw_rsts = OrderedDict()
    leaf_rsts = OrderedDict()

    # unique clocks
    for rst in topcfg["resets"]["nodes"]:
        if rst['type'] != "ext" and rst['clk'] not in clks:
            clks.append(rst['clk'])

    # resets sent to reset struct
    output_rsts = [
        rst for rst in topcfg["resets"]["nodes"] if rst['type'] == "top"
    ]

    # sw controlled resets
    sw_rsts = [
        rst for rst in topcfg["resets"]["nodes"]
        if 'sw' in rst and rst['sw'] == 1
    ]

    # leaf resets
    leaf_rsts = [rst for rst in topcfg["resets"]["nodes"] if rst['gen']]

    log.info("output resets {}".format(output_rsts))
    log.info("software resets {}".format(sw_rsts))
    log.info("leaf resets {}".format(leaf_rsts))

    # Number of reset requests
    n_rstreqs = len(topcfg["reset_requests"])

    # Generate templated files
    for idx, t in enumerate(tpls):
        out = StringIO()
        with t.open(mode='r', encoding='UTF-8') as fin:
            tpl = Template(fin.read())
            try:
                out = tpl.render(clks=clks,
                                 num_rstreqs=n_rstreqs,
                                 sw_rsts=sw_rsts,
                                 output_rsts=output_rsts,
                                 leaf_rsts=leaf_rsts,
                                 export_rsts=topcfg['exported_rsts'])

            except:  # noqa: E722
                log.error(exceptions.text_error_template().render())

        if out == "":
            log.error("Cannot generate {}".format(names[idx]))
            return

        with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
            fout.write(genhdr + out)

    # Generate reg files
    hjson_path = outputs[0]
    with open(str(hjson_path), 'r') as out:
        hjson_obj = hjson.load(out,
                               use_decimal=True,
                               object_pairs_hook=OrderedDict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))
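A note on the hjson.load flags used above: this small, self-contained sketch (with made-up keys) shows what use_decimal and object_pairs_hook buy us when round-tripping generated Hjson.

from collections import OrderedDict
from io import StringIO

import hjson

doc = StringIO("""
{
    // hjson allows comments and quoteless strings
    name: rstmgr
    freq_mhz: 100.0
}
""")

cfg = hjson.load(doc, use_decimal=True, object_pairs_hook=OrderedDict)
print(type(cfg).__name__)  # OrderedDict: deterministic key order in the output
print(cfg["freq_mhz"])     # a Decimal, so 100.0 survives a re-dump unchanged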
Example #38
0
def parse_args_config():
    config_parser = argparse.ArgumentParser(add_help=False)
    config_parser.add_argument('--config')
    config_args = config_parser.parse_known_args()
    
    config_path = config_args[0].config
    
    parser = argparse.ArgumentParser(description='')
    
    parser.add_argument('--config', required=True,
        help='config file')
    parser.add_argument('--labels', required=False,
        help='labels file')
    
    parser.add_argument('--bias',
        help='overrides the mode in the config file with `bias` and the '
            '--loadcell-out params with the provided value')
    parser.add_argument('--monitor', action='store_true',
        help="overrides the mode in the config file with `monitor`")
    
    parser.add_argument('--overwrite', action='store_true',
        help='overwrite existing output files')
    
    parser.add_argument('--no-start-pulse', action='store_true',
        help='do not wait for plexon start pulse or enter press, skips initial 3 second wait')
    parser.add_argument('--loadcell-out', default='./loadcell_tilt.csv',
        help='file to write loadcell csv data to (default ./loadcell_tilt.csv)')
    parser.add_argument('--no-record', action='store_true',
        help='skip recording loadcell data')
    parser.add_argument('--live', action='store_true',
        help='show real time data')
    parser.add_argument('--live-cal', action='store_true',
        help='show real time calibrated data')
    parser.add_argument('--live-bias',
        help='path to a bias file or glob pattern matching the bias file for live view')
    parser.add_argument('--live-secs', type=int, default=5,
        help='number of seconds to keep data for in live view (5)')
    
    parser.add_argument('--template-out',
        help='output path for generated template')
    parser.add_argument('--template-in',
        help='input path for template')
    
    parser.add_argument('--no-spike-wait', action='store_true',
        help=argparse.SUPPRESS)
    parser.add_argument('--fixed-spike-wait', action='store_true',
        help=argparse.SUPPRESS)
    parser.add_argument('--retry', action='store_true',
        help=argparse.SUPPRESS)
    parser.add_argument('--mock', action='store_true',
        help=argparse.SUPPRESS)
    parser.add_argument('--dbg-motor-control', action='store_true',
        help=argparse.SUPPRESS)
    
    if config_path is not None:
        raw_args = copy(sys.argv[1:])
        with open(config_path) as f:
            config_data = hjson.load(f)
        
        if '-' in config_data:
            raw_args.extend(config_data['-'])
            del config_data['-']
        
        for k, v in config_data.items():
            if k.startswith('-'):
                raw_args.append(f"{k}")
                if v is not None:
                    raw_args.append(str(v))
    else:
        raw_args = None
    
    args = parser.parse_args(args=raw_args)
    
    return args
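For illustration, a hypothetical config file for the loader above: the '-' key carries extra raw arguments, every other '-'-prefixed key becomes a flag, and a null value yields a bare store_true-style flag.

import hjson

config_text = """
{
    "-": ["--monitor"]
    "--loadcell-out": "out.csv"
    "--live-secs": 10
    "--overwrite": null
}
"""

config_data = hjson.loads(config_text)
raw_args = []
if '-' in config_data:
    raw_args.extend(config_data['-'])
    del config_data['-']
for k, v in config_data.items():
    if k.startswith('-'):
        raw_args.append(k)
        if v is not None:
            raw_args.append(str(v))

print(raw_args)
# ['--monitor', '--loadcell-out', 'out.csv', '--live-secs', '10', '--overwrite']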
Example #39
0
 def make_planners_params(self, classifier_model_dir: pathlib.Path,
                          full_dynamics_model_dirs: List[pathlib.Path],
                          udnn_model_dirs: List[pathlib.Path],
                          planners_params_common_filename: pathlib.Path,
                          planning_evaluation_params: Dict,
                          recovery_model_dir: pathlib.Path):
     # NOTE: the order of planner_params is going to affect the name of the subfolder that results are put in.
     #  this matters when comparing multiple methods that share the same "method name".
     planners_params = []
     for method_name in planning_evaluation_params['methods']:
         with planners_params_common_filename.open(
                 'r') as planners_params_common_file:
             planner_params = hjson.load(planners_params_common_file)
         if method_name == "classifier":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in udnn_model_dirs
             ]
             method_classifier_model_dir = [
                 classifier_model_dir / 'best_checkpoint'
             ]
             recovery = {'use_recovery': False}
         elif method_name == "learned_recovery":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in udnn_model_dirs
             ]
             method_classifier_model_dir = [
                 classifier_model_dir / 'best_checkpoint'
             ]
             recovery = {
                 'recovery_model_dir':
                 recovery_model_dir / 'best_checkpoint',
                 'use_recovery': True,
             }
         elif method_name == "random_recovery_no_classifier":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in udnn_model_dirs
             ]
             method_classifier_model_dir = [
                 pathlib.Path('cl_trials/none_baseline/none')
             ]
             link_bot_planning_path = pathlib.Path(
                 r.get_path('link_bot_planning'))
             recovery = {
                 'recovery_model_dir':
                 link_bot_planning_path / 'recovery_trials' / 'random' /
                 'random',
                 'use_recovery':
                 True,
             }
         elif method_name == "random_recovery":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in udnn_model_dirs
             ]
             method_classifier_model_dir = [
                 classifier_model_dir / 'best_checkpoint'
             ]
             link_bot_planning_path = pathlib.Path(
                 r.get_path('link_bot_planning'))
             recovery = {
                 'recovery_model_dir':
                 link_bot_planning_path / 'recovery_trials' / 'random' /
                 'random',
                 'use_recovery':
                 True,
             }
         elif method_name == "no_classifier":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in udnn_model_dirs
             ]
             method_classifier_model_dir = [
                 pathlib.Path('cl_trials/none_baseline/none')
             ]
             recovery = {'use_recovery': False}
         elif method_name == "full_dynamics":
             method_fwd_model_dirs = [
                 d / 'best_checkpoint' for d in full_dynamics_model_dirs
             ]
             method_classifier_model_dir = [
                 pathlib.Path('cl_trials/none_baseline/none')
             ]
             recovery = {'use_recovery': False}
         else:
             raise NotImplementedError(
                 f"Method {method_name} not implemented")
         planner_params['fwd_model_dir'] = method_fwd_model_dirs
         planner_params[
             'classifier_model_dir'] = method_classifier_model_dir
         planner_params['recovery'] = recovery
         planner_params['method_name'] = method_name
         planners_params.append((method_name, planner_params))
     return planners_params
Example #40
0
pydir = os.path.dirname(os.path.abspath(__file__))
basedir = os.path.dirname(pydir)
confdir = os.path.join(basedir,"config")
tpldir = os.path.join(basedir,"tpl")


cfg_defaults = {}
hosts = {}

filename = "virt-install-cmd.xen.hjson"
in_file = os.path.join(tpldir,filename)

#print "read " + in_file

with open(in_file,'r') as f:
    dict =  hjson.load(f)


for k in dict['cfg'].keys():
    #print "key=%s\n" % k
    pp(dict['cfg'][k])


    # cfg['initial-cluster-string'] = getCoreosInitialClusterString()
    # cfg['install-img-path'] = getInstallImgPath()
    # cfg['install-img-format'] = cfg['disks']['disk0']['img-format']
    # cfg['install-bridge'] = cfg['nics']['nic0']['bridge']
    # cfg['install-mac'] = cfg['nics']['nic0']['mac']

Example #41
0
def generate_clkmgr(top, cfg_path, out_path):

    # Target paths
    rtl_path = out_path / 'ip/clkmgr/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    data_path = out_path / 'ip/clkmgr/data/autogen'
    data_path.mkdir(parents=True, exist_ok=True)

    # Template paths
    hjson_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.hjson.tpl'
    rtl_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.sv.tpl'
    pkg_tpl = cfg_path / '../ip/clkmgr/data/clkmgr_pkg.sv.tpl'

    hjson_out = data_path / 'clkmgr.hjson'
    rtl_out = rtl_path / 'clkmgr.sv'
    pkg_out = rtl_path / 'clkmgr_pkg.sv'

    tpls = [hjson_tpl, rtl_tpl, pkg_tpl]
    outputs = [hjson_out, rtl_out, pkg_out]
    names = ['clkmgr.hjson', 'clkmgr.sv', 'clkmgr_pkg.sv']

    # clock classification
    grps = top['clocks']['groups']

    src_aon_attr = OrderedDict()
    ft_clks = OrderedDict()
    rg_clks = OrderedDict()
    sw_clks = OrderedDict()
    hint_clks = OrderedDict()

    # construct a dictionary of the aon attribute for easier lookup
    # ie, src_name_A: True, src_name_B: False
    for src in top['clocks']['srcs']:
        if src['aon'] == 'yes':
            src_aon_attr[src['name']] = True
        else:
            src_aon_attr[src['name']] = False

    rg_srcs = [src for (src, attr) in src_aon_attr.items() if not attr]

    # clocks fed through clkmgr but are not disturbed in any way
    # This maintains the clocking structure consistency
    ft_clks = {
        clk: src
        for grp in grps for (clk, src) in grp['clocks'].items()
        if src_aon_attr[src]
    }

    # root-gate clocks
    rg_clks = {
        clk: src
        for grp in grps for (clk, src) in grp['clocks'].items()
        if grp['name'] != 'powerup' and grp['sw_cg'] == 'no'
        and not src_aon_attr[src]
    }

    # direct sw control clocks
    sw_clks = {
        clk: src
        for grp in grps for (clk, src) in grp['clocks'].items()
        if grp['sw_cg'] == 'yes' and not src_aon_attr[src]
    }

    # sw hint clocks
    hint_clks = {
        clk: src
        for grp in grps for (clk, src) in grp['clocks'].items()
        if grp['sw_cg'] == 'hint' and not src_aon_attr[src]
    }

    for idx, tpl in enumerate(tpls):
        out = ""  # default to empty string so a failed render is caught below
        with tpl.open(mode='r', encoding='UTF-8') as fin:
            tpl = Template(fin.read())
            try:
                out = tpl.render(cfg=top,
                                 rg_srcs=rg_srcs,
                                 ft_clks=ft_clks,
                                 rg_clks=rg_clks,
                                 sw_clks=sw_clks,
                                 hint_clks=hint_clks)
            except:  # noqa: E722
                log.error(exceptions.text_error_template().render())

        if out == "":
            log.error("Cannot generate {}".format(names[idx]))
            return

        with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
            fout.write(genhdr + out)

    # Generate reg files
    with open(str(hjson_out), 'r') as out:
        hjson_obj = hjson.load(out,
                               use_decimal=True,
                               object_pairs_hook=OrderedDict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))
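A toy illustration of the clock classification above; the group and source names below are made up.

grps = [
    {'name': 'powerup', 'sw_cg': 'no',   'clocks': {'clk_powerup': 'aon'}},
    {'name': 'trans',   'sw_cg': 'hint', 'clocks': {'clk_aes': 'main'}},
    {'name': 'infra',   'sw_cg': 'no',   'clocks': {'clk_infra': 'main'}},
]
src_aon_attr = {'aon': True, 'main': False}

ft_clks = {clk: src for grp in grps for (clk, src) in grp['clocks'].items()
           if src_aon_attr[src]}
rg_clks = {clk: src for grp in grps for (clk, src) in grp['clocks'].items()
           if grp['name'] != 'powerup' and grp['sw_cg'] == 'no'
           and not src_aon_attr[src]}
hint_clks = {clk: src for grp in grps for (clk, src) in grp['clocks'].items()
             if grp['sw_cg'] == 'hint' and not src_aon_attr[src]}

print(ft_clks)    # {'clk_powerup': 'aon'}: always-on, fed through untouched
print(rg_clks)    # {'clk_infra': 'main'}: root-gated
print(hint_clks)  # {'clk_aes': 'main'}: software hint controlled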
Example #42
0
    def pass_to_pipeline_if_article(self,
                                    response,
                                    source_domain,
                                    original_url,
                                    rss_title=None):
        """
        Responsible for passing a NewscrawlerItem to the pipeline if the
        response contains an article.

        :param obj response: the scrapy response to work on
        :param str source_domain: the response's domain as set for the crawler
        :param str original_url: the url set in the json file
        :param str rss_title: the title extracted by an rssCrawler
        :return NewscrawlerItem: NewscrawlerItem to pass to the pipeline
        """

        repo_folder_path = os.getenv('newspapers_analytics_path')
        f = open(
            os.path.join(repo_folder_path,
                         'json_data\\logs\\already_cleaned.json'), 'r')
        already_cleaned = json.load(f)
        f.close()
        config_path = pd.read_csv(
            os.path.join(repo_folder_path,
                         'configs_path.csv'))['config_path'].tolist()[0]
        f = open(os.path.join(config_path, 'sitelist.hjson'), 'r')
        nc = hjson.load(f)
        #newspaper_url=nc['base_urls'][0]['url']
        newspaper_url = original_url
        f.close()
        not_found = True
        for key_ in already_cleaned.keys():
            if newspaper_url.find(key_) != -1:
                f = open(
                    repo_folder_path + '\\json_data\\already_crawled_urls\\' +
                    key_ + '.json', 'r')
                already_urls = json.load(f)
                already_urls = already_urls[key_]
                f.close()
                not_found = False
                print('-' * 200)
                print('Check: ', key_, newspaper_url)
                print('Connection not found: ', not_found)
                break

        if not_found:
            print('-' * 200)
            print('Check: ', newspaper_url)
            print('Connection not found: ', not_found)
            already_urls = []

        time.sleep(10)

        if self.helper.heuristics.is_article(response, original_url) \
                and response.url not in already_urls:
            print('*' * 100)
            print('New article found')
            print('*' * 100)
            return self.pass_to_pipeline(response,
                                         source_domain,
                                         rss_title=rss_title)
Example #43
0
    def _gen_results(self, results):
        # This function is called after the regression and looks for a
        # results.hjson file with aggregated results from the formal logfile.
        # The hjson file is required to follow this format:
        # {
        #   "messages": {
        #      "errors"      : []
        #      "warnings"    : []
        #      "cex"         : ["property1", "property2"...],
        #      "undetermined": [],
        #      "unreachable" : [],
        #   },
        #
        #   "summary": {
        #      "errors"      : 0
        #      "warnings"    : 2
        #      "proven"      : 20,
        #      "cex"         : 5,
        #      "covered"     : 18,
        #      "undetermined": 7,
        #      "unreachable" : 2,
        #      "pass_rate"   : "90 %",
        #      "cover_rate"  : "90 %"
        #   },
        # }
        # The categories for property results are: proven, cex, undetermined,
        # covered, and unreachable.
        #
        # If coverage was enabled then results.hjson will also have an item that
        # shows formal coverage. It will have the following format:
        #   "coverage": {
        #      stimuli: "90 %",
        #      coi    : "90 %",
        #      proof  : "80 %"
        #   }
        results_str = "## " + self.results_title + "\n\n"
        results_str += "### " + self.timestamp_long + "\n"
        if self.revision:
            results_str += "### " + self.revision + "\n"
        results_str += "### Branch: " + self.branch + "\n"
        results_str += "### Tool: " + self.tool.upper() + "\n"
        summary = [self.name]  # cfg summary for publish results

        assert len(self.deploy) == 1
        mode = self.deploy[0]

        if results[mode] == "P":
            result_data = Path(
                subst_wildcards(self.build_dir, {"build_mode": mode.name}),
                'results.hjson')
            try:
                with open(result_data, "r") as results_file:
                    self.result = hjson.load(results_file, use_decimal=True)
            except IOError as err:
                log.warning("%s", err)
                self.result = {
                    "messages": {
                        "errors": ["IOError: %s" % err],
                    }
                }

        results_str += "\n\n## Formal " + self.sub_flow.upper() + " Results\n"
        formal_result_str, formal_summary = self.get_summary(self.result)
        results_str += formal_result_str
        summary += formal_summary

        if self.cov:
            results_str += "\n\n## Coverage Results\n"
            results_str += ("### Coverage html file dir: " +
                            self.scratch_path + "/default/formal-icarus\n\n")
            cov_result_str, cov_summary = self.get_coverage(self.result)
            results_str += cov_result_str
            summary += cov_summary
        else:
            summary += ["N/A", "N/A", "N/A"]

        if results[mode] != "P":
            results_str += "\n## List of Failures\n" + ''.join(
                mode.launcher.fail_msg)

        messages = self.result.get("messages")
        if messages is not None:
            results_str += self.parse_dict_to_str(messages)

        # Write results to the scratch area
        self.results_md = results_str
        results_file = self.scratch_path + "/results_" + self.timestamp + ".md"
        with open(results_file, 'w') as f:
            f.write(self.results_md)

        # Generate result summary
        self.result_summary[self.name] = summary

        log.log(VERBOSE, "[results page]: [%s] [%s]", self.name, results_file)
        return self.results_md
Example #44
0
 def load_global_msg(self):
     with open("core/global.msg", mode="r", encoding="UTF-8") as f:
         return hjson.load(f)
Example #45
0
def main():
    log.basicConfig(level=log.INFO, format="%(levelname)s: %(message)s")

    # Make sure the script can also be called from other dirs than
    # just the project root by adapting the default paths accordingly.
    proj_root = Path(__file__).parent.joinpath('../../')
    lc_state_def_file = Path(proj_root).joinpath(LC_STATE_DEFINITION_FILE)
    mmap_def_file = Path(proj_root).joinpath(MMAP_DEFINITION_FILE)
    img_def_file = Path(proj_root).joinpath(IMAGE_DEFINITION_FILE)
    hex_file = Path(MEMORY_HEX_FILE)

    parser = argparse.ArgumentParser(
        prog="gen-otp-img",
        description=wrapped_docstring(),
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.register('action', 'extend', ExtendAction)
    parser.add_argument('--quiet',
                        '-q',
                        action='store_true',
                        help='''Don't print out progress messages.''')
    parser.add_argument('--seed',
                        type=int,
                        metavar='<seed>',
                        help="Custom seed used for randomization.")
    parser.add_argument('--img-seed',
                        type=int,
                        metavar='<seed>',
                        help='''
                        Custom seed for RNG to compute randomized items in OTP image.

                        Can be used to override the seed value specified in the image
                        config Hjson.
                        ''')
    parser.add_argument('--lc-seed',
                        type=int,
                        metavar='<seed>',
                        help='''
                        Custom seed for RNG to compute randomized life cycle netlist constants.

                        Note that this seed must coincide with the seed used for generating
                        the LC state encoding (gen-lc-state-enc.py).

                        This value typically does not need to be specified as it is taken from
                        the LC state encoding definition Hjson.
                        ''')
    parser.add_argument('--otp-seed',
                        type=int,
                        metavar='<seed>',
                        help='''
                        Custom seed for RNG to compute randomized OTP netlist constants.

                        Note that this seed must coincide with the seed used for generating
                        the OTP memory map (gen-otp-mmap.py).

                        This value typically does not need to be specified as it is taken from
                        the OTP memory map definition Hjson.
                        ''')
    parser.add_argument('-o',
                        '--out',
                        type=Path,
                        metavar='<path>',
                        default=hex_file,
                        help='''
                        Custom output path for generated hex file.
                        Defaults to {}
                        '''.format(hex_file))
    parser.add_argument('--lc-state-def',
                        type=Path,
                        metavar='<path>',
                        default=lc_state_def_file,
                        help='''
                        Life cycle state definition file in Hjson format.
                        ''')
    parser.add_argument('--mmap-def',
                        type=Path,
                        metavar='<path>',
                        default=mmap_def_file,
                        help='''
                        OTP memory map file in Hjson format.
                        ''')
    parser.add_argument('--img-cfg',
                        type=Path,
                        metavar='<path>',
                        default=img_def_file,
                        help='''
                        Image configuration file in Hjson format.
                        Defaults to {}
                        '''.format(img_def_file))
    parser.add_argument('--add-cfg',
                        type=Path,
                        metavar='<path>',
                        action='extend',
                        nargs='+',
                        default=[],
                        help='''
                        Additional image configuration file in Hjson format.

                        This switch can be specified multiple times.
                        Image configuration files are parsed in the same
                        order as they are specified on the command line,
                        and partition item values that are specified multiple
                        times are overridden in that order.

                        Note that seed values in additional configuration files
                        are ignored.
                        ''')
    parser.add_argument('--data-perm',
                        type=_permutation_string,
                        metavar='<map>',
                        default='',
                        help='''
                        This is a post-processing option and allows permuting
                        the bit positions before writing the hexfile.
                        The bit mapping needs to be supplied as a comma separated list
                        of bit slices, where the numbers refer to the bit positions in
                        the original data word before remapping, for example:

                        "[7:0],[16:8]".

                        The mapping must be bijective - otherwise this will generate
                        an error.
                        ''')

    args = parser.parse_args()

    if args.quiet:
        log.getLogger().setLevel(log.WARNING)

    log.info('Loading LC state definition file {}'.format(args.lc_state_def))
    with open(args.lc_state_def, 'r') as infile:
        lc_state_cfg = hjson.load(infile)
    log.info('Loading OTP memory map definition file {}'.format(args.mmap_def))
    with open(args.mmap_def, 'r') as infile:
        otp_mmap_cfg = hjson.load(infile)
    log.info('Loading main image configuration file {}'.format(args.img_cfg))
    with open(args.img_cfg, 'r') as infile:
        img_cfg = hjson.load(infile)

    # Set the initial random seed so that the generated image is
    # deterministically randomized.
    random.seed(args.seed)

    # If specified, override the seeds.
    _override_seed(args, 'lc_seed', lc_state_cfg)
    _override_seed(args, 'otp_seed', otp_mmap_cfg)
    _override_seed(args, 'img_seed', img_cfg)

    try:
        otp_mem_img = OtpMemImg(lc_state_cfg, otp_mmap_cfg, img_cfg,
                                args.data_perm)

        for f in args.add_cfg:
            log.info(
                'Processing additional image configuration file {}'.format(f))
            log.info('')
            with open(f, 'r') as infile:
                cfg = hjson.load(infile)
                otp_mem_img.override_data(cfg)
            log.info('')

    except RuntimeError as err:
        log.error(err)
        exit(1)

    # Print all defined args into the header comment for reference
    argstr = ''
    for arg, argval in sorted(vars(args).items()):
        if argval:
            if not isinstance(argval, list):
                argval = [argval]
            for a in argval:
                argname = '-'.join(arg.split('_'))
                # Get absolute paths for all files specified.
                a = a.resolve() if isinstance(a, Path) else a
                argstr += ' \\\n//   --' + argname + ' ' + str(a)

    dt = datetime.datetime.now(datetime.timezone.utc)
    dtstr = dt.strftime("%a, %d %b %Y %H:%M:%S %Z")
    memfile_header = '// Generated on {} with\n// $ gen-otp-img.py {}\n//\n'.format(
        dtstr, argstr)

    hexfile_content = memfile_header + otp_mem_img.streamout_hexfile()

    with open(args.out, 'w') as outfile:
        outfile.write(hexfile_content)
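A toy run of the header-building loop above, outside argparse; the argument values are made up.

import datetime
from pathlib import Path

argvals = {'out': Path('otp-img.mem'), 'quiet': True, 'seed': None}
argstr = ''
for arg, argval in sorted(argvals.items()):
    if argval:  # skip unset/False arguments, as above
        if not isinstance(argval, list):
            argval = [argval]
        for a in argval:
            argname = '-'.join(arg.split('_'))
            a = a.resolve() if isinstance(a, Path) else a
            argstr += ' \\\n//   --' + argname + ' ' + str(a)

dt = datetime.datetime.now(datetime.timezone.utc)
dtstr = dt.strftime("%a, %d %b %Y %H:%M:%S %Z")
print('// Generated on {} with\n// $ gen-otp-img.py {}\n//'.format(dtstr, argstr))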
Example #46
0
def parse_data_to_markup(source, dest, format_='yaml',
                         template='standard_entry.md.jinja'):
    """Given the path to a source data file and a destination, turn the source
    file into a Python dictionary and then pass it to a Jinja template, writing
    to the destination.

    Args:
        source (str): Path of the data file to read and parse.
        dest (file): File-like object to write the rendered template to.

    Kwargs:
        format (string): What format the source file is in. Default assumption
            is `yaml`.
        template (string): Name of the template we should read and then render.
    """
    data = None

    if format_ == 'yaml':
        with open(source, 'r') as f:
            data = yaml.safe_load(f)
    elif format_ == 'hjson':
        import hjson
        with open(source, 'r') as f:
            data = hjson.load(f)
    elif format_ == 'cfg':
        # config parser needs the most... massaging
        config = ConfigParser.RawConfigParser()
        config.read(source)

        data = config.items('trip')
        data = dict(map(lambda x: (x[0], x[1].replace('\\n', '\n')), data))

        guests = [item[1] for item in config.items('guests')]
        data['guest_list'] = guests
    elif format_ == 'plist':
        import plistlib
        data = plistlib.readPlist(source)
    elif format_ == 'wiki':
        from mediawiki_parser.html import make_parser as make_parser_html
        from mediawiki_parser.preprocessor import make_parser
        preprocessor = make_parser({})

        parser = make_parser_html([], [], [], {}, {})

        with open(source, 'r') as f:
            preprocessed_text = preprocessor.parse(f.read())

        output = parser.parse(preprocessed_text.leaves())

        dest.write(output.value)

        return
    else:
        raise RuntimeError("No usable format given to data parser!")

    loader = jinja2.FileSystemLoader('tools/templates')
    env = jinja2.Environment(loader=loader)

    template = env.get_template(template)

    data['source'] = source

    dest.write(template.render(**data))
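Hypothetical usage of the function above; the paths are assumptions, and tools/templates must contain the named Jinja template.

with open('out/trip.md', 'w') as dest:
    parse_data_to_markup('data/trip.hjson', dest, format_='hjson')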
Example #47
0
def load_configuration_from_path(path):
    with open(path) as f:
        cfg = hjson.load(f)
    process(cfg)
    return cfg
Example #48
0
from faker import Faker
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from dateutil.parser import parse

load_dotenv("dev.env")

Faker.seed(int(os.getenv("seed")))
fake = Faker()

num_uuid = shortuuid.ShortUUID()
num_uuid.set_alphabet("0123456789")
back_range = 61

with open("config.hjson") as f:
    config = hjson.load(f)

key_name = ("test_" if os.getenv("mode") == "test" else "") + "study_roles"
role_settings = config[key_name]
role_name_to_begin_hours = {role_name: float(role_info['hours'].split("-")[0]) for role_name, role_info in
                            role_settings.items()}
role_names = list(role_settings.keys())

num_intervals = 24 * 1
delta = timedelta(days=1)
interval = delta / num_intervals


def get_rank_categories(flatten=False, string=True):
    rank_categories = {}
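A hypothetical config.hjson fragment matching the study_roles parsing above; the role names and hour ranges are made up.

import hjson

sample = hjson.loads("""
{
    study_roles: {
        novice: { hours: "0-10" }
        adept:  { hours: "10-50" }
    }
}
""")

role_settings = sample["study_roles"]
begin_hours = {name: float(info['hours'].split("-")[0])
               for name, info in role_settings.items()}
print(begin_hours)  # {'novice': 0.0, 'adept': 10.0}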
Example #49
0
File: cli.py Project: abnerjacobsen/Zappa
    def load_settings(self, settings_file="zappa_settings.json", session=None):
        """
        Load the local zappa_settings.json file. 

        Returns the loaded Zappa object.
        """

        # Ensure we're passed a valid settings file.
        if not os.path.isfile(settings_file):
            print("Please configure your zappa_settings file.")
            quit() # pragma: no cover

        # Load up file
        try:
            with open(settings_file) as json_file:
                self.zappa_settings = json.load(json_file)
        except Exception as e: # pragma: no cover
            print("Problem parsing settings file.")
            print(e)
            quit() # pragma: no cover

        # Make sure that this environment is in our settings
        if self.api_stage not in self.zappa_settings.keys():
            print("Please define '%s' in your Zappa settings." % self.api_stage)
            quit() # pragma: no cover

        # We need a working title for this project. Use one if supplied, else cwd dirname.
        if 'project_name' in self.zappa_settings[self.api_stage]: # pragma: no cover
            self.project_name = self.zappa_settings[self.api_stage]['project_name']
        else:
            self.project_name = slugify.slugify(os.getcwd().split(os.sep)[-1])

        # The name of the actual AWS Lambda function, ex, 'helloworld-dev'
        # Django's slugify doesn't replace _, but this does.
        self.lambda_name = slugify.slugify(self.project_name + '-' + self.api_stage)

        # Load environment-specific settings
        self.s3_bucket_name = self.zappa_settings[self.api_stage]['s3_bucket']
        self.vpc_config = self.zappa_settings[
            self.api_stage].get('vpc_config', {})
        self.memory_size = self.zappa_settings[
            self.api_stage].get('memory_size', 512)
        self.app_function = self.zappa_settings[
            self.api_stage].get('app_function', None)
        self.aws_region = self.zappa_settings[
            self.api_stage].get('aws_region', 'us-east-1')
        self.debug = self.zappa_settings[
            self.api_stage].get('debug', True)
        self.prebuild_script = self.zappa_settings[
            self.api_stage].get('prebuild_script', None)
        self.profile_name = self.zappa_settings[
            self.api_stage].get('profile_name', None)

        # Create a Zappa object..
        self.zappa = Zappa(session)
        self.zappa.aws_region = self.aws_region

        # Load your AWS credentials from ~/.aws/credentials
        self.zappa.load_credentials(session)

        # ..and configure it
        for setting in CUSTOM_SETTINGS:
            if setting in self.zappa_settings[self.api_stage]:
                setattr(self.zappa, setting,
                        self.zappa_settings[self.api_stage][setting])

        return self.zappa
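For reference, a hypothetical zappa_settings.json that satisfies the lookups above; the stage name and values are assumptions, and only s3_bucket is strictly required (the rest fall back to defaults):

{
    "dev": {
        "s3_bucket": "my-zappa-deploys",
        "project_name": "helloworld",
        "memory_size": 512,
        "aws_region": "us-east-1",
        "debug": true
    }
}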
Example #50
0
 def load_org_members_msg(self):
     with open("modules/core/org_members/org_members.msg", mode="r", encoding="utf-8") as f:
         return hjson.load(f)
Example #51
0
 def loads(self, s, **kw):
     sio = StringIO(s)
     res = json.loads(s, **kw)
     self.assertEqual(res, json.load(sio, **kw))
     return res
Example #52
0
 def __init__(self):
     with open("config/text.default.hjson", "r") as f:
         self.default = hjson.load(f)
     try:
         with open("config/text.hjson", "r") as f:
             self.custom = hjson.load(f)
     except FileNotFoundError:
         self.custom = None
Example #53
0
                success = verify_fields_and_nag(config, rrajsondoc)
                if success:
                    if rra2jsonconfig['debug_level'] < 2:
                        post_rra_to_servicemap(config['servicemap'], rrajsondoc)
                    else:
                        debug('Not posting RRA - debug mode')

            debug('Parsed {}: {}'.format(sheets[s.id], rra_version))
        else:
            debug('Document {} ({}) could not be parsed and is probably not an RRA (no version detected)'.format(sheets[s.id], s.id))

if __name__ == "__main__":
    #Load defaults, config
    os.environ['TZ'] = 'UTC'
    with open('rra2json.json') as fd:
        config = json.load(fd)

    #Warn about any global config issue
    if len(config['bugzilla']['api_key']) == 0:
        debug('Notice, bugzilla functionality is disabled (no configured API key)')

    #Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--assign-rras", help="autoassign pending rras only (no rra conversion, etc. done)", action="store_true")
    args = parser.parse_args()

    if args.assign_rras:
        # Use this opportunity to do some house keeping!
        if len(config['bugzilla']['autoassign']) == 0:
            debug("Notice, autoassign option is disabled")
        else:
Example #54
0
 def load_config_msg(self):
     with open("modules/core/config/config.msg", mode="r",
               encoding="UTF-8") as f:
         return hjson.load(f)
Example #55
0
 def load_ban_msg(self):
     with open("modules/core/ban/ban.msg", mode="r", encoding="UTF-8") as f:
         return hjson.load(f)
Example #56
0
def main():
    parser = argparse.ArgumentParser(prog="topgen")
    parser.add_argument('--topcfg',
                        '-t',
                        required=True,
                        help="`top_{name}.hjson` file.")
    parser.add_argument(
        '--tpl',
        '-c',
        help=
        "The directory having top_{name}_core.sv.tpl and top_{name}.tpl.sv.")
    parser.add_argument(
        '--outdir',
        '-o',
        help='''Target TOP directory.
             Module is created under rtl/. (default: dir(topcfg)/..)
             ''')  # yapf: disable
    parser.add_argument('--verbose', '-v', action='store_true', help="Verbose")

    # Generator options: 'no' series. cannot be combined with 'only' series
    parser.add_argument(
        '--no-top',
        action='store_true',
        help="If defined, topgen doesn't generate top_{name} RTLs.")
    parser.add_argument(
        '--no-xbar',
        action='store_true',
        help="If defined, topgen doesn't generate crossbar RTLs.")
    parser.add_argument(
        '--no-plic',
        action='store_true',
        help="If defined, topgen doesn't generate the interrup controller RTLs."
    )

    # Generator options: 'only' series. cannot be combined with 'no' series
    parser.add_argument(
        '--top-only',
        action='store_true',
        help="If defined, the tool generates top RTL only")  # yapf:disable
    parser.add_argument(
        '--xbar-only',
        action='store_true',
        help="If defined, the tool generates crossbar RTLs only")
    parser.add_argument(
        '--plic-only',
        action='store_true',
        help="If defined, the tool generates RV_PLIC RTL and Hjson only")
    parser.add_argument(
        '--alert-handler-only',
        action='store_true',
        help="If defined, the tool generates alert handler hjson only")
    # Generator options: generate dv ral model
    parser.add_argument(
        '--top_ral',
        '-r',
        default=False,
        action='store_true',
        help="If set, the tool generates top level RAL model for DV")

    args = parser.parse_args()

    # check combinations
    if args.top_ral:
        args.no_top = True

    if (args.no_top or args.no_xbar or
            args.no_plic) and (args.top_only or args.xbar_only or
                               args.plic_only or args.alert_handler_only):
        log.error(
            "'no' series options cannot be used with 'only' series options")
        raise SystemExit(sys.exc_info()[1])

    if not (args.top_ral or args.plic_only or args.alert_handler_only or
            args.tpl):
        log.error(
            "Template file can be omitted only if '--hjson-only' is true")
        raise SystemExit(sys.exc_info()[1])

    if args.verbose:
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    if not args.outdir:
        outdir = Path(args.topcfg).parent / ".."
        log.info("TOP directory not given. Use %s", (outdir))
    elif not Path(args.outdir).is_dir():
        log.error("'--outdir' should point to writable directory")
        raise SystemExit(sys.exc_info()[1])
    else:
        outdir = Path(args.outdir)

    out_path = Path(outdir)
    cfg_path = Path(args.topcfg).parents[1]

    try:
        with open(args.topcfg, 'r') as ftop:
            topcfg = hjson.load(ftop,
                                use_decimal=True,
                                object_pairs_hook=OrderedDict)
    except ValueError:
        raise SystemExit(sys.exc_info()[1])

    # Create generated list
    # These modules are generated through topgen
    generated_list = [
        module['type'] for module in topcfg['module']
        if 'generated' in module and module['generated'] == 'true'
    ]
    log.info("Filtered list is {}".format(generated_list))

    # These modules are NOT generated but belong to a specific top
    # and therefore not part of "hw/ip"
    top_only_list = [
        module['type'] for module in topcfg['module']
        if 'top_only' in module and module['top_only'] == 'true'
    ]
    log.info("Filtered list is {}".format(top_only_list))

    topname = topcfg["name"]

    # Sweep the IP directory and gather the config files
    ip_dir = Path(__file__).parents[1] / 'hw/ip'
    ips = search_ips(ip_dir)

    # exclude filtered IPs (to use the top_${topname} versions instead)
    exclude_list = generated_list + top_only_list
    ips = [x for x in ips if not x.parents[1].name in exclude_list]

    # Hack alert
    # Generate clkmgr.hjson here so that it can be included below
    # Unlike other generated hjsons, clkmgr thankfully does not require
    # ip.hjson information.  All the information is embedded within
    # the top hjson file
    amend_clocks(topcfg)
    generate_clkmgr(topcfg, cfg_path, out_path)

    # It may require two passes to check if the module is needed.
    # TODO: first run of topgen will fail due to the absence of rv_plic.
    # It needs to run up to amend_interrupt in the merge_top function,
    # then create rv_plic.hjson, then run xbar generation.
    hjson_dir = Path(args.topcfg).parent

    for ip in generated_list:
        log.info("Appending {}".format(ip))
        ip_hjson = hjson_dir.parent / "ip/{}/data/autogen/{}.hjson".format(
            ip, ip)
        ips.append(ip_hjson)

    for ip in top_only_list:
        log.info("Appending {}".format(ip))
        ip_hjson = hjson_dir.parent / "ip/{}/data/{}.hjson".format(
            ip, ip)
        ips.append(ip_hjson)

    # load Hjson and pass validate from reggen
    try:
        ip_objs = []
        for x in ips:
            # Skip if it is not in the module list
            if x.stem not in [ip["type"] for ip in topcfg["module"]]:
                log.info("Skip module %s as it isn't in the top module list" %
                         x.stem)
                continue

            obj = hjson.load(x.open('r'),
                             use_decimal=True,
                             object_pairs_hook=OrderedDict)
            if validate.validate(obj) != 0:
                log.info("Parsing IP %s configuration failed. Skip" % x)
                continue
            ip_objs.append(obj)

    except ValueError:
        raise SystemExit(sys.exc_info()[1])

    # Read the crossbars under the top directory
    xbar_objs = get_hjsonobj_xbars(hjson_dir)

    log.info("Detected crossbars: %s" %
             (", ".join([x["name"] for x in xbar_objs])))

    topcfg, error = validate_top(topcfg, ip_objs, xbar_objs)
    if error != 0:
        raise SystemExit("Error occured while validating top.hjson")

    completecfg = merge_top(topcfg, ip_objs, xbar_objs)

    if args.top_ral:
        generate_top_ral(completecfg, ip_objs, out_path)
        sys.exit()

    # Generate PLIC
    if not args.no_plic and \
       not args.alert_handler_only and \
       not args.xbar_only:
        generate_plic(completecfg, out_path)
        if args.plic_only:
            sys.exit()

    # Generate Alert Handler
    if not args.xbar_only:
        generate_alert_handler(completecfg, out_path)
        if args.alert_handler_only:
            sys.exit()

    # Generate Pinmux
    generate_pinmux_and_padctrl(completecfg, out_path)

    # Generate Pwrmgr
    generate_pwrmgr(completecfg, out_path)

    # Generate top only modules
    # These modules are not templated, but are not in hw/ip
    generate_top_only(top_only_list, out_path)

    # Generate xbars
    if not args.no_xbar or args.xbar_only:
        generate_xbars(completecfg, out_path)

    # All IPs are generated. Connect phase now
    # Find {memory, module} <-> {xbar} connections first.
    im.autoconnect(completecfg)

    # Generic Inter-module connection
    im.elab_intermodule(completecfg)

    top_name = completecfg["name"]

    # Generate top.gen.hjson right before rendering
    hjson_dir = Path(args.topcfg).parent
    genhjson_path = hjson_dir / ("autogen/top_%s.gen.hjson" %
                                 completecfg["name"])
    gencmd = (
        "// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson --hjson-only "
        "-o hw/top_{topname}/\n".format(topname=topname))

    genhjson_path.write_text(genhdr + gencmd +
                             hjson.dumps(completecfg, for_json=True))

    if not args.no_top or args.top_only:
        tpl_path = Path(args.tpl)

        def render_template(out_name_tpl, out_dir, **other_info):
            top_tplpath = tpl_path / ((out_name_tpl + '.tpl') % (top_name))
            template_contents = generate_top(completecfg, str(top_tplpath),
                                             **other_info)

            rendered_dir = out_path / out_dir
            rendered_dir.mkdir(parents=True, exist_ok=True)
            rendered_path = rendered_dir / (out_name_tpl % (top_name))

            with rendered_path.open(mode='w', encoding='UTF-8') as fout:
                fout.write(template_contents)

            return rendered_path

        # SystemVerilog Top:
        # 'top_earlgrey.sv.tpl' -> 'rtl/autogen/top_earlgrey.sv'
        render_template('top_%s.sv', 'rtl/autogen')

        # 'top_earlgrey_pkg.sv.tpl' -> 'rtl/autogen/top_earlgrey_pkg.sv'
        render_template('top_%s_pkg.sv', 'rtl/autogen')

        # C Header + C File + Clang-format file
        # The C file needs some information from when the header is generated,
        # so we keep this in a dictionary here.
        c_gen_info = {}

        # 'clang-format' -> 'sw/autogen/.clang-format'
        cformat_tplpath = tpl_path / 'clang-format'
        cformat_dir = out_path / 'sw/autogen'
        cformat_dir.mkdir(parents=True, exist_ok=True)
        cformat_path = cformat_dir / '.clang-format'
        cformat_path.write_text(cformat_tplpath.read_text())

        # 'top_earlgrey.h.tpl' -> 'sw/autogen/top_earlgrey.h'
        cheader_path = render_template('top_%s.h',
                                       'sw/autogen',
                                       c_gen_info=c_gen_info).resolve()

        # Save the relative header path into `c_gen_info`
        rel_header_path = cheader_path.relative_to(SRCTREE_TOP)
        c_gen_info["header_path"] = str(rel_header_path)

        # 'top_earlgrey.c.tpl' -> 'sw/autogen/top_earlgrey.c'
        render_template('top_%s.c', 'sw/autogen', c_gen_info=c_gen_info)

        # 'top_earlgrey_memory.ld.tpl' -> 'sw/autogen/top_earlgrey_memory.ld'
        render_template('top_%s_memory.ld', 'sw/autogen')

        # 'top_earlgrey_memory.h.tpl' -> 'sw/autogen/top_earlgrey_memory.h'
        memory_cheader_path = render_template('top_%s_memory.h', 'sw/autogen')

        # Fix the C header guards, which will have the wrong name
        subprocess.run(["util/fix_include_guard.py",
                        str(cheader_path),
                        str(memory_cheader_path)],
                       universal_newlines=True,
                       stdout=subprocess.DEVNULL,
                       stderr=subprocess.DEVNULL,
                       check=True,
                       cwd=str(SRCTREE_TOP))  # yapf: disable

        # generate chip level xbar TB
        tb_files = ["xbar_env_pkg__params.sv", "tb__xbar_connect.sv"]
        for fname in tb_files:
            tpl_fname = "%s.tpl" % (fname)
            xbar_chip_data_path = tpl_path / tpl_fname
            template_contents = generate_top(completecfg,
                                             str(xbar_chip_data_path))

            rendered_dir = tpl_path / '../dv/autogen'
            rendered_dir.mkdir(parents=True, exist_ok=True)
            rendered_path = rendered_dir / fname

            with rendered_path.open(mode='w', encoding='UTF-8') as fout:
                fout.write(template_contents)
Example #57
0
 def test_strip_bom(self):
     content = u"\u3053\u3093\u306b\u3061\u308f"
     json_doc = codecs.BOM_UTF8 + b(json.dumpsJSON(content))
     self.assertEqual(json.load(BytesIO(json_doc)), content)
     for doc in json_doc, json_doc.decode('utf8'):
         self.assertEqual(json.loads(doc), content)
Example #58
0
 def load_private_channel_msg(self):
     with open("modules/core/private_channel/private_channel.msg",
               mode="r",
               encoding="utf-8") as f:
         return hjson.load(f)
Example #59
0
def main():
    parser = argparse.ArgumentParser(prog="topgen")
    parser.add_argument('--topcfg',
                        '-t',
                        required=True,
                        help="`top_{name}.hjson` file.")
    parser.add_argument(
        '--outdir',
        '-o',
        help='''Target TOP directory.
             Module is created under rtl/. (default: dir(topcfg)/..)
             ''')  # yapf: disable
    parser.add_argument('--verbose', '-v', action='store_true', help="Verbose")

    # Generator options: 'no' series. cannot be combined with 'only' series
    parser.add_argument(
        '--no-top',
        action='store_true',
        help="If defined, topgen doesn't generate top_{name} RTLs.")
    parser.add_argument(
        '--no-xbar',
        action='store_true',
        help="If defined, topgen doesn't generate crossbar RTLs.")
    parser.add_argument(
        '--no-plic',
        action='store_true',
        help="If defined, topgen doesn't generate the interrup controller RTLs."
    )

    # Generator options: 'only' series. cannot be combined with 'no' series
    parser.add_argument(
        '--top-only',
        action='store_true',
        help="If defined, the tool generates top RTL only")  # yapf:disable
    parser.add_argument(
        '--xbar-only',
        action='store_true',
        help="If defined, the tool generates crossbar RTLs only")
    parser.add_argument(
        '--plic-only',
        action='store_true',
        help="If defined, the tool generates RV_PLIC RTL and Hjson only")
    parser.add_argument(
        '--alert-handler-only',
        action='store_true',
        help="If defined, the tool generates alert handler hjson only")
    # Generator options: generate dv ral model
    parser.add_argument(
        '--top_ral',
        '-r',
        default=False,
        action='store_true',
        help="If set, the tool generates top level RAL model for DV")
    parser.add_argument('--dv-base-prefix',
                        default='dv_base',
                        help='Prefix for the DV register classes from which '
                        'the register models are derived.')
    # Generator options for compile time random netlist constants
    parser.add_argument(
        '--rnd_cnst_seed',
        type=int,
        metavar='<seed>',
        help='Custom seed for RNG to compute netlist constants.')

    args = parser.parse_args()

    # check combinations
    if args.top_ral:
        args.no_top = True

    if (args.no_top or args.no_xbar or
            args.no_plic) and (args.top_only or args.xbar_only or
                               args.plic_only or args.alert_handler_only):
        log.error(
            "'no' series options cannot be used with 'only' series options")
        raise SystemExit(sys.exc_info()[1])

    if args.verbose:
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")

    if not args.outdir:
        outdir = Path(args.topcfg).parent / ".."
        log.info("TOP directory not given. Use %s", (outdir))
    elif not Path(args.outdir).is_dir():
        log.error("'--outdir' should point to writable directory")
        raise SystemExit(sys.exc_info()[1])
    else:
        outdir = Path(args.outdir)

    out_path = Path(outdir)
    cfg_path = Path(args.topcfg).parents[1]

    try:
        with open(args.topcfg, 'r') as ftop:
            topcfg = hjson.load(ftop,
                                use_decimal=True,
                                object_pairs_hook=OrderedDict)
    except ValueError:
        raise SystemExit(sys.exc_info()[1])

    # TODO, long term, the levels of dependency should be automatically determined instead
    # of hardcoded.  The following are a few examples:
    # Example 1: pinmux depends on amending all modules before calculating the correct number of
    #            pins.
    #            This would be 1 level of dependency and require 2 passes.
    # Example 2: pinmux depends on amending all modules, and pwrmgr depends on pinmux generation to
    #            know the correct number of wakeups.  This would be 2 levels of dependency and require 3
    #            passes.
    #
    # How does multi-pass work?
    # In example 1, the first pass gathers all modules and merges them.  However, the merge process
    # uses a stale pinmux.  The correct pinmux is then generated using the merged configuration. The
    # second pass now merges all the correct modules (including the generated pinmux) and creates
    # the final merged config.
    #
    # In example 2, the first pass gathers all modules and merges them.  However, the merge process
    # uses a stale pinmux and pwrmgr. The correct pinmux is then generated using the merged
    # configuration.  However, since pwrmgr is dependent on this new pinmux, it is still generated
    # incorrectly.  The second pass merge now has an updated pinmux but stale pwrmgr.  The correct
    # pwrmgr can now be generated.  The final pass then merges all the correct modules and creates
    # the final configuration.
    #
    # This fix is related to #2083
    process_dependencies = 1
    for pass_idx in range(process_dependencies + 1):
        log.debug("Generation pass {}".format(pass_idx))
        if pass_idx < process_dependencies:
            cfg_copy = deepcopy(topcfg)
            _process_top(cfg_copy, args, cfg_path, out_path, pass_idx)
        else:
            completecfg, name_to_block = _process_top(topcfg, args, cfg_path, out_path, pass_idx)

    topname = topcfg["name"]

    # Generate xbars
    if not args.no_xbar or args.xbar_only:
        generate_xbars(completecfg, out_path)

    # All IPs are generated. Connect phase now
    # Find {memory, module} <-> {xbar} connections first.
    im.autoconnect(completecfg, name_to_block)

    # Generic Inter-module connection
    im.elab_intermodule(completecfg)

    # Generate top.gen.hjson right before rendering
    genhjson_dir = out_path / "data/autogen"
    genhjson_dir.mkdir(parents=True, exist_ok=True)
    genhjson_path = genhjson_dir / ("top_%s.gen.hjson" % completecfg["name"])

    # Header for HJSON
    gencmd = '''//
// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson \\
//                -o hw/top_{topname}/ \\
//                --hjson-only \\
//                --rnd_cnst_seed {seed}
'''.format(topname=topname, seed=completecfg['rnd_cnst_seed'])

    genhjson_path.write_text(genhdr + gencmd +
                             hjson.dumps(completecfg, for_json=True))

    if not args.no_top or args.top_only:
        def render_template(template_path: str, rendered_path: Path, **other_info):
            template_contents = generate_top(completecfg, name_to_block,
                                             str(template_path), **other_info)

            rendered_path.parent.mkdir(exist_ok=True, parents=True)
            with rendered_path.open(mode='w', encoding='UTF-8') as fout:
                fout.write(template_contents)

        # Header for SV files
        gencmd = warnhdr + '''//
// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson \\
//                -o hw/top_{topname}/ \\
//                --rnd_cnst_seed {seed}
'''.format(topname=topname, seed=topcfg['rnd_cnst_seed'])

        # SystemVerilog Top:
        # 'toplevel.sv.tpl' -> 'rtl/autogen/top_{topname}.sv'
        render_template(TOPGEN_TEMPLATE_PATH / "toplevel.sv.tpl",
                        out_path / f"rtl/autogen/top_{topname}.sv",
                        gencmd=gencmd)

        # Multiple chip-levels (ASIC, FPGA, Verilator, etc)
        for target in topcfg['targets']:
            render_template(TOPGEN_TEMPLATE_PATH / "chiplevel.sv.tpl",
                            out_path / f"rtl/autogen/chip_{topname}_{target['name']}.sv",
                            gencmd=gencmd,
                            target=target)

        # The C / SV file needs some complex information, so we initialize this
        # object to store it.
        c_helper = TopGenC(completecfg, name_to_block)

        # 'toplevel_pkg.sv.tpl' -> 'rtl/autogen/top_{topname}_pkg.sv'
        render_template(TOPGEN_TEMPLATE_PATH / "toplevel_pkg.sv.tpl",
                        out_path / f"rtl/autogen/top_{topname}_pkg.sv",
                        helper=c_helper,
                        gencmd=gencmd)

        # compile-time random netlist constants
        render_template(TOPGEN_TEMPLATE_PATH / "toplevel_rnd_cnst_pkg.sv.tpl",
                        out_path / f"rtl/autogen/top_{topname}_rnd_cnst_pkg.sv",
                        gencmd=gencmd)

        # C Header + C File + Clang-format file

        # Since SW does not use FuseSoC and instead expects those files always
        # to be in hw/top_{topname}/sw/autogen, we currently create these files
        # twice:
        # - Once under out_path/sw/autogen
        # - Once under hw/top_{topname}/sw/autogen
        for path in [out_path.resolve(),
                     (SRCTREE_TOP / 'hw/top_{}/'.format(topname)).resolve()]:

            # 'clang-format' -> 'sw/autogen/.clang-format'
            cformat_tplpath = TOPGEN_TEMPLATE_PATH / 'clang-format'
            cformat_dir = path / 'sw/autogen'
            cformat_dir.mkdir(parents=True, exist_ok=True)
            cformat_path = cformat_dir / '.clang-format'
            cformat_path.write_text(cformat_tplpath.read_text())

            # 'top_{topname}.h.tpl' -> 'sw/autogen/top_{topname}.h'
            cheader_path = cformat_dir / f"top_{topname}.h"
            render_template(TOPGEN_TEMPLATE_PATH / "toplevel.h.tpl",
                            cheader_path,
                            helper=c_helper)

            # Save the relative header path into `c_gen_info`
            rel_header_path = cheader_path.relative_to(path.parents[1])
            c_helper.header_path = str(rel_header_path)

            # 'toplevel.c.tpl' -> 'sw/autogen/top_{topname}.c'
            render_template(TOPGEN_TEMPLATE_PATH / "toplevel.c.tpl",
                            cformat_dir / f"top_{topname}.c",
                            helper=c_helper)

            # 'toplevel_memory.ld.tpl' -> 'sw/autogen/top_{topname}_memory.ld'
            render_template(TOPGEN_TEMPLATE_PATH / "toplevel_memory.ld.tpl",
                            cformat_dir / f"top_{topname}_memory.ld")

            # 'toplevel_memory.h.tpl' -> 'sw/autogen/top_{topname}_memory.h'
            memory_cheader_path = cformat_dir / f"top_{topname}_memory.h"
            render_template(TOPGEN_TEMPLATE_PATH / "toplevel_memory.h.tpl",
                            memory_cheader_path,
                            helper=c_helper)

            try:
                cheader_path.relative_to(SRCTREE_TOP)
            except ValueError:
                log.error("cheader_path %s is not within SRCTREE_TOP %s",
                          cheader_path, SRCTREE_TOP)
                log.error("Thus skipping util/fix_include_guard.py")
                continue

            # Fix the C header guards, which will have the wrong name
            subprocess.run(["util/fix_include_guard.py",
                            str(cheader_path),
                            str(memory_cheader_path)],
                           universal_newlines=True,
                           stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL,
                           check=True,
                           cwd=str(SRCTREE_TOP))  # yapf: disable

        # Generate chip-level xbar and alert_handler testbench (TB) files
        tb_files = [
            "xbar_env_pkg__params.sv", "tb__xbar_connect.sv",
            "tb__alert_handler_connect.sv"
        ]
        for fname in tb_files:
            tpl_fname = "%s.tpl" % (fname)
            xbar_chip_data_path = TOPGEN_TEMPLATE_PATH / tpl_fname
            template_contents = generate_top(completecfg, name_to_block,
                                             str(xbar_chip_data_path))

            rendered_dir = out_path / 'dv/autogen'
            rendered_dir.mkdir(parents=True, exist_ok=True)
            rendered_path = rendered_dir / fname

            with rendered_path.open(mode='w', encoding='UTF-8') as fout:
                fout.write(template_contents)

        # Generate parameters for the chip-level environment package
        tpl_fname = 'chip_env_pkg__params.sv.tpl'
        alert_handler_chip_data_path = TOPGEN_TEMPLATE_PATH / tpl_fname
        template_contents = generate_top(completecfg, name_to_block,
                                         str(alert_handler_chip_data_path))

        rendered_dir = out_path / 'dv/env/autogen'
        rendered_dir.mkdir(parents=True, exist_ok=True)
        rendered_path = rendered_dir / 'chip_env_pkg__params.sv'

        with rendered_path.open(mode='w', encoding='UTF-8') as fout:
            fout.write(template_contents)
Example #60
0
File: topgen.py Project: poena/opentitan
def generate_clkmgr(top, cfg_path, out_path):

    # Target paths
    rtl_path = out_path / 'ip/clkmgr/rtl/autogen'
    rtl_path.mkdir(parents=True, exist_ok=True)
    data_path = out_path / 'ip/clkmgr/data/autogen'
    data_path.mkdir(parents=True, exist_ok=True)

    # Template paths
    hjson_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.hjson.tpl'
    rtl_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.sv.tpl'
    pkg_tpl = cfg_path / '../ip/clkmgr/data/clkmgr_pkg.sv.tpl'

    hjson_out = data_path / 'clkmgr.hjson'
    rtl_out = rtl_path / 'clkmgr.sv'
    pkg_out = rtl_path / 'clkmgr_pkg.sv'

    tpls = [hjson_tpl, rtl_tpl, pkg_tpl]
    outputs = [hjson_out, rtl_out, pkg_out]
    names = ['clkmgr.hjson', 'clkmgr.sv', 'clkmgr_pkg.sv']

    # Clock classification
    grps = top['clocks']['groups']

    src_aon_attr = OrderedDict()
    hint_clks = OrderedDict()

    # Construct a dictionary of the aon attribute for easier lookup,
    # i.e. {src_name_A: True, src_name_B: False}
    for src in top['clocks']['srcs'] + top['clocks']['derived_srcs']:
        src_aon_attr[src['name']] = (src['aon'] == 'yes')

    rg_srcs = [src for (src, attr) in src_aon_attr.items() if not attr]
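    # For illustration (hypothetical source names): src_aon_attr might now be
    # OrderedDict([('main', False), ('io', False), ('aon', True)]), which
    # would make rg_srcs ['main', 'io'].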

    # Feed-through clocks pass through clkmgr undisturbed; routing them
    # through clkmgr keeps the clocking structure consistent. Two groups
    # qualify:
    # - clocks fed from an always-on source
    # - clocks fed to the 'powerup' group
    ft_clks = OrderedDict([(clk, src) for grp in grps
                           for (clk, src) in grp['clocks'].items()
                           if src_aon_attr[src] or grp['name'] == 'powerup'])

    # root-gate clocks
    rg_clks = OrderedDict([(clk, src) for grp in grps
                           for (clk, src) in grp['clocks'].items()
                           if grp['name'] != 'powerup' and grp['sw_cg'] == 'no'
                           and not src_aon_attr[src]])

    # direct sw control clocks
    sw_clks = OrderedDict([(clk, src) for grp in grps
                           for (clk, src) in grp['clocks'].items()
                           if grp['sw_cg'] == 'yes' and not src_aon_attr[src]])

    # sw hint clocks
    hints = OrderedDict([(clk, src) for grp in grps
                         for (clk, src) in grp['clocks'].items()
                         if grp['sw_cg'] == 'hint' and not src_aon_attr[src]])

    # Build the hint clocks dict
    for clk, src in hints.items():
        # Each hint clock is named clk_{src_name}_{module_name}, so the
        # module name is the last component after splitting from the right.
        hint_clks[clk] = OrderedDict()
        hint_clks[clk]['name'] = clk.rsplit('_', 1)[-1]
        hint_clks[clk]['src'] = src
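    # For illustration: a hint clock named 'clk_main_aes' (hypothetical)
    # yields hint_clks['clk_main_aes'] == {'name': 'aes', 'src': 'main'}.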

    for idx, tpl in enumerate(tpls):
        out = ""
        with tpl.open(mode='r', encoding='UTF-8') as fin:
            # Use a distinct name so the Template object does not shadow the
            # template-path loop variable 'tpl'.
            template = Template(fin.read())
            try:
                out = template.render(cfg=top,
                                      div_srcs=top['clocks']['derived_srcs'],
                                      rg_srcs=rg_srcs,
                                      ft_clks=ft_clks,
                                      rg_clks=rg_clks,
                                      sw_clks=sw_clks,
                                      export_clks=top['exported_clks'],
                                      hint_clks=hint_clks)
            except Exception:
                log.error(exceptions.text_error_template().render())

        if out == "":
            log.error("Cannot generate {}".format(names[idx]))
            return

        with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
            fout.write(genhdr + out)

    # Generate reg files
    with hjson_out.open('r') as hjson_file:
        hjson_obj = hjson.load(hjson_file,
                               use_decimal=True,
                               object_pairs_hook=OrderedDict)
    validate.validate(hjson_obj)
    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))
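
# Minimal usage sketch (not part of the original example): assumes an
# OpenTitan-style tree layout; the 'top_earlgrey' paths are illustrative,
# and the raw Hjson config stands in for a fully validated/merged top config.
from collections import OrderedDict
from pathlib import Path

import hjson

with open('hw/top_earlgrey/data/top_earlgrey.hjson') as f:
    topcfg = hjson.load(f, use_decimal=True, object_pairs_hook=OrderedDict)

# cfg_path is the directory the clkmgr templates are resolved against;
# out_path is where the generated rtl/ and data/ trees are written.
generate_clkmgr(topcfg, Path('hw/top_earlgrey/data'), Path('build/top_earlgrey'))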