Ejemplo n.º 1
0
def main():
    """Bump the eflash `pages_per_bank` in top_earlgrey.hjson to 32 and
    regenerate all derived files (hw outputs and the boot ROM).

    Returns 0 on success; exits with status 1 if a regeneration step fails.
    NOTE(review): `orighdr` and `genhdr` are assumed to be module-level
    header strings defined elsewhere in this file.
    """
    # Get path to top-level directory
    top_path = os.path.normpath(
        os.path.join(os.path.dirname(__file__), "../../.."))
    top_hjson = top_path + "/hw/top_earlgrey/data/top_earlgrey.hjson"

    # Parse the top-level hjson so the flash geometry can be modified.
    with open(top_hjson, "r") as hjson_file:
        cfg = hjson.load(hjson_file, use_decimal=True)

    # Write out the original version reformatted.
    # FIX: the original passed `hjson_file` as a second positional argument
    # to hjson.dumps(); that lands in the `skipkeys` parameter -- dumps()
    # serializes to a string and takes no file object.
    with open(top_path + "/hw/top_earlgrey/data/top_earlgrey.original.hjson",
              "w") as hjson_file:
        hjson_file.write(orighdr + hjson.dumps(cfg))

    # Update the flash page count.
    log.info("Updating flash pages_per_bank to 32")
    for mem in cfg["memory"]:
        if mem['type'] == 'eflash':
            mem['pages_per_bank'] = 32

    # Write back the updated hjson (same dumps() fix as above).
    with open(top_hjson, "w") as hjson_file:
        hjson_file.write(genhdr + hjson.dumps(cfg))

    # Regenerate auto-generated files.
    print("Regenerating all auto-generated files...")
    cmd = ["make", "-C", top_path + "/hw"]
    try:
        subprocess.run(cmd,
                       check=True,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       universal_newlines=True)

    except subprocess.CalledProcessError as e:
        log.error("Failed to regenerate auto-generated files: " + str(e))
        log.error(e.stdout)
        sys.exit(1)

    # Regenerate boot ROM.
    print("Regenerating boot ROM...")
    cmd = [
        "ninja", "-C", top_path + "/build-out",
        "sw/device/boot_rom/boot_rom_export_fpga_nexysvideo"
    ]
    try:
        subprocess.run(cmd,
                       check=True,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       universal_newlines=True)

    except subprocess.CalledProcessError as e:
        log.error("Failed to regenerate boot ROM: " + str(e))
        log.error(e.stdout)
        sys.exit(1)

    return 0
Ejemplo n.º 2
0
    def log_parameters(self) -> None:
        """
        Log the current status. Write parameters to log. Update the current best
        parameters. Call plotting functions as set up.

        """
        goal = self.optim_status["goal"]
        if goal < self.current_best_goal:
            # New best point: remember it and persist it to its own file.
            self.current_best_goal = goal
            self.current_best_params = self.optim_status["params"]
            best_path = self.logdir + "best_point_" + self.logname
            with open(best_path, "w") as best_point:
                best_dict = {
                    "opt_map": self.pmap.opt_map,
                    "units": self.pmap.get_opt_units(),
                    "optim_status": self.optim_status,
                }
                best_point.write(hjson.dumps(best_dict))
                best_point.write("\n")
        if self.store_unitaries:
            self.exp.store_Udict(goal)
            self.exp.store_unitaries_counter += 1
        with open(self.logdir + self.logname, "a") as logfile:
            logfile.write(
                f"\nFinished evaluation {self.evaluation} at {time.asctime()}\n"
            )
            logfile.write(hjson.dumps(self.optim_status))
            logfile.write("\n")
            logfile.flush()
Ejemplo n.º 3
0
def create_job(analysis,
               user=None,
               json_text='',
               json_data=None,
               name=None,
               state=Job.QUEUED,
               uid=None,
               save=True,
               fill_with=None):
    """
    Create (and optionally save) a Job instance for the given recipe.

    Note: Parameter 'fill_with' needs to be a flat key:value dictionary.
    """
    # FIX: mutable default arguments ({}) are shared across calls; use None
    # sentinels and normalize here (behavior is unchanged for callers).
    json_data = json_data or {}
    fill_with = fill_with or {}

    state = state or Job.QUEUED
    owner = user or analysis.project.owner
    project = analysis.project

    if json_data:
        json_text = hjson.dumps(json_data)
    else:
        json_text = json_text or analysis.json_text

    # Needs the json_data to set the summary.
    json_data = hjson.loads(json_text)

    # Generate a meaningful job title.
    name = make_job_title(recipe=analysis, data=json_data)
    uid = uid or util.get_uuid(8)

    # Create the job instance.
    job = Job(name=name,
              state=state,
              json_text=json_text,
              security=Job.AUTHORIZED,
              project=project,
              analysis=analysis,
              owner=owner,
              template=analysis.template,
              uid=uid)

    # Fill the json data.
    json_data = fill_json_data(job=job,
                               source_data=json_data,
                               project=project,
                               fill_with=fill_with)

    # Regenerate the title now that the data has been filled.
    name = make_job_title(recipe=analysis, data=json_data)
    # Update the json_text and name.
    job.json_text = hjson.dumps(json_data)
    job.name = name

    if save:
        job.save()

        # Update the project's lastedit user when a job is created.
        Project.objects.filter(uid=project.uid).update(lastedit_user=owner,
                                                       lastedit_date=now())
        logger.info(f"Created job id={job.id} name={job.name}")

    return job
Ejemplo n.º 4
0
def update_hjson():
    """Regenerate the buttons.hjson files consumed by the Au and sonos-cast
    apps from the podcast table, then notify connected clients.

    NOTE(review): looks like a Flask view (uses request/jsonify); only GET
    requests receive a JSON response -- other methods return None.
    """
    print('updating')

    def conc_disp(json_disp):
        # Join a one- or two-word display title stored as a JSON dict
        # keyed by the string indices '0' and '1'.
        print(json_disp)
        if len(json_disp) == 1:
            return_word = str(json_disp['0'])
        else:
            return_word = str(json_disp['0']) + " " + str(json_disp['1'])
        return return_word

    if os.path.exists("../Au/buttons.hjson"):
        os.remove("../Au/buttons.hjson")
    if os.path.exists("../sonos-cast/buttons.hjson"):
        os.remove("../sonos-cast/buttons.hjson")

    podcast_write = pod.query.order_by(pod.seq_butt.asc()).all()
    print(podcast_write)
    pod_list_dict = OrderedDict()
    for look_pod in podcast_write:
        new_title = look_pod.title.replace(":",
                                           "").replace(",",
                                                       "").replace(" ", "_")
        display_this = conc_disp(json.loads(look_pod.disp_title))
        pod_list_dict.update({
            new_title: {
                'label': display_this,
                'method': ["get_recent", "get_random"],
                'pod_id': look_pod.id,
                'seq': look_pod.seq_butt
            }
        })

    # TODO: auto-discover room info instead of hard-coding addresses.
    rooms = OrderedDict()
    rooms.update({'Lib': '192.168.1.136'})
    rooms.update({'Kitch': '192.168.1.145'})
    rooms.update({'Master': '192.168.1.101'})
    rooms.update({'Living': '192.168.1.116'})

    # FIX: use context managers so the handles are closed even if a write
    # fails (the original leaked both handles from bare open() calls).
    # Both files receive identical content, so serialize once.
    content = ("{Pods:" + hjson.dumps(pod_list_dict) + "Rooms:" +
               hjson.dumps(rooms) + "}")
    with open("../Au/buttons.hjson", "w+") as text_file:
        text_file.write(content)
    with open("../sonos-cast/buttons.hjson", "w+") as text_file2:
        text_file2.write(content)

    time.sleep(3)
    socketio.emit('message', {'data': 'Connected'})
    if request.method == "GET":
        succ_response = {"status": 'success'}
        return jsonify(succ_response)
Ejemplo n.º 5
0
def create_job(analysis, user=None, json_text='', json_data=None, name=None, state=None, uid=None, save=True):
    """Create (and optionally save) a Job instance for the given recipe."""
    # FIX: mutable default argument ({}) is shared across calls; use a None
    # sentinel and normalize here (behavior is unchanged for callers).
    json_data = json_data or {}

    state = state or Job.QUEUED
    owner = user or analysis.project.owner

    project = analysis.project

    if json_data:
        json_text = hjson.dumps(json_data)
    else:
        json_text = json_text or analysis.json_text

    # Needs the json_data to set the summary.
    json_data = hjson.loads(json_text)

    # Generate the summary from the data.
    summary = make_summary(json_data, summary=analysis.summary)

    # Generate a meaningful job title.
    name = make_job_title(recipe=analysis, data=json_data)

    # Create the job instance.
    job = Job(name=name, summary=summary, state=state, json_text=json_text,
              security=analysis.security, project=project, analysis=analysis, owner=owner,
              template=analysis.template, uid=uid)

    if save:
        job.save()
        logger.info(f"Created job id={job.id} name={job.name}")

    return job
Ejemplo n.º 6
0
def strip_json(json_text):
    """
    Strip settings parameter in json_text to only contain execute options.
    Deletes the 'settings' parameter if there are no execute options.

    Returns the stripped hjson string, or None if json_text cannot be parsed.
    """
    try:
        local_json = hjson.loads(json_text)
    except Exception as exep:
        logger.error(f'Error loading json text: {exep}')
        return

    # Fetch the execute options
    execute_options = local_json.get('settings', {}).get('execute', {})

    if execute_options:
        # Strip run settings of everything but execute options.
        local_json['settings'] = dict(execute=execute_options)
    else:
        # Drop 'settings' entirely; pop() tolerates a missing key (the
        # original assigned a dummy value just to make `del` safe).
        local_json.pop('settings', None)

    new_json = hjson.dumps(local_json)
    return new_json
Ejemplo n.º 7
0
    def check(self, name, file, inputCr):
        """Round-trip *file* through the parser and compare against the
        stored _result.json / _result.hjson fixtures for *name*.

        Fixtures whose name starts with "fail" must raise HjsonDecodeError.
        """
        text = self.load(file, inputCr)
        should_fail = name[0:4] == "fail"

        try:
            data = hjson.loads(text)
            self.assertFalse(should_fail)

            dumped_json = hjson.dumpsJSON(data)
            dumped_hjson = hjson.dumps(data, ensure_ascii=False)
            reference = hjson.loads(self.load(name + "_result.json", inputCr))
            reference_json = hjson.dumpsJSON(reference)
            reference_hjson = self.load(name + "_result.hjson", False)

            if self.verma > 2 or self.vermi > 6:
                # final check fails on py2.6 because of string formatting issues
                self.assertEqual(reference_json, dumped_json)
                self.assertEqual(reference_hjson, dumped_hjson)

        except hjson.HjsonDecodeError:
            self.assertTrue(should_fail)
Ejemplo n.º 8
0
    def store_values(self, path: str, optim_status=None) -> None:
        """
        Write current parameter values to file. Stores the numeric values,
        the names in form of the opt_map, and the physical units. If an
        optim_status is given, that one is used.

        Parameters
        ----------
        path : str
            Location of the resulting logfile.
        optim_status: dict
            Dictionary containing current parameters and goal function value.
        """
        if optim_status is None:
            current = [par.numpy().tolist() for par in self.get_parameters()]
            optim_status = {"params": current}
        val_dict = {
            "opt_map": self.get_opt_map(),
            "units": self.get_opt_units(),
            "optim_status": optim_status,
        }
        with open(path, "w") as value_file:
            value_file.write(hjson.dumps(val_dict, default=hjson_encode))
            value_file.write("\n")
def getAnnotationForEntry(path, schema):
    """Build a human-readable annotation for the schema entry at *path*,
    or report that the entry is not specified.

    Returns None when the parent of *path* is not an aggregate type.
    """
    parent_type = getSchemedType(path[:-1], schema)
    if not (parent_type and parent_type['mode'] == "aggregate"):
        return None

    entry_id = path[-1]
    # Find the first member spec whose id matches the entry.
    member_spec = None
    for candidate in parent_type["members"]:
        if candidate['id'] == entry_id:
            member_spec = candidate
            break

    if not member_spec:
        return "THIS ELEMENT IS NOT SPECIFIED IN THE SCHEMA."

    lines = [entry_id]
    spec_name = member_spec.get('name')
    if spec_name and spec_name != entry_id:
        lines.append("name: " + spec_name)
    # Append every remaining attribute as "key: <hjson value>".
    for key in member_spec.keys():
        if key not in ('id', 'name'):
            lines.append(key + ": " + hjson.dumps(member_spec[key]))
    return "\n".join(lines)
Ejemplo n.º 10
0
    def check(self, name, file, inputCr):
        """Parse *file* and compare the round-tripped output against the
        stored result fixtures; "fail*" fixtures must raise."""
        text = self.load(file, inputCr)
        expect_error = name[0:4] == "fail"

        try:
            parsed = hjson.loads(text)
            self.assertFalse(expect_error)

            json_out = hjson.dumpsJSON(parsed)
            hjson_out = hjson.dumps(parsed, ensure_ascii=False)
            expected = hjson.loads(self.load(name + "_result.json", inputCr))
            json_expected = hjson.dumpsJSON(expected)
            hjson_expected = self.load(name + "_result.hjson", False)

            if self.verma > 2 or self.vermi > 6:
                # final check fails on py2.6 because of string formatting issues
                self.assertEqual(json_expected, json_out)
                self.assertEqual(hjson_expected, hjson_out)

        except hjson.HjsonDecodeError:
            self.assertTrue(expect_error)
Ejemplo n.º 11
0
def dumps(obj, human=False, **kw):
    """Serialize *obj*, defaulting the encoder class to HJSEncoder.

    With human=True emit hjson; otherwise emit plain JSON.
    """
    kw.setdefault('cls', HJSEncoder)
    return hjson.dumps(obj, **kw) if human else hjson.dumpsJSON(obj, **kw)
Ejemplo n.º 12
0
    def check(self, name, file, inputCr):
        """Verify round-tripping of *file* against the result fixtures for
        *name*; fixtures named "fail*" are allowed to raise."""
        text = self.load(file, inputCr)
        should_fail = name.startswith("fail")

        try:
            data = hjson.loads(text)
            self.assertFalse(should_fail, file)

            actual_json = hjson.dumpsJSON(data)
            actual_hjson = hjson.dumps(data, ensure_ascii=False)
            result = hjson.loads(self.load(name + "_result.json", inputCr))
            wanted_json = hjson.dumpsJSON(result)
            wanted_hjson = self.load(name + "_result.hjson", False)

            if self.verma > 2 or self.vermi > 6:
                # final check fails on py2.6 because of string formatting issues
                self.assertEqual(wanted_json, actual_json, file)
                self.assertEqual(wanted_hjson, actual_hjson, file)

        except hjson.HjsonDecodeError as e:
            if not should_fail:
                self.fail("raised error on parsing %s: %r" % (file, e))
Ejemplo n.º 13
0
def hjson_parser(path):
    """Load the hjson file at *path* and return it re-serialized as an
    indented, key-sorted hjson string."""
    # FIX: removed the unused `from collections import OrderedDict`.
    import hjson
    with open(path, 'r') as fp:
        result = hjson.load(fp)

    return hjson.dumps(result, indent=2, sort_keys=True)
Ejemplo n.º 14
0
    def _conv_inline_cfg_to_hjson(self, idict):
        '''Dump a temp hjson file in the scratch space from input dict.
        This method is to be called only by a master cfg.'''

        if not self.is_master_cfg:
            log.fatal("This method can only be called by a master cfg")
            sys.exit(1)

        name = idict.get("name")
        if not name:
            log.error(
                "In-line entry in use_cfgs list does not contain "
                "a \"name\" key (will be skipped!):\n%s", idict)
            return None

        # Temp cfg file path is derived from scratch root, branch and name.
        temp_cfg_file = "{}/.{}__{}_cfg.hjson".format(self.scratch_root,
                                                      self.branch, name)

        # Create the file and dump the dict as hjson.
        log.log(VERBOSE, "Dumping inline cfg \"%s\" in hjson to:\n%s", name,
                temp_cfg_file)
        try:
            with open(temp_cfg_file, "w") as f:
                f.write(hjson.dumps(idict, for_json=True))
        except Exception as e:
            log.error(
                "Failed to hjson-dump temp cfg file\"%s\" for \"%s\""
                "(will be skipped!) due to:\n%s", temp_cfg_file, name, e)
            return None

        # Hand the temp cfg file back to the caller.
        return temp_cfg_file
Ejemplo n.º 15
0
def add_to_interface(request):
    """Return a rendered recipe-interface field for the requested display
    type, merged into the posted json_text."""

    display_type = request.POST.get('display_types', '')
    json_text = request.POST.get('json_text', '')

    display_dict = get_display_dict(display_type=display_type)

    json_data = hjson.loads(json_text)

    # Pick a field name that does not collide with existing keys by
    # appending an increasing counter to the display type.
    field_name = display_type
    count = 0
    while field_name in json_data:
        field_name = display_type + f'{count}'
        count += 1

    json_data[field_name] = display_dict
    new_json = hjson.dumps(json_data)

    tmpl = loader.get_template('widgets/json_field.html')
    json_field = tmpl.render(context=dict(json_text=new_json, focus=True))

    return ajax_success(html=json_field,
                        json_text=new_json,
                        msg="Rendered json")
Ejemplo n.º 16
0
    def regenerateSettingsFile(self, settingsFile=None):
        """Serialize *settingsFile* (default: self.settingsFile) as hjson
        and write it to self.path, replacing any existing content."""
        # FIX: identity check against None instead of `== None` (PEP 8);
        # also normalized the spacing around commas and `=`.
        if settingsFile is None:
            settingsFile = self.settingsFile

        # writing the file
        with open(self.path, "w+") as file:
            file.write(hjson.dumps(settingsFile))
Ejemplo n.º 17
0
def new_timeline(username) -> str:
    """Create a fresh timeline file for *username* and return its path.

    Raises:
        TimelineError: if a timeline file already exists for the user.
    """
    timeline_path = resource_filename("timelines", f"{username}.hjson")
    if os.path.exists(timeline_path):
        raise TimelineError(f"User '{username}' already has a timeline.")

    with open(timeline_path, "w") as handle:
        handle.write(hjson.dumps(blank_timeline()))
    return timeline_path
Ejemplo n.º 18
0
 def save_timeline_to_file(self) -> None:
     """Persist the timeline, activities and last_day to the user's hjson
     file, overwriting any previous contents."""
     target = resource_filename("timelines", f"{self.username}.hjson")
     with open(target, "w") as out:
         snapshot = {
             "timeline": self.timeline,
             "activities": self.activities,
             "last_day": self.last_day
         }
         out.write(hjson.dumps(snapshot))
Ejemplo n.º 19
0
 def log_shapes(self):
     """Append the current signal tensors (converted to nested lists) to
     the logfile as hjson."""
     # TODO log shapes in the generator instead
     with open(self.logdir + self.logname, "a") as logfile:
         serializable = {
             key: value.numpy().tolist()
             for key, value in self.signal.items()
         }
         logfile.write(hjson.dumps(serializable, default=hjson_encode))
         logfile.write("\n")
         logfile.flush()
Ejemplo n.º 20
0
 def template(self, type: str, only_required: bool, output: str) -> Optional[Dict]:
     """Print or return the schema template for *type*.

     output may be "hjson" or "json" (printed, returns None) or "dict"
     (returned as a plain dict); anything else raises ValueError.
     """
     schema = self._template(type, only_required)
     if output == "dict":
         return json.loads(hjson.dumpsJSON(schema, item_sort_key=sort_attrs))
     if output == "hjson":
         print(hjson.dumps(schema, indent=4, item_sort_key=sort_attrs))
     elif output == "json":
         print(hjson.dumpsJSON(schema, indent=4, item_sort_key=sort_attrs))
     else:
         raise ValueError("unrecognized output")
Ejemplo n.º 21
0
    def start_log(self) -> None:
        """
        Initialize the log with the current time and the optimization setup
        (opt_map, units, and algorithm options).
        """
        self.start_time = time.time()
        start_time_str = str(f"{time.asctime(time.localtime())}\n\n")
        with open(self.logdir + self.logname, "a") as logfile:
            logfile.write("Starting optimization at " + start_time_str)
            logfile.write(
                "Optimization parameters:\n" + hjson.dumps(self.pmap.opt_map)
                + "\n"
            )
            logfile.write(
                "Units:\n" + hjson.dumps(self.pmap.get_opt_units()) + "\n"
            )
            logfile.write(
                "Algorithm options:\n" + hjson.dumps(self.options) + "\n"
            )
            logfile.flush()
Ejemplo n.º 22
0
def generate_xbars(top, out_path):
    """Generate crossbar (xbar) collateral for every entry in top["xbar"]:
    the completed .gen.hjson, the tlgen output files, a DV testbench, and
    the inter_signal_list read back from the comportable IP hjson.

    NOTE(review): `genhdr`, `tlgen`, `hjson`, `log`, `exceptions`,
    `check_list`, `InterSignal` and `OrderedDict` come from module scope.
    """
    topname = top["name"]
    # Command line recorded in the generated file headers.
    gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
              "-o hw/top_{topname}/\n\n".format(topname=topname))

    for obj in top["xbar"]:
        xbar_path = out_path / 'ip/xbar_{}/data/autogen'.format(obj["name"])
        xbar_path.mkdir(parents=True, exist_ok=True)
        xbar = tlgen.validate(obj)
        xbar.ip_path = 'hw/top_' + top["name"] + '/ip/{dut}'

        # Generate output of crossbar with complete fields
        xbar_hjson_path = xbar_path / "xbar_{}.gen.hjson".format(xbar.name)
        xbar_hjson_path.write_text(genhdr + gencmd +
                                   hjson.dumps(obj, for_json=True))

        # NOTE(review): on elaboration failure this only logs and keeps going.
        if not tlgen.elaborate(xbar):
            log.error("Elaboration failed." + repr(xbar))

        try:
            results = tlgen.generate(xbar, "top_" + top["name"])
        except:  # noqa: E722
            # NOTE(review): if generate() raises, `results` stays unbound and
            # the loop below raises NameError -- confirm this is intended.
            log.error(exceptions.text_error_template().render())

        ip_path = out_path / 'ip/xbar_{}'.format(obj["name"])

        # Write each generated (filename, content) pair under the ip dir.
        for filename, filecontent in results:
            filepath = ip_path / filename
            filepath.parent.mkdir(parents=True, exist_ok=True)
            with filepath.open(mode='w', encoding='UTF-8') as fout:
                fout.write(filecontent)

        dv_path = out_path / 'ip/xbar_{}/dv/autogen'.format(obj["name"])
        dv_path.mkdir(parents=True, exist_ok=True)

        # generate testbench for xbar
        tlgen.generate_tb(xbar, dv_path, "top_" + top["name"])

        # Read back the comportable IP and amend to Xbar
        xbar_ipfile = ip_path / ("data/autogen/xbar_%s.hjson" % obj["name"])
        with xbar_ipfile.open() as fxbar:
            xbar_ipobj = hjson.load(fxbar,
                                    use_decimal=True,
                                    object_pairs_hook=OrderedDict)

            # Validate the raw entries and convert them into InterSignal
            # objects attached back onto the top-level obj.
            r_inter_signal_list = check_list(
                xbar_ipobj.get('inter_signal_list', []),
                'inter_signal_list field')
            obj['inter_signal_list'] = [
                InterSignal.from_raw(
                    'entry {} of the inter_signal_list field'.format(idx + 1),
                    entry) for idx, entry in enumerate(r_inter_signal_list)
            ]
Ejemplo n.º 23
0
    def download(url, uid, outfile="recipe", is_json=False):
        """Fetch *url* (relative to base_url, authenticated via api_key)
        and write its content into base_dir/project_uid/uid/outfile,
        normalizing it through hjson when is_json is True."""
        # Make the recipe directory.
        # FIX: renamed `dir` -> `target_dir` (shadowed the builtin).
        target_dir = os.path.join(base_dir, project_uid, uid)
        os.makedirs(target_dir, exist_ok=True)
        # Get full url and read content
        fullurl = urljoin(base_url, url) + f"?k={api_key}"
        data = urlopen(url=fullurl).read().decode()
        # Format data and dump content into file
        data = hjson.dumps(hjson.loads(data)) if is_json else data
        outfile = os.path.join(target_dir, outfile)
        # FIX: context manager closes the handle (the original leaked it
        # from a bare open()).
        with open(outfile, "w") as fh:
            fh.write(data)
Ejemplo n.º 24
0
    def start_log(self):
        """
        Initialize the log with current time, then append the robust
        noise-map values.
        """
        super().start_log()
        with open(self.logdir + self.logname, "a") as logfile:
            logfile.write("Robust values ")
            print(len(self.noise_map))
            serialized = hjson.dumps(self.noise_map, default=hjson_encode)
            logfile.write(serialized + "\n")
            logfile.flush()
Ejemplo n.º 25
0
    def start_log(self):
        """
        Initialize the log with current time, then append the robust
        noise-map values (converted through jsonify_list).
        """
        super().start_log()
        with open(self.logdir + self.logname, 'a') as logfile:
            logfile.write("Robust values ")
            print(len(self.noise_map))
            serialized = hjson.dumps(jsonify_list(self.noise_map))
            logfile.write(serialized + "\n")
            logfile.flush()
Ejemplo n.º 26
0
def generate_xbars(top, out_path):
    """Generate crossbar (xbar) collateral for every entry in top["xbar"]:
    the completed .gen.hjson, the tlgen output files, a DV testbench, and a
    deep copy of the inter_signal_list read back from the IP hjson.

    NOTE(review): `genhdr`, `tlgen`, `hjson`, `log`, `exceptions`,
    `deepcopy` and `OrderedDict` come from module scope.
    """
    topname = top["name"]
    # Command line recorded in the generated file headers.
    gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
              "-o hw/top_{topname}/\n\n".format(topname=topname))

    for obj in top["xbar"]:
        xbar_path = out_path / 'ip/xbar_{}/data/autogen'.format(obj["name"])
        xbar_path.mkdir(parents=True, exist_ok=True)
        xbar = tlgen.validate(obj)
        xbar.ip_path = 'hw/top_' + top["name"] + '/ip/{dut}'

        # Generate output of crossbar with complete fields
        xbar_hjson_path = xbar_path / "xbar_{}.gen.hjson".format(xbar.name)
        xbar_hjson_path.write_text(genhdr + gencmd +
                                   hjson.dumps(obj, for_json=True))

        # NOTE(review): on elaboration failure this only logs and keeps going.
        if not tlgen.elaborate(xbar):
            log.error("Elaboration failed." + repr(xbar))

        try:
            results = tlgen.generate(xbar, "top_" + top["name"])
        except:  # noqa: E722
            # NOTE(review): if generate() raises, `results` stays unbound and
            # the loop below raises NameError -- confirm this is intended.
            log.error(exceptions.text_error_template().render())

        ip_path = out_path / 'ip/xbar_{}'.format(obj["name"])

        # Write each generated (filename, content) pair under the ip dir.
        for filename, filecontent in results:
            filepath = ip_path / filename
            filepath.parent.mkdir(parents=True, exist_ok=True)
            with filepath.open(mode='w', encoding='UTF-8') as fout:
                fout.write(filecontent)

        dv_path = out_path / 'ip/xbar_{}/dv/autogen'.format(obj["name"])
        dv_path.mkdir(parents=True, exist_ok=True)

        # generate testbench for xbar
        tlgen.generate_tb(xbar, dv_path, "top_" + top["name"])

        # Read back the comportable IP and amend to Xbar
        xbar_ipfile = ip_path / ("data/autogen/xbar_%s.hjson" % obj["name"])
        with xbar_ipfile.open() as fxbar:
            xbar_ipobj = hjson.load(fxbar,
                                    use_decimal=True,
                                    object_pairs_hook=OrderedDict)

            # Deepcopy of the inter_signal_list.
            # As of writing the code, it is not expected to write-back the
            # read xbar objects into files. Still, as `inter_signal_list` is
            # modified in the `elab_intermodule()` stage, it is better to keep
            # the original content.
            obj["inter_signal_list"] = deepcopy(
                xbar_ipobj["inter_signal_list"])
Ejemplo n.º 27
0
def sync_json(sender, instance, created, raw, update_fields, **kwargs):
    """Signal handler: mirror the recipe's name and help text into the
    json "settings" section and persist the updated json_text."""

    current_json = instance.json_data

    settings = current_json.get("settings") or {}
    settings["name"] = instance.name
    settings["help"] = instance.text
    current_json["settings"] = settings

    Analysis.objects.get_all(uid=instance.uid).update(
        json_text=hjson.dumps(current_json))
Ejemplo n.º 28
0
    def recordDepsStates(self):
        """Write the current state of every sorted dependency to the glue
        states file, one "<name>: <hjson>" entry per dependency."""
        states = self.getStates()
        statesString = ""
        for dep in self.getSortedDependencies():
            state = states[dep.name]
            statesString += dep.name + ": " + hjson.dumps(state,
                                                          indent="\t") + "\n"

        logging.info("Writing states to %s", self.gluestatesPath())
        # FIX: context manager guarantees the file is closed even if the
        # write raises (the original used bare open()/close()).
        # logging.info("Writing states: \n%s", statesString)
        with open(self.gluestatesPath(), "w") as f:
            f.write(statesString)
Ejemplo n.º 29
0
def fill_bug(config, nags, rrajsondoc):
    """File a Bugzilla bug summarizing the RRA issues in *nags* for
    rrajsondoc.source, unless an auto-entry bug already exists or no API
    key is configured.

    NOTE(review): `bugzilla`, `json` and `debug` come from module scope.
    """
    bcfg = config['bugzilla']

    # If no API key has been specified, just skip this
    if len(bcfg['api_key']) == 0:
        return

    b = bugzilla.Bugzilla(url=bcfg['url'], api_key=bcfg['api_key'])

    #Did we already report this?
    terms = [{'product': bcfg['product']}, {'component': bcfg['component']},
            {'creator': bcfg['creator']}, {'whiteboard': 'autoentry'},
            {'resolution': ''},{'status': 'NEW'}, {'status': 'ASSIGNED'},
            {'status': 'REOPENED'}, {'status': 'UNCONFIRMED'},
            {'whiteboard': 'rra2json={}'.format(rrajsondoc.source)}
            ]

    bugs = b.search_bugs(terms)['bugs']
    try:
        # IndexError on bugs[-1] means no existing bug was found.
        bugzilla.DotDict(bugs[-1])
        debug("bug for {} is already present, not re-filling".format(rrajsondoc.source))
        return
    except IndexError:
        pass

    #If not, report now
    bug = bugzilla.DotDict()
    bug.product = bcfg['product']
    bug.component = bcfg['component']
    bug.summary = "There are {} issues with an RRA".format(len(nags))
    bug.description = json.dumps(nags)
    bug.whiteboard = 'autoentry rra2json={}'.format(rrajsondoc.source)
    if 'analyst' in rrajsondoc.details.metadata:
        bug.assigned_to = rrajsondoc.details.metadata.analyst
    try:
        ret = b.post_bug(bug)
        debug("Filled bug {} {}".format(rrajsondoc.source, ret))
    except Exception as e:
        # Code 51 = assigned_to user does not exist, just assign to default then
        # NOTE(review): this unpack assumes e.args is always a 4-tuple
        # (url, estr, ecode, edict) -- a differently-shaped exception would
        # raise ValueError here; confirm against the bugzilla client.
        url, estr, ecode, edict = e.args
        if edict['code'] == 51: 
            del bug.assigned_to
            try:
                ret = b.post_bug(bug)
                debug("Filled bug {} {}".format(rrajsondoc.source, ret))
            except Exception as e1:
                debug("Filling bug failed: {}".format(e1))
        else:
            debug("Filling bug failed: {}".format(e))
def modify_hparams(in_dir: pathlib.Path,
                   out_dir: pathlib.Path,
                   update: Optional[Dict] = None):
    """Copy hparams.hjson from *in_dir* to *out_dir*, applying the
    overrides in *update* (via algorithms.update) along the way."""
    update = update if update is not None else {}
    out_dir.mkdir(exist_ok=True, parents=False)
    with (in_dir / 'hparams.hjson').open("r") as in_f:
        hparams = hjson.loads(in_f.read())

    algorithms.update(hparams, update)
    with (out_dir / 'hparams.hjson').open("w") as out_f:
        out_f.write(hjson.dumps(hparams))
Ejemplo n.º 31
0
def generate_xbars(top, out_path):
    """Generate crossbar (xbar) collateral for every entry in top["xbar"]:
    the completed .gen.hjson, the RTL/package/core files from tlgen, and a
    DV testbench.

    NOTE(review): `genhdr`, `tlgen`, `hjson`, `log` and `exceptions` come
    from module scope.
    """
    topname = top["name"]
    # Command line recorded in the generated file headers.
    gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
              "-o hw/top_{topname}/\n\n".format(topname=topname))

    for obj in top["xbar"]:
        xbar_path = out_path / 'ip/xbar_{}/data/autogen'.format(obj["name"])
        xbar_path.mkdir(parents=True, exist_ok=True)
        xbar = tlgen.validate(obj)
        xbar.ip_path = 'hw/top_' + top["name"] + '/ip/{dut}'

        # Generate output of crossbar with complete fields
        xbar_hjson_path = xbar_path / "xbar_{}.gen.hjson".format(xbar.name)
        xbar_hjson_path.write_text(genhdr + gencmd +
                                   hjson.dumps(obj, for_json=True))

        # NOTE(review): on elaboration failure this only logs and keeps going.
        if not tlgen.elaborate(xbar):
            log.error("Elaboration failed." + repr(xbar))

        try:
            out_rtl, out_pkg, out_core = tlgen.generate(
                xbar, "top_" + top["name"])
        except:  # noqa: E722
            # NOTE(review): if generate() raises, out_rtl/out_pkg/out_core
            # stay unbound and the writes below raise NameError -- confirm
            # this is intended.
            log.error(exceptions.text_error_template().render())

        rtl_path = out_path / 'ip/xbar_{}/rtl/autogen'.format(obj["name"])
        rtl_path.mkdir(parents=True, exist_ok=True)
        dv_path = out_path / 'ip/xbar_{}/dv/autogen'.format(obj["name"])
        dv_path.mkdir(parents=True, exist_ok=True)

        # Write the generated RTL, package and FuseSoC core files.
        rtl_filename = "xbar_%s.sv" % (xbar.name)
        rtl_filepath = rtl_path / rtl_filename
        with rtl_filepath.open(mode='w', encoding='UTF-8') as fout:
            fout.write(out_rtl)

        pkg_filename = "tl_%s_pkg.sv" % (xbar.name)
        pkg_filepath = rtl_path / pkg_filename
        with pkg_filepath.open(mode='w', encoding='UTF-8') as fout:
            fout.write(out_pkg)

        core_filename = "xbar_%s.core" % (xbar.name)
        core_filepath = rtl_path / core_filename
        with core_filepath.open(mode='w', encoding='UTF-8') as fout:
            fout.write(out_core)

        # generate testbench for xbar
        tlgen.generate_tb(xbar, dv_path, "top_" + top["name"])
Ejemplo n.º 32
0
def fill_bug(config, nags, source):
    """File a Bugzilla bug summarizing the RRA issues in *nags* for
    *source*, unless an auto-entry bug for it already exists.

    NOTE(review): `bugzilla`, `json` and `debug` come from module scope.
    """
    bcfg = config['bugzilla']
    b = bugzilla.Bugzilla(url=bcfg['url'], api_key=bcfg['api_key'])

    #Did we already report this?
    terms = [{
        'product': bcfg['product']
    }, {
        'component': bcfg['component']
    }, {
        'creator': bcfg['creator']
    }, {
        'whiteboard': 'autoentry'
    }, {
        'resolution': ''
    }, {
        'status': 'NEW'
    }, {
        'status': 'ASSIGNED'
    }, {
        'status': 'REOPENED'
    }, {
        'status': 'UNCONFIRMED'
    }, {
        'whiteboard': 'rra2json={}'.format(source)
    }]

    bugs = b.search_bugs(terms)['bugs']
    try:
        # IndexError on bugs[-1] means no existing bug was found.
        bugzilla.DotDict(bugs[-1])
        debug("bug for {} is already present, not re-filling".format(source))
        return
    except IndexError:
        pass

    #If not, report now
    bug = bugzilla.DotDict()
    bug.product = bcfg['product']
    bug.component = bcfg['component']
    bug.summary = "There are {} issues with an RRA".format(len(nags))
    bug.description = json.dumps(nags)
    bug.whiteboard = 'autoentry rra2json={}'.format(source)
    try:
        ret = b.post_bug(bug)
    except Exception as e:
        # FIX: the original `except e:` was a NameError at catch time, and
        # the success debug() below ran with `ret` unbound after a failure.
        debug("Filling bug failed: {}".format(e))
    else:
        debug("Filled bug {} {}".format(source, ret))
Ejemplo n.º 33
0
def fill_bug(config, nags, source):
    """File a Bugzilla bug summarizing the RRA issues in *nags* for
    *source*, unless an auto-entry bug already exists or no API key is
    configured.

    NOTE(review): `bugzilla`, `json` and `debug` come from module scope.
    """
    bcfg = config['bugzilla']

    # If no API key has been specified, just skip this
    if len(bcfg['api_key']) == 0:
        return

    b = bugzilla.Bugzilla(url=bcfg['url'], api_key=bcfg['api_key'])

    #Did we already report this?
    terms = [{'product': bcfg['product']}, {'component': bcfg['component']},
            {'creator': bcfg['creator']}, {'whiteboard': 'autoentry'},
            {'resolution': ''},{'status': 'NEW'}, {'status': 'ASSIGNED'},
            {'status': 'REOPENED'}, {'status': 'UNCONFIRMED'},
            {'whiteboard': 'rra2json={}'.format(source)}
            ]

    bugs = b.search_bugs(terms)['bugs']
    try:
        # IndexError on bugs[-1] means no existing bug was found.
        bugzilla.DotDict(bugs[-1])
        debug("bug for {} is already present, not re-filling".format(source))
        return
    except IndexError:
        pass

    #If not, report now
    bug = bugzilla.DotDict()
    bug.product = bcfg['product']
    bug.component = bcfg['component']
    bug.summary = "There are {} issues with an RRA".format(len(nags))
    bug.description = json.dumps(nags)
    bug.whiteboard = 'autoentry rra2json={}'.format(source)
    try:
        ret = b.post_bug(bug)
    except Exception as e:
        # FIX: the original `except e:` was a NameError at catch time, and
        # the success debug() below ran with `ret` unbound after a failure.
        debug("Filling bug failed: {}".format(e))
    else:
        debug("Filled bug {} {}".format(source, ret))
Ejemplo n.º 34
0
def dict_to_cfg(data_dict):
    """Serialize *data_dict* to hjson and rewrite it into cfg syntax:
    '[' -> '(', ']' -> ');', '}' -> '};', then strip the outer braces and
    the trailing '};'."""
    data_h = hjson.dumps(data_dict)
    # FIX: raw strings -- '\}' and '\[' are invalid escape sequences in
    # plain string literals (DeprecationWarning today, SyntaxError in
    # future Python versions). The matched patterns are unchanged.
    text = re.sub(r'\}', '};', re.sub(r'\]', ');', re.sub(r'\[', '(', data_h)))
    text = text[:-2].lstrip('{')
    return text
Ejemplo n.º 35
0
 def _json_clone(self, j):
     """Deep-copy *j* by round-tripping it through hjson."""
     serialized = hjson.dumps(j)
     return hjson.loads(serialized)