Ejemplo n.º 1
0
def _load_json_to_mongo(database: pymongo.database.Database,
                        collection: str) -> None:
    """Populate *collection* in *database* from its JSON test fixture.

    The fixture lives at ``<_TESTDATA_FOLDER>/<collection>.json`` and must
    hold a JSON5 array of documents.
    """
    fixture_path = path.join(_TESTDATA_FOLDER, collection + '.json')
    with open(fixture_path) as json_file:
        documents = pyjson5.load(json_file)
    database[collection].insert_many(documents)
Ejemplo n.º 2
0
def annotate_speed_model(tile_type, reduced_tile, root_dir):
    """Update *reduced_tile* in place with speed (timing) information.

    Writes the tile's speed-model indices to a temporary file, runs Vivado's
    ``get_speed_model.tcl`` on it, then annotates the tile's site pins, pips
    and wires with the resulting speed-model data.

    Args:
        tile_type: Tile type name, used to derive the temp file name.
        reduced_tile: Dict with 'sites', 'pips' and 'wires' keys; mutated.
        root_dir: Directory in which the temporary index file is created.

    Raises:
        RuntimeError: If the XRAY_VIVADO environment variable is unset
            (the original used ``assert``, which is stripped under ``-O``).
        subprocess.CalledProcessError: If the Vivado invocation fails.
    """
    speed_model_indices = get_speed_model_indices(reduced_tile)

    tmp_indices_file = os.path.join(root_dir,
                                    '{}_speed_index.tmp'.format(tile_type))

    with open(tmp_indices_file, "w") as f:
        for index in speed_model_indices:
            print(index, file=f)

    # Get vivado path
    vivado = os.getenv('XRAY_VIVADO')
    if vivado is None:
        raise RuntimeError('XRAY_VIVADO environment variable must be set')

    # Pass arguments as a list (no shell) so a Vivado path containing spaces
    # or shell metacharacters cannot be misinterpreted or injected.
    subprocess.check_call(
        [
            vivado, '-mode', 'batch', '-source', 'get_speed_model.tcl',
            '-tclargs', tmp_indices_file
        ],
        stdout=subprocess.DEVNULL)

    # The temp file is re-read as JSON5 afterwards — presumably the tcl
    # script rewrites it with the resolved speed-model data; confirm.
    with open(tmp_indices_file, "r") as f:
        speed_model_data = json5.load(f)

    for site in reduced_tile['sites']:
        annotate_site_pins_speed_model(site['site_pins'], speed_model_data)

    annotate_pips_speed_model(reduced_tile['pips'], speed_model_data)
    annotate_wires_speed_model(reduced_tile['wires'], speed_model_data)
Ejemplo n.º 3
0
def read_json5(fname, nodes):
    """Parse a tile JSON5 file and cross-check its wires against *nodes*.

    Returns a tuple of (fname, tile dict, site types, sites, pips, wire
    names local to the tile).
    """
    node_lookup = prjxray.lib.NodeLookup()
    node_lookup.load_from_nodes(nodes)

    with open(fname) as f:
        tile = json5.load(f)

    site_types = tuple(get_prototype_site(site) for site in tile['sites'])
    sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
    pips = get_pips(tile['tile'], tile['pips'])

    # Wire names are fully qualified as '<tile>/<wire>'; keep the local part.
    prefix = tile['tile'] + '/'
    wires = set()
    for wire in tile['wires']:
        full_name = wire['wire']
        assert full_name.startswith(prefix)
        wires.add(full_name[len(prefix):])

    # Every wire known to the node lookup must appear in the tile file.
    wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
    assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes))

    return fname, tile, site_types, sites, pips, wires
Ejemplo n.º 4
0
def read_json5(fname, database_file):
    """Parse a tile JSON5 file, pairing each wire with its RC delay model.

    Returns a tuple of (fname, tile dict, site types, sites, pips, dict of
    local wire name -> delay model or None).
    """
    node_lookup = prjxray.node_lookup.NodeLookup(database_file)

    with open(fname) as f:
        tile = json5.load(f)

    site_types = tuple(get_prototype_site(site) for site in tile['sites'])
    sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
    pips = get_pips(tile['tile'], tile['pips'])

    # Wire names are fully qualified as '<tile>/<wire>'; keep the local part.
    prefix = tile['tile'] + '/'
    wires = {}
    for wire in tile['wires']:
        full_name = wire['wire']
        assert full_name.startswith(prefix)

        # Only wires with non-zero resistance or capacitance carry a model.
        if wire['res'] != '0.000' or wire['cap'] != '0.000':
            delay_model = {'res': wire['res'], 'cap': wire['cap']}
        else:
            delay_model = None

        wires[full_name[len(prefix):]] = delay_model

    # Every wire known to the node lookup must appear in the tile file.
    wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
    assert len(wires_from_nodes - wires.keys()) == 0, repr(
        (wires, wires_from_nodes))

    return fname, tile, site_types, sites, pips, wires
Ejemplo n.º 5
0
 def load_from_root_csv(self, nodes):
     """Load per-node wire lists from JSON5 files into ``self.nodes``.

     Each file in *nodes* must describe a distinct node (asserted).
     """
     import pyjson5 as json5
     import progressbar
     for node_file in progressbar.progressbar(nodes):
         with open(node_file) as f:
             data = json5.load(f)
         node_name = data['node']
         assert node_name not in self.nodes
         self.nodes[node_name] = data['wires']
Ejemplo n.º 6
0
    def load(cls, filename: str):
        """Load a VS Code color-theme JSON file, following its 'include' parent.

        NOTE(review): this block appears truncated in this excerpt — `parent`
        is computed but never used or returned in the visible code; confirm
        against the full source before relying on this docstring.
        """
        with open(filename, 'r') as f:
            data = json.load(f)
        # Only VS Code color-theme documents are accepted.
        assert data['$schema'] == 'vscode://schemas/color-theme'

        parent = None
        # 'include' names a parent theme, relative to this file's directory;
        # load it recursively.
        if parent_filename := data.get('include'):
            directory = pathlib.Path(os.path.dirname(filename))
            parent = cls.load(directory / parent_filename)
Ejemplo n.º 7
0
	def __init__(self, path):
		"""Build the application config from the JSON5 file at *path*.

		An optional "secrets" entry names a second JSON5 file (relative to
		*path*) whose values are merged in: dict values update matching
		sections, anything else is assigned directly.
		"""
		with open(path, "rt") as fh:
			config = pyjson5.load(fh)

		# handling secrets
		if "secrets" in config:
			secrets_path = os.path.join(os.path.dirname(path), config["secrets"])
			with open(secrets_path, "rt") as fh:
				secrets = pyjson5.load(fh)
			for section, payload in secrets.items():
				if section in config:
					config[section].update(payload)
				else:
					config[section] = payload

		self.smtp = SmtpConfig(config["smtp"])
		self.bug_report = BugReportConfig(config["bug_report"])
		self.parser = ParserConfig(config["parser"])
		self.www = WwwConfig(config["www"])
		self.db = DatabaseConfig(config["db"])
		self.unittest_mode = "DANCEBOOKS_UNITTEST" in os.environ
Ejemplo n.º 8
0
    def __init__(self, path):
        """Read the JSON5 config at *path* and populate typed sub-configs.

        If the config names a "secrets" file, its contents are merged in
        first: dicts update matching sections, other keys are assigned.
        """
        with open(path, "rt") as main_file:
            raw = pyjson5.load(main_file)

        # handling secrets
        if "secrets" in raw:
            secrets_file_path = os.path.join(
                os.path.dirname(path), raw["secrets"])
            with open(secrets_file_path, "rt") as secrets_file:
                overrides = pyjson5.load(secrets_file)
            for name, data in overrides.items():
                if name in raw:
                    raw[name].update(data)
                else:
                    raw[name] = data

        self.smtp = SmtpConfig(raw["smtp"])
        self.bug_report = BugReportConfig(raw["bug_report"])
        self.parser = ParserConfig(raw["parser"])
        self.www = WwwConfig(raw["www"])
        self.db = DatabaseConfig(raw["db"])
        self.unittest_mode = "DANCEBOOKS_UNITTEST" in os.environ
Ejemplo n.º 9
0
def get_tile_grid_info(fname):
    """Return a one-entry dict describing the tile stored in *fname*.

    The entry maps the tile name to its type, grid coordinates, a
    site-name -> site-type dict, and the set of its wire names.
    """
    with open(fname, 'r') as f:
        tile = json5.load(f)

    info = {
        'type': tile['type'],
        'grid_x': tile['x'],
        'grid_y': tile['y'],
        'sites': {site['site']: site['type'] for site in tile['sites']},
        'wires': {wire['wire'] for wire in tile['wires']},
    }
    return {tile['tile']: info}
Ejemplo n.º 10
0
    def build_database(self, nodes, tiles):
        """Populate the SQLite tile/node/wire tables from JSON5 dumps.

        Args:
            nodes: Iterable of paths to per-node JSON5 files, each with a
                'node' name and a 'wires' list of {'wire': ...} entries.
            tiles: Mapping of tile type -> iterable of tile file paths.
        """
        create_tables(self.conn)

        c = self.conn.cursor()
        # Flatten the per-type tile file lists into a single list of paths.
        tile_names = []
        for tile_type in tiles:
            for tile in tiles[tile_type]:
                tile_names.append(tile)

        tile_pkeys = {}
        for tile_file in progressbar.progressbar(tile_names):
            # build/specimen_001/tile_DSP_L_X34Y145.json5
            root, _ = os.path.splitext(os.path.basename(tile_file))
            # Strip the leading 'tile_' (5 chars) to recover the tile name.
            tile = root[5:]
            c.execute("INSERT INTO tile(name) VALUES (?);", (tile, ))
            # lastrowid of the INSERT is this tile's primary key.
            tile_pkeys[tile] = c.lastrowid

        nodes_processed = set()
        for node in progressbar.progressbar(nodes):
            with open(node) as f:
                node_wires = json5.load(f)
                # Each node file must describe a distinct node.
                assert node_wires['node'] not in nodes_processed
                nodes_processed.add(node_wires['node'])

                c.execute("INSERT INTO node(name) VALUES (?);",
                          (node_wires['node'], ))
                node_pkey = c.lastrowid

                for wire in node_wires['wires']:
                    # Wire names are '<tile>/<wire>'; the tile part links the
                    # wire row back to its tile's primary key.
                    tile = wire['wire'].split('/')[0]

                    tile_pkey = tile_pkeys[tile]
                    c.execute(
                        """
INSERT INTO wire(name, tile_pkey, node_pkey) VALUES (?, ?, ?);""",
                        (wire['wire'], tile_pkey, node_pkey))

        self.conn.commit()

        c = self.conn.cursor()
        # Build indices after the bulk insert (faster than indexing as we go).
        c.execute("CREATE INDEX tile_names ON tile(name);")
        c.execute("CREATE INDEX node_names ON node(name);")
        c.execute("CREATE INDEX wire_node_tile ON wire(node_pkey, tile_pkey);")
        c.execute("CREATE INDEX wire_tile ON wire(tile_pkey);")
        self.conn.commit()
Ejemplo n.º 11
0
    def load_set(cls, filename: str) -> Dict[str, '_Persona']:
        """Load a set of personas from a JSON file.

        Each entry must carry a 'user' blob; 'featuresEnabled', 'project'
        and 'projects' blobs are merged in when present. Persona names must
        be unique (asserted).
        """
        with open(filename) as personas_file:
            raw_personas = pyjson5.load(personas_file)

        personas: Dict[str, _Persona] = {}
        for name, blob in raw_personas.items():
            user = user_pb2.User()
            assert proto.parse_from_mongo(blob['user'], user.profile)
            if 'featuresEnabled' in blob:
                assert proto.parse_from_mongo(
                    blob['featuresEnabled'], user.features_enabled)
            if 'project' in blob:
                assert proto.parse_from_mongo(
                    blob['project'], user.projects.add())
            if 'projects' in blob:
                for project in blob['projects']:
                    assert proto.parse_from_mongo(project, user.projects.add())
            assert name not in personas
            personas[name] = cls(name, user)
        return personas
Ejemplo n.º 12
0
def get_tile_grid_info(fname):
    """Return a one-entry dict describing the tile stored in *fname*.

    Tiles flagged as ignored are reported with type 'NULL'.
    """
    with open(fname, 'r') as f:
        tile = json5.load(f)

    is_ignored = int(tile['ignored']) != 0
    effective_type = 'NULL' if is_ignored else tile['type']

    return {
        tile['tile']: {
            'type': effective_type,
            'ignored': is_ignored,
            'grid_x': tile['x'],
            'grid_y': tile['y'],
            'sites': {site['site']: site['type'] for site in tile['sites']},
            'wires': {wire['wire'] for wire in tile['wires']},
        },
    }
Ejemplo n.º 13
0
def read_json5(fname, database_file):
    """Parse a tile JSON5 file and cross-check its wires via *database_file*.

    Returns a tuple of (fname, tile dict, site types, sites, pips, wire
    names local to the tile).
    """
    node_lookup = prjxray.node_lookup.NodeLookup(database_file)

    with open(fname) as f:
        tile = json5.load(f)

    site_types = tuple(get_prototype_site(site) for site in tile['sites'])
    sites = tuple(get_sites(tile, node_lookup.site_pin_node_to_wires))
    pips = get_pips(tile['tile'], tile['pips'])

    # Wire names are fully qualified as '<tile>/<wire>'; keep the local part.
    tile_prefix = tile['tile'] + '/'
    wires = set()
    for wire in tile['wires']:
        qualified = wire['wire']
        assert qualified.startswith(tile_prefix)
        wires.add(qualified[len(tile_prefix):])

    # Every wire known to the node lookup must appear in the tile file.
    wires_from_nodes = set(node_lookup.wires_for_tile(tile['tile']))
    assert len(wires_from_nodes - wires) == 0, repr((wires, wires_from_nodes))

    return fname, tile, site_types, sites, pips, wires
Ejemplo n.º 14
0
def config_load(config):
    """Load the given JSON5 config file, and put it in the expected schema.

    Args:
        config: Path to a JSON5 configuration file with a 'parsers' key.

    Returns:
        The result of config_schema() applied to the file's 'parsers' section.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original passed open(config) directly and leaked the handle.
    with open(config) as config_file:
        app_config = pyjson5.load(config_file)
    return config_schema(app_config['parsers'])
Ejemplo n.º 15
0
def read_json5(fname):
    """Parse the JSON5 file at *fname* and return its contents."""
    with open(fname, 'r') as json5_file:
        return json5.load(json5_file)
Ejemplo n.º 16
0
def main():
    """Convert JSON5 on stdin to pretty-printed (2-space indent) JSON on stdout."""
    simplejson.dump(pyjson5.load(sys.stdin), sys.stdout, indent=2)
Ejemplo n.º 17
0
def read_json5(fname):
    """Parse the JSON5 file at *fname* (opened via OpenSafeFile) and return it."""
    with OpenSafeFile(fname, 'r') as f:
        return json5.load(f)
Ejemplo n.º 18
0
def process_file(src_path, dest_path):
    """Convert the JSON5 file at *src_path* to standard JSON at *dest_path*.

    The output is pretty-printed with a 2-space indent. Progress is logged
    by printing both paths.
    """
    print(src_path, dest_path)
    # Flatten the original's nested `with`: the source handle is closed as
    # soon as parsing finishes, before the destination file is opened.
    with open(src_path) as infile:
        data = pyjson5.load(infile)
    with open(dest_path, 'w') as outfile:
        json.dump(data, outfile, indent=2)
Ejemplo n.º 19
0
def _check_config_file(config):
    """For non-deployment editions, we must load config from a json5 file.

    This also gets run for deployments, as this function is responsible for
    merging multiple configs into the single root config.

    This function also contains all schema validation of json5 files, as
    the version stored in the observatory image is separate.

    Args:
        config: Path to the config folder, or None when the module-level
            CONFIG_FOLDER is already set (deployment case).

    Returns:
        The validated config dictionary, with schema defaults applied.

    Side effects:
        Sets the module globals CONFIG_FOLDER and IMAGE_TAG; calls
        sys.exit(1) if schema validation fails.
    """
    global CONFIG_FOLDER, IMAGE_TAG

    # Not a deployment -- requires CONFIG_FOLDER
    if CONFIG_FOLDER is None:
        assert config is not None
        CONFIG_FOLDER = config
    else:
        assert config is None

    import pyjson5, schema as s
    # NOTE(review): the handle from open() is never closed; acceptable for a
    # short-lived startup path, but a `with` block would be cleaner.
    config_data = pyjson5.load(open(os.path.join(CONFIG_FOLDER,
            'config.json5')))

    # Before applying schema, merge in child configs
    for child_name in os.listdir(CONFIG_FOLDER):
        child_path = os.path.join(CONFIG_FOLDER, child_name)
        if not os.path.isdir(child_path):
            continue
        child_config_path = os.path.join(child_path, 'config.json5')
        if not os.path.lexists(child_config_path):
            continue

        child_config = pyjson5.load(open(child_config_path))

        # First traversal -- patch keys and values
        # (iterative depth-first walk over the nested dict)
        nodes = [([], child_config)]
        while nodes:
            path, src = nodes.pop()
            for k, v in src.items():
                ## Rewrite v
                # Check executables; amend cwd
                # Entries under these sections run programs, so their working
                # directory is made relative to the child config's folder.
                if path and (
                        'file_detail_views' == path[-1]
                        or 'decision_views' == path[-1]
                        or 'parsers' == path[-1]
                        or 'tasks' == path[-1]):
                    if 'cwd' in v:
                        v['cwd'] = f'{child_name}/' + v['cwd']
                    else:
                        v['cwd'] = child_name
 
                # Docker build stage patch
                # Build commands get {dist}/{disttarg} rewritten to the
                # child's subdirectory so outputs don't collide.
                if path == ['build', 'stages']:
                    if 'commands' in v:
                        v['commands'] = [
                                vv
                                    .replace('{disttarg}', f'{{disttarg}}/{child_name}')
                                    .replace('{dist}', f'{{dist}}/{child_name}')
                                for vv in v['commands']]

                if isinstance(v, dict):
                    nodes.append((path + [k], v))

        # Second traversal -- merge
        # Walks child and root configs in lockstep, merging child into root.
        nodes = [([], config_data, child_config)]
        while nodes:
            path, dst, src = nodes.pop()
            for k, v in src.items():
                ## Rewrite k
                # Docker build stage patch -- rewrites `k`
                if path == ['build', 'stages']:
                    # Adding a build stage. If not 'base' or 'final', then
                    # prepend config folder name
                    if k not in ['base', 'final']:
                        k = f'{child_name}_{k}'

                # New entries only for these
                if path in [
                        ['pipelines'],
                        ['file_detail_views'],
                        ['decision_views'],
                        ['parsers'],
                        ]:
                    # Amend these with the child's name, to allow for copying
                    k = f'{child_name.replace("_", "-")}-{k}'
                    assert k not in dst, f'Cannot extend {path} {k}; must be new'

                ## Check for merge type
                # Check if new -- if so, assign and be done
                if k not in dst:
                    # Non-existent key; add it to the dictionary
                    dst[k] = v
                    continue

                # Do merge
                if isinstance(v, dict):
                    if not isinstance(dst[k], dict):
                        raise ValueError(f'{path} {k} type does not match base config')

                    # Dictionary merge
                    nodes.append((path + [k], dst[k], v))
                elif isinstance(v, list):
                    if not isinstance(dst[k], list):
                        raise ValueError(f'{path} {k} type does not match base config')

                    # Add to end.
                    dst[k].extend(v)
                else:
                    raise ValueError(f'May not extend {path} {k}: base config type {dst[k]}')

    # Pull in parser-specific schema
    # The parse_schema module is loaded by file path (not importable as a
    # package from here), hence the importlib machinery.
    import importlib.util
    spec = importlib.util.spec_from_file_location('etl_parse',
            os.path.join(faw_dir, 'common', 'pdf-etl-parse', 'parse_schema.py'))
    etl_parse = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(etl_parse)

    # Shared sub-schema for view sections, reused at top level and inside
    # each pipeline below.
    schema_views = {
            s.Optional('decision_views', default={}): s.Or({}, {
                str: {
                    'label': str,
                    'type': 'program',
                    'exec': [s.Or(
                        s.And(str, lambda x: not x.startswith('<')),
                        s.And(str, lambda x: x in [
                            '<filesPath>', '<apiInfo>', '<jsonArguments>', '<mongo>', '<outputHtml>',
                            '<workbenchApiUrl>']),
                        )],
                    s.Optional('cwd', default='.'): str,
                    'execStdin': s.And(str, lambda x: all([
                        y.group(0) in ['<referenceDecisions>', '<statsbyfile>']
                        for y in re.finditer('<[^>]*>', x)]),
                        error="Must be string with any <'s being one of: "
                            "<referenceDecisions>, <statsbyfile>"),
                },
            }),
            s.Optional('file_detail_views', default={}): s.Or({}, {
                str: {
                    'label': str,
                    'type': 'program_to_html',
                    'exec': [str],
                    s.Optional('cwd', default='.'): str,
                    s.Optional('outputMimeType', default='text/html'): str,
                },
            }),
    }

    # NOTE -- primary schema validation is here, but NOT for submodules such
    # as pdf-etl-parse.
    sch = s.Schema({
        'name': s.And(str, s.Regex(r'^[a-zA-Z0-9-]+$')),
        # parsers validated by pdf-etl-parse
        'parsers': etl_parse.schema_get(),
        s.Optional('parserDefaultTimeout', default=30): s.Or(float, int),
        'decision_default': str,
        s.Optional('pipelines', default={}): s.Or({}, {
            s.And(str, lambda x: '_' not in x and '.' not in x,
                    error='Must not have underscore or dot'): {
                s.Optional('label'): str,
                s.Optional('disabled', default=False): s.Or(True, False),
                s.Optional('tasks', default={}): s.Or({},
                    s.And(
                        {
                            s.And(str, lambda x: '_' not in x and '.' not in x,
                                    error='Must not have underscore or dot'): {
                                s.Optional('disabled', default=False): s.Or(True, False),
                                'version': str,
                                'exec': [str],
                                s.Optional('cwd', default='.'): str,
                                s.Optional('dependsOn', default=[]): [str],
                            },
                        },
                        lambda x: all([d in x for _, task in x.items() for d in task['dependsOn']]),
                        error="Task `dependsOn` had invalid name",
                    )),
                s.Optional('parsers', default={}): etl_parse.schema_get(),
                **schema_views,
            },
        }),
        'build': {
            'stages': {
                str: {
                    s.Optional('from'): str,
                    s.Optional('commands'): [str],
                    s.Optional('copy_output'): {
                        str: s.Or(str, bool),
                    },
                },
            },
        },
        **schema_views,
    })
    # Validate and apply schema defaults; abort with readable errors on
    # failure rather than a raw traceback alone.
    try:
        config_data = sch.validate(config_data)
    except Exception as e:
        traceback.print_exc()
        sys.stderr.write('\n'.join([str(v) for v in e.autos]) + '\n')
        sys.exit(1)
    IMAGE_TAG = config_data['name']
    return config_data
Ejemplo n.º 20
0
    parser.add_argument('file',
                        metavar='FILE',
                        type=str,
                        help='The file to be verified')
    parser.add_argument('--order-desc',
                        dest='desc',
                        action='store_true',
                        help="ID numbers are in descending ordering")
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()

    with open(args.file, 'r') as f:
        data = load(f)

    in_order = operator.gt if args.desc else operator.lt

    reservations = []
    error = False
    for i, item in enumerate(data, 1):
        if len(item) != 3:
            print(f"Invalid record number {i}: {item}", file=sys.stderr)
            error = True
        else:
            first, last, username = item
            x = (first, last)
            ok = True
            order = True
            for (num, y, name) in reservations: