Example #1
def categorize_top_level_objects(data: List, context: ParseContext):
    """Look at all of the top-level declarations and categorize them"""
    top_level_collections: Dict = {
        "option": [],
        "include_file": [],
        "macro": [],
        "plugin": [],
        "statement": [],  # objects with side-effects
    }
    assert isinstance(data, list)
    for obj in data:
        if not isinstance(obj, dict):
            raise exc.DataGenSyntaxError(
                f"Top level elements in a data generation template should all be dictionaries, not {obj}",
                **context.line_num(data),
            )
        obj_category = None
        for declaration, category in collection_rules.items():
            typ = obj.get(declaration)
            if typ:
                if obj_category:
                    raise exc.DataGenError(
                        f"Top level element seems to match two name patterns: {declaration, obj_category}",
                        **context.line_num(obj),
                    )
                obj_category = category
        if obj_category:
            top_level_collections[obj_category].append(obj)
        else:
            raise exc.DataGenError(f"Unknown object type {obj}",
                                   **context.line_num(obj))
    return top_level_collections
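To make the expected input concrete, here is a minimal sketch of what the function consumes and produces. The YAML loading, ParseContext, and the collection_rules mapping live in the surrounding module and are not shown; the declarations below (and the assumption that an `object` declaration lands in the "statement" category) are illustrative only.
# Hypothetical template data, as if already loaded from YAML into a list of dicts:
data = [
    {"option": "num_records"},                       # categorized as "option"
    {"macro": "address_fields", "fields": {}},       # categorized as "macro"
    {"object": "Account", "fields": {"Name": "X"}},  # object with side effects -> "statement"
]
# categorize_top_level_objects(data, context) returns a dict keyed by category
# ("option", "include_file", "macro", "plugin", "statement"), each value being
# the list of top-level declarations that matched.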
Example #2
def categorize_top_level_objects(data: List, context: ParseContext):
    """Look at all of the top-level declarations and categorize them"""
    top_level_collections: Dict = {
        "option": [],
        "include_file": [],
        "macro": [],
        "plugin": [],
        "object": [],
    }
    assert isinstance(data, list)
    for obj in data:
        if not isinstance(obj, dict):
            raise exc.DataGenSyntaxError(
                f"Top level elements in a data generation template should all be dictionaries, not {obj}",
                **context.line_num(data),
            )
        parent_collection = None
        for collection in top_level_collections:
            if obj.get(collection):
                if parent_collection:
                    raise exc.DataGenError(
                        f"Top level element seems to match two name patterns: {collection, parent_collection}",
                        **context.line_num(obj),
                    )
                parent_collection = collection
        if parent_collection:
            top_level_collections[parent_collection].append(obj)
        else:
            raise exc.DataGenError(
                f"Unknown object type {obj}", **context.line_num(obj)
            )
    return top_level_collections
Example #3
def get_output_stream_class(output_format):
    from snowfakery.plugins import resolve_plugin_alternatives, plugin_path

    if "." in output_format:
        output_stream_classname = output_format
    else:
        output_format = output_format.lower() if output_format else None
        output_stream_classname = OUTPUT_FORMATS.get(output_format)
    if not output_stream_classname:
        raise exc.DataGenError(
            f"Unknown format or file extension: {output_format}")
    with plugin_path([]):
        rc = resolve_plugin_alternatives(output_stream_classname)
    if not rc:
        raise exc.DataGenError(f"Cannot load {output_stream_classname}")
    return rc
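A short sketch of how the lookup behaves; the concrete keys of OUTPUT_FORMATS are defined elsewhere in the module, so the format names below are illustrative only.
# Built-in formats are resolved through the OUTPUT_FORMATS table (case-insensitively):
cls = get_output_stream_class("JSON")
# Anything containing a dot is treated as a dotted class path and resolved via
# resolve_plugin_alternatives, e.g. a user-supplied stream class:
cls = get_output_stream_class("mypackage.MyOutputStream")  # hypothetical class path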
Example #4
    def _parse_special_args(self, args):
        """Parse args of SpecialObject"""
        nickname = None
        if isinstance(args, str):
            sobj = args
        elif isinstance(args, dict):
            sobj = args["name"]
            if not isinstance(sobj, str):
                raise exc.DataGenError(
                    f"`name` argument should be a string {sobj}: {type(sobj)}")
            nickname = args.get("nickname")
            if nickname and not isinstance(nickname, str):
                raise exc.DataGenError(
                    f"`nickname` argument should be a string {nickname}: {type(sobj)}"
                )

        return sobj, nickname
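The helper accepts either a bare string or a dict with a `name` and an optional `nickname`. A sketch of the two shapes, where `parser` stands in for the plugin instance:
sobj, nickname = parser._parse_special_args("PersonContact")
# -> ("PersonContact", None)
sobj, nickname = parser._parse_special_args({"name": "PersonContact", "nickname": "pc"})
# -> ("PersonContact", "pc")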
Example #5
def parse_element(
    dct: Dict,
    element_type: str,
    mandatory_keys: Dict,
    optional_keys: Dict,
    context: ParseContext,
) -> Any:
    """Check the keys of `dct` against `mandatory_keys` and `optional_keys`
    and return their values as attributes of the returned object."""
    expected_keys = {
        **mandatory_keys,
        **optional_keys,
        "__line__": LineTracker,
        element_type: str,
    }
    rc_obj: Any = DictValuesAsAttrs()
    rc_obj.line_num = dct["__line__"]
    with context.change_current_parent_object(dct):
        for key in dct:
            key_definition = expected_keys.get(key)
            if not key_definition:
                raise exc.DataGenError(
                    f"Unexpected key: {key}", **context.line_num(key)
                )
            else:
                value = dct[key]
                if not isinstance(value, key_definition):
                    raise exc.DataGenError(
                        f"Expected `{key}` to be of type {key_definition} instead of {type(value)}.",
                        **context.line_num(dct),
                    )
                else:
                    setattr(rc_obj, key, value)

        missing_keys = set(mandatory_keys) - set(dct.keys())
        if missing_keys:
            raise exc.DataGenError(
                f"Expected to see `{missing_keys}` in `{element_type}``.",
                **context.line_num(dct),
            )
        defaulted_keys = set(optional_keys) - set(dct.keys())
        for key in defaulted_keys:
            setattr(rc_obj, key, None)

        return rc_obj
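A minimal sketch of a call to parse_element, assuming `dct` came from the YAML loader and therefore carries a `__line__` LineTracker entry; the key names and types here are illustrative, not the exact sets used elsewhere in the module.
dct = {"macro": "address_fields", "fields": {"street": "10 Main St"}, "__line__": line_info}
parsed = parse_element(
    dct,
    "macro",
    mandatory_keys={"fields": dict},
    optional_keys={"friends": list, "include": str},
    context=context,
)
parsed.fields   # {"street": "10 Main St"}
parsed.friends  # None -- optional key defaulted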
Example #6
def parse_included_file(parent_path: Path, inclusion: Dict,
                        context: ParseContext) -> List[Dict]:
    relpath, linenum = relpath_from_inclusion_element(inclusion, context)
    inclusion_path = parent_path.parent / relpath
    # someday add a check that we don't go outside of the project dir
    if not inclusion_path.exists():
        raise exc.DataGenError(f"Cannot load include file {inclusion_path}",
                               **linenum._asdict())
    with inclusion_path.open() as f:
        incl_objects = parse_file(f, context)
        return incl_objects
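For context, a sketch of the inclusion element this expects, assuming the template contained something like `- include_file: ./common_macros.yml` (the exact dict shape is inferred from the surrounding code, not shown in this excerpt):
inclusion = {"include_file": "./common_macros.yml", "__line__": line_info}
objects = parse_included_file(Path("templates/main.yml"), inclusion, context)
# resolves to templates/common_macros.yml, parses it with parse_file, and
# returns its top-level objects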
Example #7
    def SpecialObject(self, context, args) -> ObjectTemplate:
        """Currently there is only one special object defined: PersonContact"""
        sobj, nickname = self._parse_special_args(args)
        line_info = context.line_num()
        if sobj == "PersonContact":
            return self._render_person_contact(context, sobj, nickname,
                                               line_info)
        else:
            raise exc.DataGenError(
                f"Unknown special object '{sobj}'. Did you mean 'PersonContact'?",
                None,
                None,
            )
Example #8
def include_macro(
    name: str, context: ParseContext, parent_macros=()
) -> Tuple[List[FieldFactory], List[TemplateLike]]:
    macro = context.macros.get(name)
    if not macro:
        raise exc.DataGenNameError(
            f"Cannot find macro named {name}", **context.line_num()
        )
    parsed_macro = parse_element(
        macro, "macro", {}, {"fields": Dict, "friends": List, "include": str}, context
    )
    fields = parsed_macro.fields or {}
    friends = parsed_macro.friends or []
    fields, friends = parse_fields(fields, context), parse_friends(friends, context)
    if name in parent_macros:
        idx = parent_macros.index(name)
        raise exc.DataGenError(
            f"Macro `{name}` calls `{'` which calls `'.join(parent_macros[idx+1:])}` which calls `{name}`",
            **context.line_num(macro),
        )
    parse_inclusions(macro, fields, friends, context, parent_macros + (name,))
    return fields, friends
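The recursion guard reads densely; here is a standalone illustration of the message it builds for a cycle A -> B -> C -> A (plain Python, independent of snowfakery):
parent_macros = ("A", "B", "C")
name = "A"
idx = parent_macros.index(name)  # 0
msg = f"Macro `{name}` calls `{'` which calls `'.join(parent_macros[idx + 1:])}` which calls `{name}`"
print(msg)  # Macro `A` calls `B` which calls `C` which calls `A`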
Example #9
def _get_output_streams(dburls, output_files, output_format, output_folder):
    with ExitStack() as onexit:
        output_streams = []  # we allow multiple output streams
        for dburl in dburls:
            output_streams.append(SqlDbOutputStream.from_url(dburl))

        if output_format and not output_files:
            output_stream_cls = get_output_stream_class(output_format)

            if output_stream_cls.is_text and not output_files:
                output_streams.append(output_stream_cls(sys.stdout))

            if output_stream_cls.uses_folder:
                output_streams.append(output_stream_cls(output_folder))

        if output_files:
            for f in output_files:
                format = output_format
                if output_folder and isinstance(f, (str, Path)):
                    # put the file in the output folder
                    f = Path(output_folder, f)
                file_context = open_file_like(f, "w")
                path, open_file = onexit.enter_context(file_context)
                if path and not format:
                    format = Path(path).suffix[1:].lower()

                if not format:
                    raise exc.DataGenError("No format supplied or inferrable")

                output_stream_cls = get_output_stream_class(format)
                if output_stream_cls.uses_path:
                    open_file.close()
                    open_file = path
                output_streams.append(
                    output_stream_cls(open_file, format=format))

        yield output_streams
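The ExitStack-plus-yield pattern indicates this generator is consumed as a context manager (presumably wrapped with contextlib.contextmanager in the full module, which is not part of this excerpt). A sketch of the intended call:
with _get_output_streams(
    dburls=[], output_files=["out.json"], output_format=None, output_folder="."
) as streams:
    for stream in streams:
        ...  # write generated rows to each stream
# on exit, ExitStack closes every file opened via open_file_like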