Example #1
def validate_data(request, attrs=None, validators=None):

    logger.debug('attrs: %s', attrs)
    logger.debug('body: %s', request.body)

    data = json.loads(request.body)
    data_to_validate = {
        k: v
        for k, v in data.items() if k in (attrs or {}).keys()
    }
    logger.debug('data_to_validate: %s', data_to_validate)

    # missing field
    if attrs and not data_to_validate:
        raise Http400

    # invalid field
    if data_to_validate:
        try:
            attr.make_class("C", attrs)(**data_to_validate)
        except (TypeError, ValueError):
            raise Http400

    # custom validation
    for validate_func in (validators or []):
        validate_func(request)

    return data
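
Note on Example #1: the try/except works because a class produced by attr.make_class enforces its field set and validators when instantiated. A minimal sketch of that behaviour, assuming attrs maps field names to attr.ib() definitions (Payload, age and name are made-up names for illustration):

import attr

attrs = {
    "age": attr.ib(validator=attr.validators.instance_of(int)),
    "name": attr.ib(validator=attr.validators.instance_of(str)),
}
Payload = attr.make_class("Payload", attrs)

Payload(age=30, name="Ada")        # passes validation
try:
    Payload(age="30", name="Ada")  # instance_of raises TypeError
except (TypeError, ValueError):
    print("rejected -> would raise Http400 above")
try:
    Payload(age=30)                # missing required field also raises TypeError
except (TypeError, ValueError):
    print("rejected -> would raise Http400 above")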
Example #2
def test_field_set_can_be_used_to_declare_multiple_fields_in_one_attribute():
    AuthorRecord = attr.make_class("AuthorRecord", ["name"])
    BookRecord = attr.make_class("BookRecord", ["title"])

    class Author(StaticDataObjectType):
        __records__ = [AuthorRecord("PG Wodehouse")]

        name = field(type=String)

    class Book(StaticDataObjectType):
        __records__ = [BookRecord("Leave it to Psmith")]

        title = field(type=String)

    class Root(RootType):
        fields = field_set(
            author=single(lambda: StaticDataObjectType.select(Author)),
            book=single(lambda: StaticDataObjectType.select(Book)),
        )

    result = executor(Root)("{ author { name } book { title } }")
    assert_that(
        result,
        is_successful_result(
            data={
                "author": {
                    "name": "PG Wodehouse"
                },
                "book": {
                    "title": "Leave it to Psmith"
                },
            }))
Example #3
def main():
    p = Person(name="kute", salary=1000, age=18)
    print(p)
    print(p.tall)
    print(attr.asdict(p))  # convert to a dict
    print(attr.astuple(p))  # convert to a tuple
    print(attr.fields(Person))

    s = Square(5)
    print(s, s.y)

    personlist = PersonList(persons=[
        Person("kute", 1000, 18),
        Person("bai", 2000, 20),
        Person("yalong", 3000, 22)
    ],
                            id="10086",
                            num=76)
    result = attr.asdict(personlist, filter=filter_pro)  # filtering approach 1: custom filter callable
    print(result)
    # filtering approach 2: exclude the id and num members, plus any values of type str
    result = attr.asdict(personlist,
                         filter=attr.filters.exclude(
                             attr.fields(PersonList).id, str,
                             attr.fields(PersonList).num))
    print(result)

    # create classes dynamically
    C = attr.make_class("C", ["x", "y"])
    B = attr.make_class("B", {"x": attr.ib(), "y": attr.ib()})
    c = C(x=1, y=2)
    b = B(2, 3)
    print(c, b)
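
Note on Example #3: filter_pro is not shown in this snippet. attr.asdict accepts any callable that takes an (attribute, value) pair and returns a bool, so a hand-written filter roughly equivalent to the attr.filters.exclude call above could look like this (an assumed stand-in, not the original filter_pro):

def filter_pro(attribute, value):
    # Drop the id and num members, plus any value of type str.
    if attribute.name in ("id", "num"):
        return False
    return not isinstance(value, str)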
Example #5
def test_internal_fields_cannot_be_queried_directly():
    AuthorRecord = attr.make_class("AuthorRecord", ["id", "name"])
    BookRecord = attr.make_class("BookRecord", ["author_id", "title"])

    class Author(StaticDataObjectType):
        __records__ = [
            AuthorRecord("PGW", "PG Wodehouse"),
            AuthorRecord("JH", "Joseph Heller"),
        ]

        id = field(type=String)
        name = field(type=String)

    class Book(StaticDataObjectType):
        __records__ = [
            BookRecord("PGW", "Leave it to Psmith"),
            BookRecord("PGW", "The Code of the Woosters"),
        ]

        author_id = field(type=String, internal=True)
        author = single(lambda: StaticDataObjectType.select(
            Author,
            join={Book.author_id: Author.id},
        ))
        title = field(type=String)

    class Root(RootType):
        books = many(lambda: StaticDataObjectType.select(Book))

    execute = executor(Root)
    assert_that(
        execute("{ books { title authorId } }"),
        is_invalid_result(errors=contains_inanyorder(
            has_string(starts_with('Cannot query field "authorId"')), )),
    )
    # Check that internal fields can still be used for joining
    assert_that(
        execute("{ books { title author { name } } }"),
        is_successful_result(
            data={
                "books": [
                    {
                        "title": "Leave it to Psmith",
                        "author": {
                            "name": "PG Wodehouse"
                        }
                    },
                    {
                        "title": "The Code of the Woosters",
                        "author": {
                            "name": "PG Wodehouse"
                        }
                    },
                ],
            }),
    )
Example #6
 def test_create_dockerfile(self):
     cls = attr.make_class(
         'MockRecipes',
         ['install_nodejs', 'install_rust', 'yum_install', 'pip3_install'])
     subrecipes = cls(None, None, None, None)
     cls = attr.make_class('Mock', ['from_', 'recipes', 'workdir'])
     obj = cls('baseImage', subrecipes, None)
     lines = phase3.create_dockerfile(obj)
     self.assertEqual(lines, 'FROM baseImage')
     obj.workdir = '/test'
     lines = phase3.create_dockerfile(obj)
     self.assertEqual(lines, 'FROM baseImage\nWORKDIR /test')
Example #7
 def test_create_dockerfile_mock(self, pip3, nodejs, rust, yum):
     pip3.return_value = 'pip3'
     nodejs.return_value = ['nodejs']
     rust.return_value = ['rust']
     yum.return_value = 'yum'
     cls = attr.make_class(
         'MockRecipes',
         ['install_nodejs', 'install_rust', 'yum_install', 'pip3_install'])
     subrecipes = cls('a', 'b', 'c', 'd')
     cls = attr.make_class('Mock', ['from_', 'recipes', 'workdir'])
     obj = cls('baseImage', subrecipes, None)
     text = phase3.create_dockerfile(obj)
     self.assertEqual(text, 'FROM baseImage\nyum\nrust\nnodejs\npip3')
Example #8
def make_klass(spec):
    """
    Create a data class given a spec.

    Parameters
    ----------
    spec :
        TODO

    """
    if spec is None:
        return None
    fields = spec.fields
    if fields:
        newfields = dict()
        for item in fields:
            if len(item) == 2:
                if isinstance(item[1], attr._make._CountingAttr):
                    newfields[item[0]] = item[1]
                else:
                    newfields[item[0]] = attr.ib(type=item[1])
            else:
                if isinstance(item[2], attr._make._CountingAttr):
                    raise ValueError("Three part should not have attr")
                    # newfields[item[0]] = item[2]
                else:
                    newfields[item[0]] = attr.ib(item[2], type=item[1])
        fields = newfields
    return attr.make_class(spec.name, fields, bases=spec.bases, kw_only=True)
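
Note on Example #8: the field specs accepted here are either (name, type) pairs or (name, type, default) triples, and the resulting class is keyword-only. A small usage sketch with an assumed minimal spec object (Spec and Point are hypothetical names):

import attr

Spec = attr.make_class("Spec", ["name", "fields", "bases"])
spec = Spec(
    name="Point",
    fields=[("x", int), ("y", int, 0)],  # (name, type) and (name, type, default)
    bases=(object,),
)

Point = make_klass(spec)   # assumes make_klass from the example is in scope
p = Point(x=1)             # kw_only=True, so fields are keyword-only; y keeps its default
print(p)                   # Point(x=1, y=0)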
Example #9
    def _get_mock_data():

        tmpdir = tmp_path_factory.mktemp(tools.get_output_name())

        create_data_dir(tmpdir)
        tmpdirs.append(tmpdir)

        outdir = get_outdir(tmpdir)
        data = tools.get_command_info(outdir)
        cmd_exe_vals = ["user", "host", "cmd", "workdir"]
        for k, v in data.items():
            if "/" in v:
                data[k] = Path(v)

        data["create_sample_output"] = False
        data["save_sample_output"] = False

        for k in cmd_exe_vals:
            data.pop(k, None)

        DataClass = attr.make_class(data["test_name"] + "_data",
                                    [k for k in data.keys()],
                                    slots=True)
        data = DataClass(*[v for v in data.values()])
        return tmpdir, data
Example #10
def test_structure_prefers_attrib_converters(converter_type):
    attrib_converter = Mock()
    attrib_converter.side_effect = lambda val: str(val)

    converter = converter_type(prefer_attrib_converters=True)
    cl = make_class(
        "HasConverter",
        {
            # non-built-in type with custom converter
            "ip": attrib(type=Union[IPv4Address, IPv6Address], converter=ip_address),
            # attribute without type
            "x": attrib(converter=attrib_converter),
            # built-in types converters
            "y": attrib(type=int, converter=attrib_converter),
            # attribute with type and default value
            "z": attrib(type=int, converter=attrib_converter, default=5),
        },
    )

    inst = converter.structure(dict(ip="10.0.0.0", x=1, y=3), cl)

    assert inst.ip == IPv4Address("10.0.0.0")

    attrib_converter.assert_any_call(1)
    assert inst.x == "1"

    attrib_converter.assert_any_call(3)
    assert inst.y == "3"

    attrib_converter.assert_any_call(5)
    assert inst.z == "5"
Example #11
def test_structure_fallback_to_attrib_converters(converter_type):
    attrib_converter = Mock()
    attrib_converter.side_effect = lambda val: str(val)

    def called_after_default_converter(val):
        if not isinstance(val, int):
            raise ValueError(
                "The 'int' conversion should have happened first by the built-in hooks"
            )
        return 42

    converter = converter_type()
    cl = make_class(
        "HasConverter",
        {
            # non-built-in type with custom converter
            "ip": attrib(type=Union[IPv4Address, IPv6Address], converter=ip_address),
            # attribute without type
            "x": attrib(converter=attrib_converter),
            # built-in types converters
            "z": attrib(type=int, converter=called_after_default_converter),
        },
    )

    inst = converter.structure(dict(ip="10.0.0.0", x=1, z="3"), cl)

    assert inst.ip == IPv4Address("10.0.0.0")
    assert inst.x == "1"
    attrib_converter.assert_any_call(1)
    assert inst.z == 42
Example #12
 def test_run(self):
     cls = attr.make_class('MockSet', ['a'])
     recipe = cls(types.Value('b'))
     steps = [types.Step('set', recipe, None)]
     ctx = phase3.Context()
     phase3.run(steps, ctx)
     self.assertEqual(ctx.variables, {'a': 'b'})
Example #13
def _get_plugin_reqs(plugin, injectables=None):
    if injectables is None:  # allow overriding for testability
        injectables = get_injectables()
        injectables.update(**plugin.overrides or {})
    built = typesnap.lazy_snap(injectables, plugin.requires or (),
                               plugin.maybe_requires or ())
    return attr.make_class('Requirements', sorted(built))(**built)
Example #14
def _process_class(cls, kw_only: bool, make_init: bool, dynamic: bool):
    """Process a given class

    Args:
        cls: basic class definition
        kw_only: set kwarg only
        make_init: make an init function
        dynamic: allows inherited classes to not be @spock decorated

    Returns:
        cls with attrs dunder methods added

    """
    # Handles the MRO and gets old annotations
    bases, attrs_dict, merged_annotations = _base_attr(cls, kw_only, make_init,
                                                       dynamic)
    # Dynamically make an attr class
    obj = attr.make_class(
        name=cls.__name__,
        bases=bases,
        attrs=attrs_dict,
        kw_only=kw_only,
        frozen=True,
        auto_attribs=True,
        init=make_init,
    )
    # For each class we dynamically create we need to register it within the system modules for pickle to work
    setattr(sys.modules["spock"].addons.tune.config, obj.__name__, obj)
    # Swap the __doc__ string from cls to obj
    obj.__doc__ = cls.__doc__
    # Set the __init__ function
    # Handle __annotations__ from the MRO
    obj.__annotations__ = merged_annotations
    return obj
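
Note on Example #14: the sys.modules registration matters because pickle stores instances by module path and class name, so a class created at runtime has to be reachable where pickle will look for it. A stand-alone sketch of the same idea, using a throwaway Dyn class rather than spock's actual config machinery:

import attr
import pickle
import sys

Dyn = attr.make_class("Dyn", ["a"])
# Make the dynamically created class importable under the module recorded on it;
# binding the name at module level has the same effect in a simple script.
setattr(sys.modules[Dyn.__module__], Dyn.__name__, Dyn)

restored = pickle.loads(pickle.dumps(Dyn(a=1)))
print(restored)  # Dyn(a=1)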
Example #15
 def load_object(self, schema, val, path):
     if isinstance(val, types.Step):
         return self.load_step(schema, val, path)
     else:
         if not isinstance(val, collections.abc.Mapping):
             raise TypeMismatch(path)
         temp_obj = {}
         if schema.wildcard_key():
             for key in val:
                 subpath = path + [key]
                 temp_obj[key] = self.load_one(schema.fields[Key('*')],
                                               val[key], subpath)
         else:
             for key in val:
                 if Key(key) not in schema.fields:
                     raise InvalidKey(path, key)
                 subpath = path + [key]
                 temp_obj[key] = self.load_one(schema.fields[Key(key)],
                                               val[key], subpath)
         for key in schema.fields:
             if key.name not in temp_obj and key.name != '*':
                 temp_obj[key.name] = types.Value(None)
             if key.is_required:
                 if key.name not in val:
                     raise MissingKey(path)
         temp_obj = make_keys_safe(temp_obj)
         cls = attr.make_class('SchemaClass', list(temp_obj.keys()))
         return cls(**temp_obj)
Example #16
def _create_hyp_class(attrs_and_strategy, frozen=None):
    """
    A helper function for Hypothesis to generate attrs classes.

    The result is a tuple: an attrs class, and a tuple of values to
    instantiate it.
    """

    def key(t):
        return t[0].default is not NOTHING

    attrs_and_strat = sorted(attrs_and_strategy, key=key)
    attrs = [a[0] for a in attrs_and_strat]
    for i, a in enumerate(attrs):
        a.counter = i
    vals = tuple((a[1]) for a in attrs_and_strat)
    return st.tuples(
        st.builds(
            lambda f: make_class(
                "HypClass", OrderedDict(zip(gen_attr_names(), attrs)), frozen=f
            ),
            st.booleans() if frozen is None else st.just(frozen),
        ),
        st.tuples(*vals),
    )
Example #17
def test_arg_refiner_can_take_context():
    AuthorRecord = attr.make_class("AuthorRecord", ["name"])

    class Author(StaticDataObjectType):
        __records__ = [
            AuthorRecord("PG Wodehouse"),
            AuthorRecord("Joseph Heller"),
        ]

        name = field(type=String)

    class Root(RootType):
        author = single(lambda: StaticDataObjectType.select(Author))

        @author.arg("nameStartsWith", Boolean)
        def author_arg_starts_with(records, _, context):
            return list(
                filter(
                    lambda record: record.name.startswith(context),
                    records,
                ))

    result = executor(Root)(
        """{ author(nameStartsWith: true) { name } }""",
        context="P",
    )
    assert_that(
        result,
        is_successful_result(data={
            "author": {
                "name": "PG Wodehouse"
            },
        }))
Example #18
def entity(cls):
    for k, v in cls.__dict__.items():
        if hasattr(v, 'init') and isinstance(v.init, bool):
            v.init = False
    result = attr.make_class(cls.__name__, (),
                             bases=(_Signature, attr.s(cls), _Entity))
    return functools.wraps(cls, updated=())(result)
Example #19
def data(request, output_dir):
    """A function-scoped test fixture used for AFNI's testing. The fixture
    sets up output directories as required and provides the named tuple "data"
    to the calling function. The data object contains some fields convenient
    for writing tests like the output directory. Finally the data fixture
    handles test input data.files  listed in a data_paths dictionary (if
    defined within the test module) the fixture will download them to a local
    datalad repository as required. Paths should be listed relative to the
    repository base-directory.

    Args: request (pytest.fixture): A function level pytest request object
        providing information about the calling test function.

    Returns:
        collections.NameTuple: A data object for conveniently handling the specification
    """
    test_name = get_current_test_name()
    tests_data_dir = get_tests_data_dir()

    # Set module specific values:
    try:
        data_paths = request.module.data_paths
    except AttributeError:
        data_paths = {}

    module_outdir = output_dir / Path(request.module.__file__).stem.replace(
        "test_", "")
    test_logdir = module_outdir / get_current_test_name() / "captured_output"
    if not test_logdir.exists():
        os.makedirs(test_logdir, exist_ok=True)

    # This will be created as required later
    sampdir = tools.convert_to_sample_dir_path(test_logdir.parent)

    # start creating output dict, downloading test data as required
    out_dict = {
        k: misc.process_path_obj(v, tests_data_dir)
        for k, v in data_paths.items()
    }

    # Get the comparison directory and check if it needs to be downloaded
    comparison_dir = get_test_comparison_dir_path(module_outdir)

    # Define output for calling module and get data as required:
    out_dict.update({
        "module_outdir": module_outdir,
        "outdir": module_outdir / get_current_test_name(),
        "sampdir": sampdir,
        "logdir": test_logdir,
        "comparison_dir": comparison_dir,
        "base_comparison_dir": get_base_comparison_dir_path(),
        "base_outdir": output_dir,
        "tests_data_dir": tests_data_dir,
        "test_name": test_name,
    })

    DataClass = attr.make_class(test_name + "_data",
                                [k for k in out_dict.keys()],
                                slots=True)
    return DataClass(*[v for v in out_dict.values()])
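
Note on Example #19: the last two lines freeze the collected dict into a slotted attrs class whose fields mirror the dict keys, giving attribute access instead of key lookups. A stripped-down illustration with hypothetical keys:

import attr

out_dict = {"outdir": "/tmp/out", "test_name": "smoke"}  # hypothetical contents
DataClass = attr.make_class("smoke_data", list(out_dict), slots=True)
data = DataClass(*out_dict.values())  # positional args follow dict insertion order
print(data.outdir, data.test_name)    # attribute access; slots forbid new attributes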
Example #20
def concepticon_concepts(concept_lists):
    attrib = {}
    for cl in concept_lists:
        for col in cl.metadata.tableSchema.columns:
            if col.name not in ['ID', 'CONCEPTICON_ID', 'CONCEPTICON_GLOSS']:
                attrib[col.name] = attr.ib(default=None)
    return attr.make_class("ConcepticonConcept", attrib, bases=(Concept,))
Example #21
def test_can_define_input_object_types():
    AuthorRecord = attr.make_class("AuthorRecord", ["name"])

    class AuthorSelection(InputObjectType):
        name_starts_with = field(type=String)

    class Author(StaticDataObjectType):
        __records__ = [
            AuthorRecord("PG Wodehouse"),
            AuthorRecord("Joseph Heller"),
        ]

        name = field(type=String)

    class Root(RootType):
        author = single(lambda: StaticDataObjectType.select(Author))

        @author.arg("selection", AuthorSelection)
        def author_arg_selection(records, selection):
            return list(
                filter(
                    lambda record: record.name.startswith(selection.name_starts_with),
                    records,
                ))

    result = executor(Root)(
        """{ author(selection: {nameStartsWith: "P"}) { name } }""")
    assert_that(
        result,
        is_successful_result(data={
            "author": {
                "name": "PG Wodehouse"
            },
        }))
Example #22
def test_structure_simple_from_dict_default(converter, cl_and_vals):
    """Test structuring non-nested attrs classes with default value."""
    a, _ = cl_and_vals
    cl = make_class("HypClass", {"a": a})
    obj = cl()
    loaded = converter.structure({}, cl)
    assert obj == loaded
Example #23
    def __attrs_post_init__(self):
        """ Build the constructor that can create feature structures of this type """
        name = _string_to_valid_classname(self.name)
        fields = {feature.name: attr.ib(default=None) for feature in self.all_features}
        fields["type"] = attr.ib(default=self.name)

        self._constructor = attr.make_class(name, fields, bases=(FeatureStructure,), slots=True, cmp=False)
Example #24
def test_arg_method_can_be_used_as_decorator_to_refine_query():
    AuthorRecord = attr.make_class("AuthorRecord", ["name"])

    class Author(StaticDataObjectType):
        __records__ = [
            AuthorRecord("PG Wodehouse"),
            AuthorRecord("Joseph Heller"),
        ]

        name = field(type=String)

    class Root(RootType):
        author = single(lambda: StaticDataObjectType.select(Author))

        @author.arg("nameStartsWith", String)
        def author_arg_starts_with(records, prefix):
            return list(
                filter(
                    lambda record: record.name.startswith(prefix),
                    records,
                ))

    result = executor(Root)("""{ author(nameStartsWith: "P") { name } }""")
    assert_that(
        result,
        is_successful_result(data={
            "author": {
                "name": "PG Wodehouse"
            },
        }))
Example #25
def test_can_implement_graphql_core_interfaces():
    HasName = GraphQLInterfaceType("HasName",
                                   fields={
                                       "name": GraphQLField(GraphQLString),
                                   },
                                   resolve_type=lambda: None)

    AuthorRecord = attr.make_class("AuthorRecord", ["name"])

    class Author(StaticDataObjectType):
        __interfaces__ = [HasName]

        __records__ = [AuthorRecord("PG Wodehouse")]

        name = field(type=String)

    class Root(RootType):
        author = single(lambda: StaticDataObjectType.select(Author))

    result = executor(Root)("""{
        author {
            ...on HasName {
                name
            }
        }
    }""")
    assert_that(
        result,
        is_successful_result(data={
            "author": {
                "name": "PG Wodehouse"
            },
        }))
Example #26
def test_interfaces_can_be_declared_using_function():
    AuthorRecord = attr.make_class("AuthorRecord", ["name"])

    class Author(StaticDataObjectType):
        __interfaces__ = lambda: [HasName]

        __records__ = [AuthorRecord("PG Wodehouse")]

        name = field(type=String)

    class HasName(InterfaceType):
        name = field(type=String)

    class Root(RootType):
        author = single(lambda: StaticDataObjectType.select(Author))

    result = executor(Root)("""{
        author {
            ...on HasName {
                name
            }
        }
    }""")
    assert_that(
        result,
        is_successful_result(data={
            "author": {
                "name": "PG Wodehouse"
            },
        }))
Example #27
    def __init__(
        self,
        name: str,
        desc: str,
        labels: Sequence[str],
        sub_metrics: Sequence[str],
    ):
        self.name = name
        self.desc = desc
        self.labels = labels
        self.sub_metrics = sub_metrics

        # Create a class which have the sub_metrics values as attributes, which
        # default to 0 on initialization. Used to pass to registered callbacks.
        self._metrics_class: Type[MetricsEntry] = attr.make_class(
            "_MetricsEntry",
            attrs={x: attr.ib(default=0) for x in sub_metrics},
            slots=True,
        )

        # Counts number of in flight blocks for a given set of label values
        self._registrations: Dict[
            Tuple[str, ...], Set[Callable[[MetricsEntry], None]]
        ] = {}

        # Protects access to _registrations
        self._lock = threading.Lock()

        self._register_with_collector()
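
Note on Example #27: as the comment says, each sub-metric becomes an attribute defaulting to 0, and slots=True keeps the per-entry objects small while rejecting attributes that were never declared. A tiny sketch with made-up sub-metric names:

import attr

Entry = attr.make_class(
    "_MetricsEntry",
    attrs={name: attr.ib(default=0) for name in ("started", "completed")},
    slots=True,
)

e = Entry()
e.started += 1
print(e)          # _MetricsEntry(started=1, completed=0)
# e.finished = 1  # would raise AttributeError: slots classes reject unknown attributes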
Example #28
def create_parameter_group(
        config_dict_section: Union[dict, DictConfig]) -> ParameterGroupTypeVar:
    """
    Creates a parameter group object out of a config_dict_section, which is a dictionary or DictConfig representing a
    parameter group. This method should only be used for simple groups, i.e. parameter groups not containing any other
    parameter groups. For nested groups, the function 'create_nested_parameter_group' should be used instead.

    :param config_dict_section: Dictionary representation of the parameter group to construct
    :return: ParameterGroup or ConfigurableParameters object constructed according to config_dict_section
    """
    params_and_values = gather_parameter_arguments_and_values_from_dict(
        config_dict_section)
    make_arguments = params_and_values["make_arguments"]
    call_arguments = params_and_values["call_arguments"]
    all_parameter_values = params_and_values["values"]
    group_type = str(config_dict_section.pop(metadata_keys.TYPE))

    group_constructor_type = GroupElementMapping[group_type].value
    group_constructor = attr.make_class(
        GroupElementMapping[group_type].name,
        bases=(group_constructor_type, ),
        attrs=make_arguments,
        eq=False,
        order=False,
    )

    parameter_group = group_constructor(**call_arguments)

    for parameter, value in all_parameter_values.items():
        if value is not None:
            setattr(parameter_group, parameter, value)

    return parameter_group
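
Note on Example #28: the bases= argument is what lets the generated class inherit the parameter-group behaviour, while eq=False and order=False are simply forwarded to attr.s. A generic sketch of that pattern (Base, Sub and threshold are illustrative names, not the library's actual classes):

import attr

class Base:
    def describe(self):
        return "base behaviour"

Sub = attr.make_class(
    "Sub",
    {"threshold": attr.ib(default=0.5)},
    bases=(Base,),  # generated class inherits from Base
    eq=False,       # forwarded to attr.s: no generated __eq__/__ne__
    order=False,    # no generated ordering methods
)

s = Sub()
print(s.describe(), s.threshold)  # base behaviour 0.5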
Example #29
    def test_programmatic(self, slots, frozen):
        """
        `attr.make_class` works.
        """
        PC = attr.make_class("PC", ["a", "b"], slots=slots, frozen=frozen)

        assert (
            Attribute(
                name="a",
                default=NOTHING,
                validator=None,
                repr=True,
                cmp=None,
                eq=True,
                order=True,
                hash=None,
                init=True,
            ),
            Attribute(
                name="b",
                default=NOTHING,
                validator=None,
                repr=True,
                cmp=None,
                eq=True,
                order=True,
                hash=None,
                init=True,
            ),
        ) == attr.fields(PC)
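
Note on Example #29: attr.fields() returns the attribute definitions as a tuple that also allows access by field name, which can make assertions like the one above easier to spot-check:

import attr

PC = attr.make_class("PC", ["a", "b"])
fs = attr.fields(PC)
print(fs[0].name)                    # 'a'
print(fs.b.default is attr.NOTHING)  # True: no default was given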
Example #30
 def __init__(self, name):
     self.name = name
     self.items = dict()
     self.items["displayId"] = name
     default_sha = "e685cbba9aab1683a4a504582b4e30af36cdfddb"
     self.commit = attr.make_class("Commit",
                                   {"sha": attr.ib(default=default_sha)})()
Example #31
    def grammar(self):
        ret = dict()

        @attr.s
        class GrammarStub(object):
            name = attr.ib()
            grammar = attr.ib()

            @classmethod
            def make(cls, name, *a):
                if isinstance(a[0], str):
                    ret[name] = re.compile(a[0])
                    return ret[name]
                else:
                    self = cls(name, a)
                    ret[name] = self
                    return self

        make = GrammarStub.make

        lineSep = make("lineSep", b'\r')
        lineSepLax = make("lineSepLax", b'\n')
        segmentName = make("segmentName", '[a-zA-Z0-9_]+')
        component = make("component", '.*?(?=[' + self.fieldSep + '])')  # lookahead whee
        Field = make("Field", component, pp_maybe_some(self.componentSep, component))
        Segment = make("Segment", pp_attr('name', segmentName), pp_maybe_some(self.fieldSep, Field), lineSep)  # fixme use ignore() on separators
        MSH = make("MSH", pp_attr('name', MSH_SEGMENT), pp_maybe_some(self.fieldSep, Field), lineSep)
        ## InterfaceMessage = make("InterfaceMessage", START_BLOCK, MSH, lineSep, pp_maybe_some(Segment), END_BLOCK)
        ## InterfaceMessageLax = make("InterfaceMessageLax", MSH, lineSepLax, pp_maybe_some(Segment))

        return attr.make_class(b"Grammar", ret.keys())(**ret)
Example #32
    def test_make_class_ordered(self):
        """
        If `make_class()` is passed ordered attrs, their order is respected
        instead of the counter.
        """
        b = attr.ib(default=2)
        a = attr.ib(default=1)

        C = attr.make_class("C", ordered_dict([("a", a), ("b", b)]))

        assert "C(a=1, b=2)" == repr(C())
Example #33
 def test_programmatic(self):
     """
     `attr.make_class` works.
     """
     PC = attr.make_class("PC", ["a", "b"])
     assert (
         Attribute(name="a", default=NOTHING, validator=None,
                   repr=True, cmp=True, hash=True, init=True),
         Attribute(name="b", default=NOTHING, validator=None,
                   repr=True, cmp=True, hash=True, init=True),
     ) == attr.fields(PC)
Example #34
 def _convert(cls, result_cls, data):
     try:
         return result_cls(**data)
     except TypeError:
         try:
             fields = [f.name for f in attr.fields(result_cls)]
             return result_cls(**{
                 k: v
                 for k, v in data.items() if k in fields
             })
         except TypeError:
             return attr.make_class("ApiResponse", list(data.keys()))(**data)
Example #35
def make_attrs_class(typename, d):

    import attr

    vals = {}
    for k, v in d.items():
        if v[2] == float:
            vals[k] = attr.ib(
                default=v[0], validator=attr.validators.instance_of(v[2]))
        else:
            vals[k] = attr.ib(default=v[0])
    C = attr.make_class(typename, vals)
    return C()
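
Note on Example #35: the shape of d matters here; each value is expected to be a tuple whose first element is the default and whose third element is the type, with float-typed entries additionally getting an instance_of validator. A hedged usage sketch with made-up keys, assuming make_attrs_class from the example is in scope:

d = {
    "rate": (0.5, "a float field", float),   # gets an instance_of(float) validator
    "label": ("x", "any other field", str),  # default only, no validator
}
obj = make_attrs_class("Config", d)
print(obj)  # Config(rate=0.5, label='x')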
Example #36
    def __init__(self, name, desc, labels, sub_metrics):
        self.name = name
        self.desc = desc
        self.labels = labels
        self.sub_metrics = sub_metrics

        # Create a class which have the sub_metrics values as attributes, which
        # default to 0 on initialization. Used to pass to registered callbacks.
        self._metrics_class = attr.make_class(
            "_MetricsEntry",
            attrs={x: attr.ib(0) for x in sub_metrics},
            slots=True,
        )

        # Counts number of in flight blocks for a given set of label values
        self._registrations = {}

        # Protects access to _registrations
        self._lock = threading.Lock()

        self._register_with_collector()
Example #37
from . import load_tf_lib
from .cube_dim_transcoder import CubeDimensionTranscoder
from .staging_area_wrapper import create_staging_area_wrapper
from .sources import (SourceContext, DefaultsSourceProvider)
from .sinks import (SinkContext, NullSinkProvider)
from .start_context import StartContext
from .stop_context import StopContext
from .init_context import InitialisationContext

ONE_KB, ONE_MB, ONE_GB = 1024, 1024**2, 1024**3

QUEUE_SIZE = 10

rime = load_tf_lib()

DataSource = attr.make_class("DataSource", ['source', 'dtype', 'name'],
    slots=True, frozen=True)
DataSink = attr.make_class("DataSink", ['sink', 'name'],
    slots=True, frozen=True)
FeedOnce = attr.make_class("FeedOnce", ['ph', 'var', 'assign_op'],
    slots=True, frozen=True)

class RimeSolver(MontblancTensorflowSolver):
    """ RIME Solver Implementation """

    def __init__(self, slvr_cfg):
        """
        RimeSolver Constructor

        Parameters:
            slvr_cfg : SolverConfiguration
                Solver Configuration variables
Example #38
            self.a = a
            self.b = b
            self.c = c
            super(super_dumb_class, self).__init__(a=a, b=b, c=c)
    return super_dumb_class
super_dumb_class = make_super_dumb_class()

class dumb_class(object):
    def __init__(self, a, b, c="abc"):
        self.a = a
        self.b = b
        self.c = c

namedtuple_class = namedtuple("namedtuple_class", ["a", "b", "c"])

attrs_class = make_class("attrs_class", ["a", "b", "c"])


def test_characteristic(benchmark):
    assert benchmark(partial(characteristic_class, a=1, b=2, c=1))


def test_fields(benchmark):
    assert benchmark(partial(fields_class, a=1, b=2, c=1))


def test_fields_nosuper(benchmark):
    assert benchmark(partial(fields_nosuper_class, a=1, b=2, c=1))


def test_slots_fields(benchmark):
Example #39
    a_number = attr.ib(default=42)
    list_of_numbers = attr.ib(default=attr.Factory(list))

    def hard_math(self, another_number):
        return self.a_number + sum(self.list_of_numbers) * another_number

sc = SomeClass(1, [1, 2, 3])
sc
# SomeClass(a_number=1, list_of_numbers=[1, 2, 3])

sc.hard_math(3)
# 19

sc == SomeClass(1, [1, 2, 3])
# True

sc != SomeClass(2, [3, 2, 1])
# True

attr.asdict(sc)
# {'a_number': 1, 'list_of_numbers': [1, 2, 3]}

SomeClass()
# SomeClass(a_number=42, list_of_numbers=[])

C = attr.make_class("C", ["a", "b"])
C("foo", "bar")
# C(a='foo', b='bar')

print("Finished...")
Example #40
    pass


@attr.s
@six.add_metaclass(Meta)
class WithMeta(object):
    pass


@attr.s(slots=True)
@six.add_metaclass(Meta)
class WithMetaSlots(object):
    pass


FromMakeClass = attr.make_class("FromMakeClass", ["x"])


class TestDarkMagic(object):
    """
    Integration tests.
    """
    @pytest.mark.parametrize("cls", [C2, C2Slots])
    def test_fields(self, cls):
        """
        `attr.fields` works.
        """
        assert (
            Attribute(name="x", default=foo, validator=None,
                      repr=True, cmp=True, hash=None, init=True),
            Attribute(name="y", default=attr.Factory(list), validator=None,