def as_type(self, objector_obj):
    """Attach *objector_obj* as this node's serde and record it for debugging.

    The object is stored on ``self._serde``, wrapped in an objector
    Entity, and appended to the node's debug trace.

    Returns:
        self, so calls can be chained fluently.
    """
    self._serde = objector_obj
    # Wrap the serde object in an objector Entity and install it.
    self.set_objector(entity.Entity(entity.Entity.objector, objector_obj))
    self.append_debug_info("\nserde: " + str(objector_obj))
    return self
def as_type(self, objector_obj):
    """Set the serde used to (de)serialize this node's records.

    Args:
        objector_obj: a ``bigflow.serde.Serde`` instance.

    Returns:
        self, for fluent chaining.

    Raises:
        TypeError: if *objector_obj* is not a ``Serde``.
    """
    from bigflow import serde
    # `assert` is stripped when Python runs with -O, silently disabling
    # this validation; raise explicitly so the check always holds.
    if not isinstance(objector_obj, serde.Serde):
        raise TypeError("expected bigflow.serde.Serde, got %s" % type(objector_obj))
    self._serde = objector_obj
    objector = entity.Entity(entity.Entity.objector, objector_obj)
    self.set_objector(objector)
    self.append_debug_info("\nserde: " + str(objector_obj))
    return self
def test_processor_entities(self):
    """Verify proto serialization of Accumulate and FlatMap processors.

    Checks that ``to_proto_message()`` emits the expected processor
    entity name and that the embedded functor configs round-trip
    through ``PbPythonProcessorConfig``.
    """
    from bigflow.core import entity_names

    acc = entity.Entity(
        "doodle",
        entity.AccumulateProcessor(entity.PyFn(lambda: 0), entity.CartesianFn()))
    msg = acc.to_proto_message()
    self.assertEqual(entity_names.__dict__["AccumulateProcessor"], msg.name)

    pb_config = processor_pb2.PbPythonProcessorConfig()
    pb_config.ParseFromString(msg.config)
    fn_names = []
    fn_config = []
    # Use a distinct loop variable: the original rebound `config`, the
    # very message being iterated, which invites subtle bugs.
    for functor in pb_config.functor:
        fn_names.append(functor.name)
        fn_config.append(functor.config)

    # List comprehension instead of map(): map() returns an iterator on
    # Python 3, which would never compare equal to the list `fn_names`.
    expect = [entity_names.__dict__[name]
              for name in ("PythonImplFunctor", "CartesianFn")]
    self.assertEqual(expect, fn_names)
    # AccumulateProcessor's PyFn functor should not expect an iterable.
    self.assertEqual(False, cPickle.loads(fn_config[0])["expect_iterable"])
    self.assertEqual("", fn_config[1])

    flat_map = entity.Entity(
        'doodle', entity.FlatMapProcessor(entity.PyFn(lambda: 0)))
    msg = flat_map.to_proto_message()
    self.assertEqual(entity_names.__dict__["FlatMapProcessor"], msg.name)

    pb_config = processor_pb2.PbPythonProcessorConfig()
    pb_config.ParseFromString(msg.config)
    fns = list(pb_config.functor)
    self.assertEqual(1, len(fns))
    fn = fns[0]
    self.assertEqual(entity_names.__dict__["PythonImplFunctor"], fn.name)
    # FlatMapProcessor's functor, by contrast, does expect an iterable.
    fn_config = cPickle.loads(fn.config)
    self.assertEqual(True, fn_config["expect_iterable"])
def by(self, sinker_obj):
    """Install *sinker_obj* (wrapped in a sinker Entity) on this node.

    Returns:
        self, so calls can be chained fluently.
    """
    self.set_sinker(entity.Entity(entity.Entity.sinker, sinker_obj))
    return self