def get_calculated_remains(storage_id):
    """Return remains for *storage_id* adjusted by its active, not-yet-uploaded acts."""
    remains = dumps(
        get_data_of_model(Remain, filters=Remain.storage == storage_id))
    acts = dumps(
        get_data_of_model(
            Act,
            filters=and_(Act.storage == storage_id,
                         Act.is_active == True,
                         Act.is_upload == False)))
    act_ids = [act['id'] for act in acts]
    acts_strings = dumps(
        get_data_of_model(ActTable, filters=ActTable.act.in_(act_ids)))
    return calculate_remains(remains, acts_strings)
def test_toml_class(self):
    """A class must round-trip through TOML with its attribute and method intact."""
    dumped = serializer.dumps(test_objects.MyClass, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(restored.a, test_objects.MyClass.a)
    self.assertEqual(restored.my_func(1), test_objects.MyClass.my_func(1))
def get_table_of_acts(act_id):
    '''
    Get data of table from act.

    :param act_id: ID of act.
    '''
    rows = get_data_of_model(ActTable, filters=ActTable.act == act_id)
    return dumps(rows)
def get_acts_for_storage(count, act_type, upload_start, upload_end,
                         start_date, end_date):
    '''
    Get data of acts, newest first, filtered by optional date ranges.

    :param count: maximum number of acts to return.
    :param act_type: act type selector. NOTE(review): currently unused in the
        body -- kept for interface compatibility; confirm whether it should
        filter ``Act`` records.
    :param upload_start: inclusive lower bound for ``Act.upload_date``, or falsy to skip.
    :param upload_end: inclusive upper bound for ``Act.upload_date``, or falsy to skip.
    :param start_date: inclusive lower bound for ``Act.act_date``, or falsy to skip.
    :param end_date: inclusive upper bound for ``Act.act_date``, or falsy to skip.
    '''
    filters = []
    if upload_start:
        filters.append(Act.upload_date >= upload_start)
    if upload_end:
        filters.append(Act.upload_date <= upload_end)
    if start_date:
        filters.append(Act.act_date >= start_date)
    if end_date:
        filters.append(Act.act_date <= end_date)
    # With no date bounds given, pass filters=None so all acts qualify.
    return dumps(
        get_data_of_model(Act,
                          limit=count,
                          order_by=Act.id.desc(),
                          filters=and_(*filters) if filters else None))
def get_storages_for_subdivision():
    '''
    Get data of all storages.

    NOTE(review): despite the name, the function takes no arguments and
    applies no subdivision filter -- every ``Storage`` record is returned.
    Confirm against callers whether a filter was intended.
    '''
    return dumps(get_data_of_model(Storage))
def test_pickle_class(self):
    """A class must round-trip through pickle with its attribute and method intact."""
    dumped = serializer.dumps(test1.MyClass, 'pickle')
    restored = serializer.loads(dumped, 'pickle')
    self.assertEqual(restored.a, test1.MyClass.a)
    self.assertEqual(restored.my_func(1), test1.MyClass.my_func(1))
def assert_vars(obj):
    """Check that every instance attribute of *obj* survives a round trip in each format."""
    skipped = ("__dict__", "__weakref__", "__module__")
    for fmt in serializer.get_formats():
        payload = serializer.dumps(obj, fmt)
        restored = serializer.loads(payload, fmt)
        for name in vars(obj):
            if name in skipped:
                continue
            assert getattr(obj, name) == getattr(restored, name)
def test_yaml_class(self):
    """A class must round-trip through YAML with its attribute and method intact."""
    dumped = serializer.dumps(test1.MyClass, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(restored.a, test1.MyClass.a)
    self.assertEqual(restored.my_func(1), test1.MyClass.my_func(1))
def test_json_class(self):
    """A class must round-trip through JSON with its attribute and method intact."""
    dumped = serializer.dumps(test1.MyClass, 'json')
    restored = serializer.loads(dumped, 'json')
    self.assertEqual(restored.a, test1.MyClass.a)
    self.assertEqual(restored.my_func(1), test1.MyClass.my_func(1))
def test_closure_func():
    """A closure produced by indent(5) must behave the same after a JSON round trip."""
    subject = indent(5)
    expected = subject(10)
    payload = serializer.dumps(subject, "json")
    restored = serializer.loads(payload, "json")
    actual = restored(10)
    assert_restored_object(subject)
    assert expected == actual
def assert_slots(obj):
    """Check that __slots__ attributes of *obj* survive a round trip in each format.

    Slots that were never assigned must stay absent on the restored object.
    """
    for fmt in serializer.get_formats():
        payload = serializer.dumps(obj, fmt)
        restored = serializer.loads(payload, fmt)
        for name in type(obj).__slots__:
            if hasattr(obj, name) and hasattr(restored, name):
                assert getattr(obj, name) == getattr(restored, name)
            else:
                assert hasattr(obj, name) == hasattr(restored, name)
def test_inheritance():
    """A serialized child class keeps its base-class names and its metaclass name."""
    subject = ChildCls
    assert_vars(subject)
    payload = serializer.dumps(subject, "json")
    restored = serializer.loads(payload, "json")
    for idx in (0, 1):
        assert restored.__bases__[idx].__name__ == subject.__bases__[idx].__name__
    assert type(restored).__name__ == type(subject).__name__
def assert_restored_object(obj):
    """Check that the attributes registered for obj's type survive every format's round trip."""
    attrs = type_to_attrs[type(obj)]
    for fmt in serializer.get_formats():
        payload = serializer.dumps(obj, fmt)
        restored = serializer.loads(payload, fmt)
        for attr in attrs:
            # Closures and module names are not expected to compare equal.
            if attr in ("__closure__", "__module__"):
                continue
            assert getattr(obj, attr) == getattr(restored, attr)
def genereate_acts(subdivision_id):
    '''
    From active acts with type is 1 generate acts with type 0.

    :param subdivision_id: ID of the subdivision whose active acts are copied.
    '''
    # NOTE(review): function name is misspelled ("genereate") but is part of
    # the public interface, so it is left unchanged.
    new_acts = []
    new_acts_rows = []
    acts = _get_active_acts(subdivision_id)
    session = create_session(DATABASES['main'].metadata.bind)
    # Fetch all table rows belonging to the source acts in a single query.
    act_rows = dumps(get_data_of_model(ActTable,
                                       filters=ActTable.act.in_(_get_act_ids(acts))))
    for act in dumps(acts):
        # Reuse an already-created target act for this source act, if any;
        # otherwise create one and remember it for later rows.
        new_act = _get_act_from_act_list(act, new_acts)
        if not new_act:
            new_act = _create_new_act(act)
            new_acts.append(new_act)
        # Copy every row of the source act onto the target act.
        for act_row in _get_act_rows(act, act_rows):
            new_row = ActTable(act_relation=new_act,
                               date_of_write_off=act['date'],
                               **_get_new_row(act_row))
            new_acts_rows.append(new_row)
    session.add_all(new_acts)
    session.add_all(new_acts_rows)
    session.commit()
    # NOTE(review): the session is never explicitly closed here -- presumably
    # create_session manages its own lifetime; confirm.
def __slave_send(self, data, ID, SRC, DST):
    """Serialize *data*, write it into the shared-memory region, and post an 'R' message.

    :param data: payload object, serialized with ``serializer.dumps``.
    :param ID: message identifier forwarded to ``__write_message``.
    :param SRC: sender identifier.
    :param DST: destination identifier.
    """
    data_bytes = serializer.dumps(data)
    size = len(data_bytes)
    # 4-byte little-endian length prefix stored just above the payload.
    size_bytes = struct.pack("<I", size)
    # Ensure there is room for payload plus the 4-byte prefix before writing.
    self.__mem_check(size + 4)
    with self.__sem1:
        # Layout grows downward from self.head: [payload][size][... head].
        size_ptr = self.head - 4
        data_ptr = size_ptr - size
        self.__slave_mem_write(size_ptr, size_bytes, data_ptr, data_bytes)
        # Account for consumed capacity, then flush the mapped memory so the
        # peer sees the bytes before the 'R' notification is written.
        self.cap = self.cap - size - 4
        self.mem.flush()
        self.__write_message('R', ID, SRC, DST)
def _render(value, system):
    """Render *value* as XML, optionally reporting the rendering time in a header."""
    request = system.get('request')
    # Measure how long building the XML output takes.
    started = time()
    resp = dumps(value, self.adapters)
    elapsed = abs(started - time())
    if request is not None:
        response = request.response
        response.content_type = 'application/xml'
        # When the renderer setting 'xml_render.timeit' is 'true', expose the
        # elapsed seconds via an 'Xml-Rendering-Time' response header.
        timeit_flag = system['renderer_info'].settings.get('xml_render.timeit')
        if timeit_flag == 'true':
            response.headers['Xml-Rendering-Time'] = str(elapsed) + ' seconds'
    return resp
def test_indent_func():
    """The curried indent function must survive a JSON round trip at both call levels."""
    subject = indent
    partial_fn = subject(5)
    expected = partial_fn(10)
    payload = serializer.dumps(subject, "json")
    restored = serializer.loads(payload, "json")
    actual = restored(5)(10)
    assert_restored_object(subject)
    assert_restored_object(partial_fn)
    assert expected == actual
def pack(self, msg, ID, src, dst, data=None):
    """Build a framed binary message: header length, three ids, padded text, optional payload.

    :param msg: message text, space-padded to ``self.msg_len`` before encoding.
    :param ID: message id, packed as little-endian int32.
    :param src: source id, packed as little-endian int32.
    :param dst: destination id, packed as little-endian int32.
    :param data: optional payload; when given it is serialized and appended
        with its own little-endian int32 length prefix.
    :returns: the complete packed bytes.
    """
    id_bytes = struct.pack('<i', ID)
    src_bytes = struct.pack('<i', src)
    dst_bytes = struct.pack('<i', dst)
    # Pad the text out to the fixed message width, then encode.
    msg_bytes = msg.ljust(self.msg_len).encode()
    body = id_bytes + src_bytes + dst_bytes + msg_bytes
    if data is None:
        # Header length is 20 when no payload follows.
        return struct.pack('<i', 20) + body
    # Header length is 24 when a serialized payload is appended.
    data_bytes = serializer.dumps(data)
    length_prefix = struct.pack('<i', len(data_bytes))
    return struct.pack('<i', 24) + body + length_prefix + data_bytes
def test_globals_func():
    """A function depending on module globals must be restored together with them.

    The test deletes ``math`` and ``x`` from ``globals()`` between dumping and
    loading, so a successful call on the restored function can only come from
    state captured inside the serialized form.
    """
    subject = f_globals
    import math
    # f_globals presumably reads globals()['math'] and globals()['x'] -- set
    # them up before the reference call. TODO confirm against f_globals.
    globals()["math"] = math
    globals()["x"] = 5
    result = subject()
    del math
    serialized = serializer.dumps(subject, "json")
    # Remove the globals before restoring (see docstring).
    del globals()["math"]
    del globals()["x"]
    restored = serializer.loads(serialized, "json")
    restored_result = restored()
    assert_restored_object(subject)
    assert result == restored_result
def pcall(self, funcs, background=False):
    """Execute multiple calls in parallel.

    Given ``funcs`` as a dictionary of format ``{'key': ('function', kwargs)}``,
    return a dictionary of format ``{'key': function(**kwargs)}``.

    :param funcs: mapping of result key -> (method name, params dict).
    :param background: when True, the tasks are submitted and no results are
        collected (the function returns None).
    :raises: the exception class named in a task's error payload, looked up
        on ``self.exception``.
    """
    ts = Taskset()
    for key, func in funcs.items():
        arg = {'method': func[0], 'params': func[1]}
        # Merge in the shared call metadata produced by add_meta().
        arg.update(self.add_meta())
        task = Task(func=self.service, arg=serializer.dumps(arg))
        task.key = key
        task.background = background
        ts.add(task)
    self.gearman.do_taskset(ts)
    if not background:
        out = {}
        for task in ts.values():
            res = serializer.loads(task.result)
            if res['error']:
                # Re-raise the remote failure as the matching local exception type.
                raise getattr(self.exception, res['error']['type'])(res['error']['message'], res['error'].get('traceback'))
            out[task.key] = res['result']
        return out
def dumps_base_path(self, object, base_path):
    """Serialize *object*, forwarding *base_path* to ``serializer.dumps``."""
    serialized = serializer.dumps(object, base_path)
    return serialized
def test_yaml_string(self):
    """A plain string must survive a YAML round trip unchanged."""
    dumped = serializer.dumps(test1.string, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.string, restored)
def test_yaml_func_with_defaults(self):
    """A function with default arguments must round-trip through YAML."""
    dumped = serializer.dumps(test1.func_with_defaults, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.func_with_defaults(), restored())
def test_yaml_lambda(self):
    """A lambda must round-trip through YAML and compute the same value."""
    dumped = serializer.dumps(test1.lmbd, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.lmbd(1), restored(1))
def test_yaml_func(self):
    """A plain function must round-trip through YAML and compute the same value."""
    dumped = serializer.dumps(test1.func, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.func(2), restored(2))
def test_yaml_dict(self):
    """A dict must survive a YAML round trip unchanged."""
    dumped = serializer.dumps(test1.dict, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.dict, restored)
def test_yaml_list(self):
    """A list must survive a YAML round trip unchanged."""
    dumped = serializer.dumps(test1.list, 'yaml')
    restored = serializer.loads(dumped, 'yaml')
    self.assertEqual(test1.list, restored)
def assert_restored_object(subject):
    """Round-trip *subject* through every supported format and require equality."""
    for fmt in serializer.get_formats():
        payload = serializer.dumps(subject, fmt)
        assert serializer.loads(payload, fmt) == subject
def test_toml_string(self):
    """A plain string must survive a TOML round trip unchanged."""
    dumped = serializer.dumps(test_objects.string, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(test_objects.string, restored)
def test_toml_func_with_defaults(self):
    """A function with default arguments must round-trip through TOML."""
    dumped = serializer.dumps(test_objects.func_with_defaults, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(test_objects.func_with_defaults(), restored())
def test_toml_lambda(self):
    """A lambda must round-trip through TOML and compute the same value."""
    dumped = serializer.dumps(test_objects.lmbd, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(test_objects.lmbd(1), restored(1))
def test_toml_func(self):
    """A plain function must round-trip through TOML and compute the same value."""
    dumped = serializer.dumps(test_objects.func, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(test_objects.func(2), restored(2))
def dumps(self, object):
    """Serialize *object* with the default serializer settings."""
    serialized = serializer.dumps(object)
    return serialized
def test_toml_dict(self):
    """A dict must survive a TOML round trip unchanged."""
    dumped = serializer.dumps(test_objects.dict, 'toml')
    restored = serializer.loads(dumped, 'toml')
    self.assertEqual(test_objects.dict, restored)