def post(self, request):
    """Apply a user-requested change and write the triangles back to Excel."""
    user_defined_triangles = request.data.get("output")
    input_json = request.data.get("input")
    dh = DataHolder.decode(input_json)
    change = request.data.get("change")
    filename = request.data.get("filename")
    # Update the triangle/DataHolder connections with the change variable.
    RowParser.make_changes(dh, user_defined_triangles, change)
    user_defined_triangles = RowParser.parse_output_from_triangle_forms(
        user_defined_triangles, dh)
    # Write the parsed triangles into the existing output workbook.
    SheetWriter.trngs_to_existing_excel(
        user_defined_triangles, pdir.TEMP_DIR + ps.OUTPUT_NAME + filename)
    return Response({'output': user_defined_triangles})
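# A minimal sketch of how a client might exercise the endpoint above, using
# DRF's test client. The route is hypothetical; only the request keys
# ("output", "input", "change", "filename") are confirmed by the view itself,
# and dh, user_defined_triangles, and change are assumed to come from an
# earlier exchange with the server.
from rest_framework.test import APIClient

client = APIClient()
response = client.post("/api/apply-change/", {   # hypothetical URL
    "input": dh.encode(),                 # encoded DataHolder, round-trips via decode()
    "output": user_defined_triangles,     # triangle forms from an earlier response
    "change": change,                     # shape assumed; consumed by RowParser.make_changes
    "filename": "result.xlsx",
}, format="json")
assert response.status_code == 200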
def test_serialization(self):
    # Mutate the first sheet so the round trip is exercised on non-default state.
    self.dh.data_struct_list[0].roles.append("Claims Paid")
    self.dh.data_struct_list[0].df_data.sort_values(
        "col1", ascending=False, inplace=True)

    serialized = self.dh.encode()
    decoded = DataHolder.decode(serialized)

    # Data and profile frames must survive the encode/decode round trip.
    for ind in range(3):
        assert_frame_equal(self.dh.data_struct_list[ind].df_data,
                           decoded.data_struct_list[ind].df_data)
        assert_frame_equal(self.dh.data_struct_list[ind].df_profiles,
                           decoded.data_struct_list[ind].df_profiles)
    self.assertEqual(decoded.data_struct_list[0].roles[0], "Claims Paid")

    # Ids must be conserved across serialization as well.
    for ind in range(len(self.dh.data_struct_list)):
        self.assertEqual(decoded.data_struct_list[ind].id,
                         self.dh.data_struct_list[ind].id)
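# The test above assumes setUp has populated self.dh with at least three
# sheets containing a "col1" column. A hedged sketch of such a setUp, reusing
# the add_sheet signature seen elsewhere in this repo; the sheet names, frame
# contents, and profile values here are invented:
def setUp(self):
    self.dh = DataHolder()
    for name in ("sheet_a", "sheet_b", "sheet_c"):
        df_data = pd.DataFrame({"col1": [3.0, 1.0, 2.0], "col2": ["x", "y", "z"]})
        df_profiles = pd.DataFrame({"col1": ["number"], "col2": ["text"]})
        self.dh.add_sheet(name, df_data, df_profiles, orig_sheet_name=name)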
def test_output(self):
    path = pdir.RESOURCES_DIR + "left_triangles/outtake/"
    for file in os.listdir(path):
        if not file.endswith(".pickle"):
            continue
        print(file)
        with open(path + file, 'rb') as f:
            read_data = pickle.load(f)
        dh = DataHolder.decode(read_data["DataHolder"])
        info_dict = read_data["extra_content"]
        print(info_dict)
        # Build output templates from the fixture metadata, match the input
        # sheets against them, then parse the final triangles.
        user_defined_triangles = OutputTriangleParser.generate_output_triangles(
            info_dict)
        data_holder, group_ids, sheet_names = RowParser.set_card_ids(
            user_defined_triangles, dh)
        user_defined_triangles = InputMatcher.match_triangles_to_output(
            user_defined_triangles, data_holder)
        user_defined_triangles = RowParser.parse_output_from_triangle_forms(
            user_defined_triangles, data_holder)
        # Name the output workbook after the fixture's ".xls" stem.
        head, sep, tail = file.partition(".xls")
        SheetWriter.trngs_to_excel(user_defined_triangles, head + sep)
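# Each fixture consumed by test_output is a pickled dict with two keys:
# "DataHolder" (an encoded holder) and "extra_content" (the template
# metadata). A hedged sketch of how such a fixture could be regenerated;
# write_fixture is a hypothetical helper, not part of the repo:
def write_fixture(dh, info_dict, out_path):
    fixture = {"DataHolder": dh.encode(), "extra_content": info_dict}
    with open(out_path, "wb") as f:
        pickle.dump(fixture, f)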
def post(self, request):
    # Expects an encoded DataHolder plus the output triangle templates.
    str_data_holder = request.data.get('str_data_holder')
    data_holder = DataHolder.decode(str_data_holder)
    response_data = {}
    if data_holder is None:
        raise ValueError("No data holder found")
    elif data_holder.n == 0:
        raise ValueError("No sheets in data holder")

    # Receive the triangle formats.
    user_defined_triangles = request.data.get('templates')
    try:
        # DataHolder manipulation: assign card ids, match the input sheets
        # to the templates, then parse the final output triangles.
        data_holder, group_ids, sheet_names = RowParser.set_card_ids(
            user_defined_triangles, data_holder)
        user_defined_triangles = InputMatcher.match_triangles_to_output(
            user_defined_triangles, data_holder)
        user_defined_triangles = RowParser.parse_output_from_triangle_forms(
            user_defined_triangles, data_holder)
    except DataHolderException as err:
        data = {'message': err.message, 'dh': err.dh}
        return Response({'response_error': data})

    response_data["group_ids"] = group_ids
    response_data['output_triangles'] = user_defined_triangles
    response_data["unit_triangles"] = (
        ChangeDimensionAPIView.make_unit_triangle_list(data_holder))
    return Response({'data': response_data})
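# A sketch of the corresponding client call. The route is hypothetical, and
# the "templates" payload must match whatever structure the front end holds;
# only the two request keys and the response shapes are confirmed by the
# view above.
from rest_framework.test import APIClient

client = APIClient()
response = client.post("/api/parse-templates/", {   # hypothetical URL
    "str_data_holder": data_holder.encode(),
    "templates": user_defined_triangles,
}, format="json")
body = response.data
if "response_error" in body:
    print(body["response_error"]["message"])
else:
    group_ids = body["data"]["group_ids"]
    unit_triangles = body["data"]["unit_triangles"]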
dh = DataHolder()
for sr in sr_list:
    dh.add_sheet(sr.sheet_name,
                 pd.DataFrame(columns=sr.headers, data=sr.row_vals),
                 pd.DataFrame(columns=sr.headers, data=sr.xls_types),
                 orig_sheet_name=sr.sheet_name)
dh = SheetPreProcessor.separate_components(dh)
# Keep the raw (pre-merge) holder, encoded, for later evaluation.
raw_dict[file_name] = dh.encode()
dh = HorizontalMerger.horizontal_merge(dh)
solutions_dict[file_name] = dh

solutions_dict = MergePararametersOptimizer.make_ind_col_dict(solutions_dict)
with open(pdir.RESOURCES_DIR + "/test/merge_solutions.obj", "wb") as temp_file:
    pickle.dump(solutions_dict, temp_file)
with open(pdir.RESOURCES_DIR + "/test/raw_test.obj", "wb") as temp_file:
    pickle.dump(raw_dict, temp_file)


if __name__ == '__main__':
    MergePararametersOptimizer.make_sol_dict()
    with open(pdir.RESOURCES_DIR + "/test/raw_test.obj", "rb") as temp_file:
        raw_dh = pickle.load(temp_file)
    # Raw holders were stored encoded, so decode them back into DataHolders.
    raw_dh = {key: DataHolder.decode(raw_dh[key]) for key in raw_dh}
    with open(pdir.RESOURCES_DIR + "/test/merge_solutions.obj", "rb") as temp_file:
        solution_dh = pickle.load(temp_file)
    # Time the evaluation of the merge parameters against the known solutions.
    start = time.time()
    MergePararametersOptimizer.evaluate(raw_dh, solution_dh)
    end = time.time()
    print(end - start)