def subtest_render_mixed_json_to_layer_chunk(self, obj, chunk_size=100, empty_chunk=None):
    with self.subTest(chunk_size=chunk_size):
        layer = self.new_layer()
        o = dict(obj)
        obj_feat = obj["features"]
        lst_map_feat = list()
        map_fields = dict()
        # Split the features into fixed-size chunks.
        lst_chunk: list = [
            obj_feat[i0:i0 + chunk_size]
            for i0 in range(0, len(obj_feat), chunk_size)
        ]
        # Optionally interleave empty chunks to exercise rendering of empty batches.
        if empty_chunk:
            step = max(2, len(lst_chunk) // empty_chunk)
            for i in reversed(range(0, len(lst_chunk), step)):
                lst_chunk.insert(i, list())
        # Parse and render each chunk, accumulating the shared field map.
        for chunk in lst_chunk:
            o["features"] = chunk
            map_feat, _ = parser.xyz_json_to_feature_map(o, map_fields)
            test_parser.TestParser()._assert_parsed_map(chunk, map_feat, map_fields)
            lst_map_feat.append(map_feat)
            self._render_layer(layer, map_feat, map_fields)
            # self._log_debug("len feat", len(chunk))
            # self._log_debug("parsed feat", len_of_struct(map_feat))
            # self._log_debug("parsed fields", len_of_struct(map_fields))
        # All chunks together must account for every input feature.
        lst_feat = flatten([x.values() for x in lst_map_feat])
        self.assertEqual(len(lst_feat), len(obj["features"]))
        self.assert_layer(layer, obj, map_fields)
        self.remove_layer(layer)
        return map_fields
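
# Sketch (illustrative only, not part of the original suite): how the
# empty_chunk interleaving above behaves. With 5 chunks and empty_chunk=2,
# step is max(2, 5 // 2) = 2, and inserting in reversed index order keeps the
# earlier insertion points valid, yielding [[], c0, c1, [], c2, c3, [], c4].
def _demo_empty_chunk_interleave():
    lst_chunk = [["c0"], ["c1"], ["c2"], ["c3"], ["c4"]]
    empty_chunk = 2
    step = max(2, len(lst_chunk) // empty_chunk)
    for i in reversed(range(0, len(lst_chunk), step)):
        lst_chunk.insert(i, list())
    assert lst_chunk == [[], ["c0"], ["c1"], [], ["c2"], ["c3"], [], ["c4"]]
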
def subtest_parse_xyzjson_map_chunk(self, obj, chunk_size=100):
    similarity_threshold = self.similarity_threshold
    with self.subTest(chunk_size=chunk_size, similarity_threshold=similarity_threshold):
        o = dict(obj)
        obj_feat = obj["features"]
        lst_map_feat = list()
        map_fields = dict()
        # Parse the features chunk by chunk, accumulating the shared field map.
        for i0 in range(0, len(obj_feat), chunk_size):
            chunk = obj_feat[i0:i0 + chunk_size]
            o["features"] = chunk
            map_feat, _ = parser.xyz_json_to_feature_map(
                o, map_fields, similarity_threshold)
            self._assert_parsed_map(chunk, map_feat, map_fields)
            lst_map_feat.append(map_feat)
            # self._log_debug("len feat", len(chunk))
            # self._log_debug("parsed feat", len_of_struct(map_feat))
            # self._log_debug("parsed fields", len_of_struct(map_fields))
        # All chunks together must account for every input feature.
        lst_feat = flatten([x.values() for x in lst_map_feat])
        self.assertEqual(len(lst_feat), len(obj["features"]))
        return map_fields
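
# Sketch (assumed driver, not from the original file): a test method would
# typically sweep several chunk sizes so the subTest context above reports
# each size separately. `self.obj` is a hypothetical fixture holding a
# parsed GeoJSON-like dict; adjust to however the suite loads its fixtures.
def test_parse_xyzjson_map_chunk_sizes(self):
    obj = self.obj  # hypothetical fixture
    for chunk_size in (1, 10, 100, 1000):
        self.subtest_parse_xyzjson_map_chunk(obj, chunk_size=chunk_size)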