def test_boundary_dataset_add_list(self):
    """add_list() must merge the given (boundary, data) pairs into the dataset."""
    boundary1 = OptimalBoundary(cells=[
        CellID('N'), CellID('O0'), CellID('P123'), CellID('S34567')
    ])
    boundary2 = OptimalBoundary(cells=[CellID('O35'), CellID('P234')])
    boundary3 = OptimalBoundary(cells=[CellID('S034'), CellID('S57')])
    boundary4 = OptimalBoundary(cells=[CellID('P33'), CellID('P44')])
    data = Data('')

    # Start with the first two boundaries already in the dataset.
    initial = {
        b.boundary_ID.value: (b, data)
        for b in (boundary1, boundary2)
    }
    b_dataset = BoundaryDataSet('id', initial)

    b_dataset.add_list([(boundary3, data), (boundary4, data)])

    # After add_list, all four boundaries must be present.
    expected = {
        b.boundary_ID.value: (b, data)
        for b in (boundary1, boundary2, boundary3, boundary4)
    }
    self.assertEqual(b_dataset.boundary_data_set, expected)
def test_delete_boundary_in_boundary_datasets(self):
    """Deleting one boundary from a stored dataset reports count 1 and
    leaves the dataset queryable but without that boundary.

    Fixes: the original duplicated the final assertion (a bare
    ``assert num_bds == 1`` immediately followed by the equivalent
    ``self.assertEqual``) and used bare ``assert`` statements, which are
    stripped under ``python -O``; unittest assertion methods are used
    consistently instead, and ``len()`` replaces the ``__len__()`` call.
    """
    store.dropAll()
    bds = BoundaryDataSet("id")
    boundaries = [
        'O23P12P34S56', 'P10P11P2', 'N0', 'N8O2P0', 'O6S0S1S2', 'Q'
    ]
    for boundary in boundaries:
        bds.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.insert(bds)

    deleted_boundaries = store.delete_boundary_in_boundary_datasets(
        "id", Boundary(boundary_ID=BoundaryID('O23P12P34S56')))
    self.assertEqual(deleted_boundaries, 1)

    stored_bds = store.query_by_boundary_in_boundary_datasets(
        "id", Boundary(boundary_ID=BoundaryID('O23P12P34S56')))
    num_bds = 0
    for bds in stored_bds:
        # The dataset record survives, but no boundary matches anymore.
        self.assertEqual(len(bds.get_boundaries()), 0)
        num_bds += 1
    self.assertEqual(num_bds, 1)
    store.dropAll()
def test_update_cell_in_cell_datasets(self):
    """Updating one boundary's data in a stored dataset replaces its content.

    NOTE(review): the method name says "cell" but the body exercises
    boundary updates; it is kept unchanged because renaming a test method
    changes what the test runner discovers.

    Fixes: bare ``assert`` statements (stripped under ``python -O``) and
    dunder ``__contains__`` calls replaced with ``assertIn``/``assertEqual``.
    """
    store.dropAll()
    bds = BoundaryDataSet("id")
    boundaries = [
        'O23P12P34S56', 'P10P11P2', 'N0', 'N8O2P0', 'O6S0S1S2', 'Q'
    ]
    for boundary in boundaries:
        bds.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.insert(bds)

    store.update_boundary_in_boundary_datasets(
        "id", Boundary(boundary_ID=BoundaryID('O23P12P34S56')),
        Data("test"))

    stored_bds = store.query_by_boundary_in_boundary_datasets(
        "id", Boundary(boundary_ID=BoundaryID('O23P12P34S56')))
    num_bds = 0
    num_boundaries = 0
    for bds in stored_bds:
        for boundary, data in bds.get_boundaries_and_data():
            self.assertIn(boundary.AUID_to_CUIDs(), boundaries)
            # The stored content must now be the updated value.
            self.assertEqual(data.content, Data("test").content)
            num_boundaries += 1
        num_bds += 1
    self.assertEqual(num_bds, 1)
    self.assertEqual(num_boundaries, 1)
    store.dropAll()
def query_by_boundary_in_boundary_datasets(self, id, boundary):
    """Query one BoundaryDataset for a single boundary.

    :param id: identifier of the BoundaryDataset.
    :param boundary: Boundary or OptimalBoundary. If it is not optimal,
        it is optimized before making the query.
    :return: list of BoundaryDataSet objects (one per matching dataset
        document) containing only the matching boundary and its data.

    Fix: the inner loop previously reused the name ``boundary``,
    shadowing the parameter; the loop variable is renamed to
    ``boundary_doc`` to avoid the shadowing.
    """
    if boundary.is_optimal():
        optimal_boundary = boundary
    else:
        optimal_boundary = boundary.optimize()

    boundaries_datasets_founded = self.db.b_data_sets.find({"_id": id})
    boundary_data_sets = []
    for boundary_dataset in boundaries_datasets_founded:
        bds = BoundaryDataSet(id=id)
        # Restrict the boundary lookup to this dataset and this AUID.
        boundaries_in_bds_founded = self.db.boundaries.find({
            "boundary_dataset_id": boundary_dataset["_id"],
            "auid": optimal_boundary.boundary_ID.value
        })
        for boundary_doc in boundaries_in_bds_founded:
            bds.add(OptimalBoundary(boundary_ID=AUID(boundary_doc["auid"])),
                    Data(boundary_doc["data"]))
        boundary_data_sets.append(bds)
    return boundary_data_sets
def to_internal_value(self, data):
    """Deserialize a list of {'boundary': ..., 'data': ...} items into
    the internal boundary_data_set mapping, validating each boundary ID."""
    bds = BoundaryDataSet(id='')
    for item in data:
        boundary_str = item['boundary']
        # Boundary identifiers must be strictly alphanumeric.
        if re.match(r'^[A-Za-z0-9]+$', boundary_str) is None:
            raise ValidationError('Incorrect format: ' + boundary_str)
        bds.add(Boundary(boundary_ID=BoundaryID(boundary_str)),
                Data(item['data']))
    return bds.boundary_data_set
def test_update_boundary_dataset(self):
    """Replacing a stored dataset with a smaller one leaves exactly the
    new boundaries queryable.

    Fixes: bare ``assert`` (stripped under ``python -O``) and the dunder
    ``__contains__`` call replaced with ``assertIn``; counters use ``+=``.
    """
    store.dropAll()
    bds = BoundaryDataSet("id")
    boundaries = [
        'O23P12P34S56', 'P10P11P2', 'N0', 'N8O2P0', 'O6S0S1S2', 'Q'
    ]
    for boundary in boundaries:
        bds.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.insert(bds)

    # Overwrite the dataset with a strict subset of the boundaries.
    bds2 = BoundaryDataSet("id")
    boundaries2 = ['N0', 'N8O2P0', 'O6S0S1S2', 'Q']
    for boundary in boundaries2:
        bds2.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.update_boundary_dataset(bds2)

    stored_bds = store.query_by_boundary_dataset_id("id")
    num_bds = 0
    num_boundaries = 0
    for bds in stored_bds:
        for boundary in bds.get_boundaries():
            self.assertIn(boundary.AUID_to_CUIDs(), boundaries2)
            num_boundaries += 1
        num_bds += 1
    self.assertEqual(num_bds, 1)
    self.assertEqual(num_boundaries, len(boundaries2))
    store.dropAll()
def all_boundary_datasets(self):
    """
    :return: List of all stored BoundaryDatasets
    """
    result = []
    for bds_doc in self.db.b_data_sets.find():
        bds = BoundaryDataSet(id=bds_doc["_id"])
        # Collect every boundary document belonging to this dataset.
        boundary_docs = self.db.boundaries.find(
            {"boundary_dataset_id": bds_doc["_id"]})
        for doc in boundary_docs:
            bds.add(OptimalBoundary(boundary_ID=AUID(doc["auid"])),
                    Data(doc["data"]))
        result.append(bds)
    return result
def query_by_boundary_dataset_id(self, id):
    """
    :param id: identifier of the BoundaryDataset
    :return: BoundaryDataset stored with that id.
    """
    result = []
    for bds_doc in self.db.b_data_sets.find({"_id": id}):
        bds = BoundaryDataSet(id=id)
        # Attach every boundary document linked to this dataset.
        boundary_docs = self.db.boundaries.find(
            {"boundary_dataset_id": bds_doc["_id"]})
        for doc in boundary_docs:
            bds.add(OptimalBoundary(boundary_ID=AUID(doc["auid"])),
                    Data(doc["data"]))
        result.append(bds)
    return result
def test_delete_boundary_dataset(self):
    """Deleting a whole dataset reports count 1 and leaves nothing to query.

    Fix: ``stored_bds.__len__()`` replaced with the idiomatic ``len()``.
    """
    store.dropAll()
    bds = BoundaryDataSet("id")
    boundaries = [
        'O23P12P34S56', 'P10P11P2', 'N0', 'N8O2P0', 'O6S0S1S2', 'Q'
    ]
    for boundary in boundaries:
        bds.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.insert(bds)

    deleted_bds = store.delete_boundary_dataset("id")
    self.assertEqual(deleted_bds, 1)

    stored_bds = store.query_by_boundary_dataset_id("id")
    self.assertEqual(len(stored_bds), 0)
    store.dropAll()
def test_insert_and_all_boudnaries(self):
    """Inserting a dataset makes every boundary visible via all_boundaries().

    NOTE(review): "boudnaries" in the method name is a typo, kept so the
    test runner keeps discovering the same test name.

    Fixes: bare ``assert`` (stripped under ``python -O``) and the dunder
    ``__contains__`` call replaced with ``assertIn``.
    """
    store.dropAll()
    bds = BoundaryDataSet("id")
    boundaries = [
        'O23P12P34S56', 'P10P11P2', 'N0', 'N8O2P0', 'O6S0S1S2', 'Q'
    ]
    for boundary in boundaries:
        bds.add(Boundary(boundary_ID=BoundaryID(boundary)), Data(""))
    store.insert(bds)

    stored_boundaries = store.all_boundaries()
    num_boundaries = 0
    for boundary in stored_boundaries:
        # all_boundaries() yields (boundary, data) pairs; index 0 is the boundary.
        self.assertIn(boundary[0].AUID_to_CUIDs(), boundaries)
        num_boundaries += 1
    self.assertEqual(num_boundaries, len(boundaries))
    store.dropAll()
def test_get_max_refinement(self):
    """get_max_refinement() must return the deepest cell refinement (5,
    from 'S34567') across all boundaries in the dataset."""
    boundary1 = OptimalBoundary(cells=[
        CellID('N'), CellID('O0'), CellID('P123'), CellID('S34567')
    ])
    boundary2 = OptimalBoundary(cells=[CellID('O35'), CellID('P234')])
    boundary3 = OptimalBoundary(cells=[CellID('S034'), CellID('S57')])
    data = Data('')

    contents = {
        b.boundary_ID.value: (b, data)
        for b in (boundary1, boundary2, boundary3)
    }
    b_dataset = BoundaryDataSet('id', contents)

    self.assertEqual(b_dataset.get_max_refinement(), 5)
def get_boundary_dataset_from_shp_file(self,
                                       dir,
                                       id,
                                       with_ids,
                                       refinement=None,
                                       unic_data=False):
    """Build a BoundaryDataSet from every .shp file found in *dir*.

    :param dir: directory scanned for shapefiles. NOTE: the process-wide
        working directory is changed and not restored.
    :param id: identifier of the resulting BoundaryDataSet.
    :param with_ids: if True, each polygon's 'id' property is concatenated
        into the boundary identifier; otherwise the cell is derived from
        the polygon geometry at *refinement*.
    :param refinement: required cell refinement when with_ids is False.
    :param unic_data: if True, keep only the FIRST polygon's data for each
        shapefile; otherwise collect data per polygon id.
    :return: the populated BoundaryDataSet.

    Bug fix: ``first`` was never set to False, so with unic_data=True the
    data of the LAST polygon silently overwrote the first's on every
    iteration. The flag is now cleared after the first assignment.
    """
    import os
    import glob

    os.chdir(dir)
    shp_collections = [fiona.open(f) for f in glob.glob('*.shp')]

    bds = BoundaryDataSet(id=id)
    for collection in shp_collections:
        data = {}
        boundary_id = ''
        first = True
        for polygon in collection:
            if with_ids:
                boundary_id = boundary_id + polygon['properties']['id']
            else:
                # Without explicit ids we must derive cells from geometry,
                # which requires a refinement level.
                assert refinement is not None
                cell = self.get_cell_ID(polygon, int(refinement))
                boundary_id = boundary_id + cell.value

            if not unic_data:
                # Per-polygon data, keyed by the polygon's id property.
                if 'data' in polygon['properties']:
                    data[polygon['properties']
                         ['id']] = polygon['properties']['data']
                else:
                    data[polygon['properties']
                         ['id']] = polygon['properties']
            elif unic_data and first:
                # Single shared datum: take it from the first polygon only.
                if 'data' in polygon['properties']:
                    data = polygon['properties']['data']
                else:
                    data = polygon['properties']
                first = False

        bds.add(boundary=Boundary(boundary_ID=BoundaryID(boundary_id)),
                data=Data(data))
    return bds
def test_get_boundary_data_list(self):
    """get_boundary_data_list() must return the data objects for exactly
    the requested boundary IDs, in request order."""
    boundary1 = OptimalBoundary(cells=[
        CellID('N'), CellID('O0'), CellID('P123'), CellID('S34567')
    ])
    boundary2 = OptimalBoundary(cells=[CellID('O35'), CellID('P234')])
    boundary3 = OptimalBoundary(cells=[CellID('S034'), CellID('S57')])
    data = Data('data')
    data2 = Data('data2')

    contents = {
        boundary1.boundary_ID.value: (boundary1, data),
        boundary2.boundary_ID.value: (boundary2, data),
        boundary3.boundary_ID.value: (boundary3, data2),
    }
    b_dataset = BoundaryDataSet('id', contents)

    requested = [boundary1.boundary_ID, boundary3.boundary_ID]
    self.assertEqual(b_dataset.get_boundary_data_list(requested),
                     [data, data2])
def query_by_boundary_to_boundary_datasets(self, boundary):
    """Find every BoundaryDataSet that contains the given boundary.

    :param boundary: Boundary or OptimalBoundary. If it is not optimal,
        it is optimized before making the query.
    :return: List of BoundaryDataSets where boundary is located.

    Fixes: each returned BoundaryDataSet is now constructed with its
    dataset id (the original called ``BoundaryDataSet()`` with no id,
    unlike the sibling query methods which always set ``id=``), and the
    loop variables no longer shadow the ``boundary`` parameter.
    """
    if boundary.is_optimal():
        optimal_boundary = boundary
    else:
        optimal_boundary = boundary.optimize()

    boundaries_founded = self.db.boundaries.find(
        {"auid": optimal_boundary.boundary_ID.value})
    boundary_data_sets = []
    for match_doc in boundaries_founded:
        # Rebuild the whole dataset that this matching boundary belongs to.
        dataset_id = match_doc["boundary_dataset_id"]
        bds = BoundaryDataSet(id=dataset_id)
        boundaries_in_bds_founded = self.db.boundaries.find(
            {"boundary_dataset_id": dataset_id})
        for member_doc in boundaries_in_bds_founded:
            member_boundary, data = self.get_Boundary_Data(member_doc)
            bds.add(member_boundary, data)
        boundary_data_sets.append(bds)
    return boundary_data_sets
def shp_files_from_boundary_dataset_cli(self, boundary_dataset_file,
                                        out_shp, bbox):
    """CLI entry point: load a BoundaryDataSet from a JSON file and write
    it out as shapefiles clipped to *bbox*."""
    with open(boundary_dataset_file) as json_file:
        loaded_bds = BoundaryDataSet("").fromJSON(json_file, file=True)
    self.shp_files_from_boundary_dataset(loaded_bds, out_shp, bbox)
def save(self, store):
    """Insert the validated BoundaryDataSet into *store*."""
    validated = self.validated_data
    new_bds = BoundaryDataSet(
        id=validated['id'],
        boundary_data_set=validated['boundary_data_set'])
    store.insert(new_bds)
def save(self, store, bds_id):
    """Update the BoundaryDataSet identified by *bds_id* in *store* with
    the validated boundary data."""
    updated_bds = BoundaryDataSet(
        id=bds_id,
        boundary_data_set=self.validated_data['boundary_data_set'])
    store.update_boundary_dataset(updated_bds)