def __init__(self, session=None, create_tables=True, **kwargs):
    """Initialize the test DAO harness.

    Stores the session, runs the fixture setUp (which populates
    self.schema), then delegates to ORM_DAO.__init__ and optionally
    creates the schema tables.
    """
    self.session = session
    self.setUp()
    ORM_DAO.__init__(self, session=self.session, schema=self.schema, **kwargs)
    if create_tables:
        self.create_tables()
def test_obj_query(self):
    """A bare entity SELECT should execute and return a non-None result."""
    dao = ORM_DAO(session=self.session, schema=self.schemas['schema1'])
    query_def = {
        'ID': 'obj_q',
        'SELECT': ['__TestClass1'],
    }
    self.assertIsNotNone(dao.execute_queries(query_defs=[query_def]))
def test_combined_query(self):
    """Mixing an aliased entity expression with a plain column in SELECT works."""
    dao = ORM_DAO(session=self.session, schema=self.schemas['schema1'])
    query_def = {
        'ID': 'obj_q',
        'SELECT': [
            {'ID': 'foo', 'EXPRESSION': '__TestClass1'},
            '__TestClass1__id',
        ],
    }
    self.assertIsNotNone(dao.execute_queries(query_defs=[query_def]))
def test_batched_results(self):
    """Batched iteration must yield exactly as many rows as q.count().

    Uses a batch size (3) that does not evenly divide typical fixture
    sizes, so partial final batches are exercised too.
    """
    schema = self.schemas['schema1']
    dao = ORM_DAO(session=self.session, schema=schema)
    simple_q = {
        'ID': 'simple_q',
        'SELECT': ['__TestClass1'],
    }
    q = dao.get_query(simple_q)
    num_results = q.count()
    batched_results = dao.get_batched_results(q, 3)
    # list() instead of a pass-through comprehension; materializes the
    # batched generator so its length can be compared.
    results = list(batched_results)
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(results), num_results)
def test_dao_csv_ingestor(self):
    """Round-trip CSV rows through the Ingestor pipeline into the DAO.

    Builds a throwaway declarative model, writes 5 rows of CSV to an
    in-memory buffer, ingests them with a ClassMapper whose processor
    multiplies s_attr1 by 10, then asserts on the persisted records.
    (Originally this test only printed each row and asserted nothing.)
    """
    Base = declarative_base()

    class TestClass(Base):
        __tablename__ = 'testclass'
        id = Column(Integer, primary_key=True)
        attr1 = Column(Integer)
        attr2 = Column(String)

    Base.metadata.create_all(self.connection)
    schema = {'sources': {'TestClass': TestClass}}
    dao = ORM_DAO(session=self.session, schema=schema)

    # Generate the CSV fixture entirely in memory.
    csv_data = StringIO()
    writer = csv.DictWriter(csv_data, fieldnames=['s_attr1', 's_attr2'])
    writer.writeheader()
    for i in range(5):
        writer.writerow({
            's_attr1': i,
            's_attr2': "s_attr2_%s" % i,
        })
    csv_file = StringIO(csv_data.getvalue())

    mappings = [
        {
            'source': 's_attr1',
            'target': 'attr1',
            # Scale on ingest so we can detect that the processor ran.
            'processor': lambda value: int(value) * 10,
        },
        {
            'source': 's_attr2',
            'target': 'attr2',
        },
    ]

    Ingestor(
        reader=CSVReader(csv_file=csv_file),
        processors=[
            ClassMapper(clazz=TestClass, mappings=mappings),
            DAOWriter(dao=dao, commit=False),
        ],
    ).ingest()

    results = dao.query({'SELECT': ['__TestClass']}).all()
    self.assertEqual(len(results), 5)
    self.assertEqual(
        sorted((r.attr1, r.attr2) for r in results),
        [(i * 10, "s_attr2_%s" % i) for i in range(5)],
    )
def test_join_query(self):
    """An explicit FROM with a JOINS clause should execute without error."""
    dao = ORM_DAO(session=self.session, schema=self.schemas['schema1'])
    # Join TestClass2 onto TestClass1 on matching ids.
    join_condition = [
        {'TYPE': 'ENTITY', 'EXPRESSION': '__TestClass1__id'},
        '==',
        {'TYPE': 'ENTITY', 'EXPRESSION': '__TestClass2__id'},
    ]
    simple_q = {
        'ID': 'simple_q',
        'SELECT': ['__TestClass1'],
        'FROM': [
            {
                'SOURCE': 'TestClass1',
                'JOINS': [
                    ['TestClass2', join_condition],
                ],
            },
        ],
    }
    self.assertIsNotNone(dao.execute_queries(query_defs=[simple_q]))
def test_shapefile_ingestor(self):
    """Ingest shapefile records into a geometry-bearing model via the DAO.

    NOTE(review): this method is redefined later in the same class, so
    under Python class semantics this copy is shadowed and never runs —
    one of the two duplicates should be removed or renamed.

    Writes 5 MultiPolygon features to a temp shapefile, ingests them with
    a processor that scales S_ATTR1 by 10, then asserts on the persisted
    rows. (Originally this test only printed each row.)
    """
    Base = declarative_base()

    class TestClass(Base):
        __tablename__ = 'testclass'
        id = Column(Integer, primary_key=True)
        attr1 = Column(Integer)
        attr2 = Column(String)
        geom = GeometryColumn(MultiPolygon(2))

    GeometryDDL(TestClass.__table__)

    schema = {'sources': {'TestClass': TestClass}}
    Base.metadata.create_all(self.connection)
    dao = ORM_DAO(session=self.session, schema=schema)

    # Write the shapefile fixture into a temp dir.
    shapedir = tempfile.mkdtemp()
    shapefile = os.path.join(shapedir, "test.shp")
    # 'schema' is reused here as the *shapefile* schema (fiona-style),
    # distinct from the DAO schema above.
    schema = {
        'geometry': 'MultiPolygon',
        'properties': {
            'S_ATTR1': 'int',
            'S_ATTR2': 'str',
        },
    }
    records = []
    for i in range(5):
        coords = [[dg.generate_polygon_coords(x=i, y=i)]]
        records.append({
            'id': i,
            'geometry': {
                'type': 'MultiPolygon',
                'coordinates': coords,
            },
            'properties': {
                'S_ATTR1': i,
                'S_ATTR2': str(i),
            },
        })
    writer = shapefile_util.get_shapefile_writer(
        shapefile=shapefile,
        crs='EPSG:4326',
        schema=schema,
    )
    for record in records:
        writer.write(record)
    writer.close()

    mappings = [
        {
            'source': 'S_ATTR1',
            'target': 'attr1',
            # Scale on ingest so we can detect that the processor ran.
            'processor': lambda value: int(value) * 10,
        },
        {
            'source': 'S_ATTR2',
            'target': 'attr2',
        },
        {
            'source': '__shape',
            'target': 'geom',
            'processor': gis_util.shape_to_wkt,
        },
    ]

    Ingestor(
        reader=ShapefileReader(shp_file=shapefile),
        processors=[
            ClassMapper(clazz=TestClass, mappings=mappings),
            DAOWriter(dao=dao, commit=False),
        ],
    ).ingest()

    results = dao.query({'SELECT': ['__TestClass']}).all()
    self.assertEqual(len(results), 5)
    self.assertEqual(
        sorted((r.attr1, r.attr2) for r in results),
        [(i * 10, str(i)) for i in range(5)],
    )
    for r in results:
        # Geometry round-trips to WKT (was previously just printed).
        self.assertIsNotNone(dao.session.scalar(r.geom.wkt))
def test_shapefile_ingestor(self):
    """Ingest shapefile records into a geometry-bearing model via the DAO.

    NOTE(review): this is a near-verbatim duplicate of an earlier method
    of the same name (which it shadows); the duplicate should be removed.

    Writes 5 MultiPolygon features to a temp shapefile, ingests them with
    a processor that scales S_ATTR1 by 10, then asserts on the persisted
    rows. (Originally this test only printed each row.)
    """
    Base = declarative_base()

    class TestClass(Base):
        __tablename__ = 'testclass'
        id = Column(Integer, primary_key=True)
        attr1 = Column(Integer)
        attr2 = Column(String)
        geom = GeometryColumn(MultiPolygon(2))

    GeometryDDL(TestClass.__table__)

    schema = {'sources': {'TestClass': TestClass}}
    Base.metadata.create_all(self.connection)
    dao = ORM_DAO(session=self.session, schema=schema)

    shapedir = tempfile.mkdtemp()
    shapefile = os.path.join(shapedir, "test.shp")
    # 'schema' is reused here as the *shapefile* schema (fiona-style),
    # distinct from the DAO schema above.
    schema = {
        'geometry': 'MultiPolygon',
        'properties': {
            'S_ATTR1': 'int',
            'S_ATTR2': 'str',
        },
    }
    records = []
    for i in range(5):
        coords = [[dg.generate_polygon_coords(x=i, y=i)]]
        records.append({
            'id': i,
            'geometry': {
                'type': 'MultiPolygon',
                'coordinates': coords,
            },
            'properties': {
                'S_ATTR1': i,
                'S_ATTR2': str(i),
            },
        })
    writer = shapefile_util.get_shapefile_writer(
        shapefile=shapefile,
        crs='EPSG:4326',
        schema=schema,
    )
    for record in records:
        writer.write(record)
    writer.close()

    mappings = [
        {
            'source': 'S_ATTR1',
            'target': 'attr1',
            # Scale on ingest so we can detect that the processor ran.
            'processor': lambda value: int(value) * 10,
        },
        {
            'source': 'S_ATTR2',
            'target': 'attr2',
        },
        {
            'source': '__shape',
            'target': 'geom',
            'processor': gis_util.shape_to_wkt,
        },
    ]

    Ingestor(
        reader=ShapefileReader(shp_file=shapefile),
        processors=[
            ClassMapper(clazz=TestClass, mappings=mappings),
            DAOWriter(dao=dao, commit=False),
        ],
    ).ingest()

    results = dao.query({'SELECT': ['__TestClass']}).all()
    self.assertEqual(len(results), 5)
    self.assertEqual(
        sorted((r.attr1, r.attr2) for r in results),
        [(i * 10, str(i)) for i in range(5)],
    )
    for r in results:
        # Geometry round-trips to WKT (was previously just printed).
        self.assertIsNotNone(dao.session.scalar(r.geom.wkt))