def test_range_and_in(self):
    """ search by `table2 from 1978 to 1979 in california` (geographic and temporal bounds) """
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory()
    table = TableFactory(dataset=dataset, description='table2', name='table2')
    partition = PartitionFactory(
        dataset=dataset, table=table, time=1,
        grain_coverage=['county'], space_coverage=['california'],
        time_coverage=['1978', '1979'])
    self._my_library.database.commit()

    self._my_library.search.index_dataset(dataset)
    self._my_library.search.index_partition(partition)

    # finds dataset extended with partition
    found = list(
        self._my_library.search.search(
            'table2 from 1978 to 1979 in california'))
    self.assertEqual(len(found), 1)
    self.assertEqual(len(found[0].partitions), 1)
    self.assertIn(partition.vid, found[0].partitions)
def test_search_in(self):
    """ search by `source example.com in California` (geographic bounds) """
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory()
    table = TableFactory(dataset=dataset, name='table2', description='table2')
    partition = PartitionFactory(
        dataset=dataset, table=table, time=1, space_coverage=['california'])

    # commit so the new objects are persisted before indexing, as in the other search tests.
    self._my_library.database.commit()

    self._my_library.search.index_dataset(dataset)
    self._my_library.search.index_partition(partition)

    # find partition in the partition index.
    self._assert_finds_partition(partition, 'in California')

    # finds dataset extended with partition
    found = list(
        self._my_library.search.search('source example.com in California'))
    self.assertEqual(len(found), 1)
    self.assertEqual(len(found[0].partitions), 1)
    self.assertIn(partition.vid, found[0].partitions)
def test_search_by(self):
    """ search by `source example.com by county` (granularity search) """
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory(source='example.com')
    table = TableFactory(dataset=dataset, description='table2', name='table2')
    partition = PartitionFactory(
        dataset=dataset, table=table, grain_coverage=['county'])
    self._my_library.database.commit()

    self._my_library.search.index_dataset(dataset)
    self._my_library.search.index_partition(partition)

    # find partition in the partition index.
    self._assert_finds_partition(partition, 'by county')

    # finds dataset extended with partition
    found = list(
        self._my_library.search.search('source example.com by county'))
    self.assertEqual(len(found), 1)
    self.assertEqual(len(found[0].partitions), 1)
    self.assertIn(partition.vid, found[0].partitions)
def test_search_with(self):
    """ search by `* with cucumber` """
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory()
    table = TableFactory(dataset=dataset, name='table2', description='table2')
    partition = PartitionFactory(dataset=dataset, table=table, time=1)
    self._my_library.database.commit()

    partition.table.add_column('id')
    partition.table.add_column('column1', description='cucumber')
    self._my_library.database.commit()

    self._my_library.search.index_dataset(dataset)
    self._my_library.search.index_partition(partition)

    # find partition in the partition index.
    self._assert_finds_partition(partition, 'dataset with cucumber')

    # finds dataset extended with partition
    found = list(self._my_library.search.search('dataset with cucumber'))
    self.assertEqual(len(found), 1)
    self.assertEqual(len(found[0].partitions), 1)
    self.assertIn(partition.vid, found[0].partitions)
def test_search_years_range(self):
    """ search by `source example.com from 1978 to 1979` (temporal bounds) """
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory()
    table = TableFactory(dataset=dataset, name='table2', description='table2')
    partition = PartitionFactory(
        dataset=dataset, table=table, time=1, time_coverage=['1978', '1979'])
    self._my_library.database.commit()

    self._my_library.search.index_partition(partition)
    self._my_library.search.index_dataset(dataset)

    # find partition in the partition index.
    self._assert_finds_partition(partition, 'from 1978 to 1979')

    # finds dataset extended with partition
    found = list(
        self._my_library.search.search(
            'source example.com from 1978 to 1979'))
    self.assertEqual(len(found), 1)
    self.assertEqual(len(found[0].partitions), 1)
    self.assertIn(partition.vid, found[0].partitions)
def test_creates_table_table(self):
    DatasetFactory._meta.sqlalchemy_session = self.db.session
    TableFactory._meta.sqlalchemy_session = self.db.session

    # Now all tables are created. Can we use the ORM to create datasets?
    ds1 = DatasetFactory()
    self.db.commit()

    TableFactory(dataset=ds1)
    self.db.commit()
def test_creates_column_table(self):
    DatasetFactory._meta.sqlalchemy_session = self.db.session
    TableFactory._meta.sqlalchemy_session = self.db.session

    ds1 = DatasetFactory()
    self.db.commit()
    table = TableFactory(dataset=ds1)

    ColumnFactory._meta.sqlalchemy_session = self.db.session

    # Now all tables are created. Can we use the ORM to create columns?
    ColumnFactory(name='id', table=table)
    self.db.commit()
def test_search_partition_by_name(self):
    DatasetFactory._meta.sqlalchemy_session = self._my_library.database.session
    PartitionFactory._meta.sqlalchemy_session = self._my_library.database.session
    TableFactory._meta.sqlalchemy_session = self._my_library.database.session

    dataset = DatasetFactory()
    table = TableFactory(dataset=dataset, name='table2', description='table2')
    partition = PartitionFactory(
        dataset=dataset, table=table, time=1, name='Partition1')
    self._my_library.database.commit()

    self._my_library.search.index_partition(partition)
    self._assert_finds_partition(partition, str(partition.identity.name))
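# The tests above call a `_assert_finds_partition` helper that is defined elsewhere in
# this test class and not shown in this section. The sketch below is only a plausible
# reconstruction based on how the helper is used here; the `search_partitions` call and
# the shape of its results are assumptions, not the library's confirmed API.
def _assert_finds_partition(self, partition, search_phrase):
    # Query the partition index with the given phrase and assert that the
    # indexed partition's vid appears among the results.
    found = list(self._my_library.search.search_partitions(search_phrase))
    found_vids = [result.vid for result in found]
    self.assertIn(partition.vid, found_vids)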