Example #1
class AbstractServiceTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        db = self.db_server.get_database(self.database_name)
        self.assertFalse(db.table_exists('run_activity'))
        self.assertFalse(db.table_exists('computed_indicators'))

        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(db.table_exists('run_activity'))
        self.assertTrue(db.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        db = self.db_server.get_database(self.database_name)
        self.assertTrue(db.table_exists('run_activity'))
        self.assertTrue(db.table_exists('computed_indicators'))
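The service tests above follow one create/inspect/drop cycle on DatabaseServer. A minimal sketch of that cycle, using only calls that appear in these examples (imports are omitted here, as they are in the examples themselves):

config = TestDatabaseConfiguration(database_name='test_services_database')
server = DatabaseServer(config)

server.create_database('test_services_database')
db = server.get_database('test_services_database')
# A freshly created database has no services tables yet; AbstractService(config)
# is what creates run_activity and computed_indicators in the tests above.
assert not db.table_exists('run_activity')

db.close()
server.drop_database('test_services_database')
server.close()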
Example #2
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        self.assert_(not self.db.table_exists('building_types'))
        
        
    def test_create_table(self):
        CreateBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        self.assert_(self.db.table_exists('building_types'))
Example #3
class AbstractServiceTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)
    
    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()
        
    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        db = self.db_server.get_database(self.database_name)
        self.assertFalse(db.table_exists('run_activity'))
        self.assertFalse(db.table_exists('computed_indicators'))

        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(db.table_exists('run_activity')) 
        self.assertTrue(db.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        db = self.db_server.get_database(self.database_name)
        self.assertTrue(db.table_exists('run_activity')) 
        self.assertTrue(db.table_exists('computed_indicators'))               
Example #4
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        self.assert_(not self.db.table_exists('building_types'))
        
        
    def test_create_table(self):
        CreateBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        self.assert_(self.db.table_exists('building_types'))
Example #5
    def convert_databases(self, db_config, config):
        databases = config['databases']
        tables = config['tables']

        try:
            backup = config['backup']
        except KeyError:
            backup = True

        try:
            backup_postfix = config['backup_postfix']
        except KeyError:
            backup_postfix = '_old'

        dbconfig = DatabaseServerConfiguration(protocol='mysql',
                                               host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)

            self.convert_database(db, tables[db_name], backup, backup_postfix)

            db.close()

        db_server.close()
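The config argument of convert_databases is only read for four keys, two of which are optional. A sketch of its shape, reconstructed from the lookups in the method above (the database and table names are placeholders):

config = {
    'databases': ['db_one', 'db_two'],        # databases to convert
    'tables': {                               # tables to touch, per database
        'db_one': ['table1', 'table2'],
        'db_two': ['table1'],
    },
    'backup': True,                           # optional, defaults to True
    'backup_postfix': '_old',                 # optional, defaults to '_old'
}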
Example #6
    def convert_databases(self, db_config, config):
        databases = config['databases']
        tables = config['tables']
        
        try: backup = config['backup']
        except KeyError: backup = True
        
        try: backup_postfix = config['backup_postfix']
        except KeyError: backup_postfix = '_old'
        
        dbconfig = DatabaseServerConfiguration(
            protocol = 'mysql',
            host_name = db_config.host_name,
            user_name = db_config.user_name,
            password = db_config.password                                       
        )
        db_server = DatabaseServer(dbconfig)
        
        for db_name in databases:
            db = db_server.get_database(db_name)

            self.convert_database(db, tables[db_name], backup, backup_postfix)
                
            db.close()
            
        db_server.close()
Example #7
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql',))
        
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        try:
            self.db.DoQuery('select * from job_building_types;')
            self.fail('Output table job_building_types already exists. (Check setUp)')
        except: pass
        
        
    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        try:
            self.db.DoQuery('select * from job_building_types;')
        except:
            self.fail('Expected output table job_building_types does not exist.')
    
            
    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        expected_results = [
            ['id', 'name', 'home_based'],
            [1, "commercial", 0],
            [3, "industrial", 0],
            [2, "governmental", 0],
            [4, "home_based", 1]
            ]
        
        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except:
            self.fail('Expected output table job_building_types does not exist.')
        
        self.assert_(expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Example #8
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql', ))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        try:
            self.db.DoQuery('select * from job_building_types;')
            self.fail(
                'Output table job_building_types already exists. (Check setUp)')
        except:
            pass

    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        try:
            self.db.DoQuery('select * from job_building_types;')
        except:
            self.fail(
                'Expected output table job_building_types does not exist.')

    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        expected_results = [['id', 'name', 'home_based'], [1, "commercial", 0],
                            [3, "industrial", 0], [2, "governmental", 0],
                            [4, "home_based", 1]]

        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except:
            self.fail(
                'Expected output table job_building_types does not exist.')

        self.assert_(
            expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Example #9
class RunManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_setup_run(self):
        base_directory = tempfile.mkdtemp(prefix='opus_tmp')
        run_name = 'test_scenario_name'
        run_manager = RunManager(self.config)

        run_manager.setup_new_run(cache_directory = os.path.join(base_directory, run_name),
                                  configuration = {})
        resulting_cache_directory = run_manager.get_current_cache_directory()
        self.assertTrue(resulting_cache_directory.find(run_name)>-1)
        self.assertEquals(os.path.dirname(resulting_cache_directory), base_directory)
        self.assertTrue(run_manager.ready_to_run)
        self.assertTrue(not os.path.exists(resulting_cache_directory))
        run_manager.services_db.close()
        os.rmdir(base_directory)

    def test_add_row_to_history(self):
        run_manager = RunManager(self.config)
        cache_directory = tempfile.mkdtemp(prefix='opus_tmp')
        resources = {'cache_directory':cache_directory,
                     'description':'test_run',
                     'base_year':2000,
                     'project_name': 'test'}

        run_manager.add_row_to_history(run_id = 1,
                                       resources = resources,
                                       status = 'done')

        db = self.db_server.get_database(self.database_name)
        run_activity_table = db.get_table('run_activity')

        s = select([run_activity_table.c.run_description,
                    run_activity_table.c.status],
                    whereclause = run_activity_table.c.run_id == 1)

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        run_name, status = results[0]
        self.assertEqual(status, 'done')
        self.assertEqual(run_name, 'test_run')

        run_manager.services_db.close()
        os.rmdir(cache_directory)
Example #10
    def _create_db_from_chain_via_python(self, from_database_configuration,
                                         to_database_configuration,
                                         tables_to_copy):

        db_server_from = DatabaseServer(from_database_configuration)
        db_server_to = DatabaseServer(to_database_configuration)
        db_server_to.drop_database(to_database_configuration.database_name)
        db_server_to.create_database(to_database_configuration.database_name)

        database_out = db_server_to.get_database(
            to_database_configuration.database_name)

        scenario_db_manager = ScenarioDatabaseManager(
            server_configuration=from_database_configuration,
            base_scenario_database_name=from_database_configuration.
            database_name)
        table_mapping = scenario_db_manager.get_database_to_table_mapping()

        cross_db_operations = CrossDatabaseOperations()

        #by default, copy all tables
        if tables_to_copy == []:
            tables_to_copy = sum(table_mapping.values(),
                                 [])  # flatten a list of lists
        elif 'scenario_information' not in tables_to_copy:
            tables_to_copy.append('scenario_information')

        for database_name, tables in table_mapping.items():
            database_in = db_server_from.get_database(database_name)
            for table in tables:
                if table not in tables_to_copy:
                    continue

                logger.start_block("Copying table '%s' from database '%s'" %
                                   (table, database_name))

                try:
                    cross_db_operations.copy_table(table_to_copy=table,
                                                   database_in=database_in,
                                                   database_out=database_out,
                                                   use_chunking=True)
                finally:
                    logger.end_block()
            database_in.close()

        self._fix_scenario_information_table(database_out)
        database_out.close()
        db_server_from.close()
        db_server_to.close()
Example #11
 def convert_databases(self, db_config, databases, tables, patterns, backup=True, backup_postfix='_old'):
     dbconfig = DatabaseServerConfiguration(
         host_name = db_config.host_name,
         protocol = 'mysql',
         user_name = db_config.user_name,
         password = db_config.password                                       
     )
     db_server = DatabaseServer(dbconfig)
     
     for db_name in databases:
         db = db_server.get_database(db_name)
         self.convert_database(db, tables[db_name], patterns, backup, backup_postfix)
         db.close()
         
     db_server.close()
Example #12
 def _create_db_from_chain_via_python(self, 
                                      from_database_configuration, 
                                      to_database_configuration,
                                      tables_to_copy):
             
     db_server_from = DatabaseServer(from_database_configuration)
     db_server_to = DatabaseServer(to_database_configuration)
     db_server_to.drop_database(to_database_configuration.database_name)
     db_server_to.create_database(to_database_configuration.database_name)
     
     database_out = db_server_to.get_database(to_database_configuration.database_name)
     
     scenario_db_manager = ScenarioDatabaseManager(
         server_configuration = from_database_configuration, 
         base_scenario_database_name = from_database_configuration.database_name)
     table_mapping = scenario_db_manager.get_database_to_table_mapping()
 
     cross_db_operations = CrossDatabaseOperations()
     
     #by default, copy all tables
     if tables_to_copy == []:
          tables_to_copy = sum(table_mapping.values(), [])  # flatten a list of lists
     elif 'scenario_information' not in tables_to_copy:
         tables_to_copy.append('scenario_information')
         
     for database_name, tables in table_mapping.items():
         database_in = db_server_from.get_database(database_name)
         for table in tables:
             if table not in tables_to_copy:
                 continue
             
             logger.start_block("Copying table '%s' from database '%s'" 
                                % (table, database_name)) 
             
             try:               
                 cross_db_operations.copy_table(table_to_copy = table, 
                                                database_in = database_in, 
                                                database_out = database_out, 
                                                use_chunking = True)
             finally:
                 logger.end_block()        
         database_in.close()
         
     self._fix_scenario_information_table(database_out)            
     database_out.close()
     db_server_from.close()
     db_server_to.close()
Example #13
class ResultsManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_add_computed_indicator(self):
        result_manager = ResultsManager(self.config)

        indicator_name = 'test'
        dataset_name = 'ds'
        expression = 'exp'
        run_id = None
        data_path = '/home'

        result_manager.add_computed_indicator(indicator_name,
                                              dataset_name,
                                              expression,
                                              run_id,
                                              data_path,
                                              project_name='test')

        db = self.db_server.get_database(self.database_name)
        computed_indicators_table = db.get_table('computed_indicators')

        s = select(
            [
                computed_indicators_table.c.indicator_name,
                computed_indicators_table.c.expression
            ],
            whereclause=computed_indicators_table.c.dataset_name == 'ds')

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        i_name, exp = results[0]
        self.assertEqual(indicator_name, i_name)
        self.assertEqual(expression, exp)

        result_manager.services_db.close()
Example #14
    def convert_databases(self,
                          db_config,
                          databases,
                          tables,
                          patterns,
                          backup=True,
                          backup_postfix='_old'):
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)
            self.convert_database(db, tables[db_name], patterns, backup,
                                  backup_postfix)
            db.close()

        db_server.close()
Example #15
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        try:
            self.db.DoQuery('select * from building_types;')
            self.fail('Output table building_types already exists. (Check setUp)')
        except: pass
        
    def test_create_table(self):
        creator = TableCreator()
        db = creator._get_db(TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        self.assert_(not db.table_exists('test_table'))
        self.assert_(not db.table_exists('test_table_bak'))
        
        db.DoQuery('CREATE TABLE test_table '
                '(id INT, name varchar(50), units varchar(50));')
        self.assert_(db.table_exists('test_table'))
        self.assert_(not db.table_exists('test_table_bak'))

        creator._backup_table(db, 'test_table')
        self.assert_(db.table_exists('test_table'))
        self.assert_(db.table_exists('test_table_bak'))
        
        creator._drop_table(db, 'test_table')
        self.assert_(not db.table_exists('test_table'))
        self.assert_(db.table_exists('test_table_bak'))
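The assertions above pin down the naming convention used by TableCreator: _backup_table copies a table to a sibling with a _bak suffix and leaves the original in place, while _drop_table removes only the named table. A condensed sketch of that round trip with the same (nominally private) helpers the test calls:

creator = TableCreator()
db = creator._get_db(TestDatabaseConfiguration(protocol='mysql'), 'test_create_table')

db.DoQuery('CREATE TABLE test_table (id INT);')
creator._backup_table(db, 'test_table')   # test_table_bak now exists alongside test_table
creator._drop_table(db, 'test_table')     # test_table is gone, test_table_bak remains
db.close()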
Example #16
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        try:
            self.db.DoQuery('select * from building_types;')
            self.fail('Output table building_types already exists. (Check setUp)')
        except: pass
        
    def test_create_table(self):
        creator = TableCreator()
        db = creator._get_db(TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        self.assert_(not db.table_exists('test_table'))
        self.assert_(not db.table_exists('test_table_bak'))
        
        db.DoQuery('CREATE TABLE test_table '
                '(id INT, name varchar(50), units varchar(50));')
        self.assert_(db.table_exists('test_table'))
        self.assert_(not db.table_exists('test_table_bak'))

        creator._backup_table(db, 'test_table')
        self.assert_(db.table_exists('test_table'))
        self.assert_(db.table_exists('test_table_bak'))
        
        creator._drop_table(db, 'test_table')
        self.assert_(not db.table_exists('test_table'))
        self.assert_(db.table_exists('test_table_bak'))
Example #17
class ResultsManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)
    
    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()
                
    def test_add_computed_indicator(self):
        result_manager = ResultsManager(self.config)
        
        indicator_name = 'test'
        dataset_name = 'ds'
        expression = 'exp'
        run_id = None
        data_path = '/home'        
        
        result_manager.add_computed_indicator(indicator_name, dataset_name, expression, run_id, data_path, project_name = 'test')
        
        db = self.db_server.get_database(self.database_name)
        computed_indicators_table = db.get_table('computed_indicators')
        
        s = select([computed_indicators_table.c.indicator_name,
                    computed_indicators_table.c.expression],
                    whereclause = computed_indicators_table.c.dataset_name == 'ds')

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)
        
        i_name, exp = results[0]
        self.assertEqual(indicator_name, i_name)
        self.assertEqual(expression, exp)
        
        result_manager.services_db.close()                
Example #18
 def tearDown(self):
     # Turn off the logger, so we can delete the cache directory.
     logger.disable_all_file_logging()
     db_server = DatabaseServer(self.config)
     db_server.drop_database("eugene_services_test")
     db_server.close()
Example #19
class TestConvertDatabases(opus_unittest.OpusTestCase):
    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1', 
            'convert_database_test_db2',
            ]
        
        self.test_table_names = [
            'table1',
            'table2',
            'table3',
            ]
            
        table_structure = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"No opus dot.","constant"),'
            '(2,"opus.core.miscellaneous","No opus dot."),'
            '(3,"opus.urbansim.gridcell.percent_water",'
                '"gc_cbd = gridcell.disaggregate('
                    'opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"),'
            '(4,"","")'
            )
            
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"No opus dot.","constant"],
            [2,"opus.core.miscellaneous","No opus dot."],
            [3,"opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                    "opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"],
            [4,"",""]
            ]
        
        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"No opus dot.","constant"],
            [2,"opus.core.miscellaneous","No opus dot."],
            [3,"opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                    "psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"],
            [4,"",""]
            ]
            
        insert_items_template = (
            "insert into %(table)s values %(data)s;")
        
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]
        
        self.config = {
            'databases':self.test_db_names,
            'tables':table_list,
            
            'backup':True,
            'backup_postfix':'_old',
            }
            
            
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]
        
        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' 
                    % (table_name,
                       table_structure))
                
                db.DoQuery(insert_items_template 
                            % {'table':table_name, 'data':table_data})
                
        
    def tearDown(self):
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            
        for db in self.dbs:
            db.close()
            
        self.db_server.close()
        
        
    def test_convert_table(self):
        ConvertDatabase().convert_table(self.dbs[0], self.test_table_names[0])
        
        db = self.dbs[0]
        
        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." 
                % (table0,
                   self.expected_output_changed,
                   results))
                       
        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (table,
                               self.expected_output_unchanged,
                               results))
        
        
    def test_convert_table_backup(self):
        db = self.dbs[0]
        table = self.test_table_names[0]
        
        ConvertDatabase().convert_table(db, table,
                                        backup=True, backup_postfix='_old')
        
        results = db.GetResultsFromQuery('select * from %s_old;' % table)
        self.assert_(results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." 
                % (table,
                   self.expected_output_unchanged,
                   results))
                   
                   
    def test_convert_database(self):
        ConvertDatabase().convert_database(self.dbs[0], 
            self.test_table_names[0:2])
        
        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_changed,
                       results))
                       
        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        for i in range(len(self.dbs[1:])):
            db = self.dbs[i+1]
            
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (i,
                               table, 
                               self.expected_output_unchanged,
                               results))

                               
    def test_convert_databases(self):
        ConvertDatabase().convert_databases(TestDatabaseConfiguration(protocol='mysql'), self.config)
        
        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)
            
            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                        " Expected %s. Recieved %s." 
                            % (db_name,
                               table,
                               self.expected_output_changed,
                               results))
Example #20
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    def setUp(self):
        self.databases = ["db_chain_son", "db_chain_dad", "db_chain_granddad"]

        self.config = TestDatabaseConfiguration(protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        self.db_chain_granddad = self.server.get_database("db_chain_granddad")
        self.db_chain_dad = self.server.get_database("db_chain_dad")
        self.db_chain_son = self.server.get_database("db_chain_son")

        self._create_tables()
        self.granddad_schema = self.db_chain_granddad.get_table("base_schema")
        self.dad_schema = self.db_chain_dad.get_table("base_schema")
        self.granddad_schema2 = self.db_chain_granddad.get_table("base_schema2")
        self.son_schema2 = self.db_chain_son.get_table("base_schema2")

        self._seed_values()

    def _create_databases(self):

        for db in self.databases:
            try:
                self.server.drop_database(db)
            except:
                pass

            self.server.create_database(db)

    def _create_tables(self):
        self.db_chain_granddad.create_table_from_schema("base_schema", base_schema)
        self.db_chain_granddad.create_table_from_schema("base_schema2", base_schema2)
        self.db_chain_granddad.create_table_from_schema("scenario_information", base_scenario_information_schema)

        self.db_chain_dad.create_table_from_schema("base_schema", base_schema)
        self.db_chain_dad.create_table_from_schema("scenario_information", base_scenario_information_schema)

        self.db_chain_son.create_table_from_schema("base_schema2", base_schema2)
        self.db_chain_son.create_table_from_schema("scenario_information", base_scenario_information_schema)

    def _seed_values(self):
        u = self.db_chain_granddad.get_table("scenario_information").insert(
            values={self.db_chain_granddad.get_table("scenario_information").c.parent_database_url: ""}
        )
        self.db_chain_granddad.execute(u)

        u = self.db_chain_dad.get_table("scenario_information").insert(
            values={self.db_chain_dad.get_table("scenario_information").c.parent_database_url: "db_chain_granddad"}
        )

        self.db_chain_dad.execute(u)
        u = self.db_chain_son.get_table("scenario_information").insert(
            values={self.db_chain_son.get_table("scenario_information").c.parent_database_url: "db_chain_dad"}
        )
        self.db_chain_son.execute(u)

        granddad_vals = [
            {"integer_col": 0, "clob_col": "0", "smallinteger_col": 0, "float_col": 0.0},
            {"integer_col": 2, "clob_col": "2", "smallinteger_col": 2, "float_col": 2.0},
            {"integer_col": 4, "clob_col": "4", "smallinteger_col": 4, "float_col": 4.0},
        ]

        dad_vals = [
            {"integer_col": 0, "clob_col": "0", "smallinteger_col": 0, "float_col": 0.0},
            {"integer_col": 1, "clob_col": "1", "smallinteger_col": 1, "float_col": 1.0},
            {"integer_col": 2, "clob_col": "2", "smallinteger_col": 2, "float_col": 2.0},
            {"integer_col": 3, "clob_col": "3", "smallinteger_col": 3, "float_col": 3.0},
            {"integer_col": 4, "clob_col": "4", "smallinteger_col": 4, "float_col": 4.0},
        ]

        granddad_vals2 = [
            {"integer_col": 0, "varchar_col": "0", "boolean_col": True, "numeric_col": 0.0},
            {"integer_col": 2, "varchar_col": "2", "boolean_col": True, "numeric_col": 2.0},
            {"integer_col": 4, "varchar_col": "4", "boolean_col": True, "numeric_col": 4.0},
        ]

        son_vals2 = [
            {"integer_col": 0, "varchar_col": "0", "boolean_col": False, "numeric_col": 0.0},
            {"integer_col": 4, "varchar_col": "4", "boolean_col": False, "numeric_col": 4.0},
        ]

        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(), granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(), granddad_vals2)
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)

    def tearDown(self):
        self.db_chain_granddad.close()
        self.db_chain_dad.close()
        self.db_chain_son.close()

        self.server.drop_database("db_chain_granddad")
        self.server.drop_database("db_chain_dad")
        self.server.drop_database("db_chain_son")

        self.server.close()
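The scenario_information rows seeded in _seed_values encode a parent chain that the scenario database tooling (ScenarioDatabaseManager in the earlier examples) can follow from child to ancestor. Sketched as comments only, since the traversal itself is not part of this example:

# db_chain_son       parent_database_url -> 'db_chain_dad'
# db_chain_dad       parent_database_url -> 'db_chain_granddad'
# db_chain_granddad  parent_database_url -> ''   (root of the chain)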
Example #21
class RunManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_setup_run(self):
        base_directory = tempfile.mkdtemp(prefix='opus_tmp')
        run_name = 'test_scenario_name'
        run_manager = RunManager(self.config)

        run_manager.setup_new_run(cache_directory=os.path.join(
            base_directory, run_name),
                                  configuration={})
        resulting_cache_directory = run_manager.get_current_cache_directory()
        self.assertTrue(resulting_cache_directory.find(run_name) > -1)
        self.assertEquals(os.path.dirname(resulting_cache_directory),
                          base_directory)
        self.assertTrue(run_manager.ready_to_run)
        self.assertTrue(not os.path.exists(resulting_cache_directory))
        run_manager.services_db.close()
        os.rmdir(base_directory)

    def test_add_row_to_history(self):
        run_manager = RunManager(self.config)
        cache_directory = tempfile.mkdtemp(prefix='opus_tmp')
        resources = {
            'cache_directory': cache_directory,
            'description': 'test_run',
            'base_year': 2000,
            'project_name': 'test'
        }

        run_manager.add_row_to_history(run_id=1,
                                       resources=resources,
                                       status='done')

        db = self.db_server.get_database(self.database_name)
        run_activity_table = db.get_table('run_activity')

        s = select([
            run_activity_table.c.run_description, run_activity_table.c.status
        ],
                   whereclause=run_activity_table.c.run_id == 1)

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        run_name, status = results[0]
        self.assertEqual(status, 'done')
        self.assertEqual(run_name, 'test_run')

        run_manager.services_db.close()
        os.rmdir(cache_directory)

    def test_get_runs(self):
        from numpy.random import randint
        run_manager = RunManager(self.config)
        run_ids = range(1, 11)
        run_names = ['run ' + str(id) for id in run_ids]
        resources = {'cache_directory': None}
        status = ['done', 'failed'] * 5
        for idx, run_id in enumerate(run_ids):
            run_manager.add_row_to_history(run_id=run_id,
                                           resources=resources,
                                           status=status[idx],
                                           run_name=run_names[idx])
        results = run_manager.get_runs(return_columns=['run_name'], run_id=5)
        expected = [('run 5', )]
        self.assertEqual(results, expected)
        results = run_manager.get_runs(return_columns=['run_id'],
                                       status='done')
        expected = [(1, ), (3, ), (5, ), (7, ), (9, )]
        self.assertEqual(results, expected)

        results = run_manager.get_runs(return_columns=['run_name'],
                                       return_rs=True,
                                       run_id=5)
        expected = 'run 5'
        self.assertEqual(results.fetchone()['run_name'], expected)
        results = run_manager.get_runs(return_columns=['run_id'],
                                       return_rs=True,
                                       status='done')
        results = [rs['run_id'] for rs in results]
        expected = [1, 3, 5, 7, 9]
        self.assertEqual(results, expected)
Example #22
class TestCombineTables(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))
        self.db_name = 'test_combine_tables'
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

        self.from_tables = (
            ('table_a', 'A'),
            ('table_b', 'B'),
            ('table_c', 'C'),
            ('table_d', 'D'),
        )
        table_ids = {
            'table_a': [1],
            'table_b': [2, 3],
            'table_c': [4, 5, 6, 7],
            'table_d': [8, 9, 10, 11, 12, 13, 14, 15],
        }
        self.to_table = 'to_table'

        for table, type in self.from_tables:
            self.db.DoQuery('CREATE TABLE %s (job_id INT, sector_id INT, '
                            'grid_id INT, sic INT, building_type varchar(5), '
                            'home_based tinyint(4), impute_flag tinyint(4));' %
                            table)

            values = ','.join([
                '(%(j)s, %(j)s, %(j)s, %(j)s, "%(type)s", %(j)s, %(j)s)' % {
                    'j': j,
                    'type': type
                } for j in table_ids[table]
            ])
            self.db.DoQuery(
                'INSERT INTO %(table_name)s (job_id, sector_id, '
                'grid_id, sic, building_type, home_based, impute_flag) VALUES '
                '%(values)s;' % {
                    'table_name': table,
                    'values': values,
                })
            number_rows = self.db.GetResultsFromQuery(
                'SELECT count(*) FROM %s' % table)[1][0]
            self.assertEqual(number_rows, len(table_ids[table]))

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        for table, type in self.from_tables:
            try:
                self.db.DoQuery('select * from %s;' % table)
            except:
                self.fail(
                    'Expected input table %s did not exist. (Check setUp)' %
                    table)

        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
            self.fail('Output table %s already exists. (Check setUp)' %
                      self.to_table)
        except:
            pass

    def test_create_table(self):
        CombineTables().combine_tables(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name,
            [i[0] for i in self.from_tables], self.to_table)

        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except:
            self.fail('Output table %s not created.' % self.to_table)

    def test_combine_tables(self):
        CombineTables().combine_tables(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name,
            [i[0] for i in self.from_tables], self.to_table)

        expected_rows = 0
        for table, type in self.from_tables:
            count = self.db.GetResultsFromQuery('select count(*) from %s' %
                                                table)[1:][0][0]
            expected_rows += count

        try:
            count = self.db.GetResultsFromQuery('select count(*) from %s;' %
                                                self.to_table)[1:][0][0]
        except:
            self.fail('Expected output table %s does not exist.' %
                      self.to_table)

        if (expected_rows != count):
            for table_name, type in self.from_tables:
                self._print_table(table_name)
            self._print_table(self.to_table)

        self.assert_(
            expected_rows == count,
            'Incorrect number of rows in output. Expected %s; received %s.' %
            (expected_rows, count))

    def _print_table(self, table_name):
        """Provide debugging info to figure out why the above test is failing, sometimes."""
        try:
            results = self.db.GetResultsFromQuery('select * from %s' %
                                                  table_name)
            logger.start_block('Contents of table %s' % table_name)
            try:
                for row in results:
                    logger.log_status(row)
            finally:
                logger.end_block()
        except:
            logger.log_status('Error accessing table %s' % table_name)
            logger.log_stack_trace()
Example #23
class TestDBSubPattern(opus_unittest.OpusTestCase):
    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1', 
            'convert_database_test_db2',
            ]
        
        self.test_table_names = [
            'table1',
            'table2',
            'table3',
            ]
            
        table_schema = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"Does not match P A T T E R N.","Matches pattern."),'
            '(2,"Matches pattern.","Does not match P A T T E R N."),'
            '(3,NULL,NULL),'
            '(4,"","")'
            )
            
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"Does not match P A T T E R N.","Matches pattern."],
            [2,"Matches pattern.","Does not match P A T T E R N."],
            [3,None,None],
            [4,"",""]
            ]
        
        self.patterns = [
            (r'(pattern)(\.)', r'\1 well\2'),
            (r'^Matches pattern well\.$', r'Matches pattern perfectly!')
            ]
        
        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"Does not match P A T T E R N.","Matches pattern perfectly!"],
            [2,"Matches pattern.","Does not match P A T T E R N."],
            [3,None,None],
            [4,"",""]
            ]
            
        insert_items_template = (
            "insert into %(table)s values %(data)s;")
      
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]
        
        self.config = {
            'databases':self.test_db_names,
            'tables':table_list,
            
            'backup':True,
            'backup_postfix':'_old',
            }
            
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]
        
        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' 
                    % (table_name,
                       table_schema))
                
                db.DoQuery(insert_items_template 
                            % {'table':table_name, 'data':table_data})
                
        
    def tearDown(self):
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            
        for db in self.dbs:
            db.close()
            
        self.db_server.close()
        
        
    def test_convert_table(self):
        DBSubPattern().convert_table(self.dbs[0], self.test_table_names[0], 
            self.patterns)
        
        db = self.dbs[0]
        
        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." 
                % (table0,
                   self.expected_output_changed,
                   results))
                       
        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (table,
                               self.expected_output_unchanged,
                               results))
        
        
    def test_convert_table_backup(self):
        db = self.dbs[0]
        table = self.test_table_names[0]
        
        DBSubPattern().convert_table(db, table, self.patterns,
                                        backup=True, backup_postfix='_old')
        backup_table_name = '%s_old' % table
        try:
            results = db.GetResultsFromQuery('select * from %s' % backup_table_name)
        except:
            self.fail("Backup failed for single table (%s) -- backup table (%s) not "
                "created." % (table, backup_table_name))
            
        self.assert_(results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." 
                % (table, self.expected_output_unchanged, results)
            )
                                
                   
    def test_convert_database(self):
        DBSubPattern().convert_database(self.dbs[0], 
            self.test_table_names[0:2], self.patterns)
        
        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_changed,
                       results))
                       
        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        for i in range(len(self.dbs[1:])):
            db = self.dbs[i+1]
            
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (i,
                               table, 
                               self.expected_output_unchanged,
                               results))

                               
    def test_convert_databases(self):
        DBSubPattern().convert_databases(TestDatabaseConfiguration(protocol='mysql'), 
            self.config['databases'], self.config['tables'], self.patterns)
        
        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)
            
            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                        " Expected %s. Recieved %s." 
                            % (db_name,
                               table,
                               self.expected_output_changed,
                               results))
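The two patterns in setUp are applied in order, so the output of the first substitution feeds the second. A standalone check of that chaining with plain re, mirroring expected_output_changed above (DBSubPattern's own implementation is not shown in this example):

import re

patterns = [
    (r'(pattern)(\.)', r'\1 well\2'),
    (r'^Matches pattern well\.$', r'Matches pattern perfectly!'),
]

value = 'Matches pattern.'
for regex, replacement in patterns:
    value = re.sub(regex, replacement, value)

assert value == 'Matches pattern perfectly!'
# 'Does not match P A T T E R N.' is untouched by both patterns, which matches
# the unchanged column in expected_output_changed.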
Example #24
class TestDBSubPattern(opus_unittest.OpusTestCase):
    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1',
            'convert_database_test_db2',
        ]

        self.test_table_names = [
            'table1',
            'table2',
            'table3',
        ]

        table_schema = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = ('(1,"Does not match P A T T E R N.","Matches pattern."),'
                      '(2,"Matches pattern.","Does not match P A T T E R N."),'
                      '(3,NULL,NULL),'
                      '(4,"","")')

        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "Does not match P A T T E R N.", "Matches pattern."],
            [2, "Matches pattern.", "Does not match P A T T E R N."],
            [3, None, None], [4, "", ""]
        ]

        self.patterns = [(r'(pattern)(\.)', r'\1 well\2'),
                         (r'^Matches pattern well\.$',
                          r'Matches pattern perfectly!')]

        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "Does not match P A T T E R N.", "Matches pattern perfectly!"],
            [2, "Matches pattern.", "Does not match P A T T E R N."],
            [3, None, None], [4, "", ""]
        ]

        insert_items_template = ("insert into %(table)s values %(data)s;")

        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]

        self.config = {
            'databases': self.test_db_names,
            'tables': table_list,
            'backup': True,
            'backup_postfix': '_old',
        }

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))

        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]

        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' % (table_name, table_schema))

                db.DoQuery(insert_items_template % {
                    'table': table_name,
                    'data': table_data
                })

    def tearDown(self):
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)

        for db in self.dbs:
            db.close()

        self.db_server.close()

    def test_convert_table(self):
        DBSubPattern().convert_table(self.dbs[0], self.test_table_names[0],
                                     self.patterns)

        db = self.dbs[0]

        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(
            results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." %
            (table0, self.expected_output_changed, results))

        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (table, self.expected_output_unchanged, results))

    def test_convert_table_backup(self):
        db = self.dbs[0]
        table = self.test_table_names[0]

        DBSubPattern().convert_table(db,
                                     table,
                                     self.patterns,
                                     backup=True,
                                     backup_postfix='_old')
        backup_table_name = '%s_old' % table
        try:
            results = db.GetResultsFromQuery('select * from %s' %
                                             backup_table_name)
        except:
            self.fail(
                "Backup failed for single table (%s) -- backup table (%s) not "
                "created." % (table, backup_table_name))

        self.assert_(
            results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." %
            (table, self.expected_output_unchanged, results))

    def test_convert_database(self):
        DBSubPattern().convert_database(self.dbs[0],
                                        self.test_table_names[0:2],
                                        self.patterns)

        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." %
                (table, self.expected_output_changed, results))

        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        for i in range(len(self.dbs[1:])):
            db = self.dbs[i + 1]

            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (i, table, self.expected_output_unchanged, results))

    def test_convert_databases(self):
        DBSubPattern().convert_databases(
            TestDatabaseConfiguration(protocol='mysql'),
            self.config['databases'], self.config['tables'], self.patterns)

        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)

            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                    " Expected %s. Recieved %s." %
                    (db_name, table, self.expected_output_changed, results))
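As a side note, the two regular expressions in self.patterns are applied in sequence, which is what turns "Matches pattern." into "Matches pattern perfectly!" in expected_output_changed. A minimal sketch of that two-pass substitution, using only Python's re module (it illustrates the expected cell-level transformation, not DBSubPattern's actual implementation):

import re

patterns = [(r'(pattern)(\.)', r'\1 well\2'),
            (r'^Matches pattern well\.$', r'Matches pattern perfectly!')]

def apply_patterns(value):
    # Apply each (regex, replacement) pair in order to a single cell value.
    for regex, replacement in patterns:
        value = re.sub(regex, replacement, value)
    return value

assert apply_patterns("Matches pattern.") == "Matches pattern perfectly!"
assert apply_patterns("Does not match P A T T E R N.") == "Does not match P A T T E R N."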
class AbstractFunctionalTest(object):
    protocol = ''

    def setUp(self):
        self.db_config = TestDatabaseConfiguration(protocol=self.protocol)
        self.db_config_node = self.db_config._database_configuration_node()
        self.db_server = DatabaseServer(self.db_config)

        self.test_db = 'OpusDatabaseTestDatabase'

        self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
        self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
        self.year = 1000

        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
        self.test_data = {
            self.year: {
                'table_a': {
                    'tablea_id': array([1, 2, 3]),
                    'tablea_id_name': array(['1', '2', '3']),
                    'value1': array([1.0, 2.001, 3], dtype='float'),
                    'value2':
                    array([True, False, False], dtype='i'
                          ),  ## sqlite has problems handling the bool type
                },
                'table_b': {
                    'tableb_id': array([1, 2, 3]),
                    'tableb_id_name': array(['one', 'two', 'three']),
                    'value3': array([1.0, 2.001, 3], dtype='float'),
                },
            },
        }
        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir,
                                                       self.test_data)

    def tearDown(self):
        if self.db_server.has_database(self.test_db):
            self.db_server.drop_database(self.test_db)
        self.db_server.close()
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def test_export_all_tables(self):
        logger.log_status("Test export all tables for %s with %s" %
                          (self.protocol, self.__class__))
        optional_args = [
            '-c',
            os.path.join(self.temp_dir, str(self.year)), '-d', self.test_db,
            '--database_configuration=%s' % self.db_config_node
        ]
        self._call_script(self.export_from_cache_opus_path, args=optional_args)

        self.assertTrue(
            self.db_server.has_database(database_name=self.test_db))
        db = OpusDatabase(database_server_configuration=self.db_config,
                          database_name=self.test_db)

        table_names = self.test_data[self.year].keys()
        existing_tables = db.get_tables_in_database()
        self.assertEqual(set(existing_tables), set(table_names))

        ## export data from db to cache
        export_year = str(self.year + 100)
        exp_dir = os.path.join(self.temp_dir, export_year)

        optional_args = [
            '-d', self.test_db, '-c', self.temp_dir, '-y', export_year,
            '--database_configuration=%s' % self.db_config_node
        ]
        self._call_script(self.export_to_cache_opus_path, args=optional_args)

        exported_datasets = [os.path.split(f)[1] for f in glob(exp_dir + '/*')]
        self.assertEqual(set(exported_datasets), set(table_names))

        org_dir = os.path.join(self.temp_dir, str(self.year))
        self._two_caches_are_identical(org_dir, exp_dir)

        db.close()
        self.db_server.drop_database(self.test_db)
        rmtree(exp_dir)

    def test_export_one_table(self):
        logger.log_status("Test export single table for %s with %s" %
                          (self.protocol, self.__class__))
        for table_name in self.test_data[self.year].keys():
            self._test_export_one_table(table_name)

    def _test_export_one_table(self, table_name):
        optional_args = [
            '-c',
            os.path.join(self.temp_dir, str(self.year)), '-d', self.test_db,
            '-t', table_name,
            '--database_configuration=%s' % self.db_config_node
        ]
        self._call_script(self.export_from_cache_opus_path, args=optional_args)

        self.assertTrue(
            self.db_server.has_database(database_name=self.test_db))
        db = OpusDatabase(database_server_configuration=self.db_config,
                          database_name=self.test_db)
        existing_tables = db.get_tables_in_database()
        self.assertEqual(set(existing_tables), set([table_name]))

        export_year = str(self.year + 100)
        exp_dir = os.path.join(self.temp_dir, export_year)

        optional_args = [
            '-d', self.test_db, '-c', self.temp_dir, '-y', export_year, '-t',
            table_name,
            '--database_configuration=%s' % self.db_config_node
        ]
        self._call_script(self.export_to_cache_opus_path, args=optional_args)

        exported_datasets = [
            os.path.split(f)[1]
            for f in glob(os.path.join(self.temp_dir, export_year) + '/*')
        ]
        self.assertEqual(set(exported_datasets), set([table_name]))

        org_dir = os.path.join(self.temp_dir, str(self.year))
        self._two_caches_are_identical(org_dir,
                                       exp_dir,
                                       table_names=[table_name])

        db.close()
        self.db_server.drop_database(self.test_db)
        rmtree(exp_dir)

    def _call_script(self, opus_path, args):
        Popen(" %s %s %s" %
              (sys.executable, module_path_from_opus_path(opus_path),
               ' '.join(args)),
              shell=True).communicate()

    def _two_caches_are_identical(self, cache_a, cache_b, table_names=None):
        """ Check to see if two caches contains identical datasets 
        even though their data types can be different
        """
        if table_names is None:
            table_names = os.listdir(cache_a)
        for table_name in table_names:
            field_names_a = glob(os.path.join(cache_a, table_name) + '/*')
            field_names_b = glob(os.path.join(cache_b, table_name) + '/*')
            self.assertEqual(len(field_names_a), len(field_names_b))
            field_names_a.sort()
            field_names_b.sort()
            [
                self.assertTrue(cmp(f_a, f_b))
                for f_a, f_b in zip(field_names_a, field_names_b)
            ]
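Note that cmp(f_a, f_b) above compares the two path strings, which come from different cache directories and therefore always differ, so the check mostly confirms that both listings have the same length. A stricter check could compare the attribute names themselves; a small sketch, assuming the usual Opus convention that field files are named <attribute>.<type extension> (the helper below is hypothetical, not part of the original code):

import os

def same_attribute_names(field_names_a, field_names_b):
    # Compare attribute names only (the file stem), ignoring the type-dependent
    # extension, since the two caches may store the same data with different dtypes.
    stems = lambda paths: sorted(os.path.basename(p).split('.')[0] for p in paths)
    return stems(field_names_a) == stems(field_names_b)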
Example #26
0
def add_runs_to_services_db_from_disk(projects=None):

    server_config = ServicesDatabaseConfiguration()

    if server_config.protocol == 'sqlite':
        datapath = paths.OPUS_DATA_PATH

        for project_name in os.listdir(datapath):
            if projects is not None and project_name not in projects: continue
            if not os.path.isdir(os.path.join(datapath, project_name)): continue

            os.environ['OPUSPROJECTNAME'] = project_name
            server = DatabaseServer(server_config)
            server.drop_database(database_name='run_activity')
            server.close()

            run_manager = RunManager(server_config)

            baseyear_directory = os.path.join(datapath, project_name, 'base_year_data')

            if os.path.exists(baseyear_directory):
                years = []
                for dir_name in os.listdir(baseyear_directory):
                    if len(dir_name) == 4 and dir_name.isdigit():
                        years.append(int(dir_name))
                start_year = min(years)
                end_year = max(years)
                run_name = 'base_year_data'
                run_id = run_manager._get_new_run_id()
                resources = {
                    'cache_directory': baseyear_directory,
                    'description': 'base year data',
                    'years': (start_year, end_year)
                }
                logger.log_status('Adding run %s of project %s to run_activity table' % (run_name, project_name))
                run_manager.add_row_to_history(run_id=run_id,
                                               resources=resources,
                                               status='done',
                                               run_name=run_name)

            data_directory = os.path.join(datapath, project_name, 'runs')
            if not os.path.exists(data_directory): continue

            for run_name in os.listdir(data_directory):
                try:
                    cache_directory = os.path.join(data_directory, run_name)
                    if not os.path.isdir(cache_directory): continue

                    years = []
                    for dir_name in os.listdir(cache_directory):
                        if len(dir_name) == 4 and dir_name.isdigit():
                            years.append(int(dir_name))
                    start_year = min(years)
                    end_year = max(years)
                    run_id = run_manager._get_new_run_id()
                    resources = {
                        'cache_directory': cache_directory,
                        'description': '',
                        'years': (start_year, end_year)
                    }
                    logger.log_status('Adding run %s of project %s to run_activity table' % (run_name, project_name))
                    run_manager.add_row_to_history(run_id=run_id,
                                                   resources=resources,
                                                   status='done',
                                                   run_name=run_name)
                except:
                    # Skip run directories that cannot be read or contain no year subdirectories.
                    pass
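A hypothetical invocation of the function above (the project name is illustrative, not taken from the source; the call assumes a SQLite-backed ServicesDatabaseConfiguration and project directories under paths.OPUS_DATA_PATH):

# Register the runs of a single project:
add_runs_to_services_db_from_disk(projects=['my_project'])

# Or scan every project directory found on disk:
add_runs_to_services_db_from_disk()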
Example #27
0
class TestConvertDatabases(opus_unittest.OpusTestCase):
    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1',
            'convert_database_test_db2',
        ]

        self.test_table_names = [
            'table1',
            'table2',
            'table3',
        ]

        table_structure = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"No opus dot.","constant"),'
            '(2,"opus.core.miscellaneous","No opus dot."),'
            '(3,"opus.urbansim.gridcell.percent_water",'
            '"gc_cbd = gridcell.disaggregate('
            'opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"),'
            '(4,"","")')

        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "No opus dot.", "constant"],
            [2, "opus.core.miscellaneous", "No opus dot."],
            [
                3, "opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                "opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"
            ], [4, "", ""]
        ]

        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "No opus dot.", "constant"],
            [2, "opus.core.miscellaneous", "No opus dot."],
            [
                3, "opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                "psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"
            ], [4, "", ""]
        ]

        insert_items_template = ("insert into %(table)s values %(data)s;")

        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]

        self.config = {
            'databases': self.test_db_names,
            'tables': table_list,
            'backup': True,
            'backup_postfix': '_old',
        }

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))

        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]

        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' %
                           (table_name, table_structure))

                db.DoQuery(insert_items_template % {
                    'table': table_name,
                    'data': table_data
                })

    def tearDown(self):
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)

        for db in self.dbs:
            db.close()

        self.db_server.close()

    def test_convert_table(self):
        ConvertDatabase().convert_table(self.dbs[0], self.test_table_names[0])

        db = self.dbs[0]

        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(
            results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." %
            (table0, self.expected_output_changed, results))

        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (table, self.expected_output_unchanged, results))

    def test_convert_table_backup(self):
        db = self.dbs[0]
        table = self.test_table_names[0]

        ConvertDatabase().convert_table(db,
                                        table,
                                        backup=True,
                                        backup_postfix='_old')

        results = db.GetResultsFromQuery('select * from %s_old;' % table)
        self.assert_(
            results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." %
            (table, self.expected_output_unchanged, results))

    def test_convert_database(self):
        ConvertDatabase().convert_database(self.dbs[0],
                                           self.test_table_names[0:2])

        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." %
                (table, self.expected_output_changed, results))

        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        for i in range(len(self.dbs[1:])):
            db = self.dbs[i + 1]

            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (i, table, self.expected_output_unchanged, results))

    def test_convert_databases(self):
        ConvertDatabase().convert_databases(
            TestDatabaseConfiguration(protocol='mysql'), self.config)

        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)

            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                    " Expected %s. Recieved %s." %
                    (db_name, table, self.expected_output_changed, results))
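ConvertDatabase's actual rewrite rule is not shown in this test; comparing expected_output_unchanged with expected_output_changed, the conversion strips a leading "opus." package qualifier from dotted variable names in the variable_name column. One substitution consistent with those fixtures (an assumption for illustration, not the real implementation) is sketched below:

import re

def strip_opus_prefix(expression):
    # Drop a leading 'opus.' qualifier from fully dotted variable names, e.g.
    # 'opus.psrc.zone.generalized_cost_...' -> 'psrc.zone.generalized_cost_...'.
    return re.sub(r'\bopus\.(?=\w+\.)', '', expression)

assert strip_opus_prefix(
    "gc_cbd = gridcell.disaggregate("
    "opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"
) == ("gc_cbd = gridcell.disaggregate("
      "psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)")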
Example #28
0
class RunManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_setup_run(self):
        base_directory = tempfile.mkdtemp(prefix='opus_tmp')
        run_name = 'test_scenario_name'
        run_manager = RunManager(self.config)

        run_manager.setup_new_run(cache_directory = os.path.join(base_directory, run_name),
                                  configuration = {})
        resulting_cache_directory = run_manager.get_current_cache_directory()
        self.assertTrue(resulting_cache_directory.find(run_name)>-1)
        self.assertEquals(os.path.dirname(resulting_cache_directory), base_directory)
        self.assertTrue(run_manager.ready_to_run)
        self.assertTrue(not os.path.exists(resulting_cache_directory))
        run_manager.services_db.close()
        os.rmdir(base_directory)

    def test_add_row_to_history(self):
        run_manager = RunManager(self.config)
        cache_directory = tempfile.mkdtemp(prefix='opus_tmp')
        resources = {'cache_directory':cache_directory,
                     'description':'test_run',
                     'base_year':2000,
                     'project_name': 'test'}

        run_manager.add_row_to_history(run_id = 1,
                                       resources = resources,
                                       status = 'done')

        db = self.db_server.get_database(self.database_name)
        run_activity_table = db.get_table('run_activity')

        s = select([run_activity_table.c.run_description,
                    run_activity_table.c.status],
                    whereclause = run_activity_table.c.run_id == 1)

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        run_name, status = results[0]
        self.assertEqual(status, 'done')
        self.assertEqual(run_name, 'test_run')

        run_manager.services_db.close()
        os.rmdir(cache_directory)

    def test_get_runs(self):
        from numpy.random import randint
        run_manager = RunManager(self.config)
        run_ids = range(1, 11)
        run_names = ['run ' + str(id) for id in run_ids]
        resources = {'cache_directory':None}
        status = ['done', 'failed'] * 5
        for idx, run_id in enumerate(run_ids):
            run_manager.add_row_to_history(run_id = run_id,
                                           resources = resources,
                                           status = status[idx],
                                           run_name = run_names[idx])
        results = run_manager.get_runs(return_columns = ['run_name'], run_id=5)
        expected = [('run 5',)]
        self.assertEqual(results, expected)
        results = run_manager.get_runs(return_columns = ['run_id'], status='done')
        expected = [(1,),(3,), (5,), (7,), (9,)]
        self.assertEqual(results, expected)
        
        results = run_manager.get_runs(return_columns = ['run_name'], return_rs=True, run_id=5)
        expected = 'run 5'
        self.assertEqual(results.fetchone()['run_name'], expected)
        results = run_manager.get_runs(return_columns = ['run_id'], return_rs=True, status='done')
        results = [rs['run_id'] for rs in results]
        expected = [1,3,5,7,9]
        self.assertEqual(results, expected)
class AbstractFunctionalTest(object):
    protocol = ''
    def setUp(self):
        self.db_config = TestDatabaseConfiguration(protocol = self.protocol)
        self.db_config_node = self.db_config._database_configuration_node()
        self.db_server = DatabaseServer(self.db_config)

        self.test_db = 'OpusDatabaseTestDatabase'
        
        self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
        self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
        self.year = 1000
        
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
        self.test_data = {
            self.year:{
                'table_a':{
                    'tablea_id':array([1,2,3]),
                    'tablea_id_name': array(['1','2','3']),
                    'value1': array([1.0, 2.001, 3], dtype='float'),
                    'value2': array([True, False, False], dtype='i'),  ## sqlite has problems handling the bool type
                    },
                'table_b':{
                    'tableb_id':array([1,2,3]),
                    'tableb_id_name': array(['one','two','three']),
                    'value3': array([1.0, 2.001, 3], dtype='float'),
                    },
                },
            }
        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir, self.test_data)
                
    def tearDown(self):
        if self.db_server.has_database(self.test_db):
            self.db_server.drop_database(self.test_db)
        self.db_server.close()
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def test_export_all_tables(self):
        logger.log_status("Test export all tables for %s with %s" % (self.protocol, self.__class__))
        optional_args = ['-c', os.path.join(self.temp_dir, str(self.year)), 
                         '-d', self.test_db, 
                         '--database_configuration=%s' % self.db_config_node ]
        self._call_script(self.export_from_cache_opus_path,
                          args = optional_args)

        
        self.assertTrue(self.db_server.has_database(database_name = self.test_db))
        db = OpusDatabase(database_server_configuration = self.db_config, 
                          database_name = self.test_db)
        
        table_names = self.test_data[self.year].keys()
        existing_tables = db.get_tables_in_database()        
        self.assertEqual( set(existing_tables), set(table_names) )

        ## export data from db to cache
        export_year = str(self.year + 100)        
        exp_dir = os.path.join(self.temp_dir, export_year)

        optional_args = ['-d', self.test_db, 
                         '-c', self.temp_dir, 
                         '-y', export_year,
                         '--database_configuration=%s' % self.db_config_node ]
        self._call_script(self.export_to_cache_opus_path,
                          args = optional_args)

        exported_datasets = [os.path.split(f)[1] for f in glob(exp_dir + '/*')]
        self.assertEqual( set(exported_datasets), set(table_names))
        
        org_dir = os.path.join(self.temp_dir, str(self.year))
        self._two_caches_are_identical(org_dir, exp_dir)

        db.close()
        self.db_server.drop_database(self.test_db)
        rmtree(exp_dir)
        
    def test_export_one_table(self):
        logger.log_status("Test export single table for %s with %s" % (self.protocol, self.__class__))
        for table_name in self.test_data[self.year].keys():
            self._test_export_one_table(table_name)
            
    def _test_export_one_table(self, table_name):
        optional_args = ['-c', os.path.join(self.temp_dir, str(self.year)), 
                         '-d', self.test_db,
                         '-t', table_name,
                         '--database_configuration=%s' % self.db_config_node ]
        self._call_script(self.export_from_cache_opus_path,
                          args = optional_args)
        
        self.assertTrue(self.db_server.has_database(database_name = self.test_db))
        db = OpusDatabase(database_server_configuration = self.db_config, 
                          database_name = self.test_db)
        existing_tables = db.get_tables_in_database()
        self.assertEqual( set(existing_tables), set([table_name]) )
        
        export_year = str(self.year + 100)        
        exp_dir = os.path.join(self.temp_dir, export_year)
        
        optional_args = ['-d', self.test_db, 
                         '-c', self.temp_dir, 
                         '-y', export_year,
                         '-t', table_name,
                         '--database_configuration=%s' % self.db_config_node ]
        self._call_script(self.export_to_cache_opus_path,
                          args = optional_args)

        exported_datasets = [os.path.split(f)[1] for f in glob(os.path.join(self.temp_dir, export_year) + '/*')]
        self.assertEqual( set(exported_datasets), set([table_name]))
        
        org_dir = os.path.join(self.temp_dir, str(self.year))
        self._two_caches_are_identical(org_dir, exp_dir, table_names=[table_name])

        db.close()
        self.db_server.drop_database(self.test_db)
        rmtree(exp_dir)

    def _call_script(self, opus_path, args):
        Popen(" %s %s %s" % (sys.executable,
                             module_path_from_opus_path(opus_path),
                             ' '.join(args)),
              shell=True).communicate()

    def _two_caches_are_identical(self, cache_a, cache_b, table_names=None):
        """ Check to see if two caches contains identical datasets 
        even though their data types can be different
        """
        if table_names is None:
            table_names = os.listdir(cache_a)
        for table_name in table_names:
            field_names_a = glob(os.path.join(cache_a, table_name) + '/*')
            field_names_b = glob(os.path.join(cache_b, table_name) + '/*')
            self.assertEqual(len(field_names_a), len(field_names_b))
            field_names_a.sort()
            field_names_b.sort()
            for f_a, f_b in zip(field_names_a, field_names_b):
                self.assertTrue(cmp(f_a, f_b))
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    def setUp(self):        
        self.databases = ['db_chain_son', 'db_chain_dad', 'db_chain_granddad']
        
        self.config = TestDatabaseConfiguration(protocol = get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        self.db_chain_granddad = self.server.get_database('db_chain_granddad')
        self.db_chain_dad = self.server.get_database('db_chain_dad')
        self.db_chain_son = self.server.get_database('db_chain_son')
        
        self._create_tables()
        self.granddad_schema = self.db_chain_granddad.get_table('base_schema')
        self.dad_schema = self.db_chain_dad.get_table('base_schema')
        self.granddad_schema2 = self.db_chain_granddad.get_table('base_schema2')
        self.son_schema2 = self.db_chain_son.get_table('base_schema2')
        
        self._seed_values()
        
    def _create_databases(self):
        
        for db in self.databases:
            try: 
                self.server.drop_database(db)
            except:
                pass
            
            self.server.create_database(db)
        
    def _create_tables(self):
        self.db_chain_granddad.create_table_from_schema('base_schema', base_schema)
        self.db_chain_granddad.create_table_from_schema('base_schema2', base_schema2)
        self.db_chain_granddad.create_table_from_schema('scenario_information', base_scenario_information_schema)
        
        self.db_chain_dad.create_table_from_schema('base_schema', base_schema)
        self.db_chain_dad.create_table_from_schema('scenario_information', base_scenario_information_schema)

        self.db_chain_son.create_table_from_schema('base_schema2', base_schema2)
        self.db_chain_son.create_table_from_schema('scenario_information', base_scenario_information_schema)
        
    def _seed_values(self):
        u = self.db_chain_granddad.get_table('scenario_information').insert(
              values = {
                self.db_chain_granddad.get_table('scenario_information').c.parent_database_url:''})   
        self.db_chain_granddad.execute(u)
        
        u = self.db_chain_dad.get_table('scenario_information').insert(
              values = {
                self.db_chain_dad.get_table('scenario_information').c.parent_database_url:'db_chain_granddad'})

        self.db_chain_dad.execute(u)
        u = self.db_chain_son.get_table('scenario_information').insert(
              values = {
                self.db_chain_son.get_table('scenario_information').c.parent_database_url:'db_chain_dad'})   
        self.db_chain_son.execute(u)
        
        granddad_vals = [
            {'integer_col': 0, 'clob_col': '0', 'smallinteger_col': 0, 'float_col': 0.0},                      
            {'integer_col': 2, 'clob_col': '2', 'smallinteger_col': 2, 'float_col': 2.0},                          
            {'integer_col': 4, 'clob_col': '4', 'smallinteger_col': 4, 'float_col': 4.0}                                         
        ]

        dad_vals = [
            {'integer_col': 0, 'clob_col': '0', 'smallinteger_col': 0, 'float_col': 0.0},  
            {'integer_col': 1, 'clob_col': '1', 'smallinteger_col': 1, 'float_col': 1.0},                   
            {'integer_col': 2, 'clob_col': '2', 'smallinteger_col': 2, 'float_col': 2.0},  
            {'integer_col': 3, 'clob_col': '3', 'smallinteger_col': 3, 'float_col': 3.0},                        
            {'integer_col': 4, 'clob_col': '4', 'smallinteger_col': 4, 'float_col': 4.0}                                         
        ]    
        
        granddad_vals2 = [
            {'integer_col': 0, 'varchar_col': '0', 'boolean_col': True, 'numeric_col': 0.0},                      
            {'integer_col': 2, 'varchar_col': '2', 'boolean_col': True, 'numeric_col': 2.0},                          
            {'integer_col': 4, 'varchar_col': '4', 'boolean_col': True, 'numeric_col': 4.0}                                         
        ]

        son_vals2 = [
            {'integer_col': 0, 'varchar_col': '0', 'boolean_col': False, 'numeric_col': 0.0},                      
            {'integer_col': 4, 'varchar_col': '4', 'boolean_col': False, 'numeric_col': 4.0}                                         
        ]
        
        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(), granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(), granddad_vals2)        
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)
                     
    def tearDown(self):
        self.db_chain_granddad.close()
        self.db_chain_dad.close()
        self.db_chain_son.close()
        
        self.server.drop_database('db_chain_granddad')
        self.server.drop_database('db_chain_dad')
        self.server.drop_database('db_chain_son')
        
        self.server.close()
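The three databases are chained through scenario_information.parent_database_url (db_chain_son -> db_chain_dad -> db_chain_granddad, with the granddad's parent left empty). A minimal sketch of resolving such a chain, using a plain dict in place of the scenario_information lookups:

def resolve_chain(start, parent_of):
    # Walk parent links until a database with an empty parent URL is reached.
    chain = [start]
    while parent_of.get(chain[-1]):
        chain.append(parent_of[chain[-1]])
    return chain

parent_of = {'db_chain_son': 'db_chain_dad',
             'db_chain_dad': 'db_chain_granddad',
             'db_chain_granddad': ''}
assert resolve_chain('db_chain_son', parent_of) == [
    'db_chain_son', 'db_chain_dad', 'db_chain_granddad']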
        
Example #31
0
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    def setUp(self):
        self.databases = ['db_chain_son', 'db_chain_dad', 'db_chain_granddad']

        self.config = TestDatabaseConfiguration(
            protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        self.db_chain_granddad = self.server.get_database('db_chain_granddad')
        self.db_chain_dad = self.server.get_database('db_chain_dad')
        self.db_chain_son = self.server.get_database('db_chain_son')

        self._create_tables()
        self.granddad_schema = self.db_chain_granddad.get_table('base_schema')
        self.dad_schema = self.db_chain_dad.get_table('base_schema')
        self.granddad_schema2 = self.db_chain_granddad.get_table(
            'base_schema2')
        self.son_schema2 = self.db_chain_son.get_table('base_schema2')

        self._seed_values()

    def _create_databases(self):

        for db in self.databases:
            try:
                self.server.drop_database(db)
            except:
                pass

            self.server.create_database(db)

    def _create_tables(self):
        self.db_chain_granddad.create_table_from_schema(
            'base_schema', base_schema)
        self.db_chain_granddad.create_table_from_schema(
            'base_schema2', base_schema2)
        self.db_chain_granddad.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

        self.db_chain_dad.create_table_from_schema('base_schema', base_schema)
        self.db_chain_dad.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

        self.db_chain_son.create_table_from_schema('base_schema2',
                                                   base_schema2)
        self.db_chain_son.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

    def _seed_values(self):
        u = self.db_chain_granddad.get_table('scenario_information').insert(
            values={
                self.db_chain_granddad.get_table('scenario_information').c.parent_database_url:
                ''
            })
        self.db_chain_granddad.execute(u)

        u = self.db_chain_dad.get_table('scenario_information').insert(
            values={
                self.db_chain_dad.get_table('scenario_information').c.parent_database_url:
                'db_chain_granddad'
            })

        self.db_chain_dad.execute(u)
        u = self.db_chain_son.get_table('scenario_information').insert(
            values={
                self.db_chain_son.get_table('scenario_information').c.parent_database_url:
                'db_chain_dad'
            })
        self.db_chain_son.execute(u)

        granddad_vals = [{
            'integer_col': 0,
            'clob_col': '0',
            'smallinteger_col': 0,
            'float_col': 0.0
        }, {
            'integer_col': 2,
            'clob_col': '2',
            'smallinteger_col': 2,
            'float_col': 2.0
        }, {
            'integer_col': 4,
            'clob_col': '4',
            'smallinteger_col': 4,
            'float_col': 4.0
        }]

        dad_vals = [{
            'integer_col': 0,
            'clob_col': '0',
            'smallinteger_col': 0,
            'float_col': 0.0
        }, {
            'integer_col': 1,
            'clob_col': '1',
            'smallinteger_col': 1,
            'float_col': 1.0
        }, {
            'integer_col': 2,
            'clob_col': '2',
            'smallinteger_col': 2,
            'float_col': 2.0
        }, {
            'integer_col': 3,
            'clob_col': '3',
            'smallinteger_col': 3,
            'float_col': 3.0
        }, {
            'integer_col': 4,
            'clob_col': '4',
            'smallinteger_col': 4,
            'float_col': 4.0
        }]

        granddad_vals2 = [{
            'integer_col': 0,
            'varchar_col': '0',
            'boolean_col': True,
            'numeric_col': 0.0
        }, {
            'integer_col': 2,
            'varchar_col': '2',
            'boolean_col': True,
            'numeric_col': 2.0
        }, {
            'integer_col': 4,
            'varchar_col': '4',
            'boolean_col': True,
            'numeric_col': 4.0
        }]

        son_vals2 = [{
            'integer_col': 0,
            'varchar_col': '0',
            'boolean_col': False,
            'numeric_col': 0.0
        }, {
            'integer_col': 4,
            'varchar_col': '4',
            'boolean_col': False,
            'numeric_col': 4.0
        }]

        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(),
                                              granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(),
                                              granddad_vals2)
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)

    def tearDown(self):
        self.db_chain_granddad.close()
        self.db_chain_dad.close()
        self.db_chain_son.close()

        self.server.drop_database('db_chain_granddad')
        self.server.drop_database('db_chain_dad')
        self.server.drop_database('db_chain_son')

        self.server.close()
Example #32
0
class TestRenameBySwappingEmploymentAndCommercialOrIndustrialForElcm(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_rename_commercial_and_industrial'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
        
        self.tables_to_convert = (
            'employment_commercial_location_choice_model_specification',
            'employment_commercial_location_choice_model_coefficients',
            'employment_industrial_location_choice_model_specification',
            'employment_industrial_location_choice_model_coefficients',
            )
            
        self.other_tables = (
            'i_am_not_to_be_renamed_location_choice_model_specifications',
            'i_am_not_to_be_renamed_location_choice_model_coefficients',
            'employment_industrial_location_choice_model_coefficients_old',
            )
        
        for table in self.tables_to_convert + self.other_tables:
            self.db.DoQuery('CREATE TABLE %s (id INT);' % table)
            
        self.output_tables = (
            'commercial_employment_location_choice_model_specification',
            'commercial_employment_location_choice_model_coefficients',
            'industrial_employment_location_choice_model_specification',
            'industrial_employment_location_choice_model_coefficients',
            )
        
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        for table in self.tables_to_convert + self.other_tables:
            if not self.db.table_exists(table):
                self.fail('Expected test table %s did not exist. (Check setUp)' 
                    % table)
                    
        for table in self.output_tables:
            if self.db.table_exists(table):
                self.fail('Unexpected results table %s exists. (Check setUp)' 
                    % table)
        
        
    def test_rename_tables(self):
        r = RenameBySwappingEmploymentAndCommercialOrIndustrialOrHomeBasedForElcm()
        r.rename_by_swapping_employment_and_commercial_or_industrial_or_home_based_for_elcm(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        for table in self.output_tables + self.other_tables:
            if not self.db.table_exists(table):
                self.fail('Expected output table %s does not exist.' % table)
                    
        for table in self.tables_to_convert:
            if self.db.table_exists(table):
                self.fail('Input table %s still exists.' % table)
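A rename rule consistent with tables_to_convert, output_tables, and other_tables above (an illustration of the expected mapping, not the class's actual implementation) can be sketched as a single regular expression:

import re

def swap_employment_prefix(table_name):
    # 'employment_commercial_...' -> 'commercial_employment_...' (likewise for
    # 'industrial'); names that do not match exactly are returned unchanged,
    # including the '_old' backup table and the '_specifications' variants.
    return re.sub(r'^employment_(commercial|industrial)'
                  r'(_location_choice_model_(?:specification|coefficients))$',
                  r'\1_employment\2', table_name)

assert swap_employment_prefix(
    'employment_commercial_location_choice_model_specification'
) == 'commercial_employment_location_choice_model_specification'
assert swap_employment_prefix(
    'employment_industrial_location_choice_model_coefficients_old'
) == 'employment_industrial_location_choice_model_coefficients_old'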
Example #33
0
    def tearDown(self):
        # Turn off the logger, so we can delete the cache directory.
        logger.disable_all_file_logging()
        db_server = DatabaseServer(self.config)
        db_server.drop_database('eugene_services_test')
        db_server.close()
Example #34
0
class TestCombineTables(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_name = 'test_combine_tables'
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
        
        self.from_tables = (
            ('table_a', 'A'), 
            ('table_b', 'B'),
            ('table_c', 'C'),
            ('table_d', 'D'),
            )
        table_ids = {
            'table_a': [1], 
            'table_b': [2,3],
            'table_c': [4,5,6,7],
            'table_d': [8,9,10,11,12,13,14,15],
            }
        self.to_table = 'to_table'
        
        for table, type in self.from_tables:
            self.db.DoQuery('CREATE TABLE %s (job_id INT, sector_id INT, '
                'grid_id INT, sic INT, building_type varchar(5), '
                'home_based tinyint(4), impute_flag tinyint(4));' 
                    % table)

            values = ','.join(
                ['(%(j)s, %(j)s, %(j)s, %(j)s, "%(type)s", %(j)s, %(j)s)' 
                    % {'j':j, 'type':type} for j in table_ids[table]]
                )
            self.db.DoQuery('INSERT INTO %(table_name)s (job_id, sector_id, '
                'grid_id, sic, building_type, home_based, impute_flag) VALUES '
                '%(values)s;' 
                    % {'table_name':table,
                       'values':values,
                      }
                )
            number_rows = self.db.GetResultsFromQuery('SELECT count(*) FROM %s' % table)[1][0]
            self.assertEqual(number_rows, len(table_ids[table]))
                        
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        for table, type in self.from_tables:
            try:
                self.db.DoQuery('select * from %s;' % table)
            except:
                self.fail('Expected input table %s did not exist. (Check setUp)'
                    % table)
                    
        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
            self.fail('Output table %s already exists. (Check setUp)' 
                % self.to_table)
        except: pass
        
    
    def test_create_table(self):
        CombineTables().combine_tables(TestDatabaseConfiguration(protocol='mysql'), self.db_name, 
            [i[0] for i in self.from_tables], 
            self.to_table)
        
        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except: 
            self.fail('Output table %s not created.' % self.to_table)
        
    
    def test_combine_tables(self):
        CombineTables().combine_tables(TestDatabaseConfiguration(protocol='mysql'), self.db_name, 
            [i[0] for i in self.from_tables], 
            self.to_table)
        
        expected_rows = 0
        for table, type in self.from_tables:
            count = self.db.GetResultsFromQuery('select count(*) from %s' 
                % table)[1:][0][0]
            expected_rows += count
        
        try:
            count = self.db.GetResultsFromQuery('select count(*) from %s;' 
                % self.to_table)[1:][0][0]
        except:
            self.fail('Expected output table %s does not exist.' % self.to_table)

        if (expected_rows != count):
            for table_name, type in self.from_tables:
                self._print_table(table_name)
            self._print_table(self.to_table)
            
        self.assert_(expected_rows == count,
            'Incorrect number of rows in output. Expected %s; received %s.'
                % (expected_rows, count))

    def _print_table(self, table_name):
        """Provide debugging info to figure out why the above test is failing, sometimes."""
        try:
            results = self.db.GetResultsFromQuery('select * from %s' % table_name)
            logger.start_block('Contents of table %s' % table_name)
            try:
                for row in results:
                    logger.log_status(row)
            finally:
                logger.end_block()
        except:
            logger.log_status('Error accessing table %s' % table_name)
            logger.log_stack_trace()
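The SQL issued by CombineTables is not shown in this test; one statement that would produce the expected result on MySQL (concatenating the rows of all input tables into a new output table, assuming they share an identical column layout) can be sketched as:

def combine_tables_sql(from_tables, to_table):
    # Build a CREATE TABLE ... SELECT that appends the rows of every input table.
    selects = ' UNION ALL '.join('SELECT * FROM %s' % t for t in from_tables)
    return 'CREATE TABLE %s %s;' % (to_table, selects)

# CREATE TABLE to_table SELECT * FROM table_a UNION ALL SELECT * FROM table_b ...
print(combine_tables_sql(['table_a', 'table_b', 'table_c', 'table_d'], 'to_table'))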