Пример #1
0
class AbstractServiceTests(opus_unittest.OpusTestCase):
    """Checks that AbstractService creates the services tables on startup."""

    def setUp(self):
        # Dedicated test database; a fresh server connection per test.
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        # Remove the scratch database, then release the connection.
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        database = self.db_server.get_database(self.database_name)
        # A pre-existing database starts without the services tables.
        self.assertFalse(database.table_exists('run_activity'))
        self.assertFalse(database.table_exists('computed_indicators'))

        service = AbstractService(self.config)
        service.services_db.close()
        # Instantiating the service adds the services tables in place.
        self.assertTrue(database.table_exists('run_activity'))
        self.assertTrue(database.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        service = AbstractService(self.config)
        service.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        database = self.db_server.get_database(self.database_name)
        self.assertTrue(database.table_exists('run_activity'))
        self.assertTrue(database.table_exists('computed_indicators'))
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        """Initialize global simulation state, the session configuration and
        the baseyear cache, and make sure the estimation output database exists.

        run_configuration -- dict-like run configuration; wrapped in Resources.
        cache_directory -- optional base directory for the simulation cache.
        """
        self.config = Resources(run_configuration)
        # start_time falls back to 0 when 'base_year' is absent from the config.
        self.simulation_state = SimulationState(
            new_instance=True, base_cache_dir=cache_directory, start_time=self.config.get("base_year", 0)
        )

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config["cache_directory"] is None:
            self.config["cache_directory"] = self.simulation_state.get_cache_directory()

        # Resets the global session singleton for this simulation run.
        SessionConfiguration(
            new_instance=True,
            package_order=self.config["dataset_pool_configuration"].package_order,
            in_storage=AttributeCache(),
        )

        # Build the baseyear cache in a separate forked process.
        ForkProcess().fork_new_process(
            self.config["creating_baseyear_cache_configuration"].cache_scenario_database, self.config
        )

        # Create output database (normally done by run manager)
        if "estimation_database_configuration" in self.config:
            db_server = DatabaseServer(self.config["estimation_database_configuration"])
            if not db_server.has_database(self.config["estimation_database_configuration"].database_name):
                db_server.create_database(self.config["estimation_database_configuration"].database_name)
Пример #3
0
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        """Initialize global simulation state, the session configuration and
        the baseyear cache, and make sure the estimation output database exists.

        run_configuration -- dict-like run configuration; wrapped in Resources.
        cache_directory -- optional base directory for the simulation cache.
        """
        self.config = Resources(run_configuration)
        # NOTE(review): unlike the sibling variant, no start_time is passed
        # here, so SimulationState uses its own default.
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config[
                'cache_directory'] = self.simulation_state.get_cache_directory(
                )

        # Resets the global session singleton for this simulation run.
        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].
            package_order,
            in_storage=AttributeCache())

        # Build the baseyear cache in a separate forked process.
        ForkProcess().fork_new_process(
            self.config['creating_baseyear_cache_configuration'].
            cache_scenario_database, self.config)

        # Create output database (normally done by run manager)
        if 'estimation_database_configuration' in self.config:
            db_server = DatabaseServer(
                self.config['estimation_database_configuration'])
            if not db_server.has_database(
                    self.config['estimation_database_configuration'].
                    database_name):
                db_server.create_database(
                    self.config['estimation_database_configuration'].
                    database_name)
Пример #4
0
    def setUp(self):
        """Create a fresh test database on every installed database engine.

        Engines whose server cannot be reached are reported with a warning
        and skipped, so the remaining engines can still be exercised.
        """
        db_configs = []
        for engine in _get_installed_database_engines():
            config = TestDatabaseConfiguration(protocol=engine)
            db_configs.append(config)

        self.test_db = "OpusDatabaseTestDatabase"
        test_table = "test_table"

        self.dbs = []
        for config in db_configs:
            try:
                server = DatabaseServer(config)
                # Start from a clean slate: drop any leftover test database.
                if server.has_database(self.test_db):
                    server.drop_database(self.test_db)
                server.create_database(self.test_db)
                self.assertTrue(server.has_database(database_name=self.test_db))
                db = OpusDatabase(database_server_configuration=config, database_name=self.test_db)
                self.assertFalse(db.table_exists(test_table))
                self.dbs.append((db, server))
            except Exception:
                # Narrowed from a bare 'except:' so KeyboardInterrupt and
                # SystemExit are no longer swallowed; an unreachable engine
                # is logged but does not abort the whole fixture.
                import traceback

                traceback.print_exc()

                logger.log_warning("Could not start server for protocol %s" % config.protocol)
Пример #5
0
    def setUp(self):
        """Create a fresh test database on every installed database engine.

        Engines whose server cannot be reached are reported with a warning
        and skipped, so the remaining engines can still be exercised.
        """
        db_configs = []
        for engine in _get_installed_database_engines():
            config = TestDatabaseConfiguration(protocol=engine)
            db_configs.append(config)

        self.test_db = 'OpusDatabaseTestDatabase'
        test_table = 'test_table'

        self.dbs = []
        for config in db_configs:
            try:
                server = DatabaseServer(config)
                # Start from a clean slate: drop any leftover test database.
                if server.has_database(self.test_db):
                    server.drop_database(self.test_db)
                server.create_database(self.test_db)
                self.assertTrue(
                    server.has_database(database_name=self.test_db))
                db = OpusDatabase(database_server_configuration=config,
                                  database_name=self.test_db)
                self.assertFalse(db.table_exists(test_table))
                self.dbs.append((db, server))
            except Exception:
                # Narrowed from a bare 'except:' so KeyboardInterrupt and
                # SystemExit are no longer swallowed; an unreachable engine
                # is logged but does not abort the whole fixture.
                import traceback
                traceback.print_exc()

                logger.log_warning('Could not start server for protocol %s' %
                                   config.protocol)
Пример #6
0
        def setUp(self):
            """Create a fresh 'test_database' on every testable engine and
            keep an sql_storage handle for each (db, server) pair."""

            db_configs = []
            for engine in get_testable_engines():
                config = TestDatabaseConfiguration(protocol = engine)
                db_configs.append(config)

            self.database_name = 'test_database'
            self.dbs = []
            for config in db_configs:
                try:
                    server = DatabaseServer(config)
                    # Drop leftovers from earlier runs before recreating.
                    if server.has_database(self.database_name):
                        server.drop_database(self.database_name)
                    server.create_database(self.database_name)
                    self.assertTrue(server.has_database(database_name = self.database_name))
                    db = OpusDatabase(database_server_configuration = config, 
                                       database_name = self.database_name)
                    storage = sql_storage(
                                    storage_location = db
                                    )
                    self.dbs.append((db,server,storage))
                    self.storage = storage
                except:
                    # NOTE(review): bare 'except:' also traps SystemExit and
                    # KeyboardInterrupt; left unchanged in this doc-only pass.
                    import traceback
                    traceback.print_exc()

                    print 'WARNING: could not start server for protocol %s'%config.protocol
Пример #7
0
        def setUp(self):
            """Create a fresh 'test_database' on every testable engine and
            keep an sql_storage handle for each (db, server) pair."""

            db_configs = []
            for engine in get_testable_engines():
                config = TestDatabaseConfiguration(protocol=engine)
                db_configs.append(config)

            self.database_name = 'test_database'
            self.dbs = []
            for config in db_configs:
                try:
                    server = DatabaseServer(config)
                    # Drop leftovers from earlier runs before recreating.
                    if server.has_database(self.database_name):
                        server.drop_database(self.database_name)
                    server.create_database(self.database_name)
                    self.assertTrue(
                        server.has_database(database_name=self.database_name))
                    db = OpusDatabase(database_server_configuration=config,
                                      database_name=self.database_name)
                    storage = sql_storage(storage_location=db)
                    self.dbs.append((db, server, storage))
                    self.storage = storage
                except:
                    # NOTE(review): bare 'except:' also traps SystemExit and
                    # KeyboardInterrupt; left unchanged in this doc-only pass.
                    import traceback
                    traceback.print_exc()

                    print 'WARNING: could not start server for protocol %s' % config.protocol
Пример #8
0
class AbstractServiceTests(opus_unittest.OpusTestCase):
    """Verifies that AbstractService provisions the services database."""

    def setUp(self):
        # One scratch database and one live server connection per test.
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        # Drop the scratch database before closing the connection.
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        existing_db = self.db_server.get_database(self.database_name)
        # Tables must be absent before the service runs.
        self.assertFalse(existing_db.table_exists('run_activity'))
        self.assertFalse(existing_db.table_exists('computed_indicators'))

        svc = AbstractService(self.config)
        svc.services_db.close()
        # ... and present afterwards, without recreating the database.
        self.assertTrue(existing_db.table_exists('run_activity'))
        self.assertTrue(existing_db.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        svc = AbstractService(self.config)
        svc.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        created_db = self.db_server.get_database(self.database_name)
        self.assertTrue(created_db.table_exists('run_activity'))
        self.assertTrue(created_db.table_exists('computed_indicators'))
Пример #9
0
class Tests(opus_unittest.OpusTestCase):
    """Tests CreateBuildingTypesTable against a scratch MySQL database."""

    def setUp(self):
        # Recreate the scratch database from scratch for every test.
        self.db_name = 'test_create_table'
        config = TestDatabaseConfiguration(protocol = 'mysql')
        self.db_server = DatabaseServer(config)
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        # Close the handle first, then drop the database and disconnect.
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        # The fixture must not leave a 'building_types' table behind.
        self.assert_(not self.db.table_exists('building_types'))

    def test_create_table(self):
        # Running the creator adds the 'building_types' table.
        CreateBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        self.assert_(self.db.table_exists('building_types'))
Пример #10
0
    def create_storage(self):
        """Return a connection to the services database, creating the
        database and its tables (via elixir setup_all/create_all) if needed.

        Connection failures are logged and then re-raised to the caller.
        """
        try:
            server = DatabaseServer(self.server_config)
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # propagate untouched; the original error is still re-raised.
            logger.log_error(
                'Cannot connect to the database server that the services database is hosted on %s.'
                % self.server_config.database_name)
            raise

        if not server.has_database(self.server_config.database_name):
            server.create_database(self.server_config.database_name)

        try:
            services_db = server.get_database(self.server_config.database_name)
        except Exception:
            logger.log_error('Cannot connect to a services database on %s.' %
                             server.get_connection_string(scrub=True))
            raise

        # Bind the elixir metadata to this engine and materialize the tables.
        metadata.bind = services_db.engine
        setup_all()
        create_all()

        return services_db
Пример #11
0
class Tests(opus_unittest.OpusTestCase):
    """Exercises CreateBuildingTypesTable on a throwaway MySQL database."""

    def _mysql_config(self):
        # One fresh configuration object per call, as the original code did.
        return TestDatabaseConfiguration(protocol = 'mysql')

    def setUp(self):
        self.db_name = 'test_create_table'
        self.db_server = DatabaseServer(self._mysql_config())
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        # No 'building_types' table may exist right after the fixture runs.
        self.assert_(not self.db.table_exists('building_types'))

    def test_create_table(self):
        # The creator is expected to add the 'building_types' table.
        CreateBuildingTypesTable().create_building_types_table(
            self._mysql_config(), self.db_name)
        self.assert_(self.db.table_exists('building_types'))
Пример #12
0
    def run_run(self, run_resources, run_name = None, run_as_multiprocess=True, run_in_background=False):
        """check run hasn't already been marked running
           log it in to run_activity
           run simulation
           mark run as done/failed
           """

        if not self.ready_to_run:
            raise 'RunManager.setup_new_run must be execute before RunManager.run_run'

        if run_resources['cache_directory'] != self.current_cache_directory:
            raise 'The configuration and the RunManager conflict on the proper cache_directory'

        self.add_row_to_history(self.run_id, run_resources, "started", run_name = run_name)

        try:
            # Test pre-conditions
            model_system_class_path = run_resources.get('model_system', None)
            if model_system_class_path is None:
                raise TypeError, ("The configuration must specify model_system, the"
                    " full Opus path to the model system to be used.")

            # Create baseyear cache
            self.create_baseyear_cache(run_resources)

            # Create brand-new output database (deletes any prior contents)
            if 'estimation_database_configuration' in run_resources:
                db_server = DatabaseServer(run_resources['estimation_database_configuration'])
                if not db_server.has_database(run_resources['estimation_database_configuration'].database_name):
                    db_server.create_database(run_resources['estimation_database_configuration'].database_name)


            # Run simulation
            exec('from %s import ModelSystem' % model_system_class_path)

            model_system = ModelSystem()
            self.model_system = model_system

            if 'base_year' not in run_resources:
                run_resources['base_year'] = run_resources['years'][0] - 1

            self._create_seed_dictionary(run_resources)
#            model_system.run_in_same_process(run_resources)
            if run_as_multiprocess:
                model_system.run_multiprocess(run_resources)
            else:
                model_system.run_in_one_process(run_resources, run_in_background=run_in_background, class_path=model_system_class_path)

            self.model_system = None

        except:
            self.add_row_to_history(self.run_id, run_resources, "failed", run_name = run_name)
            self.ready_to_run = False
            raise # This re-raises the last exception
        else:
            self.add_row_to_history(self.run_id, run_resources, "done", run_name = run_name)

        self.ready_to_run = False
        return self.run_id
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    """Tests CreateJobBuildingTypesTable against a scratch MySQL database."""

    def setUp(self):
        # Recreate the scratch database for every test.
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql',))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        # Bug fix: self.fail() used to sit inside the try block, so the
        # AssertionError it raises was swallowed by the bare 'except' and
        # this test could never fail.  Probe first, then fail outside.
        # (Also fixes the 'job_building_tpes' typo in the message.)
        table_exists = True
        try:
            self.db.DoQuery('select * from job_building_types;')
        except Exception:
            table_exists = False
        if table_exists:
            self.fail('Output table job_building_types already exists. (Check setUp)')

    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)

        try:
            self.db.DoQuery('select * from job_building_types;')
        except Exception:
            self.fail('Expected output table job_building_types does not exist.')

    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)

        # Header row plus the four hard-coded building types.
        expected_results = [
            ['id', 'name', 'home_based'],
            [1, "commercial", 0],
            [3, "industrial", 0],
            [2, "governmental", 0],
            [4, "home_based", 1]
            ]

        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except Exception:
            self.fail('Expected output table job_building_types does not exist.')

        self.assert_(expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Пример #14
0
    def prepare_for_run(self, database_configuration, database_name):
        """Ensure *database_name* exists and return an sql_storage over it.

        The sql protocol, hostname, username and password are set in
        $OPUS_HOME/settings/database_server_setting.xml.
        """
        config = DatabaseConfiguration(
            database_name=database_name,
            database_configuration=database_configuration)
        server = DatabaseServer(config)
        # Create the database on first use.
        if not server.has_database(database_name):
            server.create_database(database_name)
        database = server.get_database(database_name)
        self.out_storage = sql_storage(storage_location=database)
        return self.out_storage
Пример #15
0
    def save_results(self, out_storage=None, model_name=None):
        """Write the estimated specification and coefficients to storage.

        out_storage -- optional storage to write to; when omitted, one is
            built from self.config['estimation_database_configuration'].
        model_name -- table-name prefix; falls back to
            self.config['model_name_for_coefficients'], then self.model_name.

        Results are always also written to the attribute cache.  Raises
        ValueError when specification/coefficients or model_name are missing.
        """
        if self.specification is None or self.coefficients is None:
            raise ValueError, "model specification or coefficient is None"

        #invalid = self.coefficients.is_invalid()
        # NOTE(review): the validity check above is disabled, so this branch
        # is intentionally unreachable.
        if False:
            logger.log_warning('Invalid coefficients. Not saving results!')
            return

        if model_name is None:
            model_name = self.config.get('model_name_for_coefficients', None)
            
        if model_name is None:
            if self.model_name is not None:
                model_name = self.model_name
            else:
                raise ValueError, "model_name unspecified"

        out_storage_available = True
        if out_storage:
            pass
        elif 'estimation_database_configuration' in self.config:
            try:
                db_server = DatabaseServer(self.config['estimation_database_configuration'])
                database_name = self.config["estimation_database_configuration"].database_name
    
                # Create the output database on first use.
                if not db_server.has_database(database_name):
                    db_server.create_database(database_name)
    
                output_db = db_server.get_database(database_name)
                out_storage = StorageFactory().get_storage(
                    type='sql_storage',
                    storage_location=output_db)
            except:
                # Best-effort: fall back to cache-only output on any failure.
                logger.log_warning("Problem with connecting database given by 'estimation_database_configuration'.")
                out_storage_available = False
        else:
            logger.log_warning("No estimation_database_configuration given.")
            out_storage_available = False

        # the original model name of development_project_lcm is too long as a mysql db table name, truncate it
        if model_name.rfind("_development_project_location_choice_model") >=0:
            model_name = model_name.replace('_project', '')
        specification_table = '%s_specification' % model_name
        coefficients_table = '%s_coefficients' % model_name
        if out_storage_available:
            logger.start_block("Writing specification and coefficients into storage given by 'estimation_database_configuration'")
            self.specification.write(out_storage=out_storage, out_table_name=specification_table)
            self.coefficients.write(out_storage=out_storage, out_table_name=coefficients_table)
            logger.end_block()
        logger.start_block("Writing specification and coefficients into %s" % AttributeCache().get_storage_location())
        self.specification.write(out_storage=AttributeCache(), out_table_name=specification_table)
        self.coefficients.write(out_storage=AttributeCache(), out_table_name=coefficients_table)
        logger.end_block()
Пример #16
0
    def save_results(self, out_storage=None, model_name=None):
        """Write the estimated specification and coefficients to storage.

        out_storage -- optional storage to write to; when omitted, one is
            built from self.config['estimation_database_configuration'].
        model_name -- table-name prefix; falls back to
            self.config['model_name_for_coefficients'], then self.model_name.

        Results are always also written to the attribute cache.  Raises
        ValueError when specification/coefficients or model_name are missing.
        """
        if self.specification is None or self.coefficients is None:
            raise ValueError, "model specification or coefficient is None"

        #invalid = self.coefficients.is_invalid()
        # NOTE(review): the validity check above is disabled, so this branch
        # is intentionally unreachable.
        if False:
            logger.log_warning('Invalid coefficients. Not saving results!')
            return

        if model_name is None:
            model_name = self.config.get('model_name_for_coefficients', None)
            
        if model_name is None:
            if self.model_name is not None:
                model_name = self.model_name
            else:
                raise ValueError, "model_name unspecified"

        out_storage_available = True
        if out_storage:
            pass
        elif 'estimation_database_configuration' in self.config:
            try:
                db_server = DatabaseServer(self.config['estimation_database_configuration'])
                database_name = self.config["estimation_database_configuration"].database_name
    
                # Create the output database on first use.
                if not db_server.has_database(database_name):
                    db_server.create_database(database_name)
    
                output_db = db_server.get_database(database_name)
                out_storage = StorageFactory().get_storage(
                    type='sql_storage',
                    storage_location=output_db)
            except:
                # Best-effort: fall back to cache-only output on any failure.
                logger.log_warning("Problem with connecting database given by 'estimation_database_configuration'.")
                out_storage_available = False
        else:
            logger.log_warning("No estimation_database_configuration given.")
            out_storage_available = False

        # the original model name of development_project_lcm is too long as a mysql db table name, truncate it
        if model_name.rfind("_development_project_location_choice_model") >=0:
            model_name = model_name.replace('_project', '')
        specification_table = '%s_specification' % model_name
        coefficients_table = '%s_coefficients' % model_name
        if out_storage_available:
            logger.start_block("Writing specification and coefficients into storage given by 'estimation_database_configuration'")
            self.specification.write(out_storage=out_storage, out_table_name=specification_table)
            self.coefficients.write(out_storage=out_storage, out_table_name=coefficients_table)
            logger.end_block()
        logger.start_block("Writing specification and coefficients into %s" % AttributeCache().get_storage_location())
        self.specification.write(out_storage=AttributeCache(), out_table_name=specification_table)
        self.coefficients.write(out_storage=AttributeCache(), out_table_name=coefficients_table)
        logger.end_block()
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    """Tests CreateJobBuildingTypesTable against a scratch MySQL database."""

    def setUp(self):
        # Recreate the scratch database for every test.
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql', ))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        # Bug fix: self.fail() used to sit inside the try block, so the
        # AssertionError it raises was swallowed by the bare 'except' and
        # this test could never fail.  Probe first, then fail outside.
        # (Also fixes the 'job_building_tpes' typo in the message.)
        table_exists = True
        try:
            self.db.DoQuery('select * from job_building_types;')
        except Exception:
            table_exists = False
        if table_exists:
            self.fail(
                'Output table job_building_types already exists. (Check setUp)')

    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        try:
            self.db.DoQuery('select * from job_building_types;')
        except Exception:
            self.fail(
                'Expected output table job_building_types does not exist.')

    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        # Header row plus the four hard-coded building types.
        expected_results = [['id', 'name', 'home_based'], [1, "commercial", 0],
                            [3, "industrial", 0], [2, "governmental", 0],
                            [4, "home_based", 1]]

        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except Exception:
            self.fail(
                'Expected output table job_building_types does not exist.')

        self.assert_(
            expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Пример #18
0
    def __init__(self,
                 indicator_directory,
                 name = None,
                 output_type = None,
                 storage_location = None,
                 output_style = ALL,
                 fixed_field_format = None  # Only used with the 'fixed_field' output type
                ):
        """Validate the output configuration and set up the output storage.

        indicator_directory -- directory holding the computed indicators.
        name -- optional table name.
        output_type -- one of 'dbf', 'csv', 'tab', 'sql', 'esri',
            'fixed_field'; selects the storage backend.
        storage_location -- output directory path, or for 'sql' a
            DatabaseConfiguration; defaults to indicator_directory.
        output_style -- Table.ALL, Table.PER_YEAR or Table.PER_ATTRIBUTE.
        fixed_field_format -- XML format string, required for 'fixed_field'.
        """
        if output_type == 'sql' and not isinstance(storage_location, DatabaseConfiguration):
            raise Exception("If Table output_type is 'sql', a Database object must be passed as storage_location.")
        elif output_type in ['dbf', 'csv', 'tab', 'esri', 'fixed_field'] and \
               storage_location is not None and \
               not isinstance(storage_location,str):
            raise Exception("If Table output_type is %s, storage_location must be a path to the output directory"%output_type)
        elif output_type not in ['dbf', 'csv', 'tab', 'sql', 'esri', 'fixed_field']:
            raise Exception("Table output_type must be either dbf, csv, tab, sql, esri, fixed_field, not %s"%output_type)

        if output_type == "fixed_field" and not fixed_field_format:
            raise ValueError("If Table output_type is 'fixed_field', an XML format string must be passed as fixed_field_format.")

        self.fixed_field_format = fixed_field_format

        if output_style not in [Table.ALL,
                                Table.PER_YEAR,
                                Table.PER_ATTRIBUTE]:
            # Bug fix: the message used to be a tuple of three string
            # fragments, so the exception printed a tuple instead of a
            # sentence; the fragments are now joined into one string.
            raise Exception('%s output_style is not appropriate. '
                            'Choose from Table.ALL, Table.PER_YEAR, '
                            'and Table.PER_ATTRIBUTE' % output_style)

        self.output_type = output_type
        self.output_style = output_style

        if storage_location is None:
            storage_location = indicator_directory
        elif output_type == 'sql':
            # For sql output, resolve the configuration to a live database,
            # creating the database on first use.
            server = DatabaseServer(database_server_configuration = storage_location)
            if not server.has_database(database_name = storage_location.database_name):
                server.create_database(database_name = storage_location.database_name)
            storage_location = server.get_database(
                                   database_name = storage_location.database_name)
        self.storage_location = storage_location

        self.output_storage = StorageFactory().get_storage(
            type = '%s_storage'%(self.output_type),
            storage_location = storage_location
        )

        self.name = name
        self.indicator_directory = indicator_directory
    def _create_db_from_chain_via_python(self, from_database_configuration,
                                         to_database_configuration,
                                         tables_to_copy):
        """Flatten a scenario database chain into one new database.

        from_database_configuration -- server/database holding the chain head.
        to_database_configuration -- server/database to (re)create and fill.
        tables_to_copy -- table names to copy; [] means copy every table.
        """
        db_server_from = DatabaseServer(from_database_configuration)
        db_server_to = DatabaseServer(to_database_configuration)
        # The destination database is recreated from scratch.
        db_server_to.drop_database(to_database_configuration.database_name)
        db_server_to.create_database(to_database_configuration.database_name)

        database_out = db_server_to.get_database(
            to_database_configuration.database_name)

        # Maps each database in the chain to the tables it provides.
        scenario_db_manager = ScenarioDatabaseManager(
            server_configuration=from_database_configuration,
            base_scenario_database_name=from_database_configuration.
            database_name)
        table_mapping = scenario_db_manager.get_database_to_table_mapping()

        cross_db_operations = CrossDatabaseOperations()

        #by default, copy all tables
        if tables_to_copy == []:
            tables_to_copy = sum(table_mapping.values(),
                                 [])  # flat a list of lists
        elif 'scenario_information' not in tables_to_copy:
            # scenario_information is always needed for the fix-up below.
            tables_to_copy.append('scenario_information')

        for database_name, tables in table_mapping.items():
            database_in = db_server_from.get_database(database_name)
            for table in tables:
                if table not in tables_to_copy:
                    continue

                logger.start_block("Copying table '%s' from database '%s'" %
                                   (table, database_name))

                try:
                    cross_db_operations.copy_table(table_to_copy=table,
                                                   database_in=database_in,
                                                   database_out=database_out,
                                                   use_chunking=True)
                finally:
                    logger.end_block()
            database_in.close()

        self._fix_scenario_information_table(database_out)
        database_out.close()
        db_server_from.close()
        db_server_to.close()
 def _create_db_from_chain_via_python(self, 
                                      from_database_configuration, 
                                      to_database_configuration,
                                      tables_to_copy):
     """Flatten a scenario database chain into one new database.

     from_database_configuration -- server/database holding the chain head.
     to_database_configuration -- server/database to (re)create and fill.
     tables_to_copy -- table names to copy; [] means copy every table.
     """
     db_server_from = DatabaseServer(from_database_configuration)
     db_server_to = DatabaseServer(to_database_configuration)
     # The destination database is recreated from scratch.
     db_server_to.drop_database(to_database_configuration.database_name)
     db_server_to.create_database(to_database_configuration.database_name)
     
     database_out = db_server_to.get_database(to_database_configuration.database_name)
     
     # Maps each database in the chain to the tables it provides.
     scenario_db_manager = ScenarioDatabaseManager(
         server_configuration = from_database_configuration, 
         base_scenario_database_name = from_database_configuration.database_name)
     table_mapping = scenario_db_manager.get_database_to_table_mapping()
 
     cross_db_operations = CrossDatabaseOperations()
     
     #by default, copy all tables
     if tables_to_copy == []:
         tables_to_copy = sum(table_mapping.values(), [])  # flat a list of lists
     elif 'scenario_information' not in tables_to_copy:
         # scenario_information is always needed for the fix-up below.
         tables_to_copy.append('scenario_information')
         
     for database_name, tables in table_mapping.items():
         database_in = db_server_from.get_database(database_name)
         for table in tables:
             if table not in tables_to_copy:
                 continue
             
             logger.start_block("Copying table '%s' from database '%s'" 
                                % (table, database_name)) 
             
             try:               
                 cross_db_operations.copy_table(table_to_copy = table, 
                                                database_in = database_in, 
                                                database_out = database_out, 
                                                use_chunking = True)
             finally:
                 logger.end_block()
         database_in.close()
         
     self._fix_scenario_information_table(database_out)
     database_out.close()
     db_server_from.close()
     db_server_to.close()
Пример #21
0
class Tests(opus_unittest.OpusTestCase):
    """Tests for TableCreator's backup/drop helpers against a MySQL test database."""

    def setUp(self):
        # Each test starts from a freshly created, empty database.
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)


    def tearDown(self):
        # Close the per-test connection before dropping its database.
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()


    def test_setUp(self):
        """The fresh database must not contain leftover tables from earlier runs."""
        # BUG FIX: the original put self.fail() inside the try and used a bare
        # except, which swallowed the AssertionError raised by fail() — the
        # test could never report the stale table. The failure now goes in the
        # 'else' clause so it is only raised (and not caught) when the query
        # unexpectedly succeeds. Also fixed the 'building_tpes' message typo.
        try:
            self.db.DoQuery('select * from building_types;')
        except Exception:
            pass  # expected: table does not exist, query fails
        else:
            self.fail('Output table building_types already exists. (Check setUp)')

    def test_create_table(self):
        """_backup_table copies a table to <name>_bak; _drop_table removes the original."""
        creator = TableCreator()
        db = creator._get_db(TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        # assert_ is a deprecated alias; use assertTrue/assertFalse as the
        # other test classes in this project do.
        self.assertFalse(db.table_exists('test_table'))
        self.assertFalse(db.table_exists('test_table_bak'))

        db.DoQuery('CREATE TABLE test_table '
                '(id INT, name varchar(50), units varchar(50));')
        self.assertTrue(db.table_exists('test_table'))
        self.assertFalse(db.table_exists('test_table_bak'))

        creator._backup_table(db, 'test_table')
        self.assertTrue(db.table_exists('test_table'))
        self.assertTrue(db.table_exists('test_table_bak'))

        creator._drop_table(db, 'test_table')
        self.assertFalse(db.table_exists('test_table'))
        self.assertTrue(db.table_exists('test_table_bak'))
Пример #22
0
class Tests(opus_unittest.OpusTestCase):
    """Tests for TableCreator's backup/drop helpers against a MySQL test database."""

    def setUp(self):
        # Each test starts from a freshly created, empty database.
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)


    def tearDown(self):
        # Close the per-test connection before dropping its database.
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()


    def test_setUp(self):
        """The fresh database must not contain leftover tables from earlier runs."""
        # BUG FIX: the original put self.fail() inside the try and used a bare
        # except, which swallowed the AssertionError raised by fail() — the
        # test could never report the stale table. The failure now goes in the
        # 'else' clause so it is only raised (and not caught) when the query
        # unexpectedly succeeds. Also fixed the 'building_tpes' message typo.
        try:
            self.db.DoQuery('select * from building_types;')
        except Exception:
            pass  # expected: table does not exist, query fails
        else:
            self.fail('Output table building_types already exists. (Check setUp)')

    def test_create_table(self):
        """_backup_table copies a table to <name>_bak; _drop_table removes the original."""
        creator = TableCreator()
        db = creator._get_db(TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        # assert_ is a deprecated alias; use assertTrue/assertFalse as the
        # other test classes in this project do.
        self.assertFalse(db.table_exists('test_table'))
        self.assertFalse(db.table_exists('test_table_bak'))

        db.DoQuery('CREATE TABLE test_table '
                '(id INT, name varchar(50), units varchar(50));')
        self.assertTrue(db.table_exists('test_table'))
        self.assertFalse(db.table_exists('test_table_bak'))

        creator._backup_table(db, 'test_table')
        self.assertTrue(db.table_exists('test_table'))
        self.assertTrue(db.table_exists('test_table_bak'))

        creator._drop_table(db, 'test_table')
        self.assertFalse(db.table_exists('test_table'))
        self.assertTrue(db.table_exists('test_table_bak'))
Пример #23
0
    def create_storage(self):
        """Connect to the services database, creating it and its schema if needed.

        Returns the connected services database. The declarative metadata is
        bound to the database's engine and all mapped tables are (re)created
        via setup_all()/create_all().
        """
        db_name = self.server_config.database_name

        # Connect to the host server first; a failure here means the server
        # itself is unreachable, not the services database.
        try:
            server = DatabaseServer(self.server_config)
        except:
            logger.log_error('Cannot connect to the database server that the services database is hosted on %s.' % db_name)
            raise

        # Create the services database on first use.
        if not server.has_database(db_name):
            server.create_database(db_name)

        try:
            services_db = server.get_database(db_name)
        except:
            logger.log_error('Cannot connect to a services database on %s.' % server.get_connection_string(scrub=True))
            raise

        # Bind the ORM metadata to this engine and materialize the tables.
        metadata.bind = services_db.engine
        setup_all()
        create_all()

        return services_db
class TestTravelModelInputFileWriter(opus_unittest.OpusTestCase):
    """Integration test: TravelModelInputFileWriter reads jobs/zones/households/
    persons tables from a SQL store and emits tripgen tazdata files, one per
    income group."""

    def setUp(self):
        # Fresh database populated with the fixture tables the writer reads.
        self.database_name = 'test_tm_input_file_writer_with_worker_files'

        self.dbconfig = TestDatabaseConfiguration()

        self.db_server = DatabaseServer(self.dbconfig)

        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        self.create_jobs_table(self.database)
        self.create_zones_table(self.database)
        self.create_employment_sector_groups_table(self.database)
        self.create_constant_taz_columns_table(self.database)
        self.create_households_table(self.database)
        self.create_persons_table(self.database)
        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')

    def tearDown(self):
        self.database.close()
        self.db_server.drop_database(self.database_name)
        # BUG FIX: the server connection was never closed (leak); every other
        # test class in this file closes it.
        self.db_server.close()
        if os.path.exists(self.tempdir_path):
            rmtree(self.tempdir_path)

    def test_create_tripgen_travel_model_input_files(self):
        """Run the writer for year 2000 and compare the emitted tazdata files."""
        in_storage = StorageFactory().get_storage(
              'sql_storage',
              storage_location = self.database)
        sc = SessionConfiguration(new_instance=True,
                             package_order = ['urbansim', 'psrc'],
                             in_storage=in_storage)
        dataset_pool = sc.get_dataset_pool()

        TravelModelInputFileWriter().run(self.tempdir_path, 2000, dataset_pool)

        logger.log_status('tazdata path: ', self.tempdir_path)
        # expected values - data format: {income_group: [[zone_id, job_zone_id], ...]}
        expected_tazdata = {1: [[1,1], [1,2]],
                            2: [[2,2]],
                            3: [],
                            4: [[2,2]]
                            }
        # parse the generated files back in; income groups 1 to 4
        real_tazdata = {1:[],2:[], 3:[], 4:[]}
        for i in [1,2,3,4]:
            path = os.path.join(self.tempdir_path, 'tripgen', 'inputtg', 'tazdata.mf9%s' % i)
            # BUG FIX: use a context manager so the file handle is closed.
            with open(path, 'r') as tazdata_file:
                for a_line in tazdata_file.readlines():
                    # data lines start with whitespace; skip any header lines
                    if a_line[0].isspace():
                        numbers = a_line.split()
                        zone_id = int(numbers[0])
                        job_zone_id = int(numbers[1])
                        real_tazdata[i].append([zone_id, job_zone_id])

        for group in expected_tazdata.keys():
            self.assertEqual(real_tazdata[group], expected_tazdata[group],
                                       "income group %d, columns did not match up."%group)

    def create_households_table(self, database):
        """Fixture: 9 households across zones 1 and 2 for base year 2000."""
        database.drop_table("households")
        schema = {
                  'household_id': 'INTEGER',
                  'zone_id': 'INTEGER',
                  'income': 'INTEGER',
                  'year': 'INTEGER',
                  'building_id': 'INTEGER'
        }
        database.create_table_from_schema('households', schema)
        values = [{'household_id':a, 'zone_id':b, 'income':c, 'year':d, 'building_id':e} for a,b,c,d,e in \
                     [(1, 1, 10, 2000, 1), (2, 1, 11, 2000, 2), (3, 2, 12, 2000, 4), (4, 2, 13, 2000, 4), (5, 2, 14, 2000, 5), (6, 1, 15, 2000, 1), (7, 2, 16, 2000, 5), (8, 2, 16, 2000, 6), (9, 2, 17, 2000, 7)]]
        households = database.get_table('households')
        database.engine.execute(households.insert(), values)
        # 9 houses total
        # incomes: 10, 11, 12, 13, 14, 15, 16, 16, 17
        # med=14, low_med=11.5, upper_med=16
        # in zone_1: 1,2,6

    def create_persons_table(self, database):
        """Fixture: 9 persons; job_id -1 means no job, work_at_home flags telecommuters."""
        database.drop_table("persons")
        schema = {
                  'person_id': 'INTEGER',
                  'household_id': 'FLOAT',
                  'job_id': 'INTEGER',
                  'is_worker': 'INTEGER',
                  'work_at_home': 'INTEGER'
        }
        database.create_table_from_schema('persons', schema)
        values = [{'person_id':z, 'household_id':a, 'job_id':b, 'is_worker':c, 'work_at_home':d} for z,a,b,c,d in \
                    [(1, 1, 3, 1, 0), (2, 4, 8, 1, 0), (3, 1, 9, 1, 0), (4, 7, 2, 1, 1), (5, 6, -1, 1, 0), (6, 9, 6, 1, 0), (7, 9, -1, 0, 0), (8, 2, 1, 1, 1), (9, 2, 4, 1, 1)]]

        persons = database.get_table('persons')
        database.engine.execute(persons.insert(), values)

    def create_zones_table(self, database):
        """Fixture: two traffic analysis zones."""
        database.drop_table('zones')
        schema = {
                  'zone_id': 'INTEGER',
        }
        database.create_table_from_schema('zones', schema)

        zones = database.get_table('zones')
        values = [{'zone_id':1}, {'zone_id':2}]
        database.engine.execute(zones.insert(), values)

    def create_employment_sector_groups_table(self, database):
        """Fixture: sectors 1-19, the ad-hoc sector groups, and the sector->group mapping."""
        database.drop_table('employment_sectors')
        schema = {
                  'sector_id': 'INTEGER',
        }
        database.create_table_from_schema('employment_sectors', schema)
        values = [{'sector_id':i} for i in range(1,20)]
        employment_sectors = database.get_table('employment_sectors')
        database.engine.execute(employment_sectors.insert(), values)

        database.drop_table('employment_adhoc_sector_groups')
        schema = {
                  'group_id': 'INTEGER',
                  'name': 'TEXT'
        }
        database.create_table_from_schema('employment_adhoc_sector_groups', schema)
        values = [{'group_id':a, 'name':b} for a,b in [(2, 'retail'), (21, 'manu'), (22, 'wtcu'), (24, 'fires'), (25, 'gov'), (26, 'edu')]]
        employment_sectors = database.get_table('employment_adhoc_sector_groups')
        database.engine.execute(employment_sectors.insert(), values)

        schema = {
                  'sector_id': 'INTEGER',
                  'group_id': 'INTEGER',
        }
        database.drop_table('employment_adhoc_sector_group_definitions')
        database.create_table_from_schema('employment_adhoc_sector_group_definitions', schema)
        values = [{'sector_id':a, 'group_id':b} for a,b in [(7, 2), (14, 2), (3,21), (4,21), (5,21), (6,22), (8,22), (9,22), (10,22),  (11,24), (12,24), (13,24), (16,24), (17,24), (18,25), (15,26), (19,26)]]
        employment_sectors = database.get_table('employment_adhoc_sector_group_definitions')
        database.engine.execute(employment_sectors.insert(), values)

    def create_jobs_table(self, database):
        """Fixture: 9 jobs spread over zones 1 and 2, year 2000."""
        database.drop_table('jobs')
        schema = {
                  'job_id': 'INTEGER',
                  'zone_id': 'INTEGER',
                  'sector_id': 'INTEGER',
                  'year': 'INTEGER',
        }
        database.create_table_from_schema('jobs', schema)

        values = [{'job_id':1, 'zone_id':1, 'sector_id':1, 'year':2000},
                  {'job_id':2, 'zone_id':1, 'sector_id':3, 'year':2000},
                  {'job_id':3, 'zone_id':1, 'sector_id':4, 'year':2000},
                  {'job_id':4, 'zone_id':1, 'sector_id':7, 'year':2000},
                  {'job_id':5, 'zone_id':2, 'sector_id':9, 'year':2000},
                  {'job_id':6, 'zone_id':2, 'sector_id':11, 'year':2000},
                  {'job_id':7, 'zone_id':2, 'sector_id':15, 'year':2000},
                  {'job_id':8, 'zone_id':2, 'sector_id':16, 'year':2000},
                  {'job_id':9, 'zone_id':2, 'sector_id':17, 'year':2000}]
        jobs = database.get_table('jobs')
        database.engine.execute(jobs.insert(), values)

    def create_constant_taz_columns_table(self, database):
        """Fixture: per-TAZ constants copied verbatim into the travel-model input."""
        database.drop_table('constant_taz_columns')
        schema = {
                  'TAZ': 'INTEGER',
                  'PCTMF': 'FLOAT',
                  'GQI': 'INTEGER',
                  'GQN': 'INTEGER',
                  'FTEUNIV': 'INTEGER',
                  'DEN': 'INTEGER',
                  'FAZ': 'INTEGER',
                  'YEAR': 'INTEGER',
        }
        database.create_table_from_schema('constant_taz_columns', schema)
        values = [{'TAZ':a, 'PCTMF':b, 'GQI':c, 'GQN':d, 'FTEUNIV':e, 'DEN':f, 'FAZ':g, 'YEAR':h} for a,b,c,d,e,f,g,h in \
                    [(1, 19.9, 3, 11, 42, 1, 1, 2000),(2, 29.9, 1, 3, 241, 2, 2, 2000)]
                  ]
        constant_taz_columns = database.get_table('constant_taz_columns')
        database.engine.execute(constant_taz_columns.insert(), values)
Пример #25
0
class TestDBSubPattern(opus_unittest.OpusTestCase):
    """Tests for DBSubPattern: regex substitution over table text columns in
    one or more MySQL test databases, with optional _old backup tables."""

    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1',
            'convert_database_test_db2',
            ]

        self.test_table_names = [
            'table1',
            'table2',
            'table3',
            ]

        table_schema = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"Does not match P A T T E R N.","Matches pattern."),'
            '(2,"Matches pattern.","Does not match P A T T E R N."),'
            '(3,NULL,NULL),'
            '(4,"","")'
            )

        # First row is the header; remaining rows are the table contents.
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"Does not match P A T T E R N.","Matches pattern."],
            [2,"Matches pattern.","Does not match P A T T E R N."],
            [3,None,None],
            [4,"",""]
            ]

        # Applied in order: the first substitution's output feeds the second.
        self.patterns = [
            (r'(pattern)(\.)', r'\1 well\2'),
            (r'^Matches pattern well\.$', r'Matches pattern perfectly!')
            ]

        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"Does not match P A T T E R N.","Matches pattern perfectly!"],
            [2,"Matches pattern.","Does not match P A T T E R N."],
            [3,None,None],
            [4,"",""]
            ]

        insert_items_template = (
            "insert into %(table)s values %(data)s;")

        # Every test database gets every test table.
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = list(self.test_table_names)

        self.config = {
            'databases':self.test_db_names,
            'tables':table_list,

            'backup':True,
            'backup_postfix':'_old',
            }

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))

        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]

        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)'
                    % (table_name,
                       table_schema))

                db.DoQuery(insert_items_template
                            % {'table':table_name, 'data':table_data})


    def tearDown(self):
        # BUG FIX: close the per-database connections BEFORE dropping the
        # databases they point at (the original dropped first).
        for db in self.dbs:
            db.close()

        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)

        self.db_server.close()


    def test_convert_table(self):
        """convert_table changes only the targeted table; all others stay intact."""
        DBSubPattern().convert_table(self.dbs[0], self.test_table_names[0],
            self.patterns)

        db = self.dbs[0]

        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Received %s."
                % (table0,
                   self.expected_output_changed,
                   results))

        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Received %s."
                    % (table,
                       self.expected_output_unchanged,
                       results))

        # The other databases must be completely untouched.
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                        " table(s). Expected %s. Received %s."
                            % (table,
                               self.expected_output_unchanged,
                               results))


    def test_convert_table_backup(self):
        """With backup=True a <table>_old copy preserving the original data is created."""
        db = self.dbs[0]
        table = self.test_table_names[0]

        DBSubPattern().convert_table(db, table, self.patterns,
                                        backup=True, backup_postfix='_old')
        backup_table_name = '%s_old' % table
        try:
            results = db.GetResultsFromQuery('select * from %s' % backup_table_name)
        except Exception:  # narrowed from a bare except
            self.fail("Backup failed for single table (%s) -- backup table (%s) not "
                "created." % (table, backup_table_name))

        self.assert_(results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Received %s."
                % (table, self.expected_output_unchanged, results)
            )


    def test_convert_database(self):
        """convert_database changes exactly the listed tables of one database."""
        DBSubPattern().convert_database(self.dbs[0],
            self.test_table_names[0:2], self.patterns)

        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Received %s."
                    % (table,
                       self.expected_output_changed,
                       results))

        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Received %s."
                    % (table,
                       self.expected_output_unchanged,
                       results))

        for i in range(len(self.dbs[1:])):
            db = self.dbs[i+1]

            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                        " table(s). Expected %s. Received %s."
                            % (i,
                               table,
                               self.expected_output_unchanged,
                               results))


    def test_convert_databases(self):
        """convert_databases applies the patterns to every configured table of every database."""
        DBSubPattern().convert_databases(TestDatabaseConfiguration(protocol='mysql'),
            self.config['databases'], self.config['tables'], self.patterns)

        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)

            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                        " Expected %s. Received %s."
                            % (db_name,
                               table,
                               self.expected_output_changed,
                               results))
            # BUG FIX: close the extra connection opened for this check.
            db.close()
Пример #26
0
class TestConvertDatabases(opus_unittest.OpusTestCase):
    """Tests for ConvertDatabase: rewriting 'opus.' variable references in
    table text columns across multiple MySQL test databases."""

    def setUp(self):
        self.test_db_names = [
            'convert_database_test_db1',
            'convert_database_test_db2',
        ]

        self.test_table_names = [
            'table1',
            'table2',
            'table3',
        ]

        table_structure = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"No opus dot.","constant"),'
            '(2,"opus.core.miscellaneous","No opus dot."),'
            '(3,"opus.urbansim.gridcell.percent_water",'
            '"gc_cbd = gridcell.disaggregate('
            'opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"),'
            '(4,"","")')

        # First row is the header; remaining rows are the table contents.
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "No opus dot.", "constant"],
            [2, "opus.core.miscellaneous", "No opus dot."],
            [
                3, "opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                "opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"
            ], [4, "", ""]
        ]

        # Conversion strips the 'opus.' prefix only inside variable_name.
        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "No opus dot.", "constant"],
            [2, "opus.core.miscellaneous", "No opus dot."],
            [
                3, "opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                "psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"
            ], [4, "", ""]
        ]

        insert_items_template = ("insert into %(table)s values %(data)s;")

        # Every test database gets every test table.
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = list(self.test_table_names)

        self.config = {
            'databases': self.test_db_names,
            'tables': table_list,
            'backup': True,
            'backup_postfix': '_old',
        }

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))

        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]

        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' %
                           (table_name, table_structure))

                db.DoQuery(insert_items_template % {
                    'table': table_name,
                    'data': table_data
                })

    def tearDown(self):
        # BUG FIX: close the per-database connections BEFORE dropping the
        # databases they point at (the original dropped first).
        for db in self.dbs:
            db.close()

        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)

        self.db_server.close()

    def test_convert_table(self):
        """convert_table changes only the targeted table; all others stay intact."""
        ConvertDatabase().convert_table(self.dbs[0], self.test_table_names[0])

        db = self.dbs[0]

        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(
            results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Received %s." %
            (table0, self.expected_output_changed, results))

        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Received %s." %
                (table, self.expected_output_unchanged, results))

        # The other databases must be completely untouched.
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                    " table(s). Expected %s. Received %s." %
                    (table, self.expected_output_unchanged, results))

    def test_convert_table_backup(self):
        """With backup=True a <table>_old copy preserving the original data is created."""
        db = self.dbs[0]
        table = self.test_table_names[0]

        ConvertDatabase().convert_table(db,
                                        table,
                                        backup=True,
                                        backup_postfix='_old')

        results = db.GetResultsFromQuery('select * from %s_old;' % table)
        self.assert_(
            results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Received %s." %
            (table, self.expected_output_unchanged, results))

    def test_convert_database(self):
        """convert_database changes exactly the listed tables of one database."""
        ConvertDatabase().convert_database(self.dbs[0],
                                           self.test_table_names[0:2])

        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Received %s." %
                (table, self.expected_output_changed, results))

        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Received %s." %
                (table, self.expected_output_unchanged, results))

        for i in range(len(self.dbs[1:])):
            db = self.dbs[i + 1]

            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                    " table(s). Expected %s. Received %s." %
                    (i, table, self.expected_output_unchanged, results))

    def test_convert_databases(self):
        """convert_databases applies the conversion to every configured table of every database."""
        ConvertDatabase().convert_databases(
            TestDatabaseConfiguration(protocol='mysql'), self.config)

        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)

            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                    " Expected %s. Received %s." %
                    (db_name, table, self.expected_output_changed, results))
            # BUG FIX: close the extra connection opened for this check.
            db.close()
Пример #27
0
         database_name = database_name,
         database_configuration = options.database_configuration
         )
     )                
     db = db_server.get_database(database_name)
     directory = db
     
 if storage_outtype == 'sql':
     database_name = output_file
     db_server = DatabaseServer(DatabaseConfiguration(
         database_name = database_name,
         database_configuration = options.database_configuration
         )
     )
     if not db_server.has_database(database_name):
         db_server.create_database(database_name)                          
     db = db_server.get_database(database_name)
     output_file = db
     
 input_storage = StorageFactory().get_storage('%s_storage' % storage_intype, storage_location = directory)
 output_storage = StorageFactory().get_storage('%s_storage' % storage_outtype, storage_location = output_file)
 
 if storage_outtype in create_output_directory and not os.path.exists(output_file):
     os.makedirs(output_file)
     
 logger.start_block("Converting table '%s' from %s into %s ..." % (table_name, storage_intype, storage_outtype))
 kwargs = {'nchunks': nchunks}
 for arg in arg_list.get(storage_outtype, []):
     kwargs[arg] = getattr(options, arg, None)
 try:
     if hasattr(output_storage,"drop_table"):
Пример #28
0
    def __init__(self,
                 indicator_directory,
                 name = None,
                 output_type = None,
                 storage_location = None,
                 output_style = ALL,
                 fixed_field_format = None,  # Only used with the 'fixed_field' output type
                 **kwargs
                ):
        """Configure a Table visualization target.

        indicator_directory -- directory containing the computed indicators;
            also the default storage location when none is given.
        name -- optional display name for this table.
        output_type -- one of 'dbf', 'csv', 'tab', 'sql', 'esri',
            'fixed_field', 'xls'.
        storage_location -- for 'sql', a DatabaseConfiguration; for the
            file-based types, a path string (or None to default to
            indicator_directory).
        output_style -- Table.ALL, Table.PER_YEAR, or Table.PER_ATTRIBUTE.
        fixed_field_format -- XML format string; required when output_type
            is 'fixed_field'.
        kwargs -- may carry 'append_col_type' (see note at bottom).

        Raises Exception/ValueError on inconsistent argument combinations.
        """

        # Validate the output_type / storage_location pairing up front.
        if output_type == 'sql' and not isinstance(storage_location, DatabaseConfiguration):
            raise Exception("If Table output_type is 'sql', a Database object must be passed as storage_location.")
        elif output_type in ['dbf', 'csv', 'tab', 'esri', 'fixed_field', 'xls'] and \
               storage_location is not None and \
               not isinstance(storage_location,str):
            raise Exception("If Table output_type is %s, storage_location must be a path to the output directory"%output_type)
        elif output_type not in ['dbf', 'csv', 'tab', 'sql', 'esri', 'fixed_field', 'xls']:
            raise Exception("Table output_type must be either dbf, csv, tab, sql, esri, fixed_field, xls, not %s"%output_type)

        if output_type == "fixed_field" and not fixed_field_format:
            raise ValueError("If Table output_type is 'fixed_field', an XML format string must be passed as fixed_field_format.")

        self.fixed_field_format = fixed_field_format

        if output_style not in [Table.ALL,
                                Table.PER_YEAR,
                                Table.PER_ATTRIBUTE]:
            raise Exception(('%s output_style is not appropriate.'%output_style,
                   'Choose from Table.ALL, Table.PER_YEAR, ',
                   'and Table.PER_ATTRIBUTE'))

        self.output_type = output_type
        self.output_style = output_style

        # Resolve storage_location: default to the indicator directory, open
        # (creating if necessary) the database for 'sql', or build a clean
        # file path for 'xls'.
        if storage_location is None:
            storage_location = indicator_directory
        elif output_type == 'sql':
            server = DatabaseServer(database_server_configuration = storage_location)
            if not server.has_database(database_name = storage_location.database_name):
                server.create_database(database_name = storage_location.database_name)
            storage_location = server.get_database(
                                   database_name = storage_location.database_name)
        elif output_type == 'xls':
            storage_location = os.path.join(indicator_directory, storage_location)
            # We want clean output.  So remove the file if it exists
            if os.path.exists(storage_location):
                os.remove(storage_location)
        self.storage_location = storage_location

        self.output_storage = StorageFactory().get_storage(
            type = '%s_storage'%(self.output_type),
            storage_location = storage_location
        )

        self.name = name
        self.indicator_directory = indicator_directory

        #checking for new append_col_type argument
        # NOTE(review): the resulting flag is mixed-type -- the string 'True'
        # when kwargs are present but lack 'append_col_type', and the boolean
        # False when no kwargs are given. This looks suspicious but is kept
        # as-is; confirm downstream consumers before normalizing.
        if kwargs:
            try:
                self.append_col_type = kwargs['append_col_type']
            except:
                self.append_col_type = 'True'
        else:
            self.append_col_type = False
Пример #29
0
    def test__output_types(self):
        """Render one indicator through every available output type and check
        that each produces its expected artifact (a file, or a SQL table).

        The list of types to exercise is built dynamically: 'dbf' only if the
        dbfpy package imports, 'sql' only if a test database server can be
        reached.
        """
        output_types = ['csv','tab','fixed_field']
        # 'dbf' support is optional -- skip it when dbfpy is not installed.
        try:
            import dbfpy
        except ImportError:
            pass
        else:
            output_types.append('dbf')

        # Probe for a usable SQL server by (re)creating a scratch database;
        # any failure here means SQL output cannot be tested on this machine.
        try:

            test_db_name = 'test_db_for_indicator_framework'
            database_config = DatabaseConfiguration(
                 database_name = test_db_name,
                 test = True,
            )

            server = DatabaseServer(database_config)
            server.drop_database(database_name = test_db_name)
            server.create_database(database_name = test_db_name)

        except:
            has_sql = False
        else:
            has_sql = True
            output_types.append('sql')

        indicator = Indicator(
                  dataset_name = 'test',
                  attribute = 'opus_core.test.attribute'
        )

        maker = Maker(project_name = 'test', test = True)
        computed_indicators = maker.create_batch(
            indicators = {'attr1':indicator},
            source_data = self.source_data)

        for output_type in output_types:
            # Type-specific constructor arguments.
            kwargs = {}
            if output_type == 'sql':
                kwargs['storage_location'] = database_config
            elif output_type == 'fixed_field':
                kwargs['fixed_field_format'] = '<fixed_field><field name="attribute_1980" format="10f" /></fixed_field>'

            table = Table(
                        indicator_directory = self.source_data.get_indicator_directory(),
                        output_type = output_type,
                        **kwargs)
            table._create_input_stores(self.source_data.years)
            viz_result = table.visualize(
                        indicators_to_visualize = ['attr1'],
                        computed_indicators = computed_indicators)[0]
            # File-based types must leave a file on disk; 'sql' must leave a
            # table in the scratch database.
            if output_type in ['csv','dbf','tab','fixed_field']:
                self.assertTrue(os.path.exists(
                   os.path.join(viz_result.storage_location,
                   viz_result.table_name + '.' + viz_result.file_extension)))
            elif output_type == 'sql':
                self.assertTrue(server.has_database(test_db_name))
                db = server.get_database(test_db_name)
                self.assertTrue(db.table_exists(table_name = viz_result.table_name))
        if has_sql:
            server.drop_database(database_name = test_db_name)
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    """Fixture building a three-level chain of scratch databases.

    Creates db_chain_granddad -> db_chain_dad -> db_chain_son, where each
    child's scenario_information table names its parent database in
    parent_database_url, and seeds small integer-keyed sample rows into
    the schema tables.
    """

    def setUp(self):
        self.databases = ["db_chain_son", "db_chain_dad", "db_chain_granddad"]

        self.config = TestDatabaseConfiguration(protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()

        # Open handles to the three freshly (re)created databases.
        self.db_chain_granddad = self.server.get_database("db_chain_granddad")
        self.db_chain_dad = self.server.get_database("db_chain_dad")
        self.db_chain_son = self.server.get_database("db_chain_son")

        self._create_tables()

        # Table objects used by _seed_values and by the tests themselves.
        self.granddad_schema = self.db_chain_granddad.get_table("base_schema")
        self.dad_schema = self.db_chain_dad.get_table("base_schema")
        self.granddad_schema2 = self.db_chain_granddad.get_table("base_schema2")
        self.son_schema2 = self.db_chain_son.get_table("base_schema2")

        self._seed_values()

    def _create_databases(self):
        # Recreate every database from scratch; the drop is best-effort
        # because the database may not exist yet.
        for name in self.databases:
            try:
                self.server.drop_database(name)
            except:
                pass
            self.server.create_database(name)

    def _create_tables(self):
        # granddad holds both schema tables, dad only base_schema, son only
        # base_schema2; every database gets a scenario_information table.
        layout = (
            (self.db_chain_granddad, (("base_schema", base_schema),
                                      ("base_schema2", base_schema2))),
            (self.db_chain_dad, (("base_schema", base_schema),)),
            (self.db_chain_son, (("base_schema2", base_schema2),)),
        )
        for db, tables in layout:
            for table_name, schema in tables:
                db.create_table_from_schema(table_name, schema)
            db.create_table_from_schema("scenario_information",
                                        base_scenario_information_schema)

    def _seed_values(self):
        # Record each database's parent link (the root has an empty one).
        links = (
            (self.db_chain_granddad, ""),
            (self.db_chain_dad, "db_chain_granddad"),
            (self.db_chain_son, "db_chain_dad"),
        )
        for db, parent in links:
            info = db.get_table("scenario_information")
            db.execute(info.insert(values={info.c.parent_database_url: parent}))

        # Sample rows: the text and float columns echo the integer key.
        def clob_row(j):
            return {"integer_col": j, "clob_col": str(j),
                    "smallinteger_col": j, "float_col": float(j)}

        def varchar_row(j, flag):
            return {"integer_col": j, "varchar_col": str(j),
                    "boolean_col": flag, "numeric_col": float(j)}

        granddad_vals = [clob_row(j) for j in (0, 2, 4)]
        dad_vals = [clob_row(j) for j in (0, 1, 2, 3, 4)]
        granddad_vals2 = [varchar_row(j, True) for j in (0, 2, 4)]
        son_vals2 = [varchar_row(j, False) for j in (0, 4)]

        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(), granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(), granddad_vals2)
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)

    def tearDown(self):
        # Close connections before dropping the databases they point at.
        for db in (self.db_chain_granddad, self.db_chain_dad, self.db_chain_son):
            db.close()
        for name in ("db_chain_granddad", "db_chain_dad", "db_chain_son"):
            self.server.drop_database(name)
        self.server.close()
Пример #31
0
        # Presumably the storage_intype == 'sql' branch (condition is above
        # this chunk) -- TODO confirm: treat `directory` as a database name
        # and replace it with an open database handle for the input storage.
        database_name = directory
        db_server = DatabaseServer(
            DatabaseConfiguration(
                database_name=database_name,
                database_configuration=options.database_configuration))
        db = db_server.get_database(database_name)
        directory = db

    # SQL output: interpret `output_file` as a database name, create the
    # database on demand, and replace it with an open connection.
    if storage_outtype == 'sql':
        database_name = output_file
        db_server = DatabaseServer(
            DatabaseConfiguration(
                database_name=database_name,
                database_configuration=options.database_configuration))
        if not db_server.has_database(database_name):
            db_server.create_database(database_name)
        db = db_server.get_database(database_name)
        output_file = db

    # Build the storage adapters, e.g. 'csv_storage' or 'sql_storage'.
    input_storage = StorageFactory().get_storage('%s_storage' % storage_intype,
                                                 storage_location=directory)
    output_storage = StorageFactory().get_storage('%s_storage' %
                                                  storage_outtype,
                                                  storage_location=output_file)

    # File-based output types need their target directory to exist.
    if storage_outtype in create_output_directory and not os.path.exists(
            output_file):
        os.makedirs(output_file)

    logger.start_block("Converting table '%s' from %s into %s ..." %
                       (table_name, storage_intype, storage_outtype))
Пример #32
0
    def __init__(
        self,
        indicator_directory,
        name=None,
        output_type=None,
        storage_location=None,
        output_style=ALL,
        fixed_field_format=None  # Only used with the 'fixed_field' output type
    ):
        """Configure a Table visualization.

        Parameters:
            indicator_directory -- directory holding the computed-indicator
                files; also the default storage location.
            name -- optional display name for the table.
            output_type -- one of 'dbf', 'csv', 'tab', 'sql', 'esri',
                'fixed_field'.
            storage_location -- output directory path for the file-based
                types, or a DatabaseConfiguration for 'sql'; defaults to
                indicator_directory.
            output_style -- Table.ALL, Table.PER_YEAR, or Table.PER_ATTRIBUTE.
            fixed_field_format -- XML format string; required when
                output_type is 'fixed_field'.

        Raises:
            Exception -- if output_type, storage_location, or output_style
                are missing or inconsistent.
            ValueError -- if 'fixed_field' is requested without a format.
        """
        # Validate the output_type / storage_location pairing up front.
        if output_type == 'sql' and not isinstance(storage_location,
                                                   DatabaseConfiguration):
            # Message fixed: the check requires a DatabaseConfiguration,
            # not a Database object.
            raise Exception(
                "If Table output_type is 'sql', a DatabaseConfiguration object must be passed as storage_location."
            )
        elif output_type in ['dbf', 'csv', 'tab', 'esri', 'fixed_field'] and \
               storage_location is not None and \
               not isinstance(storage_location,str):
            raise Exception(
                "If Table output_type is %s, storage_location must be a path to the output directory"
                % output_type)
        elif output_type not in [
                'dbf', 'csv', 'tab', 'sql', 'esri', 'fixed_field'
        ]:
            raise Exception(
                "Table output_type must be either dbf, csv, tab, sql, esri, fixed_field, not %s"
                % output_type)

        if output_type == "fixed_field" and not fixed_field_format:
            raise ValueError(
                "If Table output_type is 'fixed_field', an XML format string must be passed as fixed_field_format."
            )

        self.fixed_field_format = fixed_field_format

        if output_style not in [
                Table.ALL, Table.PER_YEAR, Table.PER_ATTRIBUTE
        ]:
            # Bug fix: the original passed a tuple of three strings to
            # Exception, so the message printed as a tuple and the %
            # formatting applied to its first element only.
            raise Exception(
                '%s output_style is not appropriate. Choose from '
                'Table.ALL, Table.PER_YEAR, and Table.PER_ATTRIBUTE'
                % output_style)

        self.output_type = output_type
        self.output_style = output_style

        if storage_location is None:
            # Default: write next to the computed indicators.
            storage_location = indicator_directory
        elif output_type == 'sql':
            # Resolve the configuration to a live database, creating the
            # database if it does not exist yet.
            server = DatabaseServer(
                database_server_configuration=storage_location)
            if not server.has_database(
                    database_name=storage_location.database_name):
                server.create_database(
                    database_name=storage_location.database_name)
            storage_location = server.get_database(
                database_name=storage_location.database_name)
        self.storage_location = storage_location

        self.output_storage = StorageFactory().get_storage(
            type='%s_storage' % (self.output_type),
            storage_location=storage_location)

        self.name = name
        self.indicator_directory = indicator_directory
Пример #33
0
    def run(self, config, year, storage_type='sql'):
        """Export household, person and zone data for `year` to openAMOS.

        Computes derived variables on the cached datasets and writes them
        via self.write_dataset either into a database ('sql', created on
        demand) or into per-year CSV files ('csv').

        Parameters:
            config -- run configuration dict; must contain
                'travel_model_configuration', 'cache_directory' and
                'dataset_pool_configuration'.
            year -- simulation year whose cached data is exported.
            storage_type -- 'sql' (default) or 'csv'.

        Raises ValueError for any other storage_type.
        """
        # Travel-model database settings, with fallback defaults.
        tm_config = config['travel_model_configuration']
        database_server_config = tm_config.get("database_server_configuration", 'simtravel_database_server')
        database_name = tm_config.get("database_name", 'mag_zone_baseyear')

        # Point the simulation state at the cache for the requested year and
        # build a dataset pool backed by the attribute cache.
        cache_directory = config['cache_directory']
        simulation_state = SimulationState()
        simulation_state.set_cache_directory(cache_directory)
        simulation_state.set_current_time(year)
        attribute_cache = AttributeCache()
        dataset_pool = SessionConfiguration(new_instance=True,
                                            package_order=config['dataset_pool_configuration'].package_order,
                                            in_storage=attribute_cache).get_dataset_pool()

        # Select the output storage backend.
        if storage_type == 'sql':
            db_server = DatabaseServer(DatabaseConfiguration(
                                                             database_name = database_name,
                                                             database_configuration = database_server_config
                                                             )
                                                             )
            if not db_server.has_database(database_name): 
                print "Db doesn't exist creating one"
                db_server.create_database(database_name)
            db = db_server.get_database(database_name) 
            output_storage = sql_storage(storage_location = db)
        elif storage_type == 'csv':
            csv_directory = os.path.join(cache_directory, 'csv', str(year))
            output_storage = csv_storage(storage_location=csv_directory)
        else:
            raise ValueError, "Unsupported output storage type {}".format(storage_type)

        logger.start_block('Compute and export data to openAMOS...')

        # --- households: derive income brackets, household composition and
        # zone assignment, then export. ---
        hh = dataset_pool.get_dataset('household')
        hh_recs = dataset_pool.get_dataset('households_recs')
        #hh_recs.add_attribute(0,"htaz1")
        #hh_recs.flush_dataset()
        #syn_hh = dataset_pool.get_dataset('synthetic_household')

        hh_variables = ['houseid=household.household_id',
                        "hhsize=household.number_of_agents(person)",
                        "one=(household.household_id>0).astype('i')",
                        "inclt35k=(household.income<35000).astype('i')",
                        "incge35k=(household.income>=35000).astype('i')",
                        "incge50k=(household.income>=50000).astype('i')",
                        "incge75k=(household.income>=75000).astype('i')",
                        "incge100k=(household.income>=100000).astype('i')",
                        "inc35t50=((household.income>=35000) & (household.income<50000)).astype('i')",
                        "inc50t75=((household.income>=50000) & (household.income<75000)).astype('i')",
                        "inc75t100=((household.income>=75000) & (household.income<100000)).astype('i')",
                        'htaz = ((houseid>0)*(household.disaggregate(building.zone_id)-100) + (houseid<=0)*0)',
                        #'htaz = ((houseid>0) & (htaz1>100))*(htaz1-100)+((houseid>0) & (htaz1==-1))*1122',
                        "withchild = (household.aggregate(person.age<18)>0).astype('i')",
                        "noc = household.aggregate(person.age<18)",
                        "numadlt = household.aggregate(person.age>=18)",
                        "hinc=household.income",
                        "wif=household.workers",
                        #"wif=household.aggregate(mag_zone.person.is_employed)",
                        'numwrkr=household.workers',
                        #'numwrkr=household.aggregate(mag_zone.person.is_employed)',
                        'nwrkcnt=household.number_of_agents(person) - household.workers',
                        #'nwrkcnt=household.number_of_agents(person) - household.aggregate(mag_zone.person.is_employed)',

                        'yrbuilt=mag_zone.household.yrbuilt',
                        'mag_zone.household.sparent',
                        'mag_zone.household.rur',
                        'mag_zone.household.urb',
                        'zonetid = household.disaggregate(building.zone_id)',
                        ]

        self.prepare_attributes(hh, hh_variables)
        attrs_to_export = hh_recs.get_known_attribute_names()

        self.write_dataset(hh, attrs_to_export, output_storage)
        # Free the dataset once written to keep memory bounded.
        dataset_pool._remove_dataset(hh.dataset_name)

        # --- persons: derive age bands, work/school status and recoded
        # work/school zones, then export. ---
        persons = dataset_pool.get_dataset('person')
        persons.out_table_name_default = 'persons'

        # Recoding invalid work and school locations to some random valid values
        persons_recs = dataset_pool.get_dataset('persons_recs')
        persons_recs.add_attribute(persons['person_id'],"personuniqueid")
        persons_recs.add_attribute(persons['marriage_status'],"marstat")
        persons_recs.add_attribute(persons['student_status'],"schstat")


        """
        persons_recs.add_attribute(persons['wtaz0'],"htaz_act")
        persons_recs.add_attribute(0,"wtaz_rec")
        persons_recs.add_attribute(0,"wtaz_rec1")
        persons_recs.add_attribute(0,"wtaz_rec2")

        persons_recs.add_attribute(0,"wtaz1_1")
        persons_recs.add_attribute(0,"wtaz1_2")
        persons_recs.add_attribute(0,"wtaz1_3")
        #persons_recs.add_attribute(persons['student_status'],"schstat")
        """

        persons_recs.add_attribute(0,"wtaz1")
        persons_recs.add_attribute(0,"htaz")
        persons_recs.add_attribute(0,"schtaz1")

        persons_recs.flush_dataset()

        #syn_persons = dataset_pool.get_dataset('synthetic_person')
        persons_variables = ['personid=mag_zone.person.member_id',
                             'personuniqueid=person.person_id',
                             'houseid=person.household_id',
                             "one=(person.person_id>0).astype('i')",
                             'trvtime=mag_zone.person.travel_time_from_home_to_work',
                             'timetowk=mag_zone.person.travel_time_from_home_to_work',
                             #'mag_zone.person.tmtowrk',
                             #'tmtowrk=person.disaggregate(synthetic_person.tmtowrk)',
                             "ag5t10=((person.age>=5) & (person.age<=10)).astype('i')",
                             "ag11t14=((person.age>=11) & (person.age<=14)).astype('i')",
                             "ag15t17=((person.age>=15) & (person.age<=17)).astype('i')",
                             "ag18t24=((person.age>=18) & (person.age<=24)).astype('i')",
                             "ag25t34=((person.age>=25) & (person.age<=34)).astype('i')",
                             "ag35t44=((person.age>=35) & (person.age<=44)).astype('i')",
                             "ag45t54=((person.age>=45) & (person.age<=54)).astype('i')",
                             "ag55t64=((person.age>=55) & (person.age<=64)).astype('i')",
                             "agge65=(person.age>=65).astype('i')",

                             "ag12t17=((person.age>=12) & (person.age<=17)).astype('i')",
                             "ag5t14=((person.age>=5) & (person.age<=14)).astype('i')",
                             "agge15=(person.age>=15).astype('i')",

                             "wrkr=(person.employment_status==1).astype('i')",
                             "isemploy=(person.employment_status==1).astype('i')",
                             "fulltim=(mag_zone.person.full_time==1).astype('i')",
                             'parttim=mag_zone.person.part_time',

                             'htaz = ((houseid>0)*(person.disaggregate(building.zone_id, intermediates=[household])-100) + (houseid<=0)*0)',

                             'wtaz1=(person.wtaz <= 0)*0 + (person.wtaz > 0)*(person.wtaz-100)',

                             "presch = ((person.age < 5)&(houseid>0)).astype('i')",
                             "mag_zone.person.schstat",


                             'schtaz1 = (person.schtaz <= 0)*0 + (person.schtaz > 0)*(person.schtaz-100)',
                             'marstat = person.marriage_status',

                             'enroll = person.student_status',
                             'grade = person.student_status & person.education',
                             'educ = person.education',
                             "male = (person.sex==1).astype('i')",
                             "female = (person.sex==2).astype('i')",

                             "coled = (person.education >= 10).astype('i')",

                             'race1 = person.race',
                             "white = (person.race == 1).astype('i')",
                             'person.hispanic'
                             ]
        self.prepare_attributes(persons, persons_variables)

        attrs_to_export = persons_recs.get_known_attribute_names()

        self.write_dataset(persons, attrs_to_export, output_storage)
        dataset_pool._remove_dataset(persons.dataset_name)

        # --- zones: employment by sector group, densities and household
        # income distribution, exported via the locations dataset. ---
        zones = dataset_pool.get_dataset('zone')
        zones_variables = [
                             "retail_employment=zone.aggregate(mag_zone.job.sector_group=='retail')",
                             "public_employment=zone.aggregate(mag_zone.job.sector_group=='public')",
                             "office_employment=zone.aggregate(mag_zone.job.sector_group=='office')",
                             "industrial_employment=zone.aggregate(mag_zone.job.sector_group=='individual')",
                             "other_employment=zone.aggregate(mag_zone.job.sector_group=='other')",

                             "retail_employment_density=zone.aggregate(mag_zone.job.sector_group=='retail')/zone.acres",
                             "public_employment_density=zone.aggregate(mag_zone.job.sector_group=='public')/zone.acres",
                             "office_employment_density=zone.aggregate(mag_zone.job.sector_group=='office')/zone.acres",
                             "industrial_employment_density=zone.aggregate(mag_zone.job.sector_group=='individual')/zone.acres",
                             "other_employment_density=zone.aggregate(mag_zone.job.sector_group=='other')/zone.acres",

                             "total_area=zone.acres",

                             "lowest_income=zone.aggregate(household.income < scoreatpercentile(household.income, 20))",
                             "low_income=zone.aggregate(household.income < scoreatpercentile(household.income, 40))",
                             "high_income=zone.aggregate(household.income > scoreatpercentile(household.income, 80))",

                             #"institutional_population=zone.disaggregate(locations.institutional_population)",
                             #"groupquarter_households=zone.disaggregate(locations.groupquarter_households)",

                             "residential_households=zone.number_of_agents(household)",

                             "locationid=zone.zone_id",
                             ]

        locations = dataset_pool['locations']
        self.prepare_attributes(zones, zones_variables, dataset2=locations)
        attrs_to_export = locations.get_known_attribute_names()

        self.write_dataset(locations, attrs_to_export, output_storage)
        dataset_pool._remove_dataset(locations.dataset_name)
        #raw_input("check location block")

        logger.end_block()
Пример #34
0
    def run_run(
        self, run_resources, run_name=None, scenario_name=None, run_as_multiprocess=True, run_in_background=False
    ):
        """Check the run hasn't already been marked running, log it into
        run_activity, run the simulation, then mark the run done/failed.

        Parameters:
            run_resources -- configuration dict for the run; its
                'cache_directory' must match this RunManager's, and it is
                augmented here with 'run_id' (plus 'scenario_name' and a
                defaulted 'base_year' when absent).
            run_name, scenario_name -- labels recorded in the history rows.
            run_as_multiprocess -- run the model system across processes.
            run_in_background -- only honored for single-process runs.

        Returns the run_id of the completed run.  Any failure is recorded
        as 'failed' in the history table and then re-raised.
        """
        if not self.ready_to_run:
            # Bug fix: the original raised a bare string, which is a
            # TypeError ("exceptions must derive from BaseException") on
            # Python >= 2.6 instead of delivering this message.
            raise RuntimeError("RunManager.setup_new_run must be executed before RunManager.run_run")

        if run_resources["cache_directory"] != self.current_cache_directory:
            # Same string-exception fix as above.
            raise RuntimeError("The configuration and the RunManager conflict on the proper cache_directory")

        run_resources["run_id"] = self.run_id
        if scenario_name is not None:
            run_resources["scenario_name"] = scenario_name

        self.add_row_to_history(self.run_id, run_resources, "started", run_name=run_name, scenario_name=scenario_name)

        try:
            # Test pre-conditions
            model_system_class_path = run_resources.get("model_system", None)
            if model_system_class_path is None:
                raise TypeError(
                    "The configuration must specify model_system, the" " full Opus path to the model system to be used."
                )

            # Create baseyear cache
            self.create_baseyear_cache(run_resources)

            # Create brand-new output database (deletes any prior contents)
            if "estimation_database_configuration" in run_resources:
                db_server = DatabaseServer(run_resources["estimation_database_configuration"])
                if not db_server.has_database(run_resources["estimation_database_configuration"].database_name):
                    db_server.create_database(run_resources["estimation_database_configuration"].database_name)

            # Run simulation.
            # NOTE: relies on Python 2 exec semantics to bind ModelSystem
            # as a local name in this function's scope.
            exec("from %s import ModelSystem" % model_system_class_path)

            model_system = ModelSystem()
            self.model_system = model_system

            if "base_year" not in run_resources:
                # Default the base year to the year before the first
                # simulated year.
                run_resources["base_year"] = run_resources["years"][0] - 1

            base_year = run_resources["base_year"]
            ## create a large enough seed_array so that a restarted run
            ## can still have seed when running pass the original end_year
            ## the size needed to store seed_dict of 100 seeds is about 12568 Bytes
            self._create_seed_dictionary(run_resources, start_year=base_year, end_year=base_year + 100)

            if "run_in_same_process" in run_resources and run_resources["run_in_same_process"]:
                model_system.run_in_same_process(run_resources)
            elif run_as_multiprocess:
                model_system.run_multiprocess(run_resources)
            else:
                model_system.run_in_one_process(
                    run_resources, run_in_background=run_in_background, class_path=model_system_class_path
                )

            self.model_system = None

        except:
            # Record the failure before re-raising so the history table
            # reflects the crash.
            self.add_row_to_history(
                self.run_id, run_resources, "failed", run_name=run_name, scenario_name=scenario_name
            )
            self.ready_to_run = False
            raise  # This re-raises the last exception
        else:
            self.add_row_to_history(self.run_id, run_resources, "done", run_name=run_name, scenario_name=scenario_name)

        self.ready_to_run = False
        return self.run_id
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    """Fixture that wires up a granddad -> dad -> son database chain.

    Each database's scenario_information row stores the name of its parent
    database (empty for the root), and the schema tables are pre-populated
    with small integer-keyed sample rows.
    """

    def setUp(self):
        self.databases = ['db_chain_son', 'db_chain_dad', 'db_chain_granddad']

        self.config = TestDatabaseConfiguration(protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        self.db_chain_granddad = self.server.get_database('db_chain_granddad')
        self.db_chain_dad = self.server.get_database('db_chain_dad')
        self.db_chain_son = self.server.get_database('db_chain_son')

        self._create_tables()
        self.granddad_schema = self.db_chain_granddad.get_table('base_schema')
        self.dad_schema = self.db_chain_dad.get_table('base_schema')
        self.granddad_schema2 = self.db_chain_granddad.get_table('base_schema2')
        self.son_schema2 = self.db_chain_son.get_table('base_schema2')

        self._seed_values()

    def _create_databases(self):
        # Best-effort drop (the database may not exist), then create fresh.
        for db_name in self.databases:
            try:
                self.server.drop_database(db_name)
            except:
                pass
            self.server.create_database(db_name)

    def _create_tables(self):
        granddad = self.db_chain_granddad
        dad = self.db_chain_dad
        son = self.db_chain_son

        granddad.create_table_from_schema('base_schema', base_schema)
        granddad.create_table_from_schema('base_schema2', base_schema2)
        dad.create_table_from_schema('base_schema', base_schema)
        son.create_table_from_schema('base_schema2', base_schema2)
        # Every database in the chain carries a scenario_information table.
        for db in (granddad, dad, son):
            db.create_table_from_schema('scenario_information',
                                        base_scenario_information_schema)

    def _seed_values(self):
        # parent_database_url chains son -> dad -> granddad -> '' (root).
        for db, parent in ((self.db_chain_granddad, ''),
                           (self.db_chain_dad, 'db_chain_granddad'),
                           (self.db_chain_son, 'db_chain_dad')):
            scenario_info = db.get_table('scenario_information')
            statement = scenario_info.insert(
                values={scenario_info.c.parent_database_url: parent})
            db.execute(statement)

        # Sample rows: text and float columns echo the integer key.
        granddad_vals = [{'integer_col': j, 'clob_col': str(j),
                          'smallinteger_col': j, 'float_col': float(j)}
                         for j in (0, 2, 4)]
        dad_vals = [{'integer_col': j, 'clob_col': str(j),
                     'smallinteger_col': j, 'float_col': float(j)}
                    for j in (0, 1, 2, 3, 4)]
        granddad_vals2 = [{'integer_col': j, 'varchar_col': str(j),
                           'boolean_col': True, 'numeric_col': float(j)}
                          for j in (0, 2, 4)]
        son_vals2 = [{'integer_col': j, 'varchar_col': str(j),
                      'boolean_col': False, 'numeric_col': float(j)}
                     for j in (0, 4)]

        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(), granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(), granddad_vals2)
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)

    def tearDown(self):
        # Close connections before dropping the databases they point at.
        self.db_chain_granddad.close()
        self.db_chain_dad.close()
        self.db_chain_son.close()

        for db_name in reversed(self.databases):
            self.server.drop_database(db_name)

        self.server.close()
        
Пример #36
0
class TestCombineTables(opus_unittest.OpusTestCase):
    """Tests for CombineTables: merging several identically-structured
    input tables into a single output table in a MySQL database."""

    def setUp(self):
        """Create a scratch database holding four input tables of known sizes."""
        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))
        self.db_name = 'test_combine_tables'
        # Drop any leftover database from a previous (crashed) run.
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

        # (table name, building_type marker) for each input table.
        self.from_tables = (
            ('table_a', 'A'),
            ('table_b', 'B'),
            ('table_c', 'C'),
            ('table_d', 'D'),
        )
        # job_ids inserted per table -> row counts of 1, 2, 4 and 8.
        table_ids = {
            'table_a': [1],
            'table_b': [2, 3],
            'table_c': [4, 5, 6, 7],
            'table_d': [8, 9, 10, 11, 12, 13, 14, 15],
        }
        self.to_table = 'to_table'

        for table, type in self.from_tables:
            self.db.DoQuery('CREATE TABLE %s (job_id INT, sector_id INT, '
                            'grid_id INT, sic INT, building_type varchar(5), '
                            'home_based tinyint(4), impute_flag tinyint(4));' %
                            table)

            values = ','.join([
                '(%(j)s, %(j)s, %(j)s, %(j)s, "%(type)s", %(j)s, %(j)s)' % {
                    'j': j,
                    'type': type
                } for j in table_ids[table]
            ])
            self.db.DoQuery(
                'INSERT INTO %(table_name)s (job_id, sector_id, '
                'grid_id, sic, building_type, home_based, impute_flag) VALUES '
                '%(values)s;' % {
                    'table_name': table,
                    'values': values,
                })
            # Sanity check: the INSERT populated exactly the expected rows.
            number_rows = self.db.GetResultsFromQuery(
                'SELECT count(*) FROM %s' % table)[1][0]
            self.assertEqual(number_rows, len(table_ids[table]))

    def tearDown(self):
        """Dispose of the scratch database."""
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        """The input tables must exist and the output table must not."""
        for table, type in self.from_tables:
            try:
                self.db.DoQuery('select * from %s;' % table)
            except:
                # BUG FIX: this previously interpolated the nonexistent
                # attribute self.from_table, raising AttributeError instead
                # of reporting which table was missing.
                self.fail(
                    'Expected input table %s did not exist. (Check setUp)' %
                    table)

        # BUG FIX: self.fail() used to sit inside the try block, so the
        # AssertionError it raises was swallowed by the bare except and the
        # "output table already exists" condition could never fail the test.
        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except:
            pass
        else:
            self.fail('Output table %s already exists. (Check setUp)' %
                      self.to_table)

    def test_create_table(self):
        """combine_tables must create the output table."""
        CombineTables().combine_tables(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name,
            [i[0] for i in self.from_tables], self.to_table)

        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except:
            self.fail('Output table %s not created.' % self.to_table)

    def test_combine_tables(self):
        """The output table must contain every row of every input table."""
        CombineTables().combine_tables(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name,
            [i[0] for i in self.from_tables], self.to_table)

        expected_rows = 0
        for table, type in self.from_tables:
            count = self.db.GetResultsFromQuery('select count(*) from %s' %
                                                table)[1:][0][0]
            expected_rows += count

        try:
            count = self.db.GetResultsFromQuery('select count(*) from %s;' %
                                                self.to_table)[1:][0][0]
        except:
            self.fail('Expected output table %s does not exist.' %
                      self.to_table)

        # On mismatch, dump all tables to the log to help debugging.
        if (expected_rows != count):
            for table_name, type in self.from_tables:
                self._print_table(table_name)
            self._print_table(self.to_table)

        self.assert_(
            expected_rows == count,
            'Incorrect number of rows in output. Expected %s; received %s.' %
            (expected_rows, count))

    def _print_table(self, table_name):
        """Provide debugging info to figure out why the above test is failing, sometimes."""
        try:
            results = self.db.GetResultsFromQuery('select * from %s' %
                                                  table_name)
            logger.start_block('Contents of table %s' % table_name)
            try:
                for row in results:
                    logger.log_status(row)
            finally:
                logger.end_block()
        except:
            logger.log_status('Error accessing table %s' % table_name)
            logger.log_stack_trace()
Пример #37
0
    def test__output_types(self):
        """Render one indicator through every available Table output type.

        Always exercises csv, tab and fixed_field; adds dbf when the dbfpy
        package is importable and sql when a test database server can be
        reached.  For file-based types the test asserts the rendered file
        exists on disk; for sql it asserts the output table was created.
        """
        output_types = ['csv', 'tab', 'fixed_field']
        try:
            import dbfpy
        except ImportError:
            pass
        else:
            # dbfpy is available, so the dbf writer can be exercised too.
            output_types.append('dbf')

        try:

            test_db_name = 'test_db_for_indicator_framework'
            database_config = DatabaseConfiguration(
                database_name=test_db_name,
                test=True,
            )

            server = DatabaseServer(database_config)
            server.drop_database(database_name=test_db_name)
            server.create_database(database_name=test_db_name)

        except:
            # No usable database server in this environment; skip sql output.
            has_sql = False
        else:
            has_sql = True
            output_types.append('sql')

        indicator = Indicator(dataset_name='test',
                              attribute='opus_core.test.attribute')

        maker = Maker(project_name='test', test=True)
        computed_indicators = maker.create_batch(
            indicators={'attr1': indicator}, source_data=self.source_data)

        for output_type in output_types:
            # Extra constructor arguments required by specific output types.
            kwargs = {}
            if output_type == 'sql':
                kwargs['storage_location'] = database_config
            elif output_type == 'fixed_field':
                kwargs[
                    'fixed_field_format'] = '<fixed_field><field name="attribute_1980" format="10f" /></fixed_field>'

            table = Table(
                indicator_directory=self.source_data.get_indicator_directory(),
                output_type=output_type,
                **kwargs)
            table._create_input_stores(self.source_data.years)
            viz_result = table.visualize(
                indicators_to_visualize=['attr1'],
                computed_indicators=computed_indicators)[0]
            if output_type in ['csv', 'dbf', 'tab', 'fixed_field']:
                # File-based output: the rendered file must exist on disk.
                self.assertTrue(
                    os.path.exists(
                        os.path.join(
                            viz_result.storage_location,
                            viz_result.table_name + '.' +
                            viz_result.file_extension)))
            elif output_type == 'sql':
                # SQL output: the table must exist in the test database.
                self.assertTrue(server.has_database(test_db_name))
                db = server.get_database(test_db_name)
                self.assertTrue(
                    db.table_exists(table_name=viz_result.table_name))
        if has_sql:
            # Remove the scratch database created above.
            server.drop_database(database_name=test_db_name)
Пример #38
0
class TestDBSubPattern(opus_unittest.OpusTestCase):
    """Tests for DBSubPattern: applying regex substitution patterns to the
    text columns of MySQL tables, at table, database, and multi-database
    granularity, with optional backup of the original tables."""

    def setUp(self):
        """Create two test databases, each containing three identical tables."""
        self.test_db_names = [
            'convert_database_test_db1',
            'convert_database_test_db2',
        ]

        self.test_table_names = [
            'table1',
            'table2',
            'table3',
        ]

        table_schema = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = ('(1,"Does not match P A T T E R N.","Matches pattern."),'
                      '(2,"Matches pattern.","Does not match P A T T E R N."),'
                      '(3,NULL,NULL),'
                      '(4,"","")')

        # Table contents before any conversion (header row first).
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "Does not match P A T T E R N.", "Matches pattern."],
            [2, "Matches pattern.", "Does not match P A T T E R N."],
            [3, None, None], [4, "", ""]
        ]

        # Two chained substitutions: the first turns 'pattern.' into
        # 'pattern well.', the second rewrites that intermediate result.
        self.patterns = [(r'(pattern)(\.)', r'\1 well\2'),
                         (r'^Matches pattern well\.$',
                          r'Matches pattern perfectly!')]

        # Expected contents after both patterns were applied; only row 1's
        # variable_name value matches both patterns.
        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1, "Does not match P A T T E R N.", "Matches pattern perfectly!"],
            [2, "Matches pattern.", "Does not match P A T T E R N."],
            [3, None, None], [4, "", ""]
        ]

        insert_items_template = ("insert into %(table)s values %(data)s;")

        # Map each database to the full list of its tables.
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]

        # Configuration consumed by convert_databases().
        self.config = {
            'databases': self.test_db_names,
            'tables': table_list,
            'backup': True,
            'backup_postfix': '_old',
        }

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))

        # Recreate each database from scratch and keep open handles to them.
        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]

        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' % (table_name, table_schema))

                db.DoQuery(insert_items_template % {
                    'table': table_name,
                    'data': table_data
                })

    def tearDown(self):
        """Drop the test databases and close all connections."""
        # NOTE(review): databases are dropped before their connections are
        # closed; the reverse order would be safer -- confirm the server
        # tolerates this.
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)

        for db in self.dbs:
            db.close()

        self.db_server.close()

    def test_convert_table(self):
        """convert_table must change only the one table it was given."""
        DBSubPattern().convert_table(self.dbs[0], self.test_table_names[0],
                                     self.patterns)

        db = self.dbs[0]

        # The converted table must show the substituted values.
        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(
            results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." %
            (table0, self.expected_output_changed, results))

        # Sibling tables in the same database must be untouched.
        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        # Tables in the other database must be untouched as well.
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (table, self.expected_output_unchanged, results))

    def test_convert_table_backup(self):
        """With backup enabled, a *_old copy of the original table is kept."""
        db = self.dbs[0]
        table = self.test_table_names[0]

        DBSubPattern().convert_table(db,
                                     table,
                                     self.patterns,
                                     backup=True,
                                     backup_postfix='_old')
        backup_table_name = '%s_old' % table
        try:
            results = db.GetResultsFromQuery('select * from %s' %
                                             backup_table_name)
        except:
            self.fail(
                "Backup failed for single table (%s) -- backup table (%s) not "
                "created." % (table, backup_table_name))

        # The backup must preserve the original (pre-conversion) contents.
        self.assert_(
            results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." %
            (table, self.expected_output_unchanged, results))

    def test_convert_database(self):
        """convert_database must change only the listed tables of one database."""
        DBSubPattern().convert_database(self.dbs[0],
                                        self.test_table_names[0:2],
                                        self.patterns)

        db = self.dbs[0]
        # The two listed tables must be converted.
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." %
                (table, self.expected_output_changed, results))

        # The remaining table must be untouched.
        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(
                results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." %
                (table, self.expected_output_unchanged, results))

        # All tables in the other databases must be untouched.
        for i in range(len(self.dbs[1:])):
            db = self.dbs[i + 1]

            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                    " table(s). Expected %s. Recieved %s." %
                    (i, table, self.expected_output_unchanged, results))

    def test_convert_databases(self):
        """convert_databases must convert every configured table everywhere."""
        DBSubPattern().convert_databases(
            TestDatabaseConfiguration(protocol='mysql'),
            self.config['databases'], self.config['tables'], self.patterns)

        # NOTE(review): the db handles fetched here are never closed.
        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)

            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(
                    results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                    " Expected %s. Recieved %s." %
                    (db_name, table, self.expected_output_changed, results))
Пример #39
0
class TestConvertDatabases(opus_unittest.OpusTestCase):
    """Tests for ConvertDatabase: rewriting 'opus.' variable references
    (e.g. 'opus.psrc.zone...' -> 'psrc.zone...') inside the text columns
    of MySQL tables, with optional backup of the originals."""

    def setUp(self):
        """Create two test databases, each containing three identical tables."""
        self.test_db_names = [
            'convert_database_test_db1', 
            'convert_database_test_db2',
            ]
        
        self.test_table_names = [
            'table1',
            'table2',
            'table3',
            ]
            
        table_structure = 'id INT, do_not_change_this_column TEXT, variable_name TEXT'
        table_data = (
            '(1,"No opus dot.","constant"),'
            '(2,"opus.core.miscellaneous","No opus dot."),'
            '(3,"opus.urbansim.gridcell.percent_water",'
                '"gc_cbd = gridcell.disaggregate('
                    'opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"),'
            '(4,"","")'
            )
            
        # Table contents before any conversion (header row first).
        self.expected_output_unchanged = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"No opus dot.","constant"],
            [2,"opus.core.miscellaneous","No opus dot."],
            [3,"opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                    "opus.psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"],
            [4,"",""]
            ]
        
        # Expected contents after conversion: only row 3's variable_name has
        # its 'opus.' prefix stripped; do_not_change_this_column is left alone.
        self.expected_output_changed = [
            ['id', 'do_not_change_this_column', 'variable_name'],
            [1,"No opus dot.","constant"],
            [2,"opus.core.miscellaneous","No opus dot."],
            [3,"opus.urbansim.gridcell.percent_water",
                "gc_cbd = gridcell.disaggregate("
                    "psrc.zone.generalized_cost_hbw_am_drive_alone_to_129)"],
            [4,"",""]
            ]
            
        insert_items_template = (
            "insert into %(table)s values %(data)s;")
        
        # Map each database to the full list of its tables.
        table_list = {}
        for db_name in self.test_db_names:
            table_list[db_name] = []
            for table in self.test_table_names:
                table_list[db_name] += [table]
        
        # Configuration consumed by convert_databases().
        self.config = {
            'databases':self.test_db_names,
            'tables':table_list,
            
            'backup':True,
            'backup_postfix':'_old',
            }
            
            
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        
        # Recreate each database from scratch and keep open handles to them.
        self.dbs = []
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            self.db_server.create_database(db_name)
            self.dbs += [self.db_server.get_database(db_name)]
        
        for db in self.dbs:
            for table_name in self.test_table_names:
                db.DoQuery('create table %s (%s)' 
                    % (table_name,
                       table_structure))
                
                db.DoQuery(insert_items_template 
                            % {'table':table_name, 'data':table_data})
                
        
    def tearDown(self):
        """Drop the test databases and close all connections."""
        # NOTE(review): databases are dropped before their connections are
        # closed; the reverse order would be safer -- confirm the server
        # tolerates this.
        for db_name in self.test_db_names:
            self.db_server.drop_database(db_name)
            
        for db in self.dbs:
            db.close()
            
        self.db_server.close()
        
        
    def test_convert_table(self):
        """convert_table must change only the one table it was given."""
        ConvertDatabase().convert_table(self.dbs[0], self.test_table_names[0])
        
        db = self.dbs[0]
        
        # The converted table must show the rewritten values.
        table0 = self.test_table_names[0]
        results = db.GetResultsFromQuery('select * from %s;' % table0)
        self.assert_(results == self.expected_output_changed,
            "Convert failed for single table (%s) -- incorrect conversion."
            " Expected %s. Recieved %s." 
                % (table0,
                   self.expected_output_changed,
                   results))
                       
        # Sibling tables in the same database must be untouched.
        for table in self.test_table_names[1:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for single table (%s) -- incorrect conversion."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        # Tables in the other database must be untouched as well.
        for db in self.dbs[1:]:
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for single table (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (table,
                               self.expected_output_unchanged,
                               results))
        
        
    def test_convert_table_backup(self):
        """With backup enabled, a *_old copy of the original table is kept."""
        db = self.dbs[0]
        table = self.test_table_names[0]
        
        ConvertDatabase().convert_table(db, table,
                                        backup=True, backup_postfix='_old')
        
        # The backup must preserve the original (pre-conversion) contents.
        results = db.GetResultsFromQuery('select * from %s_old;' % table)
        self.assert_(results == self.expected_output_unchanged,
            "Backup failed for single table (%s) -- changed contents."
            " Expected %s. Recieved %s." 
                % (table,
                   self.expected_output_unchanged,
                   results))
                   
                   
    def test_convert_database(self):
        """convert_database must change only the listed tables of one database."""
        ConvertDatabase().convert_database(self.dbs[0], 
            self.test_table_names[0:2])
        
        # The two listed tables must be converted.
        db = self.dbs[0]
        for table in self.test_table_names[0:2]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_changed,
                "Convert failed for database0 (%s) -- incorrect "
                "conversion. Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_changed,
                       results))
                       
        # The remaining table must be untouched.
        for table in self.test_table_names[2:]:
            results = db.GetResultsFromQuery('select * from %s;' % table)
            self.assert_(results == self.expected_output_unchanged,
                "Convert failed for database0 (%s) -- changed wrong table(s)."
                " Expected %s. Recieved %s." 
                    % (table,
                       self.expected_output_unchanged,
                       results))
        
        # All tables in the other databases must be untouched.
        for i in range(len(self.dbs[1:])):
            db = self.dbs[i+1]
            
            for table in self.test_table_names:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_unchanged,
                    "Convert failed for database%s (%s) -- converted wrong"
                        " table(s). Expected %s. Recieved %s." 
                            % (i,
                               table, 
                               self.expected_output_unchanged,
                               results))

                               
    def test_convert_databases(self):
        """convert_databases must convert every table configured for it."""
        ConvertDatabase().convert_databases(TestDatabaseConfiguration(protocol='mysql'), self.config)
        
        # NOTE(review): the db handles fetched here are never closed.
        for db_name in self.config['databases']:
            db = self.db_server.get_database(db_name)
            
            tables = self.config['tables'][db_name]
            for table in tables:
                results = db.GetResultsFromQuery('select * from %s;' % table)
                self.assert_(results == self.expected_output_changed,
                    "Convert failed %s (%s) -- incorrect conversion."
                        " Expected %s. Recieved %s." 
                            % (db_name,
                               table,
                               self.expected_output_changed,
                               results))
Пример #40
0
    def __init__(
            self,
            indicator_directory,
            name=None,
            output_type=None,
            storage_location=None,
            output_style=ALL,
            fixed_field_format=None,  # Only used with the 'fixed_field' output type
            **kwargs):
        """Configure a Table visualization.

        indicator_directory -- directory holding the computed indicators;
            also the default storage_location.
        name -- optional display name for the table.
        output_type -- one of 'dbf', 'csv', 'tab', 'sql', 'esri',
            'fixed_field', 'xls'.
        storage_location -- DatabaseConfiguration for 'sql'; a directory
            path string for file-based types (for 'xls' it is interpreted
            relative to indicator_directory).
        output_style -- Table.ALL, Table.PER_YEAR or Table.PER_ATTRIBUTE.
        fixed_field_format -- XML format string, required for 'fixed_field'.

        Raises Exception/ValueError on invalid argument combinations.
        """
        # --- Validate the output_type / storage_location combination. ---
        if output_type == 'sql' and not isinstance(storage_location,
                                                   DatabaseConfiguration):
            raise Exception(
                "If Table output_type is 'sql', a Database object must be passed as storage_location."
            )
        elif output_type in ['dbf', 'csv', 'tab', 'esri', 'fixed_field', 'xls'] and \
               storage_location is not None and \
               not isinstance(storage_location, str):
            raise Exception(
                "If Table output_type is %s, storage_location must be a path to the output directory"
                % output_type)
        elif output_type not in [
                'dbf', 'csv', 'tab', 'sql', 'esri', 'fixed_field', 'xls'
        ]:
            raise Exception(
                "Table output_type must be either dbf, csv, tab, sql, esri, fixed_field, xls, not %s"
                % output_type)

        if output_type == "fixed_field" and not fixed_field_format:
            raise ValueError(
                "If Table output_type is 'fixed_field', an XML format string must be passed as fixed_field_format."
            )

        self.fixed_field_format = fixed_field_format

        if output_style not in [
                Table.ALL, Table.PER_YEAR, Table.PER_ATTRIBUTE
        ]:
            # BUG FIX: this previously passed a tuple of three strings to
            # Exception, which rendered as a mangled tuple repr; build a
            # single readable message instead.
            raise Exception(
                '%s output_style is not appropriate. Choose from Table.ALL, '
                'Table.PER_YEAR, and Table.PER_ATTRIBUTE' % output_style)

        self.output_type = output_type
        self.output_style = output_style

        # --- Resolve the actual storage location. ---
        if storage_location is None:
            # Default: write next to the computed indicators.
            storage_location = indicator_directory
        elif output_type == 'sql':
            # Resolve the configuration to a live database, creating the
            # database on the server if it does not exist yet.
            server = DatabaseServer(
                database_server_configuration=storage_location)
            if not server.has_database(
                    database_name=storage_location.database_name):
                server.create_database(
                    database_name=storage_location.database_name)
            storage_location = server.get_database(
                database_name=storage_location.database_name)
        elif output_type == 'xls':
            storage_location = os.path.join(indicator_directory,
                                            storage_location)
            # We want clean output.  So remove the file if it exists
            if os.path.exists(storage_location):
                os.remove(storage_location)
        self.storage_location = storage_location

        self.output_storage = StorageFactory().get_storage(
            type='%s_storage' % (self.output_type),
            storage_location=storage_location)

        self.name = name
        self.indicator_directory = indicator_directory

        # Optional append_col_type argument.
        # NOTE(review): the fallback is the *string* 'True' when kwargs are
        # given without the key, but the boolean False when no kwargs are
        # passed at all; preserved as-is since downstream code may depend on
        # this asymmetry -- confirm intent.
        if kwargs:
            self.append_col_type = kwargs.get('append_col_type', 'True')
        else:
            self.append_col_type = False
    # NOTE(review): this fragment is the body of a command-line entry point;
    # `options` and `parser` come from an option parser defined above this
    # excerpt -- confirm against the full file.
    cache_path = options.cache_path
    database_name = options.database_name    
    # Both the cache path and the target database name are mandatory.
    if database_name is None or cache_path is None:
        parser.print_help()
        sys.exit(1)
    
    # Optional: restrict the export to a single table.
    table_name = options.table_name
    
    logger.log_status('Initializing database...')
    db_server = DatabaseServer(EstimationDatabaseConfiguration(
            database_name = database_name,
            database_configuration = options.database_configuration
            )
        )
    if not db_server.has_database(database_name): # if only one table should be exported,
        db_server.create_database(database_name)                            # the database can exist

    db = db_server.get_database(database_name)
   
    # Source: the flt cache on disk.  Destination: the SQL database.
    input_storage = flt_storage(storage_location = cache_path)
    
    output_storage = sql_storage(
                        storage_location = db)
            
    with logger.block('Exporting cache to sql...'):
        if table_name is None:
            # No table named: export the entire cache.
            ExportStorage().export(in_storage=input_storage, out_storage=output_storage)
        else:
            # Re-create just the named table (drop any stale copy first).
            db.drop_table(table_name)
            ExportStorage().export_dataset(table_name, in_storage=input_storage, out_storage=output_storage)        
Пример #42
0
class TestTravelModelInputFileWriter(opus_unittest.OpusIntegrationTestCase):
    def setUp(self):
        """Build a scratch database with all fixture tables plus a temp output dir."""
        self.database_name = 'test_travel_model_input_file_writer'
        self.dbconfig = TestDatabaseConfiguration()
        self.db_server = DatabaseServer(self.dbconfig)

        # Start from a clean database on every run.
        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        # Populate every fixture table the writer reads from.
        for populate in (self.create_jobs_table,
                         self.create_zones_table,
                         self.create_employment_sector_groups_table,
                         self.create_constant_taz_columns_table,
                         self.create_households_table):
            populate(self.database)

        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')

    def tearDown(self):
        """Drop the scratch database and delete the temporary output directory."""
        self.database.close()
        self.db_server.drop_database(self.database_name)
        temp_dir = self.tempdir_path
        if os.path.exists(temp_dir):
            rmtree(temp_dir)

    def test_create_tripgen_travel_model_input_file(self):
        """Run TravelModelInputFileWriter against the fixture database and
        compare the generated tazdata.ma2 values to hand-computed expectations
        for both zones."""

        in_storage = StorageFactory().get_storage(
            'sql_storage', storage_location=self.database)

        sc = SessionConfiguration(new_instance=True,
                                  package_order=['urbansim', 'psrc'],
                                  in_storage=in_storage)
        dataset_pool = sc.get_dataset_pool()
        #zone_set = dataset_pool.get_dataset('zone')
        #hh_set = dataset_pool.get_dataset('household')
        #job_set = dataset_pool.get_dataset('job')
        #taz_col_set = dataset_pool.get_dataset('constant_taz_column')

        # Write the year-2000 travel model input files into the temp dir.
        TravelModelInputFileWriter().run(self.tempdir_path, 2000, dataset_pool)

        logger.log_status('tazdata path: ', self.tempdir_path)
        # expected values - data format: {zone:{column_value:value}}
        expected_tazdata = {
            1: {
                101: 19.9,
                102: 2.,
                103: 0.,
                104: 1.,
                105: 0.,
                106: 3.,
                107: 11.,
                109: 1.,
                110: 0.,
                111: 0.,
                112: 0.,
                113: 0.,
                114: 0.,
                115: 0.,
                116: 0.,
                117: 0.,
                118: 0.,
                119: 0.,
                120: 2.,
                121: 42.,
                122: 0.,
                123: 0.,
                124: 11.
            },
            2: {
                101: 29.9,
                102: 0.,
                103: 2.,
                104: 1.,
                105: 3.,
                106: 1.,
                107: 3.,
                109: 0.,
                110: 0.,
                111: 0.,
                112: 0.,
                113: 3.,
                114: 0.,
                115: 0.,
                116: 0.,
                117: 0.,
                118: 1.,
                119: 1.,
                120: 0.,
                121: 241.,
                122: 0.,
                123: 0.,
                124: 3.
            }
        }

        # get real data from file
        real_tazdata = {1: {}, 2: {}}
        tazdata_file = open(
            os.path.join(self.tempdir_path, 'tripgen', 'inputtg',
                         'tazdata.ma2'), 'r')
        # Data lines start with whitespace; header/other lines are skipped.
        for a_line in tazdata_file.readlines():
            if a_line[0].isspace():
                numbers = a_line.replace(
                    ':', ' ').split()  # data line format:  1   101:  15.5
                zone_id = int(numbers[0])
                column_var = int(numbers[1])
                value = float(numbers[2])
                # -1 is a sentinel for "no value"; skip those entries.
                if value != -1:
                    real_tazdata[zone_id][column_var] = value

        # Compare every expected (zone, column) cell to three decimal places.
        for zone in expected_tazdata.keys():
            for col_var in expected_tazdata[zone].keys():
                self.assertAlmostEqual(real_tazdata[zone][col_var], expected_tazdata[zone][col_var], 3,\
                                       "zone %d, column variable %d did not match up."%(zone, col_var))

    def create_households_table(self, database):
        """Create and populate the 'households' fixture table (9 rows)."""
        database.drop_table("households")
        database.create_table_from_schema('households', {
            'household_id': 'INTEGER',
            'zone_id': 'INTEGER',
            'income': 'INTEGER',
            'year': 'INTEGER',
        })
        rows = [
            (1, 1, 10, 2000), (2, 1, 11, 2000), (3, 2, 12, 2000),
            (4, 2, 13, 2000), (5, 2, 14, 2000), (6, 1, 15, 2000),
            (7, 2, 16, 2000), (8, 2, 16, 2000), (9, 2, 17, 2000),
        ]
        records = [
            {'household_id': hid, 'zone_id': zid, 'income': inc, 'year': yr}
            for hid, zid, inc, yr in rows
        ]
        table = database.get_table('households')
        database.engine.execute(table.insert(), records)
        # 9 households total; incomes 10, 11, 12, 13, 14, 15, 16, 16, 17.
        # median = 14, lower-median = 11.5, upper-median = 16.
        # Households 1, 2 and 6 live in zone 1, the rest in zone 2.

    def create_zones_table(self, database):
        """Create the two-row 'zones' fixture table (zone_ids 1 and 2)."""
        database.drop_table('zones')
        database.create_table_from_schema('zones', {'zone_id': 'INTEGER'})
        table = database.get_table('zones')
        database.engine.execute(
            table.insert(),
            [{'zone_id': zone} for zone in (1, 2)])

    def create_employment_sector_groups_table(self, database):
        """Build the three employment-sector fixture tables: the sectors
        themselves, the named ad-hoc groups, and the sector-to-group
        membership table."""
        # Sectors 1..19.
        database.drop_table('employment_sectors')
        database.create_table_from_schema('employment_sectors',
                                          {'sector_id': 'INTEGER'})
        sector_rows = [{'sector_id': sector} for sector in range(1, 20)]
        database.engine.execute(
            database.get_table('employment_sectors').insert(), sector_rows)

        # Named ad-hoc sector groups.
        database.drop_table('employment_adhoc_sector_groups')
        database.create_table_from_schema('employment_adhoc_sector_groups',
                                          {'group_id': 'INTEGER', 'name': 'TEXT'})
        groups = [(2, 'retail'), (21, 'manu'), (22, 'wtcu'),
                  (24, 'fires'), (25, 'gov'), (26, 'edu')]
        group_rows = [{'group_id': gid, 'name': gname} for gid, gname in groups]
        database.engine.execute(
            database.get_table('employment_adhoc_sector_groups').insert(),
            group_rows)

        # Sector -> group membership.
        database.drop_table('employment_adhoc_sector_group_definitions')
        database.create_table_from_schema(
            'employment_adhoc_sector_group_definitions',
            {'sector_id': 'INTEGER', 'group_id': 'INTEGER'})
        membership = [(7, 2), (14, 2), (3, 21), (4, 21), (5, 21),
                      (6, 22), (8, 22), (9, 22), (10, 22), (11, 24),
                      (12, 24), (13, 24), (16, 24), (17, 24), (18, 25),
                      (15, 26), (19, 26)]
        definition_rows = [{'sector_id': sid, 'group_id': gid}
                           for sid, gid in membership]
        database.engine.execute(
            database.get_table(
                'employment_adhoc_sector_group_definitions').insert(),
            definition_rows)

    def create_jobs_table(self, database):
        """Build a 'jobs' fixture table: 4 jobs in zone 1, 5 in zone 2,
        all in year 2000."""
        database.drop_table('jobs')
        database.create_table_from_schema('jobs', {
            'job_id': 'INTEGER',
            'zone_id': 'INTEGER',
            'sector_id': 'INTEGER',
            'year': 'INTEGER',
        })

        # (job_id, zone_id, sector_id) triples; year is constant.
        job_specs = [(1, 1, 1), (2, 1, 3), (3, 1, 4), (4, 1, 7),
                     (5, 2, 9), (6, 2, 11), (7, 2, 15), (8, 2, 16),
                     (9, 2, 17)]
        rows = [{'job_id': jid, 'zone_id': zid, 'sector_id': sid, 'year': 2000}
                for jid, zid, sid in job_specs]
        jobs = database.get_table('jobs')
        database.engine.execute(jobs.insert(), rows)

    def create_constant_taz_columns_table(self, database):
        """Build a 'constant_taz_columns' fixture table with one row per TAZ."""
        database.drop_table('constant_taz_columns')
        database.create_table_from_schema('constant_taz_columns', {
            'TAZ': 'INTEGER',
            'PCTMF': 'FLOAT',
            'GQI': 'INTEGER',
            'GQN': 'INTEGER',
            'FTEUNIV': 'INTEGER',
            'DEN': 'INTEGER',
            'FAZ': 'INTEGER',
            'YEAR': 'INTEGER',
        })
        columns = ('TAZ', 'PCTMF', 'GQI', 'GQN', 'FTEUNIV', 'DEN', 'FAZ', 'YEAR')
        data = [(1, 19.9, 3, 11, 42, 1, 1, 2000),
                (2, 29.9, 1, 3, 241, 2, 2, 2000)]
        rows = [dict(zip(columns, record)) for record in data]
        constant_taz_columns = database.get_table('constant_taz_columns')
        database.engine.execute(constant_taz_columns.insert(), rows)
class TestTravelModelInputFileWriter(opus_unittest.OpusTestCase):
    """Test for TravelModelInputFileWriter with worker (persons) data.

    setUp builds a small scenario database (jobs, zones, sector groups,
    constant TAZ columns, households, persons); the test runs the writer
    for year 2000 and checks the generated per-income-group files.
    """

    def setUp(self):
        self.database_name = 'test_tm_input_file_writer_with_worker_files'

        self.dbconfig = TestDatabaseConfiguration()

        self.db_server = DatabaseServer(self.dbconfig)

        # Start from a clean database on every run.
        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        self.create_jobs_table(self.database)
        self.create_zones_table(self.database)
        self.create_employment_sector_groups_table(self.database)
        self.create_constant_taz_columns_table(self.database)
        self.create_households_table(self.database)
        self.create_persons_table(self.database)
        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')

    def tearDown(self):
        self.database.close()
        self.db_server.drop_database(self.database_name)
        # Fix: also close the server connection; it was previously leaked
        # (the other test classes in this module close it in tearDown).
        self.db_server.close()
        if os.path.exists(self.tempdir_path):
            rmtree(self.tempdir_path)

    def test_create_tripgen_travel_model_input_files(self):
        """Run the writer and compare the tazdata.mf9* files per income group."""
        in_storage = StorageFactory().get_storage(
            'sql_storage', storage_location=self.database)
        sc = SessionConfiguration(new_instance=True,
                                  package_order=['urbansim', 'psrc'],
                                  in_storage=in_storage)
        dataset_pool = sc.get_dataset_pool()

        TravelModelInputFileWriter().run(self.tempdir_path, 2000, dataset_pool)

        logger.log_status('tazdata path: ', self.tempdir_path)
        # Expected values per income group: lists of [zone_id, job_zone_id].
        expected_tazdata = {
            1: [[1, 1], [1, 2]],
            2: [[2, 2]],
            3: [],
            4: [[2, 2]]
        }
        # Read back real data from the generated files.
        real_tazdata = {1: [], 2: [], 3: [], 4: []}
        # income groups 1 to 4
        for i in [1, 2, 3, 4]:
            # Fix: close the file deterministically with a context manager
            # (it was previously opened and never closed).
            with open(os.path.join(self.tempdir_path, 'tripgen', 'inputtg',
                                   'tazdata.mf9%s' % i), 'r') as tazdata_file:
                for a_line in tazdata_file.readlines():
                    if a_line[0].isspace():
                        numbers = a_line.split()
                        zone_id = int(numbers[0])
                        job_zone_id = int(numbers[1])
                        real_tazdata[i].append([zone_id, job_zone_id])

        for group in expected_tazdata.keys():
            self.assertEqual(
                real_tazdata[group], expected_tazdata[group],
                "income group %d, columns did not match up." % group)

    def create_households_table(self, database):
        """Build a 'households' fixture table (with building_id)."""
        database.drop_table("households")
        schema = {
            'household_id': 'INTEGER',
            'zone_id': 'INTEGER',
            'income': 'INTEGER',
            'year': 'INTEGER',
            'building_id': 'INTEGER'
        }
        database.create_table_from_schema('households', schema)
        data = [(1, 1, 10, 2000, 1), (2, 1, 11, 2000, 2), (3, 2, 12, 2000, 4),
                (4, 2, 13, 2000, 4), (5, 2, 14, 2000, 5), (6, 1, 15, 2000, 1),
                (7, 2, 16, 2000, 5), (8, 2, 16, 2000, 6), (9, 2, 17, 2000, 7)]
        values = [{'household_id': a, 'zone_id': b, 'income': c, 'year': d,
                   'building_id': e} for a, b, c, d, e in data]
        households = database.get_table('households')
        database.engine.execute(households.insert(), values)
        # 9 households total; incomes: 10, 11, 12, 13, 14, 15, 16, 16, 17.
        # median = 14, lower median = 11.5, upper median = 16.
        # Households 1, 2 and 6 are in zone 1.

    def create_persons_table(self, database):
        """Build a 'persons' fixture table linking households to jobs.

        job_id of -1 means no job; is_worker / work_at_home are 0/1 flags.
        """
        database.drop_table("persons")
        schema = {
            'person_id': 'INTEGER',
            'household_id': 'FLOAT',
            'job_id': 'INTEGER',
            'is_worker': 'INTEGER',
            'work_at_home': 'INTEGER'
        }
        database.create_table_from_schema('persons', schema)
        data = [(1, 1, 3, 1, 0), (2, 4, 8, 1, 0), (3, 1, 9, 1, 0),
                (4, 7, 2, 1, 1), (5, 6, -1, 1, 0), (6, 9, 6, 1, 0),
                (7, 9, -1, 0, 0), (8, 2, 1, 1, 1), (9, 2, 4, 1, 1)]
        values = [{'person_id': z, 'household_id': a, 'job_id': b,
                   'is_worker': c, 'work_at_home': d} for z, a, b, c, d in data]

        persons = database.get_table('persons')
        database.engine.execute(persons.insert(), values)

    def create_zones_table(self, database):
        """Build a 'zones' fixture table holding zones 1 and 2."""
        database.drop_table('zones')
        schema = {
            'zone_id': 'INTEGER',
        }
        database.create_table_from_schema('zones', schema)

        zones = database.get_table('zones')
        values = [{'zone_id': 1}, {'zone_id': 2}]
        database.engine.execute(zones.insert(), values)

    def create_employment_sector_groups_table(self, database):
        """Build the sector, sector-group and group-membership fixture tables."""
        # Sectors 1..19.
        database.drop_table('employment_sectors')
        schema = {
            'sector_id': 'INTEGER',
        }
        database.create_table_from_schema('employment_sectors', schema)
        values = [{'sector_id': i} for i in range(1, 20)]
        employment_sectors = database.get_table('employment_sectors')
        database.engine.execute(employment_sectors.insert(), values)

        # Named ad-hoc sector groups.
        database.drop_table('employment_adhoc_sector_groups')
        schema = {'group_id': 'INTEGER', 'name': 'TEXT'}
        database.create_table_from_schema('employment_adhoc_sector_groups',
                                          schema)
        values = [{'group_id': a, 'name': b}
                  for a, b in [(2, 'retail'), (21, 'manu'), (22, 'wtcu'),
                               (24, 'fires'), (25, 'gov'), (26, 'edu')]]
        groups_table = database.get_table('employment_adhoc_sector_groups')
        database.engine.execute(groups_table.insert(), values)

        # Sector -> group membership.
        schema = {
            'sector_id': 'INTEGER',
            'group_id': 'INTEGER',
        }
        database.drop_table('employment_adhoc_sector_group_definitions')
        database.create_table_from_schema(
            'employment_adhoc_sector_group_definitions', schema)
        values = [{'sector_id': a, 'group_id': b}
                  for a, b in [(7, 2), (14, 2), (3, 21), (4, 21), (5, 21),
                               (6, 22), (8, 22), (9, 22), (10, 22), (11, 24),
                               (12, 24), (13, 24), (16, 24), (17, 24),
                               (18, 25), (15, 26), (19, 26)]]
        definitions_table = database.get_table(
            'employment_adhoc_sector_group_definitions')
        database.engine.execute(definitions_table.insert(), values)

    def create_jobs_table(self, database):
        """Build a 'jobs' fixture table: 4 jobs in zone 1, 5 in zone 2."""
        database.drop_table('jobs')
        schema = {
            'job_id': 'INTEGER',
            'zone_id': 'INTEGER',
            'sector_id': 'INTEGER',
            'year': 'INTEGER',
        }
        database.create_table_from_schema('jobs', schema)

        # (job_id, zone_id, sector_id) triples; year is constant.
        job_specs = [(1, 1, 1), (2, 1, 3), (3, 1, 4), (4, 1, 7),
                     (5, 2, 9), (6, 2, 11), (7, 2, 15), (8, 2, 16),
                     (9, 2, 17)]
        values = [{'job_id': a, 'zone_id': b, 'sector_id': c, 'year': 2000}
                  for a, b, c in job_specs]
        jobs = database.get_table('jobs')
        database.engine.execute(jobs.insert(), values)

    def create_constant_taz_columns_table(self, database):
        """Build a 'constant_taz_columns' fixture table with one row per TAZ."""
        database.drop_table('constant_taz_columns')
        schema = {
            'TAZ': 'INTEGER',
            'PCTMF': 'FLOAT',
            'GQI': 'INTEGER',
            'GQN': 'INTEGER',
            'FTEUNIV': 'INTEGER',
            'DEN': 'INTEGER',
            'FAZ': 'INTEGER',
            'YEAR': 'INTEGER',
        }
        database.create_table_from_schema('constant_taz_columns', schema)
        values = [{'TAZ': a, 'PCTMF': b, 'GQI': c, 'GQN': d, 'FTEUNIV': e,
                   'DEN': f, 'FAZ': g, 'YEAR': h}
                  for a, b, c, d, e, f, g, h in
                  [(1, 19.9, 3, 11, 42, 1, 1, 2000),
                   (2, 29.9, 1, 3, 241, 2, 2, 2000)]]
        constant_taz_columns = database.get_table('constant_taz_columns')
        database.engine.execute(constant_taz_columns.insert(), values)
Example #44
0
class TestRenameBySwappingEmploymentAndCommercialOrIndustrialForElcm(opus_unittest.OpusTestCase):
    """Tests the ELCM table-renaming tool: 'employment_<type>_...' tables
    should be renamed to '<type>_employment_...' while unrelated tables are
    left untouched."""

    def setUp(self):
        self.db_name = 'test_rename_commercial_and_industrial'

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

        # Tables the tool is expected to rename.
        self.tables_to_convert = (
            'employment_commercial_location_choice_model_specification',
            'employment_commercial_location_choice_model_coefficients',
            'employment_industrial_location_choice_model_specification',
            'employment_industrial_location_choice_model_coefficients',
            )
        # Tables the tool must leave alone.
        self.other_tables = (
            'i_am_not_to_be_renamed_location_choice_model_specifications',
            'i_am_not_to_be_renamed_location_choice_model_coefficients',
            'employment_industrial_location_choice_model_coefficients_old',
            )
        # Names the converted tables should end up with.
        self.output_tables = (
            'commercial_employment_location_choice_model_specification',
            'commercial_employment_location_choice_model_coefficients',
            'industrial_employment_location_choice_model_specification',
            'industrial_employment_location_choice_model_coefficients',
            )

        for table_name in self.tables_to_convert + self.other_tables:
            self.db.DoQuery('CREATE TABLE %s (id INT);' % table_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        """Sanity-check the fixture built by setUp."""
        for table_name in self.tables_to_convert + self.other_tables:
            if not self.db.table_exists(table_name):
                self.fail('Expected test table %s did not exist. (Check setUp)' 
                    % table_name)

        for table_name in self.output_tables:
            if self.db.table_exists(table_name):
                self.fail('Unexpected results table %s exists. (Check setUp)' 
                    % table_name)

    def test_rename_tables(self):
        """Run the renamer and verify inputs are gone, outputs exist and
        unrelated tables survive."""
        renamer = RenameBySwappingEmploymentAndCommercialOrIndustrialOrHomeBasedForElcm()
        renamer.rename_by_swapping_employment_and_commercial_or_industrial_or_home_based_for_elcm(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)

        for table_name in self.output_tables + self.other_tables:
            if not self.db.table_exists(table_name):
                self.fail('Expected output table %s does not exist.' % table_name)

        for table_name in self.tables_to_convert:
            if self.db.table_exists(table_name):
                self.fail('Input table %s still exists.' % table_name)
Example #45
0
class TestTravelModelInputFileWriter(opus_unittest.OpusIntegrationTestCase):
    """Integration test for TravelModelInputFileWriter.

    setUp builds a small scenario database (jobs, zones, sector groups,
    constant TAZ columns, households); the test runs the writer for year
    2000 and compares the generated tazdata.ma2 against hand-computed
    expectations.
    """

    def setUp(self):
        self.database_name = 'test_travel_model_input_file_writer'

        self.dbconfig = TestDatabaseConfiguration()

        self.db_server = DatabaseServer(self.dbconfig)

        # Start from a clean database on every run.
        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        self.create_jobs_table(self.database)
        self.create_zones_table(self.database)
        self.create_employment_sector_groups_table(self.database)
        self.create_constant_taz_columns_table(self.database)
        self.create_households_table(self.database)
        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')

    def tearDown(self):
        self.database.close()
        self.db_server.drop_database(self.database_name)
        # Fix: also close the server connection; it was previously leaked
        # (the other test classes in this module close it in tearDown).
        self.db_server.close()
        if os.path.exists(self.tempdir_path):
            rmtree(self.tempdir_path)

    def test_create_tripgen_travel_model_input_file(self):
        """Run the writer and compare tazdata.ma2 against expected values."""
        in_storage = StorageFactory().get_storage(
            'sql_storage',
            storage_location=self.database)

        sc = SessionConfiguration(new_instance=True,
                                  package_order=['urbansim', 'psrc'],
                                  in_storage=in_storage)
        dataset_pool = sc.get_dataset_pool()

        TravelModelInputFileWriter().run(self.tempdir_path, 2000, dataset_pool)

        logger.log_status('tazdata path: ', self.tempdir_path)
        # Expected values - data format: {zone: {column_variable: value}}.
        expected_tazdata = {1: {101: 19.9,
                                102: 2., 103: 0., 104: 1., 105: 0.,
                                106: 3., 107: 11., 109: 1.,
                                110: 0., 111: 0., 112: 0., 113: 0., 114: 0.,
                                115: 0., 116: 0., 117: 0., 118: 0., 119: 0.,
                                120: 2., 121: 42., 122: 0., 123: 0., 124: 11.},
                            2: {101: 29.9,
                                102: 0., 103: 2., 104: 1., 105: 3.,
                                106: 1., 107: 3., 109: 0.,
                                110: 0., 111: 0., 112: 0., 113: 3., 114: 0.,
                                115: 0., 116: 0., 117: 0., 118: 1., 119: 1.,
                                120: 0., 121: 241., 122: 0., 123: 0., 124: 3.}}

        # Read back real data from the generated file; values of -1 are skipped.
        real_tazdata = {1: {}, 2: {}}
        # Fix: close the file deterministically with a context manager
        # (it was previously opened and never closed).
        with open(os.path.join(self.tempdir_path, 'tripgen', 'inputtg',
                               'tazdata.ma2'), 'r') as tazdata_file:
            for a_line in tazdata_file.readlines():
                if a_line[0].isspace():
                    # data line format:  1   101:  15.5
                    numbers = a_line.replace(':', ' ').split()
                    zone_id = int(numbers[0])
                    column_var = int(numbers[1])
                    value = float(numbers[2])
                    if value != -1:
                        real_tazdata[zone_id][column_var] = value

        for zone in expected_tazdata.keys():
            for col_var in expected_tazdata[zone].keys():
                self.assertAlmostEqual(
                    real_tazdata[zone][col_var], expected_tazdata[zone][col_var], 3,
                    "zone %d, column variable %d did not match up." % (zone, col_var))

    def create_households_table(self, database):
        """Build a 'households' fixture table."""
        database.drop_table("households")
        schema = {
                  'household_id': 'INTEGER',
                  'zone_id': 'INTEGER',
                  'income': 'INTEGER',
                  'year': 'INTEGER',
        }
        database.create_table_from_schema('households', schema)
        data = [(1, 1, 10, 2000), (2, 1, 11, 2000), (3, 2, 12, 2000),
                (4, 2, 13, 2000), (5, 2, 14, 2000), (6, 1, 15, 2000),
                (7, 2, 16, 2000), (8, 2, 16, 2000), (9, 2, 17, 2000)]
        values = [{'household_id': a, 'zone_id': b, 'income': c, 'year': d}
                  for a, b, c, d in data]
        households = database.get_table('households')
        database.engine.execute(households.insert(), values)
        # 9 households total; incomes: 10, 11, 12, 13, 14, 15, 16, 16, 17.
        # median = 14, lower median = 11.5, upper median = 16.
        # Households 1, 2 and 6 are in zone 1.

    def create_zones_table(self, database):
        """Build a 'zones' fixture table holding zones 1 and 2."""
        database.drop_table('zones')
        schema = {
                  'zone_id': 'INTEGER',
        }
        database.create_table_from_schema('zones', schema)

        zones = database.get_table('zones')
        values = [{'zone_id': 1}, {'zone_id': 2}]
        database.engine.execute(zones.insert(), values)

    def create_employment_sector_groups_table(self, database):
        """Build the sector, sector-group and group-membership fixture tables."""
        # Sectors 1..19.
        database.drop_table('employment_sectors')
        schema = {
                  'sector_id': 'INTEGER',
        }
        database.create_table_from_schema('employment_sectors', schema)
        values = [{'sector_id': i} for i in range(1, 20)]
        employment_sectors = database.get_table('employment_sectors')
        database.engine.execute(employment_sectors.insert(), values)

        # Named ad-hoc sector groups.
        database.drop_table('employment_adhoc_sector_groups')
        schema = {
                  'group_id': 'INTEGER',
                  'name': 'TEXT'
        }
        database.create_table_from_schema('employment_adhoc_sector_groups', schema)
        values = [{'group_id': a, 'name': b}
                  for a, b in [(2, 'retail'), (21, 'manu'), (22, 'wtcu'),
                               (24, 'fires'), (25, 'gov'), (26, 'edu')]]
        groups_table = database.get_table('employment_adhoc_sector_groups')
        database.engine.execute(groups_table.insert(), values)

        # Sector -> group membership.
        schema = {
                  'sector_id': 'INTEGER',
                  'group_id': 'INTEGER',
        }
        database.drop_table('employment_adhoc_sector_group_definitions')
        database.create_table_from_schema('employment_adhoc_sector_group_definitions', schema)
        values = [{'sector_id': a, 'group_id': b}
                  for a, b in [(7, 2), (14, 2), (3, 21), (4, 21), (5, 21),
                               (6, 22), (8, 22), (9, 22), (10, 22), (11, 24),
                               (12, 24), (13, 24), (16, 24), (17, 24),
                               (18, 25), (15, 26), (19, 26)]]
        definitions_table = database.get_table('employment_adhoc_sector_group_definitions')
        database.engine.execute(definitions_table.insert(), values)

    def create_jobs_table(self, database):
        """Build a 'jobs' fixture table: 4 jobs in zone 1, 5 in zone 2."""
        database.drop_table('jobs')
        schema = {
                  'job_id': 'INTEGER',
                  'zone_id': 'INTEGER',
                  'sector_id': 'INTEGER',
                  'year': 'INTEGER',
        }
        database.create_table_from_schema('jobs', schema)

        # (job_id, zone_id, sector_id) triples; year is constant.
        job_specs = [(1, 1, 1), (2, 1, 3), (3, 1, 4), (4, 1, 7),
                     (5, 2, 9), (6, 2, 11), (7, 2, 15), (8, 2, 16),
                     (9, 2, 17)]
        values = [{'job_id': a, 'zone_id': b, 'sector_id': c, 'year': 2000}
                  for a, b, c in job_specs]
        jobs = database.get_table('jobs')
        database.engine.execute(jobs.insert(), values)

    def create_constant_taz_columns_table(self, database):
        """Build a 'constant_taz_columns' fixture table with one row per TAZ."""
        database.drop_table('constant_taz_columns')
        schema = {
                  'TAZ': 'INTEGER',
                  'PCTMF': 'FLOAT',
                  'GQI': 'INTEGER',
                  'GQN': 'INTEGER',
                  'FTEUNIV': 'INTEGER',
                  'DEN': 'INTEGER',
                  'FAZ': 'INTEGER',
                  'YEAR': 'INTEGER',
        }
        database.create_table_from_schema('constant_taz_columns', schema)
        values = [{'TAZ': a, 'PCTMF': b, 'GQI': c, 'GQN': d, 'FTEUNIV': e,
                   'DEN': f, 'FAZ': g, 'YEAR': h}
                  for a, b, c, d, e, f, g, h in
                  [(1, 19.9, 3, 11, 42, 1, 1, 2000),
                   (2, 29.9, 1, 3, 241, 2, 2, 2000)]]
        constant_taz_columns = database.get_table('constant_taz_columns')
        database.engine.execute(constant_taz_columns.insert(), values)
Example #46
0
class TestCombineTables(opus_unittest.OpusTestCase):
    """Tests CombineTables: rows from several identically-shaped input
    tables are merged into a single output table."""

    def setUp(self):
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_name = 'test_combine_tables'
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

        # (table_name, building_type marker) pairs for the input fixtures.
        self.from_tables = (
            ('table_a', 'A'), 
            ('table_b', 'B'),
            ('table_c', 'C'),
            ('table_d', 'D'),
            )
        # job_ids per table: 1 + 2 + 4 + 8 = 15 rows in total.
        table_ids = {
            'table_a': [1], 
            'table_b': [2, 3],
            'table_c': [4, 5, 6, 7],
            'table_d': [8, 9, 10, 11, 12, 13, 14, 15],
            }
        self.to_table = 'to_table'

        # Renamed loop variable: the original used `type`, shadowing the builtin.
        for table, building_type in self.from_tables:
            self.db.DoQuery('CREATE TABLE %s (job_id INT, sector_id INT, '
                'grid_id INT, sic INT, building_type varchar(5), '
                'home_based tinyint(4), impute_flag tinyint(4));' 
                    % table)

            values = ','.join(
                ['(%(j)s, %(j)s, %(j)s, %(j)s, "%(type)s", %(j)s, %(j)s)' 
                    % {'j': j, 'type': building_type} for j in table_ids[table]]
                )
            self.db.DoQuery('INSERT INTO %(table_name)s (job_id, sector_id, '
                'grid_id, sic, building_type, home_based, impute_flag) VALUES '
                '%(values)s;' 
                    % {'table_name': table,
                       'values': values,
                      }
                )
            # Sanity-check that every row actually arrived.
            number_rows = self.db.GetResultsFromQuery('SELECT count(*) FROM %s' % table)[1][0]
            self.assertEqual(number_rows, len(table_ids[table]))

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        """Sanity-check the fixture built by setUp."""
        for table, _building_type in self.from_tables:
            try:
                self.db.DoQuery('select * from %s;' % table)
            except Exception:
                # Bug fix: the original formatted the message with the
                # nonexistent attribute `self.from_table`, which would
                # raise AttributeError instead of a useful failure.
                self.fail('Expected input table %s did not exist. (Check setUp)' 
                    % table)

        # Bug fix: the original called self.fail() inside a `try` whose
        # bare `except: pass` swallowed the AssertionError, so the
        # "output table must not pre-exist" check could never fail.
        output_exists = True
        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except Exception:
            output_exists = False
        if output_exists:
            self.fail('Output table %s already exists. (Check setUp)' 
                % self.to_table)

    def test_create_table(self):
        """The output table must exist after combine_tables() runs."""
        CombineTables().combine_tables(TestDatabaseConfiguration(protocol='mysql'), self.db_name, 
            [i[0] for i in self.from_tables], 
            self.to_table)

        try:
            self.db.DoQuery('select * from %s;' % self.to_table)
        except Exception:
            self.fail('Output table %s not created.' % self.to_table)

    def test_combine_tables(self):
        """The output table must contain the union of all input rows."""
        CombineTables().combine_tables(TestDatabaseConfiguration(protocol='mysql'), self.db_name, 
            [i[0] for i in self.from_tables], 
            self.to_table)

        expected_rows = 0
        for table, _building_type in self.from_tables:
            count = self.db.GetResultsFromQuery('select count(*) from %s' 
                % table)[1:][0][0]
            expected_rows += count

        try:
            count = self.db.GetResultsFromQuery('select count(*) from %s;' 
                % self.to_table)[1:][0][0]
        except Exception:
            self.fail('Expected output table %s does not exist.' % self.to_table)

        # Dump table contents to the log before failing, for debugging.
        if (expected_rows != count):
            for table_name, _building_type in self.from_tables:
                self._print_table(table_name)
            self._print_table(self.to_table)

        # Modernized from the deprecated unittest alias `assert_`.
        self.assertTrue(expected_rows == count,
            'Incorrect number of rows in output. Expected %s; received %s.'
                % (expected_rows, count))

    def _print_table(self, table_name):
        """Provide debugging info to figure out why the above test is failing, sometimes."""
        try:
            results = self.db.GetResultsFromQuery('select * from %s' % table_name)
            logger.start_block('Contents of table %s' % table_name)
            try:
                for row in results:
                    logger.log_status(row)
            finally:
                logger.end_block()
        except Exception:
            logger.log_status('Error accessing table %s' % table_name)
            logger.log_stack_trace()
Example #47
0
class DatabaseManagementTestInterface(opus_unittest.OpusTestCase):
    """Shared fixture for database-management tests.

    Builds a three-level chain of databases (son -> dad -> granddad) linked
    through each database's ``scenario_information.parent_database_url``
    value, creates overlapping schema tables at the different levels, and
    seeds them with known rows so subclasses can exercise cross-database
    lookup logic.
    """

    def setUp(self):
        # Single source of truth for the databases this fixture manages;
        # tearDown drops exactly this set.
        self.databases = ['db_chain_son', 'db_chain_dad', 'db_chain_granddad']

        self.config = TestDatabaseConfiguration(
            protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        self.db_chain_granddad = self.server.get_database('db_chain_granddad')
        self.db_chain_dad = self.server.get_database('db_chain_dad')
        self.db_chain_son = self.server.get_database('db_chain_son')

        self._create_tables()
        self.granddad_schema = self.db_chain_granddad.get_table('base_schema')
        self.dad_schema = self.db_chain_dad.get_table('base_schema')
        self.granddad_schema2 = self.db_chain_granddad.get_table(
            'base_schema2')
        self.son_schema2 = self.db_chain_son.get_table('base_schema2')

        self._seed_values()

    def _create_databases(self):
        """(Re)create every test database from a clean slate."""
        for db_name in self.databases:
            try:
                self.server.drop_database(db_name)
            except Exception:
                # Database may simply not exist yet; best-effort drop.
                # (Was a bare `except:`; narrowed to Exception.)
                pass
            self.server.create_database(db_name)

    def _create_tables(self):
        """Create the schema tables each database in the chain exposes.

        granddad: base_schema + base_schema2; dad: base_schema only;
        son: base_schema2 only.  All three get scenario_information.
        """
        self.db_chain_granddad.create_table_from_schema(
            'base_schema', base_schema)
        self.db_chain_granddad.create_table_from_schema(
            'base_schema2', base_schema2)
        self.db_chain_granddad.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

        self.db_chain_dad.create_table_from_schema('base_schema', base_schema)
        self.db_chain_dad.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

        self.db_chain_son.create_table_from_schema('base_schema2',
                                                   base_schema2)
        self.db_chain_son.create_table_from_schema(
            'scenario_information', base_scenario_information_schema)

    def _set_parent_database_url(self, db, url):
        """Point *db* at its parent by seeding scenario_information."""
        table = db.get_table('scenario_information')
        insert = table.insert(values={table.c.parent_database_url: url})
        db.execute(insert)

    @staticmethod
    def _base_schema_row(i):
        """One base_schema row whose every column encodes the integer *i*."""
        return {
            'integer_col': i,
            'clob_col': str(i),
            'smallinteger_col': i,
            'float_col': float(i),
        }

    @staticmethod
    def _base_schema2_row(i, flag):
        """One base_schema2 row for *i*; *flag* fills boolean_col."""
        return {
            'integer_col': i,
            'varchar_col': str(i),
            'boolean_col': flag,
            'numeric_col': float(i),
        }

    def _seed_values(self):
        """Seed the parent-database links and the schema tables."""
        # Chain: son -> dad -> granddad -> '' (root has no parent).
        self._set_parent_database_url(self.db_chain_granddad, '')
        self._set_parent_database_url(self.db_chain_dad, 'db_chain_granddad')
        self._set_parent_database_url(self.db_chain_son, 'db_chain_dad')

        # base_schema: granddad holds the even keys, dad holds all of 0-4,
        # so dad overrides/extends granddad's rows.
        granddad_vals = [self._base_schema_row(i) for i in (0, 2, 4)]
        dad_vals = [self._base_schema_row(i) for i in range(5)]

        # base_schema2: granddad's rows are flagged True, son's False,
        # so tests can tell which level a row came from.
        granddad_vals2 = [self._base_schema2_row(i, True) for i in (0, 2, 4)]
        son_vals2 = [self._base_schema2_row(i, False) for i in (0, 4)]

        self.db_chain_granddad.engine.execute(self.granddad_schema.insert(),
                                              granddad_vals)
        self.db_chain_granddad.engine.execute(self.granddad_schema2.insert(),
                                              granddad_vals2)
        self.db_chain_dad.engine.execute(self.dad_schema.insert(), dad_vals)
        self.db_chain_son.engine.execute(self.son_schema2.insert(), son_vals2)

    def tearDown(self):
        self.db_chain_granddad.close()
        self.db_chain_dad.close()
        self.db_chain_son.close()

        # Drop exactly the databases setUp created (previously hard-coded,
        # duplicating self.databases).
        for db_name in self.databases:
            self.server.drop_database(db_name)

        self.server.close()
    cache_path = options.cache_path
    database_name = options.database_name
    if database_name is None or cache_path is None:
        parser.print_help()
        sys.exit(1)

    table_name = options.table_name

    logger.log_status('Initializing database...')
    db_server = DatabaseServer(
        EstimationDatabaseConfiguration(
            database_name=database_name,
            database_configuration=options.database_configuration))
    if not db_server.has_database(
            database_name):  # if only one table should be exported,
        db_server.create_database(database_name)  # the database can exist

    db = db_server.get_database(database_name)

    input_storage = flt_storage(storage_location=cache_path)

    output_storage = sql_storage(storage_location=db)

    logger.start_block('Exporting cache to sql...')
    try:
        if table_name is None:
            ExportStorage().export(in_storage=input_storage,
                                   out_storage=output_storage)
        else:
            db.drop_table(table_name)
            ExportStorage().export_dataset(table_name,