def prepare_for_simulation(self, run_configuration, cache_directory=None):
        self.config = Resources(run_configuration)
        self.simulation_state = SimulationState(
            new_instance=True, base_cache_dir=cache_directory, start_time=self.config.get("base_year", 0)
        )

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config["cache_directory"] is None:
            self.config["cache_directory"] = self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config["dataset_pool_configuration"].package_order,
            in_storage=AttributeCache(),
        )
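        # Cache the scenario database into the base-year cache in a separate,
        # forked process, passing the full run configuration along.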

        ForkProcess().fork_new_process(
            self.config["creating_baseyear_cache_configuration"].cache_scenario_database, self.config
        )

        # Create output database (normally done by run manager)
        if "estimation_database_configuration" in self.config:
            db_server = DatabaseServer(self.config["estimation_database_configuration"])
            if not db_server.has_database(self.config["estimation_database_configuration"].database_name):
                db_server.create_database(self.config["estimation_database_configuration"].database_name)
Example #2
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    households_table_name = 'raw_pums_hh_data'
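    # Note: MySQL evaluates the condition below left to right, i.e. as
    # "(h.persons = '00') IS NULL", which is true exactly when h.persons is
    # NULL, so the statement removes households with a missing persons value.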

    query = "DELETE h.* FROM %s AS h WHERE h.persons = '00' IS NULL" % (
        households_table_name)

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)
    # Do Query
    logCB("Deleting empty household records...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
Example #3
    def combine_tables(self, db_config, db_name, from_tables_names, to_table_name):
        dbconfig = DatabaseServerConfiguration(
            host_name = db_config.host_name,
            protocol = 'mysql',
            user_name = db_config.user_name,
            password = db_config.password                                       
        )
        db_server = DatabaseServer(dbconfig)
        
        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name
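        # Build "(SELECT * FROM t1) UNION ALL (SELECT * FROM t2) ..." and
        # materialize it with MySQL's CREATE TABLE ... <query>, so that
        # to_table_name ends up holding every row of every source table.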

        union_statements = []
        for from_table_name in from_tables_names:
            union_statements.append('(SELECT * FROM %s)' % from_table_name)
            
        create_table_query = "CREATE TABLE %s " % to_table_name
        create_table_query += ' UNION ALL '.join(union_statements)
        create_table_query += ';'
        
        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % to_table_name)
            db.DoQuery(create_table_query)
        except:
            raise NameError, "Unknown or invalid table specified!"
Example #4
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    query = param_dict['query']

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    # Do Query
    logCB("Running Query...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
Example #5
    def convert_databases(self, db_config, config):
        databases = config['databases']
        tables = config['tables']
        
        try: backup = config['backup']
        except KeyError: backup = True
        
        try: backup_postfix = config['backup_postfix']
        except KeyError: backup_postfix = '_old'
        
        dbconfig = DatabaseServerConfiguration(
            protocol = 'mysql',
            host_name = db_config.host_name,
            user_name = db_config.user_name,
            password = db_config.password                                       
        )
        db_server = DatabaseServer(dbconfig)
        
        for db_name in databases:
            db = db_server.get_database(db_name)

            self.convert_database(db, tables[db_name], backup, backup_postfix)
                
            db.close()
            
        db_server.close()
def opusRun(progressCB,logCB,params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)
        
    
    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    households_table_name = 'raw_pums_hh_data'
    
    query = "DELETE h.* FROM %s AS h WHERE h.persons = '00' IS NULL" % (households_table_name)

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opus_db = server.get_database(database_name=database_name)   
    # Do Query       
    logCB("Deleting empty household records...\n")
    opus_db.execute(query)
    
    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
Example #7
    def convert_databases(self, db_config, config):
        databases = config['databases']
        tables = config['tables']

        try:
            backup = config['backup']
        except KeyError:
            backup = True

        try:
            backup_postfix = config['backup_postfix']
        except KeyError:
            backup_postfix = '_old'

        dbconfig = DatabaseServerConfiguration(protocol='mysql',
                                               host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)

            self.convert_database(db, tables[db_name], backup, backup_postfix)

            db.close()

        db_server.close()
    def create_building_types_table(self, db_config, db_name):
        table_name = 'job_building_types'

        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               protocol='mysql',
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name

        logger.log_status('Creating table %s.' % table_name)
        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % table_name)
            db.DoQuery('CREATE TABLE %s '
                       '(id INT, name varchar(50), home_based INT);' %
                       table_name)
        except:
            raise NameError, "Invalid table name specified! (%s)" % table_name

        db.DoQuery('INSERT INTO %s (id, name, home_based) VALUES'
                   '(1, "commercial", 0),'
                   '(3, "industrial", 0),'
                   '(2, "governmental", 0),'
                   '(4, "home_based", 1);' % table_name)
    def setUp(self):
        self.db_config = TestDatabaseConfiguration(protocol=self.protocol)
        self.db_config_node = self.db_config._database_configuration_node()
        self.db_server = DatabaseServer(self.db_config)

        self.test_db = 'OpusDatabaseTestDatabase'

        self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
        self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
        self.year = 1000

        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
        self.test_data = {
            self.year: {
                'table_a': {
                    'tablea_id': array([1, 2, 3]),
                    'tablea_id_name': array(['1', '2', '3']),
                    'value1': array([1.0, 2.001, 3], dtype='float'),
                    'value2': array([True, False, False], dtype='i'),  ## sqlite has problems handling the bool type
                },
                'table_b': {
                    'tableb_id': array([1, 2, 3]),
                    'tableb_id_name': array(['one', 'two', 'three']),
                    'value3': array([1.0, 2.001, 3], dtype='float'),
                },
            },
        }
        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir,
                                                       self.test_data)
Example #10
    def __init__(self, config):
        ss = SimulationState(new_instance=True)
        ss.set_current_time(config['base_year'])
        ss.set_cache_directory(config['cache_directory'])

        SessionConfiguration(new_instance=True,
                             package_order=config['dataset_pool_configuration'].package_order,
                             in_storage=AttributeCache())
        #if not os.path.exists(config['cache_directory']):  ## if cache exists, it will automatically skip
        cacher = CreateBaseyearCache()
        cache_dir = cacher.run(config)

        if 'estimation_database_configuration' in config:
            db_server = DatabaseServer(config['estimation_database_configuration'])
            db = db_server.get_database(config['estimation_database_configuration'].database_name)
            out_storage = StorageFactory().get_storage(
                'sql_storage', 
                storage_location = db)
        else:
            output_cache = os.path.join(config['cache_directory'], str(config['base_year']+1))
            out_storage = StorageFactory().get_storage('flt_storage', storage_location=output_cache)

        dataset_pool = SessionConfiguration().get_dataset_pool()
        households = dataset_pool.get_dataset("household")
        buildings = dataset_pool.get_dataset("building")
        zones = dataset_pool.get_dataset("zone")
        zone_ids = zones.get_id_attribute()
        capacity_attribute_name = "residential_units"  #_of_use_id_%s" % id
        capacity_variable_name = "%s=sanfrancisco.zone.aggregate_%s_from_building" % \
                                 (capacity_attribute_name, capacity_attribute_name)
        buildings.compute_variables("sanfrancisco.building.zone_id", dataset_pool=dataset_pool)
        zones.compute_variables(capacity_variable_name, dataset_pool=dataset_pool)

        building_zone_id = buildings.get_attribute('zone_id')
        
#        is_household_unplace = datasets['household'].get_attribute("building_id") <= 0
        is_household_unplaced = 1 #all households are unplaced
        household_building_id = zeros(households.size(), dtype='int32')-1 #datasets['household'].get_attribute("building_id")
        
        for zone_id in zone_ids:
            capacity = zones.get_attribute_by_id(capacity_attribute_name, zone_id)
            is_household_in_this_zone = (households.get_attribute('zone_id') == zone_id)
            is_unplaced_household_in_this_zone = is_household_in_this_zone * is_household_unplaced
            is_building_in_this_zone = (building_zone_id == zone_id)
#            if not is_household_in_this_zone.sum() <= capacity:
            if capacity == 0 or is_household_in_this_zone.sum()==0:
                print "WARNING: zone %s has %s households but only %s units" % (zone_id, is_household_in_this_zone.sum(), capacity)
                continue
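            # Sample a building for every unplaced household in this zone with
            # probability proportional to the building's residential_units
            # (buildings outside the zone get zero weight): draw uniform numbers
            # and invert the cumulative distribution with searchsorted.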
                        
            prob = buildings.get_attribute(capacity_attribute_name) * is_building_in_this_zone / array(capacity, dtype=float64)

            r = random(sum(is_unplaced_household_in_this_zone))
            prob_cumsum = ncumsum(prob)
            index_to_bldg = searchsorted(prob_cumsum, r)

            household_building_id[where(is_unplaced_household_in_this_zone)] = buildings.get_attribute_by_index('building_id', index_to_bldg)

#        import pdb;pdb.set_trace()
        households.set_values_of_one_attribute('building_id', household_building_id)
        households.write_dataset(out_table_name='households', out_storage=out_storage)
    def create_building_types_table(self, db_config, db_name):
        table_name = 'job_building_types'

        dbconfig = DatabaseServerConfiguration(
            host_name = db_config.host_name,
            user_name = db_config.user_name,
            protocol = 'mysql',
            password = db_config.password                                       
        )
        db_server = DatabaseServer(dbconfig)
        
        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name

        logger.log_status('Creating table %s.' % table_name)
        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % table_name)
            db.DoQuery('CREATE TABLE %s '
                '(id INT, name varchar(50), home_based INT);' 
                % table_name)
        except:
            raise NameError, "Invalid table name specified! (%s)" % table_name
            
        db.DoQuery('INSERT INTO %s (id, name, home_based) VALUES'
            '(1, "commercial", 0),' 
            '(3, "industrial", 0),'
            '(2, "governmental", 0),'
            '(4, "home_based", 1);'
                % table_name)
Example #12
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        self.config = Resources(run_configuration)
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config['cache_directory'] = self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())

        ForkProcess().fork_new_process(
            self.config['creating_baseyear_cache_configuration'].cache_scenario_database,
            self.config)

        # Create output database (normally done by run manager)
        if 'estimation_database_configuration' in self.config:
            db_server = DatabaseServer(self.config['estimation_database_configuration'])
            if not db_server.has_database(self.config['estimation_database_configuration'].database_name):
                db_server.create_database(self.config['estimation_database_configuration'].database_name)
Example #13
    def combine_tables(self, db_config, db_name, from_tables_names,
                       to_table_name):
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name

        union_statements = []
        for from_table_name in from_tables_names:
            union_statements.append('(SELECT * FROM %s)' % from_table_name)

        create_table_query = "CREATE TABLE %s " % to_table_name
        create_table_query += ' UNION ALL '.join(union_statements)
        create_table_query += ';'

        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % to_table_name)
            db.DoQuery(create_table_query)
        except:
            raise NameError, "Unknown or invalid table specified!"
Example #14
 def setUp(self):
     self.db_name = 'test_create_table'
     
     self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
     self.db_server.drop_database(self.db_name)
     self.db_server.create_database(self.db_name)
     self.db = self.db_server.get_database(self.db_name)
Example #15
	def run (self):
		time = -1
		latest = ""
		directoryname = 'data/vibe_gridcell/runs/'
		if self.isParcel is True:
			directoryname = 'data/vibe_parcel/'
		for filename in os.listdir(os.path.join(os.environ['OPUS_HOME'], directoryname)):
			print filename
			if time == -1:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename
			if os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename)) > time:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename

		config = DatabaseServerConfiguration(host_name = 'localhost',
                                           user_name = 'urbansim',
					   password = '******',
                                           protocol = 'mysql')
		db_server = DatabaseServer(config)
		for i in range(1981, 1980+int(self.YearsToRun)):
			newdir = latest + '/' + str(i)
			flt_directory_in = os.path.join(os.environ['OPUS_HOME'], directoryname ,newdir)
			input_storage = flt_storage(storage_location = flt_directory_in)	
			db = db_server.get_database('ress_'+str(i))
			output_storage = StorageFactory().get_storage('sql_storage', storage_location = db)
			ExportStorage().export(in_storage=input_storage, out_storage=output_storage)
    def __init__(self, config):
        ss = SimulationState(new_instance=True)
        ss.set_current_time(config['base_year'])
        ss.set_cache_directory(config['cache_directory'])

        SessionConfiguration(new_instance=True,
                             package_order=config['dataset_pool_configuration'].package_order,
                             in_storage=AttributeCache())
        #if not os.path.exists(config['cache_directory']):  ## if cache exists, it will automatically skip
        cacher = CreateBaseyearCache()
        cache_dir = cacher.run(config)

        if 'estimation_database_configuration' in config:
            db_server = DatabaseServer(config['estimation_database_configuration'])
            db = db_server.get_database(config['estimation_database_configuration'].database_name)
            out_storage = StorageFactory().get_storage(
                'sql_storage', 
                storage_location = db)
        else:
            output_cache = os.path.join(config['cache_directory'], str(config['base_year']+1))
            out_storage = StorageFactory().get_storage('flt_storage', storage_location=output_cache)

        dataset_pool = SessionConfiguration().get_dataset_pool()
        households = dataset_pool.get_dataset("household")
        buildings = dataset_pool.get_dataset("building")
        zones = dataset_pool.get_dataset("zone")
        zone_ids = zones.get_id_attribute()
        capacity_attribute_name = "residential_units"  #_of_use_id_%s" % id
        capacity_variable_name = "%s=sanfrancisco.zone.aggregate_%s_from_building" % \
                                 (capacity_attribute_name, capacity_attribute_name)
        buildings.compute_variables("sanfrancisco.building.zone_id", dataset_pool=dataset_pool)
        zones.compute_variables(capacity_variable_name, dataset_pool=dataset_pool)

        building_zone_id = buildings.get_attribute('zone_id')
        
#        is_household_unplace = datasets['household'].get_attribute("building_id") <= 0
        is_household_unplaced = 1 #all households are unplaced
        household_building_id = zeros(households.size(), dtype='int32')-1 #datasets['household'].get_attribute("building_id")
        
        for zone_id in zone_ids:
            capacity = zones.get_attribute_by_id(capacity_attribute_name, zone_id)
            is_household_in_this_zone = (households.get_attribute('zone_id') == zone_id)
            is_unplaced_household_in_this_zone = is_household_in_this_zone * is_household_unplaced
            is_building_in_this_zone = (building_zone_id == zone_id)
#            if not is_household_in_this_zone.sum() <= capacity:
            if capacity == 0 or is_household_in_this_zone.sum()==0:
                print "WARNING: zone %s has %s households but only %s units" % (zone_id, is_household_in_this_zone.sum(), capacity)
                continue
                        
            prob = buildings.get_attribute(capacity_attribute_name) * is_building_in_this_zone / array(capacity, dtype=float64)

            r = random(sum(is_unplaced_household_in_this_zone))
            prob_cumsum = ncumsum(prob)
            index_to_bldg = searchsorted(prob_cumsum, r)

            household_building_id[where(is_unplaced_household_in_this_zone)] = buildings.get_attribute_by_index('building_id', index_to_bldg)

#        import pdb;pdb.set_trace()
        households.set_values_of_one_attribute('building_id', household_building_id)
        households.write_dataset(out_table_name='households', out_storage=out_storage)
Example #17
def opusRun(progressCB,logCB,params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)
        
    
    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    query = param_dict['query']

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opus_db = server.get_database(database_name=database_name)
    
    # Do Query
    logCB("Running Query...\n")
    opus_db.execute(query)
    
    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
Example #18
 def setUp(self):
     self.db_name = 'test_rename_commercial_and_industrial'
     
     self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
     
     self.db_server.drop_database(self.db_name)
     self.db_server.create_database(self.db_name)
     self.db = self.db_server.get_database(self.db_name)
     
     self.tables_to_convert = (
         'employment_commercial_location_choice_model_specification',
         'employment_commercial_location_choice_model_coefficients',
         'employment_industrial_location_choice_model_specification',
         'employment_industrial_location_choice_model_coefficients',
         )
         
     self.other_tables = (
         'i_am_not_to_be_renamed_location_choice_model_specifications',
         'i_am_not_to_be_renamed_location_choice_model_coefficients',
         'employment_industrial_location_choice_model_coefficients_old',
         )
     
     for table in self.tables_to_convert + self.other_tables:
         self.db.DoQuery('CREATE TABLE %s (id INT);' % table)
         
     self.output_tables = (
         'commercial_employment_location_choice_model_specification',
         'commercial_employment_location_choice_model_coefficients',
         'industrial_employment_location_choice_model_specification',
         'industrial_employment_location_choice_model_coefficients',
         )
    def __init__(self, config):
        if 'estimation_database_configuration' in config:
            db_server = DatabaseServer(config['estimation_database_configuration'])
            db = db_server.get_database(config['estimation_database_configuration'].database_name)
        
            out_storage = StorageFactory().build_storage_for_dataset(
                type='sql_storage', storage_location=db)
        else:
            out_storage = StorageFactory().get_storage(type='flt_storage',
                storage_location=os.path.join(config['cache_directory'], str(config['base_year']+1)))

        simulation_state = SimulationState()
        simulation_state.set_cache_directory(config['cache_directory'])
        simulation_state.set_current_time(config['base_year'])
        attribute_cache = AttributeCache()
        
        SessionConfiguration(new_instance=True,
                             package_order=config['dataset_pool_configuration'].package_order,
                             in_storage=attribute_cache)
        
        if not os.path.exists(os.path.join(config['cache_directory'], str(config['base_year']))):
            #raise RuntimeError, "datasets uncached; run prepare_estimation_data.py first"
            CacheScenarioDatabase().run(config, unroll_gridcells=False)

        for dataset_name in config['datasets_to_preload']:
            SessionConfiguration().get_dataset_from_pool(dataset_name)

        households = SessionConfiguration().get_dataset_from_pool("household")
        household_ids = households.get_id_attribute()
        workers = households.get_attribute("workers")
        
        hh_ids = []
        member_ids = []
        is_worker = []
        job_ids = []
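        # Expand each household into one person record per worker: member_id
        # runs from 1 to workers, every generated person is flagged as a worker,
        # and job_id starts out unassigned (-1).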

        for i in range(households.size()):  
            if workers[i] > 0:
                hh_ids += [household_ids[i]] * workers[i]
                member_ids += range(1, workers[i]+1)
                is_worker += [1] * workers[i]
                job_ids += [-1] * workers[i]

        in_storage = StorageFactory().get_storage('dict_storage')
        
        persons_table_name = 'persons'
        in_storage.write_table(
                table_name=persons_table_name,
                table_data={
                    'person_id':arange(len(hh_ids))+1,
                    'household_id':array(hh_ids),
                    'member_id':array(member_ids),
                    'is_worker':array(is_worker),                    
                    'job_id':array(job_ids),
                    },
            )

        persons = PersonDataset(in_storage=in_storage, in_table_name=persons_table_name)
        persons.write_dataset(out_storage=out_storage, out_table_name=persons_table_name)
Example #20
    def run_run(self, run_resources, run_name = None, run_as_multiprocess=True, run_in_background=False):
        """check run hasn't already been marked running
           log it in to run_activity
           run simulation
           mark run as done/failed
           """

        if not self.ready_to_run:
            raise RuntimeError, 'RunManager.setup_new_run must be executed before RunManager.run_run'

        if run_resources['cache_directory'] != self.current_cache_directory:
            raise RuntimeError, 'The configuration and the RunManager conflict on the proper cache_directory'

        self.add_row_to_history(self.run_id, run_resources, "started", run_name = run_name)
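        # Everything below runs inside try/except/else so a failed run is
        # recorded as "failed" (with the exception re-raised) and a successful
        # run is recorded as "done".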

        try:
            # Test pre-conditions
            model_system_class_path = run_resources.get('model_system', None)
            if model_system_class_path is None:
                raise TypeError, ("The configuration must specify model_system, the"
                    " full Opus path to the model system to be used.")

            # Create baseyear cache
            self.create_baseyear_cache(run_resources)

            # Create brand-new output database (deletes any prior contents)
            if 'estimation_database_configuration' in run_resources:
                db_server = DatabaseServer(run_resources['estimation_database_configuration'])
                if not db_server.has_database(run_resources['estimation_database_configuration'].database_name):
                    db_server.create_database(run_resources['estimation_database_configuration'].database_name)


            # Run simulation
            exec('from %s import ModelSystem' % model_system_class_path)

            model_system = ModelSystem()
            self.model_system = model_system

            if 'base_year' not in run_resources:
                run_resources['base_year'] = run_resources['years'][0] - 1

            self._create_seed_dictionary(run_resources)
#            model_system.run_in_same_process(run_resources)
            if run_as_multiprocess:
                model_system.run_multiprocess(run_resources)
            else:
                model_system.run_in_one_process(run_resources, run_in_background=run_in_background, class_path=model_system_class_path)

            self.model_system = None

        except:
            self.add_row_to_history(self.run_id, run_resources, "failed", run_name = run_name)
            self.ready_to_run = False
            raise # This re-raises the last exception
        else:
            self.add_row_to_history(self.run_id, run_resources, "done", run_name = run_name)

        self.ready_to_run = False
        return self.run_id
    def get(self, database):
        db_config = ScenarioDatabaseConfiguration()
        db_server = DatabaseServer(db_config)
        db = db_server.get_database(database)

        storage = StorageFactory().get_storage('sql_storage',
                                               storage_location=db)
        return storage
 def get(self, database):
     db_config = ScenarioDatabaseConfiguration()
     db_server = DatabaseServer(db_config)
     db = db_server.get_database(database)
     
     storage = StorageFactory().get_storage(
         'sql_storage',
         storage_location = db)
     return storage
def drop_table(table_name, dbname, schema):
    dbserverconfig = IndicatorsDatabaseConfiguration(protocol='postgres')
    server = DatabaseServer(database_server_configuration = dbserverconfig)
    db = server.get_database(database_name=dbname)
    query = 'DROP TABLE %s.%s' % (schema, table_name)
    try:
        db.execute(query)
        logCB('DROPPED TABLE %s \n' % table_name)
    except:
        return
Example #24
def drop_table(table_name, dbname, schema):
    dbserverconfig = IndicatorsDatabaseConfiguration(protocol='postgres')
    server = DatabaseServer(database_server_configuration=dbserverconfig)
    db = server.get_database(database_name=dbname)
    query = 'DROP TABLE %s.%s' % (schema, table_name)
    try:
        db.execute(query)
        logCB('DROPPED TABLE %s \n' % table_name)
    except:
        return
Example #25
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        self.assert_(not self.db.table_exists('building_types'))
        
        
    def test_create_table(self):
        CreateBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        self.assert_(self.db.table_exists('building_types'))
Example #26
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        self.assert_(not self.db.table_exists('building_types'))
        
        
    def test_create_table(self):
        CreateBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        self.assert_(self.db.table_exists('building_types'))
Example #27
class AbstractServiceTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        db = self.db_server.get_database(self.database_name)
        self.assertFalse(db.table_exists('run_activity'))
        self.assertFalse(db.table_exists('computed_indicators'))

        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(db.table_exists('run_activity'))
        self.assertTrue(db.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        db = self.db_server.get_database(self.database_name)
        self.assertTrue(db.table_exists('run_activity'))
        self.assertTrue(db.table_exists('computed_indicators'))
Example #28
class AbstractServiceTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)
    
    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()
        
    def test_create_when_already_exists(self):
        """Shouldn't do anything if the database already exists."""
        self.db_server.create_database(self.database_name)
        db = self.db_server.get_database(self.database_name)
        self.assertFalse(db.table_exists('run_activity'))
        self.assertFalse(db.table_exists('computed_indicators'))

        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(db.table_exists('run_activity')) 
        self.assertTrue(db.table_exists('computed_indicators'))

    def test_create(self):
        """Should create services tables if the database doesn't exist."""
        services = AbstractService(self.config)
        services.services_db.close()
        self.assertTrue(self.db_server.has_database(self.database_name))
        db = self.db_server.get_database(self.database_name)
        self.assertTrue(db.table_exists('run_activity')) 
        self.assertTrue(db.table_exists('computed_indicators'))               
Example #29
    def prepare_for_run(self, database_configuration, database_name):
        ## sql protocol, hostname, username and password are set in
        ## $OPUS_HOME/settings/database_server_setting.xml
        db_config = DatabaseConfiguration(database_name=database_name, database_configuration=database_configuration)
        db_server = DatabaseServer(db_config)
        if not db_server.has_database(database_name):
            db_server.create_database(database_name)
        db = db_server.get_database(database_name)
        self.out_storage = sql_storage(storage_location=db)

        return self.out_storage
Example #30
    def save_results(self, out_storage=None, model_name=None):
        if self.specification is None or self.coefficients is None:
            raise ValueError, "model specification or coefficient is None"

        #invalid = self.coefficients.is_invalid()
        if False:
            logger.log_warning('Invalid coefficients. Not saving results!')
            return

        if model_name is None:
            model_name = self.config.get('model_name_for_coefficients', None)
            
        if model_name is None:
            if self.model_name is not None:
                model_name = self.model_name
            else:
                raise ValueError, "model_name unspecified"

        out_storage_available = True
        if out_storage:
            pass
        elif 'estimation_database_configuration' in self.config:
            try:
                db_server = DatabaseServer(self.config['estimation_database_configuration'])
                database_name = self.config["estimation_database_configuration"].database_name
    
                if not db_server.has_database(database_name):
                    db_server.create_database(database_name)
    
                output_db = db_server.get_database(database_name)
                out_storage = StorageFactory().get_storage(
                    type='sql_storage',
                    storage_location=output_db)
            except:
                logger.log_warning("Problem with connecting database given by 'estimation_database_configuration'.")
                out_storage_available = False
        else:
            logger.log_warning("No estimation_database_configuration given.")
            out_storage_available = False

        # the original model name of development_project_lcm is too long as a mysql db table name, truncate it
        if model_name.rfind("_development_project_location_choice_model") >=0:
            model_name = model_name.replace('_project', '')
        specification_table = '%s_specification' % model_name
        coefficients_table = '%s_coefficients' % model_name
        if out_storage_available:
            logger.start_block("Writing specification and coefficients into storage given by 'estimation_database_configuration'")
            self.specification.write(out_storage=out_storage, out_table_name=specification_table)
            self.coefficients.write(out_storage=out_storage, out_table_name=coefficients_table)
            logger.end_block()
        logger.start_block("Writing specification and coefficients into %s" % AttributeCache().get_storage_location())
        self.specification.write(out_storage=AttributeCache(), out_table_name=specification_table)
        self.coefficients.write(out_storage=AttributeCache(), out_table_name=coefficients_table)
        logger.end_block()
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    pums_id_to_bg_id_file_path = param_dict['pums_id_to_bg_id_file_path']
    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']

    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    # Main application routine:

    opus_db.execute("""
            CREATE TABLE pums_id_to_bg_id (
              county int,
              tract int,
              bg int,
              puma5 int,
              tract_string text,
              number_of_digits int);
            """)

    opus_db.execute("""
            LOAD DATA LOCAL INFILE '%s' INTO TABLE pums_id_to_bg_id
            FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n';
            """ % (pums_id_to_bg_id_file_path))

    opus_db.execute("""
            update pums_id_to_bg_id
            set tract_string = tract;
    """)

    opus_db.execute("""
            update pums_id_to_bg_id
            set number_of_digits = length(tract_string);
    """)

    opus_db.execute("""
            update pums_id_to_bg_id
            set tract = tract*100
            where number_of_digits <= 3;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
 def cleanup(self, remove_cache, remove_output_database):
     """Remove all outputs of this simulation."""
     self.simulation_state.remove_singleton(delete_cache=remove_cache)
     # Remove SessionConfiguration singleton, if it exists
     Singleton().remove_singleton_for_class(SessionConfiguration)
     
     cache_dir = self.config['cache_directory']
     if os.path.exists(cache_dir):
         rmtree(cache_dir)
     if remove_output_database and ('estimation_database_configuration' in self.config):
         db_server = DatabaseServer(self.config['estimation_database_configuration'])
         db_server.drop_database(self.config['estimation_database_configuration'].database_name)
 def cleanup(self, remove_cache, remove_output_database):
     """Remove all outputs of this simulation."""
     self.simulation_state.remove_singleton(delete_cache=remove_cache)
     # Remove SessionConfiguration singleton, if it exists
     Singleton().remove_singleton_for_class(SessionConfiguration)
     
     cache_dir = self.config['cache_directory']
     if os.path.exists(cache_dir):
         rmtree(cache_dir)
     if remove_output_database and ('estimation_database_configuration' in self.config):
         db_server = DatabaseServer(self.config['estimation_database_configuration'])
         db_server.drop_database(self.config['estimation_database_configuration'].database_name)
Example #34
def opusRun(progressCB,logCB,params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)
    
    # get parameter values
    pums_id_to_bg_id_file_path = param_dict['pums_id_to_bg_id_file_path']
    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']
    
    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opus_db = server.get_database(database_name=database_name)   

    # Main application routine:

    opus_db.execute("""
            CREATE TABLE pums_id_to_bg_id (
              county int,
              tract int,
              bg int,
              puma5 int,
              tract_string text,
              number_of_digits int);
            """)
    
    opus_db.execute("""
            LOAD DATA LOCAL INFILE '%s' INTO TABLE pums_id_to_bg_id
            FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n';
            """ % (pums_id_to_bg_id_file_path))
    
    opus_db.execute("""
            update pums_id_to_bg_id
            set tract_string = tract;
    """)
    
    opus_db.execute("""
            update pums_id_to_bg_id
            set number_of_digits = length(tract_string);
    """)

    opus_db.execute("""
            update pums_id_to_bg_id
            set tract = tract*100
            where number_of_digits <= 3;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
Example #35
class RunManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(database_name = self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_setup_run(self):
        base_directory = tempfile.mkdtemp(prefix='opus_tmp')
        run_name = 'test_scenario_name'
        run_manager = RunManager(self.config)

        run_manager.setup_new_run(cache_directory = os.path.join(base_directory, run_name),
                                  configuration = {})
        resulting_cache_directory = run_manager.get_current_cache_directory()
        self.assertTrue(resulting_cache_directory.find(run_name)>-1)
        self.assertEquals(os.path.dirname(resulting_cache_directory), base_directory)
        self.assertTrue(run_manager.ready_to_run)
        self.assertTrue(not os.path.exists(resulting_cache_directory))
        run_manager.services_db.close()
        os.rmdir(base_directory)

    def test_add_row_to_history(self):
        run_manager = RunManager(self.config)
        cache_directory = tempfile.mkdtemp(prefix='opus_tmp')
        resources = {'cache_directory':cache_directory,
                     'description':'test_run',
                     'base_year':2000,
                     'project_name': 'test'}

        run_manager.add_row_to_history(run_id = 1,
                                       resources = resources,
                                       status = 'done')

        db = self.db_server.get_database(self.database_name)
        run_activity_table = db.get_table('run_activity')

        s = select([run_activity_table.c.run_description,
                    run_activity_table.c.status],
                    whereclause = run_activity_table.c.run_id == 1)

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        run_name, status = results[0]
        self.assertEqual(status, 'done')
        self.assertEqual(run_name, 'test_run')

        run_manager.services_db.close()
        os.rmdir(cache_directory)
Example #36
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']
    
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opusdb = server.get_database(database_name=database_name, create_if_doesnt_exist=False)
    
    input_storage = sql_storage(storage_location = opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())
        
    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)
        
    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (i, opus_data_year, opus_data_directory))
        try:
            ExportStorage().export_dataset(
                                           dataset_name = i,
                                           in_storage = input_storage,
                                           out_storage = output_storage,
                                           overwrite = overwrite,
                                           )
        except:
            logCB('Error in exporting %s.' % i)
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
Example #37
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(
        opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name,
                                 create_if_doesnt_exist=False)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())

    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)

    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=i,
            in_storage=input_storage,
            out_storage=output_storage,
            overwrite=overwrite,
        )
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
Example #38
 def _get_db(self, db_config, db_name):
     dbconfig = DatabaseServerConfiguration(
         protocol = 'mysql',
         host_name = db_config.host_name,
         user_name = db_config.user_name,
         password = db_config.password                                       
     )
     db_server = DatabaseServer(dbconfig)
     
     try:
         return db_server.get_database(db_name)
     except:
         raise NameError, "Unknown database '%s'!" % db_name      
Example #39
 def _get_db(self, db_config, db_name):
     dbconfig = DatabaseServerConfiguration(
         protocol = 'mysql',
         host_name = db_config.host_name,
         user_name = db_config.user_name,
         password = db_config.password                                       
     )
     db_server = DatabaseServer(dbconfig)
     
     try:
         return db_server.get_database(db_name)
     except:
         raise NameError, "Unknown database '%s'!" % db_name      
Example #40
    def setUp(self):
        db_configs = []
        for engine in _get_installed_database_engines():
            config = TestDatabaseConfiguration(protocol=engine)
            db_configs.append(config)

        self.test_db = 'OpusDatabaseTestDatabase'
        test_table = 'test_table'

        self.dbs = []
        for config in db_configs:
            try:
                server = DatabaseServer(config)
                if server.has_database(self.test_db):
                    server.drop_database(self.test_db)
                server.create_database(self.test_db)
                self.assertTrue(
                    server.has_database(database_name=self.test_db))
                db = OpusDatabase(database_server_configuration=config,
                                  database_name=self.test_db)
                self.assertFalse(db.table_exists(test_table))
                self.dbs.append((db, server))
            except:
                import traceback
                traceback.print_exc()

                logger.log_warning('Could not start server for protocol %s' %
                                   config.protocol)
Example #41
        def setUp(self):

            db_configs = []
            for engine in get_testable_engines():
                config = TestDatabaseConfiguration(protocol=engine)
                db_configs.append(config)

            self.database_name = 'test_database'
            self.dbs = []
            for config in db_configs:
                try:
                    server = DatabaseServer(config)
                    if server.has_database(self.database_name):
                        server.drop_database(self.database_name)
                    server.create_database(self.database_name)
                    self.assertTrue(
                        server.has_database(database_name=self.database_name))
                    db = OpusDatabase(database_server_configuration=config,
                                      database_name=self.database_name)
                    storage = sql_storage(storage_location=db)
                    self.dbs.append((db, server, storage))
                    self.storage = storage
                except:
                    import traceback
                    traceback.print_exc()

                    print 'WARNING: could not start server for protocol %s' % config.protocol
Example #42
    def __init__(self,
                 indicator_directory,
                 name = None,
                 output_type = None,
                 storage_location = None,
                 output_style = ALL,
                 fixed_field_format = None  # Only used with the 'fixed_field' output type
                ):

        if output_type == 'sql' and not isinstance(storage_location, DatabaseConfiguration):
            raise Exception("If Table output_type is 'sql', a Database object must be passed as storage_location.")
        elif output_type in ['dbf', 'csv', 'tab', 'esri', 'fixed_field'] and \
               storage_location is not None and \
               not isinstance(storage_location,str):
            raise Exception("If Table output_type is %s, storage_location must be a path to the output directory"%output_type)
        elif output_type not in ['dbf', 'csv', 'tab', 'sql', 'esri', 'fixed_field']:
            raise Exception("Table output_type must be either dbf, csv, tab, sql, esri, fixed_field, not %s"%output_type)

        if output_type == "fixed_field" and not fixed_field_format:
            raise ValueError("If Table output_type is 'fixed_field', an XML format string must be passed as fixed_field_format.")
        
        self.fixed_field_format = fixed_field_format

        if output_style not in [Table.ALL,
                                Table.PER_YEAR,
                                Table.PER_ATTRIBUTE]:
            raise Exception('%s output_style is not appropriate. '
                            'Choose from Table.ALL, Table.PER_YEAR, '
                            'and Table.PER_ATTRIBUTE.' % output_style)

        self.output_type = output_type
        self.output_style = output_style

        if storage_location is None:
            storage_location = indicator_directory
        elif output_type == 'sql':
            server = DatabaseServer(database_server_configuration = storage_location)
            if not server.has_database(database_name = storage_location.database_name):
                server.create_database(database_name = storage_location.database_name)
            storage_location = server.get_database(
                                   database_name = storage_location.database_name)
        self.storage_location = storage_location

        self.output_storage = StorageFactory().get_storage(
            type = '%s_storage'%(self.output_type),
            storage_location = storage_location
        )

        self.name = name
        self.indicator_directory = indicator_directory
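    # A minimal usage sketch (hypothetical values; assumes this is the indicator
    # framework's Table class and that the Table.* output_style constants exist
    # as referenced above):
    #
    #   table = Table(indicator_directory='/tmp/indicators',
    #                 name='population_by_zone',
    #                 output_type='csv',
    #                 output_style=Table.PER_YEAR)
    #
    # For output_type='sql', storage_location must instead be a
    # DatabaseConfiguration; the constructor then creates the target database if
    # it does not exist yet.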
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'
        
        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql',))
        
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
            
        
    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()
        
        
    def test_setUp(self):
        try:
            self.db.DoQuery('select * from job_building_types;')
            self.fail('Output table job_building_types already exists. (Check setUp)')
        except: pass
        
        
    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        try:
            self.db.DoQuery('select * from job_building_types;')
        except:
            self.fail('Expected output table job_building_types does not exist.')
    
            
    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol = 'mysql'), self.db_name)
        
        expected_results = [
            ['id', 'name', 'home_based'],
            [1, "commercial", 0],
            [3, "industrial", 0],
            [2, "governmental", 0],
            [4, "home_based", 1]
            ]
        
        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except:
            self.fail('Expected output table job_building_types does not exist.')
        
        self.assert_(expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Example #44
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']

    database_server_connection = params_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        #input_storage = sql_storage(storage_location = opusdb)
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        #output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
        if opus_data_year == 'ALL':
            opusdb = server.get_database(database_name=database_name + "_" +
                                         str(year))
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = re.split(' +', opus_table_name.strip())
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
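# Invocation sketch (hypothetical): one way the export step above might be driven.
# The connection name, database name, and cache directory are placeholders, and
# 'database_server_connection' must name a connection defined in the local
# database server configuration.
def _console_log(msg):
    print msg,

_example_params = {
    'database_name': 'scenario_output',
    'database_server_connection': 'my_mysql_connection',
    'opus_data_directory': '/tmp/opus_cache',
    'opus_data_year': 'ALL',     # export every four-digit year directory in the cache
    'opus_table_name': 'ALL',    # export every table found for each year
}
opusRun(progressCB=None, logCB=_console_log, params=_example_params)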
Example #45
    def convert_databases(self, db_config, databases, tables, patterns, backup=True, backup_postfix='_old'):
        dbconfig = DatabaseServerConfiguration(
            host_name = db_config.host_name,
            protocol = 'mysql',
            user_name = db_config.user_name,
            password = db_config.password
        )
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)
            self.convert_database(db, tables[db_name], patterns, backup, backup_postfix)
            db.close()

        db_server.close()
class TestCreateJobBuildingTypesTable(opus_unittest.OpusTestCase):
    def setUp(self):
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql', ))

        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

    def tearDown(self):
        self.db.close()
        self.db_server.drop_database(self.db_name)
        self.db_server.close()

    def test_setUp(self):
        try:
            self.db.DoQuery('select * from job_building_types;')
            self.fail(
                'Output table job_building_types already exists. (Check setUp)')
        except:
            pass

    def test_create_table(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        try:
            self.db.DoQuery('select * from job_building_types;')
        except:
            self.fail(
                'Expected output table job_building_types does not exist.')

    def test_values(self):
        CreateJobBuildingTypesTable().create_building_types_table(
            TestDatabaseConfiguration(protocol='mysql'), self.db_name)

        expected_results = [['id', 'name', 'home_based'], [1, "commercial", 0],
                            [3, "industrial", 0], [2, "governmental", 0],
                            [4, "home_based", 1]]

        try:
            results = self.db.GetResultsFromQuery(
                'select * from job_building_types;')
        except:
            self.fail(
                'Expected output table job_building_types does not exist.')

        self.assert_(
            expected_results == results,
            "Table job_building_types has incorrect values! "
            "Expected: %s. Received: %s" % (expected_results, results))
Example #47
    def setUp(self):
        self.db_name = 'test_create_table'

        self.db_server = DatabaseServer(TestDatabaseConfiguration(protocol = 'mysql'))
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)
    def setUp(self):
        self.db_config = TestDatabaseConfiguration(protocol = self.protocol)
        self.db_config_node = self.db_config._database_configuration_node()
        self.db_server = DatabaseServer(self.db_config)

        self.test_db = 'OpusDatabaseTestDatabase'
        
        self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
        self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
        self.year = 1000
        
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
        self.test_data = {
            self.year:{
                'table_a':{
                    'tablea_id':array([1,2,3]),
                    'tablea_id_name': array(['1','2','3']),
                    'value1': array([1.0, 2.001, 3], dtype='float'),
                    'value2': array([True, False, False], dtype='i'),  ## sqlite has trouble handling the bool type
                    },
                'table_b':{
                    'tableb_id':array([1,2,3]),
                    'tableb_id_name': array(['one','two','three']),
                    'value3': array([1.0, 2.001, 3], dtype='float'),
                    },
                },
            }
        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir, self.test_data)
    def run(self, config, show_output = False):
        logger.log_status("Caching large SQL tables to: " + config['cache_directory'])
        self.show_output = show_output
        
        #import pydevd;pydevd.settrace()
        
        server_configuration = config['scenario_database_configuration']
        
        scenario_database_manager = ScenarioDatabaseManager(
            server_configuration = server_configuration, 
            base_scenario_database_name = server_configuration.database_name                                                         
        )
        
        self.database_server = DatabaseServer(server_configuration)
        
        database_to_table_mapping = scenario_database_manager.get_database_to_table_mapping()
        
        self.tables_to_cache = config['creating_baseyear_cache_configuration'].tables_to_cache
                
        simulation_state = SimulationState()
        if 'low_memory_run' in config:
            simulation_state.set_low_memory_run(config['low_memory_run'])
        simulation_state.set_cache_directory(config['cache_directory'])
        simulation_state.set_current_time(config['base_year'])
                  
        self.tables_cached = set()      
        for database_name, tables in database_to_table_mapping.items():
            self.cache_database_tables(config, database_name, tables)

        un_cached_tables = set(self.tables_to_cache) - self.tables_cached
        if un_cached_tables:
            logger.log_warning('The following requested tables were NOT cached:')
            for table_name in un_cached_tables:
                logger.log_warning('\t%s' % table_name)
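# Configuration sketch (hypothetical): the run() method above only reads a handful
# of entries from the configuration dictionary. The configuration class names and
# keyword arguments below follow common opus_core/urbansim conventions but are
# assumptions, and all concrete values are placeholders.
_example_config = {
    'cache_directory': '/tmp/urbansim_cache',      # where the cached tables are written
    'base_year': 2000,                             # becomes the SimulationState current time
    # 'low_memory_run': True,                      # optional; forwarded to SimulationState
    'scenario_database_configuration': ScenarioDatabaseConfiguration(
        database_name = 'my_scenario_db'),
    'creating_baseyear_cache_configuration': CreatingBaseyearCacheConfiguration(
        tables_to_cache = ['households', 'jobs', 'gridcells']),
}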
def create_dataset_from_sql_storage():
    from opus_core.database_management.configurations.services_database_configuration import ServicesDatabaseConfiguration
    from opus_core.database_management.database_server import DatabaseServer
    
    # make sure the environment variables are set, or replace them with appropriate values
    db_config = ServicesDatabaseConfiguration()
    db_server = DatabaseServer(db_config)
    database = db_server.get_database('services') # name of the database
        
    storage = StorageFactory().get_storage('sql_storage',
                                           storage_location = database)
    services_dataset = Dataset(in_storage = storage, 
                           in_table_name='available_runs', # name of the table
                           id_name=[] # the table does not have a unique identifier
                           )
    return services_dataset
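# Usage sketch (hypothetical): load the available_runs table through the helper
# above and inspect it, assuming the services database is reachable and the
# standard opus_core Dataset accessors (size, get_attribute_names) are available.
available_runs = create_dataset_from_sql_storage()
print available_runs.size()                  # number of rows loaded from available_runs
print available_runs.get_attribute_names()   # column names seen through sql_storage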
Example #51
    def setUp(self):
        self.database_name = 'test_travel_model_input_file_writer'

        self.dbconfig = TestDatabaseConfiguration()

        self.db_server = DatabaseServer(self.dbconfig)

        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        self.create_jobs_table(self.database)
        self.create_zones_table(self.database)
        self.create_employment_sector_groups_table(self.database)
        self.create_constant_taz_columns_table(self.database)
        self.create_households_table(self.database)
        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')
Example #52
        def setUp(self):

            db_configs = []
            for engine in get_testable_engines():
                config = TestDatabaseConfiguration(protocol = engine)
                db_configs.append(config)
            
            self.database_name = 'test_database'
            self.dbs = []
            for config in db_configs:
                try:
                    server = DatabaseServer(config)
                    if server.has_database(self.database_name):
                        server.drop_database(self.database_name)
                    server.create_database(self.database_name)
                    self.assertTrue(server.has_database(database_name = self.database_name))
                    db = OpusDatabase(database_server_configuration = config, 
                                       database_name = self.database_name)
                    storage = sql_storage(
                                    storage_location = db
                                    )
                    self.dbs.append((db,server,storage))
                    self.storage = storage
                except:
                    import traceback
                    traceback.print_exc()
                    
                    print 'WARNING: could not start server for protocol %s'%config.protocol
Example #53
    def create_storage(self):

        try:
            server = DatabaseServer(self.server_config)
        except:
            logger.log_error(
                'Cannot connect to the database server hosting the services database %s.'
                % self.server_config.database_name)
            raise

        if not server.has_database(self.server_config.database_name):
            server.create_database(self.server_config.database_name)

        try:
            services_db = server.get_database(self.server_config.database_name)
        except:
            logger.log_error('Cannot connect to a services database on %s.' %
                             server.get_connection_string(scrub=True))
            raise

        metadata.bind = services_db.engine
        setup_all()
        create_all()

        return services_db
Example #54
class ResultsManagerTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.database_name = 'test_services_database'
        self.config = TestDatabaseConfiguration(
            database_name=self.database_name)
        self.db_server = DatabaseServer(self.config)

    def tearDown(self):
        self.db_server.drop_database(self.database_name)
        self.db_server.close()

    def test_add_computed_indicator(self):
        result_manager = ResultsManager(self.config)

        indicator_name = 'test'
        dataset_name = 'ds'
        expression = 'exp'
        run_id = None
        data_path = '/home'

        result_manager.add_computed_indicator(indicator_name,
                                              dataset_name,
                                              expression,
                                              run_id,
                                              data_path,
                                              project_name='test')

        db = self.db_server.get_database(self.database_name)
        computed_indicators_table = db.get_table('computed_indicators')

        s = select(
            [
                computed_indicators_table.c.indicator_name,
                computed_indicators_table.c.expression
            ],
            whereclause=computed_indicators_table.c.dataset_name == 'ds')

        results = db.execute(s).fetchall()
        self.assertEqual(len(results), 1)

        i_name, exp = results[0]
        self.assertEqual(indicator_name, i_name)
        self.assertEqual(expression, exp)

        result_manager.services_db.close()
    def __init__(self):
        db_config = DatabaseServerConfiguration(
            host_name=settings.get_db_host_name(),
            user_name=settings.get_db_user_name(),
            password=settings.get_db_password())
        db_server = DatabaseServer(db_config)
        db = db_server.get_database(settings.db)

        in_storage = StorageFactory().get_storage('sql_storage',
                                                  storage_location=db)

        gcs = GridcellDataset(in_storage=in_storage, nchunks=5)
        print "Read and Write GridcellDataset."
        out_storage = StorageFactory().build_storage_for_dataset(
            type='flt_storage', storage_location=settings.dir)
        ReadWriteADataset(gcs,
                          out_storage=out_storage,
                          out_table_name=settings.gcsubdir)