def _create_db_from_chain_via_python(self, from_database_configuration,
                                         to_database_configuration,
                                         tables_to_copy):
        """Rebuild the target database and copy tables into it from the
        source scenario database chain.

        The target database is dropped and re-created from scratch, then
        every requested table found anywhere in the scenario chain of
        ``from_database_configuration`` is copied into it.

        Parameters:
            from_database_configuration -- configuration of the source
                scenario database chain.
            to_database_configuration -- configuration of the target; its
                database is destroyed and re-created.
            tables_to_copy -- list of table names to copy; an empty list
                means copy every table in the chain.
        """
        db_server_from = DatabaseServer(from_database_configuration)
        db_server_to = DatabaseServer(to_database_configuration)
        # Start from a clean slate: any existing target database is destroyed.
        db_server_to.drop_database(to_database_configuration.database_name)
        db_server_to.create_database(to_database_configuration.database_name)

        database_out = db_server_to.get_database(
            to_database_configuration.database_name)

        scenario_db_manager = ScenarioDatabaseManager(
            server_configuration=from_database_configuration,
            base_scenario_database_name=from_database_configuration.
            database_name)
        # Maps each database in the scenario chain to its list of tables.
        table_mapping = scenario_db_manager.get_database_to_table_mapping()

        cross_db_operations = CrossDatabaseOperations()

        #by default, copy all tables
        if tables_to_copy == []:
            tables_to_copy = sum(table_mapping.values(),
                                 [])  # flat a list of lists
        elif 'scenario_information' not in tables_to_copy:
            # scenario_information is always required in the copied database.
            tables_to_copy.append('scenario_information')

        for database_name, tables in table_mapping.items():
            database_in = db_server_from.get_database(database_name)
            for table in tables:
                if table not in tables_to_copy:
                    continue

                logger.start_block("Copying table '%s' from database '%s'" %
                                   (table, database_name))

                try:
                    cross_db_operations.copy_table(table_to_copy=table,
                                                   database_in=database_in,
                                                   database_out=database_out,
                                                   use_chunking=True)
                finally:
                    # Always close the log block, even when the copy fails.
                    logger.end_block()
            database_in.close()

        self._fix_scenario_information_table(database_out)
        database_out.close()
        db_server_from.close()
        db_server_to.close()
Beispiel #2
0
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        """Set up simulation state, the session configuration, the baseyear
        scenario cache, and (optionally) the output database before a run.

        Parameters:
            run_configuration -- mapping holding the run configuration;
                wrapped in a Resources object and stored on ``self.config``.
            cache_directory -- optional base directory for the simulation
                cache; when None, the directory supplied by the new
                SimulationState is written back into the configuration.
        """
        self.config = Resources(run_configuration)
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config[
                'cache_directory'] = self.simulation_state.get_cache_directory(
                )

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].
            package_order,
            in_storage=AttributeCache())

        # Cache the scenario database in a separate (forked) process.
        ForkProcess().fork_new_process(
            self.config['creating_baseyear_cache_configuration'].
            cache_scenario_database, self.config)

        # Create output database (normally done by run manager)
        if 'estimation_database_configuration' in self.config:
            db_server = DatabaseServer(
                self.config['estimation_database_configuration'])
            if not db_server.has_database(
                    self.config['estimation_database_configuration'].
                    database_name):
                db_server.create_database(
                    self.config['estimation_database_configuration'].
                    database_name)
Beispiel #3
0
 def setUp(self):
     """Start each test from a freshly created, empty MySQL database."""
     self.db_name = 'test_create_table'
     server = DatabaseServer(TestDatabaseConfiguration(protocol='mysql'))
     # Recreate the database from scratch so no state leaks between tests.
     server.drop_database(self.db_name)
     server.create_database(self.db_name)
     self.db_server = server
     self.db = server.get_database(self.db_name)
    def create_storage(self):
        """Connect to the services database server, creating the services
        database and its schema if they do not yet exist.

        Returns:
            The open services database.

        Raises:
            Whatever the underlying connection raises; connection failures
            are logged before being re-raised.
        """
        try:
            server = DatabaseServer(self.server_config)
        except Exception:
            # Was a bare ``except:``; narrowed so KeyboardInterrupt /
            # SystemExit are not reported as connection failures.
            logger.log_error(
                'Cannot connect to the database server that the services database is hosted on %s.'
                % self.server_config.database_name)
            raise

        if not server.has_database(self.server_config.database_name):
            server.create_database(self.server_config.database_name)

        try:
            services_db = server.get_database(self.server_config.database_name)
        except Exception:
            logger.log_error('Cannot connect to a services database on %s.' %
                             server.get_connection_string(scrub=True))
            raise

        # Bind the ORM metadata to this database and create all its tables.
        metadata.bind = services_db.engine
        setup_all()
        create_all()

        return services_db
Beispiel #5
0
    def combine_tables(self, db_config, db_name, from_tables_names,
                       to_table_name):
        """Create ``to_table_name`` in ``db_name`` as the UNION ALL of all
        rows of the given source tables, dropping any existing table of
        that name first.

        Raises:
            NameError -- if the database is unknown or a table name is
                unknown/invalid.
        """
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except Exception:
            # Parenthesized raise is Python-2/3 compatible; the original
            # used the Python-2-only ``raise NameError, msg`` form.
            raise NameError("Unknown database '%s'!" % db_name)

        union_statements = ['(SELECT * FROM %s)' % from_table_name
                            for from_table_name in from_tables_names]

        create_table_query = "CREATE TABLE %s " % to_table_name
        create_table_query += ' UNION ALL '.join(union_statements)
        create_table_query += ';'

        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % to_table_name)
            db.DoQuery(create_table_query)
        except Exception:
            raise NameError("Unknown or invalid table specified!")
    def setUp(self):
        """Create the test database on every installed engine, keeping an
        (OpusDatabase, server) pair for each engine that could be started."""
        configurations = [TestDatabaseConfiguration(protocol=engine)
                          for engine in _get_installed_database_engines()]

        self.test_db = 'OpusDatabaseTestDatabase'
        test_table = 'test_table'

        self.dbs = []
        for configuration in configurations:
            try:
                server = DatabaseServer(configuration)
                # Always start from a freshly created database.
                if server.has_database(self.test_db):
                    server.drop_database(self.test_db)
                server.create_database(self.test_db)
                self.assertTrue(
                    server.has_database(database_name=self.test_db))
                db = OpusDatabase(database_server_configuration=configuration,
                                  database_name=self.test_db)
                self.assertFalse(db.table_exists(test_table))
                self.dbs.append((db, server))
            except:
                # Engines that fail to start are only warned about so the
                # remaining engines still get set up.
                import traceback
                traceback.print_exc()

                logger.log_warning('Could not start server for protocol %s' %
                                   configuration.protocol)
def opusRun(progressCB, logCB, params):
    """Run a single SQL query against the configured database.

    Parameters:
        progressCB -- progress callback (unused here).
        logCB -- logging callback taking a message string.
        params -- mapping with keys 'database_name',
            'database_server_connection' and 'query'.
    """
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    query = param_dict['query']

    # create engine and connection
    logCB("Opening database connection\n")  # fixed typo: was "Openeing"
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    # Do Query
    logCB("Running Query...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
    def create_building_types_table(self, db_config, db_name):
        """(Re)create and populate the ``job_building_types`` lookup table
        in the given MySQL database.

        Raises:
            NameError -- if the database is unknown or the table cannot be
                created.
        """
        table_name = 'job_building_types'

        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               protocol='mysql',
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except Exception:
            # Parenthesized raise is Python-2/3 compatible; the original
            # used the Python-2-only ``raise NameError, msg`` form.
            raise NameError("Unknown database '%s'!" % db_name)

        logger.log_status('Creating table %s.' % table_name)
        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % table_name)
            db.DoQuery('CREATE TABLE %s '
                       '(id INT, name varchar(50), home_based INT);' %
                       table_name)
        except Exception:
            raise NameError("Invalid table name specified! (%s)" % table_name)

        db.DoQuery('INSERT INTO %s (id, name, home_based) VALUES'
                   '(1, "commercial", 0),'
                   '(3, "industrial", 0),'
                   '(2, "governmental", 0),'
                   '(4, "home_based", 1);' % table_name)
    def convert_databases(self, db_config, config):
        """Convert the configured tables of each listed database.

        ``config`` keys:
            'databases'      -- list of database names to convert.
            'tables'         -- mapping of database name -> tables to convert.
            'backup'         -- optional, default True.
            'backup_postfix' -- optional, default '_old'.
        """
        databases = config['databases']
        tables = config['tables']

        # dict.get replaces the original try/except KeyError fallbacks.
        backup = config.get('backup', True)
        backup_postfix = config.get('backup_postfix', '_old')

        dbconfig = DatabaseServerConfiguration(protocol='mysql',
                                               host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)
            self.convert_database(db, tables[db_name], backup, backup_postfix)
            db.close()

        db_server.close()
Beispiel #10
0
 def setUp(self):
     """Build a scratch MySQL database populated with empty tables whose
     names exercise the renaming logic under test."""
     self.db_name = 'test_rename_commercial_and_industrial'

     self.db_server = DatabaseServer(
         TestDatabaseConfiguration(protocol='mysql'))
     # Fresh database for every test run.
     self.db_server.drop_database(self.db_name)
     self.db_server.create_database(self.db_name)
     self.db = self.db_server.get_database(self.db_name)

     # Tables whose names the conversion is expected to rewrite.
     self.tables_to_convert = (
         'employment_commercial_location_choice_model_specification',
         'employment_commercial_location_choice_model_coefficients',
         'employment_industrial_location_choice_model_specification',
         'employment_industrial_location_choice_model_coefficients',
         )

     # Tables that must be left untouched by the conversion.
     self.other_tables = (
         'i_am_not_to_be_renamed_location_choice_model_specifications',
         'i_am_not_to_be_renamed_location_choice_model_coefficients',
         'employment_industrial_location_choice_model_coefficients_old',
         )

     # Expected table names after a successful conversion.
     self.output_tables = (
         'commercial_employment_location_choice_model_specification',
         'commercial_employment_location_choice_model_coefficients',
         'industrial_employment_location_choice_model_specification',
         'industrial_employment_location_choice_model_coefficients',
         )

     # Create one empty table per input name.
     for table_name in self.tables_to_convert + self.other_tables:
         self.db.DoQuery('CREATE TABLE %s (id INT);' % table_name)
Beispiel #11
0
def opusRun(progressCB, logCB, params):
    """Delete empty household records from the raw PUMS household table.

    Parameters:
        progressCB -- progress callback (unused here).
        logCB -- logging callback taking a message string.
        params -- mapping with keys 'database_name' and
            'database_server_connection'.
    """
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    households_table_name = 'raw_pums_hh_data'

    # NOTE(review): the predicate "h.persons = '00' IS NULL" looks
    # suspicious -- confirm the intended condition for an "empty" record.
    query = "DELETE h.* FROM %s AS h WHERE h.persons = '00' IS NULL" % (
        households_table_name)

    # create engine and connection
    logCB("Opening database connection\n")  # fixed typo: was "Openeing"
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)
    # Do Query
    logCB("Deleting empty household records...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
    def setUp(self):
        """Prepare a database server, a temp directory, and a small test
        attribute cache with two tables for the export round-trip tests."""
        self.db_config = TestDatabaseConfiguration(protocol=self.protocol)
        self.db_config_node = self.db_config._database_configuration_node()
        self.db_server = DatabaseServer(self.db_config)

        self.test_db = 'OpusDatabaseTestDatabase'

        self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
        self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
        self.year = 1000

        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')

        table_a = {
            'tablea_id': array([1, 2, 3]),
            'tablea_id_name': array(['1', '2', '3']),
            'value1': array([1.0, 2.001, 3], dtype='float'),
            ## sqlite has problems handling the bool type
            'value2': array([True, False, False], dtype='i'),
        }
        table_b = {
            'tableb_id': array([1, 2, 3]),
            'tableb_id_name': array(['one', 'two', 'three']),
            'value3': array([1.0, 2.001, 3], dtype='float'),
        }
        self.test_data = {self.year: {'table_a': table_a, 'table_b': table_b}}

        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir,
                                                       self.test_data)
	def run (self):
		"""Export every simulated year of the most recent run into per-year
		MySQL databases named 'ress_<year>'.

		The newest entry (by modification time) under data/vibe_gridcell/runs/
		-- or data/vibe_parcel/ when self.isParcel is True -- is taken as the
		run to export; years 1981 .. 1980+YearsToRun are copied from flt
		storage into the corresponding database.
		"""
		time = -1
		latest = ""
		directoryname = 'data/vibe_gridcell/runs/'
		if self.isParcel is True:
			directoryname = 'data/vibe_parcel/'
		# Find the most recently modified entry in the runs directory.
		for filename in os.listdir(os.path.join(os.environ['OPUS_HOME'], directoryname)):
			print filename
			if time == -1:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename
			if os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename)) > time:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename

		config = DatabaseServerConfiguration(host_name = 'localhost',
                                           user_name = 'urbansim',
					   password = '******',
                                           protocol = 'mysql')
		db_server = DatabaseServer(config)
		# Export each simulated year into its own 'ress_<year>' database.
		for i in range(1981, 1980+int(self.YearsToRun)):
			newdir = latest + '/' + str(i)
			flt_directory_in = os.path.join(os.environ['OPUS_HOME'], directoryname ,newdir)
			input_storage = flt_storage(storage_location = flt_directory_in)
			db = db_server.get_database('ress_'+str(i))
			output_storage = StorageFactory().get_storage('sql_storage', storage_location = db)
			ExportStorage().export(in_storage=input_storage, out_storage=output_storage)
    def __init__(self, config):
        """Randomly assign every household to a building, weighting each
        building within the household's zone by its share of the zone's
        residential units, then write the result to the 'households' table.

        Output goes to the estimation database when
        'estimation_database_configuration' is present in ``config``,
        otherwise to year base_year+1 of the flt cache.
        """
        ss = SimulationState(new_instance=True)
        ss.set_current_time(config['base_year'])
        ss.set_cache_directory(config['cache_directory'])

        SessionConfiguration(new_instance=True,
                             package_order=config['dataset_pool_configuration'].package_order,
                             in_storage=AttributeCache())
        #if not os.path.exists(config['cache_directory']):  ## if cache exists, it will automatically skip
        cacher = CreateBaseyearCache()
        cache_dir = cacher.run(config)

        # Choose the output storage: estimation database if configured,
        # otherwise the flt cache for the year after the base year.
        if 'estimation_database_configuration' in config:
            db_server = DatabaseServer(config['estimation_database_configuration'])
            db = db_server.get_database(config['estimation_database_configuration'].database_name)
            out_storage = StorageFactory().get_storage(
                'sql_storage', 
                storage_location = db)
        else:
            output_cache = os.path.join(config['cache_directory'], str(config['base_year']+1))
            out_storage = StorageFactory().get_storage('flt_storage', storage_location=output_cache)

        dataset_pool = SessionConfiguration().get_dataset_pool()
        households = dataset_pool.get_dataset("household")
        buildings = dataset_pool.get_dataset("building")
        zones = dataset_pool.get_dataset("zone")
        zone_ids = zones.get_id_attribute()
        capacity_attribute_name = "residential_units"  #_of_use_id_%s" % id
        capacity_variable_name = "%s=sanfrancisco.zone.aggregate_%s_from_building" % \
                                 (capacity_attribute_name, capacity_attribute_name)
        buildings.compute_variables("sanfrancisco.building.zone_id", dataset_pool=dataset_pool)
        zones.compute_variables(capacity_variable_name, dataset_pool=dataset_pool)

        building_zone_id = buildings.get_attribute('zone_id')
        
#        is_household_unplace = datasets['household'].get_attribute("building_id") <= 0
        is_household_unplaced = 1 #all households are unplaced
        household_building_id = zeros(households.size(), dtype='int32')-1 #datasets['household'].get_attribute("building_id")
        
        for zone_id in zone_ids:
            capacity = zones.get_attribute_by_id(capacity_attribute_name, zone_id)
            is_household_in_this_zone = (households.get_attribute('zone_id') == zone_id)
            is_unplaced_household_in_this_zone = is_household_in_this_zone * is_household_unplaced
            is_building_in_this_zone = (building_zone_id == zone_id)
#            if not is_household_in_this_zone.sum() <= capacity:
            if capacity == 0 or is_household_in_this_zone.sum()==0:
                print "WARNING: zone %s has %s households but only %s units" % (zone_id, is_household_in_this_zone.sum(), capacity)
                continue
                        
            # Probability of each building is its share of the zone's units
            # (zero for buildings outside this zone).
            prob = buildings.get_attribute(capacity_attribute_name) * is_building_in_this_zone / array(capacity, dtype=float64)

            # Sample one building per unplaced household in this zone via
            # inverse-CDF sampling over the building probabilities.
            r = random(sum(is_unplaced_household_in_this_zone))
            prob_cumsum = ncumsum(prob)
            index_to_bldg = searchsorted(prob_cumsum, r)

            household_building_id[where(is_unplaced_household_in_this_zone)] = buildings.get_attribute_by_index('building_id', index_to_bldg)

#        import pdb;pdb.set_trace()
        households.set_values_of_one_attribute('building_id', household_building_id)
        households.write_dataset(out_table_name='households', out_storage=out_storage)
Beispiel #15
0
        def setUp(self):

            db_configs = []
            for engine in get_testable_engines():
                config = TestDatabaseConfiguration(protocol=engine)
                db_configs.append(config)

            self.database_name = 'test_database'
            self.dbs = []
            for config in db_configs:
                try:
                    server = DatabaseServer(config)
                    if server.has_database(self.database_name):
                        server.drop_database(self.database_name)
                    server.create_database(self.database_name)
                    self.assertTrue(
                        server.has_database(database_name=self.database_name))
                    db = OpusDatabase(database_server_configuration=config,
                                      database_name=self.database_name)
                    storage = sql_storage(storage_location=db)
                    self.dbs.append((db, server, storage))
                    self.storage = storage
                except:
                    import traceback
                    traceback.print_exc()

                    print 'WARNING: could not start server for protocol %s' % config.protocol
Beispiel #16
0
    def run_run(self, run_resources, run_name = None, run_as_multiprocess=True, run_in_background=False):
        """check run hasn't already been marked running
           log it in to run_activity
           run simulation
           mark run as done/failed
           """

        if not self.ready_to_run:
            raise 'RunManager.setup_new_run must be execute before RunManager.run_run'

        if run_resources['cache_directory'] != self.current_cache_directory:
            raise 'The configuration and the RunManager conflict on the proper cache_directory'

        self.add_row_to_history(self.run_id, run_resources, "started", run_name = run_name)

        try:
            # Test pre-conditions
            model_system_class_path = run_resources.get('model_system', None)
            if model_system_class_path is None:
                raise TypeError, ("The configuration must specify model_system, the"
                    " full Opus path to the model system to be used.")

            # Create baseyear cache
            self.create_baseyear_cache(run_resources)

            # Create brand-new output database (deletes any prior contents)
            if 'estimation_database_configuration' in run_resources:
                db_server = DatabaseServer(run_resources['estimation_database_configuration'])
                if not db_server.has_database(run_resources['estimation_database_configuration'].database_name):
                    db_server.create_database(run_resources['estimation_database_configuration'].database_name)


            # Run simulation
            exec('from %s import ModelSystem' % model_system_class_path)

            model_system = ModelSystem()
            self.model_system = model_system

            if 'base_year' not in run_resources:
                run_resources['base_year'] = run_resources['years'][0] - 1

            self._create_seed_dictionary(run_resources)
#            model_system.run_in_same_process(run_resources)
            if run_as_multiprocess:
                model_system.run_multiprocess(run_resources)
            else:
                model_system.run_in_one_process(run_resources, run_in_background=run_in_background, class_path=model_system_class_path)

            self.model_system = None

        except:
            self.add_row_to_history(self.run_id, run_resources, "failed", run_name = run_name)
            self.ready_to_run = False
            raise # This re-raises the last exception
        else:
            self.add_row_to_history(self.run_id, run_resources, "done", run_name = run_name)

        self.ready_to_run = False
        return self.run_id
    def get(self, database):
        """Return a sql_storage attached to the named scenario database."""
        server = DatabaseServer(ScenarioDatabaseConfiguration())
        opened_db = server.get_database(database)

        return StorageFactory().get_storage('sql_storage',
                                            storage_location=opened_db)
Beispiel #18
0
def drop_table(table_name, dbname, schema):
    """Best-effort drop of ``schema.table_name`` in the given postgres
    database; any failure (e.g. the table does not exist) is ignored."""
    server_config = IndicatorsDatabaseConfiguration(protocol='postgres')
    db_server = DatabaseServer(database_server_configuration=server_config)
    target_db = db_server.get_database(database_name=dbname)
    drop_query = 'DROP TABLE %s.%s' % (schema, table_name)
    try:
        target_db.execute(drop_query)
        logCB('DROPPED TABLE %s \n' % table_name)
    except:
        # Deliberately swallow every failure -- dropping is best effort.
        return
Beispiel #19
0
    def save_results(self, out_storage=None, model_name=None):
        """Write the estimated specification and coefficients to storage.

        Tables '<model_name>_specification' and '<model_name>_coefficients'
        are written to ``out_storage`` (or, failing that, to the database
        named by 'estimation_database_configuration'), and always to the
        attribute cache.

        Raises ValueError when specification/coefficients or model_name are
        missing.
        """
        if self.specification is None or self.coefficients is None:
            raise ValueError, "model specification or coefficient is None"

        #invalid = self.coefficients.is_invalid()
        # NOTE: validity check is disabled -- this branch never runs.
        if False:
            logger.log_warning('Invalid coefficients. Not saving results!')
            return

        # Resolve the model name: explicit argument, then config, then the
        # model's own name.
        if model_name is None:
            model_name = self.config.get('model_name_for_coefficients', None)
            
        if model_name is None:
            if self.model_name is not None:
                model_name = self.model_name
            else:
                raise ValueError, "model_name unspecified"

        out_storage_available = True
        if out_storage:
            pass
        elif 'estimation_database_configuration' in self.config:
            try:
                db_server = DatabaseServer(self.config['estimation_database_configuration'])
                database_name = self.config["estimation_database_configuration"].database_name
    
                if not db_server.has_database(database_name):
                    db_server.create_database(database_name)
    
                output_db = db_server.get_database(database_name)
                out_storage = StorageFactory().get_storage(
                    type='sql_storage',
                    storage_location=output_db)
            except:
                logger.log_warning("Problem with connecting database given by 'estimation_database_configuration'.")
                out_storage_available = False
        else:
            logger.log_warning("No estimation_database_configuration given.")
            out_storage_available = False

        # the original model name of development_project_lcm is too long as a mysql db table name, truncate it
        if model_name.rfind("_development_project_location_choice_model") >=0:
            model_name = model_name.replace('_project', '')
        specification_table = '%s_specification' % model_name
        coefficients_table = '%s_coefficients' % model_name
        if out_storage_available:
            logger.start_block("Writing specification and coefficients into storage given by 'estimation_database_configuration'")
            self.specification.write(out_storage=out_storage, out_table_name=specification_table)
            self.coefficients.write(out_storage=out_storage, out_table_name=coefficients_table)
            logger.end_block()
        # The attribute cache always receives a copy, regardless of whether
        # a database storage was available.
        logger.start_block("Writing specification and coefficients into %s" % AttributeCache().get_storage_location())
        self.specification.write(out_storage=AttributeCache(), out_table_name=specification_table)
        self.coefficients.write(out_storage=AttributeCache(), out_table_name=coefficients_table)        
        logger.end_block()
def opusRun(progressCB, logCB, params):
    """Load the pums_id_to_bg_id crosswalk table from a CSV file and
    normalize its tract values (tracts with <= 3 digits are scaled by 100)."""
    params_by_name = {}
    for key, value in params.iteritems():
        params_by_name[str(key)] = str(value)

    # get parameter values
    pums_id_to_bg_id_file_path = params_by_name['pums_id_to_bg_id_file_path']
    database_server_connection = params_by_name['database_server_connection']
    database_name = params_by_name['database_name']

    # set up database server configuration
    server_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=server_config)
    db = server.get_database(database_name=database_name)

    # Main application routine:

    db.execute("""
            CREATE TABLE pums_id_to_bg_id (
              county int,
              tract int,
              bg int,
              puma5 int,
              tract_string text,
              number_of_digits int);
            """)

    db.execute("""
            LOAD DATA LOCAL INFILE '%s' INTO TABLE pums_id_to_bg_id
            FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n';
            """ % (pums_id_to_bg_id_file_path))

    db.execute("""
            update pums_id_to_bg_id
            set tract_string = tract;
    """)

    db.execute("""
            update pums_id_to_bg_id
            set number_of_digits = length(tract_string);
    """)

    # Tract codes with three or fewer digits are multiplied by 100.
    db.execute("""
            update pums_id_to_bg_id
            set tract = tract*100
            where number_of_digits <= 3;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
 def cleanup(self, remove_cache, remove_output_database):
     """Remove all outputs of this simulation."""
     self.simulation_state.remove_singleton(delete_cache=remove_cache)
     # Remove SessionConfiguration singleton, if it exists
     Singleton().remove_singleton_for_class(SessionConfiguration)

     cache_dir = self.config['cache_directory']
     if os.path.exists(cache_dir):
         rmtree(cache_dir)
     if remove_output_database and ('estimation_database_configuration' in self.config):
         estimation_config = self.config['estimation_database_configuration']
         DatabaseServer(estimation_config).drop_database(estimation_config.database_name)
def opusRun(progressCB, logCB, params):
    """Export one or more tables from an SQL database into a given year of
    an Opus attribute cache, reporting progress per table."""
    opts = {}
    for key, val in params.iteritems():
        opts[str(key)] = str(val)

    opus_data_directory = paths.prepend_opus_home_if_relative(
        opts['opus_data_directory'])
    opus_data_year = opts['opus_data_year']
    database_name = opts['database_name']
    table_name = opts['table_name']
    database_server_connection = opts['database_server_connection']
    overwrite = opts['overwrite']

    server_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=server_config)
    source_db = server.get_database(database_name=database_name,
                                    create_if_doesnt_exist=False)

    input_storage = sql_storage(storage_location=source_db)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    # 'ALL' means every table in the source database; otherwise the
    # parameter is a whitespace-separated list of table names.
    if table_name == 'ALL':
        table_names = input_storage.get_table_names()
    else:
        table_names = re.split(' +', table_name.strip())

    tables = len(table_names)
    lst_out = create_list_string(table_names, ', ')

    logCB('caching tables:\n%s\n' % lst_out)

    for j, i in enumerate(table_names, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=i,
            in_storage=input_storage,
            out_storage=output_storage,
            overwrite=overwrite,
        )
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
Beispiel #23
0
 def _get_db(self, db_config, db_name):
     """Return the named MySQL database from the configured server.

     Raises:
         NameError -- if the database cannot be opened.
     """
     dbconfig = DatabaseServerConfiguration(
         protocol = 'mysql',
         host_name = db_config.host_name,
         user_name = db_config.user_name,
         password = db_config.password
     )
     db_server = DatabaseServer(dbconfig)

     try:
         return db_server.get_database(db_name)
     except Exception:
         # Parenthesized raise is Python-2/3 compatible; the original used
         # the Python-2-only ``raise NameError, msg`` form.
         raise NameError("Unknown database '%s'!" % db_name)
def opusRun(progressCB, logCB, params):
    """Export tables from one or all years of an Opus attribute cache into
    an SQL database (one database per year when exporting all years)."""
    opts = {}
    for key, val in params.iteritems():
        opts[str(key)] = str(val)

    database_name = opts['database_name']
    opus_data_directory = opts['opus_data_directory']
    opus_data_year = opts['opus_data_year']
    opus_table_name = opts['opus_table_name']

    database_server_connection = opts['database_server_connection']
    server_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=server_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    # Four-digit directory names under the cache root are years.
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        if opus_data_year == 'ALL':
            # One output database per exported year, e.g. "<name>_1980".
            opusdb = server.get_database(database_name=database_name + "_" +
                                         str(year))
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        # 'ALL' means every table in the cache; otherwise a whitespace-
        # separated list of table names.
        if opus_table_name == 'ALL':
            table_names = input_storage.get_table_names()
        else:
            table_names = re.split(' +', opus_table_name.strip())

        for table in table_names:
            logCB("Exporting %s, %s, %s\n" % (table, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=table,
                in_storage=input_storage,
                out_storage=output_storage,
            )
def create_dataset_from_sql_storage():
    """Open the 'services' database and wrap its 'available_runs' table in a Dataset.

    NOTE(review): relies on the services-database connection environment
    variables being set (or on ServicesDatabaseConfiguration's defaults) —
    confirm before running outside a configured environment.
    """
    from opus_core.database_management.configurations.services_database_configuration import ServicesDatabaseConfiguration
    from opus_core.database_management.database_server import DatabaseServer

    server = DatabaseServer(ServicesDatabaseConfiguration())
    services_db = server.get_database('services')  # name of the database

    sql_store = StorageFactory().get_storage('sql_storage',
                                             storage_location=services_db)
    # id_name=[] because the table has no unique identifier column.
    return Dataset(in_storage=sql_store,
                   in_table_name='available_runs',  # name of the table
                   id_name=[])
# Beispiel #26 (scraped example separator; score: 0)
    def setUp(self):
        """Create a fresh test database, seed its fixture tables, and make a temp dir."""
        self.database_name = 'test_travel_model_input_file_writer'
        self.dbconfig = TestDatabaseConfiguration()
        self.db_server = DatabaseServer(self.dbconfig)

        # Start from a clean slate: drop any leftover database, then recreate it.
        self.db_server.drop_database(self.database_name)
        self.db_server.create_database(self.database_name)
        self.database = self.db_server.get_database(self.database_name)

        # Populate the fixture tables (same order as the individual calls before).
        for create_table in (self.create_jobs_table,
                             self.create_zones_table,
                             self.create_employment_sector_groups_table,
                             self.create_constant_taz_columns_table,
                             self.create_households_table):
            create_table(self.database)

        self.tempdir_path = tempfile.mkdtemp(prefix='opus_tmp')
    def __init__(self):
        db_config = DatabaseServerConfiguration(
            host_name=settings.get_db_host_name(),
            user_name=settings.get_db_user_name(),
            password=settings.get_db_password())
        db_server = DatabaseServer(db_config)
        db = db_server.get_database(settings.db)

        in_storage = StorageFactory().get_storage('sql_storage',
                                                  storage_location=db)

        gcs = GridcellDataset(in_storage=in_storage, nchunks=5)
        print "Read and Write GridcellDataset."
        out_storage = StorageFactory().build_storage_for_dataset(
            type='flt_storage', storage_location=settings.dir)
        ReadWriteADataset(gcs,
                          out_storage=out_storage,
                          out_table_name=settings.gcsubdir)
# Beispiel #28 (scraped example separator; score: 0)
    def setUp(self):
        """Build four small MySQL job tables with known row counts for combine tests."""
        self.db_server = DatabaseServer(
            TestDatabaseConfiguration(protocol='mysql'))
        self.db_name = 'test_combine_tables'
        # Recreate the database from scratch so earlier runs cannot interfere.
        self.db_server.drop_database(self.db_name)
        self.db_server.create_database(self.db_name)
        self.db = self.db_server.get_database(self.db_name)

        # (table name, letter code stored in building_type) pairs.
        self.from_tables = (
            ('table_a', 'A'),
            ('table_b', 'B'),
            ('table_c', 'C'),
            ('table_d', 'D'),
        )
        # job_ids assigned to each source table (1, 2, 4 and 8 rows).
        ids_by_table = {
            'table_a': [1],
            'table_b': [2, 3],
            'table_c': [4, 5, 6, 7],
            'table_d': [8, 9, 10, 11, 12, 13, 14, 15],
        }
        self.to_table = 'to_table'

        for table_name, code in self.from_tables:
            self.db.DoQuery('CREATE TABLE %s (job_id INT, sector_id INT, '
                            'grid_id INT, sic INT, building_type varchar(5), '
                            'home_based tinyint(4), impute_flag tinyint(4));' %
                            table_name)

            # One row per job id; every numeric column repeats the id and
            # building_type carries the table's letter code.
            row_literals = [
                '(%(j)s, %(j)s, %(j)s, %(j)s, "%(type)s", %(j)s, %(j)s)' % {
                    'j': job_id,
                    'type': code,
                } for job_id in ids_by_table[table_name]
            ]
            self.db.DoQuery(
                'INSERT INTO %(table_name)s (job_id, sector_id, '
                'grid_id, sic, building_type, home_based, impute_flag) VALUES '
                '%(values)s;' % {
                    'table_name': table_name,
                    'values': ','.join(row_literals),
                })
            # Sanity-check that every row actually landed.
            row_count = self.db.GetResultsFromQuery(
                'SELECT count(*) FROM %s' % table_name)[1][0]
            self.assertEqual(row_count, len(ids_by_table[table_name]))
# Beispiel #29 (scraped example separator; score: 0)
    def setUp(self):
        """Create a three-generation chain of test databases and seed them."""
        self.databases = ['db_chain_son', 'db_chain_dad', 'db_chain_granddad']

        self.config = TestDatabaseConfiguration(
            protocol=get_default_database_engine())
        self.server = DatabaseServer(self.config)
        self._create_databases()
        # Open each database, oldest generation first, and keep a handle
        # under an attribute of the same name (db_chain_granddad, ...).
        for db_name in reversed(self.databases):
            setattr(self, db_name, self.server.get_database(db_name))

        self._create_tables()
        # Cache the schema tables the tests inspect.
        self.granddad_schema = self.db_chain_granddad.get_table('base_schema')
        self.dad_schema = self.db_chain_dad.get_table('base_schema')
        self.granddad_schema2 = self.db_chain_granddad.get_table(
            'base_schema2')
        self.son_schema2 = self.db_chain_son.get_table('base_schema2')

        self._seed_values()
    def convert_databases(self,
                          db_config,
                          databases,
                          tables,
                          patterns,
                          backup=True,
                          backup_postfix='_old'):
        """Convert every named database on a MySQL server.

        Opens one server connection, then for each name in ``databases``
        opens that database and converts the tables listed for it in
        ``tables`` (a mapping of database name -> table list).
        ``patterns``, ``backup`` and ``backup_postfix`` are forwarded
        unchanged to convert_database.

        db_config -- object carrying host_name/user_name/password for the server

        Fix: connections are now closed in ``finally`` blocks, so a failure
        inside convert_database no longer leaks the per-database connection
        or the server connection.
        """
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)
        try:
            for db_name in databases:
                db = db_server.get_database(db_name)
                try:
                    self.convert_database(db, tables[db_name], patterns,
                                          backup, backup_postfix)
                finally:
                    db.close()
        finally:
            db_server.close()