Example 1
    def __init__(self, parent_widget):
        QDialog.__init__(self, parent_widget)
        self.setupUi(self)

        desc = "\n".join(
            (
                "%(PROTOCOL_TAG)s: Database protocol. Double-click and hold to see the available options.",
                "%(HOST_NAME_TAG)s: Host name (and database name for Postgres).",
                '    * Postgres: Use the format "host_name/database_name" (without quotes).',
                "          - Leave host_name empty to connect to localhost.",
                "          - Leave database_name empty (but retain the slash) to connect",
                "            to the default database for the database user.",
                "    * Other protocols: Enter the host name only.",
                "%(USER_NAME_TAG)s: Database user name",
                "%(PASSWORD_TAG)s: Database password",
                "",
                "To add a new database configuration, please edit",
                "%(conf_file_path)s",
            )
        )
        desc = QtGui.QApplication.translate("DatabaseSettingsEditGui", desc, None, QtGui.QApplication.UnicodeUTF8)
        desc = unicode(desc) % dict(
            [
                (n, eval("DatabaseServerConfiguration.%s" % n))
                for n in dir(DatabaseServerConfiguration)
                if re.match(".*_TAG$", n)
            ]
            + [("conf_file_path", DatabaseServerConfiguration.get_default_configuration_file_path())]
        )
        desc = QtCore.QString(desc)

        self.description.setText(desc)

        self._config_filename = DatabaseServerConfiguration.get_default_configuration_file_path()

        try:
            self.xml_root = ElementTree(file=self._config_filename).getroot()
            self.base_widget = self.variableBox
            self.xml_controller = XmlController_DatabaseConfig(self)
            # Turns out that Qt Garbage collects the model (and delegate) if we don't explicitly
            # bind it to a Python object in addition to using the PyQt .setModel() method.

            self.tree_view = self.xml_controller.view
            return

        except IOError, ex:
            MessageBox.error(mainwindow=self, text="Could not initialize Database Settings", detailed_text=str(ex))
            self.xml_root = None
            self._config_filename = ""
            self.configFile = None
Example 2
    def __init__(self, parent_widget):
        QDialog.__init__(self, parent_widget)
        self.setupUi(self)

        desc = '\n'.join((
            '%(PROTOCOL_TAG)s: Database protocol. Double-click and hold to see the available options.',
            '%(HOST_NAME_TAG)s: Host name (and database name for Postgres).',
            '    * Postgres: Use the format "host_name/database_name" (without quotes).',
            '          - Leave host_name empty to connect to localhost.',
            '          - Leave database_name empty (but retain the slash) to connect',
            '            to the default database for the database user.',
            '    * Other protocols: Enter the host name only.',
            '%(USER_NAME_TAG)s: Database user name',
            '%(PASSWORD_TAG)s: Database password', '',
            'To add a new database configuration, please edit',
            '%(conf_file_path)s'))
        desc = QtGui.QApplication.translate("DatabaseSettingsEditGui", desc,
                                            None,
                                            QtGui.QApplication.UnicodeUTF8)
        desc = unicode(desc) % dict(
            [(n, eval('DatabaseServerConfiguration.%s' % n))
             for n in dir(DatabaseServerConfiguration)
             if re.match('.*_TAG$', n)] +
            [('conf_file_path',
              DatabaseServerConfiguration.get_default_configuration_file_path(
              ))])
        desc = QtCore.QString(desc)

        self.description.setText(desc)

        self._config_filename = DatabaseServerConfiguration.get_default_configuration_file_path(
        )

        try:
            self.xml_root = ElementTree(file=self._config_filename).getroot()
            self.base_widget = self.variableBox
            self.xml_controller = XmlController_DatabaseConfig(self)
            # Turns out that Qt Garbage collects the model (and delegate) if we don't explicitly
            # bind it to a Python object in addition to using the PyQt .setModel() method.

            self.tree_view = self.xml_controller.view
            return

        except IOError, ex:
            MessageBox.error(mainwindow=self,
                             text='Could not initialize Database Settings',
                             detailed_text=str(ex))
            self.xml_root = None
            self._config_filename = ''
            self.configFile = None
Example 3
    def combine_tables(self, db_config, db_name, from_tables_names,
                       to_table_name):
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name

        union_statements = []
        for from_table_name in from_tables_names:
            union_statements.append('(SELECT * FROM %s)' % from_table_name)

        create_table_query = "CREATE TABLE %s " % to_table_name
        create_table_query += ' UNION ALL '.join(union_statements)
        create_table_query += ';'

        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % to_table_name)
            db.DoQuery(create_table_query)
        except:
            raise NameError, "Unknown or invalid table specified!"
Example 4
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    query = param_dict['query']

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    # Do Query
    logCB("Running Query...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
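The examples above build a DatabaseServerConfiguration in two ways: by naming a connection defined in the default configuration file (database_configuration=..., as in Example 4) or by spelling out protocol, host_name, user_name and password (as in Example 3). A minimal sketch consolidating both patterns follows; the import paths and the helper name run_query are assumptions, not taken from the examples.

# Minimal sketch; import paths and the helper name run_query are assumed.
from opus_core.database_management.configurations.database_server_configuration import DatabaseServerConfiguration
from opus_core.database_management.database_server import DatabaseServer

def run_query(database_server_connection, database_name, query):
    # Pattern 1: resolve a named connection from the configuration file
    # returned by DatabaseServerConfiguration.get_default_configuration_file_path().
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    # Pattern 2 (alternative): spell the connection out explicitly, e.g.
    # dbs_config = DatabaseServerConfiguration(protocol='mysql',
    #                                          host_name='localhost',
    #                                          user_name='urbansim',
    #                                          password='secret')
    server = DatabaseServer(database_server_configuration=dbs_config)
    db = server.get_database(database_name=database_name)
    try:
        db.execute(query)
    finally:
        db.close()
        server.close()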
Example 5
    def create_building_types_table(self, db_config, db_name):
        table_name = 'job_building_types'

        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               protocol='mysql',
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        try:
            db = db_server.get_database(db_name)
        except:
            raise NameError, "Unknown database '%s'!" % db_name

        logger.log_status('Creating table %s.' % table_name)
        try:
            db.DoQuery('DROP TABLE IF EXISTS %s;' % table_name)
            db.DoQuery('CREATE TABLE %s '
                       '(id INT, name varchar(50), home_based INT);' %
                       table_name)
        except:
            raise NameError, "Invalid table name specified! (%s)" % table_name

        db.DoQuery('INSERT INTO %s (id, name, home_based) VALUES'
                   '(1, "commercial", 0),'
                   '(3, "industrial", 0),'
                   '(2, "governmental", 0),'
                   '(4, "home_based", 1);' % table_name)
Example 6
    def convert_databases(self, db_config, config):
        databases = config['databases']
        tables = config['tables']

        try:
            backup = config['backup']
        except KeyError:
            backup = True

        try:
            backup_postfix = config['backup_postfix']
        except KeyError:
            backup_postfix = '_old'

        dbconfig = DatabaseServerConfiguration(protocol='mysql',
                                               host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)

            self.convert_database(db, tables[db_name], backup, backup_postfix)

            db.close()

        db_server.close()
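The config dictionary read by convert_databases above carries the database list, a per-database table mapping, and optional backup settings. A hedged sketch of its expected shape, with hypothetical database and table names (the command-line driver near the end of this page assembles it the same way):

# Hedged sketch of the config expected by convert_databases above;
# database and table names are hypothetical placeholders.
config = {
    'databases': ['my_database'],
    'tables': {'my_database': ['gridcells', 'households']},
    'backup': True,             # optional, defaults to True
    'backup_postfix': '_old',   # optional, defaults to '_old'
}
# ConvertDatabase().convert_databases(db_config, config)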
Example 7
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    households_table_name = 'raw_pums_hh_data'

    query = "DELETE h.* FROM %s AS h WHERE h.persons = '00' IS NULL" % (
        households_table_name)

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)
    # Do Query
    logCB("Deleting empty household records...\n")
    opus_db.execute(query)

    # Finish up
    logCB("Closing database connection\n")
    opus_db.close()
    logCB('Finished running query\n')
Example 8
	def run (self):
		time = -1
		latest = ""
		directoryname = 'data/vibe_gridcell/runs/'
		if self.isParcel is True:
			directoryname = 'data/vibe_parcel/'
		for filename in os.listdir(os.path.join(os.environ['OPUS_HOME'], directoryname)):
			print filename
			if time == -1:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename
			if os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename)) > time:
				time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
				latest = filename

		config = DatabaseServerConfiguration(host_name = 'localhost',
                                           user_name = 'urbansim',
					   password = '******',
                                           protocol = 'mysql')
		db_server = DatabaseServer(config)
		for i in range(1981, 1980+int(self.YearsToRun)):
			newdir = latest + '/' + str(i)
			flt_directory_in = os.path.join(os.environ['OPUS_HOME'], directoryname ,newdir)
			input_storage = flt_storage(storage_location = flt_directory_in)	
			db = db_server.get_database('ress_'+str(i))
			output_storage = StorageFactory().get_storage('sql_storage', storage_location = db)
			ExportStorage().export(in_storage=input_storage, out_storage=output_storage)
Example 9
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # TODO:
    #    - automatically get geometry column name (probably requires custom type in sqlalchemy)
    #    - more error checking and messages
    #    - get all columns?
    #    - set constraint on new primary_key column instead of just naming it oid

    # get parameter values

    database_name = param_dict['database_name']
    drop_existing = param_dict['drop_existing']
    schema = param_dict['schema']
    new_table_name = param_dict['new_table_name']
    geometry_field_name = param_dict['geometry_field_name']
    existing_table_name = param_dict['existing_table_name']
    centroid_inside_polygon = param_dict['centroid_inside_polygon']
    database_server_connection = param_dict['database_server_connection']

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    connection_string = str(dbs_config) + '/%s' % (database_name)
    engine = create_engine(connection_string)
    connection = engine.connect()

    metadata = MetaData()
    metadata.bind = engine
    metadata.reflect(schema=schema)

    # get primary key
    primary_key_name = get_primary_key(metadata, schema, existing_table_name)

    # drop existing table
    if drop_existing == 'True':
        drop_table(new_table_name, schema, connection)

    # force centroid inside polygon
    if centroid_inside_polygon == 'True':
        centroid_function = 'ST_PointOnSurface'
    else:
        centroid_function = 'ST_Centroid'

    # set up query
    query = '''CREATE TABLE %s.%s as
    SELECT %s as oid, %s(%s) as wkb_geometry from %s.%s;
            ''' % (schema, new_table_name, primary_key_name, centroid_function,
                   geometry_field_name, schema, existing_table_name)

    # execute query
    logCB(query + '\n')
    connection.execute(query)
    connection.close()
    logCB('Finished creating %s\n' % new_table_name)
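Examples 9 and 13 hand the connection to SQLAlchemy by appending the database name to str(dbs_config). A minimal sketch of that pattern, assuming (as those examples imply) that str(dbs_config) yields a server-level connection URL; the import paths and the helper name get_engine are assumptions.

# Minimal sketch; assumes str(dbs_config) is a server-level SQLAlchemy URL.
from sqlalchemy import create_engine
from opus_core.database_management.configurations.database_server_configuration import DatabaseServerConfiguration

def get_engine(database_server_connection, database_name):
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    connection_string = str(dbs_config) + '/%s' % database_name
    return create_engine(connection_string)

# Usage (hypothetical names):
# engine = get_engine('my_postgres_connection', 'my_gis_database')
# connection = engine.connect()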
Example 10
    def __init__(self,
                 protocol = None, 
                 host_name = None, 
                 user_name = None, 
                 password = None,
                 database_name = None,
                 database_configuration = None,
                 test = False):
  
        DatabaseServerConfiguration.__init__(self,
            protocol = protocol,
            host_name = host_name,
            user_name = user_name,
            password = password,
            database_configuration = database_configuration,
            test = test
            )      

        self.database_name = database_name
Example 11
    def __init__(self,
                 protocol=None,
                 host_name=None,
                 user_name=None,
                 password=None,
                 database_name=None,
                 database_configuration=None,
                 test=False):

        DatabaseServerConfiguration.__init__(
            self,
            protocol=protocol,
            host_name=host_name,
            user_name=user_name,
            password=password,
            database_configuration=database_configuration,
            test=test)

        self.database_name = database_name
Example 12
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    pums_id_to_bg_id_file_path = param_dict['pums_id_to_bg_id_file_path']
    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']

    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    # Main application routine:

    opus_db.execute("""
            CREATE TABLE pums_id_to_bg_id (
              county int,
              tract int,
              bg int,
              puma5 int,
              tract_string text,
              number_of_digits int);
            """)

    opus_db.execute("""
            LOAD DATA LOCAL INFILE '%s' INTO TABLE pums_id_to_bg_id
            FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n';
            """ % (pums_id_to_bg_id_file_path))

    opus_db.execute("""
            update pums_id_to_bg_id
            set tract_string = tract;
    """)

    opus_db.execute("""
            update pums_id_to_bg_id
            set number_of_digits = length(tract_string);
    """)

    opus_db.execute("""
            update pums_id_to_bg_id
            set tract = tract*100
            where number_of_digits <= 3;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
Example 13
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # TODO:
    #    - automatically get geometry column name (probably requires custom type in sqlalchemy)
    #    - more error checking and messages

    # get parameter values
    database_name = param_dict['database_name']
    schema = param_dict['schema']
    table_name = param_dict['table_name']
    geometry_column_name = param_dict['geometry_column_name']
    run_vacuum_analyze = param_dict['run_vacuum_analyze']
    index_name = table_name + '_geom_indx'
    database_server_connection = param_dict['database_server_connection']

    # create engine and connection
    logCB("Openeing database connection\n")
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    connection_string = str(dbs_config) + '/%s' % (database_name)
    engine = create_engine(connection_string)
    connection = engine.connect()

    # set up queries
    # drop index if exists
    query1 = '''DROP INDEX IF EXISTS %s.%s;''' % (schema, index_name)
    # create the new index
    query2 = '''CREATE INDEX %s ON %s.%s USING GIST (%s);''' % (
        index_name, schema, table_name, geometry_column_name)

    queries = [query1, query2]

    # execute queries
    for query in queries:
        logCB("Running query:\n")
        logCB("%s\n" % query)
        connection.execute(query)

    # update database statistics
    if run_vacuum_analyze == 'True':
        logCB("Running vacuum\n")
        import psycopg2.extensions
        connection.connection.connection.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        query = '''VACUUM ANALYZE;'''
        connection.execute(query)

    #close connection
    logCB("Closing database connection\n")
    connection.close()
    logCB('Finished creating spatial index on %s\n' % table_name)
Example 14
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(
        opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name,
                                 create_if_doesnt_exist=False)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())

    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)

    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=i,
            in_storage=input_storage,
            out_storage=output_storage,
            overwrite=overwrite,
        )
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
Example 15
 def _get_db(self, db_config, db_name):
     dbconfig = DatabaseServerConfiguration(
         protocol = 'mysql',
         host_name = db_config.host_name,
         user_name = db_config.user_name,
         password = db_config.password                                       
     )
     db_server = DatabaseServer(dbconfig)
     
     try:
         return db_server.get_database(db_name)
     except:
         raise NameError, "Unknown database '%s'!" % db_name      
Example 16
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']

    database_server_connection = params_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        #input_storage = sql_storage(storage_location = opusdb)
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        #output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
        if opus_data_year == 'ALL':
            opusdb = server.get_database(database_name=database_name + "_" +
                                         str(year))
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = re.split(' +', opus_table_name.strip())
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
Example 17
    def __init__(self):
        db_config = DatabaseServerConfiguration(
            host_name=settings.get_db_host_name(),
            user_name=settings.get_db_user_name(),
            password=settings.get_db_password())
        db_server = DatabaseServer(db_config)
        db = db_server.get_database(settings.db)

        in_storage = StorageFactory().get_storage('sql_storage',
                                                  storage_location=db)

        gcs = GridcellDataset(in_storage=in_storage, nchunks=5)
        print "Read and Write GridcellDataset."
        out_storage = StorageFactory().build_storage_for_dataset(
            type='flt_storage', storage_location=settings.dir)
        ReadWriteADataset(gcs,
                          out_storage=out_storage,
                          out_table_name=settings.gcsubdir)
Example 18
    def convert_databases(self,
                          db_config,
                          databases,
                          tables,
                          patterns,
                          backup=True,
                          backup_postfix='_old'):
        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               protocol='mysql',
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)

        for db_name in databases:
            db = db_server.get_database(db_name)
            self.convert_database(db, tables[db_name], patterns, backup,
                                  backup_postfix)
            db.close()

        db_server.close()
Example 19
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    else:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (table_name, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=table_name,
                                       in_storage=input_storage,
                                       out_storage=output_storage)
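Example 19 above moves tables from a SQL database into one year of the Opus attribute cache with ExportStorage. A hedged sketch of a single-table helper built from the same calls; the import paths and the name export_one_table are assumptions, not confirmed by the examples.

# Hedged sketch; import paths and the helper name export_one_table are assumed.
from opus_core.export_storage import ExportStorage
from opus_core.store.sql_storage import sql_storage
from opus_core.store.attribute_cache import AttributeCache
from opus_core.simulation_state import SimulationState
from opus_core.session_configuration import SessionConfiguration
from opus_core.database_management.configurations.database_server_configuration import DatabaseServerConfiguration
from opus_core.database_management.database_server import DatabaseServer

def export_one_table(database_server_connection, database_name,
                     table_name, cache_directory, year):
    # Connect to the source database through a named connection.
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    # Source and target storages, mirroring the example above.
    input_storage = sql_storage(storage_location=opusdb)
    output_storage = AttributeCache(
        cache_directory=cache_directory).get_flt_storage_for_year(year)

    SimulationState().set_current_time(year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    ExportStorage().export_dataset(dataset_name=table_name,
                                   in_storage=input_storage,
                                   out_storage=output_storage)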
Example 20
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values

    pums_hh_table_name = 'raw_pums_hh_data'
    pums_pp_table_name = 'raw_pums_pp_data'
    raw_pums_file_path = param_dict['raw_pums_file_path']
    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']

    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    hostname = dbs_config.host_name
    username = dbs_config.user_name
    password = dbs_config.password
    db_type = dbs_config.protocol

    # Main application routine:
    start = time.time()
    pums = raw_pums_data_processor(username, password, hostname, database_name,
                                   db_type, raw_pums_file_path)

    logCB('Creating PUMS households table...\n')
    pums.create_hh_table(pums_hh_table_name)
    progressCB(3)
    logCB('Creating PUMS persons table...\n')
    pums.create_pp_table(pums_pp_table_name)
    progressCB(50)
    logCB('Inserting household records...\n')
    pums.insert_hh_records(pums_hh_table_name, raw_pums_file_path)
    progressCB(53)
    logCB('Inserting person records...\n')
    pums.insert_pp_records(pums_pp_table_name, raw_pums_file_path)
    progressCB(100)
    logCB('Operation lasted %f minutes\n' % ((time.time() - start) / 60))
Example 21
    def __init__(self, gui_configuration = None):
        QMainWindow.__init__(self)
        self.setupUi(self)

        # Bind the application global instance to this window
        set_opusgui_instance(self)

        self.thread().setPriority(QThread.HighestPriority)

        self.tabWidget = QTabWidget(self.splitter)
        self.splitter.setSizes([400, 500])

        # Create a log window
        self.log_tab = LogWidget(self.tabWidget)
        self.log_tab.start_stdout_capture()

        # Initialize empty project
        self.project = OpusProject()
        self.shows_hidden = False

        # Read database connection names
        db_con_file = DatabaseServerConfiguration.get_default_configuration_file_path()
        db_config_node = ElementTree(file=db_con_file).getroot()
        self.db_connection_names = [node.tag for node in db_config_node if
                                     node.tag != Comment and node.get('hidden') != "True" and node.tag != 'xml_version']
  
        # Application default configuration
        self.gui_config = gui_configuration

        # Bind actions
        self._setup_actions()

        # Manager collection -- initialized by openProject()
        self.managers = {}

#        # Delay before hiding the splash screen
#        time.sleep(1)
#        self.gui_config.splash_screen.hide()

        # Restoring application geometry from last shut down
        settings = QSettings()
        self.restoreGeometry(settings.value("Geometry").toByteArray())
        self.updateFontSize()
        self.setFocus()

        # Variable library
        self.variable_library = None

        # Load the latest project file if that flag is set in GUI configuration
        if self.gui_config.load_latest_on_start:
            try:
                self.openProject(self.gui_config.latest_project_filename or '')
            except:
                self.closeProject()
            if self.gui_config.load_latest_tab_on_start:
                try:
                    self.toolBox.setCurrentIndex(int(self.gui_config.latest_tab_index))
                except:
                    pass
        ###T: removing these until they serve a purpose
        self.menuUtilities.removeAction(self.actPythonView)
        #self.menuUtilities.removeAction(self.actionLog_View)
        self.menuUtilities.removeAction(self.actEditorView)

        self.connect(self, SIGNAL('variables_updated'), self.update_saved_state)
        self.update_saved_state()
Example 22
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    raw_sf3_data_table_name = 'raw_sf3_data'

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    logCB(" ***  WARNING *** \n")
    logCB(" *** At the end of this tool, you will need\n")
    logCB(
        " *** to check to make sure each record in the 'housing_marginals'\n")
    logCB(
        " *** table has a proper 'pumano' assigned to it.  You may need to \n")
    logCB(" *** manually update the 'pumano' for each \n")
    logCB(" *** block group that this set of queries was \n")
    logCB(" *** unable to match up properly due to idiosyncrasies\n")
    logCB(" *** in the way that block group ids are recorded\n")
    logCB(" *** in the original source files.\n")

    opus_db.execute("""
            drop table if exists housing_marginals;
            """)
    progressCB(50)
    logCB("Creating housing_marginals table...\n")
    opus_db.execute("""
            CREATE TABLE housing_marginals
            SELECT
              mid(GEO_ID, 8, 5) as county,
              0 as pumano,
              cast(mid(GEO_ID, 13, 6) as unsigned) as tract,
              cast(right(GEO_ID, 1) as unsigned) as bg,
              P010001 as hhtotal,
              P010008 + P010012 + P010015 as childpresence1,
              P010009 + P010013 + P010016 + P010017 + P010002 as childpresence2,
              P010007 as hhldtype1,
              P010011 as hhldtype2,
              P010014 as hhldtype3,
              P010002 as hhldtype4,
              P010017 as hhldtype5,
              P014010 as hhldsize1,
              P014003+P014011 as hhldsize2,
              P014004+P014012 as hhldsize3,
              P014005+P014013 as hhldsize4,
              P014006+P014014 as hhldsize5,
              P014007+P014015 as hhldsize6,
              P014008+P014016 as hhldsize7,
              P052002 + P052003 as hhldinc1,
              P052004 + P052005 as hhldinc2,
              P052006 + P052007 as hhldinc3,
              P052008 + P052009 as hhldinc4,
              P052010 + P052011 as hhldinc5,
              P052012 + P052013 as hhldinc6,
              P052014 + P052015 as hhldinc7,
              P052016 + P052017 as hhldinc8,
              P009026 as groupquarter1,
              P009027 as groupquarter2
            FROM raw_sf3_data;
    """)

    logCB("Updating PUMA identifier...\n")
    opus_db.execute("""
            UPDATE housing_marginals h, pums_id_to_bg_id p
            SET h.pumano = p.puma5
            WHERE h.county = p.county AND h.tract = p.tract AND h.bg = p.bg;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
Example 23
def opusRun(progressCB,logCB,params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)
    
    # get parameter values
    raw_sf3_file_path = param_dict['raw_sf3_file_path']
    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']
    
    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opus_db = server.get_database(database_name=database_name)   

    # Main application routine:

    opus_db.execute("""
            CREATE TABLE raw_sf3_data (
              GEO_ID text,
              P006001 int,P006002 int,P006003 int,P006004 int,
              P006005 int,P006006 int,P006007 int,P006008 int,
              P008001 int,P008002 int,P008003 int,P008004 int,
              P008005 int,P008006 int,P008007 int,P008008 int,
              P008009 int,P008010 int,P008011 int,P008012 int,
              P008013 int,P008014 int,P008015 int,P008016 int,
              P008017 int,P008018 int,P008019 int,P008020 int,
              P008021 int,P008022 int,P008023 int,P008024 int,
              P008025 int,P008026 int,P008027 int,P008028 int,
              P008029 int,P008030 int,P008031 int,P008032 int,
              P008033 int,P008034 int,P008035 int,P008036 int,
              P008037 int,P008038 int,P008039 int,P008040 int,
              P008041 int,P008042 int,P008043 int,P008044 int,
              P008045 int,P008046 int,P008047 int,P008048 int,
              P008049 int,P008050 int,P008051 int,P008052 int,
              P008053 int,P008054 int,P008055 int,P008056 int,
              P008057 int,P008058 int,P008059 int,P008060 int,
              P008061 int,P008062 int,P008063 int,P008064 int,
              P008065 int,P008066 int,P008067 int,P008068 int,
              P008069 int,P008070 int,P008071 int,P008072 int,
              P008073 int,P008074 int,P008075 int,P008076 int,
              P008077 int,P008078 int,P008079 int,P009001 int,
              P009002 int,P009003 int,P009004 int,P009005 int,
              P009006 int,P009007 int,P009008 int,P009009 int,
              P009010 int,P009011 int,P009012 int,P009013 int,
              P009014 int,P009015 int,P009016 int,P009017 int,
              P009018 int,P009019 int,P009020 int,P009021 int,
              P009022 int,P009023 int,P009024 int,P009025 int,
              P009026 int,P009027 int,P010001 int,P010002 int,
              P010003 int,P010004 int,P010005 int,P010006 int,
              P010007 int,P010008 int,P010009 int,P010010 int,
              P010011 int,P010012 int,P010013 int,P010014 int,
              P010015 int,P010016 int,P010017 int,P010018 int,
              P010019 int,P014001 int,P014002 int,P014003 int,
              P014004 int,P014005 int,P014006 int,P014007 int,
              P014008 int,P014009 int,P014010 int,P014011 int,
              P014012 int,P014013 int,P014014 int,P014015 int,
              P014016 int,P043001 int,P043002 int,P043003 int,
              P043004 int,P043005 int,P043006 int,P043007 int,
              P043008 int,P043009 int,P043010 int,P043011 int,
              P043012 int,P043013 int,P043014 int,P043015 int,
              P052001 int,P052002 int,P052003 int,P052004 int,
              P052005 int,P052006 int,P052007 int,P052008 int,
              P052009 int,P052010 int,P052011 int,P052012 int,
              P052013 int,P052014 int,P052015 int,P052016 int,
              P052017 int);              
            """)
    
    opus_db.execute("""
            LOAD DATA LOCAL INFILE '%s' INTO TABLE raw_sf3_data
            FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n';
            """ % (raw_sf3_file_path))
    
    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
Example 24
def main():
    my_dir = os.path.split(__file__)[0]
    sub_file = os.path.join(my_dir, 'substitutions.txt')

    patterns = []
    f = open(sub_file)
    for line in f:
        if line.endswith('\n'):
            line = line[:-1]

        values = line.split(' ')
        if len(values) > 2:
            print(
                "Invalid format for 'substitutions.txt': Too many "
                "space-separated values on a single line.")
            return
        if len(values) < 2:
            print(
                "Invalid format for 'substitutions.txt': Too few "
                "space-separated values on a single line.")
            return

        pattern, substitution = values
        pattern = r'\b%s\b' % re.escape(pattern)
        substitution = substitution
        patterns.append((pattern, substitution))

    parser = OptionParser()

    parser.add_option("-o",
                      "--host",
                      dest="host",
                      type="string",
                      help="The mysql host (default: 'localhost').")
    parser.add_option("-u",
                      "--username",
                      dest="username",
                      type="string",
                      help="The mysql connection password (default: nothing).")
    parser.add_option("-p",
                      "--password",
                      dest="password",
                      type="string",
                      help="The mysql connection password (default: nothing).")
    parser.add_option(
        "-n",
        "--nobackup",
        action="store_false",
        dest="backup",
        help="If this flag is present, no backup tables will be generated.")
    parser.add_option(
        "-f",
        "--postfix",
        dest="postfix",
        type="string",
        help="The postfix to append to backup table names (default: '_old').")
    parser.add_option("-d",
                      "--databases",
                      action="append",
                      dest="databases",
                      type="string",
                      help="Add a databases to convert. This option may be "
                      "used multiple times.")
    parser.add_option("-t",
                      "--tables",
                      action="append",
                      dest="tables",
                      type="string",
                      help="Add a table to convert. This option may be used "
                      "multiple times.")
    (options, args) = parser.parse_args()

    if options.backup == None: options.backup = True
    if options.postfix == None: options.postfix = '_old'

    if (options.databases == None) or (options.tables == None):
        print(
            'At least one database and one table must be specified. Please '
            'see the usage instructions provided below.\n\n')
        parser.print_help()
        return

    table_list = {}
    for db_name in options.databases:
        table_list[db_name] = []
        for table in options.tables:
            table_list[db_name] += [table]

    db_config = DatabaseServerConfiguration(
        protocol='mysql',
        host_name=options.host,
        user_name=options.username,
        password=options.password,
    )

    if len(options.databases) > 1:
        print "Converting databases on host %s..." % options.host
        DBSubPattern().convert_databases(db_config, options.databases,
                                         table_list, patterns, options.backup,
                                         options.postfix)
        print "Done."

    elif len(options.tables) > 1:
        print "Converting tables in database %s on host %s" % (
            options.databases[0], options.host)
        dbconfig = DatabaseServerConfiguration(protocol='mysql',
                                               host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)
        db = db_server.get_database(options.databases[0])

        DBSubPattern().convert_database(db, options.tables, patterns,
                                        options.backup, options.postfix)
        print "Done."

    else:
        print "Converting table %s in database %s on host %s" % (
            options.tables[0], options.databases[0], options.host)

        dbconfig = DatabaseServerConfiguration(host_name=db_config.host_name,
                                               user_name=db_config.user_name,
                                               protocol='mysql',
                                               password=db_config.password)
        db_server = DatabaseServer(dbconfig)
        db = db_server.get_database(options.databases[0])

        DBSubPattern().convert_table(db, options.tables[0], patterns,
                                     options.backup, options.postfix)
        print "Done."
Example 25
    def __init__(self, gui_configuration=None):
        QMainWindow.__init__(self)
        self.setupUi(self)

        # Bind the application global instance to this window
        set_opusgui_instance(self)

        self.thread().setPriority(QThread.HighestPriority)

        self.tabWidget = QTabWidget(self.splitter)
        self.splitter.setSizes([400, 500])

        # Create a log window
        self.log_tab = LogWidget(self.tabWidget)
        self.log_tab.start_stdout_capture()

        # Initialize empty project
        self.project = OpusProject()
        self.shows_hidden = False

        # Read database connection names
        db_con_file = DatabaseServerConfiguration.get_default_configuration_file_path(
        )
        db_config_node = ElementTree(file=db_con_file).getroot()
        self.db_connection_names = [
            node.tag for node in db_config_node if node.tag != Comment
            and node.get('hidden') != "True" and node.tag != 'xml_version'
        ]

        # Application default configuration
        self.gui_config = gui_configuration

        # Bind actions
        self._setup_actions()

        # Manager collection -- initialized by openProject()
        self.managers = {}

        #        # Delay before hiding the splash screen
        #        time.sleep(1)
        #        self.gui_config.splash_screen.hide()

        # Restoring application geometry from last shut down
        settings = QSettings()
        self.restoreGeometry(settings.value("Geometry").toByteArray())
        self.updateFontSize()
        self.setFocus()

        # Variable library
        self.variable_library = None

        # Load the latest project file if that flag is set in GUI configuration
        if self.gui_config.load_latest_on_start:
            try:
                self.openProject(self.gui_config.latest_project_filename or '')
            except:
                self.closeProject()
            if self.gui_config.load_latest_tab_on_start:
                try:
                    self.toolBox.setCurrentIndex(
                        int(self.gui_config.latest_tab_index))
                except:
                    pass
        ###T: removing these until they serve a purpose
        self.menuUtilities.removeAction(self.actPythonView)
        #self.menuUtilities.removeAction(self.actionLog_View)
        self.menuUtilities.removeAction(self.actEditorView)

        self.connect(self, SIGNAL('variables_updated'),
                     self.update_saved_state)
        self.update_saved_state()
Example 26
def main():
    parser = OptionParser()
    
    parser.add_option("-o", "--host", dest="host", type="string",
        help="The mysql host (default: 'localhost').")
    parser.add_option("-u", "--username", dest="username", type="string",
        help="The mysql connection password (default: nothing).")
    parser.add_option("-p", "--password", dest="password", type="string",
        help="The mysql connection password (default: nothing).")
    parser.add_option("-n", "--nobackup", action="store_false", dest="backup", 
        help="If this flag is present, no backup tables will be generated.")
    parser.add_option("-f", "--postfix", dest="postfix", type="string",
        help="The postfix to append to backup table names (default: '_old').")
    parser.add_option("-d", "--databases", action="append", dest="databases", 
        type="string", help="Add a databases to convert. This option may be "
            "used multiple times.")
    parser.add_option("-t", "--tables", action="append", dest="tables", 
        type="string", help="Add a table to convert. This option may be used "
            "multiple times.")
    (options, args) = parser.parse_args()
    
    if options.host == None: options.host = 'localhost'
    if options.username == None: 
        options.username = ''
    if options.password == None: 
        options.password = ''
    if options.backup == None: options.backup = True
    if options.postfix == None: options.postfix = '_old'
    
    if options.databases == None or options.tables == None: 
        print 'Nothing to convert.'
        return
    
    table_list = {}
    for db_name in options.databases:
        table_list[db_name] = []
        for table in options.tables:
            table_list[db_name] += [table]    
    
    
    db_config = DatabaseServerConfiguration(
        protocol = 'mysql',
        host_name = options.host,
        user_name = options.username,
        password = options.password,
        )
        
    config = {
        'databases':options.databases,
        'tables':table_list,
        
        'backup':options.backup,
        'backup_postfix':options.postfix,
        }    
    
    if len(options.databases) > 1:
        print "Converting databases on host %s..." % options.host
        ConvertDatabase().convert_databases(db_config, config)
        print "Done."
        
    elif len(options.tables) > 1:
        print "Converting tables in database %s on host %s" % (options.databases[0], options.host)
        dbconfig = DatabaseServerConfiguration(
            protocol = 'mysql',
            host_name = db_config.host_name,
            user_name = db_config.user_name,
            password = db_config.password                                       
        )
        db_server = DatabaseServer(dbconfig)
        db = db_server.get_database(options.databases[0])
        
        ConvertDatabase().convert_database(db, options.tables, options.backup, options.postfix)
        print "Done."
    
    else:
        dbconfig = DatabaseServerConfiguration(
            protocol = 'mysql',
            host_name = db_config.host_name,
            user_name = db_config.user_name,
            password = db_config.password                                       
        )
        print "Converting table %s in database %s on host %s" % (options.tables[0], options.databases[0], options.host)
        db_server = DatabaseServer(dbconfig)
        db = db_server.get_database(options.databases[0])
        
        ConvertDatabase().convert_table(db, options.tables[0], options.backup, options.postfix)
        print "Done."
Example 27
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # TODO -
    #    - add 'append' option
    #    - add 'sql statement' option

    # get parameter values
    dbname = param_dict['dbname']
    shapefile = param_dict['shapefile_path']
    schema = param_dict['schema_name']
    overwrite = param_dict['overwrite']
    table_name = param_dict['output_table_name']
    geometry_type = param_dict['geometry_type']
    database_server_connection = param_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    host = dbs_config.host_name
    user = dbs_config.user_name
    password = dbs_config.password

    # check for presence of ogr2ogr
    try:
        p = subprocess.Popen('ogr2ogr',
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout_text, stderr_text = p.communicate()
    except:
        logCB('ogr2ogr is not properly installed or configured\n')
        return

    # check to see if shapefile exists
    if not os.path.isfile(shapefile):
        logCB('shapefile does not exist\n')
        return

    # set proper table name and schema
    if table_name == 'default':
        table_name = os.path.split(shapefile)[1].split('.')[0]
    if schema == 'default':
        schema = 'public'

    # delete existing table if overwrite = YES
    if overwrite == 'YES':
        drop_table(table_name, dbname, schema)

    # set up base command
    base_cmd = 'ogr2ogr -f PostgreSQL PG:"host=%s user=%s dbname=%s password=%s" %s' \
                % (host, user, dbname, password, shapefile)
    # add switches to base command
    ogr2ogr_cmd = base_cmd + get_lco_options() + get_nln_option(
        schema, table_name) + get_nlt_option(
            geometry_type)  #+ ' -a_srs EPSG:32148'

    logCB('Running ogr2ogr using: \n')
    logCB(ogr2ogr_cmd + '\n')

    # execute full command
    p = subprocess.Popen((ogr2ogr_cmd),
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout_text, stderr_text = p.communicate()

    # print messages from ogr2ogr
    if stdout_text:
        logCB('stdout from ogr2ogr: \n')
        logCB(stdout_text + '\n')
    if stderr_text:
        logCB('stderr from ogr2ogr: \n')
        logCB(stderr_text + '\n')

    logCB('Finished exporting shapefile to %s.%s' % (dbname, schema))
Example 28
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    database_server_connection = param_dict['database_server_connection']
    database_name = param_dict['database_name']

    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    #opusDB = OpusDatabase(database_server_configuration=dbs_config, database_name=database_name)
    db = MySQLdb.connect(host=dbs_config.host_name,
                         user=dbs_config.user_name,
                         passwd=dbs_config.password,
                         db=database_name)

    #begin inherited code...

    #    Processes/ methods to be called at the beginning of the pop_synthesis process
    dbc = db.cursor()

    # Identifying the number of housing units to build the Master Matrix
    dbc.execute('select * from housing_pums')
    housing_units = dbc.rowcount
    ti = time.clock()
    # Identifying the control variables for the households, gq's, and persons
    hhld_control_variables = adjusting_pums_joint_distribution.choose_control_variables(
        db, 'hhld')
    gq_control_variables = adjusting_pums_joint_distribution.choose_control_variables(
        db, 'gq')
    person_control_variables = adjusting_pums_joint_distribution.choose_control_variables(
        db, 'person')

    # Identifying the number of categories within each control variable for the households, gq's, and persons
    hhld_dimensions = numpy.asarray(
        adjusting_pums_joint_distribution.create_dimensions(
            db, 'hhld', hhld_control_variables))
    gq_dimensions = numpy.asarray(
        adjusting_pums_joint_distribution.create_dimensions(
            db, 'gq', gq_control_variables))
    person_dimensions = numpy.asarray(
        adjusting_pums_joint_distribution.create_dimensions(
            db, 'person', person_control_variables))

    print 'Dimensions and Control Variables created in %.4f' % (time.clock() -
                                                                ti)
    ti = time.clock()

    update_string = adjusting_pums_joint_distribution.create_update_string(
        db, hhld_control_variables, hhld_dimensions)
    adjusting_pums_joint_distribution.add_unique_id(db, 'hhld', update_string)
    update_string = adjusting_pums_joint_distribution.create_update_string(
        db, gq_control_variables, gq_dimensions)
    adjusting_pums_joint_distribution.add_unique_id(db, 'gq', update_string)
    update_string = adjusting_pums_joint_distribution.create_update_string(
        db, person_control_variables, person_dimensions)
    adjusting_pums_joint_distribution.add_unique_id(db, 'person',
                                                    update_string)

    print 'Uniqueid\'s created in %.4f' % (time.clock() - ti)
    ti = time.clock()

    # Populating the Master Matrix
    populated_matrix = psuedo_sparse_matrix.populate_master_matrix(
        db, 0, housing_units, hhld_dimensions, gq_dimensions,
        person_dimensions)
    print 'Frequency Matrix Populated in %.4f' % (time.clock() - ti)
    ti = time.clock()

    # Sparse representation of the Master Matrix
    ps_sp_matrix = psuedo_sparse_matrix.psuedo_sparse_matrix(
        db, populated_matrix, 0)
    print 'Psuedo Sparse Representation of the Frequency Matrix created in %.4f' % (
        time.clock() - ti)
    ti = time.clock()
    #______________________________________________________________________
    #Creating Index Matrix
    index_matrix = psuedo_sparse_matrix.generate_index_matrix(db, 0)
    print 'Index matrix created in %.4f' % (time.clock() - ti)
    ti = time.clock()
    dbc.close()
    #______________________________________________________________________
    # creating synthetic_population tables in MySQL
    drawing_households.create_synthetic_attribute_tables(db)

    # Total PUMS Sample x composite_type adjustment for hhld
    adjusting_pums_joint_distribution.create_joint_dist(
        db, 'hhld', hhld_control_variables, hhld_dimensions, 0, 0, 0)

    # Total PUMS Sample x composite_type adjustment for gq
    adjusting_pums_joint_distribution.create_joint_dist(
        db, 'gq', gq_control_variables, gq_dimensions, 0, 0, 0)

    # Total PUMS Sample x composite_type adjustment for person
    adjusting_pums_joint_distribution.create_joint_dist(
        db, 'person', person_control_variables, person_dimensions, 0, 0, 0)
Example 29
def opusRun(progressCB,logCB,params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)
        
    
    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    raw_pums_persons_table_name = 'raw_pums_pp_data'
    raw_pums_households_table_name = 'raw_pums_hh_data'
    
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opus_db = server.get_database(database_name=database_name)   
    
    logCB("Creating temporary table pp_temp...\n")
    
    opus_db.execute("""
            drop table if exists pp_temp;
            """)
    opus_db.execute("""
            create table pp_temp
            select
            0 as pumano,
            SERIALNO, 0 as hhpumsid,
            0 as hhid,
            PNUM, 0 as personid,
            SEX, 0 as gender,
            AGE as AGE_PUMS, 0 as age,
            NUMRACE,
            WHITE,
            BLACK,
            AIAN,
            ASIAN,
            NHPI,
            OTHER,
            0 as race,
            ESR, 0 as employment
            from %s;
            """ % (raw_pums_persons_table_name))
    progressCB(5)
    
    logCB("Updating values...\n")

    index_name = get_random_index_name()
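    # The index speeds up the SERIALNO joins below; get_random_index_name()
    # (defined elsewhere in the original module) presumably returns a unique
    # name so repeated runs do not collide on a fixed index name.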
    opus_db.execute("""
            alter table pp_temp add index %s(SERIALNO);
            """ % (index_name))

    opus_db.execute("""
            update pp_temp as p, raw_pums_hh_data as h
            set p.pumano = h.PUMA5
            where p.SERIALNO = h.SERIALNO;
            """)

    opus_db.execute("""
            update pp_temp
            set hhpumsid = SERIALNO;
            """)
    index_name = get_random_index_name()

    opus_db.execute("""
            alter table pp_temp add index %s(hhpumsid);
            """ % (index_name))
    index_name = get_random_index_name()

    opus_db.execute("""
            alter table housing_pums add index %s(hhpumsid);
            """ % (index_name))

    opus_db.execute("""
            update pp_temp as p, housing_pums as h
            set p.hhid = h.hhid
            where p.hhpumsid = h.hhpumsid; 
            """)

    opus_db.execute("""
            update pp_temp
            set personid = PNUM;
            """)

    opus_db.execute("""
            update pp_temp
            set gender = SEX;
            """)

    opus_db.execute("""
            update pp_temp
            set age = AGE_PUMS;
            """)

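    # Recode the PUMS single-race flags into one race code (1 = white,
    # 2 = black, 3 = AIAN, 4 = Asian, 5 = NHPI, 6 = other).  The statements
    # run in order, so the final NUMRACE > 1 rule (race = 7, more than one
    # race reported) overrides the single-race assignments.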
    opus_db.execute("""
            update pp_temp
            set race = 1
            where WHITE = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 2
            where BLACK = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 3
            where AIAN = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 4
            where ASIAN = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 5
            where NHPI = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 6
            where OTHER = 1;
            """)

    opus_db.execute("""
            update pp_temp
            set race = 7
            where NUMRACE > 1;
            """)

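    # Recode ESR (PUMS employment status) into 1 = not in the universe
    # (ESR 0), 2 = employed, civilian or armed forces (ESR 1, 2, 4, 5),
    # 3 = unemployed (ESR 3), 4 = not in labor force (ESR 6).  The ESR code
    # meanings are inferred from the Census 2000 PUMS convention.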
    opus_db.execute("""
            update pp_temp
            set employment = 1
            where ESR = '0';
            """)

    opus_db.execute("""
            update pp_temp
            set employment = 2
            where ESR='1' OR ESR='2' OR ESR='4' OR ESR='5';
            """)

    opus_db.execute("""
            update pp_temp
            set employment = 3
            where ESR = '3';
            """)

    opus_db.execute("""
            update pp_temp
            set employment = 4
            where ESR = '6';
            """)

    opus_db.execute("""
            drop table if exists person_pums;
            """)

    opus_db.execute("""
            create table person_pums
            select
            pumano,
            hhpumsid,
            hhid,
            personid,
            gender,
            age,
            race,
            employment
            from pp_temp;
            """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
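
# A possible consolidation, shown only as a sketch: the seven race UPDATE
# statements in opusRun above could be collapsed into one pass with a MySQL
# CASE expression.  Checking NUMRACE > 1 first reproduces the original
# behaviour, where the multi-race rule runs last and wins.  This is not part
# of the original tool.
RACE_RECODE_SQL = """
        update pp_temp
        set race = case
            when NUMRACE > 1 then 7
            when WHITE = 1 then 1
            when BLACK = 1 then 2
            when AIAN = 1 then 3
            when ASIAN = 1 then 4
            when NHPI = 1 then 5
            when OTHER = 1 then 6
            else race
        end;
        """
# e.g. opus_db.execute(RACE_RECODE_SQL) in place of the seven updates above.
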
parser.add_option("-d", "--database", dest="database",
                  type="string", help="The database to convert. (REQUIRED)")

(options, args) = parser.parse_args()

if options.host is None:
    options.host = 'localhost'
if options.username is None:
    options.username = ''
if options.password is None:
    options.password = ''

if options.database is None:
    parser.print_help()
    sys.exit(1)

config = DatabaseServerConfiguration(
    host_name = options.host,
    protocol = 'mysql',
    user_name = options.username,
    password = options.password,
    )

CombineTables().combine_tables(config, options.database, 
    ['jobs_for_estimation_commercial',
     'jobs_for_estimation_industrial',
     'jobs_for_estimation_governmental',
     'jobs_for_estimation_home_based',
    ], 
    'jobs_for_estimation')

print 'Done.'
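
# The fragment above uses `parser`, `options.host`, `options.username` and
# `options.password`, which are defined earlier in the original script and not
# shown here.  A minimal sketch of the setup it assumes (only -d/--database
# appears in the fragment; the other option names are guesses):
#
#   from optparse import OptionParser
#   parser = OptionParser()
#   parser.add_option("--host", dest="host", type="string",
#                     help="Database server host name (default: localhost).")
#   parser.add_option("--username", dest="username", type="string",
#                     help="Database user name.")
#   parser.add_option("--password", dest="password", type="string",
#                     help="Database password.")
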
Esempio n. 31
0
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # TODO -
    #    - add 'where' option
    #    - add 'select' option
    #    - add 'overwrite' option

    # get parameter values
    database_name = param_dict['database_name']
    csv_file_path = param_dict['csv_file_path']
    output_table_name = param_dict['output_table_name']
    database_server_connection = param_dict['database_server_connection']
    overwrite = param_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    host = dbs_config.host_name
    user = dbs_config.user_name
    password = dbs_config.password
    protocol = dbs_config.protocol

    # check for presence of ogr2ogr
    try:
        p = subprocess.Popen('ogr2ogr',
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout_text, stderr_text = p.communicate()
    except OSError:
        logCB('ogr2ogr is not properly installed or configured\n')
        return

    # check to see if csv file exists
    if not os.path.isfile(csv_file_path):
        logCB('CSV file does not exist: %s\n' % csv_file_path)
        return

    # set proper table name
    if output_table_name == '':
        output_table_name = os.path.split(csv_file_path)[1].split('.')[0]

    # delete existing table if overwrite = YES
    if overwrite == 'True':
        logCB('Dropping table %s\n' % output_table_name)
        drop_table(dbs_config, database_name, output_table_name)

    # set up base command
    if protocol == 'mysql':
        ogr2ogr_cmd = 'ogr2ogr -f MySQL MYSQL:"%s,host=%s,user=%s,password=%s" %s' \
                    % (database_name, host, user, password, csv_file_path)
    elif protocol == 'postgres':
        ogr2ogr_cmd = 'ogr2ogr -f PostgreSQL PG:"host=%s user=%s dbname=%s password=%s" %s' \
                    % (host, user, database_name, password, csv_file_path)
    else:
        logCB('A database protocol other than MySQL or PostgreSQL was specified\n')
        return

    # add switches to ogr2ogr_cmd command
    # check for output_table_name
    if output_table_name != '':
        ogr2ogr_cmd = ogr2ogr_cmd + get_nln_option(output_table_name)

    logCB('Running ogr2ogr using: \n')
    logCB(ogr2ogr_cmd + '\n')

    # execute full command (a plain command string needs shell=True on POSIX
    # systems; Windows also accepts the string form)
    p = subprocess.Popen(ogr2ogr_cmd,
                         shell=True,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout_text, stderr_text = p.communicate()

    # print messages from ogr2ogr
    if stdout_text:
        logCB('stdout from ogr2ogr: \n')
        logCB(stdout_text + '\n')
    if stderr_text:
        logCB('stderr from ogr2ogr: \n')
        logCB(stderr_text + '\n')

    logCB('Finished exporting CSV file to %s\n' % database_name)
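
# A sketch (not the original implementation) of the same ogr2ogr call built as
# an argument list rather than a single shell string.  A list avoids shell
# quoting issues and needs no shell=True; build_ogr2ogr_args is a hypothetical
# helper, and -nln is assumed to be the switch wrapped by get_nln_option().
def build_ogr2ogr_args(protocol, database_name, host, user, password,
                       csv_file_path, output_table_name):
    if protocol == 'mysql':
        dsn = 'MYSQL:%s,host=%s,user=%s,password=%s' % (
            database_name, host, user, password)
        args = ['ogr2ogr', '-f', 'MySQL', dsn, csv_file_path]
    else:  # postgres
        dsn = 'PG:host=%s user=%s dbname=%s password=%s' % (
            host, user, database_name, password)
        args = ['ogr2ogr', '-f', 'PostgreSQL', dsn, csv_file_path]
    if output_table_name:
        args += ['-nln', output_table_name]
    return args

# e.g. subprocess.Popen(build_ogr2ogr_args(...), stdout=subprocess.PIPE,
#                       stderr=subprocess.PIPE).communicate()
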
Esempio n. 32
0
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    database_server_connection = param_dict['database_server_connection']
    project_name = param_dict['database_name']
    path = param_dict['pums_data_path']

    # set up database server configuration
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    #opusDB = OpusDatabase(database_server_configuration=dbs_config, database_name=database_name)
    db = MySQLdb.connect(host=dbs_config.host_name,
                         user=dbs_config.user_name,
                         passwd=dbs_config.password)

    #begin inherited code...

    dbc = db.cursor()
    dbc.execute('Create Database %s' % (project_name))
    dbc.close()

    db = MySQLdb.connect(host=dbs_config.host_name,
                         user=dbs_config.user_name,
                         passwd=dbs_config.password,
                         db='%s' % (project_name))
    dbc = db.cursor()

    dbc.execute('''Create Table housing_pums ( pumano int, hhpumsid int,
                                   hhid int, hhtype int, 
                                   childpresence int, hhldtype int, 
                                   hhldsize int, hhldinc int,
                                   groupquarter int )''')
    dbc.execute(
        '''load data local infile '%s/housing_pums.dat' into table housing_pums'''
        % (path))

    dbc.execute('''Create Table person_pums ( pumano int, hhpumsid int,
                                  hhid int, personid int,
                                  gender int, age int, 
                                  race int, employment int )''')
    dbc.execute(
        '''load data local infile '%s/person_pums.dat' into table person_pums'''
        % (path))

    dbc.execute('''Create Table housing_marginals ( county int, pumano int,
                                         tract int, bg int,
                                         hhtotal int, childpresence1 int,
                                         childpresence2 int, hhldtype1 int,
                                         hhldtype2 int, hhldtype3 int,
                                         hhldtype4 int, hhldtype5 int,
                                         hhldsize1 int, hhldsize2 int,
                                         hhldsize3 int, hhldsize4 int,
                                         hhldsize5 int, hhldsize6 int,
                                         hhldsize7 int, hhldinc1 int,
                                         hhldinc2 int, hhldinc3 int,
                                         hhldinc4 int, hhldinc5 int,
                                         hhldinc6 int, hhldinc7 int,
                                         hhldinc8 int, groupquarter1 int, 
                                         groupquarter2 int )''')
    dbc.execute(
        '''load data local infile '%s/housing_marginals.dat' into table housing_marginals'''
        % (path))

    dbc.execute('''Create Table person_marginals ( county int, pumano int,
                                        tract int, bg int,
                                        gender1 int, gender2 int,
                                        age1 int, age2 int, 
                                        age3 int, age4 int, 
                                        age5 int, age6 int, 
                                        age7 int, age8 int, 
                                        age9 int, age10 int, 
                                        race1 int, race2 int, 
                                        race3 int, race4 int, 
                                        race5 int, race6 int, 
                                        race7 int, employment1 int,
                                        employment2 int, employment3 int,
                                        employment4 int )''')
    dbc.execute(
        '''load data local infile '%s/person_marginals.dat' into table person_marginals'''
        % (path))
    # TODO: automate this step based on the number of hhtypes; each
    # type-specific table must contain only the variables that correspond to
    # that particular housing type.
    hhld_variables = 'childpresence, hhldtype, hhldsize, hhldinc'
    gq_variables = 'groupquarter'
    dbc.execute(
        '''create table hhld_pums select pumano, hhpumsid, hhid, %s from housing_pums where hhtype = 1'''
        % (hhld_variables))
    dbc.execute(
        '''create table gq_pums select pumano, hhpumsid, hhid, %s from housing_pums where hhtype = 2'''
        % (gq_variables))
    dbc.close()
    db.commit()
    db.close()


#
#if __name__ == '__main__':
#    db = MySQLdb.connect(host = 'localhost', user = '******', passwd = '1234')
## How to pickup the location of the flat-files, this can probably come from the GUI?
#    path = 'C:/Documents and Settings/kkonduri/Desktop/pop_syn/northcarolina/data'
#    create_tables (db, 'ncpopsyn', path)
#    db.close()
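
# A possible tightening of the load sequence above, shown only as a sketch:
# the four "load data local infile" statements differ only in the flat file
# and target table, so they could be driven from one list.  The helper name
# load_pums_files is hypothetical; table and file names come from the code
# above, and the tables are assumed to have been created already.
def load_pums_files(dbc, path):
    loads = [('housing_pums', 'housing_pums.dat'),
             ('person_pums', 'person_pums.dat'),
             ('housing_marginals', 'housing_marginals.dat'),
             ('person_marginals', 'person_marginals.dat')]
    for table, filename in loads:
        dbc.execute("load data local infile '%s/%s' into table %s"
                    % (path, filename, table))
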
def opusRun(progressCB, logCB, params):
    param_dict = {}
    for key, val in params.iteritems():
        param_dict[str(key)] = str(val)

    # get parameter values
    database_name = param_dict['database_name']
    database_server_connection = param_dict['database_server_connection']
    raw_sf3_data_table_name = 'raw_sf3_data'

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opus_db = server.get_database(database_name=database_name)

    logCB(" ***  WARNING *** \n")
    logCB(" *** At the end of this tool, you will need\n")
    logCB(" *** to check to make sure each record in the 'person_marginals'\n")
    logCB(
        " *** table has a proper 'pumano' assigned to it.  You may need to \n")
    logCB(" *** manually update the 'pumano' for each \n")
    logCB(" *** block group that this set of queries was \n")
    logCB(" *** unable to match up properly due to idiosyncrasies\n")
    logCB(" *** in the way that block group ids are recorded\n")
    logCB(" *** in the original source files.\n")

    opus_db.execute("""
            drop table if exists person_marginals;
            """)
    progressCB(50)
    logCB("Creating person_marginals table...\n")
    opus_db.execute("""
        CREATE TABLE person_marginals
        SELECT
          mid(GEO_ID, 8, 5) as county,
          0 as pumano,
          cast(mid(GEO_ID, 13, 6) as unsigned) as tract,
          cast(right(GEO_ID, 1) as unsigned) as bg,
          P008002 as gender1,
          P008041 as gender2,
          P008003+P008004+P008005+P008006+P008007+P008042+P008043+P008044+P008045+P008046 as age1,
          P008008+P008009+P008010+P008011+P008012+P008013+P008014+P008015+P008016+P008017+P008047+P008048+P008049+P008050+P008051+P008052+P008053+P008054+P008055+P008056 as age2,
          P008018+P008019+P008020+P008021+P008022+P008023+P008024+P008025+P008057+P008058+P008059+P008060+P008061+P008062+P008063+P008064 as age3,
          P008026+P008027+P008065+P008066 as age4,
          P008028+P008029+P008067+P008068 as age5,
          P008030+P008031+P008069+P008070 as age6,
          P008032+P008033+P008034+P008071+P008072+P008073 as age7,
          P008035+P008036+P008037+P008074+P008075+P008076 as age8,
          P008038+P008039+P008077+P008078 as age9,
          P008040+P008079 as age10,
          P006002 as race1,
          P006003 as race2,
          P006004 as race3,
          P006005 as race4,
          P006006 as race5,
          P006007 as race6,
          P006008 as race7,
          P008003+P008004+P008005+P008006+P008007+P008008+P008009+P008010+P008011+P008012+P008013+P008014+P008015+P008016+P008017+P008018+P008042+P008043+P008044+P008045+P008046+P008047+P008048+P008049+P008050+P008051+P008052+P008053+P008054+P008055+P008056+P008057 as employment1,
          P043004+P043006+P043011+P043013 as employment2,
          P043007+P043014 as employment3,
          P043008+P043015 as employment4
        from raw_sf3_data;
    """)

    logCB("Updating PUMA identifier...\n")
    opus_db.execute("""
            UPDATE person_marginals h, pums_id_to_bg_id p
            SET h.pumano = p.puma5
            WHERE h.county = p.county AND h.tract = p.tract AND h.bg = p.bg;
    """)

    progressCB(90)
    logCB("Closing database connection...\n")
    opus_db.close()
    logCB('Finished running queries.\n')
    progressCB(100)
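
# A follow-up check suggested by the WARNING above, shown only as a sketch:
# block groups that the UPDATE could not match still carry pumano = 0, so this
# query lists the person_marginals records that need a manual 'pumano' fix.
# It is plain SQL meant to be run against the project database after the tool
# finishes and is not part of the original script.
UNMATCHED_PUMANO_SQL = """
        SELECT county, tract, bg
        FROM person_marginals
        WHERE pumano = 0;
        """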