def setUp(self):
    """Create ``self.db_name`` (if absent) and load the mpp16291 data set.

    Setup is skipped entirely when the database already exists.  Otherwise
    the database is (re)created, the compressed archive shipped next to this
    file is unpacked, and every .dat file is loaded through PSQL.

    Returns:
        True once a fresh setup has run; None when the database already
        existed and setup was skipped.
    """
    # Assume setup is done if db exists.
    # NOTE(review): db_name is interpolated straight into SQL / shell
    # strings; acceptable for a trusted test fixture, not for user input.
    output = PSQL.run_sql_command(
        "select 'command_found_' || datname from pg_database "
        "where datname like '" + self.db_name + "'")
    if 'command_found_' + self.db_name in output:
        return

    # Best-effort drop: the database may not exist, so don't validate.
    cmd = Command('dropdb', "dropdb " + self.db_name)
    cmd.run(validateAfter=False)
    cmd = Command('createdb', "createdb " + self.db_name)
    cmd.run(validateAfter=True)

    myd = os.path.abspath(os.path.dirname(__file__))

    # Copy and unzip data files (original had a doubled '/' in this path;
    # harmless on POSIX but fixed here via os.path.join).
    src_gz = os.path.join(myd, 'compressed_data', 'mpp16291.tar.gz')
    dst_gz = os.path.join(myd, 'mpp16291.tar.gz')
    if os.path.exists(src_gz):
        run_shell_command('cp ' + src_gz + ' ' + dst_gz,
                          'Copy compressed data')
        run_shell_command('gunzip ' + dst_gz, 'Unzip compressed data')
        run_shell_command(
            'tar -xvf ' + os.path.join(myd, 'mpp16291.tar') + ' -C ' + myd,
            'Untar archive')

    filelist = ['dim_workflows.dat', 'dim_temporarl_expressions.dat',
                'dim_subject_areas.dat', 'dim_dates.dat',
                'xref_dim_dates_te.dat', 'fact_workflow_events.dat',
                'fact_task_events.dat', 'dim_tasks.dat']
    # Load each data file; write per-file output next to the input.
    for datfile in filelist:
        runfile = os.path.join(myd, 'adp', datfile)
        PSQL.run_sql_file(runfile,
                          out_file=runfile.replace('.dat', '') + '.out',
                          dbname=self.db_name)
    return True
def setUp(self):
    """Create ``self.db_name`` (if absent) and load the mpp18457 data set.

    Setup is skipped when the database already exists.  Otherwise the
    database is (re)created, the schema-setup SQL is run, the compressed
    archive shipped next to this file is unpacked, and every .dmp file in
    the extracted directory is loaded through PSQL.

    Returns:
        True once a fresh setup has run; None when the database already
        existed and setup was skipped.
    """
    # Assume setup is done if db exists.
    # NOTE(review): db_name is interpolated straight into SQL / shell
    # strings; acceptable for a trusted test fixture, not for user input.
    output = PSQL.run_sql_command(
        "select 'command_found_' || datname from pg_database "
        "where datname like '" + self.db_name + "'")
    if 'command_found_' + self.db_name in output:
        return

    # Best-effort drop: the database may not exist, so don't validate.
    cmd = Command('dropdb', "dropdb " + self.db_name)
    cmd.run(validateAfter=False)
    cmd = Command('createdb', "createdb " + self.db_name)
    cmd.run(validateAfter=True)

    myd = os.path.abspath(os.path.dirname(__file__))

    # First create the schemas, before loading the data.
    runfile = os.path.join(myd, 'mpp18457_repro_setup.sql')
    PSQL.run_sql_file(runfile,
                      out_file=runfile.replace('.sql', '') + '.out',
                      dbname=self.db_name)

    # Copy and unzip data files (original had a doubled '/' in this path;
    # harmless on POSIX but fixed here via os.path.join).
    src_gz = os.path.join(myd, 'compressed_data', 'mpp18457.tar.gz')
    dst_gz = os.path.join(myd, 'mpp18457.tar.gz')
    if os.path.exists(src_gz):
        run_shell_command('cp ' + src_gz + ' ' + dst_gz,
                          'Copy compressed data')
        run_shell_command('gunzip ' + dst_gz, 'Unzip compressed data')
        run_shell_command(
            'tar -xvf ' + os.path.join(myd, 'mpp18457.tar') + ' -C ' + myd,
            'Untar archive')

    # Set-up schema, data: load every dump file in the extracted directory.
    # Match '.dmp' with the dot: the original 'dmp' suffix test would also
    # accept names that merely end in 'dmp', breaking the .out renaming.
    mypath = os.path.join(myd, 'mpp18457')
    dumpfiles = [f for f in os.listdir(mypath)
                 if os.path.isfile(os.path.join(mypath, f))
                 and f.endswith('.dmp')]
    for dumpfile in dumpfiles:
        runfile = os.path.join(mypath, dumpfile)
        PSQL.run_sql_file(runfile,
                          out_file=runfile.replace('.dmp', '') + '.out',
                          dbname=self.db_name)
    return True
def setUp(self):
    """Ensure ``self.db_name`` exists and run setup.sql against it.

    Setup is skipped when the database already exists; otherwise the
    database is (re)created and the local setup.sql is executed in it.
    """
    # Assume setup is done if db exists.
    # NOTE(review): db_name is interpolated straight into the SQL string;
    # acceptable for a trusted test fixture, not for user input.
    output = PSQL.run_sql_command(
        "select 'command_found_' || datname from pg_database "
        "where datname like '" + self.db_name + "'")
    if 'command_found_' + self.db_name in output:
        return

    # Best-effort drop: the database may not exist, so don't validate.
    # (Dropped the unused `result = cmd.get_results()` locals.)
    cmd = Command('dropdb', "dropdb " + self.db_name)
    cmd.run(validateAfter=False)
    cmd = Command('createdb', "createdb " + self.db_name)
    cmd.run(validateAfter=True)

    PSQL.run_sql_file(local_path('setup.sql'), dbname=self.db_name)
def setUp(self):
    """Create ``self.db_name`` (if absent) and load the mpp18457 data set.

    Setup is skipped when the database already exists.  Otherwise the
    database is (re)created, the schema-setup SQL is run, the compressed
    archive shipped next to this file is unpacked, and every .dmp file in
    the extracted directory is loaded through PSQL.

    Returns:
        True once a fresh setup has run; None when the database already
        existed and setup was skipped.
    """
    # Assume setup is done if db exists.
    # NOTE(review): db_name is interpolated straight into SQL / shell
    # strings; acceptable for a trusted test fixture, not for user input.
    output = PSQL.run_sql_command(
        "select 'command_found_' || datname from pg_database "
        "where datname like '" + self.db_name + "'")
    if 'command_found_' + self.db_name in output:
        return

    # Best-effort drop: the database may not exist, so don't validate.
    cmd = Command('dropdb', "dropdb " + self.db_name)
    cmd.run(validateAfter=False)
    cmd = Command('createdb', "createdb " + self.db_name)
    cmd.run(validateAfter=True)

    myd = os.path.abspath(os.path.dirname(__file__))

    # First create the schemas, before loading the data.
    runfile = os.path.join(myd, 'mpp18457_repro_setup.sql')
    PSQL.run_sql_file(runfile,
                      out_file=runfile.replace('.sql', '') + '.out',
                      dbname=self.db_name)

    # Copy and unzip data files (original had a doubled '/' in this path;
    # harmless on POSIX but fixed here via os.path.join).
    src_gz = os.path.join(myd, 'compressed_data', 'mpp18457.tar.gz')
    dst_gz = os.path.join(myd, 'mpp18457.tar.gz')
    if os.path.exists(src_gz):
        run_shell_command('cp ' + src_gz + ' ' + dst_gz,
                          'Copy compressed data')
        run_shell_command('gunzip ' + dst_gz, 'Unzip compressed data')
        run_shell_command(
            'tar -xvf ' + os.path.join(myd, 'mpp18457.tar') + ' -C ' + myd,
            'Untar archive')

    # Set-up schema, data: load every dump file in the extracted directory.
    # Match '.dmp' with the dot: the original 'dmp' suffix test would also
    # accept names that merely end in 'dmp', breaking the .out renaming.
    mypath = os.path.join(myd, 'mpp18457')
    dumpfiles = [f for f in os.listdir(mypath)
                 if os.path.isfile(os.path.join(mypath, f))
                 and f.endswith('.dmp')]
    for dumpfile in dumpfiles:
        runfile = os.path.join(mypath, dumpfile)
        PSQL.run_sql_file(runfile,
                          out_file=runfile.replace('.dmp', '') + '.out',
                          dbname=self.db_name)
    return True
def setUp(self):
    """Create ``self.db_name`` (if absent) and load the mpp16291 data set.

    Setup is skipped entirely when the database already exists.  Otherwise
    the database is (re)created, the compressed archive shipped next to this
    file is unpacked, and every .dat file is loaded through PSQL.

    Returns:
        True once a fresh setup has run; None when the database already
        existed and setup was skipped.
    """
    # Assume setup is done if db exists.
    # NOTE(review): db_name is interpolated straight into SQL / shell
    # strings; acceptable for a trusted test fixture, not for user input.
    output = PSQL.run_sql_command(
        "select 'command_found_' || datname from pg_database "
        "where datname like '" + self.db_name + "'")
    if 'command_found_' + self.db_name in output:
        return

    # Best-effort drop: the database may not exist, so don't validate.
    cmd = Command('dropdb', "dropdb " + self.db_name)
    cmd.run(validateAfter=False)
    cmd = Command('createdb', "createdb " + self.db_name)
    cmd.run(validateAfter=True)

    myd = os.path.abspath(os.path.dirname(__file__))

    # Copy and unzip data files (original had a doubled '/' in this path;
    # harmless on POSIX but fixed here via os.path.join).
    src_gz = os.path.join(myd, 'compressed_data', 'mpp16291.tar.gz')
    dst_gz = os.path.join(myd, 'mpp16291.tar.gz')
    if os.path.exists(src_gz):
        run_shell_command('cp ' + src_gz + ' ' + dst_gz,
                          'Copy compressed data')
        run_shell_command('gunzip ' + dst_gz, 'Unzip compressed data')
        run_shell_command(
            'tar -xvf ' + os.path.join(myd, 'mpp16291.tar') + ' -C ' + myd,
            'Untar archive')

    filelist = ['dim_workflows.dat', 'dim_temporarl_expressions.dat',
                'dim_subject_areas.dat', 'dim_dates.dat',
                'xref_dim_dates_te.dat', 'fact_workflow_events.dat',
                'fact_task_events.dat', 'dim_tasks.dat']
    # Load each data file; write per-file output next to the input.
    for datfile in filelist:
        runfile = os.path.join(myd, 'adp', datfile)
        PSQL.run_sql_file(runfile,
                          out_file=runfile.replace('.dat', '') + '.out',
                          dbname=self.db_name)
    return True