def test_import_direct(self):
    """Build (without running) a Sqoop import command in --direct mode.

    Loads hadoop properties (-DA=12 -DB=13) from the preconfigured job in
    resources/sqoop/sqoop.ini, then asserts the fully rendered command
    line, including the extra args passed through after the `--`
    separator (--name-2, --names-3).
    """
    metastore = IniFileMetaStore(file=os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'resources', 'sqoop', 'sqoop.ini'))
    config = Configuration.load(metastore=metastore, readonly=False)
    # assertEqual, not the deprecated assertEquals alias
    # (removed in Python 3.12).
    self.assertEqual(
        SqoopImport.load_preconfigured_job(name="test", config=config)
        .from_rdbms(rdbms="mysql",
                    username="******",
                    password_file="/user/cloudera/password",
                    host="localhost",
                    database="sqoop_tests")
        .with_direct_mode(direct_split_size="1", name_2="12", names_3="1")
        .table(table="table_name")
        .to_hdfs()
        .build(),
        '-DA=12 -DB=13 --connect jdbc:mysql://localhost/sqoop_tests '
        '--username root --password-file /user/cloudera/password '
        '--table table_name --direct -- --name-2=12 --names-3=1')
def test_import_with_hadoop_properties_from_ini_file(self):
    """Merge ini-file hadoop properties with ones added via the builder.

    The preconfigured job "sqoo" supplies -DA=12 -DB=13 from
    resources/sqoop/sqoop.ini; with_hadoop_properties() appends
    -Dsome.properties=10 (note underscore -> dot translation in the key).
    Asserts the fully rendered command string.
    """
    metastore = IniFileMetaStore(file=os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'resources', 'sqoop', 'sqoop.ini'))
    config = Configuration.load(metastore=metastore, readonly=False)
    # assertEqual, not the deprecated assertEquals alias
    # (removed in Python 3.12).
    self.assertEqual(
        SqoopImport.load_preconfigured_job(name="sqoo", config=config)
        .from_rdbms(rdbms="mysql",
                    username="******",
                    password_file="/user/cloudera/password",
                    host="localhost",
                    database="sqoop_tests")
        .to_hdfs()
        .table(table="table_name")
        .with_hadoop_properties(some_properties="10")
        .build(),
        "-DA=12 -DB=13 -Dsome.properties=10 "
        "--connect jdbc:mysql://localhost/sqoop_tests "
        "--username root --password-file /user/cloudera/password "
        "--table table_name")
def test_import_table(self):
    """Run a real Sqoop table import from resources/sqoop/custom.ini.

    Imports selected columns of ``table_name`` (rows with id > 2) into
    HDFS and checks that the resulting part-m-* output is non-empty.
    The target directory is removed in ``finally`` so reruns start clean.

    NOTE(review): a second method with this exact name is defined later
    in the file and shadows this one, so this test never runs — confirm
    intent and rename or delete one of the two.
    """
    try:
        metastore = IniFileMetaStore(file=os.path.join(
            os.path.dirname(__file__), 'resources/sqoop/custom.ini'))
        cmd = SqoopImport.load_preconfigured_job(
            config=Configuration.load(metastore=metastore,
                                      readonly=False,
                                      accepts_nulls=True)) \
            .from_rdbms() \
            .table(table="table_name", where="id>2",
                   columns="id,last_name") \
            .to_hdfs(target_dir="{0}/custom_directory".format(BASE_DIR)) \
            .run()
        # assertEqual, not the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(cmd.status, 0, cmd.stderr)
        # A zero byte count would mean the import produced no data.
        result = shell.execute_shell_command(
            'hadoop fs',
            '-du -s {0}/custom_directory/part-m-*'.format(BASE_DIR))
        self.assertNotEqual(result.stdout.split(' ')[0], '0',
                            result.stdout)
    finally:
        shell.execute_shell_command(
            'hadoop fs', '-rm -r {0}/custom_directory'.format(BASE_DIR))
def test_import_table(self):
    """Run a real Sqoop table import from resources/sqoop/custom.ini.

    Imports selected columns of ``table_name`` (rows with id > 2) into
    HDFS and checks that the resulting part-m-* output is non-empty.
    The target directory is removed in ``finally`` so reruns start clean.

    NOTE(review): this duplicates an identically-named method defined
    earlier in the file (which it shadows) — confirm intent and rename
    or delete one of the two.
    """
    try:
        metastore = IniFileMetaStore(file=os.path.join(
            os.path.dirname(__file__), 'resources/sqoop/custom.ini'))
        cmd = SqoopImport.load_preconfigured_job(
            config=Configuration.load(metastore=metastore,
                                      readonly=False,
                                      accepts_nulls=True)) \
            .from_rdbms() \
            .table(table="table_name", where="id>2",
                   columns="id,last_name") \
            .to_hdfs(target_dir="{0}/custom_directory".format(BASE_DIR)) \
            .run()
        # assertEqual, not the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(cmd.status, 0, cmd.stderr)
        # A zero byte count would mean the import produced no data.
        result = shell.execute_shell_command(
            'hadoop fs',
            '-du -s {0}/custom_directory/part-m-*'.format(BASE_DIR))
        self.assertNotEqual(result.stdout.split(' ')[0], '0',
                            result.stdout)
    finally:
        shell.execute_shell_command(
            'hadoop fs', '-rm -r {0}/custom_directory'.format(BASE_DIR))