def _run_(self, application, test_id=None):
    """Run *application* against ``self.input_path`` inside a scratch dir.

    Creates ``/tmp/test_spark/<test_id>``, runs the application with
    ``file:`` URIs for input and output, asserts success and that the
    output directory exists, and always removes the scratch dir.

    :param application: object exposing ``run(input_uri, output_uri)``
        returning a status with ``is_ok()`` and ``stderr()``.
    :param test_id: optional directory name; a fresh UUID is generated
        per call when omitted.  (The original default
        ``str(uuid.uuid4())`` was evaluated once at definition time, so
        every defaulted call reused the SAME directory — fixed with a
        ``None`` sentinel.)
    """
    if test_id is None:
        test_id = str(uuid.uuid4())
    basedir = LocalFS(os.path.join("/tmp", "test_spark", test_id))
    try:
        basedir.create_directory()
        _app_input = self.input_path
        _app_output_dir = os.path.join(basedir.path, "output")
        status = application.run('file:' + _app_input, 'file:' + _app_output_dir)
        self.assertTrue(status.is_ok(), status.stderr())
        self.assertTrue(os.path.exists(_app_output_dir), status.stderr())
    finally:
        # Always clean up the scratch directory, even on assertion failure.
        basedir.delete_directory()
def _run_(self, application, test_id=None):
    """Run *application* against ``self.input_path`` inside a scratch dir.

    Creates ``/tmp/test_spark/<test_id>``, runs the application with
    ``file:`` URIs for input and output, asserts success and that the
    output directory exists, and always removes the scratch dir.

    :param application: object exposing ``run(input_uri, output_uri)``
        returning a status with ``is_ok()`` and ``stderr()``.
    :param test_id: optional directory name; a fresh UUID is generated
        per call when omitted.  (The original default
        ``str(uuid.uuid4())`` was evaluated once at definition time, so
        every defaulted call reused the SAME directory — fixed with a
        ``None`` sentinel.)
    """
    if test_id is None:
        test_id = str(uuid.uuid4())
    basedir = LocalFS(os.path.join("/tmp", "test_spark", test_id))
    try:
        basedir.create_directory()
        _app_input = self.input_path
        _app_output_dir = os.path.join(basedir.path, "output")
        status = application.run('file:' + _app_input, 'file:' + _app_output_dir)
        self.assertTrue(status.is_ok(), status.stderr())
        self.assertTrue(os.path.exists(_app_output_dir), status.stderr())
    finally:
        # Always clean up the scratch directory, even on assertion failure.
        basedir.delete_directory()
def test_broker(self):
    """Start the Kafka broker thread and assert it is listening.

    Kills anything on PORT first, starts the broker as a daemon thread,
    waits TIME seconds, then checks ``netstat`` output for the listen
    port.  Cleanup (directory removal and port kill) now runs in a
    ``finally`` block — the original skipped it whenever the assertion
    failed, leaking /tmp/kafka-test and the bound port.
    """
    shell.execute_shell_command('fuser -k -n tcp {0}'.format(PORT))
    local = LocalFS("/tmp/kafka-test")
    if not local.exists():
        local.create_directory()
    try:
        thread = KafkaThreadBroker()
        thread.daemon = True
        thread.start()
        sleep(TIME)
        cmd = shell.execute_shell_command('netstat -lntu')
        # NOTE(review): literal "9010" here vs. PORT in the kill commands —
        # presumably PORT == 9010; confirm they are meant to match.
        self.assertTrue("9010" in cmd.stdout, cmd.stdout)
    finally:
        local.delete_directory()
        shell.execute_shell_command('fuser -k -n tcp {0}'.format(PORT))
def test_broker(self):
    """Start the Kafka broker thread and assert it is listening.

    Kills anything on PORT first, starts the broker as a daemon thread,
    waits TIME seconds, then checks ``netstat`` output for the listen
    port.  Cleanup (directory removal and port kill) now runs in a
    ``finally`` block — the original skipped it whenever the assertion
    failed, leaking /tmp/kafka-test and the bound port.
    """
    shell.execute_shell_command('fuser -k -n tcp {0}'.format(PORT))
    local = LocalFS("/tmp/kafka-test")
    if not local.exists():
        local.create_directory()
    try:
        thread = KafkaThreadBroker()
        thread.daemon = True
        thread.start()
        sleep(TIME)
        cmd = shell.execute_shell_command('netstat -lntu')
        # NOTE(review): literal "9010" here vs. PORT in the kill commands —
        # presumably PORT == 9010; confirm they are meant to match.
        self.assertTrue("9010" in cmd.stdout, cmd.stdout)
    finally:
        local.delete_directory()
        shell.execute_shell_command('fuser -k -n tcp {0}'.format(PORT))
def test_apply_local_fs_snapshot(self):
    """Verify that applying the bootstrap config creates /tmp/data_tmp.

    Loads the bootstrap INI, applies the local-FS snapshot, and asserts
    the target directory was created.  The ``finally`` cleanup is now
    guarded by ``exists()`` — the original deleted unconditionally, so a
    failure before the directory was created could raise from cleanup
    and mask the real error.
    """
    # NOTE: 'bootsrap' is the actual (misspelled) resource directory name.
    _config_file = os.path.join(os.path.dirname(__file__), 'resources', 'bootsrap', 'bootstrap.ini')
    test_dir = LocalFS('/tmp/data_tmp')
    # Ensure a clean slate so the assertion proves creation, not leftovers.
    if test_dir.exists():
        test_dir.delete_directory()
    try:
        metastore = IniFileMetaStore(file=_config_file)
        _config = Configuration.load(metastore)
        apply_localfs_snapshot(_config)
        self.assertTrue(test_dir.exists(), "Folder was not created")
    finally:
        if test_dir.exists():
            test_dir.delete_directory()
if __name__ == "__main__":
    # Environment cleanup script: removes leftover test artifacts from
    # HDFS, the configured FTP server, the local filesystem, and Hive.
    # Each step is best-effort-ordered; failures in one service are NOT
    # caught, so a down service aborts the remaining cleanup.

    # 1. Remove the raw-data directory from HDFS, if present.
    hdfs_file = HDFS("{0}/raw".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)

    # 2. Read FTP connection settings from resources/ftp_config.ini.
    config = RawConfigParser()
    config.read(os.path.join(os.path.dirname(__file__), "resources/ftp_config.ini"))
    host_download = config.get("ftp", "host.download")
    user_name = config.get("ftp", "user.name")
    password = config.get("ftp", "password")
    path = config.get("ftp", "path")

    # 3. Delete the remote /tmp path on the FTP host, if present.
    #    NOTE(review): the 'path' value read above is unused here — the
    #    client is pointed at "/tmp" instead; confirm which is intended.
    ftp = ftp_client(host=host_download, login=user_name, password=password, path="/tmp")
    if ftp.exists():
        ftp.delete(recursive=True)

    # 4. Remove the local resources/tmp scratch directory, if present.
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__), 'resources/tmp'))
    if local_file.exists():
        local_file.delete_directory()

    # 5. Drop the Hive monitoring database and all its tables.
    hive = Hive.load_queries_from_string(query="DROP DATABASE IF EXISTS hive_monitoring CASCADE;")
    hive.run()
def on_flow_failed(context):
    """Flow-failure hook: discard the local resources/tmp scratch directory.

    :param context: flow context passed by the framework (unused here).
    """
    scratch_path = os.path.join(os.path.dirname(__file__), 'resources/tmp')
    scratch = LocalFS(path=scratch_path)
    if scratch.exists():
        scratch.delete_directory()
def on_flow_failed(context):
    """Clean up the resources/tmp directory when the flow fails.

    :param context: flow context supplied by the framework (not used).
    """
    tmp_dir = LocalFS(
        path=os.path.join(os.path.dirname(__file__), 'resources/tmp'))
    if tmp_dir.exists():
        tmp_dir.delete_directory()