def setup_hbase_dirs():
    print('Setting up hbase conf bind_mount directories...\n')
    os.chdir(config.dest_dir)
    # Remove any existing hbase* bind_mount directories.
    dir_glob = 'hbase' + '*'
    dir_lst = glob.glob(dir_glob)
    for i in dir_lst:
        se_os.del_dir(str(i))
    # Copy a fresh hbase_conf into the hbasenode bind_mount directory.
    src_dir_path = os.path.join(config.data_dir, 'hbase_conf')
    dest_path_new = os.path.join(config.dest_dir, 'hbasenode')
    se_os.copy_dir(src_dir_path, dest_path_new)
    print('bind_mount directories setup complete\n')

def setup_kafka_dirs():
    print('Setting up kafka bind_mount directories...\n')
    os.chdir(config.dest_dir)
    # Remove any existing kafka* bind_mount directories.
    dir_glob = 'kafka' + '*'
    dir_lst = glob.glob(dir_glob)
    for i in dir_lst:
        se_os.del_dir(str(i))
    # Copy a fresh kafka_conf into one bind_mount directory per kafka node.
    src_dir_path = os.path.join(config.data_dir, 'kafka_conf')
    for i in range(0, config.kafka_nodes):
        dest_path_new = os.path.join(config.dest_dir, 'kafkanode' + str(i))
        se_os.copy_dir(src_dir_path, dest_path_new)
    print('bind_mount directories setup complete\n')

def setup_spark2_dirs():
    print('Setting up spark conf bind_mount directories...\n')
    os.chdir(config.dest_dir)
    # Remove any existing spark* bind_mount directories.
    dir_glob = 'spark' + '*'
    dir_lst = glob.glob(dir_glob)
    for i in dir_lst:
        se_os.del_dir(str(i))
    # Copy a fresh spark_conf into the sparknode bind_mount directory and
    # create empty sparkclient/sparkwatcher directories alongside it.
    src_dir_path = os.path.join(config.data_dir, 'spark_conf')
    dest_path_new = os.path.join(config.dest_dir, 'sparknode')
    os.mkdir(os.path.join(config.dest_dir, 'sparkclient'))
    os.mkdir(os.path.join(config.dest_dir, 'sparkwatcher'))
    se_os.copy_dir(src_dir_path, dest_path_new)
    print('bind_mount directories setup complete\n')
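
# The functions above rely on module-level imports of os and glob plus the
# project-local `config` (dest_dir, data_dir, kafka_nodes) and `se_os` modules,
# none of which are defined in this section. A minimal sketch of what the
# `se_os` helpers might look like, assuming they simply wrap shutil
# (hypothetical names and behavior, not the project's actual implementation):
#
#     import os
#     import shutil
#
#     def del_dir(path):
#         """Remove the directory tree at `path` if it exists."""
#         if os.path.isdir(path):
#             shutil.rmtree(path)
#
#     def copy_dir(src, dest):
#         """Recursively copy `src` to `dest` (dest must not already exist)."""
#         shutil.copytree(src, dest)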