Example #1
0
 def setUpClass(cls):
     """Set up shared test fixtures: a throwaway Spark test session, an
     HDFS connection, and a pushed Flask application context.

     Side effects persist for the whole test class; a matching teardown
     elsewhere is expected to pop the context and stop the session.
     """
     # Unique session name so concurrent test runs don't collide.
     session_name = 'spark-test-run-{}'.format(str(uuid.uuid4()))
     listenbrainz_spark.init_test_session(session_name)
     hdfs_connection.init_hdfs(config.HDFS_HTTP_URI)
     cls.app = utils.create_app()
     cls.app_context = cls.app.app_context()
     # Fixed reference date used by the tests — presumably pinned so
     # date-dependent assertions are deterministic.
     cls.date = datetime(2019, 1, 21)
     cls.app_context.push()
Example #2
0
 def setUpClass(cls):
     """Create the Flask test application and push an application
     context so request-independent Flask APIs work inside the tests.
     """
     application = utils.create_app()
     cls.app = application
     cls.app_context = application.app_context()
     cls.app_context.push()
import os
import sys
import click
import logging
import time

import listenbrainz_spark
from listenbrainz_spark import path
from listenbrainz_spark import utils
from listenbrainz_spark import config
from listenbrainz_spark import hdfs_connection

from hdfs.util import HdfsError
from py4j.protocol import Py4JJavaError

# Module-level Flask app shared by the CLI commands below; created in
# debug mode. Note: constructed at import time as a module side effect.
app = utils.create_app(debug=True)


# Root click command group; subcommands attach via @cli.command().
# Deliberately no docstring: click would surface one as the group's
# generated --help text.
@click.group()
def cli():
    pass


@cli.command(name='init_dir')
@click.option('--rm',
              is_flag=True,
              help='Delete existing directories from HDFS.')
@click.option('--recursive',
              is_flag=True,
              help='Delete existing directories from HDFS recursively.')
@click.option('--create_dir', is_flag=True, help='Create directories in HDFS.')