# Ejemplo n.º 1  (example-separator artifact from the code-sample aggregation)
# 0
# Register the Kafka-topic command-line flag and parse the arguments.
# NOTE(review): PARSER is created earlier in the file, outside this excerpt
# (presumably via cf_helpers.get_parser, as in the later examples — confirm).
PARSER.add_argument('-kt',
                    '--kafka_topic',
                    type=str,
                    required=True,
                    # NOTE(review): "to received tweets" looks like a typo for
                    # "to receive tweets"; left unchanged because help text is
                    # runtime output.
                    help='The name of the kafka topic for the '
                    'application to received tweets')
# Parsed namespace; the rest of the script reads its attributes (app_name,
# project_dir, twitter_* credentials, kafka_topic, ...).
ARGS = PARSER.parse_args()

# Log in to Cloud Foundry using the target/credentials derived from the CLI
# arguments.
CF_INFO = cf_helpers.get_info(ARGS)
cf_cli.login(CF_INFO)

# Provision the Kafka service instance this application will consume.
cf_cli.create_service('kafka', 'shared', 'kafka-twitter-instance')

# Resolve the project directory (an explicit --project_dir flag wins), build
# the deployable package, and push the app with --no-start so the environment
# variables below can be set before the first boot.
PROJECT_DIR = ARGS.project_dir if ARGS.project_dir else \
    cf_helpers.get_project_dir()
cf_helpers.prepare_package(work_dir=PROJECT_DIR)
cf_helpers.push(work_dir=PROJECT_DIR,
                options="{0} -n {0} --no-start".format(ARGS.app_name))

# Inject the Twitter API credentials, stream filters, and target Kafka topic
# as environment variables on the app.
# NOTE(review): APP_NAME is defined outside this excerpt while the push above
# used ARGS.app_name — confirm the two always refer to the same application.
cf_cli.set_env(APP_NAME, "TWITTER_CONSUMER_KEY", ARGS.twitter_consumer_key)
cf_cli.set_env(APP_NAME, "TWITTER_CONSUMER_SECRET",
               ARGS.twitter_consumer_secret)
cf_cli.set_env(APP_NAME, "TWITTER_TOKEN", ARGS.twitter_token)
cf_cli.set_env(APP_NAME, "TWITTER_SECRET", ARGS.twitter_secret)
cf_cli.set_env(APP_NAME, "TWITTER_TERMS", ARGS.twitter_terms)
cf_cli.set_env(APP_NAME, "TWITTER_FOLLOWINGS", ARGS.twitter_followings)
cf_cli.set_env(APP_NAME, "TWITTER_LOCATIONS", ARGS.twitter_locations)
cf_cli.set_env(APP_NAME, "KAFKA_TOPIC", ARGS.kafka_topic)

# All configuration is in place — start the app.
cf_cli.start(APP_NAME)
# NOTE(review): from here on this reads like a second, separate deployment
# script (hdfs + kerberos + dataset upload) concatenated onto the one above —
# it re-derives PROJECT_DIR and re-pushes the app. Confirm against the
# original sources.
# Provision the HDFS and Kerberos service instances the app depends on.
cf_cli.create_service('hdfs', 'shared', 'hdfs-instance')

LOGGER.info('Creating kerberos service instance...')
cf_cli.create_service('kerberos', 'shared', 'kerberos-service')

# NOTE(review): unlike the block above, this ignores ARGS.project_dir.
PROJECT_DIR = cf_helpers.get_project_dir()

# Build the deployable package from the project directory.
LOGGER.info('Creating artifact package...')
cf_helpers.prepare_package(work_dir=PROJECT_DIR)

# Push with --no-start so services can be bound and env vars set first.
LOGGER.info('Pushing application to Cloud Foundry...')
cf_helpers.push(options="{0} -n {0} --no-start".format(ARGS.app_name), work_dir=PROJECT_DIR)

# Bind the provisioned services to the (not-yet-started) application.
LOGGER.info('Binding hdfs-instance...')
cf_cli.bind_service(ARGS.app_name, 'hdfs-instance')

LOGGER.info('Binding kerberos-service...')
cf_cli.bind_service(ARGS.app_name, 'kerberos-service')

# Upload the bundled CSV dataset to HDFS; upload_to_hdfs returns the HDFS
# path the app should read from.
LOGGER.info('Uploading dataset to HDFS...')
LOCAL_DATASET_PATH = "data/nf-data-application.csv"
HDFS_DATASET_PATH = cf_helpers.upload_to_hdfs(ARGS.api_url, CF_INFO.org,
                                            '{}/{}'.format(PROJECT_DIR, LOCAL_DATASET_PATH),
                                            'nf-data-application')

# Point the app at the uploaded dataset via the FILE environment variable.
LOGGER.info('Setting environment variables...')
cf_cli.set_env(ARGS.app_name, "FILE", HDFS_DATASET_PATH)

# Everything is bound and configured — start the app.
LOGGER.info('Starting application...')
cf_cli.start(ARGS.app_name)
# Ejemplo n.º 3  (example-separator artifact from the code-sample aggregation)
# 0
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
This scripts automates deployment of kafka-admin-api application
(creates required service instances).
"""

from app_deployment_helpers import cf_cli
from app_deployment_helpers import cf_helpers

APP_NAME = "kafka-admin-api"

PARSER = cf_helpers.get_parser(APP_NAME)
ARGS = PARSER.parse_args()

CF_INFO = cf_helpers.get_info(ARGS)
cf_cli.login(CF_INFO)

cf_cli.create_service('kafka', 'shared', 'kafka-instance')

PROJECT_DIR = ARGS.project_dir if ARGS.project_dir else \
    cf_helpers.get_project_dir()
cf_helpers.prepare_package(work_dir=PROJECT_DIR)
cf_helpers.push(work_dir=PROJECT_DIR, options="{0} -n {0} --no-start".format(ARGS.app_name))

cf_cli.start(APP_NAME)
# Ejemplo n.º 4  (example-separator artifact from the code-sample aggregation)
# 0
# Deployment tail for an HDFS/Kerberos-backed application: provisions the
# Kerberos service, packages and pushes the app (--no-start), binds services,
# uploads a CSV dataset to HDFS, configures the app, and starts it.
# NOTE(review): LOGGER, ARGS, and CF_INFO are defined earlier, outside this
# excerpt; this fragment assumes an hdfs-instance service already exists.
LOGGER.info('Creating kerberos service instance...')
cf_cli.create_service('kerberos', 'shared', 'kerberos-service')

# NOTE(review): ARGS.project_dir is ignored here, unlike other examples.
PROJECT_DIR = cf_helpers.get_project_dir()

# Build the deployable package from the project directory.
LOGGER.info('Creating artifact package...')
cf_helpers.prepare_package(work_dir=PROJECT_DIR)

# Push with --no-start so services can be bound and env vars set first.
LOGGER.info('Pushing application to Cloud Foundry...')
cf_helpers.push(options="{0} -n {0} --no-start".format(ARGS.app_name),
                work_dir=PROJECT_DIR)

# Bind the HDFS and Kerberos services to the (not-yet-started) application.
LOGGER.info('Binding hdfs-instance...')
cf_cli.bind_service(ARGS.app_name, 'hdfs-instance')

LOGGER.info('Binding kerberos-service...')
cf_cli.bind_service(ARGS.app_name, 'kerberos-service')

# Upload the bundled CSV dataset to HDFS; upload_to_hdfs returns the HDFS
# path the app should read from.
LOGGER.info('Uploading dataset to HDFS...')
LOCAL_DATASET_PATH = "data/nf-data-application.csv"
HDFS_DATASET_PATH = cf_helpers.upload_to_hdfs(
    ARGS.api_url, CF_INFO.org, '{}/{}'.format(PROJECT_DIR, LOCAL_DATASET_PATH),
    'nf-data-application')

# Point the app at the uploaded dataset via the FILE environment variable.
LOGGER.info('Setting environment variables...')
cf_cli.set_env(ARGS.app_name, "FILE", HDFS_DATASET_PATH)

# Everything is bound and configured — start the app.
LOGGER.info('Starting application...')
cf_cli.start(ARGS.app_name)