def test_0_push_hdfs_hive_demo(self, dataset_target_uri, hdfs_instance, kerberos_instance, hive_instance,
                               cups_sso_instance, login_to_cf, class_context):
    """
    <b>Description:</b>
    Check that the hdfs hive demo can run on the platform.

    <b>Input data:</b>
    1. hdfs hive demo sources
    2. hdfs instance
    3. kerberos instance
    4. hive instance
    5. cups sso instance

    <b>Expected results:</b>
    Test passes when the hdfs hive demo is successfully pushed to the platform and the app has a URL.

    <b>Steps:</b>
    1. Get hdfs hive demo sources from the repository.
    2. Push the hdfs hive demo to the platform.
    3. Check that the pushed app has a URL.
    """
    step("Get app sources")
    repo = AppSources.get_repository(repo_name=TapGitHub.hdfs_hive_demo, repo_owner=TapGitHub.intel_data)
    repo.compile_mvn()
    step("Push hdfs-hive-demo app to cf")
    self.__class__.hdfs_reader_app = Application.push(
        class_context, source_directory=repo.path,
        bound_services=[hdfs_instance.name, kerberos_instance.name, hive_instance.name, cups_sso_instance.name])
    step("Check hdfs-hive-demo app has url")
    assert len(self.hdfs_reader_app.urls) == 1
def kafka2hdfs_app(class_context, kafka_instance, hdfs_instance, kerberos_instance, api_service_admin_client):
    log_fixture("kafka2hdfs: download libraries")
    ingestion_repo = AppSources.get_repository(repo_name=TapGitHub.ws_kafka_hdfs, repo_owner=TapGitHub.intel_data)
    kafka2hdfs_path = os.path.join(ingestion_repo.path, TapGitHub.kafka2hdfs)
    log_fixture("Package kafka2hdfs app")
    ingestion_repo.compile_gradle(working_directory=kafka2hdfs_path)
    build_path = os.path.join(kafka2hdfs_path, "deploy")
    ingestion_repo.run_build_sh(cwd=build_path, command="./pack.sh")
    app_path = os.path.join(build_path, "kafka2hdfs.tar")
    log_fixture("kafka2hdfs: update manifest")
    p_a = PrepApp(build_path)
    manifest_params = {"bindings": [kafka_instance.name, hdfs_instance.name, kerberos_instance.name]}
    manifest_path = p_a.update_manifest(params=manifest_params)
    log_fixture("kafka2hdfs: push application")
    app = Application.push(class_context, app_path=app_path, name=p_a.app_name, manifest_path=manifest_path,
                           client=api_service_admin_client)
    log_fixture("kafka2hdfs: Check the application is running")
    app.ensure_running()
    return app
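# A minimal consumption sketch (an assumption, not from the source): pytest
# injects fixtures by parameter name, so a test that needs the running ingestion
# app just declares kafka2hdfs_app. The test name and assertions are hypothetical.
def test_kafka2hdfs_fixture_yields_running_app(kafka2hdfs_app):
    # ensure_running() already passed inside the fixture; the returned
    # Application object can be used directly.
    kafka2hdfs_app.ensure_running()
    assert kafka2hdfs_app.name is not None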
def key_path(self):
    """
    If the key path was not set in configuration, download the key.
    The download is executed at most once.
    """
    if self._key_path is None:
        self._logger.info("Download repository with ssh key")
        ilab_deploy = AppSources.get_repository(repo_name=TapGitHub.ilab_deploy,
                                                repo_owner=TapGitHub.intel_data)
        self._ilab_deploy_path = ilab_deploy.path
        self._key_path = os.path.join(self._ilab_deploy_path, RelativeRepositoryPaths.ilab_jump_key)
        # expand "~" before touching the file, then restrict permissions as ssh requires
        self._key_path = os.path.expanduser(self._key_path)
        os.chmod(self._key_path, stat.S_IRUSR | stat.S_IWUSR)
    assert os.path.isfile(self._key_path), "No such file {}".format(self._key_path)
    return self._key_path
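# Hypothetical usage sketch: a validated key path like this is typically handed
# to an SSH client. The host and user below are placeholders, and paramiko is
# one possible client library; none of this is taken from the source.
import paramiko

def open_jump_connection(jump_client, host, user):
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # key_path() downloads the key on first use and returns a verified file path
    ssh.connect(host, username=user, key_filename=jump_client.key_path())
    return ssh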
def test_1_push_app_to_cf(self, class_context, test_space, psql_instance, login_to_cf):
    step("Push application to cf")
    sql_api_sources = AppSources.get_repository(repo_name=TapGitHub.sql_api_example,
                                                repo_owner=TapGitHub.intel_data)
    self.__class__.psql_app = Application.push(
        context=class_context,
        source_directory=sql_api_sources.path,
        space_guid=test_space.guid,
        bound_services=(psql_instance.name, self.kerberos_instance.name))
    step("Check the application is running")
    assertions.assert_equal_with_retry(True, self.psql_app.cf_api_app_is_running)
    step("Check that application is on the list")
    apps = Application.cf_api_get_list_by_space(test_space.guid)
    assert self.psql_app in apps
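# A sketch of what a retrying assertion such as assert_equal_with_retry
# typically does (an assumption; the project's helper may differ in timeout,
# interval, and error reporting):
import time

def assert_equal_with_retry(expected, callable_obj, timeout=120, interval=5):
    deadline = time.time() + timeout
    while time.time() < deadline:
        if callable_obj() == expected:
            return
        time.sleep(interval)
    # one final call so a failure surfaces the actual value in the assert
    assert callable_obj() == expected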
def test_0_push_dataset_reader_app(self, test_space, dataset_target_uri, hdfs_instance, kerberos_instance,
                                   login_to_cf, class_context):
    step("Get app sources")
    repo = AppSources.get_repository(repo_name=TapGitHub.dataset_reader_sample,
                                     repo_owner=TapGitHub.trustedanalytics)
    repo.compile_mvn()
    step("Push dataset-reader app to cf")
    self.__class__.hdfs_reader_app = Application.push(
        class_context,
        space_guid=test_space.guid,
        source_directory=repo.path,
        bound_services=(hdfs_instance.name, kerberos_instance.name),
        env={"FILE": dataset_target_uri})
    step("Check dataset reader app has url")
    assert len(self.hdfs_reader_app.urls) == 1
def ws2kafka_app(class_context, kafka_instance, api_service_admin_client):
    log_fixture("ws2kafka: download libraries")
    ingestion_repo = AppSources.get_repository(repo_name=TapGitHub.ws_kafka_hdfs, repo_owner=TapGitHub.intel_data)
    ws2kafka_path = os.path.join(ingestion_repo.path, TapGitHub.ws2kafka)
    build_path = os.path.join(ws2kafka_path, "deploy")
    ingestion_repo.run_build_sh(cwd=build_path, command="./pack.sh")
    app_path = os.path.join(build_path, "ws2kafka.tar.gz")
    log_fixture("ws2kafka: update manifest")
    p_a = PrepApp(build_path)
    manifest_params = {"bindings": [kafka_instance.name]}
    manifest_path = p_a.update_manifest(params=manifest_params)
    log_fixture("ws2kafka: push application")
    app = Application.push(class_context, app_path=app_path, name=p_a.app_name, manifest_path=manifest_path,
                           client=api_service_admin_client)
    log_fixture("ws2kafka: Check the application is running")
    app.ensure_running()
    return app
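# Hypothetical client-side check: ws2kafka ingests messages sent over a
# websocket. The "/ws/<topic>" path and topic name are assumptions for
# illustration; create_connection comes from the websocket-client package.
import websocket

def send_to_ws2kafka(app_url, message, topic="test-topic"):
    ws = websocket.create_connection("wss://{}/ws/{}".format(app_url, topic))
    ws.send(message)
    ws.close()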
def hbase_reader_app(class_context, kerberos_instance, api_service_admin_client):
    log_fixture("hbase_reader: download libraries")
    hbase_reader_repo = AppSources.get_repository(repo_name=TapGitHub.hbase_api_example,
                                                  repo_owner=TapGitHub.intel_data)
    log_fixture("Package hbase_reader app")
    build_path = os.path.join(hbase_reader_repo.path, "deploy")
    hbase_reader_repo.run_build_sh(cwd=build_path, command="./pack.sh")
    app_path = os.path.join(build_path, "hbase-java-api-example-0.1.1.tar")
    log_fixture("hbase_reader: update manifest")
    p_a = PrepApp(build_path)
    manifest_params = {"bindings": [kerberos_instance.name]}
    manifest_path = p_a.update_manifest(params=manifest_params)
    log_fixture("hbase_reader: push application")
    app = Application.push(class_context, app_path=app_path, name=p_a.app_name, manifest_path=manifest_path,
                           client=api_service_admin_client)
    log_fixture("hbase_reader: Check the application is running")
    app.ensure_running()
    return app
def test_mqtt_demo(self, context, test_org, test_space, login_to_cf, class_context):
    """
    <b>Description:</b>
    Check that the mqtt service can communicate with the mqtt demo app and a test client.

    <b>Input data:</b>
    1. Mqtt demo repository.
    2. Organization.

    <b>Expected results:</b>
    The mqtt demo app logs contain the data sent through the test client.

    <b>Steps:</b>
    1. Create influxdb service.
    2. Create mosquitto service.
    3. Push mqtt demo app.
    4. Get mqtt service credentials.
    5. Connect test client to mqtt service.
    6. Publish test data through test client.
    7. Verify mqtt demo app logs contain test data.
    """
    step("Clone repository")
    mqtt_demo_sources = AppSources.get_repository(repo_name=self.REPO_NAME, repo_owner=self.SOURCES_OWNER)
    step("Compile the sources")
    mqtt_demo_sources.compile_mvn()
    step("Create required service instances.")
    ServiceInstance.api_create_with_plan_name(
        context=context,
        org_guid=test_org.guid,
        space_guid=test_space.guid,
        service_label=ServiceLabels.INFLUX_DB_110,
        name=self.INFLUX_INSTANCE_NAME,
        service_plan_name=ServicePlan.SINGLE_SMALL)
    ServiceInstance.api_create_with_plan_name(
        context=context,
        org_guid=test_org.guid,
        space_guid=test_space.guid,
        service_label=ServiceLabels.MOSQUITTO,
        name=self.MQTT_INSTANCE_NAME,
        service_plan_name=ServicePlan.FREE)
    step("Push mqtt app to cf")
    mqtt_demo_app = Application.push(class_context, source_directory=mqtt_demo_sources.path,
                                     space_guid=test_space.guid)
    step("Retrieve credentials for mqtt service instance")
    self.credentials = mqtt_demo_app.get_credentials(service_name=ServiceLabels.MOSQUITTO)
    mqtt_port = self.credentials.get("port")
    assert mqtt_port is not None
    mqtt_username = self.credentials.get("username")
    assert mqtt_username is not None
    mqtt_pwd = self.credentials.get("password")
    assert mqtt_pwd is not None
    step("Connect to mqtt app with mqtt client")
    mqtt_client = mqtt.Client()
    mqtt_client.username_pw_set(mqtt_username, mqtt_pwd)
    mqtt_client.tls_set(self.SERVER_CERTIFICATE, tls_version=ssl.PROTOCOL_TLSv1_2)
    mqtt_server_address = mqtt_demo_app.urls[0]
    mqtt_client.connect(mqtt_server_address, int(mqtt_port), 20)
    with open(self.TEST_DATA_FILE) as f:
        expected_data = f.read().split("\n")
    step("Start reading logs")
    # stream the app logs in the background while the vectors are being published
    logs = subprocess.Popen(["cf", "logs", "mqtt-demo"], stdout=subprocess.PIPE)
    time.sleep(5)
    step("Send {0} data vectors to {1}:{2} on topic {3}".format(len(expected_data), mqtt_server_address,
                                                                mqtt_port, self.MQTT_TOPIC_NAME))
    for line in expected_data:
        mqtt_client.publish(self.MQTT_TOPIC_NAME, line)
    step("Stop reading logs. Retrieve vectors from log content.")
    # filter the streamed logs down to the published payloads, equivalent to:
    # cf logs mqtt-demo | grep "message:" | cut -d ':' -f7
    grep = subprocess.Popen(["grep", "message:"], stdin=logs.stdout, stdout=subprocess.PIPE)
    logs.stdout.close()
    time.sleep(50)
    os.kill(logs.pid, signal.SIGTERM)
    cut = subprocess.Popen("cut -d ':' -f7", stdin=grep.stdout, stdout=subprocess.PIPE, shell=True)
    grep.stdout.close()
    step("Check that logs display all the vectors sent")
    log_result = cut.communicate()[0].decode().split("\n")
    log_result = [item.strip() for item in log_result if item not in (" ", "")]
    self.maxDiff = None  # allows the full diff to be displayed on failure
    assert log_result == expected_data, "Data in logs do not match sent data"
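# A compact alternative sketch of the log-scraping step above, using a single
# shell=True call (assuming a logged-in cf CLI; the helper name and the
# timeout-based shutdown are assumptions for illustration, not the test's code):
import subprocess

def read_mqtt_demo_vectors(duration=50):
    pipeline = "timeout {} cf logs mqtt-demo | grep 'message:' | cut -d ':' -f7".format(duration)
    result = subprocess.run(pipeline, shell=True, stdout=subprocess.PIPE)
    # drop blank lines and surrounding whitespace, mirroring the test's cleanup
    return [line.strip() for line in result.stdout.decode().splitlines() if line.strip()]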