def test_change_app_state_in_catalog_and_delete_it(
            self, context, test_sample_apps, api_service_admin_client):
        """
        <b>Description:</b>
        Change the application state in catalog and later delete it

        <b>Input data:</b>
        - Path to application
        - Admin credentials

        <b>Expected results:</b>
        - Application state can be changed in catalog
        - Application state can be set back via api service

        <b>Steps:</b>
        - Prepare the application and push it
        - Make sure the application is running
        - Change the state of the application via catalog
        - Make sure the state has changed
        - Stop the application via api service client and remove it
        - Verify the application was removed
        """
        log_fixture("sample_application: update manifest")
        prep = PrepApp(test_sample_apps.tapng_python_app.filepath)
        manifest_path = prep.update_manifest(
            params={"type": TapApplicationType.PYTHON27})

        log_fixture("Push sample application and check it's running")
        pushed_app = Application.push(
            context,
            app_path=test_sample_apps.tapng_python_app.filepath,
            name=prep.app_name,
            manifest_path=manifest_path,
            client=api_service_admin_client)
        pushed_app.ensure_running()

        step("Check that the application has only one instance")
        app_instances = CatalogApplicationInstance.get_list_for_application(
            application_id=pushed_app.id)
        assert len(app_instances) == 1

        target_state = TapEntityState.FAILURE
        step("Update app state to {} using catalog api".format(target_state))
        catalog_api.update_instance(instance_id=app_instances[0].id,
                                    field_name="state",
                                    value=target_state)

        step("Check that the app state was updated")
        refreshed = Application.get(app_inst_id=pushed_app.id,
                                    client=api_service_admin_client)
        assert refreshed.state == target_state, \
            "Application is not in the expected state. App state: {}".format(refreshed.state)

        step("Check that the application can be deleted")
        pushed_app.delete()

        step("Check that application has been removed")
        remaining_apps = Application.get_list(client=api_service_admin_client)
        assert pushed_app not in remaining_apps
def cleanup_test_data():
    """Remove leftover test objects of every tracked model type.

    Gathers current object lists for each model (data sets, transfers,
    users, invitations, applications, services, offerings, scoring engine
    models) and hands each batch to ``_cleanup_test_data``.
    """
    org_guid = core_org().guid
    # (display name, objects to remove, attribute holding the object's name)
    model_table = (
        ('data set', DataSet.api_get_list(), 'title'),
        ('transfer', Transfer.api_get_list(), 'title'),
        ('user', User.get_list_in_organization(org_guid=org_guid), 'username'),
        ('invitation', Invitation.api_get_list(), 'username'),
        ('application', Application.get_list(), 'name'),
        ('service', ServiceInstance.get_list(), 'name'),
        ('offering', ServiceOffering.get_list(), 'label'),
        ('scoring engine model',
         ScoringEngineModel.get_list(org_guid=org_guid), 'name'),
    )
    for name, objects_list, name_attribute in model_table:
        _cleanup_test_data(name=name,
                           objects_list=objects_list,
                           name_attribute=name_attribute)
# Example #3
    def from_reference(cls, org_guid):
        """Build reference platform metrics for the given organization.

        Collects application/user/organization/service counts and node
        CPU/memory usage, then constructs a metrics instance from them.

        :param org_guid: GUID of the organization to gather user and
            dataset metrics for.
        :return: a new instance populated with the gathered counts.
        """
        from modules.tap_object_model import Application, User, Organization, ServiceOffering, ServiceInstance, DataSet
        app_down_states = [TapEntityState.FAILURE, TapEntityState.STOPPED]

        apps = Application.get_list()
        apps_count = len(apps)
        apps_running_count = len(
            [app for app in apps if app.state == TapEntityState.RUNNING])
        apps_down_count = len(
            [app for app in apps if app.state in app_down_states])
        user_count = len(User.get_all_users(org_guid))
        orgs_count = len(Organization.get_list())
        services_count = len(ServiceOffering.get_list())
        services_inst = len([
            instance for instance in ServiceInstance.get_list()
            if instance.state == TapEntityState.RUNNING
        ])

        # One metrics snapshot per cluster node.
        metrics = [node.get_metrics() for node in KubernetesNode.get_list()]

        # The org-scoped and platform-scoped figures were computed with
        # identical expressions, so parse the metrics once and reuse the
        # result for both scopes (previously parse_cpu/parse_memory each
        # ran twice on the same input).
        cpu_usage = cls.parse_cpu(metrics) / (cls.CPU_RATE_FOR_REF * cls.NODE)
        memory_usage = cls.parse_memory(metrics)

        datasets = DataSet.api_get_list(org_guid_list=[org_guid])

        return cls(apps=apps_count,
                   apps_running=apps_running_count,
                   apps_down=apps_down_count,
                   users_org=user_count,
                   users_platform=user_count,
                   orgs=orgs_count,
                   services=services_count,
                   service_instances=services_inst,
                   service_usage=services_inst,
                   cpu_usage_org=cpu_usage,
                   memory_usage_org=memory_usage,
                   cpu_usage_platform=cpu_usage,
                   memory_usage_platform=memory_usage,
                   datasets=datasets)
def test_connect_to_atk_from_jupyter_using_default_atk_client(
        context, request, core_space, test_space, test_org, admin_user):
    """Connect to Atk from Jupyter using Default Atk Client.

    Locates the atk app in the core space, adds the admin user to the test
    space, creates a Jupyter service instance, and drives an interactive
    notebook session that builds an ATK credentials file and connects.
    """
    step("Get atk app from core space")
    atk_app = next(
        (app for app in Application.get_list() if app.name == "atk"), None)
    if atk_app is None:
        raise AssertionError("Atk app not found in core space")
    atk_url = atk_app.urls[0]
    step("Add admin to test space")
    admin_user.api_add_to_space(space_guid=test_space.guid,
                                org_guid=test_org.guid,
                                roles=User.SPACE_ROLES["developer"])
    step("Create instance of Jupyter service")
    jupyter = Jupyter(context=context)
    assertions.assert_in_with_retry(jupyter.instance,
                                    ServiceInstance.api_get_list,
                                    space_guid=test_space.guid)
    step("Get credentials for the new jupyter service instance")
    jupyter.get_credentials()
    step("Login into Jupyter")
    jupyter.login()
    # Ensure the service instance is removed even if the test fails below.
    request.addfinalizer(lambda: jupyter.instance.api_delete())
    step("Create new Jupyter notebook")
    notebook = jupyter.create_notebook()
    step("import atk client in the notebook")
    notebook.send_input("import trustedanalytics as ta")
    assert notebook.check_command_status() == "ok"
    step("Create credentials file using atk client")
    notebook.send_input("ta.create_credentials_file('./cred_file')")
    assert "URI of the ATK server" in notebook.get_prompt_text()
    notebook.send_input(atk_url, reply=True)
    assert "User name" in notebook.get_prompt_text()
    notebook.send_input(config.admin_username, reply=True)
    # BUG FIX: the original line asserted `"" in notebook.get_prompt_text()`,
    # which is vacuously true ("" is a substring of every string). Assert a
    # prompt actually appeared instead; presumably the intended check was for
    # "Password" — TODO confirm the exact prompt text before tightening.
    assert notebook.get_prompt_text(), "Expected a password prompt"
    notebook.send_input(config.admin_password,
                        reply=True,
                        obscure_from_log=True)
    assert "Connect now?" in notebook.get_prompt_text()
    notebook.send_input("y", reply=True)
    assert "Connected." in str(notebook.get_stream_result())
    notebook.ws.close()
    def test_3_submit_java_job_design(self, admin_user):
        """
        <b>Description:</b>
        Check that submitting a new job works.

        <b>Input data:</b>
        1. user admin id
        2. job.properties file
        3. workflow.xml file

        <b>Expected results:</b>
        Test passes when java job is submitted to platform.

        <b>Steps:</b>
        1. Create cdh master client.
        2. Create job.properties and workflow.xml.
        3. Create hdfs directory.
        4. Copy workflow.xml to hdfs directory.
        5. Submit java job to platform.
        6. Check that job was successfully submitted.
        """
        step("Create application hdfs path name")
        app_hdfs_path = generate_test_object_name(
            prefix="hdfs://nameservice1/user/hue/oozie/workspaces/_{}_-oozie-".
            format(admin_user.guid))
        step("Create Cdh master 2 client")
        self.__class__.client = CdhMasterClient(
            cdh_host_name=config.cdh_master_2_hostname)
        step("Create directory to store oozie data")
        properties_path = "/tmp/{}".format(
            datetime.now().strftime("%Y%m%d_%H%M%S_%f"))
        step("Get oozie url")
        app_name = TAP.cdh_broker
        cdh_broker = next(
            (app for app in Application.get_list() if app_name == app.name),
            None)
        assert cdh_broker is not None, "{} not available".format(app_name)
        # The broker env exposes the resource manager URL; oozie listens on
        # port 11000 of the same host.
        oozie_url = str(json.loads(cdh_broker.cf_api_env()["ENVIRONMENT_JSON"]["CREDENTIALS"])["resource_manager"]). \
            replace("8088", "11000/oozie")
        step("Prepare commands needed before job design execution")
        # Each inner list is one command (argv-style tokens) executed on the
        # cdh-master-2 host; echo output is captured to files via the
        # client's _output_path and copied into place.
        # FIX: the "hadoop" tokens originally carried a trailing space
        # ("hadoop ") — almost certainly a typo; "hadoop" works whether the
        # tokens are shell-joined or exec'd directly.
        cmds = [
            ["mkdir", properties_path],
            ["echo", "non-kerberos"],
            [
                "echo",
                self.generate_job_properties(admin_user=admin_user,
                                             app_hdfs_path=app_hdfs_path)
            ],
            [
                "cp", "{}{}".format(self.client._output_path, "/2_1"),
                "{}{}".format(properties_path, "/job.properties")
            ],
            [
                "echo",
                self.generate_workflow(job_name=self.job_name,
                                       main_class=self.main_class,
                                       csv_hdfs_path=self.csv_hdfs_path,
                                       output_name=self.output_name)
            ],
            [
                "cp", "{}{}".format(self.client._output_path, "/4_1"),
                "{}{}".format(properties_path, "/workflow.xml")
            ],
            ["hadoop", "fs", "-mkdir", app_hdfs_path],
            [
                "hadoop", "fs", "-copyFromLocal",
                "{}{}".format(properties_path, "/workflow.xml"), app_hdfs_path
            ],
        ]
        step("Check if this is kerberos environment")
        if kerberos.is_kerberos_environment():
            step(
                "If kerberos environment: add ktinit with oauth token to commands"
            )
            client = HttpClientFactory.get(
                UaaConfigurationProvider.get(config.admin_username,
                                             config.admin_password))
            # Replace the "non-kerberos" marker with a real ticket init.
            cmds[1] = ["ktinit", "-t", client.auth.token]

        step(
            "Execute commands: Create job.properties and workflow.xml files at cdh-master-2; Create hdfs directory;"
            "Copy workflow.xml to hdfs directory")
        self.client.exec_commands(cmds)
        step("Run java job design")
        submitted_job = self.client.exec_commands([[
            "oozie", "job", "-oozie", oozie_url, "-config",
            "{}{}".format(properties_path, "/job.properties"), "-run"
        ]])
        assert "job" in submitted_job[0][0], "Job was not created"
        step("Get job workflow id")
        # BUG FIX: the original used `.strip("job: ")`, which strips any of
        # the characters {'j', 'o', 'b', ':', ' '} from BOTH ends of the
        # string — not the literal prefix — and can corrupt the workflow id.
        # Strip whitespace and remove the exact "job: " prefix instead.
        raw_output = submitted_job[0][0].strip()
        prefix = "job: "
        if raw_output.startswith(prefix):
            raw_output = raw_output[len(prefix):]
        self.__class__.workflow_id = raw_output