def test_json_build(TestApp, mock_auth):
    TestApp.task.run_name = 'Workflow_test'
    wf_obj = Workflow(TestApp.task, auth=mock_auth)
    wf_json = wf_obj._build_workflow_json()

    tasks = wf_json['tasks']

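    # Expect two tasks: the custom task and the StageDataToS3 task that stages its output.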
    assert len(tasks) == 2

    custom_task_name = None
    custom_task_output_names = None

    for task in tasks:

        if task['taskType'] == 'MyCustomTask':
            inputs = task['inputs']
            outputs = task['outputs']
            assert len(inputs) == 3
            assert len(outputs) == 1
            custom_task_name = task['name']
            custom_task_output_names = [p['name'] for p in task['outputs']]
        elif task['taskType'] == 'StageDataToS3':
            inputs = task['inputs']
            assert len(inputs) == 2

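            # The destination value looks like 's3://<bucket>/<prefix>/<run_name>/<port_name>';
            # split('/')[2:] drops the 's3:' scheme and the empty segment after it, leaving
            # [bucket, prefix, run_name, port_name]. With mocked auth the bucket and prefix
            # fall back to 'not_provided'.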
            parsed_s3_loc = [x['value'].split('/')[2:] for x in inputs if x['name'] == 'destination'][0]
            exp_names = ['not_provided', 'not_provided', 'Workflow_test', 'output_port']
            assert exp_names == parsed_s3_loc

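            # The data input's source is formatted '<task_name>:<output_port_name>',
            # wiring the staging task to the custom task's output.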
            parsed_source = [x['source'].split(':') for x in inputs if x['name'] == 'data'][0]
            assert parsed_source[0] == custom_task_name
            assert parsed_source[1] in custom_task_output_names
        else:
            assert False, 'unexpected taskType: %s' % task['taskType']


def test_execute_workflow(TestApp, test_path, mock_auth):
    test_wf_file = os.path.join(test_path, 'test_valid_wf.json')

    vcr_filename = os.path.join(
        test_path, 'vcr_cassettes', 'workflow.yaml'
    )

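    # If a recorded cassette exists, the HTTP interactions are replayed (VCR-style),
    # so mocked credentials suffice; otherwise fall back to live auth.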
    if os.path.isfile(vcr_filename):
        wf_obj = Workflow(TestApp.task, auth=mock_auth)
    else:
        wf_obj = Workflow(TestApp.task)

    with open(test_wf_file, 'r') as f:
        wf_json = f.read()
        print(wf_json)
        wf_obj.execute(override_wf_json=json.loads(wf_json))

    assert wf_obj.id is not None
    assert wf_obj.id.isdigit()  # the id should be a numeric string
    assert wf_obj.status['state'] == 'pending'
    assert wf_obj.status['event'] == 'submitted'

    assert wf_obj.succeeded is False
    assert wf_obj.complete is False

    wf_obj.id = '4392617339332738708'  # A completed workflow
    wf_obj._refresh_status()

    assert wf_obj.succeeded is True
    assert wf_obj.complete is True
    def _run_app(self):
        """
        Method for running a custom Application Templates.
        NOTES:
            * The default name of the application is app.py. So this function is going to look
            for app.py, unless the --file option is provide with a different file name.
            * The generated source bundle will package everything in the work_path. If large files
            not required for the application source, they need to be ignored. Use a file called "pkg_ignore"
            to identify folders and files to ignore.
        USAGE: cloud-harness run <file_name> [--remote] [--verbose] [--upload] [--download] [--dry-run]
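        EXAMPLE: cloud-harness run app.py --remote --verbose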
        """
        is_remote_run = self._arguments.get('--remote')
        filename = self._arguments.get('<file_name>')
        upload_ports = self._arguments.get('--upload')
        download_ports = self._arguments.get('--download')
        is_verbose = self._arguments.get('--verbose')
        # A dry run allows ports to be pushed up, but skips execution and monitoring.
        is_dry_run = self._arguments.get('--dry-run')

        if download_ports:  # TODO temporary until implemented.
            raise NotImplementedError("Downloading of output ports is not implemented yet.")

        # Check if the filename passed is actually a class object (gbdxtools functionality)
        if inspect.isclass(filename) and issubclass(filename, TaskTemplate):
            template_class = filename
            template_file = inspect.getfile(template_class)
            config_file = self._write_config_file(template_file)

        else:
            template_file = self._get_template_abs_path(filename)

            if not os.path.isfile(template_file):
                raise ValueError('The location %s does not exist' % template_file)

            config_file = self._write_config_file(template_file)

            template_class = self._get_class(template_file)

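        # Enter the template as a context manager so its exit hook can report
        # template.reason once the run finishes (or fails).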
        with template_class() as template:
            if is_remote_run:
                task = template.task

                # Set the source bundle directory to where the template_file is.
                task.source_bundle.value = os.path.join(os.path.dirname(template_file), 'tmp_%s' % str(uuid.uuid4()))

                # Create a task service object
                task_service = TaskService()
                task_service.delete_task(task.name)
                printer(task_service.register_task(task.json()))

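                # The run name becomes part of the S3 destination path for staged output ports.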
                task.run_name = '{task_name}_src'.format(
                    task_name=task.name,
                    # timestamp=datetime.utcnow().strftime('%Y_%m_%d_%H')
                )

                src_bundle_dir = task.source_bundle.value

                # Create source bundle to be executed on the GBDX platform
                self._archive_source(os.path.dirname(src_bundle_dir), src_bundle_dir)

                port_service = PortService(task)

                if upload_ports:
                    # Push all port data to S3
                    port_service.upload_input_ports()
                else:
                    # Only push source bundle port
                    port_service.upload_input_ports(port_list=[self.SOURCE_BUNDLE_PORT])

                # Delete source bundle directory and config after upload.
                shutil.rmtree(src_bundle_dir)
                os.remove(config_file)

                # Build task json to run remotely
                self.task = port_service.task

                # Validate task
                task.is_valid(remote=True)

                workflow = Workflow(self.task)

                if is_verbose:
                    printer(template.task.json())

                    temp_wf = workflow.json

                    printer(temp_wf)

                if not is_dry_run:
                    try:
                        workflow.execute()
                        printer(workflow.id)
                    except Exception as e:
                        printer(str(e))
                        template.reason = "Execution Failed: %s" % e
                        return

                    # Monitor events of workflow
                    is_done = workflow.monitor_run()

                    if is_done:
                        template.reason = "Execution Completed"
                    else:
                        template.reason = "Execution Failed during Run"

                    if download_ports:
                        # port_service.download_output_port()
                        pass

                    # Note: This may be temporary while working with gbdxtools
                    # Delete task after run
                    task_service.delete_task(task.name)

            else:
                # Validate task
                template.task.is_valid()

                if is_verbose:
                    printer(template.task.json())
                    all_ports = template.task.ports[0] + template.task.ports[1]
                    printer([str(port) for port in all_ports])

                if not is_dry_run:
                    # Run Task Locally
                    try:
                        template.invoke()
                    except Exception as e:
                        template.reason = "Failed Exception: %s" % e
                    else:
                        template.reason = "Execution Completed"
                else:
                    template.reason = "Execution Skipped"