def view_save_problem_form(request):
    """Render the SaveProblemForm test page.

    On a valid POST, write the submitted problem data into the user's
    workspace via BasicProblemWriter and return a JSON result; otherwise
    re-render the form (with any validation errors in the template context).
    """
    ws_info = get_latest_user_workspace(request)
    if not ws_info.success:
        return JsonResponse(get_json_error(ws_info.err_msg))
    workspace = ws_info.result_obj

    template_info = dict()
    if request.POST:
        problem_form = SaveProblemForm(request.POST)
        if problem_form.is_valid():
            cleaned = problem_form.cleaned_data

            # Write the submitted problem data to the workspace
            writer = BasicProblemWriter(workspace,
                                        cleaned[PROBLEM_REQ_FILENAME],
                                        cleaned[PROBLEM_REQ_DATA])
            if writer.has_error():
                return JsonResponse(get_json_error(writer.error_message))

            file_info = dict(filename=writer.new_filepath,
                             timestamp=datetime.now())
            return JsonResponse(get_json_success('file created!',
                                                 data=file_info))

        # invalid submission: keep the errors, but show a fresh form
        template_info['form_errs'] = problem_form.errors
        problem_form = SaveProblemForm()
    else:
        problem_form = SaveProblemForm()

    template_info['cform'] = problem_form

    return render(request, 'ta2_interfaces/view_save_problem_form.html',
                  template_info)
Example #2
0
    def retrieve_examine_workspace(self):
        """Was the workspace set.  If not, see if it can be retrieved

        Resolution order:
          1. A pre-set ``self.user_workspace`` (type-checked below).
          2. The latest workspace for ``self.request_obj``'s user, which
             also supplies ``self.d3m_config``.
          3. A default d3m_config, if none was set by the steps above.

        On a type error, records the problem via ``self.add_err_msg()``
        and returns early; a failed workspace lookup is silently ignored.
        """
        # A previous step already recorded an error -- nothing to do
        if self.has_error():
            return

        # If the workspace is set, make sure it's the right type
        #
        if self.user_workspace:
            if not isinstance(self.user_workspace, UserWorkspace):
                self.add_err_msg(('user_workspace must be a UserWorkspace'
                                  ' object or None'))
                return
            # Looks like we have a user workspace!

        # If there's not a user workspace, see if we can get it
        #   via the request_obj
        #
        if self.request_obj and not self.user_workspace:
            ws_info = ws_util.get_latest_user_workspace(self.request_obj)
            if ws_info.success:
                # Got one!
                self.user_workspace = ws_info.result_obj

                # Also use the d3m_config from it
                # This overrides a manually set d3m_config
                self.d3m_config = self.user_workspace.d3m_config
            # NOTE(review): a failed lookup leaves user_workspace unset with
            # no error message -- confirm that is intentional

        #  See if there's a default d3m_config
        #
        if not self.d3m_config:
            self.d3m_config = get_latest_d3m_config()
Example #3
0
def api_augment_async(request):
    """Run steps of augment, create new dataset folder structure, etc"""
    # Latest workspace for the requesting user
    #
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        user_msg = 'User workspace not found: %s' % workspace_info.err_msg
        return JsonResponse(get_json_error(user_msg))
    workspace = workspace_info.result_obj

    # Parse the JSON request body into a python dict
    #
    body_info = get_request_body_as_json(request)
    if not body_info.success:
        return JsonResponse(get_json_error(body_info.err_msg))

    # Kick off the augment call with the request parameters
    #
    augment_info = dm_tasks.make_augment_call(workspace,
                                              body_info.result_obj)
    if not augment_info.success:
        return JsonResponse(get_json_error(augment_info.err_msg))

    return JsonResponse(get_json_success(augment_info.result_obj))
Example #4
0
    def write_user_log_from_request(request_obj):
        """
        Write out the user log, based on the HttpRequest

        Looks up the latest workspace for the request's user and delegates
        to ``LogEntryMaker.write_user_log``; returns an error response if
        no workspace is found.
        """
        workspace_info = get_latest_user_workspace(request_obj)
        if workspace_info.success:
            return LogEntryMaker.write_user_log(workspace_info.result_obj)

        user_msg = 'User workspace not found: %s' % workspace_info.err_msg
        return err_resp(user_msg)
Example #5
0
def view_solve(request):
    """Start an async solver task for the requesting user.

    Expects a JSON body with keys:
      - ``specification``: solver specification (dict)
      - ``system``: id of the solver backend
      - ``system_params``: optional dict of extra solver parameters
      - ``timeout``: optional numeric timeout in seconds (clamped to
        [0, TIMEOUT_MAX]; non-numeric values fall back to TIMEOUT_DEFAULT)

    Returns JSON containing the new ``search_id``.
    """
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user_obj = user_info.result_obj

    raven_data_info = get_request_body_as_json(request)
    if not raven_data_info.success:
        err_msg = "request.body not found for solve"
        return JsonResponse(get_json_error(err_msg))
    data = raven_data_info.result_obj

    # workspace
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    # the username doubles as the websocket channel id
    websocket_id = user_obj.username
    specification = data['specification']
    system_id = data['system']
    system_params = data.get('system_params', {})

    # create a location where the solver may write to disk
    dest_dir_info = create_destination_directory(user_workspace,
                                                 name='solver_scratch_space')
    if not dest_dir_info[KEY_SUCCESS]:
        # BUG FIX: dest_dir_info is a dict (see KEY_SUCCESS/KEY_DATA use),
        # so the previous ".err_msg" attribute access raised AttributeError.
        # Assumes the error text is stored under KEY_MESSAGE -- confirm
        # against create_destination_directory.
        return JsonResponse(get_json_error(dest_dir_info.get(KEY_MESSAGE)))
    dest_directory = dest_dir_info[KEY_DATA]
    specification['temp_directory'] = dest_directory

    # TODO: timeout on celery worker
    # sanity check timeout: clamp numeric values, otherwise use the default
    if isinstance(data.get('timeout', None), (int, float)):
        timeout = min(max(data.get('timeout'), 0), TIMEOUT_MAX)
    else:
        timeout = TIMEOUT_DEFAULT

    search_id = Search.get_search_id()

    # run synchronously in DEBUG_MODE, otherwise queue via celery
    task_handle = tasks.solve_task
    if not DEBUG_MODE:
        task_handle = task_handle.delay
    task_handle(websocket_id, system_id, specification, system_params,
                search_id)

    return JsonResponse({
        KEY_SUCCESS: True,
        KEY_MESSAGE: "solve successfully started",
        KEY_DATA: {
            "search_id": search_id
        }
    })
Example #6
0
def view_end_search_solutions(request):
    """gRPC: Call from UI with a EndSearchSolutionsRequest"""
    print('view_end_search_solutions 1')
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user = user_info.result_obj

    print('view_end_search_solutions 2')
    req_body_info = get_request_body(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))

    print('view_end_search_solutions 3')

    # --------------------------------
    # Behavioral logging
    # --------------------------------
    log_data = dict(session_key=get_session_key(request),
                    feature_id=ta2_static.END_SEARCH_SOLUTIONS,
                    activity_l1=bl_static.L1_SYSTEM_ACTIVITY,
                    activity_l2=bl_static.L2_ACTIVITY_BLANK)
    LogEntryMaker.create_ta2ta3_entry(user, log_data)
    print('view_end_search_solutions 4')

    # Ask the TA2 to end the search session
    #
    search_info = end_search_solutions(req_body_info.result_obj, user=user)
    if not search_info.success:
        return JsonResponse(get_json_error(search_info.err_msg))

    # The session is over: write out the behavioral log entries
    # (best effort -- a missing workspace is passed along as None)
    # and clear them
    workspace = None
    ws_info = get_latest_user_workspace(request)
    if ws_info.success:
        workspace = ws_info.result_obj
    ResetUtil.write_and_clear_behavioral_logs(user, workspace)

    json_info = get_json_success('success!', data=search_info.result_obj)
    return JsonResponse(json_info, safe=False)
Example #7
0
def api_materialize(request):
    """Run materialize using either ISI or NYU"""
    success, req_json = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(req_json))

    # Latest UserWorkspace -- later used for logging
    #
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        user_msg = 'User workspace not found: %s' % workspace_info.err_msg
        return JsonResponse(get_json_error(user_msg))
    workspace = workspace_info.result_obj

    # Validate the request against the materialize form
    #
    form = dm_forms.DatamartMaterializeForm(req_json)
    if not form.is_valid():
        print('form.errors.as_json()', form.errors.as_json())
        return JsonResponse(
            get_json_error("invalid input",
                           errors=form.errors.as_json()))

    # Pick the datamart utility class for the requested source
    # (e.g. DatamartJobUtilISI, DatamartJobUtilNYU)
    #
    util_info = get_datamart_job_util(form.cleaned_data['source'])
    if not util_info.success:
        return JsonResponse(get_json_error(util_info.err_msg))
    DatamartJobUtil = util_info.result_obj

    # Run datamart_materialize
    #
    materialize_result = DatamartJobUtil.datamart_materialize(
        workspace,
        form.cleaned_data['search_result'])
    if not materialize_result.success:
        return JsonResponse(get_json_error(materialize_result.err_msg))

    return JsonResponse(
        get_json_success('it worked',
                         data=materialize_result.result_obj))
Example #8
0
def api_search(request):
    """Search the datamart with a JSON request.  The 'source' will
    determine which datamart to search"""
    # workspace, for logging
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        user_msg = 'User workspace not found: %s' % workspace_info.err_msg
        return JsonResponse(get_json_error(user_msg))
    workspace = workspace_info.result_obj

    success, req_json = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(req_json))

    # validate the search request
    form = dm_forms.DatamartSearchForm(req_json)
    if not form.is_valid():
        print('\ntype form.errors', type(form.errors.as_json()))
        # flatten the form errors into a single message string
        form_errs = json.loads(form.errors.as_json())
        err_msgs = []
        for err_list in form_errs.values():
            for err_item in err_list:
                if 'message' in err_item:
                    err_msgs.append(err_item['message'])
        print('\nerr_msgs', err_msgs)

        json_err = get_json_error('Input error: %s' % ('. '.join(err_msgs)))
        return JsonResponse(json_err)

    # Retrieve the appropriate DatamartJobUtil
    # (e.g. DatamartJobUtilISI, DatamartJobUtilNYU)
    #
    util_info = get_datamart_job_util(form.cleaned_data['source'])
    if not util_info.success:
        return JsonResponse(get_json_error(util_info.err_msg))
    DatamartJobUtil = util_info.result_obj

    # run the search; on failure the second value is the error message,
    # on success it is the result payload
    success, results_obj_err = DatamartJobUtil.datamart_search(
        form.cleaned_data['query'],
        user_workspace=workspace)
    if not success:
        return JsonResponse(get_json_error(results_obj_err))

    return JsonResponse(get_json_success('it worked', data=results_obj_err))
Example #9
0
def api_search_by_dataset(request):
    """For search, submit the entire dataset.
    Return the calls async"""
    # (1) parse the JSON request body
    #
    success, req_json = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(req_json))

    # (2) latest UserWorkspace
    #
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        user_msg = 'User workspace not found: %s' % workspace_info.err_msg
        return JsonResponse(get_json_error(user_msg))
    workspace = workspace_info.result_obj

    # (3) which datamart?
    #
    form = dm_forms.DatamartSearchByDatasetForm(req_json)
    if not form.is_valid():
        print('form.errors.as_json()', form.errors.as_json())
        return JsonResponse(
            get_json_error("invalid input",
                           errors=form.errors.as_json()))

    # (4) location of the current dataset; augment is embedded within a
    # manipulations pipeline, so the data path may be different
    #
    dataset_path = req_json['dataset_path']

    # (5) kick off the async search
    #
    call_info = dm_tasks.make_search_by_dataset_call(
        form.cleaned_data['source'],
        workspace.id,
        dataset_path,
        query=req_json.get('query', None))

    if not call_info.success:
        return JsonResponse(get_json_error(call_info.err_msg))

    return JsonResponse(get_json_success('Search by dataset has started!'))
Example #10
0
def get_partials_datasets(request):
    """Create ICE partials datasets from the JSON request body,
    logging the activity against the user's workspace."""
    # JSON request body
    body_info = get_request_body_as_json(request)
    if not body_info.success:
        return JsonResponse(get_json_error(body_info.err_msg))
    partials_req = body_info.result_obj

    # workspace
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        return JsonResponse(get_json_error(workspace_info.err_msg))
    workspace = workspace_info.result_obj

    # user (authentication check only)
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    # behavioral logging
    log_data = dict(session_key=get_session_key(request),
                    feature_id='PARTIALS_APP',
                    activity_l1=bl_static.L1_PROBLEM_DEFINITION,
                    activity_l2=bl_static.L2_ACTIVITY_BLANK)
    LogEntryMaker.create_system_entry(workspace.user, log_data)

    try:
        response = create_partials_datasets(partials_req, workspace.id)
    except Exception:
        # best effort: report the failure as a JSON error response
        print("caught traceback when creating ICE datasets:", flush=True)
        print(traceback.format_exc(), flush=True)
        response = {
            KEY_SUCCESS: False,
            KEY_MESSAGE: "Internal error while creating ICE datasets."
        }

    return JsonResponse(response)
def view_store_basic_problem(request):
    """Initial step, store a file to the /output/problems directory

    Expects a JSON body containing PROBLEM_REQ_FILENAME and
    PROBLEM_REQ_DATA; writes the data via BasicProblemWriter and returns
    JSON with the new file path and a timestamp.
    """
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    req_info = get_request_body_as_json(request)
    if not req_info.success:
        return JsonResponse(get_json_error(req_info.err_msg))

    req_json = req_info.result_obj

    # Both keys are required; report the first one missing.
    # (Replaces two copy-pasted "if not KEY in" checks with the
    # idiomatic "not in" in a single loop.)
    for req_key in (PROBLEM_REQ_FILENAME, PROBLEM_REQ_DATA):
        if req_key not in req_json:
            user_msg = (
                'The request did not contain a "%s" value. (view_store_basic_problem)'
            ) % req_key
            return JsonResponse(get_json_error(user_msg))

    # Write the problem data into the workspace
    bpw = BasicProblemWriter(user_workspace, req_json[PROBLEM_REQ_FILENAME],
                             req_json[PROBLEM_REQ_DATA])
    if bpw.has_error():
        return JsonResponse(get_json_error(bpw.error_message))

    data_info = dict(filename=bpw.new_filepath, timestamp=datetime.now())

    info = get_json_success('file created!', data=data_info)

    return JsonResponse(info)
Example #12
0
def get_train_test_split(request):
    """Expects a JSON request containing "datasetDoc_path"
    For example: { "datasetDoc_path": "/datasetDoc.json"}
    """
    # JSON request body
    body_info = get_request_body_as_json(request)
    if not body_info.success:
        return JsonResponse(get_json_error(body_info.err_msg))
    split_req = body_info.result_obj

    # workspace
    workspace_info = get_latest_user_workspace(request)
    if not workspace_info.success:
        return JsonResponse(get_json_error(workspace_info.err_msg))
    workspace = workspace_info.result_obj

    # user (authentication check only)
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    try:
        response = {
            "success": True,
            "data": split_dataset(split_req, workspace),
            "message": "data partitioning successful"
        }
    except Exception:
        # best effort: report the failure as a JSON error response
        print("caught traceback when splitting data:", flush=True)
        print(traceback.format_exc(), flush=True)
        response = {
            "success": False,
            "message": "Internal error while splitting dataset."
        }

    return JsonResponse(response)
Example #13
0
def api_materialize_async(request):
    """Run async materialize with ISI

    Validates the JSON request against DatamartMaterializeForm and queues
    the materialize task; progress is pushed over the user's websocket
    (keyed by username).
    """
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(json_req_obj))

    # Get the latest UserWorkspace
    #
    ws_info = get_latest_user_workspace(request)
    if not ws_info.success:
        user_msg = 'User workspace not found: %s' % ws_info.err_msg
        return JsonResponse(get_json_error(user_msg))

    user_workspace = ws_info.result_obj

    # check if data is valid
    form = dm_forms.DatamartMaterializeForm(json_req_obj)
    if not form.is_valid():
        print('form.errors.as_json()', form.errors.as_json())
        return JsonResponse(
            get_json_error("invalid input",
                           errors=form.errors.as_json()))

    # queue the async materialize call
    mu_info = dm_tasks.make_materialize_call(
        form.cleaned_data['source'],
        user_workspace.id,
        form.cleaned_data,
        websocket_id=user_workspace.user.username)

    if not mu_info.success:
        return JsonResponse(get_json_error(mu_info.err_msg))

    # (removed: a stray unterminated '"""' after this return -- it was a
    # copy/paste artifact and a SyntaxError; also flattened the redundant
    # else-after-return)
    return JsonResponse(
        get_json_success('in process',
                         data=mu_info.result_obj))
Example #14
0
def view_R_route(request, app_name_in_url):
    """Route TwoRavens calls to Rook
        orig: TwoRavens -> Rook
        view: TwoRavens -> Django 2ravens -> Rook

    This is a bit messy.  Still trying to handle two UI calls:
    - old ones, form POSTs sent with solaJSON key
    - new ones, straight JSON requests

    Raises Http404 for unknown app names; otherwise forwards the request
    body (with the django session id injected) to the Rook server and
    returns its response text.
    """
    # -----------------------------
    # get the app info
    # -----------------------------
    rook_app_info = RAppInfo.get_appinfo_from_url(app_name_in_url)
    if rook_app_info is None:
        raise Http404(('unknown rook app: "{0}" (please add "{0}" to '
                       ' "tworaven_apps/R_services/app_names.py")').format(\
                       app_name_in_url))

    # -----------------------------
    # Used for logging
    # -----------------------------
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))

    user_workspace = user_workspace_info.result_obj

    # -----------------------------
    # additional params
    # -----------------------------
    raven_data_text = {}    # default
    additional_params = {}  # params to add to a JSON call, e.g. for PARTIALS_APP

    # -----------------------------
    # look for the "solaJSON" variable in the POST
    # -----------------------------
    if request.POST and UI_KEY_SOLA_JSON in request.POST:
        # this is a POST with a JSON string under the key solaJSON key
        raven_data_text = request.POST[UI_KEY_SOLA_JSON]
    else:
        # See if the body is JSON format
        raven_data_info = get_request_body_as_json(request)
        if not raven_data_info.success:
            err_msg = ("Neither key '%s' found in POST"
                       " nor JSON in request.body") % UI_KEY_SOLA_JSON
            return JsonResponse(dict(status="ERROR",
                                     message=err_msg))

        raven_data_text = raven_data_info.result_obj

    # Retrieve post data and attempt to insert django session id
    # (if none exists)
    #
    # retrieve session key
    session_key = get_session_key(request)

    if isinstance(raven_data_text, str):
        # JSON arrived as a string: patch an empty session id in place
        blank_session_str = '%s":""' % ROOK_ZESSIONID
        if raven_data_text.find(blank_session_str) > -1:
            # was converting to JSON, but now just simple text substitution
            #
            updated_session_str = '%s":"%s"' % (ROOK_ZESSIONID, session_key)
            raven_data_text = raven_data_text.replace(blank_session_str, updated_session_str)
        elif raven_data_text.find(ROOK_ZESSIONID) == -1:
            print('MAJOR ISSUE: NOT SESSION AT ALL (R_services.views.py)')

    elif isinstance(raven_data_text, dict):
        #  We have a dict, make sure it gets a session
        if ROOK_ZESSIONID in raven_data_text:
            if raven_data_text[ROOK_ZESSIONID] in [None, '']:
                raven_data_text[ROOK_ZESSIONID] = session_key
        else:
            # FIX: was "elif ROOK_ZESSIONID not in raven_data_text", which
            # is always true when reached -- plain else is equivalent
            raven_data_text[ROOK_ZESSIONID] = session_key

        # Add the additional params
        raven_data_text.update(additional_params)

        try:
            raven_data_text = json.dumps(raven_data_text)
        except TypeError:
            return JsonResponse(\
                        dict(success=False,
                             message='Failed to convert data to JSON'))

    app_data = json.loads(raven_data_text)

    # --------------------------------
    # Behavioral logging
    # --------------------------------
    print('rook_app_info.name:', rook_app_info.name)
    feature_id = rook_app_info.name
    if rook_app_info.name == app_names.EXPLORE_APP:
        activity_l1 = bl_static.L1_DATA_PREPARATION
        activity_l2 = bl_static.L2_DATA_EXPLORE

    elif rook_app_info.name == app_names.PLOTDATA_APP:
        feature_id = 'EXPLORE_VIEW_PLOTS'
        activity_l1 = bl_static.L1_DATA_PREPARATION
        activity_l2 = bl_static.L2_DATA_EXPLORE
    else:
        activity_l1 = bl_static.L1_PROBLEM_DEFINITION
        activity_l2 = bl_static.L2_ACTIVITY_BLANK

    log_data = dict(session_key=session_key,
                    feature_id=feature_id,
                    activity_l1=activity_l1,
                    activity_l2=activity_l2)

    LogEntryMaker.create_system_entry(user_workspace.user, log_data)

    # Call R services
    #
    rook_svc_url = rook_app_info.get_rook_server_url()
    print('rook_svc_url', rook_svc_url)
    try:
        rservice_req = requests.post(rook_svc_url,
                                     json=app_data)
    except ConnectionError:
        # NOTE(review): if this is the *builtin* ConnectionError it will NOT
        # catch requests.exceptions.ConnectionError -- confirm the import
        err_msg = 'R Server not responding: %s' % rook_svc_url
        resp_dict = dict(message=err_msg)
        return JsonResponse(resp_dict)

    print('status code from rook call: %s' % rservice_req.status_code)

    return HttpResponse(rservice_req.text)
Example #15
0
def view_upload_dataset(request):
    """Upload dataset and metadata

    Flow:
      1. Get the user's latest workspace.
      2. Create a fresh timestamped destination directory under
         TWORAVENS_USER_DATASETS_DIR for this upload.
      3. Parse the 'metadata' POST field (JSON) and pull the dataset name.
      4. Save each uploaded file as learningData[N].<ext>, rejecting
         unrecognized extensions.
      5. Build the new dataset folder structure via UserDatasetUtil.

    Returns a JSON success/error response.
    """
    print('FILE_UPLOAD_MAX_MEMORY_SIZE:', settings.FILE_UPLOAD_MAX_MEMORY_SIZE)

    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    # Destination directory for learningData.csv, learningData#.csv, etc.
    #   and about.json
    # (random alpha suffix keeps concurrent uploads from colliding)
    #
    dest_dir_info = create_directory_add_timestamp(\
                        join(settings.TWORAVENS_USER_DATASETS_DIR,
                             f'uploads_{user_workspace.user.id}',
                             get_alpha_string(6)))

    if not dest_dir_info.success:
        return JsonResponse(get_json_error(dest_dir_info.err_msg))
    dest_directory = dest_dir_info.result_obj

    print('view_upload_dataset. dest_directory', dest_directory)

    # Save the about.json
    #
    json_info = json_loads(request.POST.get('metadata'))
    if not json_info.success:
        return JsonResponse(get_json_error(json_info.err_msg))

    # save json data
    dataset_name = None
    if dp_static.DATASET_NAME_FROM_UI in json_info.result_obj:
        dataset_name = json_info.result_obj[dp_static.DATASET_NAME_FROM_UI]

    #with open(os.path.join(dest_directory, 'about.json'), 'w') as metadata_file:
    #    json.dump(json_info.result_obj, metadata_file)

    # Save data files.  They don't have to be .csv, that's handled latter,
    #     e.g. convert from .tab, .tsv, xls, etc.
    #
    # Files are renamed learningData.csv, learningData1.csv, ... by index;
    # an unrecognized extension aborts the whole upload.
    for idx, file in enumerate(request.FILES.getlist('files')):
        print(file.name)
        _fname, fext = splitext(file.name)
        if not fext.lower() in dp_static.VALID_EXTENSIONS:
            # no extension found, won't be able to open it
            user_msg = (
                f'The extension for this file was not recognized: "{file.name}".'
                f' Valid extensions: {", ".join(dp_static.VALID_EXTENSIONS)}.')

            return JsonResponse(get_json_error(user_msg))

        # stream the upload to disk chunk by chunk
        new_filename = join(
            dest_directory,
            f'learningData{idx + 1 if idx else ""}{fext.lower()}')
        with open(new_filename, 'wb+') as outfile:
            for chunk in file.chunks():
                outfile.write(chunk)

    print('dest_directory', dest_directory)

    # Create new dataset folders/etc
    #
    additional_inputs_dir = user_workspace.d3m_config.additional_inputs
    created = create_directory(additional_inputs_dir)
    if not created.success:
        return JsonResponse(get_json_error(created.err_msg))

    new_dataset_info = UserDatasetUtil.make_new_dataset(\
                            user_workspace.user.id,
                            dest_directory,
                            settings.TWORAVENS_USER_DATASETS_DIR,
                            **{dp_static.DATASET_NAME: dataset_name})

    if not new_dataset_info.success:
        return JsonResponse(get_json_error(new_dataset_info.err_msg))
    #udu = UserDatasetUtil(1, input_files, output_dir)

    return JsonResponse(get_json_success('file upload completed successfully'))
Example #16
0
def api_get_data(request):
    """Retrieve data from MongoDB
    Example input:
      {
        "datafile": "/ravens_volume/test_data/196_autoMpg/TRAIN/dataset_TRAIN/tables/learningData.csv",
        "collection_name": "196_ag_dataset_TRAIN",
        "method": "aggregate",
        "query": "[{\"$count\":\"total\"}]"
      }

    Optional keys: "distinct", "reload", "export" ("csv" or "dataset"),
    and "metadata" (required when export == "dataset").
    Returns JSON with the query results (or the export result).
    """
    LOGGER.info('--- api_get_data: Retrieve data from MongoDB ---')
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    success, json_req_obj = get_request_body_as_json(request)

    #import json; print('json_req_obj', json.dumps(json_req_obj, indent=4))
    if not success:
        # on failure, json_req_obj holds the error message
        return JsonResponse({"success": False, "error": get_json_error(json_req_obj)})

    # check if data is valid
    #
    try:
        form = EventDataGetManipulationForm(json_req_obj)
        if not form.is_valid():
            err_info = get_json_error("invalid_input",
                                      errors=form.errors.as_json())
            return JsonResponse(err_info)
    except json.decoder.JSONDecodeError as err_obj:
        return JsonResponse(get_json_error('JSONDecodeError: %s' % (err_obj)))

    # ensure the dataset is present
    # (return value ignored; presumably a no-op when already imported
    #  unless "reload" is set -- TODO confirm)
    #
    LOGGER.info('--- api_get_data: ensure the dataset is present ---')
    #
    EventJobUtil.import_dataset(
        settings.TWORAVENS_MONGO_DB_NAME,
        json_req_obj['collection_name'],
        data_path=json_req_obj.get('datafile', None),
        reload=json_req_obj.get('reload', None))

    # apply the manipulations
    # NOTE: on success, results_obj_err is the result payload;
    # on failure it is the error message -- this pair is reused below
    #
    LOGGER.info('--- api_get_data: apply any manipulations ---')
    #
    success, results_obj_err = EventJobUtil.get_data(
        settings.TWORAVENS_MONGO_DB_NAME,
        settings.MONGO_COLLECTION_PREFIX + json_req_obj['collection_name'],
        json_req_obj['method'],
        json.loads(json_req_obj['query']),
        distinct=json_req_obj.get('distinct', None))

    if not success:
        return JsonResponse(get_json_error(results_obj_err))

    # export single data file
    if json_req_obj.get('export') == 'csv':
        success, results_obj_err = EventJobUtil.export_csv(\
            user_workspace,
            settings.MONGO_COLLECTION_PREFIX + json_req_obj['collection_name'],
            results_obj_err)

    # export single data file in problem format
    elif json_req_obj.get('export') == 'dataset':
        success, results_obj_err = EventJobUtil.export_dataset(\
            user_workspace,
            results_obj_err,
            json.loads(json_req_obj['metadata']))

    # since we aren't exporting to files, exhaust the mongo cursor
    else:
        results_obj_err = list(results_obj_err)

    if not success:
        return JsonResponse(get_json_error(results_obj_err))

    LOGGER.info('--- api_get_data: returning data... ---')
    return JsonResponse(\
                get_json_success('it worked',
                                 data=json_comply(results_obj_err)))