def post(self):
    args = resendEmailPostParser.parse_args()
    email = args.get('email')
    os = args.get('os')
    browser = args.get('browser')
    site_url = args.get('site_url', current_app.config['SITE_URL'])

    user = check_email_exists(email)
    if user:
        token = generate_confirmation_token(email)
        site_url = '%s://%s' % (current_app.config['PREFERRED_URL_SCHEME'], site_url)
        confirm_url = '%s/auth/activate/%s' % (site_url, token)
        html = render_template('confirm_email.html',
            username=user.username,
            confirm_url=confirm_url,
            site_url=site_url,
            support_url='mailto:[email protected]',
            os=os,
            browser=browser
        )
        send_email(email, 'Activate Your DIVE Account', html)
        return jsonify({
            'status': 'success',
            'message': 'A confirmation e-mail has been sent to %s' % email
        })
    else:
        return jsonify({
            'status': 'error',
            'message': 'No account corresponds to that e-mail address.'
        }, status=401)

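# A minimal sketch of how resendEmailPostParser could be defined with
# Flask-RESTful's reqparse, assuming the argument names read by the handler
# above; the parser actually used in the codebase may differ.
from flask_restful import reqparse

resendEmailPostParser = reqparse.RequestParser()
resendEmailPostParser.add_argument('email', type=str, required=True, location='json')
resendEmailPostParser.add_argument('os', type=str, location='json')
resendEmailPostParser.add_argument('browser', type=str, location='json')
resendEmailPostParser.add_argument('site_url', type=str, location='json')
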
def post(self):
    args = specPostParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')
    selected_fields = args.get('field_agg_pairs', [])
    if not selected_fields:
        selected_fields = []
    recommendation_types = args.get('recommendation_types', [])
    conditionals = args.get('conditionals', {})
    config = args.get('config', {})

    specs = db_access.get_specs(project_id, dataset_id,
        recommendation_types=recommendation_types,
        selected_fields=selected_fields,
        conditionals=conditionals)

    if specs and not current_app.config['RECOMPUTE_VIZ_SPECS']:
        return jsonify({
            'result': specs,
            'compute': False
        })
    else:
        specs_task = viz_spec_pipeline.apply_async(args=[
            dataset_id,
            project_id,
            selected_fields,
            recommendation_types,
            conditionals,
            config
        ])
        return jsonify({
            'taskId': specs_task.task_id,
            'compute': True
        })

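# Hedged sketch: the 'compute': True branch above returns a Celery task id.
# One way to check on that task server-side is through the task's AsyncResult;
# the actual polling route is not shown in this section, so this is
# illustrative only.
def get_viz_spec_task_status(task_id):
    result = viz_spec_pipeline.AsyncResult(task_id)  # standard Celery API
    if result.ready():
        return {'state': result.state, 'result': result.get(propagate=False)}
    return {'state': result.state}
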
def get(self, token):
    email = confirm_token(token)
    if not email:
        return jsonify({
            'status': 'failure',
            'message': 'The confirmation link is invalid or expired.'
        }, status=401)

    user = get_user(email=email)
    parsed_user = row_to_dict(user)
    if user.confirmed:
        response = jsonify({
            'status': 'success',
            'message': 'Account for %s already activated.' % email,
            'alreadyActivated': True,
            'user': { k: parsed_user[k] for k in ['anonymous', 'confirmed', 'email', 'id', 'username'] }
        }, status=200)
    else:
        confirm_user(email=email)
        login_user(user, remember=True)
        response = jsonify({
            'status': 'success',
            'message': 'Account for %s successfully activated.' % email,
            'alreadyActivated': False,
            'user': { k: parsed_user[k] for k in ['anonymous', 'confirmed', 'email', 'id', 'username'] }
        })

    response = set_cookies(response, {
        'anonymous': user.anonymous,
        'username': user.username,
        'email': user.email,
        'user_id': user.id,
        'confirmed': user.confirmed
    }, expires=datetime.utcnow() + COOKIE_DURATION)
    return response

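# generate_confirmation_token / confirm_token are not defined in this section.
# Token helpers like these are commonly built on itsdangerous'
# URLSafeTimedSerializer, as in this hedged sketch; the project's actual
# helpers, salt, and expiration policy may differ.
from itsdangerous import URLSafeTimedSerializer, BadSignature

def generate_confirmation_token(email):
    serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
    return serializer.dumps(email, salt='email-confirm')  # salt is an assumption

def confirm_token(token, expiration=3600):
    serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
    try:
        return serializer.loads(token, salt='email-confirm', max_age=expiration)
    except BadSignature:
        return None
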
def post(self, field_id):
    args = fieldPostParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')
    field_type = args.get('type')
    field_is_id = args.get('isId')
    field_color = args.get('color')

    if field_type:
        if (field_type not in quantitative_types) \
            and (field_type not in categorical_types) \
            and (field_type not in temporal_types):
            return make_response(jsonify({'status': 'Invalid field type.'}))

        general_type = specific_type_to_general_type[field_type]

        field_property = db_access.get_field_property(project_id, dataset_id, field_id)
        field_name = field_property['name']
        df = get_data(project_id=project_id, dataset_id=dataset_id)
        updated_properties = compute_single_field_property_nontype(
            field_name, df[field_name], field_type, general_type)

        field_property_document = \
            db_access.update_field_properties_type_by_id(
                project_id, field_id, field_type, general_type, updated_properties)

    if field_is_id is not None:
        field_property_document = \
            db_access.update_field_properties_is_id_by_id(project_id, field_id, field_is_id)

    if field_color is not None:
        field_property_document = \
            db_access.update_field_properties_color_by_id(project_id, field_id, field_color)

    return make_response(jsonify(field_property_document))

def post(self):
    form_data = json.loads(request.form.get('data'))
    project_id = form_data.get('project_id')
    file_obj = request.files.get('file')

    if file_obj and allowed_file(file_obj.filename):
        # Get dataset_ids corresponding to file if successful upload
        try:
            datasets = upload_file(project_id, file_obj)
        except UploadTooLargeException as e:
            return jsonify({
                'status': 'error',
                'message': str(e)
            }, status=413)

        result = {
            'status': 'success',
            'datasets': datasets
        }

        for dataset in datasets:
            ingestion_task = ingestion_pipeline.apply_async(args=[dataset['id'], project_id])

        return jsonify({ 'task_id': ingestion_task.task_id }, status=202)

    return jsonify({'status': 'Upload failed'})

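# allowed_file() is not shown in this section. A minimal sketch following the
# standard Flask upload pattern, with an assumed extension whitelist:
ALLOWED_EXTENSIONS = {'csv', 'tsv', 'txt', 'xls', 'xlsx', 'json'}  # assumption

def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
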
def post(self):
    args = loginPostParser.parse_args()
    username = args.get('username')
    email = args.get('email')
    password = args.get('password')
    remember = args.get('rememberMe')

    user_auth_object = check_user_auth(password, email=email, username=username)
    user = user_auth_object['user']
    status = user_auth_object['status']
    message = user_auth_object['message']
    error_type = user_auth_object['error_type']

    if status == AuthStatus.SUCCESS.value:
        parsed_user = row_to_dict(user)
        login_user(user, remember=remember)

        if user.username:
            message = 'Welcome back %s!' % user.username
        else:
            message = 'Welcome back!'

        response = jsonify({
            'status': status,
            'message': message,
            'user': { k: parsed_user[k] for k in ['anonymous', 'confirmed', 'email', 'id', 'username'] }
        })
        response = set_cookies(response, {
            'username': user.username,
            'email': user.email,
            'user_id': user.id,
            'confirmed': user.confirmed,
            'anonymous': user.anonymous
        }, expires=datetime.utcnow() + COOKIE_DURATION)
        return response
    else:
        return jsonify({
            'status': status,
            'message': {
                'login': message,
            },
        }, status=401)

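# Hedged usage sketch: a client logging in against this resource with the
# requests library. The host and route path are assumptions; the JSON field
# names mirror the parser arguments read above.
import requests

resp = requests.post(
    'http://localhost:8081/auth/login',  # assumed host and route
    json={'email': '[email protected]', 'password': 'secret', 'rememberMe': True}
)
print(resp.status_code)
print(resp.json().get('message'))
print(resp.cookies.get_dict())  # cookies set by set_cookies() on success
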
def post(self):
    '''
    spec: {
        independentVariables
        dependentVariable
        interactionTerms
        model
        estimator
        degree
        weights
        functions
        datasetId
        tableLayout
    }
    '''
    args = comparisonPostParser.parse_args()
    project_id = args.get('projectId')
    spec = args.get('spec')
    conditionals = args.get('conditionals', {})

    comparison_doc = db_access.get_comparison_from_spec(project_id, spec, conditionals=conditionals)

    # Check to see if comparison is in db; if so, send back data
    if comparison_doc and not current_app.config['RECOMPUTE_STATISTICS']:
        comparison_data = comparison_doc['data']
        comparison_data['id'] = comparison_doc['id']

        exported_comparison_doc = db_access.get_exported_comparison_by_comparison_id(
            project_id, comparison_doc['id'])
        if exported_comparison_doc:
            comparison_data['exported'] = True
            comparison_data['exportedComparisonId'] = exported_comparison_doc['id']
        else:
            comparison_data['exported'] = False
        return jsonify(comparison_data)
    else:
        comparison_task = comparison_pipeline.apply_async(args=[spec, project_id, conditionals])
        return jsonify({
            'task_id': comparison_task.task_id,
            'compute': True
        }, status=202)

def post(self, token):
    args = resetPasswordWithTokenPostParser.parse_args()
    password = args.get('password')

    email = confirm_token(token)
    if email:
        user = change_user_password_by_email(email, password)
        return jsonify({
            'status': 'success',
            'message': 'Successfully changed password for account %s.' % email
        }, status=200)
    else:
        return jsonify({
            'status': 'failure',
            'message': 'The password reset link is invalid or expired.'
        }, status=401)

def delete(self):
    args = userDeleteParser.parse_args()
    user_id = args.get('user_id')
    password = args.get('password')
    deleted_user = delete_user(user_id, password)
    return jsonify(deleted_user)

def post(self, spec_id):
    args = visualizationFromSpecPostParser.parse_args()
    project_id = args.get('project_id')
    conditionals = args.get('conditionals', {})
    config = args.get('config', {}).get('data', {})
    data_formats = args.get('data_formats')

    spec = db_access.get_spec(spec_id, project_id)
    viz_data = spec.get('data', None)
    if viz_data \
            and (data_formats == spec.get('data_formats')) \
            and (conditionals == spec.get('conditionals')) \
            and (config == spec.get('config')):
        del spec['data']
    else:
        viz_data = get_viz_data_from_enumerated_spec(
            spec, project_id, conditionals, config, data_formats=data_formats)

    result = {
        'spec': spec,
        'visualization': viz_data,
        'exported': False,
        'exported_spec_id': None
    }

    existing_exported_spec = db_access.get_exported_spec_by_fields(
        project_id,
        spec_id,
        conditionals=conditionals,
        config=config
    )
    if existing_exported_spec:
        result['exported'] = True
        result['exported_spec_id'] = existing_exported_spec['id']

    return jsonify(result)

def get(self):
    args = datasetsGetParser.parse_args()
    project_id = args.get('project_id')

    has_project_access, auth_message = project_auth(project_id)
    if not has_project_access:
        return auth_message

    datasets = db_access.get_datasets(project_id, include_preloaded=True)

    data_list = []
    for d in datasets:
        dataset_data = { k: d[k] for k in [
            'title', 'file_name', 'id', 'description', 'info_url', 'tags', 'preloaded'
        ]}
        dataset_data['details'] = db_access.get_dataset_properties(project_id, d.get('id'))
        data_list.append(dataset_data)

    return jsonify({'status': 'success', 'datasets': data_list})

def get(self):
    args = fieldPropertiesGetParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')
    group_by = args.get('group_by')

    has_project_access, auth_message = project_auth(project_id)
    if not has_project_access:
        return auth_message

    field_properties = db_access.get_field_properties(project_id, dataset_id)
    interaction_terms = db_access.get_interaction_terms(project_id, dataset_id)

    if group_by:
        result = {}
        for fp in field_properties:
            fp_group_by = fp[group_by]
            if fp_group_by in result:
                result[fp_group_by].append(fp)
            else:
                result[fp_group_by] = [fp]
    else:
        result = {'field_properties': field_properties}

    result['interactionTerms'] = interaction_terms
    return make_response(jsonify(result))

def put(self, project_id):
    args = projectPutParser.parse_args()
    title = args.get('title')
    description = args.get('description')
    starred = args.get('starred')
    result = db_access.update_project(project_id, title=title, description=description, starred=starred)
    return jsonify(result)

def delete(self, project_id):
    result = db_access.delete_project(project_id)

    if current_app.config['STORAGE_TYPE'] == 'file':
        project_dir = os.path.join(current_app.config['STORAGE_PATH'], str(result['id']))
        if os.path.isdir(project_dir):
            shutil.rmtree(project_dir)
    elif current_app.config['STORAGE_TYPE'] == 's3':
        bucket_objects = s3_client.list_objects(
            Bucket=current_app.config['AWS_DATA_BUCKET'],
            Prefix="%s/" % (project_id)
        )
        if bucket_objects.get('Contents'):
            file_objects = [{ 'Key': obj['Key'] } for obj in bucket_objects['Contents']]
            s3_delete_objects_result = s3_client.delete_objects(
                Bucket=current_app.config['AWS_DATA_BUCKET'],
                Delete={ 'Objects': file_objects }
            )

    return jsonify({
        "message": "Successfully deleted project.",
        "id": result['id']
    })

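# Note on the S3 branch above: list_objects returns at most 1000 keys per call,
# and delete_objects accepts at most 1000 keys per request. A hedged sketch
# that handles larger projects with a boto3 paginator, using the same assumed
# s3_client and bucket configuration:
def delete_project_objects(bucket, prefix):
    paginator = s3_client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        contents = page.get('Contents', [])
        if not contents:
            continue
        s3_client.delete_objects(
            Bucket=bucket,
            Delete={'Objects': [{'Key': obj['Key']} for obj in contents]}
        )
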
def get(self, document_id):
    args = documentGetParser.parse_args()
    project_id = args.get('project_id')
    include_data = args.get('include_data')

    document = db_access.get_public_document(document_id)

    if include_data:
        new_document = document
        new_blocks = []
        for block in document['content']['blocks']:
            if block['contentType'] == ContentType.TEXT.value:
                continue
            new_block = block
            exported_spec_id = block['exportedSpecId']
            exported_spec_type = block['contentType']
            exported_spec = db_access.get_public_exported_spec(exported_spec_id, exported_spec_type)
            new_block['spec'] = exported_spec
            new_blocks.append(new_block)
        new_document['content']['blocks'] = new_blocks
        result = new_document
    else:
        result = document

    return jsonify(result)

def post(self):
    args = projectsPostParser.parse_args()
    title = args.get('title')
    description = args.get('description')
    user_id = args.get('user_id')
    anonymous = args.get('anonymous')
    private = args.get('private')

    result = db_access.insert_project(
        title=title,
        description=description,
        user_id=user_id,
        private=private,
        preloaded=False,
        anonymous=anonymous
    )
    new_project_id = result['id']
    db_access.create_document(new_project_id)

    if current_app.config['STORAGE_TYPE'] == 'file':
        project_dir = os.path.join(current_app.config['STORAGE_PATH'], str(result['id']))
        if not os.path.isdir(project_dir):
            os.mkdir(project_dir)

    return jsonify(result)

def put(self, document_id):
    args = documentPutParser.parse_args()
    content = args.get('content')
    title = args.get('title')
    project_id = args.get('project_id')
    result = db_access.update_document(project_id, document_id, title, content)
    return jsonify(result)

def get(self):
    args = preloadedDatasetsGetParser.parse_args()
    project_id = args.get('project_id')
    get_structure = args.get('get_structure')

    preloaded_datasets = db_access.get_preloaded_datasets(**args)

    selected_preloaded_dataset_ids = []
    if project_id:
        selected_preloaded_datasets = db_access.get_project_preloaded_datasets(project_id)
        selected_preloaded_dataset_ids = [ d['id'] for d in selected_preloaded_datasets ]

    data_list = []
    for d in preloaded_datasets:
        dataset_data = { k: d[k] for k in [
            'title', 'file_name', 'id', 'description', 'info_url', 'tags'
        ]}
        if dataset_data['id'] in selected_preloaded_dataset_ids:
            dataset_data['selected'] = True
        else:
            dataset_data['selected'] = False
        dataset_data['details'] = db_access.get_dataset_properties(project_id, d.get('id'))
        data_list.append(dataset_data)

    return jsonify({'status': 'success', 'datasets': data_list})

def get(self):
    if current_user.is_authenticated:
        user = current_user
        fresh = login_fresh()
        logger.info('User %s (%s) already authenticated. Fresh: %s', user.username, user.id, fresh)
        confirm_login()
    else:
        user = create_anonymous_user()
        login_user(user, remember=True)

    parsed_user = row_to_dict(user)
    response = jsonify({
        'user': { k: parsed_user[k] for k in ['anonymous', 'confirmed', 'email', 'id', 'username'] }
    })
    response = set_cookies(response, {
        'username': user.username,
        'email': '',
        'user_id': user.id,
        'confirmed': False,
        'anonymous': True
    })
    return response

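# set_cookies() is a project helper used throughout these handlers but not
# defined in this section. A hedged sketch of one plausible implementation on
# top of Flask's response.set_cookie; the real helper may differ in cookie
# flags and value serialization.
def set_cookies(response, cookie_dict, expires=None):
    for key, value in cookie_dict.items():
        response.set_cookie(key, value=str(value), expires=expires)
    return response
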
def get(self):
    args = documentsGetParser.parse_args()
    project_id = args.get('project_id')
    result = db_access.get_documents(project_id)
    return jsonify({ 'documents': result })

def get(self):
    args = deselectPreloadedDatasetGetParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')

    preloaded_dataset = db_access.remove_preloaded_dataset_from_project(project_id, dataset_id)
    if preloaded_dataset:
        return jsonify({
            'result': 'success',
            'preloaded_dataset': { k: preloaded_dataset[k] for k in [
                'title', 'file_name', 'id', 'description', 'preloaded'
            ]}
        })
    else:
        return jsonify({
            'result': 'failure',
        }, status=400)

def delete(self, document_id):
    args = documentDeleteParser.parse_args()
    project_id = args.get('project_id')
    result = db_access.delete_document(project_id, document_id)
    return jsonify({
        "message": "Successfully deleted document.",
        "id": int(result['id'])
    })

def post(self):
    args = interactionTermPostParser.parse_args()
    project_id = args.get('projectId')
    dataset_id = args.get('datasetId')
    interaction_term_ids = args.get('interactionTermIds')
    data = db_access.insert_interaction_term(project_id, dataset_id, interaction_term_ids)
    return jsonify(data)

def post(self):
    args = timeFromParamsPostParser.parse_args()
    # TODO Implement required parameters
    numInputs = args.get('numInputs')
    sizeArray = args.get('sizeArray')
    funcArraySize = args.get('funcArraySize')
    result, status = timeEstimator(numInputs, sizeArray, funcArraySize)
    return make_response(jsonify(result))

def get(self):
    args = exportedSpecsGetParser.parse_args()
    project_id = args.get('project_id')
    exported_specs = db_access.get_exported_specs(project_id)
    return jsonify({
        'result': exported_specs,
        'length': len(exported_specs)
    })

def get(self, exported_comparison_id):
    args = dataFromExportedComparisonGetParser.parse_args()
    project_id = args.get('project_id')

    exported_comparison = db_access.get_exported_comparison(project_id, exported_comparison_id)
    comparison_id = exported_comparison['comparison_id']
    comparison = db_access.get_comparison_by_id(comparison_id, project_id)
    return jsonify(comparison['data'])

def get(self):
    args = exportedComparisonGetParser.parse_args()
    project_id = args.get('project_id')
    exported_comparisons = db_access.get_exported_comparisons(project_id)
    return jsonify({
        'result': exported_comparisons,
        'length': len(exported_comparisons)
    })

def get(self, exported_regression_id):
    args = dataFromExportedRegressionGetParser.parse_args()
    project_id = args.get('project_id')

    exported_regression = db_access.get_exported_regression(project_id, exported_regression_id)
    regression_id = exported_regression['regression_id']
    regression = db_access.get_regression_by_id(regression_id, project_id)
    return jsonify(regression['data'])

def post(self):
    args = reducePostParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')
    column_ids = args.get('column_ids')
    new_dataset_name_prefix = args.get('new_dataset_name_prefix')

    reduce_task = reduce_pipeline.apply_async(
        args=[column_ids, new_dataset_name_prefix, dataset_id, project_id])
    return make_response(jsonify({ 'taskId': reduce_task.task_id }))

def delete(self, dataset_id):
    args = datasetDeleteParser.parse_args()
    project_id = args.get('project_id')
    db_result = delete_dataset(project_id, dataset_id)
    return jsonify({
        "message": "Successfully deleted dataset.",
        "id": int(db_result['id'])
    })

def get(self, user_id):
    deleted_user = delete_anonymous_data(user_id)

    response = jsonify({'user': row_to_dict(deleted_user)})
    response = set_cookies(response, {
        'username': '',
        'email': '',
        'user_id': '',
        'confirmed': str(False)
    }, expires=0)
    return response

def post(self):
    args = exportedCorrelationPostParser.parse_args()
    project_id = args.get('project_id')
    correlation_id = args.get('correlation_id')
    data = args.get('data')
    conditionals = args.get('conditionals')
    config = args.get('config')

    result = db_access.insert_exported_correlation(project_id, correlation_id, data, conditionals, config)
    result['spec'] = db_access.get_correlation_by_id(correlation_id, project_id)['spec']
    return jsonify(result)

def get(self, exported_spec_id):
    args = visualizationFromExportedSpecGetParser.parse_args()
    project_id = args.get('project_id')

    exported_spec = db_access.get_exported_spec(project_id, exported_spec_id)
    spec_id = exported_spec['spec_id']
    spec = db_access.get_spec(spec_id, project_id)
    result = {
        'spec': spec,
        'visualization': spec['data']
    }
    return jsonify(result)

def project_auth(project_id):
    matching_project = Project.query.get_or_404(project_id)

    authorized = False
    message = {}
    status = 401
    if is_authorized_user(current_user, matching_project):
        authorized = True
        status = 200
    else:
        message = {'status': 'error', 'message': 'Not authorized'}

    return authorized, jsonify(message, status=status)

def post(self):
    args = unpivotPostParser.parse_args()
    project_id = args.get('project_id')
    dataset_id = args.get('dataset_id')
    pivot_fields = args.get('pivot_fields')
    variable_name = args.get('variable_name')
    value_name = args.get('value_name')
    new_dataset_name_prefix = args.get('new_dataset_name_prefix')

    unpivot_task = unpivot_pipeline.apply_async(
        args=[pivot_fields, variable_name, value_name, new_dataset_name_prefix, dataset_id, project_id])
    return make_response(jsonify({ 'taskId': unpivot_task.task_id }))

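# The unpivot_pipeline task itself is not shown here. For orientation, a hedged
# sketch of the pandas melt these parameters roughly correspond to; the actual
# task may handle id columns and naming differently.
import pandas as pd

def unpivot_dataset(df, pivot_fields, variable_name, value_name):
    id_vars = [c for c in df.columns if c not in pivot_fields]
    return pd.melt(df, id_vars=id_vars, value_vars=pivot_fields,
                   var_name=variable_name, value_name=value_name)
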
def post(self):
    args = aggregationFromSpecPostParser.parse_args()
    project_id = args.get('projectId')
    spec = args.get('spec')
    config = args.get('config')
    conditionals = args.get('conditionals')

    aggregation_doc = db_access.get_aggregation_from_spec(
        project_id, spec, config=config, conditionals=conditionals)
    if aggregation_doc and not current_app.config['RECOMPUTE_STATISTICS']:
        aggregation_data = aggregation_doc['data']
        aggregation_data['id'] = aggregation_doc['id']
        return jsonify(aggregation_data)
    else:
        aggregation_task = aggregation_pipeline.apply_async(
            args=[spec, project_id, config, conditionals])
        return jsonify({
            'task_id': aggregation_task.task_id,
            'compute': True
        }, status=202)

def get(self, dataset_id):
    args = datasetGetParser.parse_args()
    project_id = args.get('project_id')

    dataset = db_access.get_dataset(project_id, dataset_id)
    sample = get_dataset_sample(dataset_id, project_id)

    response = {
        'id': dataset_id,
        'title': dataset.get('title'),
        'preloaded': dataset.get('preloaded'),
        'details': sample
    }
    return jsonify(response)

def get(self):
    args = exportedAnalysesGetParser.parse_args()
    project_id = args.get('project_id')
    result_types = args.get('result_types')

    exported_results = {}
    if 'aggregation' in result_types:
        exported_results['aggregation'] = db_access.get_exported_aggregations(project_id)
    if 'comparison' in result_types:
        exported_results['comparison'] = db_access.get_exported_comparisons(project_id)
    if 'correlation' in result_types:
        exported_results['correlation'] = db_access.get_exported_correlations(project_id)
    if 'regression' in result_types:
        exported_results['regression'] = db_access.get_exported_regressions(project_id)

    return jsonify(exported_results)

def post(self):
    args = joinPostParser.parse_args()
    project_id = args.get('project_id')
    left_dataset_id = args.get('left_dataset_id')
    right_dataset_id = args.get('right_dataset_id')
    on = args.get('on')
    left_on = args.get('left_on')
    right_on = args.get('right_on')
    how = args.get('how')
    left_suffix = args.get('left_suffix')
    right_suffix = args.get('right_suffix')
    new_dataset_name_prefix = args.get('new_dataset_name_prefix')

    join_task = join_pipeline.apply_async(args=[
        left_dataset_id, right_dataset_id, on, left_on, right_on,
        how, left_suffix, right_suffix, new_dataset_name_prefix, project_id
    ])
    return make_response(jsonify({ 'taskId': join_task.task_id }))

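# The join_pipeline task is not shown in this section. For orientation, a
# hedged sketch of the kind of pandas merge these arguments map onto;
# illustrative only, the real task may load and validate the datasets first.
import pandas as pd

def join_datasets(left_df, right_df, on=None, left_on=None, right_on=None,
                  how='inner', left_suffix='_left', right_suffix='_right'):
    if on:
        return pd.merge(left_df, right_df, how=how, on=on,
                        suffixes=(left_suffix, right_suffix))
    return pd.merge(left_df, right_df, how=how, left_on=left_on,
                    right_on=right_on, suffixes=(left_suffix, right_suffix))
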
def post(self):
    args = exportedSpecsPostParser.parse_args()
    project_id = args.get('project_id')
    spec_id = args.get('spec_id')
    data = args.get('data')
    conditionals = args.get('conditionals')
    config = args.get('config')

    existing_exported_spec = db_access.get_exported_spec_by_fields(
        project_id,
        spec_id,
        conditionals=conditionals,
        config=config
    )
    if existing_exported_spec:
        result = { 'result': 'Visualization already exported.' }
    else:
        result = db_access.insert_exported_spec(project_id, spec_id, data, conditionals, config)
    return jsonify(result)

def get(self):
    args = projectsGetParser.parse_args()
    user_id = args.get('user_id')
    preloaded = args.get('preloaded')
    private = args.get('private')

    query_args = {}
    if 'preloaded' in args:
        query_args['preloaded'] = preloaded
    if 'private' in args:
        query_args['private'] = private
    if 'user_id' in args and user_id:
        user = load_account(user_id)
        if user.is_global_admin():
            del query_args['private']
        if not user.is_global_admin() and not preloaded:
            query_args['user_id'] = user_id

    return jsonify({'projects': db_access.get_projects(**query_args)})

def get(self):
    args = exportedRegressionGetParser.parse_args()
    project_id = args.get('project_id')
    exported_regressions = db_access.get_exported_regressions(project_id)
    return jsonify({ 'result': exported_regressions })

def get(self):
    args = exportedCorrelationGetParser.parse_args()
    project_id = args.get('project_id')
    exported_correlations = db_access.get_exported_correlations(project_id)
    return jsonify({ 'result': exported_correlations })