def create_dataset():
    """Create a new dataset in the datastore for the project.

    The dataset schema and rows are given in the request body. Dataset
    annotations are optional. The expected request body format is:

        {
          "columns": [{"id": 0, "name": "string", "type": "string"}],
          "rows": [{"id": 0, "values": ["string"]}],
          "annotations": [
            {"columnId": 0, "rowId": 0, "key": "string", "value": "string"}
          ]
        }

    Returns:
        Flask JSON response containing the serialized new dataset.

    Raises:
        srv.InvalidRequest: if the request body is malformed or rejected
            by the datastore API.
    """
    # Validate the request body: columns and rows are mandatory,
    # annotations are optional.
    obj = srv.validate_json_request(
        request,
        required=[labels.COLUMNS, labels.ROWS],
        optional=[labels.ANNOTATIONS]
    )
    columns = deserialize.DATASET_COLUMNS(obj[labels.COLUMNS])
    rows = [deserialize.DATASET_ROW(row) for row in obj[labels.ROWS]]
    annotations = None
    if labels.ANNOTATIONS in obj:
        annotations = DatasetMetadata()
        for anno in obj[labels.ANNOTATIONS]:
            a = deserialize.ANNOTATION(anno)
            # Classify by which identifiers are present: no column id ->
            # row annotation, no row id -> column annotation, both ids
            # present -> cell annotation.
            if a.column_id is None:
                annotations.rows.append(a)
            elif a.row_id is None:
                annotations.columns.append(a)
            else:
                annotations.cells.append(a)
    try:
        dataset = api.datasets.create_dataset(
            project_id=config.project_id,
            columns=columns,
            rows=rows,
            annotations=annotations
        )
        return jsonify(dataset)
    except ValueError as ex:
        # Chain the original error so the cause is preserved for debugging.
        raise srv.InvalidRequest(str(ex)) from ex
def get_dataset_descriptor(dataset_id):
    """Get the descriptor for the dataset with given identifier.

    Parameters
    ----------
    dataset_id: string
        Unique dataset identifier.

    Returns:
        Flask JSON response containing the dataset descriptor.

    Raises:
        srv.InvalidRequest: if the API rejects the request.
        srv.ResourceNotFound: if no dataset with the given identifier exists.
    """
    try:
        dataset = api.datasets.get_dataset_descriptor(
            project_id=config.project_id,
            dataset_id=dataset_id
        )
        # A None result indicates that the dataset is unknown.
        if dataset is not None:
            return jsonify(dataset)
    except ValueError as ex:
        raise srv.InvalidRequest(str(ex)) from ex
    raise srv.ResourceNotFound('unknown dataset \'' + dataset_id + '\'')
def get_dataset(dataset_id):
    """Get the dataset with given identifier that has been generated by a
    curation workflow.

    Pagination is controlled via the optional query parameters referenced
    by PAGE_OFFSET and PAGE_LIMIT; both default to None when absent.

    Parameters
    ----------
    dataset_id: string
        Unique dataset identifier.

    Returns:
        Flask JSON response containing the dataset rows.

    Raises:
        srv.InvalidRequest: if the API rejects the request (e.g., an
            invalid offset or limit value).
        srv.ResourceNotFound: if no dataset with the given identifier exists.
    """
    # Fetch dataset rows, honoring optional offset/limit query parameters.
    try:
        dataset = api.datasets.get_dataset(
            project_id=config.project_id,
            dataset_id=dataset_id,
            offset=request.args.get(PAGE_OFFSET),
            limit=request.args.get(PAGE_LIMIT)
        )
        # A None result indicates that the dataset is unknown.
        if dataset is not None:
            return jsonify(dataset)
    except ValueError as ex:
        raise srv.InvalidRequest(str(ex)) from ex
    raise srv.ResourceNotFound('unknown dataset \'' + dataset_id + '\'')
def get_dataset_chart_view(branch_id, workflow_id, module_id, chart_id):
    """Get content of a dataset chart view for a given workflow module.

    Parameters
    ----------
    branch_id: string
        Unique branch identifier.
    workflow_id: string
        Unique workflow identifier.
    module_id: string
        Unique module identifier.
    chart_id: string
        Unique chart identifier.

    Returns:
        Flask JSON response containing the chart view content.

    Raises:
        srv.InvalidRequest: if the API rejects the request.
        srv.ResourceNotFound: if any of the referenced resources is unknown.
    """
    try:
        view = api.views.get_dataset_chart_view(
            project_id=config.project_id,
            branch_id=branch_id,
            workflow_id=workflow_id,
            module_id=module_id,
            chart_id=chart_id
        )
    except ValueError as ex:
        raise srv.InvalidRequest(str(ex)) from ex
    # A None result indicates that at least one resource is unknown.
    if view is not None:
        return jsonify(view)
    raise srv.ResourceNotFound(
        'unknown branch \'' + branch_id + '\', workflow \'' + workflow_id +
        '\', module \'' + module_id + '\' or chart \'' + chart_id + '\''
    )
def update_dataset_annotation(dataset_id):
    """Update an annotation that is associated with a component of the
    given dataset.

    Expected request body:

        {
          "columnId": 0,
          "rowId": 0,
          "key": "string",
          "oldValue": "string" | int | float,
          "newValue": "string" | int | float
        }

    Parameters
    ----------
    dataset_id: string
        Unique dataset identifier.

    Returns:
        Flask JSON response containing the updated annotations.

    Raises:
        srv.InvalidRequest: if the request body is malformed or rejected
            by the API.
        srv.ResourceNotFound: if no dataset with the given identifier exists.
    """
    # Validate the request. Use the same label constants that are used to
    # read the values below (the original listed hard-coded strings and
    # had 'key' in both the required and optional lists).
    # NOTE(review): assumes labels.KEY == 'key', labels.COLUMN_ID ==
    # 'columnId', etc. — consistent with the documented request body.
    obj = srv.validate_json_request(
        request,
        required=[labels.KEY],
        optional=[
            labels.COLUMN_ID,
            labels.ROW_ID,
            labels.OLD_VALUE,
            labels.NEW_VALUE
        ]
    )
    # Create update statement and execute. The result is None if no dataset
    # with the given identifier exists.
    key = obj.get(labels.KEY)
    column_id = obj.get(labels.COLUMN_ID)
    row_id = obj.get(labels.ROW_ID)
    old_value = obj.get(labels.OLD_VALUE)
    new_value = obj.get(labels.NEW_VALUE)
    try:
        annotations = api.datasets.update_annotation(
            project_id=config.project_id,
            dataset_id=dataset_id,
            key=key,
            column_id=column_id,
            row_id=row_id,
            old_value=old_value,
            new_value=new_value
        )
        if annotations is not None:
            return jsonify(annotations)
    except ValueError as ex:
        raise srv.InvalidRequest(str(ex)) from ex
    raise srv.ResourceNotFound('unknown dataset \'' + dataset_id + '\'')