def get_check_dataset(dataset_id):
    """Wrapper for `dataset.get` function in `db` package. Meant for use with the API.

    Checks the following conditions and raises NotFound exception if they
    aren't met:
    * Specified dataset exists.
    * Current user is allowed to access this dataset.

    Args:
        dataset_id: ID of the dataset to fetch.

    Returns:
        The dataset dict returned by `db.dataset.get`.

    Raises:
        api_exceptions.APINotFound: if the dataset doesn't exist, or the
            current user may not access it. A private dataset deliberately
            reports "not found" rather than "forbidden" so its existence
            is not leaked to unauthorized users.
    """
    try:
        ds = db.dataset.get(dataset_id)
    except db.exceptions.NoDataFoundException as e:
        # Chain the original exception so the underlying cause is kept
        # in the traceback for debugging.
        raise api_exceptions.APINotFound("Can't find this dataset.") from e

    # Public datasets are visible to everyone; private ones only to their author.
    if ds["public"]:
        return ds
    if current_user.is_authenticated and ds["author"] == current_user.id:
        return ds
    raise api_exceptions.APINotFound("Can't find this dataset.")
# Exemplo n.º 2
# 0
def get_high_level(mbid):
    """Endpoint for fetching high-level data.
    If there is more than one document with the same mbid, you can specify
    an offset as a query parameter in the form of ?n=x, where x is an integer
    starting from 0
    """
    # Normalise the MBID and parse the optional ?n= offset (404/400 on bad input).
    mbid, offset = _validate_data_arguments(mbid, request.args.get("n"))
    try:
        document = db.data.load_high_level(mbid, offset)
    except NoDataFoundException:
        raise exceptions.APINotFound("Not found")
    return jsonify(document)
# Exemplo n.º 3
# 0
def _validate_data_arguments(mbid, offset):
    """Validate the mbid and offset. If the mbid is not a valid uuid, raise 404.
    If the offset is None, return 0, otherwise interpret it as a number. If it is
    not a number, raise 400."""
    try:
        mbid = str(uuid.UUID(mbid))
    except ValueError:
        # an invalid uuid is 404
        raise exceptions.APINotFound("Not found")

    if offset:
        try:
            offset = int(offset)
        except ValueError:
            raise exceptions.APIBadRequest("Offset must be an integer value")
    else:
        offset = 0

    return mbid, offset
# Exemplo n.º 4
# 0
def job_details(job_id):
    """Returns the details of a particular job.
       API key argument is required.

       **Example Response**:

       .. sourcecode:: json

          {
              "created": "Tue, 07 Jun 2016 22:12:32 GMT",
              "dataset": {
                  "author": 1,
                  "classes": [
                      {
                          "description": null,
                          "id": "141",
                          "name": "Class2",
                          "recordings": [
                              "d08ab44b-94c8-482b-a67f-a683a30fbe5a",
                              "2618cb1d-8699-49df-93f7-a8afea6c914f"
                          ]
                      },
                      {
                          "description": null,
                          "id": "142",
                          "name": "Class1",
                          "recordings": [
                              "5251c17c-c161-4e73-8b1c-4231e8e39095",
                              "c0dccd50-f9dc-476c-b1f1-84f00adeab51"
                          ]
                      }
                  ],
                  "created": "Mon, 02 May 2016 16:41:08 GMT",
                  "description": "",
                  "id": "5375e0ff-a6d0-44a3-bee1-05d46fbe6bd5",
                  "last_edited": "Mon, 02 May 2016 16:41:08 GMT",
                  "name": "test4",
                  "public": true
              },
              "dataset_id": "5375e0ff-a6d0-44a3-bee1-05d46fbe6bd5",
              "eval_location": "local",
              "id": "7804abe5-58be-4c9c-a787-22b91d031489",
              "options": {
                  "filter_type": null,
                  "normalize": false
              },
              "result": null,
              "snapshot_id": "2d51df50-6b71-410e-bf9a-7e877fc9c6c0",
              "status": "pending",
              "status_msg": null,
              "testing_snapshot": null,
              "training_snapshot": null,
              "updated": "Tue, 07 Jun 2016 22:12:32 GMT"
          }
    """
    evaluation_job = db.dataset_eval.get_job(job_id)
    if not evaluation_job:
        raise exceptions.APINotFound('No such job')

    # Embed the full dataset; get_check_dataset also enforces access rights
    # (404 if the dataset is private and not owned by the current user).
    dataset = datasets.get_check_dataset(evaluation_job['dataset_id'])
    evaluation_job['dataset'] = dataset
    return jsonify(evaluation_job)