How to use the webserver.views.api.exceptions.APINotFound exception in webserver

To help you get started, we’ve selected a few examples from the webserver package, based on popular ways APINotFound is used in public projects.

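Across these examples the pattern is the same: APINotFound is raised from inside a Flask view whenever a requested record cannot be found, and the webserver's API error handling is expected to turn it into a 404 response. Below is a minimal sketch of that pattern, assuming the project layout visible in the snippets; the view name and its offset default are placeholders, and the body mirrors the core.py example further down.

from flask import jsonify

import db.data
from db.exceptions import NoDataFoundException
from webserver.views.api import exceptions as api_exceptions


def get_low_level(mbid, offset=0):
    """Hypothetical view body: translate a missing record into an API-level 404."""
    try:
        return jsonify(db.data.load_low_level(str(mbid), offset))
    except NoDataFoundException:
        # The database layer signals "nothing stored for this MBID/offset";
        # re-raising it as APINotFound lets the API layer answer with a 404.
        raise api_exceptions.APINotFound("Not found")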

github metabrainz / acousticbrainz-server / webserver / views / api / v1 / datasets.py
def get_check_dataset(dataset_id, write=False):
    """Wrapper for `dataset.get` function in `db` package. Meant for use with the API.

    Checks the following conditions and raises an APINotFound exception if
    they aren't met:
    * Specified dataset exists.
    * Current user is allowed to access this dataset.

    When ``write=True``, an APIUnauthorized exception is raised if the
    current user is not the dataset's author.
    """
    try:
        ds = db.dataset.get(dataset_id)
    except db.exceptions.NoDataFoundException as e:
        raise api_exceptions.APINotFound("Can't find this dataset.")
    if not write:
        if ds["public"] or (current_user.is_authenticated and
                            ds["author"] == current_user.id):
            return ds
        else:
            raise api_exceptions.APINotFound("Can't find this dataset.")
    else:
        if (current_user.is_authenticated and
                            ds["author"] == current_user.id):
            return ds
        else:
            raise api_exceptions.APIUnauthorized("Only the author can modify the dataset.")
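A hedged sketch of how views might build on get_check_dataset; the two functions below are illustrative only and are not taken from the repository.

from flask import jsonify


def get_dataset(dataset_id):
    """Read-only access: get_check_dataset raises APINotFound when the dataset
    is missing or the current user is not allowed to see it."""
    ds = get_check_dataset(dataset_id)               # write=False by default
    return jsonify(ds)


def update_dataset(dataset_id):
    """Write access: with write=True the helper additionally raises
    APIUnauthorized when the current user is not the dataset's author."""
    ds = get_check_dataset(dataset_id, write=True)
    # ... apply the requested changes to ``ds`` here ...
    return jsonify(success=True)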
github metabrainz / acousticbrainz-server / webserver / views / api / v1 / dataset_eval.py
"options": {
               "filter_type": null,
               "normalize": false
           },
           "result": null,
           "snapshot_id": "2d51df50-6b71-410e-bf9a-7e877fc9c6c0",
           "status": "pending",
           "status_msg": null,
           "testing_snapshot": null,
           "training_snapshot": null,
           "updated": "Tue, 07 Jun 2016 22:12:32 GMT"
    }
    """
    job = db.dataset_eval.get_job(job_id)
    if not job:
        raise exceptions.APINotFound('No such job')

    job['dataset'] = datasets.get_check_dataset(job['dataset_id'])
    return jsonify(job)
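The snippets only ever raise these exceptions, so their definitions are not shown on this page. For orientation, an exception hierarchy of this kind usually looks roughly like the sketch below; the base-class attributes, the concrete status codes, and the error-handler wiring are assumptions, not the project's actual webserver/views/api/exceptions.py.

from flask import jsonify


class APIError(Exception):
    """Assumed base class: carries a message and an HTTP status code."""
    status_code = 500

    def __init__(self, message):
        super().__init__(message)
        self.message = message


class APIBadRequest(APIError):
    status_code = 400


class APIUnauthorized(APIError):
    status_code = 401


class APINotFound(APIError):
    status_code = 404


def register_api_error_handler(app):
    """Assumed wiring: any uncaught APIError becomes a JSON error response."""
    @app.errorhandler(APIError)
    def handle_api_error(error):
        response = jsonify({"message": error.message})
        response.status_code = error.status_code
        return response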
github metabrainz / acousticbrainz-server / webserver / views / api / v1 / core.py
This endpoint returns one document at a time. If there are many submissions
    for an MBID, you can browse through them by specifying an offset parameter
    ``n``. Documents are sorted by their submission time.

    You can get the total number of low-level submissions using the
    ``/<mbid>/count`` endpoint.

    :query n: *Optional.* Integer specifying an offset for a document.

    :resheader Content-Type: *application/json*
    """
    offset = _validate_offset(request.args.get("n"))
    try:
        return jsonify(db.data.load_low_level(str(mbid), offset))
    except NoDataFoundException:
        raise webserver.views.api.exceptions.APINotFound("Not found")
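From a client's point of view, the APINotFound raised above simply arrives as an HTTP 404. A hedged example using requests; the URL is an assumption about how this view is routed, and the exact shape of the error body is not shown in these snippets.

import requests

mbid = "0383dadf-2a4e-4d10-a46a-e9e041da8eb3"      # illustrative MBID
resp = requests.get(f"https://acousticbrainz.org/api/v1/{mbid}/low-level",
                    params={"n": 0})

if resp.status_code == 404:
    # APINotFound surfaced as a 404; print the body as-is since its shape
    # isn't documented in the excerpts above.
    print("no low-level submission:", resp.text)
else:
    low_level = resp.json()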
github metabrainz / acousticbrainz-server / webserver / views / api / v1 / path.py
The available metrics are listed in :py:const:`~similarity.metrics.BASE_METRICS`.

    :resheader Content-Type: *application/json*
    """
    offset = validate_offset(request.args.get("n"))
    metric, distance_type, n_trees, n_neighbours = _check_index_params(metric)
    try:
        index = AnnoyModel(metric, n_trees=n_trees, distance_type=distance_type, load_existing=True)
    except IndexNotFoundException:
        raise webserver.views.api.exceptions.APIBadRequest("Index does not exist with specified parameters.")

    try:
        ids, similar_recordings, distances = index.get_nns_by_mbid(str(mbid), offset, n_neighbours)
        return jsonify(similar_recordings)
    except NoDataFoundException:
        raise webserver.views.api.exceptions.APINotFound("No submission exists for the given (MBID, offset) combination.")
    except ItemNotFoundException:
        raise webserver.views.api.exceptions.APINotFound("The submission of interest is not indexed.")
github metabrainz / acousticbrainz-server / webserver / views / api / legacy.py
def get_high_level(mbid):
    """Endpoint for fetching high-level data.
    If there is more than one document with the same mbid, you can specify
    an offset as a query parameter in the form of ?n=x, where x is an integer
    starting from 0
    """
    mbid, offset = _validate_data_arguments(mbid, request.args.get("n"))
    try:
        return jsonify(db.data.load_high_level(mbid, offset))
    except NoDataFoundException:
        raise exceptions.APINotFound("Not found")
github metabrainz / acousticbrainz-server / webserver / views / api / legacy.py
def _validate_data_arguments(mbid, offset):
    """Validate the mbid and offset. If the mbid is not a valid uuid, raise 404.
    If the offset is None, return 0, otherwise interpret it as a number. If it is
    not a number, raise 400."""
    try:
        uuid.UUID(mbid)
    except ValueError:
        # an invalid uuid is 404
        raise exceptions.APINotFound("Not found")

    if offset:
        try:
            offset = int(offset)
        except ValueError:
            raise exceptions.APIBadRequest("Offset must be an integer value")
    else:
        offset = 0

    return mbid, offset
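A hedged illustration of this helper's contract, written as a pytest-style sketch rather than taken from the project's test suite.

import pytest


def test_validate_data_arguments_examples():
    mbid = "0383dadf-2a4e-4d10-a46a-e9e041da8eb3"   # any syntactically valid UUID

    assert _validate_data_arguments(mbid, None) == (mbid, 0)   # missing offset -> 0
    assert _validate_data_arguments(mbid, "3") == (mbid, 3)    # numeric offset is parsed

    with pytest.raises(exceptions.APINotFound):                # invalid MBID -> 404
        _validate_data_arguments("not-a-uuid", None)

    with pytest.raises(exceptions.APIBadRequest):              # non-integer offset -> 400
        _validate_data_arguments(mbid, "abc")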