def _storage_api():
    """ Returns slumber access to storage API. """
    storage_service_url = _storage_service_url()
    username = get_setting('storage_service_user', 'test')
    api_key = get_setting('storage_service_apikey', None)
    api = slumber.API(storage_service_url, auth=TastypieApikeyAuth(username, api_key))
    return api
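A minimal usage sketch for the helper above (not from the source), assuming the storage service exposes a Tastypie ``location`` endpoint as in the later examples; the purpose code is a placeholder:

# Hypothetical usage; slumber turns attribute access into URL segments, so
# this issues GET <storage_service_url>/location/?purpose=AS&offset=0.
api = _storage_api()
locations = api.location.get(purpose='AS', offset=0)
for loc in locations['objects']:
    print(loc['uuid'], loc['relative_path'])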
Example #2
def create_file(uuid,
                origin_location,
                origin_path,
                current_location,
                current_path,
                package_type,
                size,
                update=False,
                related_package_uuid=None,
                events=None,
                agents=None,
                aip_subtype=None):
    """Creates a new file. Returns a tuple of (resulting dict, None) on
    success, (None, error) on failure. Note: for backwards compatibility
    reasons, the SS API calls "packages" "files" and this function should be
    read as ``create_package``.

    origin_location and current_location should be URIs for the storage service.
    """
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        return (None, 'Pipeline not available, see logs.')
    if events is None:
        events = []
    if agents is None:
        agents = []
    new_file = {
        'uuid': uuid,
        'origin_location': origin_location,
        'origin_path': origin_path,
        'current_location': current_location,
        'current_path': current_path,
        'package_type': package_type,
        'aip_subtype': aip_subtype,
        'size': size,
        'origin_pipeline': pipeline['resource_uri'],
        'related_package_uuid': related_package_uuid,
        'events': events,
        'agents': agents
    }

    LOGGER.info("Creating file with %s", new_file)
    try:
        session = _storage_api_session()
        if update:
            new_file['reingest'] = pipeline['uuid']
            url = _storage_service_url() + 'file/' + uuid + '/'
            response = session.put(url, json=new_file)
        else:
            url = _storage_service_url() + 'file/'
            response = session.post(url, json=new_file)
    except requests.exceptions.RequestException as e:
        LOGGER.warning("Unable to create file from %s because %s", new_file, e)
        return (None, e)
    # TODO: if the SS returns a 500 error, then the dashboard will not signal
    # to the user that AIP storage has failed! This is not good.
    LOGGER.info('Status code of create file/package request: %s',
                response.status_code)
    file_ = response.json()
    return (file_, None)
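A hedged usage sketch for ``create_file``; every identifier below is a placeholder, not a real resource:

package, err = create_file(
    uuid='00000000-0000-0000-0000-000000000000',
    origin_location='/api/v2/location/<origin-uuid>/',
    origin_path='transfer.tar',
    current_location='/api/v2/location/<aip-store-uuid>/',
    current_path='transfer.tar',
    package_type='AIP',
    size=1024,
)
if err is not None:
    LOGGER.error('Unable to store package: %s', err)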
Example #3
def call(jobs):
    # Parse arguments
    parser = argparse.ArgumentParser(description='Convert folder into a bag.')
    parser.add_argument('operation')
    parser.add_argument('destination')
    parser.add_argument('sip_directory')
    parser.add_argument(
        'payload_entries',
        metavar='Payload',
        nargs='+',
        help='All the files/folders that should go in the bag.')
    parser.add_argument('--writer', dest='writer')

    algorithm = get_setting('checksum_type',
                            mcpclient_settings.DEFAULT_CHECKSUM_ALGORITHM)

    for job in jobs:
        with job.JobContext():
            try:
                args = parser.parse_args(job.args[1:])
                bag_with_empty_directories(job, args.operation,
                                           args.destination,
                                           args.sip_directory,
                                           args.payload_entries, args.writer,
                                           algorithm)
            except BagException:
                pass
Example #4
def create_pipeline(create_default_locations=False,
                    shared_path=None,
                    api_username=None,
                    api_key=None,
                    remote_name=None):
    pipeline = {
        'uuid': get_setting('dashboard_uuid'),
        'description': "Archivematica on {}".format(platform.node()),
        'create_default_locations': create_default_locations,
        'shared_path': shared_path,
        'api_username': api_username,
        'api_key': api_key,
    }
    if remote_name is not None:
        pipeline['remote_name'] = remote_name
    LOGGER.info("Creating pipeline in storage service with %s", pipeline)
    url = _storage_service_url() + 'pipeline/'
    try:
        response = _storage_api_session().post(url, json=pipeline)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        LOGGER.warning(
            'Unable to create Archivematica pipeline in storage service from %s because %s',
            pipeline,
            e,
            exc_info=True)
        raise
    return True
Example #5
def get_location(path=None, purpose=None, space=None):
    """ Returns a list of storage locations, filtered by parameters.

    Queries the storage service and returns a list of storage locations,
    optionally filtered by purpose, containing space or path.

    purpose: How the storage is used.  Should reference storage service
        purposes, found in storage_service.locations.models.py
    path: Path to location.  If a space is passed in, paths starting with /
        have the space's path stripped.
    """
    api = _storage_api()
    offset = 0
    return_locations = []
    if space and path:
        path = _storage_relative_from_absolute(path, space['path'])
        space = space['uuid']
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        return None
    while True:
        locations = api.location.get(pipeline__uuid=pipeline['uuid'],
                                     relative_path=path,
                                     purpose=purpose,
                                     space=space,
                                     offset=offset)
        LOGGER.debug("Storage locations retrieved: {}".format(locations))
        return_locations += locations['objects']
        if not locations['meta']['next']:
            break
        offset += locations['meta']['limit']

    LOGGER.info("Storage locations returned: {}".format(return_locations))
    return return_locations
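Usage sketch; 'AS' is assumed here to be the storage service's purpose code for AIP storage locations (check the Location model for the full set):

aip_stores = get_location(purpose='AS')
if aip_stores:
    LOGGER.info('Found %d AIP storage location(s)', len(aip_stores))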
Example #6
def updateSizeAndChecksum(fileUUID,
                          filePath,
                          date,
                          eventIdentifierUUID,
                          fileSize=None,
                          checksum=None,
                          checksumType=None,
                          add_event=True):
    """
    Update a File with its size, checksum and checksum type. These are
    parameters that can be either generated or provided via keywords.

    Finally, insert the corresponding Event. This behavior can be cancelled
    using the boolean keyword 'add_event'.
    """
    if not fileSize:
        fileSize = os.path.getsize(filePath)
    if not checksumType:
        checksumType = get_setting('checksum_type', 'sha256')
    if not checksum:
        checksum = get_file_checksum(filePath, checksumType)

    File.objects.filter(uuid=fileUUID).update(size=fileSize,
                                              checksum=checksum,
                                              checksumtype=checksumType)

    if add_event:
        insertIntoEvents(
            fileUUID=fileUUID,
            eventType='message digest calculation',
            eventDateTime=date,
            eventDetail='program="python"; module="hashlib.{}()"'.format(
                checksumType),
            eventOutcomeDetailNote=checksum)
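A usage sketch with placeholder values; size, checksum and checksum type are derived from the file on disk when omitted, as above:

from uuid import uuid4

updateSizeAndChecksum(
    fileUUID='00000000-0000-0000-0000-000000000000',
    filePath='/var/archivematica/sharedDirectory/objects/file.tif',
    date='2020-01-01T00:00:00',
    eventIdentifierUUID=str(uuid4()),
)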
Example #7
def create_pipeline(
    create_default_locations=False,
    shared_path=None,
    api_username=None,
    api_key=None,
    remote_name=None,
):
    pipeline = {
        "uuid": get_setting("dashboard_uuid"),
        "description": "Archivematica on {}".format(platform.node()),
        "create_default_locations": create_default_locations,
        "shared_path": shared_path,
        "api_username": api_username,
        "api_key": api_key,
    }
    if remote_name is not None:
        pipeline["remote_name"] = remote_name
    LOGGER.info("Creating pipeline in storage service with %s", pipeline)
    url = _storage_service_url() + "pipeline/"
    try:
        response = _storage_api_session().post(url, json=pipeline)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        LOGGER.warning(
            "Unable to create Archivematica pipeline in storage service from %s because %s",
            pipeline,
            e,
            exc_info=True,
        )
        raise
    return True
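Callers typically wrap this raising variant; the shared path and credentials below are placeholders:

try:
    create_pipeline(
        create_default_locations=True,
        shared_path='/var/archivematica/sharedDirectory',
        api_username='admin',
        api_key='secret',
    )
except requests.exceptions.RequestException:
    LOGGER.error('Pipeline registration failed; see logs.')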
Example #8
def request_reingest(package_uuid, reingest_type, processing_config):
    """
    Requests `package_uuid` for reingest in this pipeline.

    `reingest_type` determines what files will be copied for reingest, defined
    by ReingestAIPForm.REINGEST_CHOICES.

    Returns a dict: {'error': [True|False], 'message': '<error message>'}
    """
    api_request = {
        "pipeline": am.get_setting("dashboard_uuid"),
        "reingest_type": reingest_type,
        "processing_config": processing_config,
    }
    url = _storage_service_url() + "file/" + package_uuid + "/reingest/"
    try:
        with ss_api_timer(function="request_reingest"):
            response = _storage_api_slow_session().post(url, json=api_request)
    except requests.ConnectionError:
        LOGGER.exception("Could not connect to storage service")
        return {
            "error": True,
            "message": "Could not connect to storage service"
        }
    except requests.exceptions.RequestException:
        LOGGER.exception("Unable to reingest %s", package_uuid)
        # If the POST itself raised, ``response`` is unbound here; the
        # resulting NameError is swallowed by the broad except below.
        try:
            return response.json()
        except Exception:
            return {"error": True}
    return response.json()
Example #9
def request_reingest(package_uuid, reingest_type, processing_config):
    """
    Requests `package_uuid` for reingest in this pipeline.

    `reingest_type` determines what files will be copied for reingest, defined
    by ReingestAIPForm.REINGEST_CHOICES.

    Returns a dict: {'error': [True|False], 'message': '<error message>'}
    """
    api = _storage_api()
    api_request = {
        'pipeline': get_setting('dashboard_uuid'),
        'reingest_type': reingest_type,
        'processing_config': processing_config,
    }
    try:
        response = api.file(package_uuid).reingest.post(api_request)
    except (slumber.exceptions.HttpClientError, slumber.exceptions.HttpServerError) as e:
        LOGGER.exception("Unable to reingest {}".format(package_uuid))
        try:
            return e.response.json()
        except Exception:
            return {'error': True}
    except requests.ConnectionError as e:
        LOGGER.exception("Could not connect to storage service")
        return {'error': True, 'message': 'Could not connect to storage service'}
    return response
Example #10
def request_reingest(package_uuid, reingest_type, processing_config):
    """
    Requests `package_uuid` for reingest in this pipeline.

    `reingest_type` determines what files will be copied for reingest, defined
    by ReingestAIPForm.REINGEST_CHOICES.

    Returns a dict: {'error': [True|False], 'message': '<error message>'}
    """
    api_request = {
        'pipeline': get_setting('dashboard_uuid'),
        'reingest_type': reingest_type,
        'processing_config': processing_config,
    }
    url = _storage_service_url() + 'file/' + package_uuid + '/reingest/'
    try:
        response = _storage_api_slow_session().post(url, json=api_request)
    except requests.ConnectionError:
        LOGGER.exception("Could not connect to storage service")
        return {
            'error': True,
            'message': 'Could not connect to storage service'
        }
    except requests.exceptions.RequestException:
        LOGGER.exception("Unable to reingest %s", package_uuid)
        # If the POST itself raised, ``response`` is unbound here; the
        # resulting NameError is swallowed by the broad except below.
        try:
            return response.json()
        except Exception:
            return {'error': True}
    return response.json()
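Usage sketch; the reingest type values are assumed to mirror ReingestAIPForm.REINGEST_CHOICES (e.g. 'FULL'):

result = request_reingest(
    package_uuid='00000000-0000-0000-0000-000000000000',
    reingest_type='FULL',
    processing_config='default',
)
if result.get('error'):
    LOGGER.error('Reingest failed: %s', result.get('message', ''))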
Example #11
def create_file(uuid,
                origin_location,
                origin_path,
                current_location,
                current_path,
                package_type,
                size,
                update=False,
                related_package_uuid=None,
                events=None,
                agents=None,
                aip_subtype=None):
    """Creates a new file. Returns a tuple of (resulting dict, None) on
    success, (None, error) on failure. Note: for backwards compatibility
    reasons, the SS API calls "packages" "files" and this function should be
    read as ``create_package``.

    origin_location and current_location should be URIs for the storage service.
    """
    pipeline = get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        return (None, 'Pipeline not available, see logs.')
    if events is None:
        events = []
    if agents is None:
        agents = []
    new_file = {
        'uuid': uuid,
        'origin_location': origin_location,
        'origin_path': origin_path,
        'current_location': current_location,
        'current_path': current_path,
        'package_type': package_type,
        'aip_subtype': aip_subtype,
        'size': size,
        'origin_pipeline': pipeline['resource_uri'],
        'related_package_uuid': related_package_uuid,
        'events': events,
        'agents': agents
    }

    LOGGER.info("Creating file with %s", new_file)
    try:
        if update:
            session = _storage_api_slow_session()
            new_file['reingest'] = pipeline['uuid']
            url = _storage_service_url() + 'file/' + uuid + '/'
            response = session.put(url, json=new_file)
            return (response.json(), None)
        else:
            session = _storage_api_session()
            url = _storage_service_url() + 'file/async/'
            response = session.post(url, json=new_file, allow_redirects=False)
            return wait_for_async(response)
    except requests.exceptions.RequestException as e:
        LOGGER.warning("Unable to create file from %s because %s", new_file, e)
        return (None, e)
Example #12
def _storage_service_url():
    # Get storage service URL from DashboardSetting model
    storage_service_url = get_setting('storage_service_url', None)
    if storage_service_url is None:
        LOGGER.error("Storage server not configured.")
        storage_service_url = 'http://localhost:8000/'
    # If the URL doesn't end in a /, add one
    if storage_service_url[-1] != '/':
        storage_service_url += '/'
    storage_service_url = storage_service_url + 'api/v2/'
    return storage_service_url
Example #13
def _storage_service_url():
    # Get storage service URL from DashboardSetting model
    storage_service_url = get_setting("storage_service_url", None)
    if storage_service_url is None:
        LOGGER.error("Storage server not configured.")
        storage_service_url = "http://localhost:8000/"
    # If the URL doesn't end in a /, add one
    if storage_service_url[-1] != "/":
        storage_service_url += "/"
    storage_service_url = storage_service_url + "api/v2/"
    return storage_service_url
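An equivalent standalone sketch of the normalisation, to make the behaviour concrete (the helper name is hypothetical):

def _normalise_ss_url(base):
    if not base.endswith('/'):
        base += '/'
    return base + 'api/v2/'

assert _normalise_ss_url('http://localhost:8000') == 'http://localhost:8000/api/v2/'
assert _normalise_ss_url('http://localhost:8000/') == 'http://localhost:8000/api/v2/'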
Example #14
def _create_bag(transfer_id, transfer_path):
    """Convert the transfer directory into a bag using bagit-python."""
    algorithm = get_setting(
        "checksum_type", mcpclient_settings.DEFAULT_CHECKSUM_ALGORITHM
    )
    return make_bag(
        transfer_path,
        processes=multiprocessing.cpu_count(),
        checksums=[algorithm],
        bag_info={"External-Identifier": transfer_id},
    )
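Usage sketch; ``make_bag`` comes from the bagit-python library and rewrites the directory in place. The paths below are placeholders:

bag = _create_bag(
    transfer_id='00000000-0000-0000-0000-000000000000',
    transfer_path='/var/archivematica/sharedDirectory/currentlyProcessing/tr1',
)
print(bag.info['External-Identifier'])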
Example #15
def request_file_deletion(uuid, user_id, user_email, reason_for_deletion):
    """ Returns the server response. """

    api_request = {
        "event_reason": reason_for_deletion,
        "pipeline": get_setting("dashboard_uuid"),
        "user_email": user_email,
        "user_id": user_id,
    }
    url = _storage_service_url() + "file/" + uuid + "/delete_aip/"
    response = _storage_api_session().post(url, json=api_request)
    return response.json()
Example #16
def request_file_deletion(uuid, user_id, user_email, reason_for_deletion):
    """ Returns the server response. """

    api_request = {
        'event_reason': reason_for_deletion,
        'pipeline': get_setting('dashboard_uuid'),
        'user_email': user_email,
        'user_id': user_id,
    }
    url = _storage_service_url() + 'file/' + uuid + '/delete_aip/'
    response = _storage_api_session().post(url, json=api_request)
    return response.json()
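Usage sketch with placeholder values; the storage service records who requested the deletion and why, and its JSON response is returned verbatim:

result = request_file_deletion(
    uuid='00000000-0000-0000-0000-000000000000',
    user_id=1,
    user_email='admin@example.org',
    reason_for_deletion='Duplicate AIP',
)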
Example #17
def verify_checksums(job, bag, sip_uuid):
    """Verify that the checksums generated at the beginning of transfer match
    those generated near the end of ingest by bag, i.e., "Prepare AIP"
    (bagit_v0.0).
    """
    is_reingest = 'REIN' in SIP.objects.get(uuid=sip_uuid).sip_type
    checksum_type = get_setting('checksum_type',
                                mcpclient_settings.DEFAULT_CHECKSUM_ALGORITHM)
    removableFiles = [
        e.strip() for e in mcpclient_settings.REMOVABLE_FILES.split(',')
    ]
    try:
        manifest_path = get_manifest_path(job, bag, sip_uuid, checksum_type)
        path2checksum = parse_manifest(job, manifest_path, sip_uuid,
                                       checksum_type)
        verification_count = 0
        verification_skipped_because_reingest = 0
        for file_ in File.objects.filter(sip_id=sip_uuid):
            if (os.path.basename(file_.originallocation) in removableFiles
                    or file_.removedtime
                    or not file_.currentlocation.startswith(
                        '%SIPDirectory%objects/')
                    or file_.filegrpuse == 'manualNormalization'):
                continue
            file_path = file_.currentlocation.replace('%SIPDirectory%', '', 1)
            assert_checksum_types_match(job, file_, sip_uuid, checksum_type)
            expected_checksum = get_expected_checksum(job, file_, sip_uuid,
                                                      checksum_type,
                                                      path2checksum, file_path,
                                                      manifest_path,
                                                      is_reingest)
            if expected_checksum is None:
                verification_skipped_because_reingest += 1
                continue
            assert_checksums_match(job, file_, sip_uuid, checksum_type,
                                   expected_checksum)
            verification_count += 1
    except VerifyChecksumsError as err:
        job.print_error(repr(err))
        raise
    event_outcome_detail_note = (
        'All checksums (count={verification_count}) generated at start of'
        ' transfer match those generated by BagIt (bag).'.format(
            verification_count=verification_count))
    if verification_skipped_because_reingest:
        event_outcome_detail_note += (
            ' Note that checksum verification was skipped for {skipped_count}'
            ' file(s) because this AIP is being re-ingested and the re-ingest'
            ' payload did not include said file(s).'.format(
                skipped_count=verification_skipped_because_reingest))
    write_premis_event(job, sip_uuid, checksum_type, 'Pass',
                       event_outcome_detail_note)
    job.pyprint(event_outcome_detail_note)
Example #18
def request_file_deletion(uuid, user_id, user_email, reason_for_deletion):
    """ Returns the server response. """

    api = _storage_api()
    api_request = {
        'event_reason': reason_for_deletion,
        'pipeline':     get_setting('dashboard_uuid'),
        'user_email':   user_email,
        'user_id':      user_id
    }

    return api.file(uuid).delete_aip.post(api_request)
Example #19
def call(jobs):
    # Parse arguments
    parser = argparse.ArgumentParser(description="Convert folder into a bag.")
    parser.add_argument("destination")
    parser.add_argument("sip_directory")
    parser.add_argument("sip_uuid")

    algorithm = get_setting("checksum_type",
                            mcpclient_settings.DEFAULT_CHECKSUM_ALGORITHM)

    for job in jobs:
        with job.JobContext():
            args = parser.parse_args(job.args[1:])
            bag_with_empty_directories(job, args.destination,
                                       args.sip_directory, args.sip_uuid,
                                       algorithm)
Example #20
def copy_files(source_location, destination_location, files, api=None):
    """
    Copies `files` from `source_location` to `destination_location` using SS.

    source_location/destination_location: Dict with Location information, result
        of a call to get_location or get_location_by_uri.
    files: List of dicts with source and destination paths relative to
        source_location and destination_location, respectively.  All other
        fields ignored.
    """
    if api is None:
        api = _storage_api()
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    move_files = {
        'origin_location': source_location['resource_uri'],
        'files': files,
        'pipeline': pipeline['resource_uri'],
    }

    # Here we attempt to decode the 'source' attributes of each move-file to
    # Unicode prior to passing to Slumber's ``post`` method. Slumber will do
    # this anyway and will choke in certain specific cases, specifically where
    # the JavaScript of the dashboard has base-64-encoded a Latin-1-encoded
    # string.
    for file_ in move_files['files']:
        try:
            file_['source'] = file_['source'].decode('utf8')
        except UnicodeDecodeError:
            try:
                file_['source'] = file_['source'].decode('latin-1')
            except UnicodeError:
                pass

    try:
        ret = api.location(destination_location['uuid']).post(move_files)
    except slumber.exceptions.HttpClientError as e:
        LOGGER.warning("Unable to move files with {} because {}".format(move_files, e.content))
        return (None, e)
    except slumber.exceptions.HttpServerError as e:
        LOGGER.warning("Could not connect to storage service: {} ({})".format(
            e, e.content))
        return (None, e)
    return (ret, None)
Example #21
def copy_files(source_location, destination_location, files):
    """
    Copies `files` from `source_location` to `destination_location` using SS.

    source_location/destination_location: Dict with Location information, result
        of a call to get_location.
    files: List of dicts with source and destination paths relative to
        source_location and destination_location, respectively.  All other
        fields ignored.
    """
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    move_files = {
        'origin_location': source_location['resource_uri'],
        'files': files,
        'pipeline': pipeline['resource_uri'],
    }

    # Here we attempt to decode the 'source' attributes of each move-file to
    # Unicode prior to passing to Slumber's ``post`` method. Slumber will do
    # this anyway and will choke in certain specific cases, specifically where
    # the JavaScript of the dashboard has base-64-encoded a Latin-1-encoded
    # string.
    for file_ in move_files['files']:
        try:
            file_['source'] = file_['source'].decode('utf8')
        except UnicodeDecodeError:
            try:
                file_['source'] = file_['source'].decode('latin-1')
            except UnicodeError:
                pass

    url = _storage_service_url() + 'location/' + destination_location['uuid'] + '/'
    try:
        response = _storage_api_session().post(url, json=move_files)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        LOGGER.warning("Unable to move files with %s because %s", move_files,
                       e)
        return (None, e)
    ret = response.json()
    return (ret, None)
Example #22
def copy_files(source_location, destination_location, files):
    """
    Copies `files` from `source_location` to `destination_location` using SS.

    source_location/destination_location: Dict with Location information, result
        of a call to get_location.
    files: List of dicts with source and destination paths relative to
        source_location and destination_location, respectively.  All other
        fields ignored.
    """
    pipeline = get_pipeline(am.get_setting("dashboard_uuid"))
    move_files = {
        "origin_location": source_location["resource_uri"],
        "files": files,
        "pipeline": pipeline["resource_uri"],
    }

    # Here we attempt to decode the 'source' attributes of each move-file to
    # Unicode prior to passing to Slumber's ``post`` method. Slumber will do
    # this anyway and will choke in certain specific cases, specifically where
    # the JavaScript of the dashboard has base-64-encoded a Latin-1-encoded
    # string.
    for file_ in move_files["files"]:
        try:
            file_["source"] = file_["source"].decode("utf8")
        except UnicodeDecodeError:
            try:
                file_["source"] = file_["source"].decode("latin-1")
            except UnicodeError:
                pass

    url = _storage_service_url() + "location/" + destination_location["uuid"] + "/"
    try:
        with ss_api_timer(function="copy_files"):
            response = _storage_api_slow_session().post(url, json=move_files)
        response.raise_for_status()
        return (response.json(), None)
    except requests.exceptions.RequestException as e:
        LOGGER.warning("Unable to move files with %s because %s", move_files,
                       e)
        return (None, e)
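Usage sketch; the two locations would come from the get_location examples, and the file paths are placeholders relative to each location:

files = [{'source': 'transfer/object.tif', 'destination': 'backlog/object.tif'}]
result, err = copy_files(source_location, destination_location, files)
if err is not None:
    LOGGER.error('Unable to copy files: %s', err)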
Example #23
def create_file(uuid, origin_location, origin_path, current_location,
        current_path, package_type, size, update=False, related_package_uuid=None):
    """ Creates a new file. Returns a tuple of (resulting dict, None) on success, (None, error) on failure.

    origin_location and current_location should be URIs for the storage service.
    """

    api = _storage_api()
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        return (None, 'Pipeline not available, see logs.')
    new_file = {
        'uuid': uuid,
        'origin_location': origin_location,
        'origin_path': origin_path,
        'current_location': current_location,
        'current_path': current_path,
        'package_type': package_type,
        'size': size,
        'origin_pipeline': pipeline['resource_uri'],
        'related_package_uuid': related_package_uuid
    }

    LOGGER.info("Creating file with {}".format(new_file))
    try:
        if update:
            new_file['reingest'] = pipeline['uuid']
            file_ = api.file(uuid).put(new_file)
        else:
            file_ = api.file.post(new_file)
    except slumber.exceptions.HttpClientError as e:
        LOGGER.warning("Unable to create file from {} because {}".format(new_file, e.content))
        return (None, e)
    except slumber.exceptions.HttpServerError as e:
        LOGGER.warning("Could not connect to storage service: {} ({})".format(
            e, e.content))
        return (None, e)
    return (file_, None)
Example #24
def create_pipeline(create_default_locations=False, shared_path=None, api_username=None, api_key=None):
    api = _storage_api()
    pipeline = {
        'uuid': get_setting('dashboard_uuid'),
        'description': "Archivematica on {}".format(platform.node()),
        'create_default_locations': create_default_locations,
        'shared_path': shared_path,
        'api_username': api_username,
        'api_key': api_key,
    }
    LOGGER.info("Creating pipeline in storage service with {}".format(pipeline))
    try:
        api.pipeline.post(pipeline)
    except slumber.exceptions.HttpClientError as e:
        LOGGER.warning("Unable to create Archivematica pipeline in storage service from {} because {}".format(pipeline, e.content))
        return False
    except slumber.exceptions.HttpServerError as e:
        LOGGER.warning("Unable to create Archivematica pipeline in storage service from {} because {}".format(pipeline, e.content), exc_info=True)
        if 'column uuid is not unique' in e.content:
            pass
        else:
            raise
    return True
Example #25
def get_location(path=None, purpose=None, space=None):
    """ Returns a list of storage locations, filtered by parameters.

    Queries the storage service and returns a list of storage locations,
    optionally filtered by purpose, containing space or path.

    purpose: How the storage is used.  Should reference storage service
        purposes, found in storage_service/locations/models/location.py
    path: Path to location.  If a space is passed in, paths starting with /
        have the space's path stripped.
    """
    return_locations = []
    if space and path:
        path = _storage_relative_from_absolute(path, space["path"])
        space = space["uuid"]
    pipeline = get_pipeline(am.get_setting("dashboard_uuid"))
    if pipeline is None:
        return None
    url = _storage_service_url() + "location/"
    params = {
        "pipeline__uuid": pipeline["uuid"],
        "relative_path": path,
        "purpose": purpose,
        "space": space,
        "offset": 0,
    }
    while True:
        with ss_api_timer(function="get_location"):
            response = _storage_api_session().get(url, params=params)
        locations = response.json()
        return_locations += locations["objects"]
        if not locations["meta"]["next"]:
            break
        params["offset"] += locations["meta"]["limit"]

    LOGGER.debug("Storage locations returned: %s", return_locations)
    return return_locations
Example #26
def get_location(path=None, purpose=None, space=None):
    """ Returns a list of storage locations, filtered by parameters.

    Queries the storage service and returns a list of storage locations,
    optionally filtered by purpose, containing space or path.

    purpose: How the storage is used.  Should reference storage service
        purposes, found in storage_service.locations.models.py
    path: Path to location.  If a space is passed in, paths starting with /
        have the space's path stripped.
    """
    return_locations = []
    if space and path:
        path = _storage_relative_from_absolute(path, space['path'])
        space = space['uuid']
    pipeline = get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        return None
    url = _storage_service_url() + 'location/'
    params = {
        'pipeline__uuid': pipeline['uuid'],
        'relative_path': path,
        'purpose': purpose,
        'space': space,
        'offset': 0,
    }
    while True:
        response = _storage_api_session().get(url, params=params)
        locations = response.json()
        return_locations += locations['objects']
        if not locations['meta']['next']:
            break
        params['offset'] += locations['meta']['limit']

    LOGGER.debug("Storage locations returned: %s", return_locations)
    return return_locations
Example #27
def _storage_api_params():
    """Return API GET params username=USERNAME&api_key=KEY for use in URL."""
    username = get_setting('storage_service_user', 'test')
    api_key = get_setting('storage_service_apikey', None)
    return urllib.urlencode({'username': username, 'api_key': api_key})
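Note that ``urllib.urlencode`` is Python 2; under Python 3 the equivalent (hypothetical values shown) would be:

from urllib.parse import urlencode

params = urlencode({'username': 'test', 'api_key': 'secret'})
# -> 'username=test&api_key=secret'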
Example #28
def create_file(uuid,
                origin_location,
                origin_path,
                current_location,
                current_path,
                package_type,
                size,
                update=False,
                related_package_uuid=None,
                events=None,
                agents=None,
                aip_subtype=None):
    """Creates a new file. Note: for backwards compatibility
    reasons, the SS API calls "packages" "files" and this function should be
    read as ``create_package``.

    origin_location and current_location should be URIs for the storage service.

    Returns:
        Dict with the JSON response from the SS API

    Raises:
        RequestException: if the SS API call fails
    """
    pipeline = _get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        raise ResourceNotFound('Pipeline not available')
    if events is None:
        events = []
    if agents is None:
        agents = []
    new_file = {
        'uuid': uuid,
        'origin_location': origin_location,
        'origin_path': origin_path,
        'current_location': current_location,
        'current_path': current_path,
        'package_type': package_type,
        'aip_subtype': aip_subtype,
        'size': size,
        'origin_pipeline': pipeline['resource_uri'],
        'related_package_uuid': related_package_uuid,
        'events': events,
        'agents': agents
    }

    LOGGER.info("Creating file with %s", new_file)
    try:
        session = _storage_api_session()
        if update:
            new_file['reingest'] = pipeline['uuid']
            url = _storage_service_url() + 'file/' + uuid + '/'
            response = session.put(url, json=new_file)
        else:
            url = _storage_service_url() + 'file/'
            response = session.post(url, json=new_file)
        # raise an HTTPError exception if getting HTTP 4xx or 5xx error
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        LOGGER.warning("Unable to create file from %s because %s", new_file, e)
        raise
    LOGGER.info('Status code of create file/package request: %s',
                response.status_code)
    return response.json()
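Usage sketch for this raising variant; identifiers are placeholders:

try:
    package = create_file(
        uuid='00000000-0000-0000-0000-000000000000',
        origin_location='/api/v2/location/<origin-uuid>/',
        origin_path='transfer.tar',
        current_location='/api/v2/location/<aip-store-uuid>/',
        current_path='transfer.tar',
        package_type='AIP',
        size=1024,
    )
except requests.exceptions.RequestException:
    LOGGER.exception('Storage service rejected the package')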
Example #29
def create_file(uuid,
                origin_location,
                origin_path,
                current_location,
                current_path,
                package_type,
                size,
                update=False,
                related_package_uuid=None,
                events=None,
                agents=None,
                aip_subtype=None):
    """Creates a new file.

    Note: for backwards compatibility reasons, the SS
    API calls "packages" "files" and this function should be read as
    ``create_package``.

    ``origin_location`` and ``current_location`` should be URIs for the
    Storage Service.

    Returns a dict with the decoded JSON response from the SS API. It may raise
    ``RequestException`` if the SS API call fails.
    """
    pipeline = get_pipeline(get_setting('dashboard_uuid'))
    if pipeline is None:
        raise ResourceNotFound('Pipeline not available')
    if events is None:
        events = []
    if agents is None:
        agents = []
    new_file = {
        'uuid': uuid,
        'origin_location': origin_location,
        'origin_path': origin_path,
        'current_location': current_location,
        'current_path': current_path,
        'package_type': package_type,
        'aip_subtype': aip_subtype,
        'size': size,
        'origin_pipeline': pipeline['resource_uri'],
        'related_package_uuid': related_package_uuid,
        'events': events,
        'agents': agents
    }

    LOGGER.info("Creating file with %s", new_file)
    errmsg = "Unable to create file from %s because %s"

    ret = None
    if update:
        try:
            session = _storage_api_slow_session()
            new_file['reingest'] = pipeline['uuid']
            url = _storage_service_url() + 'file/' + uuid + '/'
            response = session.put(url, json=new_file)
            response.raise_for_status()
        except requests.exceptions.RequestException as err:
            LOGGER.warning(errmsg, new_file, err)
            raise
        else:
            ret = response.json()
    else:
        try:
            session = _storage_api_session()
            url = _storage_service_url() + 'file/async/'
            response = session.post(url, json=new_file, allow_redirects=False)
            ret = wait_for_async(response)
        except requests.exceptions.RequestException as err:
            LOGGER.warning(errmsg, new_file, err)
            raise

    LOGGER.info('Status code of create file/package request: %s',
                response.status_code)
    return ret
Example #30
def __init__(self, username=None, apikey=None):
    self.username = username or get_setting('storage_service_user', 'test')
    self.apikey = apikey or get_setting('storage_service_apikey', None)
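The excerpt above is only the constructor. A plausible completion (not verified against the source), assuming the class follows the standard ``requests`` custom-auth pattern and that the storage service uses Tastypie's ApiKey header format; ``get_setting`` is the surrounding module's helper:

import requests


class TastypieApikeyAuth(requests.auth.AuthBase):
    """Attach a Tastypie ApiKey Authorization header to each request."""

    def __init__(self, username=None, apikey=None):
        self.username = username or get_setting('storage_service_user', 'test')
        self.apikey = apikey or get_setting('storage_service_apikey', None)

    def __call__(self, r):
        # Tastypie's ApiKeyAuthentication expects "ApiKey <user>:<key>".
        r.headers['Authorization'] = 'ApiKey {0}:{1}'.format(self.username, self.apikey)
        return r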