Example #1
def _calibration_list(calib_type: str):
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    bucket_path = location(CALIBRATION_FPATH(calib_type))
    try:
        objects = storage.list_objects(bucket_path)
    except BaseException as e:
        return bad_request(str(e))
    # fetch objects' metadata
    backups = []
    for obj in objects:
        try:
            meta = storage.head(obj)
        except BaseException as e:
            logger.error(str(e))
            continue
        backups.append({
            "origin": Path(obj).stem,
            "object": obj,
            "date": meta['Last-Modified'],
            "timestamp": datetime.strptime(meta['Last-Modified'], TIMEFORMAT).timestamp(),
            "hash": meta['ETag'].replace('"', ''),
            "size": meta['Content-Length'],
            "owner": meta.get('x-amz-meta-owner-id', -1)
        })
    # sort backups by date (newest first)
    backups = sorted(backups, key=lambda b: b['timestamp'], reverse=True)
    # ---
    return jsonify({
        'type': calib_type,
        'backups': backups
    })
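Note: every handler in these examples relies on a _storage() helper that is not included in the excerpts. From its usage it returns a (storage, location) pair, where storage exposes list_objects, head, upload and download, and location maps a relative resource path to a bucket path; it raises AppException when no storage backend is configured. The sketch below is only an illustration of that contract; ObjectStorageClient and the STORAGE_UID environment variable are placeholders, not names from the source.

# Illustrative sketch only: the real _storage() helper is not part of these
# excerpts. ObjectStorageClient and STORAGE_UID are hypothetical placeholders.
import os
from typing import Callable, Tuple

class AppException(Exception):
    def __init__(self, message: str):
        super().__init__(message)
        self.message = message

class ObjectStorageClient:
    # stand-in for the real client, which also exposes
    # list_objects(), head(), upload() and download()
    def __init__(self, uid: str):
        self.uid = uid

def _storage() -> Tuple[ObjectStorageClient, Callable[[str], str]]:
    uid = os.environ.get("STORAGE_UID")
    if uid is None:
        # assumption: a missing/unconfigured backend surfaces as AppException
        raise AppException("Storage backend is not configured")
    storage = ObjectStorageClient(uid)

    def location(resource: str) -> str:
        # assumption: remote objects are namespaced by the user's uid,
        # matching the '{uid}/' prefix stripped in _backup_list below
        return f"{uid}/{resource.strip('/')}"

    return storage, location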
Example #2
def _calibration_backup(calib_type: str):
    robot_name = get_robot_name()
    calib_filename = f"{robot_name}.yaml"
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    source = os.path.join(DATA_DIR, CALIBRATION_FPATH(calib_type), calib_filename)
    if not os.path.isfile(source):
        return jsonify({
            'type': calib_type,
            'resource': None,
            'backed-up': False
        })
    # ---
    bucket_path = location(os.path.join(CALIBRATION_FPATH(calib_type), calib_filename))
    try:
        logger.info("Uploading:", source, "->", bucket_path)
        # TODO: test this before enablying
        # storage.upload(source, bucket_path)
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return jsonify({
        'type': calib_type,
        'resource': source,
        'backed-up': True
    })
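The CALIBRATION_FPATH helper is also not shown. It is called both with a single calibration type and with an extra file name (see _calibration_restore below), so it presumably builds a relative path inside the data directory. A hedged sketch, assuming calibrations live under a 'calibrations/<type>/' subtree (the actual layout is not confirmed by the excerpts):

import os

def CALIBRATION_FPATH(calib_type: str, filename: str = "") -> str:
    # assumption: calibration files are stored under 'calibrations/<type>/'
    return os.path.join("calibrations", calib_type, filename)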
Example #3
def _backup_restore(resource: str):
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    bucket_path = location(resource)
    destination = os.path.join(DATA_DIR, resource)
    try:
        storage.download(bucket_path, destination, force=True)
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return jsonify({'resource': resource, 'restored': True})
Example #4
def _backup_perform(resource: str):
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    source = os.path.join(DATA_DIR, resource)
    bucket_path = location(resource)
    try:
        storage.upload(source, bucket_path)
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return jsonify({'resource': resource, 'backed-up': True})
Example #5
def _data_get(resource: str):
    _format = request.args.get('format', default=None)
    # check arguments
    if _format is not None and _format not in FORMAT_TO_ARCHIVE:
        return bad_request(f"Format '{_format}' not supported")
    # get requested file from request object
    filepath = os.path.abspath(os.path.join(DATA_DIR, resource))
    filename = os.path.basename(filepath)
    logger.debug(f'Requesting: GET:[{resource}]')
    # check if the path exists
    if not os.path.exists(filepath):
        return not_found(f"Resource '{resource}' not found")
    # deliver files on match
    if _format is None and os.path.isfile(filepath):
        # no compression requested: send the file as-is
        return send_from_directory(DATA_DIR, resource)
    # compress the resource
    archive = FORMAT_TO_ARCHIVE[_format]()
    if os.path.isfile(filepath):
        # compress single file
        archive.add(filepath, filename, logger)
    elif os.path.isdir(filepath):
        # compress directory
        archive.add(filepath, resource, logger)
    mime_type, _ = archive.mime()
    filename = f"{Path(filepath).stem}.{archive.extension()}"
    return send_file(archive.data(),
                     attachment_filename=filename,
                     mimetype=mime_type)
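_data_get above and _data_post (Example #9 below) share an archive abstraction through FORMAT_TO_ARCHIVE: each class offers add, extract_all, from_buffer, mime, extension and data. The real implementations are not in these excerpts; the following is a minimal sketch of one such class backed by the standard tarfile module, assuming a 'tar.gz' format key.

# Sketch of the archive interface used above, assuming a tar.gz backend;
# not the actual FORMAT_TO_ARCHIVE implementation.
import io
import tarfile

class TarGzArchive:

    def __init__(self, buffer: io.BytesIO = None):
        # write mode when created empty, read mode when built from a buffer
        self._buffer = buffer if buffer is not None else io.BytesIO()
        mode = 'r:gz' if buffer is not None else 'w:gz'
        self._tar = tarfile.open(fileobj=self._buffer, mode=mode)

    @classmethod
    def from_buffer(cls, buffer: io.BytesIO) -> 'TarGzArchive':
        return cls(buffer)

    def add(self, path: str, arcname: str, logger=None):
        # add a file or directory under the given archive name
        if logger is not None:
            logger.debug(f"Adding '{path}' as '{arcname}'")
        self._tar.add(path, arcname=arcname)

    def extract_all(self, destination: str):
        self._tar.extractall(destination)

    def mime(self):
        # (type, encoding) pair, mirroring mimetypes.guess_type()
        return 'application/x-tar', 'gzip'

    def extension(self) -> str:
        return 'tar.gz'

    def data(self) -> io.BytesIO:
        # finalize the archive and rewind the buffer for send_file()
        self._tar.close()
        self._buffer.seek(0)
        return self._buffer

FORMAT_TO_ARCHIVE = {'tar.gz': TarGzArchive}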
Example #6
def _backup_exists(resource: str):
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    bucket_path = location(resource)
    try:
        storage.head(bucket_path)
        exists = True
    except FileNotFoundError:
        exists = False
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return jsonify({'resource': resource, 'exists': exists})
Example #7
def _backup_list(resource: str):
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    bucket_path = location(resource)
    try:
        files = storage.list_objects(bucket_path)
    except BaseException as e:
        return bad_request(str(e))
    # remove the '{uid}/' prefix from the files
    uid = str(storage.api.uid)
    files = list(
        map(lambda f: f[len(uid) + 1:]
            if f.startswith(f"{uid}/") else f, files))
    # ---
    return jsonify({'resource': resource, 'files': files})
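On Python 3.9 and newer, the conditional prefix stripping above can be written more compactly with str.removeprefix, which only strips the prefix when it is actually present:

# equivalent to the map/lambda above (Python 3.9+)
prefix = f"{uid}/"
files = [f.removeprefix(prefix) for f in files]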
Example #8
def _calibration_restore(calib_type: str, origin: str):
    robot_name = get_robot_name()
    calib_filename = f"{robot_name}.yaml"
    try:
        storage, location = _storage()
    except AppException as e:
        return bad_request(e.message)
    # ---
    bucket_path = location(CALIBRATION_FPATH(calib_type, f"{origin}.yaml"))
    destination = os.path.join(DATA_DIR, CALIBRATION_FPATH(calib_type), calib_filename)
    try:
        logger.info("Downloading:", bucket_path, "->", destination)
        storage.download(bucket_path, destination, force=True)
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return jsonify({
        'type': calib_type,
        'origin': origin,
        'restored': True
    })
Example #9
def _data_post(resource: str):
    _format = request.args.get('format', default=None)
    # check arguments
    if _format is not None and _format not in FORMAT_TO_ARCHIVE:
        return bad_request(f"Format '{_format}' not supported")
    # get requested file from request object
    filepath = os.path.abspath(os.path.join(DATA_DIR, resource))
    logger.debug(f'Requesting: POST:[{resource}]')
    body = io.BytesIO(request.data)
    body_len = len(request.data)
    # format is given: we need to extract an archive
    if _format is not None:
        # cannot uncompress onto a file
        if os.path.isfile(filepath):
            return bad_request(f"The path '{filepath}' points to a file")
        # extract archive
        ArchiveClass = FORMAT_TO_ARCHIVE[_format]
        archive = ArchiveClass.from_buffer(body)
        try:
            logger.debug(
                f"Extracting {archive.extension()} archive onto '{filepath}'..."
            )
            archive.extract_all(filepath)
        except ArchiveError as e:
            return bad_request(e.message)
        return ok()
    # format is not given: we are working with a single file
    if os.path.isdir(filepath):
        return bad_request(f"The path '{filepath}' points to a directory")
    # dump the body into a file
    logger.debug(f"Writing a body of size {body_len}B into '{filepath}'")
    try:
        os.makedirs(Path(filepath).parent, exist_ok=True)
        with open(filepath, 'wb') as fout:
            transfer_bytes(body, fout)
    except BaseException as e:
        return bad_request(str(e))
    # ---
    return ok()
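transfer_bytes is another helper that does not appear in the excerpts. Given how it is called, it copies the in-memory request body into the destination file; a plausible chunked implementation could be as small as:

# Hypothetical helper: stream the source into the destination in fixed-size
# chunks instead of one large write.
import shutil
from typing import BinaryIO

def transfer_bytes(fin: BinaryIO, fout: BinaryIO, chunk_size: int = 64 * 1024):
    shutil.copyfileobj(fin, fout, length=chunk_size)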