Example No. 1
def generate_metadata(object_path=None, metadata=None, send_to_cloud=True):
    """ Create a json metadata file locally and set it on s3 server.
    """
    Bucket = ConnectionInfos.get("bucket")
    assert Bucket is not None, "Bucket is None"

    if not os.path.exists(object_path):
        return None

    object_key = get_object_key(object_path)

    m = cirrus_objects.ObjectMetadata(object_key=object_key)
    if metadata:
        if not cirrus_objects.ObjectMetadata.object_up_to_date(metadata):
            Logger.Log.warning("Metadata object outdated for file: " +
                               str(object_path))
        m.update(metadata)

    Logger.Log.debug("[LOCAL] dump metadata (generated): " + object_path)

    if send_to_cloud:
        metadata_file = m.dump(remove_locals=True)
        Logger.Log.debug("[CLD_UP] send metadata (generated): " + object_path)
        Bucket.upload_file(metadata_file, Key=m.object_key)

    m.dump(remove_locals=False)
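
A minimal usage sketch of the function above, assuming it is importable and ConnectionInfos already holds a valid boto3 Bucket under the "bucket" key; the path and the extra metadata values are hypothetical placeholders, not part of the original code.

# Hypothetical call: dump the metadata file locally and push it to S3.
extra = {"upload_message": "initial import"}   # illustrative values only
generate_metadata("/projects/assets/tree.abc",  # placeholder path
                  metadata=extra,
                  send_to_cloud=True)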
Example No. 2
def send_object(object_path="", message="", callback=None, keep_locked=False):
    """ Send an object to ams S3 server, create new file if doesn't exist
        or update exsiting file. Arg callback is a funciton called to update
        transfert information ( in bytes )
    """
    Bucket = ConnectionInfos.get("bucket")

    assert os.path.exists(object_path), "object_path not valid"

    object_key = get_object_key(object_path)

    user_uid = cirrus_objects.ObjectMetadata.get_user_uid()
    if keep_locked:
        # if kept locked, fetch the existing lock time and message
        cur_meta = get_metadata(object_path)
        user = user_uid
        lock_message = cur_meta.get("lock_message", "None")
        lock_time = cur_meta.get("lock_time", "")
    else:
        user = ""
        lock_message = ""
        lock_time = ""

    now = datetime.datetime.now()
    raw_metadata = {
        "upload_message": message,
        "latest_upload": now.ctime(),
        "lock_message": lock_message,
        "lock_time": lock_time,
        "user": user,
        "latest_upload_user": user_uid
    }

    metadata = cirrus_objects.ObjectMetadata(object_key)
    metadata.load(raw_metadata)

    Logger.Log.debug("[CLD_UP] send object: " + object_path)

    with open(object_path, "rb") as obj:

        s3_metadata = raw_metadata.copy()
        s3_metadata["latest_upload"] = now.ctime().replace(' ', '_').replace(
            ':', '_')
        s3_metadata["lock_time"] = lock_time.replace(' ',
                                                     '_').replace(':', '_')
        s3_metadata["lock_message"] = raw_metadata["lock_message"].decode(
            "ascii", "ignore")
        s3_metadata["upload_message"] = raw_metadata["upload_message"].decode(
            "ascii", "ignore")

        Bucket.upload_fileobj(obj,
                              Key=object_key,
                              ExtraArgs={"Metadata": s3_metadata},
                              Callback=callback)

    metadata.update({"version_id": get_cloud_version_id(object_path)})
    metadata.update({"is_latest": True})

    generate_metadata(object_path, metadata=metadata.data(remove_locals=False))
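
A usage sketch for the upload path, again assuming the module setup above; the path, the message, and the _progress helper are hypothetical. boto3 invokes the Callback periodically with the number of bytes transferred since the previous call.

# Hypothetical upload with a progress callback; path and message are placeholders.
def _progress(bytes_transferred):
    # called by boto3 with the bytes transferred since the last call
    print("uploaded: {} bytes".format(bytes_transferred))

send_object(object_path="/projects/assets/tree.abc",
            message="fixed shading groups",
            callback=_progress,
            keep_locked=False)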
Example No. 3
def get_object(object_path="", version_id="", callback=None):
    """ Gets a given object onto the S3 cloud and download it locally
        Gets also the metadata file
    """
    Bucket = ConnectionInfos.get("bucket")

    object_key = get_object_key(object_path)

    Logger.Log.debug("[CLD_DOWN] Downloading file: " + object_path +
                     " version_id: " + version_id)

    extra_args = None
    if version_id:
        extra_args = {"VersionId": version_id}

    # the file is downloaded to a temp file first, then copied over the target file
    temp_file = object_path + ".tmp"
    Bucket.download_file(object_key,
                         temp_file,
                         ExtraArgs=extra_args,
                         Callback=callback)

    if os.path.exists(object_path):
        os.chmod(object_path, S_IWRITE)
        shutil.copy2(temp_file, object_path)
    else:
        os.rename(temp_file, object_path)

    if os.path.exists(temp_file):
        os.remove(temp_file)

    metadata = get_metadata(object_path, force_cloud=True)

    # fetch latest version id
    if version_id == "":
        version_id = get_cloud_version_id(object_path)
        is_latest = True
    else:
        is_latest = False

    if not metadata:
        metadata = {"version_id": version_id}
        metadata["is_latest"] = is_latest
        generate_metadata(object_path, metadata=metadata)

    else:
        update_metadata = {"version_id": version_id, "is_latest": is_latest}

        p, f = os.path.split(object_path)
        p = p.replace('\\', '/')
        f = f.split('.')[0] + cirrus_objects.METADATA_IDENTIFIER

        _metadata = cirrus_objects.ObjectMetadata(object_key)
        _metadata.update(update_metadata)
        _metadata.dump(remove_locals=False)
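
A usage sketch for the download path; the path and the version id below are hypothetical placeholders. An empty version_id fetches the latest revision and marks the local metadata as latest.

# Hypothetical downloads; path and version id are placeholders.
pinned_version = "abc123"
get_object("/projects/assets/tree.abc", version_id=pinned_version)  # a specific revision
get_object("/projects/assets/tree.abc")  # empty version_id fetches the latest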
Example No. 4
def get_metadata(object_path="", force_cloud=False, dump=True):
    """ Get the given object_path metadata on S3 vault, return None
        if not found.
        dump allows to save the cloud metadata to the local file.
    """

    Bucket = ConnectionInfos.get("bucket")
    assert Bucket is not None, "Bucket is None"

    # if the local file is not found then return None,
    # as this means there might be a metadata / file desync
    if not os.path.exists(object_path):
        return None

    object_key = get_object_key(object_path)
    metadata_file = object_key.split('.',
                                     1)[0] + cirrus_objects.METADATA_IDENTIFIER
    metadata_path = os.path.dirname(object_path) + \
                    '/' + metadata_file.split('/')[-1]

    if not os.path.exists(metadata_path) and not force_cloud:
        Logger.Log.warning("Metadata missing: " + metadata_path)
        return None

    try:
        # when force_cloud is True, download the metadata file from the cloud
        # instead of reading the local copy
        if force_cloud:
            Logger.Log.debug("[CLD_DOWN] Access metadata file: " + metadata_path + \
                                " force_cloud: " + str(force_cloud))
            if not dump:
                # use a temp location so the local metadata file is not overwritten
                metadata_path = os.path.join(tempfile.gettempdir(),
                                             metadata_file.split('/')[-1] + ".tmp")

            obj = Bucket.Object(metadata_file)
            obj.download_file(metadata_path)

            with open(metadata_path) as f:
                data = json.load(f)

            if not dump:
                try:
                    os.remove(metadata_path)
                except OSError:
                    pass

        else:
            Logger.Log.debug("[LOCAL] Access metadata file: " + metadata_path + \
                             " force_cloud: " + str(force_cloud))
            with open(metadata_path) as f:
                data = json.load(f)

        metadata_version = data.get("metadata_version", -1.0)

        metadata = cirrus_objects.ObjectMetadata(object_key)
        metadata.update(data)
        metadata.update({"metadata_version": metadata_version})

        return metadata

    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == "404":
            return None
        else:
            raise e
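
A usage sketch for reading metadata, assuming the functions above are available in the same module; the path is a hypothetical placeholder. The local copy is read first, and the cloud copy is used as a fallback without overwriting the local file.

# Hypothetical lookup; the path is a placeholder.
meta = get_metadata("/projects/assets/tree.abc")
if meta is None:
    # fall back to the cloud copy without writing it over the local file
    meta = get_metadata("/projects/assets/tree.abc", force_cloud=True, dump=False)
if meta is not None:
    print(meta.get("latest_upload", "unknown"))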