Example #1
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(list(filter(str.isalnum, file_meta['ETag']))) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))

        # hashes match we have a cache hit
        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(
        key=creds.key,
        keyid=creds.keyid,
        kms_keyid=creds.kms_keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=_quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl,
        location=creds.location
    )
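
Note: the "".join(list(filter(str.isalnum, ...))) step above exists because S3 returns the ETag wrapped in double quotes, so the raw header value is not directly comparable to a local MD5 hex digest. A minimal standalone illustration (the ETag literal is invented for the example):

etag = '"9bb58f26192e4ba00f01e2e7b136bbd8"'
file_md5 = "".join(filter(str.isalnum, etag))
assert file_md5 == '9bb58f26192e4ba00f01e2e7b136bbd8'
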
Example #2
def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket_name, env, path)
        file_md5 = "".join(filter(str.isalnum, file_meta['ETag'])) \
            if file_meta else None

        cached_file_hash = hashlib.md5()
        with salt.utils.fopen(cached_file_path, 'rb') as fp_:
            cached_file_hash.update(fp_.read())

        # hashes match we have a cache hit
        if cached_file_hash.hexdigest() == file_md5:
            return

    # ... or get the file from S3
    key, keyid = _get_s3_key()
    s3.query(key=key,
             keyid=keyid,
             bucket=bucket_name,
             path=urllib.quote(path),
             local_file=cached_file_path)
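
This variant hashes the cached file by reading it into memory in one go, while the other variants delegate to salt.utils.get_hash. A minimal standalone sketch of a chunked MD5, which keeps memory use constant for large cache entries (helper name and chunk size are illustrative, not from Salt):

import hashlib

def md5_of_file(path, chunk_size=8192):
    # Feed the file to the digest in fixed-size chunks instead of
    # loading it into memory all at once.
    digest = hashlib.md5()
    with open(path, 'rb') as fp_:
        for chunk in iter(lambda: fp_.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
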
Example #3
File: s3.py Project: jmdcal/salt
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(list(filter(str.isalnum, file_meta['ETag']))) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        # hashes match we have a cache hit
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(key=creds.key,
             keyid=creds.keyid,
             bucket=bucket,
             service_url=creds.service_url,
             path=urllib.quote(path),
             local_file=cached_file_path,
             verify_ssl=creds.verify_ssl)
Example #4
File: s3fs.py Project: sijis/salt
def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket_name, env, path)
        file_md5 = "".join(filter(str.isalnum, file_meta['ETag'])) \
            if file_meta else None

        cached_file_hash = hashlib.md5()
        with salt.utils.fopen(cached_file_path, 'rb') as fp_:
            cached_file_hash.update(fp_.read())

        # hashes match we have a cache hit
        if cached_file_hash.hexdigest() == file_md5:
            return

    # ... or get the file from S3
    key, keyid = _get_s3_key()
    s3.query(
            key=key,
            keyid=keyid,
            bucket=bucket_name,
            path=urllib.quote(path),
            local_file=cached_file_path)
Example #5
File: s3.py Project: yi9/salt
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(list(filter(str.isalnum, file_meta['ETag']))) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        # hashes match we have a cache hit
        log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(
        key=creds.key,
        keyid=creds.keyid,
        kms_keyid=creds.kms_keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=_quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl,
        location=creds.location
    )
Example #6
def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
                      cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket, saltenv, path)
        file_md5 = "".join(filter(str.isalnum, file_meta['ETag'])) \
            if file_meta else None

        cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

        # hashes match we have a cache hit
        if cached_md5 == file_md5:
            return

    # ... or get the file from S3
    s3.query(
        key=creds.key,
        keyid=creds.keyid,
        bucket=bucket,
        service_url=creds.service_url,
        path=urllib.quote(path),
        local_file=cached_file_path,
        verify_ssl=creds.verify_ssl
    )
Example #7
def __get_s3_meta(bucket, key=key, keyid=keyid):
    return s3.query(
        key=key,
        keyid=keyid,
        bucket=bucket,
        service_url=service_url,
        return_bin=False)
Example #8
File: s3.py Project: jmdcal/salt
def __get_s3_meta():
    return s3.query(key=creds.key,
                    keyid=creds.keyid,
                    bucket=creds.bucket,
                    service_url=creds.service_url,
                    verify_ssl=creds.verify_ssl,
                    return_bin=False)
Example #9
def __get_s3_meta(bucket, key=key, keyid=keyid):
    return s3.query(key=key,
                    keyid=keyid,
                    bucket=bucket,
                    service_url=service_url,
                    verify_ssl=verify_ssl,
                    return_bin=False)
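
In these __get_s3_meta variants the helper is a nested function: service_url and verify_ssl are read from the enclosing scope, while key=key and keyid=keyid freeze the enclosing credentials as defaults at definition time. A toy illustration of that default-binding pattern (not Salt code):

def make_getter(key, keyid):
    def __get_meta(bucket, key=key, keyid=keyid):
        # The defaults were captured when the inner function was defined.
        return (bucket, key, keyid)
    return __get_meta

get_meta = make_getter('AKIA...', 'secret')
assert get_meta('my-bucket') == ('my-bucket', 'AKIA...', 'secret')
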
Example #10
def _refresh_buckets_cache_file():
    '''
    Retrieve the content of all buckets and cache the metadata to the buckets
    cache file
    '''
    key, keyid = _get_s3_key()
    cache_file = _get_buckets_cache_filename()

    resultset = {}

    # == envs ==
    for env, buckets in _get_buckets().items():
        bucket_files = {}
        # == buckets ==
        for bucket_name in buckets:
            # == metadata ==
            bucket_files[bucket_name] = s3.query(
                key=key,
                keyid=keyid,
                bucket=bucket_name,
                return_bin=False)

        resultset[env] = bucket_files

    # write the metadata to disk
    if os.path.isfile(cache_file):
        os.remove(cache_file)

    with salt.utils.fopen(cache_file, 'wb') as fp_:
        pickle.dump(resultset, fp_)

    return resultset
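
The cache file written above is a pickle of an {env: {bucket_name: metadata}} mapping. A sketch of the matching reader (hypothetical helper mirroring the writer; binary mode pairs with the 'wb' used when dumping):

import pickle

def _read_buckets_cache_file(cache_file):
    with open(cache_file, 'rb') as fp_:
        return pickle.load(fp_)
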
Example #11
def __get_s3_meta():
    return s3.query(
        key=creds.key,
        keyid=creds.keyid,
        bucket=creds.bucket,
        service_url=creds.service_url,
        verify_ssl=creds.verify_ssl,
        return_bin=False)
Example #12
def __get_s3_meta():
    return s3.query(key=creds.key,
                    keyid=creds.keyid,
                    bucket=creds.bucket,
                    service_url=creds.service_url,
                    verify_ssl=creds.verify_ssl,
                    location=creds.location,
                    return_bin=False,
                    params={'prefix': prefix})
Example #13
def __get_s3_meta(bucket, key=key, keyid=keyid):
    return s3.query(
        key=key,
        keyid=keyid,
        bucket=bucket,
        service_url=service_url,
        verify_ssl=verify_ssl,
        location=location,
        return_bin=False)
Example #14
def __get_s3_meta():
    return s3.query(
        key=creds.key,
        keyid=creds.keyid,
        bucket=creds.bucket,
        service_url=creds.service_url,
        verify_ssl=creds.verify_ssl,
        location=creds.location,
        return_bin=False,
        params={'prefix': prefix})
Example #15
def find_file(path, env='base', **kwargs):
    '''
    Looks through the buckets cache file for a match. If it's found, the file
    will be retrieved from S3 only if its cached version is missing or the md5
    hash doesn't match
    '''
    fnd = {'bucket': None,
           'path': None}

    resultset = _init()
    if not resultset or env not in resultset:
        return fnd

    ret = _find_files(resultset[env])

    # look for the path including the env, but only return the path if found
    env_path = _get_env_path(env, path)
    for bucket_name, files in ret.iteritems():
        if env_path in files and not salt.fileserver.is_file_ignored(__opts__, env_path):
            fnd['bucket'] = bucket_name
            fnd['path'] = path

    if not fnd['path'] or not fnd['bucket']:
        return fnd

    # check the cache using the env/path
    if _is_cached_file_current(resultset, env, fnd['bucket'], env_path):
        return fnd

    # grab from S3 using the env/path, url encode to cover weird chars
    key, keyid = _get_s3_key()
    s3.query(
        key=key,
        keyid=keyid,
        bucket=fnd['bucket'],
        path=urllib.quote(env_path),
        local_file=_get_cached_file_name(fnd['bucket'], env_path))
    return fnd
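
The "url encode to cover weird chars" step is plain percent-encoding of the object path before it goes into the request URL. A minimal cross-version illustration of what urllib.quote (aliased as _quote in the other variants) does:

try:
    from urllib import quote as _quote        # Python 2
except ImportError:
    from urllib.parse import quote as _quote  # Python 3

assert _quote('base/dir with spaces/init.sls') == 'base/dir%20with%20spaces/init.sls'
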
Example #16
def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''
    key, keyid, service_url, verify_ssl = _get_s3_key()

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket_name, saltenv, path)
        if file_meta:
            file_etag = file_meta['ETag']

            if file_etag.find('-') == -1:
                file_md5 = file_etag
                cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

                # hashes match we have a cache hit
                if cached_md5 == file_md5:
                    return
            else:
                cached_file_stat = os.stat(cached_file_path)
                cached_file_size = cached_file_stat.st_size
                cached_file_mtime = datetime.datetime.fromtimestamp(
                    cached_file_stat.st_mtime)

                cached_file_lastmod = datetime.datetime.strptime(
                    file_meta['LastModified'], '%Y-%m-%dT%H:%M:%S.%fZ')
                if (cached_file_size == int(file_meta['Size'])
                        and cached_file_mtime > cached_file_lastmod):
                    log.debug('cached file size equal to metadata size and '
                              'cached file mtime later than metadata last '
                              'modification time.')
                    ret = s3.query(key=key,
                                   keyid=keyid,
                                   method='HEAD',
                                   bucket=bucket_name,
                                   service_url=service_url,
                                   verify_ssl=verify_ssl,
                                   path=_quote(path),
                                   local_file=cached_file_path)
                    if ret is not None:
                        for header in ret['headers']:
                            name, value = header.split(':', 1)
                            name = name.strip()
                            value = value.strip()
                            if name == 'Last-Modified':
                                s3_file_mtime = datetime.datetime.strptime(
                                    value, '%a, %d %b %Y %H:%M:%S %Z')
                            elif name == 'Content-Length':
                                s3_file_size = int(value)
                        if (cached_file_size == s3_file_size
                                and cached_file_mtime > s3_file_mtime):
                            log.info(
                                '{0} - {1} : {2} skipped download since cached file size '
                                'equal to and mtime after s3 values'.format(
                                    bucket_name, saltenv, path))
                            return

    # ... or get the file from S3
    s3.query(key=key,
             keyid=keyid,
             bucket=bucket_name,
             service_url=service_url,
             verify_ssl=verify_ssl,
             path=_quote(path),
             local_file=cached_file_path)
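
Examples #16 and #17 fall back to size/mtime checks because a multipart upload's ETag is not an MD5 of the object: S3 typically reports it as '<digest>-<part count>', which is what the file_etag.find('-') test detects. A minimal standalone version of that check (ETag literals invented for the example):

def _etag_is_plain_md5(etag):
    # Single-part uploads: the ETag is the object's MD5 hex digest.
    # Multipart uploads: the ETag looks like '<digest>-<number of parts>'.
    return '-' not in etag

assert _etag_is_plain_md5('9bb58f26192e4ba00f01e2e7b136bbd8')
assert not _etag_is_plain_md5('d41d8cd98f00b204e9800998ecf8427e-3')
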
Example #17
def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
    '''
    Checks the local cache for the file; if it's old or missing, grab the
    file from S3 and update the cache
    '''
    key, keyid, service_url, verify_ssl = _get_s3_key()

    # check the local cache...
    if os.path.isfile(cached_file_path):
        file_meta = _find_file_meta(metadata, bucket_name, saltenv, path)
        if file_meta:
            file_etag = file_meta['ETag']

            if file_etag.find('-') == -1:
                file_md5 = file_etag
                cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')

                # hashes match we have a cache hit
                if cached_md5 == file_md5:
                    return
            else:
                cached_file_stat = os.stat(cached_file_path)
                cached_file_size = cached_file_stat.st_size
                cached_file_mtime = datetime.datetime.fromtimestamp(
                    cached_file_stat.st_mtime)

                cached_file_lastmod = datetime.datetime.strptime(
                    file_meta['LastModified'], '%Y-%m-%dT%H:%M:%S.%fZ')
                if (cached_file_size == int(file_meta['Size']) and
                        cached_file_mtime > cached_file_lastmod):
                    log.debug('cached file size equal to metadata size and '
                              'cached file mtime later than metadata last '
                              'modification time.')
                    ret = s3.query(
                        key=key,
                        keyid=keyid,
                        method='HEAD',
                        bucket=bucket_name,
                        service_url=service_url,
                        verify_ssl=verify_ssl,
                        path=_quote(path),
                        local_file=cached_file_path
                    )
                    if ret is not None:
                        for header in ret['headers']:
                            name, value = header.split(':', 1)
                            name = name.strip()
                            value = value.strip()
                            if name == 'Last-Modified':
                                s3_file_mtime = datetime.datetime.strptime(
                                    value, '%a, %d %b %Y %H:%M:%S %Z')
                            elif name == 'Content-Length':
                                s3_file_size = int(value)
                        if (cached_file_size == s3_file_size and
                                cached_file_mtime > s3_file_mtime):
                            log.info(
                                '{0} - {1} : {2} skipped download since cached file size '
                                'equal to and mtime after s3 values'.format(
                                    bucket_name, saltenv, path))
                            return

    # ... or get the file from S3
    s3.query(
        key=key,
        keyid=keyid,
        bucket=bucket_name,
        service_url=service_url,
        verify_ssl=verify_ssl,
        path=_quote(path),
        local_file=cached_file_path
    )
Example #18
def __get_s3_meta(bucket, key=key, keyid=keyid):
    return s3.query(key=key, keyid=keyid, bucket=bucket, return_bin=False)