Code Example #1
 def test_s3_streaming(self):
     boto3_session = boto3.session.Session()
     # 1 MiB of random data to round-trip through S3.
     payload = io.BytesIO(os.urandom(2**20))
     test_key = "hca-dss-sync-test/s3-streaming-upload/{}".format(uuid.uuid4())
     # The chunker signs each chunk with the session credentials as the body streams.
     chunker = S3SigningChunker(fh=payload,
                                total_bytes=len(payload.getvalue()),
                                credentials=boto3_session.get_credentials(),
                                service_name="s3",
                                region_name=boto3_session.region_name)
     upload_url = "{host}/{bucket}/{key}".format(host=self.s3.meta.client.meta.endpoint_url,
                                                 bucket=self.s3_bucket.name,
                                                 key=test_key)
     # Upload via urllib3 with chunked transfer encoding, using the chunker as the request body.
     res = get_pool_manager().request("PUT", upload_url,
                                      headers=chunker.get_headers("PUT", upload_url),
                                      body=chunker,
                                      chunked=True,
                                      retries=False)
     self.assertEqual(res.status, requests.codes.ok)
     # The stored object must match the original payload byte for byte.
     self.assertEqual(self.s3_bucket.Object(test_key).get()["Body"].read(), payload.getvalue())
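For reference, the same streaming pattern can be factored out of the test into a standalone helper. The sketch below is illustrative only: it reuses the dss.util.streaming helpers (S3SigningChunker, get_pool_manager) exactly as they appear above, while the function name stream_put_s3_object is hypothetical and not part of the project.

import boto3
from dss.util.streaming import get_pool_manager, S3SigningChunker

def stream_put_s3_object(endpoint_url, bucket, key, fh, total_bytes):
    # Hypothetical helper: PUT a file-like object to S3 with SigV4-signed
    # chunked transfer encoding, mirroring the test above.
    session = boto3.session.Session()
    chunker = S3SigningChunker(fh=fh,
                               total_bytes=total_bytes,
                               credentials=session.get_credentials(),
                               service_name="s3",
                               region_name=session.region_name)
    upload_url = "{}/{}/{}".format(endpoint_url, bucket, key)
    # The chunker serves as the request body and signs each chunk as it streams.
    return get_pool_manager().request("PUT", upload_url,
                                      headers=chunker.get_headers("PUT", upload_url),
                                      body=chunker,
                                      chunked=True,
                                      retries=False)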
Code Example #2
File: sync.py    Project: hannes-ucsc/data-store
import os
import logging
from collections import namedtuple

from google.cloud.client import ClientWithProject
from google.cloud._http import JSONConnection
from google.resumable_media._upload import get_content_range

import dss
from dss import Config, Replica
from dss.util.aws import resources, clients, send_sns_msg, ARN
from dss.util.streaming import get_pool_manager, S3SigningChunker

logger = logging.getLogger(__name__)

# Lifetime of presigned URLs, in seconds (one hour).
presigned_url_lifetime_seconds = 3600
# Whether to use the Google Storage Transfer Service (GSTS).
use_gsts = False
gsts_sched_delay_minutes = 2
# Copy part size per platform: 64 MiB for S3, 640 MiB for GS.
part_size = {"s3": 64 * 1024 * 1024, "gs": 640 * 1024 * 1024}
# Number of parts handled by each worker invocation.
parts_per_worker = {"s3": 8, "gs": 1}
# Chunk size (32 MiB) for uploads to Google Storage.
gs_upload_chunk_size = 1024 * 1024 * 32
# Shared HTTP connection pool.
http = get_pool_manager()

# SNS topic names, per deployment stage, used to fan out copy-part work and to
# signal the "closer" step for each platform.
sns_topics = dict(
    copy_parts="dss-copy-parts-" + os.environ["DSS_DEPLOYMENT_STAGE"],
    closer=dict(s3="dss-s3-mpu-ready-" + os.environ["DSS_DEPLOYMENT_STAGE"],
                gs="dss-gs-composite-upload-ready-" +
                os.environ["DSS_DEPLOYMENT_STAGE"]))

# (platform, bucket, blob) triple locating a blob on a replica.
BlobLocation = namedtuple("BlobLocation", "platform bucket blob")


class GStorageTransferClient(ClientWithProject):
    SCOPE = ["https://www.googleapis.com/auth/cloud-platform"]


class GStorageTransferConnection(JSONConnection):
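As a closing note, the part_size and parts_per_worker constants defined above suggest how a copy is fanned out across workers: the source blob is presumably split into fixed-size parts, and each worker copies a small batch of them. The arithmetic below is a minimal sketch of that reading, not code from the project.

import math

part_size = {"s3": 64 * 1024 * 1024, "gs": 640 * 1024 * 1024}
parts_per_worker = {"s3": 8, "gs": 1}

def estimate_fanout(platform, blob_size_bytes):
    # Illustrative only: number of fixed-size parts (rounded up) and the
    # number of workers needed if each copies parts_per_worker parts.
    parts = math.ceil(blob_size_bytes / part_size[platform])
    workers = math.ceil(parts / parts_per_worker[platform])
    return parts, workers

# Example: a 1 GiB blob on S3 -> (16, 2), i.e. 16 parts handled by 2 workers.
print(estimate_fanout("s3", 1024 ** 3))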