@classmethod
def setUpClass(cls):
    replica = Replica.aws
    # Point Config at the fixture bucket first and record its name;
    # replica.bucket reads whichever config is currently active.
    Config.set_config(BucketConfig.TEST_FIXTURE)
    cls.blobstore = Config.get_blobstore_handle(replica)
    cls.test_fixture_bucket = replica.bucket
    # Then switch to the regular test bucket for the tests themselves.
    Config.set_config(BucketConfig.TEST)
    cls.test_bucket = replica.bucket
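A test method can then read canned input from the fixture bucket and write scratch output to the test bucket. A minimal sketch, assuming the blobstore handle exposes the get/upload_file_handle calls used elsewhere on this page (the key names are hypothetical):

data = cls.blobstore.get(cls.test_fixture_bucket, "fixtures/sample.json")  # hypothetical key
cls.blobstore.upload_file_handle(cls.test_bucket, "scratch/sample.json", io.BytesIO(data))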
Example #2
@classmethod
def start_serving(cls):
    Config.set_config(BucketConfig.TEST)
    cls._addr, cls._port = cls.get_addr_port()
    cls.stash_oidc_group_claim()
    cls.stash_openid_provider()
    # Point the authz layer at the mock server we are about to start.
    Config._set_authz_url(f"http://{cls._addr}:{cls._port}")
    logger.info(f"Mock Fusillade server listening at {cls._addr}:{cls._port}")
    # The class itself acts as the request handler; serve in a background thread.
    cls._server = HTTPServer((cls._addr, cls._port), cls)
    cls._thread = threading.Thread(target=cls._server.serve_forever)
    cls._thread.start()
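A class that starts a server this way needs a matching teardown. A minimal sketch, using the standard http.server/threading shutdown calls and assuming the stash_* helpers have restore_* counterparts (hypothetical names):

@classmethod
def stop_serving(cls):
    cls._server.shutdown()      # unblocks serve_forever()
    cls._server.server_close()  # release the listening socket
    cls._thread.join()
    cls.restore_openid_provider()   # hypothetical counterpart to stash_openid_provider()
    cls.restore_oidc_group_claim()  # hypothetical counterpart to stash_oidc_group_claim()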
Example #3
    def setUp(self, rounds=3):
        Config.set_config(BucketConfig.TEST)

        self.test_bucket = infra.get_env("DSS_GS_BUCKET_TEST")
        self.gs_blobstore = Config.get_blobstore_handle(Replica.gcp)
        test_src_keys = [infra.generate_test_key() for _ in range(rounds)]
        final_key = infra.generate_test_key()

        bucket_obj = self.gs_blobstore.gcp_client.bucket(self.test_bucket)

        # Seed the first key with 1 MiB of random data.
        self.gs_blobstore.upload_file_handle(
            self.test_bucket, test_src_keys[0],
            io.BytesIO(os.urandom(1024 * 1024)))

        # Each round composes 16 copies of the previous object into the next
        # key, growing the object 16x per round.
        for ix in range(len(test_src_keys) - 1):
            src_blob_obj = bucket_obj.get_blob(test_src_keys[ix])
            blobs = [src_blob_obj for _ in range(16)]
            dst_blob_obj = bucket_obj.blob(test_src_keys[ix + 1])

            dst_blob_obj.content_type = "application/octet-stream"
            dst_blob_obj.compose(blobs)

        # Set the storage class to NEARLINE.
        # NOTE: compose(…) does not seem to support setting a storage class.  The canonical way of changing
        # storage class is to call update_storage_class(…), but Google's library does not seem to handle
        # update_storage_class(…) calls for large objects, so rewrite the object instead.
        final_blob_obj = bucket_obj.blob(final_key)
        final_blob_obj.storage_class = "NEARLINE"
        final_blob_src = bucket_obj.get_blob(test_src_keys[-1])
        # rewrite(…) may need several calls for large objects; it returns a
        # continuation token, which is None once the rewrite is complete.
        token = None
        while True:
            token, _, _ = final_blob_obj.rewrite(final_blob_src, token=token)
            if token is None:
                break

        self.src_key = final_key
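To sanity-check the result of this setUp, a test could re-fetch the final blob and confirm both the storage class and the composed size. A minimal sketch using the same google-cloud-storage calls as above (the size follows from 1 MiB growing 16x per round):

        final_blob = bucket_obj.get_blob(final_key)  # re-fetch to load server-side properties
        assert final_blob.storage_class == "NEARLINE"
        assert final_blob.size == 1024 * 1024 * 16 ** (rounds - 1)  # 256 MiB when rounds=3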
Example #4
            "swagger_url": os.environ["SWAGGER_URL"]
        }}))

client = None


def get_client():
    # Lazily instantiate and cache a single shared DSSClient; the
    # hca.dss import is deferred until first use.
    global client
    if client is None:
        from hca.dss import DSSClient
        client = DSSClient()
    return client
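For context, callers elsewhere in the module would grab the shared client via get_client(). A minimal usage sketch, assuming DSSClient exposes the swagger-generated get_bundle call from the DSS API (the UUID is a placeholder):

bundle = get_client().get_bundle(uuid="11111111-1111-1111-1111-111111111111", replica="aws")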


dynamodb = boto3.resource('dynamodb')
Config.set_config(BucketConfig.NORMAL)


def current_time():
    # Unix time in whole milliseconds.
    return int(round(time.time() * 1000))


def upload_bundle(event, context):
    logger.info("Start uploading bundle")
    with tempfile.TemporaryDirectory() as src_dir:
        with tempfile.NamedTemporaryFile(dir=src_dir,
                                         suffix=".json",
                                         delete=False) as jfh:
            # delete=False keeps the file in src_dir after the handle
            # closes, so it is still present to be uploaded.
            jfh.write(bytes(generate_sample(), 'UTF-8'))
            jfh.flush()
        with tempfile.NamedTemporaryFile(dir=src_dir, suffix=".bin") as fh:
Example #5
def setUp(self):
    Config.set_config(BucketConfig.TEST)
Example #6
@classmethod
def setUpClass(cls):
    Config.set_config(BucketConfig.TEST)