Example #1
0
def download_file(tar_path):
    """Download the application tarball at *tar_path* to ./apptar.

    The storage backend is chosen via the BUILDER_STORAGE environment
    variable ("s3", "gcs", "azure"); any other value falls back to the
    in-cluster minio endpoint. Credentials are read from the secret files
    mounted under /var/run/secrets/deis/objectstore/creds/.
    """
    creds_dir = '/var/run/secrets/deis/objectstore/creds'

    def _read_cred(name):
        # Each credential is a single-value file mounted from a k8s secret.
        # NOTE(review): the raw file content (possibly including a trailing
        # newline) is used verbatim, matching the original behavior.
        with open(os.path.join(creds_dir, name), 'r') as cred_file:
            return cred_file.read()

    storage = os.getenv('BUILDER_STORAGE')
    if storage == "s3":
        conn = boto3.resource(
            's3',
            aws_access_key_id=_read_cred('accesskey'),
            aws_secret_access_key=_read_cred('secretkey'),
            region_name=_read_cred('region'))
        conn.Bucket(_read_cred('builder-bucket')).Object(tar_path).download_file('apptar')

    elif storage == "gcs":
        bucket_name = _read_cred('builder-bucket')
        key_path = os.path.join(creds_dir, 'key.json')
        scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
        credentials = ServiceAccountCredentials.from_json_keyfile_name(key_path, scopes=scopes)
        # The project id is not mounted separately; read it out of the key file.
        with open(key_path) as data_file:
            data = json.load(data_file)
        client = Client(credentials=credentials, project=data['project_id'])
        client.get_bucket(bucket_name).get_blob(tar_path).download_to_filename("apptar")

    elif storage == "azure":
        block_blob_service = BlockBlobService(
            account_name=_read_cred('accountname'),
            account_key=_read_cred('accountkey'))
        block_blob_service.get_blob_to_path(_read_cred('builder-container'), tar_path, 'apptar')

    else:
        # Default: in-cluster minio, reached over its service host/port with
        # a fixed region and bucket.
        AWS_ACCESS_KEY_ID = _read_cred('accesskey')
        AWS_SECRET_ACCESS_KEY = _read_cred('secretkey')
        minio_host = os.getenv('DEIS_MINIO_SERVICE_HOST')
        minio_port = os.getenv('DEIS_MINIO_SERVICE_PORT')
        # If you add port 80 to the end of the endpoint_url, boto3 freaks out.
        if minio_port == "80":
            s3_url = "http://" + minio_host
        else:
            s3_url = "http://" + minio_host + ":" + minio_port

        conn = boto3.resource(
            's3',
            aws_access_key_id=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            region_name="us-east-1",
            endpoint_url=s3_url,
            config=Config(signature_version='s3v4'))
        # stop boto3 from automatically changing the endpoint
        conn.meta.client.meta.events.unregister('before-sign.s3', fix_s3_host)
        conn.Bucket("git").Object(tar_path).download_file('apptar')
Example #2
0
    def __init__(self, bucket_name, subdir='_/'):
        """Attach to *bucket_name*, creating it (in the EU) if it is missing."""
        storage_client = Client()
        try:
            self.bucket = storage_client.get_bucket(bucket_name)
        except NotFound:
            # Bucket does not exist yet: create it, pinned to the EU location.
            self.bucket = storage_client.bucket(bucket_name)
            self.bucket.location = 'EU'
            self.bucket.create()

        self.subdir = subdir
Example #3
0
    def __init__(self, bucket_name, subdir='_/'):
        """Bind to the GCS bucket *bucket_name*, creating it when absent."""
        self.subdir = subdir
        conn = Client()
        try:
            self.bucket = conn.get_bucket(bucket_name)
        except NotFound:
            # Newly created buckets are hardcoded to the EU location.
            self.bucket = conn.bucket(bucket_name)
            self.bucket.location = 'EU'
            self.bucket.create()
Example #4
0
    def test_current(self):
        """current() reflects the top of the client's batch stack."""
        from gcloud.storage.client import Client
        client = Client(project='PROJECT', credentials=_Credentials())

        first = self._makeOne(client)
        self.assertTrue(first.current() is None)

        client._push_batch(first)
        self.assertTrue(first.current() is first)

        second = self._makeOne(client)
        client._push_batch(second)
        self.assertTrue(first.current() is second)
Example #5
0
    def __init__(self, project, cas_url_prefix):
        """Create a storage helper bound to *project*.

        *cas_url_prefix* is the gs:// prefix under which content-addressed
        blobs are stored; a single trailing "/" is dropped.
        """
        assert project is not None

        # Cache of bucket name -> bucket handle.
        self.buckets = {}
        self.client = GSClient(project)
        # endswith() instead of cas_url_prefix[-1] so an empty prefix no
        # longer raises IndexError.
        if cas_url_prefix.endswith("/"):
            cas_url_prefix = cas_url_prefix[:-1]
        self.cas_url_prefix = cas_url_prefix
Example #6
0
def get_client() -> Client:
    """Return the GCS client cached on Flask's application-global object.

    A single shared client is fine because it is not user-specific.
    """
    client = getattr(g, '_gcs_client', None)
    if client is None:
        client = Client()
        g._gcs_client = client
    return client
Example #7
0
    def test_as_context_mgr_w_error(self):
        """An exception inside the batch context leaves all requests unsent."""
        from gcloud.storage.batch import _FutureDict
        from gcloud.storage.client import Client

        URL = 'http://example.com/api'
        http = _HTTP()
        connection = _Connection(http=http)
        client = Client(project='PROJECT', credentials=_Credentials())
        client._connection = connection

        self.assertEqual(list(client._batch_stack), [])

        targets = [_MockObject(), _MockObject(), _MockObject()]
        try:
            with self._makeOne(client) as batch:
                self.assertEqual(list(client._batch_stack), [batch])
                batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                                    target_object=targets[0])
                batch._make_request('PATCH', URL, {'bar': 3},
                                    target_object=targets[1])
                batch._make_request('DELETE', URL, target_object=targets[2])
                raise ValueError()
        except ValueError:
            pass

        # The batch was popped, yet nothing went over the wire.
        self.assertEqual(list(client._batch_stack), [])
        self.assertEqual(len(http._requests), 0)
        self.assertEqual(len(batch._requests), 3)
        self.assertEqual(batch._target_objects, targets)
        # Since the context manager fails, finish will not get called and
        # the _properties will still be futures.
        for target in targets:
            self.assertTrue(isinstance(target._properties, _FutureDict))
Example #8
0
    def __init__(self, bucket_name, subdir='_/'):
        """Connect to *bucket_name* using the app-configured service account."""
        project_name = app.config['CGS_PROJECT_NAME']
        client_email = app.config['GCS_CLIENT_EMAIL']
        pem_path = app.config['GCS_PRIVATE_KEY_PEM']
        p12_path = app.config['GCS_PRIVATE_KEY_P12']
        scope = 'https://www.googleapis.com/auth/devstorage.read_write'

        # PEM-format private key: used for regular API calls.
        with open(pem_path) as key_file:
            pem_key = key_file.read()
        credentials_pem = SignedJwtAssertionCredentials(client_email, pem_key, scope)

        # P12-format private key: kept for the signed-URL generator.
        with open(p12_path) as key_file:
            p12_key = key_file.read()
        self.credentials_p12 = SignedJwtAssertionCredentials(client_email, p12_key, scope)

        gcs = Client(project=project_name, credentials=credentials_pem)
        self.bucket = gcs.get_bucket(bucket_name)
        self.subdir = subdir
Example #9
0
    def test_as_context_mgr_w_error(self):
        """Raising inside the batch context suppresses sending any request."""
        from gcloud.storage.batch import _FutureDict
        from gcloud.storage.client import Client

        URL = 'http://example.com/api'
        http = _HTTP()
        connection = _Connection(http=http)
        project = 'PROJECT'
        credentials = _Credentials()
        client = Client(project=project, credentials=credentials)
        client._connection = connection

        self.assertEqual(list(client._batch_stack), [])

        obj1, obj2, obj3 = _MockObject(), _MockObject(), _MockObject()
        try:
            with self._makeOne(client) as batch:
                self.assertEqual(list(client._batch_stack), [batch])
                batch._make_request(
                    'POST', URL, {'foo': 1, 'bar': 2}, target_object=obj1)
                batch._make_request(
                    'PATCH', URL, {'bar': 3}, target_object=obj2)
                batch._make_request('DELETE', URL, target_object=obj3)
                raise ValueError()
        except ValueError:
            pass

        # Batch stack unwound, but no HTTP traffic happened.
        self.assertEqual(list(client._batch_stack), [])
        self.assertEqual(len(http._requests), 0)
        self.assertEqual(len(batch._requests), 3)
        self.assertEqual(batch._target_objects, [obj1, obj2, obj3])
        # Since the context manager fails, finish will not get called and
        # the _properties will still be futures.
        self.assertTrue(isinstance(obj1._properties, _FutureDict))
        self.assertTrue(isinstance(obj2._properties, _FutureDict))
        self.assertTrue(isinstance(obj3._properties, _FutureDict))
Example #10
0
    def test_current(self):
        """current() tracks whichever batch was pushed most recently."""
        from gcloud.storage.client import Client
        credentials = _Credentials()
        client = Client(project='PROJECT', credentials=credentials)

        batch_a = self._makeOne(client)
        self.assertTrue(batch_a.current() is None)
        client._push_batch(batch_a)
        self.assertTrue(batch_a.current() is batch_a)

        batch_b = self._makeOne(client)
        client._push_batch(batch_b)
        self.assertTrue(batch_a.current() is batch_b)
Example #11
0
    def get_or_create(cls, bucket_name, client=None):
        """
        Return the bucket named *bucket_name*, creating it when missing.

        :param cls: :class:`gstorage.bucket.Bucket`
        :type bucket_name: string
        :param bucket_name: name of the bucket
        :type client: gcloud.client.Client
        :param client: (optional) instance of client to use
        :return: :class:`Bucket <Bucket>` object
        :raises gcloud.exceptions.BadRequest (400): not a valid bucket name
        :raises gcloud.exceptions.Forbidden (403): The credentials are invalid
        """
        if not client:
            # Fall back to application-default credentials.
            default_creds = GoogleCredentials.get_application_default()
            client = Client(credentials=default_creds)
        bucket = cls(client, name=bucket_name)
        if not bucket.exists():
            bucket.create()
        return bucket
Example #12
0
    def test_as_context_mgr_wo_error(self):
        """A clean exit from the batch context sends all queued requests."""
        from gcloud.storage.client import Client

        URL = 'http://example.com/api'
        expected = _Response()
        expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
        http = _HTTP((expected, _THREE_PART_MIME_RESPONSE))
        client = Client(project='PROJECT', credentials=_Credentials())
        client._connection._http = http

        self.assertEqual(list(client._batch_stack), [])

        obj1, obj2, obj3 = _MockObject(), _MockObject(), _MockObject()
        with self._makeOne(client) as batch:
            self.assertEqual(list(client._batch_stack), [batch])
            batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                                target_object=obj1)
            batch._make_request('PATCH', URL, {'bar': 3},
                                target_object=obj2)
            batch._make_request('DELETE', URL, target_object=obj3)

        # Leaving the context flushed the batch: stack empty, three calls
        # recorded in order, and the futures resolved from the response.
        self.assertEqual(list(client._batch_stack), [])
        self.assertEqual(len(batch._requests), 3)
        self.assertEqual([req[0] for req in batch._requests],
                         ['POST', 'PATCH', 'DELETE'])
        self.assertEqual(batch._target_objects, [obj1, obj2, obj3])
        self.assertEqual(obj1._properties, {'foo': 1, 'bar': 2})
        self.assertEqual(obj2._properties, {'foo': 1, 'bar': 3})
        self.assertEqual(obj3._properties, '')
Example #13
0
class IO:
    """Helper around Google Cloud Storage for gs:// paths, plus a simple
    content-addressed store (CAS) rooted at *cas_url_prefix*."""

    def __init__(self, project, cas_url_prefix):
        assert project is not None

        # Cache of bucket name -> bucket handle (see _get_bucket_and_path).
        self.buckets = {}
        self.client = GSClient(project)
        # endswith() avoids an IndexError on an empty prefix; at most one
        # trailing "/" is dropped, as before.
        if cas_url_prefix.endswith("/"):
            cas_url_prefix = cas_url_prefix[:-1]
        self.cas_url_prefix = cas_url_prefix

    def _get_bucket_and_path(self, path):
        """Split a gs://bucket/key URL into (bucket handle, key)."""
        m = re.match("^gs://([^/]+)/(.*)$", path)
        assert m is not None, "invalid remote path: {}".format(path)
        bucket_name = m.group(1)
        path = m.group(2)

        if bucket_name in self.buckets:
            bucket = self.buckets[bucket_name]
        else:
            bucket = self.client.bucket(bucket_name)
            # Bug fix: the cache was checked but never populated, so every
            # call created a fresh bucket handle.
            self.buckets[bucket_name] = bucket
        return bucket, path

    def get(self, src_url, dst_filename, must=True):
        """Download *src_url* to *dst_filename*; assert existence if *must*."""
        log.info("get %s -> %s", src_url, dst_filename)
        bucket, path = self._get_bucket_and_path(src_url)
        blob = bucket.blob(path)
        if blob.exists():
            blob.download_to_filename(dst_filename)
        else:
            assert not must, "Could not find {}".format(path)

    def get_as_str(self, src_url):
        """Download *src_url* and return its contents as a utf-8 string."""
        bucket, path = self._get_bucket_and_path(src_url)
        blob = bucket.blob(path)
        return blob.download_as_string().decode("utf8")

    def put(self, src_filename, dst_url, must=True, skip_if_exists=False):
        """Upload *src_filename* to *dst_url*, optionally skipping existing blobs."""
        if must:
            assert os.path.exists(src_filename)

        bucket, path = self._get_bucket_and_path(dst_url)
        blob = bucket.blob(path)
        if skip_if_exists and blob.exists():
            log.info("skipping put %s -> %s", src_filename, dst_url)
        else:
            log.info("put %s -> %s", src_filename, dst_url)
            blob.upload_from_filename(src_filename)

    def _get_url_prefix(self):
        # URL scheme for this backend.
        return "gs://"

    def write_file_to_cas(self, filename):
        """Upload *filename* keyed by its sha256 digest; return its gs:// URL.

        NOTE(review): unlike write_str_to_cas, no "/" is inserted between
        cas_url_prefix and the digest — confirm this asymmetry is intended.
        """
        hasher = hashlib.sha256()
        with open(filename, "rb") as fd:
            # Hash in fixed-size chunks to bound memory on large files.
            for chunk in iter(lambda: fd.read(10000), b""):
                hasher.update(chunk)
        digest = hasher.hexdigest()
        dst_url = self.cas_url_prefix + digest
        bucket, path = self._get_bucket_and_path(dst_url)
        blob = bucket.blob(path)
        blob.upload_from_filename(filename)
        return self._get_url_prefix() + bucket.name + "/" + path

    def write_str_to_cas(self, text):
        """Upload *text* keyed by its sha256 digest; return its gs:// URL."""
        text = text.encode("utf8")
        digest = hashlib.sha256(text).hexdigest()
        dst_url = self.cas_url_prefix + "/" + digest
        bucket, path = self._get_bucket_and_path(dst_url)
        blob = bucket.blob(path)
        blob.upload_from_string(text)
        return self._get_url_prefix() + bucket.name + "/" + path

    def write_json_to_cas(self, obj):
        """Serialize *obj* as JSON and store it via write_str_to_cas."""
        obj_str = json.dumps(obj)
        return self.write_str_to_cas(obj_str)