def _read_secret(name):
    """Return the contents of a Deis object-store credential file.

    The controller mounts object-store credentials as individual files
    under /var/run/secrets/deis/objectstore/creds/.
    """
    with open('/var/run/secrets/deis/objectstore/creds/' + name, 'r') as f:
        return f.read()


def download_file(tar_path):
    """Download the application tarball at *tar_path* to the local file 'apptar'.

    The backing object store is selected by the BUILDER_STORAGE environment
    variable: "s3" (AWS), "gcs" (Google Cloud Storage), "azure" (Azure Blob
    Storage); any other value falls back to the in-cluster Minio service,
    which speaks the S3 protocol.
    """
    storage = os.getenv('BUILDER_STORAGE')
    if storage == "s3":
        conn = boto3.resource(
            's3',
            aws_access_key_id=_read_secret('accesskey'),
            aws_secret_access_key=_read_secret('secretkey'),
            region_name=_read_secret('region'))
        conn.Bucket(_read_secret('builder-bucket')).Object(tar_path).download_file('apptar')
    elif storage == "gcs":
        scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            '/var/run/secrets/deis/objectstore/creds/key.json', scopes=scopes)
        # The key file also carries the project id the client must target.
        with open('/var/run/secrets/deis/objectstore/creds/key.json') as data_file:
            data = json.load(data_file)
        client = Client(credentials=credentials, project=data['project_id'])
        client.get_bucket(_read_secret('builder-bucket')).get_blob(tar_path).download_to_filename("apptar")
    elif storage == "azure":
        block_blob_service = BlockBlobService(
            account_name=_read_secret('accountname'),
            account_key=_read_secret('accountkey'))
        block_blob_service.get_blob_to_path(_read_secret('builder-container'), tar_path, 'apptar')
    else:
        # Default: in-cluster Minio (S3-compatible), bucket "git".
        mHost = os.getenv('DEIS_MINIO_SERVICE_HOST')
        mPort = os.getenv('DEIS_MINIO_SERVICE_PORT')
        if mPort == "80":
            # If you add port 80 to the end of the endpoint_url, boto3 freaks out.
            S3_URL = "http://" + mHost
        else:
            S3_URL = "http://" + mHost + ":" + mPort
        conn = boto3.resource(
            's3',
            aws_access_key_id=_read_secret('accesskey'),
            aws_secret_access_key=_read_secret('secretkey'),
            region_name="us-east-1",
            endpoint_url=S3_URL,
            config=Config(signature_version='s3v4'))
        # stop boto3 from automatically changing the endpoint
        conn.meta.client.meta.events.unregister('before-sign.s3', fix_s3_host)
        conn.Bucket("git").Object(tar_path).download_file('apptar')
def __init__(self, bucket_name, subdir='_/', location='EU'):
    """Bind this store to *bucket_name* on Google Cloud Storage.

    If the bucket does not exist it is created in *location*.

    Args:
        bucket_name: name of the GCS bucket to use.
        subdir: key prefix under which objects are stored.
        location: region used only when the bucket has to be created.
            Defaults to 'EU', which the previous code hardcoded, so
            existing callers see identical behavior.
    """
    gcs = Client()
    try:
        self.bucket = gcs.get_bucket(bucket_name)
    except NotFound:
        # Bucket is missing: create it in the requested location.
        self.bucket = gcs.bucket(bucket_name)
        self.bucket.location = location
        self.bucket.create()
    self.subdir = subdir
def __init__(self, bucket_name, subdir='_/'):
    """Open *bucket_name* on Google Cloud Storage with app-configured credentials.

    The service-account key is loaded twice: once in PEM format for regular
    API calls, and once in PKCS#12 format for the signed-URL generator
    (kept on ``self.credentials_p12``).
    """
    CGS_PROJECT_NAME = app.config['CGS_PROJECT_NAME']
    GCS_CLIENT_EMAIL = app.config['GCS_CLIENT_EMAIL']
    GCS_PRIVATE_KEY_PEM = app.config['GCS_PRIVATE_KEY_PEM']
    GCS_PRIVATE_KEY_P12 = app.config['GCS_PRIVATE_KEY_P12']
    scope = 'https://www.googleapis.com/auth/devstorage.read_write'

    # Private key in PEM format (used by the API).
    with open(GCS_PRIVATE_KEY_PEM) as pem_file:
        pem_key = pem_file.read()
    api_credentials = SignedJwtAssertionCredentials(GCS_CLIENT_EMAIL, pem_key, scope)

    # Private key in PKCS#12 format (used by the signed-URL generator).
    with open(GCS_PRIVATE_KEY_P12) as p12_file:
        p12_key = p12_file.read()
    self.credentials_p12 = SignedJwtAssertionCredentials(GCS_CLIENT_EMAIL, p12_key, scope)

    gcs = Client(project=CGS_PROJECT_NAME, credentials=api_credentials)
    self.bucket = gcs.get_bucket(bucket_name)
    self.subdir = subdir