def _get_gcs_bucket(self):
    conn = gcs.get_connection(
        self.args.gcs_project,
        self.args.gcs_client_email,
        self.args.gcs_private_key
    )
    return conn.get_bucket(self.args.gcs_bucket)

def get_storage_connection():
    environ = get_environ(require_storage=True)
    project_id = environ['project_id']
    key = ('get_storage_connection', project_id)
    if key not in CACHED_RETURN_VALS:
        # Cache return value for the environment.
        CACHED_RETURN_VALS[key] = storage.get_connection(project_id)
    return CACHED_RETURN_VALS[key]

def get_storage_connection():
    environ = get_environ(require_storage=True)
    get_connection_args = (environ['project_id'],
                           environ['client_email'],
                           environ['key_filename'])
    key = ('get_storage_connection', get_connection_args)
    if key not in CACHED_RETURN_VALS:
        # Cache return value for the environment.
        CACHED_RETURN_VALS[key] = storage.get_connection(*get_connection_args)
    return CACHED_RETURN_VALS[key]

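# A minimal usage sketch for the caching pattern above. CACHED_RETURN_VALS and
# get_environ() come from the surrounding module (assumed to be a plain
# module-level dict and an environment reader); the assertion below is
# illustrative only, not part of the original snippets.
conn_a = get_storage_connection()
conn_b = get_storage_connection()
assert conn_a is conn_b  # the second call is served from CACHED_RETURN_VALS
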
def bucket(self):
    if self.use_interoperable_auth:
        gs_connection = boto.connect_gs(
            self.config.access_key, self.config.access_secret,
            calling_format=connection.OrdinaryCallingFormat())
        # Always use our internal cacerts.txt file. This fixes an issue with
        # the PyInstaller-based frozen distribution, while allowing us to
        # continue to verify certificates and use a secure connection.
        gs_connection.ca_certificates_file = utils.get_cacerts_path()
    else:
        gs_connection = storage.get_connection(
            self.config.project, self.config.email, self.config.key_path)
    return gs_connection.get_bucket(self.config.bucket)

def bucket(self):
    if self.use_interoperable_auth:
        gs_connection = boto.connect_gs(
            self.config.access_key, self.config.access_secret,
            calling_format=connection.OrdinaryCallingFormat())
        # Always use our internal cacerts.txt file. This fixes an issue with
        # the PyInstaller-based frozen distribution, while allowing us to
        # continue to verify certificates and use a secure connection.
        gs_connection.ca_certificates_file = _certs_path
    else:
        gs_connection = storage.get_connection(
            self.config.project, self.config.email, self.config.key_path)
    return gs_connection.get_bucket(self.config.bucket)

def get_gcs_connection():
    '''
    Checks if there is a GCS connection and, if not, creates one.

    :return: True once the connection is established
    '''
    # TODO: returning True doesn't seem right here, but it gets the job done.
    # Does not account for lost connectivity, API errors, etc.
    if storage.get_default_connection() and storage.get_default_bucket():
        return True
    else:
        connect = storage.get_connection().from_service_account_json(
            SRV_ACCT_CREDS)
        bucket = storage.bucket.Bucket(name=BUCKET_NAME)
        storage.set_defaults(bucket, PROJECT_ID, connect)
        return True

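# Hypothetical follow-up to get_gcs_connection(): once storage.set_defaults()
# has installed a default bucket and connection, the module-level helpers can
# be called without passing either explicitly (the script further below uses
# storage.get_bucket() the same way). The conditional here is an assumption
# for illustration.
if get_gcs_connection():
    bucket = storage.get_bucket(BUCKET_NAME)  # resolved via the defaults
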
def _get_gcs_bucket(self):
    if not gcs:
        return None
    conn = gcs.get_connection(self.args.gcs_project,
                              self.args.gcs_client_email,
                              self.args.gcs_private_key)
    # Try to create the bucket; ignore the error if it already exists.
    try:
        bucket = conn.create_bucket(self.args.gcs_bucket)
        bucket.make_public(recursive=True, future=True)
    except Exception:
        pass
    return conn.get_bucket(self.args.gcs_bucket)

def __init__(self, bucket_name=None, project=None, client_email=None,
             private_key_path=None):
    self.bucket_name = bucket_name if bucket_name else settings.DJANGO_GCS_BUCKET
    self.project = project if project else settings.DJANGO_GCS_PROJECT
    self.client_email = client_email if client_email else settings.DJANGO_GCS_CLIENT_EMAIL
    self.private_key_path = (private_key_path if private_key_path
                             else settings.DJANGO_GCS_PRIVATE_KEY_PATH)
    self.gc_connection = gc_storage.get_connection(
        self.project, self.client_email, self.private_key_path)
    try:
        self.gc_bucket = self.gc_connection.get_bucket(self.bucket_name)
    except exceptions.NotFound:
        # If the bucket hasn't been created, create one.
        # TODO: creating buckets here is not functional; buckets won't be created.
        self.gc_bucket = self.gc_connection.new_bucket(self.bucket_name)

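# Hypothetical usage of the Django storage backend whose constructor appears
# above. The class name GCSStorage and the bucket name are assumptions (the
# original snippet shows only the __init__ body); gc_bucket and list_blobs()
# come from the snippets in this collection.
backend = GCSStorage(bucket_name='my-media-bucket')
print([blob.name for blob in backend.gc_bucket.list_blobs()])
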
def get_connection():  # pragma NO COVER.
    return storage.get_connection(PROJECT, CLIENT_EMAIL, PRIVATE_KEY_PATH)

def get_connection():
    return storage.get_connection(PROJECT_NAME, CLIENT_EMAIL, PRIVATE_KEY_PATH)

# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Let's start by importing the demo module and getting a connection:
import time

from gcloud import storage
from gcloud.storage import demo

connection = storage.get_connection()

# OK, now let's look at all of the buckets...
print(list(demo.list_buckets(connection)))  # This might take a second...

# Now let's create a new bucket...
bucket_name = ("bucket-%s" % time.time()).replace(".", "")  # Get rid of dots.
print(bucket_name)
bucket = demo.create_bucket(bucket_name, connection)
print(bucket)

# Let's look at all of the buckets again...
print(list(demo.list_buckets(connection)))

# How about we create a new blob inside this bucket.
blob = storage.Blob("my-new-file.txt", bucket=bucket)

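# A plausible next step for the walkthrough above, assuming the Blob
# upload_from_string()/download_as_string() helpers available in the gcloud
# library this demo targets; the uploaded text is illustrative.
blob.upload_from_string("this is some new data!")
print(blob.download_as_string())
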
def get_connection():
    return storage.get_connection(PROJECT_ID)

def get_connection():
    return storage.get_connection(PROJECT, CLIENT_EMAIL, PRIVATE_KEY_PATH)

def get_connection():  # pragma NO COVER.
    return storage.get_connection(PROJECT_ID, CLIENT_EMAIL, KEY_FILENAME)

def _callFUT(self, *args, **kw):
    from gcloud.storage import get_connection
    return get_connection(*args, **kw)

import datetime
import os.path
from collections import Counter

import pandas as pd  # needed for the pd.DataFrame() call below

from gcloud import storage


def frange(start, stop, step):
    i = start
    while i < stop:
        yield i
        i += step

# --------------------------------------
# set default bucket
# --------------------------------------
storage.set_default_bucket("isb-cgc")
storage_conn = storage.get_connection()
storage.set_default_connection(storage_conn)

all_elements = {}
df2 = pd.DataFrame()

# --------------------------------------
# get the bucket contents
# --------------------------------------
bucket = storage.get_bucket('ptone-experiments')
for k in bucket.list_blobs(prefix="working-files/clinical_metadata/"):
    if 'counts.txt' in k.name:
        disease_type = k.name.split("/")[2].split(".")[0]

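# The script above is cut off mid-loop. As a hedged illustration of why
# Counter was imported, a tally of blobs per disease type might look like
# this; it is an assumption, not the original continuation.
disease_counts = Counter(
    k.name.split("/")[2].split(".")[0]
    for k in bucket.list_blobs(prefix="working-files/clinical_metadata/")
    if 'counts.txt' in k.name)
print(disease_counts.most_common())
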