def upload_to_GS(self, local_file_path, gs_file_path, replace=True):
    """Upload a local file to Google Storage.

    Args:
        local_file_path: path of the local file to upload
        gs_file_path: destination path within the Google Storage bucket

    Returns:
        True on success.
    """
    gs_file_path = self._normpath(gs_file_path)
    obj = Key(self.bucket)
    obj.key = gs_file_path
    obj.set_contents_from_filename(local_file_path, replace=replace)
    return True
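# A minimal standalone sketch of the same boto upload pattern used by
# upload_to_GS above, assuming credentials are configured in ~/.boto; the
# bucket and file names below are placeholders, not taken from the original.
import boto
from boto.gs.key import Key

def demo_upload_to_gs():
    conn = boto.connect_gs()                    # GS connection via ~/.boto creds
    bucket = conn.get_bucket('example-bucket')  # placeholder bucket name
    key = Key(bucket)
    key.key = 'path/in/bucket/example.txt'      # placeholder destination path
    key.set_contents_from_filename('/tmp/example.txt', replace=True)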
def upload_photo(file_path):
    """Uploads a single photo to Google Cloud Storage, records its metadata
    in Firebase, then moves the local file from the captured directory to
    the uploaded directory."""
    file_name = os.path.basename(file_path)
    global i
    i += 1
    print "Uploading {0} to Google Cloud Storage".format(file_name)
    k = Key(bucket)
    k.key = '{0}/{1}'.format(gs_bucket_destination_prefix, file_name)
    k.set_contents_from_filename(file_path)
    metadata = {'fileName': file_name}
    app.put('/{0}'.format(firebase_destination_prefix), '%.5i' % i, metadata)
    print "Moving {0}".format(file_name)
    os.rename(os.path.join(captured_path, file_name),
              os.path.join(uploaded_path, file_name))
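# upload_photo above depends on module-level state; a hedged sketch of the
# globals it assumes. The names come from the function body, but every value
# here is a placeholder, and the Firebase client is assumed to be the
# python-firebase package (whose FirebaseApplication.put(url, name, data)
# matches the call shape used above).
import os
import boto
from boto.gs.key import Key
from firebase import firebase

i = 0                                               # running upload counter
conn = boto.connect_gs()                            # creds from ~/.boto
bucket = conn.get_bucket('example-bucket')          # placeholder bucket
gs_bucket_destination_prefix = 'photos'             # placeholder GS prefix
firebase_destination_prefix = 'photos'              # placeholder Firebase node
captured_path = '/data/captured'                    # placeholder source dir
uploaded_path = '/data/uploaded'                    # placeholder archive dir
app = firebase.FirebaseApplication('https://example.firebaseio.com', None)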
def upload_file(self, source_path, dest_bucket, dest_path,
                upload_if=UploadIf.ALWAYS,
                predefined_acl=None,
                fine_grained_acl_list=None):
    """Upload contents of a local file to Google Storage.

    params:
        source_path: full path (local-OS-style) on local disk to read from
        dest_bucket: GS bucket to copy the file to
        dest_path: full path (Posix-style) within that bucket
        upload_if: one of the UploadIf values, describing in which cases we
            should upload the file
        predefined_acl: which predefined ACL to apply to the file on Google
            Storage; must be one of the PredefinedACL values defined above.
            If None, inherits dest_bucket's default object ACL.
        fine_grained_acl_list: list of (id_type, id_value, permission) tuples
            to apply to the uploaded file (on top of the predefined_acl), or
            None if predefined_acl is sufficient

    TODO(epoger): Consider adding a do_compress parameter that would compress
    the file using gzip before upload, and add a "Content-Encoding:gzip"
    header so that HTTP downloads of the file would be unzipped automatically.
    See https://developers.google.com/storage/docs/gsutil/addlhelp/
        WorkingWithObjectMetadata#content-encoding
    """
    b = self._connect_to_bucket(bucket=dest_bucket)
    local_md5 = None  # filled in lazily

    if upload_if == self.UploadIf.IF_NEW:
        old_key = b.get_key(key_name=dest_path)
        if old_key:
            print ('Skipping upload of existing file gs://%s/%s' % (
                b.name, dest_path))
            return
    elif upload_if == self.UploadIf.IF_MODIFIED:
        old_key = b.get_key(key_name=dest_path)
        if old_key:
            if not local_md5:
                local_md5 = _get_local_md5(path=source_path)
            if ('"%s"' % local_md5) == old_key.etag:
                print (
                    'Skipping upload of unmodified file gs://%s/%s : %s' % (
                        b.name, dest_path, local_md5))
                return
    elif upload_if != self.UploadIf.ALWAYS:
        raise Exception('unknown value of upload_if: %s' % upload_if)

    # Upload the file using a temporary name at first, in case the transfer
    # is interrupted partway through.
    if not local_md5:
        local_md5 = _get_local_md5(path=source_path)
    initial_key = Key(b)
    initial_key.name = dest_path + '-uploading-' + local_md5
    try:
        initial_key.set_contents_from_filename(filename=source_path,
                                               policy=predefined_acl)
    except BotoServerError, e:
        e.body = (repr(e.body) +
                  ' while uploading source_path=%s to gs://%s/%s' % (
                      source_path, b.name, initial_key.name))
        raise
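# _get_local_md5 is called by upload_file above but not shown in this snippet;
# a minimal sketch of what it presumably does (MD5 of the file contents,
# hashed in chunks so large files need not fit in memory). The chunk size is
# an arbitrary choice, not taken from the original.
import hashlib

def _get_local_md5(path):
    """Returns the MD5 hex digest of the file at the given path."""
    hasher = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(64 * 1024), b''):
            hasher.update(chunk)
    return hasher.hexdigest()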