def CreateNotFoundExceptionForObjectWrite(dst_provider,
                                          dst_bucket_name,
                                          src_provider=None,
                                          src_bucket_name=None,
                                          src_object_name=None,
                                          src_generation=None):
  """Builds a NotFoundException for a failed object upload or copy.

  A 404 from the service does not always say which resource was missing, so
  the message names every resource that could plausibly be the cause.

  Args:
    dst_provider: String abbreviation of destination provider, e.g., 'gs'.
    dst_bucket_name: Destination bucket name for the write operation.
    src_provider: String abbreviation of source provider, i.e. 'gs', if any.
    src_bucket_name: Source bucket name, if any (for the copy case).
    src_object_name: Source object name, if any (for the copy case).
    src_generation: Source object generation, if any (for the copy case).

  Returns:
    NotFoundException with appropriate message.
  """
  destination = '%s://%s' % (dst_provider, dst_bucket_name)

  # Copy case: both a source and a destination could be the missing resource.
  if src_bucket_name and src_object_name:
    source = '%s://%s/%s' % (src_provider, src_bucket_name, src_object_name)
    if src_generation:
      source = '%s#%s' % (source, src_generation)
    return NotFoundException(
        'The source object %s or the destination bucket %s does not exist.' %
        (source, destination))

  # Upload case: only the destination bucket can be missing.
  return NotFoundException(
      'The destination bucket %s does not exist or the write to the '
      'destination must be restarted' % destination)
def CreateObjectNotFoundException(code, provider, bucket_name, object_name,
                                  generation=None):
  """Returns a NotFoundException describing a missing object.

  Args:
    code: HTTP status code to attach to the exception.
    provider: String abbreviation of the provider, e.g., 'gs'.
    bucket_name: Bucket containing the missing object.
    object_name: Name of the missing object.
    generation: Optional object generation to include in the URI.

  Returns:
    NotFoundException with the object URI in its message.
  """
  parts = ['%s://%s/%s' % (provider, bucket_name, object_name)]
  if generation:
    parts.append('#%s' % generation)
  return NotFoundException('%s does not exist.' % ''.join(parts), status=code)
def _EnumerateNotificationsFromArgs(self, accept_notification_configs=True):
  """Yields bucket/notification tuples from command-line args.

  Given a list of strings that are bucket names (gs://foo) or notification
  config IDs, yield tuples of bucket names and their associated
  notifications.

  Args:
    accept_notification_configs: whether notification configs are valid args.

  Yields:
    Tuples of the form (bucket_name, Notification)

  Raises:
    CommandException: if an argument is not a gs:// bucket or (when allowed)
        a notification config name.
    NotFoundException: if a named notification config does not exist.
  """
  path_regex = self._GetNotificationPathRegex()

  for list_entry in self.args:
    match = path_regex.match(list_entry)
    if match:
      # Argument names a specific notification config on a bucket.
      if not accept_notification_configs:
        raise CommandException(
            '%s %s accepts only bucket names, but you provided %s' %
            (self.command_name, self.subcommand_name, list_entry))
      bucket_name = match.group('bucket')
      notification_id = match.group('notification')
      found = False
      for notification in self.gsutil_api.ListNotificationConfigs(
          bucket_name, provider='gs'):
        if notification.id == notification_id:
          yield (bucket_name, notification)
          found = True
          break
      if not found:
        raise NotFoundException('Could not find notification %s' % list_entry)
    else:
      # Argument should be a provider or bucket URL.
      storage_url = StorageUrlFromString(list_entry)
      if not storage_url.IsCloudUrl():
        raise CommandException(
            'The %s command must be used on cloud buckets or notification '
            'config names.' % self.command_name)
      if storage_url.scheme != 'gs':
        # Bug fix: the original raised this message without supplying the
        # format argument, so users saw a literal '%s' in the error text.
        raise CommandException(
            'The %s command only works on gs:// buckets.' % self.command_name)
      path = None
      if storage_url.IsProvider():
        # Bare gs:// means "all buckets in the default project".
        path = 'gs://*'
      elif storage_url.IsBucket():
        path = list_entry
      if not path:
        raise CommandException(
            'The %s command cannot be used on cloud objects, only buckets' %
            self.command_name)
      # Yield every notification on every matching bucket.
      for blr in self.WildcardIterator(path).IterBuckets(bucket_fields=['id']):
        for notification in self.gsutil_api.ListNotificationConfigs(
            blr.storage_url.bucket_name, provider='gs'):
          yield (blr.storage_url.bucket_name, notification)
def RunCommand(self):
  """Command entry point for the ls command.

  Parses sub-options, then lists buckets and/or objects for each URL
  argument (defaulting to all gs:// buckets when no args are given).
  Returns 0 on success; raises CommandException / NotFoundException when a
  non-wildcard URL matched nothing.
  """
  # Aggregate error flags, checked after all URLs are processed so every
  # argument gets listed before we fail.
  got_nomatch_errors = False
  got_bucket_nomatch_errors = False
  listing_style = ListingStyle.SHORT
  get_bucket_info = False
  self.recursion_requested = False
  self.all_versions = False
  self.include_etag = False
  self.human_readable = False
  if self.sub_opts:
    for o, a in self.sub_opts:
      if o == '-a':
        self.all_versions = True
      elif o == '-e':
        self.include_etag = True
      elif o == '-b':
        get_bucket_info = True
      elif o == '-h':
        self.human_readable = True
      elif o == '-l':
        listing_style = ListingStyle.LONG
      elif o == '-L':
        listing_style = ListingStyle.LONG_LONG
      elif o == '-p':
        self.project_id = a
      elif o == '-r' or o == '-R':
        self.recursion_requested = True

  if not self.args:
    # default to listing all gs buckets
    self.args = ['gs://']

  total_objs = 0
  total_bytes = 0

  # Only print a per-bucket header when listing more than one URL, so
  # single-URL output stays uncluttered.
  def MaybePrintBucketHeader(blr):
    if len(self.args) > 1:
      print '%s:' % blr.url_string.encode(UTF8)

  print_bucket_header = MaybePrintBucketHeader

  for url_str in self.args:
    storage_url = StorageUrlFromString(url_str)
    if storage_url.IsFileUrl():
      raise CommandException('Only cloud URLs are supported for %s' %
                             self.command_name)
    # Restrict the fields fetched per bucket to what the listing style
    # actually displays, to minimize API payload.
    bucket_fields = None
    if (listing_style == ListingStyle.SHORT or
        listing_style == ListingStyle.LONG):
      bucket_fields = ['id']
    elif listing_style == ListingStyle.LONG_LONG:
      bucket_fields = [
          'location', 'storageClass', 'versioning', 'acl', 'defaultObjectAcl',
          'website', 'logging', 'cors', 'lifecycle'
      ]
    if storage_url.IsProvider():
      # Provider URL: use bucket wildcard to list buckets.
      for blr in self.WildcardIterator(
          '%s://*' % storage_url.scheme).IterBuckets(
              bucket_fields=bucket_fields):
        self._PrintBucketInfo(blr, listing_style)
    elif storage_url.IsBucket() and get_bucket_info:
      # ls -b bucket listing request: List info about bucket(s).
      total_buckets = 0
      for blr in self.WildcardIterator(url_str).IterBuckets(
          bucket_fields=bucket_fields):
        if not ContainsWildcard(url_str) and not blr.root_object:
          # Iterator does not make an HTTP call for non-wildcarded
          # listings with fields=='id'. Ensure the bucket exists by calling
          # GetBucket.
          self.gsutil_api.GetBucket(blr.storage_url.bucket_name,
                                    fields=['id'],
                                    provider=storage_url.scheme)
        self._PrintBucketInfo(blr, listing_style)
        total_buckets += 1
      if not ContainsWildcard(url_str) and not total_buckets:
        # Non-wildcard bucket URL matched nothing: report at the end.
        got_bucket_nomatch_errors = True
    else:
      # URL names a bucket, object, or object subdir ->
      # list matching object(s) / subdirs.
      def _PrintPrefixLong(blr):
        print '%-33s%s' % ('', blr.url_string.encode(UTF8))

      if listing_style == ListingStyle.SHORT:
        # ls helper by default readies us for a short listing.
        ls_helper = LsHelper(self.WildcardIterator,
                             self.logger,
                             all_versions=self.all_versions,
                             print_bucket_header_func=print_bucket_header,
                             should_recurse=self.recursion_requested)
      elif listing_style == ListingStyle.LONG:
        bucket_listing_fields = ['name', 'updated', 'size']
        if self.all_versions:
          # Version-specific fields only matter under -a.
          bucket_listing_fields.extend(['generation', 'metageneration'])
        if self.include_etag:
          bucket_listing_fields.append('etag')

        ls_helper = LsHelper(self.WildcardIterator,
                             self.logger,
                             print_object_func=self._PrintLongListing,
                             print_dir_func=_PrintPrefixLong,
                             print_bucket_header_func=print_bucket_header,
                             all_versions=self.all_versions,
                             should_recurse=self.recursion_requested,
                             fields=bucket_listing_fields)

      elif listing_style == ListingStyle.LONG_LONG:
        # List all fields
        bucket_listing_fields = None
        ls_helper = LsHelper(self.WildcardIterator,
                             self.logger,
                             print_object_func=PrintFullInfoAboutObject,
                             print_dir_func=_PrintPrefixLong,
                             print_bucket_header_func=print_bucket_header,
                             all_versions=self.all_versions,
                             should_recurse=self.recursion_requested,
                             fields=bucket_listing_fields)
      else:
        raise CommandException('Unknown listing style: %s' % listing_style)

      exp_dirs, exp_objs, exp_bytes = ls_helper.ExpandUrlAndPrint(storage_url)
      if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
        # An explicit object URL that expanded to nothing is an error.
        got_nomatch_errors = True
      total_bytes += exp_bytes
      total_objs += exp_objs

  if total_objs and listing_style != ListingStyle.SHORT:
    print('TOTAL: %d objects, %d bytes (%s)' %
          (total_objs, total_bytes, MakeHumanReadable(float(total_bytes))))
  if got_nomatch_errors:
    raise CommandException('One or more URLs matched no objects.')
  if got_bucket_nomatch_errors:
    raise NotFoundException('One or more bucket URLs matched no buckets.')
  return 0
def CreateBucketNotFoundException(code, provider, bucket_name):
  """Returns a NotFoundException for a bucket that does not exist.

  Args:
    code: HTTP status code to attach to the exception.
    provider: String abbreviation of the provider, e.g., 'gs'.
    bucket_name: Name of the missing bucket.

  Returns:
    NotFoundException with the bucket URL in its message.
  """
  bucket_url_string = '%s://%s' % (provider, bucket_name)
  return NotFoundException('%s bucket does not exist.' % bucket_url_string,
                           status=code)
def _TranslateApitoolsException(self, e, service_account_id=None):
  """Translates apitools exceptions into their gsutil equivalents.

  Args:
    e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS.
    service_account_id: Optional service account ID that caused the exception.

  Returns:
    CloudStorageApiServiceException for translatable exceptions, None
    otherwise.
  """
  if isinstance(e, apitools_exceptions.HttpError):
    message = self._GetMessageFromHttpError(e)
    if e.status_code == 400:
      # It is possible that the Project ID is incorrect. Unfortunately the
      # JSON API does not give us much information about what part of the
      # request was bad.
      return BadRequestException(message or 'Bad Request',
                                 status=e.status_code)
    elif e.status_code == 401:
      if 'Login Required' in str(e):
        return AccessDeniedException(message or
                                     'Access denied: login required.',
                                     status=e.status_code)
      elif 'insufficient_scope' in str(e):
        # If the service includes insufficient scope error detail in the
        # response body, this check can be removed.
        return AccessDeniedException(
            _INSUFFICIENT_OAUTH2_SCOPE_MESSAGE,
            status=e.status_code,
            body=self._GetAcceptableScopesFromHttpError(e))
      # NOTE(review): a 401 matching neither substring falls through to the
      # final ServiceException below.
    elif e.status_code == 403:
      # Messaging for when the the originating credentials don't have access
      # to impersonate a service account.
      if 'The caller does not have permission' in str(e):
        return AccessDeniedException(
            'Service account impersonation failed. Please go to the Google '
            'Cloud Platform Console (https://cloud.google.com/console), '
            'select IAM & admin, then Service Accounts, and grant your '
            'originating account the Service Account Token Creator role on '
            'the target service account.')
      # The server's errors message when IAM Credentials API aren't enabled
      # are pretty great so we just display them.
      if 'IAM Service Account Credentials API has not been used' in str(e):
        return AccessDeniedException(message)
      # Note: this 'if' (not 'elif') starts a fresh chain; the branches below
      # are only reached when the two checks above did not match.
      if 'The account for the specified project has been disabled' in str(e):
        return AccessDeniedException(message or 'Account disabled.',
                                     status=e.status_code)
      elif 'Daily Limit for Unauthenticated Use Exceeded' in str(e):
        return AccessDeniedException(message or
                                     'Access denied: quota exceeded. '
                                     'Is your project ID valid?',
                                     status=e.status_code)
      elif 'User Rate Limit Exceeded' in str(e):
        return AccessDeniedException('Rate limit exceeded. Please retry this '
                                     'request later.',
                                     status=e.status_code)
      elif 'Access Not Configured' in str(e):
        return AccessDeniedException(
            'Access Not Configured. Please go to the Google Cloud Platform '
            'Console (https://cloud.google.com/console#/project) for your '
            'project, select APIs & services, and enable the Google Cloud '
            'IAM Credentials API.',
            status=e.status_code)
      elif 'insufficient_scope' in str(e):
        # If the service includes insufficient scope error detail in the
        # response body, this check can be removed.
        return AccessDeniedException(
            _INSUFFICIENT_OAUTH2_SCOPE_MESSAGE,
            status=e.status_code,
            body=self._GetAcceptableScopesFromHttpError(e))
      else:
        # e.message is a Python 2 exception attribute; presumably this file
        # targets Python 2 — confirm before porting.
        return AccessDeniedException(message or e.message or
                                     service_account_id,
                                     status=e.status_code)
    elif e.status_code == 404:
      return NotFoundException(message or e.message, status=e.status_code)
    elif e.status_code == 409 and service_account_id:
      return ServiceException('The key %s already exists.' %
                              service_account_id,
                              status=e.status_code)
    elif e.status_code == 412:
      return PreconditionException(message, status=e.status_code)
    # Any other HTTP status becomes a generic ServiceException.
    return ServiceException(message, status=e.status_code)
  # Non-HttpError exceptions are not translated (implicitly returns None).
def _TranslateApitoolsException(self, e, key_name=None):
  """Translates apitools exceptions into their gsutil equivalents.

  Args:
    e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS.
    key_name: Optional key name in request that caused the exception.

  Returns:
    CloudStorageApiServiceException for translatable exceptions, None
    otherwise.
  """
  if isinstance(e, apitools_exceptions.HttpError):
    message = self._GetMessageFromHttpError(e)
    if e.status_code == 400:
      # It is possible that the Project ID is incorrect. Unfortunately the
      # JSON API does not give us much information about what part of the
      # request was bad.
      return BadRequestException(message or 'Bad Request',
                                 status=e.status_code)
    elif e.status_code == 401:
      if 'Login Required' in str(e):
        return AccessDeniedException(message or
                                     'Access denied: login required.',
                                     status=e.status_code)
      elif 'insufficient_scope' in str(e):
        # If the service includes insufficient scope error detail in the
        # response body, this check can be removed.
        return AccessDeniedException(
            _INSUFFICIENT_OAUTH2_SCOPE_MESSAGE,
            status=e.status_code,
            body=self._GetAcceptableScopesFromHttpError(e))
      # NOTE(review): a 401 matching neither substring falls through to the
      # final ServiceException below.
    elif e.status_code == 403:
      if 'The account for the specified project has been disabled' in str(e):
        return AccessDeniedException(message or 'Account disabled.',
                                     status=e.status_code)
      elif 'Daily Limit for Unauthenticated Use Exceeded' in str(e):
        return AccessDeniedException(message or
                                     'Access denied: quota exceeded. '
                                     'Is your project ID valid?',
                                     status=e.status_code)
      elif 'User Rate Limit Exceeded' in str(e):
        return AccessDeniedException('Rate limit exceeded. Please retry this '
                                     'request later.',
                                     status=e.status_code)
      elif 'Access Not Configured' in str(e):
        return AccessDeniedException(
            'Access Not Configured. Please go to the Google Cloud Platform '
            'Console (https://cloud.google.com/console#/project) for your '
            'project, select APIs & services, and enable the Google Cloud '
            'KMS API.',
            status=e.status_code)
      elif 'insufficient_scope' in str(e):
        # If the service includes insufficient scope error detail in the
        # response body, this check can be removed.
        return AccessDeniedException(
            _INSUFFICIENT_OAUTH2_SCOPE_MESSAGE,
            status=e.status_code,
            body=self._GetAcceptableScopesFromHttpError(e))
      else:
        # e.message is a Python 2 exception attribute; presumably this file
        # targets Python 2 — confirm before porting.
        return AccessDeniedException(message or e.message or key_name,
                                     status=e.status_code)
    elif e.status_code == 404:
      return NotFoundException(message or e.message, status=e.status_code)
    elif e.status_code == 409 and key_name:
      return ServiceException('The key %s already exists.' % key_name,
                              status=e.status_code)
    elif e.status_code == 412:
      return PreconditionException(message, status=e.status_code)
    # Any other HTTP status becomes a generic ServiceException.
    return ServiceException(message, status=e.status_code)
  # Non-HttpError exceptions are not translated (implicitly returns None).