def __init__(self): """ __init__ must be callable without arguments. Check for bucket name settings upon initialization """ try: cloudstorage.validate_bucket_name(settings.BUCKET_NAME) except ValueError: raise ImproperlyConfigured("Please specify a valid value for BUCKET_NAME") self._bucket = '/' + settings.BUCKET_NAME
def __init__(self): """ __init__ must be callable without arguments. Check for bucket name settings upon initialization """ try: cloudstorage.validate_bucket_name('foe-storage') except ValueError: raise ImproperlyConfigured("Please specify a valid value for APPENGINE_TOOLKIT['BUCKET_NAME'] setting") self._bucket = '/foe-storage/'
def __init__(self): """ __init__ must be callable without arguments. Check for bucket name settings upon initialization """ try: cloudstorage.validate_bucket_name(settings.BUCKET_NAME) except ValueError: raise ImproperlyConfigured( "Please specify a valid value for BUCKET_NAME") self._bucket = '/' + settings.BUCKET_NAME
def __init__(self): """ __init__ must be callable without arguments. Check for bucket name settings upon initialization """ try: cloudstorage.validate_bucket_name( appengine_toolkit_settings.BUCKET_NAME) except ValueError: raise ImproperlyConfigured( "Please specify a valid value for APPENGINE_TOOLKIT['BUCKET_NAME'] setting" ) self._bucket = '/' + appengine_toolkit_settings.BUCKET_NAME
def post(self): """ POST 'bucket' is required. """ context = self._get_base_context() bucket = self.request.POST.get('bucket', '').strip() context['bucket'] = bucket errors = [] if not bucket: errors.append('Bucket name is required.') if bucket: try: cloudstorage.validate_bucket_name(bucket) except ValueError as e: bucket = None errors.append('Invalid bucket name. %s' % e.message) # try to write a small file if not errors: try: migrator.write_test_file(bucket) except Exception as e: errors.append('Could not write a file to <code>%s</code>. ' 'Ensure that <code>%s</code> ' 'has Writer access. Message: <code>%s</code>' % ( bucket, context['service_account'], e.message)) if errors: context['errors'] = errors self.render_response('index.html', **context) return pipeline = migrator.MigrateAllBlobsPipeline(bucket) pipeline.start(queue_name=config.config.QUEUE_NAME) context['root_pipeline_id'] = pipeline.root_pipeline_id self.render_response('started.html', **context)
def validate(cls, mapper_spec): """Validate mapper specification. Args: mapper_spec: an instance of model.MapperSpec. Raises: BadWriterParamsError: if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name. """ writer_spec = _get_params(mapper_spec, allow_old=False) # Bucket Name is required if cls.BUCKET_NAME_PARAM not in writer_spec: raise errors.BadWriterParamsError("%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_bucket_name(writer_spec[cls.BUCKET_NAME_PARAM]) except ValueError, error: raise errors.BadWriterParamsError("Bad bucket name, %s" % (error))
def validate(cls, job_config): """Validate mapper specification. Args: job_config: map_job.JobConfig. Raises: BadReaderParamsError: if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name. """ reader_params = job_config.input_reader_params # Bucket Name is required if cls.BUCKET_NAME_PARAM not in reader_params: raise errors.BadReaderParamsError("%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_bucket_name(reader_params[cls.BUCKET_NAME_PARAM]) except ValueError, error: raise errors.BadReaderParamsError("Bad bucket name, %s" % (error))
def post(self): """ POST 'bucket' is required. """ context = self._get_base_context() bucket = self.request.POST.get('bucket', '').strip() context['bucket'] = bucket errors = [] if not bucket: errors.append('Bucket name is required.') if bucket: try: cloudstorage.validate_bucket_name(bucket) except ValueError as e: bucket = None errors.append('Invalid bucket name. %s' % e.message) # try to write a small file if not errors: try: migrator.write_test_file(bucket) except Exception as e: errors.append('Could not write a file to <code>%s</code>. ' 'Ensure that <code>%s</code> ' 'has Writer access. Message: <code>%s</code>' % (bucket, context['service_account'], e.message)) if errors: context['errors'] = errors self.render_response('index.html', **context) return pipeline = migrator.MigrateAllBlobsPipeline(bucket) pipeline.start(queue_name=config.config.QUEUE_NAME) context['root_pipeline_id'] = pipeline.root_pipeline_id self.render_response('started.html', **context)
def validate(cls, mapper_spec): """Validate mapper specification. Args: mapper_spec: an instance of model.MapperSpec. Raises: BadWriterParamsError if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name. """ writer_spec = _get_params(mapper_spec, allow_old=False) # Bucket Name is required if cls.BUCKET_NAME_PARAM not in writer_spec: raise errors.BadWriterParamsError( "%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_bucket_name( writer_spec[cls.BUCKET_NAME_PARAM]) except ValueError, error: raise errors.BadWriterParamsError("Bad bucket name, %s" % (error))
def validate(cls, job_config): """Validate mapper specification. Args: job_config: map_job.JobConfig. Raises: BadReaderParamsError: if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name. """ reader_params = job_config.input_reader_params # Bucket Name is required if cls.BUCKET_NAME_PARAM not in reader_params: raise errors.BadReaderParamsError( "%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_bucket_name( reader_params[cls.BUCKET_NAME_PARAM]) except ValueError, error: raise errors.BadReaderParamsError("Bad bucket name, %s" % (error))
def __init__(self, **kwargs):
    cloudstorage.validate_bucket_name(settings.GS_BUCKET_NAME)
    self.bucket_name = settings.GS_BUCKET_NAME

def build_gcs_filename(blob_info_or_key, filename=None, bucket_name=None,
                       include_bucket=False, include_leading_slash=False):
    """
    Builds a GCS filename.

    If all values are provided, and both include_bucket and include_leading_slash
    are True and ROOT_GCS_FOLDER is configured, the resulting GCS filename will
    look like:

        /[bucket_name]/[ROOT_GCS_FOLDER]/[blob_key_str]/[filename]

    Other possible return values (depending on config and inputs) are:

        [ROOT_GCS_FOLDER]/[blob_key_str]
        /[ROOT_GCS_FOLDER]/[blob_key_str]
        [blob_key_string]
        /[blob_key_string]
        [blob_key_string]/[filename]
        /[blob_key_string]/[filename]
        [bucket_name]/[blob_key_string]
        /[bucket_name]/[blob_key_string]
        ...

    Args:
        blob_info_or_key: The blob's BlobInfo, BlobKey, or blob key string.
        filename: A filename to include in the GCS filename (optional).
        bucket_name: The root bucket name to include in the GCS filename (optional).
        include_bucket: A flag indicating if the bucket_name should be in the
            returned GCS filename.
        include_leading_slash: A flag indicating if the GCS filename should have
            a leading slash.

    Returns:
        A GCS filename corresponding to the various input args.
    """
    blob_key_str = _get_blob_key_str(blob_info_or_key)

    # shred the filename a bit so that the storage browser has a hope of working
    gcs_filename = '%s/%s/%s/%s/%s' % (
        blob_key_str[0:8],  # keys have same start
        blob_key_str[8:10],
        blob_key_str[10:12],
        blob_key_str[12:14],
        blob_key_str)

    # add the filename to the end, if specified
    if filename:
        gcs_filename += '/' + filename

    # prepend the root folder
    root_folder = config.config.ROOT_GCS_FOLDER
    if not root_folder:
        root_folder = ''
    root_folder = root_folder.strip('/')  # remove any leading/trailing slash
    if root_folder:
        gcs_filename = root_folder + '/' + gcs_filename

    # prepend the bucket including a leading slash, if specified
    if include_bucket:
        if not bucket_name:
            raise ValueError('bucket_name is required.')
        cloudstorage.validate_bucket_name(bucket_name)
        gcs_filename = bucket_name + '/' + gcs_filename

    if include_leading_slash:
        gcs_filename = '/' + gcs_filename

    return gcs_filename

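A hedged walk-through of what the function above returns for made-up inputs. The blob key, filename, and bucket below are illustrative; it assumes _get_blob_key_str returns a plain string key unchanged and that ROOT_GCS_FOLDER is not configured.

# Illustrative trace only; all values are made up.
# The key 'abcdefgh12345678' is shredded into 'abcdefgh/12/34/56/abcdefgh12345678',
# then the filename is appended and the bucket and leading slash are prepended.
path = build_gcs_filename('abcdefgh12345678',
                          filename='photo.jpg',
                          bucket_name='example-bucket',
                          include_bucket=True,
                          include_leading_slash=True)
assert path == '/example-bucket/abcdefgh/12/34/56/abcdefgh12345678/photo.jpg'
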
def __init__(self, **kwargs):
    self.bucket_name = getattr(settings, 'GS_BUCKET_NAME', None)
    if self.bucket_name is None:
        self.bucket_name = app_identity.get_default_gcs_bucket_name()
    cloudstorage.validate_bucket_name(self.bucket_name)

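The common thread in all of the snippets above is calling cloudstorage.validate_bucket_name() up front and treating ValueError as a configuration error. A minimal, self-contained sketch of that pattern follows; the bucket names in the comments are made up.

import cloudstorage


def is_valid_bucket_name(name):
    """Return True if `name` passes the library's syntactic bucket-name check."""
    try:
        cloudstorage.validate_bucket_name(name)
    except ValueError:
        return False
    return True

# e.g. is_valid_bucket_name('example-bucket') -> True
# e.g. is_valid_bucket_name('') -> False (rejected as too short)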