Example #1
0
 def GetGoogleStorage(self):
     """ Used for App Engine HTTP requests.

     Resolves the current docs version from the GS_VERSION object, builds
     the GCS object path for the requested document, sets cache headers,
     then serves the blob directly via its GCS key.
     """
     version_key = blobstore.create_gs_key(GS_VERSION)
     blob_reader = blobstore.BlobReader(version_key)
     version = blob_reader.read()
     # Drops the first len('docs/') chars of the request path — presumably
     # the docs URL prefix; confirm against the route configuration.
     path = GS_PATH + version + self.request.path[len('docs/'):]
     gs_key = blobstore.create_gs_key(path)
     self.HandleCacheAge(path)
     self.send_blob(gs_key)
Example #2
0
 def GetGoogleStorage(self):
   """Serve the requested docs file straight from Google Cloud Storage."""
   # Resolve the current docs version stored in the GS_VERSION object.
   reader = blobstore.BlobReader(blobstore.create_gs_key(GS_VERSION))
   current_version = reader.read()
   # Build the GCS object path for the requested document (path after the
   # leading len('docs/') characters selects the file).
   object_path = (GS_PATH + current_version +
                  self.request.path[len('docs/'):])
   self.HandleCacheAge(object_path)
   self.send_blob(blobstore.create_gs_key(object_path))
Example #3
0
    def post(self):
        """Handle a product-creation upload: store two images, create the
        product, and link its ingredients.

        Expects multipart uploads 'front_image' and 'ingredients_image' plus
        a newline-separated 'ingredients' form field. Writes a plain-text
        error message and returns early when validation fails.
        """
        # get the info about the uploaded images
        upload_files_front = self.get_file_infos('front_image')
        upload_files_ing = self.get_file_infos('ingredients_image')
        if not len(upload_files_front) or not len(upload_files_ing):
            self.response.out.write('Images not uploaded')
            return

        front_image = upload_files_front[0]
        ingredients_image = upload_files_ing[0]

        # check whether the image has been uploaded
        if not front_image.gs_object_name or not ingredients_image.gs_object_name:
            self.response.out.write(
                'Image info not found. Seems to be upload error!')
            return

        ingredients = self.request.get('ingredients')
        if not ingredients:
            self.response.out.write(
                'You must provide a list of ingredients for the product to be created.'
            )
            return

        user = User.load(users.get_current_user())

        # NOTE(review): uses class-level PRODUCT_BARCODE / PRODUCT_NAME
        # constants — presumably a fixed/test product; confirm with callers.
        product = Product.create_by_barcode(self.PRODUCT_BARCODE, user)
        product.name = self.PRODUCT_NAME
        product.put()

        # Front image is the featured one; its OCR text is left empty.
        Image.create(creator=user,
                     product=product,
                     blob=blobstore.create_gs_key(front_image.gs_object_name),
                     image_type=ImageType.Front,
                     ocr_result='',
                     featured=True)

        Image.create(creator=user,
                     product=product,
                     blob=blobstore.create_gs_key(
                         ingredients_image.gs_object_name),
                     image_type=ImageType.Ingredient,
                     ocr_result=ingredients,
                     featured=False)

        # Link each known ingredient (one name per line) to the product;
        # unknown names are silently skipped.
        for ingredient_name in ingredients.splitlines():
            ingredient = Ingredient.find_by_name(ingredient_name)
            if ingredient:
                ProductIngredient.add_entry(product, ingredient, user)

        self.redirect('/utils')
Example #4
0
def ler_arquivo_dict(filename):
    """Read *filename* from the GCS bucket and parse its XML content.

    Args:
        filename: Object name inside the module-level ``bucket_identity``
            bucket.

    Returns:
        The XML document parsed by ``xmltodict`` (namespaces processed),
        as a dict-like mapping.
    """
    # The blobstore API addresses GCS objects as '/gs/<bucket>/<object>'.
    blobstore_filename = '/gs/{0}/{1}'.format(bucket_identity, filename)
    blob_key = blobstore.create_gs_key(blobstore_filename)
    blob_reader = blobstore.BlobReader(blob_key)
    conteudo = blob_reader.read()
    # Renamed from `dict` to avoid shadowing the builtin.
    parsed = xmltodict.parse(conteudo, process_namespaces=True)
    return parsed
  def update_response_headers(self, request_headers, response_headers):
    """Populate caching/serving headers for the GCS object backing this file.

    Stats the object in Cloud Storage, sets Last-Modified / ETag /
    Content-Type, and — unless the client's If-None-Match matches the ETag —
    adds X-AppEngine-BlobKey so App Engine serves the blob directly.

    Raises:
      FileNotFoundError: if the object does not exist in Cloud Storage.
    """
    try:
      # cloudstorage.stat doesn't use "/gs" prefix.
      gs_object_name = self.gs_object_name[3:]
      stat = cloudstorage.stat(gs_object_name)
    except cloudstorage.errors.NotFoundError as e:
      raise FileNotFoundError(str(e))

    headers = {}
    # st_ctime is a POSIX timestamp; format per HTTP date (RFC 1123, GMT).
    time_obj = datetime.fromtimestamp(stat.st_ctime).timetuple()
    headers['Last-Modified'] =  time.strftime('%a, %d %b %Y %H:%M:%S GMT', time_obj)
    headers['ETag'] = '"{}"'.format(stat.etag)
    if stat.content_type:
      headers['Content-Type'] = stat.content_type

    # The presence of "X-AppEngine-BlobKey" tells App Engine that we want to
    # serve the GCS blob directly to the user. This avoids reading the blob data
    # into the App Engine application. If the user has the file cached already,
    # omit the X-AppEngine-BlobKey header since we want to serve an empty response
    # with a 304 status code.
    request_etag = request_headers.get('If-None-Match')
    if request_etag != headers['ETag']:
      key = blobstore.create_gs_key(self.gs_object_name)
      headers['X-AppEngine-BlobKey'] = key

    response_headers.update(headers)
Example #6
0
    def post(self):
        """Accept an uploaded image, store it in GCS, and return JSON metadata.

        Reads 'image' (raw bytes), 'name' and 'contentType' form fields,
        writes the bytes to Cloud Storage, records an ImageModel entity, and
        responds with the serving URL, name, mimetype and datastore id.
        """
        image = self.request.get('image')
        name = self.request.get('name')
        content_type = self.request.get('contentType')

        bucket_name = generate_bucket_filename(name)

        with gcs.open(bucket_name, 'w', content_type=content_type) as f:
            f.write(image)

        # The '/gs/...'-style name from the helper is what create_gs_key needs.
        blobstore_filename = generate_blobstore_filename(name)
        blob_key = blobstore.create_gs_key(blobstore_filename)
        restaurant_image_url = images.get_serving_url(blob_key)

        image_model = ImageModel(name=name,
                                 bucket_name=bucket_name,
                                 blob_key=blob_key)
        image_key = image_model.put()

        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps({
            'url': restaurant_image_url,
            'name': name,
            'mimetype': content_type,
            'id': image_key.id()
        }))
Example #7
0
    def post(self):
        """Upload a designer-frame image to GCS and compute its serving URL.

        Python 2 handler (`except Exception, msg`). On any failure during
        key/URL creation, responds with a 'Try again' warning.
        """
        image_file = self.request.POST.get("pic", None)
        file_obj = self.request.get("pic", None)
        if not isinstance(image_file, cgi.FieldStorage):
            return json_response(self.response, {}, ERROR, 'Select image file')

        # 'key' is a urlsafe ndb key; NOTE(review): `e` is fetched but never
        # used in this visible span — confirm the rest of the handler uses it.
        key = self.request.get('key')
        e = ndb.Key(urlsafe=key).get()

        file_name = image_file.filename
        bucket_path = '/designer_frames/%s' % (file_name)
        bucket_path = bucket_path.lower()
        serving_url = ''
        upload_file(file_obj, bucket_path)
        try:
            logging.info('create_gs_key')
            bucket_key = blobstore.create_gs_key('/gs' + bucket_path)
            logging.info(bucket_key)
            logging.info('serving_url')
            if self.DEV:
                # Dev server: no public GCS URL, go through the images service.
                serving_url = images.get_serving_url(bucket_key)
            else:
                # Production: serve straight from the public bucket URL.
                file_name = file_name.replace(' ', '%20').lower()
                serving_url = 'https://storage.googleapis.com/designer_frames/%s' % (
                    file_name)
            logging.info(serving_url)
        except Exception, msg:
            logging.error(msg)
            return json_response(self.response, {}, WARNING, 'Try again')
Example #8
0
def save_to_gcs(file_obj):
    """Write a JPEG blob to GCS and return the saved media object.

    Args:
        file_obj: raw JPEG image bytes.

    Returns:
        Whatever save_gcs_to_media() returns for the stored object.
    """
    time_stamp = int(time.time())
    app_id = app_identity.get_application_id()

    # Default bucket (<app-id>.appspot.com); timestamped name avoids collisions
    # at one-second granularity.
    fname = '/%s.appspot.com/post_%s.jpg' % (app_id, time_stamp)

    # Content Types
    # audio/mpeg
    # image/jpeg

    # Context manager guarantees the GCS file is closed even on write errors
    # (original opened and closed manually).
    with gcs.open(fname, 'w', content_type="image/jpeg") as gcs_file:
        gcs_file.write(file_obj)

    # Build the Image wrapper once (original constructed it twice); accessing
    # height/width also validates that the payload decodes as an image.
    img = images.Image(image_data=file_obj)
    height = img.height
    width = img.width

    # Blobstore APIs need the '/gs' prefix to address GCS objects.
    gcs_filename = "/gs%s" % fname
    serving_url = images.get_serving_url(blobstore.create_gs_key(gcs_filename))
    media_obj = save_gcs_to_media(gcs_filename, serving_url)

    return media_obj
Example #9
0
    def get(self):
        """Serve a user's profile icon as a circular-masked PNG.

        Query params:
            uuid: the user whose icon to fetch (required).
            size: square output size in px (default config.PROFILE_ICON_SIZE).

        Redirects to config.DEFAULT_PHOTO_URL when the stored image cannot
        be transformed (missing object, corrupt image, ...).
        """
        uuid = self.request.get('uuid')
        size = self.request.get('size')
        size = int(size) if size else config.PROFILE_ICON_SIZE

        if not uuid:
            api.write_error(self.response, 400, 'Unknown or missing user')
            return

        filename = config.PROFILE_BUCKET + uuid
        image = images.Image(blob_key=blobstore.create_gs_key('/gs' + filename))
        image.resize(width=size, height=size, crop_to_fit=True, allow_stretch=False)
        try:
            png_data = StringIO.StringIO(image.execute_transforms(output_encoding=images.PNG))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; any transform failure falls back.
            self.redirect(config.DEFAULT_PHOTO_URL)
            return

        # Build an antialiased circular alpha mask at 3x, then downsample.
        im = Image.open(png_data)
        bigsize = (im.size[0] * 3, im.size[1] * 3)
        mask = Image.new('L', bigsize, 0)
        draw = ImageDraw.Draw(mask)
        draw.ellipse((0, 0) + bigsize, fill=255)
        mask = mask.resize(im.size, Image.ANTIALIAS)
        im.putalpha(mask)

        output = StringIO.StringIO()
        im.save(output, 'PNG')

        self.response.headers['Content-Type'] = 'image/png'
        self.response.headers['Cache-Control'] = 'public,max-age=%d' % (config.MEDIA_MAX_AGE)
        self.response.out.write(output.getvalue())
        output.close()
Example #10
0
def saveImageInGCS(image_data):
    """Decode a base64 PNG, store it in GCS, and return its serving URL.

    Args:
        image_data: base64-encoded image bytes.

    Returns:
        str: an images-service serving URL for the stored object.
    """
    # ======================
    # Save file in GCS
    # ======================
    image_data = base64.b64decode(image_data)  #image_data.encode('utf-8')
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())

    bucket = '/' + bucket_name
    # Content-addressed filename: identical images map to the same object.
    filename = bucket + '/' + getImageHash(image_data) + '.png'
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    # Context manager closes the GCS file even if the write raises
    # (original opened and closed manually).
    with gcs.open(filename,
                  'w',
                  content_type='image/png',
                  options={
                      'x-goog-meta-foo': 'foo',
                      'x-goog-meta-bar': 'bar'
                  },
                  retry_params=write_retry_params) as gcs_file:
        gcs_file.write(image_data)

    # '/gs' prefix addresses a GCS object through the blobstore API.
    gcs_object_name = '/gs' + filename
    blob_key = blobstore.create_gs_key(gcs_object_name)
    image_url = images.get_serving_url(blob_key)
    return image_url
 def FAQCategoryUpdate(self, faqcat):
     """ Update an existing FAQCategory (admin only).

     Replaces the category image in GCS when one is supplied (re-encoded to
     PNG, max 6 MiB) and refreshes the serving URL.

     Raises:
         endpoints.BadRequestException: missing id, unknown category, or
             oversized image.
         endpoints.UnauthorizedException: caller is not an admin.
     """
     if self.is_current_user_admin():
         print(faqcat.id)
         # `is not None` replaces the non-idiomatic `!= None` comparisons.
         if faqcat.id is not None:
             item = ndb.Key(urlsafe=faqcat.id).get()
             if item is None:
                 raise endpoints.BadRequestException("FAQCategory not found")
             else:
                 if faqcat.image is not None:
                     image = faqcat.image
                     if len(image) > 6*1024*1024:
                         raise endpoints.BadRequestException("Max. image size is 6*1024*1024 bytes")
                     write_retry_params = gcs.RetryParams(backoff_factor=1.1)
                     filename = "/" + BUCKET_NAME + "/" +str(uuid.uuid4())
                     # rotate(0) is a no-op transform used only to re-encode
                     # the payload as PNG.
                     png = images.rotate(image, 0, output_encoding=images.PNG)
                     # Context manager closes the file even if the write raises.
                     with gcs.open(filename,'w',retry_params=write_retry_params,content_type='image/png',) as gcs_file:
                         gcs_file.write(png)
                     blobkey = blobstore.create_gs_key("/gs" + filename)
                     url = images.get_serving_url(blobkey)
                     item.image_url = url
                     # NOTE(review): stores the GCS filename, not the blob
                     # key — matches OfferInsert's convention; confirm.
                     item.blobkey = filename
                 del faqcat.image
                 item.put()
                 return item
         else:
             raise endpoints.BadRequestException("ID missing")
     else:
         raise endpoints.UnauthorizedException("Only Volunteers users can update FAQCategory. \
             Contact [email protected] for more information.")
 def OfferInsert(self, offer):
     """ Create a new offer owned by the current user, storing images in GCS.

     Each image (max 6 MiB) is written under a random name; serving URLs and
     GCS filenames are recorded on the offer before it is stored.

     Raises:
         endpoints.BadRequestException: if any image exceeds the size limit
             (files already written in this call are deleted first).
     """
     user = self.get_current_user()
     offer.owner_key = user.key
     urls = []
     blobkeys = []
     for image in offer.images:
         if len(image) > 6*1024*1024:
             # Roll back files written so far; despite its name, `blobkeys`
             # holds GCS filenames, which is what gcs.delete expects.
             for blobkey in blobkeys:
                 gcs.delete(blobkey)
             raise endpoints.BadRequestException("Max. image size is 6*1024*1024 bytes")
         write_retry_params = gcs.RetryParams(backoff_factor=1.1)
         filename = "/" + BUCKET_NAME + "/" +str(uuid.uuid4())
         # NOTE(review): `png` is computed but the ORIGINAL bytes are written
         # below while content_type claims image/png (FAQCategoryUpdate writes
         # the PNG). Possibly a bug — confirm before changing behavior.
         png = images.rotate(image, 0, output_encoding=images.PNG)
         gcs_file = gcs.open(filename,'w',retry_params=write_retry_params,content_type='image/png',)
         gcs_file.write(image)
         gcs_file.close()
         blobkey = blobstore.create_gs_key("/gs" + filename)
         blobkeys.append(filename)
         #url = images.get_serving_url("gs" + filename)
         url = images.get_serving_url(blobkey)
         urls.append(url)
     offer.image_urls = urls
     offer.blobkeys = blobkeys
     del offer.images
     offer.put()
     return offer
Example #13
0
    def post(self):
        """Store an uploaded image blob plus metadata, then redirect.

        On any failure the user is redirected back with a '#fail' fragment;
        the exception itself is intentionally discarded (bare except).
        """
        try:
            #Retrieve the image from http submit. It will be a blob object.
            image = self.get_uploads()[0]
            #Get info from blobfile uploaded
            file_info = self.get_file_infos()[0]
            #Create the serving url for the blob uploaded into GS (Google cloud storage)
            serving_url = images.get_serving_url(blobstore.create_gs_key(file_info.gs_object_name))

            #Fetch image dimensions. The last integer specifies how much to fetch; 50000 is plenty.
            data = blobstore.fetch_data(image.key(), 0, 50000)
            img = images.Image(image_data = data)
            width_to_height_ratio = float(img.width)/img.height

            #Get name and description
            name = self.request.get('name')
            caption = self.request.get('caption')

            #Now generate the database object and store it.
            image_model = ImageModel(
                name = name,
                caption = caption,
                blob_key = image.key(),
                gs_object_name = file_info.gs_object_name,
                serving_url = serving_url,
                width_to_height_ratio = width_to_height_ratio)
            image_model.put()

            #Redirect to the upload site and tell it that it succeeded.
            self.redirect('/admin/upload_image#success')

        except:
            #Redirect to upload site and tell that something went wrong.
            self.redirect('/admin/upload_image#fail')
Example #14
0
def migrate(image):
	"""Copies blobs stored in Blobstore over to a GCS bucket.

	Args:
		image: main.Image instance representing a single entity in the Datastore.

	Yields:
		Mapreduce operations: a db.Put for the (possibly updated) entity and a
		'Migrated' counter increment when a GCS key was assigned.

	This does not delete migrated (old) blobs so it is safe to run the job
	multiple times.
	"""
	if image.blob_key and not image.gs_key:
		blob_info = blobstore.get(image.blob_key)
		if not blob_info:
			# Dangling reference: the blob no longer exists, clear it.
			image.blob_key = None
		else:
			gs_key = '/'.join(['', BUCKET, blob_info.filename])
			try:
				# Skip the copy if the object already exists in GCS.
				gcs.stat(gs_key)
			except gcs.NotFoundError:
				reader = blobstore.BlobReader(blob_info)
				with gcs.open(gs_key, 'w', content_type=blob_info.content_type) as f:
					while True:
						# Copy in 1 MiB chunks to bound memory use.
						data = reader.read(1024**2)
						if not data:
							break
						f.write(data)
			blob_gs_key = blobstore.create_gs_key('/gs'+gs_key)
			image.url = images_api.get_serving_url(blob_gs_key, secure_url=True)
			image.gs_key = gs_key
		yield op.db.Put(image)
		if image.gs_key:
			yield op.counters.Increment('Migrated')
Example #15
0
    def _create_note(self, user, title, content, attachments):
        """Create a Note for *user*, storing each attachment in GCS.

        Args:
            user: the App Engine users.User owning the note.
            title, content: note fields.
            attachments: iterable of (file_name, file_content) pairs.

        Attachments are written world-readable to the default bucket; image
        serving URLs are generated where possible, otherwise a direct
        storage.googleapis.com URL is used (no thumbnail).
        """
        note = Note(parent=ndb.Key("User", user.nickname()),
                    title=title,
                    content=content)
        note.put()

        if attachments:
            bucket_name = app_identity.get_default_gcs_bucket_name()
            for file_name, file_content in attachments:
                content_t = mimetypes.guess_type(file_name)[0]
                real_path = os.path.join('/', bucket_name, user.user_id(), file_name)

                with cloudstorage.open(real_path, 'w', content_type=content_t,
                                       options={'x-goog-acl': 'public-read'}) as f:
                    f.write(file_content.decode())

                key = blobstore.create_gs_key('/gs' + real_path)
                try:
                    url = images.get_serving_url(key, size=0)
                    thumbnail_url = images.get_serving_url(key, size=150, crop=True)
                except (images.TransformationError, images.NotImageError):
                    # BUG FIX: was `except images.TransformationError,
                    # images.NotImageError:` which (Python 2) caught ONLY
                    # TransformationError and ASSIGNED the exception instance
                    # to images.NotImageError. Parenthesized to catch both.
                    url = "http://storage.googleapis.com{}".format(real_path)
                    thumbnail_url = None

                f = NoteFile(parent=note.key, name=file_name,
                             url=url, thumbnail_url=thumbnail_url,
                             full_path=real_path)
                f.put()
                note.files.append(f.key)

            note.put()
Example #16
0
 def url(self, filename):
     """Return a public URL for *filename*: a GCS URL when hosted, an
     images-service serving URL on the local dev server."""
     real_path = self._real_path(filename)
     if is_hosted():
         # Hosted: files are reachable straight from Cloud Storage.
         return 'https://storage.googleapis.com{path}'.format(path=real_path)
     # Local dev: route through the images service for a serving URL.
     return images.get_serving_url(blobstore.create_gs_key('/gs' + real_path))
Example #17
0
def serveurl():
    """Return an images-service serving URL for a GCS object.

    Authenticates the POSTed 'key' against config.ini ([auth] key), then
    builds a secure serving URL from the 'bucket' and 'image' form fields.
    Returns a JSON error with HTTP 401 on any auth failure.
    """
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.read('config.ini')

    try:
        expectedKey = config.get("auth", "key")
        receivedKey = request.form['key']
    except (KeyError, ConfigParser.Error):
        # Narrowed from a bare `except:`: a missing form field raises
        # KeyError; a missing config section/option raises ConfigParser.Error.
        return json.dumps({'error': 'Key error'}), 401, {
            'ContentType': 'application/json'
        }

    if expectedKey == receivedKey:
        image = request.form['image']
        bucket = request.form['bucket']

        logging.info('Create Serving URL for ' + image)
        filename = (bucket + "/" + image)
        logging.info('Filename is ' + filename)

        gskey = blobstore.create_gs_key("/gs/" + filename)
        logging.info('gskey is ' + gskey)

        servingImage = images.get_serving_url(gskey, secure_url=True)
        logging.info('Serving url: ' + servingImage)

        return (servingImage)
    else:
        return json.dumps({'error': 'No valid key provided'}), 401, {
            'ContentType': 'application/json'
        }
    def CreateFile(self, nombre, datos):
        """Write *datos* as a public JPEG named *nombre* in the default bucket.

        Returns the public storage.googleapis.com URL when running on App
        Engine, otherwise (local dev) a serving URL built from the blob key.
        """
        my_default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15)
        gcs.set_default_retry_params(my_default_retry_params)

        bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        #bucket_name = os.environ.get('BUCKET_NAME', 'prueba')
        #print bucket_name
        #bucket_name = 'prueba'
        bucket = '/' + bucket_name
        filename = bucket + '/' + nombre

        print 'filename: '+filename

        #https://cloud.google.com/appengine/docs/python/googlecloudstorageclient/functions

        write_retry_params = gcs.RetryParams(backoff_factor=1.1)

        gcs_file = gcs.open(filename, 'w', content_type='image/jpeg', options={'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar', 'x-goog-acl': 'public-read'}, retry_params=write_retry_params)
        gcs_file.write(datos)
        gcs_file.close()

        blobstore_filename = '/gs' + filename

        key = blobstore.create_gs_key(blobstore_filename)


        # Running on Google's production servers:
        if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):
            return 'http://storage.googleapis.com'+filename
        # Local development environment:
        else:
            return get_serving_url(key)
Example #19
0
def CreateFile(my_file,tienda):
	"""Create a GCS file with GCS client lib.

	Uses the (deprecated) `files` API to write the uploaded file publicly
	under <BUCKET>/<tienda>/<filename>.

	Args:
	filename: GCS filename.

	Returns:
	The corresponding string blobkey for this GCS file.
	"""
	# Create a GCS file with GCS client.
	#with gcs.open(filename, 'w') as f:
	#		f.write(my_file.file.read())
	# The name already carries the '/gs/' prefix required by blobstore.
	nombre ='/gs/'+BUCKET +'/'+str(tienda)+"/"+my_file.filename
	#write_retry_params = gcs.RetryParams(backoff_factor=1.1)
	#with gcs.open(my_file.filename, 'w') as f:
	#	f.write(my_file.file.read())
	gcs_file =files.gs.create(nombre,mime_type=my_file.type,acl='public-read')
	#, retry_params=write_retry_paramsretry_params=write_retry_params
	with files.open(gcs_file, 'a') as f:
		f.write(my_file.file.read())
	files.finalize(gcs_file)

	# Blobstore API requires extra /gs to distinguish against blobstore files.
	#blobstore_filename = '/gs' + nombre
	# This blob_key works with blobstore APIs that do not expect a
	# corresponding BlobInfo in datastore.
	return blobstore.create_gs_key(nombre)
Example #20
0
  def get(self):
    """Serves yesterday's "Everything" daily histogram file from Bigstore.

    NOTE(review): the original docstring documented a |filename| argument,
    but this handler takes none — the path is derived from yesterday's date.
    """
    yesterday = datetime.date.today() - datetime.timedelta(1)
    yesterday_formatted = yesterday.strftime("%Y.%m.%d")

    filename = 'histograms/daily/%s/Everything' % (yesterday_formatted)

    if settings.PROD:
      try:
        # Existence check only; the handle is discarded immediately.
        with files.open(BIGSTORE_BUCKET + filename, 'r') as unused_f:
          pass
      except files.file.ExistenceError, e:
        self.response.write(e)
        return

      # The file exists; serve it.
      blob_key = blobstore.create_gs_key(BIGSTORE_BUCKET + filename)
      blob_reader = blobstore.BlobReader(blob_key, buffer_size=3510000)
      try:
        result = blob_reader.read()
      finally:
        blob_reader.close()
Example #21
0
def gcs_upload(file_to_upload, bucket=settings.GCS_BUCKET):
    """Uploads a file to google cloud storage.

    Args:
        file_to_upload: object exposing .type (content type) and .value
            (raw bytes), e.g. a cgi.FieldStorage.
        bucket: destination GCS bucket name.

    Returns:
        dict with gs_key, gcs_filename, serving_url, public_url, filename and
        filetype — or None if anything failed (the error is logged).
    """
    try:
        filename = create_random_gcs_filename()
        bucket_and_filename = {'bucket': bucket, 'filename': filename}
        gcs_filename = '/%(bucket)s/%(filename)s' % bucket_and_filename
        public_url = 'https://storage.googleapis.com/%(bucket)s/%(filename)s' % bucket_and_filename

        # Object is written world-readable so public_url works directly.
        gcs_file = gcs.open(
            gcs_filename,
            mode='w',
            content_type=file_to_upload.type,
            options={'x-goog-acl': 'public-read'})
        gcs_file.write(file_to_upload.value)
        gcs_file.close()

        gs_key = blobstore.create_gs_key('/gs' + gcs_filename)
        try:
            serving_url = images.get_serving_url(gs_key)
        except:
            # Non-image files can't get a serving URL; fall back to the
            # direct public URL (deliberate best-effort).
            serving_url = public_url

        return {
            'gs_key': gs_key,
            'gcs_filename': gcs_filename,
            'serving_url': serving_url,
            'public_url': public_url,
            'filename': filename,
            'filetype': file_to_upload.type
        }

    except Exception, e:
        logging.exception(e)
        return None
Example #22
0
def editormd_image_upload():
    """Flask endpoint: store an editor.md image upload in GCS, return JSON.

    Expects a multipart field 'editormd-image-file'. On success returns
    {"success": 1, ..., "url": <resized serving url>}; otherwise success 0
    with a message.
    """
    mimetypes.init()
    if 'editormd-image-file' not in request.files:
        return jsonify({"success": 0, "message": u"No file part"})
    # Renamed local from `file` to avoid shadowing the builtin.
    upload = request.files['editormd-image-file']
    if upload.filename == '':
        return jsonify({"success": 0, "message": u"No selected file"})
    if upload and allowed_file(upload.filename):
        directory = "upload/{0}".format(datetime.now().strftime("%Y%m%d/%H"))
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name
        # /<bucket>/upload/YYYYMMDD/HH/<slugified-stem>.<ext>
        filename = "{0}/{3}/{1}.{2}".format(
            bucket,
            slugify(upload.filename.rsplit('.', 1)[0]).replace("-", "_"),
            upload.filename.rsplit('.', 1)[1],
            directory)
        content_type = mimetypes.guess_type(filename)[0] or "application/octet-stream"
        write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
        gcs_file = cloudstorage.open(filename,
                                     'w',
                                     content_type=content_type,
                                     options={'x-goog-acl': 'public-read'},
                                     retry_params=write_retry_params)
        gcs_file.write(upload.read())
        gcs_file.close()
        gs = "/gs{0}".format(filename)
        blob_key = blobstore.create_gs_key(gs)
        url = images.get_serving_url(blob_key, size=app.config["SITE_POST_IMG_WIDTH"], crop=False, secure_url=True)
        # NOTE(review): success message reads "No allowed_file" — looks like a
        # copy/paste slip, kept byte-identical to avoid changing the payload.
        return jsonify({"success": 1, "message": u"No allowed_file", "url": url})

    return jsonify({"success": 0, "message": u"No allowed_file"})
    def process_images_create(self, request, context, *args, **kwargs):
        """
        Upload a new image to the Artwork Gallery.

        With uploaded files present: reads the temp GCS upload, creates a
        media entity, re-writes the data under our own filename, deletes the
        temp upload, then redirects (or returns JSON when
        kwargs['give_me_json_back'] is set). Without files: returns the
        upload form bound to a fresh upload URL.
        """

        from merkabah.core.files.api.cloudstorage import Cloudstorage
        from google.appengine.ext import blobstore
        from plugins.artwork.internal.api.images import create_media

        # Get the file upload url

        fs = Cloudstorage(DEFAULT_GS_BUCKET_NAME)

        form = ImageUploadForm()

        context['form'] = form
        has_files = fs.get_uploads(request, 'the_file', True)


        if has_files:
            # Only the first uploaded file is processed.
            file_info = has_files[0]

            original_filename = file_info.filename
            content_type = file_info.content_type
            size = file_info.size
            gs_object_name = file_info.gs_object_name # Using this we could urlfetch, but the file isn't public...
            blob_key = blobstore.create_gs_key(gs_object_name)
            logging.warning(blob_key)
            # fs.read expects the object name without the '/gs' prefix.
            data =  fs.read(gs_object_name.replace('/gs', ''))

            slug = original_filename.split('.')[0] # I dislike this..

            media = create_media(slug, data)

            # What we want to do now is create a copy of the file with our own info

            dest_filename = '%s' % original_filename

            new_gcs_filename = fs.write(dest_filename, data, content_type)
            logging.warning(new_gcs_filename)

            # Finally delete the tmp file
            data =  fs.delete(gs_object_name.replace('/gs', ''))

            if not kwargs.get('give_me_json_back'):
                return HttpResponseRedirect(urlresolvers.reverse('admin_plugin_action', args=(context['plugin_slug'], 'images')))

            # Else... we're hacky emulating an upload rest endpoint - return json info about the image
            response_dict = {
                'cool': True,
                'keystr': media.key.urlsafe(),
                'filename': media.filename,
                'thumbnail_url': media.get_thumbnail_url()
            }

            return HttpResponse(json.dumps(response_dict))

        upload_url = fs.create_upload_url('/madmin/plugin/artwork/images_create/')

        return FormResponse(form, id='images_create_form', title="Upload a file", target_url=upload_url, target_action='images_create', is_upload=True)
Example #24
0
    def finish(self):
        """
        Called when all shards have finished processing.

        If error-CSV generation is enabled, concatenates every shard's error
        CSV into a single master file (writing the header row once), then
        either records a blobstore key path for it or deletes the empty file.
        """
        if self.get_meta().generate_error_csv:
            self.error_csv_filename = self._error_csv_filename()

            with cloudstorage.open(self.error_csv_filename, 'w') as f:
                # Concat all error csvs from shards into 1 file
                has_written = False
                for shard in ImportShard.objects.filter(task_id=self.pk, task_model_path=self.model_path):
                    if not shard.error_csv_filename:
                        continue

                    # If this is the first row, write the column headers
                    if not has_written:
                        data = json.loads(shard.source_data_json)[0]
                        cols = getattr(self, "detected_columns", sorted(data.keys())) + [ "errors" ]
                        csvwriter = csv.writer(f)
                        csvwriter.writerow(cols)
                        has_written = True

                    # Write the shard's error file into the master file,
                    # then remove the per-shard file.
                    f.write(cloudstorage.open(shard.error_csv_filename).read())
                    cloudstorage.delete(shard.error_csv_filename)

            if has_written:
                # Create a blobstore key for the GCS file
                blob_key = create_gs_key('/gs%s' % self.error_csv_filename)
                self.error_csv = '%s/errors.csv' % blob_key
                self.save()
            else:
                # No shard produced errors: remove the empty master file.
                cloudstorage.delete(self.error_csv_filename)
Example #25
0
def CreateFile(my_file,tienda):
	"""Create a GCS file with GCS client lib.

	Writes the uploaded file publicly under <BUCKET>/<tienda>/<filename>.

	Args:
	filename: GCS filename.

	Returns:
	The corresponding string blobkey for this GCS file.
	"""
	# Create a GCS file with GCS client.
	#with gcs.open(filename, 'w') as f:
	#		f.write(my_file.file.read())
	nombre = '/'+BUCKET +'/'+str(tienda)+"/"+my_file.filename
	#write_retry_params = gcs.RetryParams(backoff_factor=1.1)
	#with gcs.open(my_file.filename, 'w') as f:
	#	f.write(my_file.file.read())
	gcs_file = gcs.open(nombre,'w',content_type=my_file.type,options={'x-goog-acl': 'public-read'})
	#, retry_params=write_retry_paramsretry_params=write_retry_params

	gcs_file.write(my_file.file.read())
	gcs_file.close()

	# Blobstore API requires extra /gs to distinguish against blobstore files.
	blobstore_filename = '/gs' + nombre
	# This blob_key works with blobstore APIs that do not expect a
	# corresponding BlobInfo in datastore.
	return blobstore.create_gs_key(blobstore_filename)
Example #26
0
    def delete(self):
        """Delete the original file and its dynamic serving URL if it exists.

        Reads 'filepath' from the query string; responds 204 on success.
        """
        filepath = request.args.get('filepath')
        if not filepath:
            return make_response_validation_error('filepath', message='Parameter filepath is required')

        try:
            # BUG FIX: was cloudstorage.delete(filename) — `filename` is not
            # defined in this scope; the intended variable is `filepath`.
            # NOTE(review): cloudstorage.delete expects '/<bucket>/<object>';
            # if filepath is relative to self.bucket this should be
            # '/{}/{}'.format(self.bucket, filepath) — confirm with callers.
            cloudstorage.delete(filepath)
        except cloudstorage.AuthorizationError:
            abort_json(401, "Unauthorized request has been received by GCS.")
        except cloudstorage.ForbiddenError:
            abort_json(403, "Cloud Storage Forbidden Error. GCS replies with a 403 error for many reasons, the most common one is due to bucket permission not correctly setup for your app to access.")
        except cloudstorage.NotFoundError:
            abort_json(404, filepath + " not found on GCS in bucket " + self.bucket)
        except cloudstorage.TimeoutError:
            abort_json(408, 'Remote timed out')

        # TODO get the query string and delete file if asked to
        # BUG FIX: `bucket_name` was undefined; this handler's bucket is
        # self.bucket (already used in the 404 message above).
        blobstore_filename = u'/gs/{}/{}'.format(self.bucket, filepath)
        blob_key = blobstore.create_gs_key(blobstore_filename)
        try:
            images.delete_serving_url(blob_key)
        except images.AccessDeniedError:
            abort_json(403, "App Engine Images API Access Denied Error. Files has already been deleted from Cloud Storage")
        except images.ObjectNotFoundError:
            # Serving URL never existed; nothing to clean up.
            pass

        return '', 204
Example #27
0
    def _create_note(self, user, title, content, attachments):
        """Create a Note for *user*, storing each attachment in GCS.

        Args:
            user: the App Engine users.User owning the note.
            title, content: note fields.
            attachments: iterable of (file_name, file_content) pairs.

        Attachments are written world-readable to the default bucket; image
        serving URLs are generated where possible, otherwise a direct
        storage.googleapis.com URL is used (no thumbnail).
        """
        note = Note(parent=ndb.Key("User", user.nickname()),
                    title=title,
                    content=content)
        note.put()

        if attachments:
            bucket_name = app_identity.get_default_gcs_bucket_name()
            for file_name, file_content in attachments:
                content_t = mimetypes.guess_type(file_name)[0]
                real_path = os.path.join('/', bucket_name, user.user_id(), file_name)

                with cloudstorage.open(real_path, 'w', content_type=content_t,
                                       options={'x-goog-acl': 'public-read'}) as f:
                    f.write(file_content.decode())

                key = blobstore.create_gs_key('/gs' + real_path)
                try:
                    url = images.get_serving_url(key, size=0)
                    thumbnail_url = images.get_serving_url(key, size=150, crop=True)
                except (images.TransformationError, images.NotImageError):
                    # BUG FIX: was `except images.TransformationError,
                    # images.NotImageError:` which (Python 2) caught ONLY
                    # TransformationError and ASSIGNED the exception instance
                    # to images.NotImageError. Parenthesized to catch both.
                    url = "http://storage.googleapis.com{}".format(real_path)
                    thumbnail_url = None

                f = NoteFile(parent=note.key, name=file_name,
                             url=url, thumbnail_url=thumbnail_url,
                             full_path=real_path)
                f.put()
                note.files.append(f.key)

            note.put()
    def fetch_cached_thumbnail(cls, cached_key):
        """Return the cached thumbnail bytes for *cached_key*, or None.

        Resolves the GCS object through the blobstore bridge; returns None
        when no blob info exists for the key.
        """
        gs_key = blobstore.create_gs_key("/gs{0}".format(cached_key))
        info = blobstore.get(gs_key)
        if not info:
            return None
        # Context manager closes the blob reader after the full read.
        with info.open() as reader:
            return reader.read()
Example #29
0
    def post(self, category, name):
        """Saves a resource in the cloud storage.

        Multiple files are possible; if multiple files are uploaded the
        'name' needs to be 'multiple'. For multiple files the file name is
        taken as name. If multiple files are uploaded without 'multiple'
        only the last file is saved. The function can also generate a
        serving link, this is either public or private (not guessable).

        If category or name (without extension) is a user key it needs to
        belong to the logged in user.

        Returns a dict with resource links, private (serving) links, direct
        GCS links and API links.
        """
        link = request.form.get(
            'link', default='private')  # either public, private or False
        gcs_links = []
        api_links = []
        private_links = []
        links = []
        print "Category: " + str(category)
        try:
            category_key = ndb.Key(urlsafe=category)
        except:
            # Not a urlsafe key: treat category as a plain folder name.
            category_key = False

        for k, f in request.files.iteritems(multi=False):
            if name == 'multiple':
                name = f.filename
            try:
                name_key = ndb.Key(
                    urlsafe=os.path.splitext(os.path.basename(name))[0])
            except:
                # Name stem is not a urlsafe key either; skip ownership check.
                name_key = False

            if category_key or name_key:
                user_key = category_key or name_key
                if not auth.is_authorized(user_key):
                    return abort(403)

            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            adr = "{}/{}/{}".format(GCS_BUCKET, category, name)
            gcs_file = gcs.open(adr,
                                'w',
                                content_type=f.mimetype,
                                options={'x-goog-meta-name': f.filename},
                                retry_params=write_retry_params)
            f.save(gcs_file)  # saves file to cloud storage
            gcs_file.close()
            f.close()
            gcs_links.append("/_ah/gcs" + adr)
            api_links.append("/api/v1/upload/" + category + '/' + name)
            links.append("/resource/" + '/' + category + '/' + name)
            if link == 'private':  #TODO implement public links
                # Serving URLs are unguessable, hence "private".
                blob_key = blobstore.create_gs_key('/gs' + adr)
                img_url = images.get_serving_url(blob_key=blob_key)
                private_links.append(img_url)

        return {
            'links': links,
            'private_links': private_links,
            'gcs_links': gcs_links,
            'api_links': api_links
        }
Example #30
0
 def get(self, category, name):
     """Return a serving URL for the uploaded resource at category/name."""
     # TODO permissions, how, what?
     object_path = "{}/{}/{}".format(GCS_BUCKET, category, name)
     serving_key = blobstore.create_gs_key('/gs' + object_path)
     return images.get_serving_url(blob_key=serving_key)
Example #31
0
def upload_bucket_images():
    """Upload posted image files to GCS and record a BucketImage for each.

    Expects one or more files in the multipart body and a ``bucketId``
    query argument.  Returns 401 with a user message when no files were
    posted (status kept from the original API; 400 would arguably fit
    better, but callers may depend on 401).
    """
    if not request.files:
        return jsonify(user_message='no image'), 401
    bucket_id = request.args['bucketId']
    bucket_images = []
    # Iterate the file objects directly instead of iterating field names
    # and re-looking each one up in the MultiDict.
    for image in request.files.values():
        # upload_image returns (storage_key, storage_url).
        storage_key, storage_url = upload_image(image.read(), 'bucket',
                                                image.mimetype)

        # Blobstore needs the '/gs' prefix to address a GCS object.
        gs_key = blobstore.create_gs_key('/gs' + storage_key)
        serving_url = images.get_serving_url(gs_key)

        created_image = BucketImage(serving_url=serving_url,
                                    storage_url=storage_url,
                                    bucket_id=bucket_id)
        db.session.add(created_image)
        # Commit per image so earlier uploads survive a later failure
        # (preserves original behavior).
        db.session.commit()
        bucket_images.append(model_to_dict(created_image))

    return jsonify(data=bucket_images)
Example #32
0
    def save_file(file_hash, file_content):
        """Write content to the default GCS bucket under ics/<file_hash>.

        Returns the blobstore key addressing the stored GCS object.
        """
        gcs_path = '/{}/ics/{}'.format(
            app_identity.get_default_gcs_bucket_name(), file_hash)
        with gcs.open(gcs_path, 'w') as fh:
            fh.write(file_content)

        # '/gs' prefix tells blobstore the object lives in Cloud Storage.
        return blobstore.create_gs_key('/gs' + gcs_path)
def _create_branding(hash_, zip_content, description, service_user, branding_type, meta_properties, pokes):
    """Store a branding zip in cloud storage and persist its Branding model.

    Also creates one PokeTagMap entity per poke tag and links it to the
    branding.  Raises BrandingValidationException when poke tags are
    supplied without a service user.
    """
    if pokes and not service_user:
        raise BrandingValidationException('Cannot create branding with one or more poke tags without a service user')

    gcs_path = get_branding_cloudstorage_path(hash_, service_user)
    with cloudstorage.open(gcs_path, 'w') as gcs_file:
        gcs_file.write(zip_content)

    # Blobstore addresses GCS objects via the '/gs' prefix.
    gs_key = blobstore.create_gs_key('/gs' + gcs_path)

    branding = Branding(key_name=hash_)
    branding.blob_key = gs_key.decode('utf-8')
    branding.description = description
    branding.timestamp = now()
    branding.user = service_user
    branding.pokes = []
    branding.type = branding_type
    scheme = meta_properties.get('color-scheme') or Branding.DEFAULT_COLOR_SCHEME
    branding.menu_item_color = (meta_properties.get('menu-item-color')
                                or Branding.DEFAULT_MENU_ITEM_COLORS[scheme])
    branding.content_type = meta_properties.get('content-type') or Branding.CONTENT_TYPE_HTML
    branding.orientation = meta_properties.get('orientation') or Branding.DEFAULT_ORIENTATION

    entities = [branding]
    for poke_hash, unicode_tag in pokes:
        poke_map = PokeTagMap(key_name=poke_hash, parent=parent_key(service_user))
        poke_map.tag = unicode_tag
        branding.pokes.append(poke_map.key())
        entities.append(poke_map)

    # Single batched write for the branding plus all poke maps.
    db.put(entities)
    return branding
Example #34
0
    def get(self):
        """Demo: write a GCS file, read it back via Blobstore, delete it."""
        # Cloud Storage names are /bucket/object; use the app's default
        # bucket.
        filename = '/{}/blobstore_demo'.format(
            app_identity.get_default_gcs_bucket_name())

        # Write a small payload to Cloud Storage.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # Blobstore expects GCS files to be addressed as /gs/bucket/object.
        blob_key = blobstore.create_gs_key('/gs{}'.format(filename))

        # Fetch bytes [0, 6] of the object through the Blobstore API.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Echo the contents back to the client.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Clean up the GCS object via its blob key.
        blobstore.delete(blob_key)
Example #35
0
    def upload_image(self, blob, filename):
        """Store image bytes in GCS; record blob key, mime type and URL.

        The mime type is inferred from the extension: jpeg for
        .jpg/.jpeg, png otherwise.  Results are stored on self.image,
        self.mime_type and self.url.

        Args:
            blob: raw image bytes to store.
            filename: original file name, used only for type detection.
        """
        # Bug fix: the check is now case-insensitive so '.JPG'/'.JPEG'
        # are also classified as jpeg (they previously fell through to
        # png).  splitext avoids splitting the name twice.
        ext = os.path.splitext(filename)[1].lower()
        mime_type = 'image/jpeg' if ext in ('.jpg', '.jpeg') else 'image/png'

        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        filename_final = '/' + bucket_name + '/' + str(uuid.uuid4())

        # The with-block closes the file; the original's explicit
        # f.close() inside it was redundant.
        with gcs.open(filename_final, 'w') as f:
            f.write(blob)

        # Blobstore API requires extra /gs to distinguish against blobstore files.
        blobstore_filename = '/gs' + filename_final

        # Get the file's blob key and store the results on the instance.
        blob_key = blobstore.create_gs_key(blobstore_filename)
        self.image = blob_key
        self.mime_type = mime_type
        self.url = get_serving_url(blob_key)
Example #36
0
  def get(self):
    """Serve a doc file from cloud storage with long cache headers.

    A memcache-backed existence check turns missing files into a 404
    instead of the 500 App Engine would otherwise emit.
    """
    path = self.resolve_doc_path()
    gs_key = blobstore.create_gs_key(path)
    age = self.get_cache_age(path)

    self.response.headers['Cache-Control'] = (
        'max-age=' + str(age) + ',s-maxage=' + str(age))

    # is there a better way to check if a file exists in cloud storage?
    # AE will serve a 500 if the file doesn't exist, but that should
    # be a 404

    path_exists = memcache.get(path)
    if path_exists is None:
      # Not cached yet: probe cloud storage once and remember the answer.
      try:
        # just check for existance
        files.open(path, 'r').close()
        memcache.add(key=path, value="1", time=ONE_DAY)
        self.send_blob(gs_key)
      except files.file.ExistenceError:
        memcache.add(key=path, value="0", time=ONE_DAY)
        self.error(404)
    elif path_exists == "1":
      self.send_blob(gs_key)
    else:
      self.error(404)
Example #37
0
    def get(self):
        """Serves yesterday's daily histogram data file from bigstore.

        Builds the path histograms/daily/<YYYY.MM.DD>/Everything for
        yesterday's date, verifies the file exists in cloud storage
        (writing the error to the response and returning if not), then
        reads the whole file through the Blobstore API.
        """
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday_formatted = yesterday.strftime("%Y.%m.%d")

        filename = 'histograms/daily/%s/Everything' % (yesterday_formatted)

        if settings.PROD:
            try:
                # Existence probe only; the contents are read below via
                # the Blobstore API.
                with cloudstorage.open(BIGSTORE_BUCKET + filename,
                                       'r') as unused_f:
                    pass
            except cloudstorage.errors.Error, e:  # Python 2 except syntax.
                logging.error(e)
                self.response.write(e)
                return

            # The file exists; serve it.
            blob_key = blobstore.create_gs_key('/gs' + BIGSTORE_BUCKET +
                                               filename)
            # Large buffer (~3.5 MB) to minimize round trips for a big file.
            blob_reader = blobstore.BlobReader(blob_key, buffer_size=3510000)
            try:
                result = blob_reader.read()
            finally:
                blob_reader.close()
Example #38
0
    def get(self, user_id, practice_id, gcs_object):
        """Serve a practice upload from GCS as a named attachment."""
        # Blobstore addresses GCS objects as /gs/bucket/user/object.
        gs_object_name = '/gs/{}/{}/{}'.format(util.get_upload_bucket(),
                                               user_id, gcs_object)

        # Files live in Cloud Storage even though this is a Blobstore
        # handler, hence the conversion from the GCS file name.
        blob_key = blobstore.create_gs_key(gs_object_name)

        # Recover the human-readable name from the practice's file-info
        # records (see the UploadFiles handlers for the full schema).
        practice = Practice.get_by_id(practice_id)
        filename = None
        for file_info in practice.json_properties['files']:
            if file_info['gs_object_name'] == gs_object_name:
                filename = file_info['filename']
        if filename is None:
            raise Exception("Could not find file in practice: {} {}".format(
                practice_id, gcs_object))

        # Force a download (rather than inline display) with a pretty
        # file name.
        self.response.headers['Content-Disposition'] = (
            "attachment; filename=" + str(filename))
        self.send_blob(blob_key)
Example #39
0
 def get(self, key):
     """Stream a server's GCS archive to the client as a zip download."""
     server = self.get_server_by_key(key)
     gs_name = "/gs/{0}/{1}".format(
         gcs.get_default_bucket_name(), gcs.get_gcs_archive_name(server.key.urlsafe())
     )
     download_name = "{0}.zip".format(server.short_name or server.name or server.key.urlsafe())
     self.send_blob(blobstore.create_gs_key(gs_name), save_as=download_name)
def upload_watermarked_image(image, job_id, new_url):
	"""Upload a watermarked image to GCS and return its serving URL.

	Args:
		image: PIL image to store.
		job_id: job identifier used in the GCS object path.
		new_url: URL of the source image; used only to derive the file
			name and content type.
	"""
	# gets the information of the new url
	filename, content_type, extension = get_file_info(new_url)

	gcs_filename = 'watermarked/%s/%s' % (job_id, filename)
	watermarked_file = '/%s/%s' % (BUCKET_NAME, gcs_filename)

	# render the image as a string
	output = StringIO.StringIO()
	image.save(output, format="jpeg")
	image_output = output.getvalue()
	output.close()

	# Upload publicly readable.  Bug fix: retry_params is now actually
	# passed to gcs.open (it was previously constructed and dropped),
	# and the dead pre-assignment of new_url was removed.
	retry_params = gcs.RetryParams(backoff_factor=1.1)
	gcs_file = gcs.open(
		watermarked_file, 'w', content_type=content_type,
		options={'x-goog-acl': 'public-read'}, retry_params=retry_params)
	gcs_file.write(image_output)
	gcs_file.close()

	# Blobstore addresses GCS objects via the '/gs' prefix.
	key = blobstore.create_gs_key('/gs' + watermarked_file)
	return images.get_serving_url(key)
def store_mapping_entity(old_blob_info_or_key, gcs_filename):
    """Persist the old-blob-key -> GCS-file mapping in Datastore.

    Args:
      old_blob_info_or_key: The old blob's BlobInfo, BlobKey, or BlobKey's
        encrypted string.
      gcs_filename: the GCS filename where the blob was copied.

    Returns:
      The datastore mapping entity that was written.

    Raises:
      ValueError: if either argument is missing.
    """
    if not old_blob_info_or_key:
        raise ValueError('old_blob_info_or_key is required.')
    if not gcs_filename:
        raise ValueError('gcs_filename is required.')
    # Normalize to an absolute /bucket/object path.
    if not gcs_filename.startswith('/'):
        gcs_filename = '/' + gcs_filename
    old_blob_key_str = _get_blob_key_str(old_blob_info_or_key)
    new_blob_key_str = blobstore.create_gs_key('/gs' + gcs_filename)
    entity = models.BlobKeyMapping(
        key=models.BlobKeyMapping.build_key(old_blob_key_str),
        gcs_filename=gcs_filename,
        new_blob_key=new_blob_key_str)
    entity.put()
    logging.info('Migrated blob_key "%s" to "%s" (GCS file "%s").' %
                 (old_blob_key_str, new_blob_key_str, gcs_filename))
    return entity
    def CreateFile(filename, imageFile):
        """Write JPEG bytes to GCS at filename; return its blobstore key.

        Args:
            filename: absolute GCS path ('/bucket/object').
            imageFile: raw JPEG bytes to store.
        """
        # The with-block closes the file; the original's explicit
        # f.close() inside it was redundant and has been removed.
        with gcs.open(filename, 'w', content_type='image/jpeg') as f:
            f.write(imageFile)

        # '/gs' prefix lets blobstore address the GCS object.
        blobstore_filename = '/gs' + filename
        return blobstore.create_gs_key(blobstore_filename)
Example #43
0
def unserveurl():
    """Remove the images-API serving URL for a GCS-hosted image.

    Expects form fields 'key' (shared secret from config.ini), 'image'
    and 'bucket'.  Returns "OK" on success, or a JSON error body with
    HTTP 401 when the key does not match.
    """
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.read('config.ini')

    expectedKey = config.get("auth", "key")
    receivedKey = request.form['key']

    # NOTE(review): a constant-time comparison (hmac.compare_digest)
    # would be preferable for a shared secret; == keeps prior behavior.
    if expectedKey == receivedKey:
        image = request.form['image']
        bucket = request.form['bucket']

        logging.info('Remove Serving URL for ' + image)

        filename = bucket + "/" + image
        logging.info('Filename is ' + filename)

        gskey = blobstore.create_gs_key("/gs/" + filename)
        logging.info('gskey is ' + gskey)

        images.delete_serving_url(gskey)
        logging.info('URL is removed')

        return "OK"
    else:
        # Bug fix: the response header key was 'ContentType'; the
        # correct HTTP header name is 'Content-Type'.
        return json.dumps({'error': 'No valid key provided'}), 401, {
            'Content-Type': 'application/json'
        }
Example #44
0
def get_resource(collection,category,name):
    """Redirect to an images-API serving URL for a stored resource.

    WARNING(review): the permission check below is short-circuited by
    'True or', so EVERY request is authorized regardless of admin status
    or collection permissions.
    """
# TODO auth.is_admin and collectio permission does not work !!! -> used True or
    if True or auth.is_admin() or model.Collection.has_permission(collection,auth.current_user_key().urlsafe(),'read',urlsafe=True):
        adr =  BUCKET + '/' + collection + '/' + category + '/' + name
        blob_key = blobstore.create_gs_key('/gs' + adr)
        img_url = images.get_serving_url(blob_key=blob_key)
        # NOTE(review): debug print left in; consider logging instead.
        print img_url
        return flask.redirect(img_url)
Example #45
0
 def img_in_cache(self):
     """Return the cached thumbnail blob (truthy) if it exists in GCS."""
     gcs_path = (
         "/gs/greenday-project-v02-local.appspot.com/gd-yt-thumbs/{yt_id}/ytthumb-{yt_id}-{ms}"
         .format(yt_id=self.yt_id, ms=self.at_milliseconds))
     return blobstore.get(blobstore.create_gs_key(gcs_path))
Example #46
0
def make_blob(nid, data):
    """Write UTF-8 text to GCS under nid; return its blobstore key."""
    gcs_path = '/{0}/{1}'.format(get_default_gcs_bucket_name(), nid)
    with gcs.open(
        gcs_path, 'w', content_type='text/plain; charset=UTF-8'
    ) as out:
        out.write(data.encode('utf-8'))
    # '/gs' prefix: blobstore addressing for GCS objects.
    return blobstore.create_gs_key('/gs{0}'.format(gcs_path))
Example #47
0
def create_file(project_db, name, data):
  """Dump text data to GCS under the project's folder; return a BlobKey."""
  gcs_path = '/%s/%s/%s.dmp' % (
      config.CONFIG_DB.bucket_name, project_db.key.id(), name)
  with cloudstorage.open(gcs_path, 'w', content_type='text/plain') as out:
    out.write(data)

  # Blobstore API requires extra /gs to distinguish against blobstore files.
  return ndb.BlobKey(blobstore.create_gs_key('/gs' + gcs_path))
Example #48
0
def get_gcs_image_serving_url(filename):
    """Return a secure images-API serving URL for a file in GCS.

    Note: the images service is unavailable in the local dev server, so
    this only works when deployed.
    """
    blob_key = blobstore.create_gs_key("/gs/" + filename)
    return images.get_serving_url(blob_key, secure_url=True)
Example #49
0
 def get(self, resource):
   """Serve a resource from local disk in DEBUG, else from GCS."""
   if DEBUG:
     # Local development: read straight from the checkout.
     self.response.write(
         open("{}/{}".format(_config.LOCAL, resource)).read())
     return
   gs_name = "/gs/{}/{}".format(_config.BUCKET, resource)
   self.send_blob(blobstore.create_gs_key(gs_name))
Example #50
0
 def get(self, image_name):  # pylint: disable=C0111
     """Redirect to a serving URL for image_name; 404 when empty."""
     if not image_name:
         self.error(404)
         return
     gs_path = '/gs' + config.BUCKET + '/' + image_name
     serving_url = images.get_serving_url(
         blob_key=blobstore.create_gs_key(gs_path))
     self.redirect(serving_url)
Example #51
0
    def get(self, siteName, imageFile):
        """Write the serving URL for a webhook-uploaded image.

        Args:
            siteName: GCS bucket (site) name.
            imageFile: object name inside webhook-uploads/.
        """
        filename = '/gs/' + siteName + '/webhook-uploads/' + imageFile

        key = blobstore.create_gs_key(filename)
        # Removed an unused images.Image(...) local that served no purpose.
        url = images.get_serving_url(key)

        self.response.out.write(url)
Example #52
0
    def get(self, siteName, imageFile):
        """Write the serving URL for a webhook-uploaded image.

        Args:
            siteName: GCS bucket (site) name.
            imageFile: object name inside webhook-uploads/.
        """
        # Cleanup: dropped stray semicolons and an unused
        # images.Image(...) local.
        filename = '/gs/' + siteName + '/webhook-uploads/' + imageFile

        key = blobstore.create_gs_key(filename)
        url = images.get_serving_url(key)

        self.response.out.write(url)
 def get(self, image_name): # pylint: disable=C0111
     """Redirect the client to a serving URL for image_name; 404 if empty."""
     if not image_name:
         self.error(404)
         return
     url = images.get_serving_url(
         blob_key=blobstore.create_gs_key('/gs' + config.BUCKET + '/' + image_name))
     self.redirect(url)
  def post(self):  # pylint: disable=g-bad-name
    """Handles Object Change Notifications."""
    logging.debug(
        '%s\n\n%s',
        '\n'.join(['%s: %s' % x for x in self.request.headers.iteritems()]),
        self.request.body)

    resource_state = self.request.headers['X-Goog-Resource-State']

    if resource_state == 'sync':
      logging.info('Sync OCN message received.')
    elif resource_state == 'exists':
      logging.info('New file upload OCN message received.')
      data = json.loads(self.request.body)
      bucket = data['bucket']
      object_name = data['name']

      # Get Image location in GCS.
      gcs_image_location = '/gs/%s/%s' % (bucket, object_name)
      blob_key = blobstore.create_gs_key(gcs_image_location)

      # Try and get username from metadata.
      if data.has_key('metadata') and data['metadata'].has_key('owner'):
        owner = data['metadata']['owner']
      else:
        owner = data['owner']['entity']

      # Try to get public image URL for entry creation.
      image_link = None

      try:
        image_link = images.get_serving_url(blob_key, secure_url=True)
      except images.ObjectNotFoundError:
        logging.error('Could not find image link for %s.',
                      gcs_image_location)
      except images.TransformationError:
        logging.error('Could not convert link to image: %s.',
                      gcs_image_location)

      if image_link:
        bitdoc = Bitdoc(user=owner,
                        image_link=image_link,
                        file_name=object_name)

        logging.info('Creating Entry... %s - %s',
                     bitdoc.user,
                     bitdoc.image_link)

        # timestamp auto.

        bitdoc.put()

        # Add Task to pull queue.
        info = {'key': unicode(bitdoc.key()),
                'image_link': unicode(image_link)}

        processing_task_queue.add(taskqueue.Task(payload=json.dumps(info),
                                                 method='PULL'))