Example #1
    def test_delete_blob(self):
        from google.appengine.ext import blobstore
        post, _, _ = self.create_post()

        with open(TEST_IMAGE, 'rb') as f:
            image_key = post.add_blob(f.read(), TEST_IMAGE, 'image/jpeg')

        post._delete_blob(TEST_IMAGE)
        with self.assertRaises(blobstore.BlobNotFoundError):
            blobstore.fetch_data(image_key, 0, 1)
Example #2
    def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Read the file's contents using the Blobstore API.
        # The last two parameters specify the start and end index (inclusive)
        # of the bytes we want to read.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)
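The end index passed to fetch_data is inclusive, which many examples in this listing rely on. A small hedged sketch of that behaviour, assuming blob_key refers to the six-byte file written above:

# Sketch: fetch_data's start/end indexes are inclusive, so (0, 5) covers the
# six bytes 'abcde\n'; requesting (0, 6) simply returns everything available.
from google.appengine.ext import blobstore

def read_demo_bytes(blob_key):
    first_six = blobstore.fetch_data(blob_key, 0, 5)    # bytes 0..5
    everything = blobstore.fetch_data(blob_key, 0, 6)   # same data; index 6 is past EOF
    return first_six, everything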
Example #3
 def post(self):
     file_id = self.request.POST.get('file_id')
     key = ndb.Key('UploadedFile', file_id)
     ent = key.get()
     if ent.gs_path.endswith(IMAGE_EXTENSIONS):
         blob_key = blobstore.create_gs_key('/gs{}'.format(ent.gs_path))
         serving_url = images.get_serving_url(blob_key, secure_url=True)
         data = blobstore.fetch_data(blob_key, 0, 50000)
         image = images.Image(image_data=data)
         ent.width = image.width
         ent.height = image.height
     else:
         serving_url = 'https://storage.googleapis.com{}'.format(
             ent.gs_path)
         ent.width = 0
         ent.height = 0
     ent.serving_url = serving_url
     ent.put()
     self.json_response({
         'file_id': file_id,
         'gs_path': ent.gs_path,
         'serving_url': serving_url,
         'width': ent.width,
         'height': ent.height,
     })
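Several examples here (this one, #10, #12, #33, #46) read only the first chunk of a blob to learn an image's dimensions. A standalone sketch of that pattern; the helper name and the 50000-byte header size are assumptions, not part of the original projects:

# Hypothetical helper distilling the dimension-sniffing pattern above:
# fetch only the start of the blob and let the images API parse the header.
from google.appengine.api import images
from google.appengine.ext import blobstore

def get_image_dimensions(blob_key, header_bytes=50000):
    """Return (width, height) without downloading the whole blob."""
    data = blobstore.fetch_data(blob_key, 0, header_bytes)
    img = images.Image(image_data=data)
    return img.width, img.height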
Example #4
    def get(self):
        user = users.get_current_user()
        index = self.request.get('index')
        current_folder = self.request.get('current_folder')
        folder_key = ndb.Key('FolderInfo', user.email() + current_folder)
        folder = folder_key.get()

        zipstream = StringIO.StringIO()
        zfile = zipfile.ZipFile(file=zipstream, mode='w')

        for file_key in folder.files:
            file = file_key.get()
            start = 0
            size = 1024
            flag = False
            data = []
            while not flag:
                # fetch_data's end index is inclusive, so a full chunk is size + 1 bytes
                chunk = blobstore.fetch_data(file.blob, start, start + size)
                data.append(chunk)
                if len(chunk) < size + 1:
                    flag = True
                start += size + 1
            zfile.writestr(file.name.replace(file.parent.get().name, ''),
                           "".join(data))

        zfile.close()
        zipstream.seek(0)

        self.response.headers['Content-Type'] = 'application/zip'
        self.response.headers['Content-Disposition'] = \
            'attachment; filename="root'+(folder.name.encode('utf-8'))+'.zip"'
        self.response.out.write(zipstream.getvalue())
Example #5
    def testFetchData(self):
        """Fetches data for blob."""

        query = blobstore.BlobInfo.all()
        key = str(query.fetch(1).pop().key())
        data = blobstore.fetch_data(key, 0, 5)
        self.assertEqual('\x89PNG\r\n', data)
Example #6
def remote_display_image(request, blob_key):
    from django import http

    if len(blob_key) > 0:
        blob_info = blobstore.BlobInfo.get(blob_key)
        if blob_info:

            blob_file_size = blob_info.size
            blob_content_type = blob_info.content_type

            blob_concat = ""
            start = 0
            end = blobstore.MAX_BLOB_FETCH_SIZE - 1
            step = blobstore.MAX_BLOB_FETCH_SIZE - 1

            while start < blob_file_size:
                blob_concat += blobstore.fetch_data(blob_key, start, end)
                temp_end = end
                start = temp_end + 1
                end = temp_end + step
            response = http.HttpResponse(blob_concat, mimetype=blob_content_type)
            response["Cache-Control"] = "no-cache, no-store, must-revalidate, pre-check=0, post-check=0"
            response['X-file-name'] = blob_info.filename.encode('utf-8')
            return response
    response = http.HttpResponse()
    response['X-file-name'] = 'NOFILE'  # when no blob is found for the given key
    return response
Example #7
    def post(self):
        fkstring = self.request.get("filekey")
        filekey = FileKey(key_name=fkstring, parent=filelist_key())
        self.response.out.write("File key: " + filekey.key().id_or_name())

        upload_files = self.get_uploads("file")
        blob_file = upload_files[0]
        filekey.blobinfokey = str(blob_file.key())

        if MEMCACHE and blob_file.size <= 102400:
            self.response.out.write("</br> Inserted in MEMCACHE")
            memcache.add(fkstring, blob_file)
            filekey.filelocation = "memcache"
            self.response.out.write("""<br><b><a href="/">RETURN TO HOME</a></b><br>""")
        else:
            self.response.out.write("</br> Inserted in GOOGLE CLOUD STORAGE")
            wr_path = files.gs.create(
                BUCKET_PATH + "/" + filekey.key().id_or_name(), mime_type="text/plain", acl="public-read"
            )
            with files.open(wr_path, "a") as filepath:
                start = 0
                fetchsz = blobstore.MAX_BLOB_FETCH_SIZE - 1
                filesize = blob_file.size
                while start < filesize:
                    # fetch_data's end index is inclusive, so each call returns
                    # up to fetchsz + 1 bytes; advance past the inclusive end.
                    filepath.write(blobstore.fetch_data(blob_file, start, start + fetchsz))
                    start = start + fetchsz + 1
            files.finalize(wr_path)
            filekey.filelocation = "cloudstorage"
            self.response.out.write("""<br><b><a href="/">RETURN TO HOME</a></b><br>""")

        filekey.put()
Example #8
    def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Read the file's contents using the Blobstore API.
        # The last two parameters specify the start and end index (inclusive)
        # of the bytes we want to read.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)
Example #9
  def post(self):
    fkstring = self.request.get("filekey")
    filekey = FileKey(key_name=fkstring, parent=filelist_key())
    self.response.out.write("File key: " + filekey.key().id_or_name())

    upload_files = self.get_uploads('file')
    blob_file = upload_files[0]
    filekey.blobinfokey = str(blob_file.key())

    if MEMCACHE and blob_file.size <= 102400:
      self.response.out.write("</br> Inserted in MEMCACHE")
      memcache.add(fkstring, blob_file)
      filekey.filelocation = "memcache"
      self.response.out.write("""<br><b><a href="/">RETURN TO HOME</a></b><br>""")
    else:
      self.response.out.write("</br> Inserted in GOOGLE CLOUD STORAGE")
      wr_path = files.gs.create(BUCKET_PATH+"/"+filekey.key().id_or_name(), mime_type='text/plain', acl='public-read')
      with files.open(wr_path, 'a') as filepath:
        start = 0
        fetchsz = blobstore.MAX_BLOB_FETCH_SIZE - 1
        filesize = blob_file.size
        while start < filesize:
          # fetch_data's end index is inclusive; advance past it to avoid
          # re-reading the last byte of each chunk.
          filepath.write(blobstore.fetch_data(blob_file, start, start + fetchsz))
          start = start + fetchsz + 1
      files.finalize(wr_path)
      filekey.filelocation = "cloudstorage"
      self.response.out.write("""<br><b><a href="/">RETURN TO HOME</a></b><br>""")
    
    filekey.put()
Example #10
    def post(self):
        try:
            #Retrieve the image from http submit. It will be a blob object. 
            image = self.get_uploads()[0]
            #Get info from blobfile uploaded 
            file_info = self.get_file_infos()[0]
            #Create the serving url for the blob uploaded into GS (Google cloud storage)
            serving_url = images.get_serving_url(blobstore.create_gs_key(file_info.gs_object_name))

            # Fetch image dimensions. The last integer specifies how much to fetch; 50000 is plenty.
            data = blobstore.fetch_data(image.key(), 0, 50000)
            img = images.Image(image_data = data)
            width_to_height_ratio = float(img.width)/img.height

            #Get name and description
            name = self.request.get('name')
            caption = self.request.get('caption')
            
            #Now generate the database object and store it. 
            image_model = ImageModel(
                name = name,
                caption = caption,
                blob_key = image.key(),
                gs_object_name = file_info.gs_object_name,
                serving_url = serving_url,
                width_to_height_ratio = width_to_height_ratio)
            image_model.put()

            #Redirect to the upload site and tell it that it succeeded. 
            self.redirect('/admin/upload_image#success')

        except:
            #Redirect to upload site and tell that something went wrong. 
            self.redirect('/admin/upload_image#fail')
Example #11
    def get(self):
        mapType = self.get_cookie('graphType')
        template_values = {}
        if mapType == 'series':
            template_values['data'] = convert(model['timeSeriesJson'])
            template = JINJA_ENVIRONMENT.get_template('/pages/visualizeSeries.html')
            self.response.write(template.render(template_values))
        else:
            graphstr = blobstore.fetch_data(self.request.get('jsonBlob'), 0, 900000)
            graphstr = graphstr.encode('utf-8')
            print graphstr
            graphstr = graphstr.replace("\u25cf",'o')
            graphstr = graphstr.replace("\u25cb", 'x')
            modelMap = json.loads(graphstr)
            if mapType == 'contactmap':
                template_values['layout2'] = "{'coolingFactor': 0.95, 'initialTemp': 200,'nodeRepulsion': 100, 'nodeOverlap': 10, 'gravity': 650, 'padding': 4, 'name': 'cose', 'nestingFactor': 2, 'initialTemp ': 2000, 'minTemp': 1, 'numIter': 100, 'edgeElasticity': 500, 'idealEdgeLength': 10}"
                template_values['targetshape'] = "none"
                template_values['typecolor'] = '#fff'
            elif mapType in ['regulatory','std']:
                template_values['targetshape'] = "triangle"
                template_values['layout2'] = "{'name': 'dagre','fit':true,'padding':30,'directed': false}"
                template_values['typecolor'] = '#000'

            template_values['graph'] = convert(modelMap['elements'])
            template_values['layout'] = convert(modelMap['layout'][0])
            template = JINJA_ENVIRONMENT.get_template('visualize.html')
            self.response.write(template.render(template_values))
Example #12
    def post(self):
        # 'file' is file upload field in the form
        upload_files = self.get_uploads('file')
        for blob_info in upload_files:
            if blob_info.content_type not in content_types.split(','):
                # Ignore non-images.
                logging.warning("Invalid mimetype %s, skipping"
                                % blob_info.content_type)
                blob_info.delete()
                continue

            # images.Image doesn't have width and height when built from a
            # blob_key.  The documentation doesn't state that it's safe to
            # build an images.Image with partial data, so manually sniff image
            # dimensions.
            # http://thejapanesepage.com/phpbb/download/file.php?id=247 needs
            # at least 150kB of image data.
            data = blobstore.fetch_data(blob_info, 0, 200000) 
            try:
                width, height = peekimagedata.peek_dimensions(data)
                mimetype = peekimagedata.peek_mimetype(data)
            except ValueError:
                logging.warning("Failed to peek, skipping")
                blob_info.delete()
                continue

            if blob_info.content_type != mimetype:
                logging.warning("Sniffed mimetype didn't match, skipping")
                blob_info.delete()
                continue
            
            store_blob(blob_info.key(), width, height)

        self.redirect('/')
Example #13
def _reader(blob_info, chunk_size=1024):
    """ Helper generator for reading chunked content from the blobstore
    """
    position = 0
    while position < blob_info.size:
        yield blobstore.fetch_data(blob_info, position,
                position + chunk_size)
        position += chunk_size + 1
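The generator above advances position by chunk_size + 1 because fetch_data's end index is inclusive. A hedged usage sketch that reads a whole blob while keeping each call under the API limit (read_all is a hypothetical name):

# Hypothetical caller for _reader: join every chunk, sizing each fetch so it
# stays within blobstore.MAX_BLOB_FETCH_SIZE (the end index is inclusive, so
# the largest safe chunk_size argument is MAX_BLOB_FETCH_SIZE - 1).
from google.appengine.ext import blobstore

def read_all(blob_key):
    blob_info = blobstore.BlobInfo.get(blob_key)
    chunks = _reader(blob_info, chunk_size=blobstore.MAX_BLOB_FETCH_SIZE - 1)
    return ''.join(chunks)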
Example #14
    def get_blob(self, image_key):
        blob_info = blobstore.get(image_key)

        if blob_info:
            logging.info("fetching image for {}".format(image_key))
            return blobstore.fetch_data(image_key, 0, blob_info.size)
        else:
            raise cloudstorage.NotFoundError
Example #15
 def post(self):
     """
     Note: BlobstoreUploadHandler POSTs must return a redirect
     """
     logging.info("starting post")
     upload_files = self.get_uploads()
     handle = self.request.get("handle")
     signature = self.request.get("signature")
     logging.info("Handling post for handle=%s signature=%s" % (handle, signature))
     try:
         if len(upload_files) > 0:
             blob_info = upload_files[0]
             data = blobstore.fetch_data(blob_info.key(), 0, blob_info.size) 
             imgtemp = images.Image(image_data=data)
             width = imgtemp.width
             height = imgtemp.height
             logging.info("Got upload %s => handle=%s size=%dx%d" % (blob_info.__dict__, handle, width, height))
             
             if is_alphanumeric(handle) and \
                 is_alphanumeric(signature) and \
                 is_signature_valid(handle, signature):
                 image = find_image_by_handle(handle)
                 api_response = "/image/info?handle=%s&signature=%s" % (handle, signature)
                 if image is not None:
                     # TODO: add an option to clobber an existing image
                     response = { 'handle': image.handle, 'created_at': image.created_at.isoformat(), 'status': image.status }
                     logging.info("Image already exists: %s" % response)
                     self.redirect(api_response)
                     return
                 image = ImageAsset(handle=handle, status="done", width=width, height=height, blob_key=blob_info.key())
                 image.put()
                 if (self.request.get("return")):
                     # this was submitted by the Gallery form
                     self.redirect(self.request.get("return"))
                 else:
                     # this was submitted by an API client
                     logging.info("Created image: %s" % image)
                     # TODO: callback to the BC API to update that the image submission completed
                     self.redirect(api_response)
                 return
             else:
                 logging.error("Invalid params: handle=%s signature=%s" % (handle, signature))
         else:
             media = self.request.get("media")
             logging.error("Nothing uploaded? %s" % upload_files)
             logging.error("media = %s" % media)
             logging.error("Request data: %s" % self.request.__dict__)
     #except blobstore.BlobFetchSizeTooLargeError, e:
     #    self.redirect("/image/error?handle=%s&signature=%s&error=%s" % (handle, signature, urllib.quote(str(e)))
     #    return
     #except DeadlineExceededError, e:
     #    self.redirect("/image/error?handle=%s&signature=%s&error=%s" % (handle, signature, urllib.quote(str(e)))
     #    return
     except:
         logging.error("Request data: %s" % self.request.__dict__)
         logging.error("Image upload failed: ", exc_info=1)
     # TODO (maybe): redirect to an error handler
     self.error(500)
Example #16
 def get(self):
     blob_info = self.request.get('bnglfile')
     filename = self.request.get('filename')
     graphtype = self.request.get('graphtype')
     bnglContent = xmlrpclib.Binary(blobstore.fetch_data(blob_info, 0, 900000))
     s = xmlrpclib.ServerProxy(remoteServer, GAEXMLRPCTransport())
     #s = xmlrpclib.ServerProxy('http://127.0.0.1:9000',GAEXMLRPCTransport())
     ticket = s.generateGraph(bnglContent, graphtype)
     self.redirect('/waitFile?ticket={0}&fileName={1}_{2}.gml&resultMethod=visualize&graphType={2}'.format(ticket, filename, graphtype))
Example #17
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        gcs_filename = main.BUCKET + '/blobstore_demo'
        blob_key = CreateFile(gcs_filename)

        self.response.write('Fetched data %s\n' %
                            blobstore.fetch_data(blob_key, 0, 2))

        blobstore.delete(blob_key)
Example #18
  def get(self):
    self.response.headers['Content-Type'] = 'text/plain'
    gcs_filename = main.BUCKET + '/blobstore_demo'
    blob_key = CreateFile(gcs_filename)

    self.response.write('Fetched data %s\n' %
                        blobstore.fetch_data(blob_key, 0, 2))

    blobstore.delete(blob_key)
Example #19
	def post(self):

		# Small file size <= 100KB
		# Big file size > 100KB
		BIGFILEBASE = 100 * 1024

		# Bucket name
		BUCKETPATH = '/gs/hxt-001'

		# 'file' is the file upload field in the form
		uploadFiles = self.get_uploads('file')

		for blobInfo in uploadFiles:
			# Store the key of file and blobInfoKey into the Datastore
			mykey = self.request.get('filekey')
			if mykey == '':
				mykey = blobInfo.filename
			filekey = FileKey(key_name = mykey, parent = fileKeyList())

			# Output the key of the file onto the web page
			self.response.out.write('File key: ')
			self.response.out.write(filekey.key().id_or_name())

			# Get the blobInfoKey of the file
			filekey.blobInfoKey = str(blobInfo.key())

			# Output the blob information onto the web page
			self.response.out.write('<br />Blob Info Key: ')
			self.response.out.write(blobInfo.key())
			self.response.out.write('<br />Blob Info Size: ')
			self.response.out.write(blobInfo.size)

			# Save file into memcache if the size is less than 100KB
			if blobInfo.size <= BIGFILEBASE:
				memcache.add(mykey, blobInfo)
				filekey.fileLocation = 'memcache'
				self.response.out.write('<br />File Saved to Memcache Successfully!')
				self.response.out.write('<br /><div></div>')
			# Save file into cloud storage if the size is larger than 100KB
			else:
				writePath = files.gs.create(BUCKETPATH + '/' + filekey.key().id_or_name(), mime_type = 'text/plain', acl = 'public-read')
				with files.open(writePath, 'a') as fstream:
					fstart, fsize = 0, blobInfo.size
					fetchSize = blobstore.MAX_BLOB_FETCH_SIZE - 1
					while fstart < fsize:
						# fetch_data's end index is inclusive; advance past it so
						# the last byte of each chunk is not written twice
						fstream.write(blobstore.fetch_data(blobInfo, fstart, fstart + fetchSize))
						fstart += fetchSize + 1

				# Finalize the file to make the file readable in the Google Cloud Storage
				files.finalize(writePath)
				filekey.fileLocation = 'cloudStorage'
				self.response.out.write('<br />File Saved to Google Cloud Storage Successfully!')
				self.response.out.write('<br /><div></div>')

			# Store the file instance in the Datastore
			filekey.put()
Example #20
 def format(self):
     """
     The format of the image (see `Image.format`_ for possible values).
     If there is no image data, this will be ``None``.
     """
     if self.image is not None:
         try:
             return self.image.format
         except NotImageError:
             data = blobstore.fetch_data(self.blob_key, 0, IMAGE_HEADER_SIZE)
             img = images.Image(image_data=data)
             return img.format
     return None
Example #21
 def height(self):
     """
     The height of the image in pixels (see `Image.height`_ for more documentation).
     If there is no image data, this will be ``None``.
     """
     if self.image is not None:
         try:
             return self.image.height
         except NotImageError:
             data = blobstore.fetch_data(self.blob_key, 0, IMAGE_HEADER_SIZE)
             img = images.Image(image_data=data)
             return img.height
     return None
Example #22
    def post(self):
        elementDictionary = defaultdict(lambda: defaultdict(str))
        ymlDict = {'model': [{'name': self.get_cookie('fileName1'), 'molecules': []},
                             {'name': self.get_cookie('fileName2'), 'molecules': []}]}

        for element in self.request.POST.items():
            elementDictionary[element[0].split('_')[1]][element[0].split('_')[0]] = element[1]

        fileName1 = self.get_cookie('fileName1')
        fileName2 = self.get_cookie('fileName2')
        blob1 = self.get_cookie('blob1')
        blob2 = self.get_cookie('blob2')


        for entry in elementDictionary:
            if elementDictionary[entry]['scroll'] != 'None':
                if elementDictionary[entry]['alt'] != '':
                    newName = elementDictionary[entry]['alt']
                else:
                    newName = elementDictionary[entry]['field']
                if newName != elementDictionary[entry]['scroll']:
                    ymlDict['model'][1]['molecules'].append({'name': elementDictionary[entry]['scroll'], 'newname': newName})
            if elementDictionary[entry]['alt'] != '':
                newName = elementDictionary[entry]['alt']
                ymlDict['model'][0]['molecules'].append({'name': elementDictionary[entry]['field'], 'newname': newName})

        s = xmlrpclib.ServerProxy(remoteServer, GAEXMLRPCTransport())

        bnglContent1 = xmlrpclib.Binary(blobstore.fetch_data(blob1, 0, 900000))
        bnglContent2 = xmlrpclib.Binary(blobstore.fetch_data(blob2, 0, 900000))
        yamlStr = xmlrpclib.Binary(yaml.dump(ymlDict))

        ticket = s.compareFiles(bnglContent1, bnglContent2, yamlStr)
        # self.response.write(result)

        self.redirect('/waitFile?ticket={0}&resultMethod=normalize'.format(ticket, fileName1))
        
        self.response.write(str(ymlDict))
Example #23
    def post(self):
        """
        Process the file translation with the user information sent from
        matchMolecules.html. Calls a remote service defined in <remoteServer>
        and sends it a file and an atomization flag.
        """
        bucket_name = os.environ.get('BUCKET_NAME',
                                     app_identity.get_default_gcs_bucket_name())


        fileName1 = self.get_cookie('fileName1')
        blob1 = self.get_cookie('blob1')

        sbmlContent = xmlrpclib.Binary(blobstore.fetch_data(blob1, 0, 900000))
        bondsjsonstr = '''"complexDefinition":[
    {0}
]'''.format(self.request.get('bondsjsonarea'))

        stoichjsonstr = '''"modificationDefinition":{{
    {0}
}}'''.format(self.request.get('stoichjsonarea'))



        #self.set_cookie(name="jsonbonds", value=self.request.get('jsonbonds'))
        #self.set_cookie(name="jsonstoich", value=self.request.get('jsonstoich'))
        jsonstr = '''
{{
    "reactionDefinition" : [],
    {0},
    {1},
    "partialComplexDefinition":[]
}}
'''.format(bondsjsonstr, stoichjsonstr)
        print jsonstr

        # jsonContent must be defined even when jsonstr is empty, since it is
        # checked below before calling the remote service.
        jsonContent = None
        if len(jsonstr) > 0:
            jsonContent = xmlrpclib.Binary(jsonstr)

        # https://developers.google.com/appengine/docs/python/urlfetch/fetchfunction
        # https://groups.google.com/forum/#!topic/google-appengine/XbrJvt9LfuI

        s = xmlrpclib.ServerProxy(remoteServer, GAEXMLRPCTransport())
        #s = xmlrpclib.ServerProxy('http://127.0.0.1:9000',GAEXMLRPCTransport())
        if jsonContent:
            ticket = s.atomize(sbmlContent, 'atomize', '', '', jsonContent)
        else:
            ticket = s.atomize(sbmlContent, 'atomize', '', '')
        # self.response.write(result)
        
        self.redirect('/waitFile?ticket={0}&fileName={1}.bngl&blob1={2}&resultMethod=atomize'.format(ticket, fileName1,blob1))
Example #24
def get_blob(blob_key):
    ret = ''
    try:
        p0 = 0
        p1 = blobstore.MAX_BLOB_FETCH_SIZE - 1
        s = '0'
        while len(s) > 0:
            s = blobstore.fetch_data(blob_key, p0, p1)
            ret += s
            p0 = p1 + 1
            p1 = p0 + blobstore.MAX_BLOB_FETCH_SIZE - 1
    except:
        pass
    return ret 
Example #25
def get_blob(blob_key):
    ret = ''
    try:
        p0 = 0
        p1 = blobstore.MAX_BLOB_FETCH_SIZE - 1
        s = '0'
        while len(s) > 0:
            s = blobstore.fetch_data(blob_key, p0, p1)
            ret += s
            p0 = p1 + 1
            p1 = p0 + blobstore.MAX_BLOB_FETCH_SIZE - 1
    except:
        pass
    return ret
Example #26
    def testBlobReader(self):
        """Tests the BlobReader API."""

        query = blobstore.BlobInfo.all()
        blob_info = query.fetch(1).pop()
        blob_key = str(blob_info.key())

        reader = blobstore.BlobReader(blob_key)
        self.assertEqual(blob_info.filename, reader.blob_info.filename)
        self.assertEqual(blob_info.size, reader.blob_info.size)

        data = blobstore.fetch_data(blob_key, 0, 5)
        self.assertEqual(data, reader.read()[:6])
        reader.close()
        self.assertTrue(reader.closed)
Example #27
 def get(self, file_key):
     blob = blobstore.get(file_key)
     if not blob:
         self.error(404)
     else:
         # TODO(jeff.carollo): Actually keep track of mimetypes.
         # TODO(jeff.carollo): Paginate output.
         if "text" in self.request.headers.get("Accept", ""):
             self.response.headers["Content-Type"] = "text/plain"
             data = blobstore.fetch_data(file_key, 0, blobstore.MAX_BLOB_FETCH_SIZE - 1)
             self.response.out.write(data)
             if len(data) >= blobstore.MAX_BLOB_FETCH_SIZE - 1:
                 self.response.out.write("\n----RESULT-TRUNCATED----\n")
         else:
             self.send_blob(file_key)
Example #28
 def get(self):
   key = self.request.get('key')
   fetch_data= self.request.get('data')
   if fetch_data is not None and fetch_data == 'true':
     start = self.request.get('start')
     end = self.request.get('end')
     async = self.request.get('async')
     if async is not None and async == 'true':
       data_rpc = blobstore.fetch_data_async(key, int(start), int(end))
       data = data_rpc.get_result()
     else:
       data = blobstore.fetch_data(key, int(start), int(end))
     if data is not None:
       self.response.out.write(data)
     else:
       self.response.set_status(404)
Example #29
 def get(self):
   key = self.request.get('key')
   fetch_data= self.request.get('data')
   if fetch_data is not None and fetch_data == 'true':
     start = self.request.get('start')
     end = self.request.get('end')
     async = self.request.get('async')
     if async is not None and async == 'true':
       data_rpc = blobstore.fetch_data_async(key, int(start), int(end))
       data = data_rpc.get_result()
     else:
       data = blobstore.fetch_data(key, int(start), int(end))
     if data is not None:
       self.response.out.write(data)
     else:
       self.response.set_status(404)
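fetch_data_async, used above, returns an RPC whose value is obtained with get_result(). A hedged sketch that starts two range fetches before waiting on either (the 1 KiB byte ranges are arbitrary):

# Sketch: issue two asynchronous fetches for different ranges of one blob and
# collect both results afterwards; the ranges chosen here are arbitrary.
from google.appengine.ext import blobstore

def fetch_two_ranges(blob_key):
    rpc_head = blobstore.fetch_data_async(blob_key, 0, 1023)
    rpc_next = blobstore.fetch_data_async(blob_key, 1024, 2047)
    return rpc_head.get_result(), rpc_next.get_result()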
Example #30
 def get(self, file_key):
     blob = blobstore.get(file_key)
     if not blob:
         self.error(404)
     else:
         # TODO(jeff.carollo): Actually keep track of mimetypes.
         # TODO(jeff.carollo): Paginate output.
         if 'text' in self.request.headers.get('Accept', ''):
             self.response.headers['Content-Type'] = 'text/plain'
             data = blobstore.fetch_data(file_key, 0,
                                         blobstore.MAX_BLOB_FETCH_SIZE - 1)
             self.response.out.write(data)
             if len(data) >= blobstore.MAX_BLOB_FETCH_SIZE - 1:
                 self.response.out.write('\n----RESULT-TRUNCATED----\n')
         else:
             self.send_blob(file_key)
Example #31
class ViewHexHandler(webapp2.RequestHandler):
    def get(self):
        upload = get_upload(self.request.params.get('key'),
                            users.get_current_user())
        if not upload:
            self.error(404)
            return

        try:
            start = int(self.request.params.get('start', 0))
            end = int(self.request.params.get('end', 1024))
        except ValueError:
            # Fall back to the default range rather than leaving start/end
            # undefined when the query parameters are not integers.
            start = 0
            end = 1024
        if end < start or end - start > blobstore.MAX_BLOB_FETCH_SIZE:
            start = 0
            end = 1024

        # Read a range of bytes from the beginning of the Blobstore
        # value and display them as hex.
        bytes = blobstore.fetch_data(upload.blob, start, end)
        end = min(end, len(bytes))
        lines = []
        bytes_per_line = 20
        line_pattern = '%06X: %-' + str(bytes_per_line * 3) + 's %s'
        for i in xrange(0, end - start, bytes_per_line):
            subrange_end = min((end - start - i), bytes_per_line)
            value_strs = []
            value_chrs = []
            for offset in xrange(0, subrange_end):
                value_strs.append('%02X ' % ord(bytes[i + offset]))
                if ord(bytes[i + offset]) >= 32 and ord(
                        bytes[i + offset]) <= 126:
                    value_chrs.append(bytes[i + offset])
                else:
                    value_chrs.append('.')
            lines.append(line_pattern %
                         (i, ''.join(value_strs), ''.join(value_chrs)))
        hex_txt = '\n'.join(lines)

        template = template_env.get_template('viewhex.html')
        context = {
            'upload': upload,
            'hex': hex_txt,
            'start': start,
            'end': end,
        }
        self.response.write(template.render(context))
Example #32
    def add_image(self, url, referrer, remote):
        md5 = hashlib.md5()

        file_name = files.blobstore.create(mime_type='image/gif')
        with files.open(file_name, 'a') as f:
            while True:
                chunk = remote.read(2**10)
                if not chunk:
                    break
                f.write(chunk)
                md5.update(chunk)

        files.finalize(file_name)

        gif = gif_gif()
        gif.url = url
        gif.referrer = referrer
        gif.digest = md5.hexdigest()

        if gif_gif.all().filter('digest =', gif.digest).count() > 0:
            logging.info('Skipping (Hash Exists)')
            return

        blob_key = None
        while not blob_key: 
            time.sleep(1) 
            blob_key = files.blobstore.get_blob_key(file_name)

        info = blobstore.BlobInfo.get(blob_key)
        gif.size = info.size

        header = blobstore.fetch_data(blob_key, 0, 50000)
        image = images.Image(image_data=header)

        gif.image = str(blob_key)
        gif.width = image.width
        gif.height = image.height

        gif.thumb_url = images.get_serving_url(blob_key, settings.THUMB_SIZE)

        ratio = min(settings.THUMB_SIZE/float(gif.width), settings.THUMB_SIZE/float(gif.height))
        gif.thumb_width = int(gif.width * ratio)
        gif.thumb_height = int(gif.height * ratio)

        gif.put()
        logging.info('Successfully Added')
Example #33
    def get(self):
        json = []

        media_type = self.request.get('type')

        for b in blobstore.BlobInfo.all().order('creation'):
            if media_type == 'image':
                if re_image.match(b.content_type):
                    data = blobstore.fetch_data(b.key(), 0, 50000)
                    image = images.Image(image_data = data)

                    json.append({ 'src': str(b.key()), 'filename': b.filename, 'width': image.width, 'height': image.height})
            else:
                if not re_image.match(b.content_type):
                    json.append({ 'src': str(b.key()), 'filename': b.filename})

        self.render_json(json)
Example #34
    def post(self):
        allow_cors(self)
        try:
            upload = self.get_uploads()[0]
            data = blobstore.fetch_data(upload.key(), 0, 50000)
            measure_img = images.Image(image_data=data)
            id = uuid.uuid4().hex + upload.filename
            user_image_key = ImageModel(id=id,
                                        blob_key=upload.key(),
                                        width=measure_img.width,
                                        height=measure_img.height).put()
            user_image = user_image_key.get()
            image_info = {'url': user_image.url, 'id': user_image.id}
            return_json(self, image_info)

        except Exception, e:
            logging.error(str(e))
            self.error(500)
Example #35
def rescale(blob_key, width, height, halign='middle', valign='middle'):
	"""Resize then optionally crop a given image.

	Attributes:
	blob_key: blob_key of the image
	width: The desired width
	height: The desired height
	halign: Acts like photoshop's 'Canvas Size' function, horizontally
	       aligning the crop to left, middle or right
	valign: Vertically aligns the crop to top, middle or bottom
	"""
	image_data = blobstore.fetch_data(blob_key, 0, 999999)	
	image = images.Image(image_data)
	
	desired_wh_ratio = float(width) / float(height)
	wh_ratio = float(image.width) / float(image.height)

	if desired_wh_ratio > wh_ratio:
		# resize to width, then crop to height
		image = images.Image(blob_key=blob_key)
		image.resize(width=int(width))
		image.execute_transforms()
		trim_y = (float(image.height - int(height)) / 2) / image.height
		if valign == 'top':
			image.crop(0.0, 0.0, 1.0, 1 - (2 * trim_y))
		elif valign == 'bottom':
			image.crop(0.0, (2 * trim_y), 1.0, 1.0)
		else:
			image.crop(0.0, trim_y, 1.0, 1 - trim_y)
	else:
		# resize to height, then crop to width
		image = images.Image(blob_key=blob_key)
		image.resize(height=int(height))
		image.execute_transforms()
		trim_x = (float(image.width - int(width)) / 2) / image.width
		if halign == 'left':
			image.crop(0.0, 0.0, 1 - (2 * trim_x), 1.0)
		elif halign == 'right':
			image.crop((2 * trim_x), 0.0, 1.0, 1.0)
		else:
			image.crop(trim_x, 0.0, 1 - trim_x, 1.0)

	return image
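One way the rescale() helper above might be used; a sketch only, and the 200x150 size, JPEG encoding, and handler context are assumptions:

# Hypothetical use of rescale(): run the queued crop transform and write the
# resulting JPEG bytes to a webapp2-style response.
from google.appengine.api import images

def write_thumbnail(handler, blob_key):
    image = rescale(blob_key, 200, 150, valign='top')
    handler.response.headers['Content-Type'] = 'image/jpeg'
    handler.response.out.write(image.execute_transforms(output_encoding=images.JPEG))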
Example #36
def edit(request):
	if not request.user.is_authenticated():
		return HttpResponseRedirect('/')
	
	f_user = FacebookUser.objects.get(contrib_user=request.user.id)
	facebook_profile = None
	
	try:
		dj = DJ.objects.get(user_id=request.user.id)
	except ObjectDoesNotExist:
		dj = DJ()
		dj.user_id = request.user.id
	
	try:
		oauths = OAuth2Access.objects.filter(user_id=request.user.id, token_type = TOKEN_ACCESS).all()
		services = {}
		for oauth in oauths:
			services[oauth.service] = True
			fb_profile = urllib.urlopen('https://graph.facebook.com/me?access_token=%s' % oauth.token)
			facebook_profile = json.load(fb_profile)
	except ObjectDoesNotExist:
		services = {}
	
	
	if request.method == 'POST':
		form = EditDJForm(request.POST, request.FILES, instance = dj)
		form.save()
				
		return HttpResponseRedirect('/shows/')
	
	upload_url, upload_data = prepare_upload(request, '/dj/me')
	
	if dj.picture:
		blob_info = dj.picture.file.blobstore_info
		data = blobstore.fetch_data(blob_info.key(), 0, 50000) 
		image = images.Image(image_data=data)
	else:
		image = None
	
	try:
		connections = FacebookConnection.objects.filter(user_id=request.user.id).all()
	except ObjectDoesNotExist, e:
		connections = None
Example #37
 def post(self):
     upload_files = self.get_uploads('file')  # 'file' is file upload field in the form
     blob_info = upload_files[0]
     path = self.request.get("path")
     resource = resources.ResourceHandler.create_or_update_resource("Image", path, self.request)
     if not resource:
         self.error(412) # Precondition Failed
         self.response.out.write("parent folder not found for path '%s'" % path)
         return
         
     resource.blob = blob_info
     # fetch enough data to figure out the size (do we need the size for anything?)
     image_data = blobstore.fetch_data(blob_info, 0, blobstore.MAX_BLOB_FETCH_SIZE - 1)
     image = images.Image(image_data=image_data)
     resource.width = image.width
     resource.height = image.height
     resource.title = os.path.basename(path)
     resource.put()
     self.redirect('/admin/blob')
Example #38
def view(request, id):
	show = Show.objects.get(id__exact=id)
	
	blob_info = show.dj.picture.file.blobstore_info
	data = blobstore.fetch_data(blob_info.key(), 0, 50000) 
	image = images.Image(image_data=data)
	
	hosturl = 'http://' + request.get_host()
	flashvars = "lang=en&codec=mp3&volume=100&tracking=false&jsevents=false&autoplay=true&" + \
			"buffering=5&title=" + show.title
	flashplayer = hosturl + "/media/ffmp3-tiny.swf?url=" + show.url + '&' + flashvars
	sflashplayer = 'https://' + request.get_host() + "/media/ffmp3-tiny.swf?url=" + show.url + '&' + flashvars
	
	return direct_to_template(request, 'deejaypages/show.html', 
				{'show': show, 'flashvars' : flashvars, 'hosturl' : hosturl,
					'flashplayer' : flashplayer,
					'sflashplayer' : sflashplayer,
					'logout': '/logout' if request.user.is_authenticated() else '', 
					'nickname' : request.user.first_name if request.user.is_authenticated() else None,
					'user': request.user, 'image' : image, 
					'loggedin' : True if request.user.is_authenticated() else False})
Example #39
	def checkResize(self, uploadedImg, bKey, thumb):
		#image dimension limits
		limitW = 256 if thumb else 1024
		limitH = 192 if thumb else 768
	
		#can't view dimensions of blobstore items so have to load part of the file to check
		data = blobstore.fetch_data(bKey, 0, 50000) 
		imgCheck = images.Image(data)
		if not(imgCheck.width > limitW or imgCheck.height > limitH):
			return
		
		img = images.Image(blob_key = str(bKey))
		# Use float division: with Python 2 integer division, limitW/imgCheck.width
		# truncates to 0 whenever the image is wider than the limit.
		if (float(imgCheck.width) / limitW) > (float(imgCheck.height) / limitH):
			newWidth = limitW
			newHeight = int(imgCheck.height * (float(limitW) / imgCheck.width))
		else:
			newWidth = int(imgCheck.width * (float(limitH) / imgCheck.height))
			newHeight = limitH
		
		img.resize(newWidth, newHeight)
		uploadedImg.shrunk = img.execute_transforms(output_encoding=images.JPEG)
Example #40
	def get(self):
		template_values = {}
		email = self.request.get('email')
		user = User.get_by_email(email)
		if user is not None:
			template_values['user'] = user
			if user.picture is None:
				upload_url = blobstore.create_upload_url('/upload')
				template_values['upload_url'] = upload_url
			else:
				blob_info = blobstore.BlobInfo.get(user.picture)
				try:
					blob_part = blobstore.fetch_data(blob_info, 0, 10)
				except Exception as e:
					try:
						logging.error('Couldn\'t fetch picture %s for user %s. Removing the picture for the user.' % (blob_info.filename, user.email))
						blob_info.delete()
					except AttributeError, e:
						logging.error('The picture for user %s couldn\'t be found. Removing it from the user properties.' % user.email)
					user.picture = None
					user.put()
Example #41
  def compute_blob_ref(self, hash_func, blob_key):
    """Computes the blob ref for a blob stored using the given hash function.

    Args:
      hash_func: The name of the hash function (sha1, md5)
      blob_key: The BlobKey of the App Engine blob containing the blob's data.

    Returns:
      A newly computed blob_ref for the data.
    """
    hasher = hashlib.new(hash_func)
    last_index = 0
    while True:
      data = blobstore.fetch_data(
          blob_key, last_index, last_index + blobstore.MAX_BLOB_FETCH_SIZE - 1)
      if not data:
        break
      hasher.update(data)
      last_index += len(data)

    return '%s-%s' % (hash_func, hasher.hexdigest())
Example #42
  def post(self):
    # save the file key and blobinfokey to Datastore
    mykey = self.request.get("filekey")
    filekey = FileKey(key_name=mykey, parent=filelist_key())
    self.response.out.write("File key:")
    self.response.out.write(filekey.key().id_or_name())
         

    upload_files = self.get_uploads('file')  # 'file' is file upload field in the form
    blob_info = upload_files[0]
    filekey.blobinfokey = str(blob_info.key())
    self.response.out.write("</br>Blob info key:")
    self.response.out.write(blob_info.key())
    self.response.out.write("</br>Blob info size:")
    self.response.out.write(blob_info.size)
    
    if blob_info.size <= BIGFILEBASE and ENABLEMEMCACHE:
      # small file, put to memcache
      memcache.add(mykey, blob_info)
      filekey.filelocation = "memcache"
      self.response.out.write("</br> File saved to memcache")
    else:
      self.response.out.write("</br> File saved to Google Cloud Storage.")
      # use filekey key name as the obj name in bucket
      write_path = files.gs.create(BUCKET_PATH+"/"+filekey.key().id_or_name(), mime_type='text/plain',
                                     acl='public-read')
      # Write to the file.
      with files.open(write_path, 'a') as fp:
        rstart = 0
        fsize = blob_info.size
        fetchsize = blobstore.MAX_BLOB_FETCH_SIZE - 1
        while rstart < fsize:
          # fetch_data's end index is inclusive; advance past it so the last
          # byte of each chunk is not written twice.
          fp.write(blobstore.fetch_data(blob_info, rstart, rstart + fetchsize))
          rstart = rstart + fetchsize + 1
      # Finalize the file so it is readable in Google Cloud Storage.
      files.finalize(write_path)
      filekey.filelocation = "cloudstorage"
      self.response.out.write("File saved to Google Cloud Storage.</br>")
    
    filekey.put()
Example #43
    def get(self):
        json = []

        media_type = self.request.get('type')

        for b in blobstore.BlobInfo.all().order('creation'):
            if media_type == 'image':
                if re_image.match(b.content_type):
                    data = blobstore.fetch_data(b.key(), 0, 50000)
                    image = images.Image(image_data=data)

                    json.append({
                        'src': str(b.key()),
                        'filename': b.filename,
                        'width': image.width,
                        'height': image.height
                    })
            else:
                if not re_image.match(b.content_type):
                    json.append({'src': str(b.key()), 'filename': b.filename})

        self.render_json(json)
Example #44
    def compute_blob_ref(self, hash_func, blob_key):
        """Computes the blob ref for a blob stored using the given hash function.

    Args:
      hash_func: The name of the hash function (sha1, md5)
      blob_key: The BlobKey of the App Engine blob containing the blob's data.

    Returns:
      A newly computed blob_ref for the data.
    """
        hasher = hashlib.new(hash_func)
        last_index = 0
        while True:
            data = blobstore.fetch_data(
                blob_key, last_index,
                last_index + blobstore.MAX_BLOB_FETCH_SIZE - 1)
            if not data:
                break
            hasher.update(data)
            last_index += len(data)

        return '%s-%s' % (hash_func, hasher.hexdigest())
Example #45
File: main.py Project: le-koj/lekoj
    def readFile(self, filename):
        self.response.write('Reading the full file contents:\n')

        # create blobstore filename
        blobstore_filename = '/gs{}'.format(filename)

        # create blob key
        blob_key = blobstore.create_gs_key(blobstore_filename)

        contents = blobstore.fetch_data(blob_key, 0, 1)
        """
        gcs_file = gcs.open(filename, 'r') ## open file in bucket
        contents = gcs_file.read()    ## read openned file
        gcs_file.close()              ## close file



        try:
            with gcs.open(filename, 'r') as f:
                self.response.out.write(f.read())
        except gcs.errors.NotFoundError:
                self.abort(404)
        """
        return contents
Example #46
	def upload( self, node=None, *args, **kwargs ):
		try:
			canAdd = self.canAdd("leaf", node)
		except:
			canAdd = False
		if not canAdd:
			for upload in self.getUploads():
				upload.delete()
			raise errors.Forbidden()
		try:
			res = []
			if node:
				# The file is uploaded into a rootNode
				nodeSkel = self.editNodeSkel()
				if not nodeSkel.fromDB(node):
					for upload in self.getUploads():
						upload.delete()
					raise errors.NotFound()
				else:
					weak = False
					parentDir = str(node)
					parentRepo =  nodeSkel["parentrepo"]
			else:
				weak = True
				parentDir = None
				parentRepo = None
			# Handle the actual uploads
			for upload in self.getUploads():
				fileName = self.decodeFileName(upload.filename)
				if str(upload.content_type).startswith("image/"):
					try:
						servingURL = images.get_serving_url(upload.key())
						if request.current.get().isDevServer:
							# NOTE: changed for Ticket ADMIN-37
							servingURL = urlparse(servingURL).path
						elif servingURL.startswith("http://"):
							# Rewrite Serving-URLs to https if we are live
							servingURL = servingURL.replace("http://", "https://")
					except:
						servingURL = ""
				else:
					servingURL = ""
				fileSkel = self.addLeafSkel()
				try:
					# only fetching the file header or all if the file is smaller than 1M
					data = blobstore.fetch_data(upload.key(), 0, min(upload.size, 1000000))
					image = images.Image(image_data=data)
					height = image.height
					width = image.width
				except Exception, err:
					height = width = 0
					logging.error(
						"some error occurred while trying to fetch the image header with dimensions")
					logging.exception(err)

				fileSkel.setValues(
					{
						"name": utils.escapeString(fileName),
						"size": upload.size,
						"mimetype": utils.escapeString(upload.content_type),
						"dlkey": str(upload.key()),
						"servingurl": servingURL,
						"parentdir": parentDir,
						"parentrepo": parentRepo,
						"weak": weak,
						"width": width,
						"height": height
					}
				)
				fileSkel.toDB()
				res.append(fileSkel)
				self.onItemUploaded(fileSkel)
			# Uploads stored successfully, generate response to the client
			for r in res:
				logging.info("Upload successful: %s (%s)" % (r["name"], r["dlkey"]))
			user = utils.getCurrentUser()
			if user:
				logging.info("User: %s (%s)" % (user["name"], user["key"]))
			return( self.render.addItemSuccess( res ) )
Example #47
 def fetch_data(self, *args, **kwargs):
     return blobstore.fetch_data(self.blob_key, *args, **kwargs)
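Example #47 is a one-line pass-through. A sketch of the kind of object it could live on, so the snippet reads in context; the class and attribute names are hypothetical:

# Hypothetical owner for the wrapper above: an object holding a blob_key that
# exposes ranged reads without callers importing the blobstore API themselves.
from google.appengine.ext import blobstore

class BlobBackedAsset(object):
    def __init__(self, blob_key):
        self.blob_key = blob_key

    def fetch_data(self, *args, **kwargs):
        return blobstore.fetch_data(self.blob_key, *args, **kwargs)

# e.g. the first 1 KiB (end index is inclusive):
# header = BlobBackedAsset(some_blob_key).fetch_data(0, 1023)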
Example #48
 def get(self, gs_path):
     gs_path = self.request.get('gs_path') or gs_path
     reset_cache = self.request.get('reset_cache')
     locale = self.request.get('locale')
     service_account_email = \
         '{}@appspot.gserviceaccount.com'.format(APPID)
     if not gs_path:
         detail = (
             'Usage: Share GCS objects with `{}`. Make requests to:'
             ' {}://{}/<bucket>/<path>.ext'.format(
                 service_account_email,
                 os.getenv('wsgi.url_scheme'),
                 os.getenv('HTTP_HOST')))
         self.abort(400, detail=detail)
         return
     gs_path, stat_result = self.normalize_gs_path(gs_path, locale)
     blob_key = blobstore.create_gs_key(gs_path)
     if reset_cache:
         try:
             images.delete_serving_url(blob_key)
         except images.Error as e:
             logging.error('Error deleting {} -> {}'.format(gs_path, str(e)))
     try:
         url = images.get_serving_url(blob_key, secure_url=True)
     except images.AccessDeniedError:
         detail = (
             'Ensure the following service'
             ' account has access to the object in Google Cloud Storage:'
             ' {}'.format(service_account_email))
         self.abort(400, explanation='AccessDeniedError', detail=detail)
         return
     except images.ObjectNotFoundError:
         detail = (
             'The object was not found. Ensure the following service'
             ' account has access to the object in Google Cloud Storage:'
             ' {}'.format(service_account_email))
         self.abort(400, explanation='ObjectNotFoundError', detail=detail)
         return
     except (images.TransformationError, ValueError):
         logging.exception('Debugging TransformationError.')
         detail = (
             'There was a problem transforming the image. Ensure the'
             ' following service account has access to the object in Google'
             ' Cloud Storage: {}'.format(service_account_email))
         self.abort(400, explanation='TransformationError', detail=detail)
         return
     # TODO(jeremydw): This is a WIP.
     # Should be updated based on Grow's integration.
     if self.request.get('redirect'):
         size = self.request.get('size')
         if size:
             url += '=s{}'.format(size)
         self.redirect(url)
         return
     image_metadata = {}
     try:
         data = blobstore.fetch_data(blob_key, 0, 50000)
         image = images.Image(image_data=data)
         image_metadata = {
                 'height': image.height,
                 'width': image.width,
         }
     except images.BadImageError:
         # If the header containing sizes isn't in the first 50000 bytes of the image.
         # Or, if the file uploaded was just not a real image.
         logging.exception('Failed to transform image.')
     response_content = json.dumps({
         'content_type': stat_result.content_type,
         'created': stat_result.st_ctime,
         'etag': stat_result.etag,
         'image_metadata': image_metadata,
         'metadata': stat_result.metadata,
         'size': stat_result.st_size,
         'url': url,
     })
     self.response.headers['Content-Type'] = 'application/json'
     self.response.out.write(response_content)