Example #1
def delete_blob(key):
    try:
        BlobInfo.get(key).delete()
    except:
        return 'failed'
    else:
        return 'succeeded'
Example #2
	def update_background(self, full, thumb):
		station = self.station
		
		old_full_blob_key = None
		old_thumb_blob_key = None
		
		m1 = re.match(r"/picture/([^/]+)?/view", station.full)
		m2 = re.match(r"/picture/([^/]+)?/view", station.thumb)
		if m1 and m2:
			logging.info("Background is a blob")
			old_full_blob_key = m1.group(1)
			old_thumb_blob_key = m2.group(1)
		else:
			logging.info("Background is a static file")
		
		station.full = full
		station.thumb = thumb
		
		station.put()
		logging.info("Station updated in datastore")
		
		memcache.set(self._memcache_station_id, station)
		logging.info("Station updated in memcache")
		
		# Update in runtime
		self._station = station
		
		if old_full_blob_key and old_thumb_blob_key:
			old_full = BlobInfo.get(old_full_blob_key)
			old_full.delete()
			logging.info("Old full size background removed from blobstore")
		
			old_thumb = BlobInfo.get(old_thumb_blob_key)
			old_thumb.delete()
			logging.info("Old thumbnail removed from blobstore")	
Example #3
def image_manager(page):
    """
    GET --> The main image manager page
    POST --> Delete requested file(s)
    :param page: The requested page
    """
    if request.method == 'POST':
        img_ref_key = request.get_json()

        # Delete the img from ndb
        for img_ref in img_ref_key['objects']:
            img_inst = ndb.Key(ImageReference, int(img_ref))
            img = img_inst.get()
            blob_key = img.blob

            # Delete img and blob
            img_inst.delete()
            BlobInfo.get(blob_key).delete()
            logging.info("Delete image: {}".format(img_ref))

        return "true"

    offset = (page-1)*IMAGES_PER_PAGE
    images = ImageReference.query().order(-ImageReference.date)
    pagination = Pagination(page, IMAGES_PER_PAGE, images.count())
    query = images.fetch(IMAGES_PER_PAGE, offset=offset)

    return render_template('image-manager/admin-manager-images.html',
                           keys=query,
                           pagination=pagination)
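
A minimal client-side sketch (not part of the example) of the POST contract described in the docstring above: the handler expects a JSON body of the form {"objects": [<ImageReference ids>]} and answers with the string "true". The URL and id below are hypothetical.

import requests  # assumption: any HTTP client will do; requests is used for brevity

resp = requests.post("https://example.appspot.com/admin/images/1",
                     json={"objects": ["5629499534213120"]})
assert resp.text == "true"
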
Example #4
def start_batch(key, bid):
    try:
        reader = BlobReader(BlobKey(key))
    except:
        return 'failed to find key: please re-upload.'
    newest_date = branch_newest_date(bid)
    dic = {}
    for line in reader:
        line = line.strip()
        try:
            game_date = valid(line)
            if game_date < newest_date:
                game_date = newest_date
        except:
            continue
        if util.tomorrow(util.today()) < game_date:
            continue
        if game_date not in dic:
            dic[game_date] = []
        dic[game_date].append(','.join(line.split(',')[:8]))
    for key_date in sorted(dic.keys()):
        qs = QueueScore(
            bid=bid, game_date=key_date,
            body='\n'.join(reversed(dic[key_date])))
        qs.put()
    BlobInfo.get(key).delete()
    return 'upload succeeded!'
Example #5
 def action_edit(self):
     if self.section.handler.request.get('submit'):
         self.SITE_HEADER = self.section.handler.request.get('SITE_HEADER')
         self.SITE_SUB_HEADER = self.section.handler.request.get('SITE_SUB_HEADER')
         self.DEFAULT_THEME = self.section.handler.request.get('DEFAULT_THEME')
         self.GOOGLE_ANALYTICS_UA = self.section.handler.request.get('GOOGLE_ANALYTICS_UA')
         self.ROBOTS_TXT = self.section.handler.request.get('ROBOTS_TXT')
         if self.section.handler.request.get('FAVICON_ICO'):
             if self.FAVICON_ICO:
                 BlobInfo.get(self.FAVICON_ICO).delete()
             data = self.section.handler.request.get('FAVICON_ICO')
             handle = files.blobstore.create(mime_type='image/x-icon', _blobinfo_uploaded_filename='favicon.ico')
             with files.open(handle, 'a') as f: f.write(data)
             files.finalize(handle)
             self.FAVICON_ICO = files.blobstore.get_blob_key(handle)
         self.ENABLE_THEME_PREVIEW = self.section.handler.request.get('ENABLE_THEME_PREVIEW') != ''
         self.DEBUG_MODE = self.section.handler.request.get('DEBUG_MODE') != ''
         cache.delete(CACHE_KEY)
         self.update()
         raise Exception('Redirect', self.section.action_redirect_path)
     f = form(self.section, self.section.full_path)
     f.add_control(control(self.section, 'text', 'SITE_HEADER', self.SITE_HEADER, 'Site header', 50))
     f.add_control(control(self.section, 'text', 'SITE_SUB_HEADER', self.SITE_SUB_HEADER, 'Site sub-header', 50))
     combined_themes = get_local_theme_namespaces() + get_custom_theme_namespaces()
     f.add_control(selectcontrol(self.section, 'DEFAULT_THEME', combined_themes, self.DEFAULT_THEME if self.DEFAULT_THEME else DEFAULT_LOCAL_THEME_TEMPLATE, 'Default theme'))
     f.add_control(control(self.section, 'text', 'GOOGLE_ANALYTICS_UA', self.GOOGLE_ANALYTICS_UA, 'Google analytics UA'))
     f.add_control(control(self.section, 'file', 'FAVICON_ICO', label='favicon.ico'))
     f.add_control(textareacontrol(self.section, 'ROBOTS_TXT', self.ROBOTS_TXT, 'robots.txt', 90, 5))
     f.add_control(checkboxcontrol(self.section, 'ENABLE_THEME_PREVIEW', self.ENABLE_THEME_PREVIEW, 'Enable theme preview'))
     f.add_control(checkboxcontrol(self.section, 'DEBUG_MODE', self.DEBUG_MODE, 'Debug mode'))
     f.add_control(control(self.section, 'submit', 'submit', 'Submit'))
     return '<h2>Edit configuration</h2>%s' % unicode(f)
Example #6
 def delete(self, **kwargs):
     if self.imageid:
         b = BlobInfo.get(self.imageid.split('.')[0])
         if b:
             b.delete()
     if self.apkkey:
         b = BlobInfo.get(self.apkkey)
         if b:
             b.delete()
     for imguri in self.imagelist:
         b = BlobInfo.get(imguri.split('.')[0])
         if b:
             b.delete()
     super(Plugin, self).delete(**kwargs)
     pluginCount = PluginCount.get_or_insert('plugin_count')
     pluginCount.num -= 1
     pluginCount.put()
     memcache.delete('allplugincount')
     memcache.delete('appnamelist')
     memcache.delete('pluginid%s' % self.key().id())
     memcache.delete('user_applist_%s' % (self.username))
     l = []
     for i in range(0, pluginCount.num % 30):
         l.append('applist__%s' % i)
     l.append('applist__%s' % len(l))
     memcache.delete_multi(l)
Example #7
File: iw.py Project: asgpng/COS-iw
 def get(self):
     query_params = {'blob_prop':str(urllib.unquote(self.request.get('blob_key')))}
     file = object_query(Blob, query_params).get()
     blob = BlobInfo(file.blob_key)
     blob.delete()
     file.key.delete()
     time.sleep(TIME_SLEEP)
     self.redirect('/files/view_list')
Example #8
File: utils.py Project: uri247/arch
def delete_all_images():
    k = ndb.Key("Firm", "frl")
    for img in Image.query(ancestor=k).iter():
        print img.key.id()
        for bk in [img.small_blob_key, img.large_blob_key]:
            if BlobInfo.get(bk):
                BlobInfo.get(bk).delete()
        img.key.delete()
Example #9
 def test_remove_image_with_only_original_blob(self):
     content_type = 'image/jpeg'
     blob_key = self.make_blob(content_type, 'dummy')
     self.assertTrue(BlobInfo.get(blob_key),
         'Should be able to load BlobInfo for key.')
     image = ae_image.core.Image(blob_key, content_type)
     image.remove()
     self.assertFalse(BlobInfo.get(blob_key),
         'Should no longer be able to load BlobInfo for key.')
Example #10
def serve_file(request, blob_key_or_info, as_download=False, content_type=None, filename=None, offset=None, size=None):
    """
        Serves a file from the blobstore, reads most of the data from the blobinfo by default but you can override stuff
        by passing kwargs.

        You can also pass a Google Cloud Storage filename as `blob_key_or_info` to use Blobstore API to serve the file:
        https://cloud.google.com/appengine/docs/python/blobstore/#Python_Using_the_Blobstore_API_with_Google_Cloud_Storage
    """

    if isinstance(blob_key_or_info, BlobKey):
        info = BlobInfo.get(blob_key_or_info)
        blob_key = blob_key_or_info
    elif isinstance(blob_key_or_info, basestring):
        info = BlobInfo.get(BlobKey(blob_key_or_info))
        blob_key = BlobKey(blob_key_or_info)
    elif isinstance(blob_key_or_info, BlobInfo):
        info = blob_key_or_info
        blob_key = info.key()
    else:
        raise ValueError("Invalid type %s" % blob_key_or_info.__class__)

    if info is None:
        # Lack of blobstore_info means this is a Google Cloud Storage file
        if has_cloudstorage:
            info = cloudstorage.stat(blob_key_or_info)
            info.size = info.st_size
            blob_key = create_gs_key('/gs{0}'.format(blob_key_or_info))
        else:
            raise ImportError("To serve a Cloud Storage file you need to install cloudstorage")

    response = HttpResponse(content_type=content_type or info.content_type)
    response[BLOB_KEY_HEADER] = str(blob_key)
    response['Accept-Ranges'] = 'bytes'
    http_range = request.META.get('HTTP_RANGE')

    if offset or size:
        # Looks a little bonkers, but basically create the HTTP range string, we cast to int first to make sure
        # nothing funky gets into the headers
        http_range = "{}-{}".format(
            str(int(offset)) if offset else "",
            str(int(offset or 0) + size) if size else ""
        )

    if http_range is not None:
        response[BLOB_RANGE_HEADER] = http_range

    if as_download:
        response['Content-Disposition'] = smart_str(
            u'attachment; filename="%s"' % (filename or info.filename)
        )
    elif filename:
        raise ValueError("You can't specify a filename without also specifying as_download")

    if info.size is not None:
        response['Content-Length'] = info.size
    return response
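
The docstring above describes serve_file's behaviour but shows no call site. A minimal usage sketch, assuming a Django view and a hypothetical Document model that stores a blob key:

# from myapp.models import Document  (hypothetical model with a blob_key field)
from django.http import Http404

def download_document(request, pk):
    try:
        doc = Document.objects.get(pk=pk)
    except Document.DoesNotExist:
        raise Http404("Unknown document")
    # Serve the stored blob as an attachment under its original blobstore filename
    return serve_file(request, doc.blob_key, as_download=True)
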
Example #11
def upload_post_processing():
	file = request.files.data.filename
	# validate file is image format
	if mimetypes.guess_type(file)[0].split('/')[0] != 'image':
		# delete non-image file types
		BlobInfo.gql("WHERE filename = :fname", fname=file).get().delete()
		return template('upload_error.html')
	
	response.set_cookie('img', file, path='/')
	redirect('/upload_success')
Example #12
    def __init__(self, **kwargs):
        gs_object_name = kwargs.pop('gs_object_name', None)
        blob_key = kwargs.pop('blob_key', None)
        if gs_object_name:
            self.blobstore_info = CloudStorageInfo(gs_object_name)
        elif blob_key:
            self.blobstore_info = BlobInfo(blob_key)
        else:
            raise ValueError('A gs_object_name or blob_key is required.')

        super(AppEngineUploadedFile, self).__init__(self.blobstore_info.open(), **kwargs)
Example #13
 def on_remove(self):
     for i in range(len(self.theme_namespaces)):
         # This can be done more efficiently via GQL
         theme = self.get_theme(self.theme_namespaces[i])
         cache.delete(CACHE_KEY_PREPEND + self.theme_namespaces[i])
         for key in theme.image_keys:
             cache.delete(CACHE_KEY_PREPEND + str(key))
             BlobInfo.get(key).delete()
         theme.key.delete()
         del self.theme_keys[i]
         del self.theme_namespaces[i]
     self.update()
Example #14
File: insert.py Project: bfeng/dfs
    def post(self):
        key = self.request.get("key")
        value = self.request.get("value")

        filename = urllib.unquote(key)

        # Clean up current file
        query = DataFile.all().filter("f_key", filename)

        for data_file in query:
            data_file.delete()

        # Create a file
        writable_file_name = files.blobstore.create(mime_type="application/octet-stream")

        with files.open(writable_file_name, "a") as f:
            f.write(value)
        files.finalize(writable_file_name)

        blob_key = files.blobstore.get_blob_key(writable_file_name)

        data_file = DataFile(f_key=filename, f_value=blob_key)
        data_file.put()

        if memcache.get(key="turn") == "on":
            if BlobInfo.get(blob_key).size <= 100000:
                memcache.set(key=filename, value=value, time=3600)

        write_boolean(self, True)
Example #15
 def test_append_from_blob_info(self):
     blob_key = self.make_blob('image/jpeg', 'dummy')
     collection = ae_image.core.Collection(
         [ae_image.core.Style('big', 500)])
     collection.append_from_blob_info(BlobInfo.get(blob_key))
     self.assertTrue(
         collection.get_url('big', blob_key), 'Expect URL back.')
Example #16
  def GarbageCollectBlobs(self):
    keys_to_blobs = {}
    for blob in BlobInfo.all():
      keys_to_blobs[blob.key()] = blob

    for responder in Responder.all():
      image_blob = responder.image_data
      if image_blob:
        key = image_blob.key()
        if key in keys_to_blobs:
          del keys_to_blobs[key]

    for product in Product.all():
      image_blob = product.image_data
      if image_blob:
        key = image_blob.key()
        if key in keys_to_blobs:
          del keys_to_blobs[key]

    for key, blob_info in keys_to_blobs.iteritems():
      logging.info('deleting %s' % key)
      blob_info.delete()

    if keys_to_blobs:
      return 'Deleted blobs: \n%s' % '\n'.join(str(k) for k in keys_to_blobs)
    else:
      return 'No blobs to delete'
Example #17
 def post(self):
     upload_files = self.get_uploads('file')  
     blob_info = upload_files[0]
     # Resize the image
     image = images.Image(blob_key=blob_info.key())
     image.resize(width=WOOF_FEED_ITEM_IMAGE_MAX_WIDTH, height=WOOF_FEED_ITEM_IMAGE_MAX_HEIGHT)
     thumbnail = image.execute_transforms(output_encoding=images.JPEG)
     # Save Resized Image back to blobstore
     file_name = files.blobstore.create(mime_type='image/jpeg')
     with files.open(file_name, 'a') as f:
         f.write(thumbnail)
     files.finalize(file_name)
     # Remove the original image
     blobstore.delete(blob_info.key())
     blob_key = files.blobstore.get_blob_key(file_name)
     # New FeedImage
     feed_image = FeedImage()
     feed_image.data = BlobInfo.get(blob_key)
     feed_image.width = image.width
     feed_image.height = image.height
     feed_image.put()
     # Create new FeedItem
     feed_item = FeedItem()
     feed_item.text = self.request.get("text")
     feed_item.image = feed_image
     feed_item.put()
     self.redirect('/')
Example #18
 def test_remove_with_single_image(self):
     blob_key = self.make_blob('image/jpeg', 'dummy')
     self.assertTrue(BlobInfo.get(blob_key),
         'Should be able to load BlobInfo for key.')
     collection = ae_image.core.Collection(
         [ae_image.core.Style('big', 500)])
     collection.append_from_blob_info(BlobInfo.get(blob_key))
     self.assertTrue(
         collection.get_url('big', blob_key), 'Expect URL back.')
     collection.remove(blob_key)
     self.assertFalse(BlobInfo.get(blob_key),
         'Should no longer be able to load BlobInfo for key.')
     self.assertRaises(ae_image.core.UnknownImage, collection.get_url,
         'big', blob_key)
     self.assertRaises(ae_image.core.UnknownImage, collection.remove,
         blob_key)
Example #19
 def action_get(self):
     if not self.section.path_params or len(self.section.path_params) != 3:
         raise Exception('NotFound')
     theme = self.get_theme(self.section.path_params[0])
     resource = self.section.path_params[1]
     filename = self.section.path_params[2]
     if resource == 'css':
         filenames, contents = theme.css_filenames, theme.css_contents
         content_type = 'text/css'
     elif resource == 'js':
         filenames, contents = theme.js_filenames, theme.js_contents
         content_type = 'text/javascript'
     elif resource == 'image':
         data = None
         try:
             key = theme.image_keys[theme.image_filenames.index(filename)]
             data = cache.get(CACHE_KEY_PREPEND + str(key))
             if not data:
                 data = BlobInfo.get(key)
                 cache.set(CACHE_KEY_PREPEND + str(key), data)
         finally:
             if not data:
                 raise Exception('NotFound')
             raise Exception('SendFileBlob', data.open().read(), data.content_type)
     else:
         raise Exception('NotFound')
     try:
         index = filenames.index(filename)
     except:
         raise Exception('NotFound')
     else:
         raise Exception('SendFileBlob', str(contents[index]), content_type)
Example #20
	def returnAllJSON(self):
		results = []
		fils = BlobInfo.all()
		for record in fils:
			blob_key = str(record.key())
			result = {}
			result['key'] = blob_key
			result['name'] = record.filename
			result['type'] = record.content_type
			result['size'] = record.size
			result['deleteType'] = 'DELETE'
			result['deleteUrl'] = self.request.host_url +'/?key=' + urllib.quote(blob_key, '')
			if (IMAGE_TYPES.match(result['type'])):
				try:
					result['url'] = images.get_serving_url(blob_key, size=1024, crop=True, secure_url=None)
					result['thumbnailUrl'] = images.get_serving_url(blob_key, size=240, crop=True, secure_url=None)
					result['thumbnailUrl2'] = images.get_serving_url(blob_key, size=120, crop=True, secure_url=None)
				except:  # Could not get an image serving url
					pass
			if not 'url' in result:
				result['url'] = self.request.host_url +\
					'/' + blob_key + '/' + urllib.quote(
						result['name'].encode('utf-8'), '')
			results.append(result)
		#result = {'files': results}
		s = json.dumps(results, separators=(',', ':'))
		redirect = self.request.get('redirect')
		if redirect:
			return self.redirect(str(
				redirect.replace('%s', urllib.quote(s, ''), 1)
			))
		if 'application/json' in self.request.headers.get('Accept'):
			self.response.headers['Content-Type'] = 'application/json'
		self.response.write(s)
Example #21
class AppEngineUploadedFile(UploadedFile):
    """
    A file uploaded via App Engine's upload mechanism.
    """

    def __init__(self, **kwargs):
        gs_object_name = kwargs.pop('gs_object_name', None)
        blob_key = kwargs.pop('blob_key', None)
        if gs_object_name:
            self.blobstore_info = CloudStorageInfo(gs_object_name)
        elif blob_key:
            self.blobstore_info = BlobInfo(blob_key)
        else:
            raise ValueError('A gs_object_name or blob_key is required.')

        super(AppEngineUploadedFile, self).__init__(self.blobstore_info.open(), **kwargs)

    def open(self, mode=None):
        pass

    def chunks(self, chunk_size=1024 * 128):
        self.file.seek(0)
        while True:
            content = self.read(chunk_size)
            if not content:
                break
            yield content

    def multiple_chunks(self, chunk_size=1024 * 128):
        return True
Example #22
 def _get_info(self, name):
     if name.startswith('/gs/'):
         assert cloudstorage, 'cloudstorage module is not available.'
         return CloudStorageInfo(name)
     else:
         key = BlobKey(name.split('/', 1)[0])
         return BlobInfo.get(key)
Example #23
	def post(self):
		try:
			if not db.WRITE_CAPABILITY.is_enabled():
				raise utils.CapabilityUnavailable('Datastore unavailable')
			
			if 'user_uuid' not in self.context['request_args'] or self.context['request_args']['user_uuid'] == '':
				raise Exception('No user_uuid provided')

			if 'serving_url' not in self.context['request_args'] or self.context['request_args']['serving_url'] == '':
				raise Exception('No serving_url provided')

			if 'short_url' not in self.context['request_args'] or self.context['request_args']['short_url'] == '':
				raise Exception('No short_url provided')

			if 'blob_key_name' not in self.context['request_args'] or self.context['request_args']['blob_key_name'] == '':
				raise Exception('No blob_key_name provided')

			blob_key = BlobKey(self.context['request_args']['blob_key_name'])
			blob_info = BlobInfo.get(blob_key)
			user_image = photo_module.Photo(
				user_uuid=self.context['request_args']['user_uuid'],
				serving_url=self.context['request_args']['serving_url'],
				short_url=self.context['request_args']['short_url'],
				blob=blob_info
			)
			user_image.save()
		except modules_base.CapabilityUnavailable, cu:
			logging.exception(cu)
			self.response.set_status(503)
Example #24
def deleteOldBlobs():
    blobs = BlobInfo.all().fetch(500)
    for blob in blobs:
        if blob.filename.find(SMARTHISTORY_URL) != -1:
            age = datetime.now() - blob.creation
            if age.days * 86400 + age.seconds >= SMARTHISTORY_IMAGE_CACHE_EXPIRATION:
                blob.delete()
Example #25
    def output(self, filename, file_out):
        content = file_out.read()

        base, ext = os.path.splitext(filename)

        if ext == ".css":
            mimetype = "text/css"
        elif ext == ".js":
            mimetype = "text/javascript"
        else:
            mimetype = "application/octet-stream"

        already_exists = False

        for info in BlobInfo.all().filter('content_type = ', mimetype):
            if info.filename == filename:
                already_exists = True
                continue

            #Clear out old blobs
            if info.filename.split(".")[0] == filename.split(".")[0]:
                logging.debug("Deleting: %s", info.filename)
                info.delete()

        if not already_exists:
            logging.info("Creating: %s", filename)
            result = files.blobstore.create(mime_type=mimetype, _blobinfo_uploaded_filename=filename)
            with files.open(result, "a") as f:
                f.write(content)
            files.finalize(result)

            blob_key = files.blobstore.get_blob_key(result)
            while not blob_key:
                blob_key = files.blobstore.get_blob_key(result)
Example #26
def do_clean(cursor=None):

  bq = BlobInfo.all()

  if cursor:
    bq.with_cursor(cursor)

  blob = bq.get()

  if not blob:
    return

  key = str(blob.key())

  thq = Thread.all(keys_only=True)
  thq.filter("images", key)

  th = thq.get()

  if th:
    logging.info("thread: %r" % th)
  else:
    logging.info("no thread for image %r" % key)

    blob.delete()

  deferred.defer(do_clean, bq.cursor(), _countdown=30)
Example #27
 def action_delete(self):
     if not self.section.path_params or len(self.section.path_params) != 1:
         raise Exception('NotFound')
     theme = self.get_theme(self.section.path_params[0])
     if self.section.handler.request.get('submit'):
         for key in theme.image_keys:
             cache.delete(CACHE_KEY_PREPEND + str(key))
             BlobInfo.get(key).delete()
         self.theme_keys.remove(theme.key)
         self.theme_namespaces.remove(theme.namespace)
         theme.key.delete()
         self.update()
         cache.flush_all() # Flush all cached resources for this theme which is important for sections where it is active
         raise Exception('Redirect', self.section.action_redirect_path)
     f = form(self.section, self.section.full_path)
     f.add_control(control(self.section, 'submit', 'submit', 'Confirm'))
     return '<div class="status warning">Are you sure you wish to delete theme "%s" and all associated resources?</div>%s' % (theme.namespace, unicode(f))
Example #28
def presentation_download(request, key, name):
    blob = BlobInfo.get(key)
    if not blob:
        return Response(status=304)
    headers = Headers()
    headers['Content-Type'] = blob.content_type.encode('utf-8')
    headers['Last-Modified'] = blob.creation.strftime(HTTP_DATE_FMT)
    return Response(blob.open().read(), headers=headers)
Example #29
	def get(self, resource):
		blob_key = str(urllib.unquote(resource))
		blob_info = BlobInfo.get(blob_key)
		
		if not blob_info:
			self.error(404)
		else:
			self.send_blob(blob_info)
Example #30
    def post(self):
        try:
            recipeID = self.request.get('recipe_id')
            print("rID: " + recipeID)

            recipe = ndb.Key(urlsafe=recipeID)
            recipe_key = recipe.get()

            # Image Elements
            recipe_key.img = []
            deletable = []

            # Get Deletable Elements
            try:
                removedImages = self.request.get('removed_images')
                images = removedImages.split(',')
                for image in images:
                    if image != '':
                        print('Deletable: ' + image)
                        deletable.append(BlobInfo.get(image).key())

            except Exception, e:
                print e

            # Avatar Model
            try:
                avi = self.get_uploads('avatar_file')
                recipe_key.avatar = avi[0].key()
                try:
                    oldAvi = self.request.get('existing_avatar')
                    recipe_key.img.append(BlobInfo.get(oldAvi).key())
                except Exception, e:
                    print(e)

            except Exception, e:
                print('No new Avatar Image Uploaded')
                # Existing Avatar Model
                try:
                    aviKey = self.request.get_all('existing_avatar')
                    recipe_key.avatar = BlobInfo.get(aviKey[0]).key()
                except Exception, e:
                    # Pull old avatar that was deleted out of the trash
                    if recipe_key.avatar in deletable:
                        deletable.remove(recipe_key.avatar)
                    print('No Existing Image Chosen as Avatar. Keeping Same Avatar')
Example #31
def serve_file(request, file, save_as, content_type, **kwargs):
    if isinstance(file, BlobKey):
        blobkey = file
    elif hasattr(file, 'file') and hasattr(file.file, 'blobstore_info'):
        blobkey = file.file.blobstore_info.key()
    elif hasattr(file, 'blobstore_info'):
        blobkey = file.blobstore_info.key()
    else:
        raise ValueError("The provided file can't be served via the "
                         "Google App Engine Blobstore.")
    response = HttpResponse(content_type=content_type)
    response[BLOB_KEY_HEADER] = str(blobkey)
    response['Accept-Ranges'] = 'bytes'
    http_range = request.META.get('HTTP_RANGE')
    if http_range is not None:
        response[BLOB_RANGE_HEADER] = http_range
    if save_as:
        response['Content-Disposition'] = smart_str(
            u'attachment; filename="%s"' % save_as)

    info = BlobInfo.get(blobkey)
    if info.size is not None:
        response['Content-Length'] = info.size
    return response
Example #32
 def _get_blobinfo(self, name):
     return BlobInfo.get(self._get_key(name))
Example #33
def download_file(blob_key):
    blob = BlobInfo.get(blob_key)      
    response.headers['X-AppEngine-BlobKey'] = blob_key.decode('utf-8')
    response.headers['Content-Disposition'] = "attachment; filename=%s" % blob.filename
    response.headers['Content-Type'] = blob.content_type
    return response
Example #34
                try:
                    aviKey = self.request.get_all('existing_avatar')
                    recipe_key.avatar = BlobInfo.get(aviKey[0]).key()
                except Exception, e:
                    # Pull old avatar that was deleted out of the trash
                    if recipe_key.avatar in deletable:
                        deletable.remove(recipe_key.avatar)
                    print(
                        'No Existing Image Chosen as Avatar. Keeping Same Avatar'
                    )

            # Existing Images Model
            try:
                urlKeys = self.request.get_all('existing_img')
                for keye in urlKeys:
                    recipe_key.img.append(BlobInfo.get(keye).key())
            except Exception, e:
                print e

            # New Images
            try:
                sm_images = self.get_uploads('sm_image')
                for image in sm_images:
                    recipe_key.img.append(image.key())
            except Exception, e:
                print e

            # Process Deletable
            try:
                for delete in deletable:
                    BlobInfo.get(delete).delete()
Example #35
 def getphotogallery(self, dish_id):
     dish = Dish.get_by_id(int(dish_id))
     docinfo = BlobInfo.get(dish.photogallery)
     return docinfo
Example #36
def get_blobstore_uploaded_file(blobstore_key):
    """ Convenience method for returning a BlobstoreUploadedFile object for a given blobstore_key
    """
    return BlobstoreUploadedFile(BlobInfo(blobstore_key), charset="utf-8")
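
A hedged usage sketch for the helper above, assuming the returned BlobstoreUploadedFile follows Django's standard UploadedFile interface:

uploaded = get_blobstore_uploaded_file(saved_blob_key)  # saved_blob_key: a previously stored blob key string (hypothetical)
data = uploaded.read()  # or stream it: for chunk in uploaded.chunks(): ...
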
Example #37
 def getaudio(self, dish_id):
     dish = Dish.get_by_id(int(dish_id))
     docinfo = BlobInfo.get(dish.audio)
     return docinfo
Example #38
 def on_remove(self):
     for key in self.file_keys:
         cache.delete(CACHE_KEY_PREPEND + str(key))
         BlobInfo.get(key).delete()
     self.update()
Example #39
 def file_info(self, name):
     key = BlobKey(name.split('/', 1)[0])
     return BlobInfo(key)
Example #40
 def titi(value):
     return BlobInfo.get(str(value)).key()  #blobstore.get(str(value))
Example #41
 def delete_blob(self):
     if self.blob_key:
         blob_info = BlobInfo.get(self.blob_key)
         if blob_info:
             blob_info.delete()
Example #42
def serve_file(request,
               blob_key_or_info,
               as_download=False,
               content_type=None,
               filename=None,
               offset=None,
               size=None):
    """
        Serves a file from the blobstore, reads most of the data from the blobinfo by default but you can override stuff
        by passing kwargs.

        You can also pass a Google Cloud Storage filename as `blob_key_or_info` to use Blobstore API to serve the file:
        https://cloud.google.com/appengine/docs/python/blobstore/#Python_Using_the_Blobstore_API_with_Google_Cloud_Storage
    """

    if isinstance(blob_key_or_info, BlobKey):
        info = BlobInfo.get(blob_key_or_info)
        blob_key = blob_key_or_info
    elif isinstance(blob_key_or_info, basestring):
        info = BlobInfo.get(BlobKey(blob_key_or_info))
        blob_key = BlobKey(blob_key_or_info)
    elif isinstance(blob_key_or_info, BlobInfo):
        info = blob_key_or_info
        blob_key = info.key()
    else:
        raise ValueError("Invalid type %s" % blob_key_or_info.__class__)

    if info is None:
        # Lack of blobstore_info means this is a Google Cloud Storage file
        if has_cloudstorage:
            cached_value = _get_from_cache(blob_key_or_info)
            if cached_value:
                blob_key, info = cached_value
            else:
                info = cloudstorage.stat(blob_key_or_info)
                info.size = info.st_size
                blob_key = create_gs_key('/gs{0}'.format(blob_key_or_info))
                _add_to_cache(blob_key_or_info, blob_key, info)
        else:
            raise ImportError(
                "To serve a Cloud Storage file you need to install cloudstorage"
            )

    response = HttpResponse(content_type=content_type or info.content_type)
    response[BLOB_KEY_HEADER] = str(blob_key)
    response['Accept-Ranges'] = 'bytes'
    http_range = request.META.get('HTTP_RANGE')

    if offset or size:
        # Looks a little bonkers, but basically create the HTTP range string, we cast to int first to make sure
        # nothing funky gets into the headers
        http_range = "{}-{}".format(
            str(int(offset)) if offset else "",
            str(int(offset or 0) + size) if size else "")

    if http_range is not None:
        response[BLOB_RANGE_HEADER] = http_range

    if as_download:
        response['Content-Disposition'] = smart_str(
            u'attachment; filename="%s"' % (filename or info.filename))
    elif filename:
        raise ValueError(
            "You can't specify a filename without also specifying as_download")

    if info.size is not None:
        response['Content-Length'] = info.size
    return response
Example #43
 def file_up_to_date(self, filename):
     result = bool(BlobInfo.all().filter('filename =', filename).count())
     return result
Example #44
def post_get_blob_key(input_dict, entity_instance, bulkload_state):
    entity_instance['file'] = BlobInfo.get(str(entity_instance['file'])).key()
    return entity_instance
Example #45
 def getvideo(self, dish_id):
     dish = Dish.get_by_id(int(dish_id))
     docinfo = BlobInfo.get(dish.video)
     return docinfo
Example #46
def pull_from_dropbox():
    client = get_dropbox_client()
    if client is None:
        return
    path_prefix = get_config('dropbox_path')
    cursor = get_config('dropbox_delta_cursor')
    last_sync = get_config('dropbox_last_sync') or datetime.datetime(
        2000, 1, 1)
    first = cursor is None
    if first:
        set_config('dropbox_sync_progress', (0, 1))
    entries = []
    while 1:
        result = client.delta(cursor, path_prefix=path_prefix.rstrip('/'))
        entries.extend(
            (path, metadata) for path, metadata in result['entries'])
        cursor = result['cursor']
        set_config('dropbox_delta_cursor', cursor)
        if not result['has_more']:
            break
    for i, (path, metadata) in enumerate(entries):
        repo_key = path[len(path_prefix):].split('/')
        cache_key = make_cache_key(repo_key)
        list_cache_key = make_cache_key(repo_key[:-1])
        if not repo_key or any(not part for part in repo_key):
            continue
        db_key = make_db_key(repo_key)
        if metadata:
            rev = metadata['rev']
            modified_at = parse_rfc2822(metadata['modified'])
            last_sync = max(modified_at, last_sync)
            if metadata['is_dir']:
                blob_info = None
                cache_value = 'D'
            else:
                filename = create(mime_type='text/xml')
                cache_value = None
                cache_buffer = ['F']
                dst_size = 0
                with fopen(filename, 'ab') as dst:
                    for offset in xrange(0, metadata['bytes'],
                                         INCOMING_BYTES_LIMIT):
                        src = client.get_file(path,
                                              rev=rev,
                                              start=offset,
                                              length=offset)
                        while 1:
                            chunk = src.read(10240)
                            if chunk:
                                dst_size += len(chunk)
                                dst.write(chunk)
                                if dst_size < CACHE_BYTES_LIMIT:
                                    cache_buffer.append(chunk)
                            else:
                                break
                    if dst_size < CACHE_BYTES_LIMIT:
                        cache_value = ''.join(cache_buffer)
                        del cache_buffer
                finalize(filename)
                blob_key = get_blob_key(filename)
                blob_info = BlobInfo.get(blob_key)

            def txn():
                delete(cache_key, namespace='slot')
                delete(list_cache_key, namespace='list')
                slot = Slot.get(db_key)
                if slot is None:
                    slot = Slot(depth=len(repo_key),
                                key=db_key,
                                blob=blob_info,
                                rev=rev,
                                updated_at=modified_at,
                                synced_at=modified_at)
                else:
                    if slot.blob is not None:
                        slot.blob.delete()
                    slot.blob = blob_info
                    slot.rev = rev
                    slot.updated_at = modified_at
                    slot.synced_at = modified_at
                slot.put()
                if cache_value is not None:
                    put(cache_key, cache_value, namespace='slot')
                delete(list_cache_key, namespace='list')

            run_in_transaction_options(create_transaction_options(xg=True),
                                       txn)
        else:
            slot = Slot.get(db_key)
            if slot is not None:
                slot.delete()
                delete(cache_key, namespace='slot')
        delete(list_cache_key, namespace='list')
        if first:
            set_config('dropbox_sync_progress', (i + 1, len(entries)))
    set_config('dropbox_last_sync', last_sync)
Example #47
import os, glob
import cgi
import cgitb

cgitb.enable()
#from config import *

from google.appengine.ext.blobstore import BlobInfo

blobs = BlobInfo.all()
vars = {
    'checkboxes': "",
    'transplantws': "/transplantdata",
    'survivaldatasource': "/sampledata",
    'genedatasource': "/genedata",
    "jsdir": "/js",
    "loadergif": "/images/loader.gif"
}

files = []
tr = ""
lastfile = ""

namedic = {}
for blob in blobs.run():