def split_input(cls, mapper_spec):
    """Returns a list of shard_count input_spec_shards for input_spec.

    Args:
      mapper_spec: The mapper specification to split from. Must contain
        'blob_keys' parameter with one or more blob keys.

    Returns:
      A list of BlobstoreLineInputReaders corresponding to the specified
      shards, each covering a byte range of one blob.
    """
    params = mapper_spec.params
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
        # This is a mechanism to allow multiple blob keys (which do not
        # contain commas) in a single string. It may go away.
        blob_keys = blob_keys.split(",")
    # Look up each blob's total size so byte ranges can be computed.
    # NOTE(review): BlobInfo.get returns None for a missing blob, which
    # would raise AttributeError on .size — presumably validated upstream.
    blob_sizes = {}
    for blob_key in blob_keys:
        blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
        blob_sizes[blob_key] = blob_info.size
    # Cap total shards, then divide them across blobs; every blob gets at
    # least one shard even when there are more blobs than shards.
    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    shards_per_blob = shard_count // len(blob_keys)
    if shards_per_blob == 0:
        shards_per_blob = 1
    chunks = []
    for blob_key, blob_size in blob_sizes.items():
        blob_chunk_size = blob_size // shards_per_blob
        # All shards but the last cover equal-sized byte ranges.
        for i in xrange(shards_per_blob - 1):
            chunks.append(BlobstoreLineInputReader.from_json(
                {cls.BLOB_KEY_PARAM: blob_key,
                 cls.INITIAL_POSITION_PARAM: blob_chunk_size * i,
                 cls.END_POSITION_PARAM: blob_chunk_size * (i + 1)}))
        # The final shard absorbs the division remainder up to blob end.
        chunks.append(BlobstoreLineInputReader.from_json(
            {cls.BLOB_KEY_PARAM: blob_key,
             cls.INITIAL_POSITION_PARAM: blob_chunk_size * (shards_per_blob - 1),
             cls.END_POSITION_PARAM: blob_size}))
    return chunks
def process_image(value, i, values):
    """Tasklet: transform one image value in GCS and store the result in values[i].

    Reads the source object from cloudstorage, optionally resizes it, writes
    it back (to a new object name when 'copy' is configured), and refreshes
    the blobstore key when the object name changed.

    NOTE(review): uses `self._process_config` — this is a nested function
    closing over an outer `self`; it is not callable standalone.

    Args:
        value: file-like entity with gs_object_name/content_type/etc.
        i: index into `values` to overwrite with the processed entity.
        values: mutable sequence of entities being processed.
    """
    config = self._process_config
    new_value = value
    gs_object_name = new_value.gs_object_name
    new_gs_object_name = new_value.gs_object_name
    if config.get('copy'):
        # Copy mode: work on a deep copy and write to a suffixed object name.
        new_value = copy.deepcopy(value)
        new_gs_object_name = '%s_%s' % (new_value.gs_object_name, config.get('copy_name'))
    blob_key = None
    # @note No try block is implemented here. This code is no longer forgiving.
    # If any of the images fail to process, everything is lost/reverted, because one or more images:
    # - are no longer existant in the cloudstorage / .read();
    # - are not valid / not image exception;
    # - failed to resize / resize could not be done;
    # - failed to create gs key / blobstore failed for some reason;
    # - failed to create get_serving_url / serving url service failed for some reason;
    # - failed to write to cloudstorage / cloudstorage failed for some reason.
    # gs_object_name[3:] strips the leading "/gs" prefix for the cloudstorage API.
    readonly_blob = cloudstorage.open(gs_object_name[3:], 'r')
    blob = readonly_blob.read()
    readonly_blob.close()
    image = images.Image(image_data=blob)
    if config.get('transform'):
        image.resize(config.get('width'),
                     config.get('height'),
                     crop_to_fit=config.get('crop_to_fit', False),
                     crop_offset_x=config.get('crop_offset_x', 0.0),
                     crop_offset_y=config.get('crop_offset_y', 0.0))
        blob = yield image.execute_transforms_async(output_encoding=image.format)
        new_value.proportion = float(image.width) / float(image.height)
        new_value.size = len(blob)
    writable_blob = cloudstorage.open(new_gs_object_name[3:], 'w',
                                      content_type=new_value.content_type)
    writable_blob.write(blob)
    writable_blob.close()
    if gs_object_name != new_gs_object_name:
        # The object moved: point the entity at the new object and drop the
        # stale serving URL so it gets regenerated later.
        new_value.gs_object_name = new_gs_object_name
        blob_key = yield blobstore.create_gs_key_async(new_gs_object_name)
        new_value.image = blobstore.BlobKey(blob_key)
        new_value.serving_url = None
    values[i] = new_value
    raise orm.Return(True)
def edit_record(request):
    """Pyramid view: render and process the admin 'edit record' form.

    GET renders the deform form pre-filled from the Record entity; POST
    validates the submission, updates the record (artist, uploaded audio
    blob, bhajan reference) and redirects to the records listing.
    """
    rid = nid(request.matchdict['rid'])
    record = m.Record.get_by_id(rid)
    bid = record.bhajan_key.id()
    # The form posts to a blobstore upload URL so the audio file lands in
    # blobstore before this view runs again.
    form = deform.Form(f.Record(),
                       action=blobstore.create_upload_url(
                           request.route_path('admin.edit_record', rid=rid)),
                       buttons=(u'сохранить', ))
    gae_store = f.GaeUploadTempStore()
    if request.method == 'POST':
        post_data = request.POST.items()
        logger.debug('post_data: %s', post_data)
        # Re-order/filter the POST fields into the sequence deform expects.
        keys = [
            '_charset_', '__formid__', 'artist', 'bhajan', '__start__',
            'upload', 'uid', '__end__'
        ]
        post_data = [(k, request.POST[k]) for k in keys if k in request.POST]
        logger.debug('post_data: %s', post_data)
        try:
            data = form.validate(post_data)
        except deform.ValidationFailure as e:
            # Re-render with validation errors inline.
            return dict(record=record, form=e.render())
        record.populate(artist=data['artist'],
                        audio_key=blobstore.BlobKey(data['audio']['uid']),
                        bhajan_key=ndb.Key(m.Bhajan, nid(data['bhajan'])))
        record.put()
        return HTTPFound(request.route_path('admin.records'))
    # GET: pre-fill the form from the existing record.
    return dict(record=record,
                form=form.render(dict(artist=record.artist,
                                      audio=gae_store[record.audio_key],
                                      bhajan=bid)))
def finish_save():
    """Persist d3 params read from the uploaded blob onto the plot object.

    Reads the first line of the "fileup" blob as the serialized d3 params,
    stores them on the object identified by the request's 'plotid', and
    marks it saved.

    Returns:
        'hs_<id>' on success, or the literal string "error" when the blob
        key is missing or the datastore write fails.
    """
    blob_key = get_blob_key("fileup")
    if blob_key is None:
        return "error"
    blob_key = blobstore.BlobKey(blob_key)
    blob_reader = blobstore.BlobReader(blob_key)
    # The first line of the uploaded blob carries the serialized params.
    params = blob_reader.readline()
    # Fixed: the old message "read params:" logged nothing useful.
    logging.debug("read params: %s", params)
    plotid = request.values['plotid']
    obj = find_object(plotid)
    obj.saved = True
    obj.d3params = params
    try:
        obj.put()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; keep the best-effort "error" contract for callers,
        # but record what went wrong.
        logging.exception("failed to save object %s", plotid)
        return "error"
    logging.debug("saved object: %d" % obj.key().id())
    return 'hs_%d' % obj.key().id()
def applyAttributes(self, obj, attrs, **kwargs):
    """Applies C{attrs} to C{obj}.

    Since C{blobstore.BlobInfo} objects are read-only entities, we only
    care about the C{key} attribute.

    Raises:
        pyamf.DecodeError: if the 'key' attribute is missing or does not
            form a valid C{blobstore.BlobKey}.
    """
    assert type(obj) is BlobInfoStub
    key = attrs.pop('key', None)
    if not key:
        raise pyamf.DecodeError("Unable to build blobstore.BlobInfo "
                                "instance. Missing 'key' attribute.")
    try:
        key = blobstore.BlobKey(key)
    except Exception:
        # Narrowed from a bare `except:`: any construction failure is still
        # converted into DecodeError, but SystemExit / KeyboardInterrupt are
        # no longer swallowed.
        raise pyamf.DecodeError(
            "Unable to build a valid blobstore.BlobKey "
            "instance. Key supplied was %r" % (key, ))
    # BlobInfo is read-only, so morph the stub in place rather than
    # constructing a fresh instance.
    obj.__class__ = blobstore.BlobInfo
    obj.__init__(key)
def post(self):
    """Import rows of a CSV blob (key in 'key_str') into the number index.

    Expected columns: number, units, description, labels, source, year,
    month, day. Aborts the whole import as soon as a duplicate row is found.
    """
    file_info = blobstore.BlobInfo(
        blobstore.BlobKey(self.request.get('key_str')))
    reader = blobstore.BlobReader(file_info)
    # Skip the header line.
    next(reader)
    csv_file_content = csv.reader(reader, delimiter=',', quotechar='"')
    for row in csv_file_content:
        number = row[0]
        units = row[1]
        description = row[2]
        labels = row[3]
        source = row[4]
        year = row[5]
        month = row[6]
        day = row[7]
        if check_duplicate_numbers(number, units, description, source, year,
                                   month, day):
            # NOTE(review): this returns from the handler entirely, so rows
            # after the first duplicate are never imported — confirm intended.
            logging.info("duplicate number %s %s %s" %
                         (number, units, description))
            return
        add_to_number_index(get_author(), None, float(number), units,
                            description, labels, source, int(year),
                            int(month), int(day))
def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
        # Fixed inconsistent "__RobotsLineInputReader" (double underscore)
        # prefix so every error reports the same reader name.
        raise input_readers.BadReaderParamsError(
            "_RobotsLineInputReader:Mapper input reader class mismatch")
    params = input_readers._get_params(mapper_spec)
    if cls.BLOB_KEYS_PARAM not in params:
        raise input_readers.BadReaderParamsError(
            "_RobotsLineInputReader:Must specify 'blob_keys' for mapper input")
    file_names = params[cls.BLOB_KEYS_PARAM]
    if isinstance(file_names, basestring):
        # This is a mechanism to allow multiple blob keys (which do not
        # contain commas) in a single string. It may go away.
        file_names = file_names.split(",")
    if len(file_names) > cls._MAX_BLOB_KEYS_COUNT:
        raise input_readers.BadReaderParamsError(
            "_RobotsLineInputReader:Too many 'blob_keys' for mapper input")
    if not file_names:
        raise input_readers.BadReaderParamsError(
            "_RobotsLineInputReader:No 'blob_keys' specified for mapper input")
    # Every file name must resolve to an existing blob.
    for file_name in file_names:
        blob_key = files.blobstore.get_blob_key(file_name)
        blob_key_str = str(blob_key)
        blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key_str))
        if not blob_info:
            raise input_readers.BadReaderParamsError(
                "_RobotsLineInputReader:Could not find blobinfo for key %s" %
                blob_key_str)
def post(self):
    """Dispatch file-manager actions based on the posted 'button' value.

    Supported buttons: Add, Delete, Up (navigate to parent directory),
    info (show file metadata), Home (reset to the user's root directory).
    """
    logging.debug('POST')
    self.response.headers['Content-Type'] = 'text/html'
    buttonVal = self.request.get('button')
    if buttonVal == 'Add':
        self.add()
        self.redirect('/')
    elif buttonVal == 'Delete':
        self.delete()
        self.redirect('/')
    elif buttonVal == 'Up':
        currentUser = functions.getCurrentUser()
        # Only move up when the current directory actually has a parent.
        if not functions.getDirectoryList().parentDirectory is None:
            parentDirectoryId = functions.getCurrentUser(
            ).currentDirectory.get().parentDirectory
            currentUser.currentDirectory = parentDirectoryId
            currentUser.put()
        self.redirect('/')
    elif buttonVal == 'info':
        fileName = self.request.get('fileName')
        key = functions.getFileList(fileName)
        stat = blobstore.BlobInfo.get(blobstore.BlobKey(str(key.blob)))
        # stat.size / 1000: integer division in Python 2 — size shown in KB.
        startViewer.showFileInformation(self, fileName, stat.size / 1000,
                                        stat.content_type, stat.creation)
    elif buttonVal == 'Home':
        # Reset the user to their root directory ("<user id>/").
        currentUser = functions.getCurrentUser()
        currentUser.currentDirectory = ndb.Key(Directory,
                                               currentUser.key.id() + '/')
        currentUser.put()
        self.redirect('/')
def async(entity):
    """Tasklet: duplicate an entity's GCS object and re-key its blob fields.

    Copies the underlying cloudstorage object to a "<name>_duplicate_<n>"
    object, then refreshes the entity's blob key and serving URL.

    NOTE(review): `async` is a reserved keyword from Python 3.7 — this code
    is Python 2 only. Also uses a closed-over `self` (nested function).
    """
    gs_object_name = entity.gs_object_name
    try:
        # Strip an existing duplicate appendix so duplicates of duplicates
        # don't accumulate suffixes.
        gs_object_name = entity.parse_duplicate_appendix(gs_object_name)
    except IndexError:
        pass
    new_gs_object_name = '%s_duplicate_%s' % (gs_object_name,
                                              entity.duplicate_appendix)
    # [3:] strips the leading "/gs" prefix for the cloudstorage API.
    readonly_blob = cloudstorage.open(gs_object_name[3:], 'r')
    writable_blob = cloudstorage.open(new_gs_object_name[3:], 'w',
                                      content_type=entity.content_type)
    # Less consuming memory write, can be only used when using brute force copy.
    # There is no copy feature in cloudstorage sdk, so we have to implement our own!
    while True:
        blob_segment = readonly_blob.read(1000000)  # Read 1mb per write, that should be enough.
        if not blob_segment:
            break
        writable_blob.write(blob_segment)
    readonly_blob.close()
    writable_blob.close()
    entity.gs_object_name = new_gs_object_name
    blob_key = yield blobstore.create_gs_key_async(new_gs_object_name)
    entity.image = blobstore.BlobKey(blob_key)
    entity.serving_url = yield images.get_serving_url_async(entity.image)
    self._property.save_blobs_on_success(entity.image)
    raise orm.Return(entity)
def post(self):
    """Fetch an image by URL, apply requested enhancements, store to GCS.

    Reads enhancement parameters (color/brightness/contrast/sharpness/
    rotate, or the "I'm feeling lucky" auto-fix) from the request, fetches
    the image at 'imageURL', processes it with PIL / the GAE images API,
    writes the result to the 'dem-ode' bucket and emits a download link.

    NOTE(review): both bare `except:` handlers collapse every failure
    (fetch error, bad params, PIL error, GCS error) into the same
    "too large" message — consider narrowing and logging.
    """
    try:
        gurl = self.request.get('imageURL')
        # print("urlColor: " +self.request.get('color'))
        gcolor = float(self.request.get('color'))
        gbrightness = float(self.request.get('brightness'))
        gcontrast = float(self.request.get('contrast'))
        gsharpness = float(self.request.get('sharpness'))
        grotate = int(self.request.get('rotate'))
        gImFLSelected = bool(self.request.get('lucky'))
        c = urlfetch.fetch(gurl, deadline=10).content
        im = Image.open(StringIO(c))
        mimeType = im.format
        try:
            if gImFLSelected:
                # Auto-enhance via the GAE images service.
                img = images.Image(c)
                img.im_feeling_lucky()
                data = img.execute_transforms(output_encoding=images.JPEG)
            else:
                # Manual enhancement chain via PIL.
                enh = ImageEnhance.Color(im)  # 0 - 2 to be considered
                out = enh.enhance(gcolor)
                enh = ImageEnhance.Brightness(
                    out
                )  # 0 - black image, 1 - original image; Can give more than 1.0
                out = enh.enhance(gbrightness)
                enh = ImageEnhance.Contrast(
                    out)  # 0 - solid grey image, 1 - original image
                out = enh.enhance(gcontrast)
                enh = ImageEnhance.Sharpness(
                    out
                )  # 0 - blurred image, 1 - original image, 2 - sharpened image
                out = enh.enhance(gsharpness)
                out = out.rotate(grotate, resample=Image.BICUBIC, expand=True)
                buf = cStringIO.StringIO()
                out.save(buf, mimeType)
                data = buf.getvalue()
            # Store the processed bytes under a timestamp-derived GCS name.
            bucket_name = 'dem-ode'
            bucket = '/' + bucket_name
            filename = bucket + '/' + urllib.quote(u"{0}".format(
                time.time()).encode('utf8'))
            with gcs.open(filename, 'w') as f:
                f.write(data)
            blobstore_filename = "/gs" + filename
            # this is needed if you want to continue using blob_keys.
            ieurl = images.get_serving_url(
                blobstore.BlobKey(
                    blobstore.create_gs_key(str(blobstore_filename))))
            # self.response.out.write( '<img width="100%" height="100%" src="' + ieurl + '"/>')
            self.response.out.write(
                '<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="'
                + ieurl + '" download>' +
                '<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>'
            )
        except:
            self.response.headers[b'Content-Type'] = b'text/plain'
            self.response.out.write(
                'Image size is too large. Can\'t handle')
    except:
        self.response.headers[b'Content-Type'] = b'text/plain'
        self.response.out.write('Image size is too large. Can\'t handle')
def get(self, photo_key):
    """Serve an enhanced version of a stored photo blob.

    Looks up enhancement settings on the ImageData('123') entity, applies
    them to the blob identified by `photo_key` (PIL chain or GAE
    "I'm feeling lucky"), saves the result to GCS and renders it with a
    download link. 404s when the blob does not exist.

    NOTE(review): both bare `except:` handlers collapse every failure into
    the same "too large" message — consider narrowing and logging.
    """
    if not blobstore.get(photo_key):
        self.error(404)
    else:
        try:
            blob_info = blobstore.BlobInfo.get(photo_key)
            im = Image.open(blob_info.open())
            # Enhancement settings are kept on a fixed singleton entity.
            iR = ndb.Key(ImageData, '123').get()
            mimeType = im.format
            try:
                if iR.gImFLSelected:
                    img = images.Image(blob_key=iR.imageKey)
                    img.im_feeling_lucky()
                    data = img.execute_transforms(
                        output_encoding=images.JPEG)
                else:
                    enh = ImageEnhance.Color(im)  # 0 - 2 to be considered
                    out = enh.enhance(iR.gColor)
                    enh = ImageEnhance.Brightness(
                        out
                    )  # 0 - black image, 1 - original image; Can give more than 1.0
                    out = enh.enhance(iR.gBrightness)
                    enh = ImageEnhance.Contrast(
                        out)  # 0 - solid grey image, 1 - original image
                    out = enh.enhance(iR.gContrast)
                    enh = ImageEnhance.Sharpness(
                        out
                    )  # 0 - blurred image, 1 - original image, 2 - sharpened image
                    out = enh.enhance(iR.gSharpness)
                    out = out.rotate(iR.gRotate,
                                     resample=Image.BICUBIC,
                                     expand=True)
                    buf = cStringIO.StringIO()
                    out.save(buf, mimeType)
                    data = buf.getvalue()
                # Persist the processed bytes under a timestamped GCS name.
                bucket_name = 'dem-ode'
                bucket = '/' + bucket_name
                filename = bucket + '/' + urllib.quote(u"{0}".format(
                    time.time()).encode('utf8'))
                with gcs.open(filename, 'w') as f:
                    f.write(data)
                blobstore_filename = "/gs" + filename
                #this is needed if you want to continue using blob_keys.
                ieurl = images.get_serving_url(
                    blobstore.BlobKey(
                        blobstore.create_gs_key(str(blobstore_filename))))
                self.response.out.write(
                    '<img width="100%" height="100%" src="' + ieurl + '"/>')
                self.response.out.write(
                    '<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="'
                    + ieurl + '" download>' +
                    '<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>'
                )
            except:
                self.response.headers[b'Content-Type'] = b'text/plain'
                self.response.out.write(
                    'Image size is too large. Can\'t handle')
        except:
            self.response.headers[b'Content-Type'] = b'text/plain'
            self.response.out.write(
                'Image size is too large. Can\'t handle')
def update_api_data(data_id=None, items=None, user=None, content_type=None):
    """Update an existing APIData entity from submitted fields.

    Field-name conventions: 'unindexed_*' values go only into
    additional_data; 'indexed_*' values also replace matching entries in
    indexed_data and feed the tag list; 'file_*' values are uploaded to GCS
    and linked via serving/file URLs.

    NOTE(review): the non-JSON branch references `self` (self.request,
    self.user), but this is a module-level function with no `self` — that
    branch raises NameError as written. It likely needs a request object
    parameter like write_to_api's `user_request`; confirm intended callers.
    """
    data = APIData.get_by_id(normalize_id(data_id))
    if not data:
        return
    if content_type == "application/json":
        tags = []
        try:
            for key, value in items.items():
                if key.startswith('unindexed_'):
                    # unindexed_
                    ad_key = key.replace("unindexed_", "")
                    data.additional_data[ad_key] = value.strip()
                if key.startswith('indexed_'):
                    ad_key = key.replace("indexed_", "")
                    data.additional_data[ad_key] = value
                    # Drop any previous indexed entries for this key before
                    # appending the fresh one.
                    for d in data.indexed_data:
                        ad_key = key.replace("indexed_", "")
                        if d.startswith(ad_key.upper()):
                            try:
                                data.indexed_data.remove(d)
                            except Exception as e:
                                logging.exception(e)
                                logging.info("Cannot remove from list")
                    data.indexed_data.append(create_indexed_tag(key, value))
                    try:
                        tags += create_tags(value)
                    except Exception as e:
                        logging.exception("Cannot create tag from: ")
            if user:
                data.username = user.name
                data.user = user.key
            logging.info(tags)
            data.indexed_data = uniquify(data.indexed_data)
            data.tags = uniquify(tags)
            data.put()
        except Exception as e:
            logging.exception(e)
    else:
        # Form-encoded branch.
        tags = []
        try:
            for arg in self.request.arguments():
                # Remove stale indexed entries matching this field.
                for d in data.indexed_data:
                    ad_key = arg.replace("indexed_", "")
                    if d.startswith(ad_key.upper()):
                        try:
                            data.indexed_data.remove(d)
                        except Exception as e:
                            logging.exception(e)
                if arg.startswith('unindexed_'):
                    # unindexed_
                    ad_key = arg.replace("unindexed_", "")
                    ad_value = self.request.POST.get(arg)
                    data.additional_data[ad_key] = ad_value.strip()
                if arg.startswith('indexed_'):
                    ad_key = arg.replace("indexed_", "")
                    ad_value = self.request.POST.get(arg)
                    data.additional_data[ad_key] = ad_value
                    try:
                        tags += create_tags(ad_value)
                    except Exception as e:
                        logging.exception("Cannot create tag from: ")
                    data.indexed_data.append(
                        create_indexed_tag(arg, self.request.POST.get(arg)))
                if arg.startswith('file_'):
                    # Upload the attached file to GCS under a random path.
                    filename = BUCKET_NAME
                    filename += random_string(20) + "/"
                    ad_key = arg.replace("file_", "")
                    data.additional_data[ad_key] = {}
                    try:
                        file_name = items.get(arg).filename
                        filename += file_name
                        gcs_options = {'x-goog-acl': 'public-read'}
                        gcs_file = gcs.open(filename, 'w', options=gcs_options)
                        gcs_file.write(self.request.get(arg))
                        gcs_file.close()
                        full_url = "https://storage.googleapis.com" + filename
                        data.file_url = full_url
                        data.additional_data[ad_key]["file_url"] = full_url
                        try:
                            # Image files get a resizable serving URL; other
                            # file types fall back to the plain GCS URL.
                            blob_key = blobstore.create_gs_key("/gs" + filename)
                            data.serving_url = images.get_serving_url(blob_key)
                            data.additional_data[ad_key]["serving_url"] = data.serving_url
                            data.gcs_key = blobstore.BlobKey(blob_key)
                        except Exception as e:
                            logging.exception(e)
                            data.additional_data[ad_key]["serving_url"] = full_url
                    except AttributeError, e:
                        logging.exception(e)
            if self.user:
                data.username = self.user.name
                data.user = self.user.key
            data.indexed_data = uniquify(data.indexed_data)
            data.tags = uniquify(tags)
            data.put()
        except Exception as e:
            logging.exception(e)
def timeline_delete(self, card):
    """Remove an existing card for the current user.

    This will set all properties except the ID to None and set isDeleted
    to true. Attachments are deleted from blobstore, the Glass emulator is
    notified over the channel API, and all timeline DELETE subscriptions
    are called back.

    Raises:
        endpoints.NotFoundException: when the card does not belong to the
            current user, is not from the datastore, or is already deleted.
    """
    if not card.from_datastore or card.user != endpoints.get_current_user():
        raise endpoints.NotFoundException("Contact not found.")
    if card.isDeleted:
        raise endpoints.NotFoundException("Card has been deleted")
    # Delete attachments
    keys = []
    if card.attachments is not None:
        for att in card.attachments:
            keys.append(blobstore.BlobKey(att.id))
        blobstore.delete_async(keys)
    # Blank out everything except the ID, per the tombstone contract above.
    card.attachments = []
    card.bundleId = None
    card.canonicalUrl = None
    card.created = None
    card.creator = None
    card.displayTime = None
    card.html = None
    card.inReplyTo = None
    card.isBundleCover = None
    card.isPinned = None
    card.menuItems = []
    card.notification = None
    card.recipients = []
    card.sourceItemId = None
    card.speakableType = None
    card.speakableText = None
    card.text = None
    card.title = None
    card.updated = None
    card.isDeleted = True
    card.put()
    # Notify Glass emulator
    channel.send_message(card.user.email(), json.dumps({"id": card.id}))
    # Notify timeline DELETE subscriptions
    data = {}
    data["collection"] = "timeline"
    data["itemId"] = card.id
    operation = Operation.DELETE
    data["operation"] = operation.name
    header = {"Content-type": "application/json"}
    query = Subscription.query().filter(
        Subscription.user == endpoints.get_current_user())
    query = query.filter(Subscription.collection == "timeline")
    query = query.filter(Subscription.operation == operation)
    for subscription in query.fetch():
        data["userToken"] = subscription.userToken
        data["verifyToken"] = subscription.verifyToken
        req = urllib2.Request(subscription.callbackUrl, json.dumps(data),
                              header)
        try:
            urllib2.urlopen(req)
        except:
            # Best-effort callbacks: a failing subscriber must not abort
            # the delete or the remaining notifications.
            logging.error(sys.exc_info()[0])
    return card
def setUp(self):
    """Prepare a blob key/info pair fixture on top of the base setup."""
    # Run the shared fixture setup first.
    BaseTestCase.setUp(self)
    blob_key = blobstore.BlobKey('foobar')
    self.key = blob_key
    self.info = blobstore.BlobInfo(blob_key, self.values)
def _getBlob(cls, name):
    """Fetch the BlobInfo entity for the blob-key string *name*."""
    key = blobstore.BlobKey(name)
    return blobstore.BlobInfo.get(key)
def get(self, streamKey, imageKey, viewCount):
    """Render the single-stream view page starting at a given image.

    Shows up to three images from the stream beginning at `imageKey`
    (paged via a "More pictures" link), plus subscribe/geo-view/reload
    buttons and a jQuery-File-Upload form posting to this stream's upload
    handler. When `viewCount` is '1' the view is recorded on the stream.
    """
    streamKeyUrl = str(urllib.unquote(streamKey))
    streamKey = ndb.Key(urlsafe=streamKeyUrl)
    imageKeyStr = str(urllib.unquote(imageKey))
    imageKey = blobstore.BlobKey(imageKeyStr)
    stream = streamKey.get()
    # NOTE: shadows any module-level `images` name within this handler.
    images = stream.photos
    viewCount = str(urllib.unquote(viewCount))
    self.response.write("""<!DOCTYPE HTML>
<html lang="en">
<head>
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css">
<link rel="stylesheet" href="http://aptconnexus11.appspot.com/file/css/style.css">
<link rel="stylesheet" href="//blueimp.github.io/Gallery/css/blueimp-gallery.min.css">
<link rel="stylesheet" href="http://aptconnexus11.appspot.com/file/css/jquery.fileupload.css">
<link rel="stylesheet" href="http://aptconnexus11.appspot.com/file/css/jquery.fileupload-ui.css">
<noscript><link rel="stylesheet" href="http://aptconnexus11.appspot.com/file/css/jquery.fileupload-noscript.css"></noscript>
<noscript><link rel="stylesheet" href="http://aptconnexus11.appspot.com/file/css/jquery.fileupload-ui-noscript.css"></noscript>
</head>
<body>
<h1> Connex.us </h1>
<p>
<a href = "/"> Manage </a>
<a href = "/create"> | Create </a>
<a href = "/viewAll"> | View </a>
<a href = "/search"> | Search </a>
<a href = "/trending"> | Trending </a>
<a href = "/social"> | Social </a>
</p>""")
    # present three pictures and the button "more pics"
    if len(images) == 0:
        self.response.write("<h3> No pictures in this stream yet!</h3>")
    else:
        self.response.write("<p>")
        index = images.index(imageKey)
        if (index + 1) <= 3:
            # Fewer than three images remain before the start of the list:
            # show them all, newest-position first.
            while index >= 0:
                blobkey = images[index]
                params = {'blobKey': str(blobkey)}
                url = 'http://aptconnexus11.appspot.com/getUrl'
                imageUrl = jsonfyFetch(params, url)['url']
                self.response.write('<img src = "%s" style="width:304px;height:228px">'%(imageUrl))
                index -= 1
            self.response.write('No more pics!')
            self.response.write("</p>")
        else:
            # Show three images and link to the next page of the stream.
            for i in range(0, 3):
                blobkey = images[index - i]
                params = {'blobKey': str(blobkey)}
                url = 'http://aptconnexus11.appspot.com/getUrl'
                imageUrl = jsonfyFetch(params, url)['url']
                self.response.write('<img src = "%s" style="width:304px;height:228px">'%(imageUrl))
            nextImageKey = str(images[index - 3])
            self.response.write("""<a href = "/viewSingle/%s/%s/0">
<form action="">
<input type="button" value="More pictures">
</form></a></p>"""%(streamKeyUrl, nextImageKey))
    # # subscribe
    self.response.write("""<p><a href = "/subscribe/%s/%s">
<form action="">
<input type="button" value="Subscribe">
</form></a></p>
<p><a href="/geoView/%s"><form action="">
<input type="button" value="Geo view">
</form></a></p>"""%(streamKeyUrl, imageKeyStr,streamKeyUrl))
    self.response.write("""
<a href = "/reload/%s">
<form action="">
<input type="button" value="Reload this page!">
</form></a></p>"""%(streamKeyUrl))
    # jQuery-File-Upload widget posting into this stream's upload handler.
    url = '/imgUploadHandler/%s'%streamKeyUrl
    self.response.write("""
<div class="container">
<form id="fileupload" action="%s" method="POST" enctype="multipart/form-data">
"""%url)
    self.response.write("""
<noscript><input type="hidden" name="redirect" value="https://blueimp.github.io/jQuery-File-Upload/"></noscript>
<div class="row fileupload-buttonbar">
<div class="col-lg-7">
<span class="btn btn-success fileinput-button">
<i class="glyphicon glyphicon-plus"></i>
<span>Add files...</span>
<input type="file" name="files[]" multiple>
</span>
<button type="submit" class="btn btn-primary start">
<i class="glyphicon glyphicon-upload"></i>
<span>Start upload</span>
</button>
<button type="reset" class="btn btn-warning cancel">
<i class="glyphicon glyphicon-ban-circle"></i>
<span>Cancel upload</span>
</button>
<button type="button" class="btn btn-danger delete">
<i class="glyphicon glyphicon-trash"></i>
<span>Delete</span>
</button>
<input type="checkbox" class="toggle">
<span class="fileupload-process"></span>
</div>
<div class="col-lg-5 fileupload-progress fade">
<div class="progress progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100">
<div class="progress-bar progress-bar-success" style="width:0%;"></div>
</div>
<div class="progress-extended"> </div>
</div>
</div>
<table role="presentation" class="table table-striped"><tbody class="files"></tbody></table>
</form>
<br>
</div>
<div id="blueimp-gallery" class="blueimp-gallery blueimp-gallery-controls" data-filter=":even">
<div class="slides"></div>
<h3 class="title"></h3>
<a class="prev">‹</a>
<a class="next">›</a>
<a class="close">×</a>
<a class="play-pause"></a>
<ol class="indicator"></ol>
</div>
<script id="template-upload" type="text/x-tmpl">
{% for (var i=0, file; file=o.files[i]; i++) { %}
<tr class="template-upload fade">
<td>
<span class="preview"></span>
</td>
<td>
<p class="name">{%=file.name%}</p>
<strong class="error text-danger"></strong>
</td>
<td>
<p class="size">Processing...</p>
<div class="progress progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100" aria-valuenow="0"><div class="progress-bar progress-bar-success" style="width:0%;"></div></div>
</td>
<td>
{% if (!i && !o.options.autoUpload) { %}
<button class="btn btn-primary start" disabled>
<i class="glyphicon glyphicon-upload"></i>
<span>Start</span>
</button>
{% } %}
{% if (!i) { %}
<button class="btn btn-warning cancel">
<i class="glyphicon glyphicon-ban-circle"></i>
<span>Cancel</span>
</button>
{% } %}
</td>
</tr>
{% } %}
</script>
<script id="template-download" type="text/x-tmpl">
{% for (var i=0, file; file=o.files[i]; i++) { %}
<tr class="template-download fade">
<td>
<span class="preview">
{% if (file.thumbnailUrl) { %}
<a href="{%=file.url%}" title="{%=file.name%}" download="{%=file.name%}" data-gallery><img src="{%=file.thumbnailUrl%}"></a>
{% } %}
</span>
</td>
<td>
<p class="name">
{% if (file.url) { %}
<a href="{%=file.url%}" title="{%=file.name%}" download="{%=file.name%}" {%=file.thumbnailUrl?'data-gallery':''%}>{%=file.name%}</a>
{% } else { %}
<span>{%=file.name%}</span>
{% } %}
</p>
{% if (file.error) { %}
<div><span class="label label-danger">Error</span> {%=file.error%}</div>
{% } %}
</td>
<td>
<span class="size">{%=o.formatFileSize(file.size)%}</span>
</td>
<td>
{% if (file.deleteUrl) { %}
<button class="btn btn-danger delete" data-type="{%=file.deleteType%}" data-url="{%=file.deleteUrl%}"{% if (file.deleteWithCredentials) { %} data-xhr-fields='{"withCredentials":true}'{% } %}>
<i class="glyphicon glyphicon-trash"></i>
<span>Delete</span>
</button>
<input type="checkbox" name="delete" value="1" class="toggle">
{% } else { %}
<button class="btn btn-warning cancel">
<i class="glyphicon glyphicon-ban-circle"></i>
<span>Cancel</span>
</button>
{% } %}
</td>
</tr>
{% } %}
</script>
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/vendor/jquery.ui.widget.js"></script>
<script src="//blueimp.github.io/JavaScript-Templates/js/tmpl.min.js"></script>
<script src="//blueimp.github.io/JavaScript-Load-Image/js/load-image.all.min.js"></script>
<script src="//blueimp.github.io/JavaScript-Canvas-to-Blob/js/canvas-to-blob.min.js"></script>
<script src="//netdna.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"></script>
<script src="//blueimp.github.io/Gallery/js/jquery.blueimp-gallery.min.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.iframe-transport.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.fileupload.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.fileupload-process.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.fileupload-image.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.fileupload-validate.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/jquery.fileupload-ui.js"></script>
<script src="http://aptconnexus11.appspot.com/file/js/main.js"></script>
</body>
</html>""")
    if viewCount == '1':
        # Record the view timestamp for trending/statistics.
        stream.viewRecord.append(datetime.datetime.now())
        stream.put()
def write_to_api(items=None, user=None, content_type=None, imported=False, user_request=None):
    """Create a new APIData entity from submitted fields and persist it.

    Field-name conventions: 'unindexed_*' values go only into
    additional_data; 'indexed_*' values also populate indexed_data and the
    tag list ('*_array' values are JSON lists expanded per element);
    'file_*' fields are uploaded to GCS and linked via serving/file URLs.

    Args:
        items: dict-like of submitted fields (JSON body or form fields).
        user: owner; the masked sentinel email is resolved to a User entity.
        content_type: "application/json" selects the JSON branch.
        imported: when True, the entity ID is taken from 'indexed_code'.
        user_request: request handler carrying an uploaded 'file' field
            (form branch only).

    Returns:
        The stored APIData entity, or None when storing failed.
    """
    logging.debug(items)
    logging.debug(user)
    logging.debug(content_type)
    logging.debug(imported)
    logging.debug(user_request)
    if imported:
        # Imported records keep a stable, externally supplied ID.
        data = APIData(id=str(items['indexed_code']))
    else:
        data = APIData()
    if user == '*****@*****.**':
        user = User.query(User.current_email == '*****@*****.**').get()
    data.additional_data = {}
    if content_type == "application/json":
        tags = []
        logging.debug(items)
        try:
            for key, value in items.items():
                if key.startswith('unindexed_'):
                    ad_key = key.replace("unindexed_", "")
                    data.additional_data[ad_key] = value.strip()
                if key.startswith('indexed_'):
                    ad_key = key.replace('indexed_', '').replace('_array', '')
                    if key.endswith('_array'):
                        # JSON-encoded list: index each element separately.
                        value_arr = json.loads(value)
                        for v_arr in value_arr:
                            data.indexed_data.append(create_indexed_tag(key, v_arr))
                    else:
                        data.indexed_data.append(create_indexed_tag(key, value))
                    data.additional_data[ad_key] = value
                    try:
                        tags += create_tags(value)
                    except Exception as e:
                        logging.exception("Cannot create tag from: ")
            if user:
                data.username = user.name
                data.user = user.key
                data.indexed_data.append(create_indexed_tag("USER_ID", str(user.key.id())))
            data.indexed_data = uniquify(data.indexed_data)
            data.tags = uniquify(tags)
            data.put()
            return data
        except Exception as e:
            logging.exception(e)
    else:
        # Form-encoded branch.
        tags = []
        try:
            for arg in items:
                if arg.startswith('unindexed_'):
                    # unindexed_
                    ad_key = arg.replace("unindexed_", "")
                    ad_value = items.get(arg)
                    data.additional_data[ad_key] = ad_value.strip()
                if arg.startswith('indexed_'):
                    ad_key = arg.replace("indexed_", "")
                    ad_value = items.get(arg)
                    data.additional_data[ad_key] = ad_value
                    try:
                        tags += create_tags(ad_value)
                    except Exception as e:
                        logging.exception("Cannot create tag from: ")
                    data.indexed_data.append(create_indexed_tag(arg, items.get(arg)))
                if arg.startswith('file_'):
                    logging.debug(arg)
                    # Upload the attached file to GCS under a random path.
                    filename = BUCKET_NAME
                    filename += random_string(128) + "/"
                    ad_key = arg.replace("file_", "")
                    data.additional_data[ad_key] = {}
                    try:
                        if not user_request:
                            # Pick whichever well-known file field is present.
                            if 'file_image' in items:
                                file_field = 'file_image'
                            if 'file_file' in items:
                                file_field = 'file_file'
                            if 'file_kml' in items:
                                file_field = 'file_kml'
                            file_name = items[file_field].filename
                            filename += file_name
                            gcs_options = {'x-goog-acl': 'public-read'}
                            gcs_file = gcs.open(filename, 'w', options=gcs_options)
                            gcs_file.write(items[file_field].file.read())
                            gcs_file.close()
                        else:
                            file_name = user_request.request.POST.get('file')
                            file_name = file_name.filename
                            filename += file_name
                            gcs_options = {'x-goog-acl': 'public-read'}
                            gcs_file = gcs.open(filename, 'w', options=gcs_options)
                            gcs_file.write(user_request.request.get('file'))
                            gcs_file.close()
                        full_url = "https://storage.googleapis.com" + filename
                        data.file_url = full_url
                        data.additional_data[ad_key]["file_url"] = full_url
                        try:
                            # Image files get a resizable serving URL; other
                            # file types fall back to the plain GCS URL.
                            blob_key = blobstore.create_gs_key("/gs" + filename)
                            data.serving_url = images.get_serving_url(blob_key)
                            data.additional_data[ad_key]["serving_url"] = data.serving_url
                            data.gcs_key = blobstore.BlobKey(blob_key)
                        except Exception as e:
                            logging.exception(e)
                            logging.error("FILE IS NOT AN IMAGE")
                            data.additional_data[ad_key]["serving_url"] = full_url
                    except AttributeError, e:
                        logging.exception(e)
                        logging.exception("NO FILE ATTACHED")
            if user:
                data.username = user.name
                data.user = user.key
                data.indexed_data.append(create_indexed_tag("USER_ID", str(user.key.id())))
            data.tags = uniquify(tags)
            data.put()
            return data
        except Exception as e:
            logging.exception('ERROR')
            logging.debug(e)
def get_blob_key(key):
    """Wrap a raw blob-key string in a blobstore.BlobKey instance."""
    blob_key = blobstore.BlobKey(key)
    return blob_key
def post(self):
    """Import series and their numbers from an uploaded XML blob.

    Each child of the XML root describes a series (description, labels,
    series_type, unit, source) containing number elements with 'value' and
    optional 'time_period' ("YYYY", "YYYY-MM" or "YYYY-MM-DD"). Creates a
    search document per series listing the IDs of its member numbers,
    reusing existing number documents when duplicates are detected.
    """
    file_info = blobstore.BlobInfo(
        blobstore.BlobKey(self.request.get('key_str')))
    reader = blobstore.BlobReader(file_info)
    tree = ET.parse(reader)
    root = tree.getroot()
    author = get_author()
    for child in root:
        list_of_number_ids = u''
        description = child.attrib['description']
        labels = child.attrib['labels']
        series_type = child.attrib['series_type']
        units = child.attrib['unit']
        source = child.attrib['source']
        data_fields = [
            search.TextField(name='author', value=author),
            search.TextField(name='description', value=description),
            search.TextField(name='units', value=units),
            search.TextField(name='labels', value=labels),
            search.TextField(name='series_type', value=series_type)
        ]
        if check_duplicate_series(description):
            # NOTE(review): returns from the handler entirely, so series
            # after the first duplicate are never imported — confirm intended.
            logging.info("duplicate series %s" % description)
            return
        # Create the series document first (with an empty number list) so
        # its generated ID can be attached to each member number.
        series_id = search.Index(name=_INDEX_NAME).put(
            search.Document(
                fields=data_fields +
                [search.TextField(name='list_of_number_ids', value='')]))[0].id
        for number in child:
            value = float(number.attrib['value'])
            year = '-1'
            month = '-1'
            day = '-1'
            if 'time_period' in number.attrib:
                # 'time_period' is "YYYY", "YYYY-MM" or "YYYY-MM-DD".
                time = number.attrib['time_period']
                year = time
                if time.find('-') != -1:
                    year = time.split('-')[0]
                    month = time.split('-')[1]
                    if len(time.split('-')) == 3:
                        day = time.split('-')[2]
            check_duplicate = check_duplicate_numbers(
                value, units, description, source, year, month, day)
            if check_duplicate:
                # Reuse the existing number document; just link the series.
                number = check_duplicate
                number_id = number.doc_id
                add_series_id_to_number(number, series_id)
            else:
                number_id = add_to_number_index(author, None, value, units,
                                                description, labels, source,
                                                int(year), int(month),
                                                int(day), series_id)
            list_of_number_ids += u" " + number_id
        # Rewrite the series document now that all member IDs are known.
        search.Index(name=_INDEX_NAME).put(
            search.Document(doc_id=series_id, fields=data_fields + [
                search.TextField(name='list_of_number_ids',
                                 value=list_of_number_ids)
            ]))
def get_by_key(cls, key):
    """Build an instance wrapping the BlobInfo for the given key string."""
    info = blobstore.BlobInfo(blobstore.BlobKey(key))
    return cls(info)
def post(self):
    """Answer a JSON body {'blobKey': ...} with {'url': serving_url}."""
    payload = json.loads(self.request.body)
    blob_key = blobstore.BlobKey(payload['blobKey'])
    serving_url = images.get_serving_url(blob_key,
                                         size=None,
                                         crop=False,
                                         secure_url=None)
    self.response.write(json.dumps({'url': serving_url}))
def getproperty(kind, p, key=False):
    """Read the form value for property ``p`` of model ``kind`` and coerce it.

    Args:
        kind: db.Model subclass whose property classes drive the coercion.
        p: property name; the form field is named 'input__p__<p>'
           (or 'input__p__key__<p>' when ``key`` is True).
        key: True when the property is part of the entity key; the value is
            then treated as a plain string and is required.

    Returns:
        The value coerced to the property's type, or None when the field
        was empty.

    Aborts with 500 for unsupported property types and 400 when a required
    key property is missing.
    """
    if key:
        input_name = 'input__p__key__%s' % p
    else:
        input_name = 'input__p__%s' % p
    v = getattr(request.forms, input_name)
    if not key:
        property_class = kind._properties[p]
    else:
        # Key components are always plain strings.
        property_class = db.StringProperty()
    logging.info("p = %s" % p)
    logging.info("v = %s" % v)
    logging.info("property_class = %s" % property_class)
    if not v:
        v = None
    else:
        if isinstance(property_class, db.BooleanProperty):
            # Any non-empty string other than 'false'/'no' counts as True.
            if v.lower() in ['false', 'no']:
                v = False
            else:
                v = bool(v)
        elif isinstance(property_class, db.IntegerProperty):
            v = long(v)
        elif isinstance(property_class, db.FloatProperty):
            v = float(v)
        elif isinstance(property_class, db.DateTimeProperty):
            v = datetime.datetime.strptime(v, '%Y-%m-%d %H:%M:%S.%f')
        elif isinstance(property_class, db.LinkProperty):
            v = db.Link(v)
        elif isinstance(property_class, db.TextProperty):
            v = db.Text(v)
        elif isinstance(property_class, db.BlobProperty):
            v = db.Blob(v)
        elif isinstance(property_class, db.EmailProperty):
            v = db.Email(v)
        elif isinstance(property_class, db.GeoPtProperty):
            # BUG FIX: the original called .strip() on the *list* returned by
            # split(',', 1), which always raised AttributeError. Strip each
            # component instead before converting to float.
            lat, lon = [float(x.strip()) for x in v.split(',', 1)]
            v = db.GeoPt(lat, lon)
        elif isinstance(property_class, db.RatingProperty):
            v = db.Rating(int(v))
        elif isinstance(property_class, db.CategoryProperty):
            v = db.Category(v)
        elif isinstance(property_class,
                        (db.ListProperty, db.StringListProperty)):
            # todo assumes list of strings
            v = [item.strip() for item in v.split(",")]
        elif isinstance(property_class, db.ReferenceProperty):
            kindname = property_class.reference_class.__name__
            # NOTE(review): db.Key() normally takes an encoded key string;
            # db.Key.from_path(kindname, v) may be what was intended — confirm.
            v = db.Key(kindname, v)
        elif isinstance(property_class, blobstore.BlobReferenceProperty):
            v = blobstore.BlobKey(v)
        elif isinstance(
                property_class,
                (db.IMProperty, db.PhoneNumberProperty,
                 db.PostalAddressProperty)):
            abort(
                500, 'Unsupported property type %s for model %s' %
                (property_class, kind.__name__))
    if key and v is None:
        abort(
            400, 'Property %s is part of the key for model %s so is required' %
            (p, kind.__name__))
    return v
def blob_reader(self):
    """Return a BlobReader (the file-like object a BlobInfo open gives)
    for this entity's stored blob key."""
    key = blobstore.BlobKey(self.blobkey)
    return blobstore.BlobReader(key)
def setUp(self):
    """Create a shared BlobKey/BlobInfo fixture for blobstore tests."""
    super(BlobStoreTestCase, self).setUp()
    # 'foobar' is an arbitrary key string used across the test case.
    self.key = blobstore.BlobKey('foobar')
    # self.values is presumably defined on the test class — confirm its source.
    self.info = blobstore.BlobInfo(self.key, self.values)
def decode_blobkey_object(self, dict):
    """Rebuild a BlobKey from a decoded-JSON mapping's '__key__' entry.

    Note: the parameter is named ``dict`` (shadowing the builtin); the name
    is kept for interface compatibility with existing callers.
    """
    encoded_key = dict['__key__']
    return blobstore.BlobKey(encoded_key)
def post(self, data_id=None):
    """Update an existing APIData package identified by ``data_id``.

    Accepts either a JSON body (Content_Type: application/json, authenticated
    via API key or token headers) or a regular form POST from a logged-in
    session. Keys prefixed 'indexed_' become searchable tags, 'unindexed_'
    values are stored verbatim, and 'file_' fields (form path only) are
    written to GCS. JSON clients get a wrap_response envelope; form clients
    are redirected with ?success=/?error= appended.
    """
    response = {}
    response["success"] = True
    logging.info(self.request.headers)
    # NOTE(review): non-standard header name with an underscore — confirm the
    # framework exposes 'Content-Type' under the key 'Content_Type'.
    content_type = self.request.headers["Content_Type"]
    if not self.user:
        if content_type == "application/json":
            # --- header-based auth for JSON API clients ---
            if "Authorization" not in self.request.headers:
                logging.info("No Authorization in headers")
                desc = "You must be logged in to use the API."
                response["success"] = False
                response["response"] = "AuthorizationError"
                response["description"] = desc
                response["code"] = 400
                wrap_response(self, response)
                return
            if self.request.headers["Authorization"] == API_KEY:
                # Master API key: identify the acting user via the From header.
                if not self.request.headers["From"]:
                    logging.info("No email defined")
                    desc = "Cannot find user."
                    response["success"] = False
                    response["response"] = "InvalidUserError"
                    response["description"] = desc
                    response["code"] = 400
                    wrap_response(self, response)
                    return
                user_email = self.request.headers["From"].lower()
                query = User.query()
                owner = query.filter(
                    User.current_email == user_email).get()
                if not owner:
                    logging.info("Cannot find user")
                    desc = "Cannot find user."
                    response["success"] = False
                    response["response"] = "InvalidUserError"
                    response["description"] = desc
                    response["code"] = 400
                    wrap_response(self, response)
                    return
            else:
                # Otherwise the Authorization header carries a token id.
                token = Token.get_by_id(
                    self.request.headers["Authorization"])
                if not token:
                    logging.info(
                        "Cannot find token: " +
                        str(self.request.headers["Authorization"]))
                    desc = "The token you provided is invalid."
                    response["success"] = False
                    response["response"] = "InvalidTokenError"
                    response["description"] = desc
                    response["code"] = 400
                    wrap_response(self, response)
                    return
                logging.info(token)
                session = token.session.get()
                if not session:
                    logging.info("Cannot find session")
                    desc = "The token has already expired."
                    # NOTE(review): sets "error" (not "success") here, unlike
                    # every sibling branch — looks like a typo; confirm.
                    response["error"] = False
                    response["response"] = "InvalidTokenError"
                    response["description"] = desc
                    response["code"] = 400
                    wrap_response(self, response)
                    return
                logging.info(session)
                if session.expires < datetime.datetime.now(
                ) or session.status is False:
                    logging.info("token has expired or not active")
                    desc = "The token has already expired."
                    response["success"] = False
                    response["response"] = "InvalidTokenError"
                    response["description"] = desc
                    response["code"] = 400
                    wrap_response(self, response)
                    return
        else:
            # Browser client without a session: bounce back with an error.
            desc = "You must be logged in to use the API."
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
            # NOTE(review): no return here — processing continues after the
            # redirect is scheduled; confirm this is intentional.
    if not data_id:
        desc = "ID is missing from the request."
        if content_type == "application/json":
            response["success"] = False
            response["response"] = "MissingParametersError"
            response["description"] = desc
            response["code"] = 400
            wrap_response(self, response)
        else:
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
        return
    data = APIData.get_by_id(normalize_id(data_id))
    if not data:
        desc = "Cannot find the package."
        if content_type == "application/json":
            response["success"] = False
            response["response"] = "InvalidIDError"
            response["description"] = desc
            response["code"] = 400
            wrap_response(self, response)
        else:
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
        return
    if data.archived:
        # Archived packages are deliberately reported as missing.
        desc = "Cannot find the package."
        if content_type == "application/json":
            response["success"] = False
            response["response"] = "InvalidIDError"
            response["description"] = desc
            response["code"] = 400
            wrap_response(self, response)
        else:
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
        return
    desc = "There are missing parameters in your request."
    if content_type == "application/json":
        # --- JSON update path ---
        if not self.request.body:
            response["success"] = False
            response["response"] = "MissingParametersError"
            response["description"] = desc
            response["code"] = 400
            wrap_response(self, response)
            return
        try:
            body = json.loads(self.request.body)
        except Exception as e:
            logging.info(e)
            desc = "Invalid JSON format."
            response["success"] = False
            response["response"] = "InvalidJSONError"
            response["description"] = desc
            response["code"] = 400
            wrap_response(self, response)
            return
        tags = []
        try:
            for key, value in body.items():
                try:
                    tags += create_tags(value)
                except Exception as e:
                    logging.info("Cannot create tag from: ")
                    logging.info(e)
                if key.startswith('unindexed_'):
                    # unindexed_
                    ad_key = key.replace("unindexed_", "")
                    data.additional_data[ad_key] = value.strip()
                if key.startswith('indexed_'):
                    ad_key = key.replace("indexed_", "")
                    data.additional_data[ad_key] = value
                    # Drop any previously-indexed entries for this key before
                    # appending the fresh one. NOTE(review): removing from the
                    # list being iterated can skip entries — confirm acceptable.
                    for d in data.indexed_data:
                        ad_key = key.replace("indexed_", "")
                        if d.startswith(ad_key.upper()):
                            try:
                                data.indexed_data.remove(d)
                            except Exception as e:
                                logging.exception(e)
                                logging.info("Cannot remove from list")
                    data.indexed_data.append(create_indexed_tag(
                        key, value))
            if self.user:
                data.username = self.user.name
                data.user = self.user.key
            data.indexed_data = uniquify(data.indexed_data)
            data.tags = uniquify(tags)
            data.put()
            desc = "Data has been saved."
            response["success"] = True
            response["response"] = "Success"
            response["description"] = desc
            response["code"] = 200
            response["data"] = data.to_api_object()
            wrap_response(self, response)
        except Exception as e:
            logging.exception(e)
            desc = "A server error occured. Please try again later."
            response["success"] = False
            response["response"] = "ServerError"
            response["description"] = desc
            response["code"] = 500
            wrap_response(self, response)
    else:
        # --- form-encoded update path ---
        if not self.request.arguments():
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
            return
        tags = []
        try:
            for arg in self.request.arguments():
                # Remove stale indexed entries matching this argument.
                for d in data.indexed_data:
                    ad_key = arg.replace("indexed_", "")
                    if d.startswith(ad_key.upper()):
                        try:
                            data.indexed_data.remove(d)
                        except Exception as e:
                            logging.exception(e)
                            logging.info("Cannot remove from list")
                if arg.startswith('unindexed_'):
                    ad_key = arg.replace("unindexed_", "")
                    ad_value = self.request.POST.get(arg)
                    data.additional_data[ad_key] = ad_value.strip()
                    try:
                        tags += create_tags(ad_value)
                    except Exception as e:
                        logging.info("Cannot create tag from: ")
                        logging.info(e)
                if arg.startswith('indexed_'):
                    ad_key = arg.replace("indexed_", "")
                    ad_value = self.request.POST.get(arg)
                    data.additional_data[ad_key] = ad_value
                    try:
                        tags += create_tags(ad_value)
                    except Exception as e:
                        logging.info("Cannot create tag from: ")
                        logging.info(e)
                    data.indexed_data.append(
                        create_indexed_tag(arg, self.request.POST.get(arg)))
                if arg.startswith('file_'):
                    # Upload the attached file to GCS under a random prefix.
                    filename = BUCKET_NAME
                    filename += random_string(20) + "/"
                    ad_key = arg.replace("file_", "")
                    data.additional_data[ad_key] = {}
                    try:
                        # try:
                        file_name = self.request.POST.get(arg).filename
                        filename += file_name
                        gcs_file = gcs.open(
                            filename, 'w',
                            options={'x-goog-acl': 'public-read'})
                        gcs_file.write(self.request.get(arg))
                        gcs_file.close()
                        full_url = "https://storage.googleapis.com" + filename
                        #
                        # NOTE(review): hard-coded "file" key here (vs ad_key
                        # below) raises KeyError unless a 'file_file' arg came
                        # first — confirm; the AttributeError handler below
                        # will NOT catch a KeyError.
                        data.additional_data["file"]["file_url"] = full_url
                        data.file_url = full_url
                        data.additional_data[ad_key]["file_url"] = full_url
                        try:
                            # Serving URL only works for images; fall back to
                            # the raw GCS URL otherwise.
                            blob_key = blobstore.create_gs_key("/gs" + filename)
                            data.serving_url = images.get_serving_url(
                                blob_key)
                            data.additional_data[ad_key][
                                "serving_url"] = data.serving_url
                            data.gcs_key = blobstore.BlobKey(blob_key)
                        except Exception as e:
                            logging.exception(e)
                            logging.error("not an image??")
                            data.additional_data[ad_key][
                                "serving_url"] = full_url
                    except AttributeError, e:  # Python 2 except syntax
                        logging.exception(e)
                        logging.exception("NO FILE ATTACHED")
            if self.user:
                data.username = self.user.name
                data.user = self.user.key
            data.indexed_data = uniquify(data.indexed_data)
            data.tags = uniquify(tags)
            data.put()
            desc = "Data has been updated."
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?success=" + urllib.quote(desc)
                self.redirect(url)
            else:
                response["success"] = True
                response["response"] = "Success"
                response["description"] = desc
                response["code"] = 200
                response["data"] = data.to_api_object()
                wrap_response(self, response)
        except Exception as e:
            logging.exception(e)
            desc = "A server error occured. Please try again later."
            if self.POST("r"):
                url = urllib.unquote(str(self.POST("r")))
            else:
                url = self.request.referer
            if url:
                if "?" in url:
                    url = url.split("?")[0]
                url += "?error=" + urllib.quote(desc)
                self.redirect(url)
            else:
                response["success"] = False
                response["response"] = "ServerError"
                response["description"] = desc
                response["code"] = 500
                wrap_response(self, response)
def get_by_bucket_key(bucket_key):
    """Open a BlobReader for the blob identified by ``bucket_key``."""
    key = blobstore.BlobKey(bucket_key)
    return blobstore.BlobReader(key)
def post(self):
    """Resize an uploaded image into four JPEG thumbnails and re-upload them.

    Rejects oversized or non-image uploads, builds 1600/800/512/256 variants
    with the App Engine images service, POSTs them as a multipart form to the
    final blobstore upload handler, records the resulting blob keys in the DB,
    and echoes the storage handler's JSON response. The original upload is
    always deleted at the end.
    """
    # get all the uploaded file info
    myfile = self.get_uploads('file')[0]  # this is a blob key info
    # too large file, we bail out
    if myfile.size > self.app.config[
            'max_upload_size'] * 1000000:  # in B, so kb = 1000B, mb==1000000B
        self.error(413)
        self.response.out.write(
            'file too large: %s Max allowed: %s MB' %
            (str(myfile.size), self.app.config['max_upload_size']))
        # delete the original file uploaded to blobstore,all of them
        [blobstore.delete(each.key()) for each in self.get_uploads('file')]
        return
    # if not image, we bail out
    try:
        ImagePreProcessHandler.is_image(myfile)
    except Exception as ex:
        self.error(406)
        self.response.out.write(str(ex))
        self.response.out.write(
            ' Image in "file" Field is danmaged, or not image')
        # delete the original file uploaded to blobstore,all of them
        [blobstore.delete(each.key()) for each in self.get_uploads('file')]
        return
    start_time = time.time()
    ### Use PIL library to transform image transformation
    ### But this process is slow and memory too huge to consume, quality is high
    # im = PILImage.open(myfile.open())
    # if im.mode not in ("L","RGB"):
    #     im = im.convert("RGB")
    # im.thumbnail((1600,1600)) # if the image size is smaller than this size, it will not stretch
    # buf_1600 = StringIO()
    # im.save(buf_1600,"JPEG",quality=95) # 95% can save 1/2 space
    # data_1600 = buf_1600.getvalue()
    # buf_1600.close() # free memory
    # im.thumbnail((800,800))
    # buf_800 = StringIO()
    # im.save(buf_800,"JPEG",quality=95)
    # data_800 = buf_800.getvalue()
    # buf_800.close()
    # im3 = PILImageOps.fit(im, (512, 512),PILImage.ANTIALIAS)
    # buf_512 = StringIO()
    # im3.save(buf_512,"JPEG",quality=95)
    # data_512 = buf_512.getvalue()
    # buf_512.close()
    # im4 = PILImageOps.fit(im, (256, 256),PILImage.ANTIALIAS) # let the edge soft, not so sharp
    # buf_256 = StringIO()
    # im4.save(buf_256,"JPEG",quality=95)
    # data_256 = buf_256.getvalue()
    # buf_256.close()
    # thumbnails = [data_1600,data_800,data_512,data_256]
    ### This is using google image service to perform image transforation
    ### Which will result quite blur in the final quality, but it is fast
    thumbnails = []
    img = images.Image(blob_key=myfile.key())
    img.resize(
        width=1600, height=1600
    )  # if image size is smaller than this size, it will stretch to this size
    thumbnail_1600 = img.execute_transforms(output_encoding=images.JPEG,
                                            quality=95)
    thumbnails.append(thumbnail_1600)
    img.resize(width=800, height=800)
    thumbnail_800 = img.execute_transforms(output_encoding=images.JPEG,
                                           quality=95)
    thumbnails.append(thumbnail_800)
    img.resize(width=512, height=512, crop_to_fit=True)
    thumbnail_512 = img.execute_transforms(output_encoding=images.JPEG,
                                           quality=95)
    thumbnails.append(thumbnail_512)
    img.resize(width=256, height=256, crop_to_fit=True)
    thumbnail_256 = img.execute_transforms(output_encoding=images.JPEG,
                                           quality=95)
    thumbnails.append(thumbnail_256)
    ### end
    end_time = time.time()
    elapsed_time = str(end_time - start_time)
    content_type_str = 'image/jpeg'
    format_size_list = ['1600', '800', '512', '256']
    # (field name, per-size filename, JPEG bytes) triples for the multipart body.
    args_list = [('file', 'processed_file_' + y, x)
                 for x, y in zip(thumbnails, format_size_list)]
    ###
    ### This is transformation using url get, I think it is slow, so deprecated
    # 2. get the compressed files as different size
    # start_time = time.time()
    # compressed_file_url_list = [
    #     images.get_serving_url(myfile.key(), size=1600, crop=False),
    #     images.get_serving_url(myfile.key(), size=800, crop=False),
    #     images.get_serving_url(myfile.key(), size=512, crop=True),
    #     images.get_serving_url(myfile.key(), size=256, crop=True),
    # ]
    # format_size_list = ['1600','800','512','256']
    # response_list = []
    # for each_url in compressed_file_url_list:
    #     response = urlfetch.fetch(
    #         url=each_url, # the url
    #         method=urlfetch.GET,
    #         deadline=30,
    #         validate_certificate=False)
    #     response_list.append(response)
    # end_time = time.time()
    # elapsed_time = str(end_time - start_time)
    # content_type_str = response_list[0].headers['content-type'] # maybe image/jpeg
    # args_list = [ ('file', 'processed_file_'+ y, x.content) for x,y in zip(response_list, format_size_list)]
    # 3. write the picture to blob, again
    content_type, body = ImagePreProcessHandler.encode_multipart_formdata(
        [], args_list, content_type_str)
    # 4. upload to the image storage handler
    # when success, store a DB storage object that holds these images
    response2 = urlfetch.fetch(url=blobstore.create_upload_url(
        self.app.config['blob_store_final']),
        payload=body,
        method=urlfetch.POST,
        headers={'Content-Type': content_type},
        deadline=30)
    if response2.status_code == 200:
        response2_loaded_object = json.loads(response2.content, 'utf-8')
        arg_list_db = {
            'blob_256': None,
            'blob_512': None,
            'blob_800': None,
            'blob_1600': None,
        }
        # Map returned blob keys back to their size slot by filename substring.
        for each in response2_loaded_object['stored']:
            if '256' in each['filename']:
                arg_list_db['blob_256'] = blobstore.BlobKey(
                    each['blob_key'])
            if '512' in each['filename']:
                arg_list_db['blob_512'] = blobstore.BlobKey(
                    each['blob_key'])
            if '800' in each['filename']:
                arg_list_db['blob_800'] = blobstore.BlobKey(
                    each['blob_key'])
            if '1600' in each['filename']:
                arg_list_db['blob_1600'] = blobstore.BlobKey(
                    each['blob_key'])
        public_hash_id = db.add_processed_image(
            **arg_list_db)  # add the image information into db.
        response2_loaded_object['public_hash_id'] = public_hash_id
        response2_loaded_object['process_time'] = elapsed_time
        self.response.charset = 'utf-8'
        self.response.content_type = response2.headers['content-type']
        self.response.out.write(
            json.dumps(response2_loaded_object,
                       cls=MyEncoder,
                       ensure_ascii=False,
                       indent=2,
                       sort_keys=True).encode('utf-8'))
    # delete the original file uploaded to blobstore,all of them
    [blobstore.delete(each.key()) for each in self.get_uploads('file')]
def createBlobStoreForm(self):
    """Return a stub BlobKey standing in for form content (demo/test helper)."""
    content = 'Some example content of a form.'
    return blobstore.BlobKey(content)
def write_to_api_params(items=None, user=None, content_type=None,
                        imported=False, user_request=None):
    """Create and store a new APIData entity from request items.

    Args:
        items: iterable of argument names (or a mapping of upload objects
            when ``user_request`` is None).
        user: acting user entity; nothing is stored when falsy.
        content_type: accepted but not referenced in this body — confirm.
        imported: accepted but not referenced in this body — confirm.
        user_request: handler whose .request supplies the values.

    Returns:
        The saved APIData on success; None when ``user`` is falsy or an
        unexpected error was swallowed and logged.
    """
    data = APIData()
    data.additional_data = {}
    if user:
        tags = []
        try:
            for arg in items:
                if arg.startswith('unindexed_'):
                    # Stored verbatim (stripped) but not made searchable.
                    ad_key = arg.replace("unindexed_", "")
                    ad_value = user_request.request.get(arg)
                    data.additional_data[ad_key] = ad_value.strip()
                if arg.startswith('indexed_'):
                    ad_key = arg.replace("indexed_", "")
                    ad_value = user_request.request.get(arg)
                    data.additional_data[ad_key] = ad_value
                    try:
                        tags += create_tags(ad_value)
                    except Exception as e:
                        logging.exception("Cannot create tag from: ")
                    data.indexed_data.append(
                        create_indexed_tag(arg, user_request.request.get(arg)))
                if arg.startswith('file_'):
                    logging.debug(arg)
                    # Upload the attached file to GCS under a random prefix.
                    filename = BUCKET_NAME
                    filename += random_string(128) + "/"
                    ad_key = arg.replace("file_", "")
                    data.additional_data[ad_key] = {}
                    try:
                        if not user_request:
                            # Direct upload-object path (e.g. FieldStorage-like
                            # items — confirm the expected item type).
                            file_name = items[arg].filename
                            filename += file_name
                            gcs_options = {'x-goog-acl': 'public-read'}
                            gcs_file = gcs.open(filename, 'w',
                                                options=gcs_options)
                            gcs_file.write(items[arg].file.read())
                            gcs_file.close()
                        else:
                            file_name = user_request.request.POST.get(arg)
                            file_name = file_name.filename
                            filename += file_name
                            gcs_options = {'x-goog-acl': 'public-read'}
                            gcs_file = gcs.open(filename, 'w',
                                                options=gcs_options)
                            gcs_file.write(user_request.request.get(arg))
                            gcs_file.close()
                        full_url = "https://storage.googleapis.com" + filename
                        data.file_url = full_url
                        data.additional_data[ad_key]["file_url"] = full_url
                        try:
                            # Serving URL only works for images; fall back to
                            # the raw GCS URL otherwise.
                            blob_key = blobstore.create_gs_key("/gs" + filename)
                            data.serving_url = images.get_serving_url(blob_key)
                            data.additional_data[ad_key]["serving_url"] = data.serving_url
                            data.gcs_key = blobstore.BlobKey(blob_key)
                        except Exception as e:
                            logging.exception(e)
                            logging.error("FILE IS NOT AN IMAGE")
                            data.additional_data[ad_key]["serving_url"] = full_url
                    except AttributeError, e:  # Python 2 except syntax
                        logging.exception(e)
                        logging.exception("NO FILE ATTACHED")
            if user:
                data.username = user.name
                data.user = user.key
                data.indexed_data.append(
                    create_indexed_tag("USER_ID", str(user.key.id())))
            data.tags = uniquify(tags)
            data.put()
            return data
        except Exception as e:
            logging.exception('ERROR')
            logging.debug(e)