Example 1
def testMultipleRequests(self):
    """Tests restoring the reader state across multiple requests."""
    input_file = files.blobstore.create()

    # Create a file with two records.
    with files.open(input_file, "a") as f:
      with records.RecordsWriter(f) as w:
        proto = file_service_pb.KeyValues()
        proto.set_key("key2")
        proto.value_list().extend(["a", "b"])
        proto.set_partial(True)
        w.write(proto.Encode())

        proto = file_service_pb.KeyValues()
        proto.set_key("key2")
        proto.value_list().extend(["c", "d"])
        w.write(proto.Encode())

    files.finalize(input_file)
    input_file = files.blobstore.get_file_name(
        files.blobstore.get_blob_key(input_file))

    # Now read the records in two attempts, serializing and recreating the
    # input reader as if it's a separate request.
    reader = mapreduce_pipeline._ReducerReader([input_file], 0)
    it = iter(reader)
    self.assertEquals(input_readers.ALLOW_CHECKPOINT, it.next())

    reader_state = reader.to_json()
    other_reader = mapreduce_pipeline._ReducerReader.from_json(reader_state)
    it = iter(other_reader)
    self.assertEquals(("key2", ["a", "b", "c", "d"]), it.next())
Example 2
 def store_image(self, link):
     file_name = files.blobstore.create(
         mime_type='application/octet-stream')
     with files.open(file_name, 'a') as f:
         f.write(urlfetch.Fetch(link, deadline=60).content)
     files.finalize(file_name)
     self.image_name = file_name
Example 3
  def testMergeFiles(self):
    """Test merging multiple files."""
    input_data = [(str(i), "_" + str(i)) for i in range(100)]
    input_data.sort()

    input_file = files.blobstore.create()

    with files.open(input_file, "a") as f:
      with records.RecordsWriter(f) as w:
        for (k, v) in input_data:
          proto = file_service_pb.KeyValue()
          proto.set_key(k)
          proto.set_value(v)
          w.write(proto.Encode())
    files.finalize(input_file)
    input_file = files.blobstore.get_file_name(
        files.blobstore.get_blob_key(input_file))

    p = TestMergePipeline([input_file, input_file, input_file])
    p.start()
    test_support.execute_until_empty(self.taskqueue)
    p = TestMergePipeline.from_id(p.pipeline_id)

    output_file = p.outputs.default.value[0]
    output_data = []
    with files.open(output_file, "r") as f:
      for record in records.RecordsReader(f):
        output_data.append(record)

    expected_data = [
        str((k, [v, v, v], False)) for (k, v) in input_data]
    self.assertEquals(expected_data, output_data)
Example 4
    def _save(self, name, content):
        name = name.replace('\\', '/')
        if hasattr(content, 'file') and \
           hasattr(content.file, 'blobstore_info'):
            data = content.file.blobstore_info
        elif hasattr(content, 'blobstore_info'):
            data = content.blobstore_info
        elif isinstance(content, File):
            guessed_type = mimetypes.guess_type(name)[0]
            file_name = files.blobstore.create(mime_type=guessed_type or 'application/octet-stream',
                                               _blobinfo_uploaded_filename=name)

            with files.open(file_name, 'a') as f:
                for chunk in content.chunks():
                    f.write(chunk)

            files.finalize(file_name)

            data = files.blobstore.get_blob_key(file_name)
        else:
            raise ValueError("The App Engine storage backend only supports "
                             "BlobstoreFile instances or File instances.")

        if isinstance(data, (BlobInfo, BlobKey)):
            # We change the file name to the BlobKey's str() value.
            if isinstance(data, BlobInfo):
                data = data.key()
            return '%s/%s' % (data, name.lstrip('/'))
        else:
            raise ValueError("The App Engine Blobstore only supports "
                             "BlobInfo values. Data can't be uploaded "
                             "directly. You have to use the file upload "
                             "handler.")
Example 5
  def testSortFile(self):
    """Test sorting a file."""
    input_file = files.blobstore.create()

    input_data = [
        (str(i), "_" + str(i)) for i in range(100)]

    with files.open(input_file, "a") as f:
      with records.RecordsWriter(f) as w:
        for (k, v) in input_data:
          proto = file_service_pb.KeyValue()
          proto.set_key(k)
          proto.set_value(v)
          w.write(proto.Encode())
    files.finalize(input_file)
    input_file = files.blobstore.get_file_name(
        files.blobstore.get_blob_key(input_file))

    p = shuffler._SortChunksPipeline("testjob", [input_file])
    p.start()
    test_support.execute_until_empty(self.taskqueue)
    p = shuffler._SortChunksPipeline.from_id(p.pipeline_id)

    input_data.sort()
    output_files = p.outputs.default.value[0]
    output_data = []
    for output_file in output_files:
      with files.open(output_file, "r") as f:
        for binary_record in records.RecordsReader(f):
          proto = file_service_pb.KeyValue()
          proto.ParseFromString(binary_record)
          output_data.append((proto.key(), proto.value()))

    self.assertEquals(input_data, output_data)
Example 6
 def save_blob(self, blobdata='blobdata'):
     file_name = files.blobstore.create(mime_type='application/octet-stream')
     with files.open(file_name, 'a') as f:
         f.write(blobdata)
     files.finalize(file_name)
     blob_key = files.blobstore.get_blob_key(file_name)
     return blob_key
Example 7
def finish_upload(file_id):
    file_item = File.get_by_id(file_id)
    size = file_item.size
    current_size = file_item.current_size
    write_path = file_item.write_path
    files.finalize(write_path)
    
    if size != current_size:
        file_item.delete()
        raise Exception('file size error')
    
    # check sha1 
    
    sh = hashlib.sha1()
    with files.open(file_item.read_path, 'r') as fp:
        buf = fp.read(1000000)
        while buf:
            sh.update(buf)
            buf = fp.read(1000000)
        
    sha1_value = sh.hexdigest()
    if sha1_value != file_item.sha1:
        file_item.delete()
        raise Exception('file sha1 error')
    
    file_item.upload_finished = True
    file_item.put()
Example 8
    def set(key, result, time=DEFAULT_LAYER_CACHE_EXPIRATION_SECONDS, namespace=""):
        old_blob_infos = BlobCache.get_blob_infos(key, namespace)
      
        value = pickle.dumps(result)
        
        # Create the file
        file_name = files.blobstore.create(mime_type='application/octet-stream', _blobinfo_uploaded_filename=BlobCache.get_filename(key, namespace))
 
        # might need to wrap it in an object to handle expiration time here
        # (a sketch follows this example)
        
        # write the pickled result to the file
        pos = 0
        chunkSize = 65536
        with files.open(file_name, 'a') as f:
            while pos < len(value):
                chunk = value[pos:pos+chunkSize]
                pos += chunkSize
                f.write(chunk)

        # Finalize the file. Do this before attempting to read it.
        files.finalize(file_name)

        # Get the file's blob key
        blob_key = files.blobstore.get_blob_key(file_name)

        for info in old_blob_infos:
            try:
                info.delete()
            except Exception, e:
                # If deleting blob times out, don't crash the request. Just log the error.
                logging.error("Failed to delete old blob from layer_cache: %s" % e)
Example 9
def moveCoverToBlobstore(album):
  if not album.small_filetype:
    return

  from slughifi import slughifi
  fn = "%s_%s"%(slughifi(album.artist), slughifi(album.title))
  small_file = files.blobstore.create(mime_type=album.small_filetype,
                                      _blobinfo_uploaded_filename="%s_small.png"%fn)
  large_file = files.blobstore.create(mime_type=album.large_filetype,
                                      _blobinfo_uploaded_filename="%s_big.png"%fn)

  with files.open(small_file, 'a') as small:
    small.write(album.small_cover)
  with files.open(large_file, 'a') as large:
    large.write(album.large_cover)

  files.finalize(small_file)
  files.finalize(large_file)

  album.cover_small = files.blobstore.get_blob_key(small_file)
  album.cover_large = files.blobstore.get_blob_key(large_file)

  del album.small_cover
  del album.large_cover
  del album.large_filetype
  del album.small_filetype

  album.put()
Example 10
 def post(self):
     rows = self.request.POST.get('file').value
     key = self.request.POST.get('key')
     file_name = files.blobstore.create(mime_type='text/plain')
     with files.open(file_name, 'a') as f:
         writer = csv.writer(f, delimiter=',')
         for row in csv.reader(StringIO(rows), delimiter=','):
             if len(row) > 1:
                 row[1] = self.crypt(row[1], key)
             writer.writerow(row)
     files.finalize(file_name)
     
     blobs = blobstore.BlobInfo.all()
     blob_links = [
                   '<a href="/serve/%s">File %s</a><br/>' % (blob.key(), index+1)
                   for index, blob in enumerate(blobs)
                  ]
     
     self.response.out.write(
         '''
            <html>
              <body>
              %s
              </body>
             </html>
         ''' % "".join(blob_links)
     )
Example 11
    def post(self):
        """Store a file after encripting it.

        """
        rows = self.request.POST.get('file').value
        file_name = files.blobstore.create(mime_type='text/plain')
        with files.open(file_name, 'a') as raw_file:
            writer = csv.writer(raw_file, delimiter=',')
            for row in csv.reader(StringIO(rows), delimiter=','):
                if len(row) > 1:
                    row[1] = self.crypt(row[1])
                writer.writerow(row)
        files.finalize(file_name)
        # TODO: with high replication, the newly created file will not
        # show up in the list.
        #
        # It should redirect to a page listing the files instead.
        blobs = blobstore.BlobInfo.all()
        blob_links = [
            '<a href="/serve/%s">File %s</a><br/>' % (blob.key(), index+1)
            for index, blob in enumerate(blobs)
        ]

        self.response.out.write("""<html>
    <body>
        <form action="/upload" enctype="multipart/form-data" method="post">
            <input type="file" name="file"/><input type="submit" />
        </form>
        <br>
        %s
    </body>
</html>
            """ % "".join(blob_links)
        )
Example 12
File: api.py Project: uri247/arch
    def get(self, firmid, process):
        self.json_content()
        firm_key = ndb.Key('Firm', firmid)

        if process == 'process_hier':
            deferred.defer( process_hier )
            self.w( json.dumps("ok") )

        if process == 'get_hier':
            self.w(self.request.get('callback') + '(')
            fname = '/gs/frl-arch/' + firmid + '/json/proj-detailed.json'
            with files.open(fname, 'r') as f:
                data = f.read(60 * 1000)
                while data:
                    self.w( data )
                    data = f.read(60 * 1000)
            self.w(');')

        elif process == 'mail':
            mail.send_mail( sender = 'Uri London <*****@*****.**>',
                            to = 'Live Person <*****@*****.**>',
                            subject = 'your test has succeeded',
                            body = 'Dear Live Person,\n\n    I\'m happy to tell you that your test is good'
                            )
            self.w( json.dumps('ok') )

        elif process == 'test_file':
            fname = '/gs/frl-arch/test.html'
            wfname = files.gs.create( fname, mime_type='text/html', acl='public-read')
            with files.open(wfname, 'a') as f:
                f.write( 'hello world' )
                f.write( 'bye bye' )
            files.finalize(wfname)
Example 13
  def finalize(self, ctx, shard_state):
    """Finalize writer shard-level state.

    Args:
      ctx: an instance of context.Context.
      shard_state: shard state.
    """
    mapreduce_spec = ctx.mapreduce_spec
    output_sharding = self.__class__._get_output_sharding(
        mapper_spec=mapreduce_spec.mapper)


    if self._request_filename is None or hasattr(self, "_183_test"):
      writer_state = self._State.from_json(shard_state.writer_state)
      self._request_filename = writer_state.request_filenames[0]

    if output_sharding == self.OUTPUT_SHARDING_INPUT_SHARDS:
      filesystem = self._get_filesystem(mapreduce_spec.mapper)
      files.finalize(self._filename)
      finalized_filenames = [self._get_finalized_filename(
          filesystem, self._filename, self._request_filename)]

      shard_state.writer_state = self._State(
          finalized_filenames, []).to_json()



      if filesystem == "blobstore":
        logging.info(
            "Shard %s-%s finalized blobstore file %s.",
            mapreduce_spec.mapreduce_id,
            shard_state.shard_number,
            self._filename)
        logging.info("Finalized name is %s.", finalized_filenames[0])
Example 14
def store_raw_data_as_blob(data,name,content_type):
    logging.info('store (bin) raw_data as %s (%s)' % (name, content_type))
    file_name = files.blobstore.create(mime_type=content_type,
                                       _blobinfo_uploaded_filename=name)
    with files.open(file_name, 'a') as out:
        out.write(data)
    files.finalize(file_name)
    blob_key = files.blobstore.get_blob_key(file_name)

# http://code.google.com/p/googleappengine/issues/detail?id=4872
# FIXME: get_blob_key may return None right after finalize; retried below
# (a sketch of a generalized retry follows this example).

    if not blob_key:
        logging.info('again....1')
        time.sleep(1)
        blob_key = files.blobstore.get_blob_key(file_name)

    if not blob_key:
        logging.info('again....2')
        time.sleep(1)
        blob_key = files.blobstore.get_blob_key(file_name)

# end of hack
        
    logging.info('file key:%s'%blob_key)
    return blob_key
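
The two hard-coded retries above generalize to a bounded polling loop; a minimal sketch under the same assumption (the helper name is ours):

def get_blob_key_with_retry(file_name, retries=5, delay=1.0):
    # Poll a bounded number of times, since get_blob_key can return None
    # shortly after finalize (see the issue linked above).
    blob_key = files.blobstore.get_blob_key(file_name)
    for _ in range(retries):
        if blob_key:
            break
        time.sleep(delay)
        blob_key = files.blobstore.get_blob_key(file_name)
    return blob_key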
Example 15
  def POST(self):
    """Receive HTTP POST (json-format data), and store data to blobstore.
    """
    data = json.loads(web.data())

    # Create the file
    # set filename property in BlobStore
    # http://stackoverflow.com/questions/5697844/how-to-set-filename-property-in-blobstore
    file_name = files.blobstore.create(mime_type='application/octet-stream',
        _blobinfo_uploaded_filename=data['key'])

    # Open the file and write to it
    with files.open(file_name, 'a') as f:
      f.write(base64.b64decode(data['payload']))

    # Finalize the file. Do this before attempting to read it.
    files.finalize(file_name)

    # Get the file's blob key
    blob_key = files.blobstore.get_blob_key(file_name)

    # store blob_key in datastore
    XmlBlobKey(id=data['key'], blob_key=blob_key).put()

    return 'OK<br />%s<br />%s' % (data['key'], blob_key)
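
A client for this handler would POST a JSON body carrying 'key' and a base64 'payload'; a rough sketch (the endpoint URL is an assumption):

import base64
import json
import urllib2

body = json.dumps({
    'key': 'example-key',                            # becomes the filename
    'payload': base64.b64encode('<xml>data</xml>'),  # handler b64-decodes it
})
req = urllib2.Request('http://localhost:8080/store', body,
                      {'Content-Type': 'application/json'})
print urllib2.urlopen(req).read()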
Example 16
def bitcointalk_plot(bt_data=None, outputtype='html'):
    dtf, (dt1, dt2) = plt.subplots(2, 1, sharex=True, figsize=(8, 4))
    plt.subplots_adjust(hspace=0.0)
    dtf.suptitle('Bitcointalk')
    if bt_data is None:
        bt_data = bitcointalk_data()
    bitcointalk(dt1, bt_data, keys=['new_topics','new_members'])
    bitcointalk(dt2, bt_data, keys=['most_members_online'])
    dt1.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%b\n%d"))
    dt2.yaxis.set_ticks(dt2.get_yticks()[:-1])
    dtf.set_size_inches(8,4)
    if outputtype == 'html':
        dt = StringIO.StringIO()
        plt.savefig(dt, format="png")
        html = """<img src="data:image/png;base64,%s"/>""" % (
            dt.getvalue().encode("base64").strip())
        plt.clf()
        return html
    elif outputtype == 'blobstore':
        dt = StringIO.StringIO()
        plt.savefig(dt, format="png")
        file_name = files.blobstore.create(mime_type='image/png',
             _blobinfo_uploaded_filename='bitcointalk.png')
        with files.open(file_name, 'a') as f:
            f.write(dt.getvalue())
        files.finalize(file_name)
        plt.clf()
    else:
        plt.show()
Example 17
    def finalize_job(cls, mapreduce_state):
        """Finalize job-level writer state.

        Collect from model.ShardState if this job has output per shard.

        Args:
          mapreduce_state: an instance of model.MapreduceState describing
            the current job.
        """
        state = cls._State.from_json(mapreduce_state.writer_state)
        output_sharding = cls._get_output_sharding(mapreduce_state=mapreduce_state)
        filesystem = cls._get_filesystem(mapreduce_state.mapreduce_spec.mapper)
        if output_sharding != cls.OUTPUT_SHARDING_INPUT_SHARDS:
            files.finalize(state.filenames[0])
            finalized_filenames = [
                cls._get_finalized_filename(filesystem, state.filenames[0], state.request_filenames[0])
            ]
        else:
            shards = model.ShardState.find_by_mapreduce_state(mapreduce_state)
            finalized_filenames = []
            for shard in shards:
                state = cls._State.from_json(shard.writer_state)
                finalized_filenames.append(state.filenames[0])

        state.filenames = finalized_filenames
        state.request_filenames = []
        mapreduce_state.writer_state = state.to_json()
Example 18
  def post(self, post):
    form = PostForm(data=self.request.POST, instance=post,
                    initial={'draft': post and post.published is None})
    if form.is_valid():

      post = form.save(commit=False)

      image_url = form.clean_data['image_url']
      if image_url:
        try:
          u = urllib2.urlopen(image_url)
          file_name = files.blobstore.create(mime_type='image/jpeg')
          with files.open(file_name, 'a') as f:
            f.write(u.read())
          files.finalize(file_name)
          post.image_id = str(files.blobstore.get_blob_key(file_name))
        except:
          # Not sure how to use ErrorList in Django 0.96
          # form._errors['image'] = ''
          self.render_form(form)
          return
      if form.clean_data['draft']:  # Draft post
        post.published = datetime.datetime.max
        post.put()
      else:
        if not post.path:  # Publish post
          post.updated = post.published = datetime.datetime.now()
        else:  # Edit post
          post.updated = datetime.datetime.now()
        post.publish()
      self.render_to_response("published.html", {
          'post': post,
          'draft': form.clean_data['draft']})
    else:
      self.render_form(form)
Example 19
  def testRecordsReader(self):
    """End-to-end test for records reader."""
    input_file = files.blobstore.create()
    input_data = [str(i) for i in range(100)]

    with files.open(input_file, "a") as f:
      with records.RecordsWriter(f) as w:
        for record in input_data:
          w.write(record)
    files.finalize(input_file)
    input_file = files.blobstore.get_file_name(
        files.blobstore.get_blob_key(input_file))

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".TestHandler",
        "mapreduce.input_readers.RecordsReader",
        {
            "file": input_file
        },
        shard_count=4,
        base_path="/mapreduce_base_path")

    test_support.execute_until_empty(self.taskqueue)
    self.assertEquals(100, len(TestHandler.processed_entites))
Example 20
  def finalize(self, ctx, shard_state):
    """Finalize writer shard-level state.

    Args:
      ctx: an instance of context.Context.
      shard_state: shard state.
    """
    mapreduce_spec = ctx.mapreduce_spec
    output_sharding = self.__class__._get_output_sharding(
        mapper_spec=mapreduce_spec.mapper)
    if output_sharding == self.OUTPUT_SHARDING_INPUT_SHARDS:
      filesystem = self._get_filesystem(mapreduce_spec.mapper)
      files.finalize(self._filename)
      finalized_filenames = [self._get_finalized_filename(
          filesystem, self._filename, self._request_filename)]

      shard_state.writer_state = self._State(
          finalized_filenames, []).to_json()

      # Log to help debug empty blobstore key.
      # b/8302363
      if filesystem == "blobstore":
        logging.info(
            "Shard %s-%s finalized blobstore file %s.",
            mapreduce_spec.mapreduce_id,
            shard_state.shard_number,
            self._filename)
        logging.info("Finalized name is %s.", finalized_filenames[0])
Example 21
    def output(self, filename, file_out):
        content = file_out.read()

        base, ext = os.path.splitext(filename)

        if ext == ".css":
            mimetype = "text/css"
        elif ext == ".js":
            mimetype = "text/javascript"
        else:
            mimetype = "application/octet-stream"

        already_exists = False

        for info in BlobInfo.all().filter('content_type = ', mimetype):
            if info.filename == filename:
                already_exists = True
                continue

            #Clear out old blobs
            if info.filename.split(".")[0] == filename.split(".")[0]:
                logging.debug("Deleting: %s", info.filename)
                info.delete()

        if not already_exists:
            logging.info("Creating: %s", filename)
            result = files.blobstore.create(mime_type=mimetype, _blobinfo_uploaded_filename=filename)
            with files.open(result, "a") as f:
                f.write(content)
            files.finalize(result)

            blob_key = files.blobstore.get_blob_key(result)
            # Busy-wait: the dev SDK may not expose the blob key immediately.
            while not blob_key:
                blob_key = files.blobstore.get_blob_key(result)
Example 22
    def run(self, pid, config, *l):
        r, g, b = l

        w, h = config['width'], config['height']

        r = Counter(tuple(x) for x in r if x)
        g = Counter(tuple(x) for x in g if x)
        b = Counter(tuple(x) for x in b if x)

        ra, rb = minmax(r)
        ga, gb = minmax(g)
        ba, bb = minmax(b)

        img = bmp.BitMap(w, h)

        for y in range(h):
            for x in range(w):
                rc = int(round(255.0 * (r[(x, y)] - ra) / (rb - ra)))
                gc = int(round(255.0 * (g[(x, y)] - ga) / (gb - ga)))
                bc = int(round(255.0 * (b[(x, y)] - ba) / (bb - ba)))
                img.setPenColor(bmp.Color(rc, gc, bc))
                img.plotPoint(x, y)

        filename = files.blobstore.create(mime_type='image/bmp')

        with files.open(filename, 'a') as f:
            f.write(img.getBitmap())

        files.finalize(filename)
        imr = ImgResult()
        imr.pid = pid
        imr.blob_key = files.blobstore.get_blob_key(filename)
        imr.put()
Example 23
  def testHugeTaskUseDatastore(self):
    """Test map job with huge parameter values."""
    input_file = files.blobstore.create()
    input_data = [str(i) for i in range(100)]

    with files.open(input_file, "a") as f:
      with records.RecordsWriter(f) as w:
        for record in input_data:
          w.write(record)
    files.finalize(input_file)
    input_file = files.blobstore.get_file_name(
        files.blobstore.get_blob_key(input_file))

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".TestHandler",
        "mapreduce.input_readers.RecordsReader",
        {
            "file": input_file,
            # the parameter can't be compressed and wouldn't fit into
            # taskqueue payload
            "huge_parameter": random_string(900000)
        },
        shard_count=4,
        base_path="/mapreduce_base_path")

    test_support.execute_until_empty(self.taskqueue)
    self.assertEquals(100, len(TestHandler.processed_entites))
    self.assertEquals([], model._HugeTaskPayload.all().fetch(100))
Example 24
def WriteTempImageFile(stream):
    blob = files.blobstore.create(_blobinfo_uploaded_filename="ImageTemp")
    with files.open(blob, "a") as f:
        f.write(stream.getvalue())

    files.finalize(blob)
    return files.blobstore.get_blob_key(blob)
Example 25
def encoded_video_receiver(mp4_url, webm_url, video_id):
    logging.info("starting the encoded_video_receiver")
    if mp4_url and webm_url:
        video = Video.get_by_id(int(video_id))
        try:
            # adding the mp4 video ref
            mp4_file_name = files.blobstore.create(mime_type="video/mp4")
            mp4_video = urllib2.urlopen(mp4_url)
            with files.open(mp4_file_name, "a") as mp4_f:
                mp4_f.write(mp4_video.read())
            files.finalize(mp4_file_name)
            mp4_blob_key = files.blobstore.get_blob_key(mp4_file_name)
            video.mp4_video_ref = mp4_blob_key

            # adding the webm video ref
            webm_file_name = files.blobstore.create(mime_type="video/webm")
            webm_video = urllib2.urlopen(webm_url)
            with files.open(webm_file_name, "a") as webm_f:
                webm_f.write(webm_video.read())
            files.finalize(webm_file_name)
            webm_blob_key = files.blobstore.get_blob_key(webm_file_name)
            video.webm_video_ref = webm_blob_key

            video.put()

            logging.info("just saved the video in the datastore")

        except urllib2.HTTPError, e:
            logging.error("HTTPError:  Failed somehow with a HTTPerror: " + str(e))
            # time.sleep(10)
        except Exception, e:
            logging.error("Exception:  Failed somehow: " + str(e))
Example 26
    def _make_backup_info_blobstore(self, num):
        file_num = 3
        dt = datetime.datetime.utcnow()
        for i in range(num):
            # _AE_Backup_Information
            info = AeBackupInformation(
                name=config.BACKUP_NAME,
                filesystem='blobstore',
                start_time=dt,
                complete_time=dt + datetime.timedelta(hours=1)
            )
            info.put()
            dt -= datetime.timedelta(days=1)

            # _AE_Backup_Information_Kind_Files
            info_files = AeBackupInformationKindFiles()
            info_files.files = []
            if i == (num - 1):
                file_num = 120  # exceed 100 files on the last iteration
            for j in range(file_num):
                # make blobstore file
                file_name = files.blobstore.create(
                    mime_type='application/octet-stream')
                with files.open(file_name, 'a') as f:
                    f.write('asdfghjkl')
                files.finalize(file_name)
                info_files.files.append(file_name)

            info_files.key = ndb.Key(
                AeBackupInformationKindFiles._get_kind(),
                AeBackupInformationKindFiles.allocate_ids(1)[0],
                parent=info.key
            )
            info_files.put()
Example 27
def add_file_as_blob(filename, mime_type):
  blob_name = files.blobstore.create(mime_type='image/jpeg',
    _blobinfo_uploaded_filename=filename.split("/")[-1])
  with files.open(blob_name, 'a') as f:
    f.write(open(filename).read())
  files.finalize(blob_name)
  return files.blobstore.get_blob_key(blob_name)
Example 28
def write_to_blobstore(data, mime_type, name=None):
    """Writes a file to the App Engine blobstore and returns an instance of a
    BlobKey if successful.

      :param data: Blob data.
      :param mime_type: String, mime type of the blob.
      :param name: String, name of the blob.

      :returns: Instance of a `BlobKey`.
    """
    if not name:
        name = "".join(random.choice(string.letters) for x in range(DEFAULT_NAME_LEN))

    blob = files.blobstore.create(mime_type=mime_type, _blobinfo_uploaded_filename=name)
    with files.open(blob, "a", exclusive_lock=True) as f:
        f.write(data)
    files.finalize(blob)
    result = files.blobstore.get_blob_key(blob)
    # Issue with the local development SDK: the blob key may not be
    # available immediately after finalize, so retry a bounded number of
    # times, sleeping between attempts.
    for i in range(1, WRITE_MAX_RETRIES):
        if result:
            break
        logging.debug("blob still None.. will retry to write to blobstore..")
        time.sleep(WRITE_SLEEP_SECONDS)
        result = files.blobstore.get_blob_key(blob)
    logging.debug('File written to blobstore: key: "%s"', result)
    return result
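
Typical usage, per the docstring (variable names are illustrative):

# png_bytes is assumed to hold the file contents.
blob_key = write_to_blobstore(png_bytes, 'image/png', name='chart.png')
if blob_key:
    logging.info('stored chart under key %s', blob_key)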
Example 29
def uploadXmlsToBlobstore():
  """Writing Files to the Blobstore (Experimental) via remote api to 
     production server not working!

  https://developers.google.com/appengine/docs/python/blobstore/overview#Writing_Files_to_the_Blobstore
  http://stackoverflow.com/questions/8201283/google-app-engine-how-to-write-large-files-to-google-cloud-storage
  http://stackoverflow.com/questions/3530990/upload-data-to-blobstore-using-remote-api
  http://stackoverflow.com/questions/6545247/erratic-problem-with-app-engine-when-writing-files-directly-to-the-blobstore
  """
  romn_dir = getRomnDir()

  for dirpath, dirnames, filenames in os.walk(romn_dir):
    for filename in filenames:
      path = os.path.join(dirpath, filename)
      key = path[len(romn_dir)+1:]
      print('uploading %s ...' % key)
      with open(path, 'rb') as f:
        # Create the file
        file_name = files.blobstore.create(mime_type='application/octet-stream')

        # Open the file and write to it
        with files.open(file_name, 'a') as f2:
          f2.write(f.read())

        # Finalize the file. Do this before attempting to read it.
        files.finalize(file_name)

        XmlBlobKey(id=key, blob_key=files.blobstore.get_blob_key(file_name)).put()
Example 30
    def post(self):
        countries = self.all_countries
        tourist_id = int(self.get_cookie("query")[0])
        tourist = Tourist.Tourist.get_by_id(tourist_id)

        new_email = self.request.get("email")
        first_name = self.request.get("first_name")
        last_name = self.request.get("last_name")
        country = self.request.get("country")
        state = self.request.get("state")
        picture = self.request.POST.get("photo")
        # self.write(picture.filename[picture.filename.find(".") + 1 : ])

        profile_args = {"email" : new_email, "country" : country, "first_name" : first_name, 
            "last_name" : last_name, "state" : state, "picture" : picture}

        if self.validate_email(new_email) and self.validate_name(first_name) and self.validate_name(last_name):
            Tourist.Tourist.updateTourist(tourist, new_email, first_name, last_name, country, state)
            if picture is not None and picture != "":
                picture_extension = picture.filename[picture.filename.rfind(".") : ]
                picture_url = "/tourbly/profile_pictures/" +  str(tourist.key().id()) + self.rand_salt("picture") + picture_extension
                picture_name = "/gs" + picture_url
                writable_file_name = files.gs.create(picture_name, mime_type='image/jpeg', acl='public-read')
                with files.open(writable_file_name, 'a') as f:
                    f.write(picture.file.read())
                files.finalize(writable_file_name)

                tourist.picture = "http://storage.googleapis.com" + picture_url
                tourist.put()
            self.render("profile.html", isLoggedIn = self.check_session("query"), profile_args = profile_args,
                success_message = "Your profile has been updated successfully", tourist = tourist, 
                countries = countries)
        else:
            self.render("profile.html", email_error = self.profile_email_error_prompt(tourist.email, new_email),
             profile_args = profile_args, success_message = "there is something wrong", countries = countries)
Example 31
def generate_pledges_csv(file_name, pledge_type, pledge_time, full_data=False):
    """ Generates the pledges.csv file in a deferred way """

    PAGE_SIZE = 500
    csv_buffer = StringIO.StringIO()
    w = csv.writer(csv_buffer)
    if not pledge_time and pledge_type == 'WpPledge':
        # First time through, add the column headers
        if full_data:
            headers = [
                'SOURCE', 'donationTime', 'Amount ($)', 'url_nonce',
                'stripeCustomer', 'Email', 'First Name', 'Last Name',
                'Address', 'Address 2', 'City', 'State', 'Zip', 'Phone',
                'Country', 'Occupation', 'Employer', 'Targeting', 'Type',
                'Recurring', 'Source'
            ]
        else:
            headers = [
                'zip', 'dollars', 'timestamp', 'date', 'city', 'state',
                'latitude', 'longitude'
            ]

        w.writerow(headers)
    zg = Zipgun('zipgun/zipcodes')

    # Get the next PAGE_SIZE pledges
    query = getattr(model, pledge_type).all().order('donationTime')
    if pledge_time:
        # Filter instead of using 'offset' because offset is very inefficient,
        # according to https://developers.google.com/appengine/articles/paging
        query = query.filter('donationTime >= ', pledge_time)
    pledges = query.fetch(PAGE_SIZE + 1)
    next_pledge_time = None
    if len(pledges) == PAGE_SIZE + 1:
        next_pledge_time = pledges[-1].donationTime
    pledges = pledges[:PAGE_SIZE]

    # Loop through the current pledges and write them to the csv
    for pledge in pledges:
        w.writerow(pledge_row(pledge, zg, full_data))
    with files.open(file_name, 'a') as f:
        f.write(csv_buffer.getvalue())
    csv_buffer.close()

    if not next_pledge_time and pledge_type == 'Pledge':
        # Last time through, finalize the file
        files.finalize(file_name)
    else:
        # More to process, recursively run again
        next_pledge_type = pledge_type
        if pledge_type == 'WpPledge' and not next_pledge_time:
            next_pledge_type = 'Pledge'
        deferred.defer(generate_pledges_csv,
                       file_name,
                       next_pledge_type,
                       next_pledge_time,
                       full_data,
                       _queue='generateCSV')
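
The first call presumably comes from code that creates the still-unfinalized blobstore file; a sketch of kicking the chain off, matching the first-call checks at the top of the function (the mime type is our choice):

file_name = files.blobstore.create(mime_type='text/csv')
deferred.defer(generate_pledges_csv, file_name, 'WpPledge', None, False,
               _queue='generateCSV')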
Example 32
 def action_edit(self):
     if self.section.handler.request.get('submit'):
         self.SITE_HEADER = self.section.handler.request.get('SITE_HEADER')
         self.SITE_SUB_HEADER = self.section.handler.request.get(
             'SITE_SUB_HEADER')
         self.DEFAULT_THEME = self.section.handler.request.get(
             'DEFAULT_THEME')
         self.GOOGLE_ANALYTICS_UA = self.section.handler.request.get(
             'GOOGLE_ANALYTICS_UA')
         self.ROBOTS_TXT = self.section.handler.request.get('ROBOTS_TXT')
         if self.section.handler.request.get('FAVICON_ICO'):
             if self.FAVICON_ICO:
                 BlobInfo.get(self.FAVICON_ICO).delete()
             data = self.section.handler.request.get('FAVICON_ICO')
             handle = files.blobstore.create(
                 mime_type='image/x-icon',
                 _blobinfo_uploaded_filename='favicon.ico')
             with files.open(handle, 'a') as f:
                 f.write(data)
             files.finalize(handle)
             self.FAVICON_ICO = files.blobstore.get_blob_key(handle)
         self.ENABLE_THEME_PREVIEW = self.section.handler.request.get(
             'ENABLE_THEME_PREVIEW') != ''
         self.DEBUG_MODE = self.section.handler.request.get(
             'DEBUG_MODE') != ''
         cache.delete(CACHE_KEY)
         self.update()
         raise Exception('Redirect', self.section.action_redirect_path)
     f = form(self.section, self.section.full_path)
     f.add_control(
         control(self.section, 'text', 'SITE_HEADER', self.SITE_HEADER,
                 'Site header', 50))
     f.add_control(
         control(self.section, 'text', 'SITE_SUB_HEADER',
                 self.SITE_SUB_HEADER, 'Site sub-header', 50))
     combined_themes = get_local_theme_namespaces(
     ) + get_custom_theme_namespaces()
     f.add_control(
         selectcontrol(
             self.section, 'DEFAULT_THEME', combined_themes,
             self.DEFAULT_THEME if self.DEFAULT_THEME else
             DEFAULT_LOCAL_THEME_TEMPLATE, 'Default theme'))
     f.add_control(
         control(self.section, 'text', 'GOOGLE_ANALYTICS_UA',
                 self.GOOGLE_ANALYTICS_UA, 'Google analytics UA'))
     f.add_control(
         control(self.section, 'file', 'FAVICON_ICO', label='favicon.ico'))
     f.add_control(
         textareacontrol(self.section, 'ROBOTS_TXT', self.ROBOTS_TXT,
                         'robots.txt', 90, 5))
     f.add_control(
         checkboxcontrol(self.section, 'ENABLE_THEME_PREVIEW',
                         self.ENABLE_THEME_PREVIEW, 'Enable theme preview'))
     f.add_control(
         checkboxcontrol(self.section, 'DEBUG_MODE', self.DEBUG_MODE,
                         'Debug mode'))
     f.add_control(control(self.section, 'submit', 'submit', 'Submit'))
     return '<h2>Edit configuration</h2>%s' % unicode(f)
Example 33
def _fetchContentMap(binary_record):
    """Map function of fetch content.
  Fetched content will store to blobstore.

  Arg:
    binary_record: key value data, that key is url of target page,
      value is url of target of fetch.

  Returns:
    url: fetched url.
  """
    proto = file_service_pb.KeyValue()
    proto.ParseFromString(binary_record)
    page_url = proto.key()
    target_url = proto.value()
    #Fetch to CrawlDbDatum
    try:
        query = CrawlDbDatum.query(CrawlDbDatum.url == page_url)
        crawl_db_datum_future = query.fetch_async()
    except Exception as e:
        logging.warning("Failed create key, caused by invalid url:" +
                        page_url + ":" + e.message)

    #start fetch
    fetcher = fetchers.SimpleHttpFetcher(1, fetcher_policy_yaml.fetcher_policy)
    stored_url = None
    if re.match("^/", target_url):
        crawl_db_datum = _getCrawlDatum(crawl_db_datum_future)
        target_url = "%s%s" % (crawl_db_datum.extract_domain_url, target_url)

    try:
        fetch_result = fetcher.get(target_url)
        if fetch_result:
            #Storing to blobstore
            blob_io = files.blobstore.create(
                mime_type=fetch_result.get("mime_type"),
                _blobinfo_uploaded_filename=fetch_result.get("fetched_url"))
            with files.open(blob_io, 'a') as f:
                f.write(fetch_result.get("content"))
            files.finalize(blob_io)
            blob_key = files.blobstore.get_blob_key(blob_io)
            stored_url = images.get_serving_url(str(blob_key))
    except Exception as e:
        logging.warning("Fetch error occurred: " + e.message)

    #Put content to datastore.
    crawl_db_datum = _getCrawlDatum(crawl_db_datum_future)
    if crawl_db_datum and stored_url is not None:
        entity = ContentDbDatum(
            parent=crawl_db_datum.key,
            fetched_url=fetch_result.get("fetched_url"),
            stored_url=stored_url,
            content_type=fetch_result.get("mime_type"),
            content_size=fetch_result.get("content_length"),
            http_headers=str(fetch_result.get("headers")))
        entity.put()

    yield "%s:%s" % (target_url, stored_url)
Example 34
 def close(self):
     fn = files.blobstore.create(mime_type='application/octet-stream')
     f = files.open(fn, 'a')
     # TODO: grouping into chunks would probably help optimize RPCs
     # (a sketch follows this example)
     for b in self.bs:
         f.write(b)
     f.close()
     files.finalize(fn)
     return blobstore.get(files.blobstore.get_blob_key(fn))
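
A sketch of the chunked variant the TODO asks for, batching buffered pieces into roughly 1 MB writes (the chunk size is our choice):

 def close(self):
     fn = files.blobstore.create(mime_type='application/octet-stream')
     with files.open(fn, 'a') as f:
         buf, size = [], 0
         for b in self.bs:
             buf.append(b)
             size += len(b)
             if size >= 1 << 20:  # flush at ~1 MB to cut down on RPCs
                 f.write(''.join(buf))
                 buf, size = [], 0
         if buf:
             f.write(''.join(buf))
     files.finalize(fn)
     return blobstore.get(files.blobstore.get_blob_key(fn))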
Example 35
 def write_blob(self, data, info):
     blob = files.blobstore.create(
         mime_type=info['type'],
         _blobinfo_uploaded_filename=info['name']
     )
     with files.open(blob, 'a') as f:
         f.write(data)
     files.finalize(blob)
     return files.blobstore.get_blob_key(blob)
Example 36
    def post(self, id):

        self.response.content_type = "application/json"

        if self._content is None:
            self.response.status = 400
            self.response.out.write(
                utils.createError(
                    400, "Couldn't decode content or invalid content-type"))
            return

        # Trying to access card to see if user is allowed to
        request = self._service.timeline().get(id=id)
        try:
            card = request.execute()
        except HttpError as e:
            self.response.status = e.resp.status
            self.response.out.write(e.content)
            return

        # 2) Insert data into blob store
        file_name = files.blobstore.create(mime_type=self._content_type)
        with files.open(file_name, 'a') as f:
            f.write(self._content)
        files.finalize(file_name)
        blob_key = files.blobstore.get_blob_key(file_name)

        # 3) Update card with attachment info
        if not "attachments" in card:
            card["attachments"] = []

        attachment = {
            "id": str(blob_key),
            "contentType": self._content_type,
            "contentUrl": "%s/upload/mirror/v1/timeline/%s/attachments/%s" % (
                utils.base_url, card["id"], str(blob_key)),
            "isProcessing": False
        }

        card["attachments"].append(attachment)

        request = self._service.internal().timeline().update(id=card["id"],
                                                             body=card)

        try:
            result = request.execute()
        except HttpError as e:
            self.response.status = e.resp.status
            self.response.out.write(e.content)
            return

        self.response.status = 200
        self.response.out.write(json.dumps(result))
Example 37
 def store_file_as_blob(self, dropbox_link):
     response = urllib2.urlopen(dropbox_link)
     data = response.read()
     # TODO: in future, read 10 MB at a time rather than the entire file
     # (a sketch follows this example)
     blob_file_name = files.blobstore.create(
         mime_type='application/octet-stream')
     with files.open(blob_file_name, 'a') as f:
         f.write(data)
     files.finalize(blob_file_name)
     return files.blobstore.get_blob_key(blob_file_name)
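
A sketch of the streaming variant the TODO asks for; the method name and chunk size are ours:

 def store_file_as_blob_chunked(self, dropbox_link,
                                chunk_bytes=10 * 1024 * 1024):
     # Stream the download in ~10 MB chunks instead of reading the whole
     # file into memory.
     response = urllib2.urlopen(dropbox_link)
     blob_file_name = files.blobstore.create(
         mime_type='application/octet-stream')
     with files.open(blob_file_name, 'a') as f:
         chunk = response.read(chunk_bytes)
         while chunk:
             f.write(chunk)
             chunk = response.read(chunk_bytes)
     files.finalize(blob_file_name)
     return files.blobstore.get_blob_key(blob_file_name)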
Example 38
  def finalize(self, ctx, shard_number):
    """Finalize writer shard-level state.

    Args:
      ctx: an instance of context.Context.
      shard_number: shard number as integer.
    """
    finalized_filenames = []
    for filename in self._filenames:
      files.finalize(filename)
Example 39
    def post(self):
        """
        POST

        'number' and 'maxsize' are required.
        """
        number = self.request.POST.get('number', '')
        maxsize = self.request.POST.get('maxsize', '')
        context = {
            'number': number,
            'maxsize': maxsize,
        }
        errors = []

        try:
            number = int(number.strip())
        except ValueError:
            errors.append('Number must be an integer.')

        try:
            maxsize = int(maxsize.strip())
        except ValueError:
            errors.append('Max Size must be an integer.')

        if number < 1:
            errors.append('Number must be at least 1.')
        if maxsize < 100:
            errors.append('Max Size must be at least 100 bytes.')
        if maxsize > 10000000:
            errors.append('Max Size must be less than 10000000 bytes.')

        if errors:
            context['errors'] = errors
            self.render_response('create.html', **context)
            return

        for index in range(number):
            content_type, extension = random.choice(self.SAMPLE_CONTENT_TYPES)
            kwargs = {'mime_type': content_type}
            # put an uploaded filename on some random set of files
            if random.random() < 0.5:
                random_chars = (''.join(
                    random.sample(string.letters + string.digits, 12)))
                kwargs['_blobinfo_uploaded_filename'] = (
                    'testfile-%s-%s.%s' % (index, random_chars, extension))
            output_filename = files.blobstore.create(**kwargs)
            with files.open(output_filename, 'a') as outfile:
                input_bytes = '1' * (random.randint(100, maxsize))
                outfile.write(input_bytes)
            files.finalize(output_filename)

        context['message'] = ('%s blob%s created. ' %
                              (number, number > 1 and 's' or '') +
                              '<a href="/view-blobs">View blobs</a>')
        self.render_response('create.html', **context)
Example 40
    def finalize_job(cls, mapreduce_state):
        """See parent class."""
        output_sharding = cls._get_output_sharding(
            mapreduce_state=mapreduce_state)
        if output_sharding != cls.OUTPUT_SHARDING_INPUT_SHARDS:
            state = cls._State.from_json(mapreduce_state.writer_state)
            files.finalize(state.filenames[0])

        finalized_filenames = cls.get_filenames(mapreduce_state)
        state = cls._State(finalized_filenames, [])
        mapreduce_state.writer_state = state.to_json()
Example 41
 def generate_attachments(self, project):
     file_string = self.generate_attachments_zip(project)
     blob_file_name = files.blobstore.create(mime_type=u'application/octet-stream')
     with files.open(blob_file_name, u'a') as f:
         f.write(file_string)
     files.finalize(blob_file_name)
     if project.blob_key:
         blobstore.delete(project.blob_key)
     project.blob_key = files.blobstore.get_blob_key(blob_file_name)
     project.filename = u'{}.zip'.format(project.internal_name)
     project.put()
Example 42
 def _save(self, name, content):
     objectname = self._gcs_path(name)
     writable_name = files.gs.create(
         objectname,
         mime_type='application/vnd.google-earth.kml+xml',
         acl="public-read")
     with files.open(writable_name, 'a') as fp:
         fp.write(content.read())
     files.finalize(writable_name)
     logging.debug("_save wrote %s " % objectname)
     return name
Example 43
    def _save(self, name, content):
        file_name = files.blobstore.create(
            mime_type='application/octet-stream',
            _blobinfo_uploaded_filename=name)

        with files.open(file_name, 'a') as f:
            content.seek(0)
            f.write(content.read())

        files.finalize(file_name)
        return name
Example 44
    def uploadImage(b64image):
        file_name = files.blobstore.create(mime_type='image/png')

        with files.open(file_name, 'a') as f:
            f.write(b64image)

        files.finalize(file_name)

        key = files.blobstore.get_blob_key(file_name)

        return key
Example 45
    def setUp(self):
        super(FileTestCase, self).setUp()

        # Create a blob and blob_reader for testing.
        filename = blobstore_files.blobstore.create(
            mime_type='application/octet-stream')
        with blobstore_files.open(filename, 'a') as fp:
            fp.write('Blobstore!')
        blobstore_files.finalize(filename)
        self.blob_key = blobstore_files.blobstore.get_blob_key(filename)
        self.blob_reader = blobstore.BlobReader(self.blob_key)
Example 46
def createCloudStorageFiles(files_num, content):
    """Create cloudstorage file for test"""
    file_names = []
    for num in range(files_num):
        file_path = "/gs/" + TEST_BUCKET_NAME + "/contents_" + str(num)
        file_name = files.gs.create(file_path)
        for text in content:
            with files.open(file_name, 'a') as fp:
                fp.write(text + "\n")
        files.finalize(file_name)
        file_names.append(file_path)
    return file_names
Example 47
 def create():
     from google.appengine.ext import blobstore
     from google.appengine.api import files
     file_name = files.blobstore.create(
         mime_type='text/javascript',
         _blobinfo_uploaded_filename=simplr.random())
     with files.open(file_name, 'a') as f:
         f.write(content)
     files.finalize(file_name)
     blob_key = files.blobstore.get_blob_key(file_name)
     logging.info('Backup Success: blob key is %s' % blob_key)
     return 'Backup Was Successful'
Example 48
def write_blob(upload_file):
    file_name = files.blobstore.create(mime_type=upload_file.type)

    # Open the file and write to it
    with files.open(file_name, 'a') as f:
        f.write(upload_file.value)

    # Finalize the file. Do this before attempting to read it.
    files.finalize(file_name)

    # Get the file's blob key
    return str(files.blobstore.get_blob_key(file_name))
Example 49
 def run(self,
         file_names,
         bucket_name):
   # Save filenames to cloud storage.
   # If the same filename already exists, it will be overwritten.
   print("/gs/"+bucket_name+TARGETS_FILE_PATH)
   target_file = files.gs.create("/gs/"+bucket_name+TARGETS_FILE_PATH)
   #Append the filename to the Google Cloud Storage object.
   for file_name in file_names:
     with files.open(target_file, 'a') as fp:
       fp.write(file_name+"\n")
   files.finalize(target_file)
Example 50
    def post(self):
        
        QL.Results = []          
        for key, value in self.request.params.items():
            if isinstance(value, cgi.FieldStorage):
                upload = value
                if upload.type:
                    mimetype = upload.type
                else:
                    mimetype = 'application/octet-stream'
                if upload.filename:
                    filename = urllib.quote(str(upload.filename.encode('utf8')))
                else:
                    filename = "no-name-" + str(time())[-6:]
                    
                try:     
                    title = strftime("%Y%m%d",gmtime()) + '/' + filename
                    filepath = '/gs/%s/%s' % (BUCKET,title)             
                    
                    real_filepath = files.gs.create(
                        filename=filepath,
                        acl='bucket-owner-full-control',
                        mime_type=mimetype,
                        cache_control='no-cache')
                             
                    with files.open(real_filepath, 'a') as fp:
                        fp.write(upload.value)
                    
                    files.finalize(real_filepath)
                    
                    # template_values = {
                        # 'message':'Upload Success!',
                        # 'origin_url':"http://commondatastorage.googleapis.com/" + filepath[4:]    
                    # }
                    
#                    origin_url = "http://commondatastorage.googleapis.com/" + filepath[4:]
                    origin_url = 'http://'+self.request.host + '/view/' + filepath[4:]
                    
                    QL.add(str(upload.filename.encode('utf8')), 'Success', origin_url)
                    #print str(QL.Results)
                    #self.response.write(str(QL.Results))
                    #self.out(template_values)           
                    

                except BaseException, e:
                    #raise(e)
#                    template_values = {
#                        'message':'Upload Failed!'               
#                    }
                    #self.out(template_values)                    
                    QL.add(str(upload.filename.encode('utf8')), 'Failed', 'None')
Example 51
 def finalize_job(cls, mapreduce_state):
     """See parent class."""
     output_sharding = cls._get_output_sharding(
         mapreduce_state=mapreduce_state)
     if output_sharding != cls.OUTPUT_SHARDING_INPUT_SHARDS:
         state = cls._State.from_json(mapreduce_state.writer_state)
         files.finalize(state.filenames[0])
     # Keep writing for 183 clients.
     # TODO(user): Remove after 184 is out.
     finalized_filenames = cls.get_filenames(mapreduce_state)
     state = cls._State(finalized_filenames, [])
     mapreduce_state.writer_state = state.to_json()
Example 52
    def post(self, unused_key):
        """Creates a file from the posted data."""

        filename = files.blobstore.create()

        f = files.open(filename, "a")
        f.write(self.request.get('text'))
        f.close()

        files.finalize(filename)

        self.redirect('/')
Example 53
    def post(self):

        # user = users.get_current_user()
        # userQuery = User.gql("WHERE name='{}'".format(user.nickname())).get()
        patient = Patient()
        patient.firstName = cgi.escape(self.request.get('firstName'))
        patient.lastName = cgi.escape(self.request.get('lastName'))
        patient.middleName = cgi.escape(self.request.get('middleName'))
        patient.age = cgi.escape(self.request.get('age'))
        patient.leftEyeRating = cgi.escape(self.request.get('leftEyeRating'))
        patient.rightEyeRating = cgi.escape(self.request.get('rightEyeRating'))
        patient.note = cgi.escape(self.request.get('note'))
        patient.join_date = datetime.datetime.now().date()
        patient.sex = cgi.escape(self.request.get('sex'))
        patient.leftEyeNote = cgi.escape(self.request.get('leftEyeNote'))
        patient.rightEyeNote = cgi.escape(self.request.get('rightEyeNote'))
        patient.clinicNumber = 1
        patient.otherNotes = cgi.escape(self.request.get('otherNotes'))

        upload_files = self.get_uploads('inputLeftEye')
        if (len(upload_files)):
            # logging.ERROR('Start guestbook signing request')
            blob_info = upload_files[0]
            patient.leftEyePhoto = str(upload_files[0].key())
            patient.leftEyePhotoURL = str(blob_info.key())

        else:
            leftimg = cgi.escape(self.request.get('inputLeftEyeHidden'))
            if (len(leftimg)):
                #reconstruct and store image
                leftimg = base64.b64decode(leftimg)
                # image/jpeg
                file_name = files.blobstore.create(mime_type='image/jpeg')

                # Open the file and write to it
                with files.open(file_name, 'a') as f:
                    f.write(leftimg)

                # Finalize the file. Do this before attempting to read it.
                files.finalize(file_name)

                # Get the file's blob key
                blob_key = files.blobstore.get_blob_key(file_name)
                patient.leftEyePhotoURL = str(blob_key)

        upload_files2 = self.get_uploads('inputRightEye')
        if (len(upload_files2)):
            blob_info2 = upload_files2[0]
            patient.rightEyePhotoURL = str(blob_info2.key())
            patient.rightEyePhoto = upload_files2[0].key()

        patient.put()
Example 54
def render_text_into_blob(text):
    #text = realestate.email if realestate.email else ''

    from pybmp.bmpfont_arial_12 import font_data
    from pybmp.bmp import BitMap, Color

    bmp = BitMap(250, 22, Color.WHITE)
    bmp.setFont(font_data)
    bmp.setPenColor(Color.BLACK.darken())
    bmp.drawText(text, 4, 4)
    #bmp.drawLine( 0, 0, 10, 10)

    f = StringIO()
    f.write(bmp.getBitmap())
    f.flush()

    file_name = files.blobstore.create(mime_type='image/bmp',
                                       _blobinfo_uploaded_filename=str(
                                           random.randint(0, 100000)))
    img = images.Image(image_data=f.getvalue())

    # img = images.Image.open(f)
    img.resize(width=250, height=22)
    myfile = files.open(file_name, 'a')
    myfile.write(
        img.execute_transforms(output_encoding=images.JPEG, quality=100))
    myfile.close()

    files.finalize(file_name)

    blob_key = files.blobstore.get_blob_key(file_name)

    for i in range(1, 10):
        if not blob_key:
            time.sleep(0.05)
            blob_key = files.blobstore.get_blob_key(file_name)
        else:
            break

    if not blob_key:
        logging.error(
            "could not obtain the blob_key; there is a leak in the blobstore!")
        abort(500)

    return blob_key

Example 55
    def write_blob(self, data, info):
        blob = files.blobstore.create(mime_type=info['type'],
                                      _blobinfo_uploaded_filename=info['name'])

        #        image = images.Image(image_data=data)
        #        image.im_feeling_lucky()
        #        image.resize(width=980)
        #        image.execute_transforms(output_encoding=images.JPEG)
        #        blob = files.blobstore.create(mime_type='image/jpeg')
        with files.open(blob, 'a') as f:
            f.write(data)
        files.finalize(blob)
        return files.blobstore.get_blob_key(blob)
Example 56
	def post(self):
		lang = self.request.get('lang')
		extracted_data = ''
		file_name = files.blobstore.create(mime_type='text/json', _blobinfo_uploaded_filename='extract_' + lang + time.strftime("%y%m%d") + '.json')
		with files.open(file_name, 'a') as f:
			ancestor = ndb.Key('Language', lang)
			for candidate in SuspectedEncoding.query(ancestor=ancestor):
				f.write(serializer.encode({
					'key': candidate.key.id(), 
					'approved': candidate.approved,
					'rejected': candidate.rejected}) + '\n')
		files.finalize(file_name)
		logging.info('finished writing file ' + file_name)
Example 57
 def create(cls, team, datafile):
     file_name = files.blobstore.create(
         mime_type='application/octet-stream')
     with files.open(file_name, 'a') as f:
         for chunk in datafile.chunks():
             f.write(chunk)
     files.finalize(file_name)
     blob_key = files.blobstore.get_blob_key(file_name)
     braindata = cls(blob=blobstore.BlobInfo.get(blob_key),
                     team=team,
                     filename=datafile.name,
                     parent=team.group)
     braindata.put()
Example 58
def gs_write(name, mime, content, cache='no-cache'):
    if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
        fn = files.gs.create(
            '/gs/' + name,
            mime_type=mime,
            acl='public-read',
            cache_control=cache,
        )
        with files.open(fn, 'a') as f:
            f.write(content)
        files.finalize(fn)
    else:
        logging.info('gs write: %s, %s, %s', mime, name, len(content))
Example 59
    def finalize(self, ctx, shard_number):
        """Finalize writer shard-level state.

        Args:
          ctx: an instance of context.Context.
          shard_number: shard number as integer.
        """
        mapreduce_spec = ctx.mapreduce_spec
        output_sharding = _get_output_sharding(
            mapper_spec=mapreduce_spec.mapper)
        if output_sharding == self.OUTPUT_SHARDING_INPUT_SHARDS:

            files.finalize(self._filename)
Example 60
    def finalize_job(cls, mapreduce_state):
        """Finalize job-level writer state.

        Args:
          mapreduce_state: an instance of model.MapreduceState describing
            the current job.
        """
        state = BlobstoreOutputWriter._State.from_json(
            mapreduce_state.writer_state)
        files.finalize(state.filename)
        state.filename = files.blobstore.get_file_name(
            files.blobstore.get_blob_key(state.filename))
        mapreduce_state.writer_state = state.to_json()