Exemplo n.º 1
0
 def post(self):
     """Create or update a Poem from the submitted form fields.

     Branches:
       * key + title + text  -> update the existing entity.
       * title + text only   -> create a new Poem.
       * otherwise           -> re-render the form with an error.
     """
     title = self.request.get("title")
     text = self.request.get("text")
     key = self.request.get("key")
     page = self.request.get("page")
     written = self.request.get("written")
     if key and title and text:
         a = db.get(key)
         a.title = title
         a.text = text
         # webapp's request.get() returns '' (never None) for a missing
         # parameter, so the old `== None` branches could never fire.
         # An or-default is equivalent for both '' and None.
         a.page = page or ''
         a.written = written or ''
         db.save(a)
         self.redirect("/")
     elif title and text:
         a = Poem(title=title, text=text, page=page, written=written)
         a.put()
         self.redirect("/")
     else:
         # Incomplete form: echo the partial poem back with an error.
         error = "incomplete submission"
         poem = Poem(title=title, text=text, page=page, written=written)
         self.render_index(poem, error, "")
Exemplo n.º 2
0
    def from_thinpack(cls, pack_store, f, indexer, resolve_ext_ref):
        """Complete a thin pack in *f* and persist it to blobstore/datastore.

        A thin pack references objects that live outside the pack itself.
        This appends those external objects, rewrites the header and
        trailing SHA, stores the finished pack as a blob, records one
        PackStoreIndex row per object, and returns a verified Pack.

        Args:
            pack_store: datastore entity describing the pack (mutated:
                data, size, checksum, sha1 are filled in here).
            f: seekable in-memory file-like object holding the thin pack
                (f.getvalue() is called, so presumably a StringIO/BytesIO).
            indexer: iterable of (sha, offset, crc32) entries; also
                exposes ext_refs(), the externally-referenced SHAs.
            resolve_ext_ref: callable mapping a 20-byte binary SHA to
                (type_num, data) for the missing object.

        Returns:
            The completed Pack after check_length_and_checksum().
        """
        entries = list(indexer)

        # Update the header with the new number of objects.
        f.seek(0)
        write_pack_header(f, len(entries) + len(indexer.ext_refs()))

        # Rescan the rest of the pack, computing the SHA with the new header.
        # end_ofs=-20 excludes the old trailing pack SHA from the digest.
        new_sha = compute_file_sha(f, end_ofs=-20)

        # Complete the pack: append each externally-referenced object,
        # feeding the written bytes into the running SHA.
        for ext_sha in indexer.ext_refs():
            assert len(ext_sha) == 20  # binary SHA-1, not hex
            type_num, data = resolve_ext_ref(ext_sha)
            offset = f.tell()
            crc32 = write_pack_object(f, type_num, data, sha=new_sha)
            entries.append((ext_sha, offset, crc32))
        pack_sha = new_sha.digest()
        f.write(pack_sha)
        #f.close()

        # Write the finished pack bytes into a new blobstore blob.
        blob_name = files.blobstore.create(
            mime_type='application/octet-stream')
        with files.open(blob_name, 'a') as blob:
            blob.write(f.getvalue())
        files.finalize(blob_name)

        # Store pack metadata in the datastore.
        pack_store.data = files.blobstore.get_blob_key(blob_name)
        #pack_store.sha1 #sha's are added when writing the index
        pack_store.size = f.tell()
        pack_store.checksum = sha_to_hex(pack_sha)
        pack_store.save()

        # Write the index: one PackStoreIndex row per object, saved in a
        # single batch together with the re-saved pack_store entity.
        pack_indexes = [pack_store]
        for (name, offset, entry_checksum) in entries:
            idx = PackStoreIndex(packref=pack_store,
                                 sha=sha_to_hex(name),
                                 offset=offset,
                                 crc32=entry_checksum)
            pack_store.sha1.append(sha_to_hex(name))
            pack_indexes.append(idx)
        db.save(pack_indexes)

        # Add the pack to the store and return it.
        final_pack = Pack(pack_store)
        final_pack.check_length_and_checksum()
        return final_pack
Exemplo n.º 3
0
def newDatabase(owner, dbid=None):
  """Create and persist a Database entity for *owner*.

  Args:
      owner: the owner to attach the new database to.
      dbid: optional explicit id. When omitted, a fresh unique id is
          generated; when given, it must not already exist for *owner*.

  Returns:
      The saved Database entity, or None if *owner* already has a
      database with the requested dbid.
  """
  # Was logging.error; this is a trace message, not an error condition.
  logging.debug('newDatabase dbid: %s', dbid)
  if not dbid:
    # Generate ids until one is globally unused.
    dbid = generateId()
    while Database.all().filter("dbid =", dbid).count() != 0:
      dbid = generateId()
  elif Database.all().filter("owner =", owner).filter("dbid =", dbid).count() != 0:
    # Explicit id already taken by this owner.
    return None

  logging.info('using %s', dbid)
  # Renamed from `db` so the local doesn't shadow the datastore module.
  database = Database(dbid=dbid, owner=owner)
  database.save()
  return database
Exemplo n.º 4
0
 def post(self):
     """Persist a new or edited comment, then redirect to the listing."""
     title = self.request.get("title")
     text = self.request.get("text")
     key = self.request.get("key")
     user_id = self.request.cookies.get("user-id")
     has_body = title and text
     if key and has_body:
         # Edit path: load the existing entity and overwrite its fields.
         existing = db.get(key)
         existing.title = title
         existing.text = text
         db.save(existing)
     elif has_body:
         # Create path: attribute the comment to the cookie's user id.
         Comment(title=title, text=text, username=user_id).put()
     self.redirect('/comments')
Exemplo n.º 5
0
    def from_thinpack(cls, pack_store, f, indexer, resolve_ext_ref):
        """Complete a thin pack in *f*, store it, and return a Pack.

        Thin packs reference objects outside the pack; those objects are
        resolved and appended here, the header/object count and trailing
        SHA are rewritten, the bytes go into a blobstore blob, and one
        PackStoreIndex row is saved per object.

        Args:
            pack_store: datastore entity for the pack; its data, size,
                checksum and sha1 list are populated here.
            f: seekable in-memory file-like object with the thin pack
                (f.getvalue() is used, so presumably StringIO/BytesIO).
            indexer: iterable of (sha, offset, crc32); also provides
                ext_refs(), the externally-referenced binary SHAs.
            resolve_ext_ref: maps a 20-byte SHA to (type_num, data).

        Returns:
            The Pack after check_length_and_checksum() passes.
        """
        entries = list(indexer)

        # Update the header with the new number of objects.
        f.seek(0)
        write_pack_header(f, len(entries) + len(indexer.ext_refs()))

        # Rescan the rest of the pack, computing the SHA with the new header.
        # end_ofs=-20 leaves the stale trailing SHA out of the digest.
        new_sha = compute_file_sha(f, end_ofs=-20)

        # Complete the pack: append every external object while updating
        # the running SHA with each write.
        for ext_sha in indexer.ext_refs():
            assert len(ext_sha) == 20  # binary SHA-1, not hex
            type_num, data = resolve_ext_ref(ext_sha)
            offset = f.tell()
            crc32 = write_pack_object(f, type_num, data, sha=new_sha)
            entries.append((ext_sha, offset, crc32))
        pack_sha = new_sha.digest()
        f.write(pack_sha)
        # f.close()

        # Persist the completed pack bytes to a blobstore blob.
        blob_name = files.blobstore.create(mime_type="application/octet-stream")
        with files.open(blob_name, "a") as blob:
            blob.write(f.getvalue())
        files.finalize(blob_name)

        # Record pack metadata.
        pack_store.data = files.blobstore.get_blob_key(blob_name)
        # pack_store.sha1 #sha's are added when writing the index
        pack_store.size = f.tell()
        pack_store.checksum = sha_to_hex(pack_sha)
        pack_store.save()

        # Write the index: one row per object, batch-saved together with
        # the re-saved pack_store entity.
        pack_indexes = [pack_store]
        for (name, offset, entry_checksum) in entries:
            idx = PackStoreIndex(packref=pack_store, sha=sha_to_hex(name), offset=offset, crc32=entry_checksum)
            pack_store.sha1.append(sha_to_hex(name))
            pack_indexes.append(idx)
        db.save(pack_indexes)

        # Add the pack to the store and return it.
        final_pack = Pack(pack_store)
        final_pack.check_length_and_checksum()
        return final_pack
Exemplo n.º 6
0
  def post(self, **kwargs):
    """Re-number a set of images and promote the first as main image.

    Reads a comma-separated list of ImageFile keys from the POST body,
    assigns 1-based positions in the given order, and sets the owning
    property's main_image_url from the first image's title.
    """
    self.request.charset = 'utf-8'
    keys = self.request.POST['keys'].split(',')

    to_save = []
    # enumerate(..., 1): positions are 1-based, matching the old i+1.
    for position, image in enumerate(ImageFile.get(keys), 1):
      image.position = position

      # Ownership check (translated from the original Spanish note):
      # unless ultraadmin, the caller may only touch images belonging
      # to their own real estate — guards against tampered key lists.
      if not self.has_role('ultraadmin') and str(image.realestate.key()) != self.get_realestate_key():
        self.abort(500)

      to_save.append(image)

    db.save(to_save)
    # `prop` rather than `property` — avoids shadowing the builtin.
    # NOTE(review): an empty `keys` list would raise IndexError here,
    # same as the original — confirm callers always send at least one.
    prop = self.mine_or_404(str(to_save[0].property.key()))
    prop.main_image_url = to_save[0].title
    prop.save(build_index=False)

    self.response.write('ok')
Exemplo n.º 7
0
    def post(self):
        """Persist one AnswerSet plus a SavedResult per request parameter.

        The AnswerSet is saved twice: first so SavedResults can reference
        it, then again once the non-blank answer count is known.
        """
        user = users.get_current_user()

        # Create the answer set up front so createSavedResult can link
        # each result to an already-persisted parent.
        answerSet = AnswerSet()
        answerSet.author = user
        db.save(answerSet)

        elements_to_save = []
        real_answer_count = 0
        # One SavedResult per request parameter; count non-blank answers.
        # (The old `vals` list of {'arg', 'val'} dicts was never used.)
        for arg in self.request.arguments():
            answer = self.request.get(arg)
            # `is not None` instead of `!= None`; get() normally returns
            # '' for missing params, so the strip() check does the work.
            if answer is not None and answer.strip() != "":
                real_answer_count += 1
            elements_to_save.append(self.createSavedResult(answerSet, arg, answer))

        # Re-save the set with its final count, then batch-save results.
        answerSet.count = real_answer_count
        db.save(answerSet)
        db.save(elements_to_save)

        self.redirect("/")
Exemplo n.º 8
0
 def flush_memcache_to_db():
     """Drain queued article rank/velocity updates from memcache to the datastore.

     Reads the article queue ("article_queue_1".."article_queue_N", where N
     is stored under "article_queue"), pulls each article's rank (r2) and
     velocity (v2) from memcache, and writes changed ArticleLib models to
     the datastore in batches of ~100.

     Returns:
         The total number of article records saved.
     """
     keys=[];total=0
     index=memcache.get("article_queue")
     if not index:
         logging.error("article_queue not found in memcache")
         return total
     # Queue slots are 1-based: article_queue_1 .. article_queue_<index>.
     for i in range(1,index+1):
         keys.append("article_queue_"+str(i))
     articles=memcache.get_multi(keys)
     logging.info("pulled "+str(len(articles))+" articles from memcache")

     r2_keys=[]; v2_keys=[];
     for key,article in articles.items(): #TODO: skip duplicate articles
         r2_keys.append("article_r2_"+article['key'])
         v2_keys.append("article_v2_"+article['key'])

     # Get article ranks. key[11:] strips the "article_r2_" prefix
     # (11 characters) to recover the bare article key.
     article_ranks={}
     r2_records=memcache.get_multi(r2_keys)
     for key,rank in r2_records.items():
         article_key=key[11:]
         article_ranks[article_key]=rank
     logging.info("pulled "+str(len(article_ranks))+" article_ranks from memcache")

     # Get article velocities. "article_v2_" is also 11 characters.
     article_velocities={}
     v2_records=memcache.get_multi(v2_keys)
     for key,velocity in v2_records.items():
         article_key=key[11:]
         article_velocities[article_key]=velocity
         #logging.info("velocity for article "+article_key+" == "+str(velocity))
     logging.info("pulled "+str(len(article_velocities))+" article_velocities from memcache")

     records=[];num_records=0;update=False
     for key,article in articles.items(): #TODO: skip duplicates
         articlemodel=ArticleLib.getByKey(article['key'])
         if not articlemodel:
             pass;#logging.error("article not found "+article['key'])
         else: 
             update=False
             if article_velocities.has_key(article['key']):
                 # NOTE(review): this constant (close to 2**62) looks like
                 # a bias removed from velocities as stored in memcache —
                 # confirm it matches the value added on the writer side.
                 adjusted_article_velocity=article_velocities[article['key']]-4611686018427387403;

                 if articlemodel.v2!=adjusted_article_velocity:
                     update=True
                     #logging.info("setting v2 from "+str(articlemodel.v2)+" to "+str(article_velocities[article['key']])+" for article "+article['key'])
                     articlemodel.v2=adjusted_article_velocity
                     #logging.info("v2="+str(articlemodel.v2))

             else:
                 pass;#logging.error("no velocity found for article "+article['key'])
             if article_ranks.has_key(article['key']):
                 if articlemodel.r2!=article_ranks[article['key']]:
                     update=True
                     articlemodel.r2=article_ranks[article['key']]
                 #if article_ranks[article['key']]>0:
                 #    logging.info("r2="+str(article_ranks[article['key']]))

             #TODO: also flush the 'numapprovals' property
             # Only dirty models are queued; flush in batches so a single
             # db.save never carries more than ~100 entities.
             if update:
                 records.append(articlemodel)
                 #logging.info("v2="+str(articlemodel.v2)+" for article "+str(articlemodel.key()))
                 num_records=num_records+1
                 if num_records >100:
                     logging.info("saving 100 records")
                     db.save(records)
                     total=total+num_records
                     records=[];num_records=0

     # Save whatever is left in the final partial batch.
     if records:
         logging.info("saving "+str(num_records)+" records")
         total=total+num_records
         db.save(records)
     return total