Example #1
File: process.py Project: sagism/anyway
def import_to_datastore():
    from models import User, Marker
    from google.appengine.ext import db

    my_user = User.all().filter("email", "*****@*****.**").get()
    i = 0
    markers = []
    for data in import_data():
        #old_marker = Marker.get_by_key_name(str(data["id"]))
        #if old_marker:
        #    old_marker.delete()

        marker = Marker(
            key_name=str(data["id"]),
            user=my_user,
            title="Accident",
            description=data["description"].decode("utf8"),
            address=data["address"].decode("utf8"),
            location=db.GeoPt(data["lat"], data["lng"]),
            type=Marker.MARKER_TYPE_ACCIDENT,
            subtype=data["severity"],
            created=data["date"],
            modified=data["date"],
        )
        #marker.put()
        #marker.update_location()
        markers.append(marker)

        print marker.key().name()
        if len(markers) == 100:
            print "Writing to datastore..."
            db.put(markers)
            markers = []

    # Write any markers left over in the final, partial batch.
    if markers:
        print "Writing to datastore..."
        db.put(markers)
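A note on the pattern above: db.put() accepts either a single model instance or a list of instances, so writes are typically buffered and flushed in fixed-size batches to cut down on RPC round trips. A minimal, hypothetical helper (not part of this project) might look like:

def put_in_batches(entities, batch_size=100):
    # db.put accepts a list, so each slice is written in one RPC.
    for i in range(0, len(entities), batch_size):
        db.put(entities[i:i + batch_size])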
Example #2
    def put(self):
        """
        Extends put so that it writes vaules to memcache as well as the
        datastore, and keeps them in sync, even when datastore writes fails.

        Returns the session object.
        """
        try:
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (str(self.key())), self)
        except:
            # new session, generate a new key, which will handle the
            # put and set the memcache
            db.put(self)

        self.last_activity = datetime.datetime.now()

        try:
            self.dirty = False
            db.put(self)
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (str(self.key())), self)
        except:
            self.dirty = True
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (str(self.key())), self)

        return self
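The bare except clauses above swallow every error, including programming mistakes. A hedged variant of the same write-through step that catches only the datastore errors the docstring alludes to (cache_key and session are placeholder names):

try:
    session.dirty = False
    db.put(session)
    memcache.set(cache_key, session)
except (db.Timeout, db.InternalError):
    # Keep the cached copy authoritative until the datastore write succeeds.
    session.dirty = True
    memcache.set(cache_key, session)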
Example #3
  def post(self):
    if not self.request.get('sites'):
      self.error(400)  # Bad request.
      return
    user = users.get_current_user()
    sites = simplejson.loads(self.request.get('sites'))

    # Newer clients include a 'now' parameter. If it exists, use it, otherwise,
    # use the current UTC time.
    day = datetime.datetime.fromtimestamp(
        float(self.request.get('now', time.time()))).date()

    updates = []
    for (site, seconds) in sites.iteritems():
      site_time = SiteTime.gql('WHERE user = :1 AND site = :2 AND day = :3',
                               user, site, day).get()
      if not site_time:
        site_time = SiteTime(user=user,
                             seconds=0.0,
                             site=site,
                             day=day)
      site_time.seconds += seconds
      updates.append(site_time)
    try:
      db.put(updates)
    except db.TransactionFailedError:
      self.error(500)  # Internal server error.
      return

    user_info = UserInfo.gql('WHERE user = :1', user).get()
    if not user_info:
      user_info = UserInfo(user=user)
    user_info.last_update = datetime.datetime.now()
    user_info.put()
    self.response.set_status(200)
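Because the handler rolls its own get-or-create, two concurrent posts can race and create duplicate SiteTime rows for the same user/site/day. When a natural key exists, Model.get_or_insert() does the get-or-create transactionally; a sketch assuming a composed key name (the key format is hypothetical):

key_name = '%s|%s|%s' % (user.user_id(), site, day.isoformat())
site_time = SiteTime.get_or_insert(key_name, user=user, site=site,
                                   day=day, seconds=0.0)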
Example #4
    def post(self):
            import time
            date = time.strftime('%Y-%m-%d', time.gmtime())
            k = db.Key(self.request.get('clientkey'))
            invoice = Invoices()
            invoice.client = k
            invoice.date = date
            invoice.status = 'draft'
            invoice.total = 0.00
            invoice.put()
            iid = invoice.key()
            invoiceid = str(invoice.inum)
            billedtotal = 0.00
            billedtime = 0.00
            # there is a form that lists the projects, allowing the user to 
            # check one or more the field data is the key for that project
            # get_all gives an array of keys returned which we want to pull 
            # time from
            projects = self.request.get_all('projects')
            # start looping through the keys
            for projectkey in projects:
                # make the string key an actual key object
                pkey = db.Key(projectkey)
                # get everything out of the time store with that project 
                # associated and which has a status of logged.
                times_query = Time.all()
                times_query.filter('project =', pkey)
                times_query.filter('status =', 'logged')
                times = times_query.fetch(100)
                
                # Note: the loop variable shadows the `time` module imported
                # at the top of this handler.
                for time in times:
                    time.invoice = iid
                    time.status = "invoiced"
                    db.put(time)

                    billedtime = float(billedtime) + float(time.hours)
                    billedtotal = float(billedtotal) + float(time.total)
                    
                project_update = db.get(pkey)
                project_update.status = "empty"
                project_update.put()
            
            

            totalhoursbilled = "%.2f" % billedtime
            totalbill = "%.2f" % billedtotal
            totalbill = float(totalbill)

            invoice_update = db.get(iid)
            invoice_update.totalhours = float(totalhoursbilled)
            invoice_update.totalbill = totalbill
            invoice_update.put()

            # Note: pkey still points at the last project from the loop above,
            # so only that project's billed field is set, to the running total.
            project_update = db.get(pkey)
            project_update.billed = billedtime
            project_update.put()
            action = '/invoice?iid=' + str(iid)            
            self.redirect(action)
Example #5
File: mentor.py Project: ajaksu/Melange
  def _updateField(self, entity, entity_properties, name):
    """Called when the fields of the mentor are updated

      When status is changed to invalid, removes the Mentor from all Student
      Proposals possible mentor lists.
    """

    from soc.logic.models.student_proposal import logic \
        as student_proposal_logic

    value = entity_properties[name]

    if name == 'status' and value != entity.status and value == 'invalid':
      fields = {'org': entity.scope}

      # TODO make this work for more than 1000 entities
      proposals_query = student_proposal_logic.getQueryForFields(fields)

      # store all updated proposals
      changed = []

      for proposal in proposals_query:

        if proposal.possible_mentors.count(entity.key()):
          # remove from list and add to changed
          proposal.possible_mentors.remove(entity.key())
          changed.append(proposal)

      # store all changed proposals
      db.put(changed)

    return super(Logic, self)._updateField(entity, entity_properties, name)
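On the TODO above: the old db API capped a single fetch at 1000 results, so larger result sets were paged with query cursors. A hedged sketch of the loop, assuming proposals_query is a db.Query:

while True:
    batch = proposals_query.fetch(500)
    if not batch:
        break
    for proposal in batch:
        if proposal.possible_mentors.count(entity.key()):
            proposal.possible_mentors.remove(entity.key())
            changed.append(proposal)
    proposals_query.with_cursor(proposals_query.cursor())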
Example #6
	def get(self):
		category_key = self.request.get('id')
		category = db.get(category_key)
		category.reverse = True
		db.put(category)
		memcache.delete(category.getReviewPairsKey())
		self.redirect('category?id=' + category_key)
Example #7
  def post(self):
    if ( self.request.get("img1") ):
      # Find the highest FrameNum so far; the next ID is one greater.
      PresentTFrame = TblFrame.all().order('-FrameNum').fetch(limit=1)
      NextId = 0
      for r in PresentTFrame:
        NextId = r.FrameNum + 1
      TFrame = TblFrame()
      TFrame.FrameNum = NextId

      Img = images.resize(self.request.get("img1"), 600, 600)

      TFrame.FrameImage = db.Blob(Img)

      TFrame.FrameDate = datetime.datetime.today() + datetime.timedelta(hours=9)
      TFrame.FrameIPAddress = os.environ['REMOTE_ADDR']

      db.put(TFrame)

      cUrl = "/FrameForm"
      self.redirect( cUrl )

    else:
      self.redirect( '/Err' )
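Computing NextId as max(FrameNum) + 1 can race when two posts arrive at once. A hedged alternative is to let the datastore hand out unique IDs via db.allocate_ids (the key path below is illustrative):

handmade_key = db.Key.from_path('TblFrame', 1)
first_id, last_id = db.allocate_ids(handmade_key, 1)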
Example #8
File: __init__.py Project: hebkhan/server
    def post(self):
        template_values = {}
        user_data = UserData.current()

        status_file = StringIO.StringIO(self.request_string("status_file"))
        reader = csv.reader(status_file)
        student_list = []
        for line in reader:
            student_email = line[0]
            student_status = line[1]
            student_comment = line[2]

            student = SummerStudent.all().filter("email =", student_email).get()
            if student is None:
                logging.error("Student %s not found" % student_email)
                continue

            student.application_status = student_status
            student.comment = student_comment
            if student_status == "Accepted":
                student.accepted = True

            student_list.append(student)

        db.put(student_list)

        self.response.out.write("OK")
        self.response.set_status(200)
Example #9
File: methods.py Project: jamslevy/PQ
    def set(self, quiz_taker, save=False):
		logging.info('setting proficiency level data for %s', quiz_taker.unique_identifier)
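		# Note: this list shadows the unused 'save' keyword argument above.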
		save = []
		pro_scores = {}
		for tl in quiz_taker.topic_levels.fetch(1000):
			try: 
				current_average = pro_scores[tl.topic.proficiency.key()]['average']
				current_count = pro_scores[tl.topic.proficiency.key()]['count']
				current_product = current_average * current_count
				new_sum = current_product + tl.topic_level
				new_count = current_count + 1
				new_average = new_sum / new_count				
			except KeyError: # no current average
				pro_scores[tl.topic.proficiency.key()] = {}
				new_count = 1
				new_average = tl.topic_level
			pro_scores[tl.topic.proficiency.key()]['count'] = new_count
			pro_scores[tl.topic.proficiency.key()]['average'] = new_average

		for pro_key, pro_score in pro_scores.items():
			pl_keyname = str(quiz_taker.unique_identifier) + "_" + str(pro_key)
			pro_score['average'] = int(pro_score['average'])
			proficiency_level = ProficiencyLevel.get_by_key_name(pl_keyname)
			if proficiency_level:
				proficiency_level.proficiency_level = pro_score['average']
			else:
				proficiency_level = ProficiencyLevel(key_name=pl_keyname,
										proficiency=pro_key,
										quiz_taker=quiz_taker,
										proficiency_level=pro_score['average'])
			save.append(proficiency_level)
		if save: db.put(save)
		return save
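The try/except above maintains a running mean incrementally: new_average = (average * count + x) / (count + 1). A compact sketch of the same bookkeeping with collections.defaultdict (hypothetical, not the project's code):

from collections import defaultdict

pro_scores = defaultdict(lambda: {'count': 0, 'average': 0.0})

def add_level(pro_key, level):
    entry = pro_scores[pro_key]
    # incremental mean: avg' = (avg * n + x) / (n + 1)
    entry['average'] = (entry['average'] * entry['count'] + level) / (entry['count'] + 1)
    entry['count'] += 1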
Example #10
File: GaeApp.py Project: benlau/gaescripts
 def upload_model(self,model_class,result):
     from google.appengine.ext import db
     from utils import deserialize
                 
     save = []
     to_delete = []
     for entity in result:
                         
         if "id" in entity:
             existing_entry = model_class.get(db.Key.from_path(model_class.kind(),entity ["id"]))
             if existing_entry:
                 to_delete.append(existing_entry) # Remove the existing entry with numeric ID
             
         object = deserialize(model_class , entity)
                     
         save.append(object)
         
         if len(to_delete) > 100:
             db.delete(to_delete)
             to_delete = []
         if len(save) > 100:
             db.put(save)
             save = []
     
     db.delete(to_delete)    
     db.put(save)
Example #11
    def put(self):
        """
        Extends put so that it writes vaules to memcache as well as the
        datastore, and keeps them in sync, even when datastore writes fails.
        It also uses a db.put(), rather than the ROTModel put, to avoid
        retries on puts. With the memcache layer this optimizes performance,
        stopping on db.Timeout rather than retrying.

        Returns the session object.
        """
        if self.session_key:
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (unicode(self.session_key)), self)
        else:
            # new session, generate a new key, which will handle the
            # put and set the memcache
            self.create_key()

        self.last_activity = datetime.datetime.now()

        try:
            self.dirty = False
            db.put(self)
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (unicode(self.session_key)), self)
        except:
            self.dirty = True
            memcache.set(u"_AppEngineUtilities_Session_%s" % \
                (unicode(self.session_key)), self)

        return self
Example #12
    def post(self):
        session_user = self.get_session_user()
        keys = json.loads(self.request.get('keys'))

        user = models.User.get_user_from_identifier(session_user.identifier)
        orders = []
        books = []
        isbn_list = []
        for key in keys:
            book = db.get(db.Key(key))
            order = models.UserBookOrder(user=user, book=book)
            books.append(str(book))
            orders.append(order)
        db.put(orders)

        person = user.people_singleton[0]
        book_infos = '\n'.join(books)

        queue_mail_task(url='/worker/mail/book_order/',
            params=dict(
                user_key=str(user.key()),
                books=book_infos,
                person_name=str(person),
                person_email_address=user.email,
                person_phone_number=str(person.phones[0])
            ),
            method='POST'
        )
        self.response.out.write(json.dumps(keys))
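If the order puts and the notification must succeed or fail together, the task can be enqueued inside the same transaction. A hedged sketch using taskqueue.add(transactional=True) directly, assuming the orders live in a single entity group (the payload is illustrative):

from google.appengine.api import taskqueue

def _save_and_notify():
    db.put(orders)
    taskqueue.add(url='/worker/mail/book_order/', method='POST',
                  params={'user_key': str(user.key())},
                  transactional=True)

db.run_in_transaction(_save_and_notify)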
Example #13
File: main.py Project: chrisvaughn/pt-mail
	def post(self):
		""" this handler supports http post """
		google_user = google_users.get_current_user()
		user = db.Query(Users).filter('user_id =', google_user.user_id()).get()
		if user is None:
			user = Users(user_id = google_user.user_id(), email = google_user.email())

		emails = self.request.get('email').lower()
		if not emails.strip():
			self.response.set_status(400)
			self.response.out.write('Email is required.')
			return
		emails = emails.split(',')

		added_one = False
		for email in emails:
			if email.strip() == '':
				continue

			if email not in user.pt_emails:
				added_one = True
				user.pt_emails.append(email)

		if not added_one:
			self.response.set_status(400)
			self.response.out.write('Email is required.')
			return

		db.put(user)
		self.response.out.write(json.dumps(user.pt_emails))
Example #14
  def create_namespace_entry(self, namespace, size, number, timestamp):
    """ Puts a namespace into the datastore.

    Args:
      namespace: A string, the namespace.
      size: An int representing the number of bytes taken by a namespace.
      number: The total number of entities in a namespace.
      timestamp: A datetime.datetime object.
    Returns:
      True on success, False otherwise.
    """
    entities_to_write = []
    namespace_stat = stats.NamespaceStat(subject_namespace=namespace,
                               bytes=size,
                               count=number,
                               timestamp=timestamp)
    entities_to_write.append(namespace_stat)

    # All applications are assumed to have the default namespace.
    if namespace != "":
      namespace_entry = metadata.Namespace(key_name=namespace)
      entities_to_write.append(namespace_entry)
    try:
      db.put(entities_to_write)
    except datastore_errors.InternalError, internal_error:
      logging.error("Error inserting namespace info: {0}.".\
        format(internal_error))
      return False
    return True
Example #15
def _put(models,countdown=0):
  batch_size = 50
  to_put = []
  keys = []
  try:
    last_index = 0
    for i,model in enumerate(models):
      to_put.append(model)
      last_index = i
      if (i+1) % batch_size == 0:
        keys.extend(db.put(to_put))
        to_put = []
    keys.extend(db.put(to_put))
    return keys
    
  except apiproxy_errors.DeadlineExceededError:
    keys.extend(db.put(to_put))
    deferred.defer(_put,models[last_index+1:],_countdown=10)
    return keys
  
  except apiproxy_errors.CapabilityDisabledError:
    if not countdown:
      countdown = 30
    else:
      countdown *= 2
    deferred.defer(_put,models,countdown,_countdown=countdown)
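A usage sketch for the helper above (MyModel is a hypothetical db.Model): batches of 50 are written eagerly; on a request deadline the unwritten tail is re-queued with deferred.defer, and during a datastore outage the whole list is retried with a doubling countdown.

keys = _put([MyModel(value=i) for i in range(500)])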
Example #16
 def get(self,time):
   ticker = "yhoo"
   ticker_instance = Stock.get_by_key_name(ticker)
   # time = how many hours back in the past we go
   # loop between now-time and now-time-1
   start_time = datetime.now() - timedelta(hours=int(time))
   end_time = start_time - timedelta(hours=1)
   loop_time = start_time
   
   # Check we aren't already covering this time period
   exists_query = TweetBag.all().filter("stock = ", ticker_instance).filter("time >", end_time).filter("time <", start_time).order("-time")
   if exists_query.count() > 0:
     highest = exists_query.get()
     end_time = highest.time
        
   inserts = []
   while loop_time > end_time:
     num_tweets = 0
     loop_time_5 = loop_time - timedelta(minutes=5)
     for keyword in ticker_instance.keywords:
        tweet_query = Tweet.all().filter("keywords = ", keyword).filter("created_at > ", loop_time_5).filter("created_at < ", loop_time)
        num_tweets += tweet_query.count()
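     # Note: tweet_query below is whichever query the last keyword produced,
     # so sample_tweet is drawn from that keyword only.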
     bagobj = TweetBag(
       sample_tweet = tweet_query.get(),
       stock = ticker_instance,
       count = num_tweets,
       time = loop_time_5
     )
     inserts.append(bagobj)
     loop_time = loop_time_5
   db.put(inserts)
Example #17
def do_task_update_users(force, cursor):
    force = bool(int(force))
    cursor = int(cursor)
    friend_list, data = api._GetFriends(cursor=cursor)
    batch = []
    for friend in friend_list:
        username = friend.GetScreenName()
        isnew = not CurryUser.get_by_key_name(username)
        if force or isnew:
            batch.append(CurryUser(key_name=username))
            taskqueue.add(
                url=('/task/update_links/%s/%d' % (username, (isnew and not force))),
                queue_name='update-links-queue'
                )

    db.put(batch)
    logging.debug('added %d users from %d friends of bot' % (len(batch), len(friend_list)))
    logging.debug('next cursor=%d' % data['next_cursor'])

    if int(data['next_cursor']) != 0:
        taskqueue.add(
                url=('/task/update_users/%d/%d' % (force, int(data['next_cursor']))),
                queue_name='update-users-queue'
                )
    else:
        logging.info('update users: done')

    return 'ok'
Example #18
File: fixtures.py Project: jamslevy/PQ
	def load(self):
		save = []
		logging.info('loading fixture')
		self.fixture_offset = Setting.get_by_key_name('fixture_offset')  
		if self.fixture_offset.status == "update_stats": #instead of 'create_account'
			logging.warning('Load Fixtures Cron Job Hit Twice In a Row')
			print "error -- current status: ", self.fixture_offset.status
			return False
		this_account, this_user, this_quiz_taker = self.get_fixture()
		scores = Scores()
		import random
		correct_prob = random.randint(80,95)
		FIXTURE_PROFICIENCY = self.get_fixture_subject()
		from model.proficiency import Proficiency 
		this_proficiency = random.sample( Proficiency.gql("WHERE status = 'public'").fetch(1000), 1 )[0]
		save_scores = scores.make_scores(this_user, this_proficiency, correct_prob, SCORE_NUM = 10) 
		memcache.set('current_fixture', ( this_account, this_user, this_quiz_taker ), 600000)
		self.fixture_offset.status = "update_stats"
		print this_user.nickname
		save.append(self.fixture_offset)
		save.extend(save_scores)
		db.put(save)
		# open fixture.xml, go to offset.
		# load one name, email pair. register. 

		FIXTURE_PROFICIENCY = self.get_fixture_subject()
Example #19
File: fixtures.py Project: jamslevy/PQ
	def update_stats(self):
		save = []
		self.fixture_offset = Setting.get_by_key_name('fixture_offset')
		if self.fixture_offset.status != "update_stats":
			logging.warning('Update Stats Job Hit Twice In A Row')
			print "error -- current status: ", self.fixture_offset.status
			return False
		(this_account, this_user, this_quiz_taker) = memcache.get('current_fixture')
		logging.info('Updating User Stats for User %s', this_user.unique_identifier)
		from quiztaker.methods import ProficiencyLevels
		pl = ProficiencyLevels()
		pl.set_for_user(this_quiz_taker)
		from accounts.methods import Awards
		awards = Awards()
		# check for new awards
		new_awards = awards.check_all(this_quiz_taker)
		from accounts.methods import Sponsorships
		sponsorships = Sponsorships()
		# check for new sponsorships, both personal and business
		new_sponsorships = sponsorships.check_user(this_user)
		self.fixture_offset.value += 1
		self.fixture_offset.status = "create_account"
		save.append(self.fixture_offset)
		db.put(self.fixture_offset)
		return new_awards, new_sponsorships
Example #20
File: binding.py Project: cerbero/shorty
def bind(domain_string, mydomain):

    """
    Compute and save in the db the next valid assignment for a classic
    short URL.

    Params
        domain_string - a domain : string
        mydomain - the domain that provides the service : string

    Return
        last assignment : string

    """
    last_assignment = ''

    query = db.Query(Domain).filter('name_domain = ', domain_string)
    if query.count() != 0:
        dt = query.fetch(1)[0]
        last_assignment = dt.last_assignament

    while True:
        last_assignment = compute_next(last_assignment)
        short = '/'.join( (mydomain, domain_string, last_assignment) )
        if not functions.isAlredyCustum(short):
            break

    # Note: the model attribute keeps the project's original spelling.
    dt.last_assignament = last_assignment
    db.put(dt)

    return dt.last_assignament
Example #21
def GetSnippetUser(user=None):
  """Try to get the Snipper user from the datastore, create if not.

  Args:
    user: User object.

  Returns:
    SnippetUser object.
  """
  if user is None:
    user = users.get_current_user()
  logging.debug('GetSnippetUser call for %s', user)
  logging.debug('Trying memcache first...')
  memcache_key = 'SnippetUser-' + str(user)
  snippet_user = memcache.get(memcache_key)
  # Added a check for the timezone to ensure the SnipperUser version is updated.
  if snippet_user and snippet_user.timezone:
    logging.debug('Memcache worked, returning.')
    return snippet_user
  else:
    snippet_user = db.Query(SnippetUser).filter('User =', user).get()
    if not snippet_user:
      logging.debug('Adding new Snipper user: %s', user)
      timezone = util.GetUserTimezone(user.nickname())
      snippet_user = SnippetUser()
      snippet_user.User = user  # pylint: disable-msg=C6409
      if timezone:
        snippet_user.timezone = timezone
      db.put(snippet_user)
    memcache.set(memcache_key, snippet_user)
    return snippet_user
Example #22
    def put_expiry_flags(self):
        """Updates the is_expired flags on this Person and related Notes to
        make them consistent with the effective_expiry_date() on this Person,
        and commits the changes to the datastore."""
        import utils
        now = utils.get_utcnow()
        expired = self.get_effective_expiry_date() <= now

        if self.is_expired != expired:
            # NOTE: This should be the ONLY code that modifies is_expired.
            self.is_expired = expired

            # if we neglected to capture the original_creation_date,
            # make a best effort to grab it now, for posterity.
            if not self.original_creation_date:
                self.original_creation_date = self.source_date

            # If the record is expiring (being replaced with a placeholder,
            # see http://zesty.ca/pfif/1.3/#data-expiry) or un-expiring (being
            # restored from deletion), we want the source_date and entry_date
            # updated so downstream clients will see this as the newest state.
            self.source_date = now
            self.entry_date = now

            # All the Notes on the Person also expire or unexpire, to match.
            notes = self.get_notes(filter_expired=False)
            for note in notes:
                note.is_expired = expired

            # Store these changes in the datastore.
            db.put(notes + [self])
Example #23
    def test_addOfficeHour(self):
        b = F.addBuilding("Office Hour Test", "ET")
        l = F.addLocation(b, "2nd", "201")
        s = F.addSchedule("1/1/2000", "1pm", ["Monday", "Tuesday"], "2pm", "1/1/2010")

        oh = F.addOfficeHour(s, l)

        self.assert_(oh.type == "Office Hours")
        self.assert_(oh.location.building.name == "Office Hour Test" and oh.location.building.abbreviation == "ET")
        self.assert_(oh.location.floor == "2nd" and oh.location.room == "201")
        self.assert_(oh.schedule.start_date == "1/1/2000" and oh.schedule.end_date == "1/1/2010")
        self.assert_(oh.schedule.start_time == "1pm" and oh.schedule.end_time == "2pm")
        self.assert_("Monday" in oh.schedule.days and "Tuesday" in oh.schedule.days)

        db.put(oh)

        key = oh.key()

        oh = db.get(key)

        self.assert_(oh.type == "Office Hours")
        self.assert_(oh.location.building.name == "Office Hour Test" and oh.location.building.abbreviation == "ET")
        self.assert_(oh.location.floor == "2nd" and oh.location.room == "201")
        self.assert_(oh.schedule.start_date == "1/1/2000" and oh.schedule.end_date == "1/1/2010")
        self.assert_(oh.schedule.start_time == "1pm" and oh.schedule.end_time == "2pm")
        self.assert_("Monday" in oh.schedule.days and "Tuesday" in oh.schedule.days)
Example #24
File: chat.py Project: danvk/lmnopuz
  def post(self):
    user = users.get_current_user()
    if not user:
      self.response.set_status(401)
      return

    text = self.request.get("text")
    line = ChatLine(user=user, text=text)
    db.put(line)

    # Send an update to everyone who's listening.
    # TODO(danvk): write a function to do this.
    msg = { 'lines': [ "%s: %s" % (user.nickname(), text) ] }
    active_users=db.GqlQuery("SELECT * FROM ActiveUsers")
    for user in active_users:
      if (datetime.datetime.now() - user.last_update_time
          > datetime.timedelta(hours=1)):
        logging.info("Removing inactive user: " + user.user.user_id())
        user.delete()
        continue
      logging.info("Sending message on channel: " + user.user.user_id())
      try:
        channel.send_message(user.user.user_id(), simplejson.dumps(msg))
      except channel.InvalidChannelKeyError:
        # This happens when you restart the server and sever connections.
        pass

    self.response.out.write('ok')
Example #25
    def test_addRetrieveWebsite(self):
        ra = F.addResearchGroup("Beer drinkers anonymous")
        db.put(ra)

        auth1 = F.addName("Author", "One")
        auth2 = F.addName("Author", "Two")

        auth1_key = db.put(auth1)
        auth2_key = db.put(auth2)

        auths = [auth1, auth2]

        w = F.addWebsite("http://beer.com", ra, auths)

        self.assert_(w.address == "http://beer.com")
        self.assert_(w.group.name == "Beer drinkers anonymous")
        # self.assert_(len(w.authors) == 2)
        self.assert_(auth1_key in w.authors and auth2_key in w.authors)

        w_key = db.put(w)

        w = db.get(w_key)

        self.assert_(w.address == "http://beer.com")
        self.assert_(w.group.name == "Beer drinkers anonymous")
        # self.assert_(len(w.authors) == 2)
        self.assert_(auth1_key in w.authors and auth2_key in w.authors)
Example #26
def _set_css_deferred(user_data_key, video_key, status, version):
    user_data = user_models.UserData.get(user_data_key)
    uvc = UserVideoCss.get_for_user_data(user_data)
    css = pickle_util.load(uvc.pickled_dict)

    id = '.v%d' % video_key.id()
    if status == UserVideoCss.STARTED:
        if id in css['completed']:
            logging.warn("video [%s] for [%s] went from completed->started. ignoring." %
                         (video_key, user_data_key))
        else:
            css['started'].add(id)
    else:
        css['started'].discard(id)
        css['completed'].add(id)

    uvc.pickled_dict = pickle_util.dump(css)
    uvc.load_pickled()

    # if set_css_deferred runs out of order then we bump the version number
    # to break the cache
    if version < uvc.version:
        version = uvc.version + 1
        user_data.uservideocss_version += 1
        db.put(user_data)

    uvc.version = version
    db.put(uvc)
Example #27
def CreateMachines(num_instances, token, os, browser, browser_version, download_info, retries=0):
    """Create and launch EC2 machines for the given parameters.

    Args:
      num_instances: An integer representing the number of instances to spawn
        with the given configuration.
      token: A string representing the token for this run.
      os: An integer that corresponds to an enum.OS value.
      browser: An integer that corresponds to an enum.BROWSER value.
      browser_version: A string representing the browser version to use.
        Specifically, this should be the channel for Chrome.
      download_info: A string representing the information necessary for
        calculating the version and browser download url for the given machine.
      retries: An optional parameter specifying the initial retry count for the
        machine.
    """
    logging.info(
        "\n".join(["num_instances: %d", "token: %s", "os: %d", "browser: %d", "browser_version: %s"]),
        num_instances,
        token,
        os,
        browser,
        browser_version,
    )

    ec2 = ec2_manager.EC2Manager()
    user_data = simplejson.dumps(
        {"channel": browser_version, "os": OS_TO_USER_DATA[os], "token": token, "download_info": download_info}
    )
    logging.info("Spawning EC2 machines.")
    # Note: All exceptions are caught here because the EC2 API could fail after
    # successfully starting a machine. Because this task is rescheduled on
    # failure, we need to make sure we don't keep spawning EC2 machines.
    try:
        bots_instances = ec2.StartAmiWithOs(
            os, count=num_instances, instance_type=DEFAULT_INSTANCE_SIZE, user_data=user_data
        )
    except Exception:
        logging.exception("Something failed when setting up the EC2 instance. " "Stopping setup for this instance.")
        return

    logging.info("Creating the corresponding ClientMachine models.")
    new_instances = []
    for instance in bots_instances:
        new_instances.append(
            client_machine.ClientMachine(
                vm_service=enum.VM_SERVICE.EC2,
                os=os,
                browser=browser,
                browser_version=browser_version,
                client_id=instance.inst_id,
                status=enum.MACHINE_STATUS.PROVISIONED,
                retry_count=retries,
                token=token,
                download_info=download_info,
            )
        )

    db.put(new_instances)
    logging.info("Finished creating the ClientMachine models.")
Example #28
 def test_addRetrieveArticle(self):
     article = F.addArticle("Psychology in the Lab", "47000")
     self.assert_(article.journal == "Psychology in the Lab" and article.edition == "47000")
     db.put(article)
     key = article.key()
     article = db.get(key)
     self.assert_(article.journal == "Psychology in the Lab" and article.edition == "47000")
Example #29
  def append_api_response(self, checkins_json_data):
    seen_venue_ids = set()
    new_venues = []
    new_count = 0
    for checkin_json_data in checkins_json_data['checkins']['items']:
      checkin = data.checkin.Checkin(checkin_json_data)

      if 'venue' in checkin_json_data:
        venue_json_data = checkin_json_data['venue']
        if 'id' in venue_json_data:
          venue_id = venue_json_data['id']
          if venue_id not in seen_venue_ids:
            venue, is_new = data.venue.Venue.create_if_needed(venue_json_data)
            seen_venue_ids.add(venue_id)
            if is_new:
              new_venues.append(venue)

      if checkin.id not in self._checkins_by_id:
        new_count += 1
      self._checkins_by_id[checkin.id] = checkin

    if new_venues:
      logging.info('Saving %d new venues', len(new_venues))
      db.put(new_venues)

    return new_count
Example #30
File: chat.py Project: ramsay/ntersekt
 def _new_chat(self):
     '''Attempts to find a good-matching open chat, if none are found it returns a new open chat with the current
     user as host.
     '''
     user = users.get_current_user()
     metrics = db.Query(UserMetrics).filter('user = ', user).get()
     open_chats = db.Query(Chat).filter('guest = ', None).filter('nsfw = ', False)
     vocab = set(metrics.twl)
     matches = []
     for chat in open_chats:
         host = db.Query(UserMetrics).filter('user =', chat.host).get()
         chat.score = len(vocab.intersection(host.twl))*10 - abs(host.awl-metrics.awl) - abs(host.act-metrics.act)
         if chat.score > -1:
             matches.append(chat)
     if matches:
         matches.sort(key = lambda chat: chat.score, reverse=True)
         for chat in matches:
             result = db.run_in_transaction(join_chat, chat.key(), user)
             if result is not None:
                 return result
     chat = Chat()
     chat.created = datetime.now()
     chat.host = user
     chat.nsfw = False
     chat.finished = False
     chat.guest = None
     db.put(chat)
     return chat
Example #31
def import_schedule(season=None):

    # We want to convert from Eastern Time in the schedule to UTC
    import pytz
    est = pytz.timezone('US/Eastern')

    if not season:
        season = Season.current()

    # Make a map of all the teams. Map by both name and place because the
    # input data uses either one, sort of arbitrarily.
    teams = Team.all().fetch(32)
    team_map = dict((t.name, t) for t in teams)
    team_map.update(dict((t.place, t) for t in teams))

    # We'll build up the weeks and games we're importing in these structures.
    week_map = {}
    games = []

    csvlines = open('data/examples/2011-2012-schedule.csv').readlines()[1:]
    reader = csv.reader(csvlines)
    for row in reader:
        # Figure out kickoff
        kickoff = '%s %s' % itemgetter(0, 7)(row)
        kickoff = datetime.datetime.strptime(kickoff, '%m/%d/%Y %I:%M %p')
        kickoff = est.localize(kickoff).astimezone(pytz.utc)

        # Look up home and away teams by team name in the team map
        team_names = [_.strip() for _ in row[-2:]]
        home, away = itemgetter(*team_names)(team_map)

        # Figure out what week this game belongs to. The data in the CSV is a
        # string like this:
        # 'NFL Week 8:    Chargers @ Chiefs          [TV: ESPN]'
        info = row[2]
        week_num = int(info.split()[2][:-1])

        if week_num not in week_map:
            key = db.Key.from_path('Week', week_num, parent=season.key())
            week_map[week_num] = Week(key=key, name='Week %s' % week_num)
        week = week_map[week_num]

        key_name = '%s@%s' % (away.slug, home.slug)

        game = dict(parent=week,
                    key_name=key_name,
                    home_team=home,
                    away_team=away,
                    teams=[home.key(), away.key()],
                    start=kickoff)
        games.append(game)

    games.sort(key=itemgetter('parent'))

    # Figure out each week's end date based on the latest kickoff of its games
    for week, week_games in groupby(games, itemgetter('parent')):
        week_games = list(week_games)
        start = min(game['start'] for game in week_games)
        end = max(game['start'] for game in week_games)
        week.start = start
        week.end = end + datetime.timedelta(hours=5)

    # Store the weeks, so they have "complete" keys, and can therefore be used
    # as the parents to the games we're about to create.
    week_keys = db.put(week_map.values())

    # Create actual game entities from the kwargs we gathered, and store them.
    games = [Game(**kwargs) for kwargs in games]
    game_keys = db.put(games)

    return week_keys + game_keys
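(db.put returns a single key when given one entity and a list of keys when given a list, which is why the two calls above can be concatenated and returned.)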
Example #32
def get_ads(cursor=None, count=0):
    print "getting ads %d" % count

    ads = Ad.all().filter("source =", "avito").order("-created_at")

    print ads[0].created_at

    if cursor:
        ads = ads.with_cursor(cursor)

    ads_for_put = []
    ads_for_delete = []

    for ad in ads.fetch(10):
        try:
            parser = adsparser.parse(ad.key().name(), 'spb')
        except StandardError as e:
            msg = e.__str__()
            if msg == 'HTTP Error 404: Not found':
                print "deleting"
                ad.deleted = True

                ads_for_put.append(ad)

            continue

        if parser.phone_key is None:
            continue

        phone_url = "%s?pkey=%s" % (ad.key().name().replace(
            'items/', 'items/phone/'), parser.phone_key)
        phone_cmd = command.replace("__url__", phone_url)

        print ad.key().name()

        fin, fout = os.popen4(phone_cmd)
        phone = fout.read()

        time.sleep(2)

        f = open("result.txt", "r")
        phone = adsparser.format_phone(f.read())
        f.close()

        if parser.is_real_agent:
            ad.rating = 0
        else:
            if ad.phone is None or ad.phone == '':
                ad.rating = 100

        if ad.phone is not None and ad.phone != '' and ad.phone != phone:
            new_ad = clone_entity(ad,
                                  key_name="%s?v2" % ad.key().name(),
                                  parent=ad)
            new_ad.phone = phone
            new_ad.created_at = datetime.datetime.now()

            ads_for_put.append(new_ad)

        if ad.phone is None or ad.phone == '':
            ad.phone = phone
            ad.created_at = datetime.datetime.combine(
                ad.created_at.date(),
                datetime.datetime.now().time())

        ads_for_put.append(ad)

    print "saving ads"
    db.put(ads_for_put)
    print "ads saved"

    for ad in ads_for_put:
        try:
            print "adding task"
            taskqueue.add(queue_name='quick',
                          url='/ad/check',
                          params={'key': ad.key().name()})
        except:
            pass

    print "tasks added"

    # Note: this call recurses unconditionally; the chain only ends when a
    # fetch fails or the request deadline is hit.
    get_ads(ads.cursor(), count + 10)
Example #33
    def confirm_note_with_bad_words(self, note):
        """After a note containing bad words is confirmed by the author,
        we will:
        (1) set note.confirmed = True;
        (2) copy the note from NoteWithBadWords to Note;
        (3) log user action;
        (4) update person record. """
        note.confirmed = True

        # Check whether the record author disabled notes on
        # this record during the time between the note author inputs the
        # note in the UI and confirms the note through email.
        person = model.Person.get(self.repo, note.person_record_id)
        if person.notes_disabled:
            return self.error(
                200, _('The author has disabled notes on this record.'))

        # Check whether the admin disabled reporting "believed_dead"
        # during the time between the note author inputs the
        # note in the UI and confirms the note through email.
        if (self.params.status == 'believed_dead'
                and not self.config.allow_believed_dead_via_ui):
            return self.error(
                200,
                _('Not authorized to post notes with the status '
                  '"believed_dead".'))

        # clone the flagged note to Note table.
        note_confirmed = model.Note.create_original(
            self.repo,
            entry_date=note.entry_date,
            person_record_id=note.person_record_id,
            author_name=note.author_name,
            author_email=note.author_email,
            author_phone=note.author_phone,
            source_date=note.source_date,
            author_made_contact=note.author_made_contact,
            status=note.status,
            email_of_found_person=note.email_of_found_person,
            phone_of_found_person=note.phone_of_found_person,
            last_known_location=note.last_known_location,
            text=note.text)
        entities_to_put = [note_confirmed]

        note.confirmed_copy_id = note_confirmed.get_record_id()
        entities_to_put.append(note)

        # Specially log 'believed_dead'.
        if note_confirmed.status == 'believed_dead':
            model.UserActionLog.put_new('mark_dead', note_confirmed,
                                        person.primary_full_name,
                                        self.request.remote_addr)

        # Specially log a switch to an alive status.
        if (note_confirmed.status in ['believed_alive', 'is_note_author']
                and person.latest_status
                not in ['believed_alive', 'is_note_author']):
            model.UserActionLog.put_new('mark_alive', note_confirmed,
                                        person.primary_full_name)

        # Update the Person based on the Note.
        if person:
            person.update_from_note(note_confirmed)
            # Send notification to all people
            # who subscribed to updates on this person
            subscribe.send_notifications(self, person, [note_confirmed])
            entities_to_put.append(person)

        # Write the changed entities (the confirmed copy, the original note,
        # and possibly the person) to the store.
        db.put(entities_to_put)
Example #34
 def store():
   db.put(doc)
Example #35
 def make_doc():
   db.put(doc)
   for scan in scans:
     scan.lacks_document = False
     scan.document = doc.key()
     db.put(scan)
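Examples #34 and #35 are closures over doc (and scans), written so they can run atomically; a hedged usage sketch, assuming the entities share one entity group as the old db API required:

db.run_in_transaction(make_doc)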
Example #36
    def post(self):
        body = self.request.body.decode('utf-8').encode(
            'ascii', 'xmlcharrefreplace')
        logging.info('Post body is %d characters', len(body))
        topic = get_self_link(self.request)

        data = feedparser.parse(body)
        if data.bozo:
            logging.error('Bozo feed data. %s: %r',
                          data.bozo_exception.__class__.__name__,
                          data.bozo_exception)
            if (hasattr(data.bozo_exception, 'getLineNumber')
                    and hasattr(data.bozo_exception, 'getMessage')):
                line = data.bozo_exception.getLineNumber()
                logging.error('Line %d: %s', line,
                              data.bozo_exception.getMessage())
                segment = body.split('\n')[line - 1]
                logging.info('Body segment with error: %r', segment)
            return self.response.set_status(500)

        update_list = []
        logging.info('Found %d entries', len(data.entries))
        for entry in data.entries:
            if hasattr(entry, 'content'):
                # This is Atom.
                entry_id = entry.id
                content = entry.content[0].value
                link = entry.get('link', '')
                title = entry.get('title', '')
            else:
                content = entry.get('description', '')
                title = entry.get('title', '')
                link = entry.get('link', '')
                entry_id = (entry.get('id', '') or link or title or content)

            logging.info(
                'Found entry in topic = "%s" with title = "%s", id = "%s", '
                'link = "%s", content = "%s"', topic, title, entry_id, link,
                content)
            update_list.append(
                TopicUpdate(key_name='key_' +
                            hashlib.sha1(link + '\n' + entry_id).hexdigest(),
                            topic=topic,
                            title=title,
                            content=content,
                            link=link,
                            callback=self.request.path[len('/subscriber'):]))
        db.put(update_list)

        self.response.set_status(200)
        self.response.out.write('Aight.  Saved.')

        uploads = len(update_list)
        uploads_not_claimed = 0

        for entry in update_list:
            yt_video_id = get_yt_video_id(entry.link)
            if yt_video_id:
                try:
                    apply_usage_policy(yt_video_id)
                except Exception:
                    uploads_not_claimed = uploads_not_claimed + 1

        logging.info("Videos claimed: %d of %d", uploads - uploads_not_claimed,
                     uploads)
Example #37
 def post(self):
     """Save the user's preferences."""
     user = users.get_current_user()
     snipper_user = models.GetSnippetUser(user)
     logging.debug('Saving settings for %s', user)
     errors = []
     date_format = str(
         self.request.get('date_format', snipper_user.date_format))
     snippet_format = self.request.get('snippet_format',
                                       snipper_user.snippet_format)
     snipper_user.mail_snippets = bool(
         self.request.get('mail_snippets', False))
     snipper_user.send_confirm = bool(
         self.request.get('send_confirm', False))
     snipper_user.reset_day = int(
         self.request.get('reset_day', snipper_user.reset_day))
     snipper_user.reset_hour = int(
         self.request.get('reset_hour', snipper_user.reset_hour))
     timezone = self.request.get('timezone')
     try:
         assert pytz.timezone(timezone)
     except pytz.UnknownTimeZoneError:
         logging.exception('Invalid timezone: %s', timezone)
         errors.append('Invalid timezone: %s.' % timezone)
     else:
         snipper_user.timezone = timezone
         # Convert to UTC for storage.
         utc_reset = util.ResetDatetimeToUtc(snipper_user.reset_day,
                                             snipper_user.reset_hour,
                                             snipper_user.timezone)
         snipper_user.utc_reset_day = utc_reset.weekday()
         snipper_user.utc_reset_hour = utc_reset.hour
     try:
         assert datetime.datetime.now().strftime(date_format)
     except (ValueError, TypeError):
         errors.append('Invalid date format "%s".' % date_format)
         logging.exception('date_format "%s" failed validation.',
                           date_format)
     else:
         snipper_user.date_format = date_format
     try:
         assert snippet_format % 'test snippet'
     except (ValueError, TypeError):
         errors.append('Invalid snippet format "%s".' % snippet_format)
         logging.exception('snippet_format "%s" is invalid.',
                           snippet_format)
     else:
         snipper_user.snippet_format = snippet_format
     logging.debug(
          'date:%s, snip:%s, mail:%s, conf:%s, day:%s, hour:%s, tz:%s, '
          'utc_day:%s, utc_hour:%s', snipper_user.date_format,
         snipper_user.snippet_format, snipper_user.mail_snippets,
         snipper_user.send_confirm, snipper_user.reset_day,
         snipper_user.reset_hour, snipper_user.timezone,
         snipper_user.utc_reset_day, snipper_user.utc_reset_hour)
     try:
         db.put(snipper_user)
     except (db.Timeout, db.InternalError):
         logging.exception('Could not save settings.')
         errors.append('Could not save settings.')
     else:
         memcache_key = 'SnippetUser-' + str(user)
         memcache.set(memcache_key, snipper_user)
     if errors:
         errors = urllib.quote_plus(','.join(errors))
         return self.redirect('/settings?errors=' + errors)
     # Drop the last two weeks from memcache.
     memcache.delete_multi(
         ['snippets_%s_%d' % (str(user), x) for x in (0, 1)])
     self.redirect('/?msg=Settings+saved.')
Example #38
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_user_stub()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub()
        model.Repo(key_name='haiti').put()

        logging.basicConfig(level=logging.INFO, stream=sys.stderr)
        self.mox = None

        # Setup cheerfully stolen from test_model.
        set_utcnow_for_test(datetime.datetime(2010, 1, 1))
        self.photo = model.Photo.create('haiti', image_data='xyz')
        self.photo.put()
        self.photo_key = self.photo.key()
        self.p1 = model.Person.create_original(
            'haiti',
            given_name='John',
            family_name='Smith',
            home_street='Washington St.',
            home_city='Los Angeles',
            home_state='California',
            home_postal_code='11111',
            home_neighborhood='Good Neighborhood',
            author_name='Alice Smith',
            author_phone='111-111-1111',
            author_email='*****@*****.**',
            photo_url='',
            photo=self.photo,
            source_url='https://www.source.com',
            source_date=datetime.datetime(2010, 1, 1),
            source_name='Source Name',
            entry_date=datetime.datetime(2010, 1, 1),
            expiry_date=datetime.datetime(2010, 2, 1),
            other='')
        self.p2 = model.Person.create_original(
            'haiti',
            given_name='Tzvika',
            family_name='Hartman',
            home_street='Herzl St.',
            home_city='Tel Aviv',
            home_state='Israel',
            source_date=datetime.datetime(2010, 1, 1),
            entry_date=datetime.datetime(2010, 1, 1),
            expiry_date=datetime.datetime(2010, 3, 1),
            other='')
        self.key_p1 = db.put(self.p1)
        self.key_p2 = db.put(self.p2)
        self.n1_1 = model.Note.create_original(
            'haiti',
            person_record_id=self.p1.record_id,
            linked_person_record_id=self.p2.record_id,
            status=u'believed_missing',
            author_made_contact=False,
            entry_date=get_utcnow(),
            source_date=datetime.datetime(2010, 1, 2))
        self.note_id = self.n1_1.note_record_id
        db.put(self.n1_1)
        self.to_delete = [self.p1, self.p2, self.n1_1, self.photo]
Example #39
    def get(self, action=None):
        if action:
            if action == 'turn_download_on':
                turn_download_on()
            if action == 'turn_download_off':
                turn_download_off()

        self.response.out.write('Admin page<br/><br/>')
        self.response.out.write('<a href="/gt/">Home</a><br/><br/>')
        self.response.out.write(
            '<a href="/gt/admin/create_geo_trees">Create GeoTrees</a><br/><br/>'
        )
        self.response.out.write(
            '<a href="/_ah/admin">App Engine localhost admin</a><br/><br/>')
        self.response.out.write(
            '<a href="/gt/admin/add_points">Add OSM points to GeoTree</a><br/>'
        )
        self.response.out.write(
            '<a href="/gt/admin/add_cities">Add cities to GeoTree</a><br/><br/>'
        )
        self.response.out.write(
            '<a href="/gt/admin/update_tiles">Update OSM GeoTree tiles</a><br/>'
        )
        self.response.out.write(
            '<a href="/gt/admin/update_cities_tiles">Update Cities GeoTree tiles</a><br/><br/>'
        )
        if is_download_on():
            self.response.out.write(
                '<a href="/gt/admin/turn_download_off">Turn OSM Download OFF</a><br/><br/>'
            )
        else:
            self.response.out.write(
                '<a href="/gt/admin/turn_download_on">Turn OSM Download ON</a><br/><br/>'
            )

        if action:
            if action == 'create_geo_trees':
                gt = GeoTree.get(gt_key_name='osm')
                if not gt:
                    gt = GeoTree(key_name='osm',
                                 max_z=config.max_z_osm,
                                 min_z=config.min_z_osm)
                    gt.put()
                    self.response.out.write('\n\nInfo: Created osm GeoTree.')
                else:
                    gt.max_z = config.max_z_osm
                    gt.min_z = config.min_z_osm
                    gt.put()
                    self.response.out.write('\n\nInfo: OSM GeoTree exists.')
                gt = GeoTree.get(gt_key_name='cities')
                if not gt:
                    gt = GeoTree(key_name='cities',
                                 max_z=config.max_z_cities,
                                 min_z=config.min_z_cities)
                    gt.put()
                    self.response.out.write('\nInfo: Created cities GeoTree.')
                else:
                    gt.max_z = config.max_z_cities
                    gt.min_z = config.min_z_cities
                    gt.put()
                    self.response.out.write('\nInfo: Cities GeoTree exists.')
            if action == 'add_points':
                batch = OSMPOI.all().filter('is_in_tree =',
                                            False).fetch(self._BATCH_ADD_SIZE)
                if batch:
                    GeoTree.insert_points_list(batch,
                                               max_z=17,
                                               gt_key_name="osm")
                    self.response.out.write('\n\nInfo: added %d points' %
                                            len(batch))
                    for p in batch:
                        p.is_in_tree = True
                    db.put(batch)
                    taskqueue.add(url='/gt/admin/add_points', method='GET')
                else:
                    if GeoTree.exists(gt_key_name="osm"):
                        self.response.out.write('\n\nInfo: no POIs to add.')
                        taskqueue.add(url='/gt/admin/update_tiles',
                                      method='GET')
                    else:
                        self.response.out.write(
                            '\n\nInfo: GeoTree does not exist.')
            if action == 'add_cities':
                batch = City.all().filter('is_in_tree =',
                                          False).fetch(self._BATCH_ADD_SIZE)
                if batch:
                    GeoTree.insert_points_list(batch, gt_key_name="cities")
                    self.response.out.write('\n\nInfo: added %d cities' %
                                            len(batch))
                    for p in batch:
                        p.is_in_tree = True
                    db.put(batch)
                else:
                    if GeoTree.exists(gt_key_name="cities"):
                        self.response.out.write(
                            '\n\nInfo: no cities left out of tree')
                    else:
                        self.response.out.write(
                            '\n\nInfo: GeoTree does not exist')
            if action == 'update_tiles':
                message = GeoTree.update_tiles(count=self._BATCH_UPDATE_SIZE,
                                               gt_key_name="osm")
                if message:
                    if 'nothing to update' in message:
                        self.response.out.write('<br/>' + message)
                else:
                    taskqueue.add(url='/gt/admin/update_tiles', method='GET')
            if action == 'update_cities_tiles':
                message = GeoTree.update_tiles(count=self._BATCH_UPDATE_SIZE,
                                               gt_key_name="cities")
                if message:
                    self.response.out.write('<br/>' + message)
                else:
                    self.response.out.write('\n\nInfo: updated tiles')
            # memcaching is not used at the moment
            if action == 'clear_cache':
                memcache.flush_all()
                self.response.out.write(
                    '<br/>All memcache entries are deleted.')
Example #40
    def get(self):
        # Creating an entity with a system ID
        e1 = Entity()
        e1.prop = 1
        e1.put()

        k1 = e1.key()
        self.response.write('<p>Entity e1 has a system ID = %d</p>' % k1.id())

        # Creating an entity with a key name
        e2 = Entity(key_name='alphabeta')
        e2.prop = 2
        e2.put()

        k2 = e2.key()
        self.response.write('<p>Entity e2 has a key name = %s</p>' % k2.name())

        # Getting an entity by a known key
        k = db.Key.from_path('Entity', 'alphabeta')
        result = db.get(k)

        # You could also use this shortcut:
        # result = Entity.get_by_key_name('alphabeta')

        if result:
            self.response.write('<p>Got an entity by key name, prop = %d</p>' %
                                result.prop)
        else:
            self.response.write('<p>Could not find an entity with key name '
                                '"alphabeta"</p>')

        # Inspecting entity objects

        e3 = Entity()
        assert (not e3.is_saved())

        e3.put()
        assert (e3.is_saved())

        assert (not hasattr(e3, 'prop'))

        e3.prop = 3
        assert (hasattr(e3, 'prop'))

        for n in range(1, 10):
            value = n * n
            setattr(e3, 'prop' + str(n), value)

        assert (getattr(e3, 'prop' + str(7)) == 49)

        self.response.write('<p>Properties of e3:</p><ul>')
        for name in e3.instance_properties():
            value = getattr(e3, name)
            self.response.write('<li>%s = %d</li>' % (name, value))
        self.response.write('</ul>')

        # Batch put
        e4 = Entity()
        e5 = Entity()
        db.put([e4, e5])

        # Delete an entity using the model object
        db.delete(e1)

        e2.delete()

        # Delete by key
        k = db.Key.from_path('Entity', 'alphabeta')
        db.delete(k)

        # Batch delete
        db.delete([e4, e5])

        self.response.write('<p>Entities deleted.</p>')

        self.response.write('<p>The time is: %s</p>' %
                            str(datetime.datetime.now()))
Example #41
0
    def test_search_by_name_only(self):
        # Store and index all 18 test records.
        persons = [getattr(self, 'p%d' % i) for i in range(1, 19)]
        db.put(persons)
        for person in persons:
            full_text_search.add_record_to_index(person)

        # Search by alternate name
        results = full_text_search.search('haiti', 'Iorin', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0505'])

        # Search by family name
        results = full_text_search.search('haiti', 'Minase', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0505'])

        # Search by given name
        results = full_text_search.search('haiti', 'Iori', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0505'])

        # Search by given name + family name
        results = full_text_search.search('haiti', 'Minase Iori', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0505'])

        # Search by full name
        results = full_text_search.search('haiti', 'Iori Minase', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0505'])

        # Search by name & location
        results = full_text_search.search('haiti', 'Chihaya Arao', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0225'])

        # Search Cyrillic record by name & location
        results = full_text_search.search('haiti', 'Ritsuko Tottori', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0623'])

        # Search by home_street only
        results = full_text_search.search('haiti', 'Kunaideme72', 5)
        assert not results

        # Search by home_city only
        results = full_text_search.search('haiti', 'Arao', 5)
        assert not results

        # Search by home_state only
        results = full_text_search.search('haiti', 'Kumamoto', 5)
        assert not results

        # Search by home_postal_code only
        results = full_text_search.search('haiti', '864-0003', 5)
        assert not results

        # Search by home_neighborhood only
        results = full_text_search.search('haiti', 'Araokeibajou', 5)
        assert not results

        # Search by home_country only
        results = full_text_search.search('haiti', 'Japan', 5)
        assert not results

        # Search in a different repository
        results = full_text_search.search('japan', 'Iori', 5)
        assert not results

        # Check no results
        results = full_text_search.search('haiti', 'Producer san', 5)
        assert not results

        # Search with no query text
        results = full_text_search.search('haiti', '', 5)
        assert not results

        # Search deleted record
        delete.delete_person(self, self.p5)
        results = full_text_search.search('haiti', 'Ami', 5)
        assert not results

        # Check rank order (name match ranks higher than location match)
        results = full_text_search.search('haiti', 'Rin Shibuya', 5)
        assert [r.record_id for r in results] == \
               ['haiti/0810', 'haiti/0203']

        # Search romaji record by kanji name
        results = full_text_search.search('haiti', u'千早', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0225'])

        # Search romaji record by kanji name and location
        results = full_text_search.search('haiti', u'千早 荒尾', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0225'])

        # Check rank order
        # (same kanji higher than different kanji with the same reading)

        results = full_text_search.search('haiti', u'菊地 真', 5)
        assert [r.record_id for r in results] == \
            ['haiti/0829', 'haiti/1829']
        results = full_text_search.search('haiti', u'菊地 眞', 5)
        assert [r.record_id for r in results] == \
            ['haiti/1829', 'haiti/0829']

        # Search kanji record by multi reading
        results = full_text_search.search('haiti', u'hagiwara', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/1224'])
        results = full_text_search.search('haiti', u'ogiwara', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/1224'])

        # Search romaji record by hiragana name and location
        results = full_text_search.search('haiti', u'ちはや あらお', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0225'])

        # Search by full name without space
        results = full_text_search.search('haiti', 'HibikiGanaha', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/1010'])

        # Search kanji record by full name without space
        results = full_text_search.search('haiti', u'AzusaMiura', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0719'])

        # Search Cyrillic record by full name without space
        results = full_text_search.search('haiti', u'RitsukoAkiduki', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0623'])

        # Search full name without space record by given name and family name
        results = full_text_search.search('haiti', u'Kotori Otonashi', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0909'])

        # Search full name without space record by full name without space
        results = full_text_search.search('haiti', u'OtonashiKotori', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0909'])

        # Search Chinese record by kanji
        results = full_text_search.search('haiti', u'真美', 5)
        assert set([r.record_id for r in results]) == \
            set(['haiti/0523'])

        # Search by Special Chinese Family Name
        # while records are written in English
        results = full_text_search.search('haiti', u'单鱼', 5)
        assert set([r.record_id for r in results]) == \
               set(['haiti/0911'])

        # Search by Pinyin(Chinese Romaji)
        # while records are written in Chinese
        results = full_text_search.search('haiti', u'Zeng Cheng', 5)
        assert set([r.record_id for r in results]) == \
               set(['haiti/0910'])

        # Search by Chinese
        # while records are written in Chinese
        results = full_text_search.search('haiti', u'曾诚', 5)
        assert set([r.record_id for r in results]) == \
               set(['haiti/0910'])
Example #42
0
    def test_clean_up_in_test_mode(self):
        """Test the clean up in test mode."""
        def run_clean_up_in_test_mode_task():
            """Runs the CleanUpInTestMode task."""
            test_handler.initialize_handler(
                tasks.CleanUpInTestMode, tasks.CleanUpInTestMode.ACTION).get()

        tasks.CleanUpInTestMode.DELETION_AGE_SECONDS = 2 * 3600  # 2 hours

        # entry_date of p3 is 4 hours after p1 and p2.
        self.p3 = model.Person.create_original(
            'haiti',
            first_name='Taro',
            last_name='Google',
            home_street='Roppongi',
            home_city='Minato',
            home_state='Tokyo',
            source_date=datetime.datetime(2010, 1, 1),
            entry_date=datetime.datetime(2010, 1, 1, 4, 0, 0),
            expiry_date=datetime.datetime(2010, 3, 1),
            other='')
        self.key_p3 = db.put(self.p3)
        self.to_delete.append(self.p3)

        # Initial state: three Persons and one Note.
        assert model.Person.all().count() == 3
        assert model.Note.get('haiti', self.note_id)
        assert db.get(self.photo_key)
        assert db.get(self.key_p1).is_expired == False  # still exists
        assert db.get(self.key_p2).is_expired == False
        assert db.get(self.key_p3).is_expired == False

        # Verify that schedule_next_task does the right thing.
        utcnow = datetime.datetime(2010, 1, 1)
        config.set(test_mode=True, repo='haiti')
        query = model.Person.all()
        query.get()
        self.mox = mox.Mox()
        self.mox.StubOutWithMock(taskqueue, 'add')
        taskqueue.add(method='GET',
                      url='/haiti/tasks/clean_up_in_test_mode',
                      params={
                          'cursor': query.cursor(),
                          'utcnow':
                          str(calendar.timegm(utcnow.utctimetuple())),
                          'queue_name': 'clean_up_in_test_mode',
                      },
                      name=mox.IsA(str))
        self.mox.ReplayAll()
        cleanup = \
            test_handler.initialize_handler(tasks.CleanUpInTestMode,
                                            tasks.CleanUpInTestMode.ACTION)
        cleanup.schedule_next_task(query.cursor(), utcnow)
        self.mox.UnsetStubs()
        self.mox.VerifyAll()

        # Nothing happens if test_mode is False.
        config.set(test_mode=False, repo='haiti')
        set_utcnow_for_test(datetime.datetime(2010, 6, 1))
        run_clean_up_in_test_mode_task()
        assert db.get(self.key_p1).is_expired == False  # still exists
        assert db.get(self.key_p2).is_expired == False
        assert db.get(self.key_p3).is_expired == False

        # Records with entry_date before 2010-01-01 3:00 are deleted.
        config.set(test_mode=True, repo='haiti')
        set_utcnow_for_test(datetime.datetime(2010, 1, 1, 5, 0, 0))
        run_clean_up_in_test_mode_task()
        assert db.get(self.key_p1) is None
        assert db.get(self.key_p2) is None
        assert db.get(self.key_p3).is_expired == False  # still exists

        # All records are deleted.
        config.set(test_mode=True, repo='haiti')
        set_utcnow_for_test(datetime.datetime(2010, 1, 1, 7, 0, 0))
        run_clean_up_in_test_mode_task()
        assert db.get(self.key_p1) is None
        assert db.get(self.key_p2) is None
        assert db.get(self.key_p3) is None
Example #43
0
 def _save_notification_and_payload(cls, notification, payload):
     return db.put([notification, payload])
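
Worth noting about the one-liner above: db.put() accepts a single entity or a list, and for a list it returns the written keys in input order, so callers can unpack both keys directly. A minimal sketch:

notification_key, payload_key = db.put([notification, payload])
# Keys come back in the same order as the entities passed in.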
Example #44
0
 def test_delete_record_from_index(self):
     db.put(self.p4)
     full_text_search.add_record_to_index(self.p4)
     full_text_search.delete_record_from_index(self.p4)
     results = full_text_search.search('haiti', 'Miki', 5)
     assert not results
Example #45
0
 def trans():
     sc.provisioned = False
     sc.deleted = True
     sln_settings.updates_pending = True
     db.put([sln_settings, sc])
     return sln_settings
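
The trans() closure above is meant to run inside a datastore transaction: it mutates two entities and returns one of them. A minimal usage sketch, assuming sln_settings and sc may live in different entity groups (hence the cross-group option); if they share an entity group, plain db.run_in_transaction(trans) would do:

from google.appengine.ext import db

# Allow the transaction to span up to two entity groups (assumption).
xg_on = db.create_transaction_options(xg=True)
sln_settings = db.run_in_transaction_options(xg_on, trans)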
Example #46
0
def updateTasksPostStudentSignUp(request, *args, **kwargs):
    """Appengine task that updates the GHOP Tasks after the student signs up.

    Expects the following to be present in the POST dict:
      student_key: Specifies the student key name who registered

    Args:
      request: Django Request object
    """
    from soc.modules.ghop.logic.models import student as ghop_student_logic

    post_dict = request.POST

    student_key = post_dict.get('student_key')

    if not student_key:
        # invalid student data, log and return OK
        return error_handler.logErrorAndReturnOK('Invalid Student data: %s' %
                                                 post_dict)

    student_entity = ghop_student_logic.logic.getFromKeyNameOr404(student_key)

    # retrieve all tasks currently assigned to the user
    task_fields = {
        'user': student_entity.user,
    }
    task_entities = ghop_task_logic.logic.getForFields(task_fields)

    # TODO(madhusudan) move this to the Task Logic
    # Make sure the tasks store references to the student as well as
    # closing all tasks that are AwaitingRegistration.
    # Iterate over a copy: tasks awaiting registration are removed from
    # task_entities so that the batch db.put below only writes the rest.
    for task_entity in list(task_entities):
        task_entity.student = student_entity
        if task_entity.status == 'AwaitingRegistration':
            task_entities.remove(task_entity)

            properties = {'status': 'Closed'}
            changes = [
                ugettext('User-MelangeAutomatic'),
                ugettext('Action-Student registered'),
                ugettext('Status-%s' % (properties['status']))
            ]

            comment_properties = {
                'parent': task_entity,
                'scope_path': task_entity.key().name(),
                'created_by': None,
                'changes': changes,
                'content': ugettext(
                    '(The Melange Automated System has detected that the '
                    'student has signed up for the program and hence has '
                    'closed this task.)'),
            }

            ghop_task_logic.logic.updateEntityPropertiesWithCWS(
                task_entity, properties, comment_properties)

    db.put(task_entities)

    # return OK
    return http.HttpResponse()
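
For context, the docstring above says the task expects student_key in the POST dict. A hedged sketch of how such a task might be enqueued; the URL below is an assumption for illustration, not taken from the source:

from google.appengine.api import taskqueue

taskqueue.add(
    url='/tasks/ghop/task/update_post_student_signup',  # hypothetical URL
    params={'student_key': student_entity.key().name()})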
Example #47
0
e = Employee(name="",
             role="manager",
             account=users.get_current_user())
e.hire_date = datetime.datetime.now()
e.put()

# Another example: a GQL query over multiple users
training_registration_list = [users.User("*****@*****.**"),
                              users.User("*****@*****.**"),
                              users.User("*****@*****.**")]
employees_trained = db.GqlQuery("SELECT * FROM Employee WHERE account IN :1",
                                training_registration_list)
for e in employees_trained:
    e.new_hire_training_completed = True
    db.put(e)

# Polymorphism with PolyModel
from google.appengine.ext import db
from google.appengine.ext.db import polymodel

class Contact(polymodel.PolyModel):
  phone_number = db.PhoneNumberProperty()
  address = db.PostalAddressProperty()

class Person(Contact):
  first_name = db.StringProperty()
  last_name = db.StringProperty()
  mobile_number = db.PhoneNumberProperty()

class Company(Contact):
  # Completing the truncated class with illustrative fields (an assumption,
  # mirroring the canonical PolyModel documentation example):
  company_name = db.StringProperty()
  fax_number = db.PhoneNumberProperty()
Example #48
0
                COUNTER_SEND_MAIL_TASK_FAILED.inc()

                # Set by except: clause above. pylint: disable=raising-bad-type
                raise exception

        if sent:
            cls._mark_sent(notification, now)

        if failed_permanently:
            cls._mark_failed(notification, now, exception, permanent=True)

        if sent or failed_permanently:
            policy.run(notification, payload)
            cls._mark_done(notification, now)

        db.put([notification, payload])

        COUNTER_RETENTION_POLICY_RUN.inc()

        if sent:
            COUNTER_SEND_MAIL_TASK_SENT.inc()
        elif failed_permanently:
            COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()

        COUNTER_SEND_MAIL_TASK_SUCCESS.inc()

    @classmethod
    @db.transactional(
            propagation=datastore_rpc.TransactionOptions.INDEPENDENT, xg=True)
    def _record_failure(
            cls, notification, payload, exception, dt=None, permanent=False,
Example #49
0
File: main.py Project: AVB24/LapRecords
	def post(self):
		track = self.request.get('track')
		bestlaps = {}
		for bl in BestLap.all().filter('track =', track):
			if bl.isBest:
				bestlaps[bl.raceclass.name] = bl

		upload_files = self.get_uploads('file')[0]
		blob_key = upload_files.key()
		blob_info = upload_files
		record = Record(csv=str(blob_info)).put()
		blob_reader = blobstore.BlobReader(blob_key)
		reader = csv.DictReader(blob_reader)
		lapsToUpload = []

		for row in reader:
			#row = row.replace('"','').replace(', ', ' ').strip()
			if 'Best Tm' in row:
				time = row['Best Tm']
			else:
				time = row['Overall BestTm']
			
			if 'Laps' in row:
				laps = row['Laps']
			else:
				laps = row['Appeared']

			position = row['Pos']
			point_in_class = row['PIC']
			carnum = row['No.']
			racer_name = normalize_string(row['Name'])
			racer_class = normalize_string(row['Class'])
			diff = row['Diff']
			gap = row['Gap']
			points = row['Points']
			car_make = normalize_string(row['Make'])
			car_model = normalize_string(row['Model'])
			car_year = row['Year']
			car_color = row['Color']
			city = row['City']
			state = row['State']
			sponsor = normalize_string(row['Sponsor'])
			email = row['Email']

			if time.count(':') == 0 and time:
				time = '0:' + time
			
			if validLap(racer_class, point_in_class):
				pt = process_time(time)
				t = track #Track.get_or_insert(key_name=self.request.get('track'), name=self.request.get('track'), lap_distance=1.02)
				g = self.request.get('group')
				sd = self.request.get('date')
				dt = datetime.strptime(sd, '%Y-%m-%d')
				tr = Track.get_or_insert(key_name=t, name=t)
				e = Event.get_or_insert(key_name=g+t+sd, name=g+t, track=tr, date=dt)
				c = Car.get_or_insert(key_name=carnum+car_make+car_model+car_color+car_year, make=car_make, model=car_model,year=car_year,color=car_color,number=carnum)
				cl = RaceClass.get_or_insert(key_name=racer_class, name=racer_class)
				if not email:
					email = racer_name.split()[0].lower() + racer_name.split()[1].lower() + '@gmail.com'
				r = Racer.get_or_insert(key_name=racer_name.split()[0][0:1].lower() + racer_name.split()[1].lower(), email=email,name=racer_name, driver=users.User(email), points=int(points), car=c, raceclass=cl)
				if sponsor:
					r.sponsor=Sponsor.get_or_insert(key_name=sponsor, name=sponsor)

				r.put()
				best = BestLap.get_or_insert(key_name=sd+t+g+cl.name+racer_name.replace(' ','.'), driver=r, raceclass=cl, track=t, time=pt, event=e, isBest=False, date=dt)

				if cl.name in bestlaps:
					if pt < bestlaps[cl.name].time and pt != 0.0:
						print str(pt) + ' is better than ' + bestlaps[cl.name].driver.name + 's time of ' + str(bestlaps[cl.name].time)
						best.isBest = True					#Mark current record as best
						bestlaps[cl.name].isBest = False	#Mark old record as not best
						bestlaps[cl.name].put()				#Commit old record to db
						bestlaps[cl.name] = best 			#Replace record in local dictionary with new best record for class
				elif pt != 0.0:
					best.isBest = True
					bestlaps[cl.name] = best
				lapsToUpload.append(best)
		db.put(lapsToUpload)
		self.redirect('/')
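
The handler above calls a process_time() helper that is not shown. A hypothetical sketch of what it might look like, converting a lap-time string such as '1:23.456' into seconds as a float (an assumption, not the project's actual implementation):

def process_time(time_str):
    # Hypothetical helper: '1:23.456' -> 83.456; returns 0.0 for blank or
    # unparseable input so such laps never rank as a best time.
    try:
        minutes, _, seconds = time_str.partition(':')
        return int(minutes) * 60 + float(seconds)
    except (ValueError, AttributeError):
        return 0.0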
Example #50
0
    def post(self):
        try:
            #global global_dict
            #global_dict = {}
            starttime = time.time()
            cutoff_date = datetime.datetime.now() + datetime.timedelta(-365)
            cutoff_date_string = cutoff_date.strftime("%Y-%m-%d %H:%M:%S")

            parts = self.request.body.split("&")
            requests = {}

            if parts is not None and parts[0] != "":
                for pair in parts:
                    ab = pair.split('=')
                    requests[ab[0]] = ab[1]

            force = bool(requests.get("force", False))
            write = bool(requests.get("write", False))
            minwrite = bool(requests.get("minwrite", False))

            rpcList = []
            client = memcache.Client()

            q = structures.Rumble.all()
            rumbles = []
            for r in q.run():
                memr = memcache.get(r.Name)
                if memr is not None:
                    r = memr
                if r.BatchScoresAccurate and not force:
                    continue
                rumbles.append(r)

            for r in rumbles:
                scoresdicts = pickle.loads(
                    zlib.decompress(r.ParticipantsScores))
                entries = len(scoresdicts)
                r.__dict__["entries"] = entries
            rumbles.sort(key=lambda r: -r.__dict__["entries"])

            first = True
            for r in rumbles:
                if not first:
                    time.sleep(5)
                    gc.collect()
                    gc.collect(2)
                first = False

                logging.info("mem usage at start of " + r.Name + ": " +
                             str(runtime.memory_usage().current()) + "MB")
                try:
                    scores = pickle.loads(zlib.decompress(
                        r.ParticipantsScores))
                except:
                    scoresdicts = marshal.loads(
                        zlib.decompress(r.ParticipantsScores))
                    scoreslist = [structures.LiteBot() for _ in scoresdicts]
                    for s, d in zip(scoreslist, scoresdicts):
                        s.__dict__.update(d)
                    scores = {s.Name: s for s in scoreslist}

                if len(scores) == 0:
                    continue

                r.ParticipantsScores = None
                #gc.collect()

                particHash = [p + "|" + r.Name for p in scores]

                particSplit = list_split(particHash, 32)
                ppDict = {}
                for l in particSplit:
                    ppDict.update(memcache.get_multi(l))
                    time.sleep(0.1)

                particSplit = None

                bots = [ppDict.get(h, None) for h in particHash]

                botsdict = {}

                missingHashes = []
                missingIndexes = []
                for i in xrange(len(bots)):
                    if bots[i] is None:
                        missingHashes.append(particHash[i])
                        missingIndexes.append(i)

                    elif isinstance(bots[i], structures.BotEntry):
                        bots[i] = structures.CachedBotEntry(bots[i])

                if len(missingHashes) > 0:
                    bmis = structures.BotEntry.get_by_key_name(missingHashes)

                    #lost = False
                    lostList = []

                    for i in xrange(len(missingHashes)):
                        if bmis[i] is not None:
                            cb = structures.CachedBotEntry(bmis[i])
                            bots[missingIndexes[i]] = cb
                            botsdict[missingHashes[i]] = cb

                        else:
                            bots[missingIndexes[i]] = None
                            lostList.append(missingHashes[i])
                            #lost = True

                while len(particHash) > 0:
                    particHash.pop()
                particHash = None

                while len(missingHashes) > 0:
                    missingHashes.pop()
                missingHashes = None

                while len(missingIndexes) > 0:
                    missingIndexes.pop()
                missingIndexes = None

                logging.info("mem usage after loading bots: " +
                             str(runtime.memory_usage().current()) + "MB")

                bots = filter(lambda b: b is not None, bots)

                get_key = attrgetter("APS")
                bots.sort(key=lambda b: get_key(b), reverse=True)

                gc.collect()

                botIndexes = {}
                for i, b in enumerate(bots):
                    b.Name = b.Name.encode('ascii')
                    intern(b.Name)
                    botIndexes[b.Name] = i
                    b.VoteScore = 0.

                botlen = len(bots)
                APSs = numpy.empty([botlen, botlen])
                APSs.fill(numpy.nan)
                totalAlivePairs = 0
                for i, b in enumerate(bots):
                    try:
                        pairings = pickle.loads(zlib.decompress(
                            b.PairingsList))
                    except:
                        pairsDicts = marshal.loads(
                            zlib.decompress(b.PairingsList))

                        pairings = [structures.ScoreSet() for _ in pairsDicts]
                        for s, d in zip(pairings, pairsDicts):
                            s.__dict__.update(d)
                    removes = []
                    alivePairings = 0
                    for q, p in enumerate(pairings):
                        j = botIndexes.get(p.Name, -1)
                        if j != -1:
                            APSs[j, i] = numpy.float64(p.APS)
                            p.Alive = True
                            alivePairings += 1
                        else:
                            removes.append(q)
                    b.Pairings = alivePairings
                    totalAlivePairs += alivePairings
                    removes.reverse()
                    removed = False
                    for q in removes:
                        p = pairings[q]
                        if p.LastUpload < cutoff_date_string:
                            removed = True
                            pairings.pop(q)
                        else:
                            if p.Alive:
                                removed = True
                            p.Alive = False
                    if removed:
                        b.PairingsList = zlib.compress(
                            pickle.dumps(pairings, -1), 1)

                gc.collect()

                APSs += numpy.float64(100) - APSs.transpose()
                APSs *= numpy.float64(0.5)

                numpy.fill_diagonal(APSs, numpy.nan)

                gc.collect()
                logging.info(
                    str(len(bots)) + " bots loaded, total of " +
                    str(totalAlivePairs) + " alive pairings")
                logging.info("mem usage after unzipping pairings: " +
                             str(runtime.memory_usage().current()) + "MB")

                #Vote
                mins = numpy.nanmax(APSs, 1)
                for i, minimum in enumerate(mins):
                    minIndexes = numpy.argwhere(APSs[i, ...] == minimum)
                    ties = len(minIndexes)
                    if ties > 0:
                        increment = 1. / ties
                        for minIndex in minIndexes:
                            bots[minIndex].VoteScore += increment

                #inv_len = 1.0/botlen
                for b in bots:
                    if b.Pairings > 0:
                        b.VoteScore = 100.0 * b.VoteScore / float(b.Pairings)
                    else:
                        b.VoteScore = 0

                #KNN PBI
                half_k = int(math.ceil(math.sqrt(botlen) / 2))
                KNN_PBI = -numpy.ones((botlen, botlen))
                for i in xrange(len(bots)):
                    low_bound = max([0, i - half_k])
                    high_bound = min([botlen - 1, i + half_k])
                    low_high_bound = min([i + 1, high_bound])
                    before = APSs[:, low_bound:i]
                    after = APSs[:, low_high_bound:high_bound]
                    compare = numpy.hstack((before, after))
                    mm = numpy.mean(numpy.ma.masked_array(
                        compare, numpy.isnan(compare)),
                                    axis=1)
                    KNN_PBI[:, i] = APSs[:, i] - mm.filled(numpy.nan)

                #    a[i] = 0
                #    logging.info("mean error of transpose: " + str(numpy.mean(numpy.square(a))))

                #KNN_PBI[KNN_PBI == numpy.nan] = -1

                #logging.info("mem usage after KNNPBI: " + str(runtime.memory_usage().current()) + "MB")

                # Avg Normalised Pairing Percentage

                mins = numpy.nanmin(APSs, 1)
                maxs = numpy.nanmax(APSs, 1)
                inv_ranges = numpy.float64(1.0) / (maxs - mins)
                NPPs = -numpy.ones((botlen, botlen))
                for i in range(botlen):
                    if numpy.isfinite(inv_ranges[i]):
                        NPPs[i, :] = numpy.float64(100) * (
                            APSs[i, :] - mins[i]) * inv_ranges[i]
                    else:
                        NPPs[i, :] = numpy.float64(100)

                #NPPs[NPPs] = -1

                #logging.info("mem usage after ANPP: " + str(runtime.memory_usage().current()) + "MB")

                changedBots = []  #bots with new pairings since last run

                # save to cache
                botsdict = {}

                for i, b in enumerate(bots):
                    #try:
                    pairings = pickle.loads(zlib.decompress(b.PairingsList))
                    #except:
                    #    pairsDicts = marshal.loads(zlib.decompress(b.PairingsList))
                    #
                    #    pairings = [structures.ScoreSet() for _ in pairsDicts]
                    #    for s, d in zip(pairings, pairsDicts):
                    #        s.__dict__.update(d)
                    nppCount = 0
                    totalNPP = 0.0

                    apsCount = 0
                    totalAPS = 0.0

                    aliveCount = 0

                    changed = False
                    for p in pairings:
                        j = botIndexes.get(p.Name, -1)
                        if j != -1:
                            p.Alive = True
                            changePotential = (p.KNNPBI == 0.0 and p.NPP == -1)

                            aliveCount += 1
                            p.KNNPBI = float(KNN_PBI[j, i])
                            p.NPP = float(NPPs[j, i])

                            if not numpy.isnan(APSs[j, i]):
                                p.APS = float(APSs[j, i])
                                totalAPS += p.APS
                                apsCount += 1

                            if numpy.isnan(p.KNNPBI):
                                p.KNNPBI = 0

                            if numpy.isnan(p.NPP):
                                p.NPP = -1
                            else:
                                totalNPP += p.NPP
                                nppCount += 1

                            if changePotential and p.KNNPBI != 0.0 and p.NPP != -1:
                                changed = True
                        else:
                            p.Alive = False
                            p.KNNPBI = 0
                            p.NPP = -1

                    if nppCount > 0:
                        b.ANPP = float(totalNPP / nppCount)
                    else:
                        b.ANPP = -1.0
                    if apsCount > 0:
                        b.APS = float(totalAPS / apsCount)
                    else:
                        b.APS = -1.0

                    b.PairingsList = zlib.compress(pickle.dumps(pairings, -1),
                                                   1)
                    b.Pairings = aliveCount
                    if b.Pairings > 0:
                        botsdict[b.key_name] = b
                    if changed:
                        changedBots.append(b)

                KNN_PBI = None
                APSs = None
                NPPs = None
                logging.info("mem usage after zipping: " +
                             str(runtime.memory_usage().current()) + "MB")

                gc.collect()
                #logging.info("mem usage after gc: " + str(runtime.memory_usage().current()) + "MB")
                if len(botsdict) > 0:
                    splitlist = dict_split(botsdict, 20)
                    logging.info("split bots into " + str(len(splitlist)) +
                                 " sections")

                    for d in splitlist:
                        rpcList.append(client.set_multi_async(d))
                        time.sleep(.5)  #throttle

                    logging.info("wrote " + str(len(botsdict)) +
                                 " bots to memcache")

                botsdict.clear()
                botsdict = None

                scores = {b.Name: structures.LiteBot(b) for b in bots}

                # bots = None
                r.ParticipantsScores = None
                gc.collect()

                r.ParticipantsScores = db.Blob(
                    zlib.compress(
                        pickle.dumps(scores, pickle.HIGHEST_PROTOCOL), 3))
                logging.info("mem usage after participants zipping: " +
                             str(runtime.memory_usage().current()) + "MB")
                #r.ParticipantsScores = zlib.compress(marshal.dumps([scores[s].__dict__ for s in scores]),4)
                scores = None

                if write:
                    writebots = [None] * len(bots)
                    for i, b in enumerate(bots):
                        putb = structures.BotEntry(key_name=b.key_name)
                        putb.init_from_cache(b)
                        writebots[i] = putb
                    write_lists = list_split(writebots, 50)
                    for subset in write_lists:
                        db.put(subset)
                        time.sleep(0.1)  #throttle
                    logging.info("wrote " + str(len(writebots)) +
                                 " bots to database")

                while len(bots) > 0:
                    bots.pop()
                bots = None

                if minwrite:
                    writebots = [None] * len(changedBots)
                    for i, b in enumerate(changedBots):
                        putb = structures.BotEntry(key_name=b.key_name)
                        putb.init_from_cache(b)
                        writebots[i] = putb
                    write_lists = list_split(writebots, 50)
                    for subset in write_lists:
                        db.put(subset)
                        time.sleep(0.1)
                    logging.info("wrote " + str(len(writebots)) +
                                 " changed bots to database")

                while len(changedBots) > 0:
                    changedBots.pop()
                changedBots = None
                gc.collect()

                if write or minwrite:
                    r.BatchScoresAccurate = True

                rpcList.append(client.set_multi_async({r.Name: r}))

                db.put([r])
                #gc.collect()
                r = None
                logging.info("mem usage after write: " +
                             str(runtime.memory_usage().current()) + "MB")

            for rpc in rpcList:
                rpc.get_result()

            elapsed = time.time() - starttime
            logging.info("Success in " + str(round(1000 * elapsed) / 1000) +
                         "s")
            self.response.out.write("Success in " +
                                    str(round(1000 * elapsed)) + "ms")
        except:
            logging.exception('')
            elapsed = time.time() - starttime
            logging.info("Error in " + str(round(1000 * elapsed) / 1000) + "s")
            self.response.out.write("Error in " + str(round(1000 * elapsed)) +
                                    "ms")
Example #51
0
def CreateMachines(num_instances,
                   token,
                   os,
                   browser,
                   browser_version,
                   download_info,
                   retries=0):
    """Create and launch EC2 machines for the given parameters.

  Args:
    num_instances: An integer representing the number of instances to spawn
      with the given configuration.
    token: A string representing the token for this run.
    os: An integer that corresponds to an enum.OS value.
    browser: An integer that corresponds to an enum.BROWSER value.
    browser_version: A string representing browser version to use.
      Specifically, this should be the channel for Chrome.
    download_info: A string representing the information necessary for
      calculating the version and browser download url for the given machine.
    retries: An optional parameter specifying the initial retry count for the
      machine.
  """
    logging.info(
        '\n'.join([
            'num_instances: %d', 'token: %s', 'os: %d', 'browser: %d',
            'browser_version: %s'
        ]), num_instances, token, os, browser, browser_version)

    ec2 = ec2_manager.EC2Manager()
    user_data = simplejson.dumps({
        'channel': browser_version,
        'os': OS_TO_USER_DATA[os],
        'token': token,
        'download_info': download_info
    })
    logging.info('Spawning EC2 machines.')
    # Note: All exceptions are caught here because the EC2 API could fail after
    # successfully starting a machine. Because this task is rescheduled on
    # failure, we need to make sure we don't keep spawning EC2 machines.
    try:
        bots_instances = ec2.StartAmiWithOs(
            os,
            count=num_instances,
            instance_type=DEFAULT_INSTANCE_SIZE,
            user_data=user_data)
    except Exception:
        logging.exception('Something failed when setting up the EC2 instance. '
                          'Stopping setup for this instance.')
        return

    logging.info('Creating the corresponding ClientMachine models.')
    new_instances = []
    for instance in bots_instances:
        new_instances.append(
            client_machine.ClientMachine(
                vm_service=enum.VM_SERVICE.EC2,
                os=os,
                browser=browser,
                browser_version=browser_version,
                client_id=instance.inst_id,
                status=enum.MACHINE_STATUS.PROVISIONED,
                retry_count=retries,
                token=token,
                download_info=download_info))

    db.put(new_instances)
    logging.info('Finished creating the ClientMachine models.')
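
A hedged usage sketch for CreateMachines(); every value below (token, enum members, download_info) is a placeholder for illustration, not something taken from the source:

CreateMachines(
    num_instances=2,
    token='run-12345',               # hypothetical run token
    os=enum.OS.WINDOWS,              # assumed enum.OS member
    browser=enum.BROWSER.CHROME,     # assumed enum.BROWSER member
    browser_version='stable',
    download_info='{}')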
Example #52
0
    def _mock_data(self):
        # The fictional day for these tests is Wednesday, July 29, 2015
        slacklib._TODAY_FN = lambda: datetime.datetime(2015, 7, 29)

        # Stuart created his account, but has never once filled out a snippet
        db.put(models.User(email='*****@*****.**'))

        # Fleetwood has two recent snippets, and always uses markdown lists,
        # but sometimes uses different list indicators or indention.
        db.put(models.User(email='*****@*****.**'))
        db.put(
            models.Snippet(email='*****@*****.**',
                           week=datetime.date(2015, 7, 27),
                           text=textwrap.dedent("""
            *  went for a walk
            *  sniffed some things
            *  hoping to sniff more things! #yolo
            """)))
        db.put(
            models.Snippet(email='*****@*****.**',
                           week=datetime.date(2015, 7, 20),
                           text=textwrap.dedent("""
            - lots of walks this week
            - not enough sniffing, hope to remedy next week!
            """)))

        # Toby has filled out two snippets, but missed a week in-between while
        # on vacation. When he got back from vacation he was still jetlagged so
        # he wrote a longform paragraph instead of a list.
        db.put(models.User(email='*****@*****.**'))
        db.put(
            models.Snippet(email='*****@*****.**',
                           week=datetime.date(2015, 7, 13),
                           text=textwrap.dedent("""
            - going on vacation next week, so excited!


            """)))
        db.put(
            models.Snippet(email='*****@*****.**',
                           week=datetime.date(2015, 7, 27),
                           text=textwrap.dedent("""
            I JUST GOT BACK FROM VACATION IT WAS TOTALLY AWESOME AND I SNIFFED
            ALL SORTS OF THINGS.  I GUESS I NEED TO WRITE SOMETHING HERE, HUH?

            OK THEN:
            - I had fun.

            LUNCHTIME SUCKERS!
            """)))

        # Fozzie tried hard to create an entry manually in the previous week,
        # but didn't understand markdown list syntax and got discouraged (so
        # has no entry this week, and a malformed one last week).
        db.put(models.User(email='*****@*****.**'))
        db.put(
            models.Snippet(email='*****@*****.**',
                           week=datetime.date(2015, 7, 20),
                           text=textwrap.dedent("""
            -is this how I list?
            -why is it not formatting??!?
            """)))
Example #53
0
 def get(self):
     query = VideoPlaylist.all()
     all_video_playlists = query.fetch(100000)
     for video_playlist in all_video_playlists:
         video_playlist.live_association = False
     db.put(all_video_playlists)
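
One caveat with the pattern above: fetching up to 100000 entities and writing them back with a single db.put() can run into datastore batch-size and request limits. A hedged variant that writes in fixed-size chunks (the 500-entity chunk is an assumption, chosen to stay under the commonly cited batch put limit):

def put_in_chunks(entities, chunk_size=500):
    # Write entities in fixed-size batches to avoid oversized RPCs.
    for i in xrange(0, len(entities), chunk_size):
        db.put(entities[i:i + chunk_size])

put_in_chunks(all_video_playlists)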
Example #54
0
def put_google_events(service_user, calendar_str_key, calendar_id, solution,
                      google_calendar_finished, google_events, language):
    to_put = []
    no_title_text = common_translate(language, SOLUTION_COMMON, '(No title)')
    for google_event in google_events:
        try:
            google_event_id = google_event["id"]
            event_parent_key = parent_key(service_user, solution)
            event = Event.all().ancestor(event_parent_key).filter(
                "source =",
                Event.SOURCE_GOOGLE_CALENDAR).filter("external_id =",
                                                     google_event_id).get()
            if not event:
                event = Event(parent=event_parent_key,
                              source=Event.SOURCE_GOOGLE_CALENDAR,
                              external_id=google_event_id)

            if google_event.get("status", "confirmed") == "cancelled":
                if not event.is_saved():
                    continue
                event.deleted = True
            elif not event.deleted:
                event.deleted = False
            event.title = google_event.get('summary', no_title_text)
            event.place = google_event.get("location",
                                           u"").replace(u"\n", u" ")
            event.organizer = google_event.get("organizer",
                                               {}).get("displayName", u"")
            event.description = google_event.get('description', u"")
            event.calendar_id = calendar_id
            event.external_link = google_event["htmlLink"]
            event.start_dates = list()
            event.end_dates = list()

            if google_event["start"].get("dateTime", None):
                start_date_with_tz = dateutil.parser.parse(
                    google_event["start"]["dateTime"])
                end_date_with_tz = dateutil.parser.parse(
                    google_event["end"]["dateTime"])

                day_difference = abs(
                    (end_date_with_tz - start_date_with_tz).days)
                if day_difference == 0:
                    start_epoch = get_epoch_from_datetime(
                        datetime(start_date_with_tz.year,
                                 start_date_with_tz.month,
                                 start_date_with_tz.day,
                                 start_date_with_tz.hour,
                                 start_date_with_tz.minute))
                    event.start_dates.append(start_epoch)
                    end_epoch = get_epoch_from_datetime(
                        datetime(end_date_with_tz.year, end_date_with_tz.month,
                                 end_date_with_tz.day, end_date_with_tz.hour,
                                 end_date_with_tz.minute))
                    end_date = datetime.fromtimestamp(end_epoch)
                    event.end_dates.append(
                        int(
                            timedelta(
                                hours=end_date.hour,
                                minutes=end_date.minute,
                                seconds=end_date.second).total_seconds()))


                # TODO: multi-day event
                # else:
                #     start_date = datetime.strptime(google_event["start"]["date"], '%Y-%m-%d')
                #     end_date = datetime.strptime(google_event["end"]["date"], '%Y-%m-%d')
                #     day_difference = abs((end_date - start_date).days)

            if event.start_dates:
                event.first_start_date = event.start_dates[0]
                event.last_start_date = event.start_dates[-1]
                to_put.append(event)
            else:
                logging.info("Skipping event because it had no start_dates")
                logging.debug(google_event)
        except:
            logging.warn('Failed to put Google Event: %s', google_event)
            raise

    if to_put:
        db.put(to_put)

    if google_calendar_finished:

        def trans():
            scgs = SolutionCalendarGoogleSync.get_by_key_name(
                service_user.email())
            if not scgs:
                return True  # update was run from saving a calendar
            if calendar_str_key in scgs.google_calendar_keys:
                scgs.google_calendar_keys.remove(calendar_str_key)
                if not scgs.google_calendar_keys:
                    scgs.delete()
                    return True
                scgs.put()
                return False

        if db.run_in_transaction(trans):
            from solutions.common.bizz.provisioning import populate_identity_and_publish
            sln_settings = get_solution_settings(service_user)
            sln_main_branding = get_solution_main_branding(service_user)
            deferred.defer(populate_identity_and_publish, sln_settings,
                           sln_main_branding.branding_key)
Example #55
0
    def taskDifficultyEditPost(self, request, program_entity, params):
        """View method for edit task difficulty tags POST requests.
    """
        post_dict = request.POST

        operation = simplejson.loads(post_dict.get('operation'))

        # invalid request
        INVALID_REQUEST_RESPONSE = http.HttpResponse()
        INVALID_REQUEST_RESPONSE.status_code = 400
        if not operation:
            return INVALID_REQUEST_RESPONSE

        op = operation.get('op')

        # TODO(ljvderijk): How do we want to deal with the setting of the value
        # property in the tag since it now requires an extra put.

        data = operation['data']
        if op == 'add':
            for tag_data in data:
                tag = gci_task_model.TaskDifficultyTag.get_or_create(
                    program_entity, tag_data['name'])
                tag.value = int(tag_data['value'])
                tag.put()
        elif op == 'change':
            current_tag_data = data[0]
            new_tag_data = data[1]

            current_tag_name = current_tag_data['name']
            new_tag_name = new_tag_data['name']

            current_tag = gci_task_model.TaskDifficultyTag.get_by_scope_and_name(
                program_entity, current_tag_name)

            if not current_tag:
                return INVALID_REQUEST_RESPONSE

            if current_tag_name != new_tag_name:
                # rename tag
                new_tag = gci_task_model.TaskDifficultyTag.copy_tag(
                    program_entity, current_tag_name, new_tag_name)
                # TODO(ljvderijk): The tag copy method should work with new fields
                new_tag.order = current_tag.order
                new_tag.value = int(new_tag_data['value'])
                new_tag.put()
            else:
                # change value of the tag
                current_tag.value = int(new_tag_data['value'])
                current_tag.put()
        elif op == 'delete':
            for tag_data in data:
                gci_task_model.TaskDifficultyTag.delete_tag(
                    program_entity, tag_data['name'])
        elif op == 'reorder':
            tags = []
            for i in range(0, len(data)):
                tag_data = data[i]
                tag = gci_task_model.TaskDifficultyTag.get_by_scope_and_name(
                    program_entity, tag_data['name'])

                tag.order = i
                tags.append(tag)

            db.put(tags)

        return http.HttpResponse()
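
For reference, the handler above expects the POST 'operation' parameter to be JSON with an op string and a data list of tag dicts. A hedged example of a payload that would take the 'change' branch (tag names and values are illustrative):

operation = {
    'op': 'change',
    'data': [
        {'name': 'Easy', 'value': 1},     # current tag
        {'name': 'Trivial', 'value': 1},  # new tag (rename)
    ],
}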
Example #56
0
File: stat.py Project: xinl/lifepadbox
def update_count(type, add=[], subtract=[]):
    #input: type = "archive" or "tag"; add, subtract = e.g. ["200412","200501"] or ["Books", "Games"]

    if type == "archive":
        cls = Archive
    elif type == "tag":
        cls = Tag

    items_dict = {}
    for t in add:
        if t in items_dict:
            items_dict[t] += 1
        else:
            items_dict[t] = 1
    for t in subtract:
        if t in items_dict:
            items_dict[t] -= 1
        else:
            items_dict[t] = -1
    # No need to modify if the count change to a term is 0.
    for k in items_dict.keys():
        if items_dict[k] == 0:
            items_dict.pop(k)

    results = cls.get_by_key_name(items_dict.keys())

    # cls.get_by_key_name returns a quirky [None] when no result can be found.
    results = [r for r in results if r is not None]

    to_add = []
    to_del = []

    if results:

        i = 0
        while i < len(results):
            results[i].count += items_dict[results[i].key().name()]
            items_dict.pop(results[i].key().name())
            #pop out terms found in query results so the rest will be new terms.
            if results[i].count < 1:
                to_del.append(results[i])
                results.pop(i)
            else:
                i += 1
        """
        for i in range(len(results)):
            results[i].count += items_dict[results[i].key().name()]
            items_dict.pop(results[i].key().name())
            #pop out terms found in query results so the rest will be new terms.
            if results[i].count < 1:
                to_del.append(results[i])
                results.pop(i)
                i -= 1
        """
    # create new entries for non-existing terms
    if len(items_dict) > 0:
        for t in items_dict:
            if items_dict[t] > 0:
                # Don't create new term unless the initial count > 0
                to_add.append(cls(key_name=t, count=items_dict[t]))
        results += to_add

    db.put(results)

    # remove terms whose count < 1
    if len(to_del) > 0:
        db.delete(to_del)
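
A brief usage sketch for update_count(), following the input format its leading comment describes (the tag names are illustrative):

# A post was re-tagged from "Games" to "Books": "Books" is incremented,
# "Games" is decremented, and any term whose count drops below 1 is deleted.
update_count("tag", add=["Books"], subtract=["Games"])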
Example #57
0
    def _queueDispatchFanIn(self,
                            nextEvent,
                            fanInPeriod=0,
                            retryOptions=None,
                            queueName=None):
        """ Queues a call to .dispatch(nextEvent) in the task queue, or saves the context to the 
        datastore for processing by the queued .dispatch(nextEvent)
        
        @param nextEvent: a string event 
        @param fanInPeriod: the period of time between fan in Tasks 
        @param queueName: the queue name to Queue into 
        @return: a taskqueue.Task instance which may or may not have been queued already
        """
        assert nextEvent is not None
        assert not self.get(
            constants.INDEX_PARAM)  # fan-in after fan-in is not allowed
        assert queueName

        # we pop this off here because we do not want the fan-out/continuation param as part of the
        # task name, otherwise we lose the fan-in - each fan-in gets one work unit.
        self.pop(constants.GEN_PARAM, None)
        fork = self.pop(constants.FORK_PARAM, None)

        # transfer the fan-in-group into the context (under a fixed value key) so that states beyond
        # the fan-in get unique Task names
        # FIXME: this will likely change once we formalize what to do post fan-in
        transition = self.currentState.getTransition(nextEvent)
        if self.get(transition.target.fanInGroup) is not None:
            self[constants.FAN_IN_GROUP_PARAM] = self[
                transition.target.fanInGroup]

        taskNameBase = self.getTaskName(nextEvent, fanIn=True)
        rwlock = ReadWriteLock(taskNameBase, self)
        index = rwlock.currentIndex()

        # (***)
        #
        # grab the lock - memcache.incr()
        #
        # on Task retry, multiple incr() calls are possible. possible ways to handle:
        #
        # 1. release the lock in a 'finally' clause, but then risk missing a work
        #    package because acquiring the read lock will succeed even though the
        #    work package was not written yet.
        #
        # 2. allow the lock to get too high. the fan-in logic attempts to wait for
        #    work packages across multiple-retry attempts, so this seems like the
        #    best option. we basically trade a bit of latency in fan-in for reliability.
        #
        rwlock.acquireWriteLock(index, nextEvent=nextEvent)

        # insert the work package, which is simply a serialized FSMContext
        workIndex = '%s-%d' % (taskNameBase, knuthHash(index))

        # on retry, we want to ensure we get the same work index for this task
        actualTaskName = self.__obj[constants.TASK_NAME_PARAM]
        indexKeyName = 'workIndex-' + '-'.join(
            [str(i) for i in [actualTaskName, fork] if i]) or None
        semaphore = RunOnceSemaphore(indexKeyName, self)

        # check if the workIndex changed during retry
        semaphoreWritten = False
        if self.__obj[constants.RETRY_COUNT_PARAM] > 0:
            # see comment (A) in self._queueDispatchFanIn(...)
            time.sleep(constants.DATASTORE_ASYNCRONOUS_INDEX_WRITE_WAIT_TIME)
            payload = semaphore.readRunOnceSemaphore(payload=workIndex,
                                                     transactional=False)
            if payload:
                semaphoreWritten = True
                if payload != workIndex:
                    self.logger.info(
                        "Work index changed from '%s' to '%s' on retry.",
                        payload, workIndex)
                    workIndex = payload

        # update this here so it gets written down into the work package too
        self[constants.INDEX_PARAM] = index

        # write down two models, one actual work package, one idempotency package
        keyName = '-'.join([str(i)
                            for i in [actualTaskName, fork] if i]) or None
        work = _FantasmFanIn(context=self,
                             workIndex=workIndex,
                             key_name=keyName)

        # close enough to idempotent, but could still write only one of the entities
        # FIXME: could be made faster using a bulk put, but this interface is cleaner
        if not semaphoreWritten:
            semaphore.writeRunOnceSemaphore(payload=workIndex,
                                            transactional=False)

        # put the work item
        db.put(work)

        # (A) now the datastore is asynchronously writing the indices, so the work package may
        #     not show up in a query for a period of time. there is a corresponding time.sleep()
        #     in the fan-in of self.mergeJoinDispatch(...)

        # release the lock - memcache.decr()
        rwlock.releaseWriteLock(index)

        try:

            # insert a task to run in the future and process a bunch of work packages
            now = time.time()
            url = self.buildUrl(self.currentState, nextEvent)
            params = self.buildParams(self.currentState, nextEvent)
            task = Task(name='%s-%d' % (taskNameBase, index),
                        method=self.method,
                        url=url,
                        params=params,
                        eta=datetime.datetime.utcfromtimestamp(now) +
                        datetime.timedelta(seconds=fanInPeriod),
                        headers=self.headers,
                        retry_options=retryOptions)
            self.Queue(name=queueName).add(task)
            return task

        except (TaskAlreadyExistsError, TombstonedTaskError):
            pass  # Fan-in magic
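
The work-index computation above relies on knuthHash(), which is not shown. A hypothetical sketch in the spirit of Knuth's multiplicative hashing (an assumption, not necessarily the library's actual implementation), used to scatter sequential lock indices across work-package key names:

def knuthHash(number):
    # Knuth's multiplicative hash: spreads consecutive integers across the
    # 32-bit range so work-package key names do not cluster.
    return (number * 2654435761) % 2**32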
Example #58
0
def do_task_fetch_material(username, force_top):
    user = CurryUser.get_by_key_name(username)
    force_top = bool(int(force_top))

    if not user:
        logging.error("no such user '%s'" % username)
        return 'bad'

    tweet_list = api.GetUserTimeline(screen_name=username,
                                     count=config.FETCH_COUNT)

    tweet = None
    material_list = None
    success = False
    if force_top:
        tweet_list = tweet_list[0:1]
    else:
        shuffle(tweet_list)

    #
    # select material
    #
    for tweet in tweet_list:
        # check history
        if not force_top and is_duplicated(tweet):
            continue

        text = tweet.GetText().encode('utf-8')
        material_list = analyze(text, count=config.TWEET_MATERIAL_MAX)

        if len(material_list) > 0:
            # found material
            success = True
            break

    if success:
        # record to history
        # TODO: periodically trim old history entries
        History(key_name=str(tweet.id), timestamp=datetime.now()).put()
    else:
        logging.info("material not found for user '%s'" % username)
        return 'bad'

    #
    # select receivers
    #
    link_list = (UserLink.all().filter(
        'sender = ', user).order('timestamp').fetch(limit=config.RECEIVER_MAX))

    for link in link_list:
        # randomize material per receiver
        shuffle(material_list)
        count = 1 + int(random() * len(material_list))
        receive_material = material_list[:count]

        taskqueue.add(queue_name='post-queue',
                      url='/task/post_material/%s/%s' %
                      (username, link.receiver.key().name()),
                      params={'material': receive_material})

        link.timestamp = datetime.now()
        logging.debug(
            "sending from user '%s' to '%s' with material '%s'" %
            (username, link.receiver.key().name(), repr(receive_material)))
    # persist the timestamps updated in the loop above (one batch write)
    db.put(link_list)

    # send to karei_bot if no receivers
    if len(link_list) == 0:
        shuffle(material_list)
        count = 1 + int(random() * len(material_list))
        receive_material = material_list[:count]

        taskqueue.add(queue_name='post-queue',
                      url='/task/post_material/%s/%s' %
                      (username, config.MY_NAME),
                      params={'material': receive_material})

        logging.debug("sending from user '%s' to '%s' with material '%s'" %
                      (username, config.MY_NAME, repr(receive_material)))

    return 'ok'
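
The two "shuffle, then keep a random-length prefix" passages above draw a uniformly random non-empty selection from material_list. random.sample expresses the same pick more directly; a small equivalent sketch, assuming material_list is non-empty as the surrounding code guarantees:

from random import randint, sample

def pick_materials(material_list):
    # Choose between 1 and len(material_list) items without replacement;
    # equivalent to shuffle() followed by slicing a random-length prefix.
    count = randint(1, len(material_list))
    return sample(material_list, count)
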
Example #59
def UpdateScheduledJobs(jobs):
  """Updates the scheduled jobs."""
  db.put(jobs)
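
db.put accepts a list as well as a single entity, so the one-liner above issues a single batch write rather than one RPC per job. A hedged usage sketch (the ScheduledJob model and its fields are assumptions for illustration):

from google.appengine.ext import db

class ScheduledJob(db.Model):
    # hypothetical model, for illustration only
    name = db.StringProperty()
    enabled = db.BooleanProperty(default=True)

jobs = ScheduledJob.all().fetch(100)
for job in jobs:
    job.enabled = False    # mutate in memory first...
UpdateScheduledJobs(jobs)  # ...then persist everything in one batch put
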
Example #60
    def test_import_notes_disabled_note_records(self):
        '''Check that notes are rejected from API import when
        notes_disabled is set to True by the record author.'''
        records = []
        # First prepare and import two person records
        for i in range(2):
            given_name = "given_name_%d" % i
            family_name = "family_name_%d" % i

            source_date = "2010-01-01T01:23:45Z"
            record_id = "test_domain/person_%d" % i

            author_name = "test_author"
            author_email = "test_email"

            records.append({
                'given_name': given_name,
                'family_name': family_name,
                'person_record_id': record_id,
                'source_date': source_date,
                'author_name': author_name,
                'author_email': author_email
            })
        written, skipped, total = importer.import_records(
            'haiti', 'test_domain', importer.create_person, records, False,
            True, None)

        assert written == 2
        assert len(skipped) == 0
        assert total == 2
        assert model.Person.all().count() == 2

        # Disable comments for first person record
        person = model.Person.get('haiti', 'test_domain/person_0')
        assert person
        person.notes_disabled = True
        db.put([person])

        for person in model.Person.all():
            if person.person_record_id == 'test_domain/person_0':
                assert person.notes_disabled == True

        # Import notes
        records = []
        for i in range(2):
            source_date = '2010-01-01T01:23:45Z'
            note_id = 'test_domain/record_%d' % i
            person_id = 'test_domain/person_%d' % i
            records.append({
                'person_record_id': person_id,
                'note_record_id': note_id,
                'source_date': source_date
            })
        written, skipped, total = importer.import_records(
            'haiti', 'test_domain', importer.create_note, records, False, True,
            None)

        # Check that the note associated with the first person record is skipped.
        assert written == 1
        assert len(skipped) == 1
        assert skipped[0] == (
            'The author has disabled new commenting on this record', {
                'person_record_id': 'test_domain/person_0',
                'source_date': '2010-01-01T01:23:45Z',
                'note_record_id': 'test_domain/record_0',
            })

        assert total == 2
        assert model.Note.all().count() == 1