Esempio n. 1
0
class Election(db.Model):
    """
    An election that users may vote for.
    """
    name = db.StringProperty(required=True)
    start = db.DateTimeProperty(required=True)  # Time when voting begins
    end = db.DateTimeProperty(required=True)  # Time when voting ends
    organization = db.ReferenceProperty(Organization,
                                        collection_name='elections')
    result_computed = db.BooleanProperty(required=True, default=False)
    result_delay = db.IntegerProperty(
        required=True, default=0)  # Results delay to public in seconds
    universal = db.BooleanProperty(required=True, default=False)
    hidden = db.BooleanProperty(required=True, default=False)
    task_count = db.IntegerProperty(
        required=True,  # Counter used to identify tasks
        default=0)
    voter_count = db.IntegerProperty(required=True, default=0)
    voted_count = db.IntegerProperty(required=True, default=0)
    description = db.TextProperty(required=False, default="")

    def to_json(self, parseable_date=False):
        """Return a JSON-serializable dict describing this election.

        parseable_date: when True, format the times so they can be parsed
        in JS with Date.parse(); otherwise use plain str() of the datetimes.
        """
        # Publication time is the end of voting plus the configured delay.
        pub = self.end + timedelta(seconds=self.result_delay)
        if parseable_date:
            fmt = '%a, %B %d, %Y, %I:%M %p'
            times = {
                'start': self.start.strftime(fmt) + ' UTC',
                'end': self.end.strftime(fmt) + ' UTC',
                'pub': pub.strftime(fmt) + ' UTC',
            }
        else:
            times = {
                'start': str(self.start),
                'end': str(self.end),
                'pub': str(pub),
            }

        # Later phases take precedence: a computed result beats
        # "ended", which beats "in progress".
        now = datetime.now()
        if self.result_computed:
            status = 'Result computed'
        elif now > self.end:
            status = 'Voting has ended'
        elif now > self.start:
            status = 'Voting in progress'
        else:
            status = 'Not started'

        return {
            'id': str(self.key()),
            'name': self.name,
            'organization': self.organization.name,
            'times': times,
            'result_computed': self.result_computed,
            'result_delay': self.result_delay,
            'universal': self.universal,
            'hidden': self.hidden,
            'voter_count': self.voter_count,
            'voted_count': self.voted_count,
            'description': self.description,
            'status': status,
        }

    @property
    def election_positions(self):
        """All ElectionPositions for this election, oldest first."""
        query = ElectionPosition.gql(
            "WHERE election=:1 ORDER BY datetime_created", self)
        return list(query)
Esempio n. 2
0
class Artwork(db.Model):
    """An artwork with a creation date."""
    # Format/schema version of `data` — presumably; verify against writers.
    version = db.IntegerProperty(required=True)
    # Serialized artwork payload — exact format not visible here.
    data = db.TextProperty(required=True)
    # Set automatically to the time of the first put().
    date = db.DateTimeProperty(auto_now_add=True)
Esempio n. 3
0
class Like(db.Model):
    """Records that a viewer liked a particular blog post."""
    # Numeric id of the liked blog post.
    blog_id = db.IntegerProperty(required=True)
    # Identifier of the liking user — presumably a username; verify callers.
    viewer = db.StringProperty(required=True)
Esempio n. 4
0
class User(db.Model):
    """A named participant assigned to a room."""
    name = db.StringProperty(required=True)
    room = db.StringProperty(required=True)
    # Numeric role code — semantics defined by callers; TODO confirm.
    role = db.IntegerProperty()
    is_alive = db.BooleanProperty()
Esempio n. 5
0
class Episode(db.Model):
    """A single episode of a TV show.

    Episodes airing in roughly the last week are cached in memcache as a
    dict mapping show key -> list of episodes (at most 20 per show).
    """
    show = db.ReferenceProperty(Show)
    season = db.ReferenceProperty(Season)
    season_number = db.IntegerProperty()
    number = db.IntegerProperty()
    title = db.StringProperty()
    text = db.TextProperty(default="")
    date = db.DateTimeProperty()  # air time; treated as UTC by date_local

    # Memcache key for the cached show-key -> [episodes] dict.
    _memkey_episode_dict = "all_episodes_dict"

    @classmethod
    def kind(cls):
        # Legacy datastore kind name.
        return "series_episode"

    @property
    def date_end(self):
        """Air end time: start plus the show's runtime in minutes."""
        return self.date + datetime.timedelta(minutes=self.show.runtime)

    @property
    def date_local(self):
        """Air time in the show's local timezone (computed once per instance)."""
        if getattr(self, "_date_local", None) is None:
            try:
                tz = get_timezone_for_gmt_offset(self.show.timezone)
            except Exception:
                tz = utc  # unparseable timezone: fall back to UTC
            self._date_local = utc.localize(self.date).astimezone(tz)
        return self._date_local

    @property
    def date_local_end(self):
        """Air end time in the show's local timezone (computed once)."""
        if getattr(self, "_date_local_end", None) is None:
            try:
                tz = get_timezone_for_gmt_offset(self.show.timezone)
            except Exception:
                tz = utc  # unparseable timezone: fall back to UTC
            self._date_local_end = utc.localize(self.date_end).astimezone(tz)
        return self._date_local_end

    @classmethod
    def update_or_create(cls, season, episode_info):
        """Create or refresh the episode described by episode_info.

        Puts and returns the entity either way.
        """
        episode = Episode.all().filter("show =", season.show).filter(
            "season =", season).filter("number =", episode_info.nr).get()
        logging.debug("Found episode... %s" % episode)
        if episode is None:
            episode = Episode.create(season, episode_info)
        else:
            episode.update(episode_info)
        episode.put()
        return episode

    @classmethod
    def create(cls, season, episode_info):
        """Build (but do not put) a new Episode from episode_info."""
        return Episode(show=season.show,
                       season=season,
                       season_number=season.number,
                       number=episode_info.nr,
                       title=episode_info.title,
                       date=episode_info.date)

    @classmethod
    def get_all_dict(cls):
        """Return {show key: [episodes]} for episodes newer than ~a week ago.

        Served from memcache when available; stores at most 20 episodes
        per show.
        """
        episode_dict = memcache.get(cls._memkey_episode_dict)
        if episode_dict is not None:
            return episode_dict
        now = datetime.datetime.now()
        one_week_ago = now - datetime.timedelta(days=8)
        episodes = Episode.all().filter("date >", one_week_ago)
        episode_dict = {}
        for ep in episodes:
            bucket = episode_dict.setdefault(str(ep._show), [])
            if len(bucket) < 20:  # store max of 20 episodes per show
                bucket.append(ep)
        memcache.set(key=cls._memkey_episode_dict, value=episode_dict)
        return episode_dict

    @classmethod
    def clear_cache(cls):
        """Drop the cached episode dict from memcache."""
        memcache.delete(cls._memkey_episode_dict)

    @classmethod
    def add_clear_cache_task(cls, queue_name):
        """Enqueue a task that clears the episode cache; returns the task."""
        t = taskqueue.Task(url=reverse('seriesly-shows-clear_cache'),
                           params={})
        t.add(queue_name=queue_name)
        return t

    @classmethod
    def get_for_shows(cls, shows, before=None, after=None, order=None):
        """Return cached episodes for the given shows, sorted by date.

        before/after optionally bound the date range; an order string
        starting with "-" reverses the final list.
        """
        episode_list = []
        episode_dict = Episode.get_all_dict()
        for show in shows:
            k = str(show.key())
            if k in episode_dict:
                episode_dict[k].sort(key=lambda x: x.date)
                # Link each episode to its successor and attach the show.
                prev = None
                for ep in episode_dict[k]:
                    if prev is not None:
                        prev.next = ep
                    ep.show = show
                    prev = ep
                episode_list.extend(episode_dict[k])
        # (A never-set `changed` flag and its memcache write were dead
        # code here and have been removed.)
        episode_list.sort(key=lambda x: x.date)
        if after is not None or before is not None:
            lower = None
            upper = len(episode_list)
            for i, ep in enumerate(episode_list):
                if after is not None and lower is None and ep.date > after:
                    lower = i
                if before is not None and ep.date > before:
                    upper = i
                    break
            lower = lower or 0  # avoid comparing None below (Py3-safe)
            if lower > 0 or upper < len(episode_list):
                episode_list = episode_list[lower:upper]
        if order is not None and order.startswith("-"):
            episode_list.reverse()
        return episode_list

    @classmethod
    def get_for_shows_old(cls, shows, before=None, after=None, order=None):
        """Datastore-query fallback for get_for_shows.

        "IN" filters are limited in size, so shows are queried in chunks
        of 28 and the results merged and sorted.
        """
        def extra(q):
            # Apply the optional date bounds and ordering to a query.
            if before is not None:
                q = q.filter("date <", before)
            if after is not None:
                q = q.filter("date >", after)
            if order is not None:
                q = q.order(order)
            return q

        if not shows:
            return []

        if len(shows) <= 28:
            logging.debug("starting query")
            query = Episode.all().filter("show IN", shows)
            return extra(query).fetch(1000)

        episodes = []
        for i in range(len(shows) // 28 + 1):
            q_shows = shows[i * 28:(i + 1) * 28]
            if not q_shows:
                continue
            episodes.extend(
                extra(Episode.all().filter("show IN", q_shows)).fetch(1000))
        # BUG FIX: the old code called sorted(episodes, lambda x: x.date),
        # passing a key function where Python 2's cmp was expected (a
        # TypeError at runtime), and returned list.reverse()'s None for
        # descending order. Use key= and reverse= instead.
        descending = order is not None and order.startswith("-")
        return sorted(episodes, key=lambda x: x.date, reverse=descending)

    def update(self, episode_info):
        """Refresh mutable fields from episode_info."""
        self.title = episode_info.title
        self.date = episode_info.date

    def get_next(self):
        # NOTE(review): returns the next episode by date across ALL shows,
        # not just this episode's show — confirm that is intended.
        return Episode.all().filter("date >", self.date).get()

    def create_event_details(self, cal):
        """Add a vevent for this episode to the vobject calendar `cal`."""
        vevent = cal.add('vevent')
        vevent.add('uid').value = "seriesly-episode-%s" % self.key()
        try:
            tz = get_timezone_for_gmt_offset(self.show.timezone)
        except Exception:
            tz = utc  # unparseable timezone: fall back to UTC
        date = utc.localize(self.date).astimezone(tz)
        vevent.add('dtstart').value = date
        vevent.add('dtend').value = date + datetime.timedelta(
            minutes=self.show.runtime)
        vevent.add('summary').value = "%s - %s (%dx%d)" % (
            self.show.name, self.title, self.season_number, self.number)
        vevent.add('location').value = self.show.network
        return vevent
Esempio n. 6
0
class Tags(db.Model):
    '''Model to collect tags with count of using every tag.'''
    # The tag text itself.
    name = db.StringProperty()
    # How many times the tag has been used.
    count = db.IntegerProperty()
Esempio n. 7
0
class SpamCounts(db.Model):
    """Running totals of ham vs. spam classifications.

    Presumably backing a Bayesian-style filter — confirm against callers.
    """
    # Number of ham (non-spam) items seen.
    nham = db.IntegerProperty()
    # Number of spam items seen.
    nspam = db.IntegerProperty()
Esempio n. 8
0
class UserData(db.Model):
    """Per-user state: exercise proficiency, points, coaches and activity."""

    user = db.UserProperty()
    moderator = db.BooleanProperty(default=False)
    joined = db.DateTimeProperty(auto_now_add=True)
    last_login = db.DateTimeProperty()
    proficient_exercises = db.StringListProperty(
    )  # Names of exercises in which the user is *explicitly* proficient
    all_proficient_exercises = db.StringListProperty(
    )  # Names of all exercises in which the user is proficient
    suggested_exercises = db.StringListProperty()
    assigned_exercises = db.StringListProperty()
    badges = db.StringListProperty()  # All awarded badges
    need_to_reassess = db.BooleanProperty()
    points = db.IntegerProperty()
    total_seconds_watched = db.IntegerProperty(default=0)
    coaches = db.StringListProperty()
    map_coords = db.StringProperty()
    expanded_all_exercises = db.BooleanProperty(default=True)
    videos_completed = db.IntegerProperty(default=-1)  # -1 == not yet computed
    last_daily_summary = db.DateTimeProperty()
    last_activity = db.DateTimeProperty()
    count_feedback_notification = db.IntegerProperty(default=-1)  # -1 == not yet computed

    @staticmethod
    def get_for_current_user():
        """UserData for the logged-in user, or a fresh unsaved entity."""
        user = util.get_current_user()
        if user is not None:
            user_data = UserData.get_for(user)
            if user_data is not None:
                return user_data
        return UserData()

    @staticmethod
    def get_for(user):
        """Look up the UserData entity for `user`, or None.

        NOTE(review): the filter/order lines were garbled by scraping in
        the original source; reconstructed as the obvious filter-by-user
        query with the -points ordering the trailing comment refers to.
        """
        query = UserData.all()
        query.filter('user =', user)
        query.order('-points')  # Temporary workaround for issue 289
        return query.get()

    @staticmethod
    def get_or_insert_for(user):
        """Fetch the user's UserData, creating a keyed entity if missing."""
        # Once we have rekeyed legacy entities,
        # the next block can just be a call to .get_or_insert()
        user_data = UserData.get_for(user)
        if user_data is None:
            if user.email():
                key = "user_email_key_%s" % user.email()
                user_data = UserData.get_or_insert(
                    key_name=key,
                    user=user,
                    moderator=False,
                    last_login=datetime.datetime.now(),
                    proficient_exercises=[],
                    suggested_exercises=[],
                    assigned_exercises=[],
                    need_to_reassess=True,
                    points=0,
                    coaches=[])
        return user_data

    def get_or_insert_exercise(self, exercise, allow_insert=True):
        """Fetch this user's UserExercise for `exercise`, creating it if allowed."""
        exid = exercise.name
        userExercise = UserExercise.get_by_key_name(exid, parent=self)

        if not userExercise:
            # There are some old entities lying around that don't have keys.
            # We have to check for them here, but once we have reparented and
            # rekeyed legacy entities, this entire function can just be a call
            # to .get_or_insert().
            # NOTE(review): the filter lines were garbled by scraping in the
            # original source; reconstructed as user+exercise filters.
            query = UserExercise.all()
            query.filter('user =', self.user)
            query.filter('exercise =', exid)
            query.order('-total_done')  # Temporary workaround for issue 289
            userExercise = query.get()

        if allow_insert and not userExercise:
            userExercise = UserExercise.get_or_insert(
                key_name=exid,
                parent=self,
                user=self.user,
                exercise=exid,
                exercise_model=exercise,
                streak=0,
                longest_streak=0,
                first_done=datetime.datetime.now(),
                last_done=datetime.datetime.now(),
                total_done=0,
            )

        return userExercise

    def reassess_from_graph(self, ex_graph):
        """Recompute proficiency/suggestion lists from an ExerciseGraph.

        Returns True if either list changed.
        """
        all_proficient_exercises = [
            ex.name for ex in ex_graph.get_proficient_exercises()]
        suggested_exercises = [
            ex.name for ex in ex_graph.get_suggested_exercises()]
        is_changed = (all_proficient_exercises != self.all_proficient_exercises
                      or suggested_exercises != self.suggested_exercises)
        self.all_proficient_exercises = all_proficient_exercises
        self.suggested_exercises = suggested_exercises
        self.need_to_reassess = False
        return is_changed

    def reassess_if_necessary(self, user=None):
        """Run reassess_from_graph when flagged; returns True if lists changed."""
        if not self.need_to_reassess or self.all_proficient_exercises is None:
            return False
        ex_graph = ExerciseGraph(self, user)
        return self.reassess_from_graph(ex_graph)

    def is_proficient_at(self, exid, user=None):
        self.reassess_if_necessary(user)
        return (exid in self.all_proficient_exercises)

    def is_explicitly_proficient_at(self, exid):
        return (exid in self.proficient_exercises)

    def is_reviewing(self, exid, user_exercise, time):
        """True if `exid` is due for review at `time`."""
        # Short circuit out of the full review check if not proficient or
        # the review time hasn't come around yet.
        if not self.is_proficient_at(exid):
            return False

        if user_exercise.last_review + user_exercise.get_review_interval(
        ) > time:
            return False

        ex_graph = ExerciseGraph(self)
        review_exercise_names = [ex.name
                                 for ex in ex_graph.get_review_exercises(time)]
        return (exid in review_exercise_names)

    def is_suggested(self, exid):
        self.reassess_if_necessary()
        return (exid in self.suggested_exercises)

    def get_students_data(self):
        """All UserData entities coached by this user.

        Also checks the lowercased coach email (legacy data), de-duplicated
        by entity key.
        """
        coach_email = self.user.email()
        query = db.GqlQuery("SELECT * FROM UserData WHERE coaches = :1",
                            coach_email)
        students_data = list(query)
        if coach_email.lower() != coach_email:
            students_set = set(
                student_data.key().id_or_name()
                for student_data in students_data)
            query = db.GqlQuery("SELECT * FROM UserData WHERE coaches = :1",
                                coach_email.lower())
            for student_data in query:
                if student_data.key().id_or_name() not in students_set:
                    students_data.append(student_data)
        return students_data

    def get_students(self):
        """Emails of all students coached by this user."""
        return [student_data.user.email()
                for student_data in self.get_students_data()]

    def add_points(self, points):
        # points may still be None on legacy entities.
        if self.points is None:
            self.points = 0
        self.points += points

    def get_videos_completed(self):
        """Lazily compute and persist the completed-video count (-1 sentinel)."""
        if self.videos_completed < 0:
            self.videos_completed = UserVideo.count_completed_for_user(
                self.user)
            self.put()
        return self.videos_completed

    def feedback_notification_count(self):
        """Lazily compute and persist the feedback-notification count (-1 sentinel)."""
        if self.count_feedback_notification == -1:
            self.count_feedback_notification = models_discussion.FeedbackNotification.gql(
                "WHERE user = :1", self.user).count()
            self.put()
        return self.count_feedback_notification
Esempio n. 9
0
class Video(Searchable, db.Model):
    """A YouTube-backed video entry."""

    youtube_id = db.StringProperty()
    url = db.StringProperty()
    title = db.StringProperty()
    description = db.TextProperty()
    playlists = db.StringListProperty()
    keywords = db.StringProperty()
    duration = db.IntegerProperty(default=0)

    # Human readable, unique id that can be used in URLS.
    readable_id = db.StringProperty()

    # YouTube view count from last sync.
    views = db.IntegerProperty(default=0)

    # Date first added via KA library sync with YouTube.
    # This property hasn't always existed, so for many old videos
    # this date may be much later than the actual YouTube upload date.
    date_added = db.DateTimeProperty(auto_now_add=True)

    INDEX_ONLY = ['title', 'keywords', 'description']
    INDEX_TITLE_FROM_PROP = 'title'
    INDEX_USES_MULTI_ENTITIES = False

    @staticmethod
    def get_for_readable_id(readable_id):
        """Return the most recent valid Video matching readable_id.

        The datastore contains duplicate Video entities for some videos:
        old entries, plus YouTube-sync artifacts whose youtube_ids end in
        '_player'. Instead of a simple query.get(), scan all matches and
        keep the valid candidate with the highest key id.
        """
        query = Video.all().filter('readable_id =', readable_id)
        best, best_id = None, 0
        for candidate in query:
            candidate_id = candidate.key().id()
            if candidate_id > best_id and \
                    not candidate.youtube_id.endswith('_player'):
                best, best_id = candidate, candidate_id
        return best

    def first_playlist(self):
        """First live playlist this video is associated with, or None."""
        association = (VideoPlaylist.all()
                       .filter('video =', self)
                       .filter('live_association =', True)
                       .get())
        return association.playlist if association else None

    def current_user_points(self):
        """Points the current user has earned on this video (0 if none)."""
        user_video = UserVideo.get_for_video_and_user(self,
                                                      util.get_current_user())
        return points.VideoPointCalculator(user_video) if user_video else 0

    @staticmethod
    def get_dict(query, fxn_key):
        """Map fxn_key(video) -> video for up to 10000 results of `query`."""
        return dict((fxn_key(video), video)
                    for video in query.fetch(10000))
Esempio n. 10
0
class LikedUser(db.Model):
    """Records that a user liked a particular post."""
    # Numeric id of the liking user.
    userid = db.IntegerProperty(required=True)
    # Numeric id of the liked post.
    postid = db.IntegerProperty(required=True)
Esempio n. 11
0
class UserExercise(db.Model):
    """Per-user progress state for one exercise: streaks, totals and
    spaced-repetition review scheduling.

    NOTE(review): parts of this class were corrupted when the source was
    scraped (asterisk runs replacing string fragments, and a truncation
    that swallowed the end of get_for_user_use_cache plus the header of
    the following method). The corrupted tokens are preserved verbatim
    below and flagged inline; they must be repaired against the original
    project before this code can run.
    """

    user = db.UserProperty()
    exercise = db.StringProperty()
    exercise_model = db.ReferenceProperty(Exercise)
    streak = db.IntegerProperty(default=0)
    longest_streak = db.IntegerProperty(default=0)
    first_done = db.DateTimeProperty(auto_now_add=True)
    last_done = db.DateTimeProperty()
    total_done = db.IntegerProperty(default=0)
    # datetime.min acts as the "never reviewed" sentinel (see below).
    last_review = db.DateTimeProperty(default=datetime.datetime.min)
    review_interval_secs = db.IntegerProperty(default=(
        60 * 60 * 24 *
        consts.DEFAULT_REVIEW_INTERVAL_DAYS))  # Default 7 days until review
    proficient_date = db.DateTimeProperty()
    seconds_per_fast_problem = db.FloatProperty(
        default=consts.MIN_SECONDS_PER_FAST_PROBLEM
    )  # Seconds expected to finish a problem 'quickly' for badge calculation
    summative = db.BooleanProperty(default=False)

    # NOTE(review): corrupted literal — the asterisks replaced what was
    # presumably a '%s' placeholder (get_key_for_user %-formats this).
    _USER_EXERCISE_KEY_FORMAT = "UserExercise.all().filter('user = '******')"

    @staticmethod
    def get_key_for_user(user):
        # Memcache key under which a user's exercise list is cached.
        return UserExercise._USER_EXERCISE_KEY_FORMAT % user.email()

    @staticmethod
    def get_for_user_use_cache(user):
        # Fetch the user's exercises from memcache, falling back to a
        # datastore query on a miss.
        user_exercises_key = UserExercise.get_key_for_user(user)
        user_exercises = memcache.get(user_exercises_key)
        if user_exercises is None:
            query = UserExercise.all()
            # NOTE(review): truncated here — the remainder of this method
            # and the `def` line of the next method (apparently a
            # review-scheduling method taking `correct` and `now`) were
            # lost; the next line fuses a filter call with a comment
            # fragment ("...don't schedule a review").
            query.filter('user ='******'t schedule a review
        if (self.streak + correct) < self.required_streak(
        ) and self.longest_streak < self.required_streak():
            return

        # If the user is hitting a new streak either for the first time or after having lost
        # proficiency, reset their review interval counter.
        # NOTE(review): required_streak is called with () elsewhere; the
        # comparison below is against the bound method itself — likely a
        # missing "()"; confirm against the original project.
        if (self.streak + correct) >= self.required_streak:
            self.review_interval_secs = 60 * 60 * 24 * consts.DEFAULT_REVIEW_INTERVAL_DAYS

        review_interval = self.get_review_interval()

        # Correct answers after a real review double the interval once the
        # elapsed time exceeds it; incorrect answers halve it and reset the
        # "never reviewed" sentinel.
        if correct and self.last_review != datetime.datetime.min:
            time_since_last_review = now - self.last_review
            if time_since_last_review >= review_interval:
                review_interval = time_since_last_review * 2
        if not correct:
            review_interval = review_interval // 2
        if correct:
            self.last_review = now
        else:
            self.last_review = datetime.datetime.min
        self.review_interval_secs = review_interval.days * 86400 + review_interval.seconds

    def set_proficient(self, proficient, user_data):
        """Add or remove this exercise from user_data's explicit
        proficiency list, flagging a reassessment when anything changes."""
        if not proficient and self.longest_streak < self.required_streak():
            # Not proficient and never has been so nothing to do
            return

        if proficient:
            if self.exercise not in user_data.proficient_exercises:
                user_data.proficient_exercises.append(self.exercise)
                user_data.need_to_reassess = True
                user_data.put()
        else:
            if self.exercise in user_data.proficient_exercises:
                user_data.proficient_exercises.remove(self.exercise)
                user_data.need_to_reassess = True
                user_data.put()
Esempio n. 12
0
class Issue(db.Model):
    """Represents a single issue which is being voted on"""
    status = db.StringProperty(required=True,
                               default='active',
                               choices=set(['active', 'done', 'canceled']))
    is_public = db.BooleanProperty(
        default=False)  # is issue listed on site or just by sharing url?
    creator = db.UserProperty(auto_current_user=True)
    title = db.StringProperty(required=True)
    description = db.TextProperty()
    duration = db.IntegerProperty()  # voting window length, in hours
    creation_date = db.DateTimeProperty(auto_now_add=True)
    start_time = db.DateTimeProperty()  # time when first vote is cast
    end_time = db.DateTimeProperty()  # time when vote will end

    # Implicit Properties:
    # choices - implicitly created list of choice objects
    # votes - implicitly created list of vote objects

    def add_choice(self, choice_name):
        """Attach and persist a new Choice for this issue."""
        new_choice = Choice(name=choice_name, issue=self)
        new_choice.put()

    def remove_choice(self, choice):
        """Delete an existing Choice entity."""
        choice.delete()

    def vote_count(self):
        """Total number of votes cast on this issue."""
        return self.votes.count(999999)

    def vote_for_member(self, member=None):
        """Return `member`'s Vote on this issue (default: current user), or None."""
        if not member:
            member = users.get_current_user()
        logging.info('member:%s voted:%s' %
                     (member.nickname(), self.votes.filter('member =',
                                                           member).fetch(20)))
        return self.votes.filter('member =', member).get()

    def register_vote(self, choice, member=None):
        """Record or change a member's vote.

        The first vote cast starts the voting clock (start_time/end_time).
        Returns True if an existing vote was changed rather than created.
        """
        if not member:
            member = users.get_current_user()
        member_vote = self.vote_for_member(member)
        was_changed = False
        if member_vote:
            member_vote.choice = choice
            was_changed = True
        else:
            member_vote = Vote(member=member, choice=choice, issue=self)
        member_vote.put()
        if not self.start_time:
            self.start_time = datetime.now()
            self.end_time = self.start_time + timedelta(hours=self.duration)
            self.put()
        return was_changed

    def extend_duration(self, hours):
        """Lengthen the voting window by `hours` and persist."""
        self.duration += hours
        if self.start_time:
            self.end_time = self.start_time + timedelta(hours=self.duration)
        self.put()

    def days_left(self):
        """Whole days until voting ends (may be negative once ended)."""
        delta = self.end_time - datetime.now()
        return delta.days

    def hours_left(self):
        # NOTE(review): returns only the sub-day remainder (delta.seconds),
        # ignoring full days — appears intentional for display alongside
        # days_left(); confirm.
        delta = self.end_time - datetime.now()
        hours = delta.seconds / 3600
        return hours

    def is_active(self):
        """True while voting is open."""
        # BUG FIX: `self.status in ('active')` was a substring test, since
        # ('active') is just a parenthesized string; use equality.
        return self.status == 'active'

    def has_results(self):
        """True once voting has finished and results exist."""
        # BUG FIX: same parenthesized-string pitfall as in is_active().
        return self.status == 'done'

    def member_is_creator(self, member=None):
        """True if `member` (default: current user) created this issue."""
        if not member:
            member = users.get_current_user()
        return member == self.creator

    def winning_choices(self):
        """Return keys of the choice(s) with the most votes (may be a tie)."""
        result = []
        high_vote = 0
        for choice in self.choices:
            cnt = choice.vote_count()
            if cnt == high_vote:
                result.append(choice.key())
            elif cnt > high_vote:
                result = [choice.key()]
                high_vote = cnt
        return result

    def update_status(self):
        """Mark the issue 'done' once its end time has passed."""
        # BUG FIX: was `if self.is_active:` — a bare method reference that
        # is always truthy, so finished issues were re-saved on every call.
        if self.is_active():
            if self.end_time:
                if self.end_time <= datetime.now():
                    logging.info('status changed for issue: %s' % (self.title))
                    self.status = 'done'
                    self.put()

    @classmethod
    def issues_created_by(cls, member=None, limit=20):
        """Most recent issues created by `member` (default: current user)."""
        if not member:
            member = users.get_current_user()
        return cls.all().filter('creator =',
                                member).order('-creation_date').fetch(limit)

    @classmethod
    def issues_voted_on(cls, member=None, limit=20):
        """Issues the member has voted on, most recently updated first."""
        if not member:
            member = users.get_current_user()
        if not member:  # if logged out
            return []
        member_votes = Vote.all().filter(
            'member =', member).order('-update_time').fetch(limit)
        return [vote.issue for vote in member_votes]

    @classmethod
    def recent_results(cls, member=None, limit=20):
        """Recently finished issues that the member voted on.

        TODO: `limit` is applied before the vote filter, so fewer than
        `limit` issues may be returned.
        """
        if not member:
            member = users.get_current_user()
        if not member:  # if logged out
            return []
        recent = cls.all().filter('status =',
                                  'done').order('-end_time').fetch(limit)
        return [issue for issue in recent
                if issue.vote_for_member()]  # this is probably slow
Esempio n. 13
0
class Counter(db.Model):
    """A per-period counter, bucketed by type."""
    # What is being counted — semantics defined by callers; TODO confirm.
    type = db.StringProperty(multiline=False)
    # Start of the period this count covers.
    period_start_time = db.DateTimeProperty()
    # Number of events in the period.
    count = db.IntegerProperty()
Esempio n. 14
0
class Membership(db.Model):
  """A member of the organization: identity, billing (Spreedly/PinPayments)
  state, signin tracking and legacy Google-domain fields."""

  hash = db.StringProperty()  # unique token used in URLs (see get_by_hash)
  first_name = db.StringProperty(required=True)
  last_name = db.StringProperty(required=True)
  email = db.StringProperty(required=True)
  # The hash of the user's password.
  # TODO(danielp): Make this required after we finish migrating away from domain
  # accounts.
  password_hash = db.StringProperty()
  twitter = db.StringProperty(required=False)
  plan  = db.StringProperty(required=False)
  status  = db.StringProperty() # None, active, suspended
  referuserid = db.StringProperty()
  referrer  = db.StringProperty()
  rfid_tag = db.StringProperty()
  extra_599main = db.StringProperty()
  extra_dnd = db.BooleanProperty(default=False)
  auto_signin = db.StringProperty()
  unsubscribe_reason = db.TextProperty()
  hardship_comment = db.TextProperty()

  spreedly_token = db.StringProperty()
  parking_pass = db.StringProperty()

  created = db.DateTimeProperty(auto_now_add=True)
  updated = db.DateTimeProperty()

  # How many times the user has signed in this month.
  signins = db.IntegerProperty(default=0)
  # When the last time they signed in was.
  last_signin = db.DateTimeProperty()

  # The following are legacy parameters.
  # TODO(danielp): Remove these after we complete the migration away from
  # domain accounts.

  # Whether we've created a google apps user yet.
  domain_user = db.BooleanProperty(default=False)
  # The user's domain username.
  username = db.StringProperty()
  # Temporarily stores the user's domain password.
  password = db.StringProperty(default=None)

  """ Override of the default put method which allows us to skip changing the
  updated property for testing purposes.
  skip_time_update: Whether or not to set updated to the current date and time.
  """
  def put(self, *args, **kwargs):
    if not kwargs.pop("skip_time_update", False):
      self.updated = datetime.datetime.now()

    super(Membership, self).put(*args, **kwargs)

  def icon(self):
    """ Returns the member's gravatar URL (keyed by lowercased email). """
    return str("http://www.gravatar.com/avatar/" + hashlib.md5(self.email.lower()).hexdigest())

  def full_name(self):
    """ Returns "First Last". """
    return '%s %s' % (self.first_name, self.last_name)

  def spreedly_url(self):
    """ Member-facing subscription-account URL on PinPayments. """
    config = Config()
    return str("https://subs.pinpayments.com/%s/subscriber_accounts/%s" % \
        (config.SPREEDLY_ACCOUNT, self.spreedly_token))

  def spreedly_admin_url(self):
    """ Admin-facing subscriber URL on PinPayments. """
    config = Config()
    return str("https://subs.pinpayments.com/%s/subscribers/%s" % \
        (config.SPREEDLY_ACCOUNT, self.key().id()))

  def subscribe_url(self, plan=None):
    """ Subscription URL for an existing subscriber (defaults to their plan). """
    config = Config()
    if not plan:
      plan = self.plan
    url = "https://subs.pinpayments.com/%s/subscribers/%i/%s/subscribe/%s" % \
        (config.SPREEDLY_ACCOUNT, self.key().id(),
         self.spreedly_token, plans.Plan.get_by_name(plan).plan_id)
    return str(url)

  """ URL we use to subscribe a person for the first time.
  host: The first part of the return URL, e.g. signup.hackerdojo.com.
  plan: Optionally specifies a different plan to use. """
  def new_subscribe_url(self, host, plan=None):
    config = Config()
    if not plan:
      plan = self.plan

    query_str = urllib.urlencode({"first_name": self.first_name,
                                  "last_name": self.last_name,
                                  "email": self.email,
                                  "return_url": "http://%s/success/%s" % \
                                      (host, self.hash)})
    url = "https://subs.pinpayments.com/%s/subscribers/%i/subscribe/%s/%s?%s" % \
        (config.SPREEDLY_ACCOUNT, self.key().id(),
         plans.Plan.get_by_name(plan).plan_id, self.username, query_str)
    return str(url)

  def force_full_subscribe_url(self):
    """ Subscription URL that forces the member onto the full plan. """
    config = Config()
    url = "https://subs.pinpayments.com/%s/subscribers/%i/%s/subscribe/%s" % \
        (config.SPREEDLY_ACCOUNT, self.key().id(),
        self.spreedly_token, plans.newfull.plan_id)
    return str(url)

  def unsubscribe_url(self):
    return "http://signup.hackerdojo.com/unsubscribe/%i" % (self.key().id())

  """ Returns this user's unique ID, which can be an integer or string. """
  def get_id(self):
    return self.key().id()

  """ Sets the user's password.
  password: The password which will be hashed and stored. """
  def set_password(self, password):
    logging.debug("Setting password for user %s." % (self.email))

    self.password_hash = security.generate_password_hash(password, length=12)

  """ Creates a new authorization token for a given user ID.
  user_id: User unique ID.
  Returns: A string with the authorization token. """
  @classmethod
  def create_auth_token(cls, user_id):
    token = UserToken(user_id, "auth")
    token.save()

    return token.token

  """ Deletes a given authorization token.
  user_id: User unique ID.
  token: A string with the authorization token. """
  @classmethod
  def delete_auth_token(cls, user_id, token):
    token = UserToken.verify(user_id, "auth", token)
    if not token:
      logging.warning("Delete: Ignoring bad token for %d." % (user_id))
      return

    token.delete()

  """ Returns a Membership object based on a user ID and token.
  user_id: The unique ID of the requesting user.
  token: The token string to be verified.
  Returns: A tuple (Membership, timestamp), with a Membership object and
  the token timestamp, or (None, None) if both were not found. """
  @classmethod
  def get_by_auth_token(cls, user_id, token):
    # First, check that the token is valid.
    token = UserToken.verify(user_id, "auth", token)
    if not token:
      logging.warning("Bad token, not getting user %d." % (user_id))
      return (None, None)

    user = cls.get_by_id(user_id)
    timestamp = token.timestamp
    return (user, timestamp)

  """ Gets the user with the specified login credentials.
  email: The email of the user.
  password: The password of the user.
  Returns: Membership object if found. """
  @classmethod
  def get_by_auth_password(cls, email, password):
    user = cls.get_by_email(email)
    if not user:
      raise auth.InvalidAuthIdError("No user with email '%s'." % (email))

    if not security.check_password_hash(password, user.password_hash):
      raise auth.InvalidPasswordError("Bad password for user '%s'." % (email))

    return user

  """ Gets the user with the specified email.
  email: Either the normal email, or the hackerdojo.com email of the user.
  Returns: The membership object corresponding to the user, or None if no user
  was found. """
  @classmethod
  def get_by_email(cls, email):
    # TODO(danielp): Remove code for dealing with hackerdojo.com emails after
    # we've finished migrating away from domain accounts.
    if "@hackerdojo.com" in email:
      username = email.split("@")[0]
      return cls.get_by_username(username)

    return cls.all().filter('email =', email).get()

  @classmethod
  def get_by_hash(cls, hash):
    return cls.all().filter('hash =', hash).get()

  # This is a legacy method:
  # TODO(danielp): Remove this after we migrate away from domain accounts.
  @classmethod
  def get_by_username(cls, username):
    # NOTE(review): this query and the docstring below were garbled by
    # scraping in the original source; reconstructed in the obvious way.
    return cls.all().filter('username =', username).get()

  """ Creates a new user.
  email: The user's email. This will be used as a unique ID.
  password: The user's raw password. Will be hashed before saving, obviously.
  other_properties: Keyword arguments specifying properties that will be
  forwarded to the Membership constructor. All the other required properties
  should be in here.
  Returns: The created Membership entity. """
  @classmethod
  def create_user(cls, email, password, **other_properties):
    logging.info("Creating user with email '%s', other properties: %s." % \
                 (email, other_properties))

    password_hash = security.generate_password_hash(password, length=12)
    member = cls(email=email, password_hash=password_hash, **other_properties)
    member.put()

    return member
Esempio n. 15
0
class _PipelineRecord(db.Model):
    """Represents a Pipeline.

  Properties:
    class_path: Path of the Python class to use for this pipeline.
    root_pipeline: The root of the whole workflow; set to itself if this
      pipeline is its own root.
    fanned_out: List of child _PipelineRecords that were started when this
      generator pipeline moved from WAITING to RUN.
    start_time: For pipelines with no start _BarrierRecord, when this pipeline
      was enqueued to run immediately.
    finalized_time: When this pipeline moved from WAITING or RUN to DONE.
    params: Serialized parameter dictionary.
    status: The current status of the pipeline.
    current_attempt: The current attempt (starting at 0) to run.
    max_attempts: Maximum number of attempts (starting at 0) to run.
    next_retry_time: ETA of the next retry attempt.
    retry_message: Why the last attempt failed; None or empty if no message.

  Root pipeline properties:
    is_root_pipeline: This is a root pipeline.
    abort_message: Why the whole pipeline was aborted; only saved on
      root pipelines.
    abort_requested: If an abort signal has been requested for this root
      pipeline; only saved on root pipelines.
  """

    # Allowed values for the 'status' property.
    WAITING = 'waiting'
    RUN = 'run'
    DONE = 'done'
    ABORTED = 'aborted'

    class_path = db.StringProperty()
    root_pipeline = db.SelfReferenceProperty(
        collection_name='child_pipelines_set')
    fanned_out = db.ListProperty(db.Key, indexed=False)
    start_time = db.DateTimeProperty(indexed=True)
    finalized_time = db.DateTimeProperty(indexed=False)

    # One of these two will be set, depending on the size of the params.
    params_text = db.TextProperty(name='params')
    params_blob = blobstore.BlobReferenceProperty(name='params_blob',
                                                  indexed=False)

    status = db.StringProperty(choices=(WAITING, RUN, DONE, ABORTED),
                               default=WAITING)

    # Retry behavior
    current_attempt = db.IntegerProperty(default=0, indexed=False)
    max_attempts = db.IntegerProperty(default=1, indexed=False)
    next_retry_time = db.DateTimeProperty(indexed=False)
    retry_message = db.TextProperty()

    # Root pipeline properties
    is_root_pipeline = db.BooleanProperty()
    abort_message = db.TextProperty()
    abort_requested = db.BooleanProperty(indexed=False)

    @classmethod
    def kind(cls):
        """Returns the fixed datastore kind name for pipeline records."""
        return '_AE_Pipeline_Record'

    @property
    def params(self):
        """Returns the dictionary of parameters for this Pipeline.

        Decodes params_blob (large payloads) or params_text (small payloads)
        exactly once, then memoizes the result on the instance.
        """
        if hasattr(self, '_params_decoded'):
            return self._params_decoded

        # Prefer the blob; params_text is only used for small payloads.
        if self.params_blob is not None:
            value_encoded = self.params_blob.open().read()
        else:
            value_encoded = self.params_text

        value = simplejson.loads(value_encoded, cls=util.JsonDecoder)
        if isinstance(value, dict):
            kwargs = value.get('kwargs')
            if kwargs:
                adjusted_kwargs = {}
                for arg_key, arg_value in kwargs.iteritems():
                    # Python only allows non-unicode strings as keyword arguments.
                    adjusted_kwargs[str(arg_key)] = arg_value
                value['kwargs'] = adjusted_kwargs

        self._params_decoded = value
        return self._params_decoded

    def truncated_copy(self):
        """Create a lightweight copy of the pipeline with the args truncated."""
        # NOTE(review): _LowMemoryPipelineRecord is defined elsewhere in this
        # module; presumably it drops or shortens the params payload — confirm.
        return _LowMemoryPipelineRecord(self)
Esempio n. 16
0
class Exercise(db.Model):
    """An exercise that users can practice.

    Exercises relate to each other through 'prerequisites'/'covers', are laid
    out on the knowledge map via v_position/h_position, and may be
    'summative' (aggregating other exercises). Author-contributed HTML/JS is
    sanitized with Caja before being shown to students.
    """

    name = db.StringProperty()
    short_display_name = db.StringProperty(default="")
    prerequisites = db.StringListProperty()
    covers = db.StringListProperty()
    v_position = db.IntegerProperty()
    h_position = db.IntegerProperty()
    seconds_per_fast_problem = db.FloatProperty(
        default=consts.MIN_SECONDS_PER_FAST_PROBLEM
    )  # Seconds expected to finish a problem 'quickly' for badge calculation

    # True if this exercise is live and visible to all users.
    # Non-live exercises are only visible to admins.
    live = db.BooleanProperty(default=False)

    # True if this exercise is a quasi-exercise generated by
    # combining the content of other exercises
    summative = db.BooleanProperty(default=False)

    # Teachers contribute raw html with embedded CSS and JS
    # and we sanitize it with Caja before displaying it to
    # students.
    author = db.UserProperty()
    raw_html = db.TextProperty()
    last_modified = db.DateTimeProperty()
    safe_html = db.TextProperty()
    safe_js = db.TextProperty()
    last_sanitized = db.DateTimeProperty(default=datetime.datetime.min)
    sanitizer_used = db.StringProperty()

    @staticmethod
    def get_by_name(name):
        """Returns the named exercise if visible to the current user, else
        None."""
        dict_exercises = Exercise.__get_dict_use_cache_unsafe__()
        # 'in' replaces the deprecated dict.has_key().
        if name in dict_exercises:
            if dict_exercises[name].is_visible_to_current_user():
                return dict_exercises[name]
        return None

    @staticmethod
    def to_display_name(name):
        """Converts an internal name like 'linear_equations' to
        'Linear equations'."""
        if name:
            return name.replace('_', ' ').capitalize()
        return ""

    def display_name(self):
        """Human-readable name of this exercise."""
        return Exercise.to_display_name(self.name)

    @staticmethod
    def to_short_name(name):
        """Short display name for the named exercise, or "" if not found."""
        exercise = Exercise.get_by_name(name)
        if exercise:
            return exercise.short_name()
        return ""

    def short_name(self):
        """Display name truncated to 11 characters for compact UI."""
        if self.short_display_name:
            return self.short_display_name[:11]
        return self.display_name()[:11]

    def is_visible_to_current_user(self):
        """Non-live exercises are only visible to admins."""
        return self.live or users.is_current_user_admin()

    def required_streak(self):
        """Streak needed for proficiency; summative exercises scale with the
        number of included exercises."""
        if self.summative:
            return consts.REQUIRED_STREAK * len(self.prerequisites)
        else:
            return consts.REQUIRED_STREAK

    def struggling_threshold(self):
        """Problem count beyond which a user is considered struggling."""
        return 3 * self.required_streak()

    def summative_children(self):
        """Query over the exercises combined into this summative exercise;
        an empty list for non-summative exercises."""
        if not self.summative:
            return []
        query = db.Query(Exercise)
        query.filter("name IN ", self.prerequisites)
        return query

    def non_summative_exercise(self, problem_number):
        """Resolves a summative exercise to one of its concrete included
        exercises, cycling through them by problem number.

        Raises:
            Exception: if no exercises are included, or an included exercise
                cannot be found.
        """
        if not self.summative:
            return self

        if len(self.prerequisites) <= 0:
            raise Exception(
                "Summative exercise '%s' does not include any other exercises"
                % self.name)

        # For now we just cycle through all of the included exercises in a summative exercise
        index = int(problem_number) % len(self.prerequisites)
        exid = self.prerequisites[index]

        query = Exercise.all()
        query.filter('name =', exid)
        exercise = query.get()

        if not exercise:
            raise Exception("Unable to find included exercise")

        if exercise.summative:
            # An included exercise may itself be summative; recurse.
            return exercise.non_summative_exercise(problem_number)
        else:
            return exercise

    def related_videos(self):
        """Query over ExerciseVideo entities attached to this exercise."""
        query = ExerciseVideo.all()
        query.filter('exercise =', self.key())
        return query

    @layer_cache.cache_with_key_fxn(
        lambda self: "related_videos_%s" % self.key(),
        layer=layer_cache.SINGLE_LAYER_MEMCACHE_ONLY)
    def related_videos_fetch(self):
        """Fetches up to 10 related videos, pre-caching the video entities."""
        exercise_videos = self.related_videos().fetch(10)
        for exercise_video in exercise_videos:
            exercise_video.video  # Pre-cache video entity
        return exercise_videos

    _CURRENT_SANITIZER = "http://caja.appspot.com/"

    def ensure_sanitized(self):
        """Re-sanitizes raw_html with Caja if the cached result is stale.

        The cached output is fresh when it was produced after the last edit
        and by the current sanitizer version.

        Raises:
            Exception: if Caja reports an error (message in cajoled['html']).
        """
        if self.last_sanitized >= self.last_modified and self.sanitizer_used == Exercise._CURRENT_SANITIZER:
            return
        cajoled = cajole.cajole(self.raw_html)
        if 'error' in cajoled:
            raise Exception(cajoled['html'])
        self.safe_html = db.Text(cajoled['html'])
        self.safe_js = db.Text(cajoled['js'])
        self.last_sanitized = datetime.datetime.now()
        # Bug fix: this previously assigned to 'self.sanitizer', which is not
        # a model property, so the freshness check above never passed and
        # every call re-ran the (expensive) Caja sanitization.
        self.sanitizer_used = Exercise._CURRENT_SANITIZER
        self.put()

    @classmethod
    def all(cls, live_only=False):
        """Query over exercises; hides non-live ones from non-admins."""
        query = super(Exercise, cls).all()
        if live_only or not users.is_current_user_admin():
            query.filter("live =", True)
        return query

    @classmethod
    def all_unsafe(cls):
        """Query over all exercises with no visibility filtering."""
        return super(Exercise, cls).all()

    @staticmethod
    def get_all_use_cache():
        """Cached list of exercises appropriate for the current user."""
        if users.is_current_user_admin():
            return Exercise.__get_all_use_cache_unsafe__()
        else:
            return Exercise.__get_all_use_cache_safe__()

    @staticmethod
    @layer_cache.cache_with_key_fxn(
        lambda *args, **kwargs: "all_exercises_unsafe_%s" % Setting.
        cached_exercises_date())
    def __get_all_use_cache_unsafe__():
        # Cache key rotates with the exercises date set in put(), so edits
        # invalidate this list.
        query = Exercise.all_unsafe().order('h_position')
        return query.fetch(200)

    @staticmethod
    @layer_cache.cache_with_key_fxn(
        lambda *args, **kwargs: "all_exercises_safe_%s" % Setting.
        cached_exercises_date())
    def __get_all_use_cache_safe__():
        query = Exercise.all(live_only=True).order('h_position')
        return query.fetch(200)

    @staticmethod
    @layer_cache.cache_with_key_fxn(
        lambda *args, **kwargs: "all_exercises_dict_unsafe_%s" % Setting.
        cached_exercises_date())
    def __get_dict_use_cache_unsafe__():
        """Cached mapping of exercise name -> Exercise (no visibility
        filtering)."""
        exercises = Exercise.__get_all_use_cache_unsafe__()
        dict_exercises = {}
        for exercise in exercises:
            dict_exercises[exercise.name] = exercise
        return dict_exercises

    _EXERCISES_COUNT_KEY = "Exercise.count()"

    @staticmethod
    def get_count():
        """Memcached count of exercises visible via Exercise.all()."""
        count = memcache.get(Exercise._EXERCISES_COUNT_KEY,
                             namespace=App.version)
        if count is None:
            count = Exercise.all().count()
            memcache.set(Exercise._EXERCISES_COUNT_KEY,
                         count,
                         namespace=App.version)
        return count

    def put(self):
        """Saves the exercise and invalidates the exercise caches."""
        Setting.cached_exercises_date(str(datetime.datetime.now()))
        memcache.delete(Exercise._EXERCISES_COUNT_KEY, namespace=App.version)
        db.Model.put(self)
Esempio n. 17
0
class Settings(db.Model):
    """Numeric display settings for pictures and albums.

    NOTE(review): semantics below are inferred from the field names only —
    confirm against the handlers that read these values.
    """
    # Presumably: age in days after which a picture counts as "old".
    days_for_old_picture = db.IntegerProperty()
    # Presumably: pixel width used when rendering an album cover.
    width_for_cover_album = db.IntegerProperty()
    # Presumably: pixel width used for small/thumbnail pictures.
    width_for_small_picture = db.IntegerProperty()
Esempio n. 18
0
class VideoPlaylist(db.Model):
    """Join entity linking a Video to a Playlist at a given position."""

    playlist = db.ReferenceProperty(Playlist)
    video = db.ReferenceProperty(Video)
    video_position = db.IntegerProperty()

    # Lets us enable/disable video playlist relationships in bulk without removing the entry
    live_association = db.BooleanProperty(default=False)
    last_live_association_generation = db.IntegerProperty(default=0)

    # Memcache key formats, parameterized by playlist/video key.
    _VIDEO_PLAYLIST_KEY_FORMAT = "VideoPlaylist_Videos_for_Playlist_%s"
    _PLAYLIST_VIDEO_KEY_FORMAT = "VideoPlaylist_Playlists_for_Video_%s"

    @staticmethod
    def get_cached_videos_for_playlist(playlist, limit=500):
        """Memcached, position-ordered list of live videos in a playlist."""
        key = VideoPlaylist._VIDEO_PLAYLIST_KEY_FORMAT % playlist.key()
        # Namespace rotates with the app version and library content date, so
        # the cache self-invalidates on deploys and content updates.
        namespace = str(App.version) + "_" + str(
            Setting.cached_library_content_date())

        videos = memcache.get(key, namespace=namespace)

        if videos is None:
            videos = []
            query = VideoPlaylist.all()
            query.filter('playlist =', playlist)
            query.filter('live_association = ', True)
            query.order('video_position')
            video_playlists = query.fetch(limit)
            for video_playlist in video_playlists:
                videos.append(video_playlist.video)

            memcache.set(key, videos, namespace=namespace)

        return videos

    @staticmethod
    def get_cached_playlists_for_video(video, limit=5):
        """Memcached list of live playlists containing a video."""
        key = VideoPlaylist._PLAYLIST_VIDEO_KEY_FORMAT % video.key()
        namespace = str(App.version) + "_" + str(
            Setting.cached_library_content_date())

        playlists = memcache.get(key, namespace=namespace)

        if playlists is None:
            playlists = []
            query = VideoPlaylist.all()
            query.filter('video =', video)
            query.filter('live_association = ', True)
            video_playlists = query.fetch(limit)
            for video_playlist in video_playlists:
                playlists.append(video_playlist.playlist)

            memcache.set(key, playlists, namespace=namespace)

        return playlists

    @staticmethod
    def get_query_for_playlist_title(playlist_title):
        """Position-ordered query of live VideoPlaylists for the playlist
        with the given title."""
        query = Playlist.all()
        query.filter('title =', playlist_title)
        playlist = query.get()
        query = VideoPlaylist.all()
        query.filter('playlist =', playlist)
        query.filter('live_association = ', True)
        query.order('video_position')
        return query

    @staticmethod
    def get_key_dict(query):
        """Builds {playlist key: {video key: VideoPlaylist}} from a query."""
        video_playlist_key_dict = {}
        for video_playlist in query.fetch(10000):
            playlist_key = VideoPlaylist.playlist.get_value_for_datastore(
                video_playlist)
            video_key = VideoPlaylist.video.get_value_for_datastore(
                video_playlist)
            # setdefault replaces the deprecated dict.has_key() check.
            video_playlist_key_dict.setdefault(playlist_key,
                                               {})[video_key] = video_playlist

        return video_playlist_key_dict
class ProductSalesData(db.Model):
    """One sales-data record for a product at a store."""
    product_id = db.IntegerProperty(required=True)
    # NOTE(review): auto_now=True and auto_now_add=True are both set; per the
    # GAE db docs, auto_now takes precedence, so this field is overwritten
    # with the current time on every put() — confirm that is intended.
    date = db.DateTimeProperty(verbose_name=None,
                               auto_now=True,
                               auto_now_add=True)
    store = db.StringProperty(required=True)
Esempio n. 20
0
class SummaryStatsManager(db.Model):
    """Stores and aggregates per-browser summary scores across categories."""

    MEMCACHE_NAMESPACE = 'summary_stats'

    # The category is encoded in the entity key name (see KeyName).
    summary_score = db.IntegerProperty(indexed=False)
    summary_display = db.StringProperty(indexed=False)
    total_runs = db.IntegerProperty(indexed=False)

    @classmethod
    def UpdateStats(cls, category, stats):
        """Update the summary stats in memory and the datastore.

        This will only update part of a summary score row.

        Args:
            category: a category string like 'network'
            stats: a dict of browser stats (see CategoryStatsManager.GetStats)
        Returns:
            The summary stats that have been updated by the given stats.
            (Used by GetStats.)
        """
        browsers = [b for b in stats.keys() if b != 'total_runs']
        update_summary_stats = memcache.get_multi(
                browsers, namespace=cls.MEMCACHE_NAMESPACE)
        for browser in browsers:
            ua_summary_stats = update_summary_stats.setdefault(browser, {
                    'results': {}})
            ua_summary_stats['results'][category] = {
                    'score': stats[browser]['summary_score'],
                    'display': stats[browser]['summary_display'],
                    'total_runs': stats[browser]['total_runs'],
                    }
            if category == 'acid3':
                # acid3 displays its raw test score rather than the summary.
                ua_summary_stats['results']['acid3']['display'] = (
                        stats[browser]['results']['score']['display'])
        memcache.set_multi(update_summary_stats, namespace=cls.MEMCACHE_NAMESPACE)
        return update_summary_stats

    @classmethod
    def _FindAndUpdateStats(cls, category, browsers):
        """Recomputes category stats for the given browsers and folds them
        into the summary cache via UpdateStats."""
        test_set = all_test_sets.GetTestSet(category)
        ua_stats = CategoryStatsManager.GetStats(
                test_set, browsers, [t.key for t in test_set.VisibleTests()])
        return cls.UpdateStats(category, ua_stats)

    @classmethod
    def _AddSummaryOfSummaries(cls, summary_stats):
        """Update summary_stats with row summaries."""
        grand_total_runs = 0
        for browser in summary_stats.keys():
            results = summary_stats[browser]['results']
            categories = results.keys()
            # Average the per-category scores (integer division is fine for
            # a 0-100 score).
            score = int(sum(results[c]['score'] for c in categories)
                                    / len(categories))
            display = '%s/100' % score
            total_runs = sum(results[c]['total_runs'] for c in categories)
            grand_total_runs += total_runs
            summary_stats[browser].update({
                    'summary_score': score,
                    'summary_display': display,
                    'total_runs': total_runs,
                    })
        summary_stats['total_runs'] = grand_total_runs


    @classmethod
    def GetStats(cls, browsers, categories=None):
        """Return the summary stats for a set of browsers and categories.

        Gets stats out of summary memcache. If needed, re-aggregate them for the
        categories. These data may come from memcache or all the way from the
        datastore.

        Args:
            browsers: a list of browsers to use instead of version level.
            categories: a list of categories like ['security', 'richtext'].
        Returns:
            {
                    browser_x: {
                            category_y: {
                                 'score': score_xy,
                                 'display': display_xy,
                                 'total_runs': total_runs_xy,
                                 }, ...
                            }, ...
            }
        """
        summary_stats = memcache.get_multi(
                browsers, namespace=cls.MEMCACHE_NAMESPACE)
        if not categories:
            categories = [t.category for t in all_test_sets.GetVisibleTestSets()]
        # Trim any unwanted stats and find any missing stats.
        missing_stats = {}
        for browser in browsers:
            ua_summary_stats = summary_stats.get(browser, {'results': {}})
            existing_categories = ua_summary_stats['results'].keys()
            for category in existing_categories:
                if category not in categories:
                    del ua_summary_stats['results'][category]
            for category in categories:
                if category not in existing_categories:
                    missing_stats.setdefault(category, []).append(browser)
        # Load any missing stats.
        # Bug fix: this loop previously rebound the 'browsers' parameter,
        # shadowing the caller's argument; renamed for clarity and safety.
        for category, missing_browsers in missing_stats.items():
            updated_stats = cls._FindAndUpdateStats(category, missing_browsers)
            summary_stats.update(updated_stats)

        cls._AddSummaryOfSummaries(summary_stats)
        return summary_stats

    @classmethod
    def KeyName(cls, category):
        """Key name for the summary row of a category."""
        return category
Esempio n. 21
0
class WordInfoEntity(db.Model):
    """Per-word spam/ham occurrence counters.

    NOTE(review): purpose inferred from the field names (spamcount/hamcount
    suggest a Bayesian-style spam filter) — confirm with the callers.
    """
    # The word being tracked.
    word = db.StringProperty()
    # Count of occurrences in messages classified as spam.
    spamcount = db.IntegerProperty()
    # Count of occurrences in messages classified as ham (non-spam).
    hamcount = db.IntegerProperty()
Esempio n. 22
0
class MapreduceState(db.Model):
  """Holds accumulated state of mapreduce execution.

  MapreduceState is stored in datastore with a key name equal to the
  mapreduce ID. Only controller tasks can write to MapreduceState.

  Properties:
    mapreduce_spec: cached deserialized MapreduceSpec instance. read-only
    active: if this MR is still running.
    last_poll_time: last time controller job has polled this mapreduce.
    counters_map: shard's counters map as CountersMap. Mirrors
      counters_map_json.
    chart_url: last computed mapreduce status chart url. This chart displays the
      progress of all the shards the best way it can.
    sparkline_url: last computed mapreduce status chart url in small format.
    result_status: If not None, the final status of the job.
    active_shards: How many shards are still processing. This starts as 0,
      then set by KickOffJob handler to be the actual number of input
      readers after input splitting, and is updated by Controller task
      as shards finish.
    start_time: When the job started.
    writer_state: Json property to be used by writer to store its state.
      This is filled when single output per job. Will be deprecated.
      Use OutputWriter.get_filenames instead.
  """

  # Terminal result statuses.
  RESULT_SUCCESS = "success"
  RESULT_FAILED = "failed"
  RESULT_ABORTED = "aborted"

  _RESULTS = frozenset([RESULT_SUCCESS, RESULT_FAILED, RESULT_ABORTED])

  # Functional properties.
  # TODO(user): Replace mapreduce_spec with job_config.
  mapreduce_spec = json_util.JsonProperty(MapreduceSpec, indexed=False)
  active = db.BooleanProperty(default=True, indexed=False)
  last_poll_time = db.DateTimeProperty(required=True)
  counters_map = json_util.JsonProperty(
      CountersMap, default=CountersMap(), indexed=False)
  app_id = db.StringProperty(required=False, indexed=True)
  writer_state = json_util.JsonProperty(dict, indexed=False)
  active_shards = db.IntegerProperty(default=0, indexed=False)
  failed_shards = db.IntegerProperty(default=0, indexed=False)
  aborted_shards = db.IntegerProperty(default=0, indexed=False)
  result_status = db.StringProperty(required=False, choices=_RESULTS)

  # For UI purposes only.
  chart_url = db.TextProperty(default="")
  chart_width = db.IntegerProperty(default=300, indexed=False)
  sparkline_url = db.TextProperty(default="")
  start_time = db.DateTimeProperty(auto_now_add=True)

  @classmethod
  def kind(cls):
    """Returns entity kind."""
    return "_AE_MR_MapreduceState"

  @classmethod
  def get_key_by_job_id(cls, mapreduce_id):
    """Retrieves the Key for a Job.

    Args:
      mapreduce_id: The job to retrieve.

    Returns:
      Datastore Key that can be used to fetch the MapreduceState.
    """
    return db.Key.from_path(cls.kind(), str(mapreduce_id))

  @classmethod
  def get_by_job_id(cls, mapreduce_id):
    """Retrieves the instance of state for a Job.

    Args:
      mapreduce_id: The mapreduce job to retrieve.

    Returns:
      instance of MapreduceState for passed id.
    """
    return db.get(cls.get_key_by_job_id(mapreduce_id))

  def set_processed_counts(self, shards_processed):
    """Updates a chart url to display processed count for each shard.

    Args:
      shards_processed: list of integers with number of processed entities in
        each shard
    """
    chart = google_chart_api.BarChart(shards_processed)
    shard_count = len(shards_processed)

    if shards_processed:
      # Only 16 labels on the whole chart (Python 2 integer division).
      stride_length = max(1, shard_count / 16)
      chart.bottom.labels = []
      for x in xrange(shard_count):
        # Label every stride-th shard plus the last one; blanks in between.
        if (x % stride_length == 0 or
            x == shard_count - 1):
          chart.bottom.labels.append(x)
        else:
          chart.bottom.labels.append("")
      chart.left.labels = ["0", str(max(shards_processed))]
      chart.left.min = 0

    # Clamp chart width to [300, 700] pixels, 20px per shard.
    self.chart_width = min(700, max(300, shard_count * 20))
    self.chart_url = chart.display.Url(self.chart_width, 200)

  def get_processed(self):
    """Number of processed entities.

    Returns:
      The total number of processed entities as int.
    """
    return self.counters_map.get(context.COUNTER_MAPPER_CALLS)

  processed = property(get_processed)

  @staticmethod
  def create_new(mapreduce_id=None,
                 gettime=datetime.datetime.now):
    """Create a new MapreduceState.

    Args:
      mapreduce_id: Mapreduce id as string.
      gettime: Used for testing.

    Returns:
      A new, unsaved MapreduceState keyed by the mapreduce id.
    """
    if not mapreduce_id:
      mapreduce_id = MapreduceState.new_mapreduce_id()
    state = MapreduceState(key_name=mapreduce_id,
                           last_poll_time=gettime())
    state.set_processed_counts([])
    return state

  @staticmethod
  def new_mapreduce_id():
    """Generate new mapreduce id."""
    return util._get_descending_key()

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    # Bug fix: the previous implementation compared self.properties() ==
    # other.properties(), which compares the *model definitions* (identical
    # for any two instances of this class) rather than the stored values.
    # Compare the actual property values instead.
    return all(getattr(self, name) == getattr(other, name)
               for name in self.properties())

  def __ne__(self, other):
    # Python 2 does not derive != from ==; keep them consistent.
    return not self.__eq__(other)
Esempio n. 23
0
class Show(db.Model):
    """A TV show tracked via TVRage.

    NOTE(review): 'slug' is declared both as a datastore property below and
    as a @property further down. The @property is the later class-body
    binding and wins, so the StringProperty is effectively dead — confirm
    whether legacy entities still carry a stored 'slug' value.
    """
    name = db.StringProperty()
    # Normalized variant of name; kept in sync by the put() override.
    normalized_name = db.StringProperty()
    # Pipe-separated alternative names; see alternative_names().
    alt_names = db.StringProperty()
    slug = db.StringProperty()  # Shadowed by the @property below.
    description = db.StringProperty(indexed=False)
    genres = db.StringProperty(indexed=False)
    network = db.StringProperty(indexed=False)
    active = db.BooleanProperty()
    country = db.StringProperty(indexed=False)
    runtime = db.IntegerProperty()
    timezone = db.StringProperty(indexed=False)
    tvrage_id = db.IntegerProperty()
    added = db.DateTimeProperty()

    # Memcache keys for the cached show list / dict.
    _memkey_all_shows_ordered = "all_shows_ordered"
    _memkey_shows_dict = "all_shows_dict"
    # Matches names starting with "The " so they can sort as "X, The".
    re_find_the = re.compile("^The (.*)$")

    @classmethod
    def kind(cls):
        # Custom datastore kind name (legacy naming).
        return "series_show"

    def __unicode__(self):
        return self.name

    @property
    def idnr(self):
        """Numeric datastore id of this show."""
        return self.key().id()

    @property
    def slug(self):
        """URL slug derived from the normalized name.

        Shadows the db.StringProperty of the same name declared above.
        """
        return self.normalized_name.replace(" ", "-")

    def alternative_names(self):
        """Returns the list of alternative names (empty if none)."""
        if self.alt_names is None:
            return []
        return self.alt_names.split("|")

    @classmethod
    def get_all_ordered(cls):
        """All active shows, memcached and sorted for display.

        "The X" sorts as "X, The"; names over 33 chars are truncated with
        "..." before the transformation.
        """
        shows = memcache.get(cls._memkey_all_shows_ordered)
        if shows is not None:
            return shows
        shows = Show.all().filter("active =", True)
        show_list = []
        for show in shows:
            if len(show.name) > 33:
                show.ordered_name = cls.re_find_the.sub(
                    "\\1, The", show.name[:33] + "...")
            else:
                show.ordered_name = cls.re_find_the.sub("\\1, The", show.name)
            show_list.append(show)
        shows = sorted(show_list, key=lambda x: x.ordered_name.lower())
        memcache.set(key=cls._memkey_all_shows_ordered, value=shows)
        return shows

    @classmethod
    def find(cls, show_name):
        """Finds a show by exact, normalized, or alternative name.

        Returns None (implicitly) when nothing matches.
        """
        if not len(show_name):
            return None
        norm_name = normalize(show_name)
        shows = Show.get_all_ordered()
        for show in shows:
            if show_name == show.name or norm_name == show.normalized_name or \
                    any([norm_name == alt_name for alt_name in show.alternative_names()]):
                return show

    @classmethod
    def get_all_dict(cls):
        """Memcached mapping of str(show key) -> Show."""
        show_dict = memcache.get(cls._memkey_shows_dict)
        if show_dict is not None:
            return show_dict
        shows = Show.get_all_ordered()
        show_dict = dict([(str(show.key()), show) for show in shows])
        memcache.set(key=cls._memkey_shows_dict, value=show_dict)
        return show_dict

    @classmethod
    def clear_cache(cls):
        """Drops both memcached show caches."""
        memcache.delete(cls._memkey_all_shows_ordered)
        memcache.delete(cls._memkey_shows_dict)

    def add_update_task(self):
        """Enqueues a task on the 'series' queue to update this show."""
        t = taskqueue.Task(url=reverse('seriesly-shows-update_show'),
                           params={"key": str(self.key())})
        t.add(queue_name="series")
        return t

    def update(self, show_info=None, get_everything=False):
        """Refreshes this show and its seasons from TVRage.

        NOTE(review): when show_info is supplied by the caller, the show's
        own attributes are NOT refreshed — only the seasons are updated.
        Confirm this asymmetry is intended.
        """
        if show_info is None:
            tvrage = TVRage()
            show_info = tvrage.get_info(self.tvrage_id)
            # Kill Tabatha\u2019s ... here
            show_info.name = show_info.name.replace(u"\u2019", "'")
            # Kill >>'Til Death<< here
            if show_info.name.startswith("'"):
                show_info.name = show_info.name.replace("'", "", 1)
            attr_list = [
                "name", "network", "genres", "active", "country", "runtime",
                "timezone", "tvrage_id"
            ]
            # Only write to the datastore if something actually changed.
            if self.update_attrs(show_info, attr_list):
                self.put()
        for season_info in show_info.seasons:
            logging.debug("Update or create Season...")
            Season.update_or_create(self,
                                    season_info,
                                    get_everything=get_everything)

    def update_attrs(self, info_obj, attr_list):
        """Copies the named attributes from info_obj onto self.

        Returns True if any attribute actually changed.
        """
        changed = False
        for attr in attr_list:
            val = getattr(info_obj, attr)
            if val != getattr(self, attr):
                setattr(self, attr, val)
                changed = True
        return changed

    def put(self):
        # Keep normalized_name in sync with name on every save.
        self.normalized_name = normalize(self.name)
        return super(Show, self).put()

    @classmethod
    def update_or_create(cls, name, show_id=None):
        """Fetches show info from TVRage (by name, or by id when name is
        None) and creates or refreshes the matching Show.

        Returns False when TVRage has no info; otherwise returns None
        (implicitly) after updating.
        """
        tvrage = TVRage()
        if name is not None:
            show_info = tvrage.get_info_by_name(name)
        else:
            show_info = tvrage.get_info(show_id)
        if show_info is None:
            return False
        logging.debug("Show exists..?")
        show = Show.all().filter("tvrage_id =", show_info.tvrage_id).get()
        if show is None:
            logging.debug("Creating Show...")
            show = Show(name=show_info.name,
                        network=show_info.network,
                        genres=show_info.genres,
                        active=show_info.active,
                        country=show_info.country,
                        runtime=show_info.runtime,
                        timezone=show_info.timezone,
                        tvrage_id=show_info.tvrage_id,
                        added=datetime.datetime.now())
            show.put()
        show.update(show_info)

    @property
    def is_new(self):
        """True if the show was added within the last 7 days."""
        if self.added is None:
            return False
        new_time = datetime.timedelta(days=7)
        if datetime.datetime.now() - self.added < new_time:
            return True
        return False
Esempio n. 24
0
class ShardState(db.Model):
  """Single shard execution state.

  The shard state is stored in the datastore and is later aggregated by
  controller task. ShardState key_name is equal to shard_id.

  Shard state contains critical state to ensure the correctness of
  shard execution. It is the single source of truth about a shard's
  progress. For example:
  1. A slice is allowed to run only if its payload matches shard state's
     expectation.
  2. A slice is considered running only if it has acquired the shard's lock.
  3. A slice is considered done only if it has successfully committed shard
     state to db.

  Properties about the shard:
    active: if we have this shard still running as boolean.
    counters_map: shard's counters map as CountersMap. All counters yielded
      within mapreduce are stored here.
    mapreduce_id: unique id of the mapreduce.
    shard_id: unique id of this shard as string.
    shard_number: ordered number for this shard.
    retries: the number of times this shard has been retried.
    result_status: If not None, the final status of this shard.
    update_time: The last time this shard state was updated.
    shard_description: A string description of the work this shard will do.
    last_work_item: A string description of the last work item processed.
    writer_state: writer state for this shard. The shard's output writer
      instance can save in-memory output references to this field in its
      "finalize" method.

   Properties about slice management:
    slice_id: slice id of current executing slice. A slice's task
      will not run unless its slice_id matches this. Initial
      value is 0. By the end of slice execution, this number is
      incremented by 1.
    slice_start_time: a slice updates this to now at the beginning of
      execution. If the transaction succeeds, the current task holds
      a lease of slice duration + some grace period. During this time, no
      other task with the same slice_id will execute. Upon slice failure,
      the task should try to unset this value to allow retries to carry on
      ASAP.
    slice_request_id: the request id that holds/held the lease. When lease has
      expired, new request needs to verify that said request has indeed
      ended according to logs API. Do this only when lease has expired
      because logs API is expensive. This field should always be set/unset
      with slice_start_time. It is possible Logs API doesn't log a request
      at all or doesn't log the end of a request. So a new request can
      proceed after a long conservative timeout.
    slice_retries: the number of times a slice has been retried due to
      processing data when lock is held. Taskqueue/datastore errors
      related to slice/shard management are not counted. This count is
      only a lower bound and is used to determined when to fail a slice
      completely.
    acquired_once: whether the lock for this slice has been acquired at
      least once. When this is True, duplicates in outputs are possible.
  """

  RESULT_SUCCESS = "success"
  RESULT_FAILED = "failed"
  # Shard can be in aborted state when user issued abort, or controller
  # issued abort because some other shard failed.
  RESULT_ABORTED = "aborted"

  _RESULTS = frozenset([RESULT_SUCCESS, RESULT_FAILED, RESULT_ABORTED])

  # Maximum number of shard states to hold in memory at any time.
  _MAX_STATES_IN_MEMORY = 10

  # Functional properties.
  mapreduce_id = db.StringProperty(required=True)
  active = db.BooleanProperty(default=True, indexed=False)
  counters_map = json_util.JsonProperty(
      CountersMap, default=CountersMap(), indexed=False)
  result_status = db.StringProperty(choices=_RESULTS, indexed=False)
  retries = db.IntegerProperty(default=0, indexed=False)
  writer_state = json_util.JsonProperty(dict, indexed=False)
  slice_id = db.IntegerProperty(default=0, indexed=False)
  slice_start_time = db.DateTimeProperty(indexed=False)
  slice_request_id = db.ByteStringProperty(indexed=False)
  slice_retries = db.IntegerProperty(default=0, indexed=False)
  acquired_once = db.BooleanProperty(default=False, indexed=False)

  # For UI purposes only.
  update_time = db.DateTimeProperty(auto_now=True, indexed=False)
  shard_description = db.TextProperty(default="")
  last_work_item = db.TextProperty(default="")

  def __str__(self):
    """Returns a human-readable rendering of the mutable state fields."""
    kv = {"active": self.active,
          "slice_id": self.slice_id,
          "last_work_item": self.last_work_item,
          "update_time": self.update_time}
    # Optional fields are included only when they carry a meaningful value.
    if self.result_status:
      kv["result_status"] = self.result_status
    if self.retries:
      kv["retries"] = self.retries
    if self.slice_start_time:
      kv["slice_start_time"] = self.slice_start_time
    if self.slice_retries:
      kv["slice_retries"] = self.slice_retries
    if self.slice_request_id:
      kv["slice_request_id"] = self.slice_request_id
    if self.acquired_once:
      kv["acquired_once"] = self.acquired_once

    # sorted() keeps the output deterministic and, unlike the Python-2-only
    # `keys = kv.keys(); keys.sort()`, also works on dict views; join avoids
    # the quadratic `result +=` string build.
    return "ShardState is {" + "".join(
        "%s:%s," % (k, kv[k]) for k in sorted(kv)) + "}"

  def reset_for_retry(self):
    """Reset self for shard retry."""
    self.retries += 1
    self.last_work_item = ""
    self.active = True
    self.result_status = None
    self.counters_map = CountersMap()
    self.slice_id = 0
    self.slice_start_time = None
    self.slice_request_id = None
    self.slice_retries = 0
    self.acquired_once = False

  def advance_for_next_slice(self, recovery_slice=False):
    """Advance self for next slice.

    Args:
      recovery_slice: True if this slice is running recovery logic.
        See handlers.MapperWorkerCallbackHandler._attempt_slice_recovery
        for more info.
    """
    self.slice_start_time = None
    self.slice_request_id = None
    self.slice_retries = 0
    self.acquired_once = False
    if recovery_slice:
      self.slice_id += 2
    else:
      self.slice_id += 1

  def set_for_failure(self):
    """Mark the shard as finished with a failure result."""
    self.active = False
    self.result_status = self.RESULT_FAILED

  def set_for_abort(self):
    """Mark the shard as finished because of an abort."""
    self.active = False
    self.result_status = self.RESULT_ABORTED

  def set_for_success(self):
    """Mark the shard as successfully finished and release slice state."""
    self.active = False
    self.result_status = self.RESULT_SUCCESS
    self.slice_start_time = None
    self.slice_request_id = None
    self.slice_retries = 0
    self.acquired_once = False

  def copy_from(self, other_state):
    """Copy data from another shard state entity to self."""
    for prop in self.properties().values():
      setattr(self, prop.name, getattr(other_state, prop.name))

  def __eq__(self, other):
    """Value equality over all datastore property values.

    NOTE(review): the previous implementation compared
    `self.properties() == other.properties()`, which compares the
    class-level Property descriptors and is therefore True for any two
    instances of the same class. Compare the stored values instead,
    mirroring copy_from().
    """
    if not isinstance(other, self.__class__):
      return False
    return all(
        getattr(self, prop.name) == getattr(other, prop.name)
        for prop in self.properties().values())

  def get_shard_number(self):
    """Gets the shard number from the key name."""
    return int(self.key().name().split("-")[-1])

  shard_number = property(get_shard_number)

  def get_shard_id(self):
    """Returns the shard ID."""
    return self.key().name()

  shard_id = property(get_shard_id)

  @classmethod
  def kind(cls):
    """Returns entity kind."""
    return "_AE_MR_ShardState"

  @classmethod
  def shard_id_from_number(cls, mapreduce_id, shard_number):
    """Get shard id by mapreduce id and shard number.

    Args:
      mapreduce_id: mapreduce id as string.
      shard_number: shard number to compute id for as int.

    Returns:
      shard id as string.
    """
    return "%s-%d" % (mapreduce_id, shard_number)

  @classmethod
  def get_key_by_shard_id(cls, shard_id):
    """Retrieves the Key for this ShardState.

    Args:
      shard_id: The shard ID to fetch.

    Returns:
      The Datastore key to use to retrieve this ShardState.
    """
    return db.Key.from_path(cls.kind(), shard_id)

  @classmethod
  def get_by_shard_id(cls, shard_id):
    """Get shard state from datastore by shard_id.

    Args:
      shard_id: shard id as string.

    Returns:
      ShardState for given shard id or None if it's not found.
    """
    return cls.get_by_key_name(shard_id)

  @classmethod
  def find_by_mapreduce_state(cls, mapreduce_state):
    """Find all shard states for given mapreduce.

    Deprecated. Use find_all_by_mapreduce_state.
    This will be removed after 1.8.9 release.

    Args:
      mapreduce_state: MapreduceState instance

    Returns:
      A list of ShardStates.
    """
    return list(cls.find_all_by_mapreduce_state(mapreduce_state))

  @classmethod
  def find_all_by_mapreduce_state(cls, mapreduce_state):
    """Find all shard states for given mapreduce.

    Args:
      mapreduce_state: MapreduceState instance

    Yields:
      shard states sorted by shard id.
    """
    keys = cls.calculate_keys_by_mapreduce_state(mapreduce_state)

    # The datastore fetch must escape any enclosing transaction, but a
    # generator cannot be decorated with @db.non_transactional directly,
    # so only the batched lookup is wrapped. Hoisted out of the loop since
    # the closure does not depend on loop state.
    @db.non_transactional
    def no_tx_get(start):
      return db.get(keys[start:start + cls._MAX_STATES_IN_MEMORY])

    i = 0
    while i < len(keys):
      states = no_tx_get(i)
      for s in states:
        i += 1
        if s is not None:
          yield s

  @classmethod
  def calculate_keys_by_mapreduce_state(cls, mapreduce_state):
    """Calculate all shard states keys for given mapreduce.

    Args:
      mapreduce_state: MapreduceState instance

    Returns:
      A list of keys for shard states, sorted by shard id.
      The corresponding shard states may not exist.
    """
    if mapreduce_state is None:
      return []

    keys = []
    for i in range(mapreduce_state.mapreduce_spec.mapper.shard_count):
      shard_id = cls.shard_id_from_number(mapreduce_state.key().name(), i)
      keys.append(cls.get_key_by_shard_id(shard_id))
    return keys

  @classmethod
  def create_new(cls, mapreduce_id, shard_number):
    """Create new shard state.

    Args:
      mapreduce_id: unique mapreduce id as string.
      shard_number: shard number for which to create shard state.

    Returns:
      new instance of ShardState ready to put into datastore.
    """
    shard_id = cls.shard_id_from_number(mapreduce_id, shard_number)
    state = cls(key_name=shard_id,
                mapreduce_id=mapreduce_id)
    return state
Esempio n. 25
0
class SocialNetworks(db.Model):
    """Datastore entity for a social network (numeric code plus name)."""
    # Numeric code identifying the network; required at construction.
    code = db.IntegerProperty(required=True)
    # Name of the network; required at construction.
    name = db.StringProperty(required=True)
Esempio n. 26
0
class DurableJobEntity(entities.BaseEntity):
    """A class that represents a persistent database entity of durable job.

    The entity is keyed by job name; sequence_num distinguishes successive
    runs of the same job so that stale status updates can be dropped.
    """

    # Timestamp of the last state transition; indexed so jobs can be
    # queried by recency.
    updated_on = db.DateTimeProperty(indexed=True)
    # Cumulative run time in seconds, maintained by _update().
    execution_time_sec = db.IntegerProperty(indexed=False)
    # One of the STATUS_CODE_* constants.
    status_code = db.IntegerProperty(indexed=False)
    # Serialized job output/progress payload.
    output = db.TextProperty(indexed=False)
    # Monotonically increasing run counter; see _create_job().
    sequence_num = db.IntegerProperty(indexed=False)

    @classmethod
    def _get_by_name(cls, name):
        """Returns the job entity keyed by name, or None if not found."""
        # Use cls (not the hard-coded class) so subclasses resolve their
        # own kind.
        return cls.get_by_key_name(name)

    @classmethod
    def _update(cls, name, sequence_num, status_code, output):
        """Updates job state in a datastore.

        Must run inside a datastore transaction. The update is dropped when
        the job no longer exists or has moved on to a newer run.

        Args:
            name: key name of the job entity.
            sequence_num: run number the caller believes is current.
            status_code: new STATUS_CODE_* value.
            output: optional new output payload; kept unchanged if falsy.
        """
        assert db.is_in_transaction()

        job = cls._get_by_name(name)
        if not job:
            logging.error('Job was not started or was deleted: %s', name)
            return
        if job.sequence_num != sequence_num:
            # Lazy %-args defer formatting until the record is actually
            # emitted (the original eagerly built the message by adding
            # separately %-formatted fragments).
            logging.warning('Request to update status code to %d '
                            'for sequence number %d '
                            'but job is already on run %d',
                            status_code, sequence_num, job.sequence_num)
            return
        now = datetime.datetime.utcnow()
        if status_code in (STATUS_CODE_STARTED, STATUS_CODE_QUEUED):
            job.execution_time_sec = 0
        else:
            # Accumulate wall-clock time since the previous transition.
            job.execution_time_sec += long(
                (now - job.updated_on).total_seconds())
        job.updated_on = now
        job.status_code = status_code
        if output:
            job.output = output
        job.put()

    @classmethod
    def _create_job(cls, name):
        """Creates new or reset a state of existing job in a datastore.

        Must run inside a datastore transaction.

        Args:
            name: key name of the job entity.

        Returns:
            The sequence number assigned to this run.
        """
        assert db.is_in_transaction()

        job = cls._get_by_name(name)
        if not job:
            job = cls(key_name=name)
        job.updated_on = datetime.datetime.utcnow()
        job.execution_time_sec = 0
        job.status_code = STATUS_CODE_QUEUED
        job.output = None
        # Bump the run counter so updates for older runs are ignored.
        if not job.sequence_num:
            job.sequence_num = 1
        else:
            job.sequence_num += 1
        job.put()
        return job.sequence_num

    @classmethod
    def _start_job(cls, name, sequence_num, output=None):
        """Transitions the job to the STARTED state."""
        return cls._update(name, sequence_num, STATUS_CODE_STARTED, output)

    @classmethod
    def _complete_job(cls, name, sequence_num, output):
        """Transitions the job to the COMPLETED state with final output."""
        return cls._update(name, sequence_num, STATUS_CODE_COMPLETED, output)

    @classmethod
    def _fail_job(cls, name, sequence_num, output):
        """Transitions the job to the FAILED state with error output."""
        return cls._update(name, sequence_num, STATUS_CODE_FAILED, output)

    @property
    def has_finished(self):
        """True once the job reached a terminal status (completed/failed)."""
        return self.status_code in [STATUS_CODE_COMPLETED, STATUS_CODE_FAILED]
Esempio n. 27
0
class Blogs(db.Model):
    """Datastore entity for a single blog post."""
    # Post title; required at construction.
    subject = db.StringProperty(required=True)
    # Post body text; required at construction.
    content = db.TextProperty(required=True)
    # Set automatically when the entity is first stored (auto_now_add).
    created = db.DateTimeProperty(auto_now_add=True)
    # Id of the authoring user; optional (may be None).
    user_id = db.StringProperty()
    # Like counter; starts at zero.
    likes = db.IntegerProperty(default=0)
Esempio n. 28
0
class Counter(db.Model):
    """Named counter persisted in the datastore."""
    # Counter identity; required at construction.
    name = db.StringProperty(required=True)
    # Current tally; starts at zero.
    count = db.IntegerProperty(required=True, default=0)
Esempio n. 29
0
class Comment(db.Model):
    """Datastore entity for a single comment on a blog post."""
    # Presumably the numeric id of the blog entity being commented on —
    # TODO confirm against the code that creates Comment instances.
    blog_id = db.IntegerProperty(required=True)
    # Comment body text.
    comment = db.TextProperty(required=True)
    # Identifier/name of the comment author.
    commentator = db.StringProperty(required=True)
Esempio n. 30
0
class Counter(db.Model):
    """A databased-backed counter."""
    # Current tally; defaults to zero.
    count = db.IntegerProperty(default=0)
    # NOTE(review): flag semantics not visible here — presumably marks
    # counters whose value may be inaccurate; confirm against callers.
    error_possible = db.BooleanProperty(default=False)