Example #1
class DailyActivityLog(db.Model):
    user = db.UserProperty()
    date = db.DateTimeProperty()
    activity_summary = object_property.ObjectProperty()

    @staticmethod
    def get_key_name(user, date):
        return "%s:%s" % (user.email(), date.strftime("%Y-%m-%d-%H"))

    @staticmethod
    def build(user, date, activity_summary):
        log = DailyActivityLog(
            key_name=DailyActivityLog.get_key_name(user, date))
        log.user = user
        log.date = date
        log.activity_summary = activity_summary
        return log

    @staticmethod
    def get_for_user_between_dts(user, dt_a, dt_b):
        query = DailyActivityLog.all()
        query.filter('user =', user)
        query.filter('date >=', dt_a)
        query.filter('date <', dt_b)
        query.order('date')

        return query
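
A minimal usage sketch for this model, assuming it runs inside an App Engine request handler where users.get_current_user() returns a signed-in user; log_todays_activity is a hypothetical helper and activity_summary can be any picklable object (the summary class itself is not shown in this example):

import datetime

from google.appengine.api import users


def log_todays_activity(activity_summary):
    # Hypothetical helper: build and store one hourly log entry for the
    # signed-in user. activity_summary can be any picklable object, since
    # ObjectProperty pickles it into the datastore.
    user = users.get_current_user()
    now = datetime.datetime.now()

    log = DailyActivityLog.build(user, now, activity_summary)
    log.put()

    # Fetch everything logged in the last 24 hours, ordered by date.
    yesterday = now - datetime.timedelta(days=1)
    query = DailyActivityLog.get_for_user_between_dts(user, yesterday, now)
    return query.fetch(1000)
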
Example #2
class DailyActivityLog(db.Model):
    """ A log entry for a dashboard presented to users and coaches.

    This is used in the end-user-visible dashboards that display
    student activity and break down where the user is spending her time.
    """

    user = db.UserProperty()
    date = db.DateTimeProperty()

    # TODO(benkomalo): This pickles models and is fragile to breakage!
    # If the ClassDailyActivity class signature changes or moves
    # modules, this could break.
    activity_summary = object_property.ObjectProperty()

    @staticmethod
    def get_key_name(user_data, date):
        return "%s:%s" % (user_data.key_email, date.strftime("%Y-%m-%d-%H"))

    @staticmethod
    def build(user_data, date, activity_summary):
        log = DailyActivityLog(
            key_name=DailyActivityLog.get_key_name(user_data, date))
        log.user = user_data.user
        log.date = date
        log.activity_summary = activity_summary
        return log

    @staticmethod
    def get_for_user_data_between_dts(user_data, dt_a, dt_b):
        query = DailyActivityLog.all()
        query.filter('user =', user_data.user)
        query.filter('date >=', dt_a)
        query.filter('date <', dt_b)
        query.order('date')

        return query
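
A small sketch showing how the key_name scheme above allows a direct get without running a query; get_log_for_hour is a hypothetical helper, and user_data is assumed to be the application's own user model exposing the key_email and user attributes this class relies on:

def get_log_for_hour(user_data, dt):
    # Keys are one-per-user-per-hour, so a given hour's log entry can be
    # fetched directly by key name instead of issuing a query.
    key_name = DailyActivityLog.get_key_name(user_data, dt)
    return DailyActivityLog.get_by_key_name(key_name)
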
Example #3
class SpatialJobWrapper(db.Model):

    # These are all the attributes of a job we use for local storage
    user_id = db.StringProperty()
    pid = db.IntegerProperty()
    cloudDatabaseID = db.StringProperty()
    startTime = db.StringProperty()
    name = db.StringProperty()
    # This is a reference to the model. I should probably use a modelId
    # instead; I'm not sure why I store it as a name.
    modelName = db.StringProperty()
    indata = db.TextProperty()
    # This is a path to the output data on the filesystem.
    outData = db.StringProperty()
    status = db.StringProperty()

    preprocessed = object_property.ObjectProperty()
    # This is a path to the preprocessed output data on the filesystem.
    preprocessedDir = db.StringProperty()

    # This is a temporary file that the server uses to store a zipped-up
    # copy of the output.
    zipFileName = db.StringProperty()
    vtkFileName = db.StringProperty()
    csvFileName = db.StringProperty()

    # These are the cloud attributes
    resource = db.StringProperty()
    uuid = db.StringProperty()
    outputURL = db.StringProperty()
    celeryPID = db.StringProperty()
    exception_message = db.StringProperty()
    output_stored = db.StringProperty()

    def preprocess(self, trajectory):
        """Preprocess the results of one trajectory, unless already done."""
        # Skip if this trajectory was already preprocessed and the
        # preprocessed output still exists on disk.
        if (self.preprocessed is not None
                and trajectory in self.preprocessed
                and self.preprocessedDir
                and os.path.exists(self.preprocessedDir)):
            return

        # Unpickle the result data file for this trajectory.
        with open(str(self.outData +
                      '/results/result{0}'.format(trajectory))) as fd:
            #print "Unpickling data file"
            #indataStr = json.loads(self.indata)

            result = pickle.load(fd)

            if not self.preprocessedDir:
                self.preprocessedDir = os.path.abspath(
                    os.path.join(
                        os.path.dirname(os.path.abspath(__file__)),
                        '../output/preprocessed/{0}/'.format(self.key().id())))

            if not os.path.exists(self.preprocessedDir):
                os.makedirs(self.preprocessedDir)

            target = os.path.join(self.preprocessedDir,
                                  "result{0}".format(trajectory))

            f = os.path.join(self.preprocessedDir, "mesh.json")

            species = result.model.get_species_map().keys()

            with open(f, 'w') as meshFile:
                json.dump(json.loads(result.export_to_three_js(species[0], 0)),
                          meshFile)

            hdf5File = h5py.File(target, 'w')

            for specie in species:
                populationValues = result.get_species(specie,
                                                      concentration=False)
                concentrationValues = result.get_species(specie,
                                                         concentration=True)
                population = hdf5File.create_dataset(specie + "/population",
                                                     data=populationValues)
                population.attrs["min"] = min(populationValues.flatten())
                population.attrs["max"] = max(populationValues.flatten())
                concentration = hdf5File.create_dataset(
                    specie + "/concentration", data=concentrationValues)
                concentration.attrs["min"] = min(concentrationValues.flatten())
                concentration.attrs["max"] = max(concentrationValues.flatten())

            hdf5File.close()

        if self.preprocessed is None:
            self.preprocessed = set()

        self.preprocessed.add(trajectory)
        self.put()
        return

    # More attributes can obviously be added.
    # The delete operation here is a little fancy: when the item gets deleted
    # from the Google db, we need to go clean up files stored locally and
    # remotely.
    def delete(self, handler):
        self.stop(handler)
        service = backendservices(handler.user_data)

        #delete the local output
        if self.zipFileName is not None and os.path.exists(self.zipFileName):
            os.remove(self.zipFileName)

        if self.preprocessedDir is not None and os.path.exists(
                str(self.preprocessedDir)):
            shutil.rmtree(str(self.preprocessedDir))

        if self.vtkFileName is not None and os.path.exists(self.vtkFileName):
            os.remove(self.vtkFileName)

        if self.outData is not None and os.path.exists(self.outData):
            shutil.rmtree(self.outData)

        # delete on cloud
        if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
            try:
                service.deleteTasks(self)
            except Exception as e:
                logging.error(
                    "Failed to delete cloud resources of job {0}".format(
                        self.key().id()))
                logging.error(e)

        super(SpatialJobWrapper, self).delete()

    # Stop the job!
    def stop(self, handler):
        if self.status == "Running":
            service = backendservices(handler.user_data)
            if self.resource == "local":
                service.stopTaskLocal([int(self.pid)])
            elif self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
                result = service.stopTasks(self)
                if result and result[self.cloudDatabaseID]:
                    final_cloud_result = result[self.cloudDatabaseID]
                    try:
                        self.outputURL = final_cloud_result['output']
                    except KeyError:
                        pass
                    self.status = "Finished"
                    self.put()
                    return True
                else:
                    # Something went wrong
                    logging.error(result)
                    return False
            else:
                raise Exception('Job Resource {0} not supported!'.format(
                    self.resource))

    def mark_final_cloud_data(self):
        flag_file = os.path.join(self.outData, ".final-cloud")
        os.system("touch {0}".format(flag_file))

    def has_final_cloud_data(self):
        flag_file = os.path.join(self.outData, ".final-cloud")
        return os.path.exists(flag_file)
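
A hedged sketch of how calling code might look up a user's finished jobs and trigger preprocessing; the user_id property and the 'Finished' status string come from the class above, while preprocess_finished_jobs is a hypothetical helper:

def preprocess_finished_jobs(user_id):
    # Find this user's finished jobs and make sure the preprocessed
    # HDF5/JSON output exists for the first trajectory.
    query = SpatialJobWrapper.all()
    query.filter('user_id =', user_id)
    query.filter('status =', 'Finished')

    for job in query.run(batch_size=100):
        job.preprocess(0)  # no-op if this trajectory was already processed
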
Example #4
class UserDiscussionStats(backup_model.BackupModel):
    """Hold statistics for each user for how many feedback items (by type)
    have some number votes. Example:

        vote_frequencies[FeedbackType.Answer] = {
            2: 7,
            3: 2,
        }

    means that the user has

        * 7 answers with sum_votes = 2 and
        * 2 answers with sum_votes = 3.

    The author's implicit vote is ignored.
    """

    # the parent entity is the associated UserData

    vote_frequencies = object_property.ObjectProperty()

    @staticmethod
    def get_or_build_for(user_data):
        """Return the UserDiscussionStats for a user, if it exists, creating it
        from scratch if it does not.
        """
        stats = UserDiscussionStats._get_for(user_data)

        if stats is None:
            stats = UserDiscussionStats._build_for(user_data)
            stats.put()

        return stats

    @staticmethod
    def _key_name(user_data):
        return 'stats:%s' % user_data.user_id

    @staticmethod
    def _get_for(user_data):
        """Return the UserDiscussionStats for a user, if it exists."""
        return UserDiscussionStats.get_by_key_name(
            UserDiscussionStats._key_name(user_data), parent=user_data)

    @staticmethod
    def _build_for(user_data):
        """Return a new freshly-updated UserDiscussionStats for a user."""
        stats = UserDiscussionStats(
            key_name=UserDiscussionStats._key_name(user_data),
            parent=user_data,
            vote_frequencies={})
        stats._update()
        return stats

    def _update(self):
        """Update vote_frequencies using all Feedback items for a user."""
        user_data = self.parent()
        freq = self.vote_frequencies

        for feedback_type in FeedbackType.all_types():
            if feedback_type not in freq:
                freq[feedback_type] = {}
            if feedback_type + '_hidden' not in freq:
                freq[feedback_type + '_hidden'] = {}

        query = Feedback.all()
        query.filter('author_user_id =', user_data.user_id)

        for feedback in query.run(batch_size=1000):
            ftype = feedback.stats_type
            votes = int(feedback.sum_votes)
            old = freq[ftype].get(votes, 0)
            freq[ftype][votes] = old + 1

        self._normalize_vote_frequencies()

    def record(self, feedback):
        """Record stats for a feedback entity -- call right after creation.

        Also see forget.

        You can also do fancier things like:
            stats.forget(feedback)
            feedback.add_vote_by(...)
            stats.record(feedback)
        """
        self._add_to_vote_frequencies(
            {feedback.stats_type: {
                feedback.sum_votes: 1
            }})

    def forget(self, feedback):
        """Forget stats for a feedback entity -- call right before deletion.
        Also see record.
        """
        self._add_to_vote_frequencies(
            {feedback.stats_type: {
                feedback.sum_votes: -1
            }})

    def _add_to_vote_frequencies(self, new_freq):
        """Update vote_frequencies by "adding" a dictionary to it, matching up
        the feedback types and vote counts. You probably want to use the
        record/forget functions instead.
        """
        for ftype in new_freq:
            # Use collections.Counter to add frequency dictionaries
            self.vote_frequencies[ftype] = dict(
                Counter(self.vote_frequencies.get(ftype, {})) +
                Counter(new_freq[ftype]))

        self._normalize_vote_frequencies()

    def _normalize_vote_frequencies(self):
        """From each frequency dictionary, delete nonpositive entries, then
        delete empty dictionaries by type.
        """
        for ftype, freqs in self.vote_frequencies.items():
            for v, f in freqs.items():
                if f <= 0:
                    del freqs[v]
            if freqs == {}:
                del self.vote_frequencies[ftype]

    def count_of_type(self, ftype, include_hidden):
        """Return the number of feedback items of the given type a user has.

        Example:
            stats = discussion_models.UserDiscussionStats.get_or_build_for(
                user_data)
            print stats.count_of_type(
                discussion_models.FeedbackType.Answer, True)
        """
        count = sum(self.vote_frequencies.get(ftype, {}).values())
        if include_hidden:
            count += self.count_of_type(ftype + '_hidden', False)
        return count
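
A short sketch of the intended call pattern, assuming user_data and feedback are existing entities of the application's UserData and Feedback models; add_answer_and_update_stats is a hypothetical helper:

def add_answer_and_update_stats(user_data, feedback):
    # Keep the per-user vote-frequency stats in sync with a newly created
    # feedback entity.
    stats = UserDiscussionStats.get_or_build_for(user_data)
    stats.record(feedback)
    stats.put()

    # Number of answers the user has, including hidden ones.
    return stats.count_of_type(FeedbackType.Answer, include_hidden=True)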