class ZabbixAgent(DynamicDocument):
    """ Zabbix Agent model representation
    """

    # Retrieve node hostname to set default value of 'hostname' field
    default_hostname = get_hostname()

    enabled = BooleanField(default=False,
                           verbose_name=_('Enable service'),
                           required=True,
                           help_text=_('Enable agent service'))

    servers = StringField(
        verbose_name=_('Server(s)'),
        required=True,
        default='127.0.0.1,::127.0.0.1,::ffff:127.0.0.1',
        help_text=
        _('List of comma delimited IP addresses (or hostnames) of Zabbix servers.'
          ))

    listeners = ListField(
        ReferenceField('Listener', reverse_delete_rule=PULL),
        verbose_name=_("Listen address(es)"),
        required=True,
        help_text=_('List of IP addresses that the agent should listen on.'))

    port = IntField(
        default=10050,
        required=True,
        min_value=1024,
        max_value=65535,
        verbose_name=_("Listen port"),
        help_text=_(
            'Agent will listen on this port for connections from the server.'))

    active_servers = StringField(
        verbose_name=_('Active Server(s)'),
        required=True,
        default="127.0.0.1:20051,zabbix.domain,[::1]:30051,::1,[12fc::1]",
        help_text=_(
            'List of comma delimited IP:port (or hostname:port) pairs of Zabbix'
            ' servers for active checks.'))

    hostname = StringField(
        verbose_name=_('Hostname'),
        required=True,
        default=default_hostname,
        help_text=_('Required for active checks and must match hostname'
                    ' as configured on the server.'))

    allow_root = BooleanField(verbose_name=_('Allow root'),
                              required=True,
                              default=False,
                              help_text=_('Allow the agent to run as "root".'))

    tls_accept = StringField(
        verbose_name=_('TLS Accept'),
        default='unencrypted',
        choices=ENCRYPTION_TYPE,
        required=True,
        help_text=_('What incoming connections to accept.'))

    tls_connect = StringField(
        verbose_name=_('TLS Connect'),
        required=True,
        default='unencrypted',
        choices=ENCRYPTION_TYPE,
        help_text=
        _('How the agent should connect to server or proxy. Used for active checks.'
          ))

    tls_cert = ReferenceField(
        'SSLCertificate',
        reverse_delete_rule=PULL,
        verbose_name=_('Agent certificate'),
        required=False,
        help_text=_('Certificate used by "TLS Accept" and/or "TLS Connect"'))

    tls_server_subject = StringField(
        verbose_name=_('Server certificate subject'),
        required=False,
        help_text=_('Allowed server certificate subject.'))

    tls_server_issuer = StringField(
        verbose_name=_('Server certificate issuer'),
        required=False,
        help_text=_('Allowed server certificate issuer.'))

    psk_identity = StringField(
        verbose_name=_('Agent PSK identity'),
        required=False,
        help_text=_(
            'Unique, case sensitive string used to identify the pre-shared key.'
        ))

    psk_key = StringField(
        verbose_name=_('Agent PSK string'),
        required=False,
        help_text=_('Pre-shared key used by agent to verify connection.'))

    enable_remote_commands = BooleanField(
        verbose_name=_('Enable remote commands'),
        required=True,
        default=False,
        help_text=_('Whether remote commands from Zabbix server are allowed.'))

    log_remote_commands = BooleanField(
        verbose_name=_('Log remote commands'),
        required=True,
        default=False,
        help_text=_('Enable logging of executed shell commands as warnings.'))

    start_agents = IntField(
        default=3,
        required=True,
        min_value=0,
        max_value=100,
        verbose_name=_("Start Agents"),
        help_text=
        _('Number of pre-forked instances of zabbix_agentd that process passive checks.'
          ' If set to 0, disables passive checks and the agent will not listen on any TCP '
          'port.'))

    refresh_active_checks = IntField(
        default=120,
        required=True,
        min_value=60,
        max_value=3600,
        verbose_name=_("Refresh active checks"),
        help_text=_(
            'How often list of active checks is refreshed, in seconds.'))

    timeout_process = IntField(
        default=3,
        required=True,
        min_value=1,
        max_value=30,
        verbose_name=_("Timeout"),
        help_text=_('Spend no more than Timeout seconds on processing.'))

    buffer_send = IntField(
        default=5,
        required=True,
        min_value=1,
        max_value=3600,
        verbose_name=_("Buffer send"),
        help_text=_('Do not keep data longer than N seconds in buffer.'))

    buffer_size = IntField(
        default=100,
        required=True,
        min_value=2,
        max_value=65535,
        verbose_name=_("Buffer size"),
        help_text=_(
            'Maximum number of values in a memory buffer. The agent will send all '
            'collected data to Zabbix Server/Proxy if the buffer is full.'))

    def to_template(self):
        """ Dictionary used to create configuration file.

        :return: Dictionary of configuration parameters
        """
        # Convert self attributes into dict
        zabbix_settings = self.to_mongo()

        # Convert listeners list into string
        zabbix_settings['listeners'] = ','.join([l.ip for l in self.listeners])
        # Convert Boolean fields into int (0 or 1)
        bool_to_int = {
            True: 1,
            False: 0,
            # 'True': 1,
            # 'False': 0
        }
        attrs_to_convert = {
            'allow_root': self.allow_root,
            'enable_remote_commands': self.enable_remote_commands,
            'log_remote_commands': self.log_remote_commands,
        }
        for attr_name, attr in attrs_to_convert.items():
            zabbix_settings[attr_name] = bool_to_int[attr]
        return zabbix_settings

    def __str__(self):
        return "Zabbix-Agent Settings"
Example #2
class Notebook(Document):
    name = StringField()
    created_time = ComplexDateTimeField(default=now)
    modified_time = ComplexDateTimeField(default=now)
    inputCells = ListField(ReferenceField('CodeSnippet'))
Example #3
class RoofImage(db.Document):
    image = ImageField(fs=storages["avatars"], max_size=2000, thumbnails=[200])
    date = DateTimeField(default=datetime.utcnow)
    uploaded_by = ReferenceField(User)
Example #4
class Result(Document):
    student = ReferenceField(User, required=True)
    course = ReferenceField(Course, required=True)
Example #5
class Event(Document, BaseEventumDocument):
    """The object that represents an individual event in Mongoengine.

    Recurring events also have a :class:`~app.models.EventSeries` instance that
    connects them to the other events in the series.

    :ivar date_created: :class:`mongoengine.fields.DateTimeField` - The date
        that the event object was created.
    :ivar date_modified: :class:`mongoengine.fields.DateTimeField` - The last
        date the event was modified.
    :ivar title: :class:`mongoengine.fields.StringField` - The title of the
        event.
    :ivar creator: :class:`mongoengine.fields.ReferenceField` - The User that
        created the event.
    :ivar location: :class:`mongoengine.fields.StringField` - The event's
        location.
    :ivar slug: :class:`mongoengine.fields.StringField` - The URL slug
        associated with the event. **Note:** appending the slug to the base
        path for events will not always yield the functioning URL for the
        event, because recurring events have indexes appended to the URL. Always
        use :func:`get_absolute_url` instead.
    :ivar start_date: :class:`DateField` - The date the event starts.
    :ivar end_date: :class:`DateField` - The date the event ends.
    :ivar start_time: :class:`TimeField` - The time the event starts.
    :ivar end_time: :class:`TimeField` - The time the event ends.
    :ivar short_description: :class:`mongoengine.fields.StringField` - The HTML
        short description of the event.
    :ivar long_description: :class:`mongoengine.fields.StringField` - The HTML
        long description of the event.
    :ivar short_description_markdown: :class:`mongoengine.fields.StringField` -
        The markdown short description of the event.
    :ivar long_description_markdown: :class:`mongoengine.fields.StringField` -
        The markdown long description of the event.
    :ivar published: :class:`mongoengine.fields.BooleanField` - True if the
        event is published.
    :ivar date_published: :class:`mongoengine.fields.DateTimeField` - The date
        that the event was published.
    :ivar is_recurring: :class:`mongoengine.fields.BooleanField` - True if the
        event is recurring.
    :ivar parent_series: :class:`mongoengine.fields.ReferenceField` - The
        :class:`~app.models.EventSeries` object that holds the recurrence info
        for an event, if it is recurring.
    :ivar image: :class:`mongoengine.fields.ReferenceField` - The headline
        image for the event.
    :ivar facebook_url: :class:`mongoengine.fields.StringField` - The URL to
        the Facebook event associated with this event.
    :ivar gcal_id: :class:`mongoengine.fields.StringField` - The ID for this
        event on Google Calendar. In Google Calendar API responses, this is
        stored as the ``id`` field for events. If this field is None, then we
        never got a proper response from Google Calendar when (if) we made a
        request to create it there. It most likely does not exist on Google
        Calendar.
    :ivar gcal_sequence: :class:`mongoengine.fields.IntField` - The sequence
        number for the event, used by Google Calendar for versioning.
    """

    # MongoEngine ORM metadata
    meta = {
        'allow_inheritance': True,
        'indexes': ['start_date', 'creator'],
        'ordering': ['-start_date']
    }

    date_created = DateTimeField(required=True, default=now)
    date_modified = DateTimeField(required=True, default=now)
    title = StringField(required=True, max_length=255)
    creator = ReferenceField("User", required=True)
    location = StringField()
    slug = StringField(required=True, max_length=255)
    start_date = DateField()
    end_date = DateField()
    start_time = TimeField()
    end_time = TimeField()
    short_description = StringField()
    long_description = StringField()
    short_description_markdown = StringField()
    long_description_markdown = StringField()
    published = BooleanField(required=True, default=False)
    date_published = DateTimeField()
    is_recurring = BooleanField(required=True, default=False)
    parent_series = ReferenceField("EventSeries")
    image = ReferenceField("Image")
    facebook_url = StringField()
    gcal_id = StringField()
    gcal_sequence = IntField()

    def get_absolute_url(self):
        """Returns the URL path that points to the client-facing version of
        this event.

        :returns: A URL path like ``"/events/cookies-and-code"``.
        :rtype: str
        """
        if self.is_recurring:
            return url_for('client.recurring_event',
                           slug=self.slug,
                           index=self.index)
        return url_for('client.event', slug=self.slug)

    @property
    def index(self):
        """Represents the index of this event in it's parent
        :class:`~app.models.EventSeries`. Returns ``None`` if the event is not
        recurring.

        :returns: The index of the event in its series.
        :rtype: int
        """
        if not self.is_recurring:
            return
        return self.parent_series.events.index(self)

    def clean(self):
        """Called by Mongoengine on every ``.save()`` to the object.

        Updates date_modified, renders the markdown into the HTML fields, and
        validates datetimes to ensure the event ends after it starts.

        :raises: :class:`wtforms.validators.ValidationError`
        """
        self.date_modified = now()

        if self.short_description_markdown:
            self.short_description = markdown.markdown(
                self.short_description_markdown,
                ['extra', 'smarty']
            )

        if self.long_description_markdown:
            self.long_description = markdown.markdown(
                self.long_description_markdown,
                ['extra', 'smarty']
            )

        if (self.start_date and
                self.end_date and
                self.start_date > self.end_date):
            raise ValidationError("Start date should always come before end "
                                  "date. Got (%r,%r)" % (self.start_date,
                                                         self.end_date))
        # Check times against None, because midnight is represented by 0.
        if (self.start_date == self.end_date and
                self.start_time is not None and
                self.end_time is not None and
                self.start_time > self.end_time):
            raise ValidationError("Start time should always come before end "
                                  "time. Got (%r,%r)" % (self.start_time,
                                                         self.end_time))

    @property
    def start_datetime(self):
        """A convenience method to combine ``start_date`` and ``start_time``
        into one :class:`datetime`.

        :returns: The combined datetime, or ``None`` if ``start_date`` or
            ``start_time`` are ``None``.
        :rtype: :class:`datetime`.
        """
        # Check times against None, because midnight is represented by 0.
        if self.start_date is None or self.start_time is None:
            return None
        return datetime.combine(self.start_date, self.start_time)

    @property
    def end_datetime(self):
        """A convenience method to combine ``end_date`` and ``end_time``
        into one :class:`datetime`.

        :returns: The combined datetime, or ``None`` if ``end_date`` or
            ``end_time`` are ``None``.
        :rtype: :class:`datetime`.
        """
        # Check times against None, because midnight is represented by 0.
        if self.end_date is None or self.end_time is None:
            return None
        return datetime.combine(self.end_date, self.end_time)

    def id_str(self):
        """The id of this object, as a string.

        :returns: The id
        :rtype: str
        """
        return str(self.id)

    def image_url(self):
        """Returns the URL path that points to the image for the event.

        :returns: The URL path like ``"/static/img/cat.jpg"``.
        :rtype: str
        """
        if self.image:
            return self.image.url()
        return url_for(
            'eventum.static',
            filename=current_app.config['EVENTUM_DEFAULT_EVENT_IMAGE'])

    def ready_for_publishing(self):
        """Returns True if the event has all necessary fields filled out.

        Necessary fields are:

        - ``title``
        - ``creator``
        - ``location``
        - ``start_datetime``
        - ``end_datetime``
        - ``short_description``
        - ``long_description``
        - ``image``

        :Returns: True if we are ready for publishing.
        :rtype: bool
        """
        return all([self.title,
                    self.creator,
                    self.location,
                    self.start_datetime,
                    self.end_datetime,
                    self.short_description,
                    self.long_description,
                    self.image])

    def is_multiday(self):
        """Returns True if the event spans muliple days.

        :returns: True if the event spans multiple days.
        :rtype: bool
        """
        if self.start_date is None or self.end_date is None:
            return True
        if self.start_date == self.end_date:
            return False
        if (self.start_date == self.end_date - timedelta(days=1) and
                self.end_time.hour < 5):
            return False
        return True

    def human_readable_date(self):
        """Return the date of the event (presumed not multiday) formatted like:
        ``"Sunday, March 31"``.

        :returns: The formatted date.
        :rtype: str
        """
        if not self.start_date:
            return '??? ??/??'
        return self.start_date.strftime("%A, %B %d").replace(' 0', ' ')

    def human_readable_time(self):
        """Return the time range of the event (presumed not multiday) formatted
        like ``"11am - 2:15pm"`` or ``"3 - 7:30pm"``.

        :returns: The formatted date.
        :rtype: str
        """
        return '{}-{}'.format(self._human_readable_start_time(),
                              self._human_readable_end_time())

    def _human_readable_start_time(self):
        """Format start time as one of these four formats:

        1. ``"3:30am"``
        2. ``"3pm"``
        2. ``"3:30"``
        2. ``"3"``

        depending on whether or not the start time is on an even hour, and
        whether or not the end time and start time will share the pm/am string.

        :returns: The formatted date.
        :rtype: str
        """
        if self.start_time is None:
            return '??:??'

        am_pm = '%p'
        if self._start_and_end_time_share_am_or_pm():
            am_pm = ''   # Omit am/pm if it will appear in the end time.

        time = '%I:%M'
        if self.start_time.minute == 0:
            time = '%I'  # Omit minutes if the time is on the hour.

        format = time + am_pm
        return self.start_time.strftime(format).lstrip('0').lower()

    def _human_readable_end_time(self):
        """Format end time as one of these two formats:

        1. ``"3:30am"``
        2. ``"3pm"``

        depending on whether or not the end time is on an even hour

        :returns: The formatted date.
        :rtype: str
        """
        if self.end_time is None:
            return '??:??'
        format = '%I:%M%p'
        if self.end_time.minute == 0:
            format = '%I%p'
        return self.end_time.strftime(format).lstrip('0').lower()

    def human_readable_datetime(self):
        """Format the start and end date date in one of the following three
        formats:

        1. ``"Sunday, March 31 11pm - Monday, April 1 3am"``
        2. ``"Sunday, March 31 11am-2:15pm"``
        3. ``"Sunday, March 31 3-7:30pm"``

        Depending on whether or not the start / end times / dates are the same.
        All unknown values will be replaced by question marks.

        :returns: The formatted date.
        :rtype: str
        """
        if self.start_date:
            start_date = (self.start_date.strftime('%A, %B %d ')
                          .replace(' 0', ' '))
        else:
            start_date = '???, ??/?? '

        # Check times against None, because midnight is represented by 0.
        if self.start_time is not None:
            start_time = self._human_readable_start_time()
        else:
            start_time = '??:??'

        if self.end_date:
            if not self.start_date or self.start_date != self.end_date:
                end_date = (self.end_date.strftime('%A, %B %d ')
                            .replace(' 0', ' '))
            else:
                end_date = ''
        else:
            end_date = '???, ??/?? '

        # Check times against None, because midnight is represented by 0.
        if self.end_time is not None:
            end_time = self._human_readable_end_time()
        else:
            end_time = '??:??'

        separator = ' - '
        if not end_date:
            separator = '-'

        return '{}{}{}{}{}'.format(start_date,
                                   start_time,
                                   separator,
                                   end_date,
                                   end_time)

    def _start_and_end_time_share_am_or_pm(self):
        """Returns True if the start and end times for an event are both pm or
        am.

        :returns: True if the start and end times for an event are both pm or
            am.
        :rtype: bool
        """
        # Check times against None, because midnight is represented by 0.
        return (self.start_time is not None and
                self.end_time is not None and
                not self.is_multiday() and
                self.start_time.strftime("%p") == self.end_time.strftime("%p"))

    def to_jsonifiable(self):
        """
        Returns a jsonifiable dictionary of event attributes to values. The
        dictionary only contains attributes whose types are jsonifiable.

        :returns: A jsonifiable dictionary of event attributes to values.
        :rtype: dict
        """

        attrs = ['date_created', 'date_modified', 'title', 'location', 'slug',
                 'start_datetime', 'end_datetime', 'short_description',
                 'long_description', 'short_description_markdown',
                 'long_description_markdown', 'published', 'date_published',
                 'is_recurring', 'facebook_url']

        return dict(zip(list(attrs), [getattr(self, attr) for attr in attrs]))

    def __unicode__(self):
        """This event, as a unicode string.

        :returns: The title of the event
        :rtype: str
        """
        return self.title

    def __repr__(self):
        """The representation of this event.

        :returns: The event's details.
        :rtype: str
        """
        return 'Event(title=%r, location=%r, creator=%r, start=%r, end=%r, ' \
            'published=%r)' % (self.title, self.location, self.creator,
                               self.start_datetime, self.end_datetime,
                               self.published)
class Campaign(Document):
    name = StringField(max_length=100, required=True)
    slug = StringField(required=True, primary_key=True)
    description = StringField()
    creator = ReferenceField('Admin', required=True)
    subscribers = ListField(ReferenceField('Admin'))
    creation_time = DateTimeField(default=datetime.now)
    last_update = DateTimeField(default=datetime.now)
    wiki_page = StringField()
    tasks = ListField(ReferenceField('BaseTask'))
    # either a CSV Corpus or a corpus folder
    corpus: BaseCorpus = ReferenceField('BaseCorpus', required=True)
    # the audio file is being served in the starter zip
    serve_audio = BooleanField(default=False)
    # this object stores the campaign annotation checking scheme
    checking_scheme: TextGridCheckingScheme = ReferenceField(
        'TextGridCheckingScheme')
    # if this is false, textgrid aren't checked (except for the merge part)
    check_textgrids = BooleanField(default=True)
    # updated on trigger
    stats: CampaignStats = EmbeddedDocumentField(CampaignStats)

    def validate(self, clean=True):
        if isinstance(self.corpus, CSVCorpus) and self.serve_audio:
            raise ValidationError("Can't serve audio files with a csv corpus")
        super().validate(clean)

    def launch_gamma_update(self):
        """Launches a subprocess that computes the gamma statistics for
        that campaign. Does not wait for the subprocess to finish"""
        process = subprocess.Popen(["campaign-gamma", self.slug])
        self.stats.gamma_updating = True
        self.stats.can_update_gamma = False
        self.save()

    def update_stats(self, gamma_only=False):
        if self.stats is None:
            self.stats = CampaignStats()
        if gamma_only:
            self.stats.update_gamma_stats(self)
        else:
            self.stats.update_stats(self)
        self.save()

    @classmethod
    def post_delete_cleanup(cls, sender, document: 'Campaign', **kwargs):
        """Called upon a post_delete event. Takes care of cleaning up stuff, deleting the campaigns's
        child tasks and removing notifications related to that campaign"""
        for task in document.tasks:
            task.delete()
        from .users import Notification
        Notification.objects(
            Q(object_id=document.slug)
            & (Q(object_type="campaign") | Q(object_type="dashboard"))).delete()

    def tasks_for_file(self, audio_file: str):
        return len([task for task in self.tasks if task.data_file == audio_file])

    @property
    def active_tasks(self):
        return BaseTask.objects(campaign=self.id, is_done=False)

    @property
    def annotators(self):
        return self.stats.annotators

    def gen_template_tg(self, filename: str) -> SingleAnnotatorTextGrid:
        """Generates the template textgrid (pregenerated tiers and tg length)
        for that campaign"""
        audio_file = self.corpus.get_file(filename)
        if self.checking_scheme is None:
            tg = TextGrid(name=filename, maxTime=audio_file.duration)
        else:
            tg = self.checking_scheme.gen_template_tg(audio_file.duration,
                                                      filename)
        return SingleAnnotatorTextGrid.from_textgrid(tg, [self.creator], None)

    def gen_summary_csv(self, only_gamma=False) -> str:
        str_io = StringIO()
        fields = [
            "task_file", "time_created", "time_completed", "time_started",
            "annotators"
        ]
        if self.stats is not None and self.stats.can_compute_gamma:
            write_gamma = True
            for tier_name in self.stats.tiers_gamma.keys():
                fields.append(f"gamma_{tier_name}")
        else:
            write_gamma = False

        csv_writer = csv.DictWriter(str_io, fields, delimiter="\t")
        csv_writer.writeheader()
        for task in self.tasks:
            task: BaseTask
            task_row = {
                "task_file": task.data_file,
                "time_created": task.creation_time,
                "time_completed": task.finish_time,
                "time_started": task.start_time,
                "annotators": ",".join(annotator.username
                                       for annotator in task.annotators)
            }
            if isinstance(task, DoubleAnnotatorTask) and write_gamma:
                for tier_name, gamma in task.tiers_gamma.items():
                    task_row[f"gamma_{tier_name}"] = gamma

            # if we only want the gamma rows, skipping single annotators tasks
            if only_gamma and not isinstance(task, DoubleAnnotatorTask):
                continue

            csv_writer.writerow(task_row)
        str_io.flush()
        return str_io.getvalue()

    def get_full_annots_archive(self) -> bytes:
        """Generates the full annotations zip archive for that campaign, to be
        then sent to the client"""
        buffer = BytesIO()
        # TODO: integrate the csv summary generator from above
        with zipfile.ZipFile(buffer, "w", zipfile.ZIP_STORED) as zfile:
            zip_folder: Path = Path(self.slug)

            # writing full summary
            summary_path = zip_folder / Path("summary.csv")
            zfile.writestr(str(summary_path), self.gen_summary_csv())

            # then writing tasks textgrids and per-task summary
            for task in self.tasks:
                task_annotators = "-".join(
                    [annotator.username for annotator in task.annotators])
                task_datafile = str(Path(task.data_file).with_suffix(''))
                task_folder = (zip_folder / Path(task_datafile) /
                               Path(task_annotators))
                for tg_name, tg_doc in task.textgrids.items():
                    if tg_doc is not None:
                        tg_archpath = task_folder / Path(tg_name + ".TextGrid")
                        zfile.writestr(str(tg_archpath), tg_doc.to_str())

        return buffer.getvalue()

    @property
    def short_profile(self):
        return {"slug": self.slug, "name": self.name}

    @property
    def status(self):
        if self.stats is None:
            self.update_stats()

        return {
            "slug": self.slug,
            "name": self.name,
            "description": self.description,
            "creator": self.creator.short_profile,
            "stats": self.stats.to_msg(),
            "corpus_path": self.corpus.name,
            "tiers_number": (len(self.checking_scheme.tiers_specs)
                             if self.checking_scheme is not None else None),
            "check_textgrids": self.check_textgrids,
            "annotators": [annotator.short_profile
                           for annotator in self.annotators],
            "subscribers": [user.username for user in self.subscribers],
            "creation_time": self.creation_time,
            "last_update_time": self.last_update,
        }
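
# A sketch of how post_delete_cleanup is presumably wired up elsewhere in the
# application (the hookup is not shown in this snippet): MongoEngine signal
# handlers receive the sender class and the deleted document, which matches
# the classmethod's signature.
from mongoengine import signals

signals.post_delete.connect(Campaign.post_delete_cleanup, sender=Campaign)
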
Example #7
class EventSeries(Document, BaseEventumDocument):
    """A model that stores the recurrence information for a recurring event
    series.

    :ivar date_created: :class:`mongoengine.fields.DateTimeField` - The date
        when the series was created.
    :ivar date_modified: :class:`mongoengine.fields.DateTimeField` - The date
        when the series was last modified.
    :ivar slug: :class:`mongoengine.fields.StringField` - The URL slug for this
        event. **Note:** this slug is shared across all
        :class:`~app.models.Event` s in this series.  This is not the event's
        unique URL, rather the slug that is unique between series objects.
    :ivar events: :class:`mongoengine.fields.ListField` - A list of
        :class:`~app.models.Event` s in this series.
    :ivar frequency: :class:`mongoengine.fields.StringField` - The interval of
        the occurrence. Can only take the value ``"weekly"``.
    :ivar every: :class:`mongoengine.fields.IntField` - The number of
        ``frequency`` units after which the event repeats. For example,
        ``frequency = "weekly"`` and ``every = 2`` indicates that the event
        occurs every two weeks.
    :ivar ends_after: :class:`mongoengine.fields.BooleanField` - True if the
        event ends after a specific number of occurrences.  Must be set opposite
        to ``ends_on``.
    :ivar ends_on: :class:`mongoengine.fields.BooleanField` - True if the event
        ends on a certain date. Must be set opposite to ``ends_after``.
    :ivar num_occurrences: :class:`mongoengine.fields.IntField` - The number of
        occurrences for a recurring event.  Should be set only if
        ``ends_after`` is ``True``.
    :ivar recurrence_end_date: :class:`DateField` - The date that the
        recurrence ends on.  Should be set only if ``ends_on`` is ``True``.
    :ivar recurrence_summary: :class:`mongoengine.fields.StringField` - A plain
        English explanation of the recurrence. Generated in JavaScript but
        stored here.
    :ivar gcal_id: :class:`mongoengine.fields.StringField` - The ID for this
        event series on Google Calendar.  In Google Calendar API responses,
        this is stored as the ``id`` field for events. If this field is None,
        then we never got a proper response from Google Calendar when (if) we
        made a request to create it there. It most likely does not exist on
        Google Calendar.  This is the same as the ``gcal_id`` of the first
        event in the series.
    """

    # MongoEngine ORM metadata
    meta = {}

    date_created = DateTimeField(required=True, default=now)
    date_modified = DateTimeField(required=True, default=now)
    slug = StringField(required=True, max_length=255)
    events = ListField(ReferenceField("Event"))
    frequency = StringField(default="weekly")
    every = IntField(min_value=1, max_value=30)
    ends_after = BooleanField(default=True)
    ends_on = BooleanField(default=False)
    num_occurrences = IntField(default=1)
    recurrence_end_date = DateField()
    recurrence_summary = StringField()
    gcal_id = StringField()  # ID of the first event in the series

    def delete_one(self, event):
        """Deletes ``event`` after removing it from the series.

        :param event: The event to delete.
        :type event: :class:`~app.models.Event`
        """
        self.events.remove(event)
        event.delete()
        self.save()

    def delete_all_except(self, event):
        """Deletes all events in the series except ``event``, and then deletes
        the series. Should be called when an event's recurrence is disabled.

        :param event: The event to delete.
        :type event: :class:`~app.models.Event`
        """
        for e in self.events[:]:
            if e != event:
                e.delete()
        event.parent_series = None
        self.delete()

    def delete_all(self):
        """Deletes all events in the series, and the series itself."""
        for e in self.events:
            e.delete()
        self.delete()

    def clean(self):
        """Called by Mongoengine on every ``.save()`` to the object.

        Update date_modified, and ensure that exactly one of `ends_after`
        and `ends_on` is True at a time.

        :raises: :class:`wtforms.validators.ValidationError`
        """
        self.date_modified = now()

        if self.ends_after == self.ends_on:
            raise ValidationError("ends_on and ends_after should not share a "
                                  "value.")
Example #8
class RunningModule(EmbeddedDocument):
    module = ReferenceField('Module')
    version = StringField()
Example #9
class OrderedProduct(EmbeddedDocument):
    quantity = IntField(required=True)
    product = ReferenceField(Product, required=True)
Example #10
class Habit(Document):

    name = StringField(max_length=120, required=True)
    description = StringField(max_length=5000)
    user = ReferenceField(User, reverse_delete_rule=CASCADE)
    num_Days = IntField(default=30)
    repeat = ListField(StringField(max_length=10))
    streak = 0
    is_public = StringField(required=True, default="false", max_length=120)
    # Use callables for defaults so they are evaluated per-document,
    # not once at class definition time.
    string_start = StringField(required=True,
                               default=lambda: datetime.datetime.now().strftime(
                                   "%B %m, %Y"))
    start_Date = DateTimeField(required=True, default=datetime.datetime.now)
    curr_Date = DateTimeField(required=True, default=datetime.datetime.now)
    end_Date = DateTimeField(required=True, default=datetime.datetime.now)
    habit_data = ListField(ListField(max_length=3, required=True))
    complete = 0
    monthDict = {
        1: 31,
        2: 28,
        3: 31,
        4: 30,
        5: 31,
        6: 30,
        7: 31,
        8: 31,
        9: 30,
        10: 31,
        11: 30,
        12: 31
    }

    def Habit(self, name, days, end_Date):
        now = datetime.datetime.now()  # assumed source of the start/current day
        self.name = name
        self.days = days
        # A list with the month in mm format and the day in dd format
        self.start_Date = [now.strftime("%m"), now.strftime("%d")]
        self.start_Day = now.day
        self.curr_Day = now.day
        self.end_Date = end_Date


    def setName(self, name):
        self.name = name

    def getName(self):
        return self.name

    def setDays(self, days):
        self.days = days

    def getDays(self):
        return self.days

    def setStreak(self, num):
        self.streak = num

    def getStreak(self):
        return self.streak

    def setStartDay(self, day):
        self.start_Day = day

    def getStartDay(self):
        return self.start_Day

    def setStartDate(self, date):
        self.start_Date = date

    def getStartDate(self):
        return self.start_Date

    def setEndDate(self, date):
        self.end_Date = date

    def getEndDate(self):
        return self.end_Date

    def calculate(self):
        # start_Date/curr_Date/end_Date are [month, day] string pairs
        day1 = int(self.start_Date[1])
        daycurr = int(self.curr_Date[1])
        day2 = int(self.end_Date[1])
        if int(self.end_Date[0]) > int(self.start_Date[0]):
            day2 += self.monthDict[int(self.start_Date[0])]
        if int(self.curr_Date[0]) > int(self.start_Date[0]):
            daycurr += self.monthDict[int(self.start_Date[0])]
        fromStart = daycurr - day1
        totalDays = day2 - day1
        return str(round(fromStart / totalDays, 2))

    def streak(self):
        # a new day has started since the habit was last checked
        if datetime.datetime.now() > self.curr_Date:
            if self.complete == 1:
                self.streak += 1
                self.curr_Date += datetime.timedelta(days=1)
                self.complete = 0
                return self.streak
            else:
                return 0
        else:
            return self.streak

    def to_public_json(self):
        entry = {
            "id": str(self.id),
            "user": {
                "id": str(self.user.id),
                "username": self.user.username
            },
            "name": self.name,
            "description": self.description,
            "num_Days": self.num_Days,
            "repeat": self.repeat,
            "string_start": self.string_start,
            "start_Date": self.start_Date,
            "curr_Date": self.curr_Date,
            "end_Date": self.end_Date,
            "is_public": self.is_public,
            "habit_data": self.habit_data
        }
        return entry
Example #11
class Track(Document):
    """
    Track is a reference to a playable medium stored in an external,
    online source.

    It has common meta data (title, year, album name, artist name, ...)
    and is optionally linked to Musicbrainz DB via the track's MBID,
    artist's MBID, or/and album MBID.

    It is an "item" in FRBR's parlance.
    It is a "recording" in MB's parlance.

    http://en.wikipedia.org/wiki/FRBR
    http://musicbrainz.org/doc/MusicBrainz_Database/Schema

    It has `source` which determines where the medium is physically stored.

    """
    # User is optional as tracks can exist and be in no one's music collection.
    # However, as soon a user adds it to their collection, an editable copy
    # of the track with the user assigned is created.
    user = ReferenceField(User,
                          dbref=False,
                          required=False,
                          reverse_delete_rule=CASCADE)

    # All MBIDs are optional as they are filled only when possible.
    mbid = StringField()
    artist_mbid = StringField()
    album_mbid = StringField()

    title = StringField(required=True)

    # artist and album names
    artist = StringField()
    album = StringField()

    number = IntField()
    # set = IntField()
    year = IntField()

    # Source metadata.
    source = StringField(required=True, choices=['youtube', 'dropbox'])
    dropbox = EmbeddedDocumentField(DropboxTrack)
    youtube = EmbeddedDocumentField(YoutubeTrack)

    meta = {
        'collection': 'tracks',
        'allow_inheritance': False,
        'indexes': [
            'user',
            'mbid',
            'artist_mbid',
            'album_mbid',
        ]
    }

    def __unicode__(self):
        return self.title

    def to_json(self):

        data = {
            'id': str(self.pk),
            'title': self.title,
            'artist': self.artist or UNTITLED_ARTIST,
            'album': self.album or UNTITLED_ALBUM,
            'number': self.number,
            'source': self.source[0],
        }

        if self.source == 'youtube':
            data['sourceId'] = self.youtube.id

        return data
class AbstractHuman(Document):
    meta = {"abstract": True}
    creator = ReferenceField("self", dbref=True)


class Home(Document):
    dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
    address = StringField()
Example #14
class Post(Document):
    text = StringField()
    user = ReferenceField(User, required=True)
    comments = ListField(StringField())
    comments_by = ListField(ReferenceField("User"))
    keyword = ListField(StringField())
    reaction = ListField(StringField())
    hilarious = ListField(ReferenceField("User"))
    well_written = ListField(ReferenceField("User"))
    amazing_story = ListField(ReferenceField("User"))

    grammar_king = ReferenceField("User")

    group = IntField()
    assignment_id = ReferenceField("Assignment")
    submit = BooleanField(default=False)

    @staticmethod
    def create(text, assignment_id):

        post = Post(text=text,
                    user=current_user['id'],
                    assignment_id=ObjectId(assignment_id),
                    submit=True)
        post.save()
        return post

    @staticmethod
    def add_reaction(post_id, hilarious="", well_written="", amazing=""):

        post = Post.objects(pk=ObjectId(post_id), submit=True)
        if hilarious.lower() == 'true':
            post.update(add_to_set__hilarious=current_user['id'])
        if well_written.lower() == 'true':
            post.update(add_to_set__well_written=current_user['id'])
        if amazing.lower() == 'true':
            post.update(add_to_set__amazing_story=current_user['id'])

        return True

    @staticmethod
    def submit_assigment(text, assignment_id):

        assignment = Post.objects(user=current_user['id'],
                                  assignment_id=ObjectId(assignment_id))

        if assignment:
            assignment.update(text=text, submit=True)
            return assignment
        else:
            post = Post.create(text, assignment_id)
            post.save()
            return post

    def get_json(self):

        #print(self.user.pk)

        user_name = User.objects(pk=self.user.pk, name__exists=True)
        u_name = 'sample'
        if user_name:
            for u in user_name:
                u_name = u['name']

        dict_list = []
        for i in range(len(self.comments)):
            diction = {'comment': '', 'name': ''}
            diction['comment'] = self.comments[i]
            name = User.objects(pk=self.comments_by[i].pk, name__exists=True)
            another_name = ''
            if name:

                for n in name:
                    another_name = n['name']

            diction['name'] = another_name
            dict_list.append(diction)

        jstring = ''
        jstring = '{\n"text": ' + json.dumps(self.text) + ',\n' \
                  + '"name": ' + json.dumps(str(u_name)) + ',\n' \
                  + '"mongoid": ' + json.dumps(str(self.pk)) + ',\n' \
                  + '"comments": ' + json.dumps(dict_list) + ',\n' \
                  + '"hilarious": ' + json.dumps(str(len(self.hilarious))) + ',\n' \
                  + '"well_written": ' + json.dumps(str(len(self.well_written))) + ',\n' \
                  + '"amazing_story": ' + json.dumps(str(len(self.amazing_story))) + "}\n"

        return jstring

    @staticmethod
    def add_comments(post_id, comment):

        post = Post.objects(pk=ObjectId(post_id)).first()
        if (post):
            Post.objects(pk=ObjectId(post_id)).update_one(
                push__comments=comment, push__comments_by=current_user['id'])
            return post
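
# Usage sketch, not in the original source (assumes a logged-in Flask user and
# a valid post id): add_reaction takes string flags, as they would arrive from
# a web form, and the add_to_set updates keep repeated reactions from the same
# user from being counted twice.
def _react_to_post_example(post_id):
    Post.add_reaction(post_id, hilarious="true", well_written="false")
    return Post.objects(pk=ObjectId(post_id)).first().get_json()
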
Example #15
class Physician(Staff):
    name_initials = StringField(required=True)
    is_licenced = BooleanField()
    reference_number = StringField()
    main_speciality = ReferenceField(MedicalSpeciality, required=True)
    auxiliary_speciality = ListField(ReferenceField(MedicalSpeciality))
Example #16
class SummonerDoc(Document):
    name = StringField()
    game_refs = ListField(ReferenceField('GameDoc'))
Example #17
class Comment(EmbeddedDocument):
    cid = ObjectIdField(default=lambda: ObjectId())
    author = ReferenceField(User)
    content = StringField()
    reply = ObjectIdField()
Example #18
class Room(Document):
    name = StringField(required=True, unique=True)
    members = ListField(ReferenceField(Client))
class CampaignStats(EmbeddedDocument):
    """Stores the campaing basic statistics"""
    # TODO add "refresh campaign stats handler"
    total_files = IntField(required=True)
    assigned_files = IntField(required=True)
    total_tasks = IntField(required=True)
    completed_tasks = IntField(required=True)
    single_annotator_tasks = IntField(required=True)
    double_annotator_tasks = IntField(required=True)
    tiers_gamma: Dict[str, float] = MapField(FloatField())
    can_update_gamma = BooleanField()
    can_compute_gamma = BooleanField()
    gamma_updating = BooleanField(default=False)
    annotators = ListField(ReferenceField('Annotator'))

    def update_stats(self, campaign: 'Campaign'):
        """Update all statistics for that campaign"""
        self.total_tasks = len(campaign.tasks)
        self.completed_tasks = len(
            [task for task in campaign.tasks if task.is_done])
        self.total_files = campaign.corpus.files_count
        self.assigned_files = len(
            set(task.data_file for task in campaign.tasks))
        self.single_annotator_tasks = len([
            task for task in campaign.tasks
            if isinstance(task, SingleAnnotatorTask)
        ])
        self.double_annotator_tasks = len([
            task for task in campaign.tasks
            if isinstance(task, DoubleAnnotatorTask)
        ])
        all_annotators = set()
        for task in campaign.tasks:
            for annotator in task.annotators:
                all_annotators.add(annotator)
        self.annotators = list(all_annotators)
        self.update_gamma_stats(campaign)

    def update_gamma_stats(self, campaign: 'Campaign'):
        """Aggregates the gamma statistics for the campaign. Does **NOT**
        actually compute the gamma values"""
        if campaign.checking_scheme is None:
            # no gamma possible if a checking scheme hasn't been specified
            self.can_update_gamma = False
            self.can_compute_gamma = False
            self.gamma_updating = False
        else:
            self.can_compute_gamma = True
            tiers_gamma: Dict[str, List[float]] = defaultdict(list)
            # this flag can be set if one of the tasks is ripe
            # for gamma updating
            self.can_update_gamma = False
            for task in campaign.tasks:
                if not isinstance(task, DoubleAnnotatorTask):
                    continue

                if not task.can_compute_gamma:
                    continue

                if not task.tiers_gamma:
                    self.can_update_gamma = True
                else:
                    for tier_name, gamma_value in task.tiers_gamma.items():
                        tiers_gamma[tier_name].append(gamma_value)
            # TODO: computing mean gamma for each tier, can be changed?
            for tier_name, gamma_values in tiers_gamma.items():
                self.tiers_gamma[tier_name] = mean(gamma_values)

    def to_msg(self):
        return {
            "total_files": self.total_files,
            "assigned_files": self.assigned_files,
            "total_tasks": self.total_tasks,
            "completed_tasks": self.completed_tasks,
            "can_update_gamma": self.can_update_gamma,
            "can_compute_gamma": self.can_compute_gamma,
            "gamma_updating": self.gamma_updating,
            "tiers_gamma": self.tiers_gamma
        }
Example #20
class Sockets(Document):
    client = ReferenceField(Client, required=True)
    fd = IntField(unique=True, required=True)
Example #21
class CourseReg(Document):
    student = ReferenceField(User)
    course = ReferenceField(Course, required=True)
    status = StringField(required=True)
    message = StringField(required=True)
Example #22
class Transaction(Document, BaseModel):
    created_at = date_now()
    updated_at = DateTimeField()
    stp_id = IntField()
    fecha_operacion = DateTimeField()
    institucion_ordenante = StringField()
    institucion_beneficiaria = StringField()
    clave_rastreo = StringField()
    monto = IntField()
    nombre_ordenante = StringField()
    tipo_cuenta_ordenante = IntField()
    cuenta_ordenante = StringField()
    rfc_curp_ordenante = StringField()
    nombre_beneficiario = StringField()
    tipo_cuenta_beneficiario = IntField()
    cuenta_beneficiario = StringField()
    rfc_curp_beneficiario = StringField()
    concepto_pago = StringField()
    referencia_numerica = IntField()
    empresa = StringField()
    estado: Enum = EnumField(Estado, default=Estado.created)
    version = IntField()
    speid_id = StringField()
    folio_origen = StringField()
    tipo_pago = IntField()
    email_beneficiario = StringField()
    tipo_cuenta_beneficiario2 = StringField()
    nombre_beneficiario2 = StringField()
    cuenta_beneficiario2 = StringField()
    rfc_curpBeneficiario2 = StringField()
    concepto_pago2 = StringField()
    clave_cat_usuario1 = StringField()
    clave_cat_usuario2 = StringField()
    clave_pago = StringField()
    referencia_cobranza = StringField()
    tipo_operacion = StringField()
    topologia = StringField()
    usuario = StringField()
    medio_entrega = IntField()
    prioridad = IntField()
    compound_key = StringField()

    events = ListField(ReferenceField(Event))

    meta = {
        'indexes': [
            '+stp_id',
            '+speid_id',
            '+clave_rastreo',
            # The Unique-Sparse index skips over any document that is missing
            # the indexed field (null values)
            {'fields': ['+compound_key'], 'unique': True, 'sparse': True},
        ]
    }

    def set_state(self, state: Estado):
        callback_helper.set_status_transaction(self.speid_id, state.value)
        self.estado = state

        self.events.append(Event(type=EventType.completed))

    def confirm_callback_transaction(self):
        response = ''
        self.events.append(Event(type=EventType.created))
        self.save()
        self.estado = Estado.succeeded
        callback_helper.send_transaction(self.to_dict())

        self.events.append(
            Event(type=EventType.completed, metadata=str(response))
        )

    def create_order(self) -> Orden:
        # Validate account has already been created
        if not SKIP_VALIDATION_PRIOR_SEND_ORDER:
            try:
                account = Account.objects.get(cuenta=self.cuenta_ordenante)
                assert account.estado is Estado.succeeded
            except (DoesNotExist, AssertionError):
                self.estado = Estado.error
                self.save()
                raise MalformedOrderException(
                    f'Account has not been registered: {self.cuenta_ordenante}'
                    f', stp_id: {self.stp_id}'
                )

        # Don't send if stp_id already exists
        if self.stp_id:
            return Orden(  # type: ignore
                id=self.stp_id,
                monto=self.monto / 100.0,
                conceptoPago=self.concepto_pago,
                nombreBeneficiario=self.nombre_beneficiario,
                cuentaBeneficiario=self.cuenta_beneficiario,
                institucionContraparte=self.institucion_beneficiaria,
                cuentaOrdenante=self.cuenta_ordenante,
            )

        optionals = dict(
            institucionOperante=self.institucion_ordenante,
            claveRastreo=self.clave_rastreo,
            referenciaNumerica=self.referencia_numerica,
            rfcCurpBeneficiario=self.rfc_curp_beneficiario,
            medioEntrega=self.medio_entrega,
            prioridad=self.prioridad,
            tipoPago=self.tipo_pago,
            topologia=self.topologia,
        )
        # drop optionals whose value is None
        optionals = {k: v for k, v in optionals.items() if v is not None}

        try:
            order = stpmex_client.ordenes.registra(
                monto=self.monto / 100.0,
                conceptoPago=self.concepto_pago,
                nombreBeneficiario=self.nombre_beneficiario,
                cuentaBeneficiario=self.cuenta_beneficiario,
                institucionContraparte=self.institucion_beneficiaria,
                tipoCuentaBeneficiario=self.tipo_cuenta_beneficiario,
                nombreOrdenante=self.nombre_ordenante,
                cuentaOrdenante=self.cuenta_ordenante,
                rfcCurpOrdenante=self.rfc_curp_ordenante,
                **optionals,
            )
        except Exception as e:  # Anything can happen here
            self.events.append(Event(type=EventType.error, metadata=str(e)))
            self.estado = Estado.error
            self.save()
            raise e
        else:
            self.clave_rastreo = self.clave_rastreo or order.claveRastreo
            self.rfc_curp_beneficiario = (
                self.rfc_curp_beneficiario or order.rfcCurpBeneficiario
            )
            self.referencia_numerica = (
                self.referencia_numerica or order.referenciaNumerica
            )
            self.empresa = self.empresa or STP_EMPRESA
            self.stp_id = order.id

            self.events.append(
                Event(type=EventType.completed, metadata=str(order))
            )
            self.estado = Estado.submitted
            self.save()
            return order
Example #23
class Course(Document):
    code = StringField(required=True)
    title = StringField(required=True)
    lecturer = ReferenceField(User, required=True)
    status = StringField(required=True)
Example #24
class User(Document, BaseEventumDocument):
    """A user model.

    The :class:`User` object is only created once the user logs in for the
    first time and confirms the details of their account.
    :ivar date_created: :class:`mongoengine.fields.DateTimeField` - The date
        that this user was created.
    :ivar date_modified: :class:`mongoengine.fields.DateTimeField` - The date
        that this user was last modified.
    :ivar gplus_id: :class:`mongoengine.fields.StringField` - The Google+ ID
        for this user.  It's what we use in the Google+ authentication.
    :ivar name: :class:`mongoengine.fields.StringField` - The user's name.
    :ivar slug: :class:`mongoengine.fields.StringField` - A URL slug for their
        internal profile page.
    :ivar email: :class:`mongoengine.fields.EmailField` - The user's email
        address.
    :ivar roles: :class:`mongoengine.fields.ListField` of
        :class:`mongoengine.fields.StringField` - A list of roles that the user
        has.
    :ivar privileges: :class:`mongoengine.fields.DictField` - A dictionary of
        privileges that the user has.  Often determined solely by their
        ``user_type``.
    :ivar image_url: :class:`mongoengine.fields.URLField` - The URL of the
        user's profile picture.
    :ivar image: :class:`mongoengine.fields.ReferenceField` - The local image
        for the user's profile picture.
    :ivar user_type: :class:`mongoengine.fields.StringField` - The type of the
        user. Can either be ``"fake_user"``, ``"editor"``, ``"publisher"``, or
        ``"admin"``.  The selection of user type determines their
        ``privileges``.
    :ivar last_logon: :class:`mongoengine.fields.DateTimeField` - The date of
        this user's last logon.
    """

    date_created = DateTimeField(required=True, default=now)
    date_modified = DateTimeField(required=True, default=now)
    gplus_id = StringField(required=True, unique=True)
    name = StringField(required=True, max_length=510)
    slug = StringField(required=True,
                       max_length=510,
                       unique=True,
                       regex=Regex.SLUG_REGEX)
    email = EmailField(required=True, unique=True)
    roles = ListField(StringField(db_field="role"), default=list)
    privileges = DictField(required=True, default={})
    image_url = URLField()
    image = ReferenceField('Image')
    user_type = StringField(default='editor', regex=USER_TYPE_REGEX)
    last_logon = DateTimeField()

    # MongoEngine ORM metadata
    meta = {'allow_inheritance': True, 'indexes': ['email', 'gplus_id']}

    def can(self, privilege):
        """Returns True if the user has ``privilege``.

        :returns: True if the user has ``privilege``
        :rtype: bool
        """
        return bool(self.privileges.get(privilege))

    def get_profile_picture(self, size=50):
        """Returns the url to the profile picture for the user.

        TODO: This method needs major fixing.  What's going on with that URL?

        :param int size: The size of the image to pass, if the size can be
            changed.

        :returns: The URL of the image.
        :rtype: str
        """
        if self.image:
            return self.image.url()
        if not self.image_url:
            # No local image and no remote URL: fall back to the app's
            # default profile picture.
            return url_for(
                'eventum.static',
                filename=current_app.config['EVENTUM_DEFAULT_PROFILE_PICTURE'])
        if "googleusercontent.com" in self.image_url:
            return self.image_url + str(size)
        return self.image_url

    def register_login(self):
        """Update the model as having logged in."""
        self.last_logon = now()

    def clean(self):
        """Called by Mongoengine on every ``.save()`` to the object.

        Update date_modified and apply privileges shorthand notation.

        :raises: :class:`wtforms.validators.ValidationError`
        """
        self.date_modified = now()

        # Update self.privileges with one of the USER_TYPES dictionaries
        self.privileges.update(USER_TYPES[self.user_type])

        # Update the slug for the user (used in URLs)
        new_slug = self.name.lower().replace(' ', '-')
        new_slug = re.sub(r"['._]", "", new_slug)
        if User.objects(slug=new_slug).count() > 0:
            i = 2
            new_slug = new_slug + "-{}".format(i)
            while User.objects(slug=new_slug).count() > 0:
                i += 1
                new_slug = re.sub(r"-([0-9])*$", "-{}".format(i), new_slug)
        self.slug = new_slug

        if self.image_url and "googleusercontent.com" in self.image_url:
            self.image_url = re.sub(r"sz=([0-9]*)$", "sz=", self.image_url)

    def id_str(self):
        """The id of this object, as a string.

        :returns: The id
        :rtype: str
        """
        return str(self.id)

    def role(self):
        """Returns the role of the user, in plain English.  It is either
        ``"Admin"``, ``"Publisher"``, ``"Editor"``, or ``"Fake User"``.

        :returns: The role.
        :rtype: str
        """
        if self.can('admin'):
            return "Admin"
        if self.can('publish'):
            return "Publisher"
        if self.can('edit'):
            return "Editor"
        return "Fake User"

    def __repr__(self):
        """The representation of this user.

        :returns: The user's details.
        :rtype: str
        """
        return ('User(id=%r, name=%r, email=%r, roles=%r, privileges=%r, '
                'gplus_id=%r, date_created=%r)' %
                (self.id, self.name, self.email, self.roles, self.privileges,
                 self.gplus_id, self.date_created))

    def __unicode__(self):
        """This user, as a unicode string.

        :returns: The user encoded as a string.
        :rtype: str
        """
        if self.can('admin'):
            return '%r <%r> (Admin)' % (self.name, self.email)
        if self.can('publish'):
            return '%r <%r> (Publisher)' % (self.name, self.email)
        if self.can('edit'):
            return '%r <%r> (Editor)' % (self.name, self.email)
        return '%r <%r> (Fake User)' % (self.name, self.email)
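The ``clean()`` hook above is where most of this model's behaviour lives: on every save it refreshes ``date_modified``, copies the privileges for the current ``user_type`` out of ``USER_TYPES``, and derives a unique URL slug from the name. A minimal usage sketch, assuming a reachable MongoDB instance and that ``USER_TYPES['admin']`` grants the ``admin`` privilege (both are assumptions, not shown in the snippet):

from mongoengine import connect

connect('eventum_example')            # assumed throwaway test database

u = User(name="Ada Lovelace",
         email="ada@example.com",
         gplus_id="1234567890",
         user_type="admin")
u.save()                              # clean() runs here, before validation
print(u.slug)                         # "ada-lovelace", or "ada-lovelace-2" on a collision
print(u.can('admin'))                 # True, assuming USER_TYPES['admin'] grants 'admin'
print(u.role())                       # "Admin" in that case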
Example #25
class Experiment(Document):
    """
    Creates and manages a training environment with an SNN RL algorithm.
    """
    training = ReferenceField(tg.Trainingrun)
    parameterdump = StringField()
    time_start = DateTimeField()
    time_end = DateTimeField()
    time_elapsed = FloatField()  # in s
    diagrams = ListField(FileField())
    cycle_i = IntField(default=0)
    totalCycleCounter = IntField(default=0)
    episode = IntField(default=0)
    return_per_episode_sum = ListField(FloatField())  # per episode
    log_Δw = ListField(FloatField())  # per episode
    log_m = ListField(FloatField())  # per episode
    epslength = ListField(IntField())
    episodedata = ListField(ReferenceField(
        tg.Episode))  #stores references to all episodes
    workerdata = DictField()

    def __init__(self, *args, **values):
        super().__init__(*args, **values)
        gv.init()
        self.printedbias = False
        self.env = None
        self.penalty = -8  # penalty for ending
        self.errsig_contingent = [0]

        self.return_per_episode_sum = []
        self.totalCycleCounter = -1  # will be increased at the beginning of the cycle

        self.log_Δw = []
        self.log_m = []
        self.rewards: List = []  # reward of last episode
        self.errsigs = None
        self.utils = None
        self.agent: Agent = None

        self.lastweights: np.array = 0  # initialized with 0 so that it can be used in computation

        self.epslength = []  # stores the number of cycles for each episode

    def cycle(self, observation_in: np.array) -> np.array:
        """Calculates brain one frame, applies action and simulates environment for a frame
        : observation_in: the last observation
        :return float values
        """
        if gv.render:
            self.env.render()
        self.totalCycleCounter += 1
        # feed observations into brain
        action, neural_activity = self.agent.actor.cycle(
            time=gv.cycle_length * self.cycle_i, observation_in=observation_in)
        # simulate environment
        observation, reward, done, info = self.env.step(action)
        reward_internal = reward
        # distance from ideal position
        # if isinstance(self.env.env, CartPoleEnv):
        #    reward_internal = 50 * np.math.cos(observation[2])
        if not self.printedbias:
            print("Bias: " + str(reward + self.penalty))
            self.printedbias = True

        try:  # not every wrapped env exposes env.env
            if done and not (isinstance(self.env.env, CartPoleEnv)
                             and self.cycle_i >= 200):
                # add a penalty for cartpole when it failed early
                reward_internal += self.penalty
        except AttributeError:
            pass

        err_signal, util = self.agent.critic.tick(
            state=observation, new_rewards=[reward_internal])
        # store unedited
        if not gv.demo:
            self.errsigs[self.episode, self.cycle_i] = err_signal
            # self.utils[self.episode, self.totalCycleCounter] = util
            self.rewards.append(reward)

        # clamp utility
        if gv.max_util_integral != float("inf"):
            if abs(self.errsig_contingent[-1] +
                   err_signal) >= gv.max_util_integral:
                err_signal = 0
            self.errsig_contingent.append(self.errsig_contingent[-1] +
                                          err_signal)

        # gv.outactivity["utility"].append(utility)

        if gv.structural_plasticity:
            self.agent.actor.connectome.update_structural_plasticity()

        # Set reward signal for left and right network
        self.agent.actor.release_neurotransmitter(err_signal *
                                                  gv.errsig_factor)

        self.agent.end_cycle(self.cycle_i)
        return done, observation

    def simulate_episode(self) -> bool:
        """Simulate one episode
        :return: True if everything went okay. False if training needs to be canceled
        """
        if self.episode > 0:
            self.agent.prepare_episode()

        observation = self.env.reset()
        self.rewards.clear()
        for self.cycle_i in range(gv.max_cycles):
            # if failed, break early
            done, observation = self.cycle(observation_in=observation)
            if done:
                break
        # extra simulation time to apply changes in last cycle before resetting
        self.agent.post_episode()
        self.epslength.append(self.cycle_i)

        return self.post_episode()

    def post_episode(self) -> bool:
        """
        :return: True if everything went okay. False if training needs to be canceled
        """
        eps: tg.Episode = tg.Episode()
        eps.rewards = self.rewards
        if gv.save_to_db:
            eps.episode = self.episode
            if len(self.agent.actor.log_m) > 0:
                eps.neuromodulator = self.agent.actor.log_m
                self.log_m.append(np.average(eps.neuromodulator))

        # extract the last weights
        try:
            weights = np.array(list(self.agent.get_weights().values()))
        except AttributeError:  # get_weights() may already return a flat array
            weights = self.agent.get_weights()
        # check if no weight changed -> Early termination
        Δw: float = np.sum(weights - self.lastweights)
        self.log_Δw.append(Δw)
        if gv.allow_early_termination and self.episode > 50 and -0.00001 < Δw < 0.00001:
            self.early_termination(eps, weights)
            return False
        self.lastweights = weights

        self.return_per_episode_sum.append(np.sum(self.rewards))
        if gv.save_to_db:
            # save at the end of the training
            if self.episode > 0 and self.episode % (gv.num_episodes - 1) == 0:
                self.save_episode(eps, weights)
            self.save()
        if not gv.demo:
            self.agent.end_episode(self.episode)
        return True

    def early_termination(self, eps, weights):
        print("\nEarly termination because Δw=0.")
        # todo log a message in the db
        if gv.save_to_db:
            #eps.activation = list(np.average(np.array(self.agent.actor.log_activation), axis=0))
            eps.neuromodulator = self.agent.actor.log_m
            self.save_episode(eps, weights)
            try:
                self.agent.actor.connectome.drawspikes()
            except AttributeError:
                pass
            self.save()

    def save_episode(self, eps, weights):
        eps.weights_human = weights.tolist()
        eps.weights = pickle.dumps(weights)
        eps.save()
        self.episodedata.append(eps.id)

    def train(self):
        """Trains the agent for given numbers"""
        # extend on existing recordings
        self.errsigs = np.full((self.episode + gv.num_episodes, gv.max_cycles),
                               np.nan)
        for episode_training in range(gv.num_episodes):
            # episode_training=0
            # while self.totalCycleCounter < gv.max_cycles:
            episode_training += 1  # shift to 1-based counting for the ETA estimate below

            # simulate
            if not self.simulate_episode():
                break
            # "CartPole-v0 defines solving as getting average return of 195.0 over 100 consecutive trials."
            last100return = np.average(
                self.return_per_episode_sum[self.episode - 100:self.episode +
                                            1])

            # time/performance evaluation
            tpe = (datetime.datetime.utcnow() -
                   self.time_start) / episode_training
            # tpc = (datetime.datetime.utcnow() - self.time_start) / self.totalCycleCounter
            # eta = tpc * (gv.max_cycles - self.totalCycleCounter)
            eta = tpe * (gv.num_episodes - episode_training)
            overwrite = "\r" if self.episode > 0 else ""
            sys.stdout.write(
                f"{overwrite}{self.episode * 100 / gv.num_episodes:3.3f}% (Episode: {self.episode}, Cycle:{self.totalCycleCounter}) ETA {eta}. Avg. return: {last100return:.1f}"
            )
            sys.stdout.flush()

            # plots
            if gv.num_plots > 0 and gv.num_episodes > gv.num_plots and self.episode % (
                    gv.num_episodes // gv.num_plots) == 0:
                # draw.voltage(self.agent.actor.connectome.multimeter, persp="2d")
                try:
                    self.agent.actor.connectome.drawspikes()
                except AttributeError:
                    pass
            self.episode += 1
        print(f"Cycles: {self.totalCycleCounter}")

    def drawreport(self):
        # self.agent.critic.draw(xaxis=0, yaxis=1)
        filename = f"{self.id}.png" if self.id is not None else None
        try:
            connectome = self.agent.actor.connectome.conns
        except AttributeError:
            connectome = None
        draw.report(utility=self.errsigs,
                    weights=np.array(self.agent.actor.weightlog),
                    returnpereps=self.return_per_episode_sum,
                    connections=connectome,
                    filename=filename,
                    env=self.env)

    def presetup(self):
        print("Process w/ worker id " + str(multiprocessing.current_process()))
        dbconnect()

        self.time_start = datetime.datetime.utcnow()
        if gv.save_to_db:
            self.save()  # save first to get an id

        # pre-training
        def dump(obj):
            f = ""
            for attr in dir(obj):
                if attr != "__dict__":
                    f += "obj.%s = %r" % (attr, getattr(obj, attr)) + "\n"
            return f

        self.parameterdump = dump(gv)
        # dump(f, self)
        # dump(f, self.agent.critic)

        # register instance
        self.training.instances.append(str(self.id))
        if gv.save_to_db:
            self.training.save()

    def posttrain(self):
        # stats
        self.time_end = datetime.datetime.utcnow()
        self.time_elapsed = (self.time_end - self.time_start).total_seconds()
        if gv.save_to_db:
            self.save()
        if isinstance(self.env, (LineFollowingEnv, LineFollowingEnv2)):
            self.drawreport()

        self.env.close()
        # if not gv.render:
        #    self.show()

    def run(self, workerdata: Dict = None) -> List[float]:
        """
        Create and trains the network.
        :param configurator:
        :param workerdata:
        :return: the results of the training
        """
        self.training = workerdata.pop("training")
        self.presetup()

        self.workerdata = workerdata
        gv.workerdata = workerdata  # not nice to add it as a global variable

        # create experiment
        configurator: Callable
        if "configurator" in workerdata and workerdata[
                "configurator"] is not None:
            configurator = workerdata.pop("configurator")
        else:
            from experiments import lf_placecells
            configurator = lf_placecells.configure_training
        configurator(self)

        # parse some gridsearch parameters to overwrite configurator
        if workerdata:
            for (key, value) in self.workerdata.items():
                if hasattr(gv, key):
                    setattr(gv, key, value)
                elif key == "vq_lr_int":
                    gv.vq_learning_scale = list([0, 10**-4, 10**-3,
                                                 10**-2])[int(value)]
                elif key == "vq_decay_int":
                    gv.vq_decay = list([0, 10**-4, 10**-3, 10**-2])[int(value)]
                else:
                    print("unknown gridsearch hyperparameter " + key)

        # training for pole
        self.train()
        self.posttrain()

        return self.return_per_episode_sum

    def show(self):
        global gv
        gv_old = copy.deepcopy(gv)
        gv.errsig_factor = 0.
        gv.structural_plasticity = False
        gv.render = True
        gv.demo = True
        self.agent.prepare_episode()
        self.simulate_episode()
        gv = gv_old
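``run()`` above pulls the ``Trainingrun`` reference and an optional ``configurator`` callable out of ``workerdata`` before training. A hedged driver sketch, assuming the project's ``tg`` module, the ``lf_placecells`` configurator, and (if ``gv.save_to_db`` is set) a reachable database:

from experiments import lf_placecells  # same fallback configurator run() uses

training = tg.Trainingrun(instances=[])   # `instances` is assumed to be a list field

exp = Experiment()
returns = exp.run(workerdata={
    "training": training,
    "configurator": lf_placecells.configure_training,
})
print(f"Average return over {len(returns)} episodes: {np.average(returns):.2f}")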
Example #26
class DepositRecord(Document):
    club = ReferenceField(Club)
    value = FloatField()
    handler = ReferenceField(Member)
    remark = StringField()
    meta = {'strict': False}
Example #27
class Fragment(EmbeddedDocument):
    uuid = StringField(primary_key=True, default=lambda: uuid.uuid4().hex)
    timestamp_created = IntField(required=True, default=get_utc_int)
    index = IntField(required=True)
    remote = ReferenceField('Hub', required=True)
    hash = StringField(required=True)
    is_clean = BooleanField(required=True, default=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._cache = None

    def __enter__(self):
        if self._cache is not None:
            raise RuntimeError(
                'Cannot use the same Fragment as context manager in its own context.'
            )
        if self.index is None:
            raise ValueError(
                'You must define the index before using the Fragment.')
        if self.remote is None:
            raise ValueError(
                'You must define the remote before using the Fragment.')
        self._cache = CachedFragment(remote=self.remote,
                                     uuid=self.uuid,
                                     expected_hash=self.hash)
        return self._cache

    def __exit__(self, exc_type, exc_val, exc_tb):
        new_hash = self._cache.hash
        try:
            self._cache.close()
        except (RemoteStorageError, ConnectionTimeoutError):
            # if upload fails: clean up and raise
            self._cache = None
            raise
        else:
            self.hash = new_hash
            self._cache = None

    def verify_full(self):
        if self._cache is not None:
            raise RuntimeError(
                'Cannot verify a Fragment when in a context manager.')
        try:
            with self as fr:
                fr.read()
        except ConnectionTimeoutError:
            self.is_clean = False
        except HashError:
            self.is_clean = False
        else:
            self.is_clean = True
        return self.is_clean

    def verify_hash(self):
        try:
            fragment_hash = download_fragment_hash(self.remote, self.uuid)
        except ConnectionTimeoutError:
            self.is_clean = False
        else:
            self.is_clean = fragment_hash == self.hash
        return self.is_clean
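The ``__enter__``/``__exit__`` pair makes a ``Fragment`` usable as a context manager: entering opens a ``CachedFragment`` against the remote, and a clean exit copies the cache's new hash back onto the document. A hypothetical sketch, where ``hub`` (a saved ``Hub`` document) and ``known_hash`` are assumptions:

fragment = Fragment(index=0, remote=hub, hash=known_hash)
with fragment as cache:        # __enter__ builds the CachedFragment
    payload = cache.read()     # read() is the same call verify_full() relies on
# a clean __exit__ has refreshed fragment.hash from the cache
print(fragment.verify_hash())  # re-checks the stored hash against the remote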
Example #28
class GroupMembership(me.Document):
    user = ReferenceField(User)
    group = ReferenceField(Group)
Example #29
class Comment(EmbeddedDocument):
    author = ReferenceField(User)
    rating = FloatField()
    text = StringField()
    date = DateTimeField(default=datetime.utcnow)
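``Comment`` is an ``EmbeddedDocument``, so it is stored inline in a parent document rather than in its own collection. A small illustrative sketch with a hypothetical ``Post`` parent (``some_user`` stands for an existing ``User``):

class Post(Document):
    title = StringField(required=True)
    # comments live inside the post document itself
    comments = ListField(EmbeddedDocumentField(Comment), default=list)

post = Post(title="Hello")
post.comments.append(Comment(author=some_user, rating=4.5, text="Nice!"))
post.save()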
Example #30
class EntityMixin(object):
    """A document mixin which attaches each document to an entity"""

    #: The entity that owns the document
    entity = ReferenceField('Entity', required=True, dbref=False)
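Being a plain mixin, ``EntityMixin`` only contributes the required ``entity`` reference; a concrete model combines it with ``Document``. A hypothetical example (``Invoice`` and ``some_entity`` are assumptions):

class Invoice(EntityMixin, Document):
    number = StringField(required=True)
    total = FloatField(required=True, default=0.0)

# every Invoice must now point at an existing Entity
invoice = Invoice(entity=some_entity, number="2024-001", total=99.0)
invoice.save()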