Example #1
class Worship(_BaseModel):
    """A worship event."""

    id = IntegerField(primary_key=True)
    date = DateField()
Example #2
class Versions(flaskDb.Model):
    key = CharField()
    val = IntegerField()

    class Meta:
        primary_key = False
Example #3
class Donation(Model):
    value = IntegerField()
    donor = ForeignKeyField(Donor, backref='donations')

    class Meta:
        database = db
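A minimal sketch of how a model like this might be exercised (the in-memory SqliteDatabase and the Donor model here are stand-ins for whatever the surrounding project actually defines):

from peewee import (SqliteDatabase, Model, CharField, IntegerField,
                    ForeignKeyField)

db = SqliteDatabase(':memory:')  # assumption: any peewee database works here

class Donor(Model):
    name = CharField()

    class Meta:
        database = db

class Donation(Model):
    value = IntegerField()
    donor = ForeignKeyField(Donor, backref='donations')

    class Meta:
        database = db

db.create_tables([Donor, Donation])
alice = Donor.create(name='Alice')
Donation.create(value=10, donor=alice)
for donation in alice.donations:  # the backref exposes the reverse query
    print(donation.value)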
Example #4
class AzureConfigEntity(ConfigEntity):
    masters = IntegerField(null=True)
    master_type = CharField(null=True)
    location = CharField(null=True)
    ssh_user = CharField(null=True)
    ssh_password = CharField(null=True)
Example #5
class GovernanceObject(BaseModel):
    parent_id = IntegerField(default=0)
    # use a callable so the default is evaluated per row, not once at import time
    object_creation_time = IntegerField(default=lambda: int(time.time()))
    object_hash = CharField(max_length=64)
    object_parent_hash = CharField(default='0')
    object_type = IntegerField(default=0)
    object_revision = IntegerField(default=1)
    object_fee_tx = CharField(default='')
    yes_count = IntegerField(default=0)
    no_count = IntegerField(default=0)
    abstain_count = IntegerField(default=0)
    absolute_yes_count = IntegerField(default=0)

    class Meta:
        db_table = 'governance_objects'

    # sync anond gobject list with our local relational DB backend
    @classmethod
    def sync(self, anond):
        golist = anond.rpc_command('gobject', 'list')

        # objects which are removed from the network should be removed from the DB
        try:
            for purged in self.purged_network_objects(list(golist.keys())):
                # SOMEDAY: possible archive step here
                purged.delete_instance(recursive=True, delete_nullable=True)

            for item in golist.values():
                (go, subobj) = self.import_gobject_from_anond(anond, item)
        except Exception as e:
            printdbg("Got an error upon import: %s" % e)

    @classmethod
    def purged_network_objects(self, network_object_hashes):
        query = self.select()
        if network_object_hashes:
            query = query.where(~(self.object_hash << network_object_hashes))
        return query

    @classmethod
    def import_gobject_from_anond(self, anond, rec):
        import decimal
        import anonlib
        import inflection

        object_hex = rec['DataHex']
        object_hash = rec['Hash']

        gobj_dict = {
            'object_hash': object_hash,
            'object_fee_tx': rec['CollateralHash'],
            'absolute_yes_count': rec['AbsoluteYesCount'],
            'abstain_count': rec['AbstainCount'],
            'yes_count': rec['YesCount'],
            'no_count': rec['NoCount'],
        }

        # shim/anond conversion
        object_hex = anonlib.SHIM_deserialise_from_anond(object_hex)
        objects = anonlib.deserialise(object_hex)
        subobj = None

        obj_type, dikt = objects[0:2:1]
        obj_type = inflection.pluralize(obj_type)
        subclass = self._meta.reverse_rel[obj_type].model_class

        # set object_type in govobj table
        gobj_dict['object_type'] = subclass.govobj_type

        # exclude any invalid model data from anond...
        valid_keys = subclass.serialisable_fields()
        subdikt = {k: dikt[k] for k in valid_keys if k in dikt}

        # get/create, then sync vote counts from anond, with every run
        govobj, created = self.get_or_create(object_hash=object_hash,
                                             defaults=gobj_dict)
        if created:
            printdbg("govobj created = %s" % created)
        count = govobj.update(**gobj_dict).where(
            self.id == govobj.id).execute()
        if count:
            printdbg("govobj updated = %d" % count)
        subdikt['governance_object'] = govobj

        # get/create, then sync payment amounts, etc. from anond - anond is the master
        try:
            newdikt = subdikt.copy()
            newdikt['object_hash'] = object_hash

            sub, params = subclass(**newdikt), []
            if isinstance(sub, Watchdog):
                params = [anond]

            if sub.is_valid(*params) is False:
                govobj.vote_delete(anond)
                return (govobj, None)

            subobj, created = subclass.get_or_create(object_hash=object_hash,
                                                     defaults=subdikt)

        except Exception as e:
            # in this case, vote as delete, and log the vote in the DB
            printdbg("Got invalid object from anond! %s" % e)
            govobj.vote_delete(anond)
            return (govobj, None)

        if created:
            printdbg("subobj created = %s" % created)
        count = subobj.update(**subdikt).where(
            subclass.id == subobj.id).execute()
        if count:
            printdbg("subobj updated = %d" % count)

        # ATM, returns a tuple w/gov attributes and the govobj
        return (govobj, subobj)

    def vote_delete(self, anond):
        if not self.voted_on(signal=VoteSignals.delete,
                             outcome=VoteOutcomes.yes):
            self.vote(anond, VoteSignals.delete, VoteOutcomes.yes)
        return

    def get_vote_command(self, signal, outcome):
        cmd = [
            'gobject', 'vote-conf', self.object_hash, signal.name, outcome.name
        ]
        return cmd

    def vote(self, anond, signal, outcome):
        import anonlib

        # At this point, will probably never reach here. But doesn't hurt to
        # have an extra check just in case objects get out of sync (people will
        # muck with the DB).
        if (self.object_hash == '0' or not misc.is_hash(self.object_hash)):
            printdbg("No governance object hash, nothing to vote on.")
            return

        # have I already voted on this gobject with this particular signal and outcome?
        if self.voted_on(signal=signal):
            printdbg("Found a vote for this gobject/signal...")
            vote = self.votes.where(Vote.signal == signal)[0]

            # if the outcome is the same, move on, nothing more to do
            if vote.outcome == outcome:
                # move on.
                printdbg(
                    "Already voted for this same gobject/signal/outcome, no need to re-vote."
                )
                return
            else:
                printdbg(
                    "Found a STALE vote for this gobject/signal, deleting so that we can re-vote."
                )
                vote.delete_instance()

        else:
            printdbg("Haven't voted on this gobject/signal yet...")

        # now ... vote!

        vote_command = self.get_vote_command(signal, outcome)
        printdbg(' '.join(vote_command))
        output = anond.rpc_command(*vote_command)

        # extract vote output parsing to external lib
        voted = anonlib.did_we_vote(output)

        if voted:
            printdbg('VOTE success, saving Vote object to database')
            Vote(governance_object=self,
                 signal=signal,
                 outcome=outcome,
                 object_hash=self.object_hash).save()
        else:
            printdbg('VOTE failed, trying to sync with network vote')
            self.sync_network_vote(anond, signal)

    def sync_network_vote(self, anond, signal):
        printdbg('\tsyncing network vote for object %s with signal %s' %
                 (self.object_hash, signal.name))
        vote_info = anond.get_my_gobject_votes(self.object_hash)
        for vdikt in vote_info:
            if vdikt['signal'] != signal.name:
                continue

            # ensure valid outcome
            outcome = VoteOutcomes.get(vdikt['outcome'])
            if not outcome:
                continue

            printdbg(
                '\tFound a matching valid vote on the network, outcome = %s' %
                vdikt['outcome'])
            Vote(governance_object=self,
                 signal=signal,
                 outcome=outcome,
                 object_hash=self.object_hash).save()

    def voted_on(self, **kwargs):
        signal = kwargs.get('signal', None)
        outcome = kwargs.get('outcome', None)

        query = self.votes

        if signal:
            query = query.where(Vote.signal == signal)

        if outcome:
            query = query.where(Vote.outcome == outcome)

        count = query.count()
        return count
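purged_network_objects above builds its filter with peewee's << operator, which compiles to SQL IN; wrapping it in ~ negates it to NOT IN. A standalone sketch of just that idiom, using an illustrative model:

from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class Obj(Model):
    object_hash = CharField()

    class Meta:
        database = db

db.create_tables([Obj])
Obj.create(object_hash='aa')
Obj.create(object_hash='bb')

network_hashes = ['aa']
# rows whose hash is NOT in the list reported by the network
stale = Obj.select().where(~(Obj.object_hash << network_hashes))
print([o.object_hash for o in stale])  # ['bb']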
Example #6
class TopLevelCategory(BaseModel):
    id = IntegerField(primary_key=True)
    title = TextField(null=False)
    link = CharField()
    created_at = DateTimeField(default=datetime.datetime.now)
Example #7
class AzureGroupEntity(GroupEntity):
    instance_type = CharField(null=True)
    cpus = IntegerField(null=True)
    ram = IntegerField(null=True)
    disk_size = IntegerField(null=True)
    customhwconf = CharField(default='', null=True)
Example #8
class PaperDoc(BasePaperModel):
    """Representation of a Dropbox Paper document."""
    title = CharField()
    paper_id = CharField()
    version = IntegerField(default=0)
    folder = ForeignKeyField(PaperFolder, null=True, related_name='docs')
    last_updated = TimestampField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __repr__(self):
        return "Document {} at version {}".format(self.title, self.version)

    @classmethod
    def get_by_paper_id(self, paper_id):
        return PaperDoc.get(PaperDoc.paper_id == paper_id)

    def get_changes(self):
        """Update this record with the latest version of the document. Also,
        download the latest version to the file.
        """
        title, rev = PaperDoc.download_doc(self.paper_id)
        if rev > self.version:
            print('Update revision for doc {0} from {1} to {2}'.format(
                self.title, self.version, rev))
            self.version = rev
            self.last_updated = time.time()
        if self.title != title:
            self.title = title
            self.last_updated = time.time()
        self.save()
        self.update_folder_info()

    @classmethod
    def generate_file_path(self, doc_id):
        return os.path.join(config.CACHE_DIR, doc_id + '.md')

    @classmethod
    @dropbox_api
    def sync_docs(self, dbx):
        """Fetches all the doc ids from the given dropbox handler.
        Args:
            dbx(dropbox.Dropbox): an instance of initialized dropbox handler
        Returns:
            None. Documents are created or refreshed in the DB as a side effect.
        """
        docs = dbx.paper_docs_list()
        for doc_id in docs.doc_ids:
            try:
                doc = PaperDoc.get(PaperDoc.paper_id == doc_id)
                if not os.path.exists(self.generate_file_path(doc_id)):
                    self.download_doc(doc_id)
            except PaperDoc.DoesNotExist:
                title, rev = self.download_doc(doc_id)
                doc = PaperDoc.create(paper_id=doc_id,
                                      title=title,
                                      version=rev,
                                      last_updated=time.time())
                doc.update_folder_info()
            print(doc)

    @classmethod
    @dropbox_api
    def download_doc(self, dbx, doc_id):
        """Downloads the given doc_id to the local file cache.
        """
        path = self.generate_file_path(doc_id)
        result = dbx.paper_docs_download_to_file(path, doc_id,
                                                 ExportFormat.markdown)
        return (result.title, result.revision)

    @dropbox_api
    def update_folder_info(self, dbx):
        """Fetch and update the folder information for the current PaperDoc.
        """
        folders = dbx.paper_docs_get_folder_info(self.paper_id)
        if folders.folders is None:
            return
        folder = folders.folders[0]
        f = PaperFolder.get_or_create(folder_id=folder.id, name=folder.name)[0]
        self.folder = f
        self.save()
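update_folder_info and sync_docs both lean on peewee's get_or_create, which returns an (instance, created) tuple. A minimal sketch of that behaviour in isolation (the model here is illustrative):

from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class PaperFolder(Model):
    folder_id = CharField(unique=True)
    name = CharField()

    class Meta:
        database = db

db.create_tables([PaperFolder])

folder, created = PaperFolder.get_or_create(folder_id='abc', name='Notes')
print(created)  # True: the row was just inserted
folder, created = PaperFolder.get_or_create(folder_id='abc', name='Notes')
print(created)  # False: the existing row is returned instead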
Example #9
class Call(Model, ModelMixin):

    ON_CALL = 1
    HUNG_UP = 2

    id = PrimaryKeyField()
    created_date = DateTimeField(default=datetime.now)
    modified_date = DateTimeField(default=datetime.now)
    status = IntegerField(default=None)
    caller_id = CharField(max_length=32, null=True)

    class Meta:
        database = database

    @classmethod
    def cron(cls):
        """
        To be called by `app.threads.cron.Cron()`.
        See `CRON_TASKS` in `app.config.default.py:DefaultConfig`
        """
        call_heartbeat_interval = config.get('WEBRTC_CALL_HEARTBEAT_INTERVAL')
        date_ = datetime.now() - timedelta(seconds=call_heartbeat_interval * 2)

        if Call.select().where(Call.status == Call.ON_CALL,
                               Call.modified_date < date_).exists():
            logger.debug('Dead calls have been found')
            cls.hang_up()

        return

    @classmethod
    def get_call(cls):
        try:
            call = cls.select().where(cls.status == cls.ON_CALL). \
                order_by(cls.modified_date.desc()).get()
        except DoesNotExist:
            call = cls()

        return call

    def get_the_line(self, caller_id):
        if not self.is_line_busy:
            self.__create_call(caller_id)
        elif self.caller_id != caller_id:
            return False
        return True

    @classmethod
    def hang_up(cls):
        q = Call.update({Call.status: cls.HUNG_UP}). \
            where(Call.status == cls.ON_CALL)
        q.execute()
        # Start motion if needed
        Sender.send({'action': MotionReceiver.START}, MotionReceiver.TYPE)

    @property
    def is_line_busy(self):
        return self.status == self.ON_CALL

    def save(self, force_insert=False, only=None):

        if self.id is not None:
            self.modified_date = datetime.now()

        try:
            return super().save(force_insert=force_insert, only=only)
        except Exception as e:
            logger.error("{}.save - {}".format(self.__class__.__name__, str(e)))

        return False

    def __create_call(self, caller_id):
        self.caller_id = caller_id
        self.status = self.ON_CALL
        self.save()
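hang_up issues a bulk UPDATE ... WHERE rather than loading and saving rows one by one. A reduced sketch of that pattern (status codes as in the example above):

from datetime import datetime
from peewee import SqliteDatabase, Model, IntegerField, DateTimeField

db = SqliteDatabase(':memory:')

class Call(Model):
    ON_CALL = 1
    HUNG_UP = 2
    status = IntegerField(default=ON_CALL)
    modified_date = DateTimeField(default=datetime.now)

    class Meta:
        database = db

db.create_tables([Call])
Call.create()
# one UPDATE statement; execute() returns the number of rows changed
changed = Call.update({Call.status: Call.HUNG_UP}) \
              .where(Call.status == Call.ON_CALL).execute()
print(changed)  # 1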
Example #10
class Process(Model, ModelMixin):

    id = PrimaryKeyField()
    created_date = DateTimeField(default=datetime.now)
    modified_date = DateTimeField(default=datetime.now)
    pid = IntegerField(default=None)
    slug = CharField(max_length=32, null=True)
    command = CharField(max_length=255, null=True)

    class Meta:
        database = database

    def cron(self):
        """
        To be called by `app.threads.cron.Cron()`.
        See `CRON_TASKS` in `app.config.default.py:DefaultConfig`
        """
        self.kill_all()
        return

    def run(self, command, slug=None):
        logger.debug('Process.run: START ')
        logger.debug(command)
        proc = subprocess.Popen(command, stderr=subprocess.PIPE)
        try:
            command_str = ' '.join(command)
            self.pid = proc.pid
            self.slug = slug
            self.command = command_str

            # returncode is only set once the child process has exited
            if proc.returncode:
                logger.error(
                    'Process.run: Command `{}` failed: Error ({}): {}'.format(
                        command_str, proc.returncode,
                        proc.stderr.read().decode()))

            self.save()

        except Exception as e:
            logger.error('Process.run: Command `{}` failed: Error: {}'.format(
                command_str, str(e)))

        logger.debug('Process.run: END ' + command_str)

    @classmethod
    def kill(cls, slug):
        logger.debug('Process.kill: KILL {}'.format(slug))
        if slug is None:
            processes = Process.select().where(cls.slug.is_null(False))
        else:
            processes = Process.select().where(cls.slug == slug)

        for process in processes:
            cp = subprocess.run(['kill', '-9', str(process.pid)],
                                stderr=subprocess.PIPE)
            if cp.returncode != 0:
                logger.error('Process #{} ({}) could not be killed'.format(
                    process.pid, process.slug))
            else:
                logger.debug('Process #{} ({}) has been killed'.format(
                    process.pid, process.slug))

            process.delete_instance()

    @classmethod
    def kill_all(cls):
        cls.kill(slug=None)

    def save(self, force_insert=False, only=None):

        if self.id is not None:
            self.modified_date = datetime.now()

        try:
            return super().save(force_insert=force_insert, only=only)
        except Exception as e:
            logger.error("{}.save - {}".format(self.__class__.__name__,
                                               str(e)))

        return False
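kill() above checks the CompletedProcess returned by subprocess.run; a minimal sketch of that check on its own (the PID is a deliberately bogus placeholder, so the command fails):

import subprocess

# returncode is 0 on success; stderr is captured as bytes because of PIPE
cp = subprocess.run(['kill', '-9', '99999999'], stderr=subprocess.PIPE)
if cp.returncode != 0:
    print('kill failed:', cp.stderr.decode().strip())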
Example #11
class X52ProProfile(Model):
    id = AutoIncrementField()
    name = CharField()
    led_fire = EnumField(default=X52LedStatus.ON, choices=X52LedStatus)
    led_a = EnumField(default=X52ColoredLedStatus.GREEN,
                      choices=X52ColoredLedStatus)
    led_b = EnumField(default=X52ColoredLedStatus.GREEN,
                      choices=X52ColoredLedStatus)
    led_d = EnumField(default=X52ColoredLedStatus.GREEN,
                      choices=X52ColoredLedStatus)
    led_e = EnumField(default=X52ColoredLedStatus.GREEN,
                      choices=X52ColoredLedStatus)
    led_t1_t2 = EnumField(default=X52ColoredLedStatus.GREEN,
                          choices=X52ColoredLedStatus)
    led_t3_t4 = EnumField(default=X52ColoredLedStatus.GREEN,
                          choices=X52ColoredLedStatus)
    led_t5_t6 = EnumField(default=X52ColoredLedStatus.GREEN,
                          choices=X52ColoredLedStatus)
    led_pov_2 = EnumField(default=X52ColoredLedStatus.GREEN,
                          choices=X52ColoredLedStatus)
    led_i = EnumField(default=X52ColoredLedStatus.GREEN,
                      choices=X52ColoredLedStatus)
    led_throttle = EnumField(default=X52LedStatus.ON, choices=X52LedStatus)
    led_brightness = IntegerField(default=X52_BRIGHTNESS_MAX)
    mfd_brightness = IntegerField(default=X52_BRIGHTNESS_MAX)
    clock_1_use_local_time = BooleanField(default=True)
    clock_1_use_24h = BooleanField(default=True)
    clock_2_offset = IntegerField(default=CLOCK_2_OFFSET_DEFAULT)
    clock_2_use_24h = BooleanField(default=True)
    clock_3_offset = IntegerField(default=CLOCK_3_OFFSET_DEFAULT)
    clock_3_use_24h = BooleanField(default=True)
    date_format = EnumField(default=X52DateFormat.YYMMDD,
                            choices=X52DateFormat)
    can_be_removed = BooleanField(default=True)
    timestamp = DateTimeField(constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')])

    @classmethod
    def get_empty_profile(cls) -> 'X52ProProfile':
        return cls(id=None,
                   name=None,
                   led_fire=None,
                   led_a=None,
                   led_b=None,
                   led_d=None,
                   led_e=None,
                   led_t1_t2=None,
                   led_t3_t4=None,
                   led_t5_t6=None,
                   led_pov_2=None,
                   led_i=None,
                   led_throttle=None,
                   led_brightness=None,
                   mfd_brightness=None,
                   clock_1_use_local_time=None,
                   clock_1_use_24h=None,
                   clock_2_offset=None,
                   clock_2_use_24h=None,
                   clock_3_offset=None,
                   clock_3_use_24h=None,
                   date_format=None,
                   can_be_removed=None)

    class Meta:
        legacy_table_names = False
        database = INJECTOR.get(SqliteDatabase)
Example #12
class CfgNotify(BaseModel):
    check_order = IntegerField()  # sort order
    notify_type = CharField()  # notification type: MAIL/SMS
    notify_name = CharField()  # name of the person to notify
    notify_number = CharField()  # phone number to notify
    status = BooleanField(default=True)  # active/inactive flag
Example #13
class StoreVersion(Model):
    version = IntegerField()
Example #14
class ChallengeBinaryNode(BaseModel):
    """ChallengeBinaryNode model"""
    root = ForeignKeyField('self', null=True, related_name='descendants')
    blob = BlobField()
    name = CharField()
    size = IntegerField()
    cs = ForeignKeyField(ChallengeSet, related_name='cbns')
    sha256 = FixedCharField(max_length=64)
    patch_type = ForeignKeyField(PatchType,
                                 related_name='patched_cbns',
                                 null=True)
    # needed for submitting patch+related ids rules
    ids_rule = ForeignKeyField(IDSRule, related_name='cbn', null=True)
    # needed for patch submission decision making.
    is_blacklisted = BooleanField(default=False)

    def delete_binary(self):
        """Remove binary file"""
        if os.path.isfile(self._path):
            os.remove(self._path)

    @classmethod
    def create(cls, *args, **kwargs):
        kwargs['size'] = len(kwargs['blob'])
        if 'sha256' not in kwargs:
            kwargs['sha256'] = _sha256sum(kwargs['blob'])
        obj = super(cls, cls).create(*args, **kwargs)
        return obj

    @property
    def _path(self):
        """Return path name"""
        filename = "{}-{}-{}".format(self.id, self.cs_id, self.name)
        return os.path.join(os.path.expanduser("~"),
                            filename)  # FIXME: afl doesn't like /tmp

    @property
    def path(self):
        """Save binary blob to file and return path"""
        if not os.path.isfile(self._path):
            with open(self._path, 'wb') as fp:
                fp.write(self.blob)
            os.chmod(self._path, 0o777)
        return self._path

    def prefix_path(self, prefix_str=None):
        """
        Returns path of a binary with filename prefixed with a given string.
        :param prefix_str: string to be prefixed for filename
        :return: new path to the binary
        """
        if prefix_str is None:
            return self.path
        new_fname = prefix_str + os.path.basename(self._path)
        prefixed_path = os.path.join(os.path.dirname(self._path), new_fname)
        with open(prefixed_path, 'wb') as fp:
            fp.write(self.blob)
        os.chmod(prefixed_path, 0o777)
        return prefixed_path

    @property
    def unsubmitted_patches(self):
        """All unsubmitted patches."""
        from .challenge_set_fielding import ChallengeSetFielding
        tm = ChallengeSetFielding.cbns.get_through_model()
        subquery = ChallengeSetFielding.select(tm.challengebinarynode).join(tm)
        return self.descendants.where(self.__class__.id.not_in(subquery))

    @property
    def submitted_patches(self):
        """All submitted patches."""
        from .challenge_set_fielding import ChallengeSetFielding
        tm = ChallengeSetFielding.cbns.get_through_model()
        return self.descendants \
                   .join(tm, on=(tm.challengebinarynode == ChallengeBinaryNode.id)) \
                   .join(ChallengeSetFielding) \
                   .where(
                       (ChallengeSetFielding.team == Team.get_our()) &
                       (ChallengeSetFielding.submission_round.is_null(False)))

    @property
    def estimated_feedback(self):
        try:
            return PatchScore.get((PatchScore.cs == self.cs)
                                  & (PatchScore.patch_type == self.patch_type))
        except PatchScore.DoesNotExist:
            return None

    @property
    def estimated_cb_score(self):
        if self.estimated_feedback is not None:
            return self.estimated_feedback.cb_score
        else:
            return None

    @property
    def poll_feedbacks(self):
        """All the received polls for this CB."""
        # There is probably a DB way to do this better
        from .challenge_set_fielding import ChallengeSetFielding as CSF
        from .poll_feedback import PollFeedback as PF
        total = (PF.success + PF.timeout + PF.connect + PF.function)
        query = self.fieldings.select(CSF.poll_feedback) \
                              .join(PF, on=(CSF.poll_feedback == PF.id)) \
                              .where((CSF.team == Team.get_our())
                                     & (CSF.poll_feedback.is_null(False))
                                     & (total > 0))
        return [csf.poll_feedback for csf in query]

    @property
    def min_cb_score(self):
        try:
            return min(f.cb_score for f in self.poll_feedbacks)
        except ValueError:
            # No feedbacks available, arg to min is None
            return None

    @property
    def avg_cb_score(self):
        try:
            return _avg(f.cb_score for f in self.poll_feedbacks)
        except ValueError:
            # No feedbacks available, arg to _avg is None
            return None

    @classmethod
    def roots(cls):
        """Return all root nodes (original CB)"""
        return cls.select().where(cls.root.is_null(True))

    @classmethod
    def all_descendants(cls):
        """Return all descendant nodes (patches)"""
        return cls.select().where(cls.root.is_null(False))
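unsubmitted_patches filters the descendants backref with a NOT IN subquery against the fielding through-model. A simplified standalone sketch of the same shape, with a plain Submission model standing in for the through-model:

from peewee import SqliteDatabase, Model, CharField, ForeignKeyField

db = SqliteDatabase(':memory:')

class Node(Model):
    name = CharField()
    root = ForeignKeyField('self', null=True, backref='descendants')

    class Meta:
        database = db

class Submission(Model):
    node = ForeignKeyField(Node)

    class Meta:
        database = db

db.create_tables([Node, Submission])
root = Node.create(name='root')
a = Node.create(name='a', root=root)
b = Node.create(name='b', root=root)
Submission.create(node=a)

submitted = Submission.select(Submission.node)
unsubmitted = root.descendants.where(Node.id.not_in(submitted))
print([n.name for n in unsubmitted])  # ['b']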
Example #15
class PodcastItunesLookup(BaseModel):
    id = IntegerField(primary_key=True)
    itunes_lookup = JSONField(null=True)
    created_at = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField(null=True)
Example #16
class Reservation(BaseModel):
    check_in_time = DateField()
    check_out_time = DateField()
    adult_number = IntegerField()
    children_number = IntegerField()
    company = ForeignKeyField(Company, backref='reservations', null=True)
Example #17
class PodcastRss(BaseModel):
    id = IntegerField(primary_key=True)
    rss = TextField(null=True)
    created_at = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField(null=True)
Example #18
class Expense(BaseModel):
    category = CharField(max_length=50, choices=ROOM_CATEGORIES)
    description = CharField(max_length=255)
    quantity = IntegerField()
    amount = FloatField()
    check_in = ForeignKeyField(CheckIn, backref='expenses')
Example #19
class Superblock(BaseModel, GovernanceClass):
    governance_object = ForeignKeyField(GovernanceObject, related_name='superblocks', on_delete='CASCADE', on_update='CASCADE')
    event_block_height = IntegerField()
    payment_addresses = TextField()
    payment_amounts = TextField()
    proposal_hashes = TextField(default='')
    sb_hash = CharField()
    object_hash = CharField(max_length=64)

    govobj_type = COIN2FLYD_GOVOBJ_TYPES['superblock']
    only_masternode_can_submit = True

    class Meta:
        db_table = 'superblocks'

    def is_valid(self):
        import salenlib
        import decimal

        printdbg("In Superblock#is_valid, for SB: %s" % self.__dict__)

        # it's a string from the DB...
        addresses = self.payment_addresses.split('|')
        for addr in addresses:
            if not salenlib.is_valid_salen_address(addr, config.network):
                printdbg("\tInvalid address [%s], returning False" % addr)
                return False

        amounts = self.payment_amounts.split('|')
        for amt in amounts:
            if not misc.is_numeric(amt):
                printdbg("\tAmount [%s] is not numeric, returning False" % amt)
                return False

            # no negative or zero amounts allowed
            damt = decimal.Decimal(amt)
            if not damt > 0:
                printdbg("\tAmount [%s] is zero or negative, returning False" % damt)
                return False

        # verify proposal hashes correctly formatted...
        if len(self.proposal_hashes) > 0:
            hashes = self.proposal_hashes.split('|')
            for object_hash in hashes:
                if not misc.is_hash(object_hash):
                    printdbg("\tInvalid proposal hash [%s], returning False" % object_hash)
                    return False

        # ensure number of payment addresses matches number of payments
        if len(addresses) != len(amounts):
            printdbg("\tNumber of payment addresses [%s] != number of payment amounts [%s], returning False" % (len(addresses), len(amounts)))
            return False

        printdbg("Leaving Superblock#is_valid, Valid = True")
        return True

    def is_deletable(self):
        # end_date < (current_date - 30 days)
        # TBD (item moved to external storage/SalenDrive, etc.)
        pass

    def hash(self):
        import salenlib
        return salenlib.hashit(self.serialise())

    def hex_hash(self):
        return "%x" % self.hash()

    # workaround for now, b/c we must uniquely ID a superblock with the hash,
    # in case of differing superblocks
    #
    # this prevents sb_hash from being added to the serialised fields
    @classmethod
    def serialisable_fields(self):
        return [
            'event_block_height',
            'payment_addresses',
            'payment_amounts',
            'proposal_hashes'
        ]

    # has this masternode voted to fund *any* superblocks at the given
    # event_block_height?
    @classmethod
    def is_voted_funding(self, ebh):
        count = (self.select()
                 .where(self.event_block_height == ebh)
                 .join(GovernanceObject)
                 .join(Vote)
                 .join(Signal)
                 .switch(Vote)  # switch join query context back to Vote
                 .join(Outcome)
                 .where(Vote.signal == VoteSignals.funding)
                 .where(Vote.outcome == VoteOutcomes.yes)
                 .count())
        return count

    @classmethod
    def latest(self):
        try:
            obj = self.select().order_by(self.event_block_height.desc()).limit(1)[0]
        except IndexError as e:
            obj = None
        return obj

    @classmethod
    def at_height(self, ebh):
        query = (self.select().where(self.event_block_height == ebh))
        return query

    @classmethod
    def find_highest_deterministic(self, sb_hash):
        # highest block hash wins
        query = (self.select()
                 .where(self.sb_hash == sb_hash)
                 .order_by(self.object_hash.desc()))
        try:
            obj = query.limit(1)[0]
        except IndexError as e:
            obj = None
        return obj
Example #20
class Room(BaseModel):
    id = PrimaryKeyField()
    rid = IntegerField(unique=True)
Example #21
class OpenstackGroupEntity(GroupEntity):
    instance_type = CharField(null=True)
    slaves = IntegerField(null=True)
    customhwconf = CharField(default='', null=True)
Example #22
class Location(BaseModel):
    room = ForeignKeyField(Room, backref="locations", on_delete="CASCADE")
    name = TextField()
    options = ForeignKeyField(LocationOptions, on_delete="CASCADE", null=True)
    index = IntegerField()

    def __repr__(self):
        return f"<Location {self.get_path()}>"

    def get_path(self):
        return f"{self.room.get_path()}/{self.name}"

    def as_dict(self):
        data = model_to_dict(
            self,
            backrefs=False,
            recurse=False,
            exclude=[Location.room, Location.index, Location.options],
        )
        if self.options is not None:
            data["options"] = self.options.as_dict()
        else:
            data["options"] = {}
        return data

    def create_floor(self, name="ground"):
        index = (Floor.select(fn.Max(
            Floor.index)).where(Floor.location == self).scalar() or -1) + 1
        floor = Floor.create(location=self, name=name, index=index)
        Layer.create(
            location=self,
            name="map",
            type_="normal",
            player_visible=True,
            index=0,
            floor=floor,
        )
        Layer.create(
            location=self,
            name="grid",
            type_="grid",
            selectable=False,
            player_visible=True,
            index=1,
            floor=floor,
        )
        Layer.create(
            location=self,
            name="tokens",
            type_="normal",
            player_visible=True,
            player_editable=True,
            index=2,
            floor=floor,
        )
        Layer.create(location=self,
                     type_="normal",
                     name="dm",
                     index=3,
                     floor=floor)
        Layer.create(
            location=self,
            type_="fow",
            name="fow",
            player_visible=True,
            index=4,
            floor=floor,
        )
        Layer.create(
            location=self,
            name="fow-players",
            type_="fow-players",
            selectable=False,
            player_visible=True,
            index=5,
            floor=floor,
        )
        Layer.create(
            location=self,
            name="draw",
            type_="normal",
            selectable=False,
            player_visible=True,
            player_editable=True,
            index=6,
            floor=floor,
        )
        return floor
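create_floor derives the next floor index from an aggregate query. A standalone sketch of the same idea, written with an explicit None check (the `or -1` shortcut in the original would also fire when the current maximum index is 0):

from peewee import SqliteDatabase, Model, CharField, IntegerField, fn

db = SqliteDatabase(':memory:')

class Floor(Model):
    name = CharField()
    index = IntegerField()

    class Meta:
        database = db

db.create_tables([Floor])

max_index = Floor.select(fn.Max(Floor.index)).scalar()  # None for an empty table
next_index = 0 if max_index is None else max_index + 1
Floor.create(name='ground', index=next_index)
print(next_index)  # 0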
Example #23
class Proposal(GovernanceClass, BaseModel):
    governance_object = ForeignKeyField(GovernanceObject,
                                        related_name='proposals',
                                        on_delete='CASCADE',
                                        on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    govobj_type = ANOND_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        import anonlib

        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" %
                         self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg(
                    "\tInvalid Proposal name [%s] (does not match regex), returning False"
                    % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg(
                    "\tProposal end_epoch [%s] <= start_epoch [%s] , returning False"
                    % (self.end_epoch, self.start_epoch))
                return False

            # amount must be numeric
            if misc.is_numeric(self.payment_amount) is False:
                printdbg(
                    "\tProposal amount [%s] is not valid, returning False" %
                    self.payment_amount)
                return False

            # amount can't be negative or 0
            if (float(self.payment_amount) <= 0):
                printdbg(
                    "\tProposal amount [%s] is negative or zero, returning False"
                    % self.payment_amount)
                return False

            # payment address is valid base58 dash addr, non-multisig
            if not anonlib.is_valid_anon_address(self.payment_address,
                                                 config.network):
                printdbg(
                    "\tPayment address [%s] not a valid Dash address for network [%s], returning False"
                    % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" %
                         self.url)
                return False

            try:
                parsed = urlparse.urlparse(self.url)
            except Exception as e:
                printdbg(
                    "\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            printdbg(
                "Unable to validate in Proposal#is_valid, marking invalid: %s"
                % e)
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self, superblockcycle=None):
        from constants import SUPERBLOCK_FUDGE_WINDOW
        import anonlib

        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # half the SB cycle, converted to seconds
        # add the fudge_window in seconds, defined elsewhere in Sentinel
        expiration_window_seconds = int(
            (anonlib.blocks_to_seconds(superblockcycle) / 2) +
            SUPERBLOCK_FUDGE_WINDOW)
        printdbg("\texpiration_window_seconds = %s" %
                 expiration_window_seconds)

        # "fully expires" adds the expiration window to end time to ensure a
        # valid proposal isn't excluded from SB by cutting it too close
        fully_expires_at = self.end_epoch + expiration_window_seconds
        printdbg("\tfully_expires_at = %s" % fully_expires_at)

        if (fully_expires_at < now):
            printdbg("\tProposal end_epoch [%s] < now [%s] , returning True" %
                     (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    def is_deletable(self):
        # end_date < (current_date - 30 days)
        thirty_days = (86400 * 30)
        if (self.end_epoch < (misc.now() - thirty_days)):
            return True

        # TBD (item moved to external storage/anondrive, etc.)
        return False

    @classmethod
    # def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
    def approved_and_ranked(self, proposal_quorum):
        # return all approved proposals, in order of descending vote count
        #
        # we need a secondary 'order by' in case of a tie on vote count, since
        # superblocks must be deterministic
        query = (
            self.select(
                self,
                GovernanceObject)  # Note that we are selecting both models.
            .join(GovernanceObject).where(
                GovernanceObject.absolute_yes_count > proposal_quorum).
            order_by(GovernanceObject.absolute_yes_count.desc(),
                     GovernanceObject.object_hash.desc()))

        ranked = []
        for proposal in query:
            # proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @classmethod
    def expired(self, superblockcycle=None):
        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        expired = []

        for proposal in self.select():
            if proposal.is_expired(superblockcycle):
                expired.append(proposal)

        return expired

    @property
    def rank(self):
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank

    def get_prepare_command(self):
        import anonlib
        obj_data = anonlib.SHIM_serialise_for_anond(self.serialise())

        # new superblocks won't have parent_hash, revision, etc...
        cmd = ['gobject', 'prepare', '0', '1', str(int(time.time())), obj_data]

        return cmd

    def prepare(self, anond):
        try:
            object_hash = anond.rpc_command(*self.get_prepare_command())
            printdbg("Submitted: [%s]" % object_hash)
            self.go.object_fee_tx = object_hash
            self.go.save()

            manual_submit = ' '.join(self.get_submit_command())
            print(manual_submit)

        except JSONRPCException as e:
            print("Unable to prepare: %s" % e.message)
Example #24
class ImageAnnotation(BaseModel):
    timestamp = DateTimeField(default=datetime.datetime.now)
    points = JSONField(null=True)
    image_id = IntegerField()
    class_id = IntegerField(default=0)
    session_name = TextField()
Example #25
class Pessoa(BaseModel):

    nome = CharField(max_length=60)
    email = CharField(max_length=60, unique=True)
    senha = CharField(max_length=60)
    idade = IntegerField()
Example #26
class OrderItem(BaseModel):
    order = ForeignKeyField(Order, related_name="order_items")
    item = ForeignKeyField(Item)
    quantity = IntegerField()
    subtotal = DecimalField()
Example #27
class Pokemon(BaseModel):
    # We are base64 encoding the ids delivered by the api
    # because they are too big for sqlite to handle
    encounter_id = CharField(primary_key=True, max_length=50)
    spawnpoint_id = CharField(index=True)
    pokemon_id = IntegerField(index=True)
    latitude = DoubleField()
    longitude = DoubleField()
    disappear_time = DateTimeField(index=True)

    class Meta:
        indexes = ((('latitude', 'longitude'), False), )

    @staticmethod
    def get_active(swLat, swLng, neLat, neLng):
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where(
                Pokemon.disappear_time > datetime.utcnow()).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.disappear_time > datetime.utcnow())
                & (((Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng)
                    & (Pokemon.latitude <= neLat)
                    & (Pokemon.longitude <= neLng)))).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @staticmethod
    def get_active_by_id(ids, swLat, swLng, neLat, neLng):
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where((Pokemon.pokemon_id << ids) & (
                Pokemon.disappear_time > datetime.utcnow())).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.pokemon_id << ids)
                & (Pokemon.disappear_time > datetime.utcnow())
                & (Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng)
                & (Pokemon.latitude <= neLat)
                & (Pokemon.longitude <= neLng)).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @classmethod
    def get_seen(cls, timediff):
        if timediff:
            timediff = datetime.utcnow() - timediff
        pokemon_count_query = (Pokemon.select(
            Pokemon.pokemon_id,
            fn.COUNT(Pokemon.pokemon_id).alias('count'),
            fn.MAX(Pokemon.disappear_time).alias('lastappeared')).where(
                Pokemon.disappear_time > timediff).group_by(
                    Pokemon.pokemon_id).alias('counttable'))
        query = (Pokemon.select(
            Pokemon.pokemon_id, Pokemon.disappear_time, Pokemon.latitude,
            Pokemon.longitude, pokemon_count_query.c.count).join(
                pokemon_count_query,
                on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id
                    )).distinct().where(
                        Pokemon.disappear_time ==
                        pokemon_count_query.c.lastappeared).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        total = 0
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            pokemons.append(p)
            total += p['count']

        # Re-enable the GC.
        gc.enable()

        return {'pokemon': pokemons, 'total': total}

    @classmethod
    def get_appearances(cls, pokemon_id, last_appearance):
        query = (Pokemon.select().where((Pokemon.pokemon_id == pokemon_id) & (
            Pokemon.disappear_time > datetime.utcfromtimestamp(
                last_appearance / 1000.0))).order_by(
                    Pokemon.disappear_time.asc()).dicts())

        return list(query)

    @classmethod
    def get_spawnpoints(cls, southBoundary, westBoundary, northBoundary,
                        eastBoundary):
        query = Pokemon.select(Pokemon.latitude, Pokemon.longitude,
                               Pokemon.spawnpoint_id)

        if None not in (northBoundary, southBoundary, westBoundary,
                        eastBoundary):
            query = (query.where((Pokemon.latitude <= northBoundary)
                                 & (Pokemon.latitude >= southBoundary)
                                 & (Pokemon.longitude >= westBoundary)
                                 & (Pokemon.longitude <= eastBoundary)))

        # Sqlite doesn't support distinct on columns
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        return list(query.dicts())

    @classmethod
    def get_spawnpoints_in_hex(cls, center, steps):
        log.info('Finding spawn points {} steps away'.format(steps))

        n, e, s, w = hex_bounds(center, steps)

        query = (Pokemon.select(Pokemon.latitude.alias('lat'),
                                Pokemon.longitude.alias('lng'),
                                ((Pokemon.disappear_time.minute * 60) +
                                 Pokemon.disappear_time.second).alias('time'),
                                Pokemon.spawnpoint_id))
        query = (query.where((Pokemon.latitude <= n) & (Pokemon.latitude >= s)
                             & (Pokemon.longitude >= w)
                             & (Pokemon.longitude <= e)))
        # Sqlite doesn't support distinct on columns
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        s = list(query.dicts())

        # Filter to spawns which actually fall in the hex locations
        # This loop is about as non-pythonic as you can get, I bet.
        # Oh well.
        filtered = []
        hex_locations = list(generate_location_steps(center, steps, 0.07))
        for hl in hex_locations:
            for idx, sp in enumerate(s):
                if geopy.distance.distance(
                        hl, (sp['lat'], sp['lng'])).meters <= 70:
                    filtered.append(s.pop(idx))

        # at this point, 'time' is DISAPPEARANCE time, we're going to morph it to APPEARANCE time
        for location in filtered:
            # examples: time    shifted
            #           0       (   0 + 2700) = 2700 % 3600 = 2700 (0th minute to 45th minute, 15 minutes prior to appearance as time wraps around the hour)
            #           1800    (1800 + 2700) = 4500 % 3600 =  900 (30th minute, moved to arrive at 15th minute)
            # todo: this DOES NOT ACCOUNT for pokemons that appear sooner and live longer, but you'll _always_ have at least 15 minutes, so it works well enough
            location['time'] = (location['time'] + 2700) % 3600

        return filtered
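The final loop in get_spawnpoints_in_hex converts a disappearance second-of-hour into an expected appearance second-of-hour by shifting 2700 seconds forward modulo 3600 (equivalently, 15 minutes back), matching the worked examples in the comments:

# assumes a fixed 15-minute (900 s) visibility window, as noted in the todo above
def appearance_time(disappear_sec):
    return (disappear_sec + 2700) % 3600

print(appearance_time(0))     # 2700 -> 45th minute of the previous hour
print(appearance_time(1800))  # 900  -> 15th minute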
Example #28
class Proposal(GovernanceClass, BaseModel):
    governance_object = ForeignKeyField(GovernanceObject,
                                        related_name='proposals',
                                        on_delete='CASCADE',
                                        on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    # src/governance-validators.cpp
    MAX_DATA_SIZE = 512

    govobj_type = BRIXCOIND_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        import brixcoinlib

        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" %
                         self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg(
                    "\tInvalid Proposal name [%s] (does not match regex), returning False"
                    % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg(
                    "\tProposal end_epoch [%s] <= start_epoch [%s] , returning False"
                    % (self.end_epoch, self.start_epoch))
                return False

            # amount must be numeric
            if misc.is_numeric(self.payment_amount) is False:
                printdbg(
                    "\tProposal amount [%s] is not valid, returning False" %
                    self.payment_amount)
                return False

            # amount can't be negative or 0
            if (float(self.payment_amount) <= 0):
                printdbg(
                    "\tProposal amount [%s] is negative or zero, returning False"
                    % self.payment_amount)
                return False

            # payment address is valid base58 brixcoin addr, non-multisig
            if not brixcoinlib.is_valid_brixcoin_address(
                    self.payment_address, config.network):
                printdbg(
                    "\tPayment address [%s] not a valid Brixcoin address for network [%s], returning False"
                    % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" %
                         self.url)
                return False

            # proposal URL has any whitespace
            if (re.search(r'\s', self.url)):
                printdbg(
                    "\tProposal URL [%s] has whitespace, returning False" %
                    self.url)
                return False

            # Brixcoin Core restricts proposals to 512 bytes max
            if len(self.serialise()) > (self.MAX_DATA_SIZE * 2):
                printdbg("\tProposal [%s] is too big, returning False" %
                         self.name)
                return False

            try:
                parsed = urlparse.urlparse(self.url)
            except Exception as e:
                printdbg(
                    "\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            printdbg(
                "Unable to validate in Proposal#is_valid, marking invalid: %s"
                % e)
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self, superblockcycle=None):
        from constants import SUPERBLOCK_FUDGE_WINDOW
        import brixcoinlib

        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # half the SB cycle, converted to seconds
        # add the fudge_window in seconds, defined elsewhere in Sentinel
        expiration_window_seconds = int(
            (brixcoinlib.blocks_to_seconds(superblockcycle) / 2) +
            SUPERBLOCK_FUDGE_WINDOW)
        printdbg("\texpiration_window_seconds = %s" %
                 expiration_window_seconds)

        # "fully expires" adds the expiration window to end time to ensure a
        # valid proposal isn't excluded from SB by cutting it too close
        fully_expires_at = self.end_epoch + expiration_window_seconds
        printdbg("\tfully_expires_at = %s" % fully_expires_at)

        if (fully_expires_at < now):
            printdbg("\tProposal end_epoch [%s] < now [%s] , returning True" %
                     (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    @classmethod
    def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
        # return all approved proposals, in order of descending vote count
        #
        # we need a secondary 'order by' in case of a tie on vote count, since
        # superblocks must be deterministic
        query = (
            self.select(
                self,
                GovernanceObject)  # Note that we are selecting both models.
            .join(GovernanceObject).where(
                GovernanceObject.absolute_yes_count > proposal_quorum).
            order_by(GovernanceObject.absolute_yes_count.desc(),
                     GovernanceObject.object_hash.desc()))

        ranked = []
        for proposal in query:
            proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @classmethod
    def expired(self, superblockcycle=None):
        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        expired = []

        for proposal in self.select():
            if proposal.is_expired(superblockcycle):
                expired.append(proposal)

        return expired

    @property
    def rank(self):
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank
Example #29
class Infraction(BaseModel):
    Types = Enum(
        'MUTE',
        'KICK',
        'TEMPBAN',
        'SOFTBAN',
        'BAN',
        'TEMPMUTE',
        'UNBAN',
        'TEMPROLE',
        'WARNING',
        bitmask=False,
    )

    guild_id = BigIntegerField()
    user_id = BigIntegerField()
    actor_id = BigIntegerField(null=True)

    type_ = IntegerField(db_column='type')
    reason = TextField(null=True)
    metadata = BinaryJSONField(default=dict)  # callable default avoids sharing one dict across rows

    expires_at = DateTimeField(null=True)
    created_at = DateTimeField(default=datetime.utcnow)
    active = BooleanField(default=True)

    class Meta:
        db_table = 'infractions'

        indexes = ((('guild_id', 'user_id'), False), )

    def serialize(self,
                  guild=None,
                  user=None,
                  actor=None,
                  include_metadata=False):
        base = {
            'id': str(self.id),
            'guild': (guild and guild.serialize()) or {
                'id': str(self.guild_id)
            },
            'user': (user and user.serialize()) or {
                'id': str(self.user_id)
            },
            'actor': (actor and actor.serialize()) or {
                'id': str(self.actor_id)
            },
            'reason': self.reason,
            'expires_at': self.expires_at,
            'created_at': self.created_at,
            'active': self.active,
        }

        base['type'] = {
            'id': self.type_,
            'name': next(i.name for i in Infraction.Types.attrs
                         if i.index == self.type_),
        }

        if include_metadata:
            base['metadata'] = self.metadata

        return base

    @staticmethod
    def admin_config(event):
        return getattr(event.base_config.plugins, 'infractions', None)

    @classmethod
    def temprole(cls, plugin, event, member, role_id, reason, expires_at):
        User.from_disco_user(member.user)

        # TODO: modlog

        member.add_role(role_id, reason=reason)

        cls.create(guild_id=event.guild.id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.TEMPROLE,
                   reason=reason,
                   expires_at=expires_at,
                   metadata={'role': role_id})

    @classmethod
    def kick(cls, plugin, event, member, reason):
        from rowboat.plugins.modlog import Actions

        User.from_disco_user(member.user)

        # Prevent the GuildMemberRemove log event from triggering
        plugin.call('ModLogPlugin.create_debounce',
                    event, ['GuildMemberRemove'],
                    user_id=member.user.id)

        member.kick(reason=reason)

        # Create a kick modlog event
        plugin.call('ModLogPlugin.log_action_ext',
                    Actions.MEMBER_KICK,
                    event.guild.id,
                    member=member,
                    actor=unicode(event.author)
                    if event.author.id != member.id else 'Automatic',
                    reason=reason or 'no reason')

        cls.create(guild_id=member.guild_id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.KICK,
                   reason=reason)

    @classmethod
    def tempban(cls, plugin, event, member, reason, expires_at):
        from rowboat.plugins.modlog import Actions
        User.from_disco_user(member.user)

        plugin.call('ModLogPlugin.create_debounce',
                    event, ['GuildMemberRemove', 'GuildBanAdd'],
                    user_id=member.user.id)

        member.ban(reason=reason)

        plugin.call(
            'ModLogPlugin.log_action_ext',
            Actions.MEMBER_TEMPBAN,
            event.guild.id,
            member=member,
            actor=unicode(event.author)
            if event.author.id != member.id else 'Automatic',
            reason=reason or 'no reason',
            expires=expires_at,
        )

        cls.create(guild_id=member.guild_id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.TEMPBAN,
                   reason=reason,
                   expires_at=expires_at)

    @classmethod
    def softban(cls, plugin, event, member, reason):
        from rowboat.plugins.modlog import Actions
        User.from_disco_user(member.user)

        plugin.call('ModLogPlugin.create_debounce',
                    event,
                    ['GuildMemberRemove', 'GuildBanAdd', 'GuildBanRemove'],
                    user_id=member.user.id)

        member.ban(delete_message_days=7, reason=reason)
        member.unban(reason=reason)

        plugin.call('ModLogPlugin.log_action_ext',
                    Actions.MEMBER_SOFTBAN,
                    event.guild.id,
                    member=member,
                    actor=unicode(event.author)
                    if event.author.id != member.id else 'Automatic',
                    reason=reason or 'no reason')

        cls.create(guild_id=member.guild_id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.SOFTBAN,
                   reason=reason)

    @classmethod
    def ban(cls, plugin, event, member, reason, guild):
        from rowboat.plugins.modlog import Actions
        if isinstance(member, (int, long)):
            user_id = member
        else:
            User.from_disco_user(member.user)
            user_id = member.user.id

        plugin.call(
            'ModLogPlugin.create_debounce',
            event,
            ['GuildMemberRemove', 'GuildBanAdd'],
            user_id=user_id,
        )

        guild.create_ban(user_id, reason=reason)

        plugin.call('ModLogPlugin.log_action_ext',
                    Actions.MEMBER_BAN,
                    event.guild.id,
                    user=unicode(member),
                    user_id=user_id,
                    actor=unicode(event.author)
                    if event.author.id != user_id else 'Automatic',
                    reason=reason or 'no reason')

        cls.create(guild_id=guild.id,
                   user_id=user_id,
                   actor_id=event.author.id,
                   type_=cls.Types.BAN,
                   reason=reason)

    @classmethod
    def warn(cls, plugin, event, member, reason, guild):
        from rowboat.plugins.modlog import Actions
        User.from_disco_user(member.user)
        user_id = member.user.id

        cls.create(guild_id=guild.id,
                   user_id=user_id,
                   actor_id=event.author.id,
                   type_=cls.Types.WARNING,
                   reason=reason)

        plugin.call('ModLogPlugin.log_action_ext',
                    Actions.MEMBER_WARNED,
                    event.guild.id,
                    member=member,
                    actor=unicode(event.author)
                    if event.author.id != member.id else 'Automatic',
                    reason=reason or 'no reason')

    @classmethod
    def mute(cls, plugin, event, member, reason):
        from rowboat.plugins.modlog import Actions
        admin_config = cls.admin_config(event)
        #admin_config = event.config

        plugin.call(
            'ModLogPlugin.create_debounce',
            event,
            ['GuildMemberUpdate'],
            user_id=member.user.id,
            role_id=admin_config.mute_role,
        )

        member.add_role(admin_config.mute_role, reason=reason)

        plugin.call('ModLogPlugin.log_action_ext',
                    Actions.MEMBER_MUTED,
                    event.guild.id,
                    member=member,
                    actor=unicode(event.author)
                    if event.author.id != member.id else 'Automatic',
                    reason=reason or 'no reason')

        cls.create(guild_id=event.guild.id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.MUTE,
                   reason=reason,
                   metadata={'role': admin_config.mute_role})

    @classmethod
    def tempmute(cls, plugin, event, member, reason, expires_at):
        from rowboat.plugins.modlog import Actions
        admin_config = cls.admin_config(event)
        #admin_config = event.config

        if not admin_config.mute_role:
            plugin.log.warning('Cannot tempmute member %s, no tempmute role',
                               member.id)
            return

        plugin.call(
            'ModLogPlugin.create_debounce',
            event,
            ['GuildMemberUpdate'],
            user_id=member.user.id,
            role_id=admin_config.mute_role,
        )

        member.add_role(admin_config.mute_role, reason=reason)

        plugin.call(
            'ModLogPlugin.log_action_ext',
            Actions.MEMBER_TEMP_MUTED,
            event.guild.id,
            member=member,
            actor=unicode(event.author)
            if event.author.id != member.id else 'Automatic',
            reason=reason or 'no reason',
            expires=expires_at,
        )

        cls.create(guild_id=event.guild.id,
                   user_id=member.user.id,
                   actor_id=event.author.id,
                   type_=cls.Types.TEMPMUTE,
                   reason=reason,
                   expires_at=expires_at,
                   metadata={'role': admin_config.mute_role})

    @classmethod
    def clear_active(cls, event, user_id, types):
        """
        Marks a previously active tempmute as inactive for the given event/user.
        This should be used in all locations where we either think this is no
        longer active (e.g. the mute role was removed) _or_ when we don't want to
        unmute the user any longer, e.g. they've been remuted by another command.
        """
        return cls.update(active=False).where(
            (cls.guild_id == event.guild.id) & (cls.user_id == user_id)
            & (cls.type_ << types) & (cls.active == 1)).execute() >= 1
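
A short sketch of how clear_active might be called from a plugin, for example when a member's mute role is removed by hand; plugin, event, and member are assumed to be the usual Disco/rowboat objects used throughout this class:

# Sketch only: mark any still-active mute infractions for this member as
# inactive so the expiry task will not try to unmute them again later.
cleared = Infraction.clear_active(
    event,
    member.user.id,
    [Infraction.Types.MUTE, Infraction.Types.TEMPMUTE],
)

if cleared:
    plugin.log.info('cleared active mute infractions for user %s',
                    member.user.id)
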
Example #30
0
class Arrangement(_BaseModel):
    """A possible combination of `Person` and `Role`."""

    id = IntegerField(primary_key=True)
    person = ForeignKeyField(Person)
    role = ForeignKeyField(Role)
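
A small query sketch for the Arrangement model above, assuming Person and Role each define a name field (not shown in this snippet):

# Load every arrangement together with its person and role in one query,
# switching the join context back to Arrangement between the two joins.
query = (Arrangement
         .select(Arrangement, Person, Role)
         .join(Person)
         .switch(Arrangement)
         .join(Role))

for arrangement in query:
    print('%s as %s' % (arrangement.person.name, arrangement.role.name))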