class Proposal(GovernanceClass, BaseModel):
    """A budget-proposal governance object tracked by Sentinel.

    Rows live in the `proposals` table and link back to the generic
    GovernanceObject row that carries vote counts and the raw govobj data.
    """

    governance_object = ForeignKeyField(GovernanceObject, related_name='proposals', on_delete='CASCADE', on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    # numeric govobj type code used when (de)serialising for energid
    govobj_type = ENERGID_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        """Return True when every proposal field passes its sanity check.

        Any unexpected error during validation marks the proposal invalid
        rather than propagating.
        """
        import energilib as egilib
        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" % self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg("\tInvalid Proposal name [%s] (does not match regex), returning False" % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg("\tProposal end_epoch [%s] <= start_epoch [%s] , returning False" % (self.end_epoch, self.start_epoch))
                return False

            # amount must be numeric
            if misc.is_numeric(self.payment_amount) is False:
                printdbg("\tProposal amount [%s] is not valid, returning False" % self.payment_amount)
                return False

            # amount can't be negative or 0
            if (float(self.payment_amount) <= 0):
                printdbg("\tProposal amount [%s] is negative or zero, returning False" % self.payment_amount)
                return False

            # payment address is valid base58 energi addr, non-multisig
            if not egilib.is_valid_energi_address(self.payment_address, config.network):
                printdbg("\tPayment address [%s] not a valid Energi address for network [%s], returning False" % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" % self.url)
                return False

            # the parsed result is unused -- urlparse is called only to see
            # whether it raises on a malformed URL
            try:
                parsed = urlparse.urlparse(self.url)
            except Exception as e:
                printdbg("\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            # NOTE(review): `e.message` exists only on Python 2; under
            # Python 3 this line itself raises AttributeError -- confirm
            # the target interpreter (the `import urlparse` usage above
            # suggests Python 2).
            printdbg("Unable to validate in Proposal#is_valid, marking invalid: %s" % e.message)
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self, superblockcycle=None):
        """Return True when this proposal ended longer ago than half a
        superblock cycle plus the fudge window.

        Raises:
            Exception: when `superblockcycle` is not supplied.
        """
        from constants import SUPERBLOCK_FUDGE_WINDOW
        import energilib

        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # half the SB cycle, converted to seconds
        # add the fudge_window in seconds, defined elsewhere in Sentinel
        expiration_window_seconds = int(
            (energilib.blocks_to_seconds(superblockcycle) / 2) + SUPERBLOCK_FUDGE_WINDOW
        )
        printdbg("\texpiration_window_seconds = %s" % expiration_window_seconds)

        # "fully expires" adds the expiration window to end time to ensure a
        # valid proposal isn't excluded from SB by cutting it too close
        fully_expires_at = self.end_epoch + expiration_window_seconds
        printdbg("\tfully_expires_at = %s" % fully_expires_at)

        if (fully_expires_at < now):
            printdbg("\tProposal end_epoch [%s] < now [%s] , returning True" % (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    def is_deletable(self):
        """Return True when the proposal ended more than 30 days ago."""
        # end_date < (current_date - 30 days)
        thirty_days = (86400 * 30)
        if (self.end_epoch < (misc.now() - thirty_days)):
            return True

        # TBD (item moved to external storage/EnergiDrive, etc.)
        return False

    @classmethod
    def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
        """Return valid proposals meeting the quorum, best-ranked first.

        Secondary sort on object_hash keeps the ordering deterministic in
        the event of a vote-count tie, which superblock assembly requires.
        """
        # return all approved proposals, in order of descending vote count
        #
        # we need a secondary 'order by' in case of a tie on vote count, since
        # superblocks must be deterministic
        query = (self
                 .select(self, GovernanceObject)  # Note that we are selecting both models.
                 .join(GovernanceObject)
                 .where(GovernanceObject.absolute_yes_count >= proposal_quorum)
                 .order_by(GovernanceObject.absolute_yes_count.desc(), GovernanceObject.object_hash.desc())
                 )

        ranked = []
        for proposal in query:
            # attach the budget cap so downstream code can check affordability
            proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @classmethod
    def expired(self, superblockcycle=None):
        """Return all proposals considered expired for the given cycle.

        Raises:
            Exception: when `superblockcycle` is not supplied.
        """
        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        expired = []

        for proposal in self.select():
            if proposal.is_expired(superblockcycle):
                expired.append(proposal)

        return expired

    @property
    def rank(self):
        """Absolute yes-count of the linked governance object (0 if none)."""
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank

    def get_prepare_command(self):
        """Build the `gobject prepare` RPC argument list for this proposal."""
        import energilib
        obj_data = energilib.SHIM_serialise_for_energid(self.serialise())

        # new superblocks won't have parent_hash, revision, etc...
        cmd = ['gobject', 'prepare', '0', '1', str(int(time.time())), obj_data]

        return cmd

    def prepare(self, energid):
        """Submit the prepare transaction via RPC and record its fee-tx hash.

        Side effects: saves the fee tx hash onto the linked governance
        object and prints the manual submit command for the operator.
        """
        try:
            object_hash = energid.rpc_command(*self.get_prepare_command())
            printdbg("Submitted: [%s]" % object_hash)
            # `self.go` -- presumably a GovernanceClass shortcut for the
            # linked governance_object; confirm against the mixin.
            self.go.object_fee_tx = object_hash
            self.go.save()

            manual_submit = ' '.join(self.get_submit_command())
            print(manual_submit)

        except JSONRPCException as e:
            # NOTE(review): `e.message` is Python 2 only -- see is_valid.
            print("Unable to prepare: %s" % e.message)
class ArtworkCache(ModelBase):
    """Links a Book to a stored artwork identifier.

    Presumably `uuid` keys a cached cover image in external storage --
    confirm against the code that populates this table.
    """
    book = ForeignKeyField(Book)
    uuid = CharField()
class CartItem(BaseModel):
    """Join row: one Item placed in one Cart, with its quantity."""
    cart = ForeignKeyField(Cart, backref='items')
    item = ForeignKeyField(Item, backref='carts')
    quantity = IntegerField()
class Notification(BaseModel):
    """An in-app message delivered to a single user."""

    ID_FIELD_NAME = 'id'
    # hard cap on how many notifications any query returns per user
    MAX_PER_USER = 10

    user = ForeignKeyField(User)
    created = DateTimeField(default=datetime.now, index=True)
    kind = IntegerField()
    message = TextField()
    related_id = IntegerField(null=True, index=True)
    action_url = CharField(null=True)
    viewed = BooleanField(default=False)

    def read(self) -> bool:
        """Flag this notification as seen and persist it."""
        self.viewed = True
        saved_rows = self.save()
        return bool(saved_rows)

    @classmethod
    def fetch(cls, user: User) -> Iterable['Notification']:
        """Newest notifications for *user*, capped at MAX_PER_USER."""
        owned_by_user = cls.user == user.id
        query = cls.select().join(User).where(owned_by_user)
        query = query.order_by(Notification.created.desc())
        return query.limit(cls.MAX_PER_USER)

    @classmethod
    def of(
        cls,
        related_id: int,
        user: Optional[int] = None,
    ) -> Iterable['Notification']:
        """Notifications tied to *related_id*, optionally for one user only."""
        conditions = [Notification.related_id == related_id]
        if user is not None:
            conditions.append(Notification.user == user)
        query = cls.select().join(User).where(*conditions)
        return query.limit(cls.MAX_PER_USER)

    @classmethod
    def send(
        cls,
        user: User,
        kind: int,
        message: str,
        related_id: Optional[int] = None,
        action_url: Optional[str] = None,
    ) -> 'Notification':
        """Create and store a new notification for *user*."""
        attributes = {
            cls.user.name: user,
            cls.kind.name: kind,
            cls.message.name: message,
            cls.related_id.name: related_id,
            cls.action_url.name: action_url,
        }
        return cls.create(**attributes)
class InstitutionUser(CherryPyAPI):
    """
    Relates persons and institution objects.

    Attributes:
        +-------------------+-------------------------------------+
        | Name              | Description                         |
        +===================+=====================================+
        | user              | Link to the Users model             |
        +-------------------+-------------------------------------+
        | relationship      | Link to the Relationships model     |
        +-------------------+-------------------------------------+
        | institution       | Link to the Institutions model      |
        +-------------------+-------------------------------------+
    """

    uuid = UUIDField(primary_key=True, default=uuid.uuid4, index=True)
    user = ForeignKeyField(Users, backref='institutions')
    institution = ForeignKeyField(Institutions, backref='users')
    relationship = ForeignKeyField(Relationships, backref='institution_user')

    # pylint: disable=too-few-public-methods
    class Meta(object):
        """PeeWee meta class contains the database and the primary key."""
        database = DB
        # each (user, institution, relationship) triple must be unique
        indexes = (
            (('user', 'institution', 'relationship'), True),
        )
    # pylint: enable=too-few-public-methods

    def to_hash(self, **flags):
        """Convert the object to a hash.

        Raw column values are read from `__data__` and coerced to the
        primitive types the API emits (uuid/relationship as str, FK ids
        as int/str).
        """
        obj = super(InstitutionUser, self).to_hash(**flags)
        obj['uuid'] = str(self.__data__['uuid'])
        obj['user'] = int(self.__data__['user'])
        obj['institution'] = int(self.__data__['institution'])
        obj['relationship'] = str(self.__data__['relationship'])
        return obj

    def from_hash(self, obj):
        """Convert the hash into the object.

        Each field is set only when its key is present in `obj`
        (that is what `_set_only_if` appears to guard -- confirm in
        CherryPyAPI); FK fields are resolved to model instances lazily
        via the lambdas.
        """
        super(InstitutionUser, self).from_hash(obj)
        self._set_only_if('uuid', obj, 'uuid', lambda: uuid.UUID(obj['uuid']))
        self._set_only_if_by_name('relationship', obj, Relationships)
        self._set_only_if(
            'user', obj, 'user',
            lambda: Users.get(Users.id == obj['user'])
        )
        self._set_only_if(
            'institution', obj, 'institution',
            lambda: Institutions.get(Institutions.id == obj['institution'])
        )

    @classmethod
    def where_clause(cls, kwargs):
        """Where clause for the various elements."""
        where_clause = super(InstitutionUser, cls).where_clause(kwargs)
        attrs = ['uuid', 'user', 'institution', 'relationship']
        return cls._where_attr_clause(where_clause, kwargs, attrs)
class PaperAuthors(BaseModel):
    """Many-to-many join between papers and their authors."""
    paper = ForeignKeyField(Paper)
    author = ForeignKeyField(Author)
class SequenceModel(Model):
    """A numbered sequence belonging to a profile."""
    identifier = IntegerField()
    profile = ForeignKeyField(ProfileModel, backref='sequences')

    class Meta:
        database = db
class Room(BaseModel):
    """A physical room, optionally assigned to a floor.

    Deleting the floor keeps the room but nulls the reference.
    """
    id = AutoField()
    number = IntegerField(unique=True)
    name = CharField(null=True)
    floor = ForeignKeyField(Floor, null=True, on_delete='SET NULL', backref='rooms')
class Sensor(BaseModel):
    """A sensor, optionally assigned to a room.

    Deleting the room keeps the sensor but nulls the reference.
    """
    id = AutoField()
    number = IntegerField(unique=True)
    room = ForeignKeyField(Room, null=True, on_delete='SET NULL', backref='sensors')
class InstrumentDataSource(CherryPyAPI):
    """
    Relates instruments and data sources objects.

    Attributes:
        +-------------------+-------------------------------------+
        | Name              | Description                         |
        +===================+=====================================+
        | instrument        | Link to the Instruments model       |
        +-------------------+-------------------------------------+
        | data_source       | Link to the DataSources model       |
        +-------------------+-------------------------------------+
        | relationship      | Link to the Relationships model     |
        +-------------------+-------------------------------------+
    """

    instrument = ForeignKeyField(Instruments, backref='data_sources')
    data_source = ForeignKeyField(DataSources, backref='instruments')
    relationship = ForeignKeyField(Relationships, backref='instrument_data_source')

    # pylint: disable=too-few-public-methods
    class Meta(object):
        """PeeWee meta class contains the database and the primary key."""
        database = DB
        # the triple itself is the primary key -- no surrogate id column
        primary_key = CompositeKey('instrument', 'data_source', 'relationship')
    # pylint: enable=too-few-public-methods

    def to_hash(self, **flags):
        """Convert the object to a hash.

        Because the model has a composite key, a synthetic `_id` is
        derived from the three components via index_hash.
        """
        obj = super(InstrumentDataSource, self).to_hash(**flags)
        obj['_id'] = index_hash(
            int(self.__data__['instrument']),
            str(self.__data__['data_source']),
            str(self.__data__['relationship']),
        )
        obj['instrument'] = int(self.__data__['instrument'])
        obj['data_source'] = str(self.__data__['data_source'])
        obj['relationship'] = str(self.__data__['relationship'])
        return obj

    def from_hash(self, obj):
        """Convert the hash into the object.

        FK fields are resolved lazily; the loop binds cls/obj/attr as
        lambda defaults deliberately, to avoid the late-binding-closure
        pitfall.
        """
        super(InstrumentDataSource, self).from_hash(obj)
        self._set_only_if(
            'instrument', obj, 'instrument',
            lambda: Instruments.get(Instruments.id == int(obj['instrument']))
        )
        self._set_only_if_by_name('relationship', obj, Relationships)
        attr_rel_cls = [('data_source', DataSources)]
        for attr, rel_cls in attr_rel_cls:
            self._set_only_if(
                attr, obj, attr,
                lambda cls=rel_cls, o=obj, a=attr: cls.get(cls.uuid == uuid.UUID(o[a]))
            )

    @classmethod
    def where_clause(cls, kwargs):
        """Where clause for the various elements."""
        where_clause = super(InstrumentDataSource, cls).where_clause(kwargs)
        attrs = ['instrument', 'data_source', 'relationship']
        return cls._where_attr_clause(where_clause, kwargs, attrs)
class DaySchedule(BaseModel):
    """One day's schedule for a thermostat; removed with its thermostat."""
    id = AutoField()
    # day index -- presumably 0..6 within the week; confirm against callers
    index = IntegerField()
    content = TextField()
    mode = CharField(default=ThermostatGroup.Modes.HEATING)
    thermostat = ForeignKeyField(Thermostat, backref='day_schedules', on_delete='CASCADE')
class Superblock(BaseModel, GovernanceClass):
    """A superblock (budget payout) governance object.

    payment_addresses / payment_amounts / proposal_hashes are stored as
    '|'-delimited strings, mirroring how monacoCoinD serialises them.
    """

    governance_object = ForeignKeyField(GovernanceObject, related_name='superblocks', on_delete='CASCADE', on_update='CASCADE')
    event_block_height = IntegerField()
    payment_addresses = TextField()
    payment_amounts = TextField()
    proposal_hashes = TextField(default='')
    sb_hash = CharField()
    object_hash = CharField(max_length=64)

    # numeric govobj type code used when (de)serialising for monacoCoinD
    govobj_type = monacoCoinD_GOVOBJ_TYPES['superblock']
    only_masternode_can_submit = True

    class Meta:
        db_table = 'superblocks'

    def is_valid(self):
        """Return True when addresses, amounts and proposal hashes are all
        well-formed and the address/amount counts match."""
        import monacoCoinlib
        import decimal
        printdbg("In Superblock#is_valid, for SB: %s" % self.__dict__)

        # it's a string from the DB...
        addresses = self.payment_addresses.split('|')
        for addr in addresses:
            if not monacoCoinlib.is_valid_monacoCoin_address(addr, config.network):
                printdbg("\tInvalid address [%s], returning False" % addr)
                return False

        amounts = self.payment_amounts.split('|')
        for amt in amounts:
            if not misc.is_numeric(amt):
                printdbg("\tAmount [%s] is not numeric, returning False" % amt)
                return False

            # no negative or zero amounts allowed
            damt = decimal.Decimal(amt)
            if not damt > 0:
                printdbg("\tAmount [%s] is zero or negative, returning False" % damt)
                return False

        # verify proposal hashes correctly formatted...
        if len(self.proposal_hashes) > 0:
            hashes = self.proposal_hashes.split('|')
            for object_hash in hashes:
                if not misc.is_hash(object_hash):
                    printdbg("\tInvalid proposal hash [%s], returning False" % object_hash)
                    return False

        # ensure number of payment addresses matches number of payments
        if len(addresses) != len(amounts):
            printdbg("\tNumber of payment addresses [%s] != number of payment amounts [%s], returning False" % (len(addresses), len(amounts)))
            return False

        printdbg("Leaving Superblock#is_valid, Valid = True")
        return True

    def is_deletable(self):
        """Superblocks are never auto-deleted (for now).

        # TBD (item moved to external storage/monacoCoinDrive, etc.)
        Returns False explicitly for consistency with Proposal.is_deletable
        (the original fell through with `pass`, returning None -- identical
        under truth-testing).
        """
        return False

    def hash(self):
        """Deterministic hash of the serialisable fields."""
        import monacoCoinlib
        return monacoCoinlib.hashit(self.serialise())

    def hex_hash(self):
        """hash() rendered as lowercase hex."""
        return "%x" % self.hash()

    # workaround for now, b/c we must uniquely ID a superblock with the hash,
    # in case of differing superblocks
    #
    # this prevents sb_hash from being added to the serialised fields
    @classmethod
    def serialisable_fields(self):
        return [
            'event_block_height',
            'payment_addresses',
            'payment_amounts',
            'proposal_hashes'
        ]

    # has this masternode voted to fund *any* superblocks at the given
    # event_block_height?
    @classmethod
    def is_voted_funding(self, ebh):
        count = (self.select()
                 .where(self.event_block_height == ebh)
                 .join(GovernanceObject)
                 .join(Vote)
                 .join(Signal)
                 .switch(Vote)  # switch join query context back to Vote
                 .join(Outcome)
                 .where(Vote.signal == VoteSignals.funding)
                 .where(Vote.outcome == VoteOutcomes.yes)
                 .count())
        return count

    @classmethod
    def latest(self):
        """Return the superblock with the highest event_block_height, or
        None when the table is empty."""
        try:
            # BUGFIX: the original called `.desc()` on the SelectQuery
            # (`order_by(self.event_block_height).desc()`), which raises
            # AttributeError at runtime; `.desc()` belongs on the field
            # inside order_by().
            obj = self.select().order_by(self.event_block_height.desc()).limit(1)[0]
        except IndexError:
            obj = None
        return obj

    @classmethod
    def at_height(self, ebh):
        """Query for all superblocks at the given event block height."""
        query = (self.select().where(self.event_block_height == ebh))
        return query

    @classmethod
    def find_highest_deterministic(self, sb_hash):
        """Among superblocks sharing sb_hash, pick the one with the highest
        object_hash (deterministic tie-break), or None if none exist."""
        # highest block hash wins
        query = (self.select()
                 .where(self.sb_hash == sb_hash)
                 .order_by(self.object_hash.desc()))
        try:
            obj = query.limit(1)[0]
        except IndexError:
            obj = None
        return obj
class Proposal(GovernanceClass, BaseModel):
    """A budget-proposal governance object tracked by Sentinel (monacoCoin).

    Rows live in the `proposals` table and link back to the generic
    GovernanceObject row that carries vote counts and the raw govobj data.
    """

    governance_object = ForeignKeyField(GovernanceObject, related_name='proposals', on_delete='CASCADE', on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    # numeric govobj type code used when (de)serialising for monacoCoinD
    govobj_type = monacoCoinD_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        """Return True when every proposal field passes its sanity check.

        Any unexpected error during validation marks the proposal invalid
        rather than propagating.
        """
        import monacoCoinlib
        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" % self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg("\tInvalid Proposal name [%s] (does not match regex), returning False" % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg("\tProposal end_epoch [%s] <= start_epoch [%s] , returning False" % (self.end_epoch, self.start_epoch))
                return False

            # amount can't be negative or 0 (Decimal compares fine with int)
            if (self.payment_amount <= 0):
                printdbg("\tProposal amount [%s] is negative or zero, returning False" % self.payment_amount)
                return False

            # payment address is valid base58 monacoCoin addr, non-multisig
            if not monacoCoinlib.is_valid_monacoCoin_address(self.payment_address, config.network):
                printdbg("\tPayment address [%s] not a valid monacoCoin address for network [%s], returning False" % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" % self.url)
                return False

            # the parsed result is unused -- urlparse is called only to see
            # whether it raises on a malformed URL
            try:
                parsed = urlparse.urlparse(self.url)
            except Exception as e:
                printdbg("\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            # NOTE(review): `e.message` exists only on Python 2; under
            # Python 3 this line itself raises AttributeError -- confirm
            # the target interpreter.
            printdbg("Unable to validate in Proposal#is_valid, marking invalid: %s" % e.message)
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self):
        """Return True when the proposal's end epoch has passed."""
        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # end date < current date
        if (self.end_epoch <= now):
            printdbg("\tProposal end_epoch [%s] <= now [%s] , returning True" % (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    def is_deletable(self):
        """Return True when the proposal ended more than 30 days ago."""
        # end_date < (current_date - 30 days)
        thirty_days = (86400 * 30)
        if (self.end_epoch < (misc.now() - thirty_days)):
            return True

        # TBD (item moved to external storage/monacoCoinDrive, etc.)
        return False

    @classmethod
    def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
        """Return valid proposals exceeding the quorum, best-ranked first.

        Secondary sort on object_hash keeps the ordering deterministic in
        the event of a vote-count tie, which superblock assembly requires.
        Note: this variant uses a strict `>` quorum comparison.
        """
        # return all approved proposals, in order of descending vote count
        #
        # we need a secondary 'order by' in case of a tie on vote count, since
        # superblocks must be deterministic
        query = (self
                 .select(self, GovernanceObject)  # Note that we are selecting both models.
                 .join(GovernanceObject)
                 .where(GovernanceObject.absolute_yes_count > proposal_quorum)
                 .order_by(GovernanceObject.absolute_yes_count.desc(), GovernanceObject.object_hash.desc())
                 )

        ranked = []
        for proposal in query:
            # attach the budget cap so downstream code can check affordability
            proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @property
    def rank(self):
        """Absolute yes-count of the linked governance object (0 if none)."""
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank

    def get_prepare_command(self):
        """Build the `gobject prepare` RPC argument list for this proposal."""
        import monacoCoinlib
        obj_data = monacoCoinlib.SHIM_serialise_for_monacoCoind(self.serialise())

        # new superblocks won't have parent_hash, revision, etc...
        cmd = ['gobject', 'prepare', '0', '1', str(int(time.time())), obj_data]

        return cmd

    def prepare(self, monacoCoind):
        """Submit the prepare transaction via RPC and record its fee-tx hash.

        Side effects: saves the fee tx hash onto the linked governance
        object and prints the manual submit command for the operator.
        """
        try:
            object_hash = monacoCoind.rpc_command(*self.get_prepare_command())
            printdbg("Submitted: [%s]" % object_hash)
            # `self.go` -- presumably a GovernanceClass shortcut for the
            # linked governance_object; confirm against the mixin.
            self.go.object_fee_tx = object_hash
            self.go.save()

            manual_submit = ' '.join(self.get_submit_command())
            print(manual_submit)

        except JSONRPCException as e:
            # NOTE(review): `e.message` is Python 2 only -- see is_valid.
            print("Unable to prepare: %s" % e.message)
class Block(BaseModel):
    """One user profile blocking another; removed when either side is deleted."""
    account = ForeignKeyField(UserProfile, on_delete='CASCADE')
    target = ForeignKeyField(UserProfile, on_delete='CASCADE')
    created_at = DateTimeField(default=datetime.datetime.now)
class Location(BaseModel):
    """A map/scene within a room, holding fog-of-war and vision settings."""

    room = ForeignKeyField(Room, backref="locations", on_delete="CASCADE")
    name = TextField()
    unit_size = FloatField(default=5)
    unit_size_unit = TextField(default="ft")
    use_grid = BooleanField(default=True)
    full_fow = BooleanField(default=False)
    fow_opacity = FloatField(default=0.3)
    fow_los = BooleanField(default=False)
    vision_mode = TextField(default="triangle")
    # default is 1km max, 0.5km min
    vision_min_range = FloatField(default=1640)
    vision_max_range = FloatField(default=3281)

    def __repr__(self):
        return f"<Location {self.get_path()}>"

    def get_path(self):
        """Room path plus this location's name."""
        room_path = self.room.get_path()
        return f"{room_path}/{self.name}"

    def as_dict(self):
        """Serialize to a dict, omitting the id and room reference."""
        excluded = [Location.id, Location.room]
        return model_to_dict(self, recurse=False, exclude=excluded)

    def add_default_layers(self):
        """Create the standard stack of layers for a fresh location.

        Layers are created in ascending index order; each spec carries
        exactly the options that differ from Layer's defaults.
        """
        default_layers = (
            dict(name="map", type_="normal", player_visible=True, index=0),
            dict(name="grid", type_="grid", selectable=False,
                 player_visible=True, index=1),
            dict(name="tokens", type_="normal", player_visible=True,
                 player_editable=True, index=2),
            dict(name="dm", type_="normal", index=3),
            dict(name="fow", type_="fow", player_visible=True, index=4),
            dict(name="fow-players", type_="fow-players", selectable=False,
                 player_visible=True, index=5),
            dict(name="draw", type_="normal", selectable=False,
                 player_visible=True, player_editable=True, index=6),
        )
        for spec in default_layers:
            Layer.create(location=self, **spec)

    class Meta:
        # a location name is unique within its room
        indexes = ((("room", "name"), True),)
class Pump(BaseModel):
    """A pump wired to at most one Output; unlinked (not deleted) when the
    output goes away."""
    id = AutoField()
    name = CharField()
    output = ForeignKeyField(Output, null=True, backref='pumps', on_delete='SET NULL', unique=True)
class Position(BaseModel):
    """One (x, y) sample at a timestamp within a capture session."""
    timestamp = IntegerField()
    x = FloatField()
    y = FloatField()
    capture_session = ForeignKeyField(CaptureSession, related_name='positions')
class Valve(BaseModel):
    """A valve driven by exactly one Output; removed with its output.

    `delay` is in seconds -- presumably actuation delay; confirm callers.
    """
    id = AutoField()
    name = CharField()
    delay = IntegerField(default=60)
    output = ForeignKeyField(Output, backref='valves', on_delete='CASCADE', unique=True)
class PaperKeywords(BaseModel):
    """Many-to-many join between papers and keywords."""
    paper = ForeignKeyField(Paper)
    keyword = ForeignKeyField(Keyword)
class Address(Model):
    """A postal address belonging to a person."""
    address = CharField()
    person = ForeignKeyField(Person)

    class Meta:
        database = db
class Entry(db.Model):
    """A single day's timesheet entry for one user, with optional break
    and approval workflow fields."""

    date = DateField()
    user = ForeignKeyField(User, related_name='reported_by')
    approver = ForeignKeyField(User, related_name='approved_by', null=True)
    started_at = TimeField()
    finished_at = TimeField()
    modified_at = DateTimeField(default=datetime.now)
    approved_at = DateTimeField(null=True)
    comment = TextField(null=True, default="")
    break_for = ForeignKeyField(Break, related_name='break_for', null=True)
    is_approved = BooleanField(default=False)

    # minutes of break taken; 0 when no break is attached
    break_length = property(lambda self: self.break_for.minutes if self.break_for else 0)

    @property
    def total_min(self):
        """Worked minutes (finish - start - break), or None when either
        time is missing."""
        if self.started_at is None or self.finished_at is None:
            return None
        total = (self.finished_at.hour - self.started_at.hour) * 60
        total += (self.finished_at.minute - self.started_at.minute)
        total -= self.break_length
        return total

    @property
    def total_time(self):
        """Worked time as a timedelta, or None when times are missing."""
        total = self.total_min
        if total is None:
            return None
        # BUGFIX: the original built timedelta(hours=total / 60,
        # minutes=total % 60); under Python 3's true division that
        # double-counts the remainder (90 min -> 1.5h + 30min = 2:00).
        # Expressing the whole duration in minutes is exact.
        return timedelta(minutes=total)

    def __str__(self):
        output = "On %s from %s to %s" % (
            self.date.isoformat(),
            "N/A" if self.started_at is None else self.started_at.strftime("%H:%M"),
            "N/A" if self.finished_at is None else self.finished_at.strftime("%H:%M"))
        if self.break_for:
            # typo fix: message previously read "with beak for"
            output += " with break for " + self.break_for.name
        total_min = self.total_min
        if total_min:
            output += ", total: %d:%02d" % (total_min // 60, total_min % 60)
        return output

    class Meta:
        table_alias = 'e'

    @classmethod
    def get_user_timesheet(cls, *, user=None, week_ending_date=None):
        """Retrieves timesheet entries for a user for a week ending on
        week_ending_date.

        Defaults: the current logged-in user and the current week.  The
        raw query LEFT JOINs a generated 7-day calendar so days with no
        entry still appear as rows.
        """
        if user is None:
            user = current_user
        if week_ending_date is None:
            week_ending_date = current_week_ending_date()

        rq = RawQuery(cls, """
            WITH daynums(num) AS (VALUES (6),(5),(4),(3),(2),(1),(0)),
            week(day) AS (SELECT date(?, '-'||num||' day') FROM daynums)
            SELECT id, day as date, finished_at, started_at, user_id,
                   modified_at, break_for_id, is_approved, approver_id,
                   approved_at, comment
            FROM week LEFT JOIN entry ON "date" = day AND user_id = ?
            ORDER BY "date" ASC""", week_ending_date.isoformat(), user.id)
        return rq.execute()

    @classmethod
    def get_for_approving(cls, *, user=None, week_ending_date=None):
        """Retrieves timesheet entries for approval, optionally narrowed
        to one user and/or the week ending at week_ending_date; capped at
        100 rows ordered by date."""
        query = Entry.select()
        if user:
            query = query.where(Entry.user_id == user.id)
        if week_ending_date:
            week_start_date = week_ending_date - timedelta(days=7)
            query = query.where((Entry.date >= week_start_date)
                                & (Entry.date <= week_ending_date))
        return query.order_by(Entry.date).limit(100).execute()
class Article(Model):
    """An RSS/NZB feed article with parsed metadata and search helpers.

    Field and log-string names are French; `fichier` = filename,
    `taille` = size, `annee` = year, `lu` = read flag.
    """

    title = CharField()
    link = CharField()
    description = CharField()
    pubDate = CharField()
    comment = CharField()
    guid = IntegerField()
    meta = CharField()
    nfo = CharField()
    fichier = CharField()
    taille = CharField()
    categorie = ForeignKeyField(Categorie, related_name="articles")
    categorie_origine = ForeignKeyField(Categorie, related_name="articles_2")
    categorie_str = CharField()
    lu = BooleanField(index=True, default=False)
    annee = IntegerField(default=0)

    def analyse_description(self):
        """Parse the HTML description into fields (nfo link, filename,
        size, category) and stash leftover key/value pairs in `meta`.

        Side effects: may create and save a new Categorie; sets several
        attributes on self but does not save self.
        """
        logger.debug("analyse_description : debut")
        meta = {}
        self.analyse_annee()
        # parse the description, one "<br>"-separated field at a time
        for x in self.description.split("<br>"):
            data = x.strip()
            logger.debug("analyse_description : champs <%s>", data)
            # link detection: the "Fichier Nfo" anchor carries the nfo URL
            if data.startswith("<a href"):
                reg = re.match(r'<a[ ]*href="(.*)">[ ]*Fichier Nfo[ ]*</a>', data)
                logger.debug("analyse_description : lien detecte %s", str(reg))
                if reg is not None:
                    self.nfo = reg.groups()[0]
                    # r = requests.get(self.nfo)
                    # self.nfo_html = r.text
            else:
                # "Key : Value" pair -- split on ':' and trim
                decoup = [y.strip() for y in data.strip().split(":")]
                logger.debug("analyse_description : split champs %s", str(decoup))
                if decoup[0] == "Nom du fichier":
                    self.fichier = decoup[1]
                elif decoup[0] == "Taille":
                    self.taille = decoup[1]
                elif decoup[0] == "Catégorie":
                    try:
                        self.categorie_str = decoup[1]
                        self.categorie = Categorie.get(
                            Categorie.nom == decoup[1])
                        self.categorie_origine = Categorie.get(
                            Categorie.nom == decoup[1])
                    except Categorie.DoesNotExist:
                        # unknown category: create it once and reuse
                        self.categorie = Categorie(nom=decoup[1])
                        self.categorie_origine = self.categorie
                        self.categorie.save()
                        logger.info("Creation nouvelle categorie : %s" % self.categorie.nom)
                elif len(decoup) == 2:
                    # anything else key/value-shaped goes into meta
                    meta[decoup[0]] = decoup[1]
        self.meta = str(meta)
        logger.debug("analyse_description : fin")

    def lancer_recherche(self, start_multi=0, stop_multi=9):
        """Search indexers for this article's file name(s) and save each
        hit as a Recherche row.

        A '*' in the filename is treated as a zero-padded numeric
        wildcard expanded over start_multi..stop_multi.  nzbindex is
        tried first, binsearch as a fallback.  Duplicate hits (unique
        id_check) are logged and skipped.
        """
        url_nzbindex = "http://www.nzbindex.nl/search/?q={0}&max=100"
        url_binsearch = "https://binsearch.info/?q={0}&max=100"
        liste_fichier = self.fichier.split(" / ")
        cpt_etoile = self.fichier.find("*")
        if cpt_etoile != -1:
            # find the end of the '*' run; a single '*' means width 1
            cpt_etoile_fin = self.fichier.find("*", cpt_etoile + 1)
            if cpt_etoile_fin == -1:
                cpt_etoile_fin = cpt_etoile
            for x in range(start_multi, stop_multi + 1, 1):
                # substitute the wildcard with a zero-padded counter
                liste_fichier.append(self.fichier[0:cpt_etoile] + (
                    ("%0" + str(cpt_etoile_fin - cpt_etoile + 1) + "d") % x) + self.fichier[cpt_etoile_fin + 1:])
        # print(liste_fichier)
        for fichier in liste_fichier:
            ret = recherche_indexeur(url_nzbindex, fichier, parseur=MyParserNzbIndex)
            if len(ret) == 0:
                ret = recherche_indexeur(url_binsearch, fichier)
            if len(ret) > 0:
                for item in ret:
                    logger.debug("item %s", str(item))
                    try:
                        rec = Recherche(
                            id_check=item["id"],
                            url=item["url"],
                            taille=item["taille"] if "taille" in item else "Vide",
                            title=item["title"],
                            fichier=fichier,
                            article=self,
                        )
                        rec.save()
                    except IntegrityError:
                        # already recorded -- id_check presumably unique
                        logger.error(
                            "recherche_indexeur : item deja existant <%s>",
                            item["id"])

    def creer_recherche_tous(self, nom_fichier, url):
        """Save a catch-all ("Tous") Recherche row pointing at *url*.

        Note: `nom_fichier` is accepted but unused -- the row records
        self.fichier instead; confirm whether that is intentional.
        """
        rec = Recherche(
            id_check=0,
            url=url,
            taille="Vide",
            title="Tous",
            fichier=self.fichier,
            article=self,
        )
        rec.save()

    def nettoyer_recherche(self):
        """Delete all Recherche rows attached to this article."""
        n = Recherche.delete().where(Recherche.article == self).execute()
        logger.debug("%d recherches supprimes pour article %d", n, self.id)

    def analyse_annee(self):
        """Extract a 4-digit year from the title into `annee`.

        No-op when a year was already found.  Tries a bare 4-digit match
        first, then a dd/mm/yyyy pattern.
        """
        if self.annee != 0:
            return
        logger.debug("analyse_annee : %s", self.title)
        # year analysis: any 4-digit run in the title
        reg = re.match(r".*([0-9]{4}).*", self.title)
        # fall back to the ??/??/???? date form
        if reg is None:
            logger.debug("analyse_annee : premier reg non trouve")
            reg = re.match(r".*[0-9][0-9]/[0-9][0-9]/([0-9]{4}).*", self.title)
        if reg is not None:
            logger.debug("analyse_annee : reg trouve %s", str(reg.group()))
            self.annee = int(reg.groups()[0])
        logger.debug("analyse_annee fin : %d", self.annee)

    def marquer_favoris(self):
        """Move this article into the 'Favoris' category and save."""
        logger.debug("marquer_favorie")
        self.categorie = Categorie.get(Categorie.nom == "Favoris")
        self.save()

    def __str__(self):
        return "<%s %s %s>" % (self.title, self.pubDate, self.lu)

    def printall(self):
        """Full multi-line dump of every field, for debugging."""
        return ("<" +
                "<" + str(self.title) + ">,\n" +
                "<" + str(self.link) + ">,\n" +
                "<" + str(self.description) + ">,\n" +
                "<" + str(self.pubDate) + ">,\n" +
                "<" + str(self.comment) + ">,\n" +
                "<" + str(self.guid) + ">,\n" +
                "<" + str(self.meta) + ">,\n" +
                "<" + str(self.nfo) + ">,\n" +
                "<" + str(self.fichier) + ">,\n" +
                "<" + str(self.taille) + ">,\n" +
                "<" + str(self.categorie) + ">,\n" +
                "<" + str(self.categorie_str) + ">,\n" +
                "<" + str(self.lu) + ">" + ">,\n" +
                "<" + (str(self.status_nzbd) if "status_nzbd" in dir(self) else "") + ">")

    class Meta:
        database = db
class Solution(BaseModel):
    """A user's submission for an exercise, with a checking state machine.

    State transitions go through set_state(); duplicate submissions are
    detected by content hash per solver.
    """

    STATES = SolutionState
    MAX_CHECK_TIME_SECONDS = 60 * 10

    exercise = ForeignKeyField(Exercise, backref='solutions')
    solver = ForeignKeyField(User, backref='solutions')
    checker = ForeignKeyField(User, null=True, backref='solutions')
    state = CharField(
        choices=STATES.to_choices(),
        default=STATES.CREATED.name,
        index=True,
    )
    grade = IntegerField(
        default=0,
        constraints=[Check('grade <= 100'), Check('grade >= 0')],
    )
    submission_timestamp = DateTimeField(index=True)
    hashed = TextField()

    @property
    def solution_files(
        self,
    ) -> Union[Iterable['SolutionFile'], 'SolutionFile']:
        """All files attached to this solution."""
        return SolutionFile.filter(SolutionFile.solution == self)

    @property
    def is_checked(self):
        """True once the solution has reached the DONE state."""
        return self.state == self.STATES.DONE.name

    @staticmethod
    def create_hash(content: Union[str, bytes], *args, **kwargs) -> str:
        """Content hash used for duplicate detection."""
        return hashing.by_content(content, *args, **kwargs)

    @classmethod
    def is_duplicate(
        cls, content: Union[str, bytes], user: User, *,
        already_hashed: bool = False,
    ) -> bool:
        """True when *user* already submitted identical content."""
        hash_ = cls.create_hash(content) if not already_hashed else content

        return cls.select().where(
            cls.hashed == hash_,
            cls.solver == user,
        ).exists()

    def start_checking(self) -> bool:
        """Move this solution into the IN_CHECKING state."""
        return self.set_state(Solution.STATES.IN_CHECKING)

    def set_state(self, new_state: SolutionState, **kwargs) -> bool:
        """Atomically update this row's state; True when one row changed."""
        # Optional: filter the old state of the object
        # to make sure that no two processes set the state together
        requested_solution = (Solution.id == self.id)
        changes = Solution.update(
            **{Solution.state.name: new_state.name},
            **kwargs,
        ).where(requested_solution)
        updated = changes.execute() == 1
        return updated

    def ordered_versions(self) -> Iterable['Solution']:
        """All of this solver's attempts on this exercise, oldest first."""
        return Solution.select().where(
            Solution.exercise == self.exercise,
            Solution.solver == self.solver,
        ).order_by(Solution.submission_timestamp.asc())

    def test_results(self) -> Iterable[dict]:
        """Automated test execution results for this solution."""
        return SolutionExerciseTestExecution.by_solution(self)

    @classmethod
    def of_user(
        cls, user_id: int, with_archived: bool = False,
    ) -> Iterable[Dict[str, Any]]:
        """Exercise dicts for a user, annotated with their latest
        solution id, checked flag and checker name."""
        db_exercises = Exercise.get_objects(fetch_archived=with_archived)
        exercises = Exercise.as_dicts(db_exercises)

        solutions = (
            cls
            .select(cls.exercise, cls.id, cls.state, cls.checker)
            .where(cls.exercise.in_(db_exercises), cls.solver == user_id)
            .order_by(cls.submission_timestamp.desc())
        )
        for solution in solutions:
            exercise = exercises[solution.exercise_id]
            # newest solution wins -- rows arrive newest-first, so only
            # the first solution per exercise is recorded
            if exercise.get('solution_id') is None:
                exercise['solution_id'] = solution.id
                exercise['is_checked'] = solution.is_checked
                if solution.is_checked and solution.checker:
                    exercise['checker'] = solution.checker.fullname
        return tuple(exercises.values())

    @property
    def comments(self):
        """All reviewer comments across this solution's files."""
        return Comment.select().join(
            SolutionFile,
        ).where(SolutionFile.solution == self)

    @classmethod
    def create_solution(
        cls,
        exercise: Exercise,
        solver: User,
        files: List['File'],
        hash_: Optional[str] = None,
    ) -> 'Solution':
        """Store a new solution plus its files; mark older attempts OLD.

        Raises:
            AlreadyExists: when the same content was already submitted.
        """
        if len(files) == 1:
            hash_ = cls.create_hash(files[0].code)

        if hash_ and cls.is_duplicate(hash_, solver, already_hashed=True):
            raise AlreadyExists('This solution already exists.')

        instance = cls.create(**{
            cls.exercise.name: exercise,
            cls.solver.name: solver,
            cls.submission_timestamp.name: datetime.now(),
            cls.hashed.name: hash_,
        })

        files_details = [
            {
                SolutionFile.path.name: f.path,
                SolutionFile.solution_id.name: instance.id,
                SolutionFile.code.name: f.code,
                SolutionFile.file_hash.name: SolutionFile.create_hash(f.code),
            }
            for f in files
        ]
        SolutionFile.insert_many(files_details).execute()

        # update old solutions for this exercise
        other_solutions: Iterable[Solution] = cls.select().where(
            cls.exercise == exercise,
            cls.solver == solver,
            cls.id != instance.id,
        )
        for old_solution in other_solutions:
            old_solution.set_state(Solution.STATES.OLD_SOLUTION)
        return instance

    @classmethod
    def _base_next_unchecked(cls):
        """Base query for picking the next solution to check.

        Unchecked (CREATED) solutions, least-commented and least-failing
        first, oldest submission as the tiebreak.
        """
        comments_count = fn.Count(Comment.id).alias('comments_count')
        fails = fn.Count(SolutionExerciseTestExecution.id).alias('failures')
        return cls.select(
            cls.id,
            cls.state,
            cls.exercise,
            comments_count,
            fails,
        ).join(
            SolutionFile,
            join_type=JOIN.LEFT_OUTER,
            on=(SolutionFile.solution == cls.id),
        ).join(
            Comment,
            join_type=JOIN.LEFT_OUTER,
            on=(Comment.file == SolutionFile.id),
        ).join(
            SolutionExerciseTestExecution,
            join_type=JOIN.LEFT_OUTER,
            on=(SolutionExerciseTestExecution.solution == cls.id),
        ).where(
            cls.state == Solution.STATES.CREATED.name,
        ).group_by(
            cls.id,
        ).order_by(
            comments_count,
            fails,
            cls.submission_timestamp.asc(),
        )

    def mark_as_checked(
        self,
        by: Optional[Union[User, int]] = None,
    ) -> bool:
        """Finish checking: set DONE and record the checker."""
        return self.set_state(
            Solution.STATES.DONE,
            checker=by,
        )

    @classmethod
    def next_unchecked(cls) -> Optional['Solution']:
        """Next solution to check across all exercises, or None."""
        try:
            return cls._base_next_unchecked().get()
        except cls.DoesNotExist:
            return None

    @classmethod
    def next_unchecked_of(cls, exercise_id) -> Optional['Solution']:
        """Next solution to check for one exercise, or None."""
        try:
            return cls._base_next_unchecked().where(
                cls.exercise == exercise_id,
            ).get()
        except cls.DoesNotExist:
            return None

    @classmethod
    def status(cls):
        """Per-exercise counts of submitted vs. checked active solutions."""
        one_if_is_checked = Case(
            Solution.state,
            ((Solution.STATES.DONE.name, 1),),
            0,
        )
        fields = [
            Exercise.id,
            Exercise.subject.alias('name'),
            Exercise.is_archived.alias('is_archived'),
            fn.Count(Solution.id).alias('submitted'),
            fn.Sum(one_if_is_checked).alias('checked'),
        ]
        join_by_exercise = (Solution.exercise == Exercise.id)
        active_solutions = Solution.state.in_(
            Solution.STATES.active_solutions(),
        )
        return (
            Exercise
            .select(*fields)
            .join(Solution, JOIN.LEFT_OUTER, on=join_by_exercise)
            .where(active_solutions)
            .group_by(Exercise.subject, Exercise.id)
            .order_by(Exercise.id)
        )

    @classmethod
    def left_in_exercise(cls, exercise: Exercise) -> int:
        """Percentage of active solutions already checked for *exercise*.

        NOTE(review): divides by `submitted` -- raises ZeroDivisionError
        when the exercise has no active solutions; confirm callers guard.
        """
        one_if_is_checked = Case(
            Solution.state, ((Solution.STATES.DONE.name, 1),), 0)
        active_solutions = cls.state.in_(Solution.STATES.active_solutions())
        response = cls.filter(
            cls.exercise == exercise,
            active_solutions,
        ).select(
            fn.Count(cls.id).alias('submitted'),
            fn.Sum(one_if_is_checked).alias('checked'),
        ).dicts().get()
        return int(response['checked'] * 100 / response['submitted'])
class Vote(BaseModel):
    """A single user's vote for a nominee."""

    # Identifier of the voting user, stored as a raw int (not a ForeignKey).
    user = IntegerField(index=True)
    nominee = ForeignKeyField(Nominee, related_name='votes')
    # True for preliminary-round votes, False for the final round
    # (presumably — TODO confirm against the voting workflow).
    preliminary = BooleanField(index=True)
    def __new__(self, name, bases, attrs):
        """Create a versioned model class plus its nested ``VersionModel``.

        For ordinary subclasses this builds the requested class, then
        synthesizes a companion ``<Name><suffix>`` class (inheriting from it)
        that carries the version-tracking fields and a ForeignKey back to the
        original record. The mixin itself and the synthesized companions are
        detected and created as plain peewee models to stop the recursion.
        """
        # Because the nested VersionModel shares this metaclass, we need to
        # test for it and act like :class:`peewee.BaseModel`
        if (attrs.pop('_RECURSION_BREAK_TEST', None) or name == 'VersionedModel'):
            # We don't want versions for the mixin
            VersionModel = BaseModel.__new__(self, name, bases, attrs)
            # Because ``VersionModel`` inherits from the initial class
            # we need to mask the reference to itself that is inherited to avoid
            # infinite recursion and for detection
            setattr(VersionModel, self._version_model_attr_name, None)
            return VersionModel

        # Instantiate the fields we want to add
        # These fields will be added to the nested ``VersionModel``
        _version_fields = {'_valid_from': DateTimeField(default=datetime.datetime.now, index=True),
                           '_valid_until': DateTimeField(null=True, default=None,),
                           '_deleted': BooleanField(default=False),
                           '_original_record': None,  # ForeignKeyField. Added later.
                           '_original_record_id': None,  # added later by peewee
                           '_version_id': IntegerField(default=1),
                           '_id': PrimaryKeyField(primary_key=True)}  # Make an explicit primary key

        # Create the class, create the nested ``VersionModel``, link them together.
        # Reject user-declared attributes that would collide with the
        # auto-generated version fields.
        for field in attrs.keys():
            if field in _version_fields:
                raise ValueError('You can not declare the attribute {}. '
                                 'It is automatically created by VersionedModel'.format(field))

        # Create the top level ``VersionedModel`` class
        new_class = super(MetaModel, self).__new__(self, name, bases, attrs)

        # Mung up the attributes for our ``VersionModel``
        version_model_attrs = _version_fields.copy()
        version_model_attrs['__qualname__'] = name + self._version_model_name_suffix

        # Add ForeignKeyField linking to the original record
        version_model_attrs['_original_record'] = ForeignKeyField(
            new_class,
            related_name=self._version_model_related_name,
            null=True,
            on_delete="SET NULL"
        )

        # Mask all ``peewee.RelationDescriptor`` fields to avoid related name conflicts
        for field, value in vars(new_class).items():
            if isinstance(value, RelationDescriptor):
                version_model_attrs[field] = None

        # needed to avoid infinite recursion
        version_model_attrs['_RECURSION_BREAK_TEST'] = self._RECURSION_BREAK_TEST

        # Create the nested ``VersionedModel`` class that inherits from the top level new_class
        VersionModel = type(name + self._version_model_name_suffix,  # Name
                            (new_class,),  # bases
                            version_model_attrs)  # attributes

        # Modify the nested ``VersionedModel``
        setattr(VersionModel, '_version_fields', _version_fields)

        # Modify the newly created class before returning
        setattr(new_class, self._version_model_attr_name, VersionModel)
        setattr(new_class, '_version_model_attr_name', self._version_model_attr_name)
        return new_class
class GridLayer(BaseModel):
    """Grid overlay settings for a map layer; removed with its layer."""

    # Grid cell size; units are not shown here (presumably pixels — TODO
    # confirm against the renderer).
    size = FloatField(default=50)
    layer = ForeignKeyField(Layer, on_delete="CASCADE")
class UserRoles(BaseModel):
    # Because peewee does not come with built-in many-to-many
    # relationships, we need this intermediary class to link
    # user to roles.
    user = ForeignKeyField(User, backref='roles')
    role = ForeignKeyField(Role, backref='users')
class PlayerRoom(BaseModel):
    """Join table recording which users participate in which rooms."""

    player = ForeignKeyField(User, backref="rooms_joined", on_delete="CASCADE")
    room = ForeignKeyField(Room, backref="players", on_delete="CASCADE")

    def __repr__(self):
        # Same rendering as before, built with str.format instead of an f-string.
        return "<PlayerRoom {} - {}>".format(self.room.get_path(), self.player.name)
class Message(BaseModel):
    """Archived Discord message with helpers to sync from gateway events."""

    id = BigIntegerField(primary_key=True)
    channel_id = BigIntegerField()
    guild_id = BigIntegerField(null=True)
    author = ForeignKeyField(User)
    content = TextField()
    timestamp = DateTimeField()
    edited_timestamp = DateTimeField(null=True, default=None)
    deleted = BooleanField(default=False)
    num_edits = BigIntegerField(default=0)
    command = TextField(null=True)
    # FIX: use the callable `list` instead of a literal `[]` default.
    # A literal [] is a single shared mutable object reused for every new
    # model instance (the classic mutable-default pitfall); peewee invokes
    # callable defaults, so each instance gets its own fresh list.
    mentions = ArrayField(BigIntegerField, default=list, null=True)
    emojis = ArrayField(BigIntegerField, default=list, null=True)
    attachments = ArrayField(TextField, default=list, null=True)
    embeds = BinaryJSONField(default=list, null=True)

    # Raw DDL for Postgres GIN indexes (full-text search on content,
    # containment queries on mentions) that peewee's index declarations
    # cannot express; executed elsewhere during setup.
    SQL = '''
        CREATE INDEX IF NOT EXISTS messages_content_fts
        ON messages USING gin(to_tsvector('english', content));

        CREATE INDEX IF NOT EXISTS messages_mentions
        ON messages USING gin (mentions);
    '''

    class Meta:
        db_table = 'messages'

        indexes = (
            # These indexes are mostly just general use
            (('channel_id', ), False),
            (('guild_id', ), False),
            (('deleted', ), False),

            # Timestamp is regularly sorted on
            (('timestamp', ), False),

            # Some queries want to get history in a guild or channel
            (('author', 'guild_id', 'channel_id'), False),
        )

    @classmethod
    def from_disco_message_update(cls, obj):
        """Apply a MESSAGE_UPDATE event to the stored row.

        Embed-only updates (no edited_timestamp) are ignored. Fields marked
        UNSET in the event are left untouched.
        """
        if not obj.edited_timestamp:
            return

        to_update = {
            'edited_timestamp': obj.edited_timestamp,
            # Server-side increment via a peewee expression, not a Python int.
            'num_edits': cls.num_edits + 1,
            'mentions': list(obj.mentions.keys()),
        }

        if obj.content is not UNSET:
            to_update['content'] = obj.with_proper_mentions
            to_update['emojis'] = list(map(int, EMOJI_RE.findall(obj.content)))

        if obj.attachments is not UNSET:
            to_update['attachments'] = [i.url for i in obj.attachments.values()]

        if obj.embeds is not UNSET:
            to_update['embeds'] = [json.dumps(i.to_dict(), default=default_json) for i in obj.embeds]

        cls.update(**to_update).where(cls.id == obj.id).execute()

    @classmethod
    def from_disco_message(cls, obj):
        """Upsert a message from a MESSAGE_CREATE event.

        Also ensures every mentioned user exists in the User table.
        Returns True if a new row was created.
        """
        _, created = cls.get_or_create(
            id=obj.id,
            defaults=dict(
                channel_id=obj.channel_id,
                guild_id=(obj.guild and obj.guild.id),
                author=User.from_disco_user(obj.author),
                content=obj.with_proper_mentions,
                timestamp=obj.timestamp,
                edited_timestamp=obj.edited_timestamp,
                num_edits=(0 if not obj.edited_timestamp else 1),
                mentions=list(obj.mentions.keys()),
                emojis=list(map(int, EMOJI_RE.findall(obj.content))),
                attachments=[i.url for i in obj.attachments.values()],
                embeds=[json.dumps(i.to_dict(), default=default_json) for i in obj.embeds]))

        for user in obj.mentions.values():
            User.from_disco_user(user)

        return created

    @classmethod
    def from_disco_message_many(cls, messages, safe=False):
        """Bulk-insert messages; with safe=True, skip conflicting ids."""
        q = cls.insert_many(map(cls.convert_message, messages)).returning(cls.id)

        if safe:
            q = q.on_conflict('DO NOTHING')

        return q.execute()

    @staticmethod
    def convert_message(obj):
        """Convert a disco message object to a row dict for insert_many."""
        return {
            'id': obj.id,
            'channel_id': obj.channel_id,
            'guild_id': (obj.guild and obj.guild.id),
            'author': User.from_disco_user(obj.author),
            'content': obj.with_proper_mentions,
            'timestamp': obj.timestamp,
            'edited_timestamp': obj.edited_timestamp,
            'num_edits': (0 if not obj.edited_timestamp else 1),
            'mentions': list(obj.mentions.keys()),
            'emojis': list(map(int, EMOJI_RE.findall(obj.content))),
            'attachments': [i.url for i in obj.attachments.values()],
            'embeds': [json.dumps(i.to_dict(), default=default_json) for i in obj.embeds],
        }

    @classmethod
    def for_channel(cls, channel):
        """All stored messages belonging to the given channel."""
        return cls.select().where(cls.channel_id == channel.id)
class DailyAttendance(Attendance):
    """Read-only model over the ``dailyattendance`` SQL view.

    The view aggregates raw attendance rows into one record per user per
    day: first start, last finish, break count, and total working seconds.
    """

    class Meta:
        # NOTE(review): 'date' is a column synthesized by the view's
        # GROUP BY, not a field declared on this model — confirm peewee
        # resolves it as intended here.
        primary_key = CompositeKey('date', 'user_id')

    # Number of gaps between attendance rows for the day (count(*) - 1).
    break_count = IntegerField(null=False)
    # Total seconds between each started_at/finished_at pair, summed.
    working_time_seconds = IntegerField(null=False)
    user = ForeignKeyField(User, null=False, related_name='daily_attendances', on_delete='CASCADE')

    # MySQL flavor: relies on unix_timestamp() for second arithmetic.
    create_view_statement_mysql = """\
create or replace view dailyattendance as
select min(started_at) as started_at,
       max(finished_at) as finished_at,
       count(*) - 1 as break_count,
       sum(unix_timestamp(finished_at) - unix_timestamp(started_at)) as working_time_seconds,
       min(created_at) as created_at,
       user_id
from attendance
where started_at is not null and finished_at is not null
group by date(started_at), user_id
order by started_at"""

    # SQLite flavor: strftime('%s', ...) yields epoch seconds as text,
    # hence the casts to integer.
    create_view_statement_sqlite = """\
create view if not exists dailyattendance as
select min(started_at) as started_at,
       max(finished_at) as finished_at,
       count(*) - 1 as break_count,
       sum(cast(strftime('%s', finished_at) as integer) - cast(strftime('%s', started_at) as integer)) as working_time_seconds,
       min(created_at) as created_at,
       user_id
from attendance
where started_at is not null and finished_at is not null
group by date(started_at), user_id
order by started_at"""

    @classmethod
    def create_view(cls):
        """Create the backing view using the dialect of the bound database.

        Raises NotImplementedError for databases other than MySQL/SQLite.
        """
        if isinstance(cls._meta.database, MySQLDatabase):
            statement = cls.create_view_statement_mysql
        elif isinstance(cls._meta.database, SqliteDatabase):
            statement = cls.create_view_statement_sqlite
        else:
            raise NotImplementedError(
                'An SQL statement for the current database {} is not implemented.'
                .format(cls._meta.database))
        cls._meta.database.execute_sql(statement)

    @property
    def working_time(self):
        """The day's working time as a ``datetime.timedelta``."""
        return timedelta(seconds=self.working_time_seconds)