class Note(BaseModel):
    model = pv.ForeignKeyField(Model, backref='notes')
    data = sqlite_ext.JSONField()  # format = dict()
    constraint = ConstraintField(unique=True)  # format = list()
    _tags = pv.ManyToManyField(Tag, backref='notes')
    created = pv.DateTimeField(constraints=[pv.SQL('DEFAULT CURRENT_TIMESTAMP')])
    modified = pv.DateTimeField(constraints=[pv.SQL('DEFAULT CURRENT_TIMESTAMP')])
    info = sqlite_ext.JSONField(default=dict)

    def to_dict(self):
        return super(Note, self).to_dict(manytomany=False, backrefs=False,
                                         exclude=['_tags'], extra_attrs=['tags'])

    @property
    def tags(self):
        return [t.name for t in self._tags]

    def mark(self, tag='marked'):
        Tag.get_or_create(name=tag)[0].notes.add(self)

    add_tag = mark

    def unmark(self, tag='marked'):
        Tag.get_or_create(name=tag)[0].notes.remove(self)

    remove_tag = unmark
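A brief usage sketch for the tagging helpers above; `card_model` is a placeholder for an existing `Model` row, and the database plus the many-to-many through table are assumed to be created already. It only illustrates how `mark`, `unmark`, and the `tags` property fit together.

# Illustration only: `card_model` and the field values are assumptions.
note = Note.create(model=card_model,
                   data={'Front': 'question', 'Back': 'answer'},
                   constraint=['question', 'answer'])
note.mark('leech')      # Tag.get_or_create('leech') and attach via the m2m backref
print(note.tags)        # -> ['leech']
note.unmark('leech')    # detach the tag association again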
class Meta:
    indexes = [
        pv.SQL("CREATE INDEX idx_cards_odid on cards (odid)"),
        pv.SQL("CREATE INDEX ix_cards_usn on cards (usn)"),
        pv.SQL("CREATE INDEX ix_cards_nid on cards (nid)"),
        pv.SQL("CREATE INDEX ix_cards_sched on cards (did, queue, due)"),
    ]
def detailed_profit(days: int) -> list:
    sell_query = (Transactions.select(
        Transactions.type_id,
        peewee.fn.sum(Transactions.quantity).alias("number_sold"),
        peewee.fn.sum(Transactions.quantity * Transactions.unit_price).alias("product"),
        peewee.fn.AvgSalePrice(
            Transactions.unit_price, Transactions.quantity).alias("avg_sell")).where(
                (Transactions.date > datetime.now() - timedelta(days))
                & ~Transactions.is_buy).group_by(Transactions.type_id).having(
                    peewee.SQL("product") > float(
                        configuration.get("profit_sum_limit"))).order_by(
                            Transactions.type_id))

    sq = sell_query.alias("sq1")

    profit_query = (
        Transactions.select(
            Transactions.type_id,
            sq.c.avg_sell,
            sq.c.number_sold,
            Names.name,
            peewee.fn.sum(Transactions.quantity * Transactions.unit_price).alias("product"),
            peewee.fn.AvgBuyPrice(
                Transactions.unit_price, Transactions.quantity).alias("avg_buy")).join(
                    sq, on=(Transactions.type_id == sq.c.type_id)).join(
                        Names, on=(sq.c.type_id == Names.id)).where(
                            (Transactions.date > datetime.now() - timedelta(days))
                            & Transactions.is_buy).group_by(Transactions.type_id).having(
                                peewee.SQL("product") > float(
                                    configuration.get("profit_sum_limit"))).order_by(
                                        Transactions.type_id))

    return profit_query
def set_default(db, migrator, table_name, column_name, field):
    default = field.default
    if callable(default):
        default = default()
    param = pw.Param(field.db_value(default))
    op = pw.Clause(pw.SQL('ALTER TABLE'), pw.Entity(table_name),
                   pw.SQL('ALTER COLUMN'), pw.Entity(column_name),
                   pw.SQL('SET DEFAULT'), param)
    return normalize_whatever_junk_peewee_migrations_gives_you(migrator, op)
class Meta:
    indexes = [
        pv.SQL(
            "CREATE INDEX idx_templates_name_ntid on templates (name, ntid)"
        ),
        pv.SQL("CREATE INDEX idx_templates_usn on templates (usn)"),
    ]
    primary_key = pv.CompositeKey("ntid", "ord")
    without_rowid = True
def rename_column(db, migrator, ntn, ocn, ncn, field):
    compiler = db.compiler()
    if is_mysql(db):
        junk = pw.Clause(
            pw.SQL('ALTER TABLE'), pw.Entity(ntn), pw.SQL('CHANGE'),
            pw.Entity(ocn), compiler.field_definition(field)
        )
    else:
        junk = migrator.rename_column(ntn, ocn, ncn, generate=True)
    return normalize_whatever_junk_peewee_migrations_gives_you(migrator, junk)
def change_column_type(db, migrator, table_name, column_name, field):
    column_type = _field_type(field)
    if is_postgres(db):
        op = pw.Clause(pw.SQL('ALTER TABLE'), pw.Entity(table_name),
                       pw.SQL('ALTER'), field.as_entity(),
                       pw.SQL('TYPE'), field.__ddl_column__(column_type))
    elif is_mysql(db):
        op = pw.Clause(*[pw.SQL('ALTER TABLE'), pw.Entity(table_name),
                         pw.SQL('MODIFY')] + field.__ddl__(column_type))
    else:
        raise Exception('how do i change a column type for %s?' % db)
    return normalize_whatever_junk_peewee_migrations_gives_you(migrator, op)
def recent(cls, user_id):
    return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
        join(Event, on=(Dashboard.id == peewee.SQL("t2.object_id::integer"))). \
        where(Event.action << ('edit', 'view')). \
        where(Event.user == user_id). \
        where(~(Event.object_id >> None)). \
        where(Event.object_type == 'dashboard'). \
        group_by(Event.object_id, Dashboard.id). \
        order_by(peewee.SQL("count(0) desc"))
def recent(cls, user_id):
    return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
        join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))). \
        where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')). \
        where(Event.user == user_id). \
        where(~(Event.object_id >> None)). \
        where(Event.object_type == 'query'). \
        where(cls.is_archived == False). \
        group_by(Event.object_id, Query.id). \
        order_by(peewee.SQL("count(0) desc"))
def add_not_null(db, migrator, table, field, column_name):
    qc = db.compiler()
    if is_postgres(db) or is_sqlite(db):
        junk = migrator.add_not_null(table, column_name, generate=True)
        return normalize_whatever_junk_peewee_migrations_gives_you(
            db, migrator, junk)
    elif is_mysql(db):
        op = pw.Clause(pw.SQL('ALTER TABLE'), pw.Entity(table),
                       pw.SQL('MODIFY'), qc.field_definition(field))
        return [qc.parse_node(op)]
    raise Exception('how do i add a not null for %s?' % db)
def outdated_queries(cls):
    # TODO: this will only find scheduled queries that were executed before. I think this is
    # a reasonable assumption, but worth revisiting.
    outdated_queries_ids = cls.select(
        peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
        .join(QueryResult) \
        .where(cls.ttl > 0,
               (QueryResult.retrieved_at +
                (cls.ttl * peewee.SQL("interval '1 second'"))) <
               peewee.SQL("(now() at time zone 'utc')"))

    queries = cls.select(cls, DataSource).join(DataSource) \
        .where(cls.id << outdated_queries_ids)

    return queries
class HouseNumber(VersionedModelEndpoint):
    endpoint = '/housenumber'
    model = models.HouseNumber
    filters = ['number', 'ordinal', 'parent', 'postcode', 'ancestors', 'group']
    order_by = [
        peewee.SQL('number ASC NULLS FIRST'),
        peewee.SQL('ordinal ASC NULLS FIRST')
    ]

    def filter_group(self, qs):
        values = request.args.getlist('group')
        if values:
            field = getattr(self.model, 'parent')
            try:
                values = list(map(field.coerce, values))
            except ValueError:
                abort(400, error='Invalid value for filter {}'.format('group'))
            except peewee.DoesNotExist:
                # Return an empty collection as the fk is not found.
                return None
            qs = qs.where(field << values)
        return qs

    def filter_ancestors(self, qs):
        # ancestors is a m2m so we cannot use the basic filtering
        # from self.filters.
        ancestors = request.args.getlist('ancestors')
        values = list(map(self.model.ancestors.coerce, ancestors))
        if values:
            m2m = self.model.ancestors.get_through_model()
            qs = (qs.join(m2m, on=(m2m.housenumber == self.model.pk)))
            # We evaluate the qs ourselves here, because it's a CompoundSelect
            # that does not know about our SelectQuery custom methods (like
            # `serialize`), and CompoundSelect is hardcoded in peewee
            # SelectQuery, and we'd need to copy-paste code to be able to use
            # a custom CompoundQuery class instead.
            mask = self.get_collection_mask()
            qs = [h.serialize(mask) for h in qs.order_by(*self.order_by)]
        return qs

    def get_queryset(self):
        qs = super().get_queryset()
        bbox = get_bbox(request.args)
        if bbox:
            qs = (qs.join(models.Position).where(
                models.Position.center.in_bbox(**bbox)).group_by(
                    models.HouseNumber.pk).order_by(models.HouseNumber.pk))
        return qs
def _load_targets(self, RModel):
    """Load data from the intermediate table to targetdb.target."""

    log.debug('loading data into targetdb.target.')

    n_inserted = (tdb.Target.insert_from(
        cdb.Catalog.select(
            cdb.Catalog.catalogid,
            cdb.Catalog.ra,
            cdb.Catalog.dec,
            cdb.Catalog.pmra,
            cdb.Catalog.pmdec,
            cdb.Catalog.parallax,
            peewee.Value(EPOCH),
        ).join(
            RModel,
            on=(cdb.Catalog.catalogid == RModel.catalogid)
        ).where(RModel.selected >> True).where(~peewee.fn.EXISTS(
            tdb.Target.select(peewee.SQL('1')).where(
                tdb.Target.catalogid == RModel.catalogid))),
        [
            tdb.Target.catalogid,
            tdb.Target.ra,
            tdb.Target.dec,
            tdb.Target.pmra,
            tdb.Target.pmdec,
            tdb.Target.parallax,
            tdb.Target.epoch,
        ],
    ).returning().execute())

    log.info(f'Inserted {n_inserted:,} new rows into targetdb.target.')

    return
class Discovery(BaseModel):
    """ A discovered device """
    id = peewee.PrimaryKeyField(constraints=[peewee.SQL('AUTOINCREMENT')])
    scan = peewee.ForeignKeyField(Scan, backref='discoveries')
    device = peewee.ForeignKeyField(Device, backref='discoveries')
    ip_address = peewee.TextField()
    hostname = peewee.TextField(null=True)
def build_query(self, version_id, query_region=None):
    sample = (Catalog.select(
        Catalog.catalogid,
        Catalog.ra,
        Catalog.dec,
        Gaia.phot_g_mean_mag).join(CatalogToTIC_v8).join(TIC_v8).join(
            Gaia).join(Gaia_Clean).where(
                (Gaia.phot_g_mean_mag > self.parameters['g_min']) &
                (Gaia.phot_g_mean_mag < self.parameters['g_max'])).where(
                    Catalog.version_id == version_id,
                    CatalogToTIC_v8.version_id == version_id))

    if query_region:
        sample = (sample.where(
            peewee.fn.q3c_radial_query(Catalog.ra, Catalog.dec,
                                       query_region[0], query_region[1],
                                       query_region[2])))

    sample = sample.cte('sample')

    # We should use q3c_join_pm and q3c_dist_pm here.
    subq = (Gaia.select(Gaia.source_id).where(
        peewee.fn.q3c_join(sample.c.ra, sample.c.dec, Gaia.ra, Gaia.dec,
                           self.parameters['min_separation'])).order_by(
                               peewee.fn.q3c_dist(
                                   sample.c.ra, sample.c.dec,
                                   Gaia.ra, Gaia.dec).asc()).limit(1).offset(1))

    decollided = (sample.select(sample.c.catalogid).join(
        subq, peewee.JOIN.LEFT_LATERAL, on=peewee.SQL('true')).where(
            subq.c.source_id.is_null()).with_cte(sample))

    return decollided
class Meta:
    indexes = ((('room_id', 'event_time', 'building'), True), )
    constraints = [
        peewee.SQL('FOREIGN KEY(room_id, building) '
                   'REFERENCES room(room_num, building) '
                   'ON DELETE CASCADE ON UPDATE CASCADE')
    ]
class BBS(Base):
    name = peewee.TextField(unique=True)
    address = peewee.TextField()
    port = peewee.IntegerField()
    method = peewee.TextField()
    created_date = peewee.DateTimeField(
        constraints=[peewee.SQL('DEFAULT CURRENT_TIMESTAMP')])
def recent(cls, user_id=None, limit=20):
    query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
        join(Event, on=(Dashboard.id == peewee.SQL("t2.object_id::integer"))). \
        where(Event.action << ('edit', 'view')). \
        where(~(Event.object_id >> None)). \
        where(Event.object_type == 'dashboard'). \
        where(Dashboard.is_archived == False). \
        group_by(Event.object_id, Dashboard.id). \
        order_by(peewee.SQL("count(0) desc"))

    if user_id:
        query = query.where(Event.user == user_id)

    query = query.limit(limit)

    return query
def add_constraint(self, value):
    """
    Add a constraint to the model.

    :param value: String value of constraint.
    :return: None
    """
    self.model._meta.constraints.append(peewee.SQL(value))
def test_constraint():
    tc = TableCreator('awesome')
    tc.column('char', 'fname')

    const = peewee.SQL('fname not null')
    tc.add_constraint(const)

    assert tc.model._meta.constraints == [const]
def recent(cls, user_id=None, limit=20):
    # TODO: instead of t2 here, we should define table_alias for Query table
    query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
        join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))). \
        where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')). \
        where(~(Event.object_id >> None)). \
        where(Event.object_type == 'query'). \
        where(cls.is_archived == False). \
        group_by(Event.object_id, Query.id). \
        order_by(peewee.SQL("count(0) desc"))

    if user_id:
        query = query.where(Event.user == user_id)

    query = query.limit(limit)

    return query
def add_not_null(db, migrator, table, column_name, field):
    cmds = []
    compiler = db.compiler()

    if field.default is not None:
        # if default is a function, turn it into a value;
        # this won't work on columns requiring uniqueness, like UUIDs,
        # as all rows will share the same called value
        default = field.default() if hasattr(field.default, '__call__') else field.default
        op = pw.Clause(pw.SQL('UPDATE'), pw.Entity(table), pw.SQL('SET'),
                       field.as_entity(), pw.SQL('='), default,
                       pw.SQL('WHERE'), field.as_entity(), pw.SQL('IS NULL'))
        cmds.append(compiler.parse_node(op))

    if is_postgres(db) or is_sqlite(db):
        junk = migrator.add_not_null(table, column_name, generate=True)
        cmds += normalize_whatever_junk_peewee_migrations_gives_you(
            migrator, junk)
        return cmds
    elif is_mysql(db):
        op = pw.Clause(pw.SQL('ALTER TABLE'), pw.Entity(table),
                       pw.SQL('MODIFY'), compiler.field_definition(field))
        cmds.append(compiler.parse_node(op))
        return cmds

    raise Exception('how do i add a not null for %s?' % db)
def set_default(db, migrator, table_name, column_name, field):
    default = field.default
    if callable(default):
        default = default()

    migration = (
        migrator.make_context()
        .literal('UPDATE ').sql(pw.Entity(table_name))
        .literal(' SET ').sql(pw.Expression(pw.Entity(column_name), pw.OP.EQ,
                                            field.db_value(default), flat=True))
        .literal(' WHERE ').sql(pw.Expression(pw.Entity(column_name), pw.OP.IS,
                                              pw.SQL('NULL'), flat=True))
    )
    return extract_query_from_migration(migration)
class DataCode(pw.Model):
    id = pw.IntegerField(primary_key=True, constraints=[pw.SQL("AUTOINCREMENT")])
    label = pw.CharField()
    code = pw.CharField()

    class Meta:
        database = db
        table_name = "data_code"
def paginate_query(self, query, count, offset=None, sort=None):
    """
    Apply pagination to query

    :attr query: Instance of `peewee.Query`
    :attr count: Max rows to return
    :attr offset: Pagination offset, str/int
    :attr sort: List of tuples, e.g. [('id', 'asc')]
    :returns: Instance of `peewee.Query`
    """
    assert isinstance(query, peewee.Query)
    assert isinstance(count, int)
    assert isinstance(offset, (str, int, type(None)))
    assert isinstance(sort, (list, set, tuple, type(None)))

    # ensure our model has a primary key
    fields = query.model._meta.get_primary_keys()
    if len(fields) == 0:
        raise peewee.ProgrammingError(
            'Cannot apply pagination on model without primary key')

    # ensure our model doesn't use a compound primary key
    if len(fields) > 1:
        raise peewee.ProgrammingError(
            'Cannot apply pagination on model with compound primary key')

    # apply offset
    if offset is not None:
        query = query.where(fields[0] >= offset)

    # do we need to apply sorting?
    order_bys = []
    if sort:
        for field, direction in sort:
            # does this field have a valid sort direction?
            if not isinstance(direction, str):
                raise ValueError(
                    "Invalid sort direction on field '{}'".format(field))

            direction = direction.lower().strip()
            if direction not in ['asc', 'desc']:
                raise ValueError(
                    "Invalid sort direction on field '{}'".format(field))

            # apply sorting
            order_by = peewee.SQL(field)
            order_by = getattr(order_by, direction)()
            order_bys += [order_by]

    # add primary key ordering after user sorting
    order_bys += [fields[0].asc()]

    # apply ordering and limits
    query = query.order_by(*order_bys)
    query = query.limit(count)

    return query
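A minimal usage sketch for paginate_query; the owning object (here called `api`) and the `User` model with an `id` primary key and a `name` field are assumptions for illustration.

# Illustration only: `api` and `User` are placeholder names.
page = api.paginate_query(User.select(), count=25, offset=100,
                          sort=[('name', 'asc')])
for user in page:
    print(user.id, user.name)
# Keyset-style paging: `offset` is compared against the primary key
# (id >= 100 here), so pass the last seen id to fetch the next page.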
class Comment(BaseModel):
    id = pw.AutoField(primary_key=True)
    content = pw.TextField(null=False)
    image = pw.CharField(null=True)
    creation_date = pw.DateField(constraints=[pw.SQL('DEFAULT CURRENT_TIMESTAMP')])
    answer = pw.BooleanField(null=False, default=False)
    ticket_id = pw.ForeignKeyField(Ticket, backref="comment_to_ticket")
    author_id = pw.ForeignKeyField(Client, backref="comment_from_author")
    task_id = pw.ForeignKeyField(Task, backref="comment_to_task")
class Meta:
    indexes = (
        (('record', 'page'), True),
    )
    constraints = (peewee.SQL('UNIQUE (record, page)'), )
def recent(cls, groups, user_id=None, limit=20):
    query = cls.select(Query, User).where(Event.created_at > peewee.SQL("current_date - 7")). \
        join(Event, on=(Query.id == Event.object_id.cast('integer'))). \
        join(DataSourceGroup, on=(Query.data_source == DataSourceGroup.data_source)). \
        switch(Query).join(User). \
        where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')). \
        where(~(Event.object_id >> None)). \
        where(Event.object_type == 'query'). \
        where(DataSourceGroup.group << groups). \
        where(cls.is_archived == False). \
        group_by(Event.object_id, Query.id, User.id). \
        order_by(peewee.SQL("count(0) desc"))

    if user_id:
        query = query.where(Event.user == user_id)

    query = query.limit(limit)

    return query
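A hedged usage sketch, assuming `recent` is a classmethod on the Query model shown in these snippets; `current_user_groups` and `current_user_id` are illustration-only names standing in for values from the request/session layer.

# Illustration only: the caller-side names below are assumptions.
recent_queries = Query.recent(current_user_groups,
                              user_id=current_user_id,
                              limit=10)
for q in recent_queries:
    print(q.id)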
class Ticket(BaseModel):
    id = pw.AutoField(primary_key=True)
    name = pw.CharField(null=False)
    description = pw.TextField(null=False)
    closed = pw.BooleanField(null=False, default=False)
    creation_date = pw.DateField(
        null=False, constraints=[pw.SQL('DEFAULT CURRENT_TIMESTAMP')])
    image = pw.CharField(null=True)
    author_id = pw.ForeignKeyField(Client, backref="ticket_by")
    product_id = pw.ForeignKeyField(Product, backref="ticket_for_product")
class Task(BaseModel):
    id = pw.AutoField(primary_key=True)
    title = pw.CharField(null=False)
    description = pw.TextField(null=True)
    completion_date = pw.CharField(null=True)
    creation_date = pw.DateField(
        constraints=[pw.SQL('DEFAULT CURRENT_TIMESTAMP')])
    state_id = pw.ForeignKeyField(Task_State, default=DEFAULT_STATE)
    worker_id = pw.ForeignKeyField(Client, backref="worked_task")
    creator_id = pw.ForeignKeyField(Client, backref="created_task")