class User(AscensionModel):
    """A Telegram user in the deposit/referral programme.

    Stores the user's own deposit and balance plus cached totals of the
    deposits made by partners on three referral levels.
    """

    chat_id = peewee.IntegerField(primary_key=True)  # Telegram chat id
    # The inviting user; this user's own invitees are reachable via the
    # `partners` backref.
    referral = peewee.ForeignKeyField('self', backref='partners', null=True,
                                      on_delete='SET NULL')
    deposit = peewee.FloatField(default=0)
    balance = peewee.FloatField(default=0)
    sum_deposit_reward = peewee.FloatField(default=0)
    first_level_partners_deposit = peewee.FloatField(default=0)
    second_level_partners_deposit = peewee.FloatField(default=0)
    third_level_partners_deposit = peewee.FloatField(default=0)
    wallet = peewee.CharField(max_length=40, null=True, unique=True)
    username = peewee.CharField(max_length=40, null=True)
    first_name = peewee.CharField(max_length=40)
    last_name = peewee.CharField(max_length=40, null=True)
    # BUGFIX: pass the callable itself, not its result. The original
    # `default=datetime.datetime.now()` evaluated once at import time and
    # stamped every new row with that same moment.
    created_at = DateTimeField(default=datetime.datetime.now)

    def __str__(self):
        """Prefer '@username'; otherwise 'first_name[ last_name]'."""
        if self.username:
            return f'@{self.username}'
        if self.last_name:
            return f'{self.first_name} {self.last_name}'
        return f'{self.first_name}'

    @hybrid_property
    def deposit_reward(self):
        """Reward rate of the best tariff this user's deposit qualifies for."""
        if self.deposit >= tariffs.tariff_deposit(tariffs.GOLD_TARIFF_INDEX):
            return tariffs.tariff_reward(tariffs.GOLD_TARIFF_INDEX)
        if self.deposit >= tariffs.tariff_deposit(tariffs.SILVER_TARIFF_INDEX):
            return tariffs.tariff_reward(tariffs.SILVER_TARIFF_INDEX)
        if self.deposit >= tariffs.tariff_deposit(tariffs.BRONZE_TARIFF_INDEX):
            return tariffs.tariff_reward(tariffs.BRONZE_TARIFF_INDEX)
        return tariffs.tariff_reward(tariffs.NO_TARIFF_INDEX)

    @hybrid_property
    def first_level_deposit(self):
        # Python-side placeholder; the real value comes from the SQL
        # expression below.
        return 0

    @first_level_deposit.expression
    def first_level_deposit(cls):
        """Subquery: total deposit of this user's direct partners."""
        return User.select(fn.SUM(User.deposit)).where(User.referral == cls)

    @deposit_reward.expression
    def deposit_reward(cls):
        """SQL CASE mirroring the Python-side tariff selection.

        Branches are evaluated in order, so the inclusive BETWEEN bounds
        cannot double-match: the >= GOLD branch wins first.
        """
        return Case(
            None,
            expression_tuples=[
                (cls.deposit.__ge__(
                    tariffs.tariff_deposit(tariffs.GOLD_TARIFF_INDEX)),
                 tariffs.tariff_reward(tariffs.GOLD_TARIFF_INDEX)),
                (cls.deposit.between(
                    tariffs.tariff_deposit(tariffs.SILVER_TARIFF_INDEX),
                    tariffs.tariff_deposit(tariffs.GOLD_TARIFF_INDEX)),
                 tariffs.tariff_reward(tariffs.SILVER_TARIFF_INDEX)),
                (cls.deposit.between(
                    tariffs.tariff_deposit(tariffs.BRONZE_TARIFF_INDEX),
                    tariffs.tariff_deposit(tariffs.SILVER_TARIFF_INDEX)),
                 tariffs.tariff_reward(tariffs.BRONZE_TARIFF_INDEX)),
            ],
            default=tariffs.tariff_reward(tariffs.NO_TARIFF_INDEX))

    @property
    def partners_per_levels(self):
        """Return partners grouped by referral depth: [level1, level2, level3]."""
        first_level = list(User.select().where(User.referral == self))
        first_ids = [p.chat_id for p in first_level]

        second_level = list(User.select().where(User.referral << first_ids))
        second_ids = [p.chat_id for p in second_level]

        third_level = list(User.select().where(User.referral << second_ids))

        return [first_level, second_level, third_level]
class QueryResult(BaseModel):
    """A cached result set for a query executed against a data source."""

    id = peewee.PrimaryKeyField()
    data_source = peewee.ForeignKeyField(DataSource)
    query_hash = peewee.CharField(max_length=32, index=True)
    query = peewee.TextField()
    data = peewee.TextField()
    runtime = peewee.FloatField()
    retrieved_at = DateTimeTZField()

    class Meta:
        db_table = 'query_results'

    def to_dict(self):
        """Serialize for the API layer; `data` is stored as JSON text."""
        return {
            'id': self.id,
            'query_hash': self.query_hash,
            'query': self.query,
            'data': json.loads(self.data),
            'data_source_id': self._data.get('data_source', None),
            'runtime': self.runtime,
            'retrieved_at': self.retrieved_at,
        }

    @classmethod
    def unused(cls):
        """Results older than one week that no query references any more."""
        cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
        return cls.select()\
            .where(Query.id == None, cls.retrieved_at < cutoff)\
            .join(Query, join_type=peewee.JOIN_LEFT_OUTER)

    @classmethod
    def get_latest(cls, data_source, query, max_age=0):
        """Newest cached result for (data_source, query).

        max_age == -1 means any age is acceptable; otherwise the result must
        be at most `max_age` seconds old (checked server-side in UTC).
        """
        query_hash = utils.gen_query_hash(query)

        if max_age == -1:
            candidates = cls.select().where(
                cls.query_hash == query_hash,
                cls.data_source == data_source,
            ).order_by(cls.retrieved_at.desc())
        else:
            candidates = cls.select().where(
                cls.query_hash == query_hash,
                cls.data_source == data_source,
                peewee.SQL(
                    "retrieved_at + interval '%s second' >= now() at time zone 'utc'",
                    max_age),
            ).order_by(cls.retrieved_at.desc())

        return candidates.first()

    @classmethod
    def store_result(cls, data_source_id, query_hash, query, data, run_time,
                     retrieved_at):
        """Persist a new result row and point matching queries at it."""
        query_result = cls.create(query_hash=query_hash,
                                  query=query,
                                  runtime=run_time,
                                  data_source=data_source_id,
                                  retrieved_at=retrieved_at,
                                  data=data)

        logging.info("Inserted query (%s) data; id=%s", query_hash,
                     query_result.id)

        sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
        cursor = db.database.execute_sql(
            sql, params=(query_result.id, query_hash, data_source_id))
        query_ids = [row[0] for row in cursor]

        # TODO: once a peewee release supports UPDATE ... RETURNING, replace
        # the raw SQL above with
        # Query.update(latest_query_data=query_result)
        #      .where(Query.query_hash == query_hash,
        #             Query.data_source == data_source_id).execute()
        logging.info("Updated %s queries with result (%s).", len(query_ids),
                     query_hash)

        return query_result, query_ids

    def __unicode__(self):
        return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
class hardwareTable(MySQLModel):
    """One (id, value) sample row of the existing ``hardwareTable`` MySQL table."""

    class Meta:
        db_table = 'hardwareTable'

    # NOTE(review): plain IntegerField without primary_key=True — confirm the
    # intended primary key of the underlying table.
    id = pw.IntegerField()
    value = pw.FloatField()
class NullModel(TestModel):
    """Test fixture: one nullable column for each basic peewee field type."""

    char_field = peewee.CharField(null=True)
    text_field = peewee.TextField(null=True)
    datetime_field = peewee.DateTimeField(null=True)
    int_field = peewee.IntegerField(null=True)
    float_field = peewee.FloatField(null=True)
class StationInfo(StationCommon, BaseModel):
    """ Holds "permanent" station information """

    # Part of the composite primary key; presumably the acquisition
    # timestamp of this snapshot — TODO confirm units/epoch.
    moment = peewee.IntegerField()
    state = peewee.CharField()  # TODO: "Operative"/"Work in progress"/.../?
    name = peewee.CharField()
    # Parsed from the API's 'type' yes/no string (see from_dict).
    stype = peewee.BooleanField()
    code = peewee.IntegerField()
    due_date = peewee.IntegerField(null=True)
    gps_latitude = peewee.FloatField()
    gps_longitude = peewee.FloatField()

    class Meta:
        primary_key = peewee.CompositeKey('moment', 'code')

    def __repr__(self):
        return "{0}({1}, {2}, {3}, {4}, {5}, {6}, {7}, {8})".format(
            __class__.__name__, self.moment, self.state, self.name,
            self.stype, self.code, self.due_date, self.gps_latitude,
            self.gps_longitude)

    def has_changed(self, other):
        """ Compare everything except moment and code """
        return (self.state != other.state
                or self.name != other.name
                or self.stype != other.stype
                or self.due_date != other.due_date
                or self.gps_latitude != other.gps_latitude
                or self.gps_longitude != other.gps_longitude)

    @classmethod
    def from_dict(cls, moment, data):
        """ Builds an object from a dictionary :
            {
                'state': 'Operative',
                'name': 'Assas - Luxembourg',
                'type': 'yes',
                'code': '6008',
                'dueDate': 1514761200.0,
                'gps': {
                    'longitude': 2.333428381875887,
                    'latitude': 48.84373446877937
                }
            }

        Raises ApiParsingException on any missing key or unparsable value.
        """
        try:
            return cls(
                moment=moment,
                state=data['state'],
                name=data['name'],
                stype=VelibMetropoleApi.bool_from_yes_no_str(data['type']),
                code=int(data['code']),
                gps_latitude=float(data['gps']['latitude']),
                gps_longitude=float(data['gps']['longitude']),
                # FIX: due_date is None seen on 2018-01-07 10:09
                # {
                #     'name': 'Saint-Fargeau - Mortier',
                #     'code': '20117',
                #     'type': 'yes',
                #     'dueDate': None,
                #     'gps': {
                #         'latitude': 48.872747269036246,
                #         'longitude': 2.408203454302088
                #     },
                #     'state': 'Operative'
                # }
                due_date=int(data['dueDate'])
                if data['dueDate'] is not None else None)
        except (TypeError, KeyError, ValueError,
                arrow.parser.ParserError) as exception:
            logging.warning("Input station information: %s", data)
            raise ApiParsingException(
                "Cannot build station information: ({0}) {1}".format(
                    type(exception).__name__, exception))
def _load_carton_to_target(self, RModel):
    """Populate targetdb.carton_to_target.

    Builds one SELECT over the temporary carton table ``RModel`` that
    gathers, per selected target: target_pk, carton_pk, cadence_pk,
    priority, value, instrument_pk, delta_ra, delta_dec, inertial and
    lambda_eff, then bulk-inserts it into targetdb.carton_to_target,
    skipping pairs already present for this carton/version.

    Raises TargetSelectionError on inconsistent cadence configuration or
    unparsable cadence names, and RuntimeError if no instrument is defined.
    """

    log.debug('Loading data into targetdb.carton_to_target.')

    def _n_exposures(cadence_label):
        # Extract the 'NxM' payload from a cadence label (e.g. 'dark_2x4')
        # and return N * M.
        try:
            payload = [s for s in cadence_label.split('_')
                       if 'x' in s][0].split('x')
        except IndexError:
            # BUGFIX: the original did `raise (message, self.cadence)`,
            # which raises TypeError (a tuple is not an exception).
            raise TargetSelectionError(
                f'Uninterpretable cadence name: {cadence_label}')
        return int(numpy.multiply(*map(int, payload)))

    version_pk = tdb.Version.get(
        plan=self.plan,
        tag=self.tag,
        target_selection=True,
    )
    carton_pk = tdb.Carton.get(carton=self.name, version_pk=version_pk).pk

    Target = tdb.Target
    CartonToTarget = tdb.CartonToTarget

    # Selected targets not yet linked to this carton/version.
    select_from = (RModel
                   .select(Target.pk, carton_pk)
                   .join(Target, on=(Target.catalogid == RModel.catalogid))
                   .where(RModel.selected >> True)
                   .where(~peewee.fn.EXISTS(
                       CartonToTarget.select(peewee.SQL('1'))
                       .join(tdb.Carton)
                       .where(CartonToTarget.target_pk == Target.pk,
                              CartonToTarget.carton_pk == carton_pk,
                              tdb.Carton.version_pk == version_pk))))

    if self.cadence is not None:
        # A carton-level cadence and a per-target cadence column are
        # mutually exclusive.
        if 'cadence' in RModel._meta.fields:
            if RModel.select().where(~(RModel.cadence >> None)).exists():
                raise TargetSelectionError('both carton cadence and target '
                                           'cadence defined. This is not '
                                           'allowed.')
        cadence_pk = tdb.Cadence.get(label=self.cadence)
        select_from = select_from.select_extend(cadence_pk)
        if not self.value:
            # Derive value as n_epochs * n_exposures_per_epoch from the
            # carton cadence name.
            self.value = float(_n_exposures(self.cadence))
    else:
        if not RModel.select().where(~(RModel.cadence >> None)).exists():
            # All cadences are null: emit a NULL literal and save a
            # costly join.
            select_from = select_from.select_extend(peewee.SQL('null'))
        else:
            select_from = (select_from
                           .select_extend(tdb.Cadence.pk)
                           .switch(RModel)
                           .join(tdb.Cadence,
                                 'LEFT OUTER JOIN',
                                 on=(tdb.Cadence.label == RModel.cadence)))

    # Priority: per-target column unless a carton-level value is set.
    if self.priority is None:
        select_from = select_from.select_extend(RModel.priority)
    else:
        select_from = select_from.select_extend(self.priority)

    if self.value is not None:
        select_from = select_from.select_extend(self.value)
    else:
        # Use the per-row cadence to determine the value. First, if there
        # is no user-defined value column, create it.
        if 'value' not in RModel._meta.columns:
            self.database.execute_sql(f'ALTER TABLE {self.path} '
                                      'ADD COLUMN value REAL;')
            # Add the field manually instead of calling get_model():
            # the temporary table is locked here so reflection won't work.
            RModel._meta.add_field('value', peewee.FloatField())

        # value = n_epochs * n_exposures_per_epoch parsed from each row's
        # cadence name. Because we set self.value above when the carton has
        # a single cadence, reaching here implies a cadence column exists.
        data = numpy.array(
            RModel.select(RModel.catalogid, RModel.cadence)
            .where(RModel.cadence.is_null(False)).tuples())

        if data.size > 0:
            values = tuple(_n_exposures(cadence) for cadence in data[:, 1])
            catalogid_values = zip(map(int, data[:, 0]), values)
            vl = peewee.ValuesList(catalogid_values,
                                   columns=('catalogid', 'value'),
                                   alias='vl')
            (RModel.update(value=vl.c.value)
             .from_(vl)
             .where(RModel.catalogid == vl.c.catalogid)
             .where(RModel.value.is_null())).execute()

        select_from = select_from.select_extend(RModel.value)

    # Instrument: per-target column if present, else the carton instrument.
    if 'instrument' in RModel._meta.columns:
        select_from = (select_from
                       .select_extend(tdb.Instrument.pk)
                       .switch(RModel)
                       .join(tdb.Instrument,
                             'LEFT OUTER JOIN',
                             on=(tdb.Instrument.label == RModel.instrument)))
    elif self.instrument is not None:
        select_from = select_from.select_extend(
            tdb.Instrument.get(label=self.instrument).pk)
    else:
        raise RuntimeError(f'Instrument not defined for carton {self.name}')

    # Offsets and inertial flag: per-target columns when present, otherwise
    # defaults (0.0 offsets, inertial=False).
    for colname in ['delta_ra', 'delta_dec', 'inertial']:
        if colname in RModel._meta.columns:
            select_from = select_from.select_extend(
                RModel._meta.columns[colname])
        elif colname == 'inertial':
            select_from = select_from.select_extend(peewee.Value(False))
        else:
            select_from = select_from.select_extend(peewee.Value(0.0))

    # lambda_eff: per-target column, else the instrument's default.
    if 'lambda_eff' in RModel._meta.columns:
        select_from = select_from.select_extend(
            RModel._meta.columns['lambda_eff'])
    else:
        if self.instrument is not None:
            instrument = self.instrument
        else:
            instrument = RModel.instrument
        select_from = select_from.select_extend(
            tdb.Instrument.select(tdb.Instrument.default_lambda_eff)
            .where(tdb.Instrument.label == instrument))

    # Now do the insert.
    n_inserted = (CartonToTarget
                  .insert_from(select_from,
                               [CartonToTarget.target_pk,
                                CartonToTarget.carton_pk,
                                CartonToTarget.cadence_pk,
                                CartonToTarget.priority,
                                CartonToTarget.value,
                                CartonToTarget.instrument_pk,
                                CartonToTarget.delta_ra,
                                CartonToTarget.delta_dec,
                                CartonToTarget.inertial,
                                CartonToTarget.lambda_eff])
                  .returning().execute())

    log.info(
        f'Inserted {n_inserted:,} rows into targetdb.carton_to_target.')
class Authors(BaseModel):
    """An author together with a precomputed PageRank score."""

    id = pw.PrimaryKeyField()
    name = pw.TextField()
    # PageRank score — presumably computed offline over some graph of
    # authors; producer not visible here, TODO confirm.
    pagerank = pw.FloatField()
class SsNodeInfoLog(AsyncBaseModel):
    """Periodic status sample recorded for a Shadowsocks node."""

    node = peewee.ForeignKeyField(SS_Node, related_name='ss_node_info_logs')
    uptime = peewee.FloatField()  # units not shown here (seconds?) — confirm
    load = peewee.CharField(max_length=64)  # load description string
    log_time = peewee.IntegerField()  # presumably a Unix timestamp — confirm
class PartyResult(BaseModel):
    """A single party's numeric result within one vote."""

    party = peewee.CharField()
    result = peewee.FloatField()  # presumably a share/percentage — confirm
    vote = peewee.ForeignKeyField(Vote)
class Product(BaseModel):
    """A stocked product, uniquely identified by its name."""

    product_name = pw.CharField(unique=True, null=False)
    # Float rather than int — presumably to allow fractional quantities
    # (e.g. weight-based stock); confirm.
    stock = pw.FloatField(null=False)
class RegistroMensal(BaseModel):
    """Monthly record: a date, a total amount and a head count."""

    id = peewee.PrimaryKeyField()
    data = peewee.DateField()  # date of the record
    total = peewee.FloatField()  # total amount
    numero_pessoas = peewee.IntegerField()  # number of people
class Record(BaseModel):
    """One weight/height measurement for a user, with its BMI."""

    user = pw.ForeignKeyField(User, backref="records")
    weight = pw.FloatField(null=False)
    height = pw.FloatField(null=False)
    # Stored, not derived — assumed to be computed from weight/height by
    # the caller before saving; TODO confirm.
    bmi = pw.FloatField(null=False)
class User(peewee.Model):
    """A device seen by the captive portal, keyed by MAC address, together
    with its browser/OS metadata and registration state."""

    class Meta:
        database = portal_db

    # ident-related fields
    hw_addr = peewee.CharField(primary_key=True)
    ip_addr = peewee.IPField()

    # metadata
    platform = peewee.CharField(null=True)
    system = peewee.CharField(null=True)
    system_version = peewee.FloatField(null=True)
    browser = peewee.CharField(null=True)
    browser_version = peewee.FloatField(null=True)
    language = peewee.CharField(null=True)

    # registration-related fields
    last_seen_on = peewee.DateTimeField(default=datetime.datetime.now)
    registered_on = peewee.DateTimeField(null=True)

    @property
    def is_registered(self):
        """Registration happened in the past and has not timed out yet."""
        if not self.registered_on:
            return False
        now = datetime.datetime.now()
        if now <= self.registered_on:
            return False
        elapsed = (now - self.registered_on).total_seconds()
        return elapsed < REGISTRATION_TIMEOUT

    @property
    def is_being_registered(self):
        """ has started but not completed registration process """
        if not self.registered_on:
            return False
        return self.registered_on > datetime.datetime.now()

    @property
    def is_active(self):
        return is_active(self.ip_addr)

    @property
    def is_apple(self):
        return self.platform in ("macos", "iphone", "ipad")

    @property
    def is_recent_android(self):
        if self.system == "Android" and self.system_version >= 7:
            return True
        # Chrome-on-Linux is treated the same as a recent Android device.
        return self.platform == "linux" and self.browser == "chrome"

    def register(self, delay=0):
        """Mark as registered, optionally `delay` seconds in the future."""
        offset = datetime.timedelta(seconds=delay)
        self.registered_on = datetime.datetime.now() + offset
        self.save()

    @classmethod
    def create_or_update(cls, hw_addr, ip_addr, extras):
        """Upsert a user by MAC, refreshing IP, last-seen and any `extras`
        keys that match model attributes."""
        seen_at = datetime.datetime.now()
        data = {"ip_addr": ip_addr, "last_seen_on": seen_at}
        user, _created = cls.get_or_create(hw_addr=hw_addr, defaults=data)
        extras.update(data)
        for key, value in extras.items():
            if hasattr(user, key):
                setattr(user, key, value)
        user.save()
        return user
class Machine(db.Model):
    """A machine of a given Type with an optional wattage rating."""

    name = pw.CharField(max_length=60, null=False)
    wattage = pw.FloatField(null=True)  # power draw; units not shown — confirm
    type = pw.ForeignKeyField(Type, related_name='machines')
class Markets(CustomModel):
    """A tradeable market (exchange/symbol pair) with cached trading limits,
    precision, margin/active flags and latest prices."""

    exchange = peewee.CharField(null=True)
    symbol = peewee.CharField(null=True)
    tag = peewee.CharField(null=True, default="all")
    favorite = peewee.BooleanField(null=True, default=False)
    # limit_min_price = peewee.FloatField(null=True)
    # NOTE(review): the column above is commented out, yet update_data()
    # assigns self.limit_min_price — that value is never persisted; confirm.
    limit_max_price = peewee.FloatField(null=True)
    limit_min_amount = peewee.FloatField(null=True)
    limit_max_amount = peewee.FloatField(null=True)
    limit_min_cost = peewee.FloatField(null=True)
    precision_price = peewee.SmallIntegerField(null=True)
    margin = peewee.BooleanField(null=True, default=False)
    actived = peewee.BooleanField(null=True)
    # prices
    ask_price = peewee.FloatField(null=True)
    bid_price = peewee.FloatField(null=True)
    last_price = peewee.FloatField(null=True)

    class Meta:
        db_table = 'markets'

    def toggle_fav(self):
        """Flip the favorite flag and persist it."""
        self.favorite = not self.favorite
        self.save()

    @classmethod
    def get_all_by_exchange(cls, exchange):
        """All markets listed on `exchange`."""
        return cls.select().where(cls.exchange == exchange)

    @classmethod
    def get_symbol_by_exchange(cls, symbol, exchange):
        """The single market matching (exchange, symbol)."""
        return cls.get(cls.exchange == exchange, cls.symbol == symbol)

    @classmethod
    def check_symbol_is_fav(cls, symbol, exchange):
        """True if the (exchange, symbol) market is marked favorite."""
        item = cls.get_symbol_by_exchange(symbol, exchange)
        return item.favorite

    @classmethod
    def check_symbol_is_margin(cls, symbol, exchange):
        """True if the (exchange, symbol) market supports margin."""
        item = cls.get_symbol_by_exchange(symbol, exchange)
        return item.margin

    def update_data(self, data):
        """Refresh limits/precision/margin/active from a market-info dict.

        The dict shape is assumed from the keys accessed here (looks like a
        ccxt market structure — TODO confirm against the caller).
        NOTE(review): limit_min_cost is never refreshed here.
        """
        self.limit_min_price = data['limits']['price']['min']
        self.limit_max_price = data['limits']['price']['max']
        self.limit_min_amount = data['limits']['amount']['min']
        self.limit_max_amount = data['limits']['amount']['max']
        self.precision_price = data['precision']['price']
        # Margin flag may come from either the top level or the raw 'info'.
        if data.get("margin"):
            self.margin = data.get("margin")
        if data['info'].get("margin"):
            self.margin = data['info'].get("margin")
        # pprint(data)
        self.actived = data['active']

    def update_prices(self, prices):
        """Cache the latest ask/bid/last prices from a ticker-like mapping."""
        self.ask_price = prices['ask']
        self.bid_price = prices['bid']
        self.last_price = prices['last']
class Video(BaseModel):
    """A video record keyed by an external integer id."""

    _id = pw.IntegerField(unique=True)  # external id (unique, not the PK)
    cid = pw.IntegerField()
    title = pw.TextField()
    thumbnail = pw.TextField()
    # Callable default: evaluated per row at insert time.
    updated = pw.FloatField(default=time.time)
def add_optical_magnitudes(self):
    """Adds ``gri`` magnitude columns.

    Magnitudes come from SDSS DR13 photometry when available, otherwise
    from PanSTARRS1 or Gaia DR2 photometry via published transformations;
    ``optical_prov`` records which source was used. A ``z`` column is also
    created and filled from SDSS to save a later query.
    """

    Model = self.RModel

    magnitudes = ['g', 'r', 'i', 'z']

    # Check if ALL the columns have already been created in the query.
    # If so, just return.
    if any([mag in Model._meta.columns for mag in magnitudes]):
        if not all([mag in Model._meta.columns for mag in magnitudes]):
            raise TargetSelectionError(
                'Some optical magnitudes are defined in the query '
                'but not all of them.')
        if 'optical_prov' not in Model._meta.columns:
            raise TargetSelectionError(
                'optical_prov column does not exist.')
        warnings.warn(
            'All optical magnitude columns are defined in the query.',
            TargetSelectionUserWarning)
        return

    # First create the columns. Also create z to speed things up. We won't
    # use transformations for z but we can use the initial query to populate
    # it and avoid doing the same query later when loading the magnitudes.
    for mag in magnitudes:
        self.database.execute_sql(
            f'ALTER TABLE {self.path} ADD COLUMN {mag} REAL;')
        Model._meta.add_field(mag, peewee.FloatField())

    self.database.execute_sql(
        f'ALTER TABLE {self.path} ADD COLUMN optical_prov TEXT;')
    Model._meta.add_field('optical_prov', peewee.TextField())

    # Step 1: join with sdss_dr13_photoobj and use SDSS magnitudes.
    with self.database.atomic():
        self.database.execute_sql('DROP TABLE IF EXISTS ' +
                                  self.table_name + '_sdss')
        temp_table = peewee.Table(self.table_name + '_sdss')
        (Model.select(
            Model.catalogid,
            cdb.SDSS_DR13_PhotoObj.psfmag_g,
            cdb.SDSS_DR13_PhotoObj.psfmag_r,
            cdb.SDSS_DR13_PhotoObj.psfmag_i,
            cdb.SDSS_DR13_PhotoObj.psfmag_z)
         .join(cdb.CatalogToSDSS_DR13_PhotoObj_Primary,
               on=(cdb.CatalogToSDSS_DR13_PhotoObj_Primary.catalogid ==
                   Model.catalogid))
         .join(cdb.SDSS_DR13_PhotoObj,
               on=(cdb.CatalogToSDSS_DR13_PhotoObj_Primary.target_id ==
                   cdb.SDSS_DR13_PhotoObj.objid))
         .where(cdb.CatalogToSDSS_DR13_PhotoObj_Primary.best >> True,
                cdb.CatalogToSDSS_DR13_PhotoObj_Primary.version_id ==
                self.get_version_id())
         .where(Model.selected >> True)
         .create_table(temp_table._path[0], temporary=True))

        # Only take rows where all three magnitudes are present.
        nrows = (Model.update({
            Model.g: temp_table.c.psfmag_g,
            Model.r: temp_table.c.psfmag_r,
            Model.i: temp_table.c.psfmag_i,
            Model.optical_prov: peewee.Value('sdss_psfmag')
        }).from_(temp_table).where(
            Model.catalogid == temp_table.c.catalogid).where(
                temp_table.c.psfmag_g.is_null(False) &
                temp_table.c.psfmag_r.is_null(False) &
                temp_table.c.psfmag_i.is_null(False))).execute()

    self.log.debug(f'{nrows:,} associated with SDSS magnitudes.')

    # Step 2: localise entries with empty magnitudes and use PanSTARRS1
    # transformations.

    # PS1 fluxes are in Janskys. We use stacked fluxes instead of mean
    # magnitudes since they are more complete on the faint end.
    ps1_g = 8.9 - 2.5 * peewee.fn.log(cdb.Panstarrs1.g_stk_psf_flux)
    ps1_r = 8.9 - 2.5 * peewee.fn.log(cdb.Panstarrs1.r_stk_psf_flux)
    ps1_i = 8.9 - 2.5 * peewee.fn.log(cdb.Panstarrs1.i_stk_psf_flux)

    # Use transformations to SDSS from Tonry et al. 2012 (section 3.2,
    # table 6).
    x = ps1_g - ps1_r
    ps1_sdss_g = 0.013 + 0.145 * x + 0.019 * x * x + ps1_g
    ps1_sdss_r = -0.001 + 0.004 * x + 0.007 * x * x + ps1_r
    ps1_sdss_i = -0.005 + 0.011 * x + 0.010 * x * x + ps1_i

    with self.database.atomic():
        self.database.execute_sql('DROP TABLE IF EXISTS ' +
                                  self.table_name + '_ps1')
        temp_table = peewee.Table(self.table_name + '_ps1')
        (Model.select(
            Model.catalogid,
            ps1_sdss_g.alias('ps1_sdss_g'),
            ps1_sdss_r.alias('ps1_sdss_r'),
            ps1_sdss_i.alias('ps1_sdss_i'))
         .join(cdb.CatalogToPanstarrs1,
               on=(cdb.CatalogToPanstarrs1.catalogid == Model.catalogid))
         .join(cdb.Panstarrs1,
               on=(cdb.CatalogToPanstarrs1.target_id ==
                   cdb.Panstarrs1.catid_objid))
         .where(Model.g.is_null() | Model.r.is_null() | Model.i.is_null())
         .where(Model.selected >> True)
         .where(cdb.CatalogToPanstarrs1.best >> True,
                cdb.CatalogToPanstarrs1.version_id == self.get_version_id())
         # Fluxes must be positive for the logarithm to be defined.
         .where(cdb.Panstarrs1.g_stk_psf_flux.is_null(False) &
                (cdb.Panstarrs1.g_stk_psf_flux > 0))
         .where(cdb.Panstarrs1.r_stk_psf_flux.is_null(False) &
                (cdb.Panstarrs1.r_stk_psf_flux > 0))
         .where(cdb.Panstarrs1.i_stk_psf_flux.is_null(False) &
                (cdb.Panstarrs1.i_stk_psf_flux > 0))
         .create_table(temp_table._path[0], temporary=True))

        nrows = (Model.update({
            Model.g: temp_table.c.ps1_sdss_g,
            Model.r: temp_table.c.ps1_sdss_r,
            Model.i: temp_table.c.ps1_sdss_i,
            Model.optical_prov: peewee.Value('sdss_psfmag_ps1')
        }).from_(temp_table).where(
            Model.catalogid == temp_table.c.catalogid).where(
                temp_table.c.ps1_sdss_g.is_null(False) &
                temp_table.c.ps1_sdss_r.is_null(False) &
                temp_table.c.ps1_sdss_i.is_null(False))).execute()

    self.log.debug(f'{nrows:,} associated with PS1 magnitudes.')

    # Step 3: localise entries with empty magnitudes and use Gaia
    # transformations from Evans et al (2018).
    gaia_G = cdb.Gaia_DR2.phot_g_mean_mag
    gaia_BP = cdb.Gaia_DR2.phot_bp_mean_mag
    gaia_RP = cdb.Gaia_DR2.phot_rp_mean_mag
    x = gaia_BP - gaia_RP
    x2 = x * x
    x3 = x * x * x
    gaia_sdss_g = (-1 * (0.13518 - 0.46245 * x - 0.25171 * x2 +
                         0.021349 * x3) + gaia_G)
    gaia_sdss_r = (-1 * (-0.12879 + 0.24662 * x - 0.027464 * x2 -
                         0.049465 * x3) + gaia_G)
    gaia_sdss_i = -1 * (-0.29676 + 0.64728 * x - 0.10141 * x2) + gaia_G

    with self.database.atomic():
        self.database.execute_sql('DROP TABLE IF EXISTS ' +
                                  self.table_name + '_gaia')
        temp_table = peewee.Table(self.table_name + '_gaia')
        (Model.select(
            Model.catalogid,
            gaia_sdss_g.alias('gaia_sdss_g'),
            gaia_sdss_r.alias('gaia_sdss_r'),
            gaia_sdss_i.alias('gaia_sdss_i'))
         .join(cdb.CatalogToTIC_v8,
               on=(cdb.CatalogToTIC_v8.catalogid == Model.catalogid))
         .join(cdb.TIC_v8)
         .join(cdb.Gaia_DR2)
         .where(Model.g.is_null() | Model.r.is_null() | Model.i.is_null())
         .where(Model.selected >> True)
         .where(cdb.CatalogToTIC_v8.best >> True,
                cdb.CatalogToTIC_v8.version_id == self.get_version_id())
         .where(cdb.Gaia_DR2.phot_g_mean_mag.is_null(False))
         .where(cdb.Gaia_DR2.phot_bp_mean_mag.is_null(False))
         .where(cdb.Gaia_DR2.phot_rp_mean_mag.is_null(False))
         .create_table(temp_table._path[0], temporary=True))

        nrows = (Model.update({
            Model.g: temp_table.c.gaia_sdss_g,
            Model.r: temp_table.c.gaia_sdss_r,
            Model.i: temp_table.c.gaia_sdss_i,
            Model.optical_prov: peewee.Value('sdss_psfmag_gaia')
        }).from_(temp_table).where(
            Model.catalogid == temp_table.c.catalogid).where(
                temp_table.c.gaia_sdss_g.is_null(False) &
                temp_table.c.gaia_sdss_r.is_null(False) &
                temp_table.c.gaia_sdss_i.is_null(False))).execute()

    self.log.debug(f'{nrows:,} associated with Gaia magnitudes.')

    # Finally, check if there are any rows in which at least some of the
    # magnitudes are null.
    n_empty = (Model.select()
               .where(Model.g.is_null() | Model.r.is_null() |
                      Model.i.is_null())
               .where(Model.selected >> True)
               .count())
    if n_empty > 0:
        warnings.warn(f'Found {n_empty} entries with empty magnitudes.',
                      TargetSelectionUserWarning)
class Digiccy(peewee.Model):
    """A purchased lot of a digital currency."""

    coin_type = SymbolField()
    purchased_at = DateTimeField()
    price = peewee.FloatField()  # presumably unit price at purchase — confirm
    amount = peewee.FloatField()
def init_db(con):
    """Initialise the application database.

    Creates all tables, seeds the default backup profile and settings,
    prunes old event-log entries, and applies pending schema migrations.
    """
    db.initialize(con)
    db.connect()
    db.create_tables([
        RepoModel, RepoPassword, BackupProfileModel, SourceFileModel,
        SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel,
        SchemaVersion
    ])

    if BackupProfileModel.select().count() == 0:
        default_profile = BackupProfileModel(name='Default')
        default_profile.save()

    # Create missing settings and update labels. Leave setting values untouched.
    for setting in get_misc_settings():
        s, created = SettingsModel.get_or_create(key=setting['key'],
                                                 defaults=setting)
        if created and setting['key'] == "use_dark_theme":
            s.value = bool(uses_dark_mode())
        if created and setting['key'] == "use_light_icon":
            s.value = bool(uses_dark_mode())
        s.label = setting['label']
        s.save()

    # Prune old log entries. NOTE(review): the window is 180 days although
    # the variable name says three months — kept as-is; reconcile separately.
    three_months_ago = datetime.now() - timedelta(days=180)
    # BUGFIX: peewee DELETE queries are lazy; without .execute() the
    # original built a query object and never deleted anything.
    EventLogModel.delete().where(
        EventLogModel.start_time < three_months_ago).execute()

    # Migrations
    # See http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#schema-migrations
    current_schema, created = SchemaVersion.get_or_create(
        id=1, defaults={'version': SCHEMA_VERSION})
    current_schema.save()

    # Fresh database or already up to date: nothing to migrate.
    if created or current_schema.version == SCHEMA_VERSION:
        return

    migrator = SqliteMigrator(con)

    if current_schema.version < 4:  # version 3 to 4
        _apply_schema_update(
            current_schema, 4,
            migrator.add_column(ArchiveModel._meta.table_name, 'duration',
                                pw.FloatField(null=True)),
            migrator.add_column(ArchiveModel._meta.table_name, 'size',
                                pw.IntegerField(null=True)))
    if current_schema.version < 5:
        _apply_schema_update(
            current_schema, 5,
            migrator.drop_not_null(WifiSettingModel._meta.table_name,
                                   'last_connected'),
        )
    if current_schema.version < 6:
        _apply_schema_update(
            current_schema, 6,
            migrator.add_column(EventLogModel._meta.table_name, 'repo_url',
                                pw.CharField(null=True)))
    if current_schema.version < 7:
        _apply_schema_update(
            current_schema, 7,
            migrator.rename_column(SourceFileModel._meta.table_name,
                                   'config_id', 'profile_id'),
            migrator.drop_column(EventLogModel._meta.table_name,
                                 'profile_id'),
            migrator.add_column(EventLogModel._meta.table_name, 'profile',
                                pw.CharField(null=True)))
    if current_schema.version < 8:
        _apply_schema_update(
            current_schema, 8,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'prune_keep_within',
                                pw.CharField(null=True)))
    if current_schema.version < 9:
        _apply_schema_update(
            current_schema, 9,
            migrator.add_column(
                BackupProfileModel._meta.table_name, 'new_archive_name',
                pw.CharField(
                    default="{hostname}-{profile_slug}-{now:%Y-%m-%dT%H:%M:%S}"
                )),
            migrator.add_column(
                BackupProfileModel._meta.table_name, 'prune_prefix',
                pw.CharField(default="{hostname}-{profile_slug}-")),
        )
    if current_schema.version < 10:
        _apply_schema_update(
            current_schema, 10,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'pre_backup_cmd', pw.CharField(default='')),
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'post_backup_cmd', pw.CharField(default='')),
        )
    if current_schema.version < 11:
        _apply_schema_update(current_schema, 11)
        # Normalise stored compression identifiers to the new format.
        for profile in BackupProfileModel:
            if profile.compression == 'zstd':
                profile.compression = 'zstd,3'
            if profile.compression == 'lzma,6':
                profile.compression = 'auto,lzma,6'
            profile.save()
class Discharge(BaseModel):
    """A discharge event recorded against a shift."""

    shift = peewee.ForeignKeyField(Shift, related_name='discharges')
    amount = peewee.FloatField()
    time = peewee.FloatField()  # numeric time value; units/epoch not shown — confirm
    reason = peewee.TextField()
class Reading(BaseModel):
    """A reading of <temperature> from sensor <sensor>."""

    # Callable default: timestamped per row at insert time.
    timestamp = peewee.DateTimeField(default=datetime.datetime.now)
    sensor = peewee.FixedCharField(max_length=15)
    temperature = peewee.FloatField()
class SensorDangerMessage(pw.Model):
    """Migration-time model: a danger message raised for a plant's sensor.

    Foreign keys are bound through ``migrator.orm`` so the model matches the
    schema as it exists at this migration step.
    """

    plant = pw.ForeignKeyField(db_column='plant_id',
                               rel_model=migrator.orm['plant'],
                               to_field='id')
    sensor = pw.ForeignKeyField(db_column='sensor_id',
                                rel_model=migrator.orm['sensor'],
                                to_field='id')
    level = pw.ForeignKeyField(db_column='level_id',
                               rel_model=migrator.orm['sensorsatisfactionlevel'],
                               to_field='id')
    message = pw.TextField()
    # Presumably the sensor value that triggered the message — confirm.
    value = pw.FloatField()
def init_db(con):
    """Initialise the application database.

    Creates all tables, seeds the default backup profile and settings,
    prunes old event-log entries, and applies pending schema migrations.
    """
    db.initialize(con)
    db.connect()
    db.create_tables([
        RepoModel, RepoPassword, BackupProfileModel, SourceFileModel,
        SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel,
        SchemaVersion
    ])

    if BackupProfileModel.select().count() == 0:
        default_profile = BackupProfileModel(name='Default')
        default_profile.save()

    # Create missing settings and update labels. Leave setting values untouched.
    for setting in get_misc_settings():
        s, created = SettingsModel.get_or_create(key=setting['key'],
                                                 defaults=setting)
        if created and setting['key'] == "use_dark_theme":
            # Check if macOS with enabled dark mode
            s.value = bool(uses_dark_mode())
        if created and setting['key'] == "use_light_icon":
            # Check if macOS with enabled dark mode or Linux with GNOME DE
            s.value = bool(uses_dark_mode()) or 'GNOME' in os.environ.get(
                'XDG_CURRENT_DESKTOP', '')
        s.label = setting['label']
        s.save()

    # Prune old log entries. NOTE(review): the window is 180 days although
    # the variable name says three months — kept as-is; reconcile separately.
    three_months_ago = datetime.now() - timedelta(days=180)
    # BUGFIX: peewee DELETE queries are lazy; without .execute() the
    # original built a query object and never deleted anything.
    EventLogModel.delete().where(
        EventLogModel.start_time < three_months_ago).execute()

    # Migrations
    # See http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#schema-migrations
    current_schema, created = SchemaVersion.get_or_create(
        id=1, defaults={'version': SCHEMA_VERSION})
    current_schema.save()

    # Fresh database or already up to date: nothing to migrate.
    if created or current_schema.version == SCHEMA_VERSION:
        return

    migrator = SqliteMigrator(con)

    if current_schema.version < 4:  # version 3 to 4
        _apply_schema_update(
            current_schema, 4,
            migrator.add_column(ArchiveModel._meta.table_name, 'duration',
                                pw.FloatField(null=True)),
            migrator.add_column(ArchiveModel._meta.table_name, 'size',
                                pw.IntegerField(null=True)))
    if current_schema.version < 5:
        _apply_schema_update(
            current_schema, 5,
            migrator.drop_not_null(WifiSettingModel._meta.table_name,
                                   'last_connected'),
        )
    if current_schema.version < 6:
        _apply_schema_update(
            current_schema, 6,
            migrator.add_column(EventLogModel._meta.table_name, 'repo_url',
                                pw.CharField(null=True)))
    if current_schema.version < 7:
        _apply_schema_update(
            current_schema, 7,
            migrator.rename_column(SourceFileModel._meta.table_name,
                                   'config_id', 'profile_id'),
            migrator.drop_column(EventLogModel._meta.table_name,
                                 'profile_id'),
            migrator.add_column(EventLogModel._meta.table_name, 'profile',
                                pw.CharField(null=True)))
    if current_schema.version < 8:
        _apply_schema_update(
            current_schema, 8,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'prune_keep_within',
                                pw.CharField(null=True)))
    if current_schema.version < 9:
        _apply_schema_update(
            current_schema, 9,
            migrator.add_column(
                BackupProfileModel._meta.table_name, 'new_archive_name',
                pw.CharField(
                    default="{hostname}-{profile_slug}-{now:%Y-%m-%dT%H:%M:%S}"
                )),
            migrator.add_column(
                BackupProfileModel._meta.table_name, 'prune_prefix',
                pw.CharField(default="{hostname}-{profile_slug}-")),
        )
    if current_schema.version < 10:
        _apply_schema_update(
            current_schema, 10,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'pre_backup_cmd', pw.CharField(default='')),
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'post_backup_cmd', pw.CharField(default='')),
        )
    if current_schema.version < 11:
        _apply_schema_update(current_schema, 11)
        # Normalise stored compression identifiers to the new format.
        for profile in BackupProfileModel:
            if profile.compression == 'zstd':
                profile.compression = 'zstd,3'
            if profile.compression == 'lzma,6':
                profile.compression = 'auto,lzma,6'
            profile.save()
    if current_schema.version < 12:
        _apply_schema_update(
            current_schema, 12,
            migrator.add_column(RepoModel._meta.table_name,
                                'extra_borg_arguments',
                                pw.CharField(default='')))
    if current_schema.version < 13:
        # Migrate ArchiveModel data to a new table to remove the unique
        # constraint from the snapshot_id column.
        tables = db.get_tables()
        if ArchiveModel.select().count() == 0 and 'snapshotmodel' in tables:
            cursor = db.execute_sql('select * from snapshotmodel;')
            fields = [
                ArchiveModel.id, ArchiveModel.snapshot_id, ArchiveModel.name,
                ArchiveModel.repo, ArchiveModel.time, ArchiveModel.duration,
                ArchiveModel.size
            ]
            # fetchall() already returns a list of positional rows that
            # align with `fields`; no copy needed.
            data = cursor.fetchall()
            with db.atomic():
                ArchiveModel.insert_many(data, fields=fields).execute()
        _apply_schema_update(current_schema, 13)
class SomeModel(pw.Model):
    """Minimal model with a single defaulted float column (test fixture)."""

    some_field = pw.FloatField(default=8)

    class Meta:
        # `self.db` — this class is declared inside a method, binding the
        # model to that instance's database.
        database = self.db
class Part(database.Part):
    """Extends the base Part with its input data and a difficulty score."""

    input_data = peewee.CharField(null=True)
    difficulty = peewee.FloatField(null=True)
class ModelB(peewee.Model): model_a = peewee.ForeignKeyField(ModelA) title = peewee.CharField(max_length=5) speed = peewee.IntegerField() @pytest.mark.parametrize('field,value,expected,expected_type', [ (peewee.IntegerField(), 5, 5, int), (peewee.BigIntegerField(), 5, 5, int), (peewee.SmallIntegerField(), 5, 5, int), (peewee.AutoField(), 5, 5, int), (peewee.BigAutoField(), 5, 5, int), (peewee.IdentityField(), 5, 5, int), (peewee.FloatField(), 5.5, 5.5, float), (peewee.DoubleField(), 5.5, 5.5, float), (peewee.DecimalField(), Decimal('13.37'), '13.37', str), (peewee.CharField(), 'foo', 'foo', str), (peewee.FixedCharField(), 'foo', 'foo', str), (peewee.TextField(), 'foo', 'foo', str), (peewee.BooleanField(), True, True, bool), (peewee.DateTimeField(), datetime(2018, 11, 1), '2018-11-01T00:00:00', str), (postgres_ext.HStoreField(), { 'foo': 'bar' }, { 'foo': 'bar' }, dict), ]) def test_field_is_correctly_serialized(field, value, expected, expected_type):
class LogoConfidenceThreshold(BaseModel):
    """Confidence threshold for logo detection, scoped by (type, value)."""

    type = peewee.CharField(null=True, index=True)
    value = peewee.CharField(null=True, index=True)
    threshold = peewee.FloatField(null=False)
class Parameter(BaseModel):
    """A set of up to 20 numbered process parameters for a machine/piece pair.

    The meaning of each numbered slot is defined elsewhere — TODO document;
    all slots are nullable since not every process fills all twenty.
    """

    machine = peewee.ForeignKeyField(Machine, null=False,
                                     backref='parameters')
    piece = peewee.ForeignKeyField(CastingCode, null=False,
                                   backref='parameters')
    parameter_1 = peewee.FloatField(null=True)
    parameter_2 = peewee.FloatField(null=True)
    parameter_3 = peewee.FloatField(null=True)
    parameter_4 = peewee.FloatField(null=True)
    parameter_5 = peewee.FloatField(null=True)
    parameter_6 = peewee.FloatField(null=True)
    parameter_7 = peewee.FloatField(null=True)
    parameter_8 = peewee.FloatField(null=True)
    parameter_9 = peewee.FloatField(null=True)
    parameter_10 = peewee.FloatField(null=True)
    parameter_11 = peewee.FloatField(null=True)
    parameter_12 = peewee.FloatField(null=True)
    parameter_13 = peewee.FloatField(null=True)
    parameter_14 = peewee.FloatField(null=True)
    parameter_15 = peewee.FloatField(null=True)
    parameter_16 = peewee.FloatField(null=True)
    parameter_17 = peewee.FloatField(null=True)
    parameter_18 = peewee.FloatField(null=True)
    parameter_19 = peewee.FloatField(null=True)
    parameter_20 = peewee.FloatField(null=True)
    # Callable default: timestamped per row at insert time.
    date_added = peewee.DateTimeField(default=datetime.datetime.now)
class Price(peewee.Model):
    """A BTC/USD price sample, keyed by its timestamp."""

    timestamp = peewee.DateTimeField(primary_key=True)
    BTCUSD = peewee.FloatField()

    class Meta:
        database = db
def migrate(migrator, *_, **__):
    """Schema migration: add a `min_accepted_step` float column
    (default 300.0) to the `performance` table."""
    step_field = pw.FloatField(default=300.0)
    migrator.add_fields('performance', min_accepted_step=step_field)