class Game(Model):
    """A Go game between two users (black and white).

    Player membership is mirrored in the GameUser join table so games
    can be queried per-user.
    """

    uuid = CharField()
    black = ForeignKeyField(User)
    white = ForeignKeyField(User)
    size = IntegerField()
    rule_set_name = CharField()
    komi = FloatField()

    class Meta:
        database = db_proxy

    def save(self, **kwargs):
        """Persist the game, generating a uuid on first save.

        Fix: the original inserted fresh GameUser rows on *every* save,
        duplicating the join-table entries each time the game was
        updated; get_or_create makes the linkage idempotent.
        """
        if self.uuid is None:
            self.uuid = str(uuid.uuid4())
        super(Game, self).save(**kwargs)
        GameUser.get_or_create(game=self, user=self.black)
        GameUser.get_or_create(game=self, user=self.white)

    @property
    def players(self) -> Dict[Color, User]:
        """Mapping of seat color to the User occupying it."""
        return {Color.black: self.black, Color.white: self.white}

    @players.setter
    def players(self, value: Dict[Color, User]) -> None:
        self.black = value[Color.black]
        self.white = value[Color.white]

    @property
    def rule_set(self) -> RuleSet:
        """Resolve the stored rule-set name back into a RuleSet object."""
        return get_rule_set_by_name(self.rule_set_name)

    @rule_set.setter
    def rule_set(self, value: RuleSet) -> None:
        self.rule_set_name = value.name
        self.komi = value.komi.w_score

    @property
    def users(self) -> List[User]:
        """All users linked to this game through the GameUser table."""
        return list(
            GameUser.select(GameUser.user).where(GameUser.game == self))

    # This should probably not be used, but it's here as an example
    # @users.setter
    # def users(self, value: List[User]) -> None:
    #     GameUser.insert_many([{'game': self, 'user': user} for user in value]).on_conflict_ignore()

    @classmethod
    def new_game(cls, size, rule_set, players, timer, uuid=None):
        """Create an (unsaved) game with a fresh board state.

        Note: `uuid` here is the caller-supplied identifier (it shadows
        the uuid module inside this method only).
        """
        state = State.new_game(size, rule_set)
        # Use cls(...) so subclasses construct instances of themselves.
        return cls(players=players, timer=timer, state=state,
                   rule_set=rule_set, size=size, uuid=uuid)

    def is_legal(self, point):
        """Return True if playing at `point` is a valid move."""
        assert isinstance(point, Point)
        move = Move.play(point)
        return self.state.is_valid_move(move)

    def make_move(self, point):
        """Play a stone at `point`; returns (move, result-or-None)."""
        return self._make_move(Move.play(point))

    def _make_move(self, move: Move):
        """Persist `move`, apply it to the game state.

        Returns (move, game_result) when the move ends the game,
        otherwise (move, None).
        """
        move.save()
        GameMove(move=move, game=self).save()
        self.state = self.state.apply_move(move)
        if self.state.is_over():
            return move, self.state.get_game_result()
        return move, None

    def pass_turn(self):
        """Pass for the player to move."""
        return self._make_move(Move.pass_turn())

    def resign(self):
        """Resign for the player to move."""
        return self._make_move(Move.resign())

    def to_sgf(self):
        """Build an SGF representation of this game."""
        return SGF(self)

    @property
    def sgf_str(self):
        """SGF representation rendered as a string."""
        return str(self.to_sgf())

    @staticmethod
    def _from_sgf(sgf):
        """Replay a parsed SGF object into a new Game (users are
        get-or-created from the SGF player names)."""
        assert isinstance(sgf, SGF)
        uuid = sgf.header.get('UD', None)
        size = sgf.header['SZ']
        rules = get_rule_set_by_name(sgf.header['RU'])
        players = {
            Color.black: User.get_or_create(display_name=sgf.header['BP'],
                                            defaults={'token': '1'})[0],
            Color.white: User.get_or_create(display_name=sgf.header['WP'],
                                            defaults={'token': '2'})[0]
        }
        game = Game.new_game(int(size), rules, players, Timer(), uuid=uuid)
        for move in sgf.moves:
            game.make_move(Move.from_sgf(move[game.state.next_color.sgf_str]))
        return game

    @staticmethod
    def from_sgf(sgf_string):
        """Parse an SGF string and replay it into a new Game."""
        sgf = SGF.from_string(sgf_string)
        return Game._from_sgf(sgf)

    @property
    def score(self):
        return self.state.score

    @property
    def grid(self):
        return self.state.board.grid

    def get_black_white_points(self):
        """Return (black_territory, white_territory) point sets."""
        # return {Point(1, 1)}, {Point(2, 2)}
        # Passing None recomputes territory without toggling any group.
        self.state.change_dead_stone_marking(None)
        return self.state.territory.black_territory, self.state.territory.white_territory

    def mark_dead_stone(self, point):
        # actually changes a group of stones from dead to alive and vice versa.
        return self.state.change_dead_stone_marking(point)
class Admixture(BaseModel):
    """Per-sample admixture statistics.

    Each float column holds the estimated ancestry fraction for one
    population code (GBR, FIN, ... — presumably 1000 Genomes reference
    population codes; confirm against the data source).
    """

    batch = CharField()
    # NOTE(review): `Samples` is capitalized unlike the other fields;
    # renaming would change the DB column name, so it is left as-is.
    Samples = CharField()
    GBR = FloatField()
    FIN = FloatField()
    CHS = FloatField()
    PUR = FloatField()
    CDX = FloatField()
    CLM = FloatField()
    IBS = FloatField()
    PEL = FloatField()
    PJL = FloatField()
    KHV = FloatField()
    ACB = FloatField()
    GWD = FloatField()
    ESN = FloatField()
    BEB = FloatField()
    MSL = FloatField()
    STU = FloatField()
    ITU = FloatField()
    CEU = FloatField()
    YRI = FloatField()
    CHB = FloatField()
    JPT = FloatField()
    LWK = FloatField()
    ASW = FloatField()
    MXL = FloatField()
    TSI = FloatField()
    GIH = FloatField()

    class Meta:
        # Maps onto the pre-existing 'Admixture_stat' table.
        table_name = 'Admixture_stat'
class User(BaseModel):
    """User account with a database-enforced traffic quota."""

    id = UUIDField(primary_key=True, unique=True)
    traffic_quota = FloatField()
    # DB-level CHECK: consumed traffic may never exceed the quota.
    traffic = FloatField(constraints=[Check('traffic <= traffic_quota')])
class DbTickData(ModelBase):
    """
    Tick data for database storage.

    Index is defined unique with (datetime, symbol)
    """

    id = AutoField()
    symbol: str = CharField()
    exchange: str = CharField()
    datetime: datetime = DateTimeField()
    name: str = CharField()
    volume: float = FloatField()
    open_interest: float = FloatField()
    last_price: float = FloatField()
    last_volume: float = FloatField()
    limit_up: float = FloatField()
    limit_down: float = FloatField()
    open_price: float = FloatField()
    high_price: float = FloatField()
    low_price: float = FloatField()
    pre_close: float = FloatField()
    # Order book: level 1 is mandatory, levels 2-5 are optional (null).
    bid_price_1: float = FloatField()
    bid_price_2: float = FloatField(null=True)
    bid_price_3: float = FloatField(null=True)
    bid_price_4: float = FloatField(null=True)
    bid_price_5: float = FloatField(null=True)
    ask_price_1: float = FloatField()
    ask_price_2: float = FloatField(null=True)
    ask_price_3: float = FloatField(null=True)
    ask_price_4: float = FloatField(null=True)
    ask_price_5: float = FloatField(null=True)
    bid_volume_1: float = FloatField()
    bid_volume_2: float = FloatField(null=True)
    bid_volume_3: float = FloatField(null=True)
    bid_volume_4: float = FloatField(null=True)
    bid_volume_5: float = FloatField(null=True)
    ask_volume_1: float = FloatField()
    ask_volume_2: float = FloatField(null=True)
    ask_volume_3: float = FloatField(null=True)
    ask_volume_4: float = FloatField(null=True)
    ask_volume_5: float = FloatField(null=True)

    class Meta:
        database = db
        # Unique composite index: one row per (symbol, exchange, datetime).
        indexes = ((("symbol", "exchange", "datetime"), True),)

    @staticmethod
    def from_tick(tick: TickData):
        """
        Generate DbTickData object from TickData.
        """
        # Change datetime to database timezone, then
        # remove tzinfo since not supported by SQLite.
        dt = tick.datetime.astimezone(DB_TZ)
        dt = dt.replace(tzinfo=None)

        db_tick = DbTickData()

        db_tick.symbol = tick.symbol
        db_tick.exchange = tick.exchange.value
        db_tick.datetime = dt
        db_tick.name = tick.name
        db_tick.volume = tick.volume
        db_tick.open_interest = tick.open_interest
        db_tick.last_price = tick.last_price
        db_tick.last_volume = tick.last_volume
        db_tick.limit_up = tick.limit_up
        db_tick.limit_down = tick.limit_down
        db_tick.open_price = tick.open_price
        db_tick.high_price = tick.high_price
        db_tick.low_price = tick.low_price
        db_tick.pre_close = tick.pre_close
        db_tick.bid_price_1 = tick.bid_price_1
        db_tick.ask_price_1 = tick.ask_price_1
        db_tick.bid_volume_1 = tick.bid_volume_1
        db_tick.ask_volume_1 = tick.ask_volume_1

        # Levels 2-5 are copied only when level 2 is present (truthy);
        # a zero bid_price_2 is deliberately treated as "no depth data".
        if tick.bid_price_2:
            db_tick.bid_price_2 = tick.bid_price_2
            db_tick.bid_price_3 = tick.bid_price_3
            db_tick.bid_price_4 = tick.bid_price_4
            db_tick.bid_price_5 = tick.bid_price_5
            db_tick.ask_price_2 = tick.ask_price_2
            db_tick.ask_price_3 = tick.ask_price_3
            db_tick.ask_price_4 = tick.ask_price_4
            db_tick.ask_price_5 = tick.ask_price_5
            db_tick.bid_volume_2 = tick.bid_volume_2
            db_tick.bid_volume_3 = tick.bid_volume_3
            db_tick.bid_volume_4 = tick.bid_volume_4
            db_tick.bid_volume_5 = tick.bid_volume_5
            db_tick.ask_volume_2 = tick.ask_volume_2
            db_tick.ask_volume_3 = tick.ask_volume_3
            db_tick.ask_volume_4 = tick.ask_volume_4
            db_tick.ask_volume_5 = tick.ask_volume_5

        return db_tick

    def to_tick(self):
        """
        Generate TickData object from DbTickData.
        """
        tick = TickData(
            symbol=self.symbol,
            exchange=Exchange(self.exchange),
            # Re-attach the database timezone stripped in from_tick.
            datetime=self.datetime.replace(tzinfo=DB_TZ),
            name=self.name,
            volume=self.volume,
            open_interest=self.open_interest,
            last_price=self.last_price,
            last_volume=self.last_volume,
            limit_up=self.limit_up,
            limit_down=self.limit_down,
            open_price=self.open_price,
            high_price=self.high_price,
            low_price=self.low_price,
            pre_close=self.pre_close,
            bid_price_1=self.bid_price_1,
            ask_price_1=self.ask_price_1,
            bid_volume_1=self.bid_volume_1,
            ask_volume_1=self.ask_volume_1,
            gateway_name="DB",
        )

        # Restore depth levels only if they were stored (see from_tick).
        if self.bid_price_2:
            tick.bid_price_2 = self.bid_price_2
            tick.bid_price_3 = self.bid_price_3
            tick.bid_price_4 = self.bid_price_4
            tick.bid_price_5 = self.bid_price_5
            tick.ask_price_2 = self.ask_price_2
            tick.ask_price_3 = self.ask_price_3
            tick.ask_price_4 = self.ask_price_4
            tick.ask_price_5 = self.ask_price_5
            tick.bid_volume_2 = self.bid_volume_2
            tick.bid_volume_3 = self.bid_volume_3
            tick.bid_volume_4 = self.bid_volume_4
            tick.bid_volume_5 = self.bid_volume_5
            tick.ask_volume_2 = self.ask_volume_2
            tick.ask_volume_3 = self.ask_volume_3
            tick.ask_volume_4 = self.ask_volume_4
            tick.ask_volume_5 = self.ask_volume_5

        return tick

    @staticmethod
    def save_all(objs: List["DbTickData"]):
        """Upsert a list of DbTickData rows in one transaction.

        PostgreSQL uses per-row ON CONFLICT UPDATE on the unique index;
        other backends use chunked INSERT ... ON CONFLICT REPLACE.
        """
        dicts = [i.to_dict() for i in objs]
        with db.atomic():
            if driver is Driver.POSTGRESQL:
                for tick in dicts:
                    DbTickData.insert(tick).on_conflict(
                        update=tick,
                        conflict_target=(
                            DbTickData.symbol,
                            DbTickData.exchange,
                            DbTickData.datetime,
                        ),
                    ).execute()
            else:
                # Chunk to stay under SQLite's bound-variable limit.
                for c in chunked(dicts, 50):
                    DbTickData.insert_many(c).on_conflict_replace().execute()
class Journals(CherryPyAPI):
    """
    Journal model and associated fields.

    Attributes:
        +-------------------+-------------------------------------+
        | Name              | Description                         |
        +===================+=====================================+
        | name              | name of the journal                 |
        +-------------------+-------------------------------------+
        | impact_factor     | impact factor of the journal        |
        +-------------------+-------------------------------------+
        | website_url       | website for the journal (optional)  |
        +-------------------+-------------------------------------+
        | encoding          | language encoding for the name      |
        +-------------------+-------------------------------------+
    """

    name = CharField(default='')
    impact_factor = FloatField(default=-1.0)
    website_url = CharField(default='')
    encoding = CharField(default='UTF8')

    @staticmethod
    def elastic_mapping_builder(obj):
        """Build the elasticsearch mapping bits."""
        super(Journals, Journals).elastic_mapping_builder(obj)
        # Fix: the original aliased ONE shared dict to all three keys
        # (obj['name'] = obj['website_url'] = obj['encoding'] = {...}),
        # so a later mutation of one mapping would silently change the
        # others. Give each key its own dict instead.
        for key in ('name', 'website_url', 'encoding'):
            obj[key] = {
                'type': 'text',
                'fields': {'keyword': {'type': 'keyword',
                                       'ignore_above': 256}}
            }
        obj['impact_factor'] = {'type': 'float'}

    def to_hash(self, recursion_depth=1):
        """Convert the object to a hash."""
        obj = super(Journals, self).to_hash(recursion_depth)
        obj['_id'] = int(self.id)
        obj['name'] = unicode_type(self.name)
        obj['impact_factor'] = float(self.impact_factor)
        obj['website_url'] = str(self.website_url)
        obj['encoding'] = str(self.encoding)
        return obj

    def from_hash(self, obj):
        """Convert the hash into the object."""
        super(Journals, self).from_hash(obj)
        self._set_only_if('_id', obj, 'id', lambda: int(obj['_id']))
        self._set_only_if('name', obj, 'name',
                          lambda: unicode_type(obj['name']))
        self._set_only_if('impact_factor', obj, 'impact_factor',
                          lambda: float(obj['impact_factor']))
        self._set_only_if('website_url', obj, 'website_url',
                          lambda: str(obj['website_url']))
        self._set_only_if('encoding', obj, 'encoding',
                          lambda: str(obj['encoding']))

    def where_clause(self, kwargs):
        """PeeWee specific where clause used for search."""
        where_clause = super(Journals, self).where_clause(kwargs)
        if '_id' in kwargs:
            where_clause &= Expression(Journals.id, OP.EQ, kwargs['_id'])
        for key in ['name', 'impact_factor', 'website_url', 'encoding']:
            if key in kwargs:
                # Callers may override the comparison operator via
                # '<field>_operator' (e.g. name_operator=ilike).
                key_oper = OP.EQ
                if '{0}_operator'.format(key) in kwargs:
                    key_oper = getattr(
                        OP, kwargs['{0}_operator'.format(key)].upper())
                where_clause &= Expression(getattr(Journals, key),
                                           key_oper, kwargs[key])
        return where_clause
class Product(SyncModel):
    """Product record synchronized from an external Odoo instance
    (linked through `odoo_id`)."""

    # Module identifier used by the sync framework — presumably the
    # Odoo model name; confirm against SyncModel's usage.
    MOD_NAME = 'product'

    name = CharField(max_length=150)
    code = CharField(max_length=50)
    stock = FloatField(default=0.0)
    # Unique id of the corresponding record on the Odoo side.
    odoo_id = IntegerField(unique=True)
    uom_name = CharField(max_length=80)
    categ_id = ForeignKeyField(ProductCategory, on_delete='CASCADE')
    brand_id = ForeignKeyField(ProductBrand, on_delete='CASCADE', null=True)
    # Price tiers: pvi/pvd/pvm — semantics not visible here (likely
    # pricelist levels); confirm against the Odoo configuration.
    pvi_1 = FloatField(default=0.0)
    pvi_2 = FloatField(default=0.0)
    pvi_3 = FloatField(default=0.0)
    pvi_4 = FloatField(default=0.0)
    pvi_5 = FloatField(default=0.0)
    pvd_1 = FloatField(default=0.0)
    pvd_2 = FloatField(default=0.0)
    pvd_3 = FloatField(default=0.0)
    pvd_4 = FloatField(default=0.0)
    pvd_5 = FloatField(default=0.0)
    pvm_1 = FloatField(default=0.0)
    pvm_2 = FloatField(default=0.0)
    pvm_3 = FloatField(default=0.0)
    last_sixty_days_sales = FloatField(default=0.0)
    joking_index = FloatField(default=0.0)
    external_stock = FloatField(default=0.0)
    sale_ok = BooleanField()
    ean13 = CharField(max_length=13)
    description_sale = TextField()
    manufacturer_ref = CharField(max_length=100, null=True)
    # NOTE(review): `type` shadows the builtin, but renaming would
    # change the DB column, so it is left as-is.
    type = CharField(max_length=10)
    is_pack = BooleanField()
    discontinued = BooleanField()
    state = CharField(max_length=50)
    sale_in_groups_of = FloatField(default=1.0)
    replacement_id = IntegerField()

    def __unicode__(self):
        # Python 2 style display name (project appears to target py2).
        return self.name
class TagTrend(BaseModel):
    """A named trend series with a start time and sampling period."""

    name = CharField()
    start = DateTimeField()
    # Period between samples — units not visible here; presumably
    # seconds, confirm against the writer.
    period = FloatField()
class Reading(BaseModel):
    """A single sensor reading; deleted along with its sensor."""

    id = PrimaryKeyField()
    sensor = ForeignKeyField(Sensor, on_delete='CASCADE')
    # Callable default: evaluated per-row at insert time.
    created = DateTimeField(default=datetime.now)
    reading = FloatField()
class Journal(Model):
    """Wide benchmark/log table exercising one column of every major
    field type in four variants: defaulted (1, 3) and nullable (2, 4).
    """

    timestamp = DateTimeField(default=datetime.now)
    level = SmallIntegerField(index=True)
    text = CharField(max_length=255, index=True)

    col_float1 = FloatField(default=2.2)
    col_smallint1 = SmallIntegerField(default=2)
    col_int1 = IntegerField(default=2000000)
    col_bigint1 = BigIntegerField(default=99999999)
    col_char1 = CharField(max_length=255, default="value1")
    col_text1 = TextField(
        default="Moo,Foo,Baa,Waa,Moo,Foo,Baa,Waa,Moo,Foo,Baa,Waa")
    col_decimal1 = DecimalField(12, 8, default=Decimal("2.2"))
    # Fix: the original used default={...}, a single mutable dict
    # shared by every instance — any in-place mutation would leak into
    # all later rows. A callable default builds a fresh dict per row.
    col_json1 = JSONField(default=lambda: {
        "a": 1,
        "b": "b",
        "c": [2],
        "d": {
            "e": 3
        },
        "f": True
    })

    col_float2 = FloatField(null=True)
    col_smallint2 = SmallIntegerField(null=True)
    col_int2 = IntegerField(null=True)
    col_bigint2 = BigIntegerField(null=True)
    col_char2 = CharField(max_length=255, null=True)
    col_text2 = TextField(null=True)
    col_decimal2 = DecimalField(12, 8, null=True)
    col_json2 = JSONField(null=True)

    col_float3 = FloatField(default=2.2)
    col_smallint3 = SmallIntegerField(default=2)
    col_int3 = IntegerField(default=2000000)
    col_bigint3 = BigIntegerField(default=99999999)
    col_char3 = CharField(max_length=255, default="value1")
    col_text3 = TextField(
        default="Moo,Foo,Baa,Waa,Moo,Foo,Baa,Waa,Moo,Foo,Baa,Waa")
    col_decimal3 = DecimalField(12, 8, default=Decimal("2.2"))
    # Same shared-mutable-default fix as col_json1.
    col_json3 = JSONField(default=lambda: {
        "a": 1,
        "b": "b",
        "c": [2],
        "d": {
            "e": 3
        },
        "f": True
    })

    col_float4 = FloatField(null=True)
    col_smallint4 = SmallIntegerField(null=True)
    col_int4 = IntegerField(null=True)
    col_bigint4 = BigIntegerField(null=True)
    col_char4 = CharField(max_length=255, null=True)
    col_text4 = TextField(null=True)
    col_decimal4 = DecimalField(12, 8, null=True)
    col_json4 = JSONField(null=True)

    class Meta:
        database = db
class DbBarData(Model):
    """
    Candlestick bar data for database storage.

    Index is defined unique with vt_symbol, interval and datetime.
    """

    symbol = CharField()
    exchange = CharField()
    datetime = DateTimeField()
    interval = CharField()

    volume = FloatField()
    open_price = FloatField()
    high_price = FloatField()
    low_price = FloatField()
    close_price = FloatField()

    vt_symbol = CharField()
    gateway_name = CharField()

    class Meta:
        database = DB
        # Unique composite index: one bar per (vt_symbol, interval, datetime).
        indexes = ((("vt_symbol", "interval", "datetime"), True), )

    @staticmethod
    def from_bar(bar: BarData):
        """
        Generate DbBarData object from BarData.
        """
        db_bar = DbBarData()

        db_bar.symbol = bar.symbol
        # Store enum values as plain strings for the DB.
        db_bar.exchange = bar.exchange.value
        db_bar.datetime = bar.datetime
        db_bar.interval = bar.interval.value
        db_bar.volume = bar.volume
        db_bar.open_price = bar.open_price
        db_bar.high_price = bar.high_price
        db_bar.low_price = bar.low_price
        db_bar.close_price = bar.close_price
        db_bar.vt_symbol = bar.vt_symbol
        db_bar.gateway_name = "DB"

        return db_bar

    def to_bar(self):
        """
        Generate BarData object from DbBarData.
        """
        bar = BarData(
            symbol=self.symbol,
            exchange=Exchange(self.exchange),
            datetime=self.datetime,
            interval=Interval(self.interval),
            volume=self.volume,
            open_price=self.open_price,
            high_price=self.high_price,
            low_price=self.low_price,
            close_price=self.close_price,
            gateway_name=self.gateway_name,
        )
        return bar
class DbTickData(Model):
    """
    Tick data for database storage.

    Index is defined unique with vt_symbol, interval and datetime.
    """

    symbol = CharField()
    exchange = CharField()
    datetime = DateTimeField()

    name = CharField()
    volume = FloatField()
    last_price = FloatField()
    last_volume = FloatField()
    limit_up = FloatField()
    limit_down = FloatField()

    open_price = FloatField()
    high_price = FloatField()
    low_price = FloatField()
    # Fix: this column was declared `close_price` but every method
    # reads/writes `pre_close` — so pre_close was never persisted and
    # to_tick() raised AttributeError on rows loaded from the DB.
    # Renamed to match the code (and the sibling tick model).
    pre_close = FloatField()

    bid_price_1 = FloatField()
    bid_price_2 = FloatField()
    bid_price_3 = FloatField()
    bid_price_4 = FloatField()
    bid_price_5 = FloatField()

    ask_price_1 = FloatField()
    ask_price_2 = FloatField()
    ask_price_3 = FloatField()
    ask_price_4 = FloatField()
    ask_price_5 = FloatField()

    bid_volume_1 = FloatField()
    bid_volume_2 = FloatField()
    bid_volume_3 = FloatField()
    bid_volume_4 = FloatField()
    bid_volume_5 = FloatField()

    ask_volume_1 = FloatField()
    ask_volume_2 = FloatField()
    ask_volume_3 = FloatField()
    ask_volume_4 = FloatField()
    ask_volume_5 = FloatField()

    vt_symbol = CharField()
    gateway_name = CharField()

    class Meta:
        database = DB
        # Unique composite index: one row per (vt_symbol, datetime).
        indexes = ((("vt_symbol", "datetime"), True), )

    @staticmethod
    def from_tick(tick: TickData):
        """
        Generate DbTickData object from TickData.
        """
        db_tick = DbTickData()

        db_tick.symbol = tick.symbol
        db_tick.exchange = tick.exchange.value
        db_tick.datetime = tick.datetime
        db_tick.name = tick.name
        db_tick.volume = tick.volume
        db_tick.last_price = tick.last_price
        db_tick.last_volume = tick.last_volume
        db_tick.limit_up = tick.limit_up
        db_tick.limit_down = tick.limit_down
        db_tick.open_price = tick.open_price
        db_tick.high_price = tick.high_price
        db_tick.low_price = tick.low_price
        db_tick.pre_close = tick.pre_close
        db_tick.bid_price_1 = tick.bid_price_1
        db_tick.ask_price_1 = tick.ask_price_1
        db_tick.bid_volume_1 = tick.bid_volume_1
        db_tick.ask_volume_1 = tick.ask_volume_1

        # Depth levels 2-5 only when level 2 is present (truthy).
        if tick.bid_price_2:
            db_tick.bid_price_2 = tick.bid_price_2
            db_tick.bid_price_3 = tick.bid_price_3
            db_tick.bid_price_4 = tick.bid_price_4
            db_tick.bid_price_5 = tick.bid_price_5
            db_tick.ask_price_2 = tick.ask_price_2
            db_tick.ask_price_3 = tick.ask_price_3
            db_tick.ask_price_4 = tick.ask_price_4
            db_tick.ask_price_5 = tick.ask_price_5
            db_tick.bid_volume_2 = tick.bid_volume_2
            db_tick.bid_volume_3 = tick.bid_volume_3
            db_tick.bid_volume_4 = tick.bid_volume_4
            db_tick.bid_volume_5 = tick.bid_volume_5
            db_tick.ask_volume_2 = tick.ask_volume_2
            db_tick.ask_volume_3 = tick.ask_volume_3
            db_tick.ask_volume_4 = tick.ask_volume_4
            db_tick.ask_volume_5 = tick.ask_volume_5

        db_tick.vt_symbol = tick.vt_symbol
        db_tick.gateway_name = "DB"

        # Fix: the original returned the *input* `tick`, discarding the
        # freshly built database object.
        return db_tick

    def to_tick(self):
        """
        Generate TickData object from DbTickData.
        """
        tick = TickData(
            symbol=self.symbol,
            exchange=Exchange(self.exchange),
            datetime=self.datetime,
            name=self.name,
            volume=self.volume,
            last_price=self.last_price,
            last_volume=self.last_volume,
            limit_up=self.limit_up,
            limit_down=self.limit_down,
            open_price=self.open_price,
            high_price=self.high_price,
            low_price=self.low_price,
            pre_close=self.pre_close,
            bid_price_1=self.bid_price_1,
            ask_price_1=self.ask_price_1,
            bid_volume_1=self.bid_volume_1,
            ask_volume_1=self.ask_volume_1,
            gateway_name=self.gateway_name,
        )

        if self.bid_price_2:
            tick.bid_price_2 = self.bid_price_2
            tick.bid_price_3 = self.bid_price_3
            tick.bid_price_4 = self.bid_price_4
            tick.bid_price_5 = self.bid_price_5
            tick.ask_price_2 = self.ask_price_2
            tick.ask_price_3 = self.ask_price_3
            tick.ask_price_4 = self.ask_price_4
            tick.ask_price_5 = self.ask_price_5
            tick.bid_volume_2 = self.bid_volume_2
            tick.bid_volume_3 = self.bid_volume_3
            tick.bid_volume_4 = self.bid_volume_4
            tick.bid_volume_5 = self.bid_volume_5
            tick.ask_volume_2 = self.ask_volume_2
            tick.ask_volume_3 = self.ask_volume_3
            tick.ask_volume_4 = self.ask_volume_4
            tick.ask_volume_5 = self.ask_volume_5

        return tick
class Thermostat(BaseModel):
    """A thermostat with heating/cooling PID settings, schedules,
    presets and attached valves."""

    class ValveConfigs(object):
        # How multiple valves are driven (see `valve_config` field).
        CASCADE = 'cascade'
        EQUAL = 'equal'

    id = AutoField()
    number = IntegerField(unique=True)
    name = CharField(default='Thermostat')
    sensor = ForeignKeyField(Sensor, null=True, backref='thermostats', on_delete='SET NULL')
    # PID gains for heating and cooling modes.
    pid_heating_p = FloatField(default=120)
    pid_heating_i = FloatField(default=0)
    pid_heating_d = FloatField(default=0)
    pid_cooling_p = FloatField(default=120)
    pid_cooling_i = FloatField(default=0)
    pid_cooling_d = FloatField(default=0)
    automatic = BooleanField(default=True)
    room = ForeignKeyField(Room, null=True, on_delete='SET NULL', backref='thermostats')
    start = IntegerField()
    valve_config = CharField(default=ValveConfigs.CASCADE)  # Options: 'cascade' or 'equal'
    thermostat_group = ForeignKeyField(ThermostatGroup, backref='thermostats', on_delete='CASCADE')

    def get_preset(self, preset_type):  # type: (str) -> Preset
        """Fetch (or lazily create and persist) this thermostat's
        preset of the given type."""
        if preset_type not in Preset.ALL_TYPES:
            raise ValueError('Preset type `{0}` unknown'.format(preset_type))
        preset = Preset.get_or_none((Preset.type == preset_type) &
                                    (Preset.thermostat_id == self.id))
        if preset is None:
            preset = Preset(thermostat=self, type=preset_type)
            preset.save()
        return preset

    @property
    def setpoint(self):
        # Heating vs cooling setpoint of the active preset.
        # NOTE(review): reads `self.mode`, which is not defined in this
        # class — presumably provided elsewhere; confirm.
        return self.active_preset.heating_setpoint if self.mode == ThermostatGroup.Modes.HEATING else self.active_preset.cooling_setpoint

    @property
    def active_preset(self):
        """The currently-active preset; falls back to (and activates)
        the SCHEDULE preset if none is active."""
        preset = Preset.get_or_none(thermostat=self.id, active=True)
        if preset is None:
            preset = self.get_preset(Preset.Types.SCHEDULE)
            preset.active = True
            preset.save()
        return preset

    @active_preset.setter
    def active_preset(self, value):
        """Activate `value`, deactivating the previous active preset.
        Raises ValueError if the preset belongs to another thermostat."""
        if value is None or value.thermostat_id != self.id:
            raise ValueError('The given Preset does not belong to this Thermostat')
        if value != self.active_preset:
            # Deactivate the old preset first so at most one is active.
            if self.active_preset is not None:
                current_active_preset = self.active_preset
                current_active_preset.active = False
                current_active_preset.save()
            value.active = True
            value.save()

    @property
    def valves(self):  # type: () -> List[Valve]
        """All valves linked to this thermostat, in priority order."""
        return [valve for valve in Valve.select(Valve)
                                        .join(ValveToThermostat)
                                        .where(ValveToThermostat.thermostat_id == self.id)
                                        .order_by(ValveToThermostat.priority)]

    @property
    def active_valves(self):  # type: () -> List[Valve]
        # Valves for the group's *current* mode.
        return self._valves(mode=self.thermostat_group.mode)

    @property
    def heating_valves(self):  # type: () -> List[Valve]
        return self._valves(mode=ThermostatGroup.Modes.HEATING)

    @property
    def cooling_valves(self):  # type: () -> List[Valve]
        return self._valves(mode=ThermostatGroup.Modes.COOLING)

    def _valves(self, mode):  # type: (str) -> List[Valve]
        """Valves linked to this thermostat for `mode`, priority order."""
        return [valve for valve in Valve.select(Valve, ValveToThermostat.mode, ValveToThermostat.priority)
                                        .join(ValveToThermostat)
                                        .where((ValveToThermostat.thermostat_id == self.id) &
                                               (ValveToThermostat.mode == mode))
                                        .order_by(ValveToThermostat.priority)]

    def heating_schedules(self):  # type: () -> List[DaySchedule]
        """Heating day-schedules ordered by day index."""
        return [schedule for schedule in DaySchedule.select()
                                                    .where((DaySchedule.thermostat == self.id) &
                                                           (DaySchedule.mode == ThermostatGroup.Modes.HEATING))
                                                    .order_by(DaySchedule.index)]

    def cooling_schedules(self):  # type: () -> List[DaySchedule]
        """Cooling day-schedules ordered by day index."""
        return [x for x in DaySchedule.select()
                                      .where((DaySchedule.thermostat == self.id) &
                                             (DaySchedule.mode == ThermostatGroup.Modes.COOLING))
                                      .order_by(DaySchedule.index)]
class DbBarData(ModelBase):
    """
    Candlestick bar data for database storage.

    Index is defined unique with datetime, interval, symbol
    """

    id = AutoField()
    symbol: str = CharField()
    exchange: str = CharField()
    datetime: datetime = DateTimeField()
    interval: str = CharField()

    volume: float = FloatField()
    open_interest: float = FloatField()
    open_price: float = FloatField()
    high_price: float = FloatField()
    low_price: float = FloatField()
    close_price: float = FloatField()

    class Meta:
        database = db
        # Unique composite index: one bar per
        # (symbol, exchange, interval, datetime).
        indexes = ((("symbol", "exchange", "interval", "datetime"), True), )

    @staticmethod
    def from_bar(bar: BarData):
        """
        Generate DbBarData object from BarData.
        """
        db_bar = DbBarData()

        db_bar.symbol = bar.symbol
        db_bar.exchange = bar.exchange.value
        db_bar.datetime = bar.datetime
        db_bar.interval = bar.interval.value
        db_bar.volume = bar.volume
        db_bar.open_interest = bar.open_interest
        db_bar.open_price = bar.open_price
        db_bar.high_price = bar.high_price
        db_bar.low_price = bar.low_price
        db_bar.close_price = bar.close_price

        return db_bar

    def to_bar(self):
        """
        Generate BarData object from DbBarData.
        """
        bar = BarData(
            symbol=self.symbol,
            exchange=Exchange(self.exchange),
            datetime=self.datetime,
            interval=Interval(self.interval),
            volume=self.volume,
            open_price=self.open_price,
            high_price=self.high_price,
            open_interest=self.open_interest,
            low_price=self.low_price,
            close_price=self.close_price,
            gateway_name="DB",
        )
        return bar

    @staticmethod
    def save_all(objs: List["DbBarData"], progress_bar_dict=None):
        """
        save a list of objects, update if exists.

        If `progress_bar_dict` contains a 'save_progress_bar' widget,
        it is updated as chunks are written (non-PostgreSQL path only).
        """
        dicts = [i.to_dict() for i in objs]
        with db.atomic():
            if driver is Driver.POSTGRESQL:
                for bar in dicts:
                    DbBarData.insert(bar).on_conflict(
                        update=bar,
                        conflict_target=(
                            DbBarData.symbol,
                            DbBarData.exchange,
                            DbBarData.interval,
                            DbBarData.datetime,
                        ),
                    ).execute()
            else:
                total_sz = len(dicts)
                loaded = 0
                for c in chunked(dicts, 50):
                    DbBarData.insert_many(
                        c).on_conflict_replace().execute()
                    # Fix: the original did `'save_progress_bar' in
                    # progress_bar_dict` unguarded, raising TypeError
                    # whenever the default progress_bar_dict=None was
                    # used.
                    if progress_bar_dict and 'save_progress_bar' in progress_bar_dict:
                        loaded += 50
                        # Cap at 100 since the last chunk may be short.
                        percent_saved = min(
                            round(100 * loaded / total_sz, 2), 100)
                        QApplication.processEvents()
                        progress_bar_dict['save_progress_bar'].setValue(
                            percent_saved)
class Donation(BaseModel):
    """A single donation amount, linked to its donor."""

    value = FloatField()
    # FK targets Donor.name rather than the default primary key.
    donor = ForeignKeyField(Donor, field='name', backref='donations')
class RmspropOptimizer(BaseModel):
    """RMSprop optimizer settings attached to a Hyperparameters row."""

    # NOTE(review): `related_name` is peewee 2.x syntax (peewee 3
    # renamed it to `backref`); the project's peewee version is not
    # visible here, so it is left unchanged.
    hyperparameters = ForeignKeyField(Hyperparameters, related_name='rmsprop_optimizer')
    learning_rate = FloatField()
    decay = FloatField()
    momentum = FloatField()
class DbTickData(Model):
    """Tick data storage model; one row per (symbol, exchange, datetime)."""

    id = AutoField()

    symbol: str = CharField()
    exchange: str = CharField()
    datetime: datetime = DateTimeField()

    name: str = CharField()
    volume: float = FloatField()
    open_interest: float = FloatField()
    last_price: float = FloatField()
    last_volume: float = FloatField()
    limit_up: float = FloatField()
    limit_down: float = FloatField()

    open_price: float = FloatField()
    high_price: float = FloatField()
    low_price: float = FloatField()
    pre_close: float = FloatField()

    # Order book: level 1 is mandatory, levels 2-5 optional (null).
    bid_price_1: float = FloatField()
    bid_price_2: float = FloatField(null=True)
    bid_price_3: float = FloatField(null=True)
    bid_price_4: float = FloatField(null=True)
    bid_price_5: float = FloatField(null=True)

    ask_price_1: float = FloatField()
    ask_price_2: float = FloatField(null=True)
    ask_price_3: float = FloatField(null=True)
    ask_price_4: float = FloatField(null=True)
    ask_price_5: float = FloatField(null=True)

    bid_volume_1: float = FloatField()
    bid_volume_2: float = FloatField(null=True)
    bid_volume_3: float = FloatField(null=True)
    bid_volume_4: float = FloatField(null=True)
    bid_volume_5: float = FloatField(null=True)

    ask_volume_1: float = FloatField()
    ask_volume_2: float = FloatField(null=True)
    ask_volume_3: float = FloatField(null=True)
    ask_volume_4: float = FloatField(null=True)
    ask_volume_5: float = FloatField(null=True)

    class Meta:
        database = db
        # Unique composite index enforcing one row per tick.
        indexes = ((("symbol", "exchange", "datetime"), True), )
class AdamOptimizer(BaseModel):
    """Adam optimizer settings attached to a Hyperparameters row."""

    # NOTE(review): `related_name` is peewee 2.x syntax (peewee 3
    # renamed it to `backref`); left unchanged pending version check.
    hyperparameters = ForeignKeyField(Hyperparameters, related_name='adam_optimizer')
    learning_rate = FloatField()
    beta1 = FloatField()
    beta2 = FloatField()
class NdsTrialConfig(Model):
    """
    Trial config for NDS.

    Attributes
    ----------
    model_family : str
        Could be ``nas_cell``, ``residual_bottleneck``, ``residual_basic`` or ``vanilla``.
    model_spec : dict
        If ``model_family`` is ``nas_cell``, it contains ``num_nodes_normal``, ``num_nodes_reduce``, ``depth``,
        ``width``, ``aux`` and ``drop_prob``. If ``model_family`` is ``residual_bottleneck``, it contains ``bot_muls``,
        ``ds`` (depths), ``num_gs`` (number of groups) and ``ss`` (strides). If ``model_family`` is ``residual_basic`` or
        ``vanilla``, it contains ``ds``, ``ss`` and ``ws``.
    cell_spec : dict
        If ``model_family`` is not ``nas_cell`` it will be an empty dict. Otherwise, it specifies
        ``<normal/reduce>_<i>_<op/input>_<x/y>``, where i ranges from 0 to ``num_nodes_<normal/reduce> - 1``.
        If it is an ``op``, the value is chosen from the constants specified previously like
        :const:`nni.nas.benchmark.nds.CONV_1X1`. If it is i's ``input``, the value range from 0 to ``i + 1``,
        as ``nas_cell`` uses previous two nodes as inputs, and node 0 is actually the second node.
        Refer to NASNet paper for details. Finally, another two key-value pairs ``normal_concat``
        and ``reduce_concat`` specify which nodes are eventually concatenated into output.
    dataset : str
        Dataset used. Could be ``cifar10`` or ``imagenet``.
    generator : str
        Can be one of ``random`` which generates configurations at random, while keeping learning rate and weight decay
        fixed, ``fix_w_d`` which further keeps ``width`` and ``depth`` fixed, only applicable for ``nas_cell``.
        ``tune_lr_wd`` which further tunes learning rate and weight decay.
    proposer : str
        Paper who has proposed the distribution for random sampling. Available proposers include ``nasnet``, ``darts``,
        ``enas``, ``pnas``, ``amoeba``, ``vanilla``, ``resnext-a``, ``resnext-b``, ``resnet``, ``resnet-b``
        (ResNet with bottleneck). See NDS paper for details.
    base_lr : float
        Initial learning rate.
    weight_decay : float
        L2 weight decay applied on weights.
    num_epochs : int
        Number of epochs scheduled, during which learning rate will decay to 0 following cosine annealing.
    """

    model_family = CharField(max_length=20, index=True, choices=[
        'nas_cell',
        'residual_bottleneck',
        'residual_basic',
        'vanilla',
    ])
    model_spec = JSONField(json_dumps=json_dumps, index=True)
    cell_spec = JSONField(json_dumps=json_dumps, index=True, null=True)
    dataset = CharField(max_length=15, index=True, choices=['cifar10', 'imagenet'])
    generator = CharField(max_length=15, index=True, choices=[
        'random',
        'fix_w_d',
        'tune_lr_wd',
    ])
    proposer = CharField(max_length=15, index=True)
    base_lr = FloatField()
    weight_decay = FloatField()
    num_epochs = IntegerField()

    class Meta:
        # Bound to a database proxy that is initialized elsewhere.
        database = proxy
class File(BaseModel):
    """A file tracked under a Watch, with its last-modified mtime."""

    watch = ForeignKeyField(Watch)
    filename = CharField(max_length=640)
    # Stored as a float — presumably an os.stat mtime; confirm at the
    # write site.
    last_modified = FloatField()
class CeresSettings(BaseModel):
    """A named, versioned set of CERES simulation settings, with the rc
    template and the bundled resource files needed to run it."""

    name = CharField()
    revision = IntegerField()
    rc_template = TextField()

    # files: tar.gz archive of the resource files (see write_resources).
    resource_files = BlobField()

    # settings
    psf_sigma = FloatField()
    apd_dead_time = FloatField()
    apd_recovery_time = FloatField()
    apd_cross_talk = FloatField()
    apd_afterpulse_probability_1 = FloatField()
    apd_afterpulse_probability_2 = FloatField()
    excess_noise = FloatField()
    nsb_rate = FloatField(null=True)
    additional_photon_acceptance = FloatField()
    dark_count_rate = FloatField()
    pulse_shape_function = TextField()
    residual_time_spread = FloatField()
    gapd_time_jitter = FloatField()
    discriminator_threshold = FloatField(null=True)

    def format_rc(self, run, resource_directory):
        """Render the rc template for `run`; StrictUndefined makes any
        missing template variable an error instead of silent output."""
        return Template(self.rc_template, undefined=StrictUndefined).render(
            settings=self,
            run=run,
            resource_directory=resource_directory,
        )

    def rc_path(self, run, resource_directory):
        """Path of the rc file for `run`, named by observation mode."""
        if run.diffuse:
            name = f'ceres_diffuse_{run.off_target_distance:.0f}d.rc'
        else:
            if run.off_target_distance > 0:
                name = f'ceres_wobble_{run.off_target_distance:.1f}d.rc'
            else:
                # Plain string: the original used an f-string with no
                # placeholders.
                name = 'ceres_on.rc'
        return os.path.join(resource_directory, name)

    def write_rc(self, run, resource_directory):
        """Render and write the rc file for `run`."""
        rc_path = self.rc_path(run, resource_directory)
        rc_content = self.format_rc(run, resource_directory)
        with open(rc_path, 'w') as f:
            f.write(rc_content)

    def write_resources(self, resource_directory):
        """Extract the bundled tar.gz resource files into
        `resource_directory`, cleaning up the directory on any failure."""
        try:
            os.makedirs(resource_directory, exist_ok=True)
            sp.run(
                ['tar', 'xz', '-C', resource_directory],
                input=self.resource_files,
                check=True,
            )
        except BaseException:
            # Same semantics as the original bare `except:` (clean up,
            # then re-raise — including KeyboardInterrupt), but explicit.
            shutil.rmtree(resource_directory, ignore_errors=True)
            raise

    class Meta:
        database = database
        indexes = (
            # unique index corsika run / ceres settings
            (('name', 'revision'), True),
        )
class TagValue(BaseModel):
    """One sampled value of a TagTrend series."""

    tag = ForeignKeyField(TagTrend, backref='values')
    value = FloatField()
    # Callable default: evaluated per-row at insert time.
    timestamp = DateTimeField(default=datetime.now)
class CeresRun(BaseModel):
    """One CERES (detector simulation) job for a single CORSIKA run.

    A run is unique per (corsika_run, ceres_settings, off_target_distance,
    diffuse); path and file-name helpers encode the observation mode.
    """

    ceres_settings = ForeignKeyField(CeresSettings)
    # input file
    corsika_run = ForeignKeyField(CorsikaRun)

    # runwise settings
    off_target_distance = FloatField(default=6)  # degrees (see mode string formatting)
    diffuse = BooleanField(default=True)

    # processing related fields
    location = TextField(null=True)
    duration = IntegerField(null=True)
    status = ForeignKeyField(Status)
    walltime = IntegerField(default=120)
    priority = IntegerField(default=4)
    result_events_file = TextField(null=True)
    result_runheader_file = TextField(null=True)

    class Meta:
        database = database
        indexes = (
            # unique index corsika run / ceres settings / off_target_distance / diffuse
            (('corsika_run', 'ceres_settings', 'off_target_distance', 'diffuse'), True),
        )

    def build_mode_string(self):
        """Return the observation-mode tag used in directory and file names."""
        corsika_run = self.corsika_run
        if self.diffuse or corsika_run.viewcone > 0:
            # with zero off-target distance the viewcone defines the angle
            if self.off_target_distance == 0:
                angle = corsika_run.viewcone
            else:
                angle = self.off_target_distance
            mode = f'diffuse_{angle:.0f}d'
        else:
            if self.off_target_distance > 0:
                mode = f'wobble_{self.off_target_distance:.1f}d'
            else:
                mode = 'on'
        return mode

    @property
    def directory_name(self):
        """Relative output directory, grouping runs in blocks of 1000 ids."""
        ceres_settings = self.ceres_settings
        corsika_run = self.corsika_run
        mode = self.build_mode_string()

        return os.path.join(
            'ceres',
            f'r{ceres_settings.revision}',
            f'{ceres_settings.name}',
            corsika_run.corsika_settings.name,
            primary_id_to_name(corsika_run.primary_particle),
            mode,
            f'{corsika_run.id // 1000:05d}000',
        )

    @property
    def basename(self):
        """File basename encoding primary, mode, run id and pointing ranges."""
        corsika_run = self.corsika_run
        mode = self.build_mode_string()

        # fix: dropped the unused `name=` format argument (the template has
        # no {name} placeholder) and the then-dead ceres_settings lookup
        return 'ceres_{primary}_{mode}_run_{run:08d}_az{min_az:03.0f}-{max_az:03.0f}_zd{min_zd:02.0f}-{max_zd:02.0f}'.format(
            primary=primary_id_to_name(corsika_run.primary_particle),
            mode=mode,
            run=corsika_run.id,
            min_az=corsika_run.azimuth_min,
            max_az=corsika_run.azimuth_max,
            min_zd=corsika_run.zenith_min,
            max_zd=corsika_run.zenith_max,
        )

    @property
    def logfile(self):
        """Absolute path of this run's log file."""
        return os.path.join(config.mopro_directory, 'logs',
                            self.directory_name, self.basename + '.log')
class DbBarData(ModelBase):
    """
    Candlestick bar data for database storage.

    Index is defined unique with datetime, interval, symbol
    """

    id = AutoField()

    # identity of the bar
    symbol: str = CharField()
    exchange: str = CharField()
    datetime: datetime = DateTimeField()  # stored naive, in DB_TZ (see from_bar)
    interval: str = CharField()

    # bar values
    volume: float = FloatField()
    open_interest: float = FloatField()
    open_price: float = FloatField()
    high_price: float = FloatField()
    low_price: float = FloatField()
    close_price: float = FloatField()

    class Meta:
        database = db
        # exactly one bar per (symbol, exchange, interval, datetime)
        indexes = ((("symbol", "exchange", "interval", "datetime"), True),)

    @staticmethod
    def from_bar(bar: BarData) -> "DbBarData":
        """
        Generate DbBarData object from BarData.
        """
        # Change datetime to database timezone, then
        # remove tzinfo since not supported by SQLite.
        dt = bar.datetime.astimezone(DB_TZ)
        dt = dt.replace(tzinfo=None)

        db_bar = DbBarData()
        db_bar.symbol = bar.symbol
        db_bar.exchange = bar.exchange.value  # store enum value, not the enum
        db_bar.datetime = dt
        db_bar.interval = bar.interval.value
        db_bar.volume = bar.volume
        db_bar.open_interest = bar.open_interest
        db_bar.open_price = bar.open_price
        db_bar.high_price = bar.high_price
        db_bar.low_price = bar.low_price
        db_bar.close_price = bar.close_price
        return db_bar

    def to_bar(self) -> "BarData":
        """
        Generate BarData object from DbBarData.
        """
        bar = BarData(
            symbol=self.symbol,
            exchange=Exchange(self.exchange),
            # re-attach the database timezone stripped off in from_bar()
            datetime=self.datetime.replace(tzinfo=DB_TZ),
            interval=Interval(self.interval),
            volume=self.volume,
            open_price=self.open_price,
            high_price=self.high_price,
            open_interest=self.open_interest,
            low_price=self.low_price,
            close_price=self.close_price,
            gateway_name="DB",
        )
        return bar

    @staticmethod
    def save_all(objs: List["DbBarData"]) -> None:
        """
        save a list of objects, update if exists.
        """
        dicts = [i.to_dict() for i in objs]
        with db.atomic():
            if driver is Driver.POSTGRESQL:
                # PostgreSQL path: per-row upsert with an explicit
                # ON CONFLICT target matching the unique bar index
                for bar in dicts:
                    DbBarData.insert(bar).on_conflict(
                        update=bar,
                        conflict_target=(
                            DbBarData.symbol,
                            DbBarData.exchange,
                            DbBarData.interval,
                            DbBarData.datetime,
                        ),
                    ).execute()
            else:
                # other drivers: chunked bulk insert, replacing on conflict
                # (chunk size 50 — presumably to stay under parameter limits)
                for c in chunked(dicts, 50):
                    DbBarData.insert_many(
                        c).on_conflict_replace().execute()
class CorsikaRun(BaseModel):
    '''
    One CORSIKA air-shower simulation run.

    Attributes
    ----------
    primary_particle: int
        primary particle id, e.g. 14 for proton, 1 for gamma
    n_showers: int
        number of showers to simulate in this run
    zenith_min: float
        minimum zenith angle in degree
    zenith_max: float
        maximum zenith angle in degree, set equal to `zenith_min` for a fixed zenith
    azimuth_min: float
        minimum azimuth angle in degree
    azimuth_max: float
        maximum azimuth angle in degree, set equal to `azimuth_min` for a fixed azimuth
    energy_min: float
        minimum energy to simulate in GeV
    energy_max: float
        maximum energy to simulate in GeV
    spectral_index: float
        Spectral index, must be <= 0
    viewcone: float
        outer radius of the viewcone in degree
    reuse: int
        number of reuses for each shower
    max_radius: float
        maximum radius in m
    bunch_size: int
        photon bunch size
    '''
    # CORSIKA related fields
    corsika_settings = ForeignKeyField(CorsikaSettings)
    primary_particle = IntegerField()
    n_showers = IntegerField(default=5000)
    zenith_min = FloatField()
    zenith_max = FloatField()
    azimuth_min = FloatField()
    azimuth_max = FloatField()
    energy_min = FloatField()
    energy_max = FloatField()
    spectral_index = FloatField()
    viewcone = FloatField(default=0)
    reuse = IntegerField(default=1)
    max_radius = FloatField()
    bunch_size = IntegerField(default=1)

    # processing related fields
    priority = IntegerField(default=5)
    location = TextField(null=True)
    duration = IntegerField(null=True)
    status = ForeignKeyField(Status)
    walltime = IntegerField(default=2880)
    result_file = TextField(null=True)

    class Meta:
        # database-level sanity checks mirroring the docstring contracts
        constraints = [
            Check('n_showers >= 1'),
            Check('zenith_min >= 0'),
            Check('zenith_max >= zenith_min'),
            Check('azimuth_min >= 0'),
            Check('azimuth_max >= azimuth_min'),
            Check('energy_min >= 0'),
            Check('energy_max >= energy_min'),
            Check('spectral_index <= 0'),
            Check('viewcone >= 0'),
            Check('reuse >= 1'),
            Check('reuse <= 20'),
            Check('max_radius >= 0'),
            Check('bunch_size >= 1'),
        ]

    def __repr__(self):
        # fix: the energy range was missing from the listed run configuration
        return 'CorsikaRun(\n ' + '\n '.join([
            f'run_id={self.id}',
            f'corsika_settings={self.corsika_settings_id}',
            f'primary_particle={self.primary_particle}',
            f'n_showers={self.n_showers}',
            f'zenith_min={self.zenith_min} °',
            f'zenith_max={self.zenith_max} °',
            f'azimuth_min={self.azimuth_min} °',
            f'azimuth_max={self.azimuth_max} °',
            f'energy_min={self.energy_min} GeV',
            f'energy_max={self.energy_max} GeV',
            f'spectral_index={self.spectral_index}',
            f'viewcone={self.viewcone} °',
            f'reuse={self.reuse}',
            f'max_radius={self.max_radius} m',
            f'bunch_size={self.bunch_size}',
        ]) + '\n)'

    def __str__(self):
        return repr(self)

    @property
    def directory_name(self):
        """Relative output directory, grouping runs in blocks of 1000 ids."""
        return os.path.join(
            'corsika',
            str(self.corsika_settings.version),
            self.corsika_settings.name,
            primary_id_to_name(self.primary_particle),
            f'{self.id // 1000:05d}000',
        )

    @property
    def basename(self):
        """File basename encoding primary, run id and pointing ranges."""
        return 'corsika_{primary}_run_{run:08d}_az{min_az:03.0f}-{max_az:03.0f}_zd{min_zd:02.0f}-{max_zd:02.0f}'.format(
            primary=primary_id_to_name(self.primary_particle),
            run=self.id,
            min_az=self.azimuth_min,
            max_az=self.azimuth_max,
            min_zd=self.zenith_min,
            max_zd=self.zenith_max,
        )

    @property
    def logfile(self):
        """Absolute path of this run's log file."""
        return os.path.join(config.mopro_directory, 'logs', self.directory_name, self.basename + '.log')
class Daily(BaseModel):
    """Daily OHLCV bar with precomputed moving averages (TuShare-style schema)."""

    class Meta:
        # NOTE(review): `db_table` is the peewee-2 spelling; peewee 3 renamed
        # it to `table_name` — confirm the pinned peewee version before changing.
        db_table = 'daily'

    id = IntegerField(primary_key=True)
    ts_code = CharField(32, default='')    # TS code
    # fix: was `IntegerField(11, default=0)` — the positional 11 (a MySQL-style
    # display width) bound to peewee's first positional parameter `null`,
    # silently making the column nullable
    trade_date = IntegerField(default=0)   # trade date
    open = FloatField(default=0)           # open price
    high = FloatField(default=0)           # high price
    low = FloatField(default=0)            # low price
    close = FloatField(default=0)          # close price
    pre_close = FloatField(default=0)      # previous close
    change = FloatField(default=0)         # price change
    pct_chg = FloatField(default=0)        # percent change
    vol = FloatField(default=0)            # volume (lots)
    amount = FloatField(default=0)         # turnover (thousand CNY)
    ma5 = FloatField(default=0)            # 5-day average price
    ma10 = FloatField(default=0)           # 10-day average price
    ma20 = FloatField(default=0)           # 20-day average price
    ma30 = FloatField(default=0)           # 30-day average price
    ma60 = FloatField(default=0)           # 60-day average price
    ma120 = FloatField(default=0)          # 120-day average price
    ma12 = FloatField(default=0)           # 12-day average price
    ma26 = FloatField(default=0)           # 26-day average price
    ma_v_3 = FloatField(default=0)         # 3-day average volume (lots)
    create_time = DateTimeField()
    update_time = DateTimeField()
class Dados(ModeloBase):
    """Sensor reading ("dados" = data) reported by a station."""

    rasp_id = CharField()          # reporting device id — presumably a Raspberry Pi; confirm
    temp = FloatField(default=0)   # temperature ("temperatura")
    umi = FloatField(default=0)    # humidity ("umidade")
    pres = FloatField(default=0)   # pressure ("pressão")
    data = DateTimeField()         # reading timestamp ("data" = date)
class BWAMappingStat(BaseModel):
    """Per-sample BWA mapping and coverage statistics for one batch.

    Field names keep their non-PEP8 casing because they map directly to
    database columns; renaming them would change the schema.
    """

    batch = CharField()
    Samples = CharField()
    InsertSize_mean = FloatField()
    Insert_std = FloatField()
    Mapped_rate = FloatField()
    PCR_duplication = FloatField()
    MEAN_COVERAGE = FloatField()
    MEDIAN_COVERAGE = FloatField()
    # fraction of the genome covered at the given minimum depth
    Coverage_at_least_1X = FloatField()
    Coverage_at_least_5X = FloatField()
    Coverage_at_least_10X = FloatField()
    Coverage_at_least_20X = FloatField()
    Coverage_at_least_30X = FloatField()

    class Meta:
        table_name = 'mapping_stat'
class AdadeltaOptimizer(BaseModel):
    """Adadelta optimizer settings linked to a Hyperparameters row."""

    # NOTE(review): `related_name` is the peewee-2 spelling (peewee 3 renamed
    # it to `backref`) — confirm the pinned peewee version.
    hyperparameters = ForeignKeyField(Hyperparameters, related_name='adadelta_optimizer')
    learning_rate = FloatField()
    rho = FloatField()  # Adadelta rho coefficient
class Log(BaseModel):
    """Per-user log entry recording an action and its traffic usage."""

    user_id = CharField(unique=True, max_length=20)  # one row per user
    action = CharField(max_length=30)
    # traffic consumed; units not visible here — presumably bytes or MB, confirm
    traffic_use = FloatField()
elif data_type == "spending" and per_day: amount = self.average_spending_per_day elif data_type == "spending" and not per_day: amount = self.average_spending elif data_type == "impressions" and per_day: amount = self.average_impressions_per_day elif data_type == "impressions" and not per_day: amount = self.average_impressions elif data_type == "estimated-audience-size" and per_day: amount = self.average_audience_size_per_day elif data_type == "estimated-audience-size" and not per_day: amount = self.average_audience_size else: raise ValueError(f"Unknown data type: {data_type}") if demographic == "total": return amount return amount * getattr(self, Ad.demographic_to_field_name(demographic)) for dt in GENDERS + REGIONS + AGE_RANGES: Ad._meta.add_field(Ad.demographic_to_field_name(dt), FloatField(default=0)) database_handler.create_tables([Ad])