def upgrade(version):
    """Upgrade a save database in place from `version` to `version + 1`.

    One branch per source version.  SQLite cannot alter column constraints in
    place, so several branches rebuild a table through a temporary copy
    (CREATE TEMPORARY TABLE _t AS SELECT ... -> DROP -> re-create -> INSERT
    ... SELECT), usually with foreign-key enforcement switched off for the
    duration.  Statement order inside each branch is significant.

    NOTE(review): `Constants.get().update(...)` goes through peewee's
    classmethod `update`, which builds an UPDATE without a WHERE clause --
    this assumes the constants table holds exactly one row; confirm against
    the Constants model.

    Raises:
        Exception: when no migration exists for `version`.
    """
    if version == 3:
        # Rebuild grid_layer against the current model definition.
        from models import GridLayer
        db.execute_sql(
            "CREATE TEMPORARY TABLE _grid_layer AS SELECT * FROM grid_layer")
        db.drop_tables([GridLayer])
        db.create_tables([GridLayer])
        db.execute_sql("INSERT INTO grid_layer SELECT * FROM _grid_layer")
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 4:
        # Rebuild location; FK checks are disabled because other tables
        # reference it while it is temporarily dropped.
        from models import Location
        db.foreign_keys = False
        db.execute_sql(
            "CREATE TEMPORARY TABLE _location AS SELECT * FROM location")
        db.execute_sql("DROP TABLE location")
        db.create_tables([Location])
        db.execute_sql("INSERT INTO location SELECT * FROM _location")
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 5:
        # Add location_user_option.active_layer_id, backfill it with each
        # location's "tokens" layer, then make the column NOT NULL.
        from models import Layer
        from peewee import ForeignKeyField
        migrator = SqliteMigrator(db)
        field = ForeignKeyField(Layer, Layer.id, backref="active_users", null=True)
        with db.atomic():
            migrate(
                migrator.add_column("location_user_option", "active_layer_id",
                                    field))
            from models import LocationUserOption
            # Register the new field on the model at runtime so the backfill
            # below can assign through the ORM.
            LocationUserOption._meta.add_field("active_layer", field)
            for luo in LocationUserOption.select():
                # Assumes every location has a layer named "tokens";
                # raises IndexError otherwise.
                luo.active_layer = luo.location.layers.select().where(
                    Layer.name == "tokens")[0]
                luo.save()
            migrate(
                migrator.add_not_null("location_user_option",
                                      "active_layer_id"))
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 6:
        # Relax the version-5 constraint again: the active layer may be NULL.
        migrator = SqliteMigrator(db)
        migrate(
            migrator.drop_not_null("location_user_option", "active_layer_id"))
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 7:
        # Remove shape index unique constraint
        from models import Shape
        db.foreign_keys = False
        db.execute_sql("CREATE TEMPORARY TABLE _shape AS SELECT * FROM shape")
        db.execute_sql("DROP TABLE shape")
        db.create_tables([Shape])
        db.execute_sql("INSERT INTO shape SELECT * FROM _shape")
        db.foreign_keys = True
        # Check all indices and reset to 0 index
        logger.info("Validating all shape indices")
        from models import Layer
        with db.atomic():
            for layer in Layer.select():
                # Order by absolute value so any negative placeholder indices
                # keep their relative position while being renumbered 0..n-1.
                shapes = layer.shapes.order_by(fn.ABS(Shape.index))
                for i, shape in enumerate(shapes):
                    shape.index = i
                    shape.save()
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 8:
        # Introduce the Polygon shape type.
        from models import Polygon
        db.create_tables([Polygon])
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 9:
        # Per-location vision settings (mode + min/max range).
        from models import Location
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        with db.atomic():
            migrate(
                migrator.add_column("location", "vision_mode",
                                    Location.vision_mode),
                migrator.add_column("location", "vision_min_range",
                                    Location.vision_min_range),
                migrator.add_column("location", "vision_max_range",
                                    Location.vision_max_range),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 10:
        # Shapes can now hide their name from players.
        from models import Shape
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        with db.atomic():
            migrate(
                migrator.add_column("shape", "name_visible",
                                    Shape.name_visible))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 11:
        # Introduce labels: new Label/ShapeLabel tables plus a per-user
        # active-filters column.
        from models import Label, LocationUserOption, ShapeLabel
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        with db.atomic():
            db.create_tables([Label, ShapeLabel])
            migrate(
                migrator.add_column(
                    "location_user_option",
                    "active_filters",
                    LocationUserOption.active_filters,
                ))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 12:
        # Split "category:name" label names into separate columns.
        from models import Label, LabelSelection
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        with db.atomic():
            try:
                migrate(
                    migrator.add_column("label", "category", Label.category))
            except OperationalError as e:
                # Tolerate a partially-applied earlier run of this migration;
                # anything else is a real failure.
                if e.args[0] != "duplicate column name: category":
                    raise e
            db.create_tables([LabelSelection])
        with db.atomic():
            # Iterating the model class iterates a full SELECT in peewee.
            for label in Label:
                if ":" not in label.name:
                    continue
                # Only the first ":" separates category from name; further
                # colons stay part of the name.
                cat, *name = label.name.split(":")
                label.category = cat
                label.name = ":".join(name)
                label.save()
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 13:
        # active_filters is superseded; drop it again.
        # NOTE(review): MultiLine and Polygon are imported but unused here.
        from models import LocationUserOption, MultiLine, Polygon
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        migrate(migrator.drop_column("location_user_option", "active_filters"))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 14:
        # Normalise shape subtypes: every subtype table is rebuilt keyed on
        # shape uuid, orphans are dropped via the INNER JOIN re-insert, and
        # grid_layer rows are re-linked to their Layer (or deleted).
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        from models import GridLayer, Layer
        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "base_rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)'
        )
        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "shape_type" ("shape_id" TEXT NOT NULL PRIMARY KEY, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)'
        )
        shape_types = [
            "asset_rect",
            "circle",
            "circular_token",
            "line",
            "multi_line",
            "polygon",
            "rect",
            "text",
        ]
        with db.atomic():
            for table in shape_types:
                db.execute_sql(
                    f"CREATE TEMPORARY TABLE _{table} AS SELECT * FROM {table}"
                )
                db.execute_sql(f"DROP TABLE {table}")
            for query in [
                    'CREATE TABLE IF NOT EXISTS "asset_rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, "src" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "circle" ("shape_id" TEXT NOT NULL PRIMARY KEY, "radius" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "circular_token" ("shape_id" TEXT NOT NULL PRIMARY KEY, "radius" REAL NOT NULL, "text" TEXT NOT NULL, "font" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "line" ("shape_id" TEXT NOT NULL PRIMARY KEY, "x2" REAL NOT NULL, "y2" REAL NOT NULL, "line_width" INTEGER NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "multi_line" ("shape_id" TEXT NOT NULL PRIMARY KEY, "line_width" INTEGER NOT NULL, "points" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "polygon" ("shape_id" TEXT NOT NULL PRIMARY KEY, "vertices" TEXT NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "rect" ("shape_id" TEXT NOT NULL PRIMARY KEY, "width" REAL NOT NULL, "height" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
                    'CREATE TABLE IF NOT EXISTS "text" ("shape_id" TEXT NOT NULL PRIMARY KEY, "text" TEXT NOT NULL, "font" TEXT NOT NULL, "angle" REAL NOT NULL, FOREIGN KEY ("shape_id") REFERENCES "shape" ("uuid") ON DELETE CASCADE)',
            ]:
                db.execute_sql(query)
            for table in shape_types:
                # INNER JOIN drops subtype rows whose parent shape no longer
                # exists.
                db.execute_sql(
                    f"INSERT INTO {table} SELECT _{table}.* FROM _{table} INNER JOIN shape ON shape.uuid = _{table}.uuid"
                )
        # NOTE(review): ForeignKeyField is not imported in this branch; it is
        # only imported locally in the version-5 branch.  This works only if
        # there is a module-level `from peewee import ForeignKeyField` outside
        # this view -- confirm, otherwise this branch raises NameError.
        field = ForeignKeyField(Layer, Layer.id, null=True)
        with db.atomic():
            migrate(migrator.add_column("grid_layer", "layer_id", field))
            for gl in GridLayer.select():
                # Historical rows used the layer's id as their own id.
                l = Layer.get_or_none(id=gl.id)
                if l:
                    gl.layer = l
                    gl.save()
                else:
                    gl.delete_instance()
            migrate(migrator.add_not_null("grid_layer", "layer_id"))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 15:
        # Rooms can now be locked by the DM.
        from peewee import BooleanField
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("room", "is_locked",
                                    BooleanField(default=False)))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 16:
        # Configurable distance unit per location (default feet).
        from peewee import TextField
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("location", "unit_size_unit",
                                    TextField(default="ft")))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 17:
        # Fold multi_line into polygon: a multi_line is an open polygon.
        from peewee import BooleanField, IntegerField
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("polygon", "open_polygon",
                                    BooleanField(default=False)),
                migrator.add_column("polygon", "line_width",
                                    IntegerField(default=2)),
            )
            db.execute_sql(
                "INSERT INTO polygon (shape_id, line_width, vertices, open_polygon) SELECT shape_id, line_width, points, 1 FROM multi_line"
            )
            db.execute_sql("DROP TABLE multi_line")
            db.execute_sql(
                "UPDATE shape SET type_ = 'polygon' WHERE type_ = 'multiline'")
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 18:
        # Optional e-mail address per user.
        from peewee import TextField
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(migrator.add_column("user", "email", TextField(null=True)))
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 19:
        # Introduce floors between locations and layers: every location gets
        # one "ground" floor, and layers are re-parented from location to
        # floor.
        from peewee import ForeignKeyField
        db.foreign_keys = False
        migrator = SqliteMigrator(db)
        db.execute_sql(
            'CREATE TABLE IF NOT EXISTS "floor" ("id" INTEGER NOT NULL PRIMARY KEY, "location_id" INTEGER NOT NULL, "name" TEXT, "index" INTEGER NOT NULL, FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE)'
        )
        # NOTE(review): "ground" is double-quoted; SQLite treats unknown
        # double-quoted identifiers as string literals, which is what makes
        # this work -- it relies on that SQLite quirk.
        db.execute_sql(
            'INSERT INTO floor (location_id, name, "index") SELECT id, "ground", 0 FROM location'
        )
        with db.atomic():
            db.execute_sql(
                "CREATE TEMPORARY TABLE _layer AS SELECT * FROM layer")
            db.execute_sql("DROP TABLE layer")
            db.execute_sql(
                'CREATE TABLE IF NOT EXISTS "layer" ("id" INTEGER NOT NULL PRIMARY KEY, "floor_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "type_" TEXT NOT NULL, "player_visible" INTEGER NOT NULL, "player_editable" INTEGER NOT NULL, "selectable" INTEGER NOT NULL, "index" INTEGER NOT NULL, FOREIGN KEY ("floor_id") REFERENCES "floor" ("id") ON DELETE CASCADE)'
            )
            db.execute_sql(
                'INSERT INTO layer (id, floor_id, name, type_, player_visible, player_editable, selectable, "index") SELECT _layer.id, floor.id, _layer.name, type_, player_visible, player_editable, selectable, _layer."index" FROM _layer INNER JOIN floor ON floor.location_id = _layer.location_id'
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    elif version == 20:
        # Numbered badges on shapes.
        # NOTE(review): BooleanField is imported twice here -- harmless, but
        # the duplicate can be dropped.
        from peewee import BooleanField, BooleanField, IntegerField
        migrator = SqliteMigrator(db)
        db.foreign_keys = False
        with db.atomic():
            migrate(
                migrator.add_column("shape", "badge", IntegerField(default=1)),
                migrator.add_column("shape", "show_badge",
                                    BooleanField(default=False)),
            )
        db.foreign_keys = True
        Constants.get().update(save_version=Constants.save_version + 1).execute()
    else:
        raise Exception(
            f"No upgrade code for save format {version} was found.")
class Proposal(GovernanceClass, BaseModel):
    """A budget proposal governance object mirrored from ariond.

    Rows are created/refreshed by GovernanceObject.sync(); ariond remains the
    source of truth for payment data and vote counts.
    """

    governance_object = ForeignKeyField(GovernanceObject, related_name='proposals', on_delete='CASCADE', on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    # src/governance-validators.cpp
    MAX_DATA_SIZE = 512

    govobj_type = ariond_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        """Validate this proposal against network rules.

        Checks name format, epoch ordering, payment amount/address, URL
        sanity and serialized size.  Returns True only if every check
        passes; any unexpected exception is treated as invalid.
        """
        import arionlib

        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" % self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg(
                    "\tInvalid Proposal name [%s] (does not match regex), returning False" % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg(
                    "\tProposal end_epoch [%s] <= start_epoch [%s] , returning False" % (self.end_epoch, self.start_epoch))
                return False

            # amount must be numeric
            if misc.is_numeric(self.payment_amount) is False:
                printdbg(
                    "\tProposal amount [%s] is not valid, returning False" % self.payment_amount)
                return False

            # amount can't be negative or 0
            if (float(self.payment_amount) <= 0):
                printdbg(
                    "\tProposal amount [%s] is negative or zero, returning False" % self.payment_amount)
                return False

            # payment address is valid base58 arion addr, non-multisig
            if not arionlib.is_valid_arion_address(self.payment_address, config.network):
                printdbg(
                    "\tPayment address [%s] not a valid Arion address for network [%s], returning False" % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" % self.url)
                return False

            # proposal URL has any whitespace
            # BUGFIX: the debug message used to interpolate self.name here,
            # which made the log misleading; it now prints the URL it checks.
            if (re.search(r'\s', self.url)):
                printdbg(
                    "\tProposal URL [%s] has whitespace, returning False" % self.url)
                return False

            # Arion Core restricts proposals to 512 bytes max
            if len(self.serialise()) > (self.MAX_DATA_SIZE * 2):
                printdbg("\tProposal [%s] is too big, returning False" % self.name)
                return False

            # URL must at least be parseable.
            try:
                urlparse.urlparse(self.url)
            except Exception as e:
                printdbg(
                    "\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            # BUGFIX: previously formatted e.message, which does not exist on
            # Python 3 exceptions (and is unreliable on Python 2).
            printdbg(
                "Unable to validate in Proposal#is_valid, marking invalid: %s" % e)
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self, superblockcycle=None):
        """Return True if this proposal ended long enough ago to be pruned.

        The expiration window is half a superblock cycle (in seconds) plus a
        fudge window, so valid proposals near the cutoff are not excluded
        from a superblock prematurely.

        Raises:
            Exception: if superblockcycle is not supplied.
        """
        from constants import SUPERBLOCK_FUDGE_WINDOW
        import arionlib

        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # half the SB cycle, converted to seconds
        # add the fudge_window in seconds, defined elsewhere in Sentinel
        expiration_window_seconds = int(
            (arionlib.blocks_to_seconds(superblockcycle) / 2) +
            SUPERBLOCK_FUDGE_WINDOW)
        printdbg("\texpiration_window_seconds = %s" % expiration_window_seconds)

        # "fully expires" adds the expiration window to end time to ensure a
        # valid proposal isn't excluded from SB by cutting it too close
        fully_expires_at = self.end_epoch + expiration_window_seconds
        printdbg("\tfully_expires_at = %s" % fully_expires_at)

        if (fully_expires_at < now):
            printdbg("\tProposal end_epoch [%s] < now [%s] , returning True" % (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    @classmethod
    def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
        """Return valid proposals over the quorum, best-ranked first.

        Sorted by absolute yes count descending; object hash is a secondary
        key so ties break deterministically (superblocks must be
        deterministic across nodes).
        """
        # return all approved proposals, in order of descending vote count
        query = (
            self.select(
                self, GovernanceObject)  # Note that we are selecting both models.
            .join(GovernanceObject).where(
                GovernanceObject.absolute_yes_count > proposal_quorum).
            order_by(GovernanceObject.absolute_yes_count.desc(),
                     GovernanceObject.object_hash.desc()))

        ranked = []
        for proposal in query:
            proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @classmethod
    def expired(self, superblockcycle=None):
        """Return all proposals that is_expired() considers prunable.

        Raises:
            Exception: if superblockcycle is not supplied.
        """
        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        expired = []

        for proposal in self.select():
            if proposal.is_expired(superblockcycle):
                expired.append(proposal)

        return expired

    @property
    def rank(self):
        """Absolute yes count of the linked governance object (0 if none)."""
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank
class GovernanceObject(BaseModel):
    """Local mirror of an ariond governance object plus vote bookkeeping.

    ariond is the source of truth: sync() pulls the current gobject list,
    creating/updating subtype rows (Proposal, Superblock) and purging rows
    that disappeared from the network.
    """

    parent_id = IntegerField(default=0)
    # BUGFIX: the default used to be int(time.time()) evaluated once at
    # import time, so every row created during a process lifetime got the
    # process start time.  A callable default is evaluated per insert.
    object_creation_time = IntegerField(default=lambda: int(time.time()))
    object_hash = CharField(max_length=64)
    object_parent_hash = CharField(default='0')
    object_type = IntegerField(default=0)
    object_revision = IntegerField(default=1)
    object_fee_tx = CharField(default='')
    yes_count = IntegerField(default=0)
    no_count = IntegerField(default=0)
    abstain_count = IntegerField(default=0)
    absolute_yes_count = IntegerField(default=0)

    class Meta:
        db_table = 'governance_objects'

    # sync ariond gobject list with our local relational DB backend
    @classmethod
    def sync(self, ariond):
        """Reconcile the local DB with ariond's current gobject list."""
        golist = ariond.rpc_command('gobject', 'list')

        # objects which are removed from the network should be removed from the DB
        try:
            for purged in self.purged_network_objects(list(golist.keys())):
                # SOMEDAY: possible archive step here
                purged.delete_instance(recursive=True, delete_nullable=True)
        except Exception as e:
            printdbg("Got an error while purging: %s" % e)

        for item in golist.values():
            try:
                (go, subobj) = self.import_gobject_from_ariond(ariond, item)
            except Exception as e:
                # best-effort import: one bad object must not stop the sync
                printdbg("Got an error upon import: %s" % e)

    @classmethod
    def purged_network_objects(self, network_object_hashes):
        """Query for rows whose hash is NOT in the given network list."""
        query = self.select()
        if network_object_hashes:
            query = query.where(~(self.object_hash << network_object_hashes))
        return query

    @classmethod
    def import_gobject_from_ariond(self, ariond, rec):
        """Create or refresh one gobject (and its subtype row) from ariond.

        Returns (govobj, subobj); subobj is None when the payload fails
        validation, in which case a delete vote is cast.
        """
        import arionlib
        import binascii
        import gobject_json

        object_hash = rec['Hash']

        gobj_dict = {
            'object_hash': object_hash,
            'object_fee_tx': rec['CollateralHash'],
            'absolute_yes_count': rec['AbsoluteYesCount'],
            'abstain_count': rec['AbstainCount'],
            'yes_count': rec['YesCount'],
            'no_count': rec['NoCount'],
        }

        # deserialise and extract object
        json_str = binascii.unhexlify(rec['DataHex']).decode('utf-8')
        dikt = gobject_json.extract_object(json_str)

        subobj = None

        type_class_map = {
            1: Proposal,
            2: Superblock,
        }
        subclass = type_class_map[dikt['type']]

        # set object_type in govobj table
        gobj_dict['object_type'] = subclass.govobj_type

        # exclude any invalid model data from ariond...
        valid_keys = subclass.serialisable_fields()
        subdikt = {k: dikt[k] for k in valid_keys if k in dikt}

        # get/create, then sync vote counts from ariond, with every run
        govobj, created = self.get_or_create(object_hash=object_hash,
                                             defaults=gobj_dict)
        if created:
            printdbg("govobj created = %s" % created)
        count = govobj.update(**gobj_dict).where(
            self.id == govobj.id).execute()
        if count:
            printdbg("govobj updated = %d" % count)
        subdikt['governance_object'] = govobj

        # get/create, then sync payment amounts, etc. from ariond - ariond is the master
        try:
            newdikt = subdikt.copy()
            newdikt['object_hash'] = object_hash
            # validate on a transient instance before touching the DB
            if subclass(**newdikt).is_valid() is False:
                govobj.vote_delete(ariond)
                return (govobj, None)

            subobj, created = subclass.get_or_create(object_hash=object_hash,
                                                     defaults=subdikt)
        except Exception as e:
            # in this case, vote as delete, and log the vote in the DB
            printdbg("Got invalid object from ariond! %s" % e)
            govobj.vote_delete(ariond)
            return (govobj, None)

        if created:
            printdbg("subobj created = %s" % created)
        count = subobj.update(**subdikt).where(
            subclass.id == subobj.id).execute()
        if count:
            printdbg("subobj updated = %d" % count)

        # ATM, returns a tuple w/gov attributes and the govobj
        return (govobj, subobj)

    def vote_delete(self, ariond):
        """Cast a yes-on-delete vote unless we already did."""
        if not self.voted_on(signal=VoteSignals.delete,
                             outcome=VoteOutcomes.yes):
            self.vote(ariond, VoteSignals.delete, VoteOutcomes.yes)
        return

    def get_vote_command(self, signal, outcome):
        """Build the ariond RPC argument list for voting on this object."""
        cmd = [
            'gobject', 'vote-conf', self.object_hash, signal.name,
            outcome.name
        ]
        return cmd

    def vote(self, ariond, signal, outcome):
        """Vote on this object via ariond, recording the vote locally.

        Skips re-voting when an identical vote exists; deletes a stale vote
        (same signal, different outcome) before re-voting.  On RPC failure,
        falls back to syncing our vote state from the network.
        """
        import arionlib

        # At this point, will probably never reach here. But doesn't hurt to
        # have an extra check just in case objects get out of sync (people will
        # muck with the DB).
        if (self.object_hash == '0' or not misc.is_hash(self.object_hash)):
            printdbg("No governance object hash, nothing to vote on.")
            return

        # have I already voted on this gobject with this particular signal and outcome?
        if self.voted_on(signal=signal):
            printdbg("Found a vote for this gobject/signal...")
            vote = self.votes.where(Vote.signal == signal)[0]

            # if the outcome is the same, move on, nothing more to do
            if vote.outcome == outcome:
                # move on.
                printdbg(
                    "Already voted for this same gobject/signal/outcome, no need to re-vote."
                )
                return
            else:
                printdbg(
                    "Found a STALE vote for this gobject/signal, deleting so that we can re-vote."
                )
                vote.delete_instance()
        else:
            printdbg("Haven't voted on this gobject/signal yet...")

        # now ... vote!
        vote_command = self.get_vote_command(signal, outcome)
        printdbg(' '.join(vote_command))
        output = ariond.rpc_command(*vote_command)

        # extract vote output parsing to external lib
        voted = arionlib.did_we_vote(output)

        if voted:
            printdbg('VOTE success, saving Vote object to database')
            Vote(governance_object=self,
                 signal=signal,
                 outcome=outcome,
                 object_hash=self.object_hash).save()
        else:
            printdbg('VOTE failed, trying to sync with network vote')
            self.sync_network_vote(ariond, signal)

    def sync_network_vote(self, ariond, signal):
        """Adopt our already-broadcast network vote for `signal`, if any."""
        printdbg('\tSyncing network vote for object %s with signal %s' %
                 (self.object_hash, signal.name))
        vote_info = ariond.get_my_gobject_votes(self.object_hash)
        for vdikt in vote_info:
            if vdikt['signal'] != signal.name:
                continue

            # ensure valid outcome
            outcome = VoteOutcomes.get(vdikt['outcome'])
            if not outcome:
                continue

            printdbg(
                '\tFound a matching valid vote on the network, outcome = %s' %
                vdikt['outcome'])
            Vote(governance_object=self,
                 signal=signal,
                 outcome=outcome,
                 object_hash=self.object_hash).save()

    def voted_on(self, **kwargs):
        """Count local votes on this object filtered by signal/outcome."""
        signal = kwargs.get('signal', None)
        outcome = kwargs.get('outcome', None)

        query = self.votes

        if signal:
            query = query.where(Vote.signal == signal)

        if outcome:
            query = query.where(Vote.outcome == outcome)

        count = query.count()
        return count
class Trainer(BaseModel):
    """A trainer observed at a gym, keyed by unique trainer name."""

    # name is the primary key -- trainer names are unique in the source data
    name = CharField(primary_key=True)
    team = IntegerField()
    level = IntegerField()
    # callable default: timestamp is taken per insert, not at import time
    last_seen = DateTimeField(default=datetime.utcnow)
class StoreVersion(Model):
    """Single-column table holding a stored schema/data version number.

    NOTE(review): derives from plain `Model` (not the project BaseModel) --
    presumably bound to a database elsewhere; confirm.
    """

    version = IntegerField()
class LeanItemModel(BaseModel):
    """Database row for a single Lean declaration (def, lemma, instance...).

    Converts to and from the plain `LeanItem` value object; dependency edges
    live in `DependanceModel`, structure fields in `StructureFieldModel`,
    and instance/class links in `InstanceModel`.
    """

    kind = CharField(max_length=10, default='unknown')
    name = CharField(unique=True)
    # null ⇔ root namespace
    namespace = ForeignKeyField(NameSpaceModel, null=True)
    leanfile = ForeignKeyField(LeanFileModel, backref='items', null=True)
    line_nb = IntegerField(default=0)
    size = IntegerField(default=0)
    proof_size = IntegerField(default=0)

    def to_py(self) -> LeanItem:
        """Build a plain LeanItem from this row and its dependency edges."""
        def_deps = []
        proof_deps = []
        for edge in self.deps:
            # 'def' edges are definition dependencies; everything else is
            # treated as a proof dependency.
            bucket = def_deps if edge.kind == 'def' else proof_deps
            bucket.append(edge.used.name)
        return LeanItem(self.kind,
                        name=self.name,
                        size=self.size,
                        line_nb=self.line_nb,
                        def_depends=def_deps,
                        proof_size=self.proof_size,
                        proof_depends=proof_deps)

    @staticmethod
    def from_py(lf_m: LeanFileModel, item: LeanItem) -> 'LeanItemModel':
        """Persist a LeanItem, creating stub rows for unseen dependencies."""

        def fetch_or_stub(dep_name):
            # Dependencies may not have been processed yet; create a stub
            # row (kind defaults to 'unknown') that a later pass fills in.
            try:
                return LeanItemModel.get(name=dep_name)
            except LeanItemModel.DoesNotExist:
                return LeanItemModel.create(name=dep_name)

        model, _ = LeanItemModel.get_or_create(name=item.name)
        model.kind = item.kind
        model.name = item.name
        model.namespace = NameSpaceModel.get_or_create(
            fullname='.'.join(item.namespace))[0]
        model.leanfile = lf_m
        model.line_nb = item.line_nb or 0
        model.size = item.size
        model.proof_size = item.proof_size
        model.save()

        for dep in item.def_depends:
            DependanceModel.create(kind='def', user=model,
                                   used=fetch_or_stub(dep))
        for dep in item.proof_depends:
            DependanceModel.create(kind='proof', user=model,
                                   used=fetch_or_stub(dep))

        for field in item.fields:
            StructureFieldModel.create(name=field, parent=model)

        if item.kind == 'instance':
            InstanceModel.create(instance=model,
                                 target=fetch_or_stub(item.instance_target))
        return model
class Gym(BaseModel):
    """A gym and its current state as scanned from the map API."""

    # Team constants matching the API's team_id values.
    UNCONTESTED = 0
    TEAM_MYSTIC = 1
    TEAM_VALOR = 2
    TEAM_INSTINCT = 3

    gym_id = CharField(primary_key=True, max_length=50)
    team_id = IntegerField()
    guard_pokemon_id = IntegerField()
    gym_points = IntegerField()
    enabled = BooleanField()
    latitude = DoubleField()
    longitude = DoubleField()
    last_modified = DateTimeField(index=True)
    last_scanned = DateTimeField(default=datetime.utcnow)

    class Meta:
        # non-unique composite index for bounding-box queries
        indexes = ((('latitude', 'longitude'), False), )

    @staticmethod
    def get_gyms(swLat, swLng, neLat, neLng):
        """Return gyms keyed by gym_id, enriched with name and member pokemon.

        If any bound is None, all gyms are returned; otherwise only gyms in
        the given south-west/north-east bounding box.  Member pokemon are
        attached only when scanned more recently than the gym was modified.
        """
        if swLat is None or swLng is None or neLat is None or neLng is None:
            results = (Gym.select().dicts())
        else:
            results = (Gym.select().where((Gym.latitude >= swLat) &
                                          (Gym.longitude >= swLng) &
                                          (Gym.latitude <= neLat) &
                                          (Gym.longitude <= neLng)).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        gyms = {}
        gym_ids = []
        for g in results:
            # placeholders filled in from GymDetails / GymMember below
            g['name'] = None
            g['pokemon'] = []
            gyms[g['gym_id']] = g
            gym_ids.append(g['gym_id'])

        if len(gym_ids) > 0:
            # Join member -> gym -> pokemon -> trainer; only keep member rows
            # scanned after the gym last changed (stale members are dropped).
            pokemon = (
                GymMember.select(GymMember.gym_id,
                                 GymPokemon.cp.alias('pokemon_cp'),
                                 GymPokemon.pokemon_id,
                                 Trainer.name.alias('trainer_name'),
                                 Trainer.level.alias('trainer_level')).
                join(Gym, on=(GymMember.gym_id == Gym.gym_id)).join(
                    GymPokemon,
                    on=(GymMember.pokemon_uid == GymPokemon.pokemon_uid)).join(
                        Trainer, on=(GymPokemon.trainer_name == Trainer.name))
                .where(GymMember.gym_id << gym_ids).where(
                    GymMember.last_scanned > Gym.last_modified).order_by(
                        GymMember.gym_id, GymPokemon.cp).dicts())

            for p in pokemon:
                p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
                gyms[p['gym_id']]['pokemon'].append(p)

            details = (GymDetails.select(
                GymDetails.gym_id,
                GymDetails.name).where(GymDetails.gym_id << gym_ids).dicts())

            for d in details:
                gyms[d['gym_id']]['name'] = d['name']

        # Re-enable the GC.
        gc.enable()

        return gyms
class webapp_keyword(PostgresqlModel):
    """A keyword and the id of the keyword group it belongs to."""

    keyword = CharField(max_length=100, null=True)
    # keyword-group id; see webapp_keyword_match.kwgrp
    kwgrp = IntegerField(null=True)
class ItemsDB(CianModel):
    """A scraped real-estate listing, keyed in practice by its URL.

    Most fields are nullable free-text captured as-is from the listing page.
    """

    url = CharField()
    site = CharField()
    price = IntegerField(null=True)
    price_per_meter = IntegerField(null=True)
    rooms = CharField(null=True)
    total_square = FloatField(null=True)
    rooms_square = CharField(null=True)
    living_square = CharField(null=True)
    kitchen_square = CharField(null=True)
    wc = CharField(null=True)
    balcony = CharField(null=True)
    elevator = CharField(null=True)
    parking = CharField(null=True)
    window_look = CharField(null=True)
    issue_date = CharField(null=True)
    house_type = CharField(null=True)
    matherial_type = CharField(null=True)
    floor = CharField(null=True)
    floors = CharField(null=True)
    type_salary = CharField(null=True)
    region = CharField(null=True)
    city = CharField(null=True)
    district = CharField(null=True)
    microdistrict = CharField(null=True)
    street = CharField(null=True)
    house_num = CharField(null=True)
    JK_name = CharField(null=True)
    seller_phone = CharField(null=True)
    premium_status = CharField(null=True)
    publish_date = DateField(null=True)
    up_date = CharField(null=True)
    decoration = CharField(null=True)
    ad_text = TextField(null=True)
    views = CharField(null=True)

    def create_db(self):
        """Create the backing table if it does not already exist.

        Some backends raise InternalError when the table exists; that is
        swallowed deliberately (best-effort create).
        """
        try:
            db.create_table(ItemsDB)
        except InternalError:
            pass

    @db.execution_context(with_transaction=False)
    def add_item(self, **kwargs):
        """Insert a new listing, or refresh the volatile fields of a known one.

        Only fields expected to change between scrapes (price, dates, phone,
        status, ad text) are updated on an existing row.
        """
        existing = self.search_item(url=kwargs['url'])
        if not existing:
            # BUGFIX: Model.create() already persists the row; the old code
            # issued a redundant extra ItemsDB.save(new_item) afterwards.
            ItemsDB.create(**kwargs)
            return

        # Reuse the row returned by search_item instead of re-querying it.
        existing.price = kwargs['price']
        existing.price_per_meter = kwargs['price_per_meter']
        existing.issue_date = kwargs['issue_date']
        existing.type_salary = kwargs['type_salary']
        existing.seller_phone = kwargs['seller_phone']
        existing.premium_status = kwargs['premium_status']
        existing.up_date = kwargs['up_date']
        existing.ad_text = kwargs['ad_text']
        existing.save()

    def search_item(self, url):
        """Return the listing with this URL, or None if not present."""
        try:
            return ItemsDB.get(ItemsDB.url == url)
        except DoesNotExist:
            return None
class part_group(PostgresqlModel):
    """Mapping between a part-of-speech code and a group code."""

    id = IntegerField(primary_key=True)
    group = CharField(max_length = 5, null=True)
    part = CharField(max_length = 5, null=True)
class webapp_board_keyword(PostgresqlModel):
    """Join table linking a board (bid) to a keyword (kid)."""

    bid = IntegerField(null=False)
    kid = IntegerField(null=False)
class keyword_part_freq(PostgresqlModel):
    """Frequency statistics for a word / part-of-speech pair."""

    word = CharField(max_length = 20, null=True)
    word_part = CharField(max_length = 5, null=True)
    # suggestion frequency count for this word/part pair
    word_sugg_freq = IntegerField(null=True)
class keywords_list(PostgresqlModel):
    """A keyword-list entry with a duration and the source it came from."""

    duration = IntegerField(null=True)
    source = CharField(max_length = 20, null=True)
class webapp_keyword_match(PostgresqlModel):
    """A match of a keyword group against a board, with a timestamp."""

    guid = CharField(max_length=1000)
    # keyword-group id; see webapp_keyword.kwgrp
    kwgrp = IntegerField()
    board_name = CharField(max_length=100)
    # presumably a unix timestamp (BigInteger) -- confirm against writers
    tm = BigIntegerField(null=True)
class Pokemon(BaseModel):
    """A pokemon spawn observation with its disappearance time.

    Provides query helpers for active spawns, seen-counts and spawnpoint
    analysis used by the map frontend.
    """

    # We are base64 encoding the ids delivered by the api
    # because they are too big for sqlite to handle
    encounter_id = CharField(primary_key=True, max_length=50)
    spawnpoint_id = CharField(index=True)
    pokemon_id = IntegerField(index=True)
    latitude = DoubleField()
    longitude = DoubleField()
    disappear_time = DateTimeField(index=True)

    class Meta:
        # non-unique composite index for bounding-box queries
        indexes = ((('latitude', 'longitude'), False), )

    @staticmethod
    def get_active(swLat, swLng, neLat, neLng):
        """Return not-yet-disappeared pokemon as dicts, enriched with
        name/rarity/types; any None bound disables the bounding box."""
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where(
                Pokemon.disappear_time > datetime.utcnow()).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.disappear_time > datetime.utcnow()) &
                (((Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng) &
                  (Pokemon.latitude <= neLat) &
                  (Pokemon.longitude <= neLng)))).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                # convert WGS-84 coordinates for display inside China
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @staticmethod
    def get_active_by_id(ids, swLat, swLng, neLat, neLng):
        """Like get_active, but restricted to the given pokemon_id list."""
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where((Pokemon.pokemon_id << ids) & (
                Pokemon.disappear_time > datetime.utcnow())).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.pokemon_id << ids) &
                (Pokemon.disappear_time > datetime.utcnow()) &
                (Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng) &
                (Pokemon.latitude <= neLat) &
                (Pokemon.longitude <= neLng)).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @classmethod
    @cached(cache)
    def get_seen(cls, timediff):
        """Return per-species seen counts plus each species' most recent
        sighting row, within the last `timediff` (a timedelta).

        NOTE(review): when timediff is falsy it is passed unconverted into
        the comparison below -- confirm callers always supply a timedelta.
        """
        if timediff:
            timediff = datetime.utcnow() - timediff
        # subquery: count + last disappearance per species
        pokemon_count_query = (Pokemon.select(
            Pokemon.pokemon_id,
            fn.COUNT(Pokemon.pokemon_id).alias('count'),
            fn.MAX(Pokemon.disappear_time).alias('lastappeared')).where(
                Pokemon.disappear_time > timediff).group_by(
                    Pokemon.pokemon_id).alias('counttable'))
        # join back to fetch the row matching the latest disappearance
        query = (Pokemon.select(
            Pokemon.pokemon_id, Pokemon.disappear_time, Pokemon.latitude,
            Pokemon.longitude, pokemon_count_query.c.count).join(
                pokemon_count_query,
                on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id
                    )).distinct().where(
                        Pokemon.disappear_time ==
                        pokemon_count_query.c.lastappeared).dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append()
        gc.disable()

        pokemons = []
        total = 0
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            pokemons.append(p)
            total += p['count']

        # Re-enable the GC.
        gc.enable()

        return {'pokemon': pokemons, 'total': total}

    @classmethod
    def get_appearances(cls, pokemon_id, timediff):
        '''
        :param pokemon_id: id of pokemon that we need appearances for
        :param timediff: limiting period of the selection
        :return: list of pokemon appearances over a selected period
        '''
        if timediff:
            timediff = datetime.utcnow() - timediff
        query = (Pokemon.select(
            Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
            fn.Count(Pokemon.spawnpoint_id).alias('count'),
            Pokemon.spawnpoint_id).where((Pokemon.pokemon_id == pokemon_id) & (
                Pokemon.disappear_time > timediff)).group_by(
                    Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
                    Pokemon.spawnpoint_id).dicts())

        return list(query)

    @classmethod
    def get_appearances_times_by_spawnpoint(cls, pokemon_id, spawnpoint_id,
                                            timediff):
        '''
        :param pokemon_id: id of pokemon that we need appearances times for
        :param spawnpoint_id: spawnpoing id we need appearances times for
        :param timediff: limiting period of the selection
        :return: list of time appearances over a selected period
        '''
        if timediff:
            timediff = datetime.utcnow() - timediff
        query = (Pokemon.select(Pokemon.disappear_time).where(
            (Pokemon.pokemon_id == pokemon_id) &
            (Pokemon.spawnpoint_id == spawnpoint_id) &
            (Pokemon.disappear_time > timediff)).order_by(
                Pokemon.disappear_time.asc()).tuples())

        # flatten single-column tuples into a plain list
        return list(itertools.chain(*query))

    @classmethod
    def get_spawn_time(cls, disappear_time):
        """Convert a disappearance offset (seconds into the hour) to the
        appearance offset: spawns live 15 minutes, so appearance is 45
        minutes (2700 s) after the previous disappearance, modulo the hour."""
        return (disappear_time + 2700) % 3600

    @classmethod
    def get_spawnpoints(cls, southBoundary, westBoundary, northBoundary,
                        eastBoundary):
        """Return deduplicated spawnpoints (optionally within a bounding box)
        with their most frequent appearance time-of-hour."""
        query = Pokemon.select(
            Pokemon.latitude, Pokemon.longitude, Pokemon.spawnpoint_id,
            ((Pokemon.disappear_time.minute * 60) +
             Pokemon.disappear_time.second).alias('time'),
            fn.Count(Pokemon.spawnpoint_id).alias('count'))

        if None not in (northBoundary, southBoundary, westBoundary,
                        eastBoundary):
            query = (query.where((Pokemon.latitude <= northBoundary) &
                                 (Pokemon.latitude >= southBoundary) &
                                 (Pokemon.longitude >= westBoundary) &
                                 (Pokemon.longitude <= eastBoundary)))

        query = query.group_by(Pokemon.latitude, Pokemon.longitude,
                               Pokemon.spawnpoint_id, SQL('time'))

        queryDict = query.dicts()
        spawnpoints = {}

        for sp in queryDict:
            key = sp['spawnpoint_id']
            # despite the name, this is the derived APPEARANCE time
            disappear_time = cls.get_spawn_time(sp.pop('time'))
            count = int(sp['count'])

            if key not in spawnpoints:
                spawnpoints[key] = sp
            else:
                # spawnpoint observed with more than one time bucket
                spawnpoints[key]['special'] = True

            # keep the time seen most often for this spawnpoint
            if 'time' not in spawnpoints[
                    key] or count >= spawnpoints[key]['count']:
                spawnpoints[key]['time'] = disappear_time
                spawnpoints[key]['count'] = count

        for sp in spawnpoints.values():
            del sp['count']

        return list(spawnpoints.values())

    @classmethod
    def get_spawnpoints_in_hex(cls, center, steps):
        """Return spawnpoints within the hex scan area around `center`,
        with 'time' converted to appearance time-of-hour."""
        log.info('Finding spawn points {} steps away'.format(steps))

        n, e, s, w = hex_bounds(center, steps)

        query = (Pokemon.select(Pokemon.latitude.alias('lat'),
                                Pokemon.longitude.alias('lng'),
                                ((Pokemon.disappear_time.minute * 60) +
                                 Pokemon.disappear_time.second).alias('time'),
                                Pokemon.spawnpoint_id))
        query = (query.where((Pokemon.latitude <= n) &
                             (Pokemon.latitude >= s) &
                             (Pokemon.longitude >= w) &
                             (Pokemon.longitude <= e)))
        # Sqlite doesn't support distinct on columns
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        s = list(query.dicts())

        # The distance between scan circles of radius 70 in a hex is 121.2436
        # steps - 1 to account for the center circle then add 70 for the edge
        step_distance = ((steps - 1) * 121.2436) + 70
        # Compare spawnpoint list to a circle with radius steps * 120
        # Uses the direct geopy distance between the center and the spawnpoint.
        filtered = []

        for idx, sp in enumerate(s):
            if geopy.distance.distance(
                    center, (sp['lat'], sp['lng'])).meters <= step_distance:
                filtered.append(s[idx])

        # at this point, 'time' is DISAPPEARANCE time, we're going to morph it to APPEARANCE time
        for location in filtered:
            # examples: time    shifted
            #           0       ( 0 + 2700) = 2700 % 3600 = 2700 (0th minute to 45th minute, 15 minutes prior to appearance as time wraps around the hour)
            #           1800    (1800 + 2700) = 4500 % 3600 =  900 (30th minute, moved to arrive at 15th minute)
            # todo: this DOES NOT ACCOUNT for pokemons that appear sooner and live longer, but you'll _always_ have at least 15 minutes, so it works well enough
            location['time'] = cls.get_spawn_time(location['time'])

        return filtered
class ClubUser(BaseModel):
    """A club (Discord) member or bot, with message and upvote statistics."""

    id = IntegerField(primary_key=True)
    is_bot = BooleanField(default=False)
    is_member = BooleanField(default=True)
    avatar_path = CharField(null=True)
    display_name = CharField()
    mention = CharField()
    coupon = CharField(null=True)
    joined_at = DateTimeField(null=True)
    roles = JSONField(default=lambda: [])

    def messages_count(self):
        """Total number of messages authored by this user."""
        return self.list_messages.count()

    def recent_messages_count(self, today=None):
        """Number of messages within the recent period ending at *today*."""
        return self.list_recent_messages(today).count()

    def upvotes_count(self):
        """Sum of upvotes over all messages, excluding some channels."""
        eligible = self.list_messages \
            .where(ClubMessage.channel_id.not_in(UPVOTES_EXCLUDE_CHANNELS))
        return sum(message.upvotes_count for message in eligible)

    def recent_upvotes_count(self, today=None):
        """Like upvotes_count(), but limited to the recent period."""
        eligible = self.list_recent_messages(today) \
            .where(ClubMessage.channel_id.not_in(UPVOTES_EXCLUDE_CHANNELS))
        return sum(message.upvotes_count for message in eligible)

    def has_intro(self):
        """True if the user posted a regular message in the intro channel."""
        intro = self.list_messages \
            .where(ClubMessage.channel_id == INTRO_CHANNEL,
                   ClubMessage.type == 'default') \
            .first()
        return bool(intro)

    def first_seen_on(self):
        """Date of the user's earliest message, falling back to join date."""
        earliest = self.list_messages \
            .order_by(ClubMessage.created_at) \
            .first()
        if earliest:
            return earliest.created_at.date()
        return self.joined_at.date()

    def list_recent_messages(self, today=None):
        """Query of this user's messages within the recent period."""
        cutoff = (today or date.today()) - timedelta(days=RECENT_PERIOD_DAYS)
        return self.list_messages.where(ClubMessage.created_at >= cutoff)

    def is_new(self, today=None):
        """True while the user is within the 'new member' grace period."""
        reference_day = today or date.today()
        return self.first_seen_on() + timedelta(
            days=IS_NEW_PERIOD_DAYS) >= reference_day

    @classmethod
    def members_count(cls):
        return cls.members_listing().count()

    @classmethod
    def top_members_limit(cls):
        """How many users constitute the configured top percentage."""
        return math.ceil(cls.members_count() * TOP_MEMBERS_PERCENT)

    @classmethod
    def listing(cls):
        return cls.select()

    @classmethod
    def members_listing(cls):
        # peewee needs `== False` / `== True` here, not `is`.
        return cls.listing().where(cls.is_bot == False, cls.is_member == True)

    @classmethod
    def avatars_listing(cls):
        """Members that have an avatar image on disk."""
        return cls.members_listing().where(cls.avatar_path.is_null(False))
class Stock(BaseModel):
    """A stock listing identified by its Rahavard id."""

    # External id from the Rahavard data source; unique per stock.
    rahavardId = IntegerField(unique=True)
    # Ticker symbol.
    symbol = CharField()
    # Human-readable name/title of the stock.
    title = CharField()
class CampaignOrdering(ModelBase):
    """Ordering of PresampleResource packages within a Campaign."""

    # Name of the field used to order rows of this model.
    _order_field = "order"

    campaign = ForeignKeyField(Campaign)
    package = ForeignKeyField(PresampleResource)
    # Position of the package inside the campaign.
    order = IntegerField()
class Day(Model):
    """A single day's menu entry."""

    # Day number — presumably day-of-week or day-of-month; confirm against callers.
    day = IntegerField()
    menu = CharField()

    class Meta:
        database = DB
class Session(Model):
    """A single hashcat cracking session.

    Wraps a hashcat subprocess: builds the command line, launches it in a
    background thread, parses its stdout for hash type / speed / progress /
    ETA, and drives hashcat's interactive keys (s/p/r/q) via stdin to
    report status and to pause, resume or quit the run.
    """

    # Unique session identifier, also passed as hashcat's --session value.
    name = CharField(unique=True)
    # Either "dictionary" (-a 0) or "mask" (-a 3); see session_thread().
    crack_type = CharField()
    hash_file = CharField()
    pot_file = CharField()
    hash_mode_id = IntegerField()
    rule_file = CharField(null=True)
    wordlist_file = CharField(null=True)
    mask_file = CharField(null=True)
    username_included = BooleanField()
    # One of "Running", "Paused", "Done", "Error", "Aborted".
    session_status = CharField()
    time_started = DateTimeField(null=True)
    progress = FloatField()

    class Meta:
        database = database

    def setup(self):
        """Create the temp result/output files and reset parsed attributes."""
        # File to store the processes output
        random_name = ''.join(
            random.choice(string.ascii_uppercase + string.digits)
            for _ in range(12))
        self.result_file = os.path.join("/tmp", random_name + ".cracked")
        # File to store the hashcat output
        random_name = ''.join(
            random.choice(string.ascii_uppercase + string.digits)
            for _ in range(12))
        self.hashcat_output_file = os.path.join("/tmp",
                                                random_name + ".hashcat")
        # Touch the file so later appends/reads cannot fail on a missing path.
        open(self.hashcat_output_file, 'a').close()
        self.hash_type = "N/A"
        self.time_estimated = "N/A"
        self.speed = "N/A"
        self.recovered = "N/A"

    def start(self):
        """Launch the cracking session in a background thread."""
        self.thread = threading.Thread(target=self.session_thread)
        self.thread.start()
        # Little delay to ensure the process is properly launched
        time.sleep(1)
        self.status()

    def session_thread(self):
        """Run hashcat, mirror its output to file, and parse status lines."""
        # Prepare regex to parse the main hashcat process output
        # NOTE(review): these patterns are not raw strings; the escapes
        # still match as intended but trigger invalid-escape warnings on
        # newer Pythons — consider r"..." literals.
        regex_list = [
            ("hash_type", re.compile("^Hash\.Type\.+: (.*)\s*$")),
            ("speed", re.compile("^Speed\.Dev\.#1\.+: (.*)\s*$")),
        ]
        if self.crack_type == "dictionary":
            regex_list.append(
                ("progress",
                 re.compile("^Progress\.+: \d+/\d+ \((\S+)%\)\s*$")))
            regex_list.append(("time_estimated",
                               re.compile("^Time\.Estimated\.+: (.*)\s*$")))
        elif self.crack_type == "mask":
            regex_list.append((
                "progress",
                re.compile(
                    "^Input\.Mode\.+:\s+Mask\s+\(\S+\)\s+\[\d+\]\s+\((\S+)%\)\s*$"
                )))

        self.time_started = str(datetime.now())

        if not self.session_status in ["Aborted"]:
            # Command lines used to crack the passwords
            if self.crack_type == "dictionary":
                if self.rule_file != None:
                    cmd_line = [
                        Hashcat.binary, '--session', self.name, '--status',
                        '-a', '0', '-m',
                        str(self.hash_mode_id), self.hash_file,
                        self.wordlist_file, '-r', self.rule_file
                    ]
                else:
                    cmd_line = [
                        Hashcat.binary, '--session', self.name, '--status',
                        '-a', '0', '-m',
                        str(self.hash_mode_id), self.hash_file,
                        self.wordlist_file
                    ]
            if self.crack_type == "mask":
                cmd_line = [
                    Hashcat.binary, '--session', self.name, '--status', '-a',
                    '3', '-m',
                    str(self.hash_mode_id), self.hash_file, self.mask_file
                ]
            if self.username_included:
                cmd_line += ["--username"]
            # workload profile
            cmd_line += ["--workload-profile", Hashcat.workload_profile]
            # set pot file
            cmd_line += ["--potfile-path", self.pot_file]
        else:
            # resume previous session
            cmd_line = [Hashcat.binary, '--session', self.name, '--restore']

        print("Session:%s, startup command:%s" % (self.name,
                                                  " ".join(cmd_line)))
        logging.debug("Session:%s, startup command:%s" %
                      (self.name, " ".join(cmd_line)))

        with open(self.hashcat_output_file, "a") as f:
            f.write("Command: %s\n" % " ".join(cmd_line))

        self.session_status = "Running"
        self.time_started = datetime.utcnow()
        self.save()

        # stdin is a pipe so status/pause/resume/quit can send key presses.
        self.session_process = subprocess.Popen(cmd_line,
                                                stdout=subprocess.PIPE,
                                                stdin=subprocess.PIPE,
                                                stderr=subprocess.STDOUT)
        self.update_session()

        for line in self.session_process.stdout:
            # Mirror the raw bytes into the hashcat output file.
            with open(self.hashcat_output_file, "ab") as f:
                f.write(line)
            line = line.decode()
            line = line.rstrip()
            if line == "Resumed":
                self.session_status = "Running"
                self.save()
            if line == "Paused":
                self.session_status = "Paused"
                self.save()
            # Extract hash type / speed / progress / ETA from status lines.
            for var_regex in regex_list:
                var = var_regex[0]
                regex = var_regex[1]
                m = regex.match(line)
                if m:
                    setattr(self, var, m.group(1))

        return_code = self.session_process.wait()

        # The cracking ended, set the parameters accordingly
        # NOTE(review): negative codes from wait() mean killed-by-signal;
        # the -1/-2 branches presumably map specific signals — confirm
        # against how this process is terminated elsewhere.
        if return_code >= 0:
            self.session_status = "Done"
        elif return_code == -1:
            self.session_status = "Error"
        elif return_code == -2:
            self.session_status = "Aborted"
        self.time_estimated = "N/A"
        self.speed = "N/A"
        self.save()

    def details(self):
        """Return a dict summary of the session for API/UI consumption."""
        return {
            "name": self.name,
            "crack_type": self.crack_type,
            # Strip the directory and the file extension from helper files.
            "rule":
            self.rule_file.split("/")[-1][:-5] if self.rule_file else None,
            "mask":
            self.mask_file.split("/")[-1][:-7] if self.mask_file else None,
            "wordlist":
            self.wordlist_file.split("/")[-1][:-1 * len(".wordlist")]
            if self.wordlist_file else None,
            "status": self.session_status,
            "time_started": str(self.time_started),
            "time_estimated": self.time_estimated,
            "speed": self.speed,
            "progress": self.progress,
        }

    def get_potfile(self, from_line):
        """Return up to 100000 potfile lines starting from *from_line*.

        The result dict carries the number of lines returned, whether more
        data remains, and the concatenated line data.
        """
        line_count = 0
        selected_line_count = 0
        potfile_data = ""
        complete = True
        if os.path.exists(self.pot_file):
            for line in open(self.pot_file, encoding="utf-8"):
                if not line.endswith("\n"):
                    # Last line is still being written; stop before it.
                    # NOTE(review): setting complete = True here makes
                    # remaining_data False even though a partial line
                    # remains — looks like it should be False; confirm.
                    complete = True
                    break
                if line_count >= from_line:
                    potfile_data += line
                    selected_line_count += 1
                    if selected_line_count >= 100000:
                        complete = False
                        break
                line_count += 1
            return {
                "line_count": selected_line_count,
                "remaining_data": not complete,
                "potfile_data": potfile_data,
            }
        else:
            return {
                "line_count": 0,
                "remaining_data": False,
                "potfile_data": "",
            }

    def hashcat_output(self):
        """Return the full hashcat output captured so far."""
        return open(self.hashcat_output_file).read()

    def hashes(self):
        """Return the contents of the hashes file."""
        return open(self.hash_file).read()

    def remove(self):
        """Cleanup the session before deleting it (best-effort file removal)."""
        self.quit()
        try:
            os.remove(self.result_file)
        except:
            pass
        try:
            os.remove(self.pot_file)
        except:
            pass
        try:
            os.remove(self.hash_file)
        except:
            pass
        try:
            os.remove(self.hashcat_output_file)
        except:
            pass

    def cracked(self):
        """Run `hashcat --show` and return the cracked passwords."""
        # gather cracked passwords
        cmd_line = [
            Hashcat.binary, '--show', '-m',
            str(self.hash_mode_id), self.hash_file, '-o', self.result_file
        ]
        if self.username_included:
            cmd_line += ["--username", "--outfile-format", "2"]
        else:
            cmd_line += ["--outfile-format", "3"]
        cmd_line += ["--potfile-path", self.pot_file]
        p = subprocess.Popen(cmd_line)
        p.wait()
        return open(self.result_file).read()

    def update_session(self):
        """Refresh the session state by polling the hashcat process."""
        self.status()

    def status(self):
        """Ask the running hashcat process for a status update ('s' key)."""
        if not self.session_status in ["Paused", "Running"]:
            return
        self.session_process.stdin.write(b's')
        self.session_process.stdin.flush()

    def pause(self):
        """Pause the session; retries 'p' until hashcat reports Paused."""
        if not self.session_status in ["Paused", "Running"]:
            return
        while self.session_status != "Paused":
            self.session_process.stdin.write(b'p')
            self.session_process.stdin.flush()
            self.update_session()
            time.sleep(0.1)

    def resume(self):
        """Resume the session; retries 'r' until hashcat reports Running."""
        if not self.session_status in ["Paused", "Running"]:
            return
        while self.session_status != "Running":
            self.session_process.stdin.write(b'r')
            self.session_process.stdin.flush()
            self.update_session()
            time.sleep(0.1)

    def quit(self):
        """Quit the session ('q'), join the worker thread, mark Aborted."""
        if not self.session_status in ["Paused", "Running"]:
            return
        self.session_process.stdin.write(b'q')
        self.session_process.stdin.flush()
        self.thread.join()
        self.session_status = "Aborted"
        self.save()
class Versions(flaskDb.Model):
    """Key/value table tracking schema or data version numbers."""

    key = CharField()
    val = IntegerField()

    class Meta:
        # No primary key: plain key/value rows.
        primary_key = False
class RunInfo(FactDataModel):
    """FACT telescope run metadata, one row per (night, run id).

    Python attribute names are lowercased versions of the original MySQL
    column names, mapped back via db_column. Most measurement columns are
    nullable because they are filled by separate analysis stages.
    """

    fangletomoon = FloatField(db_column='fAngleToMoon', null=True)
    fangletosun = FloatField(db_column='fAngleToSun', null=True)
    fazimuthmax = FloatField(db_column='fAzimuthMax', null=True)
    fazimuthmean = FloatField(db_column='fAzimuthMean', null=True)
    fazimuthmin = FloatField(db_column='fAzimuthMin', null=True)
    fbiasvoltagemedian = FloatField(db_column='fBiasVoltageMedian', null=True)
    fcamhumiditymean = FloatField(db_column='fCamHumidityMean', null=True)
    fcameratempmean = FloatField(db_column='fCameraTempMean', null=True)
    fcameratemprms = FloatField(db_column='fCameraTempRms', null=True)
    fcameratemprmsmean = FloatField(db_column='fCameraTempRmsMean', null=True)
    fchecksum = CharField(db_column='fCheckSum', null=True)
    fcompiletime = DateTimeField(db_column='fCompileTime', null=True)
    fcontainertempmean = FloatField(db_column='fContainerTempMean', null=True)
    fctrldevmean = FloatField(db_column='fCtrlDevMean', null=True)
    fctrldevrms = FloatField(db_column='fCtrlDevRms', null=True)
    fcurrentsdevmean = FloatField(db_column='fCurrentsDevMean', null=True)
    fcurrentsdevrms = FloatField(db_column='fCurrentsDevRms', null=True)
    fcurrentsdifftoprediction = FloatField(
        db_column='fCurrentsDiffToPrediction', null=True)
    fcurrentslinerms = FloatField(db_column='fCurrentsLineRms', null=True)
    fcurrentsmedmean = FloatField(db_column='fCurrentsMedMean', null=True)
    fcurrentsmedmeanbeg = FloatField(db_column='fCurrentsMedMeanBeg',
                                     null=True)
    fcurrentsmedmeanend = FloatField(db_column='fCurrentsMedMeanEnd',
                                     null=True)
    fcurrentsmedrms = FloatField(db_column='fCurrentsMedRms', null=True)
    fcurrentsreldifftoprediction = FloatField(
        db_column='fCurrentsRelDiffToPrediction', null=True)
    fcurrentsrellinerms = FloatField(db_column='fCurrentsRelLineRms',
                                     null=True)
    fdatasum = CharField(db_column='fDataSum', null=True)
    fdeclination = FloatField(db_column='fDeclination', null=True)
    fdrsstep = IntegerField(db_column='fDrsStep', null=True)
    fdrstempmaxmean = FloatField(db_column='fDrsTempMaxMean', null=True)
    fdrstempmaxrmsmean = FloatField(db_column='fDrsTempMaxRmsMean', null=True)
    fdrstempminmean = FloatField(db_column='fDrsTempMinMean', null=True)
    fdrstempminrmsmean = FloatField(db_column='fDrsTempMinRmsMean', null=True)
    feffectiveon = FloatField(db_column='fEffectiveOn', null=True)
    feffectiveonrms = FloatField(db_column='fEffectiveOnRms', null=True)
    fexcludedfdakey = IntegerField(db_column='fExcludedFDAKEY', null=True)
    ffilesize = BigIntegerField(db_column='fFileSize', null=True)
    ffitsfileerrors = IntegerField(db_column='fFitsFileErrors')
    fhasdrsfile = IntegerField(db_column='fHasDrsFile')
    flastupdate = DateTimeField(db_column='fLastUpdate')
    flidartransmission12 = FloatField(db_column='fLidarTransmission12',
                                      null=True)
    flidartransmission3 = FloatField(db_column='fLidarTransmission3',
                                     null=True)
    flidartransmission6 = FloatField(db_column='fLidarTransmission6',
                                     null=True)
    flidartransmission9 = FloatField(db_column='fLidarTransmission9',
                                     null=True)
    fmd5sumraw = CharField(db_column='fMd5sumRaw', null=True)
    fmd5sumrawzip = CharField(db_column='fMd5sumRawZip', null=True)
    fmoondisk = FloatField(db_column='fMoonDisk', null=True)
    fmoonzenithdistance = FloatField(db_column='fMoonZenithDistance',
                                     null=True)
    fnight = IntegerField(db_column='fNight')
    fnumelptrigger = IntegerField(db_column='fNumELPTrigger', null=True)
    fnumevents = IntegerField(db_column='fNumEvents', null=True)
    fnumext1trigger = IntegerField(db_column='fNumExt1Trigger', null=True)
    fnumext2trigger = IntegerField(db_column='fNumExt2Trigger', null=True)
    fnumilptrigger = IntegerField(db_column='fNumILPTrigger', null=True)
    fnumothertrigger = IntegerField(db_column='fNumOtherTrigger', null=True)
    fnumpedestaltrigger = IntegerField(db_column='fNumPedestalTrigger',
                                       null=True)
    fnumphysicstrigger = IntegerField(db_column='fNumPhysicsTrigger',
                                      null=True)
    fnumtimetrigger = IntegerField(db_column='fNumTimeTrigger', null=True)
    fontime = FloatField(db_column='fOnTime', null=True)
    foutsidetempmean = FloatField(db_column='fOutsideTempMean', null=True)
    foutsidetemprms = FloatField(db_column='fOutsideTempRms', null=True)
    fperiod = IntegerField(db_column='fPeriod', null=True)
    froi = IntegerField(db_column='fROI')
    froitimemarker = IntegerField(db_column='fROITimeMarker', null=True)
    frevisionnumber = CharField(db_column='fRevisionNumber', null=True)
    frightascension = FloatField(db_column='fRightAscension', null=True)
    frunid = IntegerField(db_column='fRunID')
    frunstart = DateTimeField(db_column='fRunStart', null=True)
    frunstop = DateTimeField(db_column='fRunStop', null=True)
    fruntypekey = IntegerField(db_column='fRunTypeKey')
    fsequenceid = IntegerField(db_column='fSequenceID', null=True)
    fsourcekey = IntegerField(db_column='fSourceKEY', null=True)
    fsqmmaglinfitchi2 = FloatField(db_column='fSqmMagLinFitChi2', null=True)
    fsqmmaglinfitndf = IntegerField(db_column='fSqmMagLinFitNdf', null=True)
    fsqmmaglinfitpvalue = FloatField(db_column='fSqmMagLinFitPValue',
                                     null=True)
    fsqmmaglinfitslope = FloatField(db_column='fSqmMagLinFitSlope', null=True)
    fsqmmagmean = FloatField(db_column='fSqmMagMean', null=True)
    fsunzenithdistance = FloatField(db_column='fSunZenithDistance', null=True)
    ftngdust = FloatField(db_column='fTNGDust', null=True)
    fthresholdavgmean = FloatField(db_column='fThresholdAvgMean', null=True)
    fthresholdmax = IntegerField(db_column='fThresholdMax', null=True)
    fthresholdmedmean = FloatField(db_column='fThresholdMedMean', null=True)
    fthresholdmedrms = FloatField(db_column='fThresholdMedRms', null=True)
    fthresholdmedian = FloatField(db_column='fThresholdMedian', null=True)
    fthresholdminset = IntegerField(db_column='fThresholdMinSet', null=True)
    fthresholdmintimediff = IntegerField(db_column='fThresholdMinTimeDiff',
                                         null=True)
    ftriggerratemedian = FloatField(db_column='fTriggerRateMedian', null=True)
    ftriggerraterms = FloatField(db_column='fTriggerRateRms', null=True)
    ftriggerratetimeover100 = FloatField(db_column='fTriggerRateTimeOver100',
                                         null=True)
    ftriggerratetimeover125 = FloatField(db_column='fTriggerRateTimeOver125',
                                         null=True)
    ftriggerratetimeover150 = FloatField(db_column='fTriggerRateTimeOver150',
                                         null=True)
    ftriggerratetimeover175 = FloatField(db_column='fTriggerRateTimeOver175',
                                         null=True)
    fzenithdistancemax = FloatField(db_column='fZenithDistanceMax', null=True)
    fzenithdistancemean = FloatField(db_column='fZenithDistanceMean',
                                     null=True)
    fzenithdistancemin = FloatField(db_column='fZenithDistanceMin', null=True)
    # NOTE(review): the string literals 'fRunStop'/'fRunStart'/'fEffectiveOn'
    # are passed where column expressions appear to be intended; peewee will
    # treat them as plain string values, so this likely does not compute
    # (stop - start) seconds * effective-on — confirm and consider using the
    # field objects instead.
    frealontime = fn.TIME_TO_SEC(fn.TIMEDIFF('fRunStop',
                                             'fRunStart')) * 'fEffectiveOn'

    class Meta:
        db_table = 'RunInfo'
        indexes = ((('fnight', 'frunid'), True), )
        primary_key = CompositeKey('fnight', 'frunid')
class Pokemon(BaseModel):
    """Sighted pokemon spawns, keyed by the API encounter id."""

    # We are base64 encoding the ids delivered by the api
    # because they are too big for sqlite to handle
    encounter_id = CharField(primary_key=True, max_length=50)
    spawnpoint_id = CharField(index=True, null=True)
    pokestop_id = CharField(null=True)
    pokemon_id = IntegerField(index=True)
    latitude = DoubleField()
    longitude = DoubleField()
    disappear_time = DateTimeField(index=True)
    move_1 = IntegerField(null=True)
    move_2 = IntegerField(null=True)

    class Meta:
        indexes = ((('latitude', 'longitude'), False), )

    @staticmethod
    def get_encountered_pokemon(encounter_id):
        """Return all rows (as dicts) matching the given encounter id."""
        query = (Pokemon.select().where(
            Pokemon.encounter_id == encounter_id).dicts())
        return list(query)

    @staticmethod
    def get_active(swLat, swLng, neLat, neLng):
        """Return active (not yet disappeared) pokemon as enriched dicts.

        When any bound is None the whole table is considered; otherwise
        results are limited to the SW/NE bounding box.
        """
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where(
                Pokemon.disappear_time > datetime.utcnow()).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.disappear_time > datetime.utcnow()) &
                (((Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng) &
                  (Pokemon.latitude <= neLat) &
                  (Pokemon.longitude <= neLng)))).dicts())

        # Performance: Disable the garbage collector prior to creating a
        # (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @staticmethod
    def get_active_by_id(ids, swLat, swLng, neLat, neLng):
        """Like get_active(), but restricted to the given pokemon ids."""
        if swLat is None or swLng is None or neLat is None or neLng is None:
            query = (Pokemon.select().where((Pokemon.pokemon_id << ids) & (
                Pokemon.disappear_time > datetime.utcnow())).dicts())
        else:
            query = (Pokemon.select().where(
                (Pokemon.pokemon_id << ids) &
                (Pokemon.disappear_time > datetime.utcnow()) &
                (Pokemon.latitude >= swLat) & (Pokemon.longitude >= swLng) &
                (Pokemon.latitude <= neLat) &
                (Pokemon.longitude <= neLng)).dicts())

        # Performance: Disable the garbage collector prior to creating a
        # (potentially) large dict with append()
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @classmethod
    def get_seen(cls, timediff):
        """Return per-species sighting counts plus their last appearances.

        *timediff* (a timedelta, or falsy for "all time") limits results to
        sightings whose disappear_time is within that window of now.
        """
        if timediff:
            timediff = datetime.utcnow() - timediff
        pokemon_count_query = (Pokemon.select(
            Pokemon.pokemon_id,
            fn.COUNT(Pokemon.pokemon_id).alias('count'),
            fn.MAX(Pokemon.disappear_time).alias('lastappeared')).where(
                Pokemon.disappear_time > timediff).group_by(
                    Pokemon.pokemon_id).alias('counttable'))
        query = (Pokemon.select(
            Pokemon.pokemon_id, Pokemon.disappear_time, Pokemon.latitude,
            Pokemon.longitude, pokemon_count_query.c.count).join(
                pokemon_count_query,
                on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id
                    )).distinct().where(
                        Pokemon.disappear_time ==
                        pokemon_count_query.c.lastappeared).dicts())

        # Performance: Disable the garbage collector prior to creating a
        # (potentially) large dict with append()
        gc.disable()

        pokemons = []
        total = 0
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            pokemons.append(p)
            total += p['count']

        # Re-enable the GC.
        gc.enable()

        return {'pokemon': pokemons, 'total': total}

    @classmethod
    def get_appearances(cls, pokemon_id, last_appearance):
        """Return sightings of *pokemon_id* after *last_appearance* (epoch ms)."""
        query = (Pokemon.select().where((Pokemon.pokemon_id == pokemon_id) & (
            Pokemon.disappear_time > datetime.utcfromtimestamp(
                last_appearance / 1000.0))).order_by(
                    Pokemon.disappear_time.asc()).dicts())
        return list(query)

    @classmethod
    def get_spawnpoints(cls, southBoundary, westBoundary, northBoundary,
                        eastBoundary):
        """Return distinct spawn points, optionally within a bounding box."""
        query = (Pokemon.select(
            Pokemon.latitude, Pokemon.longitude, Pokemon.spawnpoint_id,
            ((Pokemon.disappear_time.minute * 60) +
             Pokemon.disappear_time.second).alias('time'),
            fn.Count(Pokemon.spawnpoint_id).alias('count')).where(
                Pokemon.spawnpoint_id.is_null(False)))

        if None not in (northBoundary, southBoundary, westBoundary,
                        eastBoundary):
            query = (query.where((Pokemon.latitude <= northBoundary) &
                                 (Pokemon.latitude >= southBoundary) &
                                 (Pokemon.longitude >= westBoundary) &
                                 (Pokemon.longitude <= eastBoundary)))

        # Sqlite doesn't support distinct on columns
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        return list(query.dicts())

    @classmethod
    def get_spawnpoints_in_hex(cls, center, steps):
        """Return spawn points inside the scanned hex, with appearance times."""
        log.info('Finding spawn points {} steps away'.format(steps))

        n, e, s, w = hex_bounds(center, steps)

        query = (Pokemon.select(Pokemon.latitude.alias('lat'),
                                Pokemon.longitude.alias('lng'),
                                ((Pokemon.disappear_time.minute * 60) +
                                 Pokemon.disappear_time.second).alias('time'),
                                Pokemon.spawnpoint_id))
        query = (query.where((Pokemon.latitude <= n) &
                             (Pokemon.latitude >= s) &
                             (Pokemon.longitude >= w) &
                             (Pokemon.longitude <= e)))
        # Sqlite doesn't support distinct on columns
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        s = list(query.dicts())

        # Filter to spawns which actually fall in the hex locations.
        # BUG FIX: the previous code did `filtered.append(s.pop(idx))`
        # inside `for idx, sp in enumerate(s)`, which shifts the list
        # mid-iteration and silently skips the element following every
        # match. Partition the pool into matched/remaining instead, so
        # each spawn point is considered exactly once per location.
        filtered = []
        hex_locations = list(generate_location_steps(center, steps, 0.07))
        for hl in hex_locations:
            remaining = []
            for sp in s:
                if geopy.distance.distance(
                        hl, (sp['lat'], sp['lng'])).meters <= 70:
                    filtered.append(sp)
                else:
                    remaining.append(sp)
            s = remaining

        # at this point, 'time' is DISAPPEARANCE time, we're going to morph it
        # to APPEARANCE time
        for location in filtered:
            # examples: time shifted
            # 0 ( 0 + 2700) = 2700 % 3600 = 2700 (0th minute to 45th minute,
            # 15 minutes prior to appearance as time wraps around the hour)
            # 1800 (1800 + 2700) = 4500 % 3600 = 900 (30th minute, moved to
            # arrive at 15th minute)
            # todo: this DOES NOT ACCOUNT for pokemons that appear sooner and
            # live longer, but you'll _always_ have at least 15 minutes, so it
            # works well enough
            location['time'] = (location['time'] + 2700) % 3600

        return filtered
class User(BaseModel):
    """An application user."""

    uid = IntegerField(primary_key=True)
    # Whether the user has administrative rights.
    admin = BooleanField(default=False)
    # Serialized bounding boxes; format not visible here — confirm at call sites.
    bboxes = TextField(null=True)
class Block(Model):
    """A blockchain block row, convertible from the crypto-layer object."""

    id = CharField(max_length=64, primary_key=True)
    version = SmallIntegerField()
    timestamp = IntegerField(unique=True)
    previous_block = CharField(max_length=64, null=True, unique=True)
    height = IntegerField(unique=True)
    number_of_transactions = IntegerField()
    total_amount = BigIntegerField()
    total_fee = BigIntegerField()
    reward = BigIntegerField()
    payload_length = IntegerField()
    payload_hash = CharField(max_length=64)
    generator_public_key = CharField(max_length=66, index=True)
    block_signature = CharField(max_length=256)

    class Meta:
        table_name = "blocks"

    @classmethod
    def from_crypto(cls, block):
        """Build a Block model by copying fields off a crypto block object."""
        # TODO: figure out how to improve this
        model = cls()
        for attr in (
                "id",
                "version",
                "timestamp",
                "previous_block",
                "height",
                "number_of_transactions",
                "total_amount",
                "total_fee",
                "reward",
                "payload_length",
                "payload_hash",
                "generator_public_key",
                "block_signature",
        ):
            setattr(model, attr, getattr(block, attr))
        return model

    @staticmethod
    def statistics():
        """Aggregate statistics over the blocks table.

        Returns a dict with total transactions, total fee, total amount and
        the number of distinct block heights.
        """
        # TODO: check if this actually returns correct values
        totals = Block.select(
            fn.SUM(Block.number_of_transactions),
            fn.SUM(Block.total_fee),
            fn.SUM(Block.total_amount),
            fn.COUNT(Block.height.distinct()),
        ).scalar(as_tuple=True)
        transactions_count, total_fee, total_amount, blocks_count = totals
        return {
            "transactions_count": transactions_count,
            "total_fee": total_fee,
            "total_amount": total_amount,
            "blocks_count": blocks_count,
        }
class Version(BaseModel):
    """Table holding the current schema/application version number."""

    version = IntegerField()
class Superblock(BaseModel, GovernanceClass):
    """Governance superblock object (scheduled proposal payouts).

    Payment addresses/amounts and proposal hashes are stored as
    '|'-delimited strings, mirroring how they arrive from the daemon.
    """

    governance_object = ForeignKeyField(GovernanceObject,
                                        related_name='superblocks',
                                        on_delete='CASCADE',
                                        on_update='CASCADE')
    event_block_height = IntegerField()
    payment_addresses = TextField()
    payment_amounts = TextField()
    proposal_hashes = TextField(default='')
    sb_hash = CharField()
    object_hash = CharField(max_length=64)

    govobj_type = ariond_GOVOBJ_TYPES['superblock']
    only_masternode_can_submit = True

    class Meta:
        db_table = 'superblocks'

    def is_valid(self):
        """Validate payment addresses, amounts and proposal hashes.

        Returns False on the first malformed entry, True otherwise.
        """
        import arionlib
        import decimal

        printdbg("In Superblock#is_valid, for SB: %s" % self.__dict__)

        # it's a string from the DB...
        addresses = self.payment_addresses.split('|')
        for addr in addresses:
            if not arionlib.is_valid_arion_address(addr, config.network):
                printdbg("\tInvalid address [%s], returning False" % addr)
                return False

        amounts = self.payment_amounts.split('|')
        for amt in amounts:
            if not misc.is_numeric(amt):
                printdbg("\tAmount [%s] is not numeric, returning False" %
                         amt)
                return False

            # no negative or zero amounts allowed
            damt = decimal.Decimal(amt)
            if not damt > 0:
                printdbg(
                    "\tAmount [%s] is zero or negative, returning False" %
                    damt)
                return False

        # verify proposal hashes correctly formatted...
        if len(self.proposal_hashes) > 0:
            hashes = self.proposal_hashes.split('|')
            for object_hash in hashes:
                if not misc.is_hash(object_hash):
                    printdbg("\tInvalid proposal hash [%s], returning False" %
                             object_hash)
                    return False

        # ensure number of payment addresses matches number of payments
        if len(addresses) != len(amounts):
            printdbg(
                "\tNumber of payment addresses [%s] != number of payment amounts [%s], returning False"
                % (len(addresses), len(amounts)))
            return False

        printdbg("Leaving Superblock#is_valid, Valid = True")
        return True

    def hash(self):
        """Deterministic hash over the serialised superblock fields."""
        import arionlib
        return arionlib.hashit(self.serialise())

    def hex_hash(self):
        """Hexadecimal string form of hash()."""
        return "%x" % self.hash()

    # workaround for now, b/c we must uniquely ID a superblock with the hash,
    # in case of differing superblocks
    #
    # this prevents sb_hash from being added to the serialised fields
    @classmethod
    def serialisable_fields(self):
        return [
            'event_block_height', 'payment_addresses', 'payment_amounts',
            'proposal_hashes'
        ]

    # has this masternode voted to fund *any* superblocks at the given
    # event_block_height?
    @classmethod
    def is_voted_funding(self, ebh):
        count = (
            self.select().where(self.event_block_height == ebh).join(
                GovernanceObject).join(Vote).join(Signal).switch(
                    Vote)  # switch join query context back to Vote
            .join(Outcome).where(Vote.signal == VoteSignals.funding).where(
                Vote.outcome == VoteOutcomes.yes).count())
        return count

    @classmethod
    def latest(self):
        """Return the superblock with the highest event_block_height, or None.

        BUG FIX: `.desc()` was previously called on the object returned by
        order_by() — peewee queries have no desc() method, so latest()
        raised AttributeError (only IndexError was caught). desc() belongs
        on the field so the newest superblock sorts first.
        """
        try:
            obj = self.select().order_by(
                self.event_block_height.desc()).limit(1)[0]
        except IndexError:
            obj = None
        return obj

    @classmethod
    def at_height(self, ebh):
        """Query for all superblocks at the given event block height."""
        query = (self.select().where(self.event_block_height == ebh))
        return query

    @classmethod
    def find_highest_deterministic(self, sb_hash):
        """Among superblocks sharing *sb_hash*, pick the highest object_hash."""
        # highest block hash wins
        query = (self.select().where(self.sb_hash == sb_hash).order_by(
            self.object_hash.desc()))
        try:
            obj = query.limit(1)[0]
        except IndexError:
            obj = None
        return obj
class Report(BaseModel):
    """A connectivity report from one node about another."""

    # Id of the reporting node.
    my_id = IntegerField()
    # Id of the node being probed.
    target_id = IntegerField()
    # Whether the target appeared offline at probe time.
    is_offline = BooleanField()
    # Measured latency; units not visible here — confirm at call sites.
    latency = IntegerField()
    # When the report was taken.
    stamp = DateTimeField()
class Volume(BaseModel):
    """A named storage volume with lifecycle timestamps and a status code."""

    name = CharField(max_length=150, unique=True)
    # datetime.now is passed as a callable, so it is evaluated per-row.
    create_time = DateTimeField(default=datetime.now)
    update_time = DateTimeField(default=datetime.now)
    # Status code; meaning of values not visible here — confirm at call sites.
    status = IntegerField(default=0)
class Measure(BaseModel):
    """A resource-usage sample attached to a Run."""

    run = ForeignKeyField(Run, related_name='measures')
    # CPU usage; units/scale not visible here — confirm at call sites.
    cpu = IntegerField()
    # Memory usage; units not visible here — confirm at call sites.
    memory = IntegerField()