class Hint(DatabaseObject):

    ''' Holds a hint for a box (or a specific flag) which can be purchased from the market. '''

    uuid = Column(String(36),
                  unique=True,
                  nullable=False,
                  default=lambda: str(uuid4()))
    box_id = Column(Integer, ForeignKey('box.id'), nullable=False)
    flag_id = Column(Integer, ForeignKey('flag.id'), nullable=True)
    _price = Column(Integer, nullable=False)
    _description = Column(Unicode(512), nullable=False)

    @classmethod
    def all(cls):
        ''' Returns a list of all objects in the database '''
        return dbsession.query(cls).all()

    @classmethod
    def by_id(cls, _id):
        ''' Returns the object with id of _id '''
        return dbsession.query(cls).filter_by(id=_id).first()

    @classmethod
    def by_uuid(cls, _uuid):
        ''' Returns the object with a given uuid '''
        return dbsession.query(cls).filter_by(uuid=unicode(_uuid)).first()

    @classmethod
    def by_box_id(cls, _id):
        return dbsession.query(cls).filter_by(box_id=_id).all()

    @classmethod
    def by_flag_id(cls, _id):
        return dbsession.query(cls).filter_by(flag_id=_id).all()

    @classmethod
    def taken_by_flag(cls, _id):
        return dbsession.query(cls, team_to_hint).filter_by(
            flag_id=_id).join(team_to_hint).all()

    @classmethod
    def taken_by_box(cls, _id):
        return dbsession.query(cls, team_to_hint).filter_by(
            box_id=_id, flag_id=None).join(team_to_hint).all()

    @property
    def price(self):
        return self._price

    @price.setter
    def price(self, value):
        try:
            self._price = abs(int(value))
        except ValueError:
            raise ValidationError("Hint price must be an integer")

    @property
    def description(self):
        return self._description

    @description.setter
    def description(self, value):
        if not 0 < len(value) < 512:
            raise ValidationError("Hint description must be 1 - 512 characters")
        self._description = unicode(value)

    def to_xml(self, parent):
        hint_elem = ET.SubElement(parent, "hint")
        ET.SubElement(hint_elem, "price").text = str(self.price)
        ET.SubElement(hint_elem, "description").text = self._description

    def to_dict(self):
        flag = Flag.by_id(self.flag_id)
        if flag:
            flag_uuid = flag.uuid
        else:
            flag_uuid = ""
        return {
            'price': str(self.price),
            'description': self.description,
            'flag_uuid': flag_uuid,
            'uuid': self.uuid,
            'flaglist': Box.flaglist(self.box_id),
        }
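
# Example (not from the original source): a minimal sketch of using the Hint
# query helpers above, assuming a configured dbsession and an existing Box
# row; `box` is a hypothetical argument.
def total_hint_cost(box):
    ''' Sum the price of every hint attached to a box. '''
    return sum(hint.price for hint in Hint.by_box_id(box.id))
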
class Map(Base): __tablename__ = 'map' uuid = Column(Unicode, primary_key=True, default=lambda: unicode(uuid.uuid4().hex)) user_login = Column(Unicode(50)) title = Column(Unicode(50)) description = Column(Unicode) public = Column(Boolean) create_date = Column(DateTime, default=datetime.datetime.now) update_date = Column(DateTime, onupdate=datetime.datetime.now) zoom = Column(Integer) x = Column(Float) y = Column(Float) theme = Column(Unicode) bg_layer = Column(Unicode) bg_opacity = Column(Float, default=100) layers = Column(Unicode) layers_indices = Column(Unicode) layers_opacity = Column(Unicode) layers_visibility = Column(Unicode) selected_node = Column(Unicode) rating = Column(Float, default=0) rating_count = Column(Integer, default=0) category_id = Column(Integer, ForeignKey('category.id'), default=999) label = Column(Unicode) category = relationship('Category', backref='maps') features = relationship('Feature', backref='map') def get_title(self): if self.title is not None: return self.title.replace("'", "_") return None def todict(self): def convert_datetime(value): if value is not None: return value.strftime("%Y-%m-%d %H:%M:%S") else: return None for c in self.__table__.columns: if isinstance(c.type, DateTime): value = convert_datetime(getattr(self, c.name)) else: value = getattr(self, c.name) yield (c.name, value) def __iter__(self): """Returns an iterable that supports .next() so we can do dict(sa_instance) """ return self.todict() @staticmethod def get(id, session): """ Get map by its id. """ return session.query(Map).get(id) @staticmethod def belonging_to(user, session): """ Get maps that belong to user. """ maps = session.query(Map).filter( func.lower(Map.user_login) == func.lower(user)) \ .order_by("category_id asc,title asc").all() return [{ 'title': map.title, 'uuid': map.uuid, 'public': map.public, 'create_date': map.create_date, 'update_date': map.update_date, 'category': map.category.name if map.category_id is not None else None, 'owner': map.user_login.lower() } for map in maps]
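
# Example (not from the original source): Map.__iter__ above yields
# (column, value) pairs via todict(), so an instance can be turned into a
# plain dict for JSON output with a single dict() call; `session` and
# `map_uuid` are assumptions of this sketch.
def map_as_dict(session, map_uuid):
    m = Map.get(map_uuid, session)
    return dict(m) if m is not None else None
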
class User(DeclarativeBase):
    """
    User definition.

    This is the user definition used by :mod:`repoze.who`, which requires at
    least the ``user_name`` column.
    """
    __tablename__ = 'User'

    def setdefaultkey(self):
        uid = str(uuid.uuid4())
        while DBSession.query(User).filter(User.key == uid).first():
            uid = str(uuid.uuid4())
        return uid

    # columns
    id = Column(Integer, autoincrement=True, primary_key=True)
    name = Column(Unicode(255))
    _email = Column(Unicode(255), unique=True, info={'rum': {'field': 'Email'}})
    _created = Column(DateTime, default=datetime.now)
    key = Column(Unicode(255), unique=True, default=setdefaultkey)

    def _get_date(self):
        return self._created.strftime(date_format)

    def _set_date(self, date):
        self._created = date

    created = synonym('_created', descriptor=property(_get_date, _set_date))

    # email and user_name properties
    def _get_email(self):
        return self._email

    def _set_email(self, email):
        self._email = email.lower()

    email = synonym('_email', descriptor=property(_get_email, _set_email))

    # class methods
    @classmethod
    def by_email_address(cls, email):
        """Return the user object whose email address is ``email``."""
        return DBSession.query(cls).filter(cls.email == email).first()

    # non-column properties
    def validate_login(self, password):
        print 'validate_login'
        print password

    @property
    def permissions(self):
        """Return a set with all permissions granted to the user."""
        perms = set()
        for g in self.groups:
            perms = perms | set(g.permissions)
        return perms

    def __repr__(self):
        return '<User: id=%r, name=%r, email=%r, key=%r>' % (
            self.id, self.name, self.email, self.key)

    def __unicode__(self):
        return self.name
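
# Example (not from the original source): a hedged sketch of listing what a
# user may do, built on the aggregated `permissions` property above; it
# assumes the related Permission model exposes a `permission_name` column,
# as in a standard TurboGears quickstart, which is not shown here.
def permission_names(email):
    user = User.by_email_address(email)
    if user is None:
        return set()
    return set(p.permission_name for p in user.permissions)
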
class Team(DatabaseObject): """ Team definition """ uuid = Column(String(36), unique=True, nullable=False, default=lambda: str(uuid4())) _name = Column(Unicode(24), unique=True, nullable=False) _motto = Column(Unicode(32)) _avatar = Column(String(64)) _code = Column( "code", String(16), unique=True, default=lambda: str(uuid4().hex)[:16] ) files = relationship( "FileUpload", backref=backref("team", lazy="select"), cascade="all,delete,delete-orphan", ) pastes = relationship( "PasteBin", backref=backref("team", lazy="select"), cascade="all,delete,delete-orphan", ) money = Column(Integer, default=options.starting_team_money, nullable=False) members = relationship( "User", backref=backref("team", lazy="select"), cascade="all,delete,delete-orphan", ) flags = relationship( "Flag", secondary=team_to_flag, backref=backref("team", lazy="select") ) boxes = relationship( "Box", secondary=team_to_box, backref=backref("team", lazy="select") ) items = relationship( "MarketItem", secondary=team_to_item, backref=backref("team", lazy="joined") ) purchased_source_code = relationship( "SourceCode", secondary=team_to_source_code, backref=backref("team", lazy="select"), ) hints = relationship( "Hint", secondary=team_to_hint, backref=backref("team", lazy="select") ) game_levels = relationship( "GameLevel", secondary=team_to_game_level, backref=backref("team", lazy="select"), ) @classmethod def all(cls): """ Returns a list of all objects in the database """ return dbsession.query(cls).all() @classmethod def by_id(cls, _id): """ Returns a the object with id of _id """ return dbsession.query(cls).filter_by(id=_id).first() @classmethod def by_uuid(cls, _uuid): """ Return and object based on a uuid """ return dbsession.query(cls).filter_by(uuid=_uuid).first() @classmethod def by_name(cls, name): """ Return the team object based on "team_name" """ return dbsession.query(cls).filter_by(_name=str(name)).first() @classmethod def by_code(cls, code): """ Return the team object based on the _code """ return dbsession.query(cls).filter_by(_code=code).first() @classmethod def ranks(cls): """ Returns a list of all objects in the database """ return sorted(dbsession.query(cls).all()) @classmethod def count(cls): return dbsession.query(cls).count() @property def name(self): return self._name def get_score(self, item): if item == "money": return self.money elif item == "flag": return len(self.flags) elif item == "hint": return len(self.hints) elif item == "bot": return self.bot_count return 0 @name.setter def name(self, value): if not 3 <= len(value) <= 24: raise ValidationError("Team name must be 3 - 24 characters") else: self._name = str(value) @property def motto(self): return self._motto @motto.setter def motto(self, value): if 32 < len(value): raise ValidationError("Motto must be less than 32 characters") else: self._motto = str(value) @property def code(self): return self._code @property def avatar(self): if self._avatar is not None: return self._avatar else: if options.teams: avatar = get_new_avatar("team") else: avatar = get_new_avatar("user", True) if not avatar.startswith("default_"): self._avatar = avatar dbsession.add(self) dbsession.commit() return avatar @avatar.setter def avatar(self, image_data): if MIN_AVATAR_SIZE < len(image_data) < MAX_AVATAR_SIZE: ext = imghdr.what("", h=image_data) if ext in IMG_FORMATS and not is_xss_image(image_data): try: if self._avatar is not None and os.path.exists( options.avatar_dir + "/upload/" + self._avatar ): os.unlink(options.avatar_dir + "/upload/" + self._avatar) file_path = 
str( options.avatar_dir + "/upload/" + self.uuid + "." + ext ) image = Image.open(io.BytesIO(image_data)) cover = resizeimage.resize_cover(image, [500, 250]) cover.save(file_path, image.format) self._avatar = "upload/" + self.uuid + "." + ext except Exception as e: raise ValidationError(e) else: raise ValidationError( "Invalid image format, avatar must be: %s" % (", ".join(IMG_FORMATS)) ) else: raise ValidationError( "The image is too large must be %d - %d bytes" % (MIN_AVATAR_SIZE, MAX_AVATAR_SIZE) ) @property def levels(self): """ Sorted game_levels """ return sorted(self.game_levels) def level_flags(self, lvl): """ Given a level number return all flags captured for that level """ return [flag for flag in self.flags if flag.game_level.number == lvl] def box_flags(self, box): """ Given a box return all flags captured for that box """ return [flag for flag in self.flags if flag.box == box] @property def bot_count(self): bot_manager = BotManager.instance() return bot_manager.count_by_team_uuid(self.uuid) def file_by_file_name(self, file_name): """ Return file object based on file_name """ ls = self.files.filter_by(file_name=file_name) return ls[0] if 0 < len(ls) else None def to_dict(self): """ Use for JSON related tasks; return public data only """ return { "uuid": self.uuid, "name": self.name, "motto": self.motto, "money": self.money, "avatar": self.avatar, } def to_xml(self, parent): team_elem = ET.SubElement(parent, "team") ET.SubElement(team_elem, "name").text = self.name ET.SubElement(team_elem, "motto").text = self.motto users_elem = ET.SubElement(team_elem, "users") users_elem.set("count", "%s" % str(len(self.members))) for user in self.members: user.to_xml(users_elem) def __repr__(self): return "<Team - name: %s, money: %d>" % (self.name, self.money) def __str__(self): return self.name def __eq__(self, other): return self.id == other.id def __ne__(self, other): return not self.__eq__(other) def __cmp__(self, other): """ Compare based on the config option rank_by """ if options.rank_by.lower() != "money": """ flags ▲, money ▲, hints ▼ """ this, that = len(self.flags), len(other.flags) if this == that: this, that = self.money, other.money if this == that: this, that = len(other.hints), len(self.hints) else: """ money ▲, hints ▼, flags ▲ """ this, that = self.money, other.money if this == that: this, that = len(other.hints), len(self.hints) if this == that: this, that = len(self.flags), len(other.flags) if this < that: return 1 elif this == that: return 0 else: return -1 def __gt__(self, other): return self.__cmp__(other) > 0 def __lt__(self, other): return self.__cmp__(other) < 0 def __ge__(self, other): return self.__cmp__(other) >= 0 def __le__(self, other): return self.__cmp__(other) <= 0
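
# Example (not from the original source): Team.ranks() relies on the rich
# comparison methods above (ordering by flags/money/hints depending on
# options.rank_by), so a scoreboard is just a sort; the printed fields are
# all public Team attributes shown above.
def scoreboard():
    for position, team in enumerate(Team.ranks(), start=1):
        print("%d. %s ($%d, %d flags)" % (position, team.name, team.money, len(team.flags)))
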
class Dataset(TableHandler, db.Model): """ The dataset is the core entity of any access to data. All requests to the actual data store are routed through it, as well as data loading and model generation. The dataset keeps an in-memory representation of the data model (including all dimensions and measures) which can be used to generate necessary queries. """ __tablename__ = 'dataset' id = Column(Integer, primary_key=True) name = Column(Unicode(255), unique=True) label = Column(Unicode(2000)) description = Column(Unicode()) currency = Column(Unicode()) default_time = Column(Unicode()) schema_version = Column(Unicode()) entry_custom_html = Column(Unicode()) ckan_uri = Column(Unicode()) category = Column(Unicode()) serp_title = Column(Unicode(), nullable=True) serp_teaser = Column(Unicode(), nullable=True) private = Column(Boolean, default=False) created_at = Column(DateTime, default=datetime.utcnow) updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) data = Column(MutableDict.as_mutable(JSONType), default=dict) languages = association_proxy('_languages', 'code') territories = association_proxy('_territories', 'code') def __init__(self, data): self.data = data.copy() dataset = self.data['dataset'] del self.data['dataset'] self.label = dataset.get('label') self.name = dataset.get('name') self.description = dataset.get('description') self.currency = dataset.get('currency') self.category = dataset.get('category') self.serp_title = dataset.get('serp_title') self.serp_teaser = dataset.get('serp_teaser') self.default_time = dataset.get('default_time') self.entry_custom_html = dataset.get('entry_custom_html') self.languages = dataset.get('languages', []) self.territories = dataset.get('territories', []) self.ckan_uri = dataset.get('ckan_uri') self._load_model() @property def model(self): model = self.data.copy() model['dataset'] = self.as_dict() return model @property def mapping(self): return self.data.get('mapping', {}) @reconstructor def _load_model(self): """ Construct the in-memory object representation of this dataset's dimension and measures model. This is called upon initialization and deserialization of the dataset from the SQLAlchemy store. """ self.dimensions = [] self.measures = [] for dim, data in self.mapping.items(): if data.get('type') == 'measure' or dim == 'amount': self.measures.append(Measure(self, dim, data)) continue elif data.get('type') == 'date' or \ (dim == 'time' and data.get('datatype') == 'date'): dimension = DateDimension(self, dim, data) elif data.get('type') in ['value', 'attribute']: dimension = AttributeDimension(self, dim, data) else: dimension = CompoundDimension(self, dim, data) self.dimensions.append(dimension) self.init() self._is_generated = None def __getitem__(self, name): """ Access a field (dimension or measure) by name. """ for field in self.fields: if field.name == name: return field raise KeyError() def __contains__(self, name): try: self[name] return True except KeyError: return False @property def fields(self): """ Both the dimensions and metrics in this dataset. """ return self.dimensions + self.measures @property def compounds(self): """ Return only compound dimensions. """ return filter(lambda d: isinstance(d, CompoundDimension), self.dimensions) @property def facet_dimensions(self): return [d for d in self.dimensions if d.facet] def init(self): """ Create a SQLAlchemy model for the current dataset model, without creating the tables and columns. 
This needs to be called both for access to the data and in order to generate the model physically. """ self.bind = db.engine self.meta = MetaData() self.meta.bind = db.engine self._init_table(self.meta, self.name, 'entry', id_type=Unicode(42)) for field in self.fields: field.column = field.init(self.meta, self.table) self.alias = self.table.alias('entry') def generate(self): """ Create the tables and columns necessary for this dataset to keep data. """ for field in self.fields: field.generate(self.meta, self.table) for dim in self.dimensions: if isinstance(dim, CompoundDimension): self.table.append_constraint(ForeignKeyConstraint( [dim.name + '_id'], [dim.table.name + '.id'], # use_alter=True, name='fk_' + self.name + '_' + dim.name )) self._generate_table() self._is_generated = True @property def is_generated(self): if self._is_generated is None: self._is_generated = self.table.exists() return self._is_generated @property def has_badges(self): """ Property that returns True if the dataset has been given any badges """ # Cast the badge count as a boolean and return it return bool(self.badges.count()) def can_read(self, user): """ Permissions for dataset access (read). Returns a boolean indicating if a user may read the dataset """ # If the dataset is not private anybody can read # If the datset is private only users who can update it can read it return not self.private or self.can_update(user) def can_update(self, user): """ Permissions for dataset updates. Returns a boolean indicating if a user may update the dataset """ # User needs to be logged in and either admin or one of the dataset # managers return user is not None and ( user.admin or db.session.query( # Check if the user exists in managers self.managers.filter_by(id=user.id).exists()).first() ) def can_delete(self, user): """ Permissions for dataset removal (delete). Returns a boolean indicating if a user may delete the dataset. """ # Users who can update the dataset can also delete it return self.can_update(user) def commit(self): pass # self.tx.commit() # self.tx = self.bind.begin() def _make_key(self, data): """ Generate a unique identifier for an entry. This is better than SQL auto-increment because it is stable across mutltiple loads and thus creates stable URIs for entries. """ uniques = [self.name] for field in self.fields: if not field.key: continue obj = data.get(field.name) if isinstance(obj, dict): obj = obj.get('name', obj.get('id')) uniques.append(obj) return hash_values(uniques) def load(self, data): """ Handle a single entry of data in the mapping source format, i.e. with all needed columns. This will propagate to all dimensions and set values as appropriate. """ entry = dict() for field in self.fields: field_data = data[field.name] entry.update(field.load(self.bind, field_data)) entry['id'] = self._make_key(data) self._upsert(self.bind, entry, ['id']) def flush(self): """ Delete all data from the dataset tables but leave the table structure intact. """ for dimension in self.dimensions: dimension.flush(self.bind) self._flush(self.bind) def drop(self): """ Drop all tables created as part of this dataset, i.e. by calling ``generate()``. This will of course also delete the data itself. """ self._drop(self.bind) for dimension in self.dimensions: dimension.drop(self.bind) self._is_generated = False def key(self, key): """ For a given ``key``, find a column to indentify it in a query. A ``key`` is either the name of a simple attribute (e.g. ``time``) or of an attribute of a complex dimension (e.g. ``to.label``). 
The returned key is using an alias, so it can be used in a query directly. """ attr = None if '.' in key: key, attr = key.split('.', 1) dimension = self[key] if hasattr(dimension, 'alias'): attr_name = dimension[attr].column.name if attr else 'name' return dimension.alias.c[attr_name] return self.alias.c[dimension.column.name] def entries(self, conditions="1=1", order_by=None, limit=None, offset=0, step=10000, fields=None): """ Generate a fully denormalized view of the entries on this table. This view is nested so that each dimension will be a hash of its attributes. This is somewhat similar to the entries collection in the fully denormalized schema before OpenSpending 0.11 (MongoDB). """ if not self.is_generated: return if fields is None: fields = self.fields joins = self.alias for d in self.dimensions: if d in fields: joins = d.join(joins) selects = [f.selectable for f in fields] + [self.alias.c.id] # enforce stable sorting: if order_by is None: order_by = [self.alias.c.id.asc()] for i in count(): qoffset = offset + (step * i) qlimit = step if limit is not None: qlimit = min(limit - (step * i), step) if qlimit <= 0: break query = select(selects, conditions, joins, order_by=order_by, use_labels=True, limit=qlimit, offset=qoffset) rp = self.bind.execute(query) first_row = True while True: row = rp.fetchone() if row is None: if first_row: return break first_row = False yield decode_row(row, self) def aggregate(self, measures=['amount'], drilldowns=[], cuts=[], page=1, pagesize=10000, order=[]): """ Query the dataset for a subset of cells based on cuts and drilldowns. It returns a structure with a list of drilldown items and a summary about the slice cutted by the query. ``measures`` The numeric units to be aggregated over, defaults to [``amount``]. (type: `list`) ``drilldowns`` Dimensions to drill down to. (type: `list`) ``cuts`` Specification what to cut from the cube. This is a `list` of `two-tuples` where the first item is the dimension and the second item is the value to cut from. It is turned into a query where multible cuts for the same dimension are combined to an *OR* query and then the queries for the different dimensions are combined to an *AND* query. ``page`` Page the drilldown result and return page number *page*. type: `int` ``pagesize`` Page the drilldown result into page of size *pagesize*. type: `int` ``order`` Sort the result based on the dimension *sort_dimension*. This may be `None` (*default*) or a `list` of two-`tuples` where the first element is the *dimension* and the second element is the order (`False` for ascending, `True` for descending). Type: `list` of two-`tuples`. Raises: :exc:`ValueError` If a cube is not yet computed. Call :meth:`compute` to compute the cube. :exc:`KeyError` If a drilldown, cut or order dimension is not part of this cube or the order dimensions are not a subset of the drilldown dimensions. Returns: A `dict` containing the drilldown and the summary: {"drilldown": [ {"num_entries": 5545, "amount": 41087379002.0, "cofog1": {"description": "", "label": "Economic affairs"}}, ... 
] "summary": {"amount": 7353306450299.0, "num_entries": 133612}} """ # Get the joins (aka alias) and the dataset joins = alias = self.alias dataset = self # Aggregation fields are all of the measures, so we create individual # summary fields with the sum function of SQLAlchemy fields = [func.sum(alias.c[m]).label(m) for m in measures] # We append an aggregation field that counts the number of entries fields.append(func.count(alias.c.id).label("entries")) # Create a copy of the statistics fields (for later) stats_fields = list(fields) # Create label map for time columns (year and month) for lookup # since they are found under the time attribute labels = { 'year': dataset['time']['year'].column_alias.label('year'), 'month': dataset['time']['yearmonth'].column_alias.label('month'), } # Get the dimensions we're interested in. These would be the drilldowns # and the cuts. For compound dimensions we are only interested in the # most significant one (e.g. for from.name we're interested in from) dimensions = drilldowns + [k for k, v in cuts] dimensions = [d.split('.')[0] for d in dimensions] # Loop over the dimensions as a set (to avoid multiple occurances) for dimension in set(dimensions): # If the dimension is year or month we're interested in 'time' if dimension in labels: dimension = 'time' # If the dimension table isn't in the automatic joins we add it if dimension not in [c.table.name for c in joins.columns]: joins = dataset[dimension].join(joins) # Drilldowns are performed using group_by SQL functions group_by = [] for key in drilldowns: # If drilldown is in labels we append its mapped column to fields if key in labels: column = labels[key] group_by.append(column) fields.append(column) else: # Get the column from the dataset column = dataset.key(key) # If the drilldown is a compound dimension or the columns table # is in the joins we're already fetching the column so we just # append it to fields and the group_by if '.' in key or column.table == alias: fields.append(column) group_by.append(column) else: # If not we add the column table to the fields and add all # of that tables columns to the group_by fields.append(column.table) for col in column.table.columns: group_by.append(col) # Cuts are managed using AND statements and we use a dict with set as # the default value to create the filters (cut on various values) conditions = and_() filters = defaultdict(set) for key, value in cuts: # If the key is in labels (year or month) we get the mapped column # else we get the column from the dataset if key in labels: column = labels[key] else: column = dataset.key(key) # We add the value to the set for that particular column filters[column].add(value) # Loop over the columns in the filter and add that to the conditions # For every value in the set we create and OR statement so we get e.g. 
# year=2007 AND (from.who == 'me' OR from.who == 'you') for attr, values in filters.items(): conditions.append(or_(*[attr == v for v in values])) # Ordering can be set by a parameter or ordered by measures by default order_by = [] # If no order is defined we default to order of the measures in the # order they occur (furthest to the left is most significant) if order is None or not len(order): order = [(m, True) for m in measures] # We loop through the order list to add the columns themselves for key, direction in order: # If it's a part of the measures we have to order by the # aggregated values (the sum of the measure) if key in measures: column = func.sum(alias.c[key]).label(key) # If it's in the labels we have to get the mapped column elif key in labels: column = labels[key] # ...if not we just get the column from the dataset else: column = dataset.key(key) # We append the column and set the direction (True == descending) order_by.append(column.desc() if direction else column.asc()) # query 1: get overall sums. # Here we use the stats_field we saved earlier query = select(stats_fields, conditions, joins) rp = dataset.bind.execute(query) # Execute the query and turn them to a list so we can pop the # entry count and then zip the measurements and the totals together stats = list(rp.fetchone()) num_entries = stats.pop() total = zip(measures, stats) # query 2: get total count of drilldowns if len(group_by): # Select 1 for each group in the group_by and count them query = select(['1'], conditions, joins, group_by=group_by) query = select([func.count('1')], '1=1', query.alias('q')) rp = dataset.bind.execute(query) num_drilldowns, = rp.fetchone() else: # If there are no drilldowns we still have to do one num_drilldowns = 1 # The drilldown result list drilldown = [] # The offset in the db, based on the page and pagesize (we have to # modify it since page counts starts from 1 but we count from 0 offset = int((page - 1) * pagesize) # query 3: get the actual data query = select(fields, conditions, joins, order_by=order_by, group_by=group_by, use_labels=True, limit=pagesize, offset=offset) rp = dataset.bind.execute(query) while True: # Get each row in the db result and append it, decoded, to the # drilldown result. The decoded version is a json represenation row = rp.fetchone() if row is None: break result = decode_row(row, dataset) drilldown.append(result) # Create the summary based on the stats_fields and other things # First we add a the total for each measurement in the root of the # summary (watch out!) and then we add various other, self-explanatory # statistics such as page, number of entries. The currency value is # strange since it's redundant for multiple measures but is left as is # for backwards compatibility summary = {key: value for (key, value) in total} summary.update({ 'num_entries': num_entries, 'currency': {m: dataset.currency for m in measures}, 'num_drilldowns': num_drilldowns, 'page': page, 'pages': int(math.ceil(num_drilldowns / float(pagesize))), 'pagesize': pagesize }) return {'drilldown': drilldown, 'summary': summary} def timerange(self): """ Get the timerange of the dataset (based on the time attribute). 
Returns a tuple of (first timestamp, last timestamp) where timestamp is a datetime object """ try: # Get the time column time = self.key('time') # We use SQL's min and max functions to get the timestamps query = db.session.query(func.min(time), func.max(time)) # We just need one result to get min and max time return [datetime.strptime(date, '%Y-%m-%d') if date else None for date in query.one()] except: return (None, None) def __repr__(self): return "<Dataset(%s:%s:%s)>" % (self.name, self.dimensions, self.measures) def __len__(self): if not self.is_generated: return 0 rp = self.bind.execute(self.alias.count()) return rp.fetchone()[0] def as_dict(self): return { 'label': self.label, 'name': self.name, 'description': self.description, 'default_time': self.default_time, 'schema_version': self.schema_version, 'currency': self.currency, 'category': self.category, 'serp_title': self.serp_title, 'serp_teaser': self.serp_teaser, 'timestamps': { 'created': self.created_at, 'last_modified': self.updated_at }, 'languages': list(self.languages), 'territories': list(self.territories), 'badges': [b.as_dict(short=True) for b in self.badges] } @classmethod def all_by_account(cls, account): """ Query available datasets based on dataset visibility. """ criteria = [cls.private == false()] if account is not None: criteria += ["1=1" if account.admin else "1=2", cls.managers.any(type(account).id == account.id)] q = db.session.query(cls).filter(or_(*criteria)) q = q.order_by(cls.label.asc()) return q @classmethod def by_name(cls, name): return db.session.query(cls).filter_by(name=name).first()
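
# Example (not from the original source): a hedged sketch of calling
# Dataset.aggregate() with the documented argument shapes (cuts and order as
# lists of two-tuples); the dataset name and the 2012 cut are illustrative,
# and the dataset is assumed to have the usual 'time' dimension.
def yearly_spending(dataset_name):
    dataset = Dataset.by_name(dataset_name)
    result = dataset.aggregate(measures=['amount'],
                               drilldowns=['year'],
                               cuts=[('year', u'2012')],
                               order=[('amount', True)])
    return result['summary'], result['drilldown']
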
class Configuration(DeclarativeBase): """Represents a stored configuration for the application""" __tablename__ = 'config' __table_args__ = ( ForeignKeyConstraint( ['program_name'], ['programs.name'], onupdate='cascade', ondelete='set null'), ForeignKeyConstraint( ['program_name', 'season_number'], ['seasons.program_name', 'seasons.number'], onupdate='cascade', ondelete='set null'), {} ) id = Column(Integer, primary_key=True) source = Column(Unicode(300), nullable=False, default='/dev/dvd') target = Column(Unicode(300), nullable=False, default=os.path.expanduser('~/Videos')) temp = Column(Unicode(300), nullable=False, default=tempfile.gettempdir()) template = Column(Unicode(300), nullable=False, default='{program} - {id} - {name}.mp4') id_template = Column(Unicode(100), nullable=False, default='{season}x{episode:02d}') _duration_min = Column('duration_min', Integer, nullable=False, default=40) _duration_max = Column('duration_max', Integer, nullable=False, default=50) program_name = Column(Unicode(200)) season_number = Column(Integer) subtitle_format = Column(Unicode(6), CheckConstraint("subtitle_format in ('none', 'vobsub', 'cc', 'any')"), nullable=False, default='none') audio_mix = Column(Unicode(6), CheckConstraint("audio_mix in ('mono', 'stereo', 'dpl1', 'dpl2')"), nullable=False, default='dpl2') decomb = Column(Unicode(4), CheckConstraint("decomb in ('off', 'on', 'auto')"), nullable=False, default='off') audio_all = Column(Boolean, nullable=False, default=False) audio_langs = relationship('AudioLanguage', backref='config') subtitle_all = Column(Boolean, nullable=False, default=False) subtitle_default = Column(Boolean, nullable=False, default=False) subtitle_langs = relationship('SubtitleLanguage', backref='config') video_style = Column(Unicode(10), CheckConstraint("video_style in ('tv', 'film', 'animation')"), nullable=False, default='tv') dvdnav = Column(Boolean, nullable=False, default=True) duplicates = Column(Unicode(5), CheckConstraint("duplicates in ('all', 'first', 'last')"), nullable=False, default='all') paths = relationship('ConfigPath', backref='config') program = relationship('Program') season = relationship('Season', primaryjoin='and_(' 'Season.program_name == Configuration.program_name, ' 'Season.number == foreign(Configuration.season_number)' ')') def _get_duration_min(self): return timedelta(minutes=self._duration_min) def _set_duration_min(self, value): self._duration_min = value.seconds / 60 duration_min = synonym('_duration_min', descriptor=property(_get_duration_min, _set_duration_min)) def _get_duration_max(self): return timedelta(minutes=self._duration_max) def _set_duration_max(self, value): self._duration_max = value.seconds / 60 duration_max = synonym('_duration_max', descriptor=property(_get_duration_max, _set_duration_max)) def in_audio_langs(self, lang): """Returns True if lang is a selected audio language""" return any(l.lang == lang for l in self.audio_langs) def in_subtitle_langs(self, lang): """Returns True if lang is a selected subtitle language""" return any(l.lang == lang for l in self.subtitle_langs) def get_path(self, name): """Returns the configured path of the specified utility""" session = Session.object_session(self) return session.query(ConfigPath).\ filter(ConfigPath.config_id == self.id).\ filter(ConfigPath.name == name).one().path def set_path(self, name, value): """Sets the configured path of the specified utility""" session = Session.object_session(self) session.query(ConfigPath).\ filter(ConfigPath.config_id == self.id).\ 
            filter(ConfigPath.name == name).one().path = value
        session.commit()

    def __repr__(self):
        return "<Configuration(...)>"
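
# Example (not from the original source): the duration_min / duration_max
# synonyms above store whole minutes but expose datetime.timedelta values,
# so range checks can stay in timedelta form; `config` is an existing
# Configuration row and the 45-minute title is illustrative.
from datetime import timedelta

def title_fits(config, title_duration):
    ''' True if a ripped title's runtime falls inside the configured window. '''
    return config.duration_min <= title_duration <= config.duration_max

# e.g. title_fits(config, timedelta(minutes=45))
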
class User(Base): # Meta __tablename__ = 'tickee_users' # Columns id = Column(Integer, primary_key=True) first_name = Column(Unicode(30)) last_name = Column(Unicode(30)) email = Column(String(60), unique=True) password = Column(String) activation_key = Column(String(16)) date_joined = Column(DateTime) last_login = Column(DateTime) meta = Column(MutationDict.as_mutable(JSONEncodedDict)) # Constructor def __init__(self, email): self.email = email self.date_joined = datetime.datetime.utcnow() self.meta = dict() def get_full_name(self): name = u"{0} {1}".format(self.first_name, self.last_name) return name # Slug def slugify(self): return quote_plus(self.email) # Activation related def is_active(self): """ Returns True if user is active. """ return self.activation_key is None def activate(self): """ Activates the user. """ self.activation_key = None blogger.debug("activated user %s" % self.id) def deactivate(self): """ Deactivates the user. """ self.activation_key = self._generate_activation_key() blogger.debug("deactivated user %s" % self.id) def reset(self): """ Marks the user as inactive and generates a new activation key. """ self.password = UNUSABLE_PASSWORD self.deactivate() blogger.debug("resetting user %s." % self.id) def _generate_activation_key(self): from random import choice allowed_chars = "abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ0123456789" while True: key_value = ''.join([choice(allowed_chars) for i in range(8)]) if not Session.query(User).filter_by( activation_key=key_value).count(): return key_value # Password recovery def create_recovery_code(self): """ Creates an opportunity to recover the password """ self.meta['recovery'] = dict( code=self._generate_activation_key(), deadline=date(datetime.datetime.utcnow() + datetime.timedelta(hours=48))) return self.meta['recovery'] def remove_recovery_code(self): """ Removes the existing recovery code and time window """ if "recovery" in self.meta: del self.meta['recovery'] def get_recovery_code(self): """ Returns the recovery code if any exists """ return self.meta['recovery'].get('code') def has_recovery_window(self): """ Checks if the user can currently recover his password. """ return "recovery" in self.meta def is_valid_recovery_code(self, code): """ Checks if the code is correct and still in a valid time window """ return (code == self.meta.get('recovery', {}).get('code', False)\ and datetime.datetime.utcnow() <= json_to_date(self.meta['recovery'].get('deadline'))) # Password related def set_password(self, raw_password): """ Sets a new password. """ import os, base64 algo = "ssha" salt = unicode.lower(unicode(base64.b16encode(os.urandom(4)))) hsh = self._encrypt_password(algo, salt, raw_password) self.password = '******' % (algo, salt, hsh) def check_password(self, raw_password): """ Validates the raw_password to the one in the database. """ if self.has_usable_password(): algo, salt, hsh = self.password.split("$") return hsh == self._encrypt_password(algo, salt, raw_password) return False def has_usable_password(self): """ Checks whether the user has a password set. """ return self.password != UNUSABLE_PASSWORD def _encrypt_password(self, algo, salt, raw_password): """ Encrypts the raw_password for safe storage in the database. """ if algo == "ssha": import base64, hashlib raw_salt = base64.b16encode(unicode.upper(salt)) return hashlib.sha1(raw_password + raw_salt).hexdigest() raise ValueError('Got unknown password algorithm type.')
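
# Example (not from the original source): a hedged sketch of the password
# recovery flow built from the helpers above; delivering the code to the
# user (e.g. by email) and committing the session are out of scope here.
def start_password_recovery(user):
    recovery = user.create_recovery_code()
    # recovery['code'] must reach the user out of band
    return recovery['code']

def finish_password_recovery(user, code, new_password):
    if user.has_recovery_window() and user.is_valid_recovery_code(code):
        user.set_password(new_password)
        user.remove_recovery_code()
        return True
    return False
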
class PbxDialog(Base): __tablename__ = 'sip_dialogs' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) call_id = Column(Unicode(255)) uuid = Column(Unicode(255)) sip_to_user = Column(Unicode(255)) sip_to_host = Column(Unicode(255)) sip_from_user = Column(Unicode(255)) sip_from_host = Column(Unicode(255)) contact_user = Column(Unicode(255)) contact_host = Column(Unicode(255)) state = Column(Unicode(255)) direction = Column(Unicode(255)) user_agent = Column(Unicode(255)) profile_name = Column(Unicode(255)) hostname = Column(Unicode(255)) contact = Column(Unicode(255)) presence_id = Column(Unicode(255)) presence_data = Column(Unicode(255)) call_info = Column(Unicode(255)) call_info_state = Column(Unicode(255)) expires = Column(Integer, default=0) status = Column(Unicode(255)) rpid = Column(Unicode(255)) sip_to_tag = Column(Unicode(255)) sip_from_tag = Column(Unicode(255)) rcd = Column(Integer, default=0)
class PbxChannel(Base): __tablename__ = 'channels' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) uuid = Column(Unicode(255)) direction = Column(Unicode(32)) created = Column(Unicode(128)) created_epoch = Column(Integer, nullable=False, default=0) name = Column(Unicode(1024)) state = Column(Unicode(64)) cid_name = Column(Unicode(1024)) cid_num = Column(Unicode(255)) ip_addr = Column(Unicode(255)) dest = Column(Unicode(1024)) application = Column(Unicode(128)) application_data = Column(Unicode(4096)) dialplan = Column(Unicode(128)) context = Column(Unicode(128)) read_codec = Column(Unicode(128)) read_rate = Column(Unicode(32)) read_bit_rate = Column(Unicode(32)) write_codec = Column(Unicode(128)) write_rate = Column(Unicode(32)) write_bit_rate = Column(Unicode(32)) secure = Column(Unicode(32)) hostname = Column(Unicode(255)) presence_id = Column(Unicode(4096)) presence_data = Column(Unicode(4096)) callstate = Column(Unicode(64)) callee_name = Column(Unicode(1024)) callee_num = Column(Unicode(255)) callee_direction = Column(Unicode(15)) call_uuid = Column(Unicode(255)) sent_callee_name = Column(Unicode(1024)) sent_callee_num = Column(Unicode(255))
class PbxCdr(Base): __tablename__ = 'cdr' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) caller_id_name = Column(Unicode(64)) caller_id_number = Column(Unicode(64)) destination_number = Column(Unicode(64)) context = Column(Unicode(64)) start_stamp = Column(DateTime, default=datetime.now()) answer_stamp = Column(DateTime, default=datetime.now()) end_stamp = Column(DateTime, default=datetime.now()) duration = Column(Integer, default=0) billsec = Column(Integer, default=0) hangup_cause = Column(Unicode(128)) uuid = Column(Unicode(64)) bleg_uuid = Column(Unicode(64)) accountcode = Column(Unicode(16)) local_ip_v4 = Column(Unicode(15)) read_codec = Column(Unicode(128)) write_codec = Column(Unicode(128)) call_direction = Column(Unicode(16)) user_id = Column(Unicode(16)) customer_id = Column(Unicode(16)) extension = Column(Unicode(16)) def __repr__(self): return "PbxCdr(%(id)s)" % self.__dict__
class PbxRegistration(Base): __tablename__ = 'sip_registrations' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) call_id = Column(Unicode(255)) sip_user = Column(Unicode(255)) sip_host = Column(Unicode(255)) presence_hosts = Column(Unicode(255)) contact = Column(Unicode(1024)) status = Column(Unicode(255)) rpid = Column(Unicode(255)) expires = Column(Integer) user_agent = Column(Unicode(255)) server_user = Column(Unicode(255)) server_host = Column(Unicode(255)) profile_name = Column(Unicode(255)) hostname = Column(Unicode(255)) network_ip = Column(Unicode(255)) network_port = Column(Unicode(255)) sip_username = Column(Unicode(255)) sip_realm = Column(Unicode(255)) mwi_user = Column(Unicode(255)) mwi_host = Column(Unicode(255)) orig_server_host = Column(Unicode(255)) orig_hostname = Column(Unicode(255)) sub_host = Column(Unicode(255))
class PbxEndpoint(Base): __tablename__ = 'pbx_endpoints' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) auth_id = Column(Unicode(64), nullable=False) password = Column(Unicode(64), nullable=False, default=u"O9876543$") outbound_caller_id_name = Column(Unicode(64), nullable=False, default=u"Anonymous") outbound_caller_id_number = Column(Unicode(64), nullable=False) internal_caller_id_name = Column(Unicode(64), nullable=False, default=u"Anonymous") internal_caller_id_number = Column(Unicode(64), nullable=False, default=auth_id) user_context = Column(Unicode(64), nullable=False, default=u"sip.freepybx.org") force_transfer_context = Column(Unicode(64), nullable=False, default=u"sip.freepybx.org") user_originated = Column(Unicode(64), nullable=False, default=True) mac = Column(Unicode(12)) timezone = Column(Unicode(12)) toll_allow = Column(Unicode(64), nullable=False, default=u"domestic") accountcode = Column(Unicode(64), nullable=False, default=u"0") vm_email = Column(Unicode(64), nullable=True) vm_password = Column(Unicode(64), nullable=False, default=u'9999') vm_attach_email = Column(Boolean, default=False) vm_notify_email = Column(Boolean, default=False) vm_save = Column(Boolean, default=True) sip_force_contact = Column(Unicode(64), nullable=False, default=u"nat-connectile-dysfunction") transfer_fallback_extension = Column(Unicode(64), nullable=False, default=u"operator") ring_strategy = Column(Unicode(32), default=u'sequential') find_me = Column(Boolean, default=False) follow_me_1 = Column(Unicode(15)) follow_me_2 = Column(Unicode(15)) follow_me_3 = Column(Unicode(15)) follow_me_4 = Column(Unicode(15)) device_type_id = Column(Integer, default=0) auto_provision = Column(Boolean, default=False) include_xml_directory = Column(Boolean, default=False) call_timeout = Column(Integer) timeout_destination = Column(Integer) calling_rule_id = Column(Integer, default=3) record_outbound_calls = Column(Boolean, default=False) record_inbound_calls = Column(Boolean, default=False) user_id = Column( Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="CASCADE")) def __str__(self): return self.id def form_dict(self): endpoint = {} endpoint['id'] = self.id endpoint['password'] = self.password return endpoint
class PbxGateway(Base): __tablename__ = 'pbx_gateways' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) name = Column(Unicode(64), nullable=True) pbx_profile_id = Column( Integer, ForeignKey('pbx_profiles.id', onupdate="CASCADE", ondelete="CASCADE")) username = Column(Unicode(64), nullable=True) password = Column(Unicode(64), nullable=True) proxy = Column(Unicode(64), nullable=True) mask = Column(Unicode(15), default=u'32') register = Column(Boolean, default=False) register_proxy = Column(Unicode(128), nullable=True) register_transport = Column(Unicode(64), default=u"udp") extension = Column(Unicode(64), nullable=True) realm = Column(Unicode(64), nullable=True) from_domain = Column(Unicode(64), nullable=True) from_user = Column(Unicode(32), nullable=True) expire_seconds = Column(Integer, nullable=False, default=600) retry_seconds = Column(Integer, nullable=False, default=30) ping = Column(Unicode(4), default=u"60") context = Column(Unicode(32), default=u"default") caller_id_in_from = Column(Boolean, default=False) contact_params = Column(Unicode(32), nullable=True) rfc5626 = Column(Boolean, default=True) reg_id = Column(Integer, nullable=True, default=1) def __str__(self): return self.name
class PbxProfile(Base): __tablename__ = 'pbx_profiles' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) name = Column(Unicode(64), nullable=True) odbc_credentials = Column(Unicode(64), nullable=True) manage_presence = Column(Boolean, default=True) presence_db_name = Column(Unicode(64), default=u'share_presence') presence_hosts = Column(UnicodeText, default=u'sip.mydomain.com') send_presence_on_register = Column(Boolean, default=True) delete_subs_on_register = Column(Boolean, default=True) caller_id_type = Column(Unicode(16), default=u'rpid') auto_jitterbuffer_msec = Column(Integer, default=120) apply_inbound_acl = Column(Unicode(64), default=u'gateways') dialplan = Column(Unicode(64), default=u'XML,enum') ext_rtp_ip = Column(Unicode(64), default=u"auto") ext_sip_ip = Column(Unicode(64), default=u"auto") rtp_ip = Column(Unicode(64), default=u"auto") sip_ip = Column(Unicode(64), default=u"auto") sip_port = Column(Integer, nullable=False, default=5060) sql_in_transactions = Column(Boolean, default=False) nonce_ttl = Column(Integer, default=60) use_rtp_timer = Column(Boolean, default=True) rtp_timer_name = Column(Unicode(64), default=u"soft") codec_prefs = Column(Unicode(255), default=u'PCMU,PCMA,G722,G726,H264,H263') inbound_codec_negotiation = Column(Unicode(64), default=u"generous") rtp_timeout_sec = Column(Integer, default=300) rtp_hold_timeout_sec = Column(Integer, default=1800) rfc2833_pt = Column(Integer, default=101) dtmf_duration = Column(Integer, default=100) dtmf_type = Column(Unicode(64), default=u'rfc2833') session_timeout = Column(Integer, default=1800) multiple_registrations = Column(Unicode(64), default=u'contact') vm_from_email = Column(Unicode(64), default=u'*****@*****.**') accept_blind_reg = Column(Boolean, default=False) auth_calls = Column(Boolean, default=True) auth_all_packets = Column(Boolean, default=False) log_auth_failures = Column(Boolean, default=True) disable_register = Column(Boolean, default=False) codec_ms = Column(Integer, default=20) minimum_session_expires = Column(Integer, default=120) email_domain = Column(Unicode(64), default=u'freeswitch.org') def get_gateways(self): return Session.query(PbxGateway).filter_by(pbx_profile_id=self.id) def __str__(self): return "sip profile: %s:%s" % (self.ext_rtp_ip, self.sip_port) def __repr__(self): return u'%r' % self.__dict__
class User(DatabaseObject): DOMAIN_CHARS = digits + ascii_lowercase LINUX_EPOCH = datetime(1970, 1, 1, 0, 0) MIN_PASSWORD_LENGTH = 1 if options.debug else 12 OTP_LENGTH = 8 OTP_STEP = 30 OTP_ISSUER = "XSS-Hunter" FULL_NAME_LENGTH = 120 _full_name = Column(Unicode(FULL_NAME_LENGTH)) FULL_NAME_SCHEMA = { "type": "string", "minLength": 1, "maxLength": FULL_NAME_LENGTH } USERNAME_LENGTH = 80 _username = Column(Unicode(USERNAME_LENGTH), unique=True, nullable=False) USERNAME_SCHEMA = { "type": "string", "minLength": 1, "maxLength": USERNAME_LENGTH } _password = Column(String(120)) EMAIL_LENGTH = 120 _email = Column(String(EMAIL_LENGTH), unique=True, nullable=False) EMAIL_SCHEMA = { "type": "string", "format": "email", "minLength": 1, "maxLength": EMAIL_LENGTH } DOMAIN_LENGTH = 32 _domain = Column(String(DOMAIN_LENGTH), unique=True) DOMAIN_SCHEMA = { "type": "string", "maxLength": DOMAIN_LENGTH, "minLength": 1 } _pgp_key = Column(Text()) _chainload_uri = Column(URLType()) email_enabled = Column(Boolean, default=False) _locked = Column(Boolean, default=False) _last_login = Column(DateTime) _otp_enabled = Column(Boolean, default=False) _otp_secret = Column(EncryptedType(String(128), options.database_secret)) _password_reset_token_expires = Column(DateTime, default=LINUX_EPOCH) _password_reset_token = Column(String(128), nullable=False, default=lambda: urandom(32).encode('hex')) _api_key = Column(String(128), nullable=False, default=lambda: urandom(32).encode('hex')) injections = relationship("InjectionRecord", backref=backref("user", lazy="select"), cascade="all,delete,delete-orphan") permissions = relationship("Permission", backref=backref("user", lazy="select"), cascade="all,delete,delete-orphan") @classmethod def by_domain(cls, domain): return DBSession().query(cls).filter_by(_domain=domain).first() @classmethod def by_username(cls, username): username = ''.join(username[:80].split()) return DBSession().query(cls).filter_by(_username=username).first() @classmethod def by_api_key(cls, api_key): return DBSession().query(cls).filter_by( _api_key=sha512(api_key).digest()).first() @staticmethod def hash_password(password, salt=None): """ BCrypt has a max lenght of 72 chars, we first throw the plaintext thru SHA256 to support passwords greater than 72 chars. """ if salt is None: salt = bcrypt.gensalt(10) return bcrypt.hashpw(sha512(password).digest(), salt) @property def permission_names(self): """ Return a list with all permissions accounts granted to the user """ return [permission.name for permission in self.permissions] def has_permission(self, permission): """ Return True if 'permission' is in permissions_names """ return True if permission in self.permission_names else False def compare_password(self, in_password): return self.hash_password(in_password, self.password) == self.password def generate_password_reset_token(self): """ Generates a new password reset token and returns it, also save the new token as a hash in the database. """ token = urandom(32).encode('hex') self._password_reset_token = sha512(token).hexdigest() expires_at = datetime.utcnow() + timedelta(hours=1) self._password_reset_token_expires = expires_at return token def generate_api_key(self): token = urandom(32).encode('hex') self._api_key = sha512(token).hexdigest() return token def validate_password_reset_token(self, token): """ You can't do a remote timing attack since we hash the input token, well unless you can generate lots of sha512 collisions, in which case you earned it buddy. 
""" if datetime.utcnow() < self._password_reset_token_expires: if sha512(token).hexdigest() == self._password_reset_token: # Token can only be used once, override old value with garbage self._password_reset_token = urandom(32).encode('hex') self._password_reset_token_expires = User.LINUX_EPOCH return True return False @property def full_name(self): return self._full_name @full_name.setter def full_name(self, in_fullname): assert isinstance(in_fullname, basestring) self._full_name = in_fullname[:self.FULL_NAME_LENGTH].strip() @property def username(self): return self._username @username.setter def username(self, in_username): assert isinstance(in_username, basestring) self.username = ''.join(in_username.split())[:self.USERNAME_LENGTH] @property def password(self): return self._password @password.setter def password(self, in_password): if len(in_password) < self.MIN_PASSWORD_LENGTH: raise ValueError("Password must be %d+ chars" % (self.MIN_PASSWORD_LENGTH)) self._password = self.hash_password(in_password) @property def pgp_key(self): return self._pgp_key @pgp_key.setter def pgp_key(self, in_pgp_key): self._pgp_key = in_pgp_key @property def email(self): return self._email @email.setter def email(self, in_email): in_email = in_email.strip() @property def domain(self): return self._domain @domain.setter def domain(self, set_domain): assert isinstance(set_domain, basestring) if not 0 < len(set_domain) <= 32: raise ValueError("Invalid domain length") if any(char not in self.DOMAIN_CHARS for char in set_domain): raise ValueError("Invalid domain, domains can only contain %s" % (self.DOMAIN_CHARS)) # Check for duplicates if self.by_domain(set_domain) is not None: raise ValueError("Duplicate domain") else: self._domain = set_domain @property def last_login(self): return time.mktime(self._last_login.timetuple()) @last_login.setter def last_login(self, value): self._last_login = value @property def locked(self): return self._locked @locked.setter def locked(self, value): """ Lock account and revoke all API keys """ assert isinstance(value, bool) if value: self._locked = True self._api_key = urandom(32).encode('hex') else: self._locked = False def validate_otp(self, value): """ Validate a one-time password """ try: self._otp.verify(value.encode("ascii", "ignore"), time()) return True except InvalidToken: return False @property def otp_enabled(self): return self._otp_enabled @otp_enabled.setter def otp_enabled(self, value): """ Ensures that when 2fa is enabled/disabled we always use a fresh key """ assert isinstance(value, bool) if value: self._otp_enabled = True self._otp_secret = urandom(64).encode('hex') else: self._otp_enabled = False self._otp_secret = "" @property def _otp(self): """ Current one time password implementation, time-based "TOTP" https://cryptography.io/en/latest/hazmat/primitives/twofactor/ """ if not self._otp_enabled or len(self._otp_secret) < 1: raise ValueError("2FA/OTP is not enabled for this user") key = self._otp_secret.decode('hex') return TOTP(key, self.OTP_LENGTH, SHA512(), self.OTP_STEP, backend=default_backend()) @property def otp_provisioning_uri(self): """ Generate an enrollment URI for Authetnicator apps """ return self._otp.get_provisioning_uri(self.username, self.OTP_ISSUER) def to_dict(self): return { "id": self.id, "created": self.created, "full_name": self.full_name, "email": self.email, "username": self.username, "pgp_key": self.pgp_key, "domain": self.domain, "email_enabled": self.email_enabled } def to_admin_dict(self): data = self.to_dict() data["updated"] = 
self.updated data["locked"] = self.locked data["last_login"] = self.last_login return data def __str__(self): return self.username + " - ( " + self.full_name + " )"
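
# Example (not from the original source): a hedged sketch of a login check
# that combines the password and TOTP helpers above; session handling and
# the calling request handler are assumptions outside this sketch.
def authenticate(username, password, otp_value=None):
    user = User.by_username(username)
    if user is None or user.locked or not user.compare_password(password):
        return None
    if user.otp_enabled and not user.validate_otp(otp_value or ""):
        return None
    user.last_login = datetime.utcnow()
    return user
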
class e911Address(Base): __tablename__ = 'e911_addresses' query = Session.query_property() id = Column(Integer, autoincrement=True, primary_key=True) customer_id = Column( Integer, ForeignKey('customers.id', onupdate="CASCADE", ondelete="CASCADE")) house_num = Column(Unicode(32)) house_num_suffix = Column(Unicode(32)) prefix_directional = Column( Integer, ForeignKey('e911_directional_types.id', onupdate="CASCADE", ondelete="CASCADE")) street_name = Column(Unicode(32)) street_suffix = Column( Integer, ForeignKey('e911_street_types.id', onupdate="CASCADE", ondelete="CASCADE")) post_directional = Column( Integer, ForeignKey('e911_directional_types.id', onupdate="CASCADE", ondelete="CASCADE")) msag_community = Column(Unicode(32)) state_province = Column(Unicode(32)) county_id = Column(Unicode(32)) country = Column(Unicode(32)) tar_code = Column(Unicode(32)) postal_code = Column(Unicode(32)) building = Column(Unicode(32)) floor = Column(Unicode(32)) unit_num = Column(Unicode(32)) unit_type = Column( Integer, ForeignKey('e911_unit_types.id', onupdate="CASCADE", ondelete="CASCADE")) location_description = Column(UnicodeText) editable = Column(Boolean, default=True)
from datetime import datetime

from sqlalchemy import Table, ForeignKey, Column, sql
from sqlalchemy.types import Unicode, UnicodeText, Integer, DateTime, Boolean, Float
from sqlalchemy.orm import mapper, relation, backref, synonym, interfaces, validates, Query
from sqlalchemy.orm.attributes import set_committed_value

from mediacore.lib.compat import defaultdict
from mediacore.model import SLUG_LENGTH, slugify
from mediacore.model.meta import DBSession, metadata
from mediacore.plugin import events

categories = Table('categories', metadata,
    Column('id', Integer, autoincrement=True, primary_key=True),
    Column('name', Unicode(50), nullable=False, index=True),
    Column('slug', Unicode(SLUG_LENGTH), nullable=False, unique=True),
    Column('parent_id', Integer,
           ForeignKey('categories.id', onupdate='CASCADE', ondelete='CASCADE')),
    mysql_engine='InnoDB',
    mysql_charset='utf8',
)


class CategoryNestingException(Exception):
    pass
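
# Example (not from the original source): `categories` above is a
# self-referential adjacency list (parent_id -> categories.id). A hedged
# sketch of walking to the root, assuming the mapped Category class
# (configured elsewhere in this module) exposes a `parent` relation over
# that foreign key.
def category_path(category):
    ''' Return the ancestors of a category, root first, ending with itself. '''
    path = [category]
    while path[0].parent is not None:
        path.insert(0, path[0].parent)
    return path
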
class Flag(DatabaseObject): ''' Flags that can be captured by players and what not. This object comes in these flavors: -static -regex -datetime -file -choice Depending on the cls._type value. For more information see the wiki. ''' uuid = Column(String(36), unique=True, nullable=False, default=lambda: str(uuid4())) box_id = Column(Integer, ForeignKey('box.id'), nullable=False) lock_id = Column(Integer, ForeignKey('flag.id', ondelete="SET NULL"), nullable=True) _name = Column(Unicode(64), nullable=False) _token = Column(Unicode(256), nullable=False) _description = Column(Unicode(1024), nullable=False) _capture_message = Column(Unicode(512)) _case_sensitive = Column(Integer, nullable=True) _value = Column(Integer, nullable=False) _original_value = Column(Integer, nullable=False) _order = Column(Integer, nullable=True, index=True) _type = Column(Unicode(16), default=False) flag_attachments = relationship("FlagAttachment", backref=backref("flag", lazy="select")) flag_choice = relationship("FlagChoice", backref=backref("flag", lazy="select"), cascade="all,delete,delete-orphan") penalties = relationship("Penalty", backref=backref("flag", lazy="select"), cascade="all,delete,delete-orphan") hints = relationship("Hint", backref=backref("flag", lazy="select"), cascade="all,delete,delete-orphan") FLAG_TYPES = [ FLAG_FILE, FLAG_REGEX, FLAG_STATIC, FLAG_DATETIME, FLAG_CHOICE ] @classmethod def all(cls): ''' Returns a list of all objects in the database ''' return dbsession.query(cls).all() @classmethod def by_id(cls, _id): ''' Returns a the object with id of _id ''' return dbsession.query(cls).filter_by(id=_id).first() @classmethod def by_name(cls, name): ''' Returns a the object with name of _name ''' return dbsession.query(cls).filter_by(_name=unicode(name)).first() @classmethod def by_uuid(cls, _uuid): ''' Return and object based on a uuid ''' return dbsession.query(cls).filter_by(uuid=unicode(_uuid)).first() @classmethod def by_token(cls, token): ''' Return and object based on a token ''' return dbsession.query(cls).filter_by(_token=unicode(token)).first() @classmethod def by_token_and_box_id(cls, token, box_id): ''' Return and object based on a token ''' return dbsession.query(cls).filter_by(_token=unicode(token), box_id=box_id).first() @classmethod def by_type(cls, _type): ''' Return and object based on a token ''' return dbsession.query(cls).filter_by(_type=unicode(_type)).all() @classmethod def captures(cls, _id): return dbsession.query(team_to_flag).filter_by(flag_id=_id).all() @classmethod def create_flag(cls, _type, box, name, raw_token, description, value): ''' Check parameters applicable to all flag types ''' creators = { FLAG_STATIC: cls._create_flag_static, FLAG_REGEX: cls._create_flag_regex, FLAG_FILE: cls._create_flag_file, FLAG_DATETIME: cls._create_flag_datetime, FLAG_CHOICE: cls._create_flag_choice, } #TODO Don't understand why this is here - name is not unqiue value # and you could simply name questions per box, like "Question 1" - ElJefe 6/1/2018 #if cls.by_name(name) is not None: #raise ValidationError('Flag name already exists in database') assert box is not None and isinstance(box, Box) new_flag = creators[_type](box, name, raw_token, description, value) new_flag._type = _type return new_flag @classmethod def _create_flag_file(cls, box, name, raw_token, description, value): ''' Check flag file specific parameters ''' token = cls.digest(raw_token) return cls(box_id=box.id, name=name, token=token, description=description, value=value, original_value=value) @classmethod def 
_create_flag_regex(cls, box, name, raw_token, description, value): ''' Check flag regex specific parameters ''' try: re.compile(raw_token) except: raise ValidationError('Flag token is not a valid regex') return cls(box_id=box.id, name=name, token=raw_token, description=description, value=value, original_value=value) @classmethod def _create_flag_static(cls, box, name, raw_token, description, value): ''' Check flag static specific parameters ''' return cls(box_id=box.id, name=name, token=raw_token, description=description, value=value, original_value=value) @classmethod def _create_flag_datetime(cls, box, name, raw_token, description, value): ''' Check flag datetime specific parameters ''' try: parse(raw_token) except: raise ValidationError('Flag token is not a valid datetime') return cls(box_id=box.id, name=name, token=raw_token, description=description, value=value, original_value=value) @classmethod def _create_flag_choice(cls, box, name, raw_token, description, value): ''' Check flag choice specific parameters ''' return cls(box_id=box.id, name=name, token=raw_token, description=description, value=value, original_value=value) @classmethod def digest(cls, data): ''' Token is SHA1 of data ''' return hashlib.sha1(data).hexdigest() @property def game_level(self): return self.box.game_level @property def name(self): return self._name @name.setter def name(self, value): if not 3 <= len(value) <= 16: raise ValidationError("Flag name must be 3 - 16 characters") self._name = unicode(value) @property def order(self): return self._order @order.setter def order(self, value): self._order = int(value) @property def description(self): return self._description @description.setter def description(self, value): self._description = unicode(value)[:1024] @property def capture_message(self): return self._capture_message if self._capture_message else '' @capture_message.setter def capture_message(self, value): self._capture_message = unicode(value) @property def type(self): return self._type @type.setter def type(self, value): if value not in self.FLAG_TYPES: raise ValueError("Invalid flag type") self._type = unicode(value) @property def token(self): return self._token @token.setter def token(self, value): self._token = unicode(value) @property def case_sensitive(self): return self._case_sensitive @case_sensitive.setter def case_sensitive(self, value): if value is None: self._case_sensitive = 0 else: self._case_sensitive = value @property def value(self): return self._value @value.setter def value(self, value): try: self._value = abs(int(value)) except ValueError: raise ValidationError("Reward value must be an integer") @property def original_value(self): return self._original_value @original_value.setter def original_value(self, value): try: self._original_value = abs(int(value)) except ValueError: raise ValidationError("Reward value must be an integer") @property def get_lock_id(self): return self.lock_id @get_lock_id.setter def set_lock_id(self, value): try: if value is None: self.lock_id = value else: self.lock_id = abs(int(value)) except ValueError: self.lock_id = None @property def is_text(self): return self._type == FLAG_REGEX or self._type == FLAG_STATIC @property def is_static(self): return self._type == FLAG_STATIC @property def is_file(self): return self._type == FLAG_FILE @property def box(self): return Box.by_id(self.box_id) def choices(self): # includes the choice uuid - needed for editing a choice choices = [] if self._type == FLAG_CHOICE: choicelist = FlagChoice.by_flag_id(self.id) if choicelist is not
None and len(choicelist) > 0: for flagchoice in choicelist: choices.append(flagchoice.to_dict()) return json.dumps(choices) def choicelist(self): #excludes the choice uuid choices = [] if self._type == FLAG_CHOICE: choicelist = FlagChoice.by_flag_id(self.id) if choicelist is not None and len(choicelist) > 0: for flagchoice in choicelist: choices.append(flagchoice.choice) return json.dumps(choices) def capture(self, submission): if self._type == FLAG_STATIC: if self._case_sensitive == 0: return str(self.token).lower().strip() == str( submission).lower().strip() else: return str(self.token).strip() == str(submission).strip() elif self._type == FLAG_REGEX: if not self.token.startswith("^(") and not self.token.endswith( ")$"): self.token = "^(" + self.token + ")$" if self._case_sensitive == 0: pattern = re.compile(self.token, re.IGNORECASE) else: pattern = re.compile(self.token) return pattern.match(submission) is not None elif self._type == FLAG_FILE: return self.token == self.digest(submission) elif self._type == FLAG_CHOICE: return self.token == submission elif self._type == FLAG_DATETIME: try: return parse(self.token) == parse(submission) except: return False else: raise ValueError('Invalid flag type, cannot capture') def to_xml(self, parent): ''' Write attributes to XML doc ''' flag_elem = ET.SubElement(parent, "flag") flag_elem.set("type", self._type) ET.SubElement(flag_elem, "name").text = self.name ET.SubElement(flag_elem, "token").text = self.token ET.SubElement(flag_elem, "description").text = self.description ET.SubElement(flag_elem, "capture_message").text = self.capture_message ET.SubElement(flag_elem, "value").text = str(self.value) ET.SubElement(flag_elem, "original_value").text = str(self.original_value) if self.lock_id: ET.SubElement(flag_elem, "depends_on").text = Flag.by_id(self.lock_id).name ET.SubElement(flag_elem, "case_sensitive").text = str(self.case_sensitive) attachements_elem = ET.SubElement(flag_elem, "flag_attachments") attachements_elem.set("count", str(len(self.flag_attachments))) for attachement in self.flag_attachments: attachement.to_xml(attachements_elem) choice_elem = ET.SubElement(flag_elem, "flag_choices") choice_elem.set("count", str(len(self.flag_choice))) for choice in self.flag_choice: ET.SubElement(choice_elem, "choice").text = choice.choice from models.Hint import Hint xml_hints = Hint.by_flag_id(self.id) hints_elem = ET.SubElement(flag_elem, "hints") hints_elem.set("count", str(len(xml_hints))) for hint in xml_hints: if not hint.flag_id is None: hint.to_xml(hints_elem) def to_dict(self): ''' Returns public data as a dict ''' box = Box.by_id(self.box_id) if self.lock_id: lock_uuid = Flag.by_id(self.lock_id).uuid else: lock_uuid = '' case_sensitive = self.case_sensitive if case_sensitive != 0: case_sensitive = 1 return { 'name': self.name, 'uuid': self.uuid, 'description': self.description, 'capture_message': self.capture_message, 'value': self.value, 'original_value': self.original_value, 'box': box.uuid, 'token': self.token, 'lock_uuid': lock_uuid, 'case-sensitive': case_sensitive, 'flagtype': self.type, 'choices': self.choices() } def __repr__(self): return "<Flag - name:%s, type:%s >" % (self.name, str(self._type))
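# --- standalone sketch of the capture matching above (not from the source) ----
# Flag.capture compares a submission against the stored token: static flags do
# a stripped (optionally case-insensitive) string compare, regex flags anchor
# the pattern in ^( ... )$ before matching, and file flags compare SHA1 digests.
# A minimal sketch of those rules without the ORM; sample tokens are made up.
import hashlib
import re

def matches_static(token, submission, case_sensitive=False):
    if not case_sensitive:
        return token.lower().strip() == submission.lower().strip()
    return token.strip() == submission.strip()

def matches_regex(token, submission, case_sensitive=False):
    if not token.startswith("^(") and not token.endswith(")$"):
        token = "^(" + token + ")$"        # anchor the pattern, as capture() does
    flags = 0 if case_sensitive else re.IGNORECASE
    return re.match(token, submission, flags) is not None

def matches_file(token, file_bytes):
    return token == hashlib.sha1(file_bytes).hexdigest()

assert matches_static("FLAG{abc}", "  flag{ABC} ")
assert matches_regex("flag_[0-9]+", "FLAG_42")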
from sqlalchemy import Column, sql, Table from sqlalchemy.orm import column_property, dynamic_loader, mapper from sqlalchemy.types import Boolean, DateTime, Integer, Unicode from mediadrop.lib.storage import StorageEngine from mediadrop.model.media import MediaFile, MediaFileQuery, media_files from mediadrop.model.meta import metadata from mediadrop.model.util import JSONType log = logging.getLogger(__name__) storage = Table( 'storage', metadata, Column('id', Integer, primary_key=True, autoincrement=True), Column('engine_type', Unicode(30), nullable=False), Column('display_name', Unicode(100), nullable=False, unique=True), Column('enabled', Boolean, nullable=False, default=True), Column('created_on', DateTime, nullable=False, default=datetime.now), Column('modified_on', DateTime, nullable=False, default=datetime.now, onupdate=datetime.now), Column('data', JSONType, nullable=False, default=dict), mysql_engine='InnoDB', mysql_charset='utf8', ) storage_mapper = mapper( StorageEngine,
class TriggerRule(DeclarativeBase): """ A single rule or line of a set for a trigger. The stop field will mean no more rules for this Trigger are evaluated if this rule is true. """ __tablename__ = 'trigger_rules' # { Columns id = Column(Integer, autoincrement=True, primary_key=True) trigger_id = Column(Integer, ForeignKey('triggers.id'), nullable=False) trigger = relationship('Trigger') position = Column(SmallInteger, nullable=False, default=1) field = Column(SmallInteger, nullable=False, default=0) oper = Column(Unicode(5)) limit = Column(Unicode(100)) stop = Column(Boolean, nullable=False, default=False) and_rule = Column(Boolean, nullable=False, default=True) # } allowed_opers = { '=': operator.eq, '<>': operator.ne, '>': operator.gt, '<': operator.lt, '>=': operator.ge, '<=': operator.le, 'IN': MyOperator.in_, '!IN': MyOperator.notin, 'C': MyOperator.contains, '!C': MyOperator.notcontains, } def __init__(self, trigger=None, field=None, oper=None, value=None, position=1): self.trigger = trigger if field is not None: self.set_field(field) if oper is not None: self.oper = oper self.value = value self.position = position def field_name(self): return trigger_fields[self.field] def set_field(self, fname): """ Set the field using the fieldname """ if fname not in trigger_fields: raise ValueError('Trigger Field {} not valid.'.format(fname)) self.field = trigger_fields.index(fname) def set_limit(self, limits): """ Convert the limits as tags into indexes """ from rnms.model import EventType fname = trigger_fields[self.field] if fname == 'event_type': self.limit = ','.join( [unicode(x[0]) for x in DBSession.query(EventType.id). filter(EventType.tag.in_(limits.split(',')))]) return raise ValueError("Don't have limits for {}".format(fname)) def eval(self, previous_result, event): """ Process this trigger rule against the event Returns rule_result = whether or not the rule matches """ if previous_result and not self.and_rule: return True # True OR whatever is True test_value = self._get_event_field(event) if test_value is None: return False this_result = self.operate(test_value) if self.and_rule is True: return previous_result and this_result else: return previous_result or this_result def operate(self, test_value): """ Given the event field, run the operator against our limit """ try: x = float(test_value) y = float(self.limit) except ValueError: x = test_value y = self.limit try: this_oper = self.allowed_opers[self.oper] except KeyError: return False else: return this_oper(x, y) def _get_event_field(self, event): """ Extracts the field out of the given event """ if event is None: return None try: field_name = trigger_fields[self.field] except IndexError: return None if field_name == 'hour': return event.created.hour if field_name == 'attribute': return event.attribute_id if field_name == 'attribute_name': return event.attribute.display_name if field_name == 'attribute_type': return event.attribute.attribute_type_id if field_name == 'host': return event.attribute.host_id if field_name == 'event_type': return event.event_type_id if field_name == 'event_state': return event.event_state_id
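# --- standalone sketch of the rule evaluation above (not from the source) -----
# TriggerRule.operate coerces both sides to float when possible, then dispatches
# on an operator symbol; eval() chains results with and_rule. A minimal sketch
# using only the operator module (no ORM, no MyOperator helpers); values are
# made up.
import operator

_OPERS = {'=': operator.eq, '<>': operator.ne, '>': operator.gt,
          '<': operator.lt, '>=': operator.ge, '<=': operator.le}

def operate(oper, test_value, limit):
    try:
        x, y = float(test_value), float(limit)
    except ValueError:
        x, y = test_value, limit           # fall back to string comparison
    func = _OPERS.get(oper)
    return func(x, y) if func is not None else False

def chain(previous_result, this_result, and_rule=True):
    return (previous_result and this_result) if and_rule \
        else (previous_result or this_result)

assert operate('>=', '10', '5') is True    # numeric coercion: 10.0 >= 5.0
assert chain(True, operate('<>', 'up', 'down')) is True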
from sqlalchemy import create_engine, MetaData, Table, Column from sqlalchemy.types import Integer, Unicode, UnicodeText from sqlalchemy.schema import ForeignKey # Get engine. engine = create_engine('sqlite:///school.sqlite') # Get metadata. metadata = MetaData() # Define teachers table schema. teachersTable = Table('Teachers', metadata, Column('id', Integer, primary_key=True), Column('name', Unicode(255)), Column('subject', Unicode(255))) # Create table. metadata.create_all(engine)
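# --- usage sketch for the table defined above (not from the source) -----------
# Once the schema exists, rows can be inserted and read back with SQLAlchemy
# Core through the same engine; engine.begin() commits on success. The sample
# teacher rows are made up for illustration.
with engine.begin() as conn:
    conn.execute(teachersTable.insert(), [
        {'name': u'Ada Lovelace', 'subject': u'Mathematics'},
        {'name': u'Alan Turing', 'subject': u'Computer Science'},
    ])
    for row in conn.execute(teachersTable.select()):
        print(row.id, row.name, row.subject)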
class Tag(Base): __tablename__ = 'tags' name = Column(Unicode(length=128), unique=True, primary_key=True) type = Column(Unicode(length=256))
class User(DeclarativeBase): """ User definition. This is the user definition used by :mod:`repoze.who`, which requires at least the ``user_name`` column. """ __tablename__ = 'tg_user' #{ Columns user_id = Column(Integer, autoincrement=True, primary_key=True) user_name = Column(Unicode(16), unique=True, nullable=False) email_address = Column(Unicode(255), unique=True, nullable=False, info={'rum': { 'field': 'Email' }}) display_name = Column(Unicode(255)) _password = Column('password', Unicode(128), info={'rum': { 'field': 'Password' }}) created = Column(DateTime, default=datetime.now) #{ Special methods def __repr__(self): return ('<User: name=%s, email=%s, display=%s>' % (self.user_name, self.email_address, self.display_name)).encode('utf-8') def __unicode__(self): return self.display_name or self.user_name #{ Getters and setters @property def permissions(self): """Return a set with all permissions granted to the user.""" perms = set() for g in self.groups: perms = perms | set(g.permissions) return perms @classmethod def by_email_address(cls, email): """Return the user object whose email address is ``email``.""" return DBSession.query(cls).filter_by(email_address=email).first() @classmethod def by_user_name(cls, username): """Return the user object whose user name is ``username``.""" return DBSession.query(cls).filter_by(user_name=username).first() @classmethod def _hash_password(cls, password): # Make sure password is a str because we cannot hash unicode objects if isinstance(password, unicode): password = password.encode('utf-8') salt = sha256() salt.update(os.urandom(60)) hash = sha256() hash.update(password + salt.hexdigest()) password = salt.hexdigest() + hash.hexdigest() # Make sure the hashed password is a unicode object at the end of the # process because SQLAlchemy _wants_ unicode objects for Unicode cols if not isinstance(password, unicode): password = password.decode('utf-8') return password def _set_password(self, password): """Hash ``password`` on the fly and store its hashed version.""" self._password = self._hash_password(password) def _get_password(self): """Return the hashed version of the password.""" return self._password password = synonym('_password', descriptor=property(_get_password, _set_password)) #} def validate_password(self, password): """ Check the password against existing credentials. :param password: the password that was provided by the user to try and authenticate. This is the clear text version that we will need to match against the hashed one in the database. :type password: unicode object. :return: Whether the password is valid. :rtype: bool """ hash = sha256() if isinstance(password, unicode): password = password.encode('utf-8') hash.update(password + str(self.password[:64])) return self.password[64:] == hash.hexdigest()
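# --- standalone sketch of the password scheme above (not from the source) -----
# _hash_password stores hex(salt_digest) + hex(sha256(password + salt_digest)),
# so the first 64 hex characters are the salt and the remainder is the hash;
# validate_password re-hashes the attempt with that stored salt. A minimal
# sketch of the same scheme outside the model:
import os
from hashlib import sha256

def hash_password(password):
    salt = sha256(os.urandom(60)).hexdigest()
    digest = sha256(password.encode('utf-8') + salt.encode('utf-8')).hexdigest()
    return salt + digest                   # 64 hex chars of salt + 64 of hash

def check_password(stored, attempt):
    salt, digest = stored[:64], stored[64:]
    return digest == sha256(attempt.encode('utf-8') + salt.encode('utf-8')).hexdigest()

hashed = hash_password(u'correct horse battery staple')
assert check_password(hashed, u'correct horse battery staple')
assert not check_password(hashed, u'wrong guess')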
class Source(Base): __tablename__ = 'sources' name = Column(Unicode(length=256)) shortname = Column(Unicode(length=64), unique=True, primary_key=True)
class Feature(Base): __tablename__ = 'feature' __template__ = 'tooltips/category.mako' id = Column(Integer, primary_key=True) name = Column(Unicode(50)) description = Column(Unicode) image = Column(Unicode(255)) thumbnail = Column(Unicode(255)) color = Column(Unicode(255)) stroke = Column(Integer, default=2) is_label = Column(Boolean, default=False) is_circle = Column(Boolean, default=False) linestyle = Column(Integer, default=0) show_orientation = Column(Boolean, default=False) geometry = Column(Geometry(srid=2169)) map_id = Column(Unicode, ForeignKey('map.uuid')) symbol_id = Column(Integer) size = Column(Float, default=10) angle = Column(Float, default=0) font_size = Column(Integer, default=15) opacity = Column(Float, default=0.5) shape = Column(Unicode(255)) last_modified_by = Column(Unicode(50)) display_order = Column(Integer, default=0) update_date = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now) def __init__(self, feature=None): if feature: self.__update__(feature) def __update__(self, feature): try: order = feature.properties.get('display_order') self.display_order = order except: self.display_order = 0 self.name = feature.properties.get('name') self.description = feature.properties.get('description') self.thumbnail = feature.properties.get('thumbnail') self.image = feature.properties.get('image') self.color = feature.properties.get('color') self.stroke = feature.properties.get('stroke') self.is_label = feature.properties.get('isLabel') self.is_circle = feature.properties.get('isCircle') self.show_orientation = feature.properties.get('showOrientation') linestyle = feature.properties.get('linestyle') self.linestyle = 0 if linestyle == 'plain' else 1\ if linestyle == 'dashed' else 2\ if linestyle == 'dotted' else 0 self.shape = feature.properties.get('shape') size = feature.properties.get('size') self.size = size if size is not None and unicode(size).isnumeric()\ else 10 angle = feature.properties.get('angle') try: self.angle = float(angle) except TypeError: self.angle = 0 font_size = feature.properties.get('fontSize') self.font_size = font_size if font_size is not None and\ unicode(font_size).isnumeric() else 15 symbol_id = feature.properties.get('symbolId') self.symbol_id = None if symbol_id is not None and\ len(unicode(symbol_id)) == 0\ else symbol_id opacity = feature.properties.get('opacity') self.opacity = opacity if opacity is not None and\ unicode(opacity).isnumeric() else 0.5 if hasattr(feature.geometry, "__geo_interface__"): ob = feature.geometry.__geo_interface__ else: ob = feature.geometry geom_type = ob.get("type").lower() if geom_type != 'geometrycollection': # openlayers gpx writter creates a 4 dimension geometry and # shapely does not allow if for linestring. 
if geom_type == 'linestring': feature.geometry.coordinates = \ [coordinate[0:2] for coordinate in feature.geometry.coordinates] elif geom_type == 'multilinestring': multilinestring = feature.geometry.coordinates feature.geometry.coordinates = \ [[coord[0:2] for coord in multilinestring[i]] for i in range(len(multilinestring))] shape = asShape(feature.geometry) else: geoms = [] is_transformable = True types = None for geom in feature.geometry.geometries: if hasattr(geom, "__geo_interface__"): ob = geom.__geo_interface__ else: ob = geom geom_type = ob.get("type").lower() if types is None: types = geom_type else: is_transformable = types == geom_type if not is_transformable: break geoms.append(asShape(geom)) if is_transformable: if types == "point": shape = asMultiPoint(geoms) elif types == "linestring": shape = asMultiLineString(geoms) elif types == "polygon": shape = MultiPolygonAdapter(geoms, context_type='geojson') else: shape = None # ST_FORCE2D is used because the db only allows geometry with # 2 dimensions. self.geometry = func.ST_Force2D(from_shape(shape, srid=2169))\ if shape is not None else None @property def __geo_interface__(self): geometry = wkb.loads(str(self.geometry), True) properties = dict( name=self.name, description=self.description, thumbnail=self.thumbnail, image=self.image, color=self.color, stroke=self.stroke, isLabel=self.is_label, isCircle=self.is_circle, showOrientation=self.show_orientation, linestyle='plain' if self.linestyle == 0 else 'dashed' if self.linestyle == 1 else 'dotted', fid=self.id, symbolId=self.symbol_id, angle=self.angle if self.angle is not None else 0, size=self.size if self.size is not None else 10, fontSize=self.font_size if self.font_size is not None else 15, opacity=self.opacity if self.opacity is not None else 0.5, shape=self.shape, display_order=self.display_order if self.display_order is not None else 0, ) return geojson.Feature(id=self.id, geometry=geometry, properties=properties) @property def geom(self): if hasattr(self.geometry, "geom_wkb"): return wkb.loads(str(self.geometry.geom_wkb)) else: if hasattr(self, "_shape"): return self._shape else: return None
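# --- standalone sketch of the 2-D coercion above (not from the source) --------
# OpenLayers' GPX writer can emit (x, y, z, t) tuples while the map schema only
# stores 2-D geometries, so Feature.__update__ trims the extra dimensions before
# building a shape. A minimal sketch of that trimming (requires shapely; the
# coordinates are made up):
from shapely.geometry import LineString, MultiLineString

def force_2d(coords):
    return [c[:2] for c in coords]         # keep only (x, y)

line = LineString(force_2d([(0, 0, 287.0, 0), (1, 1, 290.5, 12)]))
multi = MultiLineString([force_2d(part) for part in
                         [[(0, 0, 1, 2), (2, 2, 1, 3)],
                          [(5, 5, 0, 4), (6, 7, 0, 5)]]])
assert not line.has_z and not multi.has_z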
class Image(Base): __tablename__ = 'images' id = Column(BigInteger, primary_key=True) title = Column(Unicode(length=512)) text = Column(UnicodeText) url = Column(Unicode(length=1024)) date = Column(DateTime) height = Column(Integer) width = Column(Integer) remote_id = Column(BigInteger) nsfw = Column(Boolean) source_url = Column(Unicode(length=1024)) full_url = Column(Unicode(length=1024)) thumb_url = Column(Unicode(length=1024)) remote_url = Column(Unicode(length=1024)) set_id = Column(Unicode(length=10)) size = Column(Integer) name = Column(Unicode(length=128)) extension = Column(Unicode(length=6)) fetched = Column(Integer) atags = Column(ARRAY(UnicodeText)) source_name = Column(Unicode(length=64), ForeignKey('sources.shortname')) source = relationship("Source", backref='images') @classmethod def from_awwnime_response(cls, response): basename = response['cdnUrl'].split('/')[-1].split('.')[0] db_dict = { 'title': unicode(response['title']), 'text': unicode(response['caption']) if response['caption'] else None, 'url': 'https://reddit.com/tb/' + response['externalId'].decode('utf-8'), 'date': datetime.fromtimestamp(response['dateCreated']), 'height': response['height'], 'width': response['width'], 'remote_id': response['imageId'], 'nsfw': response['nsfw'], 'set_id': response['externalId'], 'source_url': unicode(response['sourceUrl']) if response['sourceUrl'] else u'', 'full_url': unicode(conf.fullprefix + '/%s.%s' % (basename, response['type'])), 'thumb_url': unicode(conf.thumbprefix + '/%s.500.webp' % basename), 'remote_url': unicode('https://s3.amazonaws.com/cdn.awwni.me/%s.%s' % (basename, response['type'])), 'name': unicode(basename), 'extension': unicode(response['type']), 'fetched': 0, 'source_name': u'awwnime', } image_obj = cls(**db_dict) tags = response['keywords'].split(' ') + [ 'r/' + response['sourceName'] ] image_obj.atags = list({unicode(tag) for tag in tags}) new_tags = [Tag(name=unicode(keyword)) for keyword in tags] return image_obj, new_tags @classmethod def from_danbooru_response( cls, response, fork=False, fork_url_format=u'https://konachan.com/post/show/%d/', fork_name=u'konachan'): db_dict = { 'remote_id': int(response['id']), 'source_url': response['source'], 'size': int(response['file_size']) if response['file_size'] else None, 'fetched': 0, } tags = [] if not fork: db_dict.update({ 'date': dateparse(response['created_at']), 'url': u'https://danbooru.donmai.us/posts/%d' % response['id'], 'height': int(response['image_height']), 'width': int(response['image_width']), 'nsfw': response['is_banned'] == True, 'remote_url': 'https://danbooru.donmai.us' + response['file_url'], 'extension': response['file_ext'], 'source_name': u'danbooru', 'name': response['file_url'].split('/')[-1].split('.')[0], }) tags += [(keyword, None) for keyword in response['tag_string_general'].split(' ')] for tagname in (u'artist', u'character', u'copyright'): if int(response['tag_count_' + tagname]) >= 1: strings = set(response['tag_string_' + tagname].split(' ')) if u'loli' in strings: return tags += [(string, tagname) for string in strings] else: db_dict.update({ 'date': datetime.fromtimestamp(response['created_at']), 'url': fork_url_format % response['id'], 'height': int(response['height']), 'width': int(response['width']), 'remote_url': response['file_url'], 'extension': response['file_url'].split('/')[-1].split('.')[-1], 'source_name': fork_name, 'name': response['md5'], }) keywords = set(response['tags'].split(' ')) if u'loli' in keywords: return tags += [(keyword, None) for keyword in 
keywords] db_dict.update({ 'full_url': unicode(conf.fullprefix + '/%s.%s' % (db_dict['name'], db_dict['extension'])), 'thumb_url': unicode(conf.thumbprefix + '/%s.500.webp' % db_dict['name']), }) image_obj = cls(**db_dict) tags = set(tags) new_tags = [] for tag in tags: new_tag = Tag(name=tag[0]) if tag[1]: new_tag.type = tag[1] new_tags.append(new_tag) image_obj.atags = list({tag[0] for tag in tags}) return image_obj, new_tags
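# --- usage sketch (not from the source) ----------------------------------------
# Both factory methods bail out with a bare `return` (i.e. None) when a post
# carries a blacklisted tag, so callers have to guard the unpack. Here
# `response` stands for one parsed post dict from the Danbooru JSON API and
# `session` is an assumed SQLAlchemy session; both are assumptions.
result = Image.from_danbooru_response(response)
if result is not None:
    image, new_tags = result
    session.add(image)
    for tag in new_tags:
        session.merge(tag)                 # Tag.name is the primary key
    session.commit()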
class Item(Base): __tablename__ = 'item' __table_args__ = ({'schema': 'themes_prod', 'autoload': False}) id = Column(Unicode(255), primary_key=True) isgroup = Column(Boolean) label = Column(Unicode(255)) open_expanded = Column(Boolean) icon_type = Column(Unicode(10)) image_format = Column(Unicode(20)) metadataid = Column(Unicode(50)) legendname = Column(Unicode(255)) queryable = Column(Boolean) exclusion = Column(Unicode(1000)) opacity = Column(Float) service_url = Column(Unicode(255)) category_id = Column(Integer) server_resolutions = Column(Unicode(255)) use_client_zoom = Column(Boolean) is_wms = Column(Boolean) wms_url = Column(Unicode(1000)) wms_layers = Column(Unicode(2500)) wms_format = Column(Unicode(20)) wms_profiles_guichet = Column(Unicode(255)) is_poi = Column(Boolean) id_collection = Column(Integer)
def upgrade(): op.create_table( 'order', Column('id', UUID(), nullable=False), Column('created_at', DateTime(timezone=True), nullable=False), Column('side', Enum('buy', 'sell', name='order_side'), nullable=False), Column('user_id', UUID(), nullable=False), Column('volume', Numeric(precision=36, scale=18), nullable=False), Column( 'remaining_volume', Numeric(precision=36, scale=18), nullable=False, ), Column('price', Numeric(precision=36, scale=18), nullable=False), Column('filled_at', DateTime(timezone=True), nullable=True), Column('canceled_at', DateTime(timezone=True), nullable=True), Column('base_currency', Unicode(), nullable=False), Column('quote_currency', Unicode(), nullable=False), CheckConstraint('price > 0', name='ck_order_price_positive'), CheckConstraint( 'volume > 0 AND ' 'remaining_volume >= 0 AND ' 'remaining_volume <= volume', name='ck_order_volume', ), CheckConstraint( 'base_currency != quote_currency', name='ck_order_currency', ), CheckConstraint( 'filled_at IS NULL OR remaining_volume = 0', name='ck_order_filled', ), CheckConstraint( 'canceled_at IS NULL OR remaining_volume > 0', name='ck_order_canceled', ), ForeignKeyConstraint( ['base_currency', 'quote_currency'], ['market.base_currency', 'market.quote_currency'], ), ForeignKeyConstraint( ['base_currency'], ['currency.id'], ), ForeignKeyConstraint( ['quote_currency'], ['currency.id'], ), ForeignKeyConstraint( ['user_id'], ['user.id'], ), PrimaryKeyConstraint('id'), UniqueConstraint('id', 'base_currency', 'quote_currency'), ) op.create_index( op.f('ix_order_created_at'), 'order', ['created_at'], unique=False, ) op.create_index( op.f('ix_order_user_id'), 'order', ['user_id'], unique=False, )
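# --- sketch of a matching downgrade (not from the source) ----------------------
# Alembic migrations are usually paired with a downgrade that undoes the DDL in
# reverse order; a plausible counterpart to the upgrade above would be:
def downgrade():
    op.drop_index(op.f('ix_order_user_id'), table_name='order')
    op.drop_index(op.f('ix_order_created_at'), table_name='order')
    op.drop_table('order')
    # Note: depending on the backend, the native 'order_side' ENUM created for
    # the side column may also need an explicit DROP TYPE.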
# revision identifiers, used by Alembic. revision = 'e1488bb4dd' down_revision = '47f9265e77e5' from alembic.op import execute, inline_literal from sqlalchemy import and_ from sqlalchemy.types import Integer, Unicode, UnicodeText from sqlalchemy.schema import Column, MetaData, Table # -- table definition --------------------------------------------------------- metadata = MetaData() settings = Table( 'settings', metadata, Column('id', Integer, autoincrement=True, primary_key=True), Column('key', Unicode(255), nullable=False, unique=True), Column('value', UnicodeText), mysql_engine='InnoDB', mysql_charset='utf8', ) # ----------------------------------------------------------------------------- def upgrade(): update_setting(u'general_site_name', u'MediaCore', u'MediaDrop') update_settings_key(u'appearance_display_mediacore_footer', u'appearance_display_mediadrop_footer') update_settings_key(u'appearance_display_mediacore_credits', u'appearance_display_mediadrop_credits')
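# --- sketch of the helpers referenced above (not from the source) --------------
# upgrade() relies on update_setting / update_settings_key, which are not shown
# in this excerpt; given the settings table and the imported execute,
# inline_literal and and_, they would plausibly look like this:
def update_setting(key, current_value, new_value):
    execute(
        settings.update().where(and_(
            settings.c.key == key,
            settings.c.value == current_value,
        )).values(value=inline_literal(new_value))
    )

def update_settings_key(old_key, new_key):
    execute(
        settings.update()
            .where(settings.c.key == old_key)
            .values(key=inline_literal(new_key))
    )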
class User(DatabaseObject): ''' User definition ''' uuid = Column(String(36), unique=True, nullable=False, default=lambda: str(uuid4())) team_id = Column(Integer, ForeignKey('team.id')) _avatar = Column(String(64)) _locked = Column(Boolean, default=False, nullable=False) last_login = Column(DateTime) logins = Column(Integer, default=0) _handle = Column(Unicode(16), unique=True, nullable=False) _password = Column('password', String(64)) _bank_password = Column('bank_password', String(128)) theme_id = Column(Integer, ForeignKey('theme.id'), default=3, nullable=False) algorithm = Column(String(8), default=DEFAULT_HASH_ALGORITHM, nullable=False) permissions = relationship("Permission", backref=backref("user", lazy="select"), cascade="all,delete,delete-orphan") notifications = relationship("Notification", backref=backref("user", lazy="select"), cascade="all,delete,delete-orphan") algorithms = { 'md5': ( md5, 1, 'md5', ), 'sha1': ( sha1, 2, 'sha1', ), 'sha256': ( sha256, 3, 'sha256', ), 'sha512': ( sha512, 4, 'sha512', ), } @classmethod def all(cls): ''' Returns a list of all objects in the database ''' return dbsession.query(cls).all() @classmethod def all_users(cls): ''' Return all non-admin user objects ''' return filter( lambda user: user.has_permission(ADMIN_PERMISSION) is False, cls.all()) @classmethod def not_team(cls, tid): ''' Return all users not on a given team, exclude admins ''' teams = dbsession.query(cls).filter(cls.team_id != tid).all() return filter( lambda user: user.has_permission(ADMIN_PERMISSION) is False, teams) @classmethod def by_id(cls, _id): ''' Returns a the object with id of _id ''' return dbsession.query(cls).filter_by(id=_id).first() @classmethod def by_uuid(cls, _uuid): ''' Return and object based on a uuid ''' return dbsession.query(cls).filter_by(uuid=unicode(_uuid)).first() @classmethod def by_handle(cls, handle): ''' Return the user object whose user is "_handle" ''' return dbsession.query(cls).filter_by(_handle=unicode(handle)).first() @classmethod def _hash_bank_password(cls, algorithm_name, password): ''' Hashes the password using Md5/Sha1/Sha256/Sha512 only used for the admin accounts. We only allow whitespace/non-ascii. ''' if algorithm_name is None: algorithm_name = DEFAULT_HASH_ALGORITHM if algorithm_name in cls.algorithms: algo = cls.algorithms[algorithm_name][0]() algo.update(password) return algo.hexdigest() else: raise ValueError("Algorithm %s not supported." 
% algorithm_name) @classmethod def _hash_password(cls, password): return PBKDF2.crypt(password, iterations=ITERATE) @property def password(self): return self._password @password.setter def password(self, value): _password = filter(lambda char: char in printable[:-6], value) if len(_password) >= options.min_user_password_length: self._password = self._hash_password(value) else: raise ValidationError("Invalid password length (min %d chars)" % (options.min_user_password_length, )) @property def bank_password(self): return self._bank_password @bank_password.setter def bank_password(self, value): _password = filter(lambda char: char in printable[:-6], value) if 0 < len(_password) <= options.max_password_length: self._bank_password = self._hash_bank_password( self.algorithm, _password) else: raise ValidationError( "Invalid bank password length (max %d chars)" % (options.max_password_length, )) @property def handle(self): return self._handle @handle.setter def handle(self, new_handle): if not 3 <= len(new_handle) <= 16: raise ValidationError("Handle must be 3 - 16 characters") self._handle = unicode(new_handle) @property def permissions(self): ''' Return a set with all permissions granted to the user ''' return dbsession.query(Permission).filter_by(user_id=self.id) @property def permissions_names(self): ''' Return a list with all permissions accounts granted to the user ''' return [permission.name for permission in self.permissions] @property def locked(self): ''' Determines if an admin has locked an account, accounts with administrative permissions cannot be locked. ''' if self.has_permission(ADMIN_PERMISSION): return False # Admin accounts cannot be locked else: return self._locked @locked.setter def locked(self, value): ''' Setter method for _lock ''' assert isinstance(value, bool) if not self.has_permission(ADMIN_PERMISSION): self._locked = value @property def avatar(self): if self._avatar is not None: return self._avatar else: return "default_avatar.jpeg" @avatar.setter def avatar(self, image_data): if MIN_AVATAR_SIZE < len(image_data) < MAX_AVATAR_SIZE: ext = imghdr.what("", h=image_data) if ext in IMG_FORMATS and not is_xss_image(image_data): if self._avatar is not None and os.path.exists( options.avatar_dir + '/' + self._avatar): os.unlink(options.avatar_dir + '/' + self._avatar) file_path = str(options.avatar_dir + '/' + self.uuid + '.' + ext) with open(file_path, 'wb') as fp: fp.write(image_data) self._avatar = self.uuid + '.' + ext else: raise ValidationError( "Invalid image format, avatar must be: %s" % (' '.join(IMG_FORMATS))) else: raise ValidationError( "The image is too large must be %d - %d bytes" % (MIN_AVATAR_SIZE, MAX_AVATAR_SIZE)) def has_item(self, item_name): ''' Check to see if a team has purchased an item ''' item = MarketItem.by_name(item_name) if item is None: raise ValueError("Item '%s' not in database." 
% str(item_name)) return True if item in self.team.items else False def has_permission(self, permission): ''' Return True if 'permission' is in permissions_names ''' return True if permission in self.permissions_names else False def validate_password(self, attempt): ''' Check the password against existing credentials ''' if self._password is not None: return self.password == PBKDF2.crypt(attempt, self.password) else: return False def validate_bank_password(self, attempt): ''' Check the bank password against existing credentials ''' if self._bank_password is not None: result = self._hash_bank_password(self.algorithm, attempt) return self.bank_password == result else: return False def get_new_notifications(self): ''' Returns any unread messages @return: List of unread messages @rtype: List of Notification objects ''' return filter(lambda notify: notify.viewed is False, self.notifications) def get_notifications(self, limit=10): ''' Returns most recent notifications @param limit: Max number of notifications to return, defaults to 10 @return: Most recent notifications @rtype: List of Notification objects ''' return sorted(self.notifications, key=lambda notify: notify.created, reverse=True)[:limit] def next_algorithm(self): ''' Returns the next algorithm tuple ''' current = self.get_algorithm(self.algorithm) return self.get_algorithm(current[1] + 1) def get_algorithm(self, index): ''' Return algorithm tuple based on string or int ''' if isinstance(index, basestring) and index in self.algorithms: return self.algorithms[index] elif isinstance(index, int): # Find by numeric index for key in self.algorithms: if index == self.algorithms[key][1]: return self.algorithms[key] return None def to_dict(self): ''' Return public data as dictionary ''' return { 'uuid': self.uuid, 'handle': self.handle, 'hash_algorithm': self.algorithm, 'team_uuid': self.team.uuid, } def to_xml(self, parent): ''' Admins cannot be exported as XML, not that they would be exported because they're not on a team, but check anyways ''' if not self.has_permission(ADMIN_PERMISSION): user_elem = ET.SubElement(parent, "user") ET.SubElement(user_elem, "handle").text = self.handle ET.SubElement(user_elem, "password").text = self._password bpass_elem = ET.SubElement(user_elem, "bankpassword") bpass_elem.text = self._bank_password bpass_elem.set("algorithm", self.algorithm) with open(options.avatar_dir + self.avatar) as fp: data = fp.read() ET.SubElement(user_elem, "avatar").text = data.encode('base64') def __eq__(self, other): return self.id == other.id def __ne__(self, other): return not self == other def __str__(self): return self.handle def __repr__(self): return u'<User - handle: %s>' % (self.handle, )
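# --- standalone sketch of the algorithm table above (not from the source) ------
# User.algorithms maps a name to (constructor, numeric index, name); lookups by
# either key drive get_algorithm(), and next_algorithm() returns the entry one
# index up, presumably so legacy bank-password hashes can be migrated to a
# stronger digest. A minimal sketch of that lookup and rotation:
from hashlib import md5, sha1, sha256, sha512

ALGORITHMS = {
    'md5':    (md5,    1, 'md5'),
    'sha1':   (sha1,   2, 'sha1'),
    'sha256': (sha256, 3, 'sha256'),
    'sha512': (sha512, 4, 'sha512'),
}

def get_algorithm(index):
    if isinstance(index, str) and index in ALGORITHMS:
        return ALGORITHMS[index]
    if isinstance(index, int):
        for entry in ALGORITHMS.values():
            if entry[1] == index:
                return entry
    return None

def next_algorithm(current_name):
    current = get_algorithm(current_name)
    return get_algorithm(current[1] + 1) if current else None

assert next_algorithm('sha1')[2] == 'sha256'
assert next_algorithm('sha512') is None    # already at the strongest digest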