def serialize(self):
    url = {c: getattr(self, c) for c in inspect(self).attrs.keys()}
    if url['Tags']:
        for key, value in dict(json.loads(url['Tags'])).items():
            url[key] = value
        del url['Tags']
    return url

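# A minimal, self-contained usage sketch of the serialize() pattern above
# (assuming SQLAlchemy 1.4+). The Url model and its column names are taken
# from the update_url snippet further down; the Tags column is assumed to
# hold a JSON-encoded dict whose entries get flattened into the result.
import json

from sqlalchemy import Boolean, Column, Integer, String, inspect
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Url(Base):
    __tablename__ = 'urls'
    Id = Column(Integer, primary_key=True)
    Value = Column(String)
    Tags = Column(String)        # JSON-encoded dict of extra integer tags
    IsValid = Column(Boolean)
    IsDone = Column(Boolean)

    def serialize(self):
        url = {c: getattr(self, c) for c in inspect(self).attrs.keys()}
        if url['Tags']:
            for key, value in dict(json.loads(url['Tags'])).items():
                url[key] = value
            del url['Tags']
        return url


url = Url(Value='http://example.com', Tags='{"priority": 1}',
          IsValid=True, IsDone=False)
print(url.serialize())
# e.g. {'Id': None, 'Value': 'http://example.com', 'IsValid': True,
#       'IsDone': False, 'priority': 1}  (key order may vary)
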
def init_db():
    # Drop the existing account table, if any
    ins = inspect(db.engine)
    table_list = ins.get_table_names()
    if table_list:
        print("> Tables already exist in DB")
        for table in table_list:
            if table == "account":
                Account.__table__.drop(db.engine)
                print("> Completed deleting the table, 'Account'")

    # Create tables
    db.create_all()
    print("> Completed creating tables")

    # Insert dummy data into the db
    db.session.add(
        Account(user_id=config.ADMIN_USER_ID, user_pw=config.ADMIN_USER_PW,
                position="admin", email=config.ADMIN_EMAIL))
    db.session.add(
        Account(user_id="test", user_pw="test", position="test",
                email="*****@*****.**"))
    db.session.commit()
    print("> Completed inserting dummy data into db")

def _get_orm_descriptors(cls):
    """Return a dictionary with all ORM descriptor names as keys, and
    their types (TEXT, DateTime, etc.) as values.
    """
    # The descriptor needs to be invoked once (using __get__) in order
    # to have access to its attributes (e.g. `remote_attr`)
    all_descs = {name: desc.__get__(None, cls)
                 for name, desc in inspect(cls).all_orm_descriptors.items()
                 if not name.startswith('_')}
    attrs_dict = dict()
    for name, desc in all_descs.items():
        extension_type = _get_extension_type(desc)
        if extension_type is ASSOCIATION_PROXY:
            # Association proxies must be followed to get their type
            while not desc.remote_attr.is_attribute:
                desc = desc.remote_attr
            # Get the type of the remote attribute
            attrs_dict[name] = desc.remote_attr.expression.type
        elif extension_type is HYBRID_PROPERTY:
            attrs_dict[name] = desc.type
    return attrs_dict

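# Hedged sketch of the introspection _get_orm_descriptors() relies on
# (assuming SQLAlchemy 1.4). The User/Keyword models are invented for
# illustration, and the project-specific _get_extension_type() helper is not
# reproduced; the loop below only shows what all_orm_descriptors and
# extension_type expose for association proxies and hybrid properties.
from sqlalchemy import Column, ForeignKey, Integer, String, inspect
from sqlalchemy.ext.associationproxy import ASSOCIATION_PROXY, association_proxy
from sqlalchemy.ext.hybrid import HYBRID_PROPERTY, hybrid_property
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Keyword(Base):
    __tablename__ = 'keyword'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    word = Column(String(50))


class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    _name = Column('name', String(50))
    keywords = relationship(Keyword)
    # The proxy's type has to be looked up on the remote attribute (Keyword.word)
    keyword_strings = association_proxy('keywords', 'word')

    @hybrid_property
    def name(self):
        return self._name


for attr_name, desc in inspect(User).all_orm_descriptors.items():
    if desc.extension_type is ASSOCIATION_PROXY:
        proxy = desc.__get__(None, User)  # invoke the descriptor once
        print(attr_name, proxy.remote_attr.expression.type)  # keyword_strings VARCHAR(50)
    elif desc.extension_type is HYBRID_PROPERTY:
        print(attr_name, 'hybrid property')                   # name hybrid property
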
def toDict(self):
    data = {
        c.key: getattr(self, c.key)
        for c in inspect(self).mapper.column_attrs
    }
    data["date"] = data["date"].strftime("%Y-%m-%d %H:%M")
    return data

def toDictPublic(self):
    data = {
        c.key: getattr(self, c.key)
        for c in inspect(self).mapper.column_attrs
    }
    del data["email"]
    del data["facebook"]
    del data["google"]
    return data

def get_web_columns(model, headers_override=None) -> List[str]:
    mapper: orm.Mapper = inspect(model)
    columns = [col.key for col in mapper.attrs
               if ('_id' not in col.key and '_ids' not in col.key)]
    # columns.remove('id')  # Id is needed for url endpoints
    print('columns', columns)
    if headers_override:
        if 'id' not in headers_override:
            headers_override.append('id')
        columns = headers_override
    return columns

def get_api_columns(model, include_type=False):
    mapper = inspect(model)
    exceptions = []
    if include_type:
        columns = [(col.key, col.type.__visit_name__)
                   for col in mapper.columns if col.key not in exceptions]
    else:
        columns = [col.key for col in mapper.columns
                   if col.key not in exceptions]
    print('Obtained api columns')
    pprint(columns)
    return columns

def object_to_dict(obj):
    object_dict = {}
    for col in inspect(obj).mapper.column_attrs:
        value = getattr(obj, col.key)
        if value:
            object_dict[col.key] = value
        else:
            # Note: any falsy value (None, 0, False, '') is rendered as 'N/A'
            object_dict[col.key] = 'N/A'
    return object_dict

def toDict(self):
    data = {
        c.key: getattr(self, c.key)
        for c in inspect(self).mapper.column_attrs
    }
    if isinstance(data["mcuType"], Choice):
        data["mcuType"] = data["mcuType"].code
    data["created_on"] = data["created_on"].strftime("%Y-%m-%d %H:%M")
    if data["updated_on"]:
        data["updated_on"] = data["updated_on"].strftime("%Y-%m-%d %H:%M")
    return data

def resource_fields(cls):
    """Return a mapping of available field names and their corresponding
    flask types
    """
    fields = dict()
    columns = inspect(cls).columns
    columns_dict = {col.name: col.type for col in columns
                    if not col.name.startswith('_')}
    columns_dict.update(cls._get_orm_descriptors())
    for field_name, field_type in columns_dict.items():
        field_type_name = field_type.__class__.__name__
        fields[field_name] = cls._sql_to_flask_type_map[field_type_name]
    return fields

def create_or_update(cls, df):
    """Use the primary key to decide what to do with each dataframe row:
    rows whose key already exists in the table are updated, and rows
    whose key does not exist yet are inserted.
    """
    primary_key = inspect(cls).primary_key[0].name
    primary_values = list(
        chain(*db.session.query(getattr(cls, primary_key)).all())
    )
    cls.update(df[df[primary_key].isin(primary_values)])
    cls.create(df[~df[primary_key].isin(primary_values)])

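# Hedged usage sketch for create_or_update() above. The Stock model and its
# 'code' primary key are invented, and the update()/create() classmethods from
# the original are replaced by prints so the example runs standalone; only the
# primary-key lookup via inspect() and the isin() split mirror the real code.
import pandas as pd
from sqlalchemy import Column, Float, String, inspect
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Stock(Base):
    __tablename__ = 'stock'
    code = Column(String(6), primary_key=True)
    price = Column(Float)


df = pd.DataFrame({'code': ['600000', '600036'], 'price': [7.8, 35.2]})

primary_key = inspect(Stock).primary_key[0].name   # -> 'code'
existing = {'600000'}                              # stands in for the db query
print(df[df[primary_key].isin(existing)])          # rows that would be updated
print(df[~df[primary_key].isin(existing)])         # rows that would be inserted
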
def serialize(self, attr_remove=None):
    # Use None as the default: a mutable default list would be shared
    # between calls and keep accumulating 'create_at'.
    attr_remove = list(attr_remove) if attr_remove else []
    attr_remove.append('create_at')
    attrs = inspect(self).attrs.keys()
    result = {}
    for attr in attrs:
        val = getattr(self, attr)
        if attr not in attr_remove and not issubclass(type(val), Base):
            if isinstance(val, Enum):
                result.update({attr: val.value})
            else:
                result.update({attr: val})
    return result

def cupcakes_post():
    columns = inspect(Cupcake).columns.keys()
    for key, val in request.json.items():
        if key not in columns or key == "id":
            abort(400)
    cupcake = Cupcake(flavor=request.json["flavor"],
                      size=request.json["size"],
                      rating=request.json["rating"],
                      image=request.json.get(
                          "image", "https://tinyurl.com/demo-cupcake"))
    db.session.add(cupcake)
    try:
        db.session.commit()
    except Exception:
        db.session.rollback()
        abort(400)
    return (jsonify(cupcake=cupcake.serialize), 201)

def serialize(self, include={}, exclude=[], only=[]):
    serialized = {}
    for key in inspect(self).attrs.keys():
        to_be_serialized = True
        value = getattr(self, key)
        if key in exclude or (only and key not in only):
            to_be_serialized = False
        elif isinstance(value, BaseQuery):
            to_be_serialized = False
            if key in include:
                to_be_serialized = True
                nested_params = include.get(key, {})
                value = [i.serialize(**nested_params) for i in value]
        if to_be_serialized:
            serialized[key] = value
    return serialized

def update_url(url_id):
    url = Url.query.filter_by(Id=url_id).first()
    if not url:
        abort(404)
    if not request.json or not set(['Value', 'IsValid', 'IsDone']).issubset(set(request.json.keys())):
        abort(400)

    # Any request key that is not a mapped attribute is treated as an integer tag
    tags = {}
    tags_key = set(dict(request.json).keys()) - set(inspect(url).attrs.keys())
    for tag_key in tags_key:
        if str(request.json[tag_key]).isdigit():
            tags[tag_key] = int(request.json[tag_key])
        else:
            abort(400)

    url.Value = request.json['Value']
    url.Tags = str(json.dumps(tags))
    url.IsDone = request.json['IsDone']
    url.IsValid = request.json['IsValid']
    db.session.commit()
    return jsonify(dict(url.serialize())), 201

def get_schema(model, operation) -> Dict[str, Dict[str, Any]]:
    """Creates a cerberus schema for API-level view, create and update
    operations"""
    mapper: orm.Mapper = inspect(model)
    schema = OrderedDict()
    try:
        unsigned_attrs = model.unsigned_attrs()
    except AttributeError:
        unsigned_attrs = None
    for col in mapper.columns:
        field = col.key
        ftype = col.type.__visit_name__
        if field == 'id' and operation == 'create':
            continue
        schema[field] = {'type': ftype}
        if unsigned_attrs and field in unsigned_attrs:
            # Unsigned attributes also get a lower bound of zero
            schema[field].update({'min': 0})
        if ftype == 'string':
            schema[field].update({'maxlength': col.type.length})
        if not col.nullable:
            schema[field].update({'empty': False})
        if (operation == 'update' and field == 'id') or operation != 'update':
            schema[field].update({'required': not col.nullable})
        # rules = {
        #     'name': col.key,
        #     'type': col.type.__visit_name__,
        #     'maxlength': col.type.length if col.type.__visit_name__ == 'string' else None,
        #     'required': not col.nullable
        # }
        # if not update:
        #     rules.update({'required': not col.nullable})
        # if rules['type'] == 'string' and not col.nullable:
        #     rules['empty'] = False
        # print("RULES", rules)
    print('Obtained schema')
    pprint(schema)
    return schema

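# Hedged sketch of how a schema built by get_schema() above could be consumed.
# The field names and rules here are hand-written examples rather than output
# for a real model; they only demonstrate the cerberus Validator side.
from cerberus import Validator

schema = {
    'id': {'type': 'integer', 'required': True},
    'name': {'type': 'string', 'maxlength': 50, 'empty': False, 'required': True},
}
v = Validator(schema)
print(v.validate({'id': 1, 'name': 'widget'}))   # True
print(v.validate({'id': 1, 'name': ''}))         # False: the 'empty' rule fails
print(v.errors)
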
def _get_association_proxies(cls):
    """Return a dictionary with all association proxy names as keys, and
    their types (TEXT, DateTime, etc.) as values
    """
    all_descs = inspect(cls).all_orm_descriptors
    attrs_dict = dict()
    # The descriptor needs to be invoked once (using __get__) in order
    # to have access to its attributes (e.g. `remote_attr`)
    proxies = {
        name: desc.__get__(None, cls)
        for name, desc in all_descs.items()
        if desc.extension_type is ASSOCIATION_PROXY and not name.startswith('_')
    }
    for proxy_name, proxy in proxies.items():
        # Get the underlying attribute in case of multiple assoc. proxies
        while not proxy.remote_attr.is_attribute:
            proxy = proxy.remote_attr
        # Get the type of the remote attribute
        attrs_dict[proxy_name] = proxy.remote_attr.expression.type
    return attrs_dict

def cupcakes_id(cupcake_id):
    cupcake = Cupcake.query.get(cupcake_id)
    if cupcake is None:
        abort(404)

    # DELETE entry
    if request.method == "DELETE":
        db.session.delete(cupcake)
        db.session.commit()
        return jsonify(message="Deleted")

    # PATCH entry
    if request.method == "PATCH":
        columns = inspect(Cupcake).columns.keys()
        for key, val in request.json.items():
            # Unknown key or trying to edit id
            if key not in columns or key == "id":
                db.session.rollback()
                abort(400)
            setattr(cupcake, key, val)
        db.session.commit()

    # Return the cupcake on a GET or PATCH request
    return jsonify(cupcake=cupcake.serialize)

def serialize(self):
    return {
        c: escape(getattr(self, c))
        for c in inspect(self).attrs.keys()
    }

def as_dict(self):
    mapper = inspect(self)
    return {col.key: getattr(self, col.key) for col in mapper.attrs}

def __have_primary(self):
    # identity is None until the instance has been persisted with a primary key
    return inspect(self).identity

def __prune_fields(self):
    columns = inspect(self.__class__).columns
    if not self._fields:
        all_columns = set([column.name for column in columns])
        self._fields = list(all_columns - set(self._exclude))

def get_m2o_columns(model):
    """Gets a dictionary where the keys are the relationship name and the
    values are the model classes"""
    mapper = inspect(model)
    columns = {col.key: col.argument.class_
               for col in mapper.relationships
               if col.direction.name == 'MANYTOONE'}
    return columns

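# Hedged sketch for get_m2o_columns() above, with invented Parent/Child models.
# rel.mapper.class_ is used here as the robust way to reach the target class;
# rel.argument (as in the original) is simply whatever was passed to
# relationship(), so it may be a class, a string, or a mapper.
from sqlalchemy import Column, ForeignKey, Integer, inspect
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Parent(Base):
    __tablename__ = 'parent'
    id = Column(Integer, primary_key=True)


class Child(Base):
    __tablename__ = 'child'
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey('parent.id'))
    parent = relationship(Parent)


for rel in inspect(Child).relationships:
    if rel.direction.name == 'MANYTOONE':
        print(rel.key, rel.mapper.class_)   # parent <class '...Parent'>
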
def object_as_dict(obj):
    return {c.key: getattr(obj, c.key)
            for c in inspect(obj).mapper.column_attrs}

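# Hedged usage sketch for object_as_dict() above; the Item model is invented
# and the call reuses the function defined directly above.
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Item(Base):
    __tablename__ = 'item'
    id = Column(Integer, primary_key=True)
    name = Column(String)


print(object_as_dict(Item(id=1, name='hammer')))   # {'id': 1, 'name': 'hammer'}
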
def toDict(self):
    return {
        c.key: getattr(self, c.key)
        for c in inspect(self).mapper.column_attrs
    }

def get_table_columns(cls):
    """Return all columns for table except id."""
    return [col.name for col in flask_sqlalchemy.inspect(cls).c
            if col.name not in ['id', ]]

def get_dict_from_object(obj):
    return {
        column.key: getattr(obj, column.key)
        for column in inspect(obj).mapper.column_attrs
    }

def edit_todo(id=None):
    with db.session.no_autoflush:
        if id is not None:
            todo = db.session.query(Todo).get(id)
            if not (current_user.id == todo.user_id
                    or current_user.role in ['helpdesk', 'admin']):
                todo = None
        else:
            todo = Todo()

        if todo is not None:
            todoform = TodoForm(obj=todo)
            # Default for select field, because it gets overwritten by the
            # above statement.
            todoform.priority.data = (todoform.priority.data
                                      if todoform.priority.data
                                      else todoform.priority.default)
            if todoform.validate_on_submit():
                todoform.populate_obj(todo)
                if id is None:
                    todo.user_id = current_user.id
                    todo.created = datetime.datetime.now()
                    todo.state = 'open'
                    db.session.add(todo)
                else:
                    db.session.merge(todo)

                # Build a change log from the attribute history of every
                # dirty object in the session.
                changes = []
                for t in db.session.dirty:
                    attrs = inspect(t).attrs
                    for attr in attrs:
                        if attr.history.has_changes():
                            changes.append({
                                'field': attr.key,
                                'old': attr.history.deleted[0],
                                'new': attr.history.added[0]  # or attr.value
                            })
                if len(changes) > 0 or todoform.comment.data != '':
                    u = Update(todo_id=todo.id, user_id=current_user.id,
                               comment=todoform.comment.data, changes=changes)
                    db.session.add(u)
                db.session.commit()
                flash('Saved')
                if id is None:
                    return redirect(url_for('edit_todo', id=todo.id))
        else:
            todoform = TodoForm()
            flash('Not authorized')
    return render_template('todo_edit.html', todoform=todoform, todo=todo)