def addFilter():
    """Create a Filter with an attached Schedule for the logged-in user.

    Expects a JSON body carrying the filter fields plus 'uemail'; the session
    must contain that email (mapped to the user's uid) for the call to
    succeed.  Responds with {'status': 1} on success, {'status': 0, ...}
    when no session entry exists for the email.
    """
    data = json.loads(request.get_data())
    # Guard clause: bail out early when the email has no session entry.
    if data['uemail'] not in session:
        return jsonify({'status': 0, 'message': 'User haven\'t login yet!'})
    new_filter = Filter(
        data['ffrom'],
        data['ftag'],
        data['flongi'],
        data['flati'],
        data['fradius'],
        data['ftime'],
        data['fstate'],
    )
    new_filter.schedule = Schedule(
        data['starttime'], data['endtime'], data['repetition'])
    # Session maps the email to the user's uid.
    new_filter.user = User.query.filter_by(
        uid=session[data['uemail']]).first()
    db.session.add(new_filter)
    db.session.commit()
    return jsonify({'status': 1})
def update_by_filter(cls, kv, filter_str=''):
    """Bulk-update rows matching ``filter_str`` with the values in ``kv``.

    Only keys permitted by ``cls.get_allow_update_keywords()`` are applied;
    disallowed keys are silently dropped.  A no-op when nothing survives the
    filtering.
    """
    # 过滤掉不予支持批量更新的字段 (drop fields not allowed in bulk updates).
    # Fix: dict.iteritems() is Python 2 only — use items() (the codebase
    # already relies on Python 3 features elsewhere).
    allowed = cls.get_allow_update_keywords()
    _kv = {k: v for k, v in kv.items() if k in allowed}
    if not _kv:
        return
    # e.g. "name = %(name)s, age = %(age)s" — values bound via the driver.
    set_str = ', '.join('{0} = %({0})s'.format(key) for key in _kv)
    where_str = Filter.filter_str_to_sql(
        allow_keywords=cls.get_filter_keywords(), filter_str=filter_str)
    sql_stmt = ("UPDATE " + cls._table_name + " SET " + set_str +
                " WHERE " + where_str)
    cnx = db.cnxpool.get_connection()
    cursor = cnx.cursor(dictionary=True, buffered=True)
    try:
        cursor.execute(sql_stmt, _kv)
        cnx.commit()
    finally:
        # Always release the cursor and pooled connection.
        cursor.close()
        cnx.close()
def get_by_filter(cls, offset=0, limit=1000, order_by=None, order='asc', filter_str=''):
    """Fetch one page of rows plus the total matching count.

    Returns a ``(rows, count)`` tuple; ``rows`` is the paginated result set,
    ``count`` the unpaginated total for the same WHERE clause.
    """
    if order_by is None:
        order_by = cls._primary_key
    # Build the WHERE clause first so each statement is constructed exactly
    # once (the original built both statements twice — with and without it).
    where_str = Filter.filter_str_to_sql(
        allow_keywords=cls.get_filter_keywords(), filter_str=filter_str)
    where_clause = (" WHERE " + where_str) if where_str != '' else ''
    # NOTE(review): order_by/order are interpolated directly into the SQL —
    # callers must pass trusted values; consider validating against the
    # column list.
    sql_stmt = ("SELECT * FROM " + cls._table_name + where_clause +
                " ORDER BY " + order_by + " " + order +
                " LIMIT %(offset)s, %(limit)s")
    sql_stmt_count = ("SELECT count(" + cls._primary_key + ") FROM " +
                      cls._table_name + where_clause)
    cnx = db.cnxpool.get_connection()
    cursor = cnx.cursor(dictionary=True, buffered=True)
    try:
        cursor.execute(sql_stmt, {'offset': offset, 'limit': limit})
        rows = cursor.fetchall()
        cursor.execute(sql_stmt_count)
        count = cursor.fetchone()
        # The driver keys the aggregate by its literal SQL expression.
        return rows, count["count(" + cls._primary_key + ")"]
    finally:
        cursor.close()
        cnx.close()
def make_filter(queries, page):
    """Render the home page with the articles matching ``queries``.

    ``page`` is 1-based; each page holds ``limit`` articles.
    """
    limit = 10
    offset = limit * page - limit
    # NOTE(review): `user` is a free variable here — presumably bound at
    # module/request scope; confirm it is always set before this runs.
    filters = [Filter.get(user=user, name=query) for query in queries]
    articles = FilterItem.get_articles(filter_=filters, limit=limit,
                                       offset=offset)
    # Fix: py3 `filter()` returns a one-shot lazy iterator; materialize a
    # list so the template can iterate/len it safely.
    articles = [a for a in articles if a is not None]
    # Peek one article past this page to decide whether a next page exists.
    next_articles = FilterItem.get_articles(filter_=filters, limit=1,
                                            offset=offset + limit)
    has_next = len(next_articles) >= 1
    paginator = Paginator(page, has_next)
    # TODO problem with multiple queries, will get 30 articles for limit 10.
    # should change sql to get all queries together
    return render(
        'templates/home.html', {
            'blots_all': articles,
            'filter_keywords': ', '.join(queries),
            'paginator': paginator
        })
def save_filters(
    _,
    name,
    graph_scalars_options,
    graph_timeseries_options,
    order_by,
    agg_group_by,
    normalize,
    filter_div,
):
    """Persist the current filter/graph configuration as a named Filter row.

    Aborts the callback (PreventUpdate) when no name was supplied.  Returns
    the refreshed Filter model options and an empty string (presumably to
    clear the name input).
    """
    if not name:
        raise PreventUpdate
    stored_filters = preprocessing.extract_filters("scalars", filter_div)
    stored_filters.update(
        order_by=order_by,
        agg_group_by=agg_group_by,
        normalize=normalize,
    )
    db_filter = Filter(
        name=name,
        filters=stored_filters,
        scalar_graph_options=preprocessing.extract_graph_options(
            "scalars", graph_scalars_options),
        ts_graph_options=preprocessing.extract_graph_options(
            "timeseries", graph_timeseries_options),
    )
    db.session.add(db_filter)
    db.session.commit()
    return get_model_options(Filter), ""
def init_filter(self, operator):
    """Create, persist and fully populate a Filter for ``operator``.

    Returns the saved Filter with every Band attached.
    """
    # Fix: renamed local so it no longer shadows the builtin `filter`.
    new_filter = Filter()
    new_filter.initialize(operator)
    # Save before .bands.add() — the relation add appears to need a
    # persisted instance (standard ORM many-to-many behaviour).
    new_filter.save()
    for band in Band.objects.all():
        new_filter.bands.add(band)
    new_filter.save()
    return new_filter
def delete_by_filter(cls, filter_str=''):
    """Delete every row matching ``filter_str``.

    Raises:
        ValueError: when the filter resolves to an empty WHERE clause.
            The sibling get_by_filter shows filter_str_to_sql can return
            '' — here that would yield invalid SQL ("... WHERE ") and,
            if it ever executed, an unfiltered mass delete.
    """
    where_str = Filter.filter_str_to_sql(
        allow_keywords=cls.get_filter_keywords(), filter_str=filter_str)
    if not where_str:
        raise ValueError('delete_by_filter requires a non-empty filter')
    sql_stmt = "DELETE FROM " + cls._table_name + " WHERE " + where_str
    cnx = db.cnxpool.get_connection()
    cursor = cnx.cursor(dictionary=True, buffered=True)
    try:
        cursor.execute(sql_stmt)
        cnx.commit()
    finally:
        # Always release the cursor and pooled connection.
        cursor.close()
        cnx.close()
def signup():
    """Register a new user (with an empty Filter) and redirect to login.

    Already-authenticated visitors are sent to the home page; an invalid
    or unsubmitted form re-renders the signup template.
    """
    if current_user.is_authenticated:
        return redirect(url_for('home_page.index'))
    form = RegistrationForm()
    if not form.validate_on_submit():
        return render_template('signup.html', form=form)
    new_user = User(username=form.username.data, email=form.email.data)
    new_user.set_password(form.password.data)
    # Every fresh account starts with a default Filter.
    new_user.filters = Filter()
    db.session.add(new_user)
    db.session.commit()
    return redirect(url_for('users.login'))
def get_data(self, request, id):
    """Fetch filter metadata for ``id`` and build a Filter object from it.

    Returns the constructed Filter.
    """
    response = api.get_filter_metadata(request, id)
    data = json.loads(response.text)
    # NOTE(review): 'dsl_name' is passed for both the 1st and 3rd
    # positional arguments, exactly as in the original — verify against
    # the Filter constructor that this duplication is intentional.
    # Fix: renamed local so it no longer shadows the builtin `filter`.
    result = Filter(
        data['dsl_name'],
        data['filter_name'],
        data['dsl_name'],
        data['language'],
        data['filter_type'],
        data['dependencies'],
        None,
        data['main'],
        data['execution_server'],
        data['reverse'],
        data['put'],
        data['get'],
        data['post'],
        data['head'],
        data['delete'],
        data['valid_parameters'],
    )
    return result
def apply(self):
    """Build a StandardScaler transformation and wrap it in a Scaler filter.

    Reads self.with_mean / self.with_variance and stores the configured
    transformation, filter object and (German) info text on self.
    """
    # sklearn transformation driven by the dialog's settings.
    self.transformation = StandardScaler(
        copy=True,
        with_mean=self.with_mean,
        with_std=self.with_variance,
    )
    # Wrap the transformation in the project's Filter object.
    self.filter = Filter.Scaler()
    self.filter.set_transformation(self.transformation)
    # Human-readable configuration summary (runtime strings kept verbatim).
    summary = "Filter - {} \n".format("StandardScaler")
    summary += "Zentrieren auf Mittel = {}\n".format(self.with_mean)
    summary += "Skalierung um Einheits Varianz = {}\n".format(
        self.with_variance)
    self.filter.set_configuration_info(summary)
def test_create_metadata_and_verify_filters():
    """Each additional filter keyword should narrow the result set further."""
    def pk():
        # Infinite 0, 1, 2, ... primary-key sequence.
        value = 0
        while True:
            yield value
            value += 1

    total = 1000
    primary_key = pk()
    # Corpus of metadata records with random 10-letter uppercase titles.
    meta_data = [
        Metadata(next(primary_key),
                 title=''.join(random.choices(string.ascii_uppercase, k=10)))
        for _ in range(total)
    ]
    one_keyword = Filter(meta_data, ['kind'], ['book'])
    assert len(meta_data) > len(one_keyword.tofilter)
    two_keywords = Filter(meta_data, ['kind', 'category'], ['book', 'textbook'])
    assert len(meta_data) > len(two_keywords.tofilter)
    # The stricter filter must match a strict subset.
    assert len(one_keyword.tofilter) > len(two_keywords.tofilter)
async def create_filter(filter: Filter, aroio: Aroio = Depends(get_auth_aroio)):
    """Creates a new filter in the database.
    Returns the created filter with its id"""
    filters = aroio.configuration.convolver.filters
    if len(filters) >= 10:
        raise ForbiddenException(
            detail="Only 10 filters are allowed with the userconfig.txt pattern"
        )
    # compute next possible filter id by maximum id.
    # Fix: max() of an empty sequence raises ValueError — default=0 makes
    # the first filter get id 1 instead of crashing.
    filter_id = max((f.id for f in filters), default=0) + 1
    filter_in_db = FilterInDb(id=filter_id, **filter.dict())
    aroio.configuration.convolver.filters.append(filter_in_db)
    datasource.save(aroio=aroio)
    return filter_in_db
def apply(self):
    """Build a MinMaxScaler over the user-entered range, wrapped in a filter.

    Empty line edits fall back to the default range (0, 1).
    """
    # Fix: removed the stray leading `pass` (dead statement).
    # NOTE(review): the original comment said this runs after closing a
    # "KNeighborsClassifierDialog" window, yet the code configures a
    # MinMaxScaler — likely a copy-paste leftover; confirm the actual dialog.
    min_text = self.dataMinLineEdit.text()
    data_min = int(min_text) if min_text != '' else 0  # default value
    max_text = self.dataMaxLineEdit.text()
    data_max = int(max_text) if max_text != '' else 1  # default value
    feature_range = (data_min, data_max)
    # Create Transformation object
    self.transformation = MinMaxScaler(feature_range=feature_range)
    # Create Filter object
    self.filter = Filter.MinMaxFilter()
    self.filter.set_transformation(self.transformation)
    self.filter.set_feature_range(feature_range)