def sites_ip(self, *kw):
    """Show the per-address report for one site.

    ``kw[0]`` is the site (Domain) id.  The report context is stashed in
    the session so the grid's AJAX fetch handler knows what to query.
    """
    session['func'] = 'sites_ip'
    session['site'] = kw[0]
    session.save()
    # Resolve the domain row so the grid header can show its text.
    site = DBSession.query(Domain).get(session['site'])
    c.reports = sites_userip_report_grid
    c.query_params = {'Date': session['date'], 'Site': site.text}
    c.backlink = '/reports/sites/' + session['date']
    return dict(page='squid')
def update(mini_cls, access):
    """Fold one *access* log record into the ``mini_cls`` daily summary.

    ``UDP_*`` and ``NONE`` proxy statuses are skipped.  ``TCP_DENIED``
    records accumulate under ``mini_cls.denied``; everything else under
    ``mini_cls.allowed``.  An existing summary row (same address, domain,
    user, date and status) is incremented in place; otherwise a new row
    is created and added to the session.

    Fixes: the original re-read ``access.proxy_status.text`` after already
    caching it, and fetched with ``.all()`` + length test where at most one
    row can match — ``.first()`` expresses that directly.
    """
    proxy_status = access.proxy_status.text
    # ICP / cache-digest traffic and NONE entries carry no useful totals.
    if proxy_status.startswith("UDP_") or proxy_status == "NONE":
        return
    if proxy_status == "TCP_DENIED":
        status = mini_cls.denied
    else:
        status = mini_cls.allowed
    mini = (
        DBSession.query(mini_cls)
        .join(Address)
        .join(Domain)
        .join(ProxyUser)
        .filter(
            and_(
                Address.text == access.address.text,
                Domain.text == access.domain.text,
                ProxyUser.text == access.proxy_user.text,
                mini_cls.date == access.date,
                mini_cls.status == status,
            )
        )
        .first()  # the filter identifies at most one summary row
    )
    if mini is None:
        # First record for this (address, domain, user, date, status).
        mini = mini_cls()
        mini.date = access.date
        mini.address = access.address
        mini.domain = access.domain
        mini.proxy_user = access.proxy_user
        mini.upload_bytes = int(access.request_size)
        mini.download_bytes = int(access.reply_size)
        mini.count = 1
        mini.status = status
        DBSession.add(mini)
    else:
        mini.upload_bytes += int(access.request_size)
        mini.download_bytes += int(access.reply_size)
        mini.count += 1
def get(cls, text):
    """Return the ``cls`` row whose ``text`` column equals *text*,
    creating (and adding to the session) a new row when none exists.

    Results are memoised per class in the module-level ``fk_cache`` so
    repeated lookups skip the database.  Raises ``Exception`` (type kept
    for existing callers) when several rows match, since ``text`` is
    expected to be unique per class.

    Fixes: replaced the deprecated ``dict.has_key`` calls with ``in`` /
    ``setdefault`` and gave the duplicate-row exception a message.
    """
    cls.check(text)
    cache = fk_cache.setdefault(cls, {})
    if text in cache:
        return cache[text]
    query = DBSession.query(cls)
    # Avoid autoflushing half-built pending objects during this lookup.
    query._autoflush = False
    data = query.filter(cls.text == text).all()
    if not data:
        data = cls()
        data.text = text
        DBSession.add(data)
    elif len(data) == 1:
        data = data[0]
    else:
        # XXX: do this better — text should be unique per class.
        raise Exception("duplicate %s rows for text %r" % (cls.__name__, text))
    cache[text] = data
    return data
def fetch_sites_userip_report(self, page = 1, rp = 25, sortname = 'download_bytes', sortorder = 'desc', qtype = None, query = None):
    """AJAX fetch handler for the per-site address/user drill-down grid.

    When the session was primed by ``sites_ip`` the rows are grouped by
    client address, otherwise by proxy user.  Returns the flexigrid dict
    (``page``, ``total``, ``rows``).
    """
    by_ip = session['func'] == 'sites_ip'
    ident_col = Address.text if by_ip else ProxyUser.text
    q = DBSession.query(ident_col,
                        func.sum(MiniAccess.count),
                        func.sum(MiniAccess.download_bytes),
                        func.sum(MiniAccess.upload_bytes))
    if by_ip:
        q = q.join(MiniAccess.address).group_by(MiniAccess.address_id)
    else:
        q = q.join(MiniAccess.proxy_user).group_by(MiniAccess.proxy_user_id)
    q = q.join(MiniAccess.domain)
    report_date = datetime.strptime(session['date'], '%Y-%m-%d').date()
    q = q.filter(and_(MiniAccess.date == report_date,
                      MiniAccess.domain_id == session['site']))
    sort_fn = get_sortfn(sortorder)
    # Map grid column names onto the generated SQL aggregate labels.
    order_cols = {'requests': 'sum_1',
                  'download_bytes': 'sum_2',
                  'upload_bytes': 'sum_3'}
    if sortname in order_cols:
        q = q.order_by(sort_fn(order_cols[sortname]))
    total = q.count()
    q = q.offset((page - 1) * rp).limit(rp)
    rows = [{'id': data[0],
             'cell': [data[0], data[1], mb(data[2]), mb(data[3])]}
            for data in q]
    return dict(page = page, total = total, rows = rows)
def setup(self):
    """Instantiate ``self.klass`` from ``self.attrs`` plus its declared
    dependencies, persist it, and return the new object.

    Any failure rolls the session back before re-raising.
    """
    try:
        attrs = dict(self.attrs)
        attrs.update(self.do_get_dependencies())
        self.obj = self.klass(**attrs)
        DBSession.add(self.obj)
        DBSession.flush()
        return self.obj
    except:
        # Bare except is intentional: roll back on *any* failure, then
        # re-raise so the caller still sees the original error.
        DBSession.rollback()
        raise
def fetch_sites_report(self, page = 1, rp = 25, sortname = 'download_bytes', sortorder = 'desc', qtype = None, query = None):
    """AJAX fetch handler for the per-site report grid (session's date).

    Returns the flexigrid dict (``page``, ``total``, ``rows``); the client
    and user cells are drill-down links.

    Fix: the row loop used to rebind the ``clients`` query variable to a
    link string *while iterating over it* — it only worked because the
    iterator had already been created.  Loop-local names are now distinct.
    """
    clients = DBSession.query(Domain.text,
                              func.count(MiniAccess.address_id.distinct()),
                              func.count(MiniAccess.proxy_user_id.distinct()),
                              func.sum(MiniAccess.count),
                              func.sum(MiniAccess.download_bytes),
                              func.sum(MiniAccess.upload_bytes),
                              Domain.id)
    clients = clients.join(MiniAccess)
    clients = clients.group_by(Domain.text)
    clients = clients.filter(MiniAccess.date == session['date'])
    sort_fn = get_sortfn(sortorder)
    if sortname == 'address':
        clients = clients.order_by(sort_fn('count_1'))
    elif sortname == 'proxy_user':
        clients = clients.order_by(sort_fn('count_2'))
    elif sortname == 'requests':
        clients = clients.order_by(sort_fn('sum_1'))
    elif sortname == 'download_bytes':
        clients = clients.order_by(sort_fn('sum_2'))
    elif sortname == 'upload_bytes':
        clients = clients.order_by(sort_fn('sum_3'))
    total = clients.count()
    offset = (page - 1) * rp
    clients = clients.offset(offset).limit(rp)
    rows = []
    for data in clients:
        ip_link = link("reports/sites_ip", data[6], data[1])
        # The -1 presumably excludes the '-' pseudo-user from the distinct
        # user count — TODO confirm against how ProxyUser rows are created.
        user_link = link("reports/sites_users", data[6], int(data[2]) - 1)
        values = [data[0], ip_link, user_link, data[3], mb(data[4]), mb(data[5])]
        rows.append({'id': data[0], 'cell': values})
    return dict(page = page, total = total, rows = rows)
def fetch_users_report(self, page = 1, rp = 25, sortname = 'download_bytes', sortorder = 'desc', qtype = None, query = None):
    """AJAX fetch handler for the per-user report grid (session's date).

    The '-' pseudo-user is filtered out.  Returns the flexigrid dict
    (``page``, ``total``, ``rows``); the sites cell is a drill-down link.
    """
    q = DBSession.query(ProxyUser.text,
                        func.count(MiniAccess.domain_id.distinct()),
                        func.sum(MiniAccess.count),
                        func.sum(MiniAccess.download_bytes),
                        func.sum(MiniAccess.upload_bytes))
    q = q.join(MiniAccess).group_by(ProxyUser.text)
    q = q.filter(and_(MiniAccess.date == session['date'],
                      ProxyUser.text != '-'))
    sort_fn = get_sortfn(sortorder)
    # Map grid column names onto the generated SQL aggregate labels.
    order_cols = {'sites': 'count_1',
                  'requests': 'sum_1',
                  'download_bytes': 'sum_2',
                  'upload_bytes': 'sum_3'}
    if sortname in order_cols:
        q = q.order_by(sort_fn(order_cols[sortname]))
    total = q.count()
    q = q.offset((page - 1) * rp).limit(rp)
    rows = []
    for user, site_count, requests, down, up in q:
        sites = link('reports/user_sites', user, site_count)
        rows.append({'id': user,
                     'cell': [user, sites, requests, mb(down), mb(up)]})
    return dict(page = page, total = total, rows = rows)
def test_query_obj(self):
    """The persisted object round-trips every attribute it was built from."""
    fetched = DBSession.query(self.klass).one()
    for name in self.attrs:
        assert_equals(getattr(fetched, name), self.attrs[name])
def tearDown(self):
    """Discard any uncommitted test changes so cases stay isolated."""
    DBSession.rollback()
def by_user_name(cls, username):
    """Return the user object whose user name is ``username``, or None."""
    matches = DBSession.query(cls).filter(cls.user_name == username)
    return matches.first()
def by_email_address(cls, email):
    """Return the user object whose email address is ``email``, or None."""
    matches = DBSession.query(cls).filter(cls.email_address == email)
    return matches.first()
def fetch_main_report(self, page = 1, rp = 25, sortname = 'date', sortorder = 'desc', qtype = None, query = None):
    """AJAX fetch handler for the main report grid (per-day or whole range).

    Returns the flexigrid dict (``page``, ``total``, ``rows``); the
    clients/users/sites cells are drill-down links keyed by date.

    Fix: removed a leftover debug ``print`` of the query object.
    """
    dash_id = DBSession.query(ProxyUser.id).filter(ProxyUser.text == "-")
    dash_id = dash_id.one()[0]
    # Count client addresses only for unauthenticated requests (user '-')
    # and users only for authenticated ones.  Literal SQL is used because
    # no ORM form of this conditional count was found.
    count_ip = """count(distinct if(miniaccess.proxy_user_id = %d, miniaccess.address_id, 0))""" % (dash_id)
    count_user = """count(distinct if(miniaccess.proxy_user_id <> %d, miniaccess.proxy_user_id, 0))""" % (dash_id)
    if session['group_by'] == 'day':
        date_col = MiniAccess.date
    else:
        # Whole-range view: a single "first - last" label instead of a date.
        date_col = func.concat(func.min(MiniAccess.date), ' - ', func.max(MiniAccess.date))
    main = DBSession.query(date_col, count_ip, count_user,
                           func.count(MiniAccess.domain_id.distinct()),
                           func.sum(MiniAccess.count),
                           func.sum(MiniAccess.download_bytes),
                           func.sum(MiniAccess.upload_bytes))
    main = main.group_by(MiniAccess.date)
    sort_fn = get_sortfn(sortorder)
    if sortname == 'date':
        main = main.order_by(sort_fn(MiniAccess.date))
    elif sortname == 'clients':
        main = main.order_by(sort_fn('2'))
    elif sortname == 'users':
        main = main.order_by(sort_fn('3'))
    elif sortname == 'sites':
        main = main.order_by(sort_fn('count_1'))
    elif sortname == 'requests':
        main = main.order_by(sort_fn('sum_1'))
    elif sortname == 'download_bytes':
        main = main.order_by(sort_fn('sum_2'))
    elif sortname == 'upload_bytes':
        main = main.order_by(sort_fn('sum_3'))
    total = main.count()
    offset = (page - 1) * rp
    main = main.offset(offset).limit(rp)
    rows = []
    for data in main:
        urldate = data[0]
        clients = link('reports/clients', urldate, data[1])
        # We don't count the '-' user because it marks unauthenticated requests.
        users = link('reports/users', urldate, data[2])
        sites = link('reports/sites', urldate, data[3])
        values = [data[0], clients, users, sites, data[4], mb(data[5]), mb(data[6])]
        rows.append({'id': data[0], 'cell': values})
    return dict(page = page, total = total, rows = rows)