Example #1
    def execute(self, message, alliance=None, race=None, sortby="score"):
        planet = aliased(Planet)
        planet_intel = aliased(Intel)

        Q = session.query(planet.x, planet.y, planet.z, planet.score, planet.value, planet.size, planet.xp, planet.race, planet_intel.nick)
        Q = Q.outerjoin(planet.intel, planet_intel)
        if alliance:
            Q = Q.filter(planet_intel.alliance == alliance)
        if race:
            Q = Q.filter(planet.race == race)
        Q = Q.group_by(planet.x, planet.y, planet.z, planet.score, planet.value, planet.size, planet.xp, planet.race, planet_intel.nick)
        if sortby == "xp":
            Q = Q.order_by(desc(planet.xp))
        elif sortby == "size":
            Q = Q.order_by(desc(planet.size))
        elif sortby == "value":
            Q = Q.order_by(desc(planet.value))
        else:
            Q = Q.order_by(desc(planet.score))
        result = Q.all()
        
        reply = "Top%s planets" % (" "+race if race is not None else "")
        if alliance:
            reply+=" in %s"%(alliance.name,)
        reply+=" by %s:\n"%(sortby)
        prev = []
        i=0
        for x, y, z, score, value, size, xp, race, nick in result[:10]:
            i+=1
            line = "#%s - %s:%s:%s (%s) - Score: %s Value: %s Size: %s XP: %s"%(i,x,y,z,race,score,value,size,xp)
            if nick:
                line = "%s Nick: %s"%(line,nick)
            prev.append(line)
        print(prev)
        message.reply(reply+"\n".join(prev))
Example #2
def user(request, info, session, userid):
    u = session.query(User).options(joinedload("permissions")).options(joinedload("tokens")).get(int(userid))
    if u is None:
        # Query.get() returns None for a missing row rather than raising NoResultFound.
        raise Http404
    sales = (
        session.query(Transline)
        .filter(Transline.user == u)
        .options(joinedload("transaction"))
        .options(joinedload_all("stockref.stockitem.stocktype.unit"))
        .order_by(desc(Transline.time))[:50]
    )
    payments = (
        session.query(Payment)
        .filter(Payment.user == u)
        .options(joinedload("transaction"))
        .options(joinedload("paytype"))
        .order_by(desc(Payment.time))[:50]
    )
    annotations = (
        session.query(StockAnnotation)
        .options(joinedload_all("stockitem.stocktype"))
        .options(joinedload("type"))
        .filter(StockAnnotation.user == u)
        .order_by(desc(StockAnnotation.time))[:50]
    )
    return ("user.html", {"tuser": u, "sales": sales, "payments": payments, "annotations": annotations})
Example #3
    def execute(self, message, alliance=None, race=None, sortby="score"):
        planet = aliased(Planet)
        planet_intel = aliased(Intel)
        

        Q = session.query(planet.x, planet.y, planet.z, planet_intel.nick)
        Q = Q.outerjoin((planet.intel, planet_intel))
        if alliance:
            Q = Q.filter(planet_intel.alliance == alliance)
        if race:
            Q = Q.filter(planet.race == race)
        if sortby == "xp":
            Q = Q.order_by(desc(planet.xp))
        elif sortby == "size":
            Q = Q.order_by(desc(planet.size))
        elif sortby == "value":
            Q = Q.order_by(desc(planet.value))
        else:
            Q = Q.order_by(desc(planet.score))
        result = Q.all()
        
        reply = "Top%s planets" % (" "+race if race is not None else "")
        if alliance:
            reply+=" in %s"%(alliance.name,)
        reply+=" by %s:\n"%(sortby)
        prev = []
        i=0
        for x, y, z, nick in result[:10]:
            i+=1
            line = "#%2s %12s%s"%(i, "["+nick+"] " if nick else "", Planet.load(x,y,z))
            prev.append(line)
        message.reply(reply+"\n".join(prev))
Example #4
 def execute(self, request, user, sort=None):
     
     levels = [] + User.levels
     
     if sort is not None:
         levels = [("All member", levels[-1][1],),]
     
     order =  {"name"  : (asc(User.name),),
               "sponsor" : (asc(User.sponsor),),
               "access" : (desc(User.access),desc(User.carebears),asc(User.name),),
               "carebears" : (desc(User.carebears),),
               "planet" : (asc(Planet.x),asc(Planet.y),asc(Planet.z),),
               "defage" : (asc(User.fleetupdated),),
               }
     if sort not in order.keys():
         sort = "name"
     order = order.get(sort)
     
     members = []
     for level in levels:
         Q = session.query(User.name, User.alias, User.sponsor, User.access, User.carebears, Planet, User.fleetupdated,
                           User.phone, User.pubphone, User._smsmode,  or_(User.id == user.id, User.id.in_(session.query(PhoneFriend.user_id).filter_by(friend=user))))
         Q = Q.outerjoin(User.planet)
         Q = Q.filter(User.active == True)
         Q = Q.filter(User.access >= level[1])
         Q = Q.filter(User.access < levels[levels.index(level)-1][1]) if levels.index(level) > 0 else Q
         for o in order:
             Q = Q.order_by(o)
         
         members.append((level[0], Q.all(),))
     
     return render("members.tpl", request, accesslist=members)
Example #5
def organizations_and_counters():
    '''Query organizations with their counters'''
    query = DB.query(Group,
        func.count(distinct(Package.id)).label('nb_datasets'),
        func.count(distinct(Member.id)).label('nb_members')
    )
    query = query.outerjoin(CertifiedPublicService)
    query = query.outerjoin(Package, and_(
        Group.id == Package.owner_org,
        ~Package.private,
        Package.state == 'active',
    ))
    query = query.outerjoin(Member, and_(
        Member.group_id == Group.id,
        Member.state == 'active',
        Member.table_name == 'user'
    ))
    query = query.filter(Group.state == 'active')
    query = query.filter(Group.approval_status == 'approved')
    query = query.filter(Group.is_organization == True)
    query = query.group_by(Group.id, CertifiedPublicService.organization_id)
    query = query.order_by(
        CertifiedPublicService.organization_id == null(),
        desc('nb_datasets'),
        desc('nb_members'),
        Group.title
    )
    query = query.options(orm.joinedload(Group.certified_public_service))
    return query
Example #6
 def execute(self, request, user, sort=None):
     
     levels = [] + User.levels
     if "galmate" in Config.options("Access"):
         levels.append(("Galaxy", Config.getint("Access","galmate"),))
     else:
         levels.append(("Galaxy", 0,))
     
     if sort is not None:
         levels = [("All", 0,),]
     
     order =  {"name"  : (asc(Channel.name),),
               "userlevel" : (desc(Channel.userlevel),),
               "maxlevel" : (desc(Channel.maxlevel),),
               }
     if sort not in order.keys():
         sort = "name"
     order = order.get(sort)
     
     channels = []
     for level in levels:
         Q = session.query(Channel.name, Channel.userlevel, Channel.maxlevel)
         Q = Q.filter(Channel.userlevel >= level[1])
         Q = Q.filter(Channel.userlevel < levels[levels.index(level)-1][1]) if levels.index(level) > 0 else Q
         for o in order:
             Q = Q.order_by(o)
         
         channels.append((level[0], Q.all(),))
     
     return render("channels.tpl", request, accesslist=channels)
Example #7
 def execute(self, request, user):
     
     planet, galaxy = (user.planet, user.planet.galaxy,) if user.planet else (Planet(), Galaxy(),)
     
     planets = session.query(Planet).filter(Planet.active == True)
     galaxies = session.query(Galaxy).filter(Galaxy.active == True)
     alliances = session.query(Alliance).filter(Alliance.active == True)
     
     dup = lambda l,o,c=True: l+[o] if o in session and c and o not in l else l
     
     return render("index.tpl", request,
                  topplanets = dup(planets.order_by(asc(Planet.score_rank))[:20], 
                                   planet),
              roidingplanets = dup(planets.filter(Planet.size_growth > 0).order_by(desc(Planet.size_growth))[:5],
                                   planet, planet.size_growth > 0),
               roidedplanets = dup(planets.filter(Planet.size_growth < 0).order_by(asc(Planet.size_growth))[:5],
                                   planet, planet.size_growth < 0),
                   xpplanets = dup(planets.filter(Planet.xp_growth > 0).order_by(desc(Planet.xp_growth))[:5],
                                   planet, planet.xp_growth > 0),
               bashedplanets = dup(planets.filter(Planet.value_growth < 0).order_by(asc(Planet.value_growth))[:5],
                                   planet, planet.value_growth < 0),
             
                 topgalaxies = dup(galaxies.order_by(asc(Galaxy.score_rank))[:10],
                                   galaxy),
             roidinggalaxies = dup(galaxies.filter(Galaxy.size_growth > 0).order_by(desc(Galaxy.size_growth))[:5],
                                   galaxy, galaxy.size_growth > 0),
              roidedgalaxies = dup(galaxies.filter(Galaxy.size_growth < 0).order_by(asc(Galaxy.size_growth))[:5],
                                   galaxy, galaxy.size_growth < 0),
                  xpgalaxies = dup(galaxies.filter(Galaxy.xp_growth > 0).order_by(desc(Galaxy.xp_growth))[:5],
                                   galaxy, galaxy.xp_growth > 0),
              bashedgalaxies = dup(galaxies.filter(Galaxy.value_growth < 0).order_by(asc(Galaxy.value_growth))[:5],
                                   galaxy, galaxy.value_growth < 0),
             
                topalliances =     alliances.order_by(asc(Alliance.score_rank))[:8],
                         )
Example #8
    def newer_submissions(self):
        class Newer(object):
            '''You may use me like a list'''
            user = []
            team = []

            def __iter__(self):
                for i in self.user + self.team:
                    yield i

            def __len__(self):
                return len(self.user) + len(self.team)

            def __getitem__(self, i):
                return sorted(self.user + self.team, key=lambda s: s.modified, reverse=True)[i]

        newer = Newer()

        newer.user = (Submission.by_assignment_and_user(self.assignment, self.user)
            .filter(Submission.modified > self.modified).order_by(desc(Submission.modified)).all())
        newer.team = []
        if hasattr(self.user, 'teams'):
            for team in self.teams:
                for member in team.members:
                    if member != self.user:
                        newer.team.extend(Submission.by_assignment_and_user(self.assignment, member)
                            .filter(Submission.modified > self.modified).order_by(desc(Submission.modified)).all())
        return newer
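The Newer class above leans on the Python sequence protocol (__iter__, __len__, __getitem__) so callers can treat the combined user and team submissions like a single list. A minimal, self-contained sketch of the same pattern follows; the Combined class and its data are invented for illustration and are not part of the original project.

class Combined(object):
    '''A list-like view over two underlying lists (illustrative only).'''
    def __init__(self, first, second):
        self.first = first
        self.second = second

    def __iter__(self):
        # Yield items from both underlying lists, first list first.
        for item in self.first + self.second:
            yield item

    def __len__(self):
        return len(self.first) + len(self.second)

    def __getitem__(self, i):
        # Index into a merged view sorted in descending order.
        return sorted(self.first + self.second, reverse=True)[i]

merged = Combined([3, 1], [2])
assert len(merged) == 3
assert list(merged) == [3, 1, 2]
assert merged[0] == 3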
Example #9
def index():
    campaigns = Campaign.query.order_by(desc(Campaign.status_code), desc(Campaign.id)).all()
    calls = (db.session.query(Campaign.id, func.count(Call.id))
            .filter(Call.status == 'completed')
            .join(Call).group_by(Campaign.id))
    return render_template('campaign/list.html',
        campaigns=campaigns, calls=dict(calls.all()))
Example #10
 def execute(self, request, user, x, y, z, fleets=False):
     week = Updates.week_tick()
     
     planet = Planet.load(x,y,z)
     if planet is None:
         return HttpResponseRedirect(reverse("planet_ranks"))
     
     Q = session.query(FleetScan, Planet, Alliance)
     Q = Q.join(FleetScan.target)
     Q = Q.outerjoin(Planet.intel).outerjoin(Intel.alliance)
     Q = Q.filter(FleetScan.owner == planet)
     Q = Q.order_by(desc(FleetScan.landing_tick))
     if not fleets:
         Q = Q.filter(FleetScan.landing_tick >= week)
     outgoing = Q.all()
     
     Q = session.query(FleetScan, Planet, Alliance)
     Q = Q.join(FleetScan.owner)
     Q = Q.outerjoin(Planet.intel).outerjoin(Intel.alliance)
     Q = Q.filter(FleetScan.target == planet)
     Q = Q.order_by(desc(FleetScan.landing_tick))
     if not fleets:
         Q = Q.filter(FleetScan.landing_tick >= week)
     incoming = Q.all()
     
     scan = planet.scan("A") or planet.scan("U")
     
     return render("iplanet.tpl", request, planet=planet, scan=scan, outgoing=outgoing, incoming=incoming)
Example #11
def user(request, info, session, userid):
    u = session\
        .query(User)\
        .get(int(userid))
    if not u:
        raise Http404

    sales = session\
            .query(Transline)\
            .filter(Transline.user == u)\
            .options(joinedload('transaction'),
                     joinedload_all('stockref.stockitem.stocktype.unit'))\
            .order_by(desc(Transline.time))[:50]

    payments = session\
               .query(Payment)\
               .filter(Payment.user == u)\
               .options(joinedload('transaction'),
                        joinedload('paytype'))\
               .order_by(desc(Payment.time))[:50]

    annotations = session\
                  .query(StockAnnotation)\
                  .options(joinedload_all('stockitem.stocktype'),
                           joinedload('type'))\
                  .filter(StockAnnotation.user == u)\
                  .order_by(desc(StockAnnotation.time))[:50]

    return ('user.html',
            {'tillobject': u,
             'tuser': u,
             'sales': sales,
             'payments': payments,
             'annotations': annotations,
            })
Example #12
def organizations_and_counters():
    '''Query organizations with their counters'''
    memberships = aliased(model.Member)

    query = DB.query(model.Group,
        func.count(distinct(model.Package.id)).label('nb_datasets'),
        func.count(distinct(memberships.id)).label('nb_members')
    )
    query = query.outerjoin(CertifiedPublicService)
    query = query.outerjoin(model.Package, and_(
        model.Group.id == model.Package.owner_org,
        ~model.Package.private,
        model.Package.state == 'active',
    ))
    query = query.outerjoin(memberships, and_(
        memberships.group_id == model.Group.id,
        memberships.state == 'active',
        memberships.table_name == 'user'
    ))
    query = query.filter(model.Group.state == 'active')
    query = query.filter(model.Group.approval_status == 'approved')
    query = query.filter(model.Group.is_organization == True)
    query = query.group_by(model.Group.id, CertifiedPublicService.organization_id)
    query = query.order_by(
        CertifiedPublicService.organization_id == null(),
        desc('nb_datasets'),
        desc('nb_members'),
        model.Group.title
    )
    return query
Example #13
    def load_plasmids(self, tsession = None, engine = None):

        engine = engine or self.engine
        tsession = tsession or self.tsession

        for p in tsession.query(Plasmid).order_by(desc(Plasmid.date), desc(Plasmid.creator_entry_number)): # todo: parameterize sorting
            k = (p.creator, p.creator_entry_number)
            self.plasmid_order.append(k)
            self.plasmids[k] = p
Example #14
 def execute(self, request, user, x, y, z, h=False, hs=False, ticks=None):
     planet = Planet.load(x,y,z)
     if planet is None:
         return HttpResponseRedirect(reverse("planet_ranks"))
     
     ticks = int(ticks or 0) if (h or hs) else 12
     
     if not hs:
         sizediffvalue = PlanetHistory.rdiff * PA.getint("numbers", "roid_value")
         valuediffwsizevalue = PlanetHistory.vdiff - sizediffvalue
         resvalue = valuediffwsizevalue * PA.getint("numbers", "res_value")
         shipvalue = valuediffwsizevalue * PA.getint("numbers", "ship_value")
         xpvalue = PlanetHistory.xdiff * PA.getint("numbers", "xp_value")
         Q = session.query(PlanetHistory,
                             sizediffvalue,
                             valuediffwsizevalue,
                             resvalue, shipvalue,
                             xpvalue,
                             )
         Q = Q.filter(PlanetHistory.current == planet)
         Q = Q.order_by(desc(PlanetHistory.tick))
         history = Q[:ticks] if ticks else Q.all()
     else:
         history = None
     
     if not (h or hs):
         landings = session.query(PlanetLandings.hour, count()).filter(PlanetLandings.planet==planet).group_by(PlanetLandings.hour).all()
         landed = session.query(PlanetLandedOn.hour, count()).filter(PlanetLandedOn.planet==planet).group_by(PlanetLandedOn.hour).all()
         vdrops = session.query(PlanetValueDrops.hour, count()).filter(PlanetValueDrops.planet==planet).group_by(PlanetValueDrops.hour).all()
         idles = session.query(PlanetIdles.hour, count()).filter(PlanetIdles.planet==planet).group_by(PlanetIdles.hour).all()
         hourstats = {
                         'landings' : dict(landings), 'landingsT' : sum([c for hour,c in landings]),
                         'landed'   : dict(landed),   'landedT'   : sum([c for hour,c in landed]),
                         'vdrops'   : dict(vdrops),   'vdropsT'   : sum([c for hour,c in vdrops]),
                         'idles'    : dict(idles),    'idlesT'    : sum([c for hour,c in idles]),
                         }
     else:
         hourstats = None
     
     if not h:
         Q = session.query(PlanetHistory)
         Q = Q.filter(or_(PlanetHistory.hour == 23, PlanetHistory.tick == Updates.current_tick()))
         Q = Q.filter(PlanetHistory.current == planet)
         Q = Q.order_by(desc(PlanetHistory.tick))
         hsummary = Q.all() if hs else Q[:14]
     else:
         hsummary = None
     
     return render(["planet.tpl",["hplanet.tpl","hsplanet.tpl"][hs]][h or hs],
                     request,
                     planet = planet,
                     history = history,
                     hour = datetime.utcnow().hour, hourstats = hourstats,
                     hsummary = hsummary,
                     ticks = ticks,
                   )
Example #15
 def list_inactive(self):
     """Returns a list of bans that are currently inactive."""
     return list(
         self.dbsession.query(Ban)
         .filter(
             or_(Ban.active == False, Ban.active_until <= func.now())  # noqa: E712
         )
         .order_by(desc(Ban.active_until), desc(Ban.created_at))
         .all()
     )
Example #16
 def execute(self, request, user, x, y, h=False, hs=False, ticks=None):
     galaxy = Galaxy.load(x,y)
     if galaxy is None:
         return HttpResponseRedirect(reverse("galaxy_ranks"))
     
     ticks = int(ticks or 0) if (h or hs) else 12
     
     if not (h or hs):
         Q = session.query(Planet, Intel.nick, Alliance.name)
         Q = Q.outerjoin(Planet.intel)
         Q = Q.outerjoin(Intel.alliance)
         Q = Q.filter(Planet.active == True)
         Q = Q.filter(Planet.galaxy == galaxy)
         Q = Q.order_by(asc(Planet.z))
         planets = Q.all()
         exiles = galaxy.exiles[:10]
     else:
         planets, exiles = None, None
     
     if not hs:
         sizediffvalue = GalaxyHistory.rdiff * PA.getint("numbers", "roid_value")
         valuediffwsizevalue = GalaxyHistory.vdiff - sizediffvalue
         resvalue = valuediffwsizevalue * PA.getint("numbers", "res_value")
         shipvalue = valuediffwsizevalue * PA.getint("numbers", "ship_value")
         xpvalue = GalaxyHistory.xdiff * PA.getint("numbers", "xp_value")
         Q = session.query(GalaxyHistory,
                             sizediffvalue,
                             valuediffwsizevalue,
                             resvalue, shipvalue,
                             xpvalue,
                             )
         Q = Q.filter(GalaxyHistory.current == galaxy)
         Q = Q.order_by(desc(GalaxyHistory.tick))
         history = Q[:ticks] if ticks else Q.all()
     else:
         history = None
     
     if not h:
         Q = session.query(GalaxyHistory)
         Q = Q.filter(or_(GalaxyHistory.hour == 23, GalaxyHistory.tick == Updates.current_tick()))
         Q = Q.filter(GalaxyHistory.current == galaxy)
         Q = Q.order_by(desc(GalaxyHistory.tick))
         hsummary = Q.all() if hs else Q[:14]
     else:
         hsummary = None
     
     return render(["galaxy.tpl",["hgalaxy.tpl","hsgalaxy.tpl"][hs]][h or hs],
                     request,
                     galaxy = galaxy,
                     planets = planets,
                     exiles = exiles,
                     history = history,
                     hsummary = hsummary,
                     ticks = ticks,
                   )
Example #17
 def execute(self, request, user, page="1", sort="score"):
     page = int(page)
     offset = (page - 1)*50
     order =  {"score" : (asc(Alliance.score_rank),),
               "size"  : (asc(Alliance.size_rank),),
               "ratio" : (desc(Alliance.ratio),),
               "avg_score" : (asc(Alliance.score_avg_rank),),
               "avg_size"  : (asc(Alliance.size_avg_rank),),
               "members"   : (asc(Alliance.members_rank),),
               "score_growth" : (desc(Alliance.score_growth),),
               "size_growth"  : (desc(Alliance.size_growth),),
               "avg_score_growth" : (desc(Alliance.score_avg_growth),),
               "avg_size_growth"  : (desc(Alliance.size_avg_growth),),
               "score_growth_pc" : (desc(Alliance.score_growth_pc),),
               "size_growth_pc"  : (desc(Alliance.size_growth_pc),),
               "avg_score_growth_pc" : (desc(Alliance.score_avg_growth_pc),),
               "avg_size_growth_pc"  : (desc(Alliance.size_avg_growth_pc),),
               } 
     if sort not in order.keys():
         sort = "score"
     order = order.get(sort)
     
     Q = session.query(Alliance)
     Q = Q.filter(Alliance.active == True)
     
     count = Q.count()
      pages = count//50 + int(count%50 > 0)
     pages = range(1, 1+pages)
     
     for o in order:
         Q = Q.order_by(o)
     Q = Q.limit(50).offset(offset)
     return render("alliances.tpl", request, alliances=Q.all(), offset=offset, pages=pages, page=page, sort=sort)
Example #18
 def last_observation(self):
     session = object_session(self)
     q = session.query(ObservedMac)
     q = q.filter_by(mac_address=self.mac)
     # Group the results into 'any port number but zero' and 'port 0'.
     # This prioritizes any port over the uplink port.
     # Saying that port 0 is an uplink port isn't very elegant, also
     # with real port names it's not even true.
     q = q.order_by(desc(case([(ObservedMac.port == "0", 0)], else_=1)))
     q = q.order_by(desc(ObservedMac.last_seen))
     return q.first()
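The comment above explains the trick: wrapping a CASE expression in order_by() pushes the uplink port (port "0") behind every real port, and only then falls back on recency. The list form case([(cond, value)]) is the legacy SQLAlchemy calling style; below is a minimal, self-contained sketch of the same ordering using the SQLAlchemy 1.4+/2.0 positional form. The ObservedMacDemo model, engine, and rows are invented for illustration.

from datetime import datetime, timedelta
from sqlalchemy import Column, DateTime, Integer, String, case, create_engine, desc
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class ObservedMacDemo(Base):
    # Illustrative stand-in for the ObservedMac model used above.
    __tablename__ = "observed_mac_demo"
    id = Column(Integer, primary_key=True)
    mac_address = Column(String)
    port = Column(String)
    last_seen = Column(DateTime)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    now = datetime.utcnow()
    session.add_all([
        ObservedMacDemo(mac_address="aa:bb", port="0", last_seen=now),                       # uplink, newest
        ObservedMacDemo(mac_address="aa:bb", port="7", last_seen=now - timedelta(hours=1)),  # real port, older
    ])
    session.commit()

    q = session.query(ObservedMacDemo).filter_by(mac_address="aa:bb")
    # 1.4+/2.0 style: case() takes WHEN tuples positionally instead of a list.
    q = q.order_by(desc(case((ObservedMacDemo.port == "0", 0), else_=1)))
    q = q.order_by(desc(ObservedMacDemo.last_seen))
    print(q.first().port)  # "7": any real port beats the uplink port, even if seen less recently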
Example #19
    def get_feed_entries(self):
        max_count=20
        entries = Session.query(BlogEntry)[:max_count]
        myor = or_()
        for friend in self.friends:
            myor.append(Spotlight.uid == friend.id)

        if len(myor)>0:
            entries.extend(Session.query(Spotlight).filter(
                and_(myor, Spotlight.active==True)).\
                order_by(sql.desc(Spotlight.timestamp))\
                    [:max_count])

        commentor = or_()
        spotlightor = or_()
        for friend in self.friends:
            commentor.append(SpotlightComment.uid == friend.id)
            spotlightor.append(Spotlight.uid == friend.id)


        if len(commentor)>0 and len(spotlightor)>0:
            entries.extend(Session.query(SpotlightComment).\
                    join((Spotlight, SpotlightComment.spotlight)).\
                    filter(and_(
                        SpotlightComment.uid!=session['userid'],
                        or_(Spotlight.uid==session['userid'],
                            and_(commentor, spotlightor)),
                        Spotlight.active == True)).\
                    order_by(sql.desc(SpotlightComment.timestamp))[:max_count])

        entries.extend(Session.query(Recommendation).\
                filter(and_(
                    Recommendation.recommendeefbid == self.fbid,
                    Recommendation.active == True))[:max_count])

        def sort_by_timestamp(x, y):
            if x.timestamp == None:
                if y.timestamp == None:
                    return 0
                return 1
            elif y.timestamp == None:
                return -1
            elif x.timestamp > y.timestamp:
                return -1
            elif x.timestamp == y.timestamp:
                return 0
            else:
                return 1

        entries.sort(sort_by_timestamp)
        return entries[:max_count]
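sort_by_timestamp above is a Python 2 cmp-style comparator passed straight to list.sort(); Python 3's sort() only accepts a key function, so the same ordering (newest first, entries without a timestamp last) needs functools.cmp_to_key or an equivalent key. A small runnable sketch under that assumption, with an invented Entry stand-in:

from functools import cmp_to_key

class Entry(object):
    '''Illustrative stand-in for a feed entry carrying a .timestamp attribute.'''
    def __init__(self, timestamp):
        self.timestamp = timestamp

def sort_by_timestamp(x, y):
    # Same rules as above: newest first, missing timestamps last.
    if x.timestamp is None:
        return 0 if y.timestamp is None else 1
    if y.timestamp is None:
        return -1
    if x.timestamp > y.timestamp:
        return -1
    return 0 if x.timestamp == y.timestamp else 1

entries = [Entry(1), Entry(None), Entry(5)]
entries.sort(key=cmp_to_key(sort_by_timestamp))  # Python 3 replacement for entries.sort(cmp)
print([e.timestamp for e in entries])  # [5, 1, None]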
Example #20
def statistics():
    campaigns = Campaign.query.order_by(desc(Campaign.status_code), desc(Campaign.id)).all()
    today = datetime.today()
    this_month_start = today.replace(day=1)  # first day of the current month

    last_month = this_month_start - timedelta(days=28) # a day in last month
    next_month = today.replace(day=28) + timedelta(days=4)  # a day in next month (for months with 28,29,30,31)

    last_month_start = last_month - timedelta(days=(last_month.day-1))
    this_month_end = next_month - timedelta(days=next_month.day)  # the last day of the current month
    return render_template('admin/statistics.html',
        campaigns=campaigns, timespans=API_TIMESPANS,
        default_start=last_month_start.strftime('%Y/%m/%d'),
        default_end=this_month_end.strftime('%Y/%m/%d'))
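The date arithmetic here relies on two tricks: replace(day=1) gives the first day of a month, and adding four days to day 28 always lands in the next month (months have 28 to 31 days), after which subtracting next_month.day steps back to the last day of the current month. A quick self-contained check of the boundary math, using an arbitrary date for illustration:

from datetime import datetime, timedelta

today = datetime(2016, 2, 10)                            # arbitrary date; 2016 is a leap year
this_month_start = today.replace(day=1)                  # 2016-02-01
last_month = this_month_start - timedelta(days=28)       # some day in January
next_month = today.replace(day=28) + timedelta(days=4)   # some day in March

last_month_start = last_month - timedelta(days=last_month.day - 1)
this_month_end = next_month - timedelta(days=next_month.day)

print(last_month_start.date(), this_month_end.date())    # 2016-01-01 2016-02-29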
Example #21
 def execute(self, message, user, params):
     
     # Planet
     if len(params.groups()) > 1:
         planet = Planet.load(*params.group(1,3,5))
         if planet is None:
             message.reply("No planet with coords %s:%s:%s found" % params.group(1,3,5))
             return
         
         # List of last 10 scans
         if params.group(6) == "o":
             scans = planet.scans.filter_by(scantype=self.type).order_by(desc(Scan.id))[:10]
             if len(scans) < 1:
                 message.reply("No %s Scans of %s:%s:%s found"%(PA.get(self.type,"name"),planet.x,planet.y,planet.z))
                 return
             prev = []
             for scan in scans:
                 prev.append("(pt%s %s)" % (scan.tick, scan.pa_id,))
             reply = "Last 10 %s Scans on %s:%s:%s "%(PA.get(self.type,"name"),planet.x,planet.y,planet.z) + " ".join(prev)
             message.reply(reply)
             return
         
         # Latest scan
         scan = planet.scan(self.type)
         if scan is None:
             message.reply("No %s Scans of %s:%s:%s found"%(PA.get(self.type,"name"),planet.x,planet.y,planet.z))
             return
         
         # Link to scan
         if params.group(7) == "l":
             reply = "%s on %s:%s:%s " % (PA.get(self.type,"name"),planet.x,planet.y,planet.z,)
             reply+= Config.get("URL","viewscan") % (scan.pa_id,)
             message.reply(reply)
             return
         
         # Display the scan
         message.reply(str(scan))
     
     # ID
     else:
         Q = session.query(Scan)
         Q = Q.filter(Scan.pa_id.ilike("%"+params.group(1)+"%"))
         Q = Q.order_by(desc(Scan.id))
         scan = Q.first()
         if scan is None:
             message.reply("No Scans matching ID '%s'"%(params.group(1),))
             return
         # Display the scan
         message.reply(str(scan))
Example #22
def view(_user):
    """
    Show the logged-in user their own feed.
    """
    from pyaspora.diaspora.models import MessageQueue
    if MessageQueue.has_pending_items(_user):
        return redirect(url_for('diaspora.run_queue', _external=True))

    limit = int(request.args.get('limit', 99))
    friend_ids = [f.id for f in _user.contact.friends()]
    clauses = [Post.Queries.shared_with_contact(_user.contact)]
    if friend_ids:
        clauses.append(
            Post.Queries.authored_by_contacts_and_public(friend_ids))
    tag_ids = [t.id for t in _user.contact.interests]
    if tag_ids:
        clauses.append(Tag.Queries.public_posts_for_tags(tag_ids))
    feed_query = or_(*clauses)
    feed = db.session.query(Share).join(Post). \
        outerjoin(PostTag).outerjoin(Tag). \
        filter(feed_query). \
        order_by(desc(Post.thread_modified_at)). \
        group_by(Post.id). \
        options(contains_eager(Share.post)). \
        limit(limit)

    data = {
        'feed': json_posts([(s.post, s) for s in feed], _user, True)
    }

    add_logged_in_user_to_data(data, _user)

    return render_response('feed.tpl', data)
Example #23
    def get_biggest_donations(cls, limit=None, offset=None):
        """Getter for biggest donations.

        Donations from the same person are grouped.

        Args:
            limit: Maximum number of donations to be returned.
            offset: Offset of the result.

        Returns:
            Tuple with two items. First is total number of donations. Second
            is a list of donations sorted by amount with a specified offset.
        """
        query = db.session.query(
            cls.first_name.label("first_name"),
            cls.last_name.label("last_name"),
            cls.editor_name.label("editor_name"),
            func.max(cls.payment_date).label("payment_date"),
            func.sum(cls.amount).label("amount"),
            func.sum(cls.fee).label("fee"),
        )
        query = query.filter(cls.anonymous == False)
        query = query.group_by(cls.first_name, cls.last_name, cls.editor_name)
        query = query.order_by(desc("amount"))
        count = query.count()  # Total count should be calculated before limits
        if limit is not None:
            query = query.limit(limit)
        if offset is not None:
            query = query.offset(offset)
        return count, query.all()
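The comment "Total count should be calculated before limits" matters because Query.count() counts whatever the query currently returns, so a count taken after limit()/offset() is capped at the page size instead of the full result set. A minimal sketch of the difference, with an invented DonationDemo stand-in and SQLAlchemy 1.4+ assumed:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class DonationDemo(Base):
    # Illustrative stand-in; not the project's model.
    __tablename__ = "donation_demo"
    id = Column(Integer, primary_key=True)
    amount = Column(Integer)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(DonationDemo(amount=i) for i in range(5))
    session.commit()

    query = session.query(DonationDemo).order_by(DonationDemo.amount.desc())
    total = query.count()              # 5: full count, taken before paging
    page = query.limit(2).all()        # the first page of 2 rows
    capped = query.limit(2).count()    # 2: counting after limit() only sees the page
    print(total, len(page), capped)    # 5 2 2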
Example #24
def json_feed(guid):
    """
    Look up the User identified by GUID and return the User's public feed
    as Diaspora-style JSON.
    """
    contact = DiasporaContact.get_by_guid(guid)
    if not(contact and contact.contact.user):
        abort(404, 'No such contact', force_status=True)

    feed_query = Post.Queries.public_wall_for_contact(contact.contact)
    feed = db.session.query(Post).join(Share).filter(feed_query). \
        order_by(desc(Post.thread_modified_at)). \
        group_by(Post.id).limit(99)

    ret = []
    for post in feed:
        text = DiasporaPost.get_for_post(post, commit=False).as_text()
        rep = {
            "author": {
                "diaspora_id": contact.username,
                "name": contact.contact.realname,
                "guid": contact.guid,
            },
            "created_at": post.created_at.isoformat(),
            "text": text,
            "public": True,
            "post_type": "StatusMessage",
            "guid": post.diasp.guid,
            "interacted_at": post.root().thread_modified_at.isoformat(),
            "provider_display_name": None,
        }
        ret.append(rep)

    return jsonify(ret)
Example #25
    def execute(self, message, user, params):
        if params.group(1):
            u = User.load(params.group(1))
            if u:
                message.reply("%s has %s amps." % (u.name, u.planet.intel.amps))
                return
            else:
                if not params.group(1).isdigit():
                    message.reply("No users matching '%s'" % (params.group(1)))
                    return

        Q = session.query(User, Intel)
        Q = Q.join(Intel, User.planet_id == Intel.planet_id)
        if params.group(1):
            Q = Q.filter(Intel.amps >= int(params.group(1)))
        else:
            Q = Q.filter(Intel.amps > 0)
        Q = Q.order_by(desc(Intel.amps))
        Q = Q.limit(10)
        if Q.count() == 0:
            message.reply("No scanners found with at least %s amps." % (params.group(1) if params.group(1) else "1"))
            return
        reply = "Scanners%s:  " % ((" with at least %s amps" % (params.group(1))) if params.group(1) else "")
        for scanner in Q:
            reply += "%s: %s  " % (scanner[0].name, scanner[1].amps)
        message.reply(reply[:-2])
        return
Example #26
def userlist(request):
    data = None
    if request.method == 'POST':
        form = ListSearchForm(request.POST, request.FILES)
        print request.POST
        if form.is_valid():
                try:
                    keyword = form.cleaned_data['search'].strip()
                    data = session.query(UserSL).filter(
                        and_(
                          UserSL.id > 0 if keyword == "" else  
                          or_(
                            UserSL.name.like('%%%s%%'%(keyword)),
                            UserSL.account.like('%%%s%%'%(keyword)),
                            UserSL.mobile.like('%%%s%%'%(keyword)),
                            UserSL.email.like('%%%s%%'%(keyword)),
                            UserSL.linkid.like('%%%s%%'%(keyword)),
                            ),
                          )
                    ).order_by(desc(UserSL.id)).all()
                    print data
                except Exception as e:
                    error = u"Failed to fetch the user list. Please contact an administrator."
                    session.rollback()
                    printError()
        else:
            print "not valid"                
Example #27
 def planet(self, message, user, params):
     planet = Planet.load(*params.group(1,3,5))
     if planet is None:
         message.reply("No planet with coords %s:%s:%s found" % params.group(1,3,5))
         return
     
     # List of last 10 scans
     if params.group(6) == "o":
         scans = planet.scans.filter_by(scantype=self.type).order_by(desc(Scan.id))[:10]
         if len(scans) < 1:
             message.reply("No %s Scans of %s:%s:%s found"%(PA.get(self.type,"name"),planet.x,planet.y,planet.z))
             return
         prev = []
         for scan in scans:
             prev.append("(pt%s %s)" % (scan.tick, scan.pa_id,))
         reply = "Last 10 %s Scans on %s:%s:%s "%(PA.get(self.type,"name"),planet.x,planet.y,planet.z) + " ".join(prev)
         message.reply(reply)
         return
     
     # Latest scan
     scan = planet.scan(self.type)
     if scan is None:
         message.reply("No %s Scans of %s:%s:%s found"%(PA.get(self.type,"name"),planet.x,planet.y,planet.z))
         return
     
     # Link to scan
     if params.group(7) == "l":
         reply = "%s on %s:%s:%s " % (scan.type,planet.x,planet.y,planet.z,)
         reply+= self.url(scan.link, user)
         message.reply(reply)
         return
     
     # Display the scan
     message.reply(self.url(str(scan), user))
Example #28
 def testSaveDatapoint(self):
     dbDatapoint = (
         self.reactor.session.query(dataProcessing.Datapoint).order_by(desc(dataProcessing.Datapoint.id)).first()
     )
     assert isinstance(dbDatapoint, dataProcessing.Datapoint)
     assert isinstance(dbDatapoint.value, float)
     self.assertEqual(1, 2, msg=dbDatapoint.id)
Example #29
  def properties(self, name):
    connection = self._client.connect()
    rval = {}

    for interval,config in self._intervals.items():
      rval.setdefault(interval, {})

      stmt = select([self._table.c.i_time]).where(
        and_(
          self._table.c.name==name,
          self._table.c.interval==interval
        )
      ).order_by( asc(self._table.c.i_time) ).limit(1)
      rval[interval]['first'] = config['i_calc'].from_bucket(
        connection.execute(stmt).first()['i_time'] )

      stmt = select([self._table.c.i_time]).where(
        and_(
          self._table.c.name==name,
          self._table.c.interval==interval
        )
      ).order_by( desc(self._table.c.i_time) ).limit(1)
      rval[interval]['last'] = config['i_calc'].from_bucket(
        connection.execute(stmt).first()['i_time'] )

    return rval
Example #30
def popular_datasets():
    '''Get the most popular datasets, i.e. those with the most related items'''
    query = datasets()
    query = query.outerjoin(RelatedDataset)
    query = query.group_by(Package, Group, CertifiedPublicService)
    query = query.order_by(desc(func.count(RelatedDataset.related_id)))
    return query
Example #31
    def execute(self, request, user, page="1", sort="score"):
        page = int(page)
        offset = (page - 1) * 60
        order = {
            "members": (desc("ialliances_members"), ),
            "size": (desc("ialliances_size"), ),
            "value": (desc("ialliances_value"), ),
            "score": (desc("ialliances_score"), ),
            "avg_size": (desc("ialliances_avg_size"), ),
            "avg_value": (desc("ialliances_avg_value"), ),
            "avg_score": (desc("ialliances_avg_score"), ),
            "t10s": (desc("ialliances_t10s"), ),
            "t50s": (desc("ialliances_t50s"), ),
            "t100s": (desc("ialliances_t100s"), ),
            "t200s": (desc("ialliances_t200s"), ),
            "t10v": (desc("ialliances_t10v"), ),
            "t50v": (desc("ialliances_t50v"), ),
            "t100v": (desc("ialliances_t100v"), ),
            "t200v": (desc("ialliances_t200v"), ),
        }
        if sort not in order.keys():
            sort = "score"
        order = order.get(sort)

        members = count().label("members")
        size = sum(Planet.size).label("size")
        value = sum(Planet.value).label("value")
        score = sum(Planet.score).label("score")
        avg_size = size.op("/")(members).label("avg_size")
        avg_value = value.op("/")(members).label("avg_value")
        avg_score = score.op("/")(members).label("avg_score")
        t10s = count(case(whens=((Planet.score_rank <= 10, 1), ),
                          else_=None)).label("t10s")
        t50s = count(case(whens=((Planet.score_rank <= 50, 1), ),
                          else_=None)).label("t50s")
        t100s = count(case(whens=((Planet.score_rank <= 100, 1), ),
                           else_=None)).label("t100s")
        t200s = count(case(whens=((Planet.score_rank <= 200, 1), ),
                           else_=None)).label("t200s")
        t10v = count(case(whens=((Planet.value_rank <= 10, 1), ),
                          else_=None)).label("t10v")
        t50v = count(case(whens=((Planet.value_rank <= 50, 1), ),
                          else_=None)).label("t50v")
        t100v = count(case(whens=((Planet.value_rank <= 100, 1), ),
                           else_=None)).label("t100v")
        t200v = count(case(whens=((Planet.value_rank <= 200, 1), ),
                           else_=None)).label("t200v")

        alliance1 = aliased(Alliance)
        Q = session.query(
            size,
            value,
            score,
            avg_size,
            avg_value,
            avg_score,
            t10s,
            t50s,
            t100s,
            t200s,
            t10v,
            t50v,
            t100v,
            t200v,
            members,
            alliance1.id,
        )
        Q = Q.join(Planet.intel)
        Q = Q.join((
            alliance1,
            Intel.alliance,
        ))
        Q = Q.filter(Planet.active == True)
        Q = Q.group_by(alliance1.id)
        ialliances = alias(Q.subquery(), "ialliances")

        alliance2 = aliased(Alliance)
        Q = session.query(alliance2.name, alliance2.members)
        Q = Q.add_columns(ialliances)
        Q = Q.filter(alliance2.id == ialliances.c.id)

        count_ = Q.count()
        pages = count_ // 60 + int(count_ % 60 > 0)
        pages = range(1, 1 + pages)

        for o in order:
            Q = Q.order_by(o)
        Q = Q.limit(60).offset(offset)
        return render("ialliances.tpl",
                      request,
                      alliances=Q.all(),
                      offset=offset,
                      pages=pages,
                      page=page,
                      sort=sort)
Example #32
def question_view():
    #Categories for filters
    categories = ['Machine Learning','Computer Science','Statistics','Combinatorics','Case Study',\
                        'Systems Design','Behavioral']
    if request.method == 'POST':
        print(request.form.getlist('category'))
        category = request.form.getlist('category')
        filter_questions_likes = db.session.query(Evaluation.id,Evaluation.evaluation_category,\
                            Evaluation.evaluation_question,cast(coalesce(func.avg(Evaluation_Likes.like),0),sqlalchemy.Integer).\
                            label('Likes'),cast(coalesce(func.avg(Evaluation_Difficulty.difficulty),0),sqlalchemy.Integer).label('Difficulty')).\
                            outerjoin(Evaluation_Likes).outerjoin(Evaluation_Difficulty).group_by(Evaluation.id).\
                            filter(Evaluation.evaluation_category.in_(category)).order_by(desc('Likes')).all()
        return render_template('question_view.html',
                               questions_likes=filter_questions_likes,
                               categories=categories)
    #Count likes for each question
    questions_likes = db.session.query(Evaluation.id,Evaluation.evaluation_category,\
                    Evaluation.evaluation_question,cast(coalesce(func.avg(Evaluation_Likes.like),0),sqlalchemy.Integer).\
                    label('Likes'),cast(coalesce(func.avg(Evaluation_Difficulty.difficulty),0),sqlalchemy.Integer).label('Difficulty')).\
                    outerjoin(Evaluation_Likes).outerjoin(Evaluation_Difficulty).group_by(Evaluation.id).order_by(desc('Likes')).all()
    return render_template('question_view.html',
                           questions_likes=questions_likes,
                           categories=categories)
Example #33
def casestudies_search():
    search_query = get_json_from_request()

    offset = get_nonnegative_int_or_400(request.args, 'from', 0)
    result_count = get_positive_int_or_400(
        request.args, 'size', current_app.config['DM_API_SUPPLIERS_PAGE_SIZE'])

    sort_dir = search_query.get('sort_dir', 'asc')
    sort_by = search_query.get('sort_by', None)
    domains = search_query.get('domains', None)
    seller_types = search_query.get('seller_types', None)
    search_term = search_query.get('search_term', None)
    framework_slug = request.args.get('framework', 'digital-marketplace')

    q = db.session.query(CaseStudy).join(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
        .outerjoin(SupplierFramework).outerjoin(Framework)
    q = q.filter(
        Supplier.status != 'deleted',
        or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))
    tsquery = None
    if search_term:
        if ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        q = q.add_column(
            func.ts_headline(
                'english',
                func.concat(CaseStudy.data['approach'].astext, ' ',
                            CaseStudy.data['role'].astext), tsquery,
                'MaxWords=150, MinWords=75, ShortWord=3, HighlightAll=FALSE, FragmentDelimiter=" ... " '
            ))
    else:
        q = q.add_column("''")
    q = q.add_column(Supplier.name)
    q = q.add_column(postgres.array_agg(Supplier.data))
    q = q.group_by(CaseStudy.id, Supplier.name)

    if domains:
        d_agg = postgres.array_agg(cast(Domain.name, TEXT))
        q = q.having(d_agg.contains(array(domains)))

    if seller_types:
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type', )]),
            whereclause=cast(column('value'), Boolean)).as_scalar()

        q = q.filter(selected_seller_types.contains(array(seller_types)))

    if sort_dir in ('desc', 'z-a'):
        ob = [desc(CaseStudy.data['title'].astext)]
    else:
        ob = [asc(CaseStudy.data['title'].astext)]

    if search_term:
        ob = [
            desc(
                func.ts_rank_cd(
                    func.to_tsvector(
                        func.concat(
                            Supplier.name, CaseStudy.data['title'].astext,
                            CaseStudy.data['approach'].astext)), tsquery))
        ] + ob

        condition = func.to_tsvector(
            func.concat(Supplier.name, CaseStudy.data['title'].astext,
                        CaseStudy.data['approach'].astext)).op('@@')(tsquery)

        q = q.filter(condition)
    q = q.order_by(*ob)

    raw_results = list(q)
    results = []

    for x in range(len(raw_results)):
        result = raw_results[x][0].serialize()
        if raw_results[x][1] is not None and raw_results[x][1] != '':
            result['approach'] = raw_results[x][1]
        if raw_results[x][2] is not None:
            result['supplierName'] = raw_results[x][2]
        if raw_results[x][3] is not None and raw_results[x][3][0] is not None:
            result['seller_type'] = raw_results[x][3][0].get('seller_type')
        results.append(result)

    total_results = len(results)

    sliced_results = results[offset:(offset + result_count)]

    result = {
        'hits': {
            'total': total_results,
            'hits': [{
                '_source': r
            } for r in sliced_results]
        }
    }

    try:
        response = jsonify(result), 200
    except Exception as e:
        response = jsonify(message=str(e)), 500

    return response
Example #34
def do_search(search_query, offset, result_count, new_domains, framework_slug):
    try:
        sort_dir = list(search_query['sort'][0].values())[0]['order']
    except (KeyError, IndexError):
        sort_dir = 'asc'

    try:
        sort_by = list(search_query['sort'][0].values())[0]['sort_by']
    except (KeyError, IndexError):
        sort_by = None

    try:
        terms = search_query['query']['filtered']['filter']['terms']
    except (KeyError, IndexError):
        terms = {}

    roles_list = None
    seller_types_list = None

    if terms:
        new_domains = 'prices.serviceRole.role' not in terms

        try:
            if new_domains:
                roles_list = terms['domains.assessed']
            else:
                roles = terms['prices.serviceRole.role']
                roles_list = set(_['role'][7:] for _ in roles)
        except KeyError:
            pass

        try:
            seller_types_list = terms['seller_types']
        except:  # noqa
            pass

    try:
        search_term = search_query['query']['match_phrase_prefix']['name']
    except KeyError:
        search_term = ''

    EXCLUDE_LEGACY_ROLES = not current_app.config['LEGACY_ROLE_MAPPING']

    if new_domains:
        q = db.session.query(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
            .outerjoin(SupplierFramework).outerjoin(Framework)
    else:
        q = db.session.query(Supplier).outerjoin(PriceSchedule).outerjoin(ServiceRole) \
            .outerjoin(SupplierFramework).outerjoin(Framework)

    q = q.filter(
        Supplier.status != 'deleted', Supplier.abn != Supplier.DUMMY_ABN,
        or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))

    tsquery = None
    if search_term:
        if any(c in search_term for c in ['#', '-', '_', '/', '\\']):
            tsquery = func.phraseto_tsquery(search_term)
        elif ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        q = q.add_column(
            func.ts_headline(
                'english',
                func.concat(Supplier.summary, ' ',
                            Supplier.data['tools'].astext, ' ',
                            Supplier.data['methodologies'].astext, ' ',
                            Supplier.data['technologies'].astext, ''), tsquery,
                'MaxWords=25, MinWords=20, ShortWord=3, HighlightAll=FALSE, MaxFragments=1'
            ))

    q = q.group_by(Supplier.id)

    try:
        code = search_query['query']['term']['code']
        q = q.filter(Supplier.code == code)
    except KeyError:
        pass

    if roles_list is not None:
        if new_domains:
            if EXCLUDE_LEGACY_ROLES:
                d_agg = postgres.array_agg(cast(Domain.name, TEXT))
                q = q.filter(SupplierDomain.status == 'assessed')
                q = q.having(d_agg.contains(array(roles_list)))
        else:
            sr_agg = postgres.array_agg(
                cast(func.substring(ServiceRole.name, 8), TEXT))
            q = q.having(sr_agg.contains(array(roles_list)))

    if seller_types_list is not None and 'recruiter' in seller_types_list:
        q = q.filter(Supplier.is_recruiter == 'true')
        seller_types_list.remove('recruiter')
        if len(seller_types_list) == 0:
            seller_types_list = None

    if seller_types_list is not None:
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type', )]),
            whereclause=cast(column('value'), Boolean)).as_scalar()

        q = q.filter(selected_seller_types.contains(array(seller_types_list)))

    if sort_by:
        if sort_by == 'latest':
            ob = [desc(Supplier.last_update_time)]
        else:
            ob = [asc(Supplier.name)]
    else:
        if sort_dir == 'desc':
            ob = [desc(Supplier.name)]
        else:
            ob = [asc(Supplier.name)]

    if search_term:
        ob = [desc(func.ts_rank_cd(Supplier.text_vector, tsquery))] + ob

        q = q.filter(Supplier.text_vector.op('@@')(tsquery))

    q = q.order_by(*ob)

    raw_results = list(q)
    results = []

    for x in range(len(raw_results)):
        if type(raw_results[x]) is Supplier:
            result = raw_results[x]
        else:
            result = raw_results[x][0]
            if raw_results[x][1] is not None and raw_results[x][1] != '':
                result.summary = raw_results[x][1]
        results.append(result)

    sliced_results = results[offset:(offset + result_count)]

    q = db.session.query(Supplier.code, Supplier.name, Supplier.summary, Supplier.is_recruiter,
                         Supplier.data, Domain.name.label('domain_name'),
                         SupplierDomain.status.label('domain_status'))\
        .outerjoin(SupplierDomain, Domain)\
        .filter(Supplier.id.in_([sr.id for sr in sliced_results]))\
        .order_by(Supplier.name)

    suppliers = [r._asdict() for r in q]

    sliced_results = []
    for key, group in groupby(suppliers, key=itemgetter('code')):
        supplier = group.next()

        supplier['seller_type'] = supplier.get(
            'data') and supplier['data'].get('seller_type')

        supplier['domains'] = {'assessed': [], 'unassessed': []}
        for s in chain([supplier], group):
            domain, status = s['domain_name'], s['domain_status']
            if domain:
                if status == 'assessed':
                    supplier['domains']['assessed'].append(domain)
                else:
                    supplier['domains']['unassessed'].append(domain)

        for e in ['domain_name', 'domain_status', 'data']:
            supplier.pop(e, None)

        sliced_results.append(supplier)

    return sliced_results, len(results)
Example #35
 def get_current_app_set(self):
     """Use this to get the current application settings, without
     in-thread/memory caching.
     """
     return self.dbsession.query(old_models.ApplicationSettings).order_by(
         desc(old_models.ApplicationSettings.id)).first()
Example #36
    async def fetch_channels(client: ClientConnection, message: dict):
        filters = []
        from_obj = Channel

        if message.get('title', None):
            filters.append(
                or_(Channel.title.ilike(f'%{message["title"]}%'),
                    Channel.description.ilike('%s' % message['title']),
                    func.lower(Tag.name).startswith(message['title'].lower())))
            from_obj = outerjoin(outerjoin(Channel, ChannelTag), Tag)

        if "category_id" in message:
            filters.append(Channel.category_id == message["category_id"])

        if "members" in message:
            filters.append(
                Channel.members.between(message["members"][0],
                                        message["members"][1]))

        if "cost" in message:
            filters.append(
                Channel.cost.between(message["cost"][0], message["cost"][1]))

        if "likes" in message:
            filters.append(
                Channel.likes.between(message["likes"][0],
                                      message["likes"][1]))

        if "mut_promo" in message:
            filters.append(Channel.mutual_promotion == message['mut_promo'])

        if "verified" in message:
            filters.append(Channel.verified == message['verified'])

        if "partner" in message:
            # TODO: proper premium functions implementation required
            filters.append(Channel.vip == message['partner'])

        if 'language' in message:
            filters.append(Channel.language == message['language'].lower())

        total = await pg.fetchval(
            select([count(Channel.id.distinct())
                    ]).select_from(from_obj).where(and_(*filters)))

        if total:
            sel_q = select([Channel
                            ]).select_from(from_obj).where(and_(*filters))

            # Apply ordering
            # TODO: proper premium functions implementation required
            # TODO: manage sorting
            sel_q = sel_q.order_by(desc(Channel.vip), desc(Channel.members),
                                   desc(Channel.cost))

            # Apply Limit/Offset
            sel_q = sel_q.offset(message['offset']).limit(message['count'])

            res = await pg.fetch(sel_q)

            # And finally fetch channel tags
            tag_q = select([ChannelTag, Tag]).\
                select_from(outerjoin(ChannelTag, Tag)).\
                where(ChannelTag.channel_id.in_([item['id'] for item in res]))
            tags_raw = await pg.fetch(tag_q)

            # Serialize all the stuff
            tags_dict = {item['id']: [] for item in res}
            for entry in tags_raw:
                tags_dict[entry['channel_id']].append(entry['name'])
            channels = [
                dict(list(item.items()) + [('tags', tags_dict[item['id']])])
                for item in res
            ]
        else:
            channels = []

        stat_q = select(
            [max(Channel.members),
             max(Channel.cost),
             max(Channel.likes)])
        stats = await pg.fetchrow(stat_q)

        message["data"] = {
            "items": channels,
            "total": total,
            "max_members": stats['max_1'],
            "max_cost": stats['max_2'],
            "max_likes": stats['max_3'],
        }

        await client.send_response(message)
Example #37
 def get_recent_props(self):
     props = self.base_props_selectable()
     Q = session.query(props.c.id, props.c.person, props.c.vote_result, props.c.type)
     Q = Q.filter(props.c.active==False).order_by(desc(props.c.id))
     return Q[:10]
Example #38
        if self.assignments:
            raise ValueError("{0} cannot be enslaved as long as it holds "
                             "addresses.".format(self))
        return value

    @property
    def last_observation(self):
        session = object_session(self)
        q = session.query(ObservedMac)
        q = q.filter_by(mac_address=self.mac)
        # Group the results into 'any port number but zero' and 'port 0'.
        # This prioritizes any port over the uplink port.
        # Saying that port 0 is an uplink port isn't very elegant, also
        # with real port names it's not even true.
        q = q.order_by(desc(case([(ObservedMac.port == "0", 0)], else_=1)))
        q = q.order_by(desc(ObservedMac.last_seen))
        return q.first()

    def __init__(self, **kw):
        """ Overload the Base initializer to prevent null MAC addresses
            where the interface is bootable or is of type 'management'
        """
        super(Interface, self).__init__(**kw)
        self.validate_mac("mac", self.mac)

    def __repr__(self):
        msg = "<{0} {1} of {2}, MAC={3}>".format(self._get_class_label(),
                                                 self.name,
                                                 self.hardware_entity,
                                                 self.mac)
        return msg
Example #39
def ionosphere_echo(base_name, mirage_full_duration):

    logger = logging.getLogger(skyline_app_logger)
    child_process_pid = os.getpid()
    context = 'ionosphere_echo'
    logger.info(
        'ionosphere_echo :: started with child_process_pid - %s for %s' %
        (str(child_process_pid), base_name))
    full_duration_in_hours = int(settings.FULL_DURATION / 60 / 60)

    try:
        # Allow for 3 seconds
        ionosphere_echo_max_fp_create_time = (
            settings.IONOSPHERE_ECHO_MAX_FP_CREATE_TIME - 3)
    except:
        ionosphere_echo_max_fp_create_time = 52
    echo_started_at = int(time())

    def echo_get_an_engine():

        try:
            engine, fail_msg, trace = get_engine(skyline_app)
            return engine, fail_msg, trace
        except:
            trace = traceback.format_exc()
            logger.error('%s' % trace)
            fail_msg = 'error :: ionosphere_echo :: get_an_engine :: failed to get MySQL engine'
            logger.error('%s' % fail_msg)
            return None, fail_msg, trace

    def echo_engine_disposal(engine):
        try:
            if engine:
                try:
                    engine.dispose()
                    logger.info('ionosphere_echo :: MySQL engine disposed of')
                    return True
                except:
                    logger.error(traceback.format_exc())
                    logger.error(
                        'error :: ionosphere_echo :: calling engine.dispose()')
            else:
                logger.info('ionosphere_echo :: no MySQL engine to dispose of')
                return True
        except:
            return False
        return False

    # Determine the metric details from the database
    metrics_id = None
    metric_db_object = None
    engine = None

    # Get the metric db object data from memcache if it exists
    metric_db_object = get_memcache_metric_object(skyline_app, base_name)
    if metric_db_object:
        metrics_id = metric_db_object['id']
        logger.info(
            'ionosphere_echo :: determined metric id %s from memcache for %s' %
            (str(metrics_id), base_name))
    else:
        # Only if no memcache data
        # Get a MySQL engine
        try:
            engine, log_msg, trace = echo_get_an_engine()
            logger.info('ionosphere_echo :: %s' % log_msg)
            logger.info(
                'ionosphere_echo :: determining metric id from DB as not found in memcache for %s'
                % (base_name))
        except:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: ionosphere_echo :: could not get a MySQL engine to get metric_db_object'
            )

        if not engine:
            logger.error(
                'error :: ionosphere_echo :: engine not obtained to get metric_db_object'
            )
            logger.info(
                'ionosphere_echo :: exiting this work but not removing work item, as database may be available again before the work expires'
            )
            return

        try:
            metrics_id, metric_db_object = echo_get_metric_from_metrics(
                base_name, engine)
            echo_engine_disposal(engine)
        except:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: ionosphere_echo :: failed to get the metric details from the database'
            )
            logger.info(
                'ionosphere_echo :: exiting this work but not removing work item, as database may be available again before the work expires'
            )

        if not metrics_id:
            logger.error(
                'error :: ionosphere_echo :: failed to get the metrics_id from the database'
            )
            logger.info(
                'ionosphere_echo :: exiting this work but not removing work item, as database may be available again before the work expires'
            )
            echo_engine_disposal(engine)
            return

        if not metric_db_object:
            logger.error(
                'error :: ionosphere_echo :: failed to get the metric_db_object from the database'
            )
            logger.info(
                'ionosphere_echo :: exiting this work but not removing work item, as database may be available again before the work expires'
            )
            echo_engine_disposal(engine)
            return

    # Determine the metric fp ids from the database
    if not engine:
        logger.info(
            'ionosphere_echo :: getting MySQL engine to determine fp ids for metric id %s - %s'
            % (str(metrics_id), base_name))
        try:
            engine, log_msg, trace = echo_get_an_engine()
            logger.info(log_msg)
        except:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: ionosphere_echo :: could not get a MySQL engine to get fp_ids'
            )

    if not engine:
        logger.error(
            'error :: ionosphere_echo :: engine not obtained to get fp_ids')
        return

    try:
        ionosphere_table, log_msg, trace = ionosphere_table_meta(
            skyline_app, engine)
        logger.info(log_msg)
        logger.info('ionosphere_echo :: ionosphere_table OK')
    except:
        logger.error(traceback.format_exc())
        logger.error(
            'error :: ionosphere_echo :: failed to get ionosphere_table meta for %s'
            % base_name)

    # Determine the fp_ids that exist for the metric
    echo_fp_ids_result = []
    try:
        connection = engine.connect()
        stmt = select([
            ionosphere_table
        ]).where(ionosphere_table.c.metric_id == metrics_id).order_by(
            desc(ionosphere_table.c.id))
        echo_fp_ids = connection.execute(stmt)
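        # Convert each RowProxy into a plain dict so the rows remain usable
        # after the connection is closed below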
        echo_fp_ids_result = [{
            column: value
            for column, value in rowproxy.items()
        } for rowproxy in echo_fp_ids]
        connection.close()
    except:
        logger.error(traceback.format_exc())
        logger.error(
            'error :: ionosphere_echo :: could not determine fp ids from DB for %s'
            % base_name)
        return

    if engine:
        echo_engine_disposal(engine)

    if not echo_fp_ids_result:
        logger.error(
            'error :: ionosphere_echo :: no echo_fp_ids_result - could not determine fp ids from DB for %s'
            % base_name)
    else:
        logger.info(
            'ionosphere_echo :: echo_fp_ids_result - determined fp ids from DB for %s'
            % base_name)

    try:
        db_fp_count = len(echo_fp_ids_result)
        logger.info('ionosphere_echo :: %s features profile ids found' %
                    str(db_fp_count))
    except:
        logger.error(traceback.format_exc())
        logger.error(
            'error :: ionosphere_echo :: could not calculate len of echo_fp_ids_result'
        )

    echo_enabled_mirage_fp_ids = []
    for row in echo_fp_ids_result:
        if row['enabled'] != 1:
            continue
        if row['deleted'] == 1:
            continue
        # Only create a settings.FULL_DURATION features profile for a Mirage
        # features profile if the fp has been validated
        if row['validated'] == 0:
            continue
        if row['full_duration'] == int(mirage_full_duration):
            fp_id = row['id']
            echo_enabled_mirage_fp_ids.append(fp_id)

    echo_enabled_mirage_fp_ids_count = len(echo_enabled_mirage_fp_ids)
    logger.info('ionosphere_echo :: %s Mirage features profile ids found' %
                str(echo_enabled_mirage_fp_ids_count))

    # Check which Mirage features profiles do not have a
    # settings.FULL_DURATION features profile
    mirage_fd_fp_count = 0
    echo_create_fd_fp_for = []
    for validated_mirage_fp_id in echo_enabled_mirage_fp_ids:
        mirage_fd_fp_exists = False
        for row in echo_fp_ids_result:
            if int(row['parent_id']) != int(validated_mirage_fp_id):
                continue
            if int(row['full_duration']) == int(settings.FULL_DURATION):
                mirage_fd_fp_exists = True
                mirage_fd_fp_count += 1
        if not mirage_fd_fp_exists:
            echo_create_fd_fp_for.append(int(validated_mirage_fp_id))

    del echo_enabled_mirage_fp_ids

    logger.info(
        'ionosphere_echo :: there are %s FULL_DURATION features profiles for %s'
        % (str(mirage_fd_fp_count), base_name))

    echo_create_fd_fp_for_count = len(echo_create_fd_fp_for)
    logger.info(
        'ionosphere_echo :: %s FULL_DURATION features profiles to be created for %s'
        % (str(echo_create_fd_fp_for_count), base_name))
    echo_created_fp_count = 0

    last_created_fp = int(time())
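    # For each validated Mirage fp that lacks a FULL_DURATION features
    # profile, stage its training data and create the echo fp, stopping
    # early if the IONOSPHERE_ECHO_MAX_FP_CREATE_TIME budget would be breached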
    for mirage_fp_id in echo_create_fd_fp_for:
        fp_timestamp = None
        for row in echo_fp_ids_result:
            if int(row['id']) != int(mirage_fp_id):
                continue
            else:
                fp_timestamp = int(row['anomaly_timestamp'])
                fp_generation = int(row['generation'])
        if not fp_timestamp:
            continue
        if not fp_generation:
            fp_generation = 0

        time_check_now = int(time())
        echo_runtime = time_check_now - echo_started_at
        if echo_runtime >= ionosphere_echo_max_fp_create_time:
            logger.info(
                'ionosphere_echo :: ionosphere_echo running for %s seconds, exiting before IONOSPHERE_ECHO_MAX_FP_CREATE_TIME of %s seconds is breached'
                % (str(echo_runtime), str(ionosphere_echo_max_fp_create_time)))
            break

        logger.info(
            'ionosphere_echo :: creating FULL_DURATION features profile based on data from fp id %s - %s'
            % (str(mirage_fp_id), base_name))

        context = 'ionosphere_echo'
        ionosphere_job = 'learn_fp_human'
        generation = fp_generation + 1
        fp_learn = False

        # What is the path of the features profile files
        metric_timeseries_dir = base_name.replace('.', '/')
        metric_fp_data_dir = '%s/%s/%s' % (settings.IONOSPHERE_PROFILES_FOLDER,
                                           metric_timeseries_dir,
                                           str(fp_timestamp))

        # What is the path of the new training data dir to copy the files to
        # and create it
        created_ts = int(time())
        # Ensure features profile creation timestamps do not overlap
        if last_created_fp == created_ts:
            sleep(1)
            created_ts = int(time())
        # Record the timestamp on every iteration so consecutive profiles
        # never share a creation timestamp
        last_created_fp = created_ts

        metric_training_data_dir = '%s/%s/%s' % (
            settings.IONOSPHERE_DATA_FOLDER, str(created_ts),
            metric_timeseries_dir)
        if not os.path.exists(metric_training_data_dir):
            try:
                mkdir_p(metric_training_data_dir)
                logger.info(
                    'ionosphere_echo :: training data dir created - %s' %
                    metric_training_data_dir)
            except:
                logger.error(
                    'error :: ionosphere_echo :: failed to create training data dir - %s'
                    % metric_training_data_dir)
                continue

        if not os.path.isdir(metric_fp_data_dir):
            logger.error(
                'error :: ionosphere_echo :: features profile data dir does not exist - %s'
                % metric_fp_data_dir)
            continue

        data_files = []
        try:
            glob_path = '%s/*.*' % metric_fp_data_dir
            data_files = glob.glob(glob_path)
        except:
            trace = traceback.format_exc()
            logger.error('%s' % trace)
            logger.error(
                'error :: ionosphere_echo :: glob could not read - %s' %
                metric_fp_data_dir)

        # Make a list of the files to copy
        copy_files = []
        for i_file in data_files:
            # Exclude all the file resources related to the Mirage
            # full_duration features profile
            if 'matched.fp_id' in i_file:
                continue
            if 'fp.created.txt' in i_file:
                continue
            if 'fp.details.txt' in i_file:
                continue
            if 'csv.features.transposed.csv' in i_file:
                continue
            # Specifically include the required files
            if 'graphite_now' in i_file:
                copy_files.append(i_file)
            echo_metric_txt = '%s.txt' % base_name
            if echo_metric_txt in i_file:
                copy_files.append(i_file)
            echo_metric_json = '%s.json' % base_name
            if echo_metric_json in i_file:
                copy_files.append(i_file)
            if 'mirage.graphite' in i_file:
                copy_files.append(i_file)
            if 'mirage.redis' in i_file:
                copy_files.append(i_file)

        # Copy the required files to the new training_data dir
        for i_file in copy_files:
            copying_filename = os.path.basename(i_file)
            dest_file = '%s/%s' % (metric_training_data_dir, copying_filename)
            if not os.path.isfile(dest_file):
                try:
                    shutil.copy(i_file, metric_training_data_dir)
                    # logger.info('ionosphere_echo ::  training data copied - %s' % (i_file))
                except shutil.Error as e:
                    trace = traceback.format_exc()
                    logger.error('%s' % trace)
                    logger.error(
                        'error :: ionosphere_echo ::  shutil error - training data not copied to %s'
                        % metric_training_data_dir)
                    logger.error('error :: ionosphere_echo ::  %s' % (e))
                # Any error saying that the directory doesn't exist
                except OSError as e:
                    trace = traceback.format_exc()
                    logger.error('%s' % trace)
                    logger.error(
                        'error :: ionosphere_echo ::  OSError error - training data not copied to %s'
                        % metric_training_data_dir)
                    logger.error('error :: %s' % (e))

        calculated_feature_file = '%s/%s.tsfresh.input.csv.features.transposed.csv' % (
            metric_training_data_dir, base_name)
        calculated_feature_file_found = False
        fp_csv = None
        if os.path.isfile(calculated_feature_file):
            calculated_feature_file_found = True
            fp_csv = calculated_feature_file
            logger.info(
                'ionosphere_echo :: calculated features file is available - %s'
                % (calculated_feature_file))
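        # The Mirage Redis timeseries json the features are calculated from
        # when no transposed features csv is already present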
        echo_json_file = '%s.mirage.redis.%sh.json' % (
            base_name, str(full_duration_in_hours))
        if not calculated_feature_file_found:
            logger.info(
                'ionosphere_echo :: calculating features from mirage.redis data ts json - %s'
                % (echo_json_file))
            str_created_ts = str(created_ts)
            try:
                fp_csv, successful, fp_exists, fp_id, log_msg, traceback_format_exc, f_calc = calculate_features_profile(
                    skyline_app, str_created_ts, base_name, context)
            except:
                logger.error(traceback.format_exc())
                logger.error(
                    'error :: ionosphere_echo :: failed to calculate features')
                continue
        else:
            logger.info(
                'ionosphere_echo :: using available calculated features file')

        if os.path.isfile(calculated_feature_file):
            logger.info('ionosphere_echo :: calculated features - %s' %
                        (calculated_feature_file))
        else:
            logger.error(
                'error :: ionosphere_echo :: failed to calculate features - no file found - %s'
                % calculated_feature_file)
            continue

        # Create the new settings.FULL_DURATION features profile
        ionosphere_job = 'learn_fp_human'
        fp_learn = False
        try:
            fp_id, fp_in_successful, fp_exists, fail_msg, traceback_format_exc = create_features_profile(
                skyline_app, created_ts, base_name, context, ionosphere_job,
                mirage_fp_id, generation, fp_learn)
        except:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: ionosphere_echo :: failed to create a settings.FULL_DURATION features profile from fp_id %s for %s'
                % (str(mirage_fp_id), base_name))
            continue
        if not fp_in_successful:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: ionosphere_echo :: create_features_profile failed')
            continue
        else:
            echo_created_fp_count += 1
            logger.info(
                'ionosphere_echo :: new generation %s settings.FULL_DURATION features profile with id %s created from parent features profile with id %s'
                % (str(generation), str(fp_id), str(mirage_fp_id)))

    del echo_create_fd_fp_for
    if engine:
        echo_engine_disposal(engine)
    logger.info(
        'ionosphere_echo :: created %s of %s FULL_DURATION features profiles that were to be created for %s'
        % (str(echo_created_fp_count), str(echo_create_fd_fp_for_count),
           base_name))

    return
Exemple #40
0
def list_inactive(self):
    """Returns a list of banwords that are currently inactive."""
    return list(
        self.dbsession.query(Banword).filter(
            and_(Banword.active == False))  # noqa: E712
        .order_by(desc(Banword.id)))
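A complementary accessor for the active banwords, sketched here under the assumption of the same Banword model and dbsession (the list_active name is illustrative), only flips the filter:

def list_active(self):
    """Returns a list of banwords that are currently active, newest first."""
    return list(
        self.dbsession.query(Banword)
        .filter(Banword.active == True)  # noqa: E712
        .order_by(desc(Banword.id)))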