def rate_object(self, **kwargs):
    """Store a user rating for a host or for one of its devices.

    Expects kwargs:
        ratingID -- either "Host<uuid>" (rate the host itself) or
                    "Host<uuid>@Device<device_id>" (rate one device).
        value    -- the rating, convertible to int.

    Returns an empty dict in all cases (AJAX endpoint convention);
    unrecognised ids fall through without writing anything.
    """
    # 'rating_id' instead of 'id': avoid shadowing the builtin.
    rating_id = kwargs.get("ratingID")
    rating = kwargs.get("value")
    print("ID: %s" % rating_id)
    print("RATING: %s" % rating)
    if rating_id.startswith("Host"):
        sep = rating_id.find("@")
        if sep == -1:
            # Plain host rating: everything after "Host" is the uuid.
            host_uuid = rating_id[4:]
            host = session.query(Host).filter_by(uuid=host_uuid).one()
            host.rating = int(rating)
            session.flush()
            return dict()
        # Compound id: "Host<uuid>@Device<device_id>" rates a device.
        host_uuid = rating_id[4:sep]
        rest = rating_id[sep + 1:]
        if rest.startswith("Device"):
            device_id = int(rest[6:])
            host = session.query(Host).filter_by(uuid=host_uuid).one()
            for device in host.devices:
                if device.device_id == device_id:
                    device.rating = int(rating)
                    session.flush([host, device])
                    return dict()
    return dict()
def get_sa_catalog(domain):
    """
    Retrieves all translations for locale and domain from database
    and stores in thread data.
    """
    if domain is None:
        domain = turbogears.config.get("i18n.domain", "messages")
    # Serve from the per-domain cache when already populated.
    catalog = _catalogs.get(domain)
    if catalog:
        return catalog
    catalog = {}
    domain_row = session.query(TG_Domain).filter(
        TG_Domain.name == domain).first()
    if not domain_row:
        # Unknown domain: return (and do not cache) an empty catalog.
        return catalog
    message_query = session.query(TG_Message).filter(
        TG_Message.domain == domain_row)
    # Group messages by locale: catalog[locale][name] -> text.
    for message in message_query:
        catalog.setdefault(message.locale, {})[message.name] = message.text
    _catalogs[domain_row.name] = catalog
    return catalog
def _handle_historical(session, data, myth_uuid):
    """Replace the stored MythTV 'historical' feature stats for a host.

    Deletes any previous mythtvHistorical rows for myth_uuid, then inserts
    one row built from data['features']['historical'].  Missing keys
    default to 0.
    """
    try:
        myth_hist = data['features']['historical']
    except (KeyError, TypeError):
        # Payload has no historical section (or wrong shape); use defaults.
        myth_hist = {}
    # Drop the old snapshot before inserting the new one.
    session.query(mythtvHistorical).filter_by(myth_uuid=myth_uuid).delete()
    session.add(mythtvHistorical(myth_uuid,
                                 myth_hist.get('showcount', 0),
                                 myth_hist.get('rectime', 0),
                                 myth_hist.get('db_age', 0),
                                 myth_hist.get('reccount', 0)))
    session.flush()
def rate_object(self, **kwargs):
    """Store a user rating for a host or for one of its devices.

    Expects kwargs:
        ratingID -- either "Host<uuid>" (rate the host itself) or
                    "Host<uuid>@Device<device_id>" (rate one device).
        value    -- the rating, convertible to int.

    Returns an empty dict in all cases (AJAX endpoint convention);
    unrecognised ids fall through without writing anything.
    """
    # 'rating_id' instead of 'id': avoid shadowing the builtin.
    rating_id = kwargs.get("ratingID")
    rating = kwargs.get("value")
    print("ID: %s" % rating_id)
    print("RATING: %s" % rating)
    if rating_id.startswith("Host"):
        sep = rating_id.find("@")
        if sep == -1:
            # Plain host rating: everything after "Host" is the uuid.
            host_uuid = rating_id[4:]
            host = session.query(Host).filter_by(uuid=host_uuid).one()
            host.rating = int(rating)
            session.flush()
            return dict()
        # Compound id: "Host<uuid>@Device<device_id>" rates a device.
        host_uuid = rating_id[4:sep]
        rest = rating_id[sep + 1:]
        if rest.startswith("Device"):
            device_id = int(rest[6:])
            host = session.query(Host).filter_by(uuid=host_uuid).one()
            for device in host.devices:
                if device.device_id == device_id:
                    device.rating = int(rating)
                    session.flush([host, device])
                    return dict()
    return dict()
def deleteQuery(self, query_id, *args, **kwargs):
    '''
    Allows user to delete a query. Updates query logging.
    @param query_id: identifies the query
    @return: status of attempted delete operation of the query
    '''
    query = session.query(Query).get_by(query_id=int(query_id))
    status = ""
    if not query:
        status = "Query not found"
    elif session.query(ChatSession).get_by(query_id=int(query_id)):
        # A chat session already exists for this query; too late to delete.
        status = "Chat already started"
    elif query.user_id != identity.current.user.user_id:
        # Only the query's owner may delete it.
        status = "Permission denied"
    else:
        # Detach all experts, log the deletion, then remove the query.
        query.experts[:] = []
        query_log = QueryLog(
            query_id=int(query_id),
            user_id=query.user_id,
            user_name=session.query(User).get_by(
                user_id=query.user_id).user_name,
            created=datetime.now(),
            status='Deleted')
        session.save(query_log)
        session.flush()
        session.delete(query)
        session.flush()
    return dict(status=status)
def _handle_pbp(session, data, machine_id):
    """Replace the stored MythTV playback-profile entry for a machine.

    Reads data['features']['playbackprofile']; missing fields fall back
    to "unknown" placeholders.
    """
    try:
        myth_pb = data['features']['playbackprofile']
    except (KeyError, TypeError):
        # Payload has no playback-profile section; use defaults.
        myth_pb = {}
    name = myth_pb.get('name', "unknown")
    profiles = myth_pb.get('profiles', ['unknown'])
    # Remove old entry
    session.query(mythtvPbp).filter_by(machine_id=machine_id).delete()
    # Add new entry
    session.add(mythtvPbp(machine_id, name, profiles))
    session.flush()
def getStatistics(self, *args, **kwargs):
    '''
    @return: statistics about the user for statistical box
    '''
    current_user = identity.current.user
    user = session.query(User).get_by(user_id=current_user.user_id)
    stats = user.user_stats
    # Truncate long names; names without spaces get cut even shorter so
    # they fit the statistics box.
    display_name = current_user.user_name[:25]
    if " " not in display_name:
        display_name = display_name[:15]
    return dict(
        user_name=display_name,
        n_users=getNumberOfAllUsers(),
        n_users_online=getNumberOfAllUsersCurrentlyOnline(),
        n_queries=session.query(Query).count(),
        n_chats=session.query(ChatSession).count(
            ChatSession.c.status == 'ONGOING'),
        n_answered=stats.no_of_ques_answered,
        n_asked=stats.no_of_ques_asked,
        n_blogs=session.query(BlogEntry).count(
            BlogEntry.c.user_id == current_user.user_id),
    )
def genkey(self):
    """Generate and store a new Yubikey KSM record for the current user.

    Derives a public name from the person's id, generates random internal
    name, AES key and lock code, stores them as a Ykksm row, and returns
    the key material as a single space-separated string.
    """
    username = turbogears.identity.current.user_name
    person = People.by_username(username)
    created = time.strftime("%Y-%m-%dT%H:%M:%S")
    # 12-hex-digit serial derived from the person's id.
    hexctr = "%012x" % person.id
    publicname = hex2modhex(hexctr)
    internalname = gethexrand(12)
    aeskey = gethexrand(32)
    lockcode = gethexrand(12)
    try:
        new_ykksm = Ykksm(serialnr=person.id, publicname=publicname,
                          created=created, internalname=internalname,
                          aeskey=aeskey, lockcode=lockcode, creator=username)
        session.add(new_ykksm)
        session.flush()
    except IntegrityError:
        # A record for this serialnr already exists: delete it and build a
        # replacement with the freshly generated material.
        session.rollback()
        old_ykksm = session.query(Ykksm).filter_by(serialnr=person.id).all()[0]
        session.delete(old_ykksm)
        new_ykksm = Ykksm(serialnr=person.id, publicname=publicname,
                          created=created, internalname=internalname,
                          aeskey=aeskey, lockcode=lockcode, creator=username)
        # NOTE(review): the replacement Ykksm is never session.add()ed in
        # this branch, and the assignment below is a no-op rebinding --
        # unless the mapper auto-saves new instances, the new record may be
        # silently dropped.  Confirm against the ORM configuration.
        old_ykksm = new_ykksm
        session.flush()
    try:
        # Remove any validation-server state recorded for this public name.
        old_ykval = session.query(Ykval).filter_by(
            yk_publicname=publicname).all()[0]
        session.delete(old_ykval)
        session.flush()
    except IndexError:
        # No old record? Maybe they never used their key
        pass
    string = "%s %s %s" % (publicname, internalname, aeskey)
    return dict(key=string)
def _handle_pbp(session, data, machine_id):
    """Replace the stored MythTV playback-profile entry for a machine.

    Reads data['features']['playbackprofile']; missing fields fall back
    to "unknown" placeholders.
    """
    try:
        myth_pb = data['features']['playbackprofile']
    except (KeyError, TypeError):
        # Payload has no playback-profile section; use defaults.
        myth_pb = {}
    name = myth_pb.get('name', "unknown")
    profiles = myth_pb.get('profiles', ['unknown'])
    # Remove old entry
    session.query(mythtvPbp).filter_by(machine_id=machine_id).delete()
    # Add new entry
    session.add(mythtvPbp(machine_id, name, profiles))
    session.flush()
def test_exc_done_rollback():
    """No problems with error handler if controller manually rollbacks."""
    response = make_app(RbRoot).get('/doerr?id=28&dorb=1')
    assert 'KARL27' in response, 'Exception handler should have answered'
    # The rolled-back user must be gone; the handler-created one remains.
    user_query = session.query(User)
    assert user_query.get(28) is None
    assert user_query.get(29) is not None
def _handle_historical(session, data, myth_uuid):
    """Replace the stored MythTV 'historical' feature stats for a host.

    Deletes any previous mythtvHistorical rows for myth_uuid, then inserts
    one row built from data['features']['historical'].  Missing keys
    default to 0.
    """
    try:
        myth_hist = data['features']['historical']
    except (KeyError, TypeError):
        # Payload has no historical section (or wrong shape); use defaults.
        myth_hist = {}
    # Drop the old snapshot before inserting the new one.
    session.query(mythtvHistorical).filter_by(myth_uuid=myth_uuid).delete()
    session.add(mythtvHistorical(myth_uuid,
                                 myth_hist.get('showcount', 0),
                                 myth_hist.get('rectime', 0),
                                 myth_hist.get('db_age', 0),
                                 myth_hist.get('reccount', 0)))
    session.flush()
def show(self, shortname, buildname, epoch, version, rel, arch):
    """Display one package build (exact NEVRA match) within a repo."""
    try:
        repo = session.query(Repo).filter_by(shortname=shortname).one()
    except NoResultFound:
        flash('Repo "%s" was not found' % shortname)
        redirect('/builds')
    # Exact match on every NEVRA component, restricted to this repo.
    criteria = and_(PackageBuild.name == buildname,
                    PackageBuild.epoch == epoch,
                    PackageBuild.version == version,
                    PackageBuild.release == rel,
                    PackageBuild.architecture == arch,
                    Repo.id == repo.id)
    try:
        build = (session.query(PackageBuild)
                 .join(PackageBuild.repos)
                 .filter(criteria)
                 .one())
    except NoResultFound:
        flash('Build (%s-%s:%s-%s.%s) was not found'
              % (buildname, epoch, version, rel, arch))
        redirect('/builds')
    return dict(title=self.app_title, version=release.VERSION, build=build)
def user_name_is_unique(user_name):
    "Return True if the user_name is not yet in the database."
    UserClass = user_class_finder.user_class
    # The name is taken if it exists either as a real user or as a
    # pending registration.
    taken = session.query(UserClass).count_by(user_name=user_name)
    pending = session.query(RegistrationPendingUser).count_by(
        user_name=user_name)
    return not (taken or pending)
def doBlogRating(self, blogentry_id, rate, *args, **kwargs): ''' Calculates blog rating. Updates note's score, score of the user who created it and keeps track of who rated which note (this way user can rate note only once). @param blogentry_id: identifies note @param rate: points given ''' #update the blogentry table entry = session.query(BlogEntry).get_by(blogentry_id=int(blogentry_id)) if not entry: return dict() new_value=round(entry.average_rating*entry.no_ratings) #print 'old blog score', new_value,'old average',entry.average_rating entry.no_ratings+=1 entry.average_rating=(new_value+float(rate))/entry.no_ratings #print 'rate', rate,'new average', entry.average_rating session.flush() #updates the user_blog table new_rate=UserBlog() new_rate.user_id = identity.current.user.user_id new_rate.blogentry_id = blogentry_id new_rate.rating = rate new_rate.created = datetime.datetime.now() session.save(new_rate) session.flush() #updates the user_stats table user=session.query(User).get_by(user_id = entry.user_id) user.user_stats.no_of_blog_ratings+=1 #print 'old averege blog rating',user.user_stats.average_blog_rating user_blogs=session.query(BlogEntry).select(BlogEntry.c.user_id==entry.user_id) #[BlogEntry.c.average_rating, BlogEntry.c.no_ratings] sum=0.0 no_blogs=0 for ub in user_blogs: sum+=ub.average_rating*ub.no_ratings no_blogs+=ub.no_ratings sum=round(sum)/no_blogs #print 'new average blog rating',sum user.user_stats.average_blog_rating=sum #print 'old score',user.user_stats.score #print 'new values', user.user_stats.no_of_ques_answered_rated * user.user_stats.average_rating, user.user_stats.no_of_blog_ratings * user.user_stats.average_blog_rating user.user_stats.score=round(user.user_stats.no_of_ques_answered_rated * user.user_stats.average_rating + user.user_stats.no_of_blog_ratings * user.user_stats.average_blog_rating) #print 'new score',user.user_stats.score session.flush() return dict()
def getIncomingLoad(self):
    '''
    @return: the number of questions in the users' inbox
    '''
    from spree.spree_model import QueryExpert, ChatSession
    # Queries waiting for this expert's handshake ...
    pending = session.query(QueryExpert).count_by(
        and_(QueryExpert.c.expert_id == self.user_id,
             QueryExpert.c.status == 'HANDSHAKE'))
    # ... plus chats that are ongoing or still in handshake.
    chats = session.query(ChatSession).count_by(
        and_(ChatSession.c.expert_id == self.user_id,
             or_(ChatSession.c.status == 'ONGOING',
                 ChatSession.c.status == 'HANDSHAKE')))
    return pending + chats
def test_setup(self):
    "Make sure our setup is what we think it is."
    bobvilla_query = session.query(User).filter_by(user_name='bobvilla')
    all_users = session.query(User).all()
    print(bobvilla_query)
    for user in all_users:
        print(user)
    # Exactly two fixture users, and bobvilla matches the stored one.
    assert bobvilla_query[0].user_id == self.user1.user_id
    assert bobvilla_query[0].email_address == '*****@*****.**'
    assert len(all_users) == 2
def email_is_unique(email):
    "Return True if the email is not yet in the database."
    UserClass = user_class_finder.user_class
    # The address is taken if it belongs to a real user, a pending
    # registration, or a pending email change.
    taken = session.query(UserClass).count_by(email_address=email)
    pending = session.query(RegistrationPendingUser).count_by(
        email_address=email)
    changed = session.query(RegistrationUserEmailChange).count_by(
        new_email_address=email)
    return not (taken or pending or changed)
def getWaitSearchContent(self, query_id, *args, **kwargs):
    '''
    Serves content about the current status of the query (which experts
    are contacted, has anybody declined etc...) to the logged in user,
    after the query has been posted. Also provides related notes.
    @param query_id: identifies the query
    @return: values needed for the template
    '''
    query = session.query(Query).get_by(query_id=int(query_id))
    if not query:
        # Query vanished (e.g. deleted): bounce to the overview page.
        raise redirect("/content/search/getQueryOverviewContent/" + query_id)
    #TODO: add matching logic here!!
    blog_entries = query.relatedBlogs
    length_blog_entries = len(blog_entries)
    # One display dict per related note.
    blogs = [{"topic": entry.title,
              "id": entry.blogentry_id,
              "user_id": entry.user_id,
              "created": helpers.formatDate(entry.created),
              "last_changed": entry.lastChanged,
              "categories": entry.getCategoriesString(),
              "user_name": session.query(User).get_by(
                  user_id=entry.user_id).display_name}
             for entry in blog_entries]
    experts = self.getExperts(int(query_id))
    dic = {"text": query.text,
           "created": helpers.formatDate(query.created),
           "topic": query.getTopicString(True),
           "id": query.query_id,
           "blogs": blogs,
           "lengthBlogEntries": length_blog_entries,
           "query_status": query.status,
           "profile": query.getCategoriesString(),
           "all_experts": experts}
    # Build comma-separated display strings per expert bucket.
    expert_strings = {"experts": "",
                      "experts_accepted": "",
                      "experts_declined": ""}
    for type in expert_strings:
        for expert in experts[type]:
            expert_strings[type] += expert + ", "
    state = experts['state']
    # Keep the client polling until a terminal state is reached.
    dic['doPolling'] = 'True'
    if (state == 'accepted') or (state == 'all_declined') or \
            (state == 'nobody_available'):
        dic['doPolling'] = 'False'
    dic.update(experts)
    dic['expert_strings'] = expert_strings
    return dic
def getQueryDetails(self, query_id, *args, **kwargs):
    '''
    Fetches query detailes.
    @param query_id: identifies query
    @return: the query details for the tooltip
    '''
    query = session.query(Query).get_by(query_id=int(query_id))
    asker = session.query(User).get_by(user_id=query.user_id)
    return dict(topic=query.topic,
                text=query.text,
                user=asker.display_name,
                created=query.created.strftime("%I:%M"))
def _handle_tuners(session, data, myth_uuid):
    """Replace the stored MythTV tuner counts for a host.

    One mythtvtuners row is written per tuner type reported in
    data['features']['tuners'].
    """
    # Drop the previous tuner rows for this host.
    session.query(mythtvtuners).filter_by(myth_uuid=myth_uuid).delete()
    try:
        myth_tuners = data['features']['tuners']
    except (KeyError, TypeError):
        # No tuner data reported; record a single placeholder row.
        myth_tuners = {'unknown': 0}
    for tuner_type, count in myth_tuners.items():
        session.add(mythtvtuners(myth_uuid, tuner_type, count))
    session.flush()
def _handle_grabbers(session, data, myth_uuid):
    """Replace the stored MythTV grabber list for a host.

    One mythtvGrabbers row is written per non-empty grabber name in
    data['features']['grabbers'].
    """
    session.query(mythtvGrabbers).filter_by(myth_uuid=myth_uuid).delete()
    try:
        myth_grabbers = data['features']['grabbers']
    except (KeyError, TypeError):
        # No grabber data reported; record a placeholder entry.
        myth_grabbers = ['unknown']
    for grabber in myth_grabbers:
        if grabber:  # skip empty/None entries
            session.add(mythtvGrabbers(myth_uuid, grabber))
    session.flush()
def getSearchContent(self, *args, **kwargs):
    '''
    Takes the entered query and topic and tries to classify it. It also
    checks that the limit of questions and open chats per user is not
    exceeded.
    @param **kwargs: contains query_id if the question is asked again
    @return: values needed for the template
    '''
    # A re-asked question carries its query_id; a fresh one uses -1.
    if 'query_id' in kwargs:
        query_id = kwargs['query_id']
        query = session.query(Query).get_by(Query.c.query_id == query_id)
        text = query.text
        topic = query.topic
    else:
        text = ''
        topic = ''
        query_id = -1
    user_id = identity.current.user.user_id
    openQueryCount = session.query(Query).count(
        and_(Query.c.user_id == user_id,
             Query.c.status != 'FINISHED'))
    unratedQueryCount = session.query(ChatSession).count(
        and_(ChatSession.c.user_id == user_id,
             ChatSession.c.rating == 0,
             ChatSession.c.status == "FINISHED"))
    # show warning when too many queries are still open
    if openQueryCount + unratedQueryCount >= self.limit:
        raise redirect("/content/search/getSearchContentLimitReached")
    categories = []
    for node in tree_model.getTree().asDict.values():
        if node.name == 'root':
            categories.append([" Not enough data for classification", -1])
        else:
            categories.append([node.getFullPath(), node.node_id])
    categories.sort()
    return dict(categories=categories, text=text, topic=topic,
                query_id=query_id)
def _handle_tuners(session, data, myth_uuid):
    """Replace the stored MythTV tuner counts for a host.

    One mythtvtuners row is written per tuner type reported in
    data['features']['tuners'].
    """
    # Drop the previous tuner rows for this host.
    session.query(mythtvtuners).filter_by(myth_uuid=myth_uuid).delete()
    try:
        myth_tuners = data['features']['tuners']
    except (KeyError, TypeError):
        # No tuner data reported; record a single placeholder row.
        myth_tuners = {'unknown': 0}
    for tuner_type, count in myth_tuners.items():
        session.add(mythtvtuners(myth_uuid, tuner_type, count))
    session.flush()
def _handle_grabbers(session, data, myth_uuid):
    """Replace the stored MythTV grabber list for a host.

    One mythtvGrabbers row is written per non-empty grabber name in
    data['features']['grabbers'].
    """
    session.query(mythtvGrabbers).filter_by(myth_uuid=myth_uuid).delete()
    try:
        myth_grabbers = data['features']['grabbers']
    except (KeyError, TypeError):
        # No grabber data reported; record a placeholder entry.
        myth_grabbers = ['unknown']
    for grabber in myth_grabbers:
        if grabber:  # skip empty/None entries
            session.add(mythtvGrabbers(myth_uuid, grabber))
    session.flush()
def test_exc_rollback():
    """An exception within a controller method causes a rollback.

    Try to create a user that should rollback because of an exception so
    user 25 should not exist, but user 26 should be present since it is
    created by the exception handler.
    """
    response = make_app(RbRoot).get('/doerr?id=26')
    assert 'KARL27' in response, 'Exception handler should have answered'
    user_query = session.query(User)
    assert user_query.get(26) is None
    assert user_query.get(27) is not None
def __common(self, method=None):
    """Build the data source named by 'method' plus the matching Spy."""
    # Lazily-evaluated builders: only the selected one runs.
    builders = {
        "Q": lambda: session.query(Address),
        "QA": lambda: session.query(Address).all(),
        "SO": lambda: SOAddress.select(),
        "SL": lambda: list(SOAddress.select()),
    }
    if method not in builders:
        raise ValueError("Invalid method %r" % method)
    data = builders[method]()
    spy = Spy(var_name="data", pages=xrange(1, 3), limit=10,
              page_count=2, order=None, row_count=16)
    return dict(data=data, spy=spy)
def zero_limit(self, method=None):
    """Build the data source named by 'method' with a single full page.

    Same access methods as __common, but the Spy expects everything on
    one page (limit == row_count == 16).
    """
    if method == "Q":
        data = session.query(Address)
    elif method == "QA":
        data = session.query(Address).all()
    elif method == "SO":
        data = SOAddress.select()
    elif method == "SL":
        data = list(SOAddress.select())
    else:
        # ValueError with the offending value, consistent with __common
        # (and still caught by callers handling Exception).
        raise ValueError("Invalid method %r" % method)
    spy = Spy(var_name="data", pages=xrange(1, 2), limit=16,
              page_count=1, order=None, row_count=16)
    return dict(data=data, spy=spy)
def do_BlogPost(self, *args, **kwargs):
    '''
    Creates a new note entry or updates an existing one.

    @param **kwargs: contains blogentry_id, text and title of the note,
    and information whether the note is private; may also carry query_id
    to attach the note to a query.
    '''
    # 'entry_id' instead of 'id': avoid shadowing the builtin.
    entry_id = int(kwargs['blogentry_id'])
    private = kwargs['private'] == "true"
    query_id = None
    # NOTE(review): kwargs['query_id'] may be a string here; under
    # Python 2, str > int compares by type name and is always True,
    # so this guard likely always passes -- confirm caller's type.
    if 'query_id' in kwargs and kwargs['query_id'] > -1:
        query_id = int(kwargs['query_id'])
    # Enforce storage limits on text and title.
    text = kwargs['text'][:16250]
    title = kwargs['title'][:50]
    if entry_id < 0:
        # Negative id means "create a new entry".
        b = BlogEntry()
        b.title = title
        b.text = text
        b.private = private
        b.user_id = identity.current.user.user_id
        subtree = self.getBlogSubtree("", b.text + " " + b.title)
        b.profile_subtree = cPickle.dumps(subtree)
        if query_id:
            b.query_id = query_id
        # New note: bump the author's blog counter.
        user = session.query(User).get_by(user_id=b.user_id)
        user.user_stats.no_of_blogs += 1
    else:
        # Existing note: update in place and re-derive its profile.
        b = session.query(BlogEntry).get_by(blogentry_id=entry_id)
        b.title = title
        b.text = text
        b.private = private
        subtree = self.getBlogSubtree("", b.text + " " + b.title)
        b.profile_subtree = cPickle.dumps(subtree)
        if query_id:
            b.query_id = query_id
    session.save(b)
    session.flush()
    return dict()
def _handle_scheduler(session, data, myth_uuid):
    """Replace the stored MythTV scheduler statistics for a host.

    Reads data['features']['scheduler']; missing keys default to 0.
    """
    try:
        myth_sch = data['features']['scheduler']
    except (KeyError, TypeError):
        # Payload has no scheduler section; fall back to all zeros.
        myth_sch = {}
    # Remove old entry
    session.query(mythtvScheduler).filter_by(myth_uuid=myth_uuid).delete()
    # Add new entry
    session.add(mythtvScheduler(myth_uuid,
                                myth_sch.get('count', 0),
                                myth_sch.get('place_stddev', 0),
                                myth_sch.get('match_stddev', 0),
                                myth_sch.get('match_avg', 0),
                                myth_sch.get('place_avg', 0)))
    session.flush()
def getQueryOverviewContent(self, query_id, *args, **kwargs):
    '''
    Serves information about query to the expert who got it.
    @param query_id: identifies the query
    @return: values needed for the template
    '''
    query = session.query(Query).get_by(query_id=int(query_id))
    if query:
        # A chat session attached to the query means someone already took it.
        taken = query.chatSession != None
        declined = False
        isContactedExpert = False
        # the expert was returnd by the matching algo
        if not taken:
            try:
                contact = session.query(QueryExpert).select(
                    and_(QueryExpert.c.query_id == int(query_id),
                         QueryExpert.c.expert_id ==
                         identity.current.user.user_id))[0]
                if contact and contact.status == "DECLINED":
                    declined = True
                isContactedExpert = True
            except:
                # No QueryExpert row for this user/query (IndexError from
                # the [0] lookup): the current user was not matched.
                pass
        # An uncontacted expert with more than 4 inbox items is overloaded.
        isOverloaded = not isContactedExpert and session.query(User).get_by(
            user_id=identity.current.user.user_id).getIncomingLoad() > 4
        date = helpers.formatDate(query.created)
        return {"text": query.text,
                "topic": query.getTopicString(False),
                "id": query.query_id,
                "date": date,
                "taken": taken,
                "declined": declined,
                "categories": query.getCategoriesString(),
                "user": session.query(User).get_by(
                    user_id=query.user_id).display_name,
                "user_id": query.user_id,
                "isContactedExpert": isContactedExpert,
                "isOverloaded": isOverloaded}
    # no query
    else:
        return {"deleted": True}
def test_query_in_session():
    # Insert a row directly, then fetch it through the ORM session.
    insert_stmt = users_table.insert()
    insert_stmt.execute(user_name="globbo", password="******")
    globbo = session.query(User).filter_by(user_name="globbo").one()
    assert globbo.password == "thegreat!"
    # Clean up the fixture row.
    users_table.delete().execute()
def filter(self, filter):
    """
    .. seealso:: :meth:`distrotrees.filter`

    Returns a list of details for distros filtered by the given criteria.

    The *filter* argument must be an XML-RPC structure (dict) specifying
    filter criteria. The following keys are recognised:

        'name'
            Distro name. May include % SQL wildcards, for example
            ``'%20101121.nightly'``.
        'family'
            Distro family name, for example
            ``'RedHatEnterpriseLinuxServer5'``. Matches are exact.
        'distroid'
            Distro id. Matches are exact.
        'tags'
            List of distro tags, for example ``['STABLE', 'RELEASED']``.
            All given tags must be present on the distro for it to match.
        'limit'
            Integer limit to number of distros returned.

    The return value is an array with one element per distro (up to the
    maximum number of distros given by 'limit'). Each element is an
    XML-RPC structure (dict) describing a distro.

    .. versionchanged:: 0.9
       Some return columns were removed, because they no longer apply to
       distros in Beaker. Use the new :meth:`distrotrees.filter` method
       to fetch details of distro trees.
    """
    query = session.query(Distro)
    name = filter.get('name')
    family = filter.get('family')
    distroid = filter.get('distroid')
    tags = filter.get('tags') or []
    limit = filter.get('limit')
    # Every requested tag must be present on the distro.
    for tag in tags:
        query = query.filter(Distro._tags.any(DistroTag.tag == tag))
    if name:
        query = query.filter(Distro.name.like('%s' % name))
    if distroid:
        query = query.filter(Distro.id == int(distroid))
    if family:
        query = query.join(Distro.osversion, OSVersion.osmajor)
        query = query.filter(OSMajor.osmajor == '%s' % family)
    # we only want distros that are active in at least one lab controller
    query = query.filter(
        Distro.trees.any(DistroTree.lab_controller_assocs.any()))
    query = query.order_by(Distro.date_created.desc())
    if limit:
        query = query[:limit]
    return [{'distro_id': distro.id,
             'distro_name': distro.name,
             'distro_version': unicode(distro.osversion),
             'distro_tags': [unicode(tag) for tag in distro.tags],
             } for distro in query]
def show_all(self, uuid, admin):
    """Render the host-detail view for the host with the given public uuid."""
    try:
        uuid = (u'%s' % uuid.strip()).encode('utf8')
    except:
        raise ValueError("Critical: Unicode Issue - Tell Mike!")
    try:
        host_object = session.query(Host).filter_by(pub_uuid=uuid).one()
    except:
        raise ValueError("Critical: UUID Not Found - %s" % uuid)
    if admin:
        admin = self.token.check_admin_token(admin, host_object.uuid)
    # This is to prevent duplicate devices showing up, in the future,
    # there will be no dups in the database.
    unique_devices = {}
    for dev in host_object.devices:
        unique_devices[dev.device_id] = (dev.device, dev.rating)
    device_list = unique_devices.values()
    device_list.sort(key=lambda pair: pair[0].cls)
    return dict(host_object=host_object,
                host_link=getHostWikiLink(host_object),
                devices=device_list,
                ven=DeviceMap('pci'),
                ratingwidget=SingleRatingWidget(),
                getDeviceWikiLink=getDeviceWikiLink,
                getOSWikiLink=getOSWikiLink,
                admin=admin)
def groups(self, groups=None, *args, **kw):
    """List all groups in a paginated grid with edit and delete links."""
    if groups is None:
        groups = session.query(Group)

    def get_remove_link(group):
        # Delete link only for editable, unprotected groups; anonymous
        # users trigger AttributeError and get no link at all.
        try:
            if (group.can_edit(identity.current.user)
                    and not group.is_protected_group()):
                return self.delete_link.display(
                    dict(group_id=group.group_id),
                    action=url('remove'),
                    action_text='Delete Group')
            return ''
        except AttributeError:
            return ''

    grid_fields = [
        ('Group Name', lambda group: make_link(
            'edit?group_id=%s' % group.group_id, group.group_name)),
        ('Display Name', lambda group: group.display_name),
        ('', get_remove_link),
    ]
    add_action = './new' if not identity.current.anonymous else None
    grid = myPaginateDataGrid(fields=grid_fields, add_action=add_action)
    return dict(title=u"Groups",
                grid=grid,
                search_bar=None,
                search_widget=self.search_widget_form,
                list=groups)
def validate_identity(self, user_name, password, visit_key):
    """Validate the identity represented by user_name using the password.

    Must return either None if the credentials weren't valid or an object
    with the following properties:
        user_name: original user name
        user: a provider dependent object (TG_User or similar)
        groups: a set of group names
        permissions: a set of permission names
    """
    user = session.query(user_class).filter_by(user_name=user_name).first()
    # Unknown user: log and reject.
    if not user:
        log.warning("No such user: %s", user_name)
        return None
    # Wrong password: log and reject.
    if not self.validate_password(user, user_name, password):
        log.info("Passwords don't match for user: %s", user_name)
        return None
    log.info("Associating user (%s) with visit (%s)", user_name, visit_key)
    return SqlAlchemyIdentity(visit_key, user)
def groups(self, groups=None, *args, **kw):
    """List all groups in a paginated grid with edit and delete links."""
    if groups is None:
        groups = session.query(Group)

    def get_remove_link(group):
        # Delete link only for editable, unprotected groups; anonymous
        # users trigger AttributeError and get no link at all.
        try:
            if (group.can_edit(identity.current.user)
                    and not group.is_protected_group()):
                return self.delete_link.display(
                    dict(group_id=group.group_id),
                    action=url('remove'),
                    action_text='Delete Group')
            return ''
        except AttributeError:
            return ''

    grid_fields = [
        ('Group Name', lambda group: make_link(
            'edit?group_id=%s' % group.group_id, group.group_name)),
        ('Display Name', lambda group: group.display_name),
        ('', get_remove_link),
    ]
    add_action = './new' if not identity.current.anonymous else None
    grid = myPaginateDataGrid(fields=grid_fields, add_action=add_action)
    return dict(title=u"Groups",
                grid=grid,
                search_bar=None,
                search_widget=self.search_widget_form,
                list=groups)
def getExpert(self):
    '''
    @return: user object of the expert who is chatting
    '''
    from spree.model import User
    user_query = session.query(User)
    return user_query.get_by(User.c.user_id == self.expert_id)
def create_tables():
    '''
    Creates the appropriate database tables.
    '''
    Stopword.table.create(checkfirst=True)
    # Seed the stopword list only when the table is freshly empty.
    if not session.query(Stopword).count():
        load(default_stopwards_file)
def _handle_call_flags(session, data, machine_id):
    """Synchronise the stored CFLAGS/CXXFLAGS/LDFLAGS/MAKEOPTS lists for a
    machine with the submitted data.

    Flag order is preserved via each relation row's 'position' column; when
    the submitted list differs from the stored one the whole class is
    rewritten.  Pooled string rows are created lazily on first use.
    """
    for call_flag_class_upper in ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'MAKEOPTS'):
        # Look up (or lazily create) the pooled row for this flag class.
        try:
            call_flag_class_object = session.query(
                GentooCallFlagClassString).filter_by(
                    name=call_flag_class_upper).one()
        except sqlalchemy.orm.exc.NoResultFound:
            call_flag_class_object = GentooCallFlagClassString(
                call_flag_class_upper)
            session.add(call_flag_class_object)
            session.flush()
        call_flag_class_id = call_flag_class_object.id
        # Find current entries
        try:
            current_call_flag_list = data['call_flags'][
                call_flag_class_upper.lower()]
        except KeyError:
            current_call_flag_list = []
        # Find old entries
        old_call_flag_objects = session.query(GentooCallFlagRel).options(
            eagerload('call_flag')).filter_by(
                machine_id=machine_id,
                call_flag_class_id=call_flag_class_id).all()
        # Re-construct call flag list, ordered by stored position.
        old_call_flag_list = [a.call_flag.name for a in sorted(
            [e for e in old_call_flag_objects], key=lambda x: x.position)]
        # Consistent data?  (mismatched lengths mean corrupt positions;
        # force a rewrite by making the comparison below fail)
        if len(old_call_flag_list) != len(old_call_flag_objects):
            old_call_flag_list = None
        # Calculate diff (Python 2 'cmp': 0 means the lists are equal)
        if cmp(current_call_flag_list, old_call_flag_list) != 0:
            # Resolve diff: drop all old rows, then re-insert in order.
            for e in old_call_flag_objects:
                session.delete(e)
            if old_call_flag_objects:
                session.flush()
            for position, call_flag in enumerate(current_call_flag_list):
                try:
                    call_flag_object = session.query(
                        GentooCallFlagString).filter_by(
                            name=call_flag).one()
                except sqlalchemy.orm.exc.NoResultFound:
                    call_flag_object = GentooCallFlagString(call_flag)
                    session.add(call_flag_object)
                    session.flush()
                call_flag_id = call_flag_object.id
                session.add(GentooCallFlagRel(machine_id, call_flag_class_id,
                                              call_flag_id, position))
        # NOTE(review): flush placement reconstructed as once per flag
        # class -- confirm against original formatting.
        session.flush()
def by_user_name(cls, user_name):
    """Look up User by given user name.

    This class method that permits to search users based on their
    user_name attribute.
    """
    user_query = session.query(cls)
    return user_query.filter_by(user_name=user_name).first()
def _handle_package_mask(session, data, machine_id):
    """Synchronise stored user package.mask entries for a machine.

    Each (package, atom) pair from data['user_package_mask'] becomes one
    GentooPackageMaskRel row; only the set difference against the stored
    rows is written back.
    """
    # Find current entries
    try:
        package_mask = data['user_package_mask']
    except KeyError:
        package_mask = {}
    current_package_mask_set = set()
    for package, atoms in package_mask.items():
        for i in atoms:
            key = (package, i)
            current_package_mask_set.add(key)
    # Find old entries
    old_package_mask_rel_objects = session.query(
        GentooPackageMaskRel).options(
            eagerload('package'),
            eagerload('atom')).filter_by(machine_id=machine_id).all()
    old_package_mask_dict = {}
    for e in old_package_mask_rel_objects:
        key = (e.package.name, e.atom.name)
        old_package_mask_dict[key] = e
    old_package_mask_set = set(old_package_mask_dict.keys())
    # Calculate diff
    mask_entries_to_add = current_package_mask_set - old_package_mask_set
    mask_entries_to_remove = old_package_mask_set - current_package_mask_set
    # Resolve diff
    for i in mask_entries_to_remove:
        session.delete(old_package_mask_dict[i])
    if mask_entries_to_remove:
        session.flush()
    for i in mask_entries_to_add:
        package, atom = i
        # HACK: each job generates lookup-or-insert code from a template
        # and exec()s it, which binds <thing>_pool_object into local scope.
        # NOTE(review): exec of dynamically built code is fragile and
        # opaque -- consider replacing with a plain helper function.
        lookup_or_add_jobs = (
            {'thing': 'atom', },
            {'thing': 'package', },
        )
        for job in lookup_or_add_jobs:
            thing = job['thing']
            details = {
                'class_name': pool_class_name(thing, vector_flag=False),
                'source_var_name': thing,
                'new_object_name': '%s_pool_object' % thing
            }
            program = _LOOKUP_OR_ADD_TEMPLATE % details
            dump_gentoo_python_code(program)
            exec(program)
        session.flush()
        # package_pool_object / atom_pool_object were bound by exec above.
        package_id = package_pool_object.id
        atom_id = atom_pool_object.id
        mask_rel_object = GentooPackageMaskRel(machine_id, package_id,
                                               atom_id)
        session.add(mask_rel_object)
def _handle_accept_keywords(session, data, machine_id):
    """Synchronise stored ACCEPT_KEYWORDS entries for a machine.

    Keywords prefixed with '~' are recorded as unstable (stable=False),
    plain keywords as stable=True.  Only the set difference against the
    stored rows is written back.
    """
    # Find current entries
    try:
        accept_keywords = data['accept_keywords']
    except KeyError:
        accept_keywords = {}
    current_accept_keywords_set = set()
    for i in accept_keywords:
        if i.startswith('~'):
            # NOTE: lstrip('~') removes *all* leading tildes, not just one.
            key = (i.lstrip('~'), False)
        else:
            key = (i, True)
        current_accept_keywords_set.add(key)
    # Find old entries
    old_accept_keywords_objects = session.query(
        GentooAcceptKeywordRel).options(
            eagerload('keyword')).filter_by(machine_id=machine_id).all()
    old_accept_keywords_dict = {}
    for i in old_accept_keywords_objects:
        key = (i.keyword.name, bool(i.stable))
        old_accept_keywords_dict[key] = i
    old_accept_keywords_set = set(old_accept_keywords_dict.keys())
    # Calculate diff
    mappings_to_add = current_accept_keywords_set - old_accept_keywords_set
    mappings_to_remove = old_accept_keywords_set - current_accept_keywords_set
    # Resolve diff
    for i in mappings_to_remove:
        session.delete(old_accept_keywords_dict[i])
    if mappings_to_remove:
        session.flush()
    for i in mappings_to_add:
        keyword, stable = i
        # Pooled keyword string: look up, creating lazily on first use.
        try:
            pool_object = session.query(GentooKeywordString).filter_by(
                name=keyword).one()
        except sqlalchemy.orm.exc.NoResultFound:
            pool_object = GentooKeywordString(keyword)
            session.add(pool_object)
            session.flush()
        keyword_id = pool_object.id
        session.add(GentooAcceptKeywordRel(machine_id, keyword_id, stable))
    session.flush()
def _handle_scheduler(session, data, myth_uuid):
    """Replace the stored MythTV scheduler statistics for a host.

    Reads data['features']['scheduler']; missing keys default to 0.
    """
    try:
        myth_sch = data['features']['scheduler']
    except (KeyError, TypeError):
        # Payload has no scheduler section; fall back to all zeros.
        myth_sch = {}
    # Remove old entry
    session.query(mythtvScheduler).filter_by(myth_uuid=myth_uuid).delete()
    # Add new entry
    session.add(mythtvScheduler(myth_uuid,
                                myth_sch.get('count', 0),
                                myth_sch.get('place_stddev', 0),
                                myth_sch.get('match_stddev', 0),
                                myth_sch.get('match_avg', 0),
                                myth_sch.get('place_avg', 0)))
    session.flush()
def by_email_address(cls, email_address):
    """Look up User by given email address.

    This class method that can be used to search users based on their
    email addresses since it is unique.
    """
    user_query = session.query(cls)
    return user_query.filter_by(email_address=email_address).first()
def find(cls, language):
    '''Returns a shortname after searching for both short and longname.

    :arg name: a short or long Language name
    '''
    #pylint:disable-msg=E1101
    matches_either = or_(Language.name == language,
                         Language.shortname == language)
    return session.query(Language).filter(matches_either).one()
def _make_and_go_to_owner_page(self, user, group, set_owner=True):
    """Optionally promote *user* to owner of *group*, then open the group's
    page from the 'My Groups' listing in the browser."""
    if set_owner:
        with session.begin():
            membership = session.query(UserGroup).filter_by(
                    user_id=user.user_id,
                    group_id=group.group_id).one()
            membership.is_owner = True
    self.browser.get(get_server_base() + 'groups/mine')
    self.browser.find_element_by_link_text(group.group_name).click()
def index(self):
    """List all distro tags in a sortable, paginated grid."""
    tags = session.query(DistroTag)
    # Each tag cell links through to the tag-search page for that tag.
    tag_link = lambda x: make_link(
            url='../distros/tagsearch/?tag=%s' % x.tag, text=x.tag)
    tags_grid = widgets.PaginateDataGrid(fields=[
        widgets.PaginateDataGrid.Column(name='tag', getter=tag_link,
                                        title='Tag',
                                        options=dict(sortable=True)),
    ])
    return dict(title="Tags", grid=tags_grid, search_bar=None, list=tags)
def host_rating(self, vendor, system):
    """Return {rating value: number of hosts} for the given vendor/system."""
    base = session.query(Host).filter_by(vendor=vendor, system=system)
    counted = compat.add_column(
            base,
            func.count(Host.rating).label('count')).group_by(Host.rating)
    # Each row is (Host, count); key the result dict by the rating value.
    ratings = dict((row[0].rating, row[1]) for row in counted)
    return dict(ratings=ratings)
def test_task_update_task_not_available_404(self):
    """PATCHing a nonexistent task id must yield 404 with a clear message."""
    req_sess = requests.Session()
    # Pick an id one past the current maximum so it cannot exist.
    with session.begin():
        highest = session.query(func.max(Task.id)).first()
        fake_id = highest[0] + 1
    requests_login(req_sess, data_setup.ADMIN_USER, data_setup.ADMIN_PASSWORD)
    response = patch_json(get_server_base() + 'tasks/%s' % fake_id,
                          session=req_sess, data={'disabled': True})
    self.assertEqual(response.status_code, 404)
    self.assertEqual(response.text, 'Task %s does not exist' % fake_id)
def _handle_privacy_metrics(session, data, machine_id):
    """Synchronize the machine's privacy metric rows with what it reported.

    Rows no longer reported are deleted, new ones are added, and data-class
    names are pooled in GentooDataClassString.
    """
    try:
        reported = data['privacy_metrics']
    except KeyError:
        reported = {}

    # Desired state: (class-name, revealed, count_private, count_non_private).
    wanted = set()
    for name, values in reported.items():
        wanted.add((name, ) + tuple(values))

    # Present state: existing relation rows keyed the same way.
    existing_rows = session.query(GentooPrivacyMetricRel).options(
            eagerload('data_class')).filter_by(machine_id=machine_id).all()
    existing = {}
    for rel in existing_rows:
        key = (rel.data_class.name, bool(rel.revealed),
               rel.count_private, rel.count_non_private)
        existing[key] = rel

    to_remove = set(existing) - wanted
    to_add = wanted - set(existing)

    # Drop stale mappings first so re-added rows cannot collide.
    for key in to_remove:
        session.delete(existing[key])
    if to_remove:
        session.flush()

    for data_class, revealed, count_private, count_non_private in to_add:
        # Re-use the pooled data-class string if it exists, else create it.
        try:
            pooled = session.query(GentooDataClassString).filter_by(
                    name=data_class).one()
        except sqlalchemy.orm.exc.NoResultFound:
            pooled = GentooDataClassString(data_class)
            session.add(pooled)
            session.flush()
        session.add(GentooPrivacyMetricRel(machine_id, pooled.id, revealed,
                                           count_private, count_non_private))
    session.flush()
def get_osmajors(self, tags=None):
    """ Returns a list of all distro families.

    If *tags* is given, limits to distros with at least one of the given
    tags (and which are present on some lab controller).
    """
    query = session.query(OSMajor.osmajor)
    if tags:
        query = (query
                .join(OSMajor.osversions, OSVersion.distros, Distro.trees)
                .filter(DistroTree.lab_controller_assocs.any())
                .filter(Distro._tags.any(DistroTag.tag.in_(tags))))
    return [name for name, in query.distinct()]
def test_implicit_trans_no_error():
    """If a controller runs successfully, the transaction is committed."""
    capture_log("turbogears.database")
    cherrypy.root = MyRoot()
    create_request("/no_error?name=A.%20Dent")
    print_log()
    # SQLAlchemy < 0.5.1 spells expunge_all() as clear().
    try:
        session.expunge_all()
    except AttributeError:
        session.clear()
    # .one() raises if the row was not committed, which is the real check.
    arthur = session.query(Person).filter_by(name="A. Dent").one()
def test_active_mapper():
    """A flushed entity can be reloaded as a distinct object with its
    relations intact."""
    person = Person(name="Ford Prefect")
    home = Address(address="1 West Guildford", city="Betelgeuse")
    person.addresses.append(home)
    session.flush()
    # SQLAlchemy < 0.5.1 spells expunge_all() as clear().
    try:
        session.expunge_all()
    except AttributeError:
        session.clear()
    ford = session.query(Person).filter_by(name="Ford Prefect").one()
    assert ford is not person
    assert len(ford.addresses) == 1
def _handle_logs(session, data, myth_uuid):
    """Replace the stored MythTV log-urgency counters for *myth_uuid*.

    Reads data['features']['logurgency'] (a dict of per-urgency counts).
    A missing or malformed section falls back to -1 for every counter,
    matching the old best-effort behaviour — but without the original bare
    ``except:`` clauses, which silently swallowed *every* exception (even
    SystemExit and KeyboardInterrupt).
    """
    try:
        myth_log = data['features']['logurgency']
    except (KeyError, TypeError):
        myth_log = {}
    if not isinstance(myth_log, dict):
        # Malformed payload: treat it like a missing section.
        myth_log = {}

    crit = myth_log.get('CRIT', -1)
    info = myth_log.get('INFO', -1)
    notice = myth_log.get('NOTICE', -1)
    warning = myth_log.get('WARNING', -1)
    err = myth_log.get('ERR', -1)

    #Remove old entry
    session.query(mythtvLogUrgency).filter_by(myth_uuid=myth_uuid).delete()
    #Add new entry
    session.add(mythtvLogUrgency(myth_uuid, crit, info, notice, warning, err))
    session.flush()
def index(self):
    """List all lab controllers with edit/remove links."""
    labcontrollers = session.query(LabController)
    columns = [
        ('FQDN', lambda x: make_edit_link(x.fqdn, x.id)),
        ('Disabled', lambda x: x.disabled),
        ('Removed', lambda x: x.removed),
        (' ', lambda x: self.make_lc_remove_link(x)),
    ]
    labcontrollers_grid = LabControllerDataGrid(fields=columns,
                                                add_action='./new')
    return dict(title="Lab Controllers", grid=labcontrollers_grid,
                search_bar=None, list=labcontrollers)
def delete(self, uuid):
    """Delete the Host row identified by *uuid*.

    NOTE(review): this method reports its outcome by *raising* ValueError
    in every path, including success — presumably the caller parses the
    message; preserved as-is.  The original bare ``except:`` clauses would
    also have trapped SystemExit/KeyboardInterrupt; narrowed to Exception.
    """
    # TODO also search and clean batch queue?
    try:
        host = session.query(Host).filter_by(uuid=uuid).one()
    except Exception:
        # Lookup failed (no such row, or multiple rows).
        raise ValueError("Critical: UUID does not exist %s " % uuid)
    try:
        session.delete(host)
        session.flush()
    except Exception:
        raise ValueError(
            "Critical: Could not delete UUID - Please contact the smolt development team"
        )
    raise ValueError('Success: UUID Removed')
def test_invalid_newowner_errors(self):
    """If an invalid username is passed as a new owner, we expect the
    command to error without changing the system."""
    invalid_username = u'asdfasdfasdf'
    with session.begin():
        user = data_setup.create_user()
        data_setup.create_system()
        # Sanity check: the bogus name really has no account.
        self.assertFalse(session.query(User).filter_by(
                user_name=invalid_username).count())
    try:
        run_client(['bkr', 'remove-account',
                    '--new-owner=%s' % invalid_username, user.user_name])
        self.fail('Expected client to fail due to invalid new owner')
    # 'except X, e' is obsolete syntax; 'as' works on Python >= 2.6 and 3.
    except ClientError as e:
        self.assertIn('Invalid user name for owner', e.stderr_output)