def get_toxresults_info(linkstore, for_link, newest=True):
    """Collect per-testenv info dicts from the toxresult links of *for_link*.

    Links are visited newest-first; when *newest* is true only the most
    recent result per (host, platform, envname) combination is reported.
    Results that cannot be parsed are logged and skipped.
    """
    infos = []
    seen_keys = set()
    links = linkstore.get_links(rel="toxresult", for_entrypath=for_link)
    for link in reversed(links):
        try:
            data = load_toxresult(link)
            host = data["host"]
            platform = data["platform"]
            for envname, env in data["testenvs"].items():
                key = (host, platform, envname)
                if key in seen_keys:
                    continue
                if newest:
                    # remember the combination so older results are skipped
                    seen_keys.add(key)
                info = dict(
                    basename=link.basename,
                    _key="-".join(key),
                    host=host,
                    platform=platform,
                    envname=envname)
                info["setup"] = _get_commands_info(env.get("setup", []))
                try:
                    # first whitespace-separated token of the version string
                    info["pyversion"] = env["python"]["version"].split(None, 1)[0]
                except KeyError:
                    pass
                info["test"] = _get_commands_info(env.get("test", []))
                info['failed'] = info["setup"]["failed"] or info["test"]["failed"]
                infos.append(info)
        except Exception:
            log.exception("Couldn't parse test results %s." % link.basename)
    return infos
def add_document(**fields):
    """Forward *fields* to ``writer.add_document``.

    On any failure the offending payload is logged for diagnosis and the
    exception is re-raised so the caller can abort the write.
    """
    try:
        writer.add_document(**fields)
    except Exception:
        msg = (
            "Exception while trying to add the following data to the search index:\n%r"
            % fields)
        log.exception(msg)
        raise
def xmlrpc_search(self):
    """Answer an XMLRPC search request.

    On success the hits are serialized as a method response; on any error
    the exception is logged and returned to the client as a Fault.
    """
    try:
        query = self.query_from_xmlrpc(self.request.body)
        log.debug("xmlrpc_search {0}".format(query))
        hits = self.search_index_packages(query)
        payload = dumps((hits,), methodresponse=1, encoding='utf-8')
    except Exception as e:
        log.exception("Error in xmlrpc_search")
        payload = dumps(Fault(1, repr(e)), encoding='utf-8')
    return Response(payload)
def xmlrpc_search(self):
    """Answer an XMLRPC search request via the registry's search index.

    The query is run against the stage resolution order of the current
    context; errors are logged and reported to the client as a Fault.
    """
    try:
        parsed_query = self.query_from_xmlrpc(self.request.body)
        index = self.request.registry['search_index']
        wrapped_context = ContextWrapper(self.request.context)
        stages = list(wrapped_context.stage.sro())
        hits = index.query_packages(parsed_query, stages)
        payload = dumps((hits,), methodresponse=1, encoding='utf-8')
    except Exception as e:
        log.exception("Error in xmlrpc_search")
        payload = dumps(Fault(1, repr(e)), encoding='utf-8')
    return Response(payload)
def _open_and_bind(self, conn):
    """Open *conn* and bind it; return True on success, False if bind fails.

    Timeouts and LDAP-level failures are logged and re-raised as
    AuthException with the original traceback preserved.
    """
    try:
        conn.open()
        bound = conn.bind()
    except socket.timeout:
        msg = "Timeout on LDAP connect to %s" % self['url']
        threadlog.exception(msg)
        reraise(AuthException, AuthException(msg), sys.exc_info()[2])
    except self.LDAPException:
        msg = "Couldn't open LDAP connection to %s" % self['url']
        threadlog.exception(msg)
        reraise(AuthException, AuthException(msg), sys.exc_info()[2])
    return bool(bound)
def update_projects(self, projects, clear=False):
    """Write *projects* to the search index.

    With ``clear=True`` the index is rebuilt (existing documents are
    dropped on commit via the CLEAR merge type).  The writer is always
    either committed or cancelled so the index lock is released.
    """
    writer = self.project_ix.writer()
    try:
        count = self._update_projects(writer, projects, clear=clear)
    except Exception:
        # Ordinary failure: abort the pending write, keep the caller alive.
        log.exception("Aborted write to search index after exception.")
        writer.cancel()
    except BaseException:
        # SystemExit/KeyboardInterrupt: the original bare `except:` silently
        # swallowed these.  Release the writer, then let them propagate.
        writer.cancel()
        raise
    else:
        log.info("Committing %s new documents to search index." % count)
        if clear:
            writer.commit(mergetype=CLEAR)
        else:
            writer.commit()
        log.info("Finished committing %s documents to search index." % count)
def thread_run(self):
    """Main loop of the indexer thread.

    Roughly every five seconds the pending queue size is reported.  While
    the event notifier lags behind the current keyfs serial the loop
    throttles itself before ticking.  Unexpected exceptions are logged and
    the loop continues; only a Shutdown request ends it.
    """
    thread_push_log("[IDX]")
    last_report = time.time()
    while True:
        try:
            if time.time() - last_report > 5:
                last_report = time.time()
                pending = self.shared_data.queue.qsize()
                if pending:
                    log.info("Indexer queue size ~ %s" % pending)
            event_serial = self.xom.keyfs.notifier.read_event_serial()
            current_serial = self.xom.keyfs.get_current_serial()
            if event_serial is not None and event_serial < current_serial:
                # be nice to everything else
                self.thread.sleep(1.0)
            self.tick()
        except mythread.Shutdown:
            raise
        except Exception:
            log.exception("Unhandled exception in indexer thread.")
            self.thread.sleep(1.0)