def __get_preference_security_cheetah(httpRequestHandler, args):
    """Render the security-preferences page, applying any submitted changes.

    Handles two form actions from the query args (cgi.parse_qs dict of lists):
      - "changesetting": toggles remote access and local authentication flags
        in globalvars and persists them in the config object.
      - "changeauth": changes login/password after verifying the old password
        and that both new password fields match; saves the config.

    The rendered Cheetah template receives {"comment": ...} plus globalvars
    in its searchList, and is written to the handler's wfile.
    """
    comment = ""
    if args.get("changesetting"):
        remoteaccess = args.get("remoteaccess") and args.get("remoteaccess")[0]
        if remoteaccess:
            globalvars.remoteAccess = 1
        else:
            globalvars.remoteAccess = 0
        globalvars.config.setValue("RemoteAccess", globalvars.remoteAccess)
        localauth = args.get("localauth") and args.get("localauth")[0]
        if localauth:
            # A new session id is generated when local auth is switched on.
            globalvars.sessionID = tools.generate_id()
            # NOTE(review): this Set-Cookie header is built but never sent to
            # the client — probably a latent bug; confirm whether the caller
            # is expected to emit it, or whether send_header should be used.
            headers = [("Set-Cookie", "sessionID=%s" % globalvars.sessionID)]
            globalvars.localAuthentification = 1
        else:
            globalvars.localAuthentification = 0
        globalvars.config.setValue("LocalAuthentification", globalvars.localAuthentification)
        # NOTE(review): unlike the "changeauth" branch below, this branch does
        # not call globalvars.config.save() — settings may be lost on restart.
        comment = "Settings changed !"
    elif args.get("changeauth"):
        password = args.get("password")[0]
        newlogin = args.get("newlogin") and args.get("newlogin")[0]
        newpassword1 = args.get("newpassword1") and args.get("newpassword1")[0]
        newpassword2 = args.get("newpassword2") and args.get("newpassword2")[0]
        # (removed a bare no-op statement `globalvars.logger` that had no effect)
        if password == globalvars.password:
            if newpassword1 == newpassword2:
                comment = "Password changed !"
                globalvars.login = newlogin
                globalvars.password = newpassword1
                globalvars.config.setValue("Login", newlogin)
                globalvars.config.setValue("Password", newpassword1)
                globalvars.config.save()
            else:
                comment = "New passwords do not match !"
        else:
            comment = "Bad password !"
    # `search_list` was named `dict` in an earlier version, shadowing the builtin.
    search_list = {"comment": comment}
    obj = globalvars
    # `with` guarantees the template file is closed even if rendering fails.
    with open("config/templates/prefsecurity.tmpl", "r") as template_file:
        t = Cheetah.Template.Template(file=template_file, searchList=[search_list, obj])
        httpRequestHandler.wfile.write(str(t))
def producer():
    """Generic JSON POST endpoint.

    Validates that the request carries a JSON body, hashes it for
    deduplication, wraps it in a request record, and publishes the record
    to the configured pub/sub stream.

    Returns:
        A JSON string {"response": "success"} on success.

    Aborts:
        400 if the body is not JSON or cannot be serialized/hashed.
        503 if the pub/sub client is unavailable or the publish fails.
    """
    reqdat_ = generate_id()
    try:
        client = get_pubsub_client()
    except Exception:  # was a bare `except:` — narrowed so SystemExit etc. pass through
        app.logger.error('Pubsub client unavailable')
        abort(503, 'stream client unavailable')
    app.logger.debug('webservice processing request')
    app.logger.debug(json.dumps(reqdat_))
    if not request.json:
        app.logger.debug('No json submitted')
        abort(400, 'Cannot interpret JSON post')
    try:
        hsh = json.dumps(request.json)
        # md5 requires bytes on Python 3; json.dumps output is ASCII-safe by
        # default, so encoding is a no-op change of type on Python 2 as well.
        resource_hash = hashlib.md5(hsh.encode('utf-8')).hexdigest()
    except Exception:
        app.logger.debug('JSON is unhashable')
        abort(400, 'Cannot interpret JSON post')
    reqdat_['resource'] = request.json
    reqdat_['resource_hash'] = resource_hash
    try:
        app.logger.debug('writing to stream')
        publish(client, config.get('override', 'stream_name'),
                json.dumps(reqdat_), app.logger,
                num_retries=config.getint('override', 'num_retries'))
    except Exception as e:
        app.logger.error(e)
        app.logger.error('failed to put to stream ' + json.dumps(reqdat_))
        abort(503, 'Internal Error')
        # NOTE: abort() raises, so the original
        # `return json.dumps({'response': 'failure'})` here was unreachable
        # and has been removed.
    return json.dumps({'response': 'success'})
def __init__(self, item: BaseWrapper, identical_items_index=None) -> None:
    """Capture a snapshot of *item*'s element info as a criteria dict.

    Args:
        item: wrapper whose ``element_info`` attributes are copied into
            ``self.props``.
        identical_items_index: disambiguating index stored under
            ``-found_index`` when several identical items match —
            presumably used to pick one of them later; confirm with caller.

    Keys prefixed with ``-`` appear to be inactive/placeholder criteria
    (most are None or fixed defaults) — TODO confirm how consumers of
    ``props`` interpret the ``-`` prefix.
    """
    # Unique identifier for this recorded item.
    self.id = tools.generate_id()
    self.props = {
        '-class_name': item.element_info.class_name,
        '-class_name_re': None,
        '-process': item.element_info.process_id,
        'title': item.element_info.name,
        '-title_re': None,
        '-top_level_only': True,
        '-visible_only': item.element_info.visible,
        '-enabled_only': item.element_info.enabled,
        '-best_match': None,
        '-handle': item.element_info.handle,
        '-ctrl_index': None,
        '-found_index': identical_items_index,
        '-active_only': False,
        'control_id': item.element_info.control_id,
        'control_type': item.element_info.control_type,
        'auto_id': item.element_info.automation_id,
        '-framework_id': item.element_info.framework_id,
        '-backend': None,
        '-depth': None
    }
def __init__(self) -> None:
    """Initialize the instance with a freshly generated unique id."""
    self.id = tools.generate_id()
def __init__(self, name):
    """Create a named container: a unique id, the given name, and an
    initially empty list of actions."""
    self.id = tools.generate_id()
    self.name, self.actions = name, []
def handle_get(httpRequestHandler):
    """Dispatch an HTTP GET request to the matching Maay web-UI action.

    Parses the request path; paths of the form ``/maay/<action>`` are routed
    through a large if/elif chain to the per-action helpers below. Any other
    ``/maay/...`` path falls through to serving a file from the configured
    document root. Requests whose first path component is not ``maay`` are
    silently ignored (no response is written) — presumably handled elsewhere;
    confirm with the surrounding server code.

    Access control: non-local clients are rejected with 403 unless remote
    access is enabled, and (except for ``remotedownload``/``login``) a valid
    session cookie is required, otherwise the login page is served.
    """
    u = urlparse.urlparse(httpRequestHandler.path)
    client_ip, client_port = httpRequestHandler.client_address
    # Split the path on "/" keeping at most 3 parts: "", "maay", "<action...>".
    words = u[2].split("/", 2)
    first_directory = None
    if len(words) > 1:
        first_directory = words[1]
    # Query-string parameters as a dict of lists (cgi.parse_qs semantics).
    args = cgi.parse_qs(u[4])
    if first_directory == "maay":
        if len(words) >= 3:
            maay_action = words[2]
        else:
            maay_action = None
        # Reject non-local clients outright when remote access is disabled
        # (remotedownload is exempt: it is the node-to-node download endpoint).
        if maay_action != "remotedownload" and client_ip != "127.0.0.1" and globalvars.remoteAccess == 0:
            __display_error_page(
                httpRequestHandler,
                403,
                "Error 403: Forbidden",
                "Forbidden",
                "You do not have the right to access to the URL %s on this server" % httpRequestHandler.path,
            )
            return
        # Check that the user is logged in; if not, display the login page.
        if maay_action != "remotedownload" and maay_action != "login":
            if client_ip != "127.0.0.1" or globalvars.localAuthentification == 1:
                c = Cookie.SimpleCookie(httpRequestHandler.headers.get("Cookie"))
                sessionID = None
                if c.has_key("sessionID"):
                    sessionID = c["sessionID"].value
                print "sessions %s / %s" % (sessionID, globalvars.sessionID)
                # No server-side session yet, or a stale cookie: force login,
                # passing the original path so the user lands back here after.
                if not globalvars.sessionID or sessionID != globalvars.sessionID:
                    __get_login_page(httpRequestHandler, httpRequestHandler.path)
                    return
        if maay_action == "login":
            login = None
            password = None
            try:
                # args.get(...) is None when the field is absent; indexing it
                # then raises, deliberately leaving login/password as None.
                login = args.get("login")[0]
                password = args.get("password")[0]
            except Exception, e:
                pass
            urls = args.get("url")
            url = urls and urls[0]
            if login == globalvars.login and password == globalvars.password:
                if not url:
                    url = "/maay/resultspool"
                # Fresh session id on every successful login.
                globalvars.sessionID = tools.generate_id()
                httpRequestHandler.send_response(302)
                httpRequestHandler.send_header("Set-Cookie", "sessionID=%s" % globalvars.sessionID)
                httpRequestHandler.send_header("Location", "%s" % url)
                httpRequestHandler.end_headers()
            else:
                httpRequestHandler.send_response(302)
                __get_login_page(httpRequestHandler, url, "Too bad, try again !")
                httpRequestHandler.end_headers()
        elif maay_action == "logs":
            __get_logs(httpRequestHandler)
        elif maay_action == "logout":
            # Expire the session cookie client-side and show the login page.
            httpRequestHandler.send_response(302)
            httpRequestHandler.send_header("Set-Cookie", "sessionID=; max-age=0")
            __get_login_page(httpRequestHandler)
        elif maay_action == "canceldownload":
            dids = args.get("did")
            document_id = dids[0]
            download = globalvars.downloadManager.cancelDownload(document_id)
            httpRequestHandler.send_response(302)
            httpRequestHandler.send_header("Location", "/maay/downloads")
        elif maay_action == "search":
            qs = args.get("q")
            # if qs:
            #     qs[0] = qs[0].strip()
            if qs and qs[0]:
                query = qs[0].split()
            else:
                query = []
            qids = args.get("qid")
            # An existing query id means "refresh": re-send the stored query
            # from its result spool and redirect back to the spool page.
            if qids and qids[0]:
                query_id = qids[0]
                resultSpool = globalvars.maay_core.getResultSpoolManager().getResultSpool(query_id)
                globalvars.maay_core.send_search_request(
                    resultSpool.getQuery(),
                    constants.INIT_TTL,
                    resultSpool.getRange(),
                    constants.MIN_SCORE,
                    constants.INIT_FNC,
                    resultSpool.getExpectedResultCount(),
                    query_id=query_id,
                )
                httpRequestHandler.send_response(302)
                httpRequestHandler.send_header("Location", "/maay/resultspool?qid=%s" % query_id)
                httpRequestHandler.end_headers()
                return
            did = args.get("did")
            # NOTE(review): document_id/url are only assigned on the `else`
            # branches below; the later `if not query and not document_id...`
            # test avoids a NameError only because `query` is then non-empty
            # (short-circuit) — fragile, but preserved as-is.
            if did:
                query.append("#%s" % did[0])
            else:
                document_id = None
            urls = args.get("url")
            if urls:
                query.append("url:%s" % urls[0])
                # url = urls[0]
            else:
                url = None
            if not query and not document_id and not url:
                # Nothing to search for: bounce back to the result-spool page.
                httpRequestHandler.send_response(302)
                # httpRequestHandler.send_header('Location', "http://%s:%s/maay/resultspool" % (host, globalvars.port))
                httpRequestHandler.send_header("Location", "/maay/resultspool")
                httpRequestHandler.end_headers()
                return
            # print "query = %s" % query
            # Number of results expected per page.
            r = args.get("r")
            if r:
                result_count = int(r[0])
            else:
                result_count = globalvars.result_count_per_page
            # Map the submitted search-scope button to a range constant;
            # desktop search is unbounded (-1 results).
            if args.get("desktopsearch"):
                search_range = constants.DESKTOP_SEARCH_RANGE
                result_count = -1
            elif args.get("privatesearch"):
                search_range = constants.PRIVATE_SEARCH_RANGE
            elif args.get("publishsearch"):
                search_range = constants.PUBLISHED_SEARCH_RANGE
            elif args.get("intranetsearch"):
                search_range = constants.INTRANET_SEARCH_RANGE
            elif args.get("googlesearch"):
                search_range = constants.INTERNET_SEARCH_RANGE
            else:
                search_range = constants.MAAY_SEARCH_RANGE
            query_id = __send_search_query(httpRequestHandler, query, search_range, result_count)
            httpRequestHandler.send_response(302)
            # httpRequestHandler.send_header('Location', "http://%s:%s/maay/resultspool?qid=%s" % (host, globalvars.port, query_id))
            httpRequestHandler.send_header("Location", "/maay/resultspool?qid=%s" % query_id)
            httpRequestHandler.end_headers()
        elif maay_action == "doctext":
            document_id = args["did"][0]
            __get_document_text(httpRequestHandler, document_id)
        elif maay_action == "document":
            # todo: test if d is correct (40 char and only one word)
            document_id = args["did"][0]
            qid = args.get("qid")
            if qid:
                query_id = args["qid"][0]
            else:
                query_id = -1
            # Optional provider-node metadata; all five must be present for
            # the node/provider bookkeeping below to run.
            nids = args.get("nid")
            node_id = nids and nids[0]
            ips = args.get("ip")
            ip = ips and ips[0]
            ports = args.get("port")
            port = ports and ports[0]
            lsts = args.get("lst")
            last_seen_time = lsts and lsts[0]
            lpts = args.get("lpt")
            last_providing_time = lpts and lpts[0]
            if node_id and ip and port and last_seen_time and last_providing_time:
                maay.globalvars.maay_core.updateNodeInfo(node_id, ip, port, 0, 0, last_seen_time)
                documentInfos = globalvars.database.getDocumentInfos(document_id=document_id)
                # NOTE(review): documentInfo is False when no info was found,
                # and .db_document_id would then raise — TODO confirm callers
                # always pass a known document id.
                documentInfo = (len(documentInfos) > 0) and documentInfos[0]
                globalvars.maay_core.updateDocumentProvider(documentInfo.db_document_id, node_id, last_providing_time)
            __get_document_by_id(httpRequestHandler, document_id, query_id)
        elif maay_action == "documentinfo":
            # TODO: test if d is correct (40 char and only one word)
            document_id = args["did"][0]
            query = []
            __get_document_info_cheetah(httpRequestHandler, document_id)
        elif maay_action == "resultspool":
            # TODO: test if d is correct (40 char and only one word)
            # "sm" toggles the global normal/advanced search-mode flag.
            sm = args.get("sm")
            if sm:
                if sm[0] == "0":
                    globalvars.search_mode = constants.NORMAL_SEARCH_MODE
                elif sm[0] == "1":
                    globalvars.search_mode = constants.ADVANCED_SEARCH_MODE
            qid = args.get("qid")
            if qid:
                query_id = qid[0]
            else:
                # NOTE(review): current_result_spool_query_id is not defined
                # in this function — presumably a module-level global; verify.
                query_id = current_result_spool_query_id
            p = args.get("p")
            if p:
                page = int(p[0])
            else:
                page = 1
            r = args.get("r")
            if r:
                search_range = int(r[0])
            else:
                search_range = constants.ALL_SEARCH_RANGE
            s = args.get("s")
            if s:
                sort_policy = int(s[0])
            else:
                sort_policy = resultspool.SCORE_SORTED
            __get_results_cheetah(httpRequestHandler, query_id, page, sort_policy, search_range)
        elif maay_action == "closeresultspool":
            # TODO: test if d is correct (40 char and only one word)
            query_id = args["qid"][0]
            __close_result_spool(httpRequestHandler, query_id)
            httpRequestHandler.send_response(302)
            httpRequestHandler.send_header("Location", "/maay/resultspool")
        elif maay_action == "nodes":
            __get_nodes_cheetah(httpRequestHandler)
        elif maay_action == "nodeinfo":
            nodeID = args["nid"][0]
            __get_node_info_cheetah(httpRequestHandler, nodeID)
        elif maay_action == "words":
            __get_words_cheetah(httpRequestHandler)
        elif maay_action == "wordinfo":
            word = args["w"][0]
            __get_word_info_cheetah(httpRequestHandler, word)
        elif maay_action == "system":
            __get_system(httpRequestHandler)
        elif maay_action in ("preferences", "prefsecurity"):
            __get_preference_security_cheetah(httpRequestHandler, args)
        elif maay_action == "prefindexation":
            __get_preference_indexation_cheetah(httpRequestHandler, args)
        elif maay_action == "prefui":
            __get_preference_ui_cheetah(httpRequestHandler, args)
        elif maay_action == "prefdebug":
            __get_preference_debug_cheetah(httpRequestHandler, args)
        elif maay_action == "documents":
            v = args.get("v")
            if v:
                view = int(v[0])
            else:
                view = maay.datastructure.documentinfo.PUBLISHED_STATE
            # __get_repository(httpRequestHandler, view)
            __get_documents_cheetah(httpRequestHandler, view)
        elif maay_action == "versions":
            urls = args.get("url")
            # NOTE(review): `url` stays unbound if no "url" arg was sent,
            # which would raise NameError in the call below — TODO confirm
            # the form always supplies it.
            if urls:
                url = urls[0]
            qids = args.get("qid")
            if qids:
                query_id = qids[0]
            else:
                query_id = None
            __get_versions_cheetah(httpRequestHandler, url, query_id)
        elif maay_action == "link":
            did = args.get("did")
            if did:
                document_id = urllib.unquote(did[0])
            else:
                __display_error_page(httpRequestHandler, 200, "Bad arguments 1", "URL = %s" % httpRequestHandler.path)
                return
            p = args.get("path")
            if p:
                path = p[0]
            else:
                __display_error_page(httpRequestHandler, 200, "Bad arguments 2", "URL = %s" % httpRequestHandler.path)
                return
            u = args.get("url")
            if u:
                url = urllib.unquote(u[0])
            else:
                url = None
            # print "document_id = %s" % document_id
            # print "path = %s" % path
            __get_link(httpRequestHandler, document_id, path, url)
        elif maay_action == "downloads":
            __get_downloads(httpRequestHandler)
        elif maay_action == "help":
            __get_help(httpRequestHandler)
        elif maay_action == "about":
            __get_about(httpRequestHandler)
        elif maay_action == "url":
            # __get_url(httpRequestHandler)
            # Redirect to the external URL, then record the click against the
            # query so the URL indexer can weight it.
            urls = args.get("url")
            qids = args.get("qid")
            url = urls[0]
            query_id = qids[0]
            httpRequestHandler.send_response(302)
            httpRequestHandler.send_header("Location", url)
            httpRequestHandler.end_headers()
            resultSpool = globalvars.maay_core.getResultSpoolManager().getResultSpool(query_id)
            globalvars.urlindexer.insertURL(url, resultSpool.getQuery(), weight=constants.DOWNLOAD_SCORE_WEIGHT)
        elif maay_action == "remotedownload":
            dids = args.get("did")
            document_id = dids[0]
            downloads = args.get("download")
            download = downloads and downloads[0]
            __get_remote_download(httpRequestHandler, document_id, download)
        elif maay_action == "redir":
            # Open the URL with the platform command helper (no HTTP response
            # is written here).
            urls = args.get("url")
            url = urls[0]
            globalvars.command.start(url)
        elif maay_action == "maayify":
            urls = args.get("url")
            url = urls and urls[0]
            titles = args.get("title")
            print "title = %s" % str(titles)
            title = (titles and titles[0]) or "No title"
            dids = args.get("did")
            document_id = dids and dids[0]
            submit = args.get("maayify")
            keywords = None
            if submit:
                kwss = args.get("keywords")
                kws = (kwss and kwss[0]) or ""
                keywords = kws.split()
            # Maayify an already-known document by id, or a raw URL otherwise.
            if document_id:
                __get_maayify_document(httpRequestHandler, document_id=document_id, keywords=keywords, submit=submit)
            else:
                __get_maayify_url(httpRequestHandler, url=url, title=title, keywords=keywords, submit=submit)
        elif maay_action == "maayl":
            __get_maayl(httpRequestHandler)
        else:
            # TODO: do an error page
            # Unknown action: treat the remainder of the path as a file under
            # the document root. NOTE(review): words[2] raises IndexError when
            # the path is just "/maay" (maay_action is None) — verify upstream
            # guarantees a third path component here.
            file_path = os.path.normpath(
                "%s%s%s" % (globalvars.config.getValue("MaayDocumentRoot"), os.path.sep, words[2])
            )
            __get_document_by_filename(httpRequestHandler, file_path)