def main():
    """Read a file path from stdin, parse every line of that file into
    USER_LIST, then print the resulting list.

    Fix: the original opened the file with a bare open()/close() pair, so
    the handle leaked if Parse.parse raised; 'with' guarantees closure.
    """
    print("Enter file path:")
    filepath = input()
    with open(filepath, "r") as f:
        # Iterating the file object is equivalent to the original
        # readline() loop (readline() returns "" — falsy — at EOF).
        for line in f:
            Parse.parse(line, USER_LIST)
    print("Final List:\n")
    USER_LIST.printList()
def execute(self, exp):
    """Evaluate the Scheme expression *exp* in a brand-new environment.

    Returns the evaluated result rendered as a string via Utils.stringtify.
    """
    syntax_tree = Parse.parse(exp)
    fresh_env = Environment([], [], None)
    value = scheme.Scheme.evaluate(syntax_tree, fresh_env)
    return Utils.stringtify(value)
def PP_location_deletion(self, si, C):
    """Return sentence *si* with its location PP (and stray commas) removed.

    C selects where the PP lives:
      "PP" — the PP is a direct child of the sentence root;
      "VP" — the PP sits inside the verb phrase.
    Any other value of C returns None.
    Kept constituents are joined with a single trailing space each,
    matching the original accumulator behavior.
    """
    tree = Tree.fromstring(Parse.parse(si))
    pieces = []

    if C == "PP":
        # Drop top-level PP nodes and commas; keep everything else.
        for node in tree[0]:
            if node.label() not in ("PP", ","):
                pieces.append(" ".join(node.leaves()) + " ")
        return "".join(pieces)

    if C == "VP":
        for node in tree[0]:
            if node.label() == "VP":
                # Keep the VP's children but strip any PP directly under it.
                for child in node:
                    if child.label() != "PP":
                        pieces.append(" ".join(child.leaves()) + " ")
            else:
                pieces.append(" ".join(node.leaves()) + " ")
        return "".join(pieces)

    # Invalid case selector.
    return None
def contain_loc(self, s):
    """Check whether *s* contains a prepositional phrase of place/direction.

    Returns "PP" when such a phrase is a direct child of the sentence root,
    "VP" when it is nested inside the verb phrase, and None otherwise.
    Phrases that look like time ("when") expressions are excluded via
    self.when_check.
    """
    # NOTE(review): multi-word entries ("close to", "in front of", ...) can
    # never match a single leaf token below; kept as-is to preserve behavior.
    loc_prepositions = {
        "above", "across", "after", "against", "along", "among", "around",
        "at", "behind", "below", "beside", "between", "by", "close to",
        "down", "from", "in front of", "inside", "in", "into", "near",
        "next to", "off", "on", "onto", "opposite", "out of", "outside",
        "over", "past", "through",
    }

    def _is_location_pp(node):
        # A PP whose words include a place/direction preposition and which
        # is not a time phrase.
        return (str(node.label()) == "PP"
                and any(str(w).lower() in loc_prepositions
                        for w in node.leaves())
                and not self.when_check(node))

    tree = Tree.fromstring(Parse.parse(s))
    for node in tree[0]:
        if _is_location_pp(node):
            return "PP"
        if str(node.label()) == "VP":
            for child in node:
                if _is_location_pp(child):
                    return "VP"
    return None
def read_input(env):
    """REPL loop: read (possibly multi-line) expressions, evaluate in *env*.

    An expression is submitted for evaluation once its parentheses are
    balanced and its double quotes are paired; evaluation errors are
    printed and the input buffer is reset either way.

    Fix: 'result != None' replaced with the identity check
    'result is not None' (PEP 8; '!=' can be hijacked by a custom __ne__).
    """
    exp = ""
    while True:
        # Continuation prompt once a multi-line expression has started.
        prefix = "..." if exp else "> "
        exp += input(prefix)
        lp = exp.count('(')
        rp = exp.count(')')
        qn = exp.count('"')
        if not exp:
            print("Empty expression")
        elif lp == rp and qn % 2 == 0:
            try:
                parsed = Parse.parse(exp)
                result = Scheme.evaluate(parsed, env)
                if result is not None:
                    print(">> " + Utils.stringtify(result) + "\r\n")
            except ValueError as e:
                print(str(e))
            finally:
                # Reset the buffer whether evaluation succeeded or failed.
                exp = ""
def main(self, k):
    """Extract binary questions from the top-k sentences and print them.

    Each sentence from Parse.main(k) is parsed and fed through
    bin_question_extract; falsy extractions are discarded.
    """
    attempts = (self.bin_question_extract(Parse.parse(sentence))
                for sentence in Parse.main(k))
    binary_questions = [question for question in attempts if question]
    for question in binary_questions:
        print(question)
def start():
    """Crawl an onion site through a local Tor SOCKS proxy, collecting links.

    Fetches frontier URLs one at a time; after every batch of 10 attempts
    the accumulated valid links are persisted via jsonAppender (the for-else
    fires because the inner loop never breaks). Returns when the frontier
    is exhausted.

    Fixes: the frontier-exhausted guard was 'idx > len(linkList)', which let
    idx == len(linkList) through and raised IndexError on the lookup below —
    now '>='. The counter 'iter' was also renamed to stop shadowing the
    builtin.
    """
    URL = "http://hackingwpzhxqe3a.onion/author/admin/index.html"
    s = r.Session()
    s.proxies = {
        'http': 'socks5h://localhost:9150',
        'https': 'socks5h://localhost:9150'
    }
    linkList = [URL]
    validLinks = [URL]
    idx = 0
    parser = Parse()
    while True:
        for _ in range(10):
            print(idx)
            if idx >= len(linkList):  # FIX: was '>', off-by-one IndexError
                return
            try:
                req = s.get(linkList[idx], allow_redirects=False,
                            verify=False, timeout=10)
            except r.Timeout:
                print("{} Timed out".format(linkList[idx]))
                idx += 1
                continue
            except r.ConnectionError:
                print("{} Actively refused".format(linkList[idx]))
                idx += 1
                continue
            if req.status_code == 200:
                try:
                    validLinks.append(req.url + " title= {}".format(
                        fromstring(req.content).findtext('.//title')))
                except (ParserError, ParseError):
                    # Page body could not be parsed for a <title>.
                    validLinks.append(req.url + " title=Was Empty")
                parser.updateReq(req)
                parser.updatelinkList(linkList)
                # Grow the frontier with links discovered on this page.
                for discovered in parser.parse():
                    linkList.append(discovered)
                idx += 1
            else:
                idx += 1
        else:
            # Periodic checkpoint after each batch of 10 attempts.
            jsonAppender(validLinks)
def main(self, binary, si, NE):
    """Print the when/where/why/what-who answers for sentence *si*.

    *binary* is the string representation of a binary question; when it is
    falsy nothing is computed or printed. Labels are padded to 4 characters
    ("wher", "www ") so the output columns line up.
    """
    if not binary:
        return
    binary_t = Tree.fromstring(Parse.parse(binary))
    # Evaluate all four answers eagerly, in the original order.
    answers = (
        (" *** when : ", self.when(binary_t)),
        (" *** wher : ", self.where(si)),
        (" *** why : ", Why.main(si)),
        (" *** www : ", What_Who.main(si, NE)),
    )
    for label, value in answers:
        if value:
            print(label, str(value))
def fetch_stocks(self, params):
    """Build a query URL from *params* and return the parsed stock results.

    Fix: the parsed results were assigned to a local and never returned,
    so every caller received None. The local 'filter' was also renamed to
    stop shadowing the builtin.
    """
    query_filter = Filter()
    parser = Parse()
    url = query_filter.build_query_string(params)
    return parser.parse(url, [])