def emit_term(tag, text, is_in_head, term_idx, term):
    term_data = query_one(('pred', 'args'),
                          '{<' + term + '> kbdbg:has_pred ?pred. <' + term + '> kbdbg:has_args ?args.}')
    pred = term_data['pred']
    args_list = list(query_list(('item', ), term_data['args']))
    if len(args_list) == 2:
        def arrrr(arg_idx):
            with tag('td', port=port_name(is_in_head, term_idx, arg_idx), border=border_width):
                text(' ' + shorten(args_list[arg_idx]) + ' ')
        arrrr(0)
        with tag("td", border=border_width):
            text(shorten(pred))
        arrrr(1)
        with tag("td", border=border_width):
            text('.')
    else:
        with tag("td", border=border_width):
            text(shorten(pred) + '( ')
        arg_idx = 0
        for arg, is_last in tell_if_is_last_element(args_list):
            with tag('td', port=port_name(is_in_head, term_idx, arg_idx), border=border_width):
                text(shorten(arg))
            arg_idx += 1
            if not is_last:
                with tag("td", border=border_width):
                    text(', ')
        with tag("td", border=border_width):
            text(').')
def emit_term(g, tag, text, is_in_head, term_idx, term):
    pred = g.value(term, kbdbg.has_pred)
    args_collection = Collection(g, g.value(term, kbdbg.has_args))
    if len(args_collection) == 2:
        def arrrr(arg_idx):
            with tag('td', port=port_name(is_in_head, term_idx, arg_idx), border=border_width):
                text(' ' + shorten(args_collection[arg_idx]) + ' ')
        arrrr(0)
        with tag("td", border=border_width):
            text(shorten(pred))
        arrrr(1)
        with tag("td", border=border_width):
            text('.')
    else:
        with tag("td", border=border_width):
            text(shorten(pred) + '( ')
        arg_idx = 0
        for arg, is_last in tell_if_is_last_element(args_collection):
            with tag('td', port=port_name(is_in_head, term_idx, arg_idx), border=border_width):
                text(shorten(arg))
            arg_idx += 1
            if not is_last:
                with tag("td", border=border_width):
                    text(', ')
        with tag("td", border=border_width):
            text(').')
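# tell_if_is_last_element() is used by the emit_term() variants above (and by
# bnode_printer() further down) but is not shown in this collection. A minimal
# sketch, assuming it simply pairs each item with a flag that is True only for
# the final element:
def tell_if_is_last_element(iterable):
    """Yield (item, is_last) pairs from any iterable."""
    items = list(iterable)
    last_idx = len(items) - 1
    for idx, item in enumerate(items):
        yield item, idx == last_idx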
def request(self, verb, url, data, headers={}):
    response = self.send_request(verb, url, data, headers)
    if verb == 'HEAD':
        response.close()
        resp_body = ''
    else:
        resp_body = response.read()
    for (k, v) in response.getheaders():
        logger.debug('< %s: %s' % (k, v))
    logger.debug('< ' + shorten(data, 1024))
    lst = response.getheaders()
    d = dict(lst)
    if 'set-cookie' in d:
        # httplib joins repeated Set-Cookie headers with ', '; split them back apart
        tmp = d['set-cookie'].split(', ')
        del d['set-cookie']
        lst = d.items()
        for cookie in tmp:
            lst.append(('set-cookie', cookie))
    rst = {
        'status': response.status,
        'header': parse_header_list(lst),
        'body': resp_body,
        'body_file': None,
    }
    if response.status in [200, 206]:
        return rst
    else:
        raise HTTPException(rst)
def request(self, verb, url, data, headers={}): """used by small response (get/put), not get_file. """ response, conn = self.send_request(verb, url, data, headers) if verb == 'HEAD': response.close() conn.close() resp_body = '' else: resp_body = response.read() response.close() conn.close() for (k, v) in response.getheaders(): logger.debug('< %s: %s' % (k, v)) logger.debug('< ' + shorten(data, 1024)) rst = { 'status': response.status, 'header': dict(response.getheaders()), 'body': resp_body, 'body_file': None } if (response.status in [200, 206]): return rst else: print rst raise HTTPException(rst)
def put(self, url, body='', headers={}):
    tmp_file = self.content_to_tmpfile(body)
    logger.debug('request body:: ' + shorten(body, 1024))
    return self._curl('curl -X "PUT" %s "%s" -T "%s"' % (self._headers2txt(headers), url, tmp_file))
def put(self, url, body='', headers={}):
    headers = copy.deepcopy(headers)
    logger.info('pycurl -X PUT -d "%s" "%s" ', shorten(body, 100), url)
    self._init_curl('PUT', url, headers)
    req_buf = StringIO(body)
    self.c.setopt(pycurl.INFILESIZE, len(body))
    self.c.setopt(pycurl.READFUNCTION, req_buf.read)
    return self._do_request()
def get_frame_html_label(s, frame, isroot):
    rule = s.g.value(frame, kbdbg.is_for_rule)
    params = rule, isroot
    try:
        template = s.frame_templates[params]
    except KeyError:
        template = s._get_frame_html_label(s.g, *params)
        s.frame_templates[params] = template
    return template.replace(frame_name_template_var_name,
                            html_module.escape(shorten(frame.n3())))
def test1(URL):
    i = 0
    for add_header in header_cases:
        for local_file in body_cases + body_cases2:
            for qs in query_string_case:
                url = URL + str(i) + qs
                r1 = _test_http_client(PyCurlHTTPC, url, add_header, local_file)
                if local_file not in body_cases2:
                    r2 = _test_http_client(CurlHTTPC, url, add_header, local_file)
                r3 = _test_http_client(HttplibHTTPC, url, add_header, local_file)
                for k in r1.keys():
                    print 'compare for ' + k
                    print 'r1:' + shorten(r1[k]['body'])
                    if local_file not in body_cases2:
                        print 'r2:' + shorten(r2[k]['body'])
                    print 'r3:' + shorten(r3[k]['body'])
                    if local_file not in body_cases2:
                        assert r1[k]['body'] == r2[k]['body']
                    assert r1[k]['body'] == r3[k]['body']
                i += 1
def get_bnode_string(s, bnode, parent, items_uri):
    log('querying bnode ' + bnode + ' ' + ss)
    (doc, tag, text) = yattag.Doc().tagtext()
    with tag("table", border=0, cellspacing=0):
        with tag('tr'):
            with tag('td', border=1):
                text(shorten(bnode))
        items = query_list(('name', 'value'), items_uri,
                           additional="""?item kbdbg:has_name ?name. ?item kbdbg:has_value ?value.""")
        for i in items:
            with tag('tr'):
                name = i['name']
                pn = gv_escape(name)
                with tag("td", border=1, port=pn):
                    text(shorten(name))
                    text(' = ')
                    text(shorten(i['value']))
    return doc.getvalue()
def handle(conn, addr):
    """Handle each connected client."""
    logger.info('connected to {0}'.format(addr))
    time.sleep(1)  # delay

    # read payload
    payload_len_buf = read_bytes(conn, PLEN_BUF_SIZE)
    payload_len = struct.unpack('<L', payload_len_buf)[0]
    payload_buf = read_bytes(conn, payload_len)

    # shorten url and send it back
    short_url = shorten(payload_buf)
    payload_len = struct.pack('<L', len(short_url))
    conn.sendall(payload_len + short_url)
    conn.close()
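# For context, a client for the length-prefixed protocol that handle() above
# implements might look like the following sketch. The host, port, and helper
# names are assumptions, PLEN_BUF_SIZE is taken to be the 4 bytes of the '<L'
# prefix, and url must be a bytes object.
import socket
import struct

def _read_exact(conn, n):
    """Keep calling recv() until exactly n bytes have arrived."""
    buf = b''
    while len(buf) < n:
        chunk = conn.recv(n - len(buf))
        if not chunk:
            raise IOError('connection closed before %d bytes were read' % n)
        buf += chunk
    return buf

def shorten_remote(url, host='127.0.0.1', port=9999):
    """Send a length-prefixed URL and read back the length-prefixed short URL."""
    conn = socket.create_connection((host, port))
    try:
        conn.sendall(struct.pack('<L', len(url)) + url)
        reply_len = struct.unpack('<L', _read_exact(conn, 4))[0]
        return _read_exact(conn, reply_len)
    finally:
        conn.close()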
def get_frame_html_label(s, frame, isroot):
    rule = frame['is_for_rule']
    params = rule
    try:
        while True:
            template = frame_templates[params]
            if template == 'pending...':
                time.sleep(1)
            else:
                break
    except KeyError:
        info(params + ' not found in template cache..' + ss)
        frame_templates[params] = 'pending...'
        info(params + ' set.')
        template = s._get_frame_html_label(rule, isroot)
        frame_templates[params] = template
    return template.replace(frame_name_template_var_name,
                            html_module.escape(shorten(frame['frame'])))
def request(self, verb, url, data, headers={}):
    response = self.send_request(verb, url, data, headers)
    if verb == 'HEAD':
        response.close()
        resp_body = ''
    else:
        resp_body = response.read()
    for (k, v) in response.getheaders():
        logger.debug('< %s: %s' % (k, v))
    logger.debug('< ' + shorten(data, 1024))
    rst = {
        'status': response.status,
        'header': dict(response.getheaders()),
        'body': resp_body,
        'body_file': None,
    }
    if response.status in [200, 206]:
        return rst
    else:
        raise HTTPException(rst)
def _curl_ll(self, cmd, body_file=None):
    if not body_file:
        body_file = self._random_tmp_file('body')
    header_file = self._random_tmp_file('header')
    logger.info('%s -v > %s' % (cmd, body_file))
    cmd = cmd.replace('curl', 'curl > %s --dump-header %s -s' % (body_file, header_file))
    logger.debug(cmd)
    (exitstatus, outtext) = commands.getstatusoutput(cmd)
    errs = [
        1792,  # 7 << 8: curl exit status 7, couldn't connect to host
    ]
    if exitstatus in errs:
        raise HTTPException(None, 'error on curl: (%s, %s) on %s' % (exitstatus, outtext, cmd))
    resp_header = file(header_file).read()
    resp_body_size = os.path.getsize(body_file)
    if resp_body_size < READ_BODY_TO_MEMORY:
        resp_body = file(body_file).read()
    else:
        resp_body = ''
    logger.debug(resp_header)
    logger.debug(shorten(resp_body, 80))
    status, header = self._parse_resp_headers(resp_header)
    rst = {
        'status': status,
        'header': header,
        'body': resp_body,
        'body_size': resp_body_size,
        'body_file': body_file,
    }
    if status in [200, 206]:
        return rst
    else:
        raise HTTPException(rst)
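# _parse_resp_headers() is not included in this collection. A minimal sketch,
# assuming it takes the raw --dump-header output and returns (status_code,
# header_dict); the exact return shape and the lower-casing of header names are
# assumptions:
def _parse_resp_headers(self, raw_headers):
    """Parse a curl --dump-header dump into (status, headers)."""
    # curl may dump several response blocks (e.g. after a redirect); keep the last
    blocks = [b for b in raw_headers.strip().split('\r\n\r\n') if b]
    lines = blocks[-1].split('\r\n')
    status = int(lines[0].split()[1])  # "HTTP/1.1 200 OK" -> 200
    headers = {}
    for line in lines[1:]:
        if ':' in line:
            name, value = line.split(':', 1)
            headers[name.strip().lower()] = value.strip()
    return status, headers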
def send_request(self, verb, url, data, headers={}): """used by all methods. """ logger.info('ll httplibcurl -X "%s" "%s" ', verb, url) for (k, v) in headers.items(): logger.debug('> %s: %s' % (k, v)) logger.debug('\n') logger.debug('> ' + shorten(data, 1024)) o = urlparse(url) host = o.netloc path = o.path if o.query: path+='?' path+=o.query conn = None #print "verb,", verb #print "url:", url #print "host:", host #print "path:", path #print "headers:", headers #print "data:", data if o.scheme == 'https': if not conn: conn = httplib.HTTPSConnection(host, None, False, 300) else: if not conn: conn = httplib.HTTPConnection(host, None, False, 300) try: conn.request(verb, path, data, headers) except: #print "exception......" conn.close() conn = None if o.scheme == 'https': if not conn: conn = httplib.HTTPSConnection(host, None, False, 300) else: if not conn: conn = httplib.HTTPConnection(host, None, False, 300) conn.request(verb, path, data, headers) response = conn.getresponse() return response, conn
def send_request(self, verb, url, data, headers={}): """used by all methods. """ logger.info('ll httplibcurl -X "%s" "%s" ', verb, url) for (k, v) in headers.items(): logger.debug('> %s: %s' % (k, v)) logger.debug('\n') logger.debug('> ' + shorten(data, 1024)) o = urlparse(url) host = o.netloc path = o.path if o.query: path += '?' path += o.query conn = None #print "verb,", verb #print "url:", url #print "host:", host #print "path:", path #print "headers:", headers #print "data:", data if o.scheme == 'https': if not conn: conn = httplib.HTTPSConnection(host, None, False, 300) else: if not conn: conn = httplib.HTTPConnection(host, None, False, 300) try: conn.request(verb, path, data, headers) except: #print "exception......" conn.close() conn = None if o.scheme == 'https': if not conn: conn = httplib.HTTPSConnection(host, None, False, 300) else: if not conn: conn = httplib.HTTPConnection(host, None, False, 300) conn.request(verb, path, data, headers) response = conn.getresponse() return response, conn
def send_request(self, verb, url, data, headers={}): logger.info('ll httplibcurl -X "%s" "%s" ', verb, url) for (k, v) in headers.items(): logger.debug('> %s: %s' % (k, v)) logger.debug('\n') logger.debug('> ' + shorten(data, 1024)) o = urlparse(url) host = o.netloc path = o.path if o.query: path+='?' path+=o.query if o.scheme == 'https': conn = httplib.HTTPSConnection(host) else: conn = httplib.HTTPConnection(host) conn.request(verb, path, data, headers) response = conn.getresponse() return response
def pred(s, pred_name, rules):
    s._label = 0
    s.state_index = 0
    return Collection([
        comment(common.shorten(pred_name) if pred_name else 'query'),
        Line(pred_func_declaration(('pred_' + cppize_identifier(pred_name)) if pred_name else 'query')),
        Block([
            Statement('goto *(((char*)&&case0) + state.entry)'),
            s.label(),
            If('(top_level_tracing_coro == NULL) && tracing_enabled',
               Statement('top_level_tracing_coro = &state')) if trace_proof_ else Line(),
            Lines([s.rule(rule) if type(rule) != Builtin else rule.build_in(rule) for rule in rules]),
            Statement('return 0')
        ])
    ])
def bnode_printer(s):
    result = Lines([Line("""
string bnode_to_string2(set<Thing*> &processing, Thing* thing) {
    processing.insert(thing);
    stringstream result;
    result << endl;
    result << "(" << processing.size() << ")";
    for (size_t i = 0; i < processing.size(); i++)
        result << " ";
    result << "[";
    switch (thing->origin()) {
""")])

    def do_arg(arg):
        result.append(s.substituted_arg2('result', locals, rule, arg, arg != bnode_name))

    for bnode_cpp_name, (rule, bnode_name) in s.bnodes.items():
        result.append(Line('case ' + bnode_cpp_name + ':'))
        for triple, is_last in common.tell_if_is_last_element(rule.original_head_triples):
            bnode_idx = rule.locals_map[bnode_name]
            locals = 'thing - ' + str(bnode_idx)
            do_arg(triple.args[0])
            result.append(Statement('result << " <' + common.shorten(triple.pred) + '> "'))
            do_arg(triple.args[1])
            if not is_last:
                result.append(Statement('result << ". "'))
        result.append(Statement('break'))
    result.append(Line('}; result << "]"; processing.erase(thing);'))
    result.append(Line('return result.str();}'))
    result.append(Line("""
string bnode_to_string(Thing* thing) {
    set<Thing*> processing;
    return bnode_to_string2(processing, thing);
}
"""))
    return result
def _request(self, verb, url, data, headers={}):
    response = self.send_request(verb, url, data, headers)
    if verb == 'HEAD':
        response.close()
        resp_body = ''
    else:
        resp_body = response.read()
    for (k, v) in response.getheaders():
        logger.debug('< %s: %s' % (k, v))
    logger.debug('< ' + shorten(data, 1024))
    response_headers = dict(response.getheaders())
    rst = {
        'status': response.status,
        'header': response_headers,
        'body': resp_body,
        'body_file': None,
    }
    if response.status in [200, 206]:
        return rst
    else:
        raise HTTPException(rst)
def cppize_identifier(i: str) -> str:
    return common.fix_up_identification(common.shorten(i))
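# common.fix_up_identification() is not shown here. A minimal sketch, assuming
# its only job is to turn the shortened name into a legal C/C++ identifier; the
# exact replacement rules are a guess:
import re

def fix_up_identification(name: str) -> str:
    """Replace anything that is not legal in a C/C++ identifier with '_'."""
    cleaned = re.sub(r'[^0-9A-Za-z_]', '_', name)
    if cleaned and cleaned[0].isdigit():  # identifiers must not start with a digit
        cleaned = '_' + cleaned
    return cleaned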
def arrrr(arg_idx):
    with tag('td', port=port_name(is_in_head, term_idx, arg_idx), border=border_width):
        text(' ' + shorten(args_collection[arg_idx]) + ' ')
def generate_gv_image(s):
    g = s.g
    s.gv("digraph frame" + str(s.step) + "{ ")
    log('frames.. ' + '[' + str(s.step) + ']')
    root_frame = None
    rrr = list(g.subjects(RDF.type, kbdbg.frame))
    for i, frame in enumerate(rrr):
        if g.value(frame, kbdbg.is_finished, default=False):
            continue
        f, text = s.get_frame_gv(i, frame)
        s.gv(f + text)
        parent = g.value(frame, kbdbg.has_parent)
        if parent:
            s.arrow(gv_escape(parent), f, color='yellow', weight=10000000)
        else:
            root_frame = f

    log('bnodes.. ' + '[' + str(s.step) + ']')
    for bnode in g.subjects(RDF.type, kbdbg.bnode):
        parent = g.value(bnode, kbdbg.has_parent)
        if g.value(parent, kbdbg.is_finished, default=False):
            continue
        (doc, tag, text) = yattag.Doc().tagtext()
        with tag("table", border=0, cellspacing=0):
            with tag('tr'):
                with tag('td', border=1):
                    text(shorten(bnode.n3()))
            items = None
            for i in g.objects(bnode, kbdbg.has_items):
                items = i  # keep the latest one
            if not items:
                continue
            for i in Collection(g, items):
                with tag('tr'):
                    name = g.value(i, kbdbg.has_name)
                    pn = gv_escape(name)
                    with tag("td", border=1, port=pn):
                        text(shorten(name))
                        text(' = ')
                        text(shorten(g.value(i, kbdbg.has_value)))
        s.gv(gv_escape(bnode) + ' [shape=none, cellborder=2, label=<' + doc.getvalue() + '>]')
        s.arrow(gv_escape(parent), gv_escape(bnode), color='yellow', weight=100)

    last_bindings = get_last_bindings(s.step)
    log('bindings...' + '[' + str(s.step) + ']')
    new_last_bindings = []
    for binding in g.subjects(RDF.type, kbdbg.binding):
        weight = 1
        source_uri = g.value(binding, kbdbg.has_source)
        target_uri = g.value(binding, kbdbg.has_target)
        if g.value(source_uri, kbdbg.is_bnode, default=False) and g.value(target_uri, kbdbg.is_bnode, default=False):
            weight = 0
        if g.value(binding, kbdbg.was_unbound) == rdflib.Literal(True):
            if binding.n3() in last_bindings:
                s.comment("just unbound binding")
                s.arrow(s.gv_endpoint(source_uri), s.gv_endpoint(target_uri),
                        color='orange', weight=weight, binding=True)
            continue
        if g.value(binding, kbdbg.failed) == rdflib.Literal(True):
            s.comment("just failed binding")
            s.arrow(s.gv_endpoint(source_uri), s.gv_endpoint(target_uri),
                    color='red', weight=weight, binding=True)
            continue
        s.comment("binding " + binding.n3())
        s.arrow(s.gv_endpoint(source_uri), s.gv_endpoint(target_uri),
                color=('black' if (binding.n3() in last_bindings) else 'purple'),
                weight=weight, binding=True)
        new_last_bindings.append(binding.n3())
    put_last_bindings(s.step, new_last_bindings)
    del new_last_bindings

    log('results..' + '[' + str(s.step) + ']')
    last_result = root_frame
    for i, result_uri in enumerate(g.subjects(RDF.type, kbdbg.result)):
        result_node = gv_escape(result_uri)
        r = result_node + ' [cellborder=2, shape=none, label=<'
        (doc, tag, text) = yattag.Doc().tagtext()
        with tag("table"):
            with tag('tr'):
                with tag("td"):
                    text('RESULT' + str(i) + ' ')
                emit_terms(s.g, tag, text, s.g.value(result_uri, RDF.value), 'result')
        r += doc.getvalue() + '>]'
        s.gv(r)
        if last_result:
            s.arrow(last_result, result_node, color='yellow', weight=100)
        last_result = result_node
    s.gv("}")
    log('}..' + '[' + str(s.step) + ']')