def main():
    """Draw the whole scene: background, a row of windows, yarn clews, cats."""
    xmax = 500
    ymax = 700
    graph.windowSize(xmax, ymax)
    graph.canvasSize(xmax, ymax)
    background(xmax, ymax, 300)

    # Tile windows from the right edge leftwards until we fall off the canvas.
    x0 = 500
    y0 = 300
    window_width = 180
    window_indent = 20
    while x0 >= 0:
        window(x0, y0, window_width, window_indent, 5)
        x0 -= window_width + window_indent

    # Yarn clews: (x, y, radius, direction).
    for cx, cy, radius, direction in (
        (200, 640, 50, 1),
        (400, 600, 30, -1),
        (300, 500, 30, -1),
        (150, 370, 15, 1),
        (70, 600, 15, 1),
        (425, 400, 15, -1),
    ):
        clew(cx, cy, radius, direction)

    # Cats: (x, y, size, direction, colour).
    for cx, cy, size, direction, colour in (
        (350, 300, 80, 1, 'brown'),
        (150, 450, 80, -1, 'grey'),
        (100, 350, 30, -1, 'brown'),
        (450, 500, 30, -1, 'brown'),
        (100, 650, 30, -1, 'grey'),
        (400, 650, 30, 1, 'grey'),
        (350, 550, 30, 1, 'brown'),
    ):
        cat(cx, cy, size, direction, colour)

    graph.run()
def main():
    """Draw a reduced scene (one clew, one cat) and pack the canvas widget."""
    xmax = 500
    ymax = 700
    graph.windowSize(xmax, ymax)
    graph.canvasSize(xmax, ymax)
    background(xmax, ymax, 300)

    # Windows are laid out right-to-left with a fixed gap between them.
    x0 = 500
    y0 = 300
    window_width = 180
    window_indent = 20
    while x0 >= 0:
        window(x0, y0, window_width, window_indent, 5)
        x0 -= window_width + window_indent

    canvas_widget = graph.canvas()
    graph.canvasSize(xmax, ymax)
    clew(200, 640, 50, 1)
    cat(350, 300, 80, 1, 'brown')
    canvas_widget.pack()
    graph.run()
def P1(fo, strategy_q, buffer_q):
    """Build the live 3-panel matplotlib view and launch the worker threads.

    Panels show acceleration / velocity / displacement, each holding a
    red/blue/green x/y/z line triple.  Worker daemons: server, data
    handler, servo generation, monitor.  graph.run() blocks until the UI
    exits, after which the process terminates via sys.exit().

    NOTE(review): `fo` is unused here — presumably reserved for the
    disabled ml thread; confirm before removing.
    """
    fig, panels = plt.subplots(3, sharex=True)
    # One (red, blue, green) line triple per panel, plus the panel array
    # itself appended last -- the data handler expects this exact layout.
    lines = [panels[i].plot([], [], 'r', [], [], 'b', [], [], 'g')
             for i in xrange(3)]
    lines.append(panels)
    plt.setp(lines[2][0], label="X-axis")
    plt.setp(lines[2][1], label="Y-axis")
    plt.setp(lines[2][2], label="Z-axis")
    panels[0].set_title("Acceleration")
    panels[1].set_title("Velocity")
    panels[2].set_title("Displacement")
    # Shrink the bottom panel to make room for a legend beneath it.
    box = panels[2].get_position()
    panels[2].set_position(
        [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8])
    panels[2].legend(loc='upper center', bbox_to_anchor=(0.5, -0.2),
                     fancybox=True, ncol=3)
    plt.ion()
    plt.show(block=False)

    def start_daemon(target, name, args=()):
        # All workers are daemons so sys.exit() below ends the process.
        worker = threading.Thread(target=target, name=name, args=args)
        worker.daemon = True
        worker.start()
        return worker

    start_daemon(host.serv, 'server')
    start_daemon(datahandler.handler, 'datahandler', (buffer_q, plt, lines))
    print("datahandler start")
    start_daemon(servogene.servo_gene, 'servo_generation', (strategy_q,))
    print("servo_generation start")
    start_daemon(monitor.monitor, 'monitor')
    print("monitor start")
    graph.run()
    sys.exit()
def get_size():
    """Count distinct RETWEETED relations rooted at source users.

    A "source" user has outgoing RETWEETED edges but no incoming ones.
    Returns the number of matched (n.author, r, m.author) records.
    """
    records = graph.run(
        'MATCH (n:User)-[r:RETWEETED]-(m:User) '
        'where (n)-[:RETWEETED]->() and not ()-[:RETWEETED]->(n)'
        ' RETURN DISTINCT n.author,r,m.author'
    ).data()
    return len(records)
def route():
    """Validate the routing request and return the computed route as JSON.

    The request body is a urlencoded query string; on validation failure
    the error payload is returned with HTTP 400.
    """
    form = ImmutableMultiDict(urlparse.parse_qs(request.data))
    validation = validate(form)
    if validation['errors']:
        return jsonify(validation), 400
    targets = form.getlist('target_node_ids')
    speed = form.get('speed')
    bonuses = get_bonuses(form)
    return jsonify(graph.run(targets, bonuses, speed))
def P1(fo, strategy_q, buffer_q):
    """Initialise the three-panel live plot and start the worker daemons.

    graph.run() blocks on the UI main loop; when it returns the process
    exits.  NOTE(review): `fo` is unused — it appears to belong to the
    disabled ml thread; confirm before removing.
    """
    # --- matplotlib setup -------------------------------------------------
    figure, axes = plt.subplots(3, sharex=True)
    plot_lines = []
    for axis in axes:
        # Red/blue/green line triple = x/y/z component per panel.
        plot_lines.append(axis.plot([], [], 'r', [], [], 'b', [], [], 'g'))
    plot_lines.append(axes)

    plt.setp(plot_lines[2][0], label="X-axis")
    plt.setp(plot_lines[2][1], label="Y-axis")
    plt.setp(plot_lines[2][2], label="Z-axis")
    axes[0].set_title("Acceleration")
    axes[1].set_title("Velocity")
    axes[2].set_title("Displacement")

    # Make room under the bottom panel for its legend.
    box = axes[2].get_position()
    axes[2].set_position(
        [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8])
    axes[2].legend(loc='upper center', bbox_to_anchor=(0.5, -0.2),
                   fancybox=True, ncol=3)
    plt.ion()
    plt.show(block=False)

    # --- worker threads (all daemons) ------------------------------------
    server_thread = threading.Thread(target=host.serv, name='server')
    server_thread.daemon = True
    server_thread.start()

    handler_thread = threading.Thread(
        target=datahandler.handler, name='datahandler',
        args=(buffer_q, plt, plot_lines))
    handler_thread.daemon = True
    handler_thread.start()
    print("datahandler start")

    servo_thread = threading.Thread(
        target=servogene.servo_gene, name='servo_generation',
        args=(strategy_q,))
    servo_thread.daemon = True
    servo_thread.start()
    print("servo_generation start")

    monitor_thread = threading.Thread(target=monitor.monitor, name='monitor')
    monitor_thread.daemon = True
    monitor_thread.start()
    print("monitor start")

    graph.run()
    sys.exit()
def get_longest_path():
    """Return the single longest REPLIED_TO chain starting from a root user.

    Roots are nodes with outgoing REPLIED_TO edges but none incoming.
    Query approach adapted from:
    https://stackoverflow.com/questions/41805735/cypher-query-to-find-the-longest-path-using-neo4j-3-1-0-version
    https://stackoverflow.com/questions/41789561/find-longest-path-in-graph
    """
    return graph.run(
        'match (n)'
        ' where (n)-[:REPLIED_TO]->() and not ()-[:REPLIED_TO]->(n)'
        ' match p = (n)-[:REPLIED_TO*1..]->(m)'
        ' return p, length(p) as L'
        ' order by L desc LIMIT 1'
    ).data()
def parse_data():
    """Fetch the flat sample stream from `graph` and split it into frames.

    Returns a list of frames, each a slice of `size` consecutive samples.
    TODO: reading/parsing this way is acknowledged as slow and ugly.
    """
    raw = graph.run()
    length = graph.get_length()
    size = graph.get_size()
    samples = list(raw[:length])
    # Chunk the flat list into consecutive frames of `size` values each.
    return [samples[i:i + size] for i in xrange(0, len(samples), size)]
def parse_data():
    """Read the sample stream from the graph DLL and group it into frames.

    TODO: this straight read-then-slice approach is known to be slow.
    NOTE(review): `graph` is presumably the ctypes-loaded graph.dll
    (see the disabled LoadLibrary line in history) — confirm.
    """
    stream = graph.run()
    total = graph.get_length()
    frame_len = graph.get_size()
    values = list(stream[:total])
    frames = []
    for start in xrange(0, len(values), frame_len):
        frames.append(values[start:start + frame_len])
    return frames
def print_longest_path():
    """Summarise RETWEETED chain lengths from root users.

    Returns (total_length, chain_count, mean_length).
    NOTE(review): raises ZeroDivisionError when no chains match —
    identical to the original behaviour, kept deliberately.
    """
    records = graph.run(
        'match (n)'
        ' where (n)-[:RETWEETED]->() and not ()-[:RETWEETED]->(n)'
        ' match p = (n)-[:RETWEETED*1..]->(m)'
        ' return p, length(p) as L'
    ).data()
    # (A previous variant appended "order by L desc LIMIT 5".)
    lengths = [record['L'] for record in records]
    total = sum(lengths)
    count = len(lengths)
    average = total * 1. / count * 1.
    return total, count, average
def get_breadth():
    """Return the three largest RETWEETED fan-outs among root users.

    For each root (outgoing RETWEETED edges, none incoming) the query
    yields its out-degree (`mostNodesCnt`) paired with the lengths of
    its RETWEETED paths, ordered by fan-out, limited to 3 records.
    (Earlier REPLIED_TO-based variants of this query were discarded.)
    """
    return graph.run(
        'match (n)'
        ' WITH SIZE((n)-[:RETWEETED]->()) as mostNodesCnt'
        ' where (n)-[:RETWEETED]->() and not ()-[:RETWEETED]->(n)'
        ' match p = (n)-[:RETWEETED*1..]->(m)'
        ' return mostNodesCnt, length(p) as L ORDER BY mostNodesCnt desc LIMIT 3'
    ).data()
sail_top_mount_x = mast_base_x sail_top_mount_y = mast_base_y - mast_hight sail_bottom_mount_x = mast_base_x sail_bottom_mount_y = mast_base_y sail_left_edge_x = ship_bow_x - ship_size * 134 sail_left_edge_y = (sail_top_mount_y + sail_bottom_mount_y) / 2 sail_right_edge_x = ship_bow_x - ship_size * 96 sail_right_edge_y = sail_left_edge_y g.penSize(pen_width_1) g.penColor('black') sail_color = '#c9c094' g.brushColor(sail_color) g.polygon([(sail_top_mount_x, sail_top_mount_y), (sail_left_edge_x, sail_left_edge_y), (sail_right_edge_x, sail_right_edge_y)]) g.polygon([(sail_bottom_mount_x, sail_bottom_mount_y), (sail_left_edge_x, sail_left_edge_y), (sail_right_edge_x, sail_right_edge_y)]) ship_1_bow_x = 581 ship_1_size = 1 draw_a_ship(ship_1_bow_x, ship_1_size) ship_2_bow_x = 296 ship_2_size = 0.43 draw_a_ship(ship_2_bow_x, ship_2_size) g.run()
(xf5[9], yf5[9]), (650, 435), (675, 450), (xf6[0], yf6[0]), (xf6[1], yf6[1]), (xf6[2], yf6[2]), (xf6[3], yf6[3]), (xf6[4], yf6[4]), (xf6[5], yf6[5]), (xf6[6], yf6[6]), (xf6[7], yf6[7]), (xf6[8], yf6[8]), (xf6[9], yf6[9]), (xf6[10], yf6[10]), (xf6[11], yf6[11]), (xf6[12], yf6[12]), (xf6[13], yf6[13])]) # рисует третью линию гор, используя для рисования кривых массивы xf5, yf5 и xf6, yf6 canvasSize(1000, 500) windowSize(1000, 500) points_filling() backstage() mtn_line1() mtn_line2() mtn_line3() sun(475, 95, 50) penColor(51, 0, 29) brushColor(51, 0, 29) bird(750, 400, 1) bird(650, 330, 0.8) bird(775, 340, 0.5) bird(675, 350, 0.5) bird(475, 207, 0.5) bird(475, 170, 0.5) bird(425, 225, 0.5) bird(415, 160, 0.5) run()
import graph
import math


def dragon(n, x0, y0, x1, y1, q):
    """Recursively draw one segment of a dragon curve.

    n      -- remaining recursion depth; at 0 the segment is drawn.
    (x0,y0)-(x1,y1) -- segment endpoints.
    q      -- fold direction (+1 / -1), picks the rotation sign.

    Relies on the module-level `alf` (set under __main__) for the fold
    angle; drawing goes through graph.line().
    """
    if n == 0:
        graph.line(x0, y0, x1, y1)
        return
    # Rotate the segment by q*alf and scale by cos(alf) to locate the
    # fold point, then recurse on both halves with opposite folds.
    angle = alf * q
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    mid_x = cos_a * ((x1 - x0) * cos_a - (y1 - y0) * sin_a) + x0
    mid_y = cos_a * ((x1 - x0) * sin_a + (y1 - y0) * cos_a) + y0
    dragon(n - 1, x0, y0, mid_x, mid_y, 1)
    dragon(n - 1, mid_x, mid_y, x1, y1, -1)


if __name__ == '__main__':
    alf = 45 * math.pi / 180
    dragon(16, 100, 100, 400, 400, 1)
    graph.run()
# time.sleep(0.8) # globalvar.s1=105 # globalvar.s2=105 # globalvar.s4=74 # globalvar.s3=74 # time.sleep(0.8) globalvar.s1=48 time.sleep(0.1) globalvar.s4=44 time.sleep(0.5) globalvar.s1=84 globalvar.s2=120 globalvar.s3=127 globalvar.s4=82 time.sleep(0.5) globalvar.s2=140 globalvar.s3=147 time.sleep(0.1) print (globalvar.s1,globalvar.s2,globalvar.s3,globalvar.s4) if __name__=='__main__': t1 = threading.Thread(target=host.serv, name='server') t1.daemon = True t1.start() t2 = threading.Thread(target=servogene, name='servogene') t2.daemon = True t2.start() graph.run() sys.exit()
def get_totals():
    """Return `length(n)` for every node in the graph, one record per node."""
    return graph.run('match (n) return length(n) as T').data()
x0 = width - width / 2 * 0.4 y0 = height - height / 2 * 0.4 xk = 0.4 yk = 0.4 def shrinkPoint(v, x0, y0, xk, yk): return [(v[0] - x0) * xk + x0, (v[1] - y0) * yk + y0] def shrinkPolygon(polygon, x0, y0, xk, yk): return { 'color': polygon['color'], 'points': [shrinkPoint(v, x0, y0, xk, yk) for v in polygon['points']] } polygons = [] for i in range(0, 3): shrink = SIZE**(-(-tickId % SPEED + (i - 1) * SPEED) / SPEED) polygons += [ shrinkPolygon(polygon, width / 2, height / 3, shrink, shrink) for polygon in scene[0]['polygons'] ] updatePolygons() gr.onTimer(drawLoop, 1000 // 30) gr.run()
super(MoNet, self).__init__() self.conv1 = torch_geometric.nn.GMMConv(dataset.num_features, args.hidden, dim=2, kernel_size=args.kernel_size) self.conv2 = torch_geometric.nn.GMMConv(args.hidden, dataset.num_classes, dim=2, kernel_size=args.kernel_size) def reset_parameters(self): self.conv1.reset_parameters() self.conv2.reset_parameters() def forward(self, data): x, edge_index, edge_attr = data.x, data.edge_index, data.edge_attr x = F.dropout(x, p=args.dropout, training=self.training) x = F.elu(self.conv1(x, edge_index, edge_attr)) x = F.dropout(x, p=args.dropout, training=self.training) x = self.conv2(x, edge_index, edge_attr) return F.log_softmax(x, dim=1) device = torch.device('cuda', args.device_idx) args.data_fp = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', args.dataset) dataset = Planetoid(args.data_fp, args.dataset) dataset.transform = transform run(dataset, MoNet(dataset), args.runs, args.epochs, args.lr, args.weight_decay, args.early_stopping, device)