def chat_message_received(self, message, player):
    """Handle a lobby chat message from *player*.

    Messages starting with "/debug" are cheat commands (only honoured while
    the player is in an Uno game) and are never forwarded; every other
    message is broadcast to the whole lobby. The sender always gets a
    success flag on "lobby_chat".
    """
    successful = True
    forward_message = True
    if message.startswith("/debug"):
        forward_message = False
        if player.in_game(Uno):
            options, args = CHEAT_PARSER.parse_args(
                split(message[len("/debug") + 1:]))
            # PEP 8: compare to None with `is`/`is not`, not `==`.
            if options.player is None:
                # No explicit target -> apply the cheat to the sender.
                player_ = player
            else:
                player_ = find_player(options.player)
            if player_ is None:
                successful = False
            else:
                for _ in range(options.amount):
                    self.game.give_card(options.face, options.color, player_)
    if forward_message:
        broadcast({"player" : player, "message" : message},
                  "lobby_chat_message", self.players,
                  json_encoder=UserEncoder)
    # Nothing can go wrong (yet)
    player.send(successful, "lobby_chat")
def initialize(self):
    """Ship the model to all workers and open the UDP queueing socket."""
    u.broadcast(self._model)
    # UDP endpoint for the queueing mechanism; port 8000 is fixed.
    self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    bind_address = (u.ip_address(u.network_interface()), 8000)
    self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self._sock.bind(bind_address)
def end_action():
    """Close the Q&A session and notify every active group."""
    # TODO: notify all groups
    global IN_ACTION
    global groups
    IN_ACTION = False
    # Only groups that actually have a group id get the announcement.
    targets = [g for g in groups if g._group_id]
    broadcast(itchat, "@all 提问时间结束:)", targets)
def _stop(self):
    """Stop the running game, if any, and tell every lobby member."""
    if not self.playing:
        return
    self.playing = False
    # Let the game release its resources and per-user namespaces.
    self.game.stop()
    self.game = None
    broadcast(False, "lobby_playing", self.players)
    if config.lobby_debug:
        logging.info("Lobby '%s' stopped" % self)
def SCHED_start_nodes(peers):
    """Give the flask apps time to come up, then broadcast the start signal."""
    tag = random.randint(0, 1000)  # correlates this call's log lines
    log_info(
        "[SCHED_start_node]({}) Wait 3 sec for flask app to launch... ".format(tag))
    time.sleep(3)
    log_info(
        "[SCHED_start_node]({}) Attempt broadcast start signal now... ".format(tag))
    utils.broadcast("go", peers, "/start")
    log_info("[SCHED_start_node]({}) Completed... ".format(tag))
    return "start sent"
def change_direction(self):
    """Flip the direction of play and announce it to all players."""
    if self.direction == Uno.LEFT:
        self.direction = Uno.RIGHT
    elif self.direction == Uno.RIGHT:
        self.direction = Uno.LEFT
    else:
        # Unexpected value -> normalize to RIGHT, same as the happy path.
        self.direction = Uno.RIGHT
    broadcast(self.direction, "uno_direction", self.lobby.players)
    if self.debug:
        label = "left" if self.direction == Uno.LEFT else "right"
        logging.info("Direction changed to '%s'" % label)
def start(self, player):
    """Start a game if *player* is the host, none runs yet and >= 2 joined."""
    may_start = (player == self.host
                 and not self.playing
                 and self.player_count >= 2)
    if may_start:
        self.playing = True
        broadcast(True, "lobby_playing", self.players)
        # The concrete game class could be made configurable later.
        self.game = Uno(self, debug=config.game_debug)
        if config.lobby_debug:
            logging.info("Game '%s' started in lobby '%s'" % (self.game, self))
    player.send(may_start, "lobby_start")
def receive():
    """Accept clients forever, register them and spawn a handler thread each."""
    while True:
        client, address = server.accept()
        print(f'Conectado com {str(address)}')
        # Ask the client for a nickname (a fake login) and record it.
        send_serialized(client, 'NICK')
        nickname = get_serialized_message(client).message
        nicknames.append(nickname)
        clients.append(client)
        print(f'Nickname do client é {nickname}')
        broadcast(f'{nickname} entrou.', client)
        send_serialized(client, 'Conectado ao servidor! [-h para ajuda]')
        worker = threading.Thread(target=handle, args=(client, ))
        worker.start()
def end_turn(self, player_inc=1, time_expired=False):
    """Hand the turn to the player *player_inc* seats ahead and announce it."""
    upcoming = self.get_next_player(player_inc)
    if self.debug:
        if time_expired:
            logging.info("Turn time of '%i' expired" % self.turn_time)
        logging.info("Next player: '%s'" % upcoming)
    # Close out the current player's turn state...
    current_state = self.playing_player.games.uno
    current_state.turn_over = True
    current_state.has_drawn_card = False
    # ...and open the next player's.
    upcoming.games.uno.turn_over = False
    self.playing_player = upcoming
    self.reset_turn_timer()
    broadcast(self.playing_player, "uno_turn", self.lobby.players,
              json_encoder=UserEncoder)
def join(self, player):
    """Add *player* to this lobby unless already present or a game runs."""
    accepted = player not in self.players and not self.playing
    if accepted:
        player.lobby = self
        # Tell everyone already here about the newcomer *before* adding
        # them, then send the full roster (including themselves) back.
        broadcast(player.name, "lobby_user_join", self.players)
        self.players.append(player)
        player.send(self.players, "lobby_players", json_encoder=UserEncoder)
        # Re-send the host in case it changed since the lobby list was sent.
        player.send(self.host.name, "lobby_host")
        if config.lobby_debug:
            logging.info("Player '%s' joined lobby '%s'" % (player, self))
    player.send(accepted, "lobby_join")
def __init__(self, lobby, turn_time=20.0, debug=False):
    """Set up a new Uno round for every player currently in *lobby*.

    turn_time: seconds a player may take before the turn is forced over.
    """
    super().__init__(lobby, debug=debug)
    self._draw_card_stack = []
    # First card on the stack is never a special card
    self.card_stack = [choice(REGULAR_CARDS)]
    self.direction = Uno.RIGHT
    self.turn_time = turn_time
    for player in lobby.players:
        # Per-player Uno state lives in a namespace on the player object.
        player.games.uno = SimpleNamespace()
        player.games.uno.turn_over = True
        player.games.uno.has_drawn_card = False
        # Deal the starting hand of seven cards.
        player.games.uno.cards = sample(ALL_CARDS, 7)
        player.send(player.games.uno.cards, "uno_give_card",
                    json_encoder=CardEncoder)
    # The first player in the lobby list opens the game.
    self.playing_player = lobby.players[0]
    self.playing_player.games.uno.turn_over = False
    # Prevent race conditions if player draws or plays too quickly in
    # succession
    self.play_card_lock = Lock()
    self.draw_card_lock = Lock()
    self.turn_timer = None
    # Send the first card on the stack to all players
    broadcast(self.card_stack[0], "uno_card_stack", lobby.players,
              json_encoder=CardEncoder)
    # Send whose turn it is to all players
    broadcast(self.playing_player, "uno_turn", lobby.players,
              json_encoder=UserEncoder)
    self.reset_turn_timer()
def SCHED_master_node(blockchain, sched, node_registry):
    """Shut flask and the scheduler down once every node reported finished."""
    tag = random.randint(0, 1000)  # correlates this call's log lines
    done = len(blockchain.peers)
    expected = len(node_registry.nodemap.keys())
    log_info(
        "[SCHED_master_node]({}) Checking if finish flag received form all ({}) nodes... "
        .format(tag, expected))
    if done < expected:
        log_info("[SCHED_master_node]({}) {}/{} received. ".format(
            tag, done, expected))
        return ""
    log_info(
        "[SCHED_master_node]({}) All nodes finished, attempt flask and sched shutdown... "
        .format(tag))
    # Allow experiment to take over.
    try:
        utils.broadcast("shutdown_attempt", [blockchain.node_ip],
                        "/flask_shutdown")
        sched.shutdown()
        log_warn("sched shutdown (ok)")
    except apscheduler.schedulers.SchedulerNotRunningError:
        log_warn("sched shutdown (not ok)")
    return ""
def handle(client):
    """Serve one connected client until it exits or the connection drops.

    Regular (serialized) messages are broadcast to the other clients;
    anything that fails deserialization is treated as file-sharing data
    and forwarded best-effort.
    """
    while True:
        try:
            data = client.recv(1024)
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; Exception keeps the original best-effort intent.
        except Exception:
            logout(client)
            break
        if not data:
            # Peer closed the connection cleanly.
            logout(client)
            break
        try:
            # Common case: a serialized chat message.
            data = get_serialized_message(client, data)
            if data.message == 'EXIT':
                logout(client)
                break
            broadcast(data.message, client)
        except Exception:
            # Not serialized -> file-sharing payload; forward best-effort.
            try:
                server_resend_file(client, data)
            except Exception:
                pass
def run(args) :
    # Collect cystines directly preceded by another cystine in filtered
    # PDBs and write the pairs out as CSV via CysCys.
    desc = "Describe script here"  # fixed typo ("Decribe")
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-r','--resolution',type=float,default=1.2,
        help='Get pdbs with resolution higher than the given')
    parser.add_argument('-v','--verbose',action='store_true',help='Be verbose')
    parser.add_argument('-c','--cisonly',action='store_true',help='Get cis only')
    args = parser.parse_args()
    # Get pdbs
    pdbs = utils.get_filtered_pdbs(high_resolution=args.resolution)
    if args.verbose :
        s = '%i pdbs found with resolution higher than %.1f'  # fixed typo ("highr")
        utils.broadcast(s % (len(pdbs),args.resolution))
    # Get connection to mongo
    mongocon = utils.MongodbConnection()
    # NOTE(review): this hard-coded list overrides the filtered query above;
    # looks like a leftover debugging shortcut -- confirm before removing.
    pdbs = ['3edh','2z63','2z66','3zui','4mge','3sr3','2q3z','3dst',
            '3dsu','1wd3','3h0t','3h0u','4nn5','4nf4','3hol','3t6q']
    # iterate through pdbs
    cc = CysCys()
    for i,pdb_id in enumerate(pdbs) :
        if i % 100 == 0 :
            print >> sys.stderr, "Through %i of %i.." % (i,len(pdbs))
        pdb_id = pdb_id.lower()
        if args.cisonly :
            if not has_ciscys(pdb_id,db=mongocon.db) : continue
        # Get cystines
        residues = utils.MongoResidueList(pdb_id = pdb_id)
        cystines = get_cystines(residues, db=mongocon.db)
        if args.verbose :
            s = '%i cystines found in %s'
            utils.broadcast(s % (len(cystines),pdb_id))
        # See if any cystine is preceded by cystine
        for cys in cystines :
            # Get previous residues if they are cystine
            precedes = [res for res in cys.prevres if res.resname == 'CYS' ]
            if len(precedes) == 0 : continue
            # get resolution
            resolution = residues.get_resolution(db=mongocon.db)
            # get number of residues
            residue_n = utils.get_num_aas_in_chain(pdb_id,cys.chain_id,db=mongocon.db)
            # add the cyscys
            for res in precedes :
                cc.add_cyscys(res,cys,resolution,residue_n)
            if args.verbose :
                s = '%i adjacent cystines found in %s'
                utils.broadcast(s % (len(precedes),pdb_id))
    cc.write_csv()
def evaluate(results, num_batches, evaluate_fn):
    """Gather per-rank results, score them on rank 0, share the criterion."""
    num_batches = tools.reduce_sum(num_batches)
    results = tools.collect_results_gpu(results, num_batches.item())
    rank = tools.get_dist_info()[0]
    if rank == 0:
        embed_dicts = [r[0] for r in results]
        label_list = [r[1] for r in results]
        # Concatenate every embedding key across the collected batches.
        all_embeds = {
            k: torch.cat(tuple(d[k] for d in embed_dicts), dim=0)
            for k in embed_dicts[0].keys()
        }
        all_labels = torch.cat(label_list, dim=0)
        metrics = evaluate_fn(all_embeds, all_labels)
        early_stop_criterion = torch.tensor([metrics['criterion']],
                                            device='cuda')
    else:
        # Placeholder on non-zero ranks; overwritten by the broadcast below.
        metrics = None
        early_stop_criterion = torch.tensor([0.], device='cuda')
    # early_stop_criterion is used for all ranks, so broadcast it
    early_stop_criterion = tools.broadcast(early_stop_criterion, 0)
    return metrics, early_stop_criterion
# NOTE(review): fragment -- the opening of this if/elif chain (and its
# enclosing callback definition) lies outside the visible chunk.
        print('Turtle reached the goods room, start object detect')
        broadcast("龟龟已到达,开始寻找需要的物品。")
        msg = "Object_detect"
    elif data.data == "Object_detected":
        # Object detect finish, Turtle goes back to the people room
        print('Object detect finish, Turtle goes back to the people room')
        broadcast("已找到物品,龟龟将返回送给客人。")
        msg = "Turtle_back"
    elif data.data == "Turtle_reach_human2":
        # Send message to TCP Server for call face&mic
        print('Turtle already gone back to the people room, start the Human compare')
        broadcast("龟龟已回到客人的房间,开始分发物品。")
        msg = "Send_object"
    pub.publish(msg)
    # rate.sleep()

if __name__ == '__main__':
    try:
        # Announce departure, publish the start message, then spin on
        # the Main_Sub callbacks until shutdown.
        rospy.init_node('Main_py', anonymous=False)
        start_str = "Turtle_start"
        broadcast("龟龟启动,出发去客人房间。")
        pub.publish(start_str)
        rospy.Subscriber("Main_Sub", String, Main_callback)
        rospy.spin()
    except rospy.ROSInitException:
        pass
# Run the flask app until the simulation signals completion, then pull the
# full chain from every peer.
log.setLevel(logging.ERROR)
app.run(host=node_registry.ip, port=port)
print("\n\n==================== Simulation complete ===============\n\n")
# collect all blocks from peers
max_height = config.END_OF_CHAIN
# One ListDict per height, keyed by block hash, so competing (forked)
# blocks at the same height are all kept.
chain = [utils.ListDict() for h in range(max_height+1)]
for p, peer in enumerate(blockchain.peers):
    print('\n\n--------------------------')
    log_info('Processing peer ({})/({}) @ {} ...'.format(p, len(blockchain.peers), peer))
    idx = 0
    block_height = 0
    # Keep requesting block idx from the peer until one at max_height arrives.
    while block_height < max_height:
        log_info('\tGetting block ({})/({}) ... latest height obtained: ({})'.format(idx,max_height,block_height))
        response = utils.broadcast(str(idx), [peer], "/sync_next_block")
        if response[0] != None:
            block_dict = utils.receive(response[0])
        else:
            block_dict = None
        if type(block_dict) == dict:
            block_hash = block_dict['block_hash']
            # NOTE(review): block_height is read *before* the int() normalization
            # below -- this only works if 'height' already arrives numeric;
            # confirm the wire format.
            block_height = block_dict['height']
            log_info('(height,hash) = {},\n\t\t{}'.format(block_height,block_hash[0:25]))
            # Normalize types before storing.
            block_dict['height'] = int(block_dict['height'])
            block_dict['timestamp'] = float(block_dict['timestamp'])
            chain[block_height].append(block_hash,block_dict)
        else:
            log_info('invalid...')
        idx = idx + 1
log_info('Chain saved: {}'.format([len(each) for each in chain]))
def broadcast_transaction(self,peers):
    """Gossip this transaction to every peer."""
    payload = self.export_transaction_to_dict()
    utils.broadcast(payload,
                    peers=peers,
                    route="/peer_gossiped_new_transaction")
def backward(self, g_in: np.ndarray):
    """Product-rule gradients: scale g_in by the other operand, then
    shape-match each result to its operand via broadcast; None for
    non-differentiable inputs."""
    grad_x = mul(g_in, self.y.data) if self.x.diff else None
    grad_y = mul(g_in, self.x.data) if self.y.diff else None
    dx = None if grad_x is None else broadcast(self.x.data, grad_x)
    dy = None if grad_y is None else broadcast(self.y.data, grad_y)
    return [dx, dy]
# Flat select()-based chat server loop: `server` accepts new connections,
# `inputs` holds every socket to poll, `users` maps sockets to nicknames.
users = []
while True:
    readable, writable, exceptional = select.select(inputs, [], inputs)
    for s in readable:
        if s is server:
            # User connected to server
            conn, addr = server.accept()
            inputs.append(conn)
            # First payload from a new connection is taken as the nickname.
            nickname = conn.recv(1024)
            user = User(conn, nickname)
            users.append(user)
            print(f"Nowy użytkownik: {nickname}")
        else:
            try:
                data = s.recv(1024)
            except ConnectionResetError as e:
                # User closed connection
                user = get_user(users, s)
                # NOTE(review): `get_username` is referenced without calling
                # it; unless it is a property this formats a bound method --
                # confirm.
                user_nickname = user.get_username
                print(f"{user_nickname} wyszedł.")
                inputs.remove(s)
                users.remove(user)
                s.close()
                broadcast(inputs, f'{user_nickname} wyszedł.')
                continue
            if data:
                # Data from user
                data = pickle.loads(data)
                if data.type == 'message':
                    broadcast(inputs, f'{data.author}: {data.content}')
def run(args) : desc = "A query script to getnon-pro cis residues from the Top8000 at a " desc+= "given homology level." parser = argparse.ArgumentParser(description=desc) parser.add_argument('-o','--homology_level', type=int,default=70, help='Homology level can be 50, 70, 90, or 95. Default=70') parser.add_argument('-v','--verbose',action='store_true',help='Be verbose') args = parser.parse_args() # Get connection to mongo mongocon = utils.MongodbConnection() # Get pdbs pdbs = utils.get_Top8000_pdb_list(homology_level=args.homology_level, connection=mongocon) if args.verbose : s = '%i pdbs found in Top8000 at homology level %i' utils.broadcast(s % (len(pdbs),args.homology_level)) # Set noncispro filename ncp_fn = 'non_cis_pro_filtered_%i.csv' % args.homology_level ncp_log = open(ncp_fn,'w') heads = ['res0','res0_pass_filter','res1','res1_pass_filter'] heads+= ['resolution','omega','omega_type','phi0','psi0','phi1','psi1'] print >> ncp_log, ','.join(heads) # Iterate through pdbs counts = {"all_omega": {'n_unique':0,'n_alts':0,'n_unique_filter':0,'n_alts_filter':0}, "pro": {'n_unique':0,'n_alts':0,'n_unique_filter':0,'n_alts_filter':0}, "cispro": {'n_unique':0,'n_alts':0,'n_unique_filter':0,'n_alts_filter':0}, "nonpro": {'n_unique':0,'n_alts':0,'n_unique_filter':0,'n_alts_filter':0}, "nonprocis": {'n_unique':0,'n_alts':0,'n_unique_filter':0,'n_alts_filter':0}} nonprocis = NonProCis() for i,pc in enumerate(pdbs) : if i % 100 == 0 : print >> sys.stderr, "Through %i of %i.." % (i,len(pdbs)) pdb_id,chain = pc #pdb_id,chain = "1d3g","A" if args.verbose : print "working on %s %s..." 
% (pdb_id,chain) omegas = Omegas(pdb_id,chain,db=mongocon.db) omegas.set_counts() counts['all_omega']['n_unique'] += omegas.counts.all_omega_unique counts['all_omega']['n_unique_filter'] +=\ omegas.counts.all_omega_unique_filter counts['all_omega']['n_alts'] += omegas.counts.all_omega_alt counts['all_omega']['n_alts_filter'] += omegas.counts.all_omega_alt_filter counts['pro']['n_unique'] += omegas.counts.pro_unique counts['pro']['n_alts'] += omegas.counts.pro_alt counts['pro']['n_unique_filter'] += omegas.counts.pro_unique_filter counts['pro']['n_alts_filter'] += omegas.counts.pro_alt_filter counts['cispro']['n_unique'] += omegas.counts.cis_pro_unique counts['cispro']['n_alts'] += omegas.counts.cis_pro_alt counts['cispro']['n_unique_filter'] += omegas.counts.cis_pro_unique_filter counts['cispro']['n_alts_filter'] += omegas.counts.cis_pro_alt_filter counts['nonpro']['n_unique'] += omegas.counts.nonpro_unique counts['nonpro']['n_alts'] += omegas.counts.nonpro_alt counts['nonpro']['n_unique_filter'] += omegas.counts.nonpro_unique_filter counts['nonpro']['n_alts_filter'] += omegas.counts.nonpro_alt_filter counts['nonprocis']['n_unique'] += omegas.counts.cis_nonpro_unique counts['nonprocis']['n_alts'] += omegas.counts.cis_nonpro_alt counts['nonprocis']['n_unique_filter'] += omegas.counts.cis_nonpro_unique_filter counts['nonprocis']['n_alts_filter'] += omegas.counts.cis_nonpro_alt_filter # write non-cis pro csv if they exist if omegas.counts.cis_nonpro_unique_filter > 0 : omegas.write_csv(log=ncp_log) # get all_aa counts #counts['all_aa']['n_unique'] += residues.counts.unique_canonical_aa #counts['all_aa']['n_unique_filter'] += \ # residues.counts.unique_canonical_aa_filter #counts['all_aa']['n_alts'] += residues.counts.all_canonical_aa #counts['all_aa']['n_alts_filter'] += residues.counts.all_canonical_aa_filter # get non-pro cis residues #cis_residues = get_cis_residues(residues) # Sanity check #assert len(cis_residues) > 0 #break #if i > 15 : break ncp_log.close() 
print >> sys.stderr, '%s written.' % ncp_fn re_fn = 'counts_%i.csv' % args.homology_level re_log = open(re_fn,'w') print >> re_log, "In %i pdbs there were :" % i s = ': all omegas unique' v = '%i' % counts['all_omega']['n_unique'] print >> re_log, v.ljust(10), s s = ': all omegas unique filter' v = '%.3f' % counts['all_omega']['n_unique_filter'] print >> re_log, v.ljust(10), s s = ': all omegas alts' v = '%i' % counts['all_omega']['n_alts'] print >> re_log, v.ljust(10), s s = ': all omegas alts filter' v = '%i' % counts['all_omega']['n_alts_filter'] print >> re_log, v.ljust(10), s s = ': pro unique' v = '%i' % counts['pro']['n_unique'] print >> re_log, v.ljust(10), s s = ': pro unique filter' v = '%.3f' % counts['pro']['n_unique_filter'] print >> re_log, v.ljust(10), s s = ': pro alts' v = '%i' % counts['pro']['n_alts'] print >> re_log, v.ljust(10), s s = ': pro alts filter' v = '%i' % counts['pro']['n_alts_filter'] print >> re_log, v.ljust(10), s s = ': cispro unique' v = '%i' % counts['cispro']['n_unique'] print >> re_log, v.ljust(10), s s = ': cispro unique filter' v = '%.3f' % counts['cispro']['n_unique_filter'] print >> re_log, v.ljust(10), s s = ': cispro alts' v = '%i' % counts['cispro']['n_alts'] print >> re_log, v.ljust(10), s s = ': cispro alts filter' v = '%i' % counts['cispro']['n_alts_filter'] print >> re_log, v.ljust(10), s s = ': nonpro unique' v = '%i' % counts['nonpro']['n_unique'] print >> re_log, v.ljust(10), s s = ': nonpro unique filter' v = '%.3f' % counts['nonpro']['n_unique_filter'] print >> re_log, v.ljust(10), s s = ': nonpro alts' v = '%i' % counts['nonpro']['n_alts'] print >> re_log, v.ljust(10), s s = ': nonpro alts filter' v = '%i' % counts['nonpro']['n_alts_filter'] print >> re_log, v.ljust(10), s s = ': nonprocis unique' v = '%i' % counts['nonprocis']['n_unique'] print >> re_log, v.ljust(10), s s = ': nonprocis unique filter' v = '%.3f' % counts['nonprocis']['n_unique_filter'] print >> re_log, v.ljust(10), s s = ': nonprocis alts' v 
= '%i' % counts['nonprocis']['n_alts'] print >> re_log, v.ljust(10), s s = ': nonprocis alts filter' v = '%i' % counts['nonprocis']['n_alts_filter'] print >> re_log, v.ljust(10), s re_log.close() print >> sys.stderr, '%s written.' % re_fn
def run(args) :
    # Build a library of 3-residue backbone fragments from the Top8000:
    # every clean (no outliers, no altlocs) residue triple is superimposed
    # via ca_dock and either written to output_fragmentsN.pdb directly or,
    # with -f, deduplicated by RMSD against the fragments seen so far.
    desc = "A query script to protein fragments Top8000 at a "
    desc+= "given homology level."
    print(desc)
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-o','--homology_level', type=int,default=70,
        help='Homology level can be 50, 70, 90, or 95. Default=70')
    parser.add_argument('-v','--verbose',action='store_true',help='Be verbose')
    parser.add_argument('-f','--filter_rmsd',action='store_true',help='Do RMSD filtering')
    parser.add_argument('-c','--do_cterm',action='store_true',help='Superimpose on C-term residue')
    parser.add_argument('-r','--rmsd_cutoff', type=float, default=0.5,help='RMSD cutoff value (default: 0.5)')
    args = parser.parse_args()
    # Residue of the triple to superimpose on: N-term ("0") or C-term ("2").
    superimpose_resnum = "0"
    if args.do_cterm:
        superimpose_resnum = "2"
    # Get connection to mongo
    mongocon = utils.MongodbConnection()
    # Get pdbs
    pdbs = utils.get_Top8000_pdb_list(homology_level=args.homology_level,
                                      verbose=True,
                                      connection=mongocon)
    if args.verbose :
        s = '%i pdbs found in Top8000 at homology level %i'
        utils.broadcast(s % (len(pdbs),args.homology_level))
    model_num = 1
    file_num = 0
    # fragment -> occurrence count (dict keys double as the fragment library)
    fragment_set = {}
    out_file = open("output_fragments0.pdb", 'w')
    mongocon.set_db(db='pdb_info')
    # seed db with good helix example : 1cxq 1.02A Trp134, Leu135, Ala136
    residues = utils.MongoResidueList(mongocon.db, "1cxq", "A", 'residues_colkeys')
    keys = residues.ordered_keys()
    for k in keys:
        mongores=residues[k]
        if mongores.resseq == "134": residue134 = mongores
        if mongores.resseq == "135": residue135 = mongores
        if mongores.resseq == "136": residue136 = mongores
    fragment = utils.MongoPdbFragment([residue134.clone(), residue135.clone(),
                                       residue136.clone()])
    fragment.set_bb_atoms(ca_dock(fragment.get_bb_atoms(), superimpose_resnum))
    fragment_set[fragment] = 1
    for i,pc in enumerate(pdbs) :
        if i % 100 == 0:
            print >> sys.stderr, "Through %i of %i.." % (i,len(pdbs))
            print("Fragments stored: " + str(len(fragment_set)))
        pdb_id,chain = pc
        if args.verbose : print "working on %s %s..." % (pdb_id,chain)
        residues = utils.MongoResidueList(mongocon.db, pdb_id, chain,
                                          'top8000_homology70_residues_test')
        keys = residues.ordered_keys()
        # ordered_keys seems to not sort quite correctly
        for k in keys :
            mongores = residues[k]
            if mongores.is_outlier() or not mongores.passes_filter('bb'):
                if args.verbose:
                    print("excluding "+str(mongores)+" because it has outlier")
            else:
                # prevres and nextres are lists because those residues might have alts
                if mongores.altloc=="":
                    if len(mongores.prevres) == 1:
                        prev_residue = mongores.prevres[0]
                        if prev_residue.is_outlier() or not prev_residue.passes_filter('bb') or not prev_residue.altloc=="":
                            if args.verbose:
                                print("excluding "+str(mongores)+" because previous residue has outlier or alt")
                        else:
                            if len(mongores.nextres) == 1:
                                next_residue = mongores.nextres[0]
                                if next_residue.is_outlier() or not next_residue.passes_filter('bb') or not next_residue.altloc=="":
                                    if args.verbose:
                                        print("excluding "+str(mongores)+" because next residue has outlier or alt")
                                else:
                                    # Roll over to a fresh output file before the
                                    # PDB MODEL number would overflow.
                                    if model_num == 9999:
                                        model_num = 1
                                        file_num = file_num + 1
                                        out_file.close()
                                        out_file = open("output_fragments"+str(file_num)+".pdb", 'w')
                                    fragment = utils.MongoPdbFragment([prev_residue.clone(), mongores.clone(), next_residue.clone()])
                                    fragment.set_bb_atoms(ca_dock(fragment.get_bb_atoms(), superimpose_resnum))
                                    if len(fragment_set) == 0:
                                        fragment_set[fragment] = 1
                                    else:
                                        if args.filter_rmsd:
                                            rmsd = 1
                                            working_frag_set = fragment_set.iteritems()
                                            # Periodically iterate most-frequent-first
                                            # so common fragments match early.
                                            if len(fragment_set) < 500 or len(fragment_set) % 500 == 0:
                                                working_frag_set = sorted(fragment_set.iteritems(),
                                                                          key=itemgetter(1),
                                                                          reverse=True)
                                            for test_frag_tup in working_frag_set:
                                                test_frag = test_frag_tup[0]
                                                rmsd = fragment.get_rmsd(test_frag)
                                                if rmsd <= args.rmsd_cutoff:
                                                    break
                                            if rmsd > args.rmsd_cutoff:
                                                # No close match -> new library entry.
                                                fragment_set[fragment] = 1
                                            else:
                                                # Close match -> bump its count.
                                                fragment_set[test_frag] = fragment_set[test_frag] + 1
                                        else:
                                            out_file.write("MODEL{:>9}\n".format(model_num))
                                            out_file.write(fragment.get_atom_records(translated=True, region='bb'))
                                            out_file.write("ENDMDL\n")
                                            model_num = model_num+1
    if args.filter_rmsd:
        # Dump the deduplicated library, most frequent fragments first.
        print(len(fragment_set))
        sorted_fragments = sorted(fragment_set.iteritems(), key=itemgetter(1),
                                  reverse=True)
        for filtered_frag, count in sorted_fragments:
            out_file.write("MODEL{:>9}{:>70}\n".format(model_num, count))
            out_file.write(filtered_frag.get_atom_records(translated=True, region='bb'))
            out_file.write("ENDMDL\n")
            model_num = model_num+1
    out_file.close()
def backward(self, g_in: np.ndarray):
    """Pass the upstream gradient to each differentiable operand,
    shape-matched to that operand via broadcast; None otherwise."""
    dx = None
    dy = None
    if self.x.diff:
        dx = broadcast(self.x.data, g_in)
    if self.y.diff:
        dy = broadcast(self.y.data, g_in)
    return [dx, dy]
def SCHED_mine_for_block_listener(event):
    """APScheduler job listener driving the mining loop.

    Depending on the finished job it: reschedules the idle job, reports
    end-of-chain to the master, generates new transactions, and validates /
    broadcasts a freshly mined block before rescheduling mining.
    """
    random_id = random.randint(0, 1000)
    log_info(
        "[SCHED_mine_for_block_listener]({}) Event '{}' finished... ".format(
            random_id, event.job_id))
    e_return = event.retval
    blockchain = e_return['blockchain']
    # Start of chain idling
    if event.job_id == 'my_idle' and blockchain.mining_paused:
        sched = e_return['sched']
        sched.add_job(SCHED_do_none, args=[blockchain, sched], id='my_idle')
        return 'idle'
    # END OF CHAIN condition check
    # FIX: was blockchain.chain[[-1]][0] -- a list cannot be indexed with a
    # list; take the first block entry at the newest height.
    current_height = blockchain.chain[-1][0].height
    log_info(
        "[SCHED_mine_for_block_listener]({}) Current height {}/{} ... ".format(
            random_id, current_height, config.END_OF_CHAIN))
    if current_height >= config.END_OF_CHAIN:
        dynamic_log_level.reset_user_log_level()
        log_info("[SCHED_mine_for_block_listener]({}) END OF CHAIN reached. ".
                 format(random_id))
        if config.MASTER is not None:
            log_info(
                "[SCHED_mine_for_block_listener]({}) Posting to Master @ {} ".
                format(random_id, config.MASTER))
            utils.broadcast({'who': blockchain.node_ip,
                             'num_trans_gen': blockchain.total_num_transactions_generated},
                            [config.MASTER], "/node_finished")
        else:
            log_info(
                "[SCHED_mine_for_block_listener]({}) Nothing to do.... hanging... "
                .format(random_id))
        return "END OF MINING..."
    # check if block has been discovered on the network as a whole
    if event.job_id == 'mining' or 'possible_block' in event.job_id:
        # invoke a function to generate more transactions
        amount = random.randint(0, config.TRANSACTION_RATE)
        log_info(
            "[SCHED_mine_for_block_listener]({}) Make some new transactions.. "
            .format(random_id))
        make_more_transactions(amount, blockchain)
        blockchain.total_num_transactions_generated = \
            blockchain.total_num_transactions_generated + amount
    # check if the mining job has finished
    if event.job_id == 'mining':
        new_block = e_return['new_block']
        sched = e_return['sched']
        if new_block:
            log_info(
                "[SCHED_mine_for_block_listener]({}) Minted and Mined new block @ {}"
                .format(random_id, new_block.block_hash))
            if blockchain.validate_possible_block(new_block):
                blockchain.add_block(new_block)
                new_block.broadcast_block(blockchain.peers)
            else:
                log_info(
                    "[SCHED_mine_for_block_listener]({}) Someone already mined a newer block! REJECT new block proposal"
                    .format(random_id))
            # NOTE(review): mining is rescheduled whether or not the block was
            # accepted -- confirm against the original (indentation was lost).
            sched.add_job(SCHED_mine_for_block, args=[blockchain, sched],
                          id='mining')
        else:
            log_info("[SCHED_mine_for_block_listener]({}) Mining returned {}".
                     format(random_id, new_block))
def play_card(self, card_id, player):
    """Try to play the card at index *card_id* from *player*'s hand.

    Validates turn ownership and card fit, applies the card effect
    (direction change, skip, draw penalties), announces the new stack top
    and the player's card count, and detects a win. The player always
    receives a success flag on "uno_play_card".
    """
    self.play_card_lock.acquire()
    successful = False
    # FIX: release the lock in a finally block so an exception inside the
    # handler can no longer leave it held forever.
    try:
        # If it's the turn of the player who wants to play a card
        if player == self.playing_player:
            # Is the card_id valid?
            if card_id in range(len(player.games.uno.cards)):
                # Acquire the card
                card = player.games.uno.cards[card_id]
                if self.debug:
                    logging.info("'%s' played: %s" % (player, card))
                    logging.info("Cards of '%s': %s" %
                                 (player, player.games.uno.cards))
                # Does the played card fit on top of the card stack?
                if self.card_stack[-1].can_play(card):
                    self.card_stack.append(card)
                    # Send the played card to all players
                    broadcast(card, "uno_card_stack", self.lobby.players,
                              json_encoder=CardEncoder)
                    # Change direction
                    if card.face == ROTATE:
                        # Only two players -> next turn same player
                        if len(self.lobby.players) != 2:
                            self.change_direction()
                            self.end_turn()
                        else:
                            # Turn goes on; player may draw another card
                            player.games.uno.has_drawn_card = False
                    # Skip player
                    elif card.face == BLOCK:
                        if len(self.lobby.players) != 2:
                            self.end_turn(player_inc=2)
                        else:
                            # Turn goes on; player may draw another card
                            player.games.uno.has_drawn_card = False
                    # Take two cards
                    elif card.face == TAKE_TWO:
                        self.give_cards(2, self.next_player)
                        self.end_turn()
                    # Take four cards (turn does not end: color still pending)
                    elif card.face == TAKE_FOUR:
                        self.give_cards(4, self.next_player)
                    # End turn only if the card does not require another card
                    elif card.face != PICK_COLOR:
                        self.end_turn()
                    # Remove the card from the players deck
                    del player.games.uno.cards[card_id]
                    broadcast({"player" : player,
                               "count" : len(player.games.uno.cards)},
                              "uno_card_count", self.lobby.players,
                              exclude=player, json_encoder=UserEncoder)
                    successful = True
                else:
                    if self.debug:
                        logging.warning("Card does not fit on top of stack. "
                                        "Is the client desynchronized? "
                                        "(player: '%s')" % player)
        # If player has no cards left
        if len(player.games.uno.cards) == 0:
            broadcast(player.name, "uno_win", self.lobby.players)
            self.lobby.stop()
    finally:
        self.play_card_lock.release()
    player.send(successful, "uno_play_card")
def Main_callback(data):
    """Advance the delivery state machine on each status message.

    Maps the incoming status string to a console line, a spoken
    announcement and the next command, then publishes that command.
    """
    global pub
    msg = ''
    print("Here is Main_callback.")
    # status -> (console message, spoken announcement, next command)
    transitions = {
        "Turtle_reach_human1": (
            'Turtle has reached the people room, start the face&mic',
            "龟龟已到达房间,开始寻找客人。", "Human_detect"),
        "Human1": (
            'Turtle has found the first guest.',
            "已找到第一位客人,正在寻找第二位客人。", "Human_find2"),
        "Human2": (
            'Turtle has found the second guest.',
            "已找到第二位客人,正在寻找第三位客人。", "Human_find3"),
        "Human3": (
            'Face&mic finish, Turtle ready to find goods',
            "已找到三位客人,出发去取客人需要的物品。", "Turtle_go"),
        "Turtle_reach_goods": (
            'Turtle reached the goods room, start object detect',
            "龟龟已到达,开始寻找需要的物品。", "Object_detect"),
        "Object_detected": (
            'Object detect finish, Turtle goes back to the people room',
            "已找到物品,龟龟将返回送给客人。", "Turtle_back"),
        "Turtle_reach_human2": (
            'Turtle already gone back to the people room, start the Human compare',
            "龟龟已回到客人的房间,开始分发物品。", "Send_object"),
    }
    if data.data in transitions:
        console_line, announcement, msg = transitions[data.data]
        print(console_line)
        broadcast(announcement)
    # Unknown statuses publish the empty string, exactly as before.
    pub.publish(msg)
def broadcast_block(self,peers):
    """Gossip this block to every peer."""
    block_dict = self.export_block_to_dict()
    utils.broadcast(block_dict,
                    peers=peers,
                    route="/peer_gossiped_new_block")
def leave(self, player):
    """Remove *player* from the lobby, stopping the game / deleting the
    lobby / reassigning the host as required, then confirm via
    "lobby_leave"."""
    successful = False
    if player in self.players:
        # If game is currently being played
        if self.playing:
            # Just to be sure
            if self.game != None:
                # Invoke player leave hook *before* removing player from
                # self.players
                self.game.player_leave(player)
        # Leave doesn't block, this could kick an unrelated player by
        # accident
        player_index = self.players.index(player)
        player.lobby = None
        del self.players[player_index]
        # Broadcast that a player has left
        broadcast(player_index, "lobby_user_leave", self.players)
        successful = True
        if self.playing:
            # Game stops when all but 1 player leaves
            if self.player_count <= 1:
                lobbies[self.name].stop()
                if config.lobby_debug:
                    logging.info("Too few players in '%s'. Stopping game..."
                                 % self)
        # No players left in lobby -> delete Lobby
        if self.player_count == 0:
            lobbies[self.name].stop()
            del lobbies[self.name]
            # NOTE(review): lobby_deleted is assigned but never read here --
            # confirm whether a caller inspects it or it is dead.
            lobby_deleted = True
            if config.lobby_debug:
                logging.info("Lobby '%s' is empty. Deleting..." % self)
        # Still players left
        # Host left -> Random player becomes host
        elif player == self.host:
            self.host = choice(self.players)
            broadcast(self.host.name, "lobby_host", self.players)
            if config.lobby_debug:
                logging.info("Lobby '%s' has new host '%s'"
                             % (self, self.host))
        if config.lobby_debug:
            logging.info("Player '%s' left lobby '%s'" % (player, self))
    player.send(successful, "lobby_leave")
def update_ticket(request):
    # AJAX endpoint applying one of several ticket mutations depending on
    # which fields arrive in request.POST['data']: priority change, engineer
    # (re)assignment, mark-resolved, or fetching the comment history.
    # NOTE(review): non-AJAX requests fall through and return None -- confirm
    # that is intended for this view.
    if request.is_ajax():
        try:
            data = json.loads(request.POST['data'])
            engineer_assigned = data.get('engineer_name', '')
            ticket = TicketRegister.objects.get(id=data['ticket_id'])
            mark_resolved = data.get('mark_resolved', '')
            status_id_for_resolved = TicketStatus.objects.get(
                name='resolved').id
            comments_received = data.get('comment', '')
            status_id_for_inprocess = TicketStatus.objects.get(
                name='inprocess').id
            priority = data.get('priority', '')
            engineer_to_change = data.get('engineer', '')
            comment_popup = data.get('comment_popup', '')
            action = ""
            now = datetime.now()
            time_of_action = "%s%s" % (now.strftime('%b. %d, %Y, %I:%M'),
                                       now.strftime('%p.').lower())
            user_performing_the_act = request.user
            user_id = User.objects.get(username=user_performing_the_act).id
            if priority != '':
                ticket.priority = priority.lower()
                action = 'updated priority to %s' % priority
            elif engineer_to_change != '':
                # First assignment moves the ticket to 'inprocess' and mails
                # the engineer; later assignments just swap the engineer.
                if ticket.status.name == 'unassigned':
                    ticket.status_id = status_id_for_inprocess
                    ticket.assigned_to_id = engineer_to_change
                    ticket.save()
                    send_mail(ticket, 'assigned')
                else:
                    ticket.assigned_to_id = engineer_to_change
                if engineer_assigned == "Assign to me":
                    engineer_assigned = user_performing_the_act
                action = 'updated engineer assigned to %s' % engineer_assigned
                broadcast(ticket_id=data['ticket_id'],
                          engineer_id=engineer_to_change,
                          engineer_name=engineer_assigned,
                          function='change_row',
                          user=request.user.username)
            elif mark_resolved != '' and comments_received != '':
                ticket = TicketRegister.objects.get(id=data['ticket_id'])
                ticket.resolved_by = request.user
                ticket.status_id = status_id_for_resolved
                comment = TicketComment.objects.create(
                    created_on=timezone.now(),
                    comment=comments_received,
                    complaints_id=data['ticket_id'],
                    created_by_id=user_id,
                    resolved_flag=True)
                comment.save()
                action = 'resolved by %s' % request.user
                send_mail(ticket, 'resolved')
                ticket.save()
                ticket_object = TicketRegister.objects.filter(
                    id=data['ticket_id'])
                # NOTE(review): from here this branch looks broken: `data`
                # (the request dict) is rebound to the serialized JSON
                # *string*, so data['ticket_id'] below would raise TypeError,
                # and ticket_object[0] is indexed again after ticket_object
                # was rebound to its 'fields' dict. Confirm and rewrite.
                data = serializers.serialize('json', ticket_object)
                ticket_object = json.loads(data)
                ticket_object = ticket_object[0]['fields']
                ticket_object['ticket_id'] = data['ticket_id']
                ticket_object['function'] = 'add_resolved_row'
                ticket_object['created_by_name'] = ticket_object[
                    0].created_by.username
                ticket_object['function'] = 'add_resolved_row'
                ticket_object['function'] = 'add_resolved_row'
                broadcast(ticket_id=data['ticket_id'],
                          function='delete_resolved_row',
                          user=request.user.username)
            elif comment_popup != '' and data['ticket_id'] != '':
                # Comment-history popup: return the full comment list as JSON.
                comments = TicketComment.objects.filter(
                    complaints_id=data['ticket_id'])
                comment = []
                for i in comments:
                    action = i.comment
                    now = i.created_on
                    time_of_action = "%s%s" % (now.strftime(
                        '%b. %d, %Y, %I:%M'), now.strftime('%p.').lower())
                    user_performing_the_act = i.created_by.username
                    resolved = i.resolved_flag
                    comment.append({
                        'action': action,
                        'time_of_action': time_of_action,
                        'user_performing_the_act': user_performing_the_act,
                        'resolved': resolved
                    })
                comment.append({'complaints': ticket.complaints})
                return HttpResponse(json.dumps(comment),
                                    content_type="application/json")
            # Record the performed action as an audit comment and persist.
            comments = TicketComment.objects.create(
                created_on=timezone.now(),
                comment=action,
                complaints_id=data['ticket_id'],
                created_by_id=user_id)
            comments.save()
            ticket.save()
            return HttpResponse(json.dumps({
                'action': action,
                'time_of_action': time_of_action,
                'user_performing_the_act': user_performing_the_act.username
            }), content_type="application/json")
        except Exception, e:
            print 'Error on line {}'.format(sys.exc_info()[-1].tb_lineno)
            print e.args
            return HttpResponseBadRequest(e)