def FileUploadCompleted(self, request, context):
    """Handle a notification that a chunk upload finished.

    Leader: records the chunk's upload status in the file log (after a
    majority-consensus heartbeat) and broadcasts a heartbeat so followers
    replicate the new entry.  Follower: forwards the request to the leader.

    Always returns a raft_proto.Empty message — a gRPC servicer method
    must never return None (the original returned bare None when the
    consensus check failed).
    """
    log_info(
        "################################### FILE_UPLOAD_COMPLETED_ARRIVED!!! #################################")
    log_info("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")
    if Globals.NODE_STATE == NodeState.LEADER:
        chunk_id = request.chunkUploadInfo.chunkId
        lst_dc = [(request.chunkUploadInfo.uploadedDatacenter.ip,
                   request.chunkUploadInfo.uploadedDatacenter.port)]
        if not _send_heartbeat_to_check_majority_consensus():
            # Fixed: previously returned bare None here, which is invalid
            # for a gRPC handler.
            return raft_proto.Empty()
        # NOTE(review): 'UploadFaied' is the (misspelled) name declared in
        # raft_proto — renaming must start in the proto definition.
        Tables.insert_file_chunk_info_to_file_log(
            request.fileName, chunk_id, lst_dc,
            raft_proto.Uploaded if request.isSuccess else raft_proto.UploadFaied)
        _send_heartbeat()
        log_info("###########################################################################")
    else:
        # Not the leader: proxy the call to the leader when one is reachable.
        client = get_leader_client()
        if client:
            my_reply = client._FileUploadCompleted(request)
            return my_reply
        else:
            return raft_proto.Empty()
    return raft_proto.Empty()
def db_viterbi_alignment(es, fs, transfrom=2, transto=1,
                         db="sqlite:///:memory:",
                         init_val=1.0e-10):
    """Calculate the Viterbi alignment for the sentence pair (es, fs).

    Word-translation and alignment probabilities are read from the
    tables "from{transfrom}to{transto}_wordprob" and
    "from{transfrom}to{transto}_wordalign" in the given database.

    Arguments:
        es: target-side sentence (sequence of words)
        fs: source-side sentence (sequence of words)
        transfrom, transto: language ids used to build the table prefix
        db: SQLAlchemy database URL
        init_val: probability used when a pair is absent from the tables

    Returns:
        defaultdict mapping each target position j (1-based) to the
        source position i (1-based) maximizing
        p(e|f) * p(i|j, l_e, l_f); 0 means no source word won.
    """
    engine = create_engine(db)
    # create session
    Session = sessionmaker(bind=engine)
    session = Session()
    # tablenames
    table_prefix = "from{0}to{1}".format(transfrom, transto)
    wordprob_tablename = table_prefix + "_" + "wordprob"
    wordalign_tablename = table_prefix + "_" + "wordalign"
    # tables
    WordProbability = Tables().get_wordprobability_table(wordprob_tablename)
    WordAlignment = Tables().get_wordalignment_table(wordalign_tablename)

    def get_wordprob(e, f, init_val=1.0e-10):
        # p(e|f); falls back to init_val when the pair is not stored.
        query = session.query(WordProbability).filter_by(transto=e,
                                                         transfrom=f)
        try:
            return query.one().prob
        except sqlalchemy.orm.exc.NoResultFound:
            return init_val

    def get_wordalign(i, j, l_e, l_f, init_val=1.0e-10):
        # p(i|j, l_e, l_f); falls back to init_val when not stored.
        query = session.query(WordAlignment).filter_by(from_pos=i,
                                                       to_pos=j,
                                                       to_len=l_e,
                                                       from_len=l_f)
        try:
            return query.one().prob
        except sqlalchemy.orm.exc.NoResultFound:
            return init_val

    # algorithm
    max_a = collections.defaultdict(float)
    l_e = len(es)
    l_f = len(fs)
    for (j, e) in enumerate(es, 1):
        # current_max = (best source position, best score so far)
        current_max = (0, -1)
        for (i, f) in enumerate(fs, 1):
            val = get_wordprob(e, f, init_val=init_val) *\
                get_wordalign(i, j, l_e, l_f, init_val=init_val)
            # select the first one among the maximum candidates
            # (strict '<' keeps the earliest position on ties)
            if current_max[1] < val:
                current_max = (i, val)
        max_a[j] = current_max[0]
    return max_a
def assignPickMeUpDrinkPreference(screen):
    """Ask the user to pick a person, then a drink, and store it as that
    person's pick-me-up (PMU) drink."""
    chosen_person = tables.handleSingleSelectTable(
        screen, "People", State._people, "", 0,
        "Select a person to assign drink to")
    drink_prompt = f"Select drink to assign to {chosen_person.displayName}"
    chosen_drink = tables.handleSingleSelectTable(
        screen, "Drinks", State._drinks, "", 0, drink_prompt)
    chosen_person.PMUDrink = chosen_drink
def preparations(self):
    """Create the JSON data file, then create and fill the database tables."""
    os.system('clear')
    print('get datas and create a json file...')
    self.datas.mkjsonfile()
    # Fixed typo in the progress message ("fille" -> "fill").
    print('create and fill tables...')
    Tables.creation()
    Tables.fill_tables()
def createData(self):
    """Create the terms table in the sqlite database at self.dbs.

    The connection is released in a ``finally`` block so it is closed
    even when ``execute`` raises (the original leaked it on error and
    carried a dead ``#try:`` comment).
    """
    t = Tables()
    sql = t.terms()
    conn = sqlite3.connect(self.dbs)
    try:
        c = conn.cursor()
        c.execute(sql)
        conn.commit()
    finally:
        conn.close()
def getAllIpAddress(self):
    """Create the ipAddress table and read the china IP list file.

    Returns None when the file cannot be read; otherwise falls through
    with ``iplines`` populated.
    """
    table = Tables()
    table.createTable("ipAddress")
    iplines = []
    try:
        # 'with' closes the file even on error (the original leaked the
        # handle and used the Python-2-only 'except Exception,e' syntax).
        with open("chinaiplist.txt") as ipfile:
            iplines = ipfile.readlines()
    except Exception:
        self._logger.error("error occured when open file")
        return None
def __init__(self):
    """Set up the logger, a Firefox webdriver, and the Baidu helpers."""
    self._logger = Logger(__file__)
    # Plain Firefox is used; the flash-disabling profile setup was
    # abandoned (old commented-out code removed).
    self._browser = webdriver.Firefox()
    self.baidu = Baidu(self._browser)
    self.map = BaiduMap()
    # NOTE(review): hard-coded Baidu API key — consider moving to config.
    self.ak = "sh0wDYRg1LnB5OYTefZcuHu3zwuoFeOy"
    self.table = Tables()
def create_train_db(transfrom=2, transto=1, lang1method=lambda x: x, lang2method=lambda x: x, db="sqlite:///:memory:", limit=None, loop_count=1000): engine = create_engine(db) # create session Session = sessionmaker(bind=engine) session = Session() # tablenames table_prefix = "from{0}to{1}".format(transfrom, transto) wordprob_tablename = table_prefix + "_" + "wordprob" wordalign_tablename = table_prefix + "_" + "wordalign" # tables WordProbability = Tables().get_wordprobability_table(wordprob_tablename) WordAlignment = Tables().get_wordalignment_table(wordalign_tablename) # create table for word probability WordProbability.__table__.drop(engine, checkfirst=True) WordProbability.__table__.create(engine) print("created table: {0}to{1}_wordprob".format(transfrom, transto)) # create table for alignment probability WordAlignment.__table__.drop(engine, checkfirst=True) WordAlignment.__table__.create(engine) print("created table: {0}to{1}_wordalign".format(transfrom, transto)) # IBM learning with ProgressLine(0.12, title='IBM Model learning...'): # check arguments for carete_corpus corpus = create_corpus(db=db, limit=limit, lang1method=lang1method, lang2method=lang2method) sentences = [(item["lang{0}".format(transto)], item["lang{0}".format(transfrom)]) for item in corpus] t, a = ibmmodel2.train(sentences=sentences, loop_count=loop_count) # insert with ProgressLine(0.12, title='Inserting items into database...'): for (_to, _from), prob in t.items(): session.add(WordProbability(transto=_to, transfrom=_from, prob=float(prob))) for (from_pos, to_pos, to_len, from_len), prob in a.items(): session.add(WordAlignment(from_pos=from_pos, to_pos=to_pos, to_len=to_len, from_len=from_len, prob=float(prob))) session.commit()
def AddFileLog(self, request, context):
    """Append a file-log entry (leader only) and acknowledge with id=1."""
    if Globals.NODE_STATE == NodeState.LEADER:
        request.log_index = Globals.get_next_log_index()
        Tables.FILE_LOGS.append(request)
        log_info("LOG ADDED")
        # Refresh the table log / file-info table from the updated list.
        Tables.set_table_log(Tables.FILE_LOGS)
    reply = raft_proto.Ack()
    reply.id = 1
    return reply
def RequestFileUpload(self, request, context):
    """Plan a file upload and reply with the available proxies.

    Leader: after a majority-consensus heartbeat, records one
    UploadRequested log entry per chunk (datacenter chosen by hashing)
    and returns a ProxyList of available proxies.  An empty ProxyList is
    returned when the file already exists or consensus fails.
    Follower: forwards the call to the leader, or returns an empty
    ProxyList when no leader is reachable.
    """
    if Globals.NODE_STATE != NodeState.LEADER:
        client = get_leader_client()
        if client:
            return client._RequestFileUpload(request)
        return file_transfer_proto.ProxyList()

    reply = file_transfer_proto.ProxyList()
    file_name = request.fileName
    total_chunks = math.ceil(request.fileSize / CHUNK_SIZE)
    if Tables.is_file_exists(file_name):
        return reply
    dcs = Tables.get_all_available_dc()
    if not _send_heartbeat_to_check_majority_consensus():
        return reply
    # Reserve one hash-chosen datacenter per chunk in the file log.
    for chunk_id in range(total_chunks):
        chosen_dcs = [get_rand_hashing_node(dcs, file_name, chunk_id)]
        Tables.insert_file_chunk_info_to_file_log(
            file_name, chunk_id, chosen_dcs, raft_proto.UploadRequested)
    _send_heartbeat()
    proxy_infos = []
    for ip, port in Tables.get_all_available_proxies():
        info = file_transfer_proto.ProxyInfo()
        info.ip = ip
        info.port = port
        proxy_infos.append(info)
    log_info("LST_PROXIES:")
    log_info(reply.lstProxy)  # logged before extend, as in the original
    reply.lstProxy.extend(proxy_infos)
    log_info("Replied to :")
    log_info("############################")
    return reply
def __init__(self):
    """Store the MySQL connection settings and a Tables helper."""
    self.host = HOST
    self.user = USER
    self.password = PASSWORD
    # Fixed database name for this application.
    self.db_name = 'PureBeurre'
    self.tables = Tables()
def test_find_people_by_team(self):
    """findPeopleByTeam returns exactly the people on the given team,
    preserving their original order."""
    screen = initializeScreen()
    # Arrange: three Academy members (first is the initiator) + two others.
    teams = ["Academy", "Academy", "Academy", "Hermes", "Sainsburys Bank"]
    testPeople = []
    for team in teams:
        person = Mock(Person)
        person.team = team
        testPeople.append(person)
    initiator = testPeople[0]
    expected_output = testPeople[:3]
    # Act
    actual_output = tables.findPeopleByTeam(initiator.team, testPeople)
    # Assert
    self.assertEqual(len(expected_output), len(actual_output))
    self.assertEqual(expected_output, actual_output)
    deinitializeScreen()
def __init__(self, version):
    """Configure the cipher for the given AES key size.

    Only AES-128 is implemented.  The original silently fell through for
    192/256/unknown sizes, producing a half-initialized object that later
    failed with confusing AttributeErrors; now it raises immediately.

    Arguments:
        version: key size in bits (128 supported; 192/256 not implemented)

    Raises:
        NotImplementedError: for the recognized-but-unimplemented 192/256.
        ValueError: for any other key size.
    """
    if version == 128:
        self.encrypt = self._encrypt_128
        self.decrypt = self._decrypt_128
        self.encrypt_str = self._encrypt_128_str
        self.decrypt_str = self._decrypt_128_str
        # Default key 0x00..0x0f used by the self-tests.
        self.test_key = list(range(16))
        self.N_k = 4   # key length in 32-bit words
        self.N_b = 4   # block size in 32-bit words
        self.N_r = 10  # number of rounds for AES-128
        self.tables = Tables()
    elif version in (192, 256):
        raise NotImplementedError(
            "AES-{0} is not implemented yet".format(version))
    else:
        raise ValueError("unsupported AES key size: {0}".format(version))
def RaftHeartbeat(self, request, context):
    """Handle a heartbeat from a (claimed) leader.

    Becomes a follower of the sender when its table log is longer than
    ours, or equally long with a newer cycle number.  A heartbeat from a
    node that is not the recorded leader is rejected with ack.id = -1.
    On acceptance: resets the election timer, replicates the sender's
    table log, and acks with the local log length.
    """
    log_info("heartbeat arrived: ", len(Tables.FILE_LOGS))
    ack = raft_proto.Ack()
    if len(request.tableLog) > len(Tables.FILE_LOGS):
        # Sender has a longer log: follow it and adopt its cycle/leader.
        Globals.NODE_STATE = NodeState.FOLLOWER
        Globals.CURRENT_CYCLE = request.cycle_number
        Globals.HAS_CURRENT_VOTED = False
        Globals.NUMBER_OF_VOTES = 0
        Globals.LEADER_PORT = request.leader_port
        Globals.LEADER_IP = request.leader_ip
    elif len(request.tableLog) == len(Tables.FILE_LOGS) and request.cycle_number > Globals.CURRENT_CYCLE:
        # Same log length but a newer cycle: likewise follow the sender.
        Globals.NODE_STATE = NodeState.FOLLOWER
        Globals.CURRENT_CYCLE = request.cycle_number
        Globals.HAS_CURRENT_VOTED = False
        Globals.NUMBER_OF_VOTES = 0
        Globals.LEADER_PORT = request.leader_port
        Globals.LEADER_IP = request.leader_ip
    elif request.leader_ip != Globals.LEADER_IP or request.leader_port != Globals.LEADER_PORT:
        # Heartbeat from a node we do not recognize as leader: reject.
        ack.id = -1
        return ack
    # Valid heartbeat: postpone the next election timeout.
    random_timer.reset()
    # Update Table_log and File_info_table
    Tables.set_table_log(request.tableLog)
    ack.id = len(Tables.FILE_LOGS)
    log_info("Tables.TABLE_PROXY_INFO")
    pprint.pprint(Tables.TABLE_PROXY_INFO)
    log_info("Tables.TABLE_DC_INFO")
    pprint.pprint(Tables.TABLE_DC_INFO)
    log_info("###########################################################")
    return ack
class Sessions:
    """Container for the application's shared service handles.

    All attributes are created once at class-definition time and shared
    by every consumer of this class.
    """

    # proxy.io client; presumably backed by an aiohttp ClientSession —
    # TODO(review): confirm against proxy_io's signature.
    proxy = proxy_io(api_key=CONFIG.proxyio["key"],
                     session=AIOHTTP.ClientSession)
    websocket = WebSocket()
    webhook = WebhookSend()
    steam = Steam()
    # database handle exposed by the Tables helper
    database = Tables().database
    server = Server().load()
    cdn = Cdn().load()
def __init__(self):
    """Prepare the logger, browser, output files and crawl counters."""
    self._logger = Logger(__file__)
    # the entry point of grabbing
    self.base = "http://scenic.cthy.com"
    self.provinces = []
    # Plain Firefox driver (the PhantomJS variant was abandoned).
    self._browser = webdriver.Firefox()
    self.tabopera = Tables()
    # Append-mode output files, kept open for the scraper's lifetime.
    self.record = open("record.txt", "a+")
    self.fdate = open("date.txt", "a+")
    self.fprice = open("price.txt", "a+")
    # Crawl position counters — presumably used to resume a crawl
    # (province index / page / item number); confirm against callers.
    self.sprovince = 0
    self.spage = 1
    self.snum = 0
    self.picturenum = 10
    self.baidu = Baidu(self._browser)
    self.map = BaiduMap()
    # NOTE(review): hard-coded Baidu API key — consider moving to config.
    self.ak = "sh0wDYRg1LnB5OYTefZcuHu3zwuoFeOy"
def __init__(self):
    """Wire route, middleware, API and table handlers around this client.

    Assumes the developer has already initialized the needed sessions
    correctly before constructing this object.
    """
    self.routes = Routes(obj=self)
    self.middlewares = Middlewares(obj=self)
    self.api = Api(obj=self)
    self.tables = Tables(obj=self)
def test_filter_table_1(self):
    """filterTable keeps only the entries whose displayName matches 'D'."""
    # Arrange: two names starting with 'D', one that does not.
    names = ['David', 'Dave', 'Henry']
    test_peeps = [SimpleNamespace(displayName=name) for name in names]
    # Act
    filtered_peeps = tables.filterTable(test_peeps, "D")
    # Assert
    self.assertEqual(2, len(filtered_peeps))
def AddProxy(self, request, context):
    """Register a proxy with the leader, or forward the call to it.

    Replies with ack.id = -1 when consensus fails or no leader exists.
    """
    if Globals.NODE_STATE == NodeState.LEADER:
        if not _send_heartbeat_to_check_majority_consensus():
            nack = raft_proto.Ack()
            nack.id = -1
            return nack
        Tables.register_proxy(request.ip, request.port)
        _send_heartbeat()
        return raft_proto.Ack()
    # Follower: delegate to the leader when one is reachable.
    client = get_leader_client()
    if client:
        return client._AddProxy(request)
    nack = raft_proto.Ack()
    nack.id = -1
    return nack
def get_file_lists(request):
    """Build a FileList reply of locally known files, plus (for client
    requests) the files known to the other raft nodes."""
    remote = request_file_list_from_other_raft_nodes(request) if request.isClient else []
    all_files = remote + Tables.get_all_available_file_list()
    reply = file_transfer_proto.FileList()
    if all_files:
        reply.lstFileNames.extend(all_files)
    return reply
def __init__(self, application=None):
    """Bind SQLAlchemy to the Flask application and create all tables.

    Exits the process when no application is supplied or the database
    handle could not be created (original behaviour, kept so existing
    callers are unaffected).
    """
    # Fixed: PEP 8 — compare to None with 'is', not '=='.
    if application is None:
        # NOTE(review): exit status 0 signals success to the shell even
        # though this is an error path; kept for compatibility, but
        # status 1 would be more correct.
        sys.exit(0)
    application.config.from_object(config)
    self.db = SQLAlchemy(application)
    if self.db is None:
        sys.exit(-1)
    from tables import Tables
    # Initialize the table objects (translated from the original comment).
    self.tables = Tables(self.db)
    self.create_all()
def create_phrase_db(limit=None, lang1method=lambda x: x, lang2method=lambda x: x, init_val=1.0e-10, db="sqlite:///:memory:"): engine = create_engine(db) # create session Session = sessionmaker(bind=engine) session = Session() # tables Sentence = Tables().get_sentence_table() Phrase = Tables().get_phrase_table() # create table for word probability Phrase.__table__.drop(engine, checkfirst=True) Phrase.__table__.create(engine) print("created table: phrase") query = session.query(Sentence)[:limit] if limit \ else session.query(Sentence) with ProgressLine(0.12, title='extracting phrases...'): for item in query: lang1 = item.lang1 lang2 = item.lang2 print(" ", lang1, lang2) phrases = db_phrase_extract(lang1, lang2, lang1method=lang1method, lang2method=lang2method, init_val=init_val, db=db) for lang1ps, lang2ps in phrases: lang1p = u" ".join(lang1ps) lang2p = u" ".join(lang2ps) ph = Phrase(lang1p=lang1p, lang2p=lang2p) session.add(ph) session.commit()
def create_phrase_prob(db=":memory:"):
    """Compute phrase translation probabilities and store their logs.

    Recreates the "phraseprob" table, then for every (lang1p, lang2p)
    pair in "phrasecount" computes
        p2_1 = count / count(lang1p)   and   p1_2 = count / count(lang2p)
    and inserts log(p1_2) / log(p2_1) as a TransPhraseProb row.

    Arguments:
        db: path to the sqlite database file (":memory:" by default)
    """
    # create phrase_prob table
    table_name = "phraseprob"
    engine = create_engine("sqlite:///{0}".format(db))
    # create session
    Session = sessionmaker(bind=engine)
    session = Session()
    # tables
    TransPhraseProb = Tables().get_transphraseprob_table()
    # create table for phrase probability
    TransPhraseProb.__table__.drop(engine, checkfirst=True)
    TransPhraseProb.__table__.create(engine)
    session.commit()
    print("created table: {0}".format(table_name))

    con = sqlite3.connect(db)
    try:
        cur = con.cursor()
        cur_sel = con.cursor()
        cur.execute("select lang1p, lang2p, count from phrasecount")
        with ProgressLine(0.12, title='phrase learning...'):
            for lang1p, lang2p, count in cur:
                # for p2_1
                cur_sel.execute(u"""select count from lang1_phrasecount where langp=?""",
                                (lang1p,))
                count2_1 = list(cur_sel)[0][0]
                p2_1 = count / count2_1
                # for p1_2
                cur_sel.execute(u"""select count from lang2_phrasecount where langp=?""",
                                (lang2p,))
                count1_2 = list(cur_sel)[0][0]
                p1_2 = count / count1_2
                # insert item (log-probabilities)
                transphraseprob = TransPhraseProb(lang1p=lang1p,
                                                  lang2p=lang2p,
                                                  p1_2=math.log(p1_2),
                                                  p2_1=math.log(p2_1))
                session.add(transphraseprob)
                print(u"  added phraseprob: {0} <=> {1} ".format(lang1p, lang2p))
        session.commit()
    finally:
        # Fixed: always release the raw sqlite connection; the original
        # never closed it and leaked it on any exception.
        con.close()
def __init__(self):
    """Create an Agent object.

    The Agent chooses the actions to take according to some policy. It
    also keeps track of q-value estimates, visits, and which actions do
    not change the robot/stack locations.
    """
    self.tables = Tables()
def __init__(
    self,
    phase_1_corner: int = 0,
    phase_1_edge: int = 0,
    phase_1_ud_slice: int = 0,
    phase_2_corner: int = 0,
    phase_2_edge: int = 0,
    phase_2_ud_slice: int = 0,
):
    """Store the six phase coordinates and load the lookup tables."""
    # Copy every coordinate argument onto the instance under its own name.
    coords = {
        "phase_1_corner": phase_1_corner,
        "phase_1_edge": phase_1_edge,
        "phase_1_ud_slice": phase_1_ud_slice,
        "phase_2_corner": phase_2_corner,
        "phase_2_edge": phase_2_edge,
        "phase_2_ud_slice": phase_2_ud_slice,
    }
    for attr, value in coords.items():
        setattr(self, attr, value)
    self.tables = Tables()
def __init__(self, cube: Cube):
    """Set up the two-phase (Kociemba) solver state for the given cube."""
    super().__init__(cube)
    self.max_moves_length = 29  # Upper bound of the kociemba algorithm
    self.moves = []
    # Timing bookkeeping for the solve.
    self.start = 0
    self.end = 0
    self.time_to_solve = 0
    self.tables = Tables()
    # Derive coordinate form: facelets -> cubies -> coordinates.
    self.face_cube = self.cube.to_face_cube()
    self.cubie_cube = self.face_cube.to_cubie_cube()
    self.coord_cube = CoordCube.from_cubie_cube(self.cubie_cube)
    # Validate cube
    self._validate_cube()
    # `moves_face` stores the moves' face whereas `moves_turn` stores the moves' number of
    # quarter turns (i.e., 1 for clockwise, 2 for clockwise twice, and 3 for counterclockwise).
    # These are used to generate the list of strings for `moves` once solved.
    self.moves_face = [0 for i in range(self.max_moves_length)]
    self.moves_turn = [0 for i in range(self.max_moves_length)]
    # Coordinates needed for phase 1 (index n = coordinate after n moves;
    # index 0 holds the starting cube's coordinate).
    self.phase_1_corner = [0 for i in range(self.max_moves_length)]
    self.phase_1_corner[0] = self.coord_cube.phase_1_corner
    self.phase_1_edge = [0 for i in range(self.max_moves_length)]
    self.phase_1_edge[0] = self.coord_cube.phase_1_edge
    self.phase_1_ud_slice = [0 for i in range(self.max_moves_length)]
    self.phase_1_ud_slice[0] = self.coord_cube.phase_1_ud_slice
    # Coordinates needed for phase 2
    self.phase_2_corner = [0 for i in range(self.max_moves_length)]
    self.phase_2_corner[0] = self.coord_cube.phase_2_corner
    self.phase_2_edge = [0 for i in range(self.max_moves_length)]
    self.phase_2_edge[0] = self.coord_cube.phase_2_edge
    self.phase_2_ud_slice = [0 for i in range(self.max_moves_length)]
    self.phase_2_ud_slice[0] = self.coord_cube.phase_2_ud_slice
    # This stores the minimum number of moves required to complete phase 1 or 2 after n moves
    # and are derived from the pruning tables.
    self.phase_1_min_distance = [0 for i in range(self.max_moves_length)]
    self.phase_1_min_distance[0] = self._phase_1_heuristic(0)
    self.phase_2_min_distance = [0 for i in range(self.max_moves_length)]
    # Used for finding out which moves were calculated in phase 1 and phase 2
    self.phase_1_moves_index = 0
def create_corpus(db="sqlite:///:memory:",
                  lang1method=lambda x: x, lang2method=lambda x: x,
                  limit=None):
    """Yield {"lang1": ..., "lang2": ...} dicts from the sentence table.

    Each sentence is passed through the corresponding preprocessing
    callable before being yielded.
    """
    engine = create_engine(db)
    # create session
    session = sessionmaker(bind=engine)()
    Sentence = Tables().get_sentence_table()
    if limit:
        rows = session.query(Sentence)[:limit]
    else:
        rows = session.query(Sentence)
    for row in rows:
        yield {"lang1": lang1method(row.lang1),
               "lang2": lang2method(row.lang2)}
def addNewPerson(screen, name="", team="", favDrink=None, PMUDrink="N/A"):
    """Interactively collect a new person's details and add them to State.

    Any of name/team/favDrink already supplied by the caller is kept and
    not prompted for again.
    """
    UI.clearScreen(screen)
    # Switch curses into normal (echoing, line-buffered) input mode.
    curses.nocbreak()
    screen.keypad(False)
    curses.echo()
    if not name:
        name = UI.cursedInput(screen, "Enter name of person: ")
    if not team:
        team = UI.cursedInput(screen, "Enter name of team person is in: ")
    if not favDrink:
        if State._drinks != []:
            favDrink = tables.handleSingleSelectTable(
                screen, "Drinks", State._drinks, "", 0,
                "Select this persons favorite drink")
        elif favDrink == None:
            # No drinks registered yet — tell the user and move on.
            UI.clearScreen(screen)
            screen.addstr("No drinks found. You can assign drink later...")
            screen.getch()
    State._people.append(Person(name, team, favDrink, PMUDrink))
def GetFileLocation(self, request, context):
    """Reply with the chunk count and available proxies for a stored file."""
    reply = file_transfer_proto.FileLocationInfo()
    file_name = request.fileName
    is_file_found = True
    if file_name not in Tables.TABLE_FILE_INFO.keys():
        # Unknown file: reply is returned mostly empty (isFileFound unset
        # here, matching the original behaviour).
        return reply
    max_chunks = len(Tables.TABLE_FILE_INFO[file_name].keys())
    log_info("max_chunks from raft ", max_chunks)
    proxy_infos = []
    for ip, port in Tables.get_all_available_proxies():
        info = file_transfer_proto.ProxyInfo()
        info.ip = ip
        info.port = port
        proxy_infos.append(info)
    reply.fileName = file_name
    reply.maxChunks = max_chunks
    reply.lstProxy.extend(proxy_infos)
    reply.isFileFound = is_file_found
    return reply
        # NOTE(review): this fragment is the tail of a function whose
        # definition lies outside this view; indentation reconstructed.
        i += 1
        if i > 20:
            break

    # store the project
    DB.storeProject( project )

#---------------------------------------------------------------------------
# Entry point to the main function of the program.
#---------------------------------------------------------------------------
if __name__ == '__main__':
    # NOTE(review): Python 2 script (print statement syntax).
    print 'Icon database (installation interface)'

    # start in a blank slate
    Tables.drop();
    # install the tables
    Tables.create()

    # install mlp project
    p = Project( id='testmlp', type='MLP')
    p.batchSize = 16
    p.patchSize = 39
    p.hiddenUnits = [500,500,500]
    install( p )

    # install cnn project
    # NOTE(review): the visible text ends here; install(cnn) presumably
    # follows but is truncated from this view.
    cnn = Project( id='testcnn', type='CNN')
    cnn.trainTime = 30
    cnn.learningRate = 0.1
def createConducts(self, a):
    """Build the school-conducts SQL for `a` and run it via logConnect."""
    self.a = a
    statement = Tables().schoolConducts(self.a)
    self.logConnect(statement)