def __init__(self):
    """Initialize container-manager state and remote name-server settings."""
    self.configure = Configure()
    # BUG FIX: was a discarded local `liveContainers = {}`; store the
    # mapping on the instance so other methods can use it.
    self.liveContainers = {}
    self.cgroups = {}
    self.host = None
    self.isRunning = False
    self.lastCurrent = 0
    # manages live containers and updates parameters
    self.liveContainerManager = None
    # default heartbeat interval (seconds)
    self.heartbeatInterval = 5
    # remote name server proxy
    self.remoteNameServer = None
    # remote name server host address
    self.remoteNameServerHost = ""
    # remote name server host port
    self.remoteNameServerPort = 0000
    # remote name server object ID
    self.remoteNameServerID = "container.master"
def __init__(self): #config database self.configure = Configure() dbpath = (self.configure.getDBPath())[1:-1] print "dbpath ", dbpath self.cx = sq.connect(dbpath) self.cu = self.cx.cursor()
def load_models(self, dir: str):
    """Collect model sub-directories for the current board configuration.

    :param dir: root directory that holds per-configuration model folders.
    """
    conf = Configure()
    conf.get_conf()
    board_conf_str = "{0}_{1}".format(conf.conf_dict["board_size"],
                                      conf.conf_dict["n_in_a_row"])
    model_path = pathlib.Path(dir) / board_conf_str
    model_path.mkdir(parents=True, exist_ok=True)
    # one folder per saved model under e.g. "15_5/"
    subdirs = (entry for entry in model_path.glob('*/') if entry.is_dir())
    self.all_model_path = sorted(subdirs)
    self.all_model_name = [p.name for p in self.all_model_path]
def clear():
    """Empty the Pulses and Indicators tables of the configured database."""
    configure = Configure("configures/config.cfg")
    db = Db(configure.get_attribute('main', 'db_path'),
            configure.get_attribute('main', 'db_name'))
    for table_name in ("Pulses", "Indicators"):
        db.clear_table(table_name)
def __init__(self): self.configure = Configure() self.configure.init('PicParser') #get width max_width = self.configure.getint('max_width') max_height = self.configure.getint('max_height') self.__maxsize = (max_width, max_height) print 'get maxsize', self.__maxsize #压缩的尺寸 self.__compressed_size = None
class PicParser: ''' 对图片的相关处理 ''' def __init__(self): self.configure = Configure() self.configure.init('PicParser') #get width max_width = self.configure.getint('max_width') max_height = self.configure.getint('max_height') self.__maxsize = (max_width, max_height) print 'get maxsize',self.__maxsize #压缩的尺寸 self.__compressed_size = None def init(self, source): print 'image open' imgData = StringIO(source) #the initial size try: self.img = Image.open(imgData) except: return False def getSize(self): return self.img.size def getCompressSize(self): #the pic size width = self.img.size[0] height = self.img.size[1] ppn = width / height #the compressed size print '__maxsize', self.__maxsize mwidth = self.__maxsize[0] mheight = self.__maxsize[1] if ppn > (mwidth / mheight): width = self.__maxsize[0] height = width / ppn else: height = self.__maxsize[1] width = ppn * height return (width, height) def getCompressedPic(self): ''' compress picture {size: size, source: source} ''' size = self.getCompressSize() _size = (int(size[0]), int(size[1])) source = self.img.resize(_size).tostring() return {'size':size, 'source':source}
def run():
    """Discover (or hand-pick) test cases and execute them."""
    path = Configure.get_value('Test', 'path')
    method = Configure.get_value('Test', 'method')
    if method == 'All':
        # auto-discover every test module under `path`
        suite = TestLoader().discover(path, pattern='*.py')
    elif method == 'Manual':
        # explicit comma-separated case list from configuration
        case_names = Configure.get_value('Test', 'manual_list').split(',')
        suite = build_suite(path, case_names)
    runner = TextTestRunner(verbosity=1)
    runner.run(suite)
class PicParser: ''' 对图片的相关处理 ''' def __init__(self): self.configure = Configure() self.configure.init('PicParser') #get width max_width = self.configure.getint('max_width') max_height = self.configure.getint('max_height') self.__maxsize = (max_width, max_height) print 'get maxsize', self.__maxsize #压缩的尺寸 self.__compressed_size = None def init(self, source): print 'image open' imgData = StringIO(source) #the initial size try: self.img = Image.open(imgData) except: return False def getSize(self): return self.img.size def getCompressSize(self): #the pic size width = self.img.size[0] height = self.img.size[1] ppn = width / height #the compressed size print '__maxsize', self.__maxsize mwidth = self.__maxsize[0] mheight = self.__maxsize[1] if ppn > (mwidth / mheight): width = self.__maxsize[0] height = width / ppn else: height = self.__maxsize[1] width = ppn * height return (width, height) def getCompressedPic(self): ''' compress picture {size: size, source: source} ''' size = self.getCompressSize() _size = (int(size[0]), int(size[1])) source = self.img.resize(_size).tostring() return {'size': size, 'source': source}
def start():
    """Interactively pick two players and run one game to completion."""
    conf = Configure()
    conf.get_conf()

    def player_init(player_selected, name):
        # Build a player for the chosen type (1=human, 2=MCTS, 3=MCTS+NN).
        if player_selected == 1:
            return Human(name=name)
        elif player_selected == 2:
            search_times, greedy_value = set_AI_conf(search_times=2000, greedy_value=5.0)
            return AI_MCTS(name=name, search_times=search_times, greedy_value=greedy_value,
                           is_output_analysis=conf.conf_dict["AI_is_output_analysis"])
        elif player_selected == 3:
            network = select_network()
            search_times, greedy_value = set_AI_conf(search_times=400, greedy_value=5.0)
            return AI_MCTS_Net(name=name, policy_value_function=network.predict,
                               board_to_xlabel=network.board_to_xlabel,
                               is_training=False, search_times=search_times,
                               greedy_value=greedy_value,
                               is_output_analysis=conf.conf_dict["AI_is_output_analysis"])

    prompt1 = ("Please input first player. Press <Ctrl-C> to end\n"
               "1: Human\n"
               "2: AI with pure Monte Carlo tree search\n"
               "3: AI with Monte Carlo tree search & neural network\n"
               ": ")
    player1_selected, name1 = select_player(prompt1, allowed_input=[1, 2, 3])
    player1 = player_init(player1_selected, name1)

    prompt2 = ("Please input second player. Press <Ctrl-C> to end\n"
               "1: Human\n"
               "2: AI with pure Monte Carlo tree search\n"
               "3: AI with Monte Carlo tree search & neural network\n"
               ": ")
    player2_selected, name2 = select_player(prompt2, allowed_input=[1, 2, 3])
    player2 = player_init(player2_selected, name2)

    console_renderer = ConsoleRenderer()
    print("############### Game Start ###############")
    winner = Game.start_until_game_over(player1, player2,
                                        board_renderer=console_renderer)
    if winner == BOARD.o:
        print("Congrats! \"O\" wins.")
    elif winner == BOARD.x:
        print("Congrats! \"X\" wins.")
    else:
        print("Draw!")
    print("############### Game Over ###############")
def __init__(self):
    """Log in to the wargame SDK and wire up the AI algorithm object."""
    conf = Configure(sys.argv)
    conf.username = "******"  # player's own user name
    conf.password = "******"  # player's own password
    # Standalone simulation system: room_id is fixed to 1.
    self._sdk = CSFInterface(conf.ip, conf.room_id, conf.camp_id,
                             conf.scenario_id, conf.seat_id)
    self._init_failed = False
    self._scenario = None
    self._camp_id = None
    self._seat_ids = None
    self._war_map = None
    self._logger = Logger("ai_1")
    self._fetch_scenario_finish = False
    self._seat_ids = []
    self._round = 1
    # seat_id is a comma-separated string of seat numbers
    for seat_id in conf.seat_id.split(','):
        self._seat_ids.append(int(seat_id))
    self._camp_id = conf.camp_id
    self._game_started = False
    # Create the AI algorithm object.
    self._algorithm = AIAlgorithm(self._sdk, self._scenario, conf.camp_id,
                                  conf.scenario_id)
    self._algorithm.set_logger(self._logger)
    self._algorithm.set_seat_ids(self._seat_ids)
    self.df_operator = None
    self.df_formation = None
    self.operator_information_original = []
    # self.operator_list = []
    self._row = 0
    self._col = 0
    self._update_operator = UpdateOperator(self._scenario)
    self._cal_map = Hex(self._row, self._col)
    self._graph = None
    # Register the SDK callback.
    self._sdk.register(self._call_back)
    # AI login.
    print(conf.username, conf.password)
    self._login_result = self._sdk.login(conf.username, conf.password)
    print("_login_result:{}".format(self._login_result))
    if self._login_result != 0:
        self._init_failed = True
        self._logger.print(self._round, "AI1 登陆失败")
        return
def __init__(self, config: ConfigParser, redis_: ClientRedisHelper,
             checker: CheckFile, forward_thread: ForwardThread):
    """Create the forwarding client and its runtime switches."""
    self.configure = Configure.init_instance(config)
    self.app = Client('forward',
                      config.get('account', 'api_id'),
                      config.get('account', 'api_hash'))
    self.checker: CheckFile = checker
    self.redis = redis_
    self.ForwardThread: ForwardThread = forward_thread
    # lowest acceptable resolution before a message counts as low quality
    self.min_resolution: int = config.getint('forward', 'lowq_resolution', fallback=120)
    self.owner_group_id: int = config.getint('account', 'group_id', fallback=-1)
    self.echo_switch: bool = False
    self.detail_msg_switch: bool = False
    # self.delete_blocked_message_after_blacklist: bool = False
    self.func_blacklist: Callable[[BlackListForwardRequest], int] | None = None
    self.custom_switch: bool = False
    self.init_handle()
    self.future: concurrent.futures.Future | None = None
    self.plugins: list[PluginLoader] = []
def __init__(self): #config database self.configure = Configure() dbpath = (self.configure.getDBPath())[1:-1] print "dbpath ",dbpath self.cx = sq.connect(dbpath) self.cu = self.cx.cursor()
def __init__(self, config: ConfigParser, redis_: aioredis.Redis,
             checker: CheckFile, forward_thread: ForwardThread):
    """Create the forwarding client, redis key prefix and switches."""
    self.configure = Configure.init_instance(config)
    self.app = Client('forward',
                      config.get('account', 'api_id'),
                      config.get('account', 'api_hash'))
    self.checker: CheckFile = checker
    self.redis: aioredis.Redis = redis_
    # random 5-letter prefix to namespace this process's redis keys
    self.redis_prefix: str = ''.join(random.choices(string.ascii_lowercase, k=5))
    self.ForwardThread: ForwardThread = forward_thread
    self.min_resolution: int = config.getint('forward', 'lowq_resolution', fallback=120)
    self.owner_group_id: int = config.getint('account', 'group_id', fallback=-1)
    self.echo_switch: bool = False
    self.detail_msg_switch: bool = False
    # self.delete_blocked_message_after_blacklist: bool = False
    self.func_blacklist: Optional[Callable[[], int]] = None
    if self.configure.blacklist:
        self.func_blacklist = ForwardThread.put_blacklist
    self.custom_switch: bool = False
    self.init_handle()
    self.plugins: List[PluginLoader] = []
def __init__(self):
    """Load all XML configuration (conf, keywords, sites) into helpers."""
    self.__conf = Configure()
    self.__associations = Associations()
    self.__sites = Sites()
    # BUG FIX: was a discarded local `resultList = []`; initialise the
    # instance attribute instead.
    self.resultList = []
    xReader = XMLReader()
    xParser = XMLParser()
    confTree = xReader.getTree('xml/conf.xml')
    if confTree is None:
        exit()
    searchParams = xParser.getSearchParams(confTree)
    searchSites = xParser.getSearchSites(confTree)
    pagesToSearch = xParser.getPagesToSearch(confTree)
    self.masterInspectionPath = xParser.getMIXML(confTree)
    self.__conf.setParams(searchSites, searchParams, pagesToSearch)
    keywordTree = xReader.getTree('xml/keywords.xml')
    fKeywordTree = xReader.getTree('xml/f_keywords.xml')
    # FIX: `is None` instead of `== None` throughout
    if keywordTree is None or fKeywordTree is None:
        exit()
    keywords = xParser.getKeywords(keywordTree)
    fKeywords = xParser.getKeywords(fKeywordTree)
    avoids = xParser.getAvoids(keywordTree)
    fAvoids = xParser.getAvoids(fKeywordTree)
    self.__associations.setParams(keywords, avoids, fKeywords, fAvoids)
    sitesTree = xReader.getTree('xml/sites.xml')
    if sitesTree is None:
        exit()
    goodSites, badSites = xParser.getSites(sitesTree)
    self.__sites.setParams(goodSites, badSites)
def __init__(self, name, config={}):
    """Build a named component from the given configuration mapping.

    :param name: component name.
    :param config: raw configuration mapping handed to Configure.
    """
    # NOTE(review): the mutable default `config={}` is shared across calls;
    # it appears to only be passed to Configure() here, but confirm nothing
    # mutates it before changing the signature.
    self.name = name
    self.config = Configure(config)
    self.__init_local_and_global()
    self.__init_logger()
    self.__init_broker_and_processors()
    self.__init_runtime_tree_storage()
def config_prepare():
    """Return the parsed config.json, or run interactive setup.

    Returns:
        dict | None: parsed configuration when config.json exists,
        otherwise None after launching the interactive Configure wizard.
    """
    if path.exists('config.json'):
        # FIX: context manager so the file handle is closed promptly
        # (the original left the handle open until GC).
        with open('config.json', 'r', encoding='UTF-8') as fp:
            return json.load(fp)
    from configure import Configure
    Configure().run()
    return None
def __init__(self):
    """Attach singletons and configure a dedicated file logger."""
    self.checker: CheckFile = CheckFile.get_instance()
    self.configure: Configure = Configure.get_instance()
    self.logger: logging.Logger = logging.getLogger('fwd_thread')
    file_handler: logging.FileHandler = logging.FileHandler('log.log')
    file_handler.setFormatter(
        logging.Formatter('[%(asctime)s][%(levelname)s] %(message)s'))
    self.logger.setLevel(logging.INFO)
    self.logger.addHandler(file_handler)
    # keep messages out of the root logger
    self.logger.propagate = False
def __init__(self): self.configure = Configure() self.configure.init('PicParser') #get width max_width = self.configure.getint('max_width') max_height = self.configure.getint('max_height') self.__maxsize = (max_width, max_height) print 'get maxsize',self.__maxsize #压缩的尺寸 self.__compressed_size = None
def main():
    """Load data, fit or apply the Bayes model, then persist it."""
    configure = Configure("config.ini")
    i_m = InputManager(configure)
    # load an existing model or create a new one
    bayes = prep_model(i_m.can_load_model(), i_m.load_path)
    # train on the inputs, or run inference, depending on is_train
    bayes = train_or_predict(bayes, i_m.targets, i_m.sents, i_m.is_train)
    # persist the (possibly updated) model
    with open(i_m.save_path, "wb") as f:
        pickle.dump(bayes, f)
def runContainer():
    """Host the blockchain agent in a root container and run the event loop."""
    # Container that will hold the home/prediction agents' counterpart.
    RC = aiomas.Container.create(('localhost', 5555))
    # DB engine (connection handled by CF).
    # db_engine = create_engine("mysql+pymysql://{}@{}/{}".format(CF.DB_USER, CF.DB_HOST, CF.DB_NAME))
    db_engine = CF.get_db_engine()
    # Initiate the blockchain agent and record its address.
    blockChainAgent = BlockchainAgent(container=RC, )
    logger.info("Blcokchain agent initiated at {}".format(blockChainAgent.addr))
    status = recordAgent(agent_addr=blockChainAgent.addr,
                         agent_type='blockchain',
                         db_engine=db_engine)
    # Run the event loop until interrupted.
    try:
        logger.info(
            "Running the event loop. The blockchain agent is open to be connected!"
        )
        aiomas.run()
    except KeyboardInterrupt:
        logging.info("Keyboard Interrupted")
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
    # Shut down the container and remove the agent record.
    try:
        logger.info("Shutting down the root container...")
        RC.shutdown()
        logger.info("Done.")
        logger.info("Killing Blockchain agent")
        status = killAgent(agent_addr=blockChainAgent.addr,
                           agent_type='blockchain',
                           db_engine=db_engine)
        if status:
            logger.info("Done.")
        else:
            logger.info("Couldnot kill the agent!")
    except Exception as e:
        logger.info("Failed to shutdown the root container")
        traceback.print_exc(file=sys.stdout)
def __init__(self, configure_filename):
    """Set up configuration, the task factory and the logger.

    :param configure_filename: path to the configuration file.
    """
    self.configure = Configure(configure_filename)
    self.task_factory = TaskFactory()
    # register the supported task types
    self.task_factory.register_task_creator(HttpTask)
    self.task_factory.register_task_creator(HttpsTask)
    self.logger = LoggerFactory.create_logger(queue=last_record_queue,
                                              **self.configure.logger)
def get_configure():
    """Return the text of the configuration file named on the command line.

    Returns None (after printing usage) when no path was given.
    Raises Exception when the path does not exist.
    """
    if len(sys.argv) <= 1:
        print(Configure.help())
        return None
    conf_path = sys.argv[1]
    if not os.path.exists(conf_path):
        raise Exception("未找到配置文件。")
    # FIX: read with a context manager; the old loop leaked the handle and
    # appended an extra '\n' to every line (readlines already keeps them).
    with open(conf_path, 'r') as f:
        return f.read()
def run_sprinkler(flag):
    """Drive the sprinkler pin high for the configured time, honouring `flag`."""
    # Read pin number and run time from configuration.
    config = Configure()
    section = 'SPConfig'
    pin = int(config.read(section, 'pin'))
    sp_timer = float(config.read(section, 'pihouse/sprinkler/timer'))
    # Energise the pin and record the start timestamp.
    print('Starting sprinkler')
    GPIO.output(pin, GPIO.HIGH)
    os.environ["sp_status"] = "True"
    config.set('SPConfig', 'pihouse/sprinkler/schedule/last',
               str(datetime.datetime.now().strftime('%H:%M, %a %d/%m/%y')))
    # Run until the timer elapses, or stop early when the flag flips.
    timer = time() + sp_timer * 60
    while time() < timer:
        if flag == "False":
            print('Stopping sprinkler (button)')
            GPIO.output(pin, GPIO.LOW)
            os.environ["sp_status"] = "False"
            break
        else:
            sleep(0.1)
            flag = os.environ.get('sp_ctl')
    else:
        # while/else: only runs when the loop expired without a break
        print('Stopping sprinkler (timer)')
        GPIO.output(pin, GPIO.LOW)
        os.environ["sp_status"] = "False"
def handle_args():
    """Dispatch to the requested sub-command based on CLI flags."""
    args = get_args()
    if args.configure:
        # interactive (re)configuration
        from configure import Configure
        Configure(False).run()
    elif args.no_update:
        DataHandler(config['token']).run(config['delta'])
    elif args.construct:
        DataHandler(config['token']).make_video()
    else:
        # default path: full run
        MainHandler(config['token']).run()
def __init__(self, client: Client, chat_id: int, target_id: Union[int, str],
             offset_id: int = 0, dirty_run: bool = False):
    """Store forwarding parameters and immediately start the thread."""
    self.checker: CheckFile = CheckFile.get_instance()
    self.configure: Configure = Configure.get_instance()
    self.client: Client = client
    self.target_id: int = int(target_id)
    self.offset_id: int = offset_id
    self.chat_id: int = chat_id
    self.dirty_run: bool = dirty_run
    # begin processing right away
    self.start()
def main(session_id, agent_id, port):
    """Kick a home agent by address using a short-lived trigger agent."""
    db_engine = CF.get_db_engine()
    agent_addr = getAgentAddress(agent_id=int(agent_id),
                                 session_id=session_id,
                                 db_engine=db_engine)
    if agent_addr is None:
        logging.info("Agent address couldn't be retreived. Make sure to provide correct session ID.")
        return
    logging.info("Agent's address: {}".format(agent_addr))
    try:
        # Container hosting the trigger agent.
        c = aiomas.Container.create(('localhost', int(port)))
        trigger_agent = TriggerAgent(container=c)
        # Kick the home agent via the trigger agent.
        aiomas.run(until=trigger_agent.run(agent_addr))
    except OSError:
        logger.info("Probably the provided port is already in use or the home agent is dead!")
        return
    except ConnectionResetError:
        logger.info("Probably the home agent died.")
    except Exception as e:
        logger.info("Failed to open/create container or run the triggering agent!")
        traceback.print_exc(file=sys.stdout)
    # NOTE(review): if Container.create itself raised, `c` is unbound here
    # and this line raises NameError -- confirm the intended cleanup path.
    logger.info("Shutting down the triggering agents container.")
    c.shutdown()
from configure import Configure from dna import DNA import numpy as np from population import Population from population_initializer import PopulationInitializer configure = Configure('data.json') number_of_buckets = configure.data['number_of_buckets'] population_size = configure.data['population_size'] kill_percentage = configure.data['kill_percentage'] mutation_percentage = configure.data['mutation_percentage'] Population.kill_percentage = kill_percentage DNA.mutate_percentage = mutation_percentage nucleotide = configure.nucleotide initializer = PopulationInitializer( nucleotide=nucleotide, number_of_buckets_per_container=number_of_buckets) initial_population = initializer.initialize(population_size) populations = [initial_population] fitness_list = [np.mean(initial_population.fitness)] while True: if len(populations) >= 300 or fitness_list[-1] >= -1: break current_population = populations[-1] next_population = current_population.next_population() next_fitness = np.mean(next_population.fitness) populations.append(next_population) fitness_list.append(next_fitness) for fitness, population in zip(fitness_list, populations): print population.containers[0].dna, fitness
import numpy as np from Game.BoardRenderer import BoardRenderer from Function import coordinates_set from configure import Configure conf = Configure() conf.get_conf() # Fixed Configuration. o = conf.conf_dict["o"] x = conf.conf_dict["x"] empty = conf.conf_dict["empty"] # Changeable Configuration. n_in_a_row = conf.conf_dict["n_in_a_row"] # How many pieces in a row. o_win = n_in_a_row x_win = -n_in_a_row start_player = conf.conf_dict["start_player"] # start player board_size = conf.conf_dict["board_size"] # The size of the board. class Board: def __init__(self): self.board = np.zeros((board_size, board_size)) self.available_actions = coordinates_set(board_size, board_size) self.last_action = None # Last move. self.current_player = start_player # current player def __copy__(self): new_board = Board()
# Place the remaining coins on the grid (row, column).
coinObj[9].placeItem(gridObj.getMatrix(), 4, 285)
coinObj[10].placeItem(gridObj.getMatrix(), 12, 315)
coinObj[11].placeItem(gridObj.getMatrix(), 20, 345)
coinObj[12].placeItem(gridObj.getMatrix(), 8, 375)
coinObj[13].placeItem(gridObj.getMatrix(), 4, 405)
coinObj[14].placeItem(gridObj.getMatrix(), 20, 405)
coinObj[15].placeItem(gridObj.getMatrix(), 16, 440)
speedUps()

# Place the end-of-level dragon and record the start time.
dragonObj = Dragon()
dragonObj.placeItem(gridObj.getMatrix(), 10, 459)
T = time.time()

# Global game state.
configObj = Configure()
speedfactor = 1
configObj.setStart(0)
gravity = 0.5
remainingTime = 0


def incrementSpeed():
    """Bump the global speed factor by one."""
    global speedfactor
    speedfactor = speedfactor + 1


def resetSpeed():
    """Reset the global speed factor to its default of 1."""
    global speedfactor
    speedfactor = 1
class BLParent():
    """Base crawler driver: loads XML configuration, runs the search
    sub-processes and writes the master inspection XML."""

    __conf = None
    __associations = None
    __sites = None
    resultList = None
    masterInspectionPath = None

    def __init__(self):
        self.__conf = Configure()
        self.__associations = Associations()
        self.__sites = Sites()
        # BUG FIX: was a discarded local `resultList = []`; initialise the
        # instance attribute instead.
        self.resultList = []
        xReader = XMLReader()
        xParser = XMLParser()
        confTree = xReader.getTree('xml/conf.xml')
        # FIX: `is None` instead of `== None` throughout
        if confTree is None:
            exit()
        searchParams = xParser.getSearchParams(confTree)
        searchSites = xParser.getSearchSites(confTree)
        pagesToSearch = xParser.getPagesToSearch(confTree)
        self.masterInspectionPath = xParser.getMIXML(confTree)
        self.__conf.setParams(searchSites, searchParams, pagesToSearch)
        keywordTree = xReader.getTree('xml/keywords.xml')
        fKeywordTree = xReader.getTree('xml/f_keywords.xml')
        if keywordTree is None or fKeywordTree is None:
            exit()
        keywords = xParser.getKeywords(keywordTree)
        fKeywords = xParser.getKeywords(fKeywordTree)
        avoids = xParser.getAvoids(keywordTree)
        fAvoids = xParser.getAvoids(fKeywordTree)
        self.__associations.setParams(keywords, avoids, fKeywords, fAvoids)
        sitesTree = xReader.getTree('xml/sites.xml')
        if sitesTree is None:
            exit()
        goodSites, badSites = xParser.getSites(sitesTree)
        self.__sites.setParams(goodSites, badSites)

    def startSubProcesses(self):
        """Run the connection manager thread and collect parsed results."""
        CM = ConnectionManager()
        lt = ListTool()
        sitesList = lt.addOnlyUniqueFromList(self.__sites.goodSites,
                                             self.__sites.badSites)
        CM.initializeConnection(
            self.__associations.keywordsList,
            self.__associations.avoidsList,
            sitesList,
            self.__conf.siteToSearchList,
            self.__conf.pagesToSearch,
            self.__conf.searchParamsList)
        CM.startThread()
        CM.join()
        CM.parseResults()
        self.resultList = CM.getResults()

    def createMasterInspectionXML(self, delChildXMLs=False):
        """Merge child inspection XMLs into the master inspection file.

        :param delChildXMLs: when True, delete the child XML files after
            the master file has been written.
        """
        lt = ListTool()
        os = OSTool()  # NOTE(review): shadows the `os` module locally
        sort = Sorter()
        insp = Inspector()
        xmls = os.getFilesInDir('results/')
        xmls = lt.popByWord(xmls, self.masterInspectionPath)
        XMLInspections = insp.getInspections(xmls)
        if len(XMLInspections) == 0:
            print('No files read.')
            exit()
        XMLInspections = sort.sortInspectionList(XMLInspections)
        xWriter = XMLWriter()
        xWriter.writeMIXML(XMLInspections, self.masterInspectionPath)
        if delChildXMLs:
            for xml in xmls:
                os.deleteFile(xml)

    def startServerProg(self):
        """Open the tracker page in Chrome via the local web server."""
        os = OSTool()
        os.startProgram('google-chrome', 'localhost:80/tracker/')
class ContainerManager:
    """Slave-side daemon: registers with the master name server and
    heartbeats container status updates."""

    def __init__(self):
        self.configure = Configure()
        # BUG FIX: was a discarded local `liveContainers = {}`
        self.liveContainers = {}
        self.cgroups = {}
        self.host = None
        self.isRunning = False
        self.lastCurrent = 0
        # manages live containers and updates parameters
        self.liveContainerManager = None
        # default heartbeat interval (seconds)
        self.heartbeatInterval = 5
        # remote name server proxy
        self.remoteNameServer = None
        # remote name server host address
        self.remoteNameServerHost = ""
        # remote name server host port
        self.remoteNameServerPort = 0000
        # remote name server object ID
        self.remoteNameServerID = "container.master"

    def initialize(self):
        """Load configuration and build the Pyro4 proxy; False on error."""
        self.lastCurrent = self.currentTime()
        self.host = socket.gethostname()
        if self.configure.initialize() is False:
            log.error("initialize configure error")
            return False
        if self.configure.get("heartbeatInterval") is not None:
            self.heartbeatInterval = int(self.configure.get("heartbeatInterval"))
        # These configurations are strictly required.
        self.remoteNameServerID = self.configure.get("nameserverID")
        if self.remoteNameServerID is None:
            log.error("initialize remote name server error")
            return False
        self.remoteNameServerHost = self.configure.get("nameserverHost")
        if self.remoteNameServerHost is None:
            log.error("initialize remote host error")
            return False
        self.remoteNameServerPort = self.configure.get("nameserverPort")
        if self.remoteNameServerPort is None:
            log.error("initialize remote port error")
            return False
        if self.liveContainerManager is None:
            self.liveContainerManager = LiveContainerManager(self.configure, self.host)
        uri = "PYRONAME:" + self.remoteNameServerID + "@" + self.remoteNameServerHost + ":" + self.remoteNameServerPort
        log.info("uri: %s", uri)
        try:
            self.remoteNameServer = Pyro4.Proxy(uri)
        except Exception as error:
            log.error("error when try to connect proxy %s", error)
            return False
        return True

    def register(self):
        """Register this host with the master; sets isRunning on success."""
        try:
            isSuccess = self.remoteNameServer.register(self.host)
        except Exception as error:
            log.error("error while register %s", error)
            return False
        log.info("register without exception")
        if isSuccess:
            self.isRunning = True
            log.info("register successfully")
            return True
        else:
            log.info("register failed")
            return False

    def statusUpdate(self, hostUpdate):
        """Send a HostUpdate to the master and return its HostResponse."""
        dict_hostUpdate = HostUpdate._class_to_dict_(hostUpdate)
        dict_hostResponse = None
        try:
            dict_hostResponse = self.remoteNameServer.statusUpdate(dict_hostUpdate)
        except Exception as error:
            # BUG FIX: the original format string had no placeholder for
            # `error`; use lazy %s formatting.
            log.error("exception at status Update: %s", error)
        if dict_hostResponse is None:
            return None
        hostResponse = HostResponse._dict_to_class_(dict_hostResponse)
        log.info("response from host: %s", hostResponse.getHost())
        if hostResponse.getHost() != self.host:
            log.error("wrong host during update")
            return None
        return hostResponse

    def currentTime(self):
        """Current wall-clock time in milliseconds."""
        return int(round(time.time() * 1000))

    # Main loop of the container manager: heartbeat with the master and
    # execute commands from the master.
    def serviceLoop(self):
        log.info("initializing")
        if self.initialize() is False:
            return
        log.info("registerring")
        # inform the master and register this host
        if self.register() is False:
            return
        log.info("looping")
        while True:
            if self.isRunning == False:
                log.error("host is not running")
                break
            if (self.currentTime() - self.lastCurrent) / 1000 < self.heartbeatInterval:
                # avoid burning CPU cycles between heartbeats
                time.sleep(self.heartbeatInterval / 2)
                continue
            self.lastCurrent = self.currentTime()
            # update live and dead containers
            hostUpdate = self.liveContainerManager.updateLiveContainers()
            # send heartbeat and process the response
            hostResponse = self.statusUpdate(hostUpdate)
            self.liveContainerManager.liveContainerProcess(hostResponse)
        log.info("break from loop")
def put_blacklist(cls, request: BlackListForwardRequest) -> None:
    """Re-queue a blacklisted request onto the configured blacklist target."""
    target = Configure.get_instance().blacklist
    cls.put(ForwardRequest.from_super(target, request))  # type: ignore
class Pyro4NameServer:
    """Pyro4 daemon that exposes the ContainerTracker service."""

    # default host name for NameServer
    host = "localhost"
    # default host port for NameServer
    port = 51681
    # default object id for NameServer
    ID = "pyro4NameServer"
    # daemon of NameServer
    daemon = None
    # initialize configure
    configure = Configure()
    # containerTracker: main service implementation, communicates with
    # slaves through heartbeat
    containerTracker = None

    def initialize(self):
        """Read host/port/ID from configuration; False on any missing key."""
        if self.configure.initialize() is False:
            log.error("configure initialize failure")
            return False
        self.host = self.configure.get("nameserverHost")
        if self.host is None:
            log.error("host is missing for configure")
            return False
        # BUG FIX: the original called int() before the None check, so a
        # missing port raised TypeError instead of logging an error.
        port = self.configure.get("daemonserverPort")
        if port is None:
            log.error("port is missing for configure")
            return False
        self.port = int(port)
        self.ID = self.configure.get("nameserverID")
        if self.ID is None:
            log.error("nameserver ID is missing for configure")
            return False
        self.containerTracker = ContainerTracker(self.configure)
        if self.containerTracker.initialize() is False:
            log.error("containerTracker initialize failed")
            return False
        log.info("name server initialize successfully")
        return True

    def start(self):
        """Register the tracker with Pyro4 and serve requests."""
        try:
            self.daemon = Pyro4.Daemon(host=self.host, port=self.port)
            nameServer = Pyro4.locateNS()
            nameURI = self.daemon.register(self.containerTracker)
            nameServer.register(self.ID, nameURI)
            log.info("start pyro4 Name Server")
            self.daemon.requestLoop()
        except Exception as error:
            log.error("error when set up server %s", error)
        # NOTE(review): requestLoop() blocks, so this line is reached only
        # after the daemon loop exits (or on failure) -- confirm intent.
        self.containerTracker.start()

    def serviceLoop(self):
        """Initialize then start; abort when initialization fails."""
        if self.initialize() is False:
            log.info("serviceLoop failed due to initialize fail")
            return
        self.start()

    def stop(self):
        pass
from queue import Queue

from client_kinect import ClientKinectSocket
from config_recv_socket import ConfigReceiveSocket
from configure import Configure
from frame_segment_udp import FrameSegment
from kinect_worker import KinectWorker
from udp_mesh_socket import UdpMeshSocket

if __name__ == "__main__":
    configure = Configure()
    # queues shared between the capture sockets and the worker
    image_queue = Queue()
    detect_queue = Queue()
    udp_mesh_client = UdpMeshSocket(configure=configure)
    config_recv_client = ConfigReceiveSocket(configure=configure)
    clientKinectSocket = ClientKinectSocket(configure=configure)
    frameSegment = FrameSegment(configure=configure, image_queue=image_queue)
    # start the network threads, then run the worker in the main thread
    clientKinectSocket.start()
    frameSegment.start()
    config_recv_client.start()
    udp_mesh_client.start()
    kinect_worker = KinectWorker(configure=configure,
                                 image_queue=image_queue,
                                 detect_queue=detect_queue)
    kinect_worker.run()
class DBSource: ''' Database operation of html image and other file source ''' def __init__(self): self.configure = Configure() dbpath = (self.configure.getDBPath())[1:-1] self.cx = sq.connect(dbpath) self.cu = self.cx.cursor() self.siteID = -1 def __del__(self): self.cx.commit() def init(self, siteID): ''' read database connection ''' self.siteID = siteID def saveFlag(self, info): ''' 存储中断后信息 ''' strr = "delete from flag" self.cu.execute(strr) strr = "insert into flag (info) values('%s')" % info self.cu.execute(strr) self.cx.commit() def saveHtml(self, info, source, parsed_source): ''' save html source info = { url: urlstr, title: titlestr, date: date #爬取的日期 } ''' print '-'*200 strr = "insert into source_info%d (url, title, date) values('%s', '%s', '%s')" % (self.siteID, info['url'], info['title'], info['date']) self.cu.execute(strr) strr = "insert into source%d (source, parsedSource) values('%s', '%s')" % (self.siteID, "", parsed_source) print '-'*200 #print strr self.cu.execute(strr) print '-'*200 self.cx.commit() def saveImg(self, info, source): ''' save image into database info = { url: urlstr, width: width, height: height } ''' #save image info strr = "insert into img_info%d (url, width, height) values ('%s', '%s', '%s')" % (self.siteID, info['url'], info['width'], info['height']) print strr self.cu.execute(strr) self.cu.execute('insert into img%d (source) values (?) '%self.siteID,(sq.Binary(source),)) self.cx.commit() def getImg(self, siteID, imgID): strr = "select source from img%d where id=%d" % (siteID, imgID) data = self.cu.execute(strr) print data return data.fetchone()
def __init__(self):
    """Connect to the configured database; site is unselected (-1)."""
    self.configure = Configure()
    # stored path is wrapped in quote characters; strip them
    dbpath = self.configure.getDBPath()[1:-1]
    self.cx = sq.connect(dbpath)
    self.cu = self.cx.cursor()
    self.siteID = -1
def runContainer():
    """Create the home container, spawn home agents and run the event loop."""
    HC = aiomas.Container.create(('localhost', 5556), clock=CLOCK)
    # HC = aiomas.Container.create(('localhost', 5556), clock=CLOCK)
    # t_clock_setter = asyncio.async(clock_setter())
    # Homes to simulate.
    homes = [9019, 9981]  # 7881, 100237, 7850, 980, 9981,]
    # db_engine = create_engine("mysql+pymysql://{}@{}/{}".format(CF.DB_USER, CF.DB_HOST, CF.DB_NAME))
    db_engine = CF.get_db_engine()
    # One agent per home.
    homeAgents = [HomeAgent(container=HC, agent_id=home, db_engine=db_engine, )
                  for home in homes]
    # Create the session covering this batch of agents.
    session_id = createSession(agents=homeAgents, db_engine=db_engine)
    # Address of the blockchain agent (later retrieved from the agent server).
    bc_address = getActiveBlockchainAddress(db_engine=db_engine)
    if bc_address is None:
        logging.info("Blockchain is not initiated.")
    else:
        # Bind the blockchain address to every home agent.
        for agent in homeAgents:
            agent.setBlockchainAddress(bc_address=bc_address)
    try:
        logger.info(
            "Running the event loop. One of the home agents is trying to connect with BC agent!"
        )
        logger.info("Session ID:{}".format(session_id))
        aiomas.run()
    except KeyboardInterrupt:
        logger.info("Stopping the event loop")
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
    finally:
        # End the session regardless of how the loop exited.
        killSession(session_id=session_id, db_engine=db_engine)
    # Shut the container down and clean up.
    try:
        logger.info(
            "Shutting down the home container...and cancelling the clock")
        HC.shutdown()
        # t_clock_setter.cancel()
        logger.info("Done.")
    except Exception as e:
        logger.info("Failed to shutdown the home container")
        traceback.print_exc(file=sys.stdout)
log_dir = 'log_dir' if not os.path.exists(configures.log_dir) or not hasattr( configures, log_dir): print('log fold not found, creating...') os.mkdir(configures.log_dir) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Entity extractor by binary tagging') parser.add_argument('--config_file', default='system.config', help='Configuration File') args = parser.parse_args() device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') configs = Configure(config_file=args.config_file) fold_check(configs) logger = get_logger(configs.log_dir) configs.show_data_summary(logger) mode = configs.mode.lower() if mode == 'train': logger.info('mode: train') train(configs, device, logger) elif mode == 'interactive_predict': logger.info('mode: predict_one') tokenizer = BertTokenizer.from_pretrained('bert-base-chinese') bert_model = BertModel.from_pretrained('bert-base-chinese').to(device) num_labels = len(configs.class_name) model = Model(hidden_size=768, num_labels=num_labels).to(device) model.load_state_dict( torch.load(os.path.join(configs.checkpoints_dir,
class DBConfig: ''' operation of database concerning file savage ''' def __init__(self): #config database self.configure = Configure() dbpath = (self.configure.getDBPath())[1:-1] print "dbpath ",dbpath self.cx = sq.connect(dbpath) self.cu = self.cx.cursor() def __del__(self): self.cx.commit() def init(self, home_list): ''' mannual init ''' self.home_list = home_list def initConfig(self): ''' create table config and insert some data sitelist: [ { url: urlstr, name: namestr, date: datestr }, ] ''' print 'init configure' #create configure table print 'init flag' self.__create_flag() strr = 'CREATE TABLE IF NOT EXISTS configure ("siteID" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL , "url" CHAR NOT NULL , "name" CHAR NOT NULL)' self.cu.execute(strr) #insert data for site in self.home_list: ''' insert each site into configure table ''' strr = "insert into configure (url, name) values('%s', '%s')" % (site['url'], site['name']) print strr self.cu.execute(strr) self.cx.commit() def __create_flag(self): ''' 存储中断后内存中的信息 ''' strr = 'CREATE TABLE IF NOT EXISTS "flag" ("id" INTEGER PRIMARY KEY NOT NULL, "info" TEXT)' print strr self.cu.execute(strr) def __create_source_info(self, siteID): ''' create {siteID}_source_info table ''' strr = 'CREATE TABLE IF NOT EXISTS "source_info%d" ("docID" INTEGER PRIMARY KEY NOT NULL , "url" CHAR, "title" CHAR, "date" DATETIME)' % siteID print strr self.cu.execute(strr) def __create_source(self, siteID): ''' create {siteID}_source table ''' strr = 'CREATE TABLE IF NOT EXISTS "source%d" ("docID" INTEGER PRIMARY KEY NOT NULL , "source" CHAR, "parsedSource" CHAR)' % siteID print strr self.cu.execute(strr) def __create_img_info(self, siteID): ''' create {siteID}_img_info ''' strr = 'CREATE TABLE IF NOT EXISTS "img_info%d" ("id" INTEGER PRIMARY KEY NOT NULL , "url" CHAR, "width" INTEGER, "height" INTEGER)' % siteID print strr self.cu.execute(strr) def __create_img(self, siteID): ''' {siteID}_img ''' strr = 'CREATE TABLE IF NOT EXISTS "img%d" ("id" 
INTEGER PRIMARY KEY NOT NULL , "source" blob)' % siteID print strr self.cu.execute(strr) def initSites(self): ''' init tables: {siteID}_source_info {siteID}_source {siteID}_img_info {siteID}_img ''' print 'init Sites' for siteID in range(len(self.home_list)): #sourceinfo self.__create_img(siteID) self.__create_img_info(siteID) self.__create_source(siteID) self.__create_source_info(siteID) def getSiteUrls(self): ''' get all site home_urls ''' strr = "select url from sites" return self.cu.execute(strr)