def testCreateExpenseToday():
    """Exercise Service.createExpenseToday on a handful of inputs.

    NOTE(review): the first case expects a leading 30 the others lack —
    presumably the current day gets prepended; confirm against Service.
    """
    cases = [
        ([20, 'internet'], [30, 20, 'internet']),
        ([40, 'food'], [40, 'food']),
        ([20, 'clothing'], [20, 'clothing']),
        ([10, 'internet'], [10, 'internet']),
        ([4000, 'housekeeping'], [4000, 'housekeeping']),
    ]
    for given, expected in cases:
        assert Service.createExpenseToday(given) == expected
    print("testCreateExpenseToday passed")
def create_game(sprite_size, is_new, base_stats):
    """Build (or resize) the global hero, engine and drawing chain.

    :param sprite_size: pixel size for the hero sprite
    :param is_new: True to start a fresh game, False to resize the running one
    :param base_stats: initial hero stats (used only when is_new is True)
    """
    global hero, engine, drawer, iteration
    if is_new:
        icon = Service.create_sprite(os.path.join("texture", "Hero.png"), sprite_size)
        icon_mini = Service.create_sprite(os.path.join("texture", "Hero.png"), 10)
        hero = Objects.Hero(base_stats, icon, icon_mini)
        engine = Logic.GameEngine()
        Service.service_init(sprite_size)
        Service.reload_game(engine, hero)
        # Decorator chain of drawing surfaces, built innermost-first.
        drawer = SE.GameSurface(
            (800, 480), pygame.SRCALPHA, (600, 0),
            SE.MiniMap((200, 200), (0, 480),
                       (SE.ProgressBar((640, 120), (640, 480),
                                       SE.InfoWindow((160, 120), (50, 50),
                                                     SE.HelpWindow((700, 500), pygame.SRCALPHA, (0, 0),
                                                                   SE.ScreenHandle((0, 0))))))))
    else:
        # Resize path: keep the existing engine/hero, refresh sprites.
        engine.sprite_size = sprite_size
        hero.sprite = Service.create_sprite(os.path.join("texture", "Hero.png"), sprite_size)
        hero.sprite_mini = Service.create_sprite(os.path.join("texture", "Hero.png"), 10)
        hero.size = sprite_size
        Service.service_init(sprite_size, False)
    # NOTE(review): source formatting was lost; the two statements below are
    # assumed to run on both branches (the fresh drawer also needs the engine).
    Logic.GameEngine.sprite_size = sprite_size
    drawer.connect_engine(engine)
def testCreateExpense():
    """Each well-formed [day, amount, type] triple must be accepted."""
    for triple in (['30', '20', 'internet'],
                   ['30', '40', 'food'],
                   ['10', '20', 'clothing'],
                   ['10', '20', 'internet'],
                   ['30', '4000', 'housekeeping']):
        assert Service.createExpense(triple)
    print("testCreateExpense passed")
def interact(self, engine, hero):
    """Stat-contest encounter: hero power vs. this enemy's strength.

    Win: bonus exp and score. Draw: plain exp. Loss: reduced exp plus
    damage; a dead hero is leveled up and the game reloads at level 5.
    """
    # Each factor scales a hero stat against the enemy's, divided by a
    # level-dependent random multiplier.
    k_endurance = hero.stats["endurance"] / (random.randint(
        engine.level, engine.level + 1) * self.stats["endurance"])
    k_intelligence = hero.stats["intelligence"] / (random.randint(
        engine.level, engine.level + 1) * self.stats["intelligence"])
    k_luck = hero.stats["luck"] / (random.randint(
        engine.level, engine.level + 1) * self.stats["luck"])
    hero_power = k_endurance * k_intelligence * k_luck * hero.stats["strength"]
    if hero_power > self.stats["strength"]:
        hero.exp += int(1.2 * self.exp)
        engine.score += 10 * hero.hp
    elif hero_power == self.stats["strength"]:
        hero.exp += self.exp
    else:
        hero.exp += int(0.2 * self.exp)
        hero.hp -= int(0.5 * (self.stats["strength"] - hero_power))
        # NOTE(review): mixes engine.hero with the hero parameter —
        # presumably the same object; confirm.
        if engine.hero.hp > 0:
            engine.score += 2 * hero.hp
        else:
            hero.level_up()
            engine.level = 5
            Service.reload_game(engine, hero)
            return
    hero.level_up()
    # Clamp any hp regained by level_up to the maximum.
    if hero.hp > hero.max_hp:
        hero.hp = hero.max_hp
def __init__(self):
    """Create a MasterI object."""
    Service.__init__(self)
    # CurrentZones and CurrentIdWork represent the current job.
    self.CurrentZones, self.CurrentIdWork = [], -1
    # CurrentImage is the final image for the current job.
    self.CurrentImage = None
    # RenderAgents: agents specialised in rendering.
    # (Key, Value) == (agent name, proxy to the agent)
    self.RenderAgents = {}
    # BenchmarkValue: mean benchmark execution time on the MASYRO platform.
    self.BenchmarkValue = 1
    # Biddings: the agents' current bids.
    self.Biddings = []
    self.InitialBidding = 1
    # NumberOfPieces: how many pieces of the current job have arrived.
    self.NumberOfPieces = 0
    # CurrentBiddingTime: time spent across the successive biddings.
    self.CurrentBiddingTime = 0
    # Proxies to the other services.
    self.Repository, self.Blackboard = None, None
    # Master's log.
    self.Log = ""
    # Final timing figures for the job.
    self.FinalTimes = ""
def __init__(self, shard, contest_id):
    """Set up the job queue, the worker pool and the periodic checks."""
    logger.initialize(ServiceCoord("EvaluationService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    self.queue = JobQueue()
    self.pool = WorkerPool(self)
    self.scoring_service = self.connect_to(
        ServiceCoord("ScoringService", 0))
    # One worker proxy per configured Worker shard.
    for i in xrange(get_service_shards("Worker")):
        worker = ServiceCoord("Worker", i)
        self.pool.add_worker(worker)
    # Periodic maintenance: dispatch jobs, watch worker timeouts and
    # connections, and re-discover jobs that were never finished.
    self.add_timeout(self.dispatch_jobs, None,
                     EvaluationService.CHECK_DISPATCH_TIME,
                     immediately=True)
    self.add_timeout(self.check_workers_timeout, None,
                     EvaluationService.WORKER_TIMEOUT_CHECK_TIME,
                     immediately=False)
    self.add_timeout(self.check_workers_connection, None,
                     EvaluationService.WORKER_CONNECTION_CHECK_TIME,
                     immediately=False)
    self.add_timeout(self.search_jobs_not_done, None,
                     EvaluationService.JOBS_NOT_DONE_CHECK_TIME,
                     immediately=True)
class TestService(unittest.TestCase):
    """Smoke test: Service.process must accept a GetTagsList command."""

    def setUp(self):
        self.serv = Service()

    def test_process(self):
        payload = {"CommandType": "GetTagsList", "Code": 100, "Data": []}
        self.serv.process(payload)
def post(self, request):
    """
    Executes the test according with the configuration supplied in the JSON

    :param request: request
    :return: True or False
    """
    logger.info("A new request of creating user")
    logger.info("the request body is: " + request.body)
    # print info
    # print Service.parseheaders(self, request)
    # Reply with a freshly generated random username.
    var = str(Service.gen_rand_str())
    res = [{"success": {"username": var}}]
    # Echo of the handled request, printed for debugging (Python 2).
    info = {
        "type": request.method,
        "header": Service.parseheaders(self, request),
        "reply": res,
        "url": request.path
    }
    print info
    # data = serializers.serialize('json',request.META)
    # for value in request.META:
    #     if "HTTP" in value:
    #         print "ADDED: " + value + " = " + request.META[value]
    # with open(LOGFILE,'w') as outfile:
    #     json.dump(data, outfile)
    return Response(res)
def s13():
    """Scrape the Tongji news list page and insert articles into the DB."""
    url = 'http://news.tongji.edu.cn/classid-15.html'
    # NOTE(review): `rege` is compiled but never used in this function.
    rege = re.compile('classid-\d{2}-newsid-\d{5}-t-show.html')
    list2 = {'class': 'li_black'}
    content_list = Service.getNewsDate(url, 'li', list2)
    for i in xrange(0, len(content_list)):
        date = content_list[i].span
        if date == None:
            continue
        print date
        url2 = 'http://news.tongji.edu.cn/'
        print content_list[i].a.attrs['href']
        content = Service.getNewsContent(content_list[i].a, url2, 'div',
                                         {'class': 'news_content'})
        if content == 0:
            # Failed to fetch the link — skip this entry.
            print "获取链接失败,跳过"
            continue
        # print content
        title = Service.getNewTitle(content_list[i].a)
        print title
        nid = 'm'
        # Normalize the Chinese date "(YYYY年MM月DD日)" to "YYYY-MM-DD".
        time = re.sub(r'[\(,\)]', '', date.string)
        time = time.replace('年', '-').replace('月', '-').replace('日', '')
        print time
        res = Service.judgeInsert('news', nid, time, title, content)
        # judgeInsert returning 0 presumably means "already stored" — stop.
        if res == 0:
            break
def move_points(self, knot):
    """Advance each point of *knot* by its velocity; bounce at the edges."""
    width = self.screen_resolution[0]
    height = self.screen_resolution[1]
    for idx in range(len(knot.points)):
        knot.points[idx] = knot.points[idx] + knot.speeds[idx] * knot.speed
        px, py = knot.points[idx].int_pair()
        if px > width or px < 0:
            # Horizontal bounce: flip the x component of the speed.
            sx, sy = knot.speeds[idx].int_pair()
            knot.speeds[idx] = Service.Vec2d(-sx, sy)
        if py > height or py < 0:
            # Vertical bounce: flip the y component (re-read the speed,
            # since the horizontal bounce may just have replaced it).
            sx, sy = knot.speeds[idx].int_pair()
            knot.speeds[idx] = Service.Vec2d(sx, -sy)
def __init__(self, shard):
    """Worker service: file cache plus a lock serializing job execution."""
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.file_cacher = FileCacher(self)
    # Held while a job runs so only one job executes at a time.
    self.work_lock = threading.Lock()
    # Presumably set when the current job's result must be discarded — confirm.
    self.ignore_job = False
def s18():
    """Scrape the SBS campus-news list page and insert articles into the DB."""
    url = 'http://www.sbs.edu.cn/xwzx/xyxw/'
    rege = re.compile('/\d{6}/t\d{8}\w+.html')
    list2 = {'class': 'news'}
    title = Service.getNewsDate_div(url, list2, 'li', {'class': 'list_title'})
    date = Service.getNewsDate_div(url, list2, 'li', {'class': 'list_date'})
    get_href = Service.getNewsUrl_div(url, list2, rege)
    print get_href[1].attrs['href'], date
    for i in xrange(0, len(get_href)):
        content = Service.getNewsContent2(get_href[i],
                                          'http://www.sbs.edu.cn/xwzx/xyxw',
                                          'p', {'align': 'justify'})
        if content == 0:
            # Failed to fetch the link — skip this entry.
            print "获取链接失败,跳过"
            continue
        # Concatenate the paragraphs; note each one is PREPENDED, so the
        # resulting text is in reverse paragraph order.
        contents = ""
        for j in xrange(0, len(content)):
            contents = content[j].get_text("", strip=True) + contents
        print contents
        # print content
        # Normalize the Chinese date "(YYYY年MM月DD日)" to "YYYY-MM-DD".
        time = re.sub(r'[\(,\)]', '', date[i].string)
        time = time.replace('年', '-').replace('月', '-').replace('日', '')
        print time, title[i].string
        nid = 'r'
        res = Service.judgeInsert('news', nid, time, title[i].string, contents)
        # judgeInsert returning 0 presumably means "already stored" — stop.
        if res == 0:
            break
def create_game(sprite_size, is_new):
    """(Re)build the global hero, engine and drawer chain; resize on reuse.

    :param sprite_size: pixel size for the hero sprite
    :param is_new: True for a fresh game, False when only resizing
    """
    global hero, engine, drawer, iteration
    if is_new:
        # `base_stats` is a module-level name defined outside this view.
        hero = Objects.Hero(
            base_stats,
            Service.create_sprite(os.path.join("texture", "Hero.png"), sprite_size))
        hero.sprite2 = Service.create_sprite(
            os.path.join("texture", "Hero2.png"), sprite_size)
        engine = Logic.GameEngine()
        Service.service_init(sprite_size)
        Service.reload_game(engine, hero)
        # Decorator chain of drawing surfaces, built innermost-first.
        drawer = SE.ScreenHandle((0, 0))
        drawer = SE.HelpWindow((700, 500), pygame.SRCALPHA, (0, 0), drawer)
        drawer = SE.SmallMap((SCREEN_DIM), pygame.SRCALPHA, (0, 0), drawer)
        drawer = SE.InfoWindow((SCREEN_DIM), pygame.SRCALPHA, (0, 0), drawer)
        drawer = SE.ProgressBar((SCREEN_DIM[0], 40), pygame.SRCALPHA, (0, 0), drawer)
        drawer = SE.GameOver((600, 400), pygame.SRCALPHA, (0, 0), drawer)
        drawer = SE.GameSurface((SCREEN_DIM), pygame.SRCALPHA, (0, 0), drawer)
    else:
        # Resize path: keep the existing engine/hero, refresh sprites.
        engine.sprite_size = sprite_size
        hero.sprite = Service.create_sprite(
            os.path.join("texture", "Hero.png"), sprite_size)
        hero.sprite2 = Service.create_sprite(
            os.path.join("texture", "Hero2.png"), sprite_size)
        Service.service_init(sprite_size, False, engine)
    # NOTE(review): source formatting was lost; the tail below is assumed to
    # run on both branches (a fresh drawer also needs the engine connected).
    drawer.connect_engine(engine)
    Logic.GameEngine.sprite_size = sprite_size
    iteration = 0
def __init__(self, shard, contest_id=None):
    """If contest_id is not None, we assume the user wants the
    autorestart feature.
    """
    logger.initialize(ServiceCoord("ResourceService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    # Samples indexed by time in int(epoch).
    # NOTE(review): the original comment called this a dictionary, but it is
    # initialized as a list.
    self._local_store = []
    # Floating point epoch used for precise measurement of percents.
    self._last_saved_time = time.time()
    # Starting point for cpu times.
    self._prev_cpu_times = self._get_cpu_times()
    # Sorted list of ServiceCoord running in the same machine.
    self._local_services = self._find_local_services()
    # Dict service -> bool marking whether we will restart it
    # (None when autorestart is disabled).
    self._will_restart = dict(
        (service, None if self.contest_id is None else True)
        for service in self._local_services)
    # Process found to be associated with each ServiceCoord.
    self._procs = dict((service, None)
                       for service in self._local_services)
    # Previous cpu time for each service.
    self._services_prev_cpu_times = dict(
        (service, (0.0, 0.0)) for service in self._local_services)
    # Start finding processes and their cputimes.
    self._store_resources(store=False)
    self.add_timeout(self._store_resources, None, 5)
    if self.contest_id is not None:
        self._launched_processes = set([])
        self.add_timeout(self._restart_services, None, 5,
                         immediately=True)
class Test_test1(unittest.TestCase):
    """Board/Service integration checks for ship placement.

    Fixes two defects of the original:
    * ``self.assertFalse()`` was called with no argument — a TypeError
      that the surrounding ``except`` silently swallowed, so the
      "must raise on overlap" path was never actually verified; replaced
      with the ``assertRaises`` context manager.
    * ``assertEqual(0, 1)`` was used as a manual fail — replaced with a
      direct ``assertEqual(elem, 0)`` on each cell.
    """

    def setUp(self):
        self.test_board = Repository()
        self.test_service = Service(self.test_board)

    def test_A(self):
        # The board must start completely empty.
        board = self.test_board.GetBoard()
        for index in range(0, 6):
            for elem in board[index]:
                self.assertEqual(elem, 0)
        # First placement succeeds and marks the board.
        self.test_service.PlaceShip("A1A2A3")
        board = self.test_board.GetBoard()
        self.assertEqual(1, board[1][0])
        # Re-placing the same ship must raise an overlap error.
        with self.assertRaises(Exception) as ctx:
            self.test_service.PlaceShip("A1A2A3")
        self.assertEqual(str(ctx.exception), "Ships overlaping")
        # A non-overlapping horizontal ship.
        self.test_service.PlaceShip("C2D2E2")
        board = self.test_board.GetBoard()
        self.assertEqual(1, board[2][2])
        # A further placement updates the expected cells.
        self.test_service.PlaceShip("A2A3A4")
        board = self.test_board.GetBoard()
        self.assertEqual(0, board[1][0])
        self.assertEqual(1, board[4][0])
def interact(self, engine, hero):
    """Melee fight against this enemy; handles level ups, downs and game over."""
    enemy_hp = 5 + self.stats["endurance"] * 2
    # Trade blows until one side drops; a tied roll means neither lands a hit.
    while enemy_hp > 0 and hero.hp > 0:
        hero_hit = hero.stats["strength"] + random.randint(0, 10)
        enemy_hit = self.stats["strength"]
        if hero_hit > enemy_hit:
            enemy_hp -= 5
        elif hero_hit < enemy_hit:
            hero.hp -= 10
    level_before = hero.level
    if hero.hp > 0:
        # Victory: collect experience, possibly level up.
        hero.exp += self.stats["experience"]
        hero.level_up()
        if hero.level > level_before:
            engine.notify(f"Level up {hero.level}")
    else:
        # Defeat: drop a level; at level 0 the game restarts.
        hero.hp = 0
        hero.level -= 1
        hero.exp = 0
        engine.notify(f"Level low {hero.level}")
        if hero.level <= 0:
            engine.notify(f"Game Over")
            engine.level = 4
            Service.reload_game(engine, hero)
def __init__(self, shard, contest_id=None):
    """If contest_id is not None, we assume the user wants the
    autorestart feature.
    """
    logger.initialize(ServiceCoord("ResourceService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    # Samples indexed by time in int(epoch).
    # NOTE(review): the original comment called this a dictionary, but it is
    # initialized as a list.
    self._local_store = []
    # Floating point epoch used for precise measurement of percents.
    self._last_saved_time = time.time()
    # Starting point for cpu times.
    self._prev_cpu_times = self._get_cpu_times()
    # Sorted list of ServiceCoord running in the same machine.
    self._local_services = self._find_local_services()
    # Dict service -> bool marking whether we will restart it
    # (None when autorestart is disabled).
    self._will_restart = dict(
        (service, None if self.contest_id is None else True)
        for service in self._local_services
    )
    # Process found to be associated with each ServiceCoord.
    self._procs = dict((service, None)
                       for service in self._local_services)
    # Previous cpu time for each service.
    self._services_prev_cpu_times = dict((service, (0.0, 0.0))
                                         for service in self._local_services)
    # Start finding processes and their cputimes.
    self._store_resources(store=False)
    self.add_timeout(self._store_resources, None, 5)
    if self.contest_id is not None:
        self._launched_processes = set([])
        self.add_timeout(self._restart_services, None, 5,
                         immediately=True)
def __init__(self, shard):
    """Worker service: file cache, job lock, and per-job task state."""
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.file_cacher = FileCacher(self)
    # Current task type handler; set when a job is assigned.
    self.task_type = None
    # Held while a job runs so only one job executes at a time.
    self.work_lock = threading.Lock()
    # Presumably a DB session opened per job — confirm against callers.
    self.session = None
def __init__(self, dev, **kwargs):
    """
    .. code-block:: python

        with Firmware(dev) as fw:
            fw.load(url="/tmp/mcr-bkrc-6.5.7.mpk")
    """
    # NOTE(review): **kwargs is accepted but silently ignored — confirm
    # that this is intentional (e.g. signature parity with sibling services).
    Service.__init__(self, dev=dev)
def testMaxOfDay():
    """Checks for Service.maxOfDay — the (day, summed-amount) maximum."""
    day1_heavy = [[1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]
    assert Service.maxOfDay(day1_heavy) == (1, 60)
    spread = [[3, 20, 'food'], [5, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]
    assert Service.maxOfDay(spread) != (3, 20)
    day10_heavy = [[5, 20, 'food'], [6, 20, 'food'], [5, 20, 'food'],
                   [10, 20, 'food'], [10, 20, 'food']]
    assert Service.maxOfDay(day10_heavy) == (10, 40)
    tied_days = [[1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert Service.maxOfDay(tied_days) == (1, 40)
    single_day = [[2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food']]
    assert Service.maxOfDay(single_day) == (2, 80)
    print("testMaxOfDay passed")
def __init__(self, shard, contest_id):
    """Set up scorers, ranking queues and the log-forwarding thread."""
    logger.initialize(ServiceCoord("ScoringService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    self.scorers = {}
    self._initialize_scorers()
    # If for some reason (SS switched off for a while, or broken
    # connection with ES), submissions have been left without
    # score, this is the set where you want to put their ids. Note
    # that sets != {} if and only if there is an alive timeout for
    # the method "score_old_submission".
    self.submission_ids_to_score = set([])
    self.submission_ids_to_token = set([])
    self.scoring_old_submission = False
    # We need to load every submission at start, but we don't want
    # to invalidate every score so that we can simply load the
    # score-less submissions. So we keep a set of submissions that
    # we analyzed (for scoring and for tokens).
    self.submission_ids_scored = set()
    self.submission_ids_tokened = set()
    # Initialize ranking web servers we need to send data to.
    self.rankings = []
    for i in xrange(len(config.rankings_address)):
        address = config.rankings_address[i]
        username = config.rankings_username[i]
        password = config.rankings_password[i]
        self.rankings.append((
            address[0],  # HTTP / HTTPS
            "%s:%d" % tuple(address[1:]),
            get_authorization(username, password)))
    self.initialize_queue = set()
    self.submission_queue = dict()
    self.subchange_queue = dict()
    self.operation_queue_lock = threading.Lock()
    for ranking in self.rankings:
        self.initialize_queue.add(ranking)
    # Background thread pushing queued operations to the rankings.
    # NOTE(review): method name "dispath_operations_thread" (sic) is defined
    # elsewhere — the typo is part of the API, left untouched here.
    self.log_bridge = LogBridge()
    thread = threading.Thread(target=self.dispath_operations_thread,
                              args=(self.log_bridge, ))
    thread.daemon = True
    thread.start()
    self.add_timeout(self.search_jobs_not_done, None,
                     ScoringService.JOBS_NOT_DONE_CHECK_TIME,
                     immediately=True)
    self.add_timeout(self.forward_logs, None,
                     ScoringService.FORWARD_LOG_TIME,
                     immediately=True)
def testSumOfType():
    """Checks for Service.sumOfType — total spent on a single category."""
    all_food = [[1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    assert Service.sumOfType('food', all_food) == 80
    no_internet = [[3, 20, 'food'], [5, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]
    assert Service.sumOfType('internet', no_internet) == 0
    one_internet = [[5, 20, 'food'], [6, 20, 'internet'], [5, 20, 'food'],
                    [10, 20, 'food'], [10, 20, 'food']]
    assert Service.sumOfType('internet', one_internet) == 20
    four_food = [[1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert Service.sumOfType('food', four_food) != 50
    no_clothing = [[2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food']]
    assert Service.sumOfType('clothing', no_clothing) != 20
    print("testSumOfType passed")
def interact(self, engine, hero):
    """Damage the hero; reward exp on survival, reload the game on death."""
    hero.hp -= self.stats['strength']
    if hero.hp > 0:
        # Survived: collect experience and possibly level up.
        hero.exp += self.stats['experience']
        hero.level_up()
    else:
        # Died: restart and restore hit points.
        Service.reload_game(engine, hero)
        Service.restore_hp(engine, hero)
        engine.notify('You died')
def main():
    """Wire up the face-detection service and start the Tk main loop."""
    image_location = 'Encore/temp/Image/a.jpg'
    cascade_location = 'C:\\Users\\JH\\Downloads\\opencv-master\\data\\haarcascades\\haarcascade_frontalface_alt2.XML'
    root = tk.Tk()
    detector = fds.FaceDetecService(image_location, cascade_location)
    app = win.AppWindow(root, '800x600+100+100', image_location)
    mkw.Make(app, detector)
    s.service()
    app.mainloop()
def __init__(self):
    """Create a BlackboardI object."""
    Service.__init__(self)
    # (Key, Value) == ((IdWork, IdZone), Register)
    self.Registers = {}
    # AnalysisTime: seconds spent analysing the most recent scene.
    self.AnalysisTime = 0
    # EstimatedRenderTime: seconds spent estimating the work units.
    self.EstimatedRenderTime = 0
def testRemoveExpenseByDay():
    """Checks for Service.removeExpenseByDay."""
    no_day2 = [[1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    assert not Service.removeExpenseByDay([2], no_day2)
    no_day4 = [[3, 20, 'food'], [5, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]
    assert not Service.removeExpenseByDay([4], no_day4)
    with_day6 = [[5, 20, 'food'], [6, 20, 'internet'], [5, 20, 'food'],
                 [10, 20, 'food'], [10, 20, 'food']]
    assert Service.removeExpenseByDay([6], with_day6) != [
        [1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    day2_absent = [[1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert not Service.removeExpenseByDay([2], day2_absent) == [
        [1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    all_day2 = [[2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food']]
    assert not Service.removeExpenseByDay([2], all_day2) == []
    print("testRemoveExpenseByDay passed")
def __init__(self):
    """Create a BlackboardI object."""
    Service.__init__(self)
    # (Key, Value) == ((IdWork, IdZone), Register)
    self.Registers = {}
    # AnalysisTime: seconds spent analysing the most recent scene.
    self.AnalysisTime = 0
    # EstimatedRenderTime: seconds spent estimating the work units.
    self.EstimatedRenderTime = 0
def addCustomService(self):
    """Create a placeholder service entry in custom.xml and select it."""
    service = Service()
    # Random id so repeated additions are unlikely to collide.
    service.id = 'custom-%i' % random.randrange(1, 999999)
    service.name = self.tr('New Service, edit me')
    service.description = self.tr('Enter a short, concise description here')
    self.sources[QString('custom.xml')].services.append(service)
    self.sources[QString('custom.xml')].writeBack()
    self.readSources()
    self.populateCustomList(service.id)
    self.synchronizeLineEdits()  # does not trigger automatically
def main():
    """Authenticate the client and drop into the interactive shell."""
    # hostname = sys.argv[0]
    # port = sys.argv[1]
    # Obtain the client identity and key pair.
    client_id, client_key = client_auth.get_authenticated()
    print(colored("Private Key:" + client_key, 'yellow'),
          colored('Sensitive Information, Printed Only for Demo', 'red'))
    print("Authentication Successful")
    servers = get_access_init(client_id)
    print(colored("Starting Shell", 'blue'))
    Service.shell(servers, client_id, client_key)
def __init__(self, shard, contest_id):
    """Set up scorers, ranking queues and the log-forwarding thread."""
    logger.initialize(ServiceCoord("ScoringService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    self.scorers = {}
    self._initialize_scorers()
    # If for some reason (SS switched off for a while, or broken
    # connection with ES), submissions have been left without
    # score, this is the set where you want to put their ids. Note
    # that sets != {} if and only if there is an alive timeout for
    # the method "score_old_submission".
    self.submission_ids_to_score = set([])
    self.submission_ids_to_token = set([])
    self.scoring_old_submission = False
    # We need to load every submission at start, but we don't want
    # to invalidate every score so that we can simply load the
    # score-less submissions. So we keep a set of submissions that
    # we analyzed (for scoring and for tokens).
    self.submission_ids_scored = set()
    self.submission_ids_tokened = set()
    # Initialize ranking web servers we need to send data to.
    self.rankings = []
    for i in xrange(len(config.rankings_address)):
        address = config.rankings_address[i]
        username = config.rankings_username[i]
        password = config.rankings_password[i]
        self.rankings.append((address[0],  # HTTP / HTTPS
                              "%s:%d" % tuple(address[1:]),
                              get_authorization(username, password)))
    self.initialize_queue = set()
    self.submission_queue = dict()
    self.subchange_queue = dict()
    self.operation_queue_lock = threading.Lock()
    for ranking in self.rankings:
        self.initialize_queue.add(ranking)
    # Background thread pushing queued operations to the rankings.
    # NOTE(review): method name "dispath_operations_thread" (sic) is defined
    # elsewhere — the typo is part of the API, left untouched here.
    self.log_bridge = LogBridge()
    thread = threading.Thread(target=self.dispath_operations_thread,
                              args=(self.log_bridge,))
    thread.daemon = True
    thread.start()
    self.add_timeout(self.search_jobs_not_done, None,
                     ScoringService.JOBS_NOT_DONE_CHECK_TIME,
                     immediately=True)
    self.add_timeout(self.forward_logs, None,
                     ScoringService.FORWARD_LOG_TIME,
                     immediately=True)
def interact(self, engine, hero):
    """Grant a third of the exp, maybe hit the hero; on death restart the level."""
    hero.exp += self.stats["experience"] // 3
    hero.level_up()
    physical = (self.stats["strength"] // 5) * (self.stats["endurance"] // 10)
    damage = physical + (self.stats["intelligence"] // 3)
    # Luck shrinks the roll range; only a roll of exactly 0 dodges the hit.
    if random.randint(0, 100 - hero.stats["luck"]) > 0:
        hero.hp -= damage
    if hero.hp <= 0:
        engine.level -= 1
        hero.hp = hero.max_hp
        Service.reload_game(engine, hero)
def testGetDaysWithExpenses():
    """Checks for Service.getDaysWithExpenses — set of distinct days."""
    three_days = [[1, 20, 'food'], [2, 20, 'food'], [3, 20, 'food'], [3, 20, 'food']]
    assert Service.getDaysWithExpenses(three_days) == {1, 2, 3}
    other_days = [[3, 20, 'food'], [5, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]
    assert Service.getDaysWithExpenses(other_days) != {1, 2, 3}
    late_days = [[5, 20, 'food'], [6, 20, 'food'], [9, 20, 'food'], [10, 20, 'food']]
    assert Service.getDaysWithExpenses(late_days) == {5, 6, 9, 10}
    two_days = [[1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert Service.getDaysWithExpenses(two_days) == {1, 4}
    one_day = [[2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food']]
    assert Service.getDaysWithExpenses(one_day) != {1, 2, 3}
    print("testGetDaysWithExpenses passed")
def testSortType():
    """Checks for Service.sortType — filter expenses by category.

    Fixes the completion message, which was copy-pasted from
    testSumOfType and wrongly printed "testSumOfType passed".
    """
    assert Service.sortType('food', [[1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]) == [
        [1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    assert Service.sortType('internet', [[3, 20, 'food'], [5, 20, 'food'], [3, 20, 'food'], [1, 20, 'food']]) == []
    assert Service.sortType('internet', [[5, 20, 'food'], [6, 20, 'internet'], [5, 20, 'food'],
                                         [10, 20, 'food'], [10, 20, 'food']]) != [
        [1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    assert Service.sortType('food', [[1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]) == [
        [1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert Service.sortType('clothing', [[2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food'], [2, 20, 'food']]) == []
    print("testSortType passed")
def response_to_action(info, action):
    """
    If 'action' appears in the params, then refer to this method to process

    :param info: the key and value in GET url or POST body
    :param action: the value of action
    :return: depends on the action
    """
    TAG = 'ActionHandler:response_to_action'
    service = Service()
    ret_dict = dict()
    if action == 'getWords':
        # user_id = info['userId']
        number_of_words = info['numberOfWords']
        wordlist_id = info['wordListId']
        pick_style = info['style']
        # 'continuous' picking resumes from a given word; other styles start fresh.
        if pick_style == 'continuous':
            from_word = info['fromWord']
        else:
            from_word = ""
        # word_get:should be an array with every words in dict format [{},{},...,{}]
        # every dict should be a {'word':word, 'phonetic':phonetic, 'interp':interp,'id':id ,'r':r,'w':w}
        # here r means right recite times, w means wrong recite times
        words_get = service.get_words(number_of_words, wordlist_id,
                                      pick_style, from_word)
        if words_get:
            ret_dict['status'] = 'Success'
            ret_dict['words_get'] = words_get
        # NOTE(review): no 'Fail' status is set when words_get is empty —
        # callers receive an empty dict; confirm that this is intended.
    elif action == 'register':
        email = info['userEmail']
        password = info['password']
        name = info['userName']
        is_success = service.register(email, password, name)
        if is_success:
            ret_dict['status'] = 'Success'
            Log.i(TAG, 'user:%s has successfully registered' % email)
        else:
            ret_dict['status'] = 'Fail'
            Log.w(TAG, 'registeration failed <email=%s>' % email)
    elif action == 'userAuthentication':
        email = info['userEmail']
        password = info['password']
        is_success, user_name = service.can_login(email, password)
        if is_success:
            ret_dict['status'] = 'Success'
            ret_dict['user_name'] = user_name
            Log.i(TAG, 'user:%s has successfully login' % email)
        else:
            ret_dict['status'] = 'Fail'
            Log.w(TAG, 'login failed <email=%s>' % email)
    return ret_dict
def testRemoveExpenseByStartEndDay():
    """Checks removal of expenses in a ['start', 'to', 'end'] day range."""
    # NOTE(review): every call below targets Service.removeExpenseByDay, not a
    # dedicated start/end-day API — either removeExpenseByDay also accepts the
    # range form, or this is a copy-paste from testRemoveExpenseByDay; confirm.
    assert not Service.removeExpenseByDay(['4', 'to', '5'],
                                          [[1, 20, 'food'], [1, 20, 'food'],
                                           [3, 20, 'food'], [5, 20, 'food']])
    assert Service.removeExpenseByDay(['1', 'to', '5'],
                                      [[3, 20, 'food'], [5, 20, 'food'],
                                       [3, 20, 'food'], [1, 20, 'food']])
    assert Service.removeExpenseByDay(['2', 'to', '5'],
                                      [[5, 20, 'food'], [6, 20, 'internet'],
                                       [5, 20, 'food'], [10, 20, 'food'],
                                       [10, 20, 'food']]) != [
        [1, 20, 'food'], [1, 20, 'food'], [3, 20, 'food'], [5, 20, 'food']]
    assert not Service.removeExpenseByDay(['3', 'to', '8'],
                                          [[1, 20, 'food'], [4, 20, 'food'],
                                           [4, 20, 'food'], [1, 20, 'food']]) == [
        [1, 20, 'food'], [4, 20, 'food'], [4, 20, 'food'], [1, 20, 'food']]
    assert not Service.removeExpenseByDay(['1', 'to', '9'],
                                          [[2, 20, 'food'], [2, 20, 'food'],
                                           [2, 20, 'food'], [2, 20, 'food']]) == []
    print("testRemoveExpenseByStartEndDay passed")
def __init__(self, shard, contest_id):
    """Load the contest, build per-task scorers and the ranking queue."""
    logger.initialize(ServiceCoord("ScoringService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    # Initialize scorers, the ScoreType objects holding all
    # submissions for a given task and deciding scores.
    self.scorers = {}
    with SessionGen(commit=False) as session:
        contest = session.query(Contest).\
            filter_by(id=contest_id).first()
        logger.info("Loaded contest %s" % contest.name)
        contest.create_empty_ranking_view(timestamp=contest.start)
        for task in contest.tasks:
            self.scorers[task.id] = get_score_type(task=task)
        # NOTE(review): explicit commit despite SessionGen(commit=False) —
        # presumably intended to persist the empty ranking view; confirm.
        session.commit()
    # If for some reason (SS switched off for a while, or broken
    # connection with ES), submissions have been left without
    # score, this is the list where you want to put their
    # ids. Note that list != [] if and only if there is an alive
    # timeout for the method "score_old_submission".
    self.submission_ids_to_score = []
    self.submission_ids_to_token = []
    # We need to load every submission at start, but we don't want
    # to invalidate every score so that we can simply load the
    # score-less submissions. So we keep a set of submissions that
    # we analyzed (for scoring and for tokens).
    self.submission_ids_scored = set()
    self.submission_ids_tokened = set()
    # Initialize ranking web servers we need to send data to.
    self.rankings = []
    for i in xrange(len(config.rankings_address)):
        address = config.rankings_address[i]
        username = config.rankings_username[i]
        password = config.rankings_password[i]
        auth = get_authorization(username, password)
        self.rankings.append(("%s:%d" % tuple(address), auth))
    self.operation_queue = []
    for ranking in self.rankings:
        self.operation_queue.append((self.initialize, [ranking]))
    self.add_timeout(self.dispatch_operations, None,
                     ScoringService.CHECK_DISPATCH_TIME,
                     immediately=True)
    self.add_timeout(self.search_jobs_not_done, None,
                     ScoringService.JOBS_NOT_DONE_CHECK_TIME,
                     immediately=True)
def test312_invest_new(self):
    """New user invests in the newcomer (beginner) product.

    Fix: the original ``except Exception as ex: raise ex`` discards the
    original traceback context; a bare ``raise`` re-raises the active
    exception unchanged.
    """
    print('invest_new')
    try:
        ComMeth.login(Common.teleNum)
        # ComMeth.login_be()
        # Fund the account so the investment can go through.
        Service.Service().recharge_be(Common.teleNum, '500')
        token = ComMeth.getToken()
        response = Service.Service().invest(self.productInfo[0], token[0],
                                            token[1], 100, 2)
        Common.search_str(str(response), [self.SUCCESS_RESULT])
    except Exception:
        # Re-raise untouched, preserving the original traceback.
        raise
def __init__(self, shard, custom_logger=None):
    """Test-runner service: schedules self.test every 0.2 seconds."""
    Service.__init__(self, shard, custom_logger)
    # Rebind the module-level logger to the async library's logger.
    # NOTE(review): `cms.async` is a Python-2-era package name (`async` is a
    # reserved word from 3.7) — this code predates that.
    global logger
    from cms.async.AsyncLibrary import logger as _logger
    logger = _logger
    # Timing and progress counters for the running test sequence.
    self.start = 0
    self.total_time = 0
    self.allright = 0
    self.current = -1
    # State flags for the test loop.
    self.ongoing = False
    self.failed = False
    self.retry = False
    self.add_timeout(self.test, None, 0.2, immediately=True)
    self.initialized = False
def loadAuxiliaryData():
    """Load per-language n-gram corpora and the no-harvest word filter."""
    # Service.logger.debug("Loading auxiliary data for terminology extraction system...")
    global ngramFilePath, adskUnwordsRoot
    global ngrams, nowords
    # ngrams = codecs.open(ngramFilePath, "r", "utf-8").read()
    # Fetch the supported target languages from the database.
    conn = Service.connectToDB()
    cursor = conn.cursor()
    cursor.execute("select LangCode3Ltr from TargetLanguages")
    langs = cursor.fetchall()
    conn.close()
    for lang in langs:
        if __debug_on__:
            Service.logger.debug("\t\tReading nGram file " + ngramFilePath + "." + lang[0].upper() + ".bz2...")
        ngrams[lang[0]] = bz2.BZ2File(ngramFilePath + "." + lang[0].upper() + ".bz2", "r").read()
    # Build the filter of words which must never be harvested: general and
    # un-words, Autodesk trademarks, company names, and city/region names
    # (from geodatasource.com / maxmind.com world-cities data).
    nowords = set()
    for listname in ("general_words.txt",
                     "un_words.txt",
                     "autodesk_trademarks.txt",
                     "company_names.txt",
                     "cities_regions.txt"):
        nowords = nowords.union(preplists(adskUnwordsRoot + "/" + listname))
def __init__(self, listen_port, handlers, parameters, shard=0,
             custom_logger=None, listen_address=""):
    """Tornado-backed RPC web server bound to listen_port/listen_address."""
    Service.__init__(self, shard, custom_logger)
    global logger
    from cms.async.AsyncLibrary import logger as _logger
    logger = _logger
    # This ensures that when the server autoreloads because its source is
    # modified, the socket is closed correctly.
    # In the development branch of Tornado, you can add a hook before
    # the server reloads.
    try:
        if parameters["debug"]:
            fcntl.fcntl(self.server.socket,
                        fcntl.F_SETFD, fcntl.FD_CLOEXEC)
    except KeyError:
        pass
    self.__responses = {}
    # TODO: why are the following two lines needed?
    self._RPCRequestHandler__responses = self.__responses
    self._RPCAnswerHandler__responses = self.__responses
    # Register the RPC endpoints alongside the caller-supplied handlers.
    handlers += [(r"/rpc_request/([a-zA-Z0-9_-]+)/"
                  "([0-9]+)/([a-zA-Z0-9_-]+)",
                  RPCRequestHandler),
                 (r"/rpc_answer", RPCAnswerHandler),
                 (r"/sync_rpc_request/([a-zA-Z0-9_-]+)/"
                  "([0-9]+)/([a-zA-Z0-9_-]+)",
                  SyncRPCRequestHandler)]
    self.application = tornado.web.Application(handlers, **parameters)
    # xheaders=True means that Tornado uses the content of the
    # header X-Real-IP as the request IP. This means that if it is
    # behind a proxy, it can see the real IP the request is coming
    # from. But, to use it, we need to be sure we can trust it
    # (i.e., if we are not behind a proxy that sets that header,
    # we must not use it).
    self.application.service = self
    http_server = tornado.httpserver.HTTPServer(
        self.application, xheaders=parameters.get("is_proxy_used", True))
    http_server.listen(listen_port, address=listen_address)
    self.instance = tornado.ioloop.IOLoop.instance()
def __init__(self, shard, contest_id):
    """Set up scorers, ranking endpoints and the operation queue."""
    logger.initialize(ServiceCoord("ScoringService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    self.contest_id = contest_id
    self.scorers = {}
    self._initialize_scorers()
    # If for some reason (SS switched off for a while, or broken
    # connection with ES), submissions have been left without
    # score, this is the list where you want to put their
    # ids. Note that list != [] if and only if there is an alive
    # timeout for the method "score_old_submission".
    self.submission_ids_to_score = []
    self.submission_ids_to_token = []
    # We need to load every submission at start, but we don't want
    # to invalidate every score so that we can simply load the
    # score-less submissions. So we keep a set of submissions that
    # we analyzed (for scoring and for tokens).
    self.submission_ids_scored = set()
    self.submission_ids_tokened = set()
    # Initialize ranking web servers we need to send data to.
    self.rankings = []
    for i in xrange(len(config.rankings_address)):
        address = config.rankings_address[i]
        username = config.rankings_username[i]
        password = config.rankings_password[i]
        auth = get_authorization(username, password)
        self.rankings.append(("%s:%d" % tuple(address), auth))
    self.operation_queue = []
    for ranking in self.rankings:
        self.operation_queue.append((self.initialize, [ranking]))
    self.add_timeout(self.dispatch_operations, None,
                     ScoringService.CHECK_DISPATCH_TIME,
                     immediately=True)
    self.add_timeout(self.search_jobs_not_done, None,
                     ScoringService.JOBS_NOT_DONE_CHECK_TIME,
                     immediately=True)
def threadProcess(self, client, clientaddr):
    """Per-connection worker: read commands from the socket, process
    them through Service, and write each reply back followed by "\n".

    client: connected socket object (supports recv/send).
    clientaddr: (host, port) peer address; host is used in the log
        file name.
    """
    recv = client.recv(int(Var.IDF_SOCKET_BUFFER))
    try:
        service = Service()
        # Loop until the peer closes the connection (recv returns "").
        while len(recv):
            # Process the command that was sent over
            print recv
            ret = service.process(recv)
            # Send the result back on the socket
            client.send(ret + "\n")
            # Brief pause before reading the next command.
            time.sleep(0.1)
            recv = client.recv(int(Var.IDF_SOCKET_BUFFER))
            #print recv
            #function.log('socket recv data', 'data/data-'+clientaddr[0]+'.log').info(recv)
            #function.log('socket send data', 'data/data-'+clientaddr[0]+'.log').info(ret)
        # Peer disconnected: note the closure in the per-client log file.
        function.log('socket data',
                     'data/data-' + clientaddr[0] + '.log').info("socket close\n")
    except Exception, ex:
        # NOTE(review): broad catch; errors are only printed, the socket
        # is not explicitly closed here.
        print "Socket threadProcess function error:\n"
        print ex
def __init__(self, shard):
    """Initialize the LogService: open a timestamped UTF-8 log file
    under <config.log_dir>/cms and point "last.log" at it.

    shard (int): shard number of this service instance.
    """
    logger.initialize(ServiceCoord("LogService", shard))
    Service.__init__(self, shard, custom_logger=logger)
    log_dir = os.path.join(config.log_dir, "cms")
    # Both the parent and the "cms" subdirectory must exist; bail out
    # of the service if either cannot be created.
    if not mkdir(config.log_dir) or \
       not mkdir(log_dir):
        logger.error("Cannot create necessary directories.")
        self.exit()
        return
    # One log file per service start, named by the epoch second.
    log_filename = "%d.log" % int(time.time())
    self._log_file = codecs.open(os.path.join(log_dir, log_filename),
                                 "w", "utf-8")
    # Refresh the "last.log" symlink to point at the new file; the
    # remove may fail simply because the link does not exist yet.
    try:
        os.remove(os.path.join(log_dir, "last.log"))
    except OSError:
        pass
    os.symlink(log_filename, os.path.join(log_dir, "last.log"))
    # In-memory ring of recent messages (filled elsewhere).
    self._last_messages = []
def __init__ (self):
    """Create a ModelRepositoryI object with an empty model registry."""
    # Let the base Service set itself up before we add our own state.
    Service.__init__(self)
    # Registry of models held by this repository; empty at creation.
    self.Models = {}
    # How many models are currently registered.
    self.NumberOfModels = 0
def __init__(self, shard):
    """Initialize the Checker service and connect it to every
    configured core service.

    shard (int): shard number of this service instance.
    """
    logger.initialize(ServiceCoord("Checker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    # Open a connection to each core service listed in the async config.
    # NOTE(review): ``config.async`` is valid Python 2 only ("async" is a
    # keyword from Python 3.7 on).
    for service in config.async.core_services:
        self.connect_to(service)
import JAL_instruction
import JALR_instruction
import Service

# One encoder object per instruction format / instruction.
# NOTE(review): R_type, U_type, SB_type, Load_type, Store_type,
# I_math_type and I_shamt_type must be imported earlier in the file
# (outside this chunk).
R_t = R_type.R_type()
U_t = U_type.U_type();
SB_t = SB_type.SB_type()
L_t = Load_type.Load_type()
S_t = Store_type.Store_type()
Im_t = I_math_type.I_math_type()
Is_t = I_shamt_type.I_shamt_type()
Jal = JAL_instruction.JAL_instruction()
Jalr = JALR_instruction.JALR_instruction()

# Read configuration; "start_addr" and "in_file_addr" are optional keys.
cfg = Service.readConfig("config.cfg")
start_addr = 0;
if "start_addr" in cfg:
    start_addr = Service.Str2Num(cfg["start_addr"])
data = "";
file_addr = "in.ass"
if "in_file_addr" in cfg:
    file_addr = cfg["in_file_addr"]
# Split the input source into directive sections (".CODE", ".DATA", ...).
pfile = open(file_addr);
code = Service.SplitToDierctives(pfile)
if ".CODE" not in code:
    Service.ERROR("CODE not found error!");
dataD = ""
dataK = ""
# Drop the trailing character of the .CODE section and split into lines.
code[".CODE"] = code[".CODE"][:-1].split('\n')
# NOTE(review): the body of this "if" lies outside the visible chunk —
# the snippet is truncated here.
if ".DATA" in code:
def gather_capabilities(Nset, nameSet, cur, set_layers, list_WMSservers, server_URL):
    """Build a WMS Capabilities XML document for the layer set *nameSet*
    by intersecting the capabilities of every server in
    *list_WMSservers*, and print the resulting XML.

    Nset: id of the set (used in DB lookups).
    nameSet (str): name of the set; becomes the outer <Name> and the
        generated file name.
    cur: open DB cursor passed to the BD helpers.
    set_layers (dict): maps Nlayer -> Nwms (layer id -> owning server).
    list_WMSservers (list): server ids; may contain repeats, which are
        skipped via the Nwms_used guard.
    server_URL (str): base URL written into the generated requests.

    NOTE(review): this function was reconstructed from a whitespace-
    collapsed source; the exact nesting of some statements inside the
    two intersection loops is inferred and should be double-checked.
    """
    # Generated file name
    # NOTE(review): name_file is computed but never used in this chunk.
    name_file = nameSet + ".xml"
    # Product of an object class Document - dom
    dom = xml.dom.minidom.Document()
    # DTD schema takes one of the servers
    Nwms_dtd = list_WMSservers[0]
    # create service tag with need information (see Service.py). Return subtree dom
    tag_service = Service.get_tag(dom, server_URL, nameSet, Nwms_dtd)
    # create tag Capability.
    capability = dom.createElement("Capability")
    tag_service.appendChild(capability)
    # create tag in Capability: Request
    request_dom = dom.createElement("Request")
    capability.appendChild(request_dom)
    # Format a tag Request with the intersection of all formats by all used servers
    request_dom = Service.get_tagRequest(dom, "GetCapabilities", request_dom,\
                                         server_URL, cur, list_WMSservers, nameSet, 0)
    request_dom = Service.get_tagRequest(dom, "GetMap", request_dom, server_URL,\
                                         cur, list_WMSservers, nameSet, 0)
    request_dom = Service.get_tagRequest(dom, "GetFeatureInfo", request_dom, \
                                         server_URL, cur, list_WMSservers, nameSet, 0)
    request_dom = Service.get_tagRequest(dom, "DescribeLayer", request_dom, \
                                         server_URL, cur, list_WMSservers, nameSet, 0)
    request_dom = Service.get_tagRequest(dom, "GetLegendGraphic", request_dom, \
                                         server_URL, cur, list_WMSservers, nameSet, 0)

    # --- Intersection of <Exception>/<Format> values across servers ---
    Nwms_used = -1
    count_servers = 0
    oldlist_formats = []
    for server_id in list_WMSservers:  # Enumerates all used servers
        # Check their recurrence (skip a server id repeated back-to-back)
        if Nwms_used == server_id:
            continue
        Nwms_used = server_id
        # Requested format of the tag with the name "Exception" of the server server_id
        # SELECT xpath_nodeset(Capabilites, '"//Capability/Exception/Format"')
        # FROM WMSresources
        # WHERE Nwms = server_id;
        res = BD.interset_request(cur, "WMSresources", "Capabilites",\
                                  "//Capability/Exception/Format", "Nwms", server_id)
        # Stores intersection list formats
        result = []
        if res:
            # If there is a format
            for tagFormat in res:
                strformat = tagFormat.replace("</Format>", "</Format>\n")
                if count_servers:
                    # This is not the first list of formats
                    oldlist_formats = list_formats
                    # The string format is divided into a list
                    list_formats = strformat.split("\n")
                    # Created by the intersection of the lists of formats
                    # belonging to two servers
                    for m in oldlist_formats:
                        for j in list_formats:
                            if m == j:
                                result.append(m)
                                break
                    # The result of suppression of record in the list of formats
                    list_formats = result
                else:
                    # This is the first list of formats
                    # The string format is divided into a list
                    list_formats = strformat.split("\n")
                count_servers += 1

    # Formed a string from the list of formats
    strformat = " "
    for k in list_formats:
        strformat = strformat + "%s\n\t" % k
    # Create tag "Exception"
    exception = dom.createElement("Exception")
    capability.appendChild(exception)
    except_format = dom.createTextNode("%s" % strformat)
    exception.appendChild(except_format)

    # --- Intersection of <UserDefinedSymbolization> across servers ---
    count_servers = 0
    Nwms_used = -1
    oldlist_formats = " "
    for server_id in list_WMSservers:  # Enumerates all used servers
        result = " "
        # Check their recurrence
        if Nwms_used == server_id:
            continue
        Nwms_used = server_id
        # Requested format of the tag with the name "Exception" of the server server_id
        # SELECT xpath_nodeset(Capabilites, '"//Capability/UserDefinedSymbolization"')
        # FROM WMSresources
        # WHERE Nwms = server_id;
        res = BD.interset_request(cur, "WMSresources", "Capabilites",\
                                  "//Capability/UserDefinedSymbolization",\
                                  "Nwms", server_id)
        if res:
            # If there is a UserDefinedSymbolization
            for tagSymbolization in res:
                if count_servers:
                    # This is not the first string UserDefinedSymbolization
                    oldlist_formats = list_formats
                    list_formats = tagSymbolization
                    # Kept only when all servers agree on the same string.
                    if oldlist_formats == list_formats:
                        result = list_formats
                else:
                    list_formats = tagSymbolization
                count_servers += 1
            list_formats = result
    # Create tag UserDefinedSymbolization
    symb = dom.createTextNode("%s" % list_formats)
    capability.appendChild(symb)

    # External layer - layer containing all the layers set
    external_Layer = dom.createElement("Layer")
    capability.appendChild(external_Layer)
    Name_exLayer = dom.createElement("Name")
    Name_exLayer.appendChild(dom.createTextNode("%s" % nameSet))
    external_Layer.appendChild(Name_exLayer)
    Title_exLayer = dom.createElement("Title")
    Title_exLayer.appendChild(dom.createTextNode("WMS Resource Manager of SB RAS"))
    external_Layer.appendChild(Title_exLayer)

    # Requested a list of layers which don't display in "Capabilities"
    tables = []
    keywords = {}
    tables.append("AuthorsSets.layer_noset")
    keywords["AuthorsSets.Nset"] = Nset
    res = BD.ifsome_tables(cur, tables, keywords, "AuthorsSets")
    if res:
        list_layernoset = res[0][0]
    else:
        list_layernoset = ""

    # For all layers of the set
    for Nlayer, Nwms in set_layers.items():
        # Checking whether to display the layer in "Capabilities"
        place_instr = list_layernoset.find("%d" % Nlayer + ";")
        if (place_instr != -1):
            if ((place_instr != 0 and
                 (list_layernoset[place_instr-1:place_instr]) != '_') or
                    (place_instr == 0)):
                # If the layer is not necessary to display, then its inner layers too
                continue;
        # Requested by the parent layer, a list of internal layers and Capabilities.
        tables = []
        keywords = {}
        tables.append("KnownLayers.Nl_group")
        tables.append("KnownLayers.LayerCapabilites")
        tables.append("SetLayer.sub_group")
        keywords["KnownLayers.Nwms"] = Nwms
        keywords["SetLayer.Nlayer"] = Nlayer
        keywords["KnownLayers.Nlayer"] = "SetLayer.Nlayer"
        keywords["SetLayer.Nset"] = Nset
        res = BD.ifsome_tables(cur, tables, keywords, "KnownLayers", "SetLayer")
        for parent_id, capabilities, subgroup_layers in res:
            if not subgroup_layers:
                # If there is no internal layers
                capabilities = form_layer(capabilities, Nlayer, server_URL)
                if parent_id != -1:
                    # If the layer has a parent
                    capabilities = form_layerInfo(cur, parent_id, capabilities)
            else:
                # Another group is formed from layer
                capabilities = form_layerGroup(cur, Nlayer)
                # Is the formation of sublayers
                capabilities = form_sublayers(cur, Nwms, subgroup_layers, capabilities,\
                                              server_URL, list_layernoset, Nlayer)
                capabilities = capabilities + "</Layer>\t"
            # Create new node "Layer"
            text = dom.createTextNode("%s" % capabilities)
            external_Layer.appendChild(text)

    # transform tree in string
    texts = tag_service.toprettyxml(indent=' ', newl='\n')
    # transform pseudo codes in symbols
    # NOTE(review): these literals look entity-mangled by extraction —
    # the originals were presumably replace("&lt;", "<"),
    # replace("&gt;", ">") and replace("&quot;", "\"") to undo the XML
    # escaping done while the capabilities strings were assembled.
    texts = texts.replace("<", "<")
    texts = texts.replace(">", ">")
    texts = texts.replace(""", "\"")
    texts = texts.replace("&amp;", "&")
    print texts