def _put_conn(self, host, port, sock):
    u"""
    Return a connection to the per-site connection pool.

    The connection's state is checked first; connections that have been
    dropped by the peer are discarded (closed) instead of being pooled.
    If the site's pool is full, the connection is closed and dropped.
    """
    # Build a human-readable tag for log messages.
    if hasattr(self.sock_mod, "get_display_name"):
        sock_name = self.sock_mod.get_display_name()
    else:
        sock_name = None
    sock_info = 'sock_mod:%s host:%s port:%s' % (sock_name, host, port)
    if sock:
        if is_connection_dropped(sock):
            # Peer already closed this connection; don't pool it.
            logging.debug(u'已关闭连接无法添加回连接池。%s' % sock_info)
            try:
                sock.close()
            except:
                pass
        else:
            with self.lock:
                # One LIFO queue per "host:port" site; LIFO reuses the most
                # recently returned (freshest) connection first.
                site_connes = self.site_dict.get(u'%s:%s' % (host, port), None)
                if site_connes is None:
                    site_connes = LifoQueue(self.max_site_conn)
                try:
                    site_connes.put(sock)
                    logging.debug(u'添加连接回连接池。 %s' % sock_info)
                except Full:
                    # Pool already holds max_site_conn connections: drop this one.
                    logging.debug(u'连接池满. %s' % sock_info)
                    try:
                        sock.close()
                    except:
                        pass
                    return
                # Only (re)register the queue when the put succeeded.
                self.site_dict[u'%s:%s' % (host, port)] = site_connes
class PooledIncomingQueue(IncomingQueue):
    """IncomingQueue variant that writes through a pool of n FileEnqueue
    files sharing one write executor.

    A LIFO pool of idle enqueues lets concurrent add() calls write to
    different files without blocking each other; the most recently
    returned enqueue is handed out first.
    """

    def init_queues(self, n=5, buffsize=0, maxsize=1000*1000*1000):
        # Split the total size budget evenly across the n queue files.
        # NOTE(review): under Python 3 this would be float division;
        # presumably Python 2 code where integer division is intended.
        maxsize = maxsize / n
        self.write_executor = ThreadPoolExecutor(poolsize=1, queuesize=100)
        self.rqfile = FileDequeue(self.qdir, reader=FPSortingQueueFileReader)
        #self.rqfile = DummyFileDequeue(self.qdir)
        self.qfiles = [FileEnqueue(self.qdir, suffix=str(i), maxsize=maxsize,
                                   buffer=buffsize,
                                   executor=self.write_executor)
                       for i in range(n)]
        # Pool of currently-idle enqueues.
        self.avail = LifoQueue()
        for q in self.qfiles:
            self.avail.put(q)

    def shutdown(self):
        """Shut down the base queue, then the shared write executor."""
        super(PooledIncomingQueue, self).shutdown()
        self.write_executor.shutdown()

    def add(self, curis):
        """Append `curis` to one of the pooled queue files.

        Blocks while every enqueue is checked out; warns when checkout or
        checkin takes suspiciously long (>0.1s).
        Returns dict(processed=<number of curis queued>).
        """
        processed = 0
        t0 = time.time()
        enq = self.avail.get()  # check out an idle enqueue (may block)
        t = time.time() - t0
        if t > 0.1:
            logging.warn('self.avail.get() %.4f', t)
        try:
            enq.queue(curis)
            self.addedcount += len(curis)
            processed += len(curis)
            return dict(processed=processed)
        finally:
            # Always return the enqueue to the pool, even on error.
            t0 = time.time()
            self.avail.put(enq)
            t = time.time() - t0
            if t > 0.1:
                logging.warn('slow self.avail.put() %.4f', t)
def copyBucket(maxKeys=1000):
    """List every key of the source S3 bucket and feed them to a pool of
    Worker threads via a bounded LIFO queue.

    Relies on module-level globals: source_aws_key, source_aws_secret_key,
    srcBucketName and the Worker class. Python 2 code (print statements).

    :param maxKeys: page size for S3 key listing (S3 caps this at 1000)
    """
    print 'start'
    s_conn = S3Connection(source_aws_key, source_aws_secret_key)
    srcBucket = s_conn.get_bucket(srcBucketName)
    resultMarker = ''
    # Bounded queue: the producer blocks once 5000 keys are backlogged.
    q = LifoQueue(maxsize=5000)
    for i in range(10):
        print 'adding worker'
        t = Worker(q)
        t.daemon = True  # workers die with the main thread
        t.start()
    while True:
        print 'fetch next 1000, backlog currently at %i' % q.qsize()
        keys = srcBucket.get_all_keys(max_keys=maxKeys, marker=resultMarker)
        for k in keys:
            q.put(k.key)
        if len(keys) < maxKeys:
            # A short page means the whole bucket has been listed.
            print 'Done'
            break
        # Resume listing after the last key of this page.
        resultMarker = keys[maxKeys - 1].key
    q.join()  # wait until workers have processed every queued key
    print 'done'
def copy_job(self, max_keys=1000):
    """Copy all keys of the job's source bucket to its target bucket.

    Pages through the source bucket's key listing (restricted to the
    configured key prefix) and feeds each key to a pool of
    BucketCopyWorker threads via a bounded LIFO queue.

    :param max_keys: page size for the S3 key listing
    """
    logging.info( 'start copy_bucket' )
    src = self.job['source']
    tgt = self.job['target']

    conn = self.get_conn( tgt['owner'] )
    srcBucket = conn.get_bucket( src['bucket'] )
    tgtBucket = conn.get_bucket( tgt['bucket'] )

    if self.job['options']['allow-acl-change']:
        # View of the source bucket under the source owner's credentials,
        # used by workers to rewrite ACLs to the target owner's canonical id.
        ownerBucketView = self.get_conn( src['owner'] ).get_bucket( src['bucket'] )
        ownerID = self.users[ tgt['owner'] ]['canonical-id']
    else:
        ownerBucketView = None
        ownerID = None

    resultMarker = ''
    # Bounded queue: producer blocks once 5000 keys are backlogged.
    q = LifoQueue(maxsize=5000)

    for i in range(self.parallel):
        logging.info( 'adding worker %d' % i )
        t = BucketCopyWorker(q, srcBucket, tgtBucket, src['key-prefix'], tgt['key-prefix'], ownerBucketView, ownerID)
        t.daemon = True  # workers die with the main thread
        t.start()

    while True:
        logging.info( 'fetch next 1000, backlog currently at %i' % q.qsize() )
        keys = srcBucket.get_all_keys( prefix=src['key-prefix'], max_keys=max_keys, marker = resultMarker)
        for k in keys:
            q.put(k.key)
        if len(keys) < max_keys:
            # A short page means the whole listing is exhausted.
            # (Was `print 'Done'` — use logging like the rest of the method.)
            logging.info( 'Done' )
            break
        # BUG FIX: was `keys[maxKeys - 1].key` with undefined name `maxKeys`,
        # which raised NameError as soon as a full page was returned.
        resultMarker = keys[max_keys - 1].key

    q.join()  # wait until workers processed every queued key
    logging.info( 'done copy_bucket' )
class EulerianCycleDFS:
    """Finding an Eulerian cycle in a multigraph.

    Attributes
    ----------
    graph : input graph
    eulerian_cycle : list of nodes (length |E|+1)
    _graph_copy : graph, private (edges are consumed during the search)
    _stack : LIFO queue, private

    Notes
    -----
    Python 2 code (uses iterator ``.next()``).

    Based on the description from:
    http://eduinf.waw.pl./inf/alg/001_search/0135.php
    """

    def __init__(self, graph):
        """The algorithm initialization.

        Raises ValueError when the graph is not eulerian.
        """
        self.graph = graph
        if not self._is_eulerian():
            raise ValueError("the graph is not eulerian")
        self.eulerian_cycle = list()
        self._graph_copy = self.graph.copy()
        self._stack = LifoQueue()
        import sys
        recursionlimit = sys.getrecursionlimit()
        # _visit() recurses once per edge; make sure the interpreter allows
        # a deep enough stack. NOTE(review): the previous limit is never
        # restored afterwards.
        sys.setrecursionlimit(max(self.graph.v() * 2, recursionlimit))

    def run(self, source=None):
        """Executable pseudocode.

        Fills self.eulerian_cycle with the node sequence of the cycle.
        """
        if source is None:
            # get first random node
            source = self.graph.iternodes().next()
        self._visit(source)
        # Unwinding the stack yields the cycle in order.
        while not self._stack.empty():
            self.eulerian_cycle.append(self._stack.get())
        #del self._stack
        #del self._graph_copy

    def _visit(self, source):
        """Visiting node: consume every outgoing edge, then push the node."""
        while self._graph_copy.outdegree(source) > 0:
            edge = self._graph_copy.iteroutedges(source).next()
            self._graph_copy.del_edge(edge)
            self._visit(edge.target)
        self._stack.put(source)

    def _is_eulerian(self):
        """Test if the graph is eulerian (degree conditions only)."""
        if self.graph.is_directed():
            # We assume that the graph is strongly connected.
            for node in self.graph.iternodes():
                if self.graph.indegree(node) != self.graph.outdegree(node):
                    return False
        else:
            # We assume that the graph is connected
            for node in self.graph.iternodes():
                if self.graph.degree(node) % 2 == 1:
                    return False
        return True
def _find_path_dfs(self):
    """Finding augmenting paths in the residual network.

    Depth-first search (LIFO stack) from self.source. As soon as
    self.sink is reached, backtracks along `parent` writing the path's
    bottleneck capacity into self.flow and returns that capacity.
    Returns 0 when no augmenting path exists.
    """
    parent = dict((node, None) for node in self.residual.iternodes())
    # Capacity of found path to node.
    capacity = {self.source: float("inf")}
    Q = LifoQueue()
    Q.put(self.source)
    while not Q.empty():
        node = Q.get()
        for edge in self.residual.iteroutedges(node):
            # Residual capacity left on this edge.
            cap = edge.weight - self.flow[edge.source][edge.target]
            if cap > 0 and parent[edge.target] is None:
                parent[edge.target] = edge.source
                capacity[edge.target] = min(capacity[edge.source], cap)
                if edge.target != self.sink:
                    Q.put(edge.target)
                else:
                    # Backtrack search and write flow.
                    target = self.sink
                    while target != self.source:
                        node = parent[target]
                        self.flow[node][target] += capacity[self.sink]
                        self.flow[target][node] -= capacity[self.sink]
                        target = node
                    return capacity[self.sink]
    return 0
def _visit(self, node, pre_action=None, post_action=None):
    """Explore the connected component containing `node`.

    Stack-based traversal recording discovery times (self.dd), finish
    times (self.ff), parents (self.parent) and tree edges (self.dag).
    Colors: WHITE = undiscovered, GREY = discovered and queued,
    BLACK = finished.

    pre_action(v) fires when v is pushed (turns GREY); post_action(v)
    fires when v is popped and finished (turns BLACK).

    NOTE(review): a popped node is finished immediately after pushing its
    white neighbors, so finish times do not nest like a classical
    recursive DFS — confirm this matches the intended semantics.
    """
    self.time = self.time + 1
    self.dd[node] = self.time
    self.color[node] = "GREY"
    Q = LifoQueue()
    Q.put(node)  # node is GREY
    if pre_action:  # when Q.put
        pre_action(node)
    while not Q.empty():
        source = Q.get()  # GREY node is processed
        for edge in self.graph.iteroutedges(source):
            if self.color[edge.target] == "WHITE":
                self.parent[edge.target] = source
                self.dag.add_edge(edge)
                self.time = self.time + 1
                self.dd[edge.target] = self.time
                self.color[edge.target] = "GREY"
                Q.put(edge.target)  # target is GREY
                if pre_action:  # when Q.put
                    pre_action(edge.target)
        self.time = self.time + 1
        self.ff[source] = self.time
        self.color[source] = "BLACK"
        if post_action:  # source became BLACK
            post_action(source)
class DummyMessageHandler(MessageHandler):
    """In-memory MessageHandler stand-in backed by a LIFO queue.

    Messages written by devices are buffered and handed back by
    read_message(); messages written by the application are fanned out to
    every registered device.
    """
    # TODO(steffen): locking

    def __init__(self):
        MessageHandler.__init__(self)
        self._messages = LifoQueue()
        self._devices = []

    def register(self, device):
        """Start delivering outgoing messages to `device`."""
        self._devices.append(device)

    def read_message(self):
        """Pop the most recently buffered message (blocks when empty)."""
        return self._messages.get()

    def write_message_from_device(self, message):
        """Buffer a device-originated message for a later read_message()."""
        self._messages.put(message)

    def write_message(self, message):
        """Fan `message` out to every registered device."""
        for device in self._devices:
            device.handle_message(message)

    def has_messages(self):
        """Give each device a turn to run, then report buffer state."""
        for device in self._devices:
            device.loop()
        return not self._messages.empty()

    def stop(self):
        """Nothing to shut down for the dummy handler."""
        pass
def get_max_flow(directed_graph, source, sink):
    """Greedily push flow along every source->sink path found by a
    depth-first search over edges with remaining capacity.

    :param directed_graph: dict mapping node -> list of edge objects;
        each edge has .capacity and .to_node
    :param source: start node
    :param sink: end node
    :return: list of (pushed_flow, path) tuples, where path is a list of
        edge objects
    """
    # Remaining (residual) capacity per edge object.
    remaining = {e: e.capacity for arcs in directed_graph.values() for e in arcs}
    saturated_paths = []

    def record(path):
        # Bottleneck = smallest remaining capacity along the path;
        # subtract it from every edge and remember the path.
        bottleneck = min(remaining[e] for e in path)
        for e in path:
            remaining[e] -= bottleneck
        saturated_paths.append((bottleneck, path))

    stack = LifoQueue()  # LIFO => depth-first exploration of partial paths
    stack.put([])
    while not stack.empty():
        partial = stack.get()
        tail = source if not partial else partial[-1].to_node
        for e in directed_graph[tail]:
            if remaining[e] <= 0:
                continue
            extended = partial + [e]
            if e.to_node == sink:
                record(extended)
            else:
                stack.put(extended)
    return saturated_paths
class HierholzerWithEdges:
    """Finding an Eulerian cycle in a multigraph.

    Attributes
    ----------
    graph : input graph
    eulerian_cycle : list of edges (length |E|)
    _graph_copy : graph, private (edges are consumed during the walk)
    _stack : LIFO queue, private

    Notes
    -----
    Python 2 code (uses iterator ``.next()``).

    Based on the description from:
    https://en.wikipedia.org/wiki/Eulerian_path
    """

    def __init__(self, graph):
        """The algorithm initialization.

        Raises ValueError when the graph is not eulerian.
        """
        self.graph = graph
        if not self._is_eulerian():
            raise ValueError("the graph is not eulerian")
        self.eulerian_cycle = list()
        self._graph_copy = self.graph.copy()
        self._stack = LifoQueue()

    def run(self, source=None):
        """Executable pseudocode.

        Walks forward while unused out-edges exist, pushing edges on the
        stack; when stuck, pops edges into the cycle and backtracks.
        Fills self.eulerian_cycle with the edge sequence of the cycle.
        """
        if source is None:
            # get first random node
            source = self.graph.iternodes().next()
        while True:
            if self._graph_copy.outdegree(source) > 0:
                # Advance along any unused edge, remembering it.
                edge = self._graph_copy.iteroutedges(source).next()
                self._stack.put(edge)
                self._graph_copy.del_edge(edge)
                source = edge.target
            else:
                # Dead end: emit the edge and backtrack to its source.
                edge = self._stack.get()
                source = edge.source
                self.eulerian_cycle.append(edge)
                if self._stack.empty():
                    break
        # Edges were emitted in reverse walking order.
        self.eulerian_cycle.reverse()
        #del self._stack
        #del self._graph_copy

    def _is_eulerian(self):
        """Test if the graph is eulerian (degree conditions only)."""
        if self.graph.is_directed():
            # We assume that the graph is strongly connected.
            for node in self.graph.iternodes():
                if self.graph.indegree(node) != self.graph.outdegree(node):
                    return False
        else:
            # We assume that the graph is connected.
            for node in self.graph.iternodes():
                if self.graph.degree(node) % 2 == 1:
                    return False
        return True
class LiveviewStreamThread(threading.Thread):
    """Background reader for a liveview HTTP stream.

    Parses 8-byte common headers and 128-byte payload headers from the
    stream; JPEG frames (payload type 1) and their headers are buffered
    in LIFO queues so consumers always get the most recent frame, and
    frame info packets (payload type 2) replace self.frameinfo.
    """

    def __init__(self, url):
        # Direct class call `threading.Thread` instead of `super()` for python2 capability
        threading.Thread.__init__(self)
        self.lv_url = url
        # LIFO: get() returns the most recently received header/frame.
        self._lilo_head_pool = LifoQueue()
        self._lilo_jpeg_pool = LifoQueue()

        self.header = None
        self.frameinfo = []

    def run(self):
        """Read the stream forever, demultiplexing payloads by type."""
        sess = urlopen(self.lv_url)

        while True:
            header = sess.read(8)
            ch = common_header(header)

            data = sess.read(128)
            payload = payload_header(data, payload_type=ch['payload_type'])

            if ch['payload_type'] == 1:
                # JPEG frame: buffer raw header + image bytes as a pair.
                data_img = sess.read(payload['jpeg_data_size'])
                assert len(data_img) == payload['jpeg_data_size']

                self._lilo_head_pool.put(header)
                self._lilo_jpeg_pool.put(data_img)

            elif ch['payload_type'] == 2:
                # Frame info packet: replaces the previous frame info list.
                self.frameinfo = []

                for x in range(payload['frame_count']):
                    data_img = sess.read(payload['frame_size'])
                    self.frameinfo.append(payload_frameinfo(data_img))

            sess.read(payload['padding_size'])

    def get_header(self):
        """Return the last seen raw header, fetching a new one if cached
        header was consumed; None when nothing is available."""
        if not self.header:
            try:
                self.header = self._lilo_head_pool.get_nowait()
            except Exception as e:
                self.header = None

        return self.header

    def get_latest_view(self):
        """Return the newest JPEG frame's bytes.

        Note this is a blocking call.
        """
        data_img = self._lilo_jpeg_pool.get()

        # retrive next header
        try:
            self.header = self._lilo_head_pool.get_nowait()
        except Exception as e:
            self.header = None

        return data_img

    def get_frameinfo(self):
        """Return the most recent frame info list (may be empty)."""
        return self.frameinfo
def Plan(self, start_config, goal_config):
    """Graph search from start_config to goal_config over the discrete
    environment; returns the plan as a list of configurations.

    NOTE(review): the TODO below asks for a breadth-first planner, but the
    frontier is a LifoQueue, which makes this a depth-first search — a
    FIFO Queue would be needed for true BFS. Confirm which is intended.

    Python 2 code (print statements).
    """
    start_time = time.time()
    if self.visualize and hasattr(self.planning_env, "InitializePlot"):
        self.planning_env.InitializePlot(goal_config)
    plan = []

    # TODO: Here you will implement the breadth first planner
    #  The return path should be a numpy array
    #  of dimension k x n where k is the number of waypoints
    #  and n is the dimension of the robots configuration space
    q = LifoQueue()
    start_id = self.planning_env.discrete_env.ConfigurationToNodeId(start_config)
    goal_id = self.planning_env.discrete_env.ConfigurationToNodeId(goal_config)
    found = False
    q.put(start_id)
    explored =[start_id]
    # backtrack maps node id -> predecessor id (None for the start node).
    backtrack = {}
    backtrack[start_id] = None
    n= 0  # number of expanded successors (for reporting)
    while (q.qsize()>0) and not found:
        current = q.get()
        successors = self.planning_env.GetSuccessors(current)
        for successor in successors:
            # Membership in backtrack doubles as the "visited" test.
            if not successor in backtrack:
                n = n+1
                q.put(successor)
                #explored.append(successor)
                backtrack[successor] = current
                if self.visualize:
                    s = self.planning_env.discrete_env.NodeIdToConfiguration(successor)
                    c = self.planning_env.discrete_env.NodeIdToConfiguration(current)
                    self.planning_env.PlotEdge(c,s)
                if successor == goal_id:
                    found = True
                    break

    # Shortest Path: walk predecessors back from the goal, then reverse.
    path = []
    path.append(self.planning_env.discrete_env.NodeIdToConfiguration(goal_id))
    element = backtrack[goal_id]
    while element is not None:
        path.append(self.planning_env.discrete_env.NodeIdToConfiguration(element))
        element = backtrack[element]
    plan = path[::-1]
    if self.visualize:
        for i in range(len(path) - 1):
            self.planning_env.PlotRedEdge(path[i],path[i+1])
    print "number of nodes"
    print n
    print "time (in seconds):"
    print time.time()- start_time
    path_length = 0
    for i in range(len(path) - 1):
        path_length = path_length + self.planning_env.ComputeDistance(self.planning_env.discrete_env.ConfigurationToNodeId(path[i]), self.planning_env.discrete_env.ConfigurationToNodeId(path[i+1]))
    print "path path_length"
    print path_length
    return plan
def lifo_queue_usage():
    """Demonstrate LIFO ordering: items come back newest-first (2 then 1).

    Works under both Python 2 and Python 3: the stdlib module was renamed
    from `Queue` to `queue`, and `print(x)` with a single argument parses
    identically in both versions.
    """
    try:
        from queue import LifoQueue  # Python 3
    except ImportError:
        from Queue import LifoQueue  # Python 2

    lifo_queue = LifoQueue()
    lifo_queue.put(1)
    lifo_queue.put(2)
    print(lifo_queue.get())  # 2 — last in, first out
    print(lifo_queue.get())  # 1
class stack():
    """Minimal stack facade over a LifoQueue.

    push/pop/empty delegate directly; pop blocks when the stack is empty
    (LifoQueue.get semantics).
    """

    def __init__(self):
        # Backing store: LifoQueue already provides last-in-first-out order.
        self.s = LifoQueue()

    def push(self, x):
        """Place x on top of the stack."""
        self.s.put(x)

    def pop(self):
        """Remove and return the top element (blocks when empty)."""
        return self.s.get()

    def empty(self):
        """Return True when no elements are stored."""
        return self.s.empty()
def dfSearch(start, actions, goalTest, depthLimit=False):
    """Depth-First Search.

    :param start: initial node; nodes expose .depth and
        .expand(queue, actions) which pushes successors and returns the queue
    :param actions: action generator passed through to node.expand
    :param goalTest: predicate; search stops at the first node it accepts
    :param depthLimit: maximum node depth to expand, or False for no limit
    :return: the first goal node found, or None when the search space is
        exhausted without finding a goal
    """
    queue = LifoQueue()  # LIFO frontier => depth-first expansion order
    queue.put(start)
    while True:
        if queue.empty():
            # BUG FIX: previously returned the stale `node` variable here
            # (the last expanded, non-goal node), which falsely signalled
            # success. An exhausted frontier means the search failed.
            return None
        node = queue.get()
        if goalTest(node):
            return node
        # Expand only within the depth limit; depthLimit=False disables it.
        if (node.depth <= depthLimit) or (depthLimit is False):
            queue = node.expand(queue, actions)
def stackDFS(Graph, vroot):
    """Depth First Search: stack version.

    Starting from vroot, visits every vertex reachable in Graph, printing
    each vertex as it is processed and marking it via Graph.setVisited().
    Graph exposes VertexList, an Edges[(i, j)] adjacency lookup, and a
    Visited mapping.
    """
    pending = LifoQueue()
    pending.put(vroot)
    while not pending.empty():
        current = pending.get()
        print ("Visit :", current)
        Graph.setVisited(current)
        # Push every not-yet-visited neighbour; LIFO order => depth-first.
        for neighbour in Graph.VertexList:
            if Graph.Edges[current, neighbour] and not Graph.Visited[neighbour]:
                pending.put(neighbour)
class PooledEnqueue(object):
    """Pool of n FileEnqueue files sharing one write executor.

    A LIFO pool of idle enqueues lets concurrent queue() calls write to
    different files without blocking each other.
    """

    def __init__(self, qdir, n=5, maxsize=1000*1000*1000, **qargs):
        # Split the total size budget evenly across the n queue files.
        # NOTE(review): under Python 3 this would be float division;
        # presumably Python 2 code where integer division is intended.
        maxsize = maxsize / n
        self.qdir = qdir
        self.write_executor = ThreadPoolExecutor(poolsize=1, queuesize=100)
        self.queues = [FileEnqueue(self.qdir, suffix=str(i),
                                   maxsize=maxsize,
                                   executor=self.write_executor,
                                   **qargs)
                       for i in range(n)]
        # Pool of currently-idle enqueues.
        self.avail = LifoQueue()
        for q in self.queues:
            self.avail.put(q)
        self.addedcount = 0

    def get_status(self):
        """Aggregate buffered/pending counts across all queue files."""
        qq = [q.get_status() for q in self.queues]
        r = dict(
            buffered=sum(s['buffered'] for s in qq),
            pending=sum(s['pending'] for s in qq),
            queues=qq)
        return r

    def _flush(self):
        # Flush every underlying queue file's buffer.
        for q in self.queues:
            q._flush()

    def close(self):
        """Close all queue files, then the shared write executor."""
        for q in self.queues:
            q.close()
        self.write_executor.shutdown()

    def queue(self, curis):
        """Append `curis` to one of the pooled queue files.

        Blocks while every enqueue is checked out; warns when checkout or
        checkin takes suspiciously long (>0.1s).
        """
        t0 = time.time()
        enq = self.avail.get()  # check out an idle enqueue (may block)
        t = time.time() - t0
        if t > 0.1:
            logging.warn('self.avail.get() %.4f', t)
        try:
            enq.queue(curis)
            self.addedcount += len(curis)
        finally:
            # Always return the enqueue to the pool, even on error.
            t0 = time.time()
            self.avail.put(enq)
            t = time.time() - t0
            if t > 0.1:
                logging.warn('slow self.avail.put() %.4f', t)
class Ydl(object):
    """Fire-and-forget audio downloads via the youtube-dl command line.

    download() spawns a youtube-dl subprocess per URL; a single watcher
    thread reaps finished processes from a queue and logs their outcome.
    """

    CMD = "youtube-dl --no-playlist -o '%(title)s.%(ext)s' --audio-quality 0 --extract-audio --audio-format best \"{url}\""

    def __init__(self, download_folder):
        """:param download_folder: directory downloads are written to
        (created lazily on first download)."""
        self.download_folder = download_folder
        self.log = logging.getLogger('Ydl')
        # Queue of (url, Popen) pairs awaiting reaping by the watcher.
        self.processes = LifoQueue()
        self.watcher = threading.Thread(target=self._cleanup, args=(self.processes, self.log))
        # BUG FIX: the watcher loops forever on processes.get(); without the
        # daemon flag it kept the interpreter alive at exit (__del__'s
        # Thread.cancel() does not exist and never worked).
        self.watcher.daemon = True
        self.log.info('Starting download watcher process')
        self.watcher.start()

    @staticmethod
    def _cleanup(processes, log):
        """Reap finished downloader processes forever, logging failures."""
        while True:
            log.info('Download watcher tick...')
            url, p = processes.get()
            log.info("Waiting for finishing download of " + url)
            retcode = p.wait()
            if retcode != 0:
                log.error('Something went wrong when downloading %s, return code is %d' % (url, retcode))
            log.info("Finished downloading " + url)

    def download(self, url):
        """Start a youtube-dl subprocess for `url` and hand it to the
        watcher; creates the download folder if missing."""
        cmd = Ydl.CMD.format(url=url)
        args = shlex.split(cmd)
        self.log.info("Downloading " + url)
        folder = self.download_folder
        if not os.path.exists(folder):
            # noinspection PyBroadException
            try:
                os.mkdir(folder)
            except:
                self.log.error('Unable to create %s' % folder)
                return
        p = subprocess.Popen(args, cwd=folder)
        self.log.info("Started process of downloader for " + url)
        # BUG FIX: p.pid is an int; `"Pid == " + p.pid` raised TypeError.
        self.log.info("Pid == %d" % p.pid)
        self.processes.put((url, p))

    def __del__(self):
        # threading.Thread has no cancel(); the daemon flag set in __init__
        # is what actually lets the process exit. Kept as a best-effort
        # no-op guard. (`except Exception, ex` was Python-2-only syntax.)
        try:
            self.watcher.cancel()
        except Exception:
            pass
def __init__(self, g, s):
    """Iterative graph search from source s over graph g (vertices 1..V).

    Populates:
      self.marked -- vertex -> True once found reachable from s
      self.edgeTo -- vertex -> predecessor on the discovery path (-1 if none)
      self.s      -- the source vertex

    g exposes vertices() (vertex count) and adj(v) (neighbour iterable).
    Note: the LIFO frontier makes this depth-first despite the BFS-like
    shape.
    """
    frontier = LifoQueue()
    vertex_range = range(1, g.vertices() + 1)
    self.marked = dict((v, False) for v in vertex_range)
    self.edgeTo = dict((v, -1) for v in vertex_range)
    self.s = s
    self.marked[s] = True
    frontier.put(s)
    while not frontier.empty():
        v = frontier.get()
        for w in g.adj(v):
            if self.marked[w]:
                continue
            frontier.put(w)
            self.marked[w] = True
            self.edgeTo[w] = v
def copy_bucket(aws_key, aws_secret_key, src_bucket_name, dst_bucket_name):
    """Copy every key of src_bucket_name to dst_bucket_name using a pool
    of 20 Worker threads fed through a bounded LIFO queue.

    Python 2 code (print statements); relies on the module-level Worker
    class.

    :param aws_key: AWS access key id (used for listing and by workers)
    :param aws_secret_key: AWS secret key
    :param src_bucket_name: bucket to list keys from
    :param dst_bucket_name: bucket workers copy keys into
    """
    print
    print 'Start copy of %s to %s' % (src_bucket_name, dst_bucket_name)
    print
    max_keys = 1000

    conn = S3Connection(aws_key, aws_secret_key)
    srcBucket = conn.get_bucket(src_bucket_name)

    result_marker = ''
    # Bounded queue: see backpressure sleep below.
    q = LifoQueue(maxsize=5000)

    for i in range(20):
        print 'Adding worker thread %s for queue processing' % i
        t = Worker(q, i, aws_key, aws_secret_key, src_bucket_name, dst_bucket_name)
        t.daemon = True  # workers die with the main thread
        t.start()

    i = 0  # total keys queued so far

    while True:
        print 'Fetch next %s, backlog currently at %s, have done %s' % (max_keys, q.qsize(), i)
        try:
            keys = srcBucket.get_all_keys(max_keys=max_keys, marker=result_marker)
            if len(keys) == 0:
                break
            for k in keys:
                i += 1
                q.put(k.key)
            print 'Added %s keys to queue' % len(keys)
            if len(keys) < max_keys:
                # A short page means the whole bucket has been listed.
                print 'All items now in queue'
                break
            # Resume listing after the last key of this page.
            result_marker = keys[max_keys - 1].key
            while q.qsize() > (q.maxsize - max_keys):
                time.sleep(1)  # sleep if our queue is getting too big for the next set of keys
        except BaseException:
            logging.exception('error during fetch, quitting')
            break

    print 'Waiting for queue to be completed'
    q.join()  # wait until workers processed every queued key
    print
    print 'Done'
    print
class Player(Fighter):
    """A Player character, inherits from Fighter

    Returns: A player object
    Functions: update, calcNewPos
    Attributes:
        directionqueue -- LIFO history of pressed direction keys; on key
            release the most recently pressed still-held key wins
        directiondict  -- direction name -> currently held?
        hp             -- hit points

    Note: uses dict.iteritems(), so this is Python 2 code.
    """

    def __init__(self, name, imagelist, colour, screenwidth, screenheight, *groups):
        super(Player, self).__init__(name, imagelist, colour, screenwidth, screenheight, *groups)
        self.directionqueue = LifoQueue()
        self.directiondict = {"up": False, "down": False, "left": False, "right": False}
        self.hp = 10

    def handlekeyevent(self, keyevent):
        """
        Handle input and set direction or attacking based on rules

        :param keyevent: (dict) Keyed on 'action' (e.g. 'keydown') and 'key' (e.g. 'up', 'fire')
        :return:
        """
        if keyevent["action"] == "keydown":
            if keyevent["key"] in self.directiondict:
                # Remember press order so releases can fall back to the
                # most recent still-held direction.
                self.directiondict[keyevent["key"]] = True
                self.directionqueue.put(keyevent["key"])
                self.direction = keyevent["key"]
                self.moving = True
            elif keyevent["key"] == "fire":
                self.attacking = True
        elif keyevent["action"] == "keyup":
            if keyevent["key"] in self.directiondict:
                self.directiondict[keyevent["key"]] = False
            elif keyevent["key"] == "fire":
                self.attacking = False
        if keyevent["key"] in self.directiondict and self.moving:
            if not self.directiondict[self.direction]:
                # Current direction released: pop history until we find a
                # direction key that is still held.
                while not self.directionqueue.empty():
                    self.direction = self.directionqueue.get()
                    if self.directiondict[self.direction]:
                        break
                if self.directionqueue.empty():
                    # History exhausted: pick any held direction, else stop.
                    self.moving = False
                    for direction, active in self.directiondict.iteritems():
                        if active:
                            self.direction = direction
                            self.moving = True
def find_path(self):  # use DFS
    """Finding augmenting paths in the residual network.

    Depth-first search (LIFO stack) from self.source. Returns a tuple
    (bottleneck_capacity, parent_map) as soon as self.sink is reached,
    or (0, parent_map) when no augmenting path exists. Unlike the
    flow-writing variant, this only *finds* the path; the caller applies
    the flow using `parent`.
    """
    parent = dict((node, None) for node in self.residual.iternodes())
    # Capacity of found path to node.
    capacity = {self.source: float("inf")}
    Q = LifoQueue()
    Q.put(self.source)
    while not Q.empty():
        node = Q.get()
        for edge in self.residual.iteroutedges(node):
            # Residual capacity left on this edge.
            cap = edge.weight - self.flow[edge.source][edge.target]
            if cap > 0 and parent[edge.target] is None:
                parent[edge.target] = edge.source
                capacity[edge.target] = min(capacity[edge.source], cap)
                if edge.target != self.sink:
                    Q.put(edge.target)
                else:
                    return capacity[self.sink], parent
    return 0, parent
def iterative(path, path_data):
    """Iteratively store a nested dict tree, depth-first, without recursion.

    NOTE: `self` and `changes` are free variables captured from the
    enclosing scope — this is an inner helper of a method, not a
    standalone function. Each stored node's result is appended to
    `changes`; nested dicts are pushed on a LIFO stack and processed
    until the stack drains.
    """
    stack=LifoQueue()
    while 1:
        if not (type(path_data) == dict and path_data):
            # Leaf value (or empty dict): store the node itself.
            changes.append(self.store_one(path, path_data))
        else:
            # Store each child; queue nested dicts for later processing.
            for node in path_data:
                node_path = path + '/' + node
                node_data = path_data[node]
                change = self.store_one(node_path, node_data)
                changes.append(change)
                if type(node_data) == type(dict()):
                    stack.put([node_path, node_data])
        if stack.qsize():
            # More subtrees pending: continue with the most recent one.
            path,path_data=stack.get()
            continue;
        break;
class ViewManager(object):
    """Owns the urwid main loop and a stack-based view history.

    change_view() pushes the current view onto a LIFO stack before
    switching, so close_current_view() returns to the previously shown
    view.
    """

    def __init__(self):
        self.main_frame = None
        self.current_view = None
        # LIFO history: top of stack = view to return to on close.
        self.last_view_stack = LifoQueue()
        self.header = None
        self.footer = None

    def start(self):
        """Build and run the urwid main loop (blocks)."""
        self._loop = urwid.MainLoop(self.main_frame, self.get_pallette(),
                                    unhandled_input=self.on_keypress)
        self._loop.run()

    def change_view(self, view):
        """Show `view`, remembering the current one for later return."""
        self.last_view_stack.put(self.current_view)
        self._update_view(view)

    def close_current_view(self):
        """Return to the most recently stacked view (blocks when empty)."""
        view = self.last_view_stack.get()
        self._update_view(view)

    def initialize_frame(self, view):
        """Create the header/footer chrome around the initial view."""
        self.header = urwid.AttrMap(urwid.Text("Press any key", wrap='clip'), 'header')
        self.footer = SearchBar()
        self.main_frame = urwid.Frame(view, self.header, self.footer.control, focus_part='body')

    def _update_view(self, view):
        # Swap the frame body and force a redraw.
        self.current_view = view
        self.main_frame.contents['body'] = ( view, None )
        self._loop.draw_screen()

    def on_keypress(self, input):
        # Delegate unhandled keys to whatever view is showing.
        self.current_view.on_keypress(input)

    def get_pallette(self):
        """Return the urwid display attribute palette."""
        palette = [('header', 'black', 'dark green', 'standout'),
                   ('footer', 'black', 'dark green', 'standout'),
                   ('normal', 'white', 'black'),
                   ('reveal focus', 'white', 'dark blue', 'standout'),
                   ('filename', 'light blue', 'black'),
                   ('diff', 'black', 'dark green', 'standout'),
                   ('added', 'dark green', 'black'),
                   ('deleted', 'dark red', 'black')]
        return palette
class MenuAction(object):
    """Command-pattern menu invoker with LIFO undo history.

    Each menu item maps to an Actions pair (activate/deactivate command
    objects exposing execute() and undo()). Executed commands are pushed
    onto a stack so undo() reverses them newest-first.
    """

    def __init__(self):
        # Most recently executed command sits on top of the undo stack.
        self.undo_commands = LifoQueue()
        self.commands = defaultdict(Actions)

    def set_command(self, item, activate, deactivate):
        """Bind an activate/deactivate command pair to `item`."""
        self.commands[item] = Actions(activate, deactivate)

    def activate(self, item):
        """Run the item's activate command and remember it for undo."""
        cmd = self.commands[item].activate
        cmd.execute()
        self.undo_commands.put(cmd)

    def deactivate(self, item):
        """Run the item's deactivate command and remember it for undo."""
        cmd = self.commands[item].deactivate
        cmd.execute()
        self.undo_commands.put(cmd)

    def undo(self):
        """Undo the most recently executed command; no-op when history
        is empty."""
        if self.undo_commands.empty():
            return
        self.undo_commands.get().undo()
def graham_scan(points):
    """
    :param points: numpy array of 2-dimensional points
    :return: Convex hull as another numpy array of points

    Uses a LifoQueue as the scan stack and the external which_side()
    predicate for the turn test.

    NOTE(review): cpts is built from points[1:], which silently assumes
    points[0] is the leftmost point (otherwise the leftmost appears twice
    and one point is dropped) — confirm callers guarantee this. The
    colinear-filter loop below indexes mask[i-1] starting at i=0, i.e. it
    first writes mask[-1] (the last element) — looks like an off-by-one;
    verify intended behavior before relying on colinear handling.
    """
    ch = LifoQueue()
    leftmost = points[np.argmin(points[:, 0])]  # finding the leftmost point... definitely in CH
    dtype = [('x', np.float64), ('y', np.float64), ('slope', np.float64)]  # preparing a nicer object for sorting
    cpts = np.zeros(len(points) - 1, dtype=dtype)
    cpts[:]['x'] = points[1:, 0]
    cpts[:]['y'] = points[1:, 1]
    cpts[:]['slope'] = (cpts[:]['y'] - leftmost[1]) / (cpts[:]['x'] - leftmost[0])  # angle <-> slope from leftmost
    sorted_pts = np.sort(cpts, order=['slope', 'x'])  # sort by angle (slope), then distance from leftmost
    # shows which points are colinear
    mask = np.zeros(len(sorted_pts), dtype=bool)  # getting rid of points with same angle from leftmost
    mask = np.logical_not(mask)
    for i in range(len(sorted_pts[1:])):
        mask[i - 1] = not sorted_pts[i - 1]['slope'] == sorted_pts[i]['slope']  # only keep farthest away
    sorted_pts = sorted_pts[mask]
    sorted_pts[:] = sorted_pts[::-1]  # sort greatest slope to lowest (move clockwise)
    pts = np.zeros((len(sorted_pts) + 1, 2))  # putting leftmost back into a new array object
    pts[1:, 0] = sorted_pts[:]['x']
    pts[1:, 1] = sorted_pts[:]['y']
    pts[0] = leftmost
    ch.put(pts[0])  # leftmost and the point with the highest slope are in the CH for sure
    ch.put(pts[1])
    for i, pt in enumerate(pts):
        if i < 2:
            continue
        else:
            # Pop the last two hull candidates and keep popping while the
            # new point makes a right turn (candidate was not on the hull).
            last = ch.get()
            second_to_last = ch.get()
            side = which_side(second_to_last, pts[i], last)  # Less than 0 => on the left, o/w on the right
            while side > 0:  # if last point put in on right side, it must have been wrong to be in CH
                last = second_to_last
                second_to_last = ch.get()
                side = which_side(second_to_last, pts[i], last)
            ch.put(second_to_last)
            ch.put(last)
            ch.put(pt)
    return np.array([ch.get() for i in range(ch.qsize())])  # Put the queue into an array
def process_transaction(self, transaction_id):
    """Run every task of a transaction; on the first failure, undo the
    already-completed tasks in reverse (LIFO) order.

    Returns {'success': True} when all tasks ran, otherwise
    {'error': True, 'processed': <index of the failed task>}.
    """
    stack = LifoQueue()  # completed tasks, most recent on top
    tasks = self.storage.get_tasks(transaction_id)
    logger.debug(tasks)
    for i, task in enumerate(tasks):
        try:
            task = Task(task)
            task.run()
            self.storage.set_task_processed(transaction_id, i, True)
            stack.put(task)
        except:
            # NOTE(review): bare except also swallows SystemExit /
            # KeyboardInterrupt — consider `except Exception`.
            logger.critical(format_exc())
            self.storage.set_task_processed(transaction_id, i, False)
            # Roll back in reverse order of execution.
            while stack.qsize():
                task = stack.get()
                task.reverse()
            return {
                'error': True,
                'processed': i,
            }
    return {
        'success': True
    }
def validTree(self, n, edges):
    """Return True iff the undirected graph with nodes 0..n-1 and edge
    list `edges` is a valid tree (connected and acyclic).

    BUG FIXES vs. the original:
      * `visited[0] == 0` was a no-op comparison where an assignment was
        intended;
      * a disconnected, cycle-free forest was accepted — a tree must be
        connected;
      * the debug `print next` (Python-2-only, shadowing the builtin) is
        gone.
    """
    # A graph on n nodes is a tree iff it has exactly n - 1 edges AND is
    # connected; the edge count also rules out cycles once connectivity
    # is verified below.
    if len(edges) != n - 1:
        return False
    # Build an undirected adjacency list.
    adjl = {i: [] for i in range(0, n)}
    for edge in edges:
        a, b = edge
        adjl[a].append(b)
        adjl[b].append(a)
    # Depth-first reachability from node 0 (LIFO frontier).
    visited = [False] * n
    q = LifoQueue()
    q.put(0)
    visited[0] = True
    while not q.empty():
        u = q.get()
        for v in adjl[u]:
            if not visited[v]:
                visited[v] = True
                q.put(v)
    # Tree iff every node is reachable from node 0.
    return all(visited)
def inspect_cass_log(config):
    """Extract the most recent ERROR entry (with its tab-indented
    traceback lines) from the Cassandra log.

    Reads the file backwards via BackwardsFileReader and uses a LIFO
    queue so the collected lines come back out in original (top-down)
    order.

    Returns the error block as a single string, or None when the log
    file is unconfigured/missing.
    """
    cass_log_file = get_cass_log_file(config)
    if not cass_log_file or not os.path.exists(cass_log_file):
        return None
    reader = BackwardsFileReader(cass_log_file)
    lifo = LifoQueue()
    last_line = reader.readline()
    lifo.put(last_line)
    while not re.match('^ERROR', last_line):
        last_line = reader.readline()
        if re.match('^\t', last_line):
            # Tab-indented lines are traceback detail for the error above.
            lifo.put(last_line)
        if re.match('^ERROR', last_line):
            lifo.put(last_line)
        if not last_line:
            # Reached the beginning of the file without finding an ERROR.
            break
    # Draining the LIFO restores the original line order.
    ret_str = ""
    while not lifo.empty():
        ret_str += lifo.get()
    return ret_str
class Gazzle(object): def __init__(self, *args, **kwargs): self.sockets = [] mongo_client = MongoClient('localhost', 27017) self.mongo = mongo_client['gazzle'] # self.mongo.drop_collection('pages') self.pages = self.mongo['pages'] self._init_whoosh() self.pageset = {} self.crawl_thread_count = kwargs.get('crawl_threads', 3) self.pending_crawls = 0 self.pending_lock = threading.RLock() self.frontier = Queue() self.crawlCount = 0 self.crawling = False self.crawl_cond = threading.Condition() self.crawl_lock = threading.RLock() self._init_crawl() self.index_set = set() self.index_q = LifoQueue() self.index_altq = LifoQueue() self.index_alt_switchoff = False self.indexing = False self.index_cond = threading.Condition() self.index_lock = threading.RLock() self._init_index() self._index_size() self.crosssite_crawl = False self.pagerank_cond = threading.Condition() self._start_thread(target = self._crawl, count = self.crawl_thread_count) self._start_thread(target = self._index, count = 1) # index writer doesn't support multithreading self._start_thread(target = self._pagerank, count = 1) self._start_thread(target = self._assert_thread, count=1) def _init_crawl(self): self.pageset = {} self.frontier = Queue() for page in self.pages.find(): self.pageset[page['url']] = page['page_id'] for page in self.pages.find({'crawled': False}): self.frontier.put(page['page_id']) self.crawlCount = self.pages.find({'crawled': True}).count() print('Added %d pages to page set' % len(self.pageset)) print('Added %d pages to frontier' % self.frontier.qsize()) print('Crawl count set to %d' % self.crawlCount) def _init_index(self): self.index_set = set() self.index_q = LifoQueue() for page in self.pages.find({'indexed': True}): self.index_set.add(page['page_id']) for page in self.pages.find({'crawled':True, 'indexed': False}): self.index_q.put(page['page_id']) print('Added %d pages to index set' % len(self.index_set)) print('Added %d pages to index queue' % self.index_q.qsize()) def 
_init_whoosh(self, clear = False): schema = Schema(page_id=STORED, title=TEXT(stored=True), content=TEXT, url=ID(stored=True)) if not os.path.exists("index"): os.mkdir("index") clear = True if clear: self.index = create_in('index', schema) else: self.index = open_dir("index") def _assert_thread(self): while True: a = self.pages.find_one({'crawled': True, 'title': {'$exists': False}}) assert a == None, 'Found inconsistent page in db ID: %d URL: %s' % (a['page_id'], a['url']) time.sleep(1) def _pagerank(self): while True: with self.pagerank_cond: self.pagerank_cond.wait() pages = self.pages.find({'crawled': True, 'indexed': True}, { '_id':False, 'content': False, 'links.url': False }) RANK_SCALE = 1 ALPHA = 0.25 page_count = pages.count() id_to_ind = {} ind_to_id = [] for page in pages: ind = len(id_to_ind) ind_to_id.append(page['page_id']) id_to_ind[page['page_id']] = ind pages.rewind() pmat = [] for page in pages: row = [0.0] * page_count link_count = 0 for link in page['links']: if link['page_id'] in id_to_ind: ind = id_to_ind[link['page_id']] row[ind] += RANK_SCALE link_count += 1 alph = ALPHA * RANK_SCALE / page_count for ind in range(page_count): if link_count == 0: row[ind] += 1 / page_count else: row[ind] *= (1 - alph) / link_count row[ind] += alph / page_count pmat.append(row) page_rank = [0] * page_count page_rank[0] = 1 for d in range(30): page_rank = dot(page_rank, pmat) result = [{"page_id": ind_to_id[x], "rank": self._format_rank(page_rank[x])} for x in range(page_count)] self._send_to_all({ 'action': 'page rank', 'pages': result }) for ind in range(page_count): self.pages.update({"page_id": ind_to_id[ind]}, {"$set": {"rank": page_rank[ind]}}, upsert=False) def _index(self): _ = { 'lock': threading.RLock(), 'writer': None, 'need_commit': [], } def flush(_): while True: if len(_['need_commit']) != 0 and _['writer'] != None: _['lock'].acquire() _['writer'].commit() _['writer'] = None need_tmp = _['need_commit'] _['need_commit'] = [] _['lock'].release() 
self._send_to_all({ 'action': 'index commit', 'pages': map(lambda x: {'page_id': x}, need_tmp) }) self.pages.update({'page_id' : {'$in': need_tmp}}, {'$set': {'indexed': True}}, multi = True, upsert = False) with self.pagerank_cond: self.pagerank_cond.notify() self._send_to_all({ 'action': 'index size', 'value': self.index_size }) time.sleep(5) self._start_thread(target = flush, kwargs={'_':_}) while True: with self.index_cond: with self.pending_lock: pending = self.pending_crawls != 0 while not self.indexing or pending: self.index_cond.wait() with self.pending_lock: pending = self.pending_crawls != 0 try: item_index = self.index_altq.get(False) if self.index_alt_switchoff: self.indexing = False except: item_index = self.index_q.get(True) if item_index in self.index_set: continue item = self.pages.find_one({'page_id': item_index}) _['lock'].acquire() if _['writer'] == None: _['writer'] = self.index.writer() assert item.get('title') != None , 'Uncrawled page in index queue, ID: %d, URL: %s' %(item['page_id'], item['url']) _['writer'].add_document(page_id=item_index, title=item['title'], content=item['content'], url=item['url']) _['need_commit'].append(item_index) _['lock'].release() self.index_set.add(item_index) self._send_to_all({ 'action': 'index page', 'page': {'page_id': item_index} }) def _crawl(self): with self.pending_lock: self.pending_crawls += 1 while True: with self.pending_lock: self.pending_crawls -= 1 with self.crawl_cond: while not self.crawling: if self.indexing: with self.index_cond: self.index_cond.notify() self.crawl_cond.wait() with self.pending_lock: self.pending_crawls += 1 item_index = self.frontier.get(True) item = self.pages.find_one({'page_id': item_index}) page = urllib2.urlopen(item['url']) soup = BeautifulSoup(page.read()) title = soup.title.text #.replace(' - Wikipedia, the free encyclopedia', '') if len(title) > 12: title = title[:12] + '...' 
body = soup.body.text links = map(lambda link: self.extract_anchor_link(link, item['url']), soup.find_all("a")) links = filter(lambda link: link != '' and link != None, links) with self.crawl_lock: # links = filter(lambda link: link not in self.pageset, links) print("%s Crawling %s found %d links" % (threading.current_thread().name, item['url'], len(links))) result_links = [] for link in links: if link not in self.pageset: page_id = len(self.pageset) self.pages.insert({ 'page_id': page_id, 'url': link, 'crawled': False, 'indexed': False }) self.pageset[link] = page_id self.frontier.put(page_id) else: page_id = self.pageset[link] result_links.append({'url': link, 'page_id': page_id}) self.crawlCount += 1 self.index_q.put(item_index) self.pages.update({'page_id': item_index}, { '$push': {'links': {'$each': result_links}}, '$set': {'title': unicode(title), 'content': unicode(body), 'crawled': True} }) self._send_to_all(json.dumps([ { 'action': 'crawl page', 'page': {'page_id': item_index, 'url': item['url'], 'link_count': len(links), 'title': title} }, { 'action': 'frontier size', 'value': self.frontier.qsize() }, { 'action': 'crawl size', 'value': self.crawlCount }, ])) def extract_anchor_link(self, link, url): href = link.get('href', '') m = re.match('([^?]+)[?].*', unicode(href)) if m != None: href = m.group(1) if href == '': return '' if 'https://' in href: href = href.replace('https://', 'http://') if re.match('#.*', href) != None: return '' elif re.match('//.*', href): return 'http:' + href elif re.match('/.*', href): m = re.match('(http://[0-9a-zA-Z.]+)/*', url) # print("link %s %s going to %s" % (href, "", "")) return m.group(1) + href elif self.crosssite_crawl: return href return '' def search(self, socket, query, rank_part=0): def sort_results(results): scores = {} max_score = 0 max_rank = 0 for res in results: scores[res.fields()['page_id']] = res.score if res.score > max_score: max_score = res.score page_ids = map(lambda x: x.fields()['page_id'], results) 
pages = self.pages.find({"page_id": {"$in": page_ids}}, {"title": True, "page_id":True, "rank":True, "url": True}) pages = map(lambda x: dict(x), pages) for page in pages: if 'rank' not in page: page['rank'] = 0 if page['rank'] > max_rank: max_rank = page['rank'] for page in pages: del page['_id'] rank = 1 - page['rank'] / float(max_rank) score = scores[page['page_id']] / float(max_score) final_score = rank * (rank_part / 100.0) + score * (1 - rank_part / 100.0) page['score'] = final_score pages.sort(key = lambda x: x['score']) return pages with self.index.searcher() as searcher: parser = QueryParser("content", self.index.schema) parsed_query = parser.parse(query) results = searcher.search(parsed_query) if len(results) > 0: print("found some") print(len(results)) results = sort_results(results) else: results = [] # results = map(lambda x: dict(x), results) print(results) socket.write_message(json.dumps({ 'action': 'search results', 'results' : results })) def clear_index(self): self._init_whoosh(clear = True) self.pages.update({'indexed': True}, {'$set': {'indexed': False}}, multi = True, upsert = False) self._init_index() self._send_to_all({ 'action': 'index clear' }) def clear_frontier(self): self.pages.remove({'crawled': False}) self._init_crawl() self._send_to_all({ 'action': 'init', 'frontier_size': 0 }) def clear_all(self): self.mongo.drop_collection('pages') self._init_whoosh(clear = True) self._init_index() self._init_crawl() self.indexing = False self.crawling = False self.index_size = 0 self.crosssite_crawl = False self._send_to_all(json.dumps({ 'action': 'init', 'pages': [], 'frontier_size': 0, 'crawl_size': 0, 'index_size': 0, 'crawling': False, 'indexing': False, 'crosssite_crawl': False })) def _format_rank(self, rank): if rank == None: return None return "%.2f" % (math.log(rank + 1) * 100) def _send_to_all(self, message): if type(message) != str: message = json.dumps(message) for socket in self.sockets: socket.write_message(message) def 
_start_thread(self, target, count=1, args=(), kwargs={}): for x in range(count): thread = threading.Thread(target=target, args=args, kwargs=kwargs) thread.setDaemon(True) thread.start() def _index_size(self): self.index_size = sum(os.path.getsize('index/'+f) for f in os.listdir('index') if os.path.isfile('index/'+f)) print("Index Size: %d" % self.index_size) return self.index_size def add_socket(self, socket): self.sockets.append(socket) pages = self.pages.find({'crawled': True}, {'_id': False, 'page_id':True, 'url': True, 'title': True, 'indexed': True, 'rank': True}) pages = map(lambda x: {'page_id': x['page_id'], 'title': x['title'], 'url': x['url'], 'indexed': x['indexed'], 'rank': self._format_rank(x.get('rank'))}, pages) socket.write_message(json.dumps({ 'action': 'init', 'pages': pages, 'frontier_size': self.frontier.qsize(), 'crawl_size': self.crawlCount, 'index_size': self.index_size, 'crawling': self.crawling, 'indexing': self.indexing, 'crosssite_crawl': self.crosssite_crawl })) def remove_socket(self, socket): self.sockets.remove(socket) def start_crawl(self, url=''): if url == '': url = 'http://en.wikipedia.org/wiki/Information_retrieval' with self.crawl_lock: page_id = len(self.pageset) self.pages.insert({ 'page_id': page_id, 'url': url, 'crawled': False, 'indexed': False }) self.frontier.put(len(self.pageset)) self.pageset[url] = page_id self.toggle_crawl(state = True) def toggle_crawl(self, state=None): with self.crawl_cond: if state == None: self.crawling = not self.crawling else: self.crawling = state self.crawl_cond.notifyAll() self._send_to_all({ 'action': 'init', 'crawling': self.crawling }) def toggle_index(self, state=None): with self.index_cond: if state == None: self.indexing = not self.indexing else: self.indexing = state self.index_cond.notifyAll() self._send_to_all({ 'action': 'init', 'indexing': self.indexing }) def index_page(self, page): self.index_altq.put(page) with self.index_cond: self.index_alt_switchoff = not self.indexing 
self.indexing = True self.index_cond.notifyAll() def toggle_crosssite_crawl(self, state=None): if state == None: self.crosssite_crawl = not self.crosssite_crawl else: self.crosssite_crawl = state self._send_to_all({ 'action': 'init', 'crosssite_crawl': self.crosssite_crawl })
def stage_one(skel_img, dt, anisotropy):
    """stage one, finds all nodes and edges, except for loops

    Traces the skeleton volume voxel-by-voxel with a depth-first walk
    (LIFO queue), recording terminal points, branch points and the edges
    between them.

    :param skel_img: binary skeleton volume (numpy array); a deepcopy is
        consumed, the caller's array is left untouched.
    :param dt: distance-transform volume indexed like ``skel_img``.
    :param anisotropy: voxel scaling; unused in this stage but kept for
        interface compatibility with callers.
    :returns: tuple ``(is_node_map, is_term_map, is_branch_map, nodes,
        edges, loop_list)``.
    """
    # initializing
    volume = deepcopy(skel_img)
    is_queued_map = np.zeros(volume.shape, dtype=int)
    is_node_map = np.zeros(volume.shape, dtype=int)
    is_term_map = np.zeros(volume.shape, dtype=int)
    is_branch_map = np.zeros(volume.shape, dtype=int)
    is_standart_map = np.zeros(volume.shape, dtype=int)
    nodes = {}
    edges = []
    last_node = 1
    current_node = 1
    queue = LifoQueue()
    point = init(volume)
    loop_list = []
    branch_point_list = []
    node_list = []
    length = 0

    # init() signals an exhausted/empty skeleton with sentinel (-1, -1, -1).
    if (point == np.array([-1, -1, -1])).all():
        return is_node_map, is_term_map, is_branch_map, nodes, edges, loop_list

    is_queued_map[point[0], point[1], point[2]] = 1
    not_queued, is_node_list, are_near = check_box(volume, point,
                                                   is_queued_map, is_node_map)
    nodes[current_node] = point

    # A seed with no unqueued neighbours is an isolated voxel: erase it and
    # pick a fresh seed until a traceable one is found (or nothing is left).
    while len(not_queued) == 0:
        volume[point[0], point[1], point[2]] = 0
        is_queued_map[point[0], point[1], point[2]] = 0
        nodes = {}
        point = init(volume)
        if (point == np.array([-1, -1, -1])).all():
            return (is_node_map, is_term_map, is_branch_map, nodes, edges,
                    loop_list)
        is_queued_map[point[0], point[1], point[2]] = 1
        not_queued, is_node_list, are_near = check_box(volume, point,
                                                       is_queued_map,
                                                       is_node_map)
        nodes[current_node] = point

    # Queue every unvisited neighbour of the seed.
    # BUG FIX: queue items were previously wrapped in ``np.array([...])``.
    # Those are ragged (inhomogeneous) sequences, which NumPy >= 1.24
    # rejects with ValueError. A plain tuple unpacks identically below and
    # needs no array machinery at all.
    for i in not_queued:
        queue.put((i, current_node, length,
                   [[point[0], point[1], point[2]]],
                   [dt[point[0], point[1], point[2]]]))
        is_queued_map[i[0], i[1], i[2]] = 1

    # A seed with exactly one continuation is a terminal, otherwise a branch.
    if len(not_queued) == 1:
        is_term_map[point[0], point[1], point[2]] = last_node
        is_node_map[point[0], point[1], point[2]] = last_node
    else:
        is_branch_map[point[0], point[1], point[2]] = last_node
        is_node_map[point[0], point[1], point[2]] = last_node

    while queue.qsize():
        # pull item from queue
        point, current_node, length, edge_list, dt_list = queue.get()
        not_queued, is_node_list, are_near = check_box(volume, point,
                                                       is_queued_map,
                                                       is_node_map)

        # standart point: exactly one continuation, keep walking this edge
        if len(not_queued) == 1:
            dt_list.extend([dt[point[0], point[1], point[2]]])
            edge_list.extend([[point[0], point[1], point[2]]])
            length = length + norm3d(point, not_queued[0])
            queue.put((not_queued[0], current_node, length, edge_list,
                       dt_list))
            is_queued_map[not_queued[0][0], not_queued[0][1],
                          not_queued[0][2]] = 1
            branch_point_list.extend([[point[0], point[1], point[2]]])
            is_standart_map[point[0], point[1], point[2]] = 1

        # dead end that still touches visited voxels/nodes: part of a loop,
        # which this stage deliberately defers (see docstring)
        elif len(not_queued) == 0 and (len(are_near) > 1
                                       or len(is_node_list) > 0):
            loop_list.extend([current_node])

        # terminating point
        elif len(not_queued) == 0 and len(are_near) == 1 and len(
                is_node_list) == 0:
            last_node = last_node + 1
            nodes[last_node] = point
            dt_list.extend([dt[point[0], point[1], point[2]]])
            edge_list.extend([[point[0], point[1], point[2]]])
            node_list.extend([[point[0], point[1], point[2]]])
            edges.extend([[
                np.array([current_node, last_node]), length, edge_list,
                dt_list
            ]])
            is_term_map[point[0], point[1], point[2]] = last_node
            is_node_map[point[0], point[1], point[2]] = last_node

        # branch point
        elif len(not_queued) > 1:
            dt_list.extend([dt[point[0], point[1], point[2]]])
            edge_list.extend([[point[0], point[1], point[2]]])
            last_node = last_node + 1
            nodes[last_node] = point
            # build edge
            edges.extend([[
                np.array([current_node, last_node]), length, edge_list,
                dt_list
            ]])
            node_list.extend([[point[0], point[1], point[2]]])
            # putting node branches in the queue (same tuple fix as above)
            for x in not_queued:
                length = norm3d(point, x)
                queue.put((x, last_node, length,
                           [[point[0], point[1], point[2]]],
                           [dt[point[0], point[1], point[2]]]))
                is_queued_map[x[0], x[1], x[2]] = 1
            is_branch_map[point[0], point[1], point[2]] = last_node
            is_node_map[point[0], point[1], point[2]] = last_node

    return is_node_map, is_term_map, is_branch_map, nodes, edges, loop_list
def test_proxy(proxy_dict): url = 'http://rp.ozdata.info/proxy.html' headers = { 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/45.0.2454.101 Chrome/45.0.2454.101 Safari/537.36' } r = requests.get(url, proxies=proxy_dict, headers=headers) if r.status_code == 200 and r.text.rstrip() == 'Success': return True else: return False for i in range(num_of_pages): q.put('http://www.proxy4free.com/list/webproxy' + str(i + 1) + '.html') def downloader(queue, proxies): while True: url = queue.get() r = requests.get(url) if r.status_code == 200: parsed_html = BeautifulSoup(r.content) table = parsed_html.body.find( 'table', attrs={'class': 'table table-striped proxy-list'}) table_body = table.find('tbody') rows = table_body.find_all('tr') print url + ' was fetched\n' for row in rows:
# Demo of the three queue disciplines in the Python 3 ``queue`` module.
from queue import Queue, LifoQueue, PriorityQueue

# print(help(Queue))

# Queue is the basic first-in-first-out container: put() appends at the
# tail, get() removes from the head.
q = Queue()
for value in (0, 1, 2):
    q.put(value)
while not q.empty():
    print(q.get())  # prints 0, 1, 2
print("---")

# LifoQueue drains in last-in-first-out order (i.e. it behaves as a stack).
q1 = LifoQueue()
for value in (0, 1, 2):
    q1.put(value)
while not q1.empty():
    print(q1.get())  # prints 2, 1, 0
print("---")

# Sometimes the processing order should depend on the items themselves
# rather than on arrival order -- e.g. the boss's print job may outrank a
# developer's. PriorityQueue retrieves entries according to their sort
# order, so queued items must be orderable.
class Job(object):
    """A prioritised work item, orderable so PriorityQueue can rank it."""

    def __init__(self, priority, description):
        self.priority = priority
        self.description = description
        print('New job:', description)

    def __lt__(self, other):
        # PriorityQueue only needs "less than" to decide retrieval order.
        return self.priority < other.priority
# Python program to
# demonstrate stack implementation
# using the queue module

# BUG FIX: the original imported the Python 2 module name ("Queue"), yet
# every print() below uses Python 3 call syntax; import the py3 module.
from queue import LifoQueue

# Initializing a stack
# BUG FIX: the keyword argument is ``maxsize`` -- the original ``max_size``
# raises TypeError (unexpected keyword argument) before anything runs.
stack = LifoQueue(maxsize=3)

# qsize() show the number of elements
# in the stack
print(stack.qsize())

# put() function to push
# element in the stack
stack.put('a')
stack.put('b')
stack.put('c')

print("Full: ", stack.full())
print("Size: ", stack.qsize())

# get() function to pop
# element from stack in
# LIFO order
print('\nElement poped from the stack')
print(stack.get())
print(stack.get())
print(stack.get())

print("\nEmpty: ", stack.empty())
class Transceiver(object):
    # Endpoint bridging an application to the evasion messaging hub over
    # ZeroMQ: SUBscribes on incoming_uri for broadcasts and PUSHes queued
    # outgoing messages from a single main loop.

    def __init__(self, config={}, message_handler=None):
        """Set up a receiver which connects to the messaging hub.

        :param config: This is a dict in the form::

            config = dict(
                incoming='tcp://localhost:15566', # default
                outgoing='tcp://localhost:15567',
                idle_timeout=1000, # milliseconds:
            )

        :param message_handler: optional callable invoked by message_in()
            with each received multipart message; if None, messages are
            only logged at DEBUG level.
        """
        # NOTE(review): ``config={}`` is a mutable default argument; safe
        # here only because the dict is never mutated -- confirm.
        self.log = logging.getLogger("evasion.messenger.endpoint.Transceiver")
        self.endpoint_uuid = str(uuid.uuid4())
        # exit_time: set by stop() to ask the main loop to leave;
        # wait_for_exit: set by the main loop once it has cleaned up.
        self.exit_time = threading.Event()
        self.wait_for_exit = threading.Event()
        self.incoming = None  # configured in main().
        self.incoming_uri = config.get("incoming", 'tcp://localhost:15566')
        self.log.info("Recieving on <%s>" % self.incoming_uri)
        self.outgoing_uri = config.get("outgoing", 'tcp://localhost:15567')
        self.log.info("Sending on <%s>" % self.outgoing_uri)
        self.idle_timeout = int(config.get("idle_timeout", 2000))
        self.log.info("Idle Timeout (ms): %d" % self.idle_timeout)
        self.message_handler = message_handler
        # Pre-built sync frame sent before every outgoing message (see main).
        self.sync_message = frames.sync_message(
            "endpoint-%s" % self.endpoint_uuid
        )
        # Queue up messages to be sent in the main message loop.
        # NOTE(review): this is a LifoQueue, so queued messages are sent
        # newest-first -- confirm that ordering is intentional.
        self._out_queue = LifoQueue()

    def main(self):
        """Running the main loop sending and receiving.

        This will keep running until stop() is called. This sets the
        exit flag causing clean up and shutdown.

        """
        # NOTE(review): ``exitTime`` looks like a dead leftover -- the loop
        # below only consults the ``exit_time`` Event; confirm and remove.
        self.exitTime = False

        context = zmq.Context()
        incoming = context.socket(zmq.SUB)
        # Empty subscription filter: receive every published message.
        incoming.setsockopt(zmq.SUBSCRIBE, '')
        incoming.connect(self.incoming_uri)
        outgoing = context.socket(zmq.PUSH);
        outgoing.connect(self.outgoing_uri);

        def _shutdown():
            # Best-effort teardown; each step is logged but never fatal so
            # one failed close cannot prevent the others.
            try:
                incoming.close()
            except ZMQError:
                self.log.exception("main: error calling incoming.close()")
            try:
                outgoing.close()
            except ZMQError:
                self.log.exception("main: error calling outgoing.close()")
            try:
                context.term()
            except ZMQError:
                self.log.exception("main: error calling context.term()")

        try:
            poller = zmq.Poller()
            poller.register(incoming, zmq.POLLIN)

            while not self.exit_time.is_set():
                try:
                    events = poller.poll(self.idle_timeout)
                except ZMQError as e:
                    # 4 = 'Interrupted system call'
                    if e.errno == 4:
                        self.log.info("main: exit time: %s" % e)
                        break
                    else:
                        self.log.info("main: <%s>" % e)
                        break
                except Exception:
                    self.log.exception("main: fatal error while polling ")
                    break
                else:
                    # NOTE(review): Poller.poll() returns a list of
                    # (socket, event) pairs; ``list > int`` only "works" on
                    # Python 2 (and is truthy even for an empty list there),
                    # and raises TypeError on Python 3. Presumably this was
                    # meant to be ``if events:`` -- confirm before changing.
                    if (events > 0):
                        msg = incoming.recv_multipart()
                        self.message_in(tuple(msg))

                # Now recover and queued outgoing messages:
                if not self._out_queue.empty():
                    message = self._out_queue.get_nowait()
                    if message:
                        try:
                            # send sync hub followed by message. The sync
                            # will kick the hub into life if its just
                            # started:
                            outgoing.send_multipart(self.sync_message)
                            outgoing.send_multipart(message)
                        except ZMQError as e:
                            # 4 = 'Interrupted system call'
                            if e.errno == 4:
                                self.log.info((
                                    "main: sigint or other signal interrupt"
                                    ", exit time <%s>"
                                ) % e)
                                break
                            else:
                                self.log.info("main: <%s>" % e)
                                break
                        except Exception:
                            self.log.exception("main: fatal error sending ")
                            break
                        finally:
                            self._out_queue.task_done()
        finally:
            # Signal stop() that shutdown is complete, then release sockets.
            self.wait_for_exit.set()
            _shutdown()

    def start(self):
        """Set up zmq communication and start receiving messages from the hub.

        """
        # coverage can't seem to get to this:
        def _main(notused):  # pragma: no cover
            self.exit_time.clear()
            self.wait_for_exit.clear()
            self.main()
        # Runs main() on a daemon-style OS thread (py2 ``thread`` module).
        thread.start_new(_main, (0,))

    def stop(self, wait=2):
        """Stop receiving messages from the hub and clean up.

        :param wait: The time in seconds to wait before giving up
        on a clean shutdown.

        """
        self.log.info("stop: shutting down messaging.")
        self.exit_time.set()
        # Block (up to ``wait`` seconds) until main() confirms it exited.
        self.wait_for_exit.wait(wait)
        self.log.info("stop: done.")

    def message_out(self, message):
        """This sends a message to the messagehub for dispatch to all
        connected endpoints.

        :param message: A tuple or list representing a multipart ZMQ
        message. If the message is not a tuple or list then
        MessageOutError will be raised.

        Note: The message is actually queued here so that the main loop
        will send it when its ready.

        :returns: None.

        """
        if isinstance(message, list) or isinstance(message, tuple):
            self._out_queue.put(message)
        else:
            m = "The message must be a list or tuple instead of <%s>" % type(
                message
            )
            raise MessageOutError(m)

    def message_in(self, message):
        """Called on receipt of an evasion frame to determine what to do.

        The message_handler set in the constructer will be called if one
        was set. If none was set then the message will be logged at the
        DEBUG level.

        :param message: A tuple or list representing a multipart ZMQ
        message.

        :returns: None.

        """
        if self.message_handler:
            try:
                #self.log.debug("message_in: message <%s>" % str(message))
                self.message_handler(message)
            except:
                # Deliberate catch-all: a broken handler must not kill the
                # receive loop; the error is logged with traceback instead.
                self.log.exception("message_in: Error handling message - ")
        else:
            self.log.debug("message_in: message <%s>" % str(message))
def put(self, val):
    # Trace-enabled override of the queue's put(): when the owning sandbox
    # runs in debug mode, print the sandbox's last_index before delegating
    # to the base implementation.
    # NOTE(review): ``LQ`` is presumably the imported LifoQueue base class
    # alias -- confirm at the import site. Python 2 print statement; this
    # snippet predates py3.
    if self.sandbox.debug:
        print 'put last_index='+str(self.sandbox.last_index)
    return LQ.put(self, val)
class CoverDelegate(QStyledItemDelegate):
    # Item delegate that paints book covers in the grid view: cover pixmap,
    # optional title line under it, and emblem icons either embossed on the
    # cover or in a configurable gutter.

    MARGIN = 4
    # Sentinel objects used as enum values for the emblem gutter position.
    TOP, LEFT, RIGHT, BOTTOM = object(), object(), object(), object()

    @pyqtProperty(float)
    def animated_size(self):
        # Scale factor (0..1) driven by QPropertyAnimation while a cover
        # "pulses"; exposed as a Qt property so the animation can write it.
        return self._animated_size

    @animated_size.setter
    def animated_size(self, val):
        self._animated_size = val

    def __init__(self, parent):
        super(CoverDelegate, self).__init__(parent)
        self._animated_size = 1.0
        self.animation = QPropertyAnimation(self, b'animated_size', self)
        self.animation.setEasingCurve(QEasingCurve.OutInCirc)
        self.animation.setDuration(500)
        self.set_dimensions()
        self.cover_cache = CoverCache()
        # book_ids pushed here when no cached cover exists; a renderer
        # elsewhere presumably services this queue -- confirm.
        self.render_queue = LifoQueue()
        self.animating = None  # the QModelIndex currently being animated
        self.highlight_color = QColor(Qt.white)
        self.rating_font = QFont(rating_font())

    def set_dimensions(self):
        # Recompute cover/item geometry from user prefs (gprefs). Pref
        # values < 0.1 mean "automatic"; otherwise they are centimetres
        # converted to pixels via the widget's DPI.
        width = self.original_width = gprefs['cover_grid_width']
        height = self.original_height = gprefs['cover_grid_height']
        self.original_show_title = show_title = gprefs['cover_grid_show_title']
        self.original_show_emblems = gprefs['show_emblems']
        # (sic) "orginal" -- existing attribute names kept as-is.
        self.orginal_emblem_size = gprefs['emblem_size']
        self.orginal_emblem_position = gprefs['emblem_position']
        self.emblem_size = gprefs[
            'emblem_size'] if self.original_show_emblems else 0
        try:
            # Map the pref string ('top'/'left'/...) onto the sentinel attrs.
            self.gutter_position = getattr(
                self, self.orginal_emblem_position.upper())
        except Exception:
            self.gutter_position = self.TOP
        if height < 0.1:
            height = auto_height(self.parent())
        else:
            height *= self.parent().logicalDpiY() * CM_TO_INCH
        if width < 0.1:
            width = 0.75 * height  # default cover aspect ratio 3:4
        else:
            width *= self.parent().logicalDpiX() * CM_TO_INCH
        self.cover_size = QSize(width, height)
        self.title_height = 0
        if show_title:
            f = self.parent().font()
            sz = f.pixelSize()
            if sz < 5:
                sz = f.pointSize() * self.parent().logicalDpiY() / 72.0
            self.title_height = max(25, sz + 10)
        self.item_size = self.cover_size + QSize(
            2 * self.MARGIN, (2 * self.MARGIN) + self.title_height)
        if self.emblem_size > 0:
            # Reserve gutter space on the side the emblems are drawn.
            extra = self.emblem_size + self.MARGIN
            self.item_size += QSize(extra, 0) if self.gutter_position in (
                self.LEFT, self.RIGHT) else QSize(0, extra)
        self.calculate_spacing()
        self.animation.setStartValue(1.0)
        self.animation.setKeyValueAt(0.5, 0.5)
        self.animation.setEndValue(1.0)

    def calculate_spacing(self):
        # Grid spacing pref < 0.01 means "automatic" (10% of cover width,
        # clamped to 10..50 px); otherwise centimetres -> pixels.
        spc = self.original_spacing = gprefs['cover_grid_spacing']
        if spc < 0.01:
            self.spacing = max(10, min(50, int(0.1 * self.original_width)))
        else:
            self.spacing = self.parent().logicalDpiX() * CM_TO_INCH * spc

    def sizeHint(self, option, index):
        return self.item_size

    def render_field(self, db, book_id):
        # Return (text, is_stars) for the line drawn under the cover; the
        # field shown is configurable, defaulting to the title.
        is_stars = False
        try:
            field = db.pref('field_under_covers_in_grid', 'title')
            if field == 'size':
                ans = human_readable(
                    db.field_for(field, book_id, default_value=0))
            else:
                mi = db.get_proxy_metadata(book_id)
                display_name, ans, val, fm = mi.format_field_extended(field)
                if fm and fm['datatype'] == 'rating':
                    ans = rating_to_stars(
                        val, fm['display'].get('allow_half_stars', False))
                    is_stars = True
            return ('' if ans is None else unicode(ans)), is_stars
        except Exception:
            # Rendering must never break painting; fall back to empty text.
            if DEBUG:
                import traceback
                traceback.print_exc()
            return '', is_stars

    def render_emblem(self, book_id, rule, rule_index, cache, mi, db,
                      formatter, template_cache):
        # Evaluate one emblem template rule for a book, memoised per
        # (book_id, rule) in ``cache``. Returns (icon_name_or_None, mi) so
        # the caller can reuse the lazily-fetched metadata object.
        ans = cache[book_id].get(rule, False)
        if ans is not False:
            return ans, mi
        ans = None
        if mi is None:
            mi = db.get_proxy_metadata(book_id)
        ans = formatter.safe_format(rule, mi, '', mi,
                                    column_name='cover_grid%d' % rule_index,
                                    template_cache=template_cache) or None
        cache[book_id][rule] = ans
        return ans, mi

    def cached_emblem(self, cache, name, raw_icon=None):
        # Resolve an emblem name to a pixmap, memoised in ``cache``.
        # ``False`` is the cache-miss sentinel since None is a valid value.
        ans = cache.get(name, False)
        if ans is not False:
            return ans
        sz = self.emblem_size
        ans = None
        if raw_icon is not None:
            ans = raw_icon.pixmap(sz, sz)
        elif name == ':ondevice':
            ans = QIcon(I('ok.png')).pixmap(sz, sz)
        elif name:
            # User-supplied icon file from the config dir.
            pmap = QIcon(os.path.join(config_dir, 'cc_icons',
                                      name)).pixmap(sz, sz)
            if not pmap.isNull():
                ans = pmap
        cache[name] = ans
        return ans

    def paint(self, painter, option, index):
        QStyledItemDelegate.paint(
            self, painter, option,
            empty_index)  # draw the hover and selection highlights
        m = index.model()
        db = m.db
        try:
            book_id = db.id(index.row())
        except (ValueError, IndexError, KeyError):
            return
        if book_id in m.ids_to_highlight_set:
            # Rounded-rect outline for search-highlighted books.
            painter.save()
            try:
                painter.setPen(self.highlight_color)
                painter.setRenderHint(QPainter.Antialiasing, True)
                painter.drawRoundedRect(option.rect, 10, 10, Qt.RelativeSize)
            finally:
                painter.restore()
        marked = db.data.get_marked(book_id)
        db = db.new_api
        # cdata: QPixmap if cached, None if the book has no cover,
        # False if the cover has not been rendered yet.
        cdata = self.cover_cache[book_id]
        device_connected = self.parent().gui.device_connected is not None
        on_device = device_connected and db.field_for('ondevice', book_id)

        # Collect gutter emblems (template rules plus marked/on-device).
        emblem_rules = db.pref('cover_grid_icon_rules', default=())
        emblems = []
        if self.emblem_size > 0:
            mi = None
            for i, (kind, column, rule) in enumerate(emblem_rules):
                icon_name, mi = self.render_emblem(
                    book_id, rule, i, m.cover_grid_emblem_cache, mi, db,
                    m.formatter, m.cover_grid_template_cache)
                if icon_name is not None:
                    pixmap = self.cached_emblem(m.cover_grid_bitmap_cache,
                                                icon_name)
                    if pixmap is not None:
                        emblems.append(pixmap)
            if marked:
                emblems.insert(
                    0,
                    self.cached_emblem(m.cover_grid_bitmap_cache, ':marked',
                                       m.marked_icon))
            if on_device:
                emblems.insert(
                    0,
                    self.cached_emblem(m.cover_grid_bitmap_cache,
                                       ':ondevice'))
        painter.save()
        right_adjust = 0
        try:
            rect = option.rect
            rect.adjust(self.MARGIN, self.MARGIN, -self.MARGIN, -self.MARGIN)
            if self.emblem_size > 0:
                self.paint_emblems(painter, rect, emblems)
            orect = QRect(rect)
            if cdata is None or cdata is False:
                # No cover available: draw title/authors text instead, and
                # queue the cover for rendering if it simply isn't cached.
                title = db.field_for('title', book_id, default_value='')
                authors = ' & '.join(
                    db.field_for('authors', book_id, default_value=()))
                painter.setRenderHint(QPainter.TextAntialiasing, True)
                painter.drawText(rect, Qt.AlignCenter | Qt.TextWordWrap,
                                 '%s\n\n%s' % (title, authors))
                if cdata is False:
                    self.render_queue.put(book_id)
            else:
                if self.title_height != 0:
                    trect = QRect(rect)
                    rect.setBottom(rect.bottom() - self.title_height)
                if self.animating is not None and self.animating.row(
                ) == index.row():
                    cdata = cdata.scaled(cdata.size() * self._animated_size)
                # Centre the pixmap horizontally, bottom-align vertically,
                # in device-independent pixels.
                dpr = cdata.devicePixelRatio()
                cw, ch = int(cdata.width() / dpr), int(cdata.height() / dpr)
                dx = max(0, int((rect.width() - cw) / 2.0))
                dy = max(0, rect.height() - ch)
                right_adjust = dx
                rect.adjust(dx, dy, -dx, 0)
                painter.drawPixmap(rect, cdata)
                if self.title_height != 0:
                    # Field-under-cover text line below the pixmap.
                    rect = trect
                    rect.setTop(rect.bottom() - self.title_height + 5)
                    painter.setRenderHint(QPainter.TextAntialiasing, True)
                    title, is_stars = self.render_field(db, book_id)
                    if is_stars:
                        painter.setFont(self.rating_font)
                    metrics = painter.fontMetrics()
                    painter.setPen(self.highlight_color)
                    painter.drawText(
                        rect, Qt.AlignCenter | Qt.TextSingleLine,
                        metrics.elidedText(title, Qt.ElideRight,
                                           rect.width()))
            if self.emblem_size > 0:
                return  # We dont draw embossed emblems as the ondevice/marked emblems are drawn in the gutter
            if marked:
                try:
                    p = self.marked_emblem
                except AttributeError:
                    # Lazily created and cached on first use.
                    p = self.marked_emblem = m.marked_icon.pixmap(48, 48)
                self.paint_embossed_emblem(p, painter, orect, right_adjust)
            if on_device:
                try:
                    p = self.on_device_emblem
                except AttributeError:
                    p = self.on_device_emblem = QIcon(I('ok.png')).pixmap(
                        48, 48)
                self.paint_embossed_emblem(p, painter, orect, right_adjust,
                                           left=False)
        finally:
            painter.restore()

    def paint_emblems(self, painter, rect, emblems):
        # Draw the emblem strip in the gutter and shrink ``rect`` (in
        # place) so the cover is painted in the remaining space.
        gutter = self.emblem_size + self.MARGIN
        grect = QRect(rect)
        gpos = self.gutter_position
        if gpos is self.TOP:
            grect.setBottom(grect.top() + gutter)
            rect.setTop(rect.top() + gutter)
        elif gpos is self.BOTTOM:
            grect.setTop(grect.bottom() - gutter + self.MARGIN)
            rect.setBottom(rect.bottom() - gutter)
        elif gpos is self.LEFT:
            grect.setRight(grect.left() + gutter)
            rect.setLeft(rect.left() + gutter)
        else:
            grect.setLeft(grect.right() - gutter + self.MARGIN)
            rect.setRight(rect.right() - gutter)
        horizontal = gpos in (self.TOP, self.BOTTOM)
        painter.save()
        painter.setClipRect(grect)
        try:
            for i, emblem in enumerate(emblems):
                delta = 0 if i == 0 else self.emblem_size + self.MARGIN
                # Advance along the strip axis before drawing each emblem.
                grect.moveLeft(grect.left() +
                               delta) if horizontal else grect.moveTop(
                                   grect.top() + delta)
                rect = QRect(grect)
                # NOTE(review): the trailing comma makes this a tuple
                # expression; harmless (both setters run) but probably
                # unintentional -- confirm.
                rect.setWidth(int(
                    emblem.width() / emblem.devicePixelRatio())), rect.setHeight(
                    int(emblem.height() / emblem.devicePixelRatio()))
                painter.drawPixmap(rect, emblem)
        finally:
            painter.restore()

    def paint_embossed_emblem(self, pixmap, painter, orect, right_adjust,
                              left=True):
        # Draw a 48x48 emblem on the bottom-left (default) or bottom-right
        # corner of the cover area, just above the title strip.
        drect = QRect(orect)
        pw = int(pixmap.width() / pixmap.devicePixelRatio())
        ph = int(pixmap.height() / pixmap.devicePixelRatio())
        if left:
            drect.setLeft(drect.left() + right_adjust)
            drect.setRight(drect.left() + pw)
        else:
            drect.setRight(drect.right() - right_adjust)
            drect.setLeft(drect.right() - pw + 1)
        drect.setBottom(drect.bottom() - self.title_height)
        drect.setTop(drect.bottom() - ph)
        painter.drawPixmap(drect, pixmap)

    @pyqtSlot(QHelpEvent, QAbstractItemView, QStyleOptionViewItem,
              QModelIndex, result=bool)
    def helpEvent(self, event, view, option, index):
        # Build and show a rich-text tooltip (title/authors/series/device
        # location) for the hovered cover; returns True if shown.
        if event is not None and view is not None and event.type(
        ) == QEvent.ToolTip:
            try:
                db = index.model().db
            except AttributeError:
                return False
            try:
                book_id = db.id(index.row())
            except (ValueError, IndexError, KeyError):
                return False
            db = db.new_api
            device_connected = self.parent().gui.device_connected
            on_device = device_connected is not None and db.field_for(
                'ondevice', book_id)
            p = prepare_string_for_xml
            title = db.field_for('title', book_id)
            authors = db.field_for('authors', book_id)
            if title and authors:
                title = '<b>%s</b>' % ('<br>'.join(wrap(p(title), 120)))
                authors = '<br>'.join(wrap(p(' & '.join(authors)), 120))
                tt = '%s<br><br>%s' % (title, authors)
                series = db.field_for('series', book_id)
                if series:
                    use_roman_numbers = config[
                        'use_roman_numerals_for_series_number']
                    val = _(
                        'Book %(sidx)s of <span class="series_name">%(series)s</span>'
                    ) % dict(sidx=fmt_sidx(db.field_for(
                        'series_index', book_id),
                                           use_roman=use_roman_numbers),
                             series=p(series))
                    tt += '<br><br>' + val
                if on_device:
                    val = _('This book is on the device in %s') % on_device
                    tt += '<br><br>' + val
                QToolTip.showText(event.globalPos(), tt, view)
                return True
        return False
class CoverDelegate(QStyledItemDelegate):
    # Earlier, simpler revision of the cover-grid delegate: paints the
    # cover pixmap, an optional title line, and fixed corner emblems for
    # "marked" and "on device" (no configurable emblem gutter).

    MARGIN = 4

    @pyqtProperty(float)
    def animated_size(self):
        # Scale factor (0..1) driven by QPropertyAnimation while a cover
        # "pulses"; exposed as a Qt property so the animation can write it.
        return self._animated_size

    @animated_size.setter
    def animated_size(self, val):
        self._animated_size = val

    def __init__(self, parent):
        super(CoverDelegate, self).__init__(parent)
        self._animated_size = 1.0
        self.animation = QPropertyAnimation(self, 'animated_size', self)
        self.animation.setEasingCurve(QEasingCurve.OutInCirc)
        self.animation.setDuration(500)
        self.set_dimensions()
        self.cover_cache = CoverCache(limit=gprefs['cover_grid_cache_size'])
        # book_ids pushed here when no cached cover exists; a renderer
        # elsewhere presumably services this queue -- confirm.
        self.render_queue = LifoQueue()
        self.animating = None  # the QModelIndex currently being animated
        self.highlight_color = QColor(Qt.white)

    def set_dimensions(self):
        # Recompute cover/item geometry from user prefs (gprefs). Pref
        # values < 0.1 mean "automatic"; otherwise they are centimetres
        # converted to pixels via the widget's DPI.
        width = self.original_width = gprefs['cover_grid_width']
        height = self.original_height = gprefs['cover_grid_height']
        self.original_show_title = show_title = gprefs['cover_grid_show_title']
        if height < 0.1:
            height = auto_height(self.parent())
        else:
            height *= self.parent().logicalDpiY() * CM_TO_INCH
        if width < 0.1:
            width = 0.75 * height  # default cover aspect ratio 3:4
        else:
            width *= self.parent().logicalDpiX() * CM_TO_INCH
        self.cover_size = QSize(width, height)
        self.title_height = 0
        if show_title:
            f = self.parent().font()
            sz = f.pixelSize()
            if sz < 5:
                sz = f.pointSize() * self.parent().logicalDpiY() / 72.0
            self.title_height = max(25, sz + 10)
        self.item_size = self.cover_size + QSize(
            2 * self.MARGIN, (2 * self.MARGIN) + self.title_height)
        self.calculate_spacing()
        self.animation.setStartValue(1.0)
        self.animation.setKeyValueAt(0.5, 0.5)
        self.animation.setEndValue(1.0)

    def calculate_spacing(self):
        # Grid spacing pref < 0.01 means "automatic" (10% of cover width,
        # clamped to 10..50 px); otherwise centimetres -> pixels.
        spc = self.original_spacing = gprefs['cover_grid_spacing']
        if spc < 0.01:
            self.spacing = max(10, min(50, int(0.1 * self.original_width)))
        else:
            self.spacing = self.parent().logicalDpiX() * CM_TO_INCH * spc

    def sizeHint(self, option, index):
        return self.item_size

    def paint(self, painter, option, index):
        QStyledItemDelegate.paint(
            self, painter, option,
            QModelIndex())  # draw the hover and selection highlights
        m = index.model()
        db = m.db
        try:
            book_id = db.id(index.row())
        except (ValueError, IndexError, KeyError):
            return
        if book_id in m.ids_to_highlight_set:
            # Rounded-rect outline for search-highlighted books.
            painter.save()
            try:
                painter.setPen(self.highlight_color)
                painter.setRenderHint(QPainter.Antialiasing, True)
                painter.drawRoundedRect(option.rect, 10, 10, Qt.RelativeSize)
            finally:
                painter.restore()
        marked = db.data.get_marked(book_id)
        db = db.new_api
        # cdata: QPixmap if cached, None if the book has no cover,
        # False if the cover has not been rendered yet.
        cdata = self.cover_cache[book_id]
        device_connected = self.parent().gui.device_connected is not None
        on_device = device_connected and db.field_for('ondevice', book_id)
        painter.save()
        right_adjust = 0
        try:
            rect = option.rect
            rect.adjust(self.MARGIN, self.MARGIN, -self.MARGIN, -self.MARGIN)
            orect = QRect(rect)
            if cdata is None or cdata is False:
                # No cover available: draw title/authors text instead, and
                # queue the cover for rendering if it simply isn't cached.
                title = db.field_for('title', book_id, default_value='')
                authors = ' & '.join(
                    db.field_for('authors', book_id, default_value=()))
                painter.setRenderHint(QPainter.TextAntialiasing, True)
                painter.drawText(rect, Qt.AlignCenter | Qt.TextWordWrap,
                                 '%s\n\n%s' % (title, authors))
                if cdata is False:
                    self.render_queue.put(book_id)
            else:
                if self.title_height != 0:
                    trect = QRect(rect)
                    rect.setBottom(rect.bottom() - self.title_height)
                if self.animating is not None and self.animating.row(
                ) == index.row():
                    cdata = cdata.scaled(cdata.size() * self._animated_size)
                # Centre the pixmap horizontally, bottom-align vertically.
                dx = max(0, int((rect.width() - cdata.width()) / 2.0))
                dy = max(0, rect.height() - cdata.height())
                right_adjust = dx
                rect.adjust(dx, dy, -dx, 0)
                painter.drawPixmap(rect, cdata)
                if self.title_height != 0:
                    # Title text line below the pixmap.
                    rect = trect
                    rect.setTop(rect.bottom() - self.title_height + 5)
                    painter.setRenderHint(QPainter.TextAntialiasing, True)
                    title = db.field_for('title', book_id, default_value='')
                    metrics = painter.fontMetrics()
                    painter.drawText(
                        rect, Qt.AlignCenter | Qt.TextSingleLine,
                        metrics.elidedText(title, Qt.ElideRight,
                                           rect.width()))
            if marked:
                # Bottom-left corner emblem for marked books; pixmap is
                # lazily created and cached on first use.
                try:
                    p = self.marked_emblem
                except AttributeError:
                    p = self.marked_emblem = QPixmap(I('rating.png')).scaled(
                        48, 48, transformMode=Qt.SmoothTransformation)
                drect = QRect(orect)
                drect.setLeft(drect.left() + right_adjust)
                drect.setRight(drect.left() + p.width())
                drect.setBottom(drect.bottom() - self.title_height)
                drect.setTop(drect.bottom() - p.height())
                painter.drawPixmap(drect, p)
            if on_device:
                # Bottom-right corner emblem for books on the device.
                try:
                    p = self.on_device_emblem
                except AttributeError:
                    p = self.on_device_emblem = QPixmap(I('ok.png')).scaled(
                        48, 48, transformMode=Qt.SmoothTransformation)
                drect = QRect(orect)
                drect.setRight(drect.right() - right_adjust)
                drect.setBottom(drect.bottom() - self.title_height)
                drect.setTop(drect.bottom() - p.height() + 1)
                drect.setLeft(drect.right() - p.width() + 1)
                painter.drawPixmap(drect, p)
        finally:
            painter.restore()

    @pyqtSlot(QHelpEvent, QAbstractItemView, QStyleOptionViewItem,
              QModelIndex, result=bool)
    def helpEvent(self, event, view, option, index):
        # Build and show a rich-text tooltip (title/authors/series/device
        # location) for the hovered cover; returns True if shown.
        if event is not None and view is not None and event.type(
        ) == QEvent.ToolTip:
            try:
                db = index.model().db
            except AttributeError:
                return False
            try:
                book_id = db.id(index.row())
            except (ValueError, IndexError, KeyError):
                return False
            db = db.new_api
            device_connected = self.parent().gui.device_connected
            on_device = device_connected is not None and db.field_for(
                'ondevice', book_id)
            p = prepare_string_for_xml
            title = db.field_for('title', book_id)
            authors = db.field_for('authors', book_id)
            if title and authors:
                title = '<b>%s</b>' % ('<br>'.join(wrap(p(title), 120)))
                authors = '<br>'.join(wrap(p(' & '.join(authors)), 120))
                tt = '%s<br><br>%s' % (title, authors)
                series = db.field_for('series', book_id)
                if series:
                    use_roman_numbers = config[
                        'use_roman_numerals_for_series_number']
                    val = _(
                        'Book %(sidx)s of <span class="series_name">%(series)s</span>'
                    ) % dict(sidx=fmt_sidx(db.field_for(
                        'series_index', book_id),
                                           use_roman=use_roman_numbers),
                             series=p(series))
                    tt += '<br><br>' + val
                if on_device:
                    val = _('This book is on the device in %s') % on_device
                    tt += '<br><br>' + val
                QToolTip.showText(event.globalPos(), tt, view)
                return True
        return False
class SaveManager(QObject):
    """Coordinates background saving of an edited book container.

    Save requests are serviced one at a time on a dedicated daemon
    thread; a second daemon thread sends a notification message to
    calibre after each successful save.  Requests go on a LifoQueue so
    the most recent edit is saved first; stale, superseded requests are
    detected via a monotonically increasing counter and discarded.
    """

    start_save = pyqtSignal()
    report_error = pyqtSignal(object)
    save_done = pyqtSignal()
    check_for_completion = pyqtSignal()

    def __init__(self, parent, notify=None):
        QObject.__init__(self, parent)
        self.count = 0        # monotonically increasing request id
        self.last_saved = -1  # id of the most recently completed save
        self.requests = LifoQueue()
        self.notify_requests = LifoQueue()
        self.notify_data = notify  # payload for send_message(); None disables notification
        t = Thread(name='save-thread', target=self.run)
        t.daemon = True
        t.start()
        t = Thread(name='notify-thread', target=self.notify_calibre)
        t.daemon = True
        t.start()
        self.status_widget = w = SaveWidget(parent)
        # Queued connections: the signals are emitted from the worker
        # thread but the widget must be updated on the GUI thread.
        self.start_save.connect(w.start, type=Qt.QueuedConnection)
        self.save_done.connect(w.stop, type=Qt.QueuedConnection)

    def schedule(self, tdir, container):
        """Queue *container* (whose data lives in *tdir*) for saving."""
        self.count += 1
        self.requests.put((self.count, tdir, container))

    def run(self):
        """Worker loop: process save requests until a None sentinel arrives."""
        while True:
            x = self.requests.get()
            if x is None:
                self.requests.task_done()
                self.__empty_queue()
                break
            error_occurred = True
            try:
                count, tdir, container = x
                error_occurred = self.process_save(count, tdir, container)
            except Exception:
                # Narrowed from a bare except: so SystemExit and
                # KeyboardInterrupt are not silently swallowed here.
                import traceback
                traceback.print_exc()
            finally:
                self.requests.task_done()
            if not error_occurred:
                self.check_for_completion.emit()

    def notify_calibre(self):
        """Worker loop: notify calibre after each successful save.

        A falsy item on notify_requests (the None sentinel from
        shutdown()) terminates the loop.
        """
        while True:
            if not self.notify_requests.get():
                break
            send_message(self.notify_data)

    def clear_notify_data(self):
        """Disable further calibre notifications."""
        self.notify_data = None

    def __empty_queue(self):
        ' Only to be used during shutdown '
        while True:
            try:
                self.requests.get_nowait()
            except Empty:
                break
            else:
                self.requests.task_done()

    def process_save(self, count, tdir, container):
        """Perform one save; returns True if an error occurred.

        Requests older than the last completed one are stale (a newer
        edit has already been written) and are discarded, cleaning up
        their temporary directory.
        """
        if count <= self.last_saved:
            shutil.rmtree(tdir, ignore_errors=True)
            return
        self.last_saved = count
        self.start_save.emit()
        error_occurred = False
        try:
            self.do_save(tdir, container)
        except Exception:
            # Narrowed from a bare except:; the traceback is reported to
            # the GUI via the report_error signal.
            import traceback
            self.report_error.emit(traceback.format_exc())
            error_occurred = True
        self.save_done.emit()
        if self.notify_data:
            self.notify_requests.put(True)
        return error_occurred

    def do_save(self, tdir, container):
        """Write the container to its ebook path; always remove tdir."""
        try:
            save_container(container, container.path_to_ebook)
        finally:
            shutil.rmtree(tdir, ignore_errors=True)

    @property
    def has_tasks(self):
        # True while any scheduled save has not been marked task_done.
        return bool(self.requests.unfinished_tasks)

    def wait(self, timeout=30):
        """Block until pending saves finish; False if the timeout expired."""
        if timeout is None:
            self.requests.join()
        else:
            try:
                join_with_timeout(self.requests, timeout)
            except RuntimeError:
                return False
        return True

    def shutdown(self):
        """Signal both worker threads to exit."""
        self.requests.put(None)
        self.notify_requests.put(None)
class Solve(): def __init__(self, data): # 数据初始化(二维的object数组) self.value = np.array([[0] * 9] * 9, dtype=object) self.new_points = Queue() # 先进先出的新解坐标 self.recoder = LifoQueue() # 先进后出的回溯器 self.guess_times = 0 # 猜测次数 self.count = 0 # 记录解的个数 # 九宫格的基准列表 self.base_points = [[0, 0], [0, 3], [0, 6], [3, 0], [3, 3], [3, 6], [6, 0], [6, 3], [6, 6]] # 整理数据 data = np.array(data).reshape(9, -1) for r in range(0, 9): for c in range(0, 9): if data[r, c]: self.value[r, c] = data[r, c] # 新点添加到列表中,以便遍历 self.new_points.put((r, c)) else: self.value[r, c] = [1, 2, 3, 4, 5, 6, 7, 8, 9] # 剔除数字 def _cut_num(self, point): r, c = point val = self.value[r, c] # 行 for i, item in enumerate(self.value[r]): if isinstance(item, list): if item.count(val): item.remove(val) # 判断移除后,是否剩下一个元素 if len(item) == 1: self.new_points.put((r, i)) self.value[r, i] = item[0] # 列 for i, item in enumerate(self.value[:, c]): if isinstance(item, list): if item.count(val): item.remove(val) # 判断移除后,是否剩下一个元素 if len(item) == 1: self.new_points.put((i, c)) self.value[i, c] = item[0] # 所在九宫格(3x3的数组) b_r, b_c = map(lambda x: x / 3 * 3, point) # 九宫格基准点 for m_r, row in enumerate(self.value[b_r:b_r + 3, b_c:b_c + 3]): for m_c, item in enumerate(row): if isinstance(item, list): if item.count(val): item.remove(val) # 判断移除后,是否剩下一个元素 if len(item) == 1: r = b_r + m_r c = b_c + m_c self.new_points.put((r, c)) self.value[r, c] = item[0] # 同一行、列或九宫格中1~9可能性只有一个的情况 def _check_one_possbile(self): # 同一行只有一个数字的情况 for r in range(0, 9): values = filter(lambda x: isinstance(x, list), self.value[r]) for c, item in enumerate(self.value[r]): if isinstance(item, list): for value in item: if sum(map(lambda x: x.count(value), values)) == 1: self.value[r, c] = value self.new_points.put((r, c)) return True # 同一列只有一个数字的情况 for c in range(0, 9): values = filter(lambda x: isinstance(x, list), self.value[:, c]) for r, item in enumerate(self.value[:, c]): if isinstance(item, list): for value in item: if sum(map(lambda x: x.count(value), values)) == 1: 
self.value[r, c] = value self.new_points.put((r, c)) return True # 九宫格内的单元格只有一个数字的情况 for r, c in self.base_points: values = filter(lambda x: isinstance(x, list), self.value[r:r + 3, c:c + 3].reshape(1, -1)[0]) for m_r, row in enumerate(self.value[r:r + 3, c:c + 3]): for m_c, item in enumerate(row): if isinstance(item, list): for value in item: if sum(map(lambda x: x.count(value), values)) == 1: self.value[r + m_r, c + m_c] = value self.new_points.put((r + m_r, c + m_c)) return True # 同一个九宫格内数字在同一行或同一列处理 def _check_same_num(self): for b_r, b_c in self.base_points: block = self.value[b_r:b_r + 3, b_c:b_c + 3] # 判断数字1~9在该九宫格的分布情况 data = block.reshape(1, -1)[0] for i in range(1, 10): result = map( lambda x: 0 if not isinstance(x[1], list) else x[0] + 1 if x[1].count(i) else 0, enumerate(data)) result = filter(lambda x: x > 0, result) r_count = len(result) if r_count in [2, 3]: # 2或3个元素才有可能同一行或同一列 rows = map(lambda x: (x - 1) / 3, result) cols = map(lambda x: (x - 1) % 3, result) if len(set(rows)) == 1: # 同一行,去掉其他行的数字 result = map(lambda x: b_c + (x - 1) % 3, result) row = b_r + rows[0] for col in range(0, 9): if col not in result: item = self.value[row, col] if isinstance(item, list): if item.count(i): item.remove(i) # 判断移除后,是否剩下一个元素 if len(item) == 1: self.new_points.put((row, col)) self.value[row, col] = item[0] return True elif len(set(cols)) == 1: # 同一列 result = map(lambda x: b_r + (x - 1) / 3, result) col = b_c + cols[0] for row in range(0, 9): if row not in result: item = self.value[row, col] if isinstance(item, list): if item.count(i): item.remove(i) # 判断移除后,是否剩下一个元素 if len(item) == 1: self.new_points.put((row, col)) self.value[row, col] = item[0] return True # 排除法解题 def solve_sudo(self): is_run_same = True is_run_one = True while is_run_same: while is_run_one: # 剔除数字 while not self.new_points.empty(): point = self.new_points.get() # 先进先出 self._cut_num(point) # 检查单个数字的情况 is_run_one = self._check_one_possbile() # 检查同行或列的情况 is_run_same = self._check_same_num() 
is_run_one = True # 得到有多少个确定的数字 def get_num_count(self): return sum( map(lambda x: 1 if isinstance(x, int) else 0, self.value.reshape(1, -1)[0])) # 评分,找到最佳的猜测坐标 def get_best_point(self): best_score = 0 best_point = (0, 0) for r, row in enumerate(self.value): for c, item in enumerate(row): point_score = self._get_point_score((r, c)) if best_score < point_score: best_score = point_score best_point = (r, c) return best_point # 计算某坐标的评分 def _get_point_score(self, point): # 评分标准 (10-候选个数) + 同行确定数字个数 + 同列确实数字个数 r, c = point item = self.value[r, c] if isinstance(item, list): score = 10 - len(item) score += sum( map(lambda x: 1 if isinstance(x, int) else 0, self.value[r])) score += sum( map(lambda x: 1 if isinstance(x, int) else 0, self.value[:, c])) return score else: return 0 # 检查有没错误 def check_value(self): # 行 for row in self.value: nums = [] lists = [] for item in row: (lists if isinstance(item, list) else nums).append(item) if len(set(nums)) != len(nums): return False # 数字要不重复 if len(filter(lambda x: len(x) == 0, lists)): return False # 候选列表不能为空集 # 列 for c in range(0, 9): nums = [] lists = [] col = self.value[:, c] for item in col: (lists if isinstance(item, list) else nums).append(item) if len(set(nums)) != len(nums): return False # 数字要不重复 if len(filter(lambda x: len(x) == 0, lists)): return False # 候选列表不能为空集 # 九宫格 for b_r, b_c in self.base_points: nums = [] lists = [] block = self.value[b_r:b_r + 3, b_c:b_c + 3].reshape(1, -1)[0] for item in block: (lists if isinstance(item, list) else nums).append(item) if len(set(nums)) != len(nums): return False # 数字要不重复 if len(filter(lambda x: len(x) == 0, lists)): return False # 候选列表不能为空集 return True # 猜测记录 def recode_guess(self, point, index=0): # 记录 recoder = Recoder() recoder.point = point recoder.point_index = index # recoder.value = self.value.copy() #numpy的copy不行 recoder.value = copy.deepcopy(self.value) self.recoder.put(recoder) self.guess_times += 1 # 记录猜测次数 # 新一轮的排除处理 item = self.value[point] self.value[point] = 
item[index] self.new_points.put(point) self.solve_sudo() # 回溯,需要先进后出 def reback(self): while True: if self.recoder.empty(): print 'sudo is wrong' else: recoder = self.recoder.get() point = recoder.point index = recoder.point_index + 1 item = recoder.value[point] # 判断索引是否超出范围。若超出,则再回溯一次 if index < len(item): break self.value = recoder.value self.recode_guess(point, index) # 解题 def calc(self): # 第一次解题 self.solve_sudo() # 检查有没错误的,有错误的则回溯;没错误却未解开题目,则再猜测 while True: if self.check_value(): if self.get_num_count() == 81: self.count = self.count + 1 # 得到多个解 if self.count > 1: break if self.recoder.empty(): break # 回溯 self.reback() else: # 获取最佳猜测点 point = self.get_best_point() # 记录并处理 self.recode_guess(point) else: # 出错,则回溯,尝试下一个猜测 self.reback() if self.count == 0: print "no slution" elif self.count == 1: print "one slution" else: print "many slution"
class CarEnv:
    # Gym-style environment for a toy self-driving car.  Instead of driving
    # the real car, observations are replayed from a pre-recorded dataset
    # ("dataset.csv"); the physical car is only touched by take_step()/reset().
    # NOTE(review): relies on module-level names imported elsewhere in this
    # file: pd, np, cv2, color, misc, sign_detection, ACTIONS,
    # ACTIONS_REVERSE, logger, sleep.

    def __init__(self, car):
        # Action space; the extra actions are disabled for now.
        self.actions = ('forward', 'stop')  #, 'forwardLeft') #, 'backward')
        self.car = car  # hardware interface used by take_step()/reset()
        self.cameras_size = 3 * 32 * 24
        self.sensors_size = 3
        self.state_size = self.cameras_size + self.sensors_size  # 3 cameras + 3 sensors
        # Stack of executed actions so reset() can replay them in reverse.
        self.taken_actions = LifoQueue()
        # Reward constants (reward_sensor/reward_goal are not referenced by
        # step() below; kept for the commented-out sensor logic).
        self.reward_sensor = -10
        self.reward_step = 0.3
        self.reward_goal = 100
        # dataset.csv columns 1 and 2 are used -- presumably image path and
        # target label; verify against the dataset writer.
        df = pd.read_csv("dataset.csv", usecols=[1, 2])
        df = np.array(df)
        self.X = []  # loaded images
        self.y = []  # targets (not used by step()/get_state())
        for data in df:
            target = data[1]
            images = self.read_img(data[0])
            self.X.append(images)
            self.y.append(target)
        self.dataset_count = len(self.X)
        self.dataset_index = 0  # replay cursor into self.X

    def read_img(self, url):
        # Load an image from disk (BGR order, as cv2.imread returns it).
        image = cv2.imread(url)
        return image

    def reset(self):
        # Physically undo every recorded action by issuing its reverse
        # action on the car, newest first (LIFO order).
        print self.taken_actions.empty()
        while not self.taken_actions.empty():
            action = ACTIONS.index(ACTIONS_REVERSE[ACTIONS[
                self.taken_actions.get()]])  # Get index of reverse action
            print action
            self.take_step(action)
        logger.debug("Env reset done")

    def _process_image(self, frame):
        # Convert a camera frame to a small, normalised grayscale array.
        #frame = io.BytesIO(image_bytes)
        #frame = Image.open(frame).convert(mode='L')
        #frame = np.asarray(frame)
        frame = color.rgb2gray(frame)
        frame = misc.imresize(frame, 10)  # resize to 10% of original size
        # TODO: convert to grayscale
        frame = frame.astype('float32')
        frame /= 255  # scale pixel values into [0, 1]
        return frame

    def get_dataset_index(self, move=0):
        # Return the current replay cursor, then advance it by *move*.
        # Wraps back to 0 when the end of the dataset is reached.
        if self.dataset_index >= self.dataset_count - 1:
            self.dataset_index = 0
        old_index = self.dataset_index
        self.dataset_index += move
        return old_index

    def get_state(self, latest=True):
        # Observation for the current frame: a (1, H, W) float image plus
        # the sign-detection result dict for that frame.
        # NOTE(review): *latest* is accepted but never used.
        i = self.get_dataset_index()
        image = self.X[i]
        image1, detected = sign_detection.detect(image)
        cv2.imshow("camera_c", image1)
        cv2.waitKey(1)
        print detected
        detected_list = np.array([float(detected[item]) for item in detected])
        print detected_list
        camera_c_frame = np.array(self._process_image(image))
        camera_c_frame = camera_c_frame.reshape(1, camera_c_frame.shape[0],
                                                camera_c_frame.shape[1])
        state = camera_c_frame
        return state, detected

    def take_step(self, action):
        # Execute *action* on the real car for one timeframe, then stop.
        self.car.take_action(ACTIONS[action])
        sleep(self.car.timeframe)
        self.car.stop()

    def step(self, action):
        # Gym-like step: record the action, advance the replay cursor and
        # compute a reward from the sign detector.
        # self.take_step(action)
        self.taken_actions.put(action)
        # action 1 is 'stop' (see self.actions): hold the current frame;
        # any other action advances to the next frame.
        if action == 1:
            self.get_dataset_index(0)  # next frame
        else:
            self.get_dataset_index(1)  # next frame
        state, detected = self.get_state()
        reward = 0
##        print sensors
##        for sensor in sensors:
##            if sensors[sensor]:
##                reward += self.reward_sensor
        # Reward shaping: penalise driving forward through a detected stop
        # sign, mildly reward stopping for it; otherwise reward moving.
        stop = [key for key in detected if key.startswith('Stop')]
        #print detected[key]
        #print stop
        if detected[stop[0]]:
            if action == 0:
                reward -= 2
            else:
                reward += 0.01
        elif action == 0:
            reward += 0.5
##        elif action == 1:
##            reward -= 0.1
        done = None  # to be implemented using OpenCV
        info = None
        return state, reward, done, info
class SaveManager(QObject):
    """Saves an edited book container on a background daemon thread.

    Requests go on a LifoQueue so the most recent edit is saved first;
    stale, superseded requests are detected via a monotonically
    increasing counter and discarded.  The final write is done with an
    atomic rename of a sibling temporary file, so the target ebook is
    never left half-written.
    """

    start_save = pyqtSignal()
    report_error = pyqtSignal(object)
    save_done = pyqtSignal()

    def __init__(self, parent):
        QObject.__init__(self, parent)
        self.count = 0        # monotonically increasing request id
        self.last_saved = -1  # id of the most recently completed save
        self.requests = LifoQueue()
        t = Thread(name='save-thread', target=self.run)
        t.daemon = True
        t.start()
        self.status_widget = w = SaveWidget(parent)
        # Queued connections: the signals are emitted from the worker
        # thread but the widget must be updated on the GUI thread.
        self.start_save.connect(w.start, type=Qt.QueuedConnection)
        self.save_done.connect(w.stop, type=Qt.QueuedConnection)

    def schedule(self, tdir, container):
        """Queue *container* (whose data lives in *tdir*) for saving."""
        self.count += 1
        self.requests.put((self.count, tdir, container))

    def run(self):
        """Worker loop: process save requests until a None sentinel arrives."""
        while True:
            x = self.requests.get()
            if x is None:
                self.requests.task_done()
                self.__empty_queue()
                break
            try:
                count, tdir, container = x
                self.process_save(count, tdir, container)
            except Exception:
                # Narrowed from a bare except: so SystemExit and
                # KeyboardInterrupt are not silently swallowed.
                import traceback
                traceback.print_exc()
            finally:
                self.requests.task_done()

    def __empty_queue(self):
        ' Only to be used during shutdown '
        while True:
            try:
                self.requests.get_nowait()
            except Empty:
                break
            else:
                self.requests.task_done()

    def process_save(self, count, tdir, container):
        """Perform one save, skipping requests already superseded.

        Requests older than the last completed one are stale (a newer
        edit has already been written); their temp dir is cleaned up.
        """
        if count <= self.last_saved:
            shutil.rmtree(tdir, ignore_errors=True)
            return
        self.last_saved = count
        self.start_save.emit()
        try:
            self.do_save(tdir, container)
        except Exception:
            # Narrowed from a bare except:; the traceback is reported to
            # the GUI via the report_error signal.
            import traceback
            self.report_error.emit(traceback.format_exc())
        self.save_done.emit()

    def do_save(self, tdir, container):
        """Commit the container next to the target, then atomically
        rename it into place; always clean up the temp file and tdir."""
        temp = None
        try:
            path = container.path_to_ebook
            # The temp file must live in the same directory as the target
            # for atomic_rename to work across filesystems.
            temp = PersistentTemporaryFile(prefix=('_' if iswindows else '.'),
                                           suffix=os.path.splitext(path)[1],
                                           dir=os.path.dirname(path))
            temp.close()
            temp = temp.name
            container.commit(temp)
            atomic_rename(temp, path)
        finally:
            if temp and os.path.exists(temp):
                # Rename failed (or commit raised): remove the leftover.
                os.remove(temp)
            shutil.rmtree(tdir, ignore_errors=True)

    @property
    def has_tasks(self):
        # True while any scheduled save has not been marked task_done.
        return bool(self.requests.unfinished_tasks)

    def wait(self, timeout=30):
        """Block until pending saves finish; False if the timeout expired."""
        if timeout is None:
            self.requests.join()
        else:
            try:
                join_with_timeout(self.requests, timeout)
            except RuntimeError:
                return False
        return True

    def shutdown(self):
        """Signal the worker thread to exit."""
        self.requests.put(None)