def run(G: Graph) -> Result:
    # Random tour: visit the remaining vertices in random order, then
    # close the cycle back to the starting vertex.
    memory: int = 0
    k: int = 0
    W: float = 0.0
    path: List[Edge] = []

    vertices: List[int] = G.Vertices()
    memory += sizeof(vertices)

    i: int = randrange(len(vertices))
    start: int = vertices[i]
    del vertices[i]

    u: int = start
    while len(vertices) > 0:
        i = randrange(len(vertices))
        v: int = vertices[i]
        del vertices[i]

        e = G.get_edge(u, v)
        path.append(e)
        W += e.w
        k += 1

        u = v

    e = G.get_edge(u, start)
    path.append(e)
    W += e.w
    k += 1

    return Result(k, W, memory, path=path)
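# These snippets call a sizeof() helper on whole containers; its definition is
# not shown here. A minimal sketch of one plausible implementation, assuming it
# means a recursive "deep" size built on sys.getsizeof (an assumption, not the
# original helper):
import sys

def sizeof(obj, _seen=None):
    """Rough recursive size of an object graph, in bytes (sketch only)."""
    if _seen is None:
        _seen = set()
    if id(obj) in _seen:  # avoid double-counting shared objects
        return 0
    _seen.add(id(obj))
    total = sys.getsizeof(obj)
    if isinstance(obj, dict):
        total += sum(sizeof(k, _seen) + sizeof(v, _seen) for k, v in obj.items())
    elif isinstance(obj, (list, tuple, set, frozenset)):
        total += sum(sizeof(item, _seen) for item in obj)
    return total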
def index_item(self, item, spider, close_spider):
    lock = self.locks[spider.name]
    lock.acquire()  # the matching release is not shown in this snippet
    cache_queue = self.cache_buffer[spider.name]
    if not close_spider:
        cache_queue.append(dict(item))
    cache_len = len(cache_queue)
    cache_size = sizeof(cache_queue)
    try:
        # Flush the cache when the spider closes, or when the cache grows
        # past the configured length or size limits.
        if (close_spider and cache_len > 0) or cache_len >= self.solr_cache_max_len \
                or (cache_len > 0 and cache_size >= self.solr_cache_max_size):
            if self.solr_cloud_mode:
                self.solr_collection.add(list(cache_queue))
            else:
                self.solr_collection.add_many(list(cache_queue))
            # self.solr_collection.commit()
            spider.log(
                "{cache_len} items of size {cache_size} byte(s) indexed in solr".format(
                    cache_len=cache_len, cache_size=cache_size),
                level=log.INFO)
            cache_queue.clear()
    except Exception as e:
        trace_info = traceback.format_exc()
        spider.log(
            "Failed to index item(s): {message}\n{trace_info}".format(
                message=e.message, trace_info=trace_info),
            level=log.ERROR)
def test(time_response=0, last_updates=[], data_inf=[]):
    result_test = []

    # Check 1: response time below half a second.
    if time_response < 0.5:
        result_test.append(True)
    else:
        result_test.append(False)

    # Check 2: every update stamp ("YYYY-M-D") is from today.
    time_now = dt.datetime.now()
    test_last_time = True
    for date in last_updates:
        date = date.split("-")
        date = [int(c) for c in date]
        if date[0] == time_now.year and date[1] == time_now.month and date[2] == time_now.day:
            pass
        else:
            test_last_time = False
    if test_last_time:
        result_test.append(True)
    else:
        result_test.append(False)

    # Check 3: data size below 10 KiB.
    size_data = sizeof(data_inf)  # in bytes
    size_data = size_data / 1024  # in kibibytes
    if size_data < 10:
        result_test.append(True)
    else:
        result_test.append(False)

    return result_test, time_response, size_data
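# A hypothetical call to test() above; the date string and payload are made
# up, and the freshness check only passes when the dates match today.
results, elapsed, size_kb = test(
    time_response=0.3,
    last_updates=["2024-1-15"],
    data_inf={"rows": [1, 2, 3]},
)
# results is [is_fast, is_fresh, is_small]; size_kb is the payload size in KiB.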
def _conditional_insert(self, transaction, item, spider, close_spider):
    lock = self.locks[spider.name]
    lock.acquire()  # the matching release is not shown in this snippet
    cache_queue = self.cache_buffer[spider.name]
    if not close_spider:
        cache_queue.append(item)
    cache_len = len(cache_queue)
    cache_size = sizeof(cache_queue)
    if (close_spider and cache_len > 0) or cache_len > self.db_cache_max_len \
            or (cache_len > 0 and cache_size >= self.db_cache_max_size):
        # Group the cached items by their set of keys, so each group can be
        # inserted with a single SQL statement.
        key_types = set((tuple(i.keys()) for i in cache_queue))
        item_groups = ([i for i in cache_queue if tuple(i.keys()) == key_type]
                       for key_type in key_types)
        for items in item_groups:
            try:
                if len(items) > 0:
                    sql_statement = self._generate_sql_statement(items[0], self.db_table)
                    transaction.executemany(sql_statement,
                                            (i.values() for i in items))
                    spider.log(
                        "{stored_num} items of size {stored_size} byte(s) stored in db".format(
                            stored_num=len(items),
                            stored_size=sizeof(SpiderCache(items))),
                        level=log.INFO)
            except self.db_driver_module.Error as e:
                trace_info = traceback.format_exc()
                spider.log(
                    "Error related to db occurred, store in db failed: {message}\n{trace_info}".format(
                        message=e.message, trace_info=trace_info),
                    level=log.ERROR)
                continue
            except Exception as e:
                trace_info = traceback.format_exc()
                spider.log(
                    "Error not related to db occurred, store in db failed: {message}\n{trace_info}".format(
                        message=e.message, trace_info=trace_info),
                    level=log.ERROR)
                continue
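# The pipeline above relies on a _generate_sql_statement() helper that is not
# shown. A minimal sketch of what it might look like, assuming it builds a
# parameterized INSERT from one representative item's keys (the name is taken
# from the snippet, the body and placeholder style are assumptions):
def _generate_sql_statement(self, item, table):
    # Method sketch for the pipeline class; shown standalone here.
    columns = ", ".join(item.keys())
    placeholders = ", ".join(["%s"] * len(item))
    return "INSERT INTO {table} ({columns}) VALUES ({placeholders})".format(
        table=table, columns=columns, placeholders=placeholders)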
def run(G: Graph) -> Result:
    # MST-based tour: build a minimum spanning tree, take its preorder
    # (DFS) walk, and close the walk back into a cycle.
    c: Counter = Counter()

    mst, _ = MST.prim(G, 1)
    # mst, _ = MST.kruskal(G)
    Tree = Graph(G.N, directed=False)
    Tree.add_edges(mst)

    visited: List[bool] = [False for _ in range(G.N + 1)]
    walk: List[int] = []

    def DFS(u: int):
        visited[u] = True
        walk.append(u)
        c.inc()
        for e in Tree.get_incident_edges(u):
            v = e.snd
            if not visited[v]:
                DFS(v)

    DFS(1)

    path: List[Edge] = []
    W: float = 0.0

    first: int = walk[0]
    u: int = first
    walk.append(first)
    for v in walk[1:]:
        c.inc()
        e = G.get_edge(u, v)
        path.append(e)
        W += e.w
        u = v

    memory: int = 0
    memory += sizeof(mst)
    memory += sizeof(Tree)
    memory += sizeof(visited)
    memory += sizeof(walk)

    return Result(c.get(), W, memory, path=path)
def run(G: Graph) -> Result:
    memory: int = 0
    k: int = 0
    W: float = 0.0
    path: List[Edge] = []

    vertices: List[int] = G.Vertices()
    memory += sizeof(vertices)

    i: int = randrange(len(vertices))
    start: int = vertices[i]
    del vertices[i]

    G.order_edges()

    u: int = start
    while len(vertices) > 0:
        # Greedy step: follow the first incident edge that leads to an
        # unvisited vertex (edges presumably pre-ordered by order_edges()).
        next_e: Optional[Edge] = next(
            (e for e in G.get_incident_edges(u) if e.snd in vertices),
            None
        )
        if next_e is None:
            raise Exception('None')
        v: int = next_e.snd
        vertices.remove(v)

        e = G.get_edge(u, v)
        path.append(e)
        W += e.w
        k += 1

        u = v

    e = G.get_edge(u, start)
    path.append(e)
    W += e.w
    k += 1

    return Result(k, W, memory, path=path)
def cache_access_test():
    test = DataCache()
    print("base test", test)
    test['Foo'] = 'Foovalue'
    print("setitem test", test)
    print("pop item test", test.popitem())
    try:
        print("pop empty test", test.popitem())
    except KeyError:
        pass
    else:
        raise AssertionError

    for i in range(10):
        test[test_key % i] = test_value % i

    print("Iteration Test")
    for i in test:
        print(i)

    print("Iterating after adding too many links")
    for i in range(100):
        test[test_key % i] = test_value % i
    for i in test:
        print(i)

    print("Inserting existing item")
    test[test_key % 95] = test_value % 95
    for i in test.items():
        print(i)

    test.setMaxCache(20)
    ref = OrderedDict()
    for i in range(20):
        test[test_key % i] = test_value % i
        ref[test_key % i] = test_value % i

    print("Iter test")
    print("basic iter")
    for k in test:
        print(k)
    print("Values")
    for v in test.values():
        print(v)
    print("Keys")
    for k in test.keys():
        print(k)
        assert test[k] == ref[k]
    for k, v in test.items():
        print(k, v)

    print("Size Test")
    print(sizeof(ref))
    print(sizeof(test))
    print(sizeof(_Link()))
def __sizeof__(self):
    n = len(self)  # number of links
    size = sizeof(_Link()) * n + sizeof(self.__slots__)
    return size
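# For reference, sys.getsizeof() delegates to __sizeof__() and then adds
# garbage-collector overhead for GC-tracked objects, which is why an override
# like the one above is enough to change what the cache reports. A minimal
# illustration with a hypothetical class:
import sys

class Pair(object):
    __slots__ = ("a", "b")

    def __init__(self, a, b):
        self.a = a
        self.b = b

    def __sizeof__(self):
        # Count the two referenced objects in addition to the instance itself.
        return object.__sizeof__(self) + sys.getsizeof(self.a) + sys.getsizeof(self.b)

print(sys.getsizeof(Pair(1, 2)))  # custom size plus any GC overhead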
print("Doing Test...") cache_result = time_test(cache, 1000, keys) cache_results.append(cache_result) # linked_dict_result = time_test(LinkedDict, 1000, keys) # linked_dict_results.append(linked_dict_result) ref_result = time_test(ref, 1000, keys) ref_results.append(ref_result) print("cache: ", cache_result, int(cache_result/ref_result)) # print("LinkedDict", linked_dict_result, int(linked_dict_result/ref_result)) print("OrderedDict", ref_result) print(sizeof(cache)) # print(sizeof(LinkedDict)) print(sizeof(ref)) print(sizeof(test_list)) print(sizeof(set(test_list))) print(sizeof(frozenset(test_list)))
# s is assumed to be a raw socket created earlier in the script.
#s.bind((HOST, 0))
totalSize = 0
while True:
    packet = s.recvfrom(65565)
    packet = packet[0]

    # Parse the 14-byte Ethernet header: dst MAC, src MAC, EtherType.
    eth_length = 14
    eth_header = packet[:eth_length]
    eth = unpack('!6s6sH', eth_header)
    eth_protocol = socket.ntohs(eth[2])
    print str(eth_protocol)
    print str(sys.getsizeof(packet))

    #s.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
    #s.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
    # packets = (s.recvfrom(65565)[0], "--", s.recvfrom(65565)[1])
    # print str(packets[0]) + "--" + str(packets[1] +"\n"
    # size = sys.getsizeof(s.recvfrom(65565)[0])
    # totalSize += size
    # print "\bTotal Size: " + str(totalSize)
    # print "Most Recent Packet Size: " +str(size) +"\nFrom: "+ str(s.recvfrom(65565)[1])
    #print s.recvfrom(65565)

#s.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)