def __init__(self, sock, addr, cwnd, ploss, pcorr):
    self.init = False
    self.end = False
    self.requesting = False
    self.sock = sock
    self.addr = addr
    self.ploss = ploss
    self.pcorr = pcorr
    self.cwnd = cwnd
    if self.cwnd <= 0:
        self.cwnd = WINDOW_SIZE
    self.attempt = 0
    self.timer = None
    self.lock = threading.Lock()
    # PQueue sorts packets by segnum, so entries in the queue are ordered as:
    #   ACKs            - segnum = 0
    #   Failed packets  - their segnum is guaranteed lower than unsent packets'
    #   Unsent packets
    # Ordering is therefore guaranteed and ACKs are sent immediately.
    self.buff = PQueue()                    # packets to send
    self.waiting = PQueue(cwnd)             # packets waiting for ACK
    self.seg = 0                            # current packet
    self.ack = 0                            # last valid ack
    self.nextSeg = random.randint(1, 1000)  # next expected packet
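# A minimal sketch of the ordering the comment above relies on, assuming PQueue
# is Python's queue.PriorityQueue and that entries are (segnum, payload) tuples
# (both assumptions; the real packet objects are not shown in this snippet).
from queue import PriorityQueue

buff = PriorityQueue()
buff.put((5, "unsent packet"))
buff.put((0, "ACK"))
buff.put((2, "failed packet to retransmit"))

while not buff.empty():
    print(buff.get())
# (0, 'ACK')
# (2, 'failed packet to retransmit')
# (5, 'unsent packet')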
def process_recent_read_queue(recent_read_queue):
    """Parse recent-read info from its string form into a priority queue of
    (timestamp, title_vector) tuples (see User_Info.recent_read_queue).

    Args:
        recent_read_queue: string representation of the queue

    Returns:
        A priority queue of (timestamp, title_vector) tuples.
    """
    queue = PQueue()
    # Strip the outer brackets, then split the remaining "(...)" groups.
    queue_string = recent_read_queue[1:len(recent_read_queue) - 1]
    queue_string_list = re.split(r'\(|\)', queue_string)
    queue_string_list = queue_string_list[1:len(queue_string_list) - 1]
    for value in queue_string_list:
        if value == ', ':
            continue
        value = value.strip()
        value_list = re.split(r',|\[|\]', value)
        timestamp = float(value_list[0])
        title_list = []
        for element in value_list[1:]:
            if element == '' or element == ' ':
                continue
            title_list.append(float(element))
        queue.put((timestamp, title_list))
    return queue
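# Hypothetical usage of process_recent_read_queue, assuming the stored string
# looks like "[(timestamp, [v1, v2, ...]), ...]" (an assumption inferred from
# the parsing above; the actual persisted format may differ slightly).
recent = "[(1496300000.0, [0.1, 0.2]), (1496300600.0, [0.3, 0.4])]"
q = process_recent_read_queue(recent)
while not q.empty():
    print(q.get())
# (1496300000.0, [0.1, 0.2])
# (1496300600.0, [0.3, 0.4])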
def find_topic_top_news(user, news_dict, num):
    """Find the news most related to the user under the topic model, using the
    dot product of user_topic_vec and news_topic_dist.

    Args:
        user: UserInfo object
        news_dict: dict mapping news id to news info
        num: number of most related news items to return

    Returns:
        A list of news ids of the most related news.
    """
    queue = PQueue()
    user_topic_vec = np.array(user.topic_vec)
    print(user.topic_vec)
    for id in user.candidate_list:
        if id not in user.read_list and id not in user.recommend_list:
            if id in news_dict:
                news_topic_dist = np.array(news_dict[id].topic_dist)
                correlation = np.dot(user_topic_vec, news_topic_dist)
                # Negate so the min-heap pops the highest correlation first.
                queue.put((correlation * -1, id))
    top_list = []
    cnt = 0
    print('------------lda recommend---------------')
    while not queue.empty() and cnt < num:
        item = queue.get()
        print(item)
        top_list.append(item[1])
        cnt += 1
    return top_list
def find_nearest_news(user, news_dict, num):
    """Find the nearest news by cosine similarity of title vectors.

    Args:
        user: UserInfo object
        news_dict: dict mapping news id to news info
        num: number of most related news items to return

    Returns:
        A list of news ids of the most related news.
    """
    queue = PQueue()
    # Build the user vector from the title vectors of recently read news.
    user_vec = []
    for news_info in user.recent_read:
        user_vec.append(news_info.title_vec)
    print(user_vec)
    for id in user.candidate_list:
        if id not in user.read_list and id not in user.recommend_list:
            if id in news_dict:
                title_vec = news_dict[id].title_vec
                if isinstance(title_vec, float) and math.isnan(title_vec):
                    continue
                if len(user.recent_read) == 0:
                    # No reading history: fall back to zero vectors.
                    word2vec_dim = len(title_vec)
                    zero_vec = [0. for n in range(word2vec_dim)]
                    user_vec = [zero_vec, zero_vec]
                cos_dist = np.dot(
                    matutils.unitvec(np.array(user_vec).mean(axis=0)),
                    matutils.unitvec(np.array(title_vec)))
                # Negate so the min-heap pops the highest similarity first.
                queue.put((cos_dist * -1, id))
    top_list = []
    cnt = 0
    print('------------word2vec recommend---------------')
    while not queue.empty() and cnt < num:
        item = queue.get()
        print(item)
        top_list.append(item[1])
        cnt += 1
    return top_list
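# A minimal sketch of the similarity computation above: average the title
# vectors of recently read news, unit-normalise, and dot with a candidate's
# unit-normalised title vector. The vectors here are made up for illustration.
import numpy as np
from gensim import matutils

recent_title_vecs = [[0.2, 0.1, 0.0], [0.4, 0.3, 0.1]]
candidate_title_vec = [0.3, 0.2, 0.0]

user_mean = matutils.unitvec(np.array(recent_title_vecs).mean(axis=0))
cos_sim = np.dot(user_mean, matutils.unitvec(np.array(candidate_title_vec)))
print(cos_sim)  # close to 1.0 when the directions are similar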
def find_user_topic_top_n(user, top_n):
    """For LDA recommendation, first find the top_n topics the user is most
    interested in, then choose news related to those topics in a certain
    proportion.

    Args:
        user: UserInfo object
        top_n: number of topic indices to return

    Returns:
        A list of the top_n topic indices.
    """
    queue = PQueue()
    topic_vec = user.topic_vec.tolist()
    for i in range(len(topic_vec)):
        # Negate so the min-heap pops the largest topic weight first.
        queue.put((-1 * topic_vec[i], i))
    ret = []
    for _ in range(top_n):
        # get() returns a (negated_weight, topic_index) tuple; keep the index.
        ret.append(queue.get()[1])
    return ret
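# Why the weights are negated: queue.PriorityQueue is a min-heap, so storing
# (-weight, index) makes the heaviest topics come out first. The topic vector
# below is a toy example for illustration only.
from queue import PriorityQueue

topic_vec = [0.05, 0.40, 0.15, 0.40]
pq = PriorityQueue()
for i, w in enumerate(topic_vec):
    pq.put((-w, i))
print([pq.get()[1] for _ in range(2)])  # [1, 3] -- the two heaviest topics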
def trapRainWater(self, height):
    """
    :type height: List[List[int]]
    :rtype: int
    """
    m = len(height)
    if m == 0:
        return 0
    n = len(height[0])
    if m < 3 or n < 3:
        return 0
    # count[i][j] is the settled water level of cell (i, j), or -1 if unvisited.
    count = [[-1] * n for i in range(m)]
    # Min-heap of boundary cells ordered by their current water level.
    seaList = PQueue()
    for i in range(m):
        count[i][0] = height[i][0]
        count[i][n - 1] = height[i][n - 1]
        seaList.put((height[i][0], i, 0))
        seaList.put((height[i][n - 1], i, n - 1))
    for j in range(1, n - 1):
        count[0][j] = height[0][j]
        count[m - 1][j] = height[m - 1][j]
        seaList.put((height[0][j], 0, j))
        seaList.put((height[m - 1][j], m - 1, j))
    ret = 0
    while not seaList.empty():
        # Pop the lowest boundary cell and flood its unvisited neighbours:
        # a lower neighbour fills up to seaLevel, a higher one becomes the
        # new boundary at its own height.
        seaLevel, i, j = seaList.get()
        if i > 0 and count[i - 1][j] == -1:
            if seaLevel > height[i - 1][j]:
                ret += seaLevel - height[i - 1][j]
                count[i - 1][j] = seaLevel
                seaList.put((seaLevel, i - 1, j))
            else:
                count[i - 1][j] = height[i - 1][j]
                seaList.put((height[i - 1][j], i - 1, j))
        if i < m - 1 and count[i + 1][j] == -1:
            if seaLevel > height[i + 1][j]:
                ret += seaLevel - height[i + 1][j]
                count[i + 1][j] = seaLevel
                seaList.put((seaLevel, i + 1, j))
            else:
                count[i + 1][j] = height[i + 1][j]
                seaList.put((height[i + 1][j], i + 1, j))
        if j > 0 and count[i][j - 1] == -1:
            if seaLevel > height[i][j - 1]:
                ret += seaLevel - height[i][j - 1]
                count[i][j - 1] = seaLevel
                seaList.put((seaLevel, i, j - 1))
            else:
                count[i][j - 1] = height[i][j - 1]
                seaList.put((height[i][j - 1], i, j - 1))
        if j < n - 1 and count[i][j + 1] == -1:
            if seaLevel > height[i][j + 1]:
                ret += seaLevel - height[i][j + 1]
                count[i][j + 1] = seaLevel
                seaList.put((seaLevel, i, j + 1))
            else:
                count[i][j + 1] = height[i][j + 1]
                seaList.put((height[i][j + 1], i, j + 1))
    return ret
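# Hypothetical usage, assuming the method above lives on a LeetCode-style
# Solution class and PQueue is queue.PriorityQueue (neither is shown in this
# snippet).
heightMap = [
    [1, 4, 3, 1, 3, 2],
    [3, 2, 1, 3, 2, 4],
    [2, 3, 3, 2, 3, 1],
]
print(Solution().trapRainWater(heightMap))  # 4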
def __init__(self, packet_queue, operation_queue, data_value_queue):
    self.packet_queue = packet_queue
    self.operation_queue = operation_queue
    self.data_value_queue = data_value_queue
    self.num_day = 14
    self.pq = PQueue()
    self.anomalies = []

    # Read Analog Measurement Files
    self.csv_steady = None
    with open("../csv/S1_Steady_State.csv") as csvfile:
        reader = csv.DictReader(csvfile)
        self.csv_steady = [row for row in reader]
    self.csv_over_voltage = None
    with open("../csv/S4_Overvoltage_Tripping.csv") as csvfile:
        reader = csv.DictReader(csvfile)
        self.csv_over_voltage = [row for row in reader]
    self.csv_under_voltage = None
    with open("../csv/S7_Undervoltage_Tripping.csv") as csvfile:
        reader = csv.DictReader(csvfile)
        self.csv_under_voltage = [row for row in reader]
    self.csv_over_current = None
    with open("../csv/S3_Overcurrent_Instant_Fault6.csv") as csvfile:
        reader = csv.DictReader(csvfile)
        self.csv_over_current = [row for row in reader]

    # Calculate Substations per Control Center
    self.ss_num = []
    count = 0
    ss_per_cc = int(math.ceil(float(STATION_NUM) / CC_NUM))
    for i in range(CC_NUM):
        if i == CC_NUM - 1:
            self.ss_num.append(STATION_NUM - count)
        else:
            self.ss_num.append(ss_per_cc)
            count += ss_per_cc

    # Create Generators
    self.cc_list = []
    uid = "85:80"
    for i in range(CC_NUM):
        cc_ip = "100.0." + str(i) + ".1"
        ss_list = []
        for j in range(self.ss_num[i]):
            ss_ip = "100.0." + str(i) + "." + str(j + 2)
            anomalies = []
            comm_pair = CommunicationPair(
                self.pq,
                cc_ip,
                ss_ip,
                uid,
                self.csv_steady,
                anomalies,
                self.csv_over_voltage,
                self.csv_under_voltage,
                self.csv_over_current,
            )
            ss_list.append(comm_pair)
        self.cc_list.append(ss_list)

    # Inject Anomalies
    self.injectTCPSYNFlooding()
    self.injectDataIntegrityAttack()
    self.injectCommandInjection()
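# Standalone sketch of the substation-per-control-center split above, using
# made-up totals (STATION_NUM = 10, CC_NUM = 3) for illustration: each control
# center gets ceil(STATION_NUM / CC_NUM) substations and the last one takes
# whatever is left over.
import math

STATION_NUM, CC_NUM = 10, 3
ss_per_cc = int(math.ceil(float(STATION_NUM) / CC_NUM))  # 4
ss_num, count = [], 0
for i in range(CC_NUM):
    if i == CC_NUM - 1:
        ss_num.append(STATION_NUM - count)
    else:
        ss_num.append(ss_per_cc)
        count += ss_per_cc
print(ss_num)  # [4, 4, 2]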