def forward(self, x):
    # TODO: extract more outputs from the backbone (e.g. FPN features) for intermediate weak supervision.
    x = self.backbone(x)
    gap = F.adaptive_avg_pool2d(x, 1)
    gap = F.relu(gap.view(gap.size(0), -1), inplace=True)
    # Apply the embedding layer if the model has one.
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap

    # Classifier branch (currently disabled).
    # logits = self.classifier(embed_fea)

    # Calculate distances.
    DIST = Distance(scaled=self.scaled, cosine_weight=self.cosine_weight)
    # dist_fea2cen = getattr(DIST, self.distance)(embed_fea, self.centroids)       # [n, class_num]
    # dist_cen2cen = getattr(DIST, self.distance)(self.centroids, self.centroids)  # [class_num, class_num]
    normalized_centroids = F.normalize(self.centroids, dim=1, p=2)
    dist_fea2cen = getattr(DIST, self.distance)(embed_fea, normalized_centroids)
    dist_cen2cen = DIST.l2(normalized_centroids, normalized_centroids)

    return {
        "backbone_fea": x,
        # "logits": logits,
        "embed_fea": embed_fea,
        "dist_fea2cen": dist_fea2cen,
        "dist_cen2cen": dist_cen2cen,
    }
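Several of the forward passes in this collection resolve a metric by name with getattr(DIST, self.distance)(features, centroids) and also call DIST.l2 directly, expecting an [n, class_num] distance matrix, but none of the snippets include the Distance helper itself. The sketch below is only an assumed, minimal stand-in consistent with that calling convention; the scaled and cosine_weight semantics are guesses, not the original implementation.

import torch
import torch.nn.functional as F

class Distance:
    """Assumed minimal stand-in for the metric helper used by the forward passes above."""

    def __init__(self, scaled=True, cosine_weight=0.0):
        self.scaled = scaled                # assumption: scale L2 by sqrt(feature dim)
        self.cosine_weight = cosine_weight  # assumption: optionally blend in a cosine term

    def l2(self, features, centroids):
        # features: [n, d], centroids: [c, d] -> pairwise Euclidean distances [n, c]
        dist = torch.cdist(features, centroids, p=2)
        if self.scaled:
            dist = dist / features.size(1) ** 0.5
        if self.cosine_weight:
            cos = 1.0 - F.cosine_similarity(features.unsqueeze(1), centroids.unsqueeze(0), dim=2)
            dist = dist + self.cosine_weight * cos
        return dist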
def calculerDistance(centres, graph, distance):
    # Compute the distance from every point in `graph` to every centre with the chosen metric.
    distances = []
    if distance == "euclidienne":
        for j in range(len(graph)):
            dList = []
            for k in range(len(centres)):
                centre = centres[k]
                dList.append((k + 1, Distance().euclidienne(centre, graph[j])))
            distances.append(dList)
    if distance == "manhattan":
        for j in range(len(graph)):
            dList = []
            for k in range(len(centres)):
                centre = centres[k]
                dList.append((k + 1, Distance().manhattan(centre, graph[j])))
            distances.append(dList)
    if distance == "minkowski":
        for j in range(len(graph)):
            dList = []
            for k in range(len(centres)):
                centre = centres[k]
                dList.append((k + 1, Distance().minkowski(centre, graph[j])))
            distances.append(dList)
    print("distance", distances)
    return distances
def notifications():
    di = Distance()
    lat = curr_df.loc[0]["Lat"]
    long = curr_df.loc[0]["Long"]
    spd = curr_df.loc[0]["Spd"]
    angle = curr_df.loc[0]["Angle"]
    alt = curr_df.loc[0]["Alt"]
    for i in range(0, len(alert_df)):
        alat = alert_df.loc[i]["Lat"]
        along = alert_df.loc[i]["Long"]
        aSpeed = alert_df.loc[i]["Spd"]
        aAngle = alert_df.loc[i]["Angle"]
        aAlt = alert_df.loc[i]["Alt"]
        d = di.getDistFeet(lat, long, alat, along)
        counter = 0
        prevMainPlane = [lat, long]
        prevAlertPlane = [alat, along]
        newMainPlane = []
        newAlertPlane = []
        prevDistance = d
        newDistance = 0
        altDiff = di.heightDist(alt, aAlt)
        c = Coordinates()
        diverging_flg = 0
        min_dist_flg = 0
        bad_alt_flg = 0
        msg = ''
        if altDiff > 1000:
            msg = "Altitude difference (" + str(altDiff) + "ft) in safe zone, no current possibility of collision."
            bad_alt_flg = 1
        else:
            while True:
                newMainPlane = c.get_next_point(spd, angle, prevMainPlane[0], prevMainPlane[1])
                newAlertPlane = c.get_next_point(aSpeed, aAngle, prevAlertPlane[0], prevAlertPlane[1])
                newDistance = di.getDistFeet(newMainPlane[0], newMainPlane[1], newAlertPlane[0], newAlertPlane[1])
                print(newDistance)
                counter += 1
                if newDistance > prevDistance:
                    diverging_flg = 1
                    break
                elif newDistance < 9260:
                    min_dist_flg = 1
                    break
                else:
                    prevMainPlane = newMainPlane
                    prevAlertPlane = newAlertPlane
                    prevDistance = newDistance  # compare successive steps so the loop terminates once the planes diverge
        if diverging_flg:
            msg = "Flight path not intersecting. No possibility of collision."
        elif min_dist_flg:
            msg = "Altitude in bad zone (" + str(altDiff) + "ft), flight paths intersect in " + str(counter) + " mins."
        elif bad_alt_flg:
            msg = "Altitude difference (" + str(altDiff) + "ft) in safe zone, no current possibility of collision."
        alert_df.loc[i, 'Warning'] = msg  # store the warning for this alert row only
    current = curr_df.to_dict(orient='records')
    alerts = alert_df.to_dict(orient='records')
    global selected_Icao
    return render_template('notifications.html', current=current, alerts=alerts, selected_Icao=selected_Icao)
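The projection loop above depends on Coordinates.get_next_point(speed, bearing, lat, lon) and Distance.getDistFeet, which are not shown in these snippets. A possible dead-reckoning sketch of the former is below; it assumes speed in knots, bearing in degrees clockwise from north, and one projected position per minute (matching the "mins" counter in the message), which may differ from the real helper.

import math

EARTH_RADIUS_FT = 20902231  # mean Earth radius in feet (assumed constant)

def get_next_point(speed_knots, bearing_deg, lat_deg, lon_deg, step_minutes=1):
    """Project a position forward along a constant bearing (standard destination-point formula)."""
    travelled_ft = speed_knots * 6076.12 * (step_minutes / 60.0)  # knots -> feet per step
    delta = travelled_ft / EARTH_RADIUS_FT                        # angular distance in radians
    theta = math.radians(bearing_deg)
    lat1, lon1 = math.radians(lat_deg), math.radians(lon_deg)
    lat2 = math.asin(math.sin(lat1) * math.cos(delta) +
                     math.cos(lat1) * math.sin(delta) * math.cos(theta))
    lon2 = lon1 + math.atan2(math.sin(theta) * math.sin(delta) * math.cos(lat1),
                             math.cos(delta) - math.sin(lat1) * math.sin(lat2))
    return [math.degrees(lat2), math.degrees(lon2)]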
def changeLabel(self):
    widget = self.widget
    index = widget.curselection()[0]
    if index == 0:
        Distance.displayParameterName(self, widget)
    elif index == 1:
        nthPoint.displayParameterName(self, widget)
def CenterCheck(center1, center2, MainCenter, point0, point1, point2, point3):
    if Distance(center1, MainCenter) >= Distance(center2, MainCenter):
        center = center1
        px1hat = point0
        px2hat = point2
    else:
        center = center2
        px1hat = point1
        px2hat = point3
    return center, px1hat, px2hat
def test_distance(self, point_1, point_2, distance, monkeypatch):
    def dummy():
        return ""
    # monkeypatch.setattr(Distance, "_calculate_distance", lambda x: None)
    dist = Distance(point_1, point_2)
    assert dist.distance == distance
def forward(self, x, generates=None):
    x = self.backbone(x)
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    logits = self.classifier(embed_fea)

    # Calculate distances.
    cat_centroids = torch.cat([self.centroids, self.original], dim=0)
    DIST = Distance(scaled=self.scaled)
    dist_fea2cen = getattr(DIST, self.distance)(embed_fea, cat_centroids)

    dist_gen2cen = None
    if generates is not None:
        generates = self.backbone(generates)
        gap_gen = (F.adaptive_avg_pool2d(generates, 1)).view(generates.size(0), -1)
        embed_gen = self.embeddingLayer(gap_gen) if hasattr(self, 'embeddingLayer') else gap_gen
        dist_gen2cen = getattr(DIST, self.distance)(embed_gen, cat_centroids)

    return {
        "gap": x,
        "logits": logits,
        "embed_fea": embed_fea,
        "dist_fea2cen": dist_fea2cen,
        "dist_gen2cen": dist_gen2cen
    }
def forward(self, input):
    x = self.backbone(input)
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap

    gen = self.generator(input)
    embed_gen = self.embeddingLayer(gen) if hasattr(self, 'embeddingLayer') else gen

    centroids = F.normalize(self.centroids, dim=1, p=2) if self.norm_centroid else self.centroids
    SIMI = Similarity(scaled=self.scaled)
    sim_fea2cen = getattr(SIMI, self.similarity)(embed_fea, centroids)
    DIST = Distance(scaled=self.scaled)
    dis_fea2cen = getattr(DIST, self.distance)(embed_fea, centroids)
    dis_gen2cen = getattr(DIST, self.distance)(embed_gen, centroids)
    dis_gen2ori = getattr(DIST, self.distance)(embed_gen, self.origin)

    thresholds = self.thresholds if hasattr(self, 'thresholds') else None

    return {
        "gap": gap,
        "embed_fea": embed_fea,
        "embed_gen": embed_gen,
        "sim_fea2cen": sim_fea2cen,
        "dis_fea2cen": dis_fea2cen,
        "dis_gen2cen": dis_gen2cen,
        "dis_gen2ori": dis_gen2ori,
        "thresholds": thresholds
    }
def forward(self, x):
    x = self.backbone(x)
    dis_gen2cen, dis_gen2ori, thresholds, amplified_thresholds, embed_gen = None, None, None, None, None
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)

    if hasattr(self, 'thresholds'):
        thresholds = self.thresholds
        gen = whitennoise_generator(self.estimator, gap)
        embed_gen = self.embeddingLayer(gen) if hasattr(self, 'embeddingLayer') else gen
        amplified_thresholds = self.thresholds * self.amplifier

    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    centroids = F.normalize(self.centroids, dim=1, p=2) if self.norm_centroid else self.centroids

    SIMI = Similarity(scaled=self.scaled)
    sim_fea2cen = getattr(SIMI, self.similarity)(embed_fea, centroids)
    DIST = Distance(scaled=self.scaled)
    dis_fea2cen = getattr(DIST, self.distance)(embed_fea, centroids)

    if hasattr(self, 'thresholds'):
        dis_gen2cen = getattr(DIST, self.distance)(embed_gen, centroids)
        dis_gen2ori = getattr(DIST, self.distance)(embed_gen, self.origin)

    return {
        "gap": gap,
        "embed_fea": embed_fea,
        "embed_gen": embed_gen,
        "sim_fea2cen": sim_fea2cen,
        "dis_fea2cen": dis_fea2cen,
        "dis_gen2cen": dis_gen2cen,
        "dis_gen2ori": dis_gen2ori,
        "amplified_thresholds": amplified_thresholds,
        "thresholds": thresholds
    }
def forward(self, x):
    x = self.backbone(x)
    dis_gen2cen, dis_gen2ori, thresholds, amplified_thresholds, embed_gen = None, None, None, None, None
    gap = x

    if hasattr(self, 'estimator'):
        thresholds = self.thresholds
        gen = self.estimator.sampler(gap)
        embed_gen = self.embeddingLayer(gen) if hasattr(self, 'embeddingLayer') else gen

    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    embed_fea_2d = (self.fuse * embed_fea).sum(dim=2, keepdim=False).squeeze(dim=-1)
    centroids = F.normalize(self.centroids, dim=1, p=2) if self.norm_centroid else self.centroids

    SIMI = Similarity(scaled=self.scaled)
    sim_fea2cen = getattr(SIMI, self.similarity)(embed_fea_2d, centroids)
    DIST = Distance(scaled=self.scaled)
    dis_fea2cen = getattr(DIST, self.distance)(embed_fea_2d, centroids)

    if hasattr(self, 'estimator'):
        dis_gen2cen = getattr(DIST, self.distance)(embed_gen, centroids)
        dis_gen2ori = getattr(DIST, self.distance)(embed_gen, self.origin)

    return {
        "gap": gap,
        "embed_fea": embed_fea,
        "embed_gen": embed_gen,
        "sim_fea2cen": sim_fea2cen,
        "dis_fea2cen": dis_fea2cen,
        "dis_gen2cen": dis_gen2cen,
        "dis_gen2ori": dis_gen2ori,
        "thresholds": thresholds
    }
def methodsListbox(self):
    self.modeSelection = Listbox(self, selectmode=SINGLE, width=10, height=3)
    self.modeSelection.grid(row=0, column=1)
    lst = []
    infile = open('plugins.txt', 'r')
    for line in infile:
        line = line.split()
        if line[0] >= "A" and line[0] <= "Z" or line[0] >= "a" and line[0] <= "z":
            lst.append(line)
    for item in lst:
        if item == ['Distance']:
            METHODS['Distance'] = callDistanceMethod  # Append to the METHODS dict to be triggered by the button
            mode = Distance.displayName(self)         # Event to get the method name
            self.modeSelection.insert(0, mode)        # Insert in the listbox
        if item == ['nthPoint']:
            METHODS['nthpoint'] = callNthPointMethod  # Append to the METHODS dict to be triggered by the button
            mode = nthPoint.displayName(self)         # Event to get the method name
            self.modeSelection.insert(1, mode)        # Insert in the listbox
    infile.close()
    # Selection event triggers the function that sets the next label
    mutableLabel(self, self.modeSelection)
def forward(self, x):
    x = self.backbone(x)
    dis_gen2cen, dis_gen2ori, thresholds, amplified_thresholds, embed_gen = None, None, None, None, None
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    if hasattr(self, 'decorrelation'):
        gap = self.decorrelation(gap)

    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    centroids = F.normalize(self.centroids, dim=1, p=2) if self.norm_centroid else self.centroids

    SIMI = Similarity(scaled=self.scaled)
    sim_fea2cen = getattr(SIMI, self.similarity)(embed_fea, centroids)
    DIST = Distance(scaled=self.scaled)
    dis_fea2cen = getattr(DIST, self.distance)(embed_fea, centroids)
    dis_cen2cen = getattr(DIST, self.distance)(centroids, centroids)
    dis_thr2thr = self.cal_thr2thr()

    # if hasattr(self, 'estimator'):
    #     dis_gen2cen = getattr(DIST, self.distance)(embed_gen, centroids)
    #     dis_gen2ori = getattr(DIST, self.distance)(embed_gen, self.origin)

    return {
        "gap": gap,
        "embed_fea": embed_fea,
        "embed_gen": embed_gen,
        "sim_fea2cen": sim_fea2cen,
        "dis_fea2cen": dis_fea2cen,
        "dis_cen2cen": dis_cen2cen,
        "dis_thr2thr": dis_thr2thr,
        "thresholds": self.thresholds
    }
def __init__(self, user, places, g_maps, max_tabu_size, max_combinations, max_till_end):
    self.user = user
    self.places = places
    self.places_sequence_distance_objects = []
    self.g_maps = g_maps
    self.places_sequence_names = []
    self.final_combination = []
    self.tabu_list = []
    self.max_tabu_size = max_tabu_size
    self.distance_dictionary = {self.user.house: {}}
    self.max_combinations = max_combinations
    self.final_cost = None
    self.final_time = None
    self.final_function_value = None
    self.max_till_end = max_till_end

    print("Collecting data from Google")
    current_data = 0
    collecting_data_time = pow(len(self.places), 2) + len(self.places)
    for place in self.places:
        current_data += 1
        self.distance_dictionary[self.user.house][place.name] = Distance(
            self.user.house, place.name, self.user.modes, self.g_maps)
        print(str(math.floor(current_data * 100 / collecting_data_time)) + "%")
        current_data += 1
        self.distance_dictionary[place.name] = {}
        self.distance_dictionary[place.name][self.user.house] = Distance(
            place.name, self.user.house, self.user.modes, self.g_maps)
        print(str(math.floor(current_data * 100 / collecting_data_time)) + "%")
    for place in self.places:
        for place2 in self.places:
            if place.name != place2.name:
                current_data += 1
                self.distance_dictionary[place.name][place2.name] = Distance(
                    place.name, place2.name, self.user.modes, self.g_maps)
                print(str(math.floor(current_data * 100 / collecting_data_time)) + "%")
    print("Done")
    print("Wait for the tabu search algorithm to finish")
def Monocycle(Distortion, missing, scale=1, rotation=0):
    # seed(0)
    if Distortion == 'True':
        VarLocation = random.choices(range(-20, 20, 5), k=2)
        for i in range(len(VarLocation)):
            VerSign = random.randint(0, 1) * 2 - 1  # randomly flip the sign of each offset
            VarLocation[i] = VarLocation[i] * VerSign
        VarSize = random.choices(range(-20, 20, 5), k=2)
        VarAngle = random.choices(range(-10, 10, 5), k=2)
    else:
        VarSize = [0, 0]
        VarLocation = [0, 0]
        VarAngle = [0, 0]

    Wheelsize = random.randint(70, 95) * scale
    c0 = [random.randint(300, 340), random.randint(220, 260)]
    Obj1R = random.randint(80, 100) * scale
    obj1 = Triangle(c0, Obj1R, 90 + VarAngle[0])
    L = Distance(obj1[0], obj1[1])
    l = sqrt(3 / 4) * L
    c1 = [c0[0] - 0.5 * L, c0[1] + l / 3]
    obj2 = Triangle([sum(x) for x in zip(c1, VarLocation)], Obj1R, -90 + VarAngle[1])
    p1 = obj2[0]
    p2 = obj2[1]
    p3 = obj2[2]
    c2 = p3
    c3 = [p2[0] + L, p2[1]]
    obj3 = Circle([sum(x) for x in zip(c2, VarLocation)], Wheelsize + VarSize[0])
    obj4 = Circle([sum(x) for x in zip(c3, VarLocation)], Wheelsize + VarSize[1])
    O5L, O5w = 20, 10
    c4 = [p1[0], p1[1] - O5w / 2]
    p11 = [p1[0] + L, p1[1]]
    p22 = [p2[0] + L, p2[1]]
    x1 = range(int(p11[0]), int(p22[0]))
    y1 = [[num + VarLocation[0],
           round(((num - p11[0]) * ((p22[1] - p11[1]) / (p22[0] - p11[0]))) + p11[1] + 2 * VarLocation[0])]
          for num in x1]
    c5 = y1[round(0.4 * len(y1))]
    O6L, O6w = 120, 10
    obj5 = Rectangle(c4, O5L, O5w, asin(O5w / sqrt(O5L**2 + O5w**2)))
    obj6 = Rectangle(c5, O6L, O6w, asin(O6w / sqrt(O6L**2 + O6w**2)) + 45)
    vertices = [obj1, obj2, obj3, obj4, obj5, obj6]
    centroid = [round((c0[0] + c1[0] + c2[0] + c3[0]) / len(vertices)),
                round((c0[1] + c1[1] + c2[1] + c3[1]) / len(vertices))]
    vertices = RotationSet(vertices, centroid, rotation)
    if missing == 'True':
        r = random.choice(range(len(vertices)))  # pick an existing index so the deletion cannot go out of range
        del vertices[r]
    else:
        r = 0
    return vertices
def __init__(self, z, kmin, kmax, N, Y200rhoc, sigma_8, BM0=0.0, alpha=0.0,
             sLnM0=0.1, beta=0.0, fnl=0, bfNL=False, bfNL_Ph=False, H0=67.,
             Omegab0=0.02256 / 0.67**2, Omegam0=(0.1142 + 0.02256) / 0.67**2,
             ns=0.962, w0=-1.0, wa=0.0, Tcmb0=2.75, Ps=None, Mlim=1e14, Mcut=4e14):
    Distance.__init__(self, z, H0, Omegab0, Omegam0, ns, w0, wa, Tcmb0)
    Calibration.__init__(self, z, BM0=BM0, alpha=alpha, sLnM0=sLnM0, beta=beta)
    self.kmin = kmin
    self.kmax = kmax
    self.N = N
    self.Mlim = Mlim
    self.Y200rhoc = Y200rhoc
    self.h = H0 / 100
    self.fnl = fnl
    self.bfNL = bfNL
    self.bfNL_Ph = bfNL_Ph
    self.sigma_8 = sigma_8
    self.Ps = Ps
def forward(self, x):
    x = self.backbone(x)
    x = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    x = self.embeddingLayer(x)
    DIST = Distance(scaled=True)
    dis = getattr(DIST, 'l2')(x, self.centroid)
    return {
        "embed_fea": x,
        "distance": dis,
        "radius": self.R
    }
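This single-centroid variant returns a distance to one centroid together with a learnable radius R, which suggests a one-class style accept/reject rule at inference time. The snippet does not show that rule; the sketch below is only an assumed way to consume the returned dictionary, treating samples outside the radius as unknown.

import torch

@torch.no_grad()
def predict_known(model, x):
    # Uses the forward() above: "distance" is the L2 distance to the centroid, "radius" is R.
    out = model(x)
    dist = out["distance"].squeeze(-1)   # assuming a single centroid, the [n, 1] matrix becomes [n]
    return dist <= out["radius"]         # True -> accepted as in-distribution, False -> unknown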
def loadData(self):
    """
    :return: Asks the user for a file name, reads the data, and processes it the same way
    as the original data file, restoring the window and the map to the state they were in
    when saved. Uses the filedialog library to display a file-manager window; the default
    directory is the current working directory.
    """
    default_directory = os.getcwd()
    loaded_file = filedialog.askopenfilename(initialdir=default_directory, title="Select file")
    new_class = Distance()
    LineSimplification.current_data_dic = initial_object.makeDic(loaded_file)
    load_window = self.displayMap(LineSimplification.current_data_dic)
def nouveauCentres(data, centres, classe):
    # Recompute each cluster centre as the barycentre of the points assigned to it.
    newCenter = []
    for k in range(1, len(centres) + 1):
        groupe = []
        for i in range(len(classe)):
            if classe[i] == k:
                groupe.append(data[i])
        print("groupe", groupe)
        newCenter.append(Distance().barycentre(groupe))
        newCenter[k - 1][0] = k
    return newCenter
def generate_rate(self, generateData):
    k = 15
    distanceNear = []
    for i in range(len(self.data)):
        distance = np.sqrt(np.sum(np.square(self.data[i] - generateData)))
        distanceNear.append(Distance(distance, self.label[i]))
    distanceNear = sorted(distanceNear, key=lambda d: d.get_distance())
    defectCount = 0
    for i in range(k):
        if distanceNear[i].get_label() == 1:
            defectCount = defectCount + 1
    return defectCount / k
def process_message(message, counter):
    print("Processing message...")
    # 1) Get the tag id from the topic
    msg_text = str(message.payload.decode("utf-8"))
    topic = message.topic.split('/')
    tag_id = topic[2]
    try:
        if message:
            if topic[4] == 'location':
                # 2) Decode and add to DB
                json_msg = json.loads(msg_text)
                tag_location = TagLocation(json_msg.get('position').get('_id', None),
                                           tag_id,
                                           json_msg.get('position').get('x'),
                                           json_msg.get('position').get('y'),
                                           json_msg.get('position').get('z'),
                                           json_msg.get('position').get('quality'))
                repository = TagLocationRepository()
                repository.create(tag_location)
            elif topic[4] == 'data':
                json_msg = json.loads(msg_text)
                base64_message = json_msg.get('data')
                message_bytes = base64.b64decode(base64_message)
                print(message_bytes)
                mybytes = bytearray(message_bytes)
                print(mybytes)
                count = mybytes[0]
                print("\nCount: %d" % count)
                counter = mybytes[count * 4 + 1]
                print("\nCounter: %d" % counter)
                bytescounter = 1
                for x in range(count):
                    # Each record is 4 bytes: a little-endian 16-bit address followed by a 16-bit distance.
                    address = mybytes[bytescounter + 0] | (mybytes[bytescounter + 1] & 0x000000FF) << 8
                    print("\nAddress: %s" % hex(address))
                    distance = mybytes[bytescounter + 2] | (mybytes[bytescounter + 3] & 0x000000FF) << 8
                    print("\nDistance: %d" % distance)
                    bytescounter = bytescounter + 4
                    distance = Distance(None, tag_id, str(hex(address)), str(distance))
                    rep = DistanceRepository()
                    rep.create(distance)
    except Exception as e:
        print("An exception occurred")
        print(e)
def forward(self, x):
    # TODO: extract more outputs from the backbone (e.g. FPN features) for intermediate weak supervision.
    x = self.backbone(x)
    dist_gen2cen = None
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)

    if self.thresholds is not None:
        # clone() is required here; reusing gap in place would corrupt its gradient graph.
        generate = self.generat_rand_feature(gap.clone())
        generate_fea = F.relu(generate, inplace=True)
        # Apply the embedding layer if the model has one.
        generate_fea = self.embeddingLayer(generate_fea) if hasattr(self, 'embeddingLayer') else generate_fea

    gap = F.relu(gap, inplace=True)
    # Apply the embedding layer if the model has one.
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap

    # Calculate distances.
    DIST = Distance(scaled=self.scaled, cosine_weight=self.cosine_weight)
    normalized_centroids = F.normalize(self.centroids, dim=1, p=2)
    dist_fea2cen = getattr(DIST, self.distance)(embed_fea, normalized_centroids)  # [n, c+1]
    dist_cen2cen = DIST.l2(normalized_centroids, normalized_centroids)            # [c+1, c+1]

    if self.thresholds is not None:
        dist_gen2cen_temp = getattr(DIST, self.distance)(generate_fea, normalized_centroids)  # [n, c+1]
        mask = dist_gen2cen_temp - self.thresholds.unsqueeze(dim=0)
        value_min, indx_min = mask.min(dim=1, keepdim=False)
        dist_gen2cen = dist_gen2cen_temp[value_min > 0, :]

    return {
        "backbone_fea": x,
        # "logits": logits,
        "embed_fea": embed_fea,
        "dist_fea2cen": dist_fea2cen,
        "dist_cen2cen": dist_cen2cen,
        "dist_gen2cen": dist_gen2cen
    }
def forward(self, x):
    dist_gen2cen = None
    x = self.backbone(x)
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    # Apply the embedding layer if the model has one.
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    logits = self.classifier(embed_fea)

    # Calculate distances.
    DIST = Distance(scaled=self.scaled)
    normalized_centroids = F.normalize(self.centroids, dim=1, p=2)
    normalized_logits = F.normalize(logits, dim=1, p=2)
    dist_fea2cen = getattr(DIST, self.distance)(normalized_logits, normalized_centroids)  # [n, c+1]
    dist_cen2cen = DIST.l2(normalized_centroids, normalized_centroids)                    # [c+1, c+1]

    return {
        "backbone_fea": x,
        "logits": logits,
        "embed_fea": embed_fea,
        "dist_fea2cen": dist_fea2cen,
        "dist_cen2cen": dist_cen2cen,
        "dist_gen2cen": dist_gen2cen
    }
def forward(self, x, labels):
    batch_size = x.size(0)
    distanceFun = Distance(x, self.centers)
    dist = getattr(distanceFun, self.distance)(scaled=self.scaled)  # [n, class_num]
    labels = labels.unsqueeze(1).expand(batch_size, self.num_classes)
    mask = labels.eq(self.classes.expand(batch_size, self.num_classes))
    dist_within = (dist * mask.float()).sum(dim=1, keepdim=False)
    loss_within = (torch.sigmoid(dist_within).sum()) / batch_size

    """Version 1: L2 distance to the other labels
    Function: beta * Sigmoid[-1/(class_num - 1) * Sum_i(Dis(x, cls_i))]
    # Open question: use the distance to all other centroids or only to the closest non-gt centroid?
    dist_between = (-dist * (1 - mask.float())).sum(dim=1, keepdim=False)  # convert max to min
    dist_between = dist_between / (self.num_classes - 1.0)
    loss_between = self.beta * (torch.sigmoid(dist_between).sum()) / batch_size
    """

    """Version 2: cosine similarity to the other labels
    Function: beta * 1/(class_num - 1) * Sum_i(Sim(x, cls_i))
    similarity = getattr(distanceFun, self.similarity)(scaled=True)  # [n, class_num]
    sim_between = (similarity * (1 - mask.float())).sum(dim=1, keepdim=False)
    sim_between = sim_between / (self.num_classes - 1.0)
    loss_between = self.beta * (sim_between.sum()) / batch_size
    """

    """Version 3: L2 distance between the class centers"""
    distanceFun2 = Distance(self.centers, self.centers)
    dist2 = getattr(distanceFun2, self.distance)(scaled=self.scaled)  # [class_num, class_num]
    dist_between = -(dist2).sum(dim=1, keepdim=False)  # convert max to min
    dist_between = dist_between / (self.num_classes - 1.0)
    loss_between = self.beta * (torch.sigmoid(dist_between).sum()) / batch_size

    loss = loss_within + loss_between
    return loss, loss_within, loss_between
def load_distances(self):
    with open('Data/DistanceFile.csv') as distanceFile:
        location_list = list(csv.reader(distanceFile, delimiter=','))

    # Get the list of location names from the header row, skipping the unused top-left cell.
    locations = location_list[0][1:]
    distance_list = []
    counter = 1
    # Loop over every (column, row) pair and build a Distance object for each cell.
    # Time complexity is O(n^2) because of the nested loop.
    for location in locations:
        for row in location_list[1:]:
            distance = Distance(location, row[0], row[counter])
            distance_list.append(distance)
        counter += 1
    return distance_list
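load_distances assumes a square distance matrix in Data/DistanceFile.csv: a header row of location names after an unused top-left cell, then one row per location whose first cell is its name and whose remaining cells are distances in column order. A toy file in that assumed layout (illustrative values only, not real data) is sketched below.

# Assumed layout of Data/DistanceFile.csv:
#
#   ,Hub,Depot A,Depot B
#   Hub,0.0,3.8,7.1
#   Depot A,3.8,0.0,2.4
#   Depot B,7.1,2.4,0.0
#
# With that file, load_distances() would create one Distance(column_location, row_location, value)
# object per cell, e.g. Distance('Hub', 'Depot A', '3.8'); values stay strings because csv.reader
# yields strings.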
def forward(self, x):
    x = self.backbone(x)
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    centroids = F.normalize(self.centroids, dim=1, p=2) if self.norm_centroid else self.centroids
    SIMI = Similarity(scaled=self.scaled)
    sim_fea2cen = getattr(SIMI, self.similarity)(embed_fea, centroids)
    DIST = Distance(scaled=self.scaled)
    dis_fea2cen = getattr(DIST, self.distance)(embed_fea, centroids)
    return {
        "gap": x,
        "embed_fea": embed_fea,
        "sim_fea2cen": sim_fea2cen,
        "dis_fea2cen": dis_fea2cen
    }
def forward(self, x):
    x = self.backbone(x)
    gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
    embed_fea = self.embeddingLayer(gap) if hasattr(self, 'embeddingLayer') else gap
    logits = self.classifier(embed_fea)
    embed_fea_normalized = F.normalize(embed_fea, dim=1, p=2)

    # Calculate distances.
    DIST = Distance(scaled=self.scaled)
    normalized_centroids = F.normalize(self.centroids, dim=1, p=2)
    # dist_fea2cen = getattr(DIST, self.distance)(embed_fea_normalized, normalized_centroids)  # [n, c+1]
    dist_fea2cen = getattr(DIST, self.distance)(embed_fea, self.centroids)

    return {
        "gap": x,
        "logits": logits,
        "embed_fea": embed_fea,
        "dist_fea2cen": dist_fea2cen
    }
def SimplifyLnodes_boundarycase(self):
    full_node = []
    cut_node = []
    for lnode in self.getLNodeList():
        if len(lnode.GetLmer().translate(None, digits)) == self.pdbn.get_Llength():
            full_node.append(lnode)
        else:
            cut_node.append(lnode)
    full_node.sort()
    cut_node.sort()
    for main_node in full_node:
        if not cut_node:
            break
        if not main_node.GetPrev():
            for sub_node in cut_node[::-1]:
                if Distance.HammingDistance_dif(main_node.GetLmer(), sub_node.GetLmer().translate(None, digits)) == 0:
                    self.CombineTwo(main_node, sub_node)
                    cut_node.remove(sub_node)
def processData(self):
    """
    This function is called when the user presses the process button. The program checks
    which method the user has selected and creates an object of the appropriate class.
    The user's input in the entry box is also collected.
    :return: Processes the data, displays the map, and saves the data dictionary as the
    current dictionary so the process can continue from the new data from now on.
    """
    if LineSimplification.listbox_index == 0:
        # Index 0 of the listbox means the Distance method has been selected
        the_object = Distance()  # An object of the appropriate class is created
    elif LineSimplification.listbox_index == 1:
        # Index 1 of the listbox means the nthPoint method has been selected
        the_object = nthPoint()  # An object of the appropriate class is created
    elif LineSimplification.listbox_index is None:
        # No method has been selected yet
        print('Please select a method')
    else:
        # More objects can be added if there are more methods available
        pass

    def repeatProcess(data_dictionary, parameter):
        """
        Calls the thinPoints method (the appropriate class is chosen based on the object - polymorphism)
        to return a revised dictionary.
        :param data_dictionary: a dictionary of points
        :param parameter: the parameter required to reduce the data, either the distance or the number of points to skip
        :return: calls thinPoints to remove the appropriate points and return a smaller dictionary
        """
        revised_dic = the_object.thinPoints(data_dictionary, parameter)  # thinPoints returns the new dictionary
        new_window = self.displayMap(revised_dic)  # displays a new map based on the new dictionary
        return revised_dic

    the_entry = LineSimplification.entry_box.get()  # gets the user's entry
    if LineSimplification.listbox_index is not None:
        try:
            users_entry = float(the_entry)
            # The class variable changes to the new dictionary
            LineSimplification.current_data_dic = repeatProcess(LineSimplification.current_data_dic, users_entry)
        except ValueError:
            # Asks the user to enter a valid number instead of letters, other characters, or an empty entry
            print('Please enter a valid number')
def forward(self, x, labels):
    batch_size = x.size(0)
    distanceFun = Distance(x, self.centers)
    dist = getattr(distanceFun, self.distance)(scaled=self.scaled)  # [n, class_num]
    labels = labels.unsqueeze(1).expand(batch_size, self.num_classes)
    mask = labels.eq(self.classes.expand(batch_size, self.num_classes))
    dist_within = (dist * mask.float()).sum(dim=1, keepdim=True)

    # dist_between = (dist * (1 - mask.float()))
    # Penalize whenever the within-class distance exceeds the distance to another center.
    dist_between = F.relu(dist_within - dist, inplace=True)
    dist_between = dist_between.sum(dim=1, keepdim=False)
    dist_between = dist_between / (self.num_classes - 1.0)

    loss_within = (dist_within.sum()) / batch_size
    loss_between = self.beta * (dist_between.sum()) / batch_size
    loss = loss_within + loss_between
    return loss, loss_within, loss_between
from Temperature import Temperature
from Distance import Distance
from Memory import Memory
from Weight import Weight

if __name__ == '__main__':
    while True:
        choice = input('Available conversions ... \n'
                       '1. Temperature\n'
                       '2. Distance\n'
                       '3. Memory\n'
                       '4. Weight\n'
                       '5. Exit\n'
                       'Please enter your choice ... ')
        if choice == '1':
            t = Temperature()
            t.convert()
        elif choice == '2':
            d = Distance()
            d.convert()
        elif choice == '3':
            m = Memory()
            m.convert()
        elif choice == '4':
            w = Weight()
            w.convert()
        else:
            print('Exiting now ... Bye!')
            break
map.get_flight_data()
df = pd.DataFrame()
df = map.getDataFrame()
alert_df = map.get_alert_list()
analyzed_df = map.get_analyzed_list()
curr_df = map.get_current()
curr_df = curr_df[curr_df.columns.difference(['arrow'])]
alert_df = alert_df[alert_df.columns.difference(['arrow'])]
analyzed_df = analyzed_df[analyzed_df.columns.difference(['arrow'])]
nTotal = map.getTotalRecs()
nFiltered = map.getFilteredRecs()
di = Distance()
lat = curr_df.loc[0]["Lat"]
long = curr_df.loc[0]["Long"]
spd = curr_df.loc[0]["Spd"]
angle = curr_df.loc[0]["Angle"]
alt = curr_df.loc[0]["Alt"]
for i in range(0, len(alert_df)):
    alat = alert_df.loc[i]["Lat"]
    along = alert_df.loc[i]["Long"]
    aSpeed = alert_df.loc[i]["Spd"]
    aAngle = alert_df.loc[i]["Angle"]
    aAlt = alert_df.loc[i]["Alt"]
    d = di.getDistFeet(lat, long, alat, along)
    counter = 0
    prevMainPlane = [lat, long]
    prevAlertPlane = [alat, along]