def RV2OE(position, velocity, Mu):
    """Convert an inertial state vector into classical orbital elements.

    Parameters
    ----------
    position : sequence of 3 floats
        Inertial position vector (e.g. km).
    velocity : sequence of 3 floats
        Inertial velocity vector (e.g. km/s).
    Mu : float
        Gravitational parameter of the central body (e.g. km^3/s^2).

    Returns
    -------
    tuple (E, a, i, Omega, w, Theta)
        E     : eccentricity (dimensionless)
        a     : semi-major axis (same length unit as position)
        i     : inclination, degrees
        Omega : right ascension of the ascending node, degrees
        w     : argument of periapsis, degrees
        Theta : true anomaly, degrees

    NOTE(review): the node- and eccentricity-based angles divide by N and E,
    so purely equatorial (N == 0) or circular (E == 0) orbits produce NaN —
    same limitation as the original implementation.
    """
    pos = np.asarray(position, dtype=float)
    vel = np.asarray(velocity, dtype=float)
    r = np.linalg.norm(pos)
    v = np.linalg.norm(vel)
    # Radial velocity component: positive when moving away from the focus.
    RadialV = np.dot(pos, vel) / r

    # Specific angular momentum and inclination.
    h = np.cross(pos, vel)
    H = np.linalg.norm(h)
    i = np.rad2deg(np.arccos(h[2] / H))

    # Node vector n = K x h lies in the equatorial plane, so n_z == 0 always.
    K = np.array([0.0, 0.0, 1.0])
    n = np.cross(K, h)
    N = np.linalg.norm(n)
    # BUG FIX: the quadrant of Omega is decided by the sign of n_y, not n_z.
    # The original tested n_z, which is identically zero, so the 360-deg
    # branch was unreachable and Omega could never exceed 180 degrees.
    if n[1] >= 0:
        Omega = np.rad2deg(np.arccos(n[0] / N))
    else:
        Omega = 360.0 - np.rad2deg(np.arccos(n[0] / N))

    # Eccentricity vector (points toward periapsis) and its magnitude.
    e = (1.0 / Mu) * (((v ** 2) - (Mu / r)) * pos - (r * RadialV) * vel)
    E = np.linalg.norm(e)

    # Argument of periapsis, measured from the ascending node; reflect into
    # the lower half-plane when the eccentricity vector dips below the equator.
    w = np.rad2deg(np.arccos(np.dot(n, e) / (N * E)))
    if e[2] < 0:
        w = 360.0 - w

    # True anomaly, measured from periapsis; a negative radial velocity means
    # the spacecraft is inbound, i.e. past the 180-degree point.
    Theta = np.rad2deg(np.arccos(np.dot(e, pos) / (E * r)))
    if RadialV < 0:
        Theta = 360.0 - Theta

    # Periapsis/apoapsis radii from the orbit equation; their mean is the
    # semi-major axis (equivalent to h^2 / (Mu * (1 - E^2))).
    Rp = ((H ** 2) / Mu) * (1.0 / (1.0 + E))
    Ra = ((H ** 2) / Mu) * (1.0 / (1.0 - E))
    a = 0.5 * (Rp + Ra)
    return (E, a, i, Omega, w, Theta)
def cosine_sim(q_pp, tf_idf_wrt_doc, tf_idf_q, k, tw, dict_titles):
    """Rank documents against a query by dot-product score and print the top-k.

    Parameters
    ----------
    q_pp           : preprocessed query string (tokenized for title matching).
    tf_idf_wrt_doc : dict mapping doc id -> tf-idf vector (sequence of floats).
    tf_idf_q       : tf-idf vector of the query (same length as doc vectors).
    k              : number of top-ranked documents to print.
    tw             : title weightage flag; "n"/"N" disables title boosting,
                     anything else boosts documents whose title contains a
                     query term.
    dict_titles    : dict mapping doc id -> raw title string (only read when
                     title weighting is enabled).

    Prints the ranked doc ids to stdout; returns None.

    NOTE(review): the score is a plain dot product — it equals cosine
    similarity only if the vectors are pre-normalized by the caller.
    """
    # Dot product of the query vector with each document vector.
    result = {doc: sum(a * b for a, b in zip(tf_idf_q, vec))
              for doc, vec in tf_idf_wrt_doc.items()}

    if not (tw == "n" or tw == "N"):
        # Increase a document's score by 20% for each query term that also
        # appears in its (preprocessed) title.
        for term in word_tokenize(q_pp):
            for doc in dict_titles:
                if term in word_tokenize(preprocess(dict_titles[doc])):
                    result[doc] = 1.2 * result[doc]

    # Sort once, descending by score (original sorted in both branches).
    result_sort = sorted(result.items(), key=operator.itemgetter(1), reverse=True)

    # BUG FIX: banner typo "RETREIVED" -> "RETRIEVED".
    print("\n\t\t\t\tTOP-{} DOCUMENTS RETRIEVED BASED ON COSINE SIMILARITY ARE::\n".format(k))
    print("=" * 124)
    # BUG FIX: guard against k exceeding the number of scored documents,
    # which raised IndexError in the original.
    for rank in range(min(k, len(result_sort))):
        print(rank + 1, ".", result_sort[rank][0])
def test_happy_path(self):
    """Dot product of [10, 10] with [20, 20] should be 10*20 + 10*20 = 400."""
    expected = 400
    actual = mi.dotproduct([10, 10], [20, 20])
    self.assertEqual(expected, actual)
def test_happy_path(self):
    """mi.dotproduct on a simple pair of 2-vectors returns 400."""
    lhs = [10, 10]
    rhs = [20, 20]
    self.assertEqual(400, mi.dotproduct(lhs, rhs))
# Mean-centre the data before training.
# BUG FIX: the original loop only rebound its loop variable
# ("line = [line[0] - mean[0], ...]"), which left `data` untouched —
# the centering was a no-op. Rebuild the list explicitly instead.
data = [[point[0] - mean[0], point[1] - mean[1]] for point in data]

# Training that network technically: a single Oja's-rule pass.
# randomish weights
weights = [1, 1]
deltaW = [0, 0]
# learning rate
c = 0.1

# Apply math to data, single iteration over the (now centred) points.
for point in data:
    # Neuron output: dot product of the input with the current weights.
    y = sum(x_i * w_i for x_i, w_i in zip(point, weights))
    # K = y^2; the decay term K * w keeps the weight vector bounded.
    K = y * y
    # Oja's rule: deltaW = c * (x * y - y^2 * w).
    deltaW[0] = c * ((point[0] * y) - (K * weights[0]))
    deltaW[1] = c * ((point[1] * y) - (K * weights[1]))
    # Update weights.
    weights[0] = weights[0] + deltaW[0]
    weights[1] = weights[1] + deltaW[1]

together = []
# dot product with the input data and new weights