def get_workers():
    """Return a JSON list of workers in the requested category that lie within
    `distance` of the requested (latitude, longitude).

    Query-string parameters: category, latitude, longitude, distance (optional,
    defaults to 2.0).  Response: {"workers": [<dict per matching worker>]}.
    """
    if request:
        app.logger.info('request headers - {0}'.format(request.headers))
    category = request.args.get('category')
    filtered_list = []
    final_list = []
    query = db.session.query(Worker).all()
    if query:
        app.logger.info('query value true')
        # Keep only workers in the requested category.
        filtered_list = [item for item in query if item.category == category]
    latitude = request.args.get('latitude')
    longitude = request.args.get('longitude')
    # NOTE(review): the original called unicodedata.normalize(...).encode(...)
    # on latitude/longitude but discarded the results (strings are immutable),
    # so those calls were no-ops and have been removed.
    distance = request.args.get('distance')
    # request.args values are strings; convert so the `<` comparison below is
    # numeric (float < str raised TypeError in the original).
    distance = float(distance) if distance else 2.0
    for item in filtered_list:
        distance_point = calc_dist(float(item.latitude), float(item.longitude),
                                   float(latitude), float(longitude))
        app.logger.info('dist - {0}'.format(distance_point))
        # NOTE(review): removed the debug leftover `distance_point = 1.5`,
        # which overwrote the computed distance for every worker.
        if distance_point < distance:
            awesome_dict = create_dictionary(item, distance_point)
            final_list.append(awesome_dict)
    return jsonify({'workers': final_list})
def test3(params):
    """Run the PtrNet and NeuralCombOptRL models on the instance in params.f,
    printing each tour's total length, wall time, and the tour itself."""
    name, ncity, D, coord = read(params.f)
    params.ncity = ncity

    model, _, _ = construct("PtrNet", params, is_train=False)
    coords_tensor = torch.Tensor([np.asarray(list(coord.values()))])
    started = time()
    _, tour = model(coords_tensor)
    tour = list(tour.detach().numpy()[0])
    print("\nPtrNet", calc_dist(tour, D), "\ntime", time() - started)
    print(*tour)

    model, _, _ = construct("NeuralCombOptRL", params, is_train=False)
    coords_tensor = torch.Tensor([np.asarray(list(coord.values()))])
    started = time()
    # NeuralCombOptRL returns a 4-tuple; only the tour is needed here.
    _, _, _, tour = model(coords_tensor)
    tour = list(tour.detach().numpy()[0])
    print("\nNeuralCombOptRL", calc_dist(tour, D), "\ntime", time() - started)
    print(*tour)
def test1(filename):
    """Solve the TSP instance in `filename` with several heuristics and an IP
    model, printing total tour length and wall time for each solver."""
    name, ncity, D, coord = read(filename)
    two_opt = TwoOpt(ncity, D)
    two_opt_multi = TwoOpt(ncity, D)
    simanneal = TSPSimAnneal(ncity, D)
    ga = TSPGA(ncity, D)
    IP = PulpIP(ncity, D, MTZ_level=2)

    def run(label, solve, solver=None):
        # Time one solver; when the solver tracks its own best objective,
        # check it agrees with the recomputed tour length.
        started = time()
        tour = solve()
        total_dist = calc_dist(tour, D)
        if solver is not None:
            assert isclose(total_dist, solver.best_obj, abs_tol=1e-5)
        print("\n" + label, total_dist, "\ntime", time() - started)
        return tour

    run("normal", lambda: two_opt.solve_two_opt(strategy="greedy_random"), two_opt)
    run("multi start",
        lambda: two_opt_multi.solve_multi_start_two_opt(10, strategy="greedy_random"),
        two_opt_multi)
    run("simulated annealing",
        lambda: simanneal.solve_simulated_annealing(T=8215972750, C=0.81,
                                                    strategy="greedy_random"),
        simanneal)
    run("ga simple",
        lambda: ga.solve(cxpb=0.3276646451925047, mutpb=0.6116923679473824))
    tour = run("ip", lambda: IP.solve(solver_name="cplex"))
    print(*tour)
def find_neighbours(self, users):
    """Pair each user with the name of the geographically closest other user.

    `users` is iterated with .iterrows() (a pandas DataFrame with 'id' and
    'name' columns).  Returns a list of (user_name, neighbour_name) tuples;
    a user with no distinct neighbour keeps the placeholder name.
    """
    # Diagonal of the 180 x 360 (lat, lon) grid: an upper bound for any distance.
    upper_bound = (180 ** 2 + 360 ** 2) ** 0.5
    pairs = []
    for _, user in users.iterrows():
        best_dist = upper_bound
        best_name = 'No neighbours :( '
        for _, candidate in users.iterrows():
            if candidate['id'] == user['id']:
                continue  # a user is not its own neighbour
            dist = calc_dist(user, candidate)
            if dist < best_dist:
                best_dist = dist
                best_name = candidate['name']
        pairs.append((user['name'], best_name))
    return pairs
def Query4ByLLR(lat, lon, rad):
    """Return POIs within `rad` of (lat, lon), sorted by distance.

    Each result tuple is (nodeID, (lon, lat), name, poitype, distance).
    """
    (y, x) = ut.mapping(lat, lon)
    # Axis-aligned bounding square around the point; used as a fast MBR
    # pre-filter on the database side before the exact distance check.
    cur.execute("set @poly='Polygon((%f %f,%f %f,%f %f,%f %f,%f %f))'" %
                (x - rad, y + rad, x + rad, y + rad, x + rad, y - rad,
                 x - rad, y - rad, x - rad, y + rad))
    cur.execute(
        'select nodeID,ST_AsText(position),name,poitype from POIs where MBRContains(ST_GeomFromText(@poly),planaxy)'
    )
    matches = []
    for row in cur.fetchall():
        # Parse the WKT text "Point(lon lat)" into two floats.
        coordinate = row[1].strip().split(' ')
        poi_lon = float(coordinate[0][6:])
        poi_lat = float(coordinate[1][:-1])
        d = ut.calc_dist(lat, lon, poi_lat, poi_lon)
        # The MBR test is a square; keep only points truly within `rad`.
        if d <= rad:
            matches.append((row[0], (poi_lon, poi_lat), row[2], row[3], d))
    return sorted(matches, key=operator.itemgetter(4))
def checkImages():
    """Compare two face images and return their match score as JSON.

    Reads `databaseName` and `imageId` from the query string.  On any failure
    the traceback is returned as JSON under the 'trace' key.
    """
    try:
        databaseName = request.args.get('databaseName')
        imageId = request.args.get('imageId')
        # selecting important information from the user request
        # Acquiring the database images based on the request
        # NOTE(review): img1/img2 are never assigned in this function — the
        # database image lookup is not implemented, so this route currently
        # raises NameError and always takes the except branch.  TODO: load
        # img1/img2 (or their precomputed vectors) using databaseName/imageId.
        face1 = get_face_image(img1)
        face2 = get_face_image(img2)
        # can be skipped by the inclusion of the vectors, time saved ~1s
        vector1 = get_embeddings(face1)
        vector2 = get_embeddings(face2)
        # getting the distance (distance measure can be changed)
        distOfImages = calc_dist(vector1, vector2)
        # getting the match score of the image
        score = get_match_score(distOfImages)
        # passing this as a api response to the client
        return jsonify({"Score": score})
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; app errors still return the traceback.
        return jsonify({'trace': traceback.format_exc()})
def Query5ByLL(lat, lon):
    """Find road ways attached to the non-POI node nearest to (lat, lon).

    Starts with a small search square and grows it (x2.7 per round) until a
    candidate node within the radius is connected to at least one road way.
    Returns the matching rows (wayid, name, isRoad, otherinfo) for that node.
    """
    (y, x) = ut.mapping(lat, lon)
    rad = 10  # initial search radius; grows geometrically each failed round
    queryResult = []
    flag = 1  # 1 = still searching, 0 = road found (terminates outer loop)
    ans = []
    while True:
        # Axis-aligned square around the point, used as an MBR pre-filter.
        cur.execute("set @poly='Polygon((%f %f,%f %f,%f %f,%f %f,%f %f))'" %
                    (x - rad, y + rad, x + rad, y + rad, x + rad, y - rad,
                     x - rad, y - rad, x - rad, y + rad))
        cur.execute(
            'select nodeID,ST_AsText(position) from nonPOIs where MBRContains(ST_GeomFromText(@poly),planaxy)'
        )
        queryResult = cur.fetchall()
        ans = []
        for row in queryResult:
            # Parse WKT "Point(lon lat)" text into floats.
            coordinate = row[1].strip().split(' ')
            lons = float(coordinate[0][6:])
            lats = float(coordinate[1][:-1])
            d = ut.calc_dist(lat, lon, lats, lons)
            # Exact distance check: the MBR square over-approximates the circle.
            if d <= rad:
                ans.append((row[0], (lons, lats), d))
        # Visit candidate nodes nearest-first.
        ls = (sorted(ans, key=operator.itemgetter(2)))
        for each in ls:
            # Look up road ways passing through this node (isroad <> '0').
            cur.execute(
                "select ways.wayid, ways.name, ways.isRoad, ways.otherinfo from waynode, ways where waynode.nodeid=%s and waynode.wayid=ways.wayid and ways.isroad <> '0'"
                % (each[0]))
            queryRes = cur.fetchall()
            if len(queryRes) > 0:
                # NOTE: `ans` is reused here to hold the final way rows,
                # a different shape from the node tuples it held above.
                ans = queryRes
                flag = 0
                break
        if flag == 0:
            break
        else:
            # No road-connected node found in this radius: widen and retry.
            rad = rad * 2.7
    return ans
#for each pixel in mask: ##if mask pixel is not zero: ###closestDist=maxval ###closest=-1 (indexed) ###for each mineral vector: ####compute distance from current pixel's color values to element vector ####if distance is less than closestDist, set closest and closestDist ###set ouput pixel to closestIndex ###set confidence output pixel to closestDist for vector in calibratedVectors: bufImage = np.zeros((tHeight, tWidth), dtype = np.int32) vector["buf"] = bufImage vector["dbuf"] = bufImage for element in calibration: utils.calc_dist(targetMask, vector["dbuf"], element_scans[element], vector[element]) for vector in calibratedVectors: utils.compare_dist(targetMask, outputImage, mineral_dists, vector["index"], vector["dbuf"]) mapImage = Image.new("P", (tWidth, tHeight), 0) mapImage.putpalette(constants.palette) mineralPixelCounts = {} for mineral in mineralNames: mineralPixelCounts[mineral] = 0 d = ImageDraw.ImageDraw(mapImage) for x in range(0, tWidth): for y in range(0, tHeight): color = outputImage[y,x]
def test_calc_dist(self):
    """calc_dist between the first two fixture users must equal exactly 5."""
    fixture = pd.read_json('test_data/test_users_neighbours.json')
    first = fixture.iloc[0]
    second = fixture.iloc[1]
    self.assertEqual(utils.calc_dist(first, second), 5)
def proc_pcqm4m_sp(smiles_list):
    """Generate 3D positions for each SMILES string via OpenBabel.

    Re-optimizes conformers whose minimum interatomic (non-bonded) distance
    falls below args.threshold, retrying up to args.retry_times.  Returns a
    dict mapping each SMILES string to its position tensor/array.  Failures
    are counted and logged but still recorded in the result dict.
    """
    # Running minima of interatomic distances over the whole dataset.
    w_bond = 100
    wo_bond = 100
    smiles2pos = {}
    fail_count = 0
    fail_smiles = []
    for i, smiles in enumerate(smiles_list):
        if i % 1000 == 0:
            logging.info("Processing idx: {}, smiles: {}, fail count so far: {} ...".format(i, smiles, fail_count))
        # ob_mol: OpenBabel's molecule object
        ob_mol = ob.OBMol()
        conversion.ReadString(ob_mol, smiles)
        ob_calc = OpenBabelCalculator(ob_mol, forcefield=args.forcefield, removehs=args.removehs)
        curr_min_dist_w_bond = ob_calc.get_min_dist(with_bond=True)
        curr_min_dist_wo_bond = ob_calc.get_min_dist(with_bond=False)
        py_mol = ob_calc.get_pymol()
        pos = ob_calc.get_pos()
        # NOTE(review): removed a leftover pdb.set_trace() that halted every
        # iteration of this loop.
        if curr_min_dist_w_bond is None or curr_min_dist_wo_bond is None:
            # OpenBabel could not produce distances: record the failure but
            # still keep whatever positions were generated.
            fail_count += 1
            fail_smiles.append(smiles)
            smiles2pos[smiles] = pos
            if i % 1000 == 0:
                logging.info("minimum interatomic distance with bonds so far: {:.4f} ...".format(w_bond))
                logging.info("minimum interatomic distance without bonds so far: {:.4f} ...".format(wo_bond))
            continue
        if curr_min_dist_wo_bond < args.threshold:
            logging.info("Invalid: Minimum interatomic distance without bonds: {:.4f}, Smiles: {} ...".format(curr_min_dist_wo_bond, smiles))
            # Conformer has atoms too close together: re-optimize until the
            # distance clears the threshold or retries are exhausted.
            retry = 0
            curr_min_dist_wo_bond_ = curr_min_dist_wo_bond
            while curr_min_dist_wo_bond_ < args.threshold and retry <= args.retry_times:
                py_mol_, pos_, curr_min_dist_w_bond_, curr_min_dist_wo_bond_ = optimize(smiles)
                retry += 1
            py_mol = py_mol_
            pos = pos_
            curr_min_dist_w_bond = curr_min_dist_w_bond_
            curr_min_dist_wo_bond = curr_min_dist_wo_bond_
            logging.info(" Valid: Minimum interatomic distance with bonds: {:.4f}, Smiles: {} ...".format(curr_min_dist_wo_bond, smiles))
        # Sanity check: recomputed minimum distance must match the reported one.
        # NOTE(review): exact float equality — consider math.isclose if the two
        # code paths can differ in rounding.
        min_dist = calc_dist(py_mol, pos, with_bond=False)
        try:
            assert min_dist == curr_min_dist_wo_bond
        except AssertionError:
            # Narrowed from a bare `except:` so unrelated errors propagate.
            logging.info("Fail smiles: {} ...".format(smiles))
            fail_count += 1
            fail_smiles.append(smiles)
        smiles2pos[smiles] = pos
        if curr_min_dist_w_bond < w_bond:
            w_bond = curr_min_dist_w_bond
        if curr_min_dist_wo_bond < wo_bond:
            wo_bond = curr_min_dist_wo_bond
        if i % 1000 == 0:
            logging.info("minimum interatomic distance with bonds so far: {:.4f} ...".format(w_bond))
            logging.info("minimum interatomic distance without bonds so far: {:.4f} ...".format(wo_bond))
        if args.max_size >= 0 and i >= args.max_size:
            break
    return smiles2pos
def statis_qm9(filepath):
    """Regenerate 3D positions for QM9 molecules from an SDF file.

    For each molecule, rebuilds a conformer from its SMILES via OpenBabel,
    re-optimizing when atoms are closer than args.threshold, and saves the
    resulting smiles->positions dict to ./qm9_pos.pt.
    """
    supplier = Chem.SDMolSupplier(filepath, removeHs=False, sanitize=False)
    # Running minima of interatomic distances over the whole dataset.
    w_bond = 100
    wo_bond = 100
    smiles2pos = {}
    for i, mol in enumerate(supplier):
        if i < args.start_idx:
            continue
        try:
            smiles = Chem.MolToSmiles(mol)
        except Exception:
            # Narrowed from a bare `except:`; a molecule whose SMILES cannot
            # be produced is processed with smiles = None.
            logging.info("Processing mol {}, parse smiles fail ...".format(i))
            smiles = None
        if i % 1000 == 0:
            logging.info("Processing mol {}, smiles: {} ...".format(i, smiles))
        # ob_mol: OpenBabel's molecule object
        ob_mol = ob.OBMol()
        conversion.ReadString(ob_mol, smiles)
        ob_calc = OpenBabelCalculator(ob_mol, forcefield=args.forcefield, removehs=args.removehs)
        py_mol = ob_calc.get_pymol()
        pos = ob_calc.get_pos()
        # sanitize
        if mol is None:
            # NOTE(review): fixed a NameError — the original logged the
            # undefined name `can_smiles` here.
            logging.info("idx: {}, smiles: {} cannot be parsed ...".format(i, smiles))
            continue
        N = mol.GetNumAtoms()
        # Atom-count consistency between RDKit molecule and generated positions.
        # NOTE(review): replaced a leftover `pdb.set_trace()` debug handler.
        if N != pos.size(0):
            logging.info("idx: {}, atom count mismatch: {} vs {} ...".format(i, N, pos.size(0)))
        curr_min_dist_w_bond = ob_calc.get_min_dist(with_bond=True)
        curr_min_dist_wo_bond = ob_calc.get_min_dist(with_bond=False)
        if curr_min_dist_wo_bond < args.threshold:
            logging.info("Invalid: Minimum interatomic distance without bonds: {:.4f}, Smiles: {} ...".format(curr_min_dist_wo_bond, smiles))
            # Conformer has atoms too close together: re-optimize until the
            # distance clears the threshold or retries are exhausted.
            retry = 0
            curr_min_dist_wo_bond_ = curr_min_dist_wo_bond
            while curr_min_dist_wo_bond_ < args.threshold and retry <= args.retry_times:
                py_mol_, pos_, curr_min_dist_w_bond_, curr_min_dist_wo_bond_ = optimize(smiles)
                retry += 1
            py_mol = py_mol_
            pos = pos_
            curr_min_dist_w_bond = curr_min_dist_w_bond_
            curr_min_dist_wo_bond = curr_min_dist_wo_bond_
            logging.info(" Valid: Minimum interatomic distance without bonds: {:.4f}, Smiles: {} ...".format(curr_min_dist_wo_bond, smiles))
        # Sanity check: recomputed minimum distance must match the reported one.
        min_dist = calc_dist(py_mol, pos, with_bond=False)
        try:
            assert min_dist == curr_min_dist_wo_bond
        except AssertionError:
            # Narrowed from a bare `except:` so unrelated errors propagate.
            logging.info("Fail smiles: {} ...".format(smiles))
        smiles2pos[smiles] = pos
        if curr_min_dist_w_bond < w_bond:
            w_bond = curr_min_dist_w_bond
        if curr_min_dist_wo_bond < wo_bond:
            wo_bond = curr_min_dist_wo_bond
        if i % 1000 == 0:
            logging.info("minimum interatomic distance with bonds so far: {:.4f} ...".format(w_bond))
            logging.info("minimum interatomic distance without bonds so far: {:.4f} ...".format(wo_bond))
        if args.max_size >= 0 and i >= args.max_size:
            break
    out_filepath = "./qm9_pos.pt"
    torch.save(smiles2pos, out_filepath)
from numpy import mean
import matplotlib.pyplot as plt

# cluster points
# Run both the hand-rolled k-means and scipy's kmeans 10 times each on the
# whitened data to compare centroid quality.
my_data = []
sy_data = []
for i in range(10):
    my_data.append(kmeans_cluster_assignment(3, whiten(hard_y)))
    sy_data.append(kmeans(whiten(hard_y), 3))
centroids_true = find_centroids(whiten(hard_y), y_true)
diff = []
for i in range(10):
    # NOTE(review): the loop variable `true` is never used in the body, so each
    # inner iteration appends the same value len(centroids_true) times —
    # confirm whether this repetition (or a per-centroid comparison) was meant.
    for true in centroids_true:
        # Sort centroids by mean so the two runs are compared in a stable order.
        my = calc_dist(centroids_true, sorted(my_data[i][1], key=mean))
        sy = calc_dist(centroids_true, sorted(sy_data[i][0], key=mean))
        diff.append(sy - my)
# Positive mean => scipy centroids are farther from the true centroids.
print(mean(diff))
fig = plt.figure(figsize=FIG_SIZE)
plt.xlabel('X_axis')
plt.ylabel('Y_axis')
plt.grid(True)
plt.title('Centroids - all points were whitened for consistency')
whitened = whiten(hard_y)
# Find 2 clusters in the data
# NOTE(review): the comment says 2 clusters but k=3 is passed — confirm which
# is intended.
codebook, distortion = kmeans(whitened, 3)
# Plot whitened data and cluster centers in red