import re

import numpy as np
from sklearn.metrics.pairwise import cosine_distances

# `utils`, `getSeq`, `preferances` and `Matcher` are defined elsewhere in the project.


def ontology_alignment(model, ontoTerms_a, ontoTerms_b, words, ceil=0.5):
    with open(ontoTerms_a) as f:
        ontoText_a = f.readlines()
    with open(ontoTerms_b) as f:
        ontoText_b = f.readlines()

    # Remove whitespace characters like `\n` at the end of each line.
    ontoText_a = [x.strip() for x in ontoText_a]
    ontoText_b = [x.strip() for x in ontoText_b]

    # Pair up exact matches first (distance 0.0) and drop them from both lists.
    # Iterate over copies so that removing items does not skip elements.
    whole = []
    for text_a in list(ontoText_a):
        for text_b in list(ontoText_b):
            txt_a = re.sub(' +', ' ', text_a)
            txt_b = re.sub(' +', ' ', text_b)
            if txt_a == txt_b:
                whole.append([text_a, text_b, 0.0])
                try:
                    ontoText_a.remove(text_a)
                except ValueError:
                    pass
                try:
                    ontoText_b.remove(text_b)
                except ValueError:
                    pass

    # Transform to word & mask vectors to apply "feedforward_function".
    ontoData_a, ontoData_b = [], []
    for sentence in ontoText_a:
        ontoData_a.append(getSeq(sentence, words))
    for sentence in ontoText_b:
        ontoData_b.append(getSeq(sentence, words))

    x1, m1 = utils.prepare_data(ontoData_a)
    x2, m2 = utils.prepare_data(ontoData_b)
    OntoEmbg_a = model.feedforward_function(x1, m1)
    OntoEmbg_b = model.feedforward_function(x2, m2)

    # Compute the cosine distances and build preference lists for both sides.
    dist = cosine_distances(OntoEmbg_a, OntoEmbg_b)
    disT = np.transpose(dist)
    males = preferances(dist)
    females = preferances(disT)
    del disT

    match = Matcher(males, females)
    marriage = match()
    del males
    del females

    # Keep only matched pairs whose distance is below the `ceil` threshold.
    for key, value in marriage.items():
        man = ontoText_a[value]
        woman = ontoText_b[key]
        value = dist[value][key]
        if value < ceil:
            whole.append([man, woman, value])
    return whole
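The `preferances` helper is referenced above but not shown. A minimal sketch of what it might do, assuming `Matcher` accepts a dict mapping each row index to the column indices ordered from smallest to largest cosine distance (an assumption, not the original helper):

import numpy as np

def preferances(dist):
    # Hypothetical sketch: rank the columns of each row by distance so the
    # closest embedding comes first in that row's preference list.
    return {i: list(np.argsort(row)) for i, row in enumerate(dist)}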
def do_match():
    print("Mentors:")
    print(MR)
    print("Mentees:")
    print(ME)
    # Initialize Matcher with preference lists for both mentors and mentees.
    match = Matcher(MR, ME)
    return match()
def get(self, user_id, max_return):
    try:
        oe = os.environ
        conn = psycopg2.connect(database=oe['DB_NAME'],
                                user=oe['DB_USER'],
                                password=oe['DB_PASSWORD'],
                                host=oe['DB_HOST'])
    except Exception as e:
        return str(e)
    mc = Matcher(conn)
    return json.dumps(mc.query(user_id, max_return))
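The handler reads its Postgres connection settings from the environment. A minimal sketch of the variables it expects (the values below are placeholders, not taken from the original project):

import os

# Hypothetical values purely for illustration; set these before the handler runs.
os.environ.setdefault('DB_NAME', 'matcher_db')
os.environ.setdefault('DB_USER', 'matcher_user')
os.environ.setdefault('DB_PASSWORD', 'change-me')
os.environ.setdefault('DB_HOST', 'localhost')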
def __init__(self, cfg):
    self.cfg_ = self.build_cfg(cfg)

    self.detector_ = cv2.FastFeatureDetector_create(
        threshold=19, nonmaxSuppression=True)
    self.extractor_ = cv2.ORB_create(2048, edgeThreshold=19)
    # self.extractor_ = cv2.xfeatures2d.SURF_create()
    self.matcher_ = Matcher(ex=self.extractor_)
    self.tracker_ = Tracker(pLK=cfg['pLK'])
    self.kf_ = build_ekf()
    self.db_ = self.build_db()
    self.state_ = PipelineState.INIT

    # higher-level handles?
    self.initializer_ = MapInitializer(
        db=self.build_db(),
        matcher=self.matcher_,
        tracker=self.tracker_,
        cfg=self.cfg_)
def __init__(self, gpu_id=0, track_model=None, pose_model=None,
             embedding_model=None):
    # The *_flag attributes are assumed to be set elsewhere on the class.
    if self.tracker_flag:
        self.tracker = SiamFCTracker(gpu_id, track_model)
    self.posenet = PoseNet(gpu_id, pose_model)
    self.matcher = Matcher()
    print('----------------------------------------')
    print('Flag parameters are set as follows:')
    print('Tracker flag: {}'.format(self.tracker_flag))
    print('Tracker update flag: {}'.format(self.tracker_update_flag))
    print('Decrease tracker flag: {}'.format(self.descrease_tracker_flag))
    print('New embedding(with pose) flag: {}'.format(self.new_embedding_flag))
    print('----------------------------------------')
def __init__(
        self,
        gpu_id=[0, 0, 0, 0],
        flag=[True, False, True, False],
        # track_model='/export/home/zby/SiamFC/models/output/siamfc_35.pth',
        track_model='/export/home/zby/SiamFC/models/output/siamfc_20.pth',
        detection_model='/export/home/zby/SiamFC/models/res101_old/pascal_voc/faster_rcnn_1_25_4379.pth',
        pose_model='/export/home/zby/SiamFC/data/models/final_new.pth.tar',
        embedding_model='/export/home/zby/SiamFC/models/embedding_model.pth'):
    if flag[0]:
        self.tracker = SiamFCTracker(gpu_id[0], track_model)  # input RGB
    if flag[1]:
        self.detector = Detector(gpu_id[1], detection_model)  # input BGR
    if flag[2]:
        self.posenet = PoseNet(gpu_id[2], pose_model)  # input BGR
    if flag[3]:
        self.embedder = EmbeddingNet(gpu_id[3], embedding_model)
    # self.tracker = SiamFCTracker(gpu_id[0], track_model)
    self.matcher = Matcher()
    print('----------------------------------------')
def __init__(
        self,
        gpu_id=0,
        track_model='/export/home/zby/SiamFC/models/output/siamfc_35_old.pth',
        # track_model='/export/home/zby/SiamFC/data/models/siamfc_cpm_368_5.pth',
        pose_model='/export/home/zby/SiamFC/data/models/final_new.pth.tar',
        embedding_model='/export/home/zby/SiamFC/models/embedding_model.pth'):
    # The *_flag attributes are assumed to be set elsewhere on the class.
    if self.tracker_flag:
        self.tracker = SiamFCTracker(gpu_id, track_model)
    self.posenet = PoseNet(gpu_id, pose_model)
    if not self.new_embedding_flag:
        self.embedder = EmbeddingNet(gpu_id, embedding_model)
    self.matcher = Matcher()
    print('----------------------------------------')
    print('Flag parameters are set as follows:')
    print('Tracker flag: {}'.format(self.tracker_flag))
    print('Tracker update flag: {}'.format(self.tracker_update_flag))
    print('Decrease tracker flag: {}'.format(self.descrease_tracker_flag))
    print('New embedding(with pose) flag: {}'.format(self.new_embedding_flag))
    print('----------------------------------------')
from match import Matcher

# The men and their list of ordered spousal preferences.
M = dict((m, prefs.split(', ')) for [m, prefs] in
         (line.rstrip().split(': ') for line in open('men.txt')))

# The women and their list of ordered spousal preferences.
W = dict((m, prefs.split(', ')) for [m, prefs] in
         (line.rstrip().split(': ') for line in open('women.txt')))

# Initialize Matcher with preference lists for both men and women.
match = Matcher(M, W)


# Check if the mapping of wives to husbands is stable.
def is_stable(wives, verbose=False):
    for w, m in wives.items():
        i = M[m].index(w)
        preferred = M[m][:i]
        for p in preferred:
            h = wives[p]
            if W[p].index(m) < W[p].index(h):
                msg = ("{}'s marriage to {} is unstable: "
                       "{} prefers {} over {} and {} prefers "
                       "{} over her current husband {}")
                if verbose:
                    print(msg.format(m, w, m, p, w, p, m, h))
                return False
    return True
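A minimal usage sketch (not part of the original snippet), assuming the `men.txt`/`women.txt` preference files above: run the matcher and check the result for stability.

wives = match()            # mapping of each wife to her husband
print(is_stable(wives))    # a Gale-Shapley matching should print True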
import cv2
import imutils
import numpy as np
import rospy
from geometry_msgs.msg import Point
from std_msgs.msg import String

from background_subtraction import BackgroundSubtractor
from match import Matcher

MIN_AREA = 10000
MAX_DIST = 10

k_dilate = cv2.getStructuringElement(cv2.MORPH_DILATE, (5, 5), (2, 2))
matcher = Matcher()


def within(low, x, high):
    return low < x and x < high


def x2t(x, dim, fov):
    # Convert a pixel column x into a bearing angle, given the frame
    # width `dim` and the horizontal field of view `fov` (radians).
    theta = fov / 2  # half the field of view
    a = float(x) / dim  # fraction of the frame width
    return np.arctan2(2 * a * np.sin(theta) - np.sin(theta), np.cos(theta))


def cnt_avg_col(cnt, img):
    mask = np.zeros(img.shape[:-1], np.uint8)
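A quick sanity check on `x2t` (not from the original source; the 60° field of view is just an example value): the left edge, centre column, and right edge of the frame map to -fov/2, 0 and +fov/2 respectively.

import numpy as np

fov = np.radians(60)         # example horizontal field of view
print(x2t(0, 640, fov))      # ~ -0.5236 rad (-fov/2, left edge)
print(x2t(320, 640, fov))    #    0.0    rad (optical axis)
print(x2t(640, 640, fov))    # ~ +0.5236 rad (+fov/2, right edge)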
def main():
    m = Matcher()
    m.fit(TRAIN_FILE0, TRAIN_FILE1, TRAIN_MATCH_FILE)
    m.transform(TEST_FILE0, TEST_FILE1)
    m.write(OUTPUT_FILE)
# The mentors and their list of ordered mentee preferences.
MR = dict((m, prefs.split(', ')) for [m, prefs] in
          (line.rstrip().split(': ') for line in open('mentors.txt')))

# The mentees and their list of ordered mentor preferences.
ME = dict((m, prefs.split(', ')) for [m, prefs] in
          (line.rstrip().split(': ') for line in open('mentees.txt')))

previous_mentee_size = None
final_mentee_mentors = dict()
i = 0
while len(ME) > 0:
    i += 1
    previous_mentee_size = len(ME)
    match = Matcher(MR, ME)
    matches = match()
    final_mentee_mentors.update(matches)
    for mentee, mentor in matches.items():
        # This mentee already found a mentor for themselves. Delete from matching.
        ME.pop(mentee, True)
        # Delete this mentee from all mentor selections, so that mentors can
        # fall back to their other selections.
        # TODO: what to do with mentees that were not selected by any mentor?
        new_MR = MR.copy()  # shallow copy: the preference lists are shared with MR
        for mrk, mrv in MR.items():
            if mentee in mrv:
                new_MR.get(mrk).remove(mentee)
def setup(v_, j_, flipped):
    # the volunteers and their list of ordered job preferences
    # v_ = dict((m, prefs.split(', ')) for [m, prefs] in (
    #     line.rstrip().split(': ') for line in open('volunteers.short.txt')))
    # j_ = dict((m, prefs.split(', ')) for [m, prefs] in (
    #     line.rstrip().split(': ') for line in open('jobs.txt')))
    volunteers = list(v_.keys())
    jobs = list(j_.keys())
    # print('type v_["abe"]', type(v_['abe']), volunteers)

    # remove any missing job names from volunteers
    for v in volunteers:
        NA = v_[v][-1]
        v_[v] = list(filter(lambda j: j in jobs, v_[v][:-1]))
        v_[v].append(NA)

    # remove any missing volunteer names from jobs
    for j in jobs:
        NA = j_[j][-1]
        j_[j] = list(filter(lambda v: v in volunteers, j_[j][:-1]))
        j_[j].append(NA)

    # Wrap each job in a Person (name, NA index) keyed by its preference list.
    J = {}
    prefs = v_[list(v_.keys())[0]]
    print('type(prefs)', type(prefs))
    # prefs = prefs_.split(', ')
    # NA = prefs[-1]
    for p in jobs:
        w__ = j_[p]
        J[Person(p, int(w__[-1]))] = w__[:-1]
    print(f'initial J keys {J.keys()}')

    # Wrap each volunteer in a Person and translate its job names into Person keys.
    V = {}
    prefs = j_[list(j_.keys())[0]]
    # prefs = prefs_.split(', ')
    for p in volunteers:
        m__ = v_.get(p, ['0'])
        person = Person(p, int(m__[-1]))
        V[person] = []
        for n in m__[:-1]:
            # print('n', n)
            job = list(filter(lambda j: j.n == n, J.keys()))[0]
            V[person].append(job)

    # Translate job preference lists from volunteer names into Person keys.
    for j, prefs in J.items():
        J[j] = []
        for n in prefs:
            volunteer = list(filter(lambda m: m.n == n, V.keys()))[0]
            J[j].append(volunteer)

    # for each volunteer construct a list of forbidden jobs
    forbidden = {}  # { 'dan': ['gay', 'eve', 'abi'], 'hal': ['eve'] }
    for v, prefs in V.items():
        NA = v.NA
        # all jobs at or over the NA index are forbidden
        forbidden[v] = prefs[NA:]
        # n = random.randint(0, len(prefs) - 1)
        # forbidden[m] = random.sample(prefs, n)  # random sample of n wives

    forbidden_v = {}  # { 'dan': ['gay', 'eve', 'abi'], 'hal': ['eve'] }
    for j, prefs in J.items():
        NA = j.NA
        # all volunteers at or over the NA index are forbidden
        forbidden_v[j] = prefs[NA:]

    C = defaultdict(list)
    jKeys = set()
    loop = 0
    while len(J) > 0:
        print("V & J")
        print(V)
        print(J)
        match = Matcher(V, J, forbidden)  # , forbidden_v)
        # match volunteers and jobs; returns a mapping of jobs to volunteers
        matches = match()
        assert match.is_stable(matches)  # should be a stable matching
        print('stable match')
        print(f'loop {loop} list(matches keys) {list(matches.keys())}')
        loop += 1
        # if loop > 2:
        #     break
        # if len(C) == 0:
        #     C = dict((value, [key]) for key, value in enumerate(matches))
        #     print('Initial C.keys()', C.keys())
        # else:
        for _, key in enumerate(matches.items()):
            C[key[1]].append(key[0])
        print('Initial C.keys()', C.keys())
        print('Initial C.values()', C.values())
        jKeys |= set(matches.keys())
        print(f"len jKeys {len(jKeys)} len(J) {len(J)} jKeys {jKeys} ")

        # Keep only the jobs that have not been matched yet.
        J_ = copy.copy(J)
        J = {}
        for key, value in enumerate(J_.items()):
            print(f'J.items() key {key} value[0] {value[0]} '
                  f'type(value[0]) {type(value[0])} value[1] {value[1]}')
            if value[0] in jKeys:
                print(f'value {value[0]} in jKeys)')
            else:
                print(f'value {value[0]} NOT in jKeys)')
                J[value[0]] = value[1]
        print(f'len filtered J {len(J)} J {J}')
        if len(J) == 0:
            break

        # Restrict each volunteer's preferences to the remaining jobs.
        V_ = copy.copy(V)
        for v, prefs in V_.items():
            # print(f'k,v in V k {k} v {v}')
            prefs = [p for p in prefs if p in list(J.keys())]
            print(f'new prefs {prefs}')
            V[v] = prefs
        # V = {k: v for k, v in mydict.items() if k.startswith('foo')}
        # J = dict((key, value) in enumerate(J.items()))
        # J = dict((key, value) in enumerate(J.items()) if key not in jKeys)
        # J = dict(filter(lamba j, v: j not in jKeys, enumerate(J))

    # if len(J) > 0:
    #     print("len(J) > 0")
    #     V_ = sorted(C.items(), key=lambda kv: len(kv[1]))[:len(J)]
    #     match = Matcher(V_, J, forbidden, forbidden_v)
    #     # match volunteers and jobs; returns a mapping of jobs to volunteers
    #     matches = match()
    #     for key, value in enumerate(matches):
    #         C[value].append(key)

    # print('jobs', jobs)
    print('C.keys()', C.keys())
    print([(key, value) for key, value in enumerate(C)])
    if flipped:
        a = [([j.n for j in value[1]], value[0].n)
             for key, value in enumerate(C.items())]
    else:
        a = [(value[0].n, [j.n for j in value[1]])
             for key, value in enumerate(C.items())]
    # a = [(key.n, [j.n for j in C[key]]) for key in list(C.keys())]
    # a = [(matches[key].n, key.n) for key in list(matches.keys())]
    return jsonify(a)
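`setup` relies on a `Person` type with a display name `n` and a cut-off index `NA` (everything at or past `NA` in a preference list is treated as forbidden). It isn't shown here; a minimal hashable stand-in, assuming nothing beyond the two fields the function actually touches, might look like this:

from collections import namedtuple

# Hypothetical stand-in for the project's Person class: `n` is the display
# name and `NA` is the index of the "not acceptable" cut-off.
Person = namedtuple('Person', ['n', 'NA'])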
def get(self, user_id, max_return, needs):
    mc = Matcher()
    # return json.dumps(mc.query(user_id, max_return, needs))
    return json.dumps(mc.query(user_id, max_return, 'all'))
import random

from match import Matcher

# The men and their list of ordered spousal preferences.
M = dict((m, prefs.split(', ')) for [m, prefs] in
         (line.rstrip().split(': ') for line in open('men.txt')))

# The women and their list of ordered spousal preferences.
W = dict((m, prefs.split(', ')) for [m, prefs] in
         (line.rstrip().split(': ') for line in open('women.txt')))

# For each man construct a random list of forbidden wives.
forbidden = {}  # e.g. { 'dan': ['gay', 'eve', 'abi'], 'hal': ['eve'] }
for m, prefs in M.items():
    n = random.randint(0, len(prefs) - 1)
    forbidden[m] = random.sample(prefs, n)  # random sample of n wives

match = Matcher(M, W, forbidden)

# Match men and women; returns a mapping of wives to husbands.
wives = match()
assert match.is_stable(wives)  # should be a stable matching

# Swap the husbands of two wives, which should make the matching unstable.
a, b = random.sample(list(wives.keys()), 2)
wives[b], wives[a] = wives[a], wives[b]
match.is_stable(wives, verbose=True)