def format_and_merge_data(data_dict, objects):
    """Format the not-yet-exported objects and fold them into data_dict.

    Objects whose "uid" already appears in the module-level ``exported_objects``
    list are skipped; the remaining uids are recorded so later calls do not
    export them again.

    :param data_dict: accumulator dict that merge_data() mutates in place
    :param objects: iterable of object dicts, each carrying a "uid" key
    """
    global exported_objects
    pending = [obj for obj in objects if obj["uid"] not in exported_objects]
    # Remember these uids before formatting so re-entrant calls see them.
    exported_objects.extend(obj["uid"] for obj in pending)
    merge_data(data_dict, format_objects(pending))
def get_group_objects(data_dict, api_type, group, client, unexportable_objects):
    """Fetch a group object (and, recursively, its include/except groups) from
    the management server and export its member objects.

    :param data_dict: export accumulator passed through to the exporters
    :param api_type: API type name of the group (e.g. "group",
        "group-with-exclusion")
    :param group: dict with at least "uid" and "name" of the group to fetch
    :param client: API client exposing api_call() and api_version
    :param unexportable_objects: list that is extended in place with objects
        that cannot be exported
    :return: list of the fetched group dict(s); empty list when the show call
        fails
    """
    reply = client.api_call("show-" + api_type, {"uid": group["uid"], "details-level": "full"})
    if not reply.success:
        debug_log("Failed to retrieve group named '" + group["name"]
                  + "'! Error: " + str(reply.error_message)
                  + ". Group was not exported!", True, True)
        return []

    group_object = reply.data

    if api_type == "group-with-exclusion":
        # Recurse into the include/except sub-groups (unless they are the
        # special CpmiAnyObject), then flatten each field to the bare name.
        include_group_object = None
        exclude_group_object = None
        if "include" in group_object:
            if group_object["include"]["type"] != "CpmiAnyObject":
                include_group_object = get_group_objects(
                    data_dict, group_object["include"]["type"],
                    group_object["include"], client, unexportable_objects)
            group_object["include"] = group_object["include"]["name"]
        if "except" in group_object:
            if group_object["except"]["type"] != "CpmiAnyObject":
                exclude_group_object = get_group_objects(
                    data_dict, group_object["except"]["type"],
                    group_object["except"], client, unexportable_objects)
            group_object["except"] = group_object["except"]["name"]
        results = [group_object]
        if include_group_object:
            results.extend(include_group_object)
        if exclude_group_object:
            results.extend(exclude_group_object)
        return results

    # Collect every member object from each member-holding field of the group.
    members = []
    for container in group_objects_field[api_type]:
        members.extend(group_object[container])

    object_dictionary, group_unexportable_objects, exportable_types = \
        get_objects(members, client.api_version)

    for member_object in members:
        if should_export(member_object):
            check_for_export_error(member_object, client)

    merge_data(unexportable_objects, group_unexportable_objects)

    # Rewrite member names so the group references unexportable objects by
    # their (possibly adjusted) export name.
    for unexportable_object in unexportable_objects:
        for container in group_objects_field[api_type]:
            for member in group_object[container]:
                if unexportable_object["uid"] == member["uid"]:
                    member["name"] = unexportable_object["name"]
                    break

    for exportable_type in exportable_types:
        debug_log("Exporting "
                  + singular_to_plural_dictionary[client.api_version][exportable_type]
                  + " from group [" + group["name"] + "]", True)
        export_general_objects(data_dict, exportable_type,
                               object_dictionary[exportable_type],
                               unexportable_objects, client)

    return [group_object]
def face_search(self, face_clusters):
    """Match face clusters against the index and split them into known vs new.

    For each cluster, every face feature is searched in the index
    (``self.search``). Hits under ``self.threshold`` vote for a candidate
    identity; votes for the same index id are pooled by averaging their
    distances. The top-voted candidate either labels the cluster as a known
    person or merges it into an already-identified cluster.

    :param face_clusters: iterable of cluster dicts, each with a 'person' list
        of face dicts carrying a 'feat' feature vector
    :return: (known_people, new_people) — labelled clusters and clusters with
        no match under the threshold
    """
    known_people = []
    new_people = []
    for p in face_clusters:
        feats = np.asarray([f['feat'] for f in p['person']], dtype=np.float32)
        ranks = []
        for idx, dist, metadata in self.search(feats):
            print(idx, dist, metadata)
            for i, d in enumerate(dist):
                if d >= self.threshold:
                    continue
                for entry in ranks:
                    if entry['idx'] == idx[i]:
                        # Running average of the distances for this candidate.
                        entry['dist'] = (entry['dist'] * entry['count'] + d) / (entry['count'] + 1)
                        entry['count'] += 1
                        break
                else:
                    ranks.append({
                        'idx': idx[i],
                        'dist': d,
                        'meta': metadata[i].decode()[:-1],  # discard the '\n' at the end
                        'count': 1,
                    })
        if not ranks:
            # No hit under the threshold anywhere in the cluster: new person.
            new_people.append(p)
        else:
            # Most votes first; BUGFIX: ties broken by SMALLEST average
            # distance (the old reverse=True on (count, dist) preferred the
            # worst match among equally-voted candidates).
            ranks.sort(key=lambda r: (-r['count'], r['dist']))
            print(ranks)
            l_id = self.find_people(ranks[0]['meta'], known_people)
            if l_id < 0:
                # First cluster for this identity.
                p.update({'id': ranks[0]['meta']})
                known_people.append(p)
            else:
                # Same person seen before: merge the clusters.
                known_people[l_id] = merge_data(known_people[l_id], p, ['time'])
    return known_people, new_people
def submit():
    """Handle a point-submission request for the pending session action.

    Rejects callers without a logged-in session, flattens the submitted
    strokes into one point list, merges it, and dispatches to the handler
    for the action stored in the session ('login'/'register'/'update').

    :return: the handler's response, or an error string for illegal requests
    """
    # Guard clause: only requests from an established session are served.
    if 'username' not in session:  # idiomatic membership test, not .keys()
        return 'illegal request'
    data = json.loads(request.get_data())
    points = []
    for stroke in data:
        points += stroke['points']
    points = merge_data(points)
    # pop() both reads and clears the pending action in one step.
    action = session.pop('action')
    handlers = {'login': login, 'register': register, 'update': update}
    handler = handlers.get(action)
    if handler is None:
        return 'unexpected action'
    return handler(points)
def unique_people_search(uuf, ruf, fdb, threshold):
    """Identify unique people against a face database by brute-force search.

    :param uuf: upload unique faces — clusters, each a dict with a 'person'
        list of face dicts carrying a 'feat' feature vector
    :param ruf: refined unique faces — clusters in the same format
    :param fdb: face database passed through to bruteforce()
    :param threshold: cosine-distance threshold passed through to bruteforce()
    :return: (known_people, unidentified_people)
    """
    def best_cluster_match(person):
        # Scan every face in the cluster and keep the closest database hit.
        min_dst = 1000
        best_match = None
        for face in person['person']:
            candidate, dst = bruteforce(face['feat'], fdb, threshold)
            if dst < min_dst:
                min_dst = dst
                best_match = candidate
        return best_match, min_dst

    known_people = []
    unidentified_people = []

    for p in uuf:
        best_match, min_dst = best_cluster_match(p)
        if best_match is not None:
            p.update({'id': best_match})
            print(min_dst)
            known_people.append(p)
        # NOTE(review): unmatched upload clusters are dropped here (added to
        # neither list) — preserved from the original; confirm intentional.

    for p in ruf:
        best_match, min_dst = best_cluster_match(p)
        if best_match is not None:
            l_id = find_people(best_match, known_people)
            if l_id < 0:
                # Identity not seen yet in this pass.
                p.update({'id': best_match})
                known_people.append(p)
            else:
                # Same person as an earlier cluster: merge them.
                known_people[l_id] = merge_data(known_people[l_id], p, ['time'])
        else:
            unidentified_people.append(p)

    return known_people, unidentified_people
""" c0_m7_prelim_weights, c1_m7_prelim_weights = run_prelim_m7(m7_joint_data, c0_data_All, c1_data_All) # Loop over target children for i in range(len(c0_IDs_1Out)): c0_data = load_data(c0_IDs_1Out[i], 0, data_proportion=[0.8,0.8,1,0.8]) c1_data = load_data(c1_IDs_1Out[i], 1, data_proportion=[0.8,0.8,1,0.8]) c0_data_targetRep = load_data(c0_IDs_targetRep[i], 0, data_proportion=[0.2,0,0.2,0.8]) c1_data_targetRep = load_data(c1_IDs_targetRep[i], 1, data_proportion=[0.2,0,0.2,0.8]) c0_data_targetOnly = load_data(c0_IDs_targetOnly[i], 0, data_proportion=[0.2,0,0.2,0.8]) c1_data_targetOnly = load_data(c1_IDs_targetOnly[i], 1, data_proportion=[0.2,0,0.2,0.8]) c0_data_merged = merge_data(c0_data, c1_data) c1_data_merged = merge_data(c1_data, c0_data) # 10-fold k-validation for loop in range(10): print('---------- CHILD {} ----------'.format(i+1)) print('---------- FOLD {} ----------'.format(loop+1)) c0_m3_weights = None c1_m3_weights = None """ Model 1 - Within Culture / SI: Train and test on each culture """