import cv2
import numpy as np


def main(args):
    detector = MTCNN()
    embedder = ArcFace(args.model)
    identifier = Identifier(args.data_path)
    vc = cv2.VideoCapture(0)
    while vc.isOpened():
        isSuccess, frame = vc.read()
        if not isSuccess:
            # Camera returned no frame; bail out instead of passing None along.
            break
        bbox, face = detector.align(frame)
        if face is not None:
            rgb = np.array(face)[..., ::-1]  # OpenCV gives BGR; flip to RGB for the embedder
            feature = embedder.get_feature(rgb)
            name, value = identifier.process(feature)
            print(name, value)
            x0, y0, x1, y1, _ = bbox
            cv2.rectangle(frame, (int(x0), int(y0)), (int(x1), int(y1)), (0, 255, 0), 2)
            cv2.putText(frame, name, (int(x0), int(y0)),
                        cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2)
        cv2.imshow("Frame", frame)
        print('===========')
        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            break
    vc.release()
    cv2.destroyAllWindows()

def test_papasevent(self):
    # create a dummy papasevent
    papasevent = PapasEvent(0)
    ecals = dict()
    tracks = dict()
    mixed = dict()
    for i in range(0, 2):
        uid = Identifier.make_id(Identifier.PFOBJECTTYPE.ECALCLUSTER, i, 't', 4.5)
        ecals[uid] = uid
        papasevent.history[uid] = Node(uid)
        uidt = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, i, 's', 4.5)
        tracks[uidt] = uidt
        papasevent.history[uidt] = Node(uidt)
        papasevent.history[uidt].add_child(papasevent.history[uid])
    lastid = Identifier.make_id(Identifier.PFOBJECTTYPE.ECALCLUSTER, 3, 't', 3)
    ecals[lastid] = lastid
    papasevent.history[lastid] = Node(lastid)
    papasevent.add_collection(ecals)
    papasevent.add_collection(tracks)

    # create HistoryHelper
    hhelper = HistoryHelper(papasevent)

    # get all ids in event
    ids = hhelper.event_ids()
    self.assertTrue(len(ids) == 5)

    # check id_from_pretty
    self.assertTrue(hhelper.id_from_pretty('et3') == lastid)

    # check get_linked_ids
    linked = hhelper.get_linked_ids(lastid)  # everything linked to lastid (which is just lastid)
    self.assertTrue(linked[0] == lastid and len(linked) == 1)
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="undirected")[1] ==
        hhelper.id_from_pretty('ts0'))
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="parents") ==
        hhelper.get_linked_ids(ids[0], direction="undirected"))
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="children") ==
        [hhelper.id_from_pretty('et0')])

    # filter_ids
    self.assertTrue(len(hhelper.filter_ids(ids, 'ts')) == 2)
    self.assertTrue(hhelper.filter_ids(ids, 'no') == [])

    # get_collection
    self.assertTrue(len(hhelper.get_collection(ids[1:2], 'no')) == 0)
    self.assertTrue(len(hhelper.get_collection([99], 'no')) == 0)
    self.assertTrue(len(hhelper.get_collection(ids[0:2], 'ts')) == 1)

    # get_history_subgroups
    subgroups = hhelper.get_history_subgroups()
    self.assertTrue(len(subgroups) == 3)

    # get_linked_collection
    self.assertTrue(
        hhelper.get_linked_collection(hhelper.id_from_pretty('et0'), 'ts').keys() ==
        [hhelper.id_from_pretty('ts0')])
    self.assertRaises(KeyError, hhelper.get_linked_collection, 0, 'ts')
    self.assertTrue(
        len(hhelper.get_linked_collection(hhelper.id_from_pretty('et0'), 'no')) == 0)

def requestAvatarId(self, credentials):
    key = Key.fromString(credentials.blob)
    fingerprint = key.fingerprint().replace(':', '')
    self.meta.fingerprint = fingerprint
    credentials.fingerprint = fingerprint
    iden = Identifier(credentials, "Key")
    d = iden.identify()
    d.addCallback(self.verify, credentials, key)
    return d  # requestAvatarId must hand the Deferred back to the caller

def test_identifier(self):
    Identifier.reset()
    uid = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 's', 1.23456)
    id1 = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 's', 12.782)
    self.assertTrue(Identifier.pretty(id1) == 'ts2')
    ids = []
    for i in range(-2, 2):
        uid = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 's', 2**i)
        ids.append(uid)
    ids = sorted(ids, reverse=True)
    self.assertTrue(Identifier.pretty(ids[0]) == 'ts6')
    self.assertTrue(Identifier.get_value(ids[0]) == 2.0)
    self.assertTrue(Identifier.pretty(ids[3]) == 'ts3')
    self.assertTrue(Identifier.get_value(ids[3]) == 0.25)

def generateCode(self, p):
    declaredIterator = VariableDeclaration(self.pidentifier, islocal=True)
    declaredIterator.register()
    iteratorIdentifier = Identifier(self.pidentifier)
    instructions.FOR_DOWNTO(p, self.fromValue, self.toValue, iteratorIdentifier, self.commands)
    declaredIterator.delete()

def set_work(self, metadata=None, metadata_client=None, policy=None):
    """If possible, identify a locally known Work that is the same
    title as the title identified by this CustomListEntry.

    :param policy: A PresentationCalculationPolicy, used to determine
        how far to go when looking for equivalent Identifiers.
    """
    _db = Session.object_session(self)
    edition = self.edition
    if not self.edition:
        # This shouldn't happen, but no edition means no work.
        self.work = None
        return self.work

    new_work = None
    if not metadata:
        from ..metadata_layer import Metadata
        metadata = Metadata.from_edition(edition)

    # Try to guess based on metadata, if we can get a high-quality guess.
    potential_license_pools = metadata.guess_license_pools(_db, metadata_client)
    for lp, quality in sorted(potential_license_pools.items(), key=lambda x: -x[1]):
        if lp.deliverable and lp.work and quality >= 0.8:
            # This work has at least one deliverable LicensePool
            # associated with it, so it's likely to be real data
            # and not leftover junk.
            new_work = lp.work
            break

    if not new_work:
        # Try using the less reliable, more expensive method of
        # matching based on equivalent identifiers.
        equivalent_identifier_id_subquery = Identifier.recursively_equivalent_identifier_ids_query(
            self.edition.primary_identifier.id, policy=policy)
        pool_q = _db.query(LicensePool).filter(
            LicensePool.identifier_id.in_(equivalent_identifier_id_subquery)
        ).order_by(
            LicensePool.licenses_available.desc(),
            LicensePool.patrons_in_hold_queue.asc())
        pools = [x for x in pool_q if x.deliverable]
        for pool in pools:
            if pool.deliverable and pool.work:
                new_work = pool.work
                break

    old_work = self.work
    if old_work != new_work:
        if old_work:
            logging.info("Changing work for list entry %r to %r (was %r)",
                         self.edition, new_work, old_work)
        else:
            logging.info("Setting work for list entry %r to %r",
                         self.edition, new_work)
    self.work = new_work
    return self.work

def test_identifier(self):
    Identifier.reset()
    uid = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 0, 's', 1.23456)
    id1 = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 1, 's', 12.782)
    self.assertTrue(Identifier.pretty(id1) == 'ts1')
    ids = []
    for i in range(0, 5):
        uid = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, i, 's', 2**(i - 2))
        ids.append(uid)
    ids = sorted(ids, reverse=True)
    self.assertTrue(Identifier.pretty(ids[0]) == 'ts4')
    self.assertTrue(Identifier.get_value(ids[0]) == 4.0)
    self.assertTrue(Identifier.pretty(ids[3]) == 'ts1')
    self.assertTrue(Identifier.get_value(ids[3]) == 0.5)

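# An inference from the two tests above (not source code): the "pretty" form
# appears to concatenate an object-type letter, the subtype letter given to
# make_id, and an integer counter, while get_value() round-trips the float.
# A toy sketch of that encoding, with hypothetical names:
def pretty_sketch(type_letter, subtype, counter):
    # e.g. pretty_sketch('t', 's', 1) == 'ts1'; pretty_sketch('e', 't', 3) == 'et3'
    return '{}{}{}'.format(type_letter, subtype, counter)
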
def __init__(self, parent):
    self.table = {}
    self.parent = parent
    self.is_global = False
    self.identifier = Identifier.get_new()
    if parent is None:
        self.is_global = True

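# A sketch of how a scope chain like this is typically searched (hypothetical
# `lookup` helper, not part of the original class):
def lookup(scope, name):
    while scope is not None:
        if name in scope.table:
            return scope.table[name]
        scope = scope.parent  # fall back to the enclosing scope
    return None  # undeclared identifier
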
def equivalent_editions(self, policy=None):
    """All Editions whose primary ID is equivalent to this Edition's
    primary ID, according to the given PresentationCalculationPolicy.
    """
    _db = Session.object_session(self)
    identifier_id_subquery = Identifier.recursively_equivalent_identifier_ids_query(
        self.primary_identifier.id, policy=policy)
    return _db.query(Edition).filter(
        Edition.primary_identifier_id.in_(identifier_id_subquery))

def parse(self, string):
    tags = string.split(params.INFO_SEPARATOR)
    tag_types = {}
    if len(tags) == 1:
        self.infos[ERROR_TAG] = self.tag_type(tags[0])
        return
    # Identify info class for each tag:
    for tag in tags:
        identifier = Identifier()
        tag_types[tag] = identifier.identify_tag_type(tag)
    # for tag in tags:
    #     tag_types[tag] = self.tag_type(tag)
    for tag, types in tag_types.items():
        if len(types) == 1:
            self.infos[types[0]] = tag.strip()
    # Parse info into nicer, more readable shape:
    try:
        self._parse_course_details()
        self._parse_event_type()
        self._parse_classroom()
        self._parse_timestamp()
        self._parse_building()
        self._parse_location()
        self._parse_summary()
    except Exception:
        # For testing:
        print("ERROR PARSING:")
        print(tags)
        e = sys.exc_info()[0]
        for k, v in self.infos.items():
            print(k, ':', v)
        a = input("Press enter to continue - "
                  "write input to raise error\n")
        if len(a):
            raise e
        print()
        for k, v in self.infos.items():
            print(k, ':', v)
        a = input("Press enter to continue\n")

def save(self, *args, **kwargs):
    if not hasattr(self, 'identifier'):
        try:
            topic_map = self.topic_map
        except AttributeError:
            # This is a TopicMap instance being saved for the
            # first time, so it is not possible to set the
            # database ID yet.
            topic_map = None
        identifier = Identifier(containing_topic_map=topic_map)
        identifier.save()
        self.identifier = identifier
    super(BaseConstructFields, self).save(*args, **kwargs)
    if self.identifier.containing_topic_map is None:
        # In the case of a TopicMap instance being saved for the
        # first time, the containing_topic_map will not have been
        # set (see above), so set it once the TopicMap is saved.
        self.identifier.containing_topic_map = self
        self.identifier.save()

def best_cover_within_distance(self, distance, rel=None, policy=None):
    _db = Session.object_session(self)
    identifier_ids = [self.primary_identifier.id]
    if distance > 0:
        if policy is None:
            new_policy = PresentationCalculationPolicy()
        else:
            new_policy = PresentationCalculationPolicy(
                equivalent_identifier_levels=distance,
                equivalent_identifier_cutoff=policy.equivalent_identifier_cutoff,
                equivalent_identifier_threshold=policy.equivalent_identifier_threshold,
            )
        identifier_ids_dict = Identifier.recursively_equivalent_identifier_ids(
            _db, identifier_ids, policy=new_policy)
        identifier_ids += identifier_ids_dict[self.primary_identifier.id]
    return Identifier.best_cover_for(_db, identifier_ids, rel=rel)

def equivalent_identifiers(self, type=None, policy=None):
    """All Identifiers equivalent to this Edition's primary identifier,
    according to the given PresentationCalculationPolicy.
    """
    _db = Session.object_session(self)
    identifier_id_subquery = Identifier.recursively_equivalent_identifier_ids_query(
        self.primary_identifier.id, policy=policy)
    q = _db.query(Identifier).filter(
        Identifier.id.in_(identifier_id_subquery))
    if type:
        if isinstance(type, list):
            q = q.filter(Identifier.type.in_(type))
        else:
            q = q.filter(Identifier.type == type)
    return q.all()

def for_foreign_id(cls, _db, data_source, foreign_id_type, foreign_id,
                   create_if_not_exists=True):
    """Find the Edition representing the given data source's view of
    the work that it primarily identifies by foreign ID.

    e.g. for_foreign_id(_db, DataSource.OVERDRIVE, Identifier.OVERDRIVE_ID, uuid)

    finds the Edition for Overdrive's view of a book identified by
    Overdrive UUID.

    This:

    for_foreign_id(_db, DataSource.OVERDRIVE, Identifier.ISBN, isbn)

    will probably return nothing, because although Overdrive knows
    that books have ISBNs, it doesn't use ISBN as a primary identifier.
    """
    # Look up the data source if necessary.
    if isinstance(data_source, basestring):
        data_source = DataSource.lookup(_db, data_source)

    identifier, ignore = Identifier.for_foreign_id(
        _db, foreign_id_type, foreign_id)

    # Combine the two to get/create an Edition.
    if create_if_not_exists:
        f = get_one_or_create
    else:
        f = get_one
    kwargs = dict()
    r = f(_db, Edition, data_source=data_source,
          primary_identifier=identifier, **kwargs)
    return r

def main():
    symbol_table = SymbolTable()
    v1 = Identifier('a', 5)
    v2 = Identifier('b', 7)
    v3 = Identifier('c', 12)
    v4 = Identifier('a', 14)  # same name as v1: exercises duplicate-name handling
    c1 = Constant('d', 7)
    symbol_table.add(v1)
    symbol_table.add(v2)
    symbol_table.add(v3)
    symbol_table.add(c1)
    symbol_table.add(v4)
    v2.set_value(42)
    symbol_table.add(v2)  # re-added after a value change
    c2 = Constant('c', 17)  # same name as v3, but as a constant
    symbol_table.add(c2)
    symbol_table.print_symbol_table()

def test_papasevent(self):
    # create a dummy papasevent
    papasevent = PapasEvent(0)
    ecals = dict()
    tracks = dict()
    mixed = dict()
    for i in range(0, 2):
        uid = Identifier.make_id(Identifier.PFOBJECTTYPE.ECALCLUSTER, 't', 4.5)
        ecals[uid] = uid
        papasevent.history[uid] = Node(uid)
        uidt = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK, 's', 4.5)
        tracks[uidt] = uidt
        papasevent.history[uidt] = Node(uidt)
        papasevent.history[uidt].add_child(papasevent.history[uid])
    lastid = Identifier.make_id(Identifier.PFOBJECTTYPE.ECALCLUSTER, 't', 3)
    ecals[lastid] = lastid
    papasevent.history[lastid] = Node(lastid)
    papasevent.add_collection(ecals)
    papasevent.add_collection(tracks)

    # create HistoryHelper
    hhelper = HistoryHelper(papasevent)

    # get all ids in event
    ids = hhelper.event_ids()
    self.assertTrue(len(ids) == 5)

    # check id_from_pretty
    self.assertTrue(hhelper.id_from_pretty('et5') == lastid)

    # check get_linked_ids
    linked = hhelper.get_linked_ids(lastid)  # everything linked to lastid (which is just lastid)
    self.assertTrue(linked[0] == lastid and len(linked) == 1)
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="undirected")[1] ==
        hhelper.id_from_pretty('ts2'))
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="parents") ==
        hhelper.get_linked_ids(ids[0], direction="undirected"))
    self.assertTrue(
        hhelper.get_linked_ids(ids[0], direction="children") ==
        [hhelper.id_from_pretty('et1')])

    # filter_ids
    self.assertTrue(len(hhelper.filter_ids(ids, 'ts')) == 2)
    self.assertTrue(hhelper.filter_ids(ids, 'no') == [])

    # get_collection
    self.assertTrue(len(hhelper.get_collection(ids[1:2], 'no')) == 0)
    self.assertTrue(len(hhelper.get_collection([99], 'no')) == 0)
    self.assertTrue(len(hhelper.get_collection(ids[0:2], 'ts')) == 1)

    # get_history_subgroups
    subgroups = hhelper.get_history_subgroups()
    self.assertTrue(len(subgroups) == 3)

    # get_linked_collection
    self.assertTrue(
        hhelper.get_linked_collection(hhelper.id_from_pretty('et1'), 'ts').keys() ==
        [hhelper.id_from_pretty('ts2')])
    self.assertRaises(KeyError, hhelper.get_linked_collection, 0, 'ts')
    self.assertTrue(
        len(hhelper.get_linked_collection(hhelper.id_from_pretty('et1'), 'no')) == 0)

import sys

import pandas as pd

import gsp_support as gsp
import gsp_visualize as gsp_v

if len(sys.argv) < 2:
    print('Usage: python visualizer.py [house_num]')
    print('This program is for visualizing full datasets to create refined ones')
    sys.exit()

house_num = int(sys.argv[1])
csvfileaggr = 'dataset/house_{}/output_aggr.csv'.format(house_num)
csvfiledisaggr = 'dataset/house_{}/output_disaggr.csv'.format(house_num)
csvfileresponse = 'dataset/house_{}/output_response.csv'.format(house_num)

identifier = Identifier(20, csvfileresponse)

demo_file = pd.read_csv(csvfileaggr, index_col="Time")
demo_file.index = pd.to_datetime(demo_file.index)
demo_file_truth = pd.read_csv(csvfiledisaggr, index_col="Time")
demo_file_truth.index = pd.to_datetime(demo_file_truth.index)

# Load settings
disag_settings = get_disagg_settings(house_num)
data_settings = get_dataset_settings(house_num)

# Aggregate similar channels
demo_file_truth = gsp.aggregate_channels(demo_file_truth, data_settings)

mask = (demo_file.index > disag_settings.start_time) & \
       (demo_file.index < disag_settings.end_time)

def visit_identifier(self, node, visited_children):
    literal = Identifier(node.text)
    return Queue([literal])

import asyncio

from functions import videoProcessing
from identifier import Identifier

i = Identifier()
asyncio.ensure_future(videoProcessing(i, True))

def requestAvatarId(self, credentials):
    self.meta.password = hashlib.md5(credentials.password).hexdigest()
    iden = Identifier(credentials, "Password")
    d = iden.identify()
    return d

def set_get(self):
    """Create an Identifier, use its setter to change an attribute,
    then verify that the attribute no longer holds its initial value.
    """
    test = Identifier("test")
    test.set_imgdir("cambio")
    self.assert_("test" != test.get_imgdir())

def get(self):
    """Create an Identifier with an attribute, then check that the
    getter returns that same attribute.
    """
    test = Identifier("test")
    self.assert_("test" == test.get_imgdir())

def p_identifier(p):
    '''identifier : pidentifier'''
    p[0] = Identifier(p[1])

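# A grammar rule like p_identifier pairs with a lexer token named
# `pidentifier`. A minimal PLY lexer sketch; the token regex is an
# assumption, not taken from the source grammar:
import ply.lex as lex

tokens = ('pidentifier',)

t_pidentifier = r'[_a-z]+'  # hypothetical identifier pattern
t_ignore = ' \t\n'

def t_error(t):
    t.lexer.skip(1)

lexer = lex.lex()
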
class Copier:
    def __init__(self, meta_info):
        self.meta_info = meta_info
        self.identifier = Identifier(self.meta_info)
        self.modifier = Modifier(self.identifier)

    ###########################################################################
    # Main
    ###########################################################################

    # TODO delete
    # def run2(self):
    #     # https://www.machinelearningplus.com/python/cprofile-how-to-profile-your-python-code/
    #     import cProfile, pstats
    #     profiler = cProfile.Profile()
    #     profiler.enable()
    #     self.run2()
    #     profiler.disable()
    #     stats = pstats.Stats(profiler).sort_stats("tottime")
    #     stats.print_stats()
    #     # import cProfile
    #     # cProfile.run("self.run2()")

    def run(self):
        self.air_pockets = self.meta_info.air_pockets.get()
        self.water_blocks = self.meta_info.water_blocks.get()
        self.repl_blocks = self.meta_info.repl_blocks.get()

        # Print detailed information  # TODO improve
        if self.air_pockets == 1:
            self.meta_info.text_queue.put("Air Blocks will be fixed!\n")
        else:
            self.meta_info.text_queue.put("Air Blocks will not be fixed!\n")

        if self.water_blocks == 1:
            self.meta_info.text_queue.put("Water Blocks will be fixed!\n")
        else:
            self.meta_info.text_queue.put("Water Blocks will not be fixed!\n")

        if self.repl_blocks == 1:
            self.meta_info.text_queue.put("Replacement Blocks will be inserted!\n")
        else:
            self.meta_info.text_queue.put("Replacement Blocks will not be inserted!\n")

        self.meta_info.text_queue.put("\n.. starting\n")
        t1 = gmtime()
        self.meta_info.text_queue.put(strftime("%Y-%m-%d %H:%M:%S\n", t1))
        ms = int(round(time.time() * 1000))

        # Get all files in the directory
        src_dir = self.meta_info.source_dir.get()
        filelist = None
        if os.path.exists(src_dir):
            filelist = os.listdir(src_dir)

        if filelist is None or len(filelist) == 0:
            messagebox.showinfo(
                message="No files found! Select a different source path.",
                title="Error")
            return

        tgt_dir = self.meta_info.target_dir.get()
        try:
            if not os.path.exists(tgt_dir):
                os.mkdir(tgt_dir)
        except OSError:
            messagebox.showinfo(
                message="Creation of the directory %s failed" % tgt_dir,
                title="Error")

        # Update the progressbar and label for the files
        self.meta_info.file_count_max = len(filelist)

        # Iterate the files
        i = 1
        for filename in filelist:
            if filename.endswith(".mca"):
                # TODO combine path and filename here?
                self.copyRegion(filename)
            else:
                continue
            self.meta_info.file_count = i
            i += 1

        # Print that the process is finished
        self.meta_info.text_queue.put("\n.. finished\n")
        t2 = gmtime()
        self.meta_info.text_queue.put(strftime("%Y-%m-%d %H:%M:%S\n", t2))
        self.meta_info.text_queue.put("Total runtime: ")
        self.meta_info.text_queue.put(
            datetime.timedelta(seconds=time.mktime(t2) - time.mktime(t1)))

        ms2 = int(round(time.time() * 1000))
        print(f"Total time elapsed: {ms2 - ms}")
        self.meta_info.finished = True

    ###########################################################################

    def copyRegion(self, filename):
        end = filename.split(".")
        rX = int(end[1])
        rZ = int(end[2])

        # Create a new region with the `EmptyRegion` class at region coords
        new_region = anvil.EmptyRegion(rX, rZ)
        src_dir = self.meta_info.source_dir.get()
        region = anvil.Region.from_file(src_dir + "/" + filename)

        repl_region = False
        if self.repl_blocks:
            try:
                repl_dir = self.meta_info.replacement_dir.get()
                repl_region = anvil.Region.from_file(repl_dir + "/" + filename)
            except Exception:
                print(f"Could not create replacement region for {filename}.")

        ##########################################
        # Main function call
        ##########################################
        self.copy_chunks(new_region, region, repl_region)

        if self.water_blocks + self.air_pockets + self.repl_blocks >= 1:
            self.meta_info.text_queue.put(f"In file {filename}:\n")
        if self.air_pockets == 1:
            self.meta_info.text_queue.put(
                f"Changed {self.meta_info.counts.changed_air.value} air blocks to solid blocks.\n")
        if self.water_blocks == 1:
            self.meta_info.text_queue.put(
                f"Changed {self.meta_info.counts.changed_water.value} solid blocks to water.\n")
        if self.repl_blocks == 1:
            self.meta_info.text_queue.put(
                f"Changed {self.meta_info.counts.changed_repl.value} "
                "solid blocks to replacement solid blocks.\n")

        ms = int(round(time.time() * 1000))

        ##########################################
        # Other modifications
        ##########################################
        # if self.meta_info.make_tunnel():
        #     self.modifier.make_tunnel(
        #         region, new_region, rX, rZ,
        #         self.meta_info.get_tunnel_start(), self.meta_info.get_tunnel_end()
        #     )
        # self.modifier.make_tunnel(region, new_region, rX, rZ, [125, 80, 100], [125, 80, 350])
        # self.modifier.make_tunnel(region, new_region, rX, rZ, [125, 60, 100], [225, 60, 350])
        # self.modifier.make_tunnel(region, new_region, rX, rZ, [125, 100, 100], [325, 100, 250])

        ms2 = int(round(time.time() * 1000))
        print(f"Tunnel time: {ms2 - ms}")

        # Save to a file
        self.meta_info.counts.algo_step = cfg.A_SAVE
        target_dir = self.meta_info.target_dir.get()
        new_region.save(target_dir + "/" + filename, region)
        self.meta_info.counts.algo_step = cfg.A_FINISHED

        ms = int(round(time.time() * 1000))
        print(f"Save time: {ms - ms2}")

    ###########################################################################

    def copy_chunks(self, new_region, region, repl_region):
        ms = int(round(time.time() * 1000))

        # TODO combine these into a function?
        self.meta_info.counts.algo_step = cfg.A_CLASSIFY
        self.meta_info.counts.chunks_c.value = 0
        classifier_mp = ClassifierMP(self.meta_info)
        if self.air_pockets + self.repl_blocks + self.water_blocks > 0:
            classifier_mp.classify_all_mp(
                region, self.meta_info.counts, self.meta_info.timer)

        ms2 = int(round(time.time() * 1000))
        print(f"Classifier time: {ms2 - ms}")

        self.meta_info.counts.algo_step = cfg.A_IDENTIFY
        self.identifier.identify(
            classifier_mp.c_regions, self.meta_info.counts, self.meta_info.timer)

        ms3 = int(round(time.time() * 1000))
        print(f"Identifier time: {ms3 - ms2}")

        self.meta_info.counts.algo_step = cfg.A_MODIFY
        self.meta_info.counts.chunks_m.value = 0
        for chunk_x in range(cfg.REGION_C_X):
            self.meta_info.timer.start_time()
            for chunk_z in range(cfg.REGION_C_Z):
                self.copy_chunk(new_region, region, repl_region, chunk_x, chunk_z)
                self.meta_info.counts.chunks_m.value += 1
            self.meta_info.timer.end_time()
            self.meta_info.timer.update_m_elapsed(self.meta_info.counts)

        ms4 = int(round(time.time() * 1000))
        print(f"Modify time: {ms4 - ms3}")

    ###########################################################################

    def copy_chunk(self, new_region, region, repl_region, chunk_x, chunk_z):
        chunk = None
        try:
            chunk = anvil.Chunk.from_region(region, chunk_x, chunk_z)
        except Exception:
            print(f"skipped non-existent chunk ({chunk_x},{chunk_z})")

        if chunk:
            # TODO only when the option is ticked?
            repl_chunk = False
            if repl_region:
                try:
                    repl_chunk = anvil.Chunk.from_region(repl_region, chunk_x, chunk_z)
                except Exception:
                    print(f"Could not create replacement chunk for {chunk_x}, {chunk_z}.")
            self.modifier.modify(chunk, repl_chunk, new_region, chunk_x, chunk_z)

import string
import random
from pathlib import Path
import base64
from io import BytesIO
import asyncio

from sanic import Sanic
import sanic.response as sanic_response

from functions import *
from identifier import Identifier

loop = asyncio.get_event_loop()
app = Sanic(__name__)
identifier = Identifier()


@app.route('/')
async def index(request):
    return await sanic_response.file('index/index.html')


@app.route('/image/<uid>')
async def getImage(request, uid):
    img_loc = identifier.getImageLocation(uid)
    if not img_loc:
        return sanic_response.text('not valid')
    return await sanic_response.file(img_loc)

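# A minimal way to start the server (the standard Sanic entry point; the host
# and port values here are placeholders, not taken from the source):
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8000)
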
def __init__(self, _value=None):
    self.value = _value
    self.children = []
    self.identifier = Identifier.get_new()

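# Several snippets above stamp objects with Identifier.get_new(). A standalone
# sketch of such a factory, assuming it is simply a monotonically increasing
# class-level counter (an assumption, not any project's implementation):
import itertools


class IdentifierSketch:
    _counter = itertools.count(1)

    @classmethod
    def get_new(cls):
        # Hand out the next unique integer id.
        return next(cls._counter)
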
import json

from flask import Flask, request

from identifier import Identifier
from os.path import abspath

app = Flask(__name__)
host = "0.0.0.0"
port = 6000

suicidePath = abspath("../resources/articles/suicide")
generalPath = abspath("../resources/articles/general")
preprocessorEndpoint = 'http://*****:*****'  # credentials redacted in the source


@app.route('/api/identifier', methods=['GET', 'POST'])
def identifier():
    if request.method == 'GET':
        pass
    elif request.method == 'POST':
        pass


@app.route('/api/identifier/suicide', methods=['POST'])
def suicide_identify():
    content = ""
    if request.form:
        pass  # the snippet is truncated at this point in the source