class Coordinator(object):
    STEP_TIME = 0.2

    def __init__(self, mock_actuator=False):
        self.health_checker = HealthChecker()
        self.health_checker.double_flash()
        self.path_manager = PathManager(verbose=True)
        self.mpc_bridge = MPCBridge()
        self.actuator_bridge = ActuatorBridge(mock=mock_actuator)
        self.health_checker.double_flash()
        self.active = True

    def stop_system(self):
        self.active = False

    def start_trip(self, start, destination, use_cv=True, fake_gps=False,
                   speed=None):
        self.health_checker.startup()
        self.health_checker.double_flash()
        self.path_manager.retrieve_path(start, destination)
        self.sensor_fuser = SensorFuser(use_cv=use_cv, fake_gps=fake_gps,
                                        speed=speed)
        coordinator_thread = threading.Thread(target=self.main_loop)
        coordinator_thread.start()
        self.health_checker.startup_done()

    def main_loop(self):
        time.sleep(10)  # Wait until Arduino & GPS are ready
        while self.active:
            print("INFO: System time is", str(datetime.datetime.now()))
            loop_start = time.time()
            parameters = self.sensor_fuser.retrieve_updates()
            self.path_manager.potentially_update_next(parameters.gps)
            parameters.next_target = self.path_manager.get_next()
            self.health_checker.check(parameters)
            impulses = self.mpc_bridge.request_step(parameters)
            self.actuator_bridge.send(impulses)
            sleep_time = Coordinator.STEP_TIME - (time.time() - loop_start)
            if sleep_time > 0:
                time.sleep(sleep_time)
                print("DEBUG: Main loop took",
                      Coordinator.STEP_TIME - sleep_time)
            else:
                print("WARN: Main loop took too long to process:",
                      Coordinator.STEP_TIME - sleep_time)
        self.sensor_fuser.stop()
def __init__(
        self, raw_game_data: Any, raw_game_info: Any, raw_observation: Any
) -> None:
    """
    Set up variables for use within Creeper.

    Args:
        raw_game_data (Any): self.game_data from main instance
        raw_game_info (Any): self.game_info from main instance
        raw_observation (Any): self.game_state from main instance

    Returns:
        None
    """
    self.bot = BotAI()
    game_data = GameData(raw_game_data.data)
    game_info = GameInfo(raw_game_info.game_info)
    game_state = GameState(raw_observation)
    self.bot._initialize_variables()
    self.bot._prepare_start(
        client=None, player_id=1, game_info=game_info, game_data=game_data
    )
    self.bot._prepare_step(state=game_state, proto_game_info=raw_game_info)
    self.pathing = PathManager(raw_game_data, raw_game_info, raw_observation)
    self.pf = PathFind(self.pathing.map_grid)
import argparse
from sys import argv, path as syspath  # assumed imports for this excerpt


def getPathManager():
    # argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument('--project-path', '-a', type=str, required=True,
                        help='Absolute path to project')
    parser.add_argument('--printer', '-p', type=str, required=True,
                        help='Printer name')
    parser.add_argument('--printjob', '-j', type=str, required=True,
                        help='Print job name')
    parser.add_argument('--gen-training-data', '-t', action='store_true',
                        help='Generate training data')
    parser.add_argument('--gen-testing-data', '-e', action='store_true',
                        help='Generate testing data')
    try:
        idx = argv.index('--')
    except ValueError:
        print('No argument to parse')
        idx = 0
    args = parser.parse_args(argv[idx + 1:])

    # get file paths
    project_path = args.project_path
    printer_name = args.printer
    printjob_name = args.printjob
    genTrain = args.gen_training_data
    genTest = args.gen_testing_data
    print(project_path, printer_name, printjob_name)

    # import PathManager
    # FIXME: better not to modify sys.path
    syspath.append(project_path)
    from path_manager import PathManager

    pm = PathManager(abs_path=project_path, printer_name=printer_name,
                     printjob_name=printjob_name)
    return pm, genTrain, genTest
'''
Preprocessing for Training Data
'''
from glob import glob
from os import makedirs, path, rename

import cv2
import numpy as np

from image_processor import ImageProcessor
from path_manager import PathManager

pm = PathManager()

# Preprocess raw images:
# filter invalid images and undistort raw images.
# Note that this should already have been done by the crawler when collecting data.
for printer_name in pm.getPrinterNames():
    pm.setPrinter(printer_name)
    if path.exists(pm.calibration_folder):
        # printer with calibration information
        print(pm.printer_folder)
        # load calibration data
        ip = ImageProcessor(pm.intrinsic)
        for printjob_name in pm.getPrintJobNames():
            pm.setPrintJob(printjob_name)
            if path.exists(pm.printjob_finish) and not path.exists(pm.images):
from os import path

import imageio

from path_manager import PathManager

pm = PathManager(
    printer_name='S5',
    printjob_name='UMS5_579f7056-b56e-468a-a45f-37a8f0c22d87_20201228185653')

lhs = []
tss = []
with open(pm.test_list, 'r') as fp:
    for line in fp.readlines():
        lh, ts = line.split(', ')
        lhs.append(lh)
        tss.append(ts[:-1])


def makeGIF(input_path, file_list, output_name):
    imgs = []
    for filename in file_list:
        imgs.append(imageio.imread(path.join(input_path, f'{filename}.png')))
    output_file = path.join(pm.printjob_folder, f'{output_name}.gif')
    imageio.mimsave(output_file, imgs)
    print(f'Saved: {output_file}')


makeGIF(pm.images, tss, 'imgs')
makeGIF(pm.raw_images, tss, 'raws')
class Paul(sc2.BotAI):
    """The code that is Paul."""

    def __init__(self) -> None:
        """Set up variables and data for the game."""
        super().__init__()
        self.unit_dict: Dict[int, UnitTypeId] = {}
        self.inject_queens: Set[int] = set()
        self.creep_queens: Set[int] = set()
        self.tag_sets: List[Set[int]] = [self.inject_queens,
                                         self.creep_queens]
        self.tag_dicts: List[Units] = [self.unit_dict]
        self.target: Point2 = None
        self.build_order: List[Dict] = []
        self.pathing: Any = None  # class
        self.i: int = 0  # build order index
        self.mode: str = "econ"  # econ or army
        self.rush_start = False

    async def on_start(self) -> None:
        """
        Set up data that require information from the game.

        Note:
            This function is called automatically at iteration = 0.

        Args:
            None

        Returns:
            None
        """
        raw_game_data = await self._client._execute(data=sc_pb.RequestData(
            ability_id=True,
            unit_type_id=True,
            upgrade_id=True,
            buff_id=True,
            effect_id=True,
        ))
        raw_game_info = await self._client._execute(
            game_info=sc_pb.RequestGameInfo())
        raw_observation = self.state.response_observation
        self.pathing = PathManager(raw_game_data, raw_game_info,
                                   raw_observation)
        self.creeper = Creeper(raw_game_data, raw_game_info, raw_observation)
        # build_selector = BuildOrderManager(self.enemy_race)
        # self.build_order = build_selector.select_build_order()
        with open("builds/1312.pickle", "rb") as f:
            self.build_order = pickle.load(f)  # nosec
            # all possible arguments are handled by BuildOrderManager class
        self.tag_dicts.append(self.pathing.pathing_dict)
        self.target = self.enemy_start_locations[0].position
        await self.chat_send("gl hf")

    async def on_step(self, iteration: int = 0) -> None:
        """
        Call all relevant functions.

        Note:
            This function is called automatically.

        Args:
            None

        Returns:
            None
        """
        if len(self.units(UnitTypeId.ZERGLING)) >= 6:
            self.rush_start = True
        await self.inject(queen_tags=self.inject_queens)
        if self.rush_start:
            await self.micro()
        creep_grid = np.transpose(self.state.creep.data_numpy)
        if iteration == 0:
            with open("drawn_grids/creep_triton.txt", "w") as f:
                for i in range(creep_grid.shape[0]):
                    for j in range(creep_grid.shape[1]):
                        f.write(str(creep_grid[i][j]))
                    f.write("\n")
        for queen in self.units(UnitTypeId.QUEEN).filter(
                lambda unit: unit.tag in self.creep_queens):
            q_abilities = await self.get_available_abilities(queen)
            if AbilityId.BUILD_CREEPTUMOR_QUEEN in q_abilities:
                enemy_target = self.enemy_start_locations[0].towards(
                    self._game_info.map_center, 5)
                to_e_base = self.pathing.pf.find_path(
                    (floor(queen.position.x), floor(queen.position.y)),
                    (floor(enemy_target.x), floor(enemy_target.y)),
                )[0]
                for i in range(len(to_e_base) - 1, -1, -1):
                    if creep_grid[to_e_base[i]]:
                        pos = Point2((to_e_base[i][0], to_e_base[i][1]))
                        self.do(queen(AbilityId.BUILD_CREEPTUMOR_QUEEN, pos))
                        # CAN'T FIND PROPER POINT
        for tumor in self.structures(UnitTypeId.CREEPTUMORBURROWED):
            abilities = await self.get_available_abilities(tumor)
            if AbilityId.BUILD_CREEPTUMOR_TUMOR in abilities:
                tumor_positions = {
                    unit.position
                    for unit in self.structures.filter(
                        lambda unit: unit.type_id in
                        {UnitTypeId.CREEPTUMORBURROWED, UnitTypeId.CREEPTUMOR})
                }
                location = await self.creeper.find_position(
                    tumor, tumor_positions, creep_grid, self.pathing.map_grid)
                self.do(tumor(AbilityId.BUILD_CREEPTUMOR_TUMOR, location))
        # TODO: place all necessary code above build order due to return statements
        if self.i >= len(self.build_order):
            # TODO: Select new build order instead of switching to army
            self.mode = "army"
        if self.mode == "econ":
            order = self.build_order[self.i]
            if self.supply_used != order["supply"]:
                self.train(UnitTypeId["DRONE"])
            elif self.supply_used == order["supply"]:
                for tech in order["requires"]:
                    if not self.structures(UnitTypeId[tech]).ready:
                        return
                if order["category"] == "struct":
                    if self.workers:
                        worker = self.workers.random
                        if order["name"] == "EXTRACTOR":
                            if self.can_afford(UnitTypeId["EXTRACTOR"]):
                                target = self.vespene_geyser.closest_to(worker)
                                if self.do(worker.build_gas(target)):
                                    self.i += 1
                        elif order["name"] == "SPAWNINGPOOL":
                            pos = self.townhalls[0].position.to2.towards(
                                self._game_info.map_center, 5)
                            if self.can_afford(UnitTypeId["SPAWNINGPOOL"]):
                                if self.do(
                                        worker.build(
                                            UnitTypeId["SPAWNINGPOOL"], pos)):
                                    self.i += 1
                        elif order["name"] == "HATCHERY":
                            if self.minerals >= 300:
                                await self.expand_now()
                                self.i += 1
                elif order["category"] == "unit":
                    if len(self.units(UnitTypeId["LARVA"])) > 0:
                        if self.train(UnitTypeId[order["name"]]):
                            self.i += 1
                elif order["category"] == "upgrade":
                    if self.can_afford(UpgradeId[order["name"]]):
                        self.research(UpgradeId[order["name"]])
                        self.i += 1
        elif self.mode == "army":
            if (not self.already_pending(UnitTypeId["SPAWNINGPOOL"])
                    and not self.structures(UnitTypeId["SPAWNINGPOOL"]).ready):
                if self.can_afford(UnitTypeId["SPAWNINGPOOL"]):
                    pos = self.townhalls[0].position.to2.towards(
                        self._game_info.map_center, 5)
                    if self.can_afford(
                            UnitTypeId["SPAWNINGPOOL"]) and self.workers:
                        worker = self.workers.closest_to(pos)
                        self.do(worker.build(UnitTypeId["SPAWNINGPOOL"], pos))
                else:
                    return
            if self.supply_left <= 2:
                if not self.already_pending(UnitTypeId["OVERLORD"]):
                    self.train(UnitTypeId["OVERLORD"])
            self.train(UnitTypeId["ZERGLING"],
                       amount=len(self.units(UnitTypeId["LARVA"])))
            self.train(UnitTypeId["QUEEN"])

    async def on_unit_created(self, unit: Unit) -> None:
        """
        Add unit to dictionaries and determine what should happen to each
        spawned unit.

        Note:
            This function is called automatically.

        Args:
            unit (Unit): the unit created

        Returns:
            None
        """
        self.unit_dict[unit.tag] = unit.type_id
        # drone protocol (prioritize gas -> minerals)
        if unit.type_id in {UnitTypeId.DRONE}:
            for extractor in self.gas_buildings.ready:
                if extractor.assigned_harvesters < 3:
                    self.do(unit.gather(extractor))
                    return
            for base in self.townhalls.ready:
                if base.assigned_harvesters < 16:
                    self.do(
                        unit.gather(
                            self.mineral_field.closest_to(base.position)))
                    return
        # queen protocol (inject > creep > unassigned)
        if unit.type_id in {UnitTypeId.QUEEN}:
            if len(self.inject_queens) < min(len(self.townhalls), 3):
                self.inject_queens.add(unit.tag)
                return
            elif len(self.creep_queens) < 4:
                self.creep_queens.add(unit.tag)
                return

    async def on_unit_destroyed(self, unit_tag: int) -> None:
        """
        Remove dead units from stored data points, replace structures/drones.

        Note:
            This function is called automatically.

        Args:
            unit_tag (int): tag of the deceased unit

        Returns:
            None
        """
        # clean up lists and sets
        for tag_set in self.tag_sets:  # type: Union[list, set]
            if unit_tag in tag_set:
                tag_set.remove(unit_tag)
        # clean up dictionaries
        for tag_dict in self.tag_dicts:  # type: Dict
            if unit_tag in tag_dict:
                del tag_dict[unit_tag]

    async def on_building_construction_complete(self, unit: Unit) -> None:
        """
        Determine if anything needs to be done when a building finishes.

        Note:
            This function is called automatically.

        Args:
            unit (Unit): the building completed

        Returns:
            None
        """
        # immediately assign workers to geyser
        if unit.type_id in {UnitTypeId.EXTRACTOR, UnitTypeId.EXTRACTORRICH}:
            gas_drones = self.workers.closest_n_units(unit, 3)
            for drone in gas_drones:
                self.do(drone.gather(unit))
            return

    async def inject(self, queen_tags: Set[int]) -> None:
        """
        Inject townhalls.

        Args:
            queen_tags (Set[int]): tags of queens assigned to inject

        Returns:
            None
        """
        queens = self.units.tags_in(queen_tags)
        for queen in queens:
            abilities = await self.get_available_abilities(queen)
            if AbilityId.EFFECT_INJECTLARVA in abilities:
                possible_targets = self.townhalls.filter(
                    lambda unit: BuffId.QUEENSPAWNLARVATIMER not in unit.buffs)
                if possible_targets:
                    inject_target = possible_targets.closest_to(queen)
                    self.do(queen(AbilityId.EFFECT_INJECTLARVA, inject_target))

    async def on_enemy_unit_entered_vision(self, unit: Unit) -> None:
        """
        Decide what to do based on where the unit is.

        Args:
            unit (Unit): the enemy that entered vision

        Returns:
            None
        """
        if unit.type_id not in {
                UnitTypeId.DRONE, UnitTypeId.PROBE, UnitTypeId.SCV
        }:
            if self._distance_pos_to_pos(
                    unit, self.townhalls.closest_to(unit)) <= 20:
                self.mode = "army"

    async def micro(self, unit_tags: List[int] = []) -> None:
        """
        Issue unit commands for microing.

        Args:
            unit_tags (List[int]): Set of units to micro. If empty, micro all
                units.

        Returns:
            None
        """
        if not unit_tags:
            attackers = self.units.filter(
                lambda unit: unit.tag not in
                self.inject_queens | self.creep_queens and unit.type_id not in
                {UnitTypeId.OVERLORD, UnitTypeId.DRONE, UnitTypeId.LARVA})
        else:
            attackers = self.units.filter(lambda unit: unit.tag in unit_tags)
        for unit in attackers:
            self.do(
                unit.attack(
                    self.pathing.follow_path(
                        unit=unit,
                        default=self.enemy_start_locations[0].position)))
from os import path, makedirs
import pathlib

import numpy as np

from path_manager import PathManager
from segmentation import Evaluator
from visualization_client import DetectorVisualizerClient

# path manager
pm = PathManager(
    abs_path=pathlib.Path(__file__).parent.absolute(),
    printer_name='S5',
    printjob_name='UMS5_4064a7f6-d290-485e-83ca-373373bebcae_20210111154854')

# evaluator
evaluator = Evaluator(
    'segmentation/model/PAN-se_resnet50-aug-best_model-traced.pth')

# visualizer
vc = DetectorVisualizerClient(pm.printer_name + ' (Offline)')
vc.sendPrinterInfo(printjob_name=pm.printjob_name)

if not path.exists(pm.seg_images):
    makedirs(pm.seg_images)
if not path.exists(pm.iou_images):
    makedirs(pm.iou_images)
if not path.exists(pm.blend_images):
    makedirs(pm.blend_images)

eval_result_fp = open(pm.eval_result, 'w')
iou_list = [1.0 for _ in range(10)]

with open(pm.test_list, 'r') as fp:
    for line in fp.readlines():
        lh, ts = line.split(', ')
        input_path = path.join(pm.images, f'{ts[:-1]}.png')
        return None

    def _exeCmd(self, cmd):
        print(cmd)
        subprocess.run(cmd)
        print('Simulation finished')


if __name__ == "__main__":
    '''
    Offline Simulation
    '''
    from os import path

    max_thread_num = 5
    pm = PathManager(abs_path=pathlib.Path(__file__).parent.absolute())

    # generate training data
    for printer_name in pm.getPrinterNames():
        pm.setPrinter(printer_name)
        if path.exists(pm.calibration_folder):
            for printjob_name in pm.getPrintJobNames():
                pm.setPrintJob(printjob_name)
                if path.exists(pm.images):
                    # limit number of threads
                    while threading.active_count() >= max_thread_num:
                        # check every 5 mins
                        time.sleep(300)
                        print('thread counts', threading.active_count())
import os

from argument_parser import ArgumentParser
from configs_writer import ConfigsWriter
from corner_detector import CornerDetector
from extrinsics_calibrator import extrinsics_calibrator
from intrinsics_calibrator import intrinsics_calibrator
from path_manager import PathManager

if __name__ == '__main__':
    ArgumentParser._argument = ArgumentParser.args_parser().parse_args()
    path_manager = PathManager().set_path_manager_options(
        ArgumentParser.get_path_manager_options())
    intrinsics_calibrator.remove_non_chess_images(
        path_manager.intrinsic_image_dir(),
        path_manager.remove_non_chess_image_pro_dir())
    intrinsics_calibrator.calibrate_intrinsics(
        path_manager.intrinsic_image_dir())
    intrinsics_calibrator.sort_out_result(path_manager.intrinsic_image_dir())
    ConfigsWriter.write_total_configs(path_manager.extrinsics_image_dir())
    ConfigsWriter.write_front_configs(
        path_manager.intrinsic_image_dir(),
        path_manager.extrinsics_image_dir(),
        path_manager.front_template_camera_configs_path())
    ConfigsWriter.write_right_configs(
        path_manager.intrinsic_image_dir(),
class BaseClassifier:
    def __init__(self, datapath, model_name, original_size_x, original_size_y,
                 input_size, slice_count_x=1, slice_count_y=1,
                 is_8_channel=True, gpu_number=None,
                 working_dir="/data/working", is_8_bit=True):
        """
        Args:
            is_8_channel (bool) - True for 8 channel, False for RGB images.
        """
        # Set up what data is used by the current classifier.
        self.slice_count_x = slice_count_x
        self.slice_count_y = slice_count_y
        self.slice_count = slice_count_x * slice_count_y
        self.is_8_channel = is_8_channel
        self.is_8_bit = is_8_bit
        self.channel_count = 8 if is_8_channel else 3
        self.original_size_x = original_size_x
        self.original_size_y = original_size_y
        self.input_size = input_size
        self.path_mgr = PathManager(
            datapath, self.slice_count, is_8_channel, model_name,
            create_dirs=(gpu_number == 0) or (gpu_number is None),
            working_dir=working_dir)
        self.datapath = datapath
        self.prefix = utils.datapath_to_prefix(datapath)
        self.gpu_number = gpu_number
        if gpu_number is None:
            self.device = torch.device('cpu')
            logger.info("Running on CPU")
        else:
            torch.cuda.set_device(gpu_number)
            self.device = torch.device('cuda')  # Get current CUDA device
            logger.info("Running on GPU #{}".format(gpu_number))
        if self.slice_count != 1:
            self.stride_size_x = int((self.original_size_x - self.input_size) /
                                     (self.slice_count_x - 1))
            self.stride_size_y = int((self.original_size_y - self.input_size) /
                                     (self.slice_count_y - 1))

    def load_image_id_list(self, split):
        """
        Load the list of image ids from a csv file, either for training or
        validation.

        Args:
            split (DataSplit): One of Train, Validation or Test.
        """
        fn_list = self.path_mgr.get_image_list_csv_path(split)
        df = pd.read_csv(fn_list, index_col='ImageId')
        return [image_id for image_id in df.index]
                    action='store_true', help='Visualize result')
args = parser.parse_args()

# arguments
printer_name = args.printer
do_simulate = args.simulate
do_evaluate = args.evaluate
if do_evaluate:
    do_simulate = True
do_visualize = args.visualizer

# printer (specified in ultimaker.ini)
printer = Printer(printer_name)

# path manager
pm = PathManager(printer_name=printer_name,
                 abs_path=pathlib.Path(__file__).parent.absolute())

# evaluator
evaluator = None
if do_evaluate:
    # create folder
    # load evaluation model
    evaluator = Evaluator(
        'segmentation/model/PAN-se_resnet50-aug-best_model-traced.pth')

# visualizer
vc = None
if do_visualize:
    vc = DetectorVisualizerClient(pm.printer_name)

while True:
    # check printer state every minute