def scrapeFromTodayAndBackward(dumpDir, client, limit, stop_if_already_dumped=True):
    """Scrape Fitbit data day by day, starting today and going backward.

    :param dumpDir: folder where the daily dumps are written
    :param client: API client passed through to dumpDay
    :param limit: maximum number of days considered (loop runs while count < limit)
    :param stop_if_already_dumped: if True, stop at the first day that was
        already dumped instead of skipping over it
    """
    count = 1
    date = datetime.date.today()
    while count < limit:
        if previouslyDumped(dumpDir, date):
            logger.info("Already scraped {}".format(date.isoformat()))
            if stop_if_already_dumped:
                # CONSISTENCY FIX: use the logger instead of a bare print()
                # like every other message in this function.
                logger.info("Stopping the scraping")
                break
            date -= datetime.timedelta(days=1)
            continue
        logger.info("Scraping data for {}".format(date.isoformat()))
        dumpDay(client, dumpDir, date)
        date -= datetime.timedelta(days=1)
        count += 1
    # Always redump the last dumped day because we may have dumped it before
    # the day was finished.  (A commented-out duplicate of the loop above was
    # removed as dead code.)
    dumpDay(client, dumpDir, date)
def loadFun(jsonData):
    """Load a day's sleep records into a DataFrame of minute-level data.

    Only the main sleep is used when several records exist for the date.
    Minutes between 12:00 and 23:59 are attributed to the previous calendar
    day (sleep records span midnight), the rest to the record's date.

    :param jsonData: parsed JSON of a dumped sleep.json file
    :return: DataFrame with 'value' and 'datetime' columns, or None when there
        is no sleep data, no main sleep, or only SLEEP_VALUE_NONE values
    """
    sleeps = jsonData['sleep']
    if not sleeps:
        return None
    date = datetime.datetime.strptime(sleeps[0]['dateOfSleep'],
                                      "%Y-%m-%d").date()
    if len(sleeps) > 1:
        logger.info(
            "There are {} sleep records for {}, taking main sleep".format(
                len(sleeps), date))
    intradayData = None
    for sleep in sleeps:
        if sleep['isMainSleep']:
            intradayData = sleep['minuteData']
            break
    if intradayData is None:
        # BUG FIX: when no record is flagged isMainSleep the old code fell
        # through with intradayData=None and crashed in pd.read_json.
        logger.info("No main sleep found for {}".format(date))
        return None
    dayToSubstract = datetime.timedelta(days=1)
    df = pd.read_json(json.dumps(intradayData), convert_dates=['time'])
    if (df['value'] == SLEEP_VALUE_NONE).all():
        logger.info("There are only none values for {}".format(date))
        return None
    # Afternoon/evening minutes belong to the night *before* dateOfSleep.
    df['datetime'] = df.apply(
        lambda x: datetime.datetime.combine(
            date - dayToSubstract
            if time(12, 00) <= x['dateTime'].time() <= time(23, 59)
            else date,
            x['dateTime'].time()),
        axis=1)
    df.drop('dateTime', inplace=True, axis=1)
    return df
def loadFun(jsonData):
    """Return (date, total step count) for a dumped daily steps summary.

    :param jsonData: parsed JSON of a dumped steps.json file
    :return: tuple of (datetime.date, int total step count)
    """
    entries = jsonData['activities-steps']
    first = entries[0]
    date = datetime.datetime.strptime(first['dateTime'], "%Y-%m-%d").date()
    if len(entries) != 1:
        logger.info("There are {} steps data entries for {}".format(
            len(entries), date))
    return date, int(first['value'])
def info(ctx, **kwargs):
    """Read an AMF file at a given path and display information about the
    contents of the file."""
    ctx.load_args(**kwargs)
    if ctx.verbose:
        basename = os.path.basename(ctx.filepath)
        logger.info(f'info for \"{basename}\"')
    # Build the reader and immediately dump its info.
    AmfFileReader(ctx.filepath).log_info()
def __init__(self, dirpin, movpin, startcount, index):
    """Set up one stepper motor.

    :param dirpin: board pin controlling the direction
    :param movpin: board pin driving the steps
    :param startcount: initial step counter (coerced to int)
    :param index: identifier of this motor
    """
    self.dirpin = dirpin
    self.movpin = movpin
    self.stepcounter = int(startcount)
    self.index = index
    # Single format call; the emitted message is identical to the old
    # three-way string concatenation.
    logger.info(
        "Creating new stepper motor instance:\n"
        " Directional pin: {}\n"
        " Mov pin: {}\n"
        " Initial stepcount: {}\n".format(
            self.dirpin, self.movpin, self.stepcounter))
def input_log(parser: argparse.Namespace) -> None:
    """
    General information, extra arguments and paths... Assemble.

    :param parser: argparse.Namespace
        The parser containing all program arguments.
    :return: None
    """
    sections = ["\n", general_information(parser), extra_arguments(parser)]
    # Normalise Windows path separators before logging.
    logger.info("".join(sections).replace("\\", "/"))
def main(stdscr):
    """Top-level curses loop: initialise every subsystem, then alternate
    rendering and input dispatch until the mode parameter becomes -1."""
    logger.info(f"\nmain() at {datetime.datetime.now().isoformat()}")
    # Initialisation order matters: controls, resources, display, then model.
    for module in (controls, resources, display):
        module.init(stdscr)
    model.init()
    while get_param("mode") != -1:
        display.display()
        controls.dispatch_input()
def log_transform_for_info(self, transform):
    """Log one pipeline transform as an indented info line, flagging
    transforms that are marked as already applied."""
    applied_string = " [applied=\"true\"]" if transform.applied is True else ""
    logger.info(" {0}: {1}{2} ({3})".format(transform.type,
                                            transform.short_transform_id(),
                                            applied_string,
                                            transform.description))
def loadFun(jsonData):
    """Build a DataFrame of intraday heart-rate samples for one day.

    :param jsonData: parsed JSON of a dumped heartbeat.json file
    :return: DataFrame with 'value' and 'datetime' columns, or None when
        there is no intraday dataset
    """
    summaryData = jsonData['activities-heart']
    date = summaryData[0]['dateTime']
    if len(summaryData) != 1:
        logger.info("There are {} heart data entries for {}".format(
            len(summaryData), date))
    intradayData = jsonData['activities-heart-intraday']['dataset']
    if not intradayData:
        return None
    df = pd.read_json(json.dumps(intradayData))
    # Combine the summary date with each sample's time-of-day string.
    df['datetime'] = pd.to_datetime(date + ' ' + df['time'])
    return df.drop('time', axis=1)
def dumpDay(client, dumpDir, date):
    """Fetch and dump every Fitbit time series for one day.

    Steps are fetched first; if the day has no intraday step measures the
    whole day is assumed empty and nothing is written.

    :param client: Fitbit API client
    :param dumpDir: destination folder for the JSON dumps
    :param date: datetime.date of the day to dump
    :return: None (returns early when the day is empty)
    """
    steps_data = client.intraday_time_series('activities/steps', date)
    intradayData = steps_data['activities-steps-intraday']['dataset']
    if not intradayData:
        # BUG FIX: 'date' is a datetime.date here, not a path string, so the
        # old date.split('\\')[-1] raised AttributeError whenever this branch
        # ran.  Log the date itself.
        logger.info("No {} measures for {}. Skipping the rest too".format(
            'steps', date))
        return None
    dumpToFile("steps", dumpDir, date, steps_data)
    dumpToFile("sleep", dumpDir, date, client.get_sleep(date))
    # The remaining intraday resources all follow the same fetch/dump shape.
    for resource in ('calories', 'distance', 'floors', 'elevation'):
        dumpToFile(resource, dumpDir, date,
                   client.intraday_time_series('activities/' + resource, date))
    dumpToFile("heartbeat", dumpDir, date,
               client.intraday_time_series('activities/heart', date))
def dumpDay(client, dumpDir: Path, date):
    """Fetch and dump every Fitbit time series for one day.

    Steps are fetched first; if the day has no intraday step measures the
    whole day is assumed empty and nothing is written.

    :param client: Fitbit API client
    :param dumpDir: destination folder (Path) for the JSON dumps
    :param date: datetime.date of the day to dump
    :return: None (returns early when the day is empty)
    """
    steps_data = client.intraday_time_series('activities/steps', date)
    intradayData = steps_data['activities-steps-intraday']['dataset']
    if not intradayData:
        # BUG FIX: 'date' is a datetime.date here, not a path string, so the
        # old date.split('\\')[-1] raised AttributeError whenever this branch
        # ran.  Log the date itself.
        logger.info("No {} measures for {}. Skipping the rest too".format(
            'steps', date))
        return None
    dumpToFile("steps", dumpDir, date, steps_data)
    dumpToFile("sleep", dumpDir, date, client.get_sleep(date))
    # The remaining intraday resources all follow the same fetch/dump shape.
    for resource in ('calories', 'distance', 'floors', 'elevation'):
        dumpToFile(resource, dumpDir, date,
                   client.intraday_time_series('activities/' + resource, date))
    dumpToFile("heartbeat", dumpDir, date,
               client.intraday_time_series('activities/heart', date))
def loadFun(jsonData):
    """Extract (date, resting heart rate) from a dumped heart summary.

    :param jsonData: parsed JSON of a dumped heartbeat.json file
    :return: (datetime.date, resting heart rate), or None when the summary
        carries no restingHeartRate value
    """
    summaryData = jsonData['activities-heart']
    date = datetime.datetime.strptime(summaryData[0]['dateTime'],
                                      "%Y-%m-%d").date()
    if len(summaryData) != 1:
        logger.info("There are {} heart data entries for {}".format(
            len(summaryData), date))
    try:
        resting = summaryData[0]['value']['restingHeartRate']
    except KeyError:
        logger.info("No resting heart rate info for {}".format(date))
        return None
    return date, resting
def render(ctx, **kwargs):
    """Read an AMF file at a given path and output a ctlrender command that
    renders the pipeline described in the AMF file."""
    ctx.load_args(**kwargs)
    verbose = ctx.verbose
    if verbose:
        logger.info(f'render for \"{os.path.basename(ctx.filepath)}\"')
    # Index the CTL tree so relative paths can be resolved.
    traverser = TransformsTraverser(ctx.ctl_root_path)
    if verbose:
        traverser.log_ctls()
    reader = AmfFileReader(ctx.filepath)
    reader.parse()
    reader.log_render(traverser.transforms, traverser.root_path)
def scrapeFromTodayAndBackward(dumpDir: Path, client, limit, stop_if_already_dumped=True):
    """Scrape Fitbit data day by day, starting today and going backward.

    :param dumpDir: Path of the folder where the daily dumps are written
    :param client: API client passed through to dumpDay
    :param limit: maximum number of days considered (loop runs while count < limit)
    :param stop_if_already_dumped: if True, stop at the first already-dumped
        day instead of skipping over it
    """
    count = 1
    date = datetime.date.today()
    while count < limit:
        if previouslyDumped(dumpDir, date):
            logger.info("Already scraped {}".format(date.isoformat()))
            if stop_if_already_dumped:
                # CONSISTENCY FIX: use the logger instead of a bare print()
                # like every other message in this function.
                logger.info("Stopping the scraping")
                break
            date -= datetime.timedelta(days=1)
            continue
        logger.info("Scraping data for {}".format(date.isoformat()))
        dumpDay(client, dumpDir, date)
        date -= datetime.timedelta(days=1)
        count += 1
    # Always redump the last dumped day because we may have dumped it before
    # the day was finished.
    dumpDay(client, dumpDir, date)
def brachy_therapy(args: argparse.Namespace) -> None:
    """ Handler for positional feedback using image acquisition & processing """
    # --init only resets the Arduino and returns.
    if args.init:
        reset_arduino(args.comport, args.startsteps)
        return
    logger.success("Starting Brachy Therapy.\n")
    # Create Needle object
    board_controller = needle.Needle(args.comport, args.startsteps,
                                     args.sensitivity, args.invertx)
    # Call its movement function
    if args.manual:
        logger.info("Input type is MANUAL.")
        board_controller.manual_brachy_therapy(args)
    else:
        logger.info("Automated Control. Computer will output its predicted trajectory.")
        board_controller.automated_brachy_therapy(args)
def traverse(self):
    """Walk the CTL root folder and register every .ctl file whose
    <ACEStransformID> tag carries a known transform-type prefix."""
    ctx = click.get_current_context().obj
    if ctx.verbose:
        logger.info(f'traversing \"{self.root_path}\"...')
    for root, directories, filenames in os.walk(self.root_path, topdown=True):
        for filename in sorted(filenames):
            filepath = os.path.join(root, filename)
            if not filepath.endswith(".ctl"):
                continue
            # FIX: read via a context manager so the handle is closed even if
            # parsing raises (the old code never closed the file).
            with open(filepath, 'r') as ctl_file:
                ctl_string = ctl_file.read()
            transform_id = self.extract_tag(ctl_string, "ACEStransformID")
            if transform_id is None:
                if ctx.verbose is True:
                    logger.error("ERROR: no <ACEStransformID> found in {0}".format(filepath))
                continue
            ctl = CTL()
            ctl.transform_id = transform_id
            # FIX: escape the version-separator dot (v1.2); the bare '.'
            # matched any character.
            ctl.short_transform_id = re.sub(
                r'^urn:ampas:aces:transformId:v[0-9]\.[0-9]:', '',
                ctl.transform_id)
            ctl.description = self.extract_tag(ctl_string, "ACESuserName")
            ctl.relative_path = os.path.relpath(filepath, start=self.root_path)
            # (Removed a leftover debug hook that logged an empty line for one
            # hard-coded vendor-supplied IDT path.)
            # FIXME: this is still under discussion
            spec_prefixes = ("ODT", "IDT", "RRT", "LMT", "RRTODT", "ACEScsc",
                             "InvODT", "InvIDT", "InvRRT", "InvLMT",
                             "InvRRTODT")
            if ctl.short_transform_id.startswith(spec_prefixes):
                self.transforms.ctls.append(ctl)
            else:
                # Library/utility CTLs are skipped silently; anything else is
                # reported as an unexpected prefix.
                ignore_prefixes = ("ACESlib", "ACESutil", "utilities")
                if not ctl.short_transform_id.startswith(ignore_prefixes):
                    logger.error("SKIPPING: wrong prefix \"{0}\"in {1}".format(ctl.short_transform_id, filepath))
def log_transform_for_info(self, transform):
    """Log one pipeline transform for the `info` command.

    CDL transforms are rendered with their SOP/SAT payload plus the optional
    to/from CDL working-space transforms; every other type is rendered as
    "type: identifier (description)".  Transforms flagged applied="true" are
    marked as such.
    """
    applied_string = ""
    if transform.applied is True:
        applied_string = " [applied=\"true\"]"
    if transform.type == "CDL":
        cdl_string = ""
        if transform.SOP is not None:
            cdl_string = transform.SOP
        if transform.SAT is not None:
            cdl_string = cdl_string + transform.SAT
        logger.info(" {0}: {1}{2}".format(transform.type, cdl_string,
                                          applied_string))
        # BUG FIX: initialise to None (matching from_wcs_string below) so the
        # "toCdlWorkingSpace" line is only logged when one actually exists;
        # the old "" initialisation made the `is not None` check always true.
        to_wcs_string = None
        if transform.toCdlWorkingSpace_transform_id is not None:
            to_wcs_string = transform.short_transform_id_for_transform_id(
                transform.toCdlWorkingSpace_transform_id)
            if transform.toCdlWorkingSpace_description is not None:
                to_wcs_string = to_wcs_string + " (" + transform.toCdlWorkingSpace_description + ")"
        from_wcs_string = None
        if transform.fromCdlWorkingSpace_transform_id is not None:
            from_wcs_string = transform.short_transform_id_for_transform_id(
                transform.fromCdlWorkingSpace_transform_id)
            if transform.fromCdlWorkingSpace_description is not None:
                from_wcs_string = from_wcs_string + " (" + transform.fromCdlWorkingSpace_description + ")"
        if to_wcs_string is not None:
            logger.info(" toCdlWorkingSpace: {0}".format(to_wcs_string))
        if from_wcs_string is not None:
            logger.info(" fromCdlWorkingSpace: {0}".format(from_wcs_string))
    else:
        # Identifier preference: short transform id (optionally with file),
        # then bare file name, then a placeholder.
        if transform.transform_id is not None:
            identifier_string = transform.short_transform_id()
            if transform.file is not None:
                identifier_string = identifier_string + "/\"" + transform.file + "\""
        elif transform.file is not None:
            identifier_string = "\"" + transform.file + "\""
        else:
            identifier_string = "?"
        logger.info(" {0}: {1}{2} ({3})".format(transform.type,
                                                identifier_string,
                                                applied_string,
                                                transform.description))
def log_transform_for_render(self, transform, ctl_root_path):
    """Emit the ctlrender arguments for one transform, or a shell comment
    when the transform was already applied upstream."""
    if transform.applied is True:
        logger.info("# skipping {0} [applied=\"true\"]".format(
            transform.short_transform_id()))
        return
    logger.info(" -ctl {0}/{1} \\".format(ctl_root_path,
                                          transform.relative_path))
    logger.info(" -param1 aIn 1.0 \\")
def scrapeFromTodayAndBackward(dumpDir, client, limit, stop_if_already_dumped=True):
    """Scrape Fitbit data day by day, starting today and going backward.

    :param dumpDir: folder where the daily dumps are written
    :param client: API client passed through to dumpDay
    :param limit: maximum number of days considered (loop runs while count < limit)
    :param stop_if_already_dumped: if True, stop at the first day that was
        already dumped instead of skipping over it
    """
    count = 1
    date = datetime.date.today()
    while count < limit:
        if previouslyDumped(dumpDir, date):
            logger.info("Already scraped {}".format(date.isoformat()))
            if stop_if_already_dumped:
                # CONSISTENCY FIX: use the logger instead of a bare print()
                # like every other message in this function.
                logger.info("Stopping the scraping")
                break
            date -= datetime.timedelta(days=1)
            continue
        logger.info("Scraping data for {}".format(date.isoformat()))
        dumpDay(client, dumpDir, date)
        date -= datetime.timedelta(days=1)
        count += 1
    # Always redump the last dumped day because we may have dumped it before
    # the day was finished.  (A commented-out duplicate of the loop above was
    # removed as dead code.)
    dumpDay(client, dumpDir, date)
def ctls(ctx, **kwargs):
    """Parse a folder with CTL files and print mapping between transformId and filepath."""
    ctx.load_args(**kwargs)
    traverser = TransformsTraverser(ctx.ctl_root_path)
    if ctx.relativectlpath is None:
        # No specific path requested: dump the whole mapping.
        ctx.verbose = True  # show errors while traversing
        traverser.log_ctl_mappings()
        return
    transformId = traverser.transforms.transform_id_for_relative_path(
        ctx.relativectlpath)
    if ctx.description:
        if transformId is None:
            if ctx.verbose is True:
                logger.error(
                    "Couldn't find description for relative path {0}".
                    format(ctx.relativectlpath))
            exit(203)
        description = traverser.transforms.description_for_relative_path(
            ctx.relativectlpath)
        logger.info(description)
    else:
        if transformId is None:
            if ctx.verbose is True:
                logger.error(
                    "Couldn't find transformId for relative path {0}".
                    format(ctx.relativectlpath))
            exit(203)
        logger.info(transformId)
def _loadData(dumpDir, dataType, loadFun):
    """
    Helper method. For the data-dump folder there should be one folder per
    year, and then one sub-folder for each day, in which the different files
    are generated (e.g. sleep.json, steps.json)
    :param dumpDir: the folder where the date has been dumped
    :param dataType: the type of data to be loaded, equivalent to the name of
        the corresponding file
    :param loadFun: function defining the procedure for the data loading
    :return: a list of objects as defined in loadFun
    """
    data = []
    # First level should be the year
    yearDirs = getAllSubDirsNamesOf(dumpDir)
    # Second level should be the date
    for year in yearDirs:
        for date in getAllSubDirsNamesOf(year):
            # Dumped files are named <dataType>.json
            filename = os.path.join(date, dataType) + '.json'
            try:
                with open(filename) as fileData:
                    jsonData = json.load(fileData)
                dayData = loadFun(jsonData)
            except FileNotFoundError:
                logger.warning(
                    "{} not found. Might be cause last scraped day.".format(
                        filename))
                continue
            if dayData is None:
                # PORTABILITY FIX: os.path.basename instead of splitting on a
                # hard-coded Windows '\\' separator, which broke on POSIX.
                logger.info("No {} measures for {}".format(
                    dataType, os.path.basename(date)))
            else:
                data.append(dayData)
    return data
def scrapeFromDateOnward(startDate, dumpDir, client):
    """Scrape every day from startDate (or the last already-dumped day) up
    to, but not including, today.

    :param startDate: "YYYY-MM-DD" string of the first day to consider
    :param dumpDir: folder where the daily dumps are written
    :param client: API client passed through to dumpDay
    """
    one_day = datetime.timedelta(days=1)
    date = datetime.datetime.strptime(startDate, "%Y-%m-%d").date()
    todayDate = datetime.date.today()
    # Fast-forward over days that were dumped on an earlier run.
    while previouslyDumped(dumpDir, date):
        logger.info("Already scraped {}".format(
            datetime.datetime.strftime(date, "%Y-%m-%d")))
        date += one_day
    # Back up one day: the newest dump may predate the end of that day.
    # NOTE(review): if startDate itself was never dumped this also moves one
    # day *before* startDate — confirm that is intended.
    date -= one_day
    logger.info("Will RE-Scrape data for {}".format(
        datetime.datetime.strftime(date, "%Y-%m-%d")))
    while date < todayDate:
        logger.info("Scraping data for {}".format(
            datetime.datetime.strftime(date, "%Y-%m-%d")))
        dumpDay(client, dumpDir, date)
        date += one_day
def scrapeFromDateOnward(startDate, dumpDir: Path, client):
    """Scrape every day from startDate (or the last day already on disk) up
    to, but not including, today.

    :param startDate: "YYYY-MM-DD" string of the first day to consider
    :param dumpDir: Path of the folder holding the daily dumps
    :param client: API client handed through to dumpDay
    """
    step = datetime.timedelta(days=1)
    date = datetime.datetime.strptime(startDate, "%Y-%m-%d").date()
    todayDate = datetime.date.today()
    # Skip forward over everything that is already on disk.
    while previouslyDumped(dumpDir, date):
        logger.info("Already scraped {}".format(
            datetime.datetime.strftime(date, "%Y-%m-%d")))
        date += step
    # Re-dump the most recent day: it may have been written before midnight.
    date -= step
    logger.info("Will RE-Scrape data for {}".format(
        datetime.datetime.strftime(date, "%Y-%m-%d")))
    while date < todayDate:
        logger.info("Scraping data for {}".format(
            datetime.datetime.strftime(date, "%Y-%m-%d")))
        dumpDay(client, dumpDir, date)
        date += step
def get_input_method(self) -> None:
    """ Setup controls. Support for controllers and keyboard. """
    pygame.init()
    has_controller = pygame.joystick.get_count() > 0
    if has_controller:
        self.joystick = pygame.joystick.Joystick(0)
        self.joystick.init()
        logger.success("Controller connected : " + self.joystick.get_name())
        logger.info(
            "While holding the left stick pointed towards a direction, press the A button to confirm your choice.\n"
        )
    else:
        logger.info("A Keyboard is connected.")
        logger.info(
            "While pressing the arrowkeys to a desired direction, press the Return (Enter) Key to confirm your choice.\n"
        )
def __init__(self, comport_arduino, startsteps, sensitivity, invertx: bool,
             run_test: str):
    """Set up the needle controller: steering stepper motors on an Arduino
    plus a FESTO linear drive behind a LabJack T7.

    :param comport_arduino: serial port of the Arduino (Firmata)
    :param startsteps: initial step count handed to the stepper motors
    :param sensitivity: steering sensitivity; values > 1 are reset to 0.5
    :param invertx: whether to mirror the x axis of the steering input
    :param run_test: name of a predefined test (from config.ini), or falsy
    """
    # Handle input parameters
    self.port = comport_arduino
    self.startcount = startsteps
    self.init_pos = 0
    self.sensitivity = float(sensitivity)
    if self.sensitivity > 1:
        # Out-of-range sensitivity falls back to a default of 0.5.
        self.sensitivity = 0.5
        logger.info("Invalid sensitivity entered: new value = {}".format(
            self.sensitivity))
    self.invert_x_axis = invertx
    self.test = run_test
    # Setup Arduino and Stepper Motors
    self.board = pyfirmata.Arduino(self.port)
    time.sleep(1)  # give the Firmata connection a moment to come up
    self.motors = []
    self.default_motor_setup()
    # Direction index (0-7) -> indices of the motors that pull for that
    # direction.
    self.dirpull = {
        0: [0, 1],
        1: [0],
        2: [0, 3],
        3: [3],
        4: [2, 3],
        5: [2],
        6: [1, 2],
        7: [1],
    }
    # Direction index (0-7) -> indices of the motors that push (release) for
    # that direction.
    self.dirpush = {
        0: [2, 3],
        1: [2],
        2: [1, 2],
        3: [1],
        4: [0, 1],
        5: [0],
        6: [0, 3],
        7: [3],
    }
    # motor vectors that code for the directions in the x-y space of the needle
    self.motorvec = {
        0: np.array([1, 1]),
        1: np.array([-1, 1]),
        2: np.array([-1, -1]),
        3: np.array([1, -1]),
    }
    """ FESTO section !!! to use FESTO: first upload "FESTO_controlv3.lua" to the T7 with Kipling 3 software then close the connection with Kipling 3 software """
    # config
    self.config_object = ConfigParser()
    self.config_object.read('config.ini')
    festo = self.config_object["FESTO"]
    self.init_FESTO_pos = int(festo["initial_pos"])
    self.init_FESTO_speed = float(festo["initial_speed"])
    self.FESTO_stepsize = int(festo["step_size"])
    # LabJack T7 register addresses used by the LUA program.
    self.AIN0addr = 0  # position (0-10V)
    self.DAC0addr = 1000  # speed ref.signal (2.5V)
    self.DAC1addr = 1002  # speed out signal (-2.5 - 2.5V)
    self.initialpos_addr = 46000
    self.targetpos_addr = 46002
    self.speed_addr = 46004
    self.enable_addr = 46008
    self.f_datatype = ljm.constants.FLOAT32
    self.i_datatype = ljm.constants.UINT16
    self.offsetV = 2.5  # (offsetV+2.5V on DAC1 = 25 mm/s)
    self.offV = 0.0299544557929039  # low voltage that T7 can certainly output
    self.maxpos = 50  # mm
    self.minpos = 3  # mm
    self.currentpos = self.init_FESTO_pos
    # The FESTO drive is optional: failure to open the handle is logged and
    # the controller continues without it.
    try:
        FESTO_handle = ljm.openS("ANY", "USB", "ANY")
    except ljm.LJMError as error:
        FESTO_handle = None
        logger.error(
            "No FESTO_handle: thus not able to use the FESTO functions \n Error presented: "
            + str(error))
    if FESTO_handle is not None:
        self.FESTO_handle = FESTO_handle
        # Set initial positions (keep target pos at init_FESTO_pos at the start)
        ljm.eWriteAddress(self.FESTO_handle, self.initialpos_addr,
                          self.f_datatype, self.init_FESTO_pos)
        ljm.eWriteAddress(self.FESTO_handle, self.targetpos_addr,
                          self.f_datatype, self.init_FESTO_pos)
        # Set speed
        ljm.eWriteAddress(self.FESTO_handle, self.speed_addr,
                          self.f_datatype, self.init_FESTO_speed)
        logger.success(
            "FESTO connected, handle is available, init is set, current position ="
            + str(
                ljm.eReadAddress(self.FESTO_handle, self.AIN0addr,
                                 self.f_datatype)))
        time.sleep(0.3)
        # Enable init LUA program
        ljm.eWriteAddress(self.FESTO_handle, self.enable_addr,
                          self.f_datatype, 1)
        logger.success("FESTO moving to initial position")
    else:
        logger.error(
            "Something went wrong when creating a FESTO Handle. Check if all adresses are correct in needle.py"
        )
        self.FESTO_handle = None
def manual_brachy_therapy(self, args: argparse.Namespace) -> None:
    """Main Program Loop for MANUAL Brachy Therapy, with possibility of
    needle tracking by two cameras.

    Parameters
    ----------
    args : argparse.Namespace

    Returns
    -------
    None
    """
    # Create Class instances of controller and image acquisition
    input_method = Controller(self.invert_x_axis)
    image_acquisition = ImageAcquisition(args.fps, args.camtop,
                                         args.camfront, args.nofeed)
    # Queues allow for communication between threads/processes. LIFO means the most recent image/input will be used
    input_feed = LifoQueue(
        maxsize=0
    )  # Create LIFO queue of infinite size that reads controller input
    needle_pos_feed = multiprocessing.Queue(
        maxsize=0)  # Create Queue for communication of needle tip pos/ori
    # Create and start Process for Image Acq/Proc
    # NOTE(review): process_1 is never start()ed in this method — confirm the
    # image-acquisition process is launched elsewhere.
    process_1 = multiprocessing.Process(
        target=image_acquisition.retrieve_current_image,
        args=(needle_pos_feed, ))
    process_1.name = "ImageAcquiProc_process"
    # Start pygame to allow controller and keyboard inputs
    pygame.init()
    logger.success("Ready to receive inputs.\n")
    while True:
        # Check for needle coordinates in the Multiprocessing Queue
        if not needle_pos_feed.empty():
            tip_position, tip_ori = needle_pos_feed.get()
            # Sentinel value found, exit program
            if tip_position is None and tip_ori is None:
                break
            logger.info(
                "Needletip is currently at {}".format(tip_position))
            logger.info(
                "Needle orientation is currently {}".format(tip_ori))
        # TODO: check and finish use of image proc in manual_brachy function
        # Retrieve any user inputs
        events = pygame.event.get()
        input_method.get_direction_from_pygame_events(input_feed, events)
        direction = None
        if not input_feed.empty():
            direction = input_feed.get()
            # Sentinel value was put in Queue, exit program
            if direction is None:
                break
        # Check if faulty input and try again
        if direction is None:
            continue
        if direction.direction == -1:
            continue
        # TODO: update to synced_motorV2 way of steering
        # Move the needle:
        # NOTE(review): 100 appears to be the "re-initialise" command code —
        # confirm against the Controller mapping.
        if direction.direction == 100:
            logger.success("Init called: moving to midpoint then to zero")
            self.initial_position()
        else:
            logger.success("Moving to : {}".format(
                input_method.dir_to_text(direction.direction)))
            self.move_to_dir_sync(direction)
        logger.success("\nReady to receive inputs.\n")
    # Neatly exiting main program loop
    pygame.quit()
    image_acquisition.is_running = False
    input_method.is_running = False
    process_1.terminate()
    process_1.join()
    print("Finished Program Execution.")
def log_render(self, ctl_transforms, ctl_root_path):
    """Print (via the logger) a bash script that runs ctlrender over the AMF
    pipeline: header comments, a usage check, then one -ctl argument per
    not-yet-applied transform.

    :param ctl_transforms: Transforms registry used to resolve relative CTL paths
    :param ctl_root_path: root folder of the CTL tree
    """
    self.aces_metadata_file.pipeline.resolve_relative_paths(ctl_transforms)
    # Script header.
    logger.info("#!/bin/bash")
    logger.info("")
    logger.info("# {0}".format(self.filepath))
    logger.info("# created by {0} {1}".format(amfutil_toolname_string,
                                              amfutil_toolversion_string))
    logger.info("# transforms:")
    for transform in self.aces_metadata_file.pipeline.input_transforms:
        logger.info("# {0}: {1} ({2})".format(
            transform.type, transform.short_transform_id(),
            transform.description))
    for transform in self.aces_metadata_file.pipeline.look_transforms:
        logger.info("# {0}: {1} ({2})".format(
            transform.type, transform.short_transform_id(),
            transform.description))
    for transform in self.aces_metadata_file.pipeline.output_transforms:
        logger.info("# {0}: {1} ({2})".format(
            transform.type, transform.short_transform_id(),
            transform.description))
    logger.info("\nCTLRENDER=`which ctlrender`\n")
    # Usage check emitted into the generated script.
    logger.info("if [ -z \"$1\" ] || [ -z \"$2\" ]")
    logger.info("then")
    logger.info(
        " echo \"Usage: [script name] path/to/input-file.[tiff|dpx|exr] path/to/output-file.[tiff|dpx|exr]\""
    )
    logger.info(" echo")
    logger.info(" exit 200")
    logger.info("fi\n")
    logger.info("INPUTIMAGEPATH=$1")
    logger.info("OUTPUTIMAGEPATH=$2\n")
    logger.info("export CTL_MODULE_PATH=\"{0}/utilities/\"\n".format(
        ctl_root_path))
    logger.info("$CTLRENDER \\")
    # One -ctl argument per transform that is not already baked in.
    for transform in self.aces_metadata_file.pipeline.input_transforms:
        if transform.applied is False:
            self.log_transform_for_render(transform, ctl_root_path)
    for transform in self.aces_metadata_file.pipeline.look_transforms:
        if transform.applied is False:
            self.log_transform_for_render(transform, ctl_root_path)
    for transform in self.aces_metadata_file.pipeline.output_transforms:
        if transform.applied is False:
            self.log_transform_for_render(transform, ctl_root_path)
    logger.info(" -force \\")
    logger.info(" \"$INPUTIMAGEPATH\" \\")
    logger.info(" \"$OUTPUTIMAGEPATH\"\n")
    # Trailing "# skipping ..." comments for the applied transforms.
    # NOTE(review): look_transforms are not revisited here — confirm that is
    # intended.
    for transform in self.aces_metadata_file.pipeline.input_transforms:
        if transform.applied is True:
            self.log_transform_for_render(transform, ctl_root_path)
    for transform in self.aces_metadata_file.pipeline.output_transforms:
        if transform.applied is True:
            self.log_transform_for_render(transform, ctl_root_path)
def log_ctls(self):
    """Log every discovered CTL as " found <short id>: <relative path>"."""
    for ctl in self.transforms.ctls:
        line = " found {0}: {1}".format(ctl.short_transform_id,
                                        ctl.relative_path)
        logger.info(line)
def run_predefined_test(self, test, input_method):
    """
    Running predefined test
    :param test: the name of the test to run, passed along by --test and found in config.ini
    :param input_method: the Controller object made in move_freely, methods of this Controller are used in the function.
    :return: None, the program exits using sys.exit()
    """
    logger.success("STARTING PREDEFINED TEST")
    # Load command arrays
    number_of_positions = int(test["number_of_positions"])
    # DEDUPLICATION: the four hand-unrolled per-motor blocks below were
    # collapsed into one loop; the logged messages are unchanged.
    motor_tests = [
        ast.literal_eval(test["motor0test"]),
        ast.literal_eval(test["motor1test"]),
        ast.literal_eval(test["motor2test"]),
        ast.literal_eval(test["motor3test"]),
    ]
    coords = ast.literal_eval(test["coords"])
    sleep = ast.literal_eval(test["sleep"])
    logger.info("Running {} positions".format(number_of_positions))
    for position in range(number_of_positions):
        # Run motors with specified steps (negative means backward).
        for idx, steps_list in enumerate(motor_tests):
            steps = steps_list[position]
            logger.info("Motor {} took {} steps".format(idx, steps))
            if steps < 0:
                # NOTE(review): the step count is passed through negative, as
                # in the original code — confirm run_backward expects that.
                self.motors[idx].run_backward(steps)
            else:
                self.motors[idx].run_forward(steps)
        # Go to coordinates with move_to_dir_sync
        test_x = coords[position][0] / 100
        test_y = coords[position][1] / 100
        gdo_test = input_method.analog_stick_to_dir(test_x, test_y)
        # Check if the motors should move to a position
        if test_x != 0 and test_y != 0:
            logger.info("Motors are moving to position ({}, {})".format(
                test_x, test_y))
            self.move_to_dir_syncv2(gdo_test)
        # Then sleep (a configured 0 means "wait for the operator").
        if sleep[position] == 0:
            input("Press Enter to continue...")
        else:
            logger.info("Sleeping for {} seconds...".format(sleep[position]))
            time.sleep(sleep[position])
    # Neatly exiting main program loop
    logger.success("ENDING PREDEFINED TEST")
    pygame.quit()
    logger.success("Exiting Program...")
    sys.exit()
def log_info(self):
    """Log the AMF file's metadata, then every pipeline transform.

    Compact mode prints a single "path: uuid" line; otherwise the full info
    block is printed before the transforms.
    """
    ctx = click.get_current_context().obj
    amf_info = self.aces_metadata_file.info
    if ctx.compact:
        logger.info("{0}: {1}".format(self.filepath, amf_info.uuid))
    else:
        logger.info("{0}:".format(self.filepath))
        logger.info(" description: {0}".format(amf_info.description))
        logger.info(" uuid: {0}".format(amf_info.uuid))
        logger.info(" creationDateTime: {0}".format(
            amf_info.creation_date_time))
        logger.info(" modificationDateTime: {0}".format(
            amf_info.modification_date_time))
    pipeline = self.aces_metadata_file.pipeline
    for transform in (pipeline.input_transforms + pipeline.look_transforms +
                      pipeline.output_transforms):
        self.log_transform_for_info(transform)
def parse(self):
    """parsing the AMF XML file and building the AcesMetadataFile for the aces_metadata_file member variable"""
    ctx = click.get_current_context().obj
    if ctx.verbose:
        logger.info(f'parsing \"{os.path.basename(self.filepath)}\"...')
    tree = etree.parse(self.filepath)
    tree = strip_ns_prefix(
        tree)  # otherwise we need to specify fully prefix'ed element names
    acesmetadatafile_element = tree.getroot()
    self.aces_metadata_file = AcesMetadataFile(None)
    # Walk the two top-level sections: <amfInfo> (file metadata) and
    # <pipeline> (the transform chain).
    for section_element in acesmetadatafile_element.getchildren():
        if section_element.tag == 'amfInfo':
            if ctx.verbose:
                logger.info(f' extracting <amfInfo>...')
            for info_element in section_element.getchildren():
                if info_element.tag == 'description':
                    self.aces_metadata_file.info.description = info_element.text
                if info_element.tag == 'uuid':
                    self.aces_metadata_file.info.uuid = info_element.text
                if info_element.tag == 'dateTime':
                    if ctx.verbose:
                        logger.info(f' extracting <dateTime>...')
                    for date_element in info_element.getchildren():
                        if date_element.tag == 'creationDateTime':
                            self.aces_metadata_file.info.creation_date_time = date_element.text
                        if date_element.tag == 'modificationDateTime':
                            self.aces_metadata_file.info.modification_date_time = date_element.text
        if section_element.tag == 'pipeline':
            if ctx.verbose:
                logger.info(f' extracting <pipeline>...')
            for pipeline_element in section_element.getchildren():
                # ---- <inputTransform> --------------------------------------
                if pipeline_element.tag == 'inputTransform':
                    # regularIDT stays True unless the element contains
                    # inverse ODT/RRT children, which are routed to the
                    # *output* transform list instead.
                    regularIDT = True
                    if ctx.verbose:
                        logger.info(f' extracting <inputTransform>...')
                    transform = Transform()
                    # NOTE(review): this inner tag check can never be true for
                    # an 'inputTransform' element — looks like copy-paste
                    # residue; kept as-is.
                    if pipeline_element.tag == 'transformId':
                        transform.type = '???'
                        transform.transform_id = pipeline_element.text
                    else:
                        if pipeline_element.tag == 'inputTransform':
                            applied_attribute = pipeline_element.get(
                                'applied')
                            if applied_attribute == 'true':
                                transform.applied = True
                            for idt_element in pipeline_element.getchildren():
                                if idt_element.tag == 'transformId' or idt_element.tag == 'transformID':
                                    transform.transform_id = idt_element.text
                                    transform.type = 'IDT'
                                if idt_element.tag == 'description':
                                    transform.description = idt_element.text
                                # TODO: find out if 'transformId' (introduced in examples in Jan 2020) or 'transformID'
                                if idt_element.tag == 'inverseOutputDeviceTransform':
                                    transform = Transform()
                                    for invodt_element in idt_element.getchildren(
                                    ):
                                        if invodt_element.tag == 'transformId' or invodt_element.tag == 'transformID':
                                            transform.transform_id = invodt_element.text
                                            transform.type = 'InvODT'
                                        if invodt_element.tag == 'description':
                                            transform.description = invodt_element.text
                                    self.aces_metadata_file.pipeline.output_transforms.append(
                                        transform)
                                    regularIDT = False
                                if idt_element.tag == 'inverseReferenceRenderingTransform':
                                    transform = Transform()
                                    for invrrt_element in idt_element.getchildren(
                                    ):
                                        if invrrt_element.tag == 'transformId' or invrrt_element.tag == 'transformID':
                                            transform.transform_id = invrrt_element.text
                                            transform.type = 'InvRRT'
                                        if invrrt_element.tag == 'description':
                                            transform.description = invrrt_element.text
                                    self.aces_metadata_file.pipeline.output_transforms.append(
                                        transform)
                                    regularIDT = False
                    if regularIDT:
                        self.aces_metadata_file.pipeline.input_transforms.append(
                            transform)
                # ---- <lookTransform> ---------------------------------------
                if pipeline_element.tag == 'lookTransform':
                    if ctx.verbose:
                        logger.info(f' extracting <lookTransform>...')
                    transform = None
                    for lmt_element in pipeline_element.getchildren():
                        if lmt_element.tag == 'transformId' or lmt_element.tag == 'description' or lmt_element.tag == 'hash':
                            # LMT
                            if transform == None:
                                transform = Transform()
                                transform.type = 'LMT'
                            if lmt_element.tag == 'transformId':
                                transform.transform_id = lmt_element.text
                            if lmt_element.tag == 'description':
                                transform.description = lmt_element.text
                            if lmt_element.tag == 'hash':
                                transform.hash_string = lmt_element.text
                    if transform is not None and transform.type == 'LMT':
                        self.aces_metadata_file.pipeline.look_transforms.append(
                            transform)
                # TODO: implement
                # ---- <outputTransform> -------------------------------------
                if pipeline_element.tag == 'outputTransform':
                    if ctx.verbose:
                        logger.info(f' extracting <outputTransform>...')
                    transform = None
                    for output_element in pipeline_element.getchildren():
                        if output_element.tag == 'transformId' or output_element.tag == 'description' or output_element.tag == 'hash':
                            # HDR: RRTODT
                            if transform == None:
                                transform = Transform()
                                transform.type = 'RRTODT'
                            if output_element.tag == 'transformId':
                                transform.transform_id = output_element.text
                            if output_element.tag == 'description':
                                transform.description = output_element.text
                            if output_element.tag == 'hash':
                                transform.hash_string = output_element.text
                        else:
                            # SDR: RRT + ODT
                            transform = Transform()
                            if output_element.tag == 'referenceRenderingTransform':
                                transform.type = 'RRT'
                            elif output_element.tag == 'outputDeviceTransform':
                                transform.type = 'ODT'
                            for rrt_element in output_element.getchildren(
                            ):
                                if rrt_element.tag == 'description':
                                    transform.description = rrt_element.text
                                if rrt_element.tag == 'transformId':
                                    transform.transform_id = rrt_element.text
                            self.aces_metadata_file.pipeline.output_transforms.append(
                                transform)
                    # An HDR (RRTODT) transform is appended once, after the
                    # loop over the element's children.
                    if transform.type == 'RRTODT':
                        self.aces_metadata_file.pipeline.output_transforms.append(
                            transform)
def log_ctl_mappings(self):
    """Log one "relative path: short id (description)" line per known CTL."""
    for ctl in self.transforms.ctls:
        line = "{0}: {1} ({2})".format(ctl.relative_path,
                                       ctl.short_transform_id,
                                       ctl.description)
        logger.info(line)
def cross_validation(): Processor().cross_validation() def predict(): Processor(Model.load()).predict() if __name__ == '__main__': logger = logger.get_logger('main') args = sys.argv try: task = args[1] except IndexError: logger.info('Please set task in args.') sys.exit() if task == 'prepare_data': logger.info('Execute task: prepare data.') prepare_data() elif task == 'preprocess': logger.info('Execute task: preprocess.') preprocess() elif task == 'cross_validation': logger.info('Execute task: cross_validation.') cross_validation() elif task == 'train': logger.info('Execute task: train.') train() elif task == 'predict': logger.info('Execute task: predict.')