def execute_runbook(self):
    """Execute the job runtime and perform runtime operations
    (stream upload / job status changes).

    Drives the full job lifecycle: reports the job as running (with start
    telemetry), starts the runbook subprocess, streams its output, honors a
    pending stop request, then reports the terminal status
    (stopped / completed / failed).
    """
    # set status to running
    tracer.log_sandbox_job_started(self.job_id, self.runbook.definition_kind_str, self.runbook_data.name,
                                   self.runbook_data.runbook_version_id)

    # Strip fractional seconds and the UTC offset before parsing.
    # NOTE(review): splitting on "+" only handles a positive offset; a
    # "-HH:MM" offset would corrupt the parse — confirm the upstream
    # timestamp format.
    start_request_time = time.strptime(self.job_data.start_request_time.split("+")[0].split(".")[0],
                                       "%Y-%m-%dT%H:%M:%S")
    time_taken_to_start_td = datetime.utcnow() - datetime.fromtimestamp(time.mktime(start_request_time))
    # total_seconds() is the stdlib equivalent of the manual
    # days/seconds/microseconds arithmetic it replaces.
    time_taken_to_start_in_seconds = time_taken_to_start_td.total_seconds()

    tracer.log_etw_job_status_changed_running(self.job_data.subscription_id, self.job_data.account_id,
                                              self.job_data.account_name, self.sandbox_id,
                                              self.job_data.job_id, self.runbook.definition_kind_str,
                                              self.runbook_data.name, time_taken_to_start_in_seconds)
    tracer.log_etw_user_requested_start_or_resume(self.job_data.account_id, self.sandbox_id,
                                                  self.job_data.job_id, self.runbook_data.name,
                                                  self.job_data.account_name, time_taken_to_start_in_seconds,
                                                  self.runbook.definition_kind_str)
    self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.RUNNING, False,
                                    self.get_job_extended_properties())

    # create runbook subprocess
    self.runtime.start_runbook_subprocess()

    # monitor runbook output for streams
    stream_handler = StreamHandler(self.job_data, self.runtime.runbook_subprocess, self.jrds_client)
    stream_handler.daemon = True
    stream_handler.start()

    # Wait for runbook execution to complete, polling the control queue for
    # pending actions. is_alive() replaces isAlive(), which was removed in
    # Python 3.9.
    pending_action = None
    while stream_handler.is_alive() or self.runtime.runbook_subprocess.poll() is None:
        try:
            pending_action = self.msg_queue.get(block=False)
            tracer.log_sandbox_job_pending_action_detected(self.job_id, pending_action)
            if pending_action == pendingactions.STOP_ENUM_INDEX:
                self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.STOPPING, False)
                self.runtime.kill_runbook_subprocess()
                break
        except Queue.Empty:
            pass
        time.sleep(0.2)

    # Handle terminal state changes. poll() is hoisted so the exit code is
    # read once instead of twice.
    exit_code = self.runtime.runbook_subprocess.poll()
    if pending_action == pendingactions.STOP_ENUM_INDEX:
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.STOPPED, True,
                                        self.get_job_extended_properties())
        tracer.log_etw_job_status_changed_stopped(self.job_data.subscription_id, self.job_data.account_id,
                                                  self.job_data.account_name, self.sandbox_id,
                                                  self.job_data.job_id, self.runbook.definition_kind_str,
                                                  self.runbook_data.name)
    elif exit_code is not None and exit_code == EXIT_SUCCESS:
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.COMPLETED, True,
                                        self.get_job_extended_properties())
        tracer.log_etw_job_status_changed_completed(self.job_data.subscription_id, self.job_data.account_id,
                                                    self.job_data.account_name, self.sandbox_id,
                                                    self.job_data.job_id, self.runbook.definition_kind_str,
                                                    self.runbook_data.name)
    else:
        full_error_output = self.get_full_stderr_content(self.runtime.runbook_subprocess.stderr)
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.FAILED, True,
                                        self.get_job_extended_properties(), exception=full_error_output)
        # NOTE(review): this branch passes self.job_id to the tracer where
        # the other branches pass self.job_data.job_id — confirm they are
        # the same value.
        tracer.log_etw_job_status_changed_failed(self.job_data.subscription_id, self.job_data.account_id,
                                                 self.job_data.account_name, self.sandbox_id, self.job_id,
                                                 self.runbook.definition_kind_str, self.runbook_data.name,
                                                 self.runbook_data.runbook_version_id, full_error_output)
def setUp(self):
    """Prepare fixture file paths and the StreamHandler under test."""
    fixture_paths = {
        # plain-text file for exercising reading of a 'txt' file
        'txt_file': '/home/blaxeep/Workspace/test/simpy/test.txt',
        # binary file laid out the way the handler expects
        'bin_file': '/home/blaxeep/Workspace/vsam/wc_day44',
        # binary file whose layout deviates from the expected structure
        'bad_bin_file': '/home/blaxeep/Workspace/test/bad_bin',
    }
    for attr_name, path in fixture_paths.items():
        setattr(self, attr_name, path)
    self.handler = StreamHandler(self.bin_file)
def execute_runbook(self):
    """Execute the job runtime and perform runtime operations
    (stream upload / job status changes).

    Marks the job running, starts the runbook subprocess, streams its
    output, honors a pending stop request, and reports the terminal status
    (stopped / completed / failed).
    """
    # set status to running
    tracer.log_debug_trace("Starting runbook.")
    self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.RUNNING, False)

    # create runbook subprocess
    self.runtime.start_runbook_subprocess()

    # monitor runbook output for streams
    stream_handler = StreamHandler(self.job_data, self.runtime.runbook_subprocess, self.jrds_client)
    stream_handler.daemon = True
    stream_handler.start()

    # Wait for runbook execution to complete, polling the control queue for
    # pending actions. is_alive() replaces isAlive(), which was removed in
    # Python 3.9.
    pending_action = None
    while stream_handler.is_alive() or self.runtime.runbook_subprocess.poll() is None:
        try:
            pending_action = self.msg_queue.get(block=False)
            tracer.log_debug_trace("Pending action detected. " + str(pending_action))
            if pending_action == pendingactions.STOP_ENUM_INDEX:
                self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.STOPPING, False)
                self.runtime.kill_runbook_subprocess()
                break
        except Queue.Empty:
            pass
        time.sleep(0.2)

    # Handle terminal state changes. poll() is hoisted so the exit code is
    # read once instead of twice.
    exit_code = self.runtime.runbook_subprocess.poll()
    if pending_action == pendingactions.STOP_ENUM_INDEX:
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.STOPPED, True)
        tracer.log_debug_trace("Completed - Stopped")
    elif exit_code is not None and exit_code == EXIT_SUCCESS:
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.COMPLETED, True)
        tracer.log_debug_trace("Completed - Without error")
    else:
        full_error_output = self.get_full_stderr_content(self.runtime.runbook_subprocess.stderr)
        self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.FAILED, True,
                                        exception=full_error_output)
        tracer.log_debug_trace("Completed - With error")
def main():
    """Entry point: build the simulation environment, run it to maxTime,
    and print the results."""
    params = assign_params()
    maxTime, maxNodes, maxCoords, dataRate, datapath = params
    sh = StreamHandler(datapath)
    initialize()
    net = Network()
    stats = NetStats()  # network-wide statistics collector
    # NOTE(review): node_stats is never referenced afterwards — presumably
    # NodeStats() registers itself globally; confirm, otherwise delete.
    node_stats = NodeStats()  # per-node statistics collector
    env = Environment(sh, net, maxNodes, maxCoords, dataRate)
    activate(env, env.run())
    simulate(until=maxTime)
    # The NetResults instance itself was never used; the call is kept in
    # case construction has side effects (unused binding `r` removed).
    NetResults(stats)
    print_results(env, params, stats)
scale_factor = 0.01
bound = 4


# Callback invoked by the stream for each incoming sample window: it turns
# the model's four directional outputs into a velocity, moves the cursor,
# and redraws the bounded arena.
def update(data):
    global pos_x, pos_y
    channels = data.channels_data
    prediction = model(torch.from_numpy(channels).float())
    velocity = scale_factor * np.asarray([
        float(prediction[2] - prediction[0]),
        float(prediction[1] - prediction[3]),
    ])
    pos_x += velocity[0]
    pos_y += velocity[1]
    # Clamp the position to the [-bound, bound] square.
    pos_x = max(-bound, min(bound, pos_x))
    pos_y = max(-bound, min(bound, pos_y))
    # Redraw: the square boundary, then the cursor at its new position.
    ax.cla()
    ax.plot([-bound, bound, bound, -bound, -bound],
            [-bound, -bound, bound, bound, -bound])
    ax.add_artist(plt.Circle((pos_x, pos_y), 0.1, color='blue'))
    fig.canvas.draw()


# Begin streaming with the callback above.
stream = StreamHandler(-1, update)
stream.start_stream()
# quit if (user_input == "q"): break # ping bci if (user_input == "p"): test = OpenBCICyton(daisy="True") # record elif (user_input == "r"): row_to_modify = int( input( "Row to record to? [0 - Left] [1 - Up] [2 - Right] [3 - Down]") ) frame_input = int(input("Recording how many frames?")) mystream = StreamHandler(frame_input, collect_data) mystream.start_stream() print("Finished recording to", row_to_modify) # save or save as elif (user_input == "s" or user_input == "sa"): if (LOADED and user_input == "s"): np.save(LOADED_PATH, np.asarray(data)) else: file_name = str(input("File path?")) np.save(file_name, np.asarray(data)) # load elif (user_input == "l"): file_name = str(input("File path?")) arrdata = np.load(file_name, allow_pickle=True)
def __init__(self):
    """Build an authenticated OAuth client and attach a stream handler."""
    consumer = oauth.Consumer(key=API_KEY, secret=API_SECRET)
    token = oauth.Token(key=ACCESS_TOKEN, secret=ACCESS_TOKEN_SECRET)
    self.consumer = consumer
    self.access_token = token
    # The client signs requests with the consumer/token pair above.
    self.client = oauth.Client(consumer, token)
    self.stream_handler = StreamHandler()