def main():
    X = np.genfromtxt(
        '/home/ubuntu/data/scRNAseq/TabulaMuris/FACS/Marrow-counts.csv',
        delimiter=',')
    X = X[1:, 1:].T
    model = HAL(n_cluster_init=50, clf_type='rf')
    model.fit(X)
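The snippets on this page are fragments and omit their imports. A minimal sketch of what the example above needs is shown below; the `from hal import HAL` line assumes the HAL clustering class is exposed by a module named `hal` (as in the HAL-x package), which may differ in your installation.

# Assumed imports for the clustering example above; adjust the HAL import to
# match how the package is installed in your environment.
import numpy as np
from hal import HAL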
Example #2
def main():
    data = load()
    # np.savetxt('columns.txt', data.columns.values, fmt='%s')
    col = np.loadtxt('columns.txt', delimiter='\t', dtype=str)

    data = data[col[:, 0]]

    X = np.arcsinh(data)

    model = HAL(n_cluster_init=50)

    model.fit(X)
    def test_after_connect(self):
        hal = HAL()
        c = Controller(hal=hal, pru=PRU())
        event_listener = EventListener()
        c.register_event_listener(event_listener)
        c.connect()
        c.on_poll()
        expected_events = [
            "compressor=stop",
            "lightbarrier3=off",
            "lightbarrier4=off",
            "lightbarrier5=off",
            "motor=stop",
            "valve1=off",
            "valve2=off",
            "valve3=off",
            "mode=normal",
            "sort-order=blue-red-white",
            "controller=stopped",
            "conveyor=stopped",
            "lightbarrier1=off",
            "lightbarrier2=off",
            "emergency-stop=off",
            "connect",
        ]
        self.assertEqual(event_listener.events, expected_events)
Example #4
    def __init__(self):
        self.thread = None
        self.reload = False

        # Time variables
        self.time_cycle = 80
        self.ideal_cycle = 80
        self.iteration_counter = 0
        self.frequency_message = {'brain': '', 'gui': ''}

        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI, HAL and Console behind the scenes
        self.console = console.Console()
        self.hal = HAL()
        self.gui = GUI(self.host, self.console, self.hal)
        # initialize Teleoperation variables
        self.teop = False
        self.speedV = 0.3
        self.speedW = 0.5
        self.stop = 0
        self.key = None
        self.flag = 0
        self.pattern_V = 'HAL.motors.sendV'
        self.pattern_W = 'HAL.motors.sendW'
    def test_after_init(self):
        hal = HAL()
        c = Controller(hal=hal, pru=PRU())
        event_listener = EventListener()
        c.register_event_listener(event_listener)
        c.on_poll()
        expected_events = []
        self.assertEqual(event_listener.events, expected_events)
    def __init__(self):
        self.thread = None
        self.reload = False

        # Time variables
        self.time_cycle = 80
        self.ideal_cycle = 80
        self.iteration_counter = 0
        self.real_time_factor = 0
        self.frequency_message = {'brain': '', 'gui': '', 'rtf': ''}

        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI, HAL and Console behind the scenes
        self.hal = HAL()
        self.gui = GUI(self.host, self.hal)
Example #7
    def test_on_input_change_callback(self):
        hal = HAL()
        hal.register_on_input_change_callback(self._callback_func)
        hal.set_input("Input1", True)
        hal.get_input("Input1")
        self.assertEqual(self._callback_args, ("Input1", True, None))
        hal.set_input("Input1", False)
        hal.get_input("Input1")
        self.assertEqual(self._callback_args, ("Input1", False, True))
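The test above relies on a callback helper that is not shown in the snippet; a minimal sketch consistent with the assertions (recording the latest (name, value, previous_value) tuple) could be:

    def _callback_func(self, name, value, previous_value):
        # Record the most recent callback arguments so the assertions above can
        # inspect them; the parameter order is inferred from the test.
        self._callback_args = (name, value, previous_value)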
def plot_mice_1():

    results = pickle.load(open('results.pkl', 'rb'))
    ypred, df_median_expression, df_frequency = results[
        'c11_20190209_BoneMarrow_1-10_01_BM_1_Singlets.fcs']
    """  model = HAL(warm_start=True, n_cluster_init=50)
    model.load()
    #exit()
    ypossible = model.possible_clusters(cv=cv)

    data = []
    idx = []
    #df = pd.DataFrame([], columns=[str(yu) for yu in ypossible])

    for k, v in results.items():
        _, _, df_freq = v
        data.append(df_freq.values.flatten())
        idx.append(k.split('_')[1])

    df=pd.DataFrame(data, index=idx, columns=ypossible) """

    ax = sns.clustermap(df_median_expression, xticklabels=1, figsize=(15, 15))

    plt.setp(ax.ax_heatmap.xaxis.get_majorticklabels(),
             rotation=90,
             ha='right')

    plt.setp(ax.ax_heatmap.yaxis.get_majorticklabels(), rotation=0, ha='left')
    plt.show()

    model = HAL(warm_start=True, n_cluster_init=50)

    xtsne = model.load('tsne')

    model.cluster_w_label(xtsne, ypred, psize=1)

    #emphasis_plot(xtsne, ypred, 36)

    #plotting.plotly_plot(xtsne, ypred)

    exit()
    def __init__(self):
        self.brain_process = None
        self.reload = multiprocessing.Event()
        
        # Time variables
        self.brain_time_cycle = SharedValue('brain_time_cycle')
        self.brain_ideal_cycle = SharedValue('brain_ideal_cycle')
        self.real_time_factor = 0

        self.frequency_message = {'brain': '', 'gui': '', 'rtf': ''}

        # GUI variables
        self.gui_time_cycle = SharedValue('gui_time_cycle')
        self.gui_ideal_cycle = SharedValue('gui_ideal_cycle')
                
        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI and HAL behind the scenes
        self.hal = HAL()
def predict(cv):

    file_name_list = []

    for filename in os.listdir(
            '/Users/mukherjeer2/Documents/Data/CyTOF/20190209_B6_IdU_Pilot/Live_Single_Cells/'
    ):
        if filename.endswith(".fcs"):
            file_name_list.append(filename)
            continue
        else:
            continue

    col = np.loadtxt('columns.txt', delimiter='\t', dtype=str)
    result = {}

    model = HAL(warm_start=True, n_cluster_init=50)

    model.load()

    ypossible = model.possible_clusters(cv)

    for f in file_name_list:
        print(f)
        data = load(f)

        data = data[col[:, 0]]

        X = np.arcsinh(data)

        ypred = model.predict(X, cv)

        #Xtmp = model.preprocess(X)

        col_names_all = list(col[:, 1].flatten())  # display names from the second column of columns.txt

        df_median_expression = pd.DataFrame(np.array(
            [np.median(X[ypred == yu], axis=0) for yu in ypossible]),
                                            index=list(ypossible),
                                            columns=col_names_all)

        df_frequency = pd.DataFrame(
            [np.count_nonzero(ypred == yu) / len(ypred) for yu in ypossible],
            index=ypossible,
            columns=[f])

        df_frequency.to_csv(
            '/Users/mukherjeer2/Documents/Data/CyTOF/20190209_B6_IdU_Pilot/Live_Single_Cells/Frequencies.csv'
        )

        df_median_expression.to_csv(
            '/Users/mukherjeer2/Documents/Data/CyTOF/20190209_B6_IdU_Pilot/Live_Single_Cells/Median_expression.csv'
        )

        #print(df_median_expression)
        #print(df_frequency)
        #exit()
        result[f] = [ypred, df_median_expression, df_frequency]

    pickle.dump(result, open('results.pkl', 'wb'))
Example #11
def predict(cv):
    col_names = [
        line.split(',')[0] for line in open(
            '/home/ubuntu/data/scRNAseq/mice/count.csv', 'r').readlines()
    ][1:]
    model = HAL(warm_start=True, n_cluster_init=50, clf_type='rf')
    model.load()
    ypossible = model.possible_clusters(cv)
    X = np.genfromtxt('/home/ubuntu/data/scRNAseq/mice/count.csv',
                      delimiter=',')
    X = X[1:, 1:].T
    ypred = model.predict(X, cv)
    col_names_all = list(col_names)  # gene names parsed from the count file above
    df_median_expression = pd.DataFrame(np.array(
        [np.median(X[ypred == yu], axis=0) for yu in ypossible]),
                                        index=list(ypossible),
                                        columns=col_names_all)
    df_frequency = pd.DataFrame(
        [np.count_nonzero(ypred == yu) / len(ypred) for yu in ypossible],
        index=ypossible,
        columns=['count.csv'])  # single-column label for this input file
    df_frequency.to_csv('/home/ubuntu/data/scRNAseq/mice/Frequencies.csv')
    df_median_expression.to_csv(
        '/home/ubuntu/data/scRNAseq/mice/Median_expression.csv')
    result = {'count.csv': [ypred, df_median_expression, df_frequency]}
    pickle.dump(result, open('results.pkl', 'wb'))
def analyze(cv):

    results = pickle.load(open('results.pkl', 'rb'))
    model = HAL(warm_start=True, n_cluster_init=50)
    model.load()
    #exit()
    ypossible = model.possible_clusters(cv=cv)

    data = []
    idx = []
    #df = pd.DataFrame([], columns=[str(yu) for yu in ypossible])

    for k, v in results.items():
        _, _, df_freq = v
        data.append(df_freq.values.flatten())
        idx.append(k[:k.find('Singlets.fcs') -
                     1][find_second_last(k[:k.find('Singlets.fcs') - 1], '_') +
                        1:])

    df = pd.DataFrame(
        np.array(data),
        index=idx,
        columns=results['c11_20190209_BoneMarrow_1-10_01_BM_1_Singlets.fcs']
        [2].index)

    ax = sns.clustermap(np.arcsinh(df * 1000).T,
                        xticklabels=df.index,
                        cbar_kws={'label': 'arcsinh(frequency*1000)'},
                        figsize=(15, 15))
    plt.setp(ax.ax_heatmap.xaxis.get_majorticklabels(),
             rotation=90,
             ha='right')
    plt.setp(ax.ax_heatmap.yaxis.get_majorticklabels(), rotation=0, ha='left')

    plt.show()

    pickle.dump(df, open('frequencies.pkl', 'wb'))
    def __init__(self):
        self.thread = None
        self.reload = False

        # Time variables
        self.time_cycle = 80
        self.ideal_cycle = 80
        self.iteration_counter = 0

        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI, HAL and Console behind the scenes
        self.console = console.Console()
        self.gui = GUI(self.host, self.console)
        self.hal = HAL()
    def defineModel(self, markers, score, output_dir=None, retrain=False):
        """ This method creates a training set and runs the clustering algorithm

        markers: a list of strings corresponding to columns in the input data frames
        score: score to use in new model
        arcsinh: whether to apply the arcsinh transformation prior to clustering (this is usually a good idea)
        scaling: z-score scaling (this is considered best practice)
        output_dir (default is None): output directory, None indicates the current working directory
        retrain (default is False): specifies whether to retrain with existing model or create a new one
        """
        assert (not retrain) or (
            self.model is not None
        )  # exception if retraining without existing model

        if output_dir is None:
            output_dir = os.getcwd(
            )  # set to current directory if not specified

        data = copy.copy(
            self.data)  # dictionary of samples (file names are keys)

        n_cells = np.floor(self.n_tsne /
                           len(set(data.index.get_level_values(0)))
                           )  # number of data points selected from each sample
        samples = list(set(
            data.index.get_level_values(0)))  # sample/file names
        origins = []  # list tracking which sample individual points came from

        for ii, sample in enumerate(list(set(
                data.index.get_level_values(0)))):  # iterate through samples
            sample_data = data[data.index.get_level_values(0) == sample]
            sample_size = int(np.min([
                n_cells, sample_data.shape[0]
            ]))  # number of points to select per sample
            random_choice = np.random.choice(sample_data.shape[0], sample_size)
            origins.extend(
                [sample] *
                len(random_choice))  # note where data points came from
            if ii == 0:
                data_samples = sample_data[markers].iloc[
                    random_choice, :].values  # start list of data points
            else:
                data_samples = np.concatenate([
                    data_samples,
                    sample_data[markers].iloc[random_choice, :].values
                ])
        '''
        for i, current_marker in enumerate(markers):
            print(current_marker)
            print(stats.entropy(np.arcsinh(data_samples[:, i])))
            plt.hist(np.arcsinh(data_samples[:, i]))
            plt.show()
        '''
        # determine whether the current experiment has been processed (with any CV score)
        redundant = False
        for file_name in os.listdir(output_dir + '/serialized'):
            match = re.fullmatch(r'model_0.*%s\.pkl' % self.name, file_name)
            if match is not None:
                redundant = True
                break
        # create new model if not retraining
        model_file = 'model_' + self.name + '.pkl'
        scaler_file = 'scaler_' + self.name + '.pkl'
        label_file = 'Labels_tSNE_' + str(score) + self.name + '.pkl'
        if (label_file
                in os.listdir(output_dir + '/serialized')) and not retrain:
            # re-run experiment with same CV score
            model = pickle.load(
                open(output_dir + '/serialized/' + model_file, 'rb'))
            self.scaler_obj = pickle.load(
                open(output_dir + '/serialized/' + scaler_file, 'rb'))
            tsne_frame = pickle.load(
                open(output_dir + '/serialized/' + label_file, 'rb'))
            labels_tSNE = tsne_frame['clusters']
            data_samples = tsne_frame.loc[:, markers].values
            output = tsne_frame
        else:
            if redundant and not retrain:
                # re-run experiment with different CV score
                model = pickle.load(
                    open(output_dir + '/serialized/' + model_file, 'rb'))
                data_samples = pickle.load(
                    open(
                        output_dir + '/serialized/tSNE_subset_' + self.name +
                        '.pkl', 'rb'))
                self.scaler_obj = pickle.load(
                    open(output_dir + '/serialized/' + scaler_file, 'rb'))
            else:
                # create HAL object and fit model to data (using only training data)
                try:
                    shutil.rmtree('./info_hal')  # remove old info_hal folder
                except FileNotFoundError:
                    pass
                model = HAL(clf_type=self.clf_type,
                            outlier_ratio=0.1,
                            late_exag=900,
                            alpha_late=2.0,
                            n_cluster_init=150,
                            warm_start=True)
            # apply arcsinh transformation (enabled by default)
            if self.arcsinh:
                transformed_samples = np.arcsinh(data_samples)
            else:
                transformed_samples = data_samples
            # apply feature scaling with the fitted scaler (MinMaxScaler here; enabled by default)
            if self.scaling:
                if self.scaler_obj is None:
                    self.scaler_obj = MinMaxScaler()
                    scaled_data = self.scaler_obj.fit_transform(
                        transformed_samples)
                else:
                    scaled_data = self.scaler_obj.transform(
                        transformed_samples)
            else:
                scaled_data = transformed_samples  # do not use this option without a good reason!
            model.fit(scaled_data)
            pickle.dump(model,
                        open(output_dir + '/serialized/' + model_file, 'wb'))
            pickle.dump(self.scaler_obj,
                        open(output_dir + '/serialized/' + scaler_file, 'wb'))
            # create a frame with the clusters and samples for each data point
            labels_tSNE = model.predict(scaled_data, cv=score)
            output = pd.DataFrame(data_samples)
            output.columns = markers
            output["clusters"] = labels_tSNE
            output["origin"] = origins
            output = self.addTsne(output)
            output.to_csv(output_dir + '/Labels_tSNE_' + str(score) +
                          self.name + '.csv')
            pickle.dump(
                data_samples,
                open(
                    output_dir + '/serialized/tSNE_subset_' + self.name +
                    '.pkl', "wb"))
            pickle.dump(
                output,
                open(
                    output_dir + '/serialized/Labels_tSNE_' + str(score) +
                    self.name + '.pkl', "wb"))

        self.model = model
        labels_only = np.array(labels_tSNE)

        return labels_only, output  # do not return samples of origin with labels
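As a usage illustration (the object and marker names below are hypothetical, not taken from the source), `defineModel` would be called on a prepared analysis object roughly like this:

# Hypothetical call, assuming `analysis` is an instance of the class defining
# defineModel, with self.data, self.n_tsne, self.clf_type, etc. already set.
labels, labelled_frame = analysis.defineModel(
    markers=['CD3', 'CD4', 'CD8'],  # example marker columns present in the data
    score=0.9,                      # CV score passed to model.predict
    output_dir='.',                 # must contain a 'serialized' subdirectory
    retrain=False)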
class Template:
    # Initialize class variables
    # self.time_cycle to run an execution for at least 1 second
    # self.process for the current running process
    def __init__(self):
        self.brain_process = None
        self.reload = multiprocessing.Event()
        
        # Time variables
        self.brain_time_cycle = SharedValue('brain_time_cycle')
        self.brain_ideal_cycle = SharedValue('brain_ideal_cycle')
        self.real_time_factor = 0

        self.frequency_message = {'brain': '', 'gui': '', 'rtf': ''}

        # GUI variables
        self.gui_time_cycle = SharedValue('gui_time_cycle')
        self.gui_ideal_cycle = SharedValue('gui_ideal_cycle')
                
        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI and HAL behind the scenes
        self.hal = HAL()
        
    # Function for saving
    def save_code(self, source_code):
        with open('code/academy.py', 'w') as code_file:
            code_file.write(source_code)

    # Function for loading
    def load_code(self):
        with open('code/academy.py', 'r') as code_file:
            source_code = code_file.read()

        return source_code

    # Function to parse the code
    # A few assumptions:
    # 1. The user always passes sequential and iterative codes
    # 2. Only a single infinite loop
    def parse_code(self, source_code):
        # Check for save/load
        if source_code[:5] == "#save":
            source_code = source_code[5:]
            self.save_code(source_code)

            return "", ""

        elif source_code[:5] == "#load":
            source_code = source_code + self.load_code()
            self.server.send_message(self.client, source_code)

            return "", ""

        elif source_code[:5] == "#resu":
            restart_simulation = rospy.ServiceProxy('/gazebo/unpause_physics', Empty)
            restart_simulation()

            return "", ""

        elif source_code[:5] == "#paus":
            pause_simulation = rospy.ServiceProxy('/gazebo/pause_physics', Empty)
            pause_simulation()

            return "", ""

        elif source_code[:5] == "#rest":
            reset_simulation = rospy.ServiceProxy('/gazebo/reset_world', Empty)
            reset_simulation()
            return "", ""

        else:
            sequential_code, iterative_code = self.seperate_seq_iter(source_code)
            return iterative_code, sequential_code
    
    # Function to separate the iterative and sequential code
    def seperate_seq_iter(self, source_code):
        if source_code == "":
            return "", ""

        # Search for an instance of while True
        infinite_loop = re.search(r'[^ \t]while\(True\):|[^ \t]while True:', source_code)

        # Separate the content inside while True and the other
        # (separating the sequential and iterative part!)
        try:
            start_index = infinite_loop.start()
            iterative_code = source_code[start_index:]
            sequential_code = source_code[:start_index]

            # Remove the while True: syntax from the code
            # and remove the 4 spaces of indentation before each command
            iterative_code = re.sub(r'[^ ]while\(True\):|[^ ]while True:', '', iterative_code)
            iterative_code = re.sub(r'^[ ]{4}', '', iterative_code, flags=re.M)

        except:
            sequential_code = source_code
            iterative_code = ""

        return sequential_code, iterative_code
    
    # Function to maintain thread execution
    def execute_thread(self, source_code):
        # Keep checking until the thread is alive
        # The thread will die when the coming iteration reads the flag
        if self.brain_process is not None:
            while self.brain_process.is_alive():
                pass

        # Turn the flag down, the iteration has successfully stopped!
        self.reload.clear()
        # New thread execution
        code = self.parse_code(source_code)
        if code[0] == "" and code[1] == "":
            return
            
        self.brain_process = BrainProcess(code, self.reload)
        self.brain_process.start()

    # Function to read and set frequency from incoming message
    def read_frequency_message(self, message):
        frequency_message = json.loads(message)

        # Set brain frequency
        frequency = float(frequency_message["brain"])
        self.brain_time_cycle.add(1000.0 / frequency)

        # Set gui frequency
        frequency = float(frequency_message["gui"])
        self.gui_time_cycle.add(1000.0 / frequency)

        return

    # Function to track the real time factor from Gazebo statistics
    # https://stackoverflow.com/a/17698359
    # (for reference, a Python 3 solution is given in the same answer)
    def track_stats(self):
        args = ["gz", "stats", "-p"]
        # Prints gz statistics. "-p": output comma-separated values containing
        # real-time factor (percent), simtime (sec), realtime (sec), paused (T or F)
        stats_process = subprocess.Popen(args, stdout=subprocess.PIPE,
                                         bufsize=1, universal_newlines=True)
        # bufsize=1 enables line-buffered mode (the input buffer is flushed
        # automatically on newlines if you write to process.stdin)
        with stats_process.stdout:
            for line in iter(stats_process.stdout.readline, ''):
                stats_list = [x.strip() for x in line.split(',')]
                self.real_time_factor = stats_list[0]


    # Function to generate and send frequency messages
    def send_frequency_message(self):
        # This function generates and sends frequency measures of the brain and gui
        brain_frequency = 0
        gui_frequency = 0
        try:
            brain_frequency = round(1000 / self.brain_ideal_cycle.get(), 1)
        except ZeroDivisionError:
            brain_frequency = 0

        try:
            gui_frequency = round(1000 / self.gui_ideal_cycle.get(), 1)
        except ZeroDivisionError:
            gui_frequency = 0

        self.frequency_message["brain"] = brain_frequency
        self.frequency_message["gui"] = gui_frequency
        self.frequency_message["rtf"] = self.real_time_factor

        message = "#freq" + json.dumps(self.frequency_message)
        self.server.send_message(self.client, message)


    # The websocket function
    # Gets called when there is an incoming message from the client
    def handle(self, client, server, message):
        if(message[:5] == "#freq"):
            frequency_message = message[5:]
            self.read_frequency_message(frequency_message)
            self.send_frequency_message()
            return
        
        try:
            # Once received turn the reload flag up and send it to execute_thread function
            code = message
            # print(repr(code))
            self.reload.set()
            self.execute_thread(code)
        except:
            pass

    # Function that gets called when the server is connected
    def connected(self, client, server):
        self.client = client
        # Start the HAL update thread
        self.hal.start_thread()

        # Start real time factor tracker thread
        self.stats_thread = threading.Thread(target=self.track_stats)
        self.stats_thread.start()

        # Initialize the ping message
        self.send_frequency_message()

        print(client, 'connected')

    # Function that gets called when the connection closes
    def handle_close(self, client, server):
        print(client, 'closed')

    def run_server(self):
        self.server = WebsocketServer(port=1905, host=self.host)
        self.server.set_fn_new_client(self.connected)
        self.server.set_fn_client_left(self.handle_close)
        self.server.set_fn_message_received(self.handle)
        self.server.run_forever()
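The `#freq` messages handled above carry a small JSON payload with the desired brain and GUI update rates in Hz; a hypothetical client-side message matching what `read_frequency_message` expects would look like this:

import json

# Hypothetical client-side frequency request: the "#freq" prefix selects the
# handler, the JSON body carries the rates (in Hz) parsed by read_frequency_message.
message = "#freq" + json.dumps({"brain": 20, "gui": 10})
# The message would then be sent over the exercise websocket (port 1905).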
Example #16
    def __init__(self, laser_object, pose3d):
        self.config = Config()
        self.hal = HAL()
        self.pose3d = pose3d
        self.laser_topic = laser_object
class Template:
    # Initialize class variables
    # self.time_cycle to run an execution for at least 1 second
    # self.process for the current running process
    def __init__(self):
        self.thread = None
        self.reload = False

        # Time variables
        self.time_cycle = 80
        self.ideal_cycle = 80
        self.iteration_counter = 0
        self.real_time_factor = 0
        self.frequency_message = {'brain': '', 'gui': '', 'rtf': ''}

        self.server = None
        self.client = None
        self.host = sys.argv[1]

        # Initialize the GUI, HAL and Console behind the scenes
        self.hal = HAL()
        self.gui = GUI(self.host, self.hal)

    # Function to parse the code
    # A few assumptions:
    # 1. The user always passes sequential and iterative codes
    # 2. Only a single infinite loop
    def parse_code(self, source_code):
        if source_code[:5] == "#resu":
            restart_simulation = rospy.ServiceProxy('/gazebo/unpause_physics',
                                                    Empty)
            restart_simulation()

            return "", ""

        elif source_code[:5] == "#paus":
            pause_simulation = rospy.ServiceProxy('/gazebo/pause_physics',
                                                  Empty)
            pause_simulation()

            return "", ""

        elif source_code[:5] == "#rest":
            reset_simulation = rospy.ServiceProxy('/gazebo/reset_world', Empty)
            reset_simulation()
            self.gui.reset_gui()
            if self.hal.get_landed_state() == 2: self.hal.land()
            return "", ""

        else:
            # Pause and unpause
            sequential_code, iterative_code = self.seperate_seq_iter(
                source_code)
            return iterative_code, sequential_code

    # Function to separate the iterative and sequential code
    def seperate_seq_iter(self, source_code):
        if source_code == "":
            return "", ""

        # Search for an instance of while True
        infinite_loop = re.search(r'[^ \t]while\(True\):|[^ \t]while True:',
                                  source_code)

        # Separate the content inside while True and the other
        # (Separating the sequential and iterative part!)
        try:
            start_index = infinite_loop.start()
            iterative_code = source_code[start_index:]
            sequential_code = source_code[:start_index]

            # Remove the while True: syntax from the code
            # and remove the 4 spaces of indentation before each command
            iterative_code = re.sub(r'[^ ]while\(True\):|[^ ]while True:', '',
                                    iterative_code)
            iterative_code = re.sub(r'^[ ]{4}', '', iterative_code, flags=re.M)

        except:
            sequential_code = source_code
            iterative_code = ""

        return sequential_code, iterative_code

    # The process function
    def process_code(self, source_code):
        # Redirect the information to console
        start_console()

        iterative_code, sequential_code = self.parse_code(source_code)

        # print(sequential_code)
        # print(iterative_code)

        # The Python exec function
        # Run the sequential part
        gui_module, hal_module = self.generate_modules()
        reference_environment = {"GUI": gui_module, "HAL": hal_module}
        exec(sequential_code, reference_environment)

        # Run the iterative part inside template
        # and keep the check for flag
        while not self.reload:
            start_time = datetime.now()

            # Execute the iterative portion
            exec(iterative_code, reference_environment)

            # Template specifics to run!
            finish_time = datetime.now()
            dt = finish_time - start_time
            ms = (dt.days * 24 * 60 * 60 +
                  dt.seconds) * 1000 + dt.microseconds / 1000.0

            # Keep updating the iteration counter
            if (iterative_code == ""):
                self.iteration_counter = 0
            else:
                self.iteration_counter = self.iteration_counter + 1

            # The code should be run for at least the target time step
            # If it's less put to sleep
            if (ms < self.time_cycle):
                time.sleep((self.time_cycle - ms) / 1000.0)

        close_console()
        print("Current Thread Joined!")

    # Function to generate the modules for use in ACE Editor
    def generate_modules(self):
        # Define HAL module
        hal_module = imp.new_module("HAL")
        hal_module.HAL = imp.new_module("HAL")
        # hal_module.drone = imp.new_module("drone")
        # hal_module.HAL.motors = imp.new_module("motors")

        # Add HAL functions
        hal_module.HAL.get_frontal_image = self.hal.get_frontal_image
        hal_module.HAL.get_ventral_image = self.hal.get_ventral_image
        hal_module.HAL.get_position = self.hal.get_position
        hal_module.HAL.get_velocity = self.hal.get_velocity
        hal_module.HAL.get_yaw_rate = self.hal.get_yaw_rate
        hal_module.HAL.get_orientation = self.hal.get_orientation
        hal_module.HAL.get_roll = self.hal.get_roll
        hal_module.HAL.get_pitch = self.hal.get_pitch
        hal_module.HAL.get_yaw = self.hal.get_yaw
        hal_module.HAL.get_landed_state = self.hal.get_landed_state
        hal_module.HAL.set_cmd_pos = self.hal.set_cmd_pos
        hal_module.HAL.set_cmd_vel = self.hal.set_cmd_vel
        hal_module.HAL.set_cmd_mix = self.hal.set_cmd_mix
        hal_module.HAL.takeoff = self.hal.takeoff
        hal_module.HAL.land = self.hal.land

        # Define GUI module
        gui_module = imp.new_module("GUI")
        gui_module.GUI = imp.new_module("GUI")

        # Add GUI functions
        gui_module.GUI.showImage = self.gui.showImage
        gui_module.GUI.showLeftImage = self.gui.showLeftImage

        # Adding modules to system
        # Pro tip: the names should be different from
        # other modules, otherwise name clashes cause errors
        sys.modules["HAL"] = hal_module
        sys.modules["GUI"] = gui_module

        return gui_module, hal_module

    # Function to measure the frequency of iterations
    def measure_frequency(self):
        previous_time = datetime.now()
        # An infinite loop
        while not self.reload:
            # Sleep for 2 seconds
            time.sleep(2)

            # Measure the current time and subtract from the previous time to get real time interval
            current_time = datetime.now()
            dt = current_time - previous_time
            ms = (dt.days * 24 * 60 * 60 +
                  dt.seconds) * 1000 + dt.microseconds / 1000.0
            previous_time = current_time

            # Get the time period
            try:
                # Division by zero
                self.ideal_cycle = ms / self.iteration_counter
            except:
                self.ideal_cycle = 0

            # Reset the counter
            self.iteration_counter = 0

            # Send to client
            self.send_frequency_message()

    # Function to generate and send frequency messages
    def send_frequency_message(self):
        # This function generates and sends frequency measures of the brain and gui
        brain_frequency = 0
        gui_frequency = 0
        try:
            brain_frequency = round(1000 / self.ideal_cycle, 1)
        except ZeroDivisionError:
            brain_frequency = 0

        try:
            gui_frequency = round(1000 / self.thread_gui.ideal_cycle, 1)
        except ZeroDivisionError:
            gui_frequency = 0

        self.frequency_message["brain"] = brain_frequency
        self.frequency_message["gui"] = gui_frequency
        self.frequency_message["rtf"] = self.real_time_factor

        message = "#freq" + json.dumps(self.frequency_message)
        self.server.send_message(self.client, message)

    # Function to track the real time factor from Gazebo statistics
    # https://stackoverflow.com/a/17698359
    # (for reference, a Python 3 solution is given in the same answer)
    def track_stats(self):
        args = ["gz", "stats", "-p"]
        # Prints gz statistics. "-p": output comma-separated values containing
        # real-time factor (percent), simtime (sec), realtime (sec), paused (T or F)
        stats_process = subprocess.Popen(args,
                                         stdout=subprocess.PIPE,
                                         bufsize=1,
                                         universal_newlines=True)
        # bufsize=1 enables line-buffered mode (the input buffer is flushed
        # automatically on newlines if you write to process.stdin)
        with stats_process.stdout:
            for line in iter(stats_process.stdout.readline, ''):
                stats_list = [x.strip() for x in line.split(',')]
                self.real_time_factor = stats_list[0]

    # Function to maintain thread execution
    def execute_thread(self, source_code):
        # Keep checking until the thread is alive
        # The thread will die when the coming iteration reads the flag
        if self.thread is not None:
            while self.thread.is_alive() or self.measure_thread.is_alive():
                pass

        # Turn the flag down, the iteration has successfully stopped!
        self.reload = False
        # New thread execution
        self.measure_thread = threading.Thread(target=self.measure_frequency)
        self.thread = threading.Thread(target=self.process_code,
                                       args=[source_code])
        self.thread.start()
        self.measure_thread.start()
        print("New Thread Started!")

    # Function to read and set frequency from incoming message
    def read_frequency_message(self, message):
        frequency_message = json.loads(message)

        # Set brain frequency
        frequency = float(frequency_message["brain"])
        self.time_cycle = 1000.0 / frequency

        # Set gui frequency
        frequency = float(frequency_message["gui"])
        self.thread_gui.time_cycle = 1000.0 / frequency

        return

    # The websocket function
    # Gets called when there is an incoming message from the client
    def handle(self, client, server, message):
        if message[:5] == "#freq":
            frequency_message = message[5:]
            self.read_frequency_message(frequency_message)
            return

        try:
            # Once received turn the reload flag up and send it to execute_thread function
            code = message
            # print(repr(code))
            self.reload = True
            self.execute_thread(code)
        except:
            pass

    # Function that gets called when the server is connected
    def connected(self, client, server):
        self.client = client
        # Start the GUI update thread
        self.thread_gui = ThreadGUI(self.gui)
        self.thread_gui.start()

        # Start the real time factor tracker thread
        self.stats_thread = threading.Thread(target=self.track_stats)
        self.stats_thread.start()

        # Initialize the ping message
        self.send_frequency_message()

        print(client, 'connected')

    # Function that gets called when the connected closes
    def handle_close(self, client, server):
        print(client, 'closed')

    def run_server(self):
        self.server = WebsocketServer(port=1905, host=self.host)
        self.server.set_fn_new_client(self.connected)
        self.server.set_fn_client_left(self.handle_close)
        self.server.set_fn_message_received(self.handle)
        self.server.run_forever()
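A typical entry point for this template, assuming the exercise is launched with the host address as its first command-line argument (it is read from sys.argv[1] in __init__), might be:

# Hypothetical launcher; the surrounding exercise infrastructure may differ.
if __name__ == "__main__":
    template = Template()
    template.run_server()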
Example #18
# Enter sequential code!
from gui import GUI
from hal import HAL

while True:
    # Enter iterative code!
    img = HAL.getImage()
    GUI.showImage(img)
    def test_normal_run(self):
        hal = HAL()
        c = Controller(hal=hal, pru=PRU())
        event_listener = EventListener()
        c.register_event_listener(event_listener)
        c.connect()
        c.on_poll()
        c.dispatch_command("compressor=start")
        c.on_poll()
        c.dispatch_command("start")
        c.on_poll()

        c._pru.test_post_event("lightbarrier1=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier1=off")
        c.on_poll()
        c._pru.test_post_event("color=red")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=off")
        c.on_poll()
        c._pru.test_post_event("valve2=on")
        c.on_poll()
        c._hal.set_input(c._hal.LIGHTBARRIER4, True)
        c.on_poll()
        c._pru.test_post_event("valve2=off")
        c.on_poll()

        c._pru.test_post_event("lightbarrier1=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier1=off")
        c.on_poll()
        c._pru.test_post_event("color=white")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=off")
        c.on_poll()
        c._pru.test_post_event("valve3=on")
        c.on_poll()
        c._hal.set_input(c._hal.LIGHTBARRIER5, True)
        c.on_poll()
        c._pru.test_post_event("valve3=off")
        c.on_poll()

        c._pru.test_post_event("lightbarrier1=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier1=off")
        c.on_poll()
        c._pru.test_post_event("color=blue")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=on")
        c.on_poll()
        c._pru.test_post_event("lightbarrier2=off")
        c.on_poll()
        c._pru.test_post_event("valve1=on")
        c.on_poll()
        c._hal.set_input(c._hal.LIGHTBARRIER3, True)
        c.on_poll()
        c._pru.test_post_event("emergency-stop=on")
        c.on_poll()
        c._pru.test_post_event("motor=stop")
        c.on_poll()
        c._pru.test_post_event("controller=stopped")
        c.on_poll()
        c._pru.test_post_event("valve1=off")
        c.on_poll()
        c._pru.test_post_event("conveyor=stopped")
        c.on_poll()

        expected_events = [
            # connect
            "compressor=stop",
            "lightbarrier3=off",
            "lightbarrier4=off",
            "lightbarrier5=off",
            "motor=stop",
            "valve1=off",
            "valve2=off",
            "valve3=off",
            "mode=normal",
            "sort-order=blue-red-white",
            "controller=stopped",
            "conveyor=stopped",
            "lightbarrier1=off",
            "lightbarrier2=off",
            "emergency-stop=off",
            "connect",
            # compressor start
            "compressor=start",
            # start
            "motor=start",
            "controller=started",
            "start",
            "conveyor=running",
            # put red object on conveyor
            "lightbarrier1=on",
            "lightbarrier1=off",
            "color=red",
            "lightbarrier2=on",
            "lightbarrier2=off",
            "valve2=on",
            "lightbarrier4=on",
            "valve2=off",
            # put white object on conveyor
            "lightbarrier1=on",
            "lightbarrier1=off",
            "color=white",
            "lightbarrier2=on",
            "lightbarrier2=off",
            "valve3=on",
            "lightbarrier5=on",
            "valve3=off",
            # put blue object on conveyor
            "lightbarrier1=on",
            "lightbarrier1=off",
            "color=blue",
            "lightbarrier2=on",
            "lightbarrier2=off",
            "valve1=on",
            "lightbarrier3=on",
            "emergency-stop=on",
            "motor=stop",
            "controller=stopped",
            "valve1=off",
            "conveyor=stopped",
        ]

        self.assertEqual(event_listener.events, expected_events)
Example #20
    def test_instance(self):
        hal = HAL()

    def test_instance(self):
        c = Controller(hal=HAL(), pru=PRU())
Example #22
            self._event_post_timer = None


if __name__ == "__main__":
    import getpass
    import signal
    import sys

    if "--simulate" in sys.argv[1:]:
        hal = HAL_simulated()
        pru = PRU_simulated()
    else:
        if getpass.getuser() != "root":
            print >> sys.stderr, "run as root"
            sys.exit(1)
        hal = HAL()
        pru = PRU()
    controller = Controller(hal, pru)
    print "controller initialized"

    event_listener = EventListener()
    controller.register_event_listener(event_listener)
    print "event listener connected"

    application = tornado.web.Application([
        (r"/ws", WSHandler,
         dict(controller=controller, event_listener=event_listener)),
    ])
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(WEBSOCKET_PORT)
Example #23
    def test_output_set(self):
        hal = HAL()
        hal.set_output("Output1", True)
        self.assertTrue(hal.get_output("Output1"))
        hal.set_output("Output1", False)
        self.assertFalse(hal.get_output("Output1"))
Example #24
    def test_output_default(self):
        hal = HAL()
        self.assertEqual(hal.get_output("Output1"), None)