def __init__(self, parent=None):
        """Run the AOI aggregation workflow end-to-end, then close this widget.

        Prompts the user to pick one or more data files, verifies the files
        share the same columns and contain the 'GazeAOI' / 'FixationAOI'
        fields, aggregates both AOI types, and closes.

        parent: the main window; must expose .statusbar and .ui.logOutput.
        """
        self.parent = parent
        self.parent.statusbar.showMessage("AOI Aggregation in process...")
        QWidget.__init__(self,parent)
        
        self.ui = gui.aoiUi()
        self.ui.setupUi(self)
       
        # NOTE(review): this uses tkinter's file dialog (with a module-level
        # 'root' window) inside a Qt application — confirm 'root' is created
        # before this runs.
        self.batch_files = filedialog.askopenfilenames(parent=root, title='Choose the file(s) you want to AOI aggregate')
        
        # Nothing selected (dialog cancelled): inform the user and bail out.
        if len(self.batch_files) < 1:
            msgBox = QMessageBox()
            msgBox.setGeometry(400, 400, 400, 200)
            msgBox.setText("Please select one or more datasets to aggregate")
            # Center the message box on the screen.
            msgBox.move(QApplication.desktop().screen().rect().center()- self.rect().center())
            msgBox.exec_() 
            
            self.close()
            return        
        
        # One entry per selected file; each entry is that file's column list.
        self.columns = []
        
        self.parent.statusbar.showMessage("Checking column validity...")

        #extract columns
        for item in self.batch_files:
            self.columns.append(helpers.extract_columns(item))
 
        #check to see if all columns are equal in all the datasets
        if not helpers.are_columns_same(self.columns):
            # User declined to continue with mismatched columns.
            if not helpers.columns_not_equal_message(self):
                self.close()
                return
        
        # Verify the required AOI fields exist (checked against the first
        # file's columns; earlier check ensured the rest match or the user
        # accepted the mismatch).
        message = ""
        if self.columns != []:
            if 'GazeAOI' not in self.columns[0]:
                message += "'GAZE AOI' "
            if 'FixationAOI' not in self.columns[0]:
                message += "'FIXATION AOI'"
            
        # Missing field(s): report and abort.
        if message != "":
            message += ": Field(s) missing from dataset - try again with appropriate data."
            reply = QMessageBox.question(self, 'Message',message, QMessageBox.Ok, QMessageBox.Ok)
            if reply == QMessageBox.Ok:
                self.parent.statusbar.showMessage("Welcome back!")
                self.close()
                return

        self.parent.ui.logOutput.append("AOI AGGREGATED:")
        print("AOI AGGREGATED:")
        
        # Aggregate both AOI types (method defined elsewhere in this class).
        self.AOI_aggregate('GazeAOI')
        self.AOI_aggregate('FixationAOI')
       
        #after job has been completed!
        helpers.job_complete_message(self)     
        
        self.parent.statusbar.showMessage("Welcome back!")
        self.close()
    def keep(self):
        """Filter each selected file down to the user-checked columns.

        Reads the checked items from self.model into self.columns_to_keep,
        asks for confirmation, then writes a
        "<name>_keep_columns_<timestamp>.<ext>" copy of every file in
        self.batch_files containing only those columns.
        """
        self.parent.statusbar.showMessage("Keep Columns in process...")
        self.columns_to_keep = []

        # Collect the text of every checked item in the column model.
        # checkState() == 2 corresponds to Qt.Checked.
        row = 0
        while self.model.item(row):
            if self.model.item(row).checkState() == 2:
                self.columns_to_keep.append(self.model.item(row).text())
            row += 1
        
        query = "Are you sure you want to proceed?"
        reply = QMessageBox.question(self, 'Message',query, QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if reply == QMessageBox.No:
            self.columns_to_keep = []
            return
        
        self.parent.statusbar.showMessage("Processing...")
        
        self.parent.ui.logOutput.append("KEPT:")
        print("KEPT:")
        
        #per file first retrieve data then filter columnarly
        for file in self.batch_files:
            datablob = helpers.clean_header(file)

            # Indices (in the header row) of the columns to keep; a set for
            # O(1) membership tests in the per-cell filter below.
            keep_idx = {datablob[0].index(name) for name in self.columns_to_keep}

            # Filter every row (header included) down to the kept columns.
            # The original shadowed the outer loop index inside the
            # comprehension; distinct names avoid that confusion.
            for row_no in range(len(datablob)):
                datablob[row_no][:] = [cell for col, cell in enumerate(datablob[row_no]) if col in keep_idx]

            # BUG FIX: the original split the path on the FIRST '.', which
            # corrupts the output name when a directory in the path contains
            # a dot; splitext splits only the final extension.
            base, ext = os.path.splitext(file)
            now = time.strftime('%Y%m%d_%H%M%S', time.localtime(time.time()))
            output_file = base + "_keep_columns_" + now + ext
            
            helpers.write_out_to_file(output_file,datablob)
            
            self.parent.ui.logOutput.append("      " + str(file.split('/')[-1]))
            print("      " + str(file.split('/')[-1]))
        
        helpers.job_complete_message(self)
            
        self.parent.statusbar.showMessage("Welcome Back!")
        self.close()
    def stack(self):
        """Vertically concatenate the selected files into one output file.

        Writes a tab-delimited "STACKED_DATASETS_<timestamp>.txt" in the
        current working directory. The first file's header row defines the
        combined columns; every file then contributes only its data rows.
        """
        self.parent.statusbar.showMessage("Stack Datasets in process...")
        self.full_data_complement = {}
        self.columns = []
        
        self.parent.ui.logOutput.append("STACKED:")
        print("STACKED:")

        for idx, file in enumerate(self.batch_files):
            datablob = helpers.clean_header(file)
            
            # The first file's header defines the output columns.
            if idx == 0:
                self.columns = datablob[0]
                
            self.full_data_complement[idx] = datablob
            
            self.parent.ui.logOutput.append("      " + str(file.split('/')[-1]))
            print("      " + str(file.split('/')[-1]))

        # Tab-delimited, unquoted output dialect.
        csv.register_dialect('CSV', delimiter='\t', quoting=csv.QUOTE_NONE)
        
        now = time.strftime('%Y%m%d_%H%M%S', time.localtime(time.time()))
        # BUG FIX: the original hard-coded a Windows '\\' separator; build
        # the path portably with os.path.join.
        output_file = os.path.join(os.getcwd(), "STACKED_DATASETS_" + now + ".txt")
        
        with open(output_file, 'w', newline='') as f:
            writer = csv.writer(f, 'CSV')
            writer.writerow(self.columns)
            
            # Emit every data row of every file, skipping each file's own
            # header row (index 0).
            for idx in range(len(self.full_data_complement)):
                writer.writerows(self.full_data_complement[idx][1:])
        
        #after job has been completed!
        helpers.job_complete_message(self)