    def create_connection(self, wb, switch):

        sheet = self.choose_sheet(wb)

        target_column = 1
        target_row = 1

        max_column = sheet.max_column
        max_row = sheet.max_row

        data_row = []
        row_dict = {}
        keys = []
        data_to_process = {}

        dup_keys = 0

        # Pre-populate the row dictionary with one empty entry per field name
        for name in self.row_names:
            row_dict[name] = ''

        for row in range(0, max_row):

            # Get the first value from the row to set as the key
            output = sheet.cell(row=target_row, column=1).value
            key = dp.validate_key(str(output))

            # Check if it's a duplicate key
            if key in keys:
                dup_keys += 1
                data_to_log = "Duplicate Key" + str(key)
                Lfh.append_file('log.txt', data_to_log)

            # Add that key to the list of all keys
            keys.append(key)
            data_to_process[key] = {}

            col_num = 0
            for column in range(0, max_column):
                output = sheet.cell(row=target_row, column=target_column).value
                data_row.append(str(output))

                row_dict[self.row_names[col_num]] = data_row[col_num]
                target_column = target_column + 1
                col_num = col_num + 1

            # Copy every field except ID and Valid; the key is stored separately
            for name in self.row_names[1:-1]:
                data_to_process[key][name] = row_dict[name]

            data_to_process[key]['valid'] = "0"

            data_row = []
            target_column = 1
            target_row = target_row + 1

        # Send the data to be processed
        dict_valid = dp.send_to_validate(data_to_process, switch, dup_keys)

        return dict_valid
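
A minimal sketch of the spreadsheet layout create_connection appears to expect: column 1 holds the key, and the remaining columns hold one value per name in self.row_names (the same field names used in split_file further down). The workbook is built with openpyxl purely for illustration; the reader instance and the switch value are hypothetical.

from openpyxl import Workbook

wb = Workbook()
sheet = wb.active
# Column 1 is the key; the data columns follow in row_names order.
sheet.append(["A001", "M", "34", "120", "22.5", "45000", "1990-01-01"])
sheet.append(["A002", "F", "29", "310", "24.1", "52000", "1992-06-15"])

# dict_valid = reader.create_connection(wb, switch)  # hypothetical call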
    def fetch_text_contents(file, switch, separator=","):
        f = FileReader()
        dup_keys = 0
        keep_going = True
        data_fields = DataFields.get_data_fields(DataFields)

        if file:
            # Repeat for each line in the text file
            for line in file:
                # Split file into fields using ","
                fields = line.split(separator)
                checked_id = DataProcessor.validate_key(fields[0])
                if checked_id in f.dict_root:
                    dup_keys += 1
                    fields[6] = fields[6].rstrip()
                    data_to_log = "Duplicate Key" + str(fields[0:])
                    LogFileHandler.append_file('log.txt', data_to_log)
                else:
                    test_dict = {}
                    field_number = 1

                    # Ignore the ID field and the Valid field for now
                    for row_name in data_fields[1:-1]:
                        test_dict[row_name] = fields[field_number]
                        field_number += 1

                    test_dict['valid'] = '0'
                    f.dict_root.update({checked_id: test_dict})

            # Close the file to free up resources (good practice)
            file.close()
            if keep_going:
                valid_dict = DataProcessor.send_to_validate(f.dict_root,
                                                            switch, dup_keys)
                return valid_dict
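
A brief usage sketch for fetch_text_contents, assuming it can be called as a plain helper and that each line of the input file holds one comma-separated record with the ID in the first field; the file name and switch value below are hypothetical placeholders.

switch = None  # placeholder for whatever flag send_to_validate expects

with open("records.txt", "r") as data_file:
    valid_dict = fetch_text_contents(data_file, switch)

The function closes the file itself after reading the last line, so the with-block above only adds cleanup if an exception is raised part-way through.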
Example #3
    def _default(self):

        file_contents = Lfh.load_file_data(Lfh, self.FILE_NAME)
        direction = ""
        if len(file_contents) == 0:
            print(Err.get_error_message(208))

        Lfh.output_file(file_contents, direction)
Example #4
    def _read_file(self, file_name, direction):
        file_contents = ""

        if file_name:
            try:
                file_contents = open(file_name, "r")
            except FileNotFoundError:
                print(Err.get_error_message(201))
            except OSError:
                print(Err.get_error_message(102))
            else:
                # Only hand the file over for output if it opened successfully
                Lfh.output_file(file_contents, direction)
        else:
            print(Err.get_error_message(204))
Example #5
    def _pickle_log(self):
        data_to_pickle = Lfh.get_log(Lfh, "log.txt")
        pickled_data = Pkl.pickle_data(data_to_pickle)
        self.pickled_log.append(pickled_data)
        if self.display_detail_output:
            print("Input: {}".format(data_to_pickle))
            print("Pickled Data: {}".format(self.pickled_log))
Example #6
    def split_file(self, file_name, switch, separator=","):
        dict_root = {}
        try:
            file = open(file_name, "r")
        except FileNotFoundError:
            print(Err.get_error_message(201))
        else:
            dup_keys = 0
            keep_going = True

            # Repeat for each line in the text file

            for line in file:
                # Split file into fields using ","
                fields = line.split(separator)
                checked_id = DataProcessor.validate_key(fields[0])
                if checked_id in dict_root:
                    dup_keys += 1
                    fields[6] = fields[6].rstrip()
                    data_to_log = "Duplicate Key" + str(fields[0:])
                    LogFileHandler.append_file('log.txt', data_to_log)
                else:
                    try:
                        dict_root.update({checked_id: {
                            'gender': fields[1],
                            'age': fields[2],
                            'sales': fields[3],
                            'bmi': fields[4],
                            'salary': fields[5],
                            'birthday': fields[6].rstrip(),
                            'valid': '0'}})
                    except IndexError:
                        print(Err.get_error_message(211))
                        keep_going = False
            # Close the file to free up resources (good practice)
            file.close()
            if keep_going:
                valid_dict = DataProcessor.send_to_validate(dict_root,
                                                            switch, dup_keys)
                self.write_file(valid_dict)
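
A self-contained illustration of the per-line parsing split_file performs above, using a hypothetical input line; DataProcessor.validate_key is replaced by a simple pass-through here purely to keep the sketch runnable on its own.

line = "A001,M,34,120,22.5,45000,1990-01-01\n"
fields = line.split(",")
checked_id = fields[0]  # stands in for DataProcessor.validate_key(fields[0])
entry = {checked_id: {'gender': fields[1], 'age': fields[2],
                      'sales': fields[3], 'bmi': fields[4],
                      'salary': fields[5],
                      'birthday': fields[6].rstrip(),
                      'valid': '0'}}
print(entry)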
Example #7
    def _wipe(self):
        if self.my_command_line.confirm("wipe the log"):
            Lfh.wipe_file(Lfh, self.FILE_NAME)
Example #8
    def _reverse(self):
        file_contents = Lfh.load_file_data(Lfh, self.FILE_NAME)
        direction = "r"

        Lfh.output_file(file_contents, direction)
Example #9
    def _append(self):
        Lfh.append_file(self.FILE_NAME, self.user_string)
        print("Written to log: {}".format(self.user_string))

    def append_log(data_to_log):
        Lfh.append_file('log.txt', data_to_log)