Example #1
def __init__(self, path):
    self.t = TimeMonitor(f'\tSub Time', 25)
    self.path = path
    self.line = None
    self.t_convert = None
    self.t_plot = None
    self.t_excel = None
Example #2
class InfoPrint(object):
    def __init__(self, path):
        self.t = TimeMonitor(f'\tSub Time', 25)
        self.path = path
        self.line = None
        self.t_convert = None
        self.t_plot = None
        self.t_excel = None

    def show(self):
        try:
            print('Sub Process Succeeded!')
            print(f'\tPath: {self.path}')
            print(f"\tLine: {self.line}")
            if self.t_convert is not None:
                print(self.t_convert)
            if self.t_plot is not None:
                print(self.t_plot)
            if self.t_excel is not None:
                print(self.t_excel)
            self.t.show()
        except Exception as e:
            print(f'Sub Process Failed!')
            print(f'\tPath: {self.path}')
            print(f'\tFail Info: {e}')
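
Every example in this set constructs a TimeMonitor, a project-specific helper that is not part of the excerpt. Purely as a point of reference, here is a minimal sketch of what such a helper might look like, assuming a label-plus-column-width constructor, a show() method that prints the elapsed time and a trans() method that returns it as a string; the real implementation may differ.

import time


class TimeMonitor:
    """Hypothetical sketch of the timing helper used throughout these examples."""

    def __init__(self, label: str, width: int = 25):
        self.label = label
        self.width = width
        self.start = time.perf_counter()  # start timing at construction

    def trans(self) -> str:
        # return the elapsed time as a formatted string
        elapsed = time.perf_counter() - self.start
        return f'{self.label.ljust(self.width)}: {elapsed:.3f} s'

    def show(self):
        # print the elapsed time directly
        print(self.trans())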
Example #3
def main(target_path: str = '', process: int = 0):
    """
    Process all files found under target_path in a multiprocessing pool
    """
    if target_path == '':
        _path = os.getcwd()
    else:
        _path = target_path

    if not process:
        # TODO: Set Proper Process Number for ProcessingPool!!!
        num_pool = 8
    else:
        num_pool = process

    t_total = TimeMonitor('\nTotal Time')

    pool = multiprocessing.Pool(num_pool)

    path_list = get_all_files(_path)

    for each_path in path_list:
        pool.apply_async(func=single_process, args=(each_path, ))

    pool.close()
    pool.join()

    Summary.to_excel()
    t_total.show()

    print('------------------------------')
    print('Finished!'.center(30))
    print('------------------------------')
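
Because main() creates a multiprocessing.Pool, it should only be called from behind an if __name__ == '__main__' guard (on Windows, child processes re-import the module, so an unguarded call would recurse). A minimal invocation sketch, with the folder path purely illustrative:

if __name__ == '__main__':
    # process the current working directory with the default pool size
    main()

    # or point it at a specific folder with a smaller pool, e.g.:
    # main(target_path=r'D:\measurements', process=4)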
Example #4
def delete_data(path):
    """
    Delete by program created files
    """
    t = TimeMonitor('Delete Time', 25)

    file_datasource_path = path

    name_txt = 'Record.txt'
    path_full = ''.join([file_datasource_path, '\\', name_txt])

    # Read List from Record.txt in json Type
    # Convert it to Set Type
    file_origin_set = json_read(path_full)

    # Put all folders and files into a List
    files_walk_list = os.walk(file_datasource_path)
    files_list = []

    for dirpath, dirnames, filenames in files_walk_list:
        for item in filenames:
            file_origin_path = ''.join([dirpath, '\\', item])
            files_list.append(file_origin_path)
        for item in dirnames:
            dir_origin_path = ''.join([dirpath, '\\', item])
            files_list.append(dir_origin_path)

    file_now_set = set(files_list)

    # Get the set of files and folders created by the program
    file_new_set = file_now_set.difference(file_origin_set)

    for item in file_new_set:
        # Avoid files in already deleted folders
        try:
            # Skip anything under ...\.git, where deletion may be denied
            if '.git' in item:
                continue
            elif os.path.isdir(item):
                shutil.rmtree(item)
            else:
                os.remove(item)

            if __name__ == '__main__':
                print(f'Delete Succeeded: {item}')
            else:
                print(f'\tDelete Succeeded: {item}')
        except FileNotFoundError:
            continue

    if __name__ == '__main__':
        t.show()

        print('\n')
        print('------------------------------')
        print("Delete Succeeded!".center(30))
        print('------------------------------')
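
delete_data (and record_data in Example #6) relies on json_read and json_write helpers that are not part of the excerpt. A minimal sketch of what they could look like, assuming they simply serialize the file list to and from Record.txt with the standard json module:

import json


def json_write(path: str, data: list):
    # dump the list of paths into the record file as JSON
    with open(path, 'w') as f:
        json.dump(data, f)


def json_read(path: str) -> set:
    # load the JSON list back and return it as a set for the set difference
    with open(path, 'r') as f:
        return set(json.load(f))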
Example #5
def excel_write(path: str, data_dict: dict, data_effect: dict):
    """
    Create Excel with path and data_dict

    :param path: Path of target EXCEL
    :param data_dict: Dict to generate DataFrame
    :param data_effect: Dict to get start and end num
    """
    try:
        t = TimeMonitor('\tGenerate Excel Time', 25)

        # TODO Rewrite Header of EXCEL
        # create the dict for the DataFrame
        excel_dict = {
            'Time [s]':
            data_dict['s'][data_effect['num_start']:data_effect['num_end']],
            'Fx [N]':
            data_dict['fx'][data_effect['num_start']:data_effect['num_end']],
            'Fy [N]':
            data_dict['fy'][data_effect['num_start']:data_effect['num_end']],
            'Fz [N]':
            data_dict['fz'][data_effect['num_start']:data_effect['num_end']],
            'u = Fx/Fz':
            data_dict['mu'][data_effect['num_start']:data_effect['num_end']],
            # 'F4 [N]': data_dict['f4'],
            # 'F5 [N]': data_dict['f5']
        }

        # create Dataframe
        df = pd.DataFrame(excel_dict)

        # write data into Excel
        excel_path_split = os.path.split(path)
        excel_path_splitext = os.path.splitext(excel_path_split[1])
        excel_path_list = [
            excel_path_split[0], '\\', excel_path_splitext[0], '.xlsx'
        ]
        excel_path = ''.join(excel_path_list)
        df.to_excel(excel_path, index=False)

        return t.trans()

    except Exception as e_info:
        print('Generate Excel Failed!!!')
        print(f'\tFailed Path: {path}')
        print(f'\t{e_info}')
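
A hypothetical call to excel_write, assuming data_dict and data_effect have the shape produced by data_convert in Example #7 (value lists keyed by 's', 'fx', 'fy', 'fz', 'mu' plus 'num_start'/'num_end' indices). Note that pandas.DataFrame.to_excel needs an Excel engine such as openpyxl installed:

# hypothetical input shaped like the output of data_convert
data_dict = {
    's':  [0.0, 0.1, 0.2, 0.3],
    'fx': [0.0, 1.2, 1.3, 0.1],
    'fy': [0.0, 0.4, 0.5, 0.0],
    'fz': [1.0, 5.0, 5.2, 0.2],
    'mu': [0.0, 0.24, 0.25, 0.5],
}
data_effect = {'num_start': 1, 'num_end': 3}

# writes D:\measurements\run1.xlsx next to the source TXT file
elapsed = excel_write(r'D:\measurements\run1.txt', data_dict, data_effect)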
Example #6
def record_data(path):
    """
    Record source files
    """
    t = TimeMonitor('Record Time', 25)

    file_datasource_path = path

    name_txt = 'Record.txt'
    path_full = ''.join([file_datasource_path, '\\', name_txt])

    # Create/clear Record.txt first so the walk below records it as original
    with open(path_full, 'w'):
        pass

    files_walk_list = os.walk(file_datasource_path)

    # Put all folders and files into a List
    files_list = []

    for dirpath, dirnames, filenames in files_walk_list:
        for item in filenames:
            file_origin_path = ''.join([dirpath, '\\', item])
            files_list.append(file_origin_path)
        for item in dirnames:
            dir_origin_path = ''.join([dirpath, '\\', item])
            files_list.append(dir_origin_path)

    # Write the list into Record.txt as JSON
    json_write(path_full, files_list)
    if __name__ == '__main__':
        t.show()

        print('\n')
        print('------------------------------')
        print("Record Succeeded!".center(30))
        print('------------------------------')
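
record_data and delete_data (Example #4) are meant to be used as a pair: take a snapshot of the original files before processing, then remove everything the program generated afterwards. A sketch of that flow, with the call site assumed for illustration:

import os

if __name__ == '__main__':
    data_root = os.getcwd()      # hypothetical working folder

    record_data(data_root)       # snapshot the original files into Record.txt
    main(target_path=data_root)  # run the processing from Example #3
    delete_data(data_root)       # delete everything created since the snapshot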
Example #7
def data_convert(file: str):
    """
    Sort data from TXT and transfer data, data_effect, data_avg in Dict type

    :param file: Path of file
    :return: data, data_effect, data_avg
    """
    try:
        t = TimeMonitor('\tData Convert Time', 25)

        # read the whole TXT file and count its lines
        with open(file, 'r') as f:
            data_total = f.readlines()

            # count the separators in the first line to detect the data format
            f.seek(0)
            data_firstline = f.readline()
            data_itemnum = data_firstline.count(',')

        len_data = len(data_total)

        # used by ProcessSingle
        # print(f"\tLine: {len_data}")

        # init data space
        s = []
        fx = []
        fy = []
        fz = []
        # f4 = []
        # f5 = []

        # parse each line into a list of numeric values
        for data_line in data_total:
            if file.rfind('V2 T2mm 38 0,1 M1.txt') == -1:
                if data_itemnum == 6:
                    data_list = readline_format04(data_line)
                elif data_itemnum == 4:
                    data_list = readline_format03(data_line)
                else:
                    print(f"Error: Data Item Number Wrong\n"
                          f"Error File: {file}")
                    return
            else:
                data_list = readline_format05(data_line)

            if data_list is None:
                break

            # create DataDict for DataFrame
            s.append(data_list[0])
            fx.append(data_list[1])
            fy.append(data_list[2])
            fz.append(data_list[3])
            # f4.append(data_list[4])
            # f5.append(data_list[5])

        s_array = array(s)
        fx_array = array(fx)
        fy_array = array(fy)
        fz_array = array(fz)

        # determine whether the data needs to be corrected with a calibration file
        fix_stat, calibration_file = determine_data(file)
        if fix_stat:
            fx, fy, fz = fix_data(s_array, fx_array, fy_array, fz_array,
                                  calibration_file)
            # print(f'{file} is fixed')

        # Calculate mu = Fx / Fz
        mu = fx_array / fz_array

        # init dict for the averaging-window results
        data_avg = {}
        # init dict for the effective-range info
        data_effect = {}

        # reference thresholds: averaging window at 80% of max Fx,
        # effective range at 10% of max Fx
        fx_avg_ref = max(fx) * 0.8
        fx_eff_ref = max(fx) * 0.1

        # --> the averaging start point must come after the effective start point
        flag_num_start = False
        for i in range(len_data):
            # catch index of effective start point
            if not flag_num_start:
                if abs(fx[i]) >= fx_eff_ref:
                    data_effect['num_start'] = i
                    flag_num_start = True
            # catch index of average start point
            else:
                if abs(fx[i]) >= fx_avg_ref:
                    data_avg['num_start'] = i
                    break
                else:
                    continue

        # --> the averaging end point must come before the effective end point
        flag_num_start = False
        for i in range((len_data - 1), 0, -1):
            # catch index of effective end point
            if not flag_num_start:
                if abs(fx[i]) >= fx_eff_ref:
                    data_effect['num_end'] = i
                    flag_num_start = True
            # catch index of averaging end point
            else:
                if abs(fx[i]) >= fx_avg_ref:
                    data_avg['num_end'] = i
                    break
                else:
                    continue

        data = {
            's': s,
            'fx': fx,
            'fy': fy,
            'fz': fz,
            'mu': mu,
            # 'f4': f4,
            # 'f5': f5
        }

        # get max and min values of Fx, Fy and Fz within the averaging window
        data_effect['fx_max'] = max(
            fx[data_avg['num_start']:data_avg['num_end']])
        data_effect['fx_min'] = min(
            fx[data_avg['num_start']:data_avg['num_end']])
        data_effect['fy_max'] = max(
            fy[data_avg['num_start']:data_avg['num_end']])
        data_effect['fy_min'] = min(
            fy[data_avg['num_start']:data_avg['num_end']])
        data_effect['fz_max'] = max(
            fz[data_avg['num_start']:data_avg['num_end']])
        data_effect['fz_min'] = min(
            fz[data_avg['num_start']:data_avg['num_end']])

        # calculate average value for each force
        data_avg['fx_avg'] = mean(
            list(fx[data_avg['num_start']:data_avg['num_end']]))
        data_avg['fy_avg'] = mean(
            list(fy[data_avg['num_start']:data_avg['num_end']]))
        data_avg['fz_avg'] = mean(
            list(fz[data_avg['num_start']:data_avg['num_end']]))
        # data_avg['f4'] = mean(list(f4[num_avg_start:num_avg_end]))
        # data_avg['f5'] = mean(list(f5[num_avg_start:num_avg_end]))

        # calculate median value for each force
        data_median = dict()
        data_median['fx_median'] = median(
            list(fx[data_avg['num_start']:data_avg['num_end']]))
        data_median['fy_median'] = median(
            list(fy[data_avg['num_start']:data_avg['num_end']]))
        data_median['fz_median'] = median(
            list(fz[data_avg['num_start']:data_avg['num_end']]))
        # data_median['f4_median'] = mean(list(f4[num_avg_start:num_avg_end]))
        # data_median['f5_median'] = mean(list(f5[num_avg_start:num_avg_end]))

        # Calculate Sum F for every time point
        fsum_array = (fx_array**2 + fy_array**2 + fz_array**2)**0.5
        data_sum = {
            'fsum': fsum_array[data_avg['num_start']:data_avg['num_end']],
            'fsum_max':
            fsum_array[data_avg['num_start']:data_avg['num_end']].max(),
            'fsum_min':
            fsum_array[data_avg['num_start']:data_avg['num_end']].min(),
            'fsum_avg':
            mean(fsum_array[data_avg['num_start']:data_avg['num_end']])
        }

        # Standard deviation band covering ~99.7% of the data --> 6*sigma
        amplitude_99 = {
            '6sigma_fx':
            np.std(fx_array[data_avg['num_start']:data_avg['num_end']],
                   ddof=1) * 6,
            '6sigma_fy':
            np.std(fy_array[data_avg['num_start']:data_avg['num_end']],
                   ddof=1) * 6,
            '6sigma_fz':
            np.std(fz_array[data_avg['num_start']:data_avg['num_end']], ddof=1)
            * 6,
            '6sigma_fsum':
            # NOTE: a factor of 3 is applied here, unlike the 6*sigma above
            np.std(data_sum['fsum'], ddof=1) * 3,
        }

        # Calculate mu_avg
        mu_avg = mean(list(mu[data_avg['num_start']:data_avg['num_end']]))

        return (data, data_effect, data_avg, data_median, data_sum,
                amplitude_99, len_data, mu_avg, t.trans())

    except Exception as e_info:
        print('Data Convert Failed!!!')
        print(f'\tFailed Path: {file}')
        print(f'\t{e_info}')
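
data_convert delegates the per-line parsing to readline_format03/04/05, which are not included in the excerpt. Purely as an illustration of the assumed contract (a comma-separated text line in, a list of floats out, None for a line that cannot be parsed), a hypothetical sketch:

def readline_format04(data_line: str):
    """Hypothetical parser: split a comma-separated line into floats."""
    try:
        # a line with six commas yields seven fields on a plain split;
        # the real project-specific parsers may need extra handling
        # (units, decimal commas, trailing separators, ...)
        return [float(item) for item in data_line.strip().split(',')]
    except ValueError:
        # signal an unparsable line; data_convert stops reading on None
        return None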
Example #8
def data_plot(path: str, data: dict, data_effect: dict, data_avg: dict):
    """
    Piot the data from file

    :param data:
    :param data_effect:
    :param data_avg:
    :param path:Path of the file
    """
    try:
        t = TimeMonitor('\tPlot Time', 25)

        # TODO: Set Size of PNG
        plt.figure(figsize=(18, 6))

        s = data['s']
        fx = data['fx']
        fy = data['fy']
        fz = data['fz']

        num_start = data_effect['num_start']
        num_end = data_effect['num_end']

        fx_max_effect = data_effect['fx_max']
        fx_min_effect = data_effect['fx_min']
        fy_max_effect = data_effect['fy_max']
        fy_min_effect = data_effect['fy_min']
        fz_max_effect = data_effect['fz_max']
        fz_min_effect = data_effect['fz_min']

        fx_avg = data_avg['fx_avg']
        fy_avg = data_avg['fy_avg']
        fz_avg = data_avg['fz_avg']

        l_f1, = plt.plot(s, fx, label='Fx')
        l_f2, = plt.plot(s, fy, label='Fy')
        l_f3, = plt.plot(s, fz, label='Fz')
        l_fx_avg, = plt.plot(s, [fx_avg for _ in s], label='Fx_avg')
        l_fy_avg, = plt.plot(s, [fy_avg for _ in s], label='Fy_avg')
        l_fz_avg, = plt.plot(s, [fz_avg for _ in s], label='Fz_avg')
        # l_f4, = plt.plot(x, f4, label='f4')
        # l_f5, = plt.plot(x, f5, label='f5')

        # TODO Set Effective Horizontal Axis Range
        plt.xlim((s[num_start] - 0.1, s[num_end] + 0.1))  # plot the effective range
        # plt.xlim((s[0], s[-1]))                         # plot the full range

        plt.xlabel('Time (s)')
        plt.ylabel('Force (N)')

        plt.title(f"{os.path.basename(path)}".replace('.txt', ''))

        lg = plt.legend(handles=[l_f1, l_f2, l_f3, l_fx_avg, l_fy_avg, l_fz_avg],
                        labels=[
                            'Fx  -->  Fx_max=' + '{0:6.2f}N'.format(fx_max_effect).rjust(8) +
                            '  Fx_min=' + '{0:6.2f}N'.format(fx_min_effect).rjust(8),

                            'Fy  -->  Fy_max=' + '{0:6.2f}N'.format(fy_max_effect).rjust(8) +
                            '  Fy_min=' + '{0:6.2f}N'.format(fy_min_effect).rjust(8),

                            'Fz  -->  Fz_max=' + '{0:6.2f}N'.format(fz_max_effect).rjust(8) +
                            '  Fz_min=' + '{0:6.2f}N'.format(fz_min_effect).rjust(8),

                            'Fx_avg = {0:6.2f}N'.format(fx_avg).rjust(8),
                            'Fy_avg = {0:6.2f}N'.format(fy_avg).rjust(8),
                            'Fz_avg = {0:6.2f}N'.format(fz_avg).rjust(8),
                        ],
                        loc='best',  # TODO: Set Location of Legend
                        )

        # lg.get_frame().set_facecolor('none')
        # lg.get_frame().set_linewidth(0.0)

        plt.savefig(f"{path.replace('.txt', '')}.png")
        # plt.show()

        plt.close()

        return t.trans()

    except Exception as e_info:
        print('Data Plot Failed!!!')
        print(f'\tFailed Path: {path}')
        print(f'\t{e_info}')
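
Example #3 dispatches each file to a single_process worker that is not shown in the excerpt. Based on the pieces above (InfoPrint, data_convert, data_plot, excel_write), a hedged sketch of what such a worker might look like; the real project code may differ, for instance in how results are collected for Summary.to_excel():

def single_process(path: str):
    """Hypothetical worker: convert, plot and export one measurement file."""
    info = InfoPrint(path)                  # Example #2
    result = data_convert(path)             # Example #7
    if result is None:                      # conversion failed or was aborted
        info.show()
        return

    (data, data_effect, data_avg, data_median, data_sum,
     amplitude_99, len_data, mu_avg, t_convert) = result

    info.line = len_data
    info.t_convert = t_convert
    info.t_plot = data_plot(path, data, data_effect, data_avg)   # Example #8
    info.t_excel = excel_write(path, data, data_effect)          # Example #5
    info.show()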