コード例 #1
0
def create_summary_file_for_tail(tail, tail_max_min_list, start_time,
                                 list_of_BSM, day_directory,
                                 tails_counter, list_of_tails):
    """Create the per-tail summary (.out) and statistics (.stat) files.

    Collects all events of *tail* from the BSM files of *day_directory*
    into a matrix (one row per event number in the range given by
    *tail_max_min_list*, 22 columns), drops rows without coincidences,
    dumps the survivors to ``<day_directory><tail>.out`` and finally
    computes statistics into ``<day_directory><tail>.stat``.

    tail_max_min_list -- [min_event_number, max_event_number] for this tail.
    tails_counter / list_of_tails -- used only for the progress bar.
    """
    # One row per possible event number in [min, max], 22 columns
    # (presumably one per BSM board -- filled by fill_the_matrix_of_events).
    matrix_of_events = [[''] * 22
                        for _ in range(tail_max_min_list[1] + 1
                                       - tail_max_min_list[0])]

    print("\nFiles list for tail  {}  from  {}  are creating...".format(tail, day_directory))
    tail_files = list_of_tail_files(day_directory, list_of_BSM, tail)
    print("Event matrix for tail  {}  from  {}  are creating...".format(tail, day_directory))
    matrix_of_events = fill_the_matrix_of_events(matrix_of_events, tail_files, tail, start_time)
    print("Cleaning the event matrix for tail  {}  from  {}  for 0-coincidences events...".format(tail, day_directory))
    before = len(matrix_of_events)
    matrix_of_events = clean_the_matrix_of_events(matrix_of_events)
    after = len(matrix_of_events)
    # BUG FIX: guard the percentage against an empty matrix
    # (ZeroDivisionError when before == 0).
    deleted_share = (before - after) / before * 100 if before else 0.0
    print("DELETED  {:.3f}% events".format(deleted_share))
    print("Out file for  {}  tail from  {}  are filling...".format(tail, day_directory))
    # Plain 'w' is enough: the file is only ever written ('w+' added an
    # unused read capability).
    with open(day_directory + tail + '.out', 'w') as out_tail_file:
        for event_number, event_row in enumerate(matrix_of_events):
            out_tail_file.write(
                "Event_number\t{}\tin_tail_files\t{}\tfor_the\t{}\n".format(
                    event_number, tail, day_directory))
            for bsm_record in event_row:
                out_tail_file.write("{}\n".format(bsm_record))
            out_tail_file.write('\n')

    tools.syprogressbar(
        tails_counter,
        len(list_of_tails),
        u'\u24C9',
        "creating summary files for tails",
        start_time)
    stat_file = day_directory + tail + '.stat'
    print("Statistics for tail {} from {} are calculating...".format(tail, day_directory))
    print_statistics_for_matrix_of_events(matrix_of_events, stat_file)
コード例 #2
0
def to_process(start_time):
    """Manages the conveyor of processing. Put all the files on it in order.

    Takes .files_list.txt and file by file (line by line) put them to the
    process_one_file function like children in "Another Brick In The Wall".
    In addition provides all needed interface. Exactly from here
    comes the BASH outstream through all binary files cleaning."""

    # FIX: the file used to be opened and fully read twice (once only to
    # count lines).  Read it once and iterate the cached list.
    with open('.files_list.txt', 'r') as files_list:
        files_to_process = files_list.readlines()
    number_of_files_to_process = len(files_to_process)
    print("\nStart to process...\n")
    counter = 0
    for file_to_process in files_to_process:
        file_to_process = tools.check_and_cut_the_tail(file_to_process)
        to_process_single_file(file_to_process)
        print("\nPreparing binary files:\n")
        counter += 1
        tools.syprogressbar(
            counter,
            number_of_files_to_process,
            u'\u24B7',
            "preparing binary files",
            start_time)
        print("\n{} is processing now.".format(file_to_process))
コード例 #3
0
def fill_the_matrix_of_events(matrix_of_events, tail_files, tail, start_time):
    """Fill *matrix_of_events* with amplitude records parsed from .wfp files.

    Each 282-byte chunk of a .wfp file is one record: a 12-byte head
    containing the event number, an 8-byte bit-packed timestamp, the MAROC
    board number and 32 (float amplitude, two byte flags) triples.  The
    decoded record string is stored at
    ``matrix_of_events[event_number][maroc_number]``.  Corrupted chunks and
    missing files are reported and skipped.  Returns the (mutated) matrix.
    """
    chunk_size = 282  # fixed on-disk record size of one .wfp chunk
    tail_files_counter = 0
    for tail_file in tail_files:
        print("Tail file  {}  amplitudes collecting...".format(tail_file))
        # FIX: keep the path in its own name.  The original rebound
        # ``tail_file`` to the open file object, so both error messages
        # below printed a file-object repr instead of the file path.
        wfp_path = tools.make_BSM_file_temp(tail_file) + '.wfp'
        try:
            with open(wfp_path, 'rb') as wfp_file:
                chunk = wfp_file.read(chunk_size)
                chunk_counter = 0
                while chunk:
                    try:
                        head_array = tools.unpacked_from_bytes('hhii', chunk[:12])
                        num_event_1 = head_array[2]
                        maroc_number = tools.unpacked_from_bytes('h', chunk[20:22])[0]
                        time_array = tools.unpacked_from_bytes('hhhh', chunk[12:20])
                        # Timestamp is bit-packed across three 16-bit words:
                        # word0: ns (bits 0-6, 10 ns units), mks low 9 bits;
                        # word1: mks top bit, mls (bits 1-10), s low 5 bits;
                        # word2: s top bit, m (bits 1-6), h (bits 7-11).
                        ns = (time_array[0] & 0x7f)*10
                        mks = (time_array[0] & 0xff80) >> 7
                        mks |= (time_array[1] & 1) << 9
                        # BUG FIX: was ``(time_array[1] & 0x7fe) >> 11`` which
                        # is always 0 (the mask covers bits 1-10 only, and the
                        # shift discards all of them).  Shifting by 1 extracts
                        # the milliseconds field, consistent with the packing
                        # of the neighboring fields above/below.
                        mls = (time_array[1] & 0x7fe) >> 1
                        s = (time_array[1] & 0xf800) >> 11
                        s |= (time_array[2] & 1) << 5
                        m = (time_array[2] & 0x7e) >> 1
                        h = (time_array[2] & 0xf80) >> 7
                        time_string = "{}:{}:{}.{}.{}.{}".format(h, m, s, mls, mks, ns)
                        result_array = tools.unpacked_from_bytes('fBB'*32, chunk[24:-4])
                        result_string_ampls = '\t'.join([str(x) for x in result_array])
                        matrix_of_events[num_event_1][maroc_number] =\
                            "{}\t{}\t{}".format(
                                maroc_number,
                                time_string,
                                result_string_ampls)
                    except Exception:
                        print("{} Chunk number {} in file {} is seems to be corrupted!".format(
                            "WFP FILE CHUNK CORRUPTION ERROR!",
                            chunk_counter,
                            wfp_path))
                    chunk_counter += 1
                    chunk = wfp_file.read(chunk_size)

            tail_files_counter += 1
            tools.syprogressbar(
                tail_files_counter,
                len(tail_files),
                u'\u24BB',
                "tail files {} amplitudes collecting".format(tail),
                start_time)
        except Exception:
            print("{} File {} is seems to be not existed!".format(
                    "WFP FILE EXISTING ERROR!",
                    wfp_path))
    return matrix_of_events
コード例 #4
0
def count_tails_range_mult(start_time):
    """For all tails in .files_list.txt find maximum and minimum event number.

    For each day it finds all the files with the same tail (all the
    files ".001", then all the files ".002" etc.). There must be 22 files
    for every tail in every day in general case. And it searches the
    minimal and maximal event number for this (22) files.

    Tails of one day are processed in parallel, at most
    tools.MAX_NUMBER_OF_SIMULTANEOUSLY_OPENED_FILES worker processes
    at a time.

    Finally it returns the dictionary of dictionaries with next construction:
    dict_of_days = {day: dict_of_max_min},
    dict_of_max_min = {tail: [min_number, max_number]}"""

    # Shared between worker processes; snapshotted per day below.
    dict_of_max_min = Manager().dict()
    dict_of_days = {}
    tails_counter = 0
    print("Evevt numbers range in parallel bsms are finding out...")
    with open('.files_list.txt', 'r') as files:
        files_list = files.readlines()
    days_set = manticore_preprocessing.set_of_days(files_list)

    for day in sorted(list(days_set)):
        tails_list = sorted(
            list(manticore_preprocessing.set_of_tails(files_list, day)))
        chunk_size = tools.MAX_NUMBER_OF_SIMULTANEOUSLY_OPENED_FILES
        chunk_counter = 0
        chunk_to_process =\
            tails_list[chunk_size*chunk_counter:chunk_size*(chunk_counter + 1)]
        while chunk_to_process:
            processes = []
            for tail in chunk_to_process:
                process = Process(target=dict_of_num_min_max_in_tail_mult,
                                  args=(tail, files_list, day,
                                        dict_of_max_min))
                processes.append(process)
                process.start()
                tails_counter += 1
                tools.syprogressbar(
                    tails_counter, len(tails_list), u'\u24C2',
                    "finding out of evevt numbers range in {} tail finished".
                    format(tail), start_time)
            # BUG FIX: the join loop and the chunk advance used to sit INSIDE
            # the ``for tail`` loop, which (a) joined workers before the rest
            # of the chunk was even started, serializing the "parallel" work,
            # and (b) bumped chunk_counter once per tail, skipping whole
            # chunks of tails.  Join the complete chunk, then advance --
            # the same structure to_process_mult() uses.
            for process in processes:
                process.join()
            chunk_counter += 1
            chunk_to_process =\
                tails_list[chunk_size*chunk_counter:chunk_size*(chunk_counter + 1)]
        # BUG FIX: store a per-day snapshot.  Storing the shared proxy itself
        # made every day alias one dict accumulating the tails of all days.
        dict_of_days[day] = dict(dict_of_max_min)
        dict_of_max_min.clear()
        print(tools.time_check(start_time))
    print("Finding out of evevt numbers range in parallel bsms was finished.")
    print(tools.time_check(start_time))
    return dict_of_days
コード例 #5
0
def to_process_mult(start_time):
    """Drive the processing conveyor in parallel.

    Reads .files_list.txt and hands every listed file to
    to_process_single_file_mult in its own process, launching at most
    tools.MAX_NUMBER_OF_SIMULTANEOUSLY_OPENED_FILES workers per batch.
    Worker reports are gathered through a shared list and appended to
    .mess.txt once every batch has finished.
    """
    mess_reports = Manager().list()
    with open('.files_list.txt', 'r') as file_of_files:
        all_files = file_of_files.readlines()
        total_files = len(all_files)
        print("\nStart to process...\n")
        launched = 0
        batch_size = tools.MAX_NUMBER_OF_SIMULTANEOUSLY_OPENED_FILES
        batch_index = 0
        batch = all_files[:batch_size]
        while batch:
            workers = []
            for raw_name in batch:
                clean_name = tools.check_and_cut_the_tail(raw_name)
                worker = Process(target=to_process_single_file_mult,
                                 args=(clean_name, mess_reports))
                workers.append(worker)
                worker.start()
                print("\nPreparing binary files:\n")
                launched += 1
                tools.syprogressbar(launched, total_files,
                                    u'\u24B7', "preparing binary files",
                                    start_time)
                print("\n{} is processing now.".format(clean_name))
            # Wait for the whole batch before launching the next one.
            for worker in workers:
                worker.join()
            batch_index += 1
            batch = all_files[batch_size*batch_index:batch_size*(batch_index + 1)]
    with open(".mess.txt", "a") as mess_file:
        print("Filling the mess file by reports...")
        for report in mess_reports:
            mess_file.write(report)
コード例 #6
0
def count_tails_range(start_time):
    """For all tails in .files_list.txt find maximum and minimum event number.

    For each day it finds all the files with the same tail (all the
    files ".001", then all the files ".002" etc.). There must be 22 files
    for every tail in every day in general case. And it searches the
    minimal and maximal event number for this (22) files.

    Finally it returns the dictionary of dictionaries with next construction:
    dict_of_days = {day: dict_of_max_min},
    dict_of_max_min = {tail: [min_number, max_number]}"""

    dict_of_days = {}
    print("Evevt numbers range in parallel bsms are finding out...")
    with open('.files_list.txt', 'r') as files:
        files_list = files.readlines()
    days_list = sorted(list(set_of_days(files_list)))

    for day in days_list:
        # BUG FIX: create the per-tail dict anew for every day.  A single
        # shared dict made every entry of dict_of_days alias one dict that
        # kept accumulating the tails of all previous days, contradicting
        # the contract stated in the docstring.
        dict_of_max_min = {}
        tails_list = sorted(list(set_of_tails(files_list, day)))
        # BUG FIX: restart the counter each day so the progress-bar
        # numerator matches its per-day denominator len(tails_list).
        tails_counter = 0
        for tail in tails_list:
            dict_of_max_min[tail] = dict_of_num_min_max_in_tail(
                tail,
                files_list,
                day)
            tails_counter += 1
            tools.syprogressbar(tails_counter,
                                len(tails_list),
                                u'\u24C2',
                                "finding out of evevt numbers range in {} tail finished".format(
                                    tail),
                                start_time)
        # Assign once per day, after all of its tails are collected
        # (was redundantly reassigned on every inner iteration).
        dict_of_days[day] = dict_of_max_min
        print(tools.time_check(start_time))
    print("Finding out of evevt numbers range in parallel bsms was finished.")
    print(tools.time_check(start_time))
    return dict_of_days