Example #1
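# NOTE: a minimal set of imports this fragment relies on (an assumption based on the names
# used below); project-specific helpers such as FileHeaderReaderADR, FileHeaderReaderJDS,
# catalogue_pulsar, DM_full_shift_calc, etc. are assumed to be imported from the packages
# of the original repository and are not shown here.
import os
import time
import numpy as np
import pylab
import matplotlib.pyplot as plt
from matplotlib import rc
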
def pulsar_incoherent_dedispersion(
        common_path, filename, pulsar_name, average_const, profile_pic_min,
        profile_pic_max, cleaning_Iana, cleaning, no_of_iterations,
        std_lines_clean, pic_in_line, std_pixels_clean, SpecFreqRange,
        freqStart, freqStop, save_profile_txt, save_compensated_data,
        customDPI, colormap):

    previousTime = time.time()
    currentTime = time.strftime("%H:%M:%S")
    currentDate = time.strftime("%d.%m.%Y")

    rc('font', size=6, weight='bold')
    data_filename = common_path + filename

    # *** Creating a folder where all pictures and results will be stored (if it doesn't exist) ***
    newpath = 'RESULTS_pulsar_single_pulses_' + pulsar_name + '_' + filename
    if not os.path.exists(newpath):
        os.makedirs(newpath)

    # Path to timeline file to be analyzed:
    time_line_file_name = common_path + filename[-31:-13] + '_Timeline.txt'

    if save_profile_txt > 0:
        # *** Creating a name for long timeline TXT file ***
        profile_file_name = newpath + '/' + filename + '_time_profile.txt'
        # Open in 'w' mode and close immediately to clear any existing file with the same name
        profile_txt_file = open(profile_file_name, 'w')
        profile_txt_file.close()

    # *** Opening DAT datafile ***
    file = open(data_filename, 'rb')

    # reading FHEADER
    df_filesize = os.stat(data_filename).st_size  # Size of file
    df_filename = file.read(32).decode('utf-8').rstrip(
        '\x00')  # Initial data file name
    file.close()

    receiver_type = df_filename[-4:]

    # Reading file header to obtain main parameters of the file
    if receiver_type == '.adr':
        [TimeRes, fmin, fmax, df, frequency_list,
         FFTsize] = FileHeaderReaderADR(data_filename, 0, 1)

    if receiver_type == '.jds':
        [
            df_filename, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, sp_in_file,
            ReceiverMode, Mode, Navr, TimeRes, fmin, fmax, df, frequency_list,
            FFTsize, dataBlockSize
        ] = FileHeaderReaderJDS(data_filename, 0, 1)

    # Manually set frequencies for the two-channels mode (CLCfrq is defined only for .jds files)
    if receiver_type == '.jds' and int(CLCfrq / 1000000) == 33:
        #FFTsize = 8192
        fmin = 16.5
        fmax = 33.0
        frequency_list = np.linspace(fmin, fmax, FFTsize)

    # Number of spectra in file: (file size minus 1024-byte header) / (8 bytes per value per channel)
    sp_in_file = int((df_filesize - 1024) / (len(frequency_list) * 8))

    pulsar_ra, pulsar_dec, DM, p_bar = catalogue_pulsar(pulsar_name)
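    # catalogue_pulsar presumably returns the source coordinates (RA, Dec), the dispersion
    # measure DM (in pc/cm3) and the barycentric period p_bar (in seconds) from a built-in
    # pulsar catalogue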

    # ************************************************************************************
    #                             R E A D I N G   D A T A                                *
    # ************************************************************************************

    # Time line file reading
    timeline, dt_timeline = time_line_file_reader(time_line_file_name)

    # Selecting the frequency range of data to be analyzed
    if SpecFreqRange == 1:
        A = []
        B = []
        for i in range(len(frequency_list)):
            A.append(abs(frequency_list[i] - freqStart))
            B.append(abs(frequency_list[i] - freqStop))
        ifmin = A.index(min(A))
        ifmax = B.index(min(B))
        shift_vector = DM_full_shift_calc(ifmax - ifmin, frequency_list[ifmin],
                                          frequency_list[ifmax],
                                          df / pow(10, 6), TimeRes, DM,
                                          receiver_type)
        print(' Number of frequency channels:  ', ifmax - ifmin)

    else:
        shift_vector = DM_full_shift_calc(
            len(frequency_list) - 4, fmin, fmax, df / pow(10, 6), TimeRes, DM,
            receiver_type)
        print(' Number of frequency channels:  ', len(frequency_list) - 4)
        ifmin = 0
        ifmax = int(len(frequency_list) - 4)
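
    # The shift vector holds, for each frequency channel, the dispersion delay expressed in
    # time samples. For the cold-plasma dispersion law the delay of a channel at frequency
    # f (MHz) relative to the highest frequency f_max (MHz) is approximately
    #   dt = 4.148808e3 * DM * (1 / f**2 - 1 / f_max**2)  [seconds],
    # which DM_full_shift_calc presumably converts to samples using the time resolution TimeRes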

    if save_compensated_data > 0:
        with open(data_filename, 'rb') as file:
            file_header = file.read(1024)  # Data file header read

        # *** Creating a binary file with data for long data storage ***
        new_data_file_name = pulsar_name + '_DM_' + str(DM) + '_' + filename
        new_data_file = open(new_data_file_name, 'wb')
        new_data_file.write(file_header)
        new_data_file.seek(624)  # Lb place in header
        new_data_file.write(np.int32(ifmin).tobytes())
        new_data_file.seek(628)  # Hb place in header
        new_data_file.write(np.int32(ifmax).tobytes())
        new_data_file.seek(632)  # Wb place in header
        new_data_file.write(np.int32(ifmax - ifmin).tobytes())
        new_data_file.close()

        # *** Creating a name for long timeline TXT file ***
        new_TLfile_name = pulsar_name + '_DM_' + str(
            DM) + '_' + data_filename[:-13] + '_Timeline.txt'
        # Open in 'w' mode and close immediately to clear any existing file with the same name
        new_TLfile = open(new_TLfile_name, 'w')
        new_TLfile.close()

        del file_header

    max_shift = np.abs(shift_vector[0])

    if SpecFreqRange == 1:
        buffer_array = np.zeros((ifmax - ifmin, 2 * max_shift))
    else:
        buffer_array = np.zeros((len(frequency_list) - 4, 2 * max_shift))

    num_of_blocks = int(sp_in_file / (1 * max_shift))

    print(' Number of spectra in file:     ', sp_in_file, ' ')
    print(' Maximal shift is:              ', max_shift, ' pixels ')
    print(' Number of blocks in file:      ', num_of_blocks, ' ')
    print(' Dispersion measure:            ', DM, ' pc / cm3 \n')
    print(' Pulsar name:                   ', pulsar_name, '  \n')

    if receiver_type == '.jds':
        num_frequencies_initial = len(frequency_list) - 4

    frequency_list_initial = np.empty_like(frequency_list)
    frequency_list_initial[:] = frequency_list[:]

    dat_file = open(data_filename, 'rb')
    dat_file.seek(1024)  # Jumping to 1024 byte from file beginning

    for block in range(num_of_blocks):  # main loop by number of blocks in file

        print(
            '\n * Data block # ', block + 1, ' of ', num_of_blocks,
            '\n ******************************************************************'
        )

        # Time line arrangements:
        fig_time_scale = []
        fig_date_time_scale = []
        # Shows the time of the pulse end (at the lowest frequency)
        for i in range(block * max_shift, (block + 1) * max_shift):
            fig_time_scale.append(timeline[i][11:23])
            fig_date_time_scale.append(timeline[i][:])
        print(' Time: ', fig_time_scale[0], ' - ', fig_time_scale[-1],
              ', number of points: ', len(fig_time_scale))

        # Data block reading
        if receiver_type == '.jds':
            data = np.fromfile(dat_file,
                               dtype=np.float64,
                               count=(num_frequencies_initial + 4) * 1 *
                               max_shift)  # 2
            data = np.reshape(data,
                              [(num_frequencies_initial + 4), 1 * max_shift],
                              order='F')  # 2
            # To delete the last channels of DSP data where time is stored
            data = data[:num_frequencies_initial, :]

        # Cutting the array in predefined frequency range
        if SpecFreqRange == 1:
            data, frequency_list, fi_start, fi_stop = specify_frequency_range(
                data, frequency_list_initial, freqStart, freqStop)
            num_frequencies = len(frequency_list)
        else:
            num_frequencies = num_frequencies_initial

        # Normalization of data
        Normalization_lin(data, num_frequencies, 1 * max_shift)

        nowTime = time.time()
        print('\n  *** Preparation of data took:              ',
              round((nowTime - previousTime), 2), 'seconds ')
        previousTime = nowTime

        if cleaning_Iana > 0:
            data = survey_cleaning(data)

        if cleaning > 0:

            # Cleaning vertical and horizontal lines of RFI
            data, mask, cleaned_pixels_num = clean_lines_of_pixels(
                data, no_of_iterations, std_lines_clean, pic_in_line)

            plt.figure(1, figsize=(10.0, 6.0))
            plt.subplots_adjust(left=None,
                                bottom=0,
                                right=None,
                                top=0.86,
                                wspace=None,
                                hspace=None)
            ImA = plt.imshow(mask, aspect='auto', vmin=0, vmax=1, cmap='Greys')
            plt.title('Result mask after lines cleaning',
                      fontsize=10,
                      fontweight='bold',
                      style='italic',
                      y=1.025)
            plt.ylabel('One dimension', fontsize=10, fontweight='bold')
            plt.xlabel('Second dimension', fontsize=10, fontweight='bold')
            plt.colorbar()
            plt.yticks(fontsize=8, fontweight='bold')
            plt.xticks(fontsize=8, fontweight='bold')
            pylab.savefig(newpath + '/00_10' + ' fig. ' + str(block + 1) +
                          ' - Result mask after lines cleaning.png',
                          bbox_inches='tight',
                          dpi=300)
            plt.close('all')

            # Cleaning remaining 1 pixel splashes of RFI
            data, mask, cleaned_pixels_num = array_clean_by_STD_value(
                data, std_pixels_clean)

            plt.figure(1, figsize=(10.0, 6.0))
            plt.subplots_adjust(left=None,
                                bottom=0,
                                right=None,
                                top=0.86,
                                wspace=None,
                                hspace=None)
            ImA = plt.imshow(mask, aspect='auto', vmin=0, vmax=1, cmap='Greys')
            plt.title('Mask after fine STD cleaning',
                      fontsize=10,
                      fontweight='bold',
                      style='italic',
                      y=1.025)
            plt.ylabel('One dimension', fontsize=10, fontweight='bold')
            plt.xlabel('Second dimension', fontsize=10, fontweight='bold')
            plt.colorbar()
            plt.yticks(fontsize=8, fontweight='bold')
            plt.xticks(fontsize=8, fontweight='bold')
            pylab.savefig(newpath + '/00_11' + ' fig. ' + str(block + 1) +
                          ' - Mask after fine STD cleaning.png',
                          bbox_inches='tight',
                          dpi=300)
            plt.close('all')

            nowTime = time.time()
            print('\n  *** Normalization and cleaning took:       ',
                  round((nowTime - previousTime), 2), 'seconds ')
            previousTime = nowTime
        '''
        # Logging the data
        with np.errstate(invalid='ignore'):
            data[:,:] = 10 * np.log10(data[:,:])
        data[np.isnan(data)] = 0

        # Normalizing log data
        data = data - np.mean(data)
        '''

        # Dispersion delay removing
        data_space = np.zeros((num_frequencies, 2 * max_shift))
        data_space[:, max_shift:] = data[:, :]
        temp_array = pulsar_DM_compensation_with_indices_changes(
            data_space, shift_vector)
        del data, data_space

        nowTime = time.time()
        # print('\n  *** Dispersion compensation took:          ', round((nowTime - previousTime), 2), 'seconds ')
        print('\n  *** Dispersion delay removing took:        ',
              round((nowTime - previousTime), 2), 'seconds ')
        previousTime = nowTime

        # Adding the next data block
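        # The buffer of width 2 * max_shift implements an overlap-save scheme: every dedispersed
        # block is accumulated into it, the left half (to which all frequency channels have
        # presumably already contributed) is taken below as fully ready data, and at the end of
        # the loop the buffer is rolled left by max_shift so that its right half becomes the
        # left half for the next block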
        buffer_array += temp_array

        # Making and filling the array with fully ready data for plotting and saving to a file
        array_compensated_DM = buffer_array[:, 0:max_shift]

        if block > 0:
            # Saving data with compensated DM to DAT file
            if save_compensated_data > 0 and block > 0:
                temp = array_compensated_DM.transpose().copy(order='C')
                new_data_file = open(new_data_file_name, 'ab')
                new_data_file.write(temp)
                new_data_file.close()

                # Saving time data to long timeline file
                with open(new_TLfile_name, 'a') as new_TLfile:
                    for i in range(max_shift):
                        new_TLfile.write((fig_date_time_scale[i][:]))  # str

            # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
            # Logging the data
            with np.errstate(divide='ignore'):
                array_compensated_DM[:, :] = 10 * np.log10(
                    array_compensated_DM[:, :])
            array_compensated_DM[array_compensated_DM == -np.inf] = 0

            # Normalizing log data
            array_compensated_DM = array_compensated_DM - np.mean(
                array_compensated_DM)
            # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

            # Preparing single averaged data profile for figure
            profile = array_compensated_DM.mean(axis=0)[:]
            profile = profile - np.mean(profile)

            # Save full profile to TXT file
            if save_profile_txt > 0:
                profile_txt_file = open(profile_file_name, 'a')
                for i in range(len(profile)):
                    profile_txt_file.write(str(profile[i]) + ' \n')
                profile_txt_file.close()

            # Averaging of the array with pulses for figure
            averaged_array = average_some_lines_of_array(
                array_compensated_DM, int(num_frequencies / average_const))
            freq_resolution = (df *
                               int(num_frequencies / average_const)) / 1000.
            max_time_shift = max_shift * TimeRes

            # Subtract the mean level from the averaged array
            averaged_array = averaged_array - np.mean(averaged_array)

            plot_ready_data(profile, averaged_array, frequency_list,
                            num_frequencies, fig_time_scale, newpath, filename,
                            pulsar_name, DM, freq_resolution, TimeRes,
                            max_time_shift, block, num_of_blocks - 1, block,
                            profile_pic_min, profile_pic_max, df_description,
                            colormap, customDPI, currentDate, currentTime,
                            Software_version)

        # Rolling temp_array to put current data first
        buffer_array = np.roll(buffer_array, -max_shift)
        buffer_array[:, max_shift:] = 0

    dat_file.close()

    # Fourier analysis of the obtained time profile of pulses
    if save_profile_txt > 0:
        print('\n\n  *** Making Fourier transform of the time profile...')
        pulsar_pulses_time_profile_FFT(newpath + '/',
                                       filename + '_time_profile.txt',
                                       pulsar_name, TimeRes, profile_pic_min,
                                       profile_pic_max, customDPI, colormap)

    return new_data_file_name
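
# A minimal usage sketch of the function above (illustrative only): every argument value,
# including the file and pulsar names, is an assumption rather than a value taken from the
# original project.
if __name__ == '__main__':
    compensated_file_name = pulsar_incoherent_dedispersion(
        common_path='DATA/', filename='E220213_201439.jds_Data_chA.dat',
        pulsar_name='B0809+74', average_const=512, profile_pic_min=-0.15,
        profile_pic_max=0.55, cleaning_Iana=0, cleaning=1, no_of_iterations=2,
        std_lines_clean=1, pic_in_line=10, std_pixels_clean=2.8,
        SpecFreqRange=0, freqStart=16.5, freqStop=33.0, save_profile_txt=1,
        save_compensated_data=1, customDPI=300, colormap='Greys')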
Example #2
                   ' of %s \n' % str(len(fileList)))
    Log_File.write('  * File path: %s \n\n\n' % str(fileList[file_no]))

    # *********************************************************************************

    # *** Opening datafile ***
    fname = ''
    if len(fname) < 1:
        fname = directory + fileList[file_no]

    # Reading the file header
    [
        df_filename, df_filesize, df_system_name, df_obs_place, df_description,
        f_adc, df_creation_timeUTC, receiver_mode, adr_mode, sumDifMode, NAvr,
        TimeRes, fmin, fmax, df, frequency, fft_size, SLine, Width, BlockSize
    ] = FileHeaderReaderADR(fname, 0, 1)

    # Reading the chunk header
    [
        sp_in_file, sp_in_frame, FrameInChunk, ChunksInFile, chunk_size,
        frm_sec, frm_phase
    ] = ChunkHeaderReaderADR(fname, 0, BlockSize, 1)

    freq_points_num = int(Width * 1024)
    Log_File.close()

    # *** Setting the time reference (file beginning) ***
    TimeFirstFramePhase = float(frm_phase) / f_adc
    TimeFirstFrameFloatSec = frm_sec + TimeFirstFramePhase
    TimeScalestart_time = datetime(int('20' + df_filename[1:3]),
                                   int(df_filename[3:5]),
Example #3
result_path = 'SMD_results_' + filename
if not os.path.exists(result_path):
    os.makedirs(result_path)


#**************************************************************
# ***              Reading data file header                 ***
#**************************************************************

# Jumping to the end of the file to read the data file header with parameters of data record

if filename[0:3] == 'ADR':
    [df_filename, df_filesize, df_system_name, df_obs_place, df_description,
            F_ADC, df_creation_timeUTC, ReceiverMode, ADRmode, sumDifMode,
            NAvr, TimeRes, fmin, fmax, df, frequency_list, FFTsize,
            SLine, Width, BlockSize] = FileHeaderReaderADR(filepath, smd_filesize - 1024 - 131096, 1)


    record_date_time_dt = datetime(int('20' + df_filename[1:3]), int(df_filename[3:5]),
                                   int(df_filename[5:7]), int(df_creation_timeUTC[0:2]),
                                   int(df_creation_timeUTC[3:5]), int(df_creation_timeUTC[6:8]),
                                   int(df_creation_timeUTC[9:12]) * 1000)
    record_date_time = str(record_date_time_dt)
    telescope = 'GURT'

if filename[0:3] == 'DSP':
    [df_filename, df_filesize, df_system_name, df_obs_place, df_description,
        CLCfrq, df_creation_timeUTC, SpInFile, ReceiverMode, Mode, Navr,
        TimeRes, fmin, fmax, df, frequency_list, FFTsize, BlockSize] = FileHeaderReaderJDS(filepath, smd_filesize - 1024, 1)
    telescope = 'UTR-2'

    record_date_time_dt = datetime(int('20' + df_filename[5:7]), int(df_filename[3:5]), int(df_filename[1:3]),
                                   int(df_creation_timeUTC[11:13]), int(df_creation_timeUTC[14:16]),
                                   int(df_creation_timeUTC[17:19]), 0)
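
    # Note the different slices above: ADR file names apparently encode the date as YYMMDD
    # after the leading character, while JDS ('DSP') file names encode it as DDMMYY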
Example #4
        # Check whether it is a file of ADR or JDS data
        df_filename = file.read(32).decode('utf-8').rstrip(
            '\x00')  # Initial data file name
        file.close()

        if df_filename[-4:] == '.adr':

            freqList = freqList_GURT
            AmplitudeReIm = AmplitudeReIm_GURT
            [
                df_filename, df_filesize, df_system_name, df_obs_place,
                df_description, CLCfrq, df_creation_timeUTC, ReceiverMode,
                Mode, sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency,
                FFTsize, SLine, Width, BlockSize
            ] = FileHeaderReaderADR(path_to_data + dat_files_list[file_no], 0,
                                    0)

        if df_filename[-4:] == '.jds':  # If data obtained from DSPZ receiver

            freqList = freqList_UTR2
            AmplitudeReIm = AmplitudeReIm_GURT
            [
                df_filename, df_filesize, df_system_name, df_obs_place,
                df_description, CLCfrq, df_creation_timeUTC, SpInFile,
                ReceiverMode, Mode, Navr, TimeRes, fmin, fmax, df, frequency,
                FreqPointsNum, dataBlockSize
            ] = FileHeaderReaderJDS(path_to_data + dat_files_list[file_no], 0,
                                    0)

        if AutoSourceSwitch == 1:
            if df_filename[
Example #5
def DAT_file_reader(common_path, DAT_file_name, typesOfData, DAT_result_path,
                    averOrMin, StartStopSwitch, SpecFreqRange, VminMan,
                    VmaxMan, VminNormMan, VmaxNormMan, RFImeanConst, customDPI,
                    colormap, ChannelSaveTXT, ChannelSavePNG, ListOrAllFreq,
                    AmplitudeReIm, freqStart, freqStop, dateTimeStart,
                    dateTimeStop, freqStartTXT, freqStopTXT, freqList,
                    print_or_not):

    startTime = time.time()
    currentTime = time.strftime("%H:%M:%S")
    currentDate = time.strftime("%d.%m.%Y")

    # Files to be analyzed:
    filename = common_path + DAT_file_name + '_Data_chA.dat'
    timeLineFileName = common_path + DAT_file_name + '_Timeline.txt'

    for j in range(len(typesOfData)):  # Main loop by types of data to analyze

        # Current name of the DAT file to be analyzed, depending on the data type:
        temp = list(filename)
        temp[-7:-4] = typesOfData[j]
        filename = "".join(temp)
        temp = list(DAT_file_name + '_Data_chA.dat')
        temp[-7:-4] = typesOfData[j]
        only_file_name = "".join(temp)
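        # e.g. '..._Data_chA.dat' becomes '..._Data_C_m.dat' when typesOfData[j] == 'C_m'
        # (every data-type code is exactly three characters, matching the [-7:-4] slice)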

        if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'):
            temp = list(filename)
            temp[-7:-4] = 'chA'
            filename01 = "".join(temp)
            temp[-7:-4] = 'chB'
            filename02 = "".join(temp)
            filename = filename01

        # Print the type of data to be analyzed
        if print_or_not == 1:
            print('\n\n   Processing data type: ', typesOfData[j], '\n')
        currentTime = time.strftime("%H:%M:%S")
        print('   Processing file: ', only_file_name, '   started at: ',
              currentTime)
        if print_or_not == 1: print('\n')

        #*************************************************************
        #         WHAT TO PLOT AND CORRESPONDING PARAMETERS          *
        #*************************************************************

        YaxName = 'Intensity, dB'
        Label = 'Intensity'
        nameAdd = ''
        fileNameAdd = ''
        fileNameAddNorm = ''
        fileNameAddSpectr = ''
        Vmin = VminMan  # Reset to the initial manual settings (they may have been changed in the previous loop iteration)
        Vmax = VmaxMan
        VminNorm = VminNormMan
        VmaxNorm = VmaxNormMan

        if typesOfData[j] == 'chA':
            nameAdd = ' channel A'
            fileNameAdd = ''
            fileNameAddNorm = '001_'
            fileNameAddSpectr = '008_'

        if typesOfData[j] == 'chB':
            nameAdd = ' channel B'
            fileNameAdd = ''
            fileNameAddNorm = '001_'
            fileNameAddSpectr = '008_'

        if typesOfData[j] == 'C_m':
            nameAdd = ' correlation module'
            Vmin = -160
            VmaxNorm = 2 * VmaxNormMan
            fileNameAdd = ''
            fileNameAddNorm = '004_'
            fileNameAddSpectr = '011_'

        if typesOfData[j] == 'C_p':
            nameAdd = ' correlation phase'
            YaxName = 'Phase, rad'
            Label = 'Phase'
            Vmin = -3.5
            Vmax = 3.5
            fileNameAdd = '005_'
            fileNameAddSpectr = '012_'

        if typesOfData[j] == 'CRe':
            nameAdd = ' correlation RE part'
            YaxName = 'Amplitude'
            fileNameAdd = '006_'
            fileNameAddSpectr = '013_'

        if typesOfData[j] == 'CIm':
            nameAdd = ' correlation IM part'
            YaxName = 'Amplitude'
            fileNameAdd = '007_'
            fileNameAddSpectr = '014_'

        if typesOfData[j] == 'A+B':
            nameAdd = ' sum A + B'
            fileNameAddNorm = '003_'
            fileNameAddSpectr = '009_'

        if typesOfData[j] == 'A-B':
            nameAdd = ' difference |A - B|'
            Vmin = Vmin - 20
            Vmax = Vmax - 20
            fileNameAdd = ''
            fileNameAddNorm = '002_'
            fileNameAddSpectr = '010_'

        #*********************************************************************************

        # *** Creating a folder where all pictures and results will be stored (if it doesn't exist) ***
        newpath = common_path + 'DAT_Results_' + DAT_result_path
        if not os.path.exists(newpath):
            os.makedirs(newpath)

        # *** Opening DAT datafile ***

        file = open(filename, 'rb')

        # *** Data file header read ***
        df_filesize = (os.stat(filename).st_size)  # Size of file
        df_filename = file.read(32).decode('utf-8').rstrip(
            '\x00')  # Initial data file name
        file.close()

        if df_filename[-4:] == '.adr':

            [
                df_filename, df_filesize, df_system_name, df_obs_place,
                df_description, CLCfrq, df_creation_timeUTC, ReceiverMode,
                Mode, sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency,
                FFTsize, SLine, Width, BlockSize
            ] = FileHeaderReaderADR(filename, 0, 0)

            FreqPointsNum = len(frequency)

        if df_filename[-4:] == '.jds':  # If data obtained from DSPZ receiver

            [
                df_filename, df_filesize, df_system_name, df_obs_place,
                df_description, CLCfrq, df_creation_timeUTC, SpInFile,
                ReceiverMode, Mode, Navr, TimeRes, fmin, fmax, df, frequency,
                FreqPointsNum, dataBlockSize
            ] = FileHeaderReaderJDS(filename, 0, 0)

            sumDifMode = ''

        #************************************************************************************
        #                            R E A D I N G   D A T A                                *
        #************************************************************************************

        # *** Reading timeline file ***
        TLfile = open(timeLineFileName, 'r')
        timeline = []
        for line in TLfile:
            timeline.append(str(line))
        TLfile.close()

        if StartStopSwitch == 1:  # If we read only the specified time limits of the file

            # *** Converting text to ".datetime" format ***
            dt_timeline = []
            for i in range(len(timeline)):

                # Check if the microseconds field is empty; if so, treat it as '000000'
                uSecond = timeline[i][20:26]
                if len(uSecond) < 2: uSecond = '000000'

                dt_timeline.append(
                    datetime(int(timeline[i][0:4]), int(timeline[i][5:7]),
                             int(timeline[i][8:10]), int(timeline[i][11:13]),
                             int(timeline[i][14:16]), int(timeline[i][17:19]),
                             int(uSecond)))

            dt_dateTimeStart = datetime(int(dateTimeStart[0:4]),
                                        int(dateTimeStart[5:7]),
                                        int(dateTimeStart[8:10]),
                                        int(dateTimeStart[11:13]),
                                        int(dateTimeStart[14:16]),
                                        int(dateTimeStart[17:19]), 0)
            dt_dateTimeStop = datetime(int(dateTimeStop[0:4]),
                                       int(dateTimeStop[5:7]),
                                       int(dateTimeStop[8:10]),
                                       int(dateTimeStop[11:13]),
                                       int(dateTimeStop[14:16]),
                                       int(dateTimeStop[17:19]), 0)

            # *** Showing the time limits of the file and the limits of the chosen part
            if print_or_not == 1:
                print(
                    '\n\n                               Start                         End \n'
                )
            if print_or_not == 1:
                print('  File time limits:   ', dt_timeline[0], ' ',
                      dt_timeline[len(timeline) - 1], '\n')
            if print_or_not == 1:
                print('  Chosen time limits: ', dt_dateTimeStart, '        ',
                      dt_dateTimeStop, '\n')

            # Verifying that chosen time limits are inside file and are correct
            if (dt_timeline[len(timeline) - 1] >= dt_dateTimeStart >
                    dt_timeline[0]) and (
                        dt_timeline[len(timeline) - 1] > dt_dateTimeStop >=
                        dt_timeline[0]) and (dt_dateTimeStop >
                                             dt_dateTimeStart):
                if print_or_not == 1: print('  Time is chosen correctly! \n\n')
            else:
                print('  ERROR! Time is chosen out of file limits!!! \n\n')
                sys.exit('           Program stopped')

            # Finding the closest spectra to the chosen time limits
            A = []
            B = []
            for i in range(len(timeline)):
                dt_diff_start = dt_timeline[i] - dt_dateTimeStart
                dt_diff_stop = dt_timeline[i] - dt_dateTimeStop
                A.append(
                    abs(
                        divmod(dt_diff_start.total_seconds(), 60)[0] * 60 +
                        divmod(dt_diff_start.total_seconds(), 60)[1]))
                B.append(
                    abs(
                        divmod(dt_diff_stop.total_seconds(), 60)[0] * 60 +
                        divmod(dt_diff_stop.total_seconds(), 60)[1]))

            istart = A.index(min(A))
            istop = B.index(min(B))
            if print_or_not == 1:
                print('\n Start spectrum number is:         ', istart)
            if print_or_not == 1:
                print('\n Stop spectrum number is:          ', istop)
            if print_or_not == 1:
                print('\n Total number of spectra to read:  ', istop - istart)

        # *** Calculation of the dimensions of arrays to read ***
        nx = len(frequency)  # the first dimension of the array
        if StartStopSwitch == 1:  # If we read only the specified time limits of the file
            # The second dimension of the array: number of spectra to read
            ny = int(istop - istart)
        else:
            # The second dimension of the array: number of spectra in the whole file
            ny = int((df_filesize - 1024) / (nx * 8))
            istart = 0
            istop = len(timeline)
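
        # Illustrative example (assumed numbers): a 2 GiB file with nx = 8192 channels holds
        # int((2 * 1024**3 - 1024) / (8192 * 8)) = 32767 spectra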

        if print_or_not == 1: print(' ')
        if print_or_not == 1:
            print(' Number of frequency channels:     ', nx, '\n')
        if print_or_not == 1:
            print(' Number of spectra:                ', ny, '\n')
        if print_or_not == 1:
            print(' Recommended spectra number for averaging is:  ',
                  int(ny / 1024))
        # averageConst = raw_input('\n Enter number of spectra to be averaged:       ')

        #if (len(averageConst) < 1 or int(averageConst) < 1):
        #    averageConst = 1
        #else:
        #    averageConst = int(averageConst)
        averageConst = int(ny / 1024)
        if int(averageConst) < 1: averageConst = 1
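        # Illustrative example (assumed numbers): ny = 500000 spectra gives
        # averageConst = int(500000 / 1024) = 488, so roughly int(500000 / 488) = 1024
        # averaged spectra (figure columns) remain after averaging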

        # *** Data reading and averaging ***
        if print_or_not == 1:
            print('\n\n\n  *** Data reading and averaging *** \n\n')

        file1 = open(filename, 'rb')
        if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'):
            file2 = open(filename02, 'rb')

        # Skip the 1024-byte header plus istart spectra (nx channels * 8 bytes each)
        file1.seek(1024 + istart * 8 * nx, os.SEEK_SET)
        if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'):
            file2.seek(1024 + istart * 8 * nx, os.SEEK_SET)

        array = np.empty((nx, 0), float)
        numOfBlocks = int(ny / averageConst)
        for block in range(numOfBlocks):

            data1 = np.fromfile(file1,
                                dtype=np.float64,
                                count=nx * averageConst)
            if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'):
                data2 = np.fromfile(file2,
                                    dtype=np.float64,
                                    count=nx * averageConst)

            if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'):
                if typesOfData[j] == 'A+B': data = data1 + data2
                if typesOfData[j] == 'A-B': data = data1 - data2
            else:
                data = data1

            del data1
            if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'): del data2

            data = np.reshape(data, [nx, averageConst], order='F')

            dataApp = np.empty((nx, 1), float)

            if (typesOfData[j] == 'chA' or typesOfData[j] == 'chB'
                    or typesOfData[j] == 'A+B'):
                # If analyzing intensity - average and log data
                if averOrMin == 0:
                    with np.errstate(invalid='ignore'):
                        dataApp[:, 0] = 10 * np.log10(data.mean(axis=1)[:])
                elif averOrMin == 1:
                    with np.errstate(invalid='ignore'):
                        dataApp[:, 0] = 10 * np.log10(np.amin(data, axis=1)[:])
                else:
                    print('\n\n Error!!! Wrong value of parameters!')
                array = np.append(array, dataApp, axis=1)
                array[np.isnan(array)] = -120

            if (typesOfData[j] == 'A-B'):
                # If analyzing intensity - average and log absolute values of data
                with np.errstate(invalid='ignore'):
                    dataApp[:, 0] = 10 * np.log10(np.abs(data.mean(axis=1)[:]))
                array = np.append(array, dataApp, axis=1)
                array[np.isnan(array)] = -120

            if (typesOfData[j] == 'C_p' or typesOfData[j] == 'CRe'
                    or typesOfData[j] == 'CIm'):
                # For phase / Re / Im data no logarithm is taken, only averaging
                dataApp[:, 0] = (data.mean(axis=1)[:])
                array = np.append(array, dataApp, axis=1)
                array[np.isnan(array)] = 0

            if typesOfData[j] == 'C_m':
                dataApp[:, 0] = (data.mean(axis=1)[:])
                array = np.append(array, dataApp, axis=1)
                #array[np.isinf(array)] = -120

        del dataApp, data
        file1.close()
        if (typesOfData[j] == 'A+B' or typesOfData[j] == 'A-B'): file2.close()

        if print_or_not == 1:
            print('\n Array shape is now             ', array.shape)

        # *** Cutting timeline to time limits ***
        dateTimeNew = timeline[istart:istop:averageConst]
        del dateTimeNew[numOfBlocks:]
        if print_or_not == 1:
            print('\n TimeLine length is now:        ', len(dateTimeNew))

        #*******************************************************************************
        #                                F I G U R E S                                 *
        #*******************************************************************************
        if print_or_not == 1: print('\n\n\n  *** Building images *** \n\n')

        # Exact string timescales to show on plots
        TimeScaleFig = np.empty_like(dateTimeNew)
        for i in range(len(dateTimeNew)):
            TimeScaleFig[i] = str(dateTimeNew[i][0:11] + '\n' +
                                  dateTimeNew[i][11:23])

        # Limits of figures for common case or for Re/Im parts to show the interferometric picture
        if typesOfData[j] == 'CRe' or typesOfData[j] == 'CIm':
            Vmin = 0 - AmplitudeReIm
            Vmax = 0 + AmplitudeReIm

        # *** Immediate spectrum ***

        Suptitle = ('Immediate spectrum ' + str(df_filename[0:18]) + ' ' +
                    nameAdd)
        Title = ('Initial parameters: dt = ' + str(round(TimeRes, 3)) +
                 ' Sec, df = ' + str(round(df / 1000, 3)) + ' kHz ' +
                 sumDifMode + 'Processing: Averaging ' + str(averageConst) +
                 ' spectra (' + str(round(averageConst * TimeRes, 3)) +
                 ' sec.)')

        TwoOrOneValuePlot(
            1, frequency, array[:, [1]], [], 'Spectrum', ' ', frequency[0],
            frequency[FreqPointsNum - 1], Vmin, Vmax, Vmin, Vmax,
            'Frequency, MHz', YaxName, ' ', Suptitle, Title,
            newpath + '/' + fileNameAddSpectr + df_filename[0:14] + '_' +
            typesOfData[j] + ' Immediate Spectrum.png', currentDate,
            currentTime, Software_version)

        # *** Decide whether to use only the list of frequencies or all frequencies in the range
        if ListOrAllFreq == 0:
            freqList = np.array(freqList)
        if ListOrAllFreq == 1:
            freqList = np.array(frequency)

        # *** Finding the frequency closest to the one specified by the user ***
        for fc in range(len(freqList)):
            if (freqList[fc] > freqStartTXT) and (freqList[fc] < freqStopTXT):
                newFreq = np.array(frequency)
                newFreq = np.absolute(newFreq - freqList[fc])
                index = np.argmin(newFreq) + 1
                tempArr1 = np.arange(0, len(dateTimeNew), 1)

                if ChannelSavePNG == 1 or typesOfData[
                        j] == 'CRe' or typesOfData[j] == 'CIm':
                    if typesOfData[j] == 'CRe' or typesOfData[j] == 'CIm':
                        Vmin = 0 - AmplitudeReIm
                        Vmax = 0 + AmplitudeReIm

                    # *** Plotting intensity changes at particular frequency ***
                    timeline = []
                    for i in range(len(dateTimeNew)):
                        timeline.append(
                            str(dateTimeNew[i][0:11] + '\n' +
                                dateTimeNew[i][11:23]))

                    Suptitle = 'Intensity variation ' + str(
                        df_filename[0:18]) + ' ' + nameAdd
                    Title = ('Initial parameters: dt = ' +
                             str(round(TimeRes, 3)) + ' Sec, df = ' +
                             str(round(df / 1000, 3)) + ' kHz, Frequency = ' +
                             str(round(frequency[index], 3)) + ' MHz ' +
                             sumDifMode + ' Processing: Averaging ' +
                             str(averageConst) + ' spectra (' +
                             str(round(averageConst * TimeRes, 3)) + ' sec.)')

                    FileName = (newpath + '/' + df_filename[0:14] + '_' +
                                typesOfData[j] + df_filename[-4:] +
                                ' variation at ' +
                                str(round(frequency[index], 3)) + ' MHz.png')

                    OneValueWithTimePlot(
                        timeline, array[[index], :].transpose(), Label, 0,
                        len(dateTimeNew), Vmin, Vmax, 0, 0,
                        'UTC Date and time, YYYY-MM-DD HH:MM:SS.ms', YaxName,
                        Suptitle, Title, FileName, currentDate, currentTime,
                        Software_version)

                # *** Saving value changes at particular frequency to TXT file ***
                if ChannelSaveTXT == 1:
                    SingleChannelData = open(
                        newpath + '/' + df_filename[0:14] + '_' +
                        filename[-7:-4:] + df_filename[-4:] +
                        ' variation at ' + str(round(frequency[index], 3)) +
                        ' MHz.txt', "w")
                    for i in range(len(dateTimeNew)):
                        SingleChannelData.write(
                            str(dateTimeNew[i]).rstrip() + '   ' +
                            str(array.transpose()[i, index]) + ' \n')
                    SingleChannelData.close()

        # *** Cutting the array inside frequency range specified by user ***
        if SpecFreqRange == 1 and (
                frequency[0] <= freqStart <= frequency[FreqPointsNum - 1]
        ) and (frequency[0] <= freqStop <=
               frequency[FreqPointsNum - 1]) and (freqStart < freqStop):
            print('\n You have chosen the frequency range', freqStart, '-',
                  freqStop, 'MHz')
            A = []
            B = []
            for i in range(len(frequency)):
                A.append(abs(frequency[i] - freqStart))
                B.append(abs(frequency[i] - freqStop))
            ifmin = A.index(min(A))
            ifmax = B.index(min(B))
            array = array[ifmin:ifmax, :]
            print('\n New data array shape is: ', array.shape)
            freqLine = frequency[ifmin:ifmax]
        else:
            freqLine = frequency

        # Limits of figures for common case or for Re/Im parts to show the interferometric picture
        Vmin = np.min(array)
        Vmax = np.max(array)
        if typesOfData[j] == 'CRe' or typesOfData[j] == 'CIm':
            Vmin = 0 - AmplitudeReIm
            Vmax = 0 + AmplitudeReIm

        # *** Dynamic spectrum of initial signal***

        Suptitle = ('Dynamic spectrum starting from file ' +
                    str(df_filename[0:18]) + ' ' + nameAdd +
                    '\n Initial parameters: dt = ' + str(round(TimeRes, 3)) +
                    ' Sec, df = ' + str(round(df / 1000, 3)) + ' kHz, ' +
                    sumDifMode + ' Processing: Averaging ' +
                    str(averageConst) + ' spectra (' +
                    str(round(averageConst * TimeRes, 3)) + ' sec.)\n' +
                    ' Receiver: ' + str(df_system_name) + ', Place: ' +
                    str(df_obs_place) + ', Description: ' +
                    str(df_description))
        fig_file_name = (newpath + '/' + fileNameAdd + df_filename[0:14] +
                         '_' + typesOfData[j] + ' Dynamic spectrum.png')

        OneDynSpectraPlot(array, Vmin, Vmax, Suptitle, 'Intensity, dB',
                          len(dateTimeNew), TimeScaleFig, freqLine,
                          len(freqLine), colormap,
                          'UTC Date and time, YYYY-MM-DD HH:MM:SS.msec',
                          fig_file_name, currentDate, currentTime,
                          Software_version, customDPI)

        if (typesOfData[j] != 'C_p' and typesOfData[j] != 'CRe'
                and typesOfData[j] != 'CIm'):

            # *** Normalization and cleaning of dynamic spectra ***
            Normalization_dB(array.transpose(), len(freqLine),
                             len(dateTimeNew))
            simple_channel_clean(array.transpose(), RFImeanConst)

            # *** Dynamic spectra of cleaned and normalized signal ***

            Suptitle = (
                'Dynamic spectrum cleaned and normalized starting from file ' +
                str(df_filename[0:18]) + ' ' + nameAdd +
                '\n Initial parameters: dt = ' + str(round(TimeRes, 3)) +
                ' Sec, df = ' + str(round(df / 1000, 3)) + ' kHz, ' +
                sumDifMode + ' Processing: Averaging ' + str(averageConst) +
                ' spectra (' + str(round(averageConst * TimeRes, 3)) +
                ' sec.)\n' + ' Receiver: ' + str(df_system_name) +
                ', Place: ' + str(df_obs_place) + ', Description: ' +
                str(df_description))
            fig_file_name = (newpath + '/' + fileNameAddNorm +
                             df_filename[0:14] + '_' + typesOfData[j] +
                             ' Dynamic spectrum cleaned and normalized' +
                             '.png')

            OneDynSpectraPlot(array, VminNorm,
                              VmaxNorm, Suptitle, 'Intensity, dB',
                              len(dateTimeNew), TimeScaleFig, freqLine,
                              len(freqLine), colormap,
                              'UTC Date and time, YYYY-MM-DD HH:MM:SS.msec',
                              fig_file_name, currentDate, currentTime,
                              Software_version, customDPI)
            '''
            # *** TEMPLATE FOR JOURNLS Dynamic spectra of cleaned and normalized signal ***
            plt.figure(2, figsize=(16.0, 7.0))
            ImA = plt.imshow(np.flipud(array), aspect='auto', extent=[0,len(dateTimeNew),freqLine[0],freqLine[len(freqLine)-1]], vmin=VminNorm, vmax=VmaxNorm, cmap=colormap) #
            plt.ylabel('Frequency, MHz', fontsize=12, fontweight='bold')
            #plt.suptitle('Dynamic spectrum cleaned and normalized starting from file '+str(df_filename[0:18])+' '+nameAdd+
            #            '\n Initial parameters: dt = '+str(round(TimeRes,3))+
            #            ' Sec, df = '+str(round(df/1000,3))+' kHz, '+sumDifMode+
            #            ' Processing: Averaging '+str(averageConst)+' spectra ('+str(round(averageConst*TimeRes,3))+' sec.)\n'+
            #            ' Receiver: '+str(df_system_name)+
            #            ', Place: '+str(df_obs_place) +
            #            ', Description: '+str(df_description),
            #            fontsize=10, fontweight='bold', x = 0.46, y = 0.96)
            plt.yticks(fontsize=12, fontweight='bold')
            rc('font', weight='bold')
            cbar = plt.colorbar(ImA, pad=0.005)
            cbar.set_label('Intensity, dB', fontsize=12, fontweight='bold')
            cbar.ax.tick_params(labelsize=12)
            ax1 = plt.figure(2).add_subplot(1,1,1)
            a = ax1.get_xticks().tolist()
            for i in range(len(a)-1):   #a-1
                k = int(a[i])
                #a[i] = str(dateTimeNew[k][0:11]+'\n'+dateTimeNew[k][11:23])
                a[i] = str(dateTimeNew[k][11:19])
            ax1.set_xticklabels(a)
            plt.xticks(fontsize=12, fontweight='bold')
            plt.xlabel('UTC time, HH:MM:SS', fontsize=12, fontweight='bold')
            #plt.text(0.72, 0.04,'Processed '+currentDate+ ' at '+currentTime, fontsize=6, transform=plt.gcf().transFigure)
            pylab.savefig('DAT_Results/' + fileNameAddNorm + df_filename[0:14]+'_'+typesOfData[j]+' Dynamic spectrum cleanned and normalized'+'.png', bbox_inches='tight', dpi = customDPI)
            #pylab.savefig('DAT_Results/' +fileNameAddNorm+ df_filename[0:14]+'_'+typesOfData[j]+ ' Dynamic spectrum cleanned and normalized'+'.eps', bbox_inches='tight', dpi = customDPI)
                                                                                 #filename[-7:-4:]
            plt.close('all')
            '''

    ok = 1
    return ok
Example #6
def ADR_file_reader(fileList, result_path, MaxNim, RFImeanConst, Vmin, Vmax,
                    VminNorm, VmaxNorm, VminCorrMag, VmaxCorrMag, customDPI,
                    colormap, CorrelationProcess, Sum_Diff_Calculate,
                    longFileSaveAch, longFileSaveBch, longFileSaveCMP,
                    longFileSaveCRI, longFileSaveSSD, DynSpecSaveInitial,
                    DynSpecSaveCleaned, CorrSpecSaveInitial,
                    CorrSpecSaveCleaned, SpecterFileSaveSwitch, ImmediateSpNo):

    currentTime = time.strftime("%H:%M:%S")
    currentDate = time.strftime("%d.%m.%Y")

    if not os.path.exists(result_path):
        os.makedirs(result_path)
    if not os.path.exists(result_path + '/Service'):
        os.makedirs(result_path + '/Service')
    if DynSpecSaveInitial == 1:
        if not os.path.exists(result_path + '/Initial_spectra'):
            os.makedirs(result_path + '/Initial_spectra')
    if (DynSpecSaveCleaned == 1 and CorrelationProcess == 1):
        if not os.path.exists(result_path + '/Correlation_spectra'):
            os.makedirs(result_path + '/Correlation_spectra')

    for fileNo in range(len(fileList)):  # loop by files

        # *** Opening datafile ***
        fname = ''
        if len(fname) < 1: fname = fileList[fileNo]

        # Reading the file header
        [
            df_filename, df_filesize, df_system_name, df_obs_place,
            df_description, F_ADC, df_creation_timeUTC, ReceiverMode, ADRmode,
            sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency, FFT_Size,
            SLine, Width, BlockSize
        ] = FileHeaderReaderADR(fname, 0, 0)

        # Reading the chunk header
        [
            SpInFile, SpInFrame, FrameInChunk, ChunksInFile, sizeOfChunk,
            frm_sec, frm_phase
        ] = ChunkHeaderReaderADR(fname, 0, BlockSize, 0)

        FreqPointsNum = int(Width * 1024)

        # *** Setting the time reference (file beginning) ***
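        # frm_phase presumably counts ADC samples within the current second, so dividing it by
        # the ADC clock frequency F_ADC gives the fractional-second part of the frame time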
        TimeFirstFramePhase = float(frm_phase) / F_ADC
        TimeFirstFrameFloatSec = frm_sec + TimeFirstFramePhase
        TimeScaleStartTime = datetime(int('20' + df_filename[1:3]),
                                      int(df_filename[3:5]),
                                      int(df_filename[5:7]),
                                      int(df_creation_timeUTC[0:2]),
                                      int(df_creation_timeUTC[3:5]),
                                      int(df_creation_timeUTC[6:8]),
                                      int(df_creation_timeUTC[9:12]) * 1000)

        with open(fname, 'rb') as file:

            # *** Reading indexes of data from index file '*.fft' ***
            indexes = []
            ifname = 'package_ra_data_files_formats/' + str(int(
                FFT_Size / 2)) + '.fft'
            indexfile = open(ifname, 'r')
            num = 0
            for line in indexfile:
                ind = int(line)
                if (ind >= SLine * 1024) & (ind < ((SLine + Width) * 1024)):
                    indexes.append(ind - SLine * 1024)
                num = num + 1
            indexfile.close()
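
            # 'indexes' now holds the FFT bins of the selected band: only bins inside
            # [SLine * 1024, (SLine + Width) * 1024) were kept, re-based to the band start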

            timeLineSecond = np.zeros(
                ChunksInFile)  # List of second values from DSP_INF field

            # *** If it is the first file - write the header to long data file ***
            if ((longFileSaveAch == 1 or longFileSaveBch == 1
                 or longFileSaveCRI == 1 or longFileSaveCMP == 1
                 or longFileSaveSSD == 1) and fileNo == 0):
                file.seek(0)
                file_header = file.read(1024)

                # *** Creating a name for long timeline TXT file ***
                TLfile_name = df_filename + '_Timeline.txt'
                TLfile = open(
                    TLfile_name, 'w'
                )  # Open and close to delete the file with the same name
                TLfile.close()

                DAT_file_name = df_filename
                DAT_file_list = []

                # *** Creating a binary file with data for long data storage ***
                if (longFileSaveAch == 1
                        and (ADRmode == 3 or ADRmode == 5 or ADRmode == 6)):
                    fileData_A_name = df_filename + '_Data_chA.dat'
                    fileData_A = open(fileData_A_name, 'wb')
                    fileData_A.write(file_header)
                    fileData_A.close()
                    DAT_file_list.append('chA')
                if (longFileSaveBch == 1
                        and (ADRmode == 4 or ADRmode == 5 or ADRmode == 6)):
                    fileData_B_name = df_filename + '_Data_chB.dat'
                    fileData_B = open(fileData_B_name, 'wb')
                    fileData_B.write(file_header)
                    fileData_B.close()
                    DAT_file_list.append('chB')
                if (CorrelationProcess == 1 and longFileSaveCRI == 1
                        and ADRmode == 6):
                    fileData_CRe_name = df_filename + '_Data_CRe.dat'
                    fileData_C_Re = open(fileData_CRe_name, 'wb')
                    fileData_C_Re.write(file_header)
                    fileData_C_Re.close()
                    DAT_file_list.append('CRe')
                    fileData_CIm_name = df_filename + '_Data_CIm.dat'
                    fileData_C_Im = open(fileData_CIm_name, 'wb')
                    fileData_C_Im.write(file_header)
                    fileData_C_Im.close()
                    DAT_file_list.append('CIm')
                if (CorrelationProcess == 1 and longFileSaveCMP == 1
                        and ADRmode == 6):
                    fileData_CM_name = df_filename + '_Data_C_m.dat'
                    fileData_C_M = open(fileData_CM_name, 'wb')
                    fileData_C_M.write(file_header)
                    fileData_C_M.close()
                    DAT_file_list.append('C_m')
                    fileData_CP_name = df_filename + '_Data_C_p.dat'
                    fileData_C_P = open(fileData_CP_name, 'wb')
                    fileData_C_P.write(file_header)
                    fileData_C_P.close()
                    DAT_file_list.append('C_p')
                if (Sum_Diff_Calculate == 1 and longFileSaveSSD == 1
                        and (ADRmode == 5 or ADRmode == 6)):
                    fileData_Sum_name = df_filename + '_Data_Sum.dat'
                    fileData_Sum = open(fileData_Sum_name, 'wb')
                    fileData_Sum.write(file_header)
                    fileData_Sum.close()
                    fileData_Dif_name = df_filename + '_Data_Dif.dat'
                    fileData_Dif = open(fileData_Dif_name, 'wb')
                    fileData_Dif.write(file_header)
                    fileData_Dif.close()

                del file_header

            #************************************************************************************
            #                            R E A D I N G   D A T A                                *
            #************************************************************************************

            file.seek(1024)  # Jumping to 1024 byte from file beginning

            if ADRmode > 2 and ADRmode < 7:  # Spectra modes
                figID = -1
                figMAX = int(math.ceil((ChunksInFile) / MaxNim))
                if figMAX < 1: figMAX = 1
                for fig in range(figMAX):
                    Time1 = time.time()  # Timing
                    figID = figID + 1
                    currentTime = time.strftime("%H:%M:%S")
                    print('   File # ', str(fileNo + 1), ' of ',
                          str(len(fileList)), ', figure # ', figID + 1, ' of ',
                          figMAX, '   started at: ', currentTime)
                    if (ChunksInFile - MaxNim * figID) < MaxNim:
                        Nim = (ChunksInFile - MaxNim * figID)
                    else:
                        Nim = MaxNim
                    SpectrNum = Nim * SpInFrame * FrameInChunk  # Number of spectra in the figure

                    # *** Preparing empty matrices ***
                    if ADRmode == 3 or ADRmode == 5 or ADRmode == 6:
                        Data_Ch_A = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        Data_Ch_A0 = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                    if ADRmode == 4 or ADRmode == 5 or ADRmode == 6:
                        Data_Ch_B = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        Data_Ch_B0 = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                    if ADRmode == 6:
                        Data_C_Im = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        Data_C_Re = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        Data_C_Im0 = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        Data_C_Re0 = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        CorrModule = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))
                        CorrPhase = np.zeros(
                            (Nim * SpInFrame * FrameInChunk, FreqPointsNum))

                    TimeScale = []
                    TimeFigureScale = []  # Timeline (new) for each figure
                    TimeFigureStartTime = datetime(2016, 1, 1, 0, 0, 0, 0)

                    # *** DATA READING process ***

                    # Reading all chunk data of the current figure at once and reshaping it
                    raw = np.fromfile(file,
                                      dtype='i4',
                                      count=int((Nim * (sizeOfChunk + 8)) / 4))
                    raw = np.reshape(raw, [int((sizeOfChunk + 8) / 4), Nim],
                                     order='F')

                    # Splitting headers from data
                    headers = raw[0:1024, :]
                    data = raw[1024:, :]
                    del raw

                    # Arranging data in right order
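                    # In mode 3 (or 4) the block holds only channel A (or B) data. In mode 5 the
                    # two channels are interleaved per frequency bin (B at even, A at odd
                    # positions); in mode 6 four values per bin are interleaved in the order
                    # Im(C), Re(C), B, A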
                    if ADRmode == 3:
                        data = np.reshape(
                            data,
                            [FreqPointsNum, Nim * FrameInChunk * SpInFrame],
                            order='F')
                        Data_Ch_A0 = data[0:FreqPointsNum:1, :].transpose()
                    if ADRmode == 4:
                        data = np.reshape(
                            data,
                            [FreqPointsNum, Nim * FrameInChunk * SpInFrame],
                            order='F')
                        Data_Ch_B0 = data[0:FreqPointsNum:1, :].transpose()
                    if ADRmode == 5:
                        data = np.reshape(data, [
                            FreqPointsNum * 2, Nim * FrameInChunk * SpInFrame
                        ],
                                          order='F')
                        Data_Ch_B0 = data[0:(FreqPointsNum *
                                             2):2, :].transpose()
                        Data_Ch_A0 = data[1:(FreqPointsNum *
                                             2):2, :].transpose()
                    if ADRmode == 6:
                        data = np.reshape(data, [
                            FreqPointsNum * 4, Nim * FrameInChunk * SpInFrame
                        ],
                                          order='F')
                        Data_C_Im0 = data[0:(FreqPointsNum *
                                             4):4, :].transpose()
                        Data_C_Re0 = data[1:(FreqPointsNum *
                                             4):4, :].transpose()
                        Data_Ch_B0 = data[2:(FreqPointsNum *
                                             4):4, :].transpose()
                        Data_Ch_A0 = data[3:(FreqPointsNum *
                                             4):4, :].transpose()
                    del data

                    # *** TimeLine calculations ***
                    for i in range(Nim):

                        # *** DSP_INF ***
                        frm_count = headers[3][i]
                        frm_sec = headers[4][i]
                        frm_phase = headers[5][i]

                        # * Absolute time calculation *
                        timeLineSecond[
                            figID * MaxNim +
                            i] = frm_sec  # to check the linearity of seconds
                        TimeCurrentFramePhase = float(frm_phase) / F_ADC
                        TimeCurrentFrameFloatSec = frm_sec + TimeCurrentFramePhase
                        TimeSecondDiff = TimeCurrentFrameFloatSec - TimeFirstFrameFloatSec
                        TimeAdd = timedelta(
                            0, int(np.fix(TimeSecondDiff)),
                            int(
                                np.fix(
                                    (TimeSecondDiff -
                                     int(np.fix(TimeSecondDiff))) * 1000000)))

                        # The time point is added inside the loop over spectra because there is
                        # one time point per frame, but it has to be repeated for every spectrum
                        # in the frame so that the time scale matches the array dimensions

                        # * Time from figure start calculation *
                        if (i == 0): TimeFigureStart = TimeCurrentFrameFloatSec
                        TimeFigureSecondDiff = TimeCurrentFrameFloatSec - TimeFigureStart
                        TimeFigureAdd = timedelta(
                            0, int(np.fix(TimeFigureSecondDiff)),
                            int(
                                np.fix((TimeFigureSecondDiff -
                                        int(np.fix(TimeFigureSecondDiff))) *
                                       1000000)))

                        for iframe in range(0, SpInFrame):
                            TimeScale.append(
                                str((TimeScaleStartTime + TimeAdd)))  #.time()
                            TimeFigureScale.append(
                                str((TimeFigureStartTime +
                                     TimeFigureAdd).time()))

                    # Exact string timescales to show on plots
                    TimeFigureScaleFig = np.empty_like(TimeFigureScale)
                    TimeScaleFig = np.empty_like(TimeScale)
                    for i in range(len(TimeFigureScale)):
                        TimeFigureScaleFig[i] = TimeFigureScale[i][0:11]
                        TimeScaleFig[i] = TimeScale[i][11:23]

                    # *** Performing index changes ***
                    for i in range(0, FreqPointsNum):
                        n = indexes[i]
                        if ADRmode == 3 or ADRmode == 5 or ADRmode == 6:
                            Data_Ch_A[:, n] = Data_Ch_A0[:, i]
                        if ADRmode == 4 or ADRmode == 5 or ADRmode == 6:
                            Data_Ch_B[:, n] = Data_Ch_B0[:, i]
                        if (ADRmode == 6 and CorrelationProcess == 1):
                            Data_C_Im[:, n] = Data_C_Im0[:, i]
                            Data_C_Re[:, n] = Data_C_Re0[:, i]

                    # *** Deleting matrices which were necessary for index changes ***
                    del n

                    if ADRmode == 3 or ADRmode == 5 or ADRmode == 6:
                        del Data_Ch_A0
                    if ADRmode == 4 or ADRmode == 5 or ADRmode == 6:
                        del Data_Ch_B0
                    if (ADRmode == 6 and CorrelationProcess == 1):
                        del Data_C_Im0, Data_C_Re0

                    # *** Converting from FPGA to PC float format ***
                    if ADRmode == 3 or ADRmode == 5 or ADRmode == 6:
                        Data_Ch_A = FPGAtoPCarrayADR(Data_Ch_A, NAvr)
                    if ADRmode == 4 or ADRmode == 5 or ADRmode == 6:
                        Data_Ch_B = FPGAtoPCarrayADR(Data_Ch_B, NAvr)
                    if (ADRmode == 6 and CorrelationProcess == 1):
                        Data_C_Re = FPGAtoPCarrayADR(Data_C_Re, NAvr)
                        Data_C_Im = FPGAtoPCarrayADR(Data_C_Im, NAvr)

                    # *** Calculating Sum and Difference of A and B channels ***
                    if ((ADRmode == 5 or ADRmode == 6)
                            and Sum_Diff_Calculate == 1):
                        Data_Sum = Data_Ch_A + Data_Ch_B
                        Data_Dif = abs(Data_Ch_A - Data_Ch_B)

                    # *** Saving data to a long-term file ***
                    if (ADRmode == 3 or ADRmode == 5
                            or ADRmode == 6) and longFileSaveAch == 1:
                        fileData_A = open(fileData_A_name, 'ab')
                        fileData_A.write(Data_Ch_A)
                        fileData_A.close()
                    if (ADRmode == 4 or ADRmode == 5
                            or ADRmode == 6) and longFileSaveBch == 1:
                        fileData_B = open(fileData_B_name, 'ab')
                        fileData_B.write(Data_Ch_B)
                        fileData_B.close()
                    if ADRmode == 6 and longFileSaveCRI == 1 and CorrelationProcess == 1:
                        fileData_C_Re = open(fileData_CRe_name, 'ab')
                        fileData_C_Re.write(Data_C_Re)
                        fileData_C_Re.close()
                        fileData_C_Im = open(fileData_CIm_name, 'ab')
                        fileData_C_Im.write(Data_C_Im)
                        fileData_C_Im.close()
                    if ((ADRmode == 5 or ADRmode == 6)
                            and Sum_Diff_Calculate == 1
                            and longFileSaveSSD == 1):
                        fileData_Sum = open(fileData_Sum_name, 'ab')
                        fileData_Sum.write(Data_Sum)
                        fileData_Sum.close()
                        fileData_Dif = open(fileData_Dif_name, 'ab')
                        fileData_Dif.write(Data_Dif)
                        fileData_Dif.close()
                        del Data_Sum, Data_Dif

                    if (longFileSaveAch == 1 or longFileSaveBch == 1
                            or longFileSaveCRI == 1 or longFileSaveCMP == 1
                            or longFileSaveSSD == 1):
                        with open(TLfile_name, 'a') as TLfile:
                            for i in range(SpInFrame * FrameInChunk * Nim):
                                TLfile.write((TimeScale[i][:]) + ' \n')  # str

                    # *** Converting matrices to logarithmic scale ***
                    if ADRmode == 3 or ADRmode == 5 or ADRmode == 6:
                        with np.errstate(divide='ignore'):
                            Data_Ch_A = 10 * np.log10(Data_Ch_A)
                    if ADRmode == 4 or ADRmode == 5 or ADRmode == 6:
                        with np.errstate(divide='ignore'):
                            Data_Ch_B = 10 * np.log10(Data_Ch_B)
                    if (ADRmode == 6 and CorrelationProcess == 1):
                        with np.errstate(divide='ignore'):
                            CorrModule = ((Data_C_Re)**2 +
                                          (Data_C_Im)**2)**(0.5)
                            CorrModule = 10 * np.log10(CorrModule)
                            CorrPhase = np.arctan2(Data_C_Im, Data_C_Re)
                        CorrModule[np.isnan(CorrModule)] = 0
                        CorrPhase[np.isnan(CorrPhase)] = 0

                    # *** Writing correlation data to long files ***
                    if (ADRmode == 6 and longFileSaveCMP == 1
                            and CorrelationProcess == 1):
                        fileData_C_M = open(fileData_CM_name, 'ab')
                        fileData_C_M.write(np.float64(CorrModule))
                        fileData_C_M.close()
                        fileData_C_P = open(fileData_CP_name, 'ab')
                        fileData_C_P.write(np.float64(CorrPhase))
                        fileData_C_P.close()

                    # *** Saving immediate spectrum to file ***
                    if (SpecterFileSaveSwitch == 1 and figID == 0):
                        SpFile = open(
                            result_path + '/Service/Specter_' +
                            df_filename[0:14] + '.txt', 'w')
                        for i in range(FreqPointsNum - 1):
                            if ADRmode == 3:
                                SpFile.write(
                                    str('{:10.6f}'.format(frequency[i])) +
                                    '  ' + str('{:16.10f}'.format(
                                        Data_Ch_A[ImmediateSpNo][i])) + ' \n')
                            if ADRmode == 4:
                                SpFile.write(
                                    str('{:10.6f}'.format(frequency[i])) +
                                    '  ' + str('{:16.10f}'.format(
                                        Data_Ch_B[ImmediateSpNo][i])) + ' \n')
                            if ADRmode == 5 or ADRmode == 6:
                                SpFile.write(
                                    str('{:10.6f}'.format(frequency[i])) +
                                    '  ' + str('{:16.10f}'.format(
                                        Data_Ch_A[ImmediateSpNo][i])) + '  ' +
                                    str('{:16.10f}'.format(
                                        Data_Ch_B[ImmediateSpNo][i])) + ' \n')
                        SpFile.close()

                    # *** FIGURE Immediate spectra before cleaning and normalizing ***
                    if figID == 0:
                        if ADRmode == 3:
                            Data_1 = Data_Ch_A[0][:]
                            Legend_1 = 'Channel A'
                        if ADRmode == 4:
                            Data_1 = Data_Ch_B[0][:]
                            Legend_1 = 'Channel B'
                        if ADRmode == 3 or ADRmode == 4:
                            no_of_sets = 1
                            Data_2 = []
                            Suptitle = ('Immediate spectrum ' +
                                        str(df_filename[0:18]) + ' ' +
                                        Legend_1)
                            Title = ('Initial parameters: dt = ' +
                                     str(round(TimeRes * 1000, 3)) +
                                     ' ms, df = ' + str(round(df / 1000., 3)) +
                                     ' kHz' + sumDifMode + ', Description: ' +
                                     str(df_description))
                            Filename = (
                                result_path + '/Service/' + df_filename[0:14] +
                                ' ' + Legend_1 +
                                ' Immediate Spectrum before cleaning and normalizing.png'
                            )

                        if ADRmode == 5 or ADRmode == 6:  # Immediate spectrum channels A & B
                            Data_1 = Data_Ch_A[0][:]
                            Data_2 = Data_Ch_B[0][:]
                            Legend_1 = 'Channel A'
                            no_of_sets = 2
                            Suptitle = ('Immediate spectrum ' +
                                        str(df_filename[0:18]) +
                                        ' channels A & B')
                            Title = ('Initial parameters: dt = ' +
                                     str(round(TimeRes * 1000, 3)) +
                                     ' ms, df = ' + str(round(df / 1000., 3)) +
                                     ' kHz,' + sumDifMode + ' Description: ' +
                                     str(df_description))
                            Filename = (
                                result_path + '/Service/' + df_filename[0:14] +
                                ' Channels A and B Immediate Spectrum before cleaning and normalizing.png'
                            )

                        TwoOrOneValuePlot(
                            no_of_sets, frequency, Data_1, Data_2, Legend_1,
                            'Channel B', frequency[0],
                            frequency[FreqPointsNum - 1], -120, -20, -120, -20,
                            'Frequency, MHz', 'Intensity, dB', 'Intensity, dB',
                            Suptitle, Title, Filename, currentDate,
                            currentTime, Software_version)

                    # *** FIGURE Correlation amplitude and phase immediate spectrum ***
                    if (ADRmode == 6 and figID == 0
                            and CorrelationProcess == 1):  # Immediate correlation spectrum channels A & B

                        Suptitle = ('Immediate correlation spectrum ' +
                                    str(df_filename[0:18]) + ' channels A & B')
                        Title = ('Initial parameters: dt = ' +
                                 str(round(TimeRes * 1000, 3)) + ' ms, df = ' +
                                 str(round(df / 1000., 3)) + ' kHz,' +
                                 sumDifMode + ' Description: ' +
                                 str(df_description))
                        Filename = (
                            result_path + '/Service/' + df_filename[0:14] +
                            ' Channels A and B Correlation module and phase spectrum.png'
                        )

                        TwoOrOneValuePlot(
                            2, frequency, CorrModule[0][:], CorrPhase[0][:],
                            'Correlation module', 'Correlation phase',
                            frequency[0], frequency[FreqPointsNum - 1], -150,
                            -20, -4, 4, 'Frequency, MHz', 'Intensity, dB',
                            'Phase, rad', Suptitle, Title, Filename,
                            currentDate, currentTime, Software_version)

                    # *** FIGURE Initial dynamic spectrum of 1 channel (A or B) ***
                    if ((ADRmode == 3 or ADRmode == 4)
                            and DynSpecSaveInitial == 1):
                        if ADRmode == 3:
                            Data = Data_Ch_A.transpose()
                        if ADRmode == 4:
                            Data = Data_Ch_B.transpose()

                        Suptitle = ('Dynamic spectrum (initial) ' +
                                    str(df_filename[0:18]) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n Description: ' +
                                    str(df_description))

                        fig_file_name = (result_path + '/Initial_spectra/' +
                                         df_filename[0:14] +
                                         ' Initial dynamic spectrum fig.' +
                                         str(figID + 1) + '.png')

                        OneDynSpectraPlot(
                            Data, -120, -30, Suptitle, 'Intensity, dB',
                            Nim * SpInFrame * FrameInChunk, TimeScaleFig,
                            frequency, FreqPointsNum, colormap,
                            'UTC Time, HH:MM:SS.msec', fig_file_name,
                            currentDate, currentTime, Software_version,
                            customDPI)

                    # *** FIGURE Initial dynamic spectrum channels A and B ***
                    if ((ADRmode == 5 or ADRmode == 6)
                            and DynSpecSaveInitial == 1):

                        fig_file_name = (result_path + '/Initial_spectra/' +
                                         df_filename[0:14] +
                                         ' Initial dynamic spectrum fig.' +
                                         str(figID + 1) + '.png')
                        Suptitle = ('Dynamic spectrum (initial) ' +
                                    str(df_filename) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n' + ReceiverMode +
                                    ', Description: ' + str(df_description))

                        TwoDynSpectraPlot(
                            Data_Ch_A.transpose(), Data_Ch_B.transpose(), Vmin,
                            Vmax, Vmin, Vmax, Suptitle, 'Intensity, dB',
                            'Intensity, dB', Nim * SpInFrame * FrameInChunk,
                            TimeFigureScaleFig, TimeScaleFig, frequency,
                            FreqPointsNum, colormap, 'Channel A', 'Channel B',
                            fig_file_name, currentDate, currentTime,
                            Software_version, customDPI)

                    # *** FIGURE Initial correlation spectrum module and phase ***
                    if (ADRmode == 6 and CorrSpecSaveInitial == 1
                            and CorrelationProcess == 1):

                        fig_file_name = (result_path +
                                         '/Correlation_spectra/' +
                                         df_filename[0:14] +
                                         ' Correlation dynamic spectrum fig.' +
                                         str(figID + 1) + '.png')
                        Suptitle = ('Correlation dynamic spectrum (initial) ' +
                                    str(df_filename) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n' + ReceiverMode +
                                    ', Description: ' + str(df_description))

                        TwoDynSpectraPlot(CorrModule.transpose(),
                                          CorrPhase.transpose(), VminCorrMag,
                                          VmaxCorrMag, -3.15, 3.15, Suptitle,
                                          'Intensity, dB', 'Phase, rad',
                                          Nim * SpInFrame * FrameInChunk,
                                          TimeFigureScaleFig, TimeScaleFig,
                                          frequency, FreqPointsNum, colormap,
                                          'Correlation module',
                                          'Correlation phase', fig_file_name,
                                          currentDate, currentTime,
                                          Software_version, customDPI)

                    # *** Normalizing amplitude-frequency response ***
                    if (ADRmode == 3 or ADRmode == 5
                            or ADRmode == 6) and DynSpecSaveCleaned == 1:
                        Normalization_dB(Data_Ch_A, FreqPointsNum,
                                         Nim * SpInFrame * FrameInChunk)
                    if (ADRmode == 4 or ADRmode == 5
                            or ADRmode == 6) and DynSpecSaveCleaned == 1:
                        Normalization_dB(Data_Ch_B, FreqPointsNum,
                                         Nim * SpInFrame * FrameInChunk)
                    if ADRmode == 6 and CorrelationProcess == 1 and CorrSpecSaveCleaned == 1:
                        Normalization_dB(CorrModule, FreqPointsNum,
                                         Nim * SpInFrame * FrameInChunk)

                    # *** Deleting channels with strong RFI ***
                    if (ADRmode == 3 or ADRmode == 5
                            or ADRmode == 6) and DynSpecSaveCleaned == 1:
                        simple_channel_clean(Data_Ch_A, RFImeanConst)
                    if (ADRmode == 4 or ADRmode == 5
                            or ADRmode == 6) and DynSpecSaveCleaned == 1:
                        simple_channel_clean(Data_Ch_B, RFImeanConst)
                    if ADRmode == 6 and CorrelationProcess == 1 and CorrSpecSaveCleaned == 1:
                        simple_channel_clean(CorrModule, 2 * RFImeanConst)

                    # *** Immediate spectra of normalized data ***    (only for first figure in data file)
                    if figID == 0 and DynSpecSaveCleaned == 1:
                        if ADRmode == 3:
                            Data_1 = Data_Ch_A[0][:]
                            Legend_1 = 'Channel A'
                        if ADRmode == 4:
                            Data_1 = Data_Ch_B[0][:]
                            Legend_1 = 'Channel B'
                        if ADRmode == 3 or ADRmode == 4:
                            no_of_sets = 1
                            Data_2 = []
                            Suptitle = ('Normalized immediate spectrum ' +
                                        str(df_filename[0:18]) + ' ' +
                                        Legend_1)
                            Title = ('Initial parameters: dt = ' +
                                     str(round(TimeRes * 1000, 3)) +
                                     ' ms, df = ' + str(round(df / 1000., 3)) +
                                     ' kHz' + sumDifMode + ', Description: ' +
                                     str(df_description))
                            Filename = (
                                result_path + '/Service/' + df_filename[0:14] +
                                ' ' + Legend_1 +
                                ' Immediate Spectrum after cleaning and normalizing.png'
                            )

                        if ADRmode == 5 or ADRmode == 6:  # Immediate spectrum channels A & B
                            no_of_sets = 2
                            Data_1 = Data_Ch_A[0][:]
                            Data_2 = Data_Ch_B[0][:]
                            Legend_1 = 'Channel A'
                            Suptitle = ('Normalized immediate spectrum ' +
                                        str(df_filename[0:18]) +
                                        ' channels A & B')
                            Title = ('Initial parameters: dt = ' +
                                     str(round(TimeRes * 1000, 3)) +
                                     ' ms, df = ' + str(round(df / 1000., 3)) +
                                     ' kHz' + sumDifMode + ', Description: ' +
                                     str(df_description))
                            Filename = (
                                result_path + '/Service/' + df_filename[0:14] +
                                ' Channels A and B Immediate Spectrum after cleaning and normalizing.png'
                            )

                        TwoOrOneValuePlot(
                            no_of_sets, frequency, Data_1, Data_2, Legend_1,
                            'Channel B', frequency[0],
                            frequency[FreqPointsNum - 1], -10, 40, -10, 40,
                            'Frequency, MHz', 'Intensity, dB', 'Intensity, dB',
                            Suptitle, Title, Filename, currentDate,
                            currentTime, Software_version)

                    # *** FIGURE Cleaned and normalized dynamic spectrum of 1 channel A or B
                    if ((ADRmode == 3 or ADRmode == 4)
                            and DynSpecSaveCleaned == 1):
                        if ADRmode == 3:
                            Data = Data_Ch_A.transpose()
                        if ADRmode == 4:
                            Data = Data_Ch_B.transpose()

                        Suptitle = ('Dynamic spectrum (normalized) ' +
                                    str(df_filename[0:18]) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n Description: ' +
                                    str(df_description))

                        fig_file_name = (result_path + '/' +
                                         df_filename[0:14] +
                                         ' Dynamic spectrum fig.' +
                                         str(figID + 1) + '.png')

                        OneDynSpectraPlot(
                            Data, VminNorm, VmaxNorm, Suptitle,
                            'Intensity, dB', Nim * SpInFrame * FrameInChunk,
                            TimeScaleFig, frequency, FreqPointsNum, colormap,
                            'UTC Time, HH:MM:SS.msec', fig_file_name,
                            currentDate, currentTime, Software_version,
                            customDPI)

                    # *** FIGURE Dynamic spectrum channels A and B cleaned and normalized (python 3 new version) ***
                    if ((ADRmode == 5 or ADRmode == 6)
                            and DynSpecSaveCleaned == 1):
                        fig_file_name = (result_path + '/' +
                                         df_filename[0:14] +
                                         ' Dynamic spectrum fig.' +
                                         str(figID + 1) + '.png')
                        Suptitle = ('Dynamic spectrum (normalized) ' +
                                    str(df_filename) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n' + ReceiverMode +
                                    ', Description: ' + str(df_description))

                        TwoDynSpectraPlot(
                            Data_Ch_A.transpose(), Data_Ch_B.transpose(),
                            VminNorm, VmaxNorm, VminNorm, VmaxNorm, Suptitle,
                            'Intensity, dB', 'Intensity, dB',
                            Nim * SpInFrame * FrameInChunk, TimeFigureScaleFig,
                            TimeScaleFig, frequency, FreqPointsNum, colormap,
                            'Channel A', 'Channel B', fig_file_name,
                            currentDate, currentTime, Software_version,
                            customDPI)

                    # *** FIGURE Correlation spectrum module and phase cleaned and normalized (python 3 new version) ***
                    if (ADRmode == 6 and CorrSpecSaveCleaned == 1
                            and CorrelationProcess == 1):
                        Suptitle = ('Correlation dynamic spectrum (normalized) ' +
                                    str(df_filename) + ' - Fig. ' +
                                    str(figID + 1) + ' of ' + str(figMAX) +
                                    '\n Initial parameters: dt = ' +
                                    str(round(TimeRes * 1000, 3)) +
                                    ' ms, df = ' + str(round(df / 1000., 3)) +
                                    ' kHz, ' + sumDifMode + ' Receiver: ' +
                                    str(df_system_name) + ', Place: ' +
                                    str(df_obs_place) + '\n' + ReceiverMode +
                                    ', Description: ' + str(df_description))
                        fig_file_name = (result_path + '/Correlation_spectra/' +
                                         df_filename[0:14] +
                                         ' Correlation dynamic spectrum cleaned fig.' +
                                         str(figID + 1) + '.png')
                        TwoDynSpectraPlot(
                            CorrModule.transpose(), CorrPhase.transpose(),
                            VminNorm, 3 * VmaxNorm, -3.15, 3.15, Suptitle,
                            'Intensity, dB', 'Phase, rad',
                            Nim * SpInFrame * FrameInChunk, TimeFigureScaleFig,
                            TimeScaleFig, frequency, FreqPointsNum, colormap,
                            'Normalized and cleaned correlation module',
                            'Correlation phase', fig_file_name, currentDate,
                            currentTime, Software_version, customDPI)

                gc.collect()
            del timeLineSecond
            #print ('\n  Position in file: ', file.tell(), ' File size: ', df_filesize)
            #if (file.tell() == df_filesize): print ('\n  File was read till the end')
            #if (file.tell() < df_filesize):  print ('\n  File was NOT read till the end!!! ERROR')

        # Here we close the data file
    ok = 1
    return ok, DAT_file_name, DAT_file_list
def check_if_ADR_files_of_equal_parameters(folder_path, file_list):
    '''
    The function checks if main parameters of the ADR files are equal (are they from the same observation)
    Input parameters:
        folder_path - path to folder with files
        file_list - list of files in the folder to check
    Output parameters:
        equal_or_not - "1" if files have equal parameters, "0" - otherwise
    '''
    df_system_name_list = []
    df_obs_place_list = []
    df_description_list = []
    ADRmode_list = []
    sumDifMode_list = []
    TimeRes_list = []
    FFT_Size_list = []
    SLine_list = []
    Width_list = []
    BlockSize_list = []

    for file_no in range(len(file_list)):
        filepath = folder_path + file_list[file_no]
        [
            df_filename, df_filesize, df_system_name, df_obs_place,
            df_description, F_ADC, df_creation_timeUTC, ReceiverMode, ADRmode,
            sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency, FFT_Size,
            SLine, Width, BlockSize
        ] = FileHeaderReaderADR(filepath, 0, 0)

        df_system_name_list.append(df_system_name)
        df_obs_place_list.append(df_obs_place)
        df_description_list.append(df_description)
        ADRmode_list.append(ADRmode)
        sumDifMode_list.append(sumDifMode)
        TimeRes_list.append(TimeRes)
        FFT_Size_list.append(FFT_Size)
        SLine_list.append(SLine)
        Width_list.append(Width)
        BlockSize_list.append(BlockSize)

    i = 0
    if df_system_name_list.count(
            df_system_name_list[0]) == len(df_system_name_list):
        i = i + 1
    if df_obs_place_list.count(df_obs_place_list[0]) == len(df_obs_place_list):
        i = i + 1
    if df_description_list.count(
            df_description_list[0]) == len(df_description_list):
        i = i + 1
    if ADRmode_list.count(ADRmode_list[0]) == len(ADRmode_list): i = i + 1
    if sumDifMode_list.count(sumDifMode_list[0]) == len(sumDifMode_list):
        i = i + 1
    if TimeRes_list.count(TimeRes_list[0]) == len(TimeRes_list): i = i + 1
    if FFT_Size_list.count(FFT_Size_list[0]) == len(FFT_Size_list): i = i + 1
    if SLine_list.count(SLine_list[0]) == len(SLine_list): i = i + 1
    if Width_list.count(Width_list[0]) == len(Width_list): i = i + 1
    if BlockSize_list.count(BlockSize_list[0]) == len(BlockSize_list):
        i = i + 1

    if i == 10:
        equal_or_not = 1
        print('   OK: all files have the same parameters!')
    else:
        equal_or_not = 0
        print(
            '\n **********************************************************\n !!!   WARNING: Parameters of files in folder differ    !!! \n **********************************************************'
        )
        print('\n   * Check text parameters of the files in the list: \n')
        for file_no in range(len(file_list)):
            print('   ', file_no + 1, ') ', df_system_name_list[file_no], '  ',
                  df_obs_place_list[file_no], '  ',
                  df_description_list[file_no])
        print('\n   * Check numerical parameters of the files in the list: \n')
        print(
            '   No  ADR mode  Sum/Diff  Time res.   FFT size   Start line   Width  Block size\n'
        )
        for file_no in range(len(file_list)):
            #print('   ',  file_no+1 ,')    ', str(ADRmode_list[file_no]), '     ',str(sumDifMode_list[file_no]), '   ',np.round(TimeRes_list[file_no], 6), '   ', FFT_Size_list[file_no], '  ', SLine_list[file_no], Width_list[file_no], ' ', BlockSize_list[file_no])
            print('  {:0>4d}'.format(file_no + 1),
                  '   {:0>1d}'.format(ADRmode_list[file_no]),
                  '        {}'.format(sumDifMode_list[file_no]),
                  '      {:.6f}'.format(np.round(TimeRes_list[file_no], 6)),
                  '     {:5.0f}'.format(FFT_Size_list[file_no]),
                  '       {:1.0f}'.format(SLine_list[file_no]),
                  '         {:1.0f}'.format(Width_list[file_no]),
                  '     {:6.0f}'.format(BlockSize_list[file_no]))

    return equal_or_not
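
# A minimal usage sketch for check_if_ADR_files_of_equal_parameters (added for
# illustration; the folder path and the extension filter below are hypothetical,
# not values taken from this listing).
if __name__ == '__main__':
    import os

    folder_path = 'DATA/'  # hypothetical folder with ADR files of one observation
    file_list = sorted(name for name in os.listdir(folder_path) if name.endswith('.adr'))

    if check_if_ADR_files_of_equal_parameters(folder_path, file_list) == 1:
        print('Files can be processed together as one observation.')
    else:
        print('Files have different parameters and must be processed separately.')
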
source = '3C405'
for i in range (len(culm_time_3C405)):
    currentTime = time.strftime("%H:%M:%S")
    print ('\n Culmination '+ str(culm_time_3C405[i]) +' # ', str(i+1), ' of ', str(len(culm_time_3C405)), '       started at: ', currentTime)


    start_time = culm_time_3C405[i] - TimeDelta(3600, format = 'sec')
    end_time  = culm_time_3C405[i] + TimeDelta(3600, format = 'sec')

    dateTimeStart = str(start_time)[0:19]
    dateTimeStop = str(end_time)[0:19]

    [df_filename, df_filesize, df_system_name, df_obs_place, df_description,
                CLCfrq, df_creation_timeUTC, ReceiverMode, Mode, sumDifMode,
                NAvr, TimeRes, fmin, fmax, df, frequency, FFTsize, SLine,
                Width, BlockSize] = FileHeaderReaderADR(path_to_data + dat_files_list[0], 0, 0)


    result_folder = data_files_name_list[0]+"_"+str(i+1)+'_of_'+str(len(culm_time_3C405))+'_'+source
    done_or_not = DAT_file_reader(path_to_data, data_files_name_list[0], typesOfData, result_folder,
                                averOrMin, StartStopSwitch, SpecFreqRange, VminMan, VmaxMan, VminNormMan, VmaxNormMan,
                                RFImeanConst, customDPI, colormap, ChannelSaveTXT, ChannelSavePNG, ListOrAllFreq,
                                AmplitudeReIm_GURT, freqStart, freqStop, dateTimeStart, dateTimeStop, freqStartTXT,
                                freqStopTXT, freqList_GURT, 0)

    # Saving TXT file with parameters from file header
    path = path_to_data + 'DAT_Results_' + result_folder + '/'
    TXT_file = open(path + data_files_name_list[0]+'_'+source + '_header.info', "w")
    TXT_file.write(' Observatory:           ' + df_obs_place + '\n')
    TXT_file.write(' Receiver:              ' + df_system_name + '\n')
    TXT_file.write(' Initial filename:      ' + df_filename + '\n')
for type in range(1):  # Main loop over types of data to analyze (may not be necessary)

    # *** Opening DAT datafile ***

    file = open(data_filename, 'rb')

    # reading FHEADER
    df_filesize = (os.stat(data_filename).st_size)                          # Size of file
    df_filename = file.read(32).decode('utf-8').rstrip('\x00')              # Initial data file name
    file.close()

    receiver_type = df_filename[-4:]

    # Reading file header to obtain main parameters of the file
    if receiver_type == '.adr':
        [TimeRes, fmin, fmax, df, frequency_list, FFTsize] = FileHeaderReaderADR(data_filename, 0)
    if receiver_type == '.jds':
        [df_filename, df_filesize, df_system_name, df_obs_place, df_description,
        CLCfrq, df_creation_timeUTC, SpInFile, ReceiverMode, Mode, Navr,
        TimeRes, fmin, fmax, df, frequency_list, FFTsize, dataBlockSize] = FileHeaderReaderJDS(data_filename, 0, 1)


    #************************************************************************************
    #                            R E A D I N G   D A T A                                *
    #************************************************************************************
    if receiver_type == '.jds':
        num_frequencies = len(frequency_list)-4

    shift_vector = DM_full_shift_calc(len(frequency_list), fmin, fmax, df / pow(10,6), TimeRes, DM, receiver_type)

    #plot1D(shift_vector, newpath+'/01 - Shift parameter.png', 'Shift parameter', 'Shift parameter', 'Shift parameter', 'Frequency channel number', customDPI)
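
# The call above delegates the dedispersion geometry to DM_full_shift_calc. The block
# below is only a hedged illustration of the underlying idea (an assumption about the
# calculation, not a copy of that function): the cold-plasma dispersion delay of a
# channel at frequency f relative to the highest channel f_max is
#     dt = k_DM * DM * (1 / f**2 - 1 / f_max**2),  k_DM ~= 4.148808e3 s * MHz**2 * pc**-1 * cm**3,
# and dividing that delay by the time resolution gives the shift in spectra.
import numpy as np


def dispersion_shift_in_spectra(freqs_mhz, dm, time_res_s):
    """Shift of every channel (in spectra) relative to the highest frequency channel."""
    k_dm = 4.148808e3  # s * MHz**2 * pc**-1 * cm**3
    delay_s = k_dm * dm * (freqs_mhz ** -2 - freqs_mhz.max() ** -2)
    return np.round(delay_s / time_res_s).astype(int)


# Hypothetical example values (not from this listing): 16.5-33 MHz band, DM = 5.75 pc/cm^3
example_shifts = dispersion_shift_in_spectra(np.linspace(16.5, 33.0, 8192), 5.75, 0.007944)
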
def pulsar_period_DM_compensated_pics(common_path, filename, pulsar_name,
                                      normalize_response, profile_pic_min,
                                      profile_pic_max, spectrum_pic_min,
                                      spectrum_pic_max, periods_per_fig,
                                      customDPI, colormap, save_strongest,
                                      threshold):

    current_time = time.strftime("%H:%M:%S")
    current_date = time.strftime("%d.%m.%Y")

    # Creating a folder where all pictures and results will be stored (if it doesn't exist)
    result_path = "RESULTS_pulsar_n_periods_pics_" + filename
    if not os.path.exists(result_path):
        os.makedirs(result_path)
    if save_strongest:
        best_result_path = result_path + '/Strongest_pulses'
        if not os.path.exists(best_result_path):
            os.makedirs(best_result_path)

    # Taking pulsar period from catalogue
    pulsar_ra, pulsar_dec, DM, p_bar = catalogue_pulsar(pulsar_name)

    # DAT file to be analyzed:
    filepath = common_path + filename

    # Timeline file to be analyzed:
    timeline_filepath = common_path + filename.split(
        '_Data_')[0] + '_Timeline.txt'

    # Opening DAT datafile
    file = open(filepath, 'rb')

    # Data file header read
    df_filesize = os.stat(filepath).st_size  # Size of file
    df_filepath = file.read(32).decode('utf-8').rstrip(
        '\x00')  # Initial data file name
    file.close()

    if df_filepath[-4:] == '.adr':

        [
            df_filepath, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, ReceiverMode, Mode,
            sumDifMode, NAvr, time_resolution, fmin, fmax, df, frequency,
            FFTsize, SLine, Width, BlockSize
        ] = FileHeaderReaderADR(filepath, 0, 0)

        freq_points_num = len(frequency)

    if df_filepath[-4:] == '.jds':  # If data obtained from DSPZ receiver

        [
            df_filepath, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, SpInFile,
            ReceiverMode, Mode, Navr, time_resolution, fmin, fmax, df,
            frequency, freq_points_num, dataBlockSize
        ] = FileHeaderReaderJDS(filepath, 0, 1)

    # ************************************************************************************
    #                             R E A D I N G   D A T A                                *
    # ************************************************************************************

    # Time line file reading
    timeline, dt_timeline = time_line_file_reader(timeline_filepath)

    # Calculation of the dimensions of arrays to read taking into account the pulsar period
    spectra_in_file = int(
        (df_filesize - 1024) /
        (8 * freq_points_num))  # int(df_filesize - 1024)/(2*4*freq_points_num)
    spectra_to_read = int(
        np.round((periods_per_fig * p_bar / time_resolution), 0))
    num_of_blocks = int(np.floor(spectra_in_file / spectra_to_read))

    print('   Pulsar period:                           ', p_bar, 's.')
    print('   Time resolution:                         ', time_resolution,
          's.')
    print('   Number of spectra to read in', periods_per_fig, 'periods:  ',
          spectra_to_read, ' ')
    print('   Number of spectra in file:               ', spectra_in_file, ' ')
    print('   Number of', periods_per_fig, 'periods blocks in file:      ',
          num_of_blocks, '\n')

    # Data reading and making figures
    print('\n\n  *** Data reading and making figures *** \n\n')

    data_file = open(filepath, 'rb')
    data_file.seek(1024, os.SEEK_SET)  # Skipping the 1024-byte file header from the file beginning

    bar = IncrementalBar('   Making pictures of n periods: ',
                         max=num_of_blocks,
                         suffix='%(percent)d%%')
    bar.start()

    for block in range(num_of_blocks + 1):  # Main loop by blocks of data

        # bar.next()

        # current_time = time.strftime("%H:%M:%S")
        # print(' * Data block # ', block + 1, ' of ', num_of_blocks + 1, '  started at: ', current_time)

        # Reading the last block, which is shorter than 3 periods
        if block == num_of_blocks:
            spectra_to_read = spectra_in_file - num_of_blocks * spectra_to_read

        # Reading and preparing block of data (3 periods)
        data = np.fromfile(data_file,
                           dtype=np.float64,
                           count=spectra_to_read * len(frequency))
        data = np.reshape(data, [len(frequency), spectra_to_read], order='F')
        data = 10 * np.log10(data)
        if normalize_response > 0:
            Normalization_dB(data.transpose(), len(frequency), spectra_to_read)

        # Preparing single averaged data profile for figure
        profile = data.mean(axis=0)[:]
        profile = profile - np.mean(profile)
        data = data - np.mean(data)

        # Time line
        fig_time_scale = timeline[block * spectra_to_read:(block + 1) *
                                  spectra_to_read]

        # Making result picture
        fig = plt.figure(figsize=(9.2, 4.5))
        rc('font', size=5, weight='bold')
        ax1 = fig.add_subplot(211)
        ax1.plot(profile,
                 color=u'#1f77b4',
                 linestyle='-',
                 alpha=1.0,
                 linewidth='0.60',
                 label='3 pulses time profile')
        ax1.legend(loc='upper right', fontsize=5)
        ax1.grid(b=True,
                 which='both',
                 color='silver',
                 linewidth='0.50',
                 linestyle='-')
        ax1.axis([0, len(profile), profile_pic_min, profile_pic_max])
        ax1.set_ylabel('Amplitude, AU', fontsize=6, fontweight='bold')
        ax1.set_title('File: ' + filename + '  Description: ' +
                      df_description + '  Resolution: ' +
                      str(np.round(df / 1000, 3)) + ' kHz and ' +
                      str(np.round(time_resolution * 1000, 3)) + ' ms.',
                      fontsize=5,
                      fontweight='bold')
        ax1.tick_params(axis='x',
                        which='both',
                        bottom=False,
                        top=False,
                        labelbottom=False)
        ax2 = fig.add_subplot(212)
        ax2.imshow(np.flipud(data),
                   aspect='auto',
                   cmap=colormap,
                   vmin=spectrum_pic_min,
                   vmax=spectrum_pic_max,
                   extent=[0, len(profile), frequency[0], frequency[-1]])
        ax2.set_xlabel('Time UTC (at the lowest frequency), HH:MM:SS.ms',
                       fontsize=6,
                       fontweight='bold')
        ax2.set_ylabel('Frequency, MHz', fontsize=6, fontweight='bold')
        text = ax2.get_xticks().tolist()
        for i in range(len(text) - 1):
            k = int(text[i])
            text[i] = fig_time_scale[k][11:23]
        ax2.set_xticklabels(text, fontsize=5, fontweight='bold')
        fig.subplots_adjust(hspace=0.05, top=0.91)
        fig.suptitle('Single pulses of ' + pulsar_name + ' (DM: ' + str(DM) +
                     r' $\mathrm{pc \cdot cm^{-3}}$' + ', Period: ' +
                     str(p_bar) + ' s.), fig. ' + str(block + 1) + ' of ' +
                     str(num_of_blocks + 1),
                     fontsize=7,
                     fontweight='bold')
        fig.text(0.80,
                 0.04,
                 'Processed ' + current_date + ' at ' + current_time,
                 fontsize=3,
                 transform=plt.gcf().transFigure)
        fig.text(0.09,
                 0.04,
                 'Software version: ' + Software_version +
                 ', [email protected], IRA NASU',
                 fontsize=3,
                 transform=plt.gcf().transFigure)
        pylab.savefig(result_path + '/' + filename + ' fig. ' +
                      str(block + 1) + ' - Combined picture.png',
                      bbox_inches='tight',
                      dpi=customDPI)

        # If the profile has points above threshold save picture also into separate folder
        if save_strongest and np.max(profile) > threshold:
            pylab.savefig(best_result_path + '/' + filename + ' fig. ' +
                          str(block + 1) + ' - Combined picture.png',
                          bbox_inches='tight',
                          dpi=customDPI)
        plt.close('all')

        bar.next()

    bar.finish()
    data_file.close()
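
# A small worked example (hypothetical numbers, not from this listing) of the
# block-size arithmetic used in pulsar_period_DM_compensated_pics: how many float64
# spectra the DAT file holds, how many spectra cover periods_per_fig pulsar periods,
# and how many such blocks fit into the file.
import numpy as np

example_filesize = 2_000_000_000  # bytes, hypothetical DAT file size
example_channels = 8192           # hypothetical number of frequency channels
example_period = 1.2922           # s, hypothetical pulsar period
example_time_res = 0.007944       # s, hypothetical time resolution
example_periods_per_fig = 3

spectra_in_file = int((example_filesize - 1024) / (8 * example_channels))  # 8 bytes per float64 value
spectra_to_read = int(np.round(example_periods_per_fig * example_period / example_time_res, 0))
num_of_blocks = int(np.floor(spectra_in_file / spectra_to_read))
print(spectra_in_file, spectra_to_read, num_of_blocks)  # 30517 488 62
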
Beispiel #11
0
    Log_File = open("ADR_Results/Service/Log.txt", "a")
    Log_File.write('\n\n\n  * File '+str(fileNo+1)+' of %s \n' %str(len(fileList)))
    Log_File.write('  * File path: %s \n\n\n' %str(fileList[fileNo]) )


#*********************************************************************************

    # *** Opening datafile ***
    fname = ''
    if len(fname) < 1 : fname = fileList[fileNo]

    # Reading the file header
    [df_filename, df_filesize, df_system_name, df_obs_place, df_description,
            F_ADC, df_creation_timeUTC, ReceiverMode, ADRmode,
            sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency,
            FFT_Size, SLine, Width, BlockSize] = FileHeaderReaderADR(fname, 0, 1)

    # Reading the chunk header
    [SpInFile, SpInFrame, FrameInChunk, ChunksInFile, sizeOfChunk,
            frm_sec, frm_phase] = ChunkHeaderReaderADR(fname, 0, BlockSize, 1)

    FreqPointsNum = int(Width * 1024)
    Log_File.close()


    # *** Setting the time reference (file beginning) ***
    TimeFirstFramePhase = float(frm_phase)/F_ADC
    TimeFirstFrameFloatSec = frm_sec + TimeFirstFramePhase
    TimeScaleStartTime = datetime(int('20' + df_filename[1:3]), int(df_filename[3:5]),
                                  int(df_filename[5:7]), int(df_creation_timeUTC[0:2]),
                                  int(df_creation_timeUTC[3:5]), int(df_creation_timeUTC[6:8]),
                                  int(df_creation_timeUTC[9:12]) * 1000)
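
    # A hedged note with hypothetical values on the time reference set above: the frame
    # phase is assumed to be a counter of ADC clock ticks within the current second, so
    # dividing it by F_ADC converts it into a fractional second.
    example_F_ADC = 160 * 10 ** 6     # Hz, hypothetical ADC clock frequency
    example_frm_sec = 53217           # whole-second counter from the frame header (hypothetical)
    example_frm_phase = 80 * 10 ** 6  # ADC ticks into the current second (hypothetical)
    example_first_frame_float_sec = example_frm_sec + example_frm_phase / example_F_ADC  # 53217.5 s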

Beispiel #12
0
source = '3C405'
for i in range(len(culm_time_3C405)):
    currentTime = time.strftime("%H:%M:%S")
    print('\n Culmination ' + str(culm_time_3C405[i]) + ' # ', str(i + 1),
          ' of ', str(len(culm_time_3C405)), '       started at: ',
          currentTime)

    start_time = culm_time_3C405[i] - TimeDelta(3600, format='sec')
    end_time = culm_time_3C405[i] + TimeDelta(3600, format='sec')

    dateTimeStart = str(start_time)[0:19]
    dateTimeStop = str(end_time)[0:19]

    [df_filename, df_filesize, df_system_name, df_obs_place, df_description, CLCfrq, df_creation_timeUTC,
        ReceiverMode, Mode, sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency, FFTsize, SLine, Width, BlockSize] = \
        FileHeaderReaderADR(path_to_data + dat_files_list[0], 0, 0)

    result_folder = data_files_name_list[0] + "_" + str(i + 1) + '_of_' + str(
        len(culm_time_3C405)) + '_' + source
    done_or_not = DAT_file_reader(
        path_to_data, data_files_name_list[0], typesOfData, result_folder,
        averOrMin, StartStopSwitch, SpecFreqRange, VminMan, VmaxMan,
        VminNormMan, VmaxNormMan, RFImeanConst, customDPI, colormap,
        ChannelSaveTXT, ChannelSavePNG, ListOrAllFreq, AmplitudeReIm_GURT,
        freqStart, freqStop, dateTimeStart, dateTimeStop, freqStartTXT,
        freqStopTXT, freqList_GURT, 0)

    # Saving TXT file with parameters from file header
    path = path_to_data + 'DAT_Results_' + result_folder + '/'
    TXT_file = open(
        path + data_files_name_list[0] + '_' + source + '_header.info', "w")
def cut_needed_pulsar_period_from_dat_to_dat(common_path, filename,
                                             pulsar_name, period_number,
                                             profile_pic_min, profile_pic_max,
                                             spectrum_pic_min,
                                             spectrum_pic_max, periods_per_fig,
                                             customDPI, colormap):
    """
    Function to find and cut the selected pulsar period (by its number) from the DAT files
    """

    software_version = '2021.08.07'

    current_time = time.strftime("%H:%M:%S")
    current_date = time.strftime("%d.%m.%Y")

    # Creating a folder where all pictures and results will be stored (if it doesn't exist)
    result_path = "RESULTS_pulsar_extracted_pulse_" + filename
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    # Taking pulsar period from catalogue
    pulsar_ra, pulsar_dec, DM, p_bar = catalogue_pulsar(pulsar_name)

    # DAT file to be analyzed:
    filepath = common_path + filename

    # Timeline file to be analyzed:
    timeline_filepath = common_path + filename.split(
        '_Data_')[0] + '_Timeline.txt'

    # Opening DAT datafile
    file = open(filepath, 'rb')

    # Data file header read
    df_filesize = os.stat(filepath).st_size  # Size of file
    df_filepath = file.read(32).decode('utf-8').rstrip(
        '\x00')  # Initial data file name
    file.close()

    if df_filepath[-4:] == '.adr':
        [
            df_filepath, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, ReceiverMode, Mode,
            sumDifMode, NAvr, time_resolution, fmin, fmax, df, frequency,
            FFTsize, SLine, Width, BlockSize
        ] = FileHeaderReaderADR(filepath, 0, 0)

        freq_points_num = len(frequency)

    if df_filepath[-4:] == '.jds':  # If data obtained from DSPZ receiver

        [
            df_filepath, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, SpInFile,
            ReceiverMode, Mode, Navr, time_resolution, fmin, fmax, df,
            frequency, freq_points_num, dataBlockSize
        ] = FileHeaderReaderJDS(filepath, 0, 0)

    # ************************************************************************************
    #                             R E A D I N G   D A T A                                *
    # ************************************************************************************

    # Time line file reading
    timeline, dt_timeline = time_line_file_reader(timeline_filepath)

    # Calculation of the dimensions of arrays to read taking into account the pulsar period
    spectra_in_file = int(
        (df_filesize - 1024) /
        (8 * freq_points_num))  # int(df_filesize - 1024)/(2*4*freq_points_num)
    spectra_to_read = int(
        np.round((periods_per_fig * p_bar / time_resolution), 0))
    spectra_per_period = int(np.round((p_bar / time_resolution), 0))
    num_of_blocks = int(np.floor(spectra_in_file / spectra_to_read))

    print('\n   Pulsar name:                             ', pulsar_name, '')
    print('   Pulsar period:                           ', p_bar, 's.')
    print('   Time resolution:                         ', time_resolution,
          's.')
    print('   Number of spectra to read in', periods_per_fig, 'periods:  ',
          spectra_to_read, ' ')
    print('   Number of spectra in file:               ', spectra_in_file, ' ')
    print('   Number of', periods_per_fig, 'periods blocks in file:      ',
          num_of_blocks, '\n')

    # Data reading and making figures
    print('\n   Data reading and making figure...')

    data_file = open(filepath, 'rb')

    # Jumping over the 1024-byte header plus the spectra preceding the requested period
    data_file.seek(
        1024 + (period_number - 1) * spectra_per_period * len(frequency) * 8,
        os.SEEK_SET)

    # Reading and preparing block of data (3 periods)
    data = np.fromfile(data_file,
                       dtype=np.float64,
                       count=spectra_to_read * len(frequency))
    data_file.close()

    data = np.reshape(data, [len(frequency), spectra_to_read], order='F')

    # Read data file header from initial file
    with open(filepath, 'rb') as file:
        file_header = file.read(1024)

    # Create a binary file with the header and one or two pulsar periods of data
    dat_file_name = 'Single_pulse_' + filename
    file_data = open(result_path + '/' + dat_file_name, 'wb')
    file_data.write(file_header)
    del file_header
    # Prepare data to save to the file
    temp = data.transpose().copy(order='C')
    file_data.write(np.float64(temp))
    del temp
    file_data.close()

    # Time line
    fig_time_scale = timeline[(period_number - 1) *
                              spectra_per_period:(period_number - 1 +
                                                  spectra_to_read) *
                              spectra_per_period]

    # Code prepared to save the timeline to a file; since the timeline is wrong, it is commented out temporarily
    # # Creating and filling a new timeline TXT file for results
    # new_tl_file_name = dat_file_name.split('_Data_', 1)[0] + '_Timeline.txt'
    # new_tl_file = open(result_path + '/' + new_tl_file_name, 'w')
    # # Saving time data to new file
    # for j in range(len(fig_time_scale)):
    #     new_tl_file.write((fig_time_scale[j][:]) + '')
    # new_tl_file.close()

    # Converting data to logarithmic scale for the figure
    data = 10 * np.log10(data)

    # Normalizing data
    data = data - np.mean(data)

    # Making result picture
    fig = plt.figure(figsize=(9.2, 4.5))
    rc('font', size=5, weight='bold')
    ax2 = fig.add_subplot(111)
    ax2.set_title('File: ' + filename + '  Description: ' + df_description +
                  '  Resolution: ' + str(np.round(df / 1000, 3)) +
                  ' kHz and ' + str(np.round(time_resolution * 1000, 3)) +
                  ' ms.',
                  fontsize=5,
                  fontweight='bold')
    ax2.imshow(
        np.flipud(data),
        aspect='auto',
        cmap=colormap,
        vmin=spectrum_pic_min,
        vmax=spectrum_pic_max,
        extent=[0, data.shape[1], frequency[0] + 16.5,
                frequency[-1] + 16.5])  # len(profile)
    ax2.set_xlabel('Time UTC (at the lowest frequency), HH:MM:SS.ms',
                   fontsize=6,
                   fontweight='bold')
    ax2.set_ylabel('Frequency, MHz', fontsize=6, fontweight='bold')
    # Replace numeric tick positions with UTC times taken from the timeline
    # (the last tick is skipped as it may fall outside the data range)
    text = ax2.get_xticks().tolist()
    for i in range(len(text) - 1):
        k = int(text[i])
        text[i] = fig_time_scale[k][11:23]
    ax2.set_xticklabels(text, fontsize=5, fontweight='bold')
    fig.subplots_adjust(hspace=0.05, top=0.91)
    fig.suptitle('Extracted single pulse of ' + pulsar_name + ' (DM: ' +
                 str(DM) + r' $\mathrm{pc \cdot cm^{-3}}$' + ', Period: ' +
                 str(p_bar) + ' s.)',
                 fontsize=7,
                 fontweight='bold')
    fig.text(0.80,
             0.04,
             'Processed ' + current_date + ' at ' + current_time,
             fontsize=3,
             transform=plt.gcf().transFigure)
    fig.text(0.09,
             0.04,
             'Software version: ' + software_version +
             ', [email protected], IRA NASU',
             fontsize=3,
             transform=plt.gcf().transFigure)
    pylab.savefig(result_path + '/' + 'Single_pulse_' + filename[:-4] + '.png',
                  bbox_inches='tight',
                  dpi=customDPI)
    plt.close('all')

    return result_path, dat_file_name, 'Single_pulse_' + filename[:-4] + '.png'
# *** Opening DAT datafile ***
file = open(data_filename, 'rb')

# reading FHEADER
df_filesize = (os.stat(data_filename).st_size)  # Size of file
df_filename = file.read(32).decode('utf-8').rstrip(
    '\x00')  # Initial data file name
file.close()

receiver_type = df_filename[-4:]

# Reading file header to obtain main parameters of the file
if receiver_type == '.adr':
    [TimeRes, fmin, fmax, df, frequency_list,
     FFTsize] = FileHeaderReaderADR(data_filename, 0)

if receiver_type == '.jds':
    [
        df_filename, df_filesize, df_system_name, df_obs_place, df_description,
        CLCfrq, df_creation_timeUTC, SpInFile, ReceiverMode, Mode, Navr,
        TimeRes, fmin, fmax, df, frequency_list, FFTsize, dataBlockSize
    ] = FileHeaderReaderJDS(data_filename, 0, 1)

#************************************************************************************
#                            R E A D I N G   D A T A                                *
#************************************************************************************
num_frequencies = len(frequency_list)

# Calculating number of samples per period and number of blocks
samples_per_period = int(np.ceil(pulsar_period / TimeRes))
Example #15
0
def normalize_dat_file(directory, filename, no_of_spectra_in_bunch,
                       median_filter_window, show_aver_spectra):
    """
    function calculates the average spectrum in DAT file and normalizes all spectra in file to average spectra
    Input parameters:
        directory - name of directory with initial dat file
        filename - name of initial dat file
        no_of_spectra_in_bunch - number of spectra in bunch to read
        median_filter_window - window of median filter to process the average spectrum
        show_aver_spectra - boolean variable which indicates if the picture of average spectrum to be shown and
                            the script paused till the picture window is closed
    Output parameters:
        output_file_name -  name of result normalized .dat file
    """

    print(
        '\n   Preparations and calculation of the average spectrum to normalize... \n'
    )

    output_file_name = directory + 'Norm_' + filename
    filename = directory + filename

    # Opening DAT datafile
    file = open(filename, 'rb')

    # *** Data file header read ***
    df_filesize = os.stat(filename).st_size  # Size of file
    df_filename = file.read(32).decode('utf-8').rstrip(
        '\x00')  # Initial data file name
    file.close()

    if df_filename[-4:] == '.adr':

        [
            df_filename, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, ReceiverMode, Mode,
            sumDifMode, NAvr, TimeRes, fmin, fmax, df, frequency, FFTsize,
            SLine, Width, BlockSize
        ] = FileHeaderReaderADR(filename, 0, 0)

    if df_filename[-4:] == '.jds':  # If data obtained from DSPZ receiver

        [
            df_filename, df_filesize, df_system_name, df_obs_place,
            df_description, CLCfrq, df_creation_timeUTC, SpInFile,
            ReceiverMode, Mode, Navr, TimeRes, fmin, fmax, df, frequency,
            FreqPointsNum, dataBlockSize
        ] = FileHeaderReaderJDS(filename, 0, 0)

    # Calculation of the dimensions of arrays to read
    nx = len(frequency)  # first dimension of the array: number of frequency channels
    # second dimension: number of spectra = (file size minus the 1024-byte header) / (nx * 8 bytes per float64)
    ny = int((df_filesize - 1024) / (nx * 8))
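    # Worked example with hypothetical numbers: a 2 GiB file with nx = 8192 frequency
    # channels holds ny = int((2147483648 - 1024) / (8192 * 8)) = 32767 spectra.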

    # Number of data blocks to read from file
    num_of_blocks = int(ny // no_of_spectra_in_bunch)

    # Read data from file by blocks and average it
    file = open(filename, 'rb')
    file.seek(1024)
    average_array = np.empty((nx, 0), float)
    for block in range(num_of_blocks):
        if block == (num_of_blocks - 1):
            spectra_num_in_bunch = ny - (num_of_blocks -
                                         1) * no_of_spectra_in_bunch
        else:
            spectra_num_in_bunch = no_of_spectra_in_bunch

        data = np.fromfile(file,
                           dtype=np.float64,
                           count=nx * spectra_num_in_bunch)
        data = np.reshape(data, [nx, spectra_num_in_bunch], order='F')
        tmp = np.empty((nx, 1), float)
        # tmp[:, 0] = data.mean(axis=1)[:]
        tmp[:, 0] = data.min(axis=1)[:]  # per-bunch minimum is used here instead of the mean
        average_array = np.append(average_array, tmp, axis=1)

    # Average the per-block spectra over all data blocks
    average_profile = average_array.mean(axis=1)

    init_average_profile = average_profile.copy()

    # # Make a figure of average spectrum (profile)
    # fig = plt.figure(figsize=(9, 5))
    # ax1 = fig.add_subplot(111)
    # ax1.plot(10 * np.log10(average_profile), linestyle='-', linewidth='1.00', label='Average spectra')
    # ax1.legend(loc='upper right', fontsize=6)
    # ax1.grid(b=True, which='both', color='silver', linestyle='-')
    # ax1.set_xlabel('Frequency points, num.', fontsize=6, fontweight='bold')
    # ax1.set_ylabel('Intensity, dB', fontsize=6, fontweight='bold')
    # pylab.savefig('Averaged_spectra_'+filename[:-4]+'_before_filtering.png', bbox_inches='tight', dpi=160)
    # plt.close('all')

    # Apply median filter to average profile
    average_profile = median_filter(average_profile, median_filter_window)
    med_average_profile = average_profile.copy()
    average_profile = average_filter(average_profile,
                                     median_filter_window + 20)
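    # Note: median_filter and average_filter are external helpers not defined in this
    # example; average_filter is assumed to act as a simple moving-average smoother,
    # roughly equivalent to np.convolve(profile, np.ones(w) / w, mode='same').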

    # Make a figure of filtered average spectrum (profile)
    fig = plt.figure(figsize=(12, 8))
    ax1 = fig.add_subplot(111)
    ax1.plot(10 * np.log10(init_average_profile),
             linestyle='-',
             linewidth='1.50',
             label='Initial spectra',
             color='C0',
             alpha=0.6)
    ax1.plot(10 * np.log10(med_average_profile),
             linestyle='-',
             linewidth='1.25',
             label='Median spectra',
             color='C1',
             alpha=0.8)
    ax1.plot(10 * np.log10(average_profile),
             linestyle='-',
             linewidth='1.00',
             label='Median averaged spectra',
             color='C3')
    ax1.legend(loc='upper right', fontsize=6)
    ax1.grid(b=True, which='both', color='silver', linestyle='-')
    ax1.set_xlabel('Frequency points, num.', fontsize=6, fontweight='bold')
    ax1.set_ylabel('Intensity, dB', fontsize=6, fontweight='bold')
    pylab.savefig('Averaged_spectra_' + filename[:-4] + '_after_filtering.png',
                  bbox_inches='tight',
                  dpi=160)
    if show_aver_spectra:
        print('\n   Close the figure window to continue processing!!!\n')
        plt.show()
    plt.close('all')

    del init_average_profile, med_average_profile

    # Normalization
    print('   Spectra normalization... \n')
    file.seek(0)
    file_header = file.read(1024)
    normalized_file = open(output_file_name, 'wb')
    normalized_file.write(file_header)
    del file_header

    bar = IncrementalBar(' Normalizing the DAT file: ',
                         max=num_of_blocks,
                         suffix='%(percent)d%%')
    bar.start()

    for block in range(num_of_blocks):

        if block == (num_of_blocks - 1):
            spectra_num_in_bunch = ny - (num_of_blocks -
                                         1) * no_of_spectra_in_bunch
        else:
            spectra_num_in_bunch = no_of_spectra_in_bunch

        data = np.fromfile(file,
                           dtype=np.float64,
                           count=nx * spectra_num_in_bunch)
        data = np.reshape(data, [nx, spectra_num_in_bunch], order='F')
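        # Dividing every spectrum by the smoothed average profile removes the large-scale
        # spectral shape of the receiver bandpass from the data.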
        for j in range(spectra_num_in_bunch):
            data[:, j] = data[:, j] / average_profile[:]
        temp = data.transpose().copy(order='C')
        normalized_file.write(np.float64(temp))

        bar.next()

    file.close()
    normalized_file.close()
    bar.finish()

    # *** Creating a new timeline TXT file for results ***
    new_tl_file_name = output_file_name.split('_Data_', 1)[0] + '_Timeline.txt'
    new_tl_file = open(
        new_tl_file_name,
        'w')  # Open and close to delete the file with the same name
    new_tl_file.close()

    # *** Reading timeline file ***
    old_tl_file_name = filename.split('_Data_', 1)[0] + '_Timeline.txt'
    old_tl_file = open(old_tl_file_name, 'r')
    new_tl_file = open(new_tl_file_name, 'w')

    # Read time from timeline file
    time_scale_bunch = old_tl_file.readlines()

    # Saving time data to the new file
    for j in range(len(time_scale_bunch)):
        new_tl_file.write(time_scale_bunch[j])

    old_tl_file.close()
    new_tl_file.close()

    return output_file_name
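
# A minimal usage sketch of how normalize_dat_file might be called (illustrative only):
# the directory and file name are hypothetical placeholders, and the numeric arguments
# are example values, not recommended defaults.
norm_file_name = normalize_dat_file(directory='DATA/',
                                    filename='E220213_201439.jds_Data_chA.dat',
                                    no_of_spectra_in_bunch=16384,
                                    median_filter_window=80,
                                    show_aver_spectra=False)
print('Normalized file:', norm_file_name)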