def process_azfp(site, data_directory, xml_file, output_directory, dates,
                 tilt_correction):
    """
    Use echopype to convert and process the ASL AZFP bio-acoustic sonar data
    (in *.01A files) to generate echograms for use by the community

    :param site:
    :param data_directory:
    :param xml_file:
    :param output_directory:
    :param dates:
    :param tilt_correction:
    :return data:
    """
    # generate a list of data files given the input dates
    file_list = azfp_file_list(data_directory, dates)

    # reset the file_list to a single index
    file_list = [file for sub in file_list for file in sub]
    if not file_list:
        # if there are no files to process, exit cleanly
        return None

    # make sure the data output directory exists
    output_directory = os.path.join(output_directory,
                                    dates[0] + '-' + dates[1])
    if not os.path.isdir(output_directory):
        os.mkdir(output_directory)

    # convert the list of .01A files using echopype and save the output as NetCDF files
    dc = Convert(file_list, xml_file)
    dc.platform_name = site  # OOI site name
    dc.platform_type = 'Mooring'  # ICES platform type
    # ICES code 48: tethered collection of instruments at a fixed location
    # that may include seafloor, mid-water or surface components
    dc.platform_code_ICES = '48'
    dc.raw2nc(save_path=output_directory)

    # process the data, calculating the volume acoustic backscatter strength and the vertical range
    echo = []
    nc_files = glob.glob(output_directory + '/[12]???????.nc')
    for nc in nc_files:
        tmp_echo = Process(nc)
        tmp_echo.calibrate()  # calculate Sv
        data = tmp_echo.Sv  # extract the Sv dataset
        echo.append(data.sortby('ping_time'))  # append to the echogram list

    # concatenate the data into a single dataset
    data = xr.concat(echo, dim='ping_time', join='outer')
    data = data.sortby(['frequency', 'ping_time'])
    data['frequency'] = data['frequency'].astype(np.float32)
    data['range_bin'] = data['range_bin'].astype(np.int32)
    data['range'] = data['range'].sel(ping_time=data.ping_time[0], drop=True)
    data = data.set_coords('range')

    # apply a tilt correction, if applicable
    if tilt_correction:
        range_correction(data, tilt_correction)

    # pass the Sv data back for further processing
    return data
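

# Example usage for process_azfp (a minimal sketch; the site code, paths, XML
# file name, date strings and tilt angle below are placeholder values, not
# values taken from this module):
#
#   sv = process_azfp('CE01ISSM', '/data/raw/azfp', '/data/raw/azfp/instrument.xml',
#                     '/data/processed/azfp', ['20200101', '20200131'], 15)
#   if sv is not None:
#       sv.to_netcdf('/data/processed/azfp/Sv_20200101-20200131.nc')
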
def process_ek60(site, data_directory, output_directory, dates,
                 tilt_correction):
    """

    :param site:
    :param data_directory:
    :param output_directory:
    :param dates:
    :param tilt_correction:
    :return data:
    """
    # generate a list of data files given the input dates
    file_list = ek60_file_list(data_directory, dates)

    # reset the file_list to a single index
    file_list = [file for sub in file_list for file in sub]
    if not file_list:
        # if there are no files to process, exit cleanly
        return None

    # make sure the data output directory exists
    output_directory = os.path.join(output_directory,
                                    dates[0] + '-' + dates[1])
    if not os.path.isdir(output_directory):
        os.mkdir(output_directory)

    # convert the list of .raw files using echopype and save the output as NetCDF files
    dc = Convert(file_list)
    dc.platform_name = site  # OOI site name
    if site == 'CE02SHBP':
        dc.platform_type = 'Fixed Benthic Node'  # ICES platform type
        dc.platform_code_ICES = '11'  # ICES code
    else:
        dc.platform_type = 'Mooring'  # ICES platform type
        # ICES code 48: tethered collection of instruments at a fixed location
        # that may include seafloor, mid-water or surface components
        dc.platform_code_ICES = '48'
    dc.raw2nc(save_path=output_directory)

    # process the data, calculating the volume acoustic backscatter strength and the vertical range
    echo = []
    sample_thickness = []
    tvg_correction_factor = []
    nc_files = glob.glob(output_directory + '/*OOI-D*.nc')
    for nc in nc_files:
        tmp_echo = Process(nc)
        tmp_echo.calibrate()  # calculate Sv
        data = tmp_echo.Sv  # extract the Sv dataset
        echo.append(data.sortby('ping_time'))  # append to the echogram list
        sample_thickness.append(tmp_echo.sample_thickness.values)
        tvg_correction_factor.append(tmp_echo.tvg_correction_factor)

    # concatenate the data into a single dataset
    data = xr.concat(echo, dim='ping_time', join='outer')
    data = data.sortby(['frequency', 'ping_time'])
    data['range_bin'] = data['range_bin'].astype(np.int32)
    data['range'] = data['range'].sel(ping_time=data.ping_time[0], drop=True)
    data = data.set_coords('range')

    # recalculate the range to deal with some discrepancies caused by the xarray concat
    thickness = np.max(np.array(sample_thickness), axis=0)
    correction_factor = np.max(tvg_correction_factor)
    range_meter = calc_range(data, thickness, correction_factor)
    data['range'] = data['range'].fillna(range_meter)

    # apply a tilt correction, if applicable
    if tilt_correction:
        range_correction(data, tilt_correction)

    # pass the Sv data back for further processing
    return data
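

# Example usage for process_ek60 (a minimal sketch; the site code, paths and
# date strings below are placeholder values, not values taken from this
# module, and passing 0 for tilt_correction simply skips the correction):
#
#   sv = process_ek60('CE02SHBP', '/data/raw/ek60', '/data/processed/ek60',
#                     ['20200101', '20200131'], 0)
#   if sv is not None:
#       sv.to_netcdf('/data/processed/ek60/Sv_20200101-20200131.nc')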