def _get_eng_params(hdf, align_param=None):
    '''
    Get eng parameters from hdf, and return their average.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :param align_param: Optional parameter to align all found parameters to;
        defaults to the first engine parameter found.
    :returns: Average of the available engine parameters along with its
        frequency. Will return None, None if no engine parameters are
        available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    # Candidate names: N1, N2 and Np for engines 1-4, in that order.
    eng_names = tuple(
        'Eng (%d) %s' % (number, measure)
        for measure in ('N1', 'N2', 'Np')
        for number in (1, 2, 3, 4)
    )

    found = []
    for name in eng_names:
        try:
            param = hdf[name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if not align_param:
            # First parameter found becomes the alignment master.
            align_param = param
        else:
            # Align all other parameters to the provided or first available.
            param.array = align(param, align_param)
        found.append(param)

    if not found:
        return None, None
    # At least one engine parameter is available: average across them.
    stacked = vstack_params(*found)
    return np.ma.average(stacked, axis=0), align_param.frequency
Example #2
0
    def get_params(self, hdf_path, _slice, phase_name):
        '''
        Load control parameters from a copy of the HDF file and align them.

        :param hdf_path: Path to the source HDF file.
        :param _slice: Slice used to create the phase section.
        :param phase_name: Name for the created phase section.
        :returns: Tuple of (pitch_capt, pitch_fo, roll_capt, roll_fo,
            cc_capt, cc_fo, phase); individual parameters may be None if not
            recorded.
        :raises ValueError: if none of the parameters are available.
        '''
        import shutil
        import tempfile
        from hdfaccess.file import hdf_file

        with tempfile.NamedTemporaryFile() as temp_file:
            shutil.copy(hdf_path, temp_file.name)

            # Open the copy, not the original: previously `hdf_path` was
            # opened here, which made the temporary copy dead code.
            with hdf_file(temp_file.name) as hdf:
                pitch_capt = hdf.get('Pitch (Capt)')
                pitch_fo = hdf.get('Pitch (FO)')
                roll_capt = hdf.get('Roll (Capt)')
                roll_fo = hdf.get('Roll (FO)')
                cc_capt = hdf.get('Control Column Force (Capt)')
                cc_fo = hdf.get('Control Column Force (FO)')

        # Use the first available parameter as the alignment reference.
        ref_par = None
        for par in (pitch_capt, pitch_fo, roll_capt, roll_fo, cc_capt,
                    cc_fo):
            if par is not None:
                ref_par = par
                break
        if ref_par is None:
            # Previously this fell through to a NameError; fail clearly.
            raise ValueError("No parameters available in '%s'" % hdf_path)

        phase = S(name=phase_name, frequency=1)
        phase.create_section(_slice)
        phase = phase.get_aligned(ref_par)[0]

        # Align the arrays, usually done in the Nodes. pitch_capt is the
        # reference whenever it is present, so it needs no alignment.
        for par in pitch_fo, roll_capt, roll_fo, cc_capt, cc_fo:
            if par is None:
                continue
            par.array = align(par, ref_par)
            par.hz = ref_par.hz

        return pitch_capt, pitch_fo, roll_capt, roll_fo, cc_capt, cc_fo, phase
def _get_normalised_split_params(hdf):
    '''
    Get split parameters (currently engine power and Groundspeed) from hdf,
    normalise them on a scale from 0-1.0 and return the minimum.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :returns: Minimum of normalised split parameters along with its
        frequency. Will return None, None if no split parameters are
        available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    candidate_names = (
        'Eng (1) N1', 'Eng (2) N1', 'Eng (3) N1', 'Eng (4) N1',
        'Eng (1) N2', 'Eng (2) N2', 'Eng (3) N2', 'Eng (4) N2',
        'Eng (1) Np', 'Eng (2) Np', 'Eng (3) Np', 'Eng (4) Np',
        'Groundspeed', 'Groundspeed (1)', 'Groundspeed (2)',
    )
    reference = None
    available = []
    for name in candidate_names:
        try:
            param = hdf[name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if reference:
            # Align all other parameters to first available.
            # Q: Why not force to 1Hz?
            param.array = align(param, reference)
        else:
            reference = param
        available.append(param)

    if not reference:
        return None, None
    # At least one split parameter is available: normalise the stacked
    # parameters we'll use for splitting the data.
    stacked = vstack_params(*available)
    normalised = normalise(stacked, scale_max=100)
    return np.ma.average(normalised, axis=0), reference.frequency
Example #4
0
 def _generate_graph(self):
     '''
     Return parameter data for the requested file as JSON for AJAX.

     Reads the file path and parameter names from the POST variables, aligns
     all parameters to the first one returned, and responds with a list of
     (index, value) pairs per parameter.
     '''
     postvars = self._parse_post()
     data = []
     with hdf_file(postvars['file_path'][0]) as hdf:
         # hdf.get_params returns a mapping; keep the parameter objects.
         params = list(hdf.get_params(postvars['parameters[]']).values())

     # Align every parameter to the first one. Previously this indexed into
     # and iterated over a single parameter object and then called
     # .values() on a list, raising AttributeError.
     if params:
         align_param = params[0]
         for param in params[1:]:
             param.array = align(param, align_param)

     for param in params:
         data.append(zip(range(len(param.array)), param.array.data.tolist()))
     return self._respond_with_json({'data': data})
def _get_normalised_split_params(hdf):
    '''
    Get split parameters (currently engine power and Groundspeed) from hdf,
    normalise them on a scale from 0-1.0 and return the minimum.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :returns: Minimum of normalised split parameters along with its
        frequency. Will return None, None if no split parameters are
        available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    names = (
        'Eng (1) N1', 'Eng (2) N1', 'Eng (3) N1', 'Eng (4) N1',
        'Eng (1) N2', 'Eng (2) N2', 'Eng (3) N2', 'Eng (4) N2',
        'Eng (1) Np', 'Eng (2) Np', 'Eng (3) Np', 'Eng (4) Np',
        'Eng (1) Fuel Flow', 'Eng (2) Fuel Flow', 'Eng (3) Fuel Flow',
        'Eng (4) Fuel Flow',
        'Groundspeed', 'Groundspeed (1)', 'Groundspeed (2)',
    )
    master = None
    found = []
    for name in names:
        try:
            param = hdf[name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if master:
            # Align all other parameters to the first available.
            # Q: Why not force to 1Hz?
            param.array = align(param, master)
        else:
            master = param
        found.append(param)

    if not master:
        return None, None
    # At least one split parameter is available: normalise each in turn to
    # the range 0-1 so they have equal weight.
    stacked = vstack_params(*found)
    normalised = [normalise(row) for row in stacked]
    # Using a true minimum leads to bias to a zero value. We take the
    # average to allow each parameter equal weight, then (later) seek the
    # minimum.
    return np.ma.average(normalised, axis=0), master.frequency
def _get_normalised_split_params(hdf):
    '''
    Get split parameters (currently engine power and Groundspeed) from hdf,
    normalise them on a scale from 0-1.0 and return the minimum.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :returns: Minimum of normalised split parameters along with its frequency.
        Will return None, None if no split parameters are available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    params = []
    first_split_param = None
    # Candidate parameters used for splitting the data, in priority order.
    split_params = (
        'Eng (1) N1', 'Eng (2) N1', 'Eng (3) N1', 'Eng (4) N1',
        'Eng (1) N2', 'Eng (2) N2', 'Eng (3) N2', 'Eng (4) N2',
        'Eng (1) Np', 'Eng (2) Np', 'Eng (3) Np', 'Eng (4) Np',
        'Eng (1) Fuel Flow', 'Eng (2) Fuel Flow', 'Eng (3) Fuel Flow', 'Eng (4) Fuel Flow',
        'Groundspeed', 'Groundspeed (1)', 'Groundspeed (2)'
    )
    for param_name in split_params:
        try:
            param = hdf[param_name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if first_split_param:
            # Align all other parameters to first available.  #Q: Why not force
            # to 1Hz?
            param.array = align(param, first_split_param)
        else:
            first_split_param = param
        params.append(param)

    if not first_split_param:
        return None, None
    # If there is at least one split parameter available.
    # normalise the parameters we'll use for splitting the data
    stacked_params = vstack_params(*params)
    # We normalise each in turn to the range 0-1 so they have equal weight
    normalised_params = [normalise(i) for i in stacked_params]
    # Using a true minimum leads to bias to a zero value. We take the average
    # to allow each parameter equal weight, then (later) seek the minimum.
    # NOTE(review): despite the '_min' name this is an average; the minimum
    # appears to be sought later by the caller.
    split_params_min = np.ma.average(normalised_params, axis=0)
    return split_params_min, first_split_param.frequency
def get_dt_arrays(hdf, fallback_dt, validation_dt):
    '''
    Build one 1Hz array per datetime component recorded in the HDF.

    :param hdf: hdf_file object providing 'Year'..'Second' parameters.
    :param fallback_dt: datetime used to fill components missing from the
        hdf; if falsy, a missing component raises TimebaseError.
    :param validation_dt: datetime whose year (if present) is used when
        masking invalid Year values.
    :returns: Tuple of (dt_arrays, precise) where precise is False when any
        component had to come from fallback_dt.
    :raises TimebaseError: if a component is unavailable and no fallback_dt
        was provided.
    '''
    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    fallback_dts = []
    if fallback_dt:
        # One datetime per second of recording, for fallback extraction.
        fallback_dts = [fallback_dt + timedelta(seconds=secs)
                        for secs in range(int(hdf.duration))]

    onehz = P(frequency=1)
    dt_arrays = []
    precise = True
    for name in ('Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'):
        param = hdf.get(name)
        if param:
            if name == 'Year':
                year = getattr(validation_dt, 'year', None) or now.year
                param.array = _mask_invalid_years(param.array, year)
            # Do not interpolate date/time parameters to avoid rollover
            # issues.
            array = align(param, onehz, interpolate=False)
            # Hour can legitimately be 0 for up to 1 hour (after midnight);
            # otherwise all-zero values indicate an invalid recording
            # (other than the year 2000 or possibly 2100, no date values
            # can be all 0's).
            zero_is_ok = name == 'Hour' and len(array) < 3600
            if len(array) == 0 or np.ma.count(array) == 0:
                logger.warning("No valid values returned for %s", name)
            elif np.ma.all(array == 0) and not zero_is_ok:
                logger.warning("Only zero values returned for %s", name)
            else:
                # Valid values returned; move on to the next component.
                dt_arrays.append(array)
                continue
        if not fallback_dt:
            raise TimebaseError("Required parameter '%s' not available" % name)
        # Component missing or invalid: take it from the fallback range.
        precise = False
        array = [getattr(dt, name.lower()) for dt in fallback_dts]
        logger.warning(
            "%s not available, using range from %d to %d from fallback_dt %s",
            name, array[0], array[-1], fallback_dt)
        dt_arrays.append(array)
    return dt_arrays, precise
Example #8
0
def get_dt_arrays(hdf, fallback_dt, validation_dt):
    '''
    Build one 1Hz array per datetime component ('Year'..'Second') from the HDF.

    :param hdf: hdf_file object providing the datetime parameters.
    :param fallback_dt: datetime used to fill components missing from the
        hdf; if falsy, a missing component raises TimebaseError.
    :param validation_dt: datetime whose year (if present) is used when
        masking invalid Year values.
    :returns: list of arrays, one per datetime component.
    :raises TimebaseError: if a component is unavailable and no fallback_dt
        was provided.

    NOTE(review): uses xrange, so this variant targets Python 2.
    '''
    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    if fallback_dt:
        # Pre-compute one datetime per second of recording for fallback use.
        fallback_dts = []
        for secs in xrange(0, int(hdf.duration)):
            fallback_dts.append(fallback_dt + timedelta(seconds=secs))

    onehz = P(frequency=1)
    dt_arrays = []
    for name in ('Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'):
        param = hdf.get(name)
        if param:
            if name == 'Year':
                year = getattr(validation_dt, 'year', None) or now.year
                param.array = _mask_invalid_years(param.array, year)
            # do not interpolate date/time parameters to avoid rollover issues
            array = align(param, onehz, interpolate=False)
            if len(array) == 0 or np.ma.count(array) == 0:
                logger.warning("No valid values returned for %s", name)
            elif (np.ma.all(array == 0)) and not (name == 'Hour' and len(array) < 3600): # Hour can be 0 for up to 1 hour (after midnight)
                # Other than the year 2000 or possibly 2100, no date values
                # can be all 0's
                logger.warning("Only zero values returned for %s", name)
            else:
                # values returned, continue
                dt_arrays.append(array)
                continue
        if fallback_dt:
            # Component missing or invalid: take it from the fallback range.
            array = [getattr(dt, name.lower()) for dt in fallback_dts]
            logger.warning("%s not available, using range from %d to %d from fallback_dt %s",
                           name, array[0], array[-1], fallback_dt)
            dt_arrays.append(array)
            continue
        else:
            raise TimebaseError("Required parameter '%s' not available" % name)
    return dt_arrays
def _get_eng_params(hdf, align_param=None):
    '''
    Get eng parameters from hdf, and return their average.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :param align_param: Optional parameter to align all found parameters to;
        defaults to the first engine parameter found.
    :returns: Average of the available engine parameters along with its
        frequency. Will return None, None if no engine parameters are
        available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    names = (
        'Eng (1) N1', 'Eng (2) N1', 'Eng (3) N1', 'Eng (4) N1',
        'Eng (1) N2', 'Eng (2) N2', 'Eng (3) N2', 'Eng (4) N2',
        'Eng (1) Np', 'Eng (2) Np', 'Eng (3) Np', 'Eng (4) Np',
        'Eng (1) Fuel Flow', 'Eng (2) Fuel Flow', 'Eng (3) Fuel Flow',
        'Eng (4) Fuel Flow',
    )

    found = []
    for name in names:
        try:
            param = hdf[name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if not align_param:
            # First available parameter becomes the alignment master.
            align_param = param
        else:
            # Align all other parameters to provided param or first
            # available.
            param.array = align(param, align_param)
        found.append(param)

    if not found:
        return None, None
    # At least one engine parameter is available: average across them.
    stacked = vstack_params(*found)
    return np.ma.average(stacked, axis=0), align_param.frequency
Example #10
0
def _get_normalised_split_params(hdf):
    '''
    Get split parameters (currently engine power and Groundspeed) from hdf,
    normalise them on a scale from 0-1.0 and return the minimum.

    :param hdf: hdf_file object.
    :type hdf: hdfaccess.file.hdf_file
    :returns: Minimum of normalised split parameters along with its frequency.
        Will return None, None if no split parameters are available.
    :rtype: (None, None) or (np.ma.masked_array, float)
    '''
    params = []
    first_split_param = None
    # Candidate parameters used for splitting the data, in priority order.
    for param_name in ('Eng (1) N1', 'Eng (2) N1', 'Eng (3) N1', 'Eng (4) N1',
                       'Eng (1) N2', 'Eng (2) N2', 'Eng (3) N2', 'Eng (4) N2',
                       'Eng (1) Np', 'Eng (2) Np', 'Eng (3) Np', 'Eng (4) Np',
                       'Groundspeed', 'Groundspeed (1)', 'Groundspeed (2)'):
        try:
            param = hdf[param_name]
        except KeyError:
            # Parameter not recorded in this file.
            continue
        if first_split_param:
            # Align all other parameters to first available.  #Q: Why not force
            # to 1Hz?
            param.array = align(param, first_split_param)
        else:
            first_split_param = param
        params.append(param)

    if not first_split_param:
        return None, None
    # If there is at least one split parameter available.
    # normalise the parameters we'll use for splitting the data
    stacked_params = vstack_params(*params)
    normalised_params = normalise(stacked_params, scale_max=100)
    # NOTE(review): despite the '_min' name this is an average across the
    # parameters; the minimum appears to be sought later by the caller.
    split_params_min = np.ma.average(normalised_params, axis=0)
    return split_params_min, first_split_param.frequency
Example #11
0
def plot_parameters(params, axes, title=''):
    '''
    Plot resulting parameters.

    :param params: mapping of parameter name to parameter object; each
        parameter exposes array, frequency (and hz), name, units and
        data_type attributes.
    :param axes: mapping of 1-based subplot index to a parameter name or
        list of names to draw on that axis; axes[1][0] supplies the
        reference parameter for the top subplot.
    :param title: prefix for the plot window title.

    NOTE(review): Python 2 code (print statements, iteritems, basestring).
    '''
    print 'Plotting parameters.'
    max_freq = 0
    min_freq = float('inf')

    # Find the highest and lowest sample rates amongst the parameters.
    for name, param in params.iteritems():
        max_freq = max(max_freq, param.frequency)
        min_freq = min(min_freq, param.frequency)

    # Pick a parameter at the highest frequency (used as the alignment
    # reference below) and note the length of a lowest-frequency array.
    for param_name, param in params.iteritems():
        if max_freq == param.frequency:
            param_max_freq = param
        if param.frequency == min_freq:
            param_min_freq_len = len(param.array)

    # Truncate parameter arrays to successfully align them since the file
    # has not been through split sections.
    for param_name, param in params.iteritems():
        array_len = param_min_freq_len * (param.frequency / min_freq)
        if array_len != len(param.array):
            print 'Truncated %s from %d to %d for display purposes' % (
                param_name, len(param.array), array_len)
            param.array = param.array[:array_len]

    #==========================================================================
    # Plot Preparation
    #==========================================================================

    # Invariant parameters are identified here. They could be inserted into
    # the plot configuration file, but this is more straightforward.
    plt.rc('axes', grid=True)
    plt.rc('grid', color='0.75', linestyle='-', linewidth=0.5)

    # These items are altered during the plot, so not suited to plt.rc setup
    prop = fm.FontProperties(size=10)
    legendprops = dict(shadow=True, fancybox=True, markerscale=0.5, prop=prop)

    # Start by making a big clean canvas
    fig = plt.figure(facecolor='white', figsize=(8, 6))
    fig.canvas.set_window_title("%s %s" % (
        title, datetime.now().strftime('%A, %d %B %Y at %X')))

    # Add the "reference" altitude plot, and title this
    # (If we title the empty plot, it acquires default 0-1 scales)
    param_name = axes[1][0]
    param = params[param_name]
    array = align(param, param_max_freq)
    first_axis = fig.add_subplot(len(axes), 1, 1)
    first_axis.plot(array, label=param_name)

    ####plt.title("Processed on %s" %
    ####          datetime.now().strftime('%A, %d %B %Y at %X'))
    setp(first_axis.get_xticklabels(), visible=False)

    # Now plot the additional data from the AXIS_N lists at the top of the lfl
    for index, param_names in axes.iteritems():
        if index == 1:
            continue
        axis = fig.add_subplot(len(axes), 1, index, sharex=first_axis)
        # Avoid iterating over string
        if isinstance(param_names, basestring):
            param_names = [param_names]
        for param_name in param_names:
            param = params[param_name]
            # Data is aligned in time but the samples are not interpolated so
            # that scaling issues can be easily addressed
            label_text = param.name
            args = []
            if np.ma.all(param.array.mask):
                # Nothing to draw; note the fully-masked state in the legend.
                args.append([])
                label_text += ' <ALL MASKED>'
            elif param.data_type == 'ASCII' or param.array.dtype.char == 'S':
                # Text parameters cannot be plotted numerically.
                print "Warning: ASCII not supported. Param '%s'" % param
                args.append([])
                label_text += ' <ASCII NOT DRAWN>'
            elif param.hz != max_freq:
                # Data is aligned in time but the samples are not
                # interpolated so that scaling issues can be easily addressed
                args.append(np.arange(len(param.array)) * (max_freq / param.hz))
                args.append(param.array)
            else:
                args.append(param.array)

            if param.units is None:
                label_text += " [No units]"
            else:
                label_text += " : " + param.units
            values_mapping = getattr(param.array, 'values_mapping', None)
            if values_mapping:
                # Show the state name mapping for multi-state parameters.
                label_text += '\n%s' % values_mapping
            axis.plot(*args, label=label_text)
            axis.legend(loc='upper right', **legendprops)
            if index < len(axes):
                setp(axis.get_xticklabels(), visible=False)
        plt.legend(prop={'size': 10})
    plt.show()
def _calculate_start_datetime(hdf, fallback_dt=None):
    """
    Calculate start datetime.

    :param hdf: Flight data HDF file
    :type hdf: hdf_access object
    :param fallback_dt: Used to replace elements of datetimes which are not
        available in the hdf file (e.g. YEAR not being recorded)
    :type fallback_dt: datetime

    HDF params used:
    :Year: Optional (defaults to 1970)
    :Month: Optional (defaults to 1)
    :Day: Optional (defaults to 1)
    :Hour: Required
    :Minute: Required
    :Second: Required

    :raises TimebaseError: if required parameters are missing and no
        fallback_dt is provided, if the timebase cannot be corrected out of
        the future, or if it is older than settings.MAX_TIMEBASE_AGE days.
    """
    now = datetime.now()
    if fallback_dt is not None:
        assert fallback_dt < now, \
               ("Fallback time '%s' in the future is not allowed. Current time "
                "is '%s'." % (fallback_dt, now))
    # align required parameters to 1Hz
    onehz = P(frequency=1)
    dt_arrays = []
    for name in ('Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'):
        param = hdf.get(name)
        if param:
            if name == 'Year':
                year = getattr(fallback_dt, 'year', None) or now.year
                param.array = _mask_invalid_years(param.array, year)
            # do not interpolate date/time parameters to avoid rollover issues
            array = align(param, onehz, interpolate=False)
            if len(array) == 0 or np.ma.count(array) == 0 or np.ma.all(array == 0):
                # Other than the year 2000 or possibly 2100, no date values
                # can be all 0's
                logger.warning("No valid values returned for %s", name)
            else:
                # values returned, continue
                dt_arrays.append(array)
                continue
        if fallback_dt:
            # Single-element array; repeated to the full length below.
            array = [getattr(fallback_dt, name.lower())]
            logger.warning("%s not available, using %d from fallback_dt %s",
                           name, array[0], fallback_dt)
            dt_arrays.append(array)
            continue
        else:
            raise TimebaseError("Required parameter '%s' not available" % name)

    length = max(len(array) for array in dt_arrays)
    if length > 1:
        # ensure all arrays are the same length
        for n, arr in enumerate(dt_arrays):
            if len(arr) == 1:
                # repeat to the correct size
                dt_arrays[n] = np.repeat(arr, length)
            elif len(arr) != length:
                raise ValueError("After align, all arrays should be the same "
                                 "length")

    # establish timebase for start of data
    try:
        timebase = calculate_timebase(*dt_arrays)
    except (KeyError, ValueError) as err:
        raise TimebaseError("Error with timestamp values: %s" % err)

    if timebase > now:
        # Flight Data Analysis in the future is a challenge, lets see if we
        # can correct this first...
        if 'Day' not in hdf:
            # unlikely to have year, month or day.
            # Scenario: that fallback_dt is of the current day but recorded
            # time is in the future of the fallback time, therefore resulting
            # in a futuristic date.
            a_day_before = timebase - relativedelta(days=1)
            if a_day_before < now:
                logger.info("Timebase was in the future, using a day before "
                            "satisfies requirements")
                return a_day_before
            # continue to take away a Year
        if 'Year' not in hdf:
            # remove a year from the timebase
            a_year_before = timebase - relativedelta(years=1)
            if a_year_before < now:
                # Bug fix: this log message previously said "a day before",
                # copy-pasted from the day-correction branch above.
                logger.info("Timebase was in the future, using a year before "
                            "satisfies requirements")
                return a_year_before

        # Bug fix: timebase was previously passed as a second positional
        # argument to TimebaseError instead of being interpolated into the
        # message, producing the literal string "Timebase '%s' is in the
        # future." in the exception.
        raise TimebaseError("Timebase '%s' is in the future." % timebase)

    if settings.MAX_TIMEBASE_AGE and \
       timebase < (now - timedelta(days=settings.MAX_TIMEBASE_AGE)):
        # Only allow recent timebases.
        error_msg = "Timebase '%s' older than the allowed '%d' days." % (
            timebase, settings.MAX_TIMEBASE_AGE)
        raise TimebaseError(error_msg)

    logger.info("Valid timebase identified as %s", timebase)
    return timebase
    def derive(self,
               gl=M('Gear (L) On Ground'),
               gr=M('Gear (R) On Ground'),
               vert_spd=P('Vertical Speed'),
               torque=P('Eng (*) Torque Avg'),
               ac_series=A('Series'),
               collective=P('Collective')):
        '''
        Determine gear-on-ground state.

        Preference order: combine left/right gear squat switches when both
        are recorded; use a single switch if only one exists; otherwise
        estimate grounded state from low vertical speed and low torque (and
        additionally low collective for the Columbia 234 series).

        :raises NotImplementedError: if no usable parameter combination is
            available (should be prevented by can_operate).
        '''
        if gl and gr:
            # Offset difference between the two switches, in samples.
            delta = abs((gl.offset - gr.offset) * gl.frequency)
            # NOTE(review): this triggers when delta is near 0 or near a
            # whole sample period — confirm this is the intended test for
            # "samples close together".
            if 0.75 < delta or delta < 0.25:
                # If the samples of the left and right gear are close together,
                # the best representation is to map them onto a single
                # parameter in which we accept that either wheel on the ground
                # equates to gear on ground.
                self.array = np.ma.logical_or(gl.array, gr.array)
                self.frequency = gl.frequency
                self.offset = gl.offset
                return
            else:
                # If the parameters are not co-located, then
                # merge_two_parameters creates the best combination possible.
                self.array, self.frequency, self.offset = merge_two_parameters(
                    gl, gr)
                return
        elif gl or gr:
            # Only one squat switch recorded; use it directly.
            gear = gl or gr
            self.array = gear.array
            self.frequency = gear.frequency
            self.offset = gear.offset
        elif vert_spd and torque:
            vert_spd_limit = 100.0
            torque_limit = 30.0
            if ac_series and ac_series.value == 'Columbia 234':
                # Series-specific thresholds; collective is also considered.
                vert_spd_limit = 125.0
                torque_limit = 22.0
                collective_limit = 15.0

                vert_spd_array = align(
                    vert_spd,
                    torque) if vert_spd.hz != torque.hz else vert_spd.array
                collective_array = align(
                    collective,
                    torque) if collective.hz != torque.hz else collective.array

                # Smooth the signals before thresholding.
                vert_spd_array = moving_average(vert_spd_array)
                torque_array = moving_average(torque.array)
                collective_array = moving_average(collective_array)

                roo_vs_array = runs_of_ones(
                    abs(vert_spd_array) < vert_spd_limit, min_samples=1)
                roo_torque_array = runs_of_ones(torque_array < torque_limit,
                                                min_samples=1)
                roo_collective_array = runs_of_ones(
                    collective_array < collective_limit, min_samples=1)

                # Grounded when all three conditions hold simultaneously.
                vs_and_torque = slices_and(roo_vs_array, roo_torque_array)
                grounded = slices_and(vs_and_torque, roo_collective_array)

                array = np_ma_zeros_like(vert_spd_array)
                # Drop very short grounded runs — presumably noise; verify
                # slices_remove_small_slices(count=2) semantics.
                for _slice in slices_remove_small_slices(grounded, count=2):
                    array[_slice] = 1
                array.mask = vert_spd_array.mask | torque_array.mask
                array.mask = array.mask | collective_array.mask
                self.array = nearest_neighbour_mask_repair(array)
                self.frequency = torque.frequency
                self.offset = torque.offset

            else:
                vert_spd_array = align(
                    vert_spd,
                    torque) if vert_spd.hz != torque.hz else vert_spd.array
                # Introduced for S76 and Bell 212 which do not have Gear On Ground available

                vert_spd_array = moving_average(vert_spd_array)
                torque_array = moving_average(torque.array)

                grounded = slices_and(
                    runs_of_ones(abs(vert_spd_array) < vert_spd_limit,
                                 min_samples=1),
                    runs_of_ones(torque_array < torque_limit, min_samples=1))

                array = np_ma_zeros_like(vert_spd_array)
                for _slice in slices_remove_small_slices(grounded, count=2):
                    array[_slice] = 1
                array.mask = vert_spd_array.mask | torque_array.mask
                self.array = nearest_neighbour_mask_repair(array)
                self.frequency = torque.frequency
                self.offset = torque.offset

        else:
            # should not get here if can_operate is correct
            raise NotImplementedError()
    def derive(self,
               gl=M('Gear (L) On Ground'),
               gr=M('Gear (R) On Ground'),
               vert_spd=P('Vertical Speed'),
               torque=P('Eng (*) Torque Avg'),
               ac_series=A('Series'),
               collective=P('Collective')):
        '''
        Determine gear-on-ground state.

        Preference order: combine left/right gear squat switches when both
        are recorded; use a single switch if only one exists; otherwise
        estimate grounded state from low vertical speed and low torque (and
        additionally low collective for the Columbia 234 series).

        :raises NotImplementedError: if no usable parameter combination is
            available (should be prevented by can_operate).
        '''
        if gl and gr:
            # Offset difference between the two switches, in samples.
            delta = abs((gl.offset - gr.offset) * gl.frequency)
            # NOTE(review): this triggers when delta is near 0 or near a
            # whole sample period — confirm this is the intended test for
            # "samples close together".
            if 0.75 < delta or delta < 0.25:
                # If the samples of the left and right gear are close together,
                # the best representation is to map them onto a single
                # parameter in which we accept that either wheel on the ground
                # equates to gear on ground.
                self.array = np.ma.logical_or(gl.array, gr.array)
                self.frequency = gl.frequency
                self.offset = gl.offset
                return
            else:
                # If the parameters are not co-located, then
                # merge_two_parameters creates the best combination possible.
                self.array, self.frequency, self.offset = merge_two_parameters(gl, gr)
                return
        elif gl or gr:
            # Only one squat switch recorded; use it directly.
            gear = gl or gr
            self.array = gear.array
            self.frequency = gear.frequency
            self.offset = gear.offset
        elif vert_spd and torque:
            vert_spd_limit = 100.0
            torque_limit = 30.0
            if ac_series and ac_series.value == 'Columbia 234':
                # Series-specific thresholds; collective is also considered.
                vert_spd_limit = 125.0
                torque_limit = 22.0
                collective_limit = 15.0

                vert_spd_array = align(vert_spd, torque) if vert_spd.hz != torque.hz else vert_spd.array
                collective_array = align(collective, torque) if collective.hz != torque.hz else collective.array

                # Smooth the signals before thresholding.
                vert_spd_array = moving_average(vert_spd_array)
                torque_array = moving_average(torque.array)
                collective_array = moving_average(collective_array)

                roo_vs_array = runs_of_ones(abs(vert_spd_array) < vert_spd_limit, min_samples=1)
                roo_torque_array = runs_of_ones(torque_array < torque_limit, min_samples=1)
                roo_collective_array = runs_of_ones(collective_array < collective_limit, min_samples=1)

                # Grounded when all three conditions hold simultaneously.
                vs_and_torque = slices_and(roo_vs_array, roo_torque_array)
                grounded = slices_and(vs_and_torque, roo_collective_array)

                array = np_ma_zeros_like(vert_spd_array)
                # Drop very short grounded runs — presumably noise; verify
                # slices_remove_small_slices(count=2) semantics.
                for _slice in slices_remove_small_slices(grounded, count=2):
                    array[_slice] = 1
                array.mask = vert_spd_array.mask | torque_array.mask
                array.mask = array.mask | collective_array.mask
                self.array = nearest_neighbour_mask_repair(array)
                self.frequency = torque.frequency
                self.offset = torque.offset

            else:
                vert_spd_array = align(vert_spd, torque) if vert_spd.hz != torque.hz else vert_spd.array
                # Introduced for S76 and Bell 212 which do not have Gear On Ground available

                vert_spd_array = moving_average(vert_spd_array)
                torque_array = moving_average(torque.array)

                grounded = slices_and(runs_of_ones(abs(vert_spd_array) < vert_spd_limit, min_samples=1),
                                      runs_of_ones(torque_array < torque_limit, min_samples=1))

                array = np_ma_zeros_like(vert_spd_array)
                for _slice in slices_remove_small_slices(grounded, count=2):
                    array[_slice] = 1
                array.mask = vert_spd_array.mask | torque_array.mask
                self.array = nearest_neighbour_mask_repair(array)
                self.frequency = torque.frequency
                self.offset = torque.offset

        else:
            # should not get here if can_operate is correct
            raise NotImplementedError()
    def derive(self,
               vert_spd=P('Vertical Speed'),
               torque=P('Eng (*) Torque Avg'),
               ac_series=A('Series'),
               collective=P('Collective')):
        '''
        Estimate grounded state from low vertical speed and low torque (and
        additionally low collective for the Columbia 234 series).
        '''
        vert_spd_limit = 100.0
        torque_limit = 30.0
        if ac_series and ac_series.value == 'Columbia 234':
            # Series-specific thresholds; collective is also considered.
            vert_spd_limit = 125.0
            torque_limit = 22.0
            collective_limit = 15.0

            vert_spd_array = align(
                vert_spd,
                torque) if vert_spd.hz != torque.hz else vert_spd.array
            collective_array = align(
                collective,
                torque) if collective.hz != torque.hz else collective.array

            # Smooth the signals before thresholding.
            vert_spd_array = moving_average(vert_spd_array)
            torque_array = moving_average(torque.array)
            collective_array = moving_average(collective_array)

            roo_vs_array = runs_of_ones(abs(vert_spd_array) < vert_spd_limit,
                                        min_samples=1)
            roo_torque_array = runs_of_ones(torque_array < torque_limit,
                                            min_samples=1)
            roo_collective_array = runs_of_ones(
                collective_array < collective_limit, min_samples=1)

            # Grounded when all three conditions hold simultaneously.
            vs_and_torque = slices_and(roo_vs_array, roo_torque_array)
            grounded = slices_and(vs_and_torque, roo_collective_array)

            array = np_ma_zeros_like(vert_spd_array)
            # Drop very short grounded runs — presumably noise; verify
            # slices_remove_small_slices(count=2) semantics.
            for _slice in slices_remove_small_slices(grounded, count=2):
                array[_slice] = 1
            array.mask = vert_spd_array.mask | torque_array.mask
            array.mask = array.mask | collective_array.mask
            self.array = nearest_neighbour_mask_repair(array)
            self.frequency = torque.frequency
            self.offset = torque.offset

        else:
            vert_spd_array = align(
                vert_spd,
                torque) if vert_spd.hz != torque.hz else vert_spd.array
            # Introduced for S76 and Bell 212 which do not have Gear On Ground available

            vert_spd_array = moving_average(vert_spd_array)
            torque_array = moving_average(torque.array)

            grounded = slices_and(
                runs_of_ones(abs(vert_spd_array) < vert_spd_limit,
                             min_samples=1),
                runs_of_ones(torque_array < torque_limit, min_samples=1))

            array = np_ma_zeros_like(vert_spd_array)
            for _slice in slices_remove_small_slices(grounded, count=2):
                array[_slice] = 1
            array.mask = vert_spd_array.mask | torque_array.mask
            self.array = nearest_neighbour_mask_repair(array)
            self.frequency = torque.frequency
            self.offset = torque.offset
Example #16
0
def _calculate_start_datetime(hdf, fallback_dt=None):
    """
    Calculate the start datetime of the flight data within an HDF file.

    :param hdf: Flight data HDF file
    :type hdf: hdf_access object
    :param fallback_dt: Used to replace elements of datetimes which are not
        available in the hdf file (e.g. YEAR not being recorded). Must be in
        the past; a naive datetime is assumed to be UTC.
    :type fallback_dt: datetime
    :returns: Timebase for the start of the data.
    :rtype: datetime
    :raises TimebaseError: If required parameters are not available and
        fallback_dt is not provided, if the timestamp values cannot be
        combined into a timebase, if the timebase is in the future and cannot
        be corrected, or if it is older than settings.MAX_TIMEBASE_AGE days.

    HDF params used:
    :Year: Optional (defaults to 1970)
    :Month: Optional (defaults to 1)
    :Day: Optional (defaults to 1)
    :Hour: Required
    :Minute: Required
    :Second: Required
    """
    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    if fallback_dt is not None:
        if (fallback_dt.tzinfo is None or
                fallback_dt.tzinfo.utcoffset(fallback_dt) is None):
            # Assume a naive fallback_dt is UTC.
            fallback_dt = fallback_dt.replace(tzinfo=pytz.utc)
        assert fallback_dt < now, (
            "Fallback time '%s' in the future is not allowed. Current time "
            "is '%s'." % (fallback_dt, now))

    # Align required parameters to 1Hz to build per-second datetime arrays.
    onehz = P(frequency=1)
    dt_arrays = []
    for name in ('Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'):
        param = hdf.get(name)
        if param:
            if name == 'Year':
                # Mask obviously invalid recorded years relative to the
                # expected year (from fallback_dt if given, else now).
                year = getattr(fallback_dt, 'year', None) or now.year
                param.array = _mask_invalid_years(param.array, year)
            # Do not interpolate date/time parameters to avoid rollover
            # issues at midnight/month/year boundaries.
            array = align(param, onehz, interpolate=False)
            if len(array) == 0 or np.ma.count(array) == 0 \
                    or np.ma.all(array == 0):
                # Other than the year 2000 or possibly 2100, no date values
                # can be all 0's.
                logger.warning("No valid values returned for %s", name)
            else:
                # Valid values returned; use this parameter as-is.
                dt_arrays.append(array)
                continue
        # Parameter missing or entirely invalid: use fallback_dt if we can.
        if fallback_dt:
            array = [getattr(fallback_dt, name.lower())]
            logger.warning("%s not available, using %d from fallback_dt %s",
                           name, array[0], fallback_dt)
            dt_arrays.append(array)
        else:
            raise TimebaseError("Required parameter '%s' not available" % name)

    # Ensure all arrays are the same length: single fallback values are
    # repeated to match the longest aligned array.
    length = max(len(a) for a in dt_arrays)
    if length > 1:
        for n, arr in enumerate(dt_arrays):
            if len(arr) == 1:
                dt_arrays[n] = np.repeat(arr, length)
            elif len(arr) != length:
                raise ValueError("After align, all arrays should be the same "
                                 "length")

    # Establish timebase for start of data.
    try:
        timebase = calculate_timebase(*dt_arrays)
    except (KeyError, ValueError) as err:
        raise TimebaseError("Error with timestamp values: %s" % err)

    if timebase > now:
        # Flight Data Analysis in the future is a challenge, lets see if we
        # can correct this first...
        if 'Day' not in hdf:
            # Unlikely to have year, month or day.
            # Scenario: fallback_dt is of the current day but the recorded
            # time is later than the fallback time, therefore resulting in a
            # futuristic date.
            a_day_before = timebase - relativedelta(days=1)
            if a_day_before < now:
                logger.info(
                    "Timebase was in the future, using a DAY before "
                    "satisfies requirements: %s", a_day_before)
                return a_day_before
            # Otherwise continue to take away a year.
        if 'Year' not in hdf:
            # Remove a year from the timebase.
            a_year_before = timebase - relativedelta(years=1)
            if a_year_before < now:
                logger.info("Timebase was in the future, using a YEAR before "
                            "satisfies requirements: %s", a_year_before)
                return a_year_before

        # Fix: the message was previously passed logging-style
        # ("...'%s'...", timebase), leaving the placeholder unformatted
        # in the raised exception; format it explicitly instead.
        raise TimebaseError("Timebase '%s' is in the future." % timebase)

    if settings.MAX_TIMEBASE_AGE and \
       timebase < (now - timedelta(days=settings.MAX_TIMEBASE_AGE)):
        # Only allow recent timebases.
        raise TimebaseError("Timebase '%s' older than the allowed '%d' days." %
                            (timebase, settings.MAX_TIMEBASE_AGE))

    logger.info("Valid timebase identified as %s", timebase)
    return timebase