def test___init__immutable_cache_param_list(self):
     temp = 'temp_new_file.hdf5'
     if os.path.exists(temp):
         os.remove(temp)
     hdf_a = hdf_file(temp, create=True)
     hdf_a.cache_param_list.append('test')
     hdf_b = hdf_file(temp, create=True)
     self.assertEqual(hdf_b.cache_param_list, [])
     self.assertNotEqual(id(hdf_a.cache_param_list), id(hdf_b.cache_param_list))
def concat_hdf(hdf_paths, dest=None):
    '''
    Takes in a list of HDF file paths and concatenates the parameter
    datasets which match the path 'series/<Param Name>/data'. The first file
    in the list of paths is the template for the output file, with only the
    'series/<Param Name>/data' datasets being replaced with the concatenated
    versions.

    :param hdf_paths: File paths.
    :type hdf_paths: list of strings
    :param dest: optional destination path; if not provided, the concatenated
                 output remains in a temporary copy of the first path.
    :type dest: str
    :return: path to concatenated hdf file.
    :rtype: str
    '''
    # copy hdf to temp area to build upon
    hdf_master_path = copy_file(hdf_paths[0])

    with hdf_file(hdf_master_path) as hdf_master:
        master_keys = hdf_master.keys()
        for hdf_path in hdf_paths[1:]:
            with hdf_file(hdf_path) as hdf:
                # check that all parameters match (avoids mismatching array lengths)
                param_keys = hdf.keys()
                assert set(param_keys) == set(master_keys)
                logging.debug("Copying parameters from file %s", hdf_path)
                for param_name in param_keys:
                    param = hdf[param_name]
                    master_param = hdf_master[param_name]
                    assert param.frequency == master_param.frequency
                    assert param.offset == master_param.offset
                    assert param.units == master_param.units
                    # join arrays together
                    master_param.array = np.ma.concatenate(
                        (master_param.raw_array, param.raw_array))
                    # re-save parameter
                    hdf_master[param_name] = master_param
                # extend the master's duration
                hdf_master.duration += hdf.duration
            #endwith
            logging.debug("Completed extending parameters from %s", hdf_path)
        #endfor
    #endwith

    if dest:
        shutil.move(hdf_master_path, dest)
        return dest
    else:
        return hdf_master_path
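
# Usage sketch (illustrative; the file names are hypothetical): join two
# consecutive recordings which share the same parameter set. The first path
# acts as the template, so every file must contain identical parameter names.
combined_path = concat_hdf(['flight_part_1.hdf5', 'flight_part_2.hdf5'],
                           dest='flight_combined.hdf5')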
    def setUp(self):
        self.hdf_path = os.path.join(TEST_DATA_DIR, 'test_hdf_access.hdf')
        hdf = h5py.File(self.hdf_path, 'w')
        series = hdf.create_group('series')
        self.param_name = 'TEST_PARAM10'
        param_group = series.create_group(self.param_name)
        self.param_frequency = 2
        self.param_supf_offset = 1.5
        self.param_arinc_429 = True
        param_group.attrs['frequency'] = self.param_frequency
        param_group.attrs['supf_offset'] = self.param_supf_offset
        param_group.attrs['arinc_429'] = self.param_arinc_429
        param_group.attrs['lfl'] = 1
        self.param_data = np.arange(100)
        param_group.create_dataset('data', data=self.param_data)
        self.masked_param_name = 'TEST_PARAM11'
        masked_param_group = series.create_group(self.masked_param_name)
        self.masked_param_frequency = 4
        self.masked_param_supf_offset = 2.5
        masked_param_group.attrs['frequency'] = self.masked_param_frequency
        masked_param_group.attrs['supf_offset'] = self.masked_param_supf_offset
        self.param_mask = [bool(random.randint(0, 1)) for x in range(len(self.param_data))]
        masked_param_group.create_dataset('data', data=self.param_data)
        masked_param_group.create_dataset('mask', data=self.param_mask)
        self.masked_param_submask_arrays = np.array([[False, True],
                                                     [True, False],
                                                     [False, False]])
        self.masked_param_submask_map = {'mask1': 0, 'mask2': 1}
        masked_param_group.attrs['submasks'] = \
            simplejson.dumps(self.masked_param_submask_map)
        masked_param_group.create_dataset(
            'submasks', data=self.masked_param_submask_arrays)

        hdf.close()
        self.hdf_file = hdf_file(self.hdf_path)
    def test_split_segments_multiple_types(self, settings):
        '''
        Test data has multiple segments of differing segment types.
        Test data has already been validated
        '''
        # Overriding MINIMUM_FAST_DURATION.
        settings.AIRSPEED_THRESHOLD = 80
        settings.AIRSPEED_THRESHOLD_TIME = 3 * 60
        settings.HEADING_CHANGE_TAXI_THRESHOLD = 60
        settings.MINIMUM_SPLIT_DURATION = 100
        settings.MINIMUM_FAST_DURATION = 0
        settings.MINIMUM_SPLIT_PARAM_VALUE = 0.175
        settings.HEADING_RATE_SPLITTING_THRESHOLD = 0.1
        settings.MAX_TIMEBASE_AGE = 365 * 10
        settings.MIN_FAN_RUNNING = 10

        hdf_path = os.path.join(test_data_path,
                                "split_segments_multiple_types.hdf5")
        temp_path = copy_file(hdf_path)
        hdf = hdf_file(temp_path)
        self.maxDiff = None
        segment_tuples = split_segments(hdf, {})
        self.assertEqual(len(segment_tuples),
                         16,
                         msg="Unexpected number of segments detected")
        segment_types = tuple(x[0] for x in segment_tuples)
        self.assertEqual(
            segment_types,
            ('STOP_ONLY', 'START_ONLY', 'START_AND_STOP', 'START_AND_STOP',
             'START_AND_STOP', 'START_AND_STOP', 'STOP_ONLY', 'START_AND_STOP',
             'STOP_ONLY', 'START_ONLY', 'START_ONLY', 'START_AND_STOP',
             'START_ONLY', 'START_AND_STOP', 'START_AND_STOP', 'START_ONLY'))
def plot_essential(hdf_path):
    """
    Plot the essential parameters for flight analysis.
    
    Assumes hdf_path file contains the parameter series:
    Frame Counter, Airspeed, Altitude STD, Heading
    
    show() is to be called elsewhere (from matplotlib.pyplot import show)
    
    :param hdf_path: Path to HDF file.
    :type hdf_path: string
    """
    fig = plt.figure()  ##figsize=(10,8))
    plt.title(os.path.basename(hdf_path))

    with hdf_file(hdf_path) as hdf:
        ax1 = fig.add_subplot(4, 1, 1)
        #ax1.set_title('Frame Counter')
        ax1.plot(hdf['Frame Counter'].array, 'k--')
        ax2 = fig.add_subplot(4, 1, 2)
        ax2.plot(hdf['Airspeed'].array, 'r-')
        ax3 = fig.add_subplot(4, 1, 3, sharex=ax2)
        ax3.plot(hdf['Altitude STD'].array, 'g-')
        ax4 = fig.add_subplot(4, 1, 4, sharex=ax2)
        ax4.plot(hdf['Heading'].array, 'b-')
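
# Usage sketch (the path is hypothetical): plot_essential() leaves the call to
# show() to the caller, as noted in its docstring.
from matplotlib.pyplot import show

plot_essential('example_flight.hdf5')
show()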
    def test_split_segments_data_3(self):
        '''Splits on both Engine and Heading parameters.'''
        hdf_path = os.path.join(test_data_path, "split_segments_3.hdf5")
        temp_path = copy_file(hdf_path)
        hdf = hdf_file(temp_path)

        segment_tuples = split_segments(hdf, {})

        #for a, e in zip(segment_tuples, expected):
            #print(a,e,a==e)

        self.assertEqual(segment_tuples,
                         [('START_AND_STOP', slice(0, 3989.0, None), 0),
                          ('START_AND_STOP', slice(3989.0, 7049.0, None), 1),
                          ('START_AND_STOP', slice(7049.0, 9569.0, None), 1),
                          ('START_AND_STOP', slice(9569.0, 12889.0, None), 1),
                          ('START_AND_STOP', slice(12889.0, 15867.0, None), 1),
                          ('START_AND_STOP', slice(15867.0, 18526.0, None), 3),
                          ('START_AND_STOP', slice(18526.0, 21726.0, None), 2),
                          ('START_AND_STOP', slice(21726.0, 24209.0, None), 2),
                          ('START_AND_STOP', slice(24209.0, 26607.0, None), 1),
                          ('START_AND_STOP', slice(26607.0, 28534.0, None), 3),
                          ('START_AND_STOP', slice(28534.0, 30875.0, None), 2),
                          ('START_AND_STOP', slice(30875.0, 33488.0, None), 3),
                          ('NO_MOVEMENT', slice(33488.0, 33680.0, None), 0),])
    def _design(self):
        '''
        '''
        form = FieldStorage(
            self.rfile,
            headers=self.headers,
            environ={'REQUEST_METHOD': 'POST'},
        )

        # Handle uploading of an HDF file:
        file_upload = form['hdf_file']
        if not file_upload.filename:
            self._index(error='Please select a file to upload.')
            return
        # Create a temporary file for the upload:
        file_desc, file_path = mkstemp()
        file_obj = os.fdopen(file_desc, 'w')
        file_obj.write(file_upload.file.read())
        file_obj.close()
        try:
            with hdf_file(file_path) as hdf_file_obj:
                parameter_names = hdf_file_obj.keys()
        except IOError:
            self._index(error='Please select a valid HDF file.')
            return

        self._respond_with_template('design.html', {
            'parameter_names': parameter_names,
            'file_path': file_path,
        })
 def setUp(self):
     self.hdf_path = os.path.join(TEST_DATA_DIR, 'test_hdf_access.hdf')
     hdf = h5py.File(self.hdf_path, 'w')
     series = hdf.create_group('series')
     self.param_name = 'TEST_PARAM10'
     param_group = series.create_group(self.param_name)
     self.param_frequency = 2
     self.param_supf_offset = 1.5
     self.param_arinc_429 = True
     param_group.attrs['frequency'] = self.param_frequency
     param_group.attrs['supf_offset'] = self.param_supf_offset
     param_group.attrs['arinc_429'] = self.param_arinc_429
     self.param_data = np.arange(100)
     param_group.create_dataset('data', data=self.param_data)
     self.masked_param_name = 'TEST_PARAM11'
     masked_param_group = series.create_group(self.masked_param_name)
     self.masked_param_frequency = 4
     self.masked_param_supf_offset = 2.5
     masked_param_group.attrs['frequency'] = self.masked_param_frequency
     masked_param_group.attrs['supf_offset'] = self.masked_param_supf_offset
     self.param_mask = [bool(random.randint(0, 1)) for x in range(len(self.param_data))]
     masked_param_group.create_dataset('data', data=self.param_data)
     masked_param_group.create_dataset('mask', data=self.param_mask)
     hdf.close()
     self.hdf_file = hdf_file(self.hdf_path)
 def plot_loop(self):
     '''
     The plotting loop.
     '''
     while True:
         # For some strange reason it appears that printing the following
         # line affects the plotting window being shown on windows.
         if self.exit_loop.is_set():
             return
         error_message = self._get_error_message()
         if error_message:
             show_error_dialog(*error_message)
             continue
         if self._ready_to_plot.is_set():
             self._ready_to_plot.clear()
             try:
                 with hdf_file(self._hdf_path) as hdf:
                     # iterate over whole file as only those params
                     # required were converted earlier into the HDF file
                     params = hdf.get_params()
                 title = os.path.basename(self._hdf_path)
                 plot_parameters(params, self._axes, title=title)
             except ValueError as err:
                 print 'Waiting for you to fix this error: %s' % err
             except Exception as err:
                 # traceback required?
                 print 'Exception raised! %s: %s' % (err.__class__.__name__,
                                                     err)
         else:
             time.sleep(1)
def derived_trimmer(hdf_path, node_names, dest):
    '''
    Trims an HDF file of parameters which are not dependencies of nodes in
    node_names.
    
    :param hdf_path: file path of hdf file.
    :type hdf_path: str
    :param node_names: A list of Node names which are required.
    :type node_names: list of str
    :param dest: destination path for trimmed output file
    :type dest: str
    :return: parameters in stripped hdf file
    :rtype: [str]
    '''
    params = []
    with hdf_file(hdf_path) as hdf:
        derived_nodes = get_derived_nodes(settings.NODE_MODULES)
        node_mgr = NodeManager(
            datetime.now(), hdf.duration, hdf.valid_param_names(), [], [],
            derived_nodes, {}, {})
        _graph = graph_nodes(node_mgr)
        for node_name in node_names:
            deps = dependencies3(_graph, node_name, node_mgr)
            params.extend(filter(lambda d: d in node_mgr.hdf_keys, deps))
    return strip_hdf(hdf_path, params, dest) 
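
# Usage sketch (paths and node names are hypothetical): keep only the
# parameters needed to compute the requested nodes and write a trimmed copy.
kept_params = derived_trimmer('full_flight.hdf5',
                              ['Altitude AAL', 'Airspeed Relative'],
                              'trimmed_flight.hdf5')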
 def _code_run(self):
     postvars = self._parse_post()
     import numpy as np
     from analysis_engine.node import A, KTI, KPV, S, P
     from analysis_engine.library import *
     
     data = []
     try:
         with hdf_file(postvars['file_path'][0]) as hdf:
             if postvars['var_name_1'][0]:
                 exec "%s = hdf['%s']" % (postvars['var_name_1'][0], postvars['hdf_name_1'][0])
                 data.append(self._prepare_array(locals()[postvars['var_name_1'][0]].array))
             if postvars['var_name_2'][0]:
                 exec "%s = hdf['%s']" % (postvars['var_name_2'][0], postvars['hdf_name_2'][0])
                 data.append(self._prepare_array(locals()[postvars['var_name_2'][0]].array))
             if postvars['var_name_3'][0]:
                 exec "%s = hdf['%s']" % (postvars['var_name_3'][0], postvars['hdf_name_3'][0])
                 data.append(self._prepare_array(locals()[postvars['var_name_3'][0]].array))
         
             exec postvars['code'][0]
         data.insert(0, self._prepare_array(result))
     except Exception as err:
         return self._respond_with_json({'error': str(err)})
     # TODO: Align.
     # TODO: Remove invalid/masked dependencies
     # TODO: Use ast module to parse code.
     return self._respond_with_json({'data': data})
def revert_masks(hdf_path, params=None, delete_derived=False):
    '''
    :type hdf_path: str
    :param params: parameters to revert or delete.
    :type params: [str] or None
    :type delete_derived: bool
    '''
    with hdf_file(hdf_path) as hdf:
        if not params:
            params = hdf.keys() if delete_derived else hdf.lfl_keys()

        for param_name in params:
            param = hdf.get_param(param_name, load_submasks=True)

            if not param.lfl:
                if delete_derived:
                    del hdf[param_name]
                continue

            if 'padding' not in param.submasks:
                continue

            param.array = param.get_array(submask='padding')
            param.submasks = {'padding': param.submasks['padding']}
            param.invalid = False
            hdf[param_name] = param
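
# Usage sketch (the path is hypothetical): restore every recorded (LFL)
# parameter to its padding-only mask and delete derived parameters from the
# file.
revert_masks('processed_flight.hdf5', delete_derived=True)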
 def test_split_segments_multiple_types(self):
     '''
     Test data has multiple segments of differing segment types.
     Test data has already been validated
     '''
     hdf_path = os.path.join(test_data_path, "split_segments_multiple_types.hdf5")
     temp_path = copy_file(hdf_path)
     hdf = hdf_file(temp_path)
     self.maxDiff = None
     segment_tuples = split_segments(hdf)
     self.assertEqual(len(segment_tuples), 16, msg="Unexpected number of segments detected")
     segment_types = tuple(x[0] for x in segment_tuples)
     self.assertEqual(segment_types,
                      ('STOP_ONLY',
                       'START_ONLY',
                       'START_AND_STOP',
                       'START_AND_STOP',
                       'START_AND_STOP',
                       'START_AND_STOP',
                       'STOP_ONLY',
                       'START_AND_STOP',
                       'STOP_ONLY',
                       'START_ONLY',
                       'START_ONLY',
                       'START_AND_STOP',
                       'START_ONLY',
                       'START_AND_STOP',
                       'START_AND_STOP',
                       'START_ONLY'))
    def get_params(self, hdf_path, _slice, phase_name):
        import shutil
        import tempfile
        from hdfaccess.file import hdf_file

        with tempfile.NamedTemporaryFile() as temp_file:
            shutil.copy(hdf_path, temp_file.name)

            with hdf_file(temp_file.name) as hdf:  # read from the temporary copy
                pitch_capt = hdf.get('Pitch (Capt)')
                pitch_fo = hdf.get('Pitch (FO)')
                roll_capt = hdf.get('Roll (Capt)')
                roll_fo = hdf.get('Roll (FO)')
                cc_capt = hdf.get('Control Column Force (Capt)')
                cc_fo = hdf.get('Control Column Force (FO)')

                for par in pitch_capt, pitch_fo, roll_capt, roll_fo, cc_capt, \
                        cc_fo:
                    if par is not None:
                        ref_par = par
                        break

        phase = S(name=phase_name, frequency=1)
        phase.create_section(_slice)
        phase = phase.get_aligned(ref_par)[0]

        # Align the arrays, usually done in the Nodes
        for par in pitch_fo, roll_capt, roll_fo, cc_capt, cc_fo:
            if par is None:
                continue
            par.array = align(par, ref_par)
            par.hz = ref_par.hz

        return pitch_capt, pitch_fo, roll_capt, roll_fo, cc_capt, cc_fo, phase
def append_segment_info(hdf_segment_path, segment_type, segment_slice, part,
                        fallback_dt=None):
    """
    Get information about a segment such as type, hash, etc. and return a
    named tuple.

    If a valid timestamp can't be found, it creates start_dt as epoch(0)
    i.e. datetime(1970,1,1,1,0). Go-fast dt and Stop dt are relative to this
    point in time.

    :param hdf_segment_path: path to HDF segment to analyse
    :type hdf_segment_path: string
    :param segment_slice: Slice of this segment relative to original file.
    :type segment_slice: slice
    :param part: Numeric part number of this segment within the original data file (1 indexed)
    :type part: Integer
    :param fallback_dt: Used to replace elements of datetimes which are not available in the hdf file (e.g. YEAR not being recorded)
    :type fallback_dt: datetime
    :returns: Segment named tuple
    :rtype: Segment
    """
    # build information about a slice
    with hdf_file(hdf_segment_path) as hdf:
        airspeed = hdf['Airspeed'].array
        duration = hdf.duration
        # For now, raise TimebaseError up rather than using EPOCH
        # TODO: Review whether to revert to epoch again.
        ##try:
        start_datetime = _calculate_start_datetime(hdf, fallback_dt)
        ##except TimebaseError:
            ##logger.warning("Unable to calculate timebase, using epoch "
                           ##"1.1.1970!")
            ##start_datetime = datetime.fromtimestamp(0)
        stop_datetime = start_datetime + timedelta(seconds=duration)
        hdf.start_datetime = start_datetime

    if segment_type in ('START_AND_STOP', 'START_ONLY', 'STOP_ONLY'):
        # we went fast, so get the index
        spd_above_threshold = \
            np.ma.where(airspeed > settings.AIRSPEED_THRESHOLD)
        go_fast_index = spd_above_threshold[0][0]
        go_fast_datetime = \
            start_datetime + timedelta(seconds=int(go_fast_index))
        # Identification of raw data airspeed hash
        airspeed_hash_sections = runs_of_ones(airspeed.data > settings.AIRSPEED_THRESHOLD)
        airspeed_hash = hash_array(airspeed.data,airspeed_hash_sections,
                                   settings.AIRSPEED_HASH_MIN_SAMPLES)
    #elif segment_type == 'GROUND_ONLY':
        ##Q: Create a groundspeed hash?
        #pass
    else:
        go_fast_index = None
        go_fast_datetime = None
        # if not go_fast, create hash from entire file
        airspeed_hash = sha_hash_file(hdf_segment_path)
    #                ('slice         type          part  path              hash           start_dt        go_fast_dt        stop_dt')
    segment = Segment(segment_slice, segment_type, part, hdf_segment_path, airspeed_hash, start_datetime, go_fast_datetime, stop_datetime)
    return segment
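
# Usage sketch (values are hypothetical): build the Segment named tuple for
# the first segment produced by split_segments(); fallback_dt supplies any
# date fields missing from the recorded timebase.
from datetime import datetime

segment = append_segment_info('segment_1.hdf5', 'START_AND_STOP',
                              slice(0, 3989.0), 1,
                              fallback_dt=datetime(2014, 1, 1))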
 def test_split_segments_146_300(self):
     hdf = hdf_file(os.path.join(test_data_path, "4_3377853_146-301.hdf5"))
     segment_tuples = split_segments(hdf, {})
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 24801.0, None)),
                       ('START_AND_STOP', slice(24801.0, 30000.0, None)),
                       ('START_AND_STOP', slice(30000.0, 49999.0, None)),
                       ('START_AND_STOP', slice(49999.0, 69999.0, None)),
                       ('START_AND_STOP', slice(69999.0, 73552.0, None))])
 def test_split_segments_146_300(self):
     hdf = hdf_file(os.path.join(test_data_path, "4_3377853_146-301.hdf5"))
     segment_tuples = split_segments(hdf)
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 24801.0, None)),
                       ('START_AND_STOP', slice(24801.0, 30000.0, None)),
                       ('START_AND_STOP', slice(30000.0, 49999.0, None)),
                       ('START_AND_STOP', slice(49999.0, 69999.0, None)),
                       ('START_AND_STOP', slice(69999.0, 73552.0, None))])
def get_esns(flight):
    try:
        with hdf_file(flight.segment.file, read_only=True) as hdf:
            e1esn = most_common_value(hdf['Eng (1) ESN'].array)
            e2esn = most_common_value(hdf['Eng (2) ESN'].array)
        return e1esn, e2esn
    except:
        import traceback
        traceback.print_exc()
        return '', ''
    def _spacetree(self):
        '''
        '''
        form = FieldStorage(
            self.rfile,
            headers=self.headers,
            environ={'REQUEST_METHOD': 'POST'},
        )

        # Handle uploading of an HDF file:
        file_upload = form['hdf_file']
        if not file_upload.filename:
            self._index(error='Please select a file to upload.')
            return
        # Create a temporary file for the upload:
        file_desc, file_path = mkstemp()
        file_obj = os.fdopen(file_desc, 'w')
        file_obj.write(file_upload.file.read())
        file_obj.close()
        try:
            with hdf_file(file_path) as hdf_file_obj:
                #lfl_params = hdf_file_obj.keys()
                json_dep_tree = json.loads(hdf_file_obj.dependency_tree)
                from networkx.readwrite import json_graph
                gr_st = json_graph.node_link_graph(json_dep_tree)
        except IOError:
            self._index(error='Please select a valid HDF file.')
            return

        # Fetch parameters to display in a grid:
        #self._generate_json(lfl_params)

        # Save the dependency tree to tree.json:
        tree = os.path.join(AJAX_DIR, 'tree.json')
        with open(tree, 'w') as fh:
            json.dump(graph_adjacencies(gr_st), fh, indent=4)

        # Save the list of nodes to node_list.json:
        node_list = os.path.join(AJAX_DIR, 'node_list.json')
        spanning_tree_params = sorted(gr_st.nodes())
        with open(node_list, 'w') as fh:
            json.dump(spanning_tree_params, fh, indent=4)

        #polaris_query, params, missing_lfl_params = self._fetch_params(lfl_params)
        polaris_query, params, missing_lfl_params = '', {}, []

        # Render the spacetree:
        self._respond_with_template(
            'spacetree.html', {
                'missing_lfl_params': missing_lfl_params,
                'params': sorted(params.items()),
                'polaris_query': polaris_query,
                'server': BASE_URL,
                'year': date.today().year,
            })
def strip_hdf(hdf_path, params_to_keep, dest, deidentify=True):
    """
    Strip an HDF file of all parameters apart from those in params_to_keep. Does
    not raise an exception if any of the params_to_keep are not in the HDF file.

    :param hdf_path: file path of hdf file.
    :type hdf_path: str
    :param params_to_keep: parameter names to keep.
    :type params_to_keep: list of str
    :param dest: destination path for stripped output file
    :type dest: str
    :return: all parameter names within the output hdf file
    :rtype: [str]
    """
    with hdf_file(hdf_path) as hdf, hdf_file(dest, create=True) as hdf_dest:
        _copy_attrs(hdf.hdf, hdf_dest.hdf, deidentify=deidentify)  # Copy top-level attrs.
        params = hdf.get_params(params_to_keep)
        for param_name, param in params.iteritems():
            hdf_dest[param_name] = param
    return params.keys()
 def test_split_segments_737_3C(self):
     '''Splits on both DFC Jump and Engine parameters.'''
     hdf = hdf_file(os.path.join(test_data_path, "1_7295949_737-3C.hdf5"))
     segment_tuples = split_segments(hdf)
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 3168.0, None)),
                       ('START_AND_STOP', slice(3168.0, 6260.0, None)),
                       ('START_AND_STOP', slice(6260.0, 9504.0, None)),
                       ('START_AND_STOP', slice(9504.0, 12680.0, None)),
                       ('START_AND_STOP', slice(12680.0, 15571.0, None)),
                       ('START_AND_STOP', slice(15571.0, 18752.0, None))])
 def test_split_segments_737_3C(self):
     '''Splits on both DFC Jump and Engine parameters.'''
     hdf = hdf_file(os.path.join(test_data_path, "1_7295949_737-3C.hdf5"))
     segment_tuples = split_segments(hdf, {})
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 3168.0, None)),
                       ('START_AND_STOP', slice(3168.0, 6260.0, None)),
                       ('START_AND_STOP', slice(6260.0, 9504.0, None)),
                       ('START_AND_STOP', slice(9504.0, 12680.0, None)),
                       ('START_AND_STOP', slice(12680.0, 15571.0, None)),
                       ('START_AND_STOP', slice(15571.0, 18752.0, None))])
 def test_create_file(self):
     temp = 'temp_new_file.hdf5'
     if os.path.exists(temp):
         os.remove(temp)
     # cannot create file without specifying 'create=True'
     self.assertRaises(IOError, hdf_file, temp)
     self.assertFalse(os.path.exists(temp))
     # this one will create the file
     hdf = hdf_file(temp, create=True)
     self.assertTrue(os.path.exists(temp))
     self.assertEqual(hdf.hdfaccess_version, 1)
     os.remove(temp)
 def test_split_segments_data_1(self):
     '''Splits on both DFC Jump and Engine parameters.'''
     hdf_path = os.path.join(test_data_path, "split_segments_1.hdf5")
     temp_path = copy_file(hdf_path)
     hdf = hdf_file(temp_path)
     segment_tuples = split_segments(hdf, {})
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 9952.0, None), 0),
                       ('START_AND_STOP', slice(9952.0, 21799.0, None), 0),
                       ('START_AND_STOP', slice(21799.0, 24665.0, None), 3),
                       ('START_AND_STOP', slice(24665.0, 27898.0, None), 1),
                       ('START_AND_STOP', slice(27898.0, 31424.0, None), 2)])
def strip_hdf(hdf_path, params_to_keep, dest, deidentify=True):
    '''
    Strip an HDF file of all parameters apart from those in params_to_keep.
    Does not raise an exception if any of the params_to_keep are not in the
    HDF file.

    :param hdf_path: file path of hdf file.
    :type hdf_path: str
    :param params_to_keep: parameter names to keep.
    :type params_to_keep: list of str
    :param dest: destination path for stripped output file
    :type dest: str
    :return: all parameter names within the output hdf file
    :rtype: [str]
    '''
    with hdf_file(hdf_path) as hdf, hdf_file(dest, create=True) as hdf_dest:
        _copy_attrs(hdf.hdf, hdf_dest.hdf,
                    deidentify=deidentify)  # Copy top-level attrs.
        params = hdf.get_params(params_to_keep)
        for param_name, param in params.items():
            hdf_dest[param_name] = param
    return params.keys()
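
# Usage sketch (paths are hypothetical): copy just two parameters into a new
# file, de-identifying the top-level attributes on the way.
remaining = strip_hdf('full_flight.hdf5', ['Airspeed', 'Altitude STD'],
                      'stripped_flight.hdf5', deidentify=True)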
    def test_split_segments_data_2(self):
        '''Splits on both DFC Jump and Engine parameters.'''
        hdf_path = os.path.join(test_data_path, "split_segments_2.hdf5")
        temp_path = copy_file(hdf_path)
        hdf = hdf_file(temp_path)

        segment_tuples = split_segments(hdf, {})
        self.assertEqual(segment_tuples,
                         [('START_AND_STOP', slice(0, 3407.0, None)),
                          ('START_AND_STOP', slice(3407.0, 6362.0, None)),
                          ('START_AND_STOP', slice(6362.0, 9912.0, None)),
                          ('START_AND_STOP', slice(9912.0, 13064.0, None)),
                          ('START_AND_STOP', slice(13064.0, 16467.0, None)),
                          ('START_AND_STOP', slice(16467.0, 19200.0, None))])
 def test_split_segments_data_2(self):
     '''Splits on both DFC Jump and Engine parameters.'''
     hdf_path = os.path.join(test_data_path, "split_segments_2.hdf5")
     temp_path = copy_file(hdf_path)
     hdf = hdf_file(temp_path)
     
     segment_tuples = split_segments(hdf)
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 3583.0, None)),
                       ('START_AND_STOP', slice(3583.0, 6446.0, None)),
                       ('START_AND_STOP', slice(6446.0, 9912.0, None)),
                       ('START_AND_STOP', slice(9912.0, 13064.0, None)),
                       ('START_AND_STOP', slice(13064.0, 16467.0, None)),
                       ('START_AND_STOP', slice(16467.0, 19200.0, None))])
    def test_open_and_close_and_full_masks(self):
        self.hdf_file.close()
        with hdf_file(self.hdf_path) as hdf:
            # check it's open
            self.assertFalse(hdf.hdf.id is None)
            hdf['sample'] = Parameter('sample', np.array(range(10)))
            self.assertEqual(list(hdf['sample'].array.data), range(10))
            self.assertTrue(hasattr(hdf['sample'].array, 'mask'))

            hdf['masked sample'] = Parameter('masked sample', np.ma.array(range(10)))
            self.assertEqual(list(hdf['masked sample'].array.data), range(10))
            # check masks are returned in full (not just a single False)
            self.assertEqual(list(hdf['masked sample'].array.mask), [False] * 10)
        # check it's closed
        self.assertEqual(hdf.hdf.__repr__(), '<Closed HDF5 file>')
    def test_split_segments_data_2(self):
        '''Splits on both DFC Jump and Engine parameters.'''
        hdf_path = os.path.join(test_data_path, "split_segments_2.hdf5")
        temp_path = copy_file(hdf_path)
        hdf = hdf_file(temp_path)

        segment_tuples = split_segments(hdf, {})
        self.assertEqual(segment_tuples,
                         [('START_AND_STOP', slice(0, 3407.0, None), 0),
                          ('START_AND_STOP', slice(3407.0, 6362.0, None), 15),
                          ('START_AND_STOP', slice(6362.0, 9912.0, None), 26),
                          ('START_AND_STOP', slice(9912.0, 13064.0, None), 56),
                          ('START_AND_STOP', slice(13064.0, 16467.0, None), 8),
                          ('START_AND_STOP', slice(16467.0, 19065.0, None), 19),
                          ('GROUND_ONLY', slice(19065.0, 19200.0, None), 57)])
def process_raw_hdf(hdf, axes):
    with hdf_file(hdf) as h:
        params = h.get_params()

    params_to_plot = {}
    for axis in axes:
        if axis is not None:
            for param in axis:
                try:
                    params_to_plot[param] = params[param]
                except KeyError:
                    print('Parameter %s was not found in the HDF file.' % param)

    filtered_axes = dict(enumerate(filter(None, axes), start=1))
    return params_to_plot, filtered_axes
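
# Usage sketch (path and axis layout are hypothetical): request two axes of
# parameters; anything missing from the file is reported and skipped.
params_to_plot, filtered_axes = process_raw_hdf('example_flight.hdf5',
                                                [['Airspeed'],
                                                 ['Altitude STD', 'Heading']])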
 def _generate_graph(self):
     postvars = self._parse_post()
     data = []
     with hdf_file(postvars['file_path'][0]) as hdf:
         params = hdf.get_params(postvars['parameters[]'])

     # Align all other parameters to the first parameter requested.
     align_param = list(params.values())[0]
     for param in params.values():
         if param is not align_param:
             param.array = align(param, align_param)

     # TODO: Get parameter data and return it as AJAX.
     for param in params.values():
         data.append(list(zip(range(len(param.array)),
                              param.array.data.tolist())))
     return self._respond_with_json({'data': data})
 def save_to_hdf5(self, hdf5_path):
     '''this will overwrite any existing file with the same name'''
     with hdf_file(hdf5_path, cache_param_list=[], create=True) as hfile:
         # set attributes
         hfile.hdf.attrs['duration'] = self.duration
         hfile.hdf.attrs['start_datetime'] = self.start_epoch
         hfile.hdf.attrs['superframe_present'] = self.superframe_present
         hfile.hdf.attrs[
             'hdfaccess_version'] = hdfaccess.file.HDFACCESS_VERSION
         hfile.hdf.attrs[
             'reliable_frame_counter'] = self.reliable_frame_counter
         hfile.hdf.attrs['aircraft_info'] = json.dumps(self.aircraft_info)
         # save time series
         for k in self.parameters.keys():
             hfile.set_param(self.parameters[k])
     return
    def test_mapped_array(self):
        # created mapped array
        mapping = {0: 'zero', 2: 'two', 3: 'three'}
        array = np.ma.array(range(5) + range(5), mask=[1, 1, 1, 0, 0, 0, 0, 0, 1, 1])
        multi_p = Parameter('multi', array, values_mapping=mapping)
        multi_p.array[0] = 'three'

        # save array to hdf
        self.hdf_file['multi'] = multi_p
        self.hdf_file.close()

        # check hdf has mapping and integer values stored
        with hdf_file(self.hdf_path) as hdf:
            saved = hdf['multi']
            self.assertEqual(str(saved.array[:]),
                             "['three' -- -- 'three' '?' 'zero' '?' 'two' -- --]")
            self.assertEqual(saved.array.data.dtype, np.int)
def make_kml_file(start_datetime, flight_attrs, kti, kpv, flight_file,
                  REPORTS_DIR, output_path_and_file):
    '''adapted from FDS process_flight.  As of 2013/6/6 we do not geolocate unless KML was requested, to save time.'''
    from analysis_engine.plot_flight import track_to_kml
    with hdf_file(output_path_and_file) as hdf:
        # geo locate KTIs
        kti = geo_locate(hdf, kti)
        kti = _timestamp(start_datetime, kti)
        # geo locate KPVs
        kpv = geo_locate(hdf, kpv)
        kpv = _timestamp(start_datetime, kpv)
    report_path_and_file = REPORTS_DIR + flight_file.replace('.', '_') + '.kml'
    track_to_kml(output_path_and_file,
                 kti,
                 kpv,
                 flight_attrs,
                 dest_path=report_path_and_file)
 def test_rto_correct_side_of_split(self):
     '''
     Test to ensure that RTOs are on the correct side of the split, i.e. at
     the beginning of a flight. This example HDF5 file appears to have two
     stationary engine activities and an RTO between the two flights.
     This creates 6 sizeable slices (potential splitting points) where the
     normalised engine parameters are at 0.
     We're interested in making the segment split within the first of these
     eng_min_slices slices (between indices 11959.5 and 12336.5).
     Ideally the segment split should be halfway through this slice, at 12148.0.
     '''
     hdf = hdf_file(os.path.join(test_data_path, "rto_split_segment.hdf5"))
     segment_tuples = split_segments(hdf, {})
     split_idx = 12148.0
     self.assertEqual(segment_tuples, [
         ('START_AND_STOP', slice(0, split_idx, None), 0),
         ('START_AND_STOP', slice(split_idx, 21997.0, None), 52),
         ('GROUND_ONLY', slice(21997.0, 22784.0, None), 45),
     ])
def validate_file(hdffile, helicopter=False):
    """
    Attempts to open the HDF5 file using FlightDataAccessor and run all the
    validation tests. If the file cannot be opened, it will attempt to open
    the file using the h5py package and validate the namespace to test the
    HDF5 group structure.
    """
    filename = hdffile.split(os.sep)[-1]
    open_with_h5py = False
    hdf = None
    LOGGER.info("Verifying file '%s' with FlightDataAccessor.", filename)
    try:
        hdf = hdf_file(hdffile, read_only=True)
    except Exception as err:
        LOGGER.error("FlightDataAccessor cannot open '%s'. "
                     "Exception(%s: %s)", filename, type(err).__name__, err)
        open_with_h5py = True
    # If FlightDataAccessor errors upon opening, it may be because '/series'
    # is not included in the file. hdf_file attempts to create the group and
    # fails because we are opening the file read-only. Verify the group
    # structure using h5py.
    if open_with_h5py:
        LOGGER.info("Checking that H5PY package can read the file.")
        try:
            hdf_alt = h5py.File(hdffile, 'r')
        except Exception as err:
            LOGGER.error("cannot open '%s' using H5PY. Exception(%s: %s)",
                         filename, type(err).__name__, err)
            return
        LOGGER.info("File %s can be opened by H5PY, suggesting the format "
                    "is not compatible for POLARIS to use.",
                    filename)
        LOGGER.info("Will just verify the HDF5 structure and exit.")
        validate_namespace(hdf_alt)
        hdf_alt.close()
    else:
        validate_namespace(hdf.hdf)
        # continue testing using FlightDataAccessor
        validate_root_attribute(hdf)
        validate_parameters(hdf, helicopter)
    if hdf:
        hdf.close()
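
# Usage sketch (the path is hypothetical): run the structure, attribute and
# parameter checks against a single file; pass helicopter=True for
# rotary-wing data.
validate_file('example_flight.hdf5', helicopter=False)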
    def test_split_segments_data_3(self):
        '''Splits on both Engine and Heading parameters.'''
        hdf_path = os.path.join(test_data_path, "split_segments_3.hdf5")
        temp_path = copy_file(hdf_path)
        hdf = hdf_file(temp_path)

        segment_tuples = split_segments(hdf, {})
        self.assertEqual(segment_tuples,
                         [('START_AND_STOP', slice(0, 3989.0, None)),
                          ('START_AND_STOP', slice(3989.0, 7049.0, None)),
                          ('START_AND_STOP', slice(7049.0, 9569.0, None)),
                          ('START_AND_STOP', slice(9569.0, 12889.0, None)),
                          ('START_AND_STOP', slice(12889.0, 15867.0, None)),
                          ('START_AND_STOP', slice(15867.0, 18526.0, None)),
                          ('START_AND_STOP', slice(18526.0, 21726.0, None)),
                          ('START_AND_STOP', slice(21726.0, 24209.0, None)),
                          ('START_AND_STOP', slice(24209.0, 26607.0, None)),
                          ('START_AND_STOP', slice(26607.0, 28534.0, None)),
                          ('START_AND_STOP', slice(28534.0, 30875.0, None)),
                          ('START_AND_STOP', slice(30875.0, 33680.0, None))])
 def test_split_segments_data_3(self):
     '''Splits on both Engine and Heading parameters.'''
     hdf_path = os.path.join(test_data_path, "split_segments_3.hdf5")
     temp_path = copy_file(hdf_path)
     hdf = hdf_file(temp_path)
     
     segment_tuples = split_segments(hdf)
     self.assertEqual(segment_tuples,
                      [('START_AND_STOP', slice(0, 3987.0, None)),
                       ('START_AND_STOP', slice(3987.0, 7049.0, None)),
                       ('START_AND_STOP', slice(7049.0, 9563.0, None)),
                       ('START_AND_STOP', slice(9563.0, 12921.0, None)),
                       ('START_AND_STOP', slice(12921.0, 15858.0, None)),
                       ('START_AND_STOP', slice(15858.0, 18526.0, None)),
                       ('START_AND_STOP', slice(18526.0, 21728.0, None)),
                       ('START_AND_STOP', slice(21728.0, 24208.0, None)),
                       ('START_AND_STOP', slice(24208.0, 26607.0, None)),
                       ('START_AND_STOP', slice(26607.0, 28534.0, None)),
                       ('START_AND_STOP', slice(28534.0, 30875.0, None)),
                       ('START_AND_STOP', slice(30875.0, 33680.0, None))])
    def _spacetree(self):
        '''
        '''
        form = FieldStorage(
            self.rfile,
            headers=self.headers,
            environ={'REQUEST_METHOD': 'POST'},
        )

        # Handle uploading of an HDF file:
        file_upload = form['hdf_file']
        if not file_upload.filename:
            self._index(error='Please select a file to upload.')
            return
        # Create a temporary file for the upload:
        file_desc, file_path = mkstemp()
        file_obj = os.fdopen(file_desc, 'w')
        file_obj.write(file_upload.file.read())
        file_obj.close()
        try:
            with hdf_file(file_path) as hdf_file_obj:
                lfl_params = hdf_file_obj.keys()
        except IOError:
            self._index(error='Please select a valid HDF file.')
            return

        # Fetch parameters to display in a grid:
        self._generate_json(lfl_params)
        polaris_query, params, missing_lfl_params = self._fetch_params(
            lfl_params)

        # Render the spacetree:
        self._respond_with_template(
            'spacetree.html', {
                'missing_lfl_params': missing_lfl_params,
                'params': sorted(params.items()),
                'polaris_query': polaris_query,
                'server': BASE_URL,
                'year': date.today().year,
            })