Exemple #1
0
def _get_string_annotation_constraint_dict_aux(
        constraint_name, visited_constraint_names):
    
    """
    Loads the named string annotation value constraint from the
    database, parses its YAML into a constraint dictionary, and
    recursively resolves the constraint names in its `extends` value
    (if present) into similarly parsed constraint dictionaries.
    
    Raises a `ValueError` if a cycle is detected in the constraint
    inheritance graph.
    """
    
    if constraint_name in visited_constraint_names:
        # Revisiting `constraint_name` means the inheritance graph is
        # cyclic: report the cycle.
        start = visited_constraint_names.index(constraint_name)
        cycle_names = visited_constraint_names[start:] + [constraint_name]
        message = (
            'Cycle detected in constraint inheritance graph. '
            'Cycle is: {}.').format(' -> '.join(cycle_names))
        raise ValueError(message)
        
    record = AnnotationConstraint.objects.get(name=constraint_name)
    constraint = yaml_utils.load(record.text)
    
    constraint['parents'] = _get_string_annotation_constraint_parents(
        constraint, visited_constraint_names)
    
    return constraint
Exemple #2
0
def _get_string_annotation_constraint_dict_aux(constraint_name,
                                               visited_constraint_names):
    """
    Fetches the named string annotation value constraint from the
    database and parses its YAML into a constraint dictionary, then
    recursively substitutes parsed constraint dictionaries for the
    constraint names in any `extends` value of the result.

    Raises a `ValueError` if the constraint inheritance graph contains
    a cycle.
    """

    try:
        start = visited_constraint_names.index(constraint_name)
    except ValueError:
        # `constraint_name` has not been visited before, so no cycle.
        pass
    else:
        # We have visited `constraint_name` already, so the constraint
        # inheritance graph is cyclic.
        cycle = ' -> '.join(
            visited_constraint_names[start:] + [constraint_name])
        raise ValueError(('Cycle detected in constraint inheritance graph. '
                          'Cycle is: {}.').format(cycle))

    record = AnnotationConstraint.objects.get(name=constraint_name)
    constraint = yaml_utils.load(record.text)

    constraint['parents'] = _get_string_annotation_constraint_parents(
        constraint, visited_constraint_names)

    return constraint
Exemple #3
0
    def __init__(self, name, data):
        """Initializes this object, first parsing `data` from YAML."""

        # Parse the YAML, re-raising any parse error as a `ValueError`
        # so callers see a uniform exception type.
        try:
            parsed = yaml_utils.load(data)
        except Exception as e:
            raise ValueError(str(e))

        super().__init__(name, parsed)
Exemple #4
0
 def compile_yaml(spec, lat=None, lon=None, time_zone=None):
     
     """
     Compiles a schedule from a YAML specification.
     
     Parses `spec` as YAML and delegates to `Schedule.compile_dict`,
     forwarding the optional latitude, longitude, and time zone.
     
     Raises a `ValueError` if `spec` is not valid YAML.
     """
     
     try:
         spec = yaml_utils.load(spec)
     except Exception as e:
         # BUG FIX: `e.message` does not exist on Python 3 exceptions,
         # so the original code raised an `AttributeError` here instead
         # of the intended `ValueError`. Use `str(e)` instead.
         raise ValueError(
             'Could not load schedule YAML. Error message was: {}'.format(
                 str(e)))
         
     return Schedule.compile_dict(spec, lat, lon, time_zone)
Exemple #5
0
def read_yaml_file(path):
    """Reads the file at `path` and returns its contents parsed as YAML."""

    text = read_file(path)
    
    try:
        return yaml_utils.load(text)
    
    except Exception as e:
        # Wrap any parse failure in an `OSError` that names the file.
        message = (
            'Could not load YAML file "{:s}". Error message was: '
            '{:s}').format(path, str(e))
        raise OSError(message)
Exemple #6
0
def read_yaml_file(path):
    """Returns the parsed YAML contents of the specified file."""

    contents = read_file(path)

    try:
        result = yaml_utils.load(contents)
    except Exception as e:
        # Report parse failures as `OSError`s that identify the file.
        raise OSError(
            'Could not load YAML file "{:s}". Error message was: {:s}'.format(
                path, str(e)))
    else:
        return result
Exemple #7
0
    def test_sexagesimal_load(self):
        """
        Checks that `yaml_utils.load` parses "12:34:56" as a string.

        PyYAML's `load` implements YAML 1.1, in which "12:34:56" is the
        sexagesimal number 12 * 3600 + 34 * 60 + 56 = 45296. We use
        `ruamel.yaml` rather than PyYAML since it can also parse
        YAML 1.2, in which "12:34:56" is simply the string "12:34:56".
        """

        result = yaml_utils.load('12:34:56')
        self.assertEqual(result, '12:34:56')
 def test_sexagesimal_load(self):
     
     """
     Verifies that `yaml_utils.load` parses its input as YAML 1.2.
     
     Under YAML 1.1 (which PyYAML's `load` implements), "12:34:56" is
     the sexagesimal number 12 * 3600 + 34 * 60 + 56 = 45296. We use
     `ruamel.yaml` rather than PyYAML because it can also parse
     YAML 1.2, in which "12:34:56" is simply the string "12:34:56".
     """
     
     parsed = yaml_utils.load('12:34:56')
     self.assertEqual('12:34:56', parsed)
Exemple #9
0
    def create_from_yaml(s):
        """Creates a settings object from a YAML string."""

        try:
            data = yaml_utils.load(s)
        except Exception as e:
            raise ValueError(
                'YAML parse failed. Error message was:\n{}'.format(str(e)))

        if data is None:
            # An empty YAML document parses to `None`; treat it as an
            # empty mapping.
            data = {}
        elif not isinstance(data, dict):
            raise ValueError('Settings must be a YAML mapping.')

        return Settings.create_from_dict(data)
def _load_preferences(dir_path):
    
    """
    Loads preferences from the preference file in the specified directory.
    
    Returns a `_Preferences` object on success. If the file is missing,
    unreadable, not valid YAML, or not a YAML map, the problem is logged
    (along with a note that defaults will be used) and an empty `dict`
    is returned.
    
    NOTE(review): failure paths return a plain `dict` while the success
    path returns a `_Preferences` — confirm callers handle both types.
    """
    
    path = os.path.join(dir_path, _PREFERENCE_FILE_NAME)
    defaults_message = 'Will use default preference values.'
    
    if not os.path.exists(path):
        logging.warning(
            'Could not find preferences file "{}". {}'.format(
                path, defaults_message))
        return {}
        
    try:
        with open(path, 'r') as file_:
            contents = file_.read()
    except Exception:
        # The exception is deliberately not included in the message;
        # the previously bound name was unused.
        logging.error(
            'Read failed for preferences file "{}". {}'.format(
                path, defaults_message))
        return {}
    
    try:
        preferences = yaml_utils.load(contents)
    except Exception as e:
        logging.error((
            'YAML load failed for preferences file "{}". {} YAML load error '
            'message was:\n{}').format(path, defaults_message, str(e)))
        return {}
    
    if preferences is None:
        # preferences file contains no data
        return {}
    
    elif not isinstance(preferences, dict):
        logging.error(
            'Preferences file "{}" does not contain a YAML map. {}'.format(
                path, defaults_message))
        return {}
    
    return _Preferences(preferences)
def _load_preferences(file_path):
    
    """
    Loads preferences from the specified preference file.
    
    Always returns a `_Preferences` object. If the file is missing,
    unreadable, not valid YAML, empty, or not a YAML mapping, a warning
    is logged (noting that defaults will be used) and a default
    `_Preferences` is returned.
    """
    
    defaults_message = 'Will use default preference values.'
    
    if not os.path.exists(file_path):
        logging.warning(
            f'Preference file "{file_path}" does not exist. '
            f'{defaults_message}')
        return _Preferences()
        
    try:
        with open(file_path, 'r') as file_:
            contents = file_.read()
    except Exception:
        # The exception was previously bound to an unused name; the
        # message deliberately omits the error details.
        logging.warning(
            f'Read failed for preference file "{file_path}". '
            f'{defaults_message}')
        return _Preferences()
    
    try:
        preferences = yaml_utils.load(contents)
    except Exception as e:
        logging.warning(
            f'YAML load failed for preference file "{file_path}". '
            f'{defaults_message} YAML load error message was:\n{str(e)}')
        return _Preferences()
    
    if preferences is None:
        # preference file contains no data
        return _Preferences()
    
    elif not isinstance(preferences, dict):
        logging.warning(
            f'Preference file "{file_path}" does not contain a YAML mapping. '
            f'{defaults_message}')
        return _Preferences()
    
    return _Preferences(preferences)
import pyaudio

import vesper.util.yaml_utils as yaml_utils


# Configuration for the audio output test, parsed from inline YAML.
# Each element of `channel_signals` configures the signal generated for
# one output channel.
_CONFIG = yaml_utils.load('''
    
    channel_signals:
    
        - signal_type: Chirp
          signal_config:
              amplitude: 10000
              start_freq: 0
              end_freq: 10000
              duration: 5.1
              
        - signal_type: Chirp
          signal_config:
              amplitude: 10000
              start_freq: 5000
              end_freq: 1000
              duration: 3.1
          
    sample_rate: 22050
    buffer_size: .1
    
''')

# NOTE(review): `_CONFIG` specifies a buffer_size of .1 second but
# `_BUFFER_SIZE` below is .2 — confirm which value is authoritative.
_SAMPLE_DTYPE = '<i2'      # little-endian 16-bit signed samples
_SAMPLE_SIZE = 16          # bits
_BUFFER_SIZE = .2          # seconds
_TOTAL_BUFFER_SIZE = 10    # seconds
Exemple #13
0
 def test_create_from_dict(self):
     """Checks settings created from the parsed settings file contents."""
     file_contents = os_utils.read_file(_SETTINGS_FILE_PATH)
     settings_dict = yaml_utils.load(file_contents)
     self._check_settings(Settings.create_from_dict(settings_dict))
Exemple #14
0
 def _load_settings(self):
     """Loads and returns the settings for this classifier's clip type."""
     settings_path = classifier_utils.get_settings_file_path(self._clip_type)
     settings_dict = yaml_utils.load(settings_path.read_text())
     return Settings.create_from_dict(settings_dict)
Exemple #15
0
 def clean_metadata(self):
     """
     Validates and parses the `metadata` form field as YAML.
     
     Raises a `ValidationError` if the field text is not valid YAML.
     """
     try:
         return yaml_utils.load(self.cleaned_data['metadata'])
     except Exception as e:
         # Chain the original exception so the underlying YAML parse
         # error is preserved for debugging.
         raise ValidationError('Could not parse metadata YAML.') from e
Exemple #16
0
 def _load_extensions_if_needed(self):
     """
     Lazily builds the extensions lookup from the extensions spec.
     
     The first call parses `self._extensions_spec` as YAML and maps each
     extension type name to its loaded extension classes; subsequent
     calls are no-ops.
     """
     if self._extensions is None:
         spec = yaml_utils.load(self._extensions_spec)
         # A dict comprehension is clearer (and slightly faster) than
         # `dict` applied to a generator of pairs.
         self._extensions = {
             type_name: _load_extension_classes(module_class_names)
             for type_name, module_class_names in spec.items()}
Exemple #17
0
 def _load_settings(self):
     """Loads, logs, and returns the classifier settings for this clip type."""
     path = classifier_utils.get_settings_file_path(self.clip_type)
     # f-string matches the formatting style used elsewhere in this code
     # base; the logged text is unchanged.
     logging.info(f'Loading classifier settings from "{path}"...')
     text = path.read_text()
     d = yaml_utils.load(text)
     return Settings.create_from_dict(d)
 def clean_archive_data(self):
     """
     Validates and parses the `archive_data` form field as YAML.
     
     Raises a `ValidationError` if the field text is not valid YAML.
     """
     try:
         return yaml_utils.load(self.cleaned_data['archive_data'])
     except Exception as e:
         # Chain the original exception so the underlying YAML parse
         # error is preserved for debugging.
         raise ValidationError('Could not parse specified YAML.') from e
Exemple #19
0
 def test_dump_and_load(self):
     """Checks that `load` inverts `dump` for a nested collection."""
     original = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
     round_tripped = yaml_utils.load(yaml_utils.dump(original))
     self.assertEqual(original, round_tripped)
# Output table format specification, parsed from inline YAML. Each
# `columns` entry names an output column and pairs a measurement with a
# format (optionally parameterized).
#
# NOTE(review): two columns are both named "real_detection_time" (one
# with a time-only format, one with date+time) — confirm the duplicate
# column name is intentional.
_TABLE_FORMAT = yaml_utils.load('''

columns:

    - name: season
      measurement: Night
      format: Bird Migration Season
  
    - name: year
      measurement: Night
      format:
          name: Time
          parameters:
              format: "%Y"

    - name: detector
      measurement: Detector
      format: Lower Case

    - name: species
      measurement: Clip Class
      format:
          name: Call Clip Class
          parameters:
              mapping:
                  DoubleUp: dbup
                  Other: othe
                  Unknown: unkn
      
    - name: site
      measurement: Station
      format:
          name: Mapping
          parameters:
              mapping:
                  Baldy: baldy
                  Floodplain: flood
                  Ridge: ridge
                  Sheep Camp: sheep
      
    - name: date
      measurement: Night
      format:
          name: Time
          parameters:
              format: "%m/%d/%y"
              
    - name: recording_start
      measurement: Recording Start Time
      format: Time
              
    - name: recording_length
      measurement: Recording Duration
      format: Duration
              
    - name: detection_time
      measurement: Elapsed Start Time
      format: Duration
      
    - name: real_detection_time
      measurement: Start Time
      format:
          name: Time
          parameters:
              format: "%H:%M:%S"
              
    - name: real_detection_time
      measurement: Start Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
              
    - name: rounded_to_half_hour
      measurement: Rounded Start Time
      format: Time
      
    - name: duplicate
      measurement:
          name: Duplicate Call
          parameters:
              min_intercall_interval: 60
              ignored_classes: [Other, Unknown, Weak]
      format:
          name: Boolean
          parameters:
              values:
                  true: 'yes'
                  false: 'no'
                  
    - name: sunset
      measurement: Sunset Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: civil_dusk
      measurement: Civil Dusk Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: nautical_dusk
      measurement: Nautical Dusk Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: astronomical_dusk
      measurement: Astronomical Dusk Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: astronomical_dawn
      measurement: Astronomical Dawn Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: nautical_dawn
      measurement: Nautical Dawn Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: civil_dawn
      measurement: Civil Dawn Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: sunrise
      measurement: Sunrise Time
      format:
          name: Time
          parameters:
              format: "%m/%d/%y %H:%M:%S"
      
    - name: moon_altitude
      measurement: Moon Altitude
      format:
          name: Decimal
          parameters:
              detail: ".1"

    - name: moon_illumination
      measurement: Moon Illumination
      format:
          name: Decimal
          parameters:
              detail: ".1"
      
''')
Exemple #21
0
 def test_dump_and_load_with_non_default_flow_style(self):
     """Checks dump/load round trip with block (non-flow) style output."""
     original = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
     dumped = yaml_utils.dump(original, default_flow_style=False)
     self.assertEqual(original, yaml_utils.load(dumped))
 def __init__(self, name, data):
     """Initializes this object from a name and a YAML string."""
     parsed_data = yaml_utils.load(data)
     super().__init__(name, parsed_data)
Exemple #23
0
def _populate_database_from_yaml(s):

    """
    Populates the database from the YAML string `s`.

    Creates the stations, a recorder device, one recording, and the
    clips (with their annotations) described by `s`. `s` must parse to
    a mapping with an optional `num_stations` value (default 1) and a
    `clips` sequence whose elements each have an `annotations` mapping.
    """

    d = yaml_utils.load(s)

    num_stations = d.get('num_stations', 1)
    stations = []
    # NOTE(review): `stations` is built but never used below, and the
    # `station` referenced later is the loop variable left over from
    # this loop, i.e. the *last* station created — confirm this is
    # intentional when `num_stations` > 1.
    for i in range(num_stations):
        name = 'Station {}'.format(i + 1)
        station = Station.objects.create(name=name, time_zone='US/Eastern')
        stations.append(station)

    model = DeviceModel.objects.create(name='Recorder Model',
                                       type='Recorder',
                                       manufacturer='Nagra',
                                       model='X')

    device = Device.objects.create(name='Recorder',
                                   model=model,
                                   serial_number='0')

    # Size the recording so the clips tile it exactly, back to back.
    clips = d['clips']
    num_clips = len(clips)
    clip_length = _CLIP_DURATION * _SAMPLE_RATE
    recording_length = num_clips * clip_length
    recording_duration = recording_length / _SAMPLE_RATE
    recording_end_time = \
        _RECORDING_START_TIME + datetime.timedelta(seconds=recording_duration)

    station_recorder = StationDevice.objects.create(
        station=station,
        device=device,
        start_time=_RECORDING_START_TIME,
        end_time=recording_end_time)

    creation_time = _RECORDING_START_TIME

    recording = Recording.objects.create(station_recorder=station_recorder,
                                         num_channels=_NUM_CHANNELS,
                                         length=recording_length,
                                         sample_rate=_SAMPLE_RATE,
                                         start_time=_RECORDING_START_TIME,
                                         end_time=recording_end_time,
                                         creation_time=creation_time)

    # Create one annotation info per distinct annotation name used by
    # the clips.
    annotation_names = _get_annotation_names(clips)
    annotation_infos = dict(
        (name, _create_annotation_info(name, creation_time))
        for name in annotation_names)

    for i, clip_d in enumerate(clips):

        # The i-th clip starts where the (i-1)-th ends.
        clip_start_index = i * clip_length
        offset = clip_start_index / _SAMPLE_RATE
        clip_start_time = \
            _RECORDING_START_TIME + datetime.timedelta(seconds=offset)
        clip_duration = clip_length / _SAMPLE_RATE
        clip_end_time = \
            clip_start_time + datetime.timedelta(seconds=clip_duration)

        clip = Clip.objects.create(recording=recording,
                                   channel_num=0,
                                   start_index=clip_start_index,
                                   length=clip_length,
                                   start_time=clip_start_time,
                                   end_time=clip_end_time,
                                   creation_time=creation_time)

        for name, value in clip_d['annotations'].items():
            info = annotation_infos[name]
            model_utils.annotate_clip(clip, info, value, creation_time)
Exemple #24
0
 def _load_settings(self):
     """Loads, logs, and returns the classifier settings for this clip type."""
     path = classifier_utils.get_settings_file_path(self.clip_type)
     # f-string matches the formatting style used elsewhere in this code
     # base; the logged text is unchanged.
     logging.info(f'Loading classifier settings from "{path}"...')
     text = path.read_text()
     d = yaml_utils.load(text)
     return Settings.create_from_dict(d)
Exemple #25
0
 def test_dump_and_load(self):
     """Checks that `load` is the inverse of `dump` for nested data."""
     data = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
     result = yaml_utils.load(yaml_utils.dump(data))
     self.assertEqual(data, result)
# Dataset configurations, parsed from inline YAML. Each entry names a
# dataset (by prefix), the detector it is for, and the train/val/test
# split sizes as [negative, positive] example counts.
#
# NOTE(review): the "Thrush 100K" entry has `detector_name: Tseep`
# while the other Thrush entries have `detector_name: Thrush` —
# possible copy-paste error; confirm before relying on it.
DATASET_CONFIGS = yaml_utils.load('''

- dataset_name_prefix: Thrush 20K
  detector_name: Thrush
  train_dataset_size: [6000, 6000]
  val_dataset_size: [2000, 2000]
  test_dataset_size: [2000, 2000]
  
- dataset_name_prefix: Thrush 100K
  detector_name: Tseep
  train_dataset_size: [36000, 36000]
  val_dataset_size: [2000, 2000]
  test_dataset_size: [2000, 2000]
  
- dataset_name_prefix: Thrush 1M
  detector_name: Thrush
  train_dataset_size: [496000, 496000]
  val_dataset_size: [2000, 2000]
  test_dataset_size: [2000, 2000]

- dataset_name_prefix: Tseep 3K
  detector_name: Tseep
  train_dataset_size: [2000, 2000]
  val_dataset_size: [500, 500]
  test_dataset_size: [500, 500]
    
- dataset_name_prefix: Tseep 20K
  detector_name: Tseep
  train_dataset_size: [6000, 6000]
  val_dataset_size: [2000, 2000]
  test_dataset_size: [2000, 2000]
    
- dataset_name_prefix: Tseep 100K
  detector_name: Tseep
  train_dataset_size: [40000, 40000]
  val_dataset_size: [5000, 5000]
  test_dataset_size: [5000, 5000]
  
- dataset_name_prefix: Tseep 340K
  detector_name: Tseep
  train_dataset_size: [158579, 158579]
  val_dataset_size: [5000, 5000]
  test_dataset_size: [5000, 5000]
  
- dataset_name_prefix: Tseep 1M
  detector_name: Tseep
  train_dataset_size: [480000, 480000]
  val_dataset_size: [10000, 10000]
  test_dataset_size: [10000, 10000]
  
''')
Exemple #27
0
    
    Schedule complementation.
            
            
See Simplenote entry "Recording Schedules, Take 2" for more information
about schedules.
'''


# JSON-Schema-style schema (parsed from YAML) for a schedule `interval`
# object: an `interval` mapping with optional `start`, `end`, and
# `duration` (string) properties and nothing else.
_INTERVAL_SCHEMA = yaml_utils.load('''
    type: object
    properties:
        interval:
            type: object
            properties:
                start: {}
                end: {}
                duration: {type: string}
            additionalProperties: false
    required: [interval]
    additionalProperties: false
''')


_INTERVALS_SCHEMA = yaml_utils.load('''
    type: object
    properties:
        intervals:
            type: array
            items:
                type: object
def _populate_database_from_yaml(s):
    
    """
    Populates the database from the YAML string `s`.
    
    Creates the stations, a recorder device, one recording, and the
    clips (with their annotations) described by `s`. `s` must parse to
    a mapping with an optional `num_stations` value (default 1) and a
    `clips` sequence whose elements each have an `annotations` mapping.
    """
    
    d = yaml_utils.load(s)
    
    num_stations = d.get('num_stations', 1)
    stations = []
    # NOTE(review): `stations` is built but never used below, and the
    # `station` referenced later is the loop variable left over from
    # this loop, i.e. the *last* station created — confirm this is
    # intentional when `num_stations` > 1.
    for i in range(num_stations):
        name = 'Station {}'.format(i + 1)
        station = Station.objects.create(name=name, time_zone='US/Eastern')
        stations.append(station)
        
    model = DeviceModel.objects.create(
        name='Recorder Model', type='Recorder', manufacturer='Nagra',
        model='X')
    
    device = Device.objects.create(
        name='Recorder', model=model, serial_number='0')
    
    # Size the recording so the clips tile it exactly, back to back.
    clips = d['clips']
    num_clips = len(clips)
    clip_length = _CLIP_DURATION * _SAMPLE_RATE
    recording_length = num_clips * clip_length
    recording_duration = recording_length / _SAMPLE_RATE
    recording_end_time = \
        _RECORDING_START_TIME + datetime.timedelta(seconds=recording_duration)
    
    station_recorder = StationDevice.objects.create(
        station=station, device=device, start_time=_RECORDING_START_TIME,
        end_time=recording_end_time)
    
    creation_time = _RECORDING_START_TIME
    
    recording = Recording.objects.create(
        station_recorder=station_recorder, num_channels=_NUM_CHANNELS,
        length=recording_length, sample_rate=_SAMPLE_RATE,
        start_time=_RECORDING_START_TIME, end_time=recording_end_time,
        creation_time=creation_time)
    
    # Create one annotation info per distinct annotation name used by
    # the clips.
    annotation_names = _get_annotation_names(clips)
    annotation_infos = dict(
        (name, _create_annotation_info(name, creation_time))
        for name in annotation_names)
    
    for i, clip_d in enumerate(clips):
        
        # The i-th clip starts where the (i-1)-th ends.
        clip_start_index = i * clip_length
        offset = clip_start_index / _SAMPLE_RATE
        clip_start_time = \
            _RECORDING_START_TIME + datetime.timedelta(seconds=offset)
        clip_duration = clip_length / _SAMPLE_RATE
        clip_end_time = \
            clip_start_time + datetime.timedelta(seconds=clip_duration)
        
        clip = Clip.objects.create(
            recording=recording, channel_num=0,
            start_index=clip_start_index, length=clip_length,
            start_time=clip_start_time, end_time=clip_end_time,
            creation_time=creation_time)
        
        for name, value in clip_d['annotations'].items():
            info = annotation_infos[name]
            model_utils.annotate_clip(clip, info, value, creation_time)
Exemple #29
0
 def test_dump_and_load_with_non_default_flow_style(self):
     """Checks dump/load round trip when block style output is requested."""
     data = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
     text = yaml_utils.dump(data, default_flow_style=False)
     self.assertEqual(data, yaml_utils.load(text))
Exemple #30
0
def load_training_settings(training_name):
    """Loads, logs, and returns the settings for the specified training."""
    settings_path = get_training_settings_file_path(training_name)
    logging.info(f'Loading annotator settings from "{settings_path}"...')
    settings_dict = yaml_utils.load(settings_path.read_text())
    return Settings.create_from_dict(settings_dict)
 def _load_extensions_if_needed(self):
     """
     Lazily builds the extensions lookup from the extensions spec.
     
     The first call parses `self._extensions_spec` as YAML and maps each
     extension type name to its loaded extension classes; subsequent
     calls are no-ops.
     """
     if self._extensions is None:
         spec = yaml_utils.load(self._extensions_spec)
         # A dict comprehension is clearer (and slightly faster) than
         # `dict` applied to a generator of pairs.
         self._extensions = {
             type_name: _load_extension_classes(module_class_names)
             for type_name, module_class_names in spec.items()}