def shand(signum, frame):
    now = time.time()
    if now - self.int_time < INTERRUPT_REPEAT_INTERVAL:
        self.stop_messaging()
    else:
        self.int_time = now
        log.info('mi/core/instrument/driver_process.py DRIVER GOT SIGINT and is ignoring it...')

def main():
    # Get the command line arguments
    options = docopt.docopt(__doc__)
    subsites = options.get('<subsites>')
    deployments = options.get('<deployments>')
    dates = options.get('<dates>')
    keep_temp_files = options.get('--keep')
    process_mode = options.get('--process')
    all_subsites = options.get('--all')
    zplsc_datafile = options.get('<zplsc_datafile>')

    if subsites is not None:
        subsites = subsites.split(" ")
    if deployments is not None:
        deployments = deployments.split(" ")
    else:
        deployments = []
    if dates is not None:
        dates = dates.split(" ")

    try:
        echogram_generator = ZPLSCEchogramGenerator(subsites, deployments, dates, keep_temp_files,
                                                    zplsc_datafile, process_mode, all_subsites)
        echogram_generator.generate_zplsc_echograms()
        log.info('Echogram processing completed successfully!')
    except ValueError:
        log.error('Invalid command line parameters: exiting Echogram Generator')

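# Illustrative sketch only: main() above assumes a docopt usage block of roughly
# this shape in the module __doc__. The real usage text lives in the source file;
# the option grouping below is an assumption, not a copy of it.
#
# Usage:
#     zplsc_echogram_generator <subsites> [<deployments>] [<dates>] [--keep] [--process]
#     zplsc_echogram_generator <zplsc_datafile> [--keep]
#     zplsc_echogram_generator --all [--process]
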
def test_real_file(self):
    """
    The file used in this test is a real file from the IDD.
    It contains 14 records:
    7 power records, 6 CO2 records (normal) and 1 CO2 record (blank).
    (No control records are in the real file.)
    Verify that the correct number of particles are generated
    from a real file.
    """
    log.debug('===== START TEST REAL FILE =====')

    with open(os.path.join(RESOURCE_PATH, '20140507.pco2w1.log'), 'r') as file_handle:
        num_particles_to_request = 2500
        num_expected_particles = 14

        parser = Pco2wAbcDclParser(self._recovered_parser_config,
                                   file_handle,
                                   self.exception_callback,
                                   None,
                                   None)

        particles = parser.get_records(num_particles_to_request)

        log.info(len(particles))

        self.assertEquals(len(particles), num_expected_particles)
        self.assertEquals(self.exception_callback_value, [])

    log.debug('===== END TEST REAL FILE =====')

def fix_yml_pressure_params(self):
    """
    This helper tool was used to modify the yml files
    in response to ticket #4341
    """
    pressure_regex = r' pressure:\s+(0.\d+)'

    for file_name in os.listdir(RESOURCE_PATH):
        if file_name.endswith('.yml'):
            with open(os.path.join(RESOURCE_PATH, file_name), 'rU') as in_file_id:
                out_file_name = file_name + '.new'
                log.info('fixing file %s', file_name)
                log.info('creating file %s', out_file_name)

                out_file_id = open(os.path.join(RESOURCE_PATH, out_file_name), 'w')

                for line in in_file_id:
                    match = re.match(pressure_regex, line)
                    if match is not None:
                        new_value = float(match.group(1)) * 1000.0
                        new_line = ' pressure_mbar: ' + str(new_value)
                        out_file_id.write(new_line + '\n')
                    else:
                        out_file_id.write(line)
                out_file_id.close()

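# Illustrative standalone sketch of the rewrite performed above (the sample line
# is made up, not taken from a real .yml resource file): pressure in bar becomes
# millibar under a renamed key.
import re

sample_line = ' pressure: 0.123'
match = re.match(r' pressure:\s+(0.\d+)', sample_line)
if match:
    print(' pressure_mbar: ' + str(float(match.group(1)) * 1000.0))  # -> 123.0
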
def test_real_file(self):
    """
    The file used in this test is a real file from the acquisition server.
    It contains 20 pH records.
    Verify that 20 instrument particles and one metadata particle are
    generated from the real file.
    """
    log.debug('===== START TEST REAL FILE =====')

    with open(os.path.join(RESOURCE_PATH, 'phsen1_20140730_190554.DAT'), O_MODE) as file_handle:
        num_particles_to_request = 25
        num_expected_particles = 21

        parser = PhsenAbcdefImodemParser(self._recovered_parser_config,
                                         file_handle,
                                         self.exception_callback)

        particles = parser.get_records(num_particles_to_request)

        log.info(len(particles))

        self.assertEquals(len(particles), num_expected_particles)
        self.assertEquals(self.exception_callback_value, [])

    log.debug('===== END TEST REAL FILE =====')

def test_real_file(self):
    """
    Verify that the correct number of particles are generated
    from a real file.
    """
    log.debug('===== START TEST REAL FILE =====')

    with open(os.path.join(RESOURCE_PATH, 'SAMI_C0069_300614.txt'), 'r') as file_handle:
        NUM_PARTICLES_TO_REQUEST = 2500
        NUM_EXPECTED_PARTICLES = 2063

        parser = Pco2wAbcParser(self._parser_config,
                                file_handle,
                                self.exception_callback,
                                None,
                                None)

        particles = parser.get_records(NUM_PARTICLES_TO_REQUEST)

        log.info(len(particles))

        self.assertEquals(len(particles), NUM_EXPECTED_PARTICLES)
        self.assertEqual(self._exception_occurred, False)

    log.debug('===== END TEST REAL FILE =====')

def shutdown(self):
    """
    Shutdown function prior to process exit.
    """
    log.info('Driver process shutting down.')
    self.driver_module = None
    self.driver_class = None
    self.driver = None

def rec_exception_callback(exception):
    """
    Callback function to log exceptions and continue.

    @param exception - Exception that occurred
    """
    log.info("Exception occurred: %s", exception.message)

def exception_callback(self, exception):
    """
    Store any exceptions that come into the exception callback.

    @param exception The exception that occurred
    """
    log.info('Received exception: %r', exception)
    self.exception_callback_value.append(exception)

def __init__(self, allowed, max_events=None, publish_interval=None):
    self._allowed = allowed
    self._deque = deque()
    self._max_events = max_events if max_events else self.DEFAULT_MAX_EVENTS
    self._publish_interval = publish_interval if publish_interval else self.DEFAULT_PUBLISH_INTERVAL
    self._running = False
    self._headers = {}
    log.info('Publisher: max_events: %d publish_interval: %d',
             self._max_events, self._publish_interval)

def test_one(self):
    particle_data_handler = parse(None, self.source_file_path, ParticleDataHandler())

    log.info("SAMPLES: %s", particle_data_handler._samples)
    log.info("FAILURE: %s", particle_data_handler._failure)

    self.assertEquals(particle_data_handler._failure, False)

def test_recovered_deprecation(self):
    particle_data_handler = parse_recovered(None, self.source_file_path, ParticleDataHandler())

    log.info("SAMPLES: %s", particle_data_handler._samples)
    log.info("FAILURE: %s", particle_data_handler._failure)

    self.assertEquals(particle_data_handler._failure, False)

def _publish(self, events, headers):
    for e in events:
        try:
            json.dumps(e)
        except (ValueError, UnicodeDecodeError) as err:
            log.exception('Unable to publish event: %r %r', e, err)
    count = len(events)
    self.total += count
    log.info('Publish %d events (%d total)', count, self.total)

def test_one(self):
    particle_data_hdlr_obj = parse(Config().base_dir(), self.sourceFilePath, ParticleDataHandler())

    log.info("SAMPLES: %s", particle_data_hdlr_obj._samples)
    log.info("FAILURE: %s", particle_data_hdlr_obj._failure)

    self.assertEquals(particle_data_hdlr_obj._failure, False)

def _spawn(self, spawnargs):
    """
    Launch a process using popen.

    @param spawnargs a list of arguments for the Popen command line.
        The first argument must be a path to a program and arguments
        must be in additional list elements.
    @return subprocess.Popen object
    """
    log.info('spawnargs: %s', spawnargs)
    return subprocess.Popen(spawnargs, env=os.environ, close_fds=True)

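# Hypothetical usage sketch (program path and arguments are illustrative only):
# passing spawnargs as a list means the command line is never shell-parsed.
#
# process = self._spawn(['/usr/bin/python', '-m', 'some_driver_module'])
# process.poll() returns None while the child is still running.
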
def _publish(self, events, headers):
    msg_headers = self._merge_headers(headers)

    # HACK!
    self.connection.error = None

    now = time.time()
    message = qm.Message(content=json.dumps(events), content_type='text/plain', durable=True,
                         properties=msg_headers, user_id='guest')
    self.sender.send(message, sync=True)
    elapsed = time.time() - now
    log.info('Published %d messages to QPID in %.2f secs', len(events), elapsed)

def _get_attribute_values(self):
    attrNames = self.ATTR_NAMES

    # see OOIION-631 note in test_platform_agent_with_rsn
    from_time = ntplib.system_to_ntp_time(time.time() - 50)

    req_attrs = [(attr_id, from_time) for attr_id in attrNames]
    attr_values = self._plat_driver.get_attribute_values(req_attrs)
    log.info("attr_values = %s" % str(attr_values))
    self.assertIsInstance(attr_values, dict)
    for attr_name in attrNames:
        self.assertTrue(attr_name in attr_values)
        self.assertIsInstance(attr_values[attr_name], list)

def zplsc_playback(self):
    for index, filename in enumerate(self.reader.read()):
        if filename:
            self.set_header_filename(filename)
            log.info("filename is: %s", filename)
            if hasattr(self.protocol, 'got_filename'):
                self.protocol.got_filename(filename)

    pub_index = 0
    while True:
        self.publish()
        pub_index = pub_index + 1
        log.info("publish index is: %d", pub_index)

def check_parent(self):
    """
    Test for existence of original parent process, if ppid specified.
    """
    if self.ppid:
        try:
            os.kill(self.ppid, 0)
        except OSError:
            log.info('Driver process COULD NOT DETECT PARENT.')
            return False
    return True

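# Minimal standalone sketch of the liveness probe used in check_parent() above:
# os.kill(pid, 0) delivers no signal, but raises OSError if the process is gone
# (or not signalable by this user).
import os

def process_alive(pid):
    try:
        os.kill(pid, 0)
        return True
    except OSError:
        return False
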
def _publish(self, events, headers):
    msg_headers = self._merge_headers(headers)
    now = time.time()
    try:
        publish = self.connection.ensure(self.producer, self.producer.publish, max_retries=4)
        publish(json.dumps(events), headers=msg_headers, user_id=self.username,
                declare=[self._queue], content_type='text/plain')
        log.info('Published %d messages using KOMBU in %.2f secs with headers %r',
                 len(events), time.time() - now, msg_headers)
    except Exception as e:
        log.error('Exception attempting to publish events: %r', e)
        return events

def start_web_service(oms_uri, alert_alarm_server_uri):
    """
    This method gets the proxy for the OMS Server, registers this server
    as a listener to the OMS and starts the Flask web service that will
    listen for OMS Events from the OMS Server.

    :param oms_uri: The URI of the OMS Server
    :param alert_alarm_server_uri: The URI of this server.
    :return:
    """
    alert_alarm_server_port = int(re.search('http://.+:(.+?)/', alert_alarm_server_uri).group(1))

    if oms_uri == 'DEBUG':
        log.info('DEBUG mode: OMS Alert Alarm Server not registering with OMS.')
    else:
        log.info('Getting the proxy for OMS server: %r', oms_uri)
        proxy = xmlrpclib.ServerProxy(oms_uri)

        log.info('Registering OMS Alerts & Alarms server as listener: %r', alert_alarm_server_uri)
        proxy.event.register_event_listener(alert_alarm_server_uri)

    log.info('Listening for Alerts & Alarms on 0.0.0.0:%d', alert_alarm_server_port)
    app.run(host='0.0.0.0', port=alert_alarm_server_port)

def main():
    options = docopt(__doc__)
    qpid_url = options['<qpid_url>']
    qpid_queue = options['<qpid_queue>']
    rabbit_url = options['<rabbit_url>']
    rabbit_queue = options['<rabbit_queue>']
    rabbit_key = options['<rabbit_key>']

    log.info('Starting shovel: %r', options)

    qpid = QpidProducer(qpid_url, qpid_queue)
    rabbit = RabbitConsumer(rabbit_url, rabbit_queue, rabbit_key, qpid)
    reporter = StatsReporter(rabbit)
    reporter.daemon = True
    reporter.start()
    rabbit.run()

def stop_messaging(self):
    """
    Close messaging resources for the driver process client.

    Close ZMQ command socket and terminate command context. Set flag to
    cause event thread to close event socket and context and terminate.
    Await event thread completion and return.
    """
    if self.event_thread:
        self.stop_event_thread = True
        self.event_thread.join()
        self.event_thread = None
    if self.zmq_context:
        self.zmq_context.destroy(linger=1)
        self.zmq_context = None
    self.evt_callback = None
    log.info('Driver client messaging closed.')

def filter_events(self, events):
    if self._allowed is not None and isinstance(self._allowed, list):
        log.info('Filtering %d events with: %r', len(events), self._allowed)
        new_events = []
        dropped = 0
        for event in events:
            if event.get('type') == DriverAsyncEvent.SAMPLE:
                if event.get('value', {}).get('stream_name') in self._allowed:
                    new_events.append(event)
                else:
                    dropped += 1
            else:
                new_events.append(event)
        log.info('Dropped %d unallowed particles', dropped)
        return new_events
    return events

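# Standalone sketch of the filtering rule above; the event shapes and the SAMPLE
# marker value are assumptions for illustration only.
SAMPLE = 'DRIVER_ASYNC_EVENT_SAMPLE'
allowed = ['ctdbp_cdef_sample']
events = [
    {'type': SAMPLE, 'value': {'stream_name': 'ctdbp_cdef_sample'}},
    {'type': SAMPLE, 'value': {'stream_name': 'unwanted_stream'}},
    {'type': 'DRIVER_ASYNC_EVENT_STATE_CHANGE'},
]
kept = [e for e in events
        if e.get('type') != SAMPLE
        or e.get('value', {}).get('stream_name') in allowed]
# kept retains the first and third events; the second is dropped.
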
def _write(self):
    dataframes = self.to_dataframes()
    for particle_type in dataframes:
        # very large dataframes don't work with pickle
        # split if too large
        df = dataframes[particle_type]
        max_size = 5000000
        if len(df) > max_size:
            num_slices = len(df) / max_size
            slices = np.array_split(df, num_slices)
            for index, df_slice in enumerate(slices):
                file_path = '%s_%d.pd' % (particle_type, index)
                df_slice.to_pickle(file_path)
        else:
            log.info('length of dataframe: %d', len(df))
            file_path = '%s.pd' % particle_type
            dataframes[particle_type].to_pickle(file_path)

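# Standalone sketch of the slicing rule above (sizes shrunk for illustration):
# np.array_split accepts a DataFrame and returns a list of row-wise slices.
import numpy as np
import pandas as pd

df = pd.DataFrame({'a': range(10)})
max_size = 4
num_slices = len(df) // max_size  # integer division, as in the Python 2 code above
for index, df_slice in enumerate(np.array_split(df, num_slices)):
    print(index, len(df_slice))  # two slices of 5 rows each
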
def test_real_file_2(self):
    """
    The file used in this test is a real file from the acquisition server.
    It contains 9 pH records.
    Verify that 9 instrument particles and one metadata particle are
    generated from the real file.
    """
    log.debug('===== START TEST REAL 2 FILE =====')

    num_particles_to_request = 10
    num_expected_particles = 10

    with open(os.path.join(RESOURCE_PATH, 'phsen1_20140725_192532.DAT'), O_MODE) as file_handle:
        parser = PhsenAbcdefImodemParser(self._recovered_parser_config,
                                         file_handle,
                                         self.exception_callback)

        particles = parser.get_records(num_particles_to_request)

        log.info(len(particles))

        self.assertEquals(len(particles), num_expected_particles)
        self.assert_particles(particles, "phsen1_20140725_192532_rec.yml", RESOURCE_PATH)
        self.assertEquals(self.exception_callback_value, [])

    with open(os.path.join(RESOURCE_PATH, 'phsen1_20140725_192532.DAT'), O_MODE) as file_handle:
        parser = PhsenAbcdefImodemParser(self._telemetered_parser_config,
                                         file_handle,
                                         self.exception_callback)

        particles = parser.get_records(num_particles_to_request)

        log.info(len(particles))

        self.assertEquals(len(particles), num_expected_particles)
        self.assert_particles(particles, "phsen1_20140725_192532_tel.yml", RESOURCE_PATH)
        self.assertEquals(self.exception_callback_value, [])

    log.debug('===== END TEST REAL 2 FILE =====')

def run(self):
    while True:
        queue_name, queue_depth, sent_count = self.rabbit.get_current_queue_depth()
        now = time.time()
        if self.last_time is not None:
            elapsed = now - self.last_time
            if elapsed > 0:
                rate = float(sent_count - self.last_count) / elapsed
            else:
                rate = -1
            log.info('Queue: %s Depth: %d Sent Count: %d Rate: %.2f/s',
                     queue_name, queue_depth, sent_count, rate)
        self.last_time = now
        self.last_count = sent_count
        time.sleep(self.report_interval)

def construct_driver(self):
    """
    Attempt to import and construct the driver object based on
    configuration.

    @retval True if successful, False otherwise.
    """
    try:
        module = importlib.import_module(self.driver_module)
        driver_class = getattr(module, self.driver_class)
        self.driver = driver_class(self.send_event)
        log.info('Imported and created driver from module: %r class: %r driver: %r',
                 module, driver_class, self.driver)
        return True
    except Exception as e:
        log.error('Could not import/construct driver module %s, class %s.',
                  self.driver_module, self.driver_class)
        log.error('%s' % str(e))
        return False

def read(self):
    while True:
        if self._filehandle is None and not self.files:
            log.info('Completed reading specified port agent logs, exiting...')
            raise StopIteration

        if self._filehandle is None:
            name = self.files.pop(0)
            log.info('Begin reading: %r', name)
            # yield the filename so we can pass it through to the driver
            yield name
            self.file_name_list.append(name)
            self._filehandle = open(name, 'r')

        if not self._process_packet():
            self._filehandle.close()
            self._filehandle = None

        yield

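# Hypothetical consumption sketch: read() yields each new file name once, then
# bare yields (None) as packets are processed, which is why zplsc_playback()
# above guards with "if filename:".
#
# for filename in reader.read():
#     if filename:
#         log.info('switched to file: %s', filename)
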
def process_oms_request():
    """
    This is the method that is called when the OMS POSTs OMS Events to
    this registered listener at the "/" path.

    :return:
    """
    if isinstance(request.json, list):
        # Log the list of Alert & Alarm messages from the OMS Event
        for alert_alarm_dict in request.json:
            aa_publisher.enqueue(alert_alarm_dict)
            log.info('oms_alert_alarm_server: OMS_AA_MSG: %r', alert_alarm_dict)

        # Publish the list of Alert & Alarm messages to qpid
        aa_publisher.publish()
    else:
        log.error('No data in the POSTed alert/alarm OMS Event ...')

    return '', httplib.ACCEPTED

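# Illustrative client-side sketch (URL, port and event fields are made up; only
# the "JSON list of dicts POSTed to /" shape comes from the handler above).
import json
import urllib2

events = [{'event_id': 'example-alert', 'severity': 3}]  # illustrative fields
req = urllib2.Request('http://localhost:12345/', json.dumps(events),
                      {'Content-Type': 'application/json'})
urllib2.urlopen(req)
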
def connect(self):
    delay = 1
    max_delay = 60
    while True:
        try:
            connection = qm.Connection(self.url, reconnect=False,
                                       username=self.username, password=self.password)
            connection.open()
            session = connection.session()
            self.sender = session.sender('%s; {create: always, node: {type: queue, durable: true}}'
                                         % self.queue)
            log.info('Shovel connected to QPID')
            return
        except qm.ConnectError:
            log.error('Shovel QPID connection error. Sleep %d seconds', delay)
            time.sleep(delay)
            delay = min(max_delay, delay * 2)

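# Standalone sketch of the retry schedule above: the delay doubles after each
# failed attempt and is capped at max_delay.
delay, max_delay = 1, 60
schedule = []
for _ in range(8):
    schedule.append(delay)
    delay = min(max_delay, delay * 2)
print(schedule)  # [1, 2, 4, 8, 16, 32, 60, 60]
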
def run(self):
    """
    Process entry point. Construct driver and start messaging loops.
    Periodically check messaging is going and parent exists if
    specified.
    """
    from mi.core.log import LoggerManager
    LoggerManager()

    log.info('Driver process started.')

    # noinspection PyUnusedLocal
    def shand(signum, frame):
        now = time.time()
        if now - self.int_time < INTERRUPT_REPEAT_INTERVAL:
            self.stop_messaging()
        else:
            self.int_time = now
            log.info('mi/core/instrument/driver_process.py DRIVER GOT SIGINT and is ignoring it...')

    signal.signal(signal.SIGINT, shand)

    if self.driver is not None or self.construct_driver():
        self.start_messaging()
        while self.messaging_started:
            if self.check_parent():
                time.sleep(2)
            else:
                self.stop_messaging()
                break

    self.shutdown()
    time.sleep(1)
    os._exit(0)

def fix_yml_float_params(self):
    """
    This helper tool was used to modify the yml files
    in response to ticket #8564
    """
    param_change_table = [
        ('battery_voltage', 'battery_voltage_dV', 10),
        ('sound_speed_analog2', 'sound_speed_dms', 10),
        ('heading', 'heading_decidegree', 10),
        ('pitch', 'pitch_decidegree', 10),
        ('roll', 'roll_decidegree', 10),
        ('pressure_mbar', 'pressure_mbar', 1),
        ('temperature', 'temperature_centidegree', 100),
        ('velocity_beam1', 'velocity_beam1', 1),
        ('velocity_beam2', 'velocity_beam2', 1),
        ('velocity_beam3', 'velocity_beam3', 1)
    ]

    for file_name in os.listdir(RESOURCE_PATH):
        if file_name.endswith('.yml'):
            with open(os.path.join(RESOURCE_PATH, file_name), 'rU') as in_file_id:
                out_file_name = file_name + '.new'
                log.info('fixing file %s', file_name)
                log.info('creating file %s', out_file_name)

                out_file_id = open(os.path.join(RESOURCE_PATH, out_file_name), 'w')

                for line in in_file_id:
                    new_line = line
                    for param_name, new_name, mult in param_change_table:
                        param_regex = r'\s+' + param_name + r':\s+(' + FLOAT_REGEX + ')' + END_OF_LINE_REGEX
                        match = re.match(param_regex, line)
                        if match is not None:
                            new_value = int(float(match.group(1)) * mult)
                            new_line = ' ' + new_name + ': ' + str(new_value) + '\n'
                            log.info('%s', new_line)
                    out_file_id.write(new_line)
                out_file_id.close()

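# Illustrative before/after sketch for the table above (the sample lines are
# made up, not taken from a real .yml file); each float becomes an int after
# applying the multiplier and the key is renamed:
#
#   battery_voltage: 12.3   ->   battery_voltage_dV: 123        (multiplier 10)
#   temperature: 21.47      ->   temperature_centidegree: 2147  (multiplier 100)
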
def start_messaging(self, evt_callback=None):
    """
    Initialize and start messaging resources for the driver process
    client. Initializes command socket for sending requests, and starts
    event thread that listens for events from the driver process
    independently of command request-reply.
    """
    self.zmq_context = zmq.Context()
    self.zmq_cmd_socket = self.zmq_context.socket(zmq.REQ)
    self.zmq_cmd_socket.connect(self.cmd_host_string)
    log.info('Driver client cmd socket connected to %s.' % self.cmd_host_string)

    self.zmq_evt_socket = self.zmq_context.socket(zmq.SUB)
    self.zmq_evt_socket.connect(self.event_host_string)
    self.zmq_evt_socket.setsockopt(zmq.SUBSCRIBE, '')
    log.info('Driver client event thread connected to %s.' % self.event_host_string)

    self.evt_callback = evt_callback

    def recv_evt_messages():
        """
        A looping function that monitors a ZMQ SUB socket for
        asynchronous driver events. Can be run as a thread or greenlet.
        """
        self.stop_event_thread = False
        while not self.stop_event_thread:
            try:
                evt = self.zmq_evt_socket.recv_pyobj(flags=zmq.NOBLOCK)
                log.debug('got event: %s' % str(evt))
                if self.evt_callback:
                    self.evt_callback(evt)
            except zmq.ZMQError:
                time.sleep(.5)
            except Exception as e:
                log.error('Driver client error reading from zmq event socket: ' + str(e))
                log.error('Driver client error type: ' + str(type(e)))
        log.info('Client event socket closed.')

def get_date_dirs(self, subsite, deployment):
    """
    This method will generate the path to the directory of date
    directories in the format of YYYYMM.

    Exceptions raised by this method:
        OSError
        ValueError

    :param subsite: The subsite of the ZPLSC instrument.
    :param deployment: The deployment number of the data of interest.
    :return: echogram_dates: The mapping of echogram dates to the entire month flag
             date_dirs_path: The path to the date directories.
    """
    # Generate the portion of the path up to the DCL directory to get all the instrument sub-directories.
    deployment_dir = os.path.join(self.raw_data_dir, subsite.upper(), 'R%05d' % deployment)

    dcl_path = ''
    instrument_dirs = ''
    for dcl_rel_path in DCL_PATHS:
        dcl_path = os.path.join(deployment_dir, dcl_rel_path)
        try:
            instrument_dirs = self.get_dir_contents(dcl_path, True)
            break
        except OSError:
            log.info('Could not find path: %s: checking alternate path', dcl_path)
            # Re-raise only after the last candidate DCL path has failed.
            if dcl_rel_path == DCL_PATHS[-1]:
                raise

    # Generate the portion of the path up to the ZPLSC Instrument serial number.
    serial_num_found = None
    for instrument in instrument_dirs:
        serial_num_found = SERIAL_NUM_DIR_MATCHER.match(instrument)
        if serial_num_found:
            break

    if serial_num_found is None:
        log.warning('Could not find ZPLSC data for subsite: %s and recovered deployment: %s',
                    subsite, deployment)
        raise OSError

    self.serial_num = serial_num_found.group(1)
    serial_num_dir = os.path.join(dcl_path, serial_num_found.group())
    sub_dirs = self.get_dir_contents(serial_num_dir)

    # Generate the portion of the path that contains the recovered data path.
    recovered_path = RECOVERED_DIR % (subsite.lower(), self.serial_num)
    recovered_dir = ''
    for sub_dir in sub_dirs:
        if sub_dir.startswith(recovered_path):
            recovered_dir = sub_dir
            break

    if recovered_dir:
        # Create the raw data path including the recovered path
        date_dirs_path = os.path.join(serial_num_dir, recovered_dir, DATA_PATH)
    else:
        log.warning('Could not find ZPLSC recovered data path starting with: %s', recovered_path)
        raise OSError

    # If no dates were entered on the command line, get the entire list of date directories.
    echogram_dates = self.echogram_dates
    if not echogram_dates:
        echogram_dates = {}

        # Get all the year/month date subdirectories for this subsite/deployment,
        # then get the contents of the directory.
        date_dirs = self.get_dir_contents(date_dirs_path, True)
        date_dirs = [(date_dir[:4], date_dir[4:]) for date_dir in date_dirs]

        # If in process mode, get the latest date that has 24 1-hour data files for echogram generation.
        if self.process_mode:
            echogram_dates[self.get_latest_echogram_date(date_dirs_path, date_dirs)] = False

        # Otherwise, get all the year/month date subdirectories for this subsite and deployment.
        else:
            for date_dir in date_dirs:
                year = int(date_dir[0])
                month = int(date_dir[1])

                # Save the date and indicate that the entire month should be generated.
                echogram_dates[date(year, month, 1)] = True

    return echogram_dates, date_dirs_path

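# Standalone sketch of the YYYYMM directory-name split used above; the sample
# directory names are illustrative.
from datetime import date

date_dir_names = ['201407', '201408']
pairs = [(d[:4], d[4:]) for d in date_dir_names]
echogram_dates = dict((date(int(y), int(m), 1), True) for y, m in pairs)
# {date(2014, 7, 1): True, date(2014, 8, 1): True}
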