def _cleanup_active(expiration):
    """Garbage-collect pod directories not updated since ``expiration``.

    A pod survives cleanup when it is still referenced (ref count > 1)
    or when its directory lock is held by another process (still active).
    """
    LOG.info('remove pods before: %s', expiration)
    with locks.acquiring_exclusive(_get_active_path()):
        for pod_dir_path, config in _iter_configs():
            pod_id = _get_id(pod_dir_path)
            if _get_ref_count(pod_dir_path) > 1:
                LOG.debug('pod is still referenced: %s', pod_id)
                continue
            # Non-blocking try: an active pod holds its own directory
            # lock, so failure to acquire means "in use" — skip it.
            pod_dir_lock = locks.try_acquire_exclusive(pod_dir_path)
            if not pod_dir_lock:
                LOG.debug('pod is still active: %s', pod_id)
                continue
            try:
                pod_status = _get_pod_status(pod_dir_path, config)
                last_updated = _get_last_updated(pod_status)
                if last_updated is None:
                    # Prevent cleaning up just-prepared pod directory.
                    last_updated = datetimes.utcfromtimestamp(
                        _get_config_path(pod_dir_path).stat().st_mtime
                    )
                if last_updated < expiration:
                    # The graveyard is shared; serialize moves into it.
                    with locks.acquiring_exclusive(_get_graveyard_path()):
                        LOG.info('clean up pod: %s', pod_id)
                        _move_pod_dir_to_graveyard(pod_dir_path)
                        journals.remove_journal_dir(pod_id)
            finally:
                # Release and close even if status reading/moving raised.
                pod_dir_lock.release()
                pod_dir_lock.close()
def test_timestamp_date(self):
    """timestamp_date truncates a datetime down to midnight of its day."""
    seconds = 7200 + 180 + 4  # 02:03:04 on the epoch day.
    moment = datetimes.utcfromtimestamp(seconds)
    self.assertEqual(
        moment.replace(tzinfo=None),
        datetime.datetime(1970, 1, 1, 2, 3, 4),
    )
    self.assertEqual(moment.timestamp(), seconds)
    day_start = datetimes.timestamp_date(moment)
    self.assertEqual(
        day_start.replace(tzinfo=None),
        datetime.datetime(1970, 1, 1, 0, 0, 0),
    )
    self.assertEqual(day_start.timestamp(), 0)
def get_pod_app_exit_status(root_path, app):
    """Return exit status and the time it was recorded."""
    status_path = _get_pod_app_exit_status_path(root_path, app)
    if not status_path.is_file():
        # No exit status has been recorded for this app yet.
        return None, None
    recorded_at = datetimes.utcfromtimestamp(status_path.stat().st_mtime)
    return int(status_path.read_text()), recorded_at
def _journal_get_timestamp(entry):
    """Extract a journal entry's timestamp, defaulting to the current time.

    Prefers the source-provided timestamp and falls back to the journal's
    own; both are microseconds-since-epoch strings.
    """
    raw = entry.get('_SOURCE_REALTIME_TIMESTAMP')
    if not raw:
        # Falsy (missing or empty) source timestamp: use the journal's.
        raw = entry.get('__REALTIME_TIMESTAMP')
    if raw is None:
        return datetimes.utcnow()
    seconds = times.convert(
        times.Units.MICROSECONDS,
        times.Units.SECONDS,
        int(raw),
    )
    return datetimes.utcfromtimestamp(seconds)
def test_make_timestamp(self):
    """make_timestamp builds an aware UTC datetime from calendar fields."""
    expected_seconds = 7200 + 180 + 4
    made = datetimes.make_timestamp(1970, 1, 1, 2, 3, 4)
    self.assertEqual(made, datetimes.utcfromtimestamp(expected_seconds))
    self.assertEqual(
        made,
        datetime.datetime(
            1970, 1, 1, 2, 3, 4, tzinfo=datetime.timezone.utc,
        ),
    )
def test_parse_collectd_notification(self):
    """A collectd notification text parses into an alerts.Message.

    The catch-all rule (empty pattern) matches; header fields populate
    host/level/timestamp, and the post-header lines become the description.
    """
    self.assertEqual(
        alerts._parse_collectd_notification(
            [
                alerts.Config.Rule(
                    pattern=re.compile(r''),
                    template=alerts.Config.Rule.Template(
                        level='{level}',
                        title='{title}',
                        description='{raw_message}',
                    ),
                )
            ],
            # NOTE(review): blank separator line between headers and
            # message reconstructed per collectd's notification format —
            # confirm against the parser.
            io.StringIO(
                '''\
Severity: OKAY
Time: 1234.567
Host: foobar
Plugin: cpu
PluginInstance: 0
Type: cpu
TypeInstance: idle
DataSource: value
CurrentValue: 2.000000e+01
WarningMin: 1.000000e+01
WarningMax: nan
FailureMin: 5.000000e+00
FailureMax: nan

Some message.
Second line of message.
'''
            )
        ),
        alerts.Message(
            host='foobar',
            level=alerts.Message.Levels.GOOD,
            title='cpu/0/idle: 20.00 >= 10.00',
            description='Some message.\nSecond line of message.\n',
            timestamp=datetimes.utcfromtimestamp(1234.567),
        ),
    )
def _parse_collectd_header(header, kwargs, headers):
    """Parse one ``Name: value`` collectd header line.

    Well-known fields go into ``kwargs`` (for Message construction);
    other recognized headers are collected into ``headers``; anything
    else is logged and dropped.
    """
    name, colon, value = header.partition(':')
    if not colon:
        LOG.warning('ill-formatted collectd notification header: %s', header)
        return
    name = name.strip()
    value = value.strip()
    if name == 'Host':
        kwargs['host'] = value
    elif name == 'Time':
        kwargs['timestamp'] = datetimes.utcfromtimestamp(float(value))
    elif name == 'Severity':
        level = _COLLECTD_SEVERITY_TABLE.get(value.upper())
        if level is None:
            LOG.warning('unknown collectd severity: %s', header)
        else:
            kwargs['level'] = level
    elif name in _COLLECTD_KNOWN_HEADERS:
        headers[name] = value
    else:
        LOG.warning('unknown collectd notification header: %s', header)
def test_utcfromtimestamp(self):
    """Timestamp zero maps to the epoch and round-trips to zero."""
    epoch = datetime.datetime(1970, 1, 1)
    converted = datetimes.utcfromtimestamp(0)
    self.assertEqual(epoch, converted.replace(tzinfo=None))
    self.assertEqual(0, converted.timestamp())
def test_parse_journal_entry(self):
    """_parse_journal_entry matches rules against journal entries."""
    # A MESSAGE that matches no rule pattern yields no alert.
    self.assertIsNone(
        alerts._parse_journal_entry(
            [
                alerts.Config.Rule(
                    pattern=re.compile(r'something'),
                    template=alerts.Config.Rule.Template(
                        level='ERROR',
                        title='{title}',
                        description='{raw_message}',
                    ),
                )
            ],
            {'MESSAGE': 'no match'},
            'foobar',
            '01234567-89ab-cdef-0123-456789abcdef',
        ))
    # A matching rule with no template also yields no alert.
    self.assertIsNone(
        alerts._parse_journal_entry(
            [
                alerts.Config.Rule(
                    pattern=re.compile(r'something'),
                    template=None,
                )
            ],
            {'MESSAGE': 'this has something'},
            'foobar',
            '01234567-89ab-cdef-0123-456789abcdef',
        ))
    # MESSAGE may arrive as a str or as a list of byte values;
    # both forms must parse identically.
    for message in (
        'INFO this has something',
        list(b'INFO this has something'),
    ):
        with self.subTest(message):
            self.assertEqual(
                alerts._parse_journal_entry(
                    [
                        alerts.Config.Rule(
                            pattern=re.compile(
                                r'(?P<level>INFO) '
                                r'this (?P<raw_message>.* something)'),
                            template=alerts.Config.Rule.Template(
                                level='{level}',
                                title='{title}',
                                description='{raw_message}',
                            ),
                        )
                    ],
                    {
                        'SYSLOG_IDENTIFIER': 'spam',
                        'MESSAGE': message,
                        # Microseconds-since-epoch, as journald provides.
                        '_SOURCE_REALTIME_TIMESTAMP': '1001200200',
                    },
                    'foobar',
                    '01234567-89ab-cdef-0123-456789abcdef',
                ),
                alerts.Message(
                    host='foobar',
                    level=alerts.Message.Levels.INFO,
                    title='spam',
                    description='has something',
                    timestamp=datetimes.utcfromtimestamp(1001.2002),
                ),
            )
def _get_last_updated(image_dir_path):
    """Return the image metadata file's mtime as an aware UTC datetime."""
    mtime = _get_metadata_path(image_dir_path).stat().st_mtime
    return datetimes.utcfromtimestamp(mtime)
def _union_datetime_getter(reader, name):
    """Read an optional timestamp column, mapping null to None."""
    raw = reader[name]
    return None if raw is None else datetimes.utcfromtimestamp(raw)
def _datetime_getter(reader, name):
    """Read a required timestamp column as an aware UTC datetime."""
    raw = reader[name]
    return datetimes.utcfromtimestamp(raw)