def gettz(name):
    for cachedname, tzinfo in CACHE:
        if cachedname == name:
            return tzinfo

    name_parts = name.lstrip('/').split('/')
    for part in name_parts:
        if part == os.path.pardir or os.path.sep in part:
            raise ValueError('Bad path segment: %r' % part)
    filename = os.path.join(ZONEINFODIR, *name_parts)
    try:
        zonefile = open(filename, "rb")
    except (IOError, OSError):  # zone file missing or unreadable
        tzinfo = None
    else:
        tzinfo = tzfile(zonefile)
        zonefile.close()

    if tzinfo is None and ZONEINFOFILE:
        tf = TarFile.open(ZONEINFOFILE)
        try:
            zonefile = tf.extractfile(name)
        except KeyError:
            tzinfo = None
        else:
            tzinfo = tzfile(zonefile)
        tf.close()

    if tzinfo is not None:
        CACHE.insert(0, (name, tzinfo))
        del CACHE[CACHESIZE:]
    return tzinfo

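# Usage sketch (not part of the original module): assumes the module-level
# ZONEINFODIR, ZONEINFOFILE, CACHE and CACHESIZE globals are configured as
# in dateutil's zoneinfo helpers, and that "America/Chicago" is resolvable.
from datetime import datetime

chicago = gettz("America/Chicago")
if chicago is not None:
    # April 2007 falls in daylight-saving time in Chicago, hence -05:00.
    print(datetime(2007, 4, 2, 21, 53, 27, tzinfo=chicago).isoformat())
    # 2007-04-02T21:53:27-05:00
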
def test_now_local(self, mock_dt, mock_conf):
    dt_test = datetime(year=1985, month=10, day=25, hour=8, minute=18)
    mock_dt.now.return_value = dt_test
    mock_conf.get.return_value = test_config

    self.assertEqual(now_local(), dt_test)
    expected_timezone = tzfile('/usr/share/zoneinfo/America/Chicago')
    mock_dt.now.assert_called_with(expected_timezone)

    now_local(tzfile('/usr/share/zoneinfo/Europe/Stockholm'))
    expected_timezone = tzfile('/usr/share/zoneinfo/Europe/Stockholm')
    mock_dt.now.assert_called_with(expected_timezone)

def __init__(self, zonefile_stream=None):
    if zonefile_stream is not None:
        with tar_open(fileobj=zonefile_stream, mode='r') as tf:
            # dict comprehension does not work on python2.6
            # TODO: get back to the nicer syntax when we ditch python2.6
            # self.zones = {zf.name: tzfile(tf.extractfile(zf),
            #               filename = zf.name)
            #              for zf in tf.getmembers() if zf.isfile()}
            self.zones = dict((zf.name, tzfile(tf.extractfile(zf),
                                               filename=zf.name))
                              for zf in tf.getmembers()
                              if zf.isfile() and zf.name != METADATA_FN)
            # deal with links: They'll point to their parent object. Less
            # waste of memory
            # links = {zl.name: self.zones[zl.linkname]
            #          for zl in tf.getmembers() if
            #          zl.islnk() or zl.issym()}
            links = dict((zl.name, self.zones[zl.linkname])
                         for zl in tf.getmembers()
                         if zl.islnk() or zl.issym())
            self.zones.update(links)
            try:
                metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
                metadata_str = metadata_json.read().decode('UTF-8')
                self.metadata = json.loads(metadata_str)
            except KeyError:
                # no metadata in tar file
                self.metadata = None
    else:
        self.zones = dict()
        self.metadata = None

def test_default_timezone(self, mock_conf):
    mock_conf.get.return_value = test_config
    self.assertEqual(default_timezone(),
                     tzfile('/usr/share/zoneinfo/America/Chicago'))

    # Test missing tz-info
    mock_conf.get.return_value = {}
    self.assertEqual(default_timezone(), tzlocal())

def __init__(self, zonefile_stream=None):
    if zonefile_stream is not None:
        with TarFile.open(fileobj=zonefile_stream) as tf:
            self.zones = {
                zf.name: tzfile(tf.extractfile(zf), filename=zf.name)
                for zf in tf.getmembers()
                if zf.isfile() and zf.name != METADATA_FN
            }
            # deal with links: They'll point to their parent object. Less
            # waste of memory
            links = {
                zl.name: self.zones[zl.linkname]
                for zl in tf.getmembers()
                if zl.islnk() or zl.issym()
            }
            self.zones.update(links)

            try:
                metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
                metadata_str = metadata_json.read().decode('UTF-8')
                self.metadata = json.loads(metadata_str)
            except KeyError:
                # no metadata in tar file
                self.metadata = None
    else:
        self.zones = {}
        self.metadata = None

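# Usage sketch (assumptions: the surrounding class is dateutil's
# ZoneInfoFile and the tarball path below is hypothetical): the constructor
# expects the bundled zoneinfo tarball as a binary stream.
from io import BytesIO

with open('/path/to/dateutil-zoneinfo.tar.gz', 'rb') as f:
    zone_info = ZoneInfoFile(zonefile_stream=BytesIO(f.read()))
print(len(zone_info.zones), zone_info.metadata is not None)
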
def load_timezones(path):
    """Load the necessary timezones.

    :param path: root path to start the search for timezone files from
    :return: map of timezone names to tzinfo objects
    """
    timezone_map = {}
    for timezone_file_name in _get_available_timezones(path):
        timezone_map[timezone_file_name[0]] = tzfile(timezone_file_name[1])
    return timezone_map

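# Usage sketch (hypothetical): _get_available_timezones(path) is assumed to
# yield (zone_name, file_path) pairs for every TZif file found under `path`.
zones = load_timezones('/usr/share/zoneinfo')
stockholm = zones.get('Europe/Stockholm')
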
def testFileLastTransition(self):
    # After the last transition, it goes to standard time in perpetuity
    tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT)))
    self.assertEqual(datetime(2037, 10, 25, 0, 59, tzinfo=tzc).tzname(),
                     "EDT")
    self.assertEqual(datetime(2037, 10, 25, 1, 00, tzinfo=tzc).tzname(),
                     "EST")
    self.assertEqual(datetime(2038, 5, 25, 12, 0, tzinfo=tzc).tzname(),
                     "EST")

def stdtime(tz, year, month, day, hour, min, sec,
            zoneinfo="/usr/share/zoneinfo"):
    """Use /usr/share/zoneinfo to interpret a time in a timezone.

    >>> stdtime("America/Chicago", 2007, 4, 2, 21, 53, 27).isoformat()
    '2007-04-02T21:53:27-05:00'
    """
    return datetime(year, month, day, hour, min, sec,
                    tzinfo=tzfile("%s/%s" % (zoneinfo, tz)))

def buildcache():
    global CACHE
    zoneinfofile = getzoneinfofile()
    if zoneinfofile:
        tf = TarFile.open(zoneinfofile)
        try:
            for tarinfo in tf.getmembers():
                if tarinfo.islnk() or tarinfo.isfile():
                    zonefile = tf.extractfile(tarinfo)
                    CACHE[tarinfo.name] = tzfile(zonefile)
        finally:
            tf.close()

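# Usage sketch (hypothetical): populate the module-level CACHE dict once,
# then look zones up by name; getzoneinfofile() is assumed to locate the
# bundled tarball, as in older dateutil releases.
buildcache()
eastern = CACHE.get("America/New_York")
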
def testIsStd(self):
    # NEW_YORK tzfile contains this isstd information:
    isstd_expected = (0, 0, 0, 1)
    tzc = tz.tzfile(BytesIO(base64.b64decode(NEW_YORK)))
    # gather the actual information as parsed by the tzfile class
    isstd = []
    for ttinfo in tzc._ttinfo_list:
        # ttinfo objects contain boolean values
        isstd.append(int(ttinfo.isstd))
    # ttinfo list may contain more entries than isstd file content
    isstd = tuple(isstd[:len(isstd_expected)])
    self.assertEqual(
        isstd_expected, isstd,
        "isstd UTC/local indicators parsed: %s != tzfile contents: %s"
        % (isstd, isstd_expected))

def testLeapCountDecodesProperly(self):
    # This timezone has leapcnt, and failed to decode until
    # Eugene Oden notified about the issue.
    # As leap information is currently unused (and unstored) by tzfile() we
    # can only indirectly test this: Take advantage of tzfile() not closing
    # the input file if handed in as an opened file and assert that the
    # full file content has been read by tzfile(). Note: For this test to
    # work NEW_YORK must be in TZif version 1 format i.e. no more data
    # after TZif v1 header + data has been read
    fileobj = BytesIO(base64.b64decode(NEW_YORK))
    tzc = tz.tzfile(fileobj)
    # we expect no remaining file content now, i.e. zero-length; if there's
    # still data we haven't read the file format correctly
    remaining_tzfile_content = fileobj.read()
    self.assertEqual(len(remaining_tzfile_content), 0)

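# The test above relies on a reusable pattern: hand the parser an open
# BytesIO and afterwards assert that the stream was fully consumed. A
# minimal runnable sketch of the same idea (read_header is a toy stand-in
# for the real parser):
from io import BytesIO

def read_header(stream):
    # toy "parser": consumes a fixed-size 4-byte magic header
    return stream.read(4)

buf = BytesIO(b'TZif')
read_header(buf)
assert buf.read() == b'', 'parser left trailing bytes unread'
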
def _ParseFileEntry(self, knowledge_base, file_entry):
  """Parses artifact file system data for a preprocessing attribute.

  Args:
    knowledge_base (KnowledgeBase): to fill with preprocessing information.
    file_entry (dfvfs.FileEntry): file entry that contains the artifact
        value data.

  Returns:
    bool: True if all the preprocessing attributes were found and
        the preprocessor plugin is done.

  Raises:
    errors.PreProcessFail: if the preprocessing fails.
  """
  result = False
  if file_entry.link:
    # Determine the timezone based on the file path.
    _, _, time_zone = file_entry.link.partition('zoneinfo/')

  else:
    # Determine the timezone based on the timezone information file.
    file_object = file_entry.GetFileObject()

    time_zone = None
    try:
      time_zone_file = tz.tzfile(file_object)
      date_time = datetime.datetime(2017, 1, 1)
      time_zone = time_zone_file.tzname(date_time)

    except ValueError:
      # TODO: add and store preprocessing errors.
      logging.error('Unable to read time zone information file.')

    finally:
      file_object.close()

  if time_zone:
    try:
      knowledge_base.SetTimeZone(time_zone)
      result = True
    except ValueError:
      # TODO: add and store preprocessing errors.
      logging.error('Unable to set time zone in knowledge base.')

  return result

def gettz(name):
    tzinfo = None
    if ZONEINFOFILE:
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            tf = TarFile.open(ZONEINFOFILE)
            try:
                zonefile = tf.extractfile(name)
            except KeyError:
                tzinfo = None
            else:
                tzinfo = tzfile(zonefile)
            tf.close()
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo

def add_tags(self, tags):
    """ add tags to event """
    if tags == '*':
        tags = []
    else:
        tags = tags.split(',')

    # parse tags
    for t in tags:
        if '=' in t:
            (k, v) = t.split('=')
            if k == 'TZ':
                tzfilename = '/usr/share/zoneinfo/' + v
                if os.path.isfile(tzfilename):
                    self.timezone = tzfile(tzfilename)
                    self.timezonename = v
                else:
                    logger.error('No timezone file {0}'.format(v))
            elif k == 'TRANSP':
                self.transp = v
        else:
            self.categories.append(t)

def __init__(self, zonefile_stream=None):
    if zonefile_stream is not None:
        with _tar_open(fileobj=zonefile_stream, mode='r') as tf:
            # dict comprehension does not work on python2.6
            # TODO: get back to the nicer syntax when we ditch python2.6
            # self.zones = {zf.name: tzfile(tf.extractfile(zf),
            #               filename = zf.name)
            #              for zf in tf.getmembers() if zf.isfile()}
            self.zones = dict((zf.name, tzfile(tf.extractfile(zf),
                                               filename=zf.name))
                              for zf in tf.getmembers() if zf.isfile())
            # deal with links: They'll point to their parent object. Less
            # waste of memory
            # links = {zl.name: self.zones[zl.linkname]
            #          for zl in tf.getmembers() if
            #          zl.islnk() or zl.issym()}
            links = dict((zl.name, self.zones[zl.linkname])
                         for zl in tf.getmembers()
                         if zl.islnk() or zl.issym())
            self.zones.update(links)
    else:
        self.zones = dict()

def _ParseFileEntry(self, mediator, file_entry):
  """Parses artifact file system data for a preprocessing attribute.

  Args:
    mediator (PreprocessMediator): mediates interactions between preprocess
        plugins and other components, such as storage and knowledge base.
    file_entry (dfvfs.FileEntry): file entry that contains the artifact
        value data.

  Raises:
    errors.PreProcessFail: if the preprocessing fails.
  """
  if file_entry.link:
    # Determine the timezone based on the file path.
    _, _, time_zone = file_entry.link.partition('zoneinfo/')

  else:
    # Determine the timezone based on the timezone information file.
    file_object = file_entry.GetFileObject()

    time_zone = None
    try:
      time_zone_file = tz.tzfile(file_object)
      date_time = datetime.datetime(2017, 1, 1)
      time_zone = time_zone_file.tzname(date_time)

    except ValueError:
      mediator.ProducePreprocessingWarning(
          self.ARTIFACT_DEFINITION_NAME,
          'Unable to read time zone information file.')

  # TODO: check if time zone is set in knowledge base.
  if time_zone:
    try:
      mediator.knowledge_base.SetTimeZone(time_zone)
    except ValueError:
      mediator.ProducePreprocessingWarning(
          self.ARTIFACT_DEFINITION_NAME,
          'Unable to set time zone in knowledge base.')

def _ParseFileEntry(self, knowledge_base, file_entry):
  """Parses artifact file system data for a preprocessing attribute.

  Args:
    knowledge_base (KnowledgeBase): to fill with preprocessing information.
    file_entry (dfvfs.FileEntry): file entry that contains the artifact
        value data.

  Raises:
    errors.PreProcessFail: if the preprocessing fails.
  """
  if file_entry.link:
    # Determine the timezone based on the file path.
    _, _, time_zone = file_entry.link.partition('zoneinfo/')

  else:
    # Determine the timezone based on the timezone information file.
    file_object = file_entry.GetFileObject()

    time_zone = None
    try:
      time_zone_file = tz.tzfile(file_object)
      date_time = datetime.datetime(2017, 1, 1)
      time_zone = time_zone_file.tzname(date_time)

    except ValueError:
      # TODO: add and store preprocessing errors.
      logger.error('Unable to read time zone information file.')

    finally:
      file_object.close()

  # TODO: check if time zone is set in knowledge base.
  if time_zone:
    try:
      knowledge_base.SetTimeZone(time_zone)
    except ValueError:
      # TODO: add and store preprocessing errors.
      logger.error('Unable to set time zone in knowledge base.')

def __init__(self, remline):
    # TODO: workaround line splits/continuations
    fileinfo, remline = remline.strip().split('\n')
    self.remline = remline
    self.linenumber, self.filename = fileinfo.split(' ')
    self.uid = hashlib.md5(remline.encode(_encoding)).hexdigest()
    fields = remline.split(None, 5)

    # set defaults
    tzfilename = '/usr/share/zoneinfo/' + options['timezone']
    if os.path.isfile(tzfilename):
        self.timezone = tzfile(tzfilename)
        self.timezonename = options['timezone']
    else:
        # adjacent string literals concatenate before format() runs, so
        # the {0} placeholder is actually filled in
        logger.debug('No timezone file {0}. '
                     'Setting to local zone.'.format(options['timezone']))
        self.timezone = tzlocal()
        self.timezonename = 'localtime'
    self.transp = 'OPAQUE'
    self.categories = []

    self.add_date(fields[0])
    self.add_tags(fields[2])
    self.add_times(fields[4], fields[3])
    self.add_body(fields[5])

def compute_mb_prod(data, period='day', ALLDIFF=False):
    START = datetime(today.year, today.month, today.day,
                     tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
    prod_delta = {}
    last_doc = None
    last_dt = None
    prev_doc = None
    inum = 0
    gap = ONE_DAY
    dt = None
    for doc in data:
        dt = datetime.strptime(doc['ts'], "%Y-%m-%d %H:%M:%S")
        #print('dt ', dt.strftime(out_format))
        dt = dt.replace(
            tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
        #print(' [dt] ', dt.strftime(out_format))
        if last_doc is None:
            last_dt = dt
            last_doc = doc['doc']
            prev_doc = doc['doc']
            diff = (START - dt)
            t = diff.total_seconds()
            if t > 0:
                START = datetime(
                    dt.year, dt.month, dt.day,
                    tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
            if period == 'week':
                gap = ONE_DAY * 2
                wday = START.weekday()
                START = START - timedelta(wday)
            elif period == 'month':
                gap = ONE_DAY * 7
                mday = START.day - 1
                START = START - timedelta(mday)
            #print(' START', START.strftime(out_format))
        diff = (START - dt)
        t = diff.total_seconds()
        if t > 0 or (ALLDIFF and inum > 0):
            inum += 1
            if t < gap:
                cur_doc = doc['doc']  ########## first doc of the next day
                vd, ld = check_serial(prev_doc, cur_doc, last_doc)
                if len(vd) > 0:
                    prod_delta[last_dt] = vd
                last_doc = ld
                prev_doc = cur_doc
            vd = {}
            pvv = {}
            lvv = {}
            if last_dt in prod_delta:
                vd = prod_delta[last_dt]
            for k in prev_doc.keys():
                if k == 'ser':
                    continue
                lv = last_doc[k]
                pv = prev_doc[k]
                if VERBOSE:
                    pvv[k] = pv
                    lvv[k] = lv
                dv = lv - pv
                if k in vd:
                    vd[k] = vd[k] + dv
                else:
                    vd[k] = dv
            prod_delta[last_dt] = vd
            if VERBOSE:
                print(last_dt.strftime(out_format), vd, lvv, '-', pvv)
            START = datetime(
                dt.year, dt.month, dt.day,
                tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
            if period == 'week':
                wday = START.weekday()
                START = START - timedelta(wday)
            elif period == 'month':
                mday = START.day - 1
                START = START - timedelta(mday)
            #print(' START', START.strftime(out_format), dt.strftime(out_format))
            last_doc = doc['doc']  ########## first doc of the next day
            prev_doc = doc['doc']
            #START = START - timedelta(1)
            last_dt = dt
        else:
            cur_doc = doc['doc']
            vd, ld = check_serial(prev_doc, cur_doc, last_doc)
            if len(vd) > 0:
                prod_delta[last_dt] = vd
            last_doc = ld
            prev_doc = cur_doc

    if dt is not None:
        vd = {}
        pvv = {}
        lvv = {}
        if last_dt in prod_delta:
            vd = prod_delta[last_dt]
        for k in prev_doc.keys():
            if k == 'ser':
                continue
            lv = last_doc[k]
            pv = prev_doc[k]
            if VERBOSE:
                pvv[k] = pv
                lvv[k] = lv
            dv = lv - pv
            if k in vd:
                vd[k] = vd[k] + dv
            else:
                vd[k] = dv
        prod_delta[last_dt] = vd
        if VERBOSE:
            print(last_dt.strftime(out_format), vd, lvv, '-', pvv)
    return prod_delta

def compute_sm_prod(data, period='day', ALLDIFF=False):
    START = datetime(today.year, today.month, today.day,
                     tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
    prod_delta = {}
    last_doc = None
    last_dt = None
    prev_doc = None
    gap = ONE_DAY
    dt = None
    for doc in data:
        if not 'sm' in doc:
            continue
        dt = datetime.strptime(doc['ts'], "%Y-%m-%d %H:%M:%S")
        dt = dt.replace(
            tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
        if last_doc is None:
            last_dt = dt
            last_doc = doc['sm']
            prev_doc = doc['sm']
            diff = (START - dt)
            t = diff.total_seconds()
            if t > 0:
                START = datetime(
                    dt.year, dt.month, dt.day,
                    tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
            if period == 'week':
                gap = ONE_DAY * 2
                wday = START.weekday()
                START = START - timedelta(wday)
            elif period == 'month':
                gap = ONE_DAY * 7
                mday = START.day - 1
                START = START - timedelta(mday)
            #print(' START', START.strftime(out_format))
        diff = (START - dt)
        t = diff.total_seconds()
        if t > 0:
            if t < gap:
                prev_doc = doc['sm']  ########## first doc of the next day
            prod_delta[last_dt] = {}
            for k in last_doc.keys():
                v2 = float(last_doc[k])
                v1 = v2
                if k in prev_doc.keys():
                    v1 = float(prev_doc[k])
                k2 = k[0:k.rfind('_')]
                diff = v2 - v1
                if k2 in prod_delta[last_dt].keys():
                    diff += prod_delta[last_dt][k2]
                prod_delta[last_dt][k2] = diff
            if VERBOSE:
                print(last_dt.strftime(out_format),
                      json.dumps(prod_delta[last_dt]))
                print('last:{}\nprev {}'.format(json.dumps(last_doc),
                                                json.dumps(prev_doc)))
            START = datetime(
                dt.year, dt.month, dt.day,
                tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
            if period == 'week':
                wday = START.weekday()
                START = START - timedelta(wday)
            elif period == 'month':
                mday = START.day - 1
                START = START - timedelta(mday)
            #print(' START', START.strftime(out_format), dt.strftime(out_format))
            last_doc = doc['sm']  ########## first doc of the next day
            prev_doc = doc['sm']
            #START = START - timedelta(1)
            last_dt = dt
        else:
            prev_doc = doc['sm']

    if dt is not None:
        prod_delta[last_dt] = {}
        for k in last_doc.keys():
            v2 = float(last_doc[k])
            v1 = v2
            if k in prev_doc.keys():
                v1 = float(prev_doc[k])
            k2 = k[0:k.rfind('_')]
            diff = v2 - v1
            if k2 in prod_delta[last_dt].keys():
                diff += prod_delta[last_dt][k2]
            prod_delta[last_dt][k2] = diff
        if VERBOSE:
            print(last_dt.strftime(out_format),
                  json.dumps(prod_delta[last_dt]))
            print('#last:{}\n prev {}'.format(json.dumps(last_doc),
                                              json.dumps(prev_doc)))
    return prod_delta

def testInvalidFile(self):
    # Should throw a ValueError if an invalid file is passed
    with self.assertRaises(ValueError):
        tz.tzfile(BytesIO(b'BadFile'))

def testFileEnd1(self):
    tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT)))
    self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tzc).tzname(),
                     "EDT")
    self.assertEqual(datetime(2003, 10, 26, 1, 00, tzinfo=tzc).tzname(),
                     "EST")

def testLeapCountDecodesProperly(self):
    # This timezone has leapcnt, and failed to decode until
    # Eugene Oden notified about the issue.
    tzc = tz.tzfile(BytesIO(base64.b64decode(NEW_YORK)))
    # Note: the datetime below is naive, so tzname() is always None; the
    # assertion only verifies that decoding the tzfile did not raise.
    self.assertEqual(datetime(2007, 3, 31, 20, 12).tzname(), None)

def testPickleTzFileEST5EDT(self):
    tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT)))
    self.assertPicklable(tzc)

def testPickleTzFileEurope_Helsinki(self):
    tzc = tz.tzfile(BytesIO(base64.b64decode(EUROPE_HELSINKI)))
    self.assertPicklable(tzc)

def testPickleTzFileNew_York(self):
    tzc = tz.tzfile(BytesIO(base64.b64decode(NEW_YORK)))
    self.assertPicklable(tzc)

def get_gmt():
    current_directory = os.path.dirname(__file__)
    gmt_path = os.path.join(current_directory, 'GMT')
    tzinfo = tzfile(gmt_path)
    return tzinfo

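# Usage sketch, assuming a compiled TZif file named 'GMT' ships alongside
# this module:
from datetime import datetime

gmt = get_gmt()
print(datetime.now(gmt).isoformat())
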
def testRoundNonFullMinutes(self):
    # This timezone has an offset of 5992 seconds in 1900-01-01.
    tzc = tz.tzfile(BytesIO(base64.b64decode(EUROPE_HELSINKI)))
    self.assertEqual(str(datetime(1900, 1, 1, 0, 0, tzinfo=tzc)),
                     "1900-01-01 00:00:00+01:40")

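# A runnable sketch of the minute-rounding the assertion above encodes:
# Helsinki's 1900 offset of 5992 seconds is 1:39:52, which dateutil rounds
# to whole minutes (datetime offsets were minute-granular before Python 3.6).
from datetime import timedelta

offset = timedelta(seconds=5992)
rounded = timedelta(minutes=round(offset.total_seconds() / 60))
print(rounded)  # 1:40:00 -> rendered as the +01:40 seen in the test
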
def main(utcnow):
    parser = OptionParser("%prog [options]")
    parser.add_option("-p", "--path", dest="path",
                      default=os.path.join(os.path.expanduser("~"), ".cronetab"),
                      help="path to the cronetab file (default is ~/.cronetab)")
    parser.add_option("-t", "--tzpath", dest="tzpath",
                      default="/usr/share/zoneinfo",
                      help="path to the timezone directory "
                           "(default is /usr/share/zoneinfo)")
    parser.add_option("--concurrent", dest="concurrent", default=False,
                      action="store_true",
                      help="run commands concurrently (default is False)")
    (options, args) = parser.parse_args()
    if not options.path or not os.path.exists(options.path):
        raise AssertionError("cronetab file %s not found" % options.path)

    # build parsing functions
    (minute, hour, date, month, day,
     begin, end, interval, timezone) = build_parsers()

    # process each line of command
    with open(options.path) as fd:
        log.info("<<<<< BEGIN >>>>>")
        for i, line in enumerate(fd):
            try:
                log.info("--- begin job %d ---", i + 1)
                # prepare the input line and skip if commented.
                log.info("input is %s", line.rstrip())
                if line.lstrip().startswith("#"):
                    log.info("skip this job as it is commented")
                    continue
                tokens = line.split()
                # check the path to the specified timezone
                tzfile = os.path.join(options.tzpath, timezone(tokens[8]))
                if not os.path.exists(tzfile):
                    log.error("timezone file %s does not exist", tzfile)
                    continue
                # calculate the current time at the timezone
                now = utcnow.astimezone(tz.tzfile(tzfile)).replace(tzinfo=None)
                log.info("now at %s is %s", timezone(tokens[8]), now)
                # check the minute condition
                if now.minute not in minute(tokens[0]):
                    log.debug("fail to meet minute condition")
                    continue
                # check the hour condition
                if now.hour not in hour(tokens[1]):
                    log.debug("fail to meet hour condition")
                    continue
                # check the date condition
                if now.day not in date(tokens[2]):
                    log.debug("fail to meet date condition")
                    continue
                # check the month condition
                if now.month not in month(tokens[3]):
                    log.debug("fail to meet month condition")
                    continue
                # check the day condition
                if now.isoweekday() % 7 not in day(tokens[4]):
                    log.debug("fail to meet day condition")
                    continue
                # check the period condition
                if not (begin(tokens[5]) <= now <= end(tokens[6])):
                    log.debug("fail to meet period condition")
                    continue
                # check the interval condition
                (key, value) = interval(tokens[7])
                diff = total_seconds(now - begin(tokens[5]))
                if key == "d":
                    if (diff / 86400) % value:
                        log.debug("fail to meet day interval condition")
                        continue
                elif key == "h":
                    if (diff / 3600) % value:
                        log.debug("fail to meet hour interval condition")
                        continue
                elif key == "m":
                    if (diff / 60) % value:
                        log.debug("fail to meet minute interval condition")
                        continue
                # execute the command
                command = " ".join(tokens[9:])
                log.info("executing command %s", command)
                exec_command(command, as_thread=options.concurrent)
            except Exception:  # keep a failing job from killing the whole run
                log.exception("unexpected error, see traces below")
            finally:
                log.info("--- end job %d ---", i + 1)
        log.info("<<<<< END >>>>>")

while True:
    old = events.list(
        calendarId=args.calID, pageToken=page_token).execute()
    old_events = old_events + old['items']
    page_token = old.get('nextPageToken')
    if not page_token:
        break

for x in old_events:
    if ('extendedProperties' in x and
            x['extendedProperties']['private']['source'] ==
            basename(args.infile)):
        events.delete(calendarId=args.calID, eventId=x['id']).execute()
    elif args.clear:
        events.delete(calendarId=args.calID, eventId=x['id']).execute()

utcfile = tzfile('/usr/share/zoneinfo/UTC')
for vevent in vcal.contents['vevent']:
    event = {}
    event['summary'] = vevent.contents['summary'][0].value
    dt = vevent.contents['dtstart'][0].value
    # Timed event
    if isinstance(dt, datetime):
        dtstart = dt.astimezone(utcfile)
        duration = vevent.contents['duration'][0].value
        dtend = dtstart + duration
        event['start'] = {'dateTime': dtstart.isoformat()}
        event['end'] = {'dateTime': dtend.isoformat()}
    # All-day event
    else:
        event['start'] = {'date': dt.isoformat()}

def get_timezone_file(f, key=None):
    return tz.tzfile(f)

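# dateutil's tz.tzfile accepts either a filesystem path or a binary
# file-like object, so both calls below are valid (paths are illustrative):
tzi = get_timezone_file('/usr/share/zoneinfo/Europe/Stockholm')
with open('/usr/share/zoneinfo/UTC', 'rb') as f:
    utc = get_timezone_file(f)
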
Mz3XMDRAS0A1C0QwNg24QDcG1bA4AA9AOMsIMDnpK8A6quowO8kNwDyKzDA9qO/APmquMD+I0cBAU8qw QWizwEIzrLBDSJXARBOOsEUxskBF83CwRxGUQEfvAjBI8XZASbxvMErRWEBLuACwTLE6QE3GBzBOUILA T5yusFBC2cBRfJCwUiv2QFNccrBUC9hAVTxUsAECAQMBBAIEAgQCBAIEAgMCAwUDAgMGBwYHBgcGBwYH BgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYH BgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYI//+9ugAA//+9ugAE//+5sAAI///HwAAI///HwAEM///V0AEM ///V0AEM///HwAAI///V0AAITE1UAFNNVABDTFQAQ0xTVAAAAAAAAAABAQEAAAAAAAABAQFUWmlmMgAA AAAAAAAAAAAAAAAAAAAAAAkAAAAJAAAAAAAAAHQAAAAJAAAAEf////9phx3G/////48wR0b/////m1zl UP////+ffOLG/////6EAccD/////sF53xv////+xdz1A/////7JBAND/////s1hwwP////+0IjRQ//// /7U5pED/////tgNn0P////+3GtfA/////7fkm1D/////uP1cwP////+5xyBQ/////8wcbkD/////zGzn 0P/////T3I/A/////9QbybD/////1TNVwP/////VdpJA//////3RPED//////pL6sP//////zM3AAAAA AABy3LAAAAAAAXVQwAAAAAACQEmwAAAAAANVMsAAAAAABCArsAAAAAAFPk9AAAAAAAYADbAAAAAABwu8 QAAAAAAH3++wAAAAAAj+E0AAAAAACb/RsAAAAAAK3fVAAAAAAAuo7jAAAAAADL3XQAAAAAANiNAwAAAA AA6duUAAAAAAD2iyMAAAAAAQhtXAAAAAABFIlDAAAAAAEma3wAAAAAATKHYwAAAAABRGmcAAAAAAFRGS sAAAAAAWJnvAAAAAABbxdLAAAAAAGAZdwAAAAAAY0VawAAAAABnmP8AAAAAAGrE4sAAAAAAbz1xAAAAA AByRGrAAAAAAHa8+QAAAAAAecPywAAAAAB+PIEAAAAAAIH8DMAAAAAAhbwJAAAAAACI5+zAAAAAAI07k QAAAAAAkGd0wAAAAACU4AMAAAAAAJfm/MAAAAAAm8vjAAAAAACfZoTAAAAAAKPfEwAAAAAApwr2wAAAA ACrXpsAAAAAAK6KfsAAAAAAst4jAAAAAAC2CgbAAAAAALpdqwAAAAAAvYmOwAAAAADCAh0AAAAAAMUJF sAAAAAAyYGlAAAAAADM91zAAAAAANEBLQAAAAAA1C0QwAAAAADYNuEAAAAAANwbVsAAAAAA4AA9AAAAA ADjLCDAAAAAAOekrwAAAAAA6quowAAAAADvJDcAAAAAAPIrMMAAAAAA9qO/AAAAAAD5qrjAAAAAAP4jR wAAAAABAU8qwAAAAAEFos8AAAAAAQjOssAAAAABDSJXAAAAAAEQTjrAAAAAARTGyQAAAAABF83CwAAAA AEcRlEAAAAAAR+8CMAAAAABI8XZAAAAAAEm8bzAAAAAAStFYQAAAAABLuACwAAAAAEyxOkAAAAAATcYH MAAAAABOUILAAAAAAE+crrAAAAAAUELZwAAAAABRfJCwAAAAAFIr9kAAAAAAU1xysAAAAABUC9hAAAAA AFU8VLABAgEDAQQCBAIEAgQCBAIDAgMFAwIDBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcG BwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcGBwYHBgcG CP//vboAAP//vboABP//ubAACP//x8AACP//x8ABDP//1dABDP//1dABDP//x8AACP//1dAACExNVABT TVQAQ0xUAENMU1QAAAAAAAAAAQEBAAAAAAAAAQEBCkNMVDMK """.replace('\n', '').strip().encode())) SANTIAGO = tz.gettz("America/Santiago") SANTIAGO_2016 = tz.tzfile(SANTIAGO_2016_DATA, filename="America/Santiago")
def main():
    parser = OptionParser(usage="""\
Query to MongoDB, by default period is day
  ./nomitor_prodTemp.py -g K208AK0051 -s "2018-04-01 11:00:00" -f "data.type=dhw,data.adr=12"
  ./nomitor_prodTemp.py -g K208AK0038 -s "2018-04-01 20:00:00"
""")
    parser.add_option('-g', '--gw', action='store', dest="gw",
                      help="""gateway""")
    parser.add_option('-s', '--st', action='store', dest="stime",
                      help="""start time (default= first data time)""")
    parser.add_option('-e', '--et', action='store', dest="etime",
                      help="""end time (default= current time)""")
    parser.add_option(
        '-f', '--filter', action='store', dest="filter",
        default="data.type=dhw",
        help="""filter data.type=dhw, data.adr=2 (default= data.type=dhw)""")
    parser.add_option('-v', '--verbose', action='store_true', dest="verbose",
                      default=False, help="""verbose actual values""")
    opts, args = parser.parse_args()

    global VERBOSE
    VERBOSE = opts.verbose
    stime = opts.stime
    etime = opts.etime
    filter = opts.filter.strip()
    gw = opts.gw
    if gw is None:
        print('nomitor serial is missing \n')
        parser.print_help()
        sys.exit(1)

    mc_dict = query_mongo.get_mc_dict(gw)
    m2adrs = query_mongo.get_mbus_adrs(mc_dict)
    if not 'dhw' in m2adrs:
        print('\nno "dhw" found in mc_dict {}\n'.format(json.dumps(m2adrs)))
        parser.print_help()
        sys.exit(1)

    fils = filter.split(',')
    if len(fils) == 1 and filter.lower().startswith('data.type='):
        i = filter.find('=')
        m = filter[i + 1:]
        status = query_mongo.check_mb_adrs(m2adrs, m)
        if len(status) > 1:
            print('\n{}\n'.format(status))
            sys.exit(1)

    prod_delta = get_dhw_mb_data(gw, stime, etime, filter)
    #keys = sorted(prod_delta.keys(), reverse=True)
    #for dt in keys:
    #    print(dt.strftime(out_format), json.dumps(prod_delta[dt]))
    #print('----------------')

    data = get_mod_data(gw, stime, etime)
    mod_data = {}
    for doc in data:
        dt = datetime.strptime(doc['ts'], "%Y-%m-%d %H:%M:%S")
        dt = dt.replace(
            tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
        if 'mod' in doc:
            mod_data[dt] = doc['mod']

    day_temp = find_max_temp(prod_delta, mod_data)
    if not VERBOSE and len(day_temp) > 0:
        vos = []
        for t in day_temp.keys():
            v = day_temp[t]
            vos.append(v['vo'])
        vos = sorted(vos)
        vmax = 0
        vh = vos[0]
        try:
            vmax = float(vos[-1])
            for x in vos:
                vh = x
                y = float(x)
                if y > vmax / 2:
                    break
        except (TypeError, ValueError):  # non-numeric 'vo' values
            pass
        tt = 0
        tc = 0
        for t in sorted(day_temp.keys()):
            v = day_temp[t]
            print(t.strftime(out_format), str(v['temp']),
                  'vo {}'.format(v['vo']))
            if v['vo'] >= vh:
                tt += float(v['temp'])
                tc += 1
        print('vmax {} vh {} tt {} tc {} => year avg {}'.format(
            vmax, vh, tt, tc, (tt / tc)))

def find_max_temp(dhw_delta, mod_data):
    START = datetime(today.year, today.month, today.day,
                     tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
    day_temp = {}
    last_dt = None
    day_max = 0
    vo_m = 0
    vo_d = 0
    day_count = 0
    keys = sorted(dhw_delta.keys(), reverse=True)
    for dt in keys:
        doc = dhw_delta[dt]
        vo = None
        for k in doc:
            vo = doc[k]
        if vo is None:
            continue
        if not dt in mod_data:
            continue
        md = mod_data[dt]
        if VERBOSE:
            print(dt.strftime(out_format), doc, md)
        if last_dt is None:
            last_dt = dt
            diff = (START - dt)
            t = diff.total_seconds()
            if t > 0:
                START = datetime(
                    dt.year, dt.month, dt.day,
                    tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
        diff = (START - dt)
        t = diff.total_seconds()
        if t > 0:
            if day_count > 0:
                if last_dt not in day_temp:
                    day_temp[last_dt] = {}
                day_temp[last_dt]['temp'] = day_max
                day_temp[last_dt]['vo'] = vo_m
                if VERBOSE:
                    print(last_dt.strftime(out_format), str(day_max),
                          'vo {} day_vo{}'.format(vo_m, vo_d))
            START = datetime(
                dt.year, dt.month, dt.day,
                tzinfo=tz.tzfile('/usr/share/zoneinfo/Europe/Amsterdam'))
            last_dt = dt
            day_max = 0
            day_count = 0
            vo_m = 0
            vo_d = 0
        if vo > 0:
            vo_d += vo
            day_count += 1
            if VERBOSE:
                print(dt.strftime(out_format), vo, md)
            if md['Ta'] > day_max:
                day_max = md['Ta']
                vo_m = vo
        last_dt = dt

    if day_count > 0:
        if last_dt not in day_temp:
            day_temp[last_dt] = {}
        day_temp[last_dt]['temp'] = day_max
        day_temp[last_dt]['vo'] = vo_m
        if VERBOSE:
            print(last_dt.strftime(out_format), str(day_max),
                  'vo {} day_vo{}'.format(vo_m, vo_d))
    return day_temp