def xlsx_to_dict(self, names_list):
    """Accumulate per-name time totals from a list of xlsx workbooks.

    Reads the first sheet of each workbook in *names_list*; column 0
    holds a name and column 1 a time string.  Times for the same name
    are summed across rows and workbooks.

    :param names_list: iterable of workbook file paths readable by xlrd.
    :return: dict mapping name -> DateTime.DateTimeDelta total.
    """
    # Renamed from 'dict' -- the original shadowed the builtin.
    totals = {}
    for path in names_list:
        workbook = xlrd.open_workbook(path)
        sheet = workbook.sheet_by_index(0)
        for row in range(sheet.nrows):
            name = sheet.cell(row, 0).value
            time_value = sheet.cell(row, 1).value
            # time_format_timedelta appears to return an indexable of at
            # least (hours, minutes, seconds) -- TODO confirm against its
            # definition.
            parts = self.time_format_timedelta(time_value)
            delta = DateTime.DateTimeDelta(0, parts[0], parts[1], parts[2])
            # Single accumulation path instead of two near-identical
            # branches gated on '(name in dict) != True'.
            if name in totals:
                totals[name] += delta
            else:
                totals[name] = delta
    return totals
def test(formats='ymd mdy dmy'):
    """Run the parser over a canned case table, timing 100 repetitions."""
    from time import clock
    testcases = [
        # Time expressions
        ("12:13:14.56", DateTime.DateTimeDelta(0, 12, 13, 14.56)),
        ("12:13:14", DateTime.DateTimeDelta(0, 12, 13, 14)),
        ("12:13", DateTime.DateTimeDelta(0, 12, 13, 0)),
        # Date or date-time expressions (mostly)
        ("Ceci n'est pas une date", None),
        ("010803", DateTime.DateTime(2001, 8, 3)),
        ("20010803", DateTime.DateTime(2001, 8, 3)),
        ("20010803 12:13:14.56", DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        ("20010803T12:13:14.56", DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        ("01-08-03", DateTime.DateTime(2001, 8, 3)),
        ("2001-08-03", DateTime.DateTime(2001, 8, 3)),
        ("2001-8-3", DateTime.DateTime(2001, 8, 3)),
        ("2001-08-03 12:13:14.56",
         DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        ("2001-08-03T12:13:14.56",
         DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        # These should be correctly recognized if tested as mdy.
        ("08/03/2001", DateTime.DateTime(2001, 8, 3)),
        ("8/3/2001", DateTime.DateTime(2001, 8, 3)),
        ("08/03/2001 12:13:14.56",
         DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        ("08/03/2001T12:13:14.56",
         DateTime.DateTime(2001, 8, 3, 12, 13, 14.56)),
        # These can only be mdy
        ("08/23/2001", DateTime.DateTime(2001, 8, 23)),
        ("8/23/2001", DateTime.DateTime(2001, 8, 23)),
        ("08/23/2001 12:13:14.56",
         DateTime.DateTime(2001, 8, 23, 12, 13, 14.56)),
        # These can only be dmy
        ("23.08.2001", DateTime.DateTime(2001, 8, 23)),
        ("23.8.2001", DateTime.DateTime(2001, 8, 23)),
        ("23.08.2001 12:13:14.56",
         DateTime.DateTime(2001, 8, 23, 12, 13, 14.56)),
    ]
    print("\nTesting numdate.py with format(s) %s" % ", ".join(formats.split()))
    started = clock()
    total = 0
    for rep in range(100):
        # Time-only cases first, then date / date-time cases; results are
        # printed only on the first repetition.
        for case in testcases[0:3]:
            outcome = _test_it(case, formats, timeonly=1)
            total = total + 1
            if outcome and rep == 0:
                print(outcome)
        for case in testcases[3:]:
            outcome = _test_it(case, formats, timeonly=0)
            total = total + 1
            if outcome and rep == 0:
                print(outcome)
    finished = clock()
    print("Done - %d cases tested in %1.5f sec." % (total, finished - started))
def operators(self):
    """Verify comparison and arithmetic operators on parsed datetimes."""
    def _test_op(a, op, b, expect):
        # String operands are parsed; None and DateTimeDelta values pass
        # through unchanged.
        aa = datetime.mx_parse_datetime(a) if a is not None else None
        if b is None or type(b) is DateTime.DateTimeDeltaType:
            bb = b
        else:
            bb = datetime.mx_parse_datetime(b)
        r = op(aa, bb)
        self.assertEqual(r, expect, '%s %s %s, expected %s, got %s' %
                         (a, op.__name__, b, expect, r))

    # Equality / inequality, including comparison against None.
    _test_op('1/1/03', operator.eq, '1/1/03', True)
    _test_op('1/1/03', operator.eq, '2/1/03', False)
    _test_op('1/1/03', operator.eq, None, False)
    _test_op('1/1/03', operator.ne, '1/1/03', False)
    _test_op('1/1/03', operator.ne, '2/1/03', True)
    _test_op('1/1/03', operator.ne, None, True)
    # Ordering, including against None.
    _test_op('1/1/03', operator.gt, '2/1/03', False)
    _test_op('2/1/03', operator.gt, '1/1/03', True)
    _test_op('2/1/03', operator.gt, None, True)
    _test_op('2/1/03', operator.lt, None, False)
    # Arithmetic with a one-day delta.
    delta = DateTime.DateTimeDelta(1)
    _test_op('2/1/03', operator.add, delta, DateTime.DateTime(2003, 1, 3))
    _test_op('2/1/03', operator.sub, delta, DateTime.DateTime(2003, 1, 1))
def __init__(self, arg):
    """Normalise *arg* into an mx DateTimeDelta time-of-day value.

    Accepts another DatetimeFormat (unwrapped first), a DateTimeDelta
    (stored as-is), a DateTime (only its time-of-day part is kept), any
    false value (stored as None), or a string parseable by parse_time.

    Raises Error when the string cannot be parsed, or when the resulting
    value spans one day or more (not a pure time of day).
    """
    # Unwrap a wrapper instance before the type dispatch below.
    if isinstance(arg, DatetimeFormat):
        arg = arg._value
    if isinstance(arg, DateTime.DateTimeDeltaType):
        self._value = arg
    elif isinstance(arg, DateTime.DateTimeType):
        # Keep only the time-of-day component of a full datetime.
        self._value = DateTime.DateTimeDelta(
            0, arg.hour, arg.minute, arg.second)
    elif not arg:
        self._value = None
    else:
        try:
            # parse_time presumably yields (hour, minute[, second]) --
            # confirm against its definition.
            self._value = DateTime.DateTimeDelta(0, *parse_time(arg))
        except DateTime.Error:
            raise Error('invalid time "%s"' % arg)
    # A non-zero day component means the value overflowed 24 hours.
    if self._value is not None and self._value.day:
        raise Error('invalid time %r' % arg)
def offset(self, info, buffer):
    """Calculate the time zone offset as a date-time delta"""
    # info is a parse-result tuple: (tag, start, end, children).
    (tag, left, right, sublist) = info
    set = singleMap(sublist, self, buffer)
    # The offset_sign interpreter yields +1 or -1; default is positive.
    direction = set.get('offset_sign', 1)
    hour = set.get("hour", 0)
    minute = set.get("minute", 0)
    # Apply the sign to both components so e.g. "-01:30" becomes -1h30m.
    delta = DateTime.DateTimeDelta(0, hour*direction, minute*direction)
    return delta
def period_check(self, filename, period):
    """Return True if the date encoded in *filename* is within *period* days.

    The part of the file name before the first '.' is expected to be an
    ISO date, e.g. ``2020-01-31.log``.

    :param filename: file name carrying a leading YYYY-MM-DD date.
    :param period: look-back window in days.
    :return bool: True when the file date is on or after now - period.
    """
    file_dates = filename.split('.')[0].split('-')
    filetime = DateTime.DateTime(int(file_dates[0]),
                                 int(file_dates[1]),
                                 int(file_dates[2]))
    periodtime = DateTime.now() - DateTime.DateTimeDelta(period)
    # Return the comparison directly instead of the if/else that
    # returned literal True/False.
    return filetime >= periodtime
def _parse_date(self, date):
    """Convert a written date into DateTime object.  Possible syntaxes are:

        YYYY-MM-DD       (2005-04-03)
        YYYY-MM-DDTHH:MM (2005-04-03T02:01)
        THH:MM           (T02:01)

    Time of day defaults to midnight.  If date is unspecified, the
    resulting time is between now and 24 hour into future.

    :raises CerebrumError: on malformed input or out-of-range dates.
    """
    if not date:
        # TBD: Is this correct behaviour?  mx.DateTime.DateTime
        # objects allow comparison to None, although that is
        # hardly what we expect/want.
        return None
    if isinstance(date, DateTime.DateTimeType):
        # Why not just return date?  Answer: We do some sanity
        # checks below.
        date = date.Format("%Y-%m-%dT%H:%M")
    if date.count('T') == 1:
        date, time = date.split('T')
        try:
            # Renamed from 'min' -- the original shadowed the builtin.
            hour, minute = [int(x) for x in time.split(':')]
        except ValueError:
            raise CerebrumError("Time of day must be on format HH:MM")
        if date == '':
            # Time-only input: pick the next occurrence of that time,
            # today or tomorrow.
            now = DateTime.now()
            target = DateTime.Date(now.year, now.month, now.day,
                                   hour, minute)
            if target < now:
                target += DateTime.DateTimeDelta(1)
            date = target.Format("%Y-%m-%d")
    else:
        hour = minute = 0
    try:
        y, m, d = [int(x) for x in date.split('-')]
    except ValueError:
        raise CerebrumError("Dates must be on format YYYY-MM-DD")
    # TODO: this should be a proper delta, but rather than using
    # pgSQL specific code, wait until Python has standardised on a
    # Date-type.
    if y > 2050:
        raise CerebrumError("Too far into the future: %r" % date)
    if y < 1800:
        raise CerebrumError("Too long ago: %r" % date)
    try:
        return DateTime.Date(y, m, d, hour, minute)
    except Exception:
        # Was a bare 'except:', which also swallowed KeyboardInterrupt
        # and SystemExit; Exception still covers mx.DateTime errors.
        raise CerebrumError("Illegal date: %r" % date)
def get_deadline(self, account):
    """ Calculates the deadline for password change.

    The returned datetime is when the account should be terminated.

    :param Cerebrum.Account account: The account to fetch a deadline
        time for.

    :return DateTime: Returns the deadline datetime.
    """
    # Fall back to today when no notification has been recorded.
    start = self.get_notification_time(account)
    if start is None:
        start = self.today
    return start + dt.DateTimeDelta(self.config.grace_period)
def mx_parse_time(self):
    """Exercise datetime.mx_parse_time on strings and mx values.

    Covers HH:MM and HH:MM:SS strings, pass-through of DateTimeDelta,
    extraction of the time-of-day from a DateTime, and rejection of
    malformed strings and deltas of 24 hours or more.
    """
    def _mx_parse_time(arg, expect):
        self._test(datetime.mx_parse_time, (arg, ), expect)
    _mx_parse_time('11:12', DateTime.DateTimeDelta(0, 11, 12))
    _mx_parse_time('11:12:13', DateTime.DateTimeDelta(0, 11, 12, 13))
    # NOTE(review): expecting ...13.999 for an input of '11:12:13' looks
    # odd; presumably self._test compares with a tolerance -- confirm.
    _mx_parse_time('11:12:13', DateTime.DateTimeDelta(0, 11, 12, 13.999))
    _mx_parse_time(DateTime.DateTimeDelta(0, 11, 12, 13.999),
                   DateTime.DateTimeDelta(0, 11, 12, 13.999))
    _mx_parse_time(DateTime.DateTime(2003, 02, 27, 11, 12, 13),
                   DateTime.DateTimeDelta(0, 11, 12, 13))
    # Four components and day-sized deltas must be rejected.
    self.assertRaises(datetime.Error, datetime.mx_parse_time,
                      '11:22:33:44')
    self.assertRaises(datetime.Error, datetime.mx_parse_time,
                      DateTime.DateTimeDelta(0, 24, 0, 0))
    self.assertRaises(datetime.Error, datetime.mx_parse_time,
                      DateTime.DateTimeDelta(1, 0, 0, 0))
def account_is_fresh(ac, co):
    """Return True when the account carries a recent new-student or
    SMS-welcome trait (set within the last FRESH_DAYS days)."""
    traits = ac.get_traits()
    cutoff = DateTime.now() - DateTime.DateTimeDelta(FRESH_DAYS)
    for trait_code in (co.trait_student_new, co.trait_sms_welcome):
        if trait_code not in traits:
            continue
        stamp = traits[trait_code]['date']
        # Traits without a date are ignored.
        if stamp and stamp > cutoff:
            return True
    return False
def get_deadline(self, account):
    """ Calculates the deadline for password change.

    The returned datetime is when the account should be terminated.

    :param Cerebrum.Account account: The account to fetch a deadline
        time for.

    :return DateTime: Returns the deadline datetime.
    """
    trait = account.get_trait(
        self.constants.EntityTrait(self.config.follow_trait))
    # Use the trait's date when present, otherwise fall back to today.
    start = trait['date'] if trait else None
    if start is None:
        start = self.today
    return start + dt.DateTimeDelta(self.config.grace_period)
def mx_parse_datetime(self):
    """Exercise datetime.mx_parse_datetime on combined date/time strings.

    Accepts both 'date time' and 'time date' orderings, exposes .time and
    .date accessors on the result, and rejects am/pm suffixes.
    """
    def _mx_parse_datetime(arg, expect):
        self._test(datetime.mx_parse_datetime, (arg, ), expect)
    _mx_parse_datetime('27/02/03 11:12:13',
                       DateTime.DateTime(2003, 02, 27, 11, 12, 13))
    _mx_parse_datetime('11:12:13 27/02/03',
                       DateTime.DateTime(2003, 02, 27, 11, 12, 13))
    # NOTE(review): expecting ...13.999 for an input second of 13 looks
    # odd; presumably self._test compares with a tolerance -- confirm.
    _mx_parse_datetime('11:12:13 27/02/03',
                       DateTime.DateTime(2003, 02, 27, 11, 12, 13.999))
    t = datetime.mx_parse_datetime('11:12:13 27/02/03')
    self._test(t.time, (), DateTime.DateTimeDelta(0, 11, 12, 13))
    self._test(t.date, (), DateTime.DateTime(2003, 02, 27))
    # am/pm suffixes, attached or spaced, must be rejected.
    self.assertRaises(datetime.Error, datetime.mx_parse_datetime,
                      '11:12:13pm 27/02/03')
    self.assertRaises(datetime.Error, datetime.mx_parse_datetime,
                      '11:12:13 pm 27/02/03')
def remind_ok(self, account):
    """Returns true if it is time to remind"""
    count = self.get_num_notifications(account)
    try:
        mapping = self.get_account_affiliation_mapping(account)
    except Errors.NotFoundError:
        mapping = None
    # Per-affiliation delays override the configured defaults.
    if mapping is not None:
        delays = mapping['warn_before_expiration_days']
    else:
        delays = self.config.reminder_delay_values
    # No reminders before the first notification or past the last delay.
    if not 0 < count <= len(delays):
        return False
    wait = dt.DateTimeDelta(delays[count - 1])
    return self.get_notification_time(account) <= self.today - wait
def numericTimeExt(s):
    """Parse *s* with _ntimeRE and return the matching time as a
    DateTimeDelta, or None when there is no match, the input is None,
    or the values are out of range."""
    if s is None:
        return None
    match = _ntimeRE.search(s)
    if match is None:
        return None
    hh = int(match.group('hour'))
    mm = int(match.group('minute'))
    sec = match.group('second')
    # The seconds group is optional; default to 0.0 when absent.
    ss = float(sec) if sec else 0.0
    try:
        return DateTime.DateTimeDelta(0, hh, mm, ss)
    except DateTime.RangeError:
        return None
def user_history_filtered(self, operator, accountname):
    """Return the account's changelog entries from the last seven days,
    formatted as tab-separated values, one entry per line."""
    self.logger.warn("in user history filtered")
    account = self._get_account(accountname)
    self.ba.can_show_history(operator.get_entity_id(), account)
    # Start date: stringify "now minus seven days" and keep the date
    # part (everything before the first space).
    timedelta = "%s" % (DateTime.mxDateTime.now() -
                        DateTime.DateTimeDelta(7))
    start_date = timedelta.split(" ")[0]
    entries = [
        self._format_changelog_entry(row)
        for row in self.db.get_log_events(0,
                                          subject_entity=account.entity_id,
                                          sdate=start_date)
    ]
    pieces = []
    for entry in entries:
        pieces.append("\n")
        for key, value in entry.items():
            pieces.append("%s\t" % str(value))
    return "".join(pieces)
def remind_ok(self, account):
    """Returns true if it is time to remind"""
    try:
        mapping = self.get_account_affiliation_mapping(account)
    except Errors.NotFoundError:
        mapping = None
    # Per-affiliation delays override the configured defaults.
    if mapping is not None:
        delays = mapping['warn_before_expiration_days']
    else:
        delays = self.config.reminder_delay_values
    # Never remind on the day of the last notification, and stop once
    # every configured reminder has been sent.
    if (self.get_notification_time(account) == self.today
            or self.get_num_notifications(account) >= len(delays)):
        return False
    # Remind when today falls exactly 'days_before' days ahead of the
    # deadline for any configured delay.
    return any(
        self.get_deadline(account) - dt.DateTimeDelta(days_before)
        == self.today
        for days_before in delays)
def RelativeLST2AbsoluteTime(lst, now=None):
    """
    Returns today's DateTime in UTC, defined as first corresponding time
    after now, from given LST in hours.

    :param lst: local sidereal time in hours.
    :param now: optional reference datetime; defaults to current UT.
    """
    lst = DateTime.DateTimeDelta(0, lst, 0, 0)
    if now is None:
        now = DateTime.gmt()
    else:
        now = dt2mxDT(now)
    # Now's mjd at 0h
    mjd0 = int(now.mjd)
    # Convert requested LST to degrees
    requested_lst = 15 * lst.hours
    # Local LMST for 0h UT in degrees
    lst0 = (180.0 / math.pi) * slalib.sla_gmst(mjd0) + GBTLONG
    # LST difference between 0h UT and requested LST
    lst_offset = requested_lst - lst0
    solar_sidereal_ratio = (365.25 / 366.25)
    # options for solar time at 1 day sidereal intervals
    options = []
    for cycle in range(720, -1080, -360):
        solar_time = ((lst_offset - cycle) / 15.0) * solar_sidereal_ratio
        mjd = mjd0 + solar_time / 24
        options.append(DateTime.DateTimeFromMJD(mjd))
    # Select the first candidate following the target time
    target = DateTime.DateTimeFromMJD(now.mjd)
    for option in options:
        if target < option:
            return mxDT2dt(option)
    # BUG FIX: the fallback indexed the loop variable ('option[-1]')
    # instead of the candidate list; return the last candidate.
    return mxDT2dt(options[-1])
def write(self, wdict, period):
    """Write name/time pairs to '<dir>\\<period>.xlsx', largest first.

    Only entries whose accumulated time exceeds ``self.time_threshold``
    (interpreted as seconds) are written; column 0 holds the name and
    column 1 the stringified time.

    :param wdict: mapping name -> DateTime.DateTimeDelta.
    :param period: used as the workbook's file name stem.
    """
    # Renamed from 'dict' -- the original shadowed the builtin.
    totals = wdict
    sorted_names = sorted(totals, key=lambda k: totals[k], reverse=True)
    path = self.dir + '\\' + str(period) + '.xlsx'
    workbook = xlsxwriter.Workbook(path)
    worksheet = workbook.add_worksheet()
    # Hoisted out of the loop; it was rebuilt for every entry.
    threshold = DateTime.DateTimeDelta(0, 0, 0, self.time_threshold)
    row = 0
    for name in sorted_names:
        # Skip entries at or below the threshold (dead 'else: pass' and
        # the unused 'i'/'col' locals removed).
        if totals[name] > threshold:
            worksheet.write(row, 0, name)
            worksheet.write(row, 1, str(totals[name]))
            row += 1
    workbook.close()
def numericTime(s):
    """
    If the input string s is a valid time expression of the form
    hh:mm:ss.sss or hh:mm:ss or hh:mm, return a corresponding
    DateTimeDelta object.  Otherwise, return None.
    """
    if s is None:
        return None
    m = _timeRE.search(s)
    if m is None:
        return None
    # The seconds group is optional; default to 0.0 when absent.
    sec = m.group('second')
    seconds = float(sec) if sec else 0.0
    try:
        return DateTime.DateTimeDelta(0, int(m.group('hour')),
                                      int(m.group('minute')), seconds)
    except DateTime.RangeError:
        return None
def sympa_remove_list(self, operator, run_host, listname, force_yes_no):
    """ Remove a sympa list from cerebrum.

    @type force_request: bool
    @param force_request: Controls whether a bofhd request should be
        issued.  This may come in handy, if we want to delete a sympa
        list from Cerebrum only and not issue any requests.  misc
        cancel_request would have worked too, but it's better to merge
        this into one command.
    """
    force_request = self._is_yes(force_yes_no)

    # Check that the command exec host is sane
    if run_host not in cereconf.SYMPA_RUN_HOSTS:
        raise CerebrumError("run-host '%s' for list '%s' is not valid"
                            % (run_host, listname))

    et, ea = self._get_email_target_and_address(listname)
    self.ba.can_email_list_delete(operator.get_entity_id(), ea)

    if et.email_target_type != self.const.email_target_Sympa:
        raise CerebrumError(
            "'%s' is not a sympa list (type: %s)"
            % (listname, self.const.EmailTarget(et.email_target_type)))

    epat = Email.EmailPrimaryAddressTarget(self.db)
    list_id = ea.entity_id

    # Now, there are *many* ETs/EAs associated with one sympa list. We
    # have to wipe them all out.
    if not self._validate_sympa_list(listname):
        # BUG FIX: the list name was passed as an extra positional
        # argument to CerebrumError instead of being interpolated
        # into the format string.
        raise CerebrumError("Illegal sympa list name: '%s'" % listname)

    deleted_EA = self.email_info(operator, listname)
    # needed for pattern interpolation below (these are actually used)
    local_part, domain = self._split_email_address(listname)
    for pattern, pipe_destination in self._sympa_addr2alias:
        address = pattern % locals()
        # For each address, find the target, and remove all email
        # addresses for that target (there may be many addresses for
        # the same target).
        try:
            ea.clear()
            ea.find_by_address(address)
            et.clear()
            et.find(ea.get_target_id())
            epat.clear()
            try:
                epat.find(et.entity_id)
            except Errors.NotFoundError:
                pass
            else:
                epat.delete()
            # Wipe all addresses...
            for row in et.get_addresses():
                addr = '%(local_part)s@%(domain)s' % row
                ea.clear()
                ea.find_by_address(addr)
                ea.delete()
            et.delete()
        except Errors.NotFoundError:
            pass

    if cereconf.INSTITUTION_DOMAIN_NAME == 'uio.no':
        self._report_deleted_EA(deleted_EA)

    if not force_request:
        return {'listname': listname, 'request': False}

    br = BofhdRequests(self.db, self.const)
    state = {'run_host': run_host, 'listname': listname}
    br.add_request(operator.get_entity_id(),
                   # IVR 2008-08-04 +1 hour to allow changes to spread to
                   # LDAP. This way we'll have a nice SMTP-error, rather
                   # than a confusing error burp from sympa.
                   DateTime.now() + DateTime.DateTimeDelta(0, 1),
                   self.const.bofh_sympa_remove,
                   list_id, None,
                   state_data=pickle.dumps(state))
    return {'listname': listname, 'request': True}
def sympa_create_list(self, operator, run_host, delivery_host, listname,
                      admins, list_profile, list_description,
                      yes_no_force="No"):
    """ Create a sympa list in Cerebrum and on the sympa server(s).

    Registers all the necessary cerebrum information and make a bofhd
    request for the actual list creation.
    """
    # Check that the profile is legal
    if list_profile not in cereconf.SYMPA_PROFILES:
        raise CerebrumError("Profile %s for sympa list %s is not valid"
                            % (list_profile, listname))

    # Check that the command exec host is sane
    if run_host not in cereconf.SYMPA_RUN_HOSTS:
        raise CerebrumError("run-host '%s' for list '%s' is not valid"
                            % (run_host, listname))

    metachars = "'\"$&()*;<>?[\\]`{|}~\n"

    def has_meta(s1, s2=metachars):
        """Check if any char of s1 is in s2"""
        return any(c in s2 for c in s1)

    # Sympa list creation command will be passed through multiple
    # exec/shells. Better be restrictive.
    # (any() replaces the 'True in [listcomp]' construct.)
    if any(has_meta(x) for x in (run_host, delivery_host, listname,
                                 admins, list_profile,
                                 list_description)):
        raise CerebrumError(
            "Illegal metacharacter in list parameter. Allowed: '%s'"
            % metachars)

    delivery_host = self._get_email_server(delivery_host)
    force = self._is_yes(yes_no_force)
    self._create_sympa_list(operator, listname, delivery_host, force=force)

    # Now make a bofhd request to create the list itself
    admin_list = list()
    for item in admins.split(","):
        # it's a user name. That username must exist in Cerebrum
        if "@" not in item:
            self._get_account(item)
            # TODO: Not good, this is in use by UIA
            item = item + "@ulrik.uio.no"
        admin_list.append(item)

    # Make the request.
    lp, dom = self._split_email_address(listname)
    ed = self._get_email_domain_from_str(dom)
    ea = Email.EmailAddress(self.db)
    ea.clear()
    ea.find_by_local_part_and_domain(lp, ed.entity_id)
    list_id = ea.entity_id

    # IVR 2008-08-01 TBD: this is a big ugly. We need to pass several
    # arguments to p_b_r, but we cannot really store them anywhere :( The
    # idea is then to take a small dict, pickle it, shove into state_data,
    # unpickle in p_b_r and be on our merry way. It is at the very best
    # suboptimal.
    state = {
        "runhost": run_host,  # IVR 2008-08-01 FIXME: non-fqdn? force?
                              # check?
        "admins": admin_list,
        "profile": list_profile,
        "description": list_description,
    }
    br = BofhdRequests(self.db, self.const)
    # IVR 2009-04-17 +30 minute delay to allow changes to spread to
    # LDAP. The postmasters are nagging for that delay. All questions
    # should be directed to them (this is similar to delaying a delete
    # request).
    br.add_request(operator.get_entity_id(),
                   DateTime.now() + DateTime.DateTimeDelta(0, 0, 30),
                   self.const.bofh_sympa_create,
                   list_id, ea.entity_id,
                   state_data=pickle.dumps(state))
    return {'listname': listname}
"""Interpret the ISO time format""" set = {} for item in sublist: set[ item[0] ] = dispatch( self, item, buffer) return DateTime.RelativeDateTime( hour = set.get("hour") or 0, minute = set.get("minute") or 0, second = set.get("second") or 0, ) integer = numbers.IntInterpreter() second = offset_minute = offset_hour = year = month = day = hour =minute =integer def offset( self, (tag, left, right, sublist), buffer): """Calculate the time zone offset as a date-time delta""" set = singleMap( sublist, self, buffer ) direction = set.get('offset_sign',1) hour = set.get( "hour", 0) minute = set.get( "minute", 0) delta = DateTime.DateTimeDelta( 0, hour*direction, minute*direction) return delta def offset_sign( self , (tag, left, right, sublist), buffer): """Interpret the offset sign as a multiplier""" v = buffer [left: right] if v in ' +': return 1 else: return -1
def need_check(user):
    """Return True when the periodic user check is due for *user*."""
    # Checks are disabled entirely when no interval is configured, and
    # freshly created users are never checked.
    if not config.user_check_interval or user.is_new():
        return False
    threshold = (DateTime.now()
                 - DateTime.DateTimeDelta(config.user_check_interval))
    # Due when the user was never checked or the last check is stale.
    return not user.checked_timestamp or user.checked_timestamp < threshold
def timelock_remain(user):
    """Return the remaining lockout time for *user*, or None when the
    user is not currently locked out."""
    now = DateTime.now()
    penalty = DateTime.DateTimeDelta(0, 0, bad_time_penalty_mins)
    # Not enough failed attempts: no lockout (and bad_timestamp is left
    # untouched, as in the original short-circuit).
    if user.bad_attempts < max_bad_passwords:
        return None
    unlock_at = user.bad_timestamp + penalty
    if unlock_at > now:
        return unlock_at - now
def get_old_account_ids(self):
    """ Returns the ID of candidate accounts with old affiliations.

    :return set: A set with Account entity_ids.
    """
    def _with_aff(affiliation=None, max_age=None):
        # Collect accounts owned by persons holding the given affiliation
        # (or affiliation *status* when the name contains '/') whose
        # password is older than max_age.  Side effect: accounts that
        # pass this stricter rule are *removed* from the enclosing
        # old_ids set, overriding the default rule for them.
        old = set()
        person = Utils.Factory.get("Person")(self.db)
        aff_or_status = self.constants.human2constant(affiliation)
        if not aff_or_status:
            self.logger.error('Unknown affiliation "%s"', affiliation)
            return old
        lookup = {
            'status' if '/' in affiliation else 'affiliation': aff_or_status
        }
        for row in person.list_affiliations(**lookup):
            person_id = row['person_id']
            # if person_id in old_ids:
            #     continue
            person.clear()
            person.find(person_id)
            # account_id = person.get_primary_account()
            for account_row in person.get_accounts():
                # consider all accounts belonging to this person
                account_id = account_row['account_id']
                if account_id:
                    history = [
                        x['set_at'] for x in ph.get_history(account_id)
                    ]
                    # Old when the newest password-set event is further
                    # back than max_age.
                    if history and (self.today - max(history) > max_age):
                        old.add(account_id)
                    else:
                        # The account does not have an expired password
                        # according to the special rules.
                        # Remove it from old_ids if it was put there
                        # by the default rules.
                        try:
                            old_ids.remove(account_id)
                        except KeyError:
                            pass
        self.logger.info(
            'Accounts with affiliation %s with old password: %s',
            str(affiliation), len(old))
        return old

    ph = PasswordHistory(self.db)
    self.logger.info('Fetching accounts with password older than %d days',
                     self.config.max_password_age)
    # Default rule: password last set before today - max_password_age.
    old_ids = set([
        int(x['account_id']) for x in ph.find_old_password_accounts((
            self.today - dt.DateTimeDelta(self.config.max_password_age)
        ).strftime(DATE_FORMAT))
    ])
    self.logger.info('Fetching accounts with no password history')
    old_ids.update(
        set([int(x['account_id'])
             for x in ph.find_no_history_accounts()]))
    # Do we have special rules for certain person affiliations?
    # We want to end with the smallest 'max_password_age'
    aff_mappings = sorted(self.config.affiliation_mappings,
                          key=lambda k: k['max_password_age'],
                          reverse=True)
    for aff_mapping in aff_mappings:
        self.logger.info(
            'Fetching accounts with affiliation %s '
            'with password older than %d days',
            str(aff_mapping['affiliation']),
            aff_mapping['max_password_age'])
        old_ids.update(
            _with_aff(affiliation=aff_mapping['affiliation'],
                      max_age=dt.DateTimeDelta(
                          aff_mapping['max_password_age'])))
    self.logger.info('Fetching quarantines')
    # TODO: Select only autopassword quarantines?
    quarantined_ids = QuarantineHandler.get_locked_entities(
        self.db,
        entity_types=self.constants.entity_account,
        entity_ids=old_ids)
    # Locked accounts are excluded from the final candidate set.
    old_ids = old_ids - quarantined_ids
    return old_ids
import unittest, string, logging from simpleparse.parser import Parser from simpleparse.common import iso_date, iso_date_loose log = logging.getLogger(__name__) try: from mx import DateTime except ImportError: log.warn("No mx.DateTime module available") else: import time try: fulltrans = string.maketrans(b"", b"") except AttributeError: fulltrans = bytes.maketrans(b"", b"") tzOffset = DateTime.DateTimeDelta(0, 0, 0, time.timezone) class CommonTests(unittest.TestCase): def testISODateLoose(self): """Test the parsing of ISO date and time formats""" values = [ ("2002-02-03", DateTime.DateTime(2002, 2, 3)), ("2002-02", DateTime.DateTime(2002, 2)), ("2002", DateTime.DateTime(2002)), ("2002-02-03 04:15", DateTime.DateTime(2002, 2, 3, 4, 15)), ("2002-02-03 04:15:16", DateTime.DateTime(2002, 2, 3, 4, 15, 16)), ("2002-02-03 04:15:16 +00:00", DateTime.DateTime(2002, 2, 3, 4, 15, 16) - tzOffset), ("2002-02-03 4:5", DateTime.DateTime(2002, 2, 3, 4, 5)), ("2002-02-03 4:5:16", DateTime.DateTime(2002, 2, 3, 4, 5, 16)), ("2002-02-03 4:5:16 +00:00",
def get_matching_accs(logger, ac, pe, co):
    """For each account:
    - checks if the owner of the account (person) is still affiliated
      - if not, go to next account
    - checks if the account is quarantined
      - if not, go to next account
    - for each quarantine, checks if 1 year has passed
      - if not, go to next account
    - saves account name, full name and quarantine information

    @param logger: logger object
    @type logger: logger

    @param ac: Account object
    @type ac: Account

    @param pe: Person object
    @type pe: Person

    @param co: Constants object
    @type co: Constants

    @return a dictionary indexed by disk, each containing a list of
        dictionaries with account information
    """
    # Get all non-expired accounts
    accounts = ac.search(owner_type=co.entity_person)
    logger.info("Found %d accounts with owner_type = 'person'"
                % len(accounts))

    # Map account_id to disk
    acc2disk = {}
    for i in ac.list_account_home():
        acc2disk[i['account_id']] = i['path']
    logger.info("Found %d accounts assigned to a disk" % len(acc2disk))

    # Map account_id to quarantines
    acc2quar = {}
    for q in ac.list_entity_quarantines(only_active=True,
                                       entity_types=co.entity_account):
        acc2quar.setdefault(q['entity_id'], [])
        acc2quar[q['entity_id']].append(q)
    logger.info("Found quarantines for %d accounts" % len(acc2quar))

    # Map person_id to full name
    person2name = {}
    for n in pe.search_person_names(name_variant=co.name_full,
                                    source_system=co.system_cached):
        person2name[n['person_id']] = n['name']
    logger.info("Found full names for %d persons" % len(person2name))

    # Add person_id to the list if the person has an affiliation
    person_has_affs = set()
    for aff in pe.list_affiliations():
        person_has_affs.add(aff['person_id'])
    logger.info("Found %d persons with affiliations" % len(person_has_affs))

    matches = {}
    for acc in accounts:
        # Is the account owner still affiliated?
        if acc['owner_id'] not in person_has_affs:
            # Is the account quarantined?
            if acc['account_id'] not in acc2quar:
                continue
            quar = {}
            for q in acc2quar[acc['account_id']]:
                # Has this quarantine been in place for > 1 year?
                # (When several qualify, the last one wins, as before.)
                if (q['start_date'] +
                        DateTime.DateTimeDelta(365)) < DateTime.now():
                    quar = {
                        'type': str(co.Quarantine(q['quarantine_type'])),
                        'description': q['description'],
                        'start_date': str(q['start_date']).split()[0],
                    }
            # Include this account in the result set if quarantined
            # for > 1 year
            if quar:
                # BUG FIX: was acc2disk.setdefault(..., None), which
                # silently *inserted* None entries into acc2disk as a
                # side effect; .get() is a pure lookup.
                disk = acc2disk.get(acc['account_id'])
                matches.setdefault(disk, [])
                matches[disk].append({
                    'account_name': acc['name'],
                    'full_name': person2name.get(acc['owner_id'],
                                                 '(not set)'),
                    'quarantine': quar,
                })
    return matches
def round_datetime_delta(d):
    """Return *d* with its seconds component rounded up to a whole second."""
    whole_seconds = int(ceil(d.second))
    return DateTime.DateTimeDelta(d.day, d.hour, d.minute, whole_seconds)