def create_well_log(self, version, prev_dict, current_dict, parent_log):
    # Record the differences between two versions of a well as an ApiLog.
    # Returns the saved log, or None when the two dicts do not differ
    # (ignoring reagent_id/resource_uri).
    difflog = compare_dicts(
        prev_dict, current_dict,
        excludes=['reagent_id', 'resource_uri'])
    if is_empty_diff(difflog):
        return None
    activity = version.library_contents_loading_activity.activity
    log = ApiLog()
    # Username resolution: prefer ecommons_id, else the redacted
    # placeholder; a login_id, when present, overrides either.
    if getattr(activity.performed_by, 'ecommons_id', None):
        log.username = activity.performed_by.ecommons_id
    else:
        log.username = '******'
    if getattr(activity.performed_by, 'login_id', None):
        log.username = activity.performed_by.login_id
    # FIXME
    log.user_id = 1
    # log.date_time = make_aware(activity.date_created,timezone.get_default_timezone())
    log.date_time = activity.date_created
    log.ref_resource_name = self.wellResource._meta.resource_name
    # TODO: what types here? could also be a REST specifier, i.e. 'PATCH'
    log.api_action = 'MIGRATION'
    # NOTE(review): keyed on the *previous* dict's well_id; the other
    # create_well_log variant in this file keys on current_dict['well_id'] —
    # presumably equivalent for the same well; confirm.
    log.key = prev_dict['well_id']
    log.uri = '/db/api/v1/well/'+log.key
    # log.diff_dict_to_api_log(difflog)
    log.diffs = difflog
    log.json_field = json.dumps({
        'version': version.version_number })
    log.parent_log = parent_log
    log.save()
    return log
def create_library_comments(apps, schema_editor):
    '''
    Migrate legacy library "Comment" administrative activities into ApiLog
    entries (one 'PATCH' log per activity on the 'library' resource).

    Activities whose comment marks the original library-contents migration
    are skipped.
    '''
    sql_keys = [
        'activity_id', 'library_id', 'short_name', 'date_created',
        'comments', 'username', 'performed_by_id']
    sql = '''
        select
            a.activity_id, l.library_id, l.short_name, a.date_created,
            a.comments, su.username, a.performed_by_id
        from activity a
        join screensaver_user su on(performed_by_id=screensaver_user_id)
        join library_update_activity lua on(activity_id=lua.update_activity_id)
        join administrative_activity aa using(activity_id)
        join library l using(library_id)
        where aa.administrative_activity_type='Comment'
        order by l.library_id asc, a.date_created asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    try:
        cursor.execute(sql)
        i = 0
        for row in cursor:
            _dict = dict(zip(sql_keys, row))
            if 'original library contents (migration)' in _dict['comments']:
                logger.info('ignoring activity log for library: %r', _dict)
                continue
            log = ApiLog()
            # Note: as long as users migration has been completed, all
            # user accounts will have a "username"
            log.username = _dict['username']
            log.user_id = _dict['performed_by_id']
            log.date_time = _dict['date_created']
            log.api_action = 'PATCH'
            log.ref_resource_name = 'library'
            log.key = _dict['short_name']
            log.uri = '/'.join([log.ref_resource_name, log.key])
            log.comment = _dict['comments']
            log.json_field = {
                'migration': 'Library comments',
                'data': {
                    'administrative_activity.activity_id':
                        _dict['activity_id']
                }
            }
            if log.username is None:
                log.username = '******'
            log.save()
            i += 1
    except Exception:
        # FIX: was the Python-2-only form 'except Exception, e' followed by
        # 'raise e' (which discards the original traceback in py2);
        # 'except Exception' + bare 'raise' preserves the traceback and is
        # consistent with the 'except ... as e' style used elsewhere in
        # this file.
        logger.exception('migration exc')
        raise
    finally:
        # FIX: the cursor was never closed on either path.
        cursor.close()
def make_log(status_item):
    '''
    Create and save an ApiLog for one screen status_item.

    The log key is the screen facility id; on a date_time uniqueness
    collision the timestamp is bumped past the latest existing log for the
    same (ref_resource_name, key) and the save is retried.
    '''
    logger.debug('make status log: %r', status_item)
    log = ApiLog()
    log.key = status_item['screen_facility_id']
    log.date_time = create_log_time(log.key, status_item['date'])
    log.username = status_item['username']
    log.user_id = status_item['user_id']
    log.ref_resource_name = 'screen'
    log.uri = '/'.join([DB_API_URI, log.ref_resource_name, log.key])
    log.diffs = status_item.get('diffs')
    log.comment = status_item.get('comments')
    log.json_field = status_item.get('json_field')
    # check for log key (date_time) collisions; this shouldn't
    # happen with the "create_log_time()", but, in case it does, bump the
    # timestamp and retry until the save succeeds.
    collision_counter = 1
    while True:
        try:
            with transaction.atomic():
                log.save()
            # FIX: the original never retried the save after adjusting
            # date_time, so a colliding log was silently dropped (the
            # collision_counter increment had no effect).
            break
        except IntegrityError as e:
            q = ApiLog.objects.filter(
                ref_resource_name=log.ref_resource_name,
                key=log.key).order_by('-date_time')
            if q.exists():
                max_datetime = (
                    q.values_list('date_time', flat=True))[0]
            else:
                max_datetime = log.date_time
            logger.info('log time collision: %s, adjust log time from : %s to %s',
                e, max_datetime.isoformat(),
                (max_datetime + datetime.timedelta(0,collision_counter)))
            max_datetime += datetime.timedelta(0,collision_counter)
            times_seen.add(max_datetime)
            log.date_time = max_datetime
            collision_counter = collision_counter + 1
def create_log(_dict):
    '''
    Build (but do not save) an ApiLog for a user-agreement PATCH, sourced
    from one migrated screensaver_user update-activity row.
    '''
    log = ApiLog()
    log.ref_resource_name = 'useragreement'
    log.api_action = 'PATCH'
    log.user_id = _dict['performed_by_id']
    # Redact the username only when the admin username is missing (None).
    admin_name = _dict['admin_username']
    log.username = '******' if admin_name is None else admin_name
    log.json_field = {
        'migration': 'User Data Sharing Roles',
        'data': {
            'screensaver_user_update_activity': _dict['activity_id'],
        },
    }
    return log
def create_log(_dict):
    '''
    Return an unsaved ApiLog recording a 'useragreement' PATCH derived
    from a migrated update-activity record.
    '''
    entry = ApiLog()
    entry.ref_resource_name = 'useragreement'
    entry.user_id = _dict['performed_by_id']
    entry.api_action = 'PATCH'
    entry.json_field = {
        'migration': 'User Data Sharing Roles',
        'data': {
            'screensaver_user_update_activity': _dict['activity_id'],
        },
    }
    # A missing (None) admin username is recorded as the redacted marker.
    username = _dict['admin_username']
    if username is None:
        username = '******'
    entry.username = username
    return entry
def create_well_log(self, version, prev_dict, current_dict, parent_log):
    # Record the differences between two versions of a well as a child
    # ApiLog of parent_log; returns None when there is nothing to record.
    difflog = compare_dicts(
        prev_dict, current_dict,
        excludes=['reagent_id', 'resource_uri'],
        log_empty_strings=True)
    if is_empty_diff(difflog):
        return None
    log = ApiLog()
    if parent_log:
        # Inherit attribution and timestamp from the parent (library) log.
        log.username = parent_log.username
        log.user_id = parent_log.user_id
        log.comment = parent_log.comment
        log.date_time = parent_log.date_time
    else:
        activity = version.library_contents_loading_activity.activity
        # Username resolution: prefer ecommons_id, else the redacted
        # placeholder; a login_id, when present, overrides either.
        if getattr(activity.performed_by, 'ecommons_id', None):
            log.username = activity.performed_by.ecommons_id
        else:
            log.username = '******'
        if getattr(activity.performed_by, 'login_id', None):
            log.username = activity.performed_by.login_id
        # FIXME
        log.user_id = 1
        log.date_time = activity.date_of_activity
    log.ref_resource_name = 'well'
    log.api_action = 'PATCH'
    log.key = current_dict['well_id']
    log.uri = 'well/'+log.key
    log.diffs = difflog
    log.json_field = json.dumps({
        'version': version.version_number })
    log.parent_log = parent_log
    log.save()
    return log
def do_migration(self, apps, schema_editor, screen_type=None):
    # For each library that has LibraryContentsVersion rows (optionally
    # restricted by screen_type), write one ApiLog per contents version and
    # diff the wells between consecutive versions; finally stamp the
    # library with its latest version_number and loader.
    i=0
    query = apps.get_model('db','LibraryContentsVersion').objects.all()
    if screen_type:
        query = (query.filter(library__screen_type=screen_type))
        #.exclude(library__library_type='natural_products'))
    library_ids = [x['library'] for x in (query
        .values('library')  # actually, library id's here
        .order_by('library') )]
    logger.info('libraries to consider: %r', library_ids)
    for library in (apps.get_model('db','Library').objects.all()
            .filter(library_id__in=library_ids)):
        prev_version = None
        for version in (library.librarycontentsversion_set.all()
                .order_by('version_number')):
            # create an apilog for the library
            activity = (version.library_contents_loading_activity.activity)
            log = ApiLog()
            if getattr(activity.performed_by, 'ecommons_id', None):
                log.username = activity.performed_by.ecommons_id
            if getattr(activity.performed_by, 'user', None):
                log.user_id = getattr(activity.performed_by.user, 'id', log.username)
            if not log.user_id:
                log.user_id = 1
            log.date_time = activity.date_created
            # log.date_time = make_aware(
            #     activity.date_created,timezone.get_default_timezone())
            log.ref_resource_name = self.libraryResource._meta.resource_name
            # TODO: what action? could also be a REST specifier, i.e. 'PATCH'
            log.api_action = 'PUT'
            # store the old type in the catch-all field
            log.json_field = json.dumps( {
                'administrative_activity_type':
                    version.library_contents_loading_activity.administrative_activity_type })
            log.uri = self.libraryResource.get_resource_uri(model_to_dict(library))
            log.key = '/'.join([str(x) for x in (
                self.libraryResource.get_id(model_to_dict(library)).values()) ])
            # log.diff_keys = json.dumps(['version_number'])
            # diff records the version_number transition (0 for the first
            # version seen).
            log.diffs = {
                'version_number': [
                    prev_version.version_number if prev_version else 0,
                    version.version_number] }
            log.comment = activity.comments
            log.save()
            if prev_version:
                self.diff_library_wells(schema_editor,library, prev_version, version, log)
            prev_version = version
        # add version to library
        # NOTE(review): uses the loop variables left over from the inner
        # loop, i.e. the *last* (highest) version and its activity.
        library.version_number = version.version_number
        library.loaded_by = activity.performed_by
        library.save()
        i=i+1
    ## TODO: 20140826
    ## - set all the reagent.library values
    ## - prune out all the old reagents
    print 'processed: ', i, 'libraries'
def create_user_checklist_from_checklist_item_events(apps, schema_editor):
    '''
    Convert ChecklistItemEvent entries into UserChecklist
    - create ApiLogs to track status changes
    - also track the status_notified
    - not idempotent, can be re-run by deleting user_checklist,
    user_agreement and reports_apilog/reports_logdiff;
    /* clear for new migration 0007 */
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'userchecklist' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'userchecklist';
    delete from user_checklist ;
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'useragreement' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'useragreement';
    delete from user_agreement ;
    '''
    # prerequisites:
    # - convert checklist_item / checklist_item_event entries into into
    # checklistitem.* vocabularies (migration 0003)
    # - create the user_checklist_item table (0002)
    ChecklistItem = apps.get_model('db','ChecklistItem')
    UserChecklist = apps.get_model('db','UserChecklist')
    UserAgreement = apps.get_model('db','UserAgreement')
    # Create a map from ci names to new names:
    uc_name_map = {}
    for obj in ChecklistItem.objects.all().distinct('item_name'):
        key = default_converter(obj.item_name)
        uc_name_map[obj.item_name] = key
    logger.info('uc_name_map: %r', uc_name_map)
    # create entries in the user_checklist table
    # note: status values are hard-coded to correspond to the vocabulary
    # keys (created in migration 0003)
    sql_keys = [
        'checklist_item_event_id', 'suid','cigroup','ciname',
        'su_username','admin_username','admin_suid','admin_upid',
        'date_performed', 'date_created','status','is_notified'
        ]
    sql = '''
        select
        cie.checklist_item_event_id,
        screening_room_user_id,
        ci.checklist_item_group,
        ci.item_name,
        su.username su_username,
        admin.username admin_username,
        admin.screensaver_user_id admin_suid,
        up.id admin_upid,
        cie.date_performed,
        cie.date_created,
        case
            when cie.is_not_applicable then 'n_a'
            when ci.is_expirable and cie.date_performed is not null then
                case when cie.is_expiration then 'deactivated'
                    else 'activated' end
            when cie.date_performed is not null then 'completed'
            else 'not_completed'
        end as status,
        ( select 1 from screening_room_user sru
            where sru.last_notified_smua_checklist_item_event_id
                = cie.checklist_item_event_id
          UNION
          select 1 from screening_room_user sru
            where sru.last_notified_rnaiua_checklist_item_event_id
                = cie.checklist_item_event_id ) as is_notified
        from checklist_item ci
        join checklist_item_event cie using(checklist_item_id)
        join screensaver_user su
            on screening_room_user_id=su.screensaver_user_id
        left outer join screensaver_user admin
            on cie.created_by_id=admin.screensaver_user_id
        left outer join reports_userprofile up on up.id=admin.user_id
        order by screening_room_user_id, checklist_item_group, item_name,
            checklist_item_event_id asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    resource_name = 'userchecklist'
    _dict = None
    log = None
    ucl_hash = {}
    notified_ucl_hash = {}
    cursor.execute(sql)
    i = 0
    # Iterate through the ChecklistItemEvents:
    # - Ordered by date_performed asc
    # - create a UserChecklist - key [username,checklist_item_name]
    # - first occurrence creates a new UserChecklist or UserAgreement
    # - subsequent occurrences represent updates
    # - keep track of UCLs in hash
    # - look for previous UCL
    for row in cursor:
        _dict = dict(zip(sql_keys,row))
        checklist_name = uc_name_map[_dict['ciname']]
        # Checklist items mapped in checklist_to_agreement_map become
        # UserAgreement records; all others become UserChecklist records.
        if checklist_name in checklist_to_agreement_map:
            resource_name = RESOURCE_USER_AGREEMENT
            key = '/'.join([
                str(_dict['suid']),
                checklist_to_agreement_map[checklist_name] ])
        else:
            resource_name = RESOURCE_USER_CHECKLIST
            key = '/'.join([str(_dict['suid']),checklist_name])
        previous_dict = ucl_hash.get(key, None)
        notified_previous_dict = notified_ucl_hash.get(key, None)
        logger.debug('previous_dict: %s:%s' % (key,previous_dict))
        # Create a log for every event
        log = ApiLog()
        log.ref_resource_name = resource_name
        log.username = _dict['admin_username']
        log.user_id = _dict['suid']
        log.api_action = 'PATCH'
        log.key = key
        log.uri = '/'.join([log.ref_resource_name,log.key])
        log.json_field = {
            'migration': 'ChecklistItemEvent',
            'data': {
                'checklist_item_event_id': _dict['checklist_item_event_id'] }
        }
        if log.username is None:
            log.username = '******'
        date_time = get_activity_date_time(
            _dict['date_created'],_dict['date_performed'])
        set_log_time(log, date_time)
        if previous_dict:
            # Subsequent event for this (user, checklist) key: update the
            # existing UserChecklist or UserAgreement.
            # NOTE: for SMUA, there may be multiple events before the
            # "deactivation" event;
            # - so the previous dict will be updated multiple times here,
            # - 60 days from the last
            if 'ucl' in previous_dict:
                ucl = previous_dict['ucl']
                ucl.admin_user_id = int(_dict['admin_suid'])
                previous_state = ucl.is_checked
                ucl.is_checked = False
                if _dict['status'] in ['activated', 'completed']:
                    ucl.is_checked = True
                elif _dict['status'] == 'deactivated':
                    if notified_previous_dict:
                        # back-date the notification 60 days before the
                        # deactivation
                        ucl.date_notified = (
                            _dict['date_performed'] -
                                datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, ucl.date_notified.isoformat()]
                ucl.date_effective = _dict['date_performed']
                logger.debug(
                    'dict: %s, prev_dict: %s, date_effective %s, date_notified: %s',
                    _dict, previous_dict, ucl.date_effective,
                    ucl.date_notified)
                # Make a log
                log.diffs['is_checked'] = [previous_state,ucl.is_checked]
                if previous_dict['admin_username'] != _dict['admin_username']:
                    log.diffs['admin_username'] = \
                        [previous_dict['admin_username'],
                            _dict['admin_username']]
                log.diffs['status_date'] = [
                    previous_dict['date_performed'].isoformat(),
                    _dict['date_performed'].isoformat()]
                log.diffs['status'] = [previous_dict['status'],_dict['status']]
                ucl.save()
            elif 'ua' in previous_dict:
                user_agreement = previous_dict['ua']
                if _dict['status'] in ['activated', 'completed']:
                    user_agreement.date_active = _dict['date_performed']
                    previous_expired = user_agreement.date_expired
                    user_agreement.date_expired = None
                    previous_notified = user_agreement.date_notified
                    user_agreement.date_notified = None
                    # NOTE: implied that UA has been nulled out before reactivating
                    log.diffs['date_active'] = \
                        [None,_dict['date_performed'].isoformat()]
                    if previous_expired is not None:
                        log.diffs['date_expired'] = \
                            [previous_expired.isoformat(), None]
                    if previous_notified is not None:
                        log.diffs['date_notified'] = \
                            [previous_notified.isoformat(), None]
                if _dict['status'] == 'deactivated':
                    user_agreement.date_expired = _dict['date_performed']
                    # NOTE: implied that UA has been nulled out
                    # before reactivating/deactivating
                    log.diffs['date_expired'] = \
                        [None,_dict['date_performed'].isoformat()]
                    if notified_previous_dict:
                        user_agreement.date_notified = (
                            _dict['date_performed']
                                - datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, user_agreement.date_notified.isoformat()]
                user_agreement.save()
            else:
                logger.error(
                    'no obj found in prev dict: %r', previous_dict)
                raise ProgrammingError
        else:
            # create
            ucl_hash[key] = _dict
            if _dict['is_notified']:
                notified_ucl_hash[key] = _dict
            logger.debug('create user checklist item: %r, %r',
                _dict, _dict['date_performed'].isoformat())
            is_checked = False
            if _dict['status'] in ['activated', 'completed']:
                is_checked = True
            if checklist_name in checklist_to_agreement_map:
                user_agreement = UserAgreement.objects.create(
                    screensaver_user_id = int(_dict['suid']),
                    type=checklist_to_agreement_map[checklist_name],
                    )
                log.api_action = 'CREATE'
                if is_checked is True:
                    user_agreement.date_active=_dict['date_performed']
                    log.diffs['date_active'] = [
                        None,_dict['date_performed'].isoformat()]
                else:
                    logger.warn('first ua record is expiration: %r', _dict)
                    user_agreement.date_expired=_dict['date_performed']
                    log.diffs['date_expired'] = [
                        None,_dict['date_performed'].isoformat()]
                user_agreement.save()
                _dict['ua'] = user_agreement
                logger.debug('created ua: %r', user_agreement)
            else:
                ucl = UserChecklist.objects.create(
                    screensaver_user_id = int(_dict['suid']),
                    admin_user_id = int(_dict['admin_suid']),
                    name = checklist_name,
                    is_checked = is_checked,
                    date_effective = _dict['date_performed'])
                ucl.save()
                _dict['ucl'] = ucl
                # Make a log
                log.api_action = 'CREATE'
                log.diffs['is_checked'] = [None,ucl.is_checked]
                log.diffs['date_effective'] = [
                    None,_dict['date_performed'].isoformat()]
                logger.debug('created ucl: %r', ucl)
        i += 1
        log.save()
        logger.debug('created log: %r, %r', log, log.diffs )
        if i%1000 == 0:
            logger.info('created %d checklists & user_agreements', i)
    logger.info('key: %r', key)
    logger.info('created log: %r, %r', log, log.diffs )
    logger.info('created %d user_checklist and user_agreement instances', i)
def do_migration(self, apps, schema_editor, screen_type=None):
    # For each library that has LibraryContentsVersion rows (optionally
    # restricted by screen_type), write one 'library' ApiLog per contents
    # version and diff the wells between consecutive versions; finally
    # stamp the library with its latest version_number and loader.
    i=0
    query = apps.get_model('db','LibraryContentsVersion').objects.all()
    if screen_type:
        query = (query.filter(library__screen_type=screen_type))
    # Testing...
    # query = query.filter(library__short_name='Human2 Duplexes')
    library_ids = [x['library'] for x in (query
        .values('library')  # actually, library id's here
        .order_by('library') )]
    for library in (apps.get_model('db','Library').objects.all()
            .filter(library_id__in=library_ids)):
        logger.info('create well logs for %r', library.short_name)
        prev_version = None
        for version in (library.librarycontentsversion_set.all()
                .order_by('version_number')):
            # create an apilog for the library
            activity = (version.library_contents_loading_activity.activity)
            log = ApiLog()
            if getattr(activity.performed_by, 'ecommons_id', None):
                log.username = activity.performed_by.ecommons_id
            # NOTE(review): hard-coded redaction/remap of one specific
            # account to user_id 761 — presumably an anonymization fixup;
            # verify against the users migration.
            if log.username == 'dwrobel':
                log.username = '******'
                log.user_id = 761
            if getattr(activity.performed_by, 'user', None):
                log.user_id = getattr(
                    activity.performed_by.user, 'id', log.username)
            if not log.user_id:
                log.user_id = 1
            log.ref_resource_name = 'library'
            log.api_action = 'PATCH'
            log.json_field = {
                'migration': 'Library (contents)',
                'data': {
                    'library_contents_version.activity_id':
                        activity.activity_id,
                }
            }
            log.key = library.short_name
            log.uri = '/'.join(['library',log.key])
            log.date_time = create_log_time(log.key, activity.date_of_activity)
            # diff records the version_number transition (0 for the first
            # version seen).
            log.diffs = {
                'version_number': [
                    prev_version.version_number if prev_version else 0,
                    version.version_number] }
            log.comment = activity.comments
            log.save()
            if prev_version:
                self.diff_library_wells(
                    schema_editor,library, prev_version, version, log)
            prev_version = version
        # add version to library
        # NOTE(review): uses the loop variables left over from the inner
        # loop, i.e. the *last* (highest) version and its activity.
        library.version_number = version.version_number
        library.loaded_by = activity.performed_by
        library.save()
        i=i+1
    ## TODO: 20140826
    ## - set all the reagent.library values
    ## - prune out all the old reagents
    print 'processed: ', i, 'libraries'
def create_user_checklist_from_checklist_item_events(apps, schema_editor):
    '''
    Convert ChecklistItemEvent entries into UserChecklist
    - create ApiLogs to track status changes
    - also track the status_notified
    - not idempotent, can be re-run by deleting user_checklist,
    user_agreement and reports_apilog/reports_logdiff;
    /* clear for new migration 0007 */
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'userchecklist' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'userchecklist';
    delete from user_checklist ;
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'useragreement' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'useragreement';
    delete from user_agreement ;
    '''
    # prerequisites:
    # - convert checklist_item / checklist_item_event entries into into
    # checklistitem.* vocabularies (migration 0003)
    # - create the user_checklist_item table (0002)
    ChecklistItem = apps.get_model('db', 'ChecklistItem')
    UserChecklist = apps.get_model('db', 'UserChecklist')
    UserAgreement = apps.get_model('db', 'UserAgreement')
    # Create a map from ci names to new names:
    uc_name_map = {}
    for obj in ChecklistItem.objects.all().distinct('item_name'):
        key = default_converter(obj.item_name)
        uc_name_map[obj.item_name] = key
    logger.info('uc_name_map: %r', uc_name_map)
    # create entries in the user_checklist table
    # note: status values are hard-coded to correspond to the vocabulary
    # keys (created in migration 0003)
    sql_keys = [
        'checklist_item_event_id', 'suid', 'cigroup', 'ciname',
        'su_username', 'admin_username', 'admin_suid', 'admin_upid',
        'date_performed', 'date_created', 'status', 'is_notified'
        ]
    sql = '''
        select
        cie.checklist_item_event_id,
        screening_room_user_id,
        ci.checklist_item_group,
        ci.item_name,
        su.username su_username,
        admin.username admin_username,
        admin.screensaver_user_id admin_suid,
        up.id admin_upid,
        cie.date_performed,
        cie.date_created,
        case
            when cie.is_not_applicable then 'n_a'
            when ci.is_expirable and cie.date_performed is not null then
                case when cie.is_expiration then 'deactivated'
                    else 'activated' end
            when cie.date_performed is not null then 'completed'
            else 'not_completed'
        end as status,
        ( select 1 from screening_room_user sru
            where sru.last_notified_smua_checklist_item_event_id
                = cie.checklist_item_event_id
          UNION
          select 1 from screening_room_user sru
            where sru.last_notified_rnaiua_checklist_item_event_id
                = cie.checklist_item_event_id ) as is_notified
        from checklist_item ci
        join checklist_item_event cie using(checklist_item_id)
        join screensaver_user su
            on screening_room_user_id=su.screensaver_user_id
        left outer join screensaver_user admin
            on cie.created_by_id=admin.screensaver_user_id
        left outer join reports_userprofile up on up.id=admin.user_id
        order by screening_room_user_id, checklist_item_group, item_name,
            checklist_item_event_id asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    resource_name = 'userchecklist'
    _dict = None
    log = None
    ucl_hash = {}
    notified_ucl_hash = {}
    cursor.execute(sql)
    i = 0
    # Iterate through the ChecklistItemEvents:
    # - Ordered by date_performed asc
    # - create a UserChecklist - key [username,checklist_item_name]
    # - first occurrence creates a new UserChecklist or UserAgreement
    # - subsequent occurrences represent updates
    # - keep track of UCLs in hash
    # - look for previous UCL
    for row in cursor:
        _dict = dict(zip(sql_keys, row))
        checklist_name = uc_name_map[_dict['ciname']]
        # Checklist items mapped in checklist_to_agreement_map become
        # UserAgreement records; all others become UserChecklist records.
        if checklist_name in checklist_to_agreement_map:
            resource_name = RESOURCE_USER_AGREEMENT
            key = '/'.join([
                str(_dict['suid']),
                checklist_to_agreement_map[checklist_name]
            ])
        else:
            resource_name = RESOURCE_USER_CHECKLIST
            key = '/'.join([str(_dict['suid']), checklist_name])
        previous_dict = ucl_hash.get(key, None)
        notified_previous_dict = notified_ucl_hash.get(key, None)
        logger.debug('previous_dict: %s:%s' % (key, previous_dict))
        # Create a log for every event
        log = ApiLog()
        log.ref_resource_name = resource_name
        log.username = _dict['admin_username']
        log.user_id = _dict['suid']
        log.api_action = 'PATCH'
        log.key = key
        log.uri = '/'.join([log.ref_resource_name, log.key])
        log.json_field = {
            'migration': 'ChecklistItemEvent',
            'data': {
                'checklist_item_event_id': _dict['checklist_item_event_id']
            }
        }
        if log.username is None:
            log.username = '******'
        date_time = get_activity_date_time(
            _dict['date_created'], _dict['date_performed'])
        set_log_time(log, date_time)
        if previous_dict:
            # Subsequent event for this (user, checklist) key: update the
            # existing UserChecklist or UserAgreement.
            # NOTE: for SMUA, there may be multiple events before the
            # "deactivation" event;
            # - so the previous dict will be updated multiple times here,
            # - 60 days from the last
            if 'ucl' in previous_dict:
                ucl = previous_dict['ucl']
                ucl.admin_user_id = int(_dict['admin_suid'])
                previous_state = ucl.is_checked
                ucl.is_checked = False
                if _dict['status'] in ['activated', 'completed']:
                    ucl.is_checked = True
                elif _dict['status'] == 'deactivated':
                    if notified_previous_dict:
                        # back-date the notification 60 days before the
                        # deactivation
                        ucl.date_notified = (_dict['date_performed'] -
                            datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, ucl.date_notified.isoformat()
                        ]
                ucl.date_effective = _dict['date_performed']
                logger.debug(
                    'dict: %s, prev_dict: %s, date_effective %s, date_notified: %s',
                    _dict, previous_dict, ucl.date_effective,
                    ucl.date_notified)
                # Make a log
                log.diffs['is_checked'] = [previous_state, ucl.is_checked]
                if previous_dict['admin_username'] != _dict['admin_username']:
                    log.diffs['admin_username'] = \
                        [previous_dict['admin_username'],
                            _dict['admin_username']]
                log.diffs['status_date'] = [
                    previous_dict['date_performed'].isoformat(),
                    _dict['date_performed'].isoformat()
                ]
                log.diffs['status'] = [
                    previous_dict['status'], _dict['status']
                ]
                ucl.save()
            elif 'ua' in previous_dict:
                user_agreement = previous_dict['ua']
                if _dict['status'] in ['activated', 'completed']:
                    user_agreement.date_active = _dict['date_performed']
                    previous_expired = user_agreement.date_expired
                    user_agreement.date_expired = None
                    previous_notified = user_agreement.date_notified
                    user_agreement.date_notified = None
                    # NOTE: implied that UA has been nulled out before reactivating
                    log.diffs['date_active'] = \
                        [None,_dict['date_performed'].isoformat()]
                    if previous_expired is not None:
                        log.diffs['date_expired'] = \
                            [previous_expired.isoformat(), None]
                    if previous_notified is not None:
                        log.diffs['date_notified'] = \
                            [previous_notified.isoformat(), None]
                if _dict['status'] == 'deactivated':
                    user_agreement.date_expired = _dict['date_performed']
                    # NOTE: implied that UA has been nulled out
                    # before reactivating/deactivating
                    log.diffs['date_expired'] = \
                        [None,_dict['date_performed'].isoformat()]
                    if notified_previous_dict:
                        user_agreement.date_notified = (
                            _dict['date_performed']
                                - datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, user_agreement.date_notified.isoformat()
                        ]
                user_agreement.save()
            else:
                logger.error('no obj found in prev dict: %r', previous_dict)
                raise ProgrammingError
        else:
            # create
            ucl_hash[key] = _dict
            if _dict['is_notified']:
                notified_ucl_hash[key] = _dict
            logger.debug('create user checklist item: %r, %r',
                _dict, _dict['date_performed'].isoformat())
            is_checked = False
            if _dict['status'] in ['activated', 'completed']:
                is_checked = True
            if checklist_name in checklist_to_agreement_map:
                user_agreement = UserAgreement.objects.create(
                    screensaver_user_id=int(_dict['suid']),
                    type=checklist_to_agreement_map[checklist_name],
                    )
                log.api_action = 'CREATE'
                if is_checked is True:
                    user_agreement.date_active = _dict['date_performed']
                    log.diffs['date_active'] = [
                        None, _dict['date_performed'].isoformat()
                    ]
                else:
                    logger.warn('first ua record is expiration: %r', _dict)
                    user_agreement.date_expired = _dict['date_performed']
                    log.diffs['date_expired'] = [
                        None, _dict['date_performed'].isoformat()
                    ]
                user_agreement.save()
                _dict['ua'] = user_agreement
                logger.debug('created ua: %r', user_agreement)
            else:
                ucl = UserChecklist.objects.create(
                    screensaver_user_id=int(_dict['suid']),
                    admin_user_id=int(_dict['admin_suid']),
                    name=checklist_name,
                    is_checked=is_checked,
                    date_effective=_dict['date_performed'])
                ucl.save()
                _dict['ucl'] = ucl
                # Make a log
                log.api_action = 'CREATE'
                log.diffs['is_checked'] = [None, ucl.is_checked]
                log.diffs['date_effective'] = [
                    None, _dict['date_performed'].isoformat()
                ]
                logger.debug('created ucl: %r', ucl)
        i += 1
        log.save()
        logger.debug('created log: %r, %r', log, log.diffs)
        if i % 1000 == 0:
            logger.info('created %d checklists & user_agreements', i)
    logger.info('key: %r', key)
    logger.info('created log: %r, %r', log, log.diffs)
    logger.info('created %d user_checklist and user_agreement instances', i)