def _child_log_from(parent_log):
    child_log = ApiLog()
    child_log.parent_log = parent_log
    child_log.username = parent_log.username
    child_log.user_id = parent_log.user_id
    child_log.date_time = parent_log.date_time
    child_log.api_action = parent_log.api_action
    child_log.comment = parent_log.comment
    return child_log
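# Hypothetical usage of _child_log_from: a migration that writes one parent
# log per batch can clone the acting user and timestamp onto each child log,
# so the whole batch reads as a single operation. The 'well' resource name
# and the key values here are illustrative only.
def _example_child_logs(parent_log, well_ids):
    child_logs = []
    for well_id in well_ids:
        child_log = _child_log_from(parent_log)
        child_log.ref_resource_name = 'well'
        child_log.key = well_id
        child_log.uri = '/'.join([child_log.ref_resource_name, child_log.key])
        child_logs.append(child_log)
    return child_logs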
def create_log(_dict):
    log = ApiLog()
    log.ref_resource_name = 'useragreement'
    log.username = _dict['admin_username']
    log.user_id = _dict['performed_by_id']
    log.api_action = 'PATCH'
    log.json_field = {
        'migration': 'User Data Sharing Roles',
        'data': {
            'screensaver_user_update_activity': _dict['activity_id']
        }
    }
    if log.username is None:
        log.username = '******'
    return log
def _create_generic_log(activity):
    log = ApiLog()
    log.comment = activity.comments
    log.date_time = create_log_time(activity.date_of_activity)
    log.username = activity.performed_by.username
    if not log.username:
        # NOTE: the fallback format string is redacted in this listing; it
        # interpolates the performer's screensaver_user_id and email.
        log.username = '******' % (
            activity.performed_by.screensaver_user_id,
            activity.performed_by.email)
    log.user_id = activity.performed_by.screensaver_user_id
    return log
def create_library_comments(apps, schema_editor):
    sql_keys = [
        'activity_id', 'library_id', 'short_name', 'date_created',
        'comments', 'username', 'performed_by_id']
    sql = '''
        select
        a.activity_id,
        l.library_id,
        l.short_name,
        a.date_created,
        a.comments,
        su.username,
        a.performed_by_id
        from activity a
        join screensaver_user su on(performed_by_id=screensaver_user_id)
        join library_update_activity lua on(activity_id=lua.update_activity_id)
        join administrative_activity aa using(activity_id)
        join library l using(library_id)
        where administrative_activity_type='Comment'
        order by l.library_id asc, a.date_created asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    try:
        cursor.execute(sql)
        i = 0
        for row in cursor:
            _dict = dict(zip(sql_keys, row))
            # logger.info('processing: %r', _dict)
            log = ApiLog()
            # Note: as long as the 0004_users migration has been completed,
            # all user accounts will have a "username"
            log.username = _dict['username']
            log.user_id = _dict['performed_by_id']
            log.date_time = _dict['date_created']
            log.api_action = 'PATCH'
            log.ref_resource_name = 'library'
            log.key = _dict['short_name']
            log.uri = '/'.join([log.ref_resource_name, log.key])
            log.comment = _dict['comments']
            log.save()
            i += 1
    except Exception as e:
        logger.exception('migration exc')
        raise e
def create_well_log(self, version, prev_dict, current_dict, parent_log):
    difflog = compare_dicts(
        prev_dict, current_dict,
        excludes=['reagent_id', 'resource_uri'])
    if is_empty_diff(difflog):
        return None

    activity = version.library_contents_loading_activity.activity
    log = ApiLog()
    if getattr(activity.performed_by, 'ecommons_id', None):
        log.username = activity.performed_by.ecommons_id
    else:
        log.username = '******'
    if getattr(activity.performed_by, 'login_id', None):
        log.username = activity.performed_by.login_id
    # FIXME
    log.user_id = 1
    # log.date_time = make_aware(
    #     activity.date_created, timezone.get_default_timezone())
    log.date_time = activity.date_created
    log.ref_resource_name = self.wellResource._meta.resource_name
    # TODO: what types here? could also be a REST specifier, i.e. 'PATCH'
    log.api_action = 'MIGRATION'
    log.key = prev_dict['well_id']
    log.uri = '/db/api/v1/well/' + log.key
    # log.diff_dict_to_api_log(difflog)
    log.diffs = difflog
    log.json_field = json.dumps({
        'version': version.version_number})
    log.parent_log = parent_log
    log.save()
    return log
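# create_well_log relies on the compare_dicts/is_empty_diff helpers, which
# are not defined in this section. A minimal sketch of the assumed contract
# (hypothetical names, suffixed to avoid claiming the real API):
# compare_dicts returns {field: [old, new]} for each field that differs,
# skipping excluded keys, and is_empty_diff reports whether anything changed.
def compare_dicts_sketch(prev_dict, current_dict, excludes=None):
    excludes = excludes or []
    keys = set(prev_dict) | set(current_dict)
    return dict(
        (key, [prev_dict.get(key), current_dict.get(key)])
        for key in keys
        if key not in excludes
        and prev_dict.get(key) != current_dict.get(key))

def is_empty_diff_sketch(difflog):
    return not difflog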
def create_user_checklist_items(apps, schema_editor):
    # prerequisites:
    # - convert checklist_item / checklist_item_event entries into
    #   checklistitem.* vocabularies (migration 0003)
    # - create the user_checklist_item table (0002)
    ChecklistItem = apps.get_model('db', 'ChecklistItem')
    UserChecklistItem = apps.get_model('db', 'UserChecklistItem')
    ci_group_map = {}
    for obj in ChecklistItem.objects.all().distinct('checklist_item_group'):
        key = default_converter(obj.checklist_item_group)
        ci_group_map[obj.checklist_item_group] = key
    ci_name_map = {}
    for obj in ChecklistItem.objects.all().distinct('item_name'):
        key = default_converter(obj.item_name)
        ci_name_map[obj.item_name] = key

    # create entries in the user_checklist_item table
    # note: status values are hard-coded to correspond to the vocabulary
    # keys (created in migration 0002)
    sql_keys = [
        'suid', 'cigroup', 'ciname',
        'su_username', 'admin_username', 'admin_suid', 'admin_upid',
        'date_performed', 'date_created', 'status', 'is_notified'
    ]
    sql = '''
        select
        screening_room_user_id,
        ci.checklist_item_group,
        ci.item_name,
        su.username su_username,
        admin.username admin_username,
        admin.screensaver_user_id admin_suid,
        up.id admin_upid,
        cie.date_performed,
        cie.date_created,
        case
            when cie.is_not_applicable then 'n_a'
            when ci.is_expirable and cie.date_performed is not null then
                case when cie.is_expiration then 'deactivated'
                else 'activated' end
            when cie.date_performed is not null then 'completed'
            else 'not_completed'
        end as status,
        (select 1 from screening_room_user sru
            where sru.last_notified_smua_checklist_item_event_id =
                cie.checklist_item_event_id
         UNION
         select 1 from screening_room_user sru
            where sru.last_notified_rnaiua_checklist_item_event_id =
                cie.checklist_item_event_id
        ) as is_notified
        from checklist_item ci
        join checklist_item_event cie using(checklist_item_id)
        join screensaver_user su
            on screening_room_user_id=su.screensaver_user_id
        join screensaver_user admin
            on cie.created_by_id=admin.screensaver_user_id
        left join reports_userprofile up on up.id=admin.user_id
        order by screening_room_user_id, checklist_item_group, item_name,
            cie.date_performed asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    log_ref_resource_name = 'userchecklistitem'
    _dict = None
    log = None
    uci_hash = {}
    unique_log_keys = set()
    try:
        cursor.execute(sql)
        i = 0
        for row in cursor:
            _dict = dict(zip(sql_keys, row))
            key = '/'.join(
                [str(_dict['suid']), _dict['cigroup'], _dict['ciname']])
            previous_dict = uci_hash.get(key)
            logger.debug('previous_dict: %s:%s' % (key, previous_dict))
            date_time = pytz.timezone('US/Eastern').localize(
                _dict['date_created'])
            if date_time.date() != _dict['date_performed']:
                # only use the less accurate date_performed date if that date
                # is not equal to the date_created date
                date_time = pytz.timezone('US/Eastern').localize(
                    datetime.datetime.combine(
                        _dict['date_performed'],
                        datetime.datetime.min.time()))
            if previous_dict:
                uci = previous_dict['obj']
                uci.admin_user_id = int(_dict['admin_suid'])
                uci.status = _dict['status']
                uci.previous_status = previous_dict['status']
                if previous_dict['is_notified']:
                    # notified date will be this event - 60 days (smua/rnaiua)
                    uci.status_notified_date = (
                        _dict['date_performed'] - datetime.timedelta(days=60))
                uci.status_date = _dict['date_performed']
                logger.debug(
                    'saving, dict: %s, prev_dict: %s, status date %s, '
                    'status_notified: %s',
                    _dict, previous_dict, uci.status_date,
                    uci.status_notified_date)
                uci.save()
                logger.debug(
                    'update uci: %s,%s,%s,%s',
                    uci.status, uci.status_date,
                    uci.previous_status, uci.status_notified_date)
            else:
                uci_hash[key] = _dict
                logger.debug(
                    'create user checklist item: %s, %s',
                    _dict, _dict['date_performed'].isoformat())
                uci = UserChecklistItem.objects.create(
                    screensaver_user_id=int(_dict['suid']),
                    admin_user_id=int(_dict['admin_suid']),
                    item_group=ci_group_map[_dict['cigroup']],
                    item_name=ci_name_map[_dict['ciname']],
                    status=_dict['status'],
                    status_date=_dict['date_performed'])
                uci.save()
                _dict['obj'] = uci
                logger.debug(
                    'created uci: %s,%s', uci.status, uci.status_date)
            i += 1

            # create the apilog for this item
            log = ApiLog()
            log.ref_resource_name = log_ref_resource_name
            log.key = '/'.join(
                [_dict['su_username'], uci.item_group, uci.item_name])
            log.username = _dict['admin_username']
            log.user_id = _dict['admin_upid']
            log.date_time = date_time
            log.api_action = 'PATCH'
            log.uri = '/'.join([log.ref_resource_name, log.key])
            log.comment = 'status=%s' % _dict['status']

            # is the key (date_time, actually) unique?
            full_key = '/'.join(
                [log.ref_resource_name, log.key, str(log.date_time)])
            while full_key in unique_log_keys:
                # add a second to make it unique; because date_performed is
                # a date, multiple events on the same day would collide
                logger.info('time collision for: %s', full_key)
                log.date_time = log.date_time + datetime.timedelta(0, 1)
                full_key = '/'.join(
                    [log.ref_resource_name, log.key, str(log.date_time)])
            unique_log_keys.add(full_key)

            if previous_dict:
                diff_keys = ['status']
                diffs = {}
                logger.debug('found previous_dict: %s', previous_dict)
                diff_keys.append('admin_username')
                diffs['admin_username'] = [
                    previous_dict['admin_username'], _dict['admin_username']]
                diff_keys.append('status_date')
                diffs['status_date'] = [
                    previous_dict['date_performed'].isoformat(),
                    _dict['date_performed'].isoformat()]
                diffs['status'] = [previous_dict['status'], _dict['status']]
                diff_keys.append('previous_status')
                diffs['previous_status'] = [None, previous_dict['status']]
                log.diff_keys = json.dumps(diff_keys)
                log.diffs = json.dumps(diffs)
            logger.debug('create log: %s', log)
            log.save()
            log = None
            if i % 1000 == 0:
                logger.info('created %d logs', i)
    except Exception as e:
        logger.exception('migration exc')
        raise e
def _create_ls_log(
        date_of_activity=None, activity_id=None, screen_facility_id=None,
        performed_by_username=None, performed_by_id=None,
        created_by_username=None, created_by_id=None, comments=None,
        **kwargs):
    log = ApiLog()
    log.ref_resource_name = library_screening_resource_name
    log.key = str(activity_id)
    log.uri = '/'.join(
        ['screen', screen_facility_id, log.ref_resource_name, log.key])
    log.api_action = 'PATCH'
    log.date_time = create_log_time(log.key, date_of_activity)
    if created_by_username:
        log.username = created_by_username
    else:
        log.username = '******'
    if created_by_id:
        log.user_id = created_by_id
    else:
        log.user_id = 767
    if comments is not None:
        log.comment = comments
    else:
        log.comment = 'Library Screening log (migration)'
    return log
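# create_log_time is used throughout this section but not defined here. It
# is called both as create_log_time(date) and as create_log_time(key, date),
# and the explicit collision-handling code elsewhere suggests it turns
# (possibly duplicated) activity dates into unique, timezone-aware
# timestamps. A minimal sketch under those assumptions; the per-key seconds
# bump and the US/Eastern zone (used in the checklist migration above) are
# assumptions, and the names are hypothetical.
from collections import defaultdict

_log_times_seen_sketch = defaultdict(set)

def create_log_time_sketch(key, input_date):
    date_time = pytz.timezone('US/Eastern').localize(
        datetime.datetime.combine(input_date, datetime.datetime.min.time()))
    while date_time in _log_times_seen_sketch[key]:
        # bump by one second until the timestamp is unique for this key
        date_time += datetime.timedelta(seconds=1)
    _log_times_seen_sketch[key].add(date_time)
    return date_time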
def do_migration(self, apps, schema_editor, screen_type=None):
    i = 0
    query = apps.get_model('db', 'LibraryContentsVersion').objects.all()
    if screen_type:
        query = query.filter(library__screen_type=screen_type)
        # .exclude(library__library_type='natural_products')
    library_ids = [
        x['library'] for x in (
            query.values('library')  # actually, library id's here
                .order_by('library'))]
    logger.info('libraries to consider: %r', library_ids)
    for library in (apps.get_model('db', 'Library').objects.all()
            .filter(library_id__in=library_ids)):
        prev_version = None
        for version in (library.librarycontentsversion_set.all()
                .order_by('version_number')):
            # create an apilog for the library
            activity = version.library_contents_loading_activity.activity
            log = ApiLog()
            if getattr(activity.performed_by, 'ecommons_id', None):
                log.username = activity.performed_by.ecommons_id
            if getattr(activity.performed_by, 'user', None):
                log.user_id = getattr(
                    activity.performed_by.user, 'id', log.username)
            if not log.user_id:
                log.user_id = 1
            log.date_time = activity.date_created
            # log.date_time = make_aware(
            #     activity.date_created, timezone.get_default_timezone())
            log.ref_resource_name = self.libraryResource._meta.resource_name
            # TODO: what action? could also be a REST specifier, i.e. 'PATCH'
            log.api_action = 'PUT'
            # store the old type in the catch-all field
            log.json_field = json.dumps({
                'administrative_activity_type':
                    version.library_contents_loading_activity
                        .administrative_activity_type})
            log.uri = self.libraryResource.get_resource_uri(
                model_to_dict(library))
            log.key = '/'.join([
                str(x) for x in
                self.libraryResource.get_id(model_to_dict(library)).values()])
            # log.diff_keys = json.dumps(['version_number'])
            log.diffs = {
                'version_number': [
                    prev_version.version_number if prev_version else 0,
                    version.version_number]}
            log.comment = activity.comments
            log.save()
            if prev_version:
                self.diff_library_wells(
                    schema_editor, library, prev_version, version, log)
            prev_version = version
            # add version to library
            library.version_number = version.version_number
            library.loaded_by = activity.performed_by
            library.save()
        i = i + 1
    # TODO: 20140826
    # - set all the reagent.library values
    # - prune out all the old reagents
    print('processed: %d libraries' % i)
def create_user_checklist_from_checklist_item_events(apps, schema_editor):
    '''
    Convert ChecklistItemEvent entries into UserChecklist
    - create ApiLogs to track status changes
    - also track the status_notified
    - not idempotent; can be re-run by deleting user_checklist,
      user_agreement and reports_apilog/reports_logdiff:

    /* clear for new migration 0007 */
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'userchecklist' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'userchecklist';
    delete from user_checklist;
    delete from reports_logdiff where exists(
        select null from reports_apilog
        where ref_resource_name = 'useragreement' and log_id=id);
    delete from reports_apilog where ref_resource_name = 'useragreement';
    delete from user_agreement;
    '''
    # prerequisites:
    # - convert checklist_item / checklist_item_event entries into
    #   checklistitem.* vocabularies (migration 0003)
    # - create the user_checklist_item table (0002)
    ChecklistItem = apps.get_model('db', 'ChecklistItem')
    UserChecklist = apps.get_model('db', 'UserChecklist')
    UserAgreement = apps.get_model('db', 'UserAgreement')

    # Create a map from ci names to new names:
    uc_name_map = {}
    for obj in ChecklistItem.objects.all().distinct('item_name'):
        key = default_converter(obj.item_name)
        uc_name_map[obj.item_name] = key
    logger.info('uc_name_map: %r', uc_name_map)

    # create entries in the user_checklist table
    # note: status values are hard-coded to correspond to the vocabulary
    # keys (created in migration 0003)
    sql_keys = [
        'checklist_item_event_id', 'suid', 'cigroup', 'ciname',
        'su_username', 'admin_username', 'admin_suid', 'admin_upid',
        'date_performed', 'date_created', 'status', 'is_notified'
    ]
    sql = '''
        select
        cie.checklist_item_event_id,
        screening_room_user_id,
        ci.checklist_item_group,
        ci.item_name,
        su.username su_username,
        admin.username admin_username,
        admin.screensaver_user_id admin_suid,
        up.id admin_upid,
        cie.date_performed,
        cie.date_created,
        case
            when cie.is_not_applicable then 'n_a'
            when ci.is_expirable and cie.date_performed is not null then
                case when cie.is_expiration then 'deactivated'
                else 'activated' end
            when cie.date_performed is not null then 'completed'
            else 'not_completed'
        end as status,
        (select 1 from screening_room_user sru
            where sru.last_notified_smua_checklist_item_event_id =
                cie.checklist_item_event_id
         UNION
         select 1 from screening_room_user sru
            where sru.last_notified_rnaiua_checklist_item_event_id =
                cie.checklist_item_event_id
        ) as is_notified
        from checklist_item ci
        join checklist_item_event cie using(checklist_item_id)
        join screensaver_user su
            on screening_room_user_id=su.screensaver_user_id
        left outer join screensaver_user admin
            on cie.created_by_id=admin.screensaver_user_id
        left outer join reports_userprofile up on up.id=admin.user_id
        order by screening_room_user_id, checklist_item_group, item_name,
            checklist_item_event_id asc;
    '''
    connection = schema_editor.connection
    cursor = connection.cursor()
    resource_name = 'userchecklist'
    _dict = None
    log = None
    ucl_hash = {}
    notified_ucl_hash = {}
    cursor.execute(sql)
    i = 0
    # Iterate through the ChecklistItemEvents:
    # - Ordered by date_performed asc
    # - create a UserChecklist - key [username,checklist_item_name]
    # - first occurrence creates a new UserChecklist or UserAgreement
    # - subsequent occurrences represent updates
    # - keep track of UCLs in hash
    # - look for previous UCL
    for row in cursor:
        _dict = dict(zip(sql_keys, row))
        checklist_name = uc_name_map[_dict['ciname']]
        if checklist_name in checklist_to_agreement_map:
            resource_name = RESOURCE_USER_AGREEMENT
            key = '/'.join([
                str(_dict['suid']),
                checklist_to_agreement_map[checklist_name]])
        else:
            resource_name = RESOURCE_USER_CHECKLIST
            key = '/'.join([str(_dict['suid']), checklist_name])
        previous_dict = ucl_hash.get(key, None)
        notified_previous_dict = notified_ucl_hash.get(key, None)
        logger.debug('previous_dict: %s:%s' % (key, previous_dict))

        # Create a log for every event
        log = ApiLog()
        log.ref_resource_name = resource_name
        log.username = _dict['admin_username']
        log.user_id = _dict['suid']
        log.api_action = 'PATCH'
        log.key = key
        log.uri = '/'.join([log.ref_resource_name, log.key])
        log.json_field = {
            'migration': 'ChecklistItemEvent',
            'data': {
                'checklist_item_event_id': _dict['checklist_item_event_id']
            }
        }
        if log.username is None:
            log.username = '******'
        date_time = get_activity_date_time(
            _dict['date_created'], _dict['date_performed'])
        set_log_time(log, date_time)

        if previous_dict:
            # NOTE: for SMUA, there may be multiple events before the
            # "deactivation" event;
            # - so the previous dict will be updated multiple times here,
            # - 60 days from the last
            if 'ucl' in previous_dict:
                ucl = previous_dict['ucl']
                ucl.admin_user_id = int(_dict['admin_suid'])
                previous_state = ucl.is_checked
                ucl.is_checked = False
                if _dict['status'] in ['activated', 'completed']:
                    ucl.is_checked = True
                elif _dict['status'] == 'deactivated':
                    if notified_previous_dict:
                        ucl.date_notified = (
                            _dict['date_performed']
                            - datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, ucl.date_notified.isoformat()]
                ucl.date_effective = _dict['date_performed']
                logger.debug(
                    'dict: %s, prev_dict: %s, date_effective %s, '
                    'date_notified: %s',
                    _dict, previous_dict, ucl.date_effective,
                    ucl.date_notified)
                # Make a log
                log.diffs['is_checked'] = [previous_state, ucl.is_checked]
                if previous_dict['admin_username'] != _dict['admin_username']:
                    log.diffs['admin_username'] = [
                        previous_dict['admin_username'],
                        _dict['admin_username']]
                log.diffs['status_date'] = [
                    previous_dict['date_performed'].isoformat(),
                    _dict['date_performed'].isoformat()]
                log.diffs['status'] = [
                    previous_dict['status'], _dict['status']]
                ucl.save()
            elif 'ua' in previous_dict:
                user_agreement = previous_dict['ua']
                if _dict['status'] in ['activated', 'completed']:
                    user_agreement.date_active = _dict['date_performed']
                    previous_expired = user_agreement.date_expired
                    user_agreement.date_expired = None
                    previous_notified = user_agreement.date_notified
                    user_agreement.date_notified = None
                    # NOTE: implied that UA has been nulled out before
                    # reactivating
                    log.diffs['date_active'] = [
                        None, _dict['date_performed'].isoformat()]
                    if previous_expired is not None:
                        log.diffs['date_expired'] = [
                            previous_expired.isoformat(), None]
                    if previous_notified is not None:
                        log.diffs['date_notified'] = [
                            previous_notified.isoformat(), None]
                if _dict['status'] == 'deactivated':
                    user_agreement.date_expired = _dict['date_performed']
                    # NOTE: implied that UA has been nulled out
                    # before reactivating/deactivating
                    log.diffs['date_expired'] = [
                        None, _dict['date_performed'].isoformat()]
                    if notified_previous_dict:
                        user_agreement.date_notified = (
                            _dict['date_performed']
                            - datetime.timedelta(days=60))
                        log.diffs['date_notified'] = [
                            None, user_agreement.date_notified.isoformat()]
                user_agreement.save()
            else:
                logger.error('no obj found in prev dict: %r', previous_dict)
                raise ProgrammingError
        else:
            # create
            ucl_hash[key] = _dict
            if _dict['is_notified']:
                notified_ucl_hash[key] = _dict
            logger.debug(
                'create user checklist item: %r, %r',
                _dict, _dict['date_performed'].isoformat())
            is_checked = False
            if _dict['status'] in ['activated', 'completed']:
                is_checked = True

            if checklist_name in checklist_to_agreement_map:
                user_agreement = UserAgreement.objects.create(
                    screensaver_user_id=int(_dict['suid']),
                    type=checklist_to_agreement_map[checklist_name])
                log.api_action = 'CREATE'
                if is_checked is True:
                    user_agreement.date_active = _dict['date_performed']
                    log.diffs['date_active'] = [
                        None, _dict['date_performed'].isoformat()]
                else:
                    logger.warn('first ua record is expiration: %r', _dict)
                    user_agreement.date_expired = _dict['date_performed']
                    log.diffs['date_expired'] = [
                        None, _dict['date_performed'].isoformat()]
                user_agreement.save()
                _dict['ua'] = user_agreement
                logger.debug('created ua: %r', user_agreement)
            else:
                ucl = UserChecklist.objects.create(
                    screensaver_user_id=int(_dict['suid']),
                    admin_user_id=int(_dict['admin_suid']),
                    name=checklist_name,
                    is_checked=is_checked,
                    date_effective=_dict['date_performed'])
                ucl.save()
                _dict['ucl'] = ucl
                # Make a log
                log.api_action = 'CREATE'
                log.diffs['is_checked'] = [None, ucl.is_checked]
                log.diffs['date_effective'] = [
                    None, _dict['date_performed'].isoformat()]
                logger.debug('created ucl: %r', ucl)
        i += 1
        log.save()
        logger.debug('created log: %r, %r', log, log.diffs)
        if i % 1000 == 0:
            logger.info('created %d checklists & user_agreements', i)
    logger.info('key: %r', key)
    logger.info('created log: %r, %r', log, log.diffs)
    logger.info('created %d user_checklist and user_agreement instances', i)
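# RESOURCE_USER_CHECKLIST, RESOURCE_USER_AGREEMENT, and
# checklist_to_agreement_map are referenced above but defined elsewhere in
# the migration module. A hypothetical illustration of their shape, inferred
# from the smua/rnaiua columns in the SQL; the map keys and agreement type
# values below are assumptions, not the source definitions.
RESOURCE_USER_CHECKLIST = 'userchecklist'
RESOURCE_USER_AGREEMENT = 'useragreement'
checklist_to_agreement_map = {
    # converted checklist item name -> user agreement type (hypothetical)
    'current_small_molecule_user_agreement_active': 'sm',
    'current_rnai_user_agreement_active': 'rnai',
}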
def _create_cpr_log(
        date_of_activity=None, cpr_id=None, screen_facility_id=None,
        username=None, email=None, performed_by_id=None, comments=None,
        **kwargs):
    log = ApiLog()
    if username:
        log.username = username
    else:
        if email:
            log.username = email
        else:
            logger.info(
                'cpr log w/o username or email: %r: %r',
                cpr_id, performed_by_id)
            log.username = performed_by_id
    log.user_id = performed_by_id
    if comments is not None:
        log.comment = comments
    else:
        log.comment = 'Cherry Pick Request reservation log (migration)'
    log.ref_resource_name = cpr_resource_name
    log.key = str(cpr_id)
    log.date_time = create_log_time(log.key, date_of_activity)
    # # Hack: to avoid integrity collisions between test migrations
    # if log.date_time in extant_cpr_logs[log.key]:
    #     log.date_time = create_log_time('cpr', log.date_time)
    # extant_cpr_logs[log.key].add(log.date_time)
    log.uri = '/'.join(
        ['screen', screen_facility_id, log.ref_resource_name, log.key])
    log.api_action = 'PATCH'
    return log
def create_plate_activity_logs(apps, schema_editor):
    logger.info('create plate activity logs')
    Activity = apps.get_model('db', 'Activity')
    cols = OrderedDict({
        'activity_id': 'a.activity_id',
        'username': '******',  # NOTE: column expression redacted in this listing
        'screensaver_user_id': 'screensaver_user_id',
        'date_of_activity': 'date_of_activity',
        'comments': 'a.comments',
        'plate_number': 'plate_number',
        'copy_name': 'copy.name',
        'library_short_name': 'library.short_name',
    })
    colkeys = cols.keys()
    _cols = ', '.join([
        '%s as %s' % (value, key) for key, value in cols.items()])
    sql = (
        'select ' + _cols +
        '''
        from activity a
        join screensaver_user on(performed_by_id=screensaver_user_id)
        join plate on (activity_id=plate.plated_activity_id)
        join copy using(copy_id)
        join library using(library_id);
        ''')
    connection = schema_editor.connection
    cursor = connection.cursor()
    cursor.execute(sql)
    _list = cursor.fetchall()
    if len(_list) == 0:
        raise Exception('No plate plated_activities found with sql: %r' % sql)
    for i, _data in enumerate(_list):
        _activity = dict(zip(colkeys, _data))
        log = ApiLog()
        log.ref_resource_name = librarycopyplate_resource_name
        log.key = '/'.join([
            _activity['library_short_name'], _activity['copy_name'],
            str(int(_activity['plate_number'])).zfill(5)])
        log.uri = '/'.join([base_uri, log.ref_resource_name, log.key])
        log.comment = _activity['comments']
        log.date_time = create_log_time(_activity['date_of_activity'])
        log.username = _activity['username']
        log.user_id = _activity['screensaver_user_id']
        if "'available'" in log.comment.lower():
            log.diffs = {'status': ['not_specied', 'available']}
        elif "'not available'" in log.comment.lower():
            log.diffs = {'status': ['not_specied', 'not_available']}
        else:
            raise Exception(
                'unknown plate.plated_activity comment: %r' % _activity)
        log.save()
        if i % 1000 == 0:
            logger.info('processed %d plate plated activity logs', i)
    logger.info('processed %d plate plated activity logs', i)

    sql = (
        'select ' + _cols +
        '''
        from activity a
        join screensaver_user on(performed_by_id=screensaver_user_id)
        join plate on (activity_id=plate.retired_activity_id)
        join copy using(copy_id)
        join library using(library_id);
        ''')
    cursor.execute(sql)
    _list = cursor.fetchall()
    status_change_pattern = re.compile(r".*from '([^\']+)'.*to '([^\']+)'.*")
    if len(_list) == 0:
        raise Exception('No plate retired_activities found with sql: %r' % sql)
    status_terms_recognized = set()
    for i, _data in enumerate(_list):
        _activity = dict(zip(colkeys, _data))
        log = ApiLog()
        log.ref_resource_name = librarycopyplate_resource_name
        log.key = '/'.join([
            _activity['library_short_name'], _activity['copy_name'],
            str(int(_activity['plate_number'])).zfill(5)])
        log.uri = '/'.join([base_uri, log.ref_resource_name, log.key])
        log.comment = _activity['comments']
        log.date_time = create_log_time(_activity['date_of_activity'])
        log.username = _activity['username']
        log.user_id = _activity['screensaver_user_id']
        match = status_change_pattern.match(log.comment)
        if not match:
            raise Exception(
                'unknown plate.retired_activity comment: %r' % _activity)
        log.diffs = {'status': [
            default_converter(match.group(1)),
            default_converter(match.group(2))]}
        log.save()
        status_terms_recognized.add(default_converter(match.group(1)))
        status_terms_recognized.add(default_converter(match.group(2)))
        if i % 1000 == 0:
            logger.info('processed %d plate retired activity logs', i)
    logger.info('processed %d plate retired activity logs', i)
    logger.info('status terms recognized: %r', status_terms_recognized)
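# A quick, self-contained check of the comment-parsing pattern used above;
# the example comment wording is hypothetical, chosen only to match the
# "from '...' to '...'" shape the regex expects.
def _example_status_comment_parse():
    import re
    pattern = re.compile(r".*from '([^\']+)'.*to '([^\']+)'.*")
    match = pattern.match(
        "Status changed from 'Not specified' to 'Available'")
    assert match.group(1) == 'Not specified'
    assert match.group(2) == 'Available'
    # default_converter would then map these captures to vocabulary keys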
                log.save()
                j += 1
            logger.info('processed %d', j)
            total_corrections += j
            i += 1
            # if i > 10: break

        # finally, case c. where the wva has no parent:
        # we know these are all for one copy:
        # copy_id = 664, name = 'C', library_short_name = 'Human4 Duplexes'
        copy = apps.get_model('db', 'Copy').objects.get(pk=664)
        copy1 = apps.get_model('db', 'Copy').objects.get(pk=659)

        parent_log = ApiLog()
        parent_log.date_time = create_log_time(datetime.date(2000, 1, 1))
        parent_log.ref_resource_name = copywell_resource_name
        parent_log.key = copywell_resource_name
        parent_log.uri = '/'.join([base_uri, log.ref_resource_name, log.key])

        parent_log1 = ApiLog()
        parent_log1.date_time = create_log_time(datetime.date(2000, 1, 2))
        parent_log1.ref_resource_name = copywell_resource_name
        parent_log1.key = copywell_resource_name
        parent_log1.uri = '/'.join([base_uri, log.ref_resource_name, log.key])

        # assign to Stewart Rudnicki
        parent_log.user_id = 767
        parent_log.username = '******'
        parent_log.comment = (
            'Manual well volume correction activity with no log information')
def create_well_log(self, version, prev_dict, current_dict, parent_log):
    difflog = compare_dicts(
        prev_dict, current_dict,
        excludes=['reagent_id', 'resource_uri'],
        log_empty_strings=True)
    if is_empty_diff(difflog):
        return None

    log = ApiLog()
    if parent_log:
        log.username = parent_log.username
        log.user_id = parent_log.user_id
        log.comment = parent_log.comment
        log.date_time = parent_log.date_time
    else:
        activity = version.library_contents_loading_activity.activity
        if getattr(activity.performed_by, 'ecommons_id', None):
            log.username = activity.performed_by.ecommons_id
        else:
            log.username = '******'
        if getattr(activity.performed_by, 'login_id', None):
            log.username = activity.performed_by.login_id
        # FIXME
        log.user_id = 1
        log.date_time = activity.date_of_activity
    log.ref_resource_name = 'well'
    log.api_action = 'PATCH'
    log.key = current_dict['well_id']
    log.uri = 'well/' + log.key
    log.diffs = difflog
    log.json_field = json.dumps({
        'version': version.version_number})
    log.parent_log = parent_log
    log.save()
    return log
def do_migration(self, apps, schema_editor, screen_type=None):
    i = 0
    query = apps.get_model('db', 'LibraryContentsVersion').objects.all()
    if screen_type:
        query = query.filter(library__screen_type=screen_type)
    # Testing...
    # query = query.filter(library__short_name='Human2 Duplexes')
    library_ids = [
        x['library'] for x in (
            query.values('library')  # actually, library id's here
                .order_by('library'))]
    for library in (apps.get_model('db', 'Library').objects.all()
            .filter(library_id__in=library_ids)):
        logger.info('create well logs for %r', library.short_name)
        prev_version = None
        for version in (library.librarycontentsversion_set.all()
                .order_by('version_number')):
            # create an apilog for the library
            activity = version.library_contents_loading_activity.activity
            log = ApiLog()
            if getattr(activity.performed_by, 'ecommons_id', None):
                log.username = activity.performed_by.ecommons_id
            if log.username == 'dwrobel':
                log.username = '******'
                log.user_id = 761
            if getattr(activity.performed_by, 'user', None):
                log.user_id = getattr(
                    activity.performed_by.user, 'id', log.username)
            if not log.user_id:
                log.user_id = 1
            log.ref_resource_name = 'library'
            log.api_action = 'PATCH'
            log.json_field = {
                'migration': 'Library (contents)',
                'data': {
                    'library_contents_version.activity_id':
                        activity.activity_id,
                }
            }
            log.key = library.short_name
            log.uri = '/'.join(['library', log.key])
            log.date_time = create_log_time(
                log.key, activity.date_of_activity)
            log.diffs = {
                'version_number': [
                    prev_version.version_number if prev_version else 0,
                    version.version_number]}
            log.comment = activity.comments
            log.save()
            if prev_version:
                self.diff_library_wells(
                    schema_editor, library, prev_version, version, log)
            prev_version = version
            # add version to library
            library.version_number = version.version_number
            library.loaded_by = activity.performed_by
            library.save()
        i = i + 1
    # TODO: 20140826
    # - set all the reagent.library values
    # - prune out all the old reagents
    print('processed: %d libraries' % i)
def _create_wvac_log(date_of_activity=None, username=None, email=None,
                     performed_by_id=None, comments=None, **kwargs):
    log = ApiLog()
    if username:
        log.username = username
    else:
        log.username = email
    log.user_id = performed_by_id
    if comments is not None:
        log.comment = comments
    else:
        log.comment = 'Manual Well Volume Correction (migration)'
    log.ref_resource_name = copywell_resource_name
    log.key = copywell_resource_name
    log.uri = copywell_resource_name
    log.date_time = create_log_time(log.key, date_of_activity)
    log.api_action = 'PATCH'
    return log
def make_log(apps, input_date, ref_resource_name, key, diffs=None,
             comment=None, username=None):
    apilog_model = apps.get_model('reports', 'apilog')
    collision_counter = 0
    if diffs is None:
        diffs = {}
    log = ApiLog()
    log.date_time = create_log_time(key, input_date)
    log.user_id = 1
    log.username = username
    log.ref_resource_name = ref_resource_name
    log.key = key
    log.uri = '/'.join([DB_API_URI, ref_resource_name, key])
    log.diffs = diffs
    log.comment = comment
    try:
        # check for log key (date_time) collisions; this shouldn't
        # happen with "create_log_time()", but, in case it does
        with transaction.atomic():
            log.save()
    except IntegrityError as e:
        q = apilog_model.objects.filter(
            ref_resource_name=ref_resource_name,
            key=key).order_by('-date_time')
        if q.exists():
            max_datetime = q.values_list('date_time', flat=True)[0]
        else:
            max_datetime = log.date_time
        logger.info(
            'log time collision: %s, adjust log time from: %s to %s',
            e, max_datetime.isoformat(),
            max_datetime + timedelta(0, collision_counter))
        max_datetime += timedelta(0, collision_counter)
        times_seen.add(max_datetime)
        log.date_time = max_datetime
        collision_counter = collision_counter + 1
    return log
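# make_log saves eagerly and, on an IntegrityError, returns the log with an
# adjusted date_time but without retrying the save. A hypothetical
# caller-side retry wrapper (sketch only: the retry policy is an assumption,
# and it reuses the transaction/IntegrityError/timedelta imports the
# function above already relies on):
def save_log_with_retry(apps, input_date, ref_resource_name, key, **kwargs):
    log = make_log(apps, input_date, ref_resource_name, key, **kwargs)
    while log.pk is None:  # pk stays unset if the initial save collided
        try:
            with transaction.atomic():
                log.save()
        except IntegrityError:
            # keep bumping the timestamp by a second until it is unique
            log.date_time += timedelta(seconds=1)
    return log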
def _create_plate_activity_log(activity_dict):
    log = ApiLog()
    log.ref_resource_name = plate_resource_name
    log.api_action = 'PATCH'
    log.key = '/'.join([
        activity_dict['library_short_name'],
        activity_dict['copy_name'],
        str(int(activity_dict['plate_number']))
    ])
    log.uri = '/'.join([base_uri, log.ref_resource_name, log.key])
    log.comment = activity_dict['comments']
    log.date_time = create_log_time(
        log.key, activity_dict['date_of_activity'])
    log.username = activity_dict['username']
    if log.username is None:
        log.username = activity_dict['email']
    log.user_id = activity_dict['screensaver_user_id']
    return log
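# Hypothetical usage of _create_plate_activity_log: every field value below
# is illustrative only, showing the activity_dict shape the function expects
# (the keys read above) and that email is the fallback when username is None.
def _example_plate_activity_log():
    example_activity = {
        'library_short_name': 'lib1',            # hypothetical
        'copy_name': 'A',                        # hypothetical
        'plate_number': 1000,                    # hypothetical
        'comments': 'example comment',           # hypothetical
        'date_of_activity': datetime.date(2010, 1, 1),
        'username': None,                        # forces the email fallback
        'email': 'admin@example.com',            # hypothetical
        'screensaver_user_id': 1,                # hypothetical
    }
    log = _create_plate_activity_log(example_activity)
    assert log.username == 'admin@example.com'
    return log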
def create_log(well, activity):
    log = ApiLog()
    log.ref_resource_name = 'well'
    log.key = well.well_id
    log.uri = '/'.join(['db/api/v1', log.ref_resource_name, log.key])
    log.username = activity.performed_by.username
    log.user_id = activity.performed_by.screensaver_user_id
    log.api_action = 'PATCH'
    if log.username is None:
        log.username = '******'
    log.date_time = create_log_time(log.key, activity.date_of_activity)
    log.diffs = {'is_deprecated': [False, True]}
    log.comment = activity.comments
    log.save()
    return log
def make_log(status_item):
    logger.debug('make status log: %r', status_item)
    collision_counter = 1
    log = ApiLog()
    log.key = status_item['screen_facility_id']
    log.date_time = create_log_time(log.key, status_item['date'])
    log.username = status_item['username']
    log.user_id = status_item['user_id']
    log.ref_resource_name = 'screen'
    log.uri = '/'.join([DB_API_URI, log.ref_resource_name, log.key])
    log.diffs = status_item.get('diffs')
    log.comment = status_item.get('comments')
    log.json_field = status_item.get('json_field')
    try:
        # check for log key (date_time) collisions; this shouldn't
        # happen with "create_log_time()", but, in case it does
        with transaction.atomic():
            log.save()
    except IntegrityError as e:
        q = ApiLog.objects.filter(
            ref_resource_name=log.ref_resource_name,
            key=log.key).order_by('-date_time')
        if q.exists():
            max_datetime = q.values_list('date_time', flat=True)[0]
        else:
            max_datetime = log.date_time
        logger.info(
            'log time collision: %s, adjust log time from: %s to %s',
            e, max_datetime.isoformat(),
            max_datetime + datetime.timedelta(0, collision_counter))
        max_datetime += datetime.timedelta(0, collision_counter)
        times_seen.add(max_datetime)
        log.date_time = max_datetime
        collision_counter = collision_counter + 1
    return log