def test_serialize_deserialize(self):
    """Round-trip a CallRequest with args and kwargs through serialize()/deserialize()."""
    positional = ['fee', 'fie', 'foe', 'foo']
    keyword = {'one': 'foo', 'two': 'bar', 'three': 'baz'}
    original = CallRequest(function, positional, keyword)
    serialized = original.serialize()
    # serialize() must yield a plain dict suitable for storage
    self.assertTrue(isinstance(serialized, dict))
    restored = CallRequest.deserialize(serialized)
    self.assertTrue(isinstance(restored, CallRequest), str(type(restored)))
def test_serialize_deserialize(self):
    """Verify a CallRequest survives a serialize()/deserialize() round trip."""
    arg_list = ["fee", "fie", "foe", "foo"]
    kwarg_map = {"one": "foo", "two": "bar", "three": "baz"}
    request = CallRequest(function, arg_list, kwarg_map)
    payload = request.serialize()
    self.assertTrue(isinstance(payload, dict))
    # deserialization should reconstruct a CallRequest instance
    round_tripped = CallRequest.deserialize(payload)
    self.assertTrue(isinstance(round_tripped, CallRequest), str(type(round_tripped)))
def test_serialize_deserialize_with_execution_hook(self):
    """A registered life-cycle callback must survive serialization."""
    hook_key = dispatch_constants.CALL_CANCEL_LIFE_CYCLE_CALLBACK
    request = CallRequest(function)
    request.add_life_cycle_callback(hook_key, function)
    payload = request.serialize()
    self.assertTrue(isinstance(payload, dict))
    rebuilt = CallRequest.deserialize(payload)
    self.assertTrue(isinstance(rebuilt, CallRequest))
    # the callback is stored as the first entry under its hook key
    self.assertTrue(rebuilt.execution_hooks[hook_key][0] == function)
def test_serialize_deserialize_with_control_hook(self):
    """A registered control hook must survive serialization."""
    hook_key = dispatch_constants.CALL_CANCEL_CONTROL_HOOK
    request = CallRequest(function)
    request.add_control_hook(hook_key, function)
    payload = request.serialize()
    self.assertTrue(isinstance(payload, dict))
    rebuilt = CallRequest.deserialize(payload)
    self.assertTrue(isinstance(rebuilt, CallRequest))
    # control hooks map the key directly to the callable (no list wrapper)
    self.assertTrue(rebuilt.control_hooks[hook_key] == function)
def test_serialize_deserialize_with_execution_hook(self):
    """Serialization preserves life-cycle (execution) callbacks."""
    callback_key = dispatch_constants.CALL_CANCEL_LIFE_CYCLE_CALLBACK
    original = CallRequest(function)
    original.add_life_cycle_callback(callback_key, function)
    data = original.serialize()
    self.assertTrue(isinstance(data, dict))
    copy = CallRequest.deserialize(data)
    self.assertTrue(isinstance(copy, CallRequest))
    self.assertTrue(copy.execution_hooks[callback_key][0] == function)
def test_serialize_deserialize_with_control_hook(self):
    """Serialization preserves control hooks."""
    callback_key = dispatch_constants.CALL_CANCEL_CONTROL_HOOK
    original = CallRequest(function)
    original.add_control_hook(callback_key, function)
    data = original.serialize()
    self.assertTrue(isinstance(data, dict))
    copy = CallRequest.deserialize(data)
    self.assertTrue(isinstance(copy, CallRequest))
    self.assertTrue(copy.control_hooks[callback_key] == function)
def test_serialize_deserialize(self):
    """Serialize a populated CallRequest to a dict and rebuild it."""
    request = CallRequest(
        function,
        ['fee', 'fie', 'foe', 'foo'],
        {'one': 'foo', 'two': 'bar', 'three': 'baz'},
    )
    data = request.serialize()
    self.assertTrue(isinstance(data, dict))
    reconstructed = CallRequest.deserialize(data)
    self.assertTrue(isinstance(reconstructed, CallRequest), str(type(reconstructed)))
def _insert_scheduled_v2_repo(self, repo_id, schedule):
    """Attach a scheduled sync to a v2 repo in the test database.

    Updates the repo's importer document with a new schedule id and inserts
    the corresponding scheduled-call document.

    :param repo_id: id of the repo to schedule
    :param schedule: ISO-8601 interval string for the sync schedule
    """
    importer_oid = ObjectId()
    schedule_oid = ObjectId()
    importer_doc = {
        'importer_id': importer_oid,
        'importer_type_id': yum_repos.YUM_IMPORTER_TYPE_ID,
        'scheduled_syncs': [str(schedule_oid)],
    }
    self.tmp_test_db.database.repo_importers.update(
        {'repo_id': repo_id}, {'$set': importer_doc}, safe=True)
    call_request = CallRequest(
        sync_with_auto_publish_itinerary, [repo_id], {'overrides': {}})
    interval, start, recurrences = dateutils.parse_iso8601_interval(schedule)
    scheduled_call_doc = {
        '_id': schedule_oid,
        'id': str(schedule_oid),
        'serialized_call_request': call_request.serialize(),
        'schedule': schedule,
        'failure_threshold': None,
        'consecutive_failures': 0,
        # fall back to "now" when the schedule has no explicit start time
        'first_run': start or datetime.datetime.utcnow(),
        'next_run': None,
        'last_run': None,
        'remaining_runs': recurrences,
        'enabled': True,
    }
    scheduled_call_doc['next_run'] = all_repos._calculate_next_run(scheduled_call_doc)
    self.tmp_test_db.database.scheduled_calls.insert(scheduled_call_doc, safe=True)
def _insert_scheduled_v2_repo(self, repo_id, schedule):
    """Create importer and scheduled-call documents for a scheduled v2 repo sync.

    :param repo_id: id of the repo to schedule
    :param schedule: ISO-8601 interval string for the sync schedule
    """
    new_importer_id = ObjectId()
    new_schedule_id = ObjectId()
    db = self.tmp_test_db.database
    db.repo_importers.update(
        {'repo_id': repo_id},
        {'$set': {
            'importer_id': new_importer_id,
            'importer_type_id': yum_repos.YUM_IMPORTER_TYPE_ID,
            'scheduled_syncs': [str(new_schedule_id)],
        }},
        safe=True)
    request = CallRequest(
        sync_with_auto_publish_itinerary, [repo_id], {'overrides': {}})
    interval, start, recurrences = dateutils.parse_iso8601_interval(schedule)
    doc = {
        '_id': new_schedule_id,
        'id': str(new_schedule_id),
        'serialized_call_request': request.serialize(),
        'schedule': schedule,
        'failure_threshold': None,
        'consecutive_failures': 0,
        # schedules without an explicit start begin immediately
        'first_run': start or datetime.datetime.utcnow(),
        'next_run': None,
        'last_run': None,
        'remaining_runs': recurrences,
        'enabled': True,
    }
    doc['next_run'] = all_repos._calculate_next_run(doc)
    db.scheduled_calls.insert(doc, safe=True)
def _sync_schedules(v1_database, v2_database, report):
    """Migrate v1 repo sync schedules into v2 scheduled-call documents.

    For every v1 repo with a ``sync_schedule`` that does not yet have a v2
    schedule, build a serialized CallRequest, insert a scheduled-call
    document, and record the schedule id on the repo's v2 importer.

    :param v1_database: v1 database handle (uses the ``repos`` collection)
    :param v2_database: v2 database handle (uses ``repo_importers`` and
        ``scheduled_calls``)
    :param report: migration report; errors are written via ``report.error``
    :return: True on success; False if a repo to schedule is missing from v2
    """
    v1_repo_collection = v1_database.repos
    v2_repo_importer_collection = v2_database.repo_importers
    v2_scheduled_call_collection = v2_database.scheduled_calls

    # ugly hack to find out which repos have already been scheduled
    # necessary because $size is not a meta-query and doesn't support $gt, etc
    repos_without_schedules = v2_repo_importer_collection.find(
        {'scheduled_syncs': {'$size': 0}}, fields=['repo_id'])
    repo_ids_without_schedules = [r['repo_id'] for r in repos_without_schedules]
    repos_with_schedules = v2_repo_importer_collection.find(
        {'repo_id': {'$nin': repo_ids_without_schedules}}, fields=['repo_id'])
    repo_ids_with_schedules = [r['repo_id'] for r in repos_with_schedules]

    # v1 repos that have a sync schedule but no v2 schedule yet
    repos_to_schedule = v1_repo_collection.find(
        {'id': {'$nin': repo_ids_with_schedules}, 'sync_schedule': {'$ne': None}},
        fields=['id', 'sync_schedule', 'sync_options', 'last_sync'])

    for repo in repos_to_schedule:
        if repo['id'] not in repo_ids_without_schedules:
            # BUGFIX: the two adjacent string literals previously concatenated
            # without a separating space ("...database.sync scheduling...").
            report.error('Repository [%s] not found in the v2 database. '
                         'sync scheduling being canceled.' % repo['id'])
            return False
        args = [repo['id']]
        kwargs = {'overrides': {}}
        call_request = CallRequest(sync_with_auto_publish_itinerary, args, kwargs,
                                   principal=SystemUser())
        scheduled_call_document = {
            '_id': ObjectId(),
            'id': None,
            'serialized_call_request': None,
            'schedule': repo['sync_schedule'],
            'failure_threshold': None,
            'consecutive_failures': 0,
            'first_run': None,
            'last_run': None,
            'next_run': None,
            'remaining_runs': None,
            'enabled': True}
        scheduled_call_document['id'] = str(scheduled_call_document['_id'])
        # tag the call request with the schedule so it can be traced back
        schedule_tag = resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE,
                                    scheduled_call_document['id'])
        call_request.tags.append(schedule_tag)
        scheduled_call_document['serialized_call_request'] = call_request.serialize()
        if isinstance(repo['sync_options'], dict):
            scheduled_call_document['failure_threshold'] = \
                repo['sync_options'].get('failure_threshold', None)
        interval, start, recurrences = dateutils.parse_iso8601_interval(
            scheduled_call_document['schedule'])
        # schedules without an explicit start time begin immediately
        scheduled_call_document['first_run'] = start or datetime.utcnow()
        scheduled_call_document['remaining_runs'] = recurrences
        scheduled_call_document['next_run'] = _calculate_next_run(
            scheduled_call_document)
        if repo['last_sync'] is not None:
            scheduled_call_document['last_run'] = dateutils.to_naive_utc_datetime(
                dateutils.parse_iso8601_datetime(repo['last_sync']))
        v2_scheduled_call_collection.insert(scheduled_call_document, safe=True)
        v2_repo_importer_collection.update(
            {'repo_id': repo['id']},
            {'$push': {'scheduled_syncs': scheduled_call_document['id']}},
            safe=True)
    return True
def _sync_schedules(v1_database, v2_database, report):
    """Create v2 scheduled sync calls for v1 repos that define a sync schedule.

    Skips repos that already have a v2 schedule; aborts (returning False) if a
    repo selected for scheduling has no importer document in the v2 database.

    :param v1_database: v1 database handle (uses the ``repos`` collection)
    :param v2_database: v2 database handle (uses ``repo_importers`` and
        ``scheduled_calls``)
    :param report: migration report; errors are written via ``report.error``
    :return: True on success; False on a missing v2 repo importer
    """
    v1_repos = v1_database.repos
    v2_importers = v2_database.repo_importers
    v2_scheduled_calls = v2_database.scheduled_calls

    # ugly hack to find out which repos have already been scheduled
    # necessary because $size is not a meta-query and doesn't support $gt, etc
    unscheduled = v2_importers.find(
        {'scheduled_syncs': {'$size': 0}}, fields=['repo_id'])
    unscheduled_ids = [doc['repo_id'] for doc in unscheduled]
    scheduled = v2_importers.find(
        {'repo_id': {'$nin': unscheduled_ids}}, fields=['repo_id'])
    scheduled_ids = [doc['repo_id'] for doc in scheduled]

    candidates = v1_repos.find(
        {'id': {'$nin': scheduled_ids}, 'sync_schedule': {'$ne': None}},
        fields=['id', 'sync_schedule', 'sync_options', 'last_sync'])

    for repo in candidates:
        if repo['id'] not in unscheduled_ids:
            # BUGFIX: added the missing space between the two concatenated
            # literals ("...database.sync scheduling..." previously).
            report.error('Repository [%s] not found in the v2 database. '
                         'sync scheduling being canceled.' % repo['id'])
            return False
        call_request = CallRequest(sync_with_auto_publish_itinerary,
                                   [repo['id']], {'overrides': {}},
                                   principal=SystemUser())
        schedule_doc = {
            '_id': ObjectId(),
            'id': None,
            'serialized_call_request': None,
            'schedule': repo['sync_schedule'],
            'failure_threshold': None,
            'consecutive_failures': 0,
            'first_run': None,
            'last_run': None,
            'next_run': None,
            'remaining_runs': None,
            'enabled': True}
        schedule_doc['id'] = str(schedule_doc['_id'])
        # tag the call request so it can be traced back to this schedule
        call_request.tags.append(
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE,
                         schedule_doc['id']))
        schedule_doc['serialized_call_request'] = call_request.serialize()
        if isinstance(repo['sync_options'], dict):
            schedule_doc['failure_threshold'] = \
                repo['sync_options'].get('failure_threshold', None)
        interval, start, recurrences = dateutils.parse_iso8601_interval(
            schedule_doc['schedule'])
        # schedules without an explicit start time begin immediately
        schedule_doc['first_run'] = start or datetime.utcnow()
        schedule_doc['remaining_runs'] = recurrences
        schedule_doc['next_run'] = _calculate_next_run(schedule_doc)
        if repo['last_sync'] is not None:
            schedule_doc['last_run'] = dateutils.to_naive_utc_datetime(
                dateutils.parse_iso8601_datetime(repo['last_sync']))
        v2_scheduled_calls.insert(schedule_doc, safe=True)
        v2_importers.update(
            {'repo_id': repo['id']},
            {'$push': {'scheduled_syncs': schedule_doc['id']}},
            safe=True)
    return True