def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Normalize default data and rebuild derived records when it changed.

    :param runtime_storage_inst: runtime storage backend (read/write)
    :param default_data: raw default-data dict; normalized in place
    :param sources_root: root directory for VCS checkouts
    :param force_update: when True, reprocess even if default data is unchanged

    NOTE(review): indentation of this block was reconstructed; confirm
    against history which statements nest under the change check.
    """
    LOG.debug('Process default data')

    # Mutates default_data in place into canonical form.
    normalizer.normalize_default_data(default_data)

    # The change check is evaluated first (left of `or`) even when
    # force_update is set, so any state it records still happens.
    if (_check_default_data_change(runtime_storage_inst, default_data) or
            force_update):

        _update_default_data(runtime_storage_inst, default_data)

        LOG.debug('Gather release index for all repos')
        # Map commit ids to releases, merged across every configured repo.
        release_index = {}
        for repo in runtime_storage_inst.get_by_key('repos'):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())

        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)

        # need to iterate over full view of records and generate valid
        # users profiles
        LOG.debug('Iterate all records to create valid users profiles')
        for record in runtime_storage_inst.get_all_records():
            record_processor_inst.update_user(record)

        # update records according to generated users profiles
        LOG.debug('Update all records according to users profiles')
        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)

    # Project list retrieval runs regardless of whether default data changed.
    # NOTE(review): placement outside the change check reconstructed — verify.
    if 'project_sources' in default_data:
        _retrieve_project_list(runtime_storage_inst,
                               default_data['project_sources'])
def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Normalize and store default data, reprocessing records on change.

    The project list is fetched (and must succeed) before default data is
    written to storage; heavy record reprocessing runs only when the
    default data actually changed or an update is forced.

    :param runtime_storage_inst: runtime storage backend (read/write)
    :param default_data: raw default-data dict; normalized and extended
        in place
    :param sources_root: root directory for VCS checkouts
    :param force_update: when True, reprocess even if default data is
        unchanged
    :raises Exception: when the project list cannot be retrieved
    """
    LOG.debug('Process default data')

    # Mutates default_data in place into canonical form.
    normalizer.normalize_default_data(default_data)

    # Evaluate the change check before the project list is merged in, so
    # the comparison reflects only the user-supplied default data.
    dd_changed = _check_default_data_change(runtime_storage_inst,
                                            default_data)

    # Fail hard here: storing default data without the project list would
    # leave storage inconsistent.
    if 'project_sources' in default_data:
        if not _retrieve_project_list(default_data):
            raise Exception('Unable to retrieve project list')

    # Always persist the (normalized, extended) default data.
    _update_default_data(runtime_storage_inst, default_data)

    if (dd_changed or force_update):
        LOG.debug('Gather release index for all repos')
        # Map commit ids to releases, merged across every configured repo.
        release_index = {}
        for repo in utils.load_repos(runtime_storage_inst):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())

        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)

        # need to iterate over full view of records and generate valid
        # users profiles
        LOG.debug('Iterate all records to create valid users profiles')
        for record in runtime_storage_inst.get_all_records():
            record_processor_inst.update_user(record)

        # update records according to generated users profiles
        LOG.debug('Update all records according to users profiles')
        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)
def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Normalize default data and rebuild derived records when it changed.

    :param runtime_storage_inst: runtime storage backend (read/write)
    :param default_data: raw default-data dict; normalized in place
    :param sources_root: root directory for VCS checkouts
    :param force_update: when True, reprocess even if default data is
        unchanged

    NOTE(review): indentation of this block was reconstructed; confirm
    against history which statements nest under the change check.
    """
    LOG.debug('Process default data')

    # Mutates default_data in place into canonical form.
    normalizer.normalize_default_data(default_data)

    if (_check_default_data_change(runtime_storage_inst, default_data) or
            force_update):

        _update_default_data(runtime_storage_inst, default_data)

        LOG.debug('Gather release index for all repos')
        # Map commit ids to releases, merged across every configured repo.
        release_index = {}
        for repo in runtime_storage_inst.get_by_key('repos'):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())

        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)

        # need to iterate over full view of records and generate valid
        # users profiles
        LOG.debug('Iterate all records to create valid users profiles')
        # NOTE(review): this loop drains update() purely for its side
        # effects (presumably building user profiles); the results are
        # discarded and update() is run again below, processing every
        # record twice — confirm this double pass is intentional.
        for record in record_processor_inst.update(
                runtime_storage_inst.get_all_records(), release_index):
            pass

        # update records according to generated users profiles
        LOG.debug('Update all records according to users profiles')
        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)

    # Project list retrieval runs regardless of whether default data changed.
    # NOTE(review): placement outside the change check reconstructed — verify.
    if 'project_sources' in default_data:
        _retrieve_project_list(runtime_storage_inst,
                               default_data['project_sources'])
def setUp(self):
    """Install a stubbed user source and normalize a copy of default data."""
    super(TestDefaultDataProcessor, self).setUp()

    # Stub out user retrieval: a single page containing the test users.
    self.get_users = mock.Mock(return_value=[
        test_data.USERS,
    ])

    # Normalize a deep copy so the shared test fixture stays pristine.
    normalized = copy.deepcopy(test_data.DEFAULT_DATA)
    normalizer.normalize_default_data(normalized)
def _store_default_data(runtime_storage_inst, default_data):
    """Normalize default data and write each section into runtime storage.

    Sections with a dedicated handler in STORE_FUNCS are stored through
    it; everything else goes in verbatim under its own key.
    """
    normalizer.normalize_default_data(default_data)

    LOG.debug('Update runtime storage with default data')
    for section, payload in six.iteritems(default_data):
        if section not in STORE_FUNCS:
            # No special handling — store the raw value as-is.
            runtime_storage_inst.set_by_key(section, payload)
        else:
            STORE_FUNCS[section](runtime_storage_inst, payload)
def test_normalizer(self):
    """Normalization fills in repo releases, company end dates and user ids."""
    dd = copy.deepcopy(test_data.DEFAULT_DATA)
    normalizer.normalize_default_data(dd)

    first_repo = dd['repos'][0]
    self.assertIn('releases', first_repo)
    self.assertEqual([], first_repo['releases'],
                     message='Empty list of releases expected')

    first_user = dd['users'][0]
    self.assertEqual(0, first_user['companies'][-1]['end_date'],
                     message='The last company end date should be 0')
    self.assertIn('user_id', first_user)
    self.assertEqual(test_data.USERS[0]['launchpad_id'],
                     first_user['user_id'],
                     message='User id should be set')
def test_normalizer(self):
    """Normalization adds releases, user ids and the *independent company."""
    dd = copy.deepcopy(test_data.DEFAULT_DATA)
    normalizer.normalize_default_data(dd)

    repo = dd['repos'][0]
    self.assertIn('releases', repo)
    self.assertEqual([], repo['releases'],
                     message='Empty list of releases expected')

    user = dd['users'][0]
    self.assertEqual(0, user['companies'][-1]['end_date'],
                     message='The last company end date should be 0')
    self.assertIn('user_id', user)

    # verify that *independent company is added automatically
    second_user = dd['users'][1]
    self.assertEqual(3, len(second_user['companies']))
    self.assertEqual(0, second_user['companies'][-1]['end_date'],
                     message='The last company end date should be 0')
def process(runtime_storage_inst, default_data, sources_root):
    """Normalize default data and rebuild derived records when it changed.

    Earlier variant without a force_update flag: records are reprocessed
    only when the default data differs from what is already stored.

    :param runtime_storage_inst: runtime storage backend (read/write)
    :param default_data: raw default-data dict; normalized in place
    :param sources_root: root directory for VCS checkouts

    NOTE(review): indentation of this block was reconstructed; confirm
    against history which statements nest under the change check.
    """
    # Mutates default_data in place into canonical form.
    normalizer.normalize_default_data(default_data)

    if _check_default_data_change(runtime_storage_inst, default_data):

        _update_default_data(runtime_storage_inst, default_data)

        # Map commit ids to releases, merged across every configured repo.
        release_index = {}
        for repo in runtime_storage_inst.get_by_key('repos'):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())

        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)

        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)

    # Project list retrieval runs regardless of whether default data changed.
    # NOTE(review): placement outside the change check reconstructed — verify.
    if 'project_sources' in default_data:
        _retrieve_project_list(runtime_storage_inst,
                               default_data['project_sources'])
def _validate_default_data_correctness(self, file_name):
    """Check that the given default-data file normalizes without errors."""
    normalizer.normalize_default_data(self._read_file(file_name))