def test_from_rpmtest(self):
    parser = ACParser(self.acfobj, with_buggy_comparisons=True)
    test_count = 0
    for first, second, exp in parser:
        test_count += 1
        ret = rpm_vercmp.vercmp(first, second)
        self.assertEqual(exp, ret)
    # Make sure we still test something; in case the m4 file drops
    # content this will fail the test
    self.assertGreater(test_count, 20)
def _process_delta_pair(prod_instance_id, source_instance_id, bucket, delta_date, s3_client):

    def key_paths(source_instance_id, prod_instance_id):
        """Form S3 keys based on variables."""
        non_prod_pkg_list_key = 'patching-state/{}/{}.json'.format(
            delta_date, source_instance_id)
        prod_pkg_list_key = 'patching-state/{}/{}.json'.format(
            delta_date, prod_instance_id)
        mismatch_versions_key = 'patching-state/{}/{}/version-mismatch.json'.format(
            delta_date, prod_instance_id)
        not_on_prod_key = 'patching-state/{}/{}/not-on-prod.csv'.format(
            delta_date, prod_instance_id)
        out_of_date_key = 'patching-state/{}/{}/out-of-date.csv'.format(
            delta_date, prod_instance_id)
        return (non_prod_pkg_list_key, prod_pkg_list_key, mismatch_versions_key,
                not_on_prod_key, out_of_date_key)

    def format_str(instance_id, pkg_list):
        return instance_id if pkg_list else instance_id + ' (Missing pkg list)'

    logging.info('Processing pair %s and %s', prod_instance_id, source_instance_id)
    (non_prod_pkg_list_key, prod_pkg_list_key, mismatch_versions_key,
     not_on_prod_key, out_of_date_key) = key_paths(source_instance_id, prod_instance_id)
    prod_s3_url = make_s3_url(bucket, prod_pkg_list_key)
    non_prod_s3_url = make_s3_url(bucket, non_prod_pkg_list_key)
    prod_pkg_list = retrieve_json_from_s3(s3_client, bucket, prod_pkg_list_key)
    nonprod_pkg_list = retrieve_json_from_s3(s3_client, bucket, non_prod_pkg_list_key)

    if not prod_pkg_list or not nonprod_pkg_list:
        logging.error('Missing pkg list')
        return {
            'Prod Instance': AREF.format(prod_s3_url, format_str(prod_instance_id, prod_pkg_list)),
            'Non-Prod Instance': AREF.format(non_prod_s3_url, format_str(source_instance_id, nonprod_pkg_list)),
            'Updates Needed': '',
            'Not installed on Prod': '',
            'Version Mismatch': '',
        }

    # Not on prod but installed on nonprod
    not_on_prod = set(nonprod_pkg_list).difference(set(prod_pkg_list))

    # Version mismatch
    mismatch_versions = {}
    old_versions = []
    for pkg in nonprod_pkg_list:
        if pkg in prod_pkg_list and rpm_vercmp.vercmp(nonprod_pkg_list[pkg], prod_pkg_list[pkg]) == 1:
            mismatch_versions[pkg] = {
                'prod': prod_pkg_list[pkg],
                'nonprod': nonprod_pkg_list[pkg]
            }
            old_versions.append(pkg + "-" + nonprod_pkg_list[pkg])

    # Uploading files to S3
    put_file_to_s3(s3_client, '\n'.join(list(not_on_prod)), bucket, not_on_prod_key)
    put_file_to_s3(s3_client, '\n'.join(list(old_versions)), bucket, out_of_date_key)
    put_file_to_s3(s3_client, json.dumps(mismatch_versions, indent=4), bucket, mismatch_versions_key)

    return {
        'Prod Instance': AREF.format(prod_s3_url, prod_instance_id),
        'Non-Prod Instance': AREF.format(non_prod_s3_url, source_instance_id),
        'Updates Needed': AREF.format(make_s3_url(bucket, out_of_date_key), 'Out of date packages'),
        'Not installed on Prod': AREF.format(make_s3_url(bucket, not_on_prod_key), 'Not installed on Prod'),
        'Version Mismatch': AREF.format(make_s3_url(bucket, mismatch_versions_key), 'Version Mismatch'),
    }
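# A hedged aside on the return convention the mismatch check above depends on:
# as commonly documented for rpm's rpmvercmp, rpm_vercmp.vercmp(a, b) is expected
# to return 1 when a sorts newer than b, -1 when a sorts older, and 0 when the
# two versions compare equal. The version strings below are hypothetical
# examples, not values taken from the patching-state data.
import rpm_vercmp

assert rpm_vercmp.vercmp('1.0', '1.0') == 0     # equal versions
assert rpm_vercmp.vercmp('1.10', '1.9') == 1    # 1.10 sorts newer than 1.9
assert rpm_vercmp.vercmp('1.0', '1.0.1') == -1  # 1.0 sorts older than 1.0.1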
def __ne__(self, other):
    return vercmp(self.filename, other.filename) != 0

def __ge__(self, other):
    return vercmp(self.filename, other.filename) >= 0

def __le__(self, other):
    return vercmp(self.filename, other.filename) <= 0

def __eq__(self, other):
    return vercmp(self.filename, other.filename) == 0
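# A minimal usage sketch: comparison methods like the ones above, delegating to
# vercmp(), let instances be ordered by RPM version rules. The Pkg class and the
# version-like filenames here are hypothetical stand-ins for whatever class
# actually defines __eq__/__ne__/__le__/__ge__ on self.filename.
from rpm_vercmp import vercmp

class Pkg:
    def __init__(self, filename):
        self.filename = filename

    def __lt__(self, other):          # sorted() only needs __lt__
        return vercmp(self.filename, other.filename) < 0

    def __eq__(self, other):
        return vercmp(self.filename, other.filename) == 0

pkgs = sorted(Pkg(name) for name in ('2.0-1', '1.9-3', '1.10-2'))
print([p.filename for p in pkgs])     # ['1.9-3', '1.10-2', '2.0-1']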