def remove_old(args, parser):
    """Remove package_group units present in --to-repoid but absent from --from-repoid.

    Searches both repositories for ``package_group`` units, collects the unit
    ids that exist only in the target repo, groups them by unit type, and
    unassociates each group from the target repo via the Pulp v2 unassociate
    action.

    NOTE(review): the original body contained leftover debug prints followed
    by an unconditional ``return None`` that made the whole removal logic
    unreachable; that early return has been removed so the function performs
    its stated job.  Confirm this is intended before running against a
    production repo -- the unassociate call is destructive.
    """
    global list_headers

    criteria = {}
    criteria["type_ids"] = ["package_group"]

    from_data = pulp_lib.post_request(
        "repositories/%s/search/units/" % args.from_repoid,
        data={"criteria": criteria}, limit=100)
    to_data = pulp_lib.post_request(
        "repositories/%s/search/units/" % args.to_repoid,
        data={"criteria": criteria}, limit=100)

    # Set gives O(1) membership tests (the original scanned a list per unit).
    from_unit_ids = set(d["unit_id"] for d in from_data)

    # Units present in the target repo but not in the source repo.
    remove_units = [
        {"id": unit["unit_id"], "type": unit["unit_type_id"]}
        for unit in to_data
        if unit["unit_id"] not in from_unit_ids
    ]

    # Group removal candidates by unit type so each type needs only one
    # unassociate API call.
    remove_types = {}
    for unit in remove_units:
        entry = remove_types.setdefault(
            unit["type"], {"ids": [], "type": unit["type"]})
        entry["ids"].append(unit["id"])

    for remove_type in remove_types.values():
        criteria = {
            'type_ids': [remove_type['type']],
            'filters': {
                'association': {
                    'unit_id': {'$in': remove_type['ids']}
                },
            }
        }
        pulp_lib.post_request(
            "repositories/%s/actions/unassociate/" % args.to_repoid,
            data={"criteria": criteria})
    return None
def list_content(args, parser):
    """List rpm units of a repository as a table.

    Requires ``args.repoid`` (exits with an error otherwise).  ``args.match``,
    when given, is applied as a ``$regex`` filter on the unit name.  Table
    columns come from the module-level ``list_headers``.
    """
    global list_headers

    if not args.repoid:
        logger.error("Must provide repoid argument")
        sys.exit(1)

    criteria = {}
    criteria["fields"] = {"unit": list_headers}
    criteria["type_ids"] = ["rpm"]
    if args.match:
        criteria["filters"] = {"unit": {"name": {"$regex": args.match}}}

    data = pulp_lib.post_request("repositories/%s/search/units/" % args.repoid,
                                 data={"criteria": criteria})

    # One row per unit, column order matching list_headers; units with empty
    # metadata are skipped.
    units = [
        [unit["metadata"][header] for header in list_headers]
        for unit in data if unit["metadata"]
    ]

    # Sort on the first three header columns (assumes list_headers has at
    # least three entries -- matches the original behaviour).  The original
    # copied the sorted list element-by-element into a second list.
    rows = sorted(units, key=operator.itemgetter(0, 1, 2))
    pulp_lib.print_table(list_headers, rows)
def content_changelog(args, parser):
    """Print changelogs of rpm units whose name exactly matches args.match.

    The match regex is anchored (``^...$``) so it is an exact name match, not
    a substring match.  Each distinct name-version-release is printed once as
    its own table.  (Removed the unused ``u``/``units`` locals the original
    allocated per iteration.)
    """
    criteria = {}
    criteria["type_ids"] = ["rpm"]
    if args.match:
        criteria["filters"] = {
            "unit": {"name": {"$regex": "^%s$" % args.match}}
        }
    data = pulp_lib.post_request("repositories/%s/search/units/" % args.repoid,
                                 data={"criteria": criteria})

    seen_names = []
    for unit in data:
        m = unit["metadata"]
        u_name = "%s-%s-%s" % (m["name"], m["version"], m["release"])
        # A repo can hold several units with the same NVR (e.g. one per
        # arch); print each changelog only once.
        if u_name in seen_names:
            continue
        seen_names.append(u_name)

        rows = []
        # Changelog entries look like (timestamp, author, text) -- sorted by
        # the first element, presumably the timestamp (TODO confirm against
        # the rpm unit metadata).
        for entry in sorted(m["changelog"], key=operator.itemgetter(0)):
            # Trailing "" keeps a blank line after each entry in the cell.
            rows.append(["\n".join([entry[1], entry[2], ""])])
        pulp_lib.print_table(["%s Changelog" % u_name], rows)
def content_changelog(args, parser):
    """Show changelog tables for rpm units matching args.match exactly."""
    criteria = {"type_ids": ["rpm"]}
    if args.match:
        # Anchored regex: exact name match rather than substring.
        criteria["filters"] = {
            "unit": {
                "name": {"$regex": "^%s$" % args.match}
            }
        }
    data = pulp_lib.post_request(
        "repositories/%s/search/units/" % args.repoid,
        data={"criteria": criteria})

    printed = []
    for unit in data:
        meta = unit["metadata"]
        nvr = "%s-%s-%s" % (meta["name"], meta["version"], meta["release"])
        # Skip duplicates so each name-version-release prints only once.
        if nvr in printed:
            continue
        printed.append(nvr)

        table_rows = []
        for change in sorted(meta["changelog"], key=operator.itemgetter(0)):
            cell = "\n".join([change[1], change[2], ""])
            table_rows.append([cell])
        pulp_lib.print_table(["%s Changelog" % nvr], table_rows)
def list_content(args, parser):
    """Print a table of rpm units contained in args.repoid.

    args.match, when given, is applied as a regex filter on unit names.
    Columns are taken from the module-level list_headers.
    """
    global list_headers

    if not args.repoid:
        logger.error("Must provide repoid argument")
        sys.exit(1)

    criteria = {
        "fields": {"unit": list_headers},
        "type_ids": ["rpm"],
    }
    if args.match:
        criteria["filters"] = {"unit": {"name": {"$regex": args.match}}}

    endpoint = "repositories/%s/search/units/" % args.repoid
    data = pulp_lib.post_request(endpoint, data={"criteria": criteria})

    units = []
    for unit in data:
        metadata = unit["metadata"]
        if not metadata:
            continue
        units.append([metadata[header] for header in list_headers])

    # Sort on the first three columns, then hand the rows to the table
    # printer.
    sort_key = operator.itemgetter(0, 1, 2)
    rows = []
    for row in sorted(units, key=sort_key):
        rows.append(row)
    pulp_lib.print_table(list_headers, rows)
def diff_content(args, parser):
    """Show rpm name-version-release.arch strings in --from-repoid missing from --to-repoid.

    With ``args.show_diff``, prints a unified diff of the two (ordered)
    listings instead of the plain set difference.  ``args.match``, when given,
    is a ``$regex`` filter on the unit name.
    """
    global list_headers

    fields = list_headers + ["arch"]
    criteria = {}
    criteria["fields"] = {"unit": fields}
    criteria["type_ids"] = ["rpm"]
    if args.match:
        criteria["filters"] = {"unit": {"name": {"$regex": args.match}}}

    from_data = pulp_lib.post_request(
        "repositories/%s/search/units/" % args.from_repoid,
        data={"criteria": criteria})
    to_data = pulp_lib.post_request(
        "repositories/%s/search/units/" % args.to_repoid,
        data={"criteria": criteria})

    def _rpm_names(data):
        # De-duplicate while preserving encounter order; the seen-set makes
        # this O(n) instead of the original O(n^2) list membership scan.
        names, seen = [], set()
        for unit in data:
            m = unit["metadata"]
            name = "%s-%s-%s.%s" % (m["name"], m["version"],
                                    m["release"], m["arch"])
            if name not in seen:
                seen.add(name)
                names.append(name)
        return names

    from_rpms = _rpm_names(from_data)
    to_rpms = _rpm_names(to_data)

    if args.show_diff:
        for line in difflib.unified_diff(from_rpms, to_rpms,
                                         fromfile=args.from_repoid,
                                         tofile=args.to_repoid):
            print(line)
    else:
        for d in sorted(set(from_rpms) - set(to_rpms)):
            print(d)
def list(args, parser):
    """Print a table of repositories with yum importer/distributor details.

    Columns depend on ``args.details`` (``list_headers["details"]`` vs
    ``list_headers["no_details"]``).  ``args.repo``, when given, restricts the
    listing to those repo ids.
    """
    global list_headers

    if args.details:
        headers = list_headers["details"]
    else:
        headers = list_headers["no_details"]

    criteria = {}
    if args.repo:
        criteria["filters"] = {"id": {"$in": args.repo}}
    data = pulp_lib.post_request(
        "repositories/search/",
        data={"criteria": criteria, "importers": 1, "distributors": 1})

    repo_names = []
    repos = {}
    for repo in data:
        name = repo["display_name"]

        # Reset per repo: previously a repo without a yum distributor or
        # importer silently reused the previous iteration's value (or raised
        # NameError on the first repo).  The empty-config defaults make every
        # .get() below fall back to "".
        distributors = {"config": {}}
        importers = {"config": {}}
        for distributor in repo["distributors"]:
            if distributor["distributor_type_id"] == "yum_distributor":
                distributors = distributor
        for importer in repo["importers"]:
            if importer["importer_type_id"] == "yum_importer":
                importers = importer

        repo_names.append(name)
        counts = repo["content_unit_counts"]
        repo_data = {
            "name": name,
            "last_published": distributors.get("last_publish", ""),
            "relative_url": distributors["config"].get("relative_url", ""),
            "last_sync": importers.get("last_sync", ""),
            "feed": importers["config"].get("feed", ""),
            "http": distributors["config"].get("http", ""),
            "https": distributors["config"].get("https", ""),
            "remove_missing": importers["config"].get("remove_missing", ""),
            "rpms": counts.get("rpm", ""),
            "package_group": counts.get("package_group", ""),
            "package_category": counts.get("package_category", ""),
            "distribution": counts.get("distribution", ""),
        }
        repos[name] = [repo_data[header] for header in headers]

    # Emit rows sorted by repository name.
    rows = [repos[repo_name] for repo_name in sorted(repo_names)]
    pulp_lib.print_table(headers, rows)
def diff_content(args, parser):
    """Print rpms that exist in the from-repo but not in the to-repo.

    args.show_diff switches the output to a unified diff of both listings;
    args.match filters unit names by regex.
    """
    global list_headers

    criteria = {
        "fields": {"unit": list_headers + ["arch"]},
        "type_ids": ["rpm"],
    }
    if args.match:
        criteria["filters"] = {"unit": {"name": {"$regex": args.match}}}

    endpoint = "repositories/%s/search/units/"
    from_data = pulp_lib.post_request(endpoint % args.from_repoid,
                                      data={"criteria": criteria})
    to_data = pulp_lib.post_request(endpoint % args.to_repoid,
                                    data={"criteria": criteria})

    # Build ordered, de-duplicated name-version-release.arch listings.
    from_rpms = []
    for unit in from_data:
        meta = unit["metadata"]
        nevra = "%s-%s-%s.%s" % (meta["name"], meta["version"],
                                 meta["release"], meta["arch"])
        if nevra not in from_rpms:
            from_rpms.append(nevra)

    to_rpms = []
    for unit in to_data:
        meta = unit["metadata"]
        nevra = "%s-%s-%s.%s" % (meta["name"], meta["version"],
                                 meta["release"], meta["arch"])
        if nevra not in to_rpms:
            to_rpms.append(nevra)

    if args.show_diff:
        diff = difflib.unified_diff(from_rpms, to_rpms,
                                    fromfile=args.from_repoid,
                                    tofile=args.to_repoid)
        for line in diff:
            print(line)
    else:
        for name in sorted(set(from_rpms) - set(to_rpms)):
            print(name)
def list(args, parser):
    """Print a table of Pulp tasks, optionally filtered by state.

    Tasks are sorted by start_time ascending.  Resource and action columns
    are parsed from the task's tags, which look like
    ``pulp:repository:epel-7-testing`` and ``pulp:action:publish``.
    """
    # Compile the tag patterns once, not on every tag of every task.
    action_re = re.compile(r'^pulp:action:(.*)$')
    resource_re = re.compile(r'^pulp:(.*):(.*)$')

    criteria = {}
    if args.list_state:
        criteria["filters"] = {
            "state": {"$in": args.list_state},
        }
    criteria["sort"] = [["start_time", "ascending"]]

    data = pulp_lib.post_request("tasks/search/", data={"criteria": criteria})

    tasks = []
    for r in data:
        tags = r.get("tags", None)
        if not tags:
            # Untagged tasks carry no resource/action info; skip them.
            continue
        resource = ""
        action = ""
        for tag in tags:
            act_m = action_re.search(tag)
            if act_m:
                action = act_m.group(1)
                continue
            res_m = resource_re.search(tag)
            if res_m:
                resource = "%s (%s)" % (res_m.group(2), res_m.group(1))
        tasks.append([resource, action, r["state"], r["start_time"],
                      r["finish_time"], r["task_id"]])

    headers = ["Resource", "Action", "State", "Start", "Finish", "Task ID"]
    pulp_lib.print_table(headers, tasks)
def list(args, parser):
    """Tabulate Pulp tasks: resource, action, state, start/finish, task id.

    Tags of the form "pulp:action:<name>" supply the action column; any other
    "pulp:<type>:<id>" tag supplies the resource column.
    """
    criteria = {}
    if args.list_state:
        criteria["filters"] = {"state": {"$in": args.list_state}}
    criteria["sort"] = [["start_time", "ascending"]]

    data = pulp_lib.post_request("tasks/search/",
                                 data={"criteria": criteria})

    rows = []
    for record in data:
        tags = record.get("tags", None)
        if not tags:
            continue

        resource, action = "", ""
        for tag in tags:
            matched = re.search('^pulp:action:(.*)$', tag)
            if matched:
                action = matched.group(1)
                continue
            matched = re.search('^pulp:(.*):(.*)$', tag)
            if matched:
                resource = "%s (%s)" % (matched.group(2), matched.group(1))

        rows.append([
            resource,
            action,
            record["state"],
            record["start_time"],
            record["finish_time"],
            record["task_id"],
        ])

    headers = ["Resource", "Action", "State", "Start", "Finish", "Task ID"]
    pulp_lib.print_table(headers, rows)
def remove_old(args, parser):
    """Diff package_group units between two repos and unassociate the extras.

    NOTE: currently short-circuits after dumping one sample unit from each
    repo (debug aid); everything below the early ``return None`` is
    unreachable and is kept as-is.
    """
    global list_headers

    criteria = {}
    criteria["type_ids"] = ["package_group"]

    search_path = "repositories/%s/search/units/"
    from_data = pulp_lib.post_request(search_path % args.from_repoid,
                                      data={"criteria": criteria}, limit=100)
    to_data = pulp_lib.post_request(search_path % args.to_repoid,
                                    data={"criteria": criteria}, limit=100)

    print(json.dumps(from_data[0], sort_keys=True, indent=4))
    print(json.dumps(to_data[0], sort_keys=True, indent=4))
    return None

    # --- unreachable below (debug early-return above) ---
    from_units = []
    to_units = []
    remove_units = []
    from_unit_ids = [d["unit_id"] for d in from_data]
    for unit in to_data:
        if unit["unit_id"] not in from_unit_ids:
            remove_units.append({"id": unit["unit_id"],
                                 "type": unit["unit_type_id"]})

    # Bucket the ids by unit type.
    remove_types = {}
    for unit in remove_units:
        bucket = remove_types.get(unit["type"])
        if bucket is None:
            bucket = {"ids": [], "type": unit["type"]}
            remove_types[unit["type"]] = bucket
        bucket["ids"].append(unit["id"])

    for remove_type in remove_types.values():
        criteria = {
            'type_ids': [remove_type['type']],
            'filters': {
                'association': {
                    'unit_id': {'$in': remove_type['ids']}
                },
            }
        }
        pulp_lib.post_request(
            "repositories/%s/actions/unassociate/" % args.to_repoid,
            data={"criteria": criteria})
    return None
def list(args, parser):
    """Render the repository listing table (yum importer/distributor info)."""
    global list_headers

    if args.details:
        headers = list_headers["details"]
    else:
        headers = list_headers["no_details"]

    criteria = {}
    if args.repo:
        criteria['filters'] = {
            'id': {'$in': args.repo},
        }
    payload = {"criteria": criteria, "importers": 1, "distributors": 1}
    data = pulp_lib.post_request("repositories/search/", data=payload)

    repo_names = []
    repos = {}
    for repo in data:
        name = repo["display_name"]

        # Pick the yum distributor / importer entries for this repo.
        # NOTE(review): if a repo lacks one, the previous iteration's value
        # is reused (or NameError on the first repo) -- behaviour unchanged
        # here; confirm whether that is intended.
        for dist in repo["distributors"]:
            if dist["distributor_type_id"] == "yum_distributor":
                distributors = dist
        for imp in repo["importers"]:
            if imp["importer_type_id"] == "yum_importer":
                importers = imp

        repo_names.append(name)
        dist_cfg = distributors["config"]
        imp_cfg = importers["config"]
        counts = repo["content_unit_counts"]
        repo_data = {
            "name": name,
            "last_published": distributors.get("last_publish", ""),
            "relative_url": dist_cfg.get("relative_url", ""),
            "last_sync": importers.get("last_sync", ""),
            "feed": imp_cfg.get("feed", ""),
            "http": dist_cfg.get("http", ""),
            "https": dist_cfg.get("https", ""),
            "remove_missing": imp_cfg.get("remove_missing", ""),
            "rpms": counts.get("rpm", ""),
            "package_group": counts.get("package_group", ""),
            "package_category": counts.get("package_category", ""),
            "distribution": counts.get("distribution", ""),
        }
        repos[name] = [repo_data[h] for h in headers]

    rows = [repos[n] for n in sorted(repo_names)]
    pulp_lib.print_table(headers, rows)