def origin_command(args):
    """Manage destination-portal origins for the current project.

    Dispatches on ``args.action``:

    * ``add``   -- register the ``args.env`` portal as an origin and save
                   the project file.
    * ``fetch`` -- resolve each project dependency to a unique matching item
                   on the destination portal and record the
                   (source id, destination id, destination url) triples.
    * ``fix``   -- rewrite each webapp item's service URL so its ``id=``
                   query part points at the destination portal's item id.
    """
    dest_creds = session.config.creds[args.env]
    dest_portal = generate_token(dest_creds)
    project = session.project

    if args.action == 'add':
        # add a destination origin, then save the changes
        project.add_origin(dest_portal, dest_creds)
        project.create_project_file()

    if args.action == 'fetch':
        service_deps = project.all_dependencies(dest_portal)
        dest_ids = []
        for dep_id, clauses in service_deps:
            # Build an AND query from the dependency's key:value clauses.
            terms = ["{}:{}".format(k, v) for k, v in clauses.items()]
            query = " AND ".join(terms)
            # Get a list of the content matching the query.
            content = search_portal(dest_portal, query=query)
            if len(content) == 1:
                dest_ids.append((dep_id, content[0]['id'], content[0]['url']))
            else:
                # Zero or multiple hits: cannot map this dependency safely.
                print("A UNIQUE match for '%s' could not be found in '%s'" % (query, dest_portal))
        # update the project file with the destination portal ids, then save
        project.update_portal_ids(dest_portal, dest_ids)
        project.create_project_file()

    if args.action == 'fix':
        webapp_origins = project.webapp_dependencies(dest_portal)
        for webapp_origin in webapp_origins:
            portal_info = webapp_origin['portal']
            if 'serviceUrl' not in portal_info:
                continue
            m = re.match(r"(.*)(id=.*)", portal_info['serviceUrl'])
            # BUG FIX: re.match returns None when the URL has no "id=" part;
            # the original crashed on m.group() and could also reference
            # item_url before assignment.  Skip such entries instead.
            if m is None or not m.group(1) or 'id' not in portal_info:
                continue
            item_url = "%sid=%s" % (m.group(1), portal_info['id'])
            # update the item so its url carries the destination portal id
            result = update_item(dest_portal, portal_info['id'],
                                 {'url': item_url, 'ownerFolder': ""})
def deps_command(args):
    """Build and return the dependency graph for items matching
    ``args.query`` on the portal named by ``args.source``.

    Returns ``False`` when no credentials exist for the source portal.
    """
    creds = session.config.creds[args.source]
    if not creds:
        return False
    portal = generate_token(creds)
    # Get a list of the content matching the query.
    matches = search_portal(portal, query=args.query)
    # Pull the full item records, then walk their dependencies.
    items = get_items(portal, matches)
    return retrieve_deps(portal, items)
def check_publishId_exists(authenticatedPortal, publishId, username):
    """Return the id of the item owned by ``username`` whose ``publishId``
    property equals ``publishId``, or ``None`` when no such item exists.

    ``authenticatedPortal`` is a ``(portalUrl, token)`` pair.
    """
    portalUrl, token = authenticatedPortal
    # Get a list of the content owned by the user.
    searchQuery = "owner:" + username
    content = search_portal(authenticatedPortal, query=searchQuery)
    for item in content:
        json_desc = get_item_description(item['id'], portalUrl, token)
        desc = json.loads(json_desc)
        # BUG FIX: the original indexed desc['properties'] and only handled
        # an explicit null; a missing 'properties' key raised KeyError.
        prop = desc.get('properties') or {}
        if 'publishId' in prop and prop['publishId'] == publishId:
            return desc['id']
    return None
def download_command(args):
    """Download items matching the publisher query from the ``args.env``
    portal into the configured downloads directory.

    When ``args.deps`` is set, the dependency graph of every matched item
    is walked, each dependency is written to disk (least-dependent first),
    and the id mapping is recorded in the project file.

    Returns the list of downloaded portal items.
    """
    source_creds = session.config.creds[args.env]
    source_portal = generate_token(source_creds)
    project = session.project

    # Make sure the local download directory exists.
    download_path = get_download_path(session.config.downloads)
    os.makedirs(download_path, exist_ok=True)

    # Get a list of the content matching the query, then fetch the full
    # item records so everything is stored locally.
    content = search_portal(source_portal, query=publisher.getQuery(args))
    portal_data = get_items(source_portal, content)

    if args.deps:
        print("you should look for dependencies")
        graph = retrieve_deps(source_portal, portal_data)
        relabeller = graph.create_relabeller()
        setvars = create_setvars(source_portal)
        # Write each item to disk in dependency order, remembering the
        # files produced for every node.
        for node in graph.postorder():
            item = node['portal_data']
            node['files'] = item_to_file(download_path, item.title, item,
                                         relabeller, setvars)
        # Record the nodes (least dependencies first) so the portal ids can
        # be remapped later, then persist the project file.
        nodes = graph.postorder()
        project.add_dependencies(nodes, source_portal, source_creds)
        project.create_project_file()
    else:
        print("Dont look for dependencies")
        for item in portal_data:
            item_to_file(download_path, item.title, item)

    return portal_data
def copy_command(args):
    """Copy items matching ``args.query`` from the ``args.source`` portal
    to the ``args.destination`` portal.

    Returns the copied items, or ``False`` when credentials are missing
    for either portal.
    """
    src_creds = session.config.creds[args.source]
    if not src_creds:
        return False
    src_portal = generate_token(src_creds)

    dst_creds = session.config.creds[args.destination]
    if not dst_creds:
        return False
    dst_portal = generate_token(dst_creds)

    # Get a list of the content matching the query, fetch the full item
    # records, and push them to the destination portal.
    matches = search_portal(src_portal, query=args.query)
    items = get_items(src_portal, matches)
    upload_items(dst_portal, items)
    return items
def retrieve_deps(source_portal, portal_data, last_parent=None, graph=None):
    """Recursively build the dependency graph for ``portal_data`` items.

    Each item becomes a root node (or a child of ``last_parent``); its
    dependencies are looked up on ``source_portal`` and added beneath it.

    BUG FIX: the original constructed a fresh ``Graph()`` on every
    recursive call and discarded the result, so edges deeper than one
    level were lost.  The graph is now threaded through the recursion via
    the new, defaulted ``graph`` parameter — existing callers are
    unaffected.

    NOTE(review): circular dependencies between portal items would recurse
    forever; assumed not to occur — confirm against the data model.
    """
    if graph is None:
        graph = Graph()
    for parent in portal_data:
        if last_parent is None:
            graph.add_root(parent)
        else:
            graph.add_child(last_parent, parent)
        deps = find_dependencies(parent)
        if deps:
            # create a query to get all the dependencies
            ids_query = " OR ".join("id:{}".format(dep) for dep in deps)
            # Get a list of the content matching the query.
            deps_content = search_portal(source_portal, query=ids_query)
            # store all content locally
            children = get_items(source_portal, deps_content)
            print("item '%s' of type: '%s' has the following dependencies:" % (parent.title, parent.type))
            for child in children:
                print("\tchild id:%s" % (child.title))
            if children:
                # Recurse with the SAME graph; the recursive call adds the
                # parent->child edges (avoiding the duplicate add_child the
                # original performed) plus any deeper dependencies.
                retrieve_deps(source_portal, children, parent, graph)
        else:
            print("No dependencies for item '%s' of type: '%s'." % (parent.title, parent.type))
    return graph