def populate_cache_with_project_and_members(project, ccm, ccmpool):
    """Load *project* into the cache together with its full member list.

    Fetches the project object via ccm_cache, queries all objects in the
    project (with use_cache=True), attaches them as members and forces a
    cache update so the member list is persisted.

    :param project: four-part object name of the project to load
    :param ccm: Synergy session handed through to the cache/query layers
    :param ccmpool: session pool used for the (parallel) member query
    """
    print "Loading object %s" % project
    project_obj = ccm_cache.get_object(project, ccm=ccm)
    # assuming no project.members — members are fetched unconditionally here
    objects_in_project = ccm_objects_in_project.get_objects_in_project(project, ccm=ccm, ccmpool=ccmpool, use_cache=True)
    project_obj.set_members(objects_in_project)
    ccm_cache.force_cache_update_for_object(project_obj)
def update_project_with_members(project, ccm, ccmpool):
    """Ensure the cached *project* object has its member list populated.

    Unlike populate_cache_with_project_and_members() this only queries
    Synergy when the cached project has no members yet; the refreshed
    object is then written back to the cache.

    :param project: four-part object name of the project to update
    :param ccm: Synergy session handed through to the cache/query layers
    :param ccmpool: session pool used for the member query
    """
    print "Loading object %s" % project
    project_obj = ccm_cache.get_object(project, ccm=ccm)
    if not project_obj.members:
        objects_in_project = ccm_objects_in_project.get_objects_in_project(project, ccm=ccm, ccmpool=ccmpool)
        project_obj.set_members(objects_in_project)
        ccm_cache.force_cache_update_for_object(project_obj)
def populate_cache_with_project_and_members(project, ccm, ccmpool): print "Loading object %s" % project project_obj = ccm_cache.get_object(project, ccm=ccm) #assuming no project.members objects_in_project = ccm_objects_in_project.get_objects_in_project( project, ccm=ccm, ccmpool=ccmpool, use_cache=True) project_obj.set_members(objects_in_project) ccm_cache.force_cache_update_for_object(project_obj)
def update_project_with_members(project, ccm, ccmpool): print "Loading object %s" % project project_obj = ccm_cache.get_object(project, ccm=ccm) if not project_obj.members: objects_in_project = ccm_objects_in_project.get_objects_in_project( project, ccm=ccm, ccmpool=ccmpool) project_obj.set_members(objects_in_project) ccm_cache.force_cache_update_for_object(project_obj)
def get_snapshot(project, ccm, outdir): if not outdir.endswith('/'): outdir += '/' # get all objects in the project objects = get_objects_in_project(project, ccm) # write the objects to outdir for object, paths in objects.iteritems(): # print object, paths if not ':dir:' in object and not ':project:' in object: content = ccm.cat(object).run() for path in paths: p = outdir + path dir = os.path.split(p)[0] if not os.path.exists(dir): os.makedirs(dir) print "Writing %s to %s" %(object, p) f = open(p, 'wb') f.write(content) f.close() # handle empty dirs by adding .gitignore to empty leaf dirs empty_dirs = get_empty_dirs(objects) write_empty_dirs(empty_dirs, outdir)
def find_project_diff(self, baseline_project, next_project):
    """Collect the object/task difference between two project releases.

    Loads (or repairs) the member lists of *baseline_project* and
    *next_project*, computes the changed objects between them, walks each
    changed object's history and stores the resulting object names under
    self.history[self.tag]['objects'].  When *next_project* is None the
    baseline is treated as the root release and ALL of its non-project
    objects are collected instead.  For non-root diffs, newly created
    subprojects and new directories are mapped to Synergy tasks via
    find_task_from_dirs() and merged into the task history.

    :param baseline_project: cached project object of the older release
    :param next_project: cached project object of the newer release,
        or None for the initial (root) release
    """
    # Get all objects and paths for baseline_project
    if not self.baseline_objects:
        self.baseline_objects = baseline_project.get_members()
    # NOTE(review): a member list of length 1 is treated as invalid and
    # triggers a full re-query — confirm this sentinel against the cache
    # format.
    if self.baseline_objects is None or len(
            self.baseline_objects) == 1 or not isinstance(
                self.baseline_objects, dict):
        self.baseline_objects = ccm_objects.get_objects_in_project(
            baseline_project.get_object_name(), ccmpool=self.ccmpool)
        baseline_project.set_members(self.baseline_objects)
        ccm_cache.force_cache_update_for_object(baseline_project)
    if next_project:
        # Get all objects and paths for next project
        self.project_objects = next_project.get_members()
        if self.project_objects is None or len(
                self.project_objects) == 1 or not isinstance(
                    self.project_objects, dict):
            self.project_objects = ccm_objects.get_objects_in_project(
                next_project.get_object_name(), ccmpool=self.ccmpool)
            next_project.set_members(self.project_objects)
            ccm_cache.force_cache_update_for_object(next_project)
        # Find difference between baseline_project and next_project
        new_objects, old_objects = get_changed_objects(
            self.baseline_objects, self.project_objects)
        next_projects = [
            o for o in self.project_objects.keys() if ':project:' in o
        ]
        baseline_projects = [
            o for o in self.baseline_objects.keys() if ':project:' in o
        ]
        object_history = ObjectHistory(
            self.ccm,
            next_project.get_object_name(),
            old_objects=old_objects,
            old_release=baseline_project.get_object_name(),
            new_projects=next_projects,
            old_projects=baseline_projects)
    else:
        # root project, get ALL objects in release
        new_objects = self.baseline_objects
        old_objects = []
        object_history = ObjectHistory(self.ccm,
                                       baseline_project.get_object_name())
    num_of_objects = len(
        [o for o in new_objects.keys() if ":project:" not in o])
    logger.info("objects to process : %i" % num_of_objects)
    objects = []
    if self.tag in self.history.keys():
        if 'objects' in self.history[self.tag]:
            #Add all existing objects from a previous (partial) run so
            #they are not processed again below
            for o in self.history[self.tag]['objects']:
                objects.append(o)
                #objects[o] = ccm_cache.get_object(o, self.ccm)
            logger.info("no of old objects loaded %i", len(objects))
    else:
        # first run for this tag: initialise its history entry
        self.history[self.tag] = {'objects': [], 'tasks': []}
    # remaining non-project objects that still need processing
    object_names = set([
        o for o in new_objects.keys() if ':project:' not in o
    ]) - set(objects)
    for o in object_names:
        object = ccm_cache.get_object(o, self.ccm)
        if next_project:
            # get the object history between releases
            history = object_history.get_history(
                object, new_objects[object.get_object_name()])
            objects.extend(history.keys())
            #objects.update(object_history.get_history(object, new_objects[object.get_object_name()]))
        else:
            # just get all the objects in the release
            logger.info('Processing: %s path: %s' %
                        (object.get_object_name(),
                         str(new_objects[object.get_object_name()])))
            #object.set_path(new_objects[object.get_object_name()])
            objects.append(o)
            #objects[object.get_object_name()] = object
        num_of_objects -= 1
        logger.info('Objects left: %i' % num_of_objects)
    objects = list(set(objects))
    logger.info("number of files: %i" % len(objects))
    self.history[self.tag]['objects'] = objects
    # Create tasks from objects, but not for initial project
    if next_project:
        self.find_tasks_from_objects(objects, next_project)
    # Handle new projects:
    if next_project:
        new_created_projects = get_new_projects(old_objects, new_objects,
                                                self.delim)
        dir_lookup = {}
        # create lookup for path to directory-4-part-name {path : dir-name}
        for k, v in new_objects.iteritems():
            if ':dir:' in k:
                for i in v:
                    dir_lookup[i] = k
        # all paths contributed by the newly created subprojects
        project_dirs = [
            dir for project in new_created_projects
            for dir in new_objects[project]
        ]
        directories = [
            d for k, v in new_objects.iteritems() for d in v if ':dir:' in k
        ]
        # directories that belong to a new subproject
        changed_directories = set(directories).intersection(
            set(project_dirs))
        changed_directories = remove_subdirs_under_same_path(
            changed_directories)
        dirs = [dir_lookup[d] for d in changed_directories]
        # find task and add all objects to the task, which shares the path.
        project_tasks = self.find_task_from_dirs(dirs)
        logger.info("Checking for new subprojects")
        # check directories for new subdirectories and add their content
        directories = self.get_new_dirs(self.project_objects, new_objects)
        # Limit directories to only directories not already processed as a new project
        directories = set(directories) - set(dirs)
        # find task and add all objects to the task, which shares the path.
        dir_tasks = self.find_task_from_dirs(directories)
        # merge project and dir tasks (project tasks win on conflicts)
        for k, v in dir_tasks.iteritems():
            if not project_tasks.has_key(k):
                project_tasks[k] = v
        # if real synergy tasks isn't found check the path of the directories and skip possible subdirs
        tasks = self.reduce_dir_tasks(project_tasks)
        logger.info("Project and dir tasks reduced...")
        logger.info("%s" % str(tasks))
        self.update_tasks_with_directory_contens(tasks)
    # remove possible duplicates from objects
    self.history[self.tag]['objects'] = list(
        set(self.history[self.tag]['objects']))
def find_project_diff(self, baseline_project, next_project):
    """Collect the object/task difference between two project releases.

    Loads (or repairs) the member lists of *baseline_project* and
    *next_project*, computes the changed objects between them, walks each
    changed object's history and stores the resulting object names under
    self.history[self.tag]['objects'].  When *next_project* is None the
    baseline is treated as the root release and ALL of its non-project
    objects are collected instead.  For non-root diffs, newly created
    subprojects and new directories are mapped to Synergy tasks via
    find_task_from_dirs() and merged into the task history.

    :param baseline_project: cached project object of the older release
    :param next_project: cached project object of the newer release,
        or None for the initial (root) release
    """
    # Get all objects and paths for baseline_project
    if not self.baseline_objects:
        self.baseline_objects = baseline_project.get_members()
    # NOTE(review): a member list of length 1 is treated as invalid and
    # triggers a full re-query — confirm this sentinel against the cache
    # format.
    if self.baseline_objects is None or len(self.baseline_objects) == 1 or not isinstance(self.baseline_objects, dict):
        self.baseline_objects = ccm_objects.get_objects_in_project(baseline_project.get_object_name(), ccmpool=self.ccmpool)
        baseline_project.set_members(self.baseline_objects)
        ccm_cache.force_cache_update_for_object(baseline_project)
    if next_project:
        # Get all objects and paths for next project
        self.project_objects = next_project.get_members()
        if self.project_objects is None or len(self.project_objects) == 1 or not isinstance(self.project_objects, dict):
            self.project_objects = ccm_objects.get_objects_in_project(next_project.get_object_name(), ccmpool=self.ccmpool)
            next_project.set_members(self.project_objects)
            ccm_cache.force_cache_update_for_object(next_project)
        # Find difference between baseline_project and next_project
        new_objects, old_objects = get_changed_objects(self.baseline_objects, self.project_objects)
        next_projects = [o for o in self.project_objects.keys() if ':project:' in o]
        baseline_projects = [o for o in self.baseline_objects.keys() if ':project:' in o]
        object_history = ObjectHistory(self.ccm, next_project.get_object_name(), old_objects=old_objects, old_release=baseline_project.get_object_name(), new_projects=next_projects, old_projects=baseline_projects)
    else:
        # root project, get ALL objects in release
        new_objects = self.baseline_objects
        old_objects = []
        object_history = ObjectHistory(self.ccm, baseline_project.get_object_name())
    num_of_objects = len([o for o in new_objects.keys() if ":project:" not in o])
    logger.info("objects to process : %i" % num_of_objects)
    objects = []
    if self.tag in self.history.keys():
        if 'objects' in self.history[self.tag]:
            #Add all existing objects from a previous (partial) run so
            #they are not processed again below
            for o in self.history[self.tag]['objects']:
                objects.append(o)
                #objects[o] = ccm_cache.get_object(o, self.ccm)
            logger.info("no of old objects loaded %i", len(objects))
    else:
        # first run for this tag: initialise its history entry
        self.history[self.tag] = {'objects': [], 'tasks': []}
    # remaining non-project objects that still need processing
    object_names = set([o for o in new_objects.keys() if ':project:' not in o]) - set(objects)
    for o in object_names:
        object = ccm_cache.get_object(o, self.ccm)
        if next_project:
            # get the object history between releases
            history = object_history.get_history(object, new_objects[object.get_object_name()])
            objects.extend(history.keys())
            #objects.update(object_history.get_history(object, new_objects[object.get_object_name()]))
        else:
            # just get all the objects in the release
            logger.info('Processing: %s path: %s' %(object.get_object_name(), str(new_objects[object.get_object_name()])))
            #object.set_path(new_objects[object.get_object_name()])
            objects.append(o)
            #objects[object.get_object_name()] = object
        num_of_objects -=1
        logger.info('Objects left: %i' %num_of_objects)
    objects = list(set(objects))
    logger.info("number of files: %i" % len(objects))
    self.history[self.tag]['objects'] = objects
    # Create tasks from objects, but not for initial project
    if next_project:
        self.find_tasks_from_objects(objects, next_project)
    # Handle new projects:
    if next_project:
        new_created_projects = get_new_projects(old_objects, new_objects, self.delim)
        dir_lookup ={}
        # create lookup for path to directory-4-part-name {path : dir-name}
        for k,v in new_objects.iteritems():
            if ':dir:' in k:
                for i in v:
                    dir_lookup[i] = k
        # all paths contributed by the newly created subprojects
        project_dirs = [dir for project in new_created_projects for dir in new_objects[project]]
        directories = [d for k,v in new_objects.iteritems() for d in v if ':dir:' in k]
        # directories that belong to a new subproject
        changed_directories = set(directories).intersection(set(project_dirs))
        changed_directories = remove_subdirs_under_same_path(changed_directories)
        dirs = [dir_lookup[d] for d in changed_directories]
        # find task and add all objects to the task, which shares the path.
        project_tasks = self.find_task_from_dirs(dirs)
        logger.info("Checking for new subprojects")
        # check directories for new subdirectories and add their content
        directories = self.get_new_dirs(self.project_objects, new_objects)
        # Limit directories to only directories not already processed as a new project
        directories = set(directories) - set(dirs)
        # find task and add all objects to the task, which shares the path.
        dir_tasks = self.find_task_from_dirs(directories)
        # merge project and dir tasks (project tasks win on conflicts)
        for k,v in dir_tasks.iteritems():
            if not project_tasks.has_key(k):
                project_tasks[k] = v
        # if real synergy tasks isn't found check the path of the directories and skip possible subdirs
        tasks = self.reduce_dir_tasks(project_tasks)
        logger.info("Project and dir tasks reduced...")
        logger.info("%s" % str(tasks))
        self.update_tasks_with_directory_contens(tasks)
    # remove possible duplicates from objects
    self.history[self.tag]['objects'] = list(set(self.history[self.tag]['objects']))