def find_project_diff(self, latestproject, baseline_project, toplevel_project):
    """Collect the per-object history for the delta between two projects.

    Queries Synergy for every object that is recursively a member of
    `latestproject` but not of `baseline_project` (or ALL objects of
    `latestproject` when no baseline is given, i.e. the root release),
    fetches each file object's history via ObjectHistory, and stores the
    result under self.history[self.tag]['objects'].  Progress is persisted
    to disk every ~100 processed objects so a long run can be resumed.

    :param latestproject:   project (4-part name string) whose content is analysed
    :param baseline_project: previous release to diff against, or a falsy
                             value for the initial/root release
    :param toplevel_project: top-level project handed to ObjectHistory
    """
    # Find difference between latestproject and baseline_project
    if baseline_project:
        object_hist = ObjectHistory(self.ccm, toplevel_project, baseline_project)
        # NOTE(review): the first .format() is str.format on the query text; the
        # chained .format("%...") calls are presumably the query-builder adding
        # output keywords (objectname/owner/status/create_time/task) — confirm
        # against the SynergySession query API.
        objects_changed = self.ccm.query("recursive_is_member_of('{0}', 'none') and not recursive_is_member_of('{1}', 'none')".format(latestproject, baseline_project)).format("%objectname").format("%owner").format("%status").format("%create_time").format("%task").run()
    else:
        # root project, get ALL objects in release
        object_hist = ObjectHistory(self.ccm, toplevel_project, toplevel_project)
        objects_changed = self.ccm.query("recursive_is_member_of('{0}', 'none')".format(latestproject)).format("%objectname").format("%owner").format("%status").format("%create_time").format("%task").run()
    # Project objects are skipped below, so exclude them from the progress count.
    num_of_objects = len([o for o in objects_changed if ":project:" not in o])
    print "objects to process for", latestproject, num_of_objects
    objects = {}  # {object name -> history object}
    persist = 1  # counts processed objects; triggers an incremental dump every 100
    if self.tag in self.history.keys():
        if 'objects' in self.history[self.tag]:
            # Add all existing objects (resume from a previous run)
            for o in self.history[self.tag]['objects']:
                objects[o.get_object_name()] = o
    else:
        self.history[self.tag] = {'objects': [], 'tasks': []}
    # Check history for all objects and add them to history
    for o in objects_changed:
        #print o['objectname']
        if o['objectname'] not in objects.keys():
            # Don't do project objects
            if ':project:' not in o['objectname']:
                with self.timer:
                    objects.update(object_hist.get_history(FileObject.FileObject(o['objectname'], self.delim, o['owner'], o['status'], o['create_time'], o['task'])))
                persist +=1
        else:
            print o['objectname'], "already in history"
        # Incremental persistence so a crash doesn't lose all fetched history.
        if persist % 100 == 0:
            self.history[self.tag]['objects'] = objects.values()
            fname = self.outputfile + '_' + self.tag + '_inc'
            self.persist_data(fname, self.history[self.tag])
        num_of_objects -= 1
        print "objects left:", num_of_objects
    print "number of files:", str(len(objects.values()))
    self.history[self.tag]['objects'] = objects.values()
    # Create tasks from objects
    self.find_tasks_from_objects(objects.values(), latestproject)
def find_project_diff(self, baseline_project, next_project):
    """Diff two project releases and record changed objects and their tasks.

    Loads (or rebuilds and caches) the member lists of `baseline_project`
    and `next_project`, computes the changed objects between them, walks
    each changed object's history via ObjectHistory, and stores the
    resulting object names in self.history[self.tag]['objects'].  For a
    non-initial release it also derives tasks from the changed objects and
    from newly created subprojects/directories.

    :param baseline_project: project object for the previous release; when
                             `next_project` is falsy this is treated as the
                             root release and ALL of its members are taken
    :param next_project:     project object for the next release, or a falsy
                             value for the initial release
    """
    # Get all objects and paths for baseline_project
    if not self.baseline_objects:
        self.baseline_objects = baseline_project.get_members()
        # A member list of length 1 or of the wrong type is treated as a stale
        # cache entry and rebuilt from Synergy, then written back to the cache.
        if self.baseline_objects is None or len(
                self.baseline_objects) == 1 or not isinstance(
                    self.baseline_objects, dict):
            self.baseline_objects = ccm_objects.get_objects_in_project(
                baseline_project.get_object_name(), ccmpool=self.ccmpool)
            baseline_project.set_members(self.baseline_objects)
            ccm_cache.force_cache_update_for_object(baseline_project)
    if next_project:
        # Get all objects and paths for next project
        self.project_objects = next_project.get_members()
        if self.project_objects is None or len(
                self.project_objects) == 1 or not isinstance(
                    self.project_objects, dict):
            self.project_objects = ccm_objects.get_objects_in_project(
                next_project.get_object_name(), ccmpool=self.ccmpool)
            next_project.set_members(self.project_objects)
            ccm_cache.force_cache_update_for_object(next_project)
        # Find difference between baseline_project and next_project
        new_objects, old_objects = get_changed_objects(
            self.baseline_objects, self.project_objects)
        next_projects = [
            o for o in self.project_objects.keys() if ':project:' in o
        ]
        baseline_projects = [
            o for o in self.baseline_objects.keys() if ':project:' in o
        ]
        object_history = ObjectHistory(
            self.ccm,
            next_project.get_object_name(),
            old_objects=old_objects,
            old_release=baseline_project.get_object_name(),
            new_projects=next_projects,
            old_projects=baseline_projects)
    else:
        # root project, get ALL objects in release
        new_objects = self.baseline_objects
        old_objects = []
        object_history = ObjectHistory(self.ccm,
                                       baseline_project.get_object_name())
    # Project objects are excluded from processing, so exclude them here too.
    num_of_objects = len(
        [o for o in new_objects.keys() if ":project:" not in o])
    logger.info("objects to process : %i" % num_of_objects)
    objects = []  # flat list of object-name strings collected for this tag
    if self.tag in self.history.keys():
        if 'objects' in self.history[self.tag]:
            # Add all existing objects (resume from a previous run)
            for o in self.history[self.tag]['objects']:
                objects.append(o)
                #objects[o] = ccm_cache.get_object(o, self.ccm)
            logger.info("no of old objects loaded %i", len(objects))
    else:
        self.history[self.tag] = {'objects': [], 'tasks': []}
    # Skip objects already loaded from history; assumes history stores the
    # same object-name strings as new_objects' keys — TODO confirm.
    object_names = set([
        o for o in new_objects.keys() if ':project:' not in o
    ]) - set(objects)
    for o in object_names:
        object = ccm_cache.get_object(o, self.ccm)
        if next_project:
            # get the object history between releases
            history = object_history.get_history(
                object, new_objects[object.get_object_name()])
            objects.extend(history.keys())
            #objects.update(object_history.get_history(object, new_objects[object.get_object_name()]))
        else:
            # just get all the objects in the release
            logger.info('Processing: %s path: %s' %
                        (object.get_object_name(),
                         str(new_objects[object.get_object_name()])))
            #object.set_path(new_objects[object.get_object_name()])
            objects.append(o)
            #objects[object.get_object_name()] = object
        num_of_objects -= 1
        logger.info('Objects left: %i' % num_of_objects)
    # De-duplicate: get_history may return overlapping sets of objects.
    objects = list(set(objects))
    logger.info("number of files: %i" % len(objects))
    self.history[self.tag]['objects'] = objects
    # Create tasks from objects, but not for initial project
    if next_project:
        self.find_tasks_from_objects(objects, next_project)
    # Handle new projects:
    if next_project:
        new_created_projects = get_new_projects(old_objects, new_objects,
                                                self.delim)
        dir_lookup = {}
        # create lookup for path to directory-4-part-name {path : dir-name}
        for k, v in new_objects.iteritems():
            if ':dir:' in k:
                for i in v:
                    dir_lookup[i] = k
        project_dirs = [
            dir for project in new_created_projects
            for dir in new_objects[project]
        ]
        directories = [
            d for k, v in new_objects.iteritems() for d in v if ':dir:' in k
        ]
        # Directories whose paths belong to a newly created subproject.
        changed_directories = set(directories).intersection(
            set(project_dirs))
        changed_directories = remove_subdirs_under_same_path(
            changed_directories)
        dirs = [dir_lookup[d] for d in changed_directories]
        # find task and add all objects to the task, which shares the path.
        project_tasks = self.find_task_from_dirs(dirs)
        logger.info("Checking for new subprojects")
        # check directories for new subdirectories and add their content
        directories = self.get_new_dirs(self.project_objects, new_objects)
        # Limit directories to only directories not already processed as a new project
        directories = set(directories) - set(dirs)
        # find task and add all objects to the task, which shares the path.
        dir_tasks = self.find_task_from_dirs(directories)
        # merge project and dir tasks (project tasks win on key collisions)
        for k, v in dir_tasks.iteritems():
            if not project_tasks.has_key(k):
                project_tasks[k] = v
        # if real synergy tasks isn't found check the path of the directories and skip possible subdirs
        tasks = self.reduce_dir_tasks(project_tasks)
        logger.info("Project and dir tasks reduced...")
        logger.info("%s" % str(tasks))
        self.update_tasks_with_directory_contens(tasks)
    # remove possible duplicates from objects
    self.history[self.tag]['objects'] = list(
        set(self.history[self.tag]['objects']))
def find_project_diff(self, baseline_project, next_project):
    """Diff two project releases and record changed objects and their tasks.

    Loads (or rebuilds and caches) the member lists of `baseline_project`
    and `next_project`, computes the changed objects between them, walks
    each changed object's history via ObjectHistory, and stores the
    resulting object names in self.history[self.tag]['objects'].  For a
    non-initial release it also derives tasks from the changed objects and
    from newly created subprojects/directories.

    :param baseline_project: project object for the previous release; when
                             `next_project` is falsy this is treated as the
                             root release and ALL of its members are taken
    :param next_project:     project object for the next release, or a falsy
                             value for the initial release
    """
    # Get all objects and paths for baseline_project
    if not self.baseline_objects:
        self.baseline_objects = baseline_project.get_members()
        # A member list of length 1 or of the wrong type is treated as a stale
        # cache entry and rebuilt from Synergy, then written back to the cache.
        if self.baseline_objects is None or len(self.baseline_objects) == 1 or not isinstance(self.baseline_objects, dict):
            self.baseline_objects = ccm_objects.get_objects_in_project(baseline_project.get_object_name(), ccmpool=self.ccmpool)
            baseline_project.set_members(self.baseline_objects)
            ccm_cache.force_cache_update_for_object(baseline_project)
    if next_project:
        # Get all objects and paths for next project
        self.project_objects = next_project.get_members()
        if self.project_objects is None or len(self.project_objects) == 1 or not isinstance(self.project_objects, dict):
            self.project_objects = ccm_objects.get_objects_in_project(next_project.get_object_name(), ccmpool=self.ccmpool)
            next_project.set_members(self.project_objects)
            ccm_cache.force_cache_update_for_object(next_project)
        # Find difference between baseline_project and next_project
        new_objects, old_objects = get_changed_objects(self.baseline_objects, self.project_objects)
        next_projects = [o for o in self.project_objects.keys() if ':project:' in o]
        baseline_projects = [o for o in self.baseline_objects.keys() if ':project:' in o]
        object_history = ObjectHistory(self.ccm, next_project.get_object_name(), old_objects=old_objects, old_release=baseline_project.get_object_name(), new_projects=next_projects, old_projects=baseline_projects)
    else:
        # root project, get ALL objects in release
        new_objects = self.baseline_objects
        old_objects = []
        object_history = ObjectHistory(self.ccm, baseline_project.get_object_name())
    # Project objects are excluded from processing, so exclude them here too.
    num_of_objects = len([o for o in new_objects.keys() if ":project:" not in o])
    logger.info("objects to process : %i" % num_of_objects)
    objects = []  # flat list of object-name strings collected for this tag
    if self.tag in self.history.keys():
        if 'objects' in self.history[self.tag]:
            # Add all existing objects (resume from a previous run)
            for o in self.history[self.tag]['objects']:
                objects.append(o)
                #objects[o] = ccm_cache.get_object(o, self.ccm)
            logger.info("no of old objects loaded %i", len(objects))
    else:
        self.history[self.tag] = {'objects': [], 'tasks': []}
    # Skip objects already loaded from history; assumes history stores the
    # same object-name strings as new_objects' keys — TODO confirm.
    object_names = set([o for o in new_objects.keys() if ':project:' not in o]) - set(objects)
    for o in object_names:
        object = ccm_cache.get_object(o, self.ccm)
        if next_project:
            # get the object history between releases
            history = object_history.get_history(object, new_objects[object.get_object_name()])
            objects.extend(history.keys())
            #objects.update(object_history.get_history(object, new_objects[object.get_object_name()]))
        else:
            # just get all the objects in the release
            logger.info('Processing: %s path: %s' %(object.get_object_name(), str(new_objects[object.get_object_name()])))
            #object.set_path(new_objects[object.get_object_name()])
            objects.append(o)
            #objects[object.get_object_name()] = object
        num_of_objects -=1
        logger.info('Objects left: %i' %num_of_objects)
    # De-duplicate: get_history may return overlapping sets of objects.
    objects = list(set(objects))
    logger.info("number of files: %i" % len(objects))
    self.history[self.tag]['objects'] = objects
    # Create tasks from objects, but not for initial project
    if next_project:
        self.find_tasks_from_objects(objects, next_project)
    # Handle new projects:
    if next_project:
        new_created_projects = get_new_projects(old_objects, new_objects, self.delim)
        dir_lookup ={}
        # create lookup for path to directory-4-part-name {path : dir-name}
        for k,v in new_objects.iteritems():
            if ':dir:' in k:
                for i in v:
                    dir_lookup[i] = k
        project_dirs = [dir for project in new_created_projects for dir in new_objects[project]]
        directories = [d for k,v in new_objects.iteritems() for d in v if ':dir:' in k]
        # Directories whose paths belong to a newly created subproject.
        changed_directories = set(directories).intersection(set(project_dirs))
        changed_directories = remove_subdirs_under_same_path(changed_directories)
        dirs = [dir_lookup[d] for d in changed_directories]
        # find task and add all objects to the task, which shares the path.
        project_tasks = self.find_task_from_dirs(dirs)
        logger.info("Checking for new subprojects")
        # check directories for new subdirectories and add their content
        directories = self.get_new_dirs(self.project_objects, new_objects)
        # Limit directories to only directories not already processed as a new project
        directories = set(directories) - set(dirs)
        # find task and add all objects to the task, which shares the path.
        dir_tasks = self.find_task_from_dirs(directories)
        # merge project and dir tasks (project tasks win on key collisions)
        for k,v in dir_tasks.iteritems():
            if not project_tasks.has_key(k):
                project_tasks[k] = v
        # if real synergy tasks isn't found check the path of the directories and skip possible subdirs
        tasks = self.reduce_dir_tasks(project_tasks)
        logger.info("Project and dir tasks reduced...")
        logger.info("%s" % str(tasks))
        self.update_tasks_with_directory_contens(tasks)
    # remove possible duplicates from objects
    self.history[self.tag]['objects'] = list(set(self.history[self.tag]['objects']))