def detect_conflict(self, confl_list):
    """
    Main public class method. Takes a list of package-hash pairs that
    conflict with each other and returns the joined result for both
    directions (package A vs package B and package B vs package A).

    :param confl_list: list of tuples with package hashes
    :return: `list` of `tuple` (package hash, conflict hash) for the input list
    """
    # get unique package hashes
    uniq_hshs = list({hsh for confl in confl_list for hsh in confl})

    # get conflicts and provides for every unique package,
    # also (epoch, version, release, disttag)
    hsh_dpt_dict, hsh_evrd = self._get_dict_conflict_provide(uniq_hshs)

    conflicts = []
    for hshA, hshB in confl_list:
        # A - conflicts; B - provides
        conflA = self._get_conflicts(
            hsh_dpt_dict[hshA], hsh_dpt_dict[hshB], hshA, hshB, hsh_evrd
        )
        # A - provides; B - conflicts
        conflB = self._get_conflicts(
            hsh_dpt_dict[hshB], hsh_dpt_dict[hshA], hshB, hshA, hsh_evrd
        )
        conflicts += utils.remove_duplicate(conflA + conflB)

    return conflicts
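# A minimal usage sketch, assuming a ConflictFilter constructed with a
# branch name and an architecture tuple as in misconflict_packages()
# below; the branch and hash values here are hypothetical.
c_filter = ConflictFilter('sisyphus', ('x86_64', 'noarch'))

# pairs of package hashes that conflict at the file level
pairs = [(111, 222), (111, 333)]

# keeps only the pairs whose conflict is explicitly declared through
# Conflicts/Provides, checked in both directions (A vs B and B vs A)
declared = c_filter.detect_conflict(pairs)
# e.g. [(111, 222)] if only that conflict is declared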
def misconflict_packages():
    """
    Search for conflicting files in packages that do not declare a conflict
    with each other.

    Input GET params:
        pkg_ls * - package or list of packages
        task ** - task id
        branch (* - for pkg_ls only) - name of repository
        arch - package architectures

    Output structure:
        input package
        conflict package
        version
        release
        epoch
        architectures
        files with conflict
    """
    server.url_logging()

    check_params = server.check_input_params(source=0)
    if check_params is not True:
        return check_params

    values = server.get_dict_values([('pkg_ls', 's', 'pkg_name'),
                                     ('task', 'i'),
                                     ('branch', 's', 'repo_name'),
                                     ('arch', 's')])

    if values['pkg_ls'] and values['task']:
        return utils.json_str_error("One parameter only. ('name'/'task')")

    if not values['pkg_ls'] and not values['task']:
        return utils.json_str_error(
            "'pkg_ls' or 'task' is a required parameter.")

    if values['pkg_ls'] and not values['branch']:
        return get_helper(server.helper(request.path))

    if values['arch']:
        allowed_archs = values['arch'].split(',')
        if 'noarch' not in allowed_archs:
            allowed_archs.append('noarch')
    else:
        allowed_archs = server.default_archs

    allowed_archs = tuple(allowed_archs)

    # prepare the packages list from a task
    if values['task']:
        # get the branch of the task
        g.connection.request_line = (
            "SELECT DISTINCT branch FROM Tasks WHERE task_id = %(task)d",
            {'task': values['task']}
        )

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Task {task} not found!".format(task=values['task']))

        pbranch = response[0][0]

        # get packages of the task for the last build iteration (hashes)
        g.connection.request_line = (QM.misconflict_pkgs_get_pkgs_of_task, {
            'task': values['task']
        })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Error: Packages in task {task} not found!"
                "".format(task=values['task']))

        # join tuples from the response list
        input_pkg_hshs = [hsh[0] for hsh in response]

    # package list without a task
    else:
        pkg_ls = tuple(values['pkg_ls'].split(','))
        pbranch = values['branch']

        # get hashes for the package names
        g.connection.request_line = (QM.misconflict_pkgs_get_hshs_by_pkgs, {
            'pkgs': tuple(pkg_ls),
            'branch': pbranch,
            'arch': allowed_archs
        })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Error: Packages {pkgs} not found in pkgset {branch}!".format(
                    pkgs=pkg_ls, branch=pbranch))

        # check the existence of every package by comparing the number of
        # input packages with the number selected from the database
        if len(set([pkg[1] for pkg in response])) != len(pkg_ls):
            return utils.json_str_error("Error of input data.")

        # form a list of package hashes
        input_pkg_hshs = [pkg[0] for pkg in response]

    if not input_pkg_hshs:
        return json.dumps({})

    # get list of (input package | conflict package | conflict files)
    g.connection.request_line = (QM.misconflict_pkgs_get_pkg_with_conflict, {
        'hshs': tuple(input_pkg_hshs),
        'branch': pbranch,
        'arch': allowed_archs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    if not response:
        return json.dumps({})

    hshs_files = response

    # list of conflicting package pairs
    in_confl_hshs = [(hsh[0], hsh[1]) for hsh in hshs_files]

    # filter conflicts by provides/conflicts
    c_filter = ConflictFilter(pbranch, allowed_archs)

    # check each pair for the presence of a declared conflict; if the
    # conflict between the packages in the pair is declared, the pair
    # is added to the list
    filter_ls = c_filter.detect_conflict(in_confl_hshs)

    # create a dict of package names keyed by hash
    hsh_name_dict = defaultdict(dict)
    for hsh_1, hsh_2, _, name_2, name_1, _ in response:
        hsh_name_dict[hsh_1], hsh_name_dict[hsh_2] = name_1, name_2

    # convert the hashes into names, putting the name of the input package
    # first in the pair if it is not there already
    filter_ls_names = []
    for hsh in filter_ls:
        inp_pkg = hsh[0] if hsh[0] in input_pkg_hshs else hsh[1]
        out_pkg = hsh[0] if hsh[0] != inp_pkg else hsh[1]
        result_pair = (hsh_name_dict[inp_pkg], hsh_name_dict[out_pkg])
        # deduplicate the name pairs
        if result_pair not in filter_ls_names:
            filter_ls_names.append(result_pair)

    # form the list of tuples (input package | conflict package | conflict files)
    result_list, output_pkgs = [], set()
    for pkg in hshs_files:
        for hsh in pkg[:2]:
            output_pkgs.add(hsh)
        pkg = (hsh_name_dict[pkg[0]], hsh_name_dict[pkg[1]], pkg[2])
        if pkg not in result_list:
            result_list.append(pkg)

    # get the architectures of the found packages
    g.connection.request_line = QM.misconflict_pkgs_get_pkg_archs.format(
        hshs=tuple(output_pkgs))

    status, response = g.connection.send_request()
    if status is False:
        return response

    pkg_archs_dict = utils.tuplelist_to_dict(response, 1)

    # look for duplicate package pairs in the list that differ only in
    # files and join them
    result_dict_cleanup = defaultdict(list)
    for pkg in result_list:
        result_dict_cleanup[(pkg[0], pkg[1])] += pkg[2]

    confl_pkgs = utils.remove_duplicate(
        [pkg[1] for pkg in result_dict_cleanup.keys()])

    # get the main information of the packages by package hashes
    g.connection.request_line = (QM.misconflict_pkgs_get_meta_by_hshs, {
        'pkgs': tuple(confl_pkgs),
        'branch': pbranch,
        'arch': allowed_archs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    # form a dict of package info keyed by name
    name_info_dict = {}
    for pkg in response:
        name_info_dict[pkg[0]] = pkg[1:]

    # form the list of tuples
    # (input pkg | conflict pkg | pkg info | conflict files) and filter it
    result_list_info = []
    for pkg, files in result_dict_cleanup.items():
        inp_pkg_archs = set(pkg_archs_dict[pkg[0]])
        found_pkg_archs = set(pkg_archs_dict[pkg[1]])
        intersect_pkg_archs = inp_pkg_archs.intersection(found_pkg_archs)

        if (pkg[0], pkg[1]) not in filter_ls_names and intersect_pkg_archs:
            pkg = (pkg[0], pkg[1]) + \
                  name_info_dict[pkg[1]][:-1] + \
                  (list(intersect_pkg_archs),) + (files,)
            result_list_info.append(pkg)

    return utils.convert_to_json([
        'input_package', 'conflict_package', 'version', 'release',
        'epoch', 'archs', 'files_with_conflict'
    ], result_list_info)
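# A hedged client-side sketch: the route path and host are assumptions,
# but the GET parameters and the returned field names follow the code
# above.
import requests

resp = requests.get(
    'http://localhost:5000/misconflict_packages',
    params={'pkg_ls': 'foo,bar', 'branch': 'sisyphus', 'arch': 'x86_64'},
)

# each record pairs an input package with a conflicting one:
# input_package | conflict_package | version | release | epoch
# | archs | files_with_conflict
print(resp.json())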
def task_diff():
    """Show the difference in dependencies between the packages of a task
    and their versions in the repository."""
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    task_id = server.get_one_value('task', type_='i')
    if not task_id:
        return get_helper(server.helper(request.path))

    # get hashes of the task packages
    g.connection.request_line = QM.task_diff_get_task_pkgs.format(id=task_id)

    status, response = g.connection.send_request()
    if status is False:
        return response

    task_pkgs = utils.join_tuples(response)

    # get hashes of the same packages in the repository
    g.connection.request_line = (QM.task_diff_get_repo_pkgs, {
        'hshs': task_pkgs,
        'id': task_id
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    if not response:
        return json.dumps({})

    repo_pkgs = utils.join_tuples(response)

    # dependencies of the task packages
    g.connection.request_line = (QM.task_diff_get_depends_by_hshs, {
        'hshs': task_pkgs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    task_deps = response

    # dependencies of the repository packages
    g.connection.request_line = (QM.task_diff_get_depends_by_hshs, {
        'hshs': repo_pkgs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    repo_deps = response

    uniq_repo_pkgs = utils.remove_duplicate([i[0] for i in repo_deps])

    # pre-build an empty (package -> dep type -> arch -> []) skeleton so
    # that both structures share the same set of keys
    base_struct = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    for pkg in uniq_repo_pkgs:
        for type_ in ['provide', 'require', 'obsolete', 'conflict']:
            for arch in ['x86_64', 'x86_64-i586', 'i586']:
                base_struct[pkg][type_][arch] = []

    def create_struct(deps):
        # deps rows: (package name, dep type, arch, list of dep strings)
        struct = copy.deepcopy(base_struct)
        for el in deps:
            if el[0] in base_struct:
                struct[el[0]][el[1]][el[2]] += el[3]
        return struct

    task_struct = create_struct(task_deps)
    repo_struct = create_struct(repo_deps)

    # diff of dependency sets: '-' marks deps present only in the
    # repository version, '+' deps present only in the task version
    result_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    for name, type_dict in task_struct.items():
        for type_, arch_dict in type_dict.items():
            for arch, value in arch_dict.items():
                task_set = set(value)
                repo_set = set(repo_struct[name][type_][arch])

                res_list = ['-{}'.format(dep) for dep in repo_set - task_set] + \
                           ['+{}'.format(dep) for dep in task_set - repo_set]

                if res_list:
                    result_dict[name][type_][arch] = res_list

    return json.dumps(result_dict)
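# The core of the diff above is a pair of set differences; a standalone
# sketch with made-up dependency strings:
task_set = {'libfoo >= 1.0', 'libbar'}
repo_set = {'libfoo >= 0.9', 'libbar'}

# '-' marks deps present only in the repository version,
# '+' deps present only in the task version
res_list = ['-{}'.format(dep) for dep in repo_set - task_set] + \
           ['+{}'.format(dep) for dep in task_set - repo_set]

print(res_list)
# ['-libfoo >= 0.9', '+libfoo >= 1.0']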
def eosip(self, content_file):
    '''
    Parse an EO-SIP metadata XML file and return the extracted metadata
    as a dictionary.

    extractMetadata(xml_file) -> dictionary

    :param content_file: complete path to an EO-SIP format XML file with
                         the metadata

    Example return value:
        {'earthobservation_resulttime_timeinstant': '2011-10-16T20:52:46Z'}
    '''
    product = content_file
    if not isfile(content_file):
        return None, 101

    try:
        tree = ET.parse(product)
    except ET.XMLSyntaxError:  # lxml parse error (lxml-only APIs are used below)
        return None, 102
    except Exception:
        return None, 999
    else:
        print("the xml file has been successfully read in!")

    root = tree.getroot()
    dic = {}
    avoid_string = 'vendorspecific'

    for ele in tree.iter():
        # skip nodes without text
        if ele.text is None:
            continue
        # skip empty strings
        if ele.text.isspace():
            continue

        val = ele.text
        property = []

        # --- collect the tags of the current node and its ancestors in
        #     reverse (root-first) order
        for arc in ele.iterancestors():
            property.insert(0, arc.tag)
        property.append(ele.tag)

        # --- convert the tags into the key under which the value is stored
        property = [re.sub(r'\{.*?\}', "", p).lower() for p in property]
        property = remove_duplicate(property)
        property = self.filter_not_needed(property)
        property = '_'.join(property)

        if avoid_string in property:
            continue

        dic[property] = val

    # --- insert the specific case of vendorSpecific into dic
    try:
        for node in tree.findall('//eop:vendorSpecific//eop:localValue',
                                 namespaces=root.nsmap):
            if node.getprevious() is None:
                # this local value has no attribute
                continue
            attribute = 'vendorspecific_' + node.getprevious().text.lower()
            localValue = node.text
            dic[attribute] = localValue
    except Exception:
        print("can't find eop vendor")

    return dic, 0
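# A standalone sketch (hypothetical minimal XML) of how an element's tag
# path becomes a dictionary key: namespace braces are stripped, tags are
# lower-cased and joined with '_', matching the example key in the
# docstring above.
from lxml import etree
import re

xml = b'''<EarthObservation xmlns="http://www.opengis.net/eop/2.0">
  <resultTime><TimeInstant>2011-10-16T20:52:46Z</TimeInstant></resultTime>
</EarthObservation>'''

root = etree.fromstring(xml)
ele = root.find('.//{http://www.opengis.net/eop/2.0}TimeInstant')

# ancestors come closest-first, so reverse them for a root-first path
tags = [a.tag for a in reversed(list(ele.iterancestors()))] + [ele.tag]
key = '_'.join(re.sub(r'\{.*?\}', '', t).lower() for t in tags)

print(key, '->', ele.text)
# earthobservation_resulttime_timeinstant -> 2011-10-16T20:52:46Z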
        # skip nodes without text
        if ele.text is None:
            continue
        # skip empty strings
        if ele.text.isspace():
            continue

        val = ele.text
        property = []

        # --- collect the tags of the current node and its ancestors in
        #     reverse (root-first) order
        for arc in ele.iterancestors():
            property.insert(0, arc.tag)
        property.append(ele.tag)

        # --- convert the tags into the key under which the value is stored
        property = [re.sub(r'\{.*?\}', "", p).lower() for p in property]
        property = remove_duplicate(property)
        property = self.filter_not_needed(property)
        property = '_'.join(property)

        # -- the current node belongs to the vendor-specific structure, skip it
        if avoid_string in property:
            continue

        # -- handle the same property occurring with multiple values:
        #    suffix repeated keys with a counter (prop, prop_1, prop_2, ...)
        same_key_list = [
            k for k, v in dic.items() if k.startswith(property)
        ]
        if same_key_list:
            property = property + '_' + str(len(same_key_list))

        dic[property] = val

    # --- insert the specific case of vendorSpecific into dic
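# A standalone sketch of the duplicate-key handling above: repeated
# properties get a numeric suffix (prop, prop_1, prop_2, ...). Note that
# the startswith() match also counts unrelated keys that merely share
# the prefix, so the counter is only an upper bound.
dic = {}
for val in ('a', 'b', 'c'):
    property = 'platform_instrument'
    same_key_list = [k for k in dic if k.startswith(property)]
    if same_key_list:
        property = property + '_' + str(len(same_key_list))
    dic[property] = val

print(dic)
# {'platform_instrument': 'a', 'platform_instrument_1': 'b',
#  'platform_instrument_2': 'c'}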
def center_edge(self, rmin=4, gamma_min=0.89):
    """
    Find all swirls from gamma1 and gamma2.

    Parameters
    ----------
    rmin : `int`
        minimum radius of swirls; all swirls with a radius less than
        rmin will be rejected.
    gamma_min : `float`
        minimum value of gamma1; all potential swirls with peak gamma1
        values less than gamma_min will be rejected.

    Returns
    -------
    `dict`
        The keys and their meanings:
            center: center locations of vortices, in the form of [x, y].
            edge: edge locations of vortices, in the form of [x, y].
            points: all points within vortices, in the form of [x, y].
            peak: maximum/minimum gamma1 values in vortices.
            radius: equivalent radius of vortices.
        All results are in pixel coordinates.
    """
    # initial dictionary setup
    self.edge_prop = {'center': (), 'edge': (), 'points': (),
                      'peak': (), 'radius': ()}

    # find contours of gamma2 at the thresholds -2/pi and +2/pi
    # (replaces the old, deprecated plt.contour-based algorithm)
    cs = np.array(measure.find_contours(self.gamma[..., 1].T, -2 / np.pi))
    cs_pos = np.array(measure.find_contours(self.gamma[..., 1].T,
                                            2 / np.pi))
    if len(cs) == 0:
        cs = cs_pos
    elif len(cs_pos) != 0:
        cs = np.append(cs, cs_pos, 0)

    for i in range(np.shape(cs)[0]):
        v = np.rint(cs[i])
        v = remove_duplicate(v)
        # find all points in the contour
        ps = points_in_poly(v)
        # gamma1 value of all points in the contour
        dust = []
        for p in ps:
            dust.append(self.gamma[..., 0][int(p[1]), int(p[0])])

        # determine swirl properties
        if len(dust) > 1:
            # effective radius
            re = np.sqrt(np.array(ps).shape[0] / np.pi) / self.factor
            # only consider swirls with re >= rmin and a peak gamma1
            # value of at least gamma_min
            if np.max(np.fabs(dust)) >= gamma_min and re >= rmin:
                # extract the index of the peak, first dimension only
                idx = np.where(np.fabs(dust) == np.max(np.fabs(dust)))[0][0]
                self.edge_prop['center'] += (np.array(ps[idx]) / self.factor,)
                self.edge_prop['edge'] += (np.array(v) / self.factor,)
                self.edge_prop['points'] += (np.array(ps) / self.factor,)
                self.edge_prop['peak'] += (dust[idx],)
                self.edge_prop['radius'] += (re,)

    return self.edge_prop
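# A minimal usage sketch; `detector` stands for a hypothetical instance
# of the class that owns center_edge (its construction is not shown in
# this section). Only the call signature and the returned keys follow
# the code above.
props = detector.center_edge(rmin=4, gamma_min=0.89)

# one entry per detected vortex, all in pixel coordinates
for center, radius, peak in zip(props['center'], props['radius'],
                                props['peak']):
    # the sign of the peak gamma1 value reflects the rotation sense
    print(center, radius, peak)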