def recalculate(self, cluster_set):
    '''
    Construct the pairwise probability matrix for ``cluster_set``.

    The previous matrix (``self._bib_matrix``) is kept as a cache:
    for bib pairs whose bibs are both in the up-to-date set
    (presumably what ``__get_up_to_date_bibs`` returns — verify in its
    definition), the old value is reused; every other pair is
    recomputed with ``compare_bibrefrecs``.

    @param cluster_set: A cluster set object, used to initialize the
        matrix.
    '''
    last_cleaned = 0  # value of cur_calc at the last cache purge
    # Keep a handle on the outgoing matrix so cached values can be reused.
    old_matrix = self._bib_matrix
    cached_bibs = self.__get_up_to_date_bibs()
    have_cached_bibs = bool(cached_bibs)
    self._bib_matrix = Bib_matrix(cluster_set)
    ncl = cluster_set.num_all_bibs
    # Upper bound on the number of pairwise comparisons: C(ncl, 2).
    expected = ((ncl * (ncl - 1)) / 2)
    if expected == 0:
        expected = 1  # avoid division by zero in the progress ratio
    # cur_calc: pairs actually compared; opti: pairs served from cache;
    # prints_counter: (cur_calc + opti) at the last progress report.
    cur_calc, opti, prints_counter = 0, 0, 0
    for cl1 in cluster_set.clusters:
        # Report progress at most once per 100000 processed pairs.
        if cur_calc + opti - prints_counter > 100000:
            update_status(
                (float(opti) + cur_calc) / expected,
                "Prob matrix: calc %d, opti %d." % (cur_calc, opti))
            prints_counter = cur_calc + opti

        #clean caches
        # Purge comparison caches every 2M fresh computations to bound memory.
        if cur_calc - last_cleaned > 2000000:
            clear_comparison_caches()
            last_cleaned = cur_calc

        for cl2 in cluster_set.clusters:
            # id() ordering visits each unordered cluster pair exactly once;
            # clusters marked as mutually hating need no comparison.
            if id(cl1) < id(cl2) and not cl1.hates(cl2):
                for bib1 in cl1.bibs:
                    for bib2 in cl2.bibs:
                        if have_cached_bibs and bib1 in cached_bibs and bib2 in cached_bibs:
                            val = old_matrix[bib1, bib2]
                            if not val:
                                # Cache miss / empty value: compute fresh.
                                cur_calc += 1
                                val = compare_bibrefrecs(bib1, bib2)
                            else:
                                opti += 1
                                # In debug mode, verify the cached value
                                # still matches a fresh comparison.
                                if bconfig.DEBUG_CHECKS:
                                    assert _debug_is_eq_v(
                                        val, compare_bibrefrecs(bib1, bib2))
                        else:
                            cur_calc += 1
                            val = compare_bibrefrecs(bib1, bib2)
                        self._bib_matrix[bib1, bib2] = val

    clear_comparison_caches()
    update_status_final("Matrix done. %d calc, %d opt." % (cur_calc, opti))
def recalculate(self, cluster_set):
    '''
    Build the pairwise probability matrix for the given cluster set,
    reusing values from the previous matrix for bibs that are still
    up to date and recomputing everything else.

    @param cluster_set: A cluster set object, used to initialize the matrix.
    '''
    previous = self._bib_matrix
    fresh_bibs = self.__get_up_to_date_bibs()
    use_cache = bool(fresh_bibs)
    self._bib_matrix = Bib_matrix(cluster_set)

    total_bibs = cluster_set.num_all_bibs
    # C(n, 2) pairs at most; clamp to 1 so the progress ratio never divides by 0.
    pair_count = ((total_bibs * (total_bibs - 1)) / 2)
    if pair_count == 0:
        pair_count = 1

    computed = 0     # pairs compared from scratch
    reused = 0       # pairs served from the previous matrix
    last_report = 0  # computed + reused at the previous status update
    last_purge = 0   # value of `computed` at the last cache purge

    for left in cluster_set.clusters:
        # Emit a status line at most once per 100000 processed pairs.
        if computed + reused - last_report > 100000:
            update_status((float(reused) + computed) / pair_count,
                          "Prob matrix: calc %d, opti %d." % (computed, reused))
            last_report = computed + reused

        #clean caches
        if computed - last_purge > 2000000:
            clear_comparison_caches()
            last_purge = computed

        for right in cluster_set.clusters:
            # Visit each unordered cluster pair once (id ordering) and
            # skip pairs that are declared incompatible.
            if id(left) >= id(right) or left.hates(right):
                continue
            for bib1 in left.bibs:
                for bib2 in right.bibs:
                    cacheable = (use_cache and bib1 in fresh_bibs
                                 and bib2 in fresh_bibs)
                    val = previous[bib1, bib2] if cacheable else None
                    if val:
                        reused += 1
                        if bconfig.DEBUG_CHECKS:
                            assert _debug_is_eq_v(
                                val, compare_bibrefrecs(bib1, bib2))
                    else:
                        computed += 1
                        val = compare_bibrefrecs(bib1, bib2)
                    self._bib_matrix[bib1, bib2] = val

    clear_comparison_caches()
    update_status_final("Matrix done. %d calc, %d opt." % (computed, reused))
def recalculate(self, cluster_set): ''' Constructs probability matrix. If use_cache is true, it will try to load old computations from the database. If save cache is true it will save the current results into the database. @param cluster_set: A cluster set object, used to initialize the matrix. ''' last_cleaned = 0 self._bib_matrix.store() try: old_matrix = Bib_matrix(self._bib_matrix.name + 'copy') old_matrix.duplicate_existing(self._bib_matrix.name, self._bib_matrix.name + 'copy') old_matrix.load() cached_bibs = self.__get_up_to_date_bibs(old_matrix) have_cached_bibs = bool(cached_bibs) except IOError: old_matrix.destroy() cached_bibs = None have_cached_bibs = False self._bib_matrix.destroy() self._bib_matrix = Bib_matrix(cluster_set.last_name, cluster_set=cluster_set) ncl = cluster_set.num_all_bibs expected = ((ncl * (ncl - 1)) / 2) if expected == 0: expected = 1 try: cur_calc, opti, prints_counter = 0, 0, 0 for cl1 in cluster_set.clusters: if cur_calc + opti - prints_counter > 100000 or cur_calc == 0: update_status( (float(opti) + cur_calc) / expected, "Prob matrix: calc %d, opti %d." % (cur_calc, opti)) prints_counter = cur_calc + opti # #clean caches if cur_calc - last_cleaned > 20000000: gc.collect() # clear_comparison_caches() last_cleaned = cur_calc for cl2 in cluster_set.clusters: if id(cl1) < id(cl2) and not cl1.hates(cl2): for bib1 in cl1.bibs: for bib2 in cl2.bibs: if have_cached_bibs: try: val = old_matrix[bib1, bib2] opti += 1 if bconfig.DEBUG_CHECKS: assert _debug_is_eq_v( val, compare_bibrefrecs(bib1, bib2)) except KeyError: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) if not val: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) else: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) self._bib_matrix[bib1, bib2] = val except Exception, e: raise Exception("""Error happened in prob_matrix.recalculate with val:%s original_exception: %s """ % (str(val), str(e)))
def recalculate(self, cluster_set): ''' Constructs probability matrix. If use_cache is true, it will try to load old computations from the database. If save cache is true it will save the current results into the database. @param cluster_set: A cluster set object, used to initialize the matrix. ''' last_cleaned = 0 self._bib_matrix.store() try: old_matrix = Bib_matrix(self._bib_matrix.name+'copy') old_matrix.duplicate_existing(self._bib_matrix.name, self._bib_matrix.name+'copy') old_matrix.load() cached_bibs = self.__get_up_to_date_bibs(old_matrix) have_cached_bibs = bool(cached_bibs) except IOError: old_matrix.destroy() cached_bibs = None have_cached_bibs = False self._bib_matrix.destroy() self._bib_matrix = Bib_matrix(cluster_set.last_name, cluster_set=cluster_set) ncl = cluster_set.num_all_bibs expected = ((ncl * (ncl - 1)) / 2) if expected == 0: expected = 1 try: cur_calc, opti, prints_counter = 0, 0, 0 for cl1 in cluster_set.clusters: if cur_calc+opti - prints_counter > 100000 or cur_calc == 0: update_status((float(opti) + cur_calc) / expected, "Prob matrix: calc %d, opti %d." % (cur_calc, opti)) prints_counter = cur_calc+opti # #clean caches if cur_calc - last_cleaned > 20000000: gc.collect() # clear_comparison_caches() last_cleaned = cur_calc for cl2 in cluster_set.clusters: if id(cl1) < id(cl2) and not cl1.hates(cl2): for bib1 in cl1.bibs: for bib2 in cl2.bibs: if have_cached_bibs: try: val = old_matrix[bib1, bib2] opti += 1 if bconfig.DEBUG_CHECKS: assert _debug_is_eq_v(val, compare_bibrefrecs(bib1, bib2)) except KeyError: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) if not val: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) else: cur_calc += 1 val = compare_bibrefrecs(bib1, bib2) self._bib_matrix[bib1, bib2] = val except Exception, e: raise Exception("""Error happened in prob_matrix.recalculate with val:%s original_exception: %s """%(str(val),str(e)))