def fix_qcx2_res_types(qcx2_res):
    """Normalize the data types of cx2_fm_V and cx2_fs_V in every result.

    Walks the full list of query results, printing progress as it goes,
    and delegates the per-result type fixup to fix_res_types.
    """
    num_results = len(qcx2_res)
    progress_fmt = helpers.make_progress_fmt_str(num_results)
    for query_cx in xrange(num_results):
        helpers.print_(progress_fmt % (query_cx))
        # Fix each result in place
        fix_res_types(qcx2_res[query_cx])
def load_cached_matches(hs):
    """Load cached query results for the test sample.

    Builds one QueryResult per chip index, partitions the test sample into
    queries that already have a cached result ("clean") and those that do
    not ("dirty"), then loads every clean result from disk.

    Returns:
        (qcx2_res, dirty_test_sample_cx) -- the list of QueryResult objects
        indexed by chip index (clean entries populated from cache), and the
        list of query indexes that still need to be computed.
    """
    print_ = helpers.print_
    test_samp = hs.test_sample_cx
    # Create result containers
    print('[mc2] hs.num_cx = %r ' % hs.num_cx)
    qcx2_res = [QueryResult(qcx, hs) for qcx in xrange(hs.num_cx)]
    #--------------------
    # Read cached queries
    #--------------------
    total_queries = len(test_samp)
    print('[mc2] Total queries: %d' % total_queries)
    dirty_test_sample_cx = []
    clean_test_sample_cx = []
    fmt_str_filter = helpers.make_progress_fmt_str(total_queries,
                                                   lbl='[mc2] check cache: ')
    # Filter queries into dirty and clean sets. Caching can be disabled
    # globally via params.CACHE_QUERY, in which case everything is dirty.
    for count, qcx in enumerate(test_samp):
        print_(fmt_str_filter % (count + 1))
        if params.CACHE_QUERY and qcx2_res[qcx].has_cache(hs):
            clean_test_sample_cx.append(qcx)
        else:
            dirty_test_sample_cx.append(qcx)
    print('')
    print('[mc2] Num clean queries: %d ' % len(clean_test_sample_cx))
    print('[mc2] Num dirty queries: %d ' % len(dirty_test_sample_cx))
    # Check how much data we are going to load.
    # (Replaced a manual accumulate loop over a redundant iter() wrapper
    # with the built-in sum().)
    num_bytes = sum(qcx2_res[qcx].cache_bytes(hs)
                    for qcx in clean_test_sample_cx)
    print('[mc2] Loading %dMB cached results' % (num_bytes / (2.0 ** 20)))
    # Load clean queries from the cache
    fmt_str_load = helpers.make_progress_fmt_str(len(clean_test_sample_cx),
                                                 lbl='[mc2] load cache: ')
    for count, qcx in enumerate(clean_test_sample_cx):
        print_(fmt_str_load % (count + 1))
        qcx2_res[qcx].load(hs)
    print('')
    return qcx2_res, dirty_test_sample_cx