def multicrunch(surfsums, varname=None):
    """
    Given an iterable of SURFSums, compute the rational function given by
    their combined sum.
    Note that this rational function necessarily has degree <= 0, i.e. the
    degree of its numerator is at most that of its denominator.
    """

    surfsums = list(surfsums)

    #
    # Combine the various critical sets and construct a candidate denominator.
    #

    critical = set().union(*(Q._critical for Q in surfsums))

    # Each linear factor is stored as a pair (a, b) representing a*s - b;
    # record the largest multiplicity with which it occurs in any summand.
    cand = dict()
    for Q in surfsums:
        E = Q._cand
        for r in E:
            if r not in cand or cand[r] < E[r]:
                cand[r] = E[r]
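
    # For instance, if one summand records the factor 2*s - 3 with
    # multiplicity 2 and another with multiplicity 5, then cand[(2, 3)] == 5,
    # so g below picks up the factor (2*s - 3)**5.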

    if varname is None:
        varname = 's'

    # The candidate denominator g is the product of all recorded linear
    # factors, each taken with its maximal multiplicity.
    R = QQ[varname]
    s = R.gen(0)
    g = R(prod((a * s - b)**e for ((a, b), e) in cand.items()))
    m = g.degree()
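
    # The computation relies on the true denominator of the combined sum
    # dividing g: the numerator f = g * (combined sum) is then a polynomial,
    # and since the sum has degree <= 0, deg(f) <= m. Hence f is determined
    # by its values at m + 1 distinct points, which is what the interpolation
    # step at the end exploits.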

    logger.info('Total number of SURFs: %d' % sum(Q._count for Q in surfsums))

    for Q in surfsums:
        Q._file.flush()

    logger.info('Combined size of data files: %s' %
                readable_filesize(sum(os.path.getsize(Q._filename) for Q in surfsums)))
    logger.info('Number of critical points: %d' % len(critical))
    logger.info('Degree of candidate denominator: %d' % m)

    #
    # Construct m + 1 non-critical points for evaluation.
    #

    values = set()
    while len(values) < m + 1:
        x = QQ.random_element()
        if x in critical:
            continue
        values.add(x)
    values = list(values)
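
    # Note that the rejection sampling above terminates with probability 1,
    # since the critical set is finite; using a set ensures the points are
    # pairwise distinct.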

    #
    # Set up parallel computations.
    #

    dat_filenames = [Q._filename for Q in surfsums]

    res_names = []
    val_names = []

    value_batches = [values[j::common.ncpus] for j in range(common.ncpus)]
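
    # Round-robin split: with ncpus = 4 and 10 values, for example, the
    # batches have sizes 3, 3, 2, 2; trailing batches are empty precisely
    # when there are fewer values than CPUs.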

    with TemporaryDirectory() as tmpdir:
        for j, v in enumerate(value_batches):
            # Round-robin batching means the first empty batch implies all
            # subsequent ones are empty as well.
            if not v:
                break

            val_filename = os.path.join(tmpdir, 'values%d' % j)
            val_names.append(val_filename)
            res_names.append(os.path.join(tmpdir, 'results%d' % j))
            # Batch file format: the number of points on the first line,
            # followed by one rational point per line.
            with open(val_filename, 'w') as val_file:
                val_file.write(str(len(v)) + '\n')
                for x in v:
                    val_file.write(str(x) + '\n')
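
        # Each cruncher is expected to write one result per evaluation point,
        # in the same order as its values file; the collection loop below
        # relies on this ordering.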

        def fun(k):
            ret = crunch(['crunch', val_names[k], res_names[k]] + dat_filenames)
            if ret == 0:
                logger.info('Cruncher #%d finished.' % k)
            return ret

        logger.info('Launching %d crunchers.' % len(res_names))

        # Outside of debug mode, the crunchers run in parallel; `parallel`
        # appears to be Sage's fork-based @parallel decorator, which yields
        # ((args, kwargs), result) pairs and returns the string 'NO DATA'
        # for a worker that died.
        if not common.debug:
            fun = parallel(ncpus=len(res_names))(fun)
            for (arg, ret) in fun(list(range(len(res_names)))):
                if ret == 'NO DATA':
                    raise RuntimeError('A parallel process died')
                if ret != 0:
                    raise RuntimeError('crunch failed')
        else:
            for k in range(len(res_names)):
                fun(k)

        #
        # Collect results
        #
        pairs = []

        for j, rn in enumerate(res_names):
            it_batch = iter(value_batches[j])
            with open(rn, 'r') as res_file:
                for line in res_file:
                    # Each line of the results file is the value of the
                    # combined sum at the corresponding point x; multiplying
                    # by the candidate denominator g evaluated at x yields a
                    # value of the numerator f = g * sum.
                    x = QQ(next(it_batch))
                    pairs.append((x, g(x) * QQ(line)))

    if len(values) != len(pairs):
        raise RuntimeError('Length of results is off')
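
    # Interpolate the numerator polynomial f from its m + 1 samples; f/g is
    # then the combined sum, converted to the symbolic ring and returned in
    # factored form when nonzero.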

    f = R.lagrange_polynomial(list(pairs))
    res = SR(f / g)
    return res.factor() if res else res
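

# A minimal usage sketch (with hypothetical names): assuming `Q1` and `Q2`
# are SURFSum instances populated elsewhere, the combined rational function
# in the variable 't' would be obtained as
#
#     F = multicrunch([Q1, Q2], varname='t')
#
# With the default varname, the result is a symbolic expression in 's'.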