Code Example #1
File: cremona.py  Project: bopopescu/sagelib-1
def build(name, data_tgz, largest_conductor=0, mini=False, decompress=True):
    """
    Build the CremonaDatabase with given name from scratch
    using the data_tgz tarball.

    .. note::

           For data up to level 170000, this function takes about 5
           minutes on a 2.9 GHz Nehalem Core i3. The resulting database
           occupies 216 MB of disk space.

    To create the large Cremona database from Cremona's data_tgz tarball,
    run the following command::

        sage: d = sage.databases.cremona.build('cremona','ecdata.tgz')   # not tested
    """
    t = name.replace(' ', '_')
    if os.path.exists("%s/cremona/%s.db" % (SAGE_DATA, t)):
        raise RuntimeError("Please (re)move %s/cremona/%s.db" %
                           (SAGE_DATA, t) + " before rebuilding database.")
    if not os.path.exists(data_tgz):
        raise IOError, "The data file is not at %s" % data_tgz
    t = walltime()

    if decompress:
        cmd = "tar zxvf %s" % data_tgz
        n = os.system(cmd)
        if n:
            raise RuntimeError, "Error extracting tarball."
    if mini:
        c = MiniCremonaDatabase(name, False, True)
    else:
        c = LargeCremonaDatabase(name, False, True)
    c._init_from_ftpdata('ecdata', largest_conductor)
    print "Total time: ", walltime(t)
Code Example #2
File: cremona.py  Project: dagss/sage
def build(name, data_tgz, largest_conductor=0, mini=False, decompress=True):
    """
    Build the CremonaDatabase with given name from scratch
    using the data_tgz tarball.

    .. note::

           For data up to level 170000, this function takes about 5
           minutes on a 2.9 GHz Nehalem Core i3. The resulting database
           occupies 216 MB of disk space.

    To create the large Cremona database from Cremona's data_tgz tarball,
    run the following command::

        sage: d = sage.databases.cremona.build('cremona','ecdata.tgz')   # not tested
    """
    t = name.replace(' ','_')
    if os.path.exists("%s/cremona/%s.db"%(SAGE_DATA, t)):
        raise RuntimeError("Please (re)move %s/cremona/%s.db"%(SAGE_DATA, t)
        + " before rebuilding database.")
    if not os.path.exists(data_tgz):
        raise IOError, "The data file is not at %s"%data_tgz
    t = walltime()

    if decompress:
        cmd = "tar zxvf %s"%data_tgz
        n = os.system(cmd)
        if n:
            raise RuntimeError, "Error extracting tarball."
    if mini:
        c = MiniCremonaDatabase(name,False,True)
    else:
        c = LargeCremonaDatabase(name,False,True)
    c._init_from_ftpdata('ecdata', largest_conductor)
    print "Total time: ", walltime(t)
Code Example #3
File: tests.py  Project: Babyll/sage
def manyvars(s, num=70000, inlen=1, step=2000):
    """
    Test that > 65,000 variable names work in each system.
    """
    print "Testing -- %s"%s
    t = '"%s"'%('9'*int(inlen))
    try:
        t = cputime()
        w = walltime()
        v = []
        for i in range(num):
            if i%step==0:
                sys.stdout.write('%s '%i)
                sys.stdout.flush()
            v.append(s(t))
        print '\nsuccess -- time = cpu: %s, wall: %s'%(cputime(t), walltime(w))
    except Exception:
        print "%s -- failed!"%s
Code Example #4
def manyvars(s, num=70000, inlen=1, step=2000):
    """
    Test that > 65,000 variable names work in each system.
    """
    print "Testing -- %s"%s
    t = '"%s"'%('9'*int(inlen))
    try:
        t = cputime()
        w = walltime()
        v = []
        for i in range(num):
            if i%step==0:
                sys.stdout.write('%s '%i)
                sys.stdout.flush()
            v.append(s(t))
        print '\nsuccess -- time = cpu: %s, wall: %s'%(cputime(t), walltime(w))
    except Exception:
        print "%s -- failed!"%s
Code Example #5
File: rains.py  Project: fredrik-johansson/ffisom
def test_gens_cyclotomic(p, n):
    '''
    Test routine for `find_gens_cyclotomic`. Constructs two random
    extensions of F_p of degree n, then calls find_gens_cyclotomic and
    tests that the returned elements have the same minimal polynomial
    over F_p, and that the polynomial has degree n.
    '''
    c, w = cputime(), walltime()
    k1 = GF(p**n, 'z1', modulus='random')
    k2 = GF(p**n, 'z2', modulus='random')
    print "Field creation: CPU %s, Wall %s" % (cputime(c), walltime(w))

    c, w = cputime(), walltime()
    a, b = find_gens_cyclotomic(k1, k2)
    print "Rains' algorithm: CPU %s, Wall %s" % (cputime(c), walltime(w))

    P = a.minpoly()
    assert(P.degree() == n)
    assert(P(b) == 0)
Code Example #6
def test_gens(p, n, use_lucas=True, verbose=False):
    '''
    Test routine for `find_gens`. Constructs two random
    extensions of F_p of degree n, then calls find_gens and
    tests that the returned elements have the same minimal polynomial
    over F_p, and that the polynomial has degree n.
    '''
    c, w = cputime(), walltime()
    k1 = GF(p**n, 'z1', modulus='random')
    k2 = GF(p**n, 'z2', modulus='random')
    print "Field creation: CPU %s, Wall %s" % (cputime(c), walltime(w))

    c, w = cputime(), walltime()
    a, b = find_gens(k1, k2, use_lucas=use_lucas, verbose=verbose)
    print "Rains' algorithm: CPU %s, Wall %s" % (cputime(c), walltime(w))

    P = a.minpoly()
    assert (P.degree() == n)
    assert (P(b) == 0)
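
A hedged usage sketch for the two test routines above: both take a prime p and a degree n, construct two random extensions of F_p, and print CPU and wall timings. The small parameters below are only illustrative and assume the ffisom code (with find_gens and find_gens_cyclotomic) has been loaded in a Sage session.

# Hypothetical invocations with small parameters (assumes a Sage session
# with the ffisom routines loaded).
test_gens_cyclotomic(5, 4)
test_gens(5, 4, use_lucas=True, verbose=False)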
Code Example #7
    def __init__(self, input, starttime=None, failure=""):
        r"""
        See the class documentation for description of the inputs.

        EXAMPLES::

            sage: from sage.parallel.use_fork import WorkerData
            sage: W = WorkerData(42)
        """
        self.input = input
        self.starttime = starttime or walltime()
        self.failure = failure
Code Example #8
File: use_fork.py  Project: saraedum/sage-renamed
    def __init__(self, input, starttime=None, failure=""):
        r"""
        See the class documentation for description of the inputs.

        EXAMPLES::

            sage: from sage.parallel.use_fork import WorkerData
            sage: W = WorkerData(42)
        """
        self.input = input
        self.starttime = starttime or walltime()
        self.failure = failure
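
The only timing behaviour in WorkerData is that starttime defaults to the current walltime(), which the parent process later compares against when enforcing timeouts. A minimal sketch, assuming a Sage session (the walltime import path varies between Sage versions):

# Minimal sketch (assumes Sage; walltime lives in sage.misc.misc in older
# releases and sage.misc.timing in newer ones).
from sage.parallel.use_fork import WorkerData
from sage.misc.misc import walltime

W = WorkerData(42)               # starttime is set to walltime() at construction
age = walltime() - W.starttime   # seconds since this worker's data was created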
Code Example #9
    def start(self):
        """
        Start the timer.

        EXAMPLES::

            sage: from sage.doctest.util import Timer
            sage: Timer().start()
            {'cputime': ..., 'walltime': ...}
        """
        self.cputime = cputime()
        self.walltime = walltime()
        return self
Code Example #10
File: util.py  Project: pombredanne/sage-1
    def start(self):
        """
        Start the timer.

        EXAMPLES::

            sage: from sage.doctest.util import Timer
            sage: Timer().start()
            {'cputime': ..., 'walltime': ...}
        """
        self.cputime = cputime()
        self.walltime = walltime()
        return self
Code Example #11
File: util.py  Project: mcognetta/sage
    def stop(self):
        """
        Stops the timer, recording the time that has passed since it
        was started.

        EXAMPLES::

            sage: from sage.doctest.util import Timer
            sage: import time
            sage: timer = Timer().start()
            sage: time.sleep(0.5)
            sage: timer.stop()
            {'cputime': ..., 'walltime': ...}
        """
        self.cputime = cputime(self.cputime)
        self.walltime = walltime(self.walltime)
        return self
Code Example #12
    def stop(self):
        """
        Stops the timer, recording the time that has passed since it
        was started.

        EXAMPLES::

            sage: from sage.doctest.util import Timer
            sage: import time
            sage: timer = Timer().start()
            sage: time.sleep(0.5)
            sage: timer.stop()
            {'cputime': ..., 'walltime': ...}
        """
        self.cputime = cputime(self.cputime)
        self.walltime = walltime(self.walltime)
        return self
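
Putting start() and stop() together: cputime(c) and walltime(w) return the time elapsed since c and w were recorded, so stop() replaces the stored start values with the measured CPU and wall durations. A minimal sketch, assuming a Sage session:

# Minimal sketch (assumes a Sage session).
from sage.doctest.util import Timer
import time

timer = Timer().start()   # records the current cputime() and walltime()
time.sleep(0.5)
timer.stop()              # overwrites them with the elapsed cpu/wall times
print(timer.walltime)     # roughly 0.5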
Code Example #13
File: darmonpoints.py  Project: mmasdeu/darmonpoints
def darmon_point(P, E, beta, prec, ramification_at_infinity = None, input_data = None, magma = None, working_prec = None, **kwargs):
    r'''

    EXAMPLES:

    We first need to import the module::

        sage: from darmonpoints.darmonpoints import darmon_point

    A first example (Stark--Heegner point)::

        sage: from darmonpoints.darmonpoints import darmon_point
        sage: darmon_point(7,EllipticCurve('35a1'),41,20, cohomological=False, use_magma=False, use_ps_dists = True)
        Starting computation of the Darmon point
        ...
        (-70*alpha + 449 : 2100*alpha - 13444 : 1)

    A quaternionic (Greenberg) point::

        sage: darmon_point(13,EllipticCurve('78a1'),5,20) # long time # optional - magma

    A Darmon point over a cubic (1,1) field::

        sage: F.<r> = NumberField(x^3 - x^2 - x + 2)
        sage: E = EllipticCurve([-r -1, -r, -r - 1,-r - 1, 0])
        sage: N = E.conductor()
        sage: P = F.ideal(r^2 - 2*r - 1)
        sage: beta = -3*r^2 + 9*r - 6
        sage: darmon_point(P,E,beta,20) # long time # optional - magma

    '''
    # global G, Coh, phiE, Phi, dK, J, J1, cycleGn, nn, Jlist

    config = ConfigParser.ConfigParser()
    config.read('config.ini')
    param_dict = config_section_map(config, 'General')
    param_dict.update(config_section_map(config, 'DarmonPoint'))
    param_dict.update(kwargs)
    param = Bunch(**param_dict)

    # Get general parameters
    outfile = param.get('outfile')
    use_ps_dists = param.get('use_ps_dists',False)
    use_shapiro = param.get('use_shapiro',True)
    use_sage_db = param.get('use_sage_db',False)
    magma_seed = param.get('magma_seed',1515316)
    parallelize = param.get('parallelize',False)
    Up_method = param.get('up_method','naive')
    use_magma = param.get('use_magma',True)
    progress_bar = param.get('progress_bar',True)
    sign_at_infinity = param.get('sign_at_infinity',ZZ(1))

    # Get darmon_point specific parameters
    idx_orientation = param.get('idx_orientation')
    idx_embedding = param.get('idx_embedding',0)
    algorithm = param.get('algorithm')
    quaternionic = param.get('quaternionic')
    cohomological = param.get('cohomological',True)

    if Up_method == "bigmatrix" and use_shapiro == True:
        import warnings
        warnings.warn('Use of "bigmatrix" for Up iteration is incompatible with Shapiro Lemma trick. Using "naive" method for Up.')
        Up_method = 'naive'

    if working_prec is None:
        working_prec = max([2 * prec + 10, 30])

    if use_magma:
        page_path = os.path.dirname(__file__) + '/KleinianGroups-1.0/klngpspec'
        if magma is None:
            from sage.interfaces.magma import Magma
            magma = Magma()
            quit_when_done = True
        else:
            quit_when_done = False
        magma.attach_spec(page_path)
    else:
        quit_when_done = False

    sys.setrecursionlimit(10**6)

    F = E.base_ring()
    beta = F(beta)
    DB,Np,Ncartan = get_heegner_params(P,E,beta)
    if quaternionic is None:
        quaternionic = ( DB != 1 )
    if cohomological is None:
        cohomological = quaternionic
    if quaternionic and not cohomological:
        raise ValueError("Need cohomological algorithm when dealing with quaternions")
    if use_ps_dists is None:
        use_ps_dists = False if cohomological else True
    try:
        p = ZZ(P)
    except TypeError:
        p = ZZ(P.norm())
    if not p.is_prime():
        raise ValueError,'P (= %s) should be a prime, of inertia degree 1'%P

    if F == QQ:
        dK = ZZ(beta)
        extra_conductor_sq = dK/fundamental_discriminant(dK)
        assert ZZ(extra_conductor_sq).is_square()
        extra_conductor = extra_conductor_sq.sqrt()
        dK = dK / extra_conductor_sq
        assert dK == fundamental_discriminant(dK)
        if dK % 4 == 0:
            dK = ZZ(dK/4)
        beta = dK
    else:
        dK = beta

    # Compute the completion of K at p
    x = QQ['x'].gen()
    K = F.extension(x*x - dK,names = 'alpha')
    if F == QQ:
        dK = K.discriminant()
    else:
        dK = K.relative_discriminant()

    hK = K.class_number()

    sgninfty = 'plus' if sign_at_infinity == 1 else 'minus'
    if hasattr(E,'cremona_label'):
        Ename = E.cremona_label()
    elif hasattr(E,'ainvs'):
        Ename = E.ainvs()
    else:
        Ename = 'unknown'
    fname = 'moments_%s_%s_%s_%s.sobj'%(P,Ename,sgninfty,prec)

    if use_sage_db:
        print("Moments will be stored in database as %s"%(fname))

    if outfile == 'log':
        outfile = '%s_%s_%s_%s_%s_%s.log'%(P,Ename,dK,sgninfty,prec,datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
        outfile = outfile.replace('/','div')
        outfile = '/tmp/darmonpoint_' + outfile

    fwrite("Starting computation of the Darmon point",outfile)
    fwrite('D_B = %s  %s'%(DB,factor(DB)),outfile)
    fwrite('Np = %s'%Np,outfile)
    if Ncartan is not None:
        fwrite('Ncartan = %s'%Ncartan,outfile)
    fwrite('dK = %s (class number = %s)'%(dK,hK),outfile)
    fwrite('Calculation with p = %s and prec = %s'%(P,prec),outfile)
    fwrite('Elliptic curve %s: %s'%(Ename,E),outfile)
    if outfile is not None:
        print("Partial results will be saved in %s"%outfile)

    if input_data is None:
        if cohomological:
            # Define the S-arithmetic group
            if F != QQ and ramification_at_infinity is None:
                if F.signature()[0] > 1:
                    if F.signature()[1] == 1:
                        ramification_at_infinity = F.real_places(prec = Infinity) # Totally 'definite'
                    else:
                        raise ValueError,'Please specify the ramification at infinity'
                elif F.signature()[0] == 1:
                    if len(F.ideal(DB).factor()) % 2 == 0:
                        ramification_at_infinity = [] # Split at infinity
                    else:
                        ramification_at_infinity = F.real_places(prec = Infinity) # Ramified at infinity
                else:
                    ramification_at_infinity = None
            if F == QQ:
                abtuple = QuaternionAlgebra(DB).invariants()
            else:
                abtuple = quaternion_algebra_invariants_from_ramification(F, DB, ramification_at_infinity)

            G = BigArithGroup(P,abtuple,Np,base = F,outfile = outfile,seed = magma_seed,use_sage_db = use_sage_db,magma = magma, use_shapiro = use_shapiro, nscartan=Ncartan)

            # Define the cycle ( in H_1(G,Div^0 Hp) )
            Coh = ArithCoh(G)
            while True:
                try:
                    cycleGn,nn,ell = construct_homology_cycle(G,beta,working_prec,lambda q: Coh.hecke_matrix(q).minpoly(), outfile = outfile, elliptic_curve = E)
                    break
                except PrecisionError:
                    working_prec *= 2
                    verbose('Encountered precision error, trying with higher precision (= %s)'%working_prec)
                except ValueError:
                    fwrite('ValueError occurred when constructing homology cycle. Returning the S-arithmetic group.', outfile)
                    if quit_when_done:
                        magma.quit()
                    return G
                except AssertionError as e:
                    fwrite('Assertion occurred when constructing homology cycle. Returning the S-arithmetic group.', outfile)
                    fwrite('%s'%str(e), outfile)
                    if quit_when_done:
                        magma.quit()
                    return G
            eisenstein_constant = -ZZ(E.reduction(ell).count_points())
            fwrite('r = %s, so a_r(E) - r - 1 = %s'%(ell,eisenstein_constant), outfile)
            fwrite('exponent = %s'%nn, outfile)
            phiE = Coh.get_cocycle_from_elliptic_curve(E, sign = sign_at_infinity)
            if hasattr(E,'ap'):
                sign_ap = E.ap(P)
            else:
                try:
                    sign_ap = ZZ(P.norm() + 1 - E.reduction(P).count_points())
                except ValueError:
                    sign_ap = ZZ(P.norm() + 1 - Curve(E).change_ring(P.residue_field()).count_points(1)[0])

            Phi = get_overconvergent_class_quaternionic(P,phiE,G,prec,sign_at_infinity,sign_ap,use_ps_dists = use_ps_dists,use_sage_db = use_sage_db,parallelize = parallelize,method = Up_method, progress_bar = progress_bar,Ename = Ename)
            # Integration with moments
            tot_time = walltime()
            J = integrate_H1(G,cycleGn,Phi,1,method = 'moments',prec = working_prec,parallelize = parallelize,twist = True,progress_bar = progress_bar)
            verbose('integration tot_time = %s'%walltime(tot_time))
            if use_sage_db:
                G.save_to_db()
        else: # not cohomological
            nn = 1
            eisenstein_constant = 1
            if algorithm is None:
                if Np == 1:
                    algorithm = 'darmon_pollack'
                else:
                    algorithm = 'guitart_masdeu'
            w = K.maximal_order().ring_generators()[0]
            r0,r1 = w.coordinates_in_terms_of_powers()(K.gen())
            QQp = Qp(p,working_prec)
            Cp = QQp.extension(w.minpoly().change_ring(QQp),names = 'g')
            v0 = K.hom([r0 + r1 * Cp.gen()])

            # Optimal embeddings of level one
            fwrite("Computing optimal embeddings of level one...", outfile)
            Wlist = find_optimal_embeddings(K,use_magma = use_magma, extra_conductor = extra_conductor)
            fwrite("Found %s such embeddings."%len(Wlist), outfile)
            if idx_embedding is not None:
                if idx_embedding >= len(Wlist):
                    fwrite('There are not enough embeddings. Taking the index modulo %s'%len(Wlist), outfile)
                    idx_embedding = idx_embedding % len(Wlist)
                fwrite('Taking only embedding number %s'%(idx_embedding), outfile)
                Wlist = [Wlist[idx_embedding]]

            # Find the orientations
            orients = K.maximal_order().ring_generators()[0].minpoly().roots(Zmod(Np),multiplicities = False)
            fwrite("Possible orientations: %s"%orients, outfile)
            if len(Wlist) == 1 or idx_orientation == -1:
                fwrite("Using all orientations, since hK = 1", outfile)
                chosen_orientation = None
            else:
                fwrite("Using orientation = %s"%orients[idx_orientation], outfile)
                chosen_orientation = orients[idx_orientation]

            emblist = []
            for i,W in enumerate(Wlist):
                tau, gtau,sign,limits = find_tau0_and_gtau(v0,Np,W,algorithm = algorithm,orientation = chosen_orientation,extra_conductor = extra_conductor)
                fwrite('n_evals = %s'%sum((num_evals(t1,t2) for t1,t2 in limits)), outfile)
                emblist.append((tau,gtau,sign,limits))

            # Get the cohomology class from E
            Phi = get_overconvergent_class_matrices(P,E,prec,sign_at_infinity,use_ps_dists = use_ps_dists,use_sage_db = use_sage_db,parallelize = parallelize,progress_bar = progress_bar)

            J = 1
            Jlist = []
            for i,emb in enumerate(emblist):
                fwrite("Computing %s-th period, attached to the embedding: %s"%(i,Wlist[i].list()), outfile)
                tau, gtau,sign,limits = emb
                n_evals = sum((num_evals(t1,t2) for t1,t2 in limits))
                fwrite("Computing one period...(total of %s evaluations)"%n_evals, outfile)
                newJ = prod((double_integral_zero_infty(Phi,t1,t2) for t1,t2 in limits))**ZZ(sign)
                Jlist.append(newJ)
                J *= newJ
    else: # input_data is not None
        Phi,J = input_data[1:3]
    fwrite('Integral done. Now trying to recognize the point', outfile)
    fwrite('J_psi = %s'%J,outfile)
    fwrite('g belongs to %s'%J.parent(),outfile)
    #Try to recognize a generator
    if quaternionic:
        local_embedding = G.base_ring_local_embedding(working_prec)
        twopowlist = [4, 3, 2, 1, QQ(1)/2, QQ(3)/2, QQ(1)/3, QQ(2)/3, QQ(1)/4, QQ(3)/4, QQ(5)/2, QQ(4)/3]
    else:
        local_embedding = Qp(p,working_prec)
        twopowlist = [4, 3, 2, 1, QQ(1)/2, QQ(3)/2, QQ(1)/3, QQ(2)/3, QQ(1)/4, QQ(3)/4, QQ(5)/2, QQ(4)/3]

    known_multiple = QQ(nn * eisenstein_constant) # It seems that we are not getting it with present algorithm.
    while known_multiple % p == 0:
        known_multiple = ZZ(known_multiple / p)

    candidate,twopow,J1 = recognize_J(E,J,K,local_embedding = local_embedding,known_multiple = known_multiple,twopowlist = twopowlist,prec = prec, outfile = outfile)

    if candidate is not None:
        HCF = K.hilbert_class_field(names = 'r1') if hK > 1 else K
        if hK == 1:
            try:
                verbose('candidate = %s'%candidate)
                Ptsmall = E.change_ring(HCF)(candidate)
                fwrite('twopow = %s'%twopow,outfile)
                fwrite('Computed point:  %s * %s * %s'%(twopow,known_multiple,Ptsmall),outfile)
                fwrite('(first factor is not understood, second factor is)',outfile)
                fwrite('(r satisfies %s = 0)'%(Ptsmall[0].parent().gen().minpoly()),outfile)
                fwrite('================================================',outfile)
                if quit_when_done:
                    magma.quit()
                return Ptsmall
            except (TypeError,ValueError):
                verbose("Could not recognize the point.")
        else:
            verbose('candidate = %s'%candidate)
            fwrite('twopow = %s'%twopow,outfile)
            fwrite('Computed point:  %s * %s * (x,y)'%(twopow,known_multiple),outfile)
            fwrite('(first factor is not understood, second factor is)',outfile)
            try:
                pols = [HCF(c).relative_minpoly() for c in candidate[:2]]
            except AttributeError:
                pols = [HCF(c).minpoly() for c in candidate[:2]]
            fwrite('Where x satisfies %s'%pols[0],outfile)
            fwrite('and y satisfies %s'%pols[1],outfile)
            fwrite('================================================',outfile)
            if quit_when_done:
                magma.quit()
            return candidate
    else:
        fwrite('================================================',outfile)
        if quit_when_done:
            magma.quit()
        return []
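
The parameter handling at the top of darmon_point layers three sources: defaults from config.ini, the 'DarmonPoint' section overrides, and finally the caller's keyword arguments. A minimal sketch of that layering using only the standard library (config_section_map and Bunch are helpers from the darmonpoints package, so plain dictionaries stand in for them here; note the Python 3 spelling configparser, unlike the Python 2 code above):

# Minimal sketch of the layered-parameter pattern (standard library only).
import configparser

def get_params(**kwargs):
    config = configparser.ConfigParser()
    config.read('config.ini')                # a missing file simply yields no sections
    params = {}
    for section in ('General', 'DarmonPoint'):
        if config.has_section(section):
            params.update(dict(config.items(section)))
    params.update(kwargs)                    # explicit keyword arguments win
    return params

params = get_params(use_magma=False, prec=20)
use_magma = params.get('use_magma', True)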
Code Example #14
def darmon_point(P,
                 E,
                 beta,
                 prec,
                 ramification_at_infinity=None,
                 input_data=None,
                 magma=None,
                 working_prec=None,
                 recognize_point=True,
                 **kwargs):
    r'''

    EXAMPLES:

    We first need to import the module::

        sage: from darmonpoints.darmonpoints import darmon_point

    A first example (Stark--Heegner point)::

        sage: from darmonpoints.darmonpoints import darmon_point
        sage: darmon_point(7,EllipticCurve('35a1'),41,20, cohomological=False, use_magma=False, use_ps_dists = True)
        Starting computation of the Darmon point
        ...
        (-70*alpha + 449 : 2100*alpha - 13444 : 1)

    A quaternionic (Greenberg) point::

        sage: darmon_point(13,EllipticCurve('78a1'),5,20) # long time # optional - magma

    A Darmon point over a cubic (1,1) field::

        sage: F.<r> = NumberField(x^3 - x^2 - x + 2)
        sage: E = EllipticCurve([-r -1, -r, -r - 1,-r - 1, 0])
        sage: N = E.conductor()
        sage: P = F.ideal(r^2 - 2*r - 1)
        sage: beta = -3*r^2 + 9*r - 6
        sage: darmon_point(P,E,beta,20) # long time # optional - magma

    '''
    # global G, Coh, phiE, Phi, dK, J, J1, cycleGn, nn, Jlist

    config = ConfigParser.ConfigParser()
    config.read('config.ini')
    param_dict = config_section_map(config, 'General')
    param_dict.update(config_section_map(config, 'DarmonPoint'))
    param_dict.update(kwargs)
    param = Bunch(**param_dict)

    # Get general parameters
    outfile = param.get('outfile')
    use_ps_dists = param.get('use_ps_dists', False)
    use_shapiro = param.get('use_shapiro', False)
    use_sage_db = param.get('use_sage_db', False)
    magma_seed = param.get('magma_seed', 1515316)
    parallelize = param.get('parallelize', False)
    Up_method = param.get('up_method', 'naive')
    use_magma = param.get('use_magma', True)
    progress_bar = param.get('progress_bar', True)
    sign_at_infinity = param.get('sign_at_infinity', ZZ(1))

    # Get darmon_point specific parameters
    idx_orientation = param.get('idx_orientation')
    idx_embedding = param.get('idx_embedding', 0)
    algorithm = param.get('algorithm')
    quaternionic = param.get('quaternionic')
    cohomological = param.get('cohomological', True)

    if Up_method == "bigmatrix" and use_shapiro == True:
        import warnings
        warnings.warn(
            'Use of "bigmatrix" for Up iteration is incompatible with Shapiro Lemma trick. Using "naive" method for Up.'
        )
        Up_method = 'naive'

    if working_prec is None:
        working_prec = max([2 * prec + 10, 30])

    if use_magma:
        page_path = os.path.dirname(__file__) + '/KleinianGroups-1.0/klngpspec'
        if magma is None:
            from sage.interfaces.magma import Magma
            magma = Magma()
            quit_when_done = True
        else:
            quit_when_done = False
        magma.attach_spec(page_path)
    else:
        quit_when_done = False

    sys.setrecursionlimit(10**6)

    F = E.base_ring()
    beta = F(beta)
    DB, Np, Ncartan = get_heegner_params(P, E, beta)
    if quaternionic is None:
        quaternionic = (DB != 1)
    if cohomological is None:
        cohomological = quaternionic
    if quaternionic and not cohomological:
        raise ValueError(
            "Need cohomological algorithm when dealing with quaternions")
    if use_ps_dists is None:
        use_ps_dists = False if cohomological else True
    try:
        p = ZZ(P)
    except TypeError:
        p = ZZ(P.norm())
    if not p.is_prime():
        raise ValueError('P (= %s) should be a prime, of inertia degree 1' % P)

    if F == QQ:
        dK = ZZ(beta)
        extra_conductor_sq = dK / fundamental_discriminant(dK)
        assert ZZ(extra_conductor_sq).is_square()
        extra_conductor = extra_conductor_sq.sqrt()
        dK = dK / extra_conductor_sq
        assert dK == fundamental_discriminant(dK)
        if dK % 4 == 0:
            dK = ZZ(dK / 4)
        beta = dK
    else:
        dK = beta

    # Compute the completion of K at p
    x = QQ['x'].gen()
    K = F.extension(x * x - dK, names='alpha')
    if F == QQ:
        dK = K.discriminant()
    else:
        dK = K.relative_discriminant()

    hK = K.class_number()

    sgninfty = 'plus' if sign_at_infinity == 1 else 'minus'
    if hasattr(E, 'cremona_label'):
        Ename = E.cremona_label()
    elif hasattr(E, 'ainvs'):
        Ename = E.ainvs()
    else:
        Ename = 'unknown'
    fname = 'moments_%s_%s_%s_%s.sobj' % (P, Ename, sgninfty, prec)

    if use_sage_db:
        print("Moments will be stored in database as %s" % (fname))

    if outfile == 'log':
        outfile = '%s_%s_%s_%s_%s_%s.log' % (
            P, Ename, dK, sgninfty, prec,
            datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
        outfile = outfile.replace('/', 'div')
        outfile = '/tmp/darmonpoint_' + outfile

    fwrite("Starting computation of the Darmon point", outfile)
    fwrite('D_B = %s  %s' % (DB, factor(DB)), outfile)
    fwrite('Np = %s' % Np, outfile)
    if Ncartan is not None:
        fwrite('Ncartan = %s' % Ncartan, outfile)
    fwrite('dK = %s (class number = %s)' % (dK, hK), outfile)
    fwrite('Calculation with p = %s and prec = %s' % (P, prec), outfile)
    fwrite('Elliptic curve %s: %s' % (Ename, E), outfile)
    if outfile is not None:
        print("Partial results will be saved in %s" % outfile)

    if input_data is None:
        if cohomological:
            # Define the S-arithmetic group
            if F != QQ and ramification_at_infinity is None:
                if F.signature()[0] > 1:
                    if F.signature()[1] == 1:
                        ramification_at_infinity = F.real_places(
                            prec=Infinity)  # Totally 'definite'
                    else:
                        raise ValueError(
                            'Please specify the ramification at infinity')
                elif F.signature()[0] == 1:
                    if len(F.ideal(DB).factor()) % 2 == 0:
                        ramification_at_infinity = []  # Split at infinity
                    else:
                        ramification_at_infinity = F.real_places(
                            prec=Infinity)  # Ramified at infinity
                else:
                    ramification_at_infinity = None
            if F == QQ:
                abtuple = QuaternionAlgebra(DB).invariants()
            else:
                abtuple = quaternion_algebra_invariants_from_ramification(
                    F, DB, ramification_at_infinity, magma=magma)

            G = BigArithGroup(P,
                              abtuple,
                              Np,
                              base=F,
                              outfile=outfile,
                              seed=magma_seed,
                              use_sage_db=use_sage_db,
                              magma=magma,
                              use_shapiro=use_shapiro,
                              nscartan=Ncartan)

            # Define the cycle ( in H_1(G,Div^0 Hp) )
            Coh = ArithCoh(G)
            while True:
                try:
                    cycleGn, nn, ell = construct_homology_cycle(
                        p,
                        G.Gn,
                        beta,
                        working_prec,
                        lambda q: Coh.hecke_matrix(q).minpoly(),
                        outfile=outfile,
                        elliptic_curve=E)
                    break
                except PrecisionError:
                    working_prec *= 2
                    verbose(
                        'Encountered precision error, trying with higher precision (= %s)'
                        % working_prec)
                except ValueError:
                    fwrite(
                        'ValueError occurred when constructing homology cycle. Returning the S-arithmetic group.',
                        outfile)
                    if quit_when_done:
                        magma.quit()
                    return G
                except AssertionError as e:
                    fwrite(
                        'Assertion occurred when constructing homology cycle. Returning the S-arithmetic group.',
                        outfile)
                    fwrite('%s' % str(e), outfile)
                    if quit_when_done:
                        magma.quit()
                    return G
            eisenstein_constant = -ZZ(E.reduction(ell).count_points())
            fwrite(
                'r = %s, so a_r(E) - r - 1 = %s' % (ell, eisenstein_constant),
                outfile)
            fwrite('exponent = %s' % nn, outfile)
            phiE = Coh.get_cocycle_from_elliptic_curve(E,
                                                       sign=sign_at_infinity)
            if hasattr(E, 'ap'):
                sign_ap = E.ap(P)
            else:
                try:
                    sign_ap = ZZ(P.norm() + 1 - E.reduction(P).count_points())
                except ValueError:
                    sign_ap = ZZ(P.norm() + 1 - Curve(E).change_ring(
                        P.residue_field()).count_points(1)[0])

            Phi = get_overconvergent_class_quaternionic(
                P,
                phiE,
                G,
                prec,
                sign_at_infinity,
                sign_ap,
                use_ps_dists=use_ps_dists,
                use_sage_db=use_sage_db,
                parallelize=parallelize,
                method=Up_method,
                progress_bar=progress_bar,
                Ename=Ename)
            # Integration with moments
            tot_time = walltime()
            J = integrate_H1(G,
                             cycleGn,
                             Phi,
                             1,
                             method='moments',
                             prec=working_prec,
                             parallelize=parallelize,
                             twist=True,
                             progress_bar=progress_bar)
            verbose('integration tot_time = %s' % walltime(tot_time))
            if use_sage_db:
                G.save_to_db()
        else:  # not cohomological
            nn = 1
            eisenstein_constant = 1
            if algorithm is None:
                if Np == 1:
                    algorithm = 'darmon_pollack'
                else:
                    algorithm = 'guitart_masdeu'
            w = K.maximal_order().ring_generators()[0]
            r0, r1 = w.coordinates_in_terms_of_powers()(K.gen())
            QQp = Qp(p, working_prec)
            Cp = QQp.extension(w.minpoly().change_ring(QQp), names='g')
            v0 = K.hom([r0 + r1 * Cp.gen()])

            # Optimal embeddings of level one
            fwrite("Computing optimal embeddings of level one...", outfile)
            Wlist = find_optimal_embeddings(K,
                                            use_magma=use_magma,
                                            extra_conductor=extra_conductor)
            fwrite("Found %s such embeddings." % len(Wlist), outfile)
            if idx_embedding is not None:
                if idx_embedding >= len(Wlist):
                    fwrite(
                        'There are not enough embeddings. Taking the index modulo %s'
                        % len(Wlist), outfile)
                    idx_embedding = idx_embedding % len(Wlist)
                fwrite('Taking only embedding number %s' % (idx_embedding),
                       outfile)
                Wlist = [Wlist[idx_embedding]]

            # Find the orientations
            orients = K.maximal_order().ring_generators()[0].minpoly().roots(
                Zmod(Np), multiplicities=False)
            fwrite("Possible orientations: %s" % orients, outfile)
            if len(Wlist) == 1 or idx_orientation == -1:
                fwrite("Using all orientations, since hK = 1", outfile)
                chosen_orientation = None
            else:
                fwrite("Using orientation = %s" % orients[idx_orientation],
                       outfile)
                chosen_orientation = orients[idx_orientation]

            emblist = []
            for i, W in enumerate(Wlist):
                tau, gtau, sign, limits = find_tau0_and_gtau(
                    v0,
                    Np,
                    W,
                    algorithm=algorithm,
                    orientation=chosen_orientation,
                    extra_conductor=extra_conductor)
                fwrite(
                    'n_evals = %s' % sum(
                        (num_evals(t1, t2) for t1, t2 in limits)), outfile)
                emblist.append((tau, gtau, sign, limits))

            # Get the cohomology class from E
            Phi = get_overconvergent_class_matrices(P,
                                                    E,
                                                    prec,
                                                    sign_at_infinity,
                                                    use_ps_dists=use_ps_dists,
                                                    use_sage_db=use_sage_db,
                                                    parallelize=parallelize,
                                                    progress_bar=progress_bar)

            J = 1
            Jlist = []
            for i, emb in enumerate(emblist):
                fwrite(
                    "Computing %s-th period, attached to the embedding: %s" %
                    (i, Wlist[i].list()), outfile)
                tau, gtau, sign, limits = emb
                n_evals = sum((num_evals(t1, t2) for t1, t2 in limits))
                fwrite(
                    "Computing one period...(total of %s evaluations)" %
                    n_evals, outfile)
                newJ = prod((double_integral_zero_infty(Phi, t1, t2)
                             for t1, t2 in limits))**ZZ(sign)
                Jlist.append(newJ)
                J *= newJ
    else:  # input_data is not None
        Phi, J = input_data[1:3]
    fwrite('Integral done. Now trying to recognize the point', outfile)
    fwrite('J_psi = %s' % J, outfile)
    fwrite('g belongs to %s' % J.parent(), outfile)
    #Try to recognize a generator
    if quaternionic:
        local_embedding = G.base_ring_local_embedding(working_prec)
        twopowlist = [
            4, 3, 2, 1,
            QQ(1) / 2,
            QQ(3) / 2,
            QQ(1) / 3,
            QQ(2) / 3,
            QQ(1) / 4,
            QQ(3) / 4,
            QQ(5) / 2,
            QQ(4) / 3
        ]
    else:
        local_embedding = Qp(p, working_prec)
        twopowlist = [
            4, 3, 2, 1,
            QQ(1) / 2,
            QQ(3) / 2,
            QQ(1) / 3,
            QQ(2) / 3,
            QQ(1) / 4,
            QQ(3) / 4,
            QQ(5) / 2,
            QQ(4) / 3
        ]

    known_multiple = QQ(
        nn * eisenstein_constant
    )  # It seems that we are not getting it with present algorithm.
    while known_multiple % p == 0:
        known_multiple = ZZ(known_multiple / p)

    if not recognize_point:
        fwrite('known_multiple = %s' % known_multiple, outfile)
        if quit_when_done:
            magma.quit()
        return J, Jlist

    candidate, twopow, J1 = recognize_J(E,
                                        J,
                                        K,
                                        local_embedding=local_embedding,
                                        known_multiple=known_multiple,
                                        twopowlist=twopowlist,
                                        prec=prec,
                                        outfile=outfile)

    if candidate is not None:
        HCF = K.hilbert_class_field(names='r1') if hK > 1 else K
        if hK == 1:
            try:
                verbose('candidate = %s' % candidate)
                Ptsmall = E.change_ring(HCF)(candidate)
                fwrite('twopow = %s' % twopow, outfile)
                fwrite(
                    'Computed point:  %s * %s * %s' %
                    (twopow, known_multiple, Ptsmall), outfile)
                fwrite('(first factor is not understood, second factor is)',
                       outfile)
                fwrite(
                    '(r satisfies %s = 0)' %
                    (Ptsmall[0].parent().gen().minpoly()), outfile)
                fwrite('================================================',
                       outfile)
                if quit_when_done:
                    magma.quit()
                return Ptsmall
            except (TypeError, ValueError):
                verbose("Could not recognize the point.")
        else:
            verbose('candidate = %s' % candidate)
            fwrite('twopow = %s' % twopow, outfile)
            fwrite(
                'Computed point:  %s * %s * (x,y)' % (twopow, known_multiple),
                outfile)
            fwrite('(first factor is not understood, second factor is)',
                   outfile)
            try:
                pols = [HCF(c).relative_minpoly() for c in candidate[:2]]
            except AttributeError:
                pols = [HCF(c).minpoly() for c in candidate[:2]]
            fwrite('Where x satisfies %s' % pols[0], outfile)
            fwrite('and y satisfies %s' % pols[1], outfile)
            fwrite('================================================', outfile)
            if quit_when_done:
                magma.quit()
            return candidate
    else:
        fwrite('================================================', outfile)
        if quit_when_done:
            magma.quit()
        return []
Code Example #15
    def __call__(self, f, inputs):
        """
        Parallel iterator using ``fork()``.

        INPUT:

        - ``f`` -- a function (or more general, any callable)

        - ``inputs`` -- a list of pairs ``(args, kwds)`` to be used as
          arguments to ``f``, where ``args`` is a tuple and ``kwds`` is
          a dictionary.

        OUTPUT:

        EXAMPLES::

            sage: F = sage.parallel.use_fork.p_iter_fork(2,3)
            sage: sorted(list( F( (lambda x: x^2), [([10],{}), ([20],{})])))
            [(([10], {}), 100), (([20], {}), 400)]
            sage: sorted(list( F( (lambda x, y: x^2+y), [([10],{'y':1}), ([20],{'y':2})])))
            [(([10], {'y': 1}), 101), (([20], {'y': 2}), 402)]

        TESTS:

        The output of functions decorated with :func:`parallel` is read
        as a pickle by the parent process. We intentionally break the
        unpickling and demonstrate that this failure is handled
        gracefully (the exception is put in the list instead of the
        answer)::

            sage: Polygen = parallel(polygen)
            sage: list(Polygen([QQ]))
            [(((Rational Field,), {}), x)]
            sage: from sage.misc.persist import unpickle_override, register_unpickle_override
            sage: register_unpickle_override('sage.rings.polynomial.polynomial_rational_flint', 'Polynomial_rational_flint', Integer)
            sage: L = list(Polygen([QQ]))
            sage: L
            [(((Rational Field,), {}),
              'INVALID DATA __init__() takes at most 2 positional arguments (4 given)')]

        Fix the unpickling::

            sage: del unpickle_override[('sage.rings.polynomial.polynomial_rational_flint', 'Polynomial_rational_flint')]
            sage: list(Polygen([QQ,QQ]))
            [(((Rational Field,), {}), x), (((Rational Field,), {}), x)]
        """
        n = self.ncpus
        v = list(inputs)
        import os
        import sys
        import signal
        from sage.misc.persist import loads
        from sage.misc.temporary_file import tmp_dir
        dir = tmp_dir()
        timeout = self.timeout

        workers = {}
        try:
            while v or workers:
                # Spawn up to n subprocesses
                while v and len(workers) < n:
                    v0 = v.pop(0)  # Input value for the next subprocess
                    with ContainChildren():
                        pid = os.fork()
                        # The way fork works is that pid returns the
                        # nonzero pid of the subprocess for the master
                        # process and returns 0 for the subprocess.
                        if not pid:
                            # This is the subprocess.
                            self._subprocess(f, dir, *v0)

                    workers[pid] = WorkerData(v0)

                if len(workers) > 0:
                    # Now wait for one subprocess to finish and report the result.
                    # However, wait at most the time since the oldest process started.
                    T = walltime()
                    if timeout:
                        oldest = min(W.starttime for W in workers.values())
                        alarm(max(timeout - (T - oldest), 0.1))

                    try:
                        pid = os.wait()[0]
                        cancel_alarm()
                        W = workers.pop(pid)
                    except AlarmInterrupt:
                        # Kill workers that are too old
                        for pid, W in workers.items():
                            if T - W.starttime > timeout:
                                if self.verbose:
                                    print(
                                        "Killing subprocess %s with input %s which took too long"
                                        % (pid, W.input))
                                os.kill(pid, signal.SIGKILL)
                                W.failure = " (timed out)"
                    except KeyError:
                        # Some other process exited, not our problem...
                        pass
                    else:
                        # collect data from process that successfully terminated
                        sobj = os.path.join(dir, '%s.sobj' % pid)
                        try:
                            with open(sobj, "rb") as file:
                                data = file.read()
                        except IOError:
                            answer = "NO DATA" + W.failure
                        else:
                            os.unlink(sobj)
                            try:
                                answer = loads(data, compress=False)
                            except Exception as E:
                                answer = "INVALID DATA {}".format(E)

                        out = os.path.join(dir, '%s.out' % pid)
                        try:
                            with open(out) as file:
                                sys.stdout.write(file.read())
                            os.unlink(out)
                        except IOError:
                            pass

                        yield (W.input, answer)
        finally:
            # Send SIGKILL signal to workers that are left.
            if workers:
                if self.verbose:
                    print("Killing any remaining workers...")
                sys.stdout.flush()
                for pid in workers:
                    try:
                        os.kill(pid, signal.SIGKILL)
                    except OSError:
                        # If kill() failed, it is most likely because
                        # the process already exited.
                        pass
                    else:
                        try:
                            os.waitpid(pid, 0)
                        except OSError as msg:
                            if self.verbose:
                                print(msg)

            # Clean up all temporary files.
            rmtree(dir)
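
The core of the iterator above is the classic fork/wait pattern: the parent keeps a table of child pids, os.fork() returns 0 in the child and the child's pid in the parent, and os.wait() blocks until some child exits. A stripped-down sketch of just that skeleton in plain Python, without the Sage-specific ContainChildren, alarm and pickle plumbing:

# Stripped-down fork/wait skeleton (POSIX only); an illustration, not the Sage code.
import os

inputs = [1, 2, 3, 4]
workers = {}                                 # pid -> input value
while inputs or workers:
    while inputs and len(workers) < 2:       # spawn at most 2 children at a time
        value = inputs.pop(0)
        pid = os.fork()
        if pid == 0:                         # child: do the work, then exit hard
            print("child %s computed %s" % (os.getpid(), value * value))
            os._exit(0)
        workers[pid] = value                 # parent: remember what this child is doing
    pid, status = os.wait()                  # block until some child exits
    print("worker %s (input %s) finished" % (pid, workers.pop(pid)))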
Code Example #16
File: use_fork.py  Project: thalespaiva/sagelib
    def __call__(self, f, inputs):
        """
        Parallel iterator using ``fork()``.

        INPUT:

            - ``f`` -- a Python function that need not be pickleable or anything else!
            - ``inputs`` -- a list of pickleable pairs ``(args, kwds)``, where
              ``args`` is a tuple and ``kwds`` is a dictionary.

        OUTPUT:

        EXAMPLES::

            sage: F = sage.parallel.use_fork.p_iter_fork(2,3)
            sage: sorted(list( F( (lambda x: x^2), [([10],{}), ([20],{})])))
            [(([10], {}), 100), (([20], {}), 400)]
            sage: sorted(list( F( (lambda x, y: x^2+y), [([10],{'y':1}), ([20],{'y':2})])))
            [(([10], {'y': 1}), 101), (([20], {'y': 2}), 402)]
        """
        n = self.ncpus
        v = list(inputs)
        import os, sys, signal
        from sage.structure.sage_object import load
        from sage.misc.misc import tmp_dir, walltime
        dir = tmp_dir()
        timeout = self.timeout
        # Subprocesses shouldn't inherit unflushed buffers (cf. #11778):
        sys.stdout.flush()
        sys.stderr.flush()

        workers = {}
        try:
            while len(v) > 0 or len(workers) > 0:
                # Spawn up to n subprocesses
                while len(v) > 0 and len(workers) < n:
                    pid = os.fork()
                    # The way fork works is that pid returns the
                    # nonzero pid of the subprocess for the master
                    # process and returns 0 for the subprocess.
                    if pid:
                        # This is the parent master process.
                        workers[pid] = [v[0], walltime(), '']
                        del v[0]
                    else:
                        # This is the subprocess.
                        self._subprocess(f, dir, v[0])

                if len(workers) > 0:
                    # Now wait for one subprocess to finish and report the result.
                    # However, wait at most the time since the oldest process started.
                    if timeout:

                        def mysig(a, b):
                            raise RuntimeError, "SIGALRM"

                        oldest = min([X[1] for X in workers.values()])
                        signal.signal(signal.SIGALRM, mysig)
                        signal.alarm(
                            max(int(timeout - (walltime() - oldest)), 1))
                    try:
                        pid = os.wait()[0]
                        signal.signal(signal.SIGALRM, signal.SIG_IGN)
                    except RuntimeError:
                        signal.signal(signal.SIGALRM, signal.SIG_IGN)
                        # Kill workers that are too old
                        for pid, X in workers.iteritems():
                            if walltime() - X[1] > timeout:
                                if self.verbose:
                                    print(
                                        "Killing subprocess %s with input %s which took too long"
                                        % (pid, X[0]))
                                    sys.stdout.flush()
                                os.kill(pid, 9)
                                X[-1] = ' (timed out)'
                    else:
                        # If the computation was interrupted the pid
                        # might not be in the workers list, in which
                        # case we skip this.
                        if pid in workers:
                            # collect data from process that successfully terminated
                            sobj = os.path.join(dir, '%s.sobj' % pid)
                            if not os.path.exists(sobj):
                                X = "NO DATA" + workers[pid][
                                    -1]  # the message field
                            else:
                                X = load(sobj, compress=False)
                                os.unlink(sobj)
                            out = os.path.join(dir, '%s.out' % pid)
                            if not os.path.exists(out):
                                output = "NO OUTPUT"
                            else:
                                output = open(out).read()
                                os.unlink(out)

                            if output.strip():
                                print output,
                                sys.stdout.flush()

                            yield (workers[pid][0], X)
                            del workers[pid]

        except Exception, msg:
            print msg
            sys.stdout.flush()
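
The timeout handling in this older version is the plain-signal variant of the same idea: install a SIGALRM handler that raises, arm the alarm for the remaining time budget before os.wait(), and disarm it afterwards. A minimal sketch of that SIGALRM pattern in isolation (an illustration, not the Sage code):

# Minimal SIGALRM timeout sketch (POSIX only).
import signal
import time

def handler(signum, frame):
    raise RuntimeError("SIGALRM")

signal.signal(signal.SIGALRM, handler)
signal.alarm(1)                    # deliver SIGALRM in at most 1 second
try:
    time.sleep(5)                  # stand-in for a blocking call such as os.wait()
except RuntimeError:
    print("timed out")
finally:
    signal.alarm(0)                # disarm any pending alarm
    signal.signal(signal.SIGALRM, signal.SIG_IGN)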
Code Example #17
File: use_fork.py  Project: bgxcpku/sagelib
    def __call__(self, f, inputs):
        """
        Parallel iterator using ``fork()``.

        INPUT:

            - ``f`` -- a Python function that need not be pickleable or anything else!
            - ``inputs`` -- a list of pickleable pairs ``(args, kwds)``, where
              ``args`` is a tuple and ``kwds`` is a dictionary.

        OUTPUT:

        EXAMPLES::

            sage: F = sage.parallel.use_fork.p_iter_fork(2,3)
            sage: sorted(list( F( (lambda x: x^2), [([10],{}), ([20],{})])))
            [(([10], {}), 100), (([20], {}), 400)]
            sage: sorted(list( F( (lambda x, y: x^2+y), [([10],{'y':1}), ([20],{'y':2})])))
            [(([10], {'y': 1}), 101), (([20], {'y': 2}), 402)]
        """
        n = self.ncpus
        v = list(inputs)
        import os, sys, signal
        from sage.structure.sage_object import load
        from sage.misc.misc import tmp_dir, walltime
        dir = tmp_dir()
        timeout = self.timeout
        # Subprocesses shouldn't inherit unflushed buffers (cf. #11778):
        sys.stdout.flush()
        sys.stderr.flush()

        workers = {}
        try:
            while len(v) > 0 or len(workers) > 0:
                # Spawn up to n subprocesses
                while len(v) > 0 and len(workers) < n:
                    pid = os.fork()
                    # The way fork works is that pid returns the
                    # nonzero pid of the subprocess for the master
                    # process and returns 0 for the subprocess.
                    if pid:
                        # This is the parent master process.
                        workers[pid] = [v[0], walltime(), '']
                        del v[0]
                    else:
                        # This is the subprocess.
                        self._subprocess(f, dir, v[0])

                if len(workers) > 0:
                    # Now wait for one subprocess to finish and report the result.
                    # However, wait at most the time since the oldest process started.
                    if timeout:
                        def mysig(a,b):
                            raise RuntimeError, "SIGALRM"
                        oldest = min([X[1] for X in workers.values()])
                        signal.signal(signal.SIGALRM, mysig)
                        signal.alarm(max(int(timeout - (walltime()-oldest)), 1))
                    try:
                        pid = os.wait()[0]
                        signal.signal(signal.SIGALRM, signal.SIG_IGN)
                    except RuntimeError:
                        signal.signal(signal.SIGALRM, signal.SIG_IGN)
                        # Kill workers that are too old
                        for pid, X in workers.iteritems():
                            if walltime() - X[1] > timeout:
                                if self.verbose:
                                    print(
                                        "Killing subprocess %s with input %s which took too long"
                                         % (pid, X[0]) )
                                    sys.stdout.flush()
                                os.kill(pid,9)
                                X[-1] = ' (timed out)'
                    else:
                        # If the computation was interrupted the pid
                        # might not be in the workers list, in which
                        # case we skip this.
                        if pid in workers:
                            # collect data from process that successfully terminated
                            sobj = os.path.join(dir, '%s.sobj'%pid)
                            if not os.path.exists(sobj):
                                X = "NO DATA" + workers[pid][-1]  # the message field
                            else:
                                X = load(sobj, compress=False)
                                os.unlink(sobj)
                            out = os.path.join(dir, '%s.out'%pid)
                            if not os.path.exists(out):
                                output = "NO OUTPUT" 
                            else:
                                output = open(out).read()
                                os.unlink(out)

                            if output.strip():
                                print output,
                                sys.stdout.flush()

                            yield (workers[pid][0], X)
                            del workers[pid]

        except Exception, msg:
            print msg
            sys.stdout.flush()
Code Example #18
File: use_fork.py  Project: saraedum/sage-renamed
    def __call__(self, f, inputs):
        """
        Parallel iterator using ``fork()``.

        INPUT:

        - ``f`` -- a function (or more general, any callable)

        - ``inputs`` -- a list of pairs ``(args, kwds)`` to be used as
          arguments to ``f``, where ``args`` is a tuple and ``kwds`` is
          a dictionary.

        OUTPUT:

        EXAMPLES::

            sage: F = sage.parallel.use_fork.p_iter_fork(2,3)
            sage: sorted(list( F( (lambda x: x^2), [([10],{}), ([20],{})])))
            [(([10], {}), 100), (([20], {}), 400)]
            sage: sorted(list( F( (lambda x, y: x^2+y), [([10],{'y':1}), ([20],{'y':2})])))
            [(([10], {'y': 1}), 101), (([20], {'y': 2}), 402)]

        TESTS:

        The output of functions decorated with :func:`parallel` is read
        as a pickle by the parent process. We intentionally break the
        unpickling and demonstrate that this failure is handled
        gracefully (the exception is put in the list instead of the
        answer)::

            sage: Polygen = parallel(polygen)
            sage: list(Polygen([QQ]))
            [(((Rational Field,), {}), x)]
            sage: from sage.structure.sage_object import unpickle_override, register_unpickle_override
            sage: register_unpickle_override('sage.rings.polynomial.polynomial_rational_flint', 'Polynomial_rational_flint', Integer)
            sage: L = list(Polygen([QQ]))
            sage: L
            [(((Rational Field,), {}),
              'INVALID DATA __init__() takes at most 2 positional arguments (4 given)')]

        Fix the unpickling::

            sage: del unpickle_override[('sage.rings.polynomial.polynomial_rational_flint', 'Polynomial_rational_flint')]
            sage: list(Polygen([QQ,QQ]))
            [(((Rational Field,), {}), x), (((Rational Field,), {}), x)]
        """
        n = self.ncpus
        v = list(inputs)
        import os, sys, signal
        from sage.structure.sage_object import loads
        from sage.misc.temporary_file import tmp_dir
        dir = tmp_dir()
        timeout = self.timeout

        workers = {}
        try:
            while len(v) > 0 or len(workers) > 0:
                # Spawn up to n subprocesses
                while len(v) > 0 and len(workers) < n:
                    v0 = v.pop(0)  # Input value for the next subprocess
                    with ContainChildren():
                        pid = os.fork()
                        # The way fork works is that pid returns the
                        # nonzero pid of the subprocess for the master
                        # process and returns 0 for the subprocess.
                        if not pid:
                            # This is the subprocess.
                            self._subprocess(f, dir, *v0)

                    workers[pid] = WorkerData(v0)

                if len(workers) > 0:
                    # Now wait for one subprocess to finish and report the result.
                    # However, wait at most the time since the oldest process started.
                    T = walltime()
                    if timeout:
                        oldest = min(W.starttime for W in workers.values())
                        alarm(max(timeout - (T - oldest), 0.1))

                    try:
                        pid = os.wait()[0]
                        cancel_alarm()
                        W = workers.pop(pid)
                    except AlarmInterrupt:
                        # Kill workers that are too old
                        for pid, W in workers.items():
                            if T - W.starttime > timeout:
                                if self.verbose:
                                    print(
                                        "Killing subprocess %s with input %s which took too long"
                                         % (pid, W.input) )
                                os.kill(pid, signal.SIGKILL)
                                W.failure = " (timed out)"
                    except KeyError:
                        # Some other process exited, not our problem...
                        pass
                    else:
                        # collect data from process that successfully terminated
                        sobj = os.path.join(dir, '%s.sobj'%pid)
                        try:
                            with open(sobj) as file:
                                data = file.read()
                        except IOError:
                            answer = "NO DATA" + W.failure
                        else:
                            os.unlink(sobj)
                            try:
                                answer = loads(data, compress=False)
                            except Exception as E:
                                answer = "INVALID DATA {}".format(E)

                        out = os.path.join(dir, '%s.out'%pid)
                        try:
                            with open(out) as file:
                                sys.stdout.write(file.read())
                            os.unlink(out)
                        except IOError:
                            pass

                        yield (W.input, answer)
        finally:
            # Send SIGKILL signal to workers that are left.
            if workers:
                if self.verbose:
                    print("Killing any remaining workers...")
                sys.stdout.flush()
                for pid in workers:
                    try:
                        os.kill(pid, signal.SIGKILL)
                    except OSError:
                        # If kill() failed, it is most likely because
                        # the process already exited.
                        pass
                    else:
                        try:
                            os.waitpid(pid, 0)
                        except OSError as msg:
                            if self.verbose:
                                print(msg)

            # Clean up all temporary files.
            rmtree(dir)
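
The newer versions (Code Examples #15 and #18) replace the raw SIGALRM handler with alarm()/cancel_alarm() and the AlarmInterrupt exception, which in current Sage come from the cysignals package. A minimal sketch of that variant, assuming cysignals is available (the exact import path is an assumption and has varied across versions):

# Minimal sketch of the alarm()/AlarmInterrupt timeout variant (assumes
# cysignals, which ships with Sage; import paths may differ by version).
from cysignals.alarm import alarm, cancel_alarm, AlarmInterrupt
import time

try:
    alarm(0.5)             # AlarmInterrupt is raised after roughly 0.5 seconds
    time.sleep(5)          # stand-in for a blocking os.wait()
    cancel_alarm()
except AlarmInterrupt:
    print("timed out")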