Example #1
 def testmissingfile(self):
     try:
         e = FileNotFoundError
     except:
         e = IOError
     with self.assertRaises((IOError, e)):
         columnfile.columnfile("a_filename_that_does_not_exist.flt")
Example #2
 def test_to_hdf(self):
     c = columnfile.columnfile("test.col")
     columnfile.colfile_to_hdf(c, "testcolfile.hdf")
     h = columnfile.columnfile("testcolfile.hdf")
     for t in c.titles:
         assert ((c.getcolumn(t) == h.getcolumn(t)).all())
         assert t in h.titles
Example #3
def two2one(in1, in2, out):
    """
    Example:    merge.two2one('peaks_fa.flt','peaks_fb.flt','merged.flt')
                This merges files peaks_fa.flt and peaks_fb.flt 
                into merged.flt 
    
    Jette Oddershede, October 2008
    """

    f1 = ic.columnfile(in1)
    f2 = ic.columnfile(in2)
    print(in1, f1.nrows)
    print(in2, f2.nrows)
    assert f1.titles == f2.titles, 'Attempting to merge columnfiles with different column titles'

    f = open(in1, 'r')
    flt1 = f.read()
    f.close()
    f = open(in2, 'r')
    flt2lines = f.readlines()
    f.close()
    flt2 = ''
    for line in flt2lines:
        # skip the header lines of the second file, keep only the data rows
        if '#' not in line:
            flt2 = flt2 + line
    flt = open(out, 'w')
    flt.write(flt1 + '\n' + flt2)
    flt.close()
    print(out, f1.nrows + f2.nrows)
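A quick sanity check of the merge reads the output back and compares row counts (a minimal sketch; it assumes ic is ImageD11.columnfile, as in the function above, and that the files from the docstring example exist):

from ImageD11 import columnfile as ic

two2one('peaks_fa.flt', 'peaks_fb.flt', 'merged.flt')
merged = ic.columnfile('merged.flt')
# the merged file should contain all rows from both inputs
assert merged.nrows == (ic.columnfile('peaks_fa.flt').nrows +
                        ic.columnfile('peaks_fb.flt').nrows)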
Example #4
 def testmissingfile(self):
     e = False
     try:
         columnfile.columnfile("a_filename_that_does_not_exist.flt")
     except:
         # got an exception
         e = True
     assert (e)
Example #5
    def recon_slice(self, flt_file, grain_file, label):

        print("\n \nStarting reconstruction of slice: " + label)
        print(flt_file)
        flt = columnfile.columnfile(flt_file)

        #---------------------------------------------------------------------------
        # Fix the dty column of the flt so that it scans across 0 and dty is in microns.
        flt.dty[:] = ((flt.dty - self.ymin_flt) * self.unit /
                      (10**(-6))) - self.ystep * (self.number_y_scans // 2)
        print(
            '\nFixed the dty column, existing dtys are now (in units of microns):'
        )
        print(np.unique(flt.dty))
        print('')
        #---------------------------------------------------------------------------

        grains = grain.read_grain_file(grain_file)

        grain_topology_masks = self.recon_topology(flt, grains)

        g, s = self.select_grains(grains, grain_topology_masks)


        recons = self.reconstructor.reconstruct( flt, g, self.number_y_scans,\
                                                 self.ymin, self.ystep, s )

        print("Recon shape: ", recons['E11'].shape)
        print("\n Finished reconstruction of slice: " + label)
        print("Saving results..")
        self.slices[label] = recons
        self.slice_masks[label] = sum(grain_topology_masks)
        self.save_slice_as_npy(recons, label)

        return recons
Example #6
def main():
    colf = columnfile.columnfile(sys.argv[1])
    print("Read", colf.nrows, "peaks")
    p = parameters.read_par_file(sys.argv[2])
    # input a sample radius to use for computing ranges
    radius = int(sys.argv[3])
    omegastep = guess_omega_step(colf)
    print("Guess omega step as", omegastep)
    colf.updateGeometry(p)

    for i, t in enumerate(colf.titles):
        print("\n", t, end=" ")
        for j in (0, 1, 2, colf.nrows // 2, colf.nrows // 2 + 1,
                  colf.nrows // 2 + 2, colf.nrows - 3, colf.nrows - 2,
                  colf.nrows - 1):
            print(colf.bigarray[i, j], end=" ")
    print()

    modXL = np.sqrt(colf.xl * colf.xl + colf.yl * colf.yl + colf.zl * colf.zl)
    modXLmean = modXL.mean()
    dangle = np.degrees(np.arctan2(radius, modXLmean))
    print("Angle precision is about ", dangle, "for average distance",
          modXLmean, "and sample size ", radius)

    findpairs_2d(colf, dangle, omegastep)
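The precision estimate printed above is just the angle subtended by the sample radius at the mean sample-to-detector distance; a standalone illustration with invented numbers:

import numpy as np

radius = 500.0            # sample radius in microns (invented value)
mean_distance = 150000.0  # mean |XL| in microns (invented value)
dangle = np.degrees(np.arctan2(radius, mean_distance))
print(dangle)             # about 0.19 degrees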
Example #7
 def test3(self):
     c = columnfile.columnfile("test.col")
     self.assertEqual(list(c.a), [1, 6, 4, 5])
     c.removerows("a", [1])
     c.removerows("a", [4])
     self.assertEqual(c.nrows, 2)
     self.assertEqual(list(c.b), [1, 6])
Example #8
def merge_flts(p, flts, motor):
    ars = []
    titles = None
    for f in flts:
        try:
            c = columnfile(f)
        except:
            print(f, "Empty file")
            continue
        # num = int(f.split("_")[5])
        dty = get_dty(f, motor=motor)
        print(f, dty, c.nrows)
        try:
            c.addcolumn(ones(c.nrows) * dty, "dty")
        except:
            print(dty, c.nrows, type(dty), type(c.nrows))
            print(ones(c.nrows) * dty)
            raise
        c.updateGeometry(p)
        if titles is None:
            titles = [t for t in c.titles]
        for told, tnew in zip(titles, c.titles):
            assert told == tnew, "titles do not match %s %s %s" % (f, told,
                                                                   tnew)
        ars.append(c.bigarray)
    c.bigarray = concatenate(ars, axis=1)
    c.nrows = c.bigarray.shape[1]
    c.set_attributes()
    return c
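One possible way to call merge_flts (a sketch, not taken from the source: it assumes the flt files follow a sortable naming pattern, that p is an ImageD11 parameters object, and that get_dty is the helper used above to read the motor position for each file):

from glob import glob
from ImageD11.parameters import read_par_file

p = read_par_file("scan.par")              # geometry parameters (assumed filename)
flts = sorted(glob("scan_*.flt"))          # per-position peak files (assumed pattern)
merged = merge_flts(p, flts, motor="dty")  # one columnfile with an added dty column
merged.writefile("merged_dty.flt")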
Example #9
 def read_peaks(self, peaksfile):
     """ Read in the peaks from a peaksearch """
     start = time.time()
     colf = columnfile.columnfile(peaksfile)
     logging.info("reading file %f/s"%(time.time()-start))
     # hmm - is this the right way around?
     self.rlgrid = 1.0*self.cell_size/self.npx
     self.px = colf.omega
     self.px = np.where(self.px > self.npx/2 ,
                        self.px - self.npx  ,
                        self.px)*self.rlgrid
     self.py = colf.sc
     self.py = np.where(self.py > self.npx/2 ,
                        self.py - self.npx  ,
                        self.py)*self.rlgrid
     self.pz = colf.fc
     self.pz = np.where(self.pz > self.npx/2 ,
                        self.pz - self.npx   ,
                        self.pz)*self.rlgrid
     self.UBIALL = np.array( [self.px, self.py, self.pz] ).T
     logging.info("Number of peaks found %d  %f/s, now fit some"%(
                  self.px.shape[0],time.time()-start))
     for i in range(colf.nrows):
         print(".",end="")
         self.UBIALL[i] = refine_vector( self.UBIALL[i], self.gv )
         
     logging.info("Fitting vectors %f /s"%(time.time()-start))
     self.colfile = colf
Example #10
 def testaddcol1(self):
     c = columnfile.columnfile("test.col")
     c.addcolumn([5, 4, 1, 2, 0], 'alice')
     self.assertEqual(c.titles[-1], 'alice')
     self.assertEqual(list(c.alice), [5, 4, 1, 2, 0])
     self.assertEqual(c.ncols, 5)
     self.assertEqual(c.nrows, 5)
Example #11
def setup():
    try:
        c = columnfile( sys.argv[1] )
        g = read_grain_file( sys.argv[2] )
        parfile = sys.argv[3]
        cmds = []
    except:
        print( "Usage: %s colfile.flt.new grains.map parameters.par  --omega_slop=1 etc"%(sys.argv[0]))
        sys.exit()
    
    if platform.system() != "Windows":
        fmt = "%s %s"
    else:
        fmt = '%s "%s"'

    cmd0 = fmt%( sys.executable,
                     os.path.join( os.path.split(__file__)[0],
                                   "fitgrain.py" ) )

    for i in range(len(g)):
        #g[i].translation[2] = 0.0
        write_grain_file("%d.ubi"%(i),[g[i]])
        d = c.copy()
        d.filter( d.labels == i )
        d.writefile("%d.flt"%(i))
        cmd = cmd0 + " -p %s -u %d.ubi -U %d.ubi -P %d.par -f %d.flt -x t_z"%(
            parfile,i,i,i,i)
        for extra_arg in sys.argv[4:]:
            cmd += " "+extra_arg
        cmds.append( cmd )
    return cmds
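The commands returned by setup() can then be run one grain at a time (a minimal sketch; setup() itself reads sys.argv, so the script must already have been invoked with the colfile, grain file and parameter file arguments, and a multiprocessing pool would work just as well as the plain loop):

import os

for cmd in setup():
    # each command refines one grain with fitgrain.py
    os.system(cmd)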
Example #12
 def testaddcol2(self):
     c = columnfile.columnfile("test.col")
     c.addcolumn([5, 4, 1, 2, 9], 'a')
     self.assertEqual(c.titles.index("a"), 1)
     self.assertEqual(list(c.a), [5, 4, 1, 2, 9])
     self.assertEqual(c.ncols, 4)
     self.assertEqual(c.nrows, 5)
Example #13
 def test_xyz_from_yaml(self):
     for p in parfiles:
         parfile = os.path.join(TEST, p)
         fltfile = os.path.join(TEST, "test.flt")
         ymlfile = os.path.join(
             os.path.split(general_geometry.__file__)[0], "data",
             "fable.yml")
         pars = parameters.read_par_file(parfile).parameters
         colf = columnfile.columnfile(fltfile)
         sc = colf.sc[:4]
         fc = colf.fc[:4]
         geometry = general_geometry.from_yml(
             pars,
             ymlfile,
             path=["Positioners", "Fable_detector"],
             noisy=False)
         # test old
         xyz1 = transform.compute_xyz_lab((sc, fc), **pars)
         # test new
         v = np.zeros((3, len(sc)))
         v[1] = fc
         v[2] = sc
         xyz2 = geometry((np.zeros(len(fc)), fc, sc))
         if not np.allclose(xyz1, xyz2):
             print("Geometry", geometry)
             print("Parfile:", p)
             for i in range(len(fc)):
                 print(xyz1[:, i])
                 print(xyz2[:, i])
                 print()
         assert np.allclose(xyz1, xyz2)
Example #14
def main():
    import sys, time
    c = columnfile(sys.argv[1])
    p = read_par_file(sys.argv[2])
    u = unitcell_from_parameters(p)
    gl = read_grain_file(sys.argv[3])
    if gl[0].translation is None:
        gl[0].translation = np.array((0., 0., 0.))
    start = time.time()
    # Setup and assign hkls
    w = p.get("wavelength")
    peaks_Cxyz, beam_Cxyz = getCxyz(c, p)
    t = gl[0].translation.copy()
    ub = gl[0].ub.copy()
    ubi = gl[0].ubi.copy()
    gve = compute_Cgve(t, peaks_Cxyz, beam_Cxyz, w)
    hi = np.round(np.dot(ubi, gve))
    lastgof = 1e9
    ubn, tn = fit_ub_t(ub, t, hi, peaks_Cxyz, beam_Cxyz, w)
    print("Before\nt=", gl[0].translation)
    print("UB=", gl[0].ub)
    gl[0].set_ubi(np.linalg.inv(ubn))
    gl[0].translation = tn
    dt = time.time() - start
    print("time calculating", dt, "gps", 1 / dt)
    print("After\nt=", gl[0].translation)
    print("UB=", gl[0].ub)
    write_grain_file(sys.argv[4], gl)
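Run as a script, main() takes the peak file, parameter file, input grain file and output grain file from the command line (illustrative only; the file names below are placeholders):

import sys

sys.argv[1:] = ["peaks.flt", "pars.par", "grains_in.map", "grains_out.map"]
main()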
Example #15
def process_layer(args, layer):
    ## Checks for existence of the directory for saving files
    pdir = '%s/%02d' % (args.output, layer)
    if os.path.exists(pdir):
        if args.force:
            shutil.rmtree(pdir)
        else:
            raise IOError('%s already exists' % pdir)
    ## Creates directory for saving files
    os.mkdir(pdir)

    ## Defines first image and last image number in layer
    first_im = args.nstart + args.nimages * layer
    last_im = args.nimages + first_im - 1

    ## Creates median file using fable 'median.py'
    ndigits = None
    for i in range(7):
        fmtstr = '%s%0' + str(i) + 'd%s'
        first_im_name = fmtstr % (args.stem, args.nstart, args.image_format)
        if os.path.exists(first_im_name):
            ndigits = i
            break
    if ndigits is None:
        raise RuntimeError('format string problem, revisit')
    ## defines directory path for raw data/images
    ## first image, last image, step size between images
    if args.image_format == '.ge2':
        medianskip = int(args.nimages / 240)
        args.medianstep = 1
    else:
        medianskip = int(args.nimages / args.medianstep)
    command = ('median.py -i %s -f %i -l %i -s %i -o %s/median' %
               (first_im_name, first_im, medianskip, args.medianstep, pdir))
    if args.verbose:
        print(command)
    os.system(command)

    ## Runs peaksearch.py on images using specified thresholds
    command = ('peaksearch.py -n %s -F %s -f %i -l %i -o %s/peaks '
               '-d %s/median.edf -p Y --ndigits=%i --OmegaOverRide '
               '-S %f -T %f' %
               (args.stem, args.image_format, first_im, last_im, pdir, pdir,
                ndigits, args.step_ome, args.start_ome))

    ## Adds threshold values onto command
    for t in args.thresholds:
        command += ' -t %s' % t
    if args.verbose:
        print(command)
    os.system(command)

    ## removes any peak entries arising from spots containing less than N pixels
    for t in args.thresholds:
        num_lines = sum(1 for line in open('%s/peaks_t%i.flt' % (pdir, t)))
        if os.path.exists('%s/peaks_t%i.flt' % (pdir, t)) and num_lines > 1:
            d = ic.columnfile('%s/peaks_t%i.flt' % (pdir, t))
            d.removerows('Number_of_pixels', list(range(args.pixels)))
            d.writefile('%s/peaks_min%d_t%i.flt' % (pdir, args.pixels, t))
Example #16
 def test3(self):
     c = columnfile.columnfile("test.col")
     assert list(c.a) == [1, 6, 4, 5, 7]
     assert list(c.b) == [2, 1, 2, 6, 9]
     c.removerows("a", [1])
     c.removerows("a", [4])
     assert c.nrows == 3
     assert list(c.b) == [1, 6, 9]
Example #17
    def __init__(self, fname, parfile):
        self.colfile = columnfile(fname)
        self.pars = parameters.read_par_file(parfile)

        self.ds_tol = 0.005
        self.bins = np.arange(0, 0.5, 0.005)
        print("Setting up")
        self.compute_XLYLZL()
        self.computegv()
Example #18
def spot3d_id(filename):
    """
    Example:    merge.spot3d_id('peaks.flt')
                This makes unique spot3d_id in file peaks.flt
    
    Jette Oddershede, October 2008
    """
    flt = ic.columnfile(filename)
    flt.setcolumn(n.arange(flt.nrows), 'spot3d_id')
    flt.writefile(filename)
Example #19
 def testfilter(self):
     c = columnfile.columnfile("testgeom.flt")
     d = c.copy()
     d.filter(abs(d.sc - 22) > .1)
     self.assertTrue(d.nrows == 4)
     d.filter(abs(d.sc - 22) > .1)
     self.assertTrue(d.nrows == 4)
     self.assertTrue(c.nrows == 5)
     d = c.copy()
     self.assertTrue(d.nrows == 5)
Example #20
 def testpeaksearch_singlethread(self):
     os.system(self.CMD + " -n tiftest " + \
               " -t 7 -F .tif -S 0.3 " + \
               " -T 11 -p Y --singleThread " +\
               " -f %d -l %d"%(self.FIRST, self.LAST))
     results = columnfile.columnfile( "peaks_t7.flt" )
     self.assertEqual( results.nrows, self.NPK)
     if os.path.exists( 'peaks_t7.flt' ):
         self.assertEqual( open( 'peaks_t7.flt').read().rstrip(),
                           open( 'peaks_t6.flt').read().rstrip() )
Example #21
    def test_1(self):
        self.data1[20:41, 120:141] = 1.
        self.data2[20:41, 120:141] = 1.
        self.data3[20:41, 120:141] = 1.
        self.data4[20:41, 120:141] = 1.
        self.data5[20:41, 120:141] = 1.
        self.data3[30, 130] = 2.  # in the middle
        self.data1[55, 65] = 1.
        self.data2[55, 65] = 2.
        self.data3[55, 65] = 3.
        self.data4[55, 65] = 2.
        self.data5[55, 65] = 1.

        lio = labelimage.labelimage(self.dims, self.outfile)
        lio.peaksearch(self.data1, 0.1, 1.)
        lio.mergelast()
        lio.peaksearch(self.data2, 0.1, 2.)
        lio.mergelast()
        lio.peaksearch(self.data3, 0.1, 3.)
        lio.mergelast()
        lio.peaksearch(self.data4, 0.1, 4.)
        lio.mergelast()
        lio.peaksearch(self.data5, 0.1, 5.)
        lio.mergelast()
        lio.finalise()
        lio.outfile.close()

        co = columnfile.columnfile(self.outfile)

        self.assertEqual(co.nrows, 2)
        # first peaks 20->41, 120->141
        self.assertAlmostEqual(co.fc[0], 130., 6)
        self.assertAlmostEqual(co.sc[0], 30., 6)
        self.assertAlmostEqual(co.omega[0], 3.0, 6)
        self.assertAlmostEqual(co.Number_of_pixels[0], 21 * 21 * 5, 6)
        self.assertAlmostEqual(co.avg_intensity[0], 1, 1)
        self.assertAlmostEqual(co.IMax_int[0], 2, 6)
        self.assertAlmostEqual(co.IMax_f[0], 130., 6)
        self.assertAlmostEqual(co.IMax_s[0], 30.0, 6)
        self.assertAlmostEqual(co.IMax_o[0], 3.0, 6)

        self.assertAlmostEqual(co.fc[1], 65., 6)
        self.assertAlmostEqual(co.sc[1], 55., 6)
        self.assertAlmostEqual(co.omega[1], 3.0, 6)
        self.assertAlmostEqual(co.Number_of_pixels[1], 5, 6)
        self.assertAlmostEqual(co.avg_intensity[1], 1.8, 1)
        self.assertAlmostEqual(co.IMax_int[1], 3, 6)
        self.assertAlmostEqual(co.IMax_f[1], 65., 6)
        self.assertAlmostEqual(co.IMax_s[1], 55., 6)
        self.assertAlmostEqual(co.IMax_o[1], 3.0, 6)

        mask = co.Number_of_pixels > 10
        co.filter(mask)
        self.assertEqual(co.nrows, 1)
        co.writefile("l1.out")
Example #22
 def loadfiltered(self, filename):
     """
     Read in 3D peaks from peaksearch
     """
     self.colfile = columnfile.columnfile(filename)
     if (self.colfile.titles[0:3] == ["sc", "fc", "omega"]):
         self.setxyomcols("sc", "fc", "omega")
     if (self.colfile.titles[0:3] == ["xc", "yc", "omega"]):
         self.setxyomcols("xc", "yc", "omega")
     if "spot3d_id" not in self.colfile.titles:
         self.colfile.addcolumn(range(self.colfile.nrows), "spot3d_id")
Example #23
    def __init__(self, fname, parfile):
        self.colfile = columnfile( fname )
        self.pars = parameters.read_par_file( parfile )

        self.ds_tol = 0.005
        self.bins=np.arange(0,0.5,0.005)

        self.compute_XLYLZL()
        self.computegv()
        self.makecell()
        self.ringassign()
Example #24
def runtest(start, step):
    """ check tif range """
    pksh = sys.executable + ' ' + os.path.join("..","..","scripts","peaksearch.py")
    os.system(pksh + 
          " -n test -F .tif -f 0 -l 9 -t 1.0 -D 0 " + 
          "-T %f -S %f > testpksearch.log"%(start,step))
    c = columnfile("peaks_t1.flt")
    print "start",start,"step", step,"finds:",
    print "Min_o",c.Min_o,"Max_o",c.Max_o,"omega",c.omega
    if len(c.Min_o) == 3:
        print "I think it might be OK"
    return len(c.Min_o)==3
Example #25
 def get( self, fname ):
     if fname not in self.cache:
         # integer dty positions for indexing arrays
         colfile = columnfile( fname )
         iy = np.round( (colfile.dty - self.ymin)/self.ystep ).astype(int)
         colfile.addcolumn( iy, "iy")
         colfile.NY = self.NY
         # ensure tth, eta, gx, gy, gz are up-to-date
         if self.parfile is not None:
             colfile.parameters.loadparameters( self.parfile )
             colfile.updateGeometry()
         self.cache[fname] = colfile
     return self.cache[fname]
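The iy column added above is simply the dty motor position converted to an integer scan index; a standalone illustration with invented numbers:

import numpy as np

ymin, ystep = -70.0, 2.0                   # scan start and step in microns (invented)
dty = np.array([-70.0, -68.0, 0.0, 70.0])  # example motor positions
iy = np.round((dty - ymin) / ystep).astype(int)
print(iy)                                  # [ 0  1 35 70]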
Example #26
 def readallpeaks_flt(self, peaksfilename):
     """
     read in flt peaks found with peaksearch (ImageD11)
     """
     import os, fabio
     self.lines = open(peaksfilename, "r").readlines()
     try:
         from ImageD11 import columnfile
     except:
         return False
     cf = columnfile.columnfile(peaksfilename)
     self.images = cf
     return
Example #27
def loadcolfile(cfile):
    """ fixme: move to ImageD11.colfile if not there already """
    if cfile.find("::")>-1:
        h,s = cfile.split("::")
        try:
            colfile = colfile_from_hdf( h, name = s )
        except:
            print "*",cfile,"*"
            print "Trying to open",h, os.path.exists(h)
            print s
            raise
    else:
        colfile = columnfile(cfile)
    return colfile
Example #28
def test(start, step):
    import sys
    if sys.platform == "win32":
        pksh = "%s ..\\..\\scripts\\peaksearch.py " % (sys.executable)
    else:
        pksh = "peaksearch.py "

    os.system(pksh + "-n test -F .tif -f 0 -l 9 -t 1.0 -D 0 " +
              "-T %f -S %f >> testpksearch.log" % (start, step))
    c = columnfile("peaks_t1.flt")
    print "start", start, "step", step, "finds:",
    print "Min_o", c.Min_o, "Max_o", c.Max_o, "omega", c.omega
    if len(c.Min_o) == 3:
        print "I think it might be OK"
Example #29
def main2():
    import sys
    c = columnfile(sys.argv[1])
    p = read_par_file(sys.argv[2])
    gl = read_grain_file(sys.argv[3])
    for i, g in enumerate(gl):
        mask = c.labels == i
        g.sc = np.compress(mask, c.sc)
        g.fc = np.compress(mask, c.fc)
        g.omega = np.compress(mask, c.omega)
        ubnew, tnew = fitagrain(g, p)
        g.set_ubi(np.linalg.inv(ubnew))
        g.translation[:] = tnew
        print(i, len(g.sc), tnew)
    write_grain_file(sys.argv[4], gl)
Example #30
 def loadColfile(self, filename=None):
     """ Read in a new file """
     if filename is None or filename is False:
         filename = silx.gui.qt.QFileDialog(self.win,"Columnfile").getOpenFileName()[0]
     try:
         c = columnfile( filename )
         self.colfile = c
     except:
         print("problem opening file",filename)
         return
     print("loaded file",filename)
     self.fnamelabel.setText( "Columnfile: " + self.colfile.filename )
     self.setTitles()
     self.update()
     self.scatter_widget.resetZoom()
     self.scatter_widget.activateWindow()
     self.win.activateWindow()
Example #31
def main():
    flt    = columnfile.columnfile( sys.argv[1] )
    grains = grain.read_grain_file( sys.argv[2] )
    pars   = parameters.read_par_file( sys.argv[3] )
    newgrainfile  = sys.argv[4]

    hkltol = 0.05    #  for first peak assignments
    nmedian = 5      #  for removing peak fit outliers
    omegastep = 1.0  #  for omega images
    ymin = 13.5      #  dty start (so -15 -> +15 in 0.25 steps)
    ystep = 0.02     #  step in dty from scan
    rcut  = 0.2      #  cutoff for segmentation of reconstruction
    
    flt.filter( flt.dty >= ymin )
    flt.idty = np.round((flt.dty - ymin)/ystep).astype(np.int32) - 35
    flt.NY = 71 # flt.idty.max()+1
    OMSLOP = omegastep / 2.0
    
    
    tth, eta, gve = update_cols( flt, pars, OMSLOP )
    assign_peaks( grains, gve, flt, pars, nmedian,  hkltol )
#    pl.ioff()
    print("\n\n")
    out = open( newgrainfile, "w" )
    out.write("#  grain  ix  iy   npks   ubi00  ubi01  ubi02  ubi10  ubi11  ubi12  ubi20  ubi21  ubi22\n")
    for i,g in enumerate(grains):
        print("# Grain:",i)
        fit_one_grain( g, flt, pars )
        y0,x,y = map_out_cell( g, flt )
        sinoangles, sino, recon = map_grain( g, flt, ymin, ystep, omegastep )
        if 0:
            pl.subplot(211)
            pl.imshow( sino )
            pl.subplot(212)
            pl.imshow( recon )
            pl.show()
        active = recon > recon.max() * rcut
        ii, jj = np.mgrid[ 0:recon.shape[0], 0:recon.shape[0] ] - recon.shape[0]//2
        for ix, iy in zip(ii[active], jj[active]):
            gf = fit_one_point( g, flt, pars, ix, iy, ystep )
            r = ("%-4d  "*4)%(i,ix,iy,gf.mask.astype(int).sum())
            print(r)
            u = ("%.7f  "*9)%tuple(gf.ubi.ravel())
            out.write(r)
            out.write(u+"\n")
        g.translation = (x,y,0)