Example #1
def get_masks(labeled, minArea=None, maxArea=None, isSort=True, keyPrefix = None, labelLength=3):
    '''
    get mask dictionary from labeled maps (labeled by scipy.ndimage.label function)
    area range of each mask was defined by minArea and maxArea
    isSort: if True, sort masks by areas, big to small
    keyPrefix: the prefix of key
    labelLength: the number of characters of key
    '''

    maskNum = np.max(labeled.flatten())
    masks = {}
    for i in range(1,maskNum+1):
        currMask = np.zeros(labeled.shape,dtype=np.uint8)
        currMask[labeled==i]=1

        if minArea is not None and np.sum(currMask.flatten()) < minArea: pass
        elif maxArea is not None and np.sum(currMask.flatten()) > maxArea: pass
        else:
            if keyPrefix is not None: currKey = keyPrefix+'.'+ft.int2str(i,labelLength)
            else: currKey = ft.int2str(i,labelLength)
            masks.update({currKey:currMask})

    if isSort:
        masks = sort_masks(masks, keyPrefix = keyPrefix, labelLength=labelLength)

    return masks
Example #2
def get_masks(labeled, minArea=None, maxArea=None, isSort=True, keyPrefix=None, labelLength=3):
    """
    get mask dictionary from labeled maps (labeled by scipy.ndimage.label function)
    area range of each mask was defined by minArea and maxArea
    isSort: if True, sort masks by areas, big to small
    keyPrefix: the prefix of key
    labelLength: the number of characters of key
    """

    maskNum = np.max(labeled.flatten())
    masks = {}
    for i in range(1, maskNum + 1):
        currMask = np.zeros(labeled.shape, dtype=np.uint8)
        currMask[labeled == i] = 1

        if minArea is not None and np.sum(currMask.flatten()) < minArea:
            pass
        elif maxArea is not None and np.sum(currMask.flatten()) > maxArea:
            pass
        else:
            if keyPrefix is not None:
                currKey = keyPrefix + '.' + ft.int2str(i, labelLength)
            else:
                currKey = ft.int2str(i, labelLength)
            masks.update({currKey: currMask})

    if isSort:
        masks = sort_masks(masks, keyPrefix=keyPrefix, labelLength=labelLength)

    return masks
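A minimal usage sketch for get_masks, assuming numpy is imported as np, that ft is the project's FileTools helper module whose int2str(i, n) zero-pads an integer to n characters (an assumption), and that sort_masks (Examples #7/#8 below) is available alongside it:

import numpy as np
import scipy.ndimage as ndi

binary = np.zeros((8, 8), dtype=np.uint8)
binary[1:3, 1:3] = 1            # small blob, area 4
binary[4:8, 4:8] = 1            # large blob, area 16

labeled, n = ndi.label(binary)  # labeled map, as produced by scipy.ndimage.label
masks = get_masks(labeled, minArea=2, keyPrefix='roi')
# with the assumed int2str behaviour the keys are 'roi.000', 'roi.001', ...,
# ordered from the largest mask to the smallest because isSort defaults to True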
Example #3
def satDXToTimeSeries(filename):
    file = open(filename)
    data = ft.read(file)
    (rows, cols) = data.shape
    dict = pyLTR.TimeSeries()
    mon = []
    mday = []
    dayFraction = []
    doy = []
    atime = []
    for i in range(rows):
        d = datetime.date(int(data[i, 0]), 1,
                          1) + datetime.timedelta(int(data[i, 1]) - 1)
        mon.append(d.month)
        mday.append(d.day)
        dayFraction = (data[i, 2] + data[i, 3] / 60.0 +
                       data[i, 4] / 3600.0) / 24.0
        doy.append(float(data[i, 1]) + dayFraction)
        #atime.append(datetime.datetime(d.year,d.month,d.day,data[i,2],data[i,3],data[i,4]))
    dict.append('time_doy', 'Day of Year', ' ', doy)
    dict.append('density', 'Den', r'$\mathrm{1/cm^3}$', data[:, 5])
    dict.append('vx', 'Vx', r'$\mathrm{km/s}$', data[:, 6])
    dict.append('vy', 'Vy', r'$\mathrm{km/s}$', data[:, 7])
    dict.append('vz', 'Vz', r'$\mathrm{km/s}$', data[:, 8])
    dict.append('p', 'P', r'$\mathrm{keV/cm^3}$', data[:, 9])
    dict.append('bx', 'Bx', r'$\mathrm{nT}$', data[:, 10])
    dict.append('by', 'By', r'$\mathrm{nT}$', data[:, 11])
    dict.append('bz', 'Bz', r'$\mathrm{nT}$', data[:, 12])
    dict.append('ex', 'Ex', r'$\mathrm{V/m}$', data[:, 13])
    dict.append('ey', 'Ey', r'$\mathrm{V/m}$', data[:, 14])
    dict.append('ez', 'Ez', r'$\mathrm{V/m}$', data[:, 15])

    return dict
Example #4
    def __init__(self, dossier_acquis, isdata):
        """
        Definition of the data acquisition parameters

        Parameters
        ----------
        dossier_acquis : str
            Path to the data.
        isdata : bool
            If isdata is True, data are processed, if isdata is False, white images are processed.

        Returns
        -------
        None.

        """
        self.dossier_acquis = dossier_acquis
        self.dossier_data = f"{dossier_acquis}data/"
        self.dossier_blanc = f"{dossier_acquis}blanc/"
        self.isdata = isdata
        self.fichier_config = f"{dossier_acquis}config/config_manip.txt"
        procfolder = self.createresultfolder(dossier_acquis, "Pretraitement",
                                             isdata)
        self.dossier_pretraitement = procfolder
        recfolder = self.createresultfolder(dossier_acquis, "Reconstruction",
                                            isdata)
        self.dossier_reconstruction = recfolder
        gerchfolder = self.createresultfolder(dossier_acquis, "Gerchberg",
                                              isdata)
        self.dossier_gerchberg = gerchfolder
        self.NA = float(ft.readvalue(self.fichier_config, 'NA'))
        self.NIMM = float(ft.readvalue(self.fichier_config, 'N0'))
        self.LAMBDA = float(ft.readvalue(self.fichier_config, 'LAMBDA'))
        self.F_TUBE = float(ft.readvalue(self.fichier_config,
                                         'F_TUBE'))  # Tube lens focal length
        self.F_OBJ = float(ft.readvalue(
            self.fichier_config, 'F_OBJ'))  # Microscope objective focal length
        self.PIX = float(ft.readvalue(self.fichier_config,
                                      'TPCAM'))  # Physical pixel pitch
        self.RAPFOC = float(ft.readvalue(
            self.fichier_config,
            'RF'))  # Focal length ratio of the resampling lens doublet
        self.CHEMINMASQUE = f"{self.dossier_data}Image_mask.pgm"
        self.CENTREX = int(ft.readvalue(
            self.fichier_config, 'CIRCLE_CX'))  # Pupil center in Fourier space
        self.CENTREY = int(ft.readvalue(self.fichier_config, 'CIRCLE_CY'))
        self.NB_HOLOTOT = int(ft.readvalue(self.fichier_config, 'NB_HOLO'))
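A short instantiation sketch for this constructor; the module and class names mirror how later examples in this listing use it (M = manip.Manip(...) in Example #26), and the acquisition path below is a placeholder that must end with a slash because the f-strings above concatenate it directly:

import manip

DOSSIERACQUIS = "/path/to/acquisition/"   # placeholder: folder containing data/, blanc/ and config/
M = manip.Manip(DOSSIERACQUIS, True)      # True: process data images; False: process white images
print(M.dossier_pretraitement, M.NA, M.LAMBDA)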
Example #5
def get_template():
    pre = '/home/luke/Documents/computational_naval_architecture/projects/relational_hull_design'
    directory = pre + '/relational_lsplines/relational_lsplines'
    filename = 'TemplateTable.tex'
    testlines = FileTools.GetLines(directory, filename)
    for line in testlines:
        print(line)
    return testlines
Example #6
def read_training_file(train_file):
    global_features = {}
    local_features = []
    gold_vec = []

    with FileTools.openReadFile(train_file) as ifile:
        info = ifile.readline().rstrip()

        e = info.split(" ")
        n_global = int(e[0])
        n_local = int(e[1])

        # ignore next line
        line = ifile.readline()
        # print n_global,n_local
        if (read_features(global_features, 1, ifile) == 0):
            return [None, None]

        global_features = dict([(x, int(global_features[x][0]) - 1)
                                for x in global_features])

        line = ifile.readline()

        if (len(line) == 0):
            return None

        n_sent = 0

        while (len(line) > 0):
            # print str(n_sent)+": "+line
            if (line[0] != '#'):
                print "Expecting comment in line " + line
                return None

            e = line.rstrip().split(" ")
            gold = e[-1]

            n_sent = n_sent + 1

            if (n_sent % 10 == 0):
                print(str(n_sent))  #, end='\r')
                #                sys.stdout.flush()
                if (n_sent % 100 == 0):
                    break

            local_sent_features = {}
            if (read_features(local_sent_features, n_local, ifile) == 0):
                return [None, None]

            if (gold in global_features):
                gold_vec.append(gold)
                local_features.append(local_sent_features)

            line = ifile.readline()
        #print "\n"+str(n_sent)+" "+line

    return [n_local, n_global, global_features, local_features, gold_vec]
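For orientation, a hedged sketch of the input layout this parser appears to expect, inferred only from the reads above (read_features itself is not shown here, and the numbers are illustrative):

# line 1: "<n_global> <n_local>", e.g. "25 1200"
# line 2: ignored
# one global feature block, consumed by read_features(global_features, 1, ifile)
# then, per sentence:
#   a '#'-prefixed comment line whose last whitespace-separated token is the gold label
#   a local feature block, consumed by read_features(local_sent_features, n_local, ifile)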
Example #7
def sort_masks(masks, keyPrefix='', labelLength=3):
    '''
    sort a dictionary of binary masks, big to small
    '''

    maskNum = len(masks.keys())
    order = []
    for key, mask in masks.items():
        order.append([key,np.sum(mask.flatten())])

    order = sorted(order, key=lambda a:a[1], reverse=True)

    newMasks = {}
    for i in range(len(order)):
        if keyPrefix is not None: currKey = keyPrefix+'.'+ft.int2str(i,labelLength)
        else: currKey = ft.int2str(i,labelLength)
        newMasks.update({currKey:masks[order[i][0]]})
    return newMasks
Example #8
def sort_masks(masks, keyPrefix='', labelLength=3):
    """
    sort a dictionary of binary masks, big to small
    """

    maskNum = len(masks.keys())
    order = []
    for key, mask in masks.items():
        order.append([key, np.sum(mask.flatten())])

    order = sorted(order, key=lambda a: a[1], reverse=True)

    newMasks = {}
    for i in range(len(order)):
        if keyPrefix is not None:
            currKey = keyPrefix + '.' + ft.int2str(i, labelLength)
        else:
            currKey = ft.int2str(i, labelLength)
        newMasks.update({currKey: masks[order[i][0]]})
    return newMasks
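A tiny check of the sorting behaviour, again assuming ft.int2str(i, n) zero-pads i to n characters:

import numpy as np

big = np.ones((10, 10), dtype=np.uint8)    # area 100
small = np.zeros((10, 10), dtype=np.uint8)
small[:2, :2] = 1                          # area 4

ordered = sort_masks({'a': small, 'b': big}, keyPrefix='mask')
# expected: 'mask.000' maps to the big mask, 'mask.001' to the small one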
Example #9
def main(args):
    if len(args) < 3:
        print("Usage:", args[0], "<if> <of>")
        return -1

    with FileTools.openReadFile(args[1]) as ifh, FileTools.openWriteFile(
            args[2]) as ofh:
        words = []
        tags = []
        v = None

        for l in ifh:
            e = l.rstrip().split("\t")

            if len(e) < 3:
                ofh.write(v + " " + " ".join(words) + " ||| " +
                          " ".join(tags) + "\n")
                words = []
                tags = []
            else:
                t = e[2]
                if t == 'B-V':
                    v = str(len(words))
                elif t != 'O':
                    if t.find("ARG") != -1:
                        t = t.replace("ARG", "A")
                    else:
                        t = t.replace("-", "-AM-")

                words.append(e[1])
                tags.append(t)

        if len(words):
            ofh.write(" ".join(words) + " ||| " + " ".join(tags) + "\n")

    return 0
Example #10
def main():
    argc = len(sys.argv)

    n_iter = 100
    reg = 0.1

    if (argc < 3):
        print("Usage: " + sys.argv[0] +
              " <training file> <out file> <n_iter = " + str(n_iter) +
              "> <reg = " + str(reg) + ">")
        return -1
    elif (argc > 3):
        n_iter = int(sys.argv[3])
        if (argc > 4):
            reg = float(sys.argv[4])

    ifile = sys.argv[1]
    ofile = sys.argv[2]

    [n_local, n_global, global_features, local_features,
     gold_vec] = read_training_file(ifile)
    if (local_features == None):
        return -2

    n_features = n_local + n_global
    print(
        str(n_local) + ", " + str(n_global) + ", " +
        str(len(global_features)) + ", " + str(len(local_features)) + ", " +
        str(len(local_features[0])))

    weights = [random.uniform(-1, 1) for i in range(n_features)]
    #weights = [1 for i in range(n_features)]
    print(len(global_features.values()))

    for i in range(n_iter):
        [loss,
         weights] = run_iteration(n_features, local_features, global_features,
                                  n_global, n_local, weights, gold_vec, reg)
        print str(i) + ": " + str(loss)

    with FileTools.openWriteFile(ofile) as ofh:
        for i in range(n_features):
            ofh.write(str(weights[i]) + "\n")

    return 0
Example #11
    def __init__(self, args, savepath, filei, centers):
        self.args   = args
        self.energy = self.args.e
        
        self.file     = filei[0]
        self.savepath = savepath

        self.ft  = FileTools.FileTools(args)

        rit = RunInfoTools.RunInfoTools(args, savepath, filei)
        self.ampbias                 = rit.ampbias
        self.xtal                    = rit.xtal
        self.x_center, self.y_center = centers[0], centers[1] 
        self.Aeff                    = rit.Aeff

        # These lines annoyingly needed to make fitResult work :(
        gSystem.Load("/afs/cern.ch/user/m/mplesser/H4Analysis/CfgManager/lib/libCFGMan.so")
        gSystem.Load("/afs/cern.ch/user/m/mplesser/H4Analysis/lib/libH4Analysis.so")
        gSystem.Load("/afs/cern.ch/user/m/mplesser/H4Analysis/DynamicTTree/lib/libDTT.so")
Example #12
    def read_training_file(self, train_file, n_training_samples=-1):
        pos_map = {}
        gold_vec = []

        with FileTools.openReadFile(train_file) as ifile:
            info = ifile.readline().rstrip()
            
            e = info.split(" ")
            n_pos_tags = int(e[0])
            n_features = int(e[1])
            features = []
            sums = []
        
            # ignore next line
            line = ifile.readline()
            s = {}
            # print n_pos_tags,n_features
            if (self.read_features(s, 1, ifile) == 0):
                return [None,None]
            
            pos_map = dict([(w,int(s[w][0])-1) for w in s])
        
            line = ifile.readline()
    
            if (len(line) == 0):
                return None
            
            n_sent = 0

            while (len(line) > 0):
                # print str(n_sent)+": "+line
                if (line[0] != '#'):
                    print "Expecting comment in line "+line
                    return None
        
                e = line.rstrip().split(" ")
                gold = e[-1]
            
                n_sent = n_sent + 1
            
            
                local_sent_features = {}
            
                if (self.read_features(local_sent_features, n_features, ifile) == 0):
                    return [None,None]
            
                if (gold in pos_map):
                    gold_vec.append(gold)
                
                    arr = self.gen_local_pos_features(local_sent_features, pos_map, n_pos_tags, gold)
                
                    
                    features.append(arr[0])
                    sums.append(arr[1])
                                
                line = ifile.readline()
                # break
                # if n_sent == 3:
                #     break
            
                if (n_training_samples > 0 and n_sent == n_training_samples):
                    break
                
                if (n_sent % 10 == 0):
                    print(str(n_sent) + "    \r", end="")
                    sys.stdout.flush()
               
            #print "\n"+str(n_sent)+" "+line
    
        self.n_features = n_features
        self.n_pos_tags = n_pos_tags
        self.pos_map = pos_map
        self.gold_vec = gold_vec
        self.features = features
        self.sums = sums
    
        return
Example #13
DOSSIERACQUIS = "/home/nicolas/Acquisitions/Topi_pollen_600U/"
DATA = True  # True for data preprocessing, False for white image processing
M = manip.Manip(DOSSIERACQUIS, DATA)
if DATA is True:
    DOSSIERDATA = M.dossier_data
else:
    DOSSIERDATA = M.dossier_blanc

# Creating results Folders
PROCESSINGFOLDER = f"{DOSSIERDATA}Reconstruction"
if not os.path.exists(PROCESSINGFOLDER):
    os.makedirs(PROCESSINGFOLDER)

# Path to the parameter file, and parameters reading
CHEMINPARAM = f"{DOSSIERDATA}Pretraitement/Param.txt"
REWALD = float(ft.readvalue(CHEMINPARAM, 'REwald'))
NB_ANGLE = int(ft.readvalue(CHEMINPARAM, 'nb_angle'))
FMAXHOLO = int(ft.readvalue(CHEMINPARAM, 'fmaxHolo'))
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))
UBornPitch = 1 / (2 * FMAXHOLO * PIXTHEO)
NB_HOLO = NB_ANGLE

# Filter Radii
GreenRadius = 60
RedRadius = 80

# Rounding tomographic volume dimensions to the next power of 2
pow2 = ft.NextPow2(2 * DIMHOLO)
DIMTOMO = 2**pow2
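For reference, a minimal sketch of what ft.NextPow2 is assumed to compute here (the exponent of the next power of two at or above its argument), so that DIMTOMO is the smallest power of two that can hold 2 * DIMHOLO:

import math

def next_pow2(n):
    """Assumed behaviour of ft.NextPow2: smallest integer p with 2**p >= n."""
    return int(math.ceil(math.log2(n)))

# e.g. DIMHOLO = 200  ->  next_pow2(400) == 9  ->  DIMTOMO = 2**9 = 512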
Example #14
DOSSIERACQUIS = "/home/nicolas/Acquisitions/Topi_pollen_600S/"
DATA = True  # True for data preprocessing, False for white image processing
M = manip.Manip(DOSSIERACQUIS, DATA)
if DATA is True:
    DOSSIERDATA = M.dossier_data
else:
    DOSSIERDATA = M.dossier_blanc

# Creating results Folders
PROCESSINGFOLDER = f"{DOSSIERDATA}Reconstruction"
if not os.path.exists(PROCESSINGFOLDER):
    os.makedirs(PROCESSINGFOLDER)

# Path to the parameter file, and parameters reading
CHEMINPARAM = f"{DOSSIERDATA}Pretraitement/Param.txt"
REWALD = float(ft.readvalue(CHEMINPARAM, 'REwald'))
NB_ANGLE = int(ft.readvalue(CHEMINPARAM, 'nb_angle'))
FMAXHOLO = int(ft.readvalue(CHEMINPARAM, 'fmaxHolo'))
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))
UBornPitch = 1 / (2 * FMAXHOLO * PIXTHEO)
NB_HOLO = NB_ANGLE

# Paths to the real, and imaginary parts of the field
CHEMIN_RE_UBORN = f"{DOSSIERDATA}Pretraitement/ReBorn_{DIMHOLO}.tiff"
CHEMIN_IM_UBORN = f"{DOSSIERDATA}Pretraitement/ImBorn_{DIMHOLO}.tiff"

# Paths to the saved cuts
CHEMINSAV_XY = f"{PROCESSINGFOLDER}/XY_CUT.tiff"
CHEMINSAV_YZ = f"{PROCESSINGFOLDER}/YZ_CUT.tiff"
CHEMINSAVOTF_XY = f"{PROCESSINGFOLDER}/XY_CUT_OTF.tiff"
Example #15
DOSSIERACQUIS = "/home/nicolas/Acquisitions/Topi_pollen_600U/"
DATA = True  # True for data preprocessing, False for white image processing
M = manip.Manip(DOSSIERACQUIS, DATA)
if DATA is True:
    DOSSIERDATA = M.dossier_data
else:
    DOSSIERDATA = M.dossier_blanc

# Creating results Folders
PROCESSINGFOLDER = f"{DOSSIERDATA}Reconstruction"
if not os.path.exists(PROCESSINGFOLDER):
    os.makedirs(PROCESSINGFOLDER)

# Path to the parameter file, and parameters reading
CHEMINPARAM = f"{DOSSIERDATA}Pretraitement/Param.txt"
REWALD = float(ft.readvalue(CHEMINPARAM, 'REwald'))
NB_ANGLE = int(ft.readvalue(CHEMINPARAM, 'nb_angle'))
FMAXHOLO = int(ft.readvalue(CHEMINPARAM, 'fmaxHolo'))
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))
UBornPitch = 1 / (2 * FMAXHOLO * PIXTHEO)
NB_HOLO = NB_ANGLE

# Paths to the real, and imaginary parts of the field
CHEMIN_RE_UBORN = f"{DOSSIERDATA}Pretraitement/ReBorn_{DIMHOLO}.tiff"
CHEMIN_IM_UBORN = f"{DOSSIERDATA}Pretraitement/ImBorn_{DIMHOLO}.tiff"

# Path to the specular coordinates
SpecCoordPath = f"{DOSSIERDATA}Pretraitement/Centres_{DIMHOLO}.txt"
fi = rp.Calc_fi(SpecCoordPath, NB_ANGLE, DIMHOLO)
Example #16
            maxRa = np.max(ra)
            minDec = np.min(dec)
            maxDec = np.max(dec)

            if (crval[0] < minRa) | (crval[0] > maxRa) | (
                    crval[1] < minDec) | (crval[1] > maxDec):
                print(
                    'WARNING: MAP CENTRE DOES NOT MATCH TELESCOPE POINTING CENTRE. CHECK COORDINATES'
                )
                print('MEAN RA: {:.2f}, MEAN DEC: {:.2f}'.format(
                    np.mean(ra), np.mean(dec)))

            wcs = Mapping.DefineWCS(naxis, cdelt, crval)
            maps, hits = Mapping.MakeMaps(tod, ra, dec, wcs)
            dataout['hits'] = hits
            dataout['maps'] = maps
            dataout['naxis'] = np.array(naxis)
            dataout['cdelt'] = np.array(cdelt)
            dataout['crval'] = np.array(crval)

        sbStr = ''.join(str(e) for e in sidebands)
        hoStr = ''.join(str(e) for e in pixels)
        FileTools.WriteH5Py(
            '{}/{}_{}_Horns{}_Sidebands{}.h5'.format(
                Parameters.get('Inputs', 'outputDir'),
                Parameters.get('Inputs', 'outputname'), prefix, hoStr, sbStr),
            dataout)

        dataout = {}  # clear data
        dfile.close()
Example #17
DistSrc = DistName + "_src"
DistDir = "../../DistTemp/"

#====================================================================
# script assumes to run in src/Tools

DistTools.EnsureDir(DistDir)
if (DistTools.EnsureDir(DistDir + DistSrc) == 1):
    raise "Dist path already there!!"

#====================================================================
# copy src
sys.stdout.write('Copy src Tree ...\n')
DistTools.EnsureDir(DistDir + DistSrc + '/src')
FileTools.cpallWithFilter('../../src', DistDir + DistSrc + '/src',
                          FileTools.SetUpFilter(DistTools.SrcFilter))

#====================================================================
# copy top level files

#FileTools.cpfile("../Doc/README.html",DistDir+DistBin+"/README.html")
#FileTools.cpfile("../Doc/INSTALL.html",DistDir+DistBin+"/INSTALL.html")
#FileTools.cpfile("../Doc/LICENSE.GPL.html",DistDir+DistBin+"/LICENSE.GPL.html")
#FileTools.cpfile("../Doc/LICENSE.LGPL.html",DistDir+DistBin+"/LICENSE.LGPL.html")
#DistTools.cpfile("../Tools/BuildTool.py",DistDir+DistBin+"/BuildTool.py")

#====================================================================
# zipping an archive
os.popen("7z a -tzip " + DistDir + DistSrc + ".zip " + DistDir + DistSrc +
         " -mx9")

Example #18
# shell and operating system
import os, sys
#sys.path.append( "E:\\Develop\\Projekte\\FreeCADWin\\src\\Tools" )

import DistTools, FileTools

# line separator
ls = os.linesep
# path separator
ps = os.pathsep
# dir separator
ds = os.sep

DistName = DistTools.BuildDistName()

DistInst  = DistName + "_installer.msi"
DistDir  = "../../DistTemp/"

#====================================================================
# copy installer file

FileTools.cpfile("../../Install/FreeCAD.msi",DistDir+DistInst)

Example #19
DistSrc  = DistName + "_src"
DistDir  = "../../DistTemp/"

#====================================================================
# script assumes to run in src/Tools

DistTools.EnsureDir(DistDir)
if (DistTools.EnsureDir(DistDir+DistSrc) == 1):
    raise "Dist path already there!!"

#====================================================================
# copy src 
sys.stdout.write( 'Copy src Tree ...\n')
DistTools.EnsureDir(DistDir+DistSrc+'/src')
FileTools.cpallWithFilter('../../src',DistDir+DistSrc+'/src',FileTools.SetUpFilter(DistTools.SrcFilter))

#====================================================================
# copy top level files

#FileTools.cpfile("../Doc/README.html",DistDir+DistBin+"/README.html")
#FileTools.cpfile("../Doc/INSTALL.html",DistDir+DistBin+"/INSTALL.html")
#FileTools.cpfile("../Doc/LICENSE.GPL.html",DistDir+DistBin+"/LICENSE.GPL.html")
#FileTools.cpfile("../Doc/LICENSE.LGPL.html",DistDir+DistBin+"/LICENSE.LGPL.html")
#DistTools.cpfile("../Tools/BuildTool.py",DistDir+DistBin+"/BuildTool.py")

#====================================================================
# zipping an archive
os.popen("7z a -tzip "+ DistDir+DistSrc+".zip "+ DistDir+DistSrc + " -mx9")

FileTools.rmall(DistDir+DistSrc)
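As an aside, the os.popen call above can also be written with subprocess, which surfaces a non-zero 7z exit code instead of silently discarding it; a sketch using the same arguments:

import subprocess

subprocess.run(["7z", "a", "-tzip", DistDir + DistSrc + ".zip", DistDir + DistSrc, "-mx9"],
               check=True)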
Example #20
if not os.path.exists(PROCESSINGFOLDER):
    os.makedirs(PROCESSINGFOLDER)

# Choosing method
Method = {
    0: "BASE",
    1: "DARKFIELD",
    2: "PHASECONTRAST",
    3: "RHEINBERG",
    4: "DIC"
}
MethodUsed = Method[2]

# Path to the parameter file, and parameters reading
CHEMINPARAM = f"{DOSSIERDATA}Pretraitement/Param.txt"
REWALD = float(ft.readvalue(CHEMINPARAM, 'REwald'))
NB_ANGLE = int(ft.readvalue(CHEMINPARAM, 'nb_angle'))
FMAXHOLO = int(ft.readvalue(CHEMINPARAM, 'fmaxHolo'))
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))
UBornPitch = 1 / (2 * FMAXHOLO * PIXTHEO)
NB_HOLO = NB_ANGLE

# Path to the specular coordinates
SpecCoordPath = f"{DOSSIERDATA}Pretraitement/Centres_{DIMHOLO}.txt"
fi = rp.Calc_fi(SpecCoordPath, NB_ANGLE, DIMHOLO)

# Paths to the real, and imaginary parts of the field
CHEMIN_RE_UBORN = f"{DOSSIERDATA}Pretraitement/ReBorn_{DIMHOLO}.tiff"
CHEMIN_IM_UBORN = f"{DOSSIERDATA}Pretraitement/ImBorn_{DIMHOLO}.tiff"
Example #21
import os
import settings
import sys
import DBTools
import FileTools

arg = sys.argv

if len(arg) < 2:
    print("Please use : ", arg[0], " [directory]")
    exit()
elif not os.path.isdir(arg[1]):
    print(arg[1], " is not a directory")
    exit()

startDir = arg[1]

result = FileTools.searchFile(startDir)

dbTools = DBTools.DBTools()

if settings.display != "none" and settings.display != "error":
    print(str(len(result)) + " music file find")

i = 0

for track in result:

    i += 1

    if settings.display != "none" and settings.display != "error":
        print("file ", end='')
        print(i, end='/')
Example #22
PlotDir = Config.get('Inputs','PlotDir')
DataFile = Config.get('Inputs','DataFile')
if 'none'.lower() in PlotDir.lower():
    PlotDir = None

todjd0 = TimeString2JD(Config.get('Observation','todstart')) + float(Config.get('TimeCorrections', 'additiveFactor') )

nside = Config.getint('Inputs', 'nside')
bl = Config.getint('Inputs', 'baseline')
blong = Config.getint('Inputs', 'blong')
npix = 12*nside**2

# Read in the TOD, integrate over all SB0
if os.path.isfile('{}/{}'.format(Config.get('Inputs', 'datadir'),Config.get('Inputs', 'compressedtod'))):
    tod = FileTools.ReadH5Py('{}/{}'.format(Config.get('Inputs', 'datadir'),Config.get('Inputs', 'compressedtod')))['auto_py']
    todjd = np.arange(tod.size)/Config.getfloat('Observation', 'todsr') / 3600./ 24. * (1. - float(Config.get('TimeCorrections', 'multiFactor')) ) + todjd0
else:
    todfile = h5py.File('{}/{}'.format(Config.get('Inputs', 'datadir'), Config.get('Inputs', 'todfile')))
    tod = np.mean(todfile['auto_py'][:,0,:],axis=1)
    todfile.close()
    FileTools.WriteH5Py('{}/{}'.format(Config.get('Inputs', 'datadir'),Config.get('Inputs', 'compressedtod')),
                        {'auto_py': tod})

# Check for nans in TOD and normalise
tod[np.isnan(tod)] = 0.
tod = (tod - np.mean(tod))/np.std(tod)



# Read in encoder data
Example #23

# shell and operating system
import os, sys
#sys.path.append( "E:\\Develop\\Projekte\\FreeCADWin\\src\\Tools" )

import DistTools, FileTools

# line separator
ls = os.linesep
# path separator
ps = os.pathsep
# dir separator
ds = os.sep

DistName = DistTools.BuildDistName()

DistInst  = DistName + "_installer.msi"
DistDir  = "../../DistTemp/"

#====================================================================
# copy installer file

FileTools.cpfile("../../Install/FreeCAD.msi",DistDir+DistInst)

Example #24
tagg.sub_aggs.append(tagg1)
qsearch = pyes.query.Search(q)
qsearch.agg.add(tagg)

rs = conn.search(query=qsearch, indices='example_index', type="example_type")
print(json.dumps(rs.aggs, indent=2))

formatTranslator = FormatTranslator.FormatTranslator()
result = formatTranslator.ES_Aggs_2_Layer_to_Matrix_and_indice(
    rs.aggs, "user_id", "name")

print(result['rowIndexList'])
print(result['colIndexList'])
print(result['matrix'])

fileTools = FileTools.FileTools()
fileTools.List_to_CSV(result['colIndexList'], "col_index.csv")
fileTools.Matrix_to_CSV(result['matrix'], "matrix.csv")
'''------------------------------------------- Agg to csv -------------------------------------------'''

conn = pyes.es.ES('localhost:9200')
q = pyes.MatchAllQuery()
tagg = pyes.aggs.TermsAgg('name', field='name', sub_aggs=[])
qsearch = pyes.query.Search(q)
qsearch.agg.add(tagg)

rs = conn.search(query=qsearch, indices='example_index', type="example_type")
print(json.dumps(rs.aggs, indent=2))

fileTools = FileTools.FileTools()
fileTools.ES_Aggs_1_Layer_to_CSV(rs.aggs, "agg.csv", agg_name="name")
Example #25
from scipy.fftpack import fftn, ifftn, fftshift
import Retropropagation as rp

NA_ill = 1.4
nimm = 1.518
nbead = 1.55
nbangle = 50
kappa = 0.01
Radius = 20
DOSSIERDATA = "/home/nicolas/Simulations/"
CHEMINPARAM = f"{DOSSIERDATA}Param.txt"

# Path to the parameter file, and parameters reading
DARKFIELD = False
PHASECONTRAST = False
REWALD = float(ft.readvalue(CHEMINPARAM, 'REwald'))
NB_ANGLE = int(ft.readvalue(CHEMINPARAM, 'nb_angle'))
FMAXHOLO = int(ft.readvalue(CHEMINPARAM, 'fmaxHolo'))
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))
UBornPitch = 1 / (2 * FMAXHOLO * PIXTHEO)
NB_HOLO = NB_ANGLE

# Paths to the real, and imaginary parts of the field
CHEMIN_RE_UBORN = f"{DOSSIERDATA}ReBorn_{DIMHOLO}.tiff"
CHEMIN_IM_UBORN = f"{DOSSIERDATA}ImBorn_{DIMHOLO}.tiff"

# Path to the specular coordinates
SpecCoordPath = f"{DOSSIERDATA}/Centres_{DIMHOLO}.txt"
fi = rp.Calc_fi(SpecCoordPath, NB_ANGLE, DIMHOLO)
Example #26
import Retropropagation as rp
import manip
import napari

# Path to the parameter file, and parameters reading
# Data folders and config files
if os.name == 'nt':  # Windows
    DOSSIERACQUIS = "C:/Users/p1600109/Documents/Recherche/Acquisitions/Topi_pollen_600S/"
else:  # Linux
    DOSSIERACQUIS = "/home/nicolas/Acquisitions/Topi_pollen_600S/"
DATA = True  # True for data preprocessing, False for white image processing
M = manip.Manip(DOSSIERACQUIS, DATA)
DOSSIERDATA = M.dossier_data
PROCESSINGFOLDER = f"{DOSSIERDATA}Reconstruction"
CHEMINPARAM = f"{DOSSIERDATA}Pretraitement/Param.txt"
DIMHOLO = int(ft.readvalue(CHEMINPARAM, 'dimHolo'))
PIXTHEO = float(ft.readvalue(CHEMINPARAM, 'pixTheo'))

# Creation of results folder
GERCHBERGFOLDER = M.dossier_gerchberg
if not os.path.exists(GERCHBERGFOLDER):
    os.makedirs(GERCHBERGFOLDER)

# Rounding tomographic volume dimensions to the next power of 2
pow2 = ft.NextPow2(2 * DIMHOLO)
DIMTOMO = 2**pow2

# Paths to the refraction, and absorption of the object
CHEMINABSORP = f"{PROCESSINGFOLDER}/Absorption_{DIMTOMO}x{DIMTOMO}x{DIMTOMO}.tiff"
CHEMINREFRAC = f"{PROCESSINGFOLDER}/Refraction_{DIMTOMO}x{DIMTOMO}x{DIMTOMO}.tiff"
CHEMINOTF = f"{PROCESSINGFOLDER}/OTF_{DIMTOMO}x{DIMTOMO}x{DIMTOMO}.tiff"
Example #27
#FileTools.cpfile("FreeCAD.css","../../doc/res/FreeCAD.css")

#====================================================================
sys.stdout.write('Running source documentation ...')
# running doxygen with the parameters from the config file
param = "doxygen fcbt" + ds + "BuildDocDoxy.cfg"
LogFile.write(param)
print(param)
text = os.popen(param).read()
LogFile.write(text)
if not os.path.isdir("../../doc/SourceDocumentation"):
    os.mkdir("../../doc/SourceDocumentation")

#====================================================================
sys.stdout.write(' done\n  Generate HTML ...')
FileTools.cpall("html", "../../doc/SourceDocumentation")
"""
#====================================================================
sys.stdout.write(' done\n  Generate DVI ...')
os.chdir("latex")
text = os.popen("latex refman.tex").read()
LogFile.write(text)
text = os.popen("makeindex refman.idx").read()
LogFile.write(text)
text = os.popen("latex refman.tex").read()
text = os.popen("latex refman.tex").read()
text = os.popen("latex refman.tex").read()
FileTools.cpfile("refman.dvi","../../../doc/FrameWork/FrameWork.dvi")

#====================================================================
sys.stdout.write (' done\n  Generate PS ...')
Example #28
            CPT += 1
        
    if "RHEINBERG" == MethodUsed:
        StackRGB[:,:,int((Z+1200)/80),0] = SumHoloR
        StackRGB[:,:,int((Z+1200)/80),1] = SumHoloG
        StackRGB[:,:,int((Z+1200)/80),2] = SumHoloB
        
    else :
        Stack[:,:,int((Z+1200)/80)] = SumHolo
    
    CPTSUM += 1
    print(f"Slice {CPTSUM} out of {Stack.shape[2]}")
print(f"Pre-Processing time for {CPTSUM} Slices: "
      f"{np.round(time.time() - start_time,decimals=2)} seconds")


if "RHEINBERG" == MethodUsed:
    ft.SAVtiffRGBCube(f"{PROCESSINGFOLDER}/{MethodUsed}_{2*dimHolo}x{2*dimHolo}x{StackRGB.shape[2]}.tiff",
                        StackRGB, 2*M.PIX*1e6)
else:
    ft.SAVtiffCube(f"{PROCESSINGFOLDER}/{MethodUsed}_{2*dimHolo}x{2*dimHolo}x{Stack.shape[2]}.tiff",
                    Stack, 2*M.PIX*1e6)

# Center recording and file closing
fidParams.write(f"nb_angle {CPT_EXIST-1}\n")
fidParams.write(f"fmaxHolo {fmaxHolo}\n")
fidParams.write(f"dimHolo {dimHolo}\n")
fidParams.write(f"pixTheo {M.PIX/Gtot}\n")
tf.imwrite(CHEMINSAV_CENTRES, np.float32(np.int32(Centres)))
fidCentrestxt.close()
fidParams.close()
Example #29
DistBin  = DistName + "_binary_WinX86"
DistDir  = "../../DistTemp/"

#====================================================================
# script assumes to run in src/Tools

DistTools.EnsureDir(DistDir)
if (DistTools.EnsureDir(DistDir+DistBin) == 1):
    raise "Dist path already there!!"

#====================================================================
# copy src 
sys.stdout.write( 'Copy src Tree ...\n')
DistTools.EnsureDir(DistDir+DistBin+'/src')
FileTools.cpallWithFilter('../../src',DistDir+DistBin+'/src',FileTools.SetUpFilter(DistTools.SrcFilter))

#====================================================================
# copy bin and lib 
sys.stdout.write( 'Copy bin and lib Tree ...\n')
DistTools.EnsureDir(DistDir+DistBin+'/bin')
FileTools.cpallWithFilter('../../bin',DistDir+DistBin+'/bin',FileTools.SetUpFilter(DistTools.BinFilter))
DistTools.EnsureDir(DistDir+DistBin+'/lib')
FileTools.cpallWithFilter('../../lib',DistDir+DistBin+'/lib',FileTools.SetUpFilter(DistTools.LibFilter))

#====================================================================
# copy Modules
sys.stdout.write( 'Copy modul Tree ...\n')
DistTools.EnsureDir(DistDir+DistBin+'/Mod')
FileTools.cpallWithFilter('../../src/Mod',DistDir+DistBin+'/Mod',FileTools.SetUpFilter(DistTools.ModFilter))
Example #30
popt,pcov = scipy.optimize.curve_fit(spt.generateEPSP,cirf_time,
                                     cirf_raw,p)
cirf_fit = spt.generateEPSP(cirf_time,popt[0],popt[1],popt[2],popt[3])
leg3, = plt.plot(cirf_time,cirf_fit,'--',label = 'Least-squares fit')
plt.legend(handles = [leg1, leg2, leg3], fontsize = 18)
plt.xlabel('Time (s)', fontsize = 20)
plt.ylabel('Normalized amplitude', fontsize = 20)
plt.title('CIRF for regression', fontsize = 22)
plt.box(False)
plt.tick_params(labelsize = 14)
print('Parameters \n' +  str(popt))

#%% Load ephys data
import FileTools as ft
pyDir = r'S:\Avinash\SPIM\Alx\8-9-2015_Alx RG x 939_4dpf\Ephys\Fish2'
pyFileName = ft.findAndSortFilesInDir(pyDir,ext = 'mat')[-1]

pyData_disk = h5py.File(os.path.join(pyDir,pyFileName))['data']
pyData = {}
pyData['stim'],pyData['swim'] = {},{}
pyData['stim']['amps'] = pyData_disk['stim']['amps'][:]
pyData['stim']['inds'] = pyData_disk['stim']['inds'][:].astype(int)
pyData['swim']['startInds'] = pyData_disk['swim']['startInds'][:].astype(int)
pyData['swim']['distFromLastStim'] = pyData_disk['swim']['distFromLastStim'][:]
pyData['time'] = pyData_disk['t'][:]
pyData['smooth'] = np.transpose(pyData_disk['smooth']['burst'][:])

pyData['samplingRate'] = int(1/(pyData['time'][1]-pyData['time'][0]))