Example #1
    def cal_rows(self, css, scene, h, dh):
        width = scene.width
        height = scene.height
        ar = width / height
        camera = scene.camera
        xmin = -1
        xmax = 1
        ymax = xmax / ar
        ymin = -ymax
        dx = (xmax - xmin) / (width - 1)
        dy = (ymax - ymin) / (height - 1)
        cs = []  # flat colour buffer: 3 floats (r, g, b) per pixel over dh rows
        pb = ProgressBar(dh * width)

        for j in range(h, h + dh):
            y = ymin + dy * j
            for i in range(width):
                x = xmin + dx * i
                ray = Ray(camera, Vector(x, y, 0) - camera)
                c = self.ray_trace(ray, scene)
                cs.append(c.x)
                cs.append(c.y)
                cs.append(c.z)
                pb.update(1)

        a = h * width * 3
        b = (h + dh) * width * 3
        css[a:b] = cs
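
Since cal_rows writes one band of dh rows into a flat shared buffer, it lends itself to multiprocessing. A minimal sketch of a driver, assuming a RenderEngine class exposing cal_rows as above (the class name and the scene object are placeholders for the surrounding project):

# Hedged sketch: render row bands in parallel into one shared flat RGB
# buffer (3 floats per pixel). RenderEngine/scene are assumptions, not shown.
from multiprocessing import Process, Array

def render_parallel(engine, scene, n_procs=4):
    css = Array('d', 3 * scene.width * scene.height)  # shared buffer
    dh = scene.height // n_procs
    procs = []
    for k in range(n_procs):
        h = k * dh
        # the last worker also takes any leftover rows
        band = dh if k < n_procs - 1 else scene.height - h
        proc = Process(target=engine.cal_rows, args=(css, scene, h, band))
        procs.append(proc)
        proc.start()
    for proc in procs:
        proc.join()
    return css  # pixel (i, j) starts at index 3 * (j * scene.width + i)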
Example #2
    def render(self, scene):
        """ Render image from stuff inside scene """
        width = scene.width
        height = scene.height
        ar = width / height

        camera = scene.camera
        im = Image(width, height)

        xmin = -1
        xmax = 1
        ymax = xmax / ar
        ymin = -ymax
        dx = (xmax - xmin) / (width - 1)
        dy = (ymax - ymin) / (height - 1)

        pb = ProgressBar(height * width)

        for j in range(height):
            y = ymin + dy * j
            for i in range(width):
                x = xmin + dx * i

                ray = Ray(camera, Vector(x, y, 0) - camera)
                c = self.ray_trace(ray, scene)
                im.set_pixel(j, i, c)
                pb.update(1)
        return im
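
The loop above maps pixel indices to a viewport spanning x in [-1, 1], with the y range shrunk by the aspect ratio so pixels stay square. A standalone sketch of just that mapping:

# Pixel-to-viewport mapping used by render(): x spans [-1, 1] and
# y spans [-1/ar, 1/ar], sampled on a (width - 1) x (height - 1) lattice.
def pixel_to_viewport(i, j, width, height):
    ar = width / height
    xmin, xmax = -1.0, 1.0
    ymax = xmax / ar
    ymin = -ymax
    dx = (xmax - xmin) / (width - 1)
    dy = (ymax - ymin) / (height - 1)
    return xmin + dx * i, ymin + dy * j

print(pixel_to_viewport(0, 0, 320, 200))      # (-1.0, -0.625)
print(pixel_to_viewport(319, 199, 320, 200))  # (1.0, 0.625)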
Example #3
    def learn(self, samples, epochs=25000, noise=0, test_samples=None, show_progress=True):
        ''' Learn the given distribution from sample data.

        :Parameters:
            `samples` : [numpy array, ...]
                List of sample sets
            `epochs` : [int, ...]
                Number of epochs to be run for each sample set
        '''

        # Check if samples is a list
        if type(samples) not in [tuple,list]:
            samples = (samples,)
            epochs = (epochs,)

        n = 0 # total number of epochs to be run
        
        for j in range(len(samples)):
            n += epochs[j]
        
        self.entropy = []
        self.distortion = []

        if show_progress:
            bar = ProgressBar(widgets=[Percentage(), Bar()], maxval=n).start()
        index = 0

        for j in range(len(samples)):
            
            self.samples = samples[j]
            I = np.random.randint(0,self.samples.shape[0],n)
            
            for i in range(epochs[j]):
                # Anneal sigma and learning rate as a function of time
                t = index/float(n)
                lrate = self.lrate_i*(self.lrate_f/self.lrate_i)**t
                sigma = self.sigma_i*(self.sigma_f/self.sigma_i)**t
                C = self.adj.copy()

                # Learn something
                S = self.samples[I[i]] + noise*(2*np.random.random(len(self.samples[I[i]]))-1)
                S = np.minimum(np.maximum(S,0),1)
                self.learn_data(S,lrate,sigma)

                #self.learn_data(self.samples[I[i]],lrate,sigma)
                if i%100 == 0:
                    self.entropy.append(((self.adj-C)**2).sum())

                    if test_samples is not None:
                        distortion = self.compute_distortion(test_samples)
                    else:
                        distortion = self.compute_distortion(self.samples)

                    self.distortion.append(distortion)

                if show_progress:
                    bar.update(index+1)

                index = index+1

        if show_progress: bar.finish()
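
learn() anneals the learning rate and neighbourhood width geometrically: value(t) = v_i * (v_f / v_i) ** t equals v_i at t = 0 and v_f at t = 1. A standalone sketch with illustrative initial/final values (the real ones live on the object as lrate_i, lrate_f, sigma_i, sigma_f):

# Geometric annealing schedule used in learn(); the numbers are illustrative.
lrate_i, lrate_f = 0.5, 0.005
sigma_i, sigma_f = 10.0, 0.01
n = 25000
for index in (0, n // 2, n - 1):
    t = index / float(n)
    lrate = lrate_i * (lrate_f / lrate_i) ** t
    sigma = sigma_i * (sigma_f / sigma_i) ** t
    print(index, lrate, sigma)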
Example #4
    def learn(self, samples, epochs=25000, noise=0, test_samples=None, show_progress=True):
        ''' Learn the given distribution from sample data.

        :Parameters:
            `samples` : [numpy array, ...]
                List of sample sets
            `epochs` : [int, ...]
                Number of epochs to be run for each sample set
        '''

        # Check if samples is a list
        if type(samples) not in [tuple,list]:
            samples = (samples,)
            epochs = (epochs,)

        n = 0 # total number of epochs to be run
        for j in range(len(samples)):
            n += epochs[j]
        self.entropy = []
        self.distortion = []

        if show_progress:
            bar = ProgressBar(widgets=[Percentage(), Bar()], maxval=n).start()
        index = 0
        for j in range(len(samples)):
            self.samples = samples[j]
            I = np.random.randint(0,self.samples.shape[0],n)
            for i in range(epochs[j]):
                # Set sigma and learning rate according to current time
                t = index/float(n)
                lrate = self.lrate_i*(self.lrate_f/self.lrate_i)**t
                sigma = self.sigma_i*(self.sigma_f/self.sigma_i)**t
                C = self.codebook.copy()
                # Learn data

                S = self.samples[I[i]] + noise*(2*np.random.random(len(self.samples[I[i]]))-1)
                S = np.minimum(np.maximum(S,0),1)
                self.learn_data(S,lrate,sigma)

                #self.learn_data(self.samples[I[i]],lrate,sigma)
                if i%100 == 0:
                    self.entropy.append(((self.codebook-C)**2).sum())
                    if test_samples is not None:
                        distortion = self.compute_distortion(test_samples)
                    else:
                        distortion = self.compute_distortion(self.samples)
                    self.distortion.append(distortion)
                if show_progress:
                    bar.update(index+1)
                index = index+1
        if show_progress:
            bar.finish()
Example #5
    def _align(self, Ys, X, sel=None, record=True):

        pb = ProgressBar(final=len(Ys), label='align', tail_label='{i:}')
        if record:
            info = np.zeros((len(Ys), 11))

        # set defaults
        sel = sel or slice(None)

        X_sel = X[:, sel]
        t0 = self._calc_centroid(X_sel)
        X0 = X - t0  # centre reference on origin
        X0_sel = X0[:, sel]

        for i, Y_ in enumerate(Ys):
            pb.update()
            Y_sel = Y_[:, sel]
            t = self._calc_centroid(Y_sel)
            Y0_ = Y_ - t  # centre frame on origin
            Y0_sel = Y0_[:, sel]
            R = self._calc_optrot(X0_sel, Y0_sel)  # get rotation matrix
            Y0 = np.dot(R, Y0_.T)  # rotate
            Y = Y0 + t0  # centre frame on t0
            Ys[i, :, :] = Y

            if record:
                # ref/frame comparison before alignment
                info[i, 0] = self._calc_rmsd(X, Y_)  # all
                info[i, 2] = self._calc_rmsd(X0, Y0_)  # rotational
                info[i, 3] = self._calc_rmsd(X0_sel,
                                             Y0_sel)  # rotational selection

                # ref/frame comparison after alignment
                info[i, 4] = self._calc_rmsd(X, Y)  # all
                info[i, 5] = self._calc_rmsd(X0, Y0)  # rotational

                # aligned/unaligned frame comparisons
                info[i, 6] = self._calc_rmsd(Y_, Y)  # all
                info[i, 7] = self._calc_rmsd(Y0, Y0_)  # rotational

                # translational difference between the two centroids
                # (stored in the otherwise unused column 1)
                info[i, 1] = self._calc_rmsd(t, t0)

                # euler angles
                info[i, 8:11] = self._calc_euler(R)

        return (Ys, info) if record else Ys
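
_calc_optrot is not shown above; one standard choice for the optimal rotation between two centred coordinate sets is the Kabsch algorithm via SVD. A sketch under that assumption (the class may implement it differently), written for (3, N) column-vector arrays:

# Hedged sketch: Kabsch rotation R minimising ||R @ Y0 - X0|| for centred
# (3, N) coordinate sets. An assumed implementation of _calc_optrot.
import numpy as np

def kabsch_rotation(X0, Y0):
    H = np.dot(Y0, X0.T)                  # 3x3 covariance matrix
    U, s, Vt = np.linalg.svd(H)
    d = np.sign(np.linalg.det(np.dot(Vt.T, U.T)))
    D = np.diag([1.0, 1.0, d])            # guard against reflections
    return np.dot(Vt.T, np.dot(D, U.T))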
Example #6
    def __init__(self, pdb, dmin, dmax, expand=True):

        # inherit from Mixin
        self.enumerate_hkl = self._enumerate_hkl_numpy

        # enable access to self.F[h, k, l] with slicing support
        self.F = self._make_FhklArray()

        # coordinates
        if expand:
            pdb = pdb.get_unitcell()

        uc_xyzc = np.dot(pdb.S, [pdb.x, pdb.y, pdb.z])
        self.n = len(pdb)

        # reflections
        hkls, d = self._enumerate_hkl_numpy(dmin, dmax, S=pdb.S)
        stol2 = 1.0 / (4.0 * d * d)

        # Cromer-Mann scattering factors
        assert os.path.isfile('cm.pkl')
        with open('cm.pkl', 'rb') as _f:
            _A, _B, _C = pickle.load(_f)
        f0 = {
            e:
            _C[e] + sum(_A[e][i] * np.exp(-_B[e][i] * stol2) for i in range(4))
            for e in set(pdb.e)
        }

        # direct summation for each summand
        self.Fatoms = np.zeros((len(pdb), len(hkls)), dtype=complex)
        p = ProgressBar(final=len(pdb),
                        label='directsum',
                        tail_label='{f:>10}')

        for i, (xyzc, e, n, b) in enumerate(zip(uc_xyzc.T, pdb.e, pdb.n,
                                                pdb.B)):
            p.update()
            hx = np.sum(hkls * xyzc, axis=1)
            self.Fatoms[i, :] = n * f0[e] * np.exp(-b * stol2) * np.exp(
                np.pi * 2j * hx)

        # save drange
        self._hkls = hkls
        self._d = d
        self.selection()
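
The f0 dictionary above implements the four-Gaussian Cromer-Mann approximation f0(s) = c + sum_i a_i * exp(-b_i * s^2), where s^2 = (sin(theta)/lambda)^2, i.e. stol2. A standalone version with placeholder coefficients (the real a_i, b_i, c come from cm.pkl):

# Cromer-Mann scattering factor as used above; coefficients are placeholders.
import numpy as np

def cromer_mann_f0(a, b, c, stol2):
    return c + sum(a[i] * np.exp(-b[i] * stol2) for i in range(4))

a = [2.0, 1.5, 1.0, 0.5]
b = [10.0, 5.0, 1.0, 0.1]
c = 0.2
print(cromer_mann_f0(a, b, c, np.array([0.0, 0.05, 0.25])))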
Example #7
    def learn(self, fairness_types, epochs=100, display=True):
        optimizer = torch.optim.Adam(self.pq_net.parameters(),
                                     lr=1e-1,
                                     weight_decay=self.lam)

        losses = []

        if display:
            progress = ProgressBar(epochs, 1)

        for epoch in range(epochs):
            epoch_loss = 0
            loss = self.loss(fairness_types)
            epoch_loss += loss.data.numpy()[0]
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            if display:
                progress.update(
                    epoch,
                    "Iteration {}: Objective = {}".format(epoch, epoch_loss))

            losses.append(epoch_loss)
Example #8
            file.write('''set xrange [-.5:1.5]\n''')
            file.write('''set yrange [-.5:1.5]\n''')
            file.write('''set zrange [-.5:1.5]\n''')
            file.write('''set style data line\n''')
            file.write('''set ticslevel 0\n''')
            file.write('''set size ratio 1\n''')
            file.write('''set title "Dynamic Self-Organising Map\\nNicolas Rougier & Yann Boniface"\n''')
            file.write('''set label "Self-reorganisation from sphere to spheres surface" at screen .5, screen .1 center\n''')
            file.write('''set label "(http://www.loria.fr/~rougier/)" at screen .5, screen .065 center textcolor lt 3\n''')
            file.write('''set view %d,%d\n''' % (rot_x,rot_z))
            file.write('''set terminal pngcairo size 512,512\n''')
            file.write('''set output '%s.png'\n''' % filename)
#            file.write('''splot '%s' using 1:2:3, '%s' with point pt 6 lw .1\n''' % (datafile,datafile))
            file.write('''splot '%s' using 1:2:3\n''' % (datafile))
            file.close()
            file = open(datafile, 'w')
            C = net.codebook
            for x in range(C.shape[0]):
                for y in range(C.shape[1]):
                    file.write('%.3f %.3f %.3f\n' % (C[x,y,0],C[x,y,1],C[x,y,2]))
                file.write('''\n''')
            file.close()
            subprocess.call(['/usr/bin/gnuplot', plotfile])
        net.learn_data(samples[I[i]])
        bar.update(i)
    bar.finish()
#    os.system('''mencoder 'mf:///tmp/sphere*.png' -mf type=png:fps=25  -Ovc lavc -lavcopts \
#                 vcodec=mpeg4:vbitrate=2500  -oac copy -o sphere.avi''')


Example #9
def main(plistpath, dest, options=None):
    flat = delete = pretend = False  # safe defaults when no options are given
    if options:
        flat = options.flat
        delete = options.delete
        pretend = options.pretend

    plist = open(plistpath)
    srcset = set()
    destset = set()

    p.message("Parsing playlist.", 1)

    playlistpaths = []

    for line in plist:
        if line[0] == '#':
            continue

        if line[-1] == '\n':
            path = line[:-1] # strip the trailing newline
        else:
            path = line

        playlistpaths.append(path)

    if len(playlistpaths) == 0:
        p.message("Playlist is empty!", 1)
        sys.exit()

    p.message("Loading metadata from playlist files.", 1)
    if p.level > 1 and p.level < 4:
        bar = ProgressBar(len(playlistpaths),"numbers")
        bar.draw()
    i = 0
    totalbytes=0

    for path in playlistpaths:
        try:
            song = getMetaData(path)
            totalbytes += os.path.getsize(path)

            srcset.add(song)
        except (OSError, IOError) as e:
            p.message("\nError loading {0}: {1}".format(path, e.strerror), 2)
        i += 1
        if p.level > 1 and p.level < 4:
            bar.update(i)

    p.message("{0} files, {1} MB".format(len(playlistpaths), totalbytes/(1024.0*1024.0)), 2)

    p.message("Loading existing files", 1)
    existingFilePaths = []

    for dirpath, dirnames, filenames in os.walk(dest):
        for path in filenames:
            if path[-3:] == 'mp3':
                fullpath = os.path.join(dirpath, path)
                existingFilePaths.append(fullpath)

    if len(existingFilePaths) > 0:
        p.message("Loading metadata from existing files.", 2)
        if p.level > 1 and p.level < 4:
            bar = ProgressBar(len(existingFilePaths),"numbers")
            bar.draw()
        i = 0

        for path in existingFilePaths:
            try:
                song = getMetaData(path)
                destset.add(song)
            except HeaderNotFoundError:
                # This is when the mp3 file in place is malformed, like when it is
                # only a partial file
                os.remove(path)
            except IOError:
                # Something weird happened
                p.message("File not found" + path, 2)
            i += 1
            if p.level > 1 and p.level < 4:
                bar.update(i)
    else:
        p.message("No existing files", 3)

    toAdd = srcset - destset
    toDel = destset - srcset

    # we can't just take the intersection, because we need the version from
    # dest
    toCheck = set()
    for song in destset:
        if song in srcset:
            toCheck.add(song)

    # Delete songs that shouldn't be there (if we should delete things)
    if delete and len(toDel) > 0 and not pretend:
        p.message("Deleting songs", 1)
        for song in toDel:
            os.remove(song.mp3path)
    else:
        p.message("Not deleting: delete flag={0}, pretend={1} len(toDel)={2}".format(delete,pretend,len(toDel)),5)

    # Move songs around that are already there, but possibly not in the right
    # place
    first = True  # used to print the "Organizing old songs" message only once
    if len(toCheck) > 0:
        for song in toCheck:
            data = song.data(root=dest)
            data['artist'] = sanitize(data['artist'])
            data['album'] = sanitize(data['album'])
            data['title'] = sanitize(data['title'])
            newFile = ""
            if not flat:
                artistDir = u"{0[root]}/{0[artist]}".format(data)
                albumDir = artistDir + u"/{0[album]}".format(data)
                newFile = albumDir + u"/{0[track]:0>2} {0[title]}.mp3".format(data)

                if not os.path.exists(artistDir):
                    os.mkdir(artistDir)
                if not os.path.exists(albumDir):
                    os.mkdir(albumDir)
            else:
                newFile = u"{0[root]}/{0[artist]} - {0[album]} - {0[track]:0>2} {0[title]}.mp3".format(data)

            if song.mp3path != newFile:
                if first:
                    first = False
                    p.message("Organizing old songs", 1)
                if not pretend:
                    shutil.move(song.mp3path, newFile)

    # Copy new songs
    if len(toAdd) > 0:
        p.message("Copying songs", 1)
        if p.level > 1 and p.level < 4:
            bar = ProgressBar(len(toAdd),"numbers")
            bar.draw()
        i = 0

        for song in toAdd:
            data = song.data(root=dest)
            data['artist'] = sanitize(data['artist'])
            data['album'] = sanitize(data['album'])
            data['title'] = sanitize(data['title'])
            newPath = ""
            if not flat:
                artistDir = u"{0[root]}/{0[artist]}".format(data)
                albumDir = artistDir + u"/{0[album]}".format(data)
                newPath = albumDir + u"/{0[track]:0>2} {0[title]}.mp3".format(data)

                if not os.path.exists(artistDir) and not pretend:
                    os.mkdir(artistDir)
                if not os.path.exists(albumDir) and not pretend:
                    os.mkdir(albumDir)
            else:
                newPath = u"{0[root]}/{0[artist]} - {0[album]} - {0[track]:0>2} {0[title]}.mp3".format(data)

            p.message("Copying {0}".format(newPath), 4)
            if not pretend:
                try:
                    shutil.copyfile(song.mp3path, newPath)
                except IOError as e:
                    p.message("Error copying {0}: {1}".format(newPath, e.strerror), 3)
            i += 1
            if p.level > 1 and p.level < 4:
                bar.update(i)
    else:
        p.message("All songs already there!", 1)

    p.message("\nDone.", 1)
Example #10
def directsum_python(pdb, dmin, dmax, expand=False):

    # directsum Fhkl calculation - reference implementation to check c code

    # load Cromer-Mann coefficient dictionaries from ITC tables vol C ch 6.1
    assert os.path.isfile('cm.pkl')
    with open('cm.pkl', 'rb') as _f:
        _A, _B, _C = pickle.load(_f)

    # apply symmetry operators and expand to full unit cell
    if expand:
        pdb = pdb.get_unitcell()

    S = pdb.S
    ST = [[S[0][0], S[1][0], S[2][0]], [S[0][1], S[1][1], S[2][1]],
          [S[0][2], S[1][2], S[2][2]]]

    # convert to crystallographic/fractional basis
    xcs, ycs, zcs = [], [], []
    for x, y, z in zip(pdb.x, pdb.y, pdb.z):
        xcs.append(S[0][0] * x + S[0][1] * y + S[0][2] * z)
        ycs.append(S[1][0] * x + S[1][1] * y + S[1][2] * z)
        zcs.append(S[2][0] * x + S[2][1] * y + S[2][2] * z)

    es = set(pdb.e)
    atoms = zip(pdb.e, xcs, ycs, zcs, pdb.n, pdb.B)
    F = []

    ####################### enumerate hkls

    # rearrange d*max <= hmax|a*| to d*max/|a*| <= hmax etc.
    astar = math.sqrt(S[0][0] * S[0][0] + S[0][1] * S[0][1] +
                      S[0][2] * S[0][2])
    bstar = math.sqrt(S[1][0] * S[1][0] + S[1][1] * S[1][1] +
                      S[1][2] * S[1][2])
    cstar = math.sqrt(S[2][0] * S[2][0] + S[2][1] * S[2][1] +
                      S[2][2] * S[2][2])
    hmax = int(1.0 / (dmin * astar)) + 1
    kmax = int(1.0 / (dmin * bstar)) + 1
    lmax = int(1.0 / (dmin * cstar)) + 1

    p = ProgressBar(final=hmax * 2 * kmax,
                    label='directsum',
                    tail_label='{t:>10}')

    for h in range(0, hmax):
        hw = ST[0][0] * h  # + ST[0][1]*k + ST[0][2]*l
        for k in range(-kmax, kmax):
            p.update()
            kw = ST[1][0] * h + ST[1][1] * k  # + ST[1][2]*l
            for l in range(-lmax, lmax):
                lw = ST[2][0] * h + ST[2][1] * k + ST[2][2] * l
                dstar2 = hw * hw + kw * kw + lw * lw
                d = 1.0 / math.sqrt(dstar2 + 1e-20)
                if d < dmin or d > dmax:
                    continue

                stol2 = dstar2 / 4.0

                #######################

                # calculate Cromer-Mann atomic scattering factors
                f0 = {}
                for e in es:
                    f0[e] = _C[e]
                    f0[e] += sum(_A[e][i] * np.exp(-_B[e][i] * stol2)
                                 for i in range(4))

                # direct summation using isotropic B-factors
                Freal, Fimag = 0.0, 0.0
                for e, x, y, z, n, b in atoms:
                    alpha = 2.0 * np.pi * (h * x + k * y + l * z)
                    tmp = n * f0[e] * math.exp(-b * stol2)
                    Freal += tmp * math.cos(alpha)
                    Fimag += tmp * math.sin(alpha)

                # convert to amplitude and phase
                A = math.sqrt(Freal * Freal + Fimag * Fimag)
                phi = round(math.degrees(math.atan2(Fimag, Freal)), 3)
                F.append([h, k, l, d, A, phi])

    # sort according to resolution
    # F = sorted(F, key=lambda a: -a[3])
    return F
Example #11
    subcategory_index = subcategory[0, 0]

    id = f[:-4]
    category = TRAIN_LABELS[id][0]
    subcategory = TRAIN_LABELS[id][1]

    if category in category_indexes:
        assert category_indexes[category] == category_index
    if subcategory in subcategory_indexes:
        assert subcategory_indexes[subcategory] == subcategory_index

    category_indexes[category] = category_index
    subcategory_indexes[subcategory] = subcategory_index

    i += 1
    progress.update(i)

for f in os.listdir(MAT_TEST_DIRECTORY):
    mat = loadmat(os.path.join(MAT_TEST_DIRECTORY, f))

    id = f[:-4]
    category = TEST_LABELS[id][0]
    subcategory = TEST_LABELS[id][1]

    mat["category"] = mat["category"] * 0 + category_indexes[category]
    mat["labels"] = mat["labels"] * 0 + subcategory_indexes[subcategory]

    savemat(os.path.join(MAT_TEST_DIRECTORY, f), mat)

    i += 1
    progress.update(i)
Example #12
    extract_files('tmp')

    #cleaning up
    shutil.rmtree('tmp')


try:
    url = obj["url"][1:][:-1]
    name = obj["url"][1:][:-1].split('/')[-1]
    filename = os.path.join(DOWNLOADS_FOLDER, name)
    name, ext = os.path.splitext(filename)

    bar = ProgressBar(title="Downloading %s v. %s" % (name, obj["version"]))

    if ext not in (".app", ".zip", ".pkg", ".dmg"):
        bar.update(0, message="Can't handle files of type %s" % ext)
        time.sleep(2)
        raise ValueError("unsupported file type: %s" % ext)

    os.chdir(DOWNLOADS_FOLDER)

    def prg(count, blockSize, totalSize):

        percent = int(count * blockSize * 100 / totalSize)
        bar.update(percent,message="(%s%%) Downloading %s " % (percent,url))



    urllib.urlretrieve(url, filename, reporthook=prg)
    print "\n"
Example #13
    seg = tuple(read_csv(os.path.join(SEG_DIRECTORY, category,
                                      f'{pc_id}.seg')))
    point_array = np.array(points)
    label_array = np.array(seg)
    category_array = np.ones((1, 1)) * CATEGORIES.index(category)
    savemat(os.path.join(MATDIR, f'{pc_id}.mat'), {
        'points': point_array,
        'labels': label_array,
        'category': category_array
    })


num_files = sum(
    map(len, (os.listdir(os.path.join(POINTS_DIRECTORY, category))
              for category in CATEGORIES)))

print(f"Creating {num_files} files")
progress_step = max(num_files // 1000, 1)
progress_bar = ProgressBar(num_files)
i = 0

for category in CATEGORIES:
    for pointfile in os.listdir(os.path.join(POINTS_DIRECTORY, category)):
        pc_id = pointfile[:-4]
        if i % progress_step == 0:
            progress_bar.update(i)

        make_mat(pc_id, category)
        i += 1
print('')
Example #14
    def __init__(self,
                 pdb,
                 dmin=1.0,
                 g=1.0 / 3.0,
                 rho_cutoff=0.01,
                 rpad=2,
                 Q=100.,
                 expand=False):

        self._expand = expand

        # inherit from Mixin
        self.enumerate_hkl = self._enumerate_hkl_python
        self.isabsent = self._isabsent_python

        cos = math.cos
        sin = math.sin

        # unit cell parameters
        a, b, c = pdb.uc_abc
        V = self._V = pdb.calc_volume()
        S = self._S = pdb.calc_scale()
        SI = pdb.calc_invscale()

        def matrixmult(A, B):
            AB = [[0] * 3, [0] * 3, [0] * 3]
            for i in range(3):
                for j in range(3):
                    for k in range(3):
                        AB[i][j] += A[i][k] * B[k][j]
                    AB[i][j] = round(AB[i][j], 4)
            return AB

        # convert symops to the crystallographic basis for reciprocal expansion
        self._R = []
        self._T = []
        S = pdb.S
        for i in pdb.R.keys():
            Rw = pdb.R[i]
            Rc = matrixmult(matrixmult(S, Rw), SI)
            Tw = pdb.T[i]
            # translation in the crystallographic basis
            Tc = np.around(np.dot(S, Tw), 4)

            # self._T = np.around([np.dot(S, pdb.T[i]) for i in pdb.R.keys()], 4)
            self._T.append(Tc)
            self._R.append(Rc)

        # baseline Biso and Ueq = Biso/(8*pi^2)
        sigma = 0.5 / g
        bbase = (math.log10(Q) * dmin * dmin) / (sigma * (sigma - 1))
        bmin = min(pdb.B)
        bsmear = min(bbase - bmin, 100 * (8 * math.pi**2))
        self._bsmear = bsmear

        # grid dimensions
        na = int(round(a / 10.0) * 10.0 / g)
        nb = int(round(b / 10.0) * 10.0 / g)
        nc = int(round(c / 10.0) * 10.0 / g)
        self._grid = na, nb, nc

        # build 3D array for density, padded along the last axis by rpad
        self.rho = [None] * na
        for i in xrange(na):
            self.rho[i] = [None] * nb
            for j in xrange(nb):
                self.rho[i][j] = [0.0] * (nc + rpad)

        # ensure true division
        _na = 1. * na
        _nb = 1. * nb
        _nc = 1. * nc

        # initialise progress bar
        progressbar = ProgressBar(len(pdb),
                                  label='sampling',
                                  tail_label='{t:>10}')

        # load Cromer-Mann coefficient dictionaries from ITC tables vol C ch 6.1
        with open('cm.pkl', 'rb') as _f:
            _A, _B, _C = pickle.load(_f)

        A = {e: _A[e] + [_C[e]] for e in set(pdb.e)}
        B = {e: _B[e] + [0] for e in set(pdb.e)}

        # loop over all atoms
        for e, xw, yw, zw, n, b in zip(pdb.e, pdb.x, pdb.y, pdb.z, pdb.n,
                                       pdb.B):

            progressbar.update()

            # atom crystallographic fractional coordinates
            # (S is upper triangular, so the lower-triangle terms vanish)
            xc = S[0][0] * xw + S[0][1] * yw + S[0][2] * zw
            yc = S[1][1] * yw + S[1][2] * zw
            zc = S[2][2] * zw

            # atom crystallographic grid coordinate
            xg = int(round(xc * na))
            yg = int(round(yc * nb))
            zg = int(round(zc * nc))

            # prepare stuff for density calculation
            b += bsmear
            prs = [
                n * A[e][i] * (4.0 * math.pi / (B[e][i] + b))**(3. / 2.)
                for i in range(5)
            ]
            exs = [4.0 * math.pi * math.pi / (B[e][i] + b) for i in range(5)]

            def density(dsq):
                return sum(prs[i] * math.exp(-exs[i] * dsq) for i in range(5))

            # get cutoff radius for the (1-rho_cutoff) isosurface
            dcut = 0
            rho_origin = density(0)
            while density(dcut * dcut) / rho_origin > rho_cutoff:
                dcut += g
            assert dcut > 0
            dcutsq = dcut * dcut

            # cutoff radius in crystallographic fractional coordinates
            dmax_xc = S[0][0] * dcut + S[0][1] * dcut + S[0][2] * dcut
            dmax_yc = S[1][1] * dcut + S[1][2] * dcut
            dmax_zc = S[2][2] * dcut

            # local sampling box grid coordinate limits
            dmax_xg = int(round(dmax_xc * na))
            dmax_yg = int(round(dmax_yc * nb))
            dmax_zg = int(round(dmax_zc * nc))

            # local sampling box grid limits centered on atoms
            rxg_min = xg - dmax_xg
            rxg_max = xg + dmax_xg
            ryg_min = yg - dmax_yg
            ryg_max = yg + dmax_yg
            rzg_min = zg - dmax_zg
            rzg_max = zg + dmax_zg

            # loop over local voxel grid coordinates
            for rzg in range(rzg_min, rzg_max):
                rzc = rzg / _nc
                rzw = SI[2][2] * rzc
                dzw = rzw - zw
                dzwdzw = dzw * dzw
                rzg %= nc

                for ryg in range(ryg_min, ryg_max):
                    ryc = ryg / _nb
                    ryw = SI[1][1] * ryc + SI[1][2] * rzc
                    dyw = ryw - yw
                    dywdyw = dyw * dyw
                    ryg %= nb

                    for rxg in range(rxg_min, rxg_max):
                        rxc = rxg / _na
                        rxw = SI[0][0] * rxc + SI[0][1] * ryc + SI[0][2] * rzc
                        dxw = rxw - xw
                        rxg %= na

                        # distance^2 between atom and grid voxels
                        dsq = dxw * dxw + dywdyw + dzwdzw

                        # trim edges of box exceeding radial cutoff
                        if dsq > dcutsq:
                            continue

                        # add current atom's density contribution to voxel
                        self.rho[rxg][ryg][rzg] += density(dsq)

        # complex conjugate of fourier coeffs is equiv to iFFT
        self._F = np.conj(np.fft.fftpack.fftn(self.rho, s=self._grid))
        self._F *= self._V / (na * nb * nc)
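
The last two lines rely on the identity that, for a real-valued density grid, the complex conjugate of the forward FFT equals the inverse FFT scaled by the number of grid points. A quick numpy check of that identity:

# For real rho: conj(fftn(rho)) == ifftn(rho) * rho.size
import numpy as np

rho = np.random.random((4, 5, 6))
lhs = np.conj(np.fft.fftn(rho))
rhs = np.fft.ifftn(rho) * rho.size
print(np.allclose(lhs, rhs))  # True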
Example #15
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from progress import ProgressBar
from animation import *
from time import sleep

p = ProgressBar(" Doing the 1st thing:", max_value=20, width=20)
for i in range(21):
    p.update(i)
    sleep(.25)
p.finish("Success")

a = Animation("Waiting for the 2nd thing:")
for i in range(21):
    a.next()
    sleep(.25)
a.finish("Success")

a = Animation("Waiting for the 3rd thing:", animation=ANIMATE_VGROW)
for i in range(21):
    a.next()
    sleep(.25)
a.finish("Success")

a = Animation("Waiting for the 4th thing:", animation=ANIMATE_HGROW)
for i in range(21):
    a.next()
    sleep(.25)
a.finish("Success")

a = Animation("Waiting for the 5th thing:", animation=ANIMATE_COUNT)
Example #16
def directsum_numpy(pdb, dmin, dmax, expand=False):

    # direct sum Fhkl algorithm - vectorised implementation

    # apply symmetry operators and expand to full unit cell
    if expand:
        pdb = pdb.get_unitcell()

    # convert to crystallographic/fractional basis
    S = np.array(pdb.S)
    xyzc = np.dot(S, [pdb.x, pdb.y, pdb.z])

    ####################### enumerate hkls

    # get the maximum hkl indices possible for a resolution range
    hmax = int(1.0 / (dmin * np.linalg.norm(S[0]))) + 1
    kmax = int(1.0 / (dmin * np.linalg.norm(S[1]))) + 1
    lmax = int(1.0 / (dmin * np.linalg.norm(S[2]))) + 1

    # build cube in reciprocal space
    check_np(1, 6, 2)
    H, K, L = np.meshgrid(range(hmax), range(-kmax, kmax), range(-lmax, lmax))
    H = H.transpose(1, 0, 2).flatten()
    K = K.transpose(1, 0, 2).flatten()
    L = L.flatten()
    hkls = np.row_stack((H, K, L))

    # calculate resolution (inverse of recip lattice vector)
    dstar = np.dot(S.T, hkls)
    dstar = np.sqrt(np.sum(dstar * dstar, axis=0))
    d = 1.0 / np.where(dstar != 0.0, dstar, 1e10)

    # trim to sphere in recip space
    mask = (dmin <= d) & (d <= dmax)
    hkls = hkls[:, mask].T
    d = d[mask]
    stol2 = 1.0 / (4.0 * d * d)

    #######################

    # load Cromer-Mann coefficients from ITC tables vol C ch 6.1
    assert os.path.isfile('cm.pkl')
    with open('cm.pkl', 'rb') as _f:
        _A, _B, _C = pickle.load(_f)

    # calculate Cromer-Mann atomic scattering factors
    f0 = {}
    for e in set(pdb.e):
        f0[e] = _C[e]
        f0[e] += sum(_A[e][i] * np.exp(-_B[e][i] * stol2) for i in range(4))

    # direct summation while printing progress
    F = np.zeros(len(hkls), dtype=complex)
    p = ProgressBar(final=len(pdb), label='directsum', tail_label='{f:>10}')
    for xyzc, e, n, b in zip(xyzc.T, pdb.e, pdb.n, pdb.B):
        p.update()
        hx = np.sum(hkls * xyzc, axis=1)
        F += n * f0[e] * np.exp(-b * stol2) * np.exp(np.pi * 2j * hx)

    A = np.abs(F)
    phi = np.degrees(np.arctan2(F.imag, F.real))
    F = np.around(np.column_stack((hkls, d, A, phi)), 6)
    # F = F[(-F[:, 3]).argsort()]
    return F
Example #17
    def __init__(self,
                 pdb,
                 dmin=1.0,
                 g=0.333333333333,
                 rho_cutoff=0.01,
                 rpad=2,
                 Q=100.,
                 expand=False):

        self._expand = expand

        # inherit from Mixin
        self.enumerate_hkl = self._enumerate_hkl_numpy
        self.isabsent = self._isabsent_numpy

        # enable access to self.F[h, k, l] with slicing support
        self.F = self._make_FhklArray()

        check_np(1, 6, 2)  # for meshgrid

        V = self._V = pdb.calc_volume()
        S = self._S = np.array(pdb.calc_scale())
        SI = self._SI = np.array(pdb.calc_invscale())

        keys = pdb.R.keys()
        self._R = np.around([np.dot(np.dot(S, pdb.R[i]), SI) for i in keys], 4)
        self._T = np.around([np.dot(S, pdb.T[i]) for i in keys], 4)

        # baseline Biso and Ueq = Biso/(8*pi^2)
        sigma = 0.5 / g
        bbase = (np.log10(Q) * dmin * dmin) / (sigma * (sigma - 1))
        bmin = min(pdb.B)
        bsmear = bbase - bmin
        self._bsmear = bsmear

        # grid dimensions and build zeros
        gridf = np.floor(
            np.around(np.array(pdb.uc_abc, dtype=float) / 10.0) * 10.0 / g)
        gridi = gridf.astype(int)
        self._grid = gridi
        self._rho = np.zeros((gridi[0], gridi[1], gridi[2] + rpad))

        # initialise progress bar
        progressbar = ProgressBar(len(pdb),
                                  label='sampling',
                                  tail_label='{f:>10}')

        # load Cromer-Mann coefficient dictionaries from ITC tables vol C ch 6.1
        with open('cm.pkl', 'rb') as _f:
            _A, _B, _C = pickle.load(_f)

        A = {e: _A[e] + [_C[e]] for e in set(pdb.e)}
        B = {e: _B[e] + [0] for e in set(pdb.e)}

        # coordinates of point atoms
        xws = np.row_stack((pdb.x, pdb.y, pdb.z))
        xcs = np.dot(S, xws)
        xgs = np.round(xcs * gridf.reshape(3, 1)).astype(int)

        for e, xw, xc, xg, b, n in zip(pdb.e, xws.T, xcs.T, xgs.T, pdb.B,
                                       pdb.n):

            progressbar.update()

            # model electron density as inverse scattering factor
            b += bsmear
            prs = [
                n * A[e][i] * (4 * math.pi / (B[e][i] + b))**(3. / 2.)
                for i in range(5)
            ]
            exs = [4 * math.pi * math.pi / (B[e][i] + b) for i in range(5)]

            def density(dsq):
                return sum(prs[i] * np.exp(-exs[i] * dsq) for i in range(5))

            # get cutoff radius for the (1-rho_cutoff) isosurface
            dcut = 0
            rho0 = density(0.0)
            while density(dcut * dcut) / rho0 > rho_cutoff:
                dcut += g
            assert dcut > 0

            # cube limits in crystallographic and grid coordinates
            dmax_c = np.dot(S, [dcut, dcut, dcut])
            dmax_g = np.round(dmax_c * gridf).astype(int)

            # build cube
            X, Y, Z = np.meshgrid(*[range(-dg, dg) for dg in dmax_g])
            X = X.transpose(1, 0, 2).flatten()
            Y = Y.transpose(1, 0, 2).flatten()
            Z = Z.flatten()
            dgs = np.column_stack((X, Y, Z))

            # translate to atom centre and change coordinates
            rgs = dgs + xg
            rcs = rgs / gridf
            rws = np.dot(SI, rcs.T).T

            # distance^2 between atom and grid voxels
            dws = rws - xw
            dsqs = np.sum(dws * dws, axis=1)

            # trim edges of box exceeding radial cutoff
            mask = dsqs <= dcut * dcut
            dsqs = dsqs[mask]
            rgs = rgs[mask]
            rcs = rcs[mask]

            # bring grid points into unit cell and add electron density
            rgs = (rgs % gridf).astype(int)
            idx = tuple(rgs.T)

            self._rho[idx] += density(dsqs)

        # conj(fft()) is equivalent to ifft()
        self._F = np.conj(np.fft.fftpack.fftn(self._rho, s=self._grid))

        # volume/ngrid factor correction
        self._F *= self._V / np.prod(self._grid)
Example #18
            file.write('''set size ratio 1\n''')
            file.write(
                '''set title "Dynamic Self-Organising Map\\nNicolas Rougier & Yann Boniface"\n'''
            )
            file.write(
                '''set label "Self-organisation onto a cube surface" at screen .5, screen .1 center\n'''
            )
            file.write(
                '''set label "(http://www.loria.fr/~rougier/)" at screen .5, screen .065 center textcolor lt 3\n'''
            )
            file.write('''set view %d,%d\n''' % (rot_x, rot_z))
            file.write('''set terminal pngcairo size 512,512\n''')
            file.write('''set output '%s.png'\n''' % filename)
            #            file.write('''splot '%s' using 1:2:3, '%s' with point pt 6 lw .1\n''' % (datafile,datafile))
            file.write('''splot '%s' using 1:2:3\n''' % (datafile))
            file.close()
            file = open(datafile, 'w')
            C = net.codebook
            for x in range(C.shape[0]):
                for y in range(C.shape[1]):
                    file.write('%.3f %.3f %.3f\n' %
                               (C[x, y, 0], C[x, y, 1], C[x, y, 2]))
                file.write('''\n''')
            file.close()
            subprocess.call(['/usr/bin/gnuplot', plotfile])
        net.learn_data(samples[I[i]])
        bar.update(i)
    bar.finish()
#    os.system('''mencoder 'mf:///tmp/image*.png' -mf type=png:fps=25  -Ovc lavc -lavcopts \
#                 vcodec=mpeg4:vbitrate=2500  -oac copy -o cube.avi''')
Example #19
def parse_crux_search_txt(filename):
    """Iterate over records in a search.{target,decoy}.txt.

    Crux txt format files are tab-delimited with 30 fields*, described
    in the online documentation [1]. This function returns an iterator
    which yields a dictionary with the fields and their values.

    * 'decoy q-value (p-value)' is not output by Crux, at least as of v1.33.

    [1] http://noble.gs.washington.edu/proj/crux/txt-format.html

    Arguments:
       filename: Name of the crux search-for-matches output.

    Returns:
       Dictionary that maps field names to values. Only fields that
       are non-empty in the input exist in the returned dictionary.
       Many of the fields are not usually set in the output of crux
       search-for-matches, and will not be available.

    """
    fields = ['scan', # int
              'charge', # int
              'spectrum precursor m/z', # float
              'spectrum neutral mass', # float
              'peptide mass', # float
              'delta_cn', # float
              'sp score', # float
              'sp rank', # float
              'xcorr score', # float
              'xcorr rank', # int
              'p-value', # float
              'Weibull est. q-value', # float
              'decoy q-value (xcorr)', # float
              'percolator score', # float
              'percolator rank', # int
              'percolator q-value', # float
              'q-ranker score', # float
              'q-ranker q-value', # float
              'b/y ions matched', # int
              'b/y ions total', # int
              'matches/spectrum', # int
              'sequence', # string
              'cleavage type', # string
              'protein id', # string
              'flanking aa', # string
              'unshuffled sequence', # string
              'eta', # float
              'beta', # float
              'shift', # float
              'corr'] # float
    casts = [ int, int, float, float, float, float, float, float, float, int,
              float, float, float, float, int, float, float, float, int, int,
              int, str, str, str, str, str, float, float, float, float ]
    assert(len(fields) == len(casts))

    _mandatories = [ 'scan', 'charge', 'spectrum precursor m/z',
                     'spectrum neutral mass', 'xcorr score',
                     'xcorr rank', 'sequence' ]

    def conv(f, value):
        value = value.strip()
        if len(value):
            return f(value)

    def validate(record):
        return all(record.has_key(m) for m in _mandatories)

    widgets = [ Percentage(), Bar(), ETA() ]
    progress = ProgressBar(widgets = widgets,
                           maxval = os.path.getsize(filename)).start()

    with open(filename) as f:
        reader = csv.reader(f, delimiter='\t')
        # Header
        row = reader.next()
        if row != fields:
            raise ParseError('Header: ', filename, 1, ' '.join(row))
        # Body
        for row in reader:
            progress.update(f.tell())
            if len(row) != len(fields):
                raise ParseError('Line: ', filename, reader.line_num,
                                 ' '.join(row))

            # drop empty fields so the returned dict matches the docstring
            # and validate() can actually detect missing mandatory values
            r = dict((k, conv(c, x))
                     for k, c, x in zip(fields, casts, row) if x.strip())
            if r:
                if not validate(r):
                    raise ParseError('Missing: ', filename, reader.line_num,
                                     ' '.join(row))
                yield r

    progress.finish()
    sys.stdout.write('\n')
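
Since the parser is a generator, callers can stream records without loading the whole file. A minimal usage sketch in the same Python 2 style (the file name is illustrative):

# Stream records one at a time; only non-empty fields are present.
for record in parse_crux_search_txt('search.target.txt'):
    print record['scan'], record['charge'], record['xcorr score']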