Example #1
def play():
    """Run Tetris games, optionally headless (via Xvfb) and optionally driven by a Keras model."""
    env_display = os.environ.get("DISPLAY", "")  # remember the current display so it can be restored on exit
    if not args.graphics:
        print("Disable display ...")
        os.environ["DISPLAY"] = ":99"
        if "Xvfb" not in (p.name() for p in psutil.process_iter()):
            print("Starting Xvfb service on display port :99 ...")
            os.system("Xvfb :99 &")

    if args.noai:
        model = None
    else:
        if not args.new and os.path.exists('tetris.model'):
            model = keras.models.load_model('tetris.model')
            print("Loaded model from disk")
        else:
            model = Sequential()
            model.add(Dense(100, input_shape=(224,), activation='relu'))
            model.add(Dense(100, activation='relu'))
            model.add(Dense(6))  # number of actions
            model.compile(loss='mse', optimizer='adam', metrics=['mae'])
        model.summary()  # summary() prints the architecture itself and returns None
        plot_model(model, show_shapes=True, to_file='model.png')

    autoplay = autop.Autoplay(model=model, verbose=args.verbose, training=args.train,
                              graphics=False, speedup=True)

    print("Start!")
    num_episodes = 1000
    try:
        for i in range(num_episodes):
            start_time = time.time()
            if args.verbose:
                matris.start_game(autoplay)
            else:
                with utils.suppress_stdout_stderr():
                    matris.start_game(autoplay)
            stop_time = time.time()
            print("{2}. Tetris game finished!\tScore: {0}\tTime (s): {1:.1f}\tLoss: {3}".
                  format(autoplay.score,
                         stop_time - start_time,
                         i+1,
                         autoplay.loss))
    except KeyboardInterrupt:
        print("Interrupt games!")

    if args.train:
        model.save('tetris.model')
        print("Saved model to disk")

    if not args.graphics:
        print("Reset display ...")
        os.environ["DISPLAY"] = env_display

    print("Done!")
Example #2
def bayesian_demo():
    from simulation import wright_fisher

    for selection_strength in (0, 0.05, 0.2):
        d = wright_fisher(1000, 50, selection_strength) / 1000
        time = np.arange(1, d.shape[0] + 1)
        with suppress_stdout_stderr():
            post = bayesian_frequency_increment_test(time, d)
        print(
            f"Bayesian t-test: strength={selection_strength}, mu={post['mu']:.3f}, "
            f"CI: [{post['lci']:.2f}, {post['uci']:.2f}]")
Example #3
def get_tree(files, branch_dict, tree_name='CollectionTree', max_events=None):
    """Applies root_numpy to get out a numpy array"""
    # Convert the files
    try:
        with suppress_stdout_stderr():
            tree = rnp.root2array(files, treename=tree_name,
                                  branches=list(branch_dict.keys()), stop=max_events,
                                  warn_missing_tree=True)
    except IOError as e:
        print('WARNING: root2array gave an IOError:', e)
        return None

    # Rename the branches
    tree.dtype.names = tuple(branch_dict.values())
    return tree
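
branch_dict plays two roles here: its keys select the branches to read and its values become the column names of the returned array. A hypothetical call (file and branch names invented for illustration) might be:

# Hypothetical usage of get_tree; 'lep_pt'/'lep_eta' and sample.root are made up.
branch_dict = {
    'lep_pt': 'pt',
    'lep_eta': 'eta',
}
tree = get_tree(['sample.root'], branch_dict, max_events=1000)
if tree is not None:
    print(tree.dtype.names)  # ('pt', 'eta') after renaming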
Example #4
def get_data(files, branch_dict, **kwargs):
    """Applies root_numpy to get out a numpy array"""
    import root_numpy as rnp
    try:
        with suppress_stdout_stderr():
            tree = rnp.root2array(files,
                                  branches=list(branch_dict.keys()),
                                  warn_missing_tree=True,
                                  **kwargs)
    except IOError as e:
        logging.warning('root2array gave an IOError: %s', e)
        return None
    # Convert immutable structured array into dictionary of arrays
    data = dict()
    for (oldkey, newkey) in branch_dict.items():
        data[newkey] = tree[oldkey]
    return data
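
In contrast to get_tree above, get_data copies each branch out of the structured array and returns a plain dict keyed by the new names, with any extra keyword arguments forwarded to root2array. A hypothetical call (file, tree, and branch names invented for illustration):

# Hypothetical usage of get_data; treename and stop are passed through to root2array.
data = get_data(['sample.root'], {'lep_pt': 'pt', 'lep_eta': 'eta'},
                treename='CollectionTree', stop=500)
if data is not None:
    print(data['pt'][:5])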
Example #5
#!/usr/bin/python3

import os
import sys
import argparse
import requests
import multiprocessing
import warnings
import numpy as np
import networkx as nx
import pickle as pkl
import progressbar
import logging
from utils import suppress_stdout_stderr

with suppress_stdout_stderr():
    import vmd  # suppress annoying warnings from plugins

import biograph.graph_models as graph_models
from biograph.downloader import PdbDownloader, ConsurfDBDownloader
from biograph.protein import Protein
from biograph.structure import Perseus
from biograph.groupfolds import CDHitGroup

# Parse command line arguments
parser = argparse.ArgumentParser(description='Generate dataset for protein graph analysis.')
parser.add_argument('--skip-pdb-download', dest='skip_pdb_download', action='store_true',
                    help='skip PDB download')
parser.add_argument('--no-consurf', dest='no_consurf', action='store_true',
                    help='do not add conservation (consurf DB) data')
parser.add_argument('--laptop-safe', dest='laptop_safe', action='store_true',