Example #1
def main(fragment_file, lead_file):
    fragment_mols = read_file(fragment_file)
    lead_mols = read_file(lead_file)
    fragment_mols += lead_mols

    logging.info("Read %s molecules for fragmentation library",
                 len(fragment_mols))
    logging.info("Read %s lead moleculs", len(lead_mols))

    fragments, used_mols = get_fragments(fragment_mols)
    logging.info("Num fragments: %s", len(fragments))
    logging.info("Total molecules used: %s", len(used_mols))
    assert len(fragments)
    assert len(used_mols)
    encodings, decodings = get_encodings(fragments)
    save_decodings(decodings)
    logging.info("Saved decodings")

    lead_mols = np.asarray(
        fragment_mols[-len(lead_mols):])[used_mols[-len(lead_mols):]]

    X = encode_list(lead_mols, encodings)

    logging.info("Building models")
    actor, critic = build_models(X.shape[1:])

    X = clean_good(X, decodings)

    logging.info("Training")
    history = train(X, actor, critic, decodings)
    logging.info("Saving")
    np.save("History/history.npy", history)
Example #2
def main(fragment_file, lead_file):
    fragment_mols = read_file(fragment_file)
    lead_mols = read_file(lead_file)
    fragment_mols += lead_mols

    logging.info("Read %s molecules for fragmentation library",
                 len(fragment_mols))
    logging.info("Read %s lead molecules", len(lead_mols))

    fragments, used_mols = get_fragments(fragment_mols)
    logging.info("Num fragments: %s", len(fragments))
    logging.info("Total molecules used: %s", len(used_mols))
    assert len(fragments)
    assert len(used_mols)
    # =============================================================================
    #    encodings, decodings = get_encodings(fragments)
    #    save_encodings(encodings)
    #    save_decodings(decodings)
    #    logging.info("Saved encodings and decodings")
    # =============================================================================
    lead_mols = np.asarray(
        fragment_mols[-len(lead_mols):])[used_mols[-len(lead_mols):]]

    decodings = read_decodings()
    encodings = read_encodings()
    logging.info("Loaded encodings and decodings")

    X = encode_list(lead_mols, encodings)
    #print(X.shape)
    if X.shape[0] == 0:
        return -1
    logging.info("Building models")
    actor, critic = build_models(X.shape[1:])

    # X = clean_good(X, decodings)
    # logging.info("Remaining molecules after clean good: %s",X.shape[0])

    if X.shape[0] == 0:
        return -1
    logging.info("Training")
    history = train(X, actor, critic, decodings)
    logging.info("Saving")
    np.save("History/history.npy", history)
    actor.save('./saved_models/generation')
    critic.save('./saved_models/critic')
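If build_models returns Keras models (an assumption; the examples do not show its implementation), the artifacts written by actor.save and critic.save above can later be restored with the standard Keras loader:

from tensorflow import keras

# Reload the models saved above; custom layers or losses would additionally
# require the custom_objects argument.
actor = keras.models.load_model("./saved_models/generation")
critic = keras.models.load_model("./saved_models/critic")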
Example #3
def main(fragment_file, lead_file):
    fragment_mols = read_file(fragment_file)
    lead_mols = read_file(lead_file)
    fragment_mols += lead_mols


    fragments, used_mols = get_fragments(fragment_mols)
    encodings, decodings = get_encodings(fragments)
    save_decodings(decodings)

    lead_mols = np.asarray(fragment_mols[-len(lead_mols):])[used_mols[-len(lead_mols):]]

    X = encode_list(lead_mols, encodings)

    actor, critic = build_models(X.shape[1:])

    X = clean_good(X, decodings)

    history = train(X, actor, critic, decodings)

    np.save("History/history.npy", history)
Example #4
app.secret_key = config["flask"]["secret_key"]
app.config['SQLALCHEMY_DATABASE_URI'] = config["app"]["database_uri"]
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['DEBUG'] = os.environ.get('FLASK_DEBUG', False)
if isinstance(app.config['DEBUG'], str):
    app.config['DEBUG'] = app.config['DEBUG'].lower() == "true"
if app.config["DEBUG"]:
    app.debug = True
    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.DEBUG)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)
log = app.logger
app.config['TEMPLATES_AUTO_RELOAD'] = True
db = SQLAlchemy(app)
migrate = Migrate(app, db)
models = build_models(db)
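# Expose each model class returned by build_models as a module-level name.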
for m in models:
    globals()[m.__name__] = m

goodreads = OAuth1Service(
    consumer_key=config["goodreads"]["key"],
    consumer_secret=config["goodreads"]["secret"],
    name='goodreads',
    request_token_url='https://www.goodreads.com/oauth/request_token',
    authorize_url='https://www.goodreads.com/oauth/authorize',
    access_token_url='https://www.goodreads.com/oauth/access_token',
    base_url='https://www.goodreads.com/')
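# Sketch (not part of the original app): the usual rauth OAuth 1.0a flow with
# the service defined above. Only get_request_token, get_authorize_url and
# get_auth_session are rauth's actual API; the variable names and the redirect
# handling are assumptions.
#
#   request_token, request_token_secret = goodreads.get_request_token()
#   authorize_url = goodreads.get_authorize_url(request_token)
#   # ... redirect the user to authorize_url and wait for the callback ...
#   goodreads_session = goodreads.get_auth_session(request_token,
#                                                  request_token_secret)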


@app.route("/")
def index():
Example #5
        "beeminder": {
            "client_id": os.environ["BEEMINDER_CLIENT_ID"],
            "client_secret": os.environ["BEEMINDER_CLIENT_SECRET"]
        },
        "flask": {
            "secret_key": os.environ["FLASK_ENCRYPTION_KEY"]
        }
    }
else:
    with open('config.yaml', 'r') as f:
        config = yaml.safe_load(f)

app.secret_key = config["flask"]["secret_key"]
app.config['SQLALCHEMY_DATABASE_URI'] = config["app"]["database_uri"]
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
models = build_models(db)
User = models["User"]


@app.route("/")
def index():
    if "todoist_id" in session:
        config["beeminder_url"] = urlparse.urljoin(
            request.url_root, url_for('beeminder_oauth'))
        existing = User.query.filter_by(
            todoist_id=session["todoist_id"]).first()
        if existing is None:
            del session["todoist_id"]
            return redirect(url_for("index"))
        return render_template('index.html', data=existing, **config)
    else:
Example #6
def save_json(data, ddir, fname):
    with open(os.path.join(ddir, fname), 'w') as f:
        json.dump(data, f, indent=4)


if __name__ == '__main__':

    parser = argparse.ArgumentParser()
    params = fetch_args(parser)

    if params['vis']:
        params['split'] = 'val_easy'
        dataset = PlotDataset(params, 'val_easy')

        extra_params = get_extra_params(dataset)
        models = build_models(params, extra_params)
        visualize_model(models, dataset, params, extra_params,
                        int(params['vis_idx']))
        exit()  # Abrupt exit

    if params['split'] == 'train':

        train_dataset = PlotDataset(params, 'train')
        val_dataset = PlotDataset(params, 'val_easy')

        extra_params = get_extra_params(train_dataset)

        save_json(params, params['checkpoint_path'], 'params.json')
        save_json(extra_params, params['checkpoint_path'], 'extra_params.json')

        models = build_models(params, extra_params)
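fetch_args is a project helper that is not shown in these examples. Judging from how params is indexed above ('vis', 'vis_idx', 'split', 'checkpoint_path'), a hypothetical minimal version might register those options and return them as a dict:

def fetch_args(parser):
    # Hypothetical reconstruction; the real helper may define many more options.
    parser.add_argument('--split', default='train',
                        help="dataset split to run on ('train', 'val_easy', ...)")
    parser.add_argument('--checkpoint_path', default='checkpoints',
                        help="directory where params.json and checkpoints are written")
    parser.add_argument('--vis', action='store_true',
                        help="visualize a trained model instead of training")
    parser.add_argument('--vis_idx', type=int, default=0,
                        help="index of the sample to visualize")
    return vars(parser.parse_args())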