Exemple #1
0
def findLocalTranches():
    '''Mark tranche files already present on this webserver as locally available.

	Walks every row of the ``tranches`` table and, when the tranche file exists
	under LOCAL_ZINC on disk, sets ``local = 1`` on the ORM record so the file
	can be offered as an alternate download (offloading ZINC bandwidth).
	We don't need this yet, unless ZINC gets overloaded and wants us to offload bandwidth.
	'''
    from app.core import create_app

    # `debug` is a module-level flag set by the CLI wrapper at the bottom of
    # this file -- TODO confirm it is always defined before this runs.
    app = create_app(debug=debug)

    with app.app_context():

        print 'looking for cached ligand tranches'
        query = text('''
			SELECT * from tranches
		''')
        rows = db.engine.execute(query)
        # rows = [r for r in rows]

        for row in rows:
            print row.urlPath
            localPath = os.path.join(LOCAL_ZINC, row.urlPath)
            if os.path.exists(localPath):
                print 'Found! ', localPath
                # Rows returned by raw engine.execute() are read-only, so we
                # cannot just set row.local = 1 here; fetch the ORM object and
                # mutate that instead (the commit below persists all of them).
                tranche = getTranche(row.trancheID)
                tranche.local = 1

        db.session.commit()
Exemple #2
0
def fetchImportantTranches():
    '''Download tranches that have lots of FDA/World/Whatever molecules in them.

	Runs one query per "special" subset table; each query orders tranches by
	their drug count (descending) so the most valuable files download first.
	'''
    from app.core import create_app
    app = create_app(debug=debug)
    with app.app_context():

        def handleQuery(query):
            # Rows arrive ordered by numDrugs descending, so the first row
            # below the WEEDS threshold means all remaining rows are too.
            rows = db.engine.execute(query)
            for row in rows:
                if row.numDrugs < WEEDS:
                    print 'Down in the weeds .... exiting here'
                    break

                # Constructing the reader downloads/caches the tranche file as
                # a side effect; the instance itself is not used afterwards.
                TrancheReader(0, row.urlPath, localCache=DOWNLOAD_PATH)

        query = text(
            'select * from FDATranches join tranches USING(trancheName) order by numDrugs desc;'
        )
        handleQuery(query)
        query = text(
            'select * from WorldTranches join tranches USING(trancheName) order by numDrugs desc;'
        )
        handleQuery(query)
        query = text(
            'select * from InManTranches join tranches USING(trancheName) order by numDrugs desc;'
        )
        handleQuery(query)
Exemple #3
0
def scan(args):
    # Scan gzipped tranche listing files under args.dir and load their
    # contents into the database, committing every 100 files.

    pbar = tqdm.tqdm(
        total=39466)  # determined on commandline after rsyncing ....

    # Debug unless explicitly running against production.
    debug = True
    if args.production: debug = False

    App = create_app(debug=debug)

    with App.app_context():
        db.create_all()

        count = 0
        # Tranche listings live two directories deep with 6-char file stems.
        for txtFile in glob.iglob(args.dir + '/*/*/??????.txt.gz'):
            path, filename = os.path.split(txtFile)
            #print txtFile
            #print path, filename

            # NOTE(review): tranche name derived from the file stem, but this
            # local is never used below -- presumably leftover; verify.
            tranche = filename.replace('.txt.gz', '')

            with gzip.open(txtFile, 'r') as f:
                # Files under a /special/ directory use a different format.
                if '/special/' in txtFile: processSpecialFile(f)
                else: process3Dfile(f)

            pbar.update()
            count += 1

            if count % 100 == 0:
                print count
                db.session.commit()  # after every N files

        db.session.commit()  # finish
Exemple #4
0
def test_client():
    """Yield a Flask test client with an application context pushed for the
    duration of the test, popping it afterwards."""
    flask_app = create_app('flask_test.cfg')
    client = flask_app.test_client()
    app_ctx = flask_app.app_context()
    app_ctx.push()
    yield client
    app_ctx.pop()
def scanAndInsert():
    """Validate downloaded docking result logs, publish each trajectory and
    logfile to the results-hosting area, and mark the matching job uploaded."""
    from app.core import create_app

    app = create_app(debug=debug)

    with app.app_context():
        #db.create_all()

        # In debug mode, seed a handful of dummy jobs so there is something
        # to match results against.
        if debug:
            for n in range(10):
                j = Job()
                j.receptor = 'mpro-1'
                db.session.add(j)
                db.session.commit()

        for filePath in glob.glob(os.path.join(RESULTS_STORAGE, '*.dlg.gz')):
            path, fileName = os.path.split(filePath)
            good = validateLogFile(filePath)
            # The job ID is encoded in the file name: "<jobID>.dlg.gz".
            jobID = int(fileName.replace('.dlg.gz', ''))
            if not good:
                print filePath, ' is not good'
                continue

            job = getJob(jobID)
            if not job:
                print 'Cannot find job ', jobID
                continue

            #results = parseLogfile(fileName)

            try:
                p = LogParser(filePath)
            except Exception as e:
                # Unparseable log: skip it and leave the job unmarked so it
                # can be retried later.
                print e
                continue

            # Write the compressed trajectory to the hosting area.
            outFile = os.path.join(RESULTS_HOSTING, '%d.traj.pdbqt.gz' % jobID)
            p.saveTrajectory(outFile, compress=True)

            # move logfile to hosting area so researchers can download it
            dst = os.path.join(RESULTS_HOSTING, fileName)
            shutil.move(filePath, dst)

            job.uploaded = True
            db.session.commit()
Exemple #6
0
    def setUp(self):
        """Per-test fixture: build the testing app, push its context, disable
        CSRF checking, create the schema, and attach a test client configured
        to propagate exceptions instead of returning error pages."""
        testing_app = create_app('testing')
        self.app = testing_app

        context = testing_app.app_context()
        self.app_context = context
        context.push()

        testing_app.config['WTF_CSRF_ENABLED'] = False
        db.create_all()

        client = testing_app.test_client()
        client.testing = True
        self.client = client
Exemple #7
0
def app(request):
    """
    :param Request request:
    :return Flask:
    """
    flask_app = create_app()

    # Push an application context for the whole test session; pop it when the
    # requesting test (or session) finishes.
    ctx = flask_app.app_context()
    ctx.push()
    request.addfinalizer(ctx.pop)

    return flask_app
Exemple #8
0
def scanAllTranches():
    '''Scan all 3D trancheFiles and write the ligands discovered to a txt file, for later import into SQL'''
    from app.core import create_app
    app = create_app(debug=debug)

    outFile = open('trancheMappings.txt', 'w')

    class FastTrancheReader(TrancheReader):
        def download(self):
            return  # newp!

        def scan(self, outFile, fileID):
            lines = []
            for line in self.fh:
                if line.startswith('REMARK') and 'Name' in line:
                    zincID = line.replace('REMARK',
                                          '').replace('Name',
                                                      '').replace('=',
                                                                  '').strip()
                    zincID = int(zincID.replace('ZINC', ''))
                    #outFile.write('%s\t%s\n' % (str(zincID), fileID))
                    lines.append('%s\t%s' % (str(zincID), fileID))
            outFile.write('\n'.join(lines) + '\n')

    with app.app_context():
        # get trancheFiles and their integer IDs
        query = text('SELECT * FROM trancheFiles;')
        trancheFiles = db.engine.execute(query)
        trancheFiles = [tf for tf in trancheFiles]

        for trancheFile in tqdm(trancheFiles):
            if trancheFile.fileID < 42818: continue
            try:
                TR = FastTrancheReader(0,
                                       trancheFile.urlPath,
                                       localCache=DOWNLOAD_PATH)
                TR.scan(outFile, trancheFile.fileID)
            except Exception as e:
                print e.message
                #raise

            #outFile.flush()
    outFile.flush()
    outFile.close()
Exemple #9
0
def app(request):
    """Yield an application built under patched test configuration; the app
    context is popped again when the requesting test finishes."""
    overrides = dict(
        DEBUG=True,
        TESTING=True,
        IS_LOCAL=True,
        BASIC_AUTH_USERNAME="******",
        BASIC_AUTH_PASSWORD="******",
        CACHE_TYPE="SimpleCache",
    )
    with patch.multiple(CONFIG, **overrides):
        _app = create_app(CONFIG)

        ctx = _app.app_context()
        ctx.push()
        request.addfinalizer(ctx.pop)

        yield _app
def drawNeededLigands():
	'''Render images for the top-ranked ligands.

	First draws the 1000 best-scoring FDA-subset jobs overall, then the 1000
	best ligands per receptor (ordered by bestDG in both cases).
	'''
	from app.core import create_app
	app = create_app(debug=debug)
	with app.app_context():

		print 'looking for top ligands to draw'

		query = text('''
		select *, group_concat(subsetName) as subsets
		FROM jobs
		join zincLigands using(zincID)
		join zincToSubset using (zincID)
		join zincSubsets using (subset)
		LEFT JOIN users USING(user)
		LEFT JOIN tranches using(trancheID)
		WHERE subsetName IN ('fda')
		group by jobID
		order by bestDG 
		LIMIT 1000
		;''')
		rows = db.engine.execute(query)
		# rows = [r for r in rows]

		drawRows(rows)


		# Same idea per receptor: best 1000 distinct ligands by bestDG.
		for receptor in ALL_RECEPTORS:

			query = text('''
			select * FROM jobs
			join zincLigands using(zincID)
			WHERE receptor=:receptor
			group by zincID
			order by bestDG 
			LIMIT 1000
			;''')
			rows = db.engine.execute(query, receptor=receptor)
			drawRows(rows)
Exemple #11
0
def load3DTranches():
    """ Init db with information on tranches - tested with full 3D set only """

    from app.core import create_app

    app = create_app(debug=debug)

    with app.app_context():
        db.create_all()
        # Each line of the downloader file is a tranche URL like
        # http://files.docking.org/<dirs>/<trancheCode>.pdbqt.gz
        with open('ZINC-downloader-3D-pdbqt.gz.uri', 'r') as tracheList:
            for line in tracheList:
                line = line.rstrip()
                urlPath = line.replace('http://files.docking.org/', '')
                fileName = urlPath.split('/')[-1]

                print line, urlPath, fileName

                t = Tranche()
                t.urlPath = urlPath
                t.fileName = fileName

                t.lastAssigned = 0
                t.loopCount = 0
                t.ligandCount = None  # unknown until we figure it out

                # The first six characters of the file name encode one tranche
                # property each -- assumes standard ZINC tranche naming; TODO
                # confirm against the downloader file actually used.
                t.weight = fileName[0]
                t.logP = fileName[1]
                t.reactivity = fileName[2]
                t.purchasibility = fileName[3]
                t.pH = fileName[4]
                t.charge = fileName[5]

                t.subset = '3D'  # "all"

                print t
                db.session.add(t)
            db.session.commit()  # faster out of loop
"""Entry CLI module for Sales System"""

import click
from app.core import create_app

# Single application instance shared by all CLI commands below.
app = create_app('app.settings')


@click.group()
def main():
    # Root click command group; subcommands register via @main.command().
    # (Comment rather than docstring: click would surface a docstring as help text.)
    pass


@main.command()
def createdb():
    # Initialize the database schema.
    app.system.InitializeDb()


@main.command()
def run():
    # Serve on all interfaces, port 8000.
    app.run(host='0.0.0.0', port=8000)


if __name__ == '__main__':
    main()
Exemple #13
0
import os
import unittest

from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager

from app import blueprint
from app.core import create_app, db

# MODELS IMPORT START
from app.user.model import User
# MODELS IMPORT END

# Configuration name comes from the environment, defaulting to 'dev'.
app = create_app(os.getenv('BOILERPLATE_ENV') or 'dev')
app.register_blueprint(blueprint)

# Push an app context so db and friends work at module level (CLI usage).
app.app_context().push()

manager = Manager(app)

migrate = Migrate(app, db)

manager.add_command('db', MigrateCommand)


@manager.command
def run():
    # Start the Flask development server.
    app.run()


@manager.command
Exemple #14
0
def loadSpecialTranches():
    """Load "special" (curated subset) tranches into the Tranche table.

	decided to make this a seperate method to avoid making a mistake on production DB

	This was a cheap way to convert wget-d zincDB special directories :p

	find files.docking.org/special/ | grep pdbqt.gz > ~/specialTranches.txt
	 ....
	files.docking.org/special/current/fda/tranches/AJ/xaaa-AJ.ref.pdbqt.gz
	files.docking.org/special/current/fda/tranches/AJ/xaaa-AJ.mid.pdbqt.gz
	files.docking.org/special/current/fda/tranches/AB/xaaa-AB.ref.pdbqt.gz
	files.docking.org/special/current/fda/tranches/AB/xaaa-AB.mid.pdbqt.gz

	"""

    from app.core import create_app

    app = create_app(debug=debug)

    with app.app_context():
        db.create_all()

        with open('specialTranches.txt', 'r') as tracheList:
            for line in tracheList:
                assert 'special' in line

                line = line.rstrip()
                urlPath = line.replace(
                    'files.docking.org/',
                    '')  # no http:// as before in the 3D tranche importer

                dirs = urlPath.split('/')
                fileName = dirs[-1]

                print line, urlPath, fileName

                # Path layout: special/<release>/<subset>/tranches/<WL>/<file>
                assert dirs[0] == 'special'
                # skipping dirs[1] which is usually 'current'
                subset = dirs[2]
                assert subset != '3D'

                t = Tranche()
                t.urlPath = urlPath
                t.fileName = fileName

                t.lastAssigned = 0
                t.loopCount = 0
                t.ligandCount = None  # unknown until we figure it out

                # Unlike 3D tranches, the file name does NOT encode the six
                # tranche properties here.

                # the first two tranche divisons appear to be stored in the last subdir
                weightLogP = dirs[-2]
                t.weight = weightLogP[0]
                t.logP = weightLogP[1]

                t.subset = subset

                print t
                db.session.add(t)
            db.session.commit()  # faster out of loop
Exemple #15
0
# -*- coding: utf8 -*-
from app.core import create_app

# Build the app once, whether this module is imported (WSGI) or run directly.
app = create_app()

if __name__ == "__main__":
    # Development entry point: serve using host/port from the app config.
    app.run(host=app.config['HOST'], port=app.config['PORT'],
        debug=app.config.get('DEBUG', True))
Exemple #16
0
from app.core import create_app
app = create_app('flask.cfg')
# NOTE(review): `parser` is not defined in the visible lines -- presumably an
# argparse.ArgumentParser created elsewhere in this file; verify before use.
parser.add_argument('-debug', action='store_true')


class MyRequestHandler(WSGIRequestHandler):
    # Suppress logging of poll requests every dang second ...
    def log_request(self, code='-', size='-'):
        # Drop log lines for the client's polling endpoint; everything else is
        # logged by the parent class as usual.
        if 'GET /client/update.json' in self.raw_requestline: return
        super(MyRequestHandler, self).log_request(code, size)


args = parser.parse_args()

# -debug flag on the command line enables Flask debug mode.
debug = False
if args.debug: debug = True

App = create_app(debug)
print 'Debug state: ', App.debug

if not debug:
    from app.initializers.secrets import SENTRY_DSN
    #sentry = Sentry(App, dsn=SENTRY_DSN)               # this stopped working under uWSGI
# Best-effort Sentry setup: failure to initialize error reporting should not
# stop the server from coming up.
try:
    sentry = Sentry(App)
except Exception as e:
    print e


def main():
    # NOTE(review): this function appears truncated by extraction -- `host` is
    # assigned but never used in the visible lines; confirm against the
    # original file before relying on it.

    host = '127.0.0.1'  # host of 0.0.0.0 makes debug server visible over network! Use sparingly
def app():
    """Yield an application configured for tests with an initialized DB."""
    test_app = create_app('app.tests.settings')  #pylint: disable=redefined-outer-name
    test_app.system.InitializeDb()

    yield test_app
                t.rules.append(r)
                LOGGER.info("Rule %s loaded", r.name)
            else:
                LOGGER.warning("Associated term %s not found for rule %s",
                               row['term'], row['name'])

            db.session.commit()


if __name__ == "__main__":
    LOG_FORMAT = "%(asctime)-15s [%(levelname)s] %(message)s"
    logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

    LOGGER.info("Load process started")

    # Config profile from the environment, falling back to 'default'.
    app = create_app(os.getenv('BG_CONFIG') or 'default')
    app.app_context().push()
    app.config['SQLALCHEMY_ECHO'] = False

    # Interface files are placed in a directory name bg_interface at the same level
    # as the application directory, i.e.
    #
    #   - bg_interface
    #   - business_glossary
    #
    # NOTE(review): the original comment said "call os.path.dirname twice",
    # but the code below calls it once -- verify which is intended.

    FILE_PATH = os.path.join(os.path.dirname(BASE_DIR), 'bg_interface')

    # Rebuild the schema from scratch on every load run.
    db.drop_all(app=app)
    db.create_all(app=app)
Exemple #20
0
from app.core import create_app

# WSGI entry point: servers import this module and look up `application`.
application = create_app()
from app.core import create_app, db

# Expose the app under both conventional names (`app` and `application`).
app = application = create_app()
Exemple #22
0
# -*- coding: utf8 -*-
from app.core import create_app

app = create_app()  # module-level so WSGI servers can import `app`

if __name__ == "__main__":
    # Run the development server with host/port taken from the app config.
    cfg = app.config
    app.run(host=cfg['HOST'],
            port=cfg['PORT'],
            debug=cfg.get('DEBUG', True))
Exemple #23
0
def assembleSpecialTranche(subset='fda', allTranches=False):
    """Build a single gzipped pdbqt tranche containing every ligand of one
    subset, by streaming through the cached tranche files and copying out the
    models whose zincID belongs to the subset.

    subset      -- subset name to assemble (special case: 'chembl')
    allTranches -- scan every cached tranche on disk instead of only the
                   tranches known (via the DB) to contain subset members
    """
    from app.core import create_app
    app = create_app(debug=debug)
    with app.app_context():
        # Collect the set of zincIDs we need to find.
        if subset == 'chembl':
            query = text('select * from chembl2zinc;')
        else:
            query = text('''select zincID from zincToSubset
				join zincSubsets using(subset)
				where subsetName=:subset
			;''')
        rows = db.engine.execute(query, subset=subset)
        zincIDs = set([r.zincID for r in rows])
        print 'Number of zincIDs to find : ', len(zincIDs)

        # now parse the tranches sequentially
        if allTranches:
            #query = text('''select * from tranches where subset='3D';''')
            gg = glob.iglob(SCANPATH + '*/*/*/*.pdbqt.gz')
            paths = [fp.replace(SCANPATH, '') for fp in gg]
        else:
            # Only tranches that actually contain subset members, densest first.
            query = text('''
				select trancheName, urlPath, count(*) as numDrugs
				from zincLigands join zincToSubset using(zincID) join zincSubsets using(subset)
				JOIN tranches using(trancheName)
				where subsetName=:subset
				group by trancheName
				order by numDrugs desc
			;''')
            rows = db.engine.execute(query, subset=subset)
            paths = [r.urlPath for r in rows]

        outTranche = gzip.open('%s_special.pdbqt.gz' % subset, 'w')

        hitNum = 0

        for urlPath in tqdm(paths):
            try:
                TR = TrancheReader(0, urlPath, localCache=DOWNLOAD_PATH)
            except Exception as e:
                # Missing/corrupt tranche: report and move on.
                print e.message
                continue
            # Walk models sequentially until the reader runs out.
            modelNum = 1
            while True:
                try:
                    zincID, model = TR.getModel(modelNum)
                except StopIteration:
                    break
                zincID = int(zincID.replace('ZINC', ''))
                modelNum += 1
                if zincID in zincIDs:
                    # Renumber models in the output so they stay sequential.
                    hitNum += 1
                    outTranche.write('MODEL        %s\n' % str(hitNum))
                    outTranche.write(model)
                    outTranche.write('\nENDMDL\n')

        outTranche.close()

        print 'Found %s out of %s ligands' % (hitNum, len(zincIDs))
import logging
import yaml
import argparse
import os
from os.path import dirname, join, isfile

from app.core import create_app
from app.config import config

# Bugfix: app_context().push() returns None, so the previous one-liner
# (`application = create_app('production').app_context().push()`) left
# `application` bound to None -- useless as a WSGI entry point. Keep the app
# object and push its context as a separate step.
application = create_app('production')
application.app_context().push()

# NOTE: these imports are kept after the context push to preserve the original
# import order -- they may rely on an active app context.
from app.main.models import Rule, Note
from app.loader import load_yaml
from app.extensions import db

LOGGER = logging.getLogger("business-glossary.load_data")


def add_rule_note(rule_note):
    '''Add a rule note'''

    #########################################################
    ## Add the rule note
    #########################################################

    record = Note(**{'note': rule_note['rule_note']})

    db.session.add(record)

    #########################################################
    ## Now associate with a rule
Exemple #25
0
from app.core import create_app

if __name__ == '__main__':
    # Development server: network-visible, threaded, with debug enabled.
    server = create_app()
    server.run(debug=True, host='0.0.0.0', threaded=True)