Example #1
import json
import operator
import shutil
import subprocess

from Bio import motifs
from celery import Celery, group
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

from bed_operations.format_peakfile import convert_to_scorefile
from config_processor import read_config
from database import SqlAlchemyTask
from encode_peak_file_downloader import get_encode_peakfiles, get_metadata_for_peakfile
from peaks_processor_celery import run_conservation_analysis, run_motif_analysis, run_analysis
from query import (get_async_id, encode_job_exists, insert_encode_job, update_job_status,
                   insert_new_job, get_encode_metadata, get_filename, get_job_status,
                   job_exists, encode_job_status, get_encode_jobid, is_job_type_encode,
                   get_encode_from_jobid, get_all_encode_results)

# Load the JASPAR vertebrate position frequency matrices once at import time.
with open('../data/pfm_vertebrates.txt') as handle:
    jaspar_motifs = motifs.parse(handle, 'jaspar')

server_config = read_config('Server')
path_config = read_config('StaticPaths')


app = Flask(__name__)
app.config['CELERY_BROKER_URL'] = server_config['celery_broker_url']
app.config['CELERY_RESULT_BACKEND'] = server_config['celery_result_backend']
app.config['SQLALCHEMY_DATABASE_URI'] = server_config['sqlalchemy_database_uri']
app.config['CELERYD_MAX_TASKS_PER_CHILD'] = server_config['celery_max_tasks_per_child']
app.config['CELERY_IMPORTS'] = ('app',)
app.config['CELERYD_TASK_TIME_LIMIT'] = 1000000
app.url_map.strict_slashes = False


db = SQLAlchemy(app)
celery = Celery('app', broker=app.config['CELERY_BROKER_URL'])
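
The Celery instance above is created with only the broker URL; the result backend and worker settings placed in app.config are not yet applied to it. Below is a minimal sketch of the usual Flask/Celery wiring that would complete the setup; it is an assumption about how the rest of the application proceeds, not part of the original code.

# Sketch (assumption): push the Flask settings into Celery and run every task
# inside the application context so Flask extensions behave as in requests.
celery.conf.update(app.config)

TaskBase = celery.Task


class ContextTask(TaskBase):
    abstract = True

    def __call__(self, *args, **kwargs):
        with app.app_context():
            return TaskBase.__call__(self, *args, **kwargs)


celery.Task = ContextTask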
Example #2
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from config_processor import read_config
import celery

server_config = read_config('Server')

engine = create_engine(server_config['sqlalchemy_database_uri'],
                       convert_unicode=True)
db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()


def init_db():
    # Import models here so their tables are registered on Base.metadata
    # before create_all() runs.
    import models
    Base.metadata.create_all(bind=engine)


class SqlAlchemyTask(celery.Task):
    """An abstract Celery Task that ensures that the connection the the
    database is closed on task completion"""
    abstract = True

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        db_session.remove()
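
A task that uses SqlAlchemyTask as its base class would look roughly like the sketch below. The celery_app instance, the Job model, and mark_job_done are hypothetical names used only for illustration; the point is that after_return() removes the scoped session once the task finishes.

# Hypothetical usage sketch, not part of the original module.
from celery import Celery

from database import SqlAlchemyTask, db_session
from models import Job  # assumed model

celery_app = Celery('tasks', broker='redis://localhost:6379/0')


@celery_app.task(base=SqlAlchemyTask)
def mark_job_done(job_id):
    job = db_session.query(Job).get(job_id)
    job.status = 'done'
    db_session.commit()
    # SqlAlchemyTask.after_return() removes the scoped session afterwards.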
Example #3
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from config_processor import read_config
import celery

server_config = read_config("Server")


engine = create_engine(server_config["sqlalchemy_database_uri"], convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()


def init_db():
    import models

    Base.metadata.create_all(bind=engine)


class SqlAlchemyTask(celery.Task):
    """An abstract Celery Task that ensures that the connection the the
    database is closed on task completion"""

    abstract = True

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        db_session.remove()
Example #4
from Bio import motifs
import glob
import logging
import sys
import os
import subprocess
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from random import seed
from random import randint
import shutil
import pandas
from config_processor import read_config
import json

binary_config = read_config('Binaries')
software_location = {
    'calc_cons': binary_config['conservation_calculator'],
    'plotter': binary_config['plotter'],
    'meme': binary_config['meme'],
    'fimo2sites': binary_config['fimo2sites'],
    'fimo': binary_config['fimo'],
    'extract_chunk': binary_config['extract_chunk'],
}

error_messages = {
    k: 'Error executing {}'.format(k)
    for k in software_location.keys()
}

param_config = read_config('Parameters')

FLANKING_SEQ_LENGTH = int(param_config['flanking_seq_length'])
ENRICHMENT_SEQ_LENGTH = int(param_config['enrichment_seq_length'])
MOTIF_FLANKING_BASES = int(param_config['motif_flanking_bases'])
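
The binary paths and error messages defined above are presumably consumed when the pipeline shells out to the external tools. A hedged sketch of such a call is shown below; run_tool and its argument handling are illustrative assumptions, not the project's actual helper.

def run_tool(key, *args):
    """Run one of the configured binaries, logging a readable error on failure."""
    cmd = [software_location[key]] + [str(a) for a in args]
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as exc:
        logging.error('%s (exit code %d)', error_messages[key], exc.returncode)
        raise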