def get_metadata():
    project_config = read_project_config()
    results_obj = {
        'uniprot_query': project_config['uniprot_query'],
        'uniprot_domain_regex': project_config['uniprot_domain_regex'],
    }
    # = Return data in JSON format =
    response = make_response(jsonify(results_obj))
    return response
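For context, a minimal sketch of how a client might retrieve this metadata over HTTP. The '/metadata' route and the localhost:5000 development address are illustrative assumptions; the actual route registration for this view is not shown in the snippet above.

import requests

# Hypothetical endpoint: assumes the get_metadata view is registered at
# '/metadata' on a locally running development server.
url = 'http://localhost:5000/metadata'

response = requests.get(url)
response.raise_for_status()

metadata = response.json()
print(metadata['uniprot_query'])
print(metadata['uniprot_domain_regex'])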
def __init__(self, run_main=True):
    self.project_config = read_project_config()
    # Look up the current crawl state from the CrawlData table, then fetch the
    # datestamps row for that crawl.
    self.crawldata_row = models.CrawlData.query.first()
    self.current_crawl_number = self.crawldata_row.current_crawl_number
    self.safe_crawl_datestamp = self.crawldata_row.safe_crawl_datestamp
    self.current_crawl_datestamps_row = models.DateStamps.query.filter_by(
        crawl_number=self.current_crawl_number
    ).first()
    # Run the full update pipeline unless the caller opts out.
    if run_main:
        self.check_all_gather_scripts_have_been_run()
        self.update_crawl_numbers()
        self.update_datestamps()
        self.delete_old_crawls()
        self.commit()
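The initializer above queries models.CrawlData and models.DateStamps. As a rough guide, a minimal sketch of what those SQLAlchemy models might look like, limited to the columns referenced here; the column types, the id primary keys, and the import path for db are assumptions rather than details taken from the code.

# Minimal sketch only: import path, column types, and primary keys are assumed.
from flaskapp import db


class CrawlData(db.Model):
    # Tracks which crawl is currently being built and which is safe to serve.
    id = db.Column(db.Integer, primary_key=True)
    current_crawl_number = db.Column(db.Integer)
    safe_crawl_datestamp = db.Column(db.DateTime)


class DateStamps(db.Model):
    # One row of datestamps per crawl, keyed by crawl number.
    id = db.Column(db.Integer, primary_key=True)
    crawl_number = db.Column(db.Integer)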
import os

from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

from targetexplorer.core import read_project_config

targetexplorer_flaskapp_dir = os.path.dirname(__file__)

app = Flask(__name__)
db = SQLAlchemy(app)

# Point SQLAlchemy at the database defined in the project configuration.
project_config = read_project_config()
app.config.update(
    SQLALCHEMY_DATABASE_URI=project_config.get('sqlalchemy_database_uri')
)

# Imported last so that the models module can use the app and db objects
# defined above without creating a circular import at load time.
import models
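read_project_config() is expected to return a mapping containing at least the keys used in this module and in the metadata view above. A hedged sketch of such a mapping follows; the example values are illustrative placeholders, not real project settings.

example_project_config = {
    # UniProt search string used to select entries for the database.
    'uniprot_query': 'domain:"protein kinase" AND reviewed:yes',
    # Regular expression used to pick out relevant domain annotations.
    'uniprot_domain_regex': '^Protein kinase',
    # SQLAlchemy connection URI read by the Flask app at startup.
    'sqlalchemy_database_uri': 'sqlite:///database.db',
}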