def __init__(self):
    """Load ``config.json`` and initialise the BitMEX client and helpers.

    Reads product/strategy settings from ``config.json`` in the working
    directory, selects test or real API credentials based on the
    ``isTest`` flag, and builds the ``Candle`` and ``Orders`` helpers.

    Raises:
        FileNotFoundError: if ``config.json`` is missing.
        KeyError: if a required configuration key is absent.
    """
    # FIX: close the config file deterministically instead of leaking the
    # handle (original used a bare open() with no close()).
    with open('config.json', 'r', encoding="utf-8") as f:
        config = json.load(f)

    self.logger = log.get_custom_logger(__name__)
    self.product_code = config['productCode']
    self.count = config['count']
    self.period = config['period']
    self.lot = config['lot']

    # FIX: the original conditional expression was missing its `else`
    # clause ("'XBTUSD' if ... == 'BTC/USD' self.product_code"), which is
    # a syntax error. BitMEX names the BTC/USD perpetual swap 'XBTUSD';
    # any other product code is passed through unchanged.
    code = 'XBTUSD' if self.product_code == 'BTC/USD' else self.product_code
    self.candle = Candle(code)

    if config['isTest']:
        apiKey = config['test']['key']
        secret = config['test']['secret']
        self.bitmex = ccxt.bitmex({
            'apiKey': apiKey,
            'secret': secret,
        })
        # Redirect the client at the BitMEX testnet endpoints so test
        # credentials never hit the production exchange.
        self.bitmex.urls['api'] = self.bitmex.urls['test']
    else:
        apiKey = config['real']['key']
        secret = config['real']['secret']
        self.bitmex = ccxt.bitmex({
            'apiKey': apiKey,
            'secret': secret,
        })

    self.order = Orders(self.bitmex)
def main():
    """CLI entry point: load an Incucyte cell-growth file for one plate.

    Parses ``--plateid``, ``--file`` and ``--clean`` from the command line,
    then loads the file into the database, rolling back on any failure.
    """
    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument("--plateid", dest="plateid", action="store",
                        required=True,
                        help="The plate ID e.g. 'GEP00001_01'")
    parser.add_argument("--file", dest="file", action="store",
                        required=True,
                        help="The Incucyte file e.g. 'GEP00001_01 data/20170127_GEP00001/GEP00001_01_incu.txt'.")
    parser.add_argument("--clean", dest="clean", action="store_true",
                        default=False,
                        help="Clean database before loading?")
    args = parser.parse_args()

    log = logger.get_custom_logger(
        os.path.join(os.path.dirname(__file__), 'load_cell_growth.log'))

    engine = sqlalchemy.create_engine(cfg['DATABASE_URI'])
    Base.metadata.bind = engine
    session_factory = sqlalchemy.orm.sessionmaker(bind=engine)
    session = session_factory()

    loader = CellGrowthLoader(session, args.file, args.plateid)
    try:
        # Optionally wipe existing rows before loading fresh data.
        if args.clean:
            loader.clean()
        loader.load()
        session.commit()
    except Exception as e:
        log.exception(e)
        session.rollback()
    finally:
        session.close()
def setUp(self):
    """Build a RunFolderList over the test processing/staging directories."""
    import log as logger

    self.log = logger.get_custom_logger()
    here = os.path.abspath(os.path.dirname(__file__))
    self.current_path = here
    self.basedir = os.path.join(here, '../testdata/processing/')
    self.destdir = os.path.join(here, '../testdata/staging/')
    # No cluster dir; restrict to the single named run folder.
    self.runs = RunFolderList(self.basedir, self.destdir, None,
                              '130417_HWI-ST230_1122_C1YH9ACXX')
def setUp(self):
    """Build a RunFolderList spanning processing, staging and cluster dirs."""
    import log as logger

    self.log = logger.get_custom_logger()
    here = os.path.abspath(os.path.dirname(__file__))
    self.current_path = here
    self.basedir = os.path.join(here, '../testdata/processing/')
    self.destdir = os.path.join(here, '../testdata/staging/')
    self.clusterdir = os.path.join(here, '../testdata/lustre/')
    self.runs = RunFolderList(processing_dir=self.basedir,
                              staging_dir=self.destdir,
                              cluster_dir=self.clusterdir)
def setUp(self):
    """Build a single RunFolder for the named test run."""
    import log as logger

    self.log = logger.get_custom_logger()
    here = os.path.abspath(os.path.dirname(__file__))
    self.current_path = here
    self.basedir = os.path.join(here, '../testdata/processing/')
    self.destdir = os.path.join(here, '../testdata/staging/')
    self.clusterdir = os.path.join(here, '../testdata/lustre/')
    self.run_folder_name = '130417_HWI-ST230_1122_C1YH9ACXX'
    self.run_folder = os.path.join(self.basedir, self.run_folder_name)
    self.run = RunFolder(self.run_folder, self.destdir, self.clusterdir)
def setUp(self):
    """Prepare publishing test fixtures: run folders, config and LIMS client."""
    import data
    import log as logger
    from autoanalysis.config import cfg

    self.log = logger.get_custom_logger()
    here = os.path.abspath(os.path.dirname(__file__))
    self.current_path = here
    self.basedir = os.path.join(here, '../testdata/processing4publishing/')
    self.stagingdir = os.path.join(here, '../testdata/staging4publishing/')
    self.clusterdir = os.path.join(here, '../testdata/lustre/')
    self.runs = data.RunFolderList(self.basedir, self.stagingdir,
                                   self.clusterdir)
    self.PUBLISHING_ASSIGNED = cfg['PUBLISHING_ASSIGNED']
    self.are_fastq_files_attached = True
    # Talk to the development LIMS so tests never touch production data.
    self.glslims = GlsLims(use_dev_lims=True)
def main():
    """Entry point: load reference genomes and cell lines into the database."""
    log = logger.get_custom_logger(
        os.path.join(os.path.dirname(__file__), 'load_ref_data.log'))

    engine = sqlalchemy.create_engine(cfg['DATABASE_URI'])
    Base.metadata.bind = engine
    session_factory = sqlalchemy.orm.sessionmaker(bind=engine)
    session = session_factory()

    loader = RefLoader(session)
    try:
        loader.load_genomes()
        loader.load_celllines()
        session.commit()
    except Exception as e:
        # Log the full traceback, then undo any partial load.
        log.exception(e)
        session.rollback()
    finally:
        session.close()
def main():
    """Entry point: delete the project whose GEID is given as ``sys.argv[1]``.

    Looks up the project by GEID, deletes it (cascading per the ORM
    mappings), and commits; any failure is logged and rolled back.
    """
    log = logger.get_custom_logger(
        os.path.join(os.path.dirname(__file__), 'delete_project.log'))

    engine = sqlalchemy.create_engine(cfg['DATABASE_URI'])
    Base.metadata.bind = engine
    DBSession = sqlalchemy.orm.sessionmaker(bind=engine)
    session = DBSession()
    try:
        project = session.query(Project).filter(
            Project.geid == sys.argv[1]).first()
        # FIX: .first() returns None when no row matches; the original then
        # called session.delete(None), raising an opaque
        # UnmappedInstanceError. Fail with a clear message instead (still
        # caught, logged and rolled back by the handler below).
        if project is None:
            raise ValueError(
                'No project found with geid {}'.format(sys.argv[1]))
        session.delete(project)
        session.flush()
        session.commit()
    except Exception as e:
        log.exception(e)
        session.rollback()
    finally:
        session.close()
def main():
    """CLI entry point: load an InCell Western protein-abundance file."""
    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument("--plateid", dest="plateid", action="store",
                        required=True,
                        help="The plate ID e.g. 'GEP00001_01'")
    parser.add_argument("--file", dest="file", action="store",
                        required=True,
                        help="The InCell Western file e.g. 'data/20170127_GEP00001/GEP00001_01_ICW.csv'.")
    args = parser.parse_args()

    log = logger.get_custom_logger(
        os.path.join(os.path.dirname(__file__), 'load_protein_abundance.log'))

    engine = sqlalchemy.create_engine(cfg['DATABASE_URI'])
    Base.metadata.bind = engine
    session_factory = sqlalchemy.orm.sessionmaker(bind=engine)
    session = session_factory()

    loader = ProteinAbundanceLoader(session, args.file, args.plateid)
    try:
        loader.load()
        session.commit()
    except Exception as e:
        log.exception(e)
        session.rollback()
    finally:
        session.close()
from log import get_custom_logger

log = get_custom_logger()


def before_all(context):
    """Behave hook run once before all features: set API URL and auth token."""
    context.url = "https://api.spotify.com/v1/artists/"
    # SECURITY: hard-coded OAuth bearer token committed to source control.
    # Spotify tokens expire and secrets in VCS history leak — this should
    # be read from an environment variable or a secrets store instead.
    context.token = "Bearer BQC25JXxyTjjsx8ZHtcX4v0pXzqYOifElXwOCnhjj4C5MFLRvwdlC339EDBqNl3z_NmmP9VYbXBPkB5CWA9KdnDEZc8zEJgBOMVf7zf9F4vqWHcC4ZBIUCHz7khpNCayug0alpe1YW90HRf2_2w"
    # FIX: the original logged the full bearer token, persisting the secret
    # in plaintext log files. Log only the fact that it was set.
    log.info("Spotify OAuth token set")


def before_scenario(context, scenario):
    """Behave hook: mark the start of each scenario in the log."""
    log.info("***************** SCENARIO STARTS: " + scenario.name + " *****************")


def after_scenario(context, scenario):
    """Behave hook: mark the end of each scenario in the log."""
    log.info("****************** SCENARIO ENDS: " + scenario.name + " ******************")
def __init__(self, exhange):
    """Store the exchange client and load product settings from config.json.

    Args:
        exhange: the exchange client instance to wrap. (Parameter name
            keeps the original 'exhange' spelling for caller compatibility.)

    Raises:
        FileNotFoundError: if ``config.json`` is missing.
        KeyError: if ``productCode`` is absent from the configuration.
    """
    # FIX: close the config file deterministically instead of leaking the
    # handle (original used a bare open() with no close()).
    with open('config.json', 'r', encoding="utf-8") as f:
        config = json.load(f)
    self.exhange = exhange
    self.product_code = config['productCode']
    self.logger = log.get_custom_logger(__name__)