Example 1
 def setUp(self):
     self.dbconfig = 'data/dbconfig.json'
     self.platform_json = 'data/platform_db_data.json'
     self.seqrun_json = 'data/seqrun_db_data.json'
     self.pipeline_json = 'data/pipeline_data.json'
     dbparam = read_dbconf_json(self.dbconfig)
     base = BaseAdaptor(**dbparam)
     self.engine = base.engine
     self.dbname = dbparam['dbname']
     Base.metadata.create_all(self.engine)
     self.session_class = base.get_session_class()
     base.start_session()
     # load platform data
     pl = PlatformAdaptor(**{'session': base.session})
     pl.store_platform_data(data=read_json_data(self.platform_json))
     # load seqrun data
     sra = SeqrunAdaptor(**{'session': base.session})
     sra.store_seqrun_and_attribute_data(
         data=read_json_data(self.seqrun_json))
     # load pipeline data
     pla = PipelineAdaptor(**{'session': base.session})
     pla.store_pipeline_data(data=read_json_data(self.pipeline_json))
     pipeline_seed_data = [
         {
             'pipeline_name': 'demultiplexing_fastq',
             'seed_id': '1',
             'seed_table': 'seqrun'
         },
     ]
     pla.create_pipeline_seed(data=pipeline_seed_data)
     base.close_session()
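The setUp above creates the schema and seeds platform, seqrun and pipeline data, but the matching tearDown is not shown. A minimal companion sketch, assuming os is imported in the test module and the test database is a throwaway SQLite file whose filename is stored in self.dbname:

 def tearDown(self):
     # drop the tables created in setUp and remove the SQLite file (assumes a local SQLite test db)
     Base.metadata.drop_all(self.engine)
     if os.path.exists(self.dbname):
         os.remove(self.dbname)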
Example 2
 def setUp(self):
     self.dbconfig = 'data/dbconfig.json'
     self.platform_json = 'data/platform_db_data.json'
     self.seqrun_json = 'data/seqrun_db_data.json'
     self.pipeline_json = 'data/pipeline_data.json'
     self.flowcell_rules_json = 'data/flowcell_rules.json'
     dbparam = read_dbconf_json(self.dbconfig)
     base = BaseAdaptor(**dbparam)
     self.engine = base.engine
     self.dbname = dbparam['dbname']
     Base.metadata.create_all(self.engine)
     self.session_class = base.get_session_class()
     base.start_session()
     # load platform data and flowcell barcode rules
     pl = PlatformAdaptor(**{'session': base.session})
     pl.store_platform_data(data=read_json_data(self.platform_json))
     pl.store_flowcell_barcode_rule(data=read_json_data(self.flowcell_rules_json))
     # load seqrun data
     sra = SeqrunAdaptor(**{'session': base.session})
     sra.store_seqrun_and_attribute_data(data=read_json_data(self.seqrun_json))
     base.close_session()
Example 3
def load_new_platform_data(data_file, dbconfig):
    '''
    A method for loading new data into the platform table
    '''
    try:
        formatted_data = read_json_data(data_file)
        dbparam = read_dbconf_json(dbconfig)
        pl = PlatformAdaptor(**dbparam)
        pl.start_session()
        pl.store_platform_data(data=formatted_data)
        pl.close_session()
    except:
        raise
Example 4
def load_new_flowcell_data(data_file, dbconfig):
    '''
    A method for loading new flowcell barcode rule data
    '''
    try:
        flowcell_rule_data = read_json_data(data_file)
        dbparam = read_dbconf_json(dbconfig)
        pl = PlatformAdaptor(**dbparam)
        pl.start_session()
        pl.store_flowcell_barcode_rule(data=flowcell_rule_data)
        pl.close_session()
    except:
        raise
Example 5
def load_new_seqrun_data(data_file, dbconfig):
    '''
    A method for loading new data into the seqrun table
    '''
    try:
        formatted_data = read_json_data(data_file)
        dbparam = read_dbconf_json(dbconfig)
        sr = SeqrunAdaptor(**dbparam)
        sr.start_session()
        sr.store_seqrun_and_attribute_data(data=formatted_data)
        sr.close_session()
    except:
        raise
Example 6
def load_new_pipeline_data(data_file, dbconfig):
    '''
    A method for loading new data into the pipeline table
    '''
    try:
        formatted_data = read_json_data(data_file)
        dbparam = read_dbconf_json(dbconfig)
        pp = PipelineAdaptor(**dbparam)
        pp.start_session()
        pp.store_pipeline_data(data=formatted_data)
        pp.close_session()
    except:
        raise
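Taken together, the loader functions above can seed an empty database in dependency order: platform first, then flowcell rules, seqrun and pipeline data. A minimal usage sketch, reusing the fixture file paths from the setUp examples above:

# seed a fresh database with the loaders defined above;
# the json paths are the fixture files used in the setUp examples
dbconfig = 'data/dbconfig.json'
load_new_platform_data('data/platform_db_data.json', dbconfig)
load_new_flowcell_data('data/flowcell_rules.json', dbconfig)
load_new_seqrun_data('data/seqrun_db_data.json', dbconfig)
load_new_pipeline_data('data/pipeline_data.json', dbconfig)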
Example 7
 def test_read_json_data(self):
     data = read_json_data(data_file=self.data_file)
     self.assertIsInstance(data, list)
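The test only checks that read_json_data returns a list. A minimal sketch of a compatible helper, purely illustrative and not the library's actual implementation, assuming a single top-level JSON object should be wrapped in a list:

import json
import os

def read_json_data(data_file):
    # illustrative helper: read a json file and always return a list of records
    if not os.path.exists(data_file):
        raise IOError('File {0} not found'.format(data_file))
    with open(data_file, 'r') as fp:
        data = json.load(fp)
    if isinstance(data, dict):
        data = [data]  # wrap a single object so callers always receive a list
    return data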
Example 8
dbconfig_path = args.dbconfig_path
collection_file_data = args.collection_file_data
calculate_checksum = args.calculate_checksum

if __name__ == '__main__':
    try:
        dbconnected = False
        if not os.path.exists(dbconfig_path):
            raise IOError('Dbconfig file {0} not found'.format(dbconfig_path))

        if not os.path.exists(collection_file_data):
            raise IOError('Collection data json file {0} not found'.format(
                collection_file_data))

        dbparam = read_dbconf_json(dbconfig_path)  # read db config
        collection_data = read_json_data(
            collection_file_data)  # read collection data json
        ca = CollectionAdaptor(**dbparam)
        ca.start_session()  # connect to database
        dbconnected = True
        ca.load_file_and_create_collection(
            data=collection_data,
            calculate_file_size_and_md5=calculate_checksum,
            autosave=True)  # load data and commit changes
        ca.close_session()
        dbconnected = False
    except Exception as e:
        if dbconnected:
            ca.rollback_session()
            ca.close_session()
        raise ValueError('Error: {0}'.format(e))
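Example 8 reads dbconfig_path, collection_file_data and calculate_checksum from args, but the snippet omits the argument parser and imports. A minimal sketch of that missing block, assuming argparse; the flag names are hypothetical:

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--dbconfig_path', required=True,
                    help='Database configuration json file')
parser.add_argument('-f', '--collection_file_data', required=True,
                    help='Collection data json file')
parser.add_argument('-s', '--calculate_checksum', default=False, action='store_true',
                    help='Calculate file size and md5 checksum while loading')
args = parser.parse_args()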