def __init__(self, *args, **kwargs):
    """Set up the fixtures shared by the UV-data tests: a database
    interface plus paths to one sample uvcRRE file and one npz file."""
    super(TestUVData, self).__init__(*args, **kwargs)
    self.dbi = pdbi.DataBaseInterface()
    test_dir = os.path.join(ppdata.root_dir, 'data/test')
    self.uv_file = os.path.join(test_dir, 'zen.2456617.22257.yx.uvcRRE')
    self.npz_file = os.path.join(test_dir, 'zen.2455906.53332.uvcRE.npz')
def restore_db(backup_file=None, table=None):
    '''
    loads backups from json into database

    Parameters
    ----------
    backup_file | str: name of backup file --defaults to None
    table | str: table name --defaults to None
    '''
    if table is None:
        # nothing to restore without a table name
        return
    if backup_file is None:
        # default to the newest timestamped backup directory
        backup_list = sorted(glob.glob('/data4/paper/paperdata_backup/[0-9]*'),
                             reverse=True)
        timestamp = int(backup_list[0].split('/')[-1])
        backup_file = '/data4/paper/paperdata_backup/{timestamp}/{table}_{timestamp}.json'.format(
            table=table, timestamp=timestamp)
    dbi = pdbi.DataBaseInterface()
    meta = pdbi.Base.metadata
    load_table = meta.tables[table]
    with dbi.session_scope() as s, open(backup_file, 'r') as backup_db:
        entry_list = json.load(backup_db)
        for entry_dict in entry_list:
            print(entry_dict.items())
            try:
                s.add(load_table(**entry_dict))
            except Exception as e:
                # fix: was a bare `except:` that also swallowed SystemExit and
                # hid the failure cause; KeyboardInterrupt now propagates
                # naturally, so the explicit re-raise branch is unnecessary
                print('Failed to load in entry:', e)
def refresh_db(username=None):
    '''
    refreshes database by checking md5sums, paths, obsnums
    connects observations to files
    '''
    dbi = pdbi.DataBaseInterface()
    with dbi.session_scope() as sess:
        # sources first (may need the username), then the remaining
        # refresh steps in order
        update_sources(sess, username=username)
        for refresh_step in (update_md5, update_obsnums, connect_observations):
            refresh_step(sess)
def __init__(self, *args, **kwargs):
    """Set up DBI-test fixtures: host/path/file identifiers for one sample
    observation, and the database interface plus table classes."""
    super(TestDBI, self).__init__(*args, **kwargs)
    # sample file location
    self.host = 'pot4.physics.upenn.edu'
    self.base_path = '/data5/final/2456617'
    self.filename = 'zen.2456617.22257.yx.uv'
    host_prefix = ':'.join((self.host, self.base_path))
    self.source = os.path.join(host_prefix, self.filename)
    # sample observation metadata
    self.jd = 2456617.22941
    self.pol = 'yx'
    self.obsnum = 8595911205
    # database handles
    self.dbi = pdbi.DataBaseInterface()
    self.obs_table = pdbi.Observation
    self.file_table = pdbi.File
    self.log_table = pdbi.Log
def db_objs():
    '''
    outputs database objects

    Returns
    -------
    tuple:
        object: database interface object
        object: observation table object
        object: file table object
    '''
    return pdbi.DataBaseInterface(), pdbi.Observation, pdbi.File
def add_files(source_host, source_paths):
    '''
    generates list of input files, check for duplicates, add information to database

    Parameters
    ----------
    source_host | str: host of files
    source_paths | list[str]: list of paths of uv* files
    '''
    dbi = pdbi.DataBaseInterface()
    with dbi.session_scope() as s:
        # drop duplicates already present in the database, then sort
        source_paths = sorted(
            dupe_check(s, source_host, source_paths, verbose=True))
        # only .uv files are added to the database; any .npz paths in
        # source_paths are ignored (fix: removed a dead `npz_paths` list that
        # was built but never used -- presumably unfinished npz handling,
        # TODO confirm intent)
        uv_paths = [
            uv_path for uv_path in source_paths if uv_path.endswith('.uv')
        ]
        add_files_to_db(s, source_host, uv_paths, verbose=True)
''' scripts.paperdata.load_tapes loads files into tapes and updates their tape_index author | Immanuel Washington ''' from __future__ import print_function import argparse import glob from paper.data import dbi as pdbi, add if __name__ == '__main__': dbi = pdbi.DataBaseInterface() file_table = pdbi.File with dbi.session_scope() as s: FILEs = s.query(file_table)\ .filter(file_table.tape_index != None) .all() #for FILE in FILEs: #tape_index = load_into_tape(FILE) #FILE.tape_index = tape_index
def script_test():
    '''
    runs tests of scripts: add files to the db, back it up, restore it,
    move files, delete files, then clean up the db, backups, and copies
    '''
    parser = argparse.ArgumentParser(description='Move files, update database')
    parser.add_argument('-u', '--uname', type=str, help='host username')
    parser.add_argument('-p', '--pword', type=str, help='host password')
    args = parser.parse_args()
    # fix: removed a dead try/except AttributeError that only re-raised --
    # argparse namespaces always have the declared attributes
    username = args.uname
    password = args.pword

    print('instantiating database interface object...')
    dbi = pdbi.DataBaseInterface(
        configfile=os.path.expanduser('~/paperdata/test.cfg'))
    print('creating db...')
    dbi.create_db()

    print('finding files to test...')
    test_paths_str = os.path.expanduser('~/test_data/zen*.uv*')
    test_paths = glob.glob(test_paths_str)

    print('adding files to db...')
    source_host = 'folio'
    add_files.add_files(dbi, source_host, test_paths)
    add_files.update_obsnums(dbi)
    add_files.connect_observations(dbi)

    print('backing up db...')
    backup_db.paperbackup(dbi, db='papertest')

    print('dropping db...')
    dbi.drop_db(pdbi.Base)
    print('creating db again...')
    dbi.create_db()
    print('loading db...')
    restore_db(dbi, table='File')
    restore_db(dbi, table='Observation')
    add_files.update_obsnums(dbi)
    add_files.connect_observations(dbi)

    print('moving files...')
    #copy files first?
    dest_host = 'node16'
    dest_path = os.path.expanduser('~/test_data/')
    # fix: original passed an undefined `source_paths`; the files under test
    # are the globbed test_paths -- TODO confirm against move_files signature
    source_paths = test_paths
    move_files.move_files(dbi, source_host, source_paths,
                          dest_host, dest_path, username, password)

    print('deleting files...')
    source_host = dest_host
    dest_host = 'folio'
    del_dir = os.path.expanduser('~/test_data_2/')
    # fix: original ran os.mkdir(dest_path), which already exists; the new
    # directory needed here is the deletion target del_dir
    os.mkdir(del_dir)
    source_paths = delete_files.delete_check(source_host)
    delete_files.delete_files(dbi, source_host, source_paths,
                              dest_host, del_dir)

    print('dropping db again...')
    # fix: original called dbi.drop_db() with no argument, but the earlier
    # call shows drop_db requires the declarative Base
    dbi.drop_db(pdbi.Base)

    print('deleting backup file...')
    backup_list = sorted(glob.glob('/data4/paper/paperdata_backup/[0-9]*'),
                         reverse=True)
    timestamp = int(backup_list[0].split('/')[-1])
    # fix: `table` was undefined here; remove the backup for each table that
    # was restored above
    for table in ('File', 'Observation'):
        backup_file = '/data4/paper/paperdata_backup/{timestamp}/{table}_{timestamp}.json'.format(
            table=table, timestamp=timestamp)
        os.remove(backup_file)

    print('deleting copied files...')
    shutil.rmtree(del_dir)
    print('Script test Complete!')