def _test_put_get(user):
    filenames = ['test.dat', 'test2.dat']

    # Make a local temp dir and put files there
    local_tmpdir = Ska.File.TempDir()
    with Ska.File.chdir(local_tmpdir.name):
        for filename in filenames:
            with open(filename, 'w') as fh:
                fh.write(filename)
        local_filenames = [os.path.abspath(x)
                           for x in os.listdir(local_tmpdir.name)]

    # Push the files to a randomly named remote temp dir on lucky
    remote_tmpdir = str(uuid.uuid4())  # random remote dir name
    occweb.ftp_put_to_lucky(remote_tmpdir, local_filenames, user=user)

    # Make a new local temp dir for the return
    local_tmpdir2 = Ska.File.TempDir()
    local_filenames = [os.path.join(local_tmpdir2.name, x) for x in filenames]
    occweb.ftp_get_from_lucky(remote_tmpdir, local_filenames, user=user)

    # Clean up remote temp dir
    lucky = Ska.ftp.SFTP('lucky')
    if user is None:
        user = lucky.ftp.get_channel().transport.get_username()
    lucky.rmdir('/home/{}/{}'.format(user, remote_tmpdir))
    lucky.close()

    # Make sure round-tripped files are the same
    with Ska.File.chdir(local_tmpdir2.name):
        for filename in filenames:
            with open(filename) as fh:
                assert fh.read() == filename
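# Hypothetical pytest-style wrapper (not in the original source) showing how
# the underscore-prefixed helper above would typically be driven; the wrapper
# name and the choice of user are assumptions:
def test_put_get_default_user():
    """Round-trip files through lucky using the default SFTP user."""
    _test_put_get(user=None)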
def main():
    """
    Command line interface to update the cmd_states table to reflect current
    load segments / timelines in the database.

    Usage: update_cmd_states.py [options]::

      Options:
        -h, --help            show this help message and exit
        --dbi=DBI             Database interface (sqlite|sybase)
        --server=SERVER       DBI server (<filename>|sybase)
        --user=USER           database user (default=Ska.DBI default)
        --database=DATABASE   database name (default=Ska.DBI default)
        --h5file=H5FILE       filename for HDF5 version of cmd_states
        --datestart=DATESTART
                              Starting date for update (default=Now-10 days)
        --mp_dir=DIR          MP directory (default=/data/mpcrit1/mplogs)
        --loglevel=LOGLEVEL   Log level (10=debug, 20=info, 30=warnings)
        --occ                 Running on OCC network (default=False)
    """
    opt, args = get_options()

    # Configure logging to emit msgs to stdout
    logging.basicConfig(level=opt.loglevel,
                        format='%(message)s',
                        stream=sys.stdout)

    logging.info('Running {0} at {1}'
                 .format(os.path.basename(sys.argv[0]), time.ctime()))

    # Paths for the "flight" versions of the HDF5 and sqlite files.  If the
    # supplied file matches the flight version then use the ftp directory
    # cmd_states, else cmd_states_test.
    flt_h5 = '/proj/sot/ska/data/cmd_states/cmd_states.h5'
    flt_db3 = '/proj/sot/ska/data/cmd_states/cmd_states.db3'
    ftp_h5_dirname = 'cmd_states' if opt.h5file == flt_h5 else 'cmd_states_test'
    ftp_db3_dirname = 'cmd_states' if opt.server == flt_db3 else 'cmd_states_test'

    # If running on the OCC (GRETA) network then just try to get a new HDF5
    # file from lucky in /home/taldcroft/cmd_states and copy to opt.h5file.
    # The file will appear on lucky only when the HEAD network version gets
    # updated with changed content.
    if opt.occ:
        if opt.server:
            occweb.ftp_get_from_lucky(ftp_db3_dirname, [opt.server],
                                      logger=logging)
        if opt.h5file:
            occweb.ftp_get_from_lucky(ftp_h5_dirname, [opt.h5file],
                                      logger=logging)
        sys.exit(0)

    logging.debug('Connecting to db: dbi=%s server=%s user=%s database=%s'
                  % (opt.dbi, opt.server, opt.user, opt.database))
    try:
        db = Ska.DBI.DBI(dbi=opt.dbi, server=opt.server, user=opt.user,
                         database=opt.database, verbose=False)
        if opt.dbi == 'sqlite':
            db.conn.text_factory = str
    except Exception as msg:
        logging.error('ERROR: failed to connect to {0}:{1} server: {2}'
                      .format(opt.dbi, opt.server, msg))
        sys.exit(0)

    if opt.h5file:
        filters = tables.Filters(complevel=5, complib='zlib')
        # Support both the PyTables 3.x (open_file) and 2.x (openFile) APIs
        tables_open_file = getattr(tables, 'open_file', None) or tables.openFile
        h5 = tables_open_file(opt.h5file, mode='a', filters=filters)
    else:
        h5 = None

    # Get initial state containing the specified datestart
    logging.debug('Getting initial state0')
    state0 = cmd_states.get_state0(date=opt.datestart, db=db)
    logging.debug('Initial state0: datestart=%s datestop=%s obsid=%d'
                  % (state0['datestart'], state0['datestop'], state0['obsid']))

    # Sync up datestart to state0 and get timeline load segments including
    # state0 and beyond.
    datestart = state0['datestart']
    logging.debug('Getting timeline_loads after %s' % datestart)
    timeline_loads = db.fetchall("""SELECT * from timeline_loads
                                    WHERE datestop > '%s'""" % datestart)
    logging.debug('Found %s timeline_loads' % len(timeline_loads))

    # Get cmds since datestart.  If needed, add cmds to the database.
    logging.debug('Getting cmds after %s' % datestart)
    cmds = cmd_states.get_cmds(datestart, db=db, update_db=True,
                               timeline_loads=timeline_loads,
                               mp_dir=opt.mp_dir)
    logging.debug('Found %s cmds after %s' % (len(cmds), datestart))

    # Get the states generated by cmds starting from state0
    logging.debug('Generating cmd_states after %s' % datestart)
    states = cmd_states.get_states(state0, cmds)
    logging.debug('Found %s states after %s' % (len(states), datestart))

    # Update cmd_states in database
    logging.debug('Updating database cmd_states table')
    states_changed = update_states_db(states, db, h5)

    # If updating sqlite, push the database file to lucky
    if opt.dbi == 'sqlite':
        occweb.ftp_put_to_lucky(ftp_db3_dirname, [opt.server], logger=logging)

    if h5:
        # Check for consistency between HDF5 and SQL versions
        n_check = 3000 if states_changed else 100
        check_consistency(db, h5, n_check)

        # Upload the HDF5 file to the lucky ftp server
        occweb.ftp_put_to_lucky(ftp_h5_dirname, [opt.h5file], logger=logging)

    # Close down for good measure.
    db.conn.close()
    if h5:
        h5.close()
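# main() calls get_options(), which is not defined in this section.  Given the
# docstring above and the ``opt, args = get_options()`` return signature, a
# minimal optparse-based sketch could look like this; defaults beyond those
# stated in the docstring are assumptions:
from optparse import OptionParser

def get_options():
    parser = OptionParser(usage='update_cmd_states.py [options]')
    parser.add_option('--dbi', default='sqlite',
                      help='Database interface (sqlite|sybase)')
    parser.add_option('--server',
                      help='DBI server (<filename>|sybase)')
    parser.add_option('--user',
                      help='database user (default=Ska.DBI default)')
    parser.add_option('--database',
                      help='database name (default=Ska.DBI default)')
    parser.add_option('--h5file',
                      help='filename for HDF5 version of cmd_states')
    parser.add_option('--datestart',
                      help='Starting date for update (default=Now-10 days)')
    parser.add_option('--mp_dir', default='/data/mpcrit1/mplogs',
                      help='MP directory (default=/data/mpcrit1/mplogs)')
    parser.add_option('--loglevel', type='int', default=20,
                      help='Log level (10=debug, 20=info, 30=warnings)')
    parser.add_option('--occ', action='store_true', default=False,
                      help='Running on OCC network (default=False)')
    return parser.parse_args()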
if __name__ == '__main__':
    main()
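# Example invocation on the HEAD network (file paths here are illustrative,
# not the flight paths; options match the docstring above):
#
#   update_cmd_states.py --dbi=sqlite --server=cmd_states.db3 \
#       --h5file=cmd_states.h5 --loglevel=10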