Example #1
0
def process_ami(modulus, remainder):
    """
    We're running on the AMI instance - so actually do the work

    Find the files and move them to S3
    :return:
    """
    # Start the shutdown signal poller to check when this instance must close
    start_poll()

    # Connect to the database - the login string is set in the database package
    engine = create_engine(DB_LOGIN)
    connection = engine.connect()

    # Pipeline stages run in order. A failure in one stage is logged and the
    # remaining stages still run; only SystemExit (raised by the shutdown
    # poller) aborts the sequence early.
    stages = (
        ('Updating state information', processed_data,
         'processed_data(): an exception occurred'),
        ('Storing files', store_files,
         'store_files(): an exception occurred'),
        # Delete galaxy data - commits happen inside
        ('Deleting galaxy data', delete_galaxy_data,
         'delete_galaxy_data(): an exception occurred'),
        # Delete register data - commits happen inside
        ('Deleting register data', delete_register_data,
         'delete_register_data(): an exception occurred'),
        ('Archiving to HDF5', archive_to_hdf5,
         'archive_to_hdf5(): an exception occurred'),
    )

    try:
        for info_message, stage, error_message in stages:
            try:
                LOG.info(info_message)
                stage(connection, modulus, remainder)
            except Exception:
                # SystemExit is not an Exception subclass, so it escapes to
                # the outer handler below
                LOG.exception(error_message)

    except SystemExit:
        LOG.info(
            'Spot Instance Terminate Notice received, archive_task is shutting down'
        )

    finally:
        connection.close()
Example #2
0
def process_ami(modulus, remainder):
    """
    We're running on the AMI instance - so actually do the work

    Find the files and move them to S3
    :return:
    """
    # Start the shutdown signal poller to check when this instance must close
    start_poll()

    # Connect to the database - the login string is set in the database package
    engine = create_engine(DB_LOGIN)
    connection = engine.connect()

    def _run_stage(info_message, stage, error_message):
        # Run a single archive stage: log it, then swallow-and-log any
        # Exception so later stages still run. SystemExit (raised by the
        # shutdown poller) is not an Exception subclass and propagates.
        try:
            LOG.info(info_message)
            stage(connection, modulus, remainder)
        except Exception:
            LOG.exception(error_message)

    try:
        # Check the processed data
        _run_stage('Updating state information', processed_data,
                   'processed_data(): an exception occurred')

        # Store files
        _run_stage('Storing files', store_files,
                   'store_files(): an exception occurred')

        # Delete galaxy data - commits happen inside
        _run_stage('Deleting galaxy data', delete_galaxy_data,
                   'delete_galaxy_data(): an exception occurred')

        # Delete register data - commits happen inside
        _run_stage('Deleting register data', delete_register_data,
                   'delete_register_data(): an exception occurred')

        # Archive to HDF5
        _run_stage('Archiving to HDF5', archive_to_hdf5,
                   'archive_to_hdf5(): an exception occurred')

    except SystemExit:
        LOG.info('Spot Instance Terminate Notice received, archive_task is shutting down')

    finally:
        connection.close()
def process_ami():
    """
    We're running on the AMI instance - so actually do the work

    Find the files and move them to S3
    :return:
    """
    # Connect to the database - the login string is set in the database package
    engine = create_engine(DB_LOGIN)
    connection = engine.connect()
    try:
        # Check the processed data.
        # Each stage catches Exception (not a bare except) so that a failure
        # in one stage is logged and the remaining stages still run, while
        # SystemExit / KeyboardInterrupt can still terminate the task cleanly.
        try:
            processed_data(connection)
        except Exception:
            LOG.exception('processed_data(): an exception occurred')

        # Store files
        try:
            store_files(connection)
        except Exception:
            LOG.exception('store_files(): an exception occurred')

        # Delete galaxy data - commits happen inside
        try:
            delete_galaxy_data(connection)
        except Exception:
            LOG.exception('delete_galaxy_data(): an exception occurred')

        # Archive the BOINC stats
        try:
            archive_boinc_stats()
        except Exception:
            LOG.exception('archive_boinc_stats(): an exception occurred')

        # Archive to HDF5
        try:
            archive_to_hdf5(connection)
        except Exception:
            LOG.exception('archive_to_hdf5(): an exception occurred')

    finally:
        # Always release the DB connection, even on interrupt
        connection.close()
Example #4
0
import os
import sys

# Setup the Python Path as we may be running this via ssh
# NOTE: the path entries must be appended BEFORE the project imports below,
# so this module-level ordering is deliberate.
base_path = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(base_path, '..')))
sys.path.append(
    os.path.abspath(os.path.join(base_path, '../../../../boinc/py')))

import argparse
from utils.logging_helper import config_logger
from archive.store_files_mod import store_files
from utils.readable_dir import ReadableDir

LOG = config_logger(__name__)
LOG.info('PYTHONPATH = {0}'.format(sys.path))

# Command-line entry point: copy HDF5 files from a local directory into S3.
parser = argparse.ArgumentParser('Copy files into S3')
parser.add_argument('-d',
                    '--dir',
                    action=ReadableDir,  # custom action: validates the dir is readable
                    nargs=1,
                    help='where the HDF5 files are')
args = vars(parser.parse_args())

# Directory containing the HDF5 files to upload
# NOTE(review): with nargs=1 this would normally be a one-element list;
# presumably ReadableDir stores a plain path - confirm against its implementation.
DIR = args['dir']

file_count = store_files(DIR)

LOG.info('All Done. Stored %d files.', file_count)
Example #5
0
#    MA 02111-1307  USA
#
"""
Store the files to NGAS

NOTE(review): the argparse description below says the files are copied into
S3, not NGAS - confirm which backend store_files() actually targets.
"""
import os
import sys

# Setup the Python Path as we may be running this via ssh
# NOTE: the path entries must be appended BEFORE the project imports below,
# so this module-level ordering is deliberate.
base_path = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(base_path, '../../src')))
sys.path.append(os.path.abspath(os.path.join(base_path, '../../../../boinc/py')))

import argparse
from utils.logging_helper import config_logger
from archive.store_files_mod import store_files
from utils.readable_dir import ReadableDir

LOG = config_logger(__name__)
LOG.info('PYTHONPATH = {0}'.format(sys.path))

# Command-line entry point: copy HDF5 files from a local directory to storage.
# ReadableDir is a custom argparse action that validates the directory.
parser = argparse.ArgumentParser('Copy files into S3')
parser.add_argument('-d','--dir', action=ReadableDir, nargs=1, help='where the HDF5 files are')
args = vars(parser.parse_args())

# Directory containing the HDF5 files to upload
# NOTE(review): with nargs=1 this would normally be a one-element list;
# presumably ReadableDir stores a plain path - confirm against its implementation.
DIR = args['dir']

file_count = store_files(DIR)

LOG.info('All Done. Stored %d files.', file_count)