Code Example #1
def copy_files(processes, bottom_frequency, frequency_range):
    # Create the directory
    if not exists(DIRECTORY):
        os.makedirs(DIRECTORY)

    # Scan the bucket
    s3_helper = S3Helper()
    bucket = s3_helper.get_bucket(CHILES_BUCKET_NAME)
    LOGGER.info('Scanning bucket: {0}/CLEAN'.format(bucket))

    # Create the queue
    queue = multiprocessing.JoinableQueue()

    # Start the consumers
    for x in range(processes):
        consumer = Consumer(queue)
        consumer.start()

    for key in bucket.list(prefix='CLEAN/'):
        LOGGER.info('Checking {0}'.format(key.key))
        # Skip keys that are not image tarballs
        if key.key.endswith('.image.tar.gz') or key.key.endswith('.image.tar'):
            # Do we need this file?
            basename_key = basename(key.key)
            if in_frequency_range(basename_key, bottom_frequency, frequency_range):
                # Queue the copy of the file
                temp_file = os.path.join(DIRECTORY, basename_key)
                queue.put(Task(key, temp_file, DIRECTORY))

    # Add a poison pill to shut things down
    for x in range(processes):
        queue.put(None)

    # Wait for the queue to terminate
    queue.join()
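
All of these examples rely on a Consumer worker class that the excerpts do not show. Below is a minimal sketch of the poison-pill pattern they follow; the class name and the JoinableQueue protocol come from the excerpts, while the body is an assumption (it presumes each queued Task object is callable).

import multiprocessing


class Consumer(multiprocessing.Process):
    """Worker process: runs tasks from a JoinableQueue until a poison pill (None) arrives."""

    def __init__(self, queue):
        multiprocessing.Process.__init__(self)
        self._queue = queue

    def run(self):
        while True:
            task = self._queue.get()
            if task is None:
                # Poison pill: acknowledge it and exit
                self._queue.task_done()
                break
            try:
                task()  # Task objects are assumed to be callable
            finally:
                # Always call task_done() so queue.join() can return
                self._queue.task_done()

One None is queued per consumer, so every worker receives exactly one pill; queue.join() then returns once each put() has been matched by a task_done().
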
Code Example #2
def copy_files(frequency_id, processes, days):
    s3_helper = S3Helper()
    bucket = s3_helper.get_bucket(CHILES_BUCKET_NAME)
    LOGGER.info('Scanning bucket: {0}, frequency_id: {1}'.format(bucket, frequency_id))

    # Create the queue
    queue = multiprocessing.JoinableQueue()

    # Start the consumers
    for x in range(processes):
        consumer = Consumer(queue)
        consumer.start()

    for key in bucket.list(prefix='CVEL/{0}'.format(frequency_id)):
        LOGGER.info('Checking {0}'.format(key.key))
        # Skip keys that are not data tarballs
        if key.key.endswith('/data.tar.gz') or key.key.endswith('/data.tar'):
            elements = key.key.split('/')
            if elements[2] in days:
                directory = '/mnt/output/Chiles/split_vis/{0}/'.format(elements[2])

                # Queue the copy of the file
                temp_file = os.path.join(directory, 'data.tar.gz' if key.key.endswith('/data.tar.gz') else 'data.tar')
                queue.put(Task(key, temp_file, directory, frequency_id))

    # Add a poison pill to shut things down
    for x in range(processes):
        queue.put(None)

    # Wait for the queue to terminate
    queue.join()
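
The Task class queued above is project code that the excerpts do not include. A hypothetical sketch of what it might do follows; the bucket.list()/key.key calls in these examples are the boto 2 API, and get_contents_to_filename is a real boto 2 method, but the class body and attribute names here are assumptions.

import os


class Task(object):
    """Hypothetical copy task: download one S3 key to a local file."""

    def __init__(self, key, temp_file, directory, frequency_id=None):
        self._key = key
        self._temp_file = temp_file
        self._directory = directory
        self._frequency_id = frequency_id

    def __call__(self):
        # Make sure the target directory exists before downloading
        if not os.path.exists(self._directory):
            os.makedirs(self._directory)
        # boto 2 keys can write their contents straight to a local path
        self._key.get_contents_to_filename(self._temp_file)
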
Code Example #3
def copy_files(args):
    # Create the queue
    queue = multiprocessing.JoinableQueue()
    # Start the consumers
    for x in range(PROCESSES):
        consumer = Consumer(queue)
        consumer.start()

    # Look in the output directory
    for root, dir_names, filenames in os.walk(args.product_dir):
        LOGGER.debug('root: {0}, dir_names: {1}, filenames: {2}'.format(
            root, dir_names, filenames))
        # Measurement sets are directories, so match against dir_names
        for match in fnmatch.filter(dir_names,
                                    '13B-266*calibrated_deepfield.ms'):
            result_dir = join(root, match)
            LOGGER.info('Queuing result_dir: {0}'.format(result_dir))

            queue.put(
                CopyTask(args.bucket, match, result_dir,
                         args.aws_access_key_id, args.aws_secret_access_key))

    # Add a poison pill to shut things down
    for x in range(PROCESSES):
        queue.put(None)

    # Wait for the queue to terminate
    queue.join()
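
CopyTask receives the AWS credentials explicitly, presumably so each worker process can open its own S3 connection rather than share one across a fork. A hypothetical upload sketch in boto 2 style; the constructor shape matches the call above, everything else is an assumption.

import os
import shutil

import boto


class CopyTask(object):
    """Hypothetical upload task: archive one measurement set and push it to S3."""

    def __init__(self, bucket_name, key_name, result_dir,
                 aws_access_key_id, aws_secret_access_key):
        self._bucket_name = bucket_name
        self._key_name = key_name
        self._result_dir = result_dir
        self._aws_access_key_id = aws_access_key_id
        self._aws_secret_access_key = aws_secret_access_key

    def __call__(self):
        # Each worker opens its own connection
        connection = boto.connect_s3(self._aws_access_key_id,
                                     self._aws_secret_access_key)
        bucket = connection.get_bucket(self._bucket_name)
        # A measurement set is a directory, so tar it before uploading
        archive = shutil.make_archive(
            self._result_dir, 'tar',
            root_dir=os.path.dirname(self._result_dir),
            base_dir=os.path.basename(self._result_dir))
        bucket.new_key(self._key_name).set_contents_from_filename(archive)
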
Code Example #4
def copy_files(s3_tag, processes):
    # Create the queue
    queue = multiprocessing.JoinableQueue()
    # Start the consumers
    for x in range(processes):
        consumer = Consumer(queue)
        consumer.start()

    # Look in the output directory
    today = datetime.date.today()
    for root, dir_names, filenames in os.walk(CHILES_LOGS):
        for match in fnmatch.filter(filenames, '*.log'):
            LOGGER.info('Looking at: {0}'.format(join(root, match)))
            queue.put(CopyTask(
                join(root, match),
                '{0}/{1}{2:02d}{3:02d}/{4}'.format(
                    s3_tag, today.year, today.month, today.day, match)))

    for root, dir_names, filenames in os.walk(BENCHMARKING_LOGS):
        for match in fnmatch.filter(filenames, '*.csv'):
            LOGGER.info('Looking at: {0}'.format(join(root, match)))
            queue.put(CopyTask(
                join(root, match),
                '{0}/{1}{2:02d}{3:02d}/{4}'.format(
                    s3_tag, today.year, today.month, today.day, match)))
        for match in fnmatch.filter(filenames, '*.log'):
            LOGGER.info('Looking at: {0}'.format(join(root, match)))
            queue.put(CopyTask(
                join(root, match),
                '{0}/{1}{2:02d}{3:02d}/{4}'.format(
                    s3_tag, today.year, today.month, today.day, match)))

    queue.put(CopyTask(
        '/var/log/chiles-output.log',
        '{0}/{1}{2:02d}{3:02d}/chiles-output.log'.format(
            s3_tag, today.year, today.month, today.day)))

    # Add a poison pill to shut things down
    for x in range(processes):
        queue.put(None)

    # Wait for the queue to terminate
    queue.join()
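
The date-stamped key prefix is rebuilt inline for every put in this example. A small refactoring sketch (not code from the project) that computes it once:

def s3_date_prefix(s3_tag, today):
    """Return a prefix such as 'my-tag/20160314' for S3 keys."""
    return '{0}/{1}{2:02d}{3:02d}'.format(
        s3_tag, today.year, today.month, today.day)

Each put would then build its key as '{0}/{1}'.format(s3_date_prefix(s3_tag, today), match).
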
Code Example #5
File: run_cvel.py, Projects: ICRAR/aws-chiles02 and astrowork/aws-chiles02
def start_servers(processes, ami_id, user_data, setup_disks, instance_type,
                  obs_ids, created_by, name, instance_details, spot_price,
                  frequency_channels, force):
    cvel_data = get_cvel()

    # Create the queue
    tasks = multiprocessing.JoinableQueue()

    # Start the consumers
    for x in range(processes):
        consumer = Consumer(tasks)
        consumer.start()

    counter = 1
    for obs_id in obs_ids:
        snapshot_id = OBS_IDS.get(obs_id)
        if snapshot_id is None:
            LOGGER.warning(
                'The obs-id: {0} does not exist in the settings file'.format(
                    obs_id))
        else:
            obs_id_dashes = obs_id.replace('_', '-')
            for frequency_groups in get_frequency_groups(
                    frequency_channels, obs_id_dashes, cvel_data, force):
                tasks.put(
                    Task(ami_id, user_data, setup_disks, instance_type, obs_id,
                         snapshot_id, created_by, name, spot_price,
                         instance_details, frequency_groups, counter))
                counter += 1

    # Add a poison pill to shut things down
    for x in range(processes):
        tasks.put(None)

    # Wait for the queue to terminate
    tasks.join()
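
OBS_IDS is a settings-file mapping from observation id to the snapshot that holds its data (presumably an EBS snapshot id, given the spot-instance parameters); the None check above guards against ids missing from that file. A purely illustrative shape, with placeholder keys and values:

# Illustrative only: observation id -> snapshot id (all values are placeholders)
OBS_IDS = {
    '13B-266_sb00000000_1': 'snap-00000000',
    '13B-266_sb00000001_2': 'snap-00000001',
}
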
Code Example #6
File: run_clean.py, Project: ICRAR/aws-chiles02
def start_servers(
        processes,
        ami_id,
        user_data,
        setup_disks,
        instance_type,
        frequency_ids,
        created_by,
        name,
        instance_details,
        spot_price):
    # Create the queue
    tasks = multiprocessing.JoinableQueue()

    # Start the consumers
    for x in range(processes):
        consumer = Consumer(tasks)
        consumer.start()

    for frequency_id in frequency_ids:
        tasks.put(
            Task(
                ami_id,
                user_data,
                setup_disks,
                instance_type,
                frequency_id,
                created_by,
                name,
                spot_price,
                instance_details))

    # Add a poison pill to shut things down
    for x in range(processes):
        tasks.put(None)

    # Wait for the queue to terminate
    tasks.join()
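
Because all of these helpers start worker processes, a calling script should invoke them under a main guard; this is required where the multiprocessing start method is spawn (for example on Windows) and harmless elsewhere. A usage sketch with placeholder arguments, using the signature from Code Example #4:

if __name__ == '__main__':
    copy_files('nightly-logs', 4)  # s3_tag and process count are placeholders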