Exemplo n.º 1
0
        def setRootLogger(quiet, debug):
            """Configure the root logger and return the master process logger.

            Every logging record propagates to the root logger, which writes
            to logs/twlog.txt with midnight rotation (or to the console when
            self.TEST is set — not really nice to read otherwise).

            :arg bool quiet: restrict output to WARNING and above
            :arg bool debug: enable DEBUG output (wins over quiet)
            :return logger: the per-process "master" logger."""

            for directory in ('logs', 'logs/processes', 'logs/tasks'):
                createLogdir(directory)

            root = logging.getLogger()
            if self.TEST:
                # during tests the console is easier to follow
                root.addHandler(logging.StreamHandler())
            else:
                handler = MultiProcessingLog('logs/twlog.txt', when='midnight')
                handler.setFormatter(logging.Formatter(
                    "%(asctime)s:%(levelname)s:%(module)s:%(message)s"))
                root.addHandler(handler)

            if debug:
                loglevel = logging.DEBUG
            elif quiet:
                loglevel = logging.WARNING
            else:
                loglevel = logging.INFO
            root.setLevel(loglevel)

            logger = setProcessLogger("master")
            logger.debug("PID %s.", os.getpid())
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 2
0
        def setRootLogger(quiet=False, debug=True, console=False):
            """Sets the root logger with the desired verbosity level.

               Every propagated logging record reaches the root logger, which
               writes to a file rotated at midnight (not really nice to read),
               or to the console when requested.

            :arg bool quiet: restrict output to WARNING and above
            :arg bool debug: enable DEBUG output (overrides quiet)
            :arg bool console: direct all output to the console rather than
                files; useful for debugging
            :return logger: a logger with the appropriate logger level."""

            createLogdir('Publisher/logs')
            createLogdir('Publisher/logs/processes')
            createLogdir('Publisher/logs/tasks')

            if console:
                # logging to the console is easier to follow while debugging
                logging.getLogger().addHandler(logging.StreamHandler())
            else:
                # NOTE(review): directories are created under 'Publisher/logs'
                # but the file handler writes to 'logs/log.txt' — confirm this
                # path mismatch is intended.
                logHandler = MultiProcessingLog('logs/log.txt', when='midnight')
                logFormatter = logging.Formatter("%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s")
                logHandler.setFormatter(logFormatter)
                logging.getLogger().addHandler(logHandler)
            # debug wins over quiet because it is checked last
            loglevel = logging.INFO
            if quiet:
                loglevel = logging.WARNING
            if debug:
                loglevel = logging.DEBUG
            logging.getLogger().setLevel(loglevel)
            logger = setProcessLogger("master")
            logger.debug("PID %s.", os.getpid())
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 3
0
        def getLogging(quiet, debug):
            """Retrieve the root logger configured at the proper level.

            Writes to twlog.log with midnight rotation, or to the console
            when self.TEST is set.

            :arg bool quiet: restrict output to WARNING and above
            :arg bool debug: enable DEBUG output (wins over quiet)
            :return logger: the root logger at the appropriate level."""

            if self.TEST:
                #if we are testing log to the console is easier
                logging.getLogger().addHandler(logging.StreamHandler())
            else:
                logHandler = MultiProcessingLog('twlog.log', when="midnight")
                logFormatter = \
                    logging.Formatter("%(asctime)s:%(levelname)s:%(module)s:%(message)s")
                logHandler.setFormatter(logFormatter)
                logging.getLogger().addHandler(logHandler)
            loglevel = logging.INFO
            if quiet:
                loglevel = logging.WARNING
            if debug:
                loglevel = logging.DEBUG
            logging.getLogger().setLevel(loglevel)
            logger = logging.getLogger()
            # lazy %-style arguments: the message is only formatted when the
            # DEBUG level is actually enabled (was an eager "%" interpolation)
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 4
0
        def getLogging(quiet, debug):
            """Retrieve the root logger configured at the proper level.

            Writes to twlog.log with midnight rotation, or to the console
            when self.TEST is set.

            :arg bool quiet: restrict output to WARNING and above
            :arg bool debug: enable DEBUG output (wins over quiet)
            :return logger: the root logger at the appropriate level."""

            if self.TEST:
                #if we are testing log to the console is easier
                logging.getLogger().addHandler(logging.StreamHandler())
            else:
                logHandler = MultiProcessingLog('twlog.log', when="midnight")
                logFormatter = \
                    logging.Formatter("%(asctime)s:%(levelname)s:%(module)s:%(message)s")
                logHandler.setFormatter(logFormatter)
                logging.getLogger().addHandler(logHandler)
            loglevel = logging.INFO
            if quiet:
                loglevel = logging.WARNING
            if debug:
                loglevel = logging.DEBUG
            logging.getLogger().setLevel(loglevel)
            logger = logging.getLogger()
            # lazy %-style arguments: the message is only formatted when the
            # DEBUG level is actually enabled (was an eager "%" interpolation)
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
def main(args):
    """Scan args.source_dir for *C01.tif images and process each one with
    check_site_and_move_star in a multiprocessing pool (optionally moving
    files into args.target_dir when args.move is set)."""

    # setup logging
    log_format = logging.Formatter(
        '%(asctime)s %(levelname)s | %(filename)s/%(funcName)s: %(message)s')
    log_path = os.path.join(
        args.source_dir,
        'find-empty-sites-' + time.strftime('%Y%m%d-%H%M%S') + '.log')
    mp_log = MultiProcessingLog(log_path, 'w', 0, 0)
    mp_log.setFormatter(log_format)
    logger.addHandler(mp_log)

    if args.move and not os.path.exists(args.target_dir):
        os.makedirs(args.target_dir)

    images = [os.path.basename(full_path)
              for full_path in glob.glob(args.source_dir + '*C01.tif')]
    logger.debug('found %d images in %s', len(images), args.source_dir)

    # one (source_dir, image, target_dir, move-flag) tuple per image
    function_args = zip([args.source_dir] * len(images),
                        images,
                        [args.target_dir] * len(images),
                        [args.move] * len(images))

    # fan the work out over a multiprocessing pool
    pool = mp.Pool()
    pool.map(check_site_and_move_star, function_args)
    pool.close()
    pool.join()

    return
Exemplo n.º 6
0
        def setRootLogger(logWarning, logDebug, console):
            """Configure the root logger and return the master process logger.

            All records propagate to the root logger, which writes to
            <logsDir>/twlog.txt with midnight rotation (or to the console
            when requested).

            :arg bool logWarning: restrict output to WARNING and above
            :arg bool logDebug: enable DEBUG output (wins over logWarning)
            :arg bool console: log to the console instead of a file
            :return logger: the per-process "master" logger."""

            logsDir = config.TaskWorker.logsDir
            for subdir in (logsDir, logsDir + '/processes', logsDir + '/tasks'):
                createLogdir(subdir)

            root = logging.getLogger()
            if console:
                root.addHandler(logging.StreamHandler())
            else:
                fileHandler = MultiProcessingLog(logsDir + '/twlog.txt',
                                                 when='midnight')
                fileHandler.setFormatter(logging.Formatter(
                    "%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s"))
                root.addHandler(fileHandler)

            if logDebug:
                loglevel = logging.DEBUG
            elif logWarning:
                loglevel = logging.WARNING
            else:
                loglevel = logging.INFO
            root.setLevel(loglevel)

            logger = setProcessLogger("master", logsDir)
            logger.debug("PID %s.", os.getpid())
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 7
0
        def setRootLogger(quiet, debug):
            """Configure the root logger and return the master process logger.

            Every logging record propagates to the root logger, which writes
            to logs/log.txt with midnight rotation.

            :arg bool quiet: restrict output to WARNING and above
            :arg bool debug: enable DEBUG output (wins over quiet)
            :return logger: the per-process "master" logger."""

            for directory in ('logs', 'logs/processes', 'logs/tasks'):
                createLogdir(directory)

            handler = MultiProcessingLog('logs/log.txt', when='midnight')
            handler.setFormatter(logging.Formatter(
                "%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s"))
            root = logging.getLogger()
            root.addHandler(handler)

            if debug:
                loglevel = logging.DEBUG
            elif quiet:
                loglevel = logging.WARNING
            else:
                loglevel = logging.INFO
            root.setLevel(loglevel)

            logger = setProcessLogger("master")
            logger.debug("PID %s.", os.getpid())
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 8
0
        def setRootLogger(quiet, debug):
            """
            Taken from CRABServer TaskWorker
            Sets the root logger with the desired verbosity level
               The root logger logs to logs/asolog.txt and every single
               logging instruction is propagated to it (not really nice
               to read)

            :arg bool quiet: it tells if a quiet logger is needed
            :arg bool debug: it tells if needs a verbose logger
            :return logger: a logger with the appropriate logger level."""

            createLogdir('logs')

            if self.TEST:
                # if we are testing log to the console is easier
                logging.getLogger().addHandler(logging.StreamHandler())
            else:
                # NOTE(review): the docstring mentions logs/asolog.txt but the
                # handler actually writes logs/monitor.txt — confirm which is
                # intended.
                logHandler = MultiProcessingLog('logs/monitor.txt',
                                                when='midnight')
                logFormatter = \
                    logging.Formatter("%(asctime)s:%(levelname)s:%(module)s:%(message)s")
                logHandler.setFormatter(logFormatter)
                logging.getLogger().addHandler(logHandler)
            # debug wins over quiet because it is checked last
            loglevel = logging.INFO
            if quiet:
                loglevel = logging.WARNING
            if debug:
                loglevel = logging.DEBUG
            logging.getLogger().setLevel(loglevel)
            logger = setProcessLogger("master")
            logger.debug("PID %s.", os.getpid())
            logger.debug("Logging level initialized to %s.", loglevel)
            return logger
def main(args):
    """Find every .tif file under args.source_dir, mirror the source
    directory structure under args.output_dir, and process each file in a
    multiprocessing pool via convert_single_site_star (presumably TIFF→PNG
    conversion, judging by the log file name — confirm against the worker).
    """

    # setup logging
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    mp_log = MultiProcessingLog(
        path.join(args.output_dir,
                  'png_conversion-' + time.strftime('%Y%m%d-%H%M%S') + '.log'),
        'w', 0, 0)
    mp_log.setFormatter(formatter)
    logger.addHandler(mp_log)

    tiff_paths = []
    output_dirs = []  # i-th entry is the output directory for tiff_paths[i]

    # find tiff files and keep track of the source directory structure
    for root, dirs, files in walk(args.source_dir):
        for fyle in files:
            if fyle.endswith('.tif'):
                tiff_paths.append(path.join(root, fyle))

                # Hack to catch inputs ending with and without a /:
                # strip the source prefix (plus its separator) from `root`
                # so the remainder can be re-rooted under args.output_dir
                if args.source_dir.endswith('/'):
                    soure_path_length = len(args.source_dir)
                else:
                    soure_path_length = len(args.source_dir) + 1
                output_dirs.append(
                    path.join(args.output_dir, root[soure_path_length:]))

    # re-create the directory structure in output_dir
    unique_dirs = list(set(output_dirs))
    logger.info('found %d TIFF files in subfolders: %s', len(tiff_paths),
                unique_dirs)
    for d in unique_dirs:
        if not path.exists(d):
            logger.info('creating output directory: %s', d)
            makedirs(d)
        else:
            logger.info('output directory %s already exists', d)

    # generate list of tuples containing input/output pairs
    convert_args = zip(tiff_paths, output_dirs)

    # use a multi-processing pool to get the work done
    pool = mp.Pool()
    pool.map(convert_single_site_star, convert_args)
    pool.close()
    pool.join()

    return
Exemplo n.º 10
0
def main(args):
    """Hard-link images from args.source_dir into args.target_dir, well by
    well, using the (source, link) name pairs produced by links_per_well."""

    if not os.path.exists(args.target_dir):
        os.makedirs(args.target_dir)

    # set up file logging in the target directory
    log_name = 'modify-site-order-' + time.strftime('%Y%m%d-%H%M%S') + '.log'
    mp_log = MultiProcessingLog(os.path.join(args.target_dir, log_name),
                                'w', 0, 0)
    mp_log.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s | %(filename)s/%(funcName)s: %(message)s'))
    logger.addHandler(mp_log)

    # all channel-01 image basenames, sorted
    all_filenames_C01 = sorted(
        os.path.basename(full_path)
        for full_path in glob.glob(args.source_dir + '*C01.tif'))

    # unique well identifiers extracted via the module-level pattern
    matches = (re.match(pattern, f) for f in all_filenames_C01)
    unique_well_names = sorted({m.group('well') for m in matches if m})

    # collect (source, link) filename pairs, one well at a time
    files_to_link = []
    for well in unique_well_names:
        # split list by well name
        well_regex = re.compile('.*_' + well + '_')
        fnames_C01 = filter(well_regex.match, all_filenames_C01)
        files_to_link.extend(links_per_well(args.source_dir, fnames_C01))

    for src_name, link_name in files_to_link:
        logger.info('creating %s as link to %s',
                    os.path.join(args.target_dir, link_name),
                    os.path.join(args.source_dir, src_name))
        os.link(os.path.join(args.source_dir, src_name),
                os.path.join(args.target_dir, link_name))

    return
Exemplo n.º 11
0
def setup_logger(args):
    """Attach a timestamped MultiProcessingLog file handler to the module
    logger and set its level from the command-line arguments.

    :arg args: parsed CLI arguments; args.verbose > 0 enables DEBUG,
        otherwise the level is INFO."""
    global logger

    logfile = os.path.join(os.getcwd(),
                           'popcon-' + time.strftime('%Y%m%d-%H%M%S') + '.log')
    mp_log = MultiProcessingLog(logfile, 'w', 0, 0)
    formatter = logging.Formatter(
        '%(asctime)s [%(thread)d] %(funcName)s %(levelname)s: %(message)s')
    mp_log.setFormatter(formatter)
    logger.addHandler(mp_log)

    logger.setLevel(logging.INFO)
    if args.verbose > 0:
        logger.setLevel(logging.DEBUG)
    # call form works on both Python 2 and 3; the original Python 2
    # `print 'x'` statement is a SyntaxError on Python 3
    print('Logging to {} at level {}'.format(
        logfile, logging.getLevelName(logger.getEffectiveLevel())))
    return
Exemplo n.º 12
0
        def setRootLogger(logWarning, logDebug, console, name):
            """Configure the root logger and return a named process logger.

            All records propagate to the root logger, which writes to
            <logsDir>/twlog.txt with midnight rotation (or to the console
            when requested).

            :arg bool logWarning: restrict output to WARNING and above
            :arg bool logDebug: enable DEBUG output (wins over logWarning)
            :arg bool console: log to the console instead of a file
            :arg string name: name for the log file of this master process
            :return logger: a logger with the appropriate logger level."""

            logsDir = config.TaskWorker.logsDir
            # only the real Master cleans the log directories, not callers
            # such as TapeRecallStatus that reuse this function
            if name == 'master':
                createAndCleanLogDirectories(logsDir)

            root = logging.getLogger()
            if console:
                root.addHandler(logging.StreamHandler())
            else:
                fileHandler = MultiProcessingLog(logsDir + '/twlog.txt',
                                                 when='midnight')
                fileHandler.setFormatter(logging.Formatter(
                    "%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s"))
                root.addHandler(fileHandler)

            if logDebug:
                loglevel = logging.DEBUG
            elif logWarning:
                loglevel = logging.WARNING
            else:
                loglevel = logging.INFO
            root.setLevel(loglevel)

            logger = setProcessLogger(name, logsDir)
            logger.info("PID %s.", os.getpid())
            logger.info("Logging level initialized to %s.", loglevel)
            return logger
Exemplo n.º 13
0
import warnings
import logging
import itertools
import skimage.io
import numpy as np
from os import path, getcwd
import multiprocessing as mp
from MultiProcessingLog import MultiProcessingLog

# silence all library warnings for this batch job
warnings.filterwarnings('ignore')

# Module-level logger setup: the root logger writes every record at INFO
# and above through a single MultiProcessingLog handler, appending to
# StackToMaxSum.log in the current working directory.
logger = logging.getLogger()
mp_log = MultiProcessingLog(path.join(getcwd(), 'StackToMaxSum.log'), 'a', 0,
                            0)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
mp_log.setFormatter(formatter)
logger.addHandler(mp_log)
logger.setLevel(logging.INFO)


def project_single_site(base_dir, fname_stub, well_name, timeline_name, field,
                        l_name, action_name, input_channel_name,
                        output_channel_names, z_planes, input_dir, output_dir):

    logger.info('Processing channel %s, well %s, site %d', input_channel_name,
                well_name, field)
    field_name = 'F' + str(field).zfill(3)

    image_names = []
    for z in range(1, z_planes + 1):
        z_name = 'Z' + str(z).zfill(2)
Exemplo n.º 14
0
def main(args):
    """Pad every well found in args.source_dir up to a uniform site count by
    hard-linking placeholder images from args.empty_dir into args.target_dir.

    The target site count is the first entry of possible_site_numbers that is
    >= the largest site number observed in any well; missing sites (including
    gaps below a well's own maximum) are filled with channels linked from a
    randomly chosen empty site.
    """

    if not os.path.exists(args.target_dir):
        os.makedirs(args.target_dir)

    # setup logging
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s | %(filename)s/%(funcName)s: %(message)s')
    mp_log = MultiProcessingLog(
        os.path.join(
            args.target_dir, 'extend-wells-to-max-site-number' +
            time.strftime('%Y%m%d-%H%M%S') + '.log'), 'w', 0, 0)
    mp_log.setFormatter(formatter)
    logger.addHandler(mp_log)

    # get all names for channel 01
    all_filenames_C01 = sorted([
        os.path.basename(full_path)
        for full_path in glob.glob(args.source_dir + '*C01.png')
    ])

    # get unique well and sites
    # NOTE(review): assumes the module-level `pattern` regex defines named
    # groups 'well', 'site', 'stem', 't', 'l', 'a', 'z' and 'c' (all used
    # below) — confirm against its definition elsewhere in this module
    well_site_names = [(m.group('well'), int(m.group('site')))
                       for f in all_filenames_C01
                       for m in [re.match(pattern, f)] if m]

    # find number of sites per well
    unique_wells = sorted(list(set([i[0] for i in well_site_names])))
    logger.info('found %d wells in %s', len(unique_wells), args.source_dir)
    max_sites_per_well = []
    for well in unique_wells:
        sites = [x for x in well_site_names if x[0] == well]
        # (well, site) pair with the highest site number for this well
        n = max(sites, key=itemgetter(1))
        max_sites_per_well.append(n)
        logger.debug('well %s has maximum site number of %d', well, n[1])

    # find maximum number of sites and select the next largest value from list
    required_sites = max(max_sites_per_well, key=itemgetter(1))[1]
    n_sites = next(v for i, v in enumerate(possible_site_numbers)
                   if v >= required_sites)
    logger.info(
        '%d sites are required, generating links for %d sites per well',
        required_sites, n_sites)

    # generate a list of (well, site) pairs to add
    # also include possibly missing sites < max number of sites per well
    well_site_names_to_add = []
    for well in unique_wells:
        existing_sites = set([x[1] for x in well_site_names if x[0] == well])
        all_required_sites = set(range(1, n_sites + 1))
        well_site_names_to_add.extend([
            (well, s) for s in all_required_sites.difference(existing_sites)
        ])

    # convert this list to filenames for all channels
    # 1. use the first site as a template for new filenames
    # 2. randomly select a site from the empty directory and
    #    link the corresponding channels to that site
    filenames_single_site = sorted(
        list_all_files_same_site(args.source_dir, all_filenames_C01[0]))
    files_to_link = []
    empty_files = [
        os.path.basename(full_path)
        for full_path in glob.glob(args.empty_dir + '*.png')
    ]

    logger.debug('selecting random sites from %s', args.empty_dir)
    for well, site in well_site_names_to_add:
        # one random placeholder site per missing (well, site) pair
        empty_site = re.match(pattern, random.choice(empty_files))
        for basefile in filenames_single_site:
            new_site = re.match(pattern, basefile)
            # link name keeps the template's stem/T/L/A/Z/C fields but
            # substitutes the missing well and (zero-padded) site number
            link_name = (new_site.group('stem') + '_' + well + '_T' +
                         new_site.group('t') + 'F' + str(site).zfill(3) + 'L' +
                         new_site.group('l') + 'A' + new_site.group('a') +
                         'Z' + new_site.group('z') + new_site.group('c') +
                         '.png')

            # add a couple of catches to account for differing dimensions between sites
            if (int(new_site.group('z')) < 20) and (new_site.group('c')
                                                    == 'C04'):
                z_plane = new_site.group('z')
            elif (int(new_site.group('z')) < 40) and (new_site.group('c')
                                                      == 'C03'):
                z_plane = new_site.group('z')
            else:
                z_plane = '01'

            source_file = (empty_site.group('stem') + '_' +
                           empty_site.group('well') + '_T' +
                           new_site.group('t') + 'F' +
                           empty_site.group('site') + 'L' +
                           new_site.group('l') + 'A' + new_site.group('a') +
                           'Z' + z_plane + new_site.group('c') + '.png')
            files_to_link.append((source_file, link_name))

    for source, link in files_to_link:
        logger.info('creating hard link %s to file %s',
                    os.path.join(args.target_dir, link),
                    os.path.join(args.empty_dir, source))
        try:
            os.link(os.path.join(args.empty_dir, source),
                    os.path.join(args.target_dir, link))
        except OSError:
            # best effort: a failed link (e.g. pre-existing target) is only
            # logged, not fatal
            logger.warning('could not link %s to %s',
                           os.path.join(args.target_dir, link),
                           os.path.join(args.empty_dir, source))
            # NOTE(review): this `pass` is redundant — the handler already
            # ends with the warning above
            pass

    return