Example #1
    def copy_to_bin(self, src_dir, wildcard, libaccessom2_src=None):
        exes = glob.glob(wildcard)
        if len(exes) != 1:
            print("Error: copy_to_bin can't find one {}".format(wildcard),
                  file=sys.stderr)
            return None, 1
        exe = exes[0]

        ghash = get_git_hash(src_dir)
        if libaccessom2_src:
            libaccessom2_hash = get_git_hash(libaccessom2_src)
        else:
            libaccessom2_hash = None

        eb = os.path.basename(exe)
        if libaccessom2_hash:
            new_name = '{}_{}_libaccessom2_{}.{}'.format(
                eb.split('.')[0], ghash, libaccessom2_hash,
                eb.split('.')[1])
        else:
            new_name = '{}_{}.{}'.format(
                eb.split('.')[0], ghash,
                eb.split('.')[1])
        dest = os.path.join(self.bin_path, new_name)
        if os.path.exists(dest):
            os.remove(dest)

        shutil.copy(exe, dest)
        shutil.chown(dest, group='v45')
        perms = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR \
                 | stat.S_IXGRP | stat.S_IXOTH
        os.chmod(dest, perms)

        return dest, 0
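
The examples in this listing only show call sites for get_git_hash; its body is not part of the page. A minimal sketch of such a helper, assuming it simply shells out to git via subprocess (the signature and the None-on-failure behaviour are inferred from the call sites, not taken from any of the original repositories):

import subprocess


def get_git_hash(src_dir='.'):
    # Hypothetical helper: return the current commit hash of the
    # repository at src_dir, or None if git fails or is unavailable.
    try:
        return subprocess.check_output(
            ['git', 'rev-parse', 'HEAD'],
            cwd=src_dir, stderr=subprocess.DEVNULL).decode().strip()
    except (OSError, subprocess.CalledProcessError):
        return None
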
Example #2
    def save_info(self, tablet: str, start: int, end: int):
        hash = get_git_hash()
        if hash is not None and get_git_changes():
            hash = hash + " (modified)"
        self.cursor.execute(
            "INSERT INTO db_info (tablet, start_year, end_year, git) VALUES (?, ?, ?, ?)",
            (tablet, start, end, hash))
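
Example #2 also calls get_git_changes() to mark a dirty working tree. A possible sketch, assuming it reports whether uncommitted changes exist by checking `git status --porcelain` (the name and boolean usage are inferred from the call site above, not from the original code):

import subprocess


def get_git_changes(src_dir='.'):
    # Hypothetical helper: True if the working tree at src_dir has
    # uncommitted changes, False if it is clean.
    status = subprocess.check_output(
        ['git', 'status', '--porcelain'], cwd=src_dir)
    return bool(status.strip())
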
Example #3
    def copy_to_bin(self, src_dir, wildcard):
        exes = glob.glob(wildcard)
        if not exes:
            return 1

        ghash = get_git_hash(src_dir)

        for e in exes:
            eb = os.path.basename(e)
            new_name = '{}_{}.{}'.format(
                eb.split('.')[0], ghash,
                eb.split('.')[1])
            shutil.copy(e, os.path.join(self.bin_path, new_name))

        return 0
Example #4
    def copy_to_bin(self, src_dir, wildcard):
        exes = glob.glob(wildcard)
        if len(exes) != 1:
            return None, 1
        exe = exes[0]

        ghash = get_git_hash(src_dir)

        eb = os.path.basename(exe)
        new_name = '{}_{}.{}'.format(eb.split('.')[0], ghash, eb.split('.')[1])
        dest = os.path.join(self.bin_path, new_name)
        if os.path.exists(dest):
            os.remove(dest)

        shutil.copy(exe, dest)
        shutil.chown(dest, group='v45')
        perms = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR \
                 | stat.S_IXGRP | stat.S_IXOTH
        os.chmod(dest, perms)

        return dest, 0
Example #5
    def copy_to_bin(self, src_dir, wildcard):
        exes = glob.glob(wildcard)
        if len(exes) != 1:
            return None, 1
        exe = exes[0]

        ghash = get_git_hash(src_dir)

        eb = os.path.basename(exe)
        new_name = '{}_{}.{}'.format(eb.split('.')[0], ghash,
                                     eb.split('.')[1])
        dest = os.path.join(self.bin_path, new_name)
        if os.path.exists(dest):
            os.remove(dest)

        shutil.copy(exe, dest)
        shutil.chown(dest, group='v45')
        perms = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR \
                 | stat.S_IXGRP | stat.S_IXOTH
        os.chmod(dest, perms)

        return dest, 0
Example #6
def main():
    # Get start time
    start_time_str = time.strftime('%Y%m%d_%H%M%S')

    # Get a list of available modules
    module_list = 'Data capture modules:\n'

    for m in dir(data_capture):
        m = util.class_from_str("data_capture.{}".format(m), __name__)
        if m is not data_capture.DataCapture and inspect.isclass(m) and issubclass(m, data_capture.DataCapture):
            module_list += "\t{}\n".format(str(m).split('.', 1)[-1])

    module_list += '\nExperiment modules:\n'

    for m in dir(experiment):
        m = util.class_from_str("experiment.{}".format(m), __name__)
        if m is not experiment.Experiment and inspect.isclass(m) and issubclass(m, experiment.Experiment):
            module_list += "\t{}\n".format(str(m).split('.', 1)[-1])

    module_list += '\nPost-processor modules:\n'

    for m in dir(post_processor):
        m = util.class_from_str("post_processor.{}".format(m), __name__)
        if m is not post_processor.PostProcessor and inspect.isclass(m) and issubclass(m, post_processor.PostProcessor):
            module_list += "\t{}\n".format(str(m).split('.', 1)[-1])

    # Parse command line arguments
    parse = argparse.ArgumentParser(description='Experiment System',
                                    formatter_class=argparse.RawDescriptionHelpFormatter, epilog=module_list)

    parse.add_argument('name', help='Name for experiment (prefix for results folder)')
    parse.add_argument('experiment', help='Experiment class to run')
    parse.add_argument('capture', help='DataCapture class to run')
    parse.add_argument('config', help='Configuration file(s)', nargs='+')

    parse.add_argument('-p', '--post', help='Optional data post-processing class', dest='post', action='append')
    parse.add_argument('-v', help='Verbose output', dest='verbose', action='store_true')
    parse.add_argument('--dry-run', help='Run without regulating experiment conditions', dest='dry_run',
                       action='store_true')
    parse.add_argument('--pushover', help='Send notifications using pushover service', dest='notify',
                       action='store_true')
    parse.add_argument('--lock', help='Lock the front panels of test equipment', dest='lock', action='store_true')
    parse.add_argument('--visa', help='Display VISA traffic in console', dest='visa', action='store_true')
    parse.set_defaults(verbose=False)
    parse.set_defaults(notemp=False)
    parse.set_defaults(notify=False)
    parse.set_defaults(lock=False)
    parse.set_defaults(visa=False)

    args = parse.parse_args()

    # Read configuration file(s)
    cfg = ConfigParser.RawConfigParser()
    cfg.read(args.config)

    # Check paths
    result_dir = cfg.get('path', 'result')

    if not os.path.isdir(result_dir):
        raise IOError('Result path is not a directory')

    if not os.access(result_dir, os.W_OK):
        raise IOError('Result path is not writable')

    result_dir = os.path.realpath(os.path.join(result_dir, '_'.join([args.name, start_time_str])))

    # Log file can be defined in a separate path, but defaults to the results directory
    if 'log' in [x[0] for x in cfg.items('path')]:
        log_dir = cfg.get('path', 'log')

        if not os.path.isdir(log_dir):
            raise IOError('Log path is not a directory')

        if not os.access(log_dir, os.W_OK):
            raise IOError('Log path is not writable')
    else:
        # Save log in results folder
        log_dir = result_dir

    log_file_path = os.path.join(os.path.realpath(log_dir), 'log_%s.txt' % start_time_str)

    os.mkdir(result_dir)

    # Setup logging
    # log_handle_console = logging.StreamHandler()
    log_handle_console = colorlog.ColorizingStreamHandler(sys.stdout)
    log_handle_console.setLevel(logging.DEBUG if args.verbose else logging.INFO)
    log_format_console = logging.Formatter(fmt='%(asctime)s [%(levelname)-5s] %(name)s: %(message)s',
                                           datefmt='%H:%M:%S')
    log_handle_console.setFormatter(log_format_console)

    log_handle_file = logging.FileHandler(log_file_path)
    log_handle_file.setLevel(logging.DEBUG)
    log_format_file = logging.Formatter(fmt='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
                                        datefmt='%Y%m%d %H:%M:%S')
    log_handle_file.setFormatter(log_format_file)

    # Get all loggers needed
    root_logger = logging.getLogger(__name__)
    data_logger = logging.getLogger(data_capture.__name__)
    equipment_logger = logging.getLogger(equipment.__name__)
    experiment_logger = logging.getLogger(experiment.__name__)
    regulator_logger = logging.getLogger(regulator.__name__)
    temperature_logger = logging.getLogger(templogger.__name__)
    mks_logger = logging.getLogger(mks.__name__)
    post_processor_logger = logging.getLogger(post_processor.__name__)

    # Set defaults
    for logger in [root_logger, data_logger, equipment_logger, experiment_logger, regulator_logger, temperature_logger,
                   mks_logger, post_processor_logger]:
        logger.handlers = []
        logger.setLevel(logging.DEBUG)
        logger.addHandler(log_handle_console)
        logger.addHandler(log_handle_file)

    if not args.visa:
        equipment_logger.removeHandler(log_handle_console)
        temperature_logger.removeHandler(log_handle_console)

    root_logger.info("jtfadump | git hash: {}".format(util.get_git_hash()))
    root_logger.info("Launch command: {}".format(' '.join(sys.argv)))
    root_logger.info("python {}".format(sys.version))
    root_logger.info("pyvisa {}".format(pyvisa.__version__))
    root_logger.info("Started: {}".format(time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime())))
    root_logger.info("Logging path: {}".format(log_file_path))
    root_logger.info("Result directory: {}".format(result_dir))

    # Dump configuration to log
    root_logger.debug("--- BEGIN CONFIGURATION LISTING ---")

    for section in cfg.sections():
        root_logger.debug("[{}]".format(section))

        for item in cfg.items(section):
            root_logger.debug("{}: {}".format(item[0], item[1]))

    root_logger.debug("--- END CONFIGURATION LISTING ---")

    # Setup experiment
    try:
        root_logger.info("Loading experiment: {}".format(args.experiment))
        experiment_class = util.class_from_str("experiment.{}".format(args.experiment), __name__)
        run_exp = experiment_class(args, cfg, result_dir)
    except:
        root_logger.exception('Exception while loading experiment class', exc_info=True)
        return

    # Setup data capture
    try:
        root_logger.info("Loading data capture: {}".format(args.capture))
        data_capture_class = util.class_from_str("data_capture.{}".format(args.capture), __name__)
        run_data_capture = data_capture_class(args, cfg, result_dir)
    except:
        root_logger.exception('Exception while loading data capture class', exc_info=True)
        run_exp.stop()
        return
        
    # Setup notification if required
    notify = None

    if args.notify:
        notify = pushover.Client(user_key=cfg.get('pushover', 'user_key'), api_token=cfg.get('pushover', 'api_key'))
        try:
            notify.send_message("Experiment: {}\nData capture: {}".format(args.experiment, args.capture),
                                title='jtfadump Started')
        except requests.exceptions.ConnectionError:
            root_logger.warning('Failed to send Pushover start notification')

    # Add post-processors
    try:
        if args.post is not None:
            for post_class in args.post:
                root_logger.info("Loading post-processor: {}".format(post_class))
                post_processor_class = util.class_from_str("post_processor.{}".format(post_class), __name__)

                if data_capture_class in post_processor_class.get_supported_data_capture():
                    run_post_processor = post_processor_class(run_exp, run_data_capture, cfg, notify)
                    run_data_capture.add_post_processor(run_post_processor)
                else:
                    root_logger.warning("{} does not support data capture {}".format(post_class, data_capture_class))
    except:
        root_logger.exception('Exception while loading post processor class', exc_info=True)
        run_exp.stop()
        return

    # Make sure log file is written before beginning
    log_handle_file.flush()

    # Run the experiment
    # try:
    #     with open(_LOOP_STATE_FILE, 'r') as f:
    #         run_exp.set_remaining_loops(pickle.load(f))
    #         root_logger.info("Loaded existing loop counter from file")
    # except:
    #     root_logger.info("No existing state")

    loop = 0
    loop_runtime = []

    try:
        while run_exp.is_running():
            loop_start_time = time.time()

            capture_id = util.rand_hex_str()
            root_logger.info("Experiment step {} ({} remaining): {}".format(loop, run_exp.get_remaining_loops(),
                                                                            capture_id))

            # Update experimental parameters
            run_exp.step()

            # Capture data from experiment
            run_data_capture.save(capture_id, run_exp)
            run_exp.finish_loop()

            # Show time statistics
            loop_time = time.time() - loop_start_time
            loop_runtime.append(loop_time)

            loop_time_avg = sum(loop_runtime) / len(loop_runtime)
            loop_hours, r = divmod(loop_time_avg, 3600)
            loop_mins, loop_secs = divmod(r, 60)

            root_logger.info("Average loop runtime: {}:{}:{}".format(int(loop_hours), int(loop_mins), round(loop_secs,
                                                                                                            3)))

            if run_exp.get_remaining_loops() is not False:
                loop_est_maxtime = loop_time_avg * run_exp.get_remaining_loops()
                loop_est = datetime.datetime.now() + datetime.timedelta(seconds=loop_est_maxtime)

                root_logger.info("Estimated completion {:%Y-%m-%d %H:%M:%S}".format(loop_est))

            loop += 1

            # with open(_LOOP_STATE_FILE, 'w') as f:
            #    pickle.dump(run_exp.get_remaining_loops(), f)

        try:
            os.unlink(_LOOP_STATE_FILE)
        except:
            pass

        root_logger.info('Experiment loop exited normally')
        
        if notify:
            notify.send_message("Experiment stopped normally after {} loop{}".format(loop, '' if loop == 1 else 's'),
                                title='jtfadump Stopped')
    except (KeyboardInterrupt, SystemExit):
        root_logger.exception('User terminated experiment', exc_info=True)
    except:
        root_logger.exception('Error while running experiment', exc_info=True)

        if notify:
            notify.send_message("Exception occurred during experiment! Traceback:\n{}".format(traceback.format_exc()),
                                title='jtfadump Exception')
    finally:
        try:
            run_exp.stop()
        except:
            root_logger.exception('Error while stopping experiment', exc_info=True)

            if notify:
                notify.send_message("Exception occurred while stopping experiment! Traceback:\n{}".format(
                    traceback.format_exc()), title='jtfadump Exception')

    root_logger.info('jtfadump exiting')
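
Example #6 discovers its DataCapture, Experiment and PostProcessor classes with util.class_from_str. A minimal sketch of what such a resolver might look like, assuming it splits a dotted "module.Name" string and resolves it with importlib; the second argument is treated here as an optional package anchor, which is an assumption rather than the original implementation:

import importlib


def class_from_str(class_path, package=None):
    # Hypothetical helper: resolve a dotted string such as
    # "experiment.MyExperiment" to the attribute it names.
    module_name, _, attr_name = class_path.rpartition('.')
    module = importlib.import_module(module_name, package)
    return getattr(module, attr_name)
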
Example #7
def main():
    app_path = os.path.dirname(os.path.realpath(__file__))

    try:
        app_git_hash = util.get_git_hash()
    except OSError:
        app_git_hash = 'not found'

    parse = argparse.ArgumentParser(description='Tool to monitor MCS output')

    parse.add_argument('-c', '--config', help='Path to config file', dest='config_path',
                       default=os.path.join(app_path, _DEFAULT_CONFIG_FILE))
                       
    args = parse.parse_args()
    
    # Read configuration
    with open(args.config_path, 'r') as f:
        config_dict = json.load(f)
        logging_dict = config_dict.pop('log')

        _app_cfg.update(config_dict)
        logging.config.dictConfig(logging_dict)

    # Setup logging
    root_logger = logging.getLogger('main')
    
    root_logger.info("mcs_watch {} | git: {}".format(__version__, app_git_hash))
    root_logger.info("Launch command: {}".format(' '.join(sys.argv)))
    
    # Pushover (if enabled)
    notify = None
    
    if 'pushover' in _app_cfg:
        root_logger.info('Pushover enabled')
        notify = pushover.Client(user_key=_app_cfg['pushover']['user_key'], api_token=_app_cfg['pushover']['api_key'])
        
    # Open serial port
    port = serial.Serial(_app_cfg['serial']['port'], _app_cfg['serial']['speed'],
                         timeout=_app_cfg['serial']['timeout'])

    root_logger.info('Wait for initial packet')

    # Wait for end of packet before reading full packets
    while port.read() != '\n':
        pass
    
    if notify is not None:
        notify.send_message('Experiment started', title='mcs_watch')

    root_logger.info('Packet received!')

    log_name = os.path.abspath(os.path.join(_app_cfg['output']['path'], "{}_{}.{}".format(
            _app_cfg['output']['prefix'], time.strftime('%Y%m%d%H%M%S'), _app_cfg['output']['extension'])))
    
    line_buffer = ''
    tick_counter = 0

    root_logger.info("Writing to file: {}".format(log_name))
    
    gas_flag = 0

    with open(log_name, 'w') as f:
        try:
            while True:
                # Read byte
                c = port.read()
                
                # root_logger.debug('RX: {}'.format(c))

                # Check for end of line
                if len(c) > 0:
                    # Received byte, reset counter
                    tick_counter = 0
                    
                    if c == '\n' or c == '\r':
                        if len(line_buffer) > 0:
                            # Process line
                            root_logger.info("Packet: {}".format(line_buffer))

                            mks_fields = line_buffer.split(_app_cfg['serial']['mcs_delim'])

                            # Validate number of fields
                            if len(mks_fields) != _app_cfg['serial']['mcs_fields']:
                                root_logger.warn("ERROR: Expected {} fields but got {}!".format(
                                        _app_cfg['serial']['mcs_fields'], len(mks_fields)))
                            else:
                                # Calculate clock offset
                                try:
                                    mcs_time = datetime.strptime(mks_fields[0], _app_cfg['serial']['mcs_time_format'])
                                    now_time = datetime.now()
                                    
                                    root_logger.info("Clock offset: {}".format((now_time - mcs_time).total_seconds()))
                                    
                                    # Overwrite the time with system time
                                    mks_fields[0] = time.strftime(_app_cfg['serial']['mcs_time_format'])
                                    
                                    # Check gas flow
                                    gas_flow = sum([float(x) if x != '-' else 0.0 for x in mks_fields[5:12]])
                                    
                                    if gas_flow < _app_cfg['gas']['threshold'] and gas_flag < 3:
                                        gas_flag += 1
                                        
                                        if gas_flag >= 3:
                                            msg = "Gas flow below threshold {:.3f} < {:.3f}".format(gas_flow, _app_cfg['gas']['threshold'])
                                            root_logger.warning(msg)
                                            
                                            if notify is not None:
                                                notify.send_message(msg, title='mcs_watch')
                                    elif gas_flow >= _app_cfg['gas']['threshold'] and gas_flag >= 3:
                                        gas_flag = 0
                                        
                                        msg = "Gas flow restored {:.3f} >= {:.3f}".format(gas_flow, _app_cfg['gas']['threshold'])
                                        root_logger.warning(msg)
                                        
                                        if notify is not None:
                                            notify.send_message(msg, title='mcs_watch')

                                    # Log line to file
                                    f.write(_app_cfg['serial']['mcs_delim'].join(mks_fields) + '\n')
                                    f.flush()
                                except:
                                    root_logger.exception('Unhandled exception during line read', exc_info=True)

                            # Clear buffer for next line
                            line_buffer = ''
                    else:
                        line_buffer += c
                else:
                    # Time out occurred
                    tick_counter += 1
                    
                    if tick_counter >= _app_cfg['serial']['timeout_rx']:
                        root_logger.warn('Timeout occurred!')
                        break
        except KeyboardInterrupt:
            root_logger.warn('Keyboard interrupt')
        except:
            root_logger.exception('Unhandled exception', exc_info=True)
            
            if notify is not None:
                notify.send_message("Exception during monitoring! Traceback: {}".format(traceback.format_exc()),
                                    title='mcs_watch')
            
            raise
        finally:
            port.close()
        
        if notify is not None:
            notify.send_message('Experiment ended', title='mcs_watch')
        
        root_logger.info('Exiting')
Example #8
def main():
    app_path = os.path.dirname(os.path.realpath(__file__))

    try:
        app_git_hash = util.get_git_hash()
    except OSError:
        app_git_hash = 'not found'

    parse = argparse.ArgumentParser(description='Slightly annoying twitter thing')

    parse.add_argument('-c', '--config', help='Path to config file', dest='config_path',
                       default=os.path.join(app_path, _DEFAULT_CONFIG_FILE))
    parse.add_argument('-d', '--daemon', help='Run as daemon', dest='run_daemon', action='store_true')
    parse.set_defaults(run_daemon=False)

    args = parse.parse_args()

    # Read configuration
    with open(args.config_path, 'r') as f:
        config_dict = json.load(f)
        logging_dict = config_dict.pop('log')

        _app_cfg.update(config_dict)
        logging.config.dictConfig(logging_dict)

    # Setup logging
    root_logger = logging.getLogger('main')
    root_logger.info("annoyb {} | git: {}".format(__version__, app_git_hash))

    for m in [('tweepy', tweepy.__version__)]:
        root_logger.info("{} {}".format(m[0], m[1]))

    root_logger.info("Launch command: {}".format(' '.join(sys.argv)))

    # Check configuration
    if not _app_cfg['twitter_consumer_key'] or not _app_cfg['twitter_consumer_secret']:
        root_logger.error('Twitter API key not configured')
        return

    try:
        # Get twitter authentication
        auth = tweepy.OAuthHandler(_app_cfg['twitter_consumer_key'], _app_cfg['twitter_consumer_secret'])

        if not _app_cfg['twitter_access_token'] or not _app_cfg['twitter_access_token_secret']:
            # Get access token
            root_logger.warning('Attempting to get access token')

            try:
                redirect_url = auth.get_authorization_url()

                print("Access Twitter OAuth URL: {}".format(redirect_url))

                request_token = input('Enter request token: ')

                [access_token, access_token_secret] = auth.get_access_token(request_token)

                print("Access token config: {}".format(json.dumps(
                    {'twitter_access_token': access_token, 'twitter_access_token_secret': access_token_secret},
                    indent=2, separators=(',', ': '))))
            except tweepy.TweepError:
                logging.error('Failed to get Twitter request or access token')
                return

            root_logger.warning('Save authentication token in configuration')
            return

        auth.set_access_token(_app_cfg['twitter_access_token'], _app_cfg['twitter_access_token_secret'])

        # Get handle to Twitter API
        twitter_api = tweepy.API(auth)

        #if not twitter_api.verify_credentials():
        #    root_logger.error('Authentication failed')
        #    return

        limit = twitter_api.rate_limit_status()
        limit = util.dict_tree_walk(limit, 'remaining')

        for key, val in sorted(limit.items()):
            reset_time = datetime.datetime.fromtimestamp(val['reset']).strftime('%c')

            if val['remaining'] == 0:
                root_logger.error("API {} limit hit".format(key))
            elif val['remaining'] <= _API_LOW_LIMIT:
                root_logger.warning("API {} limit low".format(key))

            root_logger.info("API {}: {} of {} remaining (reset at: {})".format(key, val['remaining'], val['limit'], reset_time))

        user = twitter_api.me()
        target = [twitter_api.get_user(screen_name) for screen_name in _app_cfg['tweet']['target']]

        root_logger.info("Authenticated as {} (id: {})".format(user.screen_name, user.id))

        for t in target:
            root_logger.info("Target {} (id: {})".format(t.screen_name, t.id))

            friendship = twitter_api.show_friendship(source_id=user.id, target_id=t.id)

            if friendship[0].blocking:
                root_logger.error("User {} blocking {}".format(friendship[0].screen_name, friendship[1].screen_name))
                return

            if friendship[0].blocked_by:
                root_logger.error("User {} blocked by {}".format(friendship[1].screen_name, friendship[0].screen_name))
                return

            if not friendship[0].following:
                root_logger.warn("User {} not following {}".format(friendship[0].screen_name,
                                                                   friendship[1].screen_name))

            if not friendship[1].following:
                root_logger.warn("User {} not followed by {}".format(friendship[0].screen_name,
                                                                     friendship[1].screen_name))

        # send_tweet(twitter_api, 'Cake')

        # Exit gracefully
        root_logger.info('Exiting normally')
    except:
        root_logger.exception('Unhandled exception', exc_info=True)
        raise
Example #9
def main():
    app_path = os.path.dirname(os.path.realpath(__file__))

    try:
        app_git_hash = util.get_git_hash()
    except OSError:
        app_git_hash = 'not found'

    parse = argparse.ArgumentParser(
        description='Slightly annoying twitter thing')

    parse.add_argument('-c',
                       '--config',
                       help='Path to config file',
                       dest='config_path',
                       default=os.path.join(app_path, _DEFAULT_CONFIG_FILE))
    parse.add_argument('-d',
                       '--daemon',
                       help='Run as daemon',
                       dest='run_daemon',
                       action='store_true')
    parse.set_defaults(run_daemon=False)

    args = parse.parse_args()

    # Read configuration
    with open(args.config_path, 'r') as f:
        config_dict = json.load(f)
        logging_dict = config_dict.pop('log')

        _app_cfg.update(config_dict)
        logging.config.dictConfig(logging_dict)

    # Setup logging
    root_logger = logging.getLogger('main')
    root_logger.info("annoyb {} | git: {}".format(__version__, app_git_hash))

    for m in [('tweepy', tweepy.__version__)]:
        root_logger.info("{} {}".format(m[0], m[1]))

    root_logger.info("Launch command: {}".format(' '.join(sys.argv)))

    # Check configuration
    if not _app_cfg['twitter_consumer_key'] or not _app_cfg[
            'twitter_consumer_secret']:
        root_logger.error('Twitter API key not configured')
        return

    try:
        # Get twitter authentication
        auth = tweepy.OAuthHandler(_app_cfg['twitter_consumer_key'],
                                   _app_cfg['twitter_consumer_secret'])

        if not _app_cfg['twitter_access_token'] or not _app_cfg[
                'twitter_access_token_secret']:
            # Get access token
            root_logger.warning('Attempting to get access token')

            try:
                redirect_url = auth.get_authorization_url()

                print("Access Twitter OAuth URL: {}".format(redirect_url))

                request_token = input('Enter request token: ')

                [access_token,
                 access_token_secret] = auth.get_access_token(request_token)

                print("Access token config: {}".format(
                    json.dumps(
                        {
                            'twitter_access_token': access_token,
                            'twitter_access_token_secret': access_token_secret
                        },
                        indent=2,
                        separators=(',', ': '))))
            except tweepy.TweepError:
                logging.error('Failed to get Twitter request or access token')
                return

            root_logger.warning('Save authentication token in configuration')
            return

        auth.set_access_token(_app_cfg['twitter_access_token'],
                              _app_cfg['twitter_access_token_secret'])

        # Get handle to Twitter API
        twitter_api = tweepy.API(auth)

        #if not twitter_api.verify_credentials():
        #    root_logger.error('Authentication failed')
        #    return

        limit = twitter_api.rate_limit_status()
        limit = util.dict_tree_walk(limit, 'remaining')

        for key, val in sorted(limit.items()):
            reset_time = datetime.datetime.fromtimestamp(
                val['reset']).strftime('%c')

            if val['remaining'] == 0:
                root_logger.error("API {} limit hit".format(key))
            elif val['remaining'] <= _API_LOW_LIMIT:
                root_logger.warning("API {} limit low".format(key))

            root_logger.info(
                "API {}: {} of {} remaining (reset at: {})".format(
                    key, val['remaining'], val['limit'], reset_time))

        user = twitter_api.me()
        target = [
            twitter_api.get_user(screen_name)
            for screen_name in _app_cfg['tweet']['target']
        ]

        root_logger.info("Authenticated as {} (id: {})".format(
            user.screen_name, user.id))

        for t in target:
            root_logger.info("Target {} (id: {})".format(t.screen_name, t.id))

            friendship = twitter_api.show_friendship(source_id=user.id,
                                                     target_id=t.id)

            if friendship[0].blocking:
                root_logger.error("User {} blocking {}".format(
                    friendship[0].screen_name, friendship[1].screen_name))
                return

            if friendship[0].blocked_by:
                root_logger.error("User {} blocked by {}".format(
                    friendship[1].screen_name, friendship[0].screen_name))
                return

            if not friendship[0].following:
                root_logger.warn("User {} not following {}".format(
                    friendship[0].screen_name, friendship[1].screen_name))

            if not friendship[1].following:
                root_logger.warn("User {} not followed by {}".format(
                    friendship[0].screen_name, friendship[1].screen_name))

        # send_tweet(twitter_api, 'Cake')

        # Exit gracefully
        root_logger.info('Exiting normally')
    except:
        root_logger.exception('Unhandled exception', exc_info=True)
        raise