def test_setup_logging_rotating_file_handler(self):
    """setup_logging must install a RotatingFileHandler on the log file.

    Fix: the two assertion lines had been dedented to module level (outside
    the method), which is a syntax/NameError; they belong in the method body.
    """
    log_count = 1
    with NamedTemporaryFile() as temp_file:
        with patch('utility.RotatingFileHandler') as mock:
            setup_logging(temp_file.name, log_count=log_count)
    # Handler must target the temp file with a 512 MiB cap and the
    # requested number of rotated backups.
    mock.assert_called_once_with(
        temp_file.name, maxBytes=1024 * 1024 * 512, backupCount=log_count)
Exemplo n.º 2
0
 def test_setup_logging_rotating_file_handler(self):
     """Verify the rotating file handler is built with the expected policy."""
     backup_total = 1
     with NamedTemporaryFile() as log_file:
         with patch('utility.RotatingFileHandler') as handler_mock:
             setup_logging(log_file.name, log_count=backup_total)
     # One handler, pointed at the log file, 512 MiB per file.
     handler_mock.assert_called_once_with(
         log_file.name, maxBytes=1024 * 1024 * 512, backupCount=backup_total)
Exemplo n.º 3
0
 def test_setup_logging_formatter(self):
     """The root logger's formatter gets the expected format strings."""
     with NamedTemporaryFile() as log_file:
         with patch('logging.Formatter') as formatter_mock:
             setup_logging(log_file.name, log_count=1)
     root_logger = logging.getLogger()
     self.assertEqual(root_logger.name, 'root')
     formatter_mock.assert_called_once_with(
         '%(asctime)s %(levelname)s %(message)s', '%Y-%m-%d %H:%M:%S')
Exemplo n.º 4
0
 def test_log_error(self):
     """logging.error output must land in the configured log file."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1)
         logging.error("testing123")
         with open(log_file.name, 'r') as reader:
             # Records look like: 2015-04-29 14:03:02 ERROR testing123
             tail = reader.read().split(' ', 2)[2]
             self.assertEqual(tail, 'ERROR testing123\n')
 def test_log_error(self):
     """An ERROR record written via logging ends up in the log file."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1)
         logging.error("testing123")
         with open(log_file.name, 'r') as handle:
             content = handle.read()
         # Drop the leading "YYYY-MM-DD HH:MM:SS " timestamp, keep the rest.
         _, _, remainder = content.split(' ', 2)
         self.assertEqual(remainder, 'ERROR testing123\n')
 def test_setup_logging_formatter(self):
     """setup_logging installs the expected record/date format strings."""
     expected_fmt = '%(asctime)s %(levelname)s %(message)s'
     expected_datefmt = '%Y-%m-%d %H:%M:%S'
     with NamedTemporaryFile() as log_file:
         with patch('logging.Formatter') as formatter_mock:
             setup_logging(log_file.name, log_count=1)
     logger = logging.getLogger()
     self.assertEqual(logger.name, 'root')
     formatter_mock.assert_called_once_with(expected_fmt, expected_datefmt)
Exemplo n.º 7
0
 def test_setup_logging(self):
     """Root logger gets DEBUG level plus file and stream handlers."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1, log_level=logging.DEBUG)
     root = logging.getLogger()
     self.assertEqual(root.level, logging.DEBUG)
     self.assertEqual(root.name, 'root')
     # Order of the two handlers is unspecified, so accept either type
     # in either slot.
     expected_types = [RotatingFileHandler, logging.StreamHandler]
     handlers = root.handlers
     self.assertIn(type(handlers[0]), expected_types)
     self.assertIn(type(handlers[1]), expected_types)
 def test_setup_logging(self):
     """Both configured handlers are of the expected handler types."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1, log_level=logging.DEBUG)
     logger = logging.getLogger()
     self.assertEqual(logger.level, logging.DEBUG)
     self.assertEqual(logger.name, 'root')
     allowed = [RotatingFileHandler, logging.StreamHandler]
     first, second = logger.handlers[0], logger.handlers[1]
     self.assertIn(type(first), allowed)
     self.assertIn(type(second), allowed)
Exemplo n.º 9
0
 def factory(cls, workspace, log_count=1, dry_run=False):
     """Build a runner with results and command loggers under workspace/log."""
     log_dir = os.path.join(workspace, 'log')
     ensure_dir(log_dir)
     results_log = os.path.join(log_dir, 'results.log')
     command_log = os.path.join(log_dir, 'chaos_run_list.log')
     cmd_log_name = 'cmd_log'
     # Primary logger carries results; the command logger is file-only and
     # unformatted so its entries can be replayed verbatim.
     setup_logging(log_path=results_log, log_count=log_count)
     setup_logging(log_path=command_log, log_count=log_count,
                   name=cmd_log_name, add_stream=False,
                   disable_formatter=True)
     chaos_monkey = ChaosMonkey.factory()
     return cls(workspace, chaos_monkey, log_count, dry_run, cmd_log_name)
 def test_setup_logging_cmd_logger(self):
     """A named command logger writes structured entries, file-only."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1, log_level=logging.INFO,
                       name='cmd_log', add_stream=False,
                       disable_formatter=True)
         cmd_logger = logging.getLogger('cmd_log')
         cmd_logger.info(StructuredMessage("deny-all", "3"))
         logged = log_file.read()
     self.assertEqual(cmd_logger.level, logging.INFO)
     self.assertEqual(cmd_logger.name, 'cmd_log')
     # File-only: the single handler must be the rotating file handler.
     self.assertIn(type(cmd_logger.handlers[0]), [RotatingFileHandler])
     self.assertEqual(logged, dump([['deny-all', "3"]]))
Exemplo n.º 11
0
 def factory(cls, workspace, log_count=1, dry_run=False):
     """Create an instance with both loggers configured in the workspace."""
     log_directory = os.path.join(workspace, 'log')
     ensure_dir(log_directory)
     cmd_log_name = 'cmd_log'
     setup_logging(log_path=os.path.join(log_directory, 'results.log'),
                   log_count=log_count)
     # Command log: no stream handler, no formatter — raw command lines only.
     setup_logging(log_path=os.path.join(log_directory, 'chaos_run_list.log'),
                   log_count=log_count, name=cmd_log_name,
                   add_stream=False, disable_formatter=True)
     return cls(workspace, ChaosMonkey.factory(), log_count, dry_run,
                cmd_log_name)
Exemplo n.º 12
0
 def test_setup_logging_cmd_logger(self):
     """The 'cmd_log' logger is file-only and emits dump()-formatted data."""
     with NamedTemporaryFile() as log_file:
         setup_logging(log_file.name, log_count=1, log_level=logging.INFO,
                       name='cmd_log', add_stream=False,
                       disable_formatter=True)
         logger = logging.getLogger('cmd_log')
         logger.info(StructuredMessage("deny-all", "3"))
         written = log_file.read()
     self.assertEqual(logger.level, logging.INFO)
     self.assertEqual(logger.name, 'cmd_log')
     handler_types = [type(h) for h in logger.handlers]
     self.assertIn(handler_types[0], [RotatingFileHandler])
     expected = dump([['deny-all', "3"]])
     self.assertEqual(written, expected)
Exemplo n.º 13
0
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ),
                dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)


def run_as_process(custom_config, queue):
    """Run the solver in a child process, forwarding log records via queue."""
    root_logger = logging.getLogger()
    utility.setup_subprocess_logging(queue, root_logger)
    return solve(custom_config)


if __name__ == '__main__':
    custom_config = {}
    # Allow custom configuration to be passed from command line
    if len(sys.argv) > 1:
        custom_config = json.loads(sys.argv[1])

    # Allow logging setup to be disabled from command line
    if len(sys.argv) < 3:
        utility.setup_logging(
            default_conf_path=settings.LOGGING_CONFIGURATION_FILE,
            logging_path=settings.LOG_FILE)
    try:
        # Fix: pass the parsed configuration through; it was previously
        # discarded by calling solve({}) (compare run_as_process above).
        solve(custom_config)
    except Exception:
        # exc_info=True means the stack trace will be printed automatically
        logging.getLogger(__name__).error(
            'Program halted due to a fatal error whose detail is as follow: ',
            exc_info=True)
Exemplo n.º 14
0
        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ), dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ), dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)


def run_as_process(custom_config, queue):
    """Worker-process entry point; routes logging to the parent via queue."""
    utility.setup_subprocess_logging(queue, logging.getLogger())
    result = solve(custom_config)
    return result

if __name__ == '__main__':
    custom_config = {}
    # Allow custom configuration to be passed from command line
    if len(sys.argv) > 1:
        custom_config = json.loads(sys.argv[1])

    # Allow logging setup to be disabled from command line
    if len(sys.argv) < 3:
        utility.setup_logging(default_conf_path=settings.LOGGING_CONFIGURATION_FILE, logging_path=settings.LOG_FILE)
    try:
        # Fix: use the configuration parsed above instead of discarding it
        # with solve({}) (run_as_process already passes it through).
        solve(custom_config)
    except Exception:
        # exc_info=True means the stack trace will be printed automatically
        logging.getLogger(__name__).error('Program halted due to a fatal error whose detail is as follow: ',
                                          exc_info=True)
Exemplo n.º 15
0
from classes import *
import utility
import sys
from sys import argv
import logging
import time

# NOTE(review): Python 2 syntax (statement-form print) — this script will
# not run under Python 3 without conversion.
if __name__ == "__main__":
    '''
    Go get lunch
    '''

    # Configure application-wide logging before anything else runs.
    utility.setup_logging()

    logger = logging.getLogger(__name__)
    logger.debug("main function")

    # Two CSV paths are required: postcodes and restaurants.
    if len(argv) < 3:
        print "Please include the CSV file for postcodes and restaurants as the program's parameters."
        print "Ex: python lunch.py postcodes.csv restaurants.csv"
        sys.exit()

    dash_printer = 50
    print '-' * dash_printer
    print "Welcome to the Go Get Lunch program!"
    print '-' * dash_printer

    postcodes = ()

    # Load postcodes from the first CSV argument and de-duplicate them.
    # (The snippet may continue past this point in the original file.)
    postcodes = utility.import_postcodes_csv(argv[1])
    postcodes = set(postcodes)
        for j in range(subset.shape[1]):
            plt.subplot(n, n, j + 1)
            feature = subset[subset.columns[j]]

            if y is None:   # Plot the feature distribution (full)
                sns.distplot(feature)
            else:   # Plot feature distrbituions separated by class
                for label in labels:
                    feature_label = feature[y == label]
                    sns.distplot(feature_label, label=label, hist_kws={"alpha": 0.4})
                plt.legend()


if __name__ == "__main__":
    # Route log output into the configured results directory.
    utility.setup_logging(params['results_dir'])

    data_df = utility.load_dataset(params['data_file'])

    # Features = every column except the identifier/label columns;
    # labels come from the 'Class' column.
    X_all = data_df.drop(columns=['Series', 'Class'], errors='ignore')
    y_all = data_df['Class']

    plot_class_distribution(y_all)

    # Project to two components purely for 2-D visualisation.
    X_pca_2d = do_pca(X_all, num_components=2)

    plot_dataset_2d(X_pca_2d, y_all)

    plot_correlation_matrix(X_all)

    # 3x3 grid of per-feature distribution plots.
    plot_feature_distributions_nxn_grid(X_all, n=3)
Exemplo n.º 17
0
--------------------------------------------------------------------------------

Name:       manager.py
Purpose:    This is the main Lambda handler file (autoscale_manager).
            Takes AWS Lambda triggers & routes the request to appropriate function module.
"""

import json
import time
import utility as utl
from ngfw import NgfwInstance
from fmc import FirepowerManagementCenter
from aws import SimpleNotificationService

# Setup Logging
# Module-level logger shared by this Lambda's handlers; verbosity appears to
# be driven by the 'DebugDisable' entry of utl.e_var — TODO confirm semantics.
logger = utl.setup_logging(utl.e_var['DebugDisable'])


def lambda_handler(event, context):
    """
    Purpose:    Main Lambda functions of Autoscale Manager
    Parameters: AWS Events (cloudwatch, SNS)
    Returns:
    Raises:
    """
    # Visual marker in the log for the start of a handler invocation.
    utl.put_line_in_log('AutoScale Manager Lambda Handler started', 'thick')
    logger.info("Received event: " + json.dumps(event, separators=(',', ':')))

    # SNS Event
    # NOTE(review): the function body is truncated here in this excerpt; the
    # SNS branch (and any others) continues in the original file.
    try:
        if event["Records"][0]["EventSource"] == "aws:sns":
Exemplo n.º 18
0
def main():
    """Entry point: configure logging, then run the dump."""
    utility.setup_logging()
    # Database setup intentionally disabled for now:
    # p.database.setup_database()
    dump()
Exemplo n.º 19
0
--------------------------------------------------------------------------------

Name:       aws.py
Purpose:    This python file has AWS related class & methods
            These classes will be initialized in Lambda function as needed
"""

import boto3
import botocore
import json
import re
from botocore.exceptions import ClientError
import constant as const
import utility as utl

# Module-level logger shared by the AWS helper classes in this file.
logger = utl.setup_logging()


class SimpleNotificationService:
    """
        SimpleNotificationService class contains methods for AWS SNS service
    """
    def __init__(self):
        self.sns_client = boto3.client('sns')

    def publish_to_topic(self, topic_arn, subject, sns_message):
        """
        Purpose:    Publish message to SNS Topic
        Parameters: Topic ARN, Subject, Message Body
        Returns:    Response of Message publish
        Raises:     None
def main():
    args = docopt(__doc__, version="RandomActivityGen v0.1")

    setup_logging(args)

    # Parse random and seed arguments
    if not args["--random"]:
        random.seed(args["--seed"])
    pop_offset = 65_536 * random.random()
    work_offset = 65_536 * random.random()
    while pop_offset == work_offset:
        work_offset = 65_536 * random.random()
    logging.debug(
        f"[main] Using pop_offset: {pop_offset}, work_offset: {work_offset}")

    # Read SUMO network
    logging.debug(f"[main] Reading network from: {args['--net-file']}")
    net = sumolib.net.readNet(args["--net-file"])

    # Parse statistics configuration
    logging.debug(f"[main] Parsing stat file: {args['--stat-file']}")
    stats = ET.parse(args["--stat-file"])
    verify_stats(stats)

    max_display_size = int(args["--display.size"])

    centre = find_city_centre(
        net) if args["--centre.pos"] == "auto" else tuple(
            map(int, args["--centre.pos"].split(",")))
    radius = radius_of_network(net, centre)

    # If display-only, load stat-file as input and exit after rendering
    if args["--display-only"]:
        # Try the output file first, as, if given, it contains a computed statistics file, otherwise try the input
        stats = ET.parse(args["--output-file"] or args["--stat-file"])
        logging.debug(
            f"[main] Displaying network as image of max size {max_display_size}x{max_display_size}"
        )
        display_network(net, stats, max_display_size, centre,
                        args["--net-file"])
        exit(0)

    # Prepare noise sampling
    pop_noise = NoiseSampler(centre, float(args['--centre.pop-weight']),
                             radius, pop_offset)
    work_noise = NoiseSampler(centre, float(args['--centre.work-weight']),
                              radius, work_offset)

    logging.debug(f"[main] Using centre: {centre}, "
                  f"radius: {radius}, "
                  f"centre.pop-weight: {float(args['--centre.pop-weight'])}, "
                  f"centre.work-weight: {float(args['--centre.work-weight'])}")

    # Insert streets, gates, and schools
    logging.info("[main] Setting up streets with population and workplaces")
    setup_streets(net, stats, pop_noise, work_noise)
    logging.debug(f"[main] Setting up city gates")
    setup_city_gates(net, stats, args["--gates.count"], radius)
    logging.info("[main] Setting up schools")
    setup_schools(args, net, stats, pop_noise)

    if args["--bus-stop"]:
        logging.debug(f"[main] Setting up bus-stops")
        setup_bus_stops(net, stats, int(args["--bus-stop.distance"]),
                        int(args["--bus-stop.k"]))

    # Write statistics back
    logging.debug(f"[main] Writing statistics file to {args['--output-file']}")
    stats.write(args["--output-file"])

    if args["--display"]:
        logging.debug(
            f"[main] Displaying network as image of max size {max_display_size}x{max_display_size}"
        )
        display_network(net, stats, max_display_size, centre,
                        args["--net-file"])