def get_config(path=None, base=None):
    """Build a SafeConfigParser, optionally pre-loaded from *path*.

    :param path: configuration location; when falsy an empty parser
        is returned
    :param base: base location, passed through to get_config_io()
    :return: a ConfigParser.SafeConfigParser instance
    """
    parser = ConfigParser.SafeConfigParser()
    if not path:
        return parser
    parser.readfp(get_config_io(path, base))
    return parser
def load_environment(srcdir, datadir=None, allow_old=False):
    """Load configuration values for an environment.

    :param srcdir: environment source directory
    :param datadir: environment data directory; if None it will be
        discovered from srcdir
    :param allow_old: don't raise if this is an old-format site.
        This is only valid for sites that you are purging.

    :return: (datadir, name, ckan_version, always_prod, deploy_target,
        remote_server_key, extra_containers)

    :raises DatacatsError: if the environment file cannot be read, the
        environment uses an old format (and allow_old is False), or a
        migration is in progress.
    """
    cp = ConfigParser.SafeConfigParser()
    try:
        cp.read([srcdir + '/.datacats-environment'])
    except ConfigParser.Error:
        raise DatacatsError('Error reading environment information')
    name = cp.get('datacats', 'name')

    if datadir:
        # update the link in case user moved their srcdir
        save_srcdir_location(datadir, srcdir)
    else:
        datadir = path.expanduser('~/.datacats/' + name)
        # FIXME: check if datadir is sane, project-dir points back to srcdir

    if migrate.needs_format_conversion(datadir) and not allow_old:
        raise DatacatsError('This environment uses an old format. You must'
                            ' migrate to the new format. To do so, use the'
                            ' "datacats migrate" command.')

    if migrate.is_locked(datadir):
        raise DatacatsError(
            'Migration in progress, cannot continue.\n'
            'If you interrupted a migration, you should'
            ' attempt manual recovery or contact us by'
            ' filing an issue at http://github.com/datacats/'
            'datacats.\nAs a last resort, you could delete'
            ' all your stored data and create a new environment'
            ' by running "datacats purge" followed by'
            ' "datacats init".')

    # FIXME: consider doing data_complete check here
    ckan_version = cp.get('datacats', 'ckan_version')

    try:
        always_prod = cp.getboolean('datacats', 'always_prod')
    except ConfigParser.NoOptionError:
        always_prod = False

    try:
        extra_containers = cp.get('datacats', 'extra_containers').split(' ')
    except ConfigParser.NoOptionError:
        # empty list keeps the return type consistent with the split() result
        extra_containers = []

    # if remote_server's custom ssh connection address is defined,
    # we overwrite the default datacats.com one
    try:
        deploy_target = cp.get('deploy', 'remote_server_user') \
            + "@" + cp.get('deploy', 'remote_server')
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        deploy_target = DEFAULT_REMOTE_SERVER_TARGET

    # if remote_server's ssh public key is given,
    # we overwrite the default datacats.com one
    try:
        remote_server_key = cp.get('deploy', 'remote_server_key')
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        remote_server_key = None

    return (datadir, name, ckan_version, always_prod, deploy_target,
            remote_server_key, extra_containers)
def parse_args(args_str):
    """Parse command-line/config-file options for the API server.

    A first pass with a help-less parser extracts ``-c/--conf_file``;
    values from the conf file(s) become defaults for the full parser, so
    CLI options always override file options.

    :param args_str: whitespace-separated argument string
    :return: (args_obj, remaining_argv)
    """
    args_obj = None
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    defaults = {
        'reset_config': False,
        'wipe_config': False,
        'listen_ip_addr': _WEB_HOST,
        'listen_port': _WEB_PORT,
        'admin_port': _ADMIN_PORT,
        'cassandra_server_list': "127.0.0.1:9160",
        'rdbms_server_list': "127.0.0.1:3306",
        'rdbms_connection_config': "",
        'collectors': None,
        'http_server_port': '8084',
        'log_local': True,
        'log_level': SandeshLevel.SYS_NOTICE,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'trace_file': '/var/log/contrail/vnc_openstack.err',
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'logging_level': 'WARN',
        'logging_conf': '',
        'logger_class': None,
        'multi_tenancy': None,
        'aaa_mode': None,
        'zk_server_ip': '127.0.0.1:2181',
        'worker_id': '0',
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        'rabbit_user': '******',
        'rabbit_password': '******',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'rabbit_max_pending_updates': '4096',
        'rabbit_health_check_interval': '120.0',  # in seconds
        'cluster_id': '',
        'max_requests': 1024,
        'paginate_count': 256,
        'region_name': 'RegionOne',
        'stale_lock_seconds': '5',  # lock but no resource past this => stale
        'cloud_admin_role': cfgm_common.CLOUD_ADMIN_ROLE,
        'global_read_only_role': cfgm_common.GLOBAL_READ_ONLY_ROLE,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'object_cache_entries': '10000',  # max number of objects cached for read
        'object_cache_exclude_types': '',  # csv of object types to *not* cache
        'db_engine': 'cassandra',
        'max_request_size': 1024000,
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))

    # keystone options
    ksopts = {
        'auth_host': '127.0.0.1',
        'auth_port': '35357',
        'auth_protocol': 'http',
        'admin_user': '',
        'admin_password': '',
        'admin_tenant_name': '',
        'admin_user_domain_name': None,
        'identity_uri': None,
        'project_domain_name': None,
        'insecure': True,
        'cafile': '',
        'certfile': '',
        'keyfile': '',
        'auth_type': 'password',
        'auth_url': '',
    }
    # cassandra options
    cassandraopts = {'cassandra_user': None, 'cassandra_password': None}
    # rdbms options
    rdbmsopts = {
        'rdbms_user': None,
        'rdbms_password': None,
        'rdbms_connection': None
    }
    # sandesh options
    sandeshopts = SandeshConfig.get_default_options()

    config = None
    saved_conf_file = args.conf_file
    if args.conf_file:
        config = ConfigParser.SafeConfigParser({'admin_token': None})
        config.read(args.conf_file)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
            if 'multi_tenancy' in config.options('DEFAULTS'):
                defaults['multi_tenancy'] = config.getboolean(
                    'DEFAULTS', 'multi_tenancy')
            if 'default_encoding' in config.options('DEFAULTS'):
                default_encoding = config.get('DEFAULTS', 'default_encoding')
                gen.resource_xsd.ExternalEncoding = default_encoding
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        if 'QUOTA' in config.sections():
            for (k, v) in config.items("QUOTA"):
                try:
                    if str(k) != 'admin_token':
                        vnc_quota.QuotaHelper.default_quota[str(k)] = int(v)
                except ValueError:
                    # non-integer quota values are deliberately ignored
                    pass
        if 'CASSANDRA' in config.sections():
            cassandraopts.update(dict(config.items('CASSANDRA')))
        if 'RDBMS' in config.sections():
            rdbmsopts.update(dict(config.items('RDBMS')))
        SandeshConfig.update_options(sandeshopts, config)

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    defaults.update(cassandraopts)
    defaults.update(rdbmsopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)

    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument(
        "--rdbms_server_list",
        # was a copy-paste of the cassandra help text
        help="List of rdbms servers in IP Address:Port format",
        nargs='+')
    parser.add_argument("--rdbms_connection", help="DB Connection string")
    parser.add_argument("--redis_server_ip", help="IP address of redis server")
    parser.add_argument("--redis_server_port", help="Port of redis server")
    parser.add_argument("--auth", choices=['keystone', 'no-auth'],
                        help="Type of authentication for user-requests")
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument("--wipe_config", action="store_true",
                        help="Warning! Destroy previous configuration")
    parser.add_argument("--listen_ip_addr",
                        help="IP address to provide service on, default %s"
                        % (_WEB_HOST))
    parser.add_argument("--listen_port",
                        help="Port to provide service on, default %s"
                        % (_WEB_PORT))
    parser.add_argument(
        "--admin_port",
        help="Port with local auth for admin access, default %s"
        % (_ADMIN_PORT))
    parser.add_argument("--collectors",
                        help="List of VNC collectors in ip:port format",
                        nargs="+")
    parser.add_argument("--http_server_port", help="Port of local HTTP server")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--logging_level",
        help=("Log level for python logging: DEBUG, INFO, WARN, ERROR"
              " default: %s" % defaults['logging_level']))
    parser.add_argument(
        "--logging_conf",
        help=("Optional logging configuration file, default: None"))
    parser.add_argument("--logger_class",
                        help=("Optional external logger class, default: None"))
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument(
        "--trace_file",
        help="Filename for the errors backtraces to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument(
        "--multi_tenancy", action="store_true",
        help="Validate resource permissions (implies token validation)")
    parser.add_argument("--aaa_mode",
                        choices=cfgm_common.AAA_MODE_VALID_VALUES,
                        help="AAA mode")
    parser.add_argument("--worker_id", help="Worker Id")
    parser.add_argument("--zk_server_ip",
                        help="Ip address:port of zookeeper server")
    parser.add_argument("--rabbit_server", help="Rabbitmq server address")
    parser.add_argument("--rabbit_port", help="Rabbitmq server port")
    parser.add_argument("--rabbit_user", help="Username for rabbit")
    parser.add_argument("--rabbit_vhost", help="vhost for rabbit")
    parser.add_argument("--rabbit_password", help="password for rabbit")
    parser.add_argument(
        "--rabbit_ha_mode",
        help="True if the rabbitmq cluster is mirroring all queue")
    parser.add_argument("--rabbit_max_pending_updates",
                        help="Max updates before stateful changes disallowed")
    parser.add_argument(
        "--rabbit_health_check_interval",
        help="Interval seconds between consumer heartbeats to rabbitmq")
    parser.add_argument("--cluster_id",
                        help="Used for database keyspace separation")
    parser.add_argument(
        "--max_requests", type=int,
        help="Maximum number of concurrent requests served by api server")
    parser.add_argument(
        "--paginate_count", type=int,
        help="Default number of items when pagination is requested")
    parser.add_argument("--cassandra_user", help="Cassandra user name")
    parser.add_argument("--cassandra_password", help="Cassandra password")
    parser.add_argument(
        "--stale_lock_seconds",
        # help text previously claimed "default 60"; the coded default is 5
        help="Time after which lock without resource is stale, default 5")
    parser.add_argument("--cloud_admin_role",
                        help="Role name of cloud administrator")
    parser.add_argument(
        "--global_read_only_role",
        help="Role name of user with Read-Only access to all objects")
    parser.add_argument(
        "--object_cache_entries",
        help="Maximum number of objects cached for read, default 10000")
    parser.add_argument(
        "--object_cache_exclude_types",
        help="Comma separated values of object types to not cache")
    parser.add_argument("--db_engine",
                        help="Database engine to use, default cassandra")
    parser.add_argument(
        "--max_request_size", type=int,
        help="Maximum size of bottle requests served by api server")
    SandeshConfig.add_parser_arguments(parser)

    args_obj, remaining_argv = parser.parse_known_args(remaining_argv)
    args_obj.conf_file = args.conf_file
    args_obj.config_sections = config
    # conf-file values arrive as plain strings; normalize to lists
    if isinstance(args_obj.cassandra_server_list, str):
        args_obj.cassandra_server_list = \
            args_obj.cassandra_server_list.split()
    if isinstance(args_obj.rdbms_server_list, str):
        args_obj.rdbms_server_list = \
            args_obj.rdbms_server_list.split()
    if isinstance(args_obj.collectors, str):
        args_obj.collectors = args_obj.collectors.split()
    args_obj.sandesh_config = SandeshConfig.from_parser_arguments(args_obj)
    args_obj.conf_file = saved_conf_file
    return args_obj, remaining_argv
''' this module has one purpose... read the config file ''' import base64 import ConfigParser import logging import os CONF_FILE_LOC = os.path.join(os.path.expanduser("~"), ".openmolar", "om_chart.conf") logging.debug("using conf file at '%s'" % CONF_FILE_LOC) try: parser = ConfigParser.SafeConfigParser() parser.read(CONF_FILE_LOC) KWARGS = { "host": parser.get("Database", "host"), "port": int(parser.get("Database", "port")), "user": parser.get("Database", "user"), "passwd": base64.b64decode(parser.get("Database", "password")), "db": parser.get("Database", "db_name"), "use_unicode": True, "charset": "utf8" } SURGERY_NO = int(parser.get("Surgery", "number")) except ConfigParser.NoSectionError:
"""Singleton config object""" import ConfigParser import os try: from json import loads except ImportError: from simplejson import loads cfdir = os.path.join(os.path.expanduser('~'), '.rst2pdf') cfname = os.path.join(cfdir, 'config') def getValue(section, key, default=None): section = section.lower() key = key.lower() try: return loads(conf.get(section, key)) except Exception: return default class ConfigError(Exception): def __init__(self, modulename, msg): self.modulename = modulename self.msg = msg conf = ConfigParser.SafeConfigParser() conf.read(["/etc/rst2pdf.conf", cfname])
def config_from_string(config_str, portnumfile):
    """Parse *config_str* and wrap the result in a _Config.

    :param config_str: raw configuration text (bytes)
    :param portnumfile: port-number file path forwarded to _Config
    :return: a _Config backed by the in-memory configuration
    """
    source = BytesIO(config_str)
    parser = ConfigParser.SafeConfigParser()
    parser.readfp(source)
    return _Config(parser, portnumfile, '<in-memory>')
def __init__(self, generalConfPath):
    """Initialize the simulator from the INI file at *generalConfPath*.

    Reads the [settings] section and draws random model parameters
    (Poisson and Dirichlet).  NOTE(review): uses numpy's global RNG, so
    construction is non-deterministic unless the caller seeds it first.
    """
    super(SignatureSimulator, self).__init__()
    inifile = ConfigParser.SafeConfigParser()
    # keep option names case-sensitive (SafeConfigParser lower-cases by default)
    inifile.optionxform = str
    inifile.read(generalConfPath)
    self.__prepared = False
    self.__I = inifile.getint('settings', 'I')
    self.__Js = []
    self.__JLambda = inifile.getfloat('settings', 'JLambda')
    # one Poisson draw per i in range(I)
    for i in range(self.__I):
        self.__Js.append(numpy.random.poisson(self.__JLambda))
    self.__T = inifile.getint('settings', 'T')
    self.__N = inifile.getint('settings', 'N')
    self.__L = inifile.getint('settings', 'L')
    # Ml: per-l sizes read from options ml_0 .. ml_{L-1}
    self.__Ml = []
    for l in range(self.__L):
        m = inifile.getint('settings', 'ml_' + str(l))
        self.__Ml.append(m)
    self.__alpha = inifile.getfloat('settings', 'alpha')
    self.__beta0 = inifile.getfloat('settings', 'beta0')
    self.__beta1 = inifile.getfloat('settings', 'beta1')
    self.__etaFactor = [inifile.getfloat('settings', 'etaFactor')]
    self.__etaBases = []
    self.__eta = []
    # eta: symmetric Dirichlet base of length Ml[l], scaled by etaFactor
    for l in range(self.__L):
        self.__etaBases.append([inifile.getfloat('settings', 'eta')] * self.__Ml[l])
        self.__eta.append(
            numpy.random.dirichlet(self.__etaBases[l]) * self.__etaFactor)
    # f: T x L Dirichlet draws parameterized by eta[l]
    self.__f = []
    for k in range(self.__T):
        self.__f.append([])
    for k in range(self.__T):
        for l in range(self.__L):
            self.__f[k].append(numpy.random.dirichlet(self.__eta[l]))
    self.__gammaFactor = inifile.getfloat('settings', 'gammaFactor')
    self.__gammaBases = [inifile.getfloat('settings', 'gamma')] * self.__N
    self.__gamma = numpy.random.dirichlet(
        self.__gammaBases) * self.__gammaFactor
    self.__phiFactor = [inifile.getfloat('settings', 'phiFactor')]
    self.__phiBases = []
    for l in range(self.__L):
        self.__phiBases.append([inifile.getfloat('settings', 'phi')] * self.__Ml[l])
    self.__phi = []
    for l in range(self.__L):
        self.__phi.append(
            numpy.random.dirichlet(self.__phiBases[l]) * self.__phiFactor)
    # g: N x L Dirichlet draws parameterized by phi[l]
    self.__g = []
    for n in range(self.__N):
        self.__g.append([])
    for n in range(self.__N):
        for l in range(self.__L):
            self.__g[n].append(numpy.random.dirichlet(self.__phi[l]))
import time import uuid # Config stuff. config_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'PlexComskip.conf') if not os.path.exists(config_file_path): print 'Config file not found: %s' % config_file_path print 'Make a copy of PlexConfig.conf.example named PlexConfig.conf, modify as necessary, and place in the same directory as this script.' sys.exit(1) config = ConfigParser.SafeConfigParser({ 'comskip-ini-path': os.path.join(os.path.dirname(os.path.realpath(__file__)), 'comskip.ini'), 'temp-root': tempfile.gettempdir(), 'comskip-root': tempfile.gettempdir(), 'nice-level': '0' }) config.read(config_file_path) COMSKIP_PATH = os.path.expandvars( os.path.expanduser(config.get('Helper Apps', 'comskip-path'))) COMSKIP_INI_PATH = os.path.expandvars( os.path.expanduser(config.get('Helper Apps', 'comskip-ini-path'))) FFMPEG_PATH = os.path.expandvars( os.path.expanduser(config.get('Helper Apps', 'ffmpeg-path'))) LOG_FILE_PATH = os.path.expandvars( os.path.expanduser(config.get('Logging', 'logfile-path'))) CONSOLE_LOGGING = config.getboolean('Logging', 'console-logging')
# OrderedDict moved into the stdlib in py27; fall back to the backport
# package on older interpreters.
try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict

from oslo.config import cfg
import webob.exc

import glance.api.policy
from glance.common import exception
from glance.openstack.common import log as logging
from glance.openstack.common import policy

# NOTE(bourke): The default dict_type is collections.OrderedDict in py27, but
# we must set manually for compatibility with py26
CONFIG = ConfigParser.SafeConfigParser(dict_type=OrderedDict)
LOG = logging.getLogger(__name__)

# Options locating the property protection file and selecting whether its
# rules are expressed as "roles" or "policies".
property_opts = [
    cfg.StrOpt('property_protection_file',
               default=None,
               help=_('The location of the property protection file.')),
    cfg.StrOpt('property_protection_rule_format',
               default='roles',
               help=_('This config value indicates whether "roles" or '
                      '"policies" are used in the property protection file.')),
]
CONF = cfg.CONF
CONF.register_opts(property_opts)
import requests
import ConfigParser
import inspect
import incremental_counter
import error_counter

# Const
configfile = os.path.dirname(os.path.abspath(__file__)) + '/save2strage.ini'

# get settings
if not os.path.exists(configfile):
    raise IOError("no config file!")
ini = ConfigParser.SafeConfigParser()
ini.read(configfile)
# every one of these sections must be present in the ini file
for section in ("save", "log", "error_recovery"):
    if section not in ini.sections():
        raise IOError("no %s section in config file!" % section)


# https://code.i-harness.com/en/q/aea99
def str2bool(v):
    """Interpret common truthy strings ("yes", "true", "t", "1")."""
    return v.lower() in ("yes", "true", "t", "1")
def parse_args(args_str):
    '''
    Eg. python svc_monitor.py --ifmap_server_ip 192.168.1.17
                         --ifmap_server_port 8443
                         --ifmap_username test
                         --ifmap_password test
                         --cassandra_server_list 10.1.2.3:9160
                         --api_server_ip 10.1.2.3
                         --api_server_port 8082
                         --zk_server_ip 10.1.2.3
                         --zk_server_port 2181
                         --collectors 127.0.0.1:8086
                         --disc_server_ip 127.0.0.1
                         --disc_server_port 5998
                         --http_server_port 8090
                         --log_local
                         --log_level SYS_DEBUG
                         --log_category test
                         --log_file <stdout>
                         [--region_name <name>]
                         [--reset_config]
    '''
    # Source any specified config/ini file
    # Turn off help, so we show all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file",
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    defaults = {
        'ifmap_server_ip': '127.0.0.1',
        'ifmap_server_port': '8443',
        'ifmap_username': '******',
        'ifmap_password': '******',
        'cassandra_server_list': '127.0.0.1:9160',
        'api_server_ip': '127.0.0.1',
        'api_server_port': '8082',
        'zk_server_ip': '127.0.0.1',
        'zk_server_port': '2181',
        'collectors': None,
        'disc_server_ip': None,
        'disc_server_port': None,
        'http_server_port': '8088',
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'region_name': None,
    }
    secopts = {
        'use_certs': False,
        'keyfile': '',
        'certfile': '',
        'ca_certs': '',
        'ifmap_certauth_port': "8444",
    }
    ksopts = {
        'auth_host': '127.0.0.1',
        'admin_user': '******',
        'admin_password': '******',
        'admin_tenant_name': 'default-domain'
    }

    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read([args.conf_file])
        # guard the section lookup so a conf file without [DEFAULTS]
        # doesn't raise NoSectionError (matches the sibling parsers)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
        if ('SECURITY' in config.sections() and
                'use_certs' in config.options('SECURITY')):
            if config.getboolean('SECURITY', 'use_certs'):
                secopts.update(dict(config.items("SECURITY")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(secopts)
    defaults.update(ksopts)
    parser.set_defaults(**defaults)

    parser.add_argument(
        "--ifmap_server_ip", help="IP address of ifmap server")
    parser.add_argument("--ifmap_server_port", help="Port of ifmap server")
    # TODO should be from certificate
    parser.add_argument("--ifmap_username",
                        help="Username known to ifmap server")
    parser.add_argument("--ifmap_password",
                        help="Password known to ifmap server")
    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument("--api_server_ip", help="IP address of API server")
    parser.add_argument("--api_server_port", help="Port of API server")
    parser.add_argument("--collectors",
                        help="List of VNC collectors in ip:port format",
                        nargs="+")
    parser.add_argument("--disc_server_ip",
                        help="IP address of the discovery server")
    parser.add_argument("--disc_server_port",
                        help="Port of the discovery server")
    parser.add_argument("--http_server_port",
                        help="Port of local HTTP server")
    parser.add_argument(
        "--log_local", action="store_true",
        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--admin_user", help="Name of keystone admin user")
    parser.add_argument("--admin_password",
                        help="Password of keystone admin user")
    parser.add_argument("--admin_tenant_name",
                        help="Tenant name for keystone admin user")
    parser.add_argument("--region_name",
                        help="Region name for openstack API")
    args = parser.parse_args(remaining_argv)

    # conf-file values arrive as plain strings; normalize to lists
    if isinstance(args.cassandra_server_list, str):
        args.cassandra_server_list = args.cassandra_server_list.split()
    if isinstance(args.collectors, str):
        args.collectors = args.collectors.split()
    if args.region_name and args.region_name.lower() == 'none':
        args.region_name = None
    return args
def _parse_args(self, args_str):
    '''
    Eg. python provision_physical_router.py
                                   --api_server_ip 127.0.0.1
                                   --api_server_port 8082
    '''
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file",
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    defaults = {
        #'public_vn_name': 'default-domain:'
        #'default-project:default-virtual-network',
        'api_server_ip': '127.0.0.1',
        'api_server_port': '8082',
    }
    ksopts = {
        'admin_user': '******',
        'admin_password': '******',
        'admin_tenant_name': 'default-domain'
    }

    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read([args.conf_file])
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    parser.set_defaults(**defaults)

    parser.add_argument("--api_server_ip", help="IP address of api server")
    parser.add_argument("--api_server_port", help="Port of api server")
    parser.add_argument("--admin_user", help="Name of keystone admin user")
    parser.add_argument("--admin_password",
                        help="Password of keystone admin user")
    parser.add_argument("--admin_tenant_name",
                        # fixed help-text typo: "Tenamt" -> "Tenant"
                        help="Tenant name for keystone admin user")

    subparsers = parser.add_subparsers(help='Operations (add|delete|list)')
    add_p = subparsers.add_parser('add')
    del_p = subparsers.add_parser('delete')
    lst_p = subparsers.add_parser('list')
    add_p.add_argument("add", nargs=2, help="name 'pattern'")
    del_p.add_argument("delete", nargs='+', help="name [name ...]")
    lst_p.add_argument("list", nargs='*', help="[name ...]")

    self._args = parser.parse_args(remaining_argv)
import smtplib
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
import sys
import ConfigParser

# CLI: <num_faces> <image_path> <timestamp>
num_faces = int(sys.argv[1])
image = sys.argv[2]
time = sys.argv[3]

cfg = ConfigParser.SafeConfigParser()
cfg.read('email.cfg')
try:
    smtp_host = cfg.get('smtp', 'host')
    smtp_port = cfg.get('smtp', 'port')
    from_addr = cfg.get('smtp', 'from')
    to_addr = cfg.get('smtp', 'to')
except ConfigParser.Error:
    # Only swallow configuration errors; a bare "except:" would also
    # catch SystemExit/KeyboardInterrupt.
    sys.exit()

mail = MIMEMultipart()
mail.preamble = "The attached photo was taken at {0}, and contains {1} detected face(s).".format(
    time, num_faces)
mail['Subject'] = '{0} faces detected in footage'.format(num_faces)
mail['From'] = from_addr
mail['To'] = to_addr

with open(image, 'rb') as image_file:
    attachment = MIMEImage(image_file.read())
import ConfigParser # Define the names of the options option_names = [ 'from-default', 'from-section', 'file-only', 'init-only', 'init-and-file', 'from-vars', ] # Initialize the parser with some defaults parser = ConfigParser.SafeConfigParser( defaults={ 'from-default': 'value from defaults passed to init', 'init-only': 'value from defaults passed to init', 'init-and-file': 'value from defaults passed to init', 'from-section': 'value from defaults passed to init', 'from-vars': 'value from defaults passed to init', }) print 'Defaults before loading file:' defaults = parser.defaults() for name in option_names: if name in defaults: print ' %-15s = %r' % (name, defaults[name]) # Load the configuration file parser.read('with-defaults.ini') print '\nDefaults after loading file:' for name in option_names: if name in defaults: print '%-15s = %r' % (name, defaults[name])
def parse(self):
    '''
    command line example
    contrail-alarm-gen --log_level SYS_DEBUG
                       --logging_level DEBUG
                       --log_category test
                       --log_file <stdout>
                       --use_syslog
                       --syslog_facility LOG_USER
                       --collectors 127.0.0.1:8086
                       --disc_server_ip 127.0.0.1
                       --disc_server_port 5998
                       --worker_id 0
                       --redis_password
                       --http_server_port 5995
                       --redis_uve_list 127.0.0.1:6379
                       --kafka_broker_list 127.0.0.1:9092
                       --conf_file /etc/contrail/contrail-alarm-gen.conf

    [DEFAULTS]
    log_local = 0
    log_level = SYS_DEBUG
    log_category =
    log_file = /var/log/contrail/contrail-alarm-gen.log
    '''
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    kwargs = {'help': "Specify config file", 'metavar': "FILE"}
    # fall back to the stock conf file only if it exists
    if os.path.exists(self.CONF_DEFAULT_PATH):
        kwargs['default'] = self.CONF_DEFAULT_PATH
    conf_parser.add_argument("-c", "--conf_file", **kwargs)
    args, remaining_argv = conf_parser.parse_known_args(self._argv.split())

    defaults = {
        'collectors': ['127.0.0.1:8086'],
        'kafka_broker_list': ['127.0.0.1:9092'],
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'http_server_port': 5995,
        'redis_uve_list': ['127.0.0.1:6379']
    }
    redis_opts = {
        'redis_server_port': 6379,
        'redis_password': None,
    }
    disc_opts = {
        'disc_server_ip': None,
        'disc_server_port': 5998,
    }

    config = None
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.optionxform = str
        config.read([args.conf_file])
        # guard the section lookup so a conf file without [DEFAULTS]
        # doesn't raise NoSectionError (matches the other sections below)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
        if 'REDIS' in config.sections():
            redis_opts.update(dict(config.items('REDIS')))
        if 'DISCOVERY' in config.sections():
            disc_opts.update(dict(config.items('DISCOVERY')))

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(redis_opts)
    defaults.update(disc_opts)
    parser.set_defaults(**defaults)

    parser.add_argument(
        "--collectors",
        help="List of Collector IP addresses in ip:port format",
        nargs="+")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--http_server_port", type=int,
                        help="introspect server port")
    parser.add_argument("--disc_server_ip",
                        help="Discovery Server IP address")
    parser.add_argument("--disc_server_port", type=int,
                        help="Discovery Server port")
    parser.add_argument("--redis_server_port", type=int,
                        help="Redis server port")
    parser.add_argument("--redis_password",
                        help="Redis server password")
    parser.add_argument(
        "--kafka_broker_list",
        help="List of bootstrap kafka brokers in ip:port format",
        nargs="+")
    parser.add_argument(
        "--redis_uve_list",
        help="List of redis-uve in ip:port format. For internal use only",
        nargs="+")
    self._args = parser.parse_args(remaining_argv)

    # conf-file values arrive as plain strings; normalize to lists
    if isinstance(self._args.collectors, str):
        self._args.collectors = self._args.collectors.split()
    if isinstance(self._args.kafka_broker_list, str):
        self._args.kafka_broker_list = self._args.kafka_broker_list.split()
    if isinstance(self._args.redis_uve_list, str):
        self._args.redis_uve_list = self._args.redis_uve_list.split()
#!/usr/bin/python import numpy as np import ipromps_lib from sklearn.externals import joblib import os import ConfigParser # the current file path file_path = os.path.dirname(__file__) # read models cfg file cp_models = ConfigParser.SafeConfigParser() cp_models.read(os.path.join(file_path, '../cfg/models.cfg')) # read models params datasets_path = os.path.join(file_path, cp_models.get('datasets', 'path')) num_joints = cp_models.getint('datasets', 'num_joints') num_obs_joints = cp_models.getint('datasets', 'num_obs_joints') len_norm = cp_models.getint('datasets', 'len_norm') num_basis = cp_models.getint('basisFunc', 'num_basisFunc') sigma_basis = cp_models.getfloat('basisFunc', 'sigma_basisFunc') num_alpha_candidate = cp_models.getint('phase', 'num_phaseCandidate') # the pkl data datasets_pkl_path = os.path.join(datasets_path, 'pkl') task_name_path = os.path.join(datasets_pkl_path, 'task_name_list.pkl') datasets_norm_preproc_path = os.path.join(datasets_pkl_path, 'datasets_norm_preproc.pkl') min_max_scaler_path = os.path.join(datasets_pkl_path, 'min_max_scaler.pkl') noise_cov_path = os.path.join(datasets_pkl_path, 'noise_cov.pkl')
raise IOError( "Directory not found: %s" % (outdir,) ) outfile = os.path.join( outdir, self.OUTFILE ) version = "3.1" self._curs.execute( "select version from info" ) row = self._curs.fetchone() if row is None : raise Exception( "Error: no version in the dictionary!" ) version = row[0] with open( outfile, "w" ) as out : out.write( "# this dictionary version is for the Java validator used by the annotators\n" ) out.write( "# and ADIT-NMR post-processor\n" ) out.write( "#\n" ) out.write( "data_%s\n\n" % (version,) ) for table in self.TABLES : self.print_table( table, out ) #################################################################################################### # if __name__ == "__main__" : props = ConfigParser.SafeConfigParser() props.read( sys.argv[1] ) dbfile = props.get( "dictionary", "sqlite3.file" ) if not os.path.exists( dbfile ) : raise IOError( "File not found: %s (create dictionary first?)" % (dbfile,) ) db = ValidatorWriter.create_validator_dictionary( props, dburl = dbfile, verbose = True )
def _parse_args(self, args_str):
    '''
    Parse command-line options, optionally seeded from a config file.

    Eg: python provision_physical_device.py
            --device_name my_router
            --vendor_name Juniper
            --product_name QFX5100
            --device_mgmt_ip 10.204.217.39
            --device_tunnel_ip 34.34.34.34
            --device_tor_agent nodec45-1
            --device_tsn nodec45
            --api_server_ip 10.204.221.33
            --api_server_port 8082
            --api_server_use_ssl False
            --oper <add | del>
            --admin_user admin
            --admin_password contrail123
            --admin_tenant_name admin
            --openstack_ip 10.204.221.34
            --snmp_monitor
            --local_port 161
            --v2_community public
    '''
    # Stage 1: a help-less parser that only extracts -c/--conf_file so the
    # config file contents can become defaults for the real parser below.
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file",
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())
    # lowest-priority defaults (overridden by config file, then by CLI)
    defaults = {
        'api_server_ip': '127.0.0.1',
        'api_server_port': '8082',
        'api_server_use_ssl': False,
        'oper': 'add',
    }
    # keystone credentials; placeholders, real values come from config/CLI
    ksopts = {
        'admin_user': '******',
        'admin_password': '******',
        'admin_tenant_name': 'default-domain'
    }
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read([args.conf_file])
        defaults.update(dict(config.items("DEFAULTS")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
    # Stage 2: the real parser. Don't suppress add_help here so it handles -h.
    parser = argparse.ArgumentParser(
        # Inherit options from conf_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    parser.set_defaults(**defaults)
    parser.add_argument(
        "--device_name", help="Name of physical device", required=True)
    parser.add_argument(
        "--vendor_name", help="Vendor type of the device", required=True)
    parser.add_argument(
        "--product_name", default='', help="Product name of the device")
    parser.add_argument(
        "--device_mgmt_ip", help="Management IP of the device")
    parser.add_argument(
        "--device_tunnel_ip", help="Tunnel IP of the device")
    parser.add_argument(
        "--device_tor_agent", help="Tor Agent Name for the device")
    parser.add_argument(
        "--device_tsn", help="TSN Name for the device")
    parser.add_argument(
        "--snmp_monitor", help="monitor through snmp", action='store_true')
    parser.add_argument(
        "--local_port", help="snmp port to connect to")
    parser.add_argument(
        "--v2_community", help="community string for snmp")
    parser.add_argument(
        "--api_server_port", help="Port of api server")
    parser.add_argument("--api_server_use_ssl",
                        help="Use SSL to connect with API server")
    parser.add_argument(
        "--openstack_ip", help="Openstack node ip")
    parser.add_argument(
        "--oper", default='add',
        help="Provision operation to be done(add or del)")
    parser.add_argument(
        "--admin_user", help="Name of keystone admin user")
    parser.add_argument(
        "--admin_password", help="Password of keystone admin user")
    parser.add_argument(
        "--admin_tenant_name", help="Tenant name for keystone admin user")
    # exactly one of --api_server_ip / --use_admin_api must be supplied
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--api_server_ip", help="IP address of api server")
    group.add_argument("--use_admin_api",
                       default=False,
                       help="Connect to local api-server on admin port",
                       action="store_true")
    self._args = parser.parse_args(remaining_argv)
def parse_args(args_str):
    '''
    Parse device-manager command-line / config-file options.

    Eg. python device_manager.py --rabbit_server localhost
            -- rabbit_port 5672
            -- rabbit_user guest
            -- rabbit_password guest
            --cassandra_server_list 10.1.2.3:9160
            --api_server_ip 10.1.2.3
            --api_server_port 8082
            --api_server_use_ssl False
            --analytics_server_ip 10.1.2.3
            --analytics_server_port 8181
            --analytics_username admin
            --analytics_password admin
            --zk_server_ip 10.1.2.3
            --zk_server_port 2181
            --collectors 127.0.0.1:8086
            --http_server_port 8090
            --log_local
            --log_level SYS_DEBUG
            --log_category test
            --log_file <stdout>
            --use_syslog
            --syslog_facility LOG_USER
            --cluster_id <testbed-name>
            --repush_interval 15
            --repush_max_interval 300
            --push_delay_per_kb 0.01
            --push_delay_max 100
            --push_delay_enable True
            --push_mode 0
            [--reset_config]
    '''
    # Stage 1: a help-less parser that only extracts -c/--conf_file so the
    # config file(s) can seed the defaults of the real parser below.
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())
    # built-in fallback defaults (overridden by config file, then by CLI)
    defaults = {
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        'rabbit_user': '******',
        'rabbit_password': '******',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'cassandra_server_list': '127.0.0.1:9160',
        'api_server_ip': '127.0.0.1',
        'api_server_port': '8082',
        'api_server_use_ssl': False,
        'analytics_server_ip': '127.0.0.1',
        'analytics_server_port': '8081',
        'analytics_username': None,
        'analytics_password': None,
        'zk_server_ip': '127.0.0.1',
        'zk_server_port': '2181',
        'collectors': None,
        'http_server_port': '8096',
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'cluster_id': '',
        'logging_conf': '',
        'logger_class': None,
        'repush_interval': '15',
        'push_mode': 0,
        'repush_max_interval': '600',
        'push_delay_per_kb': '0.01',
        'push_delay_max': '100',
        'push_delay_enable': True,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    secopts = {
        'use_certs': False,
        'keyfile': '',
        'certfile': '',
        'ca_certs': '',
    }
    ksopts = {
        'admin_user': '******',
        'admin_password': '******',
        'admin_tenant_name': 'default-domain',
    }
    cassandraopts = {'cassandra_user': None, 'cassandra_password': None}
    sandeshopts = SandeshConfig.get_default_options()
    # parse_args() below clobbers args.conf_file via set_defaults; remember
    # the stage-1 value so the caller still sees which files were read
    saved_conf_file = args.conf_file
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read(args.conf_file)
        defaults.update(dict(config.items("DEFAULTS")))
        # SSL material is only honored when use_certs is explicitly enabled
        if ('SECURITY' in config.sections() and
                'use_certs' in config.options('SECURITY')):
            if config.getboolean('SECURITY', 'use_certs'):
                secopts.update(dict(config.items("SECURITY")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        if 'CASSANDRA' in config.sections():
            cassandraopts.update(dict(config.items('CASSANDRA')))
        SandeshConfig.update_options(sandeshopts, config)
    # Stage 2: the real parser. Don't suppress add_help here so it handles -h.
    parser = argparse.ArgumentParser(
        # Inherit options from conf_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(secopts)
    defaults.update(ksopts)
    defaults.update(cassandraopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)
    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument("--cassandra_use_ssl", action="store_true",
                        help="Enable TLS for cassandra communication")
    parser.add_argument("--cassandra_ca_certs", help="Cassandra CA certs")
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument("--api_server_ip", help="IP address of API server")
    parser.add_argument("--api_server_port", help="Port of API server")
    parser.add_argument("--api_server_use_ssl",
                        help="Use SSL to connect with API server")
    parser.add_argument("--analytics_server_ip",
                        help="IP address of Analytics server")
    parser.add_argument("--analytics_server_port",
                        help="Port of Analytics server")
    parser.add_argument("--analytics_username",
                        help="Username for Analytics server")
    parser.add_argument("--analytics_password",
                        help="Password for Analytics server")
    parser.add_argument("--zk_server_ip",
                        help="IP address:port of zookeeper server")
    parser.add_argument("--collectors",
                        help="List of VNC collectors in ip:port format",
                        nargs="+")
    parser.add_argument("--http_server_port",
                        help="Port of local HTTP server")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--admin_user", help="Name of keystone admin user")
    parser.add_argument("--admin_password",
                        help="Password of keystone admin user")
    parser.add_argument("--admin_tenant_name",
                        help="Tenant name for keystone admin user")
    parser.add_argument("--cluster_id",
                        help="Used for database keyspace separation")
    parser.add_argument(
        "--logging_conf",
        help=("Optional logging configuration file, default: None"))
    parser.add_argument("--logger_class",
                        help=("Optional external logger class, default: None"))
    parser.add_argument("--repush_interval",
                        help="time interval for config re push")
    parser.add_argument("--repush_max_interval",
                        help="max time interval for config re push")
    parser.add_argument(
        "--push_delay_per_kb",
        help="time delay between two successful commits per kb config size")
    parser.add_argument("--push_delay_max",
                        help="max time delay between two successful commits")
    parser.add_argument("--push_delay_enable",
                        help="enable delay between two successful commits")
    parser.add_argument("--cassandra_user", help="Cassandra user name")
    parser.add_argument("--cassandra_password", help="Cassandra password")
    SandeshConfig.add_parser_arguments(parser)
    args = parser.parse_args(remaining_argv)
    # values sourced from the config file arrive as whitespace-separated
    # strings rather than lists from nargs='+'; normalize them
    if type(args.cassandra_server_list) is str:
        args.cassandra_server_list = args.cassandra_server_list.split()
    if type(args.collectors) is str:
        args.collectors = args.collectors.split()
    args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    # config-file booleans come through as strings; coerce explicitly
    args.cassandra_use_ssl = (str(args.cassandra_use_ssl).lower() == 'true')
    args.conf_file = saved_conf_file
    return args
def main(config_file):
    """Run the CommuteClock main loop.

    Reads settings from *config_file*, initializes the two Adafruit LED
    displays, then polls the Google Distance Matrix API roughly once a
    minute, showing the estimated arrival time on the 7-segment display and
    a traffic picture on the 8x8 matrix.  Exits after ``numRetries``
    consecutive failures or on Ctrl-C.

    :param config_file: path to the INI configuration file
    """
    print(
        '{0}\n\n{1}: Display Commute Time\n [data provided by Google Maps and 511.org]\n [https://developers.google.com/maps/]\n [http://www.511.org/developers/]\n'
        .format(time.strftime('%A, %d %b %Y, %H:%M:%S', time.localtime()),
                verstring))
    ##
    # open the configuration file
    print('Reading Config file ' + config_file)
    config = ConfigParser.SafeConfigParser()
    try:
        config_info = config.read(config_file)
        if not config_info:
            print('ERROR: config file not found ("{0}")'.format(config_file))
            sys.exit(1)
    # was a bare "except:", which also caught the SystemExit raised just
    # above and printed a misleading parse-error message before exiting
    except Exception:
        print('ERROR: Problem with config file (missing, bad format, etc)')
        print(' (Config file "{0}")'.format(config_file))
        print("{0}".format(sys.exc_info()))
        sys.exit(1)
    # get the config values
    try:
        USER_KEY = config.get('USER', 'USER_KEY')
        API_TOKEN = config.get('USER', 'API_TOKEN')
        if config.has_option('USER', 'DATA_FILE'):
            DATA_FILE = config.get('USER', 'DATA_FILE')
            if DATA_FILE:
                print('Commute Data will be saved to: {0}'.format(DATA_FILE))
        else:
            # falsy placeholder (was the wrong-typed []): file logging off
            DATA_FILE = ''
        STARTPOINT = config.get('COMMUTE', 'ORIGIN')
        ENDPOINT = config.get('COMMUTE', 'DESTINATION')
        ROADS = config.get('COMMUTE', 'ROADS')
        SEG_LIST = config.get('COMMUTE', 'SEG_LIST').split(',')
        EST_OTHER = int(config.get('COMMUTE', 'EST_OTHER'))
        UPDATE_INTERVAL = int(config.get('DISPLAY', 'UPDATE_INTERVAL'))
        COMMUTE_PIXEL = int(config.get('DISPLAY', 'COMMUTE_PIXEL'))
    except Exception:  # was a bare except
        print("ERROR: Unable to read from config file")
        print(" Unexpected error: {0}".format(sys.exc_info()))
        sys.exit(1)
    ##
    # initialize LED displays
    print('Initializing LED displays')
    # Create display instances with their I2C addresses.
    display = BicolorMatrix8x8.BicolorMatrix8x8(address=0x70)
    sevenseg = SevenSegment.SevenSegment(address=0x72)
    # begin() must be called once before using each display.
    display.begin()
    display.clear()
    display.write_display()
    sevenseg.begin()
    sevenseg.clear()
    sevenseg.write_display()
    # turn on the colon to show that we are working...
    sevenseg.set_colon(True)
    sevenseg.write_display()
    # show startup animation
    startup_splash(display, sevenseg)
    ## Create 2D arrays for pixel values
    # array for traffic intensity
    pxArray = [[0 for y in range(8)] for x in range(8)]
    # array for traffic incidents (top line of display)
    tiArray = [0 for x in range(8)]
    # image shown on fatal exit: an X drawn with two red lines
    errorImage = Image.new('RGB', (8, 8))
    edraw = ImageDraw.Draw(errorImage)
    edraw.line((1, 1, 6, 6), fill=(255, 0, 0))
    edraw.line((1, 6, 6, 1), fill=(255, 0, 0))
    ##
    # main loop
    print('')
    retries = 0       # consecutive failures; give up after numRetries
    update_count = 0  # throttles the LED-matrix refresh rate
    while retries < numRetries:
        try:
            # blink colon to show update in progress
            sevenseg.set_colon(False)
            sevenseg.write_display()
            print('Requesting traffic data from Google Maps... ({0})'.format(
                time.strftime('%A, %d %b %Y, %H:%M:%S', time.localtime())))
            # Get current travel time from Google Maps
            # (the 511.org traffic-incident query was disabled upstream)
            rGGL = requests.get(
                'https://maps.googleapis.com/maps/api/distancematrix/xml?units=imperial&origins={0}&destinations={1}&departure_time=now&key={2}'
                .format(STARTPOINT, ENDPOINT, USER_KEY))
            if rGGL.status_code != 200:
                print('ERROR: Problem with Google API request')
                print('Response status code {0}'.format(rGGL.status_code))
                retries += 1
                continue
            nowTime = time.localtime()
            # blink colon to show update in progress
            sevenseg.set_colon(True)
            sevenseg.write_display()
            # rGGL.content holds the reply XML string; parse it into a tree
            try:
                root = ET.fromstring(rGGL.content)
            # was a bare except, which also mislabeled Ctrl-C as a parse error
            except Exception:
                print('ERROR: XML parse error.')
                time.sleep(5)
                retries += 1  # quit after trying a few times
                continue
            # the tree contains routes arranged in rows of elements; Google
            # returns the "best" route first, so expect one row, one element
            rows = root.findall('row')
            if len(rows) > 1:
                print('WARNING {0} rows.'.format(len(rows)))
            if len(rows) == 0:
                print('ERROR: no routes found (0 rows)')
                retries += 1  # quit after trying a few times
                continue
            elements = rows[0].findall('element')
            if len(elements) > 1:
                print('WARNING {0} elements.'.format(len(elements)))
            if len(elements) == 0:
                print('ERROR: no routes found (0 elements)')
                retries += 1  # quit after trying a few times
                continue
            # get Travel Time; duration values are in seconds
            durTraf = elements[0].find('duration_in_traffic')
            curTime = int(durTraf.find('value').text)
            # /60.0: the original integer division truncated to whole
            # minutes despite the {0:.2f} float format
            print(' Current Travel Time with traffic: {0:.2f} min'.format(
                curTime / 60.0))
            durTyp = elements[0].find('duration')
            typTime = int(durTyp.find('value').text)
            print(' Travel Time without traffic: {0:.2f} min'.format(
                typTime / 60.0))
            # no incident feed (511 query disabled)
            incident_list = []
            # estimate arrival time; Google's estimate is end-to-end, so no
            # other factors are added (curTime is in seconds)
            estDriveTime = time.localtime(time.mktime(nowTime) + (curTime))
            print(' Estimated time of Arrival: {0}'.format(
                time.strftime('%A, %d %b %Y, %H:%M', estDriveTime)))
            # show arrival time estimate on the 7-segment display
            sevenseg.print_number_str(time.strftime('%H%M', estDriveTime))
            sevenseg.write_display()
            # append results to the data file if configured; "with"
            # guarantees the file is closed even if the write fails
            if DATA_FILE:
                with open(DATA_FILE, 'a') as f:
                    f.write('{0},{1},{2}\n'.format(time.mktime(nowTime),
                                                   curTime, typTime))
            ## Update LED matrix: at startup, then every UPDATE_INTERVAL
            ## data updates (the matrix refreshes less often than the clock)
            if update_count == 0 or update_count > UPDATE_INTERVAL:
                pxArray, tiArray = update_matrix(display, pxArray,
                                                 curTime / 60, typTime / 60,
                                                 incident_list, tiArray,
                                                 COMMUTE_PIXEL)
                # reset update count
                update_count = 1
            update_count += 1
            # wait until the top of the next minute
            time.sleep(60 - time.localtime()[5])
            print('')
            # successful loop: reset the failure counter
            retries = 0
        # Use Ctrl-C to quit manually, or for Supervisord to kill process
        except KeyboardInterrupt:
            print('\n** Program Stopped (INT signal) ** ' +
                  time.strftime("%Y/%m/%d-%H:%M:%S", time.localtime()))
            # (removed: an unused streamEndTime assignment and an
            # unreachable print('') that followed this break)
            break
        except requests.exceptions.ConnectionError:
            print('\n** Connection Error ** ' +
                  time.strftime("%Y/%m/%d-%H:%M:%S", time.localtime()))
            print(" Will Retry Connection...")
            time.sleep(10)
            retries += 1  # quit after trying a few times
        except Exception:  # was a bare except, which would hide SystemExit
            # NOTE(review): retries is NOT incremented here, so a persistent
            # unknown error retries forever -- preserved as-is; confirm intent
            print('\n** Other Exception ** ' +
                  time.strftime("%Y/%m/%d-%H:%M:%S", time.localtime()))
            print("Unexpected error: {0}".format(sys.exc_info()))
    if retries > 0:
        print('\nCommuteClock: Exit after {0} retries\n'.format(retries))
        # show error on display
        display.set_image(errorImage)
        display.write_display()
    else:
        display.clear()
        display.write_display()
        sevenseg.clear()
        sevenseg.write_display()
    print('CommuteClock: Program Exit\n')
def parse(self):
    '''
    Parse contrail-broadview command-line / config-file options.

    command line example
    contrail-broadview [-h] [-c FILE]
                       [--analytics_api ANALYTICS_API [ANALYTICS_API ...]]
                       [--collectors COLLECTORS [COLLECTORS ...]]
                       [--log_file LOG_FILE] [--log_local]
                       [--log_category LOG_CATEGORY] [--log_level LOG_LEVEL]
                       [--use_syslog] [--syslog_facility SYSLOG_FACILITY]
                       [--scan_frequency SCAN_FREQUENCY]
                       [--http_server_port HTTP_SERVER_PORT]

    optional arguments:
      -h, --help            show this help message and exit
      -c FILE, --conf_file FILE
                            Specify config file
      --analytics_api ANALYTICS_API [ANALYTICS_API ...]
                            List of analytics-api IP addresses in ip:port
                            format
      --collectors COLLECTORS [COLLECTORS ...]
                            List of Collector IP addresses in ip:port format
      --log_file LOG_FILE   Filename for the logs to be written to
      --log_local           Enable local logging of sandesh messages
      --log_category LOG_CATEGORY
                            Category filter for local logging of sandesh
                            messages
      --log_level LOG_LEVEL
                            Severity level for local logging of sandesh
                            messages
      --use_syslog          Use syslog for logging
      --syslog_facility SYSLOG_FACILITY
                            Syslog facility to receive log lines
      --scan_frequency SCAN_FREQUENCY
                            Time between snmp poll
      --http_server_port HTTP_SERVER_PORT
                            introspect server port
    '''
    # Stage 1: a help-less parser that only reads -c/--conf_file so config
    # values can seed the defaults of the real parser below.
    conf_parser = argparse.ArgumentParser(add_help=False)
    kwargs = {
        'help': "Specify config file",
        'metavar': "FILE",
        'action': 'append'
    }
    # fall back to the packaged default config path when it exists
    if os.path.exists(self.CONF_DEFAULT_PATH):
        kwargs['default'] = [self.CONF_DEFAULT_PATH]
    conf_parser.add_argument("-c", "--conf_file", **kwargs)
    args, remaining_argv = conf_parser.parse_known_args(self._argv.split())
    # built-in fallback defaults (overridden by config file, then by CLI)
    defaults = {
        'collectors': ['127.0.0.1:8086'],
        'analytics_api': ['127.0.0.1:8081'],
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'scan_frequency': 60,
        'http_server_port': 5922,
        'zookeeper': '127.0.0.1:2181',
        'sandesh_send_rate_limit':
            SandeshSystem.get_sandesh_send_rate_limit(),
        'device_file': '/etc/contrail/bv_devices.conf',
    }
    sandesh_opts = {
        'sandesh_keyfile': '/etc/contrail/ssl/private/server-privkey.pem',
        'sandesh_certfile': '/etc/contrail/ssl/certs/server.pem',
        'sandesh_ca_cert': '/etc/contrail/ssl/certs/ca-cert.pem',
        'sandesh_ssl_enable': False,
        'introspect_ssl_enable': False
    }
    config = None
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        # keep option names case-sensitive
        config.optionxform = str
        config.read(args.conf_file)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
        if 'SANDESH' in config.sections():
            sandesh_opts.update(dict(config.items('SANDESH')))
            # items() yields strings; booleans need explicit conversion
            if 'sandesh_ssl_enable' in config.options('SANDESH'):
                sandesh_opts['sandesh_ssl_enable'] = config.getboolean(
                    'SANDESH', 'sandesh_ssl_enable')
            if 'introspect_ssl_enable' in config.options('SANDESH'):
                sandesh_opts['introspect_ssl_enable'] = config.getboolean(
                    'SANDESH', 'introspect_ssl_enable')
    # Stage 2: the real parser. Don't suppress add_help here so it handles -h.
    parser = argparse.ArgumentParser(
        # Inherit options from conf_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(sandesh_opts)
    parser.set_defaults(**defaults)
    parser.add_argument(
        "--analytics_api",
        help="List of analytics-api IP addresses in ip:port format",
        nargs="+")
    parser.add_argument(
        "--collectors",
        help="List of Collector IP addresses in ip:port format",
        nargs="+")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--scan_frequency", type=int,
                        help="Time between snmp poll")
    parser.add_argument("--http_server_port", type=int,
                        help="introspect server port")
    parser.add_argument("--device_file", help="boardview devices")
    parser.add_argument("--zookeeper", help="ip:port of zookeeper server")
    parser.add_argument("--sandesh_send_rate_limit", type=int,
                        help="Sandesh send rate limit in messages/sec.")
    parser.add_argument("--sandesh_keyfile",
                        help="Sandesh ssl private key")
    parser.add_argument("--sandesh_certfile",
                        help="Sandesh ssl certificate")
    parser.add_argument("--sandesh_ca_cert",
                        help="Sandesh CA ssl certificate")
    parser.add_argument("--sandesh_ssl_enable", action="store_true",
                        help="Enable ssl for sandesh connection")
    parser.add_argument("--introspect_ssl_enable", action="store_true",
                        help="Enable ssl for introspect connection")
    self._args = parser.parse_args(remaining_argv)
    # values sourced from the config file arrive as single strings rather
    # than lists from nargs='+'; normalize them
    if type(self._args.collectors) is str:
        self._args.collectors = self._args.collectors.split()
    if type(self._args.analytics_api) is str:
        self._args.analytics_api = self._args.analytics_api.split()
    # expose the raw parsed config (or None) to callers
    self._args.config_sections = config
    # load the BroadView device inventory from the configured device file
    self.devices = device_config.DeviceConfig.from_file(self.device_file())
def parse(self):
    '''
    Parse contrail-alarm-gen command-line / config-file options.

    command line example
    contrail-alarm-gen --log_level SYS_DEBUG --logging_level DEBUG
            --log_category test --log_file <stdout>
            --use_syslog --syslog_facility LOG_USER
            --worker_id 0
            --partitions 5
            --redis_password
            --http_server_port 5995
            --redis_server_port 6379
            --redis_uve_list 127.0.0.1:6379
            --alarmgen_list 127.0.0.1:0
            --kafka_broker_list 127.0.0.1:9092
            --zk_list 127.0.0.1:2181
            --rabbitmq_server_list 127.0.0.1:5672
            --conf_file /etc/contrail/contrail-alarm-gen.conf
    '''
    # Stage 1: a help-less parser that only reads -c/--conf_file so config
    # values can seed the defaults of the real parser below.
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action="append",
                             help="Specify config file", metavar="FILE",
                             default=ServicesDefaultConfigurationFiles.get(
                                 SERVICE_ALARM_GENERATOR, None))
    args, remaining_argv = conf_parser.parse_known_args(self._argv.split())
    # built-in fallback defaults (overridden by config file, then by CLI)
    defaults = {
        'host_ip': '127.0.0.1',
        'collectors': [],
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'http_server_port': 5995,
        'worker_id': '0',
        'partitions': 15,
        'zk_list': None,
        'alarmgen_list': ['127.0.0.1:0'],
        'cluster_id': '',
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    redis_opts = {
        'redis_server_port': 6379,
        'redis_password': None,
        'redis_uve_list': ['127.0.0.1:6379'],
        'redis_use_ssl': False,
        'redis_keyfile': None,
        'redis_certfile': None,
        'redis_ca_cert': None
    }
    configdb_opts = {
        'rabbitmq_server_list': None,
        'rabbitmq_port': 5672,
        'rabbitmq_user': '******',
        'rabbitmq_password': '******',
        'rabbitmq_vhost': None,
        'rabbitmq_ha_mode': False,
        'rabbitmq_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'config_db_server_list': None,
        'config_db_username': None,
        'config_db_password': None,
        'config_db_use_ssl': False,
        'config_db_ca_certs': None
    }
    kafka_opts = {
        'kafka_broker_list': ['127.0.0.1:9092'],
        'kafka_ssl_enable': False,
        'kafka_keyfile': '/etc/contrail/ssl/private/server-privkey.pem',
        'kafka_certfile': '/etc/contrail/ssl/certs/server.pem',
        'kafka_ca_cert': '/etc/contrail/ssl/certs/ca-cert.pem'
    }
    sandesh_opts = SandeshConfig.get_default_options()
    config = None
    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        # keep option names case-sensitive
        config.optionxform = str
        config.read(args.conf_file)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items('DEFAULTS')))
        if 'REDIS' in config.sections():
            redis_opts.update(dict(config.items('REDIS')))
        if 'CONFIGDB' in config.sections():
            configdb_opts.update(dict(config.items('CONFIGDB')))
        if 'KAFKA' in config.sections():
            kafka_opts.update(dict(config.items('KAFKA')))
        SandeshConfig.update_options(sandesh_opts, config)
    # Stage 2: the real parser. Don't suppress add_help here so it handles -h.
    parser = argparse.ArgumentParser(
        # Inherit options from conf_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # show argument defaults in --help output
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    defaults.update(redis_opts)
    defaults.update(configdb_opts)
    defaults.update(sandesh_opts)
    defaults.update(kafka_opts)
    parser.set_defaults(**defaults)
    parser.add_argument("--host_ip", help="Host IP address")
    parser.add_argument(
        "--collectors",
        help="List of Collector IP addresses in ip:port format",
        nargs="+")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--http_server_port", type=int,
                        help="introspect server port")
    parser.add_argument("--worker_id", help="Worker Id")
    parser.add_argument("--partitions", type=int,
                        help="Number of partitions for hashing UVE keys")
    parser.add_argument("--redis_server_port", type=int,
                        help="Redis server port")
    parser.add_argument("--redis_password", help="Redis server password")
    parser.add_argument("--redis_use_ssl", action='store_true',
                        help="Enable SSL encryption for REDIS connection")
    parser.add_argument("--redis_certfile", type=str,
                        help="Location of redis ssl host certificate")
    parser.add_argument("--redis_keyfile", type=str,
                        help="Location of redis ssl private key")
    parser.add_argument("--redis_ca_cert", type=str,
                        help="Location of redis ssl CA certificate")
    parser.add_argument(
        "--kafka_broker_list",
        help="List of bootstrap kafka brokers in ip:port format",
        nargs="+")
    parser.add_argument("--zk_list",
                        help="List of zookeepers in ip:port format",
                        nargs="+")
    parser.add_argument(
        "--rabbitmq_server_list", type=str,
        help="List of Rabbitmq server ip address separated by comma")
    parser.add_argument("--rabbitmq_port", help="Rabbitmq server port")
    parser.add_argument("--rabbitmq_user", help="Username for Rabbitmq")
    parser.add_argument("--rabbitmq_password", help="Password for Rabbitmq")
    parser.add_argument("--rabbitmq_vhost", help="vhost for Rabbitmq")
    parser.add_argument(
        "--rabbitmq_ha_mode", action="store_true",
        help="True if the rabbitmq cluster is mirroring all queue")
    parser.add_argument("--config_db_server_list",
                        help="List of cassandra servers in ip:port format",
                        nargs='+')
    parser.add_argument("--config_db_username",
                        help="Cassandra user name")
    parser.add_argument("--config_db_password", help="Cassandra password")
    parser.add_argument("--config_db_use_ssl",
                        help="Cassandra SSL enable flag")
    parser.add_argument("--config_db_ca_certs",
                        help="Cassandra CA certs file path")
    parser.add_argument(
        "--redis_uve_list",
        help="List of redis-uve in ip:port format. For internal use only",
        nargs="+")
    parser.add_argument(
        "--alarmgen_list",
        help="List of alarmgens in ip:inst format. For internal use only",
        nargs="+")
    parser.add_argument("--cluster_id", help="Analytics Cluster Id")
    parser.add_argument("--kafka_ssl_enable", action='store_true',
                        help="Enable SSL encryption for kafka connection")
    parser.add_argument("--kafka_keyfile", type=str,
                        help="Location of kafka ssl private key")
    parser.add_argument("--kafka_certfile", type=str,
                        help="Location of kafka ssl host certificate")
    parser.add_argument("--kafka_ca_cert", type=str,
                        help="Location of kafka ssl CA certificate")
    SandeshConfig.add_parser_arguments(parser)
    self._args = parser.parse_args(remaining_argv)
    # values sourced from the config file arrive as single strings rather
    # than lists from nargs='+'; normalize them
    if type(self._args.collectors) is str:
        self._args.collectors = self._args.collectors.split()
    if type(self._args.kafka_broker_list) is str:
        self._args.kafka_broker_list = self._args.kafka_broker_list.split()
    if type(self._args.zk_list) is str:
        self._args.zk_list = self._args.zk_list.split()
    if type(self._args.redis_uve_list) is str:
        self._args.redis_uve_list = self._args.redis_uve_list.split()
    if type(self._args.alarmgen_list) is str:
        self._args.alarmgen_list = self._args.alarmgen_list.split()
    if type(self._args.config_db_server_list) is str:
        self._args.config_db_server_list = \
            self._args.config_db_server_list.split()
    # restore the conf_file list as seen by the stage-1 parser
    self._args.conf_file = args.conf_file
    # redis DB index used for aggregation, offset via environment (default 1)
    redis_agg_db_offset = os.getenv('ALARMGEN_REDIS_AGGREGATE_DB_OFFSET',
                                    "1")
    self._args._redis_agg_db = ALARMGEN_REDIS_AGGREGATE_DB_BASE_INDEX + \
        int(redis_agg_db_offset) - 1
    # string -> bool coercion for flags that may come from the config file
    self._args.kafka_ssl_enable = (str(
        self._args.kafka_ssl_enable).lower() == 'true')
    self._args.config_db_use_ssl = (str(
        self._args.config_db_use_ssl).lower() == 'true')
    self._args.redis_use_ssl = (str(
        self._args.redis_use_ssl).lower() == 'true')
# NOTE(review): a stale comment here said "uncomment to enable logging to
# console" -- these handler lines are active. Mirror log output to the
# console in addition to any configured log file.
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter(log_format))
logging.getLogger('').addHandler(console_handler)

# Clear the screen of anyone running "tail -f" on the log file
# (ANSI clear-screen + cursor-home escape sequence).
logging.info('\x1b[2J\x1b[H')

# record our pid so the process is easy to find/kill
logging.info('My pid: %s' % os.getpid())

# read the configuration file; default_config seeds the parser defaults
logging.debug('reading config file')
config = ConfigParser.SafeConfigParser(default_config)
try:
    parsed_files = config.read(cmd_args.config)
except ConfigParser.Error as e:
    # config file present but malformed
    msg = 'Error parsing config file %s: %s' % (cmd_args.config, str(e))
    logging.critical(msg)
    logging.critical('Exiting.')
    sys.exit(msg)
if len(parsed_files) == 0 and cmd_args.config != '':
    # a config file was requested but could not be opened
    msg = 'Unable to open config file ' + cmd_args.config
    logging.critical(msg)
    logging.critical('Exiting.')
    sys.exit(msg)
def __init__(self):
    """Load board test settings and peripheral support information.

    Reads ``targets/test_config.ini`` (relative to this file) for the
    target cpu/board/build/serial settings, then
    ``targets/<cpu>/peripheral_config.ini`` for the peripherals that
    target supports, and fills ``self.testConfigs`` with one
    (peripheral, speed, serial_port, 'True') tuple per supported
    peripheral.

    :raises RuntimeError: if either configuration file is missing.
    """
    # Get current path; forward slashes keep paths uniform on Windows.
    this_file = inspect.getfile(inspect.currentframe())
    current_path = os.path.abspath(os.path.dirname(this_file)).replace(
        '\\', '/')

    # Get test config info from external file
    test_cfg_file = os.path.join(current_path, '..', 'targets',
                                 'test_config.ini')
    if not os.path.isfile(test_cfg_file):
        raise RuntimeError("No such configuration file: %s" % test_cfg_file)
    config = ConfigParser.SafeConfigParser()
    config.read(test_cfg_file)
    self.targetName = config.get('config', 'cpu')
    self.projectName = config.get('config', 'board')
    self.buildVersion = config.get('config', 'build')
    self.kibble_serial_port = config.get('config', 'serial_port')
    self.kibble_assistant_serial_port = config.get(
        'config', 'assistant_serial_port')
    self.jlinkUsbId = config.get('config', 'jlinkUsbId')
    self.kibble_project_image = config.get('config', 'project_image')

    # Get the peripheral info from peripheral_config.ini, located in the
    # per-target directory named after the cpu read above.
    peripheral_info_cfg_file = os.path.join(current_path, '..', 'targets',
                                            self.targetName,
                                            'peripheral_config.ini')
    if not os.path.isfile(peripheral_info_cfg_file):
        raise RuntimeError("No such configuration file: %s" %
                           peripheral_info_cfg_file)
    config = ConfigParser.SafeConfigParser()
    config.read(peripheral_info_cfg_file)
    self.kibble_uart_support = config.getboolean('supported_peripherals',
                                                 'uart_support')
    self.kibble_i2c_support = config.getboolean('supported_peripherals',
                                                'i2c_support')
    self.kibble_spi_support = config.getboolean('supported_peripherals',
                                                'spi_support')
    self.kibble_can_support = config.getboolean('supported_peripherals',
                                                'can_support')
    self.kibble_usb_support = config.getboolean('supported_peripherals',
                                                'usb_support')

    # Build one entry per supported peripheral.  UART talks over the main
    # serial port; i2c/spi/can use the assistant serial port; usb has no
    # speed/port.  (The original code carried no-op `else: pass` branches
    # on every check; they have been dropped.)
    self.testConfigs = []
    if self.kibble_uart_support:
        uart_speed = config.get('speed', 'uart_speed')
        uart_info = ('uart', uart_speed, self.kibble_serial_port, 'True')
        self.testConfigs.append(uart_info)
    if self.kibble_i2c_support:
        i2c_speed = config.get('speed', 'i2c_speed')
        i2c_info = ('i2c', i2c_speed,
                    self.kibble_assistant_serial_port, 'True')
        self.testConfigs.append(i2c_info)
    if self.kibble_spi_support:
        spi_speed = config.get('speed', 'spi_speed')
        spi_info = ('spi', spi_speed,
                    self.kibble_assistant_serial_port, 'True')
        self.testConfigs.append(spi_info)
    if self.kibble_can_support:
        can_speed = config.get('speed', 'can_speed')
        can_info = ('can', can_speed,
                    self.kibble_assistant_serial_port, 'True')
        self.testConfigs.append(can_info)
    if self.kibble_usb_support:
        usb_info = ('usb', '', '', 'True')
        self.testConfigs.append(usb_info)
def _parse_args(self, args_str): ''' Eg. python provision_encap.py --api_server_ip 127.0.0.1 --api_server_port 8082 --api_server_use_ssl False --encap_priority "MPLSoUDP,MPLSoGRE,VXLAN" --vxlan_vn_id_mode "automatic" --oper <add | delete> ''' # Source any specified config/ini file # Turn off help, so we print all options in response to -h conf_parser = argparse.ArgumentParser(add_help=False) conf_parser.add_argument("-c", "--conf_file", help="Specify config file", metavar="FILE") args, remaining_argv = conf_parser.parse_known_args(args_str.split()) defaults = { 'api_server_ip': '127.0.0.1', 'api_server_port': '8082', 'api_server_use_ssl': False, 'oper': 'add', 'encap_priority': 'MPLSoUDP,MPLSoGRE,VXLAN', 'vxlan_vn_id_mode': 'automatic' } ksopts = { 'admin_user': '******', 'admin_password': '******', 'admin_tenant_name': 'admin' } if args.conf_file: config = ConfigParser.SafeConfigParser() config.read([args.conf_file]) defaults.update(dict(config.items("DEFAULTS"))) if 'KEYSTONE' in config.sections(): ksopts.update(dict(config.items("KEYSTONE"))) # Override with CLI options # Don't surpress add_help here so it will handle -h parser = argparse.ArgumentParser( # Inherit options from config_parser parents=[conf_parser], # print script description with -h/--help description=__doc__, # Don't mess with format of description formatter_class=argparse.RawDescriptionHelpFormatter, ) defaults.update(ksopts) parser.set_defaults(**defaults) parser.add_argument("--api_server_ip", help="IP address of api server") parser.add_argument("--api_server_port", help="Port of api server") parser.add_argument("--api_server_use_ssl", help="Use SSL to connect with API server") parser.add_argument("--encap_priority", help="List of Encapsulation priority", required=True) parser.add_argument("--vxlan_vn_id_mode", help="Virtual Network id type to be used") parser.add_argument( "--oper", default='add', help="Provision operation to be done(add or delete)") parser.add_argument("--admin_user", 
help="Name of keystone admin user") parser.add_argument("--admin_password", help="Password of keystone admin user") parser.add_argument("--admin_tenant_name", help="Tenant name for keystone admin user") self._args = parser.parse_args(remaining_argv) if not self._args.encap_priority: parser.error('encap_priority is required')
def parse_args():
    """Build and run the command-line parser for the Nest CLI.

    Defaults may come from an INI file (``--conf``, default
    ``~/.config/<prog>/config``, section ``[nest]`` falling back to
    ``[DEFAULT]``); explicit command-line flags override them.

    :return: the parsed :class:`argparse.Namespace`.
    """
    prog = os.path.basename(sys.argv[0])
    config_file = os.path.sep.join(('~', '.config', prog, 'config'))

    # First pass: only extract --conf so the config file can seed defaults.
    conf_parser = argparse.ArgumentParser(prog=prog, add_help=False)
    conf_parser.add_argument('--conf', default=config_file,
                             help='config file (default %s)' % config_file,
                             metavar='FILE')
    args, remaining_argv = conf_parser.parse_known_args()

    defaults = {'celsius': False}
    config_file = os.path.expanduser(args.conf)
    if os.path.exists(config_file):
        # BUG FIX: SafeConfigParser is a deprecated alias removed in
        # Python 3.12; ConfigParser is the same class.
        config = configparser.ConfigParser()
        config.read([config_file])
        if config.has_section('nest'):
            defaults.update(dict(config.items('nest')))
        else:
            defaults.update(dict(config.items('DEFAULT')))

    description = 'Command line interface to Nest™ Thermostats'
    parser = argparse.ArgumentParser(description=description,
                                     parents=[conf_parser])
    parser.set_defaults(**defaults)

    parser.add_argument('--token-cache', dest='token_cache',
                        help='auth access token cache file',
                        metavar='TOKEN_CACHE_FILE')
    parser.add_argument('-t', '--token', dest='token',
                        help='auth access token', metavar='TOKEN')
    parser.add_argument('-u', '--user', dest='user',
                        help='username for nest.com', metavar='USER')
    parser.add_argument('-p', '--password', dest='password',
                        help='password for nest.com', metavar='PASSWORD')
    parser.add_argument('-c', '--celsius', dest='celsius',
                        action='store_true',
                        help='use celsius instead of fahrenheit')
    parser.add_argument('-s', '--serial', dest='serial',
                        help='optional, specify serial number of nest '
                             'thermostat to talk to')
    # BUG FIX: the two help fragments below lacked a joining space,
    # rendering as "name toscope".
    parser.add_argument('-S', '--structure', dest='structure',
                        help='optional, specify structure name to '
                             'scope device actions')
    parser.add_argument('-i', '--index', dest='index', default=0, type=int,
                        help='optional, specify index number of nest to '
                             'talk to')

    subparsers = parser.add_subparsers(dest='command', help='command help')
    temp = subparsers.add_parser('temp', help='show/set temperature')
    temp.add_argument('temperature', nargs='*', type=float,
                      help='target temperature to set device to')

    fan = subparsers.add_parser('fan', help='set fan "on" or "auto"')
    fan_group = fan.add_mutually_exclusive_group()
    fan_group.add_argument('--auto', action='store_true', default=False,
                           help='set fan to auto')
    fan_group.add_argument('--on', action='store_true', default=False,
                           help='set fan to on')

    mode = subparsers.add_parser('mode', help='show/set current mode')
    mode_group = mode.add_mutually_exclusive_group()
    mode_group.add_argument('--cool', action='store_true', default=False,
                            help='set mode to cool')
    mode_group.add_argument('--heat', action='store_true', default=False,
                            help='set mode to heat')
    mode_group.add_argument('--range', action='store_true', default=False,
                            help='set mode to range')
    mode_group.add_argument('--off', action='store_true', default=False,
                            help='set mode to off')

    away = subparsers.add_parser('away', help='show/set current away status')
    away_group = away.add_mutually_exclusive_group()
    away_group.add_argument('--away', action='store_true', default=False,
                            help='set away status to "away"')
    away_group.add_argument('--home', action='store_true', default=False,
                            help='set away status to "home"')

    subparsers.add_parser('target', help='show current temp target')
    subparsers.add_parser('humid', help='show current humidity')
    target_hum = subparsers.add_parser('target_hum',
                                       help='show/set target humidity')
    target_hum.add_argument('humidity', nargs='*',
                            help='specify target humidity value or auto '
                                 'to auto-select a humidity based on outside '
                                 'temp')

    subparsers.add_parser('show', help='show everything')

    return parser.parse_args()
def create_targz(tmp, files, start_time, options, user, state, set_state,
                 file_format):
    """Bundle exported xml files (plus optional uploads/skins) into a
    backup archive, then add a ``backup.inf`` metadata file to it.

    :param tmp: directory holding the exported files
    :param files: names of files inside *tmp* to archive
    :param start_time: datetime when the export started
    :param options: export options dict (keys: 'anon_data',
        'uplodaded_files' [sic], 'import_skins_folder')
    :param user: user running the export; ``user.id`` is recorded
    :param state: progress-state dict, mutated in place
    :param set_state: callback persisting *state* after each step
    :param file_format: 'zip' for a .zip archive, anything else -> tar.gz
    :return: file name of the archive written to
        EXPORTER_BACKUP_STORAGE.
    """
    now = datetime.datetime.now()

    # Derive the archive-name prefix from the site's domain, falling back
    # to 'localhost' when APP_URL doesn't look like a hostname.
    domain = re.match(r'[\w-]+\.[\w-]+(\.[\w-]+)*', djsettings.APP_URL)
    if domain:
        # BUG FIX: re match objects have no .get(); use .group(0).
        domain = '_'.join(domain.group(0).split('.'))
    else:
        domain = 'localhost'

    fname = "%s-%s" % (domain, now.strftime('%Y%m%d%H%M'))

    if file_format == 'zip':
        full_fname = "%s.zip" % fname
    else:
        full_fname = "%s.tar.gz" % fname

    # Open the archive and normalize the "add file" operation so the rest
    # of the function is format-agnostic.
    if file_format == 'zip':
        t = zipfile.ZipFile(
            os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE, full_fname),
            'w')

        def add_to_file(f, a):
            t.write(f, a)
    else:
        t = tarfile.open(os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE,
                                      full_fname),
                         mode='w:gz')

        def add_to_file(f, a):
            t.add(f, a)

    state['overall']['status'] = _('Compressing xml files')
    set_state()

    for f in files:
        add_to_file(os.path.join(tmp, f), "/%s" % f)

    if options.get('uplodaded_files', False):
        state['overall']['status'] = _('Importing uploaded files')
        set_state()
        export_upfiles(t)

    if options.get('import_skins_folder', False):
        state['overall']['status'] = _('Importing skins folder')
        set_state()
        export_skinsfolder(t)

    state['overall']['status'] = _('Writing inf file.')
    set_state()

    # Metadata sidecar describing who/when/what was exported.
    inf = ConfigParser.SafeConfigParser()

    inf.add_section(DATE_AND_AUTHOR_INF_SECTION)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'file-name', full_fname)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'author', unicode(user.id))
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'site', djsettings.APP_URL)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'started',
            start_time.strftime(DATETIME_FORMAT))
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'finished',
            now.strftime(DATETIME_FORMAT))

    inf.add_section(OPTIONS_INF_SECTION)
    inf.set(OPTIONS_INF_SECTION, 'anon-data',
            str(options.get('anon_data', False)))
    inf.set(OPTIONS_INF_SECTION, 'with-upfiles',
            str(options.get('uplodaded_files', False)))
    inf.set(OPTIONS_INF_SECTION, 'with-skins',
            str(options.get('import_skins_folder', False)))

    inf.add_section(META_INF_SECTION)
    # renamed loop var (was `id`, shadowing the builtin)
    for key, s in state.items():
        inf.set(META_INF_SECTION, key, str(s['count']))

    with open(os.path.join(tmp, '%s.backup.inf' % fname), 'wb') as inffile:
        inf.write(inffile)
    add_to_file(os.path.join(tmp, '%s.backup.inf' % fname), '/backup.inf')

    state['overall']['status'] = _('Saving backup file')
    set_state()

    t.close()

    return full_fname
def main(argv=None):
    '''
    Runs the program. There are three ways to pass arguments
    1) environment variables TFB_*
    2) configuration file benchmark.cfg
    3) command line flags
    In terms of precedence, 3 > 2 > 1, so config file trumps environment
    variables but command line flags have the final say
    '''
    # Do argv default this way, as doing it in the functional declaration
    # sets it at compile time
    if argv is None:
        argv = sys.argv

    # Enable unbuffered output so messages will appear in the proper order
    # with subprocess output.
    sys.stdout = Unbuffered(sys.stdout)

    # Update python environment
    # 1) Ensure the current directory (which should be the benchmark home
    #    directory) is in the path so that the tests can be imported.
    sys.path.append('.')
    # 2) Ensure toolset/setup/linux is in the path so that the tests can
    #    "import setup_util".
    sys.path.append('toolset/setup/linux')

    # Update environment for shell scripts
    os.environ['FWROOT'] = setup_util.get_fwroot()
    os.environ['IROOT'] = os.environ['FWROOT'] + '/installs'
    # 'Ubuntu', '14.04', 'trusty' respectively
    os.environ['TFB_DISTRIB_ID'], os.environ[
        'TFB_DISTRIB_RELEASE'], os.environ[
            'TFB_DISTRIB_CODENAME'] = platform.linux_distribution()
    # App server cpu count
    os.environ['CPU_COUNT'] = str(multiprocessing.cpu_count())

    print("FWROOT is {!s}.".format(os.environ['FWROOT']))

    # First pass: only pull out --conf_file so the config file can supply
    # defaults for the full parser below.
    conf_parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False)
    conf_parser.add_argument(
        '--conf_file',
        default='benchmark.cfg',
        metavar='FILE',
        help='Optional configuration file to provide argument defaults. '
             'All config options can be overridden using the command line.')
    args, remaining_argv = conf_parser.parse_known_args()

    defaults = {}
    try:
        # BUG FIX: the fallback existence check concatenated FWROOT and
        # 'benchmark.cfg' with no path separator inside a single-argument
        # os.path.join; join the two components properly.
        if not os.path.exists(
                os.path.join(os.environ['FWROOT'],
                             args.conf_file)) and not os.path.exists(
                                 os.path.join(os.environ['FWROOT'],
                                              'benchmark.cfg')):
            print("No config file found. Aborting!")
            exit(1)
        # open() is used purely to raise IOError early if unreadable.
        with open(os.path.join(os.environ['FWROOT'], args.conf_file)):
            config = ConfigParser.SafeConfigParser()
            config.read([os.path.join(os.environ['FWROOT'], args.conf_file)])
            defaults.update(dict(config.items("Defaults")))
            # Convert strings into proper python types
            for k, v in defaults.iteritems():
                try:
                    defaults[k] = literal_eval(v)
                except Exception:
                    pass
    except IOError:
        print("Configuration file not found!")
        exit(1)

    ##########################################################
    # Set up default values
    ##########################################################

    # Verify and massage options
    if defaults['client_user'] is None or defaults['client_host'] is None:
        print("client_user and client_host are required!")
        print("Please check your configuration file.")
        print("Aborting!")
        exit(1)

    if defaults['database_user'] is None:
        defaults['database_user'] = defaults['client_user']
    if defaults['database_host'] is None:
        defaults['database_host'] = defaults['client_host']
    if defaults['server_host'] is None:
        defaults['server_host'] = defaults['client_host']
    if defaults['ulimit'] is None:
        defaults['ulimit'] = 200000

    os.environ['ULIMIT'] = str(defaults['ulimit'])

    ##########################################################
    # Set up argument parser
    ##########################################################
    parser = argparse.ArgumentParser(
        description="Install or run the Framework Benchmarks test suite.",
        parents=[conf_parser],
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog='''If an argument includes (type int-sequence), then it
        accepts integer lists in multiple forms. Using a single number
        e.g. 5 will create a list [5]. Using commas will create a list
        containing those values e.g. 1,3,6 creates [1, 3, 6]. Using three
        colon-separated numbers of start:step:end will create a list,
        using the semantics of python's range function, e.g. 1:3:15
        creates [1, 4, 7, 10, 13] while 0:1:5 creates [0, 1, 2, 3, 4]
        ''')

    # Install options
    parser.add_argument('--clean', action='store_true', default=False,
                        help='Removes the results directory')
    parser.add_argument('--clean-all', action='store_true', dest='clean_all',
                        default=False,
                        help='Removes the results and installs directories')
    parser.add_argument('--new', action='store_true', default=False,
                        help='Initialize a new framework test')

    # Test options
    parser.add_argument('--test', nargs='+', help='names of tests to run')
    parser.add_argument(
        '--test-dir', nargs='+', dest='test_dir',
        help='name of framework directory containing all tests to run')
    parser.add_argument('--exclude', nargs='+',
                        help='names of tests to exclude')
    parser.add_argument('--type',
                        choices=['all', 'json', 'db', 'query',
                                 'cached_query', 'fortune', 'update',
                                 'plaintext'],
                        default='all',
                        help='which type of test to run')
    parser.add_argument(
        '-m', '--mode', choices=['benchmark', 'verify', 'debug'],
        default='benchmark',
        help='verify mode will only start up the tests, curl the urls and '
             'shutdown. debug mode will skip verification and leave the '
             'server running.')
    parser.add_argument('--list-tests', action='store_true', default=False,
                        help='lists all the known tests that can run')

    # Benchmark options
    parser.add_argument('--duration', default=15,
                        help='Time in seconds that each test should run for.')
    parser.add_argument(
        '--sleep', type=int, default=60,
        help='the amount of time to sleep after starting each test to allow '
             'the server to start up.')

    # Misc Options
    parser.add_argument(
        '--results-name',
        help='Gives a name to this set of results, formatted as a date',
        default='(unspecified, datetime = %Y-%m-%d %H:%M:%S)')
    parser.add_argument(
        '--results-environment',
        help='Describes the environment in which these results were gathered',
        default='(unspecified, hostname = %s)' % socket.gethostname())
    parser.add_argument(
        '--results-upload-uri', default=None,
        help='A URI where the in-progress results.json file will be POSTed '
             'periodically')
    parser.add_argument(
        '--parse',
        help='Parses the results of the given timestamp and merges that '
             'with the latest results')
    parser.add_argument(
        '-v', '--verbose', action='store_true', default=False,
        help='Causes the configuration to print before any other commands '
             'are executed.')
    parser.add_argument(
        '--quiet', action='store_true', default=False,
        help='Only print a limited set of messages to stdout, keep the bulk '
             'of messages in log files only')
    # Must do this after add, or each option's default will override the
    # configuration file default
    parser.set_defaults(**defaults)
    args = parser.parse_args(remaining_argv)

    if args.new:
        Scaffolding()
        return 0

    benchmarker = Benchmarker(vars(args))

    # Run the benchmarker in the specified mode
    # Do not use benchmarker variables for these checks,
    # they are either str or bool based on the python version
    if args.list_tests:
        benchmarker.run_list_tests()
    elif args.parse is not None:  # BUG FIX: was `!= None`
        benchmarker.parse_timestamp()
    else:
        return benchmarker.run()
def parse_args(args_str):
    '''
    Parse manager options from config file(s) and command line.

    Eg. python ironic_notification_manager.py \
        -c ironic-notification-manager.conf \
        -c contrail-keystone-auth.conf
    '''
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    # Hard-coded fallbacks; config file(s) override these, CLI flags
    # override both.
    defaults = {
        'collectors': '127.0.0.1:8086',
        'introspect_port': int(
            ServiceHttpPortMap[ModuleNames[Module.IRONIC_NOTIF_MANAGER]]),
        'log_level': 'SYS_INFO',
        'log_local': False,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY
    }
    ksopts = {
        'auth_host': '127.0.0.1',
        'auth_port': '35357',
        'auth_protocol': 'http',
        'auth_version': 'v2.0',
        'admin_user': '',
        'admin_password': '',
        'admin_tenant_name': '',
        'user_domain_name': None,
        'identity_uri': None,
        'project_domain_name': None,
        'insecure': True,
        'cafile': '',
        'certfile': '',
        'keyfile': '',
        'auth_type': 'password',
        'auth_url': '',
        'region_name': '',
        'endpoint_type': 'internalURL'
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    sandesh_opts = SandeshConfig.get_default_options()

    if args.conf_file:
        config = ConfigParser.SafeConfigParser()
        config.read(args.conf_file)
        defaults.update(dict(config.items("DEFAULTS")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        SandeshConfig.update_options(sandesh_opts, config)

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    defaults.update(sandesh_opts)
    parser.set_defaults(**defaults)
    args = parser.parse_args(remaining_argv)

    # Values sourced from a config file arrive as strings; normalize them.
    # (isinstance replaces the original `type(x) is str` comparisons.)
    if isinstance(args.collectors, str):
        args.collectors = args.collectors.split()
    if isinstance(args.introspect_port, str):
        args.introspect_port = int(args.introspect_port)
    return args
def load_config(self, path):
    """Parse the INI file at *path* and return the parser object.

    A missing or unreadable file is not an error: ``read`` simply skips
    it and the returned parser is empty.

    :param path: filesystem path of the configuration file
    :return: a populated ``SafeConfigParser`` instance
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read(path)
    return parser