Code example #1
    def test_get_config_value_dtype_None(self):
        config = RawConfigParser()
        config["section"] = {"key": "val"}

        val = get_config_value(config, "section", "key")
        self.assertEqual(val, "val")
Code example #2
File: settings.py  Project: zeyaliu/TexadaProject
"""
Django settings for texada project.

"""

import os
import logging
from configparser import RawConfigParser

config = RawConfigParser()
config.read('/etc/texada_settings/settings.ini')

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

ROOT_URLCONF = 'texadadjango.urls'

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('secrets', 'SECRET_KEY')
#debugging
DEBUG_API = config.getboolean('django', 'DEBUG_API')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.getboolean('django', 'DEBUG')

ALLOWED_HOSTS = ['192.168.56.101', '127.0.0.1', 'localhost']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin', 'django.contrib.auth',
    'django.contrib.contenttypes', 'django.contrib.sessions',
    'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework',
Code example #3
def entry():
    _deprecation_check(sys.argv[0])

    from django.core.exceptions import ImproperlyConfigured
    from django.core.management import execute_from_command_line, find_commands
    from django.core.management import CommandParser
    from django.core.management.base import BaseCommand

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'desktop.settings')
    cm_config_file = '/etc/cloudera-scm-agent/config.ini'
    ld_path_orig = None
    if "LD_LIBRARY_PATH" in list(os.environ.keys()):
        ld_path_orig = os.environ["LD_LIBRARY_PATH"]

    # What's the subcommand being run?
    # This code uses the same logic from django.core.management to handle command args
    subcommand = None
    if "--skip-reload" in sys.argv:
        skip_reload = True
        sys.argv.remove("--skip-reload")
    else:
        skip_reload = False

    # Check if --cm-managed flag is set and strip it out
    # to prevent from sending to subcommands
    if "--cm-managed" in sys.argv:
        sys.argv.remove("--cm-managed")
        cm_managed = True
    else:
        cm_managed = False

    if len(sys.argv) > 1:
        subcommand = sys.argv[1]
    parser = CommandParser(None,
                           usage="%(prog)s subcommand [options] [args]",
                           add_help=False)
    parser.parse_known_args(sys.argv[2:])

    if len(sys.argv) > 1:
        prof_id = subcommand = sys.argv[1]
        #Check if this is a CM managed cluster
        if os.path.isfile(
                cm_config_file) and not cm_managed and not skip_reload:
            print("ALERT: This appears to be a CM Managed environment")
            print(
                "ALERT: HUE_CONF_DIR must be set when running hue commands in CM Managed environment"
            )
            print("ALERT: Please run 'hue <command> --cm-managed'")
    else:
        prof_id = str(os.getpid())

    # CM managed configure env vars
    if cm_managed:
        if sys.version_info[0] > 2:
            from configparser import NoOptionError, RawConfigParser
        else:
            from ConfigParser import NoOptionError, RawConfigParser

        config = RawConfigParser()
        config.read(cm_config_file)
        try:
            cm_agent_run_dir = config.get(
                'General', 'agent_wide_credential_cache_location')
        except NoOptionError:
            cm_agent_run_dir = '/var/run/cloudera-scm-agent'

        #Parse CM supervisor include file for Hue and set env vars
        cm_supervisor_dir = cm_agent_run_dir + '/supervisor/include'
        cm_process_dir = cm_agent_run_dir + '/process'
        hue_env_conf = None
        envline = None
        cm_hue_string = "HUE_SERVER"

        for file in os.listdir(cm_supervisor_dir):
            if cm_hue_string in file:
                hue_env_conf = file
                hue_env_conf = cm_supervisor_dir + "/" + hue_env_conf

        if hue_env_conf is None:
            process_dirs = fnmatch.filter(os.listdir(cm_process_dir),
                                          '*%s*' % cm_hue_string)
            process_dirs.sort()
            hue_process_dir = cm_process_dir + "/" + process_dirs[-1]
            hue_env_conf = fnmatch.filter(os.listdir(hue_process_dir),
                                          'supervisor.conf')[0]
            hue_env_conf = hue_process_dir + "/" + hue_env_conf

        if hue_env_conf is not None:
            if os.path.isfile(hue_env_conf):
                hue_env_conf_file = open(hue_env_conf, "r")
                for line in hue_env_conf_file:
                    if "environment" in line:
                        envline = line
                    if "directory" in line:
                        empty, hue_conf_dir = line.split("directory=")
                        os.environ["HUE_CONF_DIR"] = hue_conf_dir.rstrip()
        else:
            print("This appears to be a CM managed cluster, but the")
            print("supervisor/include file for Hue could not be found")
            print("in order to successfully run commands that access")
            print("the database you need to set the following env vars:")
            print("")
            print("  export JAVA_HOME=<java_home>")
            print(
                "  export HUE_CONF_DIR=\"%s/`ls -1 %s | grep %s | sort -n | tail -1 `\""
                % (cm_process_dir, cm_process_dir, cm_hue_string))
            print("  export HUE_IGNORE_PASSWORD_SCRIPT_ERRORS=1")
            print("  export HUE_DATABASE_PASSWORD=<hueDBpassword>")
            print("If using Oracle as your database:")
            print("  export LD_LIBRARY_PATH=/path/to/instantclient")
            print("")
            print(
                "If the above does not work, make sure Hue has been started on this server."
            )

        if envline is not None:
            empty, environment = envline.split("environment=")
            for envvar in environment.split(","):
                include_env_vars = ("HADOOP_C", "PARCEL", "SCM_DEFINES",
                                    "LD_LIBRARY")
                if any(include_env_var in envvar
                       for include_env_var in include_env_vars):
                    envkey, envval = envvar.split("=")
                    envval = envval.replace("'", "").rstrip()
                    os.environ[envkey] = envval

        #Set JAVA_HOME
        if "JAVA_HOME" not in list(os.environ.keys()):
            if os.path.isfile(
                    '/usr/lib64/cmf/service/common/cloudera-config.sh'):
                locate_java = subprocess.Popen([
                    'bash', '-c',
                    '. /usr/lib64/cmf/service/common/cloudera-config.sh; locate_java_home'
                ],
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
            elif os.path.isfile(
                    '/opt/cloudera/cm-agent/service/common/cloudera-config.sh'
            ):
                locate_java = subprocess.Popen([
                    'bash', '-c',
                    '. /opt/cloudera/cm-agent/service/common/cloudera-config.sh; locate_java_home'
                ],
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
            else:
                locate_java = None

            JAVA_HOME = "UNKNOWN"

            if locate_java is not None:
                for line in iter(locate_java.stdout.readline, ''):
                    if 'JAVA_HOME' in line:
                        JAVA_HOME = line.rstrip().split('=')[1]

            if JAVA_HOME != "UNKNOWN":
                os.environ["JAVA_HOME"] = JAVA_HOME

            if "JAVA_HOME" not in list(os.environ.keys()):
                print(
                    "JAVA_HOME must be set and can't be found, please set JAVA_HOME environment variable"
                )
                print("  export JAVA_HOME=<java_home>")
                sys.exit(1)

        #Make sure we set Oracle Client if configured
        if "LD_LIBRARY_PATH" not in list(os.environ.keys()):
            if "SCM_DEFINES_SCRIPTS" in list(os.environ.keys()):
                for scm_script in os.environ["SCM_DEFINES_SCRIPTS"].split(":"):
                    if "ORACLE" in scm_script:
                        if os.path.isfile(scm_script):
                            oracle_source = subprocess.Popen(
                                ". %s; env" % scm_script,
                                stdout=subprocess.PIPE,
                                shell=True,
                                executable="/bin/bash")
                            for line in oracle_source.communicate(
                            )[0].splitlines():
                                if "LD_LIBRARY_PATH" in line:
                                    var, oracle_ld_path = line.split("=")
                                    os.environ[
                                        "LD_LIBRARY_PATH"] = oracle_ld_path

        if "LD_LIBRARY_PATH" not in list(os.environ.keys()):
            print(
                "LD_LIBRARY_PATH can't be found, if you are using ORACLE for your Hue database"
            )
            print("then it must be set, if not, you can ignore")
            print("  export LD_LIBRARY_PATH=/path/to/instantclient")

    if "LD_LIBRARY_PATH" in list(os.environ.keys()):
        if ld_path_orig is not None and ld_path_orig == os.environ[
                "LD_LIBRARY_PATH"]:
            skip_reload = True

    if not skip_reload:
        reload_with_cm_env(cm_managed)

    try:
        # Let django handle the normal execution
        if os.getenv("DESKTOP_PROFILE"):
            _profile(prof_id, lambda: execute_from_command_line(sys.argv))
        else:
            execute_from_command_line(sys.argv)
    except ImproperlyConfigured as e:
        if len(sys.argv) > 1 and sys.argv[
                1] == 'is_db_alive' and 'oracle' in str(e).lower():
            print(e,
                  file=sys.stderr)  # Oracle connector is improperly configured
            sys.exit(10)
        else:
            raise e
    except subprocess.CalledProcessError as e:
        if "altscript.sh" in str(e).lower():
            print("%s" % e)
            print(
                "HUE_CONF_DIR seems to be set to CM location and '--cm-managed' flag not used"
            )
Code example #4
    def get_merged_config(self, **options):
        """Get the final merged configuration for supvervisord, as a string.

        This is the top-level function exported by this module.  It combines
        the config file from the main project with default settings and those
        specified in the command-line, processes various special section names,
        and returns the resulting configuration as a string.
        """
        config_file = "supervisord.conf"
        
        #  Initialise the ConfigParser.
        #  Fortunately for us, ConfigParser has merge-multiple-config-files
        #  functionality built into it.  You just read each file in turn, and
        #  values from later files overwrite values from former.
        cfg = RawConfigParser()
        #  Start from the default configuration options.
        cfg.readfp(StringIO(self.DEFAULT_CONFIG))
        """
        #  Add in the project-specific config file.
        with open(config_file,"r") as f:
            data = f.read()            
        cfg.readfp(StringIO(data))
        """
        #  Add in the options from the self.services
        cfg.readfp(StringIO(self.get_config_from_services()))
        #  Add in the options specified on the command-line.
        cfg.readfp(StringIO(self.get_config_from_options(**options)))
        #  Add options from [program:__defaults__] to each program section
        #  if it happens to be missing that option.
        PROG_DEFAULTS = "program:__defaults__"
        if cfg.has_section(PROG_DEFAULTS):
            for option in cfg.options(PROG_DEFAULTS):
                default = cfg.get(PROG_DEFAULTS,option)
                for section in cfg.sections():
                    if section.startswith("program:"):
                        if not cfg.has_option(section,option):
                            cfg.set(section,option,default)
            cfg.remove_section(PROG_DEFAULTS)
        #  Add options from [program:__overrides__] to each program section
        #  regardless of whether they already have that option.
        PROG_OVERRIDES = "program:__overrides__"
        if cfg.has_section(PROG_OVERRIDES):
            for option in cfg.options(PROG_OVERRIDES):
                override = cfg.get(PROG_OVERRIDES,option)
                for section in cfg.sections():
                    if section.startswith("program:"):
                        cfg.set(section,option,override)
            cfg.remove_section(PROG_OVERRIDES)
        #  Make sure we've got a port configured for supervisorctl to
        #  talk to supervisord.  It's password-protected based on the secret key.
        #  If a unix socket has been configured then use that, otherwise
        #  use an inet server on localhost at a fixed-but-randomish port.
        
        username = hashlib.md5("angelo".encode('utf-8')).hexdigest()[:7]
        password = hashlib.md5(username.encode('utf-8')).hexdigest()
        if cfg.has_section("unix_http_server"):
            self.set_if_missing(cfg,"unix_http_server","username",username)
            self.set_if_missing(cfg,"unix_http_server","password",password)
            serverurl = "unix://" + cfg.get("unix_http_server","file")
        else:
            #  This picks a "random" port in the 9000 range to listen on.
            #  It's derived from the secret key, so it's stable for a given
            #  project but multiple projects are unlikely to collide.
            port = int(hashlib.md5(password.encode('utf-8')).hexdigest()[:3],16) % 1000
            addr = "127.0.0.1:9%03d" % (port,)
            self.set_if_missing(cfg,"inet_http_server","port",addr)
            self.set_if_missing(cfg,"inet_http_server","username",username)
            self.set_if_missing(cfg,"inet_http_server","password",password)
            serverurl = "http://" + cfg.get("inet_http_server","port")
        self.set_if_missing(cfg,"supervisorctl","serverurl",serverurl)
        self.set_if_missing(cfg,"supervisorctl","username",username)
        self.set_if_missing(cfg,"supervisorctl","password",password)
        self.set_if_missing(cfg,"rpcinterface:supervisor",
                        "supervisor.rpcinterface_factory",
                        "supervisor.rpcinterface:make_main_rpcinterface")
        
        #  Remove any [program:] sections with exclude=true
        for section in cfg.sections():
            try:
                if cfg.getboolean(section,"exclude"):
                    cfg.remove_section(section)
            except NoOptionError:
                pass
        #  Sanity-check to give better error messages.
        for section in cfg.sections():
            if section.startswith("program:"):
                if not cfg.has_option(section,"command"):
                    msg = "Process name '%s' has no command configured"
                    raise ValueError(msg % (section.split(":",1)[-1]))
        #  Write it out to a StringIO and return the data
        s = StringIO()
        cfg.write(s)
        return s.getvalue()
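The `set_if_missing` helper called repeatedly above is not shown in this excerpt; the following is only a plausible minimal sketch, assuming its job is to create the section if needed and avoid overwriting options the user has already configured:

    def set_if_missing(self, cfg, section, option, value):
        #  Hypothetical helper (not from the original source): only set the
        #  option when it has not already been configured by the user.
        if not cfg.has_section(section):
            cfg.add_section(section)
        if not cfg.has_option(section, option):
            cfg.set(section, option, value)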
Code example #5
File: config.py  Project: grayerbeard/house-power-log
 def read_file(self):
     here = "config.read_file"
     config_read = RawConfigParser()
     config_read.read(self.config_filename)
     section = "Debug"
     self.debug_reread_config = str2bool(
         config_read.get(section, 'debug_reread_config'))
     self.debug_flag_1 = str2bool(config_read.get(section, 'debug_flag_1'))
     self.debug_flag_2 = str2bool(config_read.get(section, 'debug_flag_2'))
     self.debug_flag_ftp = str2bool(
         config_read.get(section, 'debug_flag_ftp'))
     section = "Scan"
     self.scan_delay = float(config_read.get(section, 'scan_delay'))
     self.max_scans = float(config_read.get(section, 'max_scans'))
     section = "Log"
     self.log_directory = config_read.get(section, 'log_directory')
     self.local_dir_www = config_read.get(section, 'local_dir_www')
     self.log_buffer_flag = config_read.getboolean(section,
                                                   'log_buffer_flag')
     self.text_buffer_length = int(
         config_read.get(section, 'text_buffer_length'))
     section = "Ftp"
     self.ftp_creds_filename = config_read.get(section,
                                               'ftp_creds_filename')
     self.ftp_log_max_count = float(
         config_read.get(section, 'ftp_log_max_count'))
     section = "Heating_Fan"
     self.heat_max_temp = float(config_read.get(section, 'heat_max_temp'))
     self.heat_min_temp = float(config_read.get(section, 'heat_min_temp'))
     self.heat_max_speed = float(config_read.get(section, 'heat_max_speed'))
     self.heat_min_speed = float(config_read.get(section, 'heat_min_speed'))
     self.heat_max_freq = float(config_read.get(section, 'heat_max_freq'))
     self.heat_min_freq = float(config_read.get(section, 'heat_min_freq'))
     section = "Sauna"
     self.sauna_max_temp = float(config_read.get(section, 'sauna_max_temp'))
     self.sauna_min_temp = float(config_read.get(section, 'sauna_min_temp'))
     self.sauna_max_speed = float(
         config_read.get(section, 'sauna_max_speed'))
     self.sauna_min_speed = float(
         config_read.get(section, 'sauna_min_speed'))
     self.sauna_max_freq = float(config_read.get(section, 'sauna_max_freq'))
     self.sauna_min_freq = float(config_read.get(section, 'sauna_min_freq'))
     self.sauna_GPIO_port = float(
         config_read.get(section, 'sauna_GPIO_port'))
     self.sauna_brightness = float(
         config_read.get(section, 'sauna_brightness'))
     section = "Power_Log"
     self.adc_scan_size = int(config_read.get(section, 'adc_scan_size'))
     self.adc_target_scan_msec = int(
         config_read.get(section, 'adc_target_scan_msec'))
     self.adc_channel = int(config_read.get(section, 'adc_channel'))
     self.adc_default_gain = int(
         config_read.get(section, 'adc_default_gain'))
     self.adc_top_limit = int(config_read.get(section, 'adc_top_limit'))
     self.adc_bottom_limit = int(
         config_read.get(section, 'adc_bottom_limit'))
     self.adc_input_offset_mv = float(
         config_read.get(section, 'adc_input_offset_mv'))
     self.adc_input_amp_gain = float(
         config_read.get(section, 'adc_input_amp_gain'))
     self.adc_CT_ratio = float(config_read.get(section, 'adc_CT_ratio'))
     self.adc_CT_resister = float(
         config_read.get(section, 'adc_CT_resister'))
     return
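The `str2bool` helper used throughout `read_file` is defined elsewhere in the project; a minimal sketch of what such a converter might look like (the accepted spellings are an assumption):

def str2bool(value):
    # Hypothetical helper (not from the original source): treat the usual
    # truthy spellings found in ini files as True, everything else as False.
    return str(value).strip().lower() in ("1", "true", "yes", "on", "y")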
Code example #6
def ini_to_dict(fname, section):
    """Convert *section* of .ini *config* to dictionary."""
    from configparser import RawConfigParser, NoOptionError

    config = RawConfigParser()
    config.read(fname)

    conf = {}
    conf['posttroll'] = {}
    posttroll = conf['posttroll']
    posttroll['topics'] = config.get(section, 'topics').split()
    try:
        nameservers = config.get(section, 'nameservers')
        nameservers = nameservers.split()
    except (NoOptionError, ValueError):
        nameservers = None
    posttroll['nameservers'] = nameservers

    try:
        addresses = config.get(section, 'addresses')
        addresses = addresses.split()
    except (NoOptionError, ValueError):
        addresses = None
    posttroll['addresses'] = addresses

    try:
        services = config.get(section, 'services')
        services = services.split()
    except (NoOptionError, ValueError):
        services = ""
    posttroll['services'] = services

    try:
        publish_port = config.get(section, 'publish_port')
    except NoOptionError:
        publish_port = 0
    posttroll['publish_port'] = publish_port

    posttroll['publish_topic'] = config.get(section, "publish_topic")

    conf['patterns'] = {section: {}}
    patterns = conf['patterns'][section]
    patterns['pattern'] = config.get(section, 'pattern')
    patterns['critical_files'] = config.get(section, 'critical_files')
    patterns['wanted_files'] = config.get(section, 'wanted_files')
    patterns['all_files'] = config.get(section, 'all_files')
    patterns['is_critical_set'] = False
    try:
        patterns['variable_tags'] = config.get(section,
                                               'variable_tags').split(',')
    except NoOptionError:
        patterns['variable_tags'] = []

    try:
        conf['time_tolerance'] = config.getint(section, "time_tolerance")
    except NoOptionError:
        conf['time_tolerance'] = 30
    try:
        # Seconds
        conf['timeliness'] = config.getint(section, "timeliness")
    except (NoOptionError, ValueError):
        conf['timeliness'] = 1200

    try:
        conf['num_files_premature_publish'] = \
            config.getint(section, "num_files_premature_publish")
    except (NoOptionError, ValueError):
        conf['num_files_premature_publish'] = -1

    try:
        conf['group_by_minutes'] = config.getint(section, 'group_by_minutes')
    except (NoOptionError, ValueError):
        pass

    try:
        kps = config.get(section, 'keep_parsed_keys')
        conf['keep_parsed_keys'] = kps.split()
    except NoOptionError:
        pass

    try:
        conf['providing_server'] = config.get(section, "providing_server")
    except (NoOptionError, ValueError):
        conf['providing_server'] = None

    try:
        conf['time_name'] = config.get(section, "time_name")
    except (NoOptionError, ValueError):
        conf['time_name'] = 'start_time'

    try:
        conf['check_existing_files_after_start'] = config.getboolean(
            section, "check_existing_files_after_start")
    except (NoOptionError, ValueError):
        conf['check_existing_files_after_start'] = False

    return conf
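A hedged usage sketch for `ini_to_dict`; the section name and option values below are illustrative assumptions only, chosen to satisfy the options the function reads unconditionally (`topics`, `publish_topic`, `pattern`, `critical_files`, `wanted_files`, `all_files`):

# Write a minimal example .ini and convert one of its sections.
sample = """
[example_section]
topics = /new/file
publish_topic = /segments/ready
pattern = data_{start_time:%Y%m%d_%H%M}.bin
critical_files =
wanted_files = :bin
all_files = :bin
"""
with open("sample.ini", "w") as fh:
    fh.write(sample)

conf = ini_to_dict("sample.ini", "example_section")
print(conf["posttroll"]["topics"])   # ['/new/file']
print(conf["timeliness"])            # 1200 (default when the option is absent)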
Code example #7
File: eds.py  Project: christiansandberg/canopen
def export_eds(od, dest=None, file_info={}, device_commisioning=False):
    def export_object(obj, eds):
        if type(obj) is objectdictionary.Variable:
            return export_variable(obj, eds)
        if type(obj) is objectdictionary.Record:
            return export_record(obj, eds)
        if type(obj) is objectdictionary.Array:
            return export_array(obj, eds)

    def export_common(var, eds, section):
        eds.add_section(section)
        eds.set(section, "ParameterName", var.name)
        if var.storage_location:
            eds.set(section, "StorageLocation", var.storage_location)

    def export_variable(var, eds):
        if type(var.parent) is objectdictionary.ObjectDictionary:
            # top level variable
            section = "%04X" % var.index
        else:
            # nested variable
            section = "%04Xsub%X" % (var.index, var.subindex)

        export_common(var, eds, section)
        eds.set(section, "ObjectType", "0x%X" % VAR)
        if var.data_type:
            eds.set(section, "DataType", "0x%04X" % var.data_type)
        if var.access_type:
            eds.set(section, "AccessType", var.access_type)

        if getattr(var, 'default_raw', None) is not None:
            eds.set(section, "DefaultValue", var.default_raw)
        elif getattr(var, 'default', None) is not None:
            eds.set(section, "DefaultValue",
                    _revert_variable(var.data_type, var.default))

        if device_commisioning:
            if getattr(var, 'value_raw', None) is not None:
                eds.set(section, "ParameterValue", var.value_raw)
            elif getattr(var, 'value', None) is not None:
                eds.set(section, "ParameterValue",
                        _revert_variable(var.data_type, var.value))

        eds.set(section, "DataType", "0x%04X" % var.data_type)
        eds.set(section, "PDOMapping", hex(var.pdo_mappable))

        if getattr(var, 'min', None) is not None:
            eds.set(section, "LowLimit", var.min)
        if getattr(var, 'max', None) is not None:
            eds.set(section, "HighLimit", var.max)

    def export_record(var, eds):
        section = "%04X" % var.index
        export_common(var, eds, section)
        eds.set(section, "SubNumber", "0x%X" % len(var.subindices))
        ot = RECORD if type(var) is objectdictionary.Record else ARR
        eds.set(section, "ObjectType", "0x%X" % ot)
        for i in var:
            export_variable(var[i], eds)

    export_array = export_record

    eds = RawConfigParser()
    # both disables lowercasing, and allows int keys
    eds.optionxform = str

    from datetime import datetime as dt
    defmtime = dt.utcnow()

    try:
        # only if eds was loaded by us
        origFileInfo = od.__edsFileInfo
    except AttributeError:
        origFileInfo = {
            # just set some defaults
            "CreationDate": defmtime.strftime("%m-%d-%Y"),
            "CreationTime": defmtime.strftime("%I:%m%p"),
            "EdsVersion": 4.2,
        }

    file_info.setdefault("ModificationDate", defmtime.strftime("%m-%d-%Y"))
    file_info.setdefault("ModificationTime", defmtime.strftime("%I:%m%p"))
    for k, v in origFileInfo.items():
        file_info.setdefault(k, v)

    eds.add_section("FileInfo")
    for k, v in file_info.items():
        eds.set("FileInfo", k, v)

    eds.add_section("DeviceInfo")
    for eprop, odprop in [
        ("VendorName", "vendor_name"),
        ("VendorNumber", "vendor_number"),
        ("ProductName", "product_name"),
        ("ProductNumber", "product_number"),
        ("RevisionNumber", "revision_number"),
        ("OrderCode", "order_code"),
        ("SimpleBootUpMaster", "simple_boot_up_master"),
        ("SimpleBootUpSlave", "simple_boot_up_slave"),
        ("Granularity", "granularity"),
        ("DynamicChannelsSupported", "dynamic_channels_supported"),
        ("GroupMessaging", "group_messaging"),
        ("NrOfRXPDO", "nr_of_RXPDO"),
        ("NrOfTXPDO", "nr_of_TXPDO"),
        ("LSS_Supported", "LSS_supported"),
    ]:
        val = getattr(od.device_information, odprop, None)
        if val is None:
            continue
        elif type(val) is str:
            eds.set("DeviceInfo", eprop, val)
        elif type(val) in (int, bool):
            eds.set("DeviceInfo", eprop, int(val))

    # we are also adding out of spec baudrates here.
    for rate in od.device_information.allowed_baudrates.union(
        {10e3, 20e3, 50e3, 125e3, 250e3, 500e3, 800e3, 1000e3}):
        eds.set("DeviceInfo", "BaudRate_%i" % (rate / 1000),
                int(rate in od.device_information.allowed_baudrates))

    if device_commisioning and (od.bitrate or od.node_id):
        eds.add_section("DeviceComissioning")
        if od.bitrate:
            eds.set("DeviceComissioning", "BaudRate", int(od.bitrate / 1000))
        if od.node_id:
            eds.set("DeviceComissioning", "NodeID", int(od.node_id))

    eds.add_section("Comments")
    i = 0
    for line in od.comments.splitlines():
        i += 1
        eds.set("Comments", "Line%i" % i, line)
    eds.set("Comments", "Lines", i)

    eds.add_section("DummyUsage")
    for i in range(1, 8):
        key = "Dummy%04d" % i
        eds.set("DummyUsage", key, 1 if (key in od) else 0)

    def mandatory_indices(x):
        return x in {0x1000, 0x1001, 0x1018}

    def manufacturer_indices(x):
        return x in range(0x2000, 0x6000)

    def optional_indices(x):
        return all((
            x > 0x1001,
            not mandatory_indices(x),
            not manufacturer_indices(x),
        ))

    supported_mandatory_indices = list(filter(mandatory_indices, od))
    supported_optional_indices = list(filter(optional_indices, od))
    supported_manufacturer_indices = list(filter(manufacturer_indices, od))

    def add_list(section, indices):
        eds.add_section(section)
        eds.set(section, "SupportedObjects", len(indices))
        for i, index in enumerate(indices, start=1):
            eds.set(section, i, "0x%04X" % index)
        for index in indices:
            export_object(od[index], eds)

    add_list("MandatoryObjects", supported_mandatory_indices)
    add_list("OptionalObjects", supported_optional_indices)
    add_list("ManufacturerObjects", supported_manufacturer_indices)

    if not dest:
        import sys
        dest = sys.stdout

    eds.write(dest, False)
Code example #8
def parse_setup_cfg():
    cfg = RawConfigParser()
    r = cfg.read([os.path.join(ROOTDIR, "setup.cfg")])
    if len(r) != 1:
        print("Cannot read 'setup.cfg'")
        sys.exit(1)

    metadata = {
        "name": cfg.get("x-metadata", "name"),
        "version": cfg.get("x-metadata", "version"),
        "description": cfg.get("x-metadata", "description"),
    }

    _opt_value(cfg, metadata, "x-metadata", "license")
    _opt_value(cfg, metadata, "x-metadata", "maintainer")
    _opt_value(cfg, metadata, "x-metadata", "maintainer_email")
    _opt_value(cfg, metadata, "x-metadata", "author")
    _opt_value(cfg, metadata, "x-metadata", "author_email")
    _opt_value(cfg, metadata, "x-metadata", "url")
    _opt_value(cfg, metadata, "x-metadata", "download_url")
    _opt_value(cfg, metadata, "x-metadata", "classifiers", _as_lines)
    _opt_value(cfg, metadata, "x-metadata", "platforms", _as_list)
    _opt_value(cfg, metadata, "x-metadata", "packages", _as_list)
    _opt_value(cfg, metadata, "x-metadata", "keywords", _as_list)

    try:
        v = cfg.get("x-metadata", "requires-dist")

    except (NoOptionError, NoSectionError):
        pass

    else:
        requires = _as_requires(v)
        if requires:
            metadata["install_requires"] = requires

    try:
        v = cfg.get("x-metadata", "requires-test")

    except (NoOptionError, NoSectionError):
        pass

    else:
        requires = _as_requires(v)
        if requires:
            metadata["tests_require"] = requires

    try:
        v = cfg.get("x-metadata", "long_description_file")
    except (NoOptionError, NoSectionError):
        pass

    else:
        parts = []
        for nm in v.split():
            with open(nm, "r") as fp:
                parts.append(fp.read())

        metadata["long_description"] = "\n\n".join(parts)

    try:
        v = cfg.get("x-metadata", "zip-safe")
    except (NoOptionError, NoSectionError):
        pass

    else:
        metadata["zip_safe"] = _as_bool(v)

    try:
        v = cfg.get("x-metadata", "console_scripts")
    except (NoOptionError, NoSectionError):
        pass

    else:
        if "entry_points" not in metadata:
            metadata["entry_points"] = {}

        metadata["entry_points"]["console_scripts"] = v.splitlines()

    if sys.version_info[:2] <= (2, 6):
        try:
            metadata["tests_require"] += ", unittest2"
        except KeyError:
            metadata["tests_require"] = "unittest2"

    if cfg.has_option("x-metadata", "ext_modules"):
        extensions = []
        for ext in _as_list(cfg.get("x-metadata", "ext_modules")):
            ext = _extract_extension(cfg, ext)
            if ext is not None:
                extensions.append(ext)

        if extensions:
            metadata["ext_modules"] = [
                Extension(*args, **kwds) for (args, kwds) in extensions
            ]

    return metadata
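`_opt_value` and the `_as_*` converters used above are helpers defined elsewhere in the original setup script; the sketch below is only an assumption about the pattern they follow, inferred from the call sites:

from configparser import NoOptionError, NoSectionError

def _opt_value(cfg, into, section, key, transform=None):
    # Hypothetical helper (not from the original source): copy an optional
    # option into the metadata dict, applying a converter when given, and
    # silently skip options that are absent.
    try:
        value = cfg.get(section, key)
    except (NoOptionError, NoSectionError):
        return
    into[key] = transform(value) if transform is not None else value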
Code example #9
File: globalvars.py  Project: paulroub/SmokeDetector
class GlobalVars:
    false_positives = []
    whitelisted_users = []
    blacklisted_users = []
    blacklisted_usernames = []
    blacklisted_websites = []
    bad_keywords = []
    watched_keywords = {}
    ignored_posts = []
    auto_ignored_posts = []
    startup_utc = datetime.utcnow().strftime("%H:%M:%S")
    latest_questions = []
    api_backoff_time = 0
    charcoal_room_id = "11540"
    meta_tavern_room_id = "89"
    socvr_room_id = "41570"
    blockedTime = {
        "all": 0,
        charcoal_room_id: 0,
        meta_tavern_room_id: 0,
        socvr_room_id: 0
    }
    metasmoke_last_ping_time = datetime.now()
    not_privileged_warning = """
    You are not a privileged user. Please see
    [the privileges wiki page](https://charcoal-se.org/smokey/Privileges) for
    information on what privileges are and what is expected of privileged users.
    """.strip().replace("\n", " ")

    experimental_reasons = [  # Don't widely report these
        "potentially bad keyword in answer", "potentially bad keyword in body",
        "potentially bad keyword in title",
        "potentially bad keyword in username"
    ]
    non_socvr_reasons = []  # Don't report to SOCVR
    non_tavern_reasons = [  # Don't report in the Tavern
        "all-caps body",
        "all-caps answer",
        "repeating characters in body",
        "repeating characters in title",
        "repeating characters in answer",
        "few unique characters in body",
        "few unique characters in answer",
        "title has only one unique char",
        "phone number detected in title",
        "offensive body detected",
        "no whitespace in body",
        "no whitespace in answer",
    ]
    non_tavern_sites = ["stackoverflow.com"]

    parser = HTMLParser()
    parser.unescape = unescape
    wrap = Client("stackexchange.com")
    wrapm = Client("meta.stackexchange.com")
    wrapso = Client("stackoverflow.com")
    privileged_users = {
        charcoal_room_id: [
            "117490",  # Normal Human
            "66258",  # Andy
            "31768",  # ManishEarth
            "103081",  # hichris123
            "73046",  # Undo
            "88521",  # ProgramFOX
            "59776",  # Doorknob
            "31465",  # Seth
            "88577",  # Santa Claus
            "34124",  # Andrew Leach
            "54229",  # apnorton
            "20459",  # S.L. Barth
            "32436",  # tchrist
            "30477",  # Brock Adams
            "58529",  # ferrybig
            "145208",  # Robert Longson
            "178825",  # Ms Yvette
            "171800",  # JAL
            "64978",  # PeterJ
            "125141",  # Jeffrey Bosboom
            "54902",  # bummi
            "135450",  # M.A.R.
            "145604",  # Quill
            "60548",  # rene
            "121401",  # michaelpri
            "116218",  # JamesENL
            "82927",  # Braiam
            "11606",  # bwDraco
            "19761",  # Ilmari Karonen
            "108271",  # Andrew T.
            "171054",  # Magisch
            "190011",  # Petter Friberg
            "165661",  # Tunaki
            "145086",  # Wai Ha Lee
            "137665",  # ByteCommander
            "147884",  # wythagoras
            "186395",  # Åna
            "181293",  # Ashish Ahuja
            "163686",  # Gothdo
            "145827",  # angussidney
            "244748",  # Supreme Leader SnokeDetector (angussidney's sock)
            "121520",  # ArtOfCode
            "244382",  # Lt. A. Code (ArtOfCode's sock to test things with)
            "137388",  # QPaysTaxes
            "212311",  # Ryan Bemrose
            "172397",  # Kyll
            "224538",  # FrankerZ
            "61202",  # OldSkool
            "56166",  # Jan Dvorak
            "133966",  # DavidPostill
            "22839",  # djsmiley2k
            "97389",  # Kaz Wolfe
            "144962",  # DJMcMayhem
            "139423",  # NobodyNada
            "62118",  # tripleee
            "130558",  # Registered User
            "128113",  # arda
            "164318",  # Glorfindel
            "175347",  # Floern
            "180274",  # Alexander O'Mara
            "158742",  # Rob
            "207356",  # 4castle
            "133031",  # Mithrandir
            "215671",  # Locutus of Borg (Mithrandir's Sock)
            "169713",  # Mego
            "126657",  # Cerbrus
            "10145",  # Thomas Ward
            "161943",  # J F
            "195967",  # CaffeineAddiction
            "5363",  # Stijn
            "248139",  # FelixSFD
            "156721",  # D-side
            "167070",  # quartata
            "172450",  # Hovercraft Full Of Eels
            "56200",  # Eric Leschinski
            "211021",  # Henders
            "255290",  # Gypsy Spellweaver
            "64521",  # CalvT
            "165474",  # Hyper Neutrino
            "281362",  # Hyper Neutrino v2
            "169252",  # Cai
            "155243",  # Nisse Engström
            "69330",  # Sconibulus
            "164187",  # Okx
            "202619",  # John Militer
            "262693",  # suraj
            "11287",  # Martin Sleziak
            "88588",  # NVZ
            "281674",  # paper1111
            "279119",  # Tetsuya Yamamoto
            "307652",  # Ajay Brahmakshatriya
            "238145",  # Owen Hines
            "268731",  # Sagar V
            "205208",  # a-j
            "234375",  # Jarko Dubbeldam
            "139041",  # Catija
            "305737",  # Tinkeringbell
            "210948"  # WELZ
        ],
        meta_tavern_room_id: [
            "315433",  # Normal Human
            "244519",  # CRABOLO
            "244382",  # TGMCians
            "194047",  # Jan Dvorak
            "158100",  # rene
            "178438",  # Manishearth
            "237685",  # hichris123
            "215468",  # Undo
            "229438",  # ProgramFOX
            "180276",  # Doorknob
            "161974",  # Lynn Crumbling
            "186281",  # Andy
            "266094",  # Unihedro
            "245167",  # Infinite Recursion
            "230261",  # Jason C
            "213575",  # Braiam
            "241919",  # Andrew T.
            "203389",  # backwards-Seth
            "202832",  # Mooseman
            "160017",  # bwDraco
            "201151",  # bummi
            "188558",  # Frank
            "229166",  # Santa Claus
            "159034",  # Kevin Brown
            "203972",  # PeterJ
            "188673",  # Alexis King
            "258672",  # AstroCB
            "227577",  # Sam
            "255735",  # cybermonkey
            "279182",  # Ixrec
            "271104",  # James
            "220428",  # Qantas 94 Heavy
            "153355",  # tchrist
            "238426",  # Ed Cottrell
            "166899",  # Second Rikudo
            "287999",  # ASCIIThenANSI
            "208518",  # JNat
            "284141",  # michaelpri
            "260312",  # vaultah
            "244062",  # SouravGhosh
            "152859",  # Shadow Wizard
            "201314",  # apnorton
            "280934",  # M.A.Ramezani
            "200235",  # durron597
            "148310",  # Awesome Poodles / Brock Adams
            "168333",  # S.L. Barth
            "257207",  # Unikitty
            "244282",  # DroidDev
            "163250",  # Cupcake
            "298265",  # BoomsPlus
            "253560",  # josilber
            "244254",  # misterManSam
            "188189",  # Robert Longson
            "174699",  # Ilmari Karonen
            "202362",  # chmod 666 telkitty
            "289717",  # Quill
            "237813",  # bjb568
            "311345",  # Simon Klaver
            "171881",  # rekire
            "260388",  # Pandya
            "310756",  # Ms Yvette
            "262399",  # Jeffrey Bosboom
            "242209",  # JAL
            "280883",  # ByteCommander
            "302251",  # kos
            "262823",  # ArtOfCode
            "215067",  # Ferrybig
            "308386",  # Magisch
            "285368",  # angussidney
            "158829",  # Thomas Ward
            "294691",  # Mithrandir
            "203553",  # CalvT
            "289971",  # Hyper Neutrino
            "346854",  # DonQuiKong
            "284336"  # Catija
        ],
        socvr_room_id: [
            "1849664",  # Undo
            "2581872",  # hichris123
            "1198729",  # Manishearth
            "3717023",  # Normal Human aka 1999
            "2619912",  # ProgramFOX
            "578411",  # rene
            "1043380",  # gunr2171
            "2246344",  # Sam
            "2756409",  # TylerH
            "1768232",  # durron597
            "359284",  # Kevin Brown
            "258400",  # easwee
            "3622940",  # Unihedron
            "3204551",  # Deduplicator
            "4342498",  # NathanOliver
            "4639281",  # Tiny Giant
            "3093387",  # josilber
            "1652962",  # cimmanon
            "1677912",  # Mogsdad
            "656243",  # Lynn Crumbling
            "3933332",  # Rizier123
            "2422013",  # cybermonkey
            "3478852",  # Nisse Engström
            "2302862",  # Siguza
            "1324",  # Paul Roub
            "1743880",  # Tunaki
            "1663001",  # DavidG
            "2415822",  # JAL
            "4174897",  # Kyll
            "5299236",  # Kevin Guan
            "4050842",  # Thaillie
            "1816093",  # Drew
            "874188",  # Triplee
            "880772",  # approxiblue
            "1835379",  # Cerbrus
            "3956566",  # JamesENL
            "2357233",  # Ms Yvette
            "3155639",  # AlexanderOMara
            "462627",  # Praveen Kumar
            "4490559",  # intboolstring
            "1364007",  # Wai Ha Lee
            "1699210",  # bummi
            "563532",  # Rob
            "5389107",  # Magisch
            "4099593",  # bhargav-rao
            "1542723",  # Ferrybig
            "2025923",  # Tushar
            "5292302",  # Petter Friberg
            "792066",  # Braiam
            "5666987",  # Ian
            "3160466",  # ArtOfCode
            "4688119",  # Ashish Ahuja
            "3476191",  # Nobody Nada
            "2227743",  # Eric D
            "821878",  # Ryan Bemrose
            "1413395",  # Panta Rei
            "4875631",  # FrankerZ
            "2958086",  # Compass
            "499214",  # JanDvorak
            "5647260",  # Andrew L.
            "559745",  # Floern
            "5743988",  # 4castle
            "4622463",  # angussidney
            "603346",  # Thomas Ward
            "3002139",  # Baum mit Augen
            "1863564",  # QPaysTaxes
            "4687348",  # FelixSFD
            "4751173",  # Glorfindel
            "2233391",  # henders
            "4805174",  # kayess
            "2370483",  # Machavity
            "1873567",  # CalvT
            "4826457",  # suraj
            "8242698",  # user0042
            "3773011",  # Makyen
            "2858773"  # Ajay Brahmakshatriya
        ],
        '111347': [  # SOBotics
            "3160466",  # ArtOfCode
            "1849664",  # Undo
            "3002139",  # Baum mit Augen
            "3476191",  # Nobody Nada
            "5292302",  # Petter Friberg
            "4688119",  # Ashish Ahuja
            "4099593",  # Bhargav Rao
            "1743880",  # Tunaki
            "559745",  # Floern
            "4687348",  # FelixSFD
            "6375113",  # Bugs
            "4622463",  # angussidney
            "563532",  # Rob
            "4050842",  # Thaillie
            "1915448"  # g00glen00b
        ]
    }

    code_privileged_users = None

    smokeDetector_user_id = {
        charcoal_room_id: "120914",
        meta_tavern_room_id: "266345",
        socvr_room_id: "3735529",
        '111347': '3735529'
    }

    censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"}

    commit = git_commit_info()
    if md5(commit['author'][0].encode(
            'utf-8')).hexdigest() in censored_committer_names:
        commit['author'] = censored_committer_names[md5(
            commit['author'][0].encode('utf-8')).hexdigest()]

    commit_with_author = "%s (%s: *%s*)" % (
        commit['id'], commit['author'][0] if type(commit['author'])
        in [list, tuple] else commit['author'], commit['message'])

    on_master = "HEAD detached" not in git_status()
    charcoal_hq = None
    tavern_on_the_meta = None
    socvr = None
    s = ""
    s_reverted = ""
    specialrooms = []
    apiquota = -1
    bodyfetcher = None
    se_sites = []
    users_chatting = {
        meta_tavern_room_id: [],
        charcoal_room_id: [],
        socvr_room_id: [],
        '111347': []
    }
    why_data = []
    why_data_allspam = []
    notifications = []
    listen_to_these_if_edited = []
    multiple_reporters = []
    api_calls_per_site = {}

    standby_message = ""
    standby_mode = False

    api_request_lock = threading.Lock()

    num_posts_scanned = 0
    post_scan_time = 0
    posts_scan_stats_lock = threading.Lock()

    config = RawConfigParser()

    if os.path.isfile('config'):
        config.read('config')
    else:
        config.read('config.ci')

    latest_smokedetector_messages = {
        meta_tavern_room_id: [],
        charcoal_room_id: [],
        socvr_room_id: [],
        '111347': []
    }

    # environ_or_none defined in helpers.py
    bot_name = environ_or_none("SMOKEDETECTOR_NAME") or "SmokeDetector"
    bot_repository = environ_or_none(
        "SMOKEDETECTOR_REPO") or "//github.com/Charcoal-SE/SmokeDetector"
    chatmessage_prefix = "[{}]({})".format(bot_name, bot_repository)

    site_id_dict = {}
    post_site_id_to_question = {}

    location = config.get("Config", "location")

    metasmoke_ws = None

    try:
        metasmoke_host = config.get("Config", "metasmoke_host")
    except NoOptionError:
        metasmoke_host = None
        log(
            'info',
            "metasmoke host not found. Set it as metasmoke_host in the config file."
            "See https://github.com/Charcoal-SE/metasmoke.")

    try:
        metasmoke_key = config.get("Config", "metasmoke_key")
    except NoOptionError:
        metasmoke_key = ""
        log(
            'info',
            "No metasmoke key found, which is okay if both are running on the same host"
        )

    try:
        metasmoke_ws_host = config.get("Config", "metasmoke_ws_host")
    except NoOptionError:
        metasmoke_ws_host = ""
        log(
            'info',
            "No metasmoke websocket host found, which is okay if you're anti-websocket"
        )

    try:
        github_username = config.get("Config", "github_username")
        github_password = config.get("Config", "github_password")
    except NoOptionError:
        github_username = None
        github_password = None
Code example #10
File: config.py  Project: CMPUT463T2-MMLVG/MMLVG
    def _read_pypirc(self):
        """Reads the .pypirc file."""
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [
                    server.strip() for server in index_servers.split('\n')
                    if server.strip() != ''
                ]
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (('repository',
                                          self.DEFAULT_REPOSITORY),
                                         ('realm', self.DEFAULT_REALM),
                                         ('password', None)):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if (server == 'pypi' and repository
                            in (self.DEFAULT_REPOSITORY, 'pypi')):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (current['server'] == repository
                            or current['repository'] == repository):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }

        return {}
Code example #11
File: javdb_fc2.py  Project: zychen/javsdt
from functions_process import check_subt_divulge, replace_xml_win
from functions_picture import check_pic, add_watermark_subt
from functions_requests import download_pic
########################################################################################################################
from functions_picture import add_watermark_divulge, crop_poster_baidu, crop_poster_default
from functions_requests import get_search_db_html, get_db_html

#  main starts here
print('1. If javdb stays unreachable, update the anti-blocking URL in the ini file\n'
      '2. javdb rate-limits searches: roughly 12 searches per 5 minutes, then it sleeps for 5 minutes, so unattended runs are recommended!\n'
      '   If it goes to sleep right after starting, check whether the current network can reach javdb!\n'
      '4. When organizing FC2, fc2 metadata is very sparse; usually there is only a title and a seller (studio)\n')
# Read the configuration file; this ini file holds the user-adjustable settings
print('Reading settings from the ini file...', end='')
try:
    config_settings = RawConfigParser()
    config_settings.read('【点我设置整理规则】.ini', encoding='utf-8-sig')
    ####################################################################################################################
    # Whether to collect nfo files
    bool_nfo = True if config_settings.get("收集nfo",
                                           "是否收集nfo?") == '是' else False
    # Whether to skip folders that already contain an nfo (do not reorganize them)
    bool_skip = True if config_settings.get(
        "收集nfo", "是否跳过已存在nfo的文件夹?") == '是' else False
    # Custom format for the title field in the nfo
    custom_nfo_title = config_settings.get("收集nfo", "nfo中title的格式")
    # Custom: elements such as the studio are added as tags, because emby does not show the studio directly on the movie's detail page
    custom_genres = config_settings.get("收集nfo", "额外将以下元素添加到特征中")
    # Whether to also save these tags into the genre field
    bool_genre = True if config_settings.get(
        "收集nfo", "是否将特征保存到genre?") == '是' else False
Code example #12
File: frontend.py  Project: gitaarik/babel
def parse_mapping(fileobj, filename=None):
    """Parse an extraction method mapping from a file-like object.

    >>> buf = StringIO('''
    ... [extractors]
    ... custom = mypackage.module:myfunc
    ...
    ... # Python source files
    ... [python: **.py]
    ...
    ... # Genshi templates
    ... [genshi: **/templates/**.html]
    ... include_attrs =
    ... [genshi: **/templates/**.txt]
    ... template_class = genshi.template:TextTemplate
    ... encoding = latin-1
    ...
    ... # Some custom extractor
    ... [custom: **/custom/*.*]
    ... ''')

    >>> method_map, options_map = parse_mapping(buf)
    >>> len(method_map)
    4

    >>> method_map[0]
    ('**.py', 'python')
    >>> options_map['**.py']
    {}
    >>> method_map[1]
    ('**/templates/**.html', 'genshi')
    >>> options_map['**/templates/**.html']['include_attrs']
    ''
    >>> method_map[2]
    ('**/templates/**.txt', 'genshi')
    >>> options_map['**/templates/**.txt']['template_class']
    'genshi.template:TextTemplate'
    >>> options_map['**/templates/**.txt']['encoding']
    'latin-1'

    >>> method_map[3]
    ('**/custom/*.*', 'mypackage.module:myfunc')
    >>> options_map['**/custom/*.*']
    {}

    :param fileobj: a readable file-like object containing the configuration
                    text to parse
    :see: `extract_from_directory`
    """
    extractors = {}
    method_map = []
    options_map = {}

    parser = RawConfigParser()
    parser._sections = OrderedDict(
        parser._sections)  # We need ordered sections
    parser.read_file(fileobj, filename)

    for section in parser.sections():
        if section == 'extractors':
            extractors = dict(parser.items(section))
        else:
            method, pattern = [part.strip() for part in section.split(':', 1)]
            method_map.append((pattern, method))
            options_map[pattern] = dict(parser.items(section))

    if extractors:
        for idx, (pattern, method) in enumerate(method_map):
            if method in extractors:
                method = extractors[method]
            method_map[idx] = (pattern, method)

    return method_map, options_map
Code example #13
    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter

        """
        super(APIVersionWriter,
              self).__init__(package="monolithe.generators.lang.vro")

        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(
            self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option(
            "output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option(
            "product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + ".vro." + self._name
        self._package_subdir = self._package_name.replace('.', '/')

        self.output_directory = "%s/vro" % (self._output)
        self.override_folder = os.path.normpath("%s/__overrides" %
                                                self.output_directory)
        self.fetchers_path = "/fetchers/"
        self.enums_path = "/enums/"

        self.attrs_defaults = RawConfigParser()
        path = "%s/vro/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        self.inventory_entities = RawConfigParser()
        path = "%s/vro/__attributes_defaults/inventory_entities.ini" % self._output
        self.inventory_entities.optionxform = str
        self.inventory_entities.read(path)

        self.workflow_attrs = RawConfigParser()
        path = "%s/vro/__attributes_defaults/workflow_attrs.ini" % self._output
        self.workflow_attrs.optionxform = str
        self.workflow_attrs.read(path)

        self.attrs_types = RawConfigParser()
        path = "%s/vro/__attributes_defaults/attrs_types.ini" % self._output
        self.attrs_types.optionxform = str
        self.attrs_types.read(path)

        self.plugin_version = self.monolithe_config.get_option(
            "version", "transformer")

        self.workflow_version = self.monolithe_config.get_option(
            "version", "transformer")

        with open("%s/vro/__code_header" % self._output, "r") as f:
            self.header_content = f.read()
Code example #14
    def test_get_config_value_dtype_bool(self):
        config = RawConfigParser()
        config["section"] = {"key": "False"}

        val = get_config_value(config, "section", "key")
        self.assertEqual(val, False)
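Examples #1 and #14 test a `get_config_value` helper that is not included in either snippet. Purely as an assumption about the function under test, a minimal sketch consistent with both assertions (plain strings returned unchanged, boolean-looking values converted) might be:

def get_config_value(config, section, key):
    # Hypothetical helper (not from the original source): return the raw
    # string, except that recognised boolean spellings become real booleans.
    value = config.get(section, key)
    if value.lower() in ("true", "false"):
        return value.lower() == "true"
    return value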
Code example #15
File: beacon.py  Project: tjirab/decipher3
 def parser(self):
     parser = RawConfigParser()
     parser.read(self.inifile)
     return parser
Code example #16
    if not cfg['_MESMERIZE_PREFIX_COMMANDS'].endswith('\n'):
        cfg['_MESMERIZE_PREFIX_COMMANDS'] += '\n'

    with open(sys_cfg_file, 'w') as f:
        json.dump(cfg, f, indent=4)


#################################################################

# Project Configuration

#################################################################

proj_path = None
proj_cfg = RawConfigParser(allow_no_value=True)
proj_cfg['ROI_DEFS'] = {}
proj_cfg['STIM_DEFS'] = {}

proj_cfg.optionxform = str
special = {}
df_refs = {}


def save_proj_config():
    set_proj_special()
    with open(proj_path + '/config.cfg', 'w') as configfile:
        proj_cfg.write(configfile)


def create_new_proj_config():
Code example #17
    def __init__(self,
                 bus: Bus,
                 parser=RawConfigParser(defaults=DEFAULTS, strict=True)):
        self.parser = parser
        self._bus = bus
        self.load()
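
A general Python note on the constructor above: the RawConfigParser(...) default in the signature is evaluated once, at definition time, so every instance that does not pass its own parser shares the same object. Passing a parser explicitly sidesteps that; a sketch, with DEFAULTS assumed to be a plain dict of string values:

from configparser import RawConfigParser

DEFAULTS = {"poll_interval": "5"}              # hypothetical defaults
parser = RawConfigParser(defaults=DEFAULTS, strict=True)
parser.read("service.ini")                     # hypothetical path; absent files are ignored
print(parser.defaults())                       # {'poll_interval': '5'}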
コード例 #18
0
https://docs.djangoproject.com/en/2.2/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""

import os
from configparser import RawConfigParser
from ast import literal_eval

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Create RawConfigParser instance and read a conf file with all that
# secret and critical settings shit
conf_parser = RawConfigParser()
conf_parser.read(os.path.join(BASE_DIR, 'conf', 'local.conf'))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = conf_parser.get('keys', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = conf_parser.getboolean('common', 'DEBUG', fallback=False)

INTERNAL_IPS = [
    '127.0.0.1',
]
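
literal_eval is imported above but not used in this excerpt; in settings modules like this it is typically applied to options stored as Python literals (lists, tuples, dicts) in the .conf file. A hedged sketch of that combination, using an inline config string instead of conf/local.conf:

from ast import literal_eval
from configparser import RawConfigParser

parser = RawConfigParser()
parser.read_string("""
[common]
DEBUG = true
ALLOWED_HOSTS = ['127.0.0.1', 'localhost']
""")

DEBUG = parser.getboolean("common", "DEBUG", fallback=False)
ALLOWED_HOSTS = literal_eval(parser.get("common", "ALLOWED_HOSTS", fallback="[]"))
print(DEBUG, ALLOWED_HOSTS)   # True ['127.0.0.1', 'localhost']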
コード例 #19
0
ファイル: eds.py プロジェクト: christiansandberg/canopen
def import_eds(source, node_id):
    eds = RawConfigParser()
    eds.optionxform = str
    if hasattr(source, "read"):
        fp = source
    else:
        fp = open(source)
    try:
        # Python 3
        eds.read_file(fp)
    except AttributeError:
        # Python 2
        eds.readfp(fp)
    fp.close()
    od = objectdictionary.ObjectDictionary()

    if eds.has_section("FileInfo"):
        od.__edsFileInfo = {
            opt: eds.get("FileInfo", opt)
            for opt in eds.options("FileInfo")
        }

    if eds.has_section("Comments"):
        linecount = int(eds.get("Comments", "Lines"), 0)
        od.comments = '\n'.join([
            eds.get("Comments", "Line%i" % line)
            for line in range(1, linecount + 1)
        ])

    if not eds.has_section("DeviceInfo"):
        logger.warning(
            "eds file does not have a DeviceInfo section. This section is mandatory"
        )
    else:
        for rate in [10, 20, 50, 125, 250, 500, 800, 1000]:
            baudPossible = int(
                eds.get("DeviceInfo", "BaudRate_%i" % rate, fallback='0'), 0)
            if baudPossible != 0:
                od.device_information.allowed_baudrates.add(rate * 1000)

        for t, eprop, odprop in [
            (str, "VendorName", "vendor_name"),
            (int, "VendorNumber", "vendor_number"),
            (str, "ProductName", "product_name"),
            (int, "ProductNumber", "product_number"),
            (int, "RevisionNumber", "revision_number"),
            (str, "OrderCode", "order_code"),
            (bool, "SimpleBootUpMaster", "simple_boot_up_master"),
            (bool, "SimpleBootUpSlave", "simple_boot_up_slave"),
            (bool, "Granularity", "granularity"),
            (bool, "DynamicChannelsSupported", "dynamic_channels_supported"),
            (bool, "GroupMessaging", "group_messaging"),
            (int, "NrOfRXPDO", "nr_of_RXPDO"),
            (int, "NrOfTXPDO", "nr_of_TXPDO"),
            (bool, "LSS_Supported", "LSS_supported"),
        ]:
            try:
                if t in (int, bool):
                    setattr(od.device_information, odprop,
                            t(int(eds.get("DeviceInfo", eprop), 0)))
                elif t is str:
                    setattr(od.device_information, odprop,
                            eds.get("DeviceInfo", eprop))
            except NoOptionError:
                pass

    if eds.has_section("DeviceComissioning"):
        od.bitrate = int(eds.get("DeviceComissioning", "BaudRate")) * 1000
        od.node_id = int(eds.get("DeviceComissioning", "NodeID"), 0)

    for section in eds.sections():
        # Match dummy definitions
        match = re.match(r"^[Dd]ummy[Uu]sage$", section)
        if match is not None:
            for i in range(1, 8):
                key = "Dummy%04d" % i
                if eds.getint(section, key) == 1:
                    var = objectdictionary.Variable(key, i, 0)
                    var.data_type = i
                    var.access_type = "const"
                    od.add_object(var)

        # Match indexes
        match = re.match(r"^[0-9A-Fa-f]{4}$", section)
        if match is not None:
            index = int(section, 16)
            name = eds.get(section, "ParameterName")
            try:
                object_type = int(eds.get(section, "ObjectType"), 0)
            except NoOptionError:
                # DS306 4.6.3.2 object description
                # If the keyword ObjectType is missing, this is regarded as
                # "ObjectType=0x7" (=VAR).
                object_type = VAR
            try:
                storage_location = eds.get(section, "StorageLocation")
            except NoOptionError:
                storage_location = None

            if object_type in (VAR, DOMAIN):
                var = build_variable(eds, section, node_id, index)
                od.add_object(var)
            elif object_type == ARR and eds.has_option(section,
                                                       "CompactSubObj"):
                arr = objectdictionary.Array(name, index)
                last_subindex = objectdictionary.Variable(
                    "Number of entries", index, 0)
                last_subindex.data_type = objectdictionary.UNSIGNED8
                arr.add_member(last_subindex)
                arr.add_member(build_variable(eds, section, node_id, index, 1))
                arr.storage_location = storage_location
                od.add_object(arr)
            elif object_type == ARR:
                arr = objectdictionary.Array(name, index)
                arr.storage_location = storage_location
                od.add_object(arr)
            elif object_type == RECORD:
                record = objectdictionary.Record(name, index)
                record.storage_location = storage_location
                od.add_object(record)

            continue

        # Match subindexes
        match = re.match(r"^([0-9A-Fa-f]{4})[S|s]ub([0-9A-Fa-f]+)$", section)
        if match is not None:
            index = int(match.group(1), 16)
            subindex = int(match.group(2), 16)
            entry = od[index]
            if isinstance(entry,
                          (objectdictionary.Record, objectdictionary.Array)):
                var = build_variable(eds, section, node_id, index, subindex)
                entry.add_member(var)

        # Match [index]Name
        match = re.match(r"^([0-9A-Fa-f]{4})Name", section)
        if match is not None:
            index = int(match.group(1), 16)
            num_of_entries = int(eds.get(section, "NrOfEntries"))
            entry = od[index]
            # For CompactSubObj, index 1 is where we find the variable
            src_var = od[index][1]
            for subindex in range(1, num_of_entries + 1):
                var = copy_variable(eds, section, subindex, src_var)
                if var is not None:
                    entry.add_member(var)

    return od
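
A minimal usage sketch for import_eds() above. The file name is hypothetical, and the call assumes the rest of the canopen eds module (objectdictionary, build_variable, the constants and logger) is in scope, since the function depends on them:

od = import_eds("device.eds", node_id=3)   # accepts a path or an open file object; path is hypothetical
print(od.device_information.vendor_name)   # None unless VendorName was present in [DeviceInfo]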
コード例 #20
0
ファイル: common.py プロジェクト: rickardm/CatAmount
    'data_column_fixid': '0',
    'data_column_catid': '1',
    'data_column_utcdatetime': '4',
    'data_column_utmy': '6',
    'data_column_utmx': '7',
    'radius': '200',
    'time_cutoff': '144',
    'minimum_count': '0',
    'minimum_stay': '0',
    'start_date': '0',
    'end_date': '0',
    'dot_size': '4',
    'perimeter_resolution': '9'
}

config = RawConfigParser(fallback_values)
config.read(configfilepath)

cfg_datafile_path = config.get('Global_Settings', 'datafile_path')
cfg_outdir_path = config.get('Global_Settings', 'outdir_path')
cfg_data_column_fixid = config.get('Global_Settings', 'data_column_fixid')
cfg_data_column_catid = config.get('Global_Settings', 'data_column_catid')
cfg_data_column_utcdatetime = config.get('Global_Settings',
                                         'data_column_utcdatetime')
cfg_data_column_utmy = config.get('Global_Settings', 'data_column_utmy')
cfg_data_column_utmx = config.get('Global_Settings', 'data_column_utmx')

cfg_cluster_radius = config.get('Cluster_Settings', 'radius')
cfg_cluster_time_cutoff = config.get('Cluster_Settings', 'time_cutoff')
cfg_cluster_minimum_count = config.get('Cluster_Settings', 'minimum_count')
cfg_cluster_minimum_stay = config.get('Cluster_Settings', 'minimum_stay')
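
The dict passed as RawConfigParser's first positional argument becomes the DEFAULT section, so config.get() falls back to those values whenever the named section does not define the option. A minimal sketch of that fallback:

from configparser import RawConfigParser

fallback_values = {"radius": "200", "time_cutoff": "144"}
config = RawConfigParser(fallback_values)
config.read_string("[Cluster_Settings]\nradius = 350\n")

print(config.get("Cluster_Settings", "radius"))       # "350" (explicit value wins)
print(config.get("Cluster_Settings", "time_cutoff"))  # "144" (falls back to DEFAULT)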
コード例 #21
0
def set_config():
    sys_para = sys.argv
    file_path = os.path.split(sys_para[0])[0]
    gui = False
    if platform.uname()[0] == 'Windows':  # GUI is on by default on Windows
        gui = True
    if platform.uname()[0] == 'Linux':  # GUI is off by default on Linux
        gui = False
    if '--gui' in sys.argv:  # force GUI mode
        gui = True
    if '--nogui' in sys.argv:  # --nogui overrides the Windows default above
        gui = False

    config_file = os.path.join(file_path, 's3_download_config.ini')
    # If no config file, read the default config
    if not os.path.exists(config_file):
        config_file += '.default'
        print("No customized config, use the default config")
    cfg = ConfigParser()
    print(f'Reading config file: {config_file}')

    try:
        global SrcBucket, S3Prefix, SrcFileIndex, SrcProfileName, DesDir, MaxRetry, MaxThread, MaxParallelFile, LoggingLevel
        cfg.read(config_file, encoding='utf-8-sig')
        SrcBucket = cfg.get('Basic', 'SrcBucket')
        S3Prefix = cfg.get('Basic', 'S3Prefix')
        SrcFileIndex = cfg.get('Basic', 'SrcFileIndex')
        SrcProfileName = cfg.get('Basic', 'SrcProfileName')
        DesDir = cfg.get('Basic', 'DesDir')
        Megabytes = 1024 * 1024
        ChunkSize = cfg.getint('Advanced', 'ChunkSize') * Megabytes
        MaxRetry = cfg.getint('Advanced', 'MaxRetry')
        MaxThread = cfg.getint('Advanced', 'MaxThread')
        MaxParallelFile = cfg.getint('Advanced', 'MaxParallelFile')
        LoggingLevel = cfg.get('Advanced', 'LoggingLevel')
    except Exception as e:
        print("ERR loading s3_download_config.ini", str(e))
        input('PRESS ENTER TO QUIT')
        sys.exit(0)

    if gui:
        # For GUI
        from tkinter import Tk, filedialog, END, StringVar, BooleanVar, messagebox
        from tkinter.ttk import Combobox, Label, Button, Entry, Spinbox, Checkbutton
        # get profile name list in ./aws/credentials
        pro_conf = RawConfigParser()
        pro_path = os.path.join(os.path.expanduser("~"), ".aws")
        cre_path = os.path.join(pro_path, "credentials")
        if os.path.exists(cre_path):
            pro_conf.read(cre_path)
            profile_list = pro_conf.sections()
        else:
            print(
                f"There is no aws_access_key in {cre_path}, please enter credentials for the S3 bucket: "
            )
            os.mkdir(pro_path)
            aws_access_key_id = input('aws_access_key_id: ')
            aws_secret_access_key = input('aws_secret_access_key: ')
            region = input('region: ')
            pro_conf.add_section('default')
            pro_conf['default']['aws_access_key_id'] = aws_access_key_id
            pro_conf['default'][
                'aws_secret_access_key'] = aws_secret_access_key
            pro_conf['default']['region'] = region
            profile_list = ['default']
            with open(cre_path, 'w') as f:
                print(f"Saving credentials to {cre_path}")
                pro_conf.write(f)

        # Click Select Folder
        def browse_folder():
            local_dir = filedialog.askdirectory(
                initialdir=os.path.dirname(__file__))
            url_txt.delete(0, END)
            url_txt.insert(0, local_dir)
            file_txt.delete(0, END)
            file_txt.insert(0, "*")
            # Finish browse folder

        # Click List Buckets
        def ListBuckets(*args):
            SrcProfileName = SrcProfileName_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            bucket_list = []
            try:
                response = client.list_buckets()
                if 'Buckets' in response:
                    bucket_list = [b['Name'] for b in response['Buckets']]
            except Exception as e:
                messagebox.showerror(
                    'Error', f'Failed to list buckets.\n'
                    f'Please verify the aws_access_key for profile: [{SrcProfileName}]\n'
                    f'{str(e)}')
                bucket_list = ['CAN_NOT_GET_BUCKET_LIST']
            SrcBucket_txt['values'] = bucket_list
            SrcBucket_txt.current(0)
            # Finish ListBuckets

        # Click List Prefix
        def ListPrefix(*args):
            SrcProfileName = SrcProfileName_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            prefix_list = []
            this_bucket = SrcBucket_txt.get()
            max_get = 1000  # list_objects_v2 returns at most 1000 keys per call
            try:
                response = client.list_objects_v2(
                    Bucket=this_bucket, Delimiter='/'
                )  # Only lists up to the first 1000 prefixes, enough for a simple picker
                if 'CommonPrefixes' in response:
                    prefix_list = [
                        c['Prefix'] for c in response['CommonPrefixes']
                    ]
                if not prefix_list:
                    messagebox.showinfo(
                        'Message', f'There is no "/" Prefix in: {this_bucket}')
                if response['IsTruncated']:
                    messagebox.showinfo(
                        'Message',
                        f'More than {max_get} Prefix, cannot fully list here.')
            except Exception as e:
                messagebox.showinfo(
                    'Error',
                    f'Cannot get prefix list from bucket: {this_bucket}, {str(e)}'
                )
            S3Prefix_txt['values'] = prefix_list
            S3Prefix_txt.current(0)
            # Finish list prefix

        def browse_file(*args):
            SrcProfileName = SrcProfileName_txt.get()
            S3Prefix = S3Prefix_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            file_list = []
            this_bucket = SrcBucket_txt.get()
            max_get = 1000  # list_objects_v2 returns at most 1000 keys per call
            try:
                response = client.list_objects_v2(
                    Bucket=this_bucket,
                    Prefix=str(PurePosixPath(S3Prefix)) + '/',
                    Delimiter='/'
                )  # Only lists up to the first 1000 files, enough for a simple picker

                # Work out the length of the prefix so it can be stripped from the keys below
                if S3Prefix == '' or S3Prefix == '/':
                    # No Prefix configured
                    dp_len = 0
                else:
                    # Length of the "prefix/" string
                    dp_len = len(str(PurePosixPath(S3Prefix))) + 1

                if 'Contents' in response:
                    file_list = [
                        c['Key'][dp_len:] for c in response['Contents']
                    ]  # Strip the Prefix from each key
                if not file_list:
                    messagebox.showinfo(
                        'Message',
                        f'There is no files in s3://{this_bucket}/{S3Prefix}')
                if response['IsTruncated']:
                    messagebox.showinfo(
                        'Message',
                        f'More than {max_get} files, cannot fully list here.')
            except Exception as e:
                messagebox.showinfo(
                    'Error',
                    f'Cannot get file list from bucket s3://{this_bucket}/{S3Prefix}, {str(e)}'
                )
            file_txt['values'] = file_list
            file_txt.current(0)
            # Finish list files

        # Click START button
        def close():
            window.withdraw()
            ok = messagebox.askokcancel(
                'Start downloading job',
                f'DOWNLOAD FROM s3://{SrcBucket_txt.get()}/{S3Prefix_txt.get()}\n'
                f'TO LOCAL {url_txt.get()}\n'
                f'Click OK to START')
            if not ok:
                window.deiconify()
                return
            window.quit()
            return
            # Finish close()

        # Start GUI
        window = Tk()
        window.title(
            "LONGBOW - AMAZON S3 DOWNLOAD TOOL WITH BREAK-POINT RESUMING")
        window.geometry('705x350')
        window.configure(background='#ECECEC')
        window.protocol("WM_DELETE_WINDOW", sys.exit)

        Label(window, text="S3 Bucket").grid(column=0,
                                             row=1,
                                             sticky='w',
                                             padx=2,
                                             pady=2)
        SrcBucket_txt = Combobox(window, width=48)
        SrcBucket_txt.grid(column=1, row=1, sticky='w', padx=2, pady=2)
        SrcBucket_txt['values'] = SrcBucket
        SrcBucket_txt.current(0)
        Button(window, text="List Buckets", width=10, command=ListBuckets) \
            .grid(column=2, row=1, sticky='w', padx=2, pady=2)

        Label(window, text="S3 Prefix").grid(column=0,
                                             row=2,
                                             sticky='w',
                                             padx=2,
                                             pady=2)
        S3Prefix_txt = Combobox(window, width=48)
        S3Prefix_txt.grid(column=1, row=2, sticky='w', padx=2, pady=2)
        S3Prefix_txt['values'] = S3Prefix
        if S3Prefix != '':
            S3Prefix_txt.current(0)
        Button(window, text="List Prefix", width=10, command=ListPrefix) \
            .grid(column=2, row=2, sticky='w', padx=2, pady=2)

        Label(window, text="Filename or *").grid(column=0,
                                                 row=3,
                                                 sticky='w',
                                                 padx=2,
                                                 pady=2)
        file_txt = Combobox(window, width=48)
        file_txt.grid(column=1, row=3, sticky='w', padx=2, pady=2)
        file_txt['values'] = SrcFileIndex
        if SrcFileIndex != '':
            file_txt.current(0)
        Button(window, text="Select File", width=10, command=browse_file) \
            .grid(column=2, row=3, sticky='w', padx=2, pady=2)

        Label(window, text="AWS Profile").grid(column=0,
                                               row=4,
                                               sticky='w',
                                               padx=2,
                                               pady=2)
        SrcProfileName_txt = Combobox(window, width=15, state="readonly")
        SrcProfileName_txt['values'] = tuple(profile_list)
        SrcProfileName_txt.grid(column=1, row=4, sticky='w', padx=2, pady=2)
        if SrcProfileName in profile_list:
            position = profile_list.index(SrcProfileName)
            SrcProfileName_txt.current(position)
        else:
            SrcProfileName_txt.current(0)
        SrcProfileName = SrcProfileName_txt.get()
        SrcProfileName_txt.bind("<<ComboboxSelected>>", ListBuckets)

        Label(window, text="Folder").grid(column=0,
                                          row=5,
                                          sticky='w',
                                          padx=2,
                                          pady=2)
        url_txt = Entry(window, width=50)
        url_txt.grid(column=1, row=5, sticky='w', padx=2, pady=2)
        url_btn = Button(window,
                         text="Select Folder",
                         width=10,
                         command=browse_folder)
        url_btn.grid(column=2, row=5, sticky='w', padx=2, pady=2)
        url_txt.insert(0, DesDir)

        Label(window, text="MaxThread/File").grid(column=0,
                                                  row=6,
                                                  sticky='w',
                                                  padx=2,
                                                  pady=2)
        if MaxThread < 1 or MaxThread > 100:
            MaxThread = 5
        var_t = StringVar()
        var_t.set(str(MaxThread))
        MaxThread_txt = Spinbox(window,
                                from_=1,
                                to=100,
                                width=15,
                                textvariable=var_t)
        MaxThread_txt.grid(column=1, row=6, sticky='w', padx=2, pady=2)

        Label(window, text="MaxParallelFile").grid(column=0,
                                                   row=7,
                                                   sticky='w',
                                                   padx=2,
                                                   pady=2)
        if MaxParallelFile < 1 or MaxParallelFile > 100:
            MaxParallelFile = 5
        var_f = StringVar()
        var_f.set(str(MaxParallelFile))
        MaxParallelFile_txt = Spinbox(window,
                                      from_=1,
                                      to=100,
                                      width=15,
                                      textvariable=var_f)
        MaxParallelFile_txt.grid(column=1, row=7, sticky='w', padx=2, pady=2)

        save_config = BooleanVar()
        save_config.set(True)
        save_config_txt = Checkbutton(window,
                                      text="Save to s3_download_config.ini",
                                      variable=save_config)
        save_config_txt.grid(column=1, row=9, padx=2, pady=2)

        Button(window, text="Start Download", width=15,
               command=close).grid(column=1, row=10, padx=5, pady=5)
        window.mainloop()

        DesDir = url_txt.get()
        SrcFileIndex = file_txt.get()
        SrcBucket = SrcBucket_txt.get()
        S3Prefix = S3Prefix_txt.get()
        SrcProfileName = SrcProfileName_txt.get()
        MaxThread = int(MaxThread_txt.get())
        MaxParallelFile = int(MaxParallelFile_txt.get())

        if save_config.get():  # read the BooleanVar's value; the object itself is always truthy
            cfg['Basic']['SrcBucket'] = SrcBucket
            cfg['Basic']['S3Prefix'] = S3Prefix
            cfg['Basic']['SrcFileIndex'] = SrcFileIndex
            cfg['Basic']['SrcProfileName'] = SrcProfileName
            cfg['Basic']['DesDir'] = DesDir
            cfg['Advanced']['MaxThread'] = str(MaxThread)
            cfg['Advanced']['MaxParallelFile'] = str(MaxParallelFile)

            config_file = os.path.join(file_path, 's3_download_config.ini')
            with codecs.open(config_file, 'w', 'utf-8') as f:
                cfg.write(f)
                print(f"Save config to {config_file}")
        # GUI window finish

    if S3Prefix == '/':
        S3Prefix = ''
    # Finish set_config()
    return ChunkSize
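
The credentials bootstrap in the GUI branch above is plain RawConfigParser writing; isolated below as a sketch that writes to a throwaway file instead of ~/.aws/credentials (the key values are placeholders):

from configparser import RawConfigParser

pro_conf = RawConfigParser()
pro_conf.add_section("default")
pro_conf["default"]["aws_access_key_id"] = "PLACEHOLDER_KEY_ID"
pro_conf["default"]["aws_secret_access_key"] = "PLACEHOLDER_SECRET"
pro_conf["default"]["region"] = "us-east-1"

with open("credentials.demo", "w") as f:   # throwaway path, not ~/.aws/credentials
    pro_conf.write(f)
print(pro_conf.sections())                 # ['default']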
コード例 #22
0
ファイル: common.py プロジェクト: albertlidas/tabua2
    def read_config(self, path):
        config = RawConfigParser()
        config.optionxform = lambda s: s
        config.read(path)
        return config
コード例 #23
0
ファイル: config.py プロジェクト: grayerbeard/house-power-log
    def write_file(self):
        here = "config.write_file"
        config_write = RawConfigParser()
        section = "Debug"
        config_write.add_section(section)
        config_write.set(section, 'debug_reread_config',
                         self.debug_reread_config)
        config_write.set(section, 'debug_flag_1', self.debug_flag_1)
        config_write.set(section, 'debug_flag_2', self.debug_flag_2)
        config_write.set(section, 'debug_flag_ftp', self.debug_flag_ftp)
        section = "Scan"
        config_write.add_section(section)
        config_write.set(section, 'scan_delay', self.scan_delay)
        config_write.set(section, 'max_scans', self.max_scans)
        section = "Log"
        config_write.add_section(section)
        config_write.set(section, 'log_directory', self.log_directory)
        config_write.set(section, 'local_dir_www', self.local_dir_www)
        config_write.set(section, 'log_buffer_flag', self.log_buffer_flag)
        config_write.set(section, 'text_buffer_length',
                         self.text_buffer_length)
        section = "Ftp"
        config_write.add_section(section)
        config_write.set(section, 'ftp_creds_filename',
                         self.ftp_creds_filename)
        config_write.set(section, 'ftp_log_max_count', self.ftp_log_max_count)
        section = "Heating_Fan"
        config_write.add_section(section)
        config_write.set(section, 'heat_max_temp', self.heat_max_temp)
        config_write.set(section, 'heat_min_temp', self.heat_min_temp)
        config_write.set(section, 'heat_max_speed', self.heat_max_speed)
        config_write.set(section, 'heat_min_speed', self.heat_min_speed)
        config_write.set(section, 'heat_max_freq', self.heat_max_freq)
        config_write.set(section, 'heat_min_freq', self.heat_min_freq)
        section = "Sauna"
        config_write.add_section(section)
        config_write.set(section, 'sauna_max_temp', self.sauna_max_temp)
        config_write.set(section, 'sauna_min_temp', self.sauna_min_temp)
        config_write.set(section, 'sauna_max_speed', self.sauna_max_speed)
        config_write.set(section, 'sauna_min_speed', self.sauna_min_speed)
        config_write.set(section, 'sauna_max_freq', self.sauna_max_freq)
        config_write.set(section, 'sauna_min_freq', self.sauna_min_freq)
        config_write.set(section, 'sauna_GPIO_port', self.sauna_GPIO_port)
        config_write.set(section, 'sauna_brightness', self.sauna_brightness)
        section = "Power_Log"
        config_write.add_section(section)
        config_write.set(section, 'adc_scan_size', self.adc_scan_size)
        config_write.set(section, 'adc_target_scan_msec',
                         self.adc_target_scan_msec)
        config_write.set(section, 'adc_channel', self.adc_channel)
        config_write.set(section, 'adc_default_gain', self.adc_default_gain)
        config_write.set(section, 'adc_top_limit', self.adc_top_limit)
        config_write.set(section, 'adc_bottom_limit', self.adc_bottom_limit)
        config_write.set(section, 'adc_input_offset_mv',
                         self.adc_input_offset_mv)
        config_write.set(section, 'adc_input_amp_gain',
                         self.adc_input_amp_gain)
        config_write.set(section, 'adc_CT_ratio', self.adc_CT_ratio)
        config_write.set(section, 'adc_CT_resister', self.adc_CT_resister)

        # Writing our configuration file to 'self.config_filename'
        pr(self.debug_flag_1, here,
           "ready to write new config file with default values: ",
           self.config_filename)
        with open(self.config_filename, 'w+') as configfile:
            config_write.write(configfile)
        return 0
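
Two notes on the writer above: in Python 3, RawConfigParser.set() expects string values, and the long run of add_section()/set() calls can be expressed more compactly with read_dict(). A sketch of the compact form (illustrative values, not the project's code):

from configparser import RawConfigParser

config_write = RawConfigParser()
config_write.read_dict({
    "Debug": {"debug_flag_1": "0", "debug_flag_2": "0"},
    "Scan": {"scan_delay": "10", "max_scans": "0"},
})

with open("config.demo.cfg", "w") as configfile:
    config_write.write(configfile)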
コード例 #24
0
ファイル: globalvars.py プロジェクト: robbss1/SmokeDetector
class GlobalVars:
    on_windows = 'windows' in platform.platform().lower()

    false_positives = []
    whitelisted_users = set()
    blacklisted_users = dict()
    blacklisted_usernames = []
    blacklisted_websites = []
    blacklisted_numbers = []
    watched_numbers = []
    blacklisted_numbers_normalized = None
    watched_numbers_normalized = None
    bad_keywords = []
    watched_keywords = {}
    ignored_posts = []
    auto_ignored_posts = []
    startup_utc_date = datetime.utcnow()
    startup_utc = startup_utc_date.strftime("%H:%M:%S")
    latest_questions = []
    api_backoff_time = 0
    deletion_watcher = None

    not_privileged_warning = \
        "You are not a privileged user. Please see " \
        "[the privileges wiki page](https://charcoal-se.org/smokey/Privileges) for " \
        "information on what privileges are and what is expected of privileged users."

    experimental_reasons = {  # Don't widely report these
        "potentially bad keyword in answer",
        "potentially bad keyword in body",
        "potentially bad keyword in title",
        "potentially bad keyword in username",
        "potentially bad NS for domain in title",
        "potentially bad NS for domain in body",
        "potentially bad NS for domain in answer",
        "potentially bad ASN for hostname in title",
        "potentially bad ASN for hostname in body",
        "potentially bad ASN for hostname in answer",
        "potentially bad IP for hostname in title",
        "potentially bad IP for hostname in body",
        "potentially bad IP for hostname in answer",
        "potentially problematic NS configuration in title",
        "potentially problematic NS configuration in body",
        "potentially problematic NS configuration in answer",
        "toxic body detected",
        "toxic answer detected",
    }

    parser = HTMLParser()
    parser.unescape = unescape

    code_privileged_users = None

    # these are loaded in GlobalVars.reload()
    commit = None
    commit_with_author = None
    on_branch = None

    s = ""
    s_reverted = ""
    s_norestart_blacklists = ""
    s_norestart_findspam = ""
    apiquota = -1
    bodyfetcher = None
    cookies = {}
    se_sites = []
    why_data = []
    notifications = []
    listen_to_these_if_edited = []
    multiple_reporters = []
    api_calls_per_site = {}
    reason_weights = {}
    metasmoke_ids = {}

    standby_message = ""
    standby_mode = False
    no_se_activity_scan = False

    api_request_lock = threading.Lock(
    )  # Get this lock before making API requests
    apiquota_rw_lock = threading.Lock(
    )  # Get this lock before reading/writing apiquota

    class PostScanStat:
        """ Tracking post scanning data """
        num_posts_scanned = 0
        post_scan_time = 0
        rw_lock = threading.Lock()

        @staticmethod
        def add_stat(posts_scanned, scan_time):
            """ Adding post scanning data """
            with GlobalVars.PostScanStat.rw_lock:
                GlobalVars.PostScanStat.num_posts_scanned += posts_scanned
                GlobalVars.PostScanStat.post_scan_time += scan_time

        @staticmethod
        def get_stat():
            """ Getting post scanning statistics """
            with GlobalVars.PostScanStat.rw_lock:
                posts_scanned = GlobalVars.PostScanStat.num_posts_scanned
                scan_time = GlobalVars.PostScanStat.post_scan_time
            if scan_time == 0:
                posts_per_second = None
            else:
                posts_per_second = posts_scanned / scan_time
            return (posts_scanned, scan_time, posts_per_second)

        @staticmethod
        def reset_stat():
            """ Resetting post scanning data """
            with GlobalVars.PostScanStat.rw_lock:
                GlobalVars.PostScanStat.num_posts_scanned = 0
                GlobalVars.PostScanStat.post_scan_time = 0

    config_parser = RawConfigParser()

    if os.path.isfile('config') and "pytest" not in sys.modules:
        config_parser.read('config')
    else:
        config_parser.read('config.ci')

    config = config_parser["Config"]  # It's a collections.OrderedDict now

    site_id_dict = {}
    post_site_id_to_question = {}

    location = config.get("location", "Continuous Integration")

    class MSStatus:
        """ Tracking metasmoke status """
        ms_is_up = True
        counter = 0
        rw_lock = threading.Lock()

        @staticmethod
        def set_up():
            """ Set metasmoke status to up """
            # Private to metasmoke.py
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = True

        @staticmethod
        def set_down():
            """ Set metasmoke status to down """
            # Private to metasmoke.py
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = False

        @staticmethod
        def is_up():
            """ Query if metasmoke status is up """
            with GlobalVars.MSStatus.rw_lock:
                current_ms_status = GlobalVars.MSStatus.ms_is_up
            return current_ms_status

        @staticmethod
        def is_down():
            """ Query if metasmoke status is down """
            return not GlobalVars.MSStatus.is_up()

        # Why implement failed() and succeeded() here, as they will only be called in metasmoke.py?
        # Because get_failure_count() need to be exposed to global, so it is more convenient
        # to implement failed() and succeeded() here.
        @staticmethod
        def failed():
            """ Indicate a metasmoke connection failure """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.counter += 1

        @staticmethod
        def succeeded():
            """ Indicate a metasmoke connection success """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.counter = 0

        @staticmethod
        def get_failure_count():
            """ Get consecutive metasmoke connection failure count """
            with GlobalVars.MSStatus.rw_lock:
                failure_count = GlobalVars.MSStatus.counter
            return failure_count

        @staticmethod
        def reset_ms_status():
            """ Reset class GlobalVars.MSStatus to default values """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = True
                GlobalVars.MSStatus.counter = 0

    chatexchange_u = config.get("ChatExchangeU")
    chatexchange_p = config.get("ChatExchangeP")

    metasmoke_host = config.get("metasmoke_host")
    metasmoke_key = config.get("metasmoke_key")
    metasmoke_ws_host = config.get("metasmoke_ws_host")

    git_name = config.get("git_username", "SmokeDetector")
    git_email = config.get("git_useremail", "*****@*****.**")

    github_username = config.get("github_username")
    github_password = config.get("github_password")
    github_access_token = config.get("github_access_token")

    perspective_key = config.get("perspective_key")

    flovis_host = config.get("flovis_host")
    flovis = None

    # Miscellaneous
    log_time_format = config.get("log_time_format", "%H:%M:%S")

    # Blacklist privileged users from config
    se_blacklisters = regex.sub(r"[^\d,]", "",
                                config.get("se_blacklisters", "")).split(",")
    mse_blacklisters = regex.sub(r"[^\d,]", "",
                                 config.get("mse_blacklisters", "")).split(",")
    so_blacklisters = regex.sub(r"[^\d,]", "",
                                config.get("so_blacklisters", "")).split(",")

    # Create a set of blacklisters equivalent to what's used in code_privileged_users.
    config_blacklisters = set()
    for id in se_blacklisters:
        if id:
            config_blacklisters.add(("stackexchange.com", int(id)))

    for id in mse_blacklisters:
        if id:
            config_blacklisters.add(("meta.stackexchange.com", int(id)))

    for id in so_blacklisters:
        if id:
            config_blacklisters.add(("stackoverflow.com", int(id)))

    # environ_or_none replaced by os.environ.get (essentially dict.get)
    bot_name = os.environ.get("SMOKEDETECTOR_NAME", git_name)
    bot_repo_slug = os.environ.get("SMOKEDETECTOR_REPO", git_user_repo)
    bot_repository = "//github.com/{}".format(bot_repo_slug)
    chatmessage_prefix = "[{}]({})".format(bot_name, bot_repository)

    valid_content = """This is a totally valid post that should never be caught. Any blacklist or watchlist item that triggers on this item should be avoided. java.io.BbbCccDddException: nothing wrong found. class Safe { perfect valid code(int float &#%$*v a b c =+ /* - 0 1 2 3 456789.EFGQ} English 中文Français Español Português Italiano Deustch ~@#%*-_/'()?!:;" vvv kkk www sss ttt mmm absolute std::adjacent_find (power).each do |s| bbb end ert zal l gsopsq kdowhs@ xjwk* %_sooqmzb xjwpqpxnf.  Please don't blacklist disk-partition.com, it's a valid domain (though it also gets spammed rather frequently)."""  # noqa: E501

    @classmethod
    def reload(cls):
        cls.commit = commit = git_commit_info()

        cls.commit_with_author = "`{}` ({}: {})".format(
            commit.id, commit.author, commit.message)

        # We don't want to escape `[` and `]` when they are within code.
        split_commit_with_author = cls.commit_with_author.split('`')
        split_length = len(split_commit_with_author)
        for index in range(0, split_length, 2):
            split_commit_with_author[index] = split_commit_with_author[
                index].replace('[', '\\[').replace(']', '\\]')
        # If there isn't an even number of ` characters, the parsing hack failed; assume the last
        # segment still needs escaping.
        if not split_length % 2:
            split_commit_with_author[-1] = split_commit_with_author[
                -1].replace('[', '\\[').replace(']', '\\]')

        cls.commit_with_author_escaped = '`'.join(split_commit_with_author)

        cls.on_branch = git_ref()
        cls.s = "[ {} ] SmokeDetector started at [rev {}]({}/commit/{}) (running on {}, Python {})".format(
            cls.chatmessage_prefix, cls.commit_with_author_escaped,
            cls.bot_repository, cls.commit.id, cls.location,
            platform.python_version())
        cls.s_reverted = \
            "[ {} ] SmokeDetector started in [reverted mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#reverted-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.s_norestart_blacklists = \
            "[ {} ] Blacklists reloaded at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.s_norestart_findspam = \
            "[ {} ] FindSpam module reloaded at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.standby_message = \
            "[ {} ] SmokeDetector started in [standby mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#standby-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
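
Indexing the parser (config_parser["Config"]) returns a configparser SectionProxy, whose .get() takes a plain default as its second argument; that is why config.get("location", "Continuous Integration") above needs no fallback= keyword. A short sketch:

from configparser import RawConfigParser

config_parser = RawConfigParser()
config_parser.read_string("[Config]\nlocation = MyServer\n")
config = config_parser["Config"]                          # a SectionProxy, not a plain dict

print(config.get("location", "Continuous Integration"))  # "MyServer"
print(config.get("metasmoke_host"))                      # None: missing keys fall back quietly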
コード例 #25
0
    def __init__(self, path):
        config = RawConfigParser()
        config.optionxform = lambda s: s
        if path:  # Test support
            config.read(path)
        self.config = config
コード例 #26
0
class GlobalVars:
    on_windows = 'windows' in platform.platform().lower()

    false_positives = []
    whitelisted_users = set()
    blacklisted_users = dict()
    blacklisted_usernames = []
    blacklisted_websites = []
    blacklisted_numbers = []
    watched_numbers = []
    blacklisted_numbers_normalized = None
    watched_numbers_normalized = None
    bad_keywords = []
    watched_keywords = {}
    ignored_posts = []
    auto_ignored_posts = []
    startup_utc_date = datetime.utcnow()
    startup_utc = startup_utc_date.strftime("%H:%M:%S")
    latest_questions = []
    api_backoff_time = 0
    deletion_watcher = None

    metasmoke_last_ping_time = datetime.now()
    not_privileged_warning = \
        "You are not a privileged user. Please see " \
        "[the privileges wiki page](https://charcoal-se.org/smokey/Privileges) for " \
        "information on what privileges are and what is expected of privileged users."

    experimental_reasons = {  # Don't widely report these
        "potentially bad keyword in answer",
        "potentially bad keyword in body",
        "potentially bad keyword in title",
        "potentially bad keyword in username",
        "potentially bad NS for domain in title",
        "potentially bad NS for domain in body",
        "potentially bad NS for domain in answer",
        "potentially bad ASN for hostname in title",
        "potentially bad ASN for hostname in body",
        "potentially bad ASN for hostname in answer",
        "potentially problematic NS configuration in title",
        "potentially problematic NS configuration in body",
        "potentially problematic NS configuration in answer",
        "toxic body detected",
        "toxic answer detected",
    }

    parser = HTMLParser()
    parser.unescape = unescape

    code_privileged_users = None

    # these are loaded in GlobalVars.reload()
    commit = None
    commit_with_author = None
    on_master = None

    s = ""
    s_reverted = ""
    s_norestart = ""
    s_norestart2 = ""
    apiquota = -1
    bodyfetcher = None
    cookies = {}
    se_sites = []
    why_data = []
    notifications = []
    listen_to_these_if_edited = []
    multiple_reporters = []
    api_calls_per_site = {}
    reason_weights = {}
    metasmoke_ids = {}

    standby_message = ""
    standby_mode = False

    api_request_lock = threading.Lock()

    num_posts_scanned = 0
    post_scan_time = 0
    posts_scan_stats_lock = threading.Lock()

    config_parser = RawConfigParser()

    if os.path.isfile('config') and "pytest" not in sys.modules:
        config_parser.read('config')
        log('debug', "Configuration loaded from \"config\"")
    else:
        config_parser.read('config.ci')
        if "pytest" in sys.modules and os.path.isfile('config'):  # Another config found while running in pytest
            log('debug', "Running in pytest, force load config from \"config.ci\"")
        else:
            log('debug', "Configuration loaded from \"config.ci\"")

    config = config_parser["Config"]  # A configparser SectionProxy (dict-like view of the [Config] section)

    # environ_or_none replaced by os.environ.get (essentially dict.get)
    bot_name = os.environ.get("SMOKEDETECTOR_NAME", "SmokeDetector")
    bot_repo_slug = os.environ.get("SMOKEDETECTOR_REPO", "Charcoal-SE/SmokeDetector")
    bot_repository = "//github.com/{}".format(bot_repo_slug)
    chatmessage_prefix = "[{}]({})".format(bot_name, bot_repository)

    site_id_dict = {}
    post_site_id_to_question = {}

    location = config.get("location", "Continuous Integration")

    metasmoke_ws = None
    metasmoke_down = False
    metasmoke_failures = 0  # Consecutive count, not cumulative

    chatexchange_u = config.get("ChatExchangeU")
    chatexchange_p = config.get("ChatExchangeP")

    metasmoke_host = config.get("metasmoke_host")
    metasmoke_key = config.get("metasmoke_key")
    metasmoke_ws_host = config.get("metasmoke_ws_host")

    github_username = config.get("github_username")
    github_password = config.get("github_password")

    perspective_key = config.get("perspective_key")

    flovis_host = config.get("flovis_host")
    flovis = None

    # Miscellaneous
    log_time_format = config.get("log_time_format", "%H:%M:%S")

    valid_content = """This is a totally valid post that should never be caught. Any blacklist or watchlist item that triggers on this item should be avoided. java.io.BbbCccDddException: nothing wrong found. class Safe { perfect valid code(int float &#%$*v a b c =+ /* - 0 1 2 3 456789.EFGQ} English 中文Français Español Português Italiano Deustch ~@#%*-_/'()?!:;" vvv kkk www sss ttt mmm absolute std::adjacent_find (power).each do |s| bbb end ert zal l gsopsq kdowhs@ xjwk* %_sooqmzb xjwpqpxnf.  Please don't blacklist disk-partition.com, it's a valid domain (though it also gets spammed rather frequently)."""  # noqa: E501

    @staticmethod
    def reload():
        commit = git_commit_info()
        GlobalVars.commit = commit

        GlobalVars.commit_with_author = "`{}` ({}: {})".format(
            commit['id'], commit['author'], commit['message'])

        GlobalVars.on_master = git_ref()
        GlobalVars.s = "[ {} ] SmokeDetector started at [rev {}]({}/commit/{}) (running on {}, Python {})".format(
            GlobalVars.chatmessage_prefix, GlobalVars.commit_with_author, GlobalVars.bot_repository,
            GlobalVars.commit['id'], GlobalVars.location, platform.python_version())
        GlobalVars.s_reverted = \
            "[ {} ] SmokeDetector started in [reverted mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#reverted-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                GlobalVars.chatmessage_prefix, GlobalVars.commit_with_author, GlobalVars.bot_repository,
                GlobalVars.commit['id'], GlobalVars.location)
        GlobalVars.s_norestart = "[ {} ] Blacklists reloaded at [rev {}]({}/commit/{}) (running on {})".format(
            GlobalVars.chatmessage_prefix, GlobalVars.commit_with_author, GlobalVars.bot_repository,
            GlobalVars.commit['id'], GlobalVars.location)
        GlobalVars.s_norestart2 = "[ {} ] FindSpam module reloaded at [rev {}]({}/commit/{}) (running on {})".format(
            GlobalVars.chatmessage_prefix, GlobalVars.commit_with_author, GlobalVars.bot_repository,
            GlobalVars.commit['id'], GlobalVars.location)
        GlobalVars.standby_message = \
            "[ {} ] SmokeDetector started in [standby mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#standby-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                GlobalVars.chatmessage_prefix, GlobalVars.commit_with_author, GlobalVars.bot_repository,
                GlobalVars.commit['id'], GlobalVars.location)
        log('debug', "GlobalVars loaded")
コード例 #27
0
ファイル: __init__.py プロジェクト: uw-dims/bumpversion
def main(original_args=None):

    positionals, args = split_args_in_optional_and_positional(
        sys.argv[1:] if original_args is None else original_args)

    if len(positionals[1:]) > 2:
        warnings.warn(
            "Giving multiple files on the command line will be deprecated, please use [bumpversion:file:...] in a config file.",
            PendingDeprecationWarning)

    parser1 = argparse.ArgumentParser(add_help=False)

    parser1.add_argument(
        '--config-file',
        metavar='FILE',
        default=argparse.SUPPRESS,
        required=False,
        help=
        'Config file to read most of the variables from (default: .bumpversion.cfg)'
    )

    parser1.add_argument('--verbose',
                         action='count',
                         default=0,
                         help='Print verbose logging to stderr',
                         required=False)

    parser1.add_argument('--list',
                         action='store_true',
                         default=False,
                         help='List machine readable information',
                         required=False)

    parser1.add_argument('--allow-dirty',
                         action='store_true',
                         default=False,
                         help="Don't abort if working directory is dirty",
                         required=False)

    known_args, remaining_argv = parser1.parse_known_args(args)

    logformatter = logging.Formatter('%(message)s')

    if len(logger.handlers) == 0:
        ch = logging.StreamHandler(sys.stderr)
        ch.setFormatter(logformatter)
        logger.addHandler(ch)

    if len(logger_list.handlers) == 0:
        ch2 = logging.StreamHandler(sys.stdout)
        ch2.setFormatter(logformatter)
        logger_list.addHandler(ch2)

    if known_args.list:
        logger_list.setLevel(1)

    log_level = {
        0: logging.WARNING,
        1: logging.INFO,
        2: logging.DEBUG,
    }.get(known_args.verbose, logging.DEBUG)

    logger.setLevel(log_level)

    logger.debug("Starting {}".format(DESCRIPTION))

    defaults = {}
    vcs_info = {}

    for vcs in VCS:
        if vcs.is_usable():
            vcs_info.update(vcs.latest_tag_info())

    if 'current_version' in vcs_info:
        defaults['current_version'] = vcs_info['current_version']

    config = RawConfigParser('')

    # don't transform keys to lowercase (which would be the default)
    config.optionxform = lambda option: option

    config.add_section('bumpversion')

    explicit_config = hasattr(known_args, 'config_file')

    if explicit_config:
        config_file = known_args.config_file
    elif not os.path.exists('.bumpversion.cfg') and \
            os.path.exists('setup.cfg'):
        config_file = 'setup.cfg'
    else:
        config_file = '.bumpversion.cfg'

    config_file_exists = os.path.exists(config_file)

    part_configs = {}

    files = []

    if config_file_exists:

        logger.info("Reading config file {}:".format(config_file))
        logger.info(io.open(config_file, 'rt', encoding='utf-8').read())

        config.readfp(io.open(config_file, 'rt', encoding='utf-8'))

        log_config = StringIO()
        config.write(log_config)

        if 'files' in dict(config.items("bumpversion")):
            warnings.warn(
                "'files =' configuration is will be deprecated, please use [bumpversion:file:...]",
                PendingDeprecationWarning)

        defaults.update(dict(config.items("bumpversion")))

        for listvaluename in ("serialize", ):
            try:
                value = config.get("bumpversion", listvaluename)
                defaults[listvaluename] = list(
                    filter(None, (x.strip() for x in value.splitlines())))
            except NoOptionError:
                pass  # no default value then ;)

        for boolvaluename in ("commit", "tag", "dry_run"):
            try:
                defaults[boolvaluename] = config.getboolean(
                    "bumpversion", boolvaluename)
            except NoOptionError:
                pass  # no default value then ;)

        for section_name in config.sections():

            section_name_match = re.compile(
                "^bumpversion:(file|part):(.+)").match(section_name)

            if not section_name_match:
                continue

            section_prefix, section_value = section_name_match.groups()

            section_config = dict(config.items(section_name))

            if section_prefix == "part":

                ThisVersionPartConfiguration = NumericVersionPartConfiguration

                if 'values' in section_config:
                    section_config['values'] = list(
                        filter(
                            None,
                            (x.strip()
                             for x in section_config['values'].splitlines())))
                    ThisVersionPartConfiguration = ConfiguredVersionPartConfiguration

                part_configs[section_value] = ThisVersionPartConfiguration(
                    **section_config)

            elif section_prefix == "file":

                filename = section_value

                if 'serialize' in section_config:
                    section_config['serialize'] = list(
                        filter(
                            None,
                            (x.strip()
                             for x in section_config['serialize'].splitlines()
                             )))

                section_config['part_configs'] = part_configs

                if 'parse' not in section_config:
                    section_config['parse'] = defaults.get(
                        "parse",
                        r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)')

                if 'serialize' not in section_config:
                    section_config['serialize'] = defaults.get(
                        'serialize', [str('{major}.{minor}.{patch}')])

                if 'search' not in section_config:
                    section_config['search'] = defaults.get(
                        "search", '{current_version}')

                if 'replace' not in section_config:
                    section_config['replace'] = defaults.get(
                        "replace", '{new_version}')

                files.append(
                    ConfiguredFile(filename, VersionConfig(**section_config)))

    else:
        message = "Could not read config file at {}".format(config_file)
        if explicit_config:
            raise argparse.ArgumentTypeError(message)
        else:
            logger.info(message)

    parser2 = argparse.ArgumentParser(prog='bumpversion',
                                      add_help=False,
                                      parents=[parser1])
    parser2.set_defaults(**defaults)

    parser2.add_argument('--current-version',
                         metavar='VERSION',
                         help='Version that needs to be updated',
                         required=False)
    parser2.add_argument('--parse',
                         metavar='REGEX',
                         help='Regex parsing the version string',
                         default=defaults.get(
                             "parse",
                             r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'))
    parser2.add_argument('--serialize',
                         metavar='FORMAT',
                         action=DiscardDefaultIfSpecifiedAppendAction,
                         help='How to format what is parsed back to a version',
                         default=defaults.get(
                             "serialize", [str('{major}.{minor}.{patch}')]))
    parser2.add_argument('--search',
                         metavar='SEARCH',
                         help='Template for complete string to search',
                         default=defaults.get("search", '{current_version}'))
    parser2.add_argument('--replace',
                         metavar='REPLACE',
                         help='Template for complete string to replace',
                         default=defaults.get("replace", '{new_version}'))

    known_args, remaining_argv = parser2.parse_known_args(args)

    defaults.update(vars(known_args))

    assert type(known_args.serialize) == list

    context = dict(
        list(time_context.items()) + list(prefixed_environ().items()) +
        list(vcs_info.items()))

    try:
        vc = VersionConfig(
            parse=known_args.parse,
            serialize=known_args.serialize,
            search=known_args.search,
            replace=known_args.replace,
            part_configs=part_configs,
        )
    except sre_constants.error as e:
        sys.exit(1)

    current_version = vc.parse(
        known_args.current_version) if known_args.current_version else None

    new_version = None

    if 'new_version' not in defaults and known_args.current_version:
        try:
            if current_version and len(positionals) > 0:
                logger.info("Attempting to increment part '{}'".format(
                    positionals[0]))
                new_version = current_version.bump(positionals[0], vc.order())
                logger.info("Values are now: " +
                            keyvaluestring(new_version._values))
                defaults['new_version'] = vc.serialize(new_version, context)
        except MissingValueForSerializationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except IncompleteVersionRepresenationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except KeyError as e:
            logger.info("Opportunistic finding of new_version failed")

    parser3 = argparse.ArgumentParser(
        prog='bumpversion',
        description=DESCRIPTION,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve',
        parents=[parser2],
    )

    parser3.set_defaults(**defaults)

    parser3.add_argument('--current-version',
                         metavar='VERSION',
                         help='Version that needs to be updated',
                         required='current_version' not in defaults)
    parser3.add_argument('--dry-run',
                         '-n',
                         action='store_true',
                         default=False,
                         help="Don't write any files, just pretend.")
    parser3.add_argument('--new-version',
                         metavar='VERSION',
                         help='New version that should be in the files',
                         required='new_version' not in defaults)

    commitgroup = parser3.add_mutually_exclusive_group()

    commitgroup.add_argument('--commit',
                             action='store_true',
                             dest="commit",
                             help='Commit to version control',
                             default=defaults.get("commit", False))
    commitgroup.add_argument('--no-commit',
                             action='store_false',
                             dest="commit",
                             help='Do not commit to version control',
                             default=argparse.SUPPRESS)

    taggroup = parser3.add_mutually_exclusive_group()

    taggroup.add_argument('--tag',
                          action='store_true',
                          dest="tag",
                          default=defaults.get("tag", False),
                          help='Create a tag in version control')
    taggroup.add_argument('--no-tag',
                          action='store_false',
                          dest="tag",
                          help='Do not create a tag in version control',
                          default=argparse.SUPPRESS)

    parser3.add_argument('--tag-name',
                         metavar='TAG_NAME',
                         help='Tag name (only works with --tag)',
                         default=defaults.get('tag_name', 'v{new_version}'))

    parser3.add_argument(
        '--message',
        '-m',
        metavar='COMMIT_MSG',
        help='Commit message',
        default=defaults.get(
            'message', 'Bump version: {current_version} → {new_version}'))

    file_names = []
    if 'files' in defaults:
        assert defaults['files'] is not None
        file_names = defaults['files'].split(' ')

    parser3.add_argument('part', help='Part of the version to be bumped.')
    parser3.add_argument('files',
                         metavar='file',
                         nargs='*',
                         help='Files to change',
                         default=file_names)

    args = parser3.parse_args(remaining_argv + positionals)

    if args.dry_run:
        logger.info("Dry run active, won't touch any files.")

    if not new_version:
        new_version = vc.parse(args.new_version)

    logger.info("New version will be '{}'".format(args.new_version))

    file_names = file_names or positionals[1:]

    for file_name in file_names:
        files.append(ConfiguredFile(file_name, vc))

    for vcs in VCS:
        if vcs.is_usable():
            try:
                vcs.assert_nondirty()
            except WorkingDirectoryIsDirtyException as e:
                if not defaults['allow_dirty']:
                    logger.warning(
                        "{}\n\nUse --allow-dirty to override this if you know what you're doing."
                        .format(e.message))
                    raise
            break
    else:
        # No usable VCS was found.
        vcs = None

    # make sure files exist and contain version string

    logger.info("Asserting files {} contain the version string:".format(
        ", ".join([str(f) for f in files])))

    for f in files:
        f.should_contain_version(current_version, context)

    # change version string in files
    for f in files:
        f.replace(current_version, new_version, context, args.dry_run)

    commit_files = [f.path for f in files]

    # Record the computed new_version temporarily so it appears in the
    # logged configuration dump below.
    config.set('bumpversion', 'new_version', args.new_version)

    for key, value in config.items('bumpversion'):
        logger_list.info("{}={}".format(key, value))

    # Only current_version is persisted back to the config file.
    config.remove_option('bumpversion', 'new_version')

    config.set('bumpversion', 'current_version', args.new_version)

    new_config = StringIO()

    try:
        write_to_config_file = (not args.dry_run) and config_file_exists

        logger.info("{} to config file {}:".format(
            "Would write" if not write_to_config_file else "Writing",
            config_file,
        ))

        config.write(new_config)
        logger.info(new_config.getvalue())

        if write_to_config_file:
            with io.open(config_file, 'wb') as f:
                f.write(new_config.getvalue().encode('utf-8'))

    except UnicodeEncodeError:
        warnings.warn(
            "Unable to write UTF-8 to config file, because of an old configparser version. "
            "Update with `pip install --upgrade configparser`.")

    if config_file_exists:
        commit_files.append(config_file)

    if not vcs:
        return

    assert vcs.is_usable(), "Found '{}' unusable, unable to commit.".format(
        vcs.__name__)

    do_commit = (not args.dry_run) and args.commit
    do_tag = (not args.dry_run) and args.tag

    logger.info("{} {} commit".format(
        "Would prepare" if not do_commit else "Preparing",
        vcs.__name__,
    ))

    for path in commit_files:
        logger.info("{} changes in file '{}' to {}".format(
            "Would add" if not do_commit else "Adding",
            path,
            vcs.__name__,
        ))

        if do_commit:
            vcs.add_path(path)

    vcs_context = {
        "current_version": args.current_version,
        "new_version": args.new_version,
    }
    vcs_context.update(time_context)
    vcs_context.update(prefixed_environ())

    commit_message = args.message.format(**vcs_context)

    logger.info("{} to {} with message '{}'".format(
        "Would commit" if not do_commit else "Committing",
        vcs.__name__,
        commit_message,
    ))

    if do_commit:
        vcs.commit(message=commit_message)

    tag_name = args.tag_name.format(**vcs_context)
    logger.info("{} '{}' in {}".format(
        "Would tag" if not do_tag else "Tagging", tag_name, vcs.__name__))

    if do_tag:
        vcs.tag(tag_name)
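
The config-file handling above follows a simple RawConfigParser round trip: dump the effective settings (including the temporary new_version) for logging, then persist only current_version. Below is a minimal, self-contained sketch of that pattern; the version numbers and the 'example.cfg' file name are illustrative, not taken from the example above.

from configparser import RawConfigParser
from io import StringIO

config = RawConfigParser()
config.add_section('bumpversion')
config.set('bumpversion', 'current_version', '0.5.3')

# Record the computed version temporarily so it appears in the dump...
config.set('bumpversion', 'new_version', '0.6.0')
for key, value in config.items('bumpversion'):
    print('{}={}'.format(key, value))

# ...but persist only current_version, never new_version.
config.remove_option('bumpversion', 'new_version')
config.set('bumpversion', 'current_version', '0.6.0')

new_config = StringIO()
config.write(new_config)
with open('example.cfg', 'w', encoding='utf-8') as f:  # illustrative file name
    f.write(new_config.getvalue())
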
Code example #28
from __future__ import absolute_import, division, print_function, unicode_literals
from datetime import datetime
import os
from configparser import RawConfigParser

AI_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(AI_DIR)

# config_setting = RawConfigParser()
# config_setting.read(BASE_DIR+'/setting.ini')
config_version = RawConfigParser()
config_version.read(BASE_DIR+'/version.cfg')
version = '{}.{}'.format(config_version.get('MAIN', 'V'), config_version.get('MAIN', 'SUR'))


def print_spend_time(_st_time):
    _ed_time = datetime.now()  # end of the measured interval
    _spend_seconds = (_ed_time - _st_time).total_seconds()  # total elapsed seconds
    _left_seconds = int(_spend_seconds % 60)
    _spend_minutes = int(_spend_seconds // 60)
    _left_minutes = int(_spend_minutes % 60)
    _left_hours = int(_spend_minutes // 60)
    print('==== spend time: {:d} h: {:d} m: {:d} s'.format(_left_hours, _left_minutes, _left_seconds))
    return _spend_seconds

def check_path(path):
    if not os.path.exists(path):
        os.makedirs(path)
    return path
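
For reference, a hedged sketch of how the version.cfg read above could be produced with RawConfigParser; the [MAIN] section and its V/SUR keys are taken from the snippet, while the values are illustrative only.

from configparser import RawConfigParser

cfg = RawConfigParser()
cfg.add_section('MAIN')
cfg.set('MAIN', 'V', '1')     # major part (illustrative value)
cfg.set('MAIN', 'SUR', '42')  # secondary part (illustrative value)

with open('version.cfg', 'w') as f:
    cfg.write(f)

# Re-reading this file with the code above yields version == '1.42'.
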
Code example #29
File: eds.py Project: vladnerad/CANScanner
# Note: this snippet relies on module-level imports and constants from
# eds.py that are not shown here (re, NoOptionError, the canopen
# objectdictionary module, build_variable/copy_variable, and the
# DOMAIN/VAR/ARR/RECORD object-type codes).
def import_eds(source, node_id):
    eds = RawConfigParser()
    if hasattr(source, "read"):
        fp = source
    else:
        fp = open(source)
    try:
        # Python 3
        eds.read_file(fp)
    except AttributeError:
        # Python 2
        eds.readfp(fp)
    fp.close()
    od = objectdictionary.ObjectDictionary()
    if eds.has_section("DeviceComissioning"):
        od.bitrate = int(eds.get("DeviceComissioning", "Baudrate")) * 1000
        od.node_id = int(eds.get("DeviceComissioning", "NodeID"))

    for section in eds.sections():
        # Match indexes
        match = re.match(r"^[0-9A-Fa-f]{4}$", section)
        if match is not None:
            index = int(section, 16)
            name = eds.get(section, "ParameterName")
            try:
                object_type = int(eds.get(section, "ObjectType"), 0)
            except NoOptionError:
                # DS306 4.6.3.2 object description
                # If the keyword ObjectType is missing, this is regarded as
                # "ObjectType=0x7" (=VAR).
                object_type = VAR

            if object_type in (VAR, DOMAIN):
                var = build_variable(eds, section, node_id, index)
                od.add_object(var)
            elif object_type == ARR and eds.has_option(section,
                                                       "CompactSubObj"):
                arr = objectdictionary.Array(name, index)
                last_subindex = objectdictionary.Variable(
                    "Number of entries", index, 0)
                last_subindex.data_type = objectdictionary.UNSIGNED8
                arr.add_member(last_subindex)
                arr.add_member(build_variable(eds, section, node_id, index, 1))
                od.add_object(arr)
            elif object_type == ARR:
                arr = objectdictionary.Array(name, index)
                od.add_object(arr)
            elif object_type == RECORD:
                record = objectdictionary.Record(name, index)
                od.add_object(record)

            continue

        # Match subindexes
        match = re.match(r"^([0-9A-Fa-f]{4})[S|s]ub([0-9A-Fa-f]+)$", section)
        if match is not None:
            index = int(match.group(1), 16)
            subindex = int(match.group(2), 16)
            entry = od[index]
            if isinstance(entry,
                          (objectdictionary.Record, objectdictionary.Array)):
                var = build_variable(eds, section, node_id, index, subindex)
                entry.add_member(var)

        # Match [index]Name
        match = re.match(r"^([0-9A-Fa-f]{4})Name", section)
        if match is not None:
            index = int(match.group(1), 16)
            num_of_entries = int(eds.get(section, "NrOfEntries"))
            entry = od[index]
            # For CompactSubObj, index 1 is where we find the variable
            src_var = od[index][1]
            for subindex in range(1, num_of_entries + 1):
                var = copy_variable(eds, section, subindex, src_var)
                if var is not None:
                    entry.add_member(var)

    return od
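
Because import_eds() accepts any object with a read() method, a small EDS can be supplied from memory. The sketch below is illustrative only; it sticks to the section and keyword names used above and to an ObjectType that avoids the build_variable() helper, whose exact requirements are not shown here.

import io

# Minimal in-memory EDS; values are illustrative only.
EDS_TEXT = """\
[DeviceComissioning]
NodeID=5
Baudrate=250

[1003]
ParameterName=Pre-defined error field
ObjectType=0x8
"""

od = import_eds(io.StringIO(EDS_TEXT), node_id=5)
print(od.node_id)       # -> 5
print(od.bitrate)       # -> 250000 (kbit value scaled by 1000 above)
print(od[0x1003].name)  # -> 'Pre-defined error field' (assuming ObjectType 0x8 maps to ARR)
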
Code example #30
    def test_get_config_value_None_bool(self):
        config = RawConfigParser()
        config["section"] = {"key": None}

        val = get_config_value(config, "section", "key", dtype=bool)
        self.assertEqual(val, False)
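
The get_config_value() helper exercised by these tests is not shown. A hypothetical implementation consistent with the two cases above might look like the following; the name, signature, and coercion rules are assumptions, not the project's actual code.

def get_config_value(config, section, key, dtype=None):
    # Hypothetical helper, reconstructed from the tests only.
    value = config.get(section, key, fallback=None)
    if dtype is None:
        return value
    if dtype is bool:
        # Treat None/empty as False; otherwise accept common truthy strings.
        if not value:
            return False
        return str(value).strip().lower() in ('1', 'true', 'yes', 'on')
    return dtype(value)
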