Example #1
0
def main():
    parser = OptionParser(usage='%prog filename [options] arg',
                          version='%prog ' + __version__,
                          description='recognition captcha for http://jw.xujc.com/')
    parser.add_option('-v', '--verbose', action="store_true", dest="verbose", help="verbose")
    parser.add_option('-t', '--threshold', default=DEFAULT_THRESHOLD, dest="threshold", help="threshold for binary image (default:%default)")
    (options, args) = parser.parse_args()

    if len(args) < 1:
        parser.error('filename not given')

    if options.verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING

    logging.basicConfig(level=log_level, format='%(message)s',)

    parser.get_default_values()

    filename = args[0]
    logging.info('recognition file: %s' % filename)

    image = Image.open(filename)
    logging.info('image size: %s' % str(image.size))
    logging.info('threshold: %d' % options.threshold)

    captcha = XujcCaptcha(options.threshold)
    print captcha.recognition(image)
 def get_default_values(self):
     """Merge INI-file values into the parser defaults, then delegate.

     Reads ``self.defaultfiles`` with ConfigParser; options found in the
     section named after ``self.prog`` are copied (as the strings that
     ``config.get`` returns -- no type coercion) into ``self.defaults``,
     after which the base OptionParser builds the Values object as usual.
     """
     import ConfigParser
     config = ConfigParser.ConfigParser()
     config.read(self.defaultfiles)
     try:
         if config.has_section(self.prog):
             for key in config.options(self.prog):
                 self.defaults[key]=config.get(self.prog, key)
     except KeyError:
         # NOTE(review): nothing in this body obviously raises KeyError;
         # presumably defensive -- confirm before removing.
         pass
     return OptionParser.get_default_values(self)
    def get_default_values(self):
        """Return the defaults wrapped in a per-call ExtValues subclass.

        ExtValues is defined inside the method so that its _action_taken
        class attribute is not shared between parser instances, and so
        that _action_taken never shows up as an option in __dict__.
        """
        base_values = OptionParser.get_default_values(self)

        # Built fresh on every call -- a module-level class would share
        # one _action_taken dict across all parsers.
        class ExtValues(self.VALUES_CLASS):
            _action_taken = {}

        ext = ExtValues()
        ext.__dict__ = dict(base_values.__dict__)
        return ext
Example #4
0
    def get_default_values(self):
        """Wrap the inherited default Values in a dynamically created
        ExtValues class.

        The subclass is created per invocation: a shared class would also
        share its _action_taken dict between parser instances.  Keeping
        _action_taken as a class attribute (not an instance attribute)
        keeps it out of the option __dict__.
        """
        class ExtValues(self.VALUES_CLASS):
            _action_taken = {}

        defaults = OptionParser.get_default_values(self)
        wrapped = ExtValues()
        wrapped.__dict__ = defaults.__dict__.copy()
        return wrapped
Example #5
0
class TestDefaultValues(BaseTest):
    """Exercise OptionParser.get_default_values() against expected dicts."""

    def setUp(self):
        parser = OptionParser()
        parser.add_option("-v", "--verbose", default=True)
        parser.add_option("-q", "--quiet", dest="verbose")
        parser.add_option("-n", type="int", default=37)
        parser.add_option("-m", type="int")
        parser.add_option("-s", default="foo")
        parser.add_option("-t")
        parser.add_option("-u", default=None)
        self.parser = parser
        self.expected = {
            "verbose": True,
            "n": 37,
            "m": None,
            "s": "foo",
            "t": None,
            "u": None,
        }

    def test_basic_defaults(self):
        self.assertEqual(self.parser.get_default_values(), self.expected)

    def test_mixed_defaults_post(self):
        # set_defaults() after add_option() overrides per-option defaults.
        self.parser.set_defaults(n=42, m=-100)
        self.expected["n"] = 42
        self.expected["m"] = -100
        self.assertEqual(self.parser.get_default_values(), self.expected)

    def test_mixed_defaults_pre(self):
        # A default passed to add_option() beats an earlier set_defaults().
        self.parser.set_defaults(x="barf", y="blah")
        self.parser.add_option("-x", default="frob")
        self.parser.add_option("-y")

        self.expected["x"] = "frob"
        self.expected["y"] = "blah"
        self.assertEqual(self.parser.get_default_values(), self.expected)

        self.parser.remove_option("-y")
        self.parser.add_option("-y", default=None)
        self.expected["y"] = None
        self.assertEqual(self.parser.get_default_values(), self.expected)

    def test_process_default(self):
        # DurationOption converts duration strings such as "6m" to seconds.
        self.parser.option_class = DurationOption
        self.parser.add_option("-d", type="duration", default=300)
        self.parser.add_option("-e", type="duration", default="6m")
        self.parser.set_defaults(n="42")
        self.expected.update({"d": 300, "e": 360, "n": 42})
        self.assertEqual(self.parser.get_default_values(), self.expected)

        # With processing disabled, raw default values pass through untouched.
        self.parser.set_process_default_values(False)
        self.expected.update({"d": 300, "e": "6m", "n": "42"})
        self.assertEqual(self.parser.get_default_values(), self.expected)
Example #6
0
def main (args):
    """Entry point: parse options, assemble the configuration and open the
    repository to analyze.

    NOTE(review): this example is truncated -- the function body continues
    past the visible excerpt (only the ``path is not None`` branch is shown).
    """
    parser = OptionParser (usage='%prog [ options ... ] URI [ FILES ]',
                           description='Analyze repository modifications',
                           version=VERSION)
    parser.disable_interspersed_args()
    parser.add_option ('-g', '--debug', dest='debug',
                       action="store_true", default=False,
                       help="Run in debug mode")
    parser.add_option ('-c', '--config-file', dest='config_file',
                       metavar='FILE',
                       help="Use a custom configuration file")
    parser.add_option ('-r', '--revision', dest='revision',
                       metavar='REV',
                       help='Revision to analyze (HEAD)')
    parser.add_option ('-f', '--fast', dest='fast',
                       action="store_true", default=False,
                       help="Run faster but moves and copies are not detected")
    parser.add_option ('-o', '--output', dest='output',
                       default = 'text',
                       help='Output type [text|db|xml|csv] (%default)')
    add_outputs_options (parser)

    # Save default values and pass an empty Values object to
    # parser_args, so that default values are not set. We need it
    # to know whether a value has been provided by the user or not
    # After parsing the command line we complete the config options
    # with the default values for the options that have not been set
    # by the parser or by a config file
    defaults = parser.get_default_values ()
    options, args = parser.parse_args (args, values = Values())

    try:
        config = Config (options.config_file)
    except AttributeError:
        # config_file was never set on the empty Values object: fall back
        # to the default configuration.
        config = Config ()

    config.update (options.__dict__)
    config.add (defaults.__dict__)

    if not args:
        parser.error("missing required repository URI")
        # NOTE(review): parser.error() exits the process, so this return is
        # unreachable -- presumably kept as a safety net.
        return 1

    parser.destroy ()

    if config.debug:
        import repositoryhandler.backends
        repositoryhandler.backends.DEBUG = True

    uri = args[0]
    files = args[1:]
    # True when the only FILES argument is '-' (file list presumably read
    # from stdin later -- TODO confirm against the truncated remainder).
    files_from_stdin = (files and files[0] == '-')

    # Create repository
    path = uri_to_filename (uri)
    if path is not None:
        try:
            repo = create_repository_from_path (path)
        except RepositoryUnknownError:
            printerr ("Path %s doesn't seem to point to a repository supported by guilty", (path,))
            return 1
        except Exception, e:
            printerr ("Unknown error creating repository for path %s (%s)", (path, str (e)))
            return 1
        uri = repo.get_uri_for_path (path)
Example #7
0
class CmdBase(object):
    """
    Class used for all Zenoss commands
    """

    # Whether __init__ configures logging (see the should_log parameter).
    # NOTE: in the original, the docstring above appeared AFTER this
    # assignment, so it was a bare string statement and CmdBase.__doc__
    # was None; it has been moved to its proper position.
    doesLogging = True
    def __init__(self, noopts=0, args=None, should_log=None):
        """Initialize the Zenoss command.

        Loads all Zope products and ZenPacks, builds and populates the
        option parser, applies global.conf defaults, parses the command
        line (twice when --configfile is given, so command-line values
        still override config-file values), and finally configures
        logging when enabled.

        @param noopts: when truthy, options are parsed from an empty list
        @param args: explicit argument list; defaults to sys.argv[1:]
        @param should_log: optional override of the doesLogging class flag
        """
        zope.component.provideAdapter(DefaultTraversable, (None, ))
        # This explicitly loads all of the products - must happen first!
        from OFS.Application import import_products
        import_products()
        #make sure we aren't in debug mode
        import Globals
        Globals.DevelopmentMode = False
        # We must import ZenossStartup at this point so that all Zenoss daemons
        # and tools will have any ZenPack monkey-patched methods available.
        import Products.ZenossStartup
        unused(Products.ZenossStartup)
        zcml.load_site()
        import Products.ZenWidgets
        load_config_override('scriptmessaging.zcml', Products.ZenWidgets)

        self.usage = "%prog [options]"
        self.noopts = noopts
        self.inputArgs = args

        # inputArgs was created to allow unit tests to pass in command line
        # arguments and get around whatever Zope was doing to sys.argv.
        if self.inputArgs is None:
            self.inputArgs = sys.argv[1:]

        self.parser = None
        self.args = []

        self.buildParser()
        self.buildOptions()

        # Get defaults from global.conf. They will be overridden by
        # daemon-specific config file or command line arguments.
        applyGlobalConfToParser(self.parser)
        self.parseOptions()
        if self.options.configfile:
            self.parser.defaults = self.getConfigFileDefaults(
                self.options.configfile)
            # We've updated the parser with defaults from configs, now we need
            # to reparse our command-line to get the correct overrides from
            # the command-line
            self.parseOptions()

        if should_log is not None:
            self.doesLogging = should_log

        if self.doesLogging:
            self.setupLogging()

    def buildParser(self):
        """Lazily construct self.parser; a no-op when it already exists.

        The version string is taken from ZenossInfo when available and
        falls back to the static ZVersion constant otherwise.
        """
        if self.parser:
            return
        from Products.ZenModel.ZenossInfo import ZenossInfo
        try:
            version = str(ZenossInfo('').getZenossVersion())
        except Exception:
            from Products.ZenModel.ZVersion import VERSION
            version = VERSION
        self.parser = OptionParser(
            usage=self.usage,
            version="%prog " + version,
            option_class=LogSeverityOption)

    def buildOptions(self):
        """
        Basic options setup. Other classes should call this before adding
        more options
        """
        self.buildParser()

        if self.doesLogging:
            group = OptionGroup(self.parser, "Logging Options")
            group.add_option(
                '-v', '--logseverity',
                dest='logseverity', default='INFO', type='loglevel',
                help='Logging severity threshold')
            group.add_option(
                '--logpath',
                dest='logpath', default=zenPath('log'), type='str',
                help='Override the default logging path; default $ZENHOME/log')
            group.add_option(
                '--maxlogsize',
                dest='maxLogKiloBytes', default=10240, type='int',
                help='Max size of log file in KB; default 10240')
            group.add_option(
                '--maxbackuplogs',
                dest='maxBackupLogs', default=3, type='int',
                help='Max number of back up log files; default 3')
            self.parser.add_option_group(group)

        # Options common to every command, whether it logs or not.
        self.parser.add_option(
            "-C", "--configfile",
            dest="configfile",
            help="Use an alternate configuration file")
        self.parser.add_option(
            "--genconf",
            action="store_true", default=False,
            help="Generate a template configuration file")
        self.parser.add_option(
            "--genxmltable",
            action="store_true", default=False,
            help="Generate a Docbook table showing command-line switches.")
        self.parser.add_option(
            "--genxmlconfigs",
            action="store_true", default=False,
            help="Generate an XML file containing command-line switches.")

    def parseOptions(self):
        """
        Uses the optparse parse previously populated and performs common options.
        """
        args = [] if self.noopts else self.inputArgs
        self.options, self.args = self.parser.parse_args(args=args)

        # Each generator writes to stdout and exits the process itself.
        generators = (
            ('genconf', self.generate_configs),
            ('genxmltable', self.generate_xml_table),
            ('genxmlconfigs', self.generate_xml_configs),
        )
        for flag, generate in generators:
            if getattr(self.options, flag):
                generate(self.parser, self.options)

    def getConfigFileDefaults(self, filename, correctErrors=True):
        # TODO: This should be refactored - duplicated code with CmdBase.
        """
        Parse a config file which has key-value pairs delimited by white space,
        and update the parser's option defaults with these values.

        @parameter filename: name of configuration file
        @type filename: string
        @parameter correctErrors: passed through to validateConfigFile; when
            True, invalid lines are commented out in the file on disk
        @type correctErrors: boolean
        @return: dict of option defaults with config-file values overlaid
        """

        # Start from the parser's built-in defaults, then let optparse
        # overlay the config-file values on top of them.
        options = self.parser.get_default_values()
        lines = self.loadConfigFile(filename)
        if lines:
            lines, errors = self.validateConfigFile(
                filename, lines, correctErrors=correctErrors)

            args = self.getParamatersFromConfig(lines)
            try:
                # NOTE: _process_args is optparse's private machinery; it
                # mutates 'options' in place.
                self.parser._process_args([], args, options)
            except (BadOptionError, OptionValueError) as err:
                print >> sys.stderr, 'WARN: %s in config file %s' % (err,
                                                                     filename)

        return options.__dict__

    def getGlobalConfigFileDefaults(self):
        # Deprecated: This method is going away - it is duplicated in GlobalConfig.py
        """Overlay key-value pairs from etc/global.conf onto the parser's
        option defaults and return them as a dict."""
        options = self.parser.get_default_values()
        lines = self.loadConfigFile(zenPath('etc', 'global.conf'))
        if lines:
            try:
                self.parser._process_args(
                    [], self.getParamatersFromConfig(lines), options)
            except (BadOptionError, OptionValueError):
                # Ignore it, we only care about our own options as defined in the parser
                pass
        return options.__dict__

    def loadConfigFile(self, filename):
        # TODO: This should be refactored - duplicated code with CmdBase.
        """
        Parse a config file which has key-value pairs delimited by white space.

        @parameter filename: path to the configuration file
        @type filename: string
        @return: list of dicts, one per line: comments/blank lines get
            {type='comment', line=...}; other lines get {type='option',
            line=..., key=..., value=..., option=<parser option or None>}
        """
        lines = []
        if not os.path.exists(filename):
            return lines
        try:
            with open(filename) as file:
                for line in file:
                    if line.lstrip().startswith('#') or line.strip() == '':
                        lines.append(dict(type='comment', line=line))
                    else:
                        try:
                            # add default blank string for keys with no default value
                            # valid delimiters are space, ':' and/or '=' (see ZenUtils/config.py)
                            key, value = (
                                re.split(r'[\s:=]+', line.strip(), 1) + [
                                    '',
                                ])[:2]
                        except ValueError:
                            # Defensive: the pad-and-slice above should always
                            # yield exactly two items.
                            lines.append(
                                dict(type='option',
                                     line=line,
                                     key=line.strip(),
                                     value=None,
                                     option=None))
                        else:
                            option = self.parser.get_option('--%s' % key)
                            lines.append(
                                dict(type='option',
                                     line=line,
                                     key=key,
                                     value=value,
                                     option=option))
        except IOError as e:
            # Bug fix: the message previously hard-coded '(unknown)' even
            # though the filename keyword was supplied to format().
            errorMessage = (
                'WARN: unable to read config file {filename} '
                '-- skipping. ({exceptionName}: {exception})').format(
                    filename=filename,
                    exceptionName=e.__class__.__name__,
                    exception=e)
            print >> sys.stderr, errorMessage
            return []

        return lines

    def validateConfigFile(self,
                           filename,
                           lines,
                           correctErrors=True,
                           warnErrors=True):
        """
        Validate config file lines which has key-value pairs delimited by white space,
        and validate that the keys exist for this command's option parser. If
        the option does not exist or has an empty value it will comment it out
        in the config file.

        @parameter filename: path to the configuration file
        @type filename: string
        @parameter lines: lines from config parser
        @type lines: list
        @parameter correctErrors: Whether or not invalid conf values should be
            commented out.
        @type correctErrors: boolean
        @parameter warnErrors: whether a WARN line is printed to stderr for
            each invalid entry
        @type warnErrors: boolean
        @return: tuple (validLines, errors); errors is a list of
            (1-based line number, message) pairs
        """

        output = []
        errors = []
        validLines = []
        date = datetime.datetime.now().isoformat()
        # Comment inserted above each invalid line when rewriting the file.
        errorTemplate = '## Commenting out by config parser (%s) on %s: %%s\n' % (
            sys.argv[0], date)

        for lineno, line in enumerate(lines):
            if line['type'] == 'comment':
                output.append(line['line'])
            elif line['type'] == 'option':
                if line['value'] is None:
                    errors.append(
                        (lineno + 1, 'missing value for "%s"' % line['key']))
                    output.append(errorTemplate % 'missing value')
                    output.append('## %s' % line['line'])
                elif line['option'] is None:
                    errors.append(
                        (lineno + 1, 'unknown option "%s"' % line['key']))
                    output.append(errorTemplate % 'unknown option')
                    output.append('## %s' % line['line'])
                else:
                    validLines.append(line)
                    output.append(line['line'])
            else:
                errors.append((lineno + 1, 'unknown line "%s"' % line['line']))
                output.append(errorTemplate % 'unknown line')
                output.append('## %s' % line['line'])

        if errors:
            if correctErrors:
                for lineno, message in errors:
                    print >> sys.stderr, 'INFO: Commenting out %s on line %d in %s' % (
                        message, lineno, filename)

                # Rewrite the config file with offending lines commented out.
                with open(filename, 'w') as file:
                    file.writelines(output)

            if warnErrors:
                for lineno, message in errors:
                    print >> sys.stderr, 'WARN: %s on line %d in %s' % (
                        message, lineno, filename)

        return validLines, errors

    def getParamatersFromConfig(self, lines):
        """Translate parsed config-file lines into argv-style arguments.

        The historical misspelling in the name is kept for API compatibility.
        """
        # Deprecated: This method is going away
        parser = self.parser
        return _convertConfigLinesToArguments(parser, lines)

    def setupLogging(self):
        """
        Set common logging options
        """
        root = logging.getLogger()
        root.setLevel(logging.WARN)
        mname = self.__class__.__name__
        self.log = logging.getLogger("zen." + mname)
        zlog = logging.getLogger("zen")

        # logseverity may be a plain number or a level name such as INFO.
        severity = self.options.logseverity
        try:
            loglevel = int(severity)
        except ValueError:
            loglevel = getattr(logging, severity.upper(), logging.INFO)
        zlog.setLevel(loglevel)

        logdir = self.checkLogpath()
        if not logdir:
            logging.basicConfig()
            return

        handler = logging.handlers.RotatingFileHandler(
            os.path.join(logdir, mname.lower() + ".log"),
            maxBytes=self.options.maxLogKiloBytes * 1024,
            backupCount=self.options.maxBackupLogs)
        handler.setFormatter(
            logging.Formatter(
                "%(asctime)s %(levelname)s %(name)s: %(message)s",
                "%Y-%m-%d %H:%M:%S"))
        root.addHandler(handler)

    def checkLogpath(self):
        """Return the validated log directory, or None when unset.

        Creates the directory hierarchy when missing; raises SystemExit
        when it cannot be created or exists but is not a directory.
        """
        logdir = self.options.logpath
        if not logdir:
            return None
        if not os.path.exists(logdir):
            # try creating the directory hierarchy if it doesn't exist...
            try:
                os.makedirs(logdir)
            except OSError:
                raise SystemExit(
                    "logpath:%s doesn't exist and cannot be created" %
                    logdir)
        elif not os.path.isdir(logdir):
            raise SystemExit("logpath:%s exists but is not a directory" %
                             logdir)
        return logdir

    def pretty_print_config_comment(self, comment):
        """Word-wrap a config-file comment to roughly 40 columns.

        Lines are split at the first space found at or after column 35;
        when no space is found the search start is walked back in steps
        of 5, and a line with no spaces at all is emitted verbatim.
        Wrapped pieces are joined with a '\\n# ' continuation prefix.
        """
        max_size = 40
        # Accept lines up to text_window characters past max_size.
        text_window = 5
        limit = max_size + text_window

        if len(comment) <= limit:
            return comment

        # Expand embedded newlines first, then wrap each piece.
        wrapped = []
        for segment in comment.split('\n'):
            if len(segment) <= limit:
                wrapped.append(segment)
                continue

            search_from = max_size - text_window
            emitted_verbatim = False
            while len(segment) > limit:
                split_at = segment.find(' ', search_from)
                if split_at > 0:
                    wrapped.append(segment[:split_at])
                    segment = segment[split_at:]
                elif search_from == 0:
                    # One big unbreakable token: emit it whole.
                    wrapped.append(segment)
                    emitted_verbatim = True
                    break
                else:
                    # No split point found yet -- back the search window up.
                    search_from = max(0, search_from - text_window)
            if not emitted_verbatim:
                wrapped.append(segment)

        return "\n# ".join(wrapped)

    def generate_configs(self, parser, options):
        """
        Create a configuration file based on the long-form of the option names

        @parameter parser: an optparse parser object which contains defaults, help
        @parameter options: parsed options list containing actual values

        NOTE: writes the template to stdout and terminates the process via
        sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name = os.path.basename(sys.argv[0])
        daemon_name = daemon_name.replace('.py', '')

        print """#
# Configuration file for %s
#
#  To enable a particular option, uncomment the desired entry.
#
# Parameter     Setting
# ---------     -------""" % (daemon_name)

        options_to_ignore = ('help', 'version', '', 'genconf', 'genxmltable')

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
            if opt.help is SUPPRESS_HELP:
                continue

            #
            # Get rid of the short version of the command
            #
            option_name = re.sub(r'.*/--', '', "%s" % opt)

            #
            # And what if there's no short version?
            #
            option_name = re.sub(r'^--', '', "%s" % option_name)

            #
            # Don't display anything we shouldn't be displaying
            #
            if option_name in options_to_ignore:
                continue

            #
            # Find the actual value specified on the command line, if any,
            # and display it
            #

            value = getattr(parser.values, opt.dest)

            default_value = parser.defaults.get(opt.dest)
            if default_value is NO_DEFAULT or default_value is None:
                default_value = ""
            default_string = ""
            if default_value != "":
                default_string = ", default: " + str(default_value)

            comment = self.pretty_print_config_comment(opt.help +
                                                       default_string)

            #
            # NB: I would prefer to use tabs to separate the parameter name
            #     and value, but I don't know that this would work.
            #
            print """#
# %s
#%s %s""" % (comment, option_name, value)

        #
        # Pretty print and exit
        #
        print "#"
        sys.exit(0)

    def generate_xml_table(self, parser, options):
        """
        Create a Docbook table based on the long-form of the option names

        @parameter parser: an optparse parser object which contains defaults, help
        @parameter options: parsed options list containing actual values

        NOTE: prints the Docbook markup to stdout and terminates the process
        via sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name = os.path.basename(sys.argv[0])
        daemon_name = daemon_name.replace('.py', '')

        print """<?xml version="1.0" encoding="UTF-8"?>

<section version="4.0" xmlns="http://docbook.org/ns/docbook"
   xmlns:xlink="http://www.w3.org/1999/xlink"
   xmlns:xi="http://www.w3.org/2001/XInclude"
   xmlns:svg="http://www.w3.org/2000/svg"
   xmlns:mml="http://www.w3.org/1998/Math/MathML"
   xmlns:html="http://www.w3.org/1999/xhtml"
   xmlns:db="http://docbook.org/ns/docbook"

  xml:id="%s.options"
>

<title>%s Options</title>
<para />
<table frame="all">
  <caption>%s <indexterm><primary>Daemons</primary><secondary>%s</secondary></indexterm> options</caption>
<tgroup cols="2">
<colspec colname="option" colwidth="1*" />
<colspec colname="description" colwidth="2*" />
<thead>
<row>
<entry> <para>Option</para> </entry>
<entry> <para>Description</para> </entry>
</row>
</thead>
<tbody>
""" % (daemon_name, daemon_name, daemon_name, daemon_name)

        options_to_ignore = ('help', 'version', '', 'genconf', 'genxmltable')

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
            if opt.help is SUPPRESS_HELP:
                continue

            #
            # Create a Docbook-happy version of the option strings
            # Yes, <arg></arg> would be better semantically, but the output
            # just looks goofy in a table.  Use literal instead.
            #
            all_options = '<literal>' + re.sub(
                r'/', '</literal>,</para> <para><literal>',
                "%s" % opt) + '</literal>'

            #
            # Don't display anything we shouldn't be displaying
            #
            option_name = re.sub(r'.*/--', '', "%s" % opt)
            option_name = re.sub(r'^--', '', "%s" % option_name)
            if option_name in options_to_ignore:
                continue

            default_value = parser.defaults.get(opt.dest)
            if default_value is NO_DEFAULT or default_value is None:
                default_value = ""
            default_string = ""
            if default_value != "":
                default_string = "<para> Default: <literal>" + str(
                    default_value) + "</literal></para>\n"

            comment = self.pretty_print_config_comment(opt.help)

            #
            # TODO: Determine the variable name used and display the --option_name=variable_name
            #
            if opt.action in ['store_true', 'store_false']:
                print """<row>
<entry> <para>%s</para> </entry>
<entry>
<para>%s</para>
%s</entry>
</row>
""" % (all_options, comment, default_string)

            else:
                target = '=<replaceable>' + opt.dest.lower() + '</replaceable>'
                all_options = all_options + target
                all_options = re.sub(r',', target + ',', all_options)
                print """<row>
<entry> <para>%s</para> </entry>
<entry>
<para>%s</para>
%s</entry>
</row>
""" % (all_options, comment, default_string)

        #
        # Close the table elements
        #
        print """</tbody></tgroup>
</table>
<para />
</section>
"""
        sys.exit(0)

    def generate_xml_configs(self, parser, options):
        """
        Create an XML file that can be used to create Docbook files
        as well as used as the basis for GUI-based daemon option
        configuration.

        NOTE: prints the XML to stdout and terminates the process via
        sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name = os.path.basename(sys.argv[0])
        daemon_name = daemon_name.replace('.py', '')

        export_date = datetime.datetime.now()

        print """<?xml version="1.0" encoding="UTF-8"?>

<!-- Default daemon configuration generated on %s -->
<configuration id="%s" >

""" % (export_date, daemon_name)

        options_to_ignore = (
            'help',
            'version',
            '',
            'genconf',
            'genxmltable',
            'genxmlconfigs',
        )

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
            if opt.help is SUPPRESS_HELP:
                continue

            #
            # Don't display anything we shouldn't be displaying
            #
            option_name = re.sub(r'.*/--', '', "%s" % opt)
            option_name = re.sub(r'^--', '', "%s" % option_name)
            if option_name in options_to_ignore:
                continue

            default_value = parser.defaults.get(opt.dest)
            if default_value is NO_DEFAULT or default_value is None:
                default_string = ""
            else:
                default_string = str(default_value)

#
# TODO: Determine the variable name used and display the --option_name=variable_name
#
            if opt.action in ['store_true', 'store_false']:
                print """    <option id="%s" type="%s" default="%s" help="%s" />
""" % (
                    option_name,
                    "boolean",
                    default_string,
                    quote(opt.help),
                )

            else:
                target = opt.dest.lower()
                print """    <option id="%s" type="%s" default="%s" target="%s" help="%s" />
""" % (
                    option_name,
                    opt.type,
                    quote(default_string),
                    target,
                    quote(opt.help),
                )

        #
        # Close the table elements
        #
        print """
</configuration>
"""
        sys.exit(0)
class TestStandard(BaseTest):
    """Exercise core optparse parsing: typed option values, append actions,
    option/argument splitting and joining, defaults, abbreviations,
    the '--' separator and error reporting."""

    def setUp(self):
        # -a: plain string, -b/--boo: int, --foo: repeatable string.
        options = [make_option("-a", type="string"),
                   make_option("-b", "--boo", type="int", dest='boo'),
                   make_option("--foo", action="append")]

        self.parser = OptionParser(usage=SUPPRESS_USAGE, option_list=options)

    def test_required_value(self):
        self.assertParseFail(["-a"], "-a option requires a value")

    def test_invalid_integer(self):
        self.assertParseFail(["-b", "5x"],
                             "option -b: invalid integer value: '5x'")

    def test_no_such_option(self):
        self.assertParseFail(["--boo13"], "no such option: --boo13")

    def test_long_invalid_integer(self):
        self.assertParseFail(["--boo=x5"],
                             "option --boo: invalid integer value: 'x5'")

    def test_empty(self):
        self.assertParseOK([], {'a': None, 'boo': None, 'foo': None}, [])

    def test_shortopt_empty_longopt_append(self):
        self.assertParseOK(["-a", "", "--foo=blah", "--foo="],
                           {'a': "", 'boo': None, 'foo': ["blah", ""]},
                           [])

    def test_long_option_append(self):
        self.assertParseOK(["--foo", "bar", "--foo", "", "--foo=x"],
                           {'a': None,
                            'boo': None,
                            'foo': ["bar", "", "x"]},
                           [])

    def test_option_argument_joined(self):
        self.assertParseOK(["-abc"],
                           {'a': "bc", 'boo': None, 'foo': None},
                           [])

    def test_option_argument_split(self):
        self.assertParseOK(["-a", "34"],
                           {'a': "34", 'boo': None, 'foo': None},
                           [])

    def test_option_argument_joined_integer(self):
        self.assertParseOK(["-b34"],
                           {'a': None, 'boo': 34, 'foo': None},
                           [])

    def test_option_argument_split_negative_integer(self):
        self.assertParseOK(["-b", "-5"],
                           {'a': None, 'boo': -5, 'foo': None},
                           [])

    def test_long_option_argument_joined(self):
        self.assertParseOK(["--boo=13"],
                           {'a': None, 'boo': 13, 'foo': None},
                           [])

    def test_long_option_argument_split(self):
        self.assertParseOK(["--boo", "111"],
                           {'a': None, 'boo': 111, 'foo': None},
                           [])

    def test_long_option_short_option(self):
        self.assertParseOK(["--foo=bar", "-axyz"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           [])

    def test_abbrev_long_option(self):
        self.assertParseOK(["--f=bar", "-axyz"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           [])

    def test_defaults(self):
        (options, args) = self.parser.parse_args([])
        defaults = self.parser.get_default_values()
        self.assertEqual(vars(defaults), vars(options))

    def test_ambiguous_option(self):
        self.parser.add_option("--foz", action="store",
                               type="string", dest="foo")
        possibilities = ", ".join({"--foz": None, "--foo": None}.keys())
        self.assertParseFail(["--f=bar"],
                             "ambiguous option: --f (%s?)" % possibilities)

    def test_short_and_long_option_split(self):
        # NOTE: stray trailing commas after several of these calls previously
        # turned the statements into discarded 1-tuples; they were removed.
        self.assertParseOK(["-a", "xyz", "--foo", "bar"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           [])

    def test_short_option_split_long_option_append(self):
        self.assertParseOK(["--foo=bar", "-b", "123", "--foo", "baz"],
                           {'a': None, 'boo': 123, 'foo': ["bar", "baz"]},
                           [])

    def test_short_option_split_one_positional_arg(self):
        self.assertParseOK(["-a", "foo", "bar"],
                           {'a': "foo", 'boo': None, 'foo': None},
                           ["bar"])

    def test_short_option_consumes_separator(self):
        self.assertParseOK(["-a", "--", "foo", "bar"],
                           {'a': "--", 'boo': None, 'foo': None},
                           ["foo", "bar"])

    def test_short_option_joined_and_separator(self):
        self.assertParseOK(["-ab", "--", "--foo", "bar"],
                           {'a': "b", 'boo': None, 'foo': None},
                           ["--foo", "bar"])

    def test_invalid_option_becomes_positional_arg(self):
        self.assertParseOK(["-ab", "-", "--foo", "bar"],
                           {'a': "b", 'boo': None, 'foo': ["bar"]},
                           ["-"])

    def test_no_append_versus_append(self):
        self.assertParseOK(["-b3", "-b", "5", "--foo=bar", "--foo", "baz"],
                           {'a': None, 'boo': 5, 'foo': ["bar", "baz"]},
                           [])

    def test_option_consumes_optionlike_string(self):
        self.assertParseOK(["-a", "-b3"],
                           {'a': "-b3", 'boo': None, 'foo': None},
                           [])
Example #9
0
    def __init__(self, player, threshold, options=None):
        """Initialize a runtime.

        The runtime is owned by *player*, uses the given Shamir
        *threshold*, and optionally a set of *options*. A runtime built
        this way has no network connections and knows of no other
        players -- use :func:`create_runtime` instead to obtain a
        usable runtime.
        """
        assert threshold > 0, "Must use a positive threshold."

        #: ID of this player.
        self.id = player.id
        #: Shamir secret sharing threshold.
        self.threshold = threshold

        # Fall back to the option-parser defaults when the caller did not
        # supply an options object.
        if options is None:
            parser = OptionParser()
            self.add_options(parser)
            options = parser.get_default_values()
        self.options = options

        if self.options.deferred_debug:
            # Ask Twisted to keep extra debugging info on Deferreds.
            from twisted.internet import defer
            defer.setDebugging(True)

        #: Pool of preprocessed data.
        self._pool = {}
        #: Description of needed preprocessed data.
        self._needed_data = {}

        # Seed the program counter with the (optional) computation ID.
        comp_id = self.options.computation_id
        if comp_id is None:
            comp_id = 0
        else:
            assert comp_id > 0, "Non-positive ID: %d." % comp_id
        #: Current program counter.
        self.program_counter = [comp_id, 0]

        #: Connections to the other players.
        #:
        #: Mapping from Player ID to :class:`ShareExchanger` objects.
        self.protocols = {}

        #: Number of known players; equal to ``len(self.players)``, kept
        #: here for direct access.
        self.num_players = 0

        #: Information on players: mapping from Player ID to
        #: :class:`Player` objects.
        self.players = {}

        # Register ourselves with a loop-back protocol, since no real
        # network traffic ever goes to ourselves.
        protocol = SelfShareExchanger(self.id, SelfShareExchangerFactory(self))
        protocol.transport = FakeTransport()
        self.add_player(player, protocol)

        #: Queues of deferreds and data.
        self.deferred_queue = deque()
        self.complex_deferred_queue = deque()
        #: Counter for calls of activate_reactor().
        self.activation_counter = 0
        #: Recursion-depth bookkeeping.
        self.depth_counter = 0
        self.max_depth = 0
        #: Recursion depth limit found by experiment, incl. security margin.
        self.depth_limit = int(sys.getrecursionlimit() / 50)
        #: Use deferred queues only if the ViffReactor is running.
        self.using_viff_reactor = isinstance(reactor, viff.reactor.ViffReactor)
Example #10
0
    def dataflow_wrapper(self, code_file):
        """Run the Verilog dataflow analyzer on *code_file* and print results.

        The analyzer is configured purely from the option defaults declared
        below -- nothing is parsed from the command line.

        Returns:
            (terms, binddict) -- both ``None`` when binding traversal is
            disabled (``nobind``), since no terms/bindings are collected.

        Raises:
            IOError: if *code_file* does not exist.
        """

        from optparse import OptionParser

        optparser = OptionParser()
        optparser.add_option("-v","--version",action="store_true",dest="showversion",
                             default=False,help="Show the version")
        optparser.add_option("-I","--include",dest="include",action="append",
                             default=[],help="Include path")
        optparser.add_option("-D",dest="define",action="append",
                             default=[],help="Macro Definition")
        optparser.add_option("-t","--top",dest="topmodule",
                             default="TOP",help="Top module, Default=TOP")
        optparser.add_option("--nobind",action="store_true",dest="nobind",
                             default=False,help="No binding traversal, Default=False")
        optparser.add_option("--noreorder",action="store_true",dest="noreorder",
                             default=False,help="No reordering of binding dataflow, Default=False")

        filelist = {code_file}
        # Only the defaults above are used; argv is deliberately not parsed.
        options = optparser.get_default_values()

        for f in filelist:
            if not os.path.exists(f):
                raise IOError("file not found: " + f)

        verilogdataflowanalyzer = VerilogDataflowAnalyzer(filelist, options.topmodule,
                                                          noreorder=options.noreorder,
                                                          nobind=options.nobind,
                                                          preprocess_include=options.include,
                                                          preprocess_define=options.define)
        verilogdataflowanalyzer.generate()

        directives = verilogdataflowanalyzer.get_directives()
        print('Directive:')
        for dr in directives:
            print(dr)

        instances = verilogdataflowanalyzer.getInstances()
        print('Instance:')
        for ins in instances:
            print(ins)

        # BUG FIX: terms/binddict were previously assigned only in the
        # "else" branch, so the final return raised UnboundLocalError
        # whenever options.nobind was true. Pre-initialize both so either
        # branch can return safely.
        terms = None
        binddict = None

        if options.nobind:
            print('Signal:')
            signals = verilogdataflowanalyzer.getSignals()
            for sig in signals:
                print(sig)

            print('Const:')
            consts = verilogdataflowanalyzer.getConsts()
            for con in consts:
                print(con)

        else:
            terms = verilogdataflowanalyzer.getTerms()
            print('Term:')
            for tk, tv in sorted(terms.items(), key=lambda x:len(x[0])):
                print(tv.tostr())

            binddict = verilogdataflowanalyzer.getBinddict()
            print('Bind:')
            for bk, bv in sorted(binddict.items(), key=lambda x:len(x[0])):
                for bvi in bv:
                    print(bvi.tostr())

        return terms, binddict
Example #11
0
def main_i():
    """Inner main for the sync daemon (presumably gsyncd -- see the
    ``conf/gsyncd.conf`` default below; verify against the project).

    Parses command-line options, merges them with the config file and the
    built-in defaults into ``gconf``, then either services a config
    get/set/del request, canonicalizes URLs, or starts the sync loop.
    Value precedence: 1) command line, 2) config file, 3) defaults.
    """
    # Runtime-only settings that must not land in gconf; populated by the
    # store_local* callbacks below.
    rconf = {'go_daemon': 'should'}

    # optparse callbacks: store_abs normalizes a path option to an absolute
    # path on the parser values; store_local diverts the value into rconf
    # instead; store_local_curry pre-binds the value to store.
    def store_abs(opt, optstr, val, parser):
        setattr(parser.values, opt.dest, os.path.abspath(val))
    def store_local(opt, optstr, val, parser):
        rconf[opt.dest] = val
    def store_local_curry(val):
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    op = OptionParser(usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option('--gluster-command',     metavar='CMD',   default='glusterfs')
    op.add_option('--gluster-log-file',    metavar='LOGF',  default=os.devnull, type=str, action='callback', callback=store_abs)
    op.add_option('--gluster-log-level',   metavar='LVL')
    op.add_option('-p', '--pid-file',      metavar='PIDF',  type=str, action='callback', callback=store_abs)
    op.add_option('-l', '--log-file',      metavar='LOGF',  type=str, action='callback', callback=store_abs)
    op.add_option('-L', '--log-level',     metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',   default=os.path.abspath(sys.argv[0]))
    op.add_option('-s', '--ssh-command',   metavar='CMD',   default='ssh')
    op.add_option('--rsync-command',       metavar='CMD',   default='rsync')
    op.add_option('--rsync-extra',         metavar='ARGS',  default='-sS', help=SUPPRESS_HELP)
    op.add_option('--timeout',             metavar='SEC',   type=int, default=120)
    op.add_option('--sync-jobs',           metavar='N',     type=int, default=3)
    op.add_option('--turns',               metavar='N',     type=int, default=0, help=SUPPRESS_HELP)

    op.add_option('-c', '--config-file',   metavar='CONF',  type=str, action='callback', callback=store_local)
    # duh. need to specify dest or value will be mapped to None :S
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,      action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",    action='callback', callback=store_local_curry('dont'))
    # --debug implies no daemon, log to stdout ('-') at DEBUG level.
    op.add_option('--debug', dest="go_daemon",              action='callback', callback=lambda *a: (store_local_curry('dont')(*a),
                                                                                                    setattr(a[-1].values, 'log_file', '-'),
                                                                                                    setattr(a[-1].values, 'log_level', 'DEBUG'))),
    # Config-manipulation modes: the value stored in rconf['config'] encodes
    # the request -- True (get all), a string (get one), or a tuple
    # (opt, val, regex?) for set, (opt, False, regex?) for delete.
    op.add_option('--config-get',           metavar='OPT',  type=str, dest='config', action='callback', callback=store_local)
    op.add_option('--config-get-all', dest='config', action='callback', callback=store_local_curry(True))
    op.add_option('--config-set',           metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                   callback=lambda o, oo, vx, p: store_local(o, oo, (vx[0], vx[1], False), p))
    op.add_option('--config-set-rx',        metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                   callback=lambda o, oo, vx, p: store_local(o, oo, (vx[0], vx[1], True), p))
    op.add_option('--config-del',           metavar='OPT',  type=str, dest='config', action='callback', callback=lambda o, oo, vx, p:
                                                                                                                    store_local(o, oo, (vx, False, False), p))
    op.add_option('--config-del-rx',        metavar='OPT',  type=str, dest='config', action='callback', callback=lambda o, oo, vx, p:
                                                                                                                    store_local(o, oo, (vx, False, True), p))
    op.add_option('--canonicalize-url',        dest='do_canon', action='callback', callback=store_local_curry('raw'))
    op.add_option('--canonicalize-escape-url', dest='do_canon', action='callback', callback=store_local_curry('escaped'))

    # precedence for sources of values: 1) commandline, 2) cfg file, 3) defaults
    # -- for this to work out we need to tell apart defaults from explicitly set
    # options... so churn out the defaults here and call the parser with virgin
    # values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    confdata = rconf.get('config')
    # Validate the positional-argument count against the selected mode.
    if not (len(args) == 2 or \
            (len(args) == 1 and rconf.get('listen')) or \
            (len(args) <= 2 and confdata) or \
            rconf.get('do_canon')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    if confdata and isinstance(confdata, tuple) and confdata[2]:
        # peers are regexen, don't try to parse them
        canon_peers = args
    else:
        # Parse master/slave URLs; optionally just print their canonical
        # form and exit (--canonicalize-*).
        rscs = [resource.parse_url(u) for u in args]
        dc = rconf.get('do_canon')
        if dc:
            for r in rscs:
                print(r.get_url(canonical=True, escaped=(dc=='escaped')))
            return
        local = remote = None
        if rscs:
          local = rscs[0]
          if len(rscs) > 1:
              remote = rscs[1]
          if not local.can_connect_to(remote):
              raise RuntimeError("%s cannot work with %s" % (local.path, remote and remote.path))
        # Build both raw and canonical peer-URL lists in parallel.
        pa = ([], [])
        canon = [False, True]
        for x in (local, remote):
            if x:
                for i in range(2):
                    pa[i].append(x.get_url(canonical=canon[i]))
        peers, canon_peers = pa
    if not 'config_file' in rconf:
        rconf['config_file'] = os.path.join(os.path.dirname(sys.argv[0]), "conf/gsyncd.conf")
    gcnf = GConffile(rconf['config_file'], canon_peers)

    # Config get/set/del modes finish here without starting the daemon.
    if confdata:
        if isinstance(confdata, tuple):
            if confdata[1]:
                gcnf.set(*confdata)
            else:
                gcnf.delete(confdata[0], confdata[1])
        else:
            if confdata == True:
                confdata = None
            gcnf.get(confdata)
        return

    # Merge in precedence order: defaults, then config file, then the
    # explicitly-passed command-line options.
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)

    #normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition
        if lvl2 == "Level " + lvl1:
            raise RuntimeError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    go_daemon = rconf['go_daemon']

    # For SSH slaves, defer daemonization until the remote connection is
    # established ('postconn').
    if isinstance(remote, resource.SSH) and go_daemon == 'should':
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    startup(go_daemon=go_daemon, log_file=log_file, slave=(not remote))

    logging.info("syncing: %s" % " -> ".join(peers))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    local.service_loop(*[r for r in [remote] if r])

    logging.info("exiting.")
Example #12
0
def get_default_options():
    """Return the default HDPM options.

    Builds a throw-away parser, registers the hyper-parameter options on it,
    and returns the resulting optparse default values.
    """
    hdpm_parser = OptionParser()
    add_hyper_parameter_options(hdpm_parser)
    return hdpm_parser.get_default_values()
Example #13
0
class PluginHelper(object):
    """ PluginHelper takes away some of the tedious work of writing Nagios plugins. Primary features include:

    * Keep a collection of your plugin messages (queue for both summary and longoutput)
    * Keep record of exit status
    * Keep a collection of your metrics (for both perfdata and thresholds)
    * Automatic Command-line arguments
    * Make sure output of your plugin is within Plugin Developer Guidelines

    Usage:
    p = PluginHelper()
    p.status(warning)
    p.add_summary('Example Plugin with warning status')
    p.add_metric('cpu load', '90')
    p.exit()
    """
    _nagios_status = -1  # exit status of the plugin
    _long_output = None  # Long output of the plugin
    _summary = None  # Summary of the plugin
    _perfdata = None  # Performance and Threshold Metrics are stored here
    show_longoutput = True  # If True, print longoutput
    show_perfdata = True  # If True, print perfdata
    show_summary = True  # If True, print Summary
    show_status_in_summary = True
    show_legacy = False  # Deprecated, doesnt do anything
    verbose = False  # Extra verbosity
    show_debug = False  # Extra debugging
    # By default, plugins timeout right before nagios kills the plugin
    timeout = 58

    thresholds = None  # List of strings in the nagios threshold format
    options = None  # OptionParser() options
    arguments = None  # OptionParser() arguments

    def __init__(self):
        """Initialize empty message/threshold/metric containers and build the
        OptionParser with the standard "Generic Options" and "Display
        Options" groups shared by every PluginHelper-based plugin."""
        self._long_output = []
        self._summary = []
        self.thresholds = []
        # Performance and Threshold Metrics are stored here
        self._perfdata = PerfData()

        # Generic options affect plugin behaviour (timeouts, thresholds,
        # extra-opts config files, debugging).
        self.parser = OptionParser()
        generic_group = OptionGroup(self.parser, "Generic Options")
        generic_group.add_option(
            '--timeout',
            help="Exit plugin with unknown status after x seconds",
            type='int',
            metavar='50',
            dest="timeout",
            default=self.timeout)
        generic_group.add_option(
            '--threshold',
            default=[],
            help="Thresholds in standard nagios threshold format",
            metavar='range',
            dest="thresholds",
            action="append")
        generic_group.add_option('--th',
                                 default=[],
                                 help="Same as --threshold",
                                 metavar='range',
                                 dest="thresholds",
                                 action="append")

        generic_group.add_option(
            '--extra-opts',
            help=
            "Read options from an ini file. See http://nagiosplugins.org/extra-opts",
            metavar='@file',
            dest="extra_opts")
        generic_group.add_option("-d",
                                 "--debug",
                                 dest="show_debug",
                                 help="Print debug info",
                                 metavar="d",
                                 action="store_true",
                                 default=self.show_debug)

        # Display options are options that affect the output of the plugin
        # But usually not its function
        display_group = OptionGroup(self.parser, "Display Options")
        display_group.add_option("-v",
                                 "--verbose",
                                 dest="verbose",
                                 help="Print more verbose info",
                                 metavar="v",
                                 action="store_true",
                                 default=self.verbose)
        display_group.add_option("--no-perfdata",
                                 dest="show_perfdata",
                                 help="Dont show any performance data",
                                 action="store_false",
                                 default=self.show_perfdata)
        display_group.add_option(
            "--no-longoutput",
            dest="show_longoutput",
            help=
            "Hide longoutput from the plugin output (i.e. only display first line of the output)",
            action="store_false",
            default=self.show_longoutput)
        display_group.add_option("--no-summary",
                                 dest="show_summary",
                                 help="Hide summary from plugin output",
                                 action="store_false",
                                 default=self.show_summary)

        display_group.add_option(
            "--get-metrics",
            dest="get_metrics",
            help=
            "Print all available metrics and exit (can be combined with --verbose)",
            action="store_true",
            default=False)
        display_group.add_option("--legacy",
                                 dest="show_legacy",
                                 help="Deprecated, do not use",
                                 action="store_true",
                                 default=self.show_legacy)

        # Register both groups so they show up in --help output.
        self.parser.add_option_group(generic_group)
        self.parser.add_option_group(display_group)

    def parse_arguments(self, argument_list=None):
        """ Parse command-line arguments; optparse reports syntax errors itself.

        Creates:
            self.options   -- as returned by OptionParser.parse_args()
            self.arguments -- as returned by OptionParser.parse_args()
        Arguments:
            argument_list -- defaults to sys.argv[1:]; override only if you
                             know what you are doing.
        Returns:
            None
        """
        self.options, self.arguments = self.parser.parse_args(args=argument_list)

        extra_opts = self.options.extra_opts
        if extra_opts is not None:  # --extra-opts was specified
            # 'section@file' selects both parts; a bare section name or an
            # empty value leaves the missing part(s) as None.
            section_name = config_file = None
            if '@' in extra_opts:
                section_name, config_file = extra_opts.split('@', 1)
            elif extra_opts:
                section_name = extra_opts
            # Re-parse with the ini-file values as the new defaults, so
            # explicit command-line options still take precedence.
            defaults = self.get_default_values(section_name, config_file)
            self.options, self.arguments = self.parser.parse_args(
                args=argument_list, values=defaults)

        # TODO: Handle it if developer decides to remove some options before
        # calling parse_arguments()
        self.thresholds = self.options.thresholds
        self.show_longoutput = self.options.show_longoutput
        self.show_perfdata = self.options.show_perfdata
        self.show_legacy = self.options.show_legacy
        self.show_debug = self.options.show_debug
        self.verbose = self.options.verbose
        #self.show_status_in_summary = self.options.show_status_in_summary

        self.set_timeout(self.options.timeout)

    def add_long_output(self, message):
        """ Appends message to the end of Plugin long_output. Message does not need a \n suffix

        Examples:
          >>> p = PluginHelper()
          >>> p.add_long_output('Status of sensor 1')
          >>> p.add_long_output('* Temperature: OK')
          >>> p.add_long_output('* Humidity: OK')
          >>> p.get_long_output()
          u'Status of sensor 1\\n* Temperature: OK\\n* Humidity: OK'

        """
        self._long_output.append(message)

    def add_option(self, *args, **kwargs):
        """ Same as self.parser.add_option() """
        return self.parser.add_option(*args, **kwargs)

    def get_long_output(self):
        """ Returns all long_output that has been added via add_long_output """
        return '\n'.join(self._long_output)

    def set_long_output(self, message):
        """ Overwrite current long_output with message

        Example:
        >>> s = PluginHelper()
        >>> s.add_long_output('first long output')
        >>> s.set_long_output('Fatal error')
        >>> s.get_long_output()
        u'Fatal error'
        """
        self._long_output = [message]

    def add_summary(self, message):
        """ Adds message to Plugin Summary """
        self._summary.append(message.strip())

    def set_summary(self, message):
        """ Overwrite current summary with message

        Example:
        >>> s = PluginHelper()
        >>> s.add_summary('first summary')
        >>> s.set_summary('Fatal error')
        >>> s.get_summary()
        u'Fatal error'
        """
        self._summary = [message]

    def get_summary(self):
        return '. '.join(self._summary)

    def get_status(self):
        """ Returns the worst nagios status (integer 0,1,2,3) that has been put with add_status()

        If status has never been added, returns 3 for UNKNOWN
        """

        # If no status has been set, return unknown
        if self._nagios_status == -1:
            return UNKNOWN
        else:
            return self._nagios_status

    def status(self, new_status=None):
        """ Same as get_status() if new_status=None, otherwise call add_status(new_status) """
        if new_status is None:
            return self.get_status()
        if new_status not in state_text:
            new_status = unknown
        return self.add_status(new_status)

    def add_status(self, new_status=None):
        """ Update exit status of the nagios plugin. This function will keep history of the worst status added

        Examples:
        >>> p = PluginHelper()
        >>> p.add_status(0) # ok
        >>> p.add_status(2) # critical
        >>> p.add_status(1) # warning
        >>> p.get_status()  #
        2

        >>> p = PluginHelper()
        >>> p.add_status('warning')
        >>> p.add_status('ok')
        >>> p.get_status()
        1
        >>> p.add_status('okay')
        Traceback (most recent call last):
        ...
        Exception: Invalid status supplied "okay"
        """

        # If new status was entered as a human readable string (ok,warn,etc)
        # lets convert it to int:
        if isinstance(new_status, basestring):
            key = new_status.lower()
            if key in state:
                # BUG FIX: look the status up by the lowercased key. The
                # membership test is case-insensitive, so the lookup must be
                # too -- previously e.g. 'OK' passed the test but then raised
                # KeyError on state['OK'] instead of being accepted.
                new_status = state[key]
            else:
                raise Exception("Invalid status supplied \"%s\"" %
                                (new_status))

        # Keep the worst (highest) status seen so far.
        self._nagios_status = max(self._nagios_status, new_status)

    def add_metric(self,
                   label="",
                   value="",
                   warn="",
                   crit="",
                   min="",
                   max="",
                   uom="",
                   perfdatastring=None):
        """ Add numerical metric (will be outputted as nagios performanca data)

        Examples:
          >>> p = PluginHelper()
          >>> p.add_metric(label="load1", value="7")
          >>> p.add_metric(label="load5", value="5")
          >>> p.add_metric(label="load15",value="2")
          >>> p.get_perfdata()
          "'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;"

          >>> p = PluginHelper()
          >>> p.add_metric(perfdatastring="load1=6;;;;")
          >>> p.add_metric(perfdatastring="load5=4;;;;")
          >>> p.add_metric(perfdatastring="load15=1;;;;")
          >>> p.get_perfdata()
          "'load1'=6;;;; 'load5'=4;;;; 'load15'=1;;;;"

        """
        if not perfdatastring is None:
            self._perfdata.add_perfdatametric(perfdatastring=perfdatastring)
        else:
            self._perfdata.add_perfdatametric(label=label,
                                              value=value,
                                              warn=warn,
                                              crit=crit,
                                              min=min,
                                              max=max,
                                              uom=uom)

    def get_default_values(self, section_name=None, config_file=None):
        """ Return an optparse.Values instance of all defaults, with any
        values found in the extra-opts config file layered on top.

        The extra-opts format used is the one described at
        http://nagiosplugins.org/extra-opts
        """
        # Start from the plain parser defaults.
        values = self.parser.get_default_values()

        # Read every value defined in the selected extra-opts ini section.
        file_values = ExtraOptsParser(section_name=section_name,
                                     config_file=config_file).get_values()

        for option in self.parser.option_list:
            dest = option.dest
            if dest not in file_values:
                continue
            if option.action == 'append':
                setattr(values, dest, file_values[dest])
            else:
                # Non-append options take only the first configured value.
                setattr(values, dest, file_values[dest][0])
        return values

    def get_metric(self, label):
        """ Return one specific metric (PerfdataMetric object) with the specified label. Returns None if not found.

        Example:
        >>> p = PluginHelper()
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_metric("load1")
        'load1'=7;;;;
        >>> p.get_metric("unknown") # Returns None

        """
        for i in self._perfdata.metrics:
            if i.label == label:
                return i
        return None

    def convert_perfdata(self, perfdata):
        """ Normalize warn/crit thresholds from the new range syntax to the
        old one (via reconsile_threshold). Returns None.

        Examples:
            x..y -> x:y
            inf..y -> :y
            -inf..y -> :y
            x..inf -> x:
            -inf..inf -> :
        """
        for entry in perfdata:
            entry.warn = reconsile_threshold(entry.warn)
            entry.crit = reconsile_threshold(entry.crit)
        return None

    def get_perfdata(self):
        """ Return the perfdata string for all valid metrics collected via add_metric

        Examples:
        >>> p = PluginHelper()
        >>> p.add_metric(label="load1", value="7", warn="-inf..10", crit="10..inf")
        >>> p.add_metric(label="load5", value="5", warn="-inf..7", crit="7..inf")
        >>> p.add_metric(label="load15",value="2", warn="-inf..5", crit="5..inf")
        >>> p.get_perfdata()
        "'load1'=7;10:;~:10;; 'load5'=5;7:;~:7;; 'load15'=2;5:;~:5;;"

        Example with legacy output (show_legacy should be set with a cmdline option):
        >>> p.show_legacy = True
        >>> p.get_perfdata()
        "'load1'=7;10:;~:10;; 'load5'=5;7:;~:7;; 'load15'=2;5:;~:5;;"

        """
        # Rewrite the thresholds into the format required by the current
        # nagios plugin guidelines before rendering.
        self.convert_perfdata(self._perfdata.metrics)
        return str(self._perfdata)

    def get_plugin_output(self,
                          exit_code=None,
                          summary=None,
                          long_output=None,
                          perfdata=None):
        """ Get all plugin output as it would be printed to screen with self.exit()

        Examples of functionality:
        >>> p = PluginHelper()
        >>> p.get_plugin_output()
        u'Unknown -'

        >>> p = PluginHelper()
        >>> p.add_summary('Testing')
        >>> p.add_long_output('Long testing output')
        >>> p.add_long_output('More output')
        >>> p.get_plugin_output(exit_code=0)
        u'OK - Testing\\nLong testing output\\nMore output'

        >>> p = PluginHelper()
        >>> p.add_summary('Testing')
        >>> p.add_status(0)
        >>> p.get_plugin_output()
        u'OK - Testing'

        >>> p = PluginHelper()
        >>> p.show_status_in_summary = False
        >>> p.add_summary('Testing')
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load5", value="5")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_plugin_output(exit_code=0)
        u"Testing | 'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;"

        >>> p = PluginHelper()
        >>> p.show_status_in_summary = False
        >>> p.add_summary('Testing')
        >>> p.add_long_output('Long testing output')
        >>> p.add_long_output('More output')
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load5", value="5")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_plugin_output(exit_code=0)
        u"Testing | 'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;\\nLong testing output\\nMore output"

        """
        # Any argument left as None falls back to the collected state.
        if summary is None:
            summary = self.get_summary()
        if long_output is None:
            long_output = self.get_long_output()
        if perfdata is None:
            perfdata = self.get_perfdata()
        if exit_code is None:
            exit_code = self.get_status()

        # Assemble the first line from the enabled sections.
        pieces = []
        if self.show_status_in_summary is True:
            pieces.append("%s - " % state_text[exit_code])
        if self.show_summary is True:
            pieces.append(summary)
        if self.show_perfdata is True and len(perfdata) > 0:
            pieces.append(" | %s\n" % perfdata)
        output = "".join(pieces)

        # Long output always starts on its own line.
        if not output.endswith('\n'):
            output += '\n'
        if self.show_longoutput is True and len(long_output) > 0:
            output += long_output

        return output.strip()

    def set_timeout(self, seconds=50):
        """ Configures plugin to timeout after seconds number of seconds """
        # SIGALRM handler: terminate the plugin with UNKNOWN once the
        # alarm fires.
        def _on_alarm(signum, frame):
            self.exit(
                unknown,
                summary="Plugin timeout exceeded after %s seconds." % seconds)
        signal.signal(signal.SIGALRM, _on_alarm)
        signal.alarm(seconds)

    def exit(self,
             exit_code=None,
             summary=None,
             long_output=None,
             perfdata=None):
        """ Print all collected output to screen and exit nagios style, no arguments are needed
            except if you want to override default behavior.

        Arguments:
            summary     -- Use this text as the plugin summary instead of self.get_summary()
            long_output -- Use this text as long_output instead of self.get_long_output()
            perfdata    -- Use this text instead of self.get_perfdata()
            exit_code   -- Use this exit code instead of self.get_status()
        """
        if exit_code is None:
            exit_code = self.get_status()

        # --get-metrics mode: replace the normal output with a listing of
        # every collected metric (full perfdata string when verbose,
        # labels only otherwise).
        if self.options and self.options.get_metrics is True:
            summary = "Available metrics for this plugin:"
            if self.options.verbose is True:
                metrics = [str(metric) for metric in self._perfdata.metrics]
            else:
                metrics = [metric.label for metric in self._perfdata.metrics]
            long_output = '\n'.join(metrics)

        print(self.get_plugin_output(exit_code=exit_code,
                                     summary=summary,
                                     long_output=long_output,
                                     perfdata=perfdata))
        sys.exit(exit_code)

    def check_metric(self, metric_name, thresholds):
        """ Check one specific metric against a list of thresholds. Updates self.status() and writes to summary or longout as appropriate.

        Arguments:
          metric_name -- A string representing the name of the metric (the label part of the performance data)
          thresholds  -- a list in the form of [ (level,range) ] where range is a string in the format of "start..end"

        Examples:
        >>> p = PluginHelper()
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.get_plugin_output()
        u'Unknown -'
        >>> p.add_metric('load15', '3')
        >>> p.check_metric('load15',thresholds)
        >>> p.get_plugin_output()
        u"Warning - Warning on load15 | 'load15'=3;@2:5;~:5;;"

        >>> p = PluginHelper()
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.add_metric('load15', '3')
        >>> p.verbose = True
        >>> p.check_metric('load15',thresholds)
        >>> p.get_plugin_output()
        u"Warning - Warning on load15 | 'load15'=3;@2:5;~:5;;\\nWarning on load15"

        Invalid metric:
        >>> p = PluginHelper()
        >>> p.add_status(ok)
        >>> p.add_summary('Everythings fine!')
        >>> p.get_plugin_output()
        u'OK - Everythings fine!'
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.check_metric('never_added_metric', thresholds)
        >>> p.get_plugin_output()
        u'Unknown - Everythings fine!. Metric never_added_metric not found'

        Invalid threshold:
        >>> p = PluginHelper()
        >>> thresholds = [(warning, 'invalid'), (critical,'5..inf')]
        >>> p.add_metric('load1', '10')
        >>> p.check_metric('load1', thresholds)
        Traceback (most recent call last):
        ...
        SystemExit: 3

        Returns:
          None
        """
        metric = self.get_metric(label=metric_name)

        # If threshold was specified but metric not found in our data, set
        # status unknown
        if metric is None:
            self.status(unknown)
            self.add_summary("Metric %s not found" % (metric_name))
            return

        metric_status = -1  # sentinel: no threshold range has matched yet
        default_state = 0  # by default, if no threshold matches we assume OK
        highest_level = ok  # highest threshold range seen
        # First pass: record warn/crit ranges on the metric itself (so they
        # show up in the perfdata output) and note whether an explicit OK
        # range was supplied.
        for level, threshold_range in thresholds:
            if metric.warn == '' and level == warning:
                metric.warn = threshold_range
            elif metric.crit == '' and level == critical:
                metric.crit = threshold_range
            if level == ok:
                # Per the new-threshold spec: when an OK range was specified,
                # a value that matches no range at all is critical, not OK.
                default_state = 2

        # Second pass: evaluate the metric value against every range and
        # keep the most severe level that matched.
        for level, threshold_range in thresholds:
            highest_level = max(highest_level, level)
            # If ok threshold was specified, default state is critical according to spec
            # If value matches our threshold, we increment the status
            try:
                in_range = new_threshold_syntax.check_range(
                    metric.value, threshold_range)
            except pynag.errors.PynagError:
                # Unparsable range: explain the expected syntax and bail out
                # with UNKNOWN. Note that self.exit() terminates the process.
                self.set_summary(
                    "Could not parse threshold %s=%s for metric %s" %
                    (state_text[level], threshold_range, metric_name))
                self.set_long_output(
                    "Thresholds should be in the format metric=<metric_name>,ok=0..90,warning=90..95"
                )
                self.add_long_output("Example: ")
                self.add_long_output(
                    "--th metric=load,ok=0..1,warning=1..5,critical=5..inf")
                self.status(unknown)
                self.exit()
            if in_range:
                metric_status = max(metric_status, level)
                self.debug('%s is within %s range "%s"' %
                           (metric_name, state_text[level], threshold_range))
                if level == ok:
                    # A matching OK range short-circuits all remaining checks.
                    self.debug(
                        "OK threshold matches, not checking any more thresholds"
                    )
                    metric_status = ok
                    break
            else:
                self.debug('%s is outside %s range "%s"' %
                           (metric_name, state_text[level], threshold_range))

        # If no thresholds matched, set a default return code
        if metric_status < 0:
            metric_status = default_state

        # OK's go to long output, errors go directly to summary
        self.add_status(metric_status)
        message = '%s on %s' % (state_text[metric_status], metric_name)

        # Errors are added to the summary:
        if metric_status > 0:
            self.add_summary(message)

        if self.verbose is True:
            self.add_long_output(message)

    def check_all_metrics(self):
        """ Checks all metrics (add_metric() against any thresholds set in self.options.thresholds or with --threshold from commandline)"""
        # First pass: thresholds given explicitly (e.g. via --threshold).
        checked = set()
        for threshold in self.thresholds:
            parsed = new_threshold_syntax.parse_threshold(threshold)
            self.check_metric(parsed['metric'], parsed['thresholds'])
            checked.add(parsed['metric'])

        # Second pass: metrics that were not named on the command-line but
        # carry default warn/crit ranges in their own perfdata.
        for metric in self._perfdata.metrics:
            if metric.label in checked:
                continue
            implicit = []
            if metric.warn != '':
                implicit.append((warning, metric.warn))
            if metric.crit != '':
                implicit.append((critical, metric.crit))
            self.check_metric(metric.label, implicit)

    def run_function(self, function, *args, **kwargs):
        """ Executes "function" and exits Nagios style with status "unkown"
        if there are any exceptions. The stacktrace will be in long_output.

        Example:
        >>> p = PluginHelper()
        >>> p.add_status('ok')
        >>> p.get_status()
        0
        >>> p.add_status('okay')
        Traceback (most recent call last):
        ...
        Exception: Invalid status supplied "okay"
        >>> p.run_function( p.add_status, 'warning' )
        >>> p.get_status()
        1
        >>> p.run_function( p.add_status, 'okay' )
        Traceback (most recent call last):
        ...
        SystemExit: 3
        """
        try:
            function(*args, **kwargs)
        except Exception as error:
            # Any unhandled exception from the wrapped call turns into an
            # UNKNOWN exit; the full traceback goes to long_output so the
            # failure is visible in the monitoring UI. type(error) is the
            # same class object sys.exc_info()[0] would have returned.
            summary = "Unhandled '%s' exception while running plugin (traceback below)" % type(error)
            self.exit(exit_code=unknown,
                      summary=summary,
                      long_output=traceback.format_exc(),
                      perfdata='')

    def debug(self, message):  # pragma: no cover
        """Append a debug-prefixed line to long output when debugging is enabled."""
        if self.show_debug is not True:
            return
        self.add_long_output("debug: %s" % message)

    def __str__(self):
        """Render the helper exactly as self.exit() would print it.

        >>> p = PluginHelper()
        >>> p.add_status(ok)
        >>> p.add_summary('Test')
        >>> print(p)
        OK - Test
        """
        output = self.get_plugin_output()
        return output

    def __repr__(self):
        """Terse representation: status and summary only, no long output or perfdata."""
        return self.get_plugin_output(perfdata='', long_output='')
Example #14
0
File: runtime.py Project: xony/viff
    def __init__(self, player, threshold, options=None):
        """Initialize runtime.

        Initializes a runtime owned by the given player, with the given
        Shamir secret sharing threshold, and optionally a set of options.
        The runtime has no network connections and knows of no other
        players -- the :func:`create_runtime` function should be used
        instead to create a usable runtime.

        Arguments:
        player -- the local player object; its ``id`` attribute becomes
            :attr:`id` and it is registered via :meth:`add_player`.
        threshold -- Shamir secret sharing threshold; must be positive.
        options -- optparse-style options object. When None, the defaults
            declared by :meth:`add_options` are used.
        """
        assert threshold > 0, "Must use a positive threshold."
        #: ID of this player.
        self.id = player.id
        #: Shamir secret sharing threshold.
        self.threshold = threshold

        # No options supplied: build a parser just to harvest the default
        # values declared by add_options(); nothing is actually parsed.
        if options is None:
            parser = OptionParser()
            self.add_options(parser)
            self.options = parser.get_default_values()
        else:
            self.options = options

        if self.options.deferred_debug:
            # Twisted Deferred debugging records creation/callback stacks;
            # it is expensive, so it is only switched on by request.
            from twisted.internet import defer
            defer.setDebugging(True)

        #: Pool of preprocessed data.
        self._pool = {}
        #: Description of needed preprocessed data.
        self._needed_data = {}

        #: Current program counter.
        # The caller-supplied computation id (must be positive) seeds the
        # first component of the program counter; 0 when none was given.
        __comp_id = self.options.computation_id
        if __comp_id is None:
            __comp_id = 0
        else:
            assert __comp_id > 0, "Non-positive ID: %d." % __comp_id
        self.program_counter = [__comp_id, 0]

        #: Connections to the other players.
        #:
        #: Mapping from Player ID to :class:`ShareExchanger`
        #: objects.
        self.protocols = {}

        #: Number of known players.
        #:
        #: Equal to ``len(self.players)``, but storing it here is more
        #: direct.
        self.num_players = 0

        #: Information on players.
        #:
        #: Mapping from Player ID to :class:`Player` objects.
        self.players = {}
        # Add ourselves, but with no protocol since we wont be
        # communicating with ourselves.
        protocol = SelfShareExchanger(self.id, SelfShareExchangerFactory(self))
        protocol.transport = FakeTransport()
        self.add_player(player, protocol)

        #: Queue of deferreds and data.
        self.deferred_queue = deque()
        self.complex_deferred_queue = deque()
        #: Counter for calls of activate_reactor().
        self.activation_counter = 0
        #: Record the recursion depth.
        self.depth_counter = 0
        self.max_depth = 0
        #: Recursion depth limit by experiment, including security margin.
        # NOTE(review): the divisor 50 looks empirically tuned -- confirm
        # before changing the recursion-limit heuristic.
        self.depth_limit = int(sys.getrecursionlimit() / 50)
        #: Use deferred queues only if the ViffReactor is running.
        self.using_viff_reactor = isinstance(reactor, viff.reactor.ViffReactor)
Example #15
0
Test STEME motif spacing functionality.
"""

# do this first as otherwise have strange segmentation violation. Not sure of reason for this
import scipy.special

from setup_environment import init_test_env, logging
init_test_env(__file__, level=logging.INFO)

import pkg_resources
from optparse import OptionParser
from stempy.scan import load_occurrences_from_stream
from stempy.spacing import add_max_distance_option, count_all_pairs, spacing_idx

# Build an empty OptionParser only to obtain a default options object,
# then set the spacing window directly on it.
# NOTE(review): add_max_distance_option is imported above but never applied
# to the parser here; max_distance is assigned by hand instead -- confirm
# this is intentional.
parser = OptionParser()
options = parser.get_default_values()
options.max_distance = 4


#
# Load the occurrences and associated sequence lengths,
# they will come sorted by position
#
logging.info('Loading occurrences')
# Read the scan output and sequence info from files packaged inside the
# stempy distribution (test fixtures), via pkg_resources streams.
occurrences, seq_infos, motifs = load_occurrences_from_stream(
    pkg_resources.resource_stream('stempy', 'test/spacing/steme-pwm-scan.out'),
    pkg_resources.resource_stream('stempy', 'test/spacing/steme-pwm-scan.seqs'),
)


#
Example #16
0
def main_i():
    """internal main routine

    parse command line, decide what action will be taken;
    we can either:
    - query/manipulate configuration
    - format gsyncd urls using gsyncd's url parsing engine
    - start service in following modes, in given stages:
      - agent: startup(), ChangelogAgent()
      - monitor: startup(), monitor()
      - master: startup(), connect_remote(), connect(), service_loop()
      - slave: startup(), connect(), service_loop()
    """
    rconf = {"go_daemon": "should"}

    def store_abs(opt, optstr, val, parser):
        if val and val != "-":
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)

    def store_local(opt, optstr, val, parser):
        rconf[opt.dest] = val

    def store_local_curry(val):
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    def store_local_obj(op, dmake):
        return lambda o, oo, vx, p: store_local(o, oo, FreeObject(op=op, **dmake(vx)), p)

    op = OptionParser(usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option("--gluster-command-dir", metavar="DIR", default="")
    op.add_option(
        "--gluster-log-file", metavar="LOGF", default=os.devnull, type=str, action="callback", callback=store_abs
    )
    op.add_option("--gluster-log-level", metavar="LVL")
    op.add_option("--gluster-params", metavar="PRMS", default="")
    op.add_option("--glusterd-uuid", metavar="UUID", type=str, default="", help=SUPPRESS_HELP)
    op.add_option("--gluster-cli-options", metavar="OPTS", default="--log-file=-")
    op.add_option("--mountbroker", metavar="LABEL")
    op.add_option("-p", "--pid-file", metavar="PIDF", type=str, action="callback", callback=store_abs)
    op.add_option("-l", "--log-file", metavar="LOGF", type=str, action="callback", callback=store_abs)
    op.add_option("--iprefix", metavar="LOGD", type=str, action="callback", callback=store_abs)
    op.add_option("--changelog-log-file", metavar="LOGF", type=str, action="callback", callback=store_abs)
    op.add_option("--log-file-mbr", metavar="LOGF", type=str, action="callback", callback=store_abs)
    op.add_option("--state-file", metavar="STATF", type=str, action="callback", callback=store_abs)
    op.add_option("--state-detail-file", metavar="STATF", type=str, action="callback", callback=store_abs)
    op.add_option("--georep-session-working-dir", metavar="STATF", type=str, action="callback", callback=store_abs)
    op.add_option("--ignore-deletes", default=False, action="store_true")
    op.add_option("--isolated-slave", default=False, action="store_true")
    op.add_option("--use-rsync-xattrs", default=False, action="store_true")
    op.add_option("--sync-xattrs", default=True, action="store_true")
    op.add_option("--sync-acls", default=True, action="store_true")
    op.add_option("--log-rsync-performance", default=False, action="store_true")
    op.add_option("--pause-on-start", default=False, action="store_true")
    op.add_option("-L", "--log-level", metavar="LVL")
    op.add_option("-r", "--remote-gsyncd", metavar="CMD", default=os.path.abspath(sys.argv[0]))
    op.add_option("--volume-id", metavar="UUID")
    op.add_option("--slave-id", metavar="ID")
    op.add_option("--session-owner", metavar="ID")
    op.add_option("--local-id", metavar="ID", help=SUPPRESS_HELP, default="")
    op.add_option("--local-path", metavar="PATH", help=SUPPRESS_HELP, default="")
    op.add_option("-s", "--ssh-command", metavar="CMD", default="ssh")
    op.add_option("--ssh-command-tar", metavar="CMD", default="ssh")
    op.add_option("--rsync-command", metavar="CMD", default="rsync")
    op.add_option("--rsync-options", metavar="OPTS", default="")
    op.add_option("--rsync-ssh-options", metavar="OPTS", default="--compress")
    op.add_option("--timeout", metavar="SEC", type=int, default=120)
    op.add_option("--connection-timeout", metavar="SEC", type=int, default=60, help=SUPPRESS_HELP)
    op.add_option("--sync-jobs", metavar="N", type=int, default=3)
    op.add_option("--replica-failover-interval", metavar="N", type=int, default=1)
    op.add_option("--changelog-archive-format", metavar="N", type=str, default="%Y%m")
    op.add_option("--use-meta-volume", default=False, action="store_true")
    op.add_option("--meta-volume-mnt", metavar="N", type=str, default="/var/run/gluster/shared_storage")
    op.add_option("--turns", metavar="N", type=int, default=0, help=SUPPRESS_HELP)
    op.add_option("--allow-network", metavar="IPS", default="")
    op.add_option("--socketdir", metavar="DIR")
    op.add_option("--state-socket-unencoded", metavar="SOCKF", type=str, action="callback", callback=store_abs)
    op.add_option("--checkpoint", metavar="LABEL", default="0")

    # tunables for failover/failback mechanism:
    # None   - gsyncd behaves as normal
    # blind  - gsyncd works with xtime pairs to identify
    #          candidates for synchronization
    # wrapup - same as normal mode but does not assign
    #          xtimes to orphaned files
    # see crawl() for usage of the above tunables
    op.add_option("--special-sync-mode", type=str, help=SUPPRESS_HELP)

    # changelog or xtime? (TODO: Change the default)
    op.add_option("--change-detector", metavar="MODE", type=str, default="xtime")
    # sleep interval for change detection (xtime crawl uses a hardcoded 1
    # second sleep time)
    op.add_option("--change-interval", metavar="SEC", type=int, default=3)
    # working directory for changelog based mechanism
    op.add_option("--working-dir", metavar="DIR", type=str, action="callback", callback=store_abs)
    op.add_option("--use-tarssh", default=False, action="store_true")

    op.add_option("-c", "--config-file", metavar="CONF", type=str, action="callback", callback=store_local)
    # duh. need to specify dest or value will be mapped to None :S
    op.add_option("--monitor", dest="monitor", action="callback", callback=store_local_curry(True))
    op.add_option("--agent", dest="agent", action="callback", callback=store_local_curry(True))
    op.add_option("--resource-local", dest="resource_local", type=str, action="callback", callback=store_local)
    op.add_option("--resource-remote", dest="resource_remote", type=str, action="callback", callback=store_local)
    op.add_option(
        "--feedback-fd", dest="feedback_fd", type=int, help=SUPPRESS_HELP, action="callback", callback=store_local
    )
    op.add_option("--rpc-fd", dest="rpc_fd", type=str, help=SUPPRESS_HELP)
    op.add_option("--subvol-num", dest="subvol_num", type=int, help=SUPPRESS_HELP)
    op.add_option("--listen", dest="listen", help=SUPPRESS_HELP, action="callback", callback=store_local_curry(True))
    op.add_option("-N", "--no-daemon", dest="go_daemon", action="callback", callback=store_local_curry("dont"))
    op.add_option("--verify", type=str, dest="verify", action="callback", callback=store_local)
    op.add_option("--create", type=str, dest="create", action="callback", callback=store_local)
    op.add_option("--delete", dest="delete", action="callback", callback=store_local_curry(True))
    op.add_option("--status-get", dest="status_get", action="callback", callback=store_local_curry(True))
    op.add_option(
        "--debug",
        dest="go_daemon",
        action="callback",
        callback=lambda *a: (
            store_local_curry("dont")(*a),
            setattr(a[-1].values, "log_file", "-"),
            setattr(a[-1].values, "log_level", "DEBUG"),
            setattr(a[-1].values, "changelog_log_file", "-"),
        ),
    )
    op.add_option("--path", type=str, action="append")

    for a in ("check", "get"):
        op.add_option(
            "--config-" + a,
            metavar="OPT",
            type=str,
            dest="config",
            action="callback",
            callback=store_local_obj(a, lambda vx: {"opt": vx}),
        )
    op.add_option(
        "--config-get-all", dest="config", action="callback", callback=store_local_obj("get", lambda vx: {"opt": None})
    )
    for m in ("", "-rx", "-glob"):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able
        # to inject (a value depending on the)
        # iteration variable into the inner lambda
        def conf_mod_opt_regex_variant(rx):
            op.add_option(
                "--config-set" + m,
                metavar="OPT VAL",
                type=str,
                nargs=2,
                dest="config",
                action="callback",
                callback=store_local_obj("set", lambda vx: {"opt": vx[0], "val": vx[1], "rx": rx}),
            )
            op.add_option(
                "--config-del" + m,
                metavar="OPT",
                type=str,
                dest="config",
                action="callback",
                callback=store_local_obj("del", lambda vx: {"opt": vx, "rx": rx}),
            )

        conf_mod_opt_regex_variant(m and m[1:] or False)

    op.add_option("--normalize-url", dest="url_print", action="callback", callback=store_local_curry("normal"))
    op.add_option("--canonicalize-url", dest="url_print", action="callback", callback=store_local_curry("canon"))
    op.add_option(
        "--canonicalize-escape-url", dest="url_print", action="callback", callback=store_local_curry("canon_esc")
    )

    tunables = [
        norm(o.get_opt_string()[2:])
        for o in op.option_list
        if (o.callback in (store_abs, "store_true", None) and o.get_opt_string() not in ("--version", "--help"))
    ]
    remote_tunables = ["listen", "go_daemon", "timeout", "session_owner", "config_file", "use_rsync_xattrs"]
    rq_remote_tunables = {"listen": True}

    # precedence for sources of values: 1) commandline, 2) cfg file, 3)
    # defaults for this to work out we need to tell apart defaults from
    # explicitly set options... so churn out the defaults here and call
    # the parser with virgin values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    args_orig = args[:]
    r = rconf.get("resource_local")
    if r:
        if len(args) == 0:
            args.append(None)
        args[0] = r
    r = rconf.get("resource_remote")
    if r:
        if len(args) == 0:
            raise GsyncdError("local resource unspecfied")
        elif len(args) == 1:
            args.append(None)
        args[1] = r
    confdata = rconf.get("config")
    if not (
        len(args) == 2
        or (len(args) == 1 and rconf.get("listen"))
        or (len(args) <= 2 and confdata)
        or rconf.get("url_print")
    ):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    verify = rconf.get("verify")
    if verify:
        logging.info(verify)
        logging.info("Able to spawn gsyncd.py")
        return

    restricted = os.getenv("_GSYNCD_RESTRICTED_")

    if restricted:
        allopts = {}
        allopts.update(opts.__dict__)
        allopts.update(rconf)
        bannedtuns = set(allopts.keys()) - set(remote_tunables)
        if bannedtuns:
            raise GsyncdError(
                "following tunables cannot be set with " "restricted SSH invocaton: " + ", ".join(bannedtuns)
            )
        for k, v in rq_remote_tunables.items():
            if not k in allopts or allopts[k] != v:
                raise GsyncdError("tunable %s is not set to value %s required " "for restricted SSH invocaton" % (k, v))

    confrx = getattr(confdata, "rx", None)

    def makersc(aa, check=True):
        if not aa:
            return ([], None, None)
        ra = [resource.parse_url(u) for u in aa]
        local = ra[0]
        remote = None
        if len(ra) > 1:
            remote = ra[1]
        if check and not local.can_connect_to(remote):
            raise GsyncdError("%s cannot work with %s" % (local.path, remote and remote.path))
        return (ra, local, remote)

    if confrx:
        # peers are regexen, don't try to parse them
        if confrx == "glob":
            args = ["\A" + fnmatch.translate(a) for a in args]
        canon_peers = args
        namedict = {}
    else:
        dc = rconf.get("url_print")
        rscs, local, remote = makersc(args_orig, not dc)
        if dc:
            for r in rscs:
                print(
                    r.get_url(
                        **{
                            "normal": {},
                            "canon": {"canonical": True},
                            "canon_esc": {"canonical": True, "escaped": True},
                        }[dc]
                    )
                )
            return
        pa = ([], [], [])
        urlprms = ({}, {"canonical": True}, {"canonical": True, "escaped": True})
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        _, canon_peers, canon_esc_peers = pa
        # creating the namedict, a dict representing various ways of referring
        # to / repreenting peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[0].upper() + x[1:], lambda x: "e" + x[0].upper() + x[1:])
        if remote:
            rmap = {local: ("local", "master"), remote: ("remote", "slave")}
        else:
            rmap = {local: ("local", "slave")}
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                namedict[name + "vol"] = x.volume
                if name == "remote":
                    namedict["remotehost"] = x.remotehost
    if not "config_file" in rconf:
        rconf["config_file"] = os.path.join(os.path.dirname(sys.argv[0]), "conf/gsyncd_template.conf")

    upgrade_config_file(rconf["config_file"])
    gcnf = GConffile(rconf["config_file"], canon_peers, defaults.__dict__, opts.__dict__, namedict)

    checkpoint_change = False
    if confdata:
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == "check":
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise GsyncdError("not a valid option: " + confdata.opt)
        if confdata.op == "get":
            gcnf.get(confdata.opt)
        elif confdata.op == "set":
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == "del":
            gcnf.delete(confdata.opt, confdata.rx)
        # when modifying checkpoint, it's important to make a log
        # of that, so in that case we go on to set up logging even
        # if its just config invocation
        if confdata.opt == "checkpoint" and confdata.op in ("set", "del") and not confdata.rx:
            checkpoint_change = True
        if not checkpoint_change:
            return

    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    delete = rconf.get("delete")
    if delete:
        logging.info("geo-replication delete")
        # Delete pid file, status file, socket file
        cleanup_paths = []
        if getattr(gconf, "pid_file", None):
            cleanup_paths.append(gconf.pid_file)

        if getattr(gconf, "state_file", None):
            cleanup_paths.append(gconf.state_file)

        if getattr(gconf, "state_detail_file", None):
            cleanup_paths.append(gconf.state_detail_file)

        if getattr(gconf, "state_socket_unencoded", None):
            cleanup_paths.append(gconf.state_socket_unencoded)

        cleanup_paths.append(rconf["config_file"][:-11] + "*")

        # Cleanup changelog working dirs
        if getattr(gconf, "working_dir", None):
            try:
                shutil.rmtree(gconf.working_dir)
            except (IOError, OSError):
                if sys.exc_info()[1].errno == ENOENT:
                    pass
                else:
                    raise GsyncdError("Error while removing working dir: %s" % gconf.working_dir)

        for path in cleanup_paths:
            # To delete temp files
            for f in glob.glob(path + "*"):
                _unlink(f)
        return

    if restricted and gconf.allow_network:
        ssh_conn = os.getenv("SSH_CONNECTION")
        if not ssh_conn:
            # legacy env var
            ssh_conn = os.getenv("SSH_CLIENT")
        if ssh_conn:
            allowed_networks = [IPNetwork(a) for a in gconf.allow_network.split(",")]
            client_ip = IPAddress(ssh_conn.split()[0])
            allowed = False
            for nw in allowed_networks:
                if client_ip in nw:
                    allowed = True
                    break
            if not allowed:
                raise GsyncdError("client IP address is not allowed")

    ffd = rconf.get("feedback_fd")
    if ffd:
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    # normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition
        if lvl2 == "Level " + lvl1:
            raise GsyncdError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    if not privileged() and gconf.log_file_mbr:
        gconf.log_file = gconf.log_file_mbr

    if checkpoint_change:
        try:
            GLogger._gsyncd_loginit(log_file=gconf.log_file, label="conf")
            if confdata.op == "set":
                logging.info("checkpoint %s set" % confdata.val)
            elif confdata.op == "del":
                logging.info("checkpoint info was reset")
        except IOError:
            if sys.exc_info()[1].errno == ENOENT:
                # directory of log path is not present,
                # which happens if we get here from
                # a peer-multiplexed "config-set checkpoint"
                # (as that directory is created only on the
                # original node)
                pass
            else:
                raise
        return

    create = rconf.get("create")
    if create:
        if getattr(gconf, "state_file", None):
            set_monitor_status(gconf.state_file, create)
        return

    go_daemon = rconf["go_daemon"]
    be_monitor = rconf.get("monitor")
    be_agent = rconf.get("agent")

    rscs, local, remote = makersc(args)

    status_get = rconf.get("status_get")
    if status_get:
        for brick in gconf.path:
            brick_status = GeorepStatus(gconf.state_file, brick)
            checkpoint_time = int(getattr(gconf, "checkpoint", "0"))
            brick_status.print_status(checkpoint_time=checkpoint_time)
        return

    if not be_monitor and isinstance(remote, resource.SSH) and go_daemon == "should":
        go_daemon = "postconn"
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = "monitor"
    elif be_agent:
        label = "agent"
    elif remote:
        # master
        label = gconf.local_path
    else:
        label = "slave"
    startup(go_daemon=go_daemon, log_file=log_file, label=label)
    resource.Popen.init_errhandler()

    if be_agent:
        os.setsid()
        logging.debug("rpc_fd: %s" % repr(gconf.rpc_fd))
        return agent(Changelog(), gconf.rpc_fd)

    if be_monitor:
        return monitor(*rscs)

    logging.info("syncing: %s" % " -> ".join(r.url for r in rscs))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon="done")
    local.connect()
    if ffd:
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])
Example #17
0
                metavar="<tx_hash>",
                help="specify the hash of any out transaction sent by <address> "
                     "(use it when the the <address> is personal)")
    parser.add_option("-c", "--creation-transaction", 
                metavar="<tx_hash>",
                dest="creation_transaction",
                help="specify the hash of transaction that created contract "
                     "(use it when the the <address> is contract)")
    parser.add_option("-t", "--testnet", dest="testnet",
                action="store_true", default=False,
                help="use ropsten network")
    parser.add_option("-i", "--ipcpath", dest="ipcpath",
                default=None, metavar="<path>",
                help="specify the path to geth.ipc file")
    
    vars(parser.get_default_values())
    (options, args) = parser.parse_args()

    # Network
    print("Network: {}".format('testnet' if options.testnet else 'mainnet'))

    # Web3
    ipcpath = options.ipcpath if options.ipcpath is not None \
                else utils.get_default_ipc_path(testnet=options.testnet) 
    print('Using local geth IPC at {}.'.format(ipcpath))
    web3 = Web3(IPCProvider(ipc_path=ipcpath))

    # List accounts
    if options.list:
        print('Personal accounts list:')
        try:
Example #18
0
def main_i():
    """Internal main routine.

    Parse the command line and decide what action is taken; we either:
    - query/manipulate configuration (--config-* options)
    - format gsyncd urls using gsyncd's url parsing engine
      (--normalize-url / --canonicalize-*-url options)
    - start the service as monitor, master or slave
    """
    # invocation-level settings (as opposed to session tunables) are
    # diverted into this dict by the store_local* callbacks below
    rconf = {'go_daemon': 'should'}

    def store_abs(opt, optstr, val, parser):
        # store path-valued options as absolute paths ('-' means stdio)
        if val and val != '-':
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)
    def store_local(opt, optstr, val, parser):
        # divert the option into rconf instead of the parser's values
        rconf[opt.dest] = val
    def store_local_curry(val):
        # store_local variant with a fixed value, for valueless options
        return lambda o, oo, vx, p: store_local(o, oo, val, p)
    def store_local_obj(op, dmake):
        # store_local variant wrapping the value in a FreeObject that
        # records the config operation verb plus its parameters
        return lambda o, oo, vx, p: store_local(o, oo, FreeObject(op=op, **dmake(vx)), p)

    op = OptionParser(usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option('--gluster-command',     metavar='CMD',   default='glusterfs')
    op.add_option('--gluster-log-file',    metavar='LOGF',  default=os.devnull, type=str, action='callback', callback=store_abs)
    op.add_option('--gluster-log-level',   metavar='LVL')
    op.add_option('-p', '--pid-file',      metavar='PIDF',  type=str, action='callback', callback=store_abs)
    op.add_option('-l', '--log-file',      metavar='LOGF',  type=str, action='callback', callback=store_abs)
    op.add_option('--state-file',          metavar='STATF', type=str, action='callback', callback=store_abs)
    op.add_option('-L', '--log-level',     metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',   default=os.path.abspath(sys.argv[0]))
    op.add_option('--volume-id',           metavar='UUID')
    op.add_option('--session-owner',       metavar='ID')
    op.add_option('-s', '--ssh-command',   metavar='CMD',   default='ssh')
    op.add_option('--rsync-command',       metavar='CMD',   default='rsync')
    op.add_option('--rsync-extra',         metavar='ARGS',  default='-sS', help=SUPPRESS_HELP)
    op.add_option('--timeout',             metavar='SEC',   type=int, default=120)
    op.add_option('--sync-jobs',           metavar='N',     type=int, default=3)
    op.add_option('--turns',               metavar='N',     type=int, default=0, help=SUPPRESS_HELP)

    op.add_option('-c', '--config-file',   metavar='CONF',  type=str, action='callback', callback=store_local)
    # dest must be specified explicitly for callback options, else the
    # value would be mapped to None :S
    op.add_option('--monitor', dest='monitor', action='callback', callback=store_local_curry(True))
    op.add_option('--feedback-fd', dest='feedback_fd', type=int, help=SUPPRESS_HELP, action='callback', callback=store_local)
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,      action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",    action='callback', callback=store_local_curry('dont'))
    # --debug implies --no-daemon plus logging to stdout at DEBUG level
    op.add_option('--debug', dest="go_daemon",              action='callback', callback=lambda *a: (store_local_curry('dont')(*a),
                                                                                                    setattr(a[-1].values, 'log_file', '-'),
                                                                                                    setattr(a[-1].values, 'log_level', 'DEBUG'))),

    for a in ('check', 'get'):
        op.add_option('--config-' + a,      metavar='OPT',  type=str, dest='config', action='callback',
                      callback=store_local_obj(a, lambda vx: {'opt': vx}))
    op.add_option('--config-get-all', dest='config', action='callback', callback=store_local_obj('get', lambda vx: {'opt': None}))
    for m in ('', '-rx'):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able to inject (a value depending on the)
        # iteration variable into the inner lambda (avoids late binding)
        def conf_mod_opt_regex_variant(rx):
            op.add_option('--config-set' + m,   metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                          callback=store_local_obj('set', lambda vx: {'opt': vx[0], 'val': vx[1], 'rx': rx}))
            op.add_option('--config-del' + m,   metavar='OPT',  type=str, dest='config', action='callback',
                          callback=store_local_obj('del', lambda vx: {'opt': vx, 'rx': rx}))
        conf_mod_opt_regex_variant(not not m)

    op.add_option('--normalize-url',           dest='url_print', action='callback', callback=store_local_curry('normal'))
    op.add_option('--canonicalize-url',        dest='url_print', action='callback', callback=store_local_curry('canon'))
    op.add_option('--canonicalize-escape-url', dest='url_print', action='callback', callback=store_local_curry('canon_esc'))

    # names of options that count as legitimate session tunables: those
    # stored via store_abs or the default store action (callback is None)
    tunables = [ norm(o.get_opt_string()[2:]) for o in op.option_list if o.callback in (store_abs, None) and o.get_opt_string() not in ('--version', '--help') ]

    # precedence for sources of values: 1) commandline, 2) cfg file, 3) defaults
    # -- for this to work out we need to tell apart defaults from explicitly set
    # options... so churn out the defaults here and call the parser with virgin
    # values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    confdata = rconf.get('config')
    # argument count sanity: two peers, or one peer in --listen mode, or
    # fewer for pure config / url-print invocations
    if not (len(args) == 2 or \
            (len(args) == 1 and rconf.get('listen')) or \
            (len(args) <= 2 and confdata) or \
            rconf.get('url_print')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    if getattr(confdata, 'rx', None):
        # peers are regexen, don't try to parse them
        canon_peers = args
        namedict = {}
    else:
        rscs = [resource.parse_url(u) for u in args]
        dc = rconf.get('url_print')
        if dc:
            # url-print mode: just emit the requested url form and exit
            for r in rscs:
                print(r.get_url(**{'normal': {},
                                   'canon': {'canonical': True},
                                   'canon_esc': {'canonical': True, 'escaped': True}}[dc]))
            return
        local = remote = None
        if rscs:
            local = rscs[0]
            if len(rscs) > 1:
                remote = rscs[1]
            if not local.can_connect_to(remote):
                raise GsyncdError("%s cannot work with %s" % (local.path, remote and remote.path))
        # pa collects the plain / canonical / canonical-escaped url of
        # each peer, in parallel lists
        pa = ([], [], [])
        urlprms = ({}, {'canonical': True}, {'canonical': True, 'escaped': True})
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        peers, canon_peers, canon_esc_peers = pa
        # creating the namedict, a dict representing various ways of referring to / representing
        # peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[0].upper() + x[1:], lambda x: 'e' + x[0].upper() + x[1:])
        if remote:
            rmap = { local: ('local', 'master'), remote: ('remote', 'slave') }
        else:
            rmap = { local: ('local', 'slave') }
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                if x.scheme == 'gluster':
                    namedict[name + 'vol'] = x.volume
    if not 'config_file' in rconf:
        # fall back to the stock config shipped next to the script
        rconf['config_file'] = os.path.join(os.path.dirname(sys.argv[0]), "conf/gsyncd.conf")
    gcnf = GConffile(rconf['config_file'], canon_peers, defaults.__dict__, opts.__dict__, namedict)

    if confdata:
        # pure config manipulation invocation: perform it and return
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == 'check':
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise GsyncdError("not a valid option: " + confdata.opt)
        if confdata.op == 'get':
            gcnf.get(confdata.opt)
        elif confdata.op == 'set':
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == 'del':
            gcnf.delete(confdata.opt, confdata.rx)
        return

    # merge value sources into gconf in increasing precedence:
    # defaults, then config file, then explicit command line options
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    ffd = rconf.get('feedback_fd')
    if ffd:
        # keep the feedback fd from leaking into exec'd children
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    # normalize loglevel: map symbolic names to numeric logging levels
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition (getLevelName returns "Level X" for unknown X)
        if lvl2 == "Level " + lvl1:
            raise GsyncdError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    go_daemon = rconf['go_daemon']
    be_monitor = rconf.get('monitor')

    if not be_monitor and isinstance(remote, resource.SSH) and \
       go_daemon == 'should':
        # defer daemonization until the ssh connection is up
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = 'monitor'
    elif remote:
        # master
        label = ''
    else:
        label = 'slave'
    startup(go_daemon=go_daemon, log_file=log_file, label=label)

    if be_monitor:
        return monitor()

    logging.info("syncing: %s" % " -> ".join(peers))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    if ffd:
        # signal readiness to the parent by closing the feedback fd
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])
Example #19
0
def optionparser(mode):
    """Build the CLI parser for *mode* ('dir', 'dns' or 'vhost'), parse
    sys.argv, and publish the options the user explicitly supplied in
    the module-level ``flags`` dict."""

    global flags

    parser = OptionParser()

    def _opt(short, long_, dest, **extra):
        # shorthand so the option tables below stay compact
        parser.add_option(short, long_, dest=dest, **extra)

    # options shared by every mode
    _opt("-z", "--noprogress", "noprogress", action="store_false")
    _opt("-o", "--output", "output", metavar='')
    _opt("-q", "--quiet", "quiet", action='store_false')
    _opt("-t", "--threads", "threads")
    _opt("-v", "--verbose", "verbose", action='store_false')
    _opt("-w", "--wordlist", "wordlist", metavar='FILE')
    _opt("-R", "--recursive", "recursive", action='store_false')

    if mode == 'dir':
        _opt("-u", "--url", "url")
        _opt("-c", "--cookies", "cookies")
        _opt("-e", "--expanded", "expanded", action="store_false")
        _opt("-r", "--followredirect", "followredirect", action="store_false")
        _opt("-x", "--extensions", "extensions")
        _opt("-H", "--headers", "headers")
        _opt("-l", "--includelength", "includelength", action="store_false")
        _opt("-k", "--insecuressl", "insecuressl", action="store_false")
        _opt("-n", "--nostatus", "nostatus", action="store_false")
        _opt("-P", "--password", "password")
        _opt("-p", "--proxy", "proxy")
        _opt("-s", "--statuscodes", "statuscodes")
        _opt("-b", "--statuscodesblacklist", "statuscodesblacklist")
        _opt("-a", "--useragent", "useragent")
        _opt("-U", "--username", "username")
        parser.add_option("--timeout", dest="timeout")
        parser.add_option("--wildcard", dest="wildcard", action="store_false")
    elif mode == 'dns':
        _opt("-d", "--domain", "domain")
        _opt("-r", "--resolver", "resolver")
        _opt("-c", "--showcname", "showcname", action="store_false")
        _opt("-i", "--showips", "showips", action="store_false")
        parser.add_option("--timeout", dest="timeout")
        parser.add_option("--wildcard", dest="wildcard", action="store_false")
    elif mode == 'vhost':
        _opt("-u", "--url", "url")
        _opt("-c", "--cookies", "cookies")
        _opt("-r", "--followredirect", "followredirect", action="store_false")
        _opt("-H", "--headers", "headers")
        _opt("-k", "--insecuressl", "insecuressl")
        _opt("-P", "--password", "password")
        _opt("-p", "--proxy", "proxy")
        _opt("-a", "--useragent", "useragent")
        _opt("-U", "--username", "username")
        parser.add_option("--timeout", dest="timeout")

    # parse into a virgin Values so we can tell apart what the user
    # actually passed (explicit) from the parser's defaults
    explicit = Values()
    (options, args) = parser.parse_args(values=explicit)
    options = Values(parser.get_default_values().__dict__)
    options._update_careful(explicit.__dict__)
    flags = explicit.__dict__
Example #20
0
def main_i():
    """internal main routine

    parse command line, decide what action will be taken;
    we can either:
    - query/manipulate configuration
    - format gsyncd urls using gsyncd's url parsing engine
    - start service in following modes, in given stages:
      - monitor: startup(), monitor()
      - master: startup(), connect_remote(), connect(), service_loop()
      - slave: startup(), connect(), service_loop()
    """
    # invocation-level settings (as opposed to session tunables) are
    # diverted into this dict by the store_local* callbacks below
    rconf = {'go_daemon': 'should'}

    def store_abs(opt, optstr, val, parser):
        # store path-valued options as absolute paths ('-' means stdio)
        if val and val != '-':
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)
    def store_local(opt, optstr, val, parser):
        # divert the option into rconf instead of the parser's values
        rconf[opt.dest] = val
    def store_local_curry(val):
        # store_local variant with a fixed value, for valueless options
        return lambda o, oo, vx, p: store_local(o, oo, val, p)
    def store_local_obj(op, dmake):
        # store_local variant wrapping the value in a FreeObject that
        # records the config operation verb plus its parameters
        return lambda o, oo, vx, p: store_local(o, oo, FreeObject(op=op, **dmake(vx)), p)

    op = OptionParser(usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option('--gluster-command-dir', metavar='DIR',   default='')
    op.add_option('--gluster-log-file',    metavar='LOGF',  default=os.devnull, type=str, action='callback', callback=store_abs)
    op.add_option('--gluster-log-level',   metavar='LVL')
    op.add_option('--gluster-params',      metavar='PRMS',  default='')
    op.add_option('--gluster-cli-options', metavar='OPTS',  default='--log-file=-')
    op.add_option('--mountbroker',         metavar='LABEL')
    op.add_option('-p', '--pid-file',      metavar='PIDF',  type=str, action='callback', callback=store_abs)
    op.add_option('-l', '--log-file',      metavar='LOGF',  type=str, action='callback', callback=store_abs)
    op.add_option('--state-file',          metavar='STATF', type=str, action='callback', callback=store_abs)
    op.add_option('--ignore-deletes',      default=False, action='store_true')
    op.add_option('-L', '--log-level',     metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',   default=os.path.abspath(sys.argv[0]))
    op.add_option('--volume-id',           metavar='UUID')
    op.add_option('--session-owner',       metavar='ID')
    op.add_option('-s', '--ssh-command',   metavar='CMD',   default='ssh')
    op.add_option('--rsync-command',       metavar='CMD',   default='rsync')
    op.add_option('--timeout',             metavar='SEC',   type=int, default=120)
    op.add_option('--connection-timeout',  metavar='SEC',   type=int, default=60, help=SUPPRESS_HELP)
    op.add_option('--sync-jobs',           metavar='N',     type=int, default=3)
    op.add_option('--turns',               metavar='N',     type=int, default=0, help=SUPPRESS_HELP)
    op.add_option('--allow-network',       metavar='IPS',   default='')

    op.add_option('-c', '--config-file',   metavar='CONF',  type=str, action='callback', callback=store_local)
    # dest must be specified explicitly for callback options, else the
    # value would be mapped to None :S
    op.add_option('--monitor', dest='monitor', action='callback', callback=store_local_curry(True))
    op.add_option('--feedback-fd', dest='feedback_fd', type=int, help=SUPPRESS_HELP, action='callback', callback=store_local)
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,      action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",    action='callback', callback=store_local_curry('dont'))
    # --debug implies --no-daemon plus logging to stdout at DEBUG level
    op.add_option('--debug', dest="go_daemon",              action='callback', callback=lambda *a: (store_local_curry('dont')(*a),
                                                                                                    setattr(a[-1].values, 'log_file', '-'),
                                                                                                    setattr(a[-1].values, 'log_level', 'DEBUG'))),

    for a in ('check', 'get'):
        op.add_option('--config-' + a,      metavar='OPT',  type=str, dest='config', action='callback',
                      callback=store_local_obj(a, lambda vx: {'opt': vx}))
    op.add_option('--config-get-all', dest='config', action='callback', callback=store_local_obj('get', lambda vx: {'opt': None}))
    for m in ('', '-rx', '-glob'):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able to inject (a value depending on the)
        # iteration variable into the inner lambda (avoids late binding)
        def conf_mod_opt_regex_variant(rx):
            op.add_option('--config-set' + m,   metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                          callback=store_local_obj('set', lambda vx: {'opt': vx[0], 'val': vx[1], 'rx': rx}))
            op.add_option('--config-del' + m,   metavar='OPT',  type=str, dest='config', action='callback',
                          callback=store_local_obj('del', lambda vx: {'opt': vx, 'rx': rx}))
        # rx is 'rx', 'glob' or False depending on the option variant
        conf_mod_opt_regex_variant(m and m[1:] or False)

    op.add_option('--normalize-url',           dest='url_print', action='callback', callback=store_local_curry('normal'))
    op.add_option('--canonicalize-url',        dest='url_print', action='callback', callback=store_local_curry('canon'))
    op.add_option('--canonicalize-escape-url', dest='url_print', action='callback', callback=store_local_curry('canon_esc'))

    # names of options that count as legitimate session tunables.
    # NOTE(review): the 'store_true' member can never equal o.callback
    # (callbacks are functions or None; store_true options have callback
    # None and already match that way) -- looks like dead filter cruft,
    # verify intent
    tunables = [ norm(o.get_opt_string()[2:]) for o in op.option_list if o.callback in (store_abs, 'store_true', None) and o.get_opt_string() not in ('--version', '--help') ]
    # tunables that a restricted (remote) invocation is allowed to set,
    # and those it is *required* to set (with required values)
    remote_tunables = [ 'listen', 'go_daemon', 'timeout', 'session_owner', 'config_file' ]
    rq_remote_tunables = { 'listen': True }

    # precedence for sources of values: 1) commandline, 2) cfg file, 3) defaults
    # -- for this to work out we need to tell apart defaults from explicitly set
    # options... so churn out the defaults here and call the parser with virgin
    # values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    confdata = rconf.get('config')
    # argument count sanity: two peers, or one peer in --listen mode, or
    # fewer for pure config / url-print invocations
    if not (len(args) == 2 or \
            (len(args) == 1 and rconf.get('listen')) or \
            (len(args) <= 2 and confdata) or \
            rconf.get('url_print')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    # the env var marks an invocation forced through restricted SSH;
    # such invocations may only use a small whitelist of tunables
    restricted = os.getenv('_GSYNCD_RESTRICTED_')

    if restricted:
        allopts = {}
        allopts.update(opts.__dict__)
        allopts.update(rconf)
        bannedtuns = set(allopts.keys()) - set(remote_tunables)
        if bannedtuns:
            raise GsyncdError('following tunables cannot be set with restricted SSH invocaton: ' + \
                              ', '.join(bannedtuns))
        for k, v in rq_remote_tunables.items():
            if not k in allopts or allopts[k] != v:
                raise GsyncdError('tunable %s is not set to value %s required for restricted SSH invocaton' % \
                                  (k, v))

    confrx = getattr(confdata, 'rx', None)
    if confrx:
        # peers are regexen, don't try to parse them
        if confrx == 'glob':
            # translate glob patterns to anchored regexen
            args = [ '\A' + fnmatch.translate(a) for a in args ]
        canon_peers = args
        namedict = {}
    else:
        rscs = [resource.parse_url(u) for u in args]
        dc = rconf.get('url_print')
        if dc:
            # url-print mode: just emit the requested url form and exit
            for r in rscs:
                print(r.get_url(**{'normal': {},
                                   'canon': {'canonical': True},
                                   'canon_esc': {'canonical': True, 'escaped': True}}[dc]))
            return
        local = remote = None
        if rscs:
            local = rscs[0]
            if len(rscs) > 1:
                remote = rscs[1]
            if not local.can_connect_to(remote):
                raise GsyncdError("%s cannot work with %s" % (local.path, remote and remote.path))
        # pa collects the plain / canonical / canonical-escaped url of
        # each peer, in parallel lists
        pa = ([], [], [])
        urlprms = ({}, {'canonical': True}, {'canonical': True, 'escaped': True})
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        peers, canon_peers, canon_esc_peers = pa
        # creating the namedict, a dict representing various ways of referring to / representing
        # peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[0].upper() + x[1:], lambda x: 'e' + x[0].upper() + x[1:])
        if remote:
            rmap = { local: ('local', 'master'), remote: ('remote', 'slave') }
        else:
            rmap = { local: ('local', 'slave') }
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                if x.scheme == 'gluster':
                    namedict[name + 'vol'] = x.volume
    if not 'config_file' in rconf:
        # fall back to the stock config shipped next to the script
        rconf['config_file'] = os.path.join(os.path.dirname(sys.argv[0]), "conf/gsyncd.conf")
    gcnf = GConffile(rconf['config_file'], canon_peers, defaults.__dict__, opts.__dict__, namedict)

    if confdata:
        # pure config manipulation invocation: perform it and return
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == 'check':
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise GsyncdError("not a valid option: " + confdata.opt)
        if confdata.op == 'get':
            gcnf.get(confdata.opt)
        elif confdata.op == 'set':
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == 'del':
            gcnf.delete(confdata.opt, confdata.rx)
        return

    # merge value sources into gconf in increasing precedence:
    # defaults, then config file, then explicit command line options
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    if restricted and gconf.allow_network:
        # enforce the client IP whitelist for restricted invocations
        ssh_conn = os.getenv('SSH_CONNECTION')
        if not ssh_conn:
            # legacy env var
            ssh_conn = os.getenv('SSH_CLIENT')
        if ssh_conn:
            allowed_networks = [ IPNetwork(a) for a in gconf.allow_network.split(',') ]
            client_ip = IPAddress(ssh_conn.split()[0])
            allowed = False
            for nw in allowed_networks:
                if client_ip in nw:
                    allowed = True
                    break
            if not allowed:
                raise GsyncdError("client IP address is not allowed")

    ffd = rconf.get('feedback_fd')
    if ffd:
        # keep the feedback fd from leaking into exec'd children
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    # normalize loglevel: map symbolic names to numeric logging levels
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition (getLevelName returns "Level X" for unknown X)
        if lvl2 == "Level " + lvl1:
            raise GsyncdError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    go_daemon = rconf['go_daemon']
    be_monitor = rconf.get('monitor')

    if not be_monitor and isinstance(remote, resource.SSH) and \
       go_daemon == 'should':
        # defer daemonization until the ssh connection is up
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = 'monitor'
    elif remote:
        # master
        label = ''
    else:
        label = 'slave'
    startup(go_daemon=go_daemon, log_file=log_file, label=label)

    if be_monitor:
        return monitor()

    logging.info("syncing: %s" % " -> ".join(peers))
    resource.Popen.init_errhandler()
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    if ffd:
        # signal readiness to the parent by closing the feedback fd
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])
Example #21
0
def main_i():
    """Inner gsyncd entry point.

    Parses the command line, merges option sources with precedence
    commandline > config file > built-in defaults, services the one-shot
    modes (--config-get/set/del*, --canonicalize-*), then daemonizes as
    requested and hands control to the local resource's service loop.
    """
    # rconf holds runtime-only settings that must never be written to the
    # persistent config file; the callbacks below divert selected options
    # here instead of into the OptionParser values object.
    rconf = {'go_daemon': 'should'}

    # Callback: store the option value as an absolute path on parser.values.
    def store_abs(opt, optstr, val, parser):
        setattr(parser.values, opt.dest, os.path.abspath(val))
    # Callback: store the option value in rconf (runtime-only config).
    def store_local(opt, optstr, val, parser):
        rconf[opt.dest] = val
    # Build a store_local callback that ignores the parsed value and records
    # the fixed value `val` instead — used for flag-style options.
    def store_local_curry(val):
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    op = OptionParser(usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option('--gluster-command',     metavar='CMD',   default='glusterfs')
    op.add_option('--gluster-log-file',    metavar='LOGF',  default=os.devnull, type=str, action='callback', callback=store_abs)
    op.add_option('--gluster-log-level',   metavar='LVL')
    op.add_option('-p', '--pid-file',      metavar='PIDF',  type=str, action='callback', callback=store_abs)
    op.add_option('-l', '--log-file',      metavar='LOGF',  type=str, action='callback', callback=store_abs)
    op.add_option('-L', '--log-level',     metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',   default=os.path.abspath(sys.argv[0]))
    op.add_option('-s', '--ssh-command',   metavar='CMD',   default='ssh')
    op.add_option('--rsync-command',       metavar='CMD',   default='rsync')
    op.add_option('--rsync-extra',         metavar='ARGS',  default='-sS', help=SUPPRESS_HELP)
    op.add_option('--timeout',             metavar='SEC',   type=int, default=120)
    op.add_option('--sync-jobs',           metavar='N',     type=int, default=3)
    op.add_option('--turns',               metavar='N',     type=int, default=0, help=SUPPRESS_HELP)

    op.add_option('-c', '--config-file',   metavar='CONF',  type=str, action='callback', callback=store_local)
    # duh. need to specify dest or value will be mapped to None :S
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,      action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",    action='callback', callback=store_local_curry('dont'))
    # --debug implies --no-daemon and forces log level DEBUG unless one was
    # already set explicitly on the command line.
    op.add_option('--debug', dest="go_daemon",              action='callback', callback=lambda *a: (store_local_curry('dont')(*a),
                                                                                                    a[-1].values.__dict__.get('log_level') or \
                                                                                                     a[-1].values.__dict__.update(log_level='DEBUG')))
    # The --config-* options all share dest='config'; the stored value's shape
    # encodes the operation: a plain string means "get", True means "get all",
    # and a 3-tuple (opt, val, rx) means set/delete (val=False => delete,
    # rx=True => peers are treated as regexen).
    op.add_option('--config-get',           metavar='OPT',  type=str, dest='config', action='callback', callback=store_local)
    op.add_option('--config-get-all', dest='config', action='callback', callback=store_local_curry(True))
    op.add_option('--config-set',           metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                   callback=lambda o, oo, vx, p: store_local(o, oo, (vx[0], vx[1], False), p))
    op.add_option('--config-set-rx',        metavar='OPT VAL', type=str, nargs=2, dest='config', action='callback',
                   callback=lambda o, oo, vx, p: store_local(o, oo, (vx[0], vx[1], True), p))
    op.add_option('--config-del',           metavar='OPT',  type=str, dest='config', action='callback', callback=lambda o, oo, vx, p:
                                                                                                                    store_local(o, oo, (vx, False, False), p))
    op.add_option('--config-del-rx',        metavar='OPT',  type=str, dest='config', action='callback', callback=lambda o, oo, vx, p:
                                                                                                                    store_local(o, oo, (vx, False, True), p))
    op.add_option('--canonicalize-url',        dest='do_canon', action='callback', callback=store_local_curry('raw'))
    op.add_option('--canonicalize-escape-url', dest='do_canon', action='callback', callback=store_local_curry('escaped'))

    # precedence for sources of values: 1) commandline, 2) cfg file, 3) defaults
    # -- for this to work out we need to tell apart defaults from explicitly set
    # options... so churn out the defaults here and call the parser with virgin
    # values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    confdata = rconf.get('config')
    # Validate the positional-argument count against the active mode:
    # normal sync needs <master> <slave>; --listen needs one URL; the
    # config and canonicalize modes relax the requirement.
    if not (len(args) == 2 or \
            (len(args) == 1 and rconf.get('listen')) or \
            (len(args) <= 2 and confdata) or \
            rconf.get('do_canon')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    if confdata and isinstance(confdata, tuple) and confdata[2]:
        # peers are regexen, don't try to parse them
        canon_peers = args
    else:
        rscs = [resource.parse_url(u) for u in args]
        dc = rconf.get('do_canon')
        if dc:
            # One-shot canonicalization mode: print and return immediately.
            for r in rscs:
                print(r.get_url(canonical=True, escaped=(dc=='escaped')))
            return
        local = remote = None
        # NOTE(review): the block below uses 2-space indentation, unlike the
        # 4-space indentation elsewhere in this function (valid but odd).
        if rscs:
          local = rscs[0]
          if len(rscs) > 1:
              remote = rscs[1]
          if not local.can_connect_to(remote):
              raise RuntimeError("%s cannot work with %s" % (local.path, remote and remote.path))
        # pa collects the raw (index 0) and canonical (index 1) URL forms.
        pa = ([], [])
        canon = [False, True]
        for x in (local, remote):
            if x:
                for i in range(2):
                    pa[i].append(x.get_url(canonical=canon[i]))
        peers, canon_peers = pa
    if not 'config_file' in rconf:
        rconf['config_file'] = os.path.join(os.path.dirname(sys.argv[0]), "conf/gsyncd.conf")
    gcnf = GConffile(rconf['config_file'], canon_peers)

    if confdata:
        # One-shot config mode: perform the requested get/set/delete and exit.
        if isinstance(confdata, tuple):
            if confdata[1]:
                gcnf.set(*confdata)
            else:
                gcnf.delete(confdata[0], confdata[1])
        else:
            # True means --config-get-all; normalize to None so gcnf.get
            # fetches every option.
            if confdata == True:
                confdata = None
            gcnf.get(confdata)
        return

    # Layer the option sources onto gconf in precedence order:
    # defaults first, then config-file values, then explicit commandline.
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)

    #normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition
        # (logging.getLevelName returns the string "Level <name>" when the
        # name is not a registered level, so that is the failure signal.)
        if lvl2 == "Level " + lvl1:
            raise RuntimeError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    go_daemon = rconf['go_daemon']

    # For SSH remotes, defer daemonization until after the connection is
    # established ('postconn'); logging stays on the terminal until then.
    if isinstance(remote, resource.SSH) and go_daemon == 'should':
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    startup(go_daemon=go_daemon, log_file=log_file, slave=(not remote))

    logging.info("syncing: %s" % " -> ".join(peers))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    local.service_loop(*[r for r in [remote] if r])

    logging.info("exiting.")
Example #22
0
class PluginHelper:

    """ PluginHelper takes away some of the tedious work of writing Nagios plugins. Primary features include:

    * Keep a collection of your plugin messages (queue for both summary and longoutput)
    * Keep record of exit status
    * Keep a collection of your metrics (for both perfdata and thresholds)
    * Automatic Command-line arguments
    * Make sure output of your plugin is within Plugin Developer Guidelines

    Usage:
    p = PluginHelper()
    p.status(warning)
    p.add_summary('Example Plugin with warning status')
    p.add_metric('cpu load', '90')
    p.exit()
    """
    _nagios_status = -1     # exit status of the plugin (-1 means "never set")
    _long_output = None     # Long output of the plugin
    _summary = None         # Summary of the plugin
    _perfdata = None        # Performance and Threshold Metrics are stored here
    show_longoutput = True  # If True, print longoutput
    show_perfdata = True    # If True, print perfdata
    show_summary = True     # If True, print Summary
    show_status_in_summary = True  # If True, prefix summary with "OK - " etc.
    show_legacy = False     # Deprecated, doesn't do anything
    verbose = False         # Extra verbosity
    show_debug = False      # Extra debugging
    # By default, plugins timeout right before nagios kills the plugin
    timeout = 58

    thresholds = None       # List of strings in the nagios threshold format
    options = None          # OptionParser() options
    arguments = None        # OptionParser() arguments

    def __init__(self):
        # Mutable containers are created per-instance here (the class-level
        # None defaults exist only so the attributes are documented above).
        self._long_output = []
        self._summary = []
        self.thresholds = []
        # Performance and Threshold Metrics are stored here
        self._perfdata = PerfData()

        # Build the standard option set every plugin gets for free.
        self.parser = OptionParser()
        generic_group = OptionGroup(self.parser, "Generic Options")
        generic_group.add_option(
            '--timeout',
            help="Exit plugin with unknown status after x seconds",
            type='int',
            metavar='50',
            dest="timeout",
            default=self.timeout
        )
        generic_group.add_option(
            '--threshold',
            default=[],
            help="Thresholds in standard nagios threshold format",
            metavar='range',
            dest="thresholds",
            action="append"
        )
        # --th is an alias: it appends to the same dest as --threshold.
        generic_group.add_option(
            '--th',
            default=[],
            help="Same as --threshold",
            metavar='range',
            dest="thresholds",
            action="append"
        )

        generic_group.add_option(
            '--extra-opts',
            help="Read options from an ini file. See http://nagiosplugins.org/extra-opts",
            metavar='@file',
            dest="extra_opts"
        )
        generic_group.add_option(
            "-d", "--debug",
            dest="show_debug",
            help="Print debug info",
            metavar="d",
            action="store_true",
            default=self.show_debug
        )

        # Display options are options that affect the output of the plugin
        # But usually not its function
        display_group = OptionGroup(self.parser, "Display Options")
        display_group.add_option(
            "-v", "--verbose",
            dest="verbose",
            help="Print more verbose info",
            metavar="v",
            action="store_true",
            default=self.verbose
        )
        display_group.add_option(
            "--no-perfdata",
            dest="show_perfdata",
            help="Dont show any performance data",
            action="store_false",
            default=self.show_perfdata
        )
        display_group.add_option(
            "--no-longoutput",
            dest="show_longoutput",
            help="Hide longoutput from the plugin output (i.e. only display first line of the output)",
            action="store_false",
            default=self.show_longoutput
        )
        display_group.add_option(
            "--no-summary",
            dest="show_summary",
            help="Hide summary from plugin output",
            action="store_false",
            default=self.show_summary
        )

        display_group.add_option(
            "--get-metrics",
            dest="get_metrics",
            help="Print all available metrics and exit (can be combined with --verbose)",
            action="store_true",
            default=False
        )
        display_group.add_option(
            "--legacy",
            dest="show_legacy",
            help="Deprecated, do not use",
            action="store_true",
            default=self.show_legacy
        )

        self.parser.add_option_group(generic_group)
        self.parser.add_option_group(display_group)

    def parse_arguments(self, argument_list=None):
        """ Parses commandline arguments, prints error if there is a syntax error.

        Creates:
            self.options   -- As created by OptionParser.parse()
            self.arguments -- As created by OptionParser.parse()
        Arguments:
            argument_list -- By default use sys.argv[1:], override only if you know what you are doing.
        Returns:
            None
        """
        self.options, self.arguments = self.parser.parse_args(
            args=argument_list)

        extra_opts = self.options.extra_opts
        if extra_opts is not None:  # --extra-opts was specified
            if extra_opts == '':  # --extra-opts= with no value.
                section_name = None
                config_file = None
            elif '@' in extra_opts:  # filename was specified
                section_name, config_file = extra_opts.split('@', 1)
            else:  # Only section was specified
                section_name = extra_opts
                config_file = None
            # Re-parse with defaults taken from the extra-opts config file so
            # that explicit commandline values still win over file values.
            values = self.get_default_values(section_name, config_file)
            self.options, self.arguments = self.parser.parse_args(
                args=argument_list, values=values)

        # TODO: Handle it if developer decides to remove some options before
        # calling parse_arguments()
        # Mirror parsed option values onto the instance attributes.
        self.thresholds = self.options.thresholds
        self.show_longoutput = self.options.show_longoutput
        self.show_perfdata = self.options.show_perfdata
        self.show_legacy = self.options.show_legacy
        self.show_debug = self.options.show_debug
        self.verbose = self.options.verbose
        #self.show_status_in_summary = self.options.show_status_in_summary

        # Arm the SIGALRM-based timeout immediately after parsing.
        self.set_timeout(self.options.timeout)

    def add_long_output(self, message):
        """ Appends message to the end of Plugin long_output. Message does not need a \n suffix

        Examples:
          >>> p = PluginHelper()
          >>> p.add_long_output('Status of sensor 1')
          >>> p.add_long_output('* Temperature: OK')
          >>> p.add_long_output('* Humidity: OK')
          >>> p.get_long_output()
          'Status of sensor 1\\n* Temperature: OK\\n* Humidity: OK'
        """
        self._long_output.append(message)

    def add_option(self, *args, **kwargs):
        """ Same as self.parser.add_option() """
        return self.parser.add_option(*args, **kwargs)

    def get_long_output(self):
        """ Returns all long_output that has been added via add_long_output """
        return '\n'.join(self._long_output)

    def set_long_output(self, message):
        """ Overwrite current long_output with message

        Example:
        >>> s = PluginHelper()
        >>> s.add_long_output('first long output')
        >>> s.set_long_output('Fatal error')
        >>> s.get_long_output()
        'Fatal error'
        """
        self._long_output = [message]

    def add_summary(self, message):
        """ Adds message to Plugin Summary """
        self._summary.append(message.strip())

    def set_summary(self, message):
        """ Overwrite current summary with message

        Example:
        >>> s = PluginHelper()
        >>> s.add_summary('first summary')
        >>> s.set_summary('Fatal error')
        >>> s.get_summary()
        'Fatal error'
        """
        self._summary = [message]

    def get_summary(self):
        """ Returns all summary messages joined with '. ' """
        return '. '.join(self._summary)

    def get_status(self):
        """ Returns the worst nagios status (integer 0,1,2,3) that has been put with add_status()

        If status has never been added, returns 3 for UNKNOWN
        """

        # If no status has been set, return unknown
        if self._nagios_status == -1:
            return UNKNOWN
        else:
            return self._nagios_status

    def status(self, new_status=None):
        """ Same as get_status() if new_status=None, otherwise call add_status(new_status) """
        if new_status is None:
            return self.get_status()
        # NOTE(review): unlike add_status(), a value not present in
        # state_text is silently coerced to unknown here instead of
        # raising — confirm this asymmetry is intended.
        if new_status not in state_text:
            new_status = unknown
        return self.add_status(new_status)

    def add_status(self, new_status=None):
        """ Update exit status of the nagios plugin. This function will keep history of the worst status added

        Examples:
        >>> p = PluginHelper()
        >>> p.add_status(0) # ok
        >>> p.add_status(2) # critical
        >>> p.add_status(1) # warning
        >>> p.get_status()  #
        2

        >>> p = PluginHelper()
        >>> p.add_status('warning')
        >>> p.add_status('ok')
        >>> p.get_status()
        1
        >>> p.add_status('okay')
        Traceback (most recent call last):
        ...
        Exception: Invalid status supplied "okay"
        """

        # If new status was entered as a human readable string (ok,warn,etc)
        # lets convert it to int:
        if isinstance(new_status, basestring):
            if new_status.lower() in state:
                new_status = state[new_status]
            else:
                raise Exception(
                    "Invalid status supplied \"%s\"" % (new_status))

        # Keep the worst (numerically highest) status seen so far.
        self._nagios_status = max(self._nagios_status, new_status)

    def add_metric(self, label="", value="", warn="", crit="", min="", max="", uom="", perfdatastring=None):
        """ Add numerical metric (will be outputted as nagios performance data)

        Examples:
          >>> p = PluginHelper()
          >>> p.add_metric(label="load1", value="7")
          >>> p.add_metric(label="load5", value="5")
          >>> p.add_metric(label="load15",value="2")
          >>> p.get_perfdata()
          "'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;"

          >>> p = PluginHelper()
          >>> p.add_metric(perfdatastring="load1=6;;;;")
          >>> p.add_metric(perfdatastring="load5=4;;;;")
          >>> p.add_metric(perfdatastring="load15=1;;;;")
          >>> p.get_perfdata()
          "'load1'=6;;;; 'load5'=4;;;; 'load15'=1;;;;"
        """
        # NOTE: the `min`/`max` parameter names shadow the builtins inside
        # this method (kept as-is for API compatibility with keyword callers).
        if not perfdatastring is None:
            self._perfdata.add_perfdatametric(perfdatastring=perfdatastring)
        else:
            self._perfdata.add_perfdatametric(
                label=label, value=value, warn=warn, crit=crit, min=min, max=max, uom=uom)

    def get_default_values(self, section_name=None, config_file=None):
        """ Returns an optionParser.Values instance of all defaults after parsing extra opts config file

        The Nagios extra-opts spec we use is the same as described here: http://nagiosplugins.org/extra-opts

        Arguments:
            section_name -- ini section to read (None for the default section)
            config_file  -- path to the extra-opts ini file (None for default)
        """
        # Get the program defaults
        values = self.parser.get_default_values()

        # Create an ExtraOptsParser instance and get all the values from that
        # config file
        extra_opts = ExtraOptsParser(
            section_name=section_name, config_file=config_file).get_values()

        # Override parser defaults with values found in the config file.
        # "append"-type options take the whole list; others take the first
        # occurrence only.
        for option in self.parser.option_list:
            name = option.dest
            if name in extra_opts:
                if option.action == 'append':
                    setattr(values, name, extra_opts[option.dest])
                else:
                    setattr(values, name, extra_opts[option.dest][0])
        return values

    def get_metric(self, label):
        """ Return one specific metric (PerfdataMetric object) with the specified label. Returns None if not found.

        Example:
        >>> p = PluginHelper()
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_metric("load1")
        'load1'=7;;;;
        >>> p.get_metric("unknown") # Returns None

        """
        for i in self._perfdata.metrics:
            if i.label == label:
                return i
        return None

    def convert_perfdata(self, perfdata):
        """ Converts new threshold range format to old one. Returns None.

        Examples:
            x..y -> x:y
            inf..y -> :y
            -inf..y -> :y
            x..inf -> x:
            -inf..inf -> :
        """
        # Mutates the metric objects in place via reconsile_threshold().
        for metric in perfdata:
            metric.warn = reconsile_threshold(metric.warn)
            metric.crit = reconsile_threshold(metric.crit)
        return None

    def get_perfdata(self):
        """ Get perfdatastring for all valid perfdatametrics collected via add_perfdata

        Examples:
        >>> p = PluginHelper()
        >>> p.add_metric(label="load1", value="7", warn="-inf..10", crit="10..inf")
        >>> p.add_metric(label="load5", value="5", warn="-inf..7", crit="7..inf")
        >>> p.add_metric(label="load15",value="2", warn="-inf..5", crit="5..inf")
        >>> p.get_perfdata()
        "'load1'=7;10:;~:10;; 'load5'=5;7:;~:7;; 'load15'=2;5:;~:5;;"

        Example with legacy output (show_legacy should be set with a cmdline option):
        >>> p.show_legacy = True
        >>> p.get_perfdata()
        "'load1'=7;10:;~:10;; 'load5'=5;7:;~:7;; 'load15'=2;5:;~:5;;"

        """
        # Normalize the perfdata to so the thresholds match the current nagios plugin guidelines
        self.convert_perfdata(self._perfdata.metrics)
        return str(self._perfdata)

    def get_plugin_output(self, exit_code=None, summary=None, long_output=None, perfdata=None):
        """ Get all plugin output as it would be printed to screen with self.exit()

        Examples of functionality:
        >>> p = PluginHelper()
        >>> p.get_plugin_output()
        'Unknown -'

        >>> p = PluginHelper()
        >>> p.add_summary('Testing')
        >>> p.add_long_output('Long testing output')
        >>> p.add_long_output('More output')
        >>> p.get_plugin_output(exit_code=0)
        'OK - Testing\\nLong testing output\\nMore output'

        >>> p = PluginHelper()
        >>> p.add_summary('Testing')
        >>> p.add_status(0)
        >>> p.get_plugin_output()
        'OK - Testing'

        >>> p = PluginHelper()
        >>> p.show_status_in_summary = False
        >>> p.add_summary('Testing')
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load5", value="5")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_plugin_output(exit_code=0)
        "Testing | 'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;"

        >>> p = PluginHelper()
        >>> p.show_status_in_summary = False
        >>> p.add_summary('Testing')
        >>> p.add_long_output('Long testing output')
        >>> p.add_long_output('More output')
        >>> p.add_metric(label="load1", value="7")
        >>> p.add_metric(label="load5", value="5")
        >>> p.add_metric(label="load15",value="2")
        >>> p.get_plugin_output(exit_code=0)
        "Testing | 'load1'=7;;;; 'load5'=5;;;; 'load15'=2;;;;\\nLong testing output\\nMore output"

        """
        # Explicit arguments override the collected state.
        if summary is None:
            summary = self.get_summary()
        if long_output is None:
            long_output = self.get_long_output()
        if perfdata is None:
            perfdata = self.get_perfdata()
        if exit_code is None:
            exit_code = self.get_status()

        # Assemble output in Nagios plugin order:
        # "<STATUS> - <summary> | <perfdata>\n<long output>"
        return_buffer = ""
        if self.show_status_in_summary is True:
            return_buffer += "%s - " % state_text[exit_code]
        if self.show_summary is True:
            return_buffer += summary
        if self.show_perfdata is True and len(perfdata) > 0:
            return_buffer += " | %s\n" % perfdata

        if not return_buffer.endswith('\n'):
            return_buffer += '\n'
        if self.show_longoutput is True and len(long_output) > 0:
            return_buffer += long_output

        return_buffer = return_buffer.strip()
        return return_buffer

    def set_timeout(self, seconds=50):
        """ Configures plugin to timeout after seconds number of seconds """
        # Uses SIGALRM, so this is POSIX-only; the handler exits with
        # unknown status when the alarm fires.
        timeout = lambda x, y: self.exit(
            unknown, summary="Plugin timeout exceeded after %s seconds." % seconds)
        signal.signal(signal.SIGALRM, timeout)
        signal.alarm(seconds)

    def exit(self, exit_code=None, summary=None, long_output=None, perfdata=None):
        """ Print all collected output to screen and exit nagios style, no arguments are needed
            except if you want to override default behavior.

        Arguments:
            summary     -- Is this text as the plugin summary instead of self.get_summary()
            long_output -- Use this text as long_output instead of self.get_long_output()
            perfdata    -- Use this text instead of self.get_perfdata()
            exit_code   -- Use this exit code instead of self.status()
        """
        if exit_code is None:
            exit_code = self.get_status()
        # --get-metrics mode: replace output with a listing of metric labels
        # (or full metric strings when --verbose is also set).
        if self.options and self.options.get_metrics is True:
            summary = "Available metrics for this plugin:"
            metrics = []

            for i in self._perfdata.metrics:
                if self.options.verbose is True:
                    metrics.append(str(i))
                else:
                    metrics.append(i.label)
            long_output = '\n'.join(metrics)

        plugin_output = self.get_plugin_output(
            exit_code=exit_code, summary=summary, long_output=long_output, perfdata=perfdata)

        print plugin_output
        sys.exit(exit_code)

    def check_metric(self, metric_name, thresholds):
        """ Check one specific metric against a list of thresholds. Updates self.status() and writes to summary or longout as appropriate.

        Arguments:
          metric_name -- A string representing the name of the metric (the label part of the performance data)
          thresholds  -- a list in the form of [ (level,range) ] where range is a string in the format of "start..end"

        Examples:
        >>> p = PluginHelper()
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.get_plugin_output()
        'Unknown -'
        >>> p.add_metric('load15', '3')
        >>> p.check_metric('load15',thresholds)
        >>> p.get_plugin_output()
        "Warning - Warning on load15 | 'load15'=3;@2:5;~:5;;"

        >>> p = PluginHelper()
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.add_metric('load15', '3')
        >>> p.verbose = True
        >>> p.check_metric('load15',thresholds)
        >>> p.get_plugin_output()
        "Warning - Warning on load15 | 'load15'=3;@2:5;~:5;;\\nWarning on load15"

        Invalid metric:
        >>> p = PluginHelper()
        >>> p.add_status(ok)
        >>> p.add_summary('Everythings fine!')
        >>> p.get_plugin_output()
        'OK - Everythings fine!'
        >>> thresholds = [(warning,'2..5'), (critical,'5..inf')]
        >>> p.check_metric('never_added_metric', thresholds)
        >>> p.get_plugin_output()
        'Unknown - Everythings fine!. Metric never_added_metric not found'

        Invalid threshold:
        >>> p = PluginHelper()
        >>> thresholds = [(warning, 'invalid'), (critical,'5..inf')]
        >>> p.add_metric('load1', '10')
        >>> p.check_metric('load1', thresholds)
        Traceback (most recent call last):
        ...
        SystemExit: 3

        Returns:
          None
        """
        metric = self.get_metric(label=metric_name)

        # If threshold was specified but metric not found in our data, set
        # status unknown
        if metric is None:
            self.status(unknown)
            self.add_summary("Metric %s not found" % (metric_name))
            return

        metric_status = -1  # by default assume nothing
        default_state = 0  # By default if no treshold matches, we assume OK
        highest_level = ok  # highest threshold range seen
        # Iterate through all thresholds, and log down warn and crit for
        # perfdata purposes
        for level, threshold_range in thresholds:
            if metric.warn == '' and level == warning:
                metric.warn = threshold_range
            elif metric.crit == '' and level == critical:
                metric.crit = threshold_range
            if level == ok:
                default_state = 2

        # Iterate all threshold and determine states
        for level, threshold_range in thresholds:
            highest_level = max(highest_level, level)
            # If ok threshold was specified, default state is critical according to spec
            # If value matches our threshold, we increment the status
            try:
                in_range = new_threshold_syntax.check_range(
                    metric.value, threshold_range)
            except PynagError:
                # Unparseable threshold: explain the expected syntax and
                # bail out with unknown status (exits the process).
                self.set_summary(
                    "Could not parse threshold %s=%s for metric %s" %
                    (state_text[
                     level], threshold_range, metric_name)
                )
                self.set_long_output(
                    "Thresholds should be in the format metric=<metric_name>,ok=0..90,warning=90..95")
                self.add_long_output("Example: ")
                self.add_long_output(
                    "--th metric=load,ok=0..1,warning=1..5,critical=5..inf")
                self.status(unknown)
                self.exit()
            if in_range:
                metric_status = max(metric_status, level)
                self.debug('%s is within %s range "%s"' %
                           (metric_name, state_text[level], threshold_range))
                if level == ok:
                    self.debug(
                        "OK threshold matches, not checking any more thresholds")
                    metric_status = ok
                    break
            else:
                self.debug('%s is outside %s range "%s"' %
                           (metric_name, state_text[level], threshold_range))

        # If no thresholds matched, set a default return code
        if metric_status < 0:
            metric_status = default_state

        # OK's go to long output, errors go directly to summary
        self.add_status(metric_status)
        message = '%s on %s' % (state_text[metric_status], metric_name)

        # Errors are added to the summary:
        if metric_status > 0:
            self.add_summary(message)

        if self.verbose is True:
            self.add_long_output(message)

    def check_all_metrics(self):
        """ Checks all metrics (add_metric() against any thresholds set in self.options.thresholds or with --threshold from commandline)"""
        checked_metrics = []
        for threshold in self.thresholds:
            parsed_threshold = new_threshold_syntax.parse_threshold(threshold)
            metric_name = parsed_threshold['metric']
            thresholds = parsed_threshold['thresholds']
            self.check_metric(metric_name, thresholds)
            checked_metrics.append(metric_name)

        # Lets look at metrics that were not specified on the command-line but might have a default
        # threshold specified with their metric data
        for i in self._perfdata.metrics:
            if i.label in checked_metrics:
                continue
            thresholds = []

            if i.warn != '':
                thresholds.append((warning, i.warn))
            if i.crit != '':
                thresholds.append((critical, i.crit))
            self.check_metric(i.label, thresholds)

    def run_function(self, function, *args, **kwargs):
        """ Executes "function" and exits Nagios style with status "unkown"
        if there are any exceptions. The stacktrace will be in long_output.

        Example:
        >>> p = PluginHelper()
        >>> p.add_status('ok')
        >>> p.get_status()
        0
        >>> p.add_status('okay')
        Traceback (most recent call last):
        ...
        Exception: Invalid status supplied "okay"
        >>> p.run_function( p.add_status, 'warning' )
        >>> p.get_status()
        1
        >>> p.run_function( p.add_status, 'okay' )
        Traceback (most recent call last):
        ...
        SystemExit: 3
        """
        try:
            function(*args, **kwargs)
        except Exception:
            # Turn any uncaught exception into a Nagios "unknown" exit with
            # the traceback attached to long_output.
            exc_type, exc_value, exc_traceback = sys.exc_info()
            exit_code = unknown
            # traceback.print_exc(file=sys.stdout)
            summary = "Unhandled '%s' exception while running plugin (traceback below)" % exc_type
            long_output = traceback.format_exc()
            self.exit(exit_code=exit_code, summary=summary,
                      long_output=long_output, perfdata='')

    def debug(self, message):  # pragma: no cover
        """ Appends message to long_output, but only when --debug is active """
        if self.show_debug is True:
            self.add_long_output("debug: %s" % message)

    def __str__(self):
        """
        >>> p = PluginHelper()
        >>> p.add_status(ok)
        >>> p.add_summary('Test')
        >>> print p
        OK - Test
        """
        return self.get_plugin_output()

    def __repr__(self):
        """ Plugin output without long_output or perfdata """
        return self.get_plugin_output(long_output='', perfdata='')
Example #23
0
class CmdBase(object):
    """
    Class used for all Zenoss commands
    """
    # NOTE: the docstring above was previously placed after doesLogging, which
    # made it a no-op string expression (CmdBase.__doc__ was None).

    # When True (the default), __init__ finishes by configuring log handlers
    # via setupLogging().  The should_log constructor argument can override it.
    doesLogging = True

    def __init__(self, noopts=0, args=None, should_log=None):
        """
        Initialize the Zope/Zenoss environment and parse command-line options.

        @param noopts: when true, parse an empty argument list instead of argv
        @param args: explicit argument list (mainly for unit tests); defaults
            to sys.argv[1:]
        @param should_log: optional override for the class-level doesLogging
        """
        zope.component.provideAdapter(DefaultTraversable, (None,))
        # This explicitly loads all of the products - must happen first!
        from OFS.Application import import_products
        import_products()
        #make sure we aren't in debug mode
        import Globals
        Globals.DevelopmentMode = False
        # We must import ZenossStartup at this point so that all Zenoss daemons
        # and tools will have any ZenPack monkey-patched methods available.
        import Products.ZenossStartup
        unused(Products.ZenossStartup)
        zcml.load_site()
        import Products.ZenWidgets
        load_config_override('scriptmessaging.zcml', Products.ZenWidgets)

        self.usage = "%prog [options]"
        self.noopts = noopts
        self.inputArgs = args

        # inputArgs was created to allow unit tests to pass in command line
        # arguments and get around whatever Zope was doing to sys.argv.
        if self.inputArgs is None:
            self.inputArgs = sys.argv[1:]

        self.parser = None
        self.args = []

        self.buildParser()
        self.buildOptions()

        # Get defaults from global.conf. They will be overridden by
        # daemon-specific config file or command line arguments.
        applyGlobalConfToParser(self.parser)
        self.parseOptions()
        if self.options.configfile:
            self.parser.defaults = self.getConfigFileDefaults(self.options.configfile)
            # We've updated the parser with defaults from configs, now we need
            # to reparse our command-line to get the correct overrides from
            # the command-line
            self.parseOptions()

        if should_log is not None:
            self.doesLogging = should_log

        if self.doesLogging:
            self.setupLogging()

    def buildParser(self):
        """
        Create the options parser

        Idempotent: does nothing when self.parser already exists.
        """
        if self.parser:
            return
        from Products.ZenModel.ZenossInfo import ZenossInfo
        try:
            zinfo = ZenossInfo('')
            version = str(zinfo.getZenossVersion())
        except Exception:
            # Fall back to the static version constant when ZenossInfo
            # cannot be queried.
            from Products.ZenModel.ZVersion import VERSION
            version = VERSION
        self.parser = OptionParser(usage=self.usage,
                                   version="%prog " + version,
                                   option_class=LogSeverityOption)


    def buildOptions(self):
        """
        Basic options setup. Other classes should call this before adding
        more options

        Registers the common logging option group (only when doesLogging is
        set) plus the config-file and documentation-generation flags shared
        by all Zenoss commands.
        """
        self.buildParser()
        if self.doesLogging:
            group = OptionGroup(self.parser, "Logging Options")
            group.add_option(
                '-v', '--logseverity',
                dest='logseverity', default='INFO', type='loglevel',
                help='Logging severity threshold',
            )
            group.add_option(
                '--logpath', dest='logpath', default=zenPath('log'), type='str',
                help='Override the default logging path; default $ZENHOME/log'
            )
            group.add_option(
                '--maxlogsize',
                dest='maxLogKiloBytes', default=10240, type='int',
                help='Max size of log file in KB; default 10240',
            )
            group.add_option(
                '--maxbackuplogs',
                dest='maxBackupLogs', default=3, type='int',
                help='Max number of back up log files; default 3',
            )
            self.parser.add_option_group(group)

        self.parser.add_option("-C", "--configfile",
                    dest="configfile",
                    help="Use an alternate configuration file" )

        # The three --gen* flags below cause parseOptions() to emit generated
        # documentation/config output and exit the process.
        self.parser.add_option("--genconf",
                               action="store_true",
                               default=False,
                               help="Generate a template configuration file" )

        self.parser.add_option("--genxmltable",
                               action="store_true",
                               default=False,
                               help="Generate a Docbook table showing command-line switches." )

        self.parser.add_option("--genxmlconfigs",
                               action="store_true",
                               default=False,
                               help="Generate an XML file containing command-line switches." )


    def parseOptions(self):
        """
        Uses the optparse parse previously populated and performs common options.
        """
        args = [] if self.noopts else self.inputArgs

        self.options, self.args = self.parser.parse_args(args=args)

        # Each of these generator flags prints its output and exits the
        # process from within the generator method.
        if self.options.genconf:
            self.generate_configs( self.parser, self.options )
        if self.options.genxmltable:
            self.generate_xml_table( self.parser, self.options )
        if self.options.genxmlconfigs:
            self.generate_xml_configs( self.parser, self.options )


    def getConfigFileDefaults(self, filename, correctErrors=True):
        # TODO: This should be refactored - duplicated code with CmdBase.
        """
        Parse a config file which has key-value pairs delimited by white space,
        and update the parser's option defaults with these values.

        @parameter filename: name of configuration file
        @type filename: string
        @parameter correctErrors: whether invalid lines should be commented
            out in the file (passed through to validateConfigFile)
        @type correctErrors: boolean
        @return: mapping of option dest names to their (possibly overridden)
            default values
        @rtype: dict
        """

        options = self.parser.get_default_values()
        lines = self.loadConfigFile(filename)
        if lines:
            lines, errors = self.validateConfigFile(filename, lines,
                                                    correctErrors=correctErrors)

            args = self.getParamatersFromConfig(lines)
            try:
                # Run the config-derived arguments through optparse itself so
                # type checking/conversion applies to the defaults as well.
                self.parser._process_args([], args, options)
            except (BadOptionError, OptionValueError) as err:
                print >>sys.stderr, 'WARN: %s in config file %s' % (err, filename)

        return options.__dict__


    def getGlobalConfigFileDefaults(self):
        # Deprecated: This method is going away - it is duplicated in GlobalConfig.py
        """
        Parse a config file which has key-value pairs delimited by white space,
        and update the parser's option defaults with these values.

        Reads $ZENHOME/etc/global.conf; unknown options are silently ignored
        since global.conf is shared by many commands.

        @return: mapping of option dest names to default values
        @rtype: dict
        """

        filename = zenPath('etc', 'global.conf')
        options = self.parser.get_default_values()
        lines = self.loadConfigFile(filename)
        if lines:
            args = self.getParamatersFromConfig(lines)

            try:
                self.parser._process_args([], args, options)
            except (BadOptionError, OptionValueError):
                # Ignore it, we only care about our own options as defined in the parser
                pass

        return options.__dict__


    def loadConfigFile(self, filename):
        # TODO: This should be refactored - duplicated code with CmdBase.
        """
        Parse a config file which has key-value pairs delimited by white space.

        @parameter filename: path to the configuration file
        @type filename: string
        @return: one dict per line: comments/blank lines become
            dict(type='comment', line=...); other lines become
            dict(type='option', line=..., key=..., value=...,
            option=<matching parser option or None>)
        @rtype: list
        """
        lines = []
        if not os.path.exists(filename):
            return lines
        try:
            # 'configFile' instead of 'file' to avoid shadowing the builtin.
            with open(filename) as configFile:
                for line in configFile:
                    if line.lstrip().startswith('#') or line.strip() == '':
                        lines.append(dict(type='comment', line=line))
                    else:
                        try:
                            # add default blank string for keys with no default value
                            # valid delimiters are space, ':' and/or '=' (see ZenUtils/config.py)
                            key, value = (re.split(r'[\s:=]+', line.strip(), 1) + ['',])[:2]
                        except ValueError:
                            lines.append(dict(type='option', line=line, key=line.strip(), value=None, option=None))
                        else:
                            option = self.parser.get_option('--%s' % key)
                            lines.append(dict(type='option', line=line, key=key, value=value, option=option))
        except IOError as e:
            # BUG FIX: the template previously contained the literal text
            # "(unknown)", so the filename= argument passed to .format() was
            # never used and the warning never named the offending file.
            errorMessage = ('WARN: unable to read config file {filename} '
                '-- skipping. ({exceptionName}: {exception})').format(
                filename=filename,
                exceptionName=e.__class__.__name__,
                exception=e
            )
            print >>sys.stderr, errorMessage
            return []

        return lines


    def validateConfigFile(self, filename, lines, correctErrors=True, warnErrors=True):
        """
        Validate config file lines which has key-value pairs delimited by white space,
        and validate that the keys exist for this command's option parser. If
        the option does not exist or has an empty value it will comment it out
        in the config file.

        @parameter filename: path to the configuration file
        @type filename: string
        @parameter lines: lines from config parser
        @type lines: list
        @parameter correctErrors: Whether or not invalid conf values should be
            commented out.
        @type correctErrors: boolean
        @parameter warnErrors: whether to print a WARN line to stderr for each
            invalid entry
        @type warnErrors: boolean
        @return: (validLines, errors) where errors is a list of
            (1-based line number, message) tuples
        @rtype: tuple
        """

        output = []
        errors = []
        validLines = []
        date = datetime.datetime.now().isoformat()
        # Template for the comment written above each invalidated line;
        # the escaped %%s is filled in later with the reason.
        errorTemplate = '## Commenting out by config parser (%s) on %s: %%s\n' % (
                sys.argv[0], date)

        for lineno, line in enumerate(lines):
            if line['type'] == 'comment':
                output.append(line['line'])
            elif line['type'] == 'option':
                if line['value'] is None:
                    errors.append((lineno + 1, 'missing value for "%s"' % line['key']))
                    output.append(errorTemplate % 'missing value')
                    output.append('## %s' % line['line'])
                elif line['option'] is None:
                    errors.append((lineno + 1, 'unknown option "%s"' % line['key']))
                    output.append(errorTemplate % 'unknown option')
                    output.append('## %s' % line['line'])
                else:
                    validLines.append(line)
                    output.append(line['line'])
            else:
                errors.append((lineno + 1, 'unknown line "%s"' % line['line']))
                output.append(errorTemplate % 'unknown line')
                output.append('## %s' % line['line'])

        if errors:
            if correctErrors:
                for lineno, message in errors:
                    print >>sys.stderr, 'INFO: Commenting out %s on line %d in %s' % (message, lineno, filename)

                # Rewrite the config file in place with bad lines commented out.
                with open(filename, 'w') as file:
                    file.writelines(output)

            if warnErrors:
                for lineno, message in errors:
                    print >>sys.stderr, 'WARN: %s on line %d in %s' % (message, lineno, filename)

        return validLines, errors


    def getParamatersFromConfig(self, lines):
        # Deprecated: This method is going away
        """Translate parsed config-file lines into optparse-style arguments."""
        arguments = _convertConfigLinesToArguments(self.parser, lines)
        return arguments


    def setupLogging(self):
        """
        Set common logging options

        Configures the root and "zen" loggers and installs a rotating file
        handler under the configured logpath (falls back to basicConfig when
        no logpath is available).  Side effect: sets self.log.
        """
        rlog = logging.getLogger()
        rlog.setLevel(logging.WARN)
        mname = self.__class__.__name__
        self.log = logging.getLogger("zen."+ mname)
        zlog = logging.getLogger("zen")
        try:
            # logseverity may be numeric ("10") or symbolic ("DEBUG").
            loglevel = int(self.options.logseverity)
        except ValueError:
            loglevel = getattr(logging, self.options.logseverity.upper(), logging.INFO)
        zlog.setLevel(loglevel)

        logdir = self.checkLogpath()
        if logdir:
            logfile = os.path.join(logdir, mname.lower()+".log")
            maxBytes = self.options.maxLogKiloBytes * 1024
            backupCount = self.options.maxBackupLogs
            h = logging.handlers.RotatingFileHandler(logfile, maxBytes=maxBytes,
                                                     backupCount=backupCount)
            h.setFormatter(logging.Formatter(
                "%(asctime)s %(levelname)s %(name)s: %(message)s",
                "%Y-%m-%d %H:%M:%S"))
            rlog.addHandler(h)
        else:
            logging.basicConfig()


    def checkLogpath(self):
        """
        Validate the logpath is valid

        Returns the log directory (creating it when missing), None when no
        logpath is configured, and raises SystemExit when the path cannot be
        used as a directory.
        """
        logdir = self.options.logpath
        if not logdir:
            return None
        if not os.path.exists(logdir):
            # try creating the directory hierarchy if it doesn't exist...
            try:
                os.makedirs(logdir)
            except OSError:
                raise SystemExit("logpath:%s doesn't exist and cannot be created" % logdir)
        elif not os.path.isdir(logdir):
            raise SystemExit("logpath:%s exists but is not a directory" % logdir)
        return logdir


    def pretty_print_config_comment( self, comment ):
        """
        Quick and dirty pretty printer for comments that happen to be longer than can comfortably
be seen on the display.

        Word-wraps *comment* near max_size columns (with text_window slack)
        and joins the wrapped pieces with "\\n# " so every continuation line
        reads as a config-file comment.
        """

        max_size= 40
        #
        # As a heuristic we'll accept strings that are +-  text_window
        # size in length.
        #
        text_window= 5

        # Short comments are returned untouched.
        if len( comment ) <= max_size + text_window:
             return comment

        #
        # First, take care of embedded newlines and expand them out to array entries
        #
        new_comment= []
        all_lines= comment.split( '\n' )
        for line in all_lines:
           if len(line) <= max_size + text_window:
                new_comment.append( line )
                continue

           # Search backwards from the target width for a space to break on.
           start_position= max_size - text_window
           while len(line) > max_size + text_window:
                index= line.find( ' ', start_position )
                if index > 0:
                     new_comment.append( line[ 0:index ] )
                     line= line[ index: ]

                else:
                     if start_position == 0:
                        #
                        # If we get here it means that the line is just one big string with no spaces
                        # in it.  There's nothing that we can do except print it out.  Doh!
                        #
                        new_comment.append( line )
                        break

                     #
                     # Okay, haven't found anything to split on -- go back and try again
                     #
                     start_position= start_position - text_window
                     if start_position < 0:
                        start_position= 0

           # while-else: runs only when the loop ends without break, i.e. the
           # remaining tail of the line still needs to be emitted.
           else:
                new_comment.append( line )

        return "\n# ".join( new_comment )



    def generate_configs( self, parser, options ):
        """
        Create a configuration file based on the long-form of the option names

        @parameter parser: an optparse parser object which contains defaults, help
        @parameter options: parsed options list containing actual values

        Note: prints the template to stdout and terminates the process via
        sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name= os.path.basename( sys.argv[0] )
        daemon_name= daemon_name.replace( '.py', '' )

        print """#
# Configuration file for %s
#
#  To enable a particular option, uncomment the desired entry.
#
# Parameter     Setting
# ---------     -------""" % ( daemon_name )


        options_to_ignore= ( 'help', 'version', '', 'genconf', 'genxmltable' )

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
                if opt.help is SUPPRESS_HELP:
                        continue

                #
                # Get rid of the short version of the command
                #
                option_name= re.sub( r'.*/--', '', "%s" % opt )

                #
                # And what if there's no short version?
                #
                option_name= re.sub( r'^--', '', "%s" % option_name )

                #
                # Don't display anything we shouldn't be displaying
                #
                if option_name in options_to_ignore:
                        continue

                #
                # Find the actual value specified on the command line, if any,
                # and display it
                #

                value= getattr( parser.values,  opt.dest )

                default_value= parser.defaults.get( opt.dest )
                if default_value is NO_DEFAULT or default_value is None:
                        default_value= ""
                default_string= ""
                if default_value != "":
                        default_string= ", default: " + str( default_value )

                comment=  self.pretty_print_config_comment( opt.help + default_string )

                #
                # NB: I would prefer to use tabs to separate the parameter name
                #     and value, but I don't know that this would work.
                #
                print """#
# %s
#%s %s""" % ( comment, option_name, value )

        #
        # Pretty print and exit
        #
        print "#"
        sys.exit( 0 )



    def generate_xml_table( self, parser, options ):
        """
        Create a Docbook table based on the long-form of the option names

        @parameter parser: an optparse parser object which contains defaults, help
        @parameter options: parsed options list containing actual values

        Note: prints the Docbook XML to stdout and terminates the process via
        sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name= os.path.basename( sys.argv[0] )
        daemon_name= daemon_name.replace( '.py', '' )

        print """<?xml version="1.0" encoding="UTF-8"?>

<section version="4.0" xmlns="http://docbook.org/ns/docbook"
   xmlns:xlink="http://www.w3.org/1999/xlink"
   xmlns:xi="http://www.w3.org/2001/XInclude"
   xmlns:svg="http://www.w3.org/2000/svg"
   xmlns:mml="http://www.w3.org/1998/Math/MathML"
   xmlns:html="http://www.w3.org/1999/xhtml"
   xmlns:db="http://docbook.org/ns/docbook"

  xml:id="%s.options"
>

<title>%s Options</title>
<para />
<table frame="all">
  <caption>%s <indexterm><primary>Daemons</primary><secondary>%s</secondary></indexterm> options</caption>
<tgroup cols="2">
<colspec colname="option" colwidth="1*" />
<colspec colname="description" colwidth="2*" />
<thead>
<row>
<entry> <para>Option</para> </entry>
<entry> <para>Description</para> </entry>
</row>
</thead>
<tbody>
""" % ( daemon_name, daemon_name, daemon_name, daemon_name )


        options_to_ignore= ( 'help', 'version', '', 'genconf', 'genxmltable' )

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
                if opt.help is SUPPRESS_HELP:
                        continue

                #
                # Create a Docbook-happy version of the option strings
                # Yes, <arg></arg> would be better semantically, but the output
                # just looks goofy in a table.  Use literal instead.
                #
                all_options= '<literal>' + re.sub( r'/', '</literal>,</para> <para><literal>', "%s" % opt ) + '</literal>'

                #
                # Don't display anything we shouldn't be displaying
                #
                option_name= re.sub( r'.*/--', '', "%s" % opt )
                option_name= re.sub( r'^--', '', "%s" % option_name )
                if option_name in options_to_ignore:
                        continue

                default_value= parser.defaults.get( opt.dest )
                if default_value is NO_DEFAULT or default_value is None:
                        default_value= ""
                default_string= ""
                if default_value != "":
                        default_string= "<para> Default: <literal>" + str( default_value ) + "</literal></para>\n"

                comment= self.pretty_print_config_comment( opt.help )

#
# TODO: Determine the variable name used and display the --option_name=variable_name
#
                # Boolean flags take no argument; everything else shows a
                # =<replaceable> placeholder for its value.
                if opt.action in [ 'store_true', 'store_false' ]:
                   print """<row>
<entry> <para>%s</para> </entry>
<entry>
<para>%s</para>
%s</entry>
</row>
""" % ( all_options, comment, default_string )

                else:
                   target= '=<replaceable>' +  opt.dest.lower() + '</replaceable>'
                   all_options= all_options + target
                   all_options= re.sub( r',', target + ',', all_options )
                   print """<row>
<entry> <para>%s</para> </entry>
<entry>
<para>%s</para>
%s</entry>
</row>
""" % ( all_options, comment, default_string )



        #
        # Close the table elements
        #
        print """</tbody></tgroup>
</table>
<para />
</section>
"""
        sys.exit( 0 )



    def generate_xml_configs( self, parser, options ):
        """
        Create an XML file that can be used to create Docbook files
        as well as used as the basis for GUI-based daemon option
        configuration.

        Note: prints the XML to stdout and terminates the process via
        sys.exit(0).
        """

        #
        # Header for the configuration file
        #
        unused(options)
        daemon_name= os.path.basename( sys.argv[0] )
        daemon_name= daemon_name.replace( '.py', '' )

        export_date = datetime.datetime.now()

        print """<?xml version="1.0" encoding="UTF-8"?>

<!-- Default daemon configuration generated on %s -->
<configuration id="%s" >

""" % ( export_date, daemon_name )

        options_to_ignore= (
            'help', 'version', '', 'genconf', 'genxmltable',
            'genxmlconfigs',
        )

        #
        # Create an entry for each of the command line flags
        #
        # NB: Ideally, this should print out only the option parser dest
        #     entries, rather than the command line options.
        #
        import re
        for opt in getAllParserOptionsGen(parser):
                if opt.help is SUPPRESS_HELP:
                        continue

                #
                # Don't display anything we shouldn't be displaying
                #
                option_name= re.sub( r'.*/--', '', "%s" % opt )
                option_name= re.sub( r'^--', '', "%s" % option_name )
                if option_name in options_to_ignore:
                        continue

                default_value= parser.defaults.get( opt.dest )
                if default_value is NO_DEFAULT or default_value is None:
                        default_string= ""
                else:
                        default_string= str( default_value )

#
# TODO: Determine the variable name used and display the --option_name=variable_name
#
                # Boolean flags are emitted as type="boolean"; all other
                # options carry their optparse type and a target attribute.
                if opt.action in [ 'store_true', 'store_false' ]:
                   print """    <option id="%s" type="%s" default="%s" help="%s" />
""" % ( option_name, "boolean", default_string, quote(opt.help),  )

                else:
                   target= opt.dest.lower()
                   print """    <option id="%s" type="%s" default="%s" target="%s" help="%s" />
""" % ( option_name, opt.type, quote(default_string), target, quote(opt.help), )


        #
        # Close the table elements
        #
        print """
</configuration>
"""
        sys.exit( 0 )
Example #24
0
def main_i():
    """internal main routine

    parse command line, decide what action will be taken;
    we can either:
    - query/manipulate configuration
    - format gsyncd urls using gsyncd's url parsing engine
    - start service in following modes, in given stages:
      - agent: startup(), ChangelogAgent()
      - monitor: startup(), monitor()
      - master: startup(), connect_remote(), connect(), service_loop()
      - slave: startup(), connect(), service_loop()
    """
    # rconf holds "runtime-only" settings captured by the store_local*
    # callbacks below; these deliberately bypass the OptionParser values
    # object so they never get merged into the persisted configuration.
    rconf = {'go_daemon': 'should'}

    # Option callback: store the value as an absolute path.  A value of
    # '-' is kept verbatim (it conventionally stands for stdout/stderr
    # in the log-file options).
    def store_abs(opt, optstr, val, parser):
        if val and val != '-':
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)

    # Option callback: store the value in rconf instead of parser.values.
    def store_local(opt, optstr, val, parser):
        rconf[opt.dest] = val

    # Build a store_local callback that always records the fixed value
    # *val*, ignoring whatever was parsed from the command line.
    def store_local_curry(val):
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    # Build a store_local callback that wraps the parsed value in a
    # FreeObject tagged with operation *op*; *dmake* maps the parsed
    # value to the extra attributes of that object.
    def store_local_obj(op, dmake):
        return lambda o, oo, vx, p: store_local(
            o, oo, FreeObject(op=op, **dmake(vx)), p)

    # --- command line definition -------------------------------------
    op = OptionParser(
        usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    op.add_option('--gluster-command-dir', metavar='DIR', default='')
    op.add_option('--gluster-log-file', metavar='LOGF',
                  default=os.devnull, type=str, action='callback',
                  callback=store_abs)
    op.add_option('--gluster-log-level', metavar='LVL')
    op.add_option('--gluster-params', metavar='PRMS', default='')
    op.add_option(
        '--glusterd-uuid', metavar='UUID', type=str, default='',
        help=SUPPRESS_HELP)
    op.add_option(
        '--gluster-cli-options', metavar='OPTS', default='--log-file=-')
    op.add_option('--mountbroker', metavar='LABEL')
    op.add_option('-p', '--pid-file', metavar='PIDF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('-l', '--log-file', metavar='LOGF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--iprefix',  metavar='LOGD',  type=str,
                  action='callback', callback=store_abs)
    op.add_option('--changelog-log-file',  metavar='LOGF',  type=str,
                  action='callback', callback=store_abs)
    op.add_option('--log-file-mbr', metavar='LOGF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--state-file', metavar='STATF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--state-detail-file', metavar='STATF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--georep-session-working-dir', metavar='STATF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--ignore-deletes', default=False, action='store_true')
    op.add_option('--isolated-slave', default=False, action='store_true')
    op.add_option('--use-rsync-xattrs', default=False, action='store_true')
    op.add_option('--pause-on-start', default=False, action='store_true')
    op.add_option('-L', '--log-level', metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',
                  default=os.path.abspath(sys.argv[0]))
    op.add_option('--volume-id', metavar='UUID')
    op.add_option('--slave-id', metavar='ID')
    op.add_option('--session-owner', metavar='ID')
    op.add_option('--local-id', metavar='ID', help=SUPPRESS_HELP, default='')
    op.add_option(
        '--local-path', metavar='PATH', help=SUPPRESS_HELP, default='')
    op.add_option('-s', '--ssh-command', metavar='CMD', default='ssh')
    op.add_option('--ssh-command-tar', metavar='CMD', default='ssh')
    op.add_option('--rsync-command', metavar='CMD', default='rsync')
    op.add_option('--rsync-options', metavar='OPTS', default='')
    op.add_option('--rsync-ssh-options', metavar='OPTS', default='--compress')
    op.add_option('--timeout', metavar='SEC', type=int, default=120)
    op.add_option('--connection-timeout', metavar='SEC',
                  type=int, default=60, help=SUPPRESS_HELP)
    op.add_option('--sync-jobs', metavar='N', type=int, default=3)
    op.add_option('--replica-failover-interval', metavar='N',
                  type=int, default=1)
    op.add_option(
        '--turns', metavar='N', type=int, default=0, help=SUPPRESS_HELP)
    op.add_option('--allow-network', metavar='IPS', default='')
    op.add_option('--socketdir', metavar='DIR')
    op.add_option('--state-socket-unencoded', metavar='SOCKF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--checkpoint', metavar='LABEL', default='')

    # tunables for failover/failback mechanism:
    # None   - gsyncd behaves as normal
    # blind  - gsyncd works with xtime pairs to identify
    #          candidates for synchronization
    # wrapup - same as normal mode but does not assign
    #          xtimes to orphaned files
    # see crawl() for usage of the above tunables
    op.add_option('--special-sync-mode', type=str, help=SUPPRESS_HELP)

    # changelog or xtime? (TODO: Change the default)
    op.add_option(
        '--change-detector', metavar='MODE', type=str, default='xtime')
    # sleep interval for change detection (xtime crawl uses a hardcoded 1
    # second sleep time)
    op.add_option('--change-interval', metavar='SEC', type=int, default=3)
    # working directory for changelog based mechanism
    op.add_option('--working-dir', metavar='DIR', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--use-tarssh', default=False, action='store_true')

    op.add_option('-c', '--config-file', metavar='CONF',
                  type=str, action='callback', callback=store_local)
    # dest must be given explicitly for callback-only options, otherwise
    # optparse would derive the destination as None
    op.add_option('--monitor', dest='monitor', action='callback',
                  callback=store_local_curry(True))
    op.add_option('--agent', dest='agent', action='callback',
                  callback=store_local_curry(True))
    op.add_option('--resource-local', dest='resource_local',
                  type=str, action='callback', callback=store_local)
    op.add_option('--resource-remote', dest='resource_remote',
                  type=str, action='callback', callback=store_local)
    op.add_option('--feedback-fd', dest='feedback_fd', type=int,
                  help=SUPPRESS_HELP, action='callback', callback=store_local)
    op.add_option('--rpc-fd', dest='rpc_fd', type=str, help=SUPPRESS_HELP)
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,
                  action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",
                  action='callback', callback=store_local_curry('dont'))
    op.add_option('--verify', type=str, dest="verify",
                  action='callback', callback=store_local)
    op.add_option('--create', type=str, dest="create",
                  action='callback', callback=store_local)
    op.add_option('--delete', dest='delete', action='callback',
                  callback=store_local_curry(True))
    # --debug implies: no daemonization, log to stdout, DEBUG level
    op.add_option('--debug', dest="go_daemon", action='callback',
                  callback=lambda *a: (store_local_curry('dont')(*a),
                                       setattr(
                                           a[-1].values, 'log_file', '-'),
                                       setattr(a[-1].values, 'log_level',
                                               'DEBUG'),
                                       setattr(a[-1].values,
                                               'changelog_log_file', '-')))
    op.add_option('--path', type=str, action='append')

    for a in ('check', 'get'):
        op.add_option('--config-' + a, metavar='OPT', type=str, dest='config',
                      action='callback',
                      callback=store_local_obj(a, lambda vx: {'opt': vx}))
    op.add_option('--config-get-all', dest='config', action='callback',
                  callback=store_local_obj('get', lambda vx: {'opt': None}))
    for m in ('', '-rx', '-glob'):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able
        # to inject (a value depending on the)
        # iteration variable into the inner lambda
        def conf_mod_opt_regex_variant(rx):
            op.add_option('--config-set' + m, metavar='OPT VAL', type=str,
                          nargs=2, dest='config', action='callback',
                          callback=store_local_obj('set', lambda vx: {
                              'opt': vx[0], 'val': vx[1], 'rx': rx}))
            op.add_option('--config-del' + m, metavar='OPT', type=str,
                          dest='config', action='callback',
                          callback=store_local_obj('del', lambda vx: {
                              'opt': vx, 'rx': rx}))
        conf_mod_opt_regex_variant(m and m[1:] or False)

    op.add_option('--normalize-url', dest='url_print',
                  action='callback', callback=store_local_curry('normal'))
    op.add_option('--canonicalize-url', dest='url_print',
                  action='callback', callback=store_local_curry('canon'))
    op.add_option('--canonicalize-escape-url', dest='url_print',
                  action='callback', callback=store_local_curry('canon_esc'))

    # tunables: option names (normalized via norm()) that may legally be
    # queried/set through the --config-* machinery.
    # NOTE(review): 'store_true' in the callback membership test compares
    # a callback against a string, which can never match; it looks like
    # the filter was meant to involve o.action as well — confirm intent.
    tunables = [norm(o.get_opt_string()[2:])
                for o in op.option_list
                if (o.callback in (store_abs, 'store_true', None) and
                    o.get_opt_string() not in ('--version', '--help'))]
    # tunables that a restricted (forced-command SSH) peer is allowed to
    # pass, and the ones it is *required* to pass with given values
    remote_tunables = ['listen', 'go_daemon', 'timeout',
                       'session_owner', 'config_file', 'use_rsync_xattrs']
    rq_remote_tunables = {'listen': True}

    # precedence for sources of values: 1) commandline, 2) cfg file, 3)
    # defaults for this to work out we need to tell apart defaults from
    # explicitly set options... so churn out the defaults here and call
    # the parser with virgin values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    args_orig = args[:]
    # --resource-local/--resource-remote override the positional
    # <master>/<slave> arguments respectively
    r = rconf.get('resource_local')
    if r:
        if len(args) == 0:
            args.append(None)
        args[0] = r
    r = rconf.get('resource_remote')
    if r:
        if len(args) == 0:
            raise GsyncdError('local resource unspecfied')
        elif len(args) == 1:
            args.append(None)
        args[1] = r
    confdata = rconf.get('config')
    # sanity check on the number of positional arguments for the
    # requested mode of operation
    if not (len(args) == 2 or
            (len(args) == 1 and rconf.get('listen')) or
            (len(args) <= 2 and confdata) or
            rconf.get('url_print')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    # --verify only proves that this script can be spawned, then quits
    verify = rconf.get('verify')
    if verify:
        logging.info(verify)
        logging.info("Able to spawn gsyncd.py")
        return

    # when invoked through a restricted SSH forced command, only a
    # whitelisted set of tunables may be (and some must be) supplied
    restricted = os.getenv('_GSYNCD_RESTRICTED_')

    if restricted:
        allopts = {}
        allopts.update(opts.__dict__)
        allopts.update(rconf)
        bannedtuns = set(allopts.keys()) - set(remote_tunables)
        if bannedtuns:
            raise GsyncdError('following tunables cannot be set with '
                              'restricted SSH invocaton: ' +
                              ', '.join(bannedtuns))
        for k, v in rq_remote_tunables.items():
            if not k in allopts or allopts[k] != v:
                raise GsyncdError('tunable %s is not set to value %s required '
                                  'for restricted SSH invocaton' %
                                  (k, v))

    confrx = getattr(confdata, 'rx', None)

    # Parse the positional args into (resources, local, remote); with
    # check=True also verify that local can talk to remote.
    def makersc(aa, check=True):
        if not aa:
            return ([], None, None)
        ra = [resource.parse_url(u) for u in aa]
        local = ra[0]
        remote = None
        if len(ra) > 1:
            remote = ra[1]
        if check and not local.can_connect_to(remote):
            raise GsyncdError("%s cannot work with %s" %
                              (local.path, remote and remote.path))
        return (ra, local, remote)
    if confrx:
        # peers are regexen, don't try to parse them
        if confrx == 'glob':
            args = ['\A' + fnmatch.translate(a) for a in args]
        canon_peers = args
        namedict = {}
    else:
        dc = rconf.get('url_print')
        rscs, local, remote = makersc(args_orig, not dc)
        if dc:
            # URL formatting mode: print each resource in the requested
            # normalization and quit
            for r in rscs:
                print(r.get_url(**{'normal': {},
                                   'canon': {'canonical': True},
                                   'canon_esc': {'canonical': True,
                                                 'escaped': True}}[dc]))
            return
        # pa collects three parallel url lists: plain, canonical,
        # canonical+escaped
        pa = ([], [], [])
        urlprms = (
            {}, {'canonical': True}, {'canonical': True, 'escaped': True})
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        _, canon_peers, canon_esc_peers = pa
        # creating the namedict, a dict representing various ways of referring
        # to / representing peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[
                0].upper() + x[1:], lambda x: 'e' + x[0].upper() + x[1:])
        if remote:
            rmap = {local: ('local', 'master'), remote: ('remote', 'slave')}
        else:
            rmap = {local: ('local', 'slave')}
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                namedict[name + 'vol'] = x.volume
                if name == 'remote':
                    namedict['remotehost'] = x.remotehost
    if not 'config_file' in rconf:
        rconf['config_file'] = os.path.join(
            os.path.dirname(sys.argv[0]), "conf/gsyncd_template.conf")

    upgrade_config_file(rconf['config_file'])
    gcnf = GConffile(
        rconf['config_file'], canon_peers,
        defaults.__dict__, opts.__dict__, namedict)

    # --- configuration query/manipulation mode -----------------------
    checkpoint_change = False
    if confdata:
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == 'check':
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise GsyncdError("not a valid option: " + confdata.opt)
        if confdata.op == 'get':
            gcnf.get(confdata.opt)
        elif confdata.op == 'set':
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == 'del':
            gcnf.delete(confdata.opt, confdata.rx)
        # when modifying checkpoint, it's important to make a log
        # of that, so in that case we go on to set up logging even
        # if its just config invocation
        if confdata.opt == 'checkpoint' and confdata.op in ('set', 'del') and \
           not confdata.rx:
            checkpoint_change = True
        if not checkpoint_change:
            return

    # merge values into gconf with the documented precedence:
    # defaults first, then config file, then explicit CLI options
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    # --- session delete mode -----------------------------------------
    delete = rconf.get('delete')
    if delete:
        logging.info('geo-replication delete')
        # Delete pid file, status file, socket file
        cleanup_paths = []
        if getattr(gconf, 'pid_file', None):
            cleanup_paths.append(gconf.pid_file)

        if getattr(gconf, 'state_file', None):
            cleanup_paths.append(gconf.state_file)

        if getattr(gconf, 'state_detail_file', None):
            cleanup_paths.append(gconf.state_detail_file)

        if getattr(gconf, 'state_socket_unencoded', None):
            cleanup_paths.append(gconf.state_socket_unencoded)

        # NOTE(review): [:-11] apparently strips a trailing file name of
        # 11 characters (presumably "gsyncd.conf") to glob for session
        # files in the config directory — confirm against the deployed
        # config file naming
        cleanup_paths.append(rconf['config_file'][:-11] + "*")

        # Cleanup changelog working dirs
        if getattr(gconf, 'working_dir', None):
            try:
                shutil.rmtree(gconf.working_dir)
            except (IOError, OSError):
                if sys.exc_info()[1].errno == ENOENT:
                    pass
                else:
                    raise GsyncdError(
                        'Error while removing working dir: %s' %
                        gconf.working_dir)

        for path in cleanup_paths:
            # To delete temp files
            for f in glob.glob(path + "*"):
                _unlink(f)
        return

    # restricted invocations may additionally be limited to a set of
    # client networks; check the SSH client address against them
    if restricted and gconf.allow_network:
        ssh_conn = os.getenv('SSH_CONNECTION')
        if not ssh_conn:
            # legacy env var
            ssh_conn = os.getenv('SSH_CLIENT')
        if ssh_conn:
            allowed_networks = [IPNetwork(a)
                                for a in gconf.allow_network.split(',')]
            client_ip = IPAddress(ssh_conn.split()[0])
            allowed = False
            for nw in allowed_networks:
                if client_ip in nw:
                    allowed = True
                    break
            if not allowed:
                raise GsyncdError("client IP address is not allowed")

    ffd = rconf.get('feedback_fd')
    if ffd:
        # keep the feedback fd from leaking into spawned children
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    # normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # logging.getLevelName echoes back "Level <name>" for names it
        # does not know -- that is how we detect an invalid level here
        if lvl2 == "Level " + lvl1:
            raise GsyncdError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    # unprivileged (mountbroker) invocations log to the mbr log file
    if not privileged() and gconf.log_file_mbr:
        gconf.log_file = gconf.log_file_mbr

    # --- checkpoint bookkeeping (logged config change) ---------------
    if checkpoint_change:
        try:
            GLogger._gsyncd_loginit(log_file=gconf.log_file, label='conf')
            if confdata.op == 'set':
                logging.info('checkpoint %s set' % confdata.val)
                gcnf.delete('checkpoint_completed')
                gcnf.delete('checkpoint_target')
            elif confdata.op == 'del':
                logging.info('checkpoint info was reset')
                # if it is removing 'checkpoint' then we need
                # to remove 'checkpoint_completed' and 'checkpoint_target' too
                gcnf.delete('checkpoint_completed')
                gcnf.delete('checkpoint_target')

        except IOError:
            if sys.exc_info()[1].errno == ENOENT:
                # directory of log path is not present,
                # which happens if we get here from
                # a peer-multiplexed "config-set checkpoint"
                # (as that directory is created only on the
                # original node)
                pass
            else:
                raise
        return

    # --create just records the given status string in the state file
    create = rconf.get('create')
    if create:
        if getattr(gconf, 'state_file', None):
            update_file(gconf.state_file, lambda f: f.write(create + '\n'))
        return

    # --- service startup (agent / monitor / master / slave) ----------
    go_daemon = rconf['go_daemon']
    be_monitor = rconf.get('monitor')
    be_agent = rconf.get('agent')

    rscs, local, remote = makersc(args)
    if not be_monitor and isinstance(remote, resource.SSH) and \
       go_daemon == 'should':
        # defer daemonization until the remote connection is set up
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = 'monitor'
    elif be_agent:
        label = 'agent'
    elif remote:
        # master
        label = gconf.local_path
    else:
        label = 'slave'
    startup(go_daemon=go_daemon, log_file=log_file, label=label)
    resource.Popen.init_errhandler()

    if be_agent:
        os.setsid()
        logging.debug('rpc_fd: %s' % repr(gconf.rpc_fd))
        return agent(Changelog(), gconf.rpc_fd)

    if be_monitor:
        return monitor(*rscs)

    logging.info("syncing: %s" % " -> ".join(r.url for r in rscs))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    if ffd:
        # signal readiness to the spawner, then drop the feedback fd
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])
Example #25
0
class TestStandard(BaseTest):
    """Exercise basic optparse parsing behaviour.

    Covers option/argument splitting and joining, typed values, appending
    options, defaults, long-option abbreviation and error reporting.

    Relies on the BaseTest helpers:
      - assertParseFail(args, message): parsing must abort with *message*
      - assertParseOK(args, expected_options_dict, expected_positional_args)

    Fix over the previous revision: removed the stray trailing commas after
    several assertParseOK(...) calls, which silently turned those statements
    into discarded one-element tuples.
    """

    def setUp(self):
        # One plain string option, one typed int option (with both short
        # and long form) and one appending option cover the main
        # value-handling code paths of optparse.
        options = [
            make_option("-a", type="string"),
            make_option("-b", "--boo", type="int", dest='boo'),
            make_option("--foo", action="append")
        ]

        self.parser = OptionParser(usage=SUPPRESS_USAGE, option_list=options)

    def test_required_value(self):
        """An option that takes a value fails without one."""
        self.assertParseFail(["-a"], "-a option requires a value")

    def test_invalid_integer(self):
        """A non-numeric value for a typed int option is rejected."""
        self.assertParseFail(["-b", "5x"],
                             "option -b: invalid integer value: '5x'")

    def test_no_such_option(self):
        """An unknown long option is reported."""
        self.assertParseFail(["--boo13"], "no such option: --boo13")

    def test_long_invalid_integer(self):
        self.assertParseFail(["--boo=x5"],
                             "option --boo: invalid integer value: 'x5'")

    def test_empty(self):
        """With no arguments every destination defaults to None."""
        self.assertParseOK([], {'a': None, 'boo': None, 'foo': None}, [])

    def test_shortopt_empty_longopt_append(self):
        self.assertParseOK(["-a", "", "--foo=blah", "--foo="], {
            'a': "",
            'boo': None,
            'foo': ["blah", ""]
        }, [])

    def test_long_option_append(self):
        self.assertParseOK(["--foo", "bar", "--foo", "", "--foo=x"], {
            'a': None,
            'boo': None,
            'foo': ["bar", "", "x"]
        }, [])

    def test_option_argument_joined(self):
        self.assertParseOK(["-abc"], {'a': "bc", 'boo': None, 'foo': None}, [])

    def test_option_argument_split(self):
        self.assertParseOK(["-a", "34"], {
            'a': "34",
            'boo': None,
            'foo': None
        }, [])

    def test_option_argument_joined_integer(self):
        self.assertParseOK(["-b34"], {'a': None, 'boo': 34, 'foo': None}, [])

    def test_option_argument_split_negative_integer(self):
        self.assertParseOK(["-b", "-5"], {
            'a': None,
            'boo': -5,
            'foo': None
        }, [])

    def test_long_option_argument_joined(self):
        self.assertParseOK(["--boo=13"], {
            'a': None,
            'boo': 13,
            'foo': None
        }, [])

    def test_long_option_argument_split(self):
        self.assertParseOK(["--boo", "111"], {
            'a': None,
            'boo': 111,
            'foo': None
        }, [])

    def test_long_option_short_option(self):
        self.assertParseOK(["--foo=bar", "-axyz"], {
            'a': 'xyz',
            'boo': None,
            'foo': ["bar"]
        }, [])

    def test_abbrev_long_option(self):
        """An unambiguous long-option prefix is accepted."""
        self.assertParseOK(["--f=bar", "-axyz"], {
            'a': 'xyz',
            'boo': None,
            'foo': ["bar"]
        }, [])

    def test_defaults(self):
        """Parsing no arguments yields exactly the parser defaults."""
        (options, args) = self.parser.parse_args([])
        defaults = self.parser.get_default_values()
        self.assertEqual(vars(defaults), vars(options))

    def test_ambiguous_option(self):
        """Once --foz exists, the prefix --f no longer resolves uniquely."""
        self.parser.add_option("--foz",
                               action="store",
                               type="string",
                               dest="foo")
        possibilities = ", ".join({"--foz": None, "--foo": None}.keys())
        self.assertParseFail(["--f=bar"],
                             "ambiguous option: --f (%s?)" % possibilities)

    def test_short_and_long_option_split(self):
        self.assertParseOK(["-a", "xyz", "--foo", "bar"], {
            'a': 'xyz',
            'boo': None,
            'foo': ["bar"]
        }, [])

    def test_short_option_split_long_option_append(self):
        self.assertParseOK(["--foo=bar", "-b", "123", "--foo", "baz"], {
            'a': None,
            'boo': 123,
            'foo': ["bar", "baz"]
        }, [])

    def test_short_option_split_one_positional_arg(self):
        self.assertParseOK(["-a", "foo", "bar"], {
            'a': "foo",
            'boo': None,
            'foo': None
        }, ["bar"])

    def test_short_option_consumes_separator(self):
        # "--" directly after -a is consumed as the option's value, so it
        # does not terminate option processing here
        self.assertParseOK(["-a", "--", "foo", "bar"], {
            'a': "--",
            'boo': None,
            'foo': None
        }, ["foo", "bar"])

    def test_short_option_joined_and_separator(self):
        self.assertParseOK(["-ab", "--", "--foo", "bar"], {
            'a': "b",
            'boo': None,
            'foo': None
        }, ["--foo", "bar"])

    def test_invalid_option_becomes_positional_arg(self):
        # a lone "-" is not an option, so it becomes a positional arg
        self.assertParseOK(["-ab", "-", "--foo", "bar"], {
            'a': "b",
            'boo': None,
            'foo': ["bar"]
        }, ["-"])

    def test_no_append_versus_append(self):
        # a plain "store" option keeps only the last value; "append" keeps all
        self.assertParseOK(["-b3", "-b", "5", "--foo=bar", "--foo", "baz"], {
            'a': None,
            'boo': 5,
            'foo': ["bar", "baz"]
        }, [])

    def test_option_consumes_optionlike_string(self):
        self.assertParseOK(["-a", "-b3"], {
            'a': "-b3",
            'boo': None,
            'foo': None
        }, [])
Example #26
0
def main(args):
    """Command-line entry point: parse options, build the configuration
    and set up the repository to analyze.

    NOTE(review): this excerpt appears truncated — the flow stops right
    after creating the repository from a local path; the continuation
    (handling non-path URIs, running the analysis) is not visible here.
    Uses Python 2 syntax (``except Exception, e``).
    """
    parser = OptionParser(usage='%prog [ options ... ] URI [ FILES ]',
                          description='Analyze repository modifications',
                          version=VERSION)
    # stop option parsing at the first positional argument, so that
    # FILES beginning with '-' are not swallowed as options
    parser.disable_interspersed_args()
    parser.add_option('-g',
                      '--debug',
                      dest='debug',
                      action="store_true",
                      default=False,
                      help="Run in debug mode")
    parser.add_option('-c',
                      '--config-file',
                      dest='config_file',
                      metavar='FILE',
                      help="Use a custom configuration file")
    parser.add_option('-r',
                      '--revision',
                      dest='revision',
                      metavar='REV',
                      help='Revision to analyze (HEAD)')
    parser.add_option('-f',
                      '--fast',
                      dest='fast',
                      action="store_true",
                      default=False,
                      help="Run faster but moves and copies are not detected")
    parser.add_option('-o',
                      '--output',
                      dest='output',
                      default='text',
                      help='Output type [text|db|xml|csv] (%default)')
    add_outputs_options(parser)

    # Save default values and pass an emtpy Values object to
    # parser_args, so that default values are not set. We need it
    # to know whether a value has been provided by the user or not
    # After parsing the command line we complete the config options
    # with the default values for the options that have not been set
    # by the parser or by a config file
    defaults = parser.get_default_values()
    options, args = parser.parse_args(args, values=Values())

    # options.config_file is missing from the Values object when the
    # user did not pass it (no defaults were applied), hence the
    # AttributeError fallback
    try:
        config = Config(options.config_file)
    except AttributeError:
        config = Config()

    # precedence: explicit CLI options first, then defaults fill the gaps
    config.update(options.__dict__)
    config.add(defaults.__dict__)

    if not args:
        # parser.error() exits; the return is a formal fallback
        parser.error("missing required repository URI")
        return 1

    parser.destroy()

    if config.debug:
        import repositoryhandler.backends
        repositoryhandler.backends.DEBUG = True

    uri = args[0]
    files = args[1:]
    # '-' as the single FILE means: read the file list from stdin
    files_from_stdin = (files and files[0] == '-')

    # Create repository
    path = uri_to_filename(uri)
    if path is not None:
        try:
            repo = create_repository_from_path(path)
        except RepositoryUnknownError:
            printerr(
                "Path %s doesn't seem to point to a repository supported by guilty",
                (path, ))
            return 1
        except Exception, e:
            printerr("Unknown error creating repository for path %s (%s)",
                     (path, str(e)))
            return 1
        uri = repo.get_uri_for_path(path)
# Build a one-option command line: -p/--pol selects the polynomial degree.
# NOTE(review): parse_args() runs at module import time (before the
# __main__ guard), so importing this module consumes sys.argv — confirm
# this file is only ever executed as a script.
parser = OptionParser()
parser.add_option('-p',
                  '--pol',
                  dest='poldeg',
                  type="int",
                  default=1,
                  help="polinom degree")

(options, args) = parser.parse_args()

if __name__ == "__main__":
    # dispatch on the requested degree; pol01/pol02 are presumably
    # defined elsewhere in this file — not visible in this excerpt
    if options.poldeg == 1:
        pol01()
    elif options.poldeg == 2:
        pol02()
    else:
        # any other degree: dump parser/option diagnostics instead
        parser.print_help()
        print('parser.usage = ', parser.usage)
        print(options)
        print(type(options))
        print(args)
        print(type(args))
        print('len(args) = ', len(args))
        print('type(options.poldeg)        = ', type(options.poldeg))
        print('parser.get_usage()          = ', parser.get_usage())
        print('parser.get_default_values() = ', parser.get_default_values())
        print('parser.get_prog_name()      = ', parser.get_prog_name())
        #print(parser.get_version())
        #print(parser.get_description())
        #print(parser.get_option_group())
Example #28
0
def main_i():
    """internal main routine

    parse command line, decide what action will be taken;
    we can either:
    - query/manipulate configuration
    - format gsyncd urls using gsyncd's url parsing engine
    - start service in following modes, in given stages:
      - agent: startup(), ChangelogAgent()
      - monitor: startup(), monitor()
      - master: startup(), connect_remote(), connect(), service_loop()
      - slave: startup(), connect(), service_loop()
    """
    # "runtime config": invocation-level settings that must stay out of the
    # parsed option values; filled in by the store_local* callbacks below.
    rconf = {'go_daemon': 'should'}

    def store_abs(opt, optstr, val, parser):
        # Normalize path-valued options to absolute paths; '-' (the
        # stdout/stderr convention) is passed through unchanged.
        if val and val != '-':
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)

    def store_local(opt, optstr, val, parser):
        # Divert the option into rconf instead of the parser's values object.
        rconf[opt.dest] = val

    def store_local_curry(val):
        # store_local variant storing a fixed value, ignoring the argument.
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    def store_local_obj(op, dmake):
        # store_local variant wrapping the value in a FreeObject tagged with
        # operation `op`; used by the --config-* option family.
        return lambda o, oo, vx, p: store_local(
            o, oo, FreeObject(op=op, **dmake(vx)), p)

    op = OptionParser(
        usage="%prog [options...] <master> <slave>", version="%prog 0.0.1")
    # --- session/service tunables (stored in the parsed values) ---
    op.add_option('--gluster-command-dir', metavar='DIR', default='')
    op.add_option('--gluster-log-file', metavar='LOGF',
                  default=os.devnull, type=str, action='callback',
                  callback=store_abs)
    op.add_option('--gluster-log-level', metavar='LVL')
    op.add_option('--gluster-params', metavar='PRMS', default='')
    op.add_option(
        '--glusterd-uuid', metavar='UUID', type=str, default='',
        help=SUPPRESS_HELP)
    op.add_option(
        '--gluster-cli-options', metavar='OPTS', default='--log-file=-')
    op.add_option('--mountbroker', metavar='LABEL')
    op.add_option('-p', '--pid-file', metavar='PIDF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('-l', '--log-file', metavar='LOGF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--iprefix',  metavar='LOGD',  type=str,
                  action='callback', callback=store_abs)
    op.add_option('--changelog-log-file',  metavar='LOGF',  type=str,
                  action='callback', callback=store_abs)
    op.add_option('--log-file-mbr', metavar='LOGF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--state-file', metavar='STATF', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--state-detail-file', metavar='STATF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--georep-session-working-dir', metavar='STATF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--ignore-deletes', default=False, action='store_true')
    op.add_option('--isolated-slave', default=False, action='store_true')
    op.add_option('--use-rsync-xattrs', default=False, action='store_true')
    op.add_option('--pause-on-start', default=False, action='store_true')
    op.add_option('-L', '--log-level', metavar='LVL')
    op.add_option('-r', '--remote-gsyncd', metavar='CMD',
                  default=os.path.abspath(sys.argv[0]))
    op.add_option('--volume-id', metavar='UUID')
    op.add_option('--slave-id', metavar='ID')
    op.add_option('--session-owner', metavar='ID')
    op.add_option('--local-id', metavar='ID', help=SUPPRESS_HELP, default='')
    op.add_option(
        '--local-path', metavar='PATH', help=SUPPRESS_HELP, default='')
    op.add_option('-s', '--ssh-command', metavar='CMD', default='ssh')
    op.add_option('--ssh-command-tar', metavar='CMD', default='ssh')
    op.add_option('--rsync-command', metavar='CMD', default='rsync')
    op.add_option('--rsync-options', metavar='OPTS', default='')
    op.add_option('--rsync-ssh-options', metavar='OPTS', default='--compress')
    op.add_option('--timeout', metavar='SEC', type=int, default=120)
    op.add_option('--connection-timeout', metavar='SEC',
                  type=int, default=60, help=SUPPRESS_HELP)
    op.add_option('--sync-jobs', metavar='N', type=int, default=3)
    op.add_option('--replica-failover-interval', metavar='N',
                  type=int, default=1)
    op.add_option('--changelog-archive-format', metavar='N',
                  type=str, default="%Y%m")
    op.add_option(
        '--turns', metavar='N', type=int, default=0, help=SUPPRESS_HELP)
    op.add_option('--allow-network', metavar='IPS', default='')
    op.add_option('--socketdir', metavar='DIR')
    op.add_option('--state-socket-unencoded', metavar='SOCKF',
                  type=str, action='callback', callback=store_abs)
    op.add_option('--checkpoint', metavar='LABEL', default='')

    # tunables for failover/failback mechanism:
    # None   - gsyncd behaves as normal
    # blind  - gsyncd works with xtime pairs to identify
    #          candidates for synchronization
    # wrapup - same as normal mode but does not assign
    #          xtimes to orphaned files
    # see crawl() for usage of the above tunables
    op.add_option('--special-sync-mode', type=str, help=SUPPRESS_HELP)

    # changelog or xtime? (TODO: Change the default)
    op.add_option(
        '--change-detector', metavar='MODE', type=str, default='xtime')
    # sleep interval for change detection (xtime crawl uses a hardcoded 1
    # second sleep time)
    op.add_option('--change-interval', metavar='SEC', type=int, default=3)
    # working directory for changelog based mechanism
    op.add_option('--working-dir', metavar='DIR', type=str,
                  action='callback', callback=store_abs)
    op.add_option('--use-tarssh', default=False, action='store_true')

    # --- invocation-level options (diverted into rconf) ---
    op.add_option('-c', '--config-file', metavar='CONF',
                  type=str, action='callback', callback=store_local)
    # duh. need to specify dest or value will be mapped to None :S
    op.add_option('--monitor', dest='monitor', action='callback',
                  callback=store_local_curry(True))
    op.add_option('--agent', dest='agent', action='callback',
                  callback=store_local_curry(True))
    op.add_option('--resource-local', dest='resource_local',
                  type=str, action='callback', callback=store_local)
    op.add_option('--resource-remote', dest='resource_remote',
                  type=str, action='callback', callback=store_local)
    op.add_option('--feedback-fd', dest='feedback_fd', type=int,
                  help=SUPPRESS_HELP, action='callback', callback=store_local)
    op.add_option('--rpc-fd', dest='rpc_fd', type=str, help=SUPPRESS_HELP)
    op.add_option('--listen', dest='listen', help=SUPPRESS_HELP,
                  action='callback', callback=store_local_curry(True))
    op.add_option('-N', '--no-daemon', dest="go_daemon",
                  action='callback', callback=store_local_curry('dont'))
    op.add_option('--verify', type=str, dest="verify",
                  action='callback', callback=store_local)
    op.add_option('--create', type=str, dest="create",
                  action='callback', callback=store_local)
    op.add_option('--delete', dest='delete', action='callback',
                  callback=store_local_curry(True))
    # --debug implies foreground run, DEBUG log level and logging to stderr.
    op.add_option('--debug', dest="go_daemon", action='callback',
                  callback=lambda *a: (store_local_curry('dont')(*a),
                                       setattr(
                                           a[-1].values, 'log_file', '-'),
                                       setattr(a[-1].values, 'log_level',
                                               'DEBUG'),
                                       setattr(a[-1].values,
                                               'changelog_log_file', '-')))
    op.add_option('--path', type=str, action='append')

    for a in ('check', 'get'):
        op.add_option('--config-' + a, metavar='OPT', type=str, dest='config',
                      action='callback',
                      callback=store_local_obj(a, lambda vx: {'opt': vx}))
    op.add_option('--config-get-all', dest='config', action='callback',
                  callback=store_local_obj('get', lambda vx: {'opt': None}))
    for m in ('', '-rx', '-glob'):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able
        # to inject (a value depending on the)
        # iteration variable into the inner lambda
        def conf_mod_opt_regex_variant(rx):
            op.add_option('--config-set' + m, metavar='OPT VAL', type=str,
                          nargs=2, dest='config', action='callback',
                          callback=store_local_obj('set', lambda vx: {
                              'opt': vx[0], 'val': vx[1], 'rx': rx}))
            op.add_option('--config-del' + m, metavar='OPT', type=str,
                          dest='config', action='callback',
                          callback=store_local_obj('del', lambda vx: {
                              'opt': vx, 'rx': rx}))
        conf_mod_opt_regex_variant(m and m[1:] or False)

    op.add_option('--normalize-url', dest='url_print',
                  action='callback', callback=store_local_curry('normal'))
    op.add_option('--canonicalize-url', dest='url_print',
                  action='callback', callback=store_local_curry('canon'))
    op.add_option('--canonicalize-escape-url', dest='url_print',
                  action='callback', callback=store_local_curry('canon_esc'))

    # Normalized names of options that may be set through the config file.
    # NOTE(review): the 'store_true' member compares against o.callback,
    # which is never that string — store_true options already match via
    # callback None; confirm whether it is dead.
    tunables = [norm(o.get_opt_string()[2:])
                for o in op.option_list
                if (o.callback in (store_abs, 'store_true', None) and
                    o.get_opt_string() not in ('--version', '--help'))]
    remote_tunables = ['listen', 'go_daemon', 'timeout',
                       'session_owner', 'config_file', 'use_rsync_xattrs']
    rq_remote_tunables = {'listen': True}

    # precedence for sources of values: 1) commandline, 2) cfg file, 3)
    # defaults for this to work out we need to tell apart defaults from
    # explicitly set options... so churn out the defaults here and call
    # the parser with virgin values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    args_orig = args[:]
    # --resource-local / --resource-remote override the positional
    # <master> / <slave> arguments.
    r = rconf.get('resource_local')
    if r:
        if len(args) == 0:
            args.append(None)
        args[0] = r
    r = rconf.get('resource_remote')
    if r:
        if len(args) == 0:
            raise GsyncdError('local resource unspecfied')
        elif len(args) == 1:
            args.append(None)
        args[1] = r
    confdata = rconf.get('config')
    # Argument-count sanity: two peers, or one peer in listen mode, or a
    # config/url-print invocation.
    if not (len(args) == 2 or
            (len(args) == 1 and rconf.get('listen')) or
            (len(args) <= 2 and confdata) or
            rconf.get('url_print')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    # --verify: just prove gsyncd can be spawned, then exit.
    verify = rconf.get('verify')
    if verify:
        logging.info(verify)
        logging.info("Able to spawn gsyncd.py")
        return

    restricted = os.getenv('_GSYNCD_RESTRICTED_')

    # Over restricted SSH only a whitelisted set of tunables may be given,
    # and some must carry required values.
    if restricted:
        allopts = {}
        allopts.update(opts.__dict__)
        allopts.update(rconf)
        bannedtuns = set(allopts.keys()) - set(remote_tunables)
        if bannedtuns:
            raise GsyncdError('following tunables cannot be set with '
                              'restricted SSH invocaton: ' +
                              ', '.join(bannedtuns))
        for k, v in rq_remote_tunables.items():
            if not k in allopts or allopts[k] != v:
                raise GsyncdError('tunable %s is not set to value %s required '
                                  'for restricted SSH invocaton' %
                                  (k, v))

    confrx = getattr(confdata, 'rx', None)

    def makersc(aa, check=True):
        # Parse resource URLs into (all, local, remote); optionally verify
        # that the local end can talk to the remote one.
        if not aa:
            return ([], None, None)
        ra = [resource.parse_url(u) for u in aa]
        local = ra[0]
        remote = None
        if len(ra) > 1:
            remote = ra[1]
        if check and not local.can_connect_to(remote):
            raise GsyncdError("%s cannot work with %s" %
                              (local.path, remote and remote.path))
        return (ra, local, remote)
    if confrx:
        # peers are regexen, don't try to parse them
        if confrx == 'glob':
            args = ['\A' + fnmatch.translate(a) for a in args]
        canon_peers = args
        namedict = {}
    else:
        dc = rconf.get('url_print')
        rscs, local, remote = makersc(args_orig, not dc)
        if dc:
            # URL-print mode: emit each peer in the requested form and exit.
            for r in rscs:
                print(r.get_url(**{'normal': {},
                                   'canon': {'canonical': True},
                                   'canon_esc': {'canonical': True,
                                                 'escaped': True}}[dc]))
            return
        # pa collects three parallel representations of each peer:
        # plain, canonical, canonical+escaped.
        pa = ([], [], [])
        urlprms = (
            {}, {'canonical': True}, {'canonical': True, 'escaped': True})
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        _, canon_peers, canon_esc_peers = pa  # NOTE(review): canon_esc_peers unused
        # creating the namedict, a dict representing various ways of referring
        # to / repreenting peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[
                0].upper() + x[1:], lambda x: 'e' + x[0].upper() + x[1:])
        if remote:
            rmap = {local: ('local', 'master'), remote: ('remote', 'slave')}
        else:
            rmap = {local: ('local', 'slave')}
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                namedict[name + 'vol'] = x.volume
                if name == 'remote':
                    namedict['remotehost'] = x.remotehost
    if not 'config_file' in rconf:
        rconf['config_file'] = os.path.join(
            os.path.dirname(sys.argv[0]), "conf/gsyncd_template.conf")

    upgrade_config_file(rconf['config_file'])
    gcnf = GConffile(
        rconf['config_file'], canon_peers,
        defaults.__dict__, opts.__dict__, namedict)

    # Handle --config-* invocations; normally they return without starting
    # any service, except that changing 'checkpoint' also needs logging.
    checkpoint_change = False
    if confdata:
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == 'check':
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise GsyncdError("not a valid option: " + confdata.opt)
        if confdata.op == 'get':
            gcnf.get(confdata.opt)
        elif confdata.op == 'set':
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == 'del':
            gcnf.delete(confdata.opt, confdata.rx)
        # when modifying checkpoint, it's important to make a log
        # of that, so in that case we go on to set up logging even
        # if its just config invocation
        if confdata.opt == 'checkpoint' and confdata.op in ('set', 'del') and \
           not confdata.rx:
            checkpoint_change = True
        if not checkpoint_change:
            return
    # Assemble effective configuration in precedence order:
    # defaults < config file < command line.
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    delete = rconf.get('delete')
    if delete:
        logging.info('geo-replication delete')
        # Delete pid file, status file, socket file
        cleanup_paths = []
        if getattr(gconf, 'pid_file', None):
            cleanup_paths.append(gconf.pid_file)

        if getattr(gconf, 'state_file', None):
            cleanup_paths.append(gconf.state_file)

        if getattr(gconf, 'state_detail_file', None):
            cleanup_paths.append(gconf.state_detail_file)

        if getattr(gconf, 'state_socket_unencoded', None):
            cleanup_paths.append(gconf.state_socket_unencoded)

        # Strip the trailing 11 characters of the config file name to glob
        # session-related files — TODO confirm against naming convention.
        cleanup_paths.append(rconf['config_file'][:-11] + "*")

        # Cleanup changelog working dirs
        if getattr(gconf, 'working_dir', None):
            try:
                shutil.rmtree(gconf.working_dir)
            except (IOError, OSError):
                if sys.exc_info()[1].errno == ENOENT:
                    pass
                else:
                    raise GsyncdError(
                        'Error while removing working dir: %s' %
                        gconf.working_dir)

        for path in cleanup_paths:
            # To delete temp files
            for f in glob.glob(path + "*"):
                _unlink(f)
        return

    # In restricted mode, optionally restrict clients by source network.
    if restricted and gconf.allow_network:
        ssh_conn = os.getenv('SSH_CONNECTION')
        if not ssh_conn:
            # legacy env var
            ssh_conn = os.getenv('SSH_CLIENT')
        if ssh_conn:
            allowed_networks = [IPNetwork(a)
                                for a in gconf.allow_network.split(',')]
            client_ip = IPAddress(ssh_conn.split()[0])
            allowed = False
            for nw in allowed_networks:
                if client_ip in nw:
                    allowed = True
                    break
            if not allowed:
                raise GsyncdError("client IP address is not allowed")

    # Keep the feedback fd from leaking into spawned children.
    ffd = rconf.get('feedback_fd')
    if ffd:
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    # normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition
        if lvl2 == "Level " + lvl1:
            raise GsyncdError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    if not privileged() and gconf.log_file_mbr:
        gconf.log_file = gconf.log_file_mbr

    # Deferred tail of the checkpoint-config invocation: log the change and
    # reset derived checkpoint state.
    if checkpoint_change:
        try:
            GLogger._gsyncd_loginit(log_file=gconf.log_file, label='conf')
            if confdata.op == 'set':
                logging.info('checkpoint %s set' % confdata.val)
                gcnf.delete('checkpoint_completed')
                gcnf.delete('checkpoint_target')
            elif confdata.op == 'del':
                logging.info('checkpoint info was reset')
                # if it is removing 'checkpoint' then we need
                # to remove 'checkpoint_completed' and 'checkpoint_target' too
                gcnf.delete('checkpoint_completed')
                gcnf.delete('checkpoint_target')

        except IOError:
            if sys.exc_info()[1].errno == ENOENT:
                # directory of log path is not present,
                # which happens if we get here from
                # a peer-multiplexed "config-set checkpoint"
                # (as that directory is created only on the
                # original node)
                pass
            else:
                raise
        return

    create = rconf.get('create')
    if create:
        if getattr(gconf, 'state_file', None):
            update_file(gconf.state_file, lambda f: f.write(create + '\n'))
        return

    # --- service startup: decide mode/label and daemonization strategy ---
    go_daemon = rconf['go_daemon']
    be_monitor = rconf.get('monitor')
    be_agent = rconf.get('agent')

    rscs, local, remote = makersc(args)
    if not be_monitor and isinstance(remote, resource.SSH) and \
       go_daemon == 'should':
        # daemonize only after the remote connection is up
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = 'monitor'
    elif be_agent:
        label = 'agent'
    elif remote:
        # master
        label = gconf.local_path
    else:
        label = 'slave'
    startup(go_daemon=go_daemon, log_file=log_file, label=label)
    resource.Popen.init_errhandler()

    if be_agent:
        os.setsid()
        logging.debug('rpc_fd: %s' % repr(gconf.rpc_fd))
        return agent(Changelog(), gconf.rpc_fd)

    if be_monitor:
        return monitor(*rscs)

    logging.info("syncing: %s" % " -> ".join(r.url for r in rscs))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    if ffd:
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])
Example #29
0
def main_i():
    """Internal main routine (older gsyncd variant).

    Parses the command line, then either queries/manipulates configuration,
    prints normalized/canonicalized peer URLs, or starts the service as
    monitor, master or slave.
    """
    # "runtime config": invocation-level settings kept out of the parsed
    # option values; filled in by the store_local* callbacks below.
    rconf = {'go_daemon': 'should'}

    def store_abs(opt, optstr, val, parser):
        # Normalize path-valued options to absolute paths; '-' (the
        # stdout/stderr convention) is passed through unchanged.
        if val and val != '-':
            val = os.path.abspath(val)
        setattr(parser.values, opt.dest, val)

    def store_local(opt, optstr, val, parser):
        # Divert the option into rconf instead of the parser's values object.
        rconf[opt.dest] = val

    def store_local_curry(val):
        # store_local variant storing a fixed value, ignoring the argument.
        return lambda o, oo, vx, p: store_local(o, oo, val, p)

    def store_local_obj(op, dmake):
        # store_local variant wrapping the value in a FreeObject tagged with
        # operation `op`; used by the --config-* option family.
        return lambda o, oo, vx, p: store_local(
            o, oo, FreeObject(op=op, **dmake(vx)), p)

    op = OptionParser(usage="%prog [options...] <master> <slave>",
                      version="%prog 0.0.1")
    # --- session/service tunables (stored in the parsed values) ---
    op.add_option('--gluster-command', metavar='CMD', default='glusterfs')
    op.add_option('--gluster-log-file',
                  metavar='LOGF',
                  default=os.devnull,
                  type=str,
                  action='callback',
                  callback=store_abs)
    op.add_option('--gluster-log-level', metavar='LVL')
    op.add_option('-p',
                  '--pid-file',
                  metavar='PIDF',
                  type=str,
                  action='callback',
                  callback=store_abs)
    op.add_option('-l',
                  '--log-file',
                  metavar='LOGF',
                  type=str,
                  action='callback',
                  callback=store_abs)
    op.add_option('--state-file',
                  metavar='STATF',
                  type=str,
                  action='callback',
                  callback=store_abs)
    op.add_option('-L', '--log-level', metavar='LVL')
    op.add_option('-r',
                  '--remote-gsyncd',
                  metavar='CMD',
                  default=os.path.abspath(sys.argv[0]))
    op.add_option('--volume-id', metavar='UUID')
    op.add_option('--session-owner', metavar='ID')
    op.add_option('-s', '--ssh-command', metavar='CMD', default='ssh')
    op.add_option('--rsync-command', metavar='CMD', default='rsync')
    op.add_option('--rsync-extra',
                  metavar='ARGS',
                  default='-sS',
                  help=SUPPRESS_HELP)
    op.add_option('--timeout', metavar='SEC', type=int, default=120)
    op.add_option('--sync-jobs', metavar='N', type=int, default=3)
    op.add_option('--turns',
                  metavar='N',
                  type=int,
                  default=0,
                  help=SUPPRESS_HELP)

    # --- invocation-level options (diverted into rconf) ---
    op.add_option('-c',
                  '--config-file',
                  metavar='CONF',
                  type=str,
                  action='callback',
                  callback=store_local)
    # duh. need to specify dest or value will be mapped to None :S
    op.add_option('--monitor',
                  dest='monitor',
                  action='callback',
                  callback=store_local_curry(True))
    op.add_option('--feedback-fd',
                  dest='feedback_fd',
                  type=int,
                  help=SUPPRESS_HELP,
                  action='callback',
                  callback=store_local)
    op.add_option('--listen',
                  dest='listen',
                  help=SUPPRESS_HELP,
                  action='callback',
                  callback=store_local_curry(True))
    op.add_option('-N',
                  '--no-daemon',
                  dest="go_daemon",
                  action='callback',
                  callback=store_local_curry('dont'))
    # --debug implies foreground run, DEBUG log level and logging to stderr.
    # NOTE(review): the trailing comma below turns this statement into a
    # one-element tuple expression; harmless but presumably unintended.
    op.add_option('--debug',
                  dest="go_daemon",
                  action='callback',
                  callback=lambda *a:
                  (store_local_curry('dont')
                   (*a), setattr(a[-1].values, 'log_file', '-'),
                   setattr(a[-1].values, 'log_level', 'DEBUG'))),

    for a in ('check', 'get'):
        op.add_option('--config-' + a,
                      metavar='OPT',
                      type=str,
                      dest='config',
                      action='callback',
                      callback=store_local_obj(a, lambda vx: {'opt': vx}))
    op.add_option('--config-get-all',
                  dest='config',
                  action='callback',
                  callback=store_local_obj('get', lambda vx: {'opt': None}))
    for m in ('', '-rx'):
        # call this code 'Pythonic' eh?
        # have to define a one-shot local function to be able to inject (a value depending on the)
        # iteration variable into the inner lambda
        def conf_mod_opt_regex_variant(rx):
            op.add_option('--config-set' + m,
                          metavar='OPT VAL',
                          type=str,
                          nargs=2,
                          dest='config',
                          action='callback',
                          callback=store_local_obj(
                              'set', lambda vx: {
                                  'opt': vx[0],
                                  'val': vx[1],
                                  'rx': rx
                              }))
            op.add_option('--config-del' + m,
                          metavar='OPT',
                          type=str,
                          dest='config',
                          action='callback',
                          callback=store_local_obj(
                              'del', lambda vx: {
                                  'opt': vx,
                                  'rx': rx
                              }))

        # rx flag: False for the plain variant, True for the '-rx' one.
        conf_mod_opt_regex_variant(not not m)

    op.add_option('--normalize-url',
                  dest='url_print',
                  action='callback',
                  callback=store_local_curry('normal'))
    op.add_option('--canonicalize-url',
                  dest='url_print',
                  action='callback',
                  callback=store_local_curry('canon'))
    op.add_option('--canonicalize-escape-url',
                  dest='url_print',
                  action='callback',
                  callback=store_local_curry('canon_esc'))

    # Normalized names of options that may be set via the config file.
    tunables = [
        norm(o.get_opt_string()[2:]) for o in op.option_list
        if o.callback in (store_abs,
                          None) and o.get_opt_string() not in ('--version',
                                                               '--help')
    ]

    # precedence for sources of values: 1) commandline, 2) cfg file, 3) defaults
    # -- for this to work out we need to tell apart defaults from explicitly set
    # options... so churn out the defaults here and call the parser with virgin
    # values container.
    defaults = op.get_default_values()
    opts, args = op.parse_args(values=optparse.Values())
    confdata = rconf.get('config')
    # Argument-count sanity: two peers, or one peer in listen mode, or a
    # config/url-print invocation.
    if not (len(args) == 2 or \
            (len(args) == 1 and rconf.get('listen')) or \
            (len(args) <= 2 and confdata) or \
            rconf.get('url_print')):
        sys.stderr.write("error: incorrect number of arguments\n\n")
        sys.stderr.write(op.get_usage() + "\n")
        sys.exit(1)

    if getattr(confdata, 'rx', None):
        # peers are regexen, don't try to parse them
        canon_peers = args
        namedict = {}
    else:
        rscs = [resource.parse_url(u) for u in args]
        dc = rconf.get('url_print')
        if dc:
            # URL-print mode: emit each peer in the requested form and exit.
            for r in rscs:
                print(
                    r.get_url(
                        **{
                            'normal': {},
                            'canon': {
                                'canonical': True
                            },
                            'canon_esc': {
                                'canonical': True,
                                'escaped': True
                            }
                        }[dc]))
            return
        local = remote = None
        if rscs:
            local = rscs[0]
            if len(rscs) > 1:
                remote = rscs[1]
            if not local.can_connect_to(remote):
                raise RuntimeError("%s cannot work with %s" %
                                   (local.path, remote and remote.path))
        # pa collects three parallel representations of each peer:
        # plain, canonical, canonical+escaped.
        pa = ([], [], [])
        urlprms = ({}, {
            'canonical': True
        }, {
            'canonical': True,
            'escaped': True
        })
        for x in rscs:
            for i in range(len(pa)):
                pa[i].append(x.get_url(**urlprms[i]))
        # NOTE(review): `peers` is only bound on this branch; the syncing
        # path below relies on config invocations returning early.
        peers, canon_peers, canon_esc_peers = pa
        # creating the namedict, a dict representing various ways of referring to / repreenting
        # peers to be fillable in config templates
        mods = (lambda x: x, lambda x: x[0].upper() + x[1:],
                lambda x: 'e' + x[0].upper() + x[1:])
        if remote:
            rmap = {local: ('local', 'master'), remote: ('remote', 'slave')}
        else:
            rmap = {local: ('local', 'slave')}
        namedict = {}
        for i in range(len(rscs)):
            x = rscs[i]
            for name in rmap[x]:
                for j in range(3):
                    namedict[mods[j](name)] = pa[j][i]
                if x.scheme == 'gluster':
                    namedict[name + 'vol'] = x.volume
    if not 'config_file' in rconf:
        rconf['config_file'] = os.path.join(os.path.dirname(sys.argv[0]),
                                            "conf/gsyncd.conf")
    gcnf = GConffile(rconf['config_file'], canon_peers, defaults.__dict__,
                     opts.__dict__, namedict)

    # Handle --config-* invocations; they never start the service.
    if confdata:
        opt_ok = norm(confdata.opt) in tunables + [None]
        if confdata.op == 'check':
            if opt_ok:
                sys.exit(0)
            else:
                sys.exit(1)
        elif not opt_ok:
            raise RuntimeError("not a valid option: " + confdata.opt)
        if confdata.op == 'get':
            gcnf.get(confdata.opt)
        elif confdata.op == 'set':
            gcnf.set(confdata.opt, confdata.val, confdata.rx)
        elif confdata.op == 'del':
            gcnf.delete(confdata.opt, confdata.rx)
        return

    # Assemble effective configuration in precedence order:
    # defaults < config file < command line.
    gconf.__dict__.update(defaults.__dict__)
    gcnf.update_to(gconf.__dict__)
    gconf.__dict__.update(opts.__dict__)
    gconf.configinterface = gcnf

    # Keep the feedback fd from leaking into spawned children.
    ffd = rconf.get('feedback_fd')
    if ffd:
        fcntl.fcntl(ffd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)

    #normalize loglevel
    lvl0 = gconf.log_level
    if isinstance(lvl0, str):
        lvl1 = lvl0.upper()
        lvl2 = logging.getLevelName(lvl1)
        # I have _never_ _ever_ seen such an utterly braindead
        # error condition
        if lvl2 == "Level " + lvl1:
            raise RuntimeError('cannot recognize log level "%s"' % lvl0)
        gconf.log_level = lvl2

    # --- service startup: decide mode/label and daemonization strategy ---
    go_daemon = rconf['go_daemon']
    be_monitor = rconf.get('monitor')

    if not be_monitor and isinstance(remote, resource.SSH) and \
       go_daemon == 'should':
        # daemonize only after the remote connection is up
        go_daemon = 'postconn'
        log_file = None
    else:
        log_file = gconf.log_file
    if be_monitor:
        label = 'monitor'
    elif remote:
        #master
        label = ''
    else:
        label = 'slave'
    startup(go_daemon=go_daemon, log_file=log_file, label=label)

    if be_monitor:
        return monitor()

    logging.info("syncing: %s" % " -> ".join(peers))
    if remote:
        go_daemon = remote.connect_remote(go_daemon=go_daemon)
        if go_daemon:
            startup(go_daemon=go_daemon, log_file=gconf.log_file)
            # complete remote connection in child
            remote.connect_remote(go_daemon='done')
    local.connect()
    if ffd:
        os.close(ffd)
    local.service_loop(*[r for r in [remote] if r])

    logging.info("exiting.")