class TestCount(BaseTest):
    """Tests for the optparse "count" action, with the same dest
    ("verbose") shared by an int-typed option and a store_const option."""
    def setUp(self):
        # Three ways to set 'verbose': -v counts up, --verbose=N stores
        # an explicit int, and -q/--quiet resets it to the constant 0.
        self.parser = OptionParser(usage=SUPPRESS_USAGE)
        self.v_opt = make_option("-v", action="count", dest="verbose")
        self.parser.add_option(self.v_opt)
        self.parser.add_option("--verbose", type="int", dest="verbose")
        self.parser.add_option("-q", "--quiet",
                               action="store_const", dest="verbose", const=0)

    def test_empty(self):
        # No options given: the count dest keeps its default (None).
        self.assertParseOK([], {'verbose': None}, [])

    def test_count_one(self):
        self.assertParseOK(["-v"], {'verbose': 1}, [])

    def test_count_three(self):
        # Grouped short options each bump the counter.
        self.assertParseOK(["-vvv"], {'verbose': 3}, [])

    def test_count_three_apart(self):
        self.assertParseOK(["-v", "-v", "-v"], {'verbose': 3}, [])

    def test_count_override_amount(self):
        # A later --verbose=N replaces the accumulated count.
        self.assertParseOK(["-vvv", "--verbose=2"], {'verbose': 2}, [])

    def test_count_override_quiet(self):
        self.assertParseOK(["-vvv", "--verbose=2", "-q"], {'verbose': 0}, [])

    def test_count_overriding(self):
        # Counting resumes from the value -q stored (0 -> 1).
        self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"],
                           {'verbose': 1}, [])

    def test_count_interspersed_args(self):
        self.assertParseOK(["--quiet", "3", "-v"],
                           {'verbose': 1},
                           ["3"])

    def test_count_no_interspersed_args(self):
        # With interspersed args disabled, parsing stops at "3",
        # so the trailing -v is left in the positional args.
        self.parser.disable_interspersed_args()
        self.assertParseOK(["--quiet", "3", "-v"],
                           {'verbose': 0},
                           ["3", "-v"])

    def test_count_no_such_option(self):
        self.assertParseFail(["-q3", "-v"], "no such option: -3")

    def test_count_option_no_value(self):
        self.assertParseFail(["--quiet=3", "-v"],
                             "--quiet option does not take a value")

    def test_count_with_default(self):
        self.parser.set_default('verbose', 0)
        self.assertParseOK([], {'verbose':0}, [])

    def test_count_overriding_default(self):
        # Same sequence as test_count_overriding, but counting starts
        # from the explicit default of 0 instead of None.
        self.parser.set_default('verbose', 0)
        self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"],
                           {'verbose': 1}, [])
class TestExpandDefaults(BaseTest):
    """Tests for "%default" expansion in optparse help strings."""
    def setUp(self):
        self.parser = OptionParser(prog="test")
        # Common leading part of the generated help text.
        self.help_prefix = """\
usage: test [options]

options:
  -h, --help            show this help message and exit
"""
        self.file_help = "read from FILE [default: %default]"
        self.expected_help_file = self.help_prefix + "  -f FILE, --file=FILE  read from FILE [default: foo.txt]\n"
        self.expected_help_none = self.help_prefix + "  -f FILE, --file=FILE  read from FILE [default: none]\n"

    def test_option_default(self):
        # Default supplied directly on add_option().
        self.parser.add_option("-f", "--file", default="foo.txt", help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_file)

    def test_parser_default_1(self):
        # Default supplied via parser.set_default(dest, value).
        self.parser.add_option("-f", "--file", help=self.file_help)
        self.parser.set_default("file", "foo.txt")
        self.assertHelp(self.parser, self.expected_help_file)

    def test_parser_default_2(self):
        # Default supplied via the keyword form, parser.set_defaults().
        self.parser.add_option("-f", "--file", help=self.file_help)
        self.parser.set_defaults(file="foo.txt")
        self.assertHelp(self.parser, self.expected_help_file)

    def test_no_default(self):
        # A missing default renders as "none" in the help text.
        self.parser.add_option("-f", "--file", help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_none)

    def test_default_none_1(self):
        # An explicit None default renders the same as no default.
        self.parser.add_option("-f", "--file", default=None, help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_none)

    def test_default_none_2(self):
        self.parser.add_option("-f", "--file", help=self.file_help)
        self.parser.set_defaults(file=None)
        self.assertHelp(self.parser, self.expected_help_none)

    def test_float_default(self):
        # Non-string defaults are stringified during expansion.
        self.parser.add_option("-p", "--prob", help="blow up with probability PROB [default: %default]")
        self.parser.set_defaults(prob=0.43)
        expected_help = self.help_prefix + "  -p PROB, --prob=PROB  blow up with probability PROB [default: 0.43]\n"
        self.assertHelp(self.parser, expected_help)

    def test_alt_expand(self):
        # A custom formatter.default_tag replaces the "%default" marker.
        self.parser.add_option("-f", "--file", default="foo.txt", help="read from FILE [default: *DEFAULT*]")
        self.parser.formatter.default_tag = "*DEFAULT*"
        self.assertHelp(self.parser, self.expected_help_file)

    def test_no_expand(self):
        # default_tag = None disables expansion entirely.
        self.parser.add_option("-f", "--file", default="foo.txt", help="read from %default file")
        self.parser.formatter.default_tag = None
        expected_help = self.help_prefix + "  -f FILE, --file=FILE  read from %default file\n"
        self.assertHelp(self.parser, expected_help)
def main():
    '''Main opensignsis processing function.

    Builds the command-line option parser, then seeds its defaults from
    an 'opensignsis.config' file located next to this module; when that
    file does not exist yet, an empty template config is written instead.

    NOTE(review): only the parser/config setup is visible in this
    fragment; the remainder of the original function is not shown here.
    '''
    usage = 'usage: %prog [options] sis_input sis_output'
    version = '%%prog %s (%s)' % (__version__, __date__)

    # create parameters parser
    optparser = OptionParser(usage=usage, version=version)
    optparser.add_option('-i', '--imei', dest='imei',
        help='IMEI of the target device')
    optparser.add_option('-c', '--caps', dest='caps', metavar='CAPABILITIES',
        help='list of capabilities names, separated by +')
    optparser.add_option('-e', '--email', dest='email',
        help='e-mail address used to retrive the signed sis file')
    optparser.add_option('-s', '--server', dest='server', metavar='POP3_SERVER',
        help='host[:port] of the e-mail address POP3 server, defaults to the ' \
        'host part of the e-mail')
    optparser.add_option('-l', '--login', dest='login', metavar='LOGIN',
        help='POP3 server login name, defaults to the username part of the e-mail')
    optparser.add_option('-p', '--passwd', dest='passwd', metavar='PASSWORD',
        help='password associated with the login name, if ommited will cause ' \
        'a prompt at runtime')
    optparser.add_option('-t', '--ssl', dest='ssl', action='store_true',
        help='use SSL to login to POP3 server')
    optparser.add_option('-r', '--inplace', dest='inplace', action='store_true',
        help='replace the input file with the output file')
    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true',
        help='print out more information while running')

    # parse config
    # cfgoptions maps config-file sections to the option dests they may set.
    cfgparser = ConfigParser.SafeConfigParser()
    cfgoptions = {'device': ('imei',),
        'email': ('email', 'server', 'login', 'passwd', 'ssl')}
    cfgpath = os.path.join(os.path.dirname(__file__), 'opensignsis.config')
    if os.path.exists(cfgpath):
        # read config
        if cfgparser.read(cfgpath):
            for section, items in cfgoptions.items():
                for item in items:
                    try:
                        # Missing sections/options just keep parser defaults.
                        optparser.set_default(item, cfgparser.get(section, item))
                    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                        pass
    else:
        # write empty config file
        for section, items in cfgoptions.items():
            cfgparser.add_section(section)
            for item in items:
                cfgparser.set(section, item, '')
        try:
            h = open(cfgpath, 'w')
            cfgparser.write(h)
            h.close()
        except IOError, e:
            warning('couldn\'t create an empty config file (%s)' % e)
# Beispiel #4
# 0
def opt():
  """Build the command-line parser for the fasta-splitting tool.

  Returns:
      OptionParser with -i/-o file options plus --max_length
      (default 14999) and --overlap (default 100), both ints.
  """
  parser = OptionParser()
  parser.add_option("-i", dest="input_file",
                    help="Input fasta file", metavar="FILE")
  parser.add_option("-o", dest="output_file",
                    help="Output fasta file", metavar="FILE")
  parser.add_option("--max_length", dest="max_length", type=int,
                    help="Max length of fasta sequence", metavar="INT")
  parser.add_option("--overlap", dest="overlap", type=int,
                    help="Length of overlap of cutted sequences", metavar="INT")

  # Bug fix: OptionParser.set_default takes (dest, value) positionally;
  # the original keyword calls set_default(max_length=14999) raised
  # TypeError.  The keyword form belongs to set_defaults().
  parser.set_defaults(max_length=14999, overlap=100)

  return parser
# Beispiel #5
# 0
 def __init__(self):
     """Create build jobs for one project group, or for all of
     stable/testing/unstable when -g/--group is 'all' (the default).

     NOTE(review): set_default('DATASOURCE', '0') stores a *string*
     default for an int-typed option; optparse does not convert
     defaults, so options.DATASOURCE is '0' unless -d is given --
     confirm downstream createJob() accepts that.
     """
     self.util = Debian_Utilities()
     parser = OptionParser()
     parser.add_option('-g', '--group', action='store', type='string', dest='GROUP')
     parser.add_option('-d', '--datasource', action='store', type='int', dest='DATASOURCE')
     parser.set_default('DATASOURCE', '0')
     parser.set_default('GROUP', 'all')
     (options, args) = parser.parse_args()

     if options.GROUP != 'all':
         for project in self.getProjectGroup(options.GROUP):
             self.createJob(self.util, project, options.GROUP, options.DATASOURCE)
     else:
         # No explicit group: build jobs for every Debian release track.
         for project in self.getProjectGroup('stable'):
             self.createJob(self.util, project, 'stable', options.DATASOURCE)
         for project in self.getProjectGroup('testing'):
             self.createJob(self.util, project, 'testing', options.DATASOURCE)
         for project in self.getProjectGroup('unstable'):
             self.createJob(self.util, project, 'unstable', options.DATASOURCE)
# Beispiel #6
# 0
def parse_args():
    """Parse command-line arguments for the VM-creation example.

    Returns (opts, args).  Exits via parser.error/exit when --url or
    --username is missing (and no RHEV_* env fallback is set) or when
    the positional <name> count is not exactly one.
    """
    parser = OptionParser(usage='%prog [options] <name>',
                          description='create a VM using the SDK')
    parser.add_option('-U', '--url', help='the API entry point URL')
    parser.add_option('-u', '--username', help='the user to connect as')
    # The -p/--password option is intentionally disabled in this variant.
    parser.add_option('-d', '--debug', action="store_true", help='enable debugging')
    env_keys = ('url', 'username')
    # Seed defaults from $RHEV_URL / $RHEV_USERNAME when present.
    for opt_key in env_keys:
        parser.set_default(opt_key, os.environ.get('RHEV_%s' % opt_key.upper()))
    opts, args = parser.parse_args()
    missing = [k for k in env_keys if getattr(opts, k) is None]
    if missing:
        first = missing[0]
        env_name = 'RHEV_%s' % first.upper()
        parser.error('please specify --%s or set $%s' % (first, env_name))
    if len(args) != 1:
        parser.print_usage()
        parser.exit()
    return opts, args
# Beispiel #7
# 0
def main():
    """Download a playlist, or emit a bash script that would download it.

    The first positional argument is the playlist URL; the download
    service is resolved from it.  With -b/--bash a script is generated
    into the destination directory, otherwise files are downloaded
    directly.
    """
    usage = "usage: %prog [params] arg1 arg2 %prog 1.0"
    oparser = OptionParser(usage=usage)
    oparser.add_option("-b", "--bash", action="store_true", dest="bash", default=False)
    # Typo fix in the help text: "distination" -> "destination".
    oparser.add_option("-d", "--dest", dest="dest", type="string",
                       help="the destination directory")

    oparser.add_option("-n", "--pages", dest="pages", type="int", help="Number of RSS pages")
    oparser.set_default("dest", os.path.dirname(__file__))
    oparser.set_default("pages", 1)

    (options, args) = oparser.parse_args()

    # Bug fix: guard against a missing positional URL instead of
    # crashing with an IndexError on args[0].
    if not args:
        oparser.error("missing playlist URL argument")

    url = args[0]
    service = services.get_service_from_url(url)

    PLD = PlayListDownloader(url, options.pages, services.map[service])

    if options.bash:
        PLD.genbash(options.dest)
    else:
        PLD.download(options.dest)
# Beispiel #8
# 0
def main(argv):
    """Entry point for the Ravello continuous-integration server.

    Parses *argv*, validates the API credentials, starts the scheduler
    and the Github webhook server, then blocks on the scheduler thread.
    Exits with status 1 when the API connection test fails.
    """
    # Typo fix: "Continous" -> "Continuous" in user-visible strings.
    parser = OptionParser(description='Continuous Integration Server')
    parser.add_option('-u', '--api-url', help='specify the API URL')
    parser.add_option('-n', '--api-user', help='API username')
    parser.add_option('-p', '--api-password', help='API password')
    parser.add_option('-r', '--directory', help='data directory')
    parser.add_option('-w', '--webhook-addr', help='webhook listen address')
    parser.add_option('-d', '--debug', action='store_true',
                      help='show debugging output')
    parser.set_default('api_url', 'http://cloud.ravellosystems.com/')
    parser.set_default('directory', '/etc/ciserver')
    parser.set_default('webhook_addr', ':80')
    opts, args = parser.parse_args(argv)
    # parser.error() already appends a newline; the explicit '\n' in the
    # original messages produced a blank line.
    if not opts.api_user:
        parser.error('you must supply --api-user')
    if not opts.api_password:
        parser.error('you must supply --api-password')
    setup_logging(opts.debug)
    if not test_api_parameters(opts.api_url, opts.api_user, opts.api_password):
        sys.stderr.write('Error: could not connect to API at %s\n' % opts.api_url)
        sys.exit(1)
    JobRunner.set_api_parameters(opts.api_url, opts.api_user, opts.api_password)
    scheduler = create(Scheduler, opts)
    scheduler.start()
    server = create(WebhookServer, opts)
    server.start()
    logger = logging.getLogger('ciserver')
    logger.info('Ravello Continuous Integration Server started')
    # Lazy %-style argument instead of eager string formatting.
    logger.info('Using API at %s', opts.api_url)
    # NOTE(review): assumes server.address is a (host, port) tuple --
    # confirm against WebhookServer.
    logger.info('Github webhook listening on %s:%d' % server.address)
    logger.info('Press CTRL-C to end')
    scheduler.join()
def get_option_parser():
    """Create the OptionParser for the batch-translation driver.

    Options cover tokenizer/model paths, an fp16 flag, batch capacity,
    data/sens/output file paths and a resume/end index window.  A bare
    "model_size" default of 6 is also injected into the parser defaults.
    """
    op = OptionParser()
    op.add_option("--tok", dest="tokenizer_path", metavar="FILE",
                  default=None, help="Path to the tokenizer folder")
    op.add_option("--model", dest="model", metavar="FILE", default=None)
    op.add_option("--fp16", action="store_true", dest="fp16", default=False)
    op.add_option("--capacity", dest="total_capacity", type="int",
                  default=2000, help="Batch capacity")
    for flag, dest in (("--data", "data"), ("--sens", "sens"),
                       ("--output", "output")):
        op.add_option(flag, dest=dest, metavar="FILE", default=None)
    op.add_option("--resume", dest="resume_index", type="int", default=0)
    op.add_option("--end", dest="end_index", type="int", default=-1)
    # "model_size" has no matching option; set_default still exposes it
    # as an attribute on the parsed Values object.
    op.set_default("model_size", 6)
    return op
# Beispiel #10
# 0
    else:
        return map(check_int, m.groups())

class TimeOption(Option):
    """optparse Option subclass registering a custom "time" value type,
    validated and converted by the module-level check_time function."""
    TYPES = Option.TYPES + ("time",)
    # Copy the checker table so the base Option class is not mutated.
    TYPE_CHECKER = copy(Option.TYPE_CHECKER)
    TYPE_CHECKER["time"] = check_time

# Command-line interface: -i/-m/-s/-u select a mode constant, while -t
# accepts a value of the custom "time" type defined above.
parser = OptionParser(option_class=TimeOption)
parser.add_option('-i', action="store_const", dest='mode', const='i')
parser.add_option('-m', action="store_const", dest='mode', const='m')
parser.add_option('-s', action="store_const", dest='mode', const='s')
parser.add_option('-u', action="store_const", dest='mode', const='u')
parser.add_option('-t', type="time", dest="till")

# 'd' (days) is the mode when no mode switch is given.
parser.set_default('mode', 'd')

(opts, args) = parser.parse_args()

mode = opts.mode

# Seconds per unit, keyed by single-letter unit code.
multipliers = {
  "s": 1,  # seconds
  "m": 60,  # minutes
  "h": 60 * 60,  # hours
  "d": 60 * 60 * 24  # days
}

total = 0  # 0 seconds by default
d = datetime.now()
# NOTE(review): the body of this 'if' is truncated in this fragment.
if opts.till is None:
# Beispiel #11
# 0
def parse_args():
    """Parse command-line arguments for the VM-creation SDK example.

    Credentials (url/username/password) default to the matching RHEV_*
    environment variables; cluster/template/memory/disk/network carry
    fixed defaults.  Exits through parser.error/exit on missing
    credentials or when the positional <name> argument is absent.
    """
    parser = OptionParser(usage='%prog [options] <name>',
                          description='create a VM using the SDK')
    parser.add_option('-U', '--url', help='the API entry point URL')
    parser.add_option('-u', '--username', help='the user to connect as')
    parser.add_option('-p', '--password', help='the user\'s password')
    parser.add_option('-m', '--memory', type='int', help='memory size in MB')
    parser.add_option('-D', '--disk', type='int', help='disk size in GB')
    parser.add_option('-c', '--cluster', help='the cluster to add the VM to')
    parser.add_option('-t', '--template', help='base the VM off this template')
    parser.add_option('-N', '--network', help='the network to connect to')
    parser.add_option('-d', '--debug', action='store_true',
                      help='enable debugging')
    cred_keys = ('url', 'username', 'password')
    # Environment-derived defaults for the credentials.
    for cred in cred_keys:
        parser.set_default(cred, os.environ.get('RHEV_%s' % cred.upper()))
    # Fixed fallbacks for the remaining options.
    for dest, value in (('cluster', 'Default'), ('template', 'Blank'),
                        ('memory', 512), ('disk', 8), ('network', 'rhevm')):
        parser.set_default(dest, value)
    opts, args = parser.parse_args()
    for cred in cred_keys:
        if getattr(opts, cred) is None:
            env_name = 'RHEV_%s' % cred.upper()
            parser.error('please specify --%s or set $%s' % (cred, env_name))
    if len(args) != 1:
        parser.print_usage()
        parser.exit()
    return opts, args
# Beispiel #12
# 0
from optparse import OptionParser

from cdecimal import getcontext, FloatOperation, ROUND_HALF_UP
import iosim
import importlib

# Initialize cdecimal module
# Decimal context: 12 significant digits, trap accidental float mixing,
# round half up.
c = getcontext()
c.prec = 12
c.traps[FloatOperation] = True
c.rounding = ROUND_HALF_UP

if __name__ == '__main__':
    # Parse command line
    # Simulation parameter defaults.  NOTE(review): no add_option calls
    # are visible here, so these dests rely on set_default only; the
    # ratio/utility/mmr values are kept as strings, presumably for
    # Decimal construction -- confirm.  This block is truncated in this
    # fragment.
    parser = OptionParser()
    parser.set_default("num_users", 1)
    parser.set_default("num_files", 50)
    parser.set_default("file_size", 64)
    parser.set_default("block_size", 16)
    parser.set_default("replica_selection_policy", 1)
    parser.set_default("file_creation_policy", 0)
    parser.set_default("fanout", 3)
    parser.set_default("num_disks", 200)
    parser.set_default("num_disks_recovery", 50)
    parser.set_default("max_replication", 100)
    parser.set_default("init_replication", 3)
    parser.set_default("random_seed", None)
    parser.set_default("min_ratio_gap", "0.05")
    parser.set_default("min_utility_gap", "0.01")
    parser.set_default("target_mmr", "0.95")
    parser.set_default("max_iters_ratio", 500)
# Beispiel #13
# 0
def main():
    """Compare two CondDB databases by payload hash sums.

    Parses master/slave connection strings and a hashlib algorithm name
    from the command line, computes the payload hash for the given
    global tag in both databases and reports whether they are identical.
    Written for Python 2 (old-style raise/print statements below).
    """
    ###########################################################################
    # Configure the parser
    ###########################################################################
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options] globalTag",
        version=__version__,
        description=
        """This script performs hashing sum based comparison of two databases tagged
with a global tag of interest.""")

    parser.add_option("-m",
                      "--master-db",
                      type="string",
                      help="Master database connection string. "
                      "DEFAULT: $SQLITEDBPATH/database.db/partition")
    parser.add_option("-s",
                      "--slave-db",
                      type="string",
                      help="Slave database connection string.")
    parser.add_option("--hash-alg",
                      type="string",
                      help="Name of an hashing algorithm to use."
                      " DEFAULT: 'sha1'.")

    parser.set_default("master_db", os.environ["SQLITEDBPATH"])
    parser.set_default("slave_db", None)
    parser.set_default("hash_alg", 'sha1')

    # parse command line
    options, args = parser.parse_args()

    ###########################################################################
    # Prepare local logger
    ###########################################################################
    import logging
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # set the global stream handler
    from CondDBUI import LOG_FORMAT
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    ###########################################################################
    # Check arguments
    ###########################################################################
    if len(args) < 1:
        parser.error("Not enough arguments. Try with --help.")
    tag = args[0]

    # Set connection strings
    # The slave DB path's last component must name a known partition.
    slave_ConnStr = options.slave_db
    if not slave_ConnStr:
        parser.error("The target DB is not set to check in.")
    partitions = ["DDDB", "LHCBCOND", "SIMCOND"]
    partition = os.path.split(slave_ConnStr)[1]
    if partition not in partitions:
        parser.error("'%s' is not a valid partition name. Allowed: %s" % \
                     (partition, partitions))
    master_ConnStr = "sqlite_file:" + os.path.join(
        options.master_db, "%s.db" % partition, "%s" % partition)

    # Check hashing algorithm
    import hashlib
    hashAlg = options.hash_alg
    if sys.version_info >= (2, 7):
        if hashAlg not in hashlib.algorithms:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg
    else:
        if hashAlg not in [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
        ]:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg
    # Initialize hashing object
    initialHashSumObj = getattr(hashlib, hashAlg)()

    ###########################################################################
    import timeit
    try:
        log.info("Master DB at: %s" % master_ConnStr)
        log.info("Slave DB at: %s\n" % slave_ConnStr)

        log.info("Connecting to Master DB ...")
        dbMaster = CondDBUI.CondDB(master_ConnStr)
        log.info("Computing the hash sum for '%s' tag ..." % tag)
        hashSumObjMaster = dbMaster.payloadToHash(initialHashSumObj, tag=tag)
        hashSumMaster = hashSumObjMaster.hexdigest()
        log.info("Master DB hash sum is: '%s'\n" %
                 format_text(hashSumMaster, "yellow"))

        log.info("Connecting to Slave DB ...")
        dbSlave = CondDBUI.CondDB(slave_ConnStr)
        log.info("Computing the hash sum for '%s' tag ..." % tag)
        hashSumObjSlave = dbSlave.payloadToHash(initialHashSumObj, tag=tag)
        hashSumSlave = hashSumObjSlave.hexdigest()
        log.info("Slave DB hash sum is: '%s'\n" %
                 format_text(hashSumSlave, "yellow"))

        if hashSumMaster == hashSumSlave:
            log.info("Tags with the name '%s' in both DBs are %s." %
                     (tag, format_text("identical", "green")))
        else:
            log.info("Tags with the name '%s' in both DBs are %s." %
                     (tag, format_text("different", "green", blink=True)))

    except KeyboardInterrupt:
        print "Comparison canceled by user."
        return 0
# Beispiel #14
# 0
""" + string.replace(scriptID, '\n', '\\n'))

# Register one int-valued option per morphological filter; the short
# flag is the filter name's first letter, all stencil sizes default to 0.
for option in [
        'opening',
        'closing',
        'erosion',
        'dilation',
        'average',
        'median',
]:
    parser.add_option('-%s' % option[0],
                      '--%s' % option,
                      dest=option,
                      type='int',
                      help='stencil size for %s filter' % option)
    parser.set_default(option, 0)

(options, filenames) = parser.parse_args()

# ------------------------------------------ read Gwyddion data ---------------------------------------

# Parse the '#'-prefixed header of each Gwyddion text export; Width/Height
# lines set physical dimensions.  NOTE(review): the loop body is
# truncated in this fragment.
for file in filenames:
    filters = ''
    header = []
    with open(file, 'r') as f:
        for line in f:
            pieces = line.split()
            if pieces[0] != '#': break
            if pieces[1] == 'Width:': width = float(pieces[2])
            if pieces[1] == 'Height:': height = float(pieces[2])
            header.append(line.lstrip('#').strip())
# Beispiel #15
# 0
# Command-line interface: one int-valued stencil-size option per filter
# (short flag = first letter of the filter name), all defaulting to 0.
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Apply filter(s) to Gwyddion data.
""" + string.replace(scriptID,'\n','\\n')
)

for option in ['opening',
               'closing',
               'erosion',
               'dilation',
               'average',
               'median',
               ]:
  parser.add_option('-%s'%option[0], '--%s'%option,   dest=option, type='int',
                  help = 'stencil size for %s filter'%option)
  parser.set_default(option, 0)

(options, filenames) = parser.parse_args()


# ------------------------------------------ read Gwyddion data ---------------------------------------  

# Parse the '#'-prefixed header of each Gwyddion text export.
# NOTE(review): the loop body is truncated in this fragment.
for file in filenames:
  filters = ''
  header = []
  with open(file,'r') as f:
    for line in f:
      pieces = line.split()
      if pieces[0] != '#': break
      if pieces[1] == 'Width:':  width  = float(pieces[2])
      if pieces[1] == 'Height:': height = float(pieces[2])
# Beispiel #16
# 0
# CIFTEP command-line definition; 'op' is an OptionParser created
# elsewhere in the original file.
op.description = 'CIFTEP: Generate ORTEP3 Instructions From CIF Files'
op.usage = '%prog [options] cif-files'
#
op.add_option('-o',
              '--output',
              type='string',
              dest='tfname',
              metavar='OUTPUT',
              help='Specify output file name')
#
# 'choice'-typed options validate the value against the listed choices.
op.add_option('-c',
              '--contents',
              type='choice',
              choices=['aunit', 'grow', 'cell'],
              help='Contents scheme, [aunit|grow|cell], default: aunit')
op.set_default('contents', 'aunit')
op.add_option(
    '-i',
    '--view',
    type='choice',
    choices=['std', 'best', '100', '010', '001'],
    help='Initial view scheme, [std|best|100|010|001], default: best')
op.set_default('view', 'best')
#
op.add_option(
    '-n',
    '--normalize-uh',
    type='float',
    dest='uH',
    help=
    'Normalize the u values of all H atoms to, default: 0.1, negative values mean no normalization'
def main():
    """Update (or regenerate) ONLINE SQLite snapshots from the Oracle server.

    Detects the time-splitting granularity (per-year or per-month) of
    existing ONLINE snapshots under the destination SQLDDDB 'db' folder,
    then produces/refreshes snapshots up to the current date.  Returns a
    shell-style exit code: 0 on success or user cancel, 1 on error.
    Written for Python 2 (print/raw_input statements below).
    """
    # Configure the parser
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options]",
        description=
        """This script updates ONLINE SQLite snapshots from Oracle server.
It detects used time splitting scheme of ONLINE snapshots at destination
(per-year or per-month) and updates them to extend their coverage up
to current time. If no ONLINE snapshots are found it will generate them
from scratch.""")

    parser.add_option(
        "-d",
        "--destination",
        type="string",
        help="Path to SQLDDDB package ONLINE should be updated/regenerated in. "
        "Default is $SQLDDDBROOT.")
    parser.add_option("-b",
                      "--batch",
                      action="store_true",
                      help="Early choice to regenerate *yearly* snapshots if "
                      "no snapshots are found at all")
    parser.add_option(
        "--rel",
        action="store_true",
        help="Make a record in the *release.notes* file about the update.")

    parser.set_default("destination", os.environ["SQLDDDBROOT"])

    # Parse command line
    options, args = parser.parse_args()

    # Prepare local logger
    import logging
    global log
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # Set the global stream handler
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    # Decrease verbosity of PyCoolDiff
    PyCoolDiff._log.setLevel(logging.WARNING)

    # Form and check arguments
    if not os.path.exists(options.destination):
        log.error("Considered path '%s' doesn't exist." % options.destination)
        return 1
    global dest_sqldddb, db_path
    dest_sqldddb = options.destination.rstrip('/')
    db_path = os.path.join(dest_sqldddb, "db")
    if 'releases' in dest_sqldddb:
        log.error("Released SQLDDDB must not be touched (%s)." % dest_sqldddb)
        return 1
    if not os.path.exists(db_path):
        log.error("Considered path '%s' doesn't exist." % db_path)
        return 1

    # Check availability of flow control file ".stopUpdatingSnapshots"
    stopfilepath = os.path.join(dest_sqldddb, ".stopUpdatingSnapshots")
    isstopfile = os.path.isfile(stopfilepath)
    if isstopfile:
        log.info(
            "Updating on ONLINE snapshots canceled due to the existence of stop file under destination directory."
        )
        return 0

    # Check granularity of ONLINE snapshots in SQLDDDB/db folder
    current_date = datetime.today()
    curr_year = current_date.year
    curr_month = current_date.month

    # Define user's time granularity of ONLINE snapshots
    online_files = [
        file for file in os.listdir(db_path)
        if match(r"^ONLINE-(\d{4})(\d{2})?\.db$", file)
    ]

    if len(online_files) != 0:
        # Lexicographic max works because names embed zero-padded dates.
        last_snap = max(online_files)
    # Ask to generate full set of new ONLINE snapshots
    else:
        ans = 'Yes' if options.batch else None
        while ans is None:
            ans = raw_input(
                "\nNo ONLINE snapshots found. Do you want to generate "
                "full set of ONLINE snapshots (Yes,[No])? ")
            if not ans: ans = "No"
            if ans not in ["Yes", "No"]:
                print "You have to type exactly 'Yes' or 'No'"
                ans = None

        if ans == "No":
            print "Canceled by user. Snapshots weren't generated."
            return 0

        ans = 'Year' if options.batch else None
        while ans is None:
            ans = raw_input(
                "\nDo you want per-year or per-month granularity for"
                " your snapshots (Month,[Year])? ")
            if not ans: ans = "Year"
            if ans not in ["Year", "Month"]:
                print "You have to type exactly 'Year' or 'Month'"
                ans = None

        if ans == 'Year':
            last_snap = "ONLINE-2008.db"
        else:
            last_snap = "ONLINE-200806.db"

    # NOTE(review): str.lstrip/rstrip strip *character sets*, not
    # prefixes/suffixes; this works here only because the date part is
    # all digits.
    last_snap_date = last_snap.lstrip("ONLINE-").rstrip(".db")
    if len(last_snap_date) == 4:
        snap_granular = "YEAR"
    elif len(last_snap_date) == 6:
        snap_granular = "MONTH"
    else:
        log.error("Can't define time granularity of ONLINE snapshots at '%s'. "
                  "Check the snapshots naming scheme is standard." % db_path)
        return 1

    # Check how many snapshots do we need to produce for the detected granularity
    if snap_granular == "YEAR":
        delta_snap_years = curr_year - int(last_snap_date)
        number_of_new_snapshots = delta_snap_years + 1
    else:
        delta_snap_years = curr_year - int(last_snap_date[:4])
        delta_snap_months = curr_month - int(
            last_snap_date[4:]) + 12 * delta_snap_years
        number_of_new_snapshots = delta_snap_months + 1

    log.info("%s ONLINE snapshots found to be delivered to '%s'.\n" %
             (number_of_new_snapshots, db_path))

    # Define 'since' and 'until' values ('year' and 'month' are date to snapshot)
    # and make snapshots
    new_snapshots = []
    if snap_granular == "YEAR":
        # Walk backwards from the current year, one snapshot per year.
        for s in range(number_of_new_snapshots):
            year = curr_year - s
            since = "%04d-01-01" % year
            until = "%04d-01-01" % (year + 1)

            # Make snapshot
            if _make_snapshots(since, until, 4):
                new_snapshots.append(str(year))
            else:
                if len(new_snapshots):
                    new_snapshots_files = [
                        os.path.join(db_path, 'ONLINE-%s.db' % d)
                        for d in new_snapshots
                    ]
                    log.warning("Not all of ONLINE snapshots were updated,"
                                "the ones updated are: %s" %
                                new_snapshots_files)
                return 1
    else:
        # Walk backwards from the current month, one snapshot per month.
        for s in range(number_of_new_snapshots):
            delta = abs(s - curr_month)
            # Compute date for current year
            if curr_month > s:
                year = curr_year
                month = delta
            # Compute date for past years
            else:
                year = curr_year - delta // 12 - 1
                month = 12 - (delta - 12 * (delta // 12))

            since = "%04d-%02d-01" % (year, month)
            # This 'if' clause is to take into account special case of 12th month
            if month != 12:
                until = "%04d-%02d-01" % (year, month + 1)
            else:
                until = "%04d-%02d-01" % (year + 1, 1)

            # Make snapshot
            if _make_snapshots(since, until, 6):
                new_snapshots.append(str(year) + '-%02d' % month)
            else:
                if len(new_snapshots):
                    new_snapshots_files = [
                        os.path.join(db_path,
                                     'ONLINE-%s.db' % d.replace('-', ''))
                        for d in new_snapshots
                    ]
                    log.warning("Not all of ONLINE snapshots were updated,"
                                "the ones updated are: %s" %
                                new_snapshots_files)
                return 1

    log.info('Snapshots are updated successfully! Delivered slices are: %s' %
             new_snapshots)
    if options.rel: _update_rel_notes(dest_sqldddb, new_snapshots)

    return 0
# Beispiel #18
# 0
                        help="don't collapse multiple line breaks")
    parser.add_option("-r", "--raw", dest="raw", action="store_true",
                        help="raw wiki translation -- no wrapping, no toc, no links")
    parser.add_option("-p", "--mainpage", dest="mainpage", metavar="PAGENAME",
                        help="set main page to PAGENAME")

    # Batch / Location options
    parser.add_option("-s", "--srcdir", dest="srcdir",
                        help="wiki format sources in SRCDIR", metavar="SRCDIR")
    parser.add_option("-d", "--destdir", dest="destdir",
                        help="write html output into DESTDIR", metavar="DESTDIR")
    parser.add_option("-e", "--stale", dest="all", action="store_true",
                        help="convert all wiki files that are stale or missing from DESTDIR")


    parser.set_default('toc', True)
    parser.set_default('links', True)
    parser.set_default('template', None)
    parser.set_default('number', False)
    parser.set_default('levels', 3)
    parser.set_default('tabwidth', 8)
    parser.set_default('multibreak', False)
    parser.set_default('mainpage', "MainPage")  # Identity of index

    parser.set_default('srcdir', os.getcwd())
    parser.set_default('destdir', None)
    parser.set_default('all', False)

    # Parse the command line
    (options, args) = parser.parse_args()
# Beispiel #19
# 0
def configure(argv,
              hasRPMSupport=False,
              hasSmartSupport=False,
              hasZypperSupport=False):
    optionParser = OptionParser(
        usage="%prog [options] <searchTerm>",
        version="%%prog %s" % VERSION,
        description=
        "A command-line client for the openSUSE Package Search web service.")
    optionParser.add_option('',
                            '--config',
                            action='store',
                            type='string',
                            dest='configFile',
                            default=defaultUserConfigFile,
                            help="user configuration file (defaults to %s)" %
                            defaultUserConfigFile,
                            metavar="FILE")
    optionParser.add_option(
        '',
        '--skip-global-config',
        action='store_false',
        dest='readGlobalConfig',
        default=True,
        help="skip reading the global configuration file %s" %
        globalConfigFile)
    optionParser.add_option(
        '',
        '--skip-config',
        action='store_false',
        dest='readConfig',
        default=True,
        help="skip reading configuration files alltogether")
    optionParser.add_option('-n',
                            '--by-name',
                            action='store_const',
                            const='ByName',
                            dest='mode',
                            help="only search for matches in package names")
    optionParser.add_option('-c',
                            '--by-content',
                            action='store_const',
                            const='ByContents',
                            dest='mode',
                            help="also search for matches in all file names")
    optionParser.add_option(
        '-s',
        '--simple',
        action='store_const',
        const='Simple',
        dest='mode',
        help=
        "search for matches in package names, package summaries and first match in file names (default)"
    )
    optionParser.add_option(
        '-d',
        '--dist',
        type='string',
        dest='version',
        default=None,
        help=
        "openSUSE version to search for (defaults to %s, may specify 'factory' for Factory or 'latest' for latest release)"
        % defaultSuseVersion,
        metavar="VERSION")
    optionParser.add_option(
        '-l',
        '--latest',
        action='store_const',
        const=latestSuseVersion,
        dest='version',
        help="search in the latest released openSUSE version (%s)" %
        latestSuseVersion)
    optionParser.add_option(
        '-F',
        '--factory',
        action='store_const',
        const='factory',
        dest='version',
        help="search in the openSUSE development version (Factory)")
    optionParser.add_option(
        '-u',
        '--url',
        action='store_true',
        dest='showURL',
        default=False,
        help=
        "also show the URLs of the repositories that contain matching packages"
    )
    optionParser.add_option(
        '-a',
        '--arch',
        action='store_true',
        dest='showArch',
        default=False,
        help=
        "also show the architectures each package match is available for (defaults to false)"
    )
    # disabled for now, will need to add RPM release information in web service results first:
    #optionParser.add_option('-f', '--file', action='store_true', dest='showFileURL', default=False,
    #		help="also show the fully qualified RPM file URLs")
    optionParser.add_option(
        '-t',
        '--timeout',
        action='store',
        type='int',
        dest='timeout',
        default=defaultTimeout,
        help="timeout in seconds for the web service request",
        metavar="TIMEOUT")
    optionParser.add_option(
        '-q',
        '--quiet',
        action='store_false',
        dest='verbose',
        default=True,
        help="don't display progress information (for dumb terminals)")
    optionParser.add_option(
        '-A',
        '--no-ansi',
        action='store_false',
        dest='color',
        default=True,
        help="don't use ANSI escape sequences (for dumb terminals), implies -q"
    )
    optionParser.add_option(
        '',
        '--theme',
        action='store',
        type='string',
        dest='colorScheme',
        default=None,
        help="color scheme to use (unless -A/--no-ansi) -- valid values: %s" %
        (', '.join(colorSchemeMap.keys())),
        metavar='NAME')
    optionParser.add_option(
        '-D',
        '--dump',
        action='store_true',
        dest='dump',
        default=False,
        help="simply dump the XML tree sent back by the server")
    optionParser.add_option('-U',
                            '--show-url',
                            action='store_true',
                            dest='showQueryURL',
                            default=False,
                            help="show the web service query URL")
    optionParser.add_option(
        '',
        '--proxy',
        action='store',
        type='string',
        dest='proxy',
        default=defaultHttpProxy,
        help=
        "HTTP proxy server to use for performing the request (if not specified, uses the http_proxy environment variable)",
        metavar="SERVER:PORT")
    optionParser.add_option('',
                            '--proxy-auth',
                            action='store',
                            type='string',
                            dest='proxyAuth',
                            default=None,
                            help="HTTP proxy authentication",
                            metavar="USER:PASSWORD")
    optionParser.add_option(
        '',
        '--stack-trace',
        action='store_true',
        dest='showStackTrace',
        default=False,
        help=
        "show stack traces on exceptions (only useful for submitting bug reports)"
    )

    helpAddonForRPM = ''
    if not hasRPMSupport:
        helpAddonForRPM = ' (N/A)'
        pass

    optionParser.add_option(
        '-r',
        '--rpm',
        action='store_true',
        dest='rpm',
        default=False,
        help="compare package matches with your current RPM database" +
        helpAddonForRPM)
    optionParser.add_option(
        '',
        '--rpm-root',
        action='store',
        type='string',
        dest='rpmRoot',
        default=None,
        help=
        "set the root directory for the RPM database (not the path to the RPM database but the root of the system)"
        + helpAddonForRPM,
        metavar="DIRECTORY")

    helpAddonForSmart = ''
    if not hasSmartSupport:
        helpAddonForSmart = ' (N/A)'
        pass

    optionParser.add_option('',
                            '--smart',
                            action='store_true',
                            dest='smart',
                            default=False,
                            help="enable smart support to check repositories" +
                            helpAddonForSmart)
    #optionParser.add_option('', '--smart-add', action='store_true', dest='smartAdd', default=False,
    #		help="prompt for adding repositories to smart" + helpAddonForSmart)

    helpAddonForZypper = ''
    if not hasZypperSupport:
        helpAddonForZypper = ' (N/A)'

    optionParser.add_option(
        '',
        '--zypper',
        action='store_true',
        dest='zypper',
        default=False,
        help="enable zypper support to check repositories" +
        helpAddonForZypper)

    (options, args) = optionParser.parse_args(argv)

    if options.readConfig:
        try:
            from ConfigParser import SafeConfigParser
        except ImportError:
            from ConfigParser import ConfigParser
            pass
        try:
            configParser = SafeConfigParser()
        except NameError:
            configParser = ConfigParser()
            pass

        configModeMap = {
            'simple': 'Simple',
            'name': 'ByName',
            'content': 'ByContent'
        }

        userConfigFile = os.path.expanduser(options.configFile)
        configFiles = []
        if options.readGlobalConfig:
            configFiles.append(globalConfigFile)
            pass
        configFiles.append(userConfigFile)

        try:
            configParser.read(configFiles)
        except Exception, e:
            print >> sys.stderr, "Error while reading configuration from %s: %s" % (
                " and ".join(configFiles), e)
            if options.showStackTrace:
                import traceback
                traceback.print_exc()
                pass
            sys.exit(E_CONFIG)
            pass

        # set configuration values as defaults in OptionParser:
        def setOption(type, section, name, option=None):
            if not option:
                option = name
                pass
            if configParser.has_option(section, name):
                m = getattr(configParser, 'get%s' % type)
                optionParser.set_default(option, m(section, name))
                return True
            return False

        if configParser.has_option('General', 'mode'):
            modeConfig = configParser.get('General', 'mode')
            if configModeMap.has_key(modeConfig):
                optionParser.set_default('mode', configModeMap[modeConfig])
            else:
                print >> sys.stderr, 'ERROR: invalid configuration value for parameter "mode" in section "General": %s' % modeConfig
                print >> sys.stderr, 'Valid values are: %s' % ', '.join(
                    configModeMap.keys())
                sys.exit(E_CONFIG)
                pass
            pass
        setOption('', 'General', 'distribution', 'version')
        setOption('boolean', 'Output', 'color')
        setOption('', 'Output', 'theme', 'colorScheme')
        setOption('boolean', 'Output', 'url', 'showURL')
        setOption('boolean', 'Output', 'arch', 'showArch')
        setOption('boolean', 'Output', 'verbose')
        setOption('boolean', 'Output', 'show_query_url', 'showQueryURL')
        if hasRPMSupport:
            setOption('boolean', 'RPM', 'rpm')
            setOption('', 'RPM', 'root', 'rpmRoot')
            pass
        if hasSmartSupport:
            setOption('boolean', 'Smart', 'smart')
            setOption('boolean', 'Smart', 'prompt')
            pass
        setOption('int', 'Network', 'timeout')
        setOption('', 'Network', 'proxy')
        setOption('', 'Network', 'proxy_auth', 'proxyAuth')

        # run option parsing again, now with defaults from the configuration files
        (options, args) = optionParser.parse_args(sys.argv)
        pass
Beispiel #20
0
def main():
    ##########################################################################################
    # Configuring the script options and args parser
    ##########################################################################################
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options] PARTITION(S) DATA_TYPE",
        version=__version__,
        description=
        """Script returns for the given partition and data type the most recent global tag and all \
subsequent local tags.

You have to give the partition name (DDDB, LHCBCOND or SIMCOND) and the data type
(e.g., 2009 or MC09). The output is in the form: (global_tag, [lt1, lt2, ... ,ltN]).
Ordering of the local tags: from the most recent one, to the most old one.
If for the given pair partition-datatype no global tag is found for some reason - "None"
will be returned, even if local tags were found for the given condition.
""")
    parser.add_option("-r",
                      "--rel-notes",
                      type="string",
                      help="XML file containing the release notes to analyze")
    parser.add_option(
        "-g",
        "--all_GTs",
        action="store_true",
        dest="all_GTs",
        default=False,
        help="Search for all global tags for the given partition and data type"
    )
    parser.add_option(
        "--update_bkk",
        action="store_true",
        dest="update_bkk",
        default=False,
        help=
        "If is set, an update of bookkeeping database with a latest global tag \
for the given partition and data type will be done. The user will be asked for \
final confirmation.")

    try:
        parser.set_default(
            "rel_notes",
            os.path.join(os.environ["SQLITEDBPATH"], "..", "doc",
                         "release_notes.xml"))
    except KeyError:
        print "Sorry.. Check your environment. SQLITEDBPATH variable is not set."
        return 1

    options, args = parser.parse_args()
    if len(args) != 2:
        parser.error("Not enough or too much of arguments. Try with --help.")

    datatype = args[1]
    ################ Processing and validation of the given partitions#######################
    partitions = []
    word = ""
    for i in args[0]:
        if i != ",":
            word += i
        elif i == ",":
            partitions.append(word)
            word = ""
        elif i == " ":
            parser.error(
                "Partitions coma separated list should be given without spaces."
            )
    partitions.append(word)

    standard_partitions = ["DDDB", "LHCBCOND", "SIMCOND"]
    for partition in partitions:
        if partition not in standard_partitions and partition != "all":
            parser.error("'%s' is not a valid partition name. Allowed are: %s and 'all'" % \
                        (partition, standard_partitions))
        elif partition == "all":
            partitions = standard_partitions
    print "\n###########################################################################################"
    print "#  Using %s" % options.rel_notes
    print "#  Partitions to look in: %s" % partitions
    print "#  Data type to look for is: %s" % datatype
    print "###########################################################################################"

    ########### Launch parsing and return the result#####################################
    if not options.update_bkk:
        if not options.all_GTs:
            print "\nThe most recent global tags and all new subsequent local tags for specified partition and datatype are:"
            for partition in partitions:
                print "\n\tFor %s:" % partition, last_gt_lts(
                    partition, datatype, options.rel_notes)
        else:
            print "\nThe set of all global tags for specified partition and datatype are:"
            for partition in partitions:
                print "\n\tFor %s:" % partition, all_gts(
                    partition, datatype, options.rel_notes)
    elif options.update_bkk and datatype == "BK":
        BK_tags = {"DDDB": [], "LHCBCOND": [], "SIMCOND": []}
        if not options.all_GTs:
            print "\nThe most recent global tags with the 'bookkeeping' property are:"
            for partition in partitions:
                gt_lts = last_gt_lts(partition, datatype, options.rel_notes)
                if gt_lts:
                    gt, ltgs = gt_lts
                    BK_tags[partition].append(gt)
        else:
            print "\nAll global tags with the 'bookkeeping' property are:"
            for partition in partitions:
                gts = all_gts(partition, datatype, options.rel_notes)
                if gts:
                    BK_tags[partition] = gts
        print "\t", BK_tags

        ans = None
        while ans is None:
            ans = raw_input(
                "\nDo you really want to update the Bookkeeping database (Yes,[No])? "
            )
            if not ans: ans = "No"
            if ans not in ["Yes", "No"]:
                print "You have to type exactly 'Yes' or 'No'"
                ans = None

        if ans == "No":
            print "...\nBookkeeping database update was cancelled by user. No changes were done to the db."
            return 0

        from DIRAC.Core.Base.Script import initialize
        initialize(enableCommandLine=False)
        from LHCbDIRAC.NewBookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
        cl = BookkeepingClient()
        retVal = cl.insertTag(BK_tags)
        if retVal['OK']:
            print "Bookkeeping database was updated."
            print retVal['Value']
        else:
            print retVal['Message']
    else:
        print "\nThe Bookkeeping database can only be updated with 'BK' data type global tags.\n\
The update process wasn't done."

        return 1
Beispiel #21
0
def main():
    from optparse import OptionParser
    import sys
    
    parser = OptionParser()
    
    # Output format options
    parser.add_option("-t", "--template", dest="template",
                        help="use page template to wrap wiki output", metavar="TPLTFILE")
    parser.add_option("-n", "--number", dest="number",
                        help="number the headings in the body and table of contents")
    parser.add_option("-l", "--levels", dest="levels", type="int",
                        help="create toc to depth LEVELS", metavar="LEVELS")
    parser.add_option("-c", "--skiptoc", dest="toc", action="store_false",
                        help="leave toc out, even if template has slot")
    parser.add_option("-u", "--unlink", dest="links", action="store_false",
                        help="don't create named anchors for toc links")
    parser.add_option("-a", "--autolink", dest="autolink", action="store_false",
                        help="autolink wiki words that don't exist")
    parser.add_option("-w", "--tabwidth", dest="tabwidth", type="int",
                        help="replace tabs by WIDTH spaces", metavar="WIDTH")
    parser.add_option("-m", "--multibreak", dest="multibreak", action="store_true",
                        help="don't collapse multiple line breaks")
    parser.add_option("-r", "--raw", dest="raw", action="store_true",
                        help="raw wiki translation -- no wrapping, no toc, no links")
    parser.add_option("-p", "--mainpage", dest="mainpage", metavar="PAGENAME",
                        help="set main page to PAGENAME")
    
    # Batch / Location options
    parser.add_option("-s", "--srcdir", dest="srcdir",
                        help="wiki format sources in SRCDIR", metavar="SRCDIR")
    parser.add_option("-d", "--destdir", dest="destdir",
                        help="write html output into DESTDIR", metavar="DESTDIR")
    parser.add_option("-e", "--stale", dest="all", action="store_true",
                        help="convert all wiki files that are stale or missing from DESTDIR")
    
    
    parser.set_default('toc', True)
    parser.set_default('links', True)
    parser.set_default('template', None)
    parser.set_default('number', False)
    parser.set_default('levels', 3)
    parser.set_default('tabwidth', 8)
    parser.set_default('multibreak', False)
    parser.set_default('mainpage', "MainPage")  # Identity of index
    
    parser.set_default('srcdir', os.getcwd())
    parser.set_default('destdir', None)
    parser.set_default('all', False)

    # Parse the command line
    (options, args) = parser.parse_args()    
    
    if options.template is None:
        options.template = DEFAULT_TEMPLATE
    elif os.path.exists(options.template):
        options.template = file(options.template).read()
    else:
        print "Template not found: %s" % options.template
        parser.print_usage()
        sys.exit()
    #sys.exit()
    for wikiname, htmldata in wikify(args, options):
        if options.destdir:
            print wikiname + ":",
            if htmldata is not None:
                print htmldata
            else:
                print "Complete."
        elif htmldata is not None:
            print htmldata
Beispiel #22
0
def main_common(rmsd_func):
    from optparse import OptionParser
    usage = "\n" \
            "    %prog [options] xyzfname1 xyzfname2\n" \
            "    %prog [options] -F FILE\n" \
            "    %prog -h"
    parser = OptionParser(usage=usage)
    parser.set_default("no_h", False)
    parser.add_option('-H', "--no-h", dest="no_h",
                      action="store_true",
                      help="does not include hydrogen")
    parser.add_option('-a', "--atoms", dest="atoms",
                      help="only compare selected atoms, 1-based",
                      metavar="STRING")
    parser.add_option('-A', "--atomsfile", dest="atomsfile",
                      help="read the selected atoms from file",
                      metavar="FILE")
    parser.add_option('-b', "--atoms1", dest="atoms1",
                      help="the selected atoms for molecule 1",
                      metavar="STRING")
    parser.add_option('-B', "--atoms1file", dest="atoms1file",
                      help="read the selected atoms from file",
                      metavar="FILE")
    parser.add_option('-c', "--atoms2", dest="atoms2",
                      help="the selected atoms for molecule 2",
                      metavar="STRING")
    parser.add_option('-C', "--atoms2file", dest="atoms2file",
                      help="read the selected atoms from file",
                      metavar="FILE")
    parser.add_option('-F', "--files", dest="files",
                      help="read the compare file lists from FILE",
                      metavar="FILE")
    parser.add_option('-s', '--loop-step', 
                      dest='loop_step',
                      type='int',
                      help='logical symmetry: loop step')
    parser.add_option('-m', '--mirror',
                      dest='mirror',
                      action='store_true',
                      help='also consider the mirror molecule')
    parser.add_option('-v', '--verbose',
                      dest='verbose',
                      action='store_true',
                      help='be verbose')
    (options, args) = parser.parse_args()

    if len(args) == 2 and options.files is None:
        pass
    elif len(args) == 0 and options.files:
        pass
    else:
        parser.error("incorrect number of arguments")

    if (options.atoms and options.atomsfile) or \
       (options.atoms1 and options.atoms1file) or \
       (options.atoms2 and options.atoms2file):
        parser.error("options conflict")

    atoms1 = None
    atoms2 = None
    if options.atomsfile:
        options.atoms = file(options.atomsfile).read()
    if options.atoms:
        atoms1 = list(frame.parseframe(options.atoms))
        atoms2 = atoms1[:]
    if options.atoms1file:
        options.atoms1 = file(options.atoms1file).read()
    if options.atoms1:
        atoms1 = frame.parseframe(options.atoms1)

    if options.atoms2file:
        options.atoms2 = file(options.atoms2file).read()
    if options.atoms2:
        atoms2 = frame.parseframe(options.atoms2)

    filelists = []
    if options.files is not None:
        filelists = [line.split() for line in file(options.files).readlines()]
    else:
        filelists = [args]

    from itcc.molecule import read

    mol_cache = {}
    for filepair in filelists:
        fname1 = filepair[0]
        fname2 = filepair[1]
        if options.verbose:
            print fname1, fname2,
        mol1 = cache_mol(fname1, mol_cache)
        mol2 = cache_mol(fname2, mol_cache)

        if options.no_h:
            if atoms1 is None:
                atoms1 = range(len(mol1))
            atoms1 = [x for x in atoms1 if mol1.atoms[x].no != 1]

        if options.no_h:
            if atoms2 is None:
                atoms2_new = range(len(mol2))
            else:
                atoms2_new = atoms2
            atoms2_new = [x for x in atoms2_new if mol2.atoms[x].no != 1]
        else:
            atoms2_new = atoms2
        print rmsd_func(mol1, mol2, atoms1, atoms2_new, 
                        options.mirror, options.loop_step)
Beispiel #23
0
def initial_config():

    traffic_models = ["infinite", "none", "tunnel"]
    

    # Dictionary of default variables
    node_defaults = dict()

    # Get the arg parser and add config file as an option
    arg_parser = ArgumentParser(add_help=False)
    arg_parser.add_argument("-c", "--config-file", help="set config-file name")
    
    
    #(known, args)=arg_parser.parse_known_args()
    known=arg_parser.parse_known_args()[0]
    
    # Setup dev logger
    # import log config template from lincolnlogs
    log_config = deepcopy(digital_ll.log_config)
    # set the log level
#    log_config["loggers"]["developer"]["level"] = known.log_level

    logging.config.dictConfig(log_config)
    dev_log = logging.getLogger('developer')

    f = digital_ll.ContextFilter()
    dev_log.addFilter(f)
    
    # declare config parser
    conf_parser = SafeConfigParser(allow_no_value=True)
    
    # if given a config file, try to parse it, otherwise, skip that step
    if known.config_file is not None:
        # load config file
        file_list = conf_parser.read(known.config_file)
        
        if len(file_list) == 0:
            print "File '%s' not found" % known.config_file
            sys.exit(1)
        
        sections = conf_parser.sections()
        # config file read successfully: Update command line defaults as needed
        for section in sections:
            # get list of all items in each section 
            section_entries = conf_parser.options(section)
            
            # update defaults for any matching variable names
            # iterate through each entry in each section
            for key in section_entries:
                
                # update defaults from the value in the config file
                node_defaults[key] = conf_parser.get(section, key)
                
                # handle special case of converting string representation of bools to bools
                if (node_defaults[key] == "True") | (node_defaults[key] == "False"):
                    node_defaults[key] = (node_defaults[key] == "True")
                    
    # store values from arg parser into defaults of config parser, 
    # so everything is on the same page
    known_dict = vars(known)
    for key in known_dict:
        node_defaults[key] = known_dict[key]  
       
    mods = modulation_utils.type_1_mods()    
    demods = modulation_utils.type_1_demods()

    # Create Options Parser:
    parser = OptionParser (option_class=eng_option, conflict_handler="resolve")
    expert_grp = parser.add_option_group("Expert")

    parser.add_option("--show-gpl", action="store_true", default=False,
                      help="display the full GPL license for this program")

    # note this option is actually handled by the argparse module. This is only included 
    # so the config file option shows up in the help file
    parser.add_option("-c", "--config-file", help="set config-file name")    
    parser.add_option("--log-level", default="INFO", 
                      help="verbosity of debug log. options are %s" % log_levels)
    parser.add_option("","--pcktlog", default="./tdma_packetlog.xml", help="file to save packet log to")
    parser.add_option("","--statelog",default="./tdma_statelog.xml",help="file to save state log to")
    parser.add_option("","--agentlog",default="./agent.log",help="file to save state log to")
    parser.add_option("","--dblog",default="./database.log",help="file to save state log to")
    
    parser.add_option("--start-time", type="float", default=float(0), 
                      help=("Start time of the test, in seconds since 1970. " + 
                             "Starts immediately if start time is in the past. " +
                             "[default=%default]  " + 
                             "Hint: Use date +%s to get the current epoch time."))
    parser.add_option("--run-duration", type="float", default=float(0), 
                      help=("Run time duration of the test in seconds. " + 
                             "Run forever until control-C if run time is 0. " +
                             "[default=%default]"))
    parser.add_option("", "--node-role", type="choice", choices=["tdma_base", "tdma_mobile"], 
                      default='tdma_mobile',
                      help="Select mac from: %s [default=%%default]"
                            % (', '.join(["tdma_base", "tdma_mobile"])))
    
    parser.add_option("", "--modulation", type="choice", choices=["gmsk"], 
                      default='gmsk',
                      help="Select mac type from: %s [default=%%default]"
                            % (', '.join(["gmsk"])))   
    
    parser.add_option("", "--gpsbug-cal-duration", type="float", default=10.0, 
                      help="Duration to run time calibration")
    
    
    # TODO: clean up this option. Store available traffic generation schemes somehow
    parser.add_option("--traffic-generation", type="choice", choices=traffic_models,
                      default="none",
                      help="Select traffic generation method: %s [default=%%default]" % (", ".join(traffic_models)))


    parser.add_option("", "--agent-epoch-duration", type="int", default=20, help="agent epoch length, in frames")
    parser.add_option("", "--agent-type", type="string", default="q_learner", help="Which agent is used")

    receive_path_gmsk.add_options(parser, expert_grp)

#    normally this would be in transmit path add option
    parser.add_option("", "--tx-access-code", type="string",
                      default="1", 
                      help="set transmitter access code 64 1s and 0s [default=%default]")


    parser.add_option("", "--digital-scale-factor", type="float", default=0.5, 
                      help="digital amplitude control for transmit, between 0.0 and 1.0")
    
    uhd_receiver.add_options(parser)
    uhd_transmitter.add_options(parser)

    for mod in mods.values():
        mod.add_options(expert_grp)

    for mod in demods.values():
        mod.add_options(expert_grp)


    channelizer.rx_channelizer.add_options(parser)
    channelizer.tx_channelizer.add_options(parser)
    
    base_rl_agent_protocol_manager.add_options(parser,expert_grp)
    mobile_rl_agent_protocol_manager.add_options(parser,expert_grp)
    RL_Agent_Wrapper.add_options(parser,expert_grp)
    Q_Learner.add_options(parser,expert_grp)
    Sarsa_Learner.add_options(parser,expert_grp)

    tdma_base_sm.add_options(parser,expert_grp)
    tdma_mobile_sm.add_options(parser,expert_grp)
    tdma_controller.add_options(parser,expert_grp)
    Infinite_Backlog_PDU_Streamer.add_options(parser,expert_grp)
    Tunnel_Handler_PDU_Streamer.add_options(parser,expert_grp)
    beacon_consumer.add_options(parser,expert_grp)
    
    # get list of all option defaults in the current option list
    opt_list = parser.defaults
    
    # update defaults for node-options modules
    # iterate through each entry in node_defaults
    for key in node_defaults:
        #dev_log.debug('Searching opt_list for option: %s', key)
        # if there's a match to an entry in opt_list
        if key in opt_list:
            # update default options from the value in gr_defaults
            #dev_log.debug('Updating option default: %s from node_defaults', key)
            parser.set_default(key, node_defaults[key])
        else:
#            print "Ini file option ", key, "doesn't have a field in parser"
#            assert False
            dev_log.warning('Option %s from ini file not present in parser',key)

    for key in opt_list:
        if key not in node_defaults:
            dev_log.warning('Option %s from parser not in ini file',key)
        
    
    
    (options, args) = parser.parse_args ()
        
    
    # update log level
    dev_log.info("new log level is %s", options.log_level)
    log_config["loggers"]["developer"]["level"] = options.log_level
    
    # update agent and database file paths
    expanded_agentlog = os.path.expandvars(os.path.expanduser(options.agentlog))
    abs_agentlog = os.path.abspath(expanded_agentlog)
    expanded_dblog = os.path.expandvars(os.path.expanduser(options.dblog))
    abs_dblog = os.path.abspath(expanded_dblog)
    
    log_config["handlers"]["agent"]["filename"] = abs_agentlog
    log_config["handlers"]["database"]["filename"] = abs_dblog
    
    
    logging.config.dictConfig(log_config)
    dev_log = logging.getLogger('developer')
    
    dev_log.debug("hi")
     
    #if options.pcktlog != -1:
    #    lincolnlog.LincolnLogLayout('debug', -1, options.pcktlog , -1, -1)
    #else:
    #    lincolnlog.LincolnLogLayout('debug', -1, -1, -1, -1)
    lincolnlog.LincolnLogLayout('debug', -1, options.pcktlog , options.statelog, -1)      

    ll_logging     = lincolnlog.LincolnLog(__name__)

    if len(args) != 0:
        parser.print_help(sys.stderr)
        sys.exit(1)
            
    # parse rest of command line args
    if len(args) != 0:
        parser.print_help(sys.stderr)
        sys.exit(1)
            
        # cannot proceed without a config file, so exit
    if known.config_file is None:
        print "No config file provided, exiting"
        sys.exit(1)
        
    

    return(mods, demods, options, ll_logging, dev_log)
Beispiel #24
0
        "-r", "--raw", dest="raw", action="store_true", help="raw wiki translation -- no wrapping, no toc, no links"
    )
    parser.add_option("-p", "--mainpage", dest="mainpage", metavar="PAGENAME", help="set main page to PAGENAME")

    # Batch / Location options
    parser.add_option("-s", "--srcdir", dest="srcdir", help="wiki format sources in SRCDIR", metavar="SRCDIR")
    parser.add_option("-d", "--destdir", dest="destdir", help="write html output into DESTDIR", metavar="DESTDIR")
    parser.add_option(
        "-e",
        "--stale",
        dest="all",
        action="store_true",
        help="convert all wiki files that are stale or missing from DESTDIR",
    )

    parser.set_default("toc", True)
    parser.set_default("links", True)
    parser.set_default("template", None)
    parser.set_default("number", False)
    parser.set_default("levels", 3)
    parser.set_default("tabwidth", 8)
    parser.set_default("multibreak", False)
    parser.set_default("mainpage", "MainPage")  # Identity of index

    parser.set_default("srcdir", os.getcwd())
    parser.set_default("destdir", None)
    parser.set_default("all", False)

    # Parse the command line
    (options, args) = parser.parse_args()
Beispiel #25
0
                        help="don't collapse multiple line breaks")
    parser.add_option("-r", "--raw", dest="raw", action="store_true",
                        help="raw wiki translation -- no wrapping, no toc, no links")
    parser.add_option("-p", "--mainpage", dest="mainpage", metavar="PAGENAME",
                        help="set main page to PAGENAME")
    
    # Batch / Location options
    parser.add_option("-s", "--srcdir", dest="srcdir",
                        help="wiki format sources in SRCDIR", metavar="SRCDIR")
    parser.add_option("-d", "--destdir", dest="destdir",
                        help="write html output into DESTDIR", metavar="DESTDIR")
    parser.add_option("-e", "--stale", dest="all", action="store_true",
                        help="convert all wiki files that are stale or missing from DESTDIR")
    
    
    parser.set_default('toc', True)
    parser.set_default('links', True)
    parser.set_default('template', None)
    parser.set_default('number', False)
    parser.set_default('levels', 3)
    parser.set_default('tabwidth', 8)
    parser.set_default('multibreak', False)
    parser.set_default('mainpage', "MainPage")  # Identity of index
    
    parser.set_default('srcdir', os.getcwd())
    parser.set_default('destdir', None)
    parser.set_default('all', False)

    # Parse the command line
    (options, args) = parser.parse_args()    
    
Beispiel #26
0
    sigbkg.write('Exactly 1 Sec Vertex\n')
    sigbkg.write('MT > 50\n')
    sigbkg.close()

    return 0


"""
"""
if __name__ == "__main__":
    import sys
    # Stash and clear argv around the ROOT import: the save/restore below
    # exists because importing ROOT otherwise consumes our command line
    # (PyROOT inspects sys.argv at import time).
    tmpargv = sys.argv[:]  # [:] for a copy, not reference
    sys.argv = []
    from ROOT import gROOT, gStyle, gSystem
    sys.argv = tmpargv
    from optparse import OptionParser
    usage = """
	usage: %prog [options] input_directory
	"""
    parser = OptionParser(usage=usage)
    addPlotterOptions(parser)
    # Default output directory for the plots produced by main().
    parser.set_default(dest='outDir', value='singleTop')
    (opt, args) = parser.parse_args()

    gROOT.SetBatch(True)     # no graphics windows; run ROOT in batch mode
    gStyle.SetOptTitle(0)    # suppress histogram titles
    gStyle.SetOptStat(0)     # suppress the statistics box
    gSystem.Load('libUserCodeTopMassSecVtx.so')

    exit(main(args, opt))
Beispiel #27
0
def assign_opts(*args, **kwargs):
    """
    Build an option parser from a description mapping and parse arguments.

    ``args`` is either a single dict (or, failing that, the args tuple
    itself) mapping each option's dest name to a sequence of at least
    four items::

        (short_letter_or_None, long_name, help_text, default
         [, type [, list_element_type]])

    Slot 4 may also hold the pseudo-types "func"/"function" or "eval",
    which are recorded in ``option_properties`` rather than passed to the
    option; "list"/"tuple" options may carry an element type in slot 5.

    The argument vector is ``kwargs['my_args']`` when provided and not
    None, otherwise ``sys.argv[1:]``.  If the parsed options carry a
    ``problem_file``, that module is imported, its attributes override
    the parser defaults, the arguments are re-parsed, and function-typed
    options are bound directly from the module.

    Raises ValueError when a description has fewer than four items.

    NOTE(review): Python 2 code (``except Exception, e``,
    ``types.NoneType``); the snippet ends without an explicit return —
    presumably a trailing ``return options, args`` was lost — confirm
    against the original source.
    """
    # Explicit my_args (when not None) wins over sys.argv.
    if 'my_args' in kwargs.keys() and kwargs['my_args'] != None:
        my_args = kwargs.pop('my_args')
    else:
        my_args = sys.argv[1:] 
    option_defaults = {}
    # A single positional dict is used directly; otherwise keep the tuple.
    if len( args ) == 1:
        option_defaults = args[0]
    else:
        option_defaults = args
    # Dests whose "type" is the pseudo-type function/eval are tracked here.
    option_properties = {'functions':[], 'eval':[]}
    option_list = []
    copy_props = []
    for i in option_defaults.keys():
        copy_props.append( i )
        if len( option_defaults[i] ) < 4:
            raise ValueError("Not enough options passed in defaults for " \
                             "%s." % i)
        # Slot 0: optional one-letter short flag (None means long-only).
        if option_defaults[i][0] != None:
            short = '-' + option_defaults[i][0]
        else:
            short = None
        long_opt = '--' + option_defaults[i][1]
        dest = str(i)
        desc = option_defaults[i][2]
        default = option_defaults[i][3]
        # Slot 4 (optional): the option's type, possibly a pseudo-type.
        if len( option_defaults[i] ) > 4:
            default_type = option_defaults[i][4]
        else:
            default_type = None
        ltype = None
        if (default_type == "func" or default_type == "function"):
            option_properties['functions'].append( i )
        if default_type == "eval":
            option_properties['eval'].append( i )
        # Slot 5 (optional): element type for list/tuple-typed options.
        if (default_type == "list" or default_type == "tuple") \
                and len(option_defaults[i]) > 5:
            ltype = option_defaults[i][5]
        # Build the OptionExtended, passing only the keywords that apply
        # (no type / typed / typed with element type), with or without a
        # short flag.
        if default_type == None or default_type == types.NoneType:
            if short != None:
                opt = OptionExtended(short, long_opt, dest=dest, help=desc, \
                                    default=default)
            else:
                opt = OptionExtended(long_opt, dest=dest, help=desc, \
                                     default=default)
        elif ltype == None:
            if short != None:
                opt = OptionExtended(short, long_opt, dest=dest, help=desc, \
                                     default=default, type = default_type)
            else:
                opt = OptionExtended(long_opt, dest=dest, help=desc, \
                                     default=default, type = default_type)
        else:
            if short != None:
                opt = OptionExtended(short, long_opt, dest=dest, help=desc,
                                     default=default, type = default_type, 
                                     ltype = ltype) 
            else:
                opt = OptionExtended(long_opt, dest=dest, help=desc, 
                                     default=default, type = default_type,
                                     ltype = ltype)
        option_list.append( opt )
    parser = OptionParser(option_class = OptionExtended, \
                          option_list = option_list)
    options, args = parser.parse_args( args=my_args )

    # Older description dicts may omit problem_file entirely.
    if not hasattr(options, 'problem_file'):
        options.problem_file = None

    if ( options.problem_file != None ):
        # Normalize a path-like spec ('pkg/mod.py') to module form
        # ('pkg.mod') before importing.
        options.problem_file = options.problem_file.replace('/','.')
        if options.problem_file.endswith('.py'):
            options.problem_file = options.problem_file[:-3]
        try:
            mod_list = options.problem_file.split('.')
            if len(mod_list) == 1 and mod_list[0].find('/') >= 0:
                mod_list = options.problem_file.split('/')
            #root = mod_list[0]
            # __import__ returns the top-level package; walk down to the
            # actual submodule.
            prob_mod = __import__( options.problem_file )
            for mod_name in mod_list[1:]:
                prob_mod = getattr( prob_mod, mod_name )
        except Exception:
            logging.error("Couldn't find problem file: %s" % \
                          options.problem_file)
            raise
        # Any attribute the problem module defines overrides the parser
        # default for the same-named option; missing attributes are fine.
        for attr in option_defaults.keys():
            try:
                file_val = getattr(prob_mod, attr)
                parser.set_default(attr, file_val)
            except:
                pass
        # Re-parse so the new defaults take effect (explicit command-line
        # values still win).
        options, args = parser.parse_args( args = my_args )
        # Function-typed options are bound straight from the module, not
        # parsed from the command line.
        for func_name in option_properties['functions']:
            try:
                attr_val = getattr( prob_mod, func_name )
                #logging.debug("Setting function: %s, value: %s", 
                #    func_name, attr_val )
                setattr( options, func_name, attr_val )
            except Exception, e:
                logging.debug("Function %s not found in module.", func_name)
                logging.debug(e)
Beispiel #28
0
 def __init__(self):
     """Describe a CCA 'component' entity and build its command-line parser.

     All metadata fields (SIDL symbol, implementation language, include
     and library paths, per-language client libraries, port and
     dependency library lists) start out empty; an optparse parser for
     the '--type=component' interface is constructed and stored in
     self.parser for later parsing.
     """
     CCAXMLEntity.__init__(self)
     self.kind = 'component'
     self.libInfoXML = None
     self.options = None
     self.args = None
     self.parser = None
     self.symbol = ''
     self.lang = ''
     self.name = ''
     self.id = ''
     self.palletAlias = ''
     self.includePath = ''
     self.libPath = ''
     self.staticLib = ''
     self.sharedLib = ''
     self.libtoolLib = ''
     self.cClientLib=''
     self.cClientHeaderPath = ''
     self.f77ClientLib=''
     self.f90ClientLib=''
     self.f90ClientHeaderPath = ''
     self.cxxClientLib = ''
     self.cxxClientHeaderPath = ''
     self.portLibPath = ''
     self.usePortLibList = []
     self.providePortLibList = []
     self.dependLibList = []
     
     usage = "usage: %prog --type=component [options] SIDL_SYMBOL"
     parser = OptionParser(usage=usage)
     # Identity options.
     parser.add_option("-n", "--name", dest = "name",
               help="Component name (default is component SIDL symbol)")
     parser.add_option("-i", "--id", dest = "id",
               help = "Component unique id \n(default is USER@HOST:component_name:time_stamp)", 
               metavar="COMPONENT_ID")
     parser.add_option("-l", "--language", dest = "lang",
               help = "Component implementation language (valid values are c, cxx, f77, f90, python, java)")
     parser.add_option("-a", "--alias", dest = "palletAlias",
               help = "Component alias to be used in GUI's palletes (default is component name)",
               metavar = "PALLET_ALIAS")
     
     # Search paths for headers and libraries.
     parser.add_option("-I", "--include-path", dest = "includePath",
               help = "Path to headers and mod files for the component and its client libraries",
               metavar = "DIR")
     parser.add_option("-L", "--libpath", dest = "libPath",
               help = "Path to component and client libraries",
               metavar = "LIB")
     
     # Library archive variants for the component itself.
     parser.add_option("--shared-lib", dest = "sharedLib",
               help = "Shared library archive for the component. Library must be located in directory specified using --libpath",
               metavar = "LIB")
     parser.add_option("--static-lib", dest = "staticLib",
               help = "Static library archive for the component. Library must be located in directory specified using --libpath",
               metavar = "LIB")
     parser.add_option("--libtool-lib", dest = "libtoolLib",
               help = "libtool-generated library archive for the component. Library must be located in directory specified using --libpath",
               metavar = "LIB")
     
     # Per-language client library / header-path options.
     parser.add_option("--c-client-lib", dest = "cClientLib",
                help = "Component C client library name. Specifying full path overrides directory specified in --libpath",
                metavar = "LIB")
     parser.add_option("--c-client-headerpath", dest = "cClientHeaderPath",
                help = "Path to component C client header files (overrides value specified using --include-path)",
                metavar = "DIR")
     
     parser.add_option("--f77-client-lib", dest = "f77ClientLib",
                help = "Component F77 client library name. Specifying full path overrides directory specified in --libpath",
                metavar = "LIB")
     
     # NOTE(review): help text below says "C client" for the F90 option —
     # looks like a copy/paste slip; confirm before changing the string.
     parser.add_option("--f90-client-lib", dest = "f90ClientLib",
                help = "Component C client library name. Specifying full path overrides directory specified in --libpath",
                metavar = "LIB")
     parser.add_option("--f90-client-headerpath", dest = "f90ClientHeaderPath",
                help = "Path to component F90 client header and mod files (overrides value specified using --include-path)",
                metavar = "DIR")
     
     parser.add_option("--cxx-client-lib", dest = "cxxClientLib",
                help = "Component CXX client library name. Specifying full path overrides directory specified in --libpath",
                metavar = "LIB")
     parser.add_option("--cxx-client-headerpath", dest = "cxxClientHeaderPath",
                help = "Path to component CXX client header files (overrides value specified using --include-path)",
                metavar = "DIR")
     
     # Port-related options; the two list options may be repeated.
     parser.add_option("--port-libpath", dest = "portLibPath",
                help = "Path to libraries representing ports used or provided by the component. Default is the same as --libpath",
                metavar = "DIR")
     parser.add_option("-u", "--useportlib", dest = "usePortLibList", action = "append",
                help = """Static library corresponding to port used by the component. The library is expected to be located
                          in the directory specified using --port-libpath. Full path to the library is also accepted. This option may be repeated.""",
                metavar = "LIB")
     parser.add_option("-p", "--provideportlib", dest = "providePortLibList", action = "append", 
                help = """Static library corresponding to port provided by the component. The library is expected to be located
                          in the directory specified using --port-libpath. Full path to the library is also accepted. 
                          This option may be repeated.""",
                metavar = "LIB")
     parser.add_option("-d", "--depend-lib", dest = "dependLibList", action = "append",
                help = """Full path to external libraries on which this component depends. This option may be repeated.""",
                metavar = "LIB")
     
     # append-actions need explicit empty-list defaults.
     parser.set_default('usePortLibList', [])
     parser.set_default('providePortLibList', [])
     parser.set_default('dependLibList', [])
     self.parser = parser
     return
def main():
    """Create a new global tag in a CondDB partition and update release notes.

    Expects positional arguments: partition, new_globalTag, base_globalTag
    and optionally local tags.  With no local tags the base tag is cloned
    under the new name; otherwise each DB folder is retagged to the first
    matching local tag (falling back to the base global tag).  A hash sum
    of the new tag's payload is added to the release notes.

    Returns 0 on success or user abort, 1 on IO failure during tagging.
    NOTE(review): Python 2 code (``raise Exception, ...``, ``raw_input``).
    """
    ###########################################################################
    # Configure the parser
    ###########################################################################
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options] partition new_globalTag "
        "base_globalTag [localTag1 localTag2 ... localTagN]",
        version=__version__,
        description=
        """This script performs global tagging in requested partition of CondDB and does the update of
the release notes. The user has to provide a source (partition, e.g.: DDDB, LHCBCOND or SIMCOND),
a name for new global tag, a base global tag name and the local tag(s) for add-on. The script will
ask for the contributor name.""")

    parser.add_option("--rel-notes",
                      type="string",
                      help="XML file containing the release notes")
    parser.add_option(
        "-m",
        "--message",
        type="string",
        help="Message to include in the release notes about global tag.")
    parser.add_option("-c",
                      "--contributor",
                      type="string",
                      help="Name of the contributor of the global tag.")
    parser.add_option(
        "-d",
        "--datatypes",
        type="string",
        help="List of data types new global tag is intended for.")
    parser.add_option("--hash-alg",
                      type="string",
                      help="Hash algorithm to compute the sum for new global"
                      " tag (md5, sha1, etc). Default: sha1.")
    parser.add_option(
        "-n",
        "--dry-run",
        action="store_true",
        help="Skip the actual global tagging and the update of release notes.")

    # Release notes default to the copy shipped next to the SQLite DBs.
    parser.set_default(
        "rel_notes",
        os.path.normpath(
            os.path.join(os.environ["SQLITEDBPATH"], "..", "doc",
                         "release_notes.xml")))
    parser.set_default("message", None)
    parser.set_default("contributor", None)
    parser.set_default("datatypes", [])
    parser.set_default("hash_alg", "sha1")

    # parse command line
    options, args = parser.parse_args()

    ###########################################################################
    # Prepare local logger
    ###########################################################################
    import logging
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # set the global stream handler
    from CondDBUI import LOG_FORMAT
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    ###########################################################################
    # Preliminary positional arguments verifications
    ###########################################################################
    if len(args) < 3:
        parser.error("Not enough arguments. Try with --help.")
    # prepare contributor name (prompt interactively when not given)
    if not options.contributor:
        options.contributor = raw_input("Contributor: ")
    log.info("New global tag by: %s" % options.contributor)

    # prepare the message; literal "\n" sequences become real newlines
    if not options.message:
        options.message = ""
    else:
        options.message = options.message.replace("\\n", "\n")
        log.info("Message for the changes: '%s'" % options.message)

    ###########################################################################
    # Positional arguments redefinition and partition verification
    ###########################################################################
    partition, new_globalTag, base_globalTag = args[:3]
    localTags = args[3:]

    partitions = ["DDDB", "LHCBCOND", "SIMCOND", "DQFLAGS", "CALIBOFF"]
    if partition not in partitions:
        parser.error("'%s' is not a valid partition name. Allowed: %s" % \
                     (partition, partitions))

    # Checking requested hashing algorithm to be available
    # (hashlib.algorithms only exists on Python >= 2.7, hence the branch).
    import hashlib
    hashAlg = options.hash_alg
    if sys.version_info >= (2, 7):
        if hashAlg not in hashlib.algorithms:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg
    else:
        if hashAlg not in [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
        ]:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg
    initialHashSumObj = getattr(hashlib, hashAlg)()

    # Processing the data type option string: split the comma-separated
    # --datatypes value character by character into a list of words.
    if options.datatypes != []:
        datatypes = []
        word = ""
        for i in options.datatypes:
            if i != ",":
                word += i
            elif i == ",":
                datatypes.append(word)
                word = ""
        datatypes.append(word)
    else:
        datatypes = options.datatypes
    log.info("New global tag name: %s" % new_globalTag)
    log.info("Base global tag name: %s" % base_globalTag)
    log.info("Local tags set for add-on: %s" % localTags)
    log.info("This global tag is intended for the following data types: %s" %
             datatypes)
    log.info("Hash sum will be computed using: %s" % hashAlg)

    ###########################################################################
    # Connecting to the DB
    ###########################################################################
    import CondDBUI.Admin.Management
    masterURL = CondDBUI.Admin.Management._masterCoolURL(partition)
    log.info("Master DB file = %s" % masterURL)
    log.info("Release Notes file = %s" % options.rel_notes)
    db = CondDBUI.CondDB(masterURL, readOnly=False)

    ###########################################################################
    # Processing the case of cloning the global tag with new name
    ###########################################################################
    if len(localTags) == 0:
        # No local tags: just clone the base global tag under the new name.
        rel_notes = updateRelNotes(db, options.contributor, new_globalTag,
                                   base_globalTag, localTags, datatypes,
                                   options.rel_notes,
                                   options.message.splitlines())
        log.info("Script entered cloning mode!")
        if not reallySure():
            return 0
        log.info("Cloning tag %s as %s" % (base_globalTag, new_globalTag))
        if not options.dry_run:
            # Performing global tag cloning
            try:
                db.cloneTag("/", base_globalTag, new_globalTag)
            except IOError:
                log.error("\nSorry.. IO error happened, probably due to some \
                 hardware problems.\nGlobal tagging wasn't done.\nPlease try again."
                          )
                return 1
            # Calculate the hash sum for just created tag
            log.info("Hashing tag '%s'..." % new_globalTag)
            hashSumObj = db.payloadToHash(initialHashSumObj, tag=new_globalTag)
            # Add hash sum to release notes
            rel_notes.addHashToGlobalTag(hashSumObj, partition, new_globalTag)
            log.info("Done.")

            # Write to release_notes.xml file new global tag entry
            rel_notes.write()
            log.info("Updated release notes at %s" % options.rel_notes)
        else:
            log.info("Cloning wasn't performed. Dry-run mode was active.")
        return 0

    ###########################################################################
    # Find the local tag of each Folder for the preferred tag
    ###########################################################################
    # For every folder, resolve the first tag (in localTags order, then the
    # base global tag) that exists for it; unresolved folders stay None.
    log.info("Collecting tags information ...")
    nodes_tags = {}
    found_tags = set()
    nodes = db.getAllNodes()
    for n in filter(db.db.existsFolder, nodes):
        f = db.getCOOLNode(n)
        nodes_tags[n] = None
        for r in localTags + [base_globalTag]:
            try:
                loc = db.resolveTag(f, r)
                nodes_tags[n] = loc
                found_tags.add(r)
                break
            except:
                # tag not found
                pass
    not_found = set(localTags + [base_globalTag]) - found_tags
    if not_found:
        log.warning("You have missing tags:")
        for t in not_found:
            log.warning("\t" + t)

    # Folders with no resolvable tag are dropped from the tagging map.
    ignored_files = []
    for n in nodes_tags.keys():
        if nodes_tags[n] is None:
            ignored_files.append(n)
            del nodes_tags[n]
    if ignored_files:
        log.warning("The following files will be ignored by the tag:" +
                    '\n\t' + '\n\t'.join(ignored_files))

    ###########################################################################
    # User verification and final modification of DB and Release notes
    ###########################################################################
    rel_notes = updateRelNotes(db, options.contributor, new_globalTag,
                               base_globalTag, localTags, datatypes,
                               options.rel_notes, options.message.splitlines())
    if not reallySure():
        return 0
    if not options.dry_run:
        if partition == "DDDB":
            log.info("Be patient, DDDB tagging is time consuming...")
        log.info("Tagging ...")
        # Apply changes to connected DB
        try:
            db.moveTagOnNodes('/', new_globalTag, nodes_tags)
        except IOError:
            log.error("\nSorry.. IO error happened, probably due to some \
            hardware problems.\nGlobal tagging wasn't done.\nPlease try again."
                      )
            return 1

        # Calculate the hash sum for just created tag
        log.info("Hashing tag '%s'..." % new_globalTag)
        hashSumObj = db.payloadToHash(initialHashSumObj, tag=new_globalTag)
        # Add hash sum to release notes
        rel_notes.addHashToGlobalTag(hashSumObj, partition, new_globalTag)
        log.info("Done.")

        # Write to release_notes.xml file new global tag entry
        rel_notes.write()
        log.info("Updated release notes at %s" % options.rel_notes)
    else:
        log.info("Global tagging wasn't done. Dry-run mode was active.")

    return 0
def main():
    """Commit changes to the official CondDB under a new local tag.

    Expects exactly four positional arguments: changes_source, partition,
    reference_tag and local_tag.  Prepares a diff database against the
    reference tag, analyzes the modified/added files, collects contributor
    and message, updates the release notes and — after an interactive
    confirmation plus a 10-second grace countdown — merges and tags the
    changes into the master DB (skipped in --dry-run).

    Returns 0 on success/no-changes/abort, 1 when the analysis reports
    problems.  NOTE(review): Python 2 code (``print``, ``raw_input``).
    """
    # Configure the parser
    from optparse import OptionParser
    parser = OptionParser(
        usage=
        "%prog [options] changes_source partition reference_tag local_tag",
        description=
        """This script tries to commit changes to the official CondDB, tagging the new
files and updating the release notes. The user has to provide a source
(directory or SQLite file), the partition to modify (DDDB, LHCBCOND or SIMCOND),
a reference tag in the master CondDB to compare the changes to and the local
tag to use for the modified files. The script will ask for the contributor name
and for a short message for the release notes.""")
    parser.add_option(
        "--user-tag",
        type="string",
        help="Tag to be used to extract the changes from the user " +
        "database. It make sense only if changes_source is a " +
        "COOL database. [default = %default]")
    parser.add_option(
        "-d",
        "--dir",
        type="string",
        help=
        "Directory where to put the DB with the differences. [default is current directory]"
    )
    parser.add_option("--rel-notes",
                      type="string",
                      help="XML file containing the release notes")
    parser.add_option("-m",
                      "--message",
                      type="string",
                      help="Message to include in the release notes")
    parser.add_option("-c",
                      "--contributor",
                      type="string",
                      help="Name of the contributor of the patch")
    # --provider is an alias: it writes to the same dest as --contributor.
    parser.add_option("-p",
                      "--provider",
                      type="string",
                      dest="contributor",
                      help="alias for --contributor")
    parser.add_option(
        "-t",
        "--datatypes",
        type="string",
        help=
        "Coma separated string (without spaces) of data types the new local tag is intended for."
    )
    parser.add_option(
        "-n",
        "--dry-run",
        action="store_true",
        help=
        "Skip the actual commit to database and the update of release notes.")
    parser.add_option("-s",
                      "--since",
                      type="string",
                      help="Start of the Interval Of Validity (local time)." +
                      " Format: YYYY-MM-DD[_HH:MM[:SS.SSS]][UTC]")
    parser.add_option("-u",
                      "--until",
                      type="string",
                      help="End of the Interval Of Validity (local time)" +
                      " Format: YYYY-MM-DD[_HH:MM[:SS.SSS]][UTC]")
    parser.add_option("-P",
                      "--patch",
                      type="int",
                      help="numerical id of the patch on savannah")
    parser.set_default("user_tag", "HEAD")
    parser.set_default("dir", None)
    # Release notes default to the copy shipped next to the SQLite DBs.
    parser.set_default(
        "rel_notes",
        os.path.normpath(
            os.path.join(os.environ["SQLITEDBPATH"], "..", "doc",
                         "release_notes.xml")))
    parser.set_default("message", None)
    parser.set_default("contributor", None)
    parser.set_default("datatypes", [])
    parser.set_default("since", None)
    parser.set_default("until", None)

    # parse command line
    options, args = parser.parse_args()

    # check arguments
    if len(args) != 4:
        parser.error("not enough arguments. Try with --help.")

    # Prepare local logger
    import logging
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # set the global stream handler
    from CondDBUI import LOG_FORMAT
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    # decrease verbosity of PyCoolDiff
    import CondDBUI.PyCoolDiff
    CondDBUI.PyCoolDiff._log.setLevel(logging.WARNING)

    # check arguments
    changes_source, partition, reference_tag, local_tag = args

    partitions = ["DDDB", "LHCBCOND", "SIMCOND", "CALIBOFF"]
    if partition not in partitions:
        parser.error("'%s' is not a valid partition name. Allowed: %s" % \
                     (partition, partitions))

    # Processing the data type option string: split the comma-separated
    # --datatypes value character by character into a list of words.
    if options.datatypes != []:
        datatypes = []
        word = ""
        for i in options.datatypes:
            if i != ",":
                word += i
            elif i == ",":
                datatypes.append(word)
                word = ""
        datatypes.append(word)
    else:
        datatypes = options.datatypes
    log.info("This local tag is intended for the following data types: %s" %
             datatypes)

    # Resolve the IOV bounds; missing values fall back to COOL min/max keys.
    from CondDBUI.Admin import timeToValKey
    from PyCool import cool
    since = timeToValKey(options.since, cool.ValidityKeyMin)
    until = timeToValKey(options.until, cool.ValidityKeyMax)
    if since >= until:
        parser.error("Invalid IOV: %s to %s" % (options.since, options.until))

    log.info(
        "Preparing database with changes from data in %s for partition %s" %
        (changes_source, partition))
    log.info("reference tag = %s" % reference_tag)
    if options.user_tag != "HEAD":
        log.info("user tag = %s" % options.user_tag)
    if options.dir:
        log.info("working dir = %s" % options.dir)
        if not os.path.isdir(options.dir):
            os.makedirs(options.dir)
    if options.since:
        log.info("validity from %s" % options.since)
    if options.until:
        log.info("validity until %s" % options.until)

    from CondDBUI.Admin import prepareChangesDB, analyzeChanges
    changesURL, pass1URL = prepareChangesDB(changes_source,
                                            partition,
                                            reference_tag,
                                            usertag=options.user_tag,
                                            destdir=options.dir,
                                            since=since,
                                            until=until)
    log.info("Created changes database: %s" % changesURL)

    log.info("Analyzing changes with respect to head version of master DB")
    # I do not like it, but I do not have a better way right now
    import CondDBUI.Admin.Management
    masterURL = CondDBUI.Admin.Management._masterCoolURL(partition)
    log.info("master = %s" % masterURL)

    if pass1URL:
        # Check the pass1 diff; any reported problem aborts the commit.
        log.info("Analyzing pass1 changes")
        modified, added, problems = analyzeChanges(partition, pass1URL)
        if problems:
            for p in problems:
                log.error("Problem: %s." % p)
            return 1

        if len(modified):
            log.info("Modified %d files:\n\t%s" %
                     (len(modified), '\n\t'.join(modified)))
        if len(added):
            log.info("Added %d files:\n\t%s" %
                     (len(added), '\n\t'.join(added)))
        if (len(modified) + len(added)) == 0:
            log.warning("No changes to apply!")
            return 0

        log.info("Analyzing final changes")

    modified, added, problems = analyzeChanges(partition, changesURL)

    if problems:
        for p in problems:
            log.error("Problem: %s." % p)
        return 1

    if len(modified):
        log.info("Modified %d files:\n\t%s" %
                 (len(modified), '\n\t'.join(modified)))
    if len(added):
        log.info("Added %d files:\n\t%s" % (len(added), '\n\t'.join(added)))

    if (len(modified) + len(added)) == 0:
        log.warning("No changes to apply!")
        return 0

    # Prompt for a contributor when not given on the command line.
    if not options.contributor:
        options.contributor = raw_input("Contributor: ")
    log.info("Changes by: %s" % options.contributor)

    # prepare the message; literal "\n" sequences become real newlines
    if not options.message:
        options.message = _getMessage()

    if not options.message:
        log.warning("Empty message!")
    else:
        options.message = options.message.replace("\\n", "\n")
        log.info("Message for the changes:\n\n%s\n" % options.message)

    from CondDBUI.Admin import ReleaseNotes
    rel_notes = ReleaseNotes(options.rel_notes)
    rel_notes.addNote(contributor=options.contributor,
                      partitions={
                          partition: (local_tag, {
                              'modified': modified,
                              'added': added
                          })
                      },
                      description=options.message.splitlines(),
                      datatypes=datatypes,
                      patch=options.patch)

    # In dry-run mode skip the confirmation prompt ("No") but still write
    # the updated release notes to a local file for inspection.
    ans = None
    if options.dry_run:
        ans = "No"
        msg = "Updated release notes written in "
        dest = "release_notes.xml"
        if options.dir:
            msg += options.dir
            dest = os.path.join(options.dir, dest)
        else:
            msg += "the local directory"
        log.info(msg)
        rel_notes.write(dest)

    while ans is None:
        ans = raw_input(
            "Do you really want to commit the changes (Yes,[No])? ")
        if not ans: ans = "No"
        if ans not in ["Yes", "No"]:
            print "You have to type exactly 'Yes' or 'No'"
            ans = None

    if ans == "No":
        log.info("Not committing the changes")
        return 0

    # 10-second countdown: a Ctrl-C here aborts the commit cleanly.
    try:
        import time
        log.info("Committing the changes in ...")
        timeout = 10
        while timeout >= 0:
            log.info("%d seconds" % timeout)
            time.sleep(1)
            timeout -= 1
    except KeyboardInterrupt:
        log.info("Commit aborted")
        return 0

    if not options.dry_run:
        log.info("Committing the changes")

        from CondDBUI.Admin import MergeAndTag
        MergeAndTag(changesURL, masterURL, local_tag, check_addition_db=False)

        rel_notes.write()
        log.info("Updated release notes at %s" % options.rel_notes)
Beispiel #31
0
def getOptions():
  """Build the command-line parser and return the parsed options.

  Recognised flags: --username, --password, --file. The backup file name
  defaults to 'mint_backup_<today>.csv'.
  """
  parser = OptionParser()
  for flag in ("--username", "--password", "--file"):
    parser.add_option(flag)
  parser.set_default("file", "mint_backup_%s.csv" % str(date.today()))
  opts, _ = parser.parse_args()
  return opts
def main():
    ###########################################################################
    # Configure the parser
    ###########################################################################
    from optparse import OptionParser
    parser = OptionParser(usage = "%prog [options]",
                          version = __version__,
                          description =
"""This script requests a web server for invalid (or both: valid & invalid) tags at Tier.

With default request type (='bad'), the script will get (from TagStatusDB) all tags
known currently to be invalid. re-check them if they become valid, and return
the list of those which become valid.
""")

    parser.add_option("-r", "--request", type = "string",
                      help = "Two request types are possible: 'bad' (get invalid tags)"
                      " and 'good-bad-ugly' (get valid, invalid and being now checked tags). "
                      "DEFAULT: 'bad'."
                      )
    parser.add_option("-w", "--web-server", type = "string",
                      help = "Web server URL where the script for interaction"
                      " with the TagStatusDB is located."
                      )
    parser.add_option("-t", "--tier", type = "string",
                      help = "Tier site name (e.g.: LCG.CNAF.it)."
                      )
    parser.add_option("--recheck", action = "store_true",
                      help = "Re-check invalid tags and return those of them "
                      "which have resurrected."
                      )
    parser.add_option("-o","--output", type = "string",
                      help = "Write result to a file."
                      )

    parser.set_default("request", 'bad')
    parser.set_default("tier", None)

    # parse command line
    options, args = parser.parse_args()

    ###########################################################################
    # Prepare local logger
    ###########################################################################
    import logging
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # set the global stream handler
    from CondDBUI import LOG_FORMAT
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    ###########################################################################
    # Check and set options
    ###########################################################################
    request = options.request
    if request not in ["bad","good-bad-ugly"]:
        parser.error("Request type is unfamiliar.")
    if request == "good-bad-ugly" and options.recheck:
        parser.error("Re-checking tags is not applicable with this request type.")

    url = options.web_server
    if url == None:
        parser.error("Please specify an URL where the TSDB negotiator is located.")

    if not options.tier:
        parser.error("Please specify Tier site to check a tag at.")
    tier = unicode(options.tier)

    file = options.output
    if file and not options.recheck:
        log.info("The mode without tags re-checking doesn't use an output file feature.\n")

    ###########################################################################
    # Interact with the server-side script
    ###########################################################################
    response = urllib.urlopen(url+"?request=%s&tier=%s"%(request,tier))

    if request == "bad":
        tags_to_check = pickle.loads(response.read())['BAD']
        if options.recheck:
            log.info("Tags re-validation started ...")
            tags_to_update = {}
            import hashlib, datetime
            for partition in tags_to_check.keys():
                tags_to_update[partition]=[]
                #tier_db_conn_str = str(tier).split(".")[1].lower() + "/%s"%partition
                tier_db_conn_str = "CondDB/%s"%partition
                db = CondDBUI.CondDB(tier_db_conn_str)
                for tag_dict in tags_to_check[partition]:
                    log.info("Checking '%s/%s' tag:"%(partition,tag_dict["TagName"]))
                    initHashObj = getattr(hashlib,tag_dict["HashAlg"])()
                    hash_sum = db.payloadToHash(initHashObj,tag=str(tag_dict["TagName"])).hexdigest()
                    tag_dict["Time"] = datetime.datetime.now()
                    if unicode(hash_sum) == tag_dict["ReferenceHashSum"]:
                        tag_dict["Status"] = u"GOOD"
                    else:
                        tag_dict["Status"] = u"BAD"
                    tags_to_update[partition].append(tag_dict)
            if file:
                f = open(file,'w')
                pickle.dump(tags_to_update,f)
                f.close()
                log.info("Done! Results have been written to '%s'." %file)
            else:
                pprint(tags_to_update)
        else:
            pprint(tags_to_check)
    elif request == "good-bad-ugly":
        all_tags = response.read()
        print all_tags
Beispiel #33
0
def parse_options(r_options):
    """Parse encoder command-line options into the *r_options* dict.

    Required options are validated (errors are reported and the usage text
    printed); parsed values are stored in r_options with missing optional
    values normalised to ''. numThreads defaults to 2.
    """
    usage = 'usage: %prog [options] arg'

    parser = OptionParser(usage)

    parser.add_option('-o', '--output', dest='outputFile', type='string', help='')
    parser.add_option('-i', '--input', dest='inputFile', type='string', help='')
    parser.add_option('-x', '--width', dest='videoWidth', type='int', help='')
    parser.add_option('-y', '--height', dest='videoHeight', type='int', help='')
    parser.add_option('-f', '--framerate', dest='videoFramerate', type='string', help='')
    parser.add_option('-v', '--vrate', dest='videoBitrate', type='string', help='')
    parser.add_option('-a', '--arate', dest='audioBitrate', type='string', help='')

    parser.add_option('-t', '--threads', dest='numThreads', type='int', help='')
    # BUG FIX: set_default keys on the option *dest* ('numThreads'), not the
    # flag name; set_default('threads', 2) never took effect, which is why a
    # string-'2' fallback used to be needed below.
    parser.set_default('numThreads', 2)

    (options, args) = parser.parse_args()
    err = False

    if not options.outputFile:
        print("Output file must be specified.")
        err = True

    if not options.inputFile:
        print("Input file must be specified.")
        err = True

    if not options.videoWidth:
        print("Video resolution (Width) must be specified.")
        err = True

    if not options.videoHeight:
        print("Video resolution (Height) must be specified.")
        err = True

    if not options.videoFramerate:
        print("Video framerate must be specified.")
        err = True

    if not options.videoBitrate:
        print("Video bitrate must be specified.")
        err = True

    if not options.audioBitrate:
        print("Audio bitrate must be specified.")
        err = True

    if options.numThreads > 8:
        print("A maximum of 8 threads may be specified.")

    if err:
        print("")
        parser.print_help()

    r_options['outputFile'] = options.outputFile
    r_options['inputFile'] = options.inputFile
    r_options['videoWidth'] = options.videoWidth
    r_options['videoHeight'] = options.videoHeight
    r_options['videoFramerate'] = options.videoFramerate
    r_options['videoBitrate'] = options.videoBitrate
    r_options['audioBitrate'] = options.audioBitrate

    # Missing optional values become empty strings for the caller.
    for option in r_options:
        if r_options[option] is None:
            r_options[option] = ''

    # Always set thanks to the (fixed) default above.
    r_options['numThreads'] = options.numThreads
Beispiel #34
0
def main():
    ###########################################################################
    # Configure the parser
    ###########################################################################
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options] partition globalTag",
        version=__version__,
        description=
        """This script hashes a tag of a partition and updates release notes."""
    )

    parser.add_option("--rel-notes",
                      type="string",
                      help="XML file containing the release notes")
    parser.add_option(
        "--hash-alg",
        type="string",
        help="Name of a hashing algorithm to use. DEAFAULT: 'sha1'.")
    parser.add_option("-n",
                      "--dry-run",
                      action="store_true",
                      help="Skip adding hash sums to release notes.")

    parser.set_default(
        "rel_notes",
        os.path.join(os.environ["SQLDDDBROOT"], "doc", "release_notes.xml"))
    parser.set_default("hash_alg", 'sha1')

    # parse command line
    options, args = parser.parse_args()

    ###########################################################################
    # Prepare local logger
    ###########################################################################
    import logging
    log = logging.getLogger(parser.prog or os.path.basename(sys.argv[0]))
    log.setLevel(logging.INFO)

    # set the global stream handler
    from CondDBUI import LOG_FORMAT
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(LOG_FORMAT))
    logging.getLogger().handlers = [hndlr]

    ###########################################################################
    # Positional arguments verification
    ###########################################################################
    if len(args) < 2:
        parser.error("Not enough arguments. Try with --help.")

    partition, globalTag = args

    partitions = ["DDDB", "LHCBCOND", "SIMCOND"]
    if partition not in partitions:
        parser.error("'%s' is not a valid partition name. Allowed: %s" % \
                     (partition, partitions))
    import hashlib
    hashAlg = options.hash_alg
    if sys.version_info >= (2, 7):
        if hashAlg not in hashlib.algorithms:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg
    else:
        if hashAlg not in [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
        ]:
            raise Exception, "'%s' algorithm is not implemented in the hashlib." % hashAlg

    ###########################################################################
    # Connecting to the DB and release notes
    ###########################################################################
    import CondDBUI.Admin.Management
    masterURL = CondDBUI.Admin.Management._masterCoolURL(partition)
    db = CondDBUI.CondDB(masterURL)

    from CondDBUI.Admin import ReleaseNotes
    rel_notes = ReleaseNotes(options.rel_notes)

    log.info("Master DB file: %s" % masterURL)
    log.info("Release Notes file: %s" % options.rel_notes)

    ###########################################################################
    # Hashing
    ###########################################################################
    # Initialize hashing object
    initialHashSumObj = getattr(hashlib, hashAlg)()
    # Get global tags to hash
    if globalTag.lower() == 'all':
        GlobalTagsToHash = rel_notes.getGlobalTags(partition)
    else:
        GlobalTagsToHash = [globalTag]

    ans = None
    while ans is None:
        ans = raw_input("\nDo you really want to start hashing (Yes,[No])? ")
        if not ans: ans = "No"
        if ans not in ["Yes", "No"]:
            print "You have to type exactly 'Yes' or 'No'"
            ans = None

    if ans == "No":
        print "Canceled by user. Hashing wasn't started."
        return 0

    try:
        for tag in GlobalTagsToHash:
            log.info("Hashing '%s' tag... " % tag)
            hashSumObj = db.payloadToHash(initialHashSumObj, tag=tag)
            # Add hash sum to release notes
            if not options.dry_run:
                rel_notes.addHashToGlobalTag(hashSumObj, partition, tag)
            log.info("Done!")
    except KeyboardInterrupt:
        print "Canceled by user. Release page wasn't updated."
        return 0

    if not options.dry_run:
        # Write to release_notes.xml file new global tag entry
        rel_notes.write()
        log.info("Updated release notes at %s" % options.rel_notes)
    else:
        log.info("Release notes weren't updated since in dry-run mode.")
Beispiel #35
0
def getOptions():
    """Parse the command line and return the options object.

    Recognised flags: --username, --password, --file; the transactions
    file name defaults to 'mint_transactions.csv'.
    """
    parser = OptionParser()
    for flag in ("--username", "--password", "--file"):
        parser.add_option(flag)
    parser.set_default("file", "mint_transactions.csv")
    opts, _ = parser.parse_args()
    return opts
def main():
    """Command-line driver for an Iridium modem: record, store and send data.

    Parses the options, opens the modem serial connection, then performs the
    requested actions: enable/disable the radio, send stored telemetry or run
    a recording loop, optionally append the result to a file and/or transmit
    it as an SBD message.
    """
    parser = OptionParser('usage %prog ')

    parser.set_default('iterations', 10)
    parser.set_default('delay', 1)
    parser.set_default('port_name', '/dev/ttyUSB0')
    parser.set_default('connection_name', 'ser')
    parser.set_default('buccy_data_filename', 'PoTA.txt')

    parser.add_option('-c',
                      dest='connection_name',
                      type='string',
                      help='define name of the pyiridium9602 object')
    parser.add_option('-b',
                      dest='buccy_data',
                      action='store_false',
                      default=True,
                      help='activate to send telemetry')
    parser.add_option('-n',
                      dest='buccy_data_filename',
                      type='string',
                      help='specify the location of the data to send')
    parser.add_option('-r',
                      dest='record',
                      action='store_true',
                      default=False,
                      help='activate to record new data')
    parser.add_option('-p',
                      dest='port_name',
                      type='string',
                      help='specify the port to connect to the modem')
    parser.add_option('-i',
                      dest='iterations',
                      type='int',
                      help='specify the number of data points to record')
    parser.add_option(
        '-d',
        dest='delay',
        type='float',
        help='specify the time between each data point in seconds')
    parser.add_option('-s',
                      dest='sig',
                      action='store_true',
                      default=False,
                      help='activate to record signal quality')
    parser.add_option('-l',
                      dest='location',
                      action='store_true',
                      default=False,
                      help='activate to record location')
    parser.add_option('-t',
                      dest='sys_time',
                      action='store_true',
                      default=False,
                      help='activate to system_time')
    parser.add_option('-m',
                      dest='send',
                      action='store_true',
                      default=False,
                      help='activate to send results as SBD msg')
    parser.add_option('-f',
                      '--file',
                      type='string',
                      dest='filename',
                      default=False,
                      help='define location of data storage')
    parser.add_option('-e',
                      dest='enable',
                      action='store_true',
                      default=False,
                      help='activite to enable radio after disabling')
    parser.add_option('-o',
                      dest='disable',
                      action='store_true',
                      default=False,
                      help='activate to disable radio')

    try:
        (options, args) = parser.parse_args()
    except SystemExit as e:
        # optparse exits on bad options; report and bail out gracefully.
        print(e)
        return

    try:
        ser = initiate_modem(options.connection_name,
                             options.port_name)  # Establish serial object
    except IridiumError as e:
        print(e)
        # NOTE(review): exit() relies on the 'site' module; sys.exit() would
        # be the more robust spelling -- kept as-is to preserve behaviour.
        exit()

    if options.enable:
        enable_radio(ser)
        print('radio enabled')
    elif options.disable:
        disable_radio(ser)
        print('radio disabled')

    if options.buccy_data:  # Default action - send existing data
        try:
            result = send_tlm(ser, options.buccy_data_filename)
            status = ser.acquire_response(b'AT+SBDS')
            stat = ser.acquire_response(b'AT')
            print(
                stat
            )  # Check and print the status of SBD to ensure message has sent
        except IridiumError as err:
            print('Error when sending telemetry: ' + str(err))
    else:
        try:
            result = data_loop(ser, options.iterations, options.delay,
                               options.sig, options.sys_time, options.location,
                               options.filename)
        except IridiumError as err:
            print(err)
        except OptionError as err:
            print(err)

    if options.filename:
        # BUG FIX: the file used to be reopened once per iteration and never
        # closed; open it once and close it deterministically. The written
        # content is unchanged: str(result) appended 'iterations' times.
        with open(options.filename, "a") as f:
            for _ in range(options.iterations):
                f.write(str(result))
        print('This is the result: ' + str(result))

    if options.send:
        try:
            # Strip list formatting so the SBD message is one compact string.
            result = str(result)
            for ch in "[] ,'":
                result = result.replace(ch, '')
            # SBD messages are limited; truncate to 100 characters.
            if len(result) > 100:
                print('message stripped to 100 characters')
                result = result[:100]
            ser.queue_send_message(result)
            print(result)
            status = ser.acquire_response(b'AT+SBDS')
            print(status)
            ser.initiate_session()
        except IridiumError as err:
            print(err)
        except UnboundLocalError as err:
            # 'result' is unbound when neither branch above produced data.
            print(str(err) + 'helo')
Beispiel #37
0
def _get_options():
    """Parse and validate the command line for the DQ-flag script.

    Expects exactly three positional arguments: FLAG, FLAG_VALUE and the
    local tag name. Prompts interactively for the contributor name and the
    release-notes message when they are not given as options.

    Returns the (options, args) pair from optparse.
    """
    # Configure the parser
    from optparse import OptionParser
    parser = OptionParser(
        usage="%prog [options] FLAG FLAG_VALUE local_tag",
        description=
        """This script adds/removes DQ flags to/from the CondDB, tagging the result and
updating the release notes. The user has to provide a FLAG name to insert/remove
with its value FLAG_VALUE (by convention it is '1' (='BAD')), and the local
tag name to tag achieved changes with. The script will ask for the contributor name
and for a short message for the release notes.""")

    parser.add_option("-d",
                      "--dest",
                      type="string",
                      help="Destination to commit flag to. Can be:"
                      " sqlite(default) or oracle.")
    parser.add_option("--rel-notes",
                      type="string",
                      help="XML file containing the release notes")
    parser.add_option("-m",
                      "--message",
                      type="string",
                      help="Message to include in the release notes")
    parser.add_option("-c",
                      "--contributor",
                      type="string",
                      help="Name of the contributor of the patch")
    parser.add_option("-w",
                      "--with-care",
                      action="store_true",
                      help="Before applying check the final result.")
    parser.add_option("-s",
                      "--since",
                      type="string",
                      help="Start of the Interval Of Validity (local time)."
                      " Format: YYYY-MM-DD[_HH:MM[:SS.SSS]][UTC]")
    parser.add_option("-u",
                      "--until",
                      type="string",
                      help="End of the Interval Of Validity (local time)"
                      " Format: YYYY-MM-DD[_HH:MM[:SS.SSS]][UTC]")
    parser.add_option(
        "--ref_tag",
        type="string",
        help="Reference tag to form modifications starting not from the HEAD"
        " but from a tag given in this option. BE CAREFUL here:"
        " the changes formed in this way will contain the payload of"
        " the 'ref_tag' which was between IOV 'since' and 'until' values"
        " and will be applied on top of the HEAD.")
    parser.add_option("-P",
                      "--patch",
                      type="int",
                      help="numerical id of the patch on savannah")

    parser.set_default("dest", "sqlite")
    parser.set_default(
        "rel_notes",
        os.path.join(os.environ["SQLDDDBROOT"], "doc", "release_notes.xml"))
    parser.set_default("message", None)
    parser.set_default("contributor", None)
    parser.set_default("since", None)
    parser.set_default("until", None)
    parser.set_default("ref_tag", '')

    # parse command line
    options, args = parser.parse_args()

    # check arguments: exactly FLAG FLAG_VALUE local_tag (the old message
    # claimed "Not enough arguments" even when too many were given).
    if len(args) != 3:
        parser.error("Wrong number of arguments (expected 3). Try with --help.")

    # An empty ref_tag means changes are formed from the HEAD.
    if options.ref_tag == '':
        log.info("Reference tag = 'HEAD'")
    else:
        log.info("Reference tag = '%s'" % options.ref_tag)

    if options.since:
        log.info("Validity from %s" % format_text(options.since, 'yellow'))
    if options.until:
        log.info("Validity until %s" % format_text(options.until, 'yellow'))

    # Convert the textual IOV boundaries to COOL validity keys; unset ends
    # fall back to the open interval [ValidityKeyMin, ValidityKeyMax).
    from CondDBUI.Admin import timeToValKey
    from PyCool import cool
    options.since = timeToValKey(options.since, cool.ValidityKeyMin)
    options.until = timeToValKey(options.until, cool.ValidityKeyMax)
    if options.since >= options.until:
        parser.error("Invalid IOV: %s to %s" % (options.since, options.until))

    if options.dest.lower() not in ['sqlite', 'oracle']:
        parser.error(
            "'%s' is invalid database destination. Allowed are 'sqlite' and 'oracle'."
            % options.dest)

    import CondDBUI.Admin.Management
    log.info("Destination = %s" % format_text(
        CondDBUI.Admin.Management._masterCoolURL('DQFLAGS'), 'yellow'))

    log.info("Release notes: %s" % format_text(options.rel_notes, 'yellow'))

    # Fall back to interactive prompts for missing metadata.
    if not options.contributor:
        options.contributor = raw_input("Contributor: ")
    log.info("Changes by: %s" % options.contributor)

    ########## Prepare the message ############################################
    if not options.message:
        options.message = _getMessage()

    if not options.message:
        log.warning("Empty message!")
    else:
        # Allow literal "\n" sequences on the command line as line breaks.
        options.message = options.message.replace("\\n", "\n")
        log.info("Message for the changes: \n\n%s\n" %
                 format_text(options.message, 'yellow'))
    return options, args
			
	return 0

"""
"""
if __name__ == "__main__":
	import sys
	tmpargv  = sys.argv[:]     # [:] for a copy, not reference
	sys.argv = []
	from ROOT import gROOT, gStyle, gSystem
	sys.argv = tmpargv
	from optparse import OptionParser
	usage = """
	usage: %prog [options] input_directory
	"""
	parser = OptionParser(usage=usage)
	addPlotterOptions(parser)
	parser.set_default(dest='outDir',value='singleTop')
	(opt, args) = parser.parse_args()

	gROOT.SetBatch(True)
	gStyle.SetOptTitle(0)
	gStyle.SetOptStat(0)
	gSystem.Load('libUserCodeTopMassSecVtx.so')

	exit(main(args, opt))




Beispiel #39
0
def configure(argv, hasRPMSupport=False, hasSmartSupport=False, hasZypperSupport=False):
	optionParser = OptionParser(
			usage="%prog [options] <searchTerm>",
			version="%%prog %s" % VERSION,
			description="A command-line client for the openSUSE Package Search web service.")
	optionParser.add_option('', '--config', action='store', type='string', dest='configFile', default=defaultUserConfigFile,
			help="user configuration file (defaults to %s)" % defaultUserConfigFile, metavar="FILE")
	optionParser.add_option('', '--skip-global-config', action='store_false', dest='readGlobalConfig', default=True,
			help="skip reading the global configuration file %s" % globalConfigFile)
	optionParser.add_option('', '--skip-config', action='store_false', dest='readConfig', default=True,
			help="skip reading configuration files alltogether")
	optionParser.add_option('-n', '--by-name', action='store_const', const='ByName', dest='mode',
			help="only search for matches in package names")
	optionParser.add_option('-c', '--by-content', action='store_const', const='ByContents', dest='mode',
			help="also search for matches in all file names")
	optionParser.add_option('-s', '--simple', action='store_const', const='Simple', dest='mode',
			help="search for matches in package names, package summaries and first match in file names (default)")
	optionParser.add_option('-d', '--dist', type='string', dest='version', default=None,
			help="openSUSE version to search for (defaults to %s, may specify 'factory' for Factory or 'latest' for latest release)" % defaultSuseVersion,
			metavar="VERSION")
	optionParser.add_option('-l', '--latest', action='store_const', const=latestSuseVersion, dest='version',
			help="search in the latest released openSUSE version (%s)" % latestSuseVersion)
	optionParser.add_option('-F', '--factory', action='store_const', const='factory', dest='version',
			help="search in the openSUSE development version (Factory)")
	optionParser.add_option('-u', '--url', action='store_true', dest='showURL', default=False,
			help="also show the URLs of the repositories that contain matching packages")
	optionParser.add_option('-a', '--arch', action='store_true', dest='showArch', default=False,
			help="also show the architectures each package match is available for (defaults to false)")
	# disabled for now, will need to add RPM release information in web service results first:
	#optionParser.add_option('-f', '--file', action='store_true', dest='showFileURL', default=False,
	#		help="also show the fully qualified RPM file URLs")
	optionParser.add_option('-t', '--timeout', action='store', type='int', dest='timeout', default=defaultTimeout,
			help="timeout in seconds for the web service request", metavar="TIMEOUT")
	optionParser.add_option('-q', '--quiet', action='store_false', dest='verbose', default=True,
			help="don't display progress information (for dumb terminals)")
	optionParser.add_option('-A', '--no-ansi', action='store_false', dest='color', default=True,
			help="don't use ANSI escape sequences (for dumb terminals), implies -q")
	optionParser.add_option('', '--theme', action='store', type='string', dest='colorScheme', default=None,
			help="color scheme to use (unless -A/--no-ansi) -- valid values: %s" % (', '.join(colorSchemeMap.keys())), metavar='NAME')
	optionParser.add_option('-D', '--dump', action='store_true', dest='dump', default=False,
			help="simply dump the XML tree sent back by the server")
	optionParser.add_option('-U', '--show-url', action='store_true', dest='showQueryURL', default=False,
			help="show the web service query URL")
	optionParser.add_option('', '--proxy', action='store', type='string', dest='proxy', default=defaultHttpProxy,
			help="HTTP proxy server to use for performing the request (if not specified, uses the http_proxy environment variable)", metavar="SERVER:PORT")
	optionParser.add_option('', '--proxy-auth', action='store', type='string', dest='proxyAuth', default=None,
			help="HTTP proxy authentication", metavar="USER:PASSWORD")
	optionParser.add_option('', '--stack-trace', action='store_true', dest='showStackTrace', default=False,
			help="show stack traces on exceptions (only useful for submitting bug reports)")
	
	helpAddonForRPM = ''
	if not hasRPMSupport:
		helpAddonForRPM = ' (N/A)'
		pass
	
	optionParser.add_option('-r', '--rpm', action='store_true', dest='rpm', default=False,
			help="compare package matches with your current RPM database" + helpAddonForRPM)
	optionParser.add_option('', '--rpm-root', action='store', type='string', dest='rpmRoot', default=None,
			help="set the root directory for the RPM database (not the path to the RPM database but the root of the system)"
			+ helpAddonForRPM,
			metavar="DIRECTORY")
	
	helpAddonForSmart = ''
	if not hasSmartSupport:
		helpAddonForSmart = ' (N/A)'
		pass
	
	optionParser.add_option('', '--smart', action='store_true', dest='smart', default=False,
			help="enable smart support to check repositories" + helpAddonForSmart)
	#optionParser.add_option('', '--smart-add', action='store_true', dest='smartAdd', default=False,
	#		help="prompt for adding repositories to smart" + helpAddonForSmart)
	
	helpAddonForZypper = ''
	if not hasZypperSupport:
		helpAddonForZypper = ' (N/A)'
	
	optionParser.add_option('', '--zypper', action='store_true', dest='zypper', default=False,
			help="enable zypper support to check repositories" + helpAddonForZypper)
	
	(options, args) = optionParser.parse_args(argv)
	
	if options.readConfig:
		try:
			from ConfigParser import SafeConfigParser
		except ImportError:
			from ConfigParser import ConfigParser
			pass
		try :
			configParser = SafeConfigParser()
		except NameError:
			configParser = ConfigParser()
			pass
	
		configModeMap = {
				'simple': 'Simple',
				'name': 'ByName',
				'content': 'ByContent'
				}
	
		userConfigFile = os.path.expanduser(options.configFile)
		configFiles = []
		if options.readGlobalConfig:
			configFiles.append(globalConfigFile)
			pass
		configFiles.append(userConfigFile)
	
		try:
			configParser.read(configFiles)
		except Exception, e:
			print >>sys.stderr, "Error while reading configuration from %s: %s" % (" and ".join(configFiles), e)
			if options.showStackTrace:
				import traceback
				traceback.print_exc()
				pass
			sys.exit(E_CONFIG)
			pass
	
		# set configuration values as defaults in OptionParser:
		def setOption(type, section, name, option=None):
			if not option:
				option = name
				pass
			if configParser.has_option(section, name):
				m = getattr(configParser, 'get%s' % type)
				optionParser.set_default(option, m(section, name))
				return True
			return False
	
		if configParser.has_option('General', 'mode'):
			modeConfig = configParser.get('General', 'mode')
			if configModeMap.has_key(modeConfig):
				optionParser.set_default('mode', configModeMap[modeConfig])
			else:
				print >>sys.stderr, 'ERROR: invalid configuration value for parameter "mode" in section "General": %s' % modeConfig
				print >>sys.stderr, 'Valid values are: %s' % ', '.join(configModeMap.keys())
				sys.exit(E_CONFIG)
				pass
			pass
		setOption('', 'General', 'distribution', 'version')
		setOption('boolean', 'Output', 'color')
		setOption('', 'Output', 'theme', 'colorScheme')
		setOption('boolean', 'Output', 'url', 'showURL')
		setOption('boolean', 'Output', 'arch', 'showArch')
		setOption('boolean', 'Output', 'verbose')
		setOption('boolean', 'Output', 'show_query_url', 'showQueryURL')
		if hasRPMSupport:
			setOption('boolean', 'RPM', 'rpm')
			setOption('', 'RPM', 'root', 'rpmRoot')
			pass
		if hasSmartSupport:
			setOption('boolean', 'Smart', 'smart')
			setOption('boolean', 'Smart', 'prompt')
			pass
		setOption('int', 'Network', 'timeout')
		setOption('', 'Network', 'proxy')
		setOption('', 'Network', 'proxy_auth', 'proxyAuth')
	
		# run option parsing again, now with defaults from the configuration files
		(options, args) = optionParser.parse_args(sys.argv)
		pass