Example #1
1
def cmdparameter(argv):
    """Parse command-line options for the column-extraction tool.

    With no arguments, print the module-level description ``desc`` and the
    program's own ``-h`` output, then exit with status 1.

    Returns the ``(options, args)`` pair produced by optparse.
    Raises AssertionError when -i/--input-file is missing.
    """
    if len(argv) == 1:
        # No arguments: show the description plus the full option help.
        # (The module global `desc` is only read here, so no `global` needed.)
        sys.stderr.write(str(desc) + "\n")
        cmd = "python " + argv[0] + " -h"
        os.system(cmd)
        sys.exit(1)
    usages = "%prog -i file -c col_file"
    parser = OP(usage=usages)
    parser.add_option("-i", "--input-file", dest="filein", metavar="FILEIN", help="A matrix file.")
    parser.add_option(
        "-c",
        "--column-file",
        dest="col_file",
        metavar="COLFILE",
        help="One column file containing the names of columns required to be extracted.",
    )
    parser.add_option(
        "-f",
        "--first-col-keep",
        dest="first_col",
        default=1,
        # Fixed "meaing" typo in the user-visible help text.
        help="Default 1 meaning extracting the first column also. Accept 0 to skip the first column.",
    )
    parser.add_option("-v", "--verbose", dest="verbose", default=0, help="Show process information")
    parser.add_option("-d", "--debug", dest="debug", default=False, help="Debug the program")
    (options, args) = parser.parse_args(argv[1:])
    # `is not None` instead of `!= None` (identity check for the sentinel).
    assert options.filein is not None, "A filename needed for -i"
    return (options, args)
Example #2
1
 def _read_checkpoint(self):
     """Read and unpickle the checkpoint file from disk.

     Creates an empty checkpoint file when none exists yet.  Returns the
     unpickled payload, or "" when the file is missing, empty, or contains
     a pickled None.  On any other read error, the corrupted file is
     removed when self.remove_corrupted_checkpoint_file is set, otherwise
     the process exits with status -1.
     """
     if not os.path.exists(self.checkpoint_path):
         self.log.info("No checkpoint found in %s." % self.checkpoint_path)
         # Create an empty file so later reads/writes have a target.
         open(self.checkpoint_path, "w").close()
         self.log.debug("Created checkpoint file in %s." % self.checkpoint_path)
         return ""
     try:
         f = open(self.checkpoint_path, "rb")
         try:
             read = pickle.load(f)
         finally:
             # Always close the handle, even when pickle.load raises.
             f.close()
         # BUG FIX: this log line used to sit after the return statements
         # and was therefore unreachable.
         self.log.debug("Checkpoint read from %s" % self.checkpoint_path)
         # Normalize a pickled None to the "no checkpoint" sentinel.
         return read if read is not None else ""
     except EOFError:
         # Empty file (e.g. freshly created): no checkpoint yet.
         return ""
     except Exception:
         self.log.error("Error reading checkpoint in %s." % self.checkpoint_path)
         self.log.error(traceback.format_exc())
         if self.remove_corrupted_checkpoint_file:
             self.log.info("Removing corrupted checkpoint file %s." % self.checkpoint_path)
             os.remove(self.checkpoint_path)
             return ""
         sys.exit(-1)
Example #3
1
    def _reboot_buildout(self):
        """Re-run bootstrap so bin/buildout is recreated with the
        virtualenv python as its interpreter, then re-exec buildout
        itself (and exit) if we are not already running under the
        virtualenv interpreter.
        """
        buildout_dir = self.buildout["buildout"]["directory"]
        bootstrap_path = buildout_dir + "/bootstrap.py"
        cmd_list = [self.python_cmd]
        if os.path.exists(bootstrap_path):
            cmd_list.append(bootstrap_path)
        else:
            # No bootstrap.py on disk: ask buildout itself to bootstrap.
            cmd_list.append(self.buildout_path)
            cmd_list.extend(self.buildout_args)
            cmd_list.append("bootstrap")
        subprocess.call(cmd_list)

        # Rerun buildout if it isn't running under the virtualenv interpreter.
        self.logger.info(sys.executable)
        if sys.executable != self.python_cmd:
            cmd_list = [self.buildout_path]
            cmd_list.extend(self.buildout_args)
            self.logger.info("Rebooting buildout")
            subprocess.call(cmd_list)
            sys.exit()
Example #4
1
def make_cache_level(ncaches, prototypes, level, next_cache):
    """Recursively build one level of a cache-tree test system.

    ncaches/prototypes are per-level lists (index 0 = this level); level
    counts down to 0, the tester level closest to memory consumers;
    next_cache is the cache one level below that this level fans into.

    NOTE(review): relies on module globals defined elsewhere in the file
    (system, cachespec, SubSystem, L2XBar, plus the ones declared below).
    Python 2 code (xrange, print statement).
    """
    global next_subsys_index, proto_l1, testerspec, proto_tester

    # Per-level counter used to give each subsystem a unique name.
    index = next_subsys_index[level]
    next_subsys_index[level] += 1

    # Create a subsystem to contain the crossbar and caches, and
    # any testers
    subsys = SubSystem()
    setattr(system, "l%dsubsys%d" % (level, index), subsys)

    # The levels are indexing backwards through the list
    ntesters = testerspec[len(cachespec) - level]

    # Scale the progress threshold as testers higher up in the tree
    # (smaller level) get a smaller portion of the overall bandwidth,
    # and also make the interval of packet injection longer for the
    # testers closer to the memory (larger level) to prevent them
    # hogging all the bandwidth
    limit = (len(cachespec) - level + 1) * 100000000
    testers = [proto_tester(interval=10 * (level * level + 1), progress_check=limit) for i in xrange(ntesters)]
    if ntesters:
        subsys.tester = testers

    if level != 0:
        # Create a crossbar and add it to the subsystem, note that
        # we do this even with a single element on this level
        xbar = L2XBar()
        subsys.xbar = xbar
        if next_cache:
            xbar.master = next_cache.cpu_side

        # Create and connect the caches, both the ones fanning out
        # to create the tree, and the ones used to connect testers
        # on this level
        tree_caches = [prototypes[0]() for i in xrange(ncaches[0])]
        tester_caches = [proto_l1() for i in xrange(ntesters)]

        subsys.cache = tester_caches + tree_caches
        for cache in tree_caches:
            cache.mem_side = xbar.slave
            # Recurse one level down for every fan-out cache.
            make_cache_level(ncaches[1:], prototypes[1:], level - 1, cache)
        for tester, cache in zip(testers, tester_caches):
            tester.port = cache.cpu_side
            cache.mem_side = xbar.slave
    else:
        if not next_cache:
            print "Error: No next-level cache at top level"
            sys.exit(1)

        if ntesters > 1:
            # Create a crossbar and add it to the subsystem
            xbar = L2XBar()
            subsys.xbar = xbar
            xbar.master = next_cache.cpu_side
            for tester in testers:
                tester.port = xbar.slave
        else:
            # Single tester
            testers[0].port = next_cache.cpu_side
def intersectYASP(ListOfFile):
    """Pure intersection of all bpm names in all files.

    Returns (h, v, turns): horizontal bpm names common to all files,
    vertical bpm names common to all files, and the number of files whose
    HNAME list was non-empty (historically reported as "turns").

    Exits when ListOfFile is empty.
    """
    if len(ListOfFile) == 0:
        print("Nothing to intersect!!!!")
        sys.exit()
    h = ListOfFile[0].HNAME
    count = 0
    for b in ListOfFile:
        if len(b.HNAME) == 0:
            # A file with no horizontal names ends the intersection early
            # (historic behavior, preserved).
            break
        # Keep the order of b.HNAME, as the original filter() did.
        h = [x for x in b.HNAME if x in h]
        count = count + 1
    turns = count
    v = ListOfFile[0].VNAME
    for b in ListOfFile:
        if len(b.VNAME) == 0:
            break
        # BUG FIX: the original repeated this filter a second time after
        # the else-branch; filtering once is sufficient (it is idempotent).
        v = [x for x in b.VNAME if x in v]

    print("Number of turns that can be used  " + str(turns))

    return h, v, turns
Example #6
1
    def subdivide_segment(self, A, B, C):
        """Assuming [AB] is a segment (A.seg has B and B.seg has A),
        insert C as an intermediate link [AC],[CB].
        This also adds C to self.points.
        Returns True if the subdivision was done.
        Returns False if [AB] was shorter than min_subdivide.
        Raises ValueError if A and B are not linked to each other.
        """
        sys.stderr.write("subdivide_segment A,B,C: %s %s %s %s %s %s\n"
                         % (A, A.att(), B, B.att(), C, C.att()))
        if dist_sq(A, B) < self.min_subdivide_sq:
            # BUG FIX: honor the documented contract and return False;
            # the original called sys.exit(0) here ("should be caught
            # earlier!"), killing the whole process on a short segment.
            sys.stderr.write(" ---- too short, nothing done.\n")
            return False

        # Locate B in A's segment list ...
        a_seg_idx = None
        for n in range(len(A.seg)):
            if A.seg[n] == B.id:
                a_seg_idx = n
                break

        # ... and A in B's segment list.
        b_seg_idx = None
        for n in range(len(B.seg)):
            if B.seg[n] == A.id:
                b_seg_idx = n
                break
        if b_seg_idx is None or a_seg_idx is None:
            raise ValueError("A,B not linked???")
        # Register C as a subdivision point and splice it between A and B.
        C.sub = True
        C.id = len(self.points)
        self.points.append(C)
        A.seg[a_seg_idx] = C.id
        B.seg[b_seg_idx] = C.id
        C.seg = [A.id, B.id]
        return True
Example #7
1
    def output_add(s, A, B, cut=False):
        """If cut=True, output the segment [AB] as a cut.
        Otherwise jump to B, starting a new path there.
        A is passed so that we can check that this is really the last point
        we have visited. If not, a jump to A is inserted, before the cut.
        If cut is False, we can directly jump the output list to B.

        This is a simpler version of append_or_extend_simple(), which
        recombines segments into paths. We don't.

        Caller is responsible to postprocess afterwards:
        * remove pathological short segments.
        * flip segments so that we can
        * recombine segments into paths.
        """
        if "output" not in s.__dict__:
            s.output = []
        if s.verbose >= 1:
            last = s.output[-1][-1] if len(s.output) else None
            sys.stderr.write("output_add %s %s %s\n" % (last, A, B))
        # BUG FIX: removed leftover debugging code that unconditionally
        # printed a separator line and killed the process via sys.exit(2)
        # once more than 30 entries had been produced.

        if cut:
            s.output.append([A, B])
        else:
            # Jump: record the endpoints as separate one-point paths.
            s.output.append([A])
            s.output.append([B])
def breakdownXLS(spreadsheet):  ###open workbook and create dictionary of the spreadsheets within
    """Open an Excel workbook and break it down into a dict of sheets.

    Returns {sheet_name: [row, ...]} where each row holds the first ten
    columns (xlrd Cell objects) of that sheet.  Sheets with zero rows are
    skipped.  Exits the process when the workbook has no sheets at all.

    NOTE(review): Python 2 code (print statements); requires the xlrd
    package imported elsewhere in the file.
    """
    STARTCOL = 0
    ENDCOL = 10

    xlsbook = xlrd.open_workbook(spreadsheet)

    print "\n" + "-" * 50
    print "Checking workbook:", os.path.split(spreadsheet)[1]
    print "-" * 50

    if xlsbook.nsheets != 0:  # check if workbook has spreadsheets
        worksheetDic = dict()  # create list of sheets in workbook
        sheetList = xlsbook.sheets()

        for sheet in sheetList:

            if sheet.nrows == 0:  # check if sheet has rows in it. skip if 0
                continue
            # 			print "Sheet name:",sheet.name, "Rows:",sheet.nrows

            rowList = []
            numberOfRows = sheet.nrows  # get number of rows in each spread sheet
            counter = 0
            while counter < numberOfRows:
                currentRow = sheet.row_slice(
                    counter, start_colx=STARTCOL, end_colx=ENDCOL
                )  # get row with only 10 columns
                rowList.append(currentRow)  # add current row to rowlist
                counter += 1

            worksheetDic[sheet.name] = rowList  # put list of rows in dictionary with worksheet name as the key
        return worksheetDic
    else:
        print "Workbook has no spreadsheets in it."
        sys.exit()
def main():
    """Convert a sparse feature file: drop the first three columns of
    every line, then emit "<nbFeat> <nbSamples>" followed by one
    "<length> <f1> ... <fn>" line per sample, to stdout or to
    args.outputfile.

    Exits with status 1 on any failure (the original exited with 0,
    which made errors look like success to the shell).
    """
    parser = argparser()
    args = parser.parse_args()

    try:
        nbFeat = -1
        with open(args.filename) as f:
            # Renamed from `input` to avoid shadowing the builtin.
            samples = [[int(a) for a in line.strip().split()] for line in f]
            for k in range(len(samples)):
                samples[k] = samples[k][3:]
                # Track the largest feature id to size the feature space.
                nbFeat = max(nbFeat, *samples[k])
        header = "{0} {1}".format(nbFeat + 1, len(samples))  # Format => <nbFeat> <nbSamples>
        body = "\n".join(
            "{0} {1}".format(len(row), " ".join(map(str, row))) for row in samples
        )  # Format => <length> <f1> ... <fn>
        if args.outputfile == "":
            print(header)
            print(body)
        else:
            with open(args.outputfile, "w") as f:
                f.write(header + "\n")
                f.write(body)

    except Exception as e:
        sys.stderr.write("Oops, something wrong! %s\n%s\n" % (e, sys.exc_info()))
        sys.exit(1)
Example #10
1
def require_arg(kwargs, name, value_func=None):
    """Ensure kwargs[name] exists and satisfies value_func.

    value_func defaults to an "is not None" check.  Prints an error to
    stderr and exits with status 1 when the requirement is not met.
    """
    if value_func is None:
        value_func = lambda x: x is not None

    if name not in kwargs or not value_func(kwargs[name]):
        sys.stderr.write("Missing required argument %s\n" % name)
        sys.exit(1)
Example #11
1
 def cmdloop(self):
     """Run the interactive command loop; on Ctrl-C, terminate child
     processes, print a newline and exit cleanly with status 0."""
     try:
         cmd.Cmd.cmdloop(self)
     except KeyboardInterrupt:
         self.terminate_children()
         # Parenthesized so it works as both py2 statement and py3 call.
         print("")
         sys.exit(0)
Example #12
0
File: mysql.py Project: ema/conpaas
 def sqldump(self, service_id):
     """Fetch and print a SQL dump for service_id via the manager.

     Prints the dump on success; prints the error message and exits with
     status 1 when the manager returns an error dict.
     """
     res = self.callmanager(service_id, "sqldump", False, {})
     # BUG FIX: the original tested type("error"), which is always str,
     # so the error branch could never trigger.
     if isinstance(res, dict) and "error" in res:
         print(res["error"])
         sys.exit(1)
     else:
         print(res)
Example #13
0
    def _main_loop_handleException(self, dump_info):
        """
        Helper method with one argument only so that it can be registered
        with GLib.idle_add() to run on idle or called from a handler.

        :type dump_info: an instance of the meh.DumpInfo class

        """

        ty = dump_info.exc_info.type
        value = dump_info.exc_info.value

        if (issubclass(ty, blivet.errors.StorageError) and value.hardware_fault) or (
            issubclass(ty, OSError) and value.errno == errno.EIO
        ):
            # hardware fault or '[Errno 5] Input/Output error'
            hw_error_msg = _(
                "The installation was stopped due to what "
                "seems to be a problem with your hardware. "
                "The exact error message is:\n\n%s.\n\n "
                "The installer will now terminate."
            ) % str(value)
            # NOTE(review): "occured" typo is in a user-visible, translated
            # string; changing it would break existing translations.
            self.intf.messageWindow(_("Hardware error occured"), hw_error_msg)
            sys.exit(0)
        elif isinstance(value, blivet.errors.UnusableConfigurationError):
            # Unusable storage configuration: nothing to report, just quit.
            sys.exit(0)
        else:
            # Anything else: defer to the generic meh exception handler.
            super(AnacondaExceptionHandler, self).handleException(dump_info)
            return False
Example #14
0
def main():
    """Run the module doctests, then exit with pytest's status for this file."""
    import doctest
    import sys

    doctest.testmod(jsonobject)
    test_file = __file__.replace(".pyc", ".py")
    sys.exit(pytest.main(args=[test_file]))
Example #15
0
def _assign_host_tunnel_addr(ipip_pools):
    """
    Claims an IPIP-enabled IP address from the first pool with some
    space.

    Stores the result in the host's config as its tunnel address.

    Exits on failure.
    :param ipip_pools:  List of IPPools to search for an address.
    """
    # NOTE(review): `client` and `hostname` are module globals defined
    # elsewhere in the file.
    for ipip_pool in ipip_pools:
        v4_addrs, _ = client.auto_assign_ips(
            num_v4=1, num_v6=0, handle_id=None, attributes={}, pool=(ipip_pool, None), host=hostname
        )
        if v4_addrs:
            # Successfully allocated an address.  Unpack the list.
            [ip_addr] = v4_addrs
            break
    else:
        # for/else: this branch runs only when the loop finished without
        # `break`, i.e. no pool could supply an address.
        # Failed to allocate an address, the pools must be full.
        print "Failed to allocate an IP address from an IPIP-enabled pool " "for the host's IPIP tunnel device.  Pools are likely " "exhausted."

        sys.exit(1)
    # If we get here, we've allocated a new IPIP-enabled address,
    # Store it in etcd so that Felix will pick it up.
    client.set_per_host_config(hostname, "IpInIpTunnelAddr", str(ip_addr))
Example #16
0
def distribute_reinvestment_fund():
    """Automatic reinvestment task.

    Steps:
    1. Collect every project eligible for reinvestment
       (monthly_reinvestment_cap > 0 and not fully funded).
    2. For each project, reinvest min(monthly_reinvestment_cap, amount_left).
    3. Record an AdminReinvestment object for that amount.
    4. Reset monthly_reinvestment_cap to 0.0.
    """

    time.sleep(60)

    try:
        admin = RevolvUserProfile.objects.get(user__username=ADMIN_PAYMENT_USERNAME)
    except RevolvUserProfile.DoesNotExist:
        logger.error("Can't find admin user: {0}. System exiting!".format(ADMIN_PAYMENT_USERNAME))
        sys.exit()

    eligible = Project.objects.get_eligible_projects_for_reinvestment()
    for project in eligible:
        reinvestment = project.reinvest_amount_left
        if reinvestment > 0.0:
            logger.info("Trying to reinvest {0} to {1}-{2}".format(reinvestment, project.id, project.title))
            AdminReinvestment.objects.create(amount=reinvestment, admin=admin, project=project)
        project.monthly_reinvestment_cap = 0.0
        project.save()
Example #17
0
def genmain(argv, gen, usage_fn=usage):
    """Drive message/service code generation for the given generator.

    Exits with status 0 on success, 1 for message-spec errors, 2 for
    generation errors and 3 for anything unexpected.
    """
    try:
        gen_initpy = "--initpy" in argv
        no_gen_initpy = "--noinitpy" in argv

        if gen_initpy:
            # #1827: only (re)generate the package __init__.py files.
            packages = [p for p in argv[1:] if not p == "--initpy"]
            retcode = gen.generate_initpy(packages)
        else:
            files = get_files(argv, usage_fn, gen.ext)
            if not files:
                print("No matching files found")
                return
            retcode = gen.generate_messages(files, no_gen_initpy)
    except roslib.msgs.MsgSpecException as e:
        # BUG FIX: str(e) — concatenating an exception object to a str
        # raised TypeError in the original (here and in the two handlers
        # below), masking the real error.
        sys.stderr.write("ERROR: " + str(e) + "\n")
        retcode = 1
    except MsgGenerationException as e:
        sys.stderr.write("ERROR: " + str(e) + "\n")
        retcode = 2
    except Exception as e:
        traceback.print_exc()
        sys.stderr.write("ERROR: " + str(e) + "\n")
        retcode = 3
    sys.exit(retcode or 0)
Example #18
0
def main():
    """Compile the Ada source named on the command line: lex, parse,
    type-check, then write assembly to ./assembly/<name>.asm and run the
    jump generator over the generated code.

    NOTE(review): exits with status 0 even when type errors were found —
    a non-zero code would be more conventional; confirm callers.
    Python 2 code (print statements).
    """
    lexer = adalex.make_lexer()
    tokens = adalex.tokens
    parser = adaparse.make_parser()
    fpath = sys.argv[1]  # input file path
    # Publish the path for other modules that read the `filepath` global.
    global filepath
    filepath = fpath
    program = parser.parse(open(fpath).read())
    cwd = os.getcwd()  # getting the current working directory
    # Canonical name = text between the last '/' and the last '.'.
    slash, dot = fpath.rfind("/"), fpath.rfind(".")
    gfilename = fpath[slash + 1 : dot]  # getting the input canonical file name stripping of the rest

    # Check the program
    typechecker = typecheck.typecheck()
    initialize_types(typechecker)
    env = typechecker.check_goal_symbol(program)

    if typechecker.get_error_count() > 0:
        print "Fix the type errors and compile again"
        sys.exit(0)
    # If no errors occurred, generate code
    code = generate_code(program)
    gen_file = cwd + "/assembly/" + gfilename + ".asm"  # forming the output file name
    try:
        fd = open(gen_file, "w")
        fd.write(code)
        fd.flush()
        fd.close()
    except IOError:
        print "folder cannot be created"
    print "Done"
    # Emit the code sequence
    JumpGenerator().visit(code)
Example #19
0
    def __init__(self, code, uuid, status, gzs_objectid, gprcode, config):
        """Collect DD infos for one layer and build its SQL statements.

        The instance validates itself first; on success it extracts the
        DD infos and collects the SQL statements, otherwise it logs every
        validation message and terminates the process.

        NOTE(review): the parameters (code, uuid, status, gzs_objectid,
        gprcode) are project-specific identifiers that are only stored
        here — confirm their semantics against the callers.
        """
        self.config = config
        self.code = code
        self.gprcode = gprcode
        self.uuid = uuid

        self.gdbm_status = status
        self.gzs_objectid = gzs_objectid

        # Shared logger object kept inside the config mapping.
        self.logger = self.config["LOGGING"]["logger"]

        # Filled by __validate() / extract_dd_infos() below.
        self.validation_messages = []
        self.valuetables = []
        self.legends = []
        self.xml = self.__get_xml()

        self.sql_statements = []

        self.is_valid = self.__validate()
        if self.is_valid:
            self.logger.info("DD-Infos der Ebene " + self.code + " werden zusammengetragen.")
            self.extract_dd_infos()
            self.__collect_sql_statements()
        else:
            # Invalid layer: report every validation message, then stop
            # the whole program.
            for msg in self.validation_messages:
                self.logger.error(msg)
            sys.exit()
Example #20
0
def checkLDD(target, source, env):
    file = target[0]
    if not os.path.isfile(file.abspath):
        print ("ERROR: CheckLDD: target %s not found\n" % target[0])
        Exit(1)
    (status, output) = commands.getstatusoutput("ldd -r %s" % file)
    if status != 0:
        print "ERROR: ldd command returned with exit code %d" % ldd_ret
        os.system("rm %s" % target[0])
        sys.exit(1)
    lines = string.split(output, "\n")
    have_undef = 0
    for i_line in lines:
        # print repr(i_line)
        regex = re.compile("undefined symbol: (.*)\t\\((.*)\\)")
        if regex.match(i_line):
            symbol = regex.sub("\\1", i_line)
            try:
                env["ALLOWED_SYMBOLS"].index(symbol)
            except:
                have_undef = 1
    if have_undef:
        print output
        print "ERROR: undefined symbols"
        os.system("rm %s" % target[0])
        sys.exit(1)
Example #21
0
    def Execute(self, opt, args):
        """Download each requested change/patchset and apply it according
        to the selected mode: cherry-pick, revert, fast-forward-only, or
        plain checkout (the default).

        Exits with status 1 when a change cannot be found.
        """
        for project, change_id, ps_id in self._ParseChangeIds(args):
            dl = project.DownloadPatchSet(change_id, ps_id)
            if not dl:
                print("[%s] change %d/%d not found" % (project.name, change_id, ps_id), file=sys.stderr)
                sys.exit(1)

            # Nothing new to apply — unless we are reverting, which is fine.
            if not opt.revert and not dl.commits:
                print("[%s] change %d/%d has already been merged" % (project.name, change_id, ps_id), file=sys.stderr)
                continue

            # Warn when the patchset drags in additional unmerged changes.
            if len(dl.commits) > 1:
                print(
                    "[%s] %d/%d depends on %d unmerged changes:" % (project.name, change_id, ps_id, len(dl.commits)),
                    file=sys.stderr,
                )
                for c in dl.commits:
                    print("  %s" % (c), file=sys.stderr)
            if opt.cherrypick:
                project._CherryPick(dl.commit)
            elif opt.revert:
                project._Revert(dl.commit)
            elif opt.ffonly:
                project._FastForward(dl.commit, ffonly=True)
            else:
                project._Checkout(dl.commit)
Example #22
0
def main():
    """Parse circusd-style command-line options, optionally daemonize,
    and create the pidfile when one was requested.

    Exits with status 1 when the pidfile cannot be created.
    """
    parser = argparse.ArgumentParser(description="Run some shows.")
    parser.add_argument("config", help="configuration file")

    # XXX we should be able to add all these options in the config file as well
    parser.add_argument("--log-level", dest="loglevel", default="info", help="log level")
    parser.add_argument("--log-output", dest="logoutput", default="-", help="log output")
    parser.add_argument("--daemon", dest="daemonize", action="store_true", help="Start circusd in the background")
    parser.add_argument("--pidfile", dest="pidfile")

    args = parser.parse_args()
    cfg = DefaultConfigParser()
    cfg.read(args.config)

    if args.daemonize:
        daemonize()

    pidfile = None
    if args.pidfile:
        pidfile = Pidfile(args.pidfile)

        try:
            pidfile.create(os.getpid())
        # FIX: "except X, e" is Python-2-only syntax; the file elsewhere
        # already uses the portable "as e" form.
        except RuntimeError as e:
            print(str(e))
            sys.exit(1)
Example #23
0
def echo_about():
    """
    Print the user guide and stop the toolkit (exit status 0).
    """
    about = "xml2csv V1.0\n-c <path>  |  Convert tests.full.xml to csv file\n"
    # Parenthesized: works as both a py2 statement and a py3 call.
    print(about)
    sys.exit()
Example #24
0
def test(coverage=False, verbosity=2):
    """Run the unit tests.

    Remember to set OPAC_CONFIG="config.testing" before running:
    > export OPAC_CONFIG="config.testing" && python manager.py test
    """
    if coverage and not os.environ.get("FLASK_COVERAGE"):
        # Re-exec this process with coverage collection enabled.
        os.environ["FLASK_COVERAGE"] = "1"
        os.execvp(sys.executable, [sys.executable] + sys.argv)

    suite = unittest.TestLoader().discover("tests")
    result = unittest.TextTestRunner(verbosity=verbosity).run(suite)

    if COV:
        COV.stop()
        COV.save()
        print ("Coverage Summary:")
        COV.report()
        COV.erase()

    if result.wasSuccessful():
        sys.exit()
    else:
        sys.exit(1)
def copyToLocal(path):
    """Fetch `path` from HDFS into the current directory unless a local
    copy already exists; return its basename.

    Exits with status 1 when the hadoop copy fails.
    """
    if not exists(basename(path)):
        print("Copy-to-Local  %s" % (path,))
        if os.system("hadoop fs -get %s ." % (path,)):
            print("Copy-to-Local error, stopping.")
            # sys.exit: the bare `exit` builtin is meant for interactive
            # use and may be absent when site initialization is skipped.
            sys.exit(1)
    return basename(path)
Example #26
0
def _check_chksum_6(chk, val, desc, infile):
    """Verify the 6-char checksum of `val` against the expected `chk`.

    On mismatch, report both values and exit with status 2; otherwise
    emit a debug confirmation.
    """
    computed = make_chksum_6(val)
    if chk != computed:
        msg("%s checksum incorrect in file '%s'!" % (desc, infile))
        msg("Checksum: %s. Computed value: %s" % (chk, computed))
        sys.exit(2)
    dmsg("%s checksum passed: %s" % (capfirst(desc), chk))
Example #27
0
def get_commands():
    """Main IRC read loop: answer server PINGs and dispatch ~HELP, ~BI,
    ~UD, ~ABOUT and ~UDD chat commands.

    NOTE(review): relies on module globals defined elsewhere in the file:
    the socket `s`, the channel `LOBBY`, and the handler functions
    HELP/BI/UD/ABOUT/UDD.  Python 2 code (`string` module functions).
    """
    while 1:
        try:
            readbuffer = s.recv(1024)
            sBuff = string.split(readbuffer, "\n")
            # Keep any trailing partial line for the next recv().
            readbuffer = sBuff.pop()

            for line in sBuff:
                line = string.rstrip(line)
                line = string.split(line)
                # print(line)

                if line[0] == "PING":
                    print("Sending pong")
                    s.send("PONG %s\r\n" % line[1])

                if line[1] == "PRIVMSG":
                    # Get command string
                    command = line[3]
                    command = command.replace(":", "")

                    # Get nickname of user who typed the command
                    nickname = line[0]
                    nickname = nickname.replace(":", "")
                    nickname = nickname[: nickname.find("!")]

                    # Get channel or username of chat in which the command was entered
                    chat = line[2]

                    if command.upper() == "~HELP":
                        print("Help command from " + nickname + " in chat with " + chat)
                        HELP()

                    if command.upper() == "~BI":
                        print("BI command from " + nickname + " in chat with " + chat)
                        BI()

                    if command.upper() == "~UD":
                        print("UD command from " + nickname + " in chat with " + chat)
                        UD()

                    if command.upper() == "~ABOUT":
                        print("ABOUT")
                        ABOUT()

                    if command.upper() == "~UDD":
                        if len(line) >= 5:
                            print('UDD command with arg "' + line[4] + '" from ' + nickname + " in chat with " + chat)
                            UDD(" ".join(line[4:]).strip())
                        else:
                            print("UDD command with no arg from " + nickname + " in chat with " + chat)
                            s.send("PRIVMSG %s :No keyword entered. Try again\r\n" % chat)
                            # NOTE: Could simply merge UD and UDD user commands so that if the user puts an argument to ~UD, he gets the definition, and no argument gets a random definition.

        except SystemExit:
            sys.exit(0)

        except:
            # NOTE(review): bare except keeps the bot alive on short
            # lines / index errors, but it also hides real bugs.
            print("Error - ", sys.exc_info()[0], sys.exc_info()[1])
            s.send("PRIVMSG %s :(eror 0x2381ff64) Look at that pipe, it's broken. Try again.\r\n" % LOBBY)
Example #28
0
    def readData(self):
        """Load image data from self.img into self.data.

        3D images are used as-is; for 4D images the first timepoint is
        used, falling back to the second when the first is empty.  Exits
        the process when no usable (non-empty) volume can be found.
        """
        # Read in all data to a temporary variable
        dataTEMP = self.img.get_data()
        # A 4th dimension means the image has multiple timepoints.
        if len(self.img.get_shape()) > 3:
            print(self.name + " has more than one timepoint, will try using first by default.")
            if self.notEmpty(dataTEMP[:, :, :, 0:1]):
                self.data = dataTEMP[:, :, :, 0:1]
            else:
                # Occasionally melodic emits a component with 2 TPs, the
                # first empty and the second correct.
                print("Data at timepoint 1 is an empty or errored image.  Trying next timepoints...")
                if self.img.get_shape()[3] < 2:
                    print("The template is a 4D file but only has one timepoint that cannot be used.  Exiting!")
                    sys.exit()
                else:
                    # Check timepoint 2 only; if it is empty too, something
                    # is badly wrong with the image, so stop and alert.
                    if self.notEmpty(dataTEMP[:, :, :, 1:2]):
                        print(self.name + " has empty first timepoint, using second.")
                        self.data = dataTEMP[:, :, :, 1:2]
                    else:
                        print(self.name + " is empty at both timepoints 1 and 2, and we cannot use it.  Exiting!")
                        # BUG FIX: the original fell through here without
                        # exiting, leaving self.data unset.
                        sys.exit()

        # Otherwise, we have a 3D image and only one set of datapoints to try
        else:
            # Make sure that we don't have an empty image
            if self.notEmpty(dataTEMP):
                self.data = dataTEMP
            else:
                print(self.name + " is empty and cannot be used as a template!  Exiting.")
                sys.exit()
Example #29
0
def usage(message=None):
    """Write a helpful usage summary to stderr, preceded by an optional
    introductory message, then exit with status 1."""
    usage_message = """
Usage: xmlstubs.py --wiki wikidbname --articles path --current path
    --history path [--start number] [--end number]
    [--config path]

Options:

  --wiki (-w):         wiki db name, e.g. enwiki
  --articles (-a):     full path of articles xml stub dump that will be created
  --current (-c):      full path of current pages xml stub dump that will be created
  --history (-h):      full path of xml stub dump with full history that will be created

  --start (-s):        starting page to dump (default: 1)
  --end (-e):          ending page to dump, exclusive of this page (default: dump all)

  --config (-C):       path to wikidump configfile (default: "wikidump.conf" in current dir)
  --dryrun (-d):       display the commands that would be run to produce the output but
                       don't actually run them
"""
    if message is not None:
        sys.stderr.write(message + "\n")
    sys.stderr.write(usage_message)
    sys.exit(1)
Example #30
0
def parse_nbytes(nbytes):
    """Parse a byte-count string with an optional dd-style suffix
    (c, w, b, kB, K, MB, M, GB, G) into an integer number of bytes.

    Reports the valid specifiers and exits with status 1 on an
    unrecognized suffix or a malformed string.
    """
    import re

    match = re.match(r"([0123456789]+)(.*)", nbytes)
    specifiers = (
        ("c", 1),
        ("w", 2),
        ("b", 512),
        ("kB", 1000),
        ("K", 1024),
        ("MB", 1000 * 1000),
        ("M", 1024 * 1024),
        ("GB", 1000 * 1000 * 1000),
        ("G", 1024 * 1024 * 1024),
    )
    if match:
        suffix = match.group(2)
        if not suffix:
            # No suffix at all: the whole string is a plain byte count.
            return int(nbytes)
        for name, mult in specifiers:
            if name == suffix:
                return int(match.group(1)) * mult
        # Unknown suffix: list the valid ones before the error below.
        msg("Valid byte specifiers: '%s'" % "' '".join([spec[0] for spec in specifiers]))

    msg("'%s': invalid byte specifier" % nbytes)
    sys.exit(1)