Example #1
def scriptCheckpoints(options, maxtick, cptdir):
    if options.at_instruction or options.simpoint:
        checkpoint_inst = options.take_checkpoints

        # maintain correct offset if we restored from some instruction
        if options.checkpoint_restore != None:
            checkpoint_inst += options.checkpoint_restore

        print "Creating checkpoint at inst:%d" % (checkpoint_inst)
        exit_event = m5.simulate()
        exit_cause = exit_event.getCause()
        print "exit cause = %s" % exit_cause

        # skip checkpoint instructions should they exist
        while exit_cause == "checkpoint":
            exit_event = m5.simulate()
            exit_cause = exit_event.getCause()

        if exit_cause == "a thread reached checkpoint inst number" or \
            exit_cause == "sp simulation reached the interval size":
            m5.checkpoint(joinpath(cptdir, "cpt.%s.%d" % \
                    (options.bench, checkpoint_inst)))
            print "Checkpoint written."

    else:
        when, period = options.take_checkpoints.split(",", 1)
        when = int(when)
        period = int(period)
        num_checkpoints = 0

        exit_event = m5.simulate(when - m5.curTick())
        exit_cause = exit_event.getCause()
        while exit_cause == "checkpoint":
            exit_event = m5.simulate(when - m5.curTick())
            exit_cause = exit_event.getCause()

        if exit_cause == "simulate() limit reached":
            m5.checkpoint(joinpath(cptdir, "cpt.%d"))
            num_checkpoints += 1

        sim_ticks = when
        max_checkpoints = options.max_checkpoints

        while num_checkpoints < max_checkpoints and \
                exit_cause == "simulate() limit reached":
            if (sim_ticks + period) > maxtick:
                exit_event = m5.simulate(maxtick - sim_ticks)
                exit_cause = exit_event.getCause()
                break
            else:
                exit_event = m5.simulate(period)
                exit_cause = exit_event.getCause()
                sim_ticks += period
                while exit_event.getCause() == "checkpoint":
                    exit_event = m5.simulate(sim_ticks - m5.curTick())
                if exit_event.getCause() == "simulate() limit reached":
                    m5.checkpoint(joinpath(cptdir, "cpt.%d"))
                    num_checkpoints += 1

    return exit_event
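
A note on the second branch above: it expects --take-checkpoints in "when,period" form. A minimal sketch of that parsing, with made-up tick values:

# Hypothetical option string; the code above splits only on the first comma.
opt = "1000000000,500000000"
when, period = opt.split(",", 1)
when, period = int(when), int(period)  # first checkpoint tick, then interval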
Example #2
def main(argv):
    root = normpath(joinpath(dirname(argv[0]), ".."))
    for javadir in java_directories:
        for (dirpath, dirnames, filenames) in os.walk(joinpath(root, javadir)):
            for filename in filenames:
                if filename.endswith(".java"):
                    update_blurb(joinpath(dirpath, filename))
Example #3
File: send.py Project: AMDmi3/gem5
def syncdir(srcdir, destdir):
    srcdir = normpath(srcdir)
    destdir = normpath(destdir)
    if not isdir(destdir):
        sys.exit('destination directory "%s" does not exist' % destdir)

    for root, dirs, files in os.walk(srcdir):
        root = normpath(root)
        prefix = os.path.commonprefix([root, srcdir])
        root = root[len(prefix):]
        if root.startswith('/'):
            root = root[1:]
        for rem in [ d for d in dirs if d.startswith('.') or d == 'SCCS']:
            dirs.remove(rem)

        for entry in dirs:
            newdir = joinpath(destdir, root, entry)
            if not isdir(newdir):
                os.mkdir(newdir)
                print 'mkdir', newdir

        for i,d in enumerate(dirs):
            if islink(joinpath(srcdir, root, d)):
                dirs[i] = joinpath(d, '.')

        for entry in files:
            dest = normpath(joinpath(destdir, root, entry))
            src = normpath(joinpath(srcdir, root, entry))
            if not isfile(dest) or not filecmp(src, dest):
            print 'copy %s %s' % (src, dest)
                copy(src, dest)
Example #4
def complete_libs(self, text, state):
    libs = {}
    for i in sys.path:
        try:
            if i == '':
                i = os.curdir
            files = os.listdir(i)
            for j in files:
                filename = joinpath(i, j)
                if isfile(filename):
                    for s in [".py", ".pyc", ".so"]:
                        if j.endswith(s):
                            j = j[:-len(s)]
                            pos = j.find(".")
                            if pos > 0:
                                j = j[:pos]
                            libs[j] = None
                            break
                elif isdir(filename):
                    for s in ["__init__.py", "__init__.pyc"]:
                        if isfile(joinpath(filename, s)):
                            libs[j] = None
        except OSError:
            pass
    for j in sys.builtin_module_names:
        libs[j] = None
    libs = sorted(j for j in libs.keys() if j.startswith(text))
    return libs[state]
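
complete_libs above implements the (text, state) completer protocol of the standard readline module: readline calls it repeatedly with state = 0, 1, 2, ... and collects one match per call. A minimal standalone sketch of the same protocol, with an illustrative word list:

import readline

WORDS = ["os", "os.path", "sys", "json"]

def complete_words(text, state):
    # Called with state = 0, 1, ... until None (or IndexError) ends the matches.
    matches = sorted(w for w in WORDS if w.startswith(text))
    return matches[state] if state < len(matches) else None

readline.set_completer(complete_words)
readline.parse_and_bind("tab: complete")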
Example #5
def plot(folder, m, n, ground_truth_path = ''):
    path_similarities = joinpath(folder, 'similarities.txt')
    path_lines = joinpath(folder, 'lines.txt')
    m = int(m)
    n = int(n)

    (figure, axes) = pyplot.subplots()
    data = load_similarities(path_similarities, n)

    (x, y) = load_lines(path_lines, m, n)
    axes.plot(x, y, 'k-')

    plot_contours(axes, data)
    plot_ground_truth(axes, ground_truth_path)

    #data = data[:50, :50]
    (m, n) = data.shape
    axes.axis([-0.5, n - 0.5, -0.5,  m - 0.5])

    axes.set_xticks(arange(-0.5, n, 1.0))
    axes.set_yticks(arange(-0.5, m, 1.0))

    axes.set_xticklabels([(str(i) if i % 10 == 0 else '') for i in range(0, n)])
    axes.set_yticklabels([(str(i) if i > 0 and i % 10 == 0 else '') for i in range(0, m)])

    axes.xaxis.set_ticks_position('bottom')

    axes.grid()

    axes.set_xlabel('Replay image index #', labelpad=10)
    axes.set_ylabel('Teach image index #', labelpad=10)

    pyplot.show()
Example #6
def put(fitTrackedData, pathArgs=None, force=False, summary=False,  showlist=False, quiet=False):
    commitsFile = getCommitFile()
    commitsFitData = readFitFile(commitsFile)
    available = [(f, o, s) for f,(o, s) in commitsFitData.iteritems()]
    totalSize = sum([size for f,h,size in available])

    if len(available) == 0:
        if not quiet:
            print 'No transfers needed! There are no cached objects to put in external location for HEAD.'
    elif showlist:
        print
        for filePath,h,size in available:
            print '  %6.2fMB  %s'%(size/1048576, filePath)
        print '\nThe above objects can be transferred (maximum total transfer size: %.2fMB).'%(totalSize/1048576)
        print 'You may run git-fit put to start the transfer.'
    elif summary:
        print len(fitTrackedData), 'items are being tracked'
        print len(available), 'of the tracked items MAY need to be sent to external location'
        print '%.2fMB maximum possible transfer size'%(totalSize/1048576)
        print 'Run \'git-fit put -l\' to list these items.'
    else:
        successes = []
        _transfer(_put, available, totalSize, fitTrackedData, successes, quiet)

        for filePath, objHash, size in successes:
            del commitsFitData[filePath]

    if len(commitsFitData) > 0:
        writeFitFile(commitsFitData, commitsFile)
    elif exists(commitsFile):
        remove(commitsFile)
    for f in sorted(listdir(commitsDir), key=lambda x: stat(joinpath(commitsDir, x)).st_mtime)[:-2]:
        remove(joinpath(commitsDir, f))
Example #7
def dump_inventory(path, config):
    inventory = config.configuration["inventory"]
    hostvars = inventory.get("_meta", {})
    hostvars = hostvars.get("hostvars", {})

    children = {}
    yaml_inventory = {
        "all": {"children": children},
        "vars": {},
    }
    for groupname, groupstruct in inventory.items():
        if groupname == "_meta":
            continue

        hostsdict = {}
        children[groupname] = {
            "hosts": hostsdict,
            "vars": {}
        }
        if isinstance(groupstruct, dict):
            children[groupname]["vars"] = groupstruct["vars"]
            for hostname in groupstruct["hosts"]:
                hostsdict[hostname] = hostvars.get(hostname, {})
        else:
            for hostname in groupstruct:
                hostsdict[hostname] = hostvars.get(hostname, {})

    path.joinpath("inventory.yaml").write_text(
        yaml.dump(yaml_inventory,
                  default_flow_style=False, explicit_start=True, indent=4)
    )
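
A worked sketch of the transformation above, with a hypothetical two-host-group inventory (host and variable names are made up):

inventory = {
    "_meta": {"hostvars": {"h1": {"ip": "10.0.0.1"}}},
    "web": {"hosts": ["h1"], "vars": {"port": 80}},
}
# dump_inventory would write roughly:
# ---
# all:
#     children:
#         web:
#             hosts:
#                 h1:
#                     ip: 10.0.0.1
#             vars:
#                 port: 80
# vars: {}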
Example #8
 def _foldername(self, additionalpath=""):
     if not self._foldername_cache.get(additionalpath):
         fn = joinpath(self.base, self.folder, additionalpath) \
             if not self.is_subfolder \
             else joinpath(self.base, ".%s" % self.folder, additionalpath)
         self._foldername_cache[additionalpath] = fn
     return self._foldername_cache[additionalpath]
Example #9
def get_data_path(dir=""):
    """
    Locate the examples directory.
    """
    from os.path import dirname, abspath, join as joinpath, isdir

    key = 'REFLRED_DATA'
    if os.environ.has_key(key):
        # Check for data path in the environment
        path = os.environ[key]
        if not isdir(path):
            raise RuntimeError('Path in environment %s not a directory'%key)

    else:
        # Check for data next to the package.
        try:
            root = dirname(dirname(dirname(abspath(__file__))))
        except:
            raise RuntimeError("Could not find sample data")
        path = joinpath(root,'doc','examples')

    path = joinpath(path, dir)
    if not isdir(path):
        raise RuntimeError('Could not find sample data in '+path)
    return path
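
Usage sketch for the lookup order above: the REFLRED_DATA environment variable wins, otherwise the path is derived from the package location. The values here are illustrative:

import os
os.environ['REFLRED_DATA'] = '/tmp/refl-data'  # hypothetical data tree
# get_data_path('ng1') would now return '/tmp/refl-data/ng1',
# or raise RuntimeError if that directory does not exist.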
Example #10
def data_files():
    """
    Return the data files required to run tcc.
    """
    from os.path import dirname, join as joinpath
    import os
    from glob import glob

    ROOT = dirname(find_tcc_path())
    def _find_files(path, patterns):
        target = joinpath('tinycc-data', path) if path else 'tinycc-data'
        files = []
        for pattern in patterns.split(','):
            files.extend(glob(joinpath(ROOT, path, pattern)))
        return (target, files)
    result = []
    result.append(_find_files('include', '*.h'))
    for path, dirs, _ in os.walk(joinpath(ROOT, 'include')):
        relative_path = path[len(ROOT)+1:]
        for d in dirs:
            result.append(_find_files(joinpath(relative_path, d), '*.h'))
    result.append(_find_files('lib', '*'))
    result.append(_find_files('libtcc', '*'))
    result.append(_find_files('', '*.exe,*.dll'))
    return result
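
The (target, [files]) tuples returned above match the data_files format accepted by distutils/setuptools, so a plausible (unconfirmed) consumer looks like:

from setuptools import setup

setup(
    name='tinycc',            # illustrative package name
    data_files=data_files(),  # [(target_dir, [source files]), ...]
)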
Example #11
def do_check_style(hgui, repo, *files, **args):
    """check files for proper m5 style guidelines"""
    from mercurial import mdiff, util

    auto = args.get('auto', False)
    if auto:
        auto = 'f'
    ui = MercurialUI(hgui, hgui.verbose, auto)

    if files:
        files = frozenset(files)

    def skip(name):
        return files and name in files

    def prompt(name, func, regions=all_regions):
        result = ui.prompt("(a)bort, (i)gnore, or (f)ix?", 'aif', 'a')
        if result == 'a':
            return True
        elif result == 'f':
            func(repo.wjoin(name), regions)

        return False

    modified, added, removed, deleted, unknown, ignore, clean = repo.status()

    whitespace = Whitespace(ui)
    sorted_includes = SortedIncludes(ui)
    for fname in added:
        if skip(fname):
            continue

        fpath = joinpath(repo.root, fname)

        if whitespace.apply(fpath, prompt):
            return True

        if sorted_includes.apply(fpath, prompt):
            return True

    try:
        wctx = repo.workingctx()
    except:
        from mercurial import context
        wctx = context.workingctx(repo)

    for fname in modified:
        if skip(fname):
            continue

        fpath = joinpath(repo.root, fname)
        regions = modregions(wctx, fname)

        if whitespace.apply(fpath, prompt, regions):
            return True

        if sorted_includes.apply(fpath, prompt, regions):
            return True

    return False
Example #12
def loadSystemConfig ():
    global configlog

    configdir = findProgramDirectory ()
    gameconfigdir = joinpath (configdir, "games")

    if not exists (gameconfigdir):
        mkdir (gameconfigdir)

    configname = joinpath (configdir, "pydoom.ini")

    config = ConfigParser ()

    config.add_section ('video')
    config.set ('video', 'fullscreen',  'no')
    config.set ('video', 'width',       '640')
    config.set ('video', 'height',      '480')

    if not exists (configname):
        configlog.info ("{} doesn't exist! Creating it.".format (configname))
        config.write (open (configname, 'wt'))
    else:
        configlog.info ("Read settings from {}.".format (configname))
        configfile = open (configname, 'rt')
        config.read_file (configfile)
        configfile.close ()
Example #13
def find_tcc_path():
    """
    Return the path to the tcc executable.
    """
    import sys
    from os import environ
    from os.path import join as joinpath, dirname, realpath, exists
    EXE = 'tcc.exe'

    # Look for the TCC_ROOT environment variable
    KEY = 'TCC_ROOT'
    if KEY in environ:
        path = joinpath(environ[KEY], EXE)
        if not exists(path):
            raise RuntimeError("%s %r does not contain %s"
                               % (KEY, environ[KEY], EXE))
        return path

    # Check in the tinycc package
    path = joinpath(dirname(realpath(__file__)), EXE)
    if exists(path):
        return path

    # Check next to exe/zip file
    path = joinpath(realpath(dirname(sys.executable)), 'tinycc-data', EXE)
    if exists(path):
        return path

    raise ImportError("Could not locate tcc.exe")
Example #14
def copy_monitor_keyring(path, config):
    secret = kv.KV.find_one("monitor_secret",
                            config.configuration["global_vars"]["fsid"])
    if secret:
        path.joinpath("fetch_directory", "monitor_keyring").write_text(
            secret.value
        )
Example #15
def test_enable_import_with_ext():
    """Using exts any file extension can be importable."""
    ext = ".html,.kid.html"
    sys.path.insert(0, tmpdir)
    try:
        raises(ImportError, "import test_suffixes1")
        raises(ImportError, "import test_suffixes2")
        raises(ImportError, "import test_suffixes3")
        kid.enable_import(ext=ext)
        dest = joinpath(tmpdir, "test_suffixes1.kid")
        copyfile(tfile, dest)
        import test_suffixes1  # *.kid files are always importable

        dest = joinpath(tmpdir, "test_suffixes2.html")
        copyfile(tfile, dest)
        import test_suffixes2

        dest = joinpath(tmpdir, "test_suffixes3.kid.html")
        copyfile(tfile, dest)
        import test_suffixes3

        dest = joinpath(tmpdir, "test_suffixes4.xhtml")
        copyfile(tfile, dest)
        raises(ImportError, "import test_suffixes4")
        kid.disable_import()
    finally:
        sys.path.remove(tmpdir)
Example #16
def expand_patterns(path, patterns):
    target_path = joinpath(EXTERNAL_DIR, *path)
    source_path = joinpath(DATA_PATH, *path)
    files = []
    for p in patterns:
        files.extend(glob.glob(joinpath(source_path, p)))
    return target_path, files
Example #17
def installTree(srcDir, destDir, paths):
	'''Copies files and directories from the given source directory to the
	given destination directory. The given paths argument is a sequence of
	paths relative to the source directory; only those files and directories
	are copied. The scanTree() function is suitable to provide paths.
	Files and directories are created with permissions such that all users can
	read (and execute, if appropriate) what is installed and only the owner
	can modify what is installed.
	Raises IOError if there is a problem reading or writing files or
	directories.
	'''
	if not isdir(destDir):
		raise IOError('Destination directory "%s" does not exist' % destDir)

	for relPath in paths:
		if altsep is not None:
			relPath = relPath.replace(altsep, sep)
		srcPath = joinpath(srcDir, relPath)
		destPath = joinpath(destDir, relPath)
		if islink(srcPath):
			print 'Skipping symbolic link:', srcPath
		elif isdir(srcPath):
			_installDirsRec(destPath)
		elif isfile(srcPath):
			_installDirsRec(dirname(destPath))
			installFile(srcPath, destPath)
		elif exists(srcPath):
			print 'Skipping unknown kind of file system entry:', srcPath
		else:
			print 'Skipping non-existing path:', srcPath
Example #18
    def __construct_remote_paths(self, source, root_dest, remote_directories,
                                local_remote_files):
        """Computes the directories and files that are to uploaded to remote 
        system.
        
        Arguments:
            source (str) - absolute path to the local source directory
            root_dest (str) - absolute path to the remote destination directory
            remote_directories (list) - list reference where the directories
                                        which has to created will be added
            local_remote_files (list) - list reference where a tuples of
                                        (localfile_path, remotefile_path)
                                        will be added
            root_dest_exists (boolean) - defaults to False; Set to True if dest
                                         exists at remote side

        Returns:
            The return values are append to the reference variables 
            i.e remote_directories and local_remote_files list
        """
        if local_isfile(source):
            root_dest = joinpath(root_dest, local_basename(source))
            local_remote_files.append((source, root_dest))
            return
        
        parent_dest_exists = root_dest_exists = False
        parent_path = root_dest
        
        if self.sftp.isdir(root_dest):
            parent_dest_exists = root_dest_exists = True
        
        for base_dir, _, files in local_walk(source):
            
            dest_dir = local_normpath(joinpath(root_dest,
                              base_dir.replace(source, '').strip(pathsep)))
            
            if root_dest_exists:
                new_parent_path = local_dirname(base_dir)
                if new_parent_path == parent_path and not parent_dest_exists:
                    remote_directories.append(dest_dir)
                else: 
                    parent_path = new_parent_path
                    if not self.sftp.exists(dest_dir):
                        parent_dest_exists = False
                        remote_directories.append(dest_dir)
                    elif not self.sftp.isdir(dest_dir):
                        msg = ''.join(["Copy aborted. Mismatch in file type ",
                                       "Local: '%s' Remote: '%s'" % (base_dir,
                                       dest_dir)])
                        raise RemoteDispatcherException(msg)
                    else:
                        parent_dest_exists = True
            else:
                remote_directories.append(local_normpath(dest_dir))
                
            local_remote_files.extend(\
                [(joinpath(base_dir, fname), \
                  joinpath(dest_dir, fname)) \
                 for fname in files])
Example #19
def create_filepath( fp ):
    if "\\" in fp:
        fps = joinpath( * fp.split("\\") ) 
    elif "/" in fp:
        fps = joinpath( * fp.split("/") ) 
    else:
        fps = fp
    return fps
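
Usage sketch: either separator style is normalized to the host OS, so on POSIX both calls below print assets/maps/level1.txt:

print(create_filepath("assets\\maps\\level1.txt"))
print(create_filepath("assets/maps/level1.txt"))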
Example #20
def pull(host, maildir, localmaildir, noop=False, verbose=False):
    localstore = expanduser(joinpath(localmaildir, "store"))
    
    # Get the list of mail we already have locally
    maildir_pattern = re.compile("^([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*(.*)")
    localfiles = [
        maildir_pattern.match(f).group(1) 
        for f in listdir(localstore) if maildir_pattern.match(f)
        ]

    # Make the ssh connection
    np = _SSH(host)

    # This command produces a list of all files in the maildir like:
    #   base-filename timestamp container-directory
    command = """echo {maildir}/{{cur,new}} | tr ' ' '\\n' | while read path ; do ls -1Ugo --time-style=+%s $path | sed -rne "s|[a-zA-Z-]+[ \t]+[0-9]+[ \t]+[0-9]+[ \t]+([0-9]+)[ \t]+([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*|\\2 \\1 $path|p";done""".format(
        maildir=maildir
        )
    if verbose:
        print command
    stdout = np.cmd(command)
    lines = stdout.split("\n")
    maildir_ls = [line.split(" ") for line in lines if len(line.split(" ")) == 3]

    # If we get problems with not finding files in the local list it can help to dump the local list
    #with open("/tmp/mdlog", "w") as fd:
    #    print >>fd, "\n".join(localfiles)
        
    # Loop through the remote files checking the local copies
    for basefile, timestamp, container in maildir_ls:
        if basefile in localfiles:
            if verbose:
                print "found %s" % basefile
        else:
            storefile = joinpath(localstore, basefile)
            if existspath(storefile):
                if verbose:
                    print "exists %s %s" % (basefile, storefile)
            else:
                print "pulling %s %s to %s" % (basefile, container, storefile)
                stdout = np.cmd("cat %s/%s*" % (container, basefile))
                if not noop and len(stdout) > 0:
                    with open(storefile, "w") as fd:
                        fd.write(stdout)
                    try:
                        # Now symlink the store file to the correct location
                        target = joinpath(
                            expanduser(localmaildir), 
                            basename(container),
                            basefile
                            )
                        symlink(abspath(storefile), target)
                    except OSError, e:
                        if e.errno == 17:
                            # file exists
                            pass
                        else:
                            print "%s %s %s" % (e, storefile, target)
Example #21
def filename(self, url):
    head = self.prefix
    i = 0
    filename = joinpath(self.target, "%s.jpg" % head)
    while os.path.exists(filename):
        i += 1
        name = "%s_%d.jpg" % (head, i)
        filename = joinpath(self.target, name)
    return filename
Example #22
def test_pyc_generation():
    # If this file exists, the test is worthless. Make sure this test runs
    # before anything else imports test_content.
    from kid.test import template_dir
    kid.enable_import()
    assert not exists(joinpath(template_dir, 'test_content.pyc'))
    import test.test_content
    assert exists(joinpath(template_dir, 'test_content.pyc'))
    assert sys.modules.has_key('test.test_content')
Example #23
def movedir(srcdir, destdir, dir):
    src = joinpath(srcdir, dir)
    dest = joinpath(destdir, dir)

    if not isdir(src):
        raise AttributeError

    os.makedirs(dirname(dest))
    shutil.move(src, dest)
Example #24
def addDirectory(zipFile, root, zipPath):
	for path, dirs, files in os.walk(root):
		if '.svn' in dirs:
			dirs.remove('.svn') # don't visit .svn directories
		for name in files:
			thisZipPath = zipPath
			if abspath(root) != abspath(path):
				thisZipPath = joinpath(thisZipPath, relpath(path, root))
			addFile(zipFile, joinpath(path, name), joinpath(thisZipPath, name))
Example #25
def get_set(files, mypath):
    """
    uiid_set - 'mts-my-options' in history
    iid_set - 'mts-my-options' in history
    uiid_set_only_samara - 'samara.mts' in history
    iid_set_only_samara - 'samara.mts' in history

    error_names - error in parsing json to dict
    """
    count_reports_all = 0
    uiid_set = set()
    uiid_set_only_samara = set()
    iid_set = set()
    iid_set_only_samara = set()

    error_names = set()
    for f in files[:]:
        if isfile(joinpath(mypath, f)):
            count_reports = 0
            with codecs.open(joinpath(mypath, f), 'r') as infile:
                for line in infile:
                    if 'YA_REPORT_RECEIVE_TIME' in line.rstrip('\n\r'):
                        count_reports += 1
                        count_reports_all += 1

                    elif '{"data"' in line.rstrip('\n\r'):

                        try:
                            js = json.loads(line.rstrip('\r\n'))
                            for x in xrange(len(js["data"])):
                                if js["data"][x]["name"] not in error_names:
                                    try:
                                        uiid = js["data"][x]["data"].get("uiid")
                                        iid = js["data"][x]["data"].get("iid")

                                        if uiid:
                                            for k, v in js["data"][x]["data"].items():
                                                    if k == 'w' and v == 'mts-my-options':
                                                        uiid_set.add(uiid)
                                                    if 'samara.mts' in str(v):
                                                        uiid_set_only_samara.add(uiid)
                                        if iid:
                                            for k, v in js["data"][x]["data"].items():
                                                if k == 'w' and v == 'mts-my-options':
                                                    iid_set.add(iid)
                                                if 'samara.mts' in str(v):
                                                    iid_set_only_samara.add(iid)
                                    except:
                                        error_names.add(js["data"][x]["name"])
                        except ValueError:
                            continue
            print 'count_reports file, ', count_reports, f

    return uiid_set, iid_set, uiid_set_only_samara, iid_set_only_samara
Example #26
def _pull(store, localmaildir, noop=False, verbose=False, filterfile=None):
    localstore = expanduser(joinpath(localmaildir, "store"))
    
    # Get the list of mail we already have locally
    maildir_pattern = re.compile(
        "^([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*(.*)"
        )
    localfiles = [
        maildir_pattern.match(f).group(1) 
        for f in listdir(localstore) if maildir_pattern.match(f)
        ]

    # Read in the filters if we have them
    mailfilters = parse_filter(filterfile) if filterfile else []
    mailparser = HeaderOnlyParser() if mailfilters else None
    mdfolder = MdFolder(
        basename(localmaildir), 
        base=dirname(localmaildir)
        ) if mailfilters else None
    # Loop through the remote files checking the local copies
    for basefile, timestamp, container in _list_remote(store, localmaildir, verbose=verbose):
        if basefile in localfiles:
            if verbose:
                print("found %s" % basefile)
        else:
            storefile = joinpath(localstore, basefile)
            if existspath(storefile):
                if verbose:
                    print("exists %s %s" % (basefile, storefile))
            else:
                print("pulling %s %s to %s" % (basefile, container, storefile))
                stdout = store.cmd("cat %s/%s*" % (container, basefile), verbose=verbose)

                if verbose and len(stdout) < 1:
                    print("%s is an error" % storefile)

                if not noop and len(stdout) > 0:
                    with open(storefile, "w") as fd:
                        fd.write(stdout)
                    try:
                        # Now symlink the store file to the correct location
                        target = joinpath(
                            expanduser(localmaildir), 
                            basename(container),
                            basefile
                            )
                        symlink(abspath(storefile), target)
                    except OSError as e:
                        if e.errno == 17:
                            # file exists
                            pass
                        else:
                            print("%s %s %s" % (e, storefile, target))
                            
                    # If we have filters then we should pass the message object to them
                    list(_filter(stdout, mailparser, mailfilters, mdfolder))
Example #27
def _create(dir, subdir, files):
    '''Create files.'''
    if subdir:
        subdir = joinpath(dir, subdir)
        mkdir(subdir)
    else:
        subdir = dir
    for file in files.split():
        open(joinpath(subdir, file), 'w').write('nothing')
    return subdir
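
Usage sketch for the helper above, run under a temporary directory (tempfile is an assumption; mkdir and joinpath come from the surrounding test module):

import tempfile
base = tempfile.mkdtemp()
made = _create(base, 'pkg', 'a.txt b.txt')  # creates base/pkg/a.txt and base/pkg/b.txt
print(made)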
Example #28
def packageZip(info):
	print 'Generating install files...'
	generateInstallFiles(info)

	if not exists(info.packagePath):
		os.mkdir(info.packagePath)

	zipFileName = info.packageFileName + '-bin.zip'
	zipFilePath = joinpath(info.packagePath, zipFileName)
	if exists(zipFilePath):
		os.unlink(zipFilePath)

	print 'Generating ' + zipFilePath
	zipFile = ZipFile(zipFilePath, 'w')

	addDirectory(zipFile, joinpath(info.makeInstallPath, 'doc'), 'doc')
	addDirectory(zipFile, joinpath(info.makeInstallPath, 'share'), 'share')
	addDirectory(zipFile, info.codecPath, 'codec')
	addFile(zipFile, info.openmsxExePath, basename(info.openmsxExePath))
	addFile(
		zipFile, joinpath(info.sourcePath, 'resource\\openmsx.ico'),
		'share\\icons\\openmsx.ico'
		)

	addFile(zipFile, info.catapultExePath, 'Catapult\\bin\\Catapult.exe')
	addDirectory(zipFile, joinpath(info.catapultPath, 'doc'), 'Catapult\\doc')
	addDirectory(
		zipFile, joinpath(info.catapultPath, 'resources\\bitmaps'),
		'Catapult\\resources\\bitmaps'
		)
	addDirectory(
		zipFile, joinpath(info.catapultBuildPath, 'install\\dialogs'),
		'Catapult\\resources\\dialogs'
		)
	addFile(
		zipFile, joinpath(info.catapultSourcePath, 'catapult.xpm'),
		'Catapult\\resources\\icons\\catapult.xpm'
		)
	addFile(
		zipFile, joinpath(info.catapultPath, 'README'),
		'Catapult\\doc\\README'
		)
	zipFile.close()

	zipFileName = info.packageFileName + '-pdb.zip'
	zipFilePath = joinpath(info.packagePath, zipFileName)
	if exists(zipFilePath):
		os.unlink(zipFilePath)

	print 'Generating ' + zipFilePath
	zipFile = ZipFile(zipFilePath, 'w')
	addFile(zipFile, info.openmsxPdbPath, basename(info.openmsxPdbPath))
	addFile(zipFile, info.catapultPdbPath, basename(info.catapultPdbPath))
	zipFile.close()
Example #29
def main(argv):
    dest_dir = joinpath(dirname(argv[0]), "..", "3pp")
    if not exists(dest_dir):
        mkdir(dest_dir)
    classpath_file = joinpath(dirname(argv[0]), "..", ".classpath")
    for jar in get_3pps(classpath_file):
        jar_path = joinpath(dest_dir, jar)
        if not exists(jar_path):
            url = get_url(jar)
            print "Downloading %s" % url
            download_file(url, jar_path)
Example #30
def register_node(cpu_list, mem, node_number):
    nodebasedir = joinpath(m5.options.outdir, 'fs', 'sys', 'devices',
                           'system', 'node')

    nodedir = joinpath(nodebasedir,'node%d' % node_number)
    makedirs(nodedir)

    file_append((nodedir, 'cpumap'), hex_mask(cpu_list))
    file_append((nodedir, 'meminfo'),
                'Node %d MemTotal: %dkB' % (node_number,
                toMemorySize(str(mem))/kibi))
Example #31
    def convolve_cube(self,
                      target_fwhm,
                      target_nmoffat=None,
                      input_function='moffat',
                      target_function="gaussian",
                      outcube_name=None,
                      factor_fwhm=3,
                      fft=True):
        """Convolve the cube for a target function 'gaussian' or 'moffat'

        Args:
            target_fwhm (float): target FWHM in arcsecond
            target_nmoffat: target n if Moffat function
            input_function (str): 'gaussian' or 'moffat' ['moffat']
            target_function (str): 'gaussian' or 'moffat' ['gaussian']
            factor_fwhm (float): number of FWHM for size of Kernel
            fft (bool): use FFT to convolve or not [True]

        Creates:
            Convolved cube
        """
        # Separate folder and name of file
        cube_folder, cube_name = os.path.split(self.filename)

        # Creating the outcube filename
        if outcube_name is None:
            outcube_name = "conv{0}_{1:.2f}{2}".format(
                target_function.lower()[0], target_fwhm, cube_name)
        upipe.print_info("The new cube will be named: {}".format(outcube_name))

        # Getting the shape of the Kernel
        scale_spaxel = self.get_step(unit_wcs=u.arcsec)[1]
        nspaxel = np.int(factor_fwhm * target_fwhm / scale_spaxel)
        # Make nspaxel odd to have a PSF centred at the centre of the frame
        if nspaxel % 2 == 0: nspaxel += 1
        shape = [self.shape[0], nspaxel, nspaxel]

        # Computing the kernel
        kernel3d = cube_kernel(shape,
                               self.wave.coord(),
                               self.psf_fwhm0,
                               target_fwhm,
                               input_function,
                               target_function,
                               lambda0=self.psf_l0,
                               input_nmoffat=self.psf_nmoffat,
                               target_nmoffat=target_nmoffat,
                               b=self.psf_b,
                               scale=scale_spaxel,
                               compute_kernel='pypher')

        if fft:
            upipe.print_info("Starting the FFT convolution")
            conv_cube = self.fftconvolve(other=kernel3d)
        else:
            upipe.print_info("Starting the convolution")
            conv_cube = self.convolve(other=kernel3d)

        # Write the output
        upipe.print_info("Writing up the derived cube")
        conv_cube.write(joinpath(cube_folder, outcube_name))

        # just provide the output name by folder+name
        return cube_folder, outcube_name
Example #32
    def mask_pixtable(self, mask_name=None, **kwargs):
        """Use the Image Mask and create a new Pixtable

        Input
        -----
        mask_name: str
            Name of the mask to be used (FITS file)
        use_folder: bool
            If True, use the same folder as the Pixtable
            Otherwise just write where you stand
        suffix_out: str
            Suffix for the name of the output Pixtable
            If provided, will overwrite the one in self.suffix_out
        """
        # Open the PixTable
        upipe.print_info("Opening the Pixtable {0}".format(self.pixtable_name))
        pixtable = PixTable(self.pixtable_name)

        # Use the Image mask and create a pixtable mask
        if mask_name is not None:
            self.mask_name = mask_name
        else:
            if not hasattr(self, "mask_name"):
                upipe.print_error("Please provide a mask name (FITS file)")
                return

        upipe.print_info("Creating a column Mask from file {0}".format(
            self.mask_name))
        mask_col = pixtable.mask_column(self.mask_name)

        # extract the right data using the pixtable mask
        upipe.print_info("Extracting the Mask")
        newpixtable = pixtable.extract_from_mask(mask_col.maskcol)

        # Rewrite a new pixtable
        self.suffix_out = kwargs.pop("suffix_out", self.suffix_out)
        use_folder = kwargs.pop("use_folder", True)
        if use_folder:
            self.newpixtable_name = joinpath(
                self.pixtable_folder, "{0}{1}".format(self.suffix_out,
                                                      self.pixtable_name))
        else:
            self.newpixtable_name = "{0}{1}".format(self.suffix_out,
                                                    self.pixtable_name)

        upipe.print_info("Writing the new PixTable in {0}".format(
            self.newpixtable_name))
        newpixtable.write(self.newpixtable_name)

        # Now transfer the flat field if it exists
        ext_name = 'PIXTABLE_FLAT_FIELD'
        try:
            # Test if Extension exists by reading header
            # If it exists then do nothing
            test_data = pyfits.getheader(self.newpixtable_name, ext_name)
            upipe.print_warning(
                "Flat field extension already exists in masked PixTable - all good"
            )
        # If it does not exist test if it exists in the original PixTable
        except KeyError:
            try:
                # Read data and header
                ff_ext_data = pyfits.getdata(self.pixtable_name, ext_name)
                ff_ext_h = pyfits.getheader(self.pixtable_name, ext_name)
                upipe.print_warning(
                    "Flat field extension will be transferred from PixTable")
                # Append it to the new pixtable
                pyfits.append(self.newpixtable_name, ff_ext_data, ff_ext_h)
            except KeyError:
                upipe.print_warning(
                    "No Flat field extension to transfer - all good")
            except:
                pass
        except:
            pass

        # Patch to fix the extension names of the PixTable
        # We have to put a number of extension in lowercase to make sure
        # the MUSE recipes understand them
        descl = ['xpos', 'ypos', 'lambda', 'data', 'dq', 'stat', 'origin']
        for d in descl:
            try:
                pyfits.setval(self.newpixtable_name,
                              keyword='EXTNAME',
                              value=d,
                              extname=d.upper())
                upipe.print_warning(
                    "Rewriting extension name {0} as lowercase".format(
                        d.upper()))
            except:
                upipe.print_warning(
                    "Extension {0} not present - patch ignored".format(
                        d.upper()))
Example #33
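        # NOTE: this excerpt begins mid-function; 'cpe' below is presumably a
        # subprocess.CalledProcessError caught by an enclosing except block.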
        get_logger().warning(
            "Error while determining build number\n  Using command:\n %s \n Output:\n %s"
            % (cpe.cmd, cpe.output))


_logger = None


def get_logger():
    global _logger
    if _logger is None:
        from sas.logger_config import SetupLogger
        _logger = SetupLogger(__name__).config_development()
    return _logger


if __name__ == "__main__":
    # Need to add absolute path before actual prepare call,
    # so logging can be done during initialization process too
    root = abspath(dirname(__file__))
    addpath(joinpath(root, 'src'))

    get_logger().debug("Starting SASVIEW in debug mode.")
    prepare()
    from sas.sasview.sasview import run_cli, run_gui
    if len(sys.argv) == 1:
        run_gui()
    else:
        run_cli()
    get_logger().debug("Ending SASVIEW in debug mode.")
Example #34
def prepare():
    # Don't create *.pyc files
    sys.dont_write_bytecode = True

    # Debug numpy warnings
    #import numpy; numpy.seterr(all='raise')

    # find the directories for the source and build
    from distutils.util import get_platform
    root = abspath(dirname(__file__))
    platform = '%s-%s' % (get_platform(), sys.version[:3])
    build_path = joinpath(root, 'build', 'lib.' + platform)

    # Notify the help menu that the Sphinx documentation is in a different
    # place than it otherwise would be.
    os.environ['SASVIEW_DOC_PATH'] = joinpath(build_path, "doc")

    # Make sure that we have a private version of mplconfig
    #mplconfig = joinpath(abspath(dirname(__file__)), '.mplconfig')
    #os.environ['MPLCONFIGDIR'] = mplconfig
    #if not os.path.exists(mplconfig): os.mkdir(mplconfig)
    #import matplotlib
    # matplotlib.use('Agg')
    # print matplotlib.__file__
    #import pylab; pylab.hold(False)
    # add periodictable to the path
    try:
        import periodictable
    except:
        addpath(joinpath(root, '..', 'periodictable'))

    try:
        import bumps
    except:
        addpath(joinpath(root, '..', 'bumps'))

    try:
        import tinycc
    except:
        addpath(joinpath(root, '../tinycc/build/lib'))

    # select wx version
    #addpath(os.path.join(root, '..','wxPython-src-3.0.0.0','wxPython'))

    # Build project if the build directory does not already exist.
    # PAK: with "update" we can always build since it is fast
    if True or not os.path.exists(build_path):
        import subprocess
        build_cmd = [sys.executable, "setup.py", "build", "update"]
        if os.name == 'nt':
            build_cmd.append('--compiler=tinycc')
        # need shell=True on windows to keep console box from popping up
        shell = (os.name == 'nt')
        with cd(root):
            subprocess.call(build_cmd, shell=shell)

    # Put the source trees on the path
    addpath(joinpath(root, 'src'))

    # sasmodels on the path
    addpath(joinpath(root, '../sasmodels/'))

    # The compiled model files for the sas.models package should be pulled
    # in from the build directory even though the source is stored in
    # src/sas/models.

    # Compiled modules need to be pulled from the build directory.
    # Some packages are not where they are needed, so load them explicitly.
    import sas.sascalc.pr
    sas.sascalc.pr.core = import_package(
        'sas.sascalc.pr.core',
        joinpath(build_path, 'sas', 'sascalc', 'pr', 'core'))

    # Compiled modules need to be pulled from the build directory.
    # Some packages are not where they are needed, so load them explicitly.
    import sas.sascalc.file_converter
    sas.sascalc.file_converter.core = import_package(
        'sas.sascalc.file_converter.core',
        joinpath(build_path, 'sas', 'sascalc', 'file_converter', 'core'))

    import sas.sascalc.calculator
    sas.sascalc.calculator.core = import_package(
        'sas.sascalc.calculator.core',
        joinpath(build_path, 'sas', 'sascalc', 'calculator', 'core'))

    sys.path.append(build_path)

    set_git_tag()
Example #35
File: main.py Project: hoangt/lumos
def build_optparser():
    # Init command line arguments parser
    parser = OptionParser()

    sys_options = OptionGroup(parser, "System Configurations")
    budget_choices = ('large', 'medium', 'small', 'custom')
    sys_options.add_option('--budget', default='large', choices=budget_choices,
            help="choose the budget from pre-defined ("
            + ",".join(budget_choices[:-1])
            + "), or 'custom' for customized budget by specifying AREA, POWER, and BANDWIDTH")
    sys_options.add_option('--sys-area', type='int', default=400, metavar='AREA',
            help='Area budget in mm^2, default: %default. This option will be discarded when budget is NOT custom')
    sys_options.add_option('--sys-power', type='int', default=100, metavar='POWER',
            help='Power budget in Watts, default: %default. This option will be discarded when budget is NOT custom')
    sys_options.add_option('--sys-bw', metavar='BANDWIDTH',
            default='45:180,32:198,22:234,16:252',
            help='Bandwidth budget, default: {%default}. This option will be discarded when budget is NOT custom')
    thru_core_choices = ('IOCore', 'IOCore_TFET')
    sys_options.add_option('--thru-core', default='IOCore', choices=thru_core_choices,
            help='The core type of throughput cores, options are ('
            + ",".join(thru_core_choices[:-1]) + ")")

    parser.add_option_group(sys_options)

    app_options = OptionGroup(parser, "Application Configurations")
    app_options.add_option('--workload', metavar='FILE',
            help='workload configuration file, e.g. workload.xml')
    app_options.add_option('--kernels', metavar='FILE',
            help='kernels configuration file, e.g. kernels.xml')
    parser.add_option_group(app_options)

    anal_options = OptionGroup(parser, "Analysis options")
    action_choices = ('analysis', 'plot')
    anal_options.add_option('-a', '--action', choices=action_choices,
            help='choose the running mode, choose from ('
            + ','.join(action_choices)
            + '), or combine actions separated by ",". default: N/A.')
    fmt_choices = ('png', 'pdf', 'eps')
    anal_options.add_option('--fmt', default='pdf',
            choices=fmt_choices, metavar='FORMAT',
            help='choose the format of output, choose from ('
            + ','.join(fmt_choices)
            + '), default: %default')
    anal_options.add_option('--series', help='Select series')
    anal_options.add_option('--kids', default='3,4,5,6')
    parser.add_option_group(anal_options)

    llevel_choices = ('info', 'debug', 'error')
    parser.add_option('-l', '--logging-level', default='info',
            choices=llevel_choices, metavar='LEVEL',
            help='Logging level of LEVEL, choose from ('
            + ','.join(llevel_choices)
            + '), default: %default')

    default_cfg = joinpath(HOME, 'default.cfg')
    parser.add_option('-f', '--config-file', default=default_cfg,
            metavar='FILE', help='Use configurations in FILE, default: %default')
    parser.add_option('-n', action='store_false', dest='override', default=True,
            help='DO NOT override command line options with the same one in the configuration file. '
            + 'By default, this option is NOT set, so the configuration file will override command line options.')

    return parser
Example #36
def list_soundfonts():
    sfpaths = sorted(glob.glob(joinpath(pxr.sfdir, '**', '*.sf2'),
                               recursive=True),
                     key=str.lower)
    return [relpath(x, start=pxr.sfdir) for x in sfpaths]
Example #37
    def __init__(self, platform, configuration, catapultPath):

        self.platform = platform.lower()
        if self.platform == 'win32':
            self.cpu = 'x86'
            self.platform = 'Win32'
            self.win64 = False
        elif self.platform == 'x64':
            self.cpu = 'x64'
            self.platform = 'x64'
            self.win64 = True
        else:
            raise ValueError('Wrong platform: ' + platform)

        self.configuration = configuration.lower()
        if self.configuration == 'release':
            self.configuration = 'Release'
            self.catapultConfiguration = 'Unicode Release'
        elif self.configuration == 'developer':
            self.configuration = 'Developer'
            self.catapultConfiguration = 'Unicode Debug'
        elif self.configuration == 'debug':
            self.configuration = 'Debug'
            self.catapultConfiguration = 'Unicode Debug'
        else:
            raise ValueError('Wrong configuration: ' + configuration)

        self.catapultPath = catapultPath

        # Useful variables
        self.buildFlavor = self.platform + '-VC-' + self.configuration
        self.buildPath = joinpath('derived', self.buildFlavor)
        self.sourcePath = 'src'
        self.codecPath = 'Contrib\\codec\\Win32'
        self.packageWindowsPath = 'build\\package-windows'

        self.catapultSourcePath = joinpath(self.catapultPath, 'src')
        self.catapultBuildFlavor = '%s-VC-%s' % (self.platform,
                                                 self.catapultConfiguration)
        self.catapultBuildPath = joinpath(
            self.catapultPath, joinpath('derived', self.catapultBuildFlavor))
        self.catapultExePath = joinpath(self.catapultBuildPath,
                                        'install\\catapult.exe')
        self.catapultPdbPath = joinpath(self.catapultBuildPath,
                                        'install\\catapult.pdb')

        self.openmsxExePath = joinpath(self.buildPath, 'install\\openmsx.exe')
        self.openmsxPdbPath = joinpath(self.buildPath, 'install\\openmsx.pdb')

        self.packagePath = joinpath(self.buildPath, 'package-windows')
        self.makeInstallPath = joinpath(self.packagePath, 'install')

        self.version = packageVersionNumber
        if releaseFlag:
            self.version += '.0'
        else:
            self.version += '.%d' % extractRevisionNumber()

        # <product>-<version>-<os>-<compiler>-<cpu>-<filetype>.ext
        self.os = 'windows'
        self.compiler = 'vc'

        self.packageFileName = '-'.join(
            (getVersionedPackageName(), self.os, self.compiler, self.cpu))
Example #38
def fixture(name):
    return joinpath(dirname(__file__), name)
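
Usage sketch: fixture() resolves a name relative to the test module's directory ('sample.txt' is illustrative):

data = open(fixture('sample.txt')).read()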
Example #39
__date__ = "$Date: 2007-07-06 21:38:45 -0400 (Fri, 06 Jul 2007) $"
__author__ = "Ryan Tomayko ([email protected])"
__copyright__ = "Copyright 2004-2005, Ryan Tomayko"
__license__ = "MIT <http://www.opensource.org/licenses/mit-license.php>"

import sys
import os
import glob
from os.path import abspath, basename, dirname, join as joinpath

from kid.test.util import dot

__all__ = ['dot', 'run_suite',
    'template_package', 'output_dir', 'template_dir']

_mydir = abspath(joinpath(dirname(__file__), '..', '..'))
template_dir = joinpath(_mydir, 'test')
output_dir = template_dir
template_package = 'test.'

test_modules = [basename(f)[:-3] for f in
    glob.glob(joinpath(_mydir, 'kid', 'test', 'test*.py'))]

additional_tests = 0
basic_tests = 0

def run_suite(args):
    stop_first = '-x' in args
    from kid.test.util import run_suite
    tests = ['kid.test.%s' % m for m in test_modules]
    run_suite(tests, stop_first)
Example #40
def __init__(self, *args):
    RESTEntity.__init__(self, *args)
    self._schemadir = joinpath(os.getcwd(), "src/sql")
    self._schema = open(joinpath(self._schemadir, "sitedb.sql")).read()
Example #41
def findCptDir(options, cptdir, testsys):
    """Figures out the directory from which the checkpointed state is read.

    There are two different ways in which the directories holding checkpoints
    can be named --
    1. cpt.<benchmark name>.<instruction count when the checkpoint was taken>
    2. cpt.<some number, usually the tick value when the checkpoint was taken>

    This function parses through the options to figure out which one of the
    above should be used for selecting the checkpoint, and then figures out
    the appropriate directory.
    """

    from os.path import isdir, exists
    from os import listdir
    import re

    if not isdir(cptdir):
        fatal("checkpoint dir %s does not exist!", cptdir)

    cpt_starttick = 0
    if options.at_instruction or options.simpoint:
        inst = options.checkpoint_restore
        if options.simpoint:
            # assume workload 0 has the simpoint
            if testsys.cpu[0].workload[0].simpoint == 0:
                fatal('Unable to find simpoint')
            inst += int(testsys.cpu[0].workload[0].simpoint)

        checkpoint_dir = joinpath(cptdir, "cpt.%s.%s" % (options.bench, inst))
        if not exists(checkpoint_dir):
            fatal("Unable to find checkpoint directory %s", checkpoint_dir)

    elif options.restore_simpoint_checkpoint:
        # Restore from SimPoint checkpoints
        # Assumes that the checkpoint dir names are formatted as follows:
        dirs = listdir(cptdir)
        expr = re.compile('cpt\.simpoint_(\d+)_inst_(\d+)' +
                          '_weight_([\d\.e\-]+)_interval_(\d+)_warmup_(\d+)')
        cpts = []
        for dir in dirs:
            match = expr.match(dir)
            if match:
                cpts.append(dir)
        cpts.sort()

        cpt_num = options.checkpoint_restore
        if cpt_num > len(cpts):
            fatal('Checkpoint %d not found', cpt_num)
        checkpoint_dir = joinpath(cptdir, cpts[cpt_num - 1])
        match = expr.match(cpts[cpt_num - 1])
        if match:
            index = int(match.group(1))
            start_inst = int(match.group(2))
            weight_inst = float(match.group(3))
            interval_length = int(match.group(4))
            warmup_length = int(match.group(5))
        print "Resuming from", checkpoint_dir
        simpoint_start_insts = []
        simpoint_start_insts.append(warmup_length)
        simpoint_start_insts.append(warmup_length + interval_length)
        testsys.cpu[0].simpoint_start_insts = simpoint_start_insts
        if testsys.switch_cpus != None:
            testsys.switch_cpus[0].simpoint_start_insts = simpoint_start_insts

        print "Resuming from SimPoint",
        print "#%d, start_inst:%d, weight:%f, interval:%d, warmup:%d" % \
            (index, start_inst, weight_inst, interval_length, warmup_length)

    else:
        dirs = listdir(cptdir)
        expr = re.compile('cpt\.([0-9]+)')
        cpts = []
        for dir in dirs:
            match = expr.match(dir)
            if match:
                cpts.append(match.group(1))

        cpts.sort(lambda a, b: cmp(long(a), long(b)))

        cpt_num = options.checkpoint_restore
        if cpt_num > len(cpts):
            fatal('Checkpoint %d not found', cpt_num)

        cpt_starttick = int(cpts[cpt_num - 1])
        checkpoint_dir = joinpath(cptdir, "cpt.%s" % cpts[cpt_num - 1])

    return cpt_starttick, checkpoint_dir
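
The two directory-name schemes from the docstring, checked against the same pattern the final branch compiles (directory names are made up):

import re
expr = re.compile(r'cpt\.([0-9]+)')
assert expr.match('cpt.1950000000').group(1) == '1950000000'  # scheme 2: tick-named
assert expr.match('cpt.bzip2.250000000') is None  # scheme 1 uses the bench-name form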
Example #42
def run(options, root, testsys, cpu_class):
    if options.checkpoint_dir:
        cptdir = options.checkpoint_dir
    elif m5.options.outdir:
        cptdir = m5.options.outdir
    else:
        cptdir = getcwd()

    if options.fast_forward and options.checkpoint_restore != None:
        fatal("Can't specify both --fast-forward and --checkpoint-restore")

    if options.fast_forward_pseudo_inst and options.checkpoint_restore != None:
        fatal(
            "Can't specify both --fast-forward-pseudo-inst and --checkpoint-restore"
        )

    if options.standard_switch and not options.caches:
        fatal("Must specify --caches when using --standard-switch")

    if options.standard_switch and options.repeat_switch:
        fatal("Can't specify both --standard-switch and --repeat-switch")

    if options.repeat_switch and options.take_checkpoints:
        fatal("Can't specify both --repeat-switch and --take-checkpoints")

    np = options.num_cpus
    switch_cpus = None

    if options.prog_interval:
        for i in xrange(np):
            testsys.cpu[i].progress_interval = options.prog_interval

    if options.maxinsts:
        for i in xrange(np):
            testsys.cpu[i].max_insts_any_thread = options.maxinsts

    if cpu_class:
        switch_cpus = [
            cpu_class(switched_out=True, cpu_id=(i)) for i in xrange(np)
        ]

        # [SafeSpec] configure simulation scheme
        if cpu_class == DerivO3CPU:
            #fatal("Ruby can only be used with DerivO3CPU!")
            CpuConfig.config_scheme(cpu_class, switch_cpus, options)
        else:
            warn("restoring from a checkpoint, "
                 "but not simulate using DerivO3CPU.")

        for i in xrange(np):
            if options.fast_forward:
                testsys.cpu[i].max_insts_any_thread = int(options.fast_forward)
            switch_cpus[i].system = testsys
            switch_cpus[i].workload = testsys.cpu[i].workload
            switch_cpus[i].clk_domain = testsys.cpu[i].clk_domain
            switch_cpus[i].progress_interval = \
                testsys.cpu[i].progress_interval
            switch_cpus[i].isa = testsys.cpu[i].isa
            # simulation period
            if options.maxinsts:
                switch_cpus[i].max_insts_any_thread = options.maxinsts
            # Add checker cpu if selected
            if options.checker:
                switch_cpus[i].addCheckerCpu()

        # If elastic tracing is enabled attach the elastic trace probe
        # to the switch CPUs
        if options.elastic_trace_en:
            CpuConfig.config_etrace(cpu_class, switch_cpus, options)

        testsys.switch_cpus = switch_cpus
        switch_cpu_list = [(testsys.cpu[i], switch_cpus[i])
                           for i in xrange(np)]

    if options.repeat_switch:
        switch_class = getCPUClass(options.cpu_type)[0]
        if switch_class.require_caches() and \
                not options.caches:
            print "%s: Must be used with caches" % str(switch_class)
            sys.exit(1)
        if not switch_class.support_take_over():
            print "%s: CPU switching not supported" % str(switch_class)
            sys.exit(1)

        repeat_switch_cpus = [switch_class(switched_out=True, \
                                               cpu_id=(i)) for i in xrange(np)]

        for i in xrange(np):
            repeat_switch_cpus[i].system = testsys
            repeat_switch_cpus[i].workload = testsys.cpu[i].workload
            repeat_switch_cpus[i].clk_domain = testsys.cpu[i].clk_domain
            repeat_switch_cpus[i].isa = testsys.cpu[i].isa

            if options.maxinsts:
                repeat_switch_cpus[i].max_insts_any_thread = options.maxinsts

            if options.checker:
                repeat_switch_cpus[i].addCheckerCpu()

        testsys.repeat_switch_cpus = repeat_switch_cpus

        if cpu_class:
            repeat_switch_cpu_list = [(switch_cpus[i], repeat_switch_cpus[i])
                                      for i in xrange(np)]
        else:
            repeat_switch_cpu_list = [(testsys.cpu[i], repeat_switch_cpus[i])
                                      for i in xrange(np)]

    if options.standard_switch:
        switch_cpus = [
            TimingSimpleCPU(switched_out=True, cpu_id=(i)) for i in xrange(np)
        ]
        switch_cpus_1 = [
            DerivO3CPU(switched_out=True, cpu_id=(i)) for i in xrange(np)
        ]

        for i in xrange(np):
            switch_cpus[i].system = testsys
            switch_cpus_1[i].system = testsys
            switch_cpus[i].workload = testsys.cpu[i].workload
            switch_cpus_1[i].workload = testsys.cpu[i].workload
            switch_cpus[i].clk_domain = testsys.cpu[i].clk_domain
            switch_cpus_1[i].clk_domain = testsys.cpu[i].clk_domain
            switch_cpus[i].isa = testsys.cpu[i].isa
            switch_cpus_1[i].isa = testsys.cpu[i].isa

            # if restoring, make atomic cpu simulate only a few instructions
            if options.checkpoint_restore != None:
                testsys.cpu[i].max_insts_any_thread = 1
            # Fast forward to specified location if we are not restoring
            elif options.fast_forward:
                testsys.cpu[i].max_insts_any_thread = int(options.fast_forward)
            # Fast forward to a simpoint (warning: time consuming)
            elif options.simpoint:
                if testsys.cpu[i].workload[0].simpoint == 0:
                    fatal('simpoint not found')
                testsys.cpu[i].max_insts_any_thread = \
                    testsys.cpu[i].workload[0].simpoint
            # No distance specified, just switch
            # else:
            # testsys.cpu[i].max_insts_any_thread = 1

            # warmup period
            if options.warmup_insts:
                switch_cpus[i].max_insts_any_thread = options.warmup_insts

            # simulation period
            if options.maxinsts:
                switch_cpus_1[i].max_insts_any_thread = options.maxinsts

            # attach the checker cpu if selected
            if options.checker:
                switch_cpus[i].addCheckerCpu()
                switch_cpus_1[i].addCheckerCpu()

        testsys.switch_cpus = switch_cpus
        testsys.switch_cpus_1 = switch_cpus_1
        switch_cpu_list = [(testsys.cpu[i], switch_cpus[i])
                           for i in xrange(np)]
        switch_cpu_list1 = [(switch_cpus[i], switch_cpus_1[i])
                            for i in xrange(np)]

    # set the checkpoint in the cpu before m5.instantiate is called
    if options.take_checkpoints != None and \
           (options.simpoint or options.at_instruction):
        offset = int(options.take_checkpoints)
        # Set an instruction break point
        if options.simpoint:
            for i in xrange(np):
                if testsys.cpu[i].workload[0].simpoint == 0:
                    fatal('no simpoint for testsys.cpu[%d].workload[0]', i)
                checkpoint_inst = int(
                    testsys.cpu[i].workload[0].simpoint) + offset
                testsys.cpu[i].max_insts_any_thread = checkpoint_inst
                # used for output below
                options.take_checkpoints = checkpoint_inst
        else:
            options.take_checkpoints = offset
            # Set all test cpus with the right number of instructions
            # for the upcoming simulation
            for i in xrange(np):
                testsys.cpu[i].max_insts_any_thread = offset

    if options.take_simpoint_checkpoints != None:
        simpoints, interval_length = parseSimpointAnalysisFile(
            options, testsys)

    checkpoint_dir = None
    if options.checkpoint_restore:
        cpt_starttick, checkpoint_dir = findCptDir(options, cptdir, testsys)
    m5.instantiate(checkpoint_dir)

    # Initialization is complete.  If we're not in control of simulation
    # (that is, if we're a slave simulator acting as a component in another
    #  'master' simulator) then we're done here.  The other simulator will
    # call simulate() directly. --initialize-only is used to indicate this.
    if options.initialize_only:
        return

    # Handle the max tick settings now that tick frequency was resolved
    # during system instantiation
    # NOTE: the maxtick variable here is in absolute ticks, so it must
    # include any simulated ticks before a checkpoint
    explicit_maxticks = 0
    maxtick_from_abs = m5.MaxTick
    maxtick_from_rel = m5.MaxTick
    maxtick_from_maxtime = m5.MaxTick
    if options.abs_max_tick:
        maxtick_from_abs = options.abs_max_tick
        explicit_maxticks += 1
    if options.rel_max_tick:
        maxtick_from_rel = options.rel_max_tick
        if options.checkpoint_restore:
            # NOTE: this may need to be updated if checkpoints ever store
            # the ticks per simulated second
            maxtick_from_rel += cpt_starttick
            if options.at_instruction or options.simpoint:
                warn("Relative max tick specified with --at-instruction or" \
                     " --simpoint\n      These options don't specify the " \
                     "checkpoint start tick, so assuming\n      you mean " \
                     "absolute max tick")
        explicit_maxticks += 1
    if options.maxtime:
        maxtick_from_maxtime = m5.ticks.fromSeconds(options.maxtime)
        explicit_maxticks += 1
    if explicit_maxticks > 1:
        warn("Specified multiple of --abs-max-tick, --rel-max-tick, --maxtime."\
             " Using least")
    maxtick = min([maxtick_from_abs, maxtick_from_rel, maxtick_from_maxtime])

    if options.checkpoint_restore != None and maxtick < cpt_starttick:
        fatal("Bad maxtick (%d) specified: " \
              "Checkpoint starts starts from tick: %d", maxtick, cpt_starttick)

    if options.standard_switch or cpu_class:
        if options.standard_switch:
            print "Switch at instruction count:%s" % \
                    str(testsys.cpu[0].max_insts_any_thread)
            exit_event = m5.simulate()
        elif cpu_class and options.fast_forward:
            print "Switch at instruction count:%s" % \
                    str(testsys.cpu[0].max_insts_any_thread)
            exit_event = m5.simulate()
        elif cpu_class and options.fast_forward_pseudo_inst:
            print "Switch at beginning of ROI"
            exit_event = m5.simulate()
        else:
            print "Switch at curTick count:%s" % str(10000)
            exit_event = m5.simulate(10000)

        if options.fast_forward_pseudo_inst:
            while exit_event.getCause() != 'switchcpu':
                print 'Exiting @ tick %i because %s' % (m5.curTick(),
                                                        exit_event.getCause())
                exit_event = m5.simulate()

        print "Switched CPUS @ tick %s" % (m5.curTick())

        m5.switchCpus(testsys, switch_cpu_list)

        if options.standard_switch:
            print "Switch at instruction count:%d" % \
                    (testsys.switch_cpus[0].max_insts_any_thread)

            # warmup instruction count may have already been set
            if options.warmup_insts:
                exit_event = m5.simulate()
            else:
                exit_event = m5.simulate(options.standard_switch)
            print "Switching CPUS @ tick %s" % (m5.curTick())
            print "Simulation ends instruction count:%d" % \
                    (testsys.switch_cpus_1[0].max_insts_any_thread)
            m5.switchCpus(testsys, switch_cpu_list1)

    # If we're taking and restoring checkpoints, use checkpoint_dir
    # option only for finding the checkpoints to restore from.  This
    # lets us test checkpointing by restoring from one set of
    # checkpoints, generating a second set, and then comparing them.
    if (options.take_checkpoints or options.take_simpoint_checkpoints) \
        and options.checkpoint_restore:

        if m5.options.outdir:
            cptdir = m5.options.outdir
        else:
            cptdir = getcwd()

    if options.take_checkpoints != None:
        # Checkpoints being taken via the command line at <when> and at
        # subsequent periods of <period>.  Checkpoint instructions
        # received from the benchmark running are ignored and skipped in
        # favor of command line checkpoint instructions.
        exit_event = scriptCheckpoints(options, maxtick, cptdir)

    # Take SimPoint checkpoints
    elif options.take_simpoint_checkpoints != None:
        takeSimpointCheckpoints(simpoints, interval_length, cptdir)

    # Restore from SimPoint checkpoints
    elif options.restore_simpoint_checkpoint != None:
        restoreSimpointCheckpoint()

    else:
        if options.fast_forward or options.fast_forward_pseudo_inst:
            m5.stats.reset()
        print "**** REAL SIMULATION ****"

        # If checkpoints are being taken, then the checkpoint instruction
        # will occur in the benchmark code itself.
        if options.repeat_switch and maxtick > options.repeat_switch:
            exit_event = repeatSwitch(testsys, repeat_switch_cpu_list, maxtick,
                                      options.repeat_switch)
        else:
            exit_event = benchCheckpoints(options, maxtick, cptdir)

    m5.stats.dump()
    print 'Exiting @ tick %i because %s' % (m5.curTick(),
                                            exit_event.getCause())
    if options.checkpoint_at_end:
        m5.checkpoint(joinpath(cptdir, "cpt.%d"))

    if not m5.options.interactive:
        sys.exit(exit_event.getCode())
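The max-tick resolution in run() above reduces to taking the smallest of up to three candidate limits; a minimal standalone sketch under that reading (the helper name and the 1 THz tick rate are assumptions, not gem5 API):

def resolve_maxtick(abs_tick=None, rel_tick=None, max_seconds=None,
                    cpt_starttick=0, max_tick=2**63 - 1):
    # Collect one candidate per option that was given; rel_tick is made
    # absolute by adding the checkpoint start tick, as run() does above.
    candidates = [max_tick]
    if abs_tick:
        candidates.append(abs_tick)
    if rel_tick:
        candidates.append(rel_tick + cpt_starttick)
    if max_seconds:
        # assumes the default 1 THz tick rate (1 tick == 1 ps)
        candidates.append(int(max_seconds * 1e12))
    return min(candidates)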
Exemplo n.º 43
0
def get_data_path(external_dir, target_file):
    # (signature inferred from the call to get_data_path() below)
    exepath = dirname(sys.executable)
    path = joinpath(exepath, external_dir)
    if exists(joinpath(path, target_file)):
        return path

    # check in py2app Contents/Resources
    path = joinpath(exepath, '..', 'Resources', external_dir)
    if exists(joinpath(path, target_file)):
        return abspath(path)

    raise RuntimeError('Could not find ' + joinpath(external_dir, target_file))


EXTERNAL_DIR = 'sasmodels-data'
DATA_PATH = get_data_path(EXTERNAL_DIR, 'kernel_iq.c')
MODEL_PATH = joinpath(DATA_PATH, 'models')

F16 = np.dtype('float16')
F32 = np.dtype('float32')
F64 = np.dtype('float64')
try:  # CRUFT: older numpy does not support float128
    F128 = np.dtype('float128')
except TypeError:
    F128 = None

# Conversion from units defined in the parameter table for each model
# to units displayed in the sphinx documentation.
# This section associates the unit with the macro to use to produce the LaTex
# code.  The macro itself needs to be defined in sasmodels/doc/rst_prolog.
#
# NOTE: there is an RST_PROLOG at the end of this file which is NOT
Exemplo n.º 44
0
"""

from os.path import abspath, dirname, relpath
from os.path import join as joinpath
from collections import namedtuple
from cffi import FFI
import six

_DEBUG = False
_COMPILER_ARGS = ['-ggdb'] if _DEBUG else ['-O2']

# pylint: disable=invalid-name
_full_ffi = FFI()
_lite_ffi = FFI()
_pth = abspath(dirname(__file__))
_binding_decls = joinpath(_pth, 'binding_decls.h')
with open(_binding_decls) as bindings:
    declrs = bindings.read()
    _full_ffi.cdef(declrs)
    _lite_ffi.cdef(declrs)

_core_sources = [
    relpath(src) for src in [
        "cld2/encoding_lut.cc", "cld2/binding.cc", "cld2/internal/cldutil.cc",
        "cld2/internal/cldutil_shared.cc", "cld2/internal/compact_lang_det.cc",
        "cld2/internal/compact_lang_det_hint_code.cc",
        "cld2/internal/compact_lang_det_impl.cc", "cld2/internal/debug.cc",
        "cld2/internal/fixunicodevalue.cc",
        "cld2/internal/generated_entities.cc",
        "cld2/internal/generated_language.cc",
        "cld2/internal/generated_ulscript.cc",
project_path = "C:\\Users\\twshe\\Desktop\\mydb\\com-haskell\\testing\\playing-with-row-types"
res = block( "No document"
           , assign( "$foreign"
                   , call( var('import_module')
                         , "playing_with_row_types.Main.purescript_foreign" ) )
           , assign( "ps_getFoo"
                   , define( None
                           , ["ps_v"]
                           , block(ret(get_item(var("ps_v"), "foo"))) ) )
           , assign( "ps_main"
                   , call( call( get_attr(var("$foreign"), "discard")
                               , call( get_attr(var("$foreign"), "println")
                                     , call( var("ps_getFoo")
                                           , metadata( 15
                                                     , 27
                                                     , joinpath( project_path
                                                     , "src\\Main.purs" )
                                                     , record( ( "foo"
                                                               , metadata( 15
                                                                         , 33
                                                                         , joinpath( project_path
                                                                         , "src\\Main.purs" )
                                                                         , 1 ) )
                                                             , ( "bar"
                                                               , metadata( 15
                                                                         , 41
                                                                         , joinpath( project_path
                                                                         , "src\\Main.purs" )
                                                                         , "2" ) ) ) ) ) ) )
                         , define( None
                                 , ["ps_$__unused"]
                                 , block( ret( call( call( get_attr( var( "$foreign" )
Exemplo n.º 46
0
def _emptyDirectory(top):
    # walk bottom-up so files are removed before their parent directories
    for root, dirs, files in walk(top, topdown=False):
        for name in files:
            remove(joinpath(root, name))
        for name in dirs:
            rmdir(joinpath(root, name))
Exemplo n.º 47
0
def list_banks():
    bpaths = sorted(glob.glob(joinpath(pxr.bankdir, '**', '*.yaml'),
                              recursive=True),
                    key=str.lower)
    return [relpath(x, start=pxr.bankdir) for x in bpaths]
Exemplo n.º 48
0
def get_dftbp_evol(implargs, database, source, model,
                   datafile='detailed.out', *args, **kwargs):
    """Get the data from DFTB+ SCC calculation for all models.

    This is a compound task that augments the source path to include
    individual local directories for different cell volumes, based
    on the assumption that these directories are named by 3 digits.
    A similar assumption applies to the model, where the name
    of the base model is augmented by the 3-digit directory number.

    Parameters:
        workroot (string): base directory where model directories are found
            (taken from implargs).
        source (string): model directory.
        model (string): name of the model whose data is updated.
        datafile (string): optional filename holding the data.
    """
    # setup logger
    # -------------------------------------------------------------------
    logger = implargs.get('logger', LOGGER)
    workroot = implargs.get('workroot', '.')
    # In order to collect the tags that identify individual directories
    # corresponding to a given cell-volume, we must go in the base
    # directory, which includes workroot/source
    cwd = os.getcwd()
    workdir = joinpath(abspath(expanduser(workroot)), source)
    os.chdir(workdir)
    logger.info('Looking for Energy-vs-Strain data in {:s}'.format(workdir))
    # the following should be modifiable by command options
    logger.info('Assuming strain directories are named by digits only.')
    sccdirs = [dd for dd in os.listdir() if dd.isdigit()]
    # os.listdir() returns these in arbitrary order, but the names must be
    # sorted so that correspondence with reference data can be established!
    sccdirs.sort()
    logger.info('The following SCC directories are found:\n{}'.format(sccdirs))
    # make sure we return back
    os.chdir(cwd)
    # go over individual volume directories and obtain the data
    e_tot = []
    e_elec = []
    strain = []
    for straindir in sccdirs:
        fin = joinpath(workdir, straindir, datafile)
        logger.debug('Reading {:s}'.format(fin))
        data = DetailedOut.fromfile(fin)
        logger.debug('Done. Data: {}'.format(data))
        e_tot.append(data['Etot'])
        e_elec.append(data['Eel'])
        strain.append(float(straindir) - 100)
    # prepare to update database
    data = {}
    data['totalenergy_volume'] = e_tot
    data['elecenergy_volume'] = e_elec
    data['strain'] = strain
    # report
    logger.info('Done.')
    logger.info('\ttotalenergy_volume: {}'.format(data['totalenergy_volume']))
    logger.info('\telecenergy_volume: {}'.format(data['elecenergy_volume']))
    logger.info('\tstrain: {}'.format(data['strain']))
    outstr = ['# Total Energy[eV], Electronic Energy[eV], Volume tag']
    for total, elec, tag in zip(e_tot, e_elec, sccdirs):
        outstr.append('{:12.6g} {:10.6g} {:>10s}'.format(total, elec, tag))
    with open(joinpath(workdir, 'energy_volume.dat'), 'w') as fout:
        fout.writelines('\n'.join(outstr)+'\n')
    try:
        # assume model in database
        database.get(model).update(data)
    except (KeyError, AttributeError):
        # model not in database
        database.update({model: data})
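The strain values above follow directly from the 3-digit directory convention; a quick illustration (directory names chosen for illustration only):

# directory '095' -> strain -5.0, '100' -> 0.0, '105' -> +5.0
for straindir in ['095', '100', '105']:
    print(float(straindir) - 100)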
Exemplo n.º 49
0
def main():

    parser = argparse.ArgumentParser(description='''
        A program for transferring a candidate database from an Excel file
        into Huntflow via the Huntflow API (https://github.com/huntflow/api).

        Provide the Huntflow API token,
        the name of the directory holding the resume files,
        and the name of the Excel file with the candidates.
        ''')

    parser.add_argument('huntflow_token', help='Huntflow API token')
    parser.add_argument('directory', help='Path to the directory with the files')
    args = parser.parse_args()
    '''
    I assume that the directory and file structure stays unchanged
    and looks as follows (based on the task specification):

       Тестовое задание
       ├── Тестовая база.xlsx
       ├── README.md
       ├── Frontend-разработчик
       │   ├── Глибин Виталий Николаевич.doc
       │   └── Танский Михаил.pdf
       └── Менеджер по продажам
           ├── Корниенко Максим.doc
           └── Шорин Андрей.pdf
    '''

    # Init

    FILES_DIR = args.directory

    for root, dirs, files in walkpath(FILES_DIR):
        for file in files:
            if file.endswith('.xlsx'):
                applicants_file = joinpath(FILES_DIR, file)

    # API huntflow.ru
    huntflow_token = 'Bearer {token}'.format(token=args.huntflow_token)

    # Do

    vacancies = get_company_vacancies(huntflow_token)
    vacancies = vacancies['items']
    applicants = get_applicants_from_excel_file(applicants_file)

    for applicant in applicants:
        resume_file = find_resume_file(
            applicant_fullname=applicant['fullname'],
            files_dir=FILES_DIR,
        )
        recognized_resume = upload_resume(
            token=huntflow_token,
            filename=resume_file,
        )
        applicant_data = get_applicant_data(
            applicant=applicant,
            recognized_resume=recognized_resume,
            vacancies=vacancies,
        )

        # 'выгружен' means 'uploaded' -- skip candidates already exported
        if applicant['huntflow_upload'] != 'выгружен':
            add_applicant(
                token=huntflow_token,
                applicant_data=applicant_data,
            )
            last_added = get_applicants(huntflow_token)['items'][0]
            applicant['id'] = last_added['id']

            result = add_to_vacancy(
                token=huntflow_token,
                applicant=applicant,
                vacancies=vacancies,
                recognized_resume=recognized_resume,
                applicants_file=applicants_file,
            )
Exemplo n.º 50
0
    'realview64-minor-dual',
    'realview64-switcheroo-o3',
    'realview64-switcheroo-full',
    'realview-simple-timing-ruby',
    'realview-simple-timing-dual-ruby',
    'realview64-simple-timing-ruby',
    'realview64-simple-timing-dual-ruby',
]

tarball = 'aarch-system-2014-10.tar.bz2'
url = "http://gem5.org/dist/current/arm/" + tarball
path = os.path.dirname(os.path.abspath(__file__))
arm_fs_binaries = DownloadedArchive(url, path, tarball)

for name in arm_fs_quick_tests:
    args = [ joinpath(config.base_dir, 'tests', 'configs', name + '.py') ]
    gem5_verify_config(
        name=name,
        verifiers=(), # TODO: Add basic stat verifiers
        config=joinpath(path, 'run.py'),
        config_args=args,
        valid_isas=(constants.arm_tag,),
        length=constants.quick_tag,
        fixtures=(arm_fs_binaries,)
    )

for name in arm_fs_long_tests:
    args = [ joinpath(config.base_dir, 'tests', 'configs', name + '.py') ]
    gem5_verify_config(
        name=name,
        verifiers=(), # TODO: Add basic stat verifiers
Exemplo n.º 51
0
    else:
        joblist.append(job)

if listonly:
    if verbose:
        for job in joblist:
            job.printinfo()
    else:
        for job in joblist:
            print job.name
    sys.exit(0)

if not onlyecho:
    newlist = []
    for job in joblist:
        jobdir = JobDir(joinpath(conf.rootdir, job.name))
        if jobdir.exists():
            if not force:
                status = jobdir.getstatus()
                if status == 'queued':
                    continue

                if status == 'running':
                    continue

                if status == 'success':
                    continue

            if not clean:
                sys.exit('job directory %s not clean!' % jobdir)
Exemplo n.º 52
0
def load_model_info(model_string):
    # type: (str) -> modelinfo.ModelInfo
    """
    Load a model definition given the model name.

    *model_string* is the name of the model, or perhaps a model expression
    such as sphere*cylinder or sphere+cylinder. Use '@' for a structure
    factor product, e.g. sphere@hardsphere. Custom models can be specified by
    prefixing the model name with 'custom.', e.g. 'custom.MyModel+sphere'.

    This returns a handle to the module defining the model.  This can be
    used with functions in generate to build the docs or extract model info.
    """
    if '@' in model_string:
        parts = model_string.split('@')
        if len(parts) != 2:
            raise ValueError(
                "Use P@S to apply a structure factor S to model P")
        P_info, Q_info = [load_model_info(part) for part in parts]
        return product.make_product_info(P_info, Q_info)

    product_parts = []
    addition_parts = []

    addition_parts_names = model_string.split('+')
    if len(addition_parts_names) >= 2:
        addition_parts = [
            load_model_info(part) for part in addition_parts_names
        ]
    elif len(addition_parts_names) == 1:
        product_parts_names = model_string.split('*')
        if len(product_parts_names) >= 2:
            product_parts = [
                load_model_info(part) for part in product_parts_names
            ]
        elif len(product_parts_names) == 1:
            if "custom." in product_parts_names[0]:
                # Extract ModelName from "custom.ModelName"
                pattern = r"custom\.([A-Za-z0-9_-]+)"
                result = re.match(pattern, product_parts_names[0])
                if result is None:
                    raise ValueError("Model name in invalid format: " +
                                     product_parts_names[0])
                model_name = result.group(1)
                # Use ModelName to find the path to the custom model file
                model_path = joinpath(CUSTOM_MODEL_PATH, model_name + ".py")
                if not os.path.isfile(model_path):
                    raise ValueError(
                        "The model file {} doesn't exist".format(model_path))
                kernel_module = custom.load_custom_kernel_module(model_path)
                return modelinfo.make_model_info(kernel_module)
            # Model is a core model
            kernel_module = generate.load_kernel_module(product_parts_names[0])
            return modelinfo.make_model_info(kernel_module)

    model = None
    if len(product_parts) > 1:
        model = mixture.make_mixture_info(product_parts, operation='*')
    if len(addition_parts) > 1:
        if model is not None:
            addition_parts.append(model)
        model = mixture.make_mixture_info(addition_parts, operation='+')
    return model
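A usage sketch for the expression syntax described in the docstring (the model names are illustrative examples of models shipped with sasmodels):

info = load_model_info("sphere")              # single core model
info = load_model_info("sphere+cylinder")     # additive mixture
info = load_model_info("sphere*cylinder")     # multiplicative mixture
info = load_model_info("sphere@hardsphere")   # structure factor product
info = load_model_info("custom.MyModel")      # custom model from SAS_MODELPATH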
Exemplo n.º 53
0
from . import kerneldll
from . import kernelfut
from . import custom

if os.environ.get("SAS_OPENCL", "").lower() == "none":
    HAVE_OPENCL = False
else:
    try:
        from . import kernelcl
        HAVE_OPENCL = True
    except Exception:
        HAVE_OPENCL = False

CUSTOM_MODEL_PATH = os.environ.get('SAS_MODELPATH', "")
if CUSTOM_MODEL_PATH == "":
    path = joinpath(os.path.expanduser("~"), ".sasmodels", "custom_models")
    if not os.path.isdir(path):
        os.makedirs(path)
    CUSTOM_MODEL_PATH = path
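# Usage sketch (assumed shell invocations; script names illustrative):
#   SAS_OPENCL=none python fit.py            -> skip OpenCL, use compiled kernels
#   SAS_MODELPATH=~/my_models python fit.py  -> look for custom models there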

try:
    from typing import List, Union, Optional, Any
    from .kernel import KernelModel
    from .modelinfo import ModelInfo
except ImportError:
    pass

# TODO: refactor composite model support
# The current load_model_info/build_model does not reuse existing model
# definitions when loading a composite model, instead reloading and
# rebuilding the kernel for each component model in the expression.  This
Exemplo n.º 54
0
def doSetup():
    # Write version file
    version_string = version()
    version_filename = joinpath(dirname(__file__), "calendarserver",
                                "version.py")
    version_file = file(version_filename, "w")

    try:
        version_file.write('version = "{0}"\n\n'.format(version_string))
    finally:
        version_file.close()

    dist = setup(
        name=project_name,
        version=version_string,
        description=description,
        long_description=long_description,
        url=url,
        classifiers=classifiers,
        author=author,
        author_email=author_email,
        license=license,
        platforms=platforms,
        packages=find_packages(),
        package_data={
            "twistedcaldav": [
                "*.html",
                "zoneinfo/*.ics",
                "zoneinfo/*.txt",
                "zoneinfo/*.xml",
                "zoneinfo/*/*.ics",
                "zoneinfo/*/*/*.ics",
                "images/*/*.jpg",
            ],
            "calendarserver.webadmin": ["*.html", "*.xhtml"],
            "twistedcaldav.directory": ["*.html"],
            "txdav.common.datastore": [
                "sql_schema/*.sql",
                "sql_schema/*/*.sql",
                "sql_schema/*/*/*.sql",
            ],
        },
        entry_points=entry_points,
        scripts=[
            "bin/caldavd",
        ],
        data_files=[
            ("caldavd", ["conf/caldavd.plist"]),
        ],
        ext_modules=extensions,
        py_modules=[],
        setup_requires=setup_requirements,
        install_requires=install_requirements,
        extras_require=extras_requirements,
    )

    if "install" in dist.commands:
        install_obj = dist.command_obj["install"]
        if install_obj.root is None:
            return
        install_scripts = normpath(install_obj.install_scripts)
        install_lib = normpath(install_obj.install_lib)
        root = normpath(install_obj.root)
        base = normpath(install_obj.install_base)

        if root:
            install_lib = install_lib[len(root):]

        for script in dist.scripts:
            scriptPath = joinpath(install_scripts, basename(script))

            print("Rewriting {0}".format(scriptPath))

            script = []

            fileType = None

            for line in file(scriptPath, "r"):
                if not fileType:
                    if line.startswith("#!"):
                        if "python" in line.lower():
                            fileType = "python"
                        elif "sh" in line.lower():
                            fileType = "sh"

                line = line.rstrip("\n")
                if fileType == "sh":
                    if line == "#PYTHONPATH":
                        script.append('PYTHONPATH="{add}:$PYTHONPATH"'.format(
                            add=install_lib))
                    elif line == "#PATH":
                        script.append('PATH="{add}:$PATH"'.format(
                            add=joinpath(base, "usr", "bin")))
                    else:
                        script.append(line)

                elif fileType == "python":
                    if line == "#PYTHONPATH":
                        script.append(
                            'PYTHONPATH="{path}"'.format(path=install_lib))
                    elif line == "#PATH":
                        script.append('PATH="{path}"'.format(
                            path=joinpath(base, "usr", "bin")))
                    else:
                        script.append(line)

                else:
                    script.append(line)

            with open(scriptPath, "w") as newScript:
                newScript.write("\n".join(script))
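For reference, a minimal sketch of a pre-install stub containing the placeholder lines the loop above rewrites (the entry point is hypothetical), shown here as a Python string:

EXAMPLE_STUB = '''\
#!/bin/sh
#PYTHONPATH
#PATH
exec python -m some_tool "$@"
'''
# After rewriting, #PYTHONPATH becomes PYTHONPATH="<install_lib>:$PYTHONPATH"
# and #PATH becomes PATH="<base>/usr/bin:$PATH" for sh-style scripts.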
Exemplo n.º 55
0
from os.path import join as joinpath, realpath, dirname, exists
from ctypes import CDLL

_dll_path = joinpath(realpath(dirname(__file__)), '_compiled.so')
dll = CDLL(_dll_path) if exists(_dll_path) else None
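A guarded call site for the optional library above might look like this (the exported symbol name is hypothetical):

if dll is None:
    raise ImportError('_compiled.so was not built; compile it first')
dll.compute()  # hypothetical exported function; declare argtypes before real use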
Exemplo n.º 56
0
    return "{}a1.dev0+{}.{}".format(
        base_version,
        info["revision"],
        info["branch"].replace("/", ".").replace("-", ".").lower(),
    )
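Worked example of the string built above (inputs illustrative):

# base_version="9.3", revision="6caf8f0", branch="release/hotfix-X"
# -> "9.3a1.dev0+6caf8f0.release.hotfix.x"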


#
# Options
#

project_name = "CalendarServer"

description = "Calendar and Contacts Server"

long_description = file(joinpath(dirname(__file__), "README.rst")).read()

url = "https://github.com/apple/ccs-calendarserver"

classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Framework :: Twisted",
    "Intended Audience :: Information Technology",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 2 :: Only",
    "Topic :: Communications",
    "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
    "Topic :: Office/Business :: Groupware",
    "Topic :: Office/Business :: Scheduling",
Exemplo n.º 57
0
    def create_reference_cube(self,
                              lambdamin=4700,
                              lambdamax=9400,
                              step=1.25,
                              outcube_name=None,
                              filter_for_nan=False,
                              **kwargs):
        """Create a reference cube using an input one, and overiding
        the lambda part, to get a new WCS

        Args:
            lambdamin:
            lambdamax:
            step:
            outcube_name:
            filter_for_nan:
            **kwargs:

        Returns:

        """

        # Separate folder and name of file
        cube_folder, cube_name = os.path.split(self.filename)

        # Creating the outcube filename
        if outcube_name is None:
            prefix = kwargs.pop(
                "prefix", "l{0:4d}l{1:4d}_".format(np.int(lambdamin),
                                                   np.int(lambdamax)))
            outcube_name = "{0}{1}".format(prefix, cube_name)

        # Range of lambda and number of spectral pixels
        range_lambda = lambdamax - lambdamin
        npix_spec = int(range_lambda // step + 1.0)

        # if filter_for_nan is set, crop to the region with valid (non-NaN) data
        if filter_for_nan:
            ind = np.indices(self.data[0].shape)
            selgood = np.any(~np.isnan(self.data), axis=0)
            if self._debug:
                upipe.print_debug(
                    "Xmin={0} Xmax={1} / Ymin={2} Ymax={3}".format(
                        np.min(ind[0][selgood]), np.max(ind[0][selgood]),
                        np.min(ind[1][selgood]), np.max(ind[1][selgood])))
            subcube = self[:,
                           np.min(ind[0][selgood]):np.max(ind[0][selgood]),
                           np.min(ind[1][selgood]):np.max(ind[1][selgood])]
        else:
            subcube = self

        # Create the WCS which we need for the output cube
        wcs_header = subcube.get_wcs_header()
        wcs1 = subcube.wcs
        wave1 = WaveCoord(cdelt=step,
                          crval=lambdamin,
                          ctype=wcs_header['CTYPE3'],
                          crpix=1.0,
                          shape=npix_spec)
        # Create a fake dataset with int to be faster
        cube_data = np.ones((npix_spec, wcs1.naxis2, wcs1.naxis1),
                            dtype=np.uint8)
        cube = Cube(data=cube_data, wcs=wcs1, wave=wave1)
        # Write the output
        cube.write(joinpath(cube_folder, outcube_name))

        # just provide the output name by folder+name
        return cube_folder, outcube_name
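With the default arguments above, the spectral axis size works out as follows (worked example):

# lambdamin=4700, lambdamax=9400, step=1.25
# range_lambda = 4700.0; npix_spec = int(4700.0 // 1.25 + 1.0) = 3761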
Exemplo n.º 58
0
def getdata(filename):
    return joinpath(ROOT, "sim", filename)
Exemplo n.º 59
0
def badpath(path, base):
    # joinpath will ignore base if path is absolute
    return not realpath(abspath(joinpath(base, path))).startswith(base)
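A quick demonstration of the traversal check (paths are illustrative and assume no symlinks; base should itself be canonical):

>>> badpath('docs/readme.txt', '/srv/data')
False
>>> badpath('../../../etc/passwd', '/srv/data')
True
>>> badpath('/etc/passwd', '/srv/data')  # absolute path makes joinpath ignore base
True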
Exemplo n.º 60
0
def _find_files(path, patterns):
    target = joinpath('tinycc-data', path) if path else 'tinycc-data'
    files = []
    for pattern in patterns.split(','):
        files.extend(glob(joinpath(ROOT, path, pattern)))
    return (target, files)
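A usage sketch feeding this helper into setuptools (directory names and patterns are illustrative):

data_files = [
    _find_files('include', '*.h,*.def'),
    _find_files('lib', '*.a,*.so'),
    _find_files('', 'tcc*'),
]
# Each entry is a (target_dir, [file, ...]) pair in the shape that
# setup(data_files=...) expects.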