Example #1
File: flist.py Project: avnr/flist
def iflist(files, path=[], recurse=False):
    if type(files) is list:
        for file in files:
            yield from iflist(file, path, recurse)
        return  # PEP 479: raising StopIteration inside a generator is an error in Python 3.7+
    file = str(files)
    if "" == file:
        return
    if REF == file[0]:
        _file = expandvars(expanduser(file[1:]))
        with open(_file, "r", encoding="utf-8") as f:
            for gross_line in f:
                line = gross_line.strip("\n\r").split(" " + COMMENT, 1)[0].strip()
                if not len(line) or COMMENT == line[0]:
                    continue
                at = ""
                if REF == line[0]:
                    at = REF
                    line = line[1:]
                next_file = normpath(join(dirname(realpath(_file)), expandvars(expanduser(line))))
                yield from iflist(at + next_file, path, recurse)
    else:
        found = False
        _file = expandvars(expanduser(file))
        for i in iglob(_file):
            found = True
            yield from _yielder(i, path, recurse)
        if not found and SEP not in _file and ALTSEP is not None and ALTSEP not in _file:
            for i in (j for p in path for j in iglob(join(p, _file))):
                yield from _yielder(i, path, recurse)
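For context: REF, COMMENT, SEP, ALTSEP and _yielder are module-level names defined elsewhere in flist.py. A hedged usage sketch, assuming REF is '@' and COMMENT is '#':

# '@files.lst' is treated as a list file: one pattern per line, '#' comments
# stripped, and '@'-prefixed lines recursing into further list files.
for name in iflist(['src/*.py', '@files.lst'], path=['.'], recurse=True):
    print(name)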
Example #2
def main():
    import sys
    src = pt.expandvars('$AZ_AVTX')
    dst = pt.expandvars('$AZ_APEC')
    for sn in xrange(3, 37):
        apec(src, dst, sn, 'lh')
        apec(src, dst, sn, 'rh')
Example #3
def init(packages=None):
    if 'DROPBOX_PATH' not in os.environ:
        if os.name == "nt":
            os.environ['DROPBOX_PATH'] = expandvars('$USERPROFILE\\Dropbox')
        else:
            os.environ['DROPBOX_PATH'] = expandvars('$HOME/Dropbox')

    if os.name == 'nt':
        repository_root = expandvars(r'$DROPBOX_PATH\home\SublimeText')
    elif os.name == 'posix':
        repository_root = expandvars(r'$DROPBOX_PATH/home/SublimeText')
    else:
        raise NotImplementedError()

    global repo_base
    global packages_path
    repo_base = repository_root
    packages_path = packages if packages else sublime_packages_path()

    try:
        import sublime
        settings = sublime.load_settings(USERVARS)
        settings.set('user_packages_path', repo_base)
        sublime.save_settings(USERVARS)
    except ImportError:
        pass
Example #4
    def merge_config(self, conf):
        """Merge a conf dictionnary in the object's internal."""
        loaded_conf = self.keys()
        log.debug(self)
        # Agent's name
        if not conf.name:
            if not "name" in loaded_conf:
                self["name"] = DEFAULT_NAME
        else:
            self["name"] = conf.name

        # Modules to load
        cmd_line_modules = {}
        if conf.add_module is not None:
            for module_name in conf.add_module:
                cmd_line_modules[module_name] = {}
        if "modules" not in loaded_conf:
            self["modules"] = {}

        if conf.disable_default_modules:
            self["modules"].update(cmd_line_modules)
        elif cmd_line_modules:
            self["modules"].update(DEFAULT_MODULES)
            self["modules"].update(cmd_line_modules)
        else:
            self["modules"].update(DEFAULT_MODULES)

        # User PyMAS path
        if not "directory" in loaded_conf:
            actual_path = expanduser(expandvars(conf.directory))
            self["directory"] = actual_path
        else:
            actual_path = expanduser(expandvars(self["directory"]))

        sys.path.insert(0, actual_path)
Example #5
def pkg_make_absolute(pkg):
    if pkg.endswith(".spkg"):
        if os.path.exists(pkg):
            return os.path.abspath(pkg)

        pkg_current = expandvars("$CUR/%s" % pkg)
        if os.path.exists(pkg_current):
            return pkg_current

        raise PackageNotFound("Package '%s' not found in the current directory" % pkg)

    candidates = glob(expandvars("$FEMHUB_ROOT/spkg/standard/*.spkg"))
    if len(candidates) == 0:
        raise PackageNotFound("Package '%s' not found" % pkg)
    cands = []
    for p in candidates:
        name, version = extract_name_version_from_path(p)
        if name == pkg:
            return p
        if pkg in name:
            cands.append(p)
    if len(cands) == 0:
        raise PackageNotFound("Package '%s' not found" % pkg)
    elif len(cands) == 1:
        return cands[0]

    print "Too many candidates:"
    print "    " + "\n    ".join(cands)

    raise PackageNotFound("Ambiguous package name.")
Example #6
    def test02Store(self):
        # Test get trust root to bootstrap trust
        self.test01GetTrustRoots()

        # upload X509 cert and private key to repository
        thisSection = self.cfg['test02Store']

        passphrase = thisSection.get('passphrase')
        if passphrase is None:
            passphrase = getpass("\ntest02Store credential pass-phrase: ")

        sslKeyFilePassphrase = thisSection.get('sslKeyFilePassphrase')
        if sslKeyFilePassphrase is None:
            sslKeyFilePassphrase = getpass("\ntest02Store credential owner "
                                           "pass-phrase: ")

        certFile = path.expandvars(thisSection['ownerCertFile'])
        keyFile = path.expandvars(thisSection['ownerKeyFile'])
        sslCertFile = path.expandvars(thisSection['sslCertFile'])
        sslKeyFile = path.expandvars(thisSection['sslKeyFile'])

        self.clnt.store(thisSection['username'],
                        passphrase,
                        certFile,
                        keyFile,
                        sslCertFile=sslCertFile,
                        sslKeyFile=sslKeyFile,
                        sslKeyFilePassphrase=sslKeyFilePassphrase,
                        force=False)
        print("Store creds for user %s" % thisSection['username'])
Example #7
def link(target, lnk, force=False):
    """
    Creates symbolic link 'lnk' pointing to 'target'.
    """

    if system() not in ('Linux', 'Windows', 'MSYS_NT-6.1'):
        print("{} operating system is not supported.".format(system()))
        return

    isdir = False

    lnk = path.normpath(path.expandvars(path.expanduser(lnk)))
    if path.isdir(target):
        isdir = True
    target = path.normpath(path.expandvars(path.expanduser(target)))

    if isdir:
        print("\n{} -> {} : DIR".format(lnk, target))
    else:
        print("\n{} -> {} : FILE".format(lnk, target))

    if path.isdir(lnk) or path.isfile(lnk):
        if not force:
            print("'{}': link exists".format(lnk))
            return
        else:
            remove(lnk)

    if system() in ('Linux', 'MSYS_NT-6.1'):
        Popen(['ln', '-s', target, lnk]).wait()
    elif system() == 'Windows':
        if isdir:
            CreateSymbolicLink(lnk, target, 1)
        else:
            CreateSymbolicLink(lnk, target, 0)
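A minimal usage sketch: both arguments are expanded by link() itself, so env-style paths work on every supported platform:

link('$HOME/dotfiles/vimrc', '~/.vimrc', force=True)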
Example #8
    def __init__(self):
        appdata = op.expandvars('%appdata%')
        program_files = op.expandvars('%programfiles(x86)%')
        self.config_path = op.join(appdata, 'Wox\\Settings\\Plugins\\OpenInFilezilla\\config.json')
        if not op.exists(self.config_path):
            xml_path = op.join(appdata, 'filezilla\\sitemanager.xml')
            exe_path = op.join(program_files, 'FileZilla FTP Client\\filezilla.exe')
            if not op.exists(xml_path):
                xml_path = ''
            if not op.exists(exe_path):
                exe_path = ''
            default_config = {
                'xml': xml_path,
                'exe': exe_path
            }
            try:
                os.makedirs(op.join(appdata, 'Wox\\Settings\\Plugins\\OpenInFilezilla'), mode=0o777)
            except OSError:
                pass
            config_file = open(self.config_path, 'w')
            json.dump(default_config, config_file, indent=4)
            config_file.close()

        config_file = open(self.config_path, 'r')
        self.config = json.load(config_file)
        config_file.close()

        super(OpenInFz, self).__init__()
Example #9
def parse_config_file(text):
    config = configparser.RawConfigParser()

    # default options
    config.add_section('Save')
    config.set('Save', 'magnets', 'false')
    config.set('Save', 'torrents', 'false')
    config.set('Save', 'directory', os.getcwd())

    config.add_section('LocalDB')
    config.set('LocalDB', 'enabled', 'false')
    config.set('LocalDB', 'path', expanduser('~/downloads/pirate-get/db'))

    config.add_section('Misc')
    # TODO: try to use https://docs.python.org/3/library/configparser.html#configparser.BasicInterpolation for interpolating in the command
    config.set('Misc', 'openCommand', '')
    config.set('Misc', 'transmission', 'false')
    config.set('Misc', 'colors', 'true')

    # Additional mirror that works for the user.
    config.add_section('Mirror')
    config.set('Mirror', 'url', 'https://thepiratebay.mn')

    config.read_string(text)

    # expand env variables
    directory = expanduser(expandvars(config.get('Save', 'Directory')))
    path = expanduser(expandvars(config.get('LocalDB', 'path')))

    config.set('Save', 'Directory', directory)
    config.set('LocalDB', 'path', path)

    return config
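A quick round trip of the parser above (a sketch; note that RawConfigParser lower-cases option names, so 'Directory' and 'directory' address the same key):

cfg = parse_config_file('[Save]\ndirectory = $HOME/torrents\n')
print(cfg.get('Save', 'magnets'))    # 'false' -- default preserved
print(cfg.get('Save', 'directory'))  # expanded, e.g. '/home/user/torrents'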
Example #10
def load_config():
    config = configparser.ConfigParser()

    # default options
    config.add_section('Save')
    config.set('Save', 'magnets', 'false')
    config.set('Save', 'torrents', 'false')
    config.set('Save', 'directory', os.getcwd())

    config.add_section('LocalDB')
    config.set('LocalDB', 'enabled', 'false')
    config.set('LocalDB', 'path', expanduser('~/downloads/pirate-get/db'))

    config.add_section('Misc')
    config.set('Misc', 'openCommand', '')
    config.set('Misc', 'transmission', 'false')
    config.set('Misc', 'colors', 'true')

    # user-defined config files
    main = expandvars('$XDG_CONFIG_HOME/pirate-get')
    alt = expanduser('~/.config/pirate-get')

    # read config file
    config.read([main] if os.path.isfile(main) else [alt])

    # expand env variables
    directory = expanduser(expandvars(config.get('Save', 'Directory')))
    path = expanduser(expandvars(config.get('LocalDB', 'path')))

    config.set('Save', 'Directory', directory)
    config.set('LocalDB', 'path', path)

    return config
Example #11
def start_femhub(debug=False):
    if debug:
        print "Loading IPython..."
    try:
        import IPython
    except ImportError:
        raise Exception("You need to install 'ipython'")
    if debug:
        print "  Done."
    banner_length = 70
    l = "| FEMhub Version %s, Release Date: %s" % (version, release_date)
    l += " " * (banner_length - len(l) - 1) + "|"
    banner = "-" * banner_length + "\n" + l + "\n"

    def lab_wrapper(old=False, auth=True, *args, **kwargs):
        if old:
            from sagenb.notebook.notebook_object import lab
            lab(*args, **kwargs)
        else:
            run_lab(auth=auth)
    namespace = {"lab": lab_wrapper}

    os.environ["IPYTHONDIR"] = expandvars("$DOT_SAGE/ipython")
    os.environ["IPYTHONRC"] = "ipythonrc"
    if not os.path.exists(os.environ["IPYTHONRC"]):
        cmd('mkdir -p "$DOT_SAGE"')
        cmd('cp -r "$FEMHUB_ROOT/spkg/base/ipython" "$DOT_SAGE/"')
    os.environ["MPLCONFIGDIR"] = expandvars("$DOT_SAGE/matplotlib")
    if not os.path.exists(os.environ["MPLCONFIGDIR"]):
        cmd('cp -r "$FEMHUB_ROOT/spkg/base/matplotlib" "$DOT_SAGE/"')

    if debug:
        print "Starting the main loop..."
    IPython.Shell.start(user_ns=namespace).mainloop(banner=banner)
Example #12
def GetIceCubeFlasherSpectrumData(spectrumType):
    if spectrumType == I3CLSimFlasherPulse.FlasherPulseType.LED340nm:
        data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_340nm_emission_spectrum_cw_measured_20mA_pulseCurrent.txt"), unpack=True)
        #data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_340nm_emission_spectrum_cw_measured_200mA_pulseCurrent.txt"), unpack=True)
        data[0] *= I3Units.nanometer # apply the correct units
        data[1] /= 24.306508         # (20mA pulse)  pre-calculated normalization constant (not really necessary for the generation algorithm)
        #data[1] /= 22.323254        # (200ma pulse) pre-calculated normalization constant (not really necessary for the generation algorithm)
    elif spectrumType == I3CLSimFlasherPulse.FlasherPulseType.LED370nm:
        data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_370nm_emission_spectrum_cw_measured.txt"), unpack=True)
        data[0] *= I3Units.nanometer # apply the correct units
        data[1] /= 15.7001863        # pre-calculated normalization constant (not really necessary for the generation algorithm)
    elif spectrumType == I3CLSimFlasherPulse.FlasherPulseType.LED405nm:
        data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_405nm_emission_spectrum_datasheet.txt"), unpack=True)
        data[0] *= I3Units.nanometer # apply the correct units
        data[1] /= 8541585.10324     # pre-calculated normalization constant (not really necessary for the generation algorithm)
    elif spectrumType == I3CLSimFlasherPulse.FlasherPulseType.LED450nm:
        data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_450nm_emission_spectrum_datasheet.txt"), unpack=True)
        data[0] *= I3Units.nanometer # apply the correct units
        data[1] /= 21.9792812618     # pre-calculated normalization constant (not really necessary for the generation algorithm)
    elif spectrumType == I3CLSimFlasherPulse.FlasherPulseType.LED505nm:
        data = numpy.loadtxt(expandvars("$I3_BUILD/clsim/resources/flasher_data/flasher_led_505nm_emission_spectrum_cw_measured.txt"), unpack=True)
        data[0] *= I3Units.nanometer # apply the correct units
        data[1] /= 38.1881           # pre-calculated normalization constant (not really necessary for the generation algorithm)
    else:
        raise RuntimeError("invalid spectrumType")
        
    ## auto-normalize (not active in order not to depend on scipy)
    #import scipy, scipy.integrate
    #integral = scipy.integrate.trapz(y=data[1], x=data[0]/I3Units.nanometer)
    #data[1] /= integral
    #print integral
    
    return data
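The elif chain above varies only in the data file name and the normalization constant; a table-driven equivalent is sketched below (same paths and constants from the original, abbreviated to two entries; get_spectrum is a hypothetical name):

_FLASHER_SPECTRA = {
    I3CLSimFlasherPulse.FlasherPulseType.LED370nm:
        ("flasher_led_370nm_emission_spectrum_cw_measured.txt", 15.7001863),
    I3CLSimFlasherPulse.FlasherPulseType.LED405nm:
        ("flasher_led_405nm_emission_spectrum_datasheet.txt", 8541585.10324),
    # ... remaining LED types elided
}

def get_spectrum(spectrumType):
    try:
        fname, norm = _FLASHER_SPECTRA[spectrumType]
    except KeyError:
        raise RuntimeError("invalid spectrumType")
    data = numpy.loadtxt(
        expandvars("$I3_BUILD/clsim/resources/flasher_data/" + fname), unpack=True)
    data[0] *= I3Units.nanometer  # apply the correct units
    data[1] /= norm               # pre-calculated normalization constant
    return data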
Example #13
def parse_config_file(text):
    config = configparser.RawConfigParser()

    # default options
    config.add_section("Save")
    config.set("Save", "magnets", "false")
    config.set("Save", "torrents", "false")
    config.set("Save", "directory", os.getcwd())

    config.add_section("LocalDB")
    config.set("LocalDB", "enabled", "false")
    config.set("LocalDB", "path", expanduser("~/downloads/pirate-get/db"))

    config.add_section("Misc")
    # TODO: try to use https://docs.python.org/3/library/configparser.html#configparser.BasicInterpolation for interpolating in the command
    config.set("Misc", "openCommand", "")
    config.set("Misc", "transmission", "false")
    config.set("Misc", "colors", "true")

    config.read_string(text)

    # expand env variables
    directory = expanduser(expandvars(config.get("Save", "Directory")))
    path = expanduser(expandvars(config.get("LocalDB", "path")))

    config.set("Save", "Directory", directory)
    config.set("LocalDB", "path", path)

    return config
Example #14
def main():
    global ui, net, config, confPath
    
    config = SafeConfigParser()
    config.SaveConfig = saveConfig
    if path.isfile('geeklient.cfg'): # portable mode
        confPath = 'geeklient.cfg'
    elif path.expandvars('$XDG_CONFIG_HOME') != '$XDG_CONFIG_HOME':
        confPath = path.expandvars('$XDG_CONFIG_HOME/geeklient.cfg')
    elif path.expandvars('%APPDATA%') != '%APPDATA%':
        confPath = path.expandvars('%APPDATA%/geeklient.cfg')
    else:
        confPath = path.expandvars('$HOME/.config/geeklient.cfg')
    if path.isfile(confPath):
        config.read(confPath)
    
    net = Networking()
    from ui import UI_GTK
    ui = UI_GTK(net,config,about)
    if net.connect():
        ui.add_message("Welcome to chat!")
    else: 
        net.name = "You"
        ui.add_message("You are not connected",'FF0000')
    ui.begin()
    return None
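The cascade above relies on a useful property of expandvars: references to undefined variables are left untouched, so comparing input and output tells you whether the variable exists. Isolated as a helper (a sketch; first_defined is a hypothetical name):

from os import path

def first_defined(*templates):
    """Return the first template whose variable actually expanded (sketch)."""
    for t in templates:
        expanded = path.expandvars(t)
        if expanded != t:  # unchanged output means the variable was undefined
            return expanded
    return None

confPath = (first_defined('$XDG_CONFIG_HOME/geeklient.cfg',
                          '%APPDATA%/geeklient.cfg')
            or path.expandvars('$HOME/.config/geeklient.cfg'))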
Example #15
    def test06ChangePassphrase(self):
        # change pass-phrase protecting a given credential
        thisSection = self.cfg['test06ChangePassphrase']

        passphrase = thisSection.get('passphrase')
        if passphrase is None:
            passphrase = getpass("test06ChangePassphrase - passphrase: ")

        newPassphrase = thisSection.get('newPassphrase')
        if newPassphrase is None:
            newPassphrase = getpass("test06ChangePassphrase - new passphrase: ")

            confirmNewPassphrase = getpass("test06ChangePassphrase - confirm "
                                           "new passphrase: ")

            if newPassphrase != confirmNewPassphrase:
                self.fail("New and confirmed new password don't match")

        sslKeyFilePassphrase = thisSection.get('sslKeyFilePassphrase') or \
                            passphrase

        self.clnt.changePassphrase(thisSection['username'],
                               passphrase,
                               newPassphrase,
                               path.expandvars(thisSection['sslCertFile']),
                               path.expandvars(thisSection['sslKeyFile']),
                               sslKeyFilePassphrase=sslKeyFilePassphrase)
        print("Changed pass-phrase")
Example #16
def parse_config_file(text):
    config = configparser.RawConfigParser()

    # default options
    config.add_section('Save')
    config.set('Save', 'magnets', 'false')
    config.set('Save', 'torrents', 'false')
    config.set('Save', 'directory', os.getcwd())

    config.add_section('LocalDB')
    config.set('LocalDB', 'enabled', 'false')
    config.set('LocalDB', 'path', expanduser('~/downloads/pirate-get/db'))

    config.add_section('Misc')
    # TODO: try to use configparser.BasicInterpolation
    #       for interpolating in the command
    config.set('Misc', 'openCommand', '')
    config.set('Misc', 'transmission', 'false')
    config.set('Misc', 'colors', 'true')

    config.read_string(text)

    # expand env variables
    directory = expanduser(expandvars(config.get('Save', 'Directory')))
    path = expanduser(expandvars(config.get('LocalDB', 'path')))

    config.set('Save', 'Directory', directory)
    config.set('LocalDB', 'path', path)

    return config
Example #17
File: MAP.py Project: exedre/e4t
    def insert_background(self,f1):
        imagename = expandvars(expanduser(self._spec['IMAGE']))
        logger.debug('Image is %s',imagename)
        if imagename[0]=='/':
            imgfname = imagename
        else:
            imgfname = expandvars(expanduser(join(self._options.input_path,imagename)))
            if not exists(imgfname):
                SHAPE_PATH = DEFAULT_IMAGE_PATH
                imgfname = expandvars(expanduser(join(SHAPE_PATH,imagename)))

        if exists(imgfname):
            logger.debug('Loading background image %s',imgfname)
            logger.debug('MPL VERSION %s',mpl.__version__)
            V = tuple(int(x) for x in mpl.__version__.split('.'))
            if V > (1, 0, 0):
                img = np.flipud(mpimg.imread(imgfname))
            else:
                img = mpimg.imread(imgfname)
            if img is not None:
                logger.debug('Showing image')
                f1.imshow(img,origin='lower',visible=True,alpha=.5,aspect='equal')
                logger.debug('Showed image')
            else:
                logger.error('E:FIG:MAP:001 -- Cannot open map %s', imgfname)
                raise ValueError, 'E:FIG:MAP:001'
        else:
            logger.error("L'immagine %s non esiste",imgfname)
            raise ValueError,'E:FIG:MAP:002'
Example #18
def write_fine_tune(src, dst, ovr=0):
    """
    Train SDA with WM samples in {src}.
    src: folder containing the WM surface samples
    dst: destination folder for the trained SDA output
    ovr: overwrite existing output when true
    """
    src = pt.expandvars(pt.expanduser(src))
    dst = pt.expandvars(pt.expanduser(dst))
    hlp.mk_dir(dst)

    ## gather WM surface samples, also check existing output
    sfs = []
    flt = lambda w: w.endswith('npz') or w.endswith('pgz')
    for sf in hlp.itr_fn(src, 'c', flt):
        fo = pt.join(dst, sf + '.pgz')
        fl = pt.join(dst, sf + '.log')
        if pt.isfile(fo) and not ovr:
            print fo, ': exists'
        elif pt.isfile(fl) and not ovr:
            print fl, ': exists'
        else:
            sfs.append(sf)

    ## write commands
    cmd = 'time python wm_fine_tune.py {t} {s} . &>{t}.log\n'
    for fo, sf in hlp.hpcc_iter(
            sfs, dst, npb=1, ppn=4, mpn=4, tpp=2, qsz=1,
            mds=['NumPy'],
            lnk=['rdm', 'hlp.py'],
            cpy=['wm_fine_tune.py'],
            pfx=['export MKL_NUM_THREADS={}'.format(4)],
            debug=False):

        ## write command for one processor
        fo.write(cmd.format(s=src, t=sf))
Example #19
    def get_cache_key(cls, infile, phikey="phi", psikey="psi",
        zkey="free energy", mode="hist", bins=72, bandwidth=5, wrap=True,
        mask_cutoff=None,
        calc_populations=False, plot_populations=False,
        *args, **kwargs):
        """
        Generates tuple of arguments to be used as key for dataset
        cache.

        Arguments documented under :func:`__init__`.
        """
        from os.path import expandvars

        if zkey in ["free energy", "probability"]:
            x_bins, y_bins = cls.process_bins_arg(bins, dim=2)
            bins = (tuple(x_bins), tuple(y_bins))
        else:
            x_bins, y_bins, z_bins = cls.process_bins_arg(bins, dim=3)
            bins = (tuple(x_bins), tuple(y_bins), tuple(z_bins))

        if mode == "hist":
            return (cls, expandvars(infile), phikey, psikey, zkey, mode, bins,
                    wrap, mask_cutoff, calc_populations, plot_populations)
        elif mode == "kde":
            return (cls, expandvars(infile), phikey, psikey, zkey, mode, bins,
                    bandwidth, wrap, mask_cutoff, calc_populations,
                    plot_populations)
Example #20
def _config_files(dointeractive=False):
  from os.path import exists, expanduser, expandvars, dirname, join, basename
  from glob import iglob
  from os import environ

  # pattern to distinguish files to run only in interactive mode.
  # these files are loaded by the pylada-ipython extension itself.
  pattern = "*.py" if not dointeractive else "ipy_*.py"
  # dictionary with stuff we want defined when reading config files.
  global_dict = {"pyladamodules": __all__}
  local_dict = {}
  # first configuration files installed with pylada.
  for filename in iglob(join(join(dirname(__file__), "config"), pattern)):
    # compare against the basename: iglob returns full paths
    if not dointeractive and basename(filename)[:4] == 'ipy_': continue
    execfile(filename, global_dict, local_dict)

  # then configuration files installed in a global config directory.
  if "PYLADA_CONFIG_DIR" in environ:
    for directory in environ["PYLADA_CONFIG_DIR"].split(':'):
      for filename in iglob(join(directory, pattern)):
        if not dointeractive and basename(filename)[:4] == 'ipy_': continue
        execfile(filename, global_dict, local_dict)

  # then user configuration file.
  if exists(expandvars(expanduser('~/.pylada'))):
    execfile(expandvars(expanduser('~/.pylada')), global_dict, local_dict)
  return local_dict
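execfile was removed in Python 3; if this module were ported, a drop-in shim is commonly defined like this (a sketch, not part of the original):

def execfile(filename, globals=None, locals=None):
    # Python 3 replacement for the Python 2 builtin used above (sketch).
    with open(filename) as f:
        code = compile(f.read(), filename, 'exec')
    exec(code, globals, locals)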
Example #21
def start_qsnake(debug=False):
    if debug:
        print "Loading IPython..."
    try:
        import IPython
    except ImportError:
        raise Exception("You need to install 'ipython'")
    if debug:
        print "  Done."
    banner_length = 70
    l = "| Qsnake Version %s, Release Date: %s" % (version, release_date)
    l += " " * (banner_length - len(l) - 1) + "|"
    banner = "-" * banner_length + "\n" + l + "\n"
    l = "| Type lab() for the GUI."
    l += " " * (banner_length - len(l) - 1) + "|"
    banner += l + "\n" + "-" * banner_length + "\n"

    namespace = {"lab": run_lab}

    os.environ["IPYTHONDIR"] = expandvars("$DOT_SAGE/ipython")
    os.environ["IPYTHONRC"] = "ipythonrc"
    if not os.path.exists(os.environ["IPYTHONRC"]):
        cmd('mkdir -p "$DOT_SAGE"')
        cmd('cp -r "$QSNAKE_ROOT/spkg/base/ipython" "$DOT_SAGE/"')
    os.environ["MPLCONFIGDIR"] = expandvars("$DOT_SAGE/matplotlib")
    if not os.path.exists(os.environ["MPLCONFIGDIR"]):
        cmd('cp -r "$QSNAKE_ROOT/spkg/base/matplotlib" "$DOT_SAGE/"')

    if debug:
        print "Starting the main loop..."
    c = IPython.config.loader.Config()
    c.InteractiveShell.confirm_exit = False
    IPython.frontend.terminal.embed.InteractiveShellEmbed(config=c,
            user_ns=namespace, banner1=banner).mainloop(local_ns={})
Example #22
def directorysmartdiff(directoryA, directoryB):
    """docstring for directorysmartdiff"""
    directoryA = path.abspath(path.expanduser(path.expandvars(directoryA)))
    directoryB = path.abspath(path.expanduser(path.expandvars(directoryB)))
    dirsetA = set(os.listdir(directoryA))
    dirsetB = set(os.listdir(directoryB))
    #
    commonlist = dirsetA & dirsetB
    onlyinA = dirsetA - dirsetB
    onlyinB = dirsetB - dirsetA
    changedfiles = []
    #
    folderdiffprint(directoryA, directoryB, commonlist, onlyinA, onlyinB)
    print '\n[Comparing Shared Files]'
    #
    for filename in commonlist:
        fullfilepathA = os.path.join(directoryA, filename)
        fullfilepathB = os.path.join(directoryB, filename)
        #if isXMLFile(fullfilepathA):
        if smartdiff(fullfilepathA, fullfilepathB):
            print ' [!"{0}" - Change found!]'.format(filename)
            changedfiles.append(filename)
        else:
            print ' [ "{0}" - No change]'.format(filename)
    # Summary
    print ''
    folderdiffprint(directoryA, directoryB, commonlist, onlyinA, onlyinB)
    print '\n\n[Shared Files w/Differences]'
    for file in changedfiles: print "! {0}".format(file)
    pass
Example #23
def main(args):
    """read a POOL file and dump the DataHeader's content

    ex:
     $ check-sg aod.pool.root
     $ check-sg /castor/foo.pool
     $ check-sg root://castoratlas//castor/foo.pool
     $ check-sg LFN:ttbar.pool
    """
    files = args.files
    if isinstance(files, basestring):
        files = [files]

    import os.path as osp
    for i,f in enumerate(files):
        files[i] = osp.expandvars(osp.expanduser(f))

    exitcode = 0
    for fname in files:
        try:
            import AthenaCommon.KeyStore as acks
            print "## checking [%s]..." % (fname,)
            ks = acks.loadKeyStoreFromPoolFile(
                keyStore=osp.basename(fname),
                pool_file=fname,
                label='inputFile')

            print "="*80
            print "%40s%s%-40s" % ("Container type", " | ","StoreGate keys")
            print "%40s%s%-40s" % ("-"*40, "-+-", "-"*(40-3))
            for name,sgkeys in ks.inputFile.dict().items():
                print "%40s%s%-40s" % (name, " | ", ', '.join(sgkeys))
            print "="*80
            if args.output:
                outFileName = args.output
                outFileName = osp.expanduser(outFileName)
                outFileName = osp.expandvars(outFileName)
                print "## saving report into [%s]..." % (outFileName,)
                if osp.splitext(outFileName)[1] in ('.pkl', '.dat'):
                    # we explicitly import 'bsddb' to try to always
                    # get that particular backend for the shelve...
                    import bsddb
                    import shelve
                    if os.path.exists(outFileName):
                        os.remove(outFileName)
                    db = shelve.open(outFileName)
                    db['eventdata_items'] = ks.inputFile.dict()
                    db.close()
                else:
                    ks.write(outFileName, label='inputFile')
        except Exception, e:
            print "## Caught exception [%s] !!" % str(e.__class__)
            print "## What:",e
            print sys.exc_info()[0]
            print sys.exc_info()[1]
            exitcode = 1
            pass

        except :
Example #24
    def _init_default_storage(self, config):
        store_type = expandvars(config.get('default_storage', 'local'))

        storage = config['storage']

        store_class = storage.get(store_type)

        return store_class(expandvars(config['storage_remote_root']))
Example #25
    def __call__(self, handle, collection, name=None):
        """
        Returns
        -------
        Handle
        """

        local_master = get_datalad_master()

        if isdir(abspath(expandvars(expanduser(handle)))):
            h_path = abspath(expandvars(expanduser(handle)))
            handle_repo = HandleRepo(h_path, create=False)
        elif handle in local_master.get_handle_list():
            h_path = urlparse(CollectionRepoHandleBackend(repo=local_master,
                                                 key=handle).url).path
            handle_repo = HandleRepo(h_path, create=False)
            if not isdir(h_path):
                raise RuntimeError("Invalid path to handle '%s':\n%s" %
                                   (handle, h_path))

        elif urlparse(handle).scheme != '':  # rudimentary plausibility check for now
            # treat as a remote annex
            handle_repo = handle
        else:
            raise RuntimeError("Unknown handle '%s'." % handle)

        if isdir(abspath(expandvars(expanduser(collection)))):
            c_path = abspath(expandvars(expanduser(collection)))
        elif collection in local_master.git_get_remotes():
            c_path = urlparse(local_master.git_get_remote_url(collection)).path
            if not isdir(c_path):
                raise RuntimeError("Invalid path to collection '%s':\n%s" %
                                   (collection, c_path))
        else:
            raise RuntimeError("Unknown collection '%s'." % collection)

        collection_repo = CollectionRepo(c_path, create=False)
        collection_repo.add_handle(handle_repo, name=name)

        # get handle's metadata, if there's any:
        if isinstance(handle_repo, HandleRepo) and \
                exists(opj(handle_repo.path, HANDLE_META_DIR,
                           REPO_STD_META_FILE)):
            collection_repo.import_metadata_to_handle(CustomImporter,
                                                      key=name if name is not None else handle_repo.name,
                                                      files=opj(
                                                          handle_repo.path,
                                                          HANDLE_META_DIR))

        # TODO: More sophisticated: Check whether the collection is registered.
        # Might be a different name than collection_repo.name or not at all.
        local_master.git_fetch(collection_repo.name)

        return Handle(CollectionRepoHandleBackend(collection_repo,
                                                  name if name is not None
                                                  else handle_repo.name))
Example #26
def install_package(pkg, install_dependencies=True, force_install=False,
        cpu_count=0):
    """
    Installs the package "pkg".

    "pkg" can be either a full path, or just the name of the package (with or
    without a version).

    "install_dependencies" ... if True, it will also install all dependencies

    "force_install" ... if True, it will install the package even if it has
                    been already installed

    "cpu_count" ... number of processors to use (0 means the number of
            processors in the  machine)

    Examples:
    >>> install_package("http://qsnake.org/stpack/python-2.6.4.p9.spkg")
    >>> install_package("spkg/standard/readline-6.0.spkg")
    >>> install_package("readline-6.0.spkg")
    >>> install_package("readline")

    """
    if pkg.startswith("http") or pkg.startswith("www"):
        # Download from the web:
        remote = True
        import tempfile
        tmpdir = tempfile.mkdtemp()
        cmd("wget --directory-prefix=" + tmpdir + " " + pkg)
        pkg_name = os.path.split(pkg)
        pkg = os.path.join(tmpdir,pkg_name[1])
    elif pkg == ".":
        # Install from the current directory, try to guess
        # how to install it properly:
        if os.path.exists(expandvars("$CUR/spkg-install")):
            setup_cpu(cpu_count)
            try:
                cmd("cd $CUR; /bin/bash spkg-install")
            except CmdException:
                print "Qsnake 'install .' exited with an error."
        elif os.path.exists(expandvars("$CUR/setup.py")):
            try:
                cmd("cd $CUR; python setup.py install")
            except CmdException:
                print "Qsnake 'python setup.py install' exited with an error."
        else:
            print "Don't know how to install from the current directory."
        return
    else:
        # Install the 'pkg' package
        remote = False
        try:
            pkg = pkg_make_absolute(pkg)
        except PackageNotFound, p:
            print p
            sys.exit(1)
Example #27
    def __init__(self, pwndata, fitdir, phase_shift=None, verbosity=True):
        self.pwndata = expandvars(pwndata)
        self.fitdir = expandvars(fitdir)
        if phase_shift is not None:
            self.phase_shift = expandvars(phase_shift)
        else:
            print 'WARNING: PHASE SHIFT IS NONE'
            self.phase_shift = None

        self.verbosity = verbosity
Example #28
    def run_task(self, fw_spec):
        shell_interpret = self.get('shell_interpret', True)
        ignore_errors = self.get('ignore_errors')
        mode = self.get('mode', 'move')

        if mode == 'rtransfer':
            # remote transfers
            # Create SFTP connection
            import paramiko
            ssh = paramiko.SSHClient()
            ssh.load_host_keys(expanduser(os.path.join("~", ".ssh", "known_hosts")))
            ssh.connect(self['server'], username=self.get('user'), key_filename=self.get('key_filename'))
            sftp = ssh.open_sftp()

        for f in self["files"]:
            try:
                if 'src' in f:
                    src = os.path.abspath(expanduser(expandvars(f['src']))) if shell_interpret \
                        else f['src']
                else:
                    src = abspath(expanduser(expandvars(f))) if shell_interpret else f

                if mode == 'rtransfer':
                    dest = self['dest']
                    if os.path.isdir(src):
                        if not self._rexists(sftp, dest):
                            sftp.mkdir(dest)

                        for f in os.listdir(src):
                            if os.path.isfile(os.path.join(src,f)):
                                sftp.put(os.path.join(src, f), os.path.join(dest, f))
                    else:
                        if not self._rexists(sftp, dest):
                            sftp.mkdir(dest)

                        sftp.put(src, os.path.join(dest, os.path.basename(src)))

                else:
                    if 'dest' in f:
                        dest = abspath(expanduser(expandvars(f['dest']))) if shell_interpret \
                            else f['dest']
                    else:
                        dest = abspath(expanduser(expandvars(self['dest']))) if shell_interpret \
                            else self['dest']
                    FileTransferTask.fn_list[mode](src, dest)

            except:
                traceback.print_exc()
                if not ignore_errors:
                    raise ValueError(
                        "There was an error performing operation {} from {} "
                        "to {}".format(mode, self["files"], self["dest"]))
        if mode == 'rtransfer':
            sftp.close()
            ssh.close()
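The shell_interpret branches above all apply the same expansion chain; factored out, it is just (a sketch, helper name hypothetical):

from os.path import abspath, expanduser, expandvars

def _interpret(p, shell_interpret=True):
    # expand $VARS, then ~, then make absolute -- matching the inline chain above
    return abspath(expanduser(expandvars(p))) if shell_interpret else p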
Example #29
def MakePropagator(
    radius=800*I3Units.m,
    length=1600*I3Units.m,
    particleType=dataclasses.I3Particle.ParticleType.MuMinus,
    impl='proposal',
    mediadef=None):
        """
        Create a muon propagator service.

        :param radius: radius of the target cylinder
        :param length: full height of the target cylinder
        :param impl: if "mmc", use MMC, otherwise use PROPOSAL
        :param mediadef: path to MMC media definition file
        """

        seed = 12345 # fixed seed, RNG will be re-set by the propagator module

        if impl.lower() == 'mmc':
            from icecube import sim_services, c2j_icetray, mmc_icetray, icetray
            if mediadef is None:
                mediadef=expandvars('$I3_BUILD/PROPOSAL/resources/mediadef')
            jvmOpts = icetray.vector_string()    # fill this with parameters passed directly to the JavaVM
            jvmOpts.append(expandvars("-Djava.class.path=$I3_BUILD/lib/mmc.jar"))
            jvmOpts.append("-Xms256m")
            jvmOpts.append("-Xmx512m")
            jvmOpts.append("-XX:-HeapDumpOnOutOfMemoryError")

            # Now create the MMC propagators, but first *all* of the options must be set here.
            # There's no special options added behind the scenes.  This is much more flexible.
            #  Below are the standard options.  To interpret them see the MMC docs.
            mmcOpts = "-romb=5 -raw -user -sdec -time -lpm -bs=1 -ph=3 -bb=2 -sh=2 -frho -cont "
            mmcOpts += expandvars("-tdir=$I3_BUILD/mmc-icetray/resources ")
            mmcOpts += expandvars("-mediadef=%s " % mediadef)
            mmcOpts += "-radius=%d " % radius
            mmcOpts += "-length=%d " % length
            mmcOpts += "-seed=%d " % seed

            if particleType in [dataclasses.I3Particle.ParticleType.MuMinus, dataclasses.I3Particle.ParticleType.MuPlus]:
                pass # no extra options necessary
            elif particleType in [dataclasses.I3Particle.ParticleType.TauMinus, dataclasses.I3Particle.ParticleType.TauPlus]:
                mmcOpts += "tau "
            else:
                raise RuntimeError("Cannot propagate particle type!")

            jvm = c2j_icetray.I3JavaVM(jvmOpts)
            return mmc_icetray.I3PropagatorServiceMMC(jvm,mmcOpts)
        elif impl.lower() == 'proposal':
            from icecube import sim_services, PROPOSAL
            # in PROPOSAL everything can be defined in the configuration file
            if mediadef is None:
                mediadef=expandvars('$I3_BUILD/PROPOSAL/resources/config_icesim.json')
            return PROPOSAL.I3PropagatorServicePROPOSAL(
                config_file=mediadef)
        else:
            raise RuntimeError("unknown propagator: %s" % impl)
Example #30
def convert_path(path):
    "Convert the origin path from its containing envs."
    matched = re.search(VARIABLE_PATTERN, path)
    if matched is not None:
        converted = expandvars(matched.group(0))
        if isabs(converted) and isdir(converted):
            return converted
    converted = expandvars(path)
    if isdir(converted):
        return converted
    return path
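A few hypothetical calls, assuming VARIABLE_PATTERN matches a leading $VAR or %VAR% token (the constant is defined elsewhere in the module):

import os
os.environ['DATA'] = '/srv/data'   # for illustration
print(convert_path('$DATA'))       # '/srv/data' when that directory exists
print(convert_path('$MISSING/x'))  # nothing expands to an existing dir: returned unchanged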
Example #31
import sys
import os.path as op

from pyrevit import UI
from pyrevit import forms

__context__ = 'zero-doc'
__doc__ = 'Renames PDF sheets printed from Revit and removes the Central ' \
          'model name from the PDF names. The tool will ask for a folder ' \
          'containing the file.\n\n' \
          'Shift-Click:\nRename files on Desktop'

# if user shift-clicks, default to user desktop,
# otherwise ask for a folder containing the PDF files
if __shiftclick__:
    basefolder = op.expandvars('%userprofile%\\desktop')
else:
    basefolder = forms.pick_folder()


def renamePDF(pdffile):
    import re
    r = re.compile('(?<=Sheet - )(.+)')
    fname = r.findall(pdffile)[0]
    r = re.compile(r'(.+)\s-\s(.+)')
    fnameList = r.findall(fname)
    return fnameList[0][0] + ' - ' + fnameList[0][1].upper()


if basefolder:
    sheetcount = 0
Example #32
    def result_dict(self):
        """``Dictionary<String, String>``: Return results dict for logging."""
        if self.pyrevit_command:
            return self.pyrevit_command.GetResultsDictionary()


# create an instance of _ExecutorParams wrapping current runtime.
EXEC_PARAMS = _ExecutorParams()

# -----------------------------------------------------------------------------
# config user environment paths
# -----------------------------------------------------------------------------
# user env paths
USER_ROAMING_DIR = os.getenv('appdata')
USER_SYS_TEMP = os.getenv('temp')
USER_DESKTOP = op.expandvars('%userprofile%\\desktop')

# create paths for pyrevit files
if EXEC_PARAMS.doc_mode:
    PYREVIT_APP_DIR = PYREVIT_VERSION_APP_DIR = ' '
else:
    # pyrevit file directory
    PYREVIT_APP_DIR = op.join(USER_ROAMING_DIR, PYREVIT_ADDON_NAME)
    PYREVIT_VERSION_APP_DIR = op.join(PYREVIT_APP_DIR, HOST_APP.version)

    # add runtime paths to sys.paths
    # this will allow importing any dynamically compiled DLLs that
    # would be placed under this paths.
    for pyrvt_app_dir in [PYREVIT_APP_DIR, PYREVIT_VERSION_APP_DIR]:
        if not op.isdir(pyrvt_app_dir):
            try:
Example #33
from I3Tray import *

import sys
from os.path import expandvars

from icecube import icetray 
from icecube import dataclasses 
from icecube import phys_services 
from icecube import payload_parsing 
from icecube import dataio 
from icecube import daq_decode 


input = sys.argv[1:]

if not input:
    testdata = expandvars("$I3_TESTDATA")
    input = [testdata + "/daq-decode/PFFilt_PhysicsFiltering_Run00126598_Subrun00000000_00000028_slim.i3.gz"]

for i in input:
    print("will read in file ... %s" % i)

workspace = expandvars("$I3_SRC")

mb_id_file = workspace + "/phys-services/resources/mainboard_ids.xml.gz"


tray = I3Tray()


tray.AddService("I3XMLOMKey2MBIDFactory","omkey2mbid")(
   ("Infile",mb_id_file),
Example #34
    def __init__(self,
                 img_folder,
                 npz_files=[],
                 dtype=torch.float32,
                 use_face_contour=False,
                 binarization=True,
                 body_thresh=0.1,
                 hand_thresh=0.2,
                 face_thresh=0.4,
                 min_hand_keypoints=8,
                 min_head_keypoints=8,
                 transforms=None,
                 split='train',
                 return_shape=False,
                 return_full_pose=False,
                 return_params=True,
                 return_gender=False,
                 vertex_folder='vertices',
                 return_vertices=True,
                 vertex_flip_correspondences='',
                 **kwargs):
        super(SPIN, self).__init__()

        self.img_folder = osp.expandvars(img_folder)
        self.transforms = transforms
        self.use_face_contour = use_face_contour
        self.body_thresh = body_thresh
        self.hand_thresh = hand_thresh
        self.face_thresh = face_thresh
        self.binarization = binarization
        self.dtype = dtype
        self.split = split

        self.min_hand_keypoints = min_hand_keypoints
        self.min_head_keypoints = min_head_keypoints

        self.return_vertices = return_vertices
        self.return_gender = return_gender
        self.return_params = return_params
        self.return_shape = return_shape
        self.return_full_pose = return_full_pose

        self.vertex_folder = osp.join(
            osp.split(self.img_folder)[0], vertex_folder)

        vertex_flip_correspondences = osp.expandvars(
            vertex_flip_correspondences)
        err_msg = ('Vertex flip correspondences path does not exist:' +
                   f' {vertex_flip_correspondences}')
        assert osp.exists(vertex_flip_correspondences), err_msg
        flip_data = np.load(vertex_flip_correspondences)
        self.bc = flip_data['bc']
        self.closest_faces = flip_data['closest_faces']

        self.spin_data = {}
        start = 0
        for npz_fn in npz_files:
            npz_fn = osp.expandvars(npz_fn)
            dset = osp.splitext(osp.split(npz_fn)[1])[0]

            data = np.load(npz_fn)
            has_smpl = np.asarray(data['has_smpl']).astype(bool)
            data = {key: data[key][has_smpl] for key in data.keys()}

            logger.info(start)
            data['dset'] = [dset] * data['pose'].shape[0]
            start += data['pose'].shape[0]
            if 'genders' not in data and self.return_gender:
                data['genders'] = [''] * len(data['pose'])
            data['indices'] = np.arange(data['pose'].shape[0])
            if dset == 'lsp':
                data['part'][26, [9, 11], :] = data['part'][26, [11, 9], :]
            self.spin_data[dset] = data

        # folder_map_fname = osp.expandvars(
        #     osp.join(img_folder, FOLDER_MAP_FNAME))
        # with open(folder_map_fname, 'rb') as f:
        #     data_dict = pickle.load(f)
        # self.items_per_folder = max(data_dict.values())

        self.indices = np.concatenate(
            [self.spin_data[dset]['indices'] for dset in self.spin_data],
            axis=0).astype(np.int32)
        self.centers = np.concatenate(
            [self.spin_data[dset]['center'] for dset in self.spin_data],
            axis=0).astype(np.float32)
        self.scales = np.concatenate(
            [self.spin_data[dset]['scale'] for dset in self.spin_data],
            axis=0).astype(np.float32)
        self.poses = np.concatenate(
            [self.spin_data[dset]['pose'] for dset in self.spin_data],
            axis=0).astype(np.float32)
        self.keypoints2d = np.concatenate(
            [self.spin_data[dset]['part'] for dset in self.spin_data],
            axis=0).astype(np.float32)
        self.imgname = np.concatenate(
            [self.spin_data[dset]['imgname'] for dset in self.spin_data],
            axis=0).astype(np.string_)
        self.dset = np.concatenate(
            [self.spin_data[dset]['dset'] for dset in self.spin_data],
            axis=0).astype(np.string_)
        if self.return_gender:
            gender = []
            for dset in self.spin_data:
                gender.append(self.spin_data[dset]['genders'])
            self.gender = np.concatenate(gender).astype(np.string_)

        if self.return_shape:
            self.betas = np.concatenate(
                [self.spin_data[dset]['betas'] for dset in self.spin_data],
                axis=0).astype(np.float32)

        #  self.dset_names = list(self.spin_data.keys())
        dset_sizes = list(
            map(lambda x: x['pose'].shape[0], self.spin_data.values()))
        #  logger.info(self.dset_sizes)

        self.num_items = sum(dset_sizes)
        #  logger.info(self.num_items)

        source_idxs, target_idxs = dset_to_body_model(
            model_type='smplx',
            use_hands=True,
            use_face=True,
            dset='spin',
            use_face_contour=self.use_face_contour)
        self.source_idxs = np.asarray(source_idxs, dtype=np.int64)
        self.target_idxs = np.asarray(target_idxs, dtype=np.int64)

        idxs_dict = get_part_idxs()
        body_idxs = idxs_dict['body']
        hand_idxs = idxs_dict['hand']
        face_idxs = idxs_dict['face']
        if not self.use_face_contour:
            face_idxs = face_idxs[:-17]
        self.body_idxs = np.asarray(body_idxs)
        self.hand_idxs = np.asarray(hand_idxs)
        self.face_idxs = np.asarray(face_idxs)
Example #35

import os.path as op
from Autodesk.Revit.DB import FilteredElementCollector, Element, ImageType

uidoc = __revit__.ActiveUIDocument
doc = __revit__.ActiveUIDocument.Document
selection = [
    doc.GetElement(elId)
    for elId in __revit__.ActiveUIDocument.Selection.GetElementIds()
]

destDir = op.expandvars('%userprofile%\\desktop')

cl = FilteredElementCollector(doc)
imageTypes = cl.OfClass(ImageType).ToElements()

for el in imageTypes:
    image = el.GetImage()
    imageName = op.basename(el.Path)
    # imageName = Element.Name.GetValue( el )
    print('EXPORTING: {0}'.format(imageName))
    image.Save(op.join(destDir, imageName))
Example #36
    def deployment_path(self):
        dep_path = self.get('settings', ordereddict()).get(
            'deployment_output_path',
            path.join(self.base_path, 'ansible-deployment/'))
        return path.normpath(path.abspath(path.expanduser(path.expandvars(dep_path))))
Example #37
def call(cmd, stdin=None, cwd=None):
    print(cmd)
    return subprocess.call([expandvars(cmd)], shell=True, stdin=stdin, cwd=cwd)
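A usage sketch: the whole command string is expanded by Python before the shell sees it.

import os
os.environ['REPO_ROOT'] = '/tmp'
call('ls $REPO_ROOT')  # prints the raw command, then the shell runs `ls /tmp`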
Example #38
#        env["LINKFLAGS"][i] = "-Wl,-T" + custom_ld_script
#    elif flag == "-T":
#        env["LINKFLAGS"][i + 1] = custom_ld_script

# Custom HEX from ELF
env.AddPostAction(
    join("$BUILD_DIR", "${PROGNAME}.elf"),
    env.VerboseAction(
        " ".join([
            "$OBJCOPY",
            "-O ihex",
            "$TARGET",  # TARGET=.pio/build/fysetc_STM32F1/firmware.elf
            "\"" + join("$BUILD_DIR", "${PROGNAME}.hex") +
            "\"",  # Note: $BUILD_DIR is a full path
        ]),
        "Building $TARGET"))

# In-line command with arguments
UPLOAD_TOOL = "stm32flash"
platform = env.PioPlatform()
if platform.get_package_dir("tool-stm32duino") is not None:
    UPLOAD_TOOL = expandvars("\"" +
                             join(platform.get_package_dir("tool-stm32duino"),
                                  "stm32flash", "stm32flash") + "\"")

env.Replace(
    UPLOADER=UPLOAD_TOOL,
    UPLOADCMD=expandvars(
        UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" +
        join("$BUILD_DIR", "${PROGNAME}.hex") + "\"" + " $UPLOAD_PORT"))
Example #39
from pathlib import Path
from cv2 import VideoCapture
from mysql.connector import pooling
from os import path, getenv, environ
from dotenv import load_dotenv, set_key
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import QRunnable, Qt, QObject, QTimer, pyqtSignal, pyqtSlot
from PyQt5.QtWidgets import QCompleter, QLabel, QMenu, QAction, QActionGroup, QFileDialog

load_dotenv(r'GUI\.env')
CREDENTIALS = r'GUI\credentials.ini'
AUTOCOMPLETE = r'GUI\autocomplete.txt'
BATCH = 10000

ROOT = Path(Path().cwd().drive)
PATH = ROOT / path.expandvars(r'\Users\$USERNAME\Dropbox\ん')
parts = ", ".join([f"'{part}'" for part in PATH.parts]).replace('\\', '')
BASE = f'SELECT full_path(imagedata.path, {parts}), artist, tags, rating, stars, type, site FROM imagedata'
COMIC = 'SELECT parent FROM comic WHERE path=get_name(%s)'
UPDATE = 'UPDATE imagedata SET {} WHERE path=get_name(%s)'
DELETE = 'DELETE FROM imagedata WHERE path=get_name(%s)'


class CONNECT(QObject):

    finishedTransaction = pyqtSignal(object)
    finishedSelect = pyqtSignal(object)
    finishedUpdate = pyqtSignal(object)
    finishedDelete = pyqtSignal(object)

    def __init__(self, parent):
Example #40
        exit(1)


check_antipackage()

# ref: https://github.com/ellisonbg/antipackage
import antipackage
from github.appscode.libbuild import libbuild, pydotenv

import os
import os.path
import subprocess
import sys
from os.path import expandvars, join, dirname

libbuild.REPO_ROOT = expandvars(
    '$GOPATH') + '/src/github.com/displague/csi-linode'
BUILD_METADATA = libbuild.metadata(libbuild.REPO_ROOT)
libbuild.BIN_MATRIX = {
    'csi-linode': {
        'type': 'go',
        'go_version': True,
        'use_cgo': False,
        'distro': {
            'alpine': ['amd64'],
            'linux': ['amd64']
        }
    }
}
libbuild.BUCKET_MATRIX = {
    'prod': 'gs://appscode-cdn',
    'dev': 'gs://appscode-dev'
Example #41
#!/usr/bin/env python

from I3Tray import *

from os.path import expandvars

import os
import sys

from icecube import dataclasses
from icecube import phys_services
from icecube import dataio

tray = I3Tray()

tray.AddModule(
    "I3Reader",
    "reader",
    Filename=expandvars("$I3_TESTDATA/string-21/MacOSX-libstdc++6-ppc.i3.gz"))

tray.AddModule("Dump", "dump")

tray.AddModule("TrashCan", "the can")

tray.Execute()
tray.Finish()
Example #42
def check_output(cmd, stdin=None, cwd=libbuild.REPO_ROOT):
    print(cmd)
    return subprocess.check_output([expandvars(cmd)],
                                   shell=True,
                                   stdin=stdin,
                                   cwd=cwd)
Example #43

check_antipackage()

# ref: https://github.com/ellisonbg/antipackage
import antipackage
from github.appscode.libbuild import libbuild, pydotenv

import os
import os.path
import subprocess
import sys
import yaml
from os.path import expandvars, join, dirname

libbuild.REPO_ROOT = expandvars('$GOPATH') + '/src/github.com/appscode/voyager'
BUILD_METADATA = libbuild.metadata(libbuild.REPO_ROOT)
libbuild.BIN_MATRIX = {
    'voyager': {
        'type': 'go',
        'go_version': True,
        'use_cgo': False,
        'distro': {
            'alpine': ['amd64'],
            'darwin': ['amd64'],
            'linux': ['amd64']
        }
    }
}
if libbuild.ENV not in ['prod']:
    libbuild.BIN_MATRIX['voyager']['distro'] = {'alpine': ['amd64']}
Example #44
def install_launcher(source_folder: Path, destination_folder: Path,
                     launcher_exe: str,
                     additional_programfiles_files: List[str],
                     additional_streaming_files: List[str],
                     cmdline: bool) -> None:
    launcher_path = Path(source_folder / 'dist' / launcher_exe)
    programfiles_files = [launcher_path] + [
        source_folder / 'dist' / file_path
        for file_path in additional_programfiles_files
    ]
    streaming_files = [
        source_folder / 'dist' / file_path
        for file_path in additional_streaming_files
    ]

    _logger.debug(
        f'Installing Moonlight HDR Launcher {(source_folder/"dist"/"version").read_text()}'
    )
    _logger.debug(f'Source folder: {source_folder}')
    _logger.debug(f'Destination folder for launcher: {destination_folder}')
    _logger.debug(f'Launcher path: {launcher_path}')
    _logger.debug(
        f'List of files to put in {destination_folder}: {programfiles_files}')
    _logger.debug(
        f'List of files to put in Streaming folder: {streaming_files}')

    if not launcher_path.exists():
        show_error(f'{launcher_path} does not exist', cmdline)
        raise Exception(f'{launcher_path} does not exist')

    # create destination folder
    try:
        destination_folder.mkdir(parents=True, exist_ok=True)
    except OSError as e:
        show_warning(
            f'No permission to create {destination_folder}, re-run as Administrator',
            cmdline)
        raise e

    # copy files to programfiles destination
    for source_path in programfiles_files:
        if source_path.exists():
            try:
                dest_name, copied = copy_file(source_path,
                                              destination_folder,
                                              update=True)
                if copied:
                    _logger.info(f'Copied {source_path} to {dest_name}')
                else:
                    _logger.info(
                        f'Skipped copying {source_path} to {dest_name} because destination is newer than source'
                    )
            except DistutilsFileError as e:
                show_warning(
                    f'No permission to copy {source_path} to {destination_folder}, re-run as Administrator',
                    cmdline)
                raise e
        else:
            _logger.warning(f'Source file {source_path} does not exist')

    # set destination folder read-write
    oschmod.set_mode_recursive(str(destination_folder), 0o777)

    # write destination_folder to registry
    try:
        write_destination_folder_to_reg(destination_folder)
    except WindowsError as e:
        show_error(f'Failed to write destination_folder to registry: {e}', cmdline)
        raise e

    # prompt to re-scan games
    show_warning(
        'Before continuing, open GeForce Experience and re-scan for games.',
        cmdline)

    # find StreamingAssetsData subfolder
    try:
        app_data = Path(expandvars(r'%LOCALAPPDATA%'))
        _logger.info(f'Found AppData path: {app_data}')
        mad_path = next(
            app_data.glob('**/StreamingAssetsData/mass_effect_andromeda/*'))
        _logger.info(
            f'Found StreamingAssetsData folder for Mass Effect Andromeda: {mad_path}'
        )
    except StopIteration as e:
        show_error(
            'Unable to find entry for Mass Effect Andromeda. Have you tried scanning for games in GeForce Experience?',
            cmdline)
        raise e

    # set folder read-write
    oschmod.set_mode_recursive(str(mad_path), 0o777)

    # copy files to StreamingAssetsData destination
    for source_file in streaming_files:
        try:
            dest_name, copied = copy_file(source_file, mad_path, update=True)
            if copied:
                _logger.info(f'Copied {source_file} to {dest_name}')
            else:
                _logger.info(
                    f'Skipped copying {source_file} to {dest_name} because destination is newer than source'
                )
        except DistutilsFileError:
            show_warning(
                f'No permission to copy {source_file} to {mad_path}, re-run as Administrator',
                cmdline)

    # modify StreamingSettings.json
    streaming_settings_path = mad_path / 'StreamingSettings.json'
    streaming_settings = json.loads(streaming_settings_path.read_text())
    streaming_settings['GameData'][0]['StreamingDisplayName'] = 'HDR Launcher'
    streaming_settings['GameData'][0]['StreamingCaption'] = 'HDR Launcher'
    streaming_settings['GameData'][0]['StreamingClassName'] = 'HDR Launcher'
    streaming_settings['GameData'][0][
        'StreamingCommandLine'] = f'start {launcher_exe}'
    final_json = json.dumps(streaming_settings, indent=4)
    _logger.debug(f'Final StreamingSettings.json: {final_json}')
    _logger.debug(f'Saving to {streaming_settings_path}')
    streaming_settings_path.write_text(final_json)

    # modify metadata.json
    metadata_path = mad_path / 'metadata.json'
    metadata = json.loads(metadata_path.read_text())
    metadata['metadata']['files'] = [{
        'filename': f.name,
        'url': '',
        'sha256': get_sha256(f),
        'size': f.stat().st_size
    } for f in [streaming_settings_path, *streaming_files]]
    final_metadata_json = json.dumps(metadata, indent=4)
    _logger.debug(f'Final metadata.json: {final_metadata_json}')
    _logger.debug(f'Saving to {metadata_path}')
    metadata_path.write_text(final_metadata_json)

    # set folder read-only
    oschmod.set_mode_recursive(mad_path.as_posix(), 'a+r,a-w')

    show_warning(
        'Now restart GeForce Experience or the PC to register the config changes',
        cmdline)
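A possible invocation; every path, filename, and flag below is illustrative rather than taken from the project:

from pathlib import Path

install_launcher(
    source_folder=Path('.'),  # folder that contains the built dist/ tree
    destination_folder=Path(r'C:\Program Files\MoonlightHDRLauncher'),  # placeholder
    launcher_exe='moonlight_hdr_launcher.exe',  # hypothetical executable name
    additional_programfiles_files=['moonlight_hdr_launcher.ini'],
    additional_streaming_files=['box-art.png'],
    cmdline=True,  # report problems on the console instead of dialog boxes
)
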
Example no. 45
0
def getImagePaths(directoryPath, recursiveBoolean, followLinksBoolean,
                  rawsBoolean, hashAlgorithmString):
    "Creates a list of image paths within a given directory (possibly recursively)"

    # standard image formats, always matched
    imageExtensions = (".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp",
                       ".bmp", ".jp2", ".j2k", ".jpf", ".jpx", ".jpm")
    # camera-raw formats, matched only when rawsBoolean is True
    rawExtensions = (".3fr", ".ari", ".arw", ".bay", ".crw", ".cr2", ".cap",
                     ".data", ".dcs", ".dcr", ".dng", ".drf", ".eip", ".erf",
                     ".fff", ".gpr", ".iiq", ".k25", ".kdc", ".mdc", ".mef",
                     ".mos", ".mrw", ".nef", ".nrw", ".obm", ".orf", ".pef",
                     ".ptx", ".pxn", ".r3d", ".raf", ".raw", ".rwl", ".rw2",
                     ".rwz", ".sr2", ".srf", ".srw", ".x3f")
    extensions = imageExtensions + rawExtensions if rawsBoolean else imageExtensions

    def normalize(p):
        # canonical absolute path: env vars and ~ expanded, case and
        # separators normalized
        return path.normcase(
            path.normpath(
                path.abspath(path.expanduser(path.expandvars(p)))))

    if directoryPath is not None:
        directoryPath = normalize(directoryPath)
    else:
        directoryPath = path.curdir
    if hashAlgorithmString is not None:
        checkHashAlgorithmChoice(hashAlgorithmString)
    print("Searching for duplicate images...")

    filePathList = []
    if recursiveBoolean:
        for root, dirs, files in walk(directoryPath,
                                      followlinks=followLinksBoolean):
            for file in files:
                if str(file).lower().endswith(extensions):
                    filePathList.append(normalize(path.join(root, file)))
    else:
        for file in listdir(directoryPath):
            if str(file).lower().endswith(extensions):
                filePathList.append(normalize(path.join(directoryPath, file)))
    return filePathList
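An illustrative call; the directory and flags are placeholders:

# recursively collect common image files (no raws) under ~/Pictures,
# following symlinks; hashAlgorithmString=None skips the algorithm check
images = getImagePaths('~/Pictures',
                       recursiveBoolean=True,
                       followLinksBoolean=True,
                       rawsBoolean=False,
                       hashAlgorithmString=None)
print(len(images), 'candidate images found')
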
Example no. 46
0
#!/usr/bin/env python

from icecube import icetray, dataclasses, dataio
from I3Tray import *
from os.path import expandvars

tray = I3Tray()

runfile = expandvars("$I3_TESTDATA/2006data/Run00089508.i3.gz")

tray.AddModule("I3Reader", "read", Filename=runfile)


def SelectDaq(frame):
    assert 'DrivingTime' in frame
    dt = frame.Get("DrivingTime")
    utcdaq = dt.utc_daq_time
    print(utcdaq)
    assert SelectDaq.daqtime != utcdaq
    print("Yup prev time %lu different than new time %lu" %
          (SelectDaq.daqtime, utcdaq))
    SelectDaq.daqtime = utcdaq
    return True


SelectDaq.daqtime = 0

tray.AddModule(SelectDaq, "pickdaq", Streams=[icetray.I3Frame.Physics])

tray.AddModule("Dump", "dump")
Example no. 47
0
    def __init__(self, data_path='data/curated_fits',
                 split='train',
                 img_folder='',
                 use_face=True, use_hands=True, use_face_contour=False,
                 head_only=False,
                 hand_only=False,
                 model_type='smplx',
                 keyp_format='coco25',
                 dtype=torch.float32,
                 metrics=None,
                 transforms=None,
                 num_betas=10,
                 num_expression_coeffs=10,
                 body_thresh=0.1,
                 hand_thresh=0.2,
                 face_thresh=0.4,
                 min_hand_keypoints=8,
                 min_head_keypoints=8,
                 binarization=True,
                 return_params=True,
                 vertex_folder='vertices',
                 vertex_flip_correspondences='',
                 **kwargs):
        super(CuratedFittings, self).__init__()

        assert nand(head_only, hand_only), (
            'Hand only and head only can\'t be True at the same time')

        self.binarization = binarization
        if metrics is None:
            metrics = []
        self.metrics = metrics
        self.min_hand_keypoints = min_hand_keypoints
        self.min_head_keypoints = min_head_keypoints

        if 'test' in split:
            split = 'val'
        self.split = split
        self.is_train = 'train' in split
        self.num_betas = num_betas
        self.return_params = return_params

        self.head_only = head_only
        self.hand_only = hand_only

        data_path = osp.expandvars(osp.expanduser(data_path))
        self.data_path = osp.join(data_path, f'{split}.npz')
        self.transforms = transforms
        self.dtype = dtype

        vertex_flip_correspondences = osp.expandvars(
            vertex_flip_correspondences)
        err_msg = (
            'Vertex flip correspondences path does not exist:' +
            f' {vertex_flip_correspondences}'
        )
        assert osp.exists(vertex_flip_correspondences), err_msg
        flip_data = np.load(vertex_flip_correspondences)
        self.bc = flip_data['bc']
        self.closest_faces = flip_data['closest_faces']

        self.img_folder = osp.expandvars(osp.join(img_folder, split))
        folder_map_fname = osp.expandvars(
            osp.join(self.img_folder, FOLDER_MAP_FNAME))
        self.use_folder_split = osp.exists(folder_map_fname)
        if self.use_folder_split:
            with open(folder_map_fname, 'rb') as f:
                data_dict = pickle.load(f)
            self.items_per_folder = max(data_dict.values())

        self.use_face = use_face
        self.use_hands = use_hands
        self.use_face_contour = use_face_contour
        self.model_type = model_type
        self.keyp_format = keyp_format
        self.num_expression_coeffs = num_expression_coeffs
        self.body_thresh = body_thresh
        self.hand_thresh = hand_thresh
        self.face_thresh = face_thresh

        data = np.load(self.data_path, allow_pickle=True)
        data = {key: data[key] for key in data.keys()}

        self.betas = data['betas'].astype(np.float32)
        self.expression = data['expression'].astype(np.float32)
        self.keypoints2D = data['keypoints2D'].astype(np.float32)
        self.pose = data['pose'].astype(np.float32)
        self.img_fns = np.asarray(data['img_fns'], dtype=np.string_)
        self.indices = None
        if 'indices' in data:
            self.indices = np.asarray(data['indices'], dtype=np.int64)
        self.is_right = None
        if 'is_right' in data:
            self.is_right = np.asarray(data['is_right'], dtype=np.bool_)
        if 'dset_name' in data:
            self.dset_name = np.asarray(data['dset_name'], dtype=np.string_)
        self.vertex_folder = osp.join(data_path, vertex_folder, split)

        if self.use_folder_split:
            self.num_items = sum(data_dict.values())
            #  assert self.num_items == self.pose.shape[0]
        else:
            self.num_items = self.pose.shape[0]

        data.clear()
        del data

        source_idxs, target_idxs = dset_to_body_model(
            dset='openpose25+hands+face',
            model_type='smplx', use_hands=True, use_face=True,
            use_face_contour=self.use_face_contour,
            keyp_format=self.keyp_format)
        self.source_idxs = np.asarray(source_idxs, dtype=np.int64)
        self.target_idxs = np.asarray(target_idxs, dtype=np.int64)

        idxs_dict = get_part_idxs()
        body_idxs = idxs_dict['body']
        hand_idxs = idxs_dict['hand']
        left_hand_idxs = idxs_dict['left_hand']
        right_hand_idxs = idxs_dict['right_hand']
        face_idxs = idxs_dict['face']
        head_idxs = idxs_dict['head']
        if not use_face_contour:
            face_idxs = face_idxs[:-17]
            head_idxs = head_idxs[:-17]

        self.body_idxs = np.asarray(body_idxs)
        self.hand_idxs = np.asarray(hand_idxs)
        self.left_hand_idxs = np.asarray(left_hand_idxs)
        self.right_hand_idxs = np.asarray(right_hand_idxs)
        self.face_idxs = np.asarray(face_idxs)
        self.head_idxs = np.asarray(head_idxs)

        self.body_dset_factor = 1.2
        self.head_dset_factor = 2.0
        self.hand_dset_factor = 2.0
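Instantiation might look like the following; every path is a placeholder, and the constructor requires '<data_path>/<split>.npz' and the flip-correspondences file to exist:

import torch

dataset = CuratedFittings(
    data_path='data/curated_fits',   # expects data/curated_fits/train.npz
    split='train',
    img_folder='data/images',        # placeholder image root
    vertex_flip_correspondences='data/flip_correspondences.npz',  # placeholder
    dtype=torch.float32,
)
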
Example no. 48
0
def _expand_path(p):
    return op.abspath(op.expandvars(op.expanduser(p)))
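Note the expansion order: expanduser runs before expandvars, so a '~' stored inside an environment variable is left untouched. An illustrative run (the variable name is made up):

import os
import os.path as op

os.environ['DATA_DIR'] = 'datasets'
print(_expand_path('~/$DATA_DIR/train'))
# e.g. /home/user/datasets/train
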
Example no. 49
0
#!/usr/bin/env python

import os
from icecube import icetray, dataclasses, dataio
from icecube import trigger_sim, phys_services
from I3Tray import I3Tray
from os.path import expandvars

tray = I3Tray()

i3_testdata = expandvars("$I3_TESTDATA")

gcd_file = i3_testdata + "/sim/GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3.gz"

tray.AddModule("I3InfiniteSource", "source", 
               prefix = gcd_file)

tray.AddModule("SimpleMajorityTrigger","IISMT8")(
    ("TriggerConfigID",1006)
    )

tray.AddModule("SimpleMajorityTrigger","DCSMT3")(
    ("TriggerConfigID",1011)
    )

tray.AddModule("ClusterTrigger","string")    

tray.AddModule("CylinderTrigger","cyl")
Example no. 50
0
    def _normalize(self, path):
        if not path:
            return ''
        return realpath(abspath(expanduser(expandvars(path.strip()))))
Example no. 51
0
def expand_var(var):
    return path.expanduser(path.expandvars(var))
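Unlike _expand_path above, this applies expandvars first, so a '~' coming from a variable's value does get expanded afterwards; for example (the variable name is made up):

import os
from os import path

os.environ['WORKDIR'] = '~/work'
print(expand_var('$WORKDIR/demo'))
# e.g. /home/user/work/demo
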
Example no. 52
0
import glob
from os import path
checkPath = path.expandvars(r'%AppData%\Discord\0.0.*\modules\discord_desktop_core\index.js')
checkString = "module.exports = require('./core.asar');"

print("running check...\n")

for filename in glob.glob(checkPath):
    # compare the whole file against the expected one-line content;
    # stripping avoids a spurious mismatch from the trailing newline
    with open(filename, 'r') as f:
        content = f.read().strip()
    if content != checkString:
        print("> you're infected! <\n")
        print("replace everything in the file at the following path:\n")
        print(filename + "\n")
        print("with:\n")
        print(checkString + "\n")
    else:
        print("you're safe! nothing needs to be done\n")

input("press enter to exit")
Example no. 53
0
def root_compile(src=None, fname=None, batch=True):
    """a helper method to compile a set of C++ statements (via ``src``) or
    a C++ file (via ``fname``) via ACLiC
    """
    if src is not None and fname is not None:
        raise ValueError("exactly one of 'src' and 'fname' must be given, *not* both")

    if src is None and fname is None:
        raise ValueError("one of 'src' and 'fname' must be given, *not* neither")

    import os
    from .Helpers import ShutUp as root_shutup
    
    ROOT = import_root(batch=batch)
    compile_options = "f"
    if 'dbg' in os.environ.get('CMTCONFIG', 'opt'):
        compile_options += 'g'
    else:
        compile_options += 'O'

    src_file = None
    if src:
        import textwrap
        import tempfile
        # text mode so that the str source can be written under Python 3
        src_file = tempfile.NamedTemporaryFile(mode='w',
                                               prefix='root_aclic_',
                                               suffix='.cxx')
        src_file.write(textwrap.dedent(src))
        src_file.flush()
        src_file.seek(0)
        fname = src_file.name

        # Apparently, cling caches files by inode.
        # If you ask it to read a file that has the same inode as one
        # that it has already read, then it will just use the cached
        # contents rather than rereading.  This, however, doesn't play
        # very well if we're reading temp files, where inodes may be reused,
        # giving rise to hard-to-reproduce failures.
        #
        # Try to avoid this by keeping the temp files open until the
        # program exits.
        _tempfiles.append(src_file)
        pass

    elif fname:
        import os.path as osp
        fname = osp.expanduser(osp.expandvars(fname))
        pass
        
    assert os.access(fname, os.R_OK), "could not read [%s]"%(fname,)
    orig_root_lvl = ROOT.gErrorIgnoreLevel
    ROOT.gErrorIgnoreLevel = ROOT.kWarning
    try:
        with root_shutup():
            sc = ROOT.gSystem.CompileMacro(fname, compile_options)
        if sc == ROOT.kFALSE:
            raise RuntimeError(
                'problem compiling ROOT macro (rc=%s)'%(sc,)
                )
    finally:
        ROOT.gErrorIgnoreLevel = orig_root_lvl
    return
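A hedged usage sketch, assuming a working ROOT/ACLiC installation and that this module's import_root succeeds:

# compile a trivial C++ function via ACLiC, then call it through PyROOT
root_compile(src='''
int answer() { return 42; }
''')

import ROOT
print(ROOT.answer())  # 42, assuming ACLiC loaded the macro into the session
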
Example no. 54
0
    def do_all(self, exe_path):
        os.chdir(path.expandvars(self.working_directory))
        for test_args in test_args_sequence:
            subprocess.call([exe_path] + test_args)
Example no. 55
0
def main():
    args = arg_parser.get_parser().parse_args()

    if args.subparser == "gui":
        main_gui()
    elif args.subparser == "manual":
        fetcher = FetcherEnum.get(args.fetcher)
        pyscandl = Pyscandl(fetcher,
                            chapstart=args.chapter_start,
                            output=args.output,
                            pdf=args.pdf,
                            keep=args.keep,
                            image=args.image,
                            all=args.all,
                            link=args.link,
                            manga=args.manga,
                            download_number=args.download_number,
                            chapend=args.chapter_end,
                            quiet=args.quiet,
                            skip=args.skip,
                            tiny=args.tiny)
        pyscandl.full_download()

    elif args.subparser == "manga":
        print(
            "Warning: the current db will be replaced wy a new system in the next major release (3.0.0). Please do not forget the migration at that time",
            file=stderr)

        if args.list or args.list_all or args.list_only:
            ml = Controller().list_mangas(all=args.list_all,
                                          only=args.list_only)
            if ml:
                manga_list = "\n- " + "\n- ".join(ml)
                print(f"current mangas in the autodl db are:{manga_list}")
            else:
                print(
                    "there are currently no mangas in autodl, you may consider adding some to it with manga add"
                )

        elif args.import_db:
            controller = Controller()
            controller.db_import(args.import_db)
            controller.save()

        elif args.export_db:
            Controller().db_export(args.export_db)

        elif args.manga_subparser == "scan":
            try:
                infos = Controller()
                if args.name:
                    try:
                        infos.scan(args.name)
                    except MangaNotFound as e:
                        print(e)
                else:
                    mangas = [
                        row[0] for row in infos._curs.execute(
                            """SELECT name FROM manga WHERE archived=false ORDER BY name"""
                        ).fetchall()
                    ]
                    for manga in mangas:
                        infos.scan(manga)
            except KeyboardInterrupt:
                print("\nmanual interruption")

        elif args.manga_subparser == "info":
            infos = Controller().manga_info(args.name)
            if infos is None:
                print(
                    f"manga '{args.name}' not in the list, you may consider adding it to it with manga add"
                )
            elif infos[4]:
                print(
                    f"{args.name}:\n", f"\tmanga link: {infos[2]}\n",
                    f"\tfetcher: {infos[1]}\n",
                    f"\tnumber of chapters already downloaded: {len(infos[4])}\n",
                    f"\tlast chapter downloaded: {infos[4][0]}\n",
                    f"\tarchived: {bool(infos[3])}")
            else:
                print(f"{args.name}:\n", f"\tmanga link: {infos[2]}\n",
                      f"\tfetcher: {infos[1]}\n",
                      f"\tno chapter downloaded yet\n",
                      f"\tarchived: {bool(infos[3])}")

        elif args.manga_subparser == "add":
            controller = Controller()
            if args.chap:
                chaps = [
                    float(chap) if "." in chap else int(chap)
                    for chap in args.chap
                ]
            else:
                chaps = []
            controller.add(args.name, args.link, args.fetcher, chaps,
                           args.archived)
            controller.save()

        elif args.manga_subparser == "edit":
            controller = Controller()
            if args.chap:
                chaps = [
                    float(chap) if "." in chap else int(chap)
                    for chap in args.chap
                ]
            else:
                chaps = None

            if args.archive:
                archive = True
            elif args.unarchive:
                archive = False
            else:
                archive = None

            controller.edit(args.name, args.link, args.fetcher, chaps, archive)
            controller.save()

        elif args.manga_subparser == "chaplist":
            info = Controller().manga_info(args.name)
            if info is not None:
                chaps = info[4]
                if chaps:
                    if not args.quiet:
                        print(
                            f"the already downloaded chapters for '{args.name}' are:",
                            end=" ")
                    print(' '.join([str(chap) for chap in chaps]))
                else:
                    print(f"no chapter downloaded yet for '{args.name}'")
            elif not args.quiet:
                print(
                    f"manga '{args.name}' not in the list, you may consider adding it to it with manga add"
                )

        elif args.manga_subparser == "rmchaps":
            controller = Controller()
            if controller.rm_chaps(args.name, args.chap):
                controller.save()
                if not args.quiet:
                    print(
                        f"deletion of the chapters {', '.join(args.chap)} from {args.name} sucessfull"
                    )
            else:
                if not args.quiet:
                    print(f"no chapters removed for {args.name}")

        elif args.manga_subparser == "delete":
            controller = Controller()
            if controller.delete_manga(args.name):
                controller.save()
                if not args.quiet:
                    print(f"deletion of {args.name} successful")
            else:
                if not args.quiet:
                    print(f"manga {args.name} not found")

        elif args.manga_subparser == "migrate":
            if not args.quiet:
                print(
                    "WARNING: if there is already a database that was migrated before it will be erased in the process !"
                )
            if args.quiet or input("Continue ? [y/N]").lower() == "y":
                if not args.quiet:
                    print("Creating new db file...", end=" ")

                # the db now lives in the user's folder, so its location is OS-dependent
                platform = system()
                if platform == "Linux":
                    folder_path = path.expanduser("~/.local/share/pyscandl/")
                elif platform == "Windows":
                    folder_path = path.expandvars("%APPDATA%/pyscandl/")
                elif platform == "Darwin":
                    folder_path = path.expanduser("~/Library/Preferences/pyscandl/")
                else:
                    raise OSError(
                        "The OS couldn't be detected, the db doesn't have a place to be stored"
                    )

                # removing the old db if it exists
                try:
                    remove(folder_path + "db.sqlite")
                except FileNotFoundError:
                    pass

                # creating the new db
                makedirs(folder_path, exist_ok=True)
                conn = sqlite3.connect(folder_path + "db.sqlite")
                curs = conn.cursor()

                if not args.quiet:
                    print("Loading the old db file...")
                try:
                    with open(
                            f"{path.dirname(modules['pyscandl.modules.autodl'].__file__)}/db.json",
                            "r") as data:
                        old_db = json.load(data)
                except FileNotFoundError:
                    old_db = {}

                if not args.quiet:
                    print("Creating new tables...", end=" ")
                curs.execute("""
				CREATE TABLE IF NOT EXISTS "manga" (
					"id" INTEGER PRIMARY KEY,
					"name" TEXT UNIQUE,
					"fetcher" TEXT,
					"link" TEXT,
					"archived" BOOL DEFAULT FALSE
				);
				""")

                curs.execute("""
				CREATE TABLE IF NOT EXISTS "chaplist" (
					"manga" INTEGER REFERENCES manga(id),
					"chapter" BLOB,
					CONSTRAINT unique_chap UNIQUE (manga, chapter)
				);
				""")

                if not args.quiet:
                    print("Transfering data...")
                for key, value in old_db.items():
                    if not args.quiet:
                        print(f"{key}: autodl data...", end=" ")
                    curs.execute(
                        """INSERT INTO manga("name", "fetcher", "link", "archived") VALUES (?, ?, ?, ?);""",
                        (key, value.get("fetcher").upper(), value.get("link"),
                         value.get("archived")))
                    if not args.quiet:
                        print("already downloaded chapters...")
                    curs.execute("""SELECT id FROM manga WHERE "name"=?""",
                                 (key, ))
                    manga_id = curs.fetchone()
                    curs.executemany(
                        """INSERT OR IGNORE INTO chaplist("manga", "chapter") VALUES (?, ?);""",
                        [(manga_id[0], chap)
                         for chap in value.get("chapters")])

                conn.commit()
                conn.close()
            else:
                print("Cancelling migration")

    elif args.subparser == "autodl":
        print(
            "Warning: the current db will be replaced wy a new system in the next major release (3.0.0). Please do not forget the migration at that time",
            file=stderr)

        autodl = Controller(args.output, args.quiet, args.tiny)
        # to be sure to save progress done in case of interruption
        try:
            for name in [
                    row[0] for row in autodl._curs.execute(
                        "SELECT name FROM manga WHERE archived=false ORDER BY name"
                    ).fetchall()
            ]:
                # the XML tree fetching sometimes fails, so allow a few retries
                tries_left = 3
                while tries_left > 0:
                    try:
                        autodl.scan(name)
                        success = True
                        tries_left = 0
                    except xml.etree.ElementTree.ParseError:
                        if not args.quiet:
                            print(
                                f"problem with the fetching for {name}, retrying..."
                            )
                        success = False
                        tries_left -= 1
                    except DownedSite:
                        # the website can't be accessed for the time being so no retrying
                        success = False
                        tries_left = 0
                if success:
                    try:
                        autodl.download(name,
                                        pdf=args.pdf,
                                        keep=args.keep,
                                        image=args.image)
                    except DownedSite:
                        print(f"can't access {name}, please retry it later")
                elif not args.quiet:
                    print(f"can't access {name}, please retry it later")
        except KeyboardInterrupt:
            if not args.quiet:
                print("\nmanual interruption")
        finally:
            autodl.save()
            if not args.quiet:
                print(f"{autodl.downloads} chapters downloaded")
Example no. 56
0
def extract_dom_coordinates(gcd, outdir):
    """Extract the DOM coordinates from a gcd file.

    Parameters
    ----------
    gcd : string
        Path to GCD file

    outdir : string
        Path to directory into which to store the resulting .npy file
        containing the coordinates array

    """
    gcd = expanduser(expandvars(gcd))
    outdir = expanduser(expandvars(outdir))

    if not isfile(gcd):
        raise IOError('`gcd` file does not exist at "{}"'.format(gcd))

    gcd_md5 = get_file_md5(gcd)

    print('Extracting geometry from\n  "{}"'.format(abspath(gcd)))
    print('File MD5 sum is\n  {}'.format(gcd_md5))
    print('Will output geom file and metadata file to directory\n'
          '  "{}"'.format(abspath(outdir)))

    mkdir(outdir)

    geofile = dataio.I3File(gcd)  # pylint: disable=no-member
    geometry = None
    while geofile.more():
        frame = geofile.pop_frame()
        if 'I3Geometry' in frame.keys():
            geometry = frame['I3Geometry']
            break
    if geometry is None:
        raise ValueError('Could not find geometry in file "{}"'.format(gcd))

    omgeo = geometry.omgeo

    geom = np.full(shape=(N_STRINGS, N_OMS, 3), fill_value=np.nan)
    for string in range(N_STRINGS):
        for om in range(N_OMS):
            geom[string,
                 om, :] = (omgeo.get(OMKey(string + 1, om + 1)).position.x,
                           omgeo.get(OMKey(string + 1, om + 1)).position.y,
                           omgeo.get(OMKey(string + 1, om + 1)).position.z)

    assert np.sum(np.isnan(geom)) == 0

    geom_meta = generate_geom_meta(geom)
    geom_meta['sourcefile_path'] = gcd
    geom_meta['sourcefile_md5'] = gcd_md5

    outpath = join(outdir, GEOM_FILE_PROTO.format(**geom_meta))
    metapath = join(outdir, GEOM_META_PROTO.format(**geom_meta))

    json.dump(geom_meta, open(metapath, 'w'), indent=2)
    print('Saved metadata to\n  "{}"'.format(abspath(metapath)))
    np.save(outpath, geom)
    print('Saved geom to\n  "{}"'.format(abspath(outpath)))
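Typical use, assuming an icetray environment (I3_TESTDATA set, icecube.dataio importable); the GCD filename is a placeholder:

extract_dom_coordinates(
    gcd='$I3_TESTDATA/GCD/GeoCalibDetectorStatus_2012.i3.gz',  # placeholder name
    outdir='~/geom_out',  # expanduser/expandvars are applied inside
)
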
    frame.Put("DrivingTime", time)


def NEvents(frame):
    if "NEvPerFile" in frame:
        del frame['NEvPerFile']
    frame.Put('NEvPerFile', icetray.I3Int(numEvents))


### ADDING PROPAGATOR ###
from icecube import PROPOSAL
from os.path import expandvars

propagators = sim_services.I3ParticleTypePropagatorServiceMap()

mediadef = expandvars('$I3_BUILD/PROPOSAL/resources/mediadef')

muMinusPropagator = PROPOSAL.I3PropagatorServicePROPOSAL(
    mediadef=mediadef,
    cylinderRadius=1200,
    cylinderHeight=1700,
    type=I3Particle.ParticleType.MuMinus)
muPlusPropagator = PROPOSAL.I3PropagatorServicePROPOSAL(
    mediadef=mediadef,
    cylinderRadius=1200,
    cylinderHeight=1700,
    type=I3Particle.ParticleType.MuPlus)

propagators[I3Particle.ParticleType.MuMinus] = muMinusPropagator
propagators[I3Particle.ParticleType.MuPlus] = muPlusPropagator
tray.AddModule('I3PropagatorModule',
Example no. 58
0
def expand(path):
    return abspath(expanduser(expandvars(path)))
Example no. 59
0
    def _trpath(cls, path):
        return op.normpath(op.expanduser(op.expandvars(path)))
Example no. 60
0
def MakeIceCubeMediumProperties(detectorCenterDepth = 1948.07*I3Units.m,
                                iceDataDirectory=expandvars("$I3_SRC/clsim/resources/ice/spice_mie"),
                                useTiltIfAvailable=True,
                                returnParameters=False):
    ### read ice information from PPC-compatible tables
    
    # do we have tilt description files?
    useTilt=False
    if useTiltIfAvailable:
        hasTiltPar = os.path.isfile(iceDataDirectory+"/tilt.par")
        hasTiltDat = os.path.isfile(iceDataDirectory+"/tilt.dat")
        if hasTiltPar and not hasTiltDat:
            raise RuntimeError("ice model directory has tilt.par but tilt.dat is missing!")
        elif hasTiltDat and not hasTiltPar:
            raise RuntimeError("ice model directory has tilt.dat but tilt.par is missing!")
        elif hasTiltDat and hasTiltPar:
            useTilt=True

    icemodel_dat = numpy.loadtxt(iceDataDirectory+"/icemodel.dat", unpack=True)
    icemodel_par = numpy.loadtxt(iceDataDirectory+"/icemodel.par")
    icemodel_cfg = numpy.loadtxt(iceDataDirectory+"/cfg.txt")
    
    # is this Dima's new 4-parameter file or his old 6-parameter file?
    if len(icemodel_par)==6:
        alpha = icemodel_par[0][0]
        kappa = icemodel_par[1][0]
        A     = icemodel_par[2][0]
        B     = icemodel_par[3][0]
        D     = icemodel_par[4][0]
        E     = icemodel_par[5][0]
    elif len(icemodel_par)==4:
        # the 4-parameter files appeared in ppc around March 2012
        alpha = icemodel_par[0][0]
        kappa = icemodel_par[1][0]
        A     = icemodel_par[2][0]
        B     = icemodel_par[3][0]
        
        # this is what ppc does to fill the remaining two parameters:
        wv0 = 400.
        D     = wv0**kappa
        E     = 0.
    else:
        raise RuntimeError(iceDataDirectory+"/icemodel.par is not a valid Dima-icemodel file. (needs either 4 or 6 entries, this one has %u entries)" % len(icemodel_par))
    
    if len(icemodel_cfg) < 4:
        raise RuntimeError(iceDataDirectory+"/cfg.txt does not have enough configuration lines. It needs at least 4.")

    oversizeScaling       = icemodel_cfg[0] # currently ignored
    efficiencyCorrection  = icemodel_cfg[1] # currently ignored
    liuScatteringFraction = icemodel_cfg[2]
    meanCosineTheta       = icemodel_cfg[3]

    hasAnisotropy = False
    if len(icemodel_cfg) > 4 and len(icemodel_cfg) < 7:
        raise RuntimeError(iceDataDirectory+"/cfg.txt has more than 4 lines (this means you probably get ice anisotropy), but it needs at least 7 lines in this case.")
    elif len(icemodel_cfg) > 4:
        hasAnisotropy = True
        anisotropyDirAzimuth  = icemodel_cfg[4]*I3Units.deg # direction of ice tilt (perp. to flow)
        magnitudeAlongDir     = icemodel_cfg[5]             # magnitude of ice anisotropy along tilt
        magnitudePerpToDir    = icemodel_cfg[6]             # magnitude of ice anisotropy along flow


    if liuScatteringFraction<0. or liuScatteringFraction>1.:
        raise RuntimeError("Invalid Liu(SAM) scattering fraction configured in cfg.txt: value=%g" % liuScatteringFraction)
    if meanCosineTheta<-1. or meanCosineTheta>1.:
        raise RuntimeError("Invalid <cos(theta)> configured in cfg.txt: value=%g" % meanCosineTheta)
    
    depth = icemodel_dat[0]*I3Units.m
    b_e400 = icemodel_dat[1]
    a_dust400 = icemodel_dat[2]
    delta_tau = icemodel_dat[3]
    
    # check delta_tau values against formula
    # According to the IceCube paper, these values should be calculated like this.
    # Values in tables seem to have an offset of 6.53m
    def temp(depth):
        return 221.5 - 0.00045319*(depth/I3Units.m) + 5.822e-6 * (depth/I3Units.m)**2.
    def deltaTau(depth):
        return temp(depth+6.53*I3Units.m) - temp(1730.*I3Units.m)
    
    maxRelativeError=0.
    for thisDepth, thisDeltaTau in numpy.array([depth, delta_tau]).transpose():
        relativeError = abs(thisDeltaTau-deltaTau(thisDepth))/thisDeltaTau
        if relativeError > maxRelativeError: maxRelativeError=relativeError
    if maxRelativeError > 0.01:
        print("The ice table's delta_tau values do not seem to correpsond to the equation. Loading table anyway.")
    
    
    # some sanity checks on the layer files
    if len(depth)<2: raise RuntimeError("There is only a single layer in your layer definition file")
    
    layerHeight = depth[1]-depth[0]
    if layerHeight<=0.: raise RuntimeError("ice layer depths are not in increasing order")
    
    for i in range(len(depth)-1):
        if abs((depth[i+1]-depth[i]) - layerHeight) > 1e-5:
            raise RuntimeError("ice layers are not spaced evenly")
    
    # change the order (top-to-bottom -> bottom-to-top)
    depth = depth[::-1]
    b_e400 = b_e400[::-1]               # effective scattering length
    a_dust400 = a_dust400[::-1]
    delta_tau = delta_tau[::-1]

    b_400 = b_e400/(1.-meanCosineTheta) # scattering length (used in the simulation)
    
    # Be compatible with PPC, which assumes the specified depths
    # are in the middle of the layer. We need the depth at the
    # top of the layer here, so correct for that:
    depth = depth-layerHeight/2.
    
    # layerZ is in z-coordinates, from bottom to top (ascending z)
    depthAtBottomOfLayer = depth + layerHeight
    layerZStart = detectorCenterDepth - depthAtBottomOfLayer
    layerZEnd = detectorCenterDepth - depth
    
    ##### start making the medium property object
    
    m = I3CLSimMediumProperties(mediumDensity=0.9216*I3Units.g/I3Units.cm3,
                                layersNum=len(layerZStart),
                                layersZStart=layerZStart[0],
                                layersHeight=layerHeight,
                                rockZCoordinate=-870.*I3Units.m,
                                # TODO: inbetween: from 1740 upwards: less dense ice (factor 0.825)
                                airZCoordinate=1940.*I3Units.m)
    
    # None of the IceCube wlen-dependent functions have a fixed minimum
    # or maximum wavelength value set. We need to set some sensible range here.
    # This seems to be the definition range of the DOM wavelength acceptance:
    m.ForcedMinWlen = 265.*I3Units.nanometer
    m.ForcedMaxWlen = 675.*I3Units.nanometer
    
    iceCubeScatModel = I3CLSimRandomValueMixed(
        firstDistribution=I3CLSimRandomValueSimplifiedLiu(meanCosine=meanCosineTheta),
        secondDistribution=I3CLSimRandomValueHenyeyGreenstein(meanCosine=meanCosineTheta),
        fractionOfFirstDistribution=liuScatteringFraction)
    m.SetScatteringCosAngleDistribution(iceCubeScatModel)
    
    if not hasAnisotropy:
        # no ice/water anisotropy. all of these three are no-ops
        m.SetDirectionalAbsorptionLengthCorrection(I3CLSimScalarFieldConstant(1.))
        m.SetPreScatterDirectionTransform(I3CLSimVectorTransformConstant())
        m.SetPostScatterDirectionTransform(I3CLSimVectorTransformConstant())
    else:
        # print("Anisotropy! Whooo!", anisotropyDirAzimuth/I3Units.deg, magnitudeAlongDir, magnitudePerpToDir)

        absLenScaling, preScatterTransform, postScatterTransform = \
            util.GetSpiceLeaAnisotropyTransforms(
                anisotropyDirAzimuth,
                magnitudeAlongDir,
                magnitudePerpToDir
                )

        m.SetDirectionalAbsorptionLengthCorrection(absLenScaling)
        m.SetPreScatterDirectionTransform(preScatterTransform)
        m.SetPostScatterDirectionTransform(postScatterTransform)

    if useTilt:
        # print("Tilt! Wheee!")

        m.SetIceTiltZShift(
            util.GetIceTiltZShift(
                tiltDirectory = iceDataDirectory,
                detectorCenterDepth = detectorCenterDepth,
                )
            )
    else:
        # no ice tilt
        m.SetIceTiltZShift(I3CLSimScalarFieldConstant(0.))

    phaseRefIndex = I3CLSimFunctionRefIndexIceCube(mode="phase")
    groupRefIndex = I3CLSimFunctionRefIndexIceCube(mode="group")
    for i in range(len(layerZStart)):
        #print "layer {0}: depth at bottom is {1} (z_bottom={2}), b_400={3}".format(i, depthAtBottomOfLayer[i], layerZStart[i], b_400[i])
        
        m.SetPhaseRefractiveIndex(i, phaseRefIndex)

        # the IceCube group refractive index parameterization is not exactly 
        # what you would expect from the phase refractive index.
        # To use calculated values instead of the parameterization,
        # just comment this line:
        m.SetGroupRefractiveIndexOverride(i, groupRefIndex)
        
        absLen = I3CLSimFunctionAbsLenIceCube(kappa=kappa, A=A, B=B, D=D, E=E,
                                                        aDust400=a_dust400[i],
                                                        deltaTau=delta_tau[i])
        m.SetAbsorptionLength(i, absLen)

        scatLen = I3CLSimFunctionScatLenIceCube(alpha=alpha,
                                                          b400=b_400[i])
        m.SetScatteringLength(i, scatLen)

    if not returnParameters:
        return m
    else:
        parameters = dict()
        if hasAnisotropy:
            parameters["anisotropyDirAzimuth"]=anisotropyDirAzimuth
            parameters["anisotropyMagnitudeAlongDir"]=magnitudeAlongDir
            parameters["anisotropyMagnitudePerpToDir"]=magnitudePerpToDir
        else:
            parameters["anisotropyDirAzimuth"]=float('NaN')
            parameters["anisotropyMagnitudeAlongDir"]=float('NaN')
            parameters["anisotropyMagnitudePerpToDir"]=float('NaN')
        return (m, parameters)
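An illustrative call, assuming clsim is built and $I3_SRC points at the source tree:

# build the default spice_mie medium and inspect the anisotropy values
# the loader read from cfg.txt (NaN when the model has no anisotropy)
medium, params = MakeIceCubeMediumProperties(returnParameters=True)
print(params["anisotropyDirAzimuth"], params["anisotropyMagnitudeAlongDir"])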