import pkgutil

import IPython.display


def Execute(command_string, html_string):
  """
  Execute(command_string, html_string)
    Submits the command to be executed as javascript along with html_string
    D3 and PersistenceExplorer are preloaded if they aren't already
  """
  stylesheet = '<style>'  + pkgutil.get_data('PersistenceExplorer', 'WebApp/PersistenceExplorer.css').decode('ascii') + '</style>'
  javascript = '<script>' + pkgutil.get_data('PersistenceExplorer', 'WebApp/PersistenceExplorer.js').decode('ascii') + '</script>'
  output = stylesheet + javascript + """
    <script>
    var command = function() { 
    """ + command_string + """ };
    function LoadSource(src, tailcall) {
      var elements = document.querySelectorAll("script[src='"+src+"']");
      if ( elements.length == 0 ) {
        var element = document.createElement("script");
        element.src = src;
        document.body.appendChild(element);
        element.onload = tailcall;
      } else {
        tailcall();
      }
    };
    LoadSource("//d3js.org/d3.v3.min.js", function() {
      command();
    });
    </script>
    """ + html_string
  return IPython.display.HTML(output)
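
# Usage sketch (not part of the original example): invoked from a Jupyter
# cell; the JavaScript body and the div id below are illustrative only.
Execute(
    command_string="d3.select('#plot').append('p').text('D3 is ready');",
    html_string="<div id='plot'></div>",
)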
Example #2
    def test_getdata_zipfile(self):
        zip = 'test_getdata_zipfile.zip'
        pkg = 'test_getdata_zipfile'

        # Include a LF and a CRLF, to test that binary data is read back
        RESOURCE_DATA = b'Hello, world!\nSecond line\r\nThird line'

        # Make a package with some resources
        zip_file = os.path.join(self.dirname, zip)
        z = zipfile.ZipFile(zip_file, 'w')

        # Empty init.py
        z.writestr(pkg + '/__init__.py', "")
        # Resource files, res.txt, sub/res.txt
        z.writestr(pkg + '/res.txt', RESOURCE_DATA)
        z.writestr(pkg + '/sub/res.txt', RESOURCE_DATA)
        z.close()

        # Check we can read the resources
        sys.path.insert(0, zip_file)
        res1 = pkgutil.get_data(pkg, 'res.txt')
        self.assertEqual(res1, RESOURCE_DATA)
        res2 = pkgutil.get_data(pkg, 'sub/res.txt')
        self.assertEqual(res2, RESOURCE_DATA)
        del sys.path[0]

        del sys.modules[pkg]
Example #3
def getMMPOSmodel(model,mybp,customdir):

	mmposprob = []
	
	if (model == "random"):
		#mmposprob = [25,20,15,10,10,5,5,5,2,2]
		#mmposprob.extend([.75] * (mybp - 10))

		# Simple weighted probability, where prob of a mismatch increases with increased length
		# First 10 bases are weighted the same:
		myseed = 10
		mmprob = [myseed] * 10
		for x in range(myseed,mybp):
			mmprob.append(myseed + x)
		mmposprob = mmprob

	elif (model == "NIST"):
		myresource = str('data/' + model + '_mmcounts.txt')
		data = pkgutil.get_data(__name__, myresource)
		lines = data.split('\n')
		for line in lines:
			values = line.split("\t")
			try:
				mmposprob.append(int(values[1]))
			except:
				pass
	elif (model == "dm3"):
		myresource = str('data/' + model + '_mmcounts.txt')
		data = pkgutil.get_data(__name__, myresource)
		lines = data.split('\n')
		for line in lines:
			values = line.rstrip().split("\t")
			try:
				mmposprob.append(int(values[1]))
			except:
				pass
	elif (model == "flyheads"):
		myresource = str('data/' + model + '_mmcounts.txt')
		data = pkgutil.get_data(__name__, myresource)
		lines = data.split('\n')
		for line in lines:
			values = line.rstrip().split("\t")
			try:
				mmposprob.append(int(values[1]))
			except:
				pass
	elif (model == "custom"):
		myresource = str(customdir + '/mmcounts.txt')
		data = open(os.path.join('', myresource), 'rb').read()
		lines = data.split('\n')
		for line in lines:
			values = line.rstrip().split("\t")
			try:
				mmposprob.append(int(values[1]))
			except:
				pass
	elif (model == "errorfree"):
		mmposprob = [0] * mybp

	return mmposprob
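
# Usage sketch (not part of the original example): the returned per-position
# weights can be normalized into a sampling distribution, e.g. with numpy.
import numpy as np

weights = getMMPOSmodel("random", 50, None)       # 50 bp read, random model
probs = np.array(weights, dtype=float) / float(sum(weights))
position = np.random.choice(len(probs), p=probs)  # draw a mismatch position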
Example #4
    def load(self, config_path=None, log_dir=None):
        """
            Load the configuration files and configure logging.

            :param str config_path: path to a user defined configuration file
            :param str log_dir: path to the directory where log files are to be stored
        """

        default_config = yaml.safe_load(pkgutil.get_data('vmupdate', 'data/vmupdate.yaml'))

        if config_path:
            with open(config_path, 'r') as config_file:
                user_config = yaml.safe_load(config_file)

            self._data = _merge(default_config, user_config)
        else:
            self._data = default_config

        self._general = General(self._data['General'])
        self._credentials = Credentials(self._data['Credentials'])
        self._network = Network(self._data['Network'])
        self._virtualizers = Virtualizers(self._data['Virtualizers'])
        self._pkgmgrs = PackageManagers(self._data['Package Managers'])
        self._shells = Shells(self._data['Shells'])
        self._machines = Machines(self._data['Machines'])

        self._logging = yaml.safe_load(pkgutil.get_data('vmupdate', 'data/logging.yaml'))

        if not log_dir:
            log_dir = BASE_DIR

        self._set_log_filename(log_dir, 'info_file')
        self._set_log_filename(log_dir, 'error_file')

        logging.config.dictConfig(self._logging)
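
    # The _merge helper is not shown in this example; a minimal recursive
    # dict merge along these lines would satisfy the call site above (an
    # illustrative assumption, not the project's actual implementation):
    #
    #   def _merge(base, override):
    #       merged = dict(base)
    #       for key, value in override.items():
    #           if isinstance(value, dict) and isinstance(merged.get(key), dict):
    #               merged[key] = _merge(merged[key], value)
    #           else:
    #               merged[key] = value
    #       return merged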
Example #5
    def test_getdata_filesys(self):
        pkg = 'test_getdata_filesys'

        # Include a LF and a CRLF, to test that binary data is read back
        RESOURCE_DATA = b'Hello, world!\nSecond line\r\nThird line'

        # Make a package with some resources
        package_dir = os.path.join(self.dirname, pkg)
        os.mkdir(package_dir)
        # Empty init.py
        f = open(os.path.join(package_dir, '__init__.py'), "wb")
        f.close()
        # Resource files, res.txt, sub/res.txt
        f = open(os.path.join(package_dir, 'res.txt'), "wb")
        f.write(RESOURCE_DATA)
        f.close()
        os.mkdir(os.path.join(package_dir, 'sub'))
        f = open(os.path.join(package_dir, 'sub', 'res.txt'), "wb")
        f.write(RESOURCE_DATA)
        f.close()

        # Check we can read the resources
        res1 = pkgutil.get_data(pkg, 'res.txt')
        self.assertEqual(res1, RESOURCE_DATA)
        res2 = pkgutil.get_data(pkg, 'sub/res.txt')
        self.assertEqual(res2, RESOURCE_DATA)

        del sys.modules[pkg]
Example #6
    def __enter__(self):

        # create the temporary profile directory
        self.location = tempfile.mkdtemp()

        os.mkdir(
            os.path.join(self.location, 'extensions')
        )

        open(
            os.path.join(
                self.location,
                'extensions',
                '*****@*****.**',
            ),
            'wb',
        ).write(
            pkgutil.get_data('cascajal', 'data/prontoprint/prontoprint.xpi'),
        )

        context = self.options.copy()
        context['profile_dir'] = self.location

        # copy templated files into it
        for filename in self.TEMPLATE_FILES:
            open(os.path.join(self.location, filename), 'w').write(
                pkgutil.get_data('cascajal', 'data/%s' % (filename,)) % context
            )

        return self
Example #7
 def __init__(self, viewer, poslist, radiuslist, colorlist,
              transparent=False, shading='phong'):
     
     vert = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                           "sphereimp.vert")
     frag = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                             "sphereimp.frag")
     
     super(SphereImpostorRenderer, self).__init__(viewer, vert, frag)
     
     self.transparent = transparent
     self.poslist = poslist
     self.radiuslist = radiuslist
     self.colorlist = colorlist
     self.n_spheres = len(poslist)
     self.show_mask = np.ones(self.n_spheres, dtype='bool')
     self.ldir = np.array([0.0, 0.0, 10.0, 1.0])
     
     self.shading = shading
     
     vertices = np.repeat(poslist, 4, axis=0).astype(np.float32)
     radii = np.repeat(radiuslist, 4, axis=0).astype(np.float32)
     colors = np.repeat(colorlist, 4, axis=0).astype(np.uint8)
     
     mapping = np.tile([1.0, 1.0,-1.0, 1.0,-1.0,-1.0,1.0, -1.0,],
                       self.n_spheres).astype(np.float32)
     
     self._verts_vbo = VertexBuffer(vertices,GL_DYNAMIC_DRAW)
     self._color_vbo = VertexBuffer(colors,GL_DYNAMIC_DRAW)
     self._mapping_vbo = VertexBuffer(mapping,GL_DYNAMIC_DRAW)
     self._centers_vbo = VertexBuffer(vertices,GL_DYNAMIC_DRAW)
     self._radius_vbo = VertexBuffer(radii,GL_DYNAMIC_DRAW)
Example #8
  def _generate_project_file(self, configured_project):
    existing_project_components = None
    if not self.nomerge:
      # Grab the existing components, which may include customized ones.
      existing_project_components = self._parse_xml_component_elements(self.project_filename)

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))

    ipr = self._generate_to_tempfile(Generator(pkgutil.get_data(__name__, self.project_template),
                                               project=configured_project))

    if not self.nomerge:
      # Get the names of the components we generated, and then delete the
      # generated files.  Clunky, but performance is not an issue, and this
      # is an easy way to get those component names from the templates.
      extra_project_components = self._get_components_to_merge(existing_project_components, ipr)
      os.remove(ipr)

      # Generate again, with the extra components.
      ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template),
                  project=configured_project.extend(extra_components=extra_project_components))
      )
    self.context.log.info('Generated IntelliJ project in {directory}'
                          .format(directory=self.gen_project_workdir))
    return ipr
Example #9
def add_contest(args):
    name = args['name']
    if os.path.exists(name):
        raise ValueError('folder with the same name ({0}) exists'.format(name))

    os.mkdir(name)
    os.mkdir(pjoin(name, 'statements'))
    os.mkdir(pjoin(name, 'problems'))
    os.mkdir(pjoin(name, 'lib'))

    files = [
        # (source, destination),
        ('problems.tex', 'statements/problems.tex'),
        ('olymp.sty', 'statements/olymp.sty'),
        ('contest.json', 'contest.json'),
        ('import.sty', 'statements/import.sty'),
        ('clean.sh', 'statements/clean.sh'),
        ('r.sh', 'statements/r.sh'),
        ('r.cmd', 'statements/r.cmd'),
    ]

    for src, dst in files:
        with open(os.path.join(name, dst), 'w') as f:
            data = str(pkgutil.get_data('olymper', os.path.join('data', 'bootstrap', src)), 'utf-8')
            f.write(data)

    with open(pjoin(name, 'lib', 'testlib.h'), 'w') as f:
        data = str(pkgutil.get_data('olymper', pjoin('data', 'testlib.h')), 'utf-8')
        f.write(data)

    # mark the helper scripts as executable
    for file in ('r.sh', 'r.cmd', 'clean.sh'):
        os.chmod(os.path.join(name, 'statements', file), stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
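
# Usage sketch (not part of the original example): the args mapping only
# needs a 'name' key, as accessed at the top of the function.
add_contest({'name': 'example-contest'})   # scaffolds ./example-contest/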
Example #10
 def install_pyrun(self):
     # download, unpack and patch pyrun
     pyrun_src_tar = self.builddir / 'pyrun.tar.gz'
     download_and_unpack(PYRUN_SRC_URL, pyrun_src_tar, self.builddir)
     pyrun_diff = pkgutil.get_data(__package__, 'patches/pyrun.diff')
     patch(self.builddir / PYRUN_SRC_DIR, pyrun_diff)
     # giving full python source path as makefile target makes pyrun
     # download and patch python
     python_dir = self.pyrun_dir / 'Python-{}-ucs4'.format(self.python_full_version)
     self.pyrun_make(str(python_dir))
     # apply our python patches too
     python_diff = pkgutil.get_data(__package__, 'patches/python34.diff')
     patch(python_dir, python_diff)
     # configure ffi (for ctypes)
     ffi_config_script = python_dir / 'Modules' / '_ctypes' / 'libffi' / 'configure'
     ffi_build_dir = (python_dir / 'build' /
                      'temp.linux-x86_64-{}'.format(self.python_major_version) /
                      'libffi')
     ensure_dir_exists(ffi_build_dir)
     subprocess.check_call([str(ffi_config_script)], cwd=str(ffi_build_dir))
     # build pyrun and move it to top build directory
     self.pyrun_make('pyrun')
     pyrun_target_dir = self.pyrun_dir / 'build-{}-ucs4'.format(self.python_major_version)
     pyrun_bin = (pyrun_target_dir / 'bin' / self.pyrun.name)
     ensure_dir_exists(self.builddir / 'bin')
     ensure_dir_exists(self.builddir / 'lib' /
                       'python{}'.format(self.python_major_version) /
                       'site-packages')
     pyrun_bin.rename(self.pyrun)
     (pyrun_target_dir / 'include').rename(self.builddir / 'include')
Example #11
 def __init__(self,
              xml_fp=None,
              xsd_fp=None):
     """Initialises a new Gbxml instance
     
     Arguments:
         xml_fp (str): filepath to a gbXML file. This is read in as an 
             lxml._ElementTree object. If not supplied then a 
             new lxml._ElementTree object with only a root element is created.
             
         xsd_fp (str): filepath to a gbXML schema file. If not supplied 
             then a default gbXML schema file is used.
             
     """
     if xml_fp: 
         self._ElementTree=self._read(xml_fp)
     else:
         st = pkgutil.get_data(__package__, 'blank.xml')
         self._ElementTree=self._read(BytesIO(st))
         
     self.ns={'gbxml':'http://www.gbxml.org/schema'}
     
     if xsd_fp:
         self.gbxsd=Gbxsd(xsd_fp)
     else:
         st = pkgutil.get_data(__package__, 'GreenBuildingXML_Ver6.01.xsd')
         self.gbxsd=Gbxsd(BytesIO(st))
Example #12
    def configure_cilogon(self, conf_file_name, conf_link_name, **kwargs):
        self.logger.debug("ENTER: IO.configure_cilogon()")

        conf_file = open(conf_file_name, "w")
        try:
            conf_file.write(
                    "$GSI_AUTHZ_CONF \"%s\"\n" % self.conf.get_authz_config_file())
            conf_file.write("$GRIDMAP \"%s\"\n" %(
                self.conf.get_security_gridmap()))
            os.symlink(conf_file_name, conf_link_name)
        finally:
            conf_file.close()
            
        conf_file = open(self.conf.get_authz_config_file(), "w")
        try:
            cadir = self.conf.get_security_trusted_certificate_directory()
            idp = self.conf.get_security_cilogon_identity_provider()

            # Install the basic and silver CILogon CAs and add a gridmap
            # callout entry for each
            for ca_name in ("cilogon-basic", "cilogon-silver"):
                ca = pkgutil.get_data(
                        "globus.connect.security",
                        ca_name + ".pem")
                signing_policy = pkgutil.get_data(
                        "globus.connect.security",
                        ca_name + ".signing_policy")
                cahash = security.get_certificate_hash_from_data(ca)
                security.install_ca(cadir, ca, signing_policy)
                # read from installed conf instead?
                # the | prefix makes it optional, only one callout must succeed
                conf_file.write("|globus_mapping libglobus_gridmap_eppn_callout " +
                        "globus_gridmap_eppn_callout ENV:")
                conf_file.write(
                        "GLOBUS_MYPROXY_CA_CERT=%s " %
                        (os.path.join(cadir, cahash + ".0")))
                conf_file.write(
                        "GLOBUS_MYPROXY_AUTHORIZED_DN=" +
                        "\"/DC=org/DC=cilogon/C=US/O=%s\"\n" % (idp))

        finally:
            conf_file.close()

        self.logger.debug("EXIT: IO.configure_cilogon()")
Example #13
 def install_pyrun(self):
     # download, unpack and patch pyrun
     pyrun_src_tar = self.builddir / "pyrun.tar.gz"
     download_and_unpack(PYRUN_SRC_URL, pyrun_src_tar, self.builddir)
     pyrun_diff = pkgutil.get_data(__package__, "patches/pyrun.diff")
     patch(self.builddir / PYRUN_SRC_DIR, pyrun_diff)
     # giving full python source path as makefile target makes pyrun
     # download and patch python
     python_dir = self.pyrun_dir / "Python-{}-{}".format(self.python_full_version, self.meta["unicode"])
     self.pyrun_make(str(python_dir))
     # apply our python patches too
     py_patch_path = PYTHON_VERSION_MAP[self.python_major_version]["patch"]
     python_diff = pkgutil.get_data(__package__, str(py_patch_path))
     patch(python_dir, python_diff)
     # configure ffi (for ctypes)
     ffi_config_script = python_dir / "Modules" / "_ctypes" / "libffi" / "configure"
     ffi_build_dir = (
         python_dir / "build" / "temp.linux-{}-{}".format(self.arch, self.python_major_version) / "libffi"
     )
     ensure_dir_exists(ffi_build_dir)
     subprocess.check_call([str(ffi_config_script)], cwd=str(ffi_build_dir))
     self.render_setup_file()
     # build pyrun and move it to top build directory
     self.pyrun_make("pyrun")
     pyrun_target_dir = self.pyrun_dir / "build-{}-{}".format(self.python_major_version, self.meta["unicode"])
     pyrun_bin = pyrun_target_dir / "bin" / self.pyrun.name
     ensure_dir_exists(self.targetdir / "bin")
     ensure_dir_exists(self.targetdir / "lib" / "python{}".format(self.python_major_version) / "site-packages")
     pyrun_bin.rename(self.pyrun)
     (pyrun_target_dir / "include").rename(self.targetdir / "include")
Example #14
def get_spectra():
    """Reads the spectral information and stores it for future use."""

    # PROSPECT-D
    prospect_d_spectraf = pkgutil.get_data('prosail', 'prospect_d_spectra.txt')
    _, nr, kab, kcar, kant, kbrown, kw, km = np.loadtxt(
        BytesIO(prospect_d_spectraf), unpack=True)
    prospect_d_spectra = ProspectDSpectra(nr, kab, kcar, kbrown, kw, km, kant)
    # PROSPECT 5
    prospect_5_spectraf = pkgutil.get_data('prosail', 'prospect5_spectra.txt')
    nr, kab, kcar, kbrown, kw, km = np.loadtxt(BytesIO(prospect_5_spectraf),
                                               unpack=True)
    prospect_5_spectra = Prospect5Spectra(nr, kab, kcar, kbrown, kw, km)
    # SOIL
    soil_spectraf = pkgutil.get_data('prosail', 'soil_reflectance.txt')
    rsoil1, rsoil2 = np.loadtxt(BytesIO(soil_spectraf), unpack=True)
    soil_spectra = SoilSpectra(rsoil1, rsoil2)
    # LIGHT
    light_spectraf = pkgutil.get_data('prosail', 'light_spectra.txt')
    es, ed = np.loadtxt(BytesIO(light_spectraf), unpack=True)
    light_spectra = LightSpectra(es, ed)
    spectra = Spectra(prospect_5_spectra, prospect_d_spectra,
                      soil_spectra, light_spectra)
    return spectra
Example #15
def create_virtualenv(args):
    builddir = Path('pyrun') / args.py_version
    envdir = Path(args.envdir)
    bindir = envdir / 'bin'
    libdir = envdir / 'lib' / 'python{}'.format(args.py_version) / 'site-packages'
    pipdir = envdir / 'pip'
    for d in (bindir, libdir, pipdir):
        ensure_dir_exists(d)
    # setup bin dir
    pyrun = bindir / 'pyrun{}'.format(args.py_version)
    with pyrun.open('wb') as fp:
        fp.write(pkgutil.get_data(__package__, str(builddir / 'pyrun')))
    pyrun.chmod(0o755)
    (bindir / 'python').symlink_to(pyrun.name)
    (bindir / 'python{}'.format(args.py_version[0])).symlink_to(pyrun.name)
    (bindir / 'python{}'.format(args.py_version)).symlink_to(pyrun.name)
    tmpl = jinja2.Template(ACTIVATE_SCRIPT)
    with (bindir / 'activate').open('w') as fp:
        fp.write(tmpl.render(
            venv_path=str(envdir.resolve()),
            venv_name=envdir.name,
            pyrun_version=args.py_version))
    # setup include dir
    include_tar = io.BytesIO(pkgutil.get_data(__package__, str(builddir / 'include.tar')))
    with tarfile.open(fileobj=include_tar) as tar:
        tar.extractall(str(envdir))
    # install setuptools & pip
    with (pipdir / 'setuptools.egg').open('wb') as fp:
        fp.write(pkgutil.get_data(__package__, str(builddir / 'setuptools.egg')))
    with (pipdir / 'pip.egg').open('wb') as fp:
        fp.write(pkgutil.get_data(__package__, str(builddir / 'pip.egg')))
    pip_bin = bindir / 'pip'
    with (pip_bin).open('w') as fp:
        fp.write(PIP_SCRIPT)
    pip_bin.chmod(0o755)
Example #16
def cmd_init(opts):
    """
    Set up a proper directory structure.

    :param opts: Configuration options
    :type opts: NamedTuple-like object
    """
    base = opts.location
    os.makedirs(os.path.join(base, STATICDIR))
    os.makedirs(os.path.join(base, PAGESDIR))
    os.makedirs(os.path.join(base, CATSDIR))
    os.makedirs(os.path.join(base, TMPLSDIR))
    os.makedirs(os.path.join(base, TMPLSDIR, PAGESDIR))
    os.makedirs(os.path.join(base, TMPLSDIR, CATSDIR))
    os.makedirs(os.path.join(base, PICSDIR))
    os.makedirs(os.path.join(base, PICSDIR, PAGESDIR))
    os.makedirs(os.path.join(base, PICSDIR, CATSDIR))
    # Load files
    tmpl_data = pkgutil.get_data('jenerator',
            'skel/templates_default.html')
    with open(os.path.join(base, TMPLSDIR, 'default.html'), 'wb') as f:
        f.write(tmpl_data)
    ind_data = pkgutil.get_data('jenerator',
            'skel/index.md')
    with open(os.path.join(base, 'index.md'), 'wb') as f:
        f.write(ind_data)
    # Create meta directory
    os.makedirs(os.path.join(base, CONFIGDIR))
    with open(os.path.join(base, CONFIGDIR, CONFIGFILE), 'w') as f:
        f.write('{}')
    config_set(opts, 'author_name', opts.author)
    config_set(opts, 'author_email', opts.email)
    config_set(opts, 'site_title', opts.title)
Example #17
 def write_dxf(self, file):
     # Scales pixels to millimeters. This is the predominant unit in CAD.
     unit_factor = self._unit_factors['mm']
     
     layer_indices = {l: i for i, l in enumerate(self._layers)}
     
     file.write(pkgutil.get_data(__name__, 'dxf_header.txt'))
     
     def write_instruction(code, value):
         # write the DXF group code and its value, each on its own line
         file.write('{0}\n{1}\n'.format(code, value))
     
     handle_iter = itertools.count(256)
     
     for layer, path in self._paths:
         for (x1, y1), (x2, y2) in zip(path, path[1:]):
             write_instruction(0, 'LINE')
             
             if layer is not None:
                 write_instruction(8, layer.export_name)
                 write_instruction(62, layer_indices.get(layer, 0))
             
             write_instruction(5, '{:x}'.format(next(handle_iter)))
             write_instruction(100, 'AcDbEntity')
             write_instruction(100, 'AcDbLine')
             write_instruction(10, repr(x1 / unit_factor))
             write_instruction(20, repr(y1 / unit_factor))
             write_instruction(30, 0.0)
             write_instruction(11, repr(x2 / unit_factor))
             write_instruction(21, repr(y2 / unit_factor))
             write_instruction(31, 0.0)
     
     file.write(pkgutil.get_data(__name__, 'dxf_footer.txt'))
Example #18
def test_drpsys_2_instruments(drpmocker):
    """Test that two DRPs are returned"""

    drpdata1 = pkgutil.get_data("numina.core.tests", "drpfake1.yaml")
    drpdata2 = pkgutil.get_data("numina.core.tests", "drpfake2.yaml")
    drpmocker.add_drp("FAKE1", drpdata1)
    drpmocker.add_drp("FAKE2", drpdata2)

    drpsys = DrpSystem()

    ldrp1 = drpsys.query_by_name("FAKE1")

    assert ldrp1 is not None
    assert ldrp1.name == "FAKE1"

    ldrp2 = drpsys.query_by_name("FAKE2")

    assert ldrp2 is not None
    assert ldrp2.name == "FAKE2"

    ldrp3 = drpsys.query_by_name("OTHER")
    assert ldrp3 is None

    alldrps = drpsys.query_all()
    assert len(alldrps) == 2
    assert "FAKE1" in alldrps
    # FIXME: We should check that both are equal, not just the name
    assert alldrps["FAKE1"].name == ldrp1.name

    assert "FAKE2" in alldrps
    # FIXME: We should check that both are equal, not just the name
    assert alldrps["FAKE2"].name == ldrp2.name
Example #19
def test_drpsys_2_instruments(drpmocker):
    """Test that two DRPs are returned"""

    drpdata1 = pkgutil.get_data('numina.drps.tests', 'drptest1.yaml')
    drpdata2 = pkgutil.get_data('numina.drps.tests', 'drptest2.yaml')
    drpmocker.add_drp('TEST1', drpdata1)
    drpmocker.add_drp('TEST2', drpdata2)

    drpsys = DrpSystem()
    drpsys.load()

    ldrp1 = drpsys.query_by_name('TEST1')

    assert ldrp1 is not None
    assert ldrp1.name == 'TEST1'

    ldrp2 = drpsys.query_by_name('TEST2')

    assert ldrp2 is not None
    assert ldrp2.name == 'TEST2'

    res = drpsys.query_by_name('OTHER')
    assert res is None

    alldrps = drpsys.query_all()
    assert len(alldrps) == 2
    assert 'TEST1' in alldrps
    # FIXME: We should check that both are equal, not just the name
    assert alldrps['TEST1'].name == ldrp1.name

    assert 'TEST2' in alldrps
    # FIXME: We should check that both are equal, not just the name
    assert alldrps['TEST2'].name == ldrp2.name
Example #20
 def __init__(self, widget):
     vert = pkgutil.get_data("chemlab.graphics.renderers.shaders",
                                           "default_persp.vert")
     frag = pkgutil.get_data("chemlab.graphics.renderers.shaders",
                                             "default_light.frag")
     
     super(DefaultRenderer, self).__init__(widget, vert, frag)
Example #21
 def __init__(self, widget):
     vert = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                           "default_persp.vert")
     frag = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                             "default_light.frag")
     
     super(DefaultRenderer, self).__init__(widget, vert, frag)
Example #22
    def test_getdata_zipfile(self):
        zip = "test_getdata_zipfile.zip"
        pkg = "test_getdata_zipfile"

        # Include a LF and a CRLF, to test that binary data is read back
        RESOURCE_DATA = b"Hello, world!\nSecond line\r\nThird line"

        # Make a package with some resources
        zip_file = os.path.join(self.dirname, zip)
        z = zipfile.ZipFile(zip_file, "w")

        # Empty init.py
        z.writestr(pkg + "/__init__.py", "")
        # Resource files, res.txt, sub/res.txt
        z.writestr(pkg + "/res.txt", RESOURCE_DATA)
        z.writestr(pkg + "/sub/res.txt", RESOURCE_DATA)
        z.close()

        # Check we can read the resources
        sys.path.insert(0, zip_file)
        res1 = pkgutil.get_data(pkg, "res.txt")
        self.assertEqual(res1, RESOURCE_DATA)
        res2 = pkgutil.get_data(pkg, "sub/res.txt")
        self.assertEqual(res2, RESOURCE_DATA)

        names = []
        for loader, name, ispkg in pkgutil.iter_modules([zip_file]):
            names.append(name)
        self.assertEqual(names, ["test_getdata_zipfile"])

        del sys.path[0]

        del sys.modules[pkg]
Example #23
    def __init__(self, widget, vectors, origin=np.zeros(3), color=black):
        vert = pkgutil.get_data("chemlab.graphics.renderers.shaders",
                                "default_persp.vert")
        frag = pkgutil.get_data("chemlab.graphics.renderers.shaders",
                                "no_light.frag")

        self.color = color
        super(BoxRenderer, self).__init__(widget, vert, frag)
        self.origin = origin
        self.vectors = vectors
Example #24
    def open_connection(self):
        self.logger = logging.getLogger('BSL')
        self.open(self.options.device)

        # only fast mode supported by USB boot loader
        self.use_fast_mode = True
        self.buffer_size = 48

        if self.options.do_mass_erase:
            self.logger.info("Mass erase...")
            try:
                self.BSL_RX_PASSWORD('\xff'*30 + '\0'*2)
            except bsl5.BSL5Error:
                pass # it will fail - that is our intention to trigger the erase
            time.sleep(1)
            # after erase, unlock device
            self.BSL_RX_PASSWORD('\xff'*32)
            # remove mass_erase from action list so that it is not done
            # twice
            self.remove_action(self.mass_erase)
        else:
            if self.options.password is not None:
                password = msp430.memory.load(self.options.password).get_range(0xffe0, 0xffff)
                self.logger.info("Transmitting password: %s" % (password.encode('hex'),))
                self.BSL_RX_PASSWORD(password)

        if self.options.norambsl:
            if self.verbose:
                sys.stderr.write('Downloading utility routines...\n')
            writeflash = pkgutil.get_data('msp430.bsl5', 'writeflash.bin')
            self.writeflash_code=0x2500
            self.writeflash_args=self.writeflash_code+len(writeflash)
            self.memory_write(self.writeflash_code, writeflash)
        else:
            # download full BSL
            if self.verbose:
                sys.stderr.write('Download full BSL...\n')
            bsl_version_expected = (0x00, 0x05, 0x04, 0x34)
            full_bsl_txt = pkgutil.get_data('msp430.bsl5', 'RAM_BSL.00.05.04.34.txt')
            full_bsl = msp430.memory.load('BSL', StringIO(full_bsl_txt), format='titext')
            self.program_file(full_bsl, quiet=True)
            self.BSL_LOAD_PC(0x2504)

            # must re-initialize communication, BSL or USB system needs some time
            # to be ready
            self.logger.info("Waiting for BSL...")
            time.sleep(3)
            self.close()
            self.open(self.options.device)
            # checking version, this is also a connection check
            bsl_version = self.BSL_VERSION()
            if bsl_version_expected != bsl_version:
                self.logger.error("BSL version mismatch (continuing anyway)")
            else:
                self.logger.debug("BSL version OK")
Example #25
    def setUp(self):
        self.dirname = tempfile.mkdtemp()
        self.filename = 'tmp.gff.gz'

        with open(os.path.join(self.dirname, self.filename), 'wb') as fileobj:
            fileobj.write(pkgutil.get_data('lhc.test', 'data/randome.gff.gz'))

        with open(os.path.join(self.dirname, self.filename + '.tbi'), 'wb') as fileobj:
            fileobj.write(pkgutil.get_data('lhc.test', 'data/randome.gff.gz.tbi'))

        self.index = pysam.TabixFile(os.path.join(self.dirname, self.filename))
Example #26
 def __init__(self, wiki, heap, cover, **kwargs):
     super().__init__(wiki, heap, author='Various Authors', **kwargs)
     self.book.set_cover(pkgutil.get_data(
         'pyscp_ebooks', 'resources/scp_wiki/' + cover))
     self.book.set_stylesheet(pkgutil.get_data(
         'pyscp_ebooks',
         'resources/scp_wiki/stylesheet.css').decode('UTF-8'))
     self.whitelisted_images = {
         i.url: i for i in self.wiki.list_images()
         if i.status in ('BY-SA CC', 'PUBLIC DOMAIN')}
     self.used_images = []
Example #27
    def __init__(self, metadata):
        self.metadata = metadata

        data = pkgutil.get_data("cubes", "schemas/model.json")
        self.model_schema = json.loads(compat.to_str(data))

        data = pkgutil.get_data("cubes", "schemas/cube.json")
        self.cube_schema = json.loads(compat.to_str(data))

        data = pkgutil.get_data("cubes", "schemas/dimension.json")
        self.dimension_schema = json.loads(compat.to_str(data))
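
    # Usage sketch (not part of the original example): the three dicts above
    # are plain JSON Schema documents, so a validator such as the jsonschema
    # package could consume them, e.g.:
    #
    #   import jsonschema
    #   errors = list(jsonschema.Draft4Validator(self.model_schema)
    #                 .iter_errors(metadata))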
Example #28
    def __init__(self, widget, vectors, origin=np.zeros(3), color=black, width=1.5):
        vert = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                "default_persp.vert")
        frag = pkgutil.get_data("ipymd.visualise.opengl.renderers.opengl_shaders",
                                "no_light.frag")

        self.color = color
        super(HexagonRenderer, self).__init__(widget, vert, frag)
        self.origin = origin
        self.vectors = vectors
        self.width = width
Example #29
def _stdlib_packages(version=sys.version_info.major):
    stdlib_list = ''

    if version == 3:
        stdlib_list = pkgutil.get_data(__name__, 'data/libs3.txt')
    elif version == 2:
        stdlib_list = pkgutil.get_data(__name__, 'data/libs2.txt')

    stdlib_list = _bytes_to_str(stdlib_list)
    stdlib_list = [ x.strip() for x in stdlib_list.split('\n') ]
    stdlib_list = [ x for x in stdlib_list if len(x) > 0 ]
    return set(stdlib_list)
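
# Usage sketch (not part of the original example): the helper returns a set,
# so membership checks are cheap.
stdlib = _stdlib_packages()        # defaults to the running major version
assert 'json' in stdlib            # standard-library module
assert 'requests' not in stdlib    # third-party package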
Example #30
 def test_factories(self):
     with NamedTemporaryFile(mode='wb', prefix='fix_occ_test',
             suffix='.mat') as ntf:
         ntf.write(get_data('ocupy.tests', 'fixmat_demo.mat'))
         ntf.seek(0)
         fm = fixmat.FixmatFactory(ntf.name)
     with NamedTemporaryFile(mode='wb', prefix='fix_occ_test',
             suffix='.mat') as ntf:
         ntf.write(get_data('ocupy.tests', 'fixmat_demo.mat'))
         ntf.seek(0)
         fm2 = fixmat.DirectoryFixmatFactory(os.path.dirname(ntf.name), glob_str='fix_occ_test*.mat')
         self.compare_fixmats(fm, fm2)
         self.assertRaises(ValueError, lambda: fixmat.DirectoryFixmatFactory('.', glob_str='xxx*.mat'))
Example #31
"""os-urlpattern.

Unsupervised URLs clustering, generate and match URL pattern.
"""
import sys
__all__ = ['__version__', 'version_info']

import pkgutil
__version__ = pkgutil.get_data(__package__, 'VERSION').decode('ascii').strip()
version_info = tuple(
    int(v) if v.isdigit() else v for v in __version__.split('.'))

if sys.version_info < (2, 7):
    sys.exit("os-urlpattern %s requires Python 2.7" % __version__)

del pkgutil
del sys
Example #32
 def get_snippet(self, cliargs):
     snippet = pkgutil.get_data(
         'rocker',
         'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
     return em.expand(snippet, self.get_environment_subs())
Example #33
    def run(self, workload, template, files, parameters):
        """Run workload on stack deployed by heat.

         Workload can be either file or resource:

         .. code-block:: json

             {"file": "/path/to/file.sh"}
             {"resource": ["package.module", "workload.py"]}

         Also it should contain "username" key.

         Given file will be uploaded to `gate_node` and started. This script
         should print `key` `value` pairs separated by colon. These pairs will
         be presented in results.

         Gate node should be accessible via ssh with keypair `key_name`, so
         heat template should accept parameter `key_name`.

        :param workload: workload to run
        :param template: path to heat template file
        :param files: additional template files
        :param parameters: parameters for heat template
        """
        keypair = self.context["user"]["keypair"]
        parameters["key_name"] = keypair["name"]
        network = self.context["tenant"]["networks"][0]
        parameters["router_id"] = network["router_id"]
        self.stack = heat.main.Stack(self,
                                     self.task,
                                     template,
                                     files=files,
                                     parameters=parameters)
        self.stack.create()
        for output in self.stack.stack.outputs:
            if output["output_key"] == "gate_node":
                ip = output["output_value"]
                break
        ssh = sshutils.SSH(workload["username"], ip, pkey=keypair["private"])
        ssh.wait()
        script = workload.get("resource")
        if script:
            script = pkgutil.get_data(*script)
        else:
            script = open(workload["file"]).read()
        ssh.execute("cat > /tmp/.rally-workload", stdin=script)
        ssh.execute("chmod +x /tmp/.rally-workload")
        with atomic.ActionTimer(self, "runcommand_heat.workload"):
            status, out, err = ssh.execute("/tmp/.rally-workload",
                                           stdin=json.dumps(
                                               self.stack.stack.outputs))
        rows = []
        for line in out.splitlines():
            row = line.split(":")
            if len(row) != 2:
                raise exceptions.ScriptError("Invalid data '%s'" % line)
            rows.append(row)
        if not rows:
            raise exceptions.ScriptError("No data returned. Original error "
                                         "message is %s" % err)
        self.add_output(
            complete={
                "title": "Workload summary",
                "description": "Data generated by workload",
                "chart_plugin": "Table",
                "data": {
                    "cols": ["key", "value"],
                    "rows": rows
                }
            })
Example #34
    def _compile_target(self, target):
        # "Compiles" a target by forming an isolated chroot of its sources and transitive deps and then
        # attempting to import each of the target's sources in the case of a python library or else the
        # entry point in the case of a python binary.
        #
        # For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:
        #
        #   if __name__ == '__main__':
        #     import lib.core
        #     import lib.util
        #
        # For a binary with entry point lib.bin:main the "compiler" main file would look like:
        #
        #   if __name__ == '__main__':
        #     from lib.bin import main
        #
        # In either case the main file is executed within the target chroot to reveal missing BUILD
        # dependencies.

        with self.context.new_workunit(name=target.address.spec):
            modules = []
            if isinstance(target, PythonBinary):
                source = 'entry_point {}'.format(target.entry_point)
                components = target.entry_point.rsplit(':', 1)
                module = components[0]
                if len(components) == 2:
                    function = components[1]
                    data = TemplateData(
                        source=source,
                        import_statement='from {} import {}'.format(
                            module, function))
                else:
                    data = TemplateData(
                        source=source,
                        import_statement='import {}'.format(module))
                modules.append(data)
            else:
                for path in target.sources_relative_to_source_root():
                    if path.endswith('.py'):
                        if os.path.basename(path) == '__init__.py':
                            module_path = os.path.dirname(path)
                        else:
                            module_path, _ = os.path.splitext(path)
                        source = 'file {}'.format(
                            os.path.join(target.target_base, path))
                        module = module_path.replace(os.path.sep, '.')
                        data = TemplateData(
                            source=source,
                            import_statement='import {}'.format(module))
                        modules.append(data)

            if not modules:
                # Nothing to eval, so a trivial compile success.
                return 0

            interpreter = self.select_interpreter_for_targets([target])

            if isinstance(target, PythonBinary):
                pexinfo, platforms = target.pexinfo, target.platforms
            else:
                pexinfo, platforms = None, None

            generator = Generator(pkgutil.get_data(__name__,
                                                   self._EVAL_TEMPLATE_PATH),
                                  chroot_parent=self.chroot_cache_dir,
                                  modules=modules)
            executable_file_content = generator.render()

            chroot = self.cached_chroot(
                interpreter=interpreter,
                pex_info=pexinfo,
                targets=[target],
                platforms=platforms,
                executable_file_content=executable_file_content)
            pex = chroot.pex()
            with self.context.new_workunit(
                    name='eval',
                    labels=[
                        WorkUnitLabel.COMPILER, WorkUnitLabel.RUN,
                        WorkUnitLabel.TOOL
                    ],
                    cmd=' '.join(pex.cmdline())) as workunit:
                returncode = pex.run(stdout=workunit.output('stdout'),
                                     stderr=workunit.output('stderr'))
                workunit.set_outcome(WorkUnit.SUCCESS if returncode ==
                                     0 else WorkUnit.FAILURE)
                if returncode != 0:
                    self.context.log.error('Failed to eval {}'.format(
                        target.address.spec))
                return returncode
Example #35

# -*- coding: utf-8 -*-
"""
yaspin.spinners
~~~~~~~~~~~~~~~

A collection of cli spinners.
"""

import pkgutil
from collections import namedtuple

try:
    import simplejson as json
except ImportError:
    import json

SPINNERS_DATA = pkgutil.get_data(__name__,
                                 "data/spinners.json").decode("utf-8")


def _hook(dct):
    return namedtuple("Spinner", dct.keys())(*dct.values())


Spinners = json.loads(SPINNERS_DATA, object_hook=_hook)
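
# Usage sketch (not part of the original example): each attribute of Spinners
# is a Spinner namedtuple built from one entry of data/spinners.json. The
# field names below ("frames", "interval") assume the cli-spinners layout.
dots = Spinners.dots
print(dots.frames)     # list of animation frames
print(dots.interval)   # frame delay in milliseconds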
Example #36
 def image(self):
     # Note: Binary mode. In Python 3 retval will be a ``bytes`` object,
     #       in Python 2 retval will be a ``str``.
     retval = get_data('gs.content.favicon', self.iconName)
     return retval
Example #37
import pkgutil

from singledispatch import singledispatch
import six

from httpolice import known, message, notice, structure
from httpolice.__metadata__ import homepage, version
from httpolice.citation import Citation
from httpolice.header import HeaderView
from httpolice.reports.common import (expand_error, expand_piece,
                                      find_reason_phrase, resolve_reference)
from httpolice.structure import Unavailable
from httpolice.util.text import nicely_join, printable

###############################################################################
# High-level templates.

css_code = pkgutil.get_data('httpolice.reports', 'html.css').decode('utf-8')
js_code = pkgutil.get_data('httpolice.reports', 'html.js').decode('utf-8')


def html_report(exchanges, buf):
    """Generate an HTML report with check results.

    :param exchanges:
        An iterable of :class:`~httpolice.Exchange` objects.
        They must be already processed by :func:`~httpolice.check_exchange`.

    :param buf:
        The file (or file-like object) to which the report will be written.
        It must be opened in binary mode (not text).

    """
Example #38
async def _download_page(request):
    """Renders a download page, GET handler for route '/'."""
    download = pkgutil.get_data(__name__, "static/download.html").decode()
    return web.Response(text=download, content_type="text/html")
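
# Wiring sketch (not part of the original example): registering the handler
# with an aiohttp application; the route and port are illustrative.
#
#   app = web.Application()
#   app.router.add_get("/", _download_page)
#   web.run_app(app, port=8080)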
Example #39
async def _text_page(request):
    """Renders a text viewing page, GET handler for route '/'."""
    text = pkgutil.get_data(__name__, "static/text.html").decode()
    return web.Response(text=text, content_type="text/html")
Example #40
 def prepare_template(self, provider_type, static_data_file):
     """Prepare the Jinja template for static data."""
     static_data = pkgutil.get_data("api.report.test", static_data_file)
     template = Template(static_data.decode("utf8"))
     static_data_path = f"/tmp/{provider_type}_static_data.yml"
     return template, static_data_path
Example #41
 def server_credentials(self):
     private_key = pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH)
     certificate_chain = pkgutil.get_data(__name__,
                                          _CERTIFICATE_CHAIN_RESOURCE_PATH)
     return grpc.ssl_server_credentials(
         ((private_key, certificate_chain), ))
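
 # Usage sketch (not part of the original example): feeding these credentials
 # to a gRPC server; the executor and address below are illustrative.
 #
 #   from concurrent import futures
 #   server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
 #   server.add_secure_port('[::]:50051', self.server_credentials())
 #   server.start()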
Example #42
class OnError(Enum):
    exit = "EXIT"
    cont = "CONTINUE"

    def __str__(self):
        return self.value


export_root = Path.home() / "Watson Workspace Export"
on_graphql_error = OnError.exit

build_info = "LOCAL SCRIPT"

try:
    buildtxt_binary = pkgutil.get_data("wwexport", "build.txt")
except FileNotFoundError:
    pass
else:
    build_info = buildtxt_binary.decode(constants.FILE_ENCODING, "ignore")


def progress_bar(iterable=None, desc=None, position=None, unit="", initial=0):
    return tqdm(iterable,
                desc=desc,
                position=position,
                unit=unit,
                initial=initial,
                leave=not position,  # None or 0: keep the bar after exit
                ncols=75)
Example #43
 def get_spec(self) -> ConnectorSpecification:
     raw_spec = pkgutil.get_data(self.__class__.__module__.split(".")[0], "spec.json")
     return ConnectorSpecification.parse_obj(json.loads(raw_spec))
Example #44
 def get_config(self) -> object:
     return json.loads(pkgutil.get_data(self.__class__.__module__.split(".")[0], "config.json"))
Example #45
import logging
import pkgutil
import os

import cffi

__version__ = "0.9.6"

LOG = logging.getLogger(__name__)

try:
    from ._bindings import ffi, lib
except ModuleNotFoundError:
    ffi = cffi.FFI()
    ffi.cdef(
        pkgutil.get_data(__name__, "grib_api.h").decode("utf-8") +
        pkgutil.get_data(__name__, "eccodes.h").decode("utf-8"))

    LIBNAMES = ["eccodes", "libeccodes.so", "libeccodes"]

    if os.environ.get("ECCODES_DIR"):
        LIBNAMES.insert(
            0, os.path.join(os.environ["ECCODES_DIR"], "lib/libeccodes.so"))

    for libname in LIBNAMES:
        try:
            lib = ffi.dlopen(libname)
            LOG.info("ecCodes library found using name '%s'.", libname)
            break
        except OSError:
            pass  # lazy: fall through and try the next library name
Example #46
 def read_cloudinit_file(fn):
     return pkgutil.get_data('heat',
                             'cloudinit/%s' % fn).decode('utf-8')
Example #47
    def calculate_transformed_parameters(self, params):
        """
        This function calculates the "c" parameters of the :cite:`Brosh2007`
        equation of state.
        """
        Zs = pkgutil.get_data('burnman',
                              'data/input_masses/atomic_numbers.dat')
        Zs = Zs.decode('ascii').split('\n')
        Z = {
            str(sl[0]): int(sl[1])
            for sl in
            [line.split() for line in Zs if len(line) > 0 and line[0] != '#']
        }

        nZs = [(n_at, float(Z[el]))
               for (el, n_at) in params['formula'].items()]

        # eq. A2 at 300 TPa
        X3_300TPa = [
            np.power(
                1. - params['a'][i - 2] + params['a'][i - 2] * np.power(
                    (1. + float(i) /
                     (3. * params['a'][i - 2]) * 300.e12 / params['K_0']),
                    1. / float(i)), -3.) for i in range(2, 6)
        ]

        # eq. A2 at 330 TPa
        X3_330TPa = [
            np.power(
                1. - params['a'][i - 2] + params['a'][i - 2] * np.power(
                    (1. + float(i) /
                     (3. * params['a'][i - 2]) * 330.e12 / params['K_0']),
                    1. / float(i)), -3.) for i in range(2, 6)
        ]

        # eq. A6a, m^3/mol
        V_QSM_300TPa = np.sum([
            n_at *
            (0.02713 *
             np.exp(0.97626 * np.log(Zi) - 0.057848 * np.log(Zi) * np.log(Zi)))
            for (n_at, Zi) in nZs
        ]) * 1.e-6

        # eq. A6b, m^3/mol
        V_QSM_330TPa = np.sum([
            n_at *
            (0.025692 *
             np.exp(0.97914 * np.log(Zi) - 0.057741 * np.log(Zi) * np.log(Zi)))
            for (n_at, Zi) in nZs
        ]) * 1.e-6

        A = np.array([
            [1., 1., 1., 1.],  # eq A3
            [0., 6., 8., 9.],  # eq A4
            X3_300TPa,  # eq A5a
            X3_330TPa
        ])  # eq A5b

        b = np.array([
            1., 8., V_QSM_300TPa / params['V_0'], V_QSM_330TPa / params['V_0']
        ])

        # does not quite reproduce the published values of c
        # A.c consistently gives b[2], b[3] ~1% larger than Brosh
        return np.linalg.solve(A, b)
Example #48
 def _get_executable_file_content(self, exec_pex_parent, modules):
     generator = Generator(pkgutil.get_data(__name__,
                                            self._EVAL_TEMPLATE_PATH),
                           chroot_parent=exec_pex_parent,
                           modules=modules)
     return generator.render()
Example #49
from random import choice
from pkgutil import get_data

animal_list = get_data('gishgenerator', 'data/animals.txt').split('\n')
animal_image_cache = {}


def generate_name():

    name1 = choice(animal_list)
    name2 = choice(animal_list)

    combined_name = name1[:-1] + name2[1:]

    print name1, "+", name2, "=", combined_name

    img1 = get_image_for_text(name1)
    img2 = get_image_for_text(name2)

    return combined_name, img1, img2


def get_image_for_text(text):
    import urllib2
    import json

    if text in animal_image_cache:
        return animal_image_cache[text]
    try:
        fetcher = urllib2.build_opener()
        f = fetcher.open(
Example #50
def image_bear():
    return BytesIO(pkgutil.get_data('fastbook', 'images/grizzly.jpg'))
Example #51
from pkgutil import get_data
from json import loads

VEGALITE_FILE = 'vega-lite-v2.0.4.json'
VEGALITE_SCHEMA = loads(get_data('pdvega', VEGALITE_FILE).decode('utf-8'))
Example #52
def image_cat():
    return BytesIO(pkgutil.get_data('fastbook', 'images/cat.jpg'))
Example #53
"""
Create a dictionary containing basic information for isotopes and
neutrons.
"""

import json
import pkgutil
import astropy.units as u

# this code was used to create the JSON file as per vn-ki on Riot:
# https://matrix.to/#/!hkWCiyhQyxiYJlUtKF:matrix.org/
#    $1554667515670438wIKlP:matrix.org?via=matrix.org&via=cadair.com
#
# def _isotope_default(obj):
#     if isinstance(obj, u.Quantity):
#         return {
#             "unit": obj.unit.name,
#             "value": obj.value,
#         }
# with open("isotopes.json", "w") as f:
#     json.dump(_Isotopes, f, default=plasma_default, indent=2)


def _isotope_obj_hook(obj):
    if "unit" in obj:
        return obj["value"] * u.Unit(obj["unit"])
    return obj


_Isotopes = json.loads(pkgutil.get_data("plasmapy", "data/isotopes.json"),
                       object_hook=_isotope_obj_hook)
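
# Usage sketch (not part of the original example): any JSON object carrying
# "unit" and "value" keys is rebuilt as an astropy Quantity by the hook.
_example = json.loads('{"mass": {"unit": "kg", "value": 1.67e-27}}',
                      object_hook=_isotope_obj_hook)
print(_example["mass"])    # 1.67e-27 kg, an astropy Quantity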
Example #54
 def get_catalog(self) -> ConfiguredAirbyteCatalog:
     raw_spec = pkgutil.get_data(self.__class__.__module__.split(".")[0], "configured_catalog.json")
     return ConfiguredAirbyteCatalog.parse_obj(json.loads(raw_spec))
Example #55
    def __init__(self):
        # load Glade ui
        self.builder = Gtk.Builder()
        self.builder.add_from_string(
            pkgutil.get_data("mfp.gui", "mfp.glade").decode())

        # install Clutter stage in Gtk window
        self.window = self.builder.get_object("main_window")
        box = self.builder.get_object("stage_box")

        self.content_console_pane = self.builder.get_object(
            "content_console_pane")
        self.tree_canvas_pane = self.builder.get_object("tree_canvas_pane")

        self.embed = GtkClutter.Embed.new()
        box.pack_start(self.embed, True, True, 0)
        self.embed.set_sensitive(True)
        self.stage = self.embed.get_stage()

        # significant widgets we will be dealing with later
        self.bottom_notebook = self.builder.get_object("bottom_notebook")
        self.console_view = self.builder.get_object("console_text")
        self.log_view = self.builder.get_object("log_text")
        self.object_view = self.init_object_view()
        self.layer_view = self.init_layer_view()

        # objects for stage -- self.group gets moved/scaled to adjust
        # the view, so anything not in it will be static on the stage
        self.group = Clutter.Group()

        # The HUD is the text overlay at the bottom/top of the window that
        # fades after a short display
        self.hud_history = []
        self.hud_banner_text = None
        self.hud_banner_anim = None
        self.hud_prompt = None
        self.hud_prompt_input = None
        self.hud_prompt_mgr = Prompter(self)
        self.hud_mode_txt = None

        self.autoplace_marker = None
        self.autoplace_layer = None
        self.selection_box = None
        self.selection_box_layer = None

        self.stage.add_actor(self.group)

        # self.objects is PatchElement subclasses representing the
        # currently-displayed patch(es)
        self.patches = []
        self.objects = []
        self.object_counts_by_type = {}

        self.selected_patch = None
        self.selected_layer = None
        self.selected = []

        self.load_in_progress = 0
        self.close_in_progress = False

        self.input_mgr = InputManager(self)
        self.console_mgr = ConsoleMgr("MFP interactive console",
                                      self.console_view)
        self.console_mgr.start()

        # dumb colors
        self.color_unselected = self.get_color('stroke-color')
        self.color_transparent = ColorDB().find('transparent')
        self.color_selected = self.get_color('stroke-color:selected')
        self.color_bg = self.get_color('canvas-color')

        # callbacks facility... not yet too much used, but "select" and
        # "add" are in use
        self.callbacks = {}
        self.callbacks_last_id = 0

        # configure Clutter stage
        self.stage.set_color(self.color_bg)
        self.stage.set_property('user-resizable', True)
        self.zoom = 1.0
        self.view_x = 0
        self.view_y = 0

        # show top-level window
        self.window.show_all()

        # set up key and mouse handling
        self.init_input()
Example #56
def get_shader(name: str) -> str:
    data = pkgutil.get_data('gltfloupe', f'assets/{name}')
    if not data:
        raise Exception(f'shader asset not found: {name}')
    return data.decode('utf-8')
Example #57
def write(story, output_file=None):
    """Pre-defined Storyscripts for your app!"""

    # Support the `$ story write http -` usecase.
    if output_file == '-':
        output_file = None

    if story == '-':
        click.echo(click.style('Please specify a template:', bold=True))
        click.echo(
            click.style('  http', fg='cyan') + '      - serverless http')
        click.echo(
            click.style('  function', fg='cyan') + '  - generic function')
        click.echo(
            click.style('  if', fg='cyan') + '        - example if/then')
        click.echo(
            click.style('  loop', fg='cyan') + '      - example for loop')
        click.echo(click.style('  twitter', fg='cyan') + '   - stream Tweets')
        click.echo('')

        click.echo(
            click.style('Coming Soon',
                        bold=True) + ' (under active development):'
        )
        click.echo(click.style('  slack-bot', fg='cyan') + ' - Slack bot')
        click.echo(
            click.style('  subscribe', fg='cyan') + ' - event subscriptions')
        click.echo(
            click.style('  every', fg='cyan') + '     - periodically run this')
        click.echo(
            click.style('  websocket', fg='cyan') + ' - websocket support')
        click.echo('')

        click.echo(
            '  Run $ '
            + click.style('story write :template_name: ', fg='magenta')
            + emoji.emojize(':backhand_index_pointing_left:')
        )
        click.echo('')

        click.echo(click.style('Learn more:', bold=True))
        click.echo(
            '  - Examples: '
            + click.style('https://github.com/topics/storyscript-example',
                          fg='cyan')
        )
        click.echo(
            '  - Services: ' + click.style('https://hub.storyscript.io/',
                                           fg='cyan')
        )
        click.echo('')

    else:

        # Grab the story, from packaging...
        data = pkgutil.get_data('story', f'stories/{story}.story')

        # If output_file was passed, assume this is an interactive session.
        if output_file:
            # Write to the file...
            with open(output_file, 'wb') as f:
                f.write(data)

            cmd = f'cat {output_file}'
            cmd = click.style(cmd, fg='magenta')
            click.echo(f'$ {cmd}', err=True)

        click.echo(data)

        app_name = cli.get_app_name_from_yml()
        if app_name is None:
            app_name = 'Not created yet'

        cli.track('App Bootstrapped',
                  {'App name': app_name, 'Template used': story})
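
The function reads like the body of a click command. A minimal sketch of how it might be wired into the CLI (decorator names and option spellings are assumptions, not taken from the story package):

import click

# hypothetical wiring; the real `story` CLI may register this differently
@click.command(name='write')
@click.argument('story', default='-')
@click.option('--output-file', '-o', default=None,
              help='write the template to this file instead of stdout')
def write_command(story, output_file):
    write(story, output_file)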
Example #58
import json
import pkgutil
from nose.tools import *
from cyr2phon import syllabify, strip_onset

# tests for syllabify()
words_to_syllabify = _lexical_data = json.loads(
    pkgutil.get_data(
        __package__,
        'words_to_syllabify.json').decode('utf-8'))  # file inside package
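# NOTE (illustrative): judging by check_syllabify_word() below, the fixture
# maps each word to its hyphen-delimited syllabification, e.g.
#   {"BAKA": "BA-KA"}
# (hypothetical entry; the real data ships inside the package)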


def test_syllabify_word():
    for word in words_to_syllabify:
        # bind word as a default arg to avoid the late-binding closure bug,
        # and call the checker so each case actually asserts
        fn = lambda w=word: check_syllabify_word(w)
        fn.description = "cyr2phon.tests.test_utility.test_syllabify_word with {}".format(
            word)
        yield fn


def check_syllabify_word(word):
    assert_equal(syllabify(word), words_to_syllabify[word].split("-"))


# tests for strip_onset()
def test_strip_onset_bA():  # open masculine with onset singular
    expected = ['BA']
    assert_equal(strip_onset(['BA']), expected)
Example #59
def discover_interpreter(action, interpreter_name, discovery_mode, task_vars):
    # interpreter discovery is a 2-step process with the target. First, we use a simple shell-agnostic bootstrap to
    # get the system type from uname, and find any random Python that can get us the info we need. For supported
    # target OS types, we'll dispatch a Python script that calls platform.dist() (for older platforms, where available)
    # and brings back /etc/os-release (if present). The proper Python path is looked up in a table of known
    # distros/versions with included Pythons; if nothing is found, depending on the discovery mode, either the
    # default fallback of /usr/bin/python is used (if we know it's there), or discovery fails.

    # FUTURE: add logical equivalence for "python3" in the case of py3-only modules?
    if interpreter_name != 'python':
        raise ValueError('Interpreter discovery not supported for {0}'.format(interpreter_name))

    host = task_vars.get('inventory_hostname', 'unknown')
    res = None
    platform_type = 'unknown'
    found_interpreters = [u'/usr/bin/python']  # fallback value
    is_auto_legacy = discovery_mode.startswith('auto_legacy')
    is_silent = discovery_mode.endswith('_silent')

    try:
        platform_python_map = C.config.get_config_value('INTERPRETER_PYTHON_DISTRO_MAP', variables=task_vars)
        bootstrap_python_list = C.config.get_config_value('INTERPRETER_PYTHON_FALLBACK', variables=task_vars)

        display.vvv(msg=u"Attempting {0} interpreter discovery".format(interpreter_name), host=host)

        # not all command -v impls accept a list of commands, so we have to call it once per python
        command_list = ["command -v '%s'" % py for py in bootstrap_python_list]
        shell_bootstrap = "echo PLATFORM; uname; echo FOUND; {0}; echo ENDFOUND".format('; '.join(command_list))

        # FUTURE: in most cases we probably don't want to use become, but maybe sometimes we do?
        res = action._low_level_execute_command(shell_bootstrap, sudoable=False)

        raw_stdout = res.get('stdout', u'')

        match = foundre.match(raw_stdout)

        if not match:
            display.debug(u'raw interpreter discovery output: {0}'.format(raw_stdout), host=host)
            raise ValueError('unexpected output from Python interpreter discovery')

        platform_type = match.groups()[0].lower().strip()

        found_interpreters = [interp.strip() for interp in match.groups()[1].splitlines() if interp.startswith('/')]

        display.debug(u"found interpreters: {0}".format(found_interpreters), host=host)

        if not found_interpreters:
            action._discovery_warnings.append(u'No python interpreters found for host {0} (tried {1})'.format(host, bootstrap_python_list))
            # this is lame, but returning None or throwing an exception is uglier
            return u'/usr/bin/python'

        if platform_type != 'linux':
            raise NotImplementedError('unsupported platform for extended discovery: {0}'.format(to_native(platform_type)))

        platform_script = pkgutil.get_data('assible.executor.discovery', 'python_target.py')

        # FUTURE: respect pipelining setting instead of just if the connection supports it?
        if action._connection.has_pipelining:
            res = action._low_level_execute_command(found_interpreters[0], sudoable=False, in_data=platform_script)
        else:
            # FUTURE: implement on-disk case (via script action or ?)
            raise NotImplementedError('pipelining support required for extended interpreter discovery')

        platform_info = json.loads(res.get('stdout'))

        distro, version = _get_linux_distro(platform_info)

        if not distro or not version:
            raise NotImplementedError('unable to get Linux distribution/version info')

        version_map = platform_python_map.get(distro.lower().strip())
        if not version_map:
            raise NotImplementedError('unsupported Linux distribution: {0}'.format(distro))

        platform_interpreter = to_text(_version_fuzzy_match(version, version_map), errors='surrogate_or_strict')

        # provide a transition period for hosts that were using /usr/bin/python previously (but shouldn't have been)
        if is_auto_legacy:
            if platform_interpreter != u'/usr/bin/python' and u'/usr/bin/python' in found_interpreters:
                # FIXME: support comments in sivel's deprecation scanner so we can get reminded on this
                if not is_silent:
                    action._discovery_deprecation_warnings.append(dict(
                        msg=u"Distribution {0} {1} on host {2} should use {3}, but is using "
                            u"/usr/bin/python for backward compatibility with prior Assible releases. "
                            u"A future Assible release will default to using the discovered platform "
                            u"python for this host. See {4} for more information"
                            .format(distro, version, host, platform_interpreter,
                                    get_versioned_doclink('reference_appendices/interpreter_discovery.html')),
                        version='2.12'))
                return u'/usr/bin/python'

        if platform_interpreter not in found_interpreters:
            if platform_interpreter not in bootstrap_python_list:
                # sanity check to make sure we looked for it
                if not is_silent:
                    action._discovery_warnings \
                        .append(u"Platform interpreter {0} on host {1} is missing from bootstrap list"
                                .format(platform_interpreter, host))

            if not is_silent:
                action._discovery_warnings \
                    .append(u"Distribution {0} {1} on host {2} should use {3}, but is using {4}, since the "
                            u"discovered platform python interpreter was not present. See {5} "
                            u"for more information."
                            .format(distro, version, host, platform_interpreter, found_interpreters[0],
                                    get_versioned_doclink('reference_appendices/interpreter_discovery.html')))
            return found_interpreters[0]

        return platform_interpreter
    except NotImplementedError as ex:
        display.vvv(msg=u'Python interpreter discovery fallback ({0})'.format(to_text(ex)), host=host)
    except Exception as ex:
        if not is_silent:
            display.warning(msg=u'Unhandled error in Python interpreter discovery for host {0}: {1}'.format(host, to_text(ex)))
            display.debug(msg=u'Interpreter discovery traceback:\n{0}'.format(to_text(format_exc())), host=host)
            if res and res.get('stderr'):
                display.vvv(msg=u'Interpreter discovery remote stderr:\n{0}'.format(to_text(res.get('stderr'))), host=host)

    if not is_silent:
        action._discovery_warnings \
            .append(u"Platform {0} on host {1} is using the discovered Python interpreter at {2}, but future installation of "
                    u"another Python interpreter could change the meaning of that path. See {3} "
                    u"for more information."
                    .format(platform_type, host, found_interpreters[0],
                            get_versioned_doclink('reference_appendices/interpreter_discovery.html')))
    return found_interpreters[0]
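
This excerpt never defines the module-level foundre regex it matches against. Given the "PLATFORM ... FOUND ... ENDFOUND" bootstrap output built above, a plausible reconstruction (not necessarily the exact shipped pattern) and a quick self-check:

import re

# reconstructed from the bootstrap output format; groups()[0] is the uname
# output, groups()[1] the `command -v` results
foundre = re.compile(r'(?s)PLATFORM[\r\n]+(.*)FOUND(.*)ENDFOUND')

sample = "PLATFORM\nLinux\nFOUND\n/usr/bin/python3\n/usr/bin/python\nENDFOUND\n"
match = foundre.match(sample)
assert match is not None
assert match.groups()[0].lower().strip() == 'linux'
assert [p.strip() for p in match.groups()[1].splitlines()
        if p.startswith('/')] == ['/usr/bin/python3', '/usr/bin/python']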
Example #60
def load_color_map_resource(resource_path: str) -> None:
    data = pkgutil.get_data(__name__, resource_path)  # renamed from 'bytes' to avoid shadowing the builtin
    assert data is not None
    color_map_json = json.loads(data)
    color_maps[color_map_json["id"]] = ColorMap(
        color_map_json["name"],
        generate_lookup_array_from_points(color_map_json["points"], 256))
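
For illustration, the resource shape the loader expects (inferred from the keys it reads) and a hypothetical call; the path below is an assumption about the package layout:

# expected JSON shape: {"id": "viridis", "name": "Viridis", "points": [...]}
# where "points" is whatever generate_lookup_array_from_points() consumes
load_color_map_resource("resources/color_maps/viridis.json")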