Example #1
def test_load_compressed():
    X, y = _load_svmlight_local_test_file(datafile)

    with NamedTemporaryFile(prefix="sklearn-test", suffix=".gz") as tmp:
        tmp.close()  # necessary under windows
        with resources.open_binary(TEST_DATA_MODULE, datafile) as f:
            with gzip.open(tmp.name, "wb") as fh_out:
                shutil.copyfileobj(f, fh_out)
        Xgz, ygz = load_svmlight_file(tmp.name)
        # because we "close" it manually and write to it,
        # we need to remove it manually.
        os.remove(tmp.name)
    assert_array_almost_equal(X.toarray(), Xgz.toarray())
    assert_array_almost_equal(y, ygz)

    with NamedTemporaryFile(prefix="sklearn-test", suffix=".bz2") as tmp:
        tmp.close()  # necessary under windows
        with resources.open_binary(TEST_DATA_MODULE, datafile) as f:
            with BZ2File(tmp.name, "wb") as fh_out:
                shutil.copyfileobj(f, fh_out)
        Xbz, ybz = load_svmlight_file(tmp.name)
        # because we "close" it manually and write to it,
        # we need to remove it manually.
        os.remove(tmp.name)
    assert_array_almost_equal(X.toarray(), Xbz.toarray())
    assert_array_almost_equal(y, ybz)
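Worth noting for all of the examples below: importlib.resources.open_binary(package, resource) is deprecated since Python 3.11 in favor of the files() API added in Python 3.9. A minimal equivalent sketch, assuming only the standard library:

from importlib import resources

def open_binary_compat(package, resource):
    # Same contract as resources.open_binary(): a binary file handle
    # for a resource inside an importable package.
    return resources.files(package).joinpath(resource).open("rb")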
Example #2
    def __init__(self) -> None:
        """Python API to access the realtime database"""

        with pkg_resources.open_binary(resources, 'ngrams.bin') as f:
            self.parser = pickle.load(f)

        with pkg_resources.open_binary(resources, 'realtime_languages.json') as f:
            self.supported_languages = ujson.load(f)
Example #3
def test_dhash_image(name_a, name_b, delta):
    with resources.open_binary("tests.data.images", name_a) as image_a:
        hash_a = hashes.dhash_image(image_a)

    with resources.open_binary("tests.data.images", name_b) as image_b:
        hash_b = hashes.dhash_image(image_b)

    assert (hash_a ^ hash_b).bit_count() == delta  # int.bit_count() requires Python 3.10+
Example #4
    def confirm_configuration(self):

        train_num_epochs = self.num_epochs.text()
        train_learning_rate = self.learning_rate.text()
        train_weight_decay = self.weight_decay.text()
        train_weight_init = self.weight_init.text()
        train_hidden_dim = self.hidden_dim.text()

        ## read about passing values out of here into
        print("Network Configuration")
        print("Number of Epochs: {}".format(train_num_epochs))
        print("Learning Rate: {}".format(train_learning_rate))
        print("Weight Decay: {}".format(train_weight_decay))
        print("Weight Initialisation: {}".format(train_weight_init))
        print("Hidden Layers Dimension: {}".format(train_hidden_dim))

        with res.open_binary('Titanicbc', 'config.yaml') as fp:
            model_parameters = yaml.load(fp, Loader=yaml.Loader)

        model_parameters['Binary_Network']['initialisations'][
            'hidden_dim'] = int(train_hidden_dim)
        model_parameters['Binary_Network']['optimiser'][
            'learning_rate'] = float(train_learning_rate)
        model_parameters['Binary_Network']['num_epochs'] = int(
            train_num_epochs)
        model_parameters['Binary_Network']['initialisations'][
            'weight_init'] = str(train_weight_init)  ## Read in Binary_Network
        model_parameters['Binary_Network']['optimiser'][
            'weight_decay'] = float(train_weight_decay)

        ## write out parameters

        with res.path('Titanicbc', 'config.yaml') as cf:
            path = cf  # NOTE: res.path() may yield a temporary path valid only inside the with-block

        with open(path, 'w') as outfile:
            yaml.dump(model_parameters, outfile, default_flow_style=False)

        ## Read in package resources

        with res.open_binary('Titanicbc', 'train.csv') as train:
            train = pd.read_csv(train)

        with res.open_binary('Titanicbc', 'test.csv') as test:
            test = pd.read_csv(test)

        with res.path('Titanicbc', 'trained_model.pth') as m:
            model_path = m

        # All params come through as strings from the text fields
        self.running_loss, model = Binary_Network.train_new_model(
            train, self.input_dim, train_hidden_dim, model_path,
            train_learning_rate, train_num_epochs, train_weight_decay)
        model.to(self.device)
        Binary_Network.predict(model, test)
Example #5
def get_glyph(glyph_name, data=None):
    """Return a named tuple (Glyph) containing information derived from a glyph
    name akin to GSGlyphInfo.

    The information is derived from an included copy of GlyphData.xml
    and GlyphData_Ideographs.xml, going purely by the glyph name.
    """

    # Read data on first use.
    if data is None:
        global GLYPHDATA
        if GLYPHDATA is None:
            try:
                from importlib.resources import open_binary
            except ImportError:
                # use backport for python < 3.7
                from importlib_resources import open_binary

            GLYPHDATA = GlyphData.from_files(
                open_binary("glyphsLib.data", "GlyphData.xml"),
                open_binary("glyphsLib.data", "GlyphData_Ideographs.xml"),
            )
        data = GLYPHDATA

    # Look up data by full glyph name first.
    attributes = _lookup_attributes(glyph_name, data)

    production_name = attributes.get("production")
    if production_name is None:
        production_name = _construct_production_name(glyph_name, data=data)

    unicode_value = attributes.get("unicode")

    category = attributes.get("category")
    sub_category = attributes.get("subCategory")
    if category is None:
        category, sub_category = _construct_category(glyph_name, data)

    # TODO: Determine script in ligatures.
    script = attributes.get("script")
    description = attributes.get("description")

    return Glyph(
        glyph_name,
        production_name,
        unicode_value,
        category,
        sub_category,
        script,
        description,
    )
Example #6
class Configs:

    REQUEST_KEYS = [
        'manufacturer',
        'product_name',
        'flash_point',
        'specific_gravity',
        'nfpa_fire',
        'nfpa_health',
        'nfpa_reactivity',
        'sara_311',
        'revision_date',
        'physical_state',
        'cas_number',
    ]

    REGEXES = dict()

    with open_binary('static', 'regexes.json') as regex_file:
        regex_file_bytes = regex_file.read()

    for regex_dict in json.loads(regex_file_bytes):
        REGEXES[regex_dict['name']] = regex_dict

    SUPPORTED_MANUFACTURERS = set(REGEXES.keys())
    SUPPORTED_MANUFACTURERS.remove('default')
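Because the read above happens in the class body, it runs once at import time and leaves the loop helpers (regex_file, regex_file_bytes, regex_dict) behind as class attributes. A minimal sketch of the same load done in a module-level helper, under the same assumption of a 'static' package shipping regexes.json:

import json
from importlib.resources import open_binary

def _load_regexes():
    # Load the packaged regex definitions once, keyed by name.
    with open_binary('static', 'regexes.json') as regex_file:
        return {d['name']: d for d in json.load(regex_file)}

REGEXES = _load_regexes()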
Example #7
 def start(self):
     with resources.open_binary(assets, self.splash_image_asset) as f:
         splash_surface = pygame.image.load(f)
     size = splash_surface.get_size()
     screen = set_mode_if_needed(size)
     screen.blit(splash_surface, (0, 0))
     display.flip()
Example #8
def importer_circonscriptions_legislatives(using):
    with open_binary(
            "data_france.data",
            "circonscriptions_legislatives.csv.lzma") as _f, lzma.open(
                _f, "rt") as f:
        import_with_temp_table(f, "data_france_circonscriptionlegislative",
                               using)
Example #9
def _load_token_statistics(file_name):
    with open_binary('akimous.resources', file_name) as f1:
        with lzma.open(f1, 'rb') as f2:
            return msgpack.unpack(f2,
                                  use_list=False,
                                  raw=False,
                                  strict_map_key=False)
Example #10
def get_magic(zone_name):
    components = zone_name.split("/")
    package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
    resource_name = components[-1]

    with resources.open_binary(package_name, resource_name) as f:
        return f.read(4)
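Compiled zoneinfo files begin with the 4-byte magic b"TZif", so a usage sketch (assuming the third-party tzdata package is installed; the zone name is just illustrative) looks like:

# "Europe/London" is an arbitrary valid zone; any zone shipped with
# tzdata should yield the TZif magic.
assert get_magic("Europe/London") == b"TZif"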
Example #11
    def test_to_pd_data_frame(self):
        burial_info = BurialInfo('site_name', 'site_id')
        age_sex_sature = AgeSexStature.empty()
        mouth = Mouth.empty()
        occupational_markers = OccupationalMarkers.empty()
        joints = Joints.empty()
        trauma = Trauma.empty()
        context = Context(BodyPosition.SUPINE, CompassBearing.WEST,
                          Present.PRESENT, Present.NOT_PRESENT, None, None, {
                              'spear': True,
                              'pot': False
                          })

        individual = Individual('id_1', burial_info, age_sex_sature, mouth,
                                occupational_markers, joints, trauma, context)
        df = individual.to_pd_data_frame()

        with open_binary(
                bioarch_test,
                'IndividualTest.test_to_pd_data_frame.json') as json_stream:
            expected_json = json.load(json_stream)

        print(df.to_json(orient='records'))
        actual_json = json.loads(df.to_json(orient='records'))

        self.assertEqual(actual_json, expected_json)
Example #12
def CityInfo():
    '''
    Introduction:
    CityInfo() returns a dataset with the city id, city name, state, country,
    latitude and longitude of every city in the world.

    Parameters:
    None.

    Output:
    A dataset of city id, city, state, country, latitude and longitude
    covering all the cities in the world.
    '''
    from importlib import resources
    with resources.open_binary('getweather', 'city.json') as f:
        content = f.read()
    content1 = content.decode('UTF-8')
    city = pd.DataFrame(json.loads(content1))

    table1 = city[['id', 'name', 'state',
                   'country']]  # extract id, name, state, country

    result2 = []  # extract lon and lat
    for i in city['coord']:
        coord = {}
        coord['lon'] = i.get('lon')
        coord['lat'] = i.get('lat')
        result2.append(coord)
    table2 = pd.DataFrame(result2)

    city_new = pd.concat([table1, table2], axis=1)  # combine table1 and table2
    return city_new
Example #13
    def builtin_loader() -> "SpdxLicenses":
        """Loads the license data provided by SPDX.

        A pre-built pickle file is loaded from this package. The file is
        generated by the project team using the script in `__main__.py`.

        Returns:
            The set of SPDX licences if it can be restored.

        Raises:
            ValueError: When the data cannot be loaded.
        """
        from importlib import resources as pkg_resources
        import pickle  # noqa: S403

        DATA_PACKAGE = "valiant.plugins.reports.spdx"
        DATA_FILE = "spdx-licenses.pickle"

        if not pkg_resources.is_resource(DATA_PACKAGE,
                                         DATA_FILE):  # pragma: no cover
            raise ValueError("Failed to access the data in the package.")

        with pkg_resources.open_binary(DATA_PACKAGE, DATA_FILE) as p:
            data = pickle.load(p)  # noqa: S301

        if type(data) is SpdxLicenses:
            return data
        """This is a saftey net in case the pickle file is dodgy."""
        raise ValueError(
            "The loaded datafile did not match the expected structure."
        )  # pragma: no cover
Example #14
def load_compton_data(element):
    element_data_fname = element + '.npy'
    with importlib_resources.open_binary(
            'LiquidDiffract.resources.hubbel_compton',
            element_data_fname) as fp:
        cs_Q, _, cs_comp = np.load(fp, allow_pickle=True)
    return cs_Q, cs_comp
Example #15
    def test_to_pd_data_frame(self):
        shoulder = LeftRight(JointCondition.NORMAL, JointCondition.NORMAL)
        elbow = LeftRight(None, None)
        wrist = LeftRight(JointCondition.NORMAL, None)
        hip = LeftRight(None, JointCondition.NORMAL)
        knee = LeftRight(JointCondition.NORMAL, JointCondition.MEDIUM)
        ankle = LeftRight(None, JointCondition.FRACTURE)

        sacro_illiac = JointCondition.NORMAL
        c1_3 = None
        c4_7 = JointCondition.EXTREME
        t1_4 = JointCondition.NORMAL
        t5_8 = None
        t9_12 = JointCondition.FRACTURE
        l1_5 = JointCondition.NORMAL

        df = Joints(shoulder, elbow, wrist, hip, knee, ankle, sacro_illiac,
                    c1_3, c4_7, t1_4, t5_8, t9_12,
                    l1_5).to_pd_data_frame('id1')

        with open_binary(
                bioarch_test,
                'JointsTest.test_to_pd_data_frame.json') as json_stream:
            expected_json = json.load(json_stream)

        print(df.to_json(orient='records'))
        actual_json = json.loads(df.to_json(orient='records'))

        self.assertEqual(actual_json, expected_json)
Example #16
def read_vep_basic_args():
    import yaml

    with pkg_resources.open_binary(vep.__name__, 'basic_args.yaml') as fp:
        vep_basic_args = yaml.safe_load(fp)

    return vep_basic_args
Example #17
    def __init__(self, features, hidden_size):
        super(Binary_Network, self).__init__()

        with res.open_binary('Titanicbc', 'config.yaml') as fp:
            model_parameters = yaml.load(fp, Loader=yaml.Loader)

        self.weight_init = model_parameters['Binary_Network'][
            'initialisations']['weight_init']
        self.linear1 = nn.Linear(features, hidden_size)
        self.linear2 = nn.Linear(hidden_size, hidden_size)
        self.linear3 = nn.Linear(hidden_size, hidden_size)

        self.output_layer = nn.Linear(hidden_size, 1)

        if self.weight_init.lower() == 'xavier':
            torch.nn.init.xavier_uniform_(self.linear1.weight)
            torch.nn.init.xavier_uniform_(self.linear2.weight)
            torch.nn.init.xavier_uniform_(self.linear3.weight)
            torch.nn.init.xavier_uniform_(self.output_layer.weight)

        elif self.weight_init.lower() == 'uniform':
            torch.nn.init.uniform_(self.linear1.weight)
            torch.nn.init.uniform_(self.linear2.weight)
            torch.nn.init.uniform_(self.linear3.weight)
            torch.nn.init.uniform_(self.output_layer.weight)

        else:
            torch.nn.init.xavier_uniform_(self.linear1.weight)
            torch.nn.init.xavier_uniform_(self.linear2.weight)
            torch.nn.init.xavier_uniform_(self.linear3.weight)
            torch.nn.init.xavier_uniform_(self.output_layer.weight)
Example #18
 def __init__(self):
     # open font resource
     with resources.open_binary('aesthetic_ascii',
                                'RobotoMono-VariableFont_wght.ttf') as fp:
         font = fp.read()
     # initialize font
     self.font = ImageFont.truetype(io.BytesIO(font))
Example #19
def load_data():
    """Load up the pickled interpolator."""
    global atmosphere_interpolator

    filename = "Kurucz_grid_interpolator.pickle"
    atmosphere_interpolator = pickle.load(
        pkg_resources.open_binary(data, filename))
Example #20
    def testRendersSimpleVoxelGrid(self):
        voxel_grid = t.as_tensor(
            [
                # z = 0
                [
                    [1, 0, 1],  # y = 0
                    [0, 0, 0],  # y = 1
                    [1, 0, 1],  # y = 2
                ],
                # z = 1
                [
                    [0, 0, 0],  # y = 0
                    [0, 1, 0],  # y = 1
                    [0, 0, 0],  # y = 2
                ],
                # z = 2
                [
                    [0, 1, 0],  # y = 0
                    [1, 1, 1],  # y = 1
                    [0, 1, 0],  # y = 2
                ],
            ],
            dtype=t.int32)

        # Create a camera that looks at the voxel grid center from the side. The
        # 0.5 offset is required, since the voxel grid occupies the unit cube,
        # and its center is at  (0.5, 0.5, 0.5)
        look_at = transformations.look_at_rh(
            (-1.2 + 0.5, -1.5 + 0.5, -0.5 + 0.5), (0.5, 0.5, 0.5), (0, 1, 0))
        perspective = transformations.perspective_rh(70 * math.pi / 180, 1,
                                                     0.1, 10.0)
        model_view_matrix = np.matmul(perspective, look_at)

        image = voxel_renderer.render_voxel_grid(
            voxel_grid,
            model_view_matrix,
            (256, 256),
            # Scale down the voxel grid to fit in the unit cube
            transformations.scale((1.0 / 3, ) * 3),
            # Material 0 is transparent, 1 is red
            ((-1, 0, 0), (1.0, 0, 0)),
            # Place the light source at the camera
            light_position=(-1.2 + 0.5, -1.5 + 0.5, -1 + 0.5),
            ambient_light_color=(0.0, 0.0, 0.0),
        )
        image = image.numpy()

        PIL.Image.fromarray(image).save("/tmp/tt/vv.png")

        with resources.open_binary(test_data,
                                   "expected_image_voxels.png") as in_file:
            pil_image = PIL.Image.open(in_file)
            expected_image = np.array(pil_image)[..., :3]

        self.assertEqual(image.dtype, np.uint8)
        self.assertEqual(tuple(image.shape), tuple(expected_image.shape))
        difference_l1 = np.abs(
            image.astype(np.int64) - expected_image.astype(np.int64)).sum()
        self.assertAlmostEqual(difference_l1, 0, delta=1024)  # tolerate a small L1 difference
Example #21
 def __init__(self):
     self.base, self.texid = 0, 0
     self.width, self.height = 0, 0
     self.characters = []
     self.initgl()
     self.fontsize = 14
     self.makefont(resources.open_binary('plots.res', 'DejaVuSans.ttf'),
                   self.fontsize)
Example #22
def xslt():
    with open_binary("nextbus.populate", "tnds.xslt") as file_:
        xslt = et.XSLT(et.parse(file_))
    # Replicate functions which check for existing stops
    setup_mock_stop_exists()
    setup_service_codes()

    return xslt
Example #23
    def op_predict(self):

        with res.open_binary('Titanicbc', 'config.yaml') as fp:
            model_parameters = yaml.load(fp, Loader=yaml.Loader)

        with res.open_binary('Titanicbc', 'test.csv') as test:
            test_predict = pd.read_csv(test)

        prev_hidden_dim = model_parameters['Binary_Network'][
            'initialisations']['hidden_dim']

        with res.path('Titanicbc', 'trained_model.pth') as m:
            model_path = m

        model = Binary_Network.Binary_Network(self.input_dim, prev_hidden_dim)
        model = Binary_Network.load_models(model_path, model).to(self.device)
        Binary_Network.predict(model, test_predict)
Example #24
def getDemoClassList():
    """A classlist for demos.

    returns:
        pandas.dataframe: the classlist as a Pandas dataframe.
    """
    with resources.open_binary(plom, "demoClassList.csv") as f:
        return pandas.read_csv(f)
Example #25
 def set_fuente(self, ruta_paquete, nombre_archivo_ttf_u_otf):
     """Change the active font.

         :param ruta_paquete: package path as dot-separated names (e.g. utilidades.imagenes.fuentes)
         :param nombre_archivo_ttf_u_otf: name of the TTF or OTF font file
     """
     with open_binary(ruta_paquete, nombre_archivo_ttf_u_otf) as fichero:
         self.font = ImageFont.truetype(fichero)
Example #26
def read_vep_predefined_custom_annot():
    import yaml

    with pkg_resources.open_binary(vep.__name__,
                                   'predefined_custom_annot.yaml') as fp:
        vep_predefined_custom_annot = yaml.safe_load(fp)

    return vep_predefined_custom_annot
Example #27
 def __init__(self) -> None:
     """Intialize the detector."""
     with open_binary('myanmartools.resources',
                      'zawgyiUnicodeModel.dat') as stream:
         self._chars = check_signature(stream)
         self._params = read_params(stream)
         # the 0 node is for foreign characters so mark as nan
         self._params[0] = nan
Example #28
 def from_resource(cls, file):
     """
     Construct a stemmer using stemming table from a given file in the
     stempel package.
     :param file: file containing stemming trie.
     :return: stemmer instance.
     """
     with pkg_resources.open_binary('stempel', file) as inp:
         return cls.from_stream(DataInputStream(inp))
Example #29
def load_fgd() -> FGD:
    """Extract the local copy of FGD data.

    This allows the analysis to not depend on local files.
    """

    from lzma import LZMAFile
    with LZMAFile(open_binary(srctools, 'fgd.lzma')) as f:
        return FGD.unserialise(f)
Example #30
def list_hxb2():
    """
    Returns the ordered list of HXB2 coordinates in the prepackaged HMM files.
    """
    hxb2 = []
    for gene in genes:
        with resources.open_binary("hivmmer", "{}.hxb2.tsv".format(gene)) as f:
            hxb2 += pd.read_csv(f, sep="\t", usecols=["hxb2"]).hxb2.tolist()
    return hxb2
Example #31
 def test_open_binary(self):
     with resources.open_binary(self.data, 'binary.file') as fp:
         result = fp.read()
         self.assertEqual(result, b'\x00\x01\x02\x03')
Example #32
 def execute(self, package, path):
     with resources.open_binary(package, path):
         pass
Example #33
def resource_bytesio(filename):
    return open_binary('pre_commit.resources', filename)
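Since this helper hands back an open binary handle, the caller owns it and should close it. A usage sketch with a hypothetical resource name:

# The filename below is hypothetical; close the handle with a with-statement.
with resource_bytesio("some-template.txt") as f:
    contents = f.read()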
Example #34
 def test_open_binary(self):
     with resources.open_binary(self.data, 'utf-8.file') as fp:
         result = fp.read()
         self.assertEqual(result, b'Hello, UTF-8 world!\n')