def test_dict_to_string(self):
    net_settings = NetworkSettings(NET_SETS)
    output = utils.dict_objects_to_str(net_settings)
    assert_is_instance(output, dict)
    for k, v in output.items():
        assert_is_instance(k, str)
        assert_not_is_instance(v, ipaddress.IPv4Address)

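# For context, a minimal sketch of what a dict_objects_to_str helper like
# the one tested above might do, assuming it recursively renders
# non-primitive values such as ipaddress.IPv4Address as plain strings.
# Illustration only; the real utils.dict_objects_to_str is not reproduced
# here.
import ipaddress


def dict_objects_to_str_sketch(obj):
    if isinstance(obj, dict):
        # stringify keys and recurse into values
        return {str(k): dict_objects_to_str_sketch(v)
                for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [dict_objects_to_str_sketch(v) for v in obj]
    if isinstance(obj, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
        # render address objects as plain strings
        return str(obj)
    return obj
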
def add_test_yaml_files(cls):
    """Add in dodgy yaml files in an extra nbextensions dir."""
    cls.jupyter_dirs['dodgy'] = {
        'nbexts': os.path.join(cls.jupyter_dirs['root'], 'dodgy', 'nbext')}
    dodgy_nbext_dir_path = cls.jupyter_dirs['dodgy']['nbexts']
    if not os.path.exists(dodgy_nbext_dir_path):
        os.makedirs(dodgy_nbext_dir_path)
    cls.config.NotebookApp.setdefault(
        'extra_nbextensions_path', []).append(dodgy_nbext_dir_path)

    # an invalid yaml file
    yaml_path_invalid = os.path.join(
        dodgy_nbext_dir_path, 'nbext_invalid_yaml.yaml')
    with io.open(yaml_path_invalid, 'w') as f:
        f.write('not valid yaml!: [')

    # various test yaml files
    test_yamls = {
        # we use str() because otherwise python2 will write
        # !!python/unicode in the yaml, and pyyaml SafeLoader will refuse
        # to reconstruct the objects :( boo!
        'not_an_nbext': [
            str('valid yaml'),
            str("doesn't always"),
            str('make for a valid nbext yaml, right?'),
            3423509],
        'missing_key': {str('Main'): True},
        'invalid_type': {
            str('Main'): str('main.js'),
            str('Type'): str('blahblahblah'),
        },
        'dummy': {
            str('Main'): str('dummy.js'),
            str('Type'): str('Jupyter Notebook Extension'),
            str('Description'): str('This is a dumb dummy description'),
            str('Compatibility'): str('4.x 5.x'),
        },
    }
    for fname, yaml_obj in test_yamls.items():
        if fname != 'dummy':
            nt.assert_not_is_instance(
                jupyter_nbextensions_configurator._process_nbextension_spec(yaml_obj),  # noqa: E501
                dict)
        yaml_path = os.path.join(dodgy_nbext_dir_path, fname + '.yaml')
        with io.open(yaml_path, 'w') as f:
            yaml.dump(yaml_obj, f, default_flow_style=False)

    # a yaml file which shadows an existing nbextension
    nbdir = os.path.join(
        os.path.dirname(jupyter_nbextensions_configurator.__file__),
        'static')
    nbexts = (
        jupyter_nbextensions_configurator.get_configurable_nbextensions(
            [nbdir], as_dict=True))
    src = random.choice(list(nbexts.values()))['yaml_path']
    dst = os.path.join(
        dodgy_nbext_dir_path, os.path.relpath(src, start=nbdir))
    dst_dir = os.path.dirname(dst)
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    shutil.copy(src, dst)

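# Hypothetical sketch of the kind of validation the dodgy yaml files above
# are built to fail, assuming a valid spec must be a dict with a string
# 'Main' entry and the exact nbextension 'Type'. The real
# _process_nbextension_spec lives in jupyter_nbextensions_configurator and
# is not reproduced here.
def process_nbextension_spec_sketch(spec):
    if not isinstance(spec, dict):
        return None  # e.g. the 'not_an_nbext' list above
    if not isinstance(spec.get('Main'), str):
        return None  # e.g. 'missing_key', where Main is a bool
    if spec.get('Type') != 'Jupyter Notebook Extension':
        return None  # e.g. 'invalid_type'
    return spec
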
def test_sequence_request2(self):
    # Request using class name, no views
    self.workbench.register(PulsesManagerManifest())
    core = self.workbench.get_plugin(u'enaml.workbench.core')
    com = u'hqc_meas.pulses.sequences_request'
    kwargs = {'sequences': ['ConditionalSequence'],
              'use_class_names': True,
              'views': False}
    sequences, miss = core.invoke_command(com, kwargs, self)

    assert_equal(list(sequences.keys()), ['ConditionalSequence'])
    assert_not_is_instance(sequences['ConditionalSequence'], tuple)
    assert_equal(miss, [])

def test_context_request2(self):
    # Request using class name, no views
    self.workbench.register(PulsesManagerManifest())
    core = self.workbench.get_plugin(u'enaml.workbench.core')
    com = u'hqc_meas.pulses.contexts_request'
    contexts, miss = core.invoke_command(com,
                                         {'contexts': ['AWGContext'],
                                          'use_class_names': True,
                                          'views': False},
                                         self)

    assert_equal(list(contexts.keys()), ['AWGContext'])
    assert_not_is_instance(contexts['AWGContext'], tuple)
    assert_equal(miss, [])

def test_slice(self):
    data = ['A', 'C', 'B', 'F']
    lbl = UniqueIndex(data)

    # row slice
    s = lbl[1:]
    assert_is_instance(s, UniqueIndex)

    # index
    s = lbl[0]
    assert_is_instance(s, str)
    assert_not_is_instance(s, UniqueIndex)

def test_slice_types(self):
    g = GenotypeArray(diploid_genotype_data, dtype='i1')

    # row slice
    s = g[1:]
    assert_is_instance(s, GenotypeArray)

    # col slice
    s = g[:, 1:]
    assert_is_instance(s, GenotypeArray)

    # row index
    s = g[0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, GenotypeArray)

    # col index
    s = g[:, 0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, GenotypeArray)

    # ploidy index
    s = g[:, :, 0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, GenotypeArray)

    # item
    s = g[0, 0, 0]
    assert_is_instance(s, np.int8)
    assert_not_is_instance(s, GenotypeArray)

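# Minimal sketch of the dimension-checking __getitem__ pattern these
# slice-type tests exercise, assuming a numpy.ndarray subclass: slices
# that keep the expected number of dimensions keep the wrapper class,
# while lower-dimensional results decay to a plain ndarray or scalar.
# Illustrative only; this is not scikit-allel's actual implementation.
import numpy as np


class Wrapped2D(np.ndarray):

    def __new__(cls, data):
        obj = np.asarray(data).view(cls)
        if obj.ndim != 2:
            raise ValueError('expected 2 dimensions')
        return obj

    def __getitem__(self, item):
        s = np.ndarray.__getitem__(self, item)
        if isinstance(s, np.ndarray) and s.ndim == 2:
            return s  # still 2-D: keep the wrapper class
        # lower-dimensional view or scalar: drop the wrapper
        return s.view(np.ndarray) if isinstance(s, np.ndarray) else s
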
def test_slice(self):
    data = [1, 4, 5, 5, 7, 12]
    idx = SortedIndex(data, dtype='u4')

    # row slice
    s = idx[1:]
    assert_is_instance(s, SortedIndex)

    # index
    s = idx[0]
    assert_is_instance(s, np.uint32)
    assert_not_is_instance(s, SortedIndex)
    eq(data[0], s)

def test_slice_types(self):
    ac = AlleleCountsArray(allele_counts_data, dtype='u1')

    # row slice
    s = ac[1:]
    assert_is_instance(s, AlleleCountsArray)

    # col slice
    s = ac[:, 1:]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, AlleleCountsArray)

    # row index
    s = ac[0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, AlleleCountsArray)

    # col index
    s = ac[:, 0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, AlleleCountsArray)

    # item
    s = ac[0, 0]
    assert_is_instance(s, np.uint8)
    assert_not_is_instance(s, AlleleCountsArray)

def test_get_item_types(self):
    vt = VariantTable(variant_table_data, dtype=variant_table_dtype)

    # row slice
    s = vt[1:]
    assert_is_instance(s, VariantTable)

    # row index
    s = vt[0]
    assert_is_instance(s, np.record)
    assert_not_is_instance(s, VariantTable)

    # col access
    s = vt['CHROM']
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, VariantTable)
    s = vt[['CHROM', 'POS']]
    assert_is_instance(s, VariantTable)

def test_get_item_types(self):
    ft = FeatureTable(feature_table_data, dtype=feature_table_dtype)

    # row slice
    s = ft[1:]
    assert_is_instance(s, FeatureTable)

    # row index
    s = ft[0]
    assert_is_instance(s, np.record)
    assert_not_is_instance(s, FeatureTable)

    # col access
    s = ft['seqid']
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, FeatureTable)
    s = ft[['seqid', 'start', 'end']]
    assert_is_instance(s, FeatureTable)

def test_slice_types(self):
    h = HaplotypeArray(haplotype_data, dtype='i1')

    # row slice
    s = h[1:]
    assert_is_instance(s, HaplotypeArray)

    # col slice
    s = h[:, 1:]
    assert_is_instance(s, HaplotypeArray)

    # row index
    s = h[0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, HaplotypeArray)

    # col index
    s = h[:, 0]
    assert_is_instance(s, np.ndarray)
    assert_not_is_instance(s, HaplotypeArray)

    # item
    s = h[0, 0]
    assert_is_instance(s, np.int8)
    assert_not_is_instance(s, HaplotypeArray)

def test_meta_dict_order(self):
    # In Py2 we cannot depend on the order of the declaration mapping
    # (it is the unordered built-in dict).
    # *Force* a mapping that will be "unordered"
    # (with an ordered dict -- but NOT the one used by the metaclass).
    default_dict = faraday.item.DeclarativeItemBase.__prepare__(
        'Broken', (faraday.Item,))
    tools.assert_not_is_instance(default_dict, OrderedDict)

    uid_field = faraday.HashKeyField(data_type=faraday.NUMBER)
    defn = [
        ('__module__', 'totes_mod'),  # (fake)
        ('token', faraday.RangeKeyField()),  # wuh oh! --
        ('user', faraday.ItemLinkField(self.User, db_key=uid_field)),
        ('uid', uid_field),
    ]
    attrs = OrderedDict(defn)
    tools.eq_(list(attrs.items()), defn)  # preserves (bad) order

    # There should be no KeyError:
    NotBroken = faraday.item.DeclarativeItemBase(
        'Broken', (faraday.Item,), attrs)
    tools.eq_(NotBroken._meta.links['user'].db_key, ('uid',))

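# Minimal sketch of the metaclass __prepare__ hook this test works around:
# returning an OrderedDict from __prepare__ makes the class-body namespace
# preserve declaration order (namespaces are ordered by default from
# Python 3.6 on). Illustrative only; faraday's DeclarativeItemBase is not
# reproduced here.
from collections import OrderedDict


class OrderedMeta(type):

    @classmethod
    def __prepare__(mcs, name, bases, **kwargs):
        # namespace mapping used while the class body executes
        return OrderedDict()

    def __new__(mcs, name, bases, namespace, **kwargs):
        cls = super(OrderedMeta, mcs).__new__(
            mcs, name, bases, dict(namespace))
        # record the attribute declaration order for later use
        cls._declared_order = list(namespace)
        return cls
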
def test_getitem(self):
    gv = GenotypeVector(diploid_genotype_data[0])

    # these should return the same type
    gs = gv[:]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[:, :]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[...]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[0:2]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[0:2, :]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[np.array([True, False, True], dtype=bool)]
    assert_is_instance(gs, GenotypeVector)
    gs = gv[[0, 2]]
    assert_is_instance(gs, GenotypeVector)

    # these should return plain array
    gs = gv[:, 0]
    assert_not_is_instance(gs, GenotypeVector)
    gs = gv[np.newaxis, :2, 0]  # change dimension semantics
    assert_not_is_instance(gs, GenotypeVector)

def test_not_is_instance(self):
    assert_not_is_instance(TestToolsBIT.mylist, tuple)

def _isinstance_invalid(self, text, error):
    assert_not_is_instance(text, tree.identifier)

def test_payload():
    payload = stats._payload(email)
    n.assert_not_is_instance(payload, builtins.str)
    for bomb in payload:
        n.assert_is_instance(bomb, Message)

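# For context, a minimal sketch of the payload normalization the test above
# asserts, assuming the stdlib email package: multipart messages yield a
# list of Message parts, while a single-part body (a plain str) gets
# wrapped so callers always receive Message objects. stats._payload itself
# is not shown in this section, so this is an illustration only.
from email.message import Message


def _payload_sketch(msg):
    payload = msg.get_payload()
    if isinstance(payload, str):
        # single-part message: wrap the str body in a Message
        part = Message()
        part.set_payload(payload)
        return [part]
    return payload
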