def run_setup(exts): setup( name='spacy', packages=['spacy', 'spacy.en', 'spacy.syntax'], description="Industrial-strength NLP", author='Matthew Honnibal', author_email='*****@*****.**', version='0.33', url="http://honnibal.github.io/spaCy/", package_data={ "spacy": ["*.pxd"], "spacy.en": [ "*.pxd", "data/pos/*", "data/wordnet/*", "data/tokenizer/*", "data/vocab/lexemes.bin", "data/vocab/strings.txt" ], "spacy.syntax": ["*.pxd"] }, ext_modules=exts, license="Dual: Commercial or AGPL", install_requires=[ 'numpy', 'murmurhash', 'cymem', 'preshed', 'thinc', "unidecode", 'wget' ], setup_requires=["headers_workaround"], ) import headers_workaround headers_workaround.fix_venv_pypy_include() headers_workaround.install_headers('murmurhash') headers_workaround.install_headers('numpy')
def run_setup(exts):
    # VERSION and build_ext_subclass are module-level names defined earlier in setup.py.
    setup(
        name='spacy',
        packages=['spacy', 'spacy.tokens', 'spacy.en', 'spacy.serialize',
                  'spacy.syntax', 'spacy.munge'],
        description="Industrial-strength NLP",
        author='Matthew Honnibal',
        author_email='*****@*****.**',
        version=VERSION,
        url="http://honnibal.github.io/spaCy/",
        package_data={
            "spacy": ["*.pxd"],
            "spacy.en": [
                "*.pxd",
                "data/pos/*",
                "data/wordnet/*",
                "data/tokenizer/*",
                "data/vocab/lexemes.bin",
                "data/vocab/serializer.json",
                "data/vocab/oov_prob",
                "data/vocab/strings.txt"
            ],
            "spacy.syntax": ["*.pxd"]
        },
        ext_modules=exts,
        license="MIT",
        install_requires=[
            'numpy',
            'murmurhash',
            'cymem >= 1.11',
            'preshed >= 0.42',
            'thinc == 3.3',
            "text_unidecode",
            'plac',
            'six',
            'ujson',
            'cloudpickle'
        ],
        setup_requires=["headers_workaround"],
        cmdclass={'build_ext': build_ext_subclass},
    )
    import headers_workaround

    headers_workaround.fix_venv_pypy_include()
    headers_workaround.install_headers('murmurhash')
    headers_workaround.install_headers('numpy')
def run_setup(exts): setup( name='spacy', packages=['spacy', 'spacy.en', 'spacy.syntax'], description="Industrial-strength NLP", author='Matthew Honnibal', author_email='*****@*****.**', version='0.83', url="http://honnibal.github.io/spaCy/", package_data={"spacy": ["*.pxd"], "spacy.en": ["*.pxd", "data/pos/*", "data/wordnet/*", "data/tokenizer/*", "data/vocab/lexemes.bin", "data/vocab/strings.txt"], "spacy.syntax": ["*.pxd"]}, ext_modules=exts, license="Dual: Commercial or AGPL", install_requires=['numpy', 'murmurhash', 'cymem >= 1.11', 'preshed', 'thinc', "unidecode", 'wget', 'plac', 'six'], setup_requires=["headers_workaround"], ) import headers_workaround headers_workaround.fix_venv_pypy_include() headers_workaround.install_headers('murmurhash') headers_workaround.install_headers('numpy')
def run_setup(exts):
    setup(
        name='spacy',
        packages=['spacy', 'spacy.tokens', 'spacy.en', 'spacy.serialize',
                  'spacy.syntax', 'spacy.munge'],
        description="Industrial-strength NLP",
        author='Matthew Honnibal',
        author_email='*****@*****.**',
        version=VERSION,
        url="http://honnibal.github.io/spaCy/",
        package_data={
            "spacy": ["*.pxd"],
            "spacy.en": [
                "*.pxd",
                "data/pos/*",
                "data/wordnet/*",
                "data/tokenizer/*",
                "data/vocab/lexemes.bin",
                "data/vocab/serializer.json",
                "data/vocab/oov_prob",
                "data/vocab/strings.txt"
            ],
            "spacy.syntax": ["*.pxd"]
        },
        ext_modules=exts,
        license="MIT",
        install_requires=[
            'numpy',
            'murmurhash',
            'cymem >= 1.11',
            'preshed == 0.41',
            'thinc == 3.3',
            "text_unidecode",
            'wget',
            'plac',
            'six',
            'ujson'
        ],
        setup_requires=["headers_workaround"],
    )
    import headers_workaround

    headers_workaround.fix_venv_pypy_include()
    headers_workaround.install_headers('murmurhash')
    headers_workaround.install_headers('numpy')
def test_numpy(headers_dir):
    headers_workaround.install_headers('numpy', include_dir=headers_dir)
    assert dir_exists(headers_dir)
    assert dir_exists(path.join(headers_dir, 'numpy'))
    # Test some arbitrary files --- if any break, add them to the test later...
    assert file_exists(path.join(headers_dir, 'numpy', 'ndarrayobject.h'))
    assert file_exists(path.join(headers_dir, 'numpy', 'npy_endian.h'))
    assert file_exists(path.join(headers_dir, 'numpy', 'npy_math.h'))
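# --- Assumed test helpers (not part of the snippets above) -----------------
# test_numpy() and test_murmurhash() call dir_exists()/file_exists() and use
# os.path and headers_workaround; a minimal sketch of what those helpers
# could look like follows. The names mirror the calls above, but these
# definitions are an assumption, not the package's actual test utilities.
from os import path

import headers_workaround


def dir_exists(loc):
    # True if loc exists and is a directory.
    return path.isdir(loc)


def file_exists(loc):
    # True if loc exists and is a regular file.
    return path.isfile(loc)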
def run_setup(exts): setup( name="spacy", packages=[ "spacy", "spacy.tokens", "spacy.en", "spacy.serialize", "spacy.syntax", "spacy.munge", "spacy.tests", "spacy.tests.matcher", "spacy.tests.morphology", "spacy.tests.munge", "spacy.tests.parser", "spacy.tests.serialize", "spacy.tests.spans", "spacy.tests.tagger", "spacy.tests.tokenizer", "spacy.tests.tokens", "spacy.tests.vectors", "spacy.tests.vocab", ], description="Industrial-strength NLP", author="Matthew Honnibal", author_email="*****@*****.**", version=VERSION, url="http://honnibal.github.io/spaCy/", package_data=PACKAGE_DATA, ext_modules=exts, license="MIT", install_requires=[ "numpy", "murmurhash", "cymem == 1.30", "preshed == 0.43", "thinc == 4.0.0", "text_unidecode", "plac", "six", "ujson", "cloudpickle", ], setup_requires=["headers_workaround"], cmdclass={"build_ext": build_ext_subclass}, ) import headers_workaround headers_workaround.fix_venv_pypy_include() headers_workaround.install_headers("murmurhash") headers_workaround.install_headers("numpy")
def run_setup(exts): setup( name="spacy", packages=["spacy", "spacy.tokens", "spacy.en", "spacy.serialize", "spacy.syntax", "spacy.munge"], description="Industrial-strength NLP", author="Matthew Honnibal", author_email="*****@*****.**", version=VERSION, url="http://honnibal.github.io/spaCy/", package_data={ "spacy": ["*.pxd"], "spacy.en": [ "*.pxd", "data/pos/*", "data/wordnet/*", "data/tokenizer/*", "data/vocab/lexemes.bin", "data/vocab/serializer.json", "data/vocab/oov_prob", "data/vocab/strings.txt", ], "spacy.syntax": ["*.pxd"], }, ext_modules=exts, license="MIT", install_requires=[ "numpy", "murmurhash", "cymem >= 1.11", "preshed >= 0.41", "thinc == 3.3", "text_unidecode", "wget", "plac", "six", "ujson", "cloudpickle", ], setup_requires=["headers_workaround"], ) import headers_workaround headers_workaround.fix_venv_pypy_include() headers_workaround.install_headers("murmurhash") headers_workaround.install_headers("numpy")
def run_setup(exts):
    setup(
        name='thinc',
        packages=['thinc'],
        version='2.0',
        description="Learn sparse linear models",
        author='Matthew Honnibal',
        author_email='*****@*****.**',
        url="http://github.com/syllog1sm/thinc",
        package_data={"thinc": ["*.pyx", "*.pxd", "*.pxi"]},
        ext_modules=exts,
        install_requires=["murmurhash", "cymem", "preshed"],
        setup_requires=["headers_workaround"]
    )
    import headers_workaround

    headers_workaround.fix_venv_pypy_include()
    headers_workaround.install_headers('murmurhash')
def run_setup(exts):
    setup(
        name='spacy',
        packages=['spacy', 'spacy.tokens', 'spacy.en', 'spacy.serialize',
                  'spacy.syntax', 'spacy.munge', 'spacy.tests',
                  'spacy.tests.matcher', 'spacy.tests.morphology',
                  'spacy.tests.munge', 'spacy.tests.parser',
                  'spacy.tests.serialize', 'spacy.tests.spans',
                  'spacy.tests.tagger', 'spacy.tests.tokenizer',
                  'spacy.tests.tokens', 'spacy.tests.vectors',
                  'spacy.tests.vocab'],
        description="Industrial-strength NLP",
        author='Matthew Honnibal',
        author_email='*****@*****.**',
        version=VERSION,
        url="http://honnibal.github.io/spaCy/",
        package_data=PACKAGE_DATA,
        ext_modules=exts,
        license="MIT",
        install_requires=[
            'numpy',
            'murmurhash',
            'cymem == 1.30',
            'preshed == 0.44',
            'thinc == 4.0.0',
            "text_unidecode",
            'plac',
            'six',
            'ujson',
            'cloudpickle'
        ],
        setup_requires=["headers_workaround"],
        cmdclass={'build_ext': build_ext_subclass},
    )
    import headers_workaround

    headers_workaround.fix_venv_pypy_include()
    headers_workaround.install_headers('murmurhash')
    headers_workaround.install_headers('numpy')
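# --- Illustrative driver (not from the original setup.py) ------------------
# A minimal sketch of how run_setup() above might be invoked: build the list
# of compiled extension modules first, then pass it in. MOD_NAMES, make_exts,
# and the use of pre-generated .cpp sources are assumptions for illustration;
# the real setup.py constructs its extension list elsewhere.
from setuptools import Extension

MOD_NAMES = ['spacy.strings', 'spacy.attrs']   # hypothetical subset


def make_exts(mod_names):
    # One Extension per Cython-generated C++ source, e.g. spacy/strings.cpp.
    return [Extension(name, [name.replace('.', '/') + '.cpp'], language='c++')
            for name in mod_names]


if __name__ == '__main__':
    run_setup(make_exts(MOD_NAMES))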
def test_murmurhash(headers_dir):
    headers_workaround.install_headers('murmurhash', include_dir=headers_dir)
    assert dir_exists(headers_dir)
    assert dir_exists(path.join(headers_dir, 'murmurhash'))
    assert file_exists(path.join(headers_dir, 'murmurhash', 'MurmurHash2.h'))
    assert file_exists(path.join(headers_dir, 'murmurhash', 'MurmurHash3.h'))
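# --- Assumed pytest fixture (not shown above) -------------------------------
# Both tests take a headers_dir argument; a plausible fixture supplying a
# temporary include directory might look like this. It is a sketch, not the
# test suite's actual conftest.py.
import pytest


@pytest.fixture
def headers_dir(tmpdir):
    # tmpdir is pytest's built-in per-test temporary directory fixture.
    return str(tmpdir.mkdir('include'))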