def create_release_project(project=project1, test_index=True):
    '''
    Create valid project for release
    '''
    project = project.copy()
    test_succeed_path = Path('operation/mittens/tests/test_succeed.py')
    project.files[test_succeed_path] = extra_files[test_succeed_path]
    if test_index:
        project.project_py['index_test'] = 'pypitest'
    create_project(project)
    
    git_('add', '.')
    mkproject()
    git_['commit', '-m', 'Initial commit'] & pb.FG
    
    # Create repo and use as remote
    path = Path.cwd() / '.cache'  # some location ignored by .gitignore
    path.mkdir()
    path /= 'other_repo'
    path.mkdir()
    with pb.local.cwd(str(path)):
        git_('init', '--bare')
    git_('remote', 'add', 'origin', path.as_uri())
    git_('push', '--set-upstream', 'origin', 'master')
    git_['status'] & pb.FG
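# Hypothetical usage sketch (an assumption, not part of the original suite):
# a release test could start from this fixture and first check that the bare
# remote wired up above is reachable before exercising the release command.
def test_release_project_fixture(tmpcwd):
    create_release_project()
    assert 'origin' in git_('remote')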
 def test_no_ignore(self, tmpcwd):
     '''
     When well-behaved pre_commit_no_ignore, copy matched files to pre-commit
     tmp dir
     '''
     project = self.project
     project.project_py['pre_commit_no_ignore'] = ['operation/mittens/test/mah_*']
     project.files[Path('operation/mittens/test/mah_file')] = 'content'
     project.files[Path('operation/mittens/test/mah_dir/some_file')] = 'some file content'
     project.files[Path('operation/mittens/test/test_it.py')] = dedent('''\
         from pathlib import Path
         def test_it():
             # file is there
             dir = Path(__file__).parent
             with (dir / 'mah_file').open('r') as f:
                 assert f.read() == 'content'
                 
             # recursively copied directory is there
             with (dir / 'mah_dir/some_file').open('r') as f:
                 assert f.read() == 'some file content'
         ''')
     create_project(project)
     mkproject()  # install pre-commit hook
     git_('add', '.')
     git_('reset', 'operation/mittens/test/mah_file')
     git_('reset', 'operation/mittens/test/mah_dir')
     git_('commit', '-m', 'message') # run pre-commit
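# Minimal sketch (an assumption, not the project's actual hook code) of the copy
# step this test exercises: each `pre_commit_no_ignore` pattern is expanded
# against the working tree and the matches are mirrored into the temporary
# checkout the pre-commit hook runs the tests in, directories recursively.
def _copy_no_ignore(repo_root, tmp_checkout, patterns):
    import shutil
    for pattern in patterns:
        for source in repo_root.glob(pattern):
            destination = tmp_checkout / source.relative_to(repo_root)
            destination.parent.mkdir(parents=True, exist_ok=True)
            if source.is_dir():
                shutil.copytree(source, destination)
            else:
                shutil.copy(source, destination)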
def test_wrong_readme_file_name(tmpcwd, name):
    project = project1.copy()
    project.project_py['readme_file'] = name
    create_project(project)
    Path(name).touch()
    
    with assert_process_fails(stderr_matches='readme_file'):
        mkproject()
def test_idempotent(tmpcwd):
    '''
    Running ct-mkproject twice has the same effect as running it once, in all cases
    '''
    create_project()
    mkproject & pb.FG
    with assert_directory_contents(Path('.'), changed=False):
        mkproject()
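# Sketch of an `assert_directory_contents` helper like the one used above (an
# assumption; the suite's real helper lives in its test utilities). It hashes
# the directory before and after the `with` body, here via checksumdir, which
# this suite already uses as a test dependency.
from contextlib import contextmanager
from checksumdir import dirhash

@contextmanager
def _assert_directory_contents(path, changed=False):
    before = dirhash(str(path))
    yield
    after = dirhash(str(path))
    assert (before != after) if changed else (before == after)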
 def test_undefined(self, tmpcwd):
     '''
     When project.py does not define `project`, abort
     '''
     create_project()
     write_file('project.py', '')
     with assert_process_fails(stderr_matches='must export a `project` variable'):
         mkproject()
 def test_has_unknown_attr(self, tmpcwd, unknown_attr):
     '''
     When `project` contains an unknown attribute, abort
     '''
     project = project1.copy()
     project.project_py[unknown_attr] = 'value'
     create_project(project)
     with assert_process_fails(stderr_matches=unknown_attr):
         mkproject()
 def test_missing_required_attr(self, tmpcwd, required_attr):
     '''
     When `project` lacks a required attribute, abort
     '''
     project = project1.copy()
     del project.project_py[required_attr]
     create_project(project)
     with assert_process_fails(stderr_matches='Missing.+{}'.format(required_attr)):
         mkproject()
 def test_unpinned_sip_dependency(self, tmpcwd):
     '''
     When sip based dependency is not pinned, error
     '''
     project = project1.copy()
     project.files[Path('requirements.in')] = 'pytest\nPyQt5\n'
     create_project(project)
     
     with assert_process_fails(stderr_matches=r"(?i)'PyQt5' .* pin"):
         mkproject()
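 # For contrast with the unpinned input above: a requirements.in that the sip
 # check should accept pins the sip based package to an exact version, e.g.
 # (version chosen purely for illustration):
 #
 #     project.files[Path('requirements.in')] = 'pytest\nPyQt5==5.5.1\n'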
 def test_attr_has_invalid_value(self, tmpcwd, attr, value):
     '''
     When an attr value is None, empty or whitespace-only, abort

     When the name contains dashes or whitespace, abort
     '''
     project = project1.copy()
     project.project_py[attr] = value
     create_project(project)
     with assert_process_fails(stderr_matches=attr):
         mkproject()
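 # The `attr`/`value` parameters here (and `required_attr`, `unknown_attr` and
 # `name` in the tests above) are supplied by pytest parametrization; the real
 # value lists live with the suite's fixtures. A sketch of what that could look
 # like, with illustrative values only:
 #
 #     @pytest.mark.parametrize('attr, value', [
 #         ('name', None),
 #         ('name', '   '),
 #         ('name', 'has-dashes'),
 #         ('name', 'has whitespace'),
 #     ])
 #     def test_attr_has_invalid_value(self, tmpcwd, attr, value):
 #         ...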
def test_mkdoc(tmpcwd):
    '''When happy days and a file with proper docstring, generate ./doc'''
    # Setup
    project = project1.copy()
    description = 'Meow meow n meow meow meow'
    add_docstring(project, description)
    create_project(project)
    
    # Run
    mkproject()
    pb.local['ct-mkdoc']()
    
    # Assert
    content = read_file('docs/build/html/index.html')
    assert '0.0.0' in content  # correct version
    assert project.project_py['human_friendly_name'] in content  # human friendly project name
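# Sketch of what the `add_docstring` helper used above might do (an assumption,
# not the suite's actual helper): store the description as the package
# docstring so the generated documentation picks it up as the project summary.
def _add_docstring(project, description):
    project.files[Path('operation/mittens/__init__.py')] = "'''\n{}\n'''\n".format(description)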
 def test_happy_days(self, tmpcwd):
     '''
     If all well, commit
     '''
     project = self.project
     create_project(project)
     mkproject & pb.FG  # install pre-commit hook
     git_('add', '.')
     
     project.project_py['entry_points'] = {
         'console_scripts': [
             'mycli = operation.mittens.main:main',
         ],
     }
     project.files[Path('operation/mittens/main.py')] = 'def main(): pass'
     update_project(project)  # without staging it
     mkproject()
     
     git_('commit', '-m', 'message')  # runs the hook
     # mycli should be available in the venv even though it wasn't part of the commit
     # (it shouldn't have been available in the venv during the pre-commit test run)
     pb.local['sh']('-c', '. venv/bin/activate; mycli')
 def test_missing_file(self, tmpcwd, missing_path, missing_requirements):
     '''
     Test handling of missing files:
     
     - When files are missing, create them if allowed, error otherwise
     - When files are present and may not be updated, they are left untouched
     '''
     create_project()
     remove_file(missing_path)
     if missing_requirements.permission == Permission.none:
         # When can't create, must raise error
         with assert_process_fails(stderr_matches=missing_path.name):
             mkproject()
     else:
         # When can create, create
         with ExitStack() as contexts:
             for file, requirements in project_file_requirements.items():
                 if missing_path != file and missing_path not in file.parents and requirements.permission <= Permission.create:
                     contexts.enter_context(assert_file_access(file, written=False, contents_changed=False))
             mkproject & pb.FG
         assert missing_path.exists()
         content = read_file(missing_path)
         missing_requirements.verify_default_content(content, project1.format_kwargs)
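 # The comparison `requirements.permission <= Permission.create` above implies
 # Permission is an ordered enum. A sketch of such a type (an assumption about
 # the helper module; member names beyond `none` and `create` are illustrative):
 #
 #     from enum import IntEnum
 #
 #     class Permission(IntEnum):
 #         none = 0       # ct-mkproject may not create or touch the file
 #         create = 1     # may create the file if it is missing
 #         update = 2     # may update the file in place
 #         overwrite = 3  # may overwrite the file entirely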
 def test_setup_py(self, tmpcwd):
     '''
     Test generated setup.py and requirements.txt
     
     - install_requires: requirements.in transformed into valid dependency list with version specs maintained
     - long_description present and nonempty
     - classifiers: list of str
     - packages: list of str of packages
     - package_data: dict of package -> list of str of data file paths
     - author, author_email, description, entry_points, keywords, license, name, url: exactly the same as the input
     '''
     project = project1.copy()
     project.project_py['entry_points'] = project_defaults['entry_points']
     add_complex_requirements_in(project)
     project.files[Path('my_extra_requirements.in')] = 'checksumdir\npytest-pep8\n'
     del project.files[Path('test_requirements.in')]
     
     # Create package_data in operation/mittens/tests (package data may also live outside test packages):
     project.files[Path('operation/mittens/tests/data/subdir/file1')] = ''
     project.files[Path('operation/mittens/tests/data/subdir/file2')] = ''
     project.files[Path('operation/mittens/tests/not_data/file')] = ''
     project.files[Path('operation/mittens/tests/not_data/data/file')] = ''
     project.files[Path('operation/mittens/tests/pkg/__init__.py')] = ''
     project.files[Path('operation/mittens/tests/pkg/data/file')] = ''
     
     # Create
     create_project(project)
     
     # Run
     mkproject & pb.FG
     
     # Assert setup.py args
     setup_args = get_setup_args()
     
     for attr in ('name', 'author', 'author_email', 'description', 'keywords', 'license', 'url'):
         assert setup_args[attr] == project.project_py[attr].strip()
     assert setup_args['entry_points'] == project.project_py['entry_points']
         
     assert setup_args['long_description'].strip()
     assert set(setup_args['classifiers']) == {'Development Status :: 2 - Pre-Alpha', 'Programming Language :: Python :: Implementation :: Stackless'}
     assert set(setup_args['packages']) == {'operation', 'operation.mittens', 'operation.mittens.tests', 'operation.mittens.tests.pkg'}
     assert {k:set(v) for k,v in setup_args['package_data'].items()} == {
         'operation.mittens.tests' : {'data/subdir/file1', 'data/subdir/file2'},
         'operation.mittens.tests.pkg' : {'data/file'},
     }
     assert set(setup_args['install_requires']) == {'pytest', 'pytest-testmon<5.0.0', 'pytest-env==0.6', 'pkg4', 'pytest-cov'}
     assert set(setup_args['extras_require'].keys()) == {'my_extra', 'test', 'dev'}
     assert set(setup_args['extras_require']['my_extra']) == {'checksumdir', 'pytest-pep8'}
     assert set(setup_args['extras_require']['test']) == set(spec.test_requirements_in)
     assert setup_args['version'] == '0.0.0'
     
     # requirements.txt must contain relevant packages, including optional dependencies
     requirements_txt_content = read_file('requirements.txt')
     for name in {'pytest', 'pytest-testmon', 'pytest-env', 'pkg_magic', 'pytest-cov', 'checksumdir', 'pytest-pep8'} | set(spec.test_requirements_in) | set(spec.dev_requirements_in):
         assert name in requirements_txt_content
          
     # TODO: tests might be simplified a bit since we no longer care about *.in order; in the future we may drop the *requirements.in files altogether
     
     # Requirements.txt must be sorted like pip-compile
     # TODO: hard to test; the exact order used is https://github.com/nvie/pip-tools/blob/master/piptools/writer.py#L27
     # Perhaps use 2 local setup.py packages and 2 trivial PyPI packages that have no dependencies and are unlikely to gain any
     deps_txt = [get_dependency_name(line[0], line[1]) for line in parse_requirements_file(Path('requirements.txt')) if line[1]]
     
     # and must contain the dependencies of all *requirements.in files
     for path in map(Path, ('requirements.in', 'my_extra_requirements.in', 'test_requirements.in')):
         deps_in = [get_dependency_name(line[0], line[1]) for line in parse_requirements_file(path) if line[1]]
         assert set(deps_in).issubset(set(deps_txt))
         
     # Multiple runs yield the same setup.py each time
     with open('setup.py') as f:
         expected = f.read()
     for _ in range(5):
         mkproject()
         with open('setup.py') as f:
             actual = f.read()
         assert actual == expected
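# Plausible sketch of a `get_setup_args` helper like the one used above (an
# assumption, not necessarily how this suite implements it): execute the
# generated setup.py with `setuptools.setup` replaced by a stub that records
# the keyword arguments it was called with.
def _get_setup_args(setup_py=Path('setup.py')):
    import setuptools
    captured = {}
    original_setup = setuptools.setup
    setuptools.setup = lambda **kwargs: captured.update(kwargs)
    try:
        source = compile(setup_py.read_text(), str(setup_py), 'exec')
        exec(source, {'__name__': '__main__', '__file__': str(setup_py)})
    finally:
        setuptools.setup = original_setup
    return captured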