コード例 #1
0
def svn_remote(parentdir, scope='session'):
    """Create an empty svn repository under *parentdir*, used as a remote.

    Returns the repository path as a string.
    """
    repo_dir = str(parentdir.join('server_dir'))
    run(['svnadmin', 'create', repo_dir])
    return repo_dir
コード例 #2
0
File: test_svn.py — Project: jcfr/libvcs
def svn_dummy_repo_dir(tmpdir_repoparent, scope='session'):
    """Initialize a bare svn repository to serve as a test remote.

    Returns the new repository's path as a string.
    """
    target = tmpdir_repoparent.join('server_dir')
    run(['svnadmin', 'create', str(target)])
    return str(target)
コード例 #3
0
def svn_remote(parentdir, scope='session'):
    """Build an svn repository under *parentdir* and return its path."""
    # `svnadmin create` initializes an empty repository at the given path.
    path = parentdir.join('server_dir')
    run(['svnadmin', 'create', str(path)])
    return str(path)
コード例 #4
0
File: conftest.py — Project: vcs-python/vcspull
    def fn(repo_name, testfile_filename='testfile.test'):
        """Create a git repo named *repo_name* with one committed test file."""
        repo_path = str(tmpdir_repoparent.join(repo_name))
        run(['git', 'init', repo_name], cwd=str(tmpdir_repoparent))
        # Seed the repository with a single file so it has exactly one commit.
        for cmd in (
            ['touch', testfile_filename],
            ['git', 'add', testfile_filename],
            ['git', 'commit', '-m', 'test file for %s' % repo_name],
        ):
            run(cmd, cwd=repo_path)
        return repo_path
コード例 #5
0
def test_repo_git_obtain_initial_commit_repo(tmpdir):
    """initial commit repos return 'initial'.

    note: this behaviors differently from git(1)'s use of the word "bare".
    running `git rev-parse --is-bare-repository` would return false.
    """
    repo_name = 'my_git_project'
    run(['git', 'init', repo_name], cwd=str(tmpdir))

    bare_repo_dir = tmpdir.join(repo_name)

    # Clone the commit-less repository; its revision should read 'initial'.
    git_repo = create_repo_from_pip_url(
        pip_url='git+file://' + str(bare_repo_dir),
        repo_dir=str(tmpdir.join('obtaining a bare repo')),
    )
    git_repo.obtain()

    assert git_repo.get_revision() == 'initial'
コード例 #6
0
def test_repo_mercurial(tmpdir, parentdir, hg_remote):
    """Clone an hg remote; revision must match `hg parents` on disk."""
    repo_name = 'my_mercurial_project'
    checkout_dir = str(parentdir.join(repo_name))

    mercurial_repo = create_repo_from_pip_url(
        pip_url='hg+file://' + hg_remote,
        repo_dir=checkout_dir,
    )

    run(['hg', 'init', mercurial_repo.name], cwd=str(tmpdir))
    mercurial_repo.update_repo()

    # Ask mercurial directly for the working directory's revision number.
    expected_revision = run(
        ['hg', 'parents', '--template={rev}'],
        cwd=checkout_dir,
    )

    assert mercurial_repo.get_revision() == expected_revision
    assert os.path.exists(str(tmpdir.join(repo_name)))
コード例 #7
0
def test_progress_callback(tmpdir, git_remote, mocker):
    """`obtain` invokes the progress callback with output text and a timestamp."""
    def spy(output, timestamp):
        assert isinstance(output, string_types)
        assert isinstance(timestamp, datetime.datetime)

    progress_callback = mocker.Mock(name='progress_callback_stub', side_effect=spy)

    run(['git', 'rev-parse', 'HEAD'], cwd=git_remote)

    # create a new repo with the repo as a remote
    git_repo = create_repo_from_pip_url(
        pip_url='git+file://' + git_remote,
        repo_dir=str(tmpdir.join('myrepo')),
        progress_callback=progress_callback,
    )
    git_repo.obtain()

    assert progress_callback.called
コード例 #8
0
File: test_hg.py — Project: tony/libvcs
def test_repo_mercurial(tmpdir, parentdir, hg_remote):
    """Checkout an hg remote and verify revision and checkout location."""
    repo_name = 'my_mercurial_project'
    repo_dir = parentdir.join(repo_name)

    kwargs = {
        'pip_url': 'hg+file://' + hg_remote,
        'repo_dir': str(repo_dir),
    }
    mercurial_repo = create_repo_from_pip_url(**kwargs)

    run(['hg', 'init', mercurial_repo.name], cwd=str(tmpdir))

    mercurial_repo.update_repo()

    # Compare libvcs' reported revision with mercurial's own answer.
    test_repo_revision = run(
        ['hg', 'parents', '--template={rev}'], cwd=str(repo_dir)
    )

    assert mercurial_repo.get_revision() == test_repo_revision
    assert os.path.exists(str(tmpdir.join(repo_name)))
コード例 #9
0
File: test_git.py — Project: tony/libvcs
def test_repo_git_obtain_initial_commit_repo(tmpdir):
    """initial commit repos return 'initial'.

    note: this behaviors differently from git(1)'s use of the word "bare".
    running `git rev-parse --is-bare-repository` would return false.
    """
    repo_name = 'my_git_project'

    run(['git', 'init', repo_name], cwd=str(tmpdir))

    bare_repo_dir = tmpdir.join(repo_name)

    # Obtain a clone of the freshly-initialized (commit-less) repository.
    git_repo = create_repo_from_pip_url(
        pip_url='git+file://' + str(bare_repo_dir),
        repo_dir=str(tmpdir.join('obtaining a bare repo')),
    )
    git_repo.obtain()

    assert git_repo.get_revision() == 'initial'
コード例 #10
0
File: conftest.py — Project: gitter-badger/vcspull
def git_dummy_repo_dir(tmpdir_repoparent, scope='session'):
    """Create a git repo with 1 commit, used as a remote."""
    name = 'dummyrepo'
    repo_path = str(tmpdir_repoparent.join(name))

    run(['git', 'init', name], cwd=str(tmpdir_repoparent))

    # Add and commit a single placeholder file so the repo has one commit.
    testfile = 'testfile.test'
    for cmd in (
        ['touch', testfile],
        ['git', 'add', testfile],
        ['git', 'commit', '-m', 'test file for %s' % name],
    ):
        run(cmd, cwd=repo_path)

    return repo_path
コード例 #11
0
def hg_remote(parentdir, scope='session'):
    """Create a mercurial repo with 1 commit, used as a remote.

    Returns the repository path as a string.
    """
    name = 'dummyrepo'
    repo_path = str(parentdir.join(name))

    run(['hg', 'init', name], cwd=str(parentdir))

    # One tracked, committed file gives the remote a non-empty history.
    testfile = 'testfile.test'
    run(['touch', testfile], cwd=repo_path)
    run(['hg', 'add', testfile], cwd=repo_path)
    run(['hg', 'commit', '-m', 'test file for %s' % name], cwd=repo_path)

    return repo_path
コード例 #12
0
File: test_hg.py — Project: tony/libvcs
def hg_remote(parentdir, scope='session'):
    """Initialize a one-commit mercurial repository to act as a remote."""
    repo_name = 'dummyrepo'
    path = str(parentdir.join(repo_name))

    run(['hg', 'init', repo_name], cwd=str(parentdir))

    filename = 'testfile.test'
    # Create, track, and commit a single test file.
    for cmd in (
        ['touch', filename],
        ['hg', 'add', filename],
        ['hg', 'commit', '-m', 'test file for %s' % repo_name],
    ):
        run(cmd, cwd=path)

    return path
コード例 #13
0
File: test_git.py — Project: tony/libvcs
def test_progress_callback(tmpdir, git_remote, mocker):
    """Obtaining a repo fires the progress callback with (output, timestamp)."""
    def progress_callback_spy(output, timestamp):
        assert isinstance(output, string_types)
        assert isinstance(timestamp, datetime.datetime)

    progress_callback = mocker.Mock(name='progress_callback_stub',
                                    side_effect=progress_callback_spy)

    run(['git', 'rev-parse', 'HEAD'], cwd=git_remote)

    # create a new repo with the repo as a remote
    clone_kwargs = {
        'pip_url': 'git+file://' + git_remote,
        'repo_dir': str(tmpdir.join('myrepo')),
        'progress_callback': progress_callback,
    }
    git_repo = create_repo_from_pip_url(**clone_kwargs)
    git_repo.obtain()

    assert progress_callback.called
コード例 #14
0
def test_repo_git_obtain_full(tmpdir, git_remote):
    """Cloning a git remote yields a checkout matching the remote's HEAD."""
    repo_dir = str(tmpdir.join('myrepo'))
    git_repo = create_repo_from_pip_url(
        pip_url='git+file://' + git_remote,
        repo_dir=repo_dir,
    )

    git_repo.obtain()

    # The cloned revision must equal the remote's current HEAD.
    expected = run(['git', 'rev-parse', 'HEAD'], cwd=git_remote)

    assert git_repo.get_revision() == expected
    assert os.path.exists(repo_dir)
コード例 #15
0
File: test_git.py — Project: tony/libvcs
def test_repo_git_obtain_full(tmpdir, git_remote):
    """Full clone of a git remote: revision and on-disk checkout verified."""
    clone_args = {
        'pip_url': 'git+file://' + git_remote,
        'repo_dir': str(tmpdir.join('myrepo')),
    }
    git_repo = create_repo_from_pip_url(**clone_args)

    git_repo.obtain()

    test_repo_revision = run(['git', 'rev-parse', 'HEAD'], cwd=git_remote)

    assert git_repo.get_revision() == test_repo_revision
    assert os.path.exists(str(tmpdir.join('myrepo')))
コード例 #16
0
File: base.py — Project: YosysHQ/oss-cad-suite-build
def buildCode(build_target, build_arch, nproc, force, dry, pack_sources, single, tar):
	"""Build `build_target` for `build_arch`, walking its dependency order.

	Flags: `force` rebuilds even when hashes match; `dry` only logs the steps;
	`single` builds just the final target (dependency outputs must already
	exist); `tar` additionally archives each target's output directory.
	Raises via log_error when a script fails or required outputs are missing.
	"""
	log_info_triple("Building ", build_target, " for {} architecture ...".format(build_arch))

	# Date stamp (YYYYMMDD) embedded in release package names.
	version_string = datetime.now().strftime("%Y%m%d")
	build_order = createBuildOrder(build_target, build_arch, getArchitecture(), True)
	pos = 0
	if single:
		# Single mode: only the last entry (the requested target) gets built;
		# each dependency's previously-recorded output hash is loaded instead.
		t = build_order[-1]
		arch = t[0]
		target = targets[t[1]]

		deps = target.dependencies
		if build_target == target.name and target.top_package:
			# A top-level package also pulls in every resource contributed by
			# any target in the build order.
			res = set()
			for d in build_order:
				dep = targets[d[1]]
				if (dep and dep.resources):
					for r in dep.resources:
						res.add(r)
			deps += list(res)

		target_build_order = []
		target_build_order.append(tuple((build_arch,build_target)))
		total_pos = len(target_build_order) + len(deps)

		for d in deps:
			pos += 1
			dep = targets[d]
			needed = True
			if dep.arch and arch not in dep.arch:
				# Dependency is restricted to other architectures; skip it.
				needed = False
			if needed:
				build_info = ""
				dep_arch = arch
				if (dep.build_native and build_arch != getArchitecture()):
					# Host tools are built for the machine we run on, not the
					# cross-compilation target architecture.
					dep_arch = getArchitecture()
					build_info = " [" + dep_arch + "]"
				output_dir = os.path.join(OUTPUTS_ROOT, dep_arch, dep.name)
				hash_file = os.path.join(output_dir, '.hash')
				log_info_triple("Step [{:2d}/{:2d}] loading hash ".format(pos,total_pos), dep.name + build_info)
				if dry:
					continue
				if (os.path.exists(hash_file)):
					dep.hash = open(hash_file, 'r').read()
				else:
					log_error("Missing hash file for {} does not exist.".format(dep.name + build_info))
	else:
		target_build_order = build_order
		total_pos = len(target_build_order)

	for t in target_build_order:
		pos += 1
		arch = t[0]
		target = targets[t[1]]
		target.hash = calculateHash(target, arch, build_order)
		build_info = ""
		if (build_arch != arch):
			build_info = " [" + arch + "]"

		output_dir = os.path.join(OUTPUTS_ROOT, arch, target.name)

		# A rebuild is forced when requested, or when any dependency was
		# rebuilt during this invocation.
		forceBuild = force
		for dep in sorted(target.dependencies):
			forceBuild = forceBuild or targets[dep].built
		hash_file = os.path.join(output_dir, '.hash')
		if (not forceBuild and os.path.exists(hash_file)):
			# Skip when the stored hash matches the freshly-computed one.
			if target.hash == open(hash_file, 'r').read():
				log_info_triple("Step [{:2d}/{:2d}] skipping ".format(pos, total_pos), target.name + build_info)
				continue

		log_info_triple("Step [{:2d}/{:2d}] building ".format(pos, total_pos), target.name + build_info)
		if dry:
			continue
		log_step("Remove old output dir ...")
		if os.path.exists(output_dir):
			shutil.rmtree(output_dir, onerror=removeError)
		log_step("Creating output dir ...")
		os.makedirs(output_dir)

		build_dir = os.path.join(BUILDS_ROOT, arch, target.name)
		if not target.top_package:
			# Regular targets build in a scratch dir seeded with their sources.
			log_step("Remove old build dir ...")
			if os.path.exists(build_dir):
				shutil.rmtree(build_dir, onerror=removeError)
			log_step("Creating build dir ...")
			os.makedirs(build_dir)
			for s in target.sources:
				src_dir = os.path.join(SOURCES_ROOT, s)
				log_step_triple("Copy '", s, "' source to build dir ...")
				run(['rsync','-a', src_dir, build_dir])

		deps = target.dependencies
		if t[1] == target.name and target.top_package:
			# Same resource merge as in single mode (see above).
			res = set()
			for d in build_order:
				dep = targets[d[1]]
				if (dep and dep.resources):
					for r in dep.resources:
						res.add(r)
			deps += list(res)

		prefix = "/yosyshq"

		# Stage each needed dependency's output into the build dir (regular
		# target) or directly into the package output dir (top package).
		packages = set()
		for d in deps:
			dep = targets[d]
			needed = True
			if dep.arch and arch not in dep.arch:
				needed = False
			if needed:
				dep_build_info = ""
				if (dep.build_native and build_arch != getArchitecture()):
					dep_build_info = " [" + getArchitecture() + "]"
					dep_dir = os.path.join(OUTPUTS_ROOT, getArchitecture(), d)
				else:
					dep_dir = os.path.join(OUTPUTS_ROOT, arch, d)
				if not os.path.exists(dep_dir):
					log_error("Dependency output directory for {} does not exist.".format(d + dep_build_info))
				if not target.top_package:
					log_step_triple("Copy '", d + dep_build_info, "' output to build dir ...")
					run(['rsync','-a', dep_dir, build_dir])
				else:
					if (dep.package):
						packages.add(dep.package)
					log_step_triple("Copy '", d + dep_build_info, "' output to package dir ...")
					run(['rsync','-a', dep_dir+"/", output_dir])

		if target.top_package:
			# Assemble manifest.json: release metadata plus, per package and
			# per tool, the list of shipped files gathered from dependency
			# output trees (paths rooted under the /yosyshq prefix).
			version_meta = dict({ 'branding': target.branding, 'product':  target.release_name, 'arch': arch, 'version': version_string, 'package_name': target.release_name + "-" + arch + "-" + version_string})
			package_meta = dict.fromkeys(sorted(list(packages)))
			tools_meta = dict.fromkeys(sorted(list(deps)))
			for key in package_meta:
				package_meta[key] = dict({'size': 0, 'files' : [], 'installed' : True })
			for key in tools_meta:
				tools_meta[key] = dict({'files' : [], 'active' : True, 'package' : None })
			for d in deps:
				dep = targets[d]
				needed = True
				if dep.arch and arch not in dep.arch:
					needed = False
				if dep.tools is not None:
					# Explicit tool lists override file-system discovery.
					for key in dep.tools:
						tools_meta[key] = dict({'files' : dep.tools[key], 'active' : True, 'package' : dep.package })
					continue
				if needed:
					if (dep.build_native and build_arch != getArchitecture()):
						dep_dir = os.path.join(OUTPUTS_ROOT, getArchitecture(), d)
					else:
						dep_dir = os.path.join(OUTPUTS_ROOT, arch, d)
					if dep.package:
						package_meta[dep.package]['size'] += get_size(dep_dir + prefix)
					for root, _, files in sorted(os.walk(dep_dir)):
						for filename in sorted(files):
							fpath = os.path.join(root, filename).replace(dep_dir,"")
							if fpath.startswith(prefix):
								name = fpath.replace("/yosyshq/","")
								if name.startswith("bin/"):
									tools_meta[dep.name]['files'].append(name[4:])
									tools_meta[dep.name]['package'] = dep.package
								if dep.package:
									package_meta[dep.package]['files'].append(name)
									if name.startswith("bin/") and arch != 'windows-x64':
										# Non-windows builds mirror bin/ entries in libexec/.
										package_meta[dep.package]['files'].append("libexec" + name[3:])

			metadata = dict({'version' : version_meta, 'packages' : package_meta, 'tools' : tools_meta })
			with open(os.path.join(output_dir, "yosyshq", "share", "manifest.json"), "w") as manifest_file:
				json.dump(metadata, manifest_file)

		# Run the target's build script; top packages build in-place in the
		# output dir, everything else in the staged build dir.
		code = executeBuild(target, arch, prefix, build_dir if not target.top_package else output_dir, output_dir, nproc, pack_sources)
		if code!=0:
			log_error("Script returned error code {}.".format(code))

		if target.license_file is not None or target.license_url is not None:
			# Emit LICENSE.<target>: source checkouts, shipped file list, and
			# the license text (fetched from URL and/or read from the tree).
			log_step("Generating license file ...")
			license_dir = os.path.join(output_dir + prefix, "license")
			os.makedirs(license_dir)
			license_file = os.path.join(license_dir, "LICENSE." + target.name)
			with open(license_file, 'w') as f:
				f.write("YosysHQ embeds '{}' in its distribution bundle.\n".format(target.name))
				f.write("\nBuild is based on folowing sources:\n")
				f.write('=' * 80 + '\n')
				for s in target.sources:
					f.write("{} {} checkout revision {}\n".format(sources[s].vcs, sources[s].location, sources[s].hash))
				f.write("\nFollowing files are included:\n")
				f.write('=' * 80 + '\n')
				for root, _, files in sorted(os.walk(output_dir)):
					for filename in sorted(files):
						fpath = os.path.join(root, filename).replace(output_dir,"")
						if not fpath.startswith("/dev"):
							f.write(fpath.replace("/yosyshq/","") + '\n')
				f.write("\nSoftware is under following license :\n")
				f.write('=' * 80 + '\n')
				if target.license_url is not None:
					log_step("Retrieving license file for {}...".format(target.name))
					try:
						with urllib.request.urlopen(target.license_url) as lf:
							f.write(lf.read().decode('utf-8'))
					except urllib.error.URLError as e:
						log_error(str(e))
				if target.license_file is not None:
					with open(os.path.join(build_dir, target.license_file), 'r') as lf:
						f.write(lf.read())
				f.write('\n' + '=' * 80 + '\n')

		if target.top_package:
			# Package the release: self-extracting .exe on windows, .tgz
			# elsewhere; the /yosyshq tree is renamed to the release name.
			if arch == 'windows-x64':
				package_name = target.release_name + "-" + arch + "-" + version_string +".exe"
				log_step("Packing {} ...".format(package_name))
				os.replace(os.path.join(output_dir, "yosyshq"), os.path.join(output_dir, target.release_name))
				create_exe(package_name, target.release_name, output_dir)
			else:
				package_name = target.release_name + "-" + arch + "-" + version_string +".tgz"
				log_step("Packing {} ...".format(package_name))
				os.replace(os.path.join(output_dir, "yosyshq"), os.path.join(output_dir, target.release_name))
				create_tar(package_name, target.release_name, output_dir)

		# Persist the hash so unchanged targets are skipped next run, and
		# mark the target built so dependents rebuild this run.
		log_step("Marking build finished ...")
		with open(hash_file, 'w') as f:
			f.write(target.hash)
		target.built = True

		if tar:
			package_name = arch + "-" + target.name +".tgz"
			log_step("Packing {} ...".format(package_name))
			create_tar(package_name, output_dir, ".")

		if not target.top_package:
			log_step("Remove build dir ...")
			if os.path.exists(build_dir):
				shutil.rmtree(build_dir, onerror=removeError)