Example #1
0
def test_create_repo(repo_dict, repo_class, raises_exception, tmpdir):
    """create_repo() yields the expected repo class, or raises as configured."""
    # The repo_dir key is injected here from the tmpdir fixture so each
    # parametrized case works against an isolated temporary checkout path.
    repo_dict['repo_dir'] = str(tmpdir.join('repo_name'))

    if not raises_exception:
        created = create_repo(**repo_dict)
        assert isinstance(created, repo_class)
    else:
        with pytest.raises(raises_exception):
            create_repo(**repo_dict)
Example #2
0
def test_create_repo(repo_dict, repo_class, raises_exception, tmpdir):
    """Exercise create_repo(): success returns repo_class, failure raises."""
    # Supply the checkout directory via the tmpdir fixture so the test
    # never touches a real repository location.
    repo_dict['repo_dir'] = str(tmpdir.join('repo_name'))

    if raises_exception:
        # The parametrized exception type is expected to propagate.
        with pytest.raises(raises_exception):
            create_repo(**repo_dict)
        return

    instance = create_repo(**repo_dict)
    assert isinstance(instance, repo_class)
Example #3
0
def test_repr():
    """The textual form of a git repo embeds its class and directory name."""
    repo = create_repo(url='file://path/to/myrepo', repo_dir='/hello/', vcs='git')

    rendered = text_type(repo)
    # Exact form first; the substring checks document the pieces it is
    # composed of.
    assert rendered == '<GitRepo hello>'
    assert 'GitRepo' in rendered
    assert 'hello' in rendered
Example #4
0
def test_repr():
    """str() of a freshly created GitRepo is '<GitRepo hello>'."""
    repo = create_repo(
        url='file://path/to/myrepo',
        repo_dir='/hello/',
        vcs='git',
    )

    as_text = text_type(repo)
    assert '<GitRepo hello>' == as_text
    # The representation carries both the class name and the repo dir name.
    assert 'GitRepo' in as_text and 'hello' in as_text
Example #5
0
def pullCode(target, build_arch, arch, no_update, single):
	"""Clone or update every source repository needed to build ``target``.

	target: build-target name (a key into the global ``targets`` mapping).
	build_arch, arch: architectures passed to createNeededSourceList()
		when resolving the full dependency closure.
	no_update: when True, existing checkouts are left as-is (no pull).
	single: when True, only the target's own sources are processed instead
		of the computed dependency closure.

	Side effects: creates/refreshes checkouts under SOURCES_ROOT, deletes
	stale or foreign checkouts, checks out each source's pinned revision,
	and records the resulting hash on the source object (``s.hash``).
	"""
	log_info("Downloading sources ...")
	if single:
		needed_sources = targets[target].sources
	else:
		needed_sources = createNeededSourceList(target, build_arch, arch)
	for src in needed_sources:
		s = sources[src]
		repo_dir = os.path.abspath(os.path.join(SOURCES_ROOT, s.name))
		repo = create_repo(url=s.location, vcs=s.vcs, repo_dir=repo_dir, no_submodules=s.no_submodules)
		if not os.path.isdir(repo_dir):
			is_cloning = True
		else:
			remote_url = repo.get_remote()
			is_cloning = False
			if remote_url is None:
				# Directory exists but holds no repository data: wipe it
				# and fall back to a fresh clone.
				log_warning("Destination dir '{}' does not contain repository data. Deleting...".format(s.name))
				is_cloning = True
				shutil.rmtree(repo_dir)
			elif remote_url != s.location:
				# The checkout points at a different remote (source moved):
				# re-clone from scratch rather than updating the wrong repo.
				log_warning("Current source location {} does not match {}. Deleting...".format(remote_url, s.location))
				is_cloning = True
				shutil.rmtree(repo_dir)

		if is_cloning:
			log_step_triple("[{}] Cloning ".format(s.name), s.location)
			try:
				repo.obtain()
			except Exception as ex:
				# Fix: the caught exception was previously discarded, so the
				# log gave no hint WHY the clone failed. Include the detail.
				log_error("Error while cloning repository {}: {}".format(s.location, ex))
		elif not no_update:
			log_step_triple("[{}] Updating ".format(s.name), s.location)
			try:
				repo.update_repo()
			except Exception as ex:
				log_error("Error while updating repository {}.".format(ex))
		if is_cloning or (not no_update):
			log_step_triple("[{}] Checkout ".format(s.name), s.revision)
			repo.checkout(s.revision)

		s.hash = repo.get_revision()

		log_step_triple("[{}] Current revision ".format(s.name), s.hash)
Example #6
0
def getDirHash(src, dir):
	"""Return the revision hash of subdirectory ``dir`` inside source ``src``."""
	entry = sources[src]
	checkout_path = os.path.abspath(os.path.join(SOURCES_ROOT, entry.name))
	# Build a repo handle for the existing checkout and ask it for the
	# per-directory revision.
	handle = create_repo(url=entry.location, vcs=entry.vcs, repo_dir=checkout_path, no_submodules=entry.no_submodules)
	return handle.get_revision_dir(dir)
Example #7
0
def main():
    """Execute command-line tool."""
    # NOTE(review): the positional argument names below ("/path/to/...")
    # look like anonymized placeholders. They still function: argparse
    # stores positionals under their literal name via setattr, and
    # get_path_arg() reads them back with getattr using the same string.
    # Confirm against the original CLI before renaming them.
    parser = argparse.ArgumentParser(
        description='Checkout and update Slicer extension sources.')
    parser.add_argument("--filter",
                        default=".*",
                        type=str,
                        help="Regular expression to select particular \
        extensions (e.g 'ABC|Slicer.+')")
    parser.add_argument("--delete",
                        action="store_true",
                        help="Delete previous source checkout.")
    parser.add_argument(
        '--log-level',
        dest='log_level',
        default='INFO',
        help='Level of debug verbosity. DEBUG, INFO, WARNING, ERROR, CRITICAL.',
    )

    parser.add_argument("/path/to/ExtensionsIndex")
    parser.add_argument("/path/to/ExtensionsSource")
    args = parser.parse_args()

    def get_path_arg(arg_key):
        """Read command line argument ``arg_key`` as an absolute path."""
        return os.path.abspath(os.path.expanduser(getattr(args, arg_key)))

    extensions_source_dir = get_path_arg("/path/to/ExtensionsSource")
    extensions_index_dir = get_path_arg("/path/to/ExtensionsIndex")

    setup_logger(logger=log, level=args.log_level.upper())

    log.info("extensions_source_dir is [%s]" % extensions_source_dir)
    log.info("extensions_index_dir is [%s]" % extensions_index_dir)

    # Extension description files in the index directory.
    file_match = "*.s4ext"

    # Per-extension checkout durations are persisted between runs so a
    # duration is only recorded the first time an extension is fetched.
    stats_file_name = "ExtensionsCheckoutTimes.json"

    stats_file_path = os.path.join(extensions_source_dir, stats_file_name)
    stats = read_dict(stats_file_path)

    re_file_match = re.compile(args.filter)
    for file_path in glob.glob(os.path.join(extensions_index_dir, file_match)):
        extension_name = os.path.splitext(os.path.basename(file_path))[0]
        # Skip extensions not selected by --filter.
        if not re_file_match.match(extension_name):
            continue
        metadata = parse_s4ext(file_path)
        log_context = {
            'repo_name': extension_name,
            'repo_vcs': metadata['scm']
        }
        if args.delete:
            # --delete: wipe the previous checkout (and its recorded
            # timing) so the extension is fetched from scratch below.
            extension_source_dir = \
                os.path.join(extensions_source_dir, extension_name)
            if os.path.exists(extension_source_dir):
                log.warning("Deleting %s" % extension_source_dir,
                            extra=log_context)
                if extension_name in stats:
                    del stats[extension_name]
                    write_dict(stats_file_path, stats)
                shutil.rmtree(extension_source_dir)
        elapsed_time_collected = False
        if extension_name in stats:
            elapsed_time_collected = True
        # Optional svn credentials from the description file are forwarded
        # to create_repo() as svn_username / svn_password.
        kwargs = {}
        for param_name in ['username', 'password']:
            if 'svn' + param_name in metadata:
                kwargs['svn_' + param_name] = metadata['svn' + param_name]
        repo = create_repo(url=metadata['scmurl'],
                           vcs=metadata['scm'],
                           rev=metadata['scmrevision'],
                           repo_dir=os.path.join(extensions_source_dir,
                                                 extension_name),
                           **kwargs)
        repo.progress_callback = progress_callback
        repo.info("Begin timed call")
        # time_call presumably wraps the callable and returns
        # (duration, result) — TODO confirm against its definition.
        duration, result = time_call(repo.update_repo)()
        repo.info("Elapsed time: {:.2f}s\n".format(duration))
        # Only record the duration the first time this extension is seen.
        if not elapsed_time_collected:
            stats[extension_name] = duration

        write_dict(stats_file_path, stats)