# NOTE(review): this chunk is the tail of a pip-style Git VCS backend class.
# The class header and the beginning of get_url_rev() are outside this view;
# the indentation below is reconstructed from the flattened source.

    else:
        # Git-specific URL handling (not visible here) did not apply:
        # fall back to the generic parser on the base class.
        url, rev = super(Git, self).get_url_rev()

    # assumes the return sits after the if/else, as in upstream pip — TODO confirm
    return url, rev


def update_submodules(self, location):
    # Bring the checkout's submodules up to date.  A repository without a
    # .gitmodules file has no submodules, so there is nothing to do.
    if not os.path.exists(os.path.join(location, '.gitmodules')):
        return
    self.run_command(
        ['submodule', 'update', '--init', '--recursive', '-q'],
        cwd=location,
    )


@classmethod
def controls_location(cls, location):
    # Report whether `location` is under git control.  First honour the
    # cheaper base-class check, then ask git itself, which also recognises
    # checkouts nested below the repository root.
    if super(Git, cls).controls_location(location):
        return True
    try:
        # `git rev-parse` produces no complaint output inside a work tree;
        # on_returncode='ignore' turns a failure into output instead of an
        # exception, so `not r` is truthy only when git was happy.
        r = cls().run_command(['rev-parse'],
                              cwd=location,
                              show_stdout=False,
                              on_returncode='ignore')
        return not r
    except BadCommand:
        # git is not installed at all — we cannot tell, assume not ours.
        logger.debug("could not determine if %s is under git control "
                     "because git is not available", location)
        return False


vcs.register(Git)
# NOTE(review): this chunk is the tail of a pip-style Bazaar VCS backend.
# It starts inside get_url() — `urls` is presumably the output of
# `bzr info`, parsed a few lines above this view; TODO confirm.

    for line in urls.splitlines():
        line = line.strip()
        # `bzr info` labels the upstream location differently for checkouts
        # and branches; accept either prefix.
        for x in ('checkout of branch: ', 'parent branch: '):
            if line.startswith(x):
                repo = line.split(x)[1]
                if self._is_local_repository(repo):
                    # Local paths are normalised to file:// URLs.
                    return path_to_url(repo)
                return repo
    return None


def get_revision(self, location):
    # The last line of `bzr revno` output is the revision number.
    revision = self.run_command(
        ['revno'], show_stdout=False, cwd=location)
    return revision.splitlines()[-1]


def get_src_requirement(self, dist, location):
    # Build a "bzr+URL@rev#egg=name" requirement string for an editable
    # checkout, or None when the upstream URL cannot be determined.
    repo = self.get_url(location)
    if not repo:
        return None
    if not repo.lower().startswith('bzr:'):
        repo = 'bzr+' + repo
    # egg_name() is "name-version"; keep only the project name.
    egg_project_name = dist.egg_name().split('-', 1)[0]
    current_rev = self.get_revision(location)
    return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)


def check_version(self, dest, rev_options):
    """Always assume the versions don't match"""
    # bzr revnos are not stable identifiers, so never trust a match.
    return False


vcs.register(Bazaar)
def get_rev_options(url, rev):
    """Build the extra svn command-line options for *url* / *rev*.

    Encodes the requested revision as ``-r REV`` and, when the URL carries
    credentials (``user:pass@host``), appends ``--username`` /
    ``--password`` options.
    """
    rev_options = ['-r', rev] if rev else []

    split_result = urlparse.urlsplit(url)
    if hasattr(split_result, 'username'):
        # >= Python-2.5: SplitResult exposes the credentials directly.
        username = split_result.username
        password = split_result.password
    else:
        # Older Pythons: pick the auth part out of the netloc by hand.
        username = password = None
        netloc = split_result[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username = auth

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options


vcs.register(Subversion)
return current_revision def get_revision_hash(self, location): current_rev_hash = call_subprocess( [self.cmd, 'parents', '--template={node}'], show_stdout=False, cwd=location).strip() return current_rev_hash def get_src_requirement(self, dist, location, find_tags): repo = self.get_url(location) if not repo.lower().startswith('hg:'): repo = 'hg+' + repo egg_project_name = dist.egg_name().split('-', 1)[0] if not repo: return None current_rev = self.get_revision(location) current_rev_hash = self.get_revision_hash(location) tag_revs = self.get_tag_revs(location) branch_revs = self.get_branch_revs(location) if current_rev in tag_revs: # It's a tag full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) elif current_rev in branch_revs: # It's the tip of a branch full_egg_name = '%s-%s' % (egg_project_name, branch_revs[current_rev]) else: full_egg_name = '%s-dev' % egg_project_name return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name) vcs.register(Mercurial)
tags = call_subprocess( [self.cmd, 'tags'], show_stdout=False, cwd=location) tag_revs = [] for line in tags.splitlines(): tags_match = re.search(r'([.\w-]+)\s*(.*)$', line) if tags_match: tag = tags_match.group(1) rev = tags_match.group(2) tag_revs.append((rev.strip(), tag.strip())) return dict(tag_revs) def get_src_requirement(self, dist, location, find_tags): repo = self.get_url(location) if not repo.lower().startswith('bzr:'): repo = 'bzr+' + repo egg_project_name = dist.egg_name().split('-', 1)[0] if not repo: return None current_rev = self.get_revision(location) tag_revs = self.get_tag_revs(location) if current_rev in tag_revs: # It's a tag full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) else: full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev) return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name) vcs.register(Bazaar)
def get_rev_options(url, rev):
    """Return the extra svn options encoding *rev* and any URL credentials.

    Produces ``['-r', rev]`` when a revision was requested, plus
    ``--username`` / ``--password`` options extracted from a
    ``user:pass@host`` style netloc.
    """
    options = []
    if rev:
        options.extend(['-r', rev])

    parsed = urllib_parse.urlsplit(url)
    username = password = None
    if hasattr(parsed, 'username'):
        # >= Python-2.5: credentials are attributes of the split result.
        username, password = parsed.username, parsed.password
    elif '@' in parsed[1]:
        # Legacy fallback: dissect the netloc manually.
        auth = parsed[1].split('@')[0]
        if ':' in auth:
            username, password = auth.split(':', 1)
        else:
            username = auth

    if username:
        options.extend(['--username', username])
    if password:
        options.extend(['--password', password])
    return options


vcs.register(Subversion)
# NOTE(review): this chunk starts inside class FakeRepo (its header is
# outside this view) and ends inside the docstring of PersistentRevFakeRepo.
# Indentation is reconstructed.

def revert(self, revision):
    # Record the requested revision and log the call so tests can assert
    # on the sequence of VCS operations.
    self.revision = revision
    self.log.append(('revert', revision, self.target_dir))

def parents(self, pip_compatible=False):
    # Current revision as a one-element list, mimicking `hg parents`.
    return [self.revision]

def archive(self, target):
    # Fake an export: just drop a marker file holding the revision.
    utils.mkdirp(target)
    with open(os.path.join(target, '.fake_archival.txt'), 'w') as f:
        f.write(str(self.revision))


# Register the fake backend with this project's registry and with pip's own.
vcs.SUPPORTED['fakevcs'] = FakeRepo
from pip.vcs import vcs as pip_vcs
pip_vcs.register(FakeRepo)  # for tests around gp.vcsdevelop


def get_vcs_log():
    # Accessor for the class-level operation log (shared by all FakeRepos).
    return FakeRepo.log


def clear_vcs_log():
    # Reset the shared log between tests.
    FakeRepo.log = []


class PersistentRevFakeRepo(FakeRepo):
    """A variant of FakeRepo that still needs the directory structure around.

    Makes for a more realistic test of some conditions. In particular,
    reproduced launchpad #TODO
# NOTE(review): yields mid-body and the docstring shows `with` usage, so this
# is presumably decorated with @contextmanager above this view — TODO confirm.
def bundle_package(self, cache=True):
    """Makes the pybundle archive (that :program:`pip` can take to
    install) with completely resolved dependencies.  It yields triple
    of package name, filename of the pybundle archive, and its full
    path. ::

        with build.bundle_package() as (package, filename, path):
            sftp.put(path, filename)

    :param cache: whether to cache the package file or not.
                  ``True`` by default
    :type cache: :class:`bool`

    """
    asuka_logger = self.get_logger('bundle_package')
    # Makes pip.log.logger to forward records to the standard logging
    if not getattr(type(self), 'initialized', False):
        # One-time, class-level setup: wire pip's logger and register Git.
        type(self).initialized = True
        logger.consumers.extend([(Logger.FATAL, asuka_logger.critical),
                                 (Logger.ERROR, asuka_logger.error),
                                 (Logger.WARN, asuka_logger.warn),
                                 (Logger.NOTIFY, asuka_logger.info),
                                 (Logger.INFO, asuka_logger.info),
                                 (Logger.DEBUG, asuka_logger.debug),
                                 (Logger.VERBOSE_DEBUG, asuka_logger.debug)
                                 ])
        vcs.register(Git)
    # Instantiate pip's `bundle` command through its normal CLI machinery.
    main_parser = create_main_parser()
    bundle = commands['bundle'](main_parser)
    with self.archive_package() as (package_name, filename, filepath):
        bundle_filename = package_name + '.pybundle'
        if cache:
            cache_dir_path = os.path.join(tempfile.gettempdir(),
                                          'asuka-pybundle-cache')
            if not os.path.isdir(cache_dir_path):
                os.makedirs(cache_dir_path)
            cache_path = os.path.join(cache_dir_path, bundle_filename)
            if os.path.isfile(cache_path):
                # Cache hit: serve the previously-built pybundle and stop.
                asuka_logger.info('cache exists: %s, skipping pybundle...',
                                  cache_path)
                yield package_name, bundle_filename, cache_path
                return
        tempdir = tempfile.gettempdir()
        # Build the pybundle next to the source archive.
        bundle_path = os.path.join(os.path.dirname(filepath),
                                   bundle_filename)
        asuka_logger.info('pybundle_path = %r', bundle_path)
        # Hand-built stand-in for pip's parsed CLI options.
        options = optparse.Values()
        options.editables = []
        options.requirements = []
        options.find_links = []
        options.index_url = PYPI_INDEX_URLS[0]
        options.extra_index_urls = PYPI_INDEX_URLS[1:]
        options.no_index = False
        options.use_mirrors = False
        # NOTE(review): True where pip expects a mirror list — TODO confirm.
        options.mirrors = True
        options.build_dir = os.path.join(tempdir,
                                         'asuka-dist-build-bundle')
        options.target_dir = None
        options.download_dir = None
        options.download_cache = os.path.join(tempdir,
                                              'asuka-dist-download-cache')
        options.src_dir = backup_dir(src_prefix, '-bundle')
        options.upgrade = False
        options.force_reinstall = False
        options.ignore_dependencies = False
        options.no_install = True
        options.no_download = False
        options.install_options = []
        options.global_options = []
        options.use_user_site = False
        options.as_egg = False
        asuka_logger.debug('start: pip bundle %s %s', bundle_path, filepath)
        retrial = 0
        while 1:
            # Start from a clean build dir; ignore "already gone" errors.
            try:
                shutil.rmtree(options.build_dir)
            except (OSError, IOError):
                pass
            try:
                bundle.run(options, [bundle_path, filepath])
            except PipError as e:
                # Retry up to 3 times with a different index and a
                # quadratic back-off, then give up and re-raise.
                asuka_logger.exception(e)
                retrial += 1
                if retrial < 3:
                    asuka_logger.error(
                        'retry pip bundle after %d second(s)... (%d)',
                        retrial, retrial**2)
                    options.index_url = PYPI_INDEX_URLS[retrial]
                    options.extra_index_urls = PYPI_INDEX_URLS[retrial + 1:]
                    time.sleep(retrial**2)
                    continue
                raise
            finally:
                # Guarded cleanup: the build dir may not exist if pip
                # failed before creating it.
                if os.path.isdir(options.build_dir):
                    shutil.rmtree(options.build_dir)
            break
        asuka_logger.debug('end: pip bundle %s %s', bundle_path, filepath)
        if cache:
            # Persist the freshly built pybundle for later cache hits.
            asuka_logger.info('save pybundle cache %s...', cache_path)
            shutil.copyfile(bundle_path, cache_path)
        yield package_name, os.path.basename(bundle_path), bundle_path
# NOTE(review): this chunk starts inside class FakeRepo (its header is
# outside this view) and ends inside RecipeTestCase.setUp().  Indentation
# is reconstructed.

def revert(self, revision):
    # Record the requested revision and log the call so tests can assert
    # on the sequence of VCS operations.
    self.revision = revision
    self.log.append(('revert', revision))

def parents(self, pip_compatible=False):
    # Current revision as a one-element list, mimicking `hg parents`.
    return [self.revision]

def archive(self, target):
    # Fake an export: just drop a marker file holding the revision.
    utils.mkdirp(target)
    with open(os.path.join(target, '.fake_archival.txt'), 'w') as f:
        f.write(str(self.revision))


# Register the fake backend with this project's registry and with pip's own.
vcs.SUPPORTED['fakevcs'] = FakeRepo
from pip.vcs import vcs as pip_vcs
pip_vcs.register(FakeRepo)  # for tests around gp.vcsdevelop


def get_vcs_log():
    # Accessor for the class-level operation log (shared by all FakeRepos).
    return FakeRepo.log


def clear_vcs_log():
    # Reset the shared log between tests.
    FakeRepo.log = []


class RecipeTestCase(unittest.TestCase):
    """A base setup for tests of recipe classes"""

    def setUp(self):
        # Fresh scratch buildout directory per test; chunk ends mid-method.
        b_dir = self.buildout_dir = mkdtemp('test_oerp_base_recipe')
# NOTE(review): yields mid-body and the docstring shows `with` usage, so this
# is presumably decorated with @contextmanager above this view — TODO confirm.
def bundle_package(self, cache=True):
    """Makes the pybundle archive (that :program:`pip` can take to
    install) with completely resolved dependencies.  It yields triple
    of package name, filename of the pybundle archive, and its full
    path. ::

        with build.bundle_package() as (package, filename, path):
            sftp.put(path, filename)

    :param cache: whether to cache the package file or not.
                  ``True`` by default
    :type cache: :class:`bool`

    """
    asuka_logger = self.get_logger('bundle_package')
    # Makes pip.log.logger to forward records to the standard logging
    if not getattr(type(self), 'initialized', False):
        # One-time, class-level setup: wire pip's logger and register Git.
        type(self).initialized = True
        logger.consumers.extend([
            (Logger.FATAL, asuka_logger.critical),
            (Logger.ERROR, asuka_logger.error),
            (Logger.WARN, asuka_logger.warn),
            (Logger.NOTIFY, asuka_logger.info),
            (Logger.INFO, asuka_logger.info),
            (Logger.DEBUG, asuka_logger.debug),
            (Logger.VERBOSE_DEBUG, asuka_logger.debug)
        ])
        vcs.register(Git)
    load_command('bundle')
    bundle = command_dict['bundle']
    with self.archive_package() as (package_name, filename, filepath):
        # BUG FIX: key the cache on the *pybundle* name rather than on the
        # source archive's filename.  Previously the cache stored/served
        # the raw source archive under the archive filename, so a cache
        # hit handed out something that was not a pybundle at all.
        bundle_filename = package_name + '.pybundle'
        if cache:
            cache_dir_path = os.path.join(
                tempfile.gettempdir(),
                'asuka-pybundle-cache'
            )
            if not os.path.isdir(cache_dir_path):
                os.makedirs(cache_dir_path)
            cache_path = os.path.join(cache_dir_path, bundle_filename)
            if os.path.isfile(cache_path):
                # Cache hit: serve the previously-built pybundle and stop.
                asuka_logger.info('cache exists: %s, skipping pybundle...',
                                  cache_path)
                yield package_name, bundle_filename, cache_path
                return
        tempdir = tempfile.gettempdir()
        # Build the pybundle next to the source archive.
        bundle_path = os.path.join(
            os.path.dirname(filepath), bundle_filename
        )
        asuka_logger.info('pybundle_path = %r', bundle_path)
        # Hand-built stand-in for pip's parsed CLI options.
        options = optparse.Values()
        options.editables = []
        options.requirements = []
        options.find_links = []
        options.index_url = PYPI_INDEX_URLS[0]
        options.extra_index_urls = PYPI_INDEX_URLS[1:]
        options.no_index = False
        options.use_mirrors = False
        # NOTE(review): True where pip expects a mirror list — TODO confirm.
        options.mirrors = True
        options.build_dir = os.path.join(
            tempdir, 'asuka-dist-build-bundle'
        )
        options.target_dir = None
        options.download_dir = None
        options.download_cache = os.path.join(
            tempdir, 'asuka-dist-download-cache'
        )
        options.src_dir = backup_dir(src_prefix, '-bundle')
        options.upgrade = False
        options.force_reinstall = False
        options.ignore_dependencies = False
        options.no_install = True
        options.no_download = False
        options.install_options = []
        options.global_options = []
        options.use_user_site = False
        options.as_egg = False
        asuka_logger.debug('start: pip bundle %s %s', bundle_path, filepath)
        retrial = 0
        while 1:
            # Start from a clean build dir; ignore "already gone" errors.
            try:
                shutil.rmtree(options.build_dir)
            except (OSError, IOError):
                pass
            try:
                bundle.run(options, [bundle_path, filepath])
            except PipError as e:
                # Retry up to 3 times with a different index and a
                # quadratic back-off, then give up and re-raise.
                asuka_logger.exception(e)
                retrial += 1
                if retrial < 3:
                    asuka_logger.error(
                        'retry pip bundle after %d second(s)... (%d)',
                        retrial, retrial ** 2
                    )
                    options.index_url = PYPI_INDEX_URLS[retrial]
                    options.extra_index_urls = PYPI_INDEX_URLS[retrial + 1:]
                    time.sleep(retrial ** 2)
                    continue
                raise
            finally:
                # BUG FIX: guard the cleanup — an unconditional rmtree()
                # raises when the build dir was never created (e.g. pip
                # failed early) and masks the original exception.
                if os.path.isdir(options.build_dir):
                    shutil.rmtree(options.build_dir)
            break
        asuka_logger.debug('end: pip bundle %s %s', bundle_path, filepath)
        if cache:
            asuka_logger.info('save pybundle cache %s...', cache_path)
            # BUG FIX: cache the generated pybundle (bundle_path), not the
            # source archive (filepath) the original copied here.
            shutil.copyfile(bundle_path, cache_path)
        yield package_name, os.path.basename(bundle_path), bundle_path