def copy_config_files(self):
    """Refresh each DSE product's resource trees under this node's path from
    the pristine copies in the install directory.

    For every product whose ``resources/<product>/conf`` exists in the
    install, the conf tree is re-copied; solr additionally gets ``web``,
    tomcat gets ``lib`` (when present) and ``webapps``, and any product
    shipping a ``gremlin-console/conf`` (in practice only graph) gets that
    too.
    """
    def _replace_tree(src, dst, skip_missing_src=False):
        # Drop any stale destination tree, then copy the source tree over.
        if os.path.isdir(dst):
            common.rmdirs(dst)
        if not skip_missing_src or os.path.exists(src):
            shutil.copytree(src, dst)

    products = ['dse', 'cassandra', 'hadoop', 'hadoop2-client', 'sqoop', 'hive',
                'tomcat', 'spark', 'shark', 'mahout', 'pig', 'solr', 'graph']
    for product in products:
        src_conf = os.path.join(self.get_install_dir(), 'resources', product, 'conf')
        dst_conf = os.path.join(self.get_path(), 'resources', product, 'conf')
        if not os.path.isdir(src_conf):
            # Product not shipped in this install; nothing to copy.
            continue
        _replace_tree(src_conf, dst_conf)
        if product == 'solr':
            _replace_tree(os.path.join(self.get_install_dir(), 'resources', product, 'web'),
                          os.path.join(self.get_path(), 'resources', product, 'web'))
        if product == 'tomcat':
            # lib/ is absent in some DSE versions, so copy it only when present.
            _replace_tree(os.path.join(self.get_install_dir(), 'resources', product, 'lib'),
                          os.path.join(self.get_path(), 'resources', product, 'lib'),
                          skip_missing_src=True)
            _replace_tree(os.path.join(self.get_install_dir(), 'resources', product, 'webapps'),
                          os.path.join(self.get_path(), 'resources', product, 'webapps'))
        # gremlin-console config only exists for the graph product; the
        # exists() guard makes this a no-op for everything else.
        _replace_tree(os.path.join(self.get_install_dir(), 'resources', product, 'gremlin-console', 'conf'),
                      os.path.join(self.get_path(), 'resources', product, 'gremlin-console', 'conf'),
                      skip_missing_src=True)
def download_dse_version(version, username, password, verbose=False):
    """Download the DSE tarball for *version* and unpack it into the local
    repository cache under a directory named after the version.

    Warns when credentials are missing; raises ArgumentError on download or
    extraction failure.
    """
    url = DSE_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        if username is None:
            common.warning(
                "No dse username detected, specify one using --dse-username or passing in a credentials file using --dse-credentials."
            )
        if password is None:
            common.warning(
                "No dse password detected, specify one using --dse-password or passing in a credentials file using --dse-credentials."
            )
        __download(url, target, username=username, password=password, show_progress=verbose)
        common.debug("Extracting {} as version {} ...".format(target, version))
        archive = tarfile.open(target)
        # First member's leading path component is the tarball's top-level dir.
        top_level = archive.next().name.split("/")[0]
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
def download_dse_version(version, username, password, verbose=False):
    """Fetch and unpack the DSE tarball for *version* into the repository
    cache, printing credential warnings to stderr when needed."""
    url = DSE_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        if username is None:
            print_("Warning: No dse username detected, specify one using --dse-username or passing in a credentials file using --dse-credentials.", file=sys.stderr)
        if password is None:
            print_("Warning: No dse password detected, specify one using --dse-password or passing in a credentials file using --dse-credentials.", file=sys.stderr)
        __download(url, target, username=username, password=password, show_progress=verbose)
        if verbose:
            print_("Extracting %s as version %s ..." % (target, version))
        archive = tarfile.open(target)
        # The first member carries the tarball's top-level directory name.
        top_level = archive.next().name.split("/")[0]
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
def __init__(self, path, name, partitioner=None, install_dir=None, create_directory=True, version=None, verbose=False, snitch='org.apache.cassandra.locator.PropertyFileSnitch', **kwargs):
    """Create (or reload) a cluster rooted at *path* with the given *name*.

    Either *version* (resolved through the repository) or *install_dir*
    must identify the Cassandra install; a ``docker_image`` kwarg short-
    circuits both. Raises RuntimeError for the reserved name 'current'.
    """
    self.name = name
    self.id = 0
    self.ipprefix = None
    self.ipformat = None
    self.nodes = {}
    self.seeds = []
    self.partitioner = partitioner
    self.snitch = snitch
    self._config_options = {}
    self._dse_config_options = {}
    self.__log_level = "INFO"
    self.path = path
    self.__version = None
    self.use_vnodes = False
    # Classes that are to follow the respective logging level
    self._debug = []
    self._trace = []
    if self.name.lower() == "current":
        raise RuntimeError("Cannot name a cluster 'current'.")
    # Legacy keyword spellings win over the positional arguments —
    # this is incredibly important for backwards compatibility.
    version = kwargs.get('cassandra_version', version)
    install_dir = kwargs.get('cassandra_dir', install_dir)
    docker_image = kwargs.get('docker_image')
    if create_directory:
        # we create the dir before potentially downloading to throw an error sooner if need be
        os.mkdir(self.get_path())
    if docker_image:
        self.docker_image = docker_image
        self.__install_dir = None
        self.__version = '3.0'  # TODO: add option to read the version from docker image
        return
    try:
        if version is None:
            # at this point, install_dir should always not be None, but
            # we keep this for backward compatibility (in loading old cluster)
            if install_dir is not None:
                self.__install_dir = install_dir if common.is_win() else os.path.abspath(install_dir)
                self.__version = self.__get_version_from_build()
        else:
            repo_dir, repo_version = self.load_from_repository(version, verbose)
            self.__install_dir = repo_dir
            self.__version = repo_version if repo_version is not None else self.__get_version_from_build()
        if create_directory:
            common.validate_install_dir(self.__install_dir)
            self._update_config()
    except:
        # Roll back the freshly created cluster directory on any failure.
        if create_directory:
            common.rmdirs(self.get_path())
        raise
    self.debug("Started cluster '{}' version {} installed in {}".format(self.name, self.__version, self.__install_dir))
def __init__(self, path, name, partitioner=None, install_dir=None, create_directory=True, version=None, verbose=False, **kwargs):
    """Create (or reload) a cluster rooted at *path* with the given *name*,
    resolving the install from *version* or *install_dir*."""
    self.name = name
    self.nodes = {}
    self.seeds = []
    self.partitioner = partitioner
    self._config_options = {}
    self._dse_config_options = {}
    self.__log_level = "INFO"
    self.__path = path
    self.__version = None
    self.use_vnodes = False
    # Classes that are to follow the respective logging level
    self._debug = []
    self._trace = []
    self.data_dir_count = 1
    if self.name.lower() == "current":
        raise RuntimeError("Cannot name a cluster 'current'.")
    # Legacy keyword spellings win over the positional arguments —
    # this is incredibly important for backwards compatibility.
    version = kwargs.get('cassandra_version', version)
    install_dir = kwargs.get('cassandra_dir', install_dir)
    if create_directory:
        # we create the dir before potentially downloading to throw an error sooner if need be
        os.mkdir(self.get_path())
    try:
        if version is None:
            # at this point, install_dir should always not be None, but
            # we keep this for backward compatibility (in loading old cluster)
            if install_dir is not None:
                self.__install_dir = install_dir if common.is_win() else os.path.abspath(install_dir)
                self.__version = self.__get_version_from_build()
        else:
            repo_dir, repo_version = self.load_from_repository(version, verbose)
            self.__install_dir = repo_dir
            self.__version = repo_version if repo_version is not None else self.__get_version_from_build()
        if create_directory:
            common.validate_install_dir(self.__install_dir)
            self._update_config()
    except:
        # Roll back the freshly created cluster directory on any failure.
        if create_directory:
            common.rmdirs(self.get_path())
        raise
def __init__(self, path, name, partitioner=None, install_dir=None, create_directory=True, version=None, verbose=False, **kwargs):
    """Create (or reload) a cluster rooted at *path* with the given *name*,
    resolving the install from *version* or *install_dir*."""
    self.name = name
    self.nodes = {}
    self.seeds = []
    self.partitioner = partitioner
    self._config_options = {}
    self._dse_config_options = {}
    self.__log_level = "INFO"
    self.__path = path
    self.__version = None
    self.use_vnodes = False
    # Classes that are to follow the respective logging level
    self._debug = []
    self._trace = []
    if self.name.lower() == "current":
        raise RuntimeError("Cannot name a cluster 'current'.")
    # Legacy keyword spellings win over the positional arguments —
    # this is incredibly important for backwards compatibility.
    version = kwargs.get("cassandra_version", version)
    install_dir = kwargs.get("cassandra_dir", install_dir)
    if create_directory:
        # we create the dir before potentially downloading to throw an error sooner if need be
        os.mkdir(self.get_path())
    try:
        if version is None:
            # at this point, install_dir should always not be None, but
            # we keep this for backward compatibility (in loading old cluster)
            if install_dir is not None:
                self.__install_dir = install_dir if common.is_win() else os.path.abspath(install_dir)
                self.__version = self.__get_version_from_build()
        else:
            repo_dir, repo_version = self.load_from_repository(version, verbose)
            self.__install_dir = repo_dir
            self.__version = repo_version if repo_version is not None else self.__get_version_from_build()
        if create_directory:
            common.validate_install_dir(self.__install_dir)
            self._update_config()
    except:
        # Roll back the freshly created cluster directory on any failure.
        if create_directory:
            common.rmdirs(self.get_path())
        raise
def download_dse_version(version, username, password, verbose=False):
    """Fetch and unpack the DSE tarball for *version* into the repository
    cache; raises ArgumentError on download or extraction failure."""
    url = DSE_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(url, target, username=username, password=password, show_progress=verbose)
        if verbose:
            print_("Extracting %s as version %s ..." % (target, version))
        archive = tarfile.open(target)
        # The first member carries the tarball's top-level directory name.
        top_level = archive.next().name.split("/")[0]
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
def download_opscenter_version(version, username, password, target_version, verbose=False):
    """Download the OpsCenter tarball for *version* and install it into the
    repository cache under *target_version*.

    The archive URL template can be overridden via the ``repositories``
    section of the ccm config.
    """
    url = OPSC_ARCHIVE
    if CCM_CONFIG.has_option('repositories', 'opscenter'):
        url = CCM_CONFIG.get('repositories', 'opscenter')
    url = url % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        if username is None:
            common.warning("No dse username detected, specify one using --dse-username or passing in a credentials file using --dse-credentials.")
        if password is None:
            common.warning("No dse password detected, specify one using --dse-password or passing in a credentials file using --dse-credentials.")
        __download(url, target, username=username, password=password, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, target_version))
        archive = tarfile.open(target)
        # First member's leading path component is the tarball's top dir.
        top_level = archive.next().name.split("/")[0]  # pylint: disable=all
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), target_version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version {}".format(version)
        else:
            msg = "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
def download_version(version, url=None, verbose=False, binary=False):
    """Download, extract, and build Cassandra tarball.

    if binary == True, download precompiled tarball, otherwise build from source tarball.
    """
    assert_jdk_valid_for_cassandra_version(version)
    archive_url = ARCHIVE
    if CCM_CONFIG.has_option('repositories', 'cassandra'):
        archive_url = CCM_CONFIG.get('repositories', 'cassandra')
    if url is not None:
        archive_url = url
    else:
        flavor = 'bin' if binary else 'src'
        archive_url = "%s/%s/apache-cassandra-%s-%s.tar.gz" % (
            archive_url, version.split('-')[0], version, flavor)
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(archive_url, target, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, version))
        archive = tarfile.open(target)
        top_level = archive.next().name.split("/")[0]  # pylint: disable=all
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
        if binary:
            # Binary installs don't have a build.xml that is needed
            # for pulling the version from. Write the version number
            # into a file to read later in common.get_version_from_build()
            with open(os.path.join(target_dir, '0.version.txt'), 'w') as f:
                f.write(version)
        else:
            compile_version(version, target_dir, verbose=verbose)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version {}".format(version)
        else:
            msg = "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
    except CCMError as e:
        # wipe out the directory if anything goes wrong. Otherwise we will
        # assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            raise CCMError(
                "Building C* version {} failed. Attempted to delete {} but failed. This will need to be manually deleted"
                .format(version, target_dir))
        raise e
def elassandra_version_directory(version, elassandra_version):
    """Return the cached repository directory for *version* if it holds a
    valid ``elassandra-<elassandra_version>`` install; otherwise wipe the
    broken cache entry (when present) and return None."""
    cache_dir = directory_name(version)
    if not os.path.exists(cache_dir):
        return None
    try:
        validate_install_dir(os.path.join(cache_dir, 'elassandra-%s' % elassandra_version))
    except ArgumentError:
        # Cached copy is unusable — remove it so it gets re-fetched.
        rmdirs(cache_dir)
        return None
    return cache_dir
def version_directory(version):
    """Return the cached repository directory for *version*, or None.

    If the cached directory exists but fails validation (e.g. a partial or
    corrupted download), it is removed so a fresh copy can replace it.
    """
    dir = directory_name(version)
    if os.path.exists(dir):
        try:
            validate_install_dir(dir)
            return dir
        except ArgumentError:
            # Fix: the exception was previously bound to an unused name
            # (`as e`); the binding is dropped since only the type matters.
            rmdirs(dir)
            return None
    else:
        return None
def version_directory(version):
    """Return the cached repository directory for *version*, or None if it is
    absent or invalid (invalid caches are wiped for re-download)."""
    candidate = directory_name(version)
    if not os.path.exists(candidate):
        return None
    try:
        validate_install_dir(candidate)
    except ArgumentError:
        # Cached copy is unusable — remove it so it gets re-fetched.
        rmdirs(candidate)
        return None
    return candidate
def remove_dir_with_retry(self, path):
    """Remove *path*, retrying up to 5 times with a short back-off.

    Directory removal can fail transiently (e.g. Windows file locks), so
    each failure sleeps 100ms before retrying; after the 5th failure the
    last exception is re-raised.
    """
    tries = 0
    removed = False
    while not removed:
        try:
            common.rmdirs(path)
            removed = True
        except Exception:
            tries = tries + 1
            time.sleep(.1)
            if tries == 5:
                # Fix: bare `raise` re-raises the in-flight exception with its
                # original traceback intact (`raise e` rebuilt the traceback
                # from the re-raise site).
                raise
def download_version(version, url=None, verbose=False, binary=False):
    """Download, extract, and build Cassandra tarball.

    if binary == True, download precompiled tarball, otherwise build from source tarball.
    """
    assert_jdk_valid_for_cassandra_version(version)
    if url is not None:
        u = url
    else:
        flavor = 'bin' if binary else 'src'
        u = "%s/%s/apache-cassandra-%s-%s.tar.gz" % (
            ARCHIVE, version.split('-')[0], version, flavor)
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(u, target, show_progress=verbose)
        if verbose:
            print_("Extracting %s as version %s ..." % (target, version))
        archive = tarfile.open(target)
        top_level = archive.next().name.split("/")[0]
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
        if binary:
            # Binary installs don't have a build.xml that is needed
            # for pulling the version from. Write the version number
            # into a file to read later in common.get_version_from_build()
            with open(os.path.join(target_dir, '0.version.txt'), 'w') as f:
                f.write(version)
        else:
            compile_version(version, target_dir, verbose=verbose)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
    except CCMError as e:
        # wipe out the directory if anything goes wrong. Otherwise we will
        # assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            print_("Deleted %s due to error" % target_dir)
        except:
            raise CCMError(
                "Building C* version %s failed. Attempted to delete %s but failed. This will need to be manually deleted"
                % (version, target_dir))
        raise e
def remove_dir_with_retry(self, path):
    """Remove *path* if it exists, retrying transient failures up to 5 times
    with a 100ms back-off; the 5th failure re-raises."""
    if not os.path.exists(path):
        return
    for attempt in range(1, 6):
        try:
            common.rmdirs(path)
            return
        except:
            # Transient failure (e.g. a lingering file lock): back off briefly.
            time.sleep(.1)
            if attempt == 5:
                raise
def download_version(version, url=None, verbose=False, binary=False):
    """Download, extract, and build Cassandra tarball.

    if binary == True, download precompiled tarball, otherwise build from source tarball.
    """
    assert_jdk_valid_for_cassandra_version(version)
    if url is not None:
        u = url
    else:
        kind = "bin" if binary else "src"
        u = "%s/%s/apache-cassandra-%s-%s.tar.gz" % (ARCHIVE, version.split("-")[0], version, kind)
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(u, target, show_progress=verbose)
        if verbose:
            print_("Extracting %s as version %s ..." % (target, version))
        archive = tarfile.open(target)
        top_level = archive.next().name.split("/")[0]
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
        if binary:
            # Binary installs don't have a build.xml that is needed
            # for pulling the version from. Write the version number
            # into a file to read later in common.get_version_from_build()
            with open(os.path.join(target_dir, "0.version.txt"), "w") as f:
                f.write(version)
        else:
            compile_version(version, target_dir, verbose=verbose)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
    except CCMError as e:
        # wipe out the directory if anything goes wrong. Otherwise we will
        # assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            print_("Deleted %s due to error" % target_dir)
        except:
            raise CCMError(
                "Building C* version %s failed. Attempted to delete %s but failed. This will need to be manually deleted"
                % (version, target_dir)
            )
        raise e
def remove(self, node=None):
    """Remove a single *node* from the cluster, or the whole cluster.

    The node (or cluster) is stopped ungracefully and its on-disk path is
    deleted. Removing a node also drops it from the seed list and rewrites
    the cluster config. Unknown nodes are ignored.
    """
    if node is not None:
        # Fix: idiomatic `not in` instead of `not node.name in ...`.
        if node.name not in self.nodes:
            return
        del self.nodes[node.name]
        if node in self.seeds:
            self.seeds.remove(node)
        self._update_config()
        node.stop(gently=False)
        common.rmdirs(node.get_path())
    else:
        self.stop(gently=False)
        common.rmdirs(self.get_path())
def download_version(version, url=None, verbose=False, target_dir=None):
    """ Download, scylla relocatable package tarballs. """
    try:
        # Accept either a local .tar.gz path or a downloadable URL.
        if os.path.exists(url) and url.endswith('.tar.gz'):
            package_path = url
        elif is_valid(url):
            _, package_path = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
            __download(url, package_path, show_progress=verbose)
        else:
            raise ArgumentError(
                "unsupported url or file doesn't exist\n\turl={}".format(url))
        if verbose:
            print_("Extracting %s as version %s ..." % (package_path, version))
        archive = tarfile.open(package_path)
        archive.extractall(path=target_dir)
        archive.close()
        # add breadcrumb so we could list the origin of each part easily for debugging
        # for example listing all the version we have in ccm scylla-repository
        # find ~/.ccm/scylla-repository/*/ -iname source.txt | xargs cat
        with open(os.path.join(target_dir, 'source.txt'), 'w') as f:
            f.write("version=%s\n" % version)
            f.write("url=%s\n" % url)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version %s" % version
        else:
            msg = "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
    except CCMError as e:
        if target_dir:
            # wipe out the directory if anything goes wrong.
            try:
                rmdirs(target_dir)
                print_("Deleted %s due to error" % target_dir)
            except:
                raise CCMError(
                    "Downloading/extracting scylla version %s failed. Attempted to delete %s but failed. This will need to be manually deleted" % (version, target_dir))
        raise e
def download_opscenter_version(version, target_version, verbose=False):
    """Download the OpsCenter tarball for *version* and install it into the
    repository cache under *target_version*.

    Raises ArgumentError on download or extraction failure.
    """
    url = OPSC_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(url, target, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, target_version))
        tar = tarfile.open(target)
        # Fix: `next(tar)` raised TypeError on Python 3 — TarFile defines
        # __iter__ but not __next__, so the builtin next() cannot be used.
        # TarFile.next() is the method that advances to the next member.
        dir = tar.next().name.split("/")[0]
        tar.extractall(path=__get_dir())
        tar.close()
        target_dir = os.path.join(__get_dir(), target_version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), dir), target_dir)
    except urllib.error.URLError as e:
        msg = "Invalid version {}".format(version) if url is None else "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
def download_opscenter_version(version, target_version, verbose=False):
    """Download the OpsCenter tarball for *version* and install it into the
    repository cache under *target_version*."""
    url = OPSC_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(url, target, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, target_version))
        archive = tarfile.open(target)
        # First member's leading path component is the tarball's top dir.
        top_level = archive.next().name.split("/")[0]  # pylint: disable=all
        archive.extractall(path=__get_dir())
        archive.close()
        target_dir = os.path.join(__get_dir(), target_version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), top_level), target_dir)
    except urllib.error.URLError as e:
        if url is None:
            msg = "Invalid version {}".format(version)
        else:
            msg = "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
def copy_config_files(self):
    """Refresh each DSE product's config under this node's path from the
    pristine copies in the install directory; solr also gets its ``web``
    tree and tomcat its ``lib`` and ``webapps`` trees."""
    def _refresh(src, dst):
        # Replace dst wholesale with a copy of src.
        if os.path.isdir(dst):
            common.rmdirs(dst)
        shutil.copytree(src, dst)

    for product in ["dse", "cassandra", "hadoop", "sqoop", "hive", "tomcat",
                    "spark", "shark", "mahout", "pig", "solr"]:
        src_conf = os.path.join(self.get_install_dir(), "resources", product, "conf")
        dst_conf = os.path.join(self.get_path(), "resources", product, "conf")
        if not os.path.isdir(src_conf):
            # Product not shipped in this install; nothing to copy.
            continue
        _refresh(src_conf, dst_conf)
        if product == "solr":
            _refresh(os.path.join(self.get_install_dir(), "resources", product, "web"),
                     os.path.join(self.get_path(), "resources", product, "web"))
        if product == "tomcat":
            _refresh(os.path.join(self.get_install_dir(), "resources", product, "lib"),
                     os.path.join(self.get_path(), "resources", product, "lib"))
            _refresh(os.path.join(self.get_install_dir(), "resources", product, "webapps"),
                     os.path.join(self.get_path(), "resources", product, "webapps"))
def clone_development(git_repo, version, verbose=False):
    """Check out and compile a development Cassandra version.

    Maintains a local mirror cache of the remote repo, clones the requested
    branch/SHA/tag into the version directory, and (re)compiles it. On any
    failure the target directory is wiped so a half-built tree is never
    mistaken for a compiled one.
    """
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    else:
        git_repo_name = 'apache'
        git_branch = version.split(':', 1)[1]
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)
    logfile = lastlogfilename()
    with open(logfile, 'w') as lf:
        try:
            # Checkout/fetch a local repository cache to reduce the number of
            # remote fetches we need to perform:
            if not os.path.exists(local_git_cache):
                if verbose:
                    print_("Cloning Cassandra...")
                rc = subprocess.call(
                    ['git', 'clone', '--mirror', git_repo, local_git_cache],
                    cwd=__get_dir(), stdout=lf, stderr=lf)
                assert rc == 0, "Could not do a git clone"
            else:
                if verbose:
                    print_("Fetching Cassandra updates...")
                rc = subprocess.call(
                    ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                    cwd=local_git_cache, stdout=lf, stderr=lf)
            # Checkout the version we want from the local cache:
            if not os.path.exists(target_dir):
                # Development branch doesn't exist yet: check it out.
                if verbose:
                    print_("Cloning Cassandra (from local cache)")
                # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
                if sys.platform == "cygwin":
                    local_split = local_git_cache.split(os.sep)
                    target_split = target_dir.split(os.sep)
                    subprocess.call(
                        ['git', 'clone', local_split[-1], target_split[-1]],
                        cwd=__get_dir(), stdout=lf, stderr=lf)
                else:
                    subprocess.call(
                        ['git', 'clone', local_git_cache, target_dir],
                        cwd=__get_dir(), stdout=lf, stderr=lf)
                # Determine whether the request names a branch (vs a SHA/tag).
                is_branch = False
                try:
                    branch_listing = subprocess.check_output(
                        ['git', 'branch', '--all'], cwd=target_dir).decode('utf-8')
                    branches = [b.strip() for b in branch_listing.replace('remotes/origin/', '').split()]
                    is_branch = git_branch in branches
                except subprocess.CalledProcessError as cpe:
                    print_(
                        "Error Running Branch Filter: {}\nAssumming request is not for a branch"
                        .format(cpe.output))
                # Now check out the right version.
                if verbose:
                    branch_or_sha_tag = 'branch' if is_branch else 'SHA/tag'
                    print_("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
                if is_branch:
                    # we use checkout -B with --track so we can specify that we want to track a specific branch
                    # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                    # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                    rc = subprocess.call(
                        ['git', 'checkout', '-B', git_branch, '--track',
                         'origin/{git_branch}'.format(git_branch=git_branch)],
                        cwd=target_dir, stdout=lf, stderr=lf)
                else:
                    rc = subprocess.call(['git', 'checkout', git_branch],
                                         cwd=target_dir, stdout=lf, stderr=lf)
                if int(rc) != 0:
                    raise CCMError(
                        'Could not check out git branch {branch}. '
                        'Is this a valid branch name? (see {lastlog} or run '
                        '"ccm showlastlog" for details)'.format(
                            branch=git_branch, lastlog=logfile))
                # now compile
                compile_version(git_branch, target_dir, verbose)
            else:
                # Branch is already checked out. See if it is behind and recompile if needed.
                rc = subprocess.call(['git', 'fetch', 'origin'],
                                     cwd=target_dir, stdout=lf, stderr=lf)
                assert rc == 0, "Could not do a git fetch"
                status = subprocess.Popen(['git', 'status', '-sb'],
                                          cwd=target_dir, stdout=subprocess.PIPE,
                                          stderr=lf).communicate()[0]
                if str(status).find('[behind') > -1:
                    if verbose:
                        print_("Branch is behind, recompiling")
                    rc = subprocess.call(['git', 'pull'], cwd=target_dir, stdout=lf, stderr=lf)
                    assert rc == 0, "Could not do a git pull"
                    rc = subprocess.call([platform_binary('ant'), 'realclean'],
                                         cwd=target_dir, stdout=lf, stderr=lf)
                    assert rc == 0, "Could not run 'ant realclean'"
                    # now compile
                    compile_version(git_branch, target_dir, verbose)
        except:
            # wipe out the directory if anything goes wrong. Otherwise we will
            # assume it has been compiled the next time it runs.
            try:
                rmdirs(target_dir)
                print_("Deleted %s due to error" % target_dir)
            except:
                raise CCMError(
                    "Building C* version %s failed. Attempted to delete %s but failed. This will need to be manually deleted"
                    % (version, target_dir))
            raise
def clone_development(git_repo, version, verbose=False):
    """Check out and compile a development Cassandra version.

    Supports 'github:<user>/<branch>', 'local:<path>:<branch>' and plain
    'branch:<name>' slugs. Maintains a per-repo local mirror cache, clones
    the requested branch/SHA/tag into the version directory, and
    (re)compiles it; the target directory is wiped on failure.
    """
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    elif 'local:' in version:
        git_repo_name = 'local_{}'.format(git_repo)  # add git repo location to distinguish cache location for differing repos
        git_branch = version.split(':')[-1]  # last token on 'local:...' slugs should always be branch name
    else:
        git_repo_name = 'apache'
        git_branch = version.split(':', 1)[1]
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)
    logfile = lastlogfilename()
    with open(logfile, 'w') as lf:
        try:
            # Checkout/fetch a local repository cache to reduce the number of
            # remote fetches we need to perform:
            if not os.path.exists(local_git_cache):
                common.info("Cloning Cassandra...")
                rc = subprocess.call(
                    ['git', 'clone', '--mirror', git_repo, local_git_cache],
                    cwd=__get_dir(), stdout=lf, stderr=lf)
                assert rc == 0, "Could not do a git clone"
            else:
                common.info("Fetching Cassandra updates...")
                rc = subprocess.call(
                    ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                    cwd=local_git_cache, stdout=lf, stderr=lf)
            # Checkout the version we want from the local cache:
            if not os.path.exists(target_dir):
                # Development branch doesn't exist yet: check it out.
                common.info("Cloning Cassandra (from local cache)")
                # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
                if sys.platform == "cygwin":
                    local_split = local_git_cache.split(os.sep)
                    target_split = target_dir.split(os.sep)
                    subprocess.call(['git', 'clone', local_split[-1], target_split[-1]],
                                    cwd=__get_dir(), stdout=lf, stderr=lf)
                else:
                    subprocess.call(['git', 'clone', local_git_cache, target_dir],
                                    cwd=__get_dir(), stdout=lf, stderr=lf)
                # Determine whether the request names a branch (vs a SHA/tag).
                is_branch = False
                try:
                    branch_listing = subprocess.check_output(['git', 'branch', '--all'], cwd=target_dir).decode('utf-8')
                    branches = [b.strip() for b in branch_listing.replace('remotes/origin/', '').split()]
                    is_branch = git_branch in branches
                except subprocess.CalledProcessError as cpe:
                    common.error("Error Running Branch Filter: {}\nAssumming request is not for a branch".format(cpe.output))
                # Now check out the right version.
                branch_or_sha_tag = 'branch' if is_branch else 'SHA/tag'
                common.info("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
                if is_branch:
                    # we use checkout -B with --track so we can specify that we want to track a specific branch
                    # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                    # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                    rc = subprocess.call(
                        ['git', 'checkout', '-B', git_branch, '--track',
                         'origin/{git_branch}'.format(git_branch=git_branch)],
                        cwd=target_dir, stdout=lf, stderr=lf)
                else:
                    rc = subprocess.call(['git', 'checkout', git_branch],
                                         cwd=target_dir, stdout=lf, stderr=lf)
                if int(rc) != 0:
                    raise CCMError('Could not check out git branch {branch}. '
                                   'Is this a valid branch name? (see {lastlog} or run '
                                   '"ccm showlastlog" for details)'.format(
                                       branch=git_branch, lastlog=logfile
                                   ))
                # now compile
                compile_version(git_branch, target_dir, verbose)
            else:
                # Branch is already checked out. See if it is behind and recompile if needed.
                rc = subprocess.call(['git', 'fetch', 'origin'], cwd=target_dir, stdout=lf, stderr=lf)
                assert rc == 0, "Could not do a git fetch"
                status = subprocess.Popen(['git', 'status', '-sb'], cwd=target_dir,
                                          stdout=subprocess.PIPE, stderr=lf).communicate()[0]
                if str(status).find('[behind') > -1:
                    common.info("Branch is behind, recompiling")
                    rc = subprocess.call(['git', 'pull'], cwd=target_dir, stdout=lf, stderr=lf)
                    assert rc == 0, "Could not do a git pull"
                    rc = subprocess.call([platform_binary('ant'), 'realclean'],
                                         cwd=target_dir, stdout=lf, stderr=lf)
                    assert rc == 0, "Could not run 'ant realclean'"
                    # now compile
                    compile_version(git_branch, target_dir, verbose)
        except Exception as e:
            # wipe out the directory if anything goes wrong. Otherwise we will
            # assume it has been compiled the next time it runs.
            try:
                rmdirs(target_dir)
                common.error("Deleted {} due to error".format(target_dir))
            except:
                # NOTE: the two adjacent literals below intentionally reproduce
                # the original message (no space before 'but failed').
                print_('Building C* version {version} failed. Attempted to delete {target_dir}'
                       'but failed. This will need to be manually deleted'.format(
                           version=version, target_dir=target_dir
                       ))
            finally:
                raise e
def clone_development(git_repo, version, verbose=False):
    """Check out and compile a development (git) version of Cassandra.

    ``version`` is a slug whose branch/SHA part follows a ``:`` (github-style
    slugs are split by ``github_username_and_branch_name``).  The remote repo
    is mirrored once into a local ``_git_cache_<repo_name>`` directory and
    later calls only fetch updates; the requested branch or SHA/tag is then
    checked out into ``directory_name(version)`` and built with
    ``compile_version``.  On any failure the target directory is wiped so the
    next run does not mistake a partial checkout for a finished build.

    :param git_repo: URL of the git repository to mirror.
    :param version: version slug naming the branch or SHA/tag to check out.
    :param verbose: when True, progress messages are emitted via ``print_``.
    :raises CCMError: if the checkout fails, or if post-failure cleanup of
        the target directory itself fails.
    """
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if "github" in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    else:
        git_repo_name = "apache"
        git_branch = version.split(":", 1)[1]
    # Per-repo mirror cache lives alongside the version directories.
    local_git_cache = os.path.join(__get_dir(), "_git_cache_" + git_repo_name)
    logfile = lastlogfilename()
    # All git/ant output is redirected into the log file for "ccm showlastlog".
    with open(logfile, "w") as lf:
        try:
            # Checkout/fetch a local repository cache to reduce the number of
            # remote fetches we need to perform:
            if not os.path.exists(local_git_cache):
                if verbose:
                    print_("Cloning Cassandra...")
                out = subprocess.call(
                    ["git", "clone", "--mirror", git_repo, local_git_cache], cwd=__get_dir(), stdout=lf, stderr=lf
                )
                assert out == 0, "Could not do a git clone"
            else:
                if verbose:
                    print_("Fetching Cassandra updates...")
                # NOTE(review): unlike the clone path, this fetch's exit code is
                # not asserted — a failed update falls through silently; confirm
                # that best-effort refresh is intentional.
                out = subprocess.call(
                    ["git", "fetch", "-fup", "origin", "+refs/*:refs/*"], cwd=local_git_cache, stdout=lf, stderr=lf
                )

            # Checkout the version we want from the local cache:
            if not os.path.exists(target_dir):
                # development branch doesn't exist. Check it out.
                if verbose:
                    print_("Cloning Cassandra (from local cache)")

                # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
                if sys.platform == "cygwin":
                    local_split = local_git_cache.split(os.sep)
                    target_split = target_dir.split(os.sep)
                    subprocess.call(
                        ["git", "clone", local_split[-1], target_split[-1]], cwd=__get_dir(), stdout=lf, stderr=lf
                    )
                else:
                    subprocess.call(
                        ["git", "clone", local_git_cache, target_dir], cwd=__get_dir(), stdout=lf, stderr=lf
                    )

                # determine if the request is for a branch (vs. a SHA or tag)
                is_branch = False
                try:
                    branch_listing = subprocess.check_output(["git", "branch", "--all"], cwd=target_dir).decode("utf-8")
                    branches = [b.strip() for b in branch_listing.replace("remotes/origin/", "").split()]
                    is_branch = git_branch in branches
                except subprocess.CalledProcessError as cpe:
                    print_("Error Running Branch Filter: {}\nAssumming request is not for a branch".format(cpe.output))

                # now check out the right version
                if verbose:
                    branch_or_sha_tag = "branch" if is_branch else "SHA/tag"
                    print_("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
                if is_branch:
                    # we use checkout -B with --track so we can specify that we want to track a specific branch
                    # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                    # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                    out = subprocess.call(
                        [
                            "git",
                            "checkout",
                            "-B",
                            git_branch,
                            "--track",
                            "origin/{git_branch}".format(git_branch=git_branch),
                        ],
                        cwd=target_dir,
                        stdout=lf,
                        stderr=lf,
                    )
                else:
                    out = subprocess.call(["git", "checkout", git_branch], cwd=target_dir, stdout=lf, stderr=lf)
                if int(out) != 0:
                    raise CCMError(
                        "Could not check out git branch {branch}. "
                        "Is this a valid branch name? (see {lastlog} or run "
                        '"ccm showlastlog" for details)'.format(branch=git_branch, lastlog=logfile)
                    )
                # now compile
                compile_version(git_branch, target_dir, verbose)
            else:
                # branch is already checked out. See if it is behind and recompile if needed.
                out = subprocess.call(["git", "fetch", "origin"], cwd=target_dir, stdout=lf, stderr=lf)
                assert out == 0, "Could not do a git fetch"
                status = subprocess.Popen(
                    ["git", "status", "-sb"], cwd=target_dir, stdout=subprocess.PIPE, stderr=lf
                ).communicate()[0]
                # "git status -sb" prints e.g. "## branch...origin/branch [behind N]"
                if str(status).find("[behind") > -1:
                    if verbose:
                        print_("Branch is behind, recompiling")
                    out = subprocess.call(["git", "pull"], cwd=target_dir, stdout=lf, stderr=lf)
                    assert out == 0, "Could not do a git pull"
                    out = subprocess.call([platform_binary("ant"), "realclean"], cwd=target_dir, stdout=lf, stderr=lf)
                    assert out == 0, "Could not run 'ant realclean'"
                    # now compile
                    compile_version(git_branch, target_dir, verbose)
        except:
            # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
            try:
                rmdirs(target_dir)
                print_("Deleted %s due to error" % target_dir)
            except:
                # Cleanup itself failed: surface that instead of the original error.
                raise CCMError(
                    "Building C* version %s failed. Attempted to delete %s but failed. This will need to be manually deleted"
                    % (version, target_dir)
                )
            # Re-raise the original failure after successful cleanup.
            raise
def clean_all(): rmdirs(__get_dir())
def clone_development(git_repo, version, verbose=False):
    """Check out and compile a development (git) version of Cassandra.

    Older variant: logs to a fixed ``last.log`` file and checks out
    ``git_branch`` directly with plain ``git checkout`` (no branch-vs-SHA
    detection).  The remote repo is mirrored once into a local
    ``_git_cache_<repo_name>`` directory; the requested ref is checked out
    into ``directory_name(version)`` and built with ``compile_version``.
    On any failure the target directory is wiped so the next run does not
    mistake a partial checkout for a finished build.

    :param git_repo: URL of the git repository to mirror.
    :param version: version slug; the part after ``:`` names the ref.
    :param verbose: when True, progress messages are emitted via ``print_``.
    :raises CCMError: if the checkout fails, or if post-failure cleanup of
        the target directory itself fails.
    """
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    else:
        git_repo_name = 'apache'
        git_branch = version.split(':', 1)[1]
    # Per-repo mirror cache lives alongside the version directories.
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)
    logfile = os.path.join(__get_dir(), "last.log")
    # All git/ant output is redirected into last.log for post-mortem reading.
    with open(logfile, 'w') as lf:
        try:
            #Checkout/fetch a local repository cache to reduce the number of
            #remote fetches we need to perform:
            if not os.path.exists(local_git_cache):
                if verbose:
                    print_("Cloning Cassandra...")
                out = subprocess.call(
                    ['git', 'clone', '--mirror', git_repo, local_git_cache],
                    cwd=__get_dir(), stdout=lf, stderr=lf)
                assert out == 0, "Could not do a git clone"
            else:
                if verbose:
                    print_("Fetching Cassandra updates...")
                # NOTE(review): this fetch's exit code is not asserted — a
                # failed update falls through silently; confirm intentional.
                out = subprocess.call(
                    ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                    cwd=local_git_cache, stdout=lf, stderr=lf)

            #Checkout the version we want from the local cache:
            if not os.path.exists(target_dir):
                # development branch doesn't exist. Check it out.
                if verbose:
                    print_("Cloning Cassandra (from local cache)")

                # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
                if sys.platform == "cygwin":
                    local_split = local_git_cache.split(os.sep)
                    target_split = target_dir.split(os.sep)
                    subprocess.call(['git', 'clone', local_split[-1], target_split[-1]], cwd=__get_dir(), stdout=lf, stderr=lf)
                else:
                    subprocess.call(['git', 'clone', local_git_cache, target_dir], cwd=__get_dir(), stdout=lf, stderr=lf)

                # now check out the right version
                if verbose:
                    print_("Checking out requested branch (%s)" % git_branch)
                out = subprocess.call(['git', 'checkout', git_branch], cwd=target_dir, stdout=lf, stderr=lf)
                if int(out) != 0:
                    raise CCMError("Could not check out git branch %s. Is this a valid branch name? (see last.log for details)" % git_branch)
                # now compile
                compile_version(git_branch, target_dir, verbose)
            else:
                # branch is already checked out. See if it is behind and recompile if needed.
                out = subprocess.call(['git', 'fetch', 'origin'], cwd=target_dir, stdout=lf, stderr=lf)
                assert out == 0, "Could not do a git fetch"
                status = subprocess.Popen(['git', 'status', '-sb'], cwd=target_dir, stdout=subprocess.PIPE, stderr=lf).communicate()[0]
                # "git status -sb" prints e.g. "## branch...origin/branch [behind N]"
                if str(status).find('[behind') > -1:
                    if verbose:
                        print_("Branch is behind, recompiling")
                    out = subprocess.call(['git', 'pull'], cwd=target_dir, stdout=lf, stderr=lf)
                    assert out == 0, "Could not do a git pull"
                    out = subprocess.call([platform_binary('ant'), 'realclean'], cwd=target_dir, stdout=lf, stderr=lf)
                    assert out == 0, "Could not run 'ant realclean'"
                    # now compile
                    compile_version(git_branch, target_dir, verbose)
        except:
            # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
            try:
                rmdirs(target_dir)
                print_("Deleted %s due to error" % target_dir)
            except:
                # Cleanup itself failed: surface that instead of the original error.
                raise CCMError("Building C* version %s failed. Attempted to delete %s but failed. This will need to be manually deleted" % (version, target_dir))
            # Re-raise the original failure after successful cleanup.
            raise
def clone_development(git_repo, version, verbose=False, alias=False, elassandra_version=None):
    """Check out and compile a development (git) version of Elassandra.

    The remote repo is mirrored once into a local ``_git_cache_<repo_name>``
    directory (the cache name depends on whether ``version`` is a github
    slug, a ``local:`` slug, an alias, or a plain ``strapdata`` ref); the
    requested branch or SHA/tag is then checked out into
    ``directory_name(version)`` and built with ``compile_version``.  All
    subprocess output is streamed through ``log_info`` to a logger backed by
    ``lastlogfilename()``.  On any failure the target directory is wiped so
    the next run does not mistake a partial checkout for a finished build.

    :param git_repo: URL of the git repository to mirror.
    :param version: version slug naming the branch or SHA/tag to check out.
    :param verbose: forwarded to ``compile_version``.
    :param alias: when True, ``version`` is an alias slug of the form
        ``<name>:<...>/<branch>``.
    :param elassandra_version: forwarded to ``compile_version``.
    :raises CCMError: if the checkout fails.
    :raises Exception: if the local branch has diverged from origin in a way
        a fast-forward pull cannot resolve (forces the cache to be wiped).
    """
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    elif 'local:' in version:
        git_repo_name = 'local_{}'.format(git_repo)  # add git repo location to distinguish cache location for differing repos
        git_branch = version.split(':')[-1]  # last token on 'local:...' slugs should always be branch name
    elif alias:
        git_repo_name = 'alias_{}'.format(version.split('/')[0].split(':')[-1])
        git_branch = version.split('/')[-1]
    else:
        git_repo_name = 'strapdata'
        git_branch = version.split(':', 1)[1]
    # Per-repo mirror cache lives alongside the version directories.
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)

    logfile = lastlogfilename()
    logger = get_logger(logfile)

    try:
        # Checkout/fetch a local repository cache to reduce the number of
        # remote fetches we need to perform:
        if not os.path.exists(local_git_cache):
            common.info("Cloning Elassandra...")
            process = subprocess.Popen(
                ['git', 'clone', '--mirror', git_repo, local_git_cache],
                cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # log_info drains/records the process output and returns the exit
            # code first; the remaining tuple members are unused here.
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git clone"
        else:
            common.info("Fetching Elassandra updates...")
            process = subprocess.Popen(
                ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                cwd=local_git_cache, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not update git"

        # Checkout the version we want from the local cache:
        if not os.path.exists(target_dir):
            # development branch doesn't exist. Check it out.
            common.info("Cloning Elassandra (from local cache)")

            # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
            if sys.platform == "cygwin":
                local_split = local_git_cache.split(os.sep)
                target_split = target_dir.split(os.sep)
                process = subprocess.Popen(
                    ['git', 'clone', local_split[-1], target_split[-1]],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"
            else:
                process = subprocess.Popen(
                    ['git', 'clone', local_git_cache, target_dir],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"

            # determine if the request is for a branch (vs. a SHA or tag)
            is_branch = False
            try:
                branch_listing = subprocess.check_output(['git', 'branch', '--all'], cwd=target_dir).decode('utf-8')
                branches = [b.strip() for b in branch_listing.replace('remotes/origin/', '').split()]
                is_branch = git_branch in branches
            except subprocess.CalledProcessError as cpe:
                common.error("Error Running Branch Filter: {}\nAssumming request is not for a branch".format(cpe.output))

            # now check out the right version
            branch_or_sha_tag = 'branch' if is_branch else 'SHA/tag'
            common.info("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
            if is_branch:
                # we use checkout -B with --track so we can specify that we want to track a specific branch
                # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                process = subprocess.Popen(
                    ['git', 'checkout', '-B', git_branch,
                     '--track', 'origin/{git_branch}'.format(git_branch=git_branch)],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            else:
                process = subprocess.Popen(
                    ['git', 'checkout', git_branch],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            if int(out) != 0:
                raise CCMError('Could not check out git branch {branch}. '
                               'Is this a valid branch name? (see {lastlog} or run '
                               '"ccm showlastlog" for details)'.format(
                                   branch=git_branch, lastlog=logfile))
            # now compile
            compile_version(git_branch, target_dir, verbose, elassandra_version=elassandra_version)
        else:
            # branch is already checked out. See if it is behind and recompile if needed.
            process = subprocess.Popen(
                ['git', 'fetch', 'origin'],
                cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git fetch"
            process = subprocess.Popen(
                ['git', 'status', '-sb'],
                cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Here the second tuple member (captured stdout) is what we need.
            _, status, _ = log_info(process, logger)
            if str(status).find('[behind') > -1:
                # If `status` looks like '## cassandra-2.2...origin/cassandra-2.2 [behind 9]\n'
                common.info("Branch is behind, recompiling")
                process = subprocess.Popen(
                    ['git', 'pull'],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git pull"
                process = subprocess.Popen(
                    [platform_binary('ant'), 'realclean'],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not run 'ant realclean'"
                # now compile
                compile_version(git_branch, target_dir, verbose, elassandra_version=elassandra_version)
            elif re.search('\[.*?(ahead|behind).*?\]', status.decode("utf-8")) is not None:
                # status looks like '## trunk...origin/trunk [ahead 1, behind 29]\n'
                # If we have diverged in a way that fast-forward merging cannot solve, raise an exception so the cache is wiped
                common.error("Could not ascertain branch status, please resolve manually.")
                raise Exception
            else:
                # status looks like '## cassandra-2.2...origin/cassandra-2.2\n'
                common.debug("Branch up to date, not pulling.")
    except Exception as e:
        # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            print_('Building C* version {version} failed. Attempted to delete {target_dir}'
                   'but failed. This will need to be manually deleted'.format(
                       version=version, target_dir=target_dir))
        finally:
            # Always re-raise the original error, whether or not cleanup worked.
            raise e