Example #1
def download_dse_version(version, username, password, verbose=False):
    url = DSE_ARCHIVE
    if CCM_CONFIG.has_option('repositories', 'dse'):
        url = CCM_CONFIG.get('repositories', 'dse')

    url = url % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        if username is None:
            common.warning("No dse username detected, specify one using --dse-username or passing in a credentials file using --dse-credentials.")
        if password is None:
            common.warning("No dse password detected, specify one using --dse-password or passing in a credentials file using --dse-credentials.")
        __download(url, target, username=username, password=password, show_progress=verbose)
        common.debug("Extracting {} as version {} ...".format(target, version))
        tar = tarfile.open(target)
        dir = tar.next().name.split("/")[0]  # pylint: disable=all
        tar.extractall(path=__get_dir())
        tar.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), dir), target_dir)
    except urllib.error.URLError as e:
        msg = "Invalid version %s" % version if url is None else "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
Example #2
            def run(self):
                common.debug("Log-watching thread starting.")

                # run until stop gets requested by .join()
                while not self.req_stop_event.is_set():
                    self.scan_and_report()
                    time.sleep(interval)

                try:
                    # do a final scan to make sure we got to the very end of the files
                    self.scan_and_report()
                finally:
                    common.debug("Log-watching thread exiting.")
                    # done_event signals that the scan completed a final pass
                    self.done_event.set()
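The run() method above belongs to a thread that coordinates shutdown through two threading.Event objects. A self-contained sketch of that stop/done handshake, with an illustrative PollingWatcher class and a dummy scan() standing in for scan_and_report():

# Sketch of the stop/done Event handshake; the class and scan body are illustrative, not ccm code.
import threading
import time

class PollingWatcher(threading.Thread):
    def __init__(self, interval=1.0):
        super().__init__()
        self.interval = interval
        self.req_stop_event = threading.Event()   # set by the caller to request shutdown
        self.done_event = threading.Event()       # set by the thread after its final pass

    def scan(self):
        print("scanning...")

    def run(self):
        while not self.req_stop_event.is_set():
            self.scan()
            time.sleep(self.interval)
        try:
            self.scan()                            # final pass so nothing is missed
        finally:
            self.done_event.set()

    def stop(self, timeout=5.0):
        self.req_stop_event.set()
        self.done_event.wait(timeout)
        self.join(timeout)

watcher = PollingWatcher(interval=0.2)
watcher.start()
time.sleep(1)
watcher.stop()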
Example #3
    def generate_dc_tokens(self, node_count, tokens):
        if self.cassandra_version() < '4' or (
                self.partitioner and not ('Murmur3' in self.partitioner
                                          or 'Random' in self.partitioner)):
            raise common.ArgumentError(
                "generate-tokens script only for >=4.0 and Murmur3 or Random")
        if not ('num_tokens' in self._config_options
                and self._config_options['num_tokens'] is not None
                and int(self._config_options['num_tokens']) > 1):
            raise common.ArgumentError(
                "Cannot use generate-tokens script without num_tokens > 1")

        partitioner = 'RandomPartitioner' if (
            self.partitioner
            and 'Random' in self.partitioner) else 'Murmur3Partitioner'
        generate_tokens = common.join_bin(self.get_install_dir(),
                                          os.path.join('tools', 'bin'),
                                          'generatetokens')
        cmd_list = [
            generate_tokens,
            '-n', str(node_count),
            '-t', str(self._config_options.get("num_tokens")),
            '--rf', str(min(3, node_count)),
            '-p', partitioner
        ]
        process = subprocess.Popen(cmd_list,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=os.environ.copy())
        # the first line is "Generating tokens for X nodes with" and can be ignored
        process.stdout.readline()

        for n in range(1, node_count + 1):
            stdout_output = re.sub(r'^.*?:', '', process.stdout.readline().decode("utf-8"))
            node_tokens = stdout_output.replace('[', '').replace(' ', '').replace(']', '').replace('\n', '')
            tokens.append(node_tokens)

        common.debug("pregenerated tokens from cmd_list: {} are {}".format(
            str(cmd_list), tokens))
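Each output line from the generatetokens tool is reduced to a bare comma-separated token list by stripping the leading label and the list punctuation. A small sketch of that parsing step on a fabricated sample line (the sample only mimics the expected format):

# Parsing sketch on a made-up line; real input comes from the tool's stdout.
import re

sample_line = "Node 1: [ -9223372036854775808, -3074457345618258603, 3074457345618258602 ]\n"
tokens = re.sub(r'^.*?:', '', sample_line)           # drop the "Node N:" prefix
tokens = tokens.replace('[', '').replace(' ', '').replace(']', '').replace('\n', '')
print(tokens)   # -> "-9223372036854775808,-3074457345618258603,3074457345618258602"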
Example #4
def download_dse_version(version, username, password, verbose=False):
    url = DSE_ARCHIVE % version
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        if username is None:
            common.warning("No dse username detected, specify one using --dse-username or passing in a credentials file using --dse-credentials.")
        if password is None:
            common.warning("No dse password detected, specify one using --dse-password or passing in a credentials file using --dse-credentials.")
        __download(url, target, username=username, password=password, show_progress=verbose)
        common.debug("Extracting {} as version {} ...".format(target, version))
        tar = tarfile.open(target)
        dir = tar.next().name.split("/")[0]  # pylint: disable=all
        tar.extractall(path=__get_dir())
        tar.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), dir), target_dir)
    except urllib.error.URLError as e:
        msg = "Invalid version %s" % version if url is None else "Invalid url %s" % url
        msg = msg + " (underlying error is: %s)" % str(e)
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: %s" % str(e))
Example #5
def clone_development(git_repo, version, verbose=False, alias=False, elassandra_version=None):
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    elif 'local:' in version:
        git_repo_name = 'local_{}'.format(git_repo)  # add git repo location to distinguish cache location for differing repos
        git_branch = version.split(':')[-1]  # last token on 'local:...' slugs should always be branch name
    elif alias:
        git_repo_name = 'alias_{}'.format(version.split('/')[0].split(':')[-1])
        git_branch = version.split('/')[-1]
    else:
        git_repo_name = 'strapdata'
        git_branch = version.split(':', 1)[1]
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)

    logfile = lastlogfilename()
    logger = get_logger(logfile)

    try:
        # Checkout/fetch a local repository cache to reduce the number of
        # remote fetches we need to perform:
        if not os.path.exists(local_git_cache):
            common.info("Cloning Elassandra...")
            process = subprocess.Popen(
                ['git', 'clone', '--mirror', git_repo, local_git_cache],
                cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git clone"
        else:
            common.info("Fetching Elassandra updates...")
            process = subprocess.Popen(
                ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                cwd=local_git_cache, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not update git"

        # Checkout the version we want from the local cache:
        if not os.path.exists(target_dir):
            # development branch doesn't exist. Check it out.
            common.info("Cloning Elassandra (from local cache)")

            # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
            if sys.platform == "cygwin":
                local_split = local_git_cache.split(os.sep)
                target_split = target_dir.split(os.sep)
                process = subprocess.Popen(
                    ['git', 'clone', local_split[-1], target_split[-1]],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"
            else:
                process = subprocess.Popen(
                    ['git', 'clone', local_git_cache, target_dir],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"

            # determine if the request is for a branch
            is_branch = False
            try:
                branch_listing = subprocess.check_output(['git', 'branch', '--all'], cwd=target_dir).decode('utf-8')
                branches = [b.strip() for b in branch_listing.replace('remotes/origin/', '').split()]
                is_branch = git_branch in branches
            except subprocess.CalledProcessError as cpe:
                common.error("Error Running Branch Filter: {}\nAssumming request is not for a branch".format(cpe.output))

            # now check out the right version
            branch_or_sha_tag = 'branch' if is_branch else 'SHA/tag'
            common.info("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
            if is_branch:
                # we use checkout -B with --track so we can specify that we want to track a specific branch
                # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                process = subprocess.Popen(['git', 'checkout', '-B', git_branch,
                                            '--track', 'origin/{git_branch}'.format(git_branch=git_branch)],
                                           cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            else:
                process = subprocess.Popen(
                    ['git', 'checkout', git_branch],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            if int(out) != 0:
                raise CCMError('Could not check out git branch {branch}. '
                               'Is this a valid branch name? (see {lastlog} or run '
                               '"ccm showlastlog" for details)'.format(
                                   branch=git_branch, lastlog=logfile
                               ))
            # now compile
            compile_version(git_branch, target_dir, verbose, elassandra_version=elassandra_version)
        else:  # branch is already checked out. See if it is behind and recompile if needed.
            process = subprocess.Popen(
                ['git', 'fetch', 'origin'],
                cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git fetch"
            process = subprocess.Popen(['git', 'status', '-sb'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            _, status, _ = log_info(process, logger)
            if str(status).find('[behind') > -1:  # If `status` looks like '## cassandra-2.2...origin/cassandra-2.2 [behind 9]\n'
                common.info("Branch is behind, recompiling")
                process = subprocess.Popen(['git', 'pull'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git pull"
                process = subprocess.Popen([platform_binary('ant'), 'realclean'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not run 'ant realclean'"

                # now compile
                compile_version(git_branch, target_dir, verbose, elassandra_version=elassandra_version)
            elif re.search(r'\[.*?(ahead|behind).*?\]', status.decode("utf-8")) is not None:  # status looks like '## trunk...origin/trunk [ahead 1, behind 29]\n'
                # If we have diverged in a way that fast-forward merging cannot solve, raise an exception so the cache is wiped
                common.error("Could not ascertain branch status, please resolve manually.")
                raise Exception
            else:  # status looks like '## cassandra-2.2...origin/cassandra-2.2\n'
                common.debug("Branch up to date, not pulling.")
    except Exception as e:
        # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            print_('Building C* version {version} failed. Attempted to delete {target_dir} '
                   'but failed. This will need to be manually deleted.'.format(
                       version=version,
                       target_dir=target_dir
                   ))
        finally:
            raise e
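A condensed, self-contained sketch of the mirror-cache flow above; the paths, repository URL and helper name are placeholders, and the error handling, logging and cygwin special case are omitted:

# Mirror-cache sketch: one bare mirror per upstream, working trees cloned from it.
import os
import subprocess

def clone_via_cache(repo_url, cache_dir, work_dir, branch):
    if not os.path.exists(cache_dir):
        subprocess.check_call(['git', 'clone', '--mirror', repo_url, cache_dir])
    else:
        subprocess.check_call(['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'], cwd=cache_dir)
    subprocess.check_call(['git', 'clone', cache_dir, work_dir])
    # -B plus --track avoids ambiguity with branch names that also look like SHAs
    subprocess.check_call(['git', 'checkout', '-B', branch, '--track', 'origin/' + branch], cwd=work_dir)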
Example #6
    def populate(self,
                 nodes,
                 debug=False,
                 tokens=None,
                 use_vnodes=None,
                 ipprefix='127.0.0.',
                 ipformat=None,
                 install_byteman=False,
                 use_single_interface=False):
        """Populate a cluster with nodes
        @use_single_interface : Populate the cluster with nodes that all share a single network interface.
        """

        if self.cassandra_version() < '4' and use_single_interface:
            raise common.ArgumentError(
                'use_single_interface is not supported in versions < 4.0')

        node_count = nodes
        dcs = []

        if use_vnodes is None:
            self.use_vnodes = len(tokens or []) > 1 or self._more_than_one_token_configured()
        else:
            self.use_vnodes = use_vnodes

        if isinstance(nodes, list):
            self.set_configuration_options(
                values={'endpoint_snitch': 'org.apache.cassandra.locator.PropertyFileSnitch'})
            node_count = 0
            i = 0
            for c in nodes:
                i = i + 1
                node_count = node_count + c
                for x in xrange(0, c):
                    dcs.append('dc%d' % i)

        if node_count < 1:
            raise common.ArgumentError('invalid node count %s' % nodes)

        for i in xrange(1, node_count + 1):
            if 'node%s' % i in list(self.nodes.values()):
                raise common.ArgumentError(
                    'Cannot create existing node node%s' % i)

        if tokens is None:
            if self.use_vnodes:
                # from 4.0 tokens can be pre-generated via the `allocate_tokens_for_local_replication_factor: 3` strategy
                #  this saves time, as allocating tokens during first start is slow and non-concurrent
                if (self.can_generate_tokens()
                        and 'CASSANDRA_TOKEN_PREGENERATION_DISABLED' not in self._environment_variables):
                    if len(dcs) <= 1:
                        for x in xrange(0, node_count):
                            dcs.append('dc1')

                    tokens = self.generated_tokens(dcs)
            else:
                common.debug("using balanced tokens for non-vnode cluster")
                if len(dcs) <= 1:
                    tokens = self.balanced_tokens(node_count)
                else:
                    tokens = self.balanced_tokens_across_dcs(dcs)

        if not ipformat:
            ipformat = ipprefix + "%d"

        for i in xrange(1, node_count + 1):
            tk = None
            if tokens is not None and i - 1 < len(tokens):
                tk = tokens[i - 1]
            dc = dcs[i - 1] if i - 1 < len(dcs) else None

            binary = None
            if self.cassandra_version() >= '1.2':
                if use_single_interface:
                    #Always leave 9042 and 9043 clear, in case someone defaults to adding
                    # a node with those ports
                    binary = (ipformat % 1, 9042 + 2 + (i * 2))
                else:
                    binary = (ipformat % i, 9042)
            thrift = None
            if self.cassandra_version() < '4':
                thrift = (ipformat % i, 9160)

            storage_interface = ((ipformat % i), 7000)
            if use_single_interface:
                #Always leave 7000 and 7001 in case someone defaults to adding
                #with those port numbers
                storage_interface = (ipformat % 1, 7000 + 2 + (i * 2))

            node = self.create_node(
                name='node%s' % i,
                auto_bootstrap=False,
                thrift_interface=thrift,
                storage_interface=storage_interface,
                jmx_port=str(7000 + i * 100),
                remote_debug_port=str(2000 + i * 100) if debug else str(0),
                byteman_port=str(4000 + i * 100) if install_byteman else str(0),
                initial_token=tk,
                binary_interface=binary,
                environment_variables=self._environment_variables)
            self.add(node, True, dc)
            self._update_config()
        return self
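The per-node interface assignment follows a simple arithmetic scheme: one loopback IP per node in the normal case, or a single shared IP with staggered ports when use_single_interface is set. An illustrative sketch (the helper name and defaults are assumptions, not ccm code):

# Address/port scheme sketch for a three-node cluster; values are illustrative.
def interfaces(node_index, ipformat="127.0.0.%d", single_interface=False):
    if single_interface:
        # one shared IP; 9042/9043 and 7000/7001 are deliberately left free
        binary = (ipformat % 1, 9042 + 2 + node_index * 2)
        storage = (ipformat % 1, 7000 + 2 + node_index * 2)
    else:
        binary = (ipformat % node_index, 9042)
        storage = (ipformat % node_index, 7000)
    return binary, storage

for i in range(1, 4):
    print('node%d' % i, interfaces(i), interfaces(i, single_interface=True))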
Example #7
def clone_development(git_repo, version, verbose=False, alias=False):
    print_(git_repo, version)
    target_dir = directory_name(version)
    assert target_dir
    if 'github' in version:
        git_repo_name, git_branch = github_username_and_branch_name(version)
    elif 'local:' in version:
        git_repo_name = 'local_{}'.format(git_repo)  # add git repo location to distinguish cache location for differing repos
        git_branch = version.split(':')[-1]  # last token on 'local:...' slugs should always be branch name
    elif alias:
        git_repo_name = 'alias_{}'.format(version.split('/')[0].split(':')[-1])
        git_branch = version.split('/')[-1]
    else:
        git_repo_name = 'apache'
        git_branch = version.split(':', 1)[1]
    local_git_cache = os.path.join(__get_dir(), '_git_cache_' + git_repo_name)

    logfile = lastlogfilename()
    logger = get_logger(logfile)

    try:
        # Checkout/fetch a local repository cache to reduce the number of
        # remote fetches we need to perform:
        if not os.path.exists(local_git_cache):
            common.info("Cloning Cassandra...")
            process = subprocess.Popen(
                ['git', 'clone', '--mirror', git_repo, local_git_cache],
                cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git clone"
        else:
            common.info("Fetching Cassandra updates...")
            process = subprocess.Popen(
                ['git', 'fetch', '-fup', 'origin', '+refs/*:refs/*'],
                cwd=local_git_cache, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not update git"

        # Checkout the version we want from the local cache:
        if not os.path.exists(target_dir):
            # development branch doesn't exist. Check it out.
            common.info("Cloning Cassandra (from local cache)")

            # git on cygwin appears to be adding `cwd` to the commands which is breaking clone
            if sys.platform == "cygwin":
                local_split = local_git_cache.split(os.sep)
                target_split = target_dir.split(os.sep)
                process = subprocess.Popen(
                    ['git', 'clone', local_split[-1], target_split[-1]],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"
            else:
                process = subprocess.Popen(
                    ['git', 'clone', local_git_cache, target_dir],
                    cwd=__get_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git clone"

            # determine if the request is for a branch
            is_branch = False
            try:
                branch_listing = subprocess.check_output(['git', 'branch', '--all'], cwd=target_dir).decode('utf-8')
                branches = [b.strip() for b in branch_listing.replace('remotes/origin/', '').split()]
                is_branch = git_branch in branches
            except subprocess.CalledProcessError as cpe:
                common.error("Error Running Branch Filter: {}\nAssumming request is not for a branch".format(cpe.output))

            # now check out the right version
            branch_or_sha_tag = 'branch' if is_branch else 'SHA/tag'
            common.info("Checking out requested {} ({})".format(branch_or_sha_tag, git_branch))
            if is_branch:
                # we use checkout -B with --track so we can specify that we want to track a specific branch
                # otherwise, you get errors on branch names that are also valid SHAs or SHA shortcuts, like 10360
                # we use -B instead of -b so we reset branches that already exist and create a new one otherwise
                process = subprocess.Popen(['git', 'checkout', '-B', git_branch,
                                            '--track', 'origin/{git_branch}'.format(git_branch=git_branch)],
                                           cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            else:
                process = subprocess.Popen(
                    ['git', 'checkout', git_branch],
                    cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
            if int(out) != 0:
                raise CCMError('Could not check out git branch {branch}. '
                               'Is this a valid branch name? (see {lastlog} or run '
                               '"ccm showlastlog" for details)'.format(
                                   branch=git_branch, lastlog=logfile
                               ))
            # now compile
            compile_version(git_branch, target_dir, verbose)
        else:  # branch is already checked out. See if it is behind and recompile if needed.
            process = subprocess.Popen(
                ['git', 'fetch', 'origin'],
                cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _, _ = log_info(process, logger)
            assert out == 0, "Could not do a git fetch"
            process = subprocess.Popen(['git', 'status', '-sb'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            _, status, _ = log_info(process, logger)
            if str(status).find('[behind') > -1:  # If `status` looks like '## cassandra-2.2...origin/cassandra-2.2 [behind 9]\n'
                common.info("Branch is behind, recompiling")
                process = subprocess.Popen(['git', 'pull'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not do a git pull"
                process = subprocess.Popen([platform_binary('ant'), 'realclean'], cwd=target_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, _, _ = log_info(process, logger)
                assert out == 0, "Could not run 'ant realclean'"

                # now compile
                compile_version(git_branch, target_dir, verbose)
            elif re.search(r'\[.*?(ahead|behind).*?\]', status.decode("utf-8")) is not None:  # status looks like '## trunk...origin/trunk [ahead 1, behind 29]\n'
                # If we have diverged in a way that fast-forward merging cannot solve, raise an exception so the cache is wiped
                common.error("Could not ascertain branch status, please resolve manually.")
                raise Exception
            else:  # status looks like '## cassandra-2.2...origin/cassandra-2.2\n'
                common.debug("Branch up to date, not pulling.")
    except Exception as e:
        # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            print_('Building C* version {version} failed. Attempted to delete {target_dir} '
                   'but failed. This will need to be manually deleted.'.format(
                       version=version,
                       target_dir=target_dir
                   ))
        finally:
            raise e
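The recompile decision hinges on the '[behind N]' and '[ahead X, behind Y]' markers in `git status -sb` output. A sketch of that check against fabricated status lines (real output comes from the subprocess call above):

# Branch-status parsing sketch; the sample strings are fabricated.
import re

samples = [
    b'## cassandra-2.2...origin/cassandra-2.2 [behind 9]\n',
    b'## trunk...origin/trunk [ahead 1, behind 29]\n',
    b'## cassandra-2.2...origin/cassandra-2.2\n',
]
for status in samples:
    text = status.decode('utf-8')
    if text.find('[behind') > -1:
        print('behind only -> pull and recompile')
    elif re.search(r'\[.*?(ahead|behind).*?\]', text) is not None:
        print('diverged -> manual resolution needed')
    else:
        print('up to date, nothing to do')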