Example #1
class Hello(object):
    help_menu = 'test authentication to Jenkins'
    _help = """
Test authentication to Jenkins and return your user's fullName attribute.
"""
    name = 'hello'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Authenticate to Jenkins and print our username to STDOUT.
            Useful for checking that our authentication credentials are
            correct. """
        jenkins = util.jenkins_connection()
        data = jenkins.get_whoami()
        name = data['fullName']  # Our Jenkins instance gets this from LDAP
        jenkins_version = jenkins.get_version()
        print('Hello %s from Jenkins %s' % (name, jenkins_version))
        print('Logged in to %s' % jenkins.url)
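
# A minimal invocation sketch, assuming the module-level imports used above
# (`util`, `Transport`) are in place; the argv list mirrors what a CLI
# dispatcher would normally pass in.
if __name__ == '__main__':
    import sys
    Hello(sys.argv[1:]).main()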
Example #2
class Source(object):
    help_menu = 'build a source package on the local system'
    _help = """
Build a source package on the local system.
"""
    name = 'source'

    def __init__(self, argv):
        self.argv = argv

    def main(self):
        self.parser = Transport(self.argv)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a source package on the local system. """
        util.setup_pristine_tar_branch()
        cmd = ['gbp', 'buildpackage', '--git-tag', '--git-retag', '-S',
               '-us', '-uc']
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)
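
# For reference, _run() above is equivalent to running the following from the
# package checkout (a sketch; it assumes gbp from git-buildpackage is installed
# and the pristine-tar branch has already been set up):
subprocess.check_call(['gbp', 'buildpackage', '--git-tag', '--git-retag',
                       '-S', '-us', '-uc'])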
Example #3
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning('provided path does not exist: %s',
                                   self.argv[-1])
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Example #4
class Source(object):
    help_menu = 'build a source package on the local system'
    _help = """
Build a source package on the local system.
"""
    name = 'source'

    def __init__(self, argv):
        self.argv = argv

    def main(self):
        self.parser = Transport(self.argv)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a source package on the local system. """
        util.setup_pristine_tar_branch()
        cmd = [
            'gbp', 'buildpackage', '--git-tag', '--git-retag', '-S', '-us',
            '-uc'
        ]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)
Example #5
    def main(self):
        parser = Transport(self.arguments,
                           options=self.options,
                           check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        branch = parser.get('--branch', 'master')
        user = parser.get('--user', 'vagrant')
        high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
        if not parser.unknown_commands:
            log.error(
                "it is required to pass a host to deploy to, but none was provided"
            )
            raise SystemExit(1)

        command = [
            "ansible-playbook",
            "-i",
            "%s," % parser.unknown_commands[-1],
            high_verbosity,
            "-u",
            user,
            "--extra-vars",
            'branch=%s' % branch,
            "deploy.yml",
        ]
        log.debug("Running command: %s" % ' '.join(command))
        out, err, code = process.run(command, cwd=playbook_path)
        log.error(err)
        log.debug(out)
Example #6
class Build(object):
    help_menu = 'build a package in Jenkins'
    _help = """
Build a package in Jenkins.
"""
    name = 'build'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith('patch-queue/'):
            log.error('%s is a patch-queue branch' % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info('building %s branch %s at %s', pkg_name, branch_name,
                 posixpath.join(jenkins.url, 'job', 'build-package'))
        job_params = {'PKG_NAME': pkg_name, 'BRANCH': branch_name}

        queue_number = jenkins.build_job('build-package',
                                         parameters=job_params,
                                         token=jenkins.password)

        # Job is now queued, not yet running.
        log.info('Waiting for build queue #%d' % queue_number)
        log.info('This may be safely interrupted...')
        queue_item = jenkins.get_queue_item(queue_number)
        while 'executable' not in queue_item:
            try:
                log.info('queue state: %s' % queue_item['why'])
                sleep(2)
                queue_item = jenkins.get_queue_item(queue_number)
            except KeyboardInterrupt:
                # We have no build_number, so just print a general message with
                # a basic URL for the user to check.
                print('')
                print('Build is queued for starting at %s' % jenkins.url)
                raise SystemExit(1)

        # Job is now running.
        build_number = queue_item['executable']['number']
        # Pass the rest over to the "watch-build" command.
        watcher = WatchBuild(['watch'])
        watcher.watch(build_number)
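
# The polling loop above keys off the shape of the Jenkins queue-item JSON.
# A hedged sketch of the two states it distinguishes (field names taken from
# the code above, values illustrative only):
queued_item = {'why': 'Waiting for next available executor'}
running_item = {'executable': {'number': 42}}
assert 'executable' not in queued_item
assert running_item['executable']['number'] == 42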
Example #7
    def main(self):
        parser = Transport(self.argv, check_help=False)
        parser.catch_help = self.help()
        parser.catches_help()
        if util.build_is_triggered():
            print(
                'Build has been triggered via Github, will skip setting status'
            )
            return
        print('Build has not been triggered via Github')
        print('Assuming manual job execution, will set status')

        required_env_vars = [
            'GITHUB_REPOSITORY', 'GITHUB_SHA', 'GITHUB_OAUTH_TOKEN',
            'GITHUB_STATUS_CONTEXT', 'GITHUB_STATUS_STARTED',
            'GITHUB_STATUS_SUCCESS', 'GITHUB_STATUS_FAILURE',
            'GITHUB_STATUS_ERROR', 'GITHUB_STATUS_STATE', 'BUILD_URL'
        ]
        missing_envs = []
        for env in required_env_vars:
            if not conf['env'].get(env):
                missing_envs.append(env)

        if missing_envs:
            print('Will skip setting status')
            print('Environment variable(s) required but not provided:')
            for env in missing_envs:
                print('\t ' + env)
            return

        self.update()
Example #8
    def parse_args(self):
        parser = Transport(self.argv, options=['--socket-location'])
        parser.catch_help = self._help
        parser.parse_args()
        location = parser.get('--socket-location') or '/tmp/pytest.sock'
        delgado.config['allowed'] = ['py.test']
        engine = Engine(socket_location=location)
        engine.run_forever()
Example #9
    def parse_args(self):
        options = ['--allowed']
        parser = Transport(self.argv, options=options)
        parser.catch_help = self._help
        parser.parse_args()
        delgado.config['allowed'] = parser.get('--allowed') or []
        engine = Engine(connection=self.connection)
        engine.run_forever()
Example #10
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            pkg = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(pkg)
Example #11
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            build_number = int(self.parser.unknown_commands[0])
        except (IndexError, ValueError):
            return self.parser.print_help()
        self.watch(build_number)
Example #12
class Repo(object):
    _help = dedent("""
    Operate on repositories on a remote chacra instance. Both `recreate` and
    `update` calls are not immediate. They rely on the async service managing
    repos which usually have a delay applied to them.

    Options:

    recreate        Mark a repository to be removed and created from scratch
                    again.
    update          Repository will get updated by running the repo tools on
                    it again.
    """)
    help_menu = "recreate, delete, or update repositories"
    options = ['recreate', 'update']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'repos'
        )

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    @retry()
    def post(self, url):
        exists = requests.head(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()
        logger.info('POST: %s', url)
        response = requests.post(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        response.raise_for_status()
        json = response.json()
        for k, v in json.items():
            logger.info("%s: %s", k, v)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get('recreate')
        update = self.parser.get('update')
        if recreate:
            url_part = os.path.join(recreate, 'recreate')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, 'update')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
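
# A sketch of how main() turns the CLI arguments into the POST URL; the base
# URL and repository path below are illustrative assumptions, not values from
# the original:
base_url = 'https://chacra.example.com/repos'
url_part = os.path.join('ceph/master/ubuntu/xenial', 'recreate')
print(os.path.join(base_url, url_part))
# -> https://chacra.example.com/repos/ceph/master/ubuntu/xenial/recreate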
Example #13
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            tarball = self.parser.unknown_commands[0]
        except IndexError:
            tarball = None
        bugstr = self.parser.get('--bug')
        self._run(tarball, bugstr)
Example #14
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
Example #15
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)
Example #16
class Hello(object):
    help_menu = 'test authentication to Jenkins'
    _help = """
Test authentication to Jenkins and return your user's fullName attribute.
"""
    name = 'hello'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Authenticate to Jenkins and print our username to STDOUT.
            Useful for checking that our authentication credentials are
            correct. """
        jenkins = util.jenkins_connection()
        # python-jenkins does not have syntactic support for "whoami" (the
        # "/me/api/json" endpoint), so we have to hit it and parse it
        # ourselves.
        # https://review.openstack.org/307896

        whoami_url = posixpath.join(jenkins.url, 'me/api/json')
        try:
            response = jenkins.jenkins_open(Request(whoami_url))
            data = json.loads(response)
        except JenkinsException as err:
            raise SystemExit(err)

        name = data['fullName']  # Our Jenkins instance gets this from LDAP
        try:
            jenkins_version = jenkins.get_version()
        except AttributeError:
            # python-jenkins older than 0.4.1 does not have get_version().
            version_url = jenkins.server
            try:
                response = urlopen(Request(version_url))
                if six.PY2:
                    jenkins_version = response.info().getheader('X-Jenkins')
                else:
                    jenkins_version = response.getheader('X-Jenkins')
            except (HTTPError, BadStatusLine) as err:
                raise SystemExit(err)
        print('Hello %s from Jenkins %s' % (name, jenkins_version))
Example #17
    def parse_args(self, argv=None):
        """ pass argv during testing """
        if argv is None:
            argv = self.argv
        options = [['--output', '-o']]
        parser = Transport(argv, options=options)
        parser.catch_help = self.help()
        parser.parse_args()
        self.source = util.infer_path(parser.unknown_commands)
        self.output = parser.get('--output', self.source + '-dvd.iso')
        self.check_dependency()
        self.make_iso()
        self.make_sha256sum()
Example #18
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get('recreate')
        update = self.parser.get('update')
        if recreate:
            url_part = os.path.join(recreate, 'recreate')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, 'update')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
Example #19
class Exists(object):
    _help = dedent("""
    Check if a given URL part exists already. Mainly does a HEAD request to the
    given endpoint. If the URL does not exist it will return a non-zero exit
    status (404).

    For example:

        chacractl exists binaries/ceph-deploy/master/debian/wheezy

    Positional Arguments:

    [URL]        The endpoint, starting with the full url part (sans fqdn)
    """)
    help_menu = "check if a given URL part exists already"
    options = []

    def __init__(self, argv):
        self.argv = argv
        self.base_url = chacractl.config['url']

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    @retry()
    def head(self, url):
        logger.info('HEAD: %s', url)
        exists = requests.head(url,
                               auth=chacractl.config['credentials'],
                               verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
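
# A usage sketch based on the example in the class docstring; the exact shape
# of the argv list depends on how the chacractl dispatcher invokes this class,
# so treat it as an assumption for illustration:
Exists(['exists', 'binaries/ceph-deploy/master/debian/wheezy']).main()
# this HEADs <base_url>/binaries/ceph-deploy/master/debian/wheezy/ and exits
# non-zero (via the requests_errors handler) when the endpoint returns 404.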
Example #20
class Repo(object):
    _help = dedent(
        """
    Operate on repositories on a remote chacra instance. Both `recreate` and
    `update` calls are not immediate. They rely on the async service managing
    repos which usually have a delay applied to them.

    Options:

    recreate        Mark a repository to be removed and created from scratch
                    again.
    update          Repository will get updated by running the repo tools on
                    it again.
    """
    )
    help_menu = "recreate, delete, or update repositories"
    options = ["recreate", "update"]

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(chacractl.config["url"], "repos")

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    def post(self, url):
        exists = requests.head(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        exists.raise_for_status()
        logger.info("POST: %s", url)
        response = requests.post(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        response.raise_for_status()
        json = response.json()
        for k, v in json.items():
            logger.info("%s: %s", k, v)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get("recreate")
        update = self.parser.get("update")
        if recreate:
            url_part = os.path.join(recreate, "recreate")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, "update")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
Example #21
class Exists(object):
    _help = dedent("""
    Check if a given URL part exists already. Mainly does a HEAD request to the
    given endpoint. If the URL does not exist it will return a non-zero exit
    status (404).

    For example:

        chacractl exists binaries/ceph-deploy/master/debian/wheezy

    Positional Arguments:

    [URL]        The endpoint, starting with the full url part (sans fqdn)
    """)
    help_menu = "check if a given URL part exists already"
    options = []

    def __init__(self, argv):
        self.argv = argv
        self.base_url = chacractl.config['url']

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    def head(self, url):
        logger.info('HEAD: %s', url)
        exists = requests.head(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
Example #22
class BaseBackend(base.BaseCommand):

    options = []
    parser = None

    def parse_args(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self.path = util.infer_path(self.parser.unknown_commands)
        self.check_dependency()
        self.sign()

    def sign(self):
        raise NotImplementedError()
Example #23
class Build(object):
    help_menu = "build a package in Jenkins"
    _help = """
Build a package in Jenkins.
"""
    name = "build"

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith("patch-queue/"):
            log.error("%s a patch-queue branch" % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info(
            "building %s branch %s at %s", pkg_name, branch_name, posixpath.join(jenkins.url, "job", "build-package")
        )
        job_params = {"PKG_NAME": pkg_name, "BRANCH": branch_name}

        if self._has_broken_build_job():
            jenkins.build_job = types.MethodType(_build_job_fixed, jenkins)

        jenkins.build_job("build-package", parameters=job_params, token=jenkins.password)

    def _has_broken_build_job(self):
        # Ubuntu Trusty ships python-jenkins 0.2.1-0ubuntu1, and this version
        # has a broken build_job() method. See
        # https://bugs.launchpad.net/bugs/1177831 .
        # This bug was fixed in python-jenkins v0.3.2 upstream.
        v = get_distribution("python_jenkins").version
        return parse_version(v) < parse_version("0.3.2")
Example #24
class Build(object):
    help_menu = 'build a package in Jenkins'
    _help = """
Build a package in Jenkins.
"""
    name = 'build'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith('patch-queue/'):
            log.error('%s is a patch-queue branch' % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info('building %s branch %s at %s', pkg_name, branch_name,
                 posixpath.join(jenkins.url, 'job', 'build-package'))
        job_params = {'PKG_NAME': pkg_name, 'BRANCH': branch_name}

        if self._has_broken_build_job():
            jenkins.build_job = types.MethodType(_build_job_fixed, jenkins)

        jenkins.build_job('build-package', parameters=job_params,
                          token=jenkins.password)

    def _has_broken_build_job(self):
        # Ubuntu Trusty ships python-jenkins 0.2.1-0ubuntu1, and this version
        # has a broken build_job() method. See
        # https://bugs.launchpad.net/bugs/1177831 .
        # This bug was fixed in python-jenkins v0.3.2 upstream.
        v = get_distribution('python_jenkins').version
        return parse_version(v) < parse_version('0.3.2')
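
# The version guard above relies on pkg_resources' version parsing; a quick
# sketch of the comparison it performs, using versions mentioned in the
# comments of these examples:
from pkg_resources import parse_version
assert parse_version('0.2.1') < parse_version('0.3.2')        # Trusty: broken
assert not (parse_version('0.4.1') < parse_version('0.3.2'))  # newer: fixed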
Example #25
    def parse_args(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self.path = util.infer_path(self.parser.unknown_commands)
        self.check_dependency()
        self.sign()
Example #26
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Example #27
    def parse_args(self):
        transport = Transport(self.argv, check_help=False)
        transport.catch_help = self.__doc__
        if len(self.argv) <= 1:
            transport.print_help()
        transport.parse_args()

        for action in self.actions:
            if transport.has(action):
                return self.actions.get(action)()

        # If nothing matches, print the help
        transport.print_help()
Example #28
    def main(self):
        options = ['--ignore']
        config_ignores = ceph_medic.config.file.get_list('check', '--ignore')
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()
        parser.parse_args()
        ignored_codes = as_list(parser.get('--ignore', ''))
        # fallback to the configuration if nothing is defined in the CLI
        if not ignored_codes:
            ignored_codes = config_ignores

        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config.nodes.keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config.nodes.items():
            for node in nodes:
                node_metadata = {'host': node['host']}
                if 'container' in node:
                    node_metadata['container'] = node['container']
                ceph_medic.metadata['nodes'][daemon].append(node_metadata)

        collector.collect()
        test = runner.Runner()
        test.ignore = ignored_codes
        results = test.run()
        runner.report(results)
        # XXX might want to make this configurable to not bark on warnings,
        # for example; setting forcefully for now, but the results object
        # doesn't make a distinction between error and warning (!)
        if results.errors or results.warnings:
            sys.exit(1)
Example #29
class Clone(object):
    help_menu = 'clone a package from dist-git'
    _help = """
Clone a package from dist-git. Your SSH key must be set up in Gerrit.

Positional Arguments:

[package]  The name of the package to clone.
"""
    name = 'clone'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            pkg = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(pkg)

    def help(self):
        return self._help

    def _run(self, pkg):
        """ Clone a package from dist-git. """
        if os.path.exists(pkg):
            raise SystemExit('%s already exists in current working directory.'
                             % pkg)
        configp = util.config()
        try:
            user = configp.get('rhcephpkg', 'user')
            gitbaseurl = configp.get('rhcephpkg', 'gitbaseurl')
        except configparser.Error as err:
            raise SystemExit('Problem parsing .rhcephpkg.conf: %s'
                             % err.message)
        # TODO: SafeConfigParser might make the "user" interpolation here
        # unnecessary? Need to test, particularly what it does to %(module).
        pkg_url = gitbaseurl % {'user': user, 'module': pkg}
        cmd = ['git', 'clone', pkg_url]
        subprocess.check_call(cmd)
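
# A sketch of the gitbaseurl interpolation in _run(); the template, user, and
# package name are illustrative assumptions, not values from a real
# .rhcephpkg.conf:
gitbaseurl = 'ssh://%(user)s@git.example.com/ubuntu/%(module)s'
pkg_url = gitbaseurl % {'user': 'alice', 'module': 'ceph'}
print(pkg_url)  # ssh://alice@git.example.com/ubuntu/ceph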
Example #30
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            build_number = int(self.parser.unknown_commands[0])
        except (IndexError, ValueError):
            return self.parser.print_help()
        self.watch(build_number)
Example #31
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            pkg = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(pkg)
Example #32
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()

        # Allow user to override the distro.
        if self.parser.has('--dist'):
            if self.parser.get('--dist') is None:
                raise SystemExit('Specify a distro to --dist')
            distro = self.parser.get('--dist')
        else:
            distro = get_distro()

        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()

        self._run(distro)
Example #33
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
Example #34
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)
Example #35
    def main(self):
        options = ['--ignore']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()
        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config.nodes.keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config.nodes.items():
            for node in nodes:
                ceph_medic.metadata['nodes'][daemon].append(
                    {'host': node['host']})

        collector.collect()
        test = runner.Runner()
        results = test.run()
        runner.report(results)
        # XXX might want to make this configurable to not bark on warnings,
        # for example; setting forcefully for now, but the results object
        # doesn't make a distinction between error and warning (!)
        if results.errors or results.warnings:
            sys.exit(1)
Example #36
    def main(self):
        options = ['--stdout']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()

        if len(self.argv) == 1:
            raise SystemExit(
                "A monitor hostname or a ceph.conf file is required as an argument"
            )

        node = self.argv[-1]
        inventory = {}

        with get_connection(node) as conn:
            report = get_mon_report(conn)
            try:
                mons = report['monmap']['mons']
            except KeyError:
                raise SystemExit(report)
            inventory['mons'] = [i['name'] for i in mons]
            osds = report['osd_metadata']
            inventory['osds'] = [i['hostname'] for i in osds]

        if not inventory:
            raise SystemExit(
                'no hosts were found from remote monitor node: %s' % node)

        generate_inventory(inventory, to_stdout=parser.get('--stdout'))
        conn.exit()
        return
Example #37
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()

        # handle posting projects:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but it is not yet enabled from the CLI
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Example #38
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get("recreate")
        update = self.parser.get("update")
        if recreate:
            url_part = os.path.join(recreate, "recreate")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, "update")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
Example #39
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()

        # FIXME: stop hardcoding trusty. Use the git branch name instead,
        # translating "-ubuntu" into this local computer's own distro.
        distro = 'trusty'
        # Allow user to override the distro.
        if self.parser.has('--dist'):
            if self.parser.get('--dist') is None:
                raise SystemExit('Specify a distro to --dist')
            distro = self.parser.get('--dist')

        self._run(distro)
Example #40
    def parse_args(self, argv):
        """
        Main method for parsing arguments; it uses whatever `argv` is, although
        it should always be a list. Once it goes through the ``Transport`` class
        it tries to generate the help from the mapped classes and the current
        docstring for this module.

        If nothing matches it will return the help.
        """

        self.get_extend_commands()

        transport = Transport(argv, self.mapper)
        transport.catch_help = "%s \n%s" % (__doc__, transport.subhelp())
        transport.catch_version = "dozo version {0}".format(__version__)
        if len(self.argv) <= 1:
            transport.print_help()
        transport.dispatch()
Example #41
    def main(self):
        parser = Transport(self.arguments, options=self.options, check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        if not parser.unknown_commands:
            log.error("it is required to pass an identifer, but none was provided")
            raise SystemExit(1)
        self.identifier = parser.unknown_commands[-1]
        if parser.has('--poll'):
            return self.poll()

        for key in [
                'stdout', 'stderr', 'command', 'ended',
                'started', 'succeeded', 'exit_code']:
            if parser.has(key):
                return self.get(key)

        # if nothing else matches, just try to give a generic, full summary
        self.summary()
Example #42
    def main(self):
        parser = Transport(self.arguments,
                           options=self.options,
                           check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        if not parser.unknown_commands:
            log.error(
                "it is required to pass an identifer, but none was provided")
            raise SystemExit(1)
        self.identifier = parser.unknown_commands[-1]
        if parser.has('--poll'):
            return self.poll()

        for key in [
                'stdout', 'stderr', 'command', 'ended', 'started', 'succeeded',
                'exit_code'
        ]:
            if parser.has(key):
                return self.get(key)

        # if nothing else matches, just try to give a generic, full summary
        self.summary()
Example #43
    def main(self):
        parser = Transport(self.arguments, options=self.options, check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        branch = parser.get('--branch', 'master')
        user = parser.get('--user', 'vagrant')
        high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
        if not parser.unknown_commands:
            log.error("it is required to pass a host to deploy to, but none was provided")
            raise SystemExit(1)

        command = [
            "ansible-playbook",
            "-i", "%s," % parser.unknown_commands[-1],
            high_verbosity,
            "-u", user,
            "--extra-vars", 'branch=%s' % branch,
            "deploy.yml",
        ]
        log.debug("Running command: %s" % ' '.join(command))
        out, err, code = process.run(command, cwd=playbook_path)
        log.error(err)
        log.debug(out)
Example #44
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()

        # handle posting projects:
        if self.parser.has("create"):
            url_part = self.sanitize_url(self.parser.get("create"))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info("reading input from stdin")
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but it is not yet enabled from the CLI
        elif self.parser.has("delete"):
            url_part = self.sanitize_url(self.parser.get("delete"))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Example #45
    def main(self):
        options = ['--ignore']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()
        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config['nodes'].keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config['nodes'].items():
            for node in nodes:
                ceph_medic.metadata['nodes'][daemon].append(
                    {'host': node['host']})

        collector.collect()
        test = runner.Runner()
        results = test.run()
        runner.report(results)
Example #46
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        Lines may come with newlines and leading slashes; make sure
        they are clean so that they can be processed.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def post(self, url, filepath):
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])

        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            with open(filepath, 'rb') as binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()

    def put(self, url, filepath):
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        with open(filepath, 'rb') as binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def delete(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Example #47
    def main(self, argv):
        options = [
            '--cluster',
            '--ssh-config',
            '--inventory',
            '--config',
            '--verbosity',
        ]
        parser = Transport(argv,
                           options=options,
                           check_help=False,
                           check_version=False)
        parser.parse_args()

        self.config_path = parser.get('--config', configuration.location())

        # load medic configuration
        loaded_config = configuration.load(
            path=parser.get('--config', self.config_path))

        # this is the earliest we can have enough config to setup logging
        log.setup(loaded_config)
        ceph_medic.config.file = loaded_config
        global_options = dict(ceph_medic.config.file._sections['global'])

        # SSH config
        ceph_medic.config.ssh_config = parser.get(
            '--ssh-config', global_options.get('--ssh-config'))
        if ceph_medic.config.ssh_config:
            ssh_config_path = ceph_medic.config.ssh_config
            if not os.path.exists(ssh_config_path):
                terminal.error("the given ssh config path does not exist: %s" %
                               ssh_config_path)
                sys.exit()

        ceph_medic.config.cluster_name = parser.get('--cluster', 'ceph')
        ceph_medic.metadata['cluster_name'] = 'ceph'

        # Deployment Type
        deployment_type = ceph_medic.config.file.get_safe(
            'global', 'deployment_type', 'baremetal')
        if deployment_type in ['kubernetes', 'openshift', 'k8s', 'oc']:
            pod_hosts = hosts.container_platform(deployment_type)
            ceph_medic.config.nodes = pod_hosts
            ceph_medic.config.hosts_file = ':memory:'
            self.hosts_file = ':memory:'
        else:
            # Hosts file
            self.hosts_file = parser.get('--inventory',
                                         configuration.get_host_file())

            # find the hosts file: check the CLI first, fall back to the
            # configuration file, and lastly, if none of those are found or
            # defined, try to load from well-known locations (cwd and
            # /etc/ansible/)
            loaded_hosts = configuration.load_hosts(
                parser.get('--inventory',
                           global_options.get('--inventory', self.hosts_file)))
            ceph_medic.config.nodes = loaded_hosts.nodes
            ceph_medic.config.hosts_file = loaded_hosts.filename
            self.hosts_file = loaded_hosts.filename

        parser.catch_version = ceph_medic.__version__
        parser.mapper = self.mapper
        parser.catch_help = self.help(parser.subhelp())
        if len(argv) <= 1:
            return parser.print_help()
        ceph_medic.config.config_path = self.config_path
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()

        # Verbosity
        verbosity = parser.get('--verbosity', 'debug')
        ceph_medic.config.verbosity = verbosity.lower()
Example #48
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and our
Ubuntu packages.

Options:
--force    Do a hard reset, rather than restricting to fast-forward merges
           only. Use this option if the RHEL patches branch was amended or
           rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)

        # Do the merge
        if current_branch == patch_queue_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
            if force:
                # Do a hard reset on HEAD instead.
                cmd = ['git', 'reset', '--hard',
                       'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            util.ensure_patch_queue_branch()
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch, patch_queue_branch)]
            if force:
                # Do a hard push (with "+") instead.
                cmd = ['git', 'push', '.', '+patches/%s:%s' %
                       (rhel_patches_branch, patch_queue_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123
        """
        (product, version, distro) = debian_branch.split('-', 2)
        suffix = None
        if '-' in distro:
            (distro, suffix) = distro.split('-', 1)
        rhel = '%s-%s-rhel-patches' % (product, version)
        if suffix is not None:
            rhel = '%s-%s' % (rhel, suffix)
        return rhel
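
# The mappings documented in get_rhel_patches_branch() can be checked directly;
# a small sketch using the docstring's own examples:
mp = MergePatches(['merge-patches'])
assert mp.get_rhel_patches_branch('ceph-2-ubuntu') == 'ceph-2-rhel-patches'
assert mp.get_rhel_patches_branch('ceph-1.3-ubuntu') == 'ceph-1.3-rhel-patches'
assert (mp.get_rhel_patches_branch('ceph-2-ubuntu-hotfix-bz123') ==
        'ceph-2-rhel-patches-hotfix-bz123')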
Example #49
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        Lines may come with newlines and leading slashes; make sure
        they are clean so that they can be processed.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def load_file(self, filepath):
        chsum = sha512()
        binary = open(filepath, 'rb')
        for chunk in iter(lambda: binary.read(4096), b''):
            chsum.update(chunk)
        binary.seek(0)
        return binary, chsum.hexdigest()

    def upload_is_verified(self, arch_url, filename, digest):
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                    'Checksum mismatch: server has wrong checksum for %s',
                    filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    def post(self, url, filepath):
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])

        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            binary, digest = self.load_file(filepath)
            with binary:
                response = requests.post(
                        url,
                        files={'file': binary},
                        auth=chacractl.config['credentials'],
                        verify=chacractl.config['ssl_verify'])
                if response.status_code > 201:
                    logger.warning("%s -> %s", response.status_code, response.text)
                    response.raise_for_status()
            # digest is only defined for a brand-new POST, so verify here,
            # inside the 404 branch.
            if not self.upload_is_verified(url, filename, digest):
                # Since this is a new file, attempt to delete it
                logging.error('Deleting corrupted file from server...')
                self.delete(file_url)
                raise SystemExit(
                        'Checksum mismatch: remote server has wrong checksum for %s'
                        % filepath)
        else:
            # Any other status from the HEAD request (auth failure, server
            # error, ...) is unexpected; surface it instead of continuing.
            exists.raise_for_status()

    def put(self, url, filepath):
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        binary, digest = self.load_file(filepath)
        with binary:
            response = requests.put(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
            response.raise_for_status()
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                    'Checksum mismatch: server has wrong checksum for %s!'
                    % filepath)

    def delete(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
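
# A small standalone sketch (not part of the original class) of how the URLs
# above are composed by base_url(), sanitize_url() and post(). The server
# address and the project/ref/distro path are hypothetical placeholders.
import os

base_url = os.path.join('https://chacra.example.com', 'binaries')
url_part = '/ceph/master/ubuntu/xenial/x86_64'.lstrip('/') + '/'     # sanitize_url()
arch_url = os.path.join(base_url, url_part)                          # main()
file_url = os.path.join(arch_url, 'ceph_12.2.0.orig.tar.gz') + '/'   # post()

print(arch_url)  # https://chacra.example.com/binaries/ceph/master/ubuntu/xenial/x86_64/
print(file_url)  # .../x86_64/ceph_12.2.0.orig.tar.gz/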
Exemplo n.º 50
0
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and our
Ubuntu packages.

Options:
--force    Do a hard reset, rather than restricting to fast-forward merges
           only. Use this option if the RHEL patches branch was amended or
           rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patches_branch = util.current_patches_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)

        # Do the merge
        if current_branch == patches_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
            if force:
                # Do a hard reset on HEAD instead.
                cmd = ['git', 'reset', '--hard',
                       'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch, patches_branch)]
            if force:
                # Do a hard push (with "+") instead.
                cmd = ['git', 'push', '.', '+patches/%s:%s' %
                       (rhel_patches_branch, patches_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123
        """
        (product, version, distro) = debian_branch.split('-', 2)
        suffix = None
        if '-' in distro:
            (distro, suffix) = distro.split('-', 1)
        rhel = '%s-%s-rhel-patches' % (product, version)
        if suffix is not None:
            rhel = '%s-%s' % (rhel, suffix)
        return rhel
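
# A standalone walk-through (not part of the original class) of the branch
# mapping in get_rhel_patches_branch(), using the docstring's
# "ceph-2-ubuntu-hotfix-bz123" example so the intermediate values are visible.
debian_branch = 'ceph-2-ubuntu-hotfix-bz123'
product, version, distro = debian_branch.split('-', 2)    # 'ceph', '2', 'ubuntu-hotfix-bz123'
suffix = None
if '-' in distro:
    distro, suffix = distro.split('-', 1)                 # 'ubuntu', 'hotfix-bz123'
rhel = '%s-%s-rhel-patches' % (product, version)          # 'ceph-2-rhel-patches'
if suffix is not None:
    rhel = '%s-%s' % (rhel, suffix)
print(rhel)  # ceph-2-rhel-patches-hotfix-bz123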
Exemplo n.º 51
0
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()
Exemplo n.º 52
0
class Patch(object):
    help_menu = 'apply patches from patch-queue branch'
    _help = """
Generate patches from a patch-queue branch.

Options:
--nobz    Do not require "Resolves: rhbz#" for every patch. The default is to
          require them. Use this CLI option to override the default.
"""
    name = 'patch'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--nobz', )

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Generate quilt patch series with gbp pq, and update d/rules """

        # Determine the names of the patch-queue branch and debian branch
        current_branch = util.current_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        debian_branch = util.current_debian_branch()

        # TODO: default to fetching from upstream, the way rdopkg patch does.

        # Get the new sha1 to insert into the $COMMIT variable in d/rules
        cmd = ['git', 'rev-parse', patch_queue_branch]
        output = subprocess.check_output(cmd)
        patch_queue_sha1 = output.rstrip()
        if six.PY3:
            patch_queue_sha1 = output.decode('utf-8').rstrip()

        # Switch to "debian" branch if necessary
        if current_branch != debian_branch:
            cmd = ['git', 'checkout', debian_branch]
            subprocess.check_call(cmd)

        # Get the original (old) patch series
        old_series = self.read_series_file('debian/patches/series')
        old_subjects = [patch.subject for patch in old_series]

        # Git-buildpackage pq operation
        cmd = ['gbp', 'pq', 'export']
        subprocess.check_call(cmd)

        # Add all patch files to Git's index
        cmd = ['git', 'add', '--all', 'debian/patches']
        subprocess.check_call(cmd)

        # Bail early if gbp pq did nothing.
        if not self.read_git_debian_patches_status():
            print('No new patches, quitting.')
            raise SystemExit(1)

        # Replace $COMMIT sha1 in d/rules
        old_sha1 = read_commit()
        if old_sha1:
            rules = read_rules_file()
            with open('debian/rules', 'w') as fileh:
                fileh.write(rules.replace(old_sha1, patch_queue_sha1))

        # Get the new patch series
        new_series = self.read_series_file('debian/patches/series')
        # Select only the ones that are new (according to commit subjects)
        new_series = [p for p in new_series if p.subject not in old_subjects]

        if not new_series:
            # Maybe we rewrote some patch files in place?
            # Check Git itself for changed files:
            new_series = self.read_git_debian_patches()

        # Add patch entries to d/changelog
        changelog = self.generate_changelog(new_series)
        try:
            ensure_bzs(changelog)
        except BzNotFound:
            if not self.parser.has('--nobz'):
                raise
        util.bump_changelog(changelog)

        # Assemble a standard commit message string "clog".
        clog = "debian: %s\n" % util.get_deb_version()
        clog += "\n"
        clog += "Add patches from %s\n" % patch_queue_branch
        clog += "\n"
        clog += util.format_changelog(changelog)

        # Commit everything with the standard commit message.
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(clog)
            temp.flush()
            cmd = [
                'git', 'commit', 'debian/changelog', 'debian/patches',
                'debian/rules', '-F', temp.name
            ]
            subprocess.check_call(cmd)

        # Summarize this commit on STDOUT for the developer.
        # (This matches the behavior of "rdopkg patch".)
        cmd = ['git', '--no-pager', 'log', '--name-status', 'HEAD~..HEAD']
        subprocess.check_call(cmd)

    def generate_changelog(self, series):
        """
        Generate a list of changelog entries for this gbp Patch series.

        :return: a list of strings
        """
        changelog = []
        for p in series:
            # If there was some in-place Git modification for this patch,
            # (.git_action attribute), include that in our log.
            action = getattr(p, 'git_action', 'A')
            # Make common actions human-readable:
            if action == 'M':
                action = 'Modified'
            if action == 'D':
                action = 'Deleted'
            if action == 'R':
                # We don't log .patch file renames
                continue
            change = '%s %s' % (action, p.path)
            if action == 'A':
                # This was a simple patch addition, so just log the patch's
                # subject.
                change = p.subject
            bzs = self.get_rhbzs(p)
            bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
            if bzstr != '':
                change += ' (%s)' % bzstr
            changelog.append(change)
        return changelog

    def get_rhbzs(self, patch):
        """
        Return all RHBZ numbers from a Patch's subject and body.
        :param patch: a ``gbp.patch_series.Patch`` object
        """
        bzs = re.findall(BZ_REGEX, patch.subject)
        body = patch.long_desc
        try:
            if patch.git_action == 'D':
                # patch.long_desc will be empty.
                # Read the deleted file's description from Git instead.
                body = self.read_deleted_patch_description(patch.path)
        except AttributeError:
            # This was a simple patch addition, so we'll just search this
            # patch's .long_desc.
            pass
        bzs.extend(re.findall(BZ_REGEX, body))
        return bzs

    def read_series_file(self, file_):
        return gbp.patch_series.PatchSeries.read_series_file(file_)

    def read_git_debian_patches_status(self):
        """
        Return a list of all edited Debian patch files (from "git status").

        :return: a list of action/filename pairs. For example:
                 [
                   ['M', 'debian/patches/0001-foo.patch'],
                   ['D', 'debian/patches/0002-bar.patch'],
                 ]
        """
        cmd = ['git', 'status', '-s', 'debian/patches/']
        output = subprocess.check_output(cmd)
        if six.PY3:
            output = output.decode('utf-8')
        result = []
        for line in output.splitlines():
            if line.endswith('.patch'):
                result.append(line.split(None, 1))
        return result

    def read_git_debian_patches(self):
        """
        Load all edited Debian patches (from "git status") into Patch objects.

        The returned Patch objects have an extra ".git_action" attribute. Use
        this to determine what happened to the patch in Git.

        :return: a list of gbp.patch_series.Patch objects
        """
        patches = []
        for (action, filename) in self.read_git_debian_patches_status():
            patch = gbp.patch_series.Patch(filename)
            # Hack: record what happened to this patch file:
            patch.git_action = action
            patches.append(patch)
        return patches

    def read_deleted_patch_description(self, filename):
        """
        Parse a deleted .patch file with gbp.patch_series.Patch.

        For deleted .patch files, most of the gbp.patch_series.Patch
        attributes from read_git_debian_patches() are empty, because the file
        no longer exists. This requires some more hackery to recover the
        original .long_desc so we can still extract the original RHBZ numbers.

        :returns: ``str``, the long_desc attribute.
        """
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            cmd = ['git', 'show', 'HEAD:%s' % filename]
            subprocess.call(cmd, stdout=temp)
            temp.flush()
            temppatch = gbp.patch_series.Patch(temp.name)
            temppatch._read_info()  # XXX internal API here :(
            return temppatch.long_desc
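
# BZ_REGEX is used above but not defined in this snippet. A plausible pattern
# and a tiny, standalone demonstration of the get_rhbzs()-style extraction
# follow; the pattern and the bug numbers are illustrative assumptions, not
# necessarily what the real module uses.
import re

BZ_REGEX = r'rhbz#(\d+)'  # assumed pattern, for illustration only

subject = 'mon: fix quorum handling (rhbz#1414321)'
long_desc = 'Backport of an upstream fix.\nResolves: rhbz#1450007'

bzs = re.findall(BZ_REGEX, subject)
bzs.extend(re.findall(BZ_REGEX, long_desc))
print(bzs)  # ['1414321', '1450007']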
Exemplo n.º 53
0
class Patch(object):
    help_menu = 'apply patches from patch-queue branch'
    _help = """
Generate patches from a patch-queue branch.

"""
    name = 'patch'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Generate quilt patch series with gbp pq, and update d/rules """

        # Determine the names of the patch-queue branch and debian branch
        current_branch = util.current_branch()
        patches_branch = util.current_patches_branch()
        debian_branch = util.current_debian_branch()

        # TODO: default to fetching from upstream, the way rdopkg patch does.

        # Get the new sha1 to insert into the $COMMIT variable in d/rules
        cmd = ['git', 'rev-parse', patches_branch]
        patches_sha1 = subprocess.check_output(cmd).rstrip()

        # Switch to "debian" branch if necessary
        if current_branch != debian_branch:
            cmd = ['git', 'checkout', debian_branch]
            subprocess.check_call(cmd)

        # Get the original (old) patch series
        old_series = self.read_series_file('debian/patches/series')
        old_subjects = [p.subject for p in old_series]

        # Git-buildpackage pq operation
        cmd = ['gbp', 'pq', 'export']
        subprocess.check_call(cmd)

        # Add all patch files to Git's index
        cmd = ['git', 'add', '--all', 'debian/patches']
        subprocess.check_call(cmd)

        # Replace $COMMIT sha1 in d/rules
        with open('debian/rules') as rules:
            rules_file = rules.read()
        old = r'export COMMIT=[0-9a-f]{40}'
        new = 'export COMMIT=%s' % patches_sha1
        with open('debian/rules', 'w') as fileh:
            fileh.write(re.sub(old, new, rules_file))

        # Get the new patch series
        new_series = self.read_series_file('debian/patches/series')

        # Add patch entries to d/changelog
        changelog = []
        for p in new_series:
            if p.subject in old_subjects:
                continue
            change = p.subject
            bzs = self.get_rhbzs(p)
            bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
            if bzstr != '':
                change += ' (%s)' % bzstr
            changelog.append(change)
        util.bump_changelog(changelog)

        # Assemble a standard commit message string "clog".
        clog = "debian: %s\n" % util.get_deb_version()
        clog += "\n"
        clog += "Add patches from %s\n" % patches_branch
        clog += "\n"
        clog += util.format_changelog(changelog)

        # Commit everything with the standard commit message.
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(clog)
            temp.flush()
            cmd = [
                'git', 'commit', 'debian/changelog', 'debian/patches',
                'debian/rules', '-F', temp.name
            ]
            subprocess.check_call(cmd)

        # Summarize this commit on STDOUT for the developer.
        # (This matches the behavior of "rdopkg patch".)
        cmd = ['git', '--no-pager', 'log', '--name-status', 'HEAD~..HEAD']
        subprocess.check_call(cmd)

    def get_rhbzs(self, patch):
        bzs = re.findall(BZ_REGEX, patch.subject)
        bzs.extend(re.findall(BZ_REGEX, patch.long_desc))
        return bzs

    def read_series_file(self, file_):
        try:
            from gbp.patch_series import PatchSeries
            return PatchSeries.read_series_file(file_)
        except ImportError:
            log.warning('Please run "sudo apt-get install '
                        'git-buildpackage" to write the patches to '
                        './debian/changelog')
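
# A standalone sketch of the $COMMIT substitution in d/rules shown above,
# applied to a made-up debian/rules line; both sha1 values are placeholders.
import re

rules_file = 'export COMMIT=0123456789abcdef0123456789abcdef01234567\n'
patches_sha1 = '89abcdef0123456789abcdef0123456789abcdef'

old = r'export COMMIT=[0-9a-f]{40}'
new = 'export COMMIT=%s' % patches_sha1
print(re.sub(old, new, rules_file))
# export COMMIT=89abcdef0123456789abcdef0123456789abcdef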
Exemplo n.º 54
0
    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()
Exemplo n.º 55
0
    def parse_args(self):
        options = ['create', 'update', 'generate', 'remove', 'get']
        parser = Transport(self.argv, options=options)
        parser.catch_help = self._help
        parser.parse_args()

        if parser.has('create'):
            return self.create(parser.get('create'))

        if parser.has('update'):
            optional_args = ['key', 'step', 'secret', 'b32']
            items = [i for i in parser.arguments if i in optional_args]
            return self.update(parser.get('update'), items)

        if parser.has('generate'):
            return self.generate()

        if parser.has('remove'):
            return self.remove(parser.get('remove'))

        if parser.has('get'):
            items = [i for i in parser.arguments if i in ['pin']]
            return self.get(parser.get('get'), items)
Exemplo n.º 56
0
    def main(self, argv):
        parser = Transport(argv, mapper=self.mapper,
                           check_help=False,
                           check_version=False)
        parser.parse_args()
        parser.catch_help = self.help()
        parser.catch_version = wari.__version__
        parser.mapper = self.mapper
        if len(argv) <= 1:
            return parser.print_help()

        # create the connection and set the collection
        conn = wari.db.get_connection()
        wari.db.connection = conn
        wari.db.collection = conn['wari']

        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
        conn.close()
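
# The self.mapper used above is presumably a dict that maps subcommand names
# to handler classes following the same __init__(argv)/main() pattern as the
# other examples here. A hypothetical sketch (class names invented):
class Frobnicate(object):
    def __init__(self, argv):
        self.argv = argv

    def main(self):
        print('frobnicating with argv: %s' % self.argv)


class Cleanup(object):
    def __init__(self, argv):
        self.argv = argv

    def main(self):
        print('cleaning up with argv: %s' % self.argv)


mapper = {'frobnicate': Frobnicate, 'cleanup': Cleanup}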
Exemplo n.º 57
0
class Localbuild(object):
    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:
--dist    "xenial" or "trusty". If unspecified, rhcephpkg will choose one
          based on the current branch's name.

  Rules for automatic distro selection:

    1) If the branch suffix is an ubuntu distro name, use that.
       eg "ceph-3.0-xenial".
    2) If a branch has a version number starting with "1.3", return "trusty".
       eg. "ceph-1.3-ubuntu"
    3) If a branch has a version number starting with "2" return "xenial".
       eg. "ceph-2-ubuntu"
    4) If a branch has a version number starting with "3" return "xenial".
       eg. "ceph-3.0-ubuntu"
    5) Otherwise raise, because we need to add more rules.
"""
    name = 'localbuild'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--dist', )

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()

        # Allow user to override the distro.
        if self.parser.has('--dist'):
            if self.parser.get('--dist') is None:
                raise SystemExit('Specify a distro to --dist')
            distro = self.parser.get('--dist')
        else:
            distro = get_distro()

        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()

        self._run(distro)

    def help(self):
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        pkg_name = util.package_name()

        os.environ['BUILDER'] = 'pbuilder'
        j_arg = self._get_j_arg(cpu_count())
        pbuilder_cache = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro

        setup_pbuilder_cache(pbuilder_cache, distro)

        util.setup_pristine_tar_branch()

        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        cmd = [
            'gbp', 'buildpackage',
            '--git-dist=%s' % distro, '--git-arch=amd64', '--git-verbose',
            '--git-pbuilder', j_arg, '-us', '-uc'
        ]

        log.info('building %s with pbuilder', pkg_name)
        subprocess.check_call(cmd)

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """
        Returns a string like "-j4" or "-j8". j is the number of processors,
        with a maximum of x, where x = TOTAL_RAM_GB / 4.

        We want to use all our processors (a high "j" value), but the build
        process will fail with an "out of memory" error if this j value is
        too high.

        An 8 GB system would have a maximum of -j2
        A 16 GB system would have a maximum of -j4
        A 32 GB system would have a maximum of -j8
        """
        if total_ram_gb is None:
            page_size = os.sysconf('SC_PAGE_SIZE')
            mem_bytes = page_size * os.sysconf('SC_PHYS_PAGES')
            # mem_gib is a decimal, eg. 7.707 on 8GB system
            mem_gib = mem_bytes / (1024.**3)
            # Round up to the nearest GB for our purposes.
            total_ram_gb = math.ceil(mem_gib)
        number = min(cpus, total_ram_gb / 4)
        return '-j%d' % max(number, 1)
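
# A quick standalone check of the "-j" heuristic above, using the RAM figures
# from the docstring; the cpu counts are hypothetical.
def get_j_arg(cpus, total_ram_gb):
    number = min(cpus, total_ram_gb / 4)
    return '-j%d' % max(number, 1)


print(get_j_arg(cpus=8, total_ram_gb=8))    # -j2
print(get_j_arg(cpus=16, total_ram_gb=16))  # -j4
print(get_j_arg(cpus=4, total_ram_gb=32))   # -j4, capped by the cpu count
print(get_j_arg(cpus=1, total_ram_gb=2))    # -j1, never less than -j1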
Exemplo n.º 58
0
    def main(self, argv):
        options = []
        parser = Transport(argv,
                           mapper=self.mapper,
                           options=options,
                           check_help=False,
                           check_version=False)
        parser.parse_args()
        parser.catch_help = self.help()
        parser.catch_version = rhcephpkg.__version__
        parser.mapper = self.mapper
        if len(argv) <= 1:
            return parser.print_help()
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
Exemplo n.º 59
0
class Gitbz(object):
    help_menu = 'verify each RHBZ in the last Git commit message'
    _help = """
Verify that each RHBZ in the last Git commit message is approved for this
release.

If the commit message lacks any RHBZ number, or any RHBZs do not correspond to
this release (dist-git branch), then this command exits with a non-zero exit
code.

Requires a cached login to bugzilla (`bugzilla login` command).

This tool mimics the validation that the internal "gitbz" tool provides for
RHEL dist-git.
"""
    name = 'gitbz'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        debian_branch = util.current_debian_branch()
        try:
            flag = release_flag(debian_branch)
        except ValueError:
            raise SystemExit('could not parse debian branch "%s".' %
                             debian_branch)

        msg = last_commit_message()
        bzids = find_bzs(msg)

        if not bzids:
            raise SystemExit('no BZs found')

        bzapi = get_bzapi()
        bugs = bzapi.getbugs(bzids,
                             include_fields=['id', 'flags'],
                             permissive=False)
        missing = []
        for bug in bugs:
            has_release_flag = False
            for f in bug.flags:
                if f['name'] == flag:
                    print('rhbz#%s: %s%s' % (bug.id, f['name'], f['status']))
                    has_release_flag = True
            if not has_release_flag:
                missing.append(bug.id)

        if missing:
            print('Missing %s release flag:' % flag)
            for m in missing:
                print('rhbz#%s' % m)
            raise SystemExit(1)
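
# A standalone sketch of the flag check above with made-up data. The flag
# name ("ceph-3.0") and the bug numbers are placeholders; in the real command
# they come from release_flag() and the last commit message (helpers not
# shown in this snippet).
flag = 'ceph-3.0'
bugs = [
    {'id': 1414321, 'flags': [{'name': 'ceph-3.0', 'status': '+'}]},
    {'id': 1450007, 'flags': [{'name': 'devel_ack', 'status': '+'}]},
]

missing = []
for bug in bugs:
    has_release_flag = False
    for f in bug['flags']:
        if f['name'] == flag:
            print('rhbz#%s: %s%s' % (bug['id'], f['name'], f['status']))
            has_release_flag = True
    if not has_release_flag:
        missing.append(bug['id'])

print(missing)  # [1450007] -> this bug would fail the check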