Code Example #1
    def main(self):
        options = ['--ignore']
        config_ignores = ceph_medic.config.file.get_list('check', '--ignore')
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()
        parser.parse_args()
        ignored_codes = as_list(parser.get('--ignore', ''))
        # fallback to the configuration if nothing is defined in the CLI
        if not ignored_codes:
            ignored_codes = config_ignores

        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config.nodes.keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config.nodes.items():
            for node in nodes:
                node_metadata = {'host': node['host']}
                if 'container' in node:
                    node_metadata['container'] = node['container']
                ceph_medic.metadata['nodes'][daemon].append(node_metadata)

        collector.collect()
        test = runner.Runner()
        test.ignore = ignored_codes
        results = test.run()
        runner.report(results)
        #XXX might want to make this configurable to not bark on warnings for
        # example, setting forcefully for now, but the results object doesn't
        # make a distinction between error and warning (!)
        if results.errors or results.warnings:
            sys.exit(1)
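
Every excerpt in this listing follows the same Transport pattern: declare the options the command accepts, attach help text via catch_help, call parse_args(), and then read values back with get() or has(). The following is a minimal, hedged sketch of that pattern; the `from tambo import Transport` import path, the option names, and the usage string are illustrative assumptions rather than code taken from any of the projects shown here.

import sys

from tambo import Transport  # assumed import path for the Transport parser


def main(argv=None):
    argv = argv or sys.argv
    # Illustrative flags; each real command declares its own options list.
    options = ['--ignore', '--verbose']
    parser = Transport(argv, options=options, check_version=False)
    parser.catch_help = "usage: example [--ignore CODES] [--verbose]"
    parser.parse_args()
    if len(argv) <= 1:
        return parser.print_help()
    ignored = parser.get('--ignore', '')   # option value with a fallback
    verbose = parser.has('--verbose')      # boolean presence check
    print(ignored, verbose)

The entry-point examples further down extend this same pattern with mapper=, dispatch(), catch_version, and catches_help()/catches_version() to route subcommands.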
Code Example #2
File: source.py Project: ktdreyer/rhcephpkg
class Source(object):
    help_menu = 'build a source package on the local system'
    _help = """
Build a source package on the local system.
"""
    name = 'source'

    def __init__(self, argv):
        self.argv = argv

    def main(self):
        self.parser = Transport(self.argv)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a source package on the local system. """
        util.setup_pristine_tar_branch()
        cmd = ['gbp', 'buildpackage', '--git-tag', '--git-retag', '-S',
               '-us', '-uc']
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)
Code Example #3
    def main(self, argv):
        # Console Logger
        sh = logging.StreamHandler()
        sh.setFormatter(log.color_format())
        sh.setLevel(logging.DEBUG)

        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)
        root_logger.addHandler(sh)

        self.api_credentials()

        # TODO: Need to implement `--filename` and make it available
        options = [['--log', '--logging']]
        parser = Transport(argv,
                           mapper=self.mapper,
                           options=options,
                           check_help=False,
                           check_version=False)
        parser.parse_args()
        chacractl.config['verbosity'] = parser.get('--log', 'info')
        parser.catch_help = self.help()
        parser.catch_version = chacractl.__version__
        parser.mapper = self.mapper
        if len(argv) <= 1:
            return parser.print_help()
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
Code Example #4
File: hello.py Project: red-hat-storage/rhcephpkg
class Hello(object):
    help_menu = 'test authentication to Jenkins'
    _help = """
Test authentication to Jenkins and return your user's fullName attribute.
"""
    name = 'hello'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Authenticate to Jenkins and print our username to STDOUT.
            Useful for checking that our authentication credentials are
            correct. """
        jenkins = util.jenkins_connection()
        data = jenkins.get_whoami()
        name = data['fullName']  # Our Jenkins instance gets this from LDAP
        jenkins_version = jenkins.get_version()
        print('Hello %s from Jenkins %s' % (name, jenkins_version))
        print('Logged in to %s' % jenkins.url)
Code Example #5
File: source.py Project: red-hat-storage/rhcephpkg
class Source(object):
    help_menu = 'build a source package on the local system'
    _help = """
Build a source package on the local system.
"""
    name = 'source'

    def __init__(self, argv):
        self.argv = argv

    def main(self):
        self.parser = Transport(self.argv)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a source package on the local system. """
        util.setup_pristine_tar_branch()
        cmd = [
            'gbp', 'buildpackage', '--git-tag', '--git-retag', '-S', '-us',
            '-uc'
        ]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)
Code Example #6
    def main(self):
        parser = Transport(self.arguments,
                           options=self.options,
                           check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        branch = parser.get('--branch', 'master')
        user = parser.get('--user', 'vagrant')
        high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
        if not parser.unknown_commands:
            log.error(
                "it is required to pass a host to deploy to, but none was provided"
            )
            raise SystemExit(1)

        command = [
            "ansible-playbook",
            "-i",
            "%s," % parser.unknown_commands[-1],
            high_verbosity,
            "-u",
            user,
            "--extra-vars",
            'branch=%s' % branch,
            "deploy.yml",
        ]
        log.debug("Running command: %s" % ' '.join(command))
        out, err, code = process.run(command, cwd=playbook_path)
        log.error(err)
        log.debug(out)
Code Example #7
File: check.py Project: Richard-Barrett/ceph-medic
    def main(self):
        options = ['--ignore']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()
        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config.nodes.keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config.nodes.items():
            for node in nodes:
                ceph_medic.metadata['nodes'][daemon].append(
                    {'host': node['host']})

        collector.collect()
        test = runner.Runner()
        results = test.run()
        runner.report(results)
        #XXX might want to make this configurable to not bark on warnings for
        # example, setting forcefully for now, but the results object doesn't
        # make a distinction between error and warning (!)
        if results.errors or results.warnings:
            sys.exit(1)
Code Example #8
File: build.py Project: red-hat-storage/rhcephpkg
class Build(object):
    help_menu = 'build a package in Jenkins'
    _help = """
Build a package in Jenkins.
"""
    name = 'build'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith('patch-queue/'):
            log.error('%s is a patch-queue branch' % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info('building %s branch %s at %s', pkg_name, branch_name,
                 posixpath.join(jenkins.url, 'job', 'build-package'))
        job_params = {'PKG_NAME': pkg_name, 'BRANCH': branch_name}

        queue_number = jenkins.build_job('build-package',
                                         parameters=job_params,
                                         token=jenkins.password)

        # Job is now queued, not yet running.
        log.info('Waiting for build queue #%d' % queue_number)
        log.info('This may be safely interrupted...')
        queue_item = jenkins.get_queue_item(queue_number)
        while 'executable' not in queue_item:
            try:
                log.info('queue state: %s' % queue_item['why'])
                sleep(2)
                queue_item = jenkins.get_queue_item(queue_number)
            except KeyboardInterrupt:
                # We have no build_number, so just print a general message with
                # a basic URL for the user to check.
                print('')
                print('Build is queued for starting at %s' % jenkins.url)
                raise SystemExit(1)

        # Job is now running.
        build_number = queue_item['executable']['number']
        # Pass the rest over to the "watch-build" command.
        watcher = WatchBuild(['watch'])
        watcher.watch(build_number)
Code Example #9
File: main.py Project: ceph/chacractl
    def main(self, argv):
        # Console Logger
        sh = logging.StreamHandler()
        sh.setFormatter(log.color_format())
        sh.setLevel(logging.DEBUG)

        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)
        root_logger.addHandler(sh)

        self.api_credentials()

        # TODO: Need to implement `--filename` and make it available
        options = [['--log', '--logging']]
        parser = Transport(argv, mapper=self.mapper,
                           options=options, check_help=False,
                           check_version=False)
        parser.parse_args()
        chacractl.config['verbosity'] = parser.get('--log', 'info')
        parser.catch_help = self.help()
        parser.catch_version = chacractl.__version__
        parser.mapper = self.mapper
        if len(argv) <= 1:
            return parser.print_help()
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
Code Example #10
File: generate.py Project: yangly0815/ceph-medic
    def main(self):
        options = ['--stdout']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()

        if len(self.argv) == 1:
            raise SystemExit(
                "A monitor hostname or a ceph.conf file is required as an argument"
            )

        node = self.argv[-1]
        inventory = {}

        with get_connection(node) as conn:
            report = get_mon_report(conn)
            try:
                mons = report['monmap']['mons']
            except KeyError:
                raise SystemExit(report)
            inventory['mons'] = [i['name'] for i in mons]
            osds = report['osd_metadata']
            inventory['osds'] = [i['hostname'] for i in osds]

        if not inventory:
            raise SystemExit(
                'no hosts were found from remote monitor node: %s' % node)

        generate_inventory(inventory, to_stdout=parser.get('--stdout'))
        conn.exit()
        return
Code Example #11
File: main.py Project: jeanchlopez/ceph-medic
    def main(self, argv):
        options = [
            '--cluster',
            '--ssh-config',
            '--inventory',
            '--config',
        ]
        parser = Transport(argv,
                           options=options,
                           check_help=False,
                           check_version=False)
        parser.parse_args()

        self.config_path = parser.get('--config', configuration.location())

        # load medic configuration
        loaded_config = configuration.load(
            path=parser.get('--config', self.config_path))

        # this is the earliest we can have enough config to setup logging
        log.setup(loaded_config)
        # update the module-wide configuration object
        ceph_medic.config.update(configuration.get_overrides(loaded_config))

        # SSH config
        ceph_medic.config['ssh_config'] = parser.get('--ssh-config')
        if ceph_medic.config['ssh_config']:
            ssh_config_path = ceph_medic.config['ssh_config']
            if not os.path.exists(ssh_config_path):
                terminal.error("the given ssh config path does not exist: %s" %
                               ssh_config_path)
                sys.exit()

        ceph_medic.config['cluster_name'] = parser.get('--cluster')
        ceph_medic.metadata['cluster_name'] = 'ceph'

        # Hosts file
        self.hosts_file = parser.get('--inventory',
                                     configuration.get_host_file())

        # find the hosts files, by the CLI first, fallback to the configuration
        # file, and lastly if none of those are found or defined, try to load
        # from well known locations (cwd, and /etc/ansible/)
        loaded_hosts = configuration.load_hosts(
            parser.get('--inventory',
                       ceph_medic.config.get('--inventory', self.hosts_file)))
        ceph_medic.config['nodes'] = loaded_hosts.nodes
        ceph_medic.config['hosts_file'] = loaded_hosts.filename
        self.hosts_file = loaded_hosts.filename

        parser.catch_version = ceph_medic.__version__
        parser.mapper = self.mapper
        parser.catch_help = self.help(parser.subhelp())
        if len(argv) <= 1:
            return parser.print_help()
        ceph_medic.config['config_path'] = self.config_path
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
Code Example #12
File: server.py Project: johnmontero/delgado
 def parse_args(self):
     options = ['--allowed']
     parser = Transport(self.argv, options=options)
     parser.catch_help = self._help
     parser.parse_args()
     delgado.config['allowed'] = parser.get('--allowed') or []
     engine = Engine(connection=self.connection)
     engine.run_forever()
Code Example #13
File: pytest.py Project: alfredodeza/delgado
 def parse_args(self):
     parser = Transport(self.argv, options=['--socket-location'])
     parser.catch_help = self._help
     parser.parse_args()
     location = parser.get('--socket-location') or '/tmp/pytest.sock'
     delgado.config['allowed'] = ['py.test']
     engine = Engine(socket_location=location)
     engine.run_forever()
Code Example #14
File: repos.py Project: bigjust/chacractl
class Repo(object):
    _help = dedent("""
    Operate on repositories on a remote chacra instance. Both `recreate` and
    `update` calls are not immediate. They rely on the async service managing
    repos which usually have a delay applied to them.

    Options:

    recreate        Mark a repository to be removed and created from scratch
                    again.
    update          Repository will get updated by running the repo tools on
                    it again.
    """)
    help_menu = "recreate, delete, or update repositories"
    options = ['recreate', 'update']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'repos'
        )

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    @retry()
    def post(self, url):
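        # HEAD the repository URL first so a nonexistent repo fails fast via
        # raise_for_status() before any POST is attempted.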
        exists = requests.head(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()
        logger.info('POST: %s', url)
        response = requests.post(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        response.raise_for_status()
        json = response.json()
        for k, v in json.items():
            logger.info("%s: %s", k, v)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get('recreate')
        update = self.parser.get('update')
        if recreate:
            url_part = os.path.join(recreate, 'recreate')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, 'update')
            url = os.path.join(self.base_url, url_part)
            self.post(url)
Code Example #15
File: hello.py Project: ktdreyer/rhcephpkg
class Hello(object):
    help_menu = 'test authentication to Jenkins'
    _help = """
Test authentication to Jenkins and return your user's fullName attribute.
"""
    name = 'hello'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Authenticate to Jenkins and print our username to STDOUT.
            Useful for checking that our authentication credentials are
            correct. """
        jenkins = util.jenkins_connection()
        # python-jenkins does not have syntactic support for "whoami" (the
        # "/me/api/json" endpoint), so we have to hit it and parse it
        # ourselves.
        # https://review.openstack.org/307896

        whoami_url = posixpath.join(jenkins.url, 'me/api/json')
        try:
            response = jenkins.jenkins_open(Request(whoami_url))
            data = json.loads(response)
        except JenkinsException as err:
            raise SystemExit(err)

        name = data['fullName']  # Our Jenkins instance gets this from LDAP
        try:
            jenkins_version = jenkins.get_version()
        except AttributeError:
            # python-jenkins older than 0.4.1 does not have get_version().
            version_url = jenkins.server
            try:
                response = urlopen(Request(version_url))
                if six.PY2:
                    jenkins_version = response.info().getheader('X-Jenkins')
                else:
                    jenkins_version = response.getheader('X-Jenkins')
            except (HTTPError, BadStatusLine) as err:
                raise SystemExit(err)
        print('Hello %s from Jenkins %s' % (name, jenkins_version))
Code Example #16
 def main(self, argv):
     parser = Transport(argv, mapper=self.mapper,
                        options=[], check_help=False,
                        check_version=False)
     parser.parse_args()
     parser.catch_help = self.help(parser.subhelp())
     parser.catch_version = ceph_installer.__version__
     parser.mapper = self.mapper
     if len(argv) <= 1:
         return parser.print_help()
     parser.dispatch()
     parser.catches_help()
     parser.catches_version()
Code Example #17
File: iso.py Project: alfredodeza/merfi
 def parse_args(self, argv=None):
     """ pass argv during testing """
     if argv is None:
         argv = self.argv
     options = [['--output', '-o']]
     parser = Transport(argv, options=options)
     parser.catch_help = self.help()
     parser.parse_args()
     self.source = util.infer_path(parser.unknown_commands)
     self.output = parser.get('--output', self.source + '-dvd.iso')
     self.check_dependency()
     self.make_iso()
     self.make_sha256sum()
Code Example #18
File: extend.py Project: johnmontero/dozo
    def parse_args(self):
        transport = Transport(self.argv, check_help=False)
        transport.catch_help = self.__doc__
        if len(self.argv) <= 1:
            transport.print_help()
        transport.parse_args()

        for action in self.actions:
            if transport.has(action):
                return self.actions.get(action)()

        # If nothing matches, print the help
        transport.print_help()
Code Example #19
class Exists(object):
    _help = dedent("""
    Check if a given URL part exists already. Mainly does a HEAD request to the
    given endpoint. If the URL does not exist it will return a non-zero exit
    status (404).

    For example:

        chacractl exists binaries/ceph-deploy/master/debian/wheezy

    Positional Arguments:

    [URL]        The endpoint, starting with the full url part (sans fqdn)
    """)
    help_menu = "check if a given URL part exists already"
    options = []

    def __init__(self, argv):
        self.argv = argv
        self.base_url = chacractl.config['url']

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    @retry()
    def head(self, url):
        logger.info('HEAD: %s', url)
        exists = requests.head(url,
                               auth=chacractl.config['credentials'],
                               verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
Code Example #20
File: main.py Project: ktdreyer/rhcephpkg
 def main(self, argv):
     options = []
     parser = Transport(argv, mapper=self.mapper,
                        options=options, check_help=False,
                        check_version=False)
     parser.parse_args()
     parser.catch_help = self.help()
     parser.catch_version = rhcephpkg.__version__
     parser.mapper = self.mapper
     if len(argv) <= 1:
         return parser.print_help()
     parser.dispatch()
     parser.catches_help()
     parser.catches_version()
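
Entry points like the one above hand Transport a mapper and later call parser.dispatch() to route execution to a subcommand class. None of these excerpts show the mapper itself, so the snippet below is only a hedged sketch of the shape it presumably takes in rhcephpkg: a plain dict from subcommand name to handler class, with keys matching each class's `name` attribute. The exact contents are an assumption.

    # Assumed shape of the mapper that parser.dispatch() consumes; the real
    # mapping lives in the project's main module and is not part of these
    # excerpts.
    self.mapper = {
        'hello': Hello,   # see the Hello class examples in this listing
        'build': Build,   # see the Build class examples
        'clone': Clone,   # see the Clone class examples
    }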
Code Example #21
File: exists.py Project: ahills/chacractl
class Exists(object):
    _help = dedent("""
    Check if a given URL part exists already. Mainly does a HEAD request to the
    given endpoint. If the URL does not exist it will return a non-zero exit
    status (404).

    For example:

        chacractl exists binaries/ceph-deploy/master/debian/wheezy

    Positional Arguments:

    [URL]        The endpoint, starting with the full url part (sans fqdn)
    """)
    help_menu = "check if a given URL part exists already"
    options = []

    def __init__(self, argv):
        self.argv = argv
        self.base_url = chacractl.config['url']

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    def head(self, url):
        logger.info('HEAD: %s', url)
        exists = requests.head(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        exists.raise_for_status()

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        if self.parser.unknown_commands:
            url_part = self.sanitize_url(self.parser.unknown_commands[-1])
            url = os.path.join(self.base_url, url_part)
            return self.head(url)
        else:
            logger.error('no url was passed in')
Code Example #22
File: repos.py Project: ceph/chacractl
class Repo(object):
    _help = dedent(
        """
    Operate on repositories on a remote chacra instance. Both `recreate` and
    `update` calls are not immediate. They rely on the async service managing
    repos which usually have a delay applied to them.

    Options:

    recreate        Mark a repository to be removed and created from scratch
                    again.
    update          Repository will get updated by running the repo tools on
                    it again.
    """
    )
    help_menu = "recreate, delete, or update repositories"
    options = ["recreate", "update"]

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(chacractl.config["url"], "repos")

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    def post(self, url):
        exists = requests.head(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        exists.raise_for_status()
        logger.info("POST: %s", url)
        response = requests.post(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        response.raise_for_status()
        json = response.json()
        for k, v in json.items():
            logger.info("%s: %s", k, v)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        recreate = self.parser.get("recreate")
        update = self.parser.get("update")
        if recreate:
            url_part = os.path.join(recreate, "recreate")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
        elif update:
            url_part = os.path.join(update, "update")
            url = os.path.join(self.base_url, url_part)
            self.post(url)
Code Example #23
File: base.py Project: alfredodeza/merfi
class BaseBackend(base.BaseCommand):

    options = []
    parser = None

    def parse_args(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self.path = util.infer_path(self.parser.unknown_commands)
        self.check_dependency()
        self.sign()

    def sign(self):
        raise NotImplementedError()
Code Example #24
File: build.py Project: red-hat-storage/rhcephpkg
class Build(object):
    help_menu = "build a package in Jenkins"
    _help = """
Build a package in Jenkins.
"""
    name = "build"

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith("patch-queue/"):
            log.error("%s a patch-queue branch" % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info(
            "building %s branch %s at %s", pkg_name, branch_name, posixpath.join(jenkins.url, "job", "build-package")
        )
        job_params = {"PKG_NAME": pkg_name, "BRANCH": branch_name}

        if self._has_broken_build_job():
            jenkins.build_job = types.MethodType(_build_job_fixed, jenkins)

        jenkins.build_job("build-package", parameters=job_params, token=jenkins.password)

    def _has_broken_build_job(self):
        # Ubuntu Trusty ships python-jenkins 0.2.1-0ubuntu1, and this version
        # has a broken build_job() method. See
        # https://bugs.launchpad.net/bugs/1177831 .
        # This bug was fixed in python-jenkins v0.3.2 upstream.
        v = get_distribution("python_jenkins").version
        return parse_version(v) < parse_version("0.3.2")
Code Example #25
class Build(object):
    help_menu = 'build a package in Jenkins'
    _help = """
Build a package in Jenkins.
"""
    name = 'build'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Build a package in Jenkins. """
        pkg_name = util.package_name()
        branch_name = util.current_branch()
        jenkins = util.jenkins_connection()

        if branch_name.startswith('patch-queue/'):
            log.error('%s is a patch-queue branch' % branch_name)
            msg = 'You can switch to the debian branch with "gbp pq switch"'
            raise SystemExit(msg)

        log.info('building %s branch %s at %s', pkg_name, branch_name,
                 posixpath.join(jenkins.url, 'job', 'build-package'))
        job_params = {'PKG_NAME': pkg_name, 'BRANCH': branch_name}

        if self._has_broken_build_job():
            jenkins.build_job = types.MethodType(_build_job_fixed, jenkins)

        jenkins.build_job('build-package', parameters=job_params,
                          token=jenkins.password)

    def _has_broken_build_job(self):
        # Ubuntu Trusty ships python-jenkins 0.2.1-0ubuntu1, and this version
        # has a broken build_job() method. See
        # https://bugs.launchpad.net/bugs/1177831 .
        # This bug was fixed in python-jenkins v0.3.2 upstream.
        v = get_distribution('python_jenkins').version
        return parse_version(v) < parse_version('0.3.2')
Code Example #26
File: main.py Project: alfredodeza/merfi
 def main(self, argv):
     options = [['--log', '--logging']]
     parser = Transport(argv, mapper=self.mapper,
                        options=options, check_help=False,
                        check_version=False)
     parser.parse_args()
     merfi.config['verbosity'] = parser.get('--log', 'info')
     merfi.config['check'] = parser.has('--check')
     parser.catch_help = self.help()
     parser.catch_version = merfi.__version__
     parser.mapper = self.mapper
     if len(argv) <= 1:
         return parser.print_help()
     parser.dispatch()
     parser.catches_help()
     parser.catches_version()
Code Example #27
 def main(self, argv):
     options = []
     parser = Transport(argv,
                        mapper=self.mapper,
                        options=options,
                        check_help=False,
                        check_version=False)
     parser.parse_args()
     parser.catch_help = self.help()
     parser.catch_version = rhcephpkg.__version__
     parser.mapper = self.mapper
     if len(argv) <= 1:
         return parser.print_help()
     parser.dispatch()
     parser.catches_help()
     parser.catches_version()
Code Example #28
File: clone.py Project: red-hat-storage/rhcephpkg
class Clone(object):
    help_menu = 'clone a package from dist-git'
    _help = """
Clone a package from dist-git. Your SSH key must be set up in Gerrit.

Positional Arguments:

[package]  The name of the package to clone.
"""
    name = 'clone'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            pkg = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(pkg)

    def help(self):
        return self._help

    def _run(self, pkg):
        """ Clone a package from dist-git. """
        if os.path.exists(pkg):
            raise SystemExit('%s already exists in current working directory.'
                             % pkg)
        configp = util.config()
        try:
            user = configp.get('rhcephpkg', 'user')
            gitbaseurl = configp.get('rhcephpkg', 'gitbaseurl')
        except configparser.Error as err:
            raise SystemExit('Problem parsing .rhcephpkg.conf: %s'
                             % err.message)
        # TODO: SafeConfigParser might make the "user" interpolation here
        # unnecessary? Need to test, particularly what it does to %(module).
        pkg_url = gitbaseurl % {'user': user, 'module': pkg}
        cmd = ['git', 'clone', pkg_url]
        subprocess.check_call(cmd)
Code Example #29
File: task.py Project: b-ranto/ceph-installer
    def main(self):
        parser = Transport(self.arguments, options=self.options, check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        if not parser.unknown_commands:
            log.error("it is required to pass an identifer, but none was provided")
            raise SystemExit(1)
        self.identifier = parser.unknown_commands[-1]
        if parser.has('--poll'):
            return self.poll()

        for key in [
                'stdout', 'stderr', 'command', 'ended',
                'started', 'succeeded', 'exit_code']:
            if parser.has(key):
                return self.get(key)

        # if nothing else matches, just try to give a generic, full summary
        self.summary()
Code Example #30
File: main.py Project: alfredodeza/wari
    def main(self, argv):
        parser = Transport(argv, mapper=self.mapper,
                           check_help=False,
                           check_version=False)
        parser.parse_args()
        parser.catch_help = self.help()
        parser.catch_version = wari.__version__
        parser.mapper = self.mapper
        if len(argv) <= 1:
            return parser.print_help()

        # create the connection and set the collection
        conn = wari.db.get_connection()
        wari.db.connection = conn
        wari.db.collection = conn['wari']

        parser.dispatch()
        parser.catches_help()
        parser.catches_version()
        conn.close()
Code Example #31
File: check.py Project: shreekarSS/ceph-medic
    def main(self):
        options = ['--ignore']
        parser = Transport(self.argv, options=options, check_version=False)
        parser.catch_help = self._help()

        parser.parse_args()
        if len(self.argv) < 1:
            return parser.print_help()

        # populate the nodes metadata with the configured nodes
        for daemon in ceph_medic.config['nodes'].keys():
            ceph_medic.metadata['nodes'][daemon] = []
        for daemon, nodes in ceph_medic.config['nodes'].items():
            for node in nodes:
                ceph_medic.metadata['nodes'][daemon].append(
                    {'host': node['host']})

        collector.collect()
        test = runner.Runner()
        results = test.run()
        runner.report(results)
Code Example #32
File: hotp.py Project: alfredodeza/wari
    def parse_args(self):
        options = ['create', 'update', 'generate', 'remove', 'get']
        parser = Transport(self.argv, options=options)
        parser.catch_help = self._help
        parser.parse_args()

        if parser.has('create'):
            return self.create(parser.get('create'))

        if parser.has('update'):
            optional_args = ['key', 'step', 'secret', 'b32']
            items = [i for i in parser.arguments if i in optional_args]
            return self.update(parser.get('update'), items)

        if parser.has('generate'):
            return self.generate()

        if parser.has('remove'):
            return self.remove(parser.get('remove'))

        if parser.has('get'):
            items = [i for i in parser.arguments if i in ['pin']]
            return self.get(parser.get('get'), items)
Code Example #33
File: task.py Project: STEI-ITB/roseph
    def main(self):
        parser = Transport(self.arguments,
                           options=self.options,
                           check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        if not parser.unknown_commands:
            log.error(
                "it is required to pass an identifer, but none was provided")
            raise SystemExit(1)
        self.identifier = parser.unknown_commands[-1]
        if parser.has('--poll'):
            return self.poll()

        for key in [
                'stdout', 'stderr', 'command', 'ended', 'started', 'succeeded',
                'exit_code'
        ]:
            if parser.has(key):
                return self.get(key)

        # if nothing else matches, just try to give a generic, full summary
        self.summary()
Code Example #34
File: dev.py Project: b-ranto/ceph-installer
    def main(self):
        parser = Transport(self.arguments, options=self.options, check_help=True)
        parser.catch_help = self._help
        parser.parse_args()
        parser.catches_help()
        branch = parser.get('--branch', 'master')
        user = parser.get('--user', 'vagrant')
        high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
        if not parser.unknown_commands:
            log.error("it is required to pass a host to deploy to, but none was provided")
            raise SystemExit(1)

        command = [
            "ansible-playbook",
            "-i", "%s," % parser.unknown_commands[-1],
            high_verbosity,
            "-u", user,
            "--extra-vars", 'branch=%s' % branch,
            "deploy.yml",
        ]
        log.debug("Running command: %s" % ' '.join(command))
        out, err, code = process.run(command, cwd=playbook_path)
        log.error(err)
        log.debug(out)
Code Example #35
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and our
Ubuntu packages.

Options:
--force    Do a hard reset, rather than restricting to fast-forward merges
           only. Use this option if the RHEL patches branch was amended or
           rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)

        # Do the merge
        if current_branch == patch_queue_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
            if force:
                # Do a hard reset on HEAD instead.
                cmd = ['git', 'reset', '--hard',
                       'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            util.ensure_patch_queue_branch()
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch, patch_queue_branch)]
            if force:
                # Do a hard push (with "+") instead.
                cmd = ['git', 'push', '.', '+patches/%s:%s' %
                       (rhel_patches_branch, patch_queue_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123
        """
        (product, version, distro) = debian_branch.split('-', 2)
        suffix = None
        if '-' in distro:
            (distro, suffix) = distro.split('-', 1)
        rhel = '%s-%s-rhel-patches' % (product, version)
        if suffix is not None:
            rhel = '%s-%s' % (rhel, suffix)
        return rhel
Code Example #36
File: totp.py Project: alfredodeza/wari
 def parse_args(self):
     options = ['create', 'update', 'generate']
     parser = Transport(self.argv, options=options)
     parser.catch_help = self._help
     parser.parse_args()
Code Example #37
File: binaries.py Project: ceph/chacractl
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        Lines may come with newlines and leading slashes; make sure
        they are clean so that they can be processed.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def load_file(self, filepath):
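        # Hash the file in 4 KB chunks so large binaries are never read fully
        # into memory, then rewind the handle so the caller can upload it.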
        chsum = sha512()
        binary = open(filepath, 'rb')
        for chunk in iter(lambda: binary.read(4096), b''):
            chsum.update(chunk)
        binary.seek(0)
        return binary, chsum.hexdigest()

    def upload_is_verified(self, arch_url, filename, digest):
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                    'Checksum mismatch: server has wrong checksum for %s',
                    filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    def post(self, url, filepath):
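        # HEAD the would-be file URL first: skip (or PUT with --force) when it
        # already exists, POST when it does not, then verify the checksum the
        # server recorded.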
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])

        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            binary, digest = self.load_file(filepath)
            with binary:
                response = requests.post(
                        url,
                        files={'file': binary},
                        auth=chacractl.config['credentials'],
                        verify=chacractl.config['ssl_verify'])
                if response.status_code > 201:
                    logger.warning("%s -> %s", response.status_code, response.text)
                    response.raise_for_status()
        if not self.upload_is_verified(url, filename, digest):
            # Since this is a new file, attempt to delete it
            logging.error('Deleting corrupted file from server...')
            self.delete(file_url)
            raise SystemExit(
                    'Checksum mismatch: remote server has wrong checksum for %s'
                    % filepath)

    def put(self, url, filepath):
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        binary, digest = self.load_file(filepath)
        with binary:
            response = requests.put(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                    'Checksum mismatch: server has wrong checksum for %s!'
                    % filepath)

    def delete(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Code Example #38
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and our
Ubuntu packages.

"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patches_branch = util.current_patches_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)

        # Do the merge
        if current_branch == patches_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch, patches_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        """
        deb_regex = r'^(\w+)-([\d\.]+)-.*'
        rhel_regex = r'\1-\2-rhel-patches'
        return re.sub(deb_regex, rhel_regex, debian_branch)
Code Example #39
File: patch.py Project: red-hat-storage/rhcephpkg
class Patch(object):
    help_menu = 'apply patches from patch-queue branch'
    _help = """
Generate patches from a patch-queue branch.

Options:
--nobz    Do not require "Resolves: rhbz#" for every patch. The default is to
          require them. Use this CLI option to override the default.
"""
    name = 'patch'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--nobz', )

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Generate quilt patch series with gbp pq, and update d/rules """

        # Determine the names of the patch-queue branch and debian branch
        current_branch = util.current_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        debian_branch = util.current_debian_branch()

        # TODO: default to fetching from upstream, the way rdopkg patch does.

        # Get the new sha1 to insert into the $COMMIT variable in d/rules
        cmd = ['git', 'rev-parse', patch_queue_branch]
        output = subprocess.check_output(cmd)
        patch_queue_sha1 = output.rstrip()
        if six.PY3:
            patch_queue_sha1 = output.decode('utf-8').rstrip()

        # Switch to "debian" branch if necessary
        if current_branch != debian_branch:
            cmd = ['git', 'checkout', debian_branch]
            subprocess.check_call(cmd)

        # Get the original (old) patch series
        old_series = self.read_series_file('debian/patches/series')
        old_subjects = [patch.subject for patch in old_series]

        # Git-buildpackage pq operation
        cmd = ['gbp', 'pq', 'export']
        subprocess.check_call(cmd)

        # Add all patch files to Git's index
        cmd = ['git', 'add', '--all', 'debian/patches']
        subprocess.check_call(cmd)

        # Bail early if gbp pq did nothing.
        if not self.read_git_debian_patches_status():
            print('No new patches, quitting.')
            raise SystemExit(1)

        # Replace $COMMIT sha1 in d/rules
        old_sha1 = read_commit()
        if old_sha1:
            rules = read_rules_file()
            with open('debian/rules', 'w') as fileh:
                fileh.write(rules.replace(old_sha1, patch_queue_sha1))

        # Get the new patch series
        new_series = self.read_series_file('debian/patches/series')
        # Select only the ones that are new (according to commit subjects)
        new_series = [p for p in new_series if p.subject not in old_subjects]

        if not new_series:
            # Maybe we rewrote some patch files in place?
            # Check Git itself for changed files:
            new_series = self.read_git_debian_patches()

        # Add patch entries to d/changelog
        changelog = self.generate_changelog(new_series)
        try:
            ensure_bzs(changelog)
        except BzNotFound:
            if not self.parser.has('--nobz'):
                raise
        util.bump_changelog(changelog)

        # Assemble a standard commit message string "clog".
        clog = "debian: %s\n" % util.get_deb_version()
        clog += "\n"
        clog += "Add patches from %s\n" % patch_queue_branch
        clog += "\n"
        clog += util.format_changelog(changelog)

        # Commit everything with the standard commit message.
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(clog)
            temp.flush()
            cmd = [
                'git', 'commit', 'debian/changelog', 'debian/patches',
                'debian/rules', '-F', temp.name
            ]
            subprocess.check_call(cmd)

        # Summarize this commit on STDOUT for the developer.
        # (This matches the behavior of "rdopkg patch".)
        cmd = ['git', '--no-pager', 'log', '--name-status', 'HEAD~..HEAD']
        subprocess.check_call(cmd)

    def generate_changelog(self, series):
        """
        Generate a list of changelog entries for this gbp Patch series.

        :return: a list of strings
        """
        changelog = []
        for p in series:
            # If there was some in-place Git modification for this patch,
            # (.git_action attribute), include that in our log.
            action = getattr(p, 'git_action', 'A')
            # Make common actions human-readable:
            if action == 'M':
                action = 'Modified'
            if action == 'D':
                action = 'Deleted'
            if action == 'R':
                # We don't log .patch file renames
                continue
            change = '%s %s' % (action, p.path)
            if action == 'A':
                # This was a simple patch addition, so just log the patch's
                # subject.
                change = p.subject
            bzs = self.get_rhbzs(p)
            bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
            if bzstr != '':
                change += ' (%s)' % bzstr
            changelog.append(change)
        return changelog

    def get_rhbzs(self, patch):
        """
        Return all RHBZ numbers from a Patch's subject and body.
        :param patch: ``gbp.patch_series.Patch``
        """
        bzs = re.findall(BZ_REGEX, patch.subject)
        body = patch.long_desc
        try:
            if patch.git_action == 'D':
                # patch.long_desc will be empty.
                # Read the deleted file's description from Git instead.
                body = self.read_deleted_patch_description(patch.path)
        except AttributeError:
            # This was a simple patch addition, so we'll just search this
            # patch's .long_desc.
            pass
        bzs.extend(re.findall(BZ_REGEX, body))
        return bzs

    def read_series_file(self, file_):
        return gbp.patch_series.PatchSeries.read_series_file(file_)

    def read_git_debian_patches_status(self):
        """
        Return a list of all edited Debian patch files (from "git status").

        :return: a list of action/filename pairs. For example:
                 [
                   ['M', 'debian/patches/0001-foo.patch'],
                   ['D', 'debian/patches/0002-bar.patch'],
                 ]
        """
        cmd = ['git', 'status', '-s', 'debian/patches/']
        output = subprocess.check_output(cmd)
        if six.PY3:
            output = output.decode('utf-8')
        result = []
        for line in output.splitlines():
            if line.endswith('.patch'):
                result.append(line.split(None, 1))
        return result

    def read_git_debian_patches(self):
        """
        Load all edited Debian patches (from "git status") into Patch objects.

        The returned Patch objects have an extra ".git_action" attribute. Use
        this to determine what happened to the patch in Git.

        :return: a list of gbp.patch_series.Patch objects
        """
        patches = []
        for (action, filename) in self.read_git_debian_patches_status():
            patch = gbp.patch_series.Patch(filename)
            # Hack: record what happened to this patch file:
            patch.git_action = action
            patches.append(patch)
        return patches

    def read_deleted_patch_description(self, filename):
        """
        Parse a deleted .patch file with gbp.patch_series.Patch.

        For deleted .patch files, most of the gbp.patch_series.Patch
        attributes from read_git_debian_patches() are empty, because the file
        no longer exists. More hackery is needed to recover the original
        .long_desc so we can find the original RHBZ number.

        :returns: ``str``, the long_desc attribute.
        """
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            cmd = ['git', 'show', 'HEAD:%s' % filename]
            subprocess.call(cmd, stdout=temp)
            temp.flush()
            temppatch = gbp.patch_series.Patch(temp.name)
            temppatch._read_info()  # XXX internal API here :(
            return temppatch.long_desc
Code example #40
File: binaries.py  Project: rvsharma7339/chacractl
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_filename(self, line):
        """
        Lines may come with newlines and leading slashes; make sure
        they are clean so that they can be processed.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def get_checksum(self, filepath):
        chsum = sha512()
        with open(filepath, 'rb') as binary:
            for chunk in iter(lambda: binary.read(4096), b''):
                chsum.update(chunk)
        return chsum.hexdigest()

    @retry()
    def upload_is_verified(self, arch_url, filename, digest):
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                'Checksum mismatch: server has wrong checksum for %s',
                filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    @retry()
    def post(self, url, filepath):
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])
        digest = self.get_checksum(filepath)

        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            length = os.path.getsize(filepath)
            logger.info('POSTing file: %s', filepath)
            mpart = MultipartEncoder(fields={
                'file': (filename, open(filepath, 'rb'), 'text/plain')
            })

            response = requests.post(url,
                                     data=mpart,
                                     headers={
                                         'Content-Type': mpart.content_type,
                                         'Content-Length': '%d' % length,
                                     },
                                     auth=chacractl.config['credentials'],
                                     verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()
        if not self.upload_is_verified(url, filename, digest):
            # Since this is a new file, attempt to delete it
            logging.error('Deleting corrupted file from server...')
            self.delete(file_url)
            raise SystemExit(
                'Checksum mismatch: remote server has wrong checksum for %s' %
                filepath)

    @retry()
    def put(self, url, filepath):
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        digest = self.get_checksum(filepath)
        length = os.path.getsize(filepath)
        mpart = MultipartEncoder(
            fields={'file': (filename, open(filepath, 'rb'), 'text/plain')})
        response = requests.put(url,
                                data=mpart,
                                headers={
                                    'Content-Type': mpart.content_type,
                                    'Content-Length': '%d' % length,
                                },
                                auth=chacractl.config['credentials'],
                                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                'Checksum mismatch: server has wrong checksum for %s!' %
                filepath)

    @retry()
    def delete(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(url,
                                   auth=chacractl.config['credentials'],
                                   verify=chacractl.config['ssl_verify'])
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning('provided path does not exist: %s',
                                   self.argv[-1])
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
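
# A small standalone sketch of the sanitize_url() helper above: strip any
# leading slash and add a trailing one so the request hits the canonical
# chacra URL. The path used here is a made-up example.
def sketch_sanitize_url(url_part):
    url = url_part.lstrip('/')
    if not url.endswith('/'):
        url = "%s/" % url
    return url

assert sketch_sanitize_url('/ceph/jewel/ubuntu/xenial/x86_64') == \
    'ceph/jewel/ubuntu/xenial/x86_64/'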
Code example #41
File: binaries.py  Project: ahills/chacractl
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        Lines may come with newlines and leading slashes; make sure
        they are clean so that they can be processed.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def post(self, url, filepath):
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])

        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            with open(filepath, 'rb') as binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
            response.raise_for_status()

    def put(self, url, filepath):
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        with open(filepath, 'rb') as binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def delete(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')

        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)

        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Code example #42
File: clone.py  Project: red-hat-storage/rhcephpkg
class Clone(object):
    help_menu = 'clone a package from dist-git'
    _help = """
Clone a package from dist-git. Your SSH key must be set up in Gerrit.

Positional Arguments:

[package]  The name of the package to clone.

Python packages are named slightly differently between RHEL and Debian.
If you pass a package name "python-foo" to this command, rhcephpkg will strip
off the "python-" prefix and operate on a Debian package name "foo".
"""
    name = 'clone'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            pkg = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(pkg)

    def help(self):
        return self._help

    def _run(self, pkg):
        """ Clone a package from dist-git. """
        if os.path.exists(pkg):
            err = '%s already exists in current working directory.' % pkg
            raise SystemExit(err)
        configp = util.config()
        try:
            user = configp.get('rhcephpkg', 'user')
            gitbaseurl = configp.get('rhcephpkg', 'gitbaseurl')
        except configparser.Error as err:
            raise SystemExit('Problem parsing .rhcephpkg.conf: %s' % err)
        # If we were given an RPM pkg name, switch to the Debian one:
        if pkg.startswith('python-'):
            pkg = pkg[7:]
        # TODO: SafeConfigParser might make the "user" interpolation here
        # unnecessary? Need to test, particularly what it does to %(module).
        pkg_url = gitbaseurl % {'user': user, 'module': pkg}
        cmd = ['git', 'clone', pkg_url]
        subprocess.check_call(cmd)

        os.chdir(pkg)

        patches_url = find_patches_url(configp, user, pkg)
        if patches_url:
            cmd = ['git', 'remote', 'add', '-f', 'patches', patches_url]
            subprocess.check_call(cmd)

        util.setup_pristine_tar_branch()
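
# Hypothetical illustration of the gitbaseurl interpolation performed above;
# the URL template is invented and only demonstrates the %(user)s and
# %(module)s substitution that .rhcephpkg.conf is expected to provide.
gitbaseurl = 'ssh://%(user)s@git.example.com/ubuntu/%(module)s'
pkg_url = gitbaseurl % {'user': 'alice', 'module': 'ceph'}
assert pkg_url == 'ssh://alice@git.example.com/ubuntu/ceph'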
Code example #43
class NewVersion(object):
    help_menu = 'Import a new version with git-buildpackage and uscan'
    _help = """
Import a new upstream version with "gbp import-orig --uscan".

This command makes it easier to rebase a package to a new upstream version.

Note: the package must use pristine-tar.

Optional Arguments:

[tarball]  The upstream tarball to import. Omit this to use uscan
           (debian/watch file) instead.

 Example:

  rhcephpkg new-version ansible_2.4.1.0.orig.tar.gz

 This will import the upstream ansible 2.4.1.0 tarball.

-B, --bug  The BZ(s) that this new version resolves.

 Example:

  rhcephpkg new-version -B "rhbz#12345 rhbz#67890"

 This will add rhbz#12345 and rhbz#67890 to the debian/changelog.
"""
    name = 'new-version'

    def __init__(self, argv):
        self.argv = argv
        self.options = [['-B', '--bug']]

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            tarball = self.parser.unknown_commands[0]
        except IndexError:
            tarball = None
        bugstr = self.parser.get('--bug')
        self._run(tarball, bugstr)

    def help(self):
        return self._help

    def _run(self, tarball, bugstr):
        # Ensure we're on the right branch.
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        if current_branch != debian_branch:
            log.error('current branch is "%s"' % current_branch)
            log.error('debian branch is "%s"' % debian_branch)
            raise RuntimeError('Must run `new-version` on debian branch')

        util.setup_pristine_tar_branch()
        self.ensure_gbp_settings()

        self.setup_upstream_branch()
        self.import_orig(tarball)
        version = self.upstream_version()
        self.run_dch(version, bugstr)

        self.commit()
        self.show()

    def ensure_gbp_settings(self):
        """ Ensure some gbp settings are correct. """
        parser = GbpOptionParser('import-orig')
        if parser.config.get('pristine-tar') != 'True':
            err = '"pristine-tar" is %s. Set to "True" in debian/gbp.conf.'
            raise RuntimeError(err % parser.config.get('pristine-tar'))
        if parser.config.get('merge-mode') != 'replace':
            err = '"merge-mode" is %s. Set to "replace" in debian/gbp.conf.'
            raise RuntimeError(err % parser.config.get('merge-mode'))
        # ensure upstream branch is unique for this debian branch
        debian_branch = parser.config.get('debian-branch')
        upstream_branch = parser.config.get('upstream-branch')
        expected = 'upstream/%s' % debian_branch
        if upstream_branch != expected:
            err = '"upstream-branch" is "%s". Set to "%s" in debian/gbp.conf.'
            raise RuntimeError(err % (upstream_branch, expected))

    def setup_upstream_branch(self):
        """ Ensure we have a local "upstream/foo" branch. """
        parser = GbpOptionParser('import-orig')
        upstream_branch = parser.config.get('upstream-branch')
        util.ensure_local_branch(upstream_branch)

    def import_orig(self, tarball=None):
        """ Import new upstream tarball, optionally with uscan. """
        cmd = ['gbp', 'import-orig', '--no-interactive']
        if tarball is None:
            cmd.append('--uscan')
        else:
            cmd.append(tarball)
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def upstream_version(self):
        """
        Find the upstream version we just imported.

        git-buildpackage import-orig will generate this "upstream" tag
        automatically, and we can use it to discover the version of the
        current branch. It uses git-describe, like so:

          git describe --match 'upstream/*' --abbrev=0

        (Note: this method is similar to gbp.deb.git.DebianGitRepository
        debian_version_from_upstream(), but that appends the debian
        release number "-1", and we don't want that here.)
        """
        repo = DebianGitRepository('.')
        tag = repo.find_branch_tag('HEAD', 'HEAD', pattern='upstream/*')
        # should we get tagformat from GbpOptionParser instead of hardcoding?
        tagformat = "upstream/%(version)s"
        return repo.tag_to_version(tag, tagformat)

    def run_dch(self, version, bugstr):
        """ Edit debian/changelog for a new upstream release """
        version_release = version + '-2redhat1'
        text = 'Imported Upstream version %s' % version
        if bugstr:
            text = '%s (%s)' % (text, bugstr)
        dist = changelog.distribution()  # reuse previous distribution
        cmd = ['dch', '-D', dist, '-v', version_release, text]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def commit(self):
        """
        Commit to Git, basing the message on our debian/changelog.
        """
        message = changelog.git_commit_message()
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(message)
            temp.flush()
            cmd = ['git', 'commit', 'debian/changelog', '-F', temp.name]
            subprocess.check_call(cmd)

    def show(self):
        """
        Show our last Git commit.
        """
        subprocess.check_call(['git', 'show'])
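
# Rough sketch of the tag-to-version mapping relied on above: with the
# hardcoded tagformat "upstream/%(version)s", a tag such as
# "upstream/2.4.1.0" corresponds to version "2.4.1.0". The helper below is
# only illustrative and is not the gbp API.
def sketch_tag_to_version(tag, tagformat="upstream/%(version)s"):
    prefix = tagformat.split('%(version)s')[0]
    if tag.startswith(prefix):
        return tag[len(prefix):]

assert sketch_tag_to_version('upstream/2.4.1.0') == '2.4.1.0'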
Code example #44
class Localbuild(object):
    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:
--dist    "xenial" or "trusty". If unspecified, rhcephpkg will choose one
          based on the current branch's name.

  Rules for automatic distro selection:

    1) If the branch suffix is an ubuntu distro name, use that.
       eg "ceph-3.0-xenial".
    2) If a branch has a version number starting with "1.3", return "trusty".
       eg. "ceph-1.3-ubuntu"
    3) If a branch has a version number starting with "2" return "xenial".
       eg. "ceph-2-ubuntu"
    4) If a branch has a version number starting with "3" return "xenial".
       eg. "ceph-3.0-ubuntu"
    5) Otherwise raise, because we need to add more rules.
"""
    name = 'localbuild'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--dist', )

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()

        # Allow user to override the distro.
        if self.parser.has('--dist'):
            if self.parser.get('--dist') is None:
                raise SystemExit('Specify a distro to --dist')
            distro = self.parser.get('--dist')
        else:
            distro = get_distro()

        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()

        self._run(distro)

    def help(self):
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        pkg_name = util.package_name()

        os.environ['BUILDER'] = 'pbuilder'
        j_arg = self._get_j_arg(cpu_count())
        pbuilder_cache = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro

        setup_pbuilder_cache(pbuilder_cache, distro)

        util.setup_pristine_tar_branch()

        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        cmd = [
            'gbp', 'buildpackage',
            '--git-dist=%s' % distro, '--git-arch=amd64', '--git-verbose',
            '--git-pbuilder', j_arg, '-us', '-uc'
        ]

        log.info('building %s with pbuilder', pkg_name)
        subprocess.check_call(cmd)

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """
        Returns a string like "-j4" or "-j8". j is the number of processors,
        with a maximum of x, where x = TOTAL_RAM_GB / 4.

        We want to use all our processors (a high "j" value), but the build
        process will fail with an "out of memory" error if this j value is
        too high.

        An 8 GB system would have a maximum of -j2
        A 16 GB system would have a maximum of -j4
        A 32 GB system would have a maximum of -j8
        """
        if total_ram_gb is None:
            page_size = os.sysconf('SC_PAGE_SIZE')
            mem_bytes = page_size * os.sysconf('SC_PHYS_PAGES')
            # mem_gib is a decimal, eg. 7.707 on 8GB system
            mem_gib = mem_bytes / (1024.**3)
            # Round up to the nearest GB for our purposes.
            total_ram_gb = math.ceil(mem_gib)
        number = min(cpus, total_ram_gb / 4)
        return '-j%d' % max(number, 1)
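
# Minimal standalone sketch of the "-j" rule documented above, assuming
# j = min(CPUs, total RAM in GB / 4) with a floor of 1; the helper name is
# illustrative only.
def sketch_j_arg(cpus, total_ram_gb):
    number = min(cpus, total_ram_gb // 4)
    return '-j%d' % max(number, 1)

assert sketch_j_arg(4, 8) == '-j2'    # 8 GB system caps at -j2
assert sketch_j_arg(16, 32) == '-j8'  # 32 GB system caps at -j8
assert sketch_j_arg(2, 4) == '-j1'    # never drop below -j1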
Code example #45
File: patch.py  Project: red-hat-storage/rhcephpkg
class Patch(object):
    help_menu = 'apply patches from patch-queue branch'
    _help = """
Generate patches from a patch-queue branch.

"""
    name = 'patch'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Generate quilt patch series with gbp pq, and update d/rules """

        # Determine the names of the patch-queue branch and debian branch
        current_branch = util.current_branch()
        patches_branch = util.current_patches_branch()
        debian_branch = util.current_debian_branch()

        # TODO: default to fetching from upstream, the way rdopkg patch does.

        # Get the new sha1 to insert into the $COMMIT variable in d/rules
        cmd = ['git', 'rev-parse', patches_branch]
        patches_sha1 = subprocess.check_output(cmd).decode('utf-8').rstrip()

        # Switch to "debian" branch if necessary
        if current_branch != debian_branch:
            cmd = ['git', 'checkout', debian_branch]
            subprocess.check_call(cmd)

        # Get the original (old) patch series
        old_series = self.read_series_file('debian/patches/series')
        old_subjects = [p.subject for p in old_series]

        # Git-buildpackage pq operation
        cmd = ['gbp', 'pq', 'export']
        subprocess.check_call(cmd)

        # Add all patch files to Git's index
        cmd = ['git', 'add', '--all', 'debian/patches']
        subprocess.check_call(cmd)

        # Replace $COMMIT sha1 in d/rules
        with open('debian/rules') as rules:
            rules_file = rules.read()
        old = r'export COMMIT=[0-9a-f]{40}'
        new = 'export COMMIT=%s' % patches_sha1
        with open('debian/rules', 'w') as fileh:
            fileh.write(re.sub(old, new, rules_file))

        # Get the new patch series
        new_series = self.read_series_file('debian/patches/series')

        # Add patch entries to d/changelog
        changelog = []
        for p in new_series:
            if p.subject in old_subjects:
                continue
            change = p.subject
            bzs = self.get_rhbzs(p)
            bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
            if bzstr != '':
                change += ' (%s)' % bzstr
            changelog.append(change)
        util.bump_changelog(changelog)

        # Assemble a standard commit message string "clog".
        clog = "debian: %s\n" % util.get_deb_version()
        clog += "\n"
        clog += "Add patches from %s\n" % patches_branch
        clog += "\n"
        clog += util.format_changelog(changelog)

        # Commit everything with the standard commit message.
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(clog)
            temp.flush()
            cmd = [
                'git', 'commit', 'debian/changelog', 'debian/patches',
                'debian/rules', '-F', temp.name
            ]
            subprocess.check_call(cmd)

        # Summarize this commit on STDOUT for the developer.
        # (This matches the behavior of "rdopkg patch".)
        cmd = ['git', '--no-pager', 'log', '--name-status', 'HEAD~..HEAD']
        subprocess.check_call(cmd)

    def get_rhbzs(self, patch):
        bzs = re.findall(BZ_REGEX, patch.subject)
        bzs.extend(re.findall(BZ_REGEX, patch.long_desc))
        return bzs

    def read_series_file(self, file_):
        try:
            from gbp.patch_series import PatchSeries
            return PatchSeries.read_series_file(file_)
        except ImportError:
            log.warning('Please run "sudo apt-get install '
                        'git-buildpackage" to write the patches to '
                        './debian/changelog')
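
# Standalone illustration of the d/rules rewrite shown above: the
# forty-hex $COMMIT export is replaced with the new patch-queue sha1.
# Both sha1 values here are placeholders.
import re
rules_file = 'export COMMIT=' + '0' * 40 + '\n'
new_sha1 = 'f' * 40
updated = re.sub(r'export COMMIT=[0-9a-f]{40}',
                 'export COMMIT=%s' % new_sha1, rules_file)
assert updated == 'export COMMIT=' + 'f' * 40 + '\n'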
Code example #46
class CheckoutFromPatches(object):
    help_menu = 'Choose a Debian branch based on a RHEL -patches branch'
    _help = """
Check out the Git branch that corresponds to a given RHEL (rdopkg-style)
-patches branch.

If you are starting from a RHEL -patches branch name (say, from a trigger in
Jenkins), this will automatically choose the right Debian branch that goes
with your -patches branch.

Example:

  rhcephpkg checkout-from-patches ceph-3.0-rhel-patches

... this will check out the "ceph-3.0-xenial" Git branch if it exists in the
"origin" remote, or fall back to the "ceph-3.0-ubuntu" branch, or error if
neither exists.

Positional Arguments:

[branch]  The name of the -patches branch.
"""
    name = 'checkout-from-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            patches_branch = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(patches_branch)

    def help(self):
        return self._help

    def _run(self, patches_branch):
        debian_branch = self.get_debian_branch(patches_branch)
        if not debian_branch:
            err = 'could not find debian branch for %s' % patches_branch
            raise SystemExit(err)

        cmd = ['git', 'checkout', debian_branch]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_debian_branch(self, patches_branch):
        """
        Get the debian branch corresponding to this RHEL -patches branch.

        Examples:
        ceph-2-rhel-patches -> ceph-2-xenial or ceph-2-ubuntu
        ceph-2-rhel-patches-hotfix-bz123 -> ceph-2-ubuntu-hotfix-bz123

        :returns: name of debian branch, or None if none was found.
        """
        patches_re = re.compile('-rhel-patches')
        debian_re = patches_re.sub('-([a-z]+)', patches_branch)
        ubuntu_branch = None
        for branch in self.get_origin_branches():
            m = re.match('^%s$' % debian_re, branch)
            if m:
                if m.group(1) == 'ubuntu':
                    # Use this only if we could find no other distro branch.
                    ubuntu_branch = branch
                else:
                    return branch
        return ubuntu_branch

    def get_origin_branches(self):
        """ Return a list of all the branches in the "origin" remote. """
        cmd = ['git', 'branch', '-r', '--list', 'origin/*']
        output = subprocess.check_output(cmd)
        if six.PY3:
            output = output.decode('utf-8')
        lines = output.split("\n")
        branches = [line.strip()[7:] for line in lines]
        return branches
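
# Illustrative check of the -patches to Debian branch mapping above:
# "-rhel-patches" is rewritten to a "-([a-z]+)" capture so any distro
# suffix (xenial, trusty, ubuntu, ...) will match.
import re
debian_re = re.compile('-rhel-patches').sub('-([a-z]+)',
                                            'ceph-3.0-rhel-patches')
assert debian_re == 'ceph-3.0-([a-z]+)'
assert re.match('^%s$' % debian_re, 'ceph-3.0-xenial')
assert re.match('^%s$' % debian_re, 'ceph-3.0-ubuntu')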
Code example #47
File: gitbz.py  Project: ktdreyer/rhcephpkg
class Gitbz(object):
    help_menu = 'verify each RHBZ in the last Git commit message'
    _help = """
Verify that each RHBZ in the last Git commit message is approved for this
release.

If the commit message lacks any RHBZ number, or any RHBZs do not correspond to
this release (dist-git branch), then this command exits with a non-zero exit
code.

Requires a cached login to bugzilla (`bugzilla login` command).

This tool mimics the validation that the internal "gitbz" tool provides for
RHEL dist-git.
"""
    name = 'gitbz'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        debian_branch = util.current_debian_branch()
        try:
            flag = release_flag(debian_branch)
        except ValueError:
            raise SystemExit('could not parse debian branch "%s".'
                             % debian_branch)

        msg = last_commit_message()
        bzids = find_bzs(msg)

        if not bzids:
            raise SystemExit('no BZs found')

        bzapi = get_bzapi()
        bugs = bzapi.getbugs(bzids,
                             include_fields=['id', 'flags'],
                             permissive=False)
        missing = []
        for bug in bugs:
            has_release_flag = False
            for f in bug.flags:
                if f['name'] == flag:
                    print('rhbz#%s: %s%s' % (bug.id, f['name'], f['status']))
                    has_release_flag = True
            if not has_release_flag:
                missing.append(bug.id)

        if missing:
            print('Missing %s release flag:' % flag)
            for m in missing:
                print('rhbz#%s' % m)
            raise SystemExit(1)
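
# Minimal sketch of the release-flag check above with a stand-in flag list;
# the {'name': ..., 'status': ...} layout mirrors what the loop expects from
# python-bugzilla, and the values here are invented.
flag = 'ceph-3.0'
flags = [{'name': 'ceph-3.0', 'status': '+'},
         {'name': 'qa_ack', 'status': '?'}]
assert any(f['name'] == flag for f in flags)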
Code example #48
class WatchBuild(object):
    help_menu = 'watch a build-package job in Jenkins'
    _help = """
Watch a particular build-package job in Jenkins.

Positional Arguments:

[id]  The build-package job ID to watch

For example: "rhcephpkg watch-build 328"
"""
    name = 'watch-build'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            build_number = int(self.parser.unknown_commands[0])
        except (IndexError, ValueError):
            return self.parser.print_help()
        self.watch(build_number)

    def help(self):
        return self._help

    def watch(self, build_number):
        jenkins = util.jenkins_connection()

        build_info = jenkins.get_build_info('build-package', build_number)

        job_url = posixpath.join(jenkins.url, 'job', 'build-package',
                                 str(build_number))
        log.info('Watching %s' % job_url)

        pkg_name = self.pkg_name(build_info)

        start_seconds = build_info['timestamp'] / 1000.0
        # rcm-jenkins uses the America/New_York timezone:
        jenkins_tz = tz.gettz('America/New_York')
        start = datetime.fromtimestamp(start_seconds, jenkins_tz)
        # If you want to convert to local time:
        # start = start.astimezone(tz.tzlocal())
        log.info('Started %s' % start.strftime("%F %r %z"))

        was_building = build_info['building']
        while build_info['building']:
            try:
                elapsed = datetime.now(jenkins_tz) - start
                # TODO: Xenial has python-humanize (humanize.naturaldelta()
                # here)
                (minutes, seconds) = divmod(elapsed.total_seconds(), 60)
                # Clear the previous line:
                msg = '\r%s building for %02d:%02d' % \
                    (pkg_name, minutes, seconds)
                sys.stdout.write(msg)
                sys.stdout.flush()
                sleep(10)
                build_info = jenkins.get_build_info('build-package',
                                                    build_number)
            except requests.exceptions.ConnectionError as e:
                print('')
                log.error('connection error: %s' % e)
                log.info('Re-try watching with `rhcephpkg watch-build %s`' %
                         build_number)
            except KeyboardInterrupt:
                print('')
                log.info('continue watching with `rhcephpkg watch-build %s`' %
                         build_number)
                raise SystemExit(1)
        if was_building:
            # The above "while" loop will not print a final newline.
            print('')

        end_millis = build_info['timestamp'] + build_info['duration']
        end_seconds = end_millis / 1000.0
        end = datetime.fromtimestamp(end_seconds, jenkins_tz)
        log.info('Ended %s' % end.strftime("%F %r %z"))

        # Show the final build result.
        if build_info['result'] == 'SUCCESS':
            log.info('result is SUCCESS')
        else:
            log.error(build_info['result'])
            raise SystemExit(1)

    def pkg_name(self, build_info):
        """ Return a package name based on this build's information.

        :param build_info: ``dict`` from python-jenkins' get_build_info()
        :returns: ``str``, for example "ceph" or "ceph-ansible".
        """
        pkg_name = None
        for action in build_info['actions']:
            if action.get('_class', '') == 'hudson.model.ParametersAction':
                for parameter in action['parameters']:
                    if parameter['name'] == 'PKG_NAME':
                        pkg_name = parameter['value']
        if pkg_name is None:
            # Maybe the Jenkins job was badly mis-configured or something.
            # This will probably never happen, but let's raise defensively.
            raise RuntimeError('could not find pkg name in %s' % build_info)
        return pkg_name
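
# Hypothetical build_info fragment showing the shape pkg_name() walks: a
# ParametersAction entry carrying a PKG_NAME parameter. The values are
# invented for illustration.
example_build_info = {
    'actions': [
        {'_class': 'hudson.model.ParametersAction',
         'parameters': [{'name': 'PKG_NAME', 'value': 'ceph-ansible'}]},
    ],
}
found = None
for action in example_build_info['actions']:
    if action.get('_class', '') == 'hudson.model.ParametersAction':
        for parameter in action['parameters']:
            if parameter['name'] == 'PKG_NAME':
                found = parameter['value']
assert found == 'ceph-ansible'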
Code example #49
File: main.py  Project: gabriellasroman/ceph-medic
    def main(self, argv):
        options = [
            '--cluster',
            '--ssh-config',
            '--inventory',
            '--config',
            '--verbosity',
        ]
        parser = Transport(argv,
                           options=options,
                           check_help=False,
                           check_version=False)
        parser.parse_args()

        self.config_path = parser.get('--config', configuration.location())

        # load medic configuration
        loaded_config = configuration.load(
            path=parser.get('--config', self.config_path))

        # this is the earliest we can have enough config to setup logging
        log.setup(loaded_config)
        ceph_medic.config.file = loaded_config
        global_options = dict(ceph_medic.config.file._sections['global'])

        # SSH config
        ceph_medic.config.ssh_config = parser.get(
            '--ssh-config', global_options.get('--ssh-config'))
        if ceph_medic.config.ssh_config:
            ssh_config_path = ceph_medic.config.ssh_config
            if not os.path.exists(ssh_config_path):
                terminal.error("the given ssh config path does not exist: %s" %
                               ssh_config_path)
                sys.exit()

        ceph_medic.config.cluster_name = parser.get('--cluster', 'ceph')
        ceph_medic.metadata['cluster_name'] = 'ceph'

        # Deployment Type
        deployment_type = ceph_medic.config.file.get_safe(
            'global', 'deployment_type', 'baremetal')
        if deployment_type in ['kubernetes', 'openshift', 'k8s', 'oc']:
            pod_hosts = hosts.container_platform(deployment_type)
            ceph_medic.config.nodes = pod_hosts
            ceph_medic.config.hosts_file = ':memory:'
            self.hosts_file = ':memory:'
        else:
            # Hosts file
            self.hosts_file = parser.get('--inventory',
                                         configuration.get_host_file())

            # find the hosts files, by the CLI first, fallback to the configuration
            # file, and lastly if none of those are found or defined, try to load
            # from well known locations (cwd, and /etc/ansible/)
            loaded_hosts = configuration.load_hosts(
                parser.get('--inventory',
                           global_options.get('--inventory', self.hosts_file)))
            ceph_medic.config.nodes = loaded_hosts.nodes
            ceph_medic.config.hosts_file = loaded_hosts.filename
            self.hosts_file = loaded_hosts.filename

        parser.catch_version = ceph_medic.__version__
        parser.mapper = self.mapper
        parser.catch_help = self.help(parser.subhelp())
        if len(argv) <= 1:
            return parser.print_help()
        ceph_medic.config.config_path = self.config_path
        parser.dispatch()
        parser.catches_help()
        parser.catches_version()

        # Verbosity
        verbosity = parser.get('--verbosity', 'debug')
        ceph_medic.config.verbosity = verbosity.lower()
Code example #50
File: projects.py  Project: ceph/chacractl
class Project(object):
    _help = dedent(
        """
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """
    )
    help_menu = "create projects"
    options = ["create"]

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(chacractl.config["url"], "binaries")

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip("/")

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith("/"):
            url = "%s/" % url
        return url

    def post(self, url):
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])

        if exists.status_code == 200:
            logger.warning("resource exists, will not upload")
            logger.warning("SKIP %s", url)
            return
        elif exists.status_code == 404:
            logger.info("POSTing to project: %s", url)
            response = requests.post(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
            response.raise_for_status()

    def delete(self, url):
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])
        if exists.status_code == 404:
            logger.warning("project already deleted")
            logger.warning("SKIP %s", url)
            return
        logger.info("DELETE project: %s", url)
        response = requests.delete(url, auth=chacractl.config["credentials"], verify=chacractl.config["ssl_verify"])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()

        # handle posting projects:
        if self.parser.has("create"):
            url_part = self.sanitize_url(self.parser.get("create"))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info("reading input from stdin")
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but is not yet enabled from the CLI
        elif self.parser.has("delete"):
            url_part = self.sanitize_url(self.parser.get("delete"))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Code example #51
File: projects.py  Project: rvsharma7339/chacractl
class Project(object):
    _help = dedent("""
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """)
    help_menu = "create projects"
    options = ['create']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')

        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @retry()
    def post(self, url):
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])

        if exists.status_code == 200:
            logger.warning('resource exists, will not upload')
            logger.warning('SKIP %s', url)
            return
        elif exists.status_code == 404:
            logger.info('POSTing to project: %s', url)
            response = requests.post(url,
                                     auth=chacractl.config['credentials'],
                                     verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
            response.raise_for_status()

    @retry()
    def delete(self, url):
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('project already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE project: %s', url)
        response = requests.delete(url,
                                   auth=chacractl.config['credentials'],
                                   verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()

        # handle posting projects:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but is not yet enabled from the CLI
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
Code example #52
class Gitbz(object):
    help_menu = 'verify each RHBZ in the last Git commit message'
    _help = """
Verify that each RHBZ in the last Git commit message is approved for this
release.

If the commit message lacks any RHBZ number, or any RHBZs do not correspond to
this release (dist-git branch), then this command exits with a non-zero exit
code.

Requires a cached login to bugzilla (`bugzilla login` command).

This tool mimics the validation that the internal "gitbz" tool provides for
RHEL dist-git.
"""
    name = 'gitbz'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        debian_branch = util.current_debian_branch()
        try:
            flag = release_flag(debian_branch)
        except ValueError:
            raise SystemExit('could not parse debian branch "%s".' %
                             debian_branch)

        msg = last_commit_message()
        bzids = find_bzs(msg)

        if not bzids:
            raise SystemExit('no BZs found')

        bzapi = get_bzapi()
        bugs = bzapi.getbugs(bzids,
                             include_fields=['id', 'flags'],
                             permissive=False)
        missing = []
        for bug in bugs:
            has_release_flag = False
            for f in bug.flags:
                if f['name'] == flag:
                    print('rhbz#%s: %s%s' % (bug.id, f['name'], f['status']))
                    has_release_flag = True
            if not has_release_flag:
                missing.append(bug.id)

        if missing:
            print('Missing %s release flag:' % flag)
            for m in missing:
                print('rhbz#%s' % m)
            raise SystemExit(1)
Code example #53
File: merge_patches.py  Project: ktdreyer/rhcephpkg
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and our
Ubuntu packages.

Options:
--force    Do a hard reset, rather than restricting to fast-forward merges
           only. Use this option if the RHEL patches branch was amended or
           rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patches_branch = util.current_patches_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)

        # Do the merge
        if current_branch == patches_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
            if force:
                # Do a hard reset on HEAD instead.
                cmd = ['git', 'reset', '--hard',
                       'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch, patches_branch)]
            if force:
                # Do a hard push (with "+") instead.
                cmd = ['git', 'push', '.', '+patches/%s:%s' %
                       (rhel_patches_branch, patches_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123
        """
        (product, version, distro) = debian_branch.split('-', 2)
        suffix = None
        if '-' in distro:
            (distro, suffix) = distro.split('-', 1)
        rhel = '%s-%s-rhel-patches' % (product, version)
        if suffix is not None:
            rhel = '%s-%s' % (rhel, suffix)
        return rhel
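
# Quick standalone check of the branch-name mapping documented above, using
# a copy of the same split logic; the function name is illustrative only.
def sketch_rhel_patches_branch(debian_branch):
    product, version, distro = debian_branch.split('-', 2)
    suffix = None
    if '-' in distro:
        distro, suffix = distro.split('-', 1)
    rhel = '%s-%s-rhel-patches' % (product, version)
    if suffix is not None:
        rhel = '%s-%s' % (rhel, suffix)
    return rhel

assert sketch_rhel_patches_branch('ceph-2-xenial') == 'ceph-2-rhel-patches'
assert sketch_rhel_patches_branch('ceph-2-ubuntu-hotfix-bz123') == \
    'ceph-2-rhel-patches-hotfix-bz123'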
Code example #54
File: download.py  Project: red-hat-storage/rhcephpkg
class Download(object):
    help_menu = 'download a build from chacra'
    _help = """
Download a build's entire artifacts from chacra.

Positional Arguments:

[build]  The name of the build to download, eg. "ceph_10.2.0-2redhat1trusty"
"""
    name = 'download'

    def __init__(self, argv):
        self.argv = argv
        self.options = []

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            build = self.parser.unknown_commands[0]
        except IndexError:
            return self.parser.print_help()
        self._run(build)

    def help(self):
        return self._help

    def _run(self, build):
        configp = util.config()
        try:
            base_url = configp.get('rhcephpkg.chacra', 'url')
        except configparser.Error as err:
            raise SystemExit('Problem parsing .rhcephpkg.conf: %s' % err)
        try:
            (pkg, version) = build.split('_')
        except ValueError:
            log.error('%s is not a valid package build N-V-R' % build)
            return self.parser.print_help()
        build_url = posixpath.join(base_url, 'binaries/', pkg, version,
                                   'ubuntu', 'all')
        log.info('searching %s for builds' % build_url)
        build_response = urlopen(Request(build_url))
        headers = build_response.headers
        if six.PY2:
            encoding = headers.getparam('charset') or 'utf-8'
            # if encoding is None:
            #    encoding = 'utf-8'
        else:
            encoding = headers.get_content_charset(failobj='utf-8')
        payload = json.loads(build_response.read().decode(encoding))
        for arch, binaries in six.iteritems(payload):
            for binary in binaries:
                if os.path.isfile(binary):
                    # TODO: check the sha256sum of the already-downloaded file
                    # here?
                    log.info('skipping %s' % binary)
                    continue
                log.info('downloading %s' % binary)
                binary_url = posixpath.join(build_url, arch, binary) + '/'
                response = urlopen(Request(binary_url))
                with open(binary, 'wb') as fp:
                    shutil.copyfileobj(response, fp)
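
# Illustration of how the chacra search URL is assembled above from an
# N-V "build" string; the base URL below is a placeholder.
import posixpath
base_url = 'https://chacra.example.com/'
build = 'ceph_10.2.0-2redhat1trusty'
pkg, version = build.split('_')
build_url = posixpath.join(base_url, 'binaries/', pkg, version,
                           'ubuntu', 'all')
assert build_url == ('https://chacra.example.com/binaries/ceph/'
                     '10.2.0-2redhat1trusty/ubuntu/all')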