class StashClient(object):
    def __init__(self, server, username, password):
        self.logger = LoggerFactory.getLogger("com.xebialabs.bitbucket-plugin")
        creds = CredentialsFallback(server, username,
                                    password).getCredentials()
        self.http_request = HttpRequest(server, creds['username'],
                                        creds['password'])

    @staticmethod
    def get_client(server, username, password):
        return StashClient(server, username, password)

    def parse_output(self, lines):
        result_output = ""
        for line in lines:
            result_output = '\n'.join([result_output, line])
        return result_output

    def api_call(self, method, endpoint, **options):

        try:
            options['method'] = method.upper()
            options['context'] = endpoint
            #self.logger.warn( options )
            response = self.http_request.doRequest(**options)
        except ClientProtocolException:
            raise Exception("URL is not valid")
        if not response.isSuccessful():
            raise Exception("HTTP response code %s (%s)" %
                            (response.getStatus(), response.errorDump()))
        return response
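    # Illustrative call pattern (not part of the original plugin; the project
    # key, repository slug and credentials below are hypothetical):
    #
    #   client = StashClient.get_client(server, "jdoe", "secret")
    #   resp = client.api_call('GET',
    #                          '/rest/api/1.0/projects/PROJ/repos/my-repo',
    #                          contentType='application/json')
    #   repo = json.loads(resp.getResponse())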

    def stash_createpullrequest(self, variables):
        endpoint = "/rest/api/1.0/projects/%s/repos/%s/pull-requests" % (
            variables['project'], variables['repository'])
        reviewers_str = '['
        for reviewer in variables['reviewers'].split(','):
            reviewers_str += '''{"user":{"name":"%s"}},''' % (reviewer.strip())
        reviewers_str = reviewers_str[:-1]
        reviewers_str += ']'
        content = '''{
            "title": "%s",
            "description": "%s",
            "fromRef": {
                "id": "refs/heads/%s"
            },
            "toRef": {
                "id": "refs/heads/%s"
            },
            "reviewers": %s
        }''' % (str(variables['title']), str(variables['description']),
                str(variables['source']), str(
                    variables['target']), str(reviewers_str))
        self.logger.warn("Submitting Pull Request %s using endpoint %s" %
                         (content, endpoint))
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn("Pull Request created with ID %s " % data['id'])
        return {'output': data, 'prid': data['id']}
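    # Example 'variables' dict for stash_createpullrequest (keys taken from the
    # code above; the values are hypothetical):
    #
    #   {'project': 'PROJ', 'repository': 'my-repo',
    #    'title': 'Release 1.2', 'description': 'Merge release branch',
    #    'source': 'release/1.2', 'target': 'master',
    #    'reviewers': 'jdoe, asmith'}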

    def stash_mergepullrequest(self, variables):
        endpoint_get = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']))
        self.logger.warn(
            "Getting Pull Request %s current version using endpoint %s" %
            (str(variables['prid']), endpoint_get))
        response = self.api_call('GET',
                                 endpoint_get,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        content = '{}'
        endpoint_post = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s/merge?version=%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']), data['version'])
        self.logger.warn("Merging Pull Request %s using endpoint %s" %
                         (str(variables['prid']), endpoint_post))
        response = self.api_call('POST',
                                 endpoint_post,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn("Pull Request %s merged sucessfully with STATE : %s" %
                         (data['id'], data['state']))
        return {'output': data}

    def stash_declinepullrequest(self, variables):
        endpoint_get = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']))
        self.logger.warn(
            "Getting Pull Request %s current version using endpoint %s" %
            (str(variables['prid']), endpoint_get))
        response = self.api_call('GET',
                                 endpoint_get,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = json.loads(response.getResponse())
        content = '{}'
        endpoint_post = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s/decline?version=%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']), data['version'])
        self.logger.warn("Declining Pull Request %s using endpoint %s" %
                         (str(variables['prid']), endpoint_post))
        response = self.api_call('POST',
                                 endpoint_post,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn(
            "Pull Request %s decline sucessfully with STATE : %s" %
            (data['id'], data['state']))
        return {'output': data}

    def stash_getpullrequest(self, variables):
        endpoint_get = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']))
        self.logger.warn(
            "Getting Pull Request %s current version using endpoint %s" %
            (str(variables['prid']), endpoint_get))
        response = self.api_call('GET',
                                 endpoint_get,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = response.getResponse()
        return {'output': data}

    def stash_searchfilecontent(self, variables):
        endpoint = "/rest/api/1.0/projects/%s/repos/%s/browse/%s?at=refs/heads/%s" % (
            variables['project'], variables['repository'],
            str(variables['filepath']), variables['branch'])
        self.logger.warn("Parsing file content for file :%s for branch %s" %
                         (str(variables['filepath']), variables['branch']))
        response = self.api_call('GET',
                                 endpoint,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        pattern = re.compile(variables['pattern'])
        for item in data['lines']:
            result = pattern.search(item['text'])
            if result is not None and len(result.groups()) == 1:
                return {'group0': result.group(0), 'group1': result.group(1)}
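    # Illustrative example (hypothetical pattern and file line): with
    # variables['pattern'] = r'version=(\S+)' and a browsed line
    # 'version=1.2.3', the method returns
    #   {'group0': 'version=1.2.3', 'group1': '1.2.3'}
    # If no line matches with exactly one capture group, it returns None.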

    def stash_waitformerge(self, variables):
        endpoint = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s" % (
            variables['project'], variables['repository'],
            str(variables['prid']))
        self.logger.warn(
            "Waiting for Merge Pull Request %s using endpoint %s" %
            (str(variables['prid']), endpoint))
        isClear = False
        while (not isClear):
            response = self.api_call('GET',
                                     endpoint,
                                     contentType="application/json")
            data = json.loads(response.getResponse())
            if data['state'] == "MERGED":
                isClear = True
                self.logger.warn(
                    "Pull Request %s merged sucessfully with STATE : %s" %
                    (data['id'], data['state']))
            else:
                self.logger.warn(
                    "Pull Request %s : current STATE :[ %s ], retrying after %s seconds\n"
                    % (data['id'], data['state'], str(
                        variables['pollInterval'])))
                time.sleep(variables['pollInterval'])
        return {'output': data}

    def stash_tagrelease(self, variables):
        endpoint = "/rest/git/1.0/projects/%s/repos/%s/tags" % (
            variables['project'], variables['repository'])
        self.logger.warn("Tag project (%s/%s" %
                         (variables['project'], variables['repository']))
        content = '''{"force":"true", "message":"%s", "name":"%s", "startPoint":"refs/heads/%s", "type":"ANNOTATED"}''' % (
            variables['message'], variables['tagname'], variables['branch'])
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = response.getResponse()
        return {'output': data}

    # TODO - the Apache client doesn't support a body with the DELETE method; add that ability to xlrelease.HTTPRequest
    def stash_deletebranch_old(self, variables):
        endpoint = "/rest/branch-utils/1.0/projects/%s/repos/%s/branches" % (
            variables['project'], variables['repository'])
        content = '''{"name": "refs/heads/%s"}''' % (variables['branch'])
        self.logger.warn("Deleting %s using endpoint %s" % (content, endpoint))
        response = self.api_call('DELETE',
                                 endpoint,
                                 body=content,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        if response.getStatus() == "204 No Content":
            self.logger.warn("Successfully deleted branch %s " %
                             (variables['branch']))
            return {}
        else:
            raise Exception(" Not able to delete branch %s " %
                            (variables['branch']))

    def stash_deletebranch(self, variables):
        endpoint = "/rest/branch-utils/1.0/projects/%s/repos/%s/branches" % (
            variables['project'], variables['repository'])
        content = '''{"name": "refs/heads/%s"}''' % (variables['branch'])
        self.logger.warn("Deleting %s using endpoint %s" % (content, endpoint))
        url_split = variables['server']['url'].split("://")
        userAndPass = b64encode(b"%s:%s" %
                                (self.http_request.username,
                                 self.http_request.password)).decode("ascii")
        headers = {
            'Authorization': 'Basic %s' % userAndPass,
            'Content-Type': 'application/json'
        }
        if url_split[0].lower() == "http":
            conn = httplib.HTTPConnection(url_split[1])
        else:
            conn = httplib.HTTPSConnection(url_split[1])
        conn.request('DELETE', endpoint, content, headers=headers)
        response = conn.getresponse()
        if str(response.status) == "204":
            self.logger.warn("Successfully deleted branch %s " %
                             (variables['branch']))
            return {}
        else:
            raise Exception(
                "Unable to delete branch %s, response code: %s" %
                (variables['branch'], response.status))
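    # Sketch of the host handling above (hypothetical URL): for a server URL of
    # 'https://stash.example.com:7990', url_split is
    # ['https', 'stash.example.com:7990'], so an HTTPSConnection is opened to
    # 'stash.example.com:7990' and the DELETE request carries the JSON body
    # directly, working around the limitation noted in the TODO above.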

    # Requires the Stash archive plugin to be installed
    def stash_downloadcode(self, variables):
        downloadURL = "%s/rest/archive/latest/projects/%s/repos/%s/archive?at=refs/heads/%s&format=zip" % (
            variables['server']['url'], variables['project'],
            variables['repository'], variables['branch'])
        connection = LocalConnection.getLocalConnection()

        capturedOutput = ""

        self.logger.warn("Cleaning up download folder : %s" %
                         variables['downloadPath'])
        command = CmdLine()
        command.addArgument("mkdir")
        command.addArgument("-p")
        command.addArgument(variables['downloadPath'])
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput = self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        self.logger.warn(" Now downloading code in download folder : %s" %
                         variables['downloadPath'])
        command = CmdLine()
        script = '''
            cd %s
            ls | grep -v extract.sh | xargs rm -rf
            wget --user %s --password %s  -O code.zip '%s'
            unzip -o code.zip
            rm code.zip
        ''' % (variables['downloadPath'], self.http_request.username,
               self.http_request.password, downloadURL)
        script_file = connection.getFile(
            OverthereUtils.constructPath(
                connection.getFile(variables['downloadPath']), 'extract.sh'))
        OverthereUtils.write(String(script).getBytes(), script_file)
        script_file.setExecutable(True)
        command.addArgument(script_file.getPath())
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput += self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        command = CmdLine()
        command.addArgument("rm")
        command.addArgument("-f")
        command.addArgument(variables['downloadPath'] + "/extract.sh")
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput += self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        return {'output': capturedOutput}
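    # For illustration only, with downloadPath='/tmp/xlr-download' and
    # branch='develop' (hypothetical values), the generated extract.sh is
    # roughly:
    #
    #   cd /tmp/xlr-download
    #   ls | grep -v extract.sh | xargs rm -rf
    #   wget --user <username> --password <password> -O code.zip '<server>/rest/archive/latest/projects/PROJ/repos/my-repo/archive?at=refs/heads/develop&format=zip'
    #   unzip -o code.zip
    #   rm code.zip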

    def stash_createbranch(self, variables):
        endpoint = "/rest/api/1.0/projects/%s/repos/%s/branches" % (
            variables['project'], variables['repository'])
        content = '''{
                        "name": "%s",
                        "startPoint": "%s",
                        "message": "%s"
        }''' % (str(variables['branchName']), str(
            variables['startPoint']), str(variables['message']))
        self.logger.warn(
            "Submitting Create Branch Request %s using endpoint %s" %
            (content, endpoint))
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn(
            "Sucessfully created branch :  %s from commit has %s" %
            (str(variables['branch_name']), str(variables['startPoint'])))
        return {'output': data}

    def stash_approvepullrequest(self, variables):
        endpoint_post = "/rest/api/1.0/projects/%s/repos/%s/pull-requests/%s/approve" % (
            variables['project'], variables['repository'],
            str(variables['prid']))
        content = '{}'
        self.logger.warn("Approving Pull Request %s using endpoint %s" %
                         (str(variables['prid']), endpoint_post))
        response = self.api_call('POST',
                                 endpoint_post,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn(
            "Pull Request %s approved sucessfully with STATE : %s" %
            (str(variables['prid']), data['status']))
        return {'output': data}

    def stash_commitsquery(self, variables):
        variables['slug'] = variables['repository']
        data = json.loads(self.stash_querycommits(variables))
        commits = data['parents']
        self.logger.warn("Build commitList\n %s" % commits)
        commitList = []
        for commit in commits:
            self.logger.warn("~%s~" % commit['message'])
            commitList.append(commit['message'])
        results = {"output": data, "commitList": commitList}
        return results

    def stash_querycommits(self, variables):
        endpoint_get = "/rest/api/1.0/projects/%s/repos/%s/commits" % (
            variables['project'], variables['slug'])
        if variables['branch'] is not None:
            endpoint_get = "%s/%s" % (endpoint_get, variables['branch'])
        endpoint_get = "%s/?limit=%s" % (endpoint_get,
                                         variables['results_limit'])
        if (variables['tag'] is not None):
            endpoint_get = "%s&at=refs/tags/%s" % (endpoint_get,
                                                   variables['tag'])
        self.logger.warn("stash_querycommits->endpoint_get = %s " %
                         endpoint_get)
        response = self.api_call('GET',
                                 endpoint_get,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = response.getResponse()
        self.logger.warn("endpint = %s" % endpoint_get)
        self.logger.warn(
            "DATA2 = %s" %
            json.dumps(json.loads(data), indent=4, sort_keys=True))
        return data

    def stash_querymergerequests(self, variables):
        endpoint = "/rest/api/1.0/projects/%s/repos/%s/pull-requests?state=%s&limit=100" % (
            variables['project'], variables['slug'], variables['state'])
        self.logger.warn("URL = %s" % endpoint)
        response = self.api_call('GET',
                                 endpoint,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = response.getResponse()
        self.logger.warn(
            "merge_requests = %s" %
            json.dumps(json.loads(data), indent=4, sort_keys=True))
        return data
class BitbucketClient(object):
    def __init__(self, server, username, password):
        self.logger = LoggerFactory.getLogger("com.xebialabs.bitbucket-plugin")
        creds = CredentialsFallback(server, username,
                                    password).getCredentials()
        self.http_request = HttpRequest(server, creds['username'],
                                        creds['password'])

    @staticmethod
    def get_client(server, username, password):
        return BitbucketClient(server, username, password)

    def parse_output(self, lines):
        result_output = ""
        for line in lines:
            result_output = '\n'.join([result_output, line])
        return result_output

    def api_call(self, method, endpoint, **options):
        try:
            options['method'] = method.upper()
            options['context'] = endpoint
            response = self.http_request.doRequest(**options)
        except ClientProtocolException:
            raise Exception("URL is not valid")
        if not response.isSuccessful():
            raise Exception("HTTP response code %s (%s)" %
                            (response.getStatus(), response.errorDump()))
        return response

    def bitbucket_createpullrequest(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests" % str(
            variables['repo_full_name'])
        content = '''{
            "title": "%s",
            "description": "%s",
            "source": {
                "branch": {
                    "name": "%s"
                }
            },
            "destination": {
                "branch": {
                    "name": "%s"
                }
            },
            "close_source_branch": %s
        }''' % (str(variables['title']), str(variables['description']),
                str(variables['source']), str(variables['target']),
                str(variables['closebranch']).lower())
        self.logger.warn("Submitting Pull Request %s using endpoint %s" %
                         (content, endpoint))
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn("Pull Request created with ID %s " % data['id'])
        return {'output': data, 'prid': data['id']}
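    # Example 'variables' dict for bitbucket_createpullrequest (keys taken from
    # the code above; the values are hypothetical):
    #
    #   {'repo_full_name': 'myteam/my-repo', 'title': 'Release 1.2',
    #    'description': 'Merge release branch', 'source': 'release/1.2',
    #    'target': 'master', 'closebranch': True}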

    def bitbucket_mergepullrequest(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests/%s/merge" % (str(
            variables['repo_full_name']), str(variables['prid']))
        content = '''{
            "message": "%s",
            "close_source_branch": %s
        }''' % (str(variables['message']), str(
            variables['closebranch']).lower())
        self.logger.warn("Merging Pull Request %s using endpoint %s" %
                         (content, endpoint))
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        self.logger.warn("Pull Request %s merged sucessfully with STATE : %s" %
                         (data['id'], data['state']))
        return {'output': data}

    def bitbucket_waitformerge(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests/%s" % (str(
            variables['repo_full_name']), str(variables['prid']))
        self.logger.warn(
            "Waiting for Merge Pull Request %s using endpoint %s" %
            (str(variables['prid']), endpoint))
        isClear = False
        while (not isClear):
            response = self.api_call('GET',
                                     endpoint,
                                     contentType="application/json")
            data = json.loads(response.getResponse())
            if data['state'] == "MERGED":
                isClear = True
                self.logger.warn(
                    "Pull Request %s merged sucessfully with STATE : %s" %
                    (data['id'], data['state']))
            else:
                self.logger.warn(
                    "Pull Request %s : current STATE :[ %s ], retrying after %s seconds\n"
                    % (data['id'], data['state'], str(
                        variables['pollInterval'])))
                time.sleep(variables['pollInterval'])
        return {'output': data}

    def bitbucket_downloadcode(self, variables):
        downloadURL = "%s/%s/get/%s.zip" % (variables['server']['url'].replace(
            "api.", "www."), variables['repo_full_name'], variables['branch'])
        connection = LocalConnection.getLocalConnection()

        capturedOutput = ""

        self.logger.warn("Cleaning up download folder : %s" %
                         variables['downloadPath'])
        command = CmdLine()
        command.addArgument("rm")
        command.addArgument("-rf")
        command.addArgument(variables['downloadPath'] + "/*")
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput = self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        self.logger.warn(" Now downloading code in download folder : %s" %
                         variables['downloadPath'])
        command = CmdLine()
        script = '''
            cd %s
            wget --user %s --password %s  -O code.zip %s
            unzip code.zip
            rm -rf *.zip
            foldername=`ls -d */`
            mv -f $foldername* `pwd`
            rm -rf $foldername
        ''' % (variables['downloadPath'], self.http_request.username,
               self.http_request.password, downloadURL)
        script_file = connection.getFile(
            OverthereUtils.constructPath(
                connection.getFile(variables['downloadPath']), 'extract.sh'))
        OverthereUtils.write(String(script).getBytes(), script_file)
        script_file.setExecutable(True)
        command.addArgument(script_file.getPath())
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput += self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        command = CmdLine()
        command.addArgument("rm")
        command.addArgument("-f")
        command.addArgument(variables['downloadPath'] + "/extract.sh")
        output_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        error_handler = CapturingOverthereExecutionOutputHandler.capturingHandler(
        )
        exit_code = connection.execute(output_handler, error_handler, command)
        capturedOutput += self.parse_output(
            output_handler.getOutputLines()) + self.parse_output(
                error_handler.getOutputLines())

        return {'output': capturedOutput}

    def bitbucket_commitsquery(self, variables):
        self.logger.warn("bitbucket_commitsquery-> START")
        data = self.bitbucket_querycommits(variables)
        commits = data
        #self.logger.warn( "Build commitList\n %s" % json.dumps(commits, indent=4, sort_keys=True) )
        commitList = []
        self.logger.warn("bitbucket_commitsquery-> Loop over commits")
        for commit in commits:
            self.logger.warn("message ~%s~" % commit['message'])
            commitList.append(commit['message'])
        results = {"output": data, "commitList": commitList}
        self.logger.warn("results\n %s" %
                         json.dumps(results, indent=4, sort_keys=True))
        return results

    def bitbucket_querycommits(self, variables):
        endpoint_get = "/2.0/repositories/%s/commits/%s" % (
            variables['repo_full_name'], variables['branch'])
        endpoint_get = "%s?limit=%s" % (endpoint_get,
                                        variables['results_limit'])
        if (variables['tag'] is not None):
            endpoint_get = "%s&at=refs/tags/%s" % (endpoint_get,
                                                   variables['tag'])
        self.logger.warn("endpoint = %s" % endpoint_get)
        response = self.api_call('GET',
                                 endpoint_get,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = response.getResponse()
        data = json.loads(data)['values']
        self.logger.warn("DATA2 = %s" %
                         json.dumps(data, indent=4, sort_keys=True))
        return data

    def bitbucket_querymergerequests(self, variables):
        endpoint = "2.0/repositories/%s/pullrequests?state=%s" % (
            variables['repo_full_name'], variables['state'])
        self.logger.warn("URL = %s" % endpoint)
        response = self.api_call('GET',
                                 endpoint,
                                 contentType="application/json",
                                 Origin=variables['server']['url'])
        data = json.loads(response.getResponse())['values']
        self.logger.warn("merge_requests = %s" %
                         json.dumps(data, indent=4, sort_keys=True))
        return data
class CloudboltClient(object):
    def __init__(self, server, username, password):
        creds = CredentialsFallback(server, username,
                                    password).getCredentials()
        self.http_request = HttpRequest(server, creds['username'],
                                        creds['password'])

    @staticmethod
    def get_client(server, username, password):
        return CloudboltClient(server, username, password)

    def parse_output(self, lines):
        result_output = ""
        for line in lines:
            result_output = '\n'.join([result_output, line])
        return result_output

    def api_call(self, method, endpoint, **options):

        try:
            options['method'] = method.upper()
            options['context'] = endpoint
            response = self.http_request.doRequest(**options)
        except ClientProtocolException:
            raise Exception("URL is not valid")
        if not response.isSuccessful():
            raise Exception("HTTP response code %s (%s)" %
                            (response.getStatus(), response.errorDump()))
        return response

#
# Task Methods /API Calls
#
#

    def cloudbolt_provisionserver(self, variables):
        order_id = self.create_order_for_group(variables['groupId'],
                                               variables['ownerId'])

        order_item = self.prov_order_item_dict(variables['envId'],
                                               variables['hostname'],
                                               variables['osBuildId'],
                                               variables['parameters'],
                                               variables['preconfigurations'],
                                               variables['appIds'])

        self.add_prov_order_item(order_id, order_item)

        order = self.submit_order(order_id)
        if order.get('status', '') == 'PENDING':
            # If it's awaiting approval, that must mean it was not auto-approved and, if there was an
            #  order approval hook, it did not approve it. Attempt to approve now.
            order = self.approve_order(order_id)

        # Here we expect the order to be approved; if not,
        # something in this group or environment is misconfigured and manual
        # intervention is required.
        if order.get('status', '') != 'ACTIVE':
            sys.exit(
                "Failure: The submitted order is not active. Please ensure "
                "that the user has approval permission on this group or that "
                "auto-approval is enabled for this group or environment.")

        order = self.wait_for_order_completion(order_id,
                                               int(variables['waitTimeout']),
                                               int(variables['waitInterval']))

        if order['status'] != 'SUCCESS':
            raise Exception("Order not successful. Please retry")

        endpoint = order["_links"]["jobs"][0]["href"]
        resp = self.api_call('GET', endpoint, contentType="application/json")
        job = json.loads(resp.getResponse())
        match = re.search(r"\(ID[ ]*(\d+)\)", job["output"].replace("\n", ""))
        serverId = match.group(1)
        return {"serverId": serverId}

    def cloudbolt_decommissionserver(self, variables):
        order_id = self.create_order_for_group(variables['groupId'])
        order_item = self.decom_order_item_dict(variables['envId'],
                                                [variables['serverId']])
        self.add_decom_order_item(order_id, order_item)
        order = self.submit_order(order_id)
        if order.get('status', '') == 'PENDING':
            order = self.approve_order(order_id)
        # Here we expect the order to be approved; if not,
        # something in this group or environment is misconfigured and manual
        # intervention is required.
        if order['status'] != 'ACTIVE':
            sys.exit(
                "Failure: The submitted order is not active. Please ensure "
                "that the user has approval permission on this group or that "
                "auto-approval is enabled for this group or environment.")

        order = self.wait_for_order_completion(order_id,
                                               int(variables['waitTimeout']),
                                               int(variables['waitInterval']))

        if order['status'] != 'SUCCESS':
            raise Exception("Order not successful. Please retry")
        return {}

#
# Helper Methods
#
#

    def create_order_for_group(self, group_id, owner_id=None):
        """
        Starts a new order for this group on the CloudBolt connection.
        Optionally sets the owner to the User with ID=owner_id.
        """
        print 'Creating order for group {0}...'.format(group_id)
        body = {'group': "/api/v2/groups/{0}".format(group_id)}
        endpoint = '/api/v2/orders/'
        if owner_id:
            body['owner'] = '/api/v2/users/{0}'.format(owner_id)
            print '... and owner {0}'.format(owner_id)
        resp = self.api_call('POST',
                             endpoint,
                             body=json.dumps(body),
                             contentType="application/json")
        response = json.loads(resp.getResponse())
        if 'status_code' in response and response['status_code'] not in range(
                200, 300):
            error = self.pretty_print(response)
            print "Error creating order through the API: {0}".format(error)
            sys.exit(1)

        order_url = response['_links']['self']['href']
        order_id = order_url.replace('/api/v2/orders/', '')

        print 'Order {order_id} created.'.format(order_id=order_id)
        return int(order_id)
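    # For example (hypothetical response), a self link of '/api/v2/orders/42'
    # in the created order yields order_id 42; that numeric ID is then used by
    # submit_order, approve_order and get_order below.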

    def pretty_print(self, dictionary):
        """
        Method to print out the entire response dict in a nice way
        """
        pp = ""
        for key in dictionary.keys():
            pp += "{0}: {1}\n".format(key, dictionary[key])
        return pp

    def get_order(self, order_id):
        """
        Return dict representing an order details JSON.
        """
        endpoint = "/api/v2/orders/{0}/".format(order_id)
        resp = self.api_call('GET', endpoint, contentType="application/json")
        response = json.loads(resp.getResponse())

        return response

    def submit_order(self, order_id):
        """
        Submit order for approval.
        """
        print 'Submitting order {0}...'.format(order_id)
        endpoint = "/api/v2/orders/{0}/actions/submit/".format(order_id)
        resp = self.api_call('POST',
                             endpoint,
                             body="",
                             contentType="application/json")
        response = json.loads(resp.getResponse())
        print 'Response:\n', response
        return response

    def approve_order(self, order_id):
        """
        Approve order.
        """
        print 'Approving order {0}...'.format(order_id)
        endpoint = "/api/v2/orders/{0}/actions/approve/".format(order_id)
        response = self.api_call('POST',
                                 endpoint,
                                 contentType="application/json")
        print 'Response:\n', response
        return response

    def wait_for_order_completion(self, order_id, timeout_sec, interval_sec):
        """
        Polls for this order's status to change from 'ACTIVE', retrying every
        interval_sec seconds.

        When complete, prints the output & error fields from each job in the order.

        Returns the final order dict; if the wait timeout is reached, an
        Exception is raised.
        """
        print 'Waiting for order to complete (timeout {0}s)...'.format(
            timeout_sec)
        order = self.get_order(order_id)

        start = time.time()
        waited = 0
        completed = ['SUCCESS', 'WARNING', 'FAILURE']
        while waited < timeout_sec and order['status'] not in completed:
            time.sleep(interval_sec)
            waited = time.time() - start
            order = self.get_order(order_id)
            sys.stdout.write('.')
            sys.stdout.flush()

        print "\n"
        if waited >= timeout_sec:
            # By returning this instead of printing, the caller can send it
            # to stderr instead (via sys.exit() for example).
            print(
                "Failed: Order did not complete within {0}s. "
                "Most recent order status was {1}.".format(
                    timeout_sec, order['status']))
            raise Exception("Failed: Order did not complete within {0}s. "
                            "Most recent order status was {1}.".format(
                                timeout_sec, order['status']))

        self.print_order_job_outputs(order)
        # if order['status'] != 'SUCCESS':
        #   return 1
        # return 0
        return order

    def wait_for_job_completion(self, job_id, timeout_sec, interval_sec):
        """
        Polls for this job's status to change to a completed one, retrying every
        interval_sec seconds.

        When complete, prints the output & error fields from the job.

        If the job succeeds, return 0; if the job fails, return 1; if the wait
        timeout is reached, return 3.
        """
        print 'Waiting for job to complete (timeout {0}s)...'.format(
            timeout_sec)
        endpoint = "/api/v2/jobs/{}/".format(job_id)
        resp = self.api_call('GET', endpoint, contentType="application/json")
        job = json.loads(resp.getResponse())

        start = time.time()
        waited = 0
        completed = ['SUCCESS', 'WARNING', 'FAILURE']
        while waited < timeout_sec and job['status'] not in completed:
            time.sleep(interval_sec)
            waited = time.time() - start
            endpoint = "/api/v2/jobs/{}/".format(job_id)
            resp = self.api_call('GET',
                                 endpoint,
                                 contentType="application/json")
            job = json.loads(resp.getResponse())
            sys.stdout.write('.')
            sys.stdout.flush()

        print "\n"
        if waited >= timeout_sec:
            # By returning this instead of printing, the caller can send it
            # to stderr instead (via sys.exit() for example).
            print(
                "Failed: Job did not complete within {0}s. "
                "Most recent job status was {1}.".format(
                    timeout_sec, job['status']))
            return 3

        print job["_links"]["self"]["title"]
        print "Output: ", job.get("output", "no output")
        print "Errors: ", job.get("errors", "no errors")
        if job['status'] != 'SUCCESS':
            return 1
        return 0

    def print_order_job_outputs(self, order):
        """
        Prints the output & error for each job within this order
        """
        for j in order["_links"]["jobs"]:
            endpoint = j["href"]
            resp = self.api_call('GET',
                                 endpoint,
                                 contentType="application/json")
            job = json.loads(resp.getResponse())
            print job["_links"]["self"]["title"]
            print "Output: ", job.get("output", "no output")
            print "Errors: ", job.get("errors", "no errors")

    def zipdir(self, dir_path=None, zip_path=None, include_dir_in_zip=True):
        """
        Zips up `dir_path` and returns the zip file's path.

        `dir_path` may be '~/a/b/dirname' or '/c/d/dirname' or 'e/f/dirname'.

        `zip_path`: optional path for the new zip file.  By default the zip file is
        created next to the directory and named after it.

        `include_dir_in_zip`: if True (default), the archive will have one base
        directory named after the directory being zipped; otherwise no prefix will
        be added to the archive members.

        E.g.
            zip_dir('~/a/b/dirname')
              -> dirname.zip with files like 'dirname/blueprint.json'
        """
        dir_path = dir_path.rstrip('/')
        dir_path = os.path.abspath(os.path.expanduser(dir_path))

        if not zip_path:
            zip_path = dir_path + ".zip"
        if not os.path.isdir(dir_path):
            raise OSError("dir_path argument must point to a directory. "
                          "'%s' does not." % dir_path)
        parent_dir, dir_to_zip = os.path.split(dir_path)

        # Little nested function to prepare the proper archive path
        def trimPath(path):
            archive_path = path.replace(parent_dir, "", 1)
            if parent_dir:
                archive_path = archive_path.replace(os.path.sep, "", 1)
            if not include_dir_in_zip:
                archive_path = archive_path.replace(dir_to_zip + os.path.sep,
                                                    "", 1)
            return os.path.normcase(archive_path)

        out_file = zipfile.ZipFile(zip_path,
                                   "w",
                                   compression=zipfile.ZIP_DEFLATED)
        for (archive_path, dir_names, file_names) in os.walk(dir_path):
            for fileName in file_names:
                filePath = os.path.join(archive_path, fileName)
                out_file.write(filePath, trimPath(filePath))
        out_file.close()
        return zip_path
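    # Usage sketch (hypothetical path): zipping '~/blueprints/my-bp' creates a
    # 'my-bp.zip' next to that directory, with members prefixed 'my-bp/':
    #
    #   zip_path = client.zipdir('~/blueprints/my-bp')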

    def prov_order_item_dict(self,
                             env_id,
                             hostname=None,
                             os_build_id=None,
                             parameters={},
                             preconfigurations={},
                             app_ids=[]):
        """
        Helper function to build a dict representing a server provisioning order item.
        """
        order_item = {"environment": "/api/v2/environments/{0}".format(env_id)}
        if hostname:
            order_item['attributes'] = {"hostname": hostname}
        if os_build_id:
            order_item['os-build'] = "/api/v2/os-builds/{0}".format(
                os_build_id)
        if parameters:
            order_item['parameters'] = parameters
        if preconfigurations:
            order_item['preconfigurations'] = preconfigurations
        if app_ids:
            order_item['applications'] = []
            for app_id in app_ids:
                order_item['applications'].append(
                    "/api/v2/applications/{0}".format(app_id))
        return order_item
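    # For example (hypothetical IDs), prov_order_item_dict(5, hostname='web01',
    # os_build_id=2, app_ids=[7]) returns:
    #
    #   {'environment': '/api/v2/environments/5',
    #    'attributes': {'hostname': 'web01'},
    #    'os-build': '/api/v2/os-builds/2',
    #    'applications': ['/api/v2/applications/7']}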

    def add_prov_order_item(self, order_id, prov_item):
        """
        Creates a new "provision server" order item on specified order.
        Args:
            order_id
            prov_item: dict representing server provisioning order item
                See API docs for samples.
        """
        print 'Adding order item to order {0}...'.format(order_id)
        print json.dumps(prov_item, indent=4)
        endpoint = "/api/v2/orders/{0}/prov-items/".format(order_id)
        response = self.api_call('POST',
                                 endpoint,
                                 body=json.dumps(prov_item),
                                 contentType="application/json")
        print 'Response:\n', response
        return response

    def decom_order_item_dict(self, env_id, server_ids):
        """
        Helper function to build a dict representing a server decom order item.
        """
        order_item = {
            "environment":
            "/api/v2/environments/{env_id}".format(env_id=env_id),
            "servers": []
        }

        for server_id in server_ids:
            order_item['servers'].append(
                "/api/v2/servers/{server_id}".format(server_id=server_id))
        return order_item
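    # For example (hypothetical IDs), decom_order_item_dict(5, [11, 12]) returns:
    #
    #   {'environment': '/api/v2/environments/5',
    #    'servers': ['/api/v2/servers/11', '/api/v2/servers/12']}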

    def add_decom_order_item(self, order_id, decom_item):
        """
        Creates a new "decommission server" order item on specified order.
        Args:
            order_id
            decom_item: dict representing server decommissioning order item
                See API docs for samples.
        """
        print 'Adding order item to order {order_id}...'.format(
            order_id=order_id)
        print json.dumps(decom_item, indent=4)
        endpoint = "/api/v2/orders/{order_id}/decom-items/".format(
            order_id=order_id)
        response = self.api_call('POST',
                                 endpoint,
                                 body=json.dumps(decom_item),
                                 contentType="application/json")
        print 'Response:\n', response
        return response
class BitbucketClient(object):
    def __init__(self, server, username, password):
        self.creds = CredentialsFallback(server, username,
                                         password).getCredentials()
        self.http_request = HttpRequest(server, self.creds['username'],
                                        self.creds['password'])

    @staticmethod
    def get_client(server, username, password):
        return BitbucketClient(server, username, password)

    def api_call(self, method, endpoint, **options):
        try:
            options['method'] = method.upper()
            options['context'] = endpoint
            response = self.http_request.doRequest(**options)
        except ClientProtocolException:
            raise Exception("URL is not valid")
        if not response.isSuccessful():
            raise Exception("HTTP response code %s (%s)" %
                            (response.getStatus(), response.errorDump()))
        return response

    def bitbucket_createpullrequest(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests" % str(
            variables['repo_full_name'])
        content = '''{
            "title": "%s",
            "description": "%s",
            "source": {
                "branch": {
                    "name": "%s"
                }
            },
            "destination": {
                "branch": {
                    "name": "%s"
                }
            },
            "close_source_branch": %s
        }''' % (str(variables['title']), str(variables['description']),
                str(variables['source']), str(variables['target']),
                str(variables['closebranch']).lower())
        print "Submitting Pull Request %s using endpoint %s" % (content,
                                                                endpoint)
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        print "Pull Request created with ID %s " % data['id']
        return {'output': data, 'prid': data['id']}

    def bitbucket_mergepullrequest(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests/%s/merge" % (str(
            variables['repo_full_name']), str(variables['prid']))
        content = '''{
            "message": "%s",
            "close_source_branch": %s
        }''' % (str(variables['message']), str(
            variables['closebranch']).lower())
        print "Merging Pull Request %s using endpoint %s" % (content, endpoint)
        response = self.api_call('POST',
                                 endpoint,
                                 body=content,
                                 contentType="application/json")
        data = json.loads(response.getResponse())
        print "Pull Request %s merged successfully with STATE : %s" % (
            data['id'], data['state'])
        return {'output': data}

    def bitbucket_waitformerge(self, variables):
        endpoint = "/2.0/repositories/%s/pullrequests/%s" % (str(
            variables['repo_full_name']), str(variables['prid']))
        print "Waiting for Merge Pull Request %s using endpoint %s" % (str(
            variables['prid']), endpoint)
        is_clear = False
        while not is_clear:
            response = self.api_call('GET',
                                     endpoint,
                                     contentType="application/json")
            data = json.loads(response.getResponse())
            if data['state'] == "MERGED":
                is_clear = True
                print "Pull Request %s merged successfully with STATE : %s" % (
                    data['id'], data['state'])
            else:
                print "Pull Request %s : current STATE :[ %s ], retrying after %s seconds\n" % (
                    data['id'], data['state'], str(variables['pollInterval']))
                time.sleep(variables['pollInterval'])
        return {'output': data}

    def bitbucket_downloadcode(self, variables):
        session = requests.Session()
        session.auth = (self.creds['username'], self.creds['password'])
        download_url = "%s/%s/get/%s.zip" % (
            variables['server']['url'].replace(
                "api.", ""), variables['repo_full_name'], variables['branch'])
        r = session.get(download_url)
        r.raise_for_status()
        z = zipfile.ZipFile(StringIO.StringIO(r.content))
        z.extractall(variables['downloadPath'])
        return {}
class SnykClient(object):
    orgId = None
    projects = []
    error = None

    def __init__(self, server):
        self.logger = LoggerFactory.getLogger("com.xebialabs.snyk-plugin")
        if server in [None, ""]:
            raise Exception("server is undefined")

        self.orgId = server['orgId']
        self.http_request = HttpRequest(server)

    @staticmethod
    def get_client(server):
        return SnykClient(server)

    def parse_output(self, lines):
        result_output = ""
        for line in lines:
            result_output = "\n".join([result_output, line])
        return result_output

    def api_call(self, method, endpoint, **options):
        self.logger.debug("DEBUG:{}".format(endpoint))
        try:
            options["method"] = method.upper()
            options["context"] = endpoint
            response = self.http_request.doRequest(**options)
        except ClientProtocolException:
            raise Exception("URL is not valid")
        if not response.isSuccessful():
            raise Exception("HTTP response code %s (%s)" %
                            (response.getStatus(), response.errorDump()))
        return response

    def set_projects(self, headers):
        endpoint = '/org/{}/projects'.format(self.orgId)

        self.logger.info(
            "Getting scan results for all projects using orgId:{}".format(
                self.orgId))

        resp = self.api_call("GET", endpoint, headers=headers)
        data = json.loads(resp.getResponse())
        if resp.getStatus() in HTTP_SUCCESS:
            self.logger.debug("Results: {}".format(data))

            self.projects = data['projects']

        else:
            self.logBadReturnCodes(data)
            self.throw_error(resp)

    def get_project(self, headers, projectName):
        projectId = None

        for project in self.projects:
            if project['name'] == projectName:
                projectId = project['id']
                break

        if not projectId:
            raise Exception(
                "Exiting - cannot find projectId for project:{}".format(
                    projectName))

        endpoint = '/org/{}/project/{}/aggregated-issues'.format(
            self.orgId, projectId)
        values = """
        {
            "filters": {
                "types": [
                    "vuln",
                    "license"
                ]
            }
        }
        """

        self.logger.debug(
            "Getting aggregated project issues using orgId:{} and projId:{}".
            format(self.orgId, projectId))
        self.logger.debug("Using endpoint:{}".format(endpoint))

        resp = self.api_call("POST", endpoint, data=values, headers=headers)
        data = json.loads(resp.getResponse())

        self.logger.debug("Got response:{}".format(resp.getStatus()))
        if resp.getStatus() in HTTP_SUCCESS:
            self.logger.debug("getproject Results: {}".format(data))

            issueData = {
                'vuln': {
                    'high': 0,
                    'medium': 0,
                    'low': 0
                },
                'license': {
                    'high': 0,
                    'medium': 0,
                    'low': 0
                }
            }

            for issue in data['issues']:
                self.logger.debug("Issue type:{} and severity:{}".format(
                    issue['issueType'], issue['issueData']['severity']))

                if issue['issueType'] in issueData:
                    if issue['issueData']['severity'] in issueData[
                            issue['issueType']]:
                        issueData[issue['issueType']][issue['issueData'][
                            'severity']] = issueData[issue['issueType']][
                                issue['issueData']['severity']] + 1
                    else:
                        self.logger.debug(
                            "Issue severity:{} NOT FOUND IN 'issueData' with issueType:{}"
                            .format(issue['issueData']['severity'],
                                    issue['issueType']))
                        issueData[issue['issueType']] = {
                            issue['issueData']['severity']: 1
                        }
                else:
                    self.logger.debug(
                        "Issue type:{} NOT FOUND IN 'issueData'".format(
                            issue['issueType']))
                    issueData[issue['issueType']] = {
                        issue['issueData']['severity']: 1
                    }

            self.logger.info("Returning issueData from get_project")
            return {'issues': issueData}

        else:
            self.logBadReturnCodes(data)

        self.throw_error(resp)
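    # Example return value of get_project (hypothetical counts):
    #
    #   {'issues': {'vuln':    {'high': 2, 'medium': 5, 'low': 1},
    #               'license': {'high': 0, 'medium': 1, 'low': 0}}}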

    def get_issues(self, variables):
        self.logger.info(
            "Getting scan results for all projects using orgId:{} and issueType:{}"
            .format(variables['orgId'], variables['issueType']))

        issue_data = []
        for project in self.projects:
            project_issue_data = self.get_project(variables['headers'],
                                                  project['name'])
            issue_data.append({
                'id':
                project['id'],
                'name':
                project['name'],
                'high':
                project_issue_data['issues'][variables['issueType']]['high'],
                'medium':
                project_issue_data['issues'][variables['issueType']]['medium'],
                'low':
                project_issue_data['issues'][variables['issueType']]['low']
            })

        self.logger.info("Returning project issues data from get_issues")
        return issue_data
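    # Example return value of get_issues for issueType 'vuln' (hypothetical
    # project and counts):
    #
    #   [{'id': 'a1b2c3d4', 'name': 'myorg/my-repo',
    #     'high': 2, 'medium': 5, 'low': 1}]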

    def get_organizations(self, variables):
        endpoint = '/orgs'

        self.logger.debug(
            "Getting organization results for the defined access token")

        resp = self.api_call("GET", endpoint, headers=variables['headers'])
        data = json.loads(resp.getResponse())
        if resp.getStatus() in HTTP_SUCCESS:
            self.logger.debug("Results: {}".format(data))

            return data
        else:
            self.logBadReturnCodes(data)

        self.throw_error(resp)

    def logBadReturnCodes(self, data):
        if 'returnCode' in data and 'reasonCode' in data and 'messages' in data:
            self.logger.error("Return Code = {}".format(data['returnCode']))
            self.logger.error("Reason Code = {}".format(data['reasonCode']))
            self.logger.error("Message     = {}".format(data['messages']))
        else:
            tb = self.getLastError()
            self.logger.error(
                "Return Codes EXCEPTION \n================\n{}\n================="
                .format(tb))
            self.logger.error("REAL BAD RETURN OBJECT!!!!")
            self.setLastError("{}\nREAL BAD RETURN OBJECT!!!!".format(tb))
            raise Exception(500)

    def setLastError(self, error):
        self.error = error

    def getLastError(self):
        return self.error

    def throw_error(self, resp):
        self.logger.error("Error from Snyk, HTTP Return: {}\n".format(
            resp.getStatus()))
        raise Exception(resp.getStatus())