class Actions(BaseAPIClient):
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Actions API

			Args:
				base: String representing base url from Code DX
				api_key: String representing API key from Code DX
				verbose: Boolean - not supported yet
		"""
        super().__init__(base, api_key, verbose)
        self.projects_api = Projects(base, api_key)

    def bulk_status_update(self, proj, status="false-positive", filters=None):
        """ Update the status of every finding in a project that matches the given filter.

			Accepts project name or id.

			Args:
				proj: project name or id
				status: new status to apply to matching findings (default "false-positive")
				filters: dict of finding filters; defaults to {} (match all findings)

			Output:
				response

		"""
        # PEP 8: compare to None with `is`; default kept out of the signature
        # to avoid a shared mutable default argument.
        if filters is None:
            filters = {}
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = '/api/projects/%d/bulk-status-update' % pid
        params = {"filter": filters, "status": status}
        res = self.call("POST", local_url, params)
        return res
    # NOTE(review): duplicate __init__ — this re-defines and silently overrides
    # the __init__ declared earlier in this class; looks like a paste/scrape
    # artifact and is a candidate for removal.
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Jobs API
			base: String representing base url from Code DX
			api_key: String representing API key from Code DX
			verbose: Boolean - not supported yet
		"""
        super().__init__(base, api_key, verbose)
        self.projects_api = Projects(base, api_key)
# Esempio n. 3
# 0
    # NOTE(review): another duplicate __init__ (scrape artifact) — this variant
    # matches the Reports class constructor (it sets report_columns) and
    # overrides any earlier __init__ in the enclosing class. Candidate for removal.
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Projects API

			Args:
				base: String representing base url from Code DX
				api_key: String representing API key from Code DX
				verbose: Boolean - not supported yet

		"""
        super().__init__(base, api_key, verbose)
        # Column names accepted by the csv report endpoint.
        self.report_columns = [
            "projectHierarchy", "id", "creationDate", "updateDate", "severity",
            "status", "cwe", "rule", "tool", "location", "element", "loc.path",
            "loc.line"
        ]
        self.projects_api = Projects(base, api_key, verbose)
 def __init__(self, base, api_key, verbose=False):
     """Iniitilize"""
     super().__init__(base, api_key, verbose)
     self.projects_api = Projects(base, api_key)
class Findings(BaseAPIClient):
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Findings API

			Args:
				base: String representing base url from Code DX
				api_key: String representing API key from Code DX
				verbose: Boolean - not supported yet

		"""
        super().__init__(base, api_key, verbose)
        self.projects_api = Projects(base, api_key)

    def get_finding(self, fid, options=None):
        """ Returns metadata for the given finding.

			Can include a list of optional expanders to include more information.
			Available values: descriptions, descriptor, issue, triage-time, results, results.descriptions, results.descriptor, results.metadata, results.variants

			Args:
				fid: finding id
				options: list of optional expanders for additional information

			Output:
				response

		"""
        self.type_check(fid, int, "Findings ID")
        local_url = f"/api/findings/{fid}"
        if options:
            self.type_check(options, list, "Optional expanders")
            local_url += "?expand=" + ",".join(options)
        res = self.call("GET", local_url)
        return res

    def get_finding_description(self, fid):
        """ Returns the descriptions for the given finding from all available sources.

			Args:
				fid: finding id

			Output:
				response

		"""
        self.type_check(fid, int, "Findings ID")
        local_url = f"/api/findings/{fid}/description"
        res = self.call("GET", local_url)
        return res

    def get_finding_history(self, fid):
        """ Responds with an array of "activity event" objects in JSON.

			Args:
				fid: finding id

			Output:
				response

		"""
        self.type_check(fid, int, "Findings ID")
        local_url = f"/api/findings/{fid}/history"
        res = self.call("GET", local_url)
        return res

    def get_finding_table(self, proj, options=None, req_body=None):
        """ Returns filtered finding table data.

			This endpoint is a candidate to become a more generic querying API; presently it just returns the data required for the findings table as it exists today.

			Args:
				proj: project name or id
				options: list of optional expanders for additional information
				req_body: request body with filter/query info; defaults to {}

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = f"/api/projects/{pid}/findings/table"
        if options:
            self.type_check(options, list, "Optional expanders")
            local_url += "?expand=" + ",".join(options)
        # Keep original semantics: any falsy body is replaced with {}.
        req_body = req_body or {}
        res = self.call("POST", local_path=local_url, json_data=req_body)
        return res

    def get_finding_count(self, proj, req_body=None):
        """ Returns the count of all findings in the project matching the given filter.

			Args:
				proj: project name or id
				req_body: request body with filter/query info; defaults to {}

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = f"/api/projects/{pid}/findings/count"
        req_body = req_body or {}
        res = self.call("POST", local_path=local_url, json_data=req_body)
        return res

    def get_finding_group_count(self, proj, req_body=None):
        """ Returns finding counts grouped according to the request body.

			Args:
				proj: project name or id
				req_body: request body with filter/grouping info; defaults to {}

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = f"/api/projects/{pid}/findings/grouped-counts"
        req_body = req_body or {}
        res = self.call("POST", local_path=local_url, json_data=req_body)
        return res

    def get_finding_flow(self, proj, req_body=None):
        """ Returns finding flow data for the project.

			Args:
				proj: project name or id
				req_body: flow request body (see Code Dx API); defaults to {}

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = f"/api/projects/{pid}/findings/flow"
        req_body = req_body or {}
        res = self.call("POST", local_path=local_url, json_data=req_body)
        return res

    def get_finding_file(self, proj, path):
        """ Returns the contents of a given file, as long as it is a text file.

			Args:
				proj: project name or id
				path: Code Dx file identifier — a tree path (str) or file id (int)

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        # String paths go to the tree endpoint, numeric ids to the file endpoint.
        if isinstance(path, str):
            local_url = f"/api/projects/{pid}/files/tree/{path}"
        elif isinstance(path, int):
            local_url = f"/api/projects/{pid}/files/{path}"
        else:
            raise Exception("File path must be either string or int.")
        res = self.call("GET", local_url)
        return res
# Esempio n. 6
# 0
class Analysis(BaseAPIClient):
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Analysis API

			Args:
				base: String representing base url from Code DX
				api_key: String representing API key from Code DX
				verbose: Boolean - not supported yet

			Returns:
				Analysis API client

		"""
        super().__init__(base, api_key, verbose)
        self.projects_api = Projects(base, api_key)

    def create_analysis(self, proj):
        """ Create a new Analysis Prep associated with a particular project.

			If Git is configured on that project, the new Analysis Prep will automatically initialize an input corresponding to that configuration.

			Args:
				proj: project name or id.

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = '/api/analysis-prep'
        params = {"projectId": pid}
        res = self.call("POST", local_url, params)
        return res

    def get_prep(self, prep_id):
        """ Get a list of Input IDs and Verification Errors for an Analysis Prep.

			Args:
				prep_id: Accepts a string as the prep_id

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        local_url = '/api/analysis-prep/%s' % prep_id
        res = self.call("GET", local_url)
        return res

    def upload_analysis(self, prep_id, file_name, client_request_id=None):
        """ Analysis Preps should be populated by uploading files to Code Dx.

			See https://codedx.com/Documentation/UserGuide.html#ImportingScanResults for a list of file upload formats

			Args:
				prep_id: Accepts a string as a prep id.
				file_name: File to upload for analysis.
				client_request_id: can be specified if you need to make modifications to analysis later

			Output:
				response

			Raises:
				Exception: if the file extension is not an accepted type.

		"""
        self.type_check(prep_id, str, "Prep_id")
        local_url = '/api/analysis-prep/%s/upload' % prep_id
        # Map accepted file extensions to their MIME types; formats without a
        # known MIME type are left commented out pending support.
        accepted_file_types = {
            '.xml': 'text/xml',
            '.json': 'application/json',
            '.zip': 'application/zip',
            #			'.ozasmt': '',
            '.csv': 'text/csv',
            '.txt': 'text/plain',
            #			'.fpr': '',
            #			'.nessus': '',
            #			'.htm': '',
            #			'.tm7': ''
        }
        file_ext = os.path.splitext(file_name)[1]
        if file_ext not in accepted_file_types:
            raise Exception("File type was not accepted.")
        json_data = {
            'file_name': file_name,
            'file_path': file_name,
            'file_type': accepted_file_types[file_ext]
        }
        if client_request_id is not None:
            # Original chained this through `and self.type_check(...)`; if
            # type_check returns None the header would silently never be set.
            # Validate first, then always record the id.
            self.type_check(client_request_id, str, "Client_request_id")
            json_data['X-Client-Request-Id'] = client_request_id
        res = self.call(method="UPLOAD",
                        local_path=local_url,
                        json_data=json_data)
        return res

    def get_input_metadata(self, prep_id, input_id):
        """ Get metadata for a particular input associated with an Analysis Prep.

			Args:
				prep_id: Accepts a string as a prep id.
				input_id: input from upload.

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        self.type_check(input_id, str, "Input_id")
        local_url = '/api/analysis-prep/%s/%s' % (prep_id, input_id)
        res = self.call(method="GET", local_path=local_url)
        return res

    def delete_input(self, prep_id, input_id):
        """ Delete input. If the inputId is known (this will be the case most of the time), use the URL that includes an input-id parameter.

			Args:
				prep_id: Accepts a string as a prep id.
				input_id: input from upload.

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        self.type_check(input_id, str, "Input_id")
        local_url = '/api/analysis-prep/%s/%s' % (prep_id, input_id)
        res = self.call(method="DELETE", local_path=local_url)
        return res

    def delete_pending(self, prep_id, request_id):
        """ Delete pending input. If an input file has just begun to upload, but that request has not completed and returned an inputId, use the "pending" URL.

			This requires the input upload request to have specified a X-Client-Request-Id header.

			Args:
				prep_id: Accepts a string as a prep id.
				request_id: X-Client-Request-Id from upload_analysis.

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        self.type_check(request_id, str, "Request_id")
        local_url = '/api/analysis-prep/%s/pending' % prep_id
        headers = {'X-Client-Request-Id': request_id}
        res = self.call(method="DELETE",
                        local_path=local_url,
                        local_headers=headers)
        return res

    def toggle_display_tag(self, prep_id, input_id, tag_id, enabled):
        """ Enable and disable individual display tags on individual prep inputs.

			Disabled tags will cause a file to be treated as if that tag were not there, for analysis purposes.

			Args:
				prep_id: Accepts a string as a prep id.
				input_id: input from upload.
				tag_id: tag to enable/disable
				enabled: boolean - enable tag if true

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        self.type_check(input_id, str, "Input_id")
        self.type_check(tag_id, str, "Tag_id")
        self.type_check(enabled, bool, "Enable/disable boolean")
        local_url = '/api/analysis-prep/%s/%s/tag/%s' % (prep_id, input_id,
                                                         tag_id)
        params = {"enabled": enabled}
        res = self.call("PUT", local_path=local_url, json_data=params)
        return res

    def enable_display_tag(self, prep_id, input_id, tag_id):
        """ Enable individual display tags on individual prep inputs.

			Args:
				prep_id: Accepts a string as a prep id.
				input_id: input from upload.
				tag_id: tag to enable

			Output:
				response

		"""
        res = self.toggle_display_tag(prep_id, input_id, tag_id, True)
        return res

    def disable_display_tag(self, prep_id, input_id, tag_id):
        """ Disable individual display tags on individual prep inputs.

			Disabled tags will cause a file to be treated as if that tag were not there, for analysis purposes.

			Args:
				prep_id: Accepts a string as a prep id.
				input_id: input from upload.
				tag_id: tag to disable

			Output:
				response

		"""
        res = self.toggle_display_tag(prep_id, input_id, tag_id, False)
        return res

    def run_analysis(self, prep_id):
        """ Once all of the verificationErrors in an Analysis Prep are addressed, an analysis can be started.

			Args:
				prep_id: Accepts a string as a prep id.

			Output:
				response

		"""
        self.type_check(prep_id, str, "Prep_id")
        local_url = '/api/analysis-prep/%s/analyze' % prep_id
        res = self.call("POST", local_url)
        return res

    def get_all_analysis(self, proj):
        """ Obtain analysis details for a project, such as start and finish times.

			Args:
				proj: project name or id.

			Output:
				response

		"""
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = '/api/projects/%d/analyses' % pid
        res = self.call("GET", local_url)
        return res

    def get_analysis(self, proj, aid):
        """ Obtain analysis details, such as start and finish times.

			Args:
				proj: project name or id
				aid: analysis id

			Output:
				response

		"""
        self.type_check(aid, int, "Analysis ID")
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = '/api/projects/%d/analyses/%d' % (pid, aid)
        res = self.call("GET", local_url)
        return res

    def name_analysis(self, proj, aid, name):
        """ Set a name for a specific analysis.

			Args:
				proj: project name or id
				aid: analysis id
				name: name to give the analysis

			Output:
				response

		"""
        # Validate aid for consistency with get_analysis; the '%d' format below
        # requires an integer anyway.
        self.type_check(aid, int, "Analysis ID")
        self.type_check(name, str, "Name")
        self.projects_api.update_projects()
        pid = self.projects_api.process_project(proj)
        local_url = '/api/projects/%d/analyses/%d' % (pid, aid)
        params = {"name": name}
        res = self.call("PUT",
                        local_path=local_url,
                        json_data=params,
                        content_type=None)
        return res
# Esempio n. 7
# 0
class Reports(BaseAPIClient):
    def __init__(self, base, api_key, verbose=False):
        """ Creates an API Client for Code DX Reports API

			Args:
				base: String representing base url from Code DX
				api_key: String representing API key from Code DX
				verbose: Boolean - not supported yet

		"""
        super().__init__(base, api_key, verbose)
        # Column names accepted by the csv report endpoint.
        self.report_columns = [
            "projectHierarchy", "id", "creationDate", "updateDate", "severity",
            "status", "cwe", "rule", "tool", "location", "element", "loc.path",
            "loc.line"
        ]
        self.projects_api = Projects(base, api_key, verbose)

    def report_types(self, proj):
        """ Provides a list of report types for a project.

			Each report type (pdf, csv, xml, nessus, and nbe) has a different set of configuration options. These configuration options are important with respect to generating a report.

			Args:
				proj: project name or id

			Output:
				response

		"""
        pid = self.projects_api.process_project(proj)
        local_url = '/api/projects/%d/report/types' % pid
        res = self.call("GET", local_url)
        return res

    def generate(self, pid, report_type, config, filters=None):
        """ Allows user to queue a job to generate a report.

			Each report type has a different set of configuration options that can be obtained from the Report Types endpoint.

			Args:
				pid: project id
				report_type: report format, e.g. "pdf", "csv", "xml", "nessus", "nbe"
				config: dict of report configuration options
				filters: dict of finding filters; defaults to {}

			Output:
				response

		"""
        # Default filters BEFORE building the request body. The original
        # assigned params["filter"] first and defaulted afterwards, so a None
        # filter was sent to the API.
        if filters is None:
            filters = {}
        params = {"filter": filters, "config": config}
        local_url = '/api/projects/%d/report/%s' % (pid, report_type)
        res = self.call("POST", local_url, params)
        return res

    def generate_pdf(self,
                     proj,
                     summary_mode="simple",
                     details_mode="with-source",
                     include_result_details=False,
                     include_comments=False,
                     include_request_response=False,
                     filters=None):
        """ Allows user to queue a job to generate a pdf report. Returns jobId and status.

			Args:
				summary_mode <String>: Executive summary. One of "none", "simple", or "detailed". Default is "simple".
				details_mode <String>: Finding details. One of "none", "simple", or "with-source". Default is "with-source".
				include_result_details <Boolean>: Include result provided details. Default is false.
				include_comments <Boolean>: Include comments. Default is false.
				include_request_response <Boolean>: Include HTTP requests and responses. Default is false.

		"""
        pid = self.projects_api.process_project(proj)
        if not filters: filters = {}
        config = {}
        if summary_mode not in ["none", "simple", "detailed"]:
            raise Exception("Invalid summary mode input.")
        config["summaryMode"] = summary_mode
        if details_mode not in ["none", "simple", "with-source"]:
            raise Exception("Invalid details mode input given.")
        config["detailsMode"] = details_mode
        self.type_check(include_result_details, bool, "Include_result_details")
        config["includeResultDetails"] = include_result_details
        self.type_check(include_comments, bool, "Include_comments")
        config["includeComments"] = include_comments
        self.type_check(include_request_response, bool,
                        "include_request_response")
        config["includeRequestResponse"] = include_request_response
        res = self.generate(pid, "pdf", config, filters)
        return res

    def get_csv_columns(self):
        """ Returns a list of optional columns for a project csv report."""
        return self.report_columns

    def generate_csv(self, proj, cols=None):
        """ Allows user to queue a job to generate a csv report. Returns jobId and status.

			Accepts a list of columns to include in the report. Default is all columns.
			Call get_csv_columns() to see column options.

		"""
        pid = self.projects_api.process_project(proj)
        config = {}
        if not cols: cols = self.report_columns
        for col in cols:
            if col not in self.report_columns:
                raise Exception("Invaild column name.")
        config["columns"] = cols
        res = self.generate(pid, "csv", config)
        return res

    def generate_xml(self,
                     proj,
                     include_standards=False,
                     include_source=False,
                     include_rule_descriptions=True):
        """ Allows user to queue a job to generate an xml report. Returns jobId and status.

			Args:
				include_standards <Boolean>: List standards violations. Default is false.
				include_source <Boolean>: Include source code snippets. Default is false.
				include_rule_descriptions <Boolean>: Include rule descriptions. Default is true.

		"""
        pid = self.projects_api.process_project(proj)
        config = {}
        self.type_check(include_standards, bool, "Include_standards")
        config["includeStandards"] = include_standards
        self.type_check(include_source, bool, "Include_source")
        config["includeSource"] = include_source
        self.type_check(include_rule_descriptions, bool,
                        "include_rule_descriptions")
        config["includeRuleDescriptions"] = include_rule_descriptions
        res = self.generate(pid, "xml", config)
        return res

    def generate_nessus(self,
                        proj,
                        default_host=None,
                        operating_system="",
                        mac_address="",
                        netBIOS_name=""):
        """ Allows user to queue a job to generate a nessus report. Returns jobId and status.

			Args:
				default_host <String>: Default host. Required.
				operating_system <String>: Operating System. Default is "".
				mac_address <String>: mac address. Required.
				netBIOS_name <String>: NetBIOS name. Default is "".

		"""
        pid = self.projects_api.process_project(proj)
        config = {}
        self.type_check(default_host, str, "Default_host")
        config["defaultHost"] = default_host
        self.type_check(operating_system, str, "Operating_system")
        config["operatingSystem"] = operating_system
        self.type_check(mac_address, str, "mac_address")
        # BUG FIX: re.search takes (pattern, string); the original swapped the
        # arguments, so an empty/invalid mac address could pass validation and a
        # valid one could be rejected. mac_address is documented as required, so
        # the "" default now correctly fails validation.
        if re.search(r"^([0-9A-Fa-f]{2}:){5}([0-9A-Fa-f]{2})$",
                     mac_address) is None:
            raise Exception("Not a valid mac address.")
        config["macAddress"] = mac_address
        self.type_check(netBIOS_name, str, "netBIOS_name")
        config["netBIOSName"] = netBIOS_name
        res = self.generate(pid, "nessus", config)
        return res

    def generate_nbe(self, proj, host_address=None):
        """ Allows user to queue a job to generate an AlienVault/NBE report. Returns jobId and status.

			Args:
				host_address <String>: Host IP address. Required.

		"""
        pid = self.projects_api.process_project(proj)
        config = {}
        self.type_check(host_address, str, "Host_address")
        # BUG FIX: re.search takes (pattern, string); the original swapped the
        # arguments, breaking the IPv4 validation.
        if re.search(
                r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\.){3}((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9]))$",
                host_address) is None:
            raise Exception("Not a valid IPv4 address.")
        # NOTE(review): "hostAddresss" (triple 's') looks like a typo, but the
        # key is sent to the Code Dx API — confirm the expected field name
        # against the server before renaming.
        config["hostAddresss"] = host_address
        res = self.generate(pid, "nbe", config)
        return res