def _check_should_run_pkg(self, pkg_dir: str, current_branch: str) -> bool:
    """Checks if there is a difference in the package before this Lint run and after it.

    Args:
        pkg_dir: The package directory to check.
        current_branch: The name of the branch currently checked out.

    Returns:
        bool. True if there is a difference and False otherwise.
    """
    # This will check if there are any changes between the current master version and the last commit in master
    if os.environ.get('CIRCLE_COMPARE_URL') and current_branch == "master":
        changes_from_last_commit_vs_master = run_command("git diff --name-only HEAD..HEAD^")
    else:
        # This will return a list of all files that changed up until the last commit (not including any changes
        # which were made but not yet committed).
        changes_from_last_commit_vs_master = run_command(f"git diff origin/master...{current_branch} --name-only")

    # This will check if any changes were made to the files in the package (pkg_dir) but are yet to be committed.
    changes_since_last_commit = run_command(f"git diff --name-only -- {pkg_dir}")

    # If the package is in the list of changed files, or if any files within the package were changed
    # but not yet committed, return True.
    if pkg_dir in changes_from_last_commit_vs_master or len(changes_since_last_commit) > 0:
        return True

    # No changes were made to the package - return False.
    return False
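# Every function in this file shells out through a `run_command` helper imported from
# the surrounding project (demisto-sdk's common tools). Its real implementation lives
# there; the sketch below is only a hypothetical stand-in showing the contract the
# callers rely on: return stdout as a string, and on failure either exit or raise
# RuntimeError depending on `exit_on_error`.
import shlex
import subprocess
import sys


def run_command_sketch(command: str, exit_on_error: bool = True) -> str:
    """Hypothetical minimal stand-in for run_command; not the real implementation."""
    process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               universal_newlines=True)
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        if exit_on_error:
            sys.exit(f'Command "{command}" failed:\n{stderr}')
        raise RuntimeError(f'Command "{command}" failed:\n{stderr}')
    return stdout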
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
    arg_parser.add_argument('server_version', help='Server version')
    # '%' must be doubled in argparse help strings, otherwise help formatting raises an error.
    arg_parser.add_argument('-d', '--date', help='release date in the format %%Y-%%m-%%d', required=False)
    args = arg_parser.parse_args()

    date = args.date if args.date else datetime.now().strftime('%Y-%m-%d')

    # get changed yaml/json files (filter only relevant changed files)
    validate_manager = ValidateManager()
    change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
    modified_files, added_files, _, _, _ = validate_manager.filter_changed_files(change_log)

    for file_path in get_changed_content_entities(modified_files, added_files):
        if not should_clear(file_path, args.server_version):
            continue
        rn_path = get_release_notes_file_path(file_path)
        if os.path.isfile(rn_path):
            # if the file exists, mark the current notes as release relevant
            with open(rn_path, 'r+') as rn_file:
                text = rn_file.read()
                rn_file.seek(0)
                text = text.replace(UNRELEASE_HEADER, CHANGE_LOG_FORMAT.format(version=args.version, date=date))
                rn_file.write(text)
        else:
            # if the file doesn't exist, create it with a new header
            with open(rn_path, 'w') as rn_file:
                text = CHANGE_LOG_FORMAT.format(version=args.version, date=date) + get_new_header(file_path)
                rn_file.write(text)

        run_command('git add {}'.format(rn_path))
def create_pack_rn(self, rn_path: str, changed_files: dict, new_metadata: dict, new_version: str) -> bool:
    """ Checks whether the pack requires a new rn and if so, creates it.

        :param
            rn_path (str): The rn path
            changed_files (dict): The changed files details
            new_metadata (dict): The new pack metadata
            new_version (str): The new version str representation, e.g. 1.0.2, 1.11.2 etc.

        :rtype: ``bool``
        :return
            Whether the RN was updated successfully or not
    """
    rn_string = self.handle_existing_rn_version_path(rn_path)
    if not rn_string:
        rn_string = self.build_rn_template(changed_files)
    if len(rn_string) > 0 or self.is_force:
        if self.is_bump_required():
            self.write_metadata_to_file(new_metadata)
        self.create_markdown(rn_path, rn_string, changed_files)
        self.build_rn_config_file(new_version)
        try:
            run_command(f'git add {rn_path}', exit_on_error=False)
        except RuntimeError:
            print_warning(f'Could not add the release note files to git: {rn_path}')
        if self.is_bc and self.bc_path:
            try:
                run_command(f'git add {self.bc_path}', exit_on_error=False)
            except RuntimeError:
                print_warning(f'Could not add the release note config file to git: {self.bc_path}')
        if self.existing_rn_changed:
            print_color(f"Finished updating release notes for {self.pack}.", LOG_COLORS.GREEN)
            if not self.text:
                print_color(f"\nNext Steps:\n - Please review the "
                            f"created release notes found at {rn_path} and document any changes you "
                            f"made by replacing '%%UPDATE_RN%%'.\n - Commit "
                            f"the new release notes to your branch.\nFor information regarding proper"
                            f" format of the release notes, please refer to "
                            f"https://xsoar.pan.dev/docs/integrations/changelog", LOG_COLORS.GREEN)
            return True
        else:
            print_color(f"No changes to {self.pack} pack files were detected from the previous time "
                        "this command was run. The release notes have not been "
                        "changed.", LOG_COLORS.GREEN)
    else:
        print_color("No changes which would belong in release notes were detected.", LOG_COLORS.YELLOW)
    return False
def get_modified_and_added_files(self, tag='origin/master'):
    """Get lists of the modified and added files in your branch according to the git diff output.

    Args:
        tag (string): String of git tag used to update modified files

    Returns:
        (modified_files, added_files). Tuple of sets.
    """
    # Two dots is the default in git diff: it compares the two branch tips directly.
    # Three dots compares against the last shared (merge-base) commit as the base.
    compare_type = '.' if 'master' in tag else ''
    all_changed_files_string = run_command(
        'git diff --name-status {tag}..{compare_type}refs/heads/{branch}'.format(
            tag=tag, branch=self.branch_name, compare_type=compare_type))
    modified_files, added_files, _, old_format_files = self.get_modified_files(
        all_changed_files_string, tag=tag, print_ignored_files=self.print_ignored_files)

    if not self.is_circle:
        files_string = run_command('git diff --name-status --no-merges HEAD')
        nc_modified_files, nc_added_files, nc_deleted_files, nc_old_format_files = self.get_modified_files(
            files_string, print_ignored_files=self.print_ignored_files)

        all_changed_files_string = run_command('git diff --name-status {}'.format(tag))
        modified_files_from_tag, added_files_from_tag, _, _ = \
            self.get_modified_files(all_changed_files_string, print_ignored_files=self.print_ignored_files)

        if self.file_path:
            if F'M\t{self.file_path}' in files_string:
                modified_files = {self.file_path}
                added_files = set()
            else:
                modified_files = set()
                added_files = {self.file_path}
            return modified_files, added_files, set(), set()

        old_format_files = old_format_files.union(nc_old_format_files)
        modified_files = modified_files.union(modified_files_from_tag.intersection(nc_modified_files))
        added_files = added_files.union(added_files_from_tag.intersection(nc_added_files))
        modified_files = modified_files - set(nc_deleted_files)
        added_files = added_files - set(nc_modified_files) - set(nc_deleted_files)

    packs = self.get_packs(modified_files, added_files)

    return modified_files, added_files, old_format_files, packs
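# For context on the two-dot/three-dot distinction used above: `git diff A..B`
# (equivalent to `git diff A B`) compares the tips of A and B directly, while
# `git diff A...B` compares B against the merge base, i.e. the last commit the
# two branches share. A hypothetical helper, not part of the original module:
def _diff_base_examples(branch: str = 'HEAD') -> tuple:
    """Hedged sketch showing both diff forms against origin/master."""
    tip_vs_tip = run_command(f'git diff --name-status origin/master..{branch}')
    merge_base_vs_tip = run_command(f'git diff --name-status origin/master...{branch}')
    return tip_vs_tip, merge_base_vs_tip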
def get_all_diff_text_files(self, branch_name, is_circle):
    """
    Get all new/modified text files that need to be searched for secrets
    :param branch_name: current branch being worked on
    :param is_circle: boolean to check if being run from circle
    :return: list: list of text files
    """
    changed_files_string = run_command("git diff --name-status origin/master...{}".format(branch_name)) \
        if is_circle else run_command("git diff --name-status --no-merges HEAD")
    return list(self.get_diff_text_files(changed_files_string))
def _docker_run(self, docker_image):
    workdir = '/devwork'  # this is set up in CONTAINER_SETUP_SCRIPT
    lint_files = os.path.basename(self._get_lint_files())

    run_params = ['docker', 'create', '-w', workdir, '-e', 'PYLINT_FILES={}'.format(lint_files)]
    run_params.extend(['-e', f'PS_LINT_FILES={lint_files}'])
    if not self.root:
        run_params.extend(['-u', '{}:4000'.format(os.getuid())])
    if not self.run_args['tests']:
        run_params.extend(['-e', 'PYTEST_SKIP=1'])
        run_params.extend(['-e', 'PS_TEST_SKIP=1'])
    if not self.run_args['pylint']:
        run_params.extend(['-e', 'PYLINT_SKIP=1'])
    if not self.run_args['pslint']:
        run_params.extend(['-e', 'PS_LINT_SKIP=1'])
    run_params.extend(['-e', 'CPU_NUM={}'.format(self.cpu_num)])
    run_params.extend(['-e', 'CI={}'.format(os.getenv("CI", "false"))])

    run_script_name = self.run_dev_tasks_script_name if self.script_type == TYPE_PYTHON \
        else self.run_dev_tasks_script_pwsh_name
    run_script = self.run_dev_tasks_script if self.script_type == TYPE_PYTHON else self.run_dev_tasks_script_pwsh
    run_params.extend([docker_image, 'sh', './{}'.format(run_script_name)])
    print_v(f'container create: {run_params}')
    output = subprocess.check_output(run_params, stderr=subprocess.STDOUT, universal_newlines=True)
    container_id = output.strip()
    try:
        output = output + '\n' + subprocess.check_output(
            ['docker', 'cp', self.project_dir + '/.', container_id + ':' + workdir],
            stderr=subprocess.STDOUT, universal_newlines=True)
        output = output + '\n' + subprocess.check_output(
            ['docker', 'cp', run_script, container_id + ':' + workdir],
            universal_newlines=True, stderr=subprocess.STDOUT)
        output = output + '\n' + subprocess.check_output(
            ['docker', 'start', '-a', container_id],
            stderr=subprocess.STDOUT, universal_newlines=True)
        return output, 0
    finally:
        if not self.keep_container:
            run_command(f'docker rm {container_id}')
        else:
            print("Test container [{}] was left available".format(container_id))
def main():
    parser = argparse.ArgumentParser(description='Deploy a pack from a contribution PR to a branch')
    parser.add_argument('-p', '--pr_number', help='Contrib PR number')
    parser.add_argument('-b', '--branch', help='The contrib branch')
    parser.add_argument('-c', '--contrib_repo', help='The contrib repo')
    parser.add_argument('-t', '--github_token', help='github token (used to fetch from forked repositories).')
    args = parser.parse_args()

    pr_number = args.pr_number
    repo = args.contrib_repo
    branch = args.branch
    token = args.github_token

    packs_dir_names = get_pack_dir(branch, pr_number, repo)
    if not packs_dir_names:
        print_error('Did not find a pack in the PR')
        sys.exit(1)

    print(f'Copy changes from the contributor branch {repo}/{branch} '
          f'in the following packs: ' + '\n'.join(packs_dir_names))

    try:
        for pack_dir in packs_dir_names:
            if os.path.isdir(f'Packs/{pack_dir}'):
                # Remove existing pack
                shutil.rmtree(f'Packs/{pack_dir}')

        # if packs_dir_names = ['pack_a', 'pack_b', 'pack_c'],
        # string_dir_names will be 'Packs/pack_a Packs/pack_b Packs/pack_c'
        string_dir_names = f'Packs/{" Packs/".join(packs_dir_names)}'

        try:
            with open('/dev/null', 'w') as dev_null:
                Popen(f'git fetch https://{token}@github.com/{repo}/content.git :{repo}/{branch}'.split(),
                      stdout=dev_null)
        except SystemExit:
            pass

        command = f'git checkout {repo}/{branch} {string_dir_names}'
        print(f'Running command {command}')
        run_command(command)
    except Exception as e:
        print_error(f'Failed to deploy contributed pack to base branch: {e}')
        sys.exit(1)

    print_success(f'Successfully updated the base branch with the following contrib packs: '
                  f'{", ".join(packs_dir_names)}')
def create_instance(ami_name):
    print("Creating instance from the AMI image for {}".format(AMI_NAME_TO_READABLE[ami_name]))
    run_command("./Tests/scripts/create_instance.sh instance.json {}".format(ami_name), False)  # noqa

    with open('./Tests/instance_ids.txt', 'r') as instance_file:
        instance_id = instance_file.read()

    with open('image_id.txt', 'r') as image_id_file:
        image_data = image_id_file.read()
        print('Image data is {}'.format(image_data))
        with open("./Tests/images_data.txt", "a") as image_data_file:
            image_data_file.write('{name} Image info is: {data}\n'.format(
                name=AMI_NAME_TO_READABLE[ami_name], data=image_data))

    return instance_id
def update_object_in_id_set(obj_id, obj_data, file_path, instances_set):
    change_string = run_command("git diff HEAD {}".format(file_path))
    is_added_from_version = bool(re.search(r'\+fromversion: .*', change_string))
    is_added_to_version = bool(re.search(r'\+toversion: .*', change_string))

    file_to_version = get_to_version(file_path)
    file_from_version = get_from_version(file_path)

    updated = False
    for instance in instances_set:
        instance_id = list(instance.keys())[0]
        integration_to_version = instance[instance_id].get('toversion', '99.99.99')
        integration_from_version = instance[instance_id].get('fromversion', '0.0.0')

        if obj_id == instance_id:
            # Only update an instance whose version range matches the file's, unless the
            # fromversion/toversion line was added in this very change.
            if is_added_from_version or (not is_added_from_version and file_from_version == integration_from_version):
                if is_added_to_version or (not is_added_to_version and file_to_version == integration_to_version):
                    instance[obj_id] = obj_data[obj_id]
                    updated = True
                    break

    if not updated:
        # in case we didn't find one, we need to create it
        add_new_object_to_id_set(obj_id, obj_data, instances_set)
def get_current_working_branch() -> str:
    branches = run_command('git branch')
    branch_name_reg = re.search(r'\* (.*)', branches)
    if branch_name_reg:
        return branch_name_reg.group(1)
    return ''
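# A hedged alternative sketch: instead of parsing the porcelain `git branch`
# output, `git rev-parse --abbrev-ref HEAD` prints the current branch name
# directly. Not part of the original module:
def get_current_working_branch_sketch() -> str:
    """Hypothetical variant of get_current_working_branch using rev-parse."""
    return run_command('git rev-parse --abbrev-ref HEAD').strip()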
def slack_notifier(build_url, slack_token, env_results_file_name, container):
    branches = run_command("git branch")
    branch_name_reg = re.search(r'\* (.*)', branches)
    branch_name = branch_name_reg.group(1)

    if branch_name == 'master':
        print("Extracting build status")
        # container 1: unit tests
        if int(container):
            print_color("Starting Slack notifications about nightly build - unit tests", LOG_COLORS.GREEN)
            content_team_attachments = get_attachments_for_unit_test(build_url)
        # container 0: test playbooks
        else:
            print_color("Starting Slack notifications about nightly build - tests playbook", LOG_COLORS.GREEN)
            content_team_attachments, _ = get_attachments_for_test_playbooks(build_url, env_results_file_name)

        print("Sending Slack messages to #content-team")
        slack_client = SlackClient(slack_token)
        slack_client.api_call("chat.postMessage",
                              channel="dmst-content-team",
                              username="******",
                              as_user="******",
                              attachments=content_team_attachments)
def slack_notifier(build_url, slack_token, test_type, env_results_file_name=None, packs_results_file=None,
                   job_name=""):
    branches = run_command("git branch")
    branch_name_reg = re.search(r'\* (.*)', branches)
    branch_name = branch_name_reg.group(1)

    if branch_name == 'master':
        logging.info("Extracting build status")
        if test_type == UNITTESTS_TYPE:
            logging.info("Starting Slack notifications about nightly build - unit tests")
            content_team_attachments = get_attachments_for_unit_test(build_url)
        elif test_type == SDK_UNITTESTS_TYPE:
            logging.info("Starting Slack notifications about SDK nightly build - unit tests")
            content_team_attachments = get_attachments_for_unit_test(build_url, is_sdk_build=True)
        elif test_type == 'test_playbooks':
            logging.info("Starting Slack notifications about nightly build - tests playbook")
            content_team_attachments, _ = get_attachments_for_test_playbooks(build_url, env_results_file_name)
        elif test_type == SDK_FAILED_STEPS_TYPE:
            logging.info('Starting Slack notifications about SDK nightly build - test playbook')
            content_team_attachments = get_attachments_for_all_steps(build_url, build_title=SDK_BUILD_TITLE)
        elif test_type == BucketUploadFlow.BUCKET_UPLOAD_TYPE:
            logging.info('Starting Slack notifications about upload to production bucket build')
            content_team_attachments = get_attachments_for_bucket_upload_flow(
                build_url=build_url, job_name=job_name, packs_results_file_path=packs_results_file)
        elif test_type == SDK_RUN_AGAINST_FAILED_STEPS_TYPE:
            content_team_attachments = get_attachments_for_all_steps(build_url, build_title=SDK_XSOAR_BUILD_TITLE)
        else:
            raise NotImplementedError('The test_type parameter must be only \'test_playbooks\' or \'unittests\'')

        logging.info(f'Content team attachments:\n{content_team_attachments}')
        logging.info("Sending Slack messages to #content-team")
        slack_client = SlackClient(slack_token)
        slack_client.api_call("chat.postMessage",
                              json={'channel': 'dmst-content-team',
                                    'username': '******',
                                    'as_user': '******',
                                    'attachments': content_team_attachments})
def get_new_and_modified_integration_files(git_sha1):
    """Return 2 lists - list of new integrations and list of modified integrations since the commit of the git_sha1.

    Args:
        git_sha1 (str): The git sha of the commit against which we will run the 'git diff' command.

    Returns:
        (tuple): Returns a tuple of two lists, the file paths of the new integrations and modified integrations.
    """
    # get changed yaml files (filter only added and modified files)
    tag = get_last_release_version()
    file_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(git_sha1))
    modified_files, added_files, removed_files, old_format_files = file_validator.get_modified_files(change_log, tag)

    # copy the list before extending it, so the module-level constant is not mutated
    all_integration_regexes = list(YML_INTEGRATION_REGEXES)
    all_integration_regexes.extend([INTEGRATION_REGEX, PACKS_INTEGRATION_REGEX, BETA_INTEGRATION_REGEX])

    new_integration_files = [
        file_path for file_path in added_files
        if checked_type(file_path, all_integration_regexes)
    ]

    modified_integration_files = [
        file_path for file_path in modified_files
        if isinstance(file_path, str) and checked_type(file_path, all_integration_regexes)
    ]

    return new_integration_files, modified_integration_files
def get_secrets(self, branch_name, is_circle):
    secrets_found = {}
    # Make sure we are not in the middle of a merge: `git rev-parse -q --verify MERGE_HEAD`
    # prints a sha only while a merge is in progress, so empty output means it is safe to run.
    if not run_command('git rev-parse -q --verify MERGE_HEAD'):
        secrets_file_paths = self.get_all_diff_text_files(branch_name, is_circle)
        secrets_found = self.search_potential_secrets(secrets_file_paths, self.ignore_entropy)
        if secrets_found:
            secrets_found_string = 'Secrets were found in the following files:'
            for file_name in secrets_found:
                secrets_found_string += ('\n\nIn File: ' + file_name + '\n')
                secrets_found_string += '\nThe following expressions were marked as secrets: \n'
                secrets_found_string += json.dumps(secrets_found[file_name], indent=4)
            if not is_circle:
                secrets_found_string += '\n\nRemove or whitelist secrets in order to proceed, then re-commit\n'
            else:
                secrets_found_string += '\n\nThe secrets were exposed in public repository,' \
                                        ' remove the files asap and report it.\n'
            secrets_found_string += 'For more information about whitelisting visit: ' \
                                    'https://github.com/demisto/internal-content/tree/master/documentation/secrets'
            print_error(secrets_found_string)
    return secrets_found
def get_new_packs(git_sha1):
    """
    Gets the pack_metadata file paths that were added in the diff against the given commit,
    and returns the pack directories containing them.

    Args:
        git_sha1 (str): The commit to make the diff with.

    Returns:
        (list) A list of the new pack paths.
    """
    diff_cmd = f'git diff --diff-filter=A --name-only {git_sha1} */{PACK_METADATA}'
    try:
        diff_result = run_command(diff_cmd, exit_on_error=False)
    except RuntimeError:
        logging.critical(
            'Unable to get the SHA1 of the commit in which the version was released. This can happen if your '
            'branch is not updated with origin master. Merge from origin master and try again.\n'
            'If you\'re not on a fork, run "git merge origin/master".\n'
            'If you are on a fork, first set https://github.com/demisto/content to be '
            'your upstream by running "git remote add upstream https://github.com/demisto/content". After '
            'setting the upstream, run "git fetch upstream", and then run "git merge upstream/master". Doing '
            'these steps will merge your branch with content master as a base.')
        sys.exit(1)

    pack_paths = [
        os.path.dirname(file_path) for file_path in diff_result.split('\n')
        if file_path.startswith(PACKS_DIR)
    ]
    return pack_paths
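# A hedged worked example of the reduction get_new_packs performs; the diff
# output below is invented for illustration:
def _new_packs_example() -> list:
    """Hypothetical: reduce `--diff-filter=A --name-only` output to pack directories."""
    diff_result = 'Packs/ExamplePack/pack_metadata.json\nPacks/OtherPack/pack_metadata.json'
    return [os.path.dirname(file_path) for file_path in diff_result.split('\n')
            if file_path.startswith('Packs')]  # -> ['Packs/ExamplePack', 'Packs/OtherPack']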
def run_bandit(self, py_num) -> int:
    """Run bandit

    Args:
        py_num: The python version in use

    Returns:
        int. 0 on successful bandit run, 1 otherwise.
    """
    lint_files = self._get_lint_files()
    python_exe = 'python2' if py_num < 3 else 'python3'
    output = run_command(' '.join([python_exe, '-m', 'bandit', '-lll', '-iii', '-q', lint_files]),
                         cwd=self.project_dir)
    self.lock.acquire()
    print("========= Running bandit on: {} ===============".format(lint_files))
    print_v('Using: {} to run bandit'.format(python_exe))
    if len(output) == 0:
        print_color("bandit completed for: {}\n".format(lint_files), LOG_COLORS.GREEN)
        if self.lock.locked():
            self.lock.release()
        return 0
    else:
        print_error(output)
        if self.lock.locked():
            self.lock.release()
        return 1
def run_flake8(self, py_num) -> int:
    """Runs flake8

    Args:
        py_num (int): The python version in use

    Returns:
        int. 0 if flake8 is successful, 1 otherwise.
    """
    lint_files = self._get_lint_files()
    python_exe = 'python2' if py_num < 3 else 'python3'
    print_v('Using: {} to run flake8'.format(python_exe))
    output = run_command(f'{python_exe} -m flake8 {self.project_dir}', cwd=self.configuration.env_dir)
    self.lock.acquire()
    print("\n========= Running flake8 on: {} ===============".format(lint_files))
    if len(output) == 0:
        print_color("flake8 completed for: {}\n".format(lint_files), LOG_COLORS.GREEN)
        if self.lock.locked():
            self.lock.release()
        return 0
    else:
        print_error(output)
        if self.lock.locked():
            self.lock.release()
        return 1
def run_mypy(self, py_num) -> int:
    """Runs mypy

    Args:
        py_num: The python version in use

    Returns:
        int. 0 on successful mypy run, 1 otherwise.
    """
    self.get_common_server_python()
    lint_files = self._get_lint_files()
    sys.stdout.flush()
    script_path = os.path.abspath(os.path.join(self.configuration.sdk_env_dir, self.run_mypy_script))
    output = run_command(' '.join(['bash', script_path, str(py_num), lint_files]), cwd=self.project_dir)
    self.lock.acquire()
    print("========= Running mypy on: {} ===============".format(lint_files))
    if 'Success: no issues found in 1 source file' in output:
        print(output)
        print_color("mypy completed for: {}\n".format(lint_files), LOG_COLORS.GREEN)
        self.remove_common_server_python()
        if self.lock.locked():
            self.lock.release()
        return 0
    else:
        print_error(output)
        self.remove_common_server_python()
        if self.lock.locked():
            self.lock.release()
        return 1
def get_secrets(self, branch_name, is_circle):
    secret_to_location_mapping = {}
    if self.input_paths:
        secrets_file_paths = self.input_paths
    else:
        secrets_file_paths = self.get_all_diff_text_files(branch_name, is_circle)
    # If an input path is supplied, we should not run on git. If not supplied, make sure
    # we are not in the middle of a merge.
    if not run_command('git rev-parse -q --verify MERGE_HEAD') or self.input_paths:
        secret_to_location_mapping = self.search_potential_secrets(secrets_file_paths, self.ignore_entropy)
        if secret_to_location_mapping:
            secrets_found_string = 'Secrets were found in the following files:'
            for file_name in secret_to_location_mapping:
                for line in sorted(secret_to_location_mapping[file_name]):
                    secrets_found_string += ('\nIn File: ' + f'{file_name}:{line}' + '\n')
                    if len(secret_to_location_mapping[file_name][line]) == 1:
                        secrets_found_string += f'Secret found: {secret_to_location_mapping[file_name][line][0]}\n'
                    else:
                        secrets_found_string += f'Secrets found: {secret_to_location_mapping[file_name][line]}\n'
            if not is_circle:
                secrets_found_string += '\n\nRemove or whitelist secrets in order to proceed, then re-commit\n'
            else:
                secrets_found_string += '\n\nThe secrets were exposed in public repository,' \
                                        ' remove the files asap and report it.\n'
            secrets_found_string += 'For more information about whitelisting visit: ' \
                                    'https://xsoar.pan.dev/docs/concepts/demisto-sdk#secrets'
            print_error(secrets_found_string)
    return secret_to_location_mapping
def main():
    install_logging("TriggerPrivateBuild.log")
    # get github token parameter
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()
    github_token = args.github_token

    # get branch name
    branches = tools.run_command("git branch")
    branch_name_regex = re.search(r"\* (.*)", branches)
    branch_name = branch_name_regex.group(1)

    if branch_has_private_build_infra_change(branch_name):
        # get the workflows ids before triggering the build
        pre_existing_workflow_ids = get_dispatch_workflows_ids(github_token, 'master')

        # trigger private build
        payload = {'event_type': f'Trigger private build from content/{branch_name}',
                   'client_payload': {'commit_sha1': branch_name, 'is_infra_build': 'True'}}

        res = requests.post(TRIGGER_BUILD_URL,
                            headers={'Accept': 'application/vnd.github.everest-preview+json',
                                     'Authorization': f'Bearer {github_token}'},
                            data=json.dumps(payload),
                            verify=False)

        if res.status_code != 204:
            logging.critical(f'Failed to trigger private repo build, request to '
                             f'{TRIGGER_BUILD_URL} failed with error: {str(res.content)}')
            sys.exit(1)

        workflow_ids_diff = []
        for i in range(GET_WORKFLOWS_MAX_RETRIES):
            # wait 5 seconds and get the workflow ids again
            time.sleep(5)
            workflow_ids_after_dispatch = get_dispatch_workflows_ids(github_token, 'master')

            # compare with the first workflows list to get the current id
            workflow_ids_diff = [x for x in workflow_ids_after_dispatch if x not in pre_existing_workflow_ids]
            if workflow_ids_diff:
                break

        if len(workflow_ids_diff) == 1:
            workflow_id = workflow_ids_diff[0]
            logging.success(f'Private repo build triggered successfully, workflow id: {workflow_id}\n URL:'
                            f' {WORKFLOW_HTML_URL}/{workflow_id}')

            # write the workflow id to a text file so get_private_build_status.py can use it
            with open(PRIVATE_REPO_WORKFLOW_ID_FILE, "w") as f:
                f.write(str(workflow_id))
            sys.exit(0)
        else:
            logging.critical('Could not find the private repo workflow')
            sys.exit(1)
    else:
        logging.info('Build private repo skipped')
def check_docker_image_changed(main_branch: str, packfile: str) -> Optional[str]:
    """ Checks whether the docker image was changed in master.

        :param
            main_branch (str): The git main branch
            packfile (str): The added or modified yml path

        :rtype: ``Optional[str]``
        :return
            The latest docker image
    """
    try:
        diff = run_command(f'git diff {main_branch} -- {packfile}', exit_on_error=False)
    except RuntimeError as e:
        if any('is outside repository' in exp for exp in e.args):
            return None
        else:
            print_warning(f'skipping docker image check, Encountered the following error:\n{e.args[0]}')
            return None
    else:
        diff_lines = diff.splitlines()
        for diff_line in diff_lines:
            # search for a line noting that the Docker image was changed
            if 'dockerimage:' in diff_line:
                split_line = diff_line.split()
                if split_line[0].startswith('+'):
                    return split_line[-1]
        return None
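# A hedged worked example of the diff text check_docker_image_changed parses;
# the image tags below are invented for illustration:
def _docker_diff_example() -> Optional[str]:
    """Hypothetical: pick the added dockerimage line out of a unified diff."""
    sample_diff = ('-  dockerimage: demisto/python3:3.9.0.1\n'
                   '+  dockerimage: demisto/python3:3.9.1.2')
    for diff_line in sample_diff.splitlines():
        if 'dockerimage:' in diff_line and diff_line.split()[0].startswith('+'):
            return diff_line.split()[-1]  # -> 'demisto/python3:3.9.1.2'
    return None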
def get_all_diff_text_files(self, branch_name, is_circle):
    """
    Get all new/modified text files that need to be searched for secrets
    :param branch_name: current branch being worked on
    :param is_circle: boolean to check if being run from circle
    :return: list: list of text files
    """
    if is_circle:
        if not self.prev_ver.startswith('origin'):
            self.prev_ver = 'origin/' + self.prev_ver
        print(f"Running secrets validation against {self.prev_ver}")

        changed_files_string = run_command(f"git diff --name-status {self.prev_ver}...{branch_name}")
    else:
        print("Running secrets validation on all changes")

        changed_files_string = run_command("git diff --name-status --no-merges HEAD")
    return list(self.get_diff_text_files(changed_files_string))
def create_test_file(is_nightly, skip_save=False):
    """Create a file containing all the tests we need to run for the CI"""
    tests_string = ''
    if not is_nightly:
        branches = tools.run_command("git branch")
        branch_name_reg = re.search(r"\* (.*)", branches)
        branch_name = branch_name_reg.group(1)

        print("Getting changed files from the branch: {0}".format(branch_name))
        if branch_name != 'master':
            files_string = tools.run_command("git diff --name-status origin/master...{0}".format(branch_name))
        else:
            commit_string = tools.run_command("git log -n 2 --pretty='%H'")
            commit_string = commit_string.replace("'", "")
            last_commit, second_last_commit = commit_string.split()
            files_string = tools.run_command("git diff --name-status {}...{}".format(second_last_commit, last_commit))

        with open('./Tests/ami_builds.json', 'r') as ami_builds:
            # get versions to check if tests are runnable on those envs
            ami_builds = json.load(ami_builds)
            two_before_ga = ami_builds.get('TwoBefore-GA', '0').split('-')[0]
            one_before_ga = ami_builds.get('OneBefore-GA', '0').split('-')[0]
            ga = ami_builds.get('GA', '0').split('-')[0]

        conf = load_tests_conf()
        with open("./Tests/id_set.json", 'r') as conf_file:
            id_set = json.load(conf_file)
        tests = get_test_list(files_string, branch_name, two_before_ga, conf, id_set)
        create_filter_envs_file(tests, two_before_ga, one_before_ga, ga, conf, id_set)

        tests_string = '\n'.join(tests)
        if tests_string:
            print('Collected the following tests:\n{0}\n'.format(tests_string))
        else:
            print('No filter configured, running all tests')

    if not skip_save:
        print("Creating filter_file.txt")
        with open("./Tests/filter_file.txt", "w") as filter_file:
            filter_file.write(tests_string)
def is_release_branch():
    """Check if we are working on a release branch."""
    diff_string_config_yml = run_command("git diff origin/master .circleci/config.yml")
    if re.search(r'[+-][ ]+CONTENT_VERSION: ".*', diff_string_config_yml):
        return True

    return False
def get_packs_names(target_packs: str, previous_commit_hash: str = "HEAD^") -> set:
    """Detects and returns packs names to upload.

    In case that `Modified` is passed in the target_packs input, checks the git difference between two commits,
    current and previous, and greps only the ones with the prefix Packs/.
    By default this function will receive `All` as target_packs and will return all packs names from the content repo.

    Args:
        target_packs (str): csv packs names or `All` for all available packs in content
                            or `Modified` for only modified packs (currently not in use).
        previous_commit_hash (str): the previous commit to diff with.

    Returns:
        set: unique collection of packs names to upload.
    """
    if target_packs.lower() == "all":
        if os.path.exists(PACKS_FULL_PATH):
            all_packs = {p for p in os.listdir(PACKS_FULL_PATH) if p not in IGNORED_FILES}
            logging.info(f"Number of selected packs to upload is: {len(all_packs)}")
            # return all available packs names
            return all_packs
        else:
            logging.error(f"Folder {PACKS_FOLDER} was not found at the following path: {PACKS_FULL_PATH}")
            sys.exit(1)
    elif target_packs.lower() == "modified":
        cmd = f"git diff --name-only HEAD..{previous_commit_hash} | grep 'Packs/'"
        modified_packs_path = run_command(cmd).splitlines()
        modified_packs = {p.split('/')[1] for p in modified_packs_path if p not in IGNORED_PATHS}
        logging.info(f"Number of modified packs is: {len(modified_packs)}")
        # return only modified packs between two commits
        return modified_packs
    elif target_packs and isinstance(target_packs, str):
        modified_packs = {p.strip() for p in target_packs.split(',') if p not in IGNORED_FILES}
        logging.info(f"Number of selected packs to upload is: {len(modified_packs)}")
        # return only packs from csv list
        return modified_packs
    else:
        logging.critical("Incorrect usage of flag -p. Please check the help section of the upload packs script.")
        sys.exit(1)
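# A hedged usage sketch for get_packs_names; the pack names and the commit
# reference below are illustrative only:
def _get_packs_names_usage() -> None:
    """Hypothetical usage sketch, not part of the original module."""
    get_packs_names('All')                                      # every pack under Packs/
    get_packs_names('Modified', previous_commit_hash='HEAD~1')  # packs changed since that commit
    get_packs_names('CommonScripts,Base')                       # -> {'CommonScripts', 'Base'}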
def main():
    parser = argparse.ArgumentParser(description='Deploy a pack from a contribution PR to a branch')
    parser.add_argument('-p', '--pr_number', help='Contrib PR number')
    parser.add_argument('-b', '--branch', help='The contrib branch')
    parser.add_argument('-c', '--contrib_repo', help='The contrib repo')
    args = parser.parse_args()

    pr_number = args.pr_number
    repo = args.contrib_repo
    branch = args.branch

    packs_dir_names = get_pack_dir(branch, pr_number, repo)
    if not packs_dir_names:
        print_error('Did not find a pack in the PR')
        sys.exit(1)

    print(f'Copy changes from the contributor branch {repo}/{branch} '
          f'in the following packs: ' + '\n'.join(packs_dir_names))

    try:
        for pack_dir in packs_dir_names:
            if os.path.isdir(f'Packs/{pack_dir}'):
                # Remove existing pack
                shutil.rmtree(f'Packs/{pack_dir}')

        # if packs_dir_names = ['pack_a', 'pack_b', 'pack_c'],
        # string_dir_names will be 'Packs/pack_a Packs/pack_b Packs/pack_c'
        string_dir_names = f'Packs/{" Packs/".join(packs_dir_names)}'

        commands = [
            f'git remote add {repo} git@github.com:{repo}/content.git',
            f'git fetch {repo} {branch}',
            f'git checkout {repo}/{branch} {string_dir_names}'
        ]
        for command in commands:
            print(f'Running command {command}')
            run_command(command, is_silenced=False)
    except Exception as e:
        print_error(f'Failed to deploy contributed pack to base branch: {e}')
        sys.exit(1)

    print_success(f'Successfully updated the base branch with the following contrib packs: '
                  f'{", ".join(packs_dir_names)}')
def slack_notifier(build_url, slack_token, test_type, build_number, env_results_file_name=None,
                   packs_results_file=None, job_name="", slack_channel=CONTENT_CHANNEL, gitlab_server=None):
    branches = run_command("git branch")
    branch_name_reg = re.search(r'\* (.*)', branches)
    branch_name = branch_name_reg.group(1)

    if branch_name == 'master' or slack_channel.lower() != CONTENT_CHANNEL:
        logging.info("Extracting build status")
        if test_type == UNITTESTS_TYPE:
            logging.info("Starting Slack notifications about nightly build - unit tests")
            content_team_attachments = get_attachments_for_unit_test(build_url, build_number)
        elif test_type == SDK_UNITTESTS_TYPE:
            logging.info("Starting Slack notifications about SDK nightly build - unit tests")
            content_team_attachments = get_attachments_for_unit_test(build_url, build_number, is_sdk_build=True)
        elif test_type == 'test_playbooks':
            logging.info("Starting Slack notifications about nightly build - tests playbook")
            content_team_attachments, _ = get_attachments_for_test_playbooks(build_url, env_results_file_name)
        elif test_type == SDK_FAILED_STEPS_TYPE:
            logging.info('Starting Slack notifications about SDK nightly build - test playbook')
            content_team_attachments = get_attachments_for_all_steps(build_url, SDK_BUILD_TITLE, build_number)
        elif test_type == BucketUploadFlow.BUCKET_UPLOAD_TYPE:
            logging.info('Starting Slack notifications about upload to production bucket build')
            content_team_attachments = get_attachments_for_bucket_upload_flow(build_url, job_name, build_number,
                                                                              packs_results_file)
        elif test_type == SDK_RUN_AGAINST_FAILED_STEPS_TYPE:
            logging.info("Starting Slack notifications about SDK nightly build - run against an xsoar instance")
            content_team_attachments = get_attachments_for_all_steps(build_url, SDK_XSOAR_BUILD_TITLE, build_number)
        elif job_name and test_type == job_name:
            if job_name.startswith(DMST_SDK_NIGHTLY_GITLAB_JOBS_PREFIX):
                # We run the various circleci sdk nightly builds in a single pipeline in GitLab
                # as different jobs, so it requires different handling
                logging.info(f"Starting Slack notifications for {job_name}")
                if 'unittest' in job_name:
                    content_team_attachments = get_attachments_for_unit_test(build_url, build_number,
                                                                             is_sdk_build=True)
                    # override the 'title' from the attachment to be the job name
                    content_team_attachments[0]['title'] = content_team_attachments[0]['title'].replace(
                        'SDK Nightly Unit Tests', job_name)
                else:
                    content_team_attachments = get_attachments_for_all_steps(build_url, job_name, build_number)
                    # override the 'fields' from the attachment since any failure will be the same as the job name
                    content_team_attachments[0]['fields'] = []
        else:
            raise NotImplementedError('The test_type parameter must be only \'test_playbooks\' or \'unittests\'')

        logging.info(f'Content team attachments:\n{content_team_attachments}')
        logging.info(f"Sending Slack messages to {slack_channel}")
        slack_client = SlackClient(slack_token)
        username = '******' if gitlab_server else 'Content CircleCI'
        slack_client.api_call(
            "chat.postMessage",
            json={'channel': slack_channel,
                  'username': username,
                  'as_user': '******',
                  'attachments': content_team_attachments})
def is_bump_required(self):
    try:
        # `git diff master:<path> <path>` compares the committed blob on master with the
        # working-tree file, so a changed currentVersion means the bump already happened.
        diff = run_command(f"git diff master:{self.metadata_path} {self.metadata_path}")
        if "currentVersion" in diff:
            return False
    except RuntimeError:
        print_warning(f"Unable to locate a pack with the name {self.pack} in the git diff. "
                      f"Please verify the pack exists and the pack name is correct.")
    return True
def get_master_diff(self):
    """Gets difference between current branch and origin/master

    git diff with the --unified=100 option means that if there exists a
    difference between origin/master and current branch, the output will have at most 100
    lines of context.

    Returns:
        str. empty string if no changes made or no origin/master branch, otherwise full difference context.
    """
    return run_command(f'git diff --unified=100 '
                       f'origin/master {self.release_notes_path}')
def main():
    parser = argparse.ArgumentParser(description='Deploy a pack from a contribution PR to a branch')
    parser.add_argument('-p', '--pr_number', help='Contrib PR number')
    parser.add_argument('-b', '--branch', help='The contrib branch')
    parser.add_argument('-c', '--contrib_repo', help='The contrib repo')
    args = parser.parse_args()

    pr_number = args.pr_number
    repo = args.contrib_repo
    branch = args.branch

    pack_dir_name = get_pack_dir(branch, pr_number, repo)
    if not pack_dir_name:
        print_error('Did not find a pack in the PR')
        sys.exit(1)

    pack_dir = f'Packs/{pack_dir_name}'
    try:
        if os.path.isdir(pack_dir):
            # Remove existing pack
            shutil.rmtree(pack_dir)

        commands = [
            f'git remote add {repo} git@github.com:{repo}/content.git',
            f'git fetch {repo} {branch}',
            f'git checkout {repo}/{branch} {pack_dir}'
        ]
        for command in commands:
            print(f'Running command {command}')
            run_command(command, is_silenced=False)
    except Exception as e:
        print_error(f'Failed to deploy contributed pack to base branch: {e}')
        sys.exit(1)

    print_success(f'Successfully updated the base branch with the contrib pack {pack_dir_name}')