def release_notes_section(self, store, title_prefix, current_server_version):
    """Generate a release-notes section for the given files.

    Files whose fromversion is above the current server version are
    deferred and collected as beta release-note paths instead.

    :param store: list of file paths to collect release notes from
    :param title_prefix: NEW_RN or MODIFIED_RN, selects the collection mode
    :param current_server_version: current server GA version
    :return: tuple of (section text, list of beta release-note paths),
        or None if title_prefix is unknown
    """
    res = ""
    beta_rn_paths = []  # files deferred to a future (beta) release
    if store:
        new_str = ""
        new_count = 0
        for path in store:
            with open(path, 'r') as file_obj:
                raw_content = file_obj.read()
                cnt = self.load_data(raw_content)
                from_version = cnt.get("fromversion") or cnt.get("fromVersion")
                to_version = cnt.get("toversion") or cnt.get("toVersion")
                if from_version is not None and server_version_compare(current_server_version, from_version) < 0:
                    print(f'{path}: Skipped because from version: {from_version}'
                          f' is greater than current server version: {current_server_version}')
                    beta_rn_paths.append(path)
                    print(f"{path} has added to beta release notes")
                    continue
                if to_version is not None and server_version_compare(to_version, current_server_version) < 0:
                    # fixed: stray '"' and stray ':' typos garbled this message
                    print(f'{path}: Skipped because of to version: {to_version}'
                          f' is smaller than current server version: {current_server_version}')
                    continue
                if title_prefix == NEW_RN:
                    ans = self.added_release_notes(path, cnt)
                elif title_prefix == MODIFIED_RN:
                    ans = self.modified_release_notes(path, cnt)
                else:
                    # should never get here
                    print_error('Error:\n Unknown release notes type {}'.format(title_prefix))
                    return None
                if ans is None:
                    print_error("Error:\n[{}] is missing releaseNotes entry, Please add it under {}".format(
                        path, get_release_notes_file_path(path)))
                    self.is_missing_release_notes = True
                elif ans:
                    new_count += 1
                    new_str += ans
        if new_str:
            if self.show_secondary_header:
                count_str = ""
                if new_count > 1:
                    count_str = " " + str(new_count)
                res = "\n#### %s %s %s\n" % (count_str, title_prefix, self.get_header())
            res += new_str
    print("Collected {} beta notes".format(len(beta_rn_paths)))
    return res, beta_rn_paths
def json_remove_releaseNote_record(file_path, current_server_version):
    """Strip the releaseNotes entry from a JSON file in place.

    Notes are kept untouched when the file's fromversion is newer than the
    current server version (they belong to a future release).

    :param file_path: path of the file
    :param current_server_version: current server GA version
    :return: True if file was changed, otherwise False.
    """
    with open(file_path, 'r') as handle:
        text = handle.read()
    data = json.loads(text)
    from_version = data.get('fromversion') or data.get('fromVersion')
    if from_version and server_version_compare(current_server_version, str(from_version)) < 0:
        print('keeping release notes for ({})\nto be published on {} version release'.format(
            file_path, current_server_version
        ))
        return False
    notes = data.get('releaseNotes')
    if not notes:
        return False
    escaped_notes = re.escape(notes)
    # entry followed by a comma (not the last value in the object)
    text = re.sub(r'\s*"releaseNotes"\s*:\s*"{}",'.format(escaped_notes), '', text)
    # entry preceded by a comma (last value in the object)
    text = re.sub(r',\s*"releaseNotes"\s*:\s*"{}"'.format(escaped_notes), '', text)
    with open(file_path, 'w') as handle:
        handle.write(text)
    return True
def yml_remove_releaseNote_record(file_path, current_server_version):
    """Strip the releaseNotes entry from a YAML file in place.

    Notes are kept untouched when the file's fromversion is newer than the
    current server version (they belong to a future release).

    :param file_path: path of the file
    :param current_server_version: current server GA version
    :return: True if file was changed, otherwise False.
    """
    with open(file_path, 'r') as f:
        yml_text = f.read()
    yml_data = yaml.safe_load(yml_text)
    v = yml_data.get('fromversion') or yml_data.get('fromVersion')
    if v and server_version_compare(current_server_version, str(v)) < 0:
        print('keeping release notes for ({})\nto be published on {} version release'.format(
            file_path, current_server_version
        ))
        return False
    rn = yml_data.get('releaseNotes')
    if rn:
        # Spaces in the parsed note may correspond to folded newlines in the
        # raw yaml, so match any whitespace run where the note has a space.
        # fixed: re.escape stopped escaping spaces in Python 3.7, making the
        # old .replace(r'\ ', r'\s+') a silent no-op; normalize both forms.
        pattern = re.escape(rn).replace(r'\ ', ' ').replace(' ', r'\s+')
        yml_text = re.sub(r'\n?releaseNotes: [\'"]?{}[\'"]?'.format(pattern), '', yml_text)
        with open(file_path, 'w') as f:
            f.write(yml_text)
        return True
    return False
def release_notes_section(self, store, title_prefix, current_server_version):
    """Generate a release-notes section for the given files.

    :param store: list of file paths to collect release notes from
    :param title_prefix: NEW_RN or MODIFIED_RN, selects the collection mode
    :param current_server_version: current server GA version
    :return: the section text, or None on error (unknown prefix, or a file
        with a missing releaseNotes/description entry)
    """
    res = ""
    missing_rn = False
    if store:
        new_str = ""
        new_count = 0
        for path in store:
            with open(path, 'r') as f:
                # fixed: was a py2-style `print(...),` artifact which printed a
                # newline and built a throwaway tuple; stay on the same line so
                # the later Success/Skipped print completes it
                print(' - adding release notes ({}) for file - [{}]... '.format(path, title_prefix), end='')
                raw_content = f.read()
                cnt = self.load_data(raw_content)
                from_version = cnt.get("fromversion")
                if from_version is not None and server_version_compare(current_server_version, from_version) < 0:
                    print("Skipped because of version differences")
                    continue
                if title_prefix == NEW_RN:
                    ans = self.added_release_notes(cnt)
                elif title_prefix == MODIFIED_RN:
                    ans = self.modified_release_notes(cnt)
                else:
                    # should never get here
                    # fixed: the original %-format had no placeholder and raised
                    # TypeError instead of reporting the error
                    print_error("Error:\n Unknown release notes type %s" % (title_prefix,))
                    return None
                if ans is None:
                    print_error("Error:\n[%s] is missing releaseNotes/description entry" % (path,))
                    missing_rn = True
                elif ans:
                    new_count += 1
                    new_str += ans
                    print("Success")
                else:
                    print("Skipped")
        if new_str:
            if self.show_secondary_header:
                count_str = ""
                if new_count > 1:
                    count_str = " " + str(new_count)
                res = "\n#### %s %s %s\n" % (count_str, title_prefix, self.get_header())
            res += new_str
    if missing_rn:
        return None
    return res
def json_remove_releaseNote_record(file_path, current_server_version):
    '''
    locate and remove release notes from a json file, line by line so
    multiline notes are removed too.
    :param file_path: path of the file
    :param current_server_version: current server GA version
    :return: True if file was changed, otherwise False.
    '''
    with open(file_path, 'r') as f:
        lines = f.readlines()
    # fixed: JSON keys are quoted, so the old bare-key prefixes
    # ('fromversion', 'fromVersion') never matched '"fromVersion":' and the
    # version guard below was dead code
    version_keys = ('"fromversion"', '"fromVersion"')
    clear_release_notes = False
    consider_multiline_notes = False
    new_lines = []
    for line in lines:
        stripped = line.strip()
        if stripped.startswith(version_keys):
            # extract the version value from '"fromVersion": "4.1.0",'
            v = stripped.split(':', 1)[1].strip().rstrip(',').strip('"')
            # compare server versions
            if server_version_compare(current_server_version, v) < 0:
                print("keeping release notes for ({})\nto be published on {} version release ".format(
                    file_path, current_server_version))
                clear_release_notes = False
                break
        if stripped.startswith('"releaseNotes"'):
            # releaseNote title: ignore current line and consider following
            # lines as part of it (multiline notes)
            clear_release_notes = True
            consider_multiline_notes = True
        elif consider_multiline_notes:
            # right after a releaseNote block (single or multi line)
            if stripped:
                if stripped[0] == '"':
                    # regular line
                    consider_multiline_notes = False
                    new_lines.append(line)
                elif stripped == '}':
                    # releaseNote was at end of dict - remove the now-dangling
                    # ',' from the last kept line
                    idx = new_lines[-1].rfind(',')
                    new_lines[-1] = new_lines[-1][:idx] + new_lines[-1][idx + 1:]
                    consider_multiline_notes = False
                    new_lines.append(line)
                # otherwise: line is part of a multiline releaseNote, ignore it
        else:
            # regular line
            new_lines.append(line)
    if clear_release_notes:
        with open(file_path, 'w') as f:
            f.write(''.join(new_lines))
    return clear_release_notes
def json_remove_releaseNote_record(file_path, current_server_version):
    '''
    locate and remove release notes from a json file, line by line so
    multiline notes are removed too.
    :param file_path: path of the file
    :param current_server_version: current server GA version
    :return: True if file was changed, otherwise False.
    '''
    with open(file_path, 'r') as src:
        all_lines = src.readlines()
    # fixed: JSON keys are quoted, so the old bare-key prefixes
    # ('fromversion', 'fromVersion') never matched '"fromVersion":' and the
    # version guard below was dead code
    quoted_version_keys = ('"fromversion"', '"fromVersion"')
    changed = False
    inside_note = False
    kept = []
    for raw in all_lines:
        content = raw.strip()
        if content.startswith(quoted_version_keys):
            # extract the version value from '"fromVersion": "4.1.0",'
            version_value = content.split(':', 1)[1].strip().rstrip(',').strip('"')
            # compare server versions
            if server_version_compare(current_server_version, version_value) < 0:
                print("keeping release notes for ({})\nto be published on {} version release ".format(
                    file_path, current_server_version))
                changed = False
                break
        if content.startswith('"releaseNotes"'):
            # title line: drop it and treat following lines as its body
            changed = True
            inside_note = True
        elif inside_note:
            if content:
                if content[0] == '"':
                    # next key reached: back to normal copying
                    inside_note = False
                    kept.append(raw)
                elif content == '}':
                    # the note was the last entry - strip the dangling ','
                    # from the previous kept line
                    comma_at = kept[-1].rfind(',')
                    kept[-1] = kept[-1][:comma_at] + kept[-1][comma_at + 1:]
                    inside_note = False
                    kept.append(raw)
                # otherwise: part of a multiline note, drop it
        else:
            # regular line
            kept.append(raw)
    if changed:
        with open(file_path, 'w') as dst:
            dst.write(''.join(kept))
    return changed
def release_notes_section(self, store, title_prefix, current_server_version):
    """Generate a release-notes section for the given files.

    :param store: list of file paths to collect release notes from
    :param title_prefix: NEW_RN or MODIFIED_RN, selects the collection mode
    :param current_server_version: current server GA version
    :return: the section text, or None if title_prefix is unknown
    """
    res = ""
    if not store:
        return res
    collected = []
    new_count = 0
    for path in store:
        with open(path, 'r') as file_obj:
            print(' - adding release notes ({}) for file - [{}]... '.format(path, title_prefix), end='')
            cnt = self.load_data(file_obj.read())
            from_version = cnt.get("fromversion")
            if from_version is not None and server_version_compare(current_server_version, from_version) < 0:
                print("Skipped because of version differences")
                continue
            if title_prefix == NEW_RN:
                ans = self.added_release_notes(path, cnt)
            elif title_prefix == MODIFIED_RN:
                ans = self.modified_release_notes(path, cnt)
            else:
                # should never get here
                print_error('Error:\n Unknown release notes type {}'.format(title_prefix))
                return None
            if ans is None:
                print_error("Error:\n[{}] is missing releaseNotes entry, Please add it under {}".format(
                    path, get_release_notes_file_path(path)))
                self.is_missing_release_notes = True
            elif ans:
                new_count += 1
                collected.append(ans)
                print("Success")
            else:
                print("Skipped")
    new_str = ''.join(collected)
    if new_str:
        if self.show_secondary_header:
            count_str = " " + str(new_count) if new_count > 1 else ""
            res = "\n#### %s %s %s\n" % (count_str, title_prefix, self.get_header())
        res += new_str
    return res
def is_docker_image_changed(self):
    """Check if the Docker image was changed or not.

    Only enforced for integrations targeting servers below 5.0; a change
    marks the instance invalid and returns True.
    """
    # Unnecessary to check docker image only on 5.0 and up
    if server_version_compare(self.old_integration.get('fromversion', '0'), '5.0.0') >= 0:
        return False
    old_docker = self.old_integration.get('script', {}).get('dockerimage', "")
    new_docker = self.current_integration.get('script', {}).get('dockerimage', "")
    if old_docker != new_docker:
        print_error("Possible backwards compatibility break, You've changed the docker for the file {}"
                    " this is not allowed.".format(self.file_path))
        self._is_valid = False
        return True
    return False
def is_docker_image_changed(self):
    """Check whether the script's docker image has been changed.

    Only enforced for scripts targeting servers below 5.0.
    """
    # Unnecessary to check docker image only on 5.0 and up
    if server_version_compare(self.old_script.get('fromversion', '0'), '5.0.0') >= 0:
        return False
    previous_image = get_dockerimage45(self.old_script)
    current_image = get_dockerimage45(self.current_script)
    if previous_image == current_image:
        return False
    print_error(
        "Possible backwards compatibility break, You've changed the docker for the file {}"
        " this is not allowed. Old: {}. New: {}".format(
            self.file_path, previous_image, current_image))
    return True
def yml_remove_releaseNote_record(file_path, current_server_version):
    '''
    locate and remove release notes from a yaml file, line by line so a
    multiline note (indented continuation lines) is removed as a whole.
    :param file_path: path of the file
    :return: True if file was changed, otherwise False.
    '''
    with open(file_path, 'r') as f:
        lines = f.readlines()
    version_keys = ('fromversion', 'fromVersion')
    found_notes = False
    in_notes_block = False
    kept = []
    for line in lines:
        if line.startswith(version_keys):
            version_value = line[len(version_keys[0]) + 1:].strip()
            # keep notes untouched when they target a newer server version
            if server_version_compare(current_server_version, version_value) < 0:
                print('keeping release notes for ({})\nto be published on {} version release'.format(
                    file_path, current_server_version
                ))
                found_notes = False
                break
        if line.startswith('releaseNotes:'):
            # title line: drop it; indented lines that follow belong to it
            found_notes = True
            in_notes_block = True
        elif in_notes_block and line[0].isspace():
            # continuation of a multiline releaseNotes value: drop it
            continue
        else:
            in_notes_block = False
            kept.append(line)
    if found_notes:
        with open(file_path, 'w') as f:
            f.write(''.join(kept))
    return found_notes
def yml_remove_releaseNote_record(file_path, current_server_version):
    '''
    locate and remove release notes from a yaml file, line by line so a
    multiline note (indented continuation lines) is removed as a whole.
    :param file_path: path of the file
    :return: True if file was changed, otherwise False.
    '''
    with open(file_path, 'r') as src:
        original_lines = src.readlines()
    VERSION_PREFIXES = ('fromversion', 'fromVersion')
    should_rewrite = False
    skipping_note_body = False
    remaining = []
    for current in original_lines:
        if current.startswith(VERSION_PREFIXES):
            candidate = current[len(VERSION_PREFIXES[0]) + 1:].strip()
            # notes targeting a newer server version stay in the file
            if server_version_compare(current_server_version, candidate) < 0:
                print('keeping release notes for ({})\nto be published on {} version release'.format(
                    file_path, current_server_version))
                should_rewrite = False
                break
        if current.startswith('releaseNotes:'):
            # title line: skip it and treat indented followers as its body
            should_rewrite = True
            skipping_note_body = True
        elif skipping_note_body:
            if current[0].isspace():
                # indented: still inside the multiline note, drop it
                pass
            else:
                skipping_note_body = False
                remaining.append(current)
        else:
            remaining.append(current)
    if should_rewrite:
        with open(file_path, 'w') as dst:
            dst.write(''.join(remaining))
    return should_rewrite
def should_clear(file_path, current_server_version="0.0.0"):
    """Decide whether release notes may be cleared from the given file.

    :param file_path: path of the yml/json file
    :param current_server_version: current server version
    :return: False when the file targets a newer server (notes are kept for
        a future release), True otherwise.
    """
    data = get_file_data(file_path)
    from_version = data.get('fromversion') or data.get('fromVersion')
    if not from_version:
        return True
    if server_version_compare(current_server_version, str(from_version)) >= 0:
        return True
    print_warning(
        'keeping release notes for ({})\nto be published on {} version release'
        .format(file_path, from_version))
    return False
def is_docker_image_changed(self):
    """Check if the Docker image was changed or not.

    Only enforced for integrations targeting servers below 5.0: a changed
    image marks the instance invalid; a removed image only warns.
    """
    # Unnecessary to check docker image only on 5.0 and up
    if server_version_compare(self.old_integration.get('fromversion', '0'), '5.0.0') >= 0:
        return False
    old_docker = get_dockerimage45(self.old_integration.get('script', {}))
    new_docker = get_dockerimage45(self.current_integration.get('script', {}))
    if old_docker == new_docker:
        return False
    if new_docker:
        print_error("Possible backwards compatibility break, You've changed the docker for the file {}"
                    " this is not allowed. Old: {}. New: {}".format(self.file_path, old_docker, new_docker))
        self._is_valid = False
        return True
    print_warning("Possible backwards compatibility break. You've removed "
                  "the docker image for the file {0}, make sure this isn't a mistake. "
                  "Old image: {1}".format(self.file_path, old_docker))
    return False
def should_clear(file_path, current_server_version="0.0.0"):
    """Decide whether release notes may be cleared from the given file.

    :param file_path: path of the yml/json file
    :param current_server_version: current server version
    :return: True when notes may be cleared; False for unsupported file
        types or files targeting a newer server version.
    """
    _, extension = os.path.splitext(file_path)
    loader = FILE_TYPE_DICT.get(extension)
    if loader is None:
        return False
    with open(file_path, 'r') as file_obj:
        data = loader(file_obj)
    from_version = data.get('fromversion') or data.get('fromVersion')
    if from_version and server_version_compare(current_server_version, str(from_version)) < 0:
        print_warning('keeping release notes for ({})\nto be published on {} version release'.format(
            file_path, str(from_version)))
        return False
    return True
def run_test_scenario(t, c, proxy, default_test_timeout, skipped_tests_conf, nightly_integrations,
                      skipped_integrations_conf, skipped_integration, is_nightly, run_all_tests,
                      is_filter_configured, filtered_tests, skipped_tests, demisto_api_key, secret_params,
                      failed_playbooks, unmockable_integrations, succeed_playbooks, slack, circle_ci,
                      build_number, server, build_name, server_numeric_version, is_ami=True):
    """Run a single test scenario from the test configuration.

    Applies the skip rules (nightly, filter, skip-list, skipped integration,
    server version range) before delegating to run_test.
    """
    playbook_id = t['playbookID']
    is_nightly_test = t.get('nightly', False)
    integrations_conf = t.get('integrations', [])
    instance_names_conf = t.get('instance_names', [])
    test_message = 'playbook: ' + playbook_id
    test_options = {'timeout': t.get('timeout', default_test_timeout)}
    if not isinstance(integrations_conf, list):
        integrations_conf = [integrations_conf]
    if not isinstance(instance_names_conf, list):
        instance_names_conf = [instance_names_conf]
    has_skipped_integration, integrations, is_nightly_integration = collect_integrations(
        integrations_conf, skipped_integration, skipped_integrations_conf, nightly_integrations)
    # Skip nightly test
    if (is_nightly_test or is_nightly_integration) and not is_nightly:
        print('\n------ Test {} start ------'.format(test_message))
        print('Skip test')
        print('------ Test {} end ------\n'.format(test_message))
        return
    if not run_all_tests:
        # Skip filtered test
        if is_filter_configured and playbook_id not in filtered_tests:
            return
        # Skip bad test
        if playbook_id in skipped_tests_conf:
            skipped_tests.add("{0} - reason: {1}".format(playbook_id, skipped_tests_conf[playbook_id]))
            return
        # Skip integration
        if has_skipped_integration:
            return
        # Skip version mismatch test
        test_from_version = t.get('fromversion', '0.0.0')
        test_to_version = t.get('toversion', '99.99.99')
        if (server_version_compare(test_from_version, server_numeric_version) > 0
                or server_version_compare(test_to_version, server_numeric_version) < 0):
            print('\n------ Test {} start ------'.format(test_message))
            print_warning('Test {} ignored due to version mismatch (test versions: {}-{})'.format(
                test_message, test_from_version, test_to_version))
            print('------ Test {} end ------\n'.format(test_message))
            return
    are_params_set = set_integration_params(demisto_api_key, integrations, secret_params,
                                            instance_names_conf, playbook_id)
    if not are_params_set:
        failed_playbooks.append(playbook_id)
        return
    test_message = update_test_msg(integrations, test_message)
    run_test(c, proxy, failed_playbooks, integrations, unmockable_integrations, playbook_id,
             succeed_playbooks, test_message, test_options, slack, circle_ci, build_number,
             server, build_name, is_ami)
def write_yaml_with_docker(output_path, yml_text, yml_data, script_obj):
    """Write out the yaml file taking into account the dockerimage45 tag.

    If it is present will create 2 integration files:
    one for 4.5 and below and one for 5.0.

    Arguments:
        output_path {str} -- output path
        yml_text {str} -- yml text
        yml_data {dict} -- yml object
        script_obj {dict} -- script object

    Returns:
        dict -- dictionary mapping output path to text data
    """
    output_map = {output_path: yml_text}
    if 'dockerimage45' in script_obj:
        # we need to split into two files 45 and 50. Current one will be from version 5.0
        # remove the dockerimage45 line from both variants
        yml_text = re.sub(r'^\s*dockerimage45:.*\n?', '', yml_text, flags=re.MULTILINE)
        yml_text45 = yml_text
        if 'fromversion' in yml_data:
            # validate that this is a script/integration which targets both 4.5 and 5.0+.
            if server_version_compare(yml_data['fromversion'], '5.0.0') >= 0:
                raise ValueError('Failed: {}. dockerimage45 set for 5.0 and later only'.format(output_path))
            yml_text = re.sub(r'^fromversion:.*$', 'fromversion: 5.0.0', yml_text, flags=re.MULTILINE)
        else:
            yml_text = 'fromversion: 5.0.0\n' + yml_text
        if 'toversion' in yml_data:
            # validate that this is a script/integration which targets both 4.5 and 5.0+.
            if server_version_compare(yml_data['toversion'], '5.0.0') < 0:
                raise ValueError('Failed: {}. dockerimage45 set for 4.5 and earlier only'.format(output_path))
            yml_text45 = re.sub(r'^toversion:.*$', 'toversion: 4.5.9', yml_text45, flags=re.MULTILINE)
        else:
            yml_text45 = 'toversion: 4.5.9\n' + yml_text45
        if script_obj.get('dockerimage45'):
            # we have a value for dockerimage45 - set it as dockerimage
            yml_text45 = re.sub(r'(^\s*dockerimage:).*$', r'\1 ' + script_obj.get('dockerimage45'),
                                yml_text45, flags=re.MULTILINE)
        else:
            # no value for dockerimage45 - remove the dockerimage entry
            yml_text45 = re.sub(r'^\s*dockerimage:.*\n?', '', yml_text45, flags=re.MULTILINE)
        output_path45 = re.sub(r'\.yml$', '_45.yml', output_path)
        output_map = {output_path: yml_text, output_path45: yml_text45}
    for file_path, file_text in output_map.items():
        if IS_CI and os.path.isfile(file_path):
            # fixed: report the colliding file_path (which may be the _45.yml
            # variant), not unconditionally the base output_path
            raise ValueError(
                'Output file already exists: {}.'
                ' Make sure to remove this file from source control'
                ' or rename this package (for example if it is a v2).'.format(file_path))
        with io.open(file_path, mode='w', encoding='utf-8') as file_:
            file_.write(file_text)
    return output_map
def run_test_scenario(tests_settings, t, proxy, default_test_timeout, skipped_tests_conf,
                      nightly_integrations, skipped_integrations_conf, skipped_integration, is_nightly,
                      run_all_tests, is_filter_configured, filtered_tests, skipped_tests, secret_params,
                      failed_playbooks, unmockable_integrations, succeed_playbooks, slack, circle_ci,
                      build_number, server, build_name, server_numeric_version, demisto_api_key,
                      prints_manager, thread_index=0, is_ami=True):
    """Run a single test scenario from the test configuration.

    Applies the skip rules (nightly, filter, skip-list, skipped integration,
    server version range) before delegating to run_test. Console output goes
    through prints_manager so parallel workers do not interleave.
    """
    playbook_id = t['playbookID']
    is_nightly_test = t.get('nightly', False)
    integrations_conf = t.get('integrations', [])
    instance_names_conf = t.get('instance_names', [])
    test_message = 'playbook: ' + playbook_id
    test_options = {
        'timeout': t.get('timeout', default_test_timeout),
        'memory_threshold': t.get('memory_threshold', Docker.DEFAULT_CONTAINER_MEMORY_USAGE),
        'pid_threshold': t.get('pid_threshold', Docker.DEFAULT_CONTAINER_PIDS_USAGE)
    }
    if not isinstance(integrations_conf, list):
        integrations_conf = [integrations_conf]
    if not isinstance(instance_names_conf, list):
        instance_names_conf = [instance_names_conf]
    has_skipped_integration, integrations, is_nightly_integration = collect_integrations(
        integrations_conf, skipped_integration, skipped_integrations_conf, nightly_integrations)
    # Skip nightly test
    if (is_nightly_test or is_nightly_integration) and not is_nightly:
        prints_manager.add_print_job('\n------ Test {} start ------'.format(test_message), print, thread_index)
        prints_manager.add_print_job('Skip test', print, thread_index)
        prints_manager.add_print_job('------ Test {} end ------\n'.format(test_message), print, thread_index)
        return
    if not run_all_tests:
        # Skip filtered test
        if is_filter_configured and playbook_id not in filtered_tests:
            return
        # Skip bad test
        if playbook_id in skipped_tests_conf:
            skipped_tests.add("{0} - reason: {1}".format(playbook_id, skipped_tests_conf[playbook_id]))
            return
        # Skip integration
        if has_skipped_integration:
            return
        # Skip version mismatch test
        test_from_version = t.get('fromversion', '0.0.0')
        test_to_version = t.get('toversion', '99.99.99')
        if (server_version_compare(test_from_version, server_numeric_version) > 0
                or server_version_compare(test_to_version, server_numeric_version) < 0):
            prints_manager.add_print_job('\n------ Test {} start ------'.format(test_message), print, thread_index)
            warning_message = 'Test {} ignored due to version mismatch (test versions: {}-{})'.format(
                test_message, test_from_version, test_to_version)
            prints_manager.add_print_job(warning_message, print_warning, thread_index)
            prints_manager.add_print_job('------ Test {} end ------\n'.format(test_message), print, thread_index)
            return
    are_params_set = set_integration_params(demisto_api_key, integrations, secret_params,
                                            instance_names_conf, playbook_id, prints_manager,
                                            thread_index=thread_index)
    if not are_params_set:
        failed_playbooks.append(playbook_id)
        return
    test_message = update_test_msg(integrations, test_message)
    options = options_handler()
    stdout, stderr = get_docker_memory_data()
    text = 'Memory Usage: {}'.format(stdout) if not stderr else stderr
    if options.nightly and options.memCheck and not tests_settings.is_local_run:
        send_slack_message(slack, SLACK_MEM_CHANNEL_ID, text, 'Content CircleCI', 'False')
        stdout, stderr = get_docker_processes_data()
        text = stdout if not stderr else stderr
        send_slack_message(slack, SLACK_MEM_CHANNEL_ID, text, 'Content CircleCI', 'False')
    run_test(tests_settings, demisto_api_key, proxy, failed_playbooks, integrations,
             unmockable_integrations, playbook_id, succeed_playbooks, test_message, test_options,
             slack, circle_ci, build_number, server, build_name, prints_manager, is_ami,
             thread_index=thread_index)