def get_pack_names(target_packs: str) -> set:
    """ Retrieves the names of all relevant packs (that aren't ignored)

    Args:
        target_packs (str): csv of pack names or `All` for all available packs in content.

    Returns:
        The set of pack names
    """
    if target_packs.lower() == "all":
        if os.path.exists(PACKS_FULL_PATH):
            all_packs = {p for p in os.listdir(PACKS_FULL_PATH) if p not in IGNORED_FILES}
            logging.info(f"Number of selected packs to upload is: {len(all_packs)}")
            # return all available pack names
            return all_packs
        else:
            logging.error(f"Folder {PACKS_FOLDER} was not found at the following path: {PACKS_FULL_PATH}")
            sys.exit(1)
    elif target_packs and isinstance(target_packs, str):
        # strip each name before checking it against the ignore list, so padded csv entries are filtered too
        modified_packs = {p.strip() for p in target_packs.split(',') if p.strip() not in IGNORED_FILES}
        logging.info(f"Number of selected packs to upload is: {len(modified_packs)}")
        # return only packs from the csv list
        return modified_packs
    else:
        logging.error("Incorrect usage of flag -p. Please check the help section of the upload packs script.")
        sys.exit(1)
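
# Illustrative usage sketch (added for clarity, not part of the original script); the pack names shown
# here are hypothetical:
#
#     packs_to_upload = get_pack_names("Base, CommonScripts")  # -> {'Base', 'CommonScripts'}
#     all_packs = get_pack_names("All")                         # -> every non-ignored folder in Packs/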
def get_latest_version_from_bucket(pack_id: str, production_bucket: Bucket) -> str:
    """ Retrieves the latest version of a pack in the bucket

    Args:
        pack_id (str): The pack id to retrieve the latest version of
        production_bucket (Bucket): The GCS production bucket

    Returns:
        The latest version of the pack as it is in the production bucket
    """
    pack_bucket_path = os.path.join(GCPConfig.PRODUCTION_STORAGE_BASE_PATH, pack_id)
    logging.debug(f'Trying to get latest version for pack {pack_id} from bucket path {pack_bucket_path}')
    # Add a '/' at the end of the prefix to search for the exact pack id
    pack_versions_paths = [f.name for f in production_bucket.list_blobs(prefix=f'{pack_bucket_path}/') if
                           f.name.endswith('.zip')]

    pack_versions = []
    for path in pack_versions_paths:
        versions = PACK_PATH_VERSION_REGEX.findall(path)
        if not versions:
            continue
        pack_versions.append(Version(versions[0]))

    logging.debug(f'Found the following zips for {pack_id} pack: {pack_versions}')
    if pack_versions:
        pack_latest_version = str(max(pack_versions))
        return pack_latest_version
    else:
        logging.error(f'Could not find any versions for pack {pack_id} in bucket path {pack_bucket_path}')
        return ''
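
# Note (added for clarity, assuming `Version` is packaging.version.Version): comparing parsed `Version`
# objects rather than raw strings preserves semantic ordering, e.g.:
#
#     from packaging.version import Version
#     max([Version('1.9.0'), Version('1.10.0')])  # -> Version('1.10.0'); plain string max() would pick '1.9.0'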
def get_dispatch_workflows_ids(github_token: str, branch: str) -> List[int]:
    """ Gets private repo dispatch workflows on the given branch.

    Args:
        github_token: Github bearer token.
        branch: The branch to get the workflows from.

    Returns:
        A list of workflow ids.
    """
    res = requests.get(GET_DISPATCH_WORKFLOWS_URL,
                       headers={'Authorization': f'Bearer {github_token}'},
                       params={
                           'branch': branch,
                           'event': 'repository_dispatch'
                       },
                       verify=False)
    if res.status_code != 200:
        logging.error(
            f'Failed to get private repo workflows, request to '
            f'{GET_DISPATCH_WORKFLOWS_URL} failed with error: {str(res.content)}'
        )
        sys.exit(1)

    try:
        workflows = json.loads(res.content)
    except ValueError:
        logging.error('Unable to parse private repo workflows response')
        sys.exit(1)

    workflows = workflows.get('workflow_runs', [])
    return [workflow.get('id') for workflow in workflows]
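
# Illustrative usage (hypothetical, not in the original): the returned ids can seed a polling loop that
# waits for the dispatched runs to finish, e.g.:
#
#     workflow_ids = get_dispatch_workflows_ids(github_token, 'master')
#     logging.info(f'Found {len(workflow_ids)} repository_dispatch runs on master: {workflow_ids}')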
def copy_id_set(production_bucket: Bucket, build_bucket: Bucket, storage_base_path: str,
                build_bucket_base_path: str):
    """ Copies the id_set.json artifact from the build bucket to the production bucket.

    Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where id_set is copied to.
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where id_set is copied from.
        storage_base_path (str): the path to upload the id_set.json to.
        build_bucket_base_path (str): the path in the build bucket of the id_set.json.
    """
    build_id_set_path = os.path.join(os.path.dirname(build_bucket_base_path), 'id_set.json')
    build_id_set_blob = build_bucket.blob(build_id_set_path)

    if not build_id_set_blob.exists():
        logging.error(f"id_set.json file does not exist in build bucket in path: {build_id_set_path}")
        sys.exit(1)

    prod_id_set_path = os.path.join(os.path.dirname(storage_base_path), 'id_set.json')
    try:
        copied_blob = build_bucket.copy_blob(
            blob=build_id_set_blob, destination_bucket=production_bucket, new_name=prod_id_set_path
        )
        if not copied_blob.exists():
            logging.error(f"Failed to upload id_set.json to {prod_id_set_path}")
            sys.exit(1)
        else:
            logging.success("Finished uploading id_set.json to storage.")
    except Exception as e:
        logging.exception(f"Failed copying ID Set. Additional Info: {str(e)}")
        sys.exit(1)
def copy_index(index_folder_path: str, build_index_blob: Blob, build_index_generation: str,
               production_bucket: Bucket, build_bucket: Bucket, storage_base_path: str,
               build_bucket_base_path: str):
    """ Copies the build bucket index to the production bucket index path.

    Args:
        index_folder_path (str): index folder full path.
        build_index_blob (Blob): google cloud storage object that represents the build index.zip blob.
        build_index_generation (str): downloaded build index generation.
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where the index is copied to.
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where the index is copied from.
        storage_base_path (str): the path to upload the index to.
        build_bucket_base_path (str): the path in the build bucket of the index.
    """
    try:
        build_index_blob.reload()
        build_current_index_generation = build_index_blob.generation

        # disable caching for the prod index blobs
        prod_index_storage_path = os.path.join(storage_base_path, f"{GCPConfig.INDEX_NAME}.zip")
        prod_index_blob = production_bucket.blob(prod_index_storage_path)
        prod_index_blob.cache_control = "no-cache,max-age=0"
        prod_index_json_storage_path = os.path.join(storage_base_path, f"{GCPConfig.INDEX_NAME}.json")
        prod_index_json_blob = production_bucket.blob(prod_index_json_storage_path)
        prod_index_json_blob.cache_control = "no-cache,max-age=0"

        if build_current_index_generation == build_index_generation:
            copied_index = build_bucket.copy_blob(
                blob=build_index_blob, destination_bucket=production_bucket, new_name=prod_index_storage_path
            )
            if copied_index.exists():
                logging.success(f"Finished uploading {GCPConfig.INDEX_NAME}.zip to storage.")
            else:
                logging.error("Failed copying index.zip from build index - blob does not exist.")
                sys.exit(1)
            copied_index_json_blob = build_bucket.blob(
                os.path.join(build_bucket_base_path, f"{GCPConfig.INDEX_NAME}.json")
            )
            copied_index_json = build_bucket.copy_blob(
                blob=copied_index_json_blob, destination_bucket=production_bucket,
                new_name=prod_index_json_storage_path
            )
            if copied_index_json.exists():
                logging.success(f"Finished uploading {GCPConfig.INDEX_NAME}.json to storage.")
            else:
                logging.error("Failed copying index.json from build index - blob does not exist.")
                sys.exit(1)
        else:
            logging.error(f"Failed uploading {GCPConfig.INDEX_NAME} - mismatch in index file generation.")
            logging.error(f"Downloaded build index generation: {build_index_generation}")
            logging.error(f"Current build index generation: {build_current_index_generation}")
            sys.exit(1)
    except Exception as e:
        logging.exception(f"Failed copying {GCPConfig.INDEX_NAME}. Additional Info: {str(e)}")
        sys.exit(1)
    finally:
        shutil.rmtree(index_folder_path)
def run_test_logic(tests_settings: Any, c: Any, failed_playbooks: list, integrations: list, playbook_id: str,
                   succeed_playbooks: list, test_message: str, test_options: dict, slack: Any, circle_ci: str,
                   build_number: str, server_url: str, demisto_user: str, demisto_pass: str,
                   build_name: str) -> bool:
    """
    run_test_logic handles the testing of the integration by triggering check_integration. Afterwards it
    checks the status of the test and reports success or adds the failed test to the list of failed
    integrations.

    :param tests_settings: SettingsTester object which contains the test variables
    :param c: Client for connecting to XSOAR via demisto-py
    :param failed_playbooks: List of failed playbooks; playbooks are added to it when their tests fail.
    :param integrations: List of integrations being tested.
    :param playbook_id: ID of the test playbook being tested.
    :param succeed_playbooks: List of playbooks which have passed tests.
    :param test_message: Name of the playbook/integration being tested. Reported back in the build and used
           to print the test being run to the console.
    :param test_options: Options being passed to the test. PID, Docker Threshold, Timeout, etc.
    :param slack: Slack client used for notifications.
    :param circle_ci: CircleCI token. Used to get the name of the dev who triggered the build.
    :param build_number: The build number of the CI run. Used in the slack message.
    :param server_url: The FQDN of the server the tests are run on.
    :param demisto_user: Username of the demisto user running the tests.
    :param demisto_pass: Password of the demisto user running the tests.
    :param build_name: Name of the build. (Nightly, etc.)
    :return: Boolean indicating if the test was successful.
    """
    status, inc_id = check_integration(c, server_url, demisto_user, demisto_pass, integrations, playbook_id,
                                       options=test_options)
    if status == PB_Status.COMPLETED:
        logging.success(f'PASS: {test_message} succeeded')
        succeed_playbooks.append(playbook_id)
    elif status == PB_Status.NOT_SUPPORTED_VERSION:
        logging.info(f'PASS: {test_message} skipped - not supported version')
        succeed_playbooks.append(playbook_id)
    else:
        logging.error(f'Failed: {test_message} failed')
        playbook_id_with_mock = playbook_id + " (Mock Disabled)"
        failed_playbooks.append(playbook_id_with_mock)

    succeed = status in (PB_Status.COMPLETED, PB_Status.NOT_SUPPORTED_VERSION)
    return succeed
def get_pack_dependencies(client: demisto_client, pack_data: dict, lock: Lock):
    """ Get the pack's required dependencies.

    Args:
        client (demisto_client): The configured client to use.
        pack_data (dict): Contains the pack ID and version.
        lock (Lock): A lock object.

    Returns:
        (list) The pack's dependencies.
    """
    pack_id = pack_data['id']
    logging.debug(f'Getting dependencies for pack {pack_id}')
    try:
        response_data, status_code, _ = demisto_client.generic_request_func(
            client,
            path='/contentpacks/marketplace/search/dependencies',
            method='POST',
            body=[pack_data],
            accept='application/json',
            _request_timeout=None)

        if 200 <= status_code < 300:
            dependencies_data: list = []
            dependants_ids = [pack_id]
            response_data = ast.literal_eval(response_data).get('dependencies', [])
            create_dependencies_data_structure(response_data, dependants_ids, dependencies_data, dependants_ids)
            dependencies_str = ', '.join([dep['id'] for dep in dependencies_data])
            if dependencies_data:
                logging.debug(f'Found the following dependencies for pack {pack_id}: {dependencies_str}')
            return dependencies_data
        if status_code == 400:
            logging.error(f'Unable to find dependencies for {pack_id}.')
            return []
        else:
            result_object = ast.literal_eval(response_data)
            msg = result_object.get('message', '')
            raise Exception(
                f'Failed to get pack {pack_id} dependencies - with status code {status_code}\n{msg}\n'
            )
    except Exception:
        logging.exception(f'The request to get pack {pack_id} dependencies has failed.')

        lock.acquire()
        global SUCCESS_FLAG
        SUCCESS_FLAG = False
        lock.release()
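
# Illustrative usage (an assumption based on the signature, not the original call site): the `lock`
# parameter suggests this function runs from multiple threads that share the module-level SUCCESS_FLAG.
# A minimal sketch, assuming `from concurrent.futures import ThreadPoolExecutor` and a configured client:
#
#     lock = Lock()
#     with ThreadPoolExecutor(max_workers=4) as pool:
#         futures = [pool.submit(get_pack_dependencies, client, pack, lock) for pack in packs_to_search]
#     all_dependencies = [dep for f in futures if f.result() for dep in f.result()]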
def xsoar_configure_and_install_all_packs(options, branch_name: str, build_number: str):
    """
    Args:
        options: script arguments.
        branch_name (str): name of the current branch.
        build_number (str): number of the current build flow.
    """
    # Get the host by the ami env
    server_to_port_mapping, server_version = XSOARBuild.get_servers(ami_env=options.ami_env)

    logging.info('Retrieving the credentials for Cortex XSOAR server')
    secret_conf_file = get_json(file_path=options.secret)
    username: str = secret_conf_file.get('username')
    password: str = secret_conf_file.get('userPassword')

    # Configure the servers
    for server_url, port in server_to_port_mapping.items():
        server = XSOARServer(internal_ip=server_url, port=port, user_name=username, password=password)
        logging.info(f'Adding Marketplace configuration to {server_url}')
        error_msg: str = 'Failed to set marketplace configuration.'
        server.add_server_configuration(config_dict=MARKET_PLACE_CONFIGURATION, error_msg=error_msg)
        XSOARBuild.set_marketplace_url(servers=[server], branch_name=branch_name, ci_build_number=build_number)

        # Acquire the server's host and install all content packs (one threaded execution)
        logging.info(f'Starting to install all content packs in {server_url}')
        server_host: str = server.client.api_client.configuration.host
        success_flag = install_all_content_packs_from_build_bucket(
            client=server.client, host=server_host, server_version=server_version,
            bucket_packs_root_path=GCPConfig.BUILD_BUCKET_PACKS_ROOT_PATH.format(
                branch=branch_name, build=build_number, marketplace='xsoar'),
            service_account=options.service_account, extract_destination_path=options.extract_path
        )
        if success_flag:
            logging.success(f'Finished installing all content packs in {server_url}')
        else:
            logging.error('Failed to install all packs.')
            sys.exit(1)
def log_message_if_statement(statement: bool, error_message: str, success_message: str = None) -> bool:
    """Logs an error message if the statement is false, logs success otherwise.

    Args:
        statement: The boolean statement to check.
        error_message: The error message to log if the statement is false.
        success_message: The success message to log if the statement is true.

    Returns:
        The statement's boolean value.
    """
    if not statement:
        logging.error(error_message)
    elif success_message:
        logging.success(success_message)
    return statement
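
# Usage sketch (added for illustration; the flag names are hypothetical): because the statement's value
# is returned unchanged, the helper can both log and gate a validation result in one expression:
#
#     is_valid = log_message_if_statement(
#         statement=all([pack_exists, version_bumped]),
#         error_message='Pack validation failed.',
#         success_message='Pack validated successfully.')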
def download_and_extract_index(build_bucket: Bucket, extract_destination_path: str, build_bucket_base_path: str):
    """Downloads and extracts the build index zip from cloud storage.

    Args:
        build_bucket (google.cloud.storage.bucket.Bucket): google storage bucket where the build index.zip
            is stored.
        extract_destination_path (str): the full path of the extract folder.
        build_bucket_base_path (str): the path in the build bucket of the index.

    Returns:
        str: extracted build index folder full path.
        Blob: google cloud storage object that represents the build index.zip blob.
        str: downloaded build index generation.
    """
    build_index_storage_path = os.path.join(build_bucket_base_path, f"{GCPConfig.INDEX_NAME}.zip")
    download_build_index_path = os.path.join(extract_destination_path, f"{GCPConfig.INDEX_NAME}.zip")

    build_index_blob = build_bucket.blob(build_index_storage_path)
    build_index_folder_path = os.path.join(extract_destination_path, GCPConfig.INDEX_NAME)

    if not os.path.exists(extract_destination_path):
        os.mkdir(extract_destination_path)

    if not build_index_blob.exists():
        logging.error(f"No build index was found in path: {build_index_storage_path}")
        sys.exit(1)

    build_index_blob.reload()
    build_index_generation = build_index_blob.generation
    build_index_blob.download_to_filename(download_build_index_path, if_generation_match=build_index_generation)

    if os.path.exists(download_build_index_path):
        with ZipFile(download_build_index_path, 'r') as index_zip:
            index_zip.extractall(extract_destination_path)

        if not os.path.exists(build_index_folder_path):
            logging.error(f"Failed creating build {GCPConfig.INDEX_NAME} folder with extracted data.")
            sys.exit(1)

        os.remove(download_build_index_path)
        logging.success(f"Finished downloading and extracting build {GCPConfig.INDEX_NAME} file to "
                        f"{extract_destination_path}")

        return build_index_folder_path, build_index_blob, build_index_generation
    else:
        logging.error(f"Failed to download build {GCPConfig.INDEX_NAME}.zip file from cloud storage.")
        sys.exit(1)
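
# Note (added for clarity): downloading with `if_generation_match=build_index_generation` asks GCS to fail
# the request if the blob changed between the `reload()` and the download, so the generation returned here
# is guaranteed to describe the bytes that were actually extracted. copy_index() later compares this value
# against the blob's live generation before copying to production.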
def main():
    try:
        install_logging('Install_Packs.log', logger=logging)
        options = options_handler()
        branch_name: str = options.branch
        build_number: str = options.build_number
        if options.ami_env in ["XSIAM Master"]:
            xsiam_configure_and_install_flow(options, branch_name, build_number)
        elif options.override_all_packs:
            xsoar_configure_and_install_all_packs(options, branch_name, build_number)
        else:
            # Hot fix: the upload flow failed when trying to install packs that have fromversion above 6.2
            # CIAC-2626 issue in Jira
            xsoar_configure_and_install_all_packs(options, branch_name, build_number)
    except Exception as e:
        logging.error(f'Failed to configure and install packs: {e}')
        logging.error(traceback.format_exc())
        sys.exit(1)
def test_instances(secret_conf_path, server, username, password):
    integrations = get_integrations(secret_conf_path)

    instance_ids = []
    failed_integrations = []
    integrations_counter = 0

    content_installation_client = demisto_client.configure(base_url=server, username=username,
                                                           password=password, verify_ssl=False)
    install_new_content(content_installation_client, server)
    for integration in integrations:
        c = demisto_client.configure(base_url=server, username=username, password=password, verify_ssl=False)
        integrations_counter += 1
        integration_name = integration.get('name')
        integration_instance_name = integration.get('instance_name', '')
        integration_params = integration.get('params')
        devops_comments = integration.get('devops_comments')
        product_description = integration.get('product_description', '')
        is_byoi = integration.get('byoi', True)
        has_integration = integration.get('has_integration', True)
        validate_test = integration.get('validate_test', True)

        if has_integration:
            try:
                instance_id, failure_message = __create_integration_instance(
                    server, username, password, integration_name, integration_instance_name,
                    integration_params, is_byoi, validate_test=validate_test)
            except Exception:
                logging.exception(f'Failed to configure integration with name {integration_name}')
                failed_integrations.append(
                    f"{integration_name} {product_description} - devops comments: {devops_comments}")
                continue
            if failure_message == 'No configuration':
                logging.warning(
                    f"Skipping {integration_name} as it exists in content-test-conf conf.json but not in "
                    f"the content repo")
                continue

            if not instance_id:
                logging.error(f'Failed to create instance of {integration_name} with message: {failure_message}')
                failed_integrations.append(
                    f"{integration_name} {product_description} - devops comments: {devops_comments}")
            else:
                instance_ids.append(instance_id)
                logging.success(f'Created integration {integration_name} successfully')

    __delete_integrations_instances(c, instance_ids)
    return failed_integrations, integrations_counter
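
# For reference (an assumption reconstructed from the keys read above, not a verbatim sample): each
# integration entry in the secret conf is expected to look roughly like:
#
#     {
#         "name": "MyIntegration",                      # hypothetical values throughout
#         "instance_name": "my_instance",
#         "params": {"url": "...", "apikey": "..."},
#         "devops_comments": "owned by team X",
#         "product_description": "Example product",
#         "byoi": true,
#         "has_integration": true,
#         "validate_test": true
#     }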
        'as_user': '******',
        'text': "Detailed list of failing instances can be found in the following link:\n"
                "https://{}-60525392-gh.circle-artifacts.com/0/artifacts/failed_instances.txt"
                .format(build_number)
    })


if __name__ == "__main__":
    install_logging('Instance-Test.log', logger=logging)
    options = options_handler()
    if options.instance_tests:
        env_results_path = os.path.join(os.getenv('ARTIFACTS_FOLDER', './artifacts'), 'env_results.json')
        with open(env_results_path, 'r') as json_file:
            env_results = json.load(json_file)
        server = f'https://localhost:{env_results[0]["TunnelPort"]}'
        slack_notifier(options.slack, options.secret, server, options.user, options.password,
                       options.buildUrl, options.buildNumber)
        # create this file for the destroy_instances script
        with open("./Tests/is_build_passed_{}.txt".format(env_results[0]["Role"].replace(' ', '')), 'a'):
            pass
    else:
        logging.error("Not an instance tests build, stopping Slack notifications about instances")