def _content_files_list_generator_factory(self, dir_name: str, suffix: str) -> Iterator[Any]:
    """Generic content objects iterable generator.

    Args:
        dir_name: Directory name, for example: Integrations, Documentations etc.
        suffix: File suffix to search for, if not supplied then any suffix.

    Returns:
        object: Any valid content object found in the given directory.
    """
    # ReleaseNotes, ReleaseNotesConfig and TestPlaybooks are missing from the id set
    # but are still needed in the pack's zip, so they are never filtered out.
    always_included = [FileType.RELEASE_NOTES.value, FileType.RELEASE_NOTES_CONFIG.value,
                       FileType.TEST_PLAYBOOK.value]
    for object_path in (self._path / dir_name).glob(patterns=[f"*.{suffix}", f"*/*.{suffix}"]):
        content_object = path_to_pack_object(object_path)
        if not self._filter_items_by_id_set or content_object.type().value in always_included:
            yield content_object
            continue
        # The id-set filtering flag is on: skip content items absent from the id set.
        object_id = content_object.get_id()
        if is_object_in_id_set(object_id, self._pack_info_from_id_set):
            yield content_object
        else:
            logging.warning(f'Skipping object {object_path} with id "{object_id}" since it\'s missing from '
                            f'the given id set')
def _content_dirs_list_generator_factory(self, dir_name) -> Iterator[Any]:
    """Generic content objects iterable generator.

    Args:
        dir_name: Directory name, for example: Tools.

    Returns:
        object: Any valid content object found in the given directory.
    """
    # Only sub-directories are matched ("*/"); each one is turned into a pack object.
    sub_dirs = (self._path / dir_name).glob(patterns=["*/"])
    yield from map(path_to_pack_object, sub_dirs)
def _content_files_list_generator_factory(self, dir_name: str, suffix: str) -> Iterator[Any]:
    """Generic content objects iterable generator.

    Args:
        dir_name: Directory name, for example: Integrations, Documentations etc.
        suffix: File suffix to search for, if not supplied then any suffix.

    Returns:
        object: Any valid content object found in the given directory.
    """
    # Match files one and two levels deep under the directory.
    search_patterns = [f"*.{suffix}", f"*/*.{suffix}"]
    for found_path in (self._path / dir_name).glob(patterns=search_patterns):
        yield path_to_pack_object(found_path)
def test_objects_factory():
    # The factory should resolve a doc-file path to a DocFile instance.
    assert isinstance(path_to_pack_object(DOC_FILE), DocFile)
def test_objects_factory(self):
    # The factory should resolve a classifier path to a Classifier instance.
    assert isinstance(path_to_pack_object(CLASSIFIER), Classifier)
def test_objects_factory():
    # The factory should resolve a widget path to a Widget instance.
    assert isinstance(path_to_pack_object(WIDGET), Widget)
def test_objects_factory():
    # The factory should resolve a release-note path to a ReleaseNote instance.
    assert isinstance(path_to_pack_object(RELEASE_NOTE), ReleaseNote)
def test_objects_factory(self):
    # The factory should resolve an indicator-type path to an IndicatorType instance.
    assert isinstance(path_to_pack_object(INDICATOR_TYPE), IndicatorType)
def test_objects_factory():
    # The factory should resolve a .pack-ignore path to a PackIgnore instance.
    assert isinstance(path_to_pack_object(PACK_IGNORE), PackIgnore)
def test_objects_factory(self, datadir, file: str):
    # Each parametrized data file should be resolved to a LayoutsContainer instance.
    assert isinstance(path_to_pack_object(datadir[file]), LayoutsContainer)
def test_objects_factory(file: Path):
    # The factory should resolve the README path to a Readme instance.
    assert isinstance(path_to_pack_object(README), Readme)
def file_uploader(self, path: str) -> int:
    """
    Upload a file.
    Args:
        path: The path of the file to upload. The rest of the parameters are taken from self.

    Returns:
        int: SUCCESS_RETURN_CODE on a successful upload, ERROR_RETURN_CODE otherwise.
    """
    # Resolve the path to a content object; an unrecognized file type is recorded
    # as a failure rather than raised to the caller.
    try:
        upload_object: Union[YAMLObject, JSONObject] = path_to_pack_object(path)
    except ContentFactoryError:
        file_name = os.path.split(path)[-1]
        message = f"Cannot upload {path} as the file type is not supported for upload."
        if self.log_verbose:
            click.secho(message, fg='bright_red')
        self.failed_uploaded_files.append((file_name, "Unknown", message))
        return ERROR_RETURN_CODE
    file_name = upload_object.path.name  # type: ignore
    entity_type = find_type(str(upload_object.path))
    if entity_type in UPLOAD_SUPPORTED_ENTITIES:
        # Only upload when the target server version falls within the object's
        # declared from/to version range.
        if upload_object.from_version <= self.demisto_version <= upload_object.to_version:  # type: ignore
            try:
                result = upload_object.upload(self.client)  # type: ignore
                if self.log_verbose:
                    print_v(f'Result:\n{result.to_str()}', self.log_verbose)
                click.secho(f'Uploaded {entity_type} - \'{os.path.basename(path)}\': successfully', fg='green')
                self.successfully_uploaded_files.append((file_name, entity_type.value))
                return SUCCESS_RETURN_CODE
            except Exception as err:
                # Any upload failure is parsed into a human-readable message and recorded.
                message = parse_error_response(err, entity_type, file_name, self.log_verbose)
                self.failed_uploaded_files.append((file_name, entity_type.value, message))
                return ERROR_RETURN_CODE
        else:
            # Version mismatch: tracked separately from other failures.
            if self.log_verbose:
                click.secho(f"Input path {path} is not uploading due to version mismatch.\n"
                            f"XSOAR version is: {self.demisto_version} while the file's version is "
                            f"{upload_object.from_version} - {upload_object.to_version}", fg='bright_red')
            self.unuploaded_due_to_version.append((file_name, entity_type.value, self.demisto_version,
                                                   upload_object.from_version, upload_object.to_version))
            return ERROR_RETURN_CODE
    else:
        # Entity type is recognized but not uploadable.
        if self.log_verbose:
            click.secho(f'\nError: Given input path: {path} is not uploadable. '
                        f'Input path should point to one of the following:\n'
                        f' 1. Pack\n'
                        f' 2. A content entity directory that is inside a pack. For example: an Integrations directory or '
                        f'a Layouts directory\n'
                        f' 3. Valid file that can be imported to Cortex XSOAR manually. '
                        f'For example a playbook: helloWorld.yml', fg='bright_red')
        # NOTE(review): 'Unsuported' is misspelled in this recorded message — confirm
        # nothing matches on it before correcting.
        self.failed_uploaded_files.append((file_name, entity_type.value, 'Unsuported file path/type'))
        return ERROR_RETURN_CODE
def test_objects_factory():
    # The factory should resolve a .secrets-ignore path to a SecretIgnore instance.
    assert isinstance(path_to_pack_object(SECRETS_IGNORE), SecretIgnore)
def test_objects_factory(self, pack):
    # A modeling-rule yml path should be resolved to a ModelingRule instance.
    rule = get_modeling_rule(pack, 'modeling_rule_name')
    assert isinstance(path_to_pack_object(rule.yml._tmp_path), ModelingRule)
def test_objects_factory(self, datadir):
    # The sample data file should be resolved to a Wizard instance.
    assert isinstance(path_to_pack_object(datadir[sample_file_path]), Wizard)
def test_objects_factory(self, pack):
    # A parsing-rule yml path should be resolved to a ParsingRule instance.
    rule = get_parsing_rule(pack, 'parsing_rule_name')
    assert isinstance(path_to_pack_object(rule.yml._tmp_path), ParsingRule)
def test_objects_factory(pack):
    # A correlation-rule path should be resolved to a CorrelationRule instance.
    rule = get_correlation_rule(pack, 'correlation_rule_name')
    assert isinstance(path_to_pack_object(rule.correlation_rule_tmp_path), CorrelationRule)
def test_objects_factory(pack):
    # An XSIAM dashboard path should be resolved to an XSIAMDashboard instance.
    dashboard = get_xsiam_dashboard(pack, 'xsiam_dashboard_name')
    assert isinstance(path_to_pack_object(dashboard.xsiam_dashboard_tmp_path), XSIAMDashboard)
def test_objects_factory():
    # The factory should resolve a dashboard path to a Dashboard instance.
    assert isinstance(path_to_pack_object(DASHBOARD), Dashboard)
def test_objects_factory(self, datadir):
    # An old-format classifier file should be resolved to an OldClassifier instance.
    assert isinstance(path_to_pack_object(datadir['old_classifier.json']), OldClassifier)
def test_objects_factory(pack):
    # An XSIAM report path should be resolved to an XSIAMReport instance.
    report = get_xsiam_report(pack, 'xsiam_report_name')
    assert isinstance(path_to_pack_object(report.xsiam_report_tmp_path), XSIAMReport)
def test_objects_factory(self, datadir):
    # A classifier-mapper file should be resolved to a ClassifierMapper instance.
    assert isinstance(path_to_pack_object(datadir['classifier_mapper.json']), ClassifierMapper)
def test_objects_factory():
    # The factory should resolve an incident-type path to an IncidentType instance.
    assert isinstance(path_to_pack_object(INCIDENT_TYPE), IncidentType)
def test_objects_factory():
    # Connections are currently not supported by auto-detect; the explicit path
    # should still resolve to a Connection instance.
    assert isinstance(path_to_pack_object(CONNECTION), Connection)
def test_objects_factory(self, datadir, file: str):
    # Each parametrized data file should be resolved to an OldIndicatorType instance.
    assert isinstance(path_to_pack_object(datadir[file]), OldIndicatorType)
def test_objects_factory(self, datadir):
    # The sample script yml should be resolved to a Script instance.
    assert isinstance(path_to_pack_object(datadir["FindSimilarIncidentsByText.yml"]), Script)
def test_objects_factory():
    # The factory should resolve an incident-field path to an IncidentField instance.
    assert isinstance(path_to_pack_object(INCIDENT_FIELD), IncidentField)
def test_objects_factory():
    # The factory should resolve a changelog path to a ChangeLog instance.
    # (CHNAGELOG is misspelled where it is defined; the name is external to this block.)
    assert isinstance(path_to_pack_object(CHNAGELOG), ChangeLog)
def test_objects_factory():
    # The factory should resolve a playbook path to a Playbook instance.
    assert isinstance(path_to_pack_object(PLAYBOOK), Playbook)
def test_objects_factory(self, datadir):
    # The sample integration yml should be resolved to an Integration instance.
    assert isinstance(path_to_pack_object(datadir["sample.yml"]), Integration)