def __init__(self, name: str = '', contribution: Union[str, None] = None, description: str = '',
             author: str = '', gh_user: str = '', create_new: bool = True,
             pack_dir_name: Union[str, None] = None, update_type: str = '', release_notes: str = '',
             detected_content_items: Union[list, None] = None, base_dir: Union[str, None] = None,
             no_pipenv: bool = False):
    """Initializes a ContributionConverter instance

    Note that when receiving a contribution that is an update to an existing pack that the values
    of 'name', 'description' and 'author' will be those of the existing pack.

    Args:
        name (str, optional): The name of the pack. Defaults to ''.
        contribution (Union[str, None], optional): The path to the contribution zipfile.
            Defaults to None.
        description (str, optional): The description for the contribution. Defaults to ''.
        author (str, optional): The author of the contribution. Defaults to ''.
        gh_user (str, optional): The github username of the person contributing. Defaults to ''.
        create_new (bool, optional): Whether the contribution is intended as a new pack. When the
            contribution is intended as an update to an existing pack, the value passed should be
            False. Defaults to True.
        pack_dir_name (Union[str, None], optional): Explicitly pass the name of the pack directory.
            Only useful when updating an existing pack and the pack's directory is not equivalent
            to the value returned from running `self.format_pack_dir_name(name)`
        update_type (str, optional): The type of the update; falls back to 'revision' when empty.
        release_notes (str, optional): Release notes text for the contribution. Defaults to ''.
        detected_content_items (list, optional): Content items detected in the contribution;
            stored as an empty list when not provided.
        base_dir (Union[str, None], optional): Used to explicitly pass the path to the top-level
            directory of the local content repo. If no value is passed, the `get_content_path()`
            function is used to determine the path. Defaults to None.
        no_pipenv (bool, optional): Stored for downstream use; presumably skips pipenv
            environment creation — TODO confirm against callers. Defaults to False.
    """
    self.configuration = Configuration()
    self.contribution = contribution
    self.description = description
    self.author = author
    self.update_type = update_type or 'revision'
    self.release_notes = release_notes
    # `or []` gives each instance its own fresh list (avoids shared-mutable-default pitfalls)
    self.detected_content_items = detected_content_items or []
    self.gh_user = gh_user
    self.contrib_conversion_errs: List[str] = []
    self.create_new = create_new
    self.no_pipenv = no_pipenv
    base_dir = base_dir or get_content_path()
    self.packs_dir_path = os.path.join(base_dir, 'Packs')
    # exist_ok avoids the check-then-create race of the previous isdir guard
    os.makedirs(self.packs_dir_path, exist_ok=True)
    self.name = name
    self.dir_name = pack_dir_name or ContributionConverter.format_pack_dir_name(name)
    if create_new:
        # make sure that it doesn't conflict with an existing pack directory
        self.dir_name = self.ensure_unique_pack_dir_name(self.dir_name)
    self.pack_dir_path = os.path.join(self.packs_dir_path, self.dir_name)
    os.makedirs(self.pack_dir_path, exist_ok=True)
    self.readme_files: List[str] = []
def __init__(self, file_path: str, ignored_errors=None, print_as_warnings=False):
    """Set up the validator, deriving the pack and node_modules paths from the README path."""
    super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings)
    readme_path = Path(file_path)
    self.content_path = get_content_path()
    self.file_path = readme_path
    self.pack_path = readme_path.parent
    self.node_modules_path = self.content_path / Path('node_modules')
def __init__(self, file_path: str, ignored_errors=None, print_as_warnings=False,
             suppress_print=False, json_file_path=None):
    """Set up the validator and cache the README file's text for later checks."""
    super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings,
                     suppress_print=suppress_print, json_file_path=json_file_path)
    readme_path = Path(file_path)
    self.content_path = get_content_path()
    self.file_path = readme_path
    self.pack_path = readme_path.parent
    self.node_modules_path = self.content_path / Path('node_modules')
    # read once up front so validations operate on an in-memory copy
    self.readme_content = readme_path.read_text()
def vulture_error_formatter(self, errors: Dict, json_contents: Dict) -> None:
    """Format vulture error strings to JSON format and add them to the json_contents

    Args:
        errors (Dict): A dictionary containing vulture error strings
        json_contents (Dict): The JSON file outputs
    """
    raw_messages = errors.get('messages', '')
    lines = raw_messages.split('\n') if raw_messages else []
    content_path = get_content_path()
    severity = errors.get('type')
    # vulture lines look like "<file>:<line>:<details>" — split on the first two colons only
    for entry in filter(None, lines):
        file_name, line_number, error_contents = entry.split(':', 2)
        file_path = self.get_full_file_path_for_vulture(file_name, content_path)
        output = {
            'linter': 'vulture',
            'severity': severity,
            'message': error_contents.lstrip(),
            'line-number': line_number,
        }
        self.add_to_json_outputs(output, file_path, json_contents)
def convert_contribution_to_pack(self):
    """Create a Pack in the content repo from the contents of a contribution zipfile

    Pulls metadata from the zip's metadata.json (cmd-line values win), derives a
    unique pack directory name, unpacks the archive, relocates entity directories
    to their standard content-repo locations, writes pack base files and
    pack_metadata.json, then runs 'format' on the result. Failures are echoed
    rather than raised.
    """
    try:
        packs_dir = os.path.join(get_content_path(), 'Packs')
        metadata_dict = {}
        with zipfile.ZipFile(self.contribution) as zipped_contrib:
            with zipped_contrib.open('metadata.json') as metadata_file:
                click.echo(
                    f'Pulling relevant information from {metadata_file.name}',
                    color=LOG_COLORS.NATIVE)
                metadata = json.loads(metadata_file.read())
                # a name passed on the cmd line should take precedence over one pulled
                # from contribution metadata
                pack_name = self.name or self.format_pack_dir_name(
                    metadata.get('name', 'ContributionPack'))
                # a description passed on the cmd line should take precedence over one pulled
                # from contribution metadata
                metadata_dict[
                    'description'] = self.description or metadata.get(
                        'description')
                metadata_dict['name'] = pack_name
                metadata_dict['author'] = metadata.get('author', '')
                metadata_dict['support'] = metadata.get('support', '')
                metadata_dict['url'] = metadata.get('supportDetails',
                                                    {}).get('url', '')
                metadata_dict['email'] = metadata.get(
                    'supportDetails', {}).get('email', '')
                # list-valued fields fall back to [] when missing or falsy
                metadata_dict['categories'] = metadata.get(
                    'categories') if metadata.get('categories') else []
                metadata_dict['tags'] = metadata.get(
                    'tags') if metadata.get('tags') else []
                metadata_dict['useCases'] = metadata.get(
                    'useCases') if metadata.get('useCases') else []
                metadata_dict['keywords'] = metadata.get(
                    'keywords') if metadata.get('keywords') else []
        # if a pack dir with this name already exists, bump a trailing
        # "V<digit>" suffix (or append "V2") until the name is free
        while os.path.exists(os.path.join(packs_dir, pack_name)):
            click.echo(
                f'Modifying pack name because pack {pack_name} already exists in the content repo',
                color=LOG_COLORS.NATIVE)
            if len(pack_name) >= 2 and pack_name[-2].lower(
            ) == 'v' and pack_name[-1].isdigit():
                # increment by one
                pack_name = pack_name[:-1] + str(int(pack_name[-1]) + 1)
            else:
                pack_name += 'V2'
            click.echo(f'New pack name is "{pack_name}"',
                       color=LOG_COLORS.NATIVE)
        pack_dir = os.path.join(packs_dir, pack_name)
        os.mkdir(pack_dir)
        shutil.unpack_archive(filename=self.contribution,
                              extract_dir=pack_dir)
        pack_subdirectories = get_child_directories(pack_dir)
        for pack_subdir in pack_subdirectories:
            basename = os.path.basename(pack_subdir)
            if basename in ENTITY_TYPE_TO_DIR:
                # move entity dirs to their standard content-repo directory
                # names (per the ENTITY_TYPE_TO_DIR mapping)
                dst_name = ENTITY_TYPE_TO_DIR.get(basename)
                src_path = os.path.join(pack_dir, basename)
                dst_path = os.path.join(pack_dir, dst_name)
                content_item_dir = shutil.move(src_path, dst_path)
                if basename in {SCRIPT, AUTOMATION, INTEGRATION}:
                    # unified YAMLs get split into package (directory) format
                    self.content_item_to_package_format(content_item_dir,
                                                        del_unified=True)
        # create pack's base files
        self.full_output_path = pack_dir
        self.create_pack_base_files()
        metadata_dict = Initiator.create_metadata(fill_manually=False,
                                                  data=metadata_dict)
        metadata_path = os.path.join(self.full_output_path,
                                     'pack_metadata.json')
        with open(metadata_path, 'w') as pack_metadata_file:
            json.dump(metadata_dict, pack_metadata_file, indent=4)
        # remove metadata.json file
        os.remove(os.path.join(pack_dir, 'metadata.json'))
        click.echo(
            f'Executing \'format\' on the restructured contribution zip files at "{pack_dir}"'
        )
        format_manager(input=pack_dir)
    except Exception as e:
        # top-level boundary: report the failure instead of propagating it
        click.echo(
            f'Creating a Pack from the contribution zip failed with error: {e}\n {traceback.format_exc()}',
            color=LOG_COLORS.RED)
    finally:
        # surface any per-item conversion errors collected along the way
        if self.contrib_conversion_errs:
            click.echo(
                'The following errors occurred while converting unified content YAMLs to package structure:'
            )
            click.echo(
                textwrap.indent('\n'.join(self.contrib_conversion_errs),
                                '\t'))
def __init__(self, file_path: str):
    """Record the README path and the derived pack / node_modules paths."""
    readme_path = Path(file_path)
    self.content_path = get_content_path()
    self.file_path = readme_path
    self.pack_path = readme_path.parent
    self.node_modules_path = self.content_path / Path('node_modules')
def add_node_env_vars():
    """Prepend the content repo's node_modules directory to the NODE_PATH env var."""
    node_modules_path = get_content_path() / Path('node_modules')
    existing_node_path = os.getenv("NODE_PATH", "")
    os.environ['NODE_PATH'] = f'{node_modules_path}{os.pathsep}{existing_node_path}'