def create_distribution_bundle_file():
    '''
    Creates a distribution bundle file as zip archive.

    Runs the package's "build:export" (or fallback "build") script if one
    is defined in "SCOPE['scripts']". On build success — or when no build
    script exists at all — every path listed in "SCOPE['files']" plus
    "SCOPE['main']" is packed into a temporary zip archive; directories are
    added recursively, skipping ignored files.

    Returns a "FileHandler" wrapping the created archive, or "None" if
    there is nothing to bundle or the build script failed.
    '''
    if not SCOPE['scripts'].get('build:export', SCOPE['scripts'].get(
        'build', False
    )) or Platform.run('/usr/bin/env yarn %s' % (
        'build:export' if SCOPE['scripts'].get('build:export') else 'build'
    ), error=False, log=True)['return_code'] == 0:
        __logger__.info('Pack to a zip archive.')
        distribution_bundle_file = FileHandler(
            location=make_secure_temporary_file()[1])
        current_directory_path = FileHandler()._path
        # NOTE: Copy the list so that appending "main" below does not
        # mutate the shared "SCOPE['files']" entry in place.
        file_path_list = list(SCOPE.get('files', []))
        if 'main' in SCOPE:
            file_path_list.append(SCOPE['main'])
        if not file_path_list:
            return None
        with zipfile.ZipFile(
            distribution_bundle_file.path, 'w'
        ) as zip_file:
            for file_path in file_path_list:
                file = FileHandler(location=file_path)
                __logger__.debug(
                    'Add "%s" to distribution bundle.', file.path)
                zip_file.write(file._path, file.name)
                if file.is_directory() and not is_file_ignored(file):
                    def add(sub_file):
                        '''Adds a nested file to the zip archive.'''
                        if is_file_ignored(sub_file):
                            return None
                        __logger__.debug(
                            'Add "%s" to distribution bundle.',
                            sub_file.path)
                        # Store the entry relative to the current working
                        # directory.
                        zip_file.write(sub_file._path, sub_file._path[len(
                            current_directory_path):])
                        return True
                    file.iterate_directory(function=add, recursive=True)
        return distribution_bundle_file
def main():
    '''Generates a python api documentation website.'''
    # Only projects shipping a sphinx "documentation" folder are processed.
    if FileHandler('documentation').is_directory():
        current_working_directory = FileHandler()
        index_file = FileHandler('documentation/source/index.rst')
        # Collected module names; appended to the index toctree below.
        modules_to_document = '\ninit'
        # Write a stub "init.rst" documenting the package itself via the
        # sphinx "automodule" directive.
        FileHandler(
            location='%sinit.rst' % index_file.directory.path
        ).content = (
            (79 * '=') + '\n{name}\n' + (79 * '=') +
            '\n\n.. automodule::' + ' {name}\n :members:'
        ).format(name=current_working_directory.name)
        # Generate one "rst" stub per sub package and per module inside it.
        for file in FileHandler():
            if Module.is_package(file.path):
                modules_to_document += '\n %s' % file.name
                FileHandler(location='%s%s.rst' % (
                    index_file.directory.path, file.name
                )).content = (
                    (79 * '=') + '\n{name}.{package}\n' + (79 * '=') +
                    '\n\n.. automodule:: {name}.{package}\n'
                    ' :members:'
                ).format(
                    name=current_working_directory.name, package=file.name)
                for module in __get_all_modules__(file.path):
                    modules_to_document += '\n %s.%s' % (file.name, module)
                    FileHandler(location='%s%s.%s.rst' % (
                        index_file.directory.path, file.name, module
                    )).content = (
                        (79 * '=') + '\n{name}.{package}.{module}\n' +
                        (79 * '=') + '\n\n.. automodule:: {name}.{package}.'
                        '{module}\n :members:'
                    ).format(
                        name=current_working_directory.name,
                        package=file.name, module=module)
        # Replace the stale module listing at the end of the index file
        # with the freshly collected one.
        index_file.content = regularExpression.compile(
            '\n ([a-z][a-zA-Z]+\n)+$', regularExpression.DOTALL
        ).sub(modules_to_document, index_file.content)
        Platform.run('/usr/bin/env git add --all', error=False, log=True)
        FileHandler('documentation').change_working_directory()
        makefile = FileHandler('Makefile')
        # NOTE: The "python3.5" line below is a macro pair processed by the
        # project's macro tool; it must stay directly above its python2
        # counterpart.
        # # python3.5         FileHandler('MakefilePython3').copy(makefile)
        FileHandler('MakefilePython2').copy(makefile)
        Platform.run(
            command='make html', native_shell=True, error=False, log=True)
        makefile.remove_file()
        # Publish the rendered html and drop remaining build artifacts.
        FileHandler('build/html').path = '../apiDocumentation'
        FileHandler('build').remove_deep()
def __init__(
    self, file=None, queue=None, support_multiprocessing=False,
    force_string=False
):
# #
    '''
        Saves the file path in the current instance. If "file" is "None" \
        an instance variable is used as buffer.

        Examples:

        >>> Buffer(
        ...     file=__test_folder__.path + '__init__'
        ... ).file # doctest: +ELLIPSIS
        Object of "Handler" with path "...__init__" ...

        >>> Buffer(
        ...     queue=True, support_multiprocessing=True
        ... ).queue # doctest: +ELLIPSIS
        <multiprocessing.queues.Queue object at ...>
    '''
    # # python3.5         pass
    self.force_string = force_string
    '''Saves the last written input.'''
    self.last_written = ''
    '''
        A lock object to guarantee that no other thread read from buffer \
        during truncating or writing.
    '''
    # NOTE: Previously an unconditional "threading.Lock()" assignment later
    # in this method clobbered the multiprocessing lock, making the
    # "support_multiprocessing" branch dead code. The lock type is now
    # chosen exactly once.
    if support_multiprocessing:
        self._lock = multiprocessing.Lock()
    else:
        self._lock = threading.Lock()
    '''Saves the file handler instance for writing content into.'''
    self.file = None
    '''Saves the queue instance for writing content into.'''
    self.queue = None
    if queue is not None:
        # Any truthy non queue value (e.g. "True") requests a fresh queue;
        # an already compatible queue object is adopted as given.
        self.queue = native_queue.Queue()
        if support_multiprocessing:
            self.queue = multiprocessing.Queue()
        if(builtins.isinstance(queue, native_queue.Queue) or
           support_multiprocessing and
           builtins.isinstance(queue, multiprocessing.queues.Queue)):
            self.queue = queue
    elif file is not None:
        self.file = FileHandler(location=file)
    '''Saves the current buffer content.'''
# # python3.5
# #     self._content = ''
    self._content = builtins.str() if self.force_string else ''
# #
def _determine_code_file(self, path):
# #
    '''
        Determines a code file which could make sense to run. It could \
        depend on inputs which where made to this class. Searches in the \
        current working directory.

        Examples:

        >>> FileHandler('temp_determine_code_file_main.py').content = ''

        >>> run = Run()

        >>> run._determine_code_file(path='') # doctest: +ELLIPSIS
        Object of "Handler" with path "..." (type: file).

        >>> run._command_line_arguments = ['--help']
        >>> run._determine_code_file('not_existing')
        False

        >>> run._command_line_arguments = ['--help']
        >>> run._determine_code_file('') # doctest: +ELLIPSIS
        Object of "Handler" with path "..." and initially given path "...
    '''
    if not path:
        # No explicit path: every command line argument belongs to the
        # program to run; scan the current working directory instead.
        if not self._command_line_arguments:
            self._command_line_arguments = sys.argv[1:]
        return self._search_supported_file_in_current_working_directory()
    # An explicit path was given, so the first command line argument is
    # consumed as that path and only the rest is forwarded.
    if not self._command_line_arguments:
        self._command_line_arguments = sys.argv[2:]
    code_file = FileHandler(location=path)
    runnable = code_file.is_file() and self._find_informations_by_extension(
        extension=code_file.extension, code_file=code_file)
    if runnable:
        return code_file
    return self._search_supported_file_by_path(path=code_file.path)
def _initialize(
    self, location=None, skip_self_file=False, extension='',
    first_line_regex_pattern='(?P<constant_version_pattern>^#!.*?'
                             '(?P<current_version>[a-zA-Z0-9\.]+))\n',
    one_line_regex_pattern='\n(?P<prefix># #) '
                           '(?P<alternate_version>[^\n ]+) ?'
                           '(?P<alternate_text>.*)\n'
                           '(?P<current_text>.*)\n',
    more_line_regex_pattern='(?s)\n(?P<prefix># #) '
                            '(?P<alternate_version>[^ ]+)\n'
                            '(?P<alternate_text>'
                            '(?:(?:# # .*?\n)|'  # in brackets
                            '(?:# #\n))+'  # in brackets
                            ')(?P<current_text>.*?\n)# #(?:\n|\Z)',
    encoding=ENCODING, dry=False, _exclude_locations=(),
    _new_version='__determine_useful__', **keywords
):
# #
    '''Triggers the conversion process with given arguments.'''

    # # # region properties

    # NOTE(review): the explicit assignments below read "self.location",
    # "self.encoding" etc. before assigning them here — presumably an
    # initializer decorator pre-populates them from the arguments; confirm
    # against the decorator used at the definition site.
    '''Current location for deep code parsing.'''
    self.location = FileHandler(
        self.location, encoding=self.encoding, must_exist=True)
    '''NOTE: This additional declaration is needed to trigger setter.'''
    self.exclude_locations = self._exclude_locations
    '''
        New version to convert given files to. NOTE: This property can \
        only determined after all properties are set. This additional \
        declaration is needed to trigger setter.
    '''
    self.new_version = self._new_version
    '''Version of the giving source files.'''
    self._current_version = ''

    # # # endregion

    # Start converting immediately unless running inside the test suite or
    # no target version could be determined.
    if not __test_mode__ and self._new_version:
        self._convert_path()
    return self
class Buffer(Class, LoggingStreamHandler):

    '''
        This class represents a layer for writing and reading to an output \
        buffer realized as file, queue or variable.

        **file**                    - a file path or file handler to use as \
                                      buffer

        **queue**                   - a queue object to use as buffer

        **support_multiprocessing** - indicates whether buffer read and write \
                                      requests should be multiprocessing save

        Examples:

        >>> buffer = Buffer(file=__test_folder__.path + 'Buffer')

        >>> buffer.clear() # doctest: +ELLIPSIS
        '...'

        >>> print('hans', file=buffer, end='+')

        >>> buffer.content
        'hans+'
    '''

    # region dynamic methods

    # # region public

    # # # region special

    @JointPoint
# # python3.5
# #     def __init__(
# #         self: Self, file=None, queue=None, support_multiprocessing=False
# #     ) -> None:
    def __init__(
        self, file=None, queue=None, support_multiprocessing=False,
        force_string=False
    ):
# #
        '''
            Saves the file path in the current instance. If "file" is "None" \
            an instance variable is used as buffer.

            Examples:

            >>> Buffer(
            ...     file=__test_folder__.path + '__init__'
            ... ).file # doctest: +ELLIPSIS
            Object of "Handler" with path "...__init__" ...

            >>> Buffer(
            ...     queue=True, support_multiprocessing=True
            ... ).queue # doctest: +ELLIPSIS
            <multiprocessing.queues.Queue object at ...>
        '''
        # # python3.5         pass
        self.force_string = force_string
        '''Saves the last written input.'''
        self.last_written = ''
        if support_multiprocessing:
            self._lock = multiprocessing.Lock()
        '''Saves the file handler instance for writing content into.'''
        self.file = None
        '''Saves the queue instance for writing content into.'''
        self.queue = None
        if queue is not None:
            # Any truthy non queue value (e.g. "True") requests a fresh
            # queue; an already compatible queue object is adopted as given.
            self.queue = native_queue.Queue()
            if support_multiprocessing:
                self.queue = multiprocessing.Queue()
            if(builtins.isinstance(queue, native_queue.Queue) or
               support_multiprocessing and
               builtins.isinstance(queue, multiprocessing.queues.Queue)):
                self.queue = queue
        elif file is not None:
            self.file = FileHandler(location=file)
        '''
            A lock object to guarantee that no other thread read from buffer \
            during truncating or writing.
        '''
        # NOTE(review): this unconditionally replaces the multiprocessing
        # lock created above when "support_multiprocessing" is set — confirm
        # whether that is intended.
        self._lock = threading.Lock()
        '''Saves the current buffer content.'''
# # python3.5
# #         self._content = ''
        self._content = builtins.str() if self.force_string else ''
# #

    @JointPoint
# # python3.5     def __repr__(self: Self) -> builtins.str:
    def __repr__(self):
        '''
            Invokes if this object should describe itself by a string.

            Examples:

            >>> repr(Buffer())
            'Object of "Buffer" (memory buffered) with content "".'

            >>> buffer = Buffer(file=__test_folder__.path + '__repr__')
            >>> buffer.write('hans') # doctest: +ELLIPSIS
            Object of "Buffer" (file buffered with "...__repr__" (type: file...

            >>> repr(Buffer(queue=True))
            'Object of "Buffer" (queue buffered) with content "".'

            >>> repr(Buffer(queue=native_queue.Queue()))
            'Object of "Buffer" (queue buffered) with content "".'
        '''
        buffer_type = 'memory'
        type_addition = ''
        if self.file:
            buffer_type = 'file'
            type_addition = ' with "%s"' % builtins.repr(self.file)
        elif self.queue:
            buffer_type = 'queue'
# # python3.5
# #         pass
        if self.force_string:
            return (
                'Object of "{class_name}" ({type} buffered{type_addition})'
                ' with content "{content}".'.format(
                    class_name=self.__class__.__name__, type=buffer_type,
                    type_addition=type_addition,
                    content=convert_to_unicode(self.content)))
# #
        return (
            'Object of "{class_name}" ({type} buffered{type_addition}) '
            'with content "{content}".'.format(
                class_name=self.__class__.__name__, type=buffer_type,
                type_addition=type_addition, content=self.content))

    @JointPoint
# # python3.5     def __str__(self: Self) -> builtins.str:
    def __str__(self):
        '''
            Invokes if this object is tried to interpreted as string.

            Examples:

            >>> str(Buffer().write('test'))
            'test'
        '''
        return self.content

    @JointPoint
# # python3.5     def __bool__(self: Self) -> builtins.bool:
    def __nonzero__(self):
        '''
            Invokes if this object is tried to interpreted as boolean.

            Examples:

            >>> bool(Buffer().write('test'))
            True

            >>> bool(Buffer())
            False
        '''
        return builtins.bool(self.content)

    # # # endregion

    # # endregion

    # # region getter

    @JointPoint
# # python3.5     def get_content(self: Self) -> builtins.str:
    def get_content(self):
        '''
            Getter for the current content.

            Examples:

            >>> Buffer().write('test').content
            'test'

            >>> Buffer(queue=True).write('test').content
            'test'
        '''
        with self._lock:
            if self.file is not None:
                self._content = self.file.content
            elif self.queue:
                # Drain the queue to concatenate its chunks, then re-enqueue
                # every chunk so reading does not consume the buffer.
                self._content = ''
                temp_buffer = []
                while not self.queue.empty():
# # python3.5
# #                     temp_buffer.append(self.queue.get())
                    temp_buffer.append(convert_to_unicode(
                        self.queue.get()))
# #
                    self._content += temp_buffer[-1]
                for content in temp_buffer:
                    self.queue.put(content)
# # python3.5
# #             pass
            if self.force_string and builtins.isinstance(
                self._content, builtins.unicode
            ):
                self._content = convert_to_string(self._content)
# #
        return self._content

    # # endregion

    @JointPoint
# # python3.5     def write(self: Self, content: builtins.str) -> Self:
    def write(self, content):
        '''
            Writes content to the current output buffer file. If the current \
            given file "Buffer.file" doesn't exists it will be created.

            **content** - content to write into current buffer instance

            Examples:

            >>> buffer = Buffer(file=__test_folder__.path + 'write')
            >>> buffer.clear() # doctest: +ELLIPSIS
            '...'
            >>> buffer.write('hans') # doctest: +ELLIPSIS
            Object of "Buffer" (file buffered with "...write...nt "hans".
            >>> buffer.content
            'hans'

            >>> buffer = Buffer()
            >>> buffer.write('hans')
            Object of "Buffer" (memory buffered) with content "hans".
            >>> buffer.content
            'hans'
        '''
# # python3.5
# #         pass
        if self.force_string and builtins.isinstance(
            content, builtins.unicode
        ):
            content = convert_to_string(content)
# #
        with self._lock:
            self.last_written = content
            if self.file is not None:
                self.file.content += self.last_written
            elif self.queue:
                self.queue.put(self.last_written)
            else:
                self._content += self.last_written
        return self

    @JointPoint
# # python3.5     def flush(self: Self) -> Self:
    def flush(self):
        '''
            Flush methods usually called to guarantee that all objects putted \
            to "write()" are materialized on their provided media. This \
            implementation exists only for compatibility reasons.

            Examples:

            >>> Buffer().flush()
            Object of "Buffer" (memory buffered) with content "".
        '''
        return self

    @JointPoint
# # python3.5     def clear(self: Self, delete=True) -> builtins.str:
    def clear(self, delete=True):
        '''
            Removes the current output buffer content.

            **delete** - indicates whether a file buffer should be deleted \
                         or truncated

            Examples:

            >>> buffer = Buffer(file=__test_folder__.path + 'clear')
            >>> buffer.clear() # doctest: +ELLIPSIS
            '...'
            >>> buffer.write('hans') # doctest: +ELLIPSIS
            Objec...(file buffered with "...clear...with content "hans".
            >>> buffer.clear(False)
            'hans'
            >>> buffer.content
            ''

            >>> buffer = Buffer()
            >>> buffer.write('hans')
            Object of "Buffer" (memory buffered) with content "hans".
            >>> buffer.clear()
            'hans'
            >>> buffer.content
            ''

            >>> buffer = Buffer(queue=True)
            >>> buffer.clear()
            ''
            >>> buffer.write('hans')
            Object of "Buffer" (queue buffered) with content "hans".
            >>> buffer.write('hans')
            Object of "Buffer" (queue buffered) with content "hanshans".
            >>> buffer.clear()
            'hanshans'
            >>> buffer.content
            ''
        '''
        with self._lock:
            if self.file is not None:
                content = self.file.content
                if delete:
                    self.file.remove_file()
                else:
                    self.file.content = ''
            elif self.queue:
                # Draining the queue is destructive on purpose here.
                content = ''
                while not self.queue.empty():
                    content += self.queue.get()
            else:
                content = self._content
                self._content = ''
# # python3.5
# #         pass
        if self.force_string:
            self._content = builtins.str()
            content = convert_to_string(content)
# #
        return content
def main():
    '''Entry point for this script.'''
    global API_DOCUMENTATION_PATH_SUFFIX, CONTENT, SCOPE
    if markdown is None:
        __logger__.critical(
            "You haven't install a suitable markdown version. Documentation "
            "couldn't be updated.")
        return None
    CommandLine.argument_parser(module_name=__name__)
    # Only publish when currently on "master" and a "gh-pages" branch
    # exists to publish to.
    if '* master' in Platform.run('/usr/bin/env git branch')[
        'standard_output'
    ] and 'gh-pages' in Platform.run('/usr/bin/env git branch --all')[
        'standard_output'
    ]:
        package_file = FileHandler('package.json')
        if package_file.is_file():
            SCOPE = json.loads(package_file.content)
        API_DOCUMENTATION_PATH_SUFFIX = API_DOCUMENTATION_PATH_SUFFIX.format(
            **SCOPE)
        # Derive the checkout folder name from the repository url.
        temporary_documentation_folder = FileHandler(
            location=DOCUMENTATION_REPOSITORY[DOCUMENTATION_REPOSITORY.find(
                '/'
            ) + 1:-1])
        if temporary_documentation_folder:
            temporary_documentation_folder.remove_deep()
        __logger__.info('Compile all readme markdown files to html5.')
        FileHandler().iterate_directory(function=add_readme, recursive=True)
        CONTENT = markdown.markdown(
            CONTENT, output='html5',
            extensions=builtins.list(MARKDOWN_EXTENSIONS))
        distribution_bundle_file = create_distribution_bundle_file()
        if distribution_bundle_file is not None:
            data_location = FileHandler(location=DATA_PATH)
            data_location.make_directories()
            distribution_bundle_file.directory = data_location
        # "has_api_documentation" ends up "True" only when a "document"
        # script exists AND it ran successfully.
        has_api_documentation = SCOPE['scripts'].get('document', False)
        if has_api_documentation:
            has_api_documentation = Platform.run(
                '/usr/bin/env yarn document', error=False, log=True
            )['return_code'] == 0
        if Platform.run(
            ('/usr/bin/env git checkout gh-pages', '/usr/bin/env git pull'),
            error=False, log=True
        )['return_code'][0] == 0:
            # Park the current api documentation under a hidden name so it
            # can be cleaned up after the new build.
            existing_api_documentation_directory = FileHandler(
                location='.%s' % API_DOCUMENTATION_PATH[1])
            if existing_api_documentation_directory.is_directory():
                existing_api_documentation_directory.remove_deep()
            FileHandler(location=API_DOCUMENTATION_PATH[0]).path = \
                existing_api_documentation_directory
            local_documentation_website_location = FileHandler(
                location='../%s' % temporary_documentation_folder.name)
            if local_documentation_website_location.is_directory():
                # Reuse the locally checked out documentation website
                # instead of cloning it again.
                temporary_documentation_folder.make_directories()
                local_documentation_website_location.iterate_directory(
                    function=copy_repository_file, recursive=True,
                    source=local_documentation_website_location,
                    target=temporary_documentation_folder)
                node_modules_directory = FileHandler(location='%s%s' % (
                    local_documentation_website_location.path,
                    'node_modules'))
                if node_modules_directory.is_directory():
                    temporary_documentation_node_modules_directory = \
                        FileHandler('%snode_modules' %
                            temporary_documentation_folder.path)
                    '''
                        NOTE: Symlinking doesn't work since some node
                        modules need the right absolute location to work.

                        node_modules_directory.make_symbolic_link(
                            target='%s%s' % (
                                temporary_documentation_folder,
                                'node_modules')
                        )
                        return_code = 0

                        NOTE: Coping complete "node_modules" folder takes
                        to long.

                        node_modules_directory.copy(target='%s%s' % (
                            temporary_documentation_folder, 'node_modules'))
                        return_code = 0

                        NOTE: Mounting "node_modules" folder needs root
                        privileges.

                        temporary_documentation_node_modules_directory\
                            .make_directory(right=777)
                        return_code = Platform.run(
                            "/usr/bin/env sudo mount --bind --options ro "
                            "'%s' '%s'" % (
                                node_modules_directory.path,
                                temporary_documentation_node_modules_directory.path
                            ), native_shell=True, error=False, log=True
                        )['return_code']
                    '''
                    return_code = Platform.run(
                        "/usr/bin/env cp --dereference --recursive --reflink=auto '%s' '%s'" % (
                            node_modules_directory.path,
                            temporary_documentation_node_modules_directory.path
                        ), native_shell=True, error=False, log=True
                    )['return_code']
                else:
                    return_code = Platform.run(
                        '/usr/bin/env yarn --production=false',
                        native_shell=True, error=False, log=True
                    )['return_code']
                if return_code == 0:
                    current_working_directory_backup = FileHandler()
                    temporary_documentation_folder.change_working_directory()
                    return_code = Platform.run(
                        '/usr/bin/env yarn clear', native_shell=True,
                        error=False, log=True
                    )['return_code']
                    current_working_directory_backup.change_working_directory()
            else:
                # No local checkout available: clone the documentation
                # repository and install its dependencies.
                return_code = Platform.run((
                    'unset GIT_WORK_TREE; /usr/bin/env git clone %s;'
                    'yarn --production=false'
                ) % DOCUMENTATION_REPOSITORY, native_shell=True, error=False,
                    log=True)['return_code']
            if return_code == 0:
                # NOTE(review):
                # "temporary_documentation_node_modules_directory" is only
                # bound when a local "node_modules" folder was found above;
                # on every other successful path this call raises
                # "NameError" — confirm whether those paths can be reached.
                generate_and_push_new_documentation_page(
                    temporary_documentation_folder,
                    distribution_bundle_file,
                    has_api_documentation,
                    temporary_documentation_node_modules_directory)
            if existing_api_documentation_directory.is_directory():
                existing_api_documentation_directory.remove_deep()
def generate_and_push_new_documentation_page(
    temporary_documentation_folder,
    distribution_bundle_file,
    has_api_documentation,
    temporary_documentation_node_modules_directory
):
# #
    '''
        Renders a new index.html file and copies new assets to generate a new \
        documentation homepage.
    '''
    global BUILD_DOCUMENTATION_PAGE_COMMAND
    __logger__.info('Update documentation design.')
    if distribution_bundle_file:
        # Place the bundle archive inside the build tree and unpack it so
        # its files are served by the documentation page as well.
        new_distribution_bundle_file = FileHandler(location='%s%s%s' % (
            temporary_documentation_folder.path, DOCUMENTATION_BUILD_PATH,
            DISTRIBUTION_BUNDLE_FILE_PATH))
        new_distribution_bundle_file.directory.make_directories()
        distribution_bundle_file.path = new_distribution_bundle_file
        new_distribution_bundle_directory = FileHandler(location='%s%s%s' % (
            temporary_documentation_folder.path, DOCUMENTATION_BUILD_PATH,
            DISTRIBUTION_BUNDLE_DIRECTORY_PATH))
        new_distribution_bundle_directory.make_directories()
        zipfile.ZipFile(distribution_bundle_file.path).extractall(
            new_distribution_bundle_directory.path)
    favicon = FileHandler(location='favicon.png')
    if favicon:
        favicon.copy(target='%s/source/image/favicon.ico' %
            temporary_documentation_folder.path)
    # Translate "documentationWebsite" options from camel case into upper
    # case template parameter names.
    parameter = builtins.dict(builtins.map(lambda item: (
        String(item[0]).camel_case_to_delimited.content.upper(), item[1]
    ), SCOPE.get('documentationWebsite', {}).items()))
    if 'TAGLINE' not in parameter and 'description' in SCOPE:
        parameter['TAGLINE'] = SCOPE['description']
    if 'NAME' not in parameter and 'name' in SCOPE:
        parameter['NAME'] = SCOPE['name']
    __logger__.debug('Found parameter "%s".', json.dumps(parameter))
    api_documentation_path = None
    if has_api_documentation:
        api_documentation_path = '%s%s' % (
            API_DOCUMENTATION_PATH[1], API_DOCUMENTATION_PATH_SUFFIX)
        # Fall back to the bare path when the suffixed location is missing.
        if not FileHandler(location='%s%s' % (
            FileHandler().path, api_documentation_path
        )).is_directory():
            api_documentation_path = API_DOCUMENTATION_PATH[1]
    parameter.update({
        'CONTENT': CONTENT,
        'CONTENT_FILE_PATH': None,
        'RENDER_CONTENT': False,
        'API_DOCUMENTATION_PATH': api_documentation_path,
        'DISTRIBUTION_BUNDLE_FILE_PATH': DISTRIBUTION_BUNDLE_FILE_PATH if (
            distribution_bundle_file and distribution_bundle_file.is_file()
        ) else None
    })
    # Escape "!" in string values before serializing them for the page
    # build command.
# # python3.5
# #     parameter = Dictionary(parameter).convert(
# #         value_wrapper=lambda key, value: value.replace(
# #             '!', '#%%%#'
# #         ) if builtins.isinstance(value, builtins.str) else value
# #     ).content
    parameter = Dictionary(parameter).convert(
        value_wrapper=lambda key, value: value.replace(
            '!', '#%%%#'
        ) if builtins.isinstance(value, builtins.unicode) else value
    ).content
# #
    if __logger__.isEnabledFor(logging.DEBUG):
        # Insert "-debug" directly before the last command element.
        BUILD_DOCUMENTATION_PAGE_COMMAND = \
            BUILD_DOCUMENTATION_PAGE_COMMAND[:-1] + [
                '-debug'
            ] + BUILD_DOCUMENTATION_PAGE_COMMAND[-1:]
    serialized_parameter = json.dumps(parameter)
    parameter_file = FileHandler(location=make_secure_temporary_file(
        '.json'
    )[1])
    parameter_file.content = \
        BUILD_DOCUMENTATION_PAGE_PARAMETER_TEMPLATE.format(
            serializedParameter=serialized_parameter, **SCOPE)
    for index, command in builtins.enumerate(BUILD_DOCUMENTATION_PAGE_COMMAND):
        BUILD_DOCUMENTATION_PAGE_COMMAND[index] = \
            BUILD_DOCUMENTATION_PAGE_COMMAND[index].format(
                serializedParameter=serialized_parameter,
                parameterFilePath=parameter_file._path,
                **SCOPE)
    __logger__.debug('Use parameter "%s".', serialized_parameter)
    __logger__.info('Run "%s".', ' '.join(BUILD_DOCUMENTATION_PAGE_COMMAND))
    current_working_directory_backup = FileHandler()
    temporary_documentation_folder.change_working_directory()
    Platform.run(
        command=BUILD_DOCUMENTATION_PAGE_COMMAND[0],
        command_arguments=BUILD_DOCUMENTATION_PAGE_COMMAND[1:], error=False,
        log=True)
    current_working_directory_backup.change_working_directory()
    parameter_file.remove_file()
    # Remove everything except the temporary build folder, the parked api
    # documentation and ignored files before copying the new build over.
    for file in FileHandler():
        if not (file in (temporary_documentation_folder, FileHandler(
            location='.%s' % API_DOCUMENTATION_PATH[1]
        )) or is_file_ignored(file)):
            file.remove_deep()
    documentation_build_folder = FileHandler(location='%s%s' % (
        temporary_documentation_folder.path, DOCUMENTATION_BUILD_PATH
    ), must_exist=True)
    documentation_build_folder.iterate_directory(
        function=copy_repository_file, recursive=True,
        source=documentation_build_folder, target=FileHandler())
    # NOTE(review): the temporary folder is only removed when the umount
    # succeeds, i.e. presumably only when "node_modules" was bind-mounted —
    # confirm that leaving it behind otherwise is intended.
    if (Platform.run(
        "/usr/bin/env sudo umount '%s'" %
            temporary_documentation_node_modules_directory.path,
        native_shell=True, error=False, log=True
    )['return_code'] == 0):
        temporary_documentation_folder.remove_deep()
    Platform.run(
        (
            '/usr/bin/env git add --all',
            '/usr/bin/env git commit --message "%s" --all' %
                PROJECT_PAGE_COMMIT_MESSAGE,
            '/usr/bin/env git push',
            '/usr/bin/env git checkout master'
        ),
        native_shell=True,
        error=False,
        log=True
    )
class Replace(Class, Runnable): ''' Parse source code and replace version depended code snippets with the \ correct given version code snippets. NOTE: "(?s...)" is equivalent to regular expression flag \ "regularExpression.DOTALL". NOTE: That alternate version in one line regular expression pattern \ could be empty. **location** - Location to execute macro processing. **skip_self_file** - If setted to "True" and this script \ file is part of "location" this file \ will be ignored. **extension** - File extensions to handle. Others will \ be excluded. **first_line_regex_pattern** - Regular expression pattern to \ determine current version of given \ text file. **one_line_regex_pattern** - One line regular expression syntax to \ replace. **more_line_regex_pattern** - More line regular expression syntax to \ replace. **encoding** - Encoding to use. **dry** - Indicates whether a dry run with \ producing log output should be done. **_exclude_locations** - Locations to exclude. **_new_version** - Version description to convert to. Examples: >>> Replace( ... location='non_existing_file' ... ) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... boostnode.extension.native.FileError: Invalid path "...non_exist... >>> __test_globals__['__test_mode__'] = False >>> file = FileHandler(__test_folder__.path + '_initialize') >>> file.content = '#!/bin/python2.7\\n\\n# # python3.5 a\\nb\\n' >>> Replace(file) # doctest: +ELLIPSIS Object of "Replace" with file "..._initialize" to convert to "py... >>> __test_globals__['__test_mode__'] = True ''' # region properties COMMAND_LINE_ARGUMENTS = ( {'arguments': ('-p', '--path'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': builtins.str, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': 'Defines files to convert. 
A directory or file path is ' 'supported.', 'dest': 'location', 'metavar': 'PATH'}}, {'arguments': ('-n', '--new-version'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': 'Defines new version of converted files.', 'dest': '_new_version', 'metavar': 'VERSION'}}, {'arguments': ('-s', '--skip-self-file'), 'specification': { 'action': 'store_true', 'default': {'execute': '__initializer_default_value__'}, 'help': 'Determines if this file should be ignored for running ' 'any macros.', 'dest': 'skip_self_file'}}, {'arguments': ('-y', '--dry'), 'specification': { 'action': 'store_true', 'default': {'execute': '__initializer_default_value__'}, 'help': 'Define if there really should be done something.', 'dest': 'dry'}}, {'arguments': ('-e', '--extension'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'If setted only files with given extension will " '''be parsed (default: "%s").' % ''' "__initializer_default_value__.replace('%', " "'%%')"}, 'dest': 'extension', 'metavar': 'FILE_EXTENSION'}}, {'arguments': ('-a', '--exclude-locations'), 'specification': { 'action': 'store', 'nargs': '*', 'default': {'execute': '__initializer_default_value__'}, 'type': builtins.str, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'Select locations which should be ignored. %s " "doesn\\'t touch these files.' 
" '% module_name.capitalize()'}, 'dest': '_exclude_locations', 'metavar': 'PATHS'}}, {'arguments': ('-f', '--first-line-regex-pattern'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'Defines line pattern to determine current " '''version of file to parse (default: "%s").' %''' "__initializer_default_value__.replace('%', " "'%%')"}, 'dest': 'first_line_regex_pattern', 'metavar': 'REGEX'}}, {'arguments': ('-o', '--one-line-regex-pattern'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'Defines line to determine current version of " '''file to parse (default: "%s").' % ''' "__initializer_default_value__.replace('%', " "'%%')"}, 'dest': 'one_line_regex_pattern', 'metavar': 'REGEX'}}, {'arguments': ('-r', '--more-line-regex-pattern'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'Defines line to determine current version of " '''file to parse (default: "%s").' % ''' "__initializer_default_value__.replace('%', " "'%%')"}, 'dest': 'more_line_regex_pattern', 'metavar': 'REGEX'}}, {'arguments': ('-g', '--encoding'), 'specification': { 'action': 'store', 'default': {'execute': '__initializer_default_value__'}, 'type': {'execute': 'type(__initializer_default_value__)'}, 'required': {'execute': '__initializer_default_value__ is None'}, 'help': { 'execute': "'Define which encoding should be used (default: " '''"%s").' 
            % __initializer_default_value__.''' "replace('%', '%%')"},
         'dest': 'encoding', 'metavar': 'ENCODING'}})
    '''Defines options for manipulating the programs default behavior.'''

    # endregion

    # region dynamic methods

    # # region public

    # # # region special

    @JointPoint
# # python3.5     def __repr__(self: Self) -> builtins.str:
    def __repr__(self):
        '''
            Invokes if this object should describe itself by a string.

            Examples:

            >>> repr(Replace(
            ...     location=__file_path__, _new_version='python3.5'
            ... )) # doctest: +ELLIPSIS
            '...Replace...file "...macro.py" to ...to "python3.5".'
        '''
        return (
            'Object of "{class_name}" with {type} "{path}" to convert to '
            '"{new_version}".'.format(
                class_name=self.__class__.__name__, type=self.location.type,
                path=self.location.path, new_version=self._new_version))

    # # # endregion

    # # # region setter

    @JointPoint
# # python3.5
# #     def set_new_version(self: Self, version: builtins.str) -> builtins.str:
    def set_new_version(self, version):
# #
        '''
            Checks if an explicit new version was given or a useful should be \
            determined.

            **version** - New version of current text file. Could be \
                          "__determine_useful__" if it should be guessed.

            Returns new version.

            Examples:

            >>> replace = Replace(location=__file_path__)

            >>> replace.set_new_version(version='python3.5')
            'python3.5'

            >>> replace.new_version = '__determine_useful__'
            >>> replace._new_version # doctest: +ELLIPSIS
            'python...'

            >>> file = FileHandler(
            ...     location=__test_folder__.path + 'set_new_version')
            >>> file.content = (
            ...     '#!/usr/bin/env version\\n\\n# # alternate_version hans\\n'
            ...     'peter\\n')
            >>> replace = Replace(file)._convert_path()
            >>> replace.new_version = '__determine_useful__'
            >>> replace._new_version
            'version'

            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '...'

            >>> replace.location = __test_folder__
            >>> replace._exclude_locations = [__test_folder__]
            >>> replace.new_version = '__determine_useful__'
            >>> replace._new_version
            ''
        '''
        self._new_version = version
        if version == '__determine_useful__':
            # Guess the target version by scanning the configured location
            # for the first macro marker (e.g. "# # <version> ...").
            self._new_version = self._determine_useful_version_in_location(
                location=self.location)
            if not self._new_version:
                __logger__.warning('No new version found to convert to.')
        return self._new_version

    @JointPoint
# # python3.5
# #     def set_exclude_locations(
# #         self: Self, paths: Iterable
# #     ) -> builtins.list:
    def set_exclude_locations(self, paths):
# #
        '''
            Converts all paths setted to "_exclude_locations" via string to \
            high level file objects.

            **paths** - A list of paths to exclude from processing the macro.

            Returns a list of file objects to ignore.

            Examples:

            >>> replace = Replace(location=__file_path__)

            >>> replace.exclude_locations = [
            ...     __file_path__, FileHandler('not_existing')]
            >>> replace._exclude_locations # doctest: +ELLIPSIS
            [Object of "Handler" with path "...macro.py" (type: file).]
        '''
        self._exclude_locations = []
        for path in paths:
            file = FileHandler(location=path)
            # Non existing locations are silently dropped (see doctest
            # above: "not_existing" doesn't show up in the result).
            if file:
                self._exclude_locations.append(file)
        return self._exclude_locations

    # # # endregion

    # # endregion

    # # region protected

    # # # region runnable implementation

    @JointPoint
# # python3.5     def _run(self: Self) -> Self:
    def _run(self):
        '''
            Entry point for command line call of this program. Validates the \
            given input. Gives usage info or raises exception if the given \
            inputs don't make sense.

            Examples:

            >>> from copy import copy
            >>> sys_argv_backup = copy(sys.argv)

            >>> sys.argv[1:] = ['--path', __file_path__, '--skip-self-file']
            >>> Replace.run() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "...".

            >>> sys.argv = sys_argv_backup
        '''
        command_line_arguments = CommandLine.argument_parser(
            arguments=self.COMMAND_LINE_ARGUMENTS, module_name=__name__,
            scope={'os': os, 'module_name': __module_name__, 'self': self})
        return self._initialize(**self._command_line_arguments_to_dictionary(
            namespace=command_line_arguments))

    @JointPoint(InstancePropertyInitializer)
# # python3.5
# #     def _initialize(
# #         self: Self, location=None, skip_self_file=False, extension='',
# #         first_line_regex_pattern='(?P<constant_version_pattern>^#!.*?'
# #                                  '(?P<current_version>[a-zA-Z0-9\.]+))\n',
# #         one_line_regex_pattern='\n(?P<prefix># #) '
# #                                '(?P<alternate_version>[^\n ]+) ?'
# #                                '(?P<alternate_text>.*)\n'
# #                                '(?P<current_text>.*)(?:\n|\Z)',
# #         more_line_regex_pattern='(?s)\n(?P<prefix># #) '
# #                                 '(?P<alternate_version>[^ ]+)\n'
# #                                 '(?P<alternate_text>'
# #                                 '(?:(?:# # .*?\n)|'  # in brackets
# #                                 '(?:# #\n))+'  # in brackets
# #                                 ')(?P<current_text>.*?\n)# #(?:\n|\Z)',
# #         encoding=ENCODING, dry=False, _exclude_locations=(),
# #         _new_version='__determine_useful__', **keywords: builtins.object
# #     ) -> Self:
    def _initialize(
        self, location=None, skip_self_file=False, extension='',
        first_line_regex_pattern='(?P<constant_version_pattern>^#!.*?'
                                 '(?P<current_version>[a-zA-Z0-9\.]+))\n',
        one_line_regex_pattern='\n(?P<prefix># #) '
                               '(?P<alternate_version>[^\n ]+) ?'
                               '(?P<alternate_text>.*)\n'
                               '(?P<current_text>.*)\n',
        more_line_regex_pattern='(?s)\n(?P<prefix># #) '
                                '(?P<alternate_version>[^ ]+)\n'
                                '(?P<alternate_text>'
                                '(?:(?:# # .*?\n)|'  # in brackets
                                '(?:# #\n))+'  # in brackets
                                ')(?P<current_text>.*?\n)# #(?:\n|\Z)',
        encoding=ENCODING, dry=False, _exclude_locations=(),
        _new_version='__determine_useful__', **keywords
    ):
# #
        '''Triggers the conversion process with given arguments.'''

        # # # region properties

        '''Current location for deep code parsing.'''
        self.location = FileHandler(
            self.location, encoding=self.encoding, must_exist=True)
        '''NOTE: This additional declaration is needed to trigger setter.'''
        self.exclude_locations = self._exclude_locations
        '''
            New version to convert given files to. NOTE: This property can \
            only determined after all properties are set. This additional \
            declaration is needed to trigger setter.
        '''
        self.new_version = self._new_version
        '''Version of the giving source files.'''
        self._current_version = ''

        # # # endregion

        # NOTE(review): in test mode the conversion is deliberately skipped
        # so doctests can drive the single steps themselves — confirm.
        if not __test_mode__ and self._new_version:
            self._convert_path()
        return self

    # # # endregion

    # # # region boolean

    @JointPoint
# # python3.5
# #     def _in_exclude_location(
# #         self: Self, location: FileHandler
# #     ) -> builtins.bool:
    def _in_exclude_location(self, location):
# #
        '''
            Returns "True" if given location is in one of initially defined \
            exclude locations.

            Examples:

            >>> replace = Replace(location=__file_path__)

            >>> replace._exclude_locations = [__test_folder__]
            >>> replace._in_exclude_location(FileHandler(__test_folder__))
            True

            >>> replace._exclude_locations = [
            ...     FileHandler(__test_folder__.path + '_in_exclude_location')]
            >>> replace._in_exclude_location(FileHandler(__test_folder__))
            False
        '''
        for file in self._exclude_locations:
            # A location is excluded if it equals an exclude entry or lies
            # anywhere below an excluded directory ("in" containment test).
            if location == file or (file.is_directory() and location in file):
                __logger__.info(
                    'Ignore exclude location "%s".', location.path)
                return True
        return False

    # # # endregion

    # # # region core concern

    @JointPoint
# # python3.5
# #     def _determine_useful_version_in_location(
# #         self: Self, location: FileHandler
# #     ) -> builtins.str:
    def _determine_useful_version_in_location(self, location):
# #
        '''
            Determines a useful version for replacing if nothing explicit was \
            given.

            Examples:

            >>> folder = FileHandler(
            ...     location=__test_folder__.path +
            ...     '_determine_useful_version_in_location/sub')
            >>> folder.make_directories()
            True
            >>> file = FileHandler(location=folder.path + 'file')
            >>> file.content = 'hans\\n# # new_version peter\\nklaus\\n'
            >>> replace = Replace(location=__file_path__)

            >>> replace._exclude_locations = [file]
            >>> replace._determine_useful_version_in_location(location=folder)
            ''

            >>> replace._exclude_locations = []
            >>> replace.extension = 'not_existing'
            >>> replace._determine_useful_version_in_location(location=file)
            ''
        '''
        if not self._in_exclude_location(location):
            version = self._determine_useful_version_in_location_helper(
                location)
            if version:
                return version
        # Only report "nothing found" once, for the top level call.
        if location == self.location:
            __logger__.info('No macros found.')
        return ''

    @JointPoint
# # python3.5
# #     def _determine_useful_version_in_file(
# #         self: Self, file: FileHandler
# #     ) -> builtins.str:
    def _determine_useful_version_in_file(self, file):
# #
        '''
            Searches for first version replacement in macro language as good \
            guess for new version if no new version was defined explicitly.

            Examples:

            >>> replace = Replace(location=__file_path__)
            >>> file = FileHandler(
            ...     __test_folder__.path + '_determine_useful_version_in_file')
            >>> file.content = 'ä'

            >>> replace.encoding = 'ascii'
            >>> replace._determine_useful_version_in_file(file)
            ''
        '''
        content = file.get_content(encoding=self.encoding)
        # Try the single line macro format first, then the multi line one.
        match = regularExpression.compile(self.one_line_regex_pattern).search(
            content)
        if match is None:
            match = regularExpression.compile(
                self.more_line_regex_pattern
            ).search(content)
        if match:
            __logger__.info(
                'Detected "%s" as new version.',
                match.group('alternate_version'))
            return match.group('alternate_version')
        return ''

    @JointPoint
# # python3.5     def _convert_path(self: Self) -> Self:
    def _convert_path(self):
        '''
            Converts the given path to the specified format.

            Examples:

            >>> file = FileHandler(__test_folder__.path + '_convert_path')
            >>> file.content = ''
            >>> replace = Replace(location=file, _new_version='python2.7')

            >>> replace._exclude_locations = [file]
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..._convert_path" to convert to "...

            >>> replace.location = __test_folder__
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".

            >>> file.content = 'hans'
            >>> Replace(
            ...     file, _new_version='python3.5'
            ... )._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..._convert_path" to convert to ...

            >>> replace.location = file
            >>> replace._exclude_locations = []
            >>> file.content = ('#!/usr/bin/python3.5\\n'
            ...                 '\\n'
            ...                 '# # python2.7 hans\\n'
            ...                 'AB\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..._convert_path" to convert to ...
            >>> file.content
            '#!/usr/bin/python2.7\\n\\n# # python3.5 AB\\nhans\\n'

            >>> file.content = ('#!/bin/python3.5\\n'
            ...                 '\\n'
            ...                 '# # python2.7\\n'
            ...                 '# # A\\n'
            ...                 'C\\n'
            ...                 'D\\n'
            ...                 '# #\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python2.7".
            >>> file.content
            '#!/bin/python2.7\\n\\n# # python3.5\\n# # C\\n# # D\\nA\\n# #\\n'

            >>> file.content = ('#!/bin/python2.7\\n'
            ...                 '\\n'
            ...                 '# # python3.5\\n'
            ...                 '# # A\\n'
            ...                 'B\\n'
            ...                 '#\\n'
            ...                 '# #\\n')
            >>> replace = Replace(
            ...     location=__test_folder__.path + '_convert_path',
            ...     _new_version='python3.5')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python3.5".
            >>> file.content
            '#!/bin/python3.5\\n\\n# # python2.7\\n# # B\\n# # #\\nA\\n# #\\n'

            >>> file.content = ('#!/bin/python3.5\\n'
            ...                 '\\n'
            ...                 '# # python2.7\\n'
            ...                 '# # A\\n'
            ...                 '# #\\n'
            ...                 '# # B\\n'
            ...                 'B\\n'
            ...                 '# #\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python3.5".
            >>> file.content # doctest: +ELLIPSIS
            '#!/bin/python3.5\\n\\n# # python2.7\\n# # A\\n# #\\n# # B\\nB...'

            >>> file.content = ('#!/bin/python2.7\\n'
            ...                 '\\n'
            ...                 '# # python3.5\\n'
            ...                 '# # A\\n'
            ...                 'B\\n'
            ...                 '\\n'
            ...                 'C\\n'
            ...                 '# #\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python3.5".
            >>> file.content # doctest: +ELLIPSIS
            '#!/bin/python3.5\\n\\n# # python2.7\\n# # B\\n# #\\n# # C\\nA\...'

            >>> file.content = ('#!/bin/python2.7\\n'
            ...                 '\\n'
            ...                 '# # python3.5\\n'
            ...                 'A\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python3.5".
            >>> file.content
            '#!/bin/python3.5\\n\\n# # python2.7 A\\n\\n'

            >>> file.content = ('#!/bin/python2.7\\n'
            ...                 '\\n'
            ...                 '# # python3.5 A\\n'
            ...                 '\\n')
            >>> replace._convert_path() # doctest: +ELLIPSIS
            Object of "Replace" with file "..." to convert to "python3.5".
            >>> file.content
            '#!/bin/python3.5\\n\\n# # python2.7\\nA\\n'
        '''
        if not self._in_exclude_location(location=self.location):
            # Files are converted directly; anything else is treated as a
            # directory and walked recursively.
            if(self.location.is_file() and (
                not self.extension or
                self.location.extension == self.extension)
            ):
                self._convert_file(file=self.location)
            else:
                self._convert_directory(directory=self.location)
        return self

    @JointPoint
# # python3.5
# #     def _convert_directory(self: Self, directory: FileHandler) -> Self:
    def _convert_directory(self, directory):
# #
        '''
            Walks through a whole directory and its substructure to convert \
            its text based files between different versions of marked \
            code-snippets.

            **directory** - the directory location with text-files which \
                            should be converted.

            Examples:

            >>> folder = FileHandler(
            ...     __test_folder__.path + '_convert_directory',
            ...     make_directory=True)
            >>> replace = Replace(location=__test_folder__)

            >>> replace._convert_directory(
            ...     __test_folder__
            ... ) # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".

            >>> replace.extension = 'not_existing'
            >>> FileHandler(folder.path + 'file').content = ''
            >>> replace._convert_directory(
            ...     __test_folder__
            ... ) # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".
        '''
        for file in directory:
            __logger__.debug('Check "%s".', file.path)
            if not self._in_exclude_location(location=file):
                # NOTE: Symbolic links are skipped ("allow_link=False") to
                # avoid processing targets twice or following cycles.
                if file.is_directory(allow_link=False):
                    self._convert_directory(directory=file)
                elif file.is_file(allow_link=False) and (
                    not self.extension or file.extension == self.extension
                ):
                    self._convert_file(file)
        return self

    @JointPoint
# # python3.5     def _convert_file(self: Self, file: FileHandler) -> Self:
    def _convert_file(self, file):
        '''
            Opens a given file and parses its content and convert it through \
            different versions of code snippets.

            **file** - the file to be converted.

            Examples:

            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '...'

            >>> Replace(
            ...     location=__file_path__, skip_self_file=True
            ... )._convert_file(
            ...     FileHandler(__file_path__)
            ... ) # doctest: +ELLIPSIS
            Object of "Replace" with file "...macro..." to convert to "...".
            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '...Skip self file...'
        '''
        # Determine this module's own file to optionally protect it from
        # being rewritten by its own macro processing.
        self_file = FileHandler(
            location=inspect.currentframe().f_code.co_filename,
            respect_root_path=False)
        if self.skip_self_file and self_file == file:
            __logger__.info('Skip self file "%s".', self_file)
        else:
            self._convert_file_content(file)
        return self

    @JointPoint
# # python3.5
# #     def _convert_file_content(self: Self, file: FileHandler) -> Self:
    def _convert_file_content(self, file):
# #
        '''
            Converts source code of given file to new version.

            Examples:

            >>> file = FileHandler(
            ...     __test_folder__.path + '_convert_file_content')
            >>> replace = Replace(__test_folder__, encoding='ascii')
            >>> file.set_content('ä', encoding='latin_1') # doctest: +ELLIPSIS
            Object of "Handler" with path "..._convert_file_content" and ...

            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '...'

            >>> replace._convert_file_content(file) # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".
            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '... decode file "..._file_content" with given encoding "ascii"...'

            >>> file.set_content(
            ...     'a\\nä', encoding='latin_1'
            ... ) # doctest: +ELLIPSIS
            Object of "Handler" with path "..._convert_file_content" and ...
            >>> replace._convert_file_content(file) # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".
            >>> __test_buffer__.clear() # doctest: +ELLIPSIS
            '...decode file "..._file_content" with given encoding "ascii"...'

            >>> file.content = '#!/usr/bin/env python3.5\\na'
            >>> replace.dry = True
            >>> replace._convert_file_content(file) # doctest: +ELLIPSIS
            Object of "Replace" with directory "..." to convert to "...".
        '''
# # python3.5         with builtins.open(
        with codecs.open(
            file.path, mode='r', encoding=self.encoding
        ) as file_handler:
            try:
                first_line = file_handler.readline()
            except builtins.UnicodeDecodeError:
                __logger__.warning(
                    'Can\'t decode file "%s" with given encoding "%s".',
                    file.path, self.encoding)
                return self
            # The shebang line must carry the current version; it anchors
            # which version the rest of the file is currently written in.
# # python3.5
# #             match = regularExpression.compile(
# #                 self.first_line_regex_pattern
# #             ).fullmatch(first_line)
            match = regularExpression.compile(
                '(?:%s)$' % self.first_line_regex_pattern
            ).match(first_line)
# #
            if match is None:
                __logger__.warning(
                    '"%s" hasn\'t path to version in first line.', file.path)
                return self
            self._current_version = match.group('current_version')
            # Rewrite the interpreter path in the shebang to the new version.
            new_interpreter = match.group('constant_version_pattern').replace(
                self._current_version, self._new_version)
            first_line = match.group().replace(
                match.group('constant_version_pattern'), new_interpreter)
            '''
                NOTE: Calling "read()" twice is necessary to work around a \
                python bug. First call only reads a part of corresponding \
                file. \
                NOTE: Catching an encoding error here isn't necessary, \
                because former "readline()" call has already loaded the full \
                file into buffer. An encoding error would already be throne.
            '''
            file_content = file_handler.read() + file_handler.read()
            __logger__.info(
                'Convert "{path}" from "{current_version}" to '
                '"{new_version}".'.format(
                    path=file.path, current_version=self._current_version,
                    new_version=self._new_version))
            # Two substitution passes: first swap multi line macro blocks,
            # then single line macros in the intermediate result.
            file_content = '%s%s' % (first_line, regularExpression.compile(
                self.more_line_regex_pattern
            ).sub(self._replace_alternate_lines, file_content))
            if not self.dry:
                file.content = regularExpression.compile(
                    self.one_line_regex_pattern
                ).sub(self._replace_alternate_line, file_content)
        return self

    @JointPoint
# # python3.5
# #     def _replace_alternate_lines(
# #         self: Self, match: type(regularExpression.compile('').match(''))
# #     ) -> builtins.str:
    def _replace_alternate_lines(self, match):
# #
        '''
            Replaces various numbers of code lines with its corresponding \
            code line in another version.

            **match** - is a regular expression match object with all needed \
                        infos about the current code snippet and its \
                        corresponding.
        '''
        if match.group('alternate_version') == self._new_version:
            '''
                "str.replace()" has to run over "current_text" twice. Two \
                consecutive lines with whitespace at the end of line aren't \
                matched in first run.
            '''
            # Swap the roles: the active lines get commented out with the
            # macro prefix and the commented alternate lines get activated.
            # NOTE(review): the slice offsets compensate for the trailing
            # "prefix + space" added after the final newline — confirm.
            return (
                '\n{prefix} {current_version}\n{prefix} {current_text}\n'
                '{alternate_text}{prefix}\n'.format(
                    prefix=match.group('prefix'),
                    current_version=self._current_version,
                    current_text=match.group('current_text').replace(
                        '\n', '\n%s ' % match.group('prefix')
                    )[:-builtins.len(match.group('prefix')) - 2].replace(
                        '\n%s \n' % match.group('prefix'),
                        '\n%s\n' % match.group('prefix')
                    ).replace(
                        '\n%s \n' % match.group('prefix'),
                        '\n%s\n' % match.group('prefix')
                    ).rstrip(),
                    alternate_text=regularExpression.compile(
                        '\n%s ?' % String(match.group(
                            'prefix'
                        )).regex_validated.content
                    ).sub('\n', match.group(
                        'alternate_text'
                    ))[builtins.len(match.group('prefix')) + 1:]))
        return match.group()

    @JointPoint
# # python3.5
# #     def _replace_alternate_line(
# #         self: Self, match: type(regularExpression.compile('').match(''))
# #     ) -> builtins.str:
    def _replace_alternate_line(self, match):
# #
        '''
            Replaces one code line with its corresponding code line in \
            another version.

            **match** - is a regular expression match object with all needed \
                        infos about the current code snippet and its \
                        corresponding alternative.
        '''
        if match.group('alternate_version') == self._new_version:
            current_text = match.group('current_text')
            # An empty current line stays a bare macro marker; otherwise a
            # separating space is needed after the version token.
            if current_text:
                current_text = ' ' + current_text
            return (
                '\n{prefix} {current_version}{current_text}\n{alternate_text}'
                '\n'.format(
                    prefix=match.group('prefix'),
                    current_version=self._current_version,
                    current_text=current_text,
                    alternate_text=match.group('alternate_text')))
        return match.group()

    @JointPoint
# # python3.5
# #     def _determine_useful_version_in_location_helper(
# #         self: Self, location: FileHandler
# #     ) -> builtins.str:
    def _determine_useful_version_in_location_helper(self, location):
# #
        '''
            Searches in files in given locations the first occurrences of a \
            useful conversion format.
        '''
        if location.is_directory():
            for sub_location in location:
                version = self._determine_useful_version_in_location(
                    location=sub_location)
                if version:
                    return version
        elif not self.extension or location.extension == self.extension:
            version = self._determine_useful_version_in_file(file=location)
            if version:
                return version
        return ''