def load_cache(self):
    """
    Load the cache file pointed to by this FileCache instance.

    If no cache file is present, or the existing file cannot be
    unpickled, a new cache is initialised instead; this method never
    propagates a bad-cache error to the caller.
    """
    start_time = time.time()
    try:
        # Load the cache file so we know the compilation state of the
        # design.
        with open(self.cache_path, 'rb') as pickle_file:
            self.cache = pickle.load(pickle_file)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any read or
        # unpickling failure is treated as a corrupt cache.
        log.warning('The cache file was corrupted, re-initialising...')
        log.debug(traceback.format_exc())
        self.initialise_cache()
    log.debug('Cache loaded in ' +
              utils.time_delta_string(start_time, time.time()))
def load_cache(self):
    """
    Load the cache file pointed to by this FileCache instance.

    If no cache file is present, or the existing file cannot be
    unpickled, a new cache is initialised instead; this method never
    propagates a bad-cache error to the caller.
    """
    start_time = time.time()
    try:
        # Load the cache file so we know the compilation state of the
        # design.
        with open(self.cache_path, 'rb') as pickle_file:
            self.cache = pickle.load(pickle_file)
            log.debug(self.__str__())
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any read or
        # unpickling failure is treated as a corrupt cache.
        log.warning('The cache file was corrupted, re-initialising...')
        log.debug(traceback.format_exc())
        self.initialise_cache()
    log.debug(
        'Cache loaded in ' + utils.time_delta_string(
            start_time,
            time.time()
        )
    )
def parse_project(filepath, project_object, synthesise=None):
    """Parse the XML project and update the project_dictionary or return a
    new dictionary if one is not supplied.

    Args:
        filepath: Path to the XML project file to parse.
        project_object: Project instance populated with the configs,
            libraries, constraints, unit tests, generics and files found
            in the XML tree.
        synthesise: Optional synthesis-flag override. When None the flag
            is taken from the XML node attributes instead.

    Returns:
        None. On a malformed XML file the project_object is
        re-initialised and parsing stops early.
    """
    log.info('Parsing: ' + str(filepath) + ' synthesis=' + str(synthesise))
    start_time = time.time()
    # Relative paths inside the project file are resolved against the
    # directory that contains the project file itself.
    project_root = os.path.dirname(os.path.realpath(filepath))
    try:
        xml_obj = minidom.parse(filepath)
        for project_node in xml_obj.getElementsByTagName(
                ProjectAttributes.XML_NODE_PROJECT):
            # Project attributes (if any)
            # If this whole node should not be synthesised, ignore any
            # child flags otherwise get the child synthesis flag and use
            # that.
            if synthesise is None:
                project_attribs = XmlProjectParser._get_node_attributes(
                    project_node, project_root)
                synthesis_enabled = project_attribs.get(
                    ProjectAttributes.XML_ATTRIBUTE_SYNTHESIS, None)
            else:
                synthesis_enabled = synthesise
            for child in project_node.childNodes:
                if child.nodeName == ProjectAttributes.XML_NODE_PROJECT:
                    attribs = XmlProjectParser._get_node_attributes(
                        child, project_root)
                    # If this whole node should not be synthesised, ignore
                    # any child flags otherwise get the child synthesis
                    # flag and use that.
                    # NOTE(review): this rebinds the ``synthesise``
                    # parameter, so the resolved value carries over into
                    # later loop iterations — confirm that carry-over is
                    # intended.
                    if synthesis_enabled is None:
                        fileAttributes = (
                            XmlProjectParser._get_node_attributes(
                                child, project_root))
                        try:
                            synthesise = fileAttributes[
                                ProjectAttributes.XML_ATTRIBUTE_SYNTHESIS]
                        except KeyError:
                            synthesise = None
                    else:
                        synthesise = synthesis_enabled
                    if ProjectAttributes.XML_ATTRIBUTE_PATH in attribs:
                        log.debug('Found sub-project: ' + str(attribs[
                            ProjectAttributes.XML_ATTRIBUTE_PATH]))
                        # Recursively call this parser with the new project
                        # path
                        XmlProjectParser.parse_project(
                            str(attribs[
                                ProjectAttributes.XML_ATTRIBUTE_PATH]),
                            project_object,
                            synthesise)
                elif child.nodeName == ProjectAttributes.XML_NODE_CONFIG:
                    XmlProjectParser._add_config(child, project_root,
                                                 project_object)
                elif child.nodeName == ProjectAttributes.XML_NODE_LIBRARY:
                    XmlProjectParser._add_library(child, project_root,
                                                  project_object,
                                                  synthesis_enabled)
                elif child.nodeName == (
                        ProjectAttributes.XML_NODE_CONSTRAINTS):
                    XmlProjectParser._add_constraints(
                        child, project_root, project_object,
                    )
                elif child.nodeName == (
                        ProjectAttributes.XML_NODE_UNITTEST):
                    XmlProjectParser._add_unittest(
                        child, project_root, project_object,
                    )
                elif child.nodeName == ProjectAttributes.XML_NODE_GENERIC:
                    # Build a dictionary of generics using the attribute
                    # name and value
                    attribs = child.attributes
                    if attribs is None:
                        continue
                    attribs = dict(attribs.items())
                    for attrName, attrVal in attribs.items():
                        project_object.add_generic(attrName, attrVal)
                elif child.nodeName == ProjectAttributes.XML_NODE_FILE:
                    # Files should not be left unassociated with a library
                    # unless you wish to add all files to the work library.
                    # The default behavior will be to add parentless files
                    # to the work library, but a configuration option could
                    # make this post an error instead.
                    log.warning('Found file with no parent library, ' +
                                'defaulting to work library')
                    # If this whole node should not be synthesised, ignore
                    # any child flags otherwise get the child synthesis
                    # flag and use that.
                    if synthesis_enabled is None:
                        fileAttributes = (
                            XmlProjectParser._get_node_attributes(
                                child, project_root))
                        try:
                            synthesise = fileAttributes[
                                ProjectAttributes.XML_ATTRIBUTE_SYNTHESIS]
                        except KeyError:
                            synthesise = None
                    else:
                        synthesise = synthesis_enabled
                    XmlProjectParser._add_file(child, 'work', project_root,
                                               project_object,
                                               synthesise=synthesise)
                elif child.nodeName == ProjectAttributes.XML_NODE_TEXT:
                    # Whitespace/text nodes between elements are ignored.
                    pass
                elif child.nodeName == ProjectAttributes.XML_NODE_COMMENT:
                    # XML comment nodes are ignored.
                    pass
    except xml.parsers.expat.ExpatError:
        log.error(
            'Error found in XML file, check the formatting. ' +
            'Refer to the traceback below for the line number and file.')
        log.error(traceback.format_exc())
        # A malformed project file leaves the project in a clean, empty
        # state rather than a partially populated one.
        project_object.initialise()
        return
    log.debug(filepath + ' parsed in ' +
              utils.time_delta_string(start_time, time.time()))
def compile_project(self, includes=None):
    """
    Compile every source file in the project into its target library.

    Unmodified files whose target library already exists are skipped by
    consulting the file cache. The cache is saved on success and also on
    failure, after clearing the entry for the file that failed so it is
    recompiled on the next run.

    Args:
        includes: Optional mapping of library name to path for external
            libraries to register before compiling.

    Raises:
        FileNotFoundError: If a source file listed in the project does
            not exist on disk.
    """
    # Use None as the default instead of a mutable ``{}`` so the default
    # dict is not shared between calls.
    if includes is None:
        includes = {}
    self.libraries.update(includes)
    for libname, path in includes.items():
        self.set_library_path(libname, path)
    # Load the cache
    cache = self.project.cache
    # Compile the project
    try:
        cwd = self.project.get_simulation_directory()
        # Placeholder arguments
        force = False
        # Compile each of the sources in the project file
        created_libraries = []
        skipped = 0
        count = 0
        start_time = time.time()
        file_object = None
        try:
            for file_object in self.project.get_files():
                libname = file_object.library
                count += 1
                # Check the md5sum of this file and compare it to the
                # md5sum cache to see if it has changed since it was
                # last compiled
                if os.path.isfile(file_object.path):
                    if (
                        not force and
                        not cache.is_file_changed(file_object, self.name)
                    ):
                        # The hashes match. If the library already exists
                        # then dont compile the file.
                        if self.library_exists(libname, cwd):
                            if libname not in created_libraries:
                                skipped += 1
                                log.info(
                                    "...skipping: " + file_object.path
                                )
                                continue
                    cache.add_file(file_object, self.name)
                    # Map or create the library, track which libraries
                    # were already created
                    if (
                        not cache.library_in_cache(libname, self.name) or
                        not self.library_exists(libname, cwd)
                    ):
                        # If this library is in the cache file someone must
                        # have deleted it since the last run, we need to
                        # recompile all files that are targeted at this
                        # library.
                        created_libraries.append(libname)
                        log.info("...adding library: " + libname)
                        self.add_library(libname)
                        cache.add_library(libname.lower(), self.name)
                    # Map the library to work so files can be added
                    self.set_working_library(libname, cwd=cwd)
                    log.info(
                        '...compiling {0} ({1}) into library {2}'.format(
                            os.path.basename(file_object.path),
                            file_object.fileType,
                            libname)
                    )
                    # Compile the source
                    self.compile(file_object, cwd=cwd)
                else:
                    raise FileNotFoundError(
                        'File could not be found: ' +
                        '{0}, operation aborted.'.format(
                            file_object.path
                        )
                    )
        except BaseException:
            # Explicit ``BaseException`` instead of a bare ``except:``
            # (identical behaviour since we always re-raise): clear the
            # SHA1 for the file that failed so it will recompile next
            # time, then persist the cache before propagating.
            if file_object is not None:
                cache.remove_file(file_object, self.name)
            cache.save_cache()
            raise
        if skipped > 0:
            log.info(
                '...skipped ' + str(skipped) +
                ' unmodified file(s). Use \"clean\" to erase' +
                ' the file cache'
            )
        log.info("...saving cache file")
        # Save the cache file
        cache.save_cache()
        log.info("...done")
        log.info(
            str(count) + ' file(s) processed in ' +
            utils.time_delta_string(start_time, time.time())
        )
    except exceptions.ProjectFileException:
        log.error('Compilation aborted due to error in project file.')
        return
def compile_project(self, includes=None):
    """
    Compile every source file in the project into its target library.

    Unmodified files whose target library already exists are skipped by
    consulting the file cache. The cache is saved on success and also on
    failure, after clearing the entry for the file that failed so it is
    recompiled on the next run. A missing source file logs an error and
    aborts the operation.

    Args:
        includes: Optional mapping of library name to path for external
            libraries to register before compiling.
    """
    # Use None as the default instead of a mutable ``{}`` so the default
    # dict is not shared between calls.
    if includes is None:
        includes = {}
    self.libraries.update(includes)
    for libname, path in includes.items():
        self.set_library_path(libname, path)
    # Load the cache
    cache = self.project.cache
    # Compile the project
    try:
        cwd = self.project.get_simulation_directory()
        # Placeholder arguments
        force = False
        # Compile each of the sources in the project file
        created_libraries = []
        skipped = 0
        count = 0
        start_time = time.time()
        file_object = None
        try:
            for file_object in self.project.get_files():
                libname = file_object.library
                count += 1
                # Check the md5sum of this file and compare it to the
                # md5sum cache to see if it has changed since it was
                # last compiled
                if os.path.isfile(file_object.path):
                    if (not force and not cache.is_file_changed(
                            file_object, self.name)):
                        # The hashes match. If the library already exists
                        # then dont compile the file.
                        if self.library_exists(libname, cwd):
                            if libname not in created_libraries:
                                skipped += 1
                                log.info("...skipping: " + file_object.path)
                                continue
                    cache.add_file(file_object, self.name)
                    # Map or create the library, track which libraries
                    # were already created
                    if (not cache.library_in_cache(libname, self.name) or
                            not self.library_exists(libname, cwd)):
                        # If this library is in the cache file someone must
                        # have deleted it since the last run, we need to
                        # recompile all files that are targeted at this
                        # library.
                        created_libraries.append(libname)
                        log.info("...adding library: " + libname)
                        self.add_library(libname)
                        cache.add_library(libname.lower(), self.name)
                    # Map the library to work so files can be added
                    self.set_working_library(libname, cwd=cwd)
                    log.info(
                        '...compiling {0} ({1}) into library {2}'.format(
                            os.path.basename(file_object.path),
                            file_object.fileType,
                            libname))
                    # Compile the source
                    self.compile(file_object, cwd=cwd)
                else:
                    log.error(
                        'File could not be found: ' +
                        '{0}, operation aborted.'.format(file_object.path))
                    return
        except BaseException:
            # Explicit ``BaseException`` instead of a bare ``except:``
            # (identical behaviour since we always re-raise): clear the
            # SHA1 for the file that failed so it will recompile next
            # time, then persist the cache before propagating.
            if file_object is not None:
                cache.remove_file(file_object, self.name)
            cache.save_cache()
            raise
        if skipped > 0:
            log.info('...skipped ' + str(skipped) +
                     ' unmodified file(s). Use \"clean\" to erase' +
                     ' the file cache')
        log.info("...saving cache file")
        # Save the cache file
        cache.save_cache()
        log.info("...done")
        log.info(
            str(count) + ' file(s) processed in ' +
            utils.time_delta_string(start_time, time.time()))
    except exceptions.ProjectFileException:
        log.error('Compilation aborted due to error in project file.')
        return
def parse_project(
    filepath,
    project_object,
    synthesise=None
):
    """Parse the XML project and update the project_dictionary or return a
    new dictionary if one is not supplied.

    Args:
        filepath: Path to the XML project file to parse.
        project_object: Project instance populated with the configs,
            libraries, constraints, unit tests, generics and files found
            in the XML tree.
        synthesise: Optional synthesis-flag override. When None the flag
            is taken from the processed XML node attributes instead.

    Returns:
        None. On a malformed XML file the project_object is
        re-initialised and parsing stops early.
    """
    log.info('Parsing: ' + str(filepath) + ' synthesis=' + str(synthesise))
    start_time = time.time()
    # Relative paths inside the project file are resolved against the
    # directory that contains the project file itself.
    project_root = os.path.dirname(os.path.realpath(filepath))
    try:
        xml_obj = minidom.parse(filepath)
        for project_node in xml_obj.getElementsByTagName(
            ProjectAttributes.XML_NODE_PROJECT
        ):
            # Project attributes (if any)
            # If this whole node should not be synthesised, ignore any
            # child flags otherwise get the child synthesis flag and use
            # that.
            if synthesise is None:
                project_attribs = ProjectAttributes.process_attributes(
                    project_node.attributes,
                    project_root
                )
                synthesis_enabled = project_attribs.get(
                    ProjectAttributes.ATTRIBUTE_SYNTHESIS,
                    None
                )
            else:
                synthesis_enabled = synthesise
            for child in project_node.childNodes:
                if child.nodeName == ProjectAttributes.XML_NODE_PROJECT:
                    attribs = ProjectAttributes.process_attributes(
                        child.attributes,
                        project_root,
                        defaults=ProjectAttributes.PROJECT_NODE_DEFAULTS
                    )
                    # If this whole node should not be synthesised, ignore
                    # any child flags otherwise get the child synthesis
                    # flag and use that.
                    # NOTE(review): this rebinds the ``synthesise``
                    # parameter, so the resolved value carries over into
                    # later loop iterations — confirm that carry-over is
                    # intended.
                    if synthesis_enabled is None:
                        synthesise = attribs.get(
                            ProjectAttributes.ATTRIBUTE_SYNTHESIS,
                            None
                        )
                    else:
                        synthesise = synthesis_enabled
                    if ProjectAttributes.ATTRIBUTE_PATH in attribs:
                        log.debug(
                            'Found sub-project: ' + str(
                                attribs[
                                    ProjectAttributes.ATTRIBUTE_PATH
                                ]
                            )
                        )
                        # Recursively call this parser with the new project
                        # path
                        XmlProjectParser.parse_project(
                            str(
                                attribs[
                                    ProjectAttributes.ATTRIBUTE_PATH
                                ]
                            ),
                            project_object,
                            synthesise
                        )
                elif child.nodeName == ProjectAttributes.XML_NODE_CONFIG:
                    XmlProjectParser._add_config(
                        child,
                        project_root,
                        project_object
                    )
                elif child.nodeName == ProjectAttributes.XML_NODE_LIBRARY:
                    XmlProjectParser._add_library(
                        child,
                        project_root,
                        project_object,
                        synthesis_enabled
                    )
                elif child.nodeName == (
                    ProjectAttributes.XML_NODE_CONSTRAINTS
                ):
                    XmlProjectParser._add_constraints(
                        child, project_root, project_object,
                    )
                elif child.nodeName == (
                    ProjectAttributes.XML_NODE_UNITTEST
                ):
                    XmlProjectParser._add_unittest(
                        child, project_root, project_object,
                    )
                elif child.nodeName == ProjectAttributes.XML_NODE_GENERIC:
                    # Build a dictionary of generics using the attribute
                    # name and value
                    attribs = child.attributes
                    if attribs is None:
                        continue
                    attribs = dict(attribs.items())
                    for attrName, attrVal in attribs.items():
                        project_object.add_generic(
                            attrName,
                            attrVal
                        )
                elif child.nodeName == ProjectAttributes.XML_NODE_FILE:
                    # Files should not be left unassociated with a library
                    # unless you wish to add all files to the work library.
                    # The default behavior will be to add parentless files
                    # to the work library, but a configuration option could
                    # make this post an error instead.
                    log.warning(
                        'Found file with no parent library, ' +
                        'defaulting to work library'
                    )
                    # If this whole node should not be synthesised, ignore
                    # any child flags otherwise get the child synthesis
                    # flag and use that.
                    if synthesis_enabled is None:
                        synthesise = (
                            ProjectAttributes.get_processed_attribute(
                                child.attributes.get(
                                    ProjectAttributes.ATTRIBUTE_SYNTHESIS,
                                    None
                                ),
                                project_root,
                                ProjectAttributes.ATTRIBUTE_SYNTHESIS
                            )
                        )
                    else:
                        synthesise = synthesis_enabled
                    XmlProjectParser._add_file(
                        child,
                        'work',
                        project_root,
                        project_object,
                        synthesise=synthesise
                    )
                elif child.nodeName == ProjectAttributes.XML_NODE_TEXT:
                    # Whitespace/text nodes between elements are ignored.
                    pass
                elif child.nodeName == ProjectAttributes.XML_NODE_COMMENT:
                    # XML comment nodes are ignored.
                    pass
    except xml.parsers.expat.ExpatError:
        log.error(
            'Error found in XML file, check the formatting. ' +
            'Refer to the traceback below for the line number and file.'
        )
        log.error(traceback.format_exc())
        # A malformed project file leaves the project in a clean, empty
        # state rather than a partially populated one.
        project_object.initialise()
        return
    log.debug(filepath + ' parsed in ' + utils.time_delta_string(
        start_time, time.time())
    )
def simulate(self, library, entity, gui=False, generics=None,
             includes=None, args=None, duration=None):
    """
    Compile and simulate the design.

    Runs the Icarus compilation stage (``iverilog``) over the project
    files with *entity* as the top level, then invokes ``vvp`` on the
    produced ``icarus_sim`` binary.

    Args:
        library: Target library name (unused by the Icarus flow).
        entity: Name of the top-level entity to simulate.
        gui: GUI flag (unused by the Icarus flow).
        generics: Optional mapping of parameter overrides, translated
            into ``-D name=value`` defines.
        includes: Optional mapping of library name to path; each path is
            added via ``-y`` (the library name is ignored).
        args: Ignored; the argument list is rebuilt internally
            (preserved for interface compatibility).
        duration: Simulation duration (unused by the Icarus flow).

    Returns:
        Tuple of (return code, stdout, stderr) from the vvp run.
    """
    # Use None instead of mutable defaults ({} / []) so the default
    # objects are not shared between calls.
    generics = {} if generics is None else generics
    includes = {} if includes is None else includes
    start_time = time.time()
    args = []
    # Specify the output name
    args += ['-o', 'icarus_sim']
    # Get the files
    for file_object in self.files:
        args.append(file_object.path)
        # TODO: Add additional custom compile args for each file.
    # Define the top level
    args += ['-s', entity]
    # Define top level parameters
    # TODO: Icarus does not seem to support parameter/generic overrides
    # in the latest version so `define overrides need to be used instead.
    if len(generics.keys()) > 0:
        log.warning(
            'Icarus parameter overrides via the -P flag are not ' +
            'supported. Parameter overrides will be translated into ' +
            '`define overrides via the -D command line switch.')
        for k, v in generics.items():
            args += ['-D', '{0}={1}'.format(k, v)]
    # Add custom library paths (the library name is ignored)
    for k, v in includes.items():
        args += ['-y' + v]
    # Call the Iverilog compilation stage
    Iverilog._call(self.iverilog, args,
                   cwd=self.project.get_simulation_directory())
    log.info("...done")
    log.info(
        str(len(self.files)) + ' file(s) processed in ' +
        utils.time_delta_string(start_time, time.time()))
    ######################################################################
    # Invoke simulation
    #   $ vvp [flags] foo.vvp [extended args]
    # Extended Args:
    #   -none/-vcd-none/-vcd-off/-fst-none
    #   -fst
    #   -lxt/lxt2
    #   -sdf-warn
    #   -sdf-info
    #   -sdf-verbose
    extended_args = [
        '-none', '-vcd-none', '-vcd-off', '-fst-none', '-fst', '-lxt',
        '-lxt2', '-sdf-warn', '-sdf-info', '-sdf-verbose'
    ]
    ######################################################################
    # Get user specified args; split them into vvp flags and the
    # extended args that must follow the target binary.
    args = self.project.get_tool_arguments(self.name, 'simulate')
    flags = list(filter(lambda x: x not in extended_args, args))
    extended = list(filter(lambda x: x in extended_args, args))
    args = flags
    # Target application
    args += ['icarus_sim']
    # Extended Args
    args += extended
    # Run the simulation
    ret, stdout, stderr = Iverilog._call(
        self.vvp,
        args,
        cwd=self.project.get_simulation_directory(),
        quiet=False)
    return ret, stdout, stderr
def simulate(
    self,
    library,
    entity,
    gui=False,
    generics=None,
    includes=None,
    args=None,
    duration=None
):
    """
    Compile and simulate the design.

    Runs the Icarus compilation stage (``iverilog``) over the project
    files with *entity* as the top level, then invokes ``vvp`` on the
    produced ``icarus_sim`` binary.

    Args:
        library: Target library name (unused by the Icarus flow).
        entity: Name of the top-level entity to simulate.
        gui: GUI flag (unused by the Icarus flow).
        generics: Optional mapping of parameter overrides, translated
            into ``-D name=value`` defines.
        includes: Optional mapping of library name to path; each path is
            added via ``-y`` (the library name is ignored).
        args: Ignored; the argument list is rebuilt internally
            (preserved for interface compatibility).
        duration: Simulation duration (unused by the Icarus flow).

    Returns:
        Tuple of (return code, stdout, stderr) from the vvp run.
    """
    # Use None instead of mutable defaults ({} / []) so the default
    # objects are not shared between calls.
    generics = {} if generics is None else generics
    includes = {} if includes is None else includes
    start_time = time.time()
    args = []
    # Specify the output name
    args += [
        '-o', 'icarus_sim'
    ]
    # Get the files
    for file_object in self.files:
        args.append(file_object.path)
        # TODO: Add additional custom compile args for each file.
    # Define the top level
    args += [
        '-s', entity
    ]
    # Define top level parameters
    # TODO: Icarus does not seem to support parameter/generic overrides
    # in the latest version so `define overrides need to be used instead.
    if len(generics.keys()) > 0:
        log.warning(
            'Icarus parameter overrides via the -P flag are not ' +
            'supported. Parameter overrides will be translated into ' +
            '`define overrides via the -D command line switch.'
        )
        for k, v in generics.items():
            args += [
                '-D', '{0}={1}'.format(k, v)
            ]
    # Add custom library paths (the library name is ignored)
    for k, v in includes.items():
        args += [
            '-y' + v
        ]
    # Call the Iverilog compilation stage
    Iverilog._call(
        self.iverilog,
        args,
        cwd=self.project.get_simulation_directory()
    )
    log.info("...done")
    log.info(
        str(len(self.files)) + ' file(s) processed in ' +
        utils.time_delta_string(start_time, time.time())
    )
    ######################################################################
    # Invoke simulation
    #   $ vvp [flags] foo.vvp [extended args]
    # Extended Args:
    #   -none/-vcd-none/-vcd-off/-fst-none
    #   -fst
    #   -lxt/lxt2
    #   -sdf-warn
    #   -sdf-info
    #   -sdf-verbose
    extended_args = [
        '-none', '-vcd-none', '-vcd-off', '-fst-none', '-fst', '-lxt',
        '-lxt2', '-sdf-warn', '-sdf-info', '-sdf-verbose'
    ]
    ######################################################################
    # Get user specified args; split them into vvp flags and the
    # extended args that must follow the target binary.
    args = self.project.get_tool_arguments(self.name, 'simulate')
    flags = list(filter(lambda x: x not in extended_args, args))
    extended = list(filter(lambda x: x in extended_args, args))
    args = flags
    # Target application
    args += ['icarus_sim']
    # Extended Args
    args += extended
    # Run the simulation
    ret, stdout, stderr = Iverilog._call(
        self.vvp,
        args,
        cwd=self.project.get_simulation_directory(),
        quiet=False
    )
    return ret, stdout, stderr