Example #1
def add_sim_modules_to_project(tags, sim_dict, user_paths):
    #utils.pretty_print_dict(tags)
    #utils.pretty_print_dict(sim_dict)
    #Get the directory of where to put the sim modules
    base_dir = utils.resolve_path(tags["BASE_DIR"])
    project_dir = tags["PROJECT_NAME"]
    out_dir = os.path.join(base_dir, "sim", "sim_modules")
    if not os.path.exists(out_dir):
        utils.create_dir(out_dir)


    #Find all the file locations
    module_filename = utils.find_module_filename(sim_dict["name"], user_paths)
    module_filepath = utils.find_rtl_file_location(module_filename, user_paths)

    out_file_path = os.path.join(out_dir, module_filename)
    #print "copy %s > %s" % (module_filepath, out_file_path)
    shutil.copy2(module_filepath, out_file_path)

    #Get the locations for each of the auxiliary files
    for f in sim_dict["aux_files"]:
        module_path = utils.find_rtl_file_location(f, user_paths)
        out_file_path = os.path.join(out_dir, f)
        #print "copy %s > %s" % (module_path, out_file_path)
        shutil.copy2(module_path, out_file_path)
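
A minimal call-site sketch for the example above (not from the original source); the tag keys mirror what the function reads, while the module name and paths are hypothetical.

#Hypothetical usage sketch; tag values, module name and paths are invented
tags = {"BASE_DIR": "~/projects/example_project", "PROJECT_NAME": "example_project"}
sim_dict = {"name": "wb_gpio", "aux_files": []}
user_paths = ["/home/user/hdl/rtl"]
add_sim_modules_to_project(tags, sim_dict, user_paths)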
Example #2
def generate_sub_slave_dict(sim_dict, debug = False):
    #Get sim module tags
    filename = utils.find_module_filename(sim_dict["name"])
    filepath = utils.find_rtl_file_location(filename)
    sim_tags = vutils.get_module_tags(filepath)
    bind_dict = sim_tags["ports"]
    sim_tags["bind"] = {}
    #Go through each dictionary entry and determine the direction
    for signal in sim_dict["bind"]:
        #XXX: Don't support subset of busses yet
        sim_tags["bind"][signal] = {}
        sim_tags["bind"][signal]["loc"] = sim_dict["bind"][signal]
        sim_tags["bind"][signal]["direction"] = get_bind_direction(signal, sim_tags)

    return sim_tags
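
A short, hypothetical call for the example above; the module name and the signal binding are invented and only illustrate the expected sim_dict shape.

#Hypothetical usage sketch; "wb_gpio" and the binding value are invented
sim_dict = {
    "name": "wb_gpio",
    "bind": {"gpio_out": "gpio[1:0]"}
}
sim_tags = generate_sub_slave_dict(sim_dict)
print sim_tags["bind"]["gpio_out"]["direction"]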
Example #3
    def generate_sub_module(self,
                            invert_reset,
                            instance_name,
                            config_tags,
                            module_tags = None,
                            enable_unique_ports = True,
                            debug = False):


        if module_tags is None:
            filepath = utils.find_rtl_file_location(config_tags["filename"],
                                                    self.user_paths)
            module_tags = vutils.get_module_tags(filepath,
                                                user_paths = self.user_paths)
        #if debug:
            #print "Module Tags:"
            #utils.pretty_print_dict(module_tags)

        buf =  "//Module %s (  %s  )\n" % (module_tags["module"], instance_name)
        buf += "\n"
        prename = ""
        if enable_unique_ports:
            prename = instance_name
        buf += self.generate_sub_module_wires(invert_reset, prename, module_tags)


        buf += vutils.generate_module_port_signals(invert_reset = invert_reset,
                                                   name = instance_name,
                                                   prename = prename,
                                                   slave_tags = config_tags,
                                                   module_tags = module_tags)

        #Add this module's bindings to the bind dictionary
        if "bind" in config_tags:
            for bind in config_tags["bind"]:
                bname = bind
                if len(prename) > 0:
                    bname = "%s_%s" % (prename, bind)
                self.bindings[bname] = config_tags["bind"][bind]

        return buf
Example #4
    def generate_sub_module(self,
                            invert_reset,
                            instance_name,
                            config_tags,
                            module_tags=None,
                            enable_unique_ports=True,
                            debug=False):

        if module_tags is None:
            filepath = utils.find_rtl_file_location(config_tags["filename"],
                                                    self.user_paths)
            module_tags = vutils.get_module_tags(filepath,
                                                 user_paths=self.user_paths)
        #if debug:
        #print "Module Tags:"
        #utils.pretty_print_dict(module_tags)

        buf = "//Module %s (  %s  )\n" % (module_tags["module"], instance_name)
        buf += "\n"
        prename = ""
        if enable_unique_ports:
            prename = instance_name
        buf += self.generate_sub_module_wires(invert_reset, prename,
                                              module_tags)

        buf += vutils.generate_module_port_signals(invert_reset=invert_reset,
                                                   name=instance_name,
                                                   prename=prename,
                                                   slave_tags=config_tags,
                                                   module_tags=module_tags)

        #Add this module's bindings to the bind dictionary
        if "bind" in config_tags:
            for bind in config_tags["bind"]:
                bname = bind
                if len(prename) > 0:
                    bname = "%s_%s" % (prename, bind)
                self.bindings[bname] = config_tags["bind"][bind]

        return buf
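
A hedged sketch of how generate_sub_module might be invoked; the object that owns it is not shown in these examples, so top_gen below is a stand-in, and the config values are invented.

#Hypothetical usage sketch; top_gen stands in for the (unshown) owning object
config_tags = {"filename": "wb_gpio.v", "bind": {}}
buf = top_gen.generate_sub_module(invert_reset=False,
                                  instance_name="gpio0",
                                  config_tags=config_tags)
print buf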
Example #5
  def get_list_of_dependencies(self, filename, debug=False):
    """get_list_of_dependencies

    return a list of the files that this file depends on

    Args:
      filename: the name of the file to analyze

    Return:
      A list of files that specify the dependencies

    Raises:
      IOError
    """
    deps = []
    if debug:
      print "input file: " + filename
    #filename needs to be a verilog file
    if (filename.partition(".")[2] != "v"):
      if debug:
        print "File is not a recognized verilog source"
      return False

    fbuf = ""
    #the name is a verilog file, try and open it
    try:
      filein = open(filename)
      fbuf = filein.read()
      filein.close()
    except IOError as err:
      #if debug:
      #  print "the file is not a full path... searching RTL"
      #didn't find with full path, search for it
      try:
        filepath = utils.find_rtl_file_location(filename, self.user_paths)

        filein = open(filepath)
        fbuf = filein.read()
        filein.close()
      except IOError as err_int:
        ModuleFactoryError("Couldn't find file %s in the RTL directory" % filename)


    #we have an open file!
    if debug:
      print "found file!"

    #strip out everything we can't use
    fbuf = utils.remove_comments(fbuf)

    include_fbuf = fbuf
    #search for `include
    while (not len(include_fbuf.partition("`include")[2]) == 0):
      ifile_name = include_fbuf.partition("`include")[2]
      ifile_name = ifile_name.splitlines()[0]
      ifile_name = ifile_name.strip()
      ifile_name = ifile_name.strip("\"")
      if debug:
        print "found an include " + ifile_name + " ",
      if (ifile_name not in self.verilog_dependency_list) and (ifile_name not in self.verilog_file_list):
        self.verilog_dependency_list.append(ifile_name)
        if debug:
          print "adding " + ifile_name + " to the dependency list"
      else:
        if debug:
          print "... already in have it"
      include_fbuf = include_fbuf.partition("`include")[2]

    #remove the ports list and the module name
    fbuf = fbuf.partition(")")[2]

    #modules have lines that start with a '.'
    str_list = fbuf.splitlines()

    module_token = ""
    done = False
    while (not done):
      for i in range (0, len(str_list)):
        line = str_list[i]
        #remove white spaces
        line = line.strip()
        if (line.startswith(".") and line.endswith(",")):
          #if debug:
          #  print "found a possible module... with token: " + line
          module_token = line
          break
        #check if we reached the last line
        if (i >= len(str_list) - 1):
          done = True

      if (not done):
        #found a possible module
        #partition the fbuf
        #if debug:
        #  print "module token " + module_token
        module_string = fbuf.partition(module_token)[0]
        fbuf = fbuf.partition(module_token)[2]
        fbuf = fbuf.partition(";")[2]
        str_list = fbuf.splitlines()

        #get rid of everything before the possible module
        while (len(module_string.partition(";")[2]) > 0):
          module_string = module_string.partition(";")[2]

        module_string = module_string.partition("(")[0]
        module_string = module_string.strip("#")
        module_string = module_string.strip()

        m_name = module_string.partition(" ")[0]
        if debug:
          print "module name: " + m_name

        if (m_name not in deps):
          if debug:
            print "adding it to the deps list"
          deps.append(m_name)


        #mlist = module_string.splitlines()
        #work backwords
        #look for the last line that has a '('
        #for i in range (0, len(mlist)):
        #  mstr = mlist[i]
        #  print "item: " + mlist[i]
        #  #mstr = mlist[len(mlist) - 1 - i]
        #  #mstr = mstr.strip()
        #  if (mstr.__contains__(" ")):
        #    if debug:
        #      print "found: " + mstr.partition(" ")[0]
        #    deps.append(mstr.partition(" ")[0])
        #    break


    return deps
Example #6
  def has_dependencies(self, filename, debug = False):
    """has_dependencies

    returns true if the file specified has dependencies

    Args:
      filename: search for dependencies with this filename

    Return:
      True: The file has dependencies.
      False: The file doesn't have dependencies

    Raises:
      IOError
    """
    if debug:
      print "input file: " + filename
    #filename needs to be a verilog file
    if (filename.partition(".")[2] != "v"):
      if debug:
        print "File is not a recognized verilog source"
      return False

    fbuf = ""

    #the name is a verilog file, try and open it
    try:
      filein = open(filename)
      fbuf = filein.read()
      filein.close()
    except IOError as err:
      if debug:
        print "the file is not a full path, searching RTL... ",
      #didn't find with full path, search for it
      try:
        #print "self.user_paths: %s" % (self.user_paths)
        filepath = utils.find_rtl_file_location(filename, self.user_paths)

        filein = open(filepath)
        fbuf = filein.read()
        filein.close()
      except ModuleNotFound as err:
        fbuf = ""
      except IOError as err_int:
        if debug:
          print "couldn't find file in the RTL directory"
        ModuleFactoryError("Couldn't find file %s in the RTL directory" % filename)


    #we have an open file!
    if debug:
      print "found file!"

    #strip out everything we can't use
    fbuf = utils.remove_comments(fbuf)

    #modules have lines that start with a '.'
    str_list = fbuf.splitlines()

    for item in str_list:
      item = item.strip()
      if (item.startswith(".")):
        if debug:
          print "found a module!"
        return True
    return False
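
A hedged sketch combining the two methods above; it assumes they belong to the ModuleProcessor class used in the later examples, and the filename and path are invented.

#Hypothetical usage sketch; assumes ModuleProcessor owns these methods
mp = ModuleProcessor(user_paths = ["/home/user/hdl/rtl"])
if mp.has_dependencies("wb_gpio.v"):
    for dep in mp.get_list_of_dependencies("wb_gpio.v"):
        print "dependency: " + dep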
Example #7
  def process_file(self, filename, file_dict, directory="", debug=False):
    """process_file

    read in a file, modify it (if necessary), then write it to the location
    specified by the directory variable

    Args:
      filename: the name of the file to process
      file_dict: dictionary associated with this file
      directory: output directory

    Return:

    Raises:
      ModuleFactoryError
      IOError

    """
    verbose = False
    debug = False
    if (filename.endswith(".v")):
        self.verilog_file_list.append(filename)

    if debug:
        print "in process file"
        print "\t%s" % filename
    #maybe load a tags??

    #using the location value in the file_dict find the file and
    #pull it into a buf

    self.buf = ""
    file_location = ""
    paths = self.user_paths


    #There are two types of files
    #ones that are copied over from a location
    #ones that are generated by scripts

    #The file is specified by a location and basically needs to be copied over
    if file_dict.has_key("location"):
        #print "Location: %s" % file_dict["location"]
        #file_location = os.path.join( utils.nysa_base,
        loc = file_dict["location"].split("/")
        #print "Loc list: %s" % str(loc)
        if loc[0] == "${NYSA}":
            loc[0]  = utils.nysa_base


        #print "Loc list: %s" % str(loc)

        file_location = "/"
        for d in loc:
            file_location = os.path.join(file_location, d)

        if (debug):
            print ("getting file: " + filename + " from location: " + file_location)

        found_file = False
        try:
            filein = open(os.path.join(utils.resolve_path(file_location), filename))
            self.buf = filein.read()
            filein.close()
            found_file = True
        except IOError as err:
            pass

        if not found_file:
            if debug:
                print "searching for file...",
            try:
                absfilename = utils.find_rtl_file_location(filename, self.user_paths)
                filepath = os.path.dirname(os.path.dirname(absfilename))
                paths.insert(0, filepath)
                paths = list(set(paths))

                filein = open(absfilename)
                self.buf = filein.read()
                filein.close()

            except:
                if debug:
                    print "Failed to find file"
                raise ModuleFactoryError("File %s not found searched %s and in the HDL dir (%s)" %  (filename, \
                                          file_location, \
                                          utils.nysa_base + os.path.sep + "cbuilder" + os.path.sep + "verilog"))


        if verbose:
          print "found file!"
          print "file content: " + self.buf

    #File is generated by a script
    elif (not file_dict.has_key("gen_script")):
      raise ModuleFactoryError( "File %s does not declare a location or a \
                                  script! Check the template file" % filename)

    if verbose:
      print "Project name: " + self.tags["PROJECT_NAME"]

    #if the generation flag is set in the dictionary
    if "gen_script" in file_dict:
      if debug:
        print "found the generation script"
        print "run generation script: " + file_dict["gen_script"]
      #open up the new gen module
      ms = sys.modules.keys()
      gs = ""
      for m in ms:
          if m.endswith("gen_scripts"):
              gs = m
      #print "gs: %s" % gs


      cl = __import__("%s.gen" % gs, fromlist=[gs])
      #cl = importlib.import_module("gen_scripts", "gen")
      #if debug:
      #  print "cl: " + str(cl)
      Gen = getattr(gen, "Gen")
      if debug:
        print "Gen: " + str(Gen)
      self.gen_module = __import__("%s.%s" % (gs, file_dict["gen_script"]), fromlist=[gs])
      gen_success_flag = False

      #find the script and dynamically add it
      for name in dir(self.gen_module):
        obj = getattr(self.gen_module, name)
  #      print "object type: " + str(obj)
#XXX: debug section start
        if verbose:
          print "name: " + name
        if isclass(obj):
          if verbose:
            print "\tobject type: " + str(obj)
            print "\tis class"
          if issubclass(obj, cl.Gen):
            if verbose:
              print "\t\tis subclass"
#XXX: debug section end
        if isclass(obj) and issubclass(obj, cl.Gen) and obj is not cl.Gen:
          self.gen = obj()
          if verbose:
            print "obj = " + str(self.gen)

          self.buf = self.gen.gen_script(tags = self.tags, buf = self.buf, user_paths = self.user_paths)
          gen_success_flag = True

      if not gen_success_flag:
        raise ModuleFactoryError("Failed to execute the generation script %s" %
                                  file_dict["gen_script"])
    else:
      #no script to execute, just tags
      self.apply_tags()

    if verbose:
      print self.buf
    if (len(self.buf) > 0):
      result = self.write_file(directory, filename)

    if self.has_dependencies(filename):
      deps = self.get_list_of_dependencies(filename)
      for d in deps:
        try:
          f = utils.find_module_filename(d, self.user_paths)
          if (len(f) == 0):
            print "Error: couldn't find dependency filename"
            continue
          if (f not in self.verilog_dependency_list and
            f not in self.verilog_file_list):
            if debug:
              print "found dependency: " + f
            self.verilog_dependency_list.append(f)
        except ModuleNotFound as err:
          continue
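
A hedged sketch of the two file_dict shapes process_file distinguishes: a "location" entry that is copied over, and a "gen_script" entry that is generated. ModuleProcessor, set_tags and the ${NYSA} prefix come from the surrounding examples; the filenames, tag values and script name are invented.

#Hypothetical usage sketch; filenames, tag values and the gen script name are invented
mp = ModuleProcessor(user_paths = ["/home/user/hdl/rtl"])
mp.set_tags({"PROJECT_NAME": "example_project"})
mp.process_file("wb_gpio.v",
                {"location": "${NYSA}/cbuilder/verilog/wishbone/slave/wb_gpio"},
                directory = "/home/user/out/rtl/bus/slave")
mp.process_file("top.v",
                {"gen_script": "gen_top"},
                directory = "/home/user/out/rtl")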
Example #8
    def generate_project(self, config_filename, output_directory = None):
        """Generate the folders and files for the project

        Using the project tags and template tags this function generates all
        the directories and files of the project. It will go through the template
        structure and determine what files need to be added and call either
        a generation script (in the case of \"top.v\") or simply copy the file
        over (in the case of a peripheral or memory module).

        Args:
          config_filename: name of the JSON configuration file
          output_directory: Path to override default output directory

        Return:
          True: Success
          False: Failure

        Raises:
          TypeError
          IOError
          SapError
        """
        status = self.s
        if status: status.Debug("Openning site manager")

        sm = site_manager.SiteManager()
        path_dicts = sm.get_paths_dict()

        self.read_config_file(config_filename)
        path_dict = sm.get_paths_dict()
        if output_directory is not None:
            self.project_tags["BASE_DIR"] = output_directory

        board_dict = utils.get_board_config(self.project_tags["board"], debug = False)
        cfiles = []
        cpaths = []

        self.user_paths.extend(utils.get_local_verilog_paths())
        self.user_paths = list(set(self.user_paths))
        if "paths" in board_dict:
            self.user_paths.extend(board_dict["paths"])
            self.user_paths = list(set(self.user_paths))

        if "parent_board" in board_dict:
            self.user_paths.extend(get_parent_board_paths(board_dict))
            self.user_paths = list(set(self.user_paths))

        if self.s: self.s.Verbose("Paths:")
        for p in self.user_paths:
            if self.s: self.s.Verbose("\t%s" %p)

        # Go through the board dict and see if there is anything that needs to be
        # incorporated into the project tags
        for key in board_dict:
            if key not in self.project_tags:
                self.project_tags[key] = board_dict[key]
            elif isinstance(self.project_tags[key], OrderedDict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]
            elif isinstance(self.project_tags[key], list):
                self.project_tags[key].extend(board_dict[key])
            elif isinstance(self.project_tags[key], dict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]

        self.filegen = ModuleProcessor(user_paths = self.user_paths)

        pt = self.project_tags
        if "constraint_files" not in pt.keys():
            pt["constraint_files"] = []

        cfiles = pt["constraint_files"]
        for c in cfiles:
            board = self.project_tags["board"]
            try:
                cpaths.append(utils.get_constraint_file_path(board, c))
            except IBuilderError as e:
                if self.s: self.s.Verbose("Could not find constraint: %s in default board searching parent board..." % c)
                #path = os.path.join(utils.get_board_directory(board), board)
                board_dict = utils.get_board_config(board)

                mname = "default.json"
                if "default_project" in board_dict:
                    mname = board_dict["default_project"]

                if "parent_board" in board_dict:
                    for parent in board_dict["parent_board"]:
                        if self.s: self.s.Verbose("\tsearching: %s @ %s..." % (parent, utils.get_board_directory(parent)))
                        filepath = utils.get_constraint_file_path(parent, c)
                        if filepath is None:
                            if self.s: self.s.Verbose("Did not file file: %s in parent board" % (c))
                        else:
                            if self.s: self.s.Verbose("Found file, located at: %s" % filepath)
                            cpaths.append(utils.get_constraint_file_path(parent, c, debug = True))

        #if the user didn't specify any constraint files
        #load the default
        if len(cfiles) == 0:
            if status: status.Debug("loading default constraints for: %s" % board_dict["board_name"])
            cfiles = board_dict["default_constraint_files"]
            for c in cfiles:
                cpaths.append(utils.get_constraint_file_path(self.project_tags["board"], c))
                #cpaths.append(utils.get_constraint_file_path(c))


        #extrapolate the bus template
        clock_rate = ""
        if "clockrate" in board_dict:
            if self.s: self.s.Info("User Specified a clockrate of: %d" % board_dict["clockrate"])
            clock_rate = str(board_dict["clockrate"])
        if len(clock_rate) == 0:
            for c in cpaths:
                clock_rate = utils.read_clock_rate(c)
                if len(clock_rate) > 0:
                    #First place I can find the clock rate drop out
                    break

        if len(clock_rate) == 0:
            if self.s: self.s.Fatal("Unable to find the clock rate in any of the constraint "
                      "files: %s" % str(cpaths))
            raise PGE("Unable to find the clock rate in any of the constraint "
                      "files: %s" % str(cpaths))

        #self.project_tags["CLOCK_RATE"] = utils.read_clock_rate(cpaths[0])
        self.project_tags["CLOCK_RATE"] = clock_rate
        self.read_template_file(self.project_tags["TEMPLATE"])

        #set all the tags within the filegen structure
        if status: status.Verbose("set all tags wihin filegen structure")
        self.filegen.set_tags(self.project_tags)

        #generate the project directories and files
        utils.create_dir(self.project_tags["BASE_DIR"])
        if status: status.Verbose("generated project base direcotry: %s" %
            utils.resolve_path(self.project_tags["BASE_DIR"]))

        #generate the arbiter tags, this is important because the top
        #needs the arbiter tags
        arb_tags = arbiter.generate_arbiter_tags(self.project_tags, False)
        self.project_tags["ARBITERS"] = arb_tags

        #print "Parent dir: " + self.project_tags["BASE_DIR"]
        for key in self.template_tags["PROJECT_TEMPLATE"]["files"]:
            self.recursive_structure_generator(
                    self.template_tags["PROJECT_TEMPLATE"]["files"],
                    key,
                    self.project_tags["BASE_DIR"])

        if status: status.Verbose("finished generating project directories")

        if arbiter.is_arbiter_required(self.project_tags):
            if status: status.Verbose("generate the arbiters")
            self.generate_arbiters()

        #Generate all the slaves
        for slave in self.project_tags["SLAVES"]:
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "slave")
            #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave"
            fn = self.project_tags["SLAVES"][slave]["filename"]
            try:
                #self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = True)
                self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = False)
            except ModuleFactoryError as err:
                if status: status.Error("ModuleFactoryError while generating slave: %s" % str(err))
                raise ModuleFactoryError(err)

            slave_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["SLAVES"][slave]:
                temp_paths = list(self.user_paths)
                temp_paths.append(slave_dir)

                for c in self.project_tags["SLAVES"][slave]["constraint_files"]:
                    file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "constraints", c))

            if "cores" in self.project_tags["SLAVES"][slave]:
                if status: status.Verbose("User Specified an core(s) for a slave")
                for c in self.project_tags["SLAVES"][slave]["cores"]:

                    file_location = os.path.join(slave_dir, os.pardir, "cores", c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", c))

            #each slave

        if "MEMORY" in self.project_tags:
            for mem in self.project_tags["MEMORY"]:
                fdict = {"location":""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "mem")
                #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/mem"
                fn = self.project_tags["MEMORY"][mem]["filename"]
                try:
                    self.filegen.process_file(filename = fn, file_dict = fdict, directory = file_dest)
                except ModuleFactoryError as err:
                    if status: status.Error("ModuleFactoryError while generating memory: %s" % str(err))
                    raise ModuleFactoryError(err)

                mem_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
                if "constraint_files" in self.project_tags["MEMORY"][mem]:
                    temp_paths = list(self.user_paths)
                    temp_paths.append(mem_dir)

                    for c in self.project_tags["MEMORY"][mem]["constraint_files"]:
                        file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                        dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                        shutil.copy(file_location, os.path.join(dest_path, "constraints", c))

                if "cores" in self.project_tags["MEMORY"][mem]:
                    if status: status.Verbose("User specified core(s) for a mem")
                    for c in self.project_tags["MEMORY"][mem]["cores"]:

                        file_location = os.path.join(mem_dir, os.pardir, "cores", c)
                        if not os.path.exists(file_location):
                            raise PGE("Core: %s does not exist" % file_location)
                        dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                        shutil.copy(file_location, os.path.join(dest_path, "cores", c))



        '''
        if 'infrastructure' in self.project_tags:
            if status: status.Verbose("User Specified an infrastructure(s)")
            for entry in self.project_tags["infrastructure"]:
                name = entry.keys()[0]
                im = entry[name]
                path = utils.get_board_directory(name)
                path = os.path.join(path, name, "infrastructure", im["filename"])

                ftdict = {"location":path}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "infrastructure")
                fn = im["filename"]
                self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest)
        '''

        if "cores" in self.project_tags:
            if status: status.Verbose("User Specified an core(s)")
            for entry in self.project_tags["cores"]:
                name = entry.keys()[0]
                for core in entry[name]:
                    file_location = None
                    path = utils.get_board_directory(name)
                    path = os.path.join(path, name, "cores")
                    for root, dirs, files in os.walk(path):
                        if core in files:
                            file_location =  os.path.join(root, core)
                            break
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", core))

        #Copy the user specified constraint files to the constraints directory
        for constraint_fname in cfiles:
            abs_proj_base = utils.resolve_path(self.project_tags["BASE_DIR"])
            constraint_path = utils.get_constraint_file_path(self.project_tags["board"], constraint_fname)
            if os.path.exists(constraint_fname):
                constraint_fname = os.path.split(constraint_fname)[-1]
            #constraint_path = constraint_fname
            if len(constraint_path) == 0:
                print ("Couldn't find constraint: %s, searched in the current directory and %s/hdl/%s" %
                    (constraint_fname, abs_proj_base, self.project_tags["board"]))
                continue
            shutil.copy (constraint_path, os.path.join(abs_proj_base, "constraints", constraint_fname))
            #shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname)

        #Generate the IO handler
        interface_filename = self.project_tags["INTERFACE"]["filename"]
        fdict = {"location":""}
        #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface"
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "interface")
        result = self.filegen.process_file(filename = interface_filename, file_dict=fdict , directory=file_dest)

        if status:
            status.Verbose("verilog files: ")
            for f in self.filegen.verilog_file_list:
                status.Verbose("\t%s" % f)
                #if len(self.filegen.verilog_dependency_list) > 0:
                #    status.Verbose("\t\tdependent files: ")
        if status: status.Verbose("copy over the dependencies...")
        for d in self.filegen.verilog_dependency_list:
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
            result = self.filegen.process_file(filename = d, file_dict = fdict, directory = file_dest, debug = True)
            if status: status.Verbose("\tDependent File: %s" % d)

        if "dependencies" in self.project_tags:
            if status: status.Verbose("User Specified dependencies")
            for name in self.project_tags["dependencies"]:
                if status: status.Verbose("\tUser Dependent File: %s" % name)

                fdict = {"location":""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
                result = self.filegen.process_file(filename = name, file_dict = fdict, directory = file_dest, debug = True)



        return True
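
A heavily hedged driver sketch; the class that owns generate_project is not named in these examples, so ibuilder_project_generator is a placeholder for it, and the config filename and output path are invented.

#Hypothetical usage sketch; the class name, config file and output path are invented
pg = ibuilder_project_generator()
if pg.generate_project("project_config.json", output_directory = "~/projects/example_project"):
    print "project generated"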
Example #9
def get_list_of_dependencies(self, filename, debug=False):
    """get_list_of_dependencies

    return a list of the files that this file depends on

    Args:
      filename: the name of the file to analyze

    Return:
      A list of files that specify the dependencies

    Raises:
      IOError
    """
    deps = []
    if debug:
        print "input file: " + filename
    #filename needs to be a verilog file
    if (filename.partition(".")[2] != "v"):
        if debug:
            print "File is not a recognized verilog source"
        return False

    fbuf = ""
    #the name is a verilog file, try and open it
    try:
        filein = open(filename)
        fbuf = filein.read()
        filein.close()
    except IOError as err:
        #if debug:
        #  print "the file is not a full path... searching RTL"
        #didn't find with full path, search for it
        try:
            filepath = utils.find_rtl_file_location(filename, self.user_paths)

            filein = open(filepath)
            fbuf = filein.read()
            filein.close()
        except IOError as err_int:
            ModuleFactoryError("Couldn't find file %s in the RTL directory" %
                               filename)

    #we have an open file!
    if debug:
        print "found file!"

    #strip out everything we can't use
    fbuf = utils.remove_comments(fbuf)

    include_fbuf = fbuf
    #search for `include
    while (not len(include_fbuf.partition("`include")[2]) == 0):
        ifile_name = include_fbuf.partition("`include")[2]
        ifile_name = ifile_name.splitlines()[0]
        ifile_name = ifile_name.strip()
        ifile_name = ifile_name.strip("\"")
        if debug:
            print "found an include " + ifile_name + " ",
        if (ifile_name not in self.verilog_dependency_list
                and ifile_name not in self.verilog_file_list):
            self.verilog_dependency_list.append(ifile_name)
            if debug:
                print "adding " + ifile_name + " to the dependency list"
        else:
            if debug:
                print "... already in have it"
        include_fbuf = include_fbuf.partition("`include")[2]

    #remove the ports list and the module name
    fbuf = fbuf.partition(")")[2]

    #modules have lines that start with a '.'
    str_list = fbuf.splitlines()

    module_token = ""
    done = False
    while (not done):
        module_token = ""
        parameter_found = False
        parameter_flag = False
        parameter_debt = None
        for i in range(0, len(str_list)):
            line = str_list[i]
            #remove white spaces
            line = line.strip()
            if "#" in line:
                line = line.partition("#")[2]
                parameter_found = True

            if parameter_found:
                if parameter_debt == 0:
                    parameter_found = False
                    parameter_flag = True
                while ("(" in line) or (")" in line):
                    if "(" in line:
                        line = line.partition("(")[2]
                        if parameter_debt is None:
                            parameter_debt = 1
                        else:
                            parameter_debt += 1
                    else:
                        line = line.partition("(")[2]
                        parameter_debt -= 1

            if (line.startswith(".") and line.endswith(",")):
                #if debug:
                #  print "found a possible module... with token: " + line
                module_token = line
                continue
            if ";" in line and len(module_token) > 0:
                break
            #check if we reached the last line
            if (i >= len(str_list) - 1):
                done = True

        if (not done):
            module_string = fbuf.partition(module_token)[0]
            fbuf = fbuf.partition(module_token)[2]
            fbuf = fbuf.partition(";")[2]
            str_list = fbuf.splitlines()

            #get rid of everything before the possible module
            while (len(module_string.partition(";")[2]) > 0):
                module_string = module_string.partition(";")[2].strip()

            #Now we have a string that contains the module_type and name
            module_string = module_string.partition("(")[0].strip()
            m_name = ""
            if parameter_found:
                m_name = module_string.partition("#")[0].strip()
            else:
                m_name = module_string.partition(" ")[0].strip()

            if m_name not in deps:
                if debug:
                    print "adding it to the deps list"
                deps.append(m_name)

    return deps
Example #10
def has_dependencies(self, filename, debug=False):
    """has_dependencies

    returns true if the file specified has dependencies

    Args:
        filename: search for dependencies with this filename

    Return:
        True: The file has dependencies.
        False: The file doesn't have dependencies

    Raises:
        IOError
    """

    if debug:
        print "input file: " + filename
    #filename needs to be a verilog file
    if (filename.partition(".")[2] != "v"):
        if debug:
            print "File is not a recognized verilog source"
        return False

    fbuf = ""

    #the name is a verilog file, try and open it
    try:
        filein = open(filename)
        fbuf = filein.read()
        filein.close()
    except IOError as err:
        if debug:
            print "the file is not a full path, searching RTL... ",
        #didn't find with full path, search for it
        try:
            #print "self.user_paths: %s" % (self.user_paths)
            filepath = utils.find_rtl_file_location(filename, self.user_paths)

            filein = open(filepath)
            fbuf = filein.read()
            filein.close()
        except ModuleError as err:
            fbuf = ""
        except IOError as err_int:
            if debug:
                print "couldn't find file in the RTL directory"
            ModuleFactoryError("Couldn't find file %s in the RTL directory" %
                               filename)

    #we have an open file!
    if debug:
        print "found file!"

    #strip out everything we can't use
    fbuf = utils.remove_comments(fbuf)

    #modules have lines that start with a '.'
    str_list = fbuf.splitlines()

    for item in str_list:
        item = item.strip()
        if (item.startswith(".")):
            if debug:
                print "found a module!"
            return True
    return False
Example #11
def generate_define_table(filestring="", user_paths = [], debug = False):
    """Reads in a module as a buffer and returns a dictionary of defines

    Generates a table of defines that can be used to resolve values.
    If all the defines cannot be evaluated directly by the
    current module then this will search all the included modules

    Args:
      filestring: A buffer from the module's file

    Returns:
      A dictionary of defines

    Raises:
      PreProcessorError
    """
    define_dict = {}
    #from a file string find all the defines and generate an entry into a
    #dictionary
    filestring = utils.remove_comments(filestring)
    str_list = filestring.splitlines()

    for item in str_list:
        if debug: print "Working on: %s" % item
        item = item.strip()
        #look for include files
        if item.startswith("`include"):
            if debug: print "found an include: " + item
            #read in the include file, strip away the comments
            #then append everything to the end
            item = item.partition("`include")[2]
            item = item.strip()
            item = item.strip("\"")
            inc_file = utils.find_rtl_file_location(item, user_paths)
            if debug: print "include file location: " + inc_file

            #try and open the include file
            try:
                ifile = open(inc_file)
                fs = ifile.read()
                ifile.close()
            except:
                if item != "project_defines.v":
                    raise PreProcessorError("Error while attempting to the include file: %s" % inc_file)

            try:
                if debug:
                    print "got the new file string"
                include_defines = generate_define_table(fs, user_paths)
                if debug:
                    print "after include_define"
                    print "length of include defines: " + str(len(include_defines.keys()))
                for key in include_defines.keys():
                    #append the values found in the include back in the local dictionary
                    if debug: print "working on: " + key
                    if (not define_dict.has_key(key)):
                        define_dict[key] = include_defines[key]


                if debug: print "added new items onto the list"
##           except TypeError as terr:
##             print "Type Error: " + str(terr)
            except:
                if item != "project_defines.v":
                    raise PreProcessorError("Error while processing: %s: %s" %(item, sys.exc_info()[0]))
                #print "error while processing : ", item, ": ",  sys.exc_info()[0]
            continue

        if item.startswith("`define"):
            #if the string starts with `define split the name and value into the dictionary
##           if debug:
##             print "found a define: " + item
            item = item.partition("`define")[2]
            item = item.strip()
            if (len(item.partition(" ")[2]) > 0):
                name = item.partition(" ")[0].strip()
                value = item.partition(" ")[2].strip()
                if debug:
                    print "added " + name + "\n\tWith value: " + value
                define_dict[name] = value
                continue
            if (len(item.partition("\t")[2]) > 0):
                name = item.partition("\t")[0].strip()
                value = item.partition("\t")[2].strip()
                if debug:
                    print "added " + name + "\n\tWith value: " + value
                define_dict[name] = value
                continue
            if debug:
                print "found a define without a value: " + item

    return define_dict
Example #12
    def generate_project(self, config_filename, output_directory=None):
        """Generate the folders and files for the project

        Using the project tags and template tags this function generates all
        the directories and files of the project. It will go through the template
        structure and determine what files need to be added and call either
        a generation script (in the case of \"top.v\") or simply copy the file
        over (in the case of a peripheral or memory module).

        Args:
          config_filename: name of the JSON configuration file
          output_directory: Path to override default output directory

        Return:
          True: Success
          False: Failure

        Raises:
          TypeError
          IOError
          SapError
        """
        status = self.s
        if status: status.Debug("Openning site manager")

        sm = site_manager.SiteManager()
        path_dicts = sm.get_paths_dict()

        self.read_config_file(config_filename)
        path_dict = sm.get_paths_dict()
        if output_directory is not None:
            self.project_tags["BASE_DIR"] = output_directory

        board_dict = utils.get_board_config(self.project_tags["board"],
                                            debug=False)
        cfiles = []
        cpaths = []

        self.user_paths.extend(utils.get_local_verilog_paths())
        self.user_paths = list(set(self.user_paths))
        if "paths" in board_dict:
            self.user_paths.extend(board_dict["paths"])
            self.user_paths = list(set(self.user_paths))

        if "parent_board" in board_dict:
            self.user_paths.extend(get_parent_board_paths(board_dict))
            self.user_paths = list(set(self.user_paths))

        if self.s: self.s.Verbose("Paths:")
        for p in self.user_paths:
            if self.s: self.s.Verbose("\t%s" % p)

        # Go through the board dict and see if there is anything that needs to be
        # incorporated into the project tags
        for key in board_dict:
            if key not in self.project_tags:
                self.project_tags[key] = board_dict[key]
            elif isinstance(self.project_tags[key], OrderedDict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]
            elif isinstance(self.project_tags[key], list):
                self.project_tags[key].extend(board_dict[key])
            elif isinstance(self.project_tags[key], dict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]

        self.filegen = ModuleProcessor(user_paths=self.user_paths)

        pt = self.project_tags
        if "constraint_files" not in pt.keys():
            pt["constraint_files"] = []

        cfiles = pt["constraint_files"]
        for c in cfiles:
            board = self.project_tags["board"]
            try:
                cpaths.append(utils.get_constraint_file_path(board, c))
            except IBuilderError as e:
                if self.s:
                    self.s.Verbose(
                        "Could not find constraint: %s in default board searching parent board..."
                        % c)
                #path = os.path.join(utils.get_board_directory(board), board)
                board_dict = utils.get_board_config(board)

                mname = "default.json"
                if "default_project" in board_dict:
                    mname = board_dict["default_project"]

                if "parent_board" in board_dict:
                    for parent in board_dict["parent_board"]:
                        if self.s:
                            self.s.Verbose(
                                "\tsearching: %s @ %s..." %
                                (parent, utils.get_board_directory(parent)))
                        filepath = utils.get_constraint_file_path(parent, c)
                        if filepath is None:
                            if self.s:
                                self.s.Verbose(
                                    "Did not file file: %s in parent board" %
                                    (c))
                        else:
                            if self.s:
                                self.s.Verbose("Found file, located at: %s" %
                                               filepath)
                            cpaths.append(
                                utils.get_constraint_file_path(parent,
                                                               c,
                                                               debug=True))

        #if the user didn't specify any constraint files
        #load the default
        if len(cfiles) == 0:
            if status:
                status.Debug("loading default constraints for: %s" %
                             board_dict["board_name"])
            cfiles = board_dict["default_constraint_files"]
            for c in cfiles:
                cpaths.append(
                    utils.get_constraint_file_path(self.project_tags["board"],
                                                   c))
                #cpaths.append(utils.get_constraint_file_path(c))

        #extrapolate the bus template
        clock_rate = ""
        if "clockrate" in board_dict:
            if self.s:
                self.s.Info("User Specified a clockrate of: %d" %
                            board_dict["clockrate"])
            clock_rate = str(board_dict["clockrate"])
        if len(clock_rate) == 0:
            for c in cpaths:
                clock_rate = utils.read_clock_rate(c)
                if len(clock_rate) > 0:
                    #First place I can find the clock rate drop out
                    break

        if len(clock_rate) == 0:
            if self.s:
                self.s.Fatal(
                    "Unable to find the clock rate in any of the constraint"
                    "files: %s" % str(cpaths))
            raise PGE("Unable to find the clock rate in any of the constraint"
                      "files: %s" % str(cpaths))

        #self.project_tags["CLOCK_RATE"] = utils.read_clock_rate(cpaths[0])
        self.project_tags["CLOCK_RATE"] = clock_rate
        self.read_template_file(self.project_tags["TEMPLATE"])

        #set all the tags within the filegen structure
        if status: status.Verbose("set all tags wihin filegen structure")
        self.filegen.set_tags(self.project_tags)

        #generate the project directories and files
        utils.create_dir(self.project_tags["BASE_DIR"])
        if status:
            status.Verbose("generated project base direcotry: %s" %
                           utils.resolve_path(self.project_tags["BASE_DIR"]))

        #generate the arbiter tags, this is important because the top
        #needs the arbiter tags
        arb_tags = arbiter.generate_arbiter_tags(self.project_tags, False)
        self.project_tags["ARBITERS"] = arb_tags

        #print "Parent dir: " + self.project_tags["BASE_DIR"]
        for key in self.template_tags["PROJECT_TEMPLATE"]["files"]:
            self.recursive_structure_generator(
                self.template_tags["PROJECT_TEMPLATE"]["files"], key,
                self.project_tags["BASE_DIR"])

        if status: status.Verbose("finished generating project directories")

        if arbiter.is_arbiter_required(self.project_tags):
            if status: status.Verbose("generate the arbiters")
            self.generate_arbiters()

        #Generate all the slaves
        for slave in self.project_tags["SLAVES"]:
            fdict = {"location": ""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                     "bus", "slave")
            #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave"
            fn = self.project_tags["SLAVES"][slave]["filename"]
            try:
                #self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = True)
                self.filegen.process_file(filename=fn,
                                          file_dict=fdict,
                                          directory=file_dest,
                                          debug=False)
            except ModuleFactoryError as err:
                if status:
                    status.Error(
                        "ModuleFactoryError while generating slave: %s" %
                        str(err))
                raise ModuleFactoryError(err)

            slave_dir = os.path.split(
                utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["SLAVES"][slave]:
                temp_paths = list(self.user_paths)
                temp_paths.append(slave_dir)

                for c in self.project_tags["SLAVES"][slave][
                        "constraint_files"]:
                    file_location = utils.get_constraint_file_path(
                        self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(
                        self.project_tags["BASE_DIR"])
                    shutil.copy(file_location,
                                os.path.join(dest_path, "constraints", c))

            if "cores" in self.project_tags["SLAVES"][slave]:
                if status:
                    status.Verbose("User Specified an core(s) for a slave")
                for c in self.project_tags["SLAVES"][slave]["cores"]:

                    file_location = os.path.join(slave_dir, os.pardir, "cores",
                                                 c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(
                        self.project_tags["BASE_DIR"])
                    shutil.copy(file_location,
                                os.path.join(dest_path, "cores", c))

            #each slave

        if "MEMORY" in self.project_tags:
            for mem in self.project_tags["MEMORY"]:
                fdict = {"location": ""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                         "bus", "mem")
                #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/mem"
                fn = self.project_tags["MEMORY"][mem]["filename"]
                try:
                    self.filegen.process_file(filename=fn,
                                              file_dict=fdict,
                                              directory=file_dest)
                except ModuleFactoryError as err:
                    if status:
                        status.Error(
                            "ModuleFactoryError while generating memory: %s" %
                            str(err))
                    raise ModuleFactoryError(err)

                mem_dir = os.path.split(
                    utils.find_rtl_file_location(fn, self.user_paths))[0]
                if "constraint_files" in self.project_tags["MEMORY"][mem]:
                    temp_paths = list(self.user_paths)
                    temp_paths.append(mem_dir)

                    for c in self.project_tags["MEMORY"][mem]["constraint_files"]:
                        file_location = utils.get_constraint_file_path(
                            self.project_tags["board"], c, temp_paths)
                        dest_path = utils.resolve_path(
                            self.project_tags["BASE_DIR"])
                        shutil.copy(file_location,
                                    os.path.join(dest_path, "constraints", c))

                if "cores" in self.project_tags["MEMORY"][mem]:
                    if status:
                        status.Verbose("User specified core(s) for a mem")
                    for c in self.project_tags["MEMORY"][mem]["cores"]:

                        file_location = os.path.join(mem_dir, os.pardir,
                                                     "cores", c)
                        if not os.path.exists(file_location):
                            raise PGE("Core: %s does not exist" % file_location)
                        dest_path = utils.resolve_path(
                            self.project_tags["BASE_DIR"])
                        shutil.copy(file_location,
                                    os.path.join(dest_path, "cores", c))
        '''
        if 'infrastructure' in self.project_tags:
            if status: status.Verbose("User Specified an infrastructure(s)")
            for entry in self.project_tags["infrastructure"]:
                name = entry.keys()[0]
                im = entry[name]
                path = utils.get_board_directory(name)
                path = os.path.join(path, name, "infrastructure", im["filename"])

                ftdict = {"location":path}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "infrastructure")
                fn = im["filename"]
                self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest)
        '''

        if "cores" in self.project_tags:
            if status: status.Verbose("User Specified an core(s)")
            for entry in self.project_tags["cores"]:
                name = entry.keys()[0]
                for core in entry[name]:
                    file_location = None
                    path = utils.get_board_directory(name)
                    path = os.path.join(path, name, "cores")
                    for root, dirs, files in os.walk(path):
                        if core in files:
                            file_location = os.path.join(root, core)
                            break
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(
                        self.project_tags["BASE_DIR"])
                    shutil.copy(file_location,
                                os.path.join(dest_path, "cores", core))

        #Copy the user specified constraint files to the constraints directory
        for constraint_fname in cfiles:
            abs_proj_base = utils.resolve_path(self.project_tags["BASE_DIR"])
            constraint_path = utils.get_constraint_file_path(
                self.project_tags["board"], constraint_fname)
            if os.path.exists(constraint_fname):
                constraint_fname = os.path.split(constraint_fname)[-1]
            #constraint_path = constraint_fname
            if len(constraint_path) == 0:
                print(
                    "Couldn't find constraint: %s, searched in the current directory and %s/hdl/%s"
                    % (constraint_fname, abs_proj_base,
                       self.project_tags["board"]))
                continue
            shutil.copy(
                constraint_path,
                os.path.join(abs_proj_base, "constraints", constraint_fname))
            #shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname)

        #Generate the IO handler
        interface_filename = self.project_tags["INTERFACE"]["filename"]
        fdict = {"location": ""}
        #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface"
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus",
                                 "interface")
        result = self.filegen.process_file(filename=interface_filename,
                                           file_dict=fdict,
                                           directory=file_dest)

        if status:
            status.Verbose("verilog files: ")
            for f in self.filegen.verilog_file_list:
                status.Verbose("\t%s" % f)
                #if len(self.filegen.verilog_dependency_list) > 0:
                #    status.Verbose("\t\tdependent files: ")
        if status: status.Verbose("copy over the dependencies...")
        for d in self.filegen.verilog_dependency_list:
            fdict = {"location": ""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                     "dependencies")
            result = self.filegen.process_file(filename=d,
                                               file_dict=fdict,
                                               directory=file_dest,
                                               debug=True)
            if status: status.Verbose("\tDependent File: %s" % d)

        if "dependencies" in self.project_tags:
            if status: status.Verbose("User Specified dependencies")
            for name in self.project_tags["dependencies"]:
                if status: status.Verbose("\tUser Dependent File: %s" % name)

                fdict = {"location": ""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                         "dependencies")
                result = self.filegen.process_file(filename=name,
                                                   file_dict=fdict,
                                                   directory=file_dest,
                                                   debug=True)

        return True
Example #13
def generate_define_table(filestring="", user_paths=[], debug=False):
    """Reads in a module as a buffer and returns a dictionary of defines

    Generates a table of defines that can be used to resolve values.
    If all the defines cannot be evaluated directly by the
    current module then this will search all the included modules

    Args:
      filestring: A buffer from the module's file

    Returns:
      A dictionary of defines

    Raises:
      PreProcessorError
    """
    import utils
    define_dict = {}
    #from a file string find all the defines and generate an entry into a
    #dictionary
    filestring = utils.remove_comments(filestring)
    str_list = filestring.splitlines()

    for item in str_list:
        if debug: print "Working on: %s" % item
        item = item.strip()
        #look for include files
        if item.startswith("`include"):
            if debug: print "found an include: " + item
            #read in the include file, strip away the comments
            #then append everything to the end
            item = item.partition("`include")[2]
            item = item.strip()
            item = item.strip("\"")
            inc_file = utils.find_rtl_file_location(item, user_paths)
            if debug: print "include file location: " + inc_file

            #try and open the include file
            try:
                ifile = open(inc_file)
                fs = ifile.read()
                ifile.close()
            except:
                if item != "project_defines.v":
                    raise PreProcessorError(
                        "Error while attempting to the include file: %s" %
                        inc_file)

            try:
                if debug:
                    print "got the new file string"
                include_defines = generate_define_table(fs, user_paths)
                if debug:
                    print "after include_define"
                    print "length of include defines: " + str(
                        len(include_defines.keys()))
                for key in include_defines.keys():
                    #append the values found in the include back in the local dictionary
                    if debug: print "working on: " + key
                    if (not define_dict.has_key(key)):
                        define_dict[key] = include_defines[key]

                if debug: print "added new items onto the list"


##           except TypeError as terr:
##             print "Type Error: " + str(terr)
            except:
                if item != "project_defines.v":
                    raise PreProcessorError("Error while processing: %s: %s" %
                                            (item, sys.exc_info()[0]))
                #print "error while processing : ", item, ": ",  sys.exc_info()[0]
            continue

        if item.startswith("`define"):
            #if the string starts with `define split the name and value into the dictionary
            ##           if debug:
            ##             print "found a define: " + item
            item = item.partition("`define")[2]
            item = item.strip()
            if (len(item.partition(" ")[2]) > 0):
                name = item.partition(" ")[0].strip()
                value = item.partition(" ")[2].strip()
                if debug:
                    print "added " + name + "\n\tWith value: " + value
                define_dict[name] = value
                continue
            if (len(item.partition("\t")[2]) > 0):
                name = item.partition("\t")[0].strip()
                value = item.partition("\t")[2].strip()
                if debug:
                    print "added " + name + "\n\tWith value: " + value
                define_dict[name] = value
                continue
            if debug:
                print "found a define without a value: " + item

    return define_dict
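
A short, hypothetical call for the preprocessor example above; the Verilog filename and search path are invented.

#Hypothetical usage sketch; the filename and path are invented
fin = open("wb_gpio_defines.v")
filestring = fin.read()
fin.close()
defines = generate_define_table(filestring, user_paths = ["/home/user/hdl/rtl"])
for name in defines:
    print "%s = %s" % (name, defines[name])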