Example #1
    def find_or_declare_win32(self, lst):
        # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
        if isinstance(lst, str):
            lst = [x for x in Node.split_path(lst) if x and x != '.']

        node = self.get_bld().search(lst)
        if node:
            if not node.isfile_cached():
                node.sig = None
                try:
                    node.parent.mkdir()
                except OSError:
                    pass
            return node
        self = self.get_src()
        node = self.find_node(lst)
        if node:
            if not node.isfile_cached():
                node.sig = None
                try:
                    node.parent.mkdir()
                except OSError:
                    pass
            return node
        node = self.get_bld().make_node(lst)
        node.parent.mkdir()
        return node
Example #2
	def find_or_declare_win32(self, lst):
		# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
		if isinstance(lst, str):
			lst = [x for x in Node.split_path(lst) if x and x != '.']

		node = self.get_bld().search(lst)
		if node:
			if not node.isfile_cached():
				node.sig = None
				try:
					node.parent.mkdir()
				except OSError:
					pass
			return node
		self = self.get_src()
		node = self.find_node(lst)
		if node:
			if not node.isfile_cached():
				node.sig = None
				try:
					node.parent.mkdir()
				except OSError:
					pass
			return node
		node = self.get_bld().make_node(lst)
		node.parent.mkdir()
		return node
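Examples #1 and #2 are drop-in replacements for waf's Node.find_or_declare on Windows, written so the method can skip the os.path.isfile checks. A minimal sketch (not taken from either example) of how such a replacement is usually installed, assuming waf's waflib.Node and waflib.Utils modules and that find_or_declare_win32 is defined in the same tool file:

from waflib import Node, Utils

if Utils.is_win32:
	# swap in the cheaper Windows variant once, before any build starts
	Node.Node.find_or_declare = find_or_declare_win32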
Example #3
def scan_verilog_file(node,cache,debug=False):
    leaf = Node.split_path(node.abspath())[-1]

    stack = [node.abspath()]

    deps = []
    additionals = []

    # check whether external packages are referenced in this file
    packages_missing = cache['packages_used'][leaf] - cache['packages_defined'][leaf]
    for pak in packages_missing:
        package_found = False
        for f,packages in cache['packages_defined'].iteritems():
            if pak in packages:
                package_found = True
                deps.append(cache['nodes'][f])

        if not package_found:
            pass

    deps.extend(add_include_deps(stack,cache,cache['includes_used'][leaf]))


    if debug:
        print node,[x.abspath() for x in deps]
    return (deps,[])
Example #4
def scan_verilog_file(node, cache, debug=False):
    leaf = Node.split_path(node.abspath())[-1]

    stack = [node.abspath()]

    deps = []
    additionals = []

    # check whether external packages are referenced in this file
    packages_missing = cache["packages_used"][leaf] - cache["packages_defined"][leaf]
    for pak in packages_missing:
        package_found = False
        for f, packages in cache["packages_defined"].iteritems():
            if pak in packages:
                package_found = True
                deps.append(cache["nodes"][f])

        if not package_found:
            pass

    deps.extend(add_include_deps(stack, cache, cache["includes_used"][leaf]))

    if debug:
        print node, [x.abspath() for x in deps]
    return (deps, [])
Example #5
	def find_resource(self, lst):
		if isinstance(lst, str):
			lst = [x for x in Node.split_path(lst) if x and x != '.']

		node = self.get_bld().search(lst)
		if not node:
			self = self.get_src()
			node = self.search(lst)
			if not node:
				node = self.find_node(lst)
		return node
Example #6
    def find_resource(self, lst):
        if isinstance(lst, str):
            lst = [x for x in Node.split_path(lst) if x and x != '.']

        node = self.get_bld().search(lst)
        if not node:
            self = self.get_src()
            node = self.search(lst)
            if not node:
                node = self.find_node(lst)
        return node
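Examples #5 and #6 override Node.find_resource so that the build directory is searched first and the source tree is used as a fallback. A minimal call-site sketch, assuming a standard waf build function; 'src/main.c' is a made-up path:

def build(bld):
    # resolve a file that may live in the source tree or may already have
    # been generated into the build directory
    node = bld.path.find_resource('src/main.c')
    if node is None:
        bld.fatal('src/main.c not found')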
Example #7
    def __init__(self, base_path):
        if base_path:
            self.root = Node.Node('', None)
            self.base_node = self.root.make_node(base_path)
            self.srcnode = self.base_node.make_node('dev')
            self.bintemp_node = self.srcnode.make_node('BinTemp')
            self.path = self.srcnode.make_node('Code')

        self.env = {}
        self.project_overrides = {}
        self.file_overrides = {}
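Example #7 builds a small Node tree by hand instead of letting a waf context create it. A sketch of what the same chain of make_node calls yields, using a hypothetical base path on a POSIX system:

from waflib import Node

root = Node.Node('', None)              # anonymous filesystem root
base = root.make_node('/work/project')  # hypothetical base_path
src = base.make_node('dev')
print(src.abspath())                    # -> /work/project/dev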
Example #8
def process_autoconfig(self, node):
	out = node.change_ext('')

	out = Node.split_path(out.abspath())[-1]

	with open('appinfo.json') as appinfo_content:
		appinfo_json = json.load(appinfo_content, object_pairs_hook=collections.OrderedDict)

	out = self.bld.path.get_bld().make_node([str(out)])

	tsk = self.create_task('autoconfig', [node], [out])
	tsk.appinfo = appinfo_json

	if out.suffix() in ['.c']:
		self.source.append(out)
Example #9
def process_autoconfig(self, node):
    out = node.change_ext('')

    out = Node.split_path(out.abspath())[-1]

    with open('appinfo.json') as appinfo_content:
        appinfo_json = json.load(appinfo_content,
                                 object_pairs_hook=collections.OrderedDict)

    out = self.bld.path.get_bld().make_node([str(out)])

    tsk = self.create_task('autoconfig', [node], [out])
    tsk.appinfo = appinfo_json

    if out.suffix() in ['.c']:
        self.source.append(out)
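process_autoconfig is a task generator method: for each matching source file it creates one 'autoconfig' task and attaches the parsed appinfo to it. A sketch of how such a method is typically wired into a waf tool; the '.auto' extension and the task's run_str are assumptions, only the task name 'autoconfig' comes from the example:

from waflib import Task, TaskGen

class autoconfig(Task.Task):
    # placeholder command; a real implementation would render the template
    # using tsk.appinfo instead of copying the input
    run_str = 'cp ${SRC} ${TGT}'

# register the method above for a (hypothetical) '.auto' source extension
TaskGen.extension('.auto')(process_autoconfig)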
Example #10
def check_files(files,debug=False):
    packages_used = {}
    packages_defined = {}
    includes_used = {}
    nodes = {}

    for file in files:
        leaf = Node.split_path(file.abspath())[-1]
        nodes[leaf] = file
        # This is the basic check, that looks in the current file for:
        #  - packages defined
        #  - packages used/imported
        #  - files included
        with open(file.abspath(),'r') as input:
            packages_used[leaf] = set()
            packages_defined[leaf] = set()
            includes_used[leaf] = []
            if debug:
                print "Processing file:" + file.abspath()
            # look for used packages and packages that are defined in the input file
            for line in input:
                # Poor man's comment detection
                if line.find('//') == 0:
                    continue
                m0 = re.search('package\s+(\w+);', line)
                m1 = re.search('import\s+(\w+)[\s:]+', line)
                m2 = re.search('\W(\w+)::', line)
                m3 = re.search('`include\s+"([\w\.]+)"', line)
                if (m0 is not None):
                    packages_defined[leaf].add(m0.group(1))
                if (m1 is not None):
                    packages_used[leaf].add(m1.group(1))
                if (m2 is not None):
                    packages_used[leaf].add(m2.group(1))
                if (m3 is not None):
                    includes_used[leaf].append(m3.group(1))

            if debug:
                print "Packages used:"+" ".join(packages_used[leaf])
                print "Packages defined:"+" ".join(packages_defined[leaf])
                print "Includes defined:"+" ".join(includes_used[leaf])

    return nodes, packages_used, packages_defined, includes_used
Example #11
def check_files(files, debug=False):
    packages_used = {}
    packages_defined = {}
    includes_used = {}
    nodes = {}

    for file in files:
        leaf = Node.split_path(file.abspath())[-1]
        nodes[leaf] = file
        # This is the basic check, that looks in the current file for:
        #  - packages defined
        #  - packages used/imported
        #  - files included
        with open(file.abspath(), "r") as input:
            packages_used[leaf] = set()
            packages_defined[leaf] = set()
            includes_used[leaf] = []
            if debug:
                print "Processing file:" + file.abspath()
            # look for used packages and packages that are defined in the input file
            for line in input:
                # Poor man's comment detection
                if line.find("//") == 0:
                    continue
                m0 = re.search("package\s+(\w+);", line)
                m1 = re.search("import\s+(\w+)[\s:]+", line)
                m2 = re.search("\W(\w+)::", line)
                m3 = re.search('`include\s+"([\w\.]+)"', line)
                if m0 is not None:
                    packages_defined[leaf].add(m0.group(1))
                if m1 is not None:
                    packages_used[leaf].add(m1.group(1))
                if m2 is not None:
                    packages_used[leaf].add(m2.group(1))
                if m3 is not None:
                    includes_used[leaf].append(m3.group(1))

            if debug:
                print "Packages used:" + " ".join(packages_used[leaf])
                print "Packages defined:" + " ".join(packages_defined[leaf])
                print "Includes defined:" + " ".join(includes_used[leaf])

    return nodes, packages_used, packages_defined, includes_used
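check_files returns four dictionaries keyed by leaf file name. A sketch of how these results would be packed into the cache dictionary that scan_verilog_file (Examples #3/#4) expects; the key names are taken from the examples above, while sv_nodes stands for a hypothetical list of .sv file nodes:

nodes, packages_used, packages_defined, includes_used = check_files(sv_nodes)
cache = {
    'nodes': nodes,
    'packages_used': packages_used,
    'packages_defined': packages_defined,
    'includes_used': includes_used,
}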
Example #12
def find_or_declare(self, lst):
	if isinstance(lst, str):
		lst = [x for x in Node.split_path(lst) if x and x != '.']

	if lst[0].startswith('\\\\'):
		if len(lst) < 3:
			return None
		node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
		node.cache_isdir = True
		node.parent.cache_isdir = True
		ret = node.find_node(lst[2:])
		if not ret:
			ret = node.make_node(lst[2:])
		if not os.path.isfile(ret.abspath()):
			ret.sig = None
			ret.parent.mkdir()
		return ret

	return self.find_or_declare_orig(lst)
Example #13
def find_or_declare(self, lst):
    if isinstance(lst, str):
        lst = [x for x in Node.split_path(lst) if x and x != '.']

    if lst[0].startswith('\\\\'):
        if len(lst) < 3:
            return None
        node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
        node.cache_isdir = True
        node.parent.cache_isdir = True
        ret = node.find_node(lst[2:])
        if not ret:
            ret = node.make_node(lst[2:])
        if not os.path.isfile(ret.abspath()):
            ret.sig = None
            ret.parent.mkdir()
        return ret

    return self.find_or_declare_orig(lst)
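This override handles UNC paths (lst[0] starting with two backslashes) itself and delegates everything else to find_or_declare_orig, so the original method must be saved before the replacement is installed. A sketch of that wiring, assuming it happens once at tool load time:

from waflib import Node

# keep a reference to the stock implementation, then install the UNC-aware wrapper
Node.Node.find_or_declare_orig = Node.Node.find_or_declare
Node.Node.find_or_declare = find_or_declare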
Example #14
def scan_verilog_task(task):
    #print "Scanning Task "+str(task)
    #print "Includes: "+" ".join([x.abspath() for x in getattr(task.generator,'verilog_search_paths',[])])
    # create a database of all packages used and defined in all .sv files in all incdirs
    # this will later be used in the scanner for the individual task sources
    cache = get_sv_files_from_include_dirs(task.inputs,getattr(task.generator,'verilog_search_paths',[]))

    ret = ([],[])
    debug = False
    for inp in task.inputs:
        #DELME
        if Node.split_path(inp.abspath())[-1] == 'tb_top_miniasic_0.sv':
            #print inp.abspath()
            #debug = True
            pass
        #END DELME
        new_dep = scan_verilog_file(inp,cache,debug)
        if debug:
            print new_dep
        ret[0].extend(new_dep[0])
        ret[1].extend(new_dep[1])

    return ret
Example #15
def scan_verilog_task(task):
    # print "Scanning Task "+str(task)
    # print "Includes: "+" ".join([x.abspath() for x in getattr(task.generator,'verilog_search_paths',[])])
    # create a database of all packages used and defined in all .sv files in all incdirs
    # this will later be used in the scanner for the individual task sources
    cache = get_sv_files_from_include_dirs(task.inputs, getattr(task.generator, "verilog_search_paths", []))

    ret = ([], [])
    debug = False
    for inp in task.inputs:
        # DELME
        if Node.split_path(inp.abspath())[-1] == "tb_top_miniasic_0.sv":
            # print inp.abspath()
            # debug = True
            pass
        # END DELME
        new_dep = scan_verilog_file(inp, cache, debug)
        if debug:
            print new_dep
        ret[0].extend(new_dep[0])
        ret[1].extend(new_dep[1])

    return ret
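scan_verilog_task follows waf's scanner convention: it receives a task and returns a tuple of (dependency nodes, unresolved names). A sketch of how it would be attached to a task class; the class name and the command are assumptions:

from waflib import Task

class verilog_compile(Task.Task):
    run_str = 'vlog ${SRC}'     # placeholder command
    scan = scan_verilog_task    # waf calls this to compute implicit dependencies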
Example #16
def scan_synplify_project_file(self):
	"""This function extracts the output file and inputs files for synthesis from a synplify project (i.e. tcl) file."""

	result_file = None

	self.project_file_node = self.path.find_node(getattr(self,'project_file',None))
	if not self.project_file_node:
		raise Errors.ConfigurationError('Project file for synplify not found: '+getattr(self,'project_file',''))

	# help file
	project_file_name = os.path.split(self.project_file_node.abspath())[1]
	help_file = self.bld.bldnode.make_node('brick_'+project_file_name)
	with open(help_file.abspath(),'w') as hf:
		hf.write('set results_dir ./results')

	# open the project file template
	input = open(self.project_file_node.abspath(),'r')
	inputs = [self.project_file_node]
	# split the filename into parts
	project_file_split = Node.split_path(self.project_file_node.abspath())
	# create the target project file
	self.project_file_node = self.path.get_bld().make_node(os.path.join(self.path.bld_dir(),project_file_split[len(project_file_split)-1]))
	output = open(self.project_file_node.abspath(),'w')
	variables = {}
	outputs = []
	for line in input:
		# copy file line by line
		output.write(line)
		# skip comments
		if re.match('\s*#',line):
			continue
		# replace env variables
		get_env = re.search('\[\s*get_env\s+(\w+)\s*\]',line)
		if get_env:
			if get_env.group(1) not in self.env:
				raise Errors.ConfigurationError('The environment variable '+get_env.group(1)+' used in synplify project file '+self.project_file_node.abspath()+' has not been defined.')

			line = re.sub('\[\s*get_env\s+\w+\s*\]',self.env[get_env.group(1)],line)

		# keep the rest
		#  _
		#  |
		#  v
		#
		# look for the results file
		m0 = re.search('project\s+-result_file\s+"(.+)"',line)
		if m0:
			# check if the line contains a reference to a variable
			m0_1 = re.search('\$(\w+)',m0.group(1))
			if m0_1:
				try:
					result_file = re.sub('\$(\w+)',variables[m0_1.group(1)],m0.group(1))
				except KeyError:
					print "Variable "+m0_1.group(1)+" not found in "+self.project_file

				outputs.append(self.bld.bldnode.make_node(result_file))
			else:
				# if the result path is given as a relative path,
				# synplify save the results relative to the project_file path,
				# not relative to the path where the program is executed in
				outputs.append(self.bld.bldnode.make_node(m0.group(1)))


		# look for variables
		m3 = re.search('set\s+(.+?)\s+(.+)',line)
		if m3:
			m3_1 = re.search('\[\s*get_env\s+(.+)\s*\]',m3.group(2))
			if m3_1:
				variables[m3.group(1)] = self.env[m3_1.group(1)]
			else:
				variables[m3.group(1)] = m3.group(2)

	input.close()

	for file in getattr(self,'source_files',[]):
		node = self.path.find_node(file)
		if not node:
			raise Errors.ConfigurationError('File '+file+' not found in task ' + self.name)

		if node.suffix() == '.v':
			output.write('add_file -verilog "'+node.abspath()+'"\n')
		elif node.suffix() == '.sv' or node.suffix() == '.svh':
			output.write('add_file -verilog -vlog_std sysv "'+node.abspath()+'"\n')
		elif node.suffix() == '.vhd' or node.suffix() == '.vhdl':
			output.write('add_file -vhdl "'+node.abspath()+'"\n')
		elif node.suffix() == '.sdc':
			output.write('add_file -constraint "'+node.abspath()+'"\n')
		else:
			raise Errors.ConfigurationError('Extension of file '+node.abspath()+' unknown.')

		inputs.append(node)

	for directory in getattr(self,'include_paths',[]):
		node = self.path.find_dir(directory)
		if not node:
			raise Errors.ConfigurationError('Include directory '+directory+' not found in synplify task.')

		output.write('set_option -include_path "'+node.abspath()+'"\n')

	output.close()

	self.logfile = outputs[0].change_ext('.srr')
	outputs.append(outputs[0].change_ext('.ncf'))
	outputs.append(outputs[0].parent.make_node('synplicity.ucf'))

	self.logfile = self.env.BRICK_LOGFILES+'/'+Node.split_path(self.project_file_node.abspath())[-1]

	# generate synthesis task
	self.synplify_task = self.create_task('synplifyTask', inputs, outputs)
Example #17
def get_encounter_step_name(self):
	return Node.split_path(self.get_encounter_main_tcl_script().change_ext('').abspath())[-1]
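Example #17 derives a step name from the main Encounter tcl script: change_ext('') drops the file extension and Node.split_path(...)[-1] keeps the last path component. An illustration with a made-up path:

from waflib import Node

# hypothetical abspath() after change_ext('') has removed the '.tcl' suffix
p = '/work/encounter/floorplan'
print(Node.split_path(p)[-1])   # -> 'floorplan'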