Example #1
def restore_dfs_model(types, struct, meta, lib, use_cached_proto=True) :
	m = dfs.GraphModel()
	m.enable_logging = False
	fact = core.create_block_factory() if lib is None else lib
	load_to_dfs_model(m, types, struct, meta, fact, deserializing=True,
		use_cached_proto=use_cached_proto)
	m.enable_logging = True
	return m
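A minimal caller sketch for the helper above, assuming `types_`, `struct` and `meta` already hold deserialized data (the variable names are illustrative; in practice they come from the unpickling helpers shown in the later examples). Passing `lib=None` falls back to the default block factory:

import core

# Illustrative data names, not part of the original API.
lib = core.create_block_factory(scan_dir="library")
model = restore_dfs_model(types_, struct, meta, lib, use_cached_proto=False)
assert model.enable_logging  # logging is switched back on once loading is done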
Example #2
def main() :
	"""
	standalone entry point, looking for arguments in sys.argv
	"""
#	import argparse
#	parser = argparse.ArgumentParser(description="bloced")
#	parser.add_argument("file", metavar="fname", type=str, nargs=1,
#                   help="input file")
#	args = parser.parse_args()
#	fname = args.file[0]
	import serializer

	if len(sys.argv) != 3 :
		print("expected exactly 2 arguments")
		exit(100)

	action = sys.argv[1]
	fname = os.path.abspath(sys.argv[2])

	if os.path.splitext(fname)[1].lower() != ".w" :#TODO path separator
		print("formats other than .w are not supported anymore")
		exit(1)

	main_lib = core.create_block_factory(scan_dir=os.path.join(os.getcwd(), "library"))#TODO multiple search dirs
	local_lib = core.BasicBlocksFactory(load_basic_blocks=False)
	local_lib.load_standalone_workbench_lib(fname, "<local>")
#	print here(), local_lib.block_list
	library = core.SuperLibrary([main_lib, local_lib])

	w = dfs.Workbench(
#		lib_dir=os.path.join(os.getcwd(), "library"),
		passive=True)

#	library = w.blockfactory

	try :
		with open(fname, "rb") as f :
			serializer.unpickle_workbench(f, w, use_cached_proto=False, library=library)
	except :
		print("error loading workbench file")
		raise
#		exit(666)
	sheets = w.sheets
	global_meta = w.get_meta()

	if action == "c" :
		import ccodegen as cg
	elif action == "f" :
		import fcodegen as cg
	else :
		exit(666)

	implement_workbench(w, sheets, global_meta, cg, core.KNOWN_TYPES, library, sys.stdout)
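The commented-out argparse block above hints at a cleaner way to read the arguments; a minimal sketch of that idea which also covers the `action` argument (the argument names here are illustrative, not part of the original CLI):

import argparse
import os

def parse_args(argv=None) :
	# Illustrative replacement for the manual sys.argv check in main();
	# "c" selects ccodegen and "f" selects fcodegen, as in the original.
	parser = argparse.ArgumentParser(description="bloced")
	parser.add_argument("action", choices=("c", "f"), help="code generator to use")
	parser.add_argument("file", metavar="fname", type=str, help="input .w file")
	args = parser.parse_args(argv)
	return args.action, os.path.abspath(args.file)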
Example #3
def restore_dfs_model(types, struct, meta, lib, use_cached_proto=True):
    m = dfs.GraphModel()
    m.enable_logging = False
    fact = core.create_block_factory() if lib is None else lib
    load_to_dfs_model(m,
                      types,
                      struct,
                      meta,
                      fact,
                      deserializing=True,
                      use_cached_proto=use_cached_proto)
    m.enable_logging = True
    return m
Example #4
	def __init__(self, lib_dir=None,
			do_create_block_factory=True,
			blockfactory=None) :

		self.PERSISTENT = ( "__port", "__board" )

		self.blockfactory = blockfactory
		if do_create_block_factory :
			self.blockfactory = core.create_block_factory(
				scan_dir=lib_dir)

		self.lock = Lock()

		self.__sheets = {}
		self.__meta = {}
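Two construction modes follow from the signature above: let the workbench scan a library directory and build its own factory, or hand it an existing factory and disable the internal scan. A sketch, using the module names as they appear in the other examples here:

import os
import core
import dfs

# Let the constructor scan a library directory and build its own factory.
w = dfs.Workbench(lib_dir=os.path.join(os.getcwd(), "library"))

# Reuse an existing (singleton) factory instead; do_create_block_factory must
# be False, otherwise the factory passed in is immediately replaced.
shared_factory = core.create_block_factory(scan_dir="library")
w2 = dfs.Workbench(do_create_block_factory=False, blockfactory=shared_factory)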
Example #5
    if os.path.splitext(fname)[1].lower() == ".w":
        w = Workbench(lib_dir=os.path.join(os.getcwd(), "library"),
                      passive=True)
        blockfactory = w.blockfactory
        try:
            with open(fname, "rb") as f:
                unpickle_workbench(f, w)
        except:
            print("error loading workbench file")
            raise
#			exit(666)
        sheets = w.sheets
        global_meta = w.get_meta()
    else:
        blockfactory = create_block_factory(
            scan_dir=os.path.join(os.getcwd(), "library"))
        try:
            with open(fname, "rb") as f:
                model = unpickle_dfs_model(f, lib=blockfactory)
        except:
            print("error loading sheet file")
            exit(666)
        sheets = {"tsk": model}
        global_meta = {}

    out_fobj = StringIO()
    implement_workbench(sheets, global_meta, ccodegen, KNOWN_TYPES,
                        blockfactory, out_fobj)

    source = out_fobj.getvalue()
    print(source)
Example #6
	def test_create_block_factory(self) :
		factory_instance = core.create_block_factory(scan_dir="library")
		factory_instance2 = core.create_block_factory(scan_dir="library")
		self.assertEqual(factory_instance, factory_instance2)#is singleton
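assertEqual only checks equality; since the trailing comment says the factory is a singleton, assertIs states the identity check directly. A possible variant of the test above:

	def test_create_block_factory_is_singleton(self) :
		# assertIs verifies object identity, matching the singleton intent
		factory_instance = core.create_block_factory(scan_dir="library")
		factory_instance2 = core.create_block_factory(scan_dir="library")
		self.assertIs(factory_instance, factory_instance2)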
Example #7
def main() :
	started = time.time()

	files = get_files("./examples")
#	print here(), files

	main_lib = core.create_block_factory(scan_dir=os.path.join(os.getcwd(), "library"))

#	all_in_one_arduino_dir = self.config.get("Path", "all_in_one_arduino_dir")
	libc_dir, tools_dir, boards_txt, target_files_dir = build.get_avr_arduino_paths()

	failed = []
	succeeded = []

	for fname in files :

		print here(), "loading:", fname

		local_lib = core.BasicBlocksFactory(load_basic_blocks=False)

		try :
			local_lib.load_standalone_workbench_lib(fname, "<local>")
		except Exception :
			print(here())
			traceback.print_exc()
			failed.append((fname, "loading_as_library"))
			continue

		library = core.SuperLibrary([main_lib, local_lib])

		w = dfs.Workbench(passive=True)

		try :
			with open(fname, "rb") as f :
				serializer.unpickle_workbench(f, w, use_cached_proto=False, library=library)
		except Exception :
			print(here())
			traceback.print_exc()
			failed.append((fname, "loading_workbench"))
			continue

		sheets = w.sheets
		global_meta = w.get_meta()

		out_fobj = StringIO()

		try :
			libs_used, = implement.implement_workbench(w, sheets, global_meta,
				ccodegen, core.KNOWN_TYPES, library, out_fobj)#sys.stdout)
		except Exception :
			print(here())
			traceback.print_exc()
			failed.append((fname, "implementing"))
			continue

		if out_fobj.tell() < 1 :
			print(here())
			failed.append((fname, "no_code_generated"))
			continue

		source = out_fobj.getvalue()

		source_dirs = set()
		for l in library.libs :
			if l.name in libs_used :
				for src_file in l.source_files :
					source_dirs.add(os.path.dirname(src_file))

		install_path = os.getcwd()
		blob_stream = StringIO()
		term_stream = StringIO()

		board_type = w.get_board()

		try :
			board_info = build.get_board_types()[board_type]
			variant = board_info["build.variant"] if "build.variant" in board_info else "standard" 
		except Exception :
			print(here())
			traceback.print_exc()
			failed.append((fname, "get_target_info"))
			continue


		try :
			rc, = build.build_source(board_type, source,
				aux_src_dirs=(
					(os.path.join(target_files_dir, "cores", "arduino"), False),
					(os.path.join(target_files_dir, "variants", variant), False),
	#				(os.path.join(install_path, "library", "arduino"), False),
				) + tuple( (path, True) for path in source_dirs ),#TODO derive from libraries used
				aux_idirs=[ os.path.join(install_path, "target", "arduino", "include") ],
				boards_txt=boards_txt,
				libc_dir=libc_dir,
	#			board_db={},
				ignore_file=None,#"amkignore",
	#			ignore_lines=( "*.cpp", "*.hpp", "*" + os.path.sep + "main.cpp", ), #TODO remove this filter with adding cpp support to build.py
				ignore_lines=( "*" + os.path.sep + "main.cpp", ),
	#			prog_port=None,
	#			prog_driver="avrdude", # or "dfu-programmer"
	#			prog_adapter="arduino", #None for dfu-programmer
				optimization="-Os",
				verbose=False,
				skip_programming=True,#False,
	#			dry_run=False,
				blob_stream=blob_stream,
				term=term_stream)
		except Exception :
			print(here())
			failed.append((fname, "build_failed"))
			continue



		succeeded.append((fname, ))

	finished = time.time()


	assert(len(failed) + len(succeeded) == len(files))

	print("")
	print("done in {:.3}s, {} of {} failed".format(finished - started, len(failed), len(files)))
	print("")
	print("failed files:")
	pprint(failed)
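get_files is not shown in these examples; a hypothetical helper consistent with the call above, collecting workbench (.w) files under a directory:

import os

def get_files(root, ext=".w") :
	# Hypothetical helper: recursively collect files with the given extension.
	matches = []
	for dirpath, _dirnames, filenames in os.walk(root) :
		for name in filenames :
			if os.path.splitext(name)[1].lower() == ext :
				matches.append(os.path.join(dirpath, name))
	return sorted(matches)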
Example #8
    def test_create_block_factory(self):
        factory_instance = core.create_block_factory(scan_dir="library")
        factory_instance2 = core.create_block_factory(scan_dir="library")
        self.assertEqual(factory_instance, factory_instance2)  # is singleton
Example #9
	if os.path.splitext(fname)[1].lower() == ".w" :
		w = Workbench(
			lib_dir=os.path.join(os.getcwd(), "library"),
			passive=True)
		blockfactory = w.blockfactory
		try :
			with open(fname, "rb") as f :
				unpickle_workbench(f, w)
		except :
			print("error loading workbench file")
			raise
#			exit(666)
		sheets = w.sheets
		global_meta = w.get_meta()
	else :
		blockfactory = create_block_factory(
				scan_dir=os.path.join(os.getcwd(), "library"))
		try :
			with open(fname, "rb") as f :
				model = unpickle_dfs_model(f, lib=blockfactory)
		except :
			print("error loading sheet file")
			exit(666)
		sheets = { "tsk" : model }
		global_meta = {}

	out_fobj = StringIO()
	implement_workbench(sheets, global_meta,
		ccodegen, KNOWN_TYPES, blockfactory, out_fobj)

	source = out_fobj.getvalue()
	print(source)
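A small variant of the last step writes the generated source next to the input file instead of printing it (the output path is illustrative):

	# Write the generated C source next to the input .w file.
	out_path = os.path.splitext(fname)[0] + ".c"
	with open(out_path, "w") as out_f :
		out_f.write(source)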