def setUpClass(cls):
    cls.target = 'debug'
    cls.json_compilation_database = FileGenerator.gen_json_compilation_database(
        'doesnt_matter.cpp')
    cls.cxxd_config = FileGenerator.gen_cxxd_config_filename(
        cls.target, os.path.dirname(cls.json_compilation_database.name))
    cls.project_root_directory = tempfile.gettempdir()
def app(destination: str, file_size: int, chunk_size: int, debug=False):
    """
    Simple program that creates a fake file, splits the file into chunks
    and makes a lock file.
    """
    if debug:
        logging.basicConfig(level=logging.DEBUG)

    file_name = 'data'

    # Generate a fake file
    FileGenerator.gen_fake_file(
        file_path=destination,
        file_name=file_name,
        size=file_size,
    )

    # Generate a lock file
    FileGenerator.gen_lock_file(
        file_path=destination,
        file_name=file_name,
    )

    # Split the fake file into chunks
    FileSystem.split_equal(
        file_name=file_name,
        source_path=destination,
        destination_path=destination,
        chunk_size=chunk_size,
    )
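# A minimal usage sketch for app() above, with hypothetical values: it
# assumes '/tmp/demo' is an existing directory and that the FileGenerator
# and FileSystem helpers used by app() are importable in this module.
if __name__ == '__main__':
    app(destination='/tmp/demo', file_size=1024 * 1024,
        chunk_size=256 * 1024, debug=True)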
def test_if_cxxd_config_parser_returns_valid_configuration_when_there_are_multiple_configs_existing(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                           \n\
            "configuration" : {                     \n\
                "type" : "compilation-database",    \n\
                "compilation-database" : {          \n\
                    "target" : {                    \n\
                        "rel" : "/tmp"              \n\
                    }                               \n\
                },                                  \n\
                "compile-flags" : {                 \n\
                    "target" : {                    \n\
                        "rel" : "/tmp"              \n\
                    }                               \n\
                }                                   \n\
            }                                       \n\
        }                                           \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), 'compilation-database')
    self.assertEqual(
        self.cxxd_config_parser.get_configuration_for_target('rel'),
        self.json_compilation_database.name)
    FileGenerator.close_gen_file(self.cxxd_config)
def setUpClass(cls):
    cls.file_to_perform_clang_tidy_on = FileGenerator.gen_simple_cpp_file()
    cls.project_root_directory = os.path.dirname(cls.file_to_perform_clang_tidy_on.name)
    cls.json_compilation_database = FileGenerator.gen_json_compilation_database(
        cls.file_to_perform_clang_tidy_on.name)
    cls.target = 'debug'
    cls.cxxd_config_with_json_comp_db = FileGenerator.gen_cxxd_config_filename(
        cls.target, os.path.dirname(cls.json_compilation_database.name))
def test_if_cxxd_config_parser_favors_compilation_db_over_txt_when_both_are_present(self):
    txt_comp_db = FileGenerator.gen_txt_compilation_database()
    self.assertEqual(
        os.path.normpath(
            self.parser_with_empty_config_file.get_configuration_for_target('')),
        self.json_compilation_database.name)
    FileGenerator.close_gen_file(txt_comp_db)
def setUpClass(cls):
    cls.test_file = FileGenerator.gen_simple_cpp_file()
    cls.test_file_edited = FileGenerator.gen_simple_cpp_file(edited=True)
    cls.txt_compilation_database = FileGenerator.gen_txt_compilation_database()
    cls.parser = parser.clang_parser.ClangParser(
        cls.txt_compilation_database.name,
        parser.tunit_cache.TranslationUnitCache(parser.tunit_cache.NoCache()))
def validateInfo(self):
    if all(self.registerContainer.data()):
        self.dataContainer.addParticipant(self.registerContainer.data())
        FileGenerator.createTicket(self.dataContainer.participantNumber,
                                   self.registerContainer.data())
        self.registerContainer.clear()
        success = RegisterSuccessful(self)
        success.mainloop()
    else:
        failed = RegisterFailed()
        failed.mainloop()
def setUpClass(cls):
    cls.file_to_perform_clang_format_on = FileGenerator.gen_simple_cpp_file()
    cls.clang_format_config_file = FileGenerator.gen_clang_format_config_file()
    cls.json_compilation_database = FileGenerator.gen_json_compilation_database(
        cls.file_to_perform_clang_format_on.name)
    cls.cxxd_config = FileGenerator.gen_cxxd_config_filename(
        'debug', os.path.dirname(cls.json_compilation_database.name))
    cls.project_root_directory = os.path.dirname(cls.file_to_perform_clang_format_on.name)
def setUpClass(cls):
    cls.test_file = FileGenerator.gen_simple_cpp_file()
    cls.test_file_with_includes_only = FileGenerator.gen_header_file_containing_includes_only()
    cls.txt_compilation_database = FileGenerator.gen_txt_compilation_database()
    cls.line_begin = 1
    cls.line_end = 20
    cls.parser = parser.clang_parser.ClangParser(
        cls.txt_compilation_database.name,
        parser.tunit_cache.TranslationUnitCache(parser.tunit_cache.NoCache()))
def test_if_cxxd_config_parser_returns_auto_discovery_try_harder_when_configuration_is_missing(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {   \n\
        }   \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(),
                     'auto-discovery-try-harder')
    FileGenerator.close_gen_file(self.cxxd_config)
def acknowledge(self):
    id1 = self.idEntry1.get()
    id2 = self.idEntry2.get()
    id3 = self.idEntry3.get()
    if (id1 in self.dataContainer.participants
            and id2 in self.dataContainer.participants
            and id3 in self.dataContainer.participants):
        FileGenerator.createAcknowledge(
            id1, self.dataContainer.participants[id1],
            id2, self.dataContainer.participants[id2],
            id3, self.dataContainer.participants[id3])
        self.destroy()
    else:
        invalid = InvalidId()
        invalid.mainloop()
def test_if_cxxd_config_parser_returns_auto_discovery_for_type(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                   \n\
            "configuration" : {             \n\
                "type": "auto-discovery"    \n\
            }                               \n\
        }                                   \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), 'auto-discovery')
    FileGenerator.close_gen_file(self.cxxd_config)
def test_if_cxxd_config_parser_returns_none_when_type_is_not_one_of_valid_values(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                           \n\
            "configuration" : {                     \n\
                "type": "something-unsupported"     \n\
            }                                       \n\
        }                                           \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), None)
    FileGenerator.close_gen_file(self.cxxd_config)
def main():
    # Stage 1: generate source files, announcing completion via queue files.
    g = FileGenerator(n=31 * 4,
                      N=256,
                      dst_dir='src_folder',
                      dst_pattern=['K{0:0>6}QNP002',
                                   'K{0:0>6}QNP003',
                                   'K{0:0>6}QNP005',
                                   'K{0:0>6}QNP006'],
                      job_done_pattern='queue_files/created_t{0}')
    # Stage 2: convert the generated files once the generator signals completion.
    c = FileConverter(n=31 * 4,
                      N=256,
                      nfiles=16,
                      fft_threads=8,
                      out_threads=1,
                      iter0=0,
                      src_dir='src_folder',
                      dst_dir='scratch_folder',
                      ready_to_start_pattern='queue_files/created_t{0}',
                      job_done_pattern='queue_files/converted_t{0}',
                      space_needed=1024**2,
                      announce_folder='queue_files')
    ofile_list = c.get_ofile_list(field='u')
    nfiles = len(ofile_list)
    out_folder = ['out05', 'out06', 'out07', 'out08']
    ndirs = len(out_folder)
    assert nfiles % ndirs == 0
    # Stage 3: move the converted 'u' and 'b' field files, split evenly
    # across the output folders.
    um = [FileMover(src_dir='scratch_folder',
                    dst_dir=out_folder[i],
                    src_pattern=ofile_list[i * nfiles // ndirs:(i + 1) * nfiles // ndirs],
                    dst_pattern=ofile_list[i * nfiles // ndirs:(i + 1) * nfiles // ndirs],
                    ready_to_start_pattern='queue_files/uconverted_t{0}',
                    job_done_pattern='queue_files/umoved_t{0}')
          for i in range(ndirs)]
    ofile_list = c.get_ofile_list(field='b')
    bm = [FileMover(src_dir='scratch_folder',
                    dst_dir=out_folder[i],
                    src_pattern=ofile_list[i * nfiles // ndirs:(i + 1) * nfiles // ndirs],
                    dst_pattern=ofile_list[i * nfiles // ndirs:(i + 1) * nfiles // ndirs],
                    ready_to_start_pattern='queue_files/bconverted_t{0}',
                    job_done_pattern='queue_files/bmoved_t{0}')
          for i in range(ndirs)]
    # Run all stages concurrently; the ready_to_start/job_done queue files
    # serialize the hand-offs between them.
    pg = mp.Process(target=g.work, args=(range(6),))
    pc = mp.Process(target=c.work, args=(range(6),))
    pm = [mp.Process(target=mm.work, args=(range(6),)) for mm in um + bm]
    for p in pm + [pg, pc]:
        p.start()
    for p in pm + [pg, pc]:
        p.join()
    return None
def test_if_cxxd_config_parser_returns_none_when_for_inexisting_target_section(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                           \n\
            "configuration" : {                     \n\
                "type": "compilation-database",     \n\
                "compilation-database" : {          \n\
                }                                   \n\
            }                                       \n\
        }                                           \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(
        self.cxxd_config_parser.get_configuration_for_target('whatever'), None)
    FileGenerator.close_gen_file(self.cxxd_config)
def test_if_cxxd_config_parser_returns_none_when_auto_discovery_does_not_contain_any_search_paths(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                   \n\
            "configuration" : {             \n\
                "type": "auto-discovery",   \n\
                "auto-discovery" : {        \n\
                }                           \n\
            }                               \n\
        }                                   \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), 'auto-discovery')
    self.assertEqual(
        self.cxxd_config_parser.get_configuration_for_target('whatever'), None)
    FileGenerator.close_gen_file(self.cxxd_config)
def test_if_cxxd_config_parser_returns_config_found_in_auto_discovery_search_path(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                           \n\
            "configuration" : {                     \n\
                "type": "auto-discovery",           \n\
                "auto-discovery" : {                \n\
                    "search-paths" : ["/tmp"]       \n\
                }                                   \n\
            }                                       \n\
        }                                           \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), 'auto-discovery')
    self.assertEqual(
        self.cxxd_config_parser.get_configuration_for_target('whatever'),
        self.json_compilation_database.name)
    FileGenerator.close_gen_file(self.cxxd_config)
def test_if_cxxd_config_parser_returns_none_configuration_when_config_for_selected_type_is_missing(self):
    self.cxxd_config = FileGenerator.gen_cxxd_config_filename_with_invalid_section(
        ['\
        {                                           \n\
            "configuration" : {                     \n\
                "type" : "compilation-database",    \n\
                "compile-flags" : {                 \n\
                    "target" : {                    \n\
                        "rel" : "/rel"              \n\
                    }                               \n\
                }                                   \n\
            }                                       \n\
        }                                           \n\
        '])
    self.cxxd_config_parser = CxxdConfigParser(self.cxxd_config.name, self.project_root_directory)
    self.assertEqual(self.cxxd_config_parser.get_configuration_type(), 'compilation-database')
    self.assertEqual(
        self.cxxd_config_parser.get_configuration_for_target('rel'), None)
    FileGenerator.close_gen_file(self.cxxd_config)
def finish(self):
    participant = self.dataContainer.participants[self.idx]
    if self.formatCombobox.get() == '.pdf':
        FileGenerator.createPDF(self.idx, participant[0], participant[1],
                                participant[-1].lower())
    elif self.formatCombobox.get() == '.txt':
        FileGenerator.createTXT(self.idx, participant[0], participant[1],
                                participant[-1].lower())
    else:
        FileGenerator.createDOCX(self.idx, participant[0], participant[1],
                                 participant[-1].lower())
    self.running = False
def finish(self):
    idx = str(self.dataContainer.participantNumber)
    participant = self.dataContainer.participants[idx]
    if self.formatCombobox.get() == '.pdf':
        FileGenerator.createPDF(idx, participant[0], participant[1],
                                participant[-1].lower())
    elif self.formatCombobox.get() == '.txt':
        FileGenerator.createTXT(idx, participant[0], participant[1],
                                participant[-1].lower())
    else:
        FileGenerator.createDOCX(idx, participant[0], participant[1],
                                 participant[-1].lower())
    self.destroy()
def query(self, sql):
    begin = re.search('FROM', sql).start()
    end = re.search('WHERE', sql).start()
    table_name = sql[begin + 5:end - 1]
    print('table name: ' + table_name)
    ID_project = re.findall(r'\d+', sql)[-1]
    # change all queries to selecting all module entries from a given project
    sql = 'SELECT * FROM ' + table_name + " WHERE ID_project = '" + ID_project + "';"
    print('ID_project = ' + ID_project)
    for statement in re.sub(r'(\)\s*);', r'\1%;%', sql).split('%;%'):
        print('simulating executing: ' + statement)

    response = {}
    # subject_modules = ['X20AT2222', 'X20AI2622', 'X20DI9371', 'X20AO2622', 'X20DO9322']
    subject_modules = ['X20PS9400a', 'X20DC1376', 'X20AT2222', 'X20AI2622',
                       'X20DI9371', 'X20AO2622', 'X20DO9322']
    # simulation of database return data
    # data = [[1, 1, subject_modules[0], '[1,2]'], [1, 2, subject_modules[1], '[1,2]'],
    #         [1, 3, subject_modules[2], '[1,2]'], [1, 4, subject_modules[3], '[1,2]'],
    #         [1, 5, subject_modules[4], '[1,2]']]
    data = [[1, 1, subject_modules[0], '[]'], [1, 2, subject_modules[1], '[]'],
            [1, 3, subject_modules[2], '[]'], [1, 4, subject_modules[3], '[1]'],
            [1, 5, subject_modules[4], '[4]'], [1, 6, subject_modules[5], '[]'],
            [1, 7, subject_modules[6], '[1,11]']]
    data_processed = []

    # First query - returns the list of all necessary module configuration
    # files to Automation Studio.
    if table_name == 'modules':
        self._fileGenerator = FileGenerator(template_path='templates')
        for row in data:
            print(row)
            module_sub_idx = row[1]
            module_name = row[2]
            active_ports = eval(row[3])
            modules = self._fileGenerator.add_module(module_name, active_ports,
                                                     module_sub_idx=module_sub_idx)
            # add_module returns a list of generated module info (including file
            # names and contents). If the subject module is IO, the list contains
            # two modules - subject and test.
            if len(modules) == 2:
                test_module = modules[1]
                sub_module = modules[0]
                data_processed.append([row[1],
                                       test_module.file_name + '.ar', test_module.content_ar,
                                       test_module.file_name + '.io', test_module.content_io,
                                       sub_module.file_name + '.ar', sub_module.content_ar,
                                       sub_module.file_name + '.io', sub_module.content_io])
            else:
                sub_module = modules[0]
                data_processed.append([row[1],
                                       'empty.ar', '', 'empty.io', '',
                                       sub_module.file_name + '.ar', sub_module.content_ar,
                                       sub_module.file_name + '.io', sub_module.content_io])
        response = sqlToJson_offline(['ID', 'test_name_ar', 'test_file_ar',
                                      'test_name_io', 'test_file_io',
                                      'sub_name_ar', 'sub_file_ar',
                                      'sub_name_io', 'sub_file_io'],
                                     data_processed)
        # Overwriting variable types - cursor.description contains information
        # on the actual database table, which has only 4 columns.
        response['types'] = ['TINY', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                             'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                             'VAR_STRING', 'VAR_STRING']
        self._query_type = 'conf'
        print('sending module configuration files: ')
        for row in response['data']:
            print([row[content][0:min(len(row[content]), 1000)]
                   if content != 'ID' else row[content] for content in row])

    # Second query - the data returned from the database after this query is
    # discarded. It generates the main configuration file based on the
    # previous query with all the module data.
    elif table_name == 'conf':
        response = sqlToJson_offline(['config'],
                                     [[self._fileGenerator.generate_main_file()]])
        response['types'] = ['VAR_STRING']
        print('main configuration file: \n' + str(response))
        self._query_type = 'io'

    # Third query - based on the active ports data received in the first
    # query, a table of desired connections is generated. The table has the
    # following structure:
    # [[di_conn[0], do_conn[0], ai_conn[0], ao_conn[0]],
    #  [di_conn[1], do_conn[1], ai_conn[1], ao_conn[1]], ...]
    # It is necessary that all the connection types (di, do, ai, ao) have the
    # same length.
    elif table_name == 'io':
        conn = self._fileGenerator.connections
        # To ensure that the connection lists of each type have the same
        # length, they are padded with empty strings.
        max_len = max([len(conn[key]) for key in conn])
        for key in conn:
            while len(conn[key]) < max_len:
                conn[key].append('')
        # The connection list is converted from
        # {'di': [di1, di2, ...], 'do': [do1, do2, ...],
        #  'ai': [ai1, ai2, ...], 'ao': [ao1, ao2, ...]}
        # to
        # [[di1, do1, ai1, ao1], [di2, do2, ai2, ao2], ...]
        response = sqlToJson_offline(['di', 'do', 'ai', 'ao'],
                                     [[conn['di'][i], conn['do'][i],
                                       conn['ai'][i], conn['ao'][i]]
                                      for i in range(len(conn['di']))])
        response['types'] = ['VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING']
        self._query_type = 'modules'
        print('connections: \n' + str(response))

    # debug_log(response)
    self._jsonResponse = makeJsonResponse(0, "", response)
    return json.dumps({"responseSize": len(self._jsonResponse)})
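# A standalone sketch of the pad-and-transpose step described in the
# comments above: per-type connection lists of unequal length are padded
# with empty strings, then rewritten as rows of [di, do, ai, ao]. The
# 'conn' value here is made-up example data, not output from the code above.
conn = {'di': ['di1', 'di2'], 'do': ['do1'], 'ai': [], 'ao': ['ao1']}
max_len = max(len(v) for v in conn.values())
for key in conn:
    conn[key].extend([''] * (max_len - len(conn[key])))
rows = [[conn['di'][i], conn['do'][i], conn['ai'][i], conn['ao'][i]]
        for i in range(max_len)]
assert rows == [['di1', 'do1', '', 'ao1'], ['di2', '', '', '']]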
def query(self, sql):
    try:
        if args.sqlType == 'mssql':
            cursor = self._cnx.cursor()
        else:
            cursor = self._cnx.cursor(buffered=True)
    except Exception as ex:
        debug_print(1, str(ex))
        return makeJsonResponse(1, "not connected to sql server", "")

    begin = re.search('FROM', sql).start()
    end = re.search('WHERE', sql).start()
    table_name = sql[begin + 5:end - 1]
    print('table name: ' + table_name)
    ID_project = re.findall(r'\d+', sql)[-1]
    # change all queries to selecting all module entries from a given project
    sql = "SELECT * FROM Configurations WHERE ID_project = '" + ID_project + "'"
    print('ID_project = ' + ID_project)
    for statement in re.sub(r'(\)\s*);', r'\1%;%', sql).split('%;%'):
        print('executing: ' + statement)
        cursor.execute(statement)

    data = []
    response = {}
    # Always try to fetch data, independent of insert / select.
    try:
        data = cursor.fetchall()
    except Exception:
        pass
    self._cnx.commit()
    data_processed = []

    # First query - returns the list of all necessary module configuration
    # files to Automation Studio.
    if self._query_type == 'modules':
        self._fileGenerator = FileGenerator(template_path='templates')
        for row in data:
            module_sub_idx = row[1]
            module_name = row[2].strip()
            active_ports = eval(row[3].strip())
            modules = self._fileGenerator.add_module(module_name, active_ports,
                                                     module_sub_idx=module_sub_idx)
            # add_module returns a list of generated module info (including file
            # names and contents). If the subject module is IO, the list contains
            # two modules - subject and test.
            if len(modules) == 2:
                test_module = modules[1]
                sub_module = modules[0]
                data_processed.append([row[1],
                                       test_module.file_name + '.ar', test_module.content_ar,
                                       test_module.file_name + '.io', test_module.content_io,
                                       sub_module.file_name + '.ar', sub_module.content_ar,
                                       sub_module.file_name + '.io', sub_module.content_io])
            else:
                sub_module = modules[0]
                data_processed.append([row[1],
                                       'empty.ar', '', 'empty.io', '',
                                       sub_module.file_name + '.ar', sub_module.content_ar,
                                       sub_module.file_name + '.io', sub_module.content_io])
        response = sqlToJson(['ID', 'test_name_ar', 'test_file_ar',
                              'test_name_io', 'test_file_io',
                              'sub_name_ar', 'sub_file_ar',
                              'sub_name_io', 'sub_file_io'],
                             data_processed, cursor.description)
        # Overwriting variable types - cursor.description contains information
        # on the actual database table, which has only 4 columns.
        response['types'] = ['TINY', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                             'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                             'VAR_STRING', 'VAR_STRING']
        self._query_type = 'conf'
        print('sending module configuration files: ')
        for row in response['data']:
            print([row[content][0:min(len(row[content]), 100)]
                   if content != 'ID' else row[content] for content in row])

    # Second query - the data returned from the database after this query is
    # discarded. It generates the main configuration file based on the
    # previous query with all the module data.
    elif self._query_type == 'conf':
        response = sqlToJson(['config'],
                             [[self._fileGenerator.generate_main_file()]],
                             cursor.description)
        response['types'] = ['VAR_STRING']
        print('main configuration file: \n' + str(response))
        self._query_type = 'io'

    # Third query - based on the active ports data received in the first
    # query, a table of desired connections is generated. The table has the
    # following structure:
    # [[di_conn[0], do_conn[0], ai_conn[0], ao_conn[0]],
    #  [di_conn[1], do_conn[1], ai_conn[1], ao_conn[1]], ...]
    # It is necessary that all the connection types (di, do, ai, ao) have the
    # same length.
    elif self._query_type == 'io':
        conn = self._fileGenerator.connections
        # To ensure that the connection lists of each type have the same
        # length, they are padded with empty strings.
        max_len = max([len(conn[key]) for key in conn])
        for key in conn:
            while len(conn[key]) < max_len:
                conn[key].append('')
        # The connection list is converted from
        # {'di': [di1, di2, ...], 'do': [do1, do2, ...],
        #  'ai': [ai1, ai2, ...], 'ao': [ao1, ao2, ...]}
        # to
        # [[di1, do1, ai1, ao1], [di2, do2, ai2, ao2], ...]
        response = sqlToJson(['di', 'do', 'ai', 'ao'],
                             [[conn['di'][i], conn['do'][i],
                               conn['ai'][i], conn['ao'][i]]
                              for i in range(len(conn['di']))],
                             cursor.description)
        response['types'] = ['VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING']
        self._query_type = 'modules'
        print('connections: \n' + str(response))

    cursor.close()
    # debug_log(response)
    self._jsonResponse = makeJsonResponse(0, "", response)
    return json.dumps({"responseSize": len(self._jsonResponse)})
parser.add_argument('data_dir')
parser.add_argument('-a', '--annotator', default='default')
parser.add_argument('-i', '--interface', default='interface')
parser.add_argument('--interface_dir', default='.')  # assumed default; args.interface_dir is used below
parser.add_argument('--past_reports', default=False, action='store_true')
parser.add_argument('-m', '--models', action='append')
parser.add_argument('-d', '--device', default='cpu')
parser.add_argument('-r', '--reload', default=False, action='store_true')
parser.add_argument('-p', '--port')
args = parser.parse_args()

if args.data_dir is None:
    raise NotImplementedError
startup['annotations_dir'] = join(args.data_dir, args.annotator + '_annotations')
if not exists(startup['annotations_dir']):
    mkdir(startup['annotations_dir'])
np.random.seed(0)
startup['file_generator'] = FileGenerator(args.data_dir,
                                          startup['annotations_dir'],
                                          reload=args.reload)
try:
    startup['file'] = next(startup['file_generator'])
except StopIteration:
    startup['file'] = None
with directory(args.interface_dir):
    exec('import ' + args.interface)
models_to_load = args.models if args.models is not None else []
startup['interface'] = eval(args.interface).FullModelInterface(
    models_to_load=models_to_load, device=args.device)
startup['include_past_reports'] = args.past_reports
app.run(debug=True, port=args.port)
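# A hypothetical invocation of the script above; the flag names come from
# the argparse definitions, while the script name, paths, and values are
# examples only:
#
#   python app.py /data/reports -a alice -m model1 -m model2 -d cuda -r -p 5000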
from file_generator import FileGenerator
from template_generator import generate_templates

processed_path = 'processed'

generate_templates(ar_file_name="backup/arconfig.ar.bup",
                   io_file_name="backup/iomap.io.bup",
                   templates_path="templates")

f = FileGenerator(template_path='templates')
subject_modules = ['X20AI2622', 'X20AT2222', 'X20DI9371', 'X20AO2622', 'X20DO9322']
for module in subject_modules:
    f.add_module(module, [1, 2])
f.store_files(processed_path)
def tearDownClass(cls):
    FileGenerator.close_gen_file(cls.test_file_with_no_diagnostics)
    FileGenerator.close_gen_file(cls.test_file_with_no_diagnostics_edited)
    FileGenerator.close_gen_file(cls.test_file_with_compile_errors)
    FileGenerator.close_gen_file(cls.test_file_with_compile_errors_edited)
    FileGenerator.close_gen_file(cls.txt_compilation_database)
def tearDownClass(cls):
    FileGenerator.close_gen_file(cls.test_file)
    FileGenerator.close_gen_file(cls.test_file_edited)
    FileGenerator.close_gen_file(cls.test_file_broken)
    FileGenerator.close_gen_file(cls.test_file_broken_edited)
    FileGenerator.close_gen_file(cls.txt_compilation_database)
import os
import yaml

# This script will generate a file that will fail a specific syntax, validity,
# or quality edit (though others may also fail).
# This script relies on the presence of a clean data file.
from file_generator import FileGenerator

# Instantiating the file generator.
file = FileGenerator()

# Creates quality files that pass syntax and validity for each test file
# in the edits_files directory.
file.create_files(kind='error_files')

# The following code validates quality edits to pass syntax and validity edits.
# Stores a list of filenames from the quality edits directory.
quality_files = os.listdir(file.filepaths['quality_filepath'].format(
    bank_name=file.data_map['name']['value']))

# Validates quality edits and stores them in a new directory specified in the
# test filepaths configuration.
for quality_file in quality_files:
    file.validate_quality_edit_file(quality_filename=quality_file)
# This script will write an edit report for a submission file to a path and
# filename of the user's choosing.
# Edit report configurations are located in configurations/edit_report_config.yaml.
from file_generator import FileGenerator

# Instantiating the file generator.
file = FileGenerator()

# Writing edit report.
file.edit_report()
def setUpClass(cls):
    cls.file_to_be_built = FileGenerator.gen_simple_cpp_file()
    cls.cxxd_config = FileGenerator.gen_cxxd_config_filename()
    cls.project_root_directory = os.path.dirname(cls.file_to_be_built.name)
def tearDownClass(cls):
    FileGenerator.close_gen_file(cls.file_to_be_built)
    FileGenerator.close_gen_file(cls.cxxd_config)