def get_source_modification_time(self):
    all_source_paths = tuple(itertools.chain(
        get_project_source_paths(self.contracts_source_dir),
        get_test_source_paths(self.tests_dir),
    ))
    return max(
        os.path.getmtime(source_file_path)
        for source_file_path in all_source_paths
    ) if len(all_source_paths) > 0 else None
def compile_project_contracts(project):
    logger = logging.getLogger('populus.compilation.compile_project_contracts')

    # Collect the Solidity sources from every configured contracts directory.
    project_contract_source_paths = tuple(itertools.chain.from_iterable(
        get_project_source_paths(source_dir)
        for source_dir in project.contracts_source_dirs
    ))
    logger.debug(
        "Found %s project source files: %s",
        len(project_contract_source_paths),
        ", ".join(project_contract_source_paths),
    )

    test_contract_source_paths = get_test_source_paths(project.tests_dir)
    logger.debug(
        "Found %s test source files: %s",
        len(test_contract_source_paths),
        ", ".join(test_contract_source_paths),
    )

    all_source_paths = tuple(itertools.chain(
        project_contract_source_paths,
        test_contract_source_paths,
    ))

    # Delegate the actual compilation to the configured compiler backend.
    compiler_backend = project.get_compiler_backend()
    base_compiled_contracts = compiler_backend.get_compiled_contracts(
        source_file_paths=all_source_paths,
        import_remappings=project.config.get('compilation.import_remappings'),
    )
    compiled_contracts = post_process_compiled_contracts(base_compiled_contracts)
    validate_compiled_contracts(compiled_contracts)

    logger.info("> Found %s contract source files", len(all_source_paths))
    for path in sorted(all_source_paths):
        logger.info(" - %s", os.path.relpath(path))

    logger.info("> Compiled %s contracts", len(compiled_contracts))
    for contract_key in sorted(map(_get_contract_key, compiled_contracts)):
        logger.info(" - %s", ':'.join(contract_key))

    # Contract names must be unique because the results are keyed by name.
    duplicate_contract_names = get_duplicates(
        contract_data['name']
        for contract_data in compiled_contracts
    )
    if duplicate_contract_names:
        raise ValueError(
            "Duplicate contract names '{0}'. Populus cannot currently support "
            "compilation which produces multiple contracts with the same "
            "name".format(", ".join(duplicate_contract_names))
        )

    compiled_contracts_by_key = {
        contract_data['name']: contract_data
        for contract_data in compiled_contracts
    }

    return all_source_paths, compiled_contracts_by_key
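# Usage sketch (not part of the original snippets): driving the backend-based
# compile_project_contracts above from a configured Populus project directory.
# The top-level `Project` import follows Populus' public API, but constructor
# arguments vary between versions, so treat the details here as assumptions.
def _example_compile_current_project():
    from populus import Project

    project = Project()
    source_paths, compiled_contracts_by_key = compile_project_contracts(project)
    for name, contract_data in sorted(compiled_contracts_by_key.items()):
        print(name, len(contract_data.get('abi', [])))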
def compile_project_contracts(project, compiler_settings=None):
    logger = logging.getLogger('populus.compilation.compile_project_contracts')

    if compiler_settings is None:
        compiler_settings = {}
    # Request the standard set of solc outputs unless the caller overrides it.
    compiler_settings.setdefault('output_values', DEFAULT_COMPILER_OUTPUT_VALUES)
    logger.debug("Compiler Settings: %s", pprint.pformat(compiler_settings))

    project_contract_source_paths = get_project_source_paths(project.contracts_source_dir)
    logger.debug(
        "Found %s project source files: %s",
        len(project_contract_source_paths),
        project_contract_source_paths,
    )

    test_contract_source_paths = get_test_source_paths(project.tests_dir)
    logger.debug(
        "Found %s test source files: %s",
        len(test_contract_source_paths),
        test_contract_source_paths,
    )

    all_source_paths = tuple(itertools.chain(
        project_contract_source_paths,
        test_contract_source_paths,
    ))

    try:
        compiled_contracts = compile_files(all_source_paths, **compiler_settings)
    except ContractsNotFound:
        # An empty project is not an error; return no compiled contracts.
        return all_source_paths, {}

    normalized_compiled_contracts = dict(
        process_compiler_output(contract_name, contract_data)
        for contract_name, contract_data in compiled_contracts.items()
    )

    logger.info("> Found %s contract source files", len(all_source_paths))
    for path in all_source_paths:
        logger.info(" - %s", os.path.relpath(path))

    logger.info("> Compiled %s contracts", len(normalized_compiled_contracts))
    for contract_name in sorted(normalized_compiled_contracts.keys()):
        logger.info(" - %s", contract_name)

    return all_source_paths, normalized_compiled_contracts
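# Usage sketch (assumption): the solc-backed variant above forwards
# compiler_settings straight to compile_files, so callers can narrow the
# requested combined-json outputs. The specific output value names depend on
# the installed solc / py-solc versions.
def _example_compile_with_settings(project):
    settings = {'output_values': ['abi', 'bin', 'bin-runtime']}
    source_paths, contracts = compile_project_contracts(project, compiler_settings=settings)
    return contracts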
def compile_project_contracts(project):
    logger = logging.getLogger('populus.compilation.compile_project_contracts')

    project_contract_source_paths = get_project_source_paths(project.contracts_source_dir)
    logger.debug(
        "Found %s project source files: %s",
        len(project_contract_source_paths),
        ", ".join(project_contract_source_paths),
    )

    test_contract_source_paths = get_test_source_paths(project.tests_dir)
    logger.debug(
        "Found %s test source files: %s",
        len(test_contract_source_paths),
        ", ".join(test_contract_source_paths),
    )

    all_source_paths = tuple(itertools.chain(
        project_contract_source_paths,
        test_contract_source_paths,
    ))

    compiler_backend = project.get_compiler_backend()
    compiled_contract_data = compiler_backend.get_compiled_contract_data(
        source_file_paths=all_source_paths,
        import_remappings=None,
    )

    logger.info("> Found %s contract source files", len(all_source_paths))
    for path in all_source_paths:
        logger.info(" - %s", os.path.relpath(path))

    logger.info("> Compiled %s contracts", len(compiled_contract_data))
    for contract_name in sorted(compiled_contract_data.keys()):
        logger.info(" - %s", contract_name)

    return all_source_paths, compiled_contract_data
def test_gets_correct_files_default_dir(project, write_project_file):
    should_match = {
        'contracts/SolidityContract.sol',
        'contracts/AnotherFile.sol',
    }
    should_not_match = {
        'contracts/BackedUpContract.sol.bak',
        'contracts/Swapfile.sol.swp',
        'contracts/not-contract.txt',
    }

    for filename in should_match:
        write_project_file(filename)
    for filename in should_not_match:
        write_project_file(filename)

    # Collect source paths only after the fixture files exist on disk.
    file_names = get_project_source_paths(project.contracts_source_dir)

    # Compare basenames against basenames; the expectation sets above carry a
    # 'contracts/' prefix while the collected paths may be absolute.
    matched_basenames = {os.path.basename(path) for path in should_match}
    excluded_basenames = {os.path.basename(path) for path in should_not_match}

    for file_name in file_names:
        assert os.path.exists(file_name)
        assert os.path.basename(file_name) in matched_basenames
        assert os.path.basename(file_name) not in excluded_basenames
def get_all_source_file_paths(self):
    return tuple(itertools.chain(
        get_project_source_paths(self.contracts_source_dir),
        get_test_source_paths(self.tests_dir),
    ))
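# Usage sketch (assumption): combining the two Project helpers above to decide
# whether a previously written build artifact is stale. `artifact_mtime` is a
# hypothetical timestamp supplied by the caller, e.g. from os.path.getmtime on
# the compiled-contracts asset.
def _is_compilation_stale(project, artifact_mtime):
    latest_source_mtime = project.get_source_modification_time()
    if latest_source_mtime is None:
        # No source files were found, so there is nothing to recompile.
        return False
    return latest_source_mtime > artifact_mtime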