def count_bool_variables():
    global bool_counter
    for f in get_files():
        print(f)
        file = open(f, "r", encoding='utf8')
        text = file.read()
        try:
            source_unit = parser.parse(text=text, loc=True)
        except (TypeError, AttributeError):
            continue
        source_unit_object = parser.objectify(source_unit)
        contracts = source_unit_object.contracts.keys()
        for contract in contracts:
            functions = source_unit_object.contracts[contract].functions
            function_keys = source_unit_object.contracts[contract].functions.keys()
            for function_key in function_keys:
                function = functions[function_key]
                function_body = function._node.body
                if function_body:
                    statements = function_body.statements
                    for statement in statements:
                        if isinstance(statement, str):
                            # would result in an AttributeError when there is no statement type. example: 'throw;'
                            continue
                        if statement and statement.type == 'VariableDeclarationStatement' and statement.variables \
                                and len(statement.variables) == 1:
                            for variable in statement.variables:
                                if variable.type == 'VariableDeclaration' \
                                        and variable.typeName.type == 'ElementaryTypeName' \
                                        and variable.typeName.name == 'bool':
                                    statement_line = statement.loc['start']['line'] - 1
                                    print('##### found boolean variable; line: ' + str(statement_line))
                                    bool_counter += 1
    print('##############################################')
    print('bool variables found: ' + str(bool_counter))
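The counting functions here rely on a get_files() helper and on module-level counters (bool_counter, de_morgan_counter, for_counter, and so on) that are not shown in these snippets. Below is a minimal sketch, assuming the corpus sits under the found_violations\total directory that main() also walks; the .sol filter and the counter initialisation are illustrative assumptions, not the original helpers.

import os

# Hypothetical module-level state assumed by the counting functions.
bool_counter = 0
de_morgan_counter = 0
binary_if_statement_counter = 0
for_counter = 0
while_counter = 0
do_while_counter = 0


def get_files(root="found_violations\\total"):
    # Walk the (assumed) corpus directory and collect Solidity source paths.
    files = []
    for r, d, f in os.walk(root):
        for file in f:
            if file.endswith('.sol'):  # assumed filter; main() walks the folder unfiltered
                files.append(os.path.join(r, file))
    return files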
def count_de_morgan():
    for f in get_files():
        print(f)
        file = open(f, "r", encoding='utf8')
        text = file.read()
        try:
            source_unit = parser.parse(text=text, loc=True)
        except (TypeError, AttributeError):
            continue
        source_unit_object = parser.objectify(source_unit)
        contracts = source_unit_object.contracts.keys()
        for contract in contracts:
            functions = source_unit_object.contracts[contract].functions
            function_keys = source_unit_object.contracts[contract].functions.keys()
            for function_key in function_keys:
                function = functions[function_key]
                function_body = function._node.body
                if function_body:
                    statements = function_body.statements
                    for statement in statements:
                        if isinstance(statement, str):
                            # would result in an AttributeError when there is no statement type. example: 'throw;'
                            continue
                        if statement:
                            if statement.type == 'IfStatement':
                                check_if_statement(statement)
    print('##############################################')
    print('applied de morgan law found: ' + str(de_morgan_counter))
    print('binary operation without negation in if statement found: ' + str(binary_if_statement_counter))
def get_solc_verion(file, logs):
    try:
        with open(file, 'r', encoding='utf-8') as fd:
            sourceUnit = parser.parse(fd.read())
            solc_version = sourceUnit['children'][0]['value']
            solc_version = solc_version.strip('^')
            solc_version = solc_version.split('.')
            return (int(solc_version[1]), int(solc_version[2]))
    except:
        print('\x1b[1;33m' + 'WARNING: could not parse solidity file to get solc version' + '\x1b[0m')
        logs.write('WARNING: could not parse solidity file to get solc version \n')
        return (None, None)
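A quick illustration of the return contract of get_solc_verion(), assuming a file whose first child node is a pragma directive such as pragma solidity ^0.5.11; the file name and the StringIO log sink are stand-ins for illustration.

import io

# Illustrative only: 'Example.sol' and the in-memory log sink are assumptions.
logs = io.StringIO()
major_minor = get_solc_verion('Example.sol', logs)
# For `pragma solidity ^0.5.11;` this yields (5, 11); on parse failure it yields (None, None).
print(major_minor)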
def count_loops():
    global for_counter, while_counter, do_while_counter
    for f in get_files():
        print(f)
        file = open(f, "r", encoding='utf8')
        text = file.read()
        try:
            source_unit = parser.parse(text=text, loc=True)
        except (TypeError, AttributeError):
            continue
        source_unit_object = parser.objectify(source_unit)
        contracts = source_unit_object.contracts.keys()
        for contract in contracts:
            functions = source_unit_object.contracts[contract].functions
            function_keys = source_unit_object.contracts[contract].functions.keys()
            for function_key in function_keys:
                function = functions[function_key]
                function_body = function._node.body
                if function_body:
                    statements = function_body.statements
                    for statement in statements:
                        if statement is None or isinstance(statement, str):
                            # would result in an AttributeError when there is no statement type. example: 'throw;'
                            continue
                        if statement.type in ['ForStatement', 'WhileStatement', 'DoWhileStatement']:
                            check_loop(statement)
                        elif statement.type == 'IfStatement':
                            if statement.TrueBody is not None and not isinstance(statement.TrueBody, str) \
                                    and statement.TrueBody.type == 'Block':
                                # check true body
                                for s in statement.TrueBody.statements:
                                    check_loop(s)
                            if statement.FalseBody is not None and not isinstance(statement.FalseBody, str) \
                                    and statement.FalseBody.type == 'Block':
                                # check false body
                                for s in statement.FalseBody.statements:
                                    check_loop(s)
        # running totals after each file
        print('for loops found: ' + str(for_counter))
        print('while loops found: ' + str(while_counter))
        print('do-while loops found: ' + str(do_while_counter))
    print('##############################################')
    print('for loops found: ' + str(for_counter))
    print('while loops found: ' + str(while_counter))
    print('do-while loops found: ' + str(do_while_counter))
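check_loop() is called above but not defined in these snippets. The following is a minimal sketch consistent with how it is used, assuming it only classifies a statement node and bumps the module-level loop counters; the body is a hypothetical reconstruction, not the original helper.

def check_loop(statement):
    # Hypothetical sketch: classify a statement node and update the assumed counters.
    global for_counter, while_counter, do_while_counter
    if statement is None or isinstance(statement, str):
        return
    if statement.type == 'ForStatement':
        for_counter += 1
    elif statement.type == 'WhileStatement':
        while_counter += 1
    elif statement.type == 'DoWhileStatement':
        do_while_counter += 1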
def deploy(ctx_obj, contract_name, inputs, verbose, contract):
    """
    Deploys a smart contract from the solidity source code specified

    CONTRACT: path to the solidity file

    Usage example:

    mv-cli deploy contracts/Microblog.sol --contractName=Microblog --constructorInputs='JSON representation of the constructor arguments in an array'
    """
    constructor_input_prompt = False
    if contract_name:
        if verbose:
            click.echo('Got contract name: ')
            click.echo(contract_name)
    else:
        contract_name = click.prompt('Enter the contract name')
    if inputs:
        if verbose:
            click.echo('Got constructor inputs: ')
            click.echo(inputs)
        c_inputs = json.loads(inputs)
    else:
        constructor_input_prompt = True
        c_inputs = list()  # an empty list
    sources = dict()
    if contract[0] == '~':
        contract_full_path = os.path.expanduser(contract)
    else:
        contract_full_path = contract
    resident_directory = ''.join(map(lambda x: x + '/', contract_full_path.split('/')[:-1]))
    contract_file_name = contract_full_path.split('/')[-1]
    contract_file_obj = open(file=contract_full_path)
    main_contract_src = ''
    while True:
        chunk = contract_file_obj.read(1024)
        if not chunk:
            break
        main_contract_src += chunk
    sources[f'mv-cli/{contract_file_name}'] = {'content': main_contract_src}
    # loop through imports and add them to sources
    source_unit = parser.parse(main_contract_src)
    source_unit_obj = parser.objectify(source_unit)
    for each in source_unit_obj.imports:
        import_location = each['path'].replace("'", "")
        # TODO: follow specified relative paths and import such files too
        if import_location[:2] != './':
            click.echo('You can only import files from within the same directory as of now', err=True)
            return
        # otherwise read the file into the contents mapping
        full_path = resident_directory + import_location[2:]
        imported_contract_obj = open(full_path, 'r')
        contract_src = ''
        while True:
            chunk = imported_contract_obj.read(1024)
            if not chunk:
                break
            contract_src += chunk
        sources[f'mv-cli/{import_location[2:]}'] = {'content': contract_src}
    if len(c_inputs) == 0 and constructor_input_prompt:
        abi_json = extract_abi(ctx_obj['settings'], {
            'sources': sources,
            'sourceFile': f'mv-cli/{contract_file_name}'
        })
        abp = ABIParser(abi_json=abi_json)
        abp.load_abi()
        if len(abp.constructor_params()) > 0:
            click.echo('Enter constructor inputs...')
            for idx, each_param in enumerate(abp.constructor_params()):
                param_type = abp._constructor_mapping["constructor"]["input_types"][idx]
                param_type_cat = abp.type_category(param_type)
                arg = click.prompt(f'{each_param}({param_type})')
                if param_type_cat == 'integer':
                    arg = int(arg)
                elif param_type_cat == 'array':
                    # check if it can be deserialized into a python dict
                    try:
                        arg_dict = json.loads(arg)
                    except json.JSONDecodeError:
                        click.echo(
                            f'Parameter {each_param} of type {param_type} '
                            f'should be correctly passed as a JSON array',
                            err=True
                        )
                        sys.exit(1)
                c_inputs.append(arg)
    msg = "Trying to deploy"
    message_hash = encode_defunct(text=msg)
    # deploy from alpha account
    signed_msg = Account.sign_message(message_hash, ctx_obj['settings']['PRIVATEKEY'])
    deploy_json = {
        'msg': msg,
        'sig': signed_msg.signature.hex(),
        'name': contract_name,
        'inputs': c_inputs,
        'sources': sources,
        'sourceFile': f'mv-cli/{contract_file_name}'
    }
    # click.echo(deploy_json)
    # --MATICVIGIL API CALL---
    r = requests.post(ctx_obj['settings']['INTERNAL_API_ENDPOINT'] + '/deploy', json=deploy_json)
    if verbose:
        click.echo('MaticVigil deploy response: ')
        click.echo(r.text)
    if r.status_code == requests.codes.ok:
        click.echo(f'Contract {contract_name} deployed successfully')
        r = r.json()
        click.echo(f'Contract Address: {r["data"]["contract"]}')
        click.echo(f'Deploying tx: {r["data"]["hash"]}')
    else:
        click.echo('Contract deployment failed')
def deploy(self, contract_file, contract_name, inputs):
    """
    Deploys a smart contract from the solidity source code specified

    :param contract_file : path to the contract file name
    :param contract_name : the contract name to be deployed from the file
    :param inputs : mapping of constructor arguments
    """
    contract_src = ""
    if self._verbose:
        print('Got unordered constructor inputs: ')
        print(inputs)
    sources = dict()
    if contract_file[0] == '~':
        contract_full_path = os.path.expanduser(contract_file)
    else:
        contract_full_path = contract_file
    resident_directory = ''.join(map(lambda x: x + '/', contract_full_path.split('/')[:-1]))
    contract_file_name = contract_full_path.split('/')[-1]
    contract_file_obj = open(file=contract_full_path)
    main_contract_src = ''
    while True:
        chunk = contract_file_obj.read(1024)
        if not chunk:
            break
        main_contract_src += chunk
    sources[f'ev-py-sdk/{contract_file_name}'] = {
        'content': main_contract_src
    }
    # loop through imports and add them to sources
    source_unit = parser.parse(main_contract_src)
    source_unit_obj = parser.objectify(source_unit)
    for each in source_unit_obj.imports:
        import_location = each['path'].replace("'", "")
        # TODO: follow specified relative paths and import such files too
        if import_location[:2] != './':
            ev_core_logger.error(
                'You can only import files from within the same directory as of now'
            )
            raise EVBaseException(
                'You can only import files from within the same directory as of now'
            )
        # otherwise read the file into the contents mapping
        full_path = resident_directory + import_location[2:]
        imported_contract_obj = open(full_path, 'r')
        contract_src = ''
        while True:
            chunk = imported_contract_obj.read(1024)
            if not chunk:
                break
            contract_src += chunk
        sources[f'ev-py-sdk/{import_location[2:]}'] = {
            'content': contract_src
        }
    abi_json = extract_abi(self._settings, {
        'sources': sources,
        'sourceFile': f'ev-py-sdk/{contract_file_name}'
    })
    abp = ABIParser(abi_json=abi_json)
    abp.load_abi()
    c_inputs = abp.ordered_map_to_ev_constructor_args(inputs)
    if self._verbose:
        print('Ordered constructor inputs: \n', c_inputs)
    msg = "Trying to deploy"
    message_hash = defunct_hash_message(text=msg)
    signed_msg = Account.signHash(message_hash, self._settings['PRIVATEKEY'])
    deploy_json = {
        'msg': msg,
        'sig': signed_msg.signature.hex(),
        'name': contract_name,
        'inputs': c_inputs,
        'sources': sources,
        'sourceFile': f'ev-py-sdk/{contract_file_name}'
    }
    # --MATICVIGIL API CALL---
    r = make_http_call(request_type='post',
                       url=self._settings['INTERNAL_API_ENDPOINT'] + '/deploy',
                       params=deploy_json)
    if self._verbose:
        ev_core_logger.debug('MaticVigil deploy response: ')
        ev_core_logger.debug(r)
    return r['data']
import sys

from solidity_parser import parser

# If pragma not declared use 0.4.26
version = "0.4.26"
path = sys.argv[1]

if __name__ == '__main__':
    with open(path, 'r', encoding='utf-8') as fd:
        fst = parser.parse(fd.read())['children'][0]
        if fst['type'] == "PragmaDirective":
            version = f"{fst['value'].strip('^')}"
    print(version)
def main():
    if len(sys.argv) > 1:
        if sys.argv[1] == '-initialize':
            initialize()
            return 0
        elif sys.argv[1] == '-preprocess':
            preprocess()
            return 0
        elif sys.argv[1] == '-demorgan':
            evaluation.count_de_morgan()
            return 0
        elif sys.argv[1] == '-boolean':
            evaluation.count_bool_variables()
            return 0
        elif sys.argv[1] == '-loops':
            evaluation.count_loops()
            return 0
        elif sys.argv[1] == '-loopstatements':
            evaluation.count_loop_conditions()
            return 0
    else:
        files = []
        for r, d, f in os.walk("found_violations\\total"):
            for file in f:
                files.append(os.path.join(r, file))
        for f in files:
            additional_lines = 0
            print(f)
            # read input
            file = open(f, "r", encoding='utf8')
            text = file.read()
            # TODO: replace all \t characters with spaces
            try:
                source_unit = parser.parse(text=text, loc=True)
            except (TypeError, AttributeError):
                continue
            source_unit_object = parser.objectify(source_unit)
            contracts = source_unit_object.contracts.keys()
            # create output file
            input_file = open(f, "r", encoding='utf8')
            output_file = open('output\\opt_' + ntpath.basename(f), 'w', encoding='utf8')
            content = input_file.readlines()
            # get all functions from all contracts in the file
            all_functions = {}
            for contract in contracts:
                function_dictionary = source_unit_object.contracts[contract].functions
                all_functions = {**all_functions, **function_dictionary}
            loop_statements = ['ForStatement', 'WhileStatement', 'DoWhileStatement']
            # process rules
            for contract in contracts:
                functions = source_unit_object.contracts[contract].functions
                function_keys = source_unit_object.contracts[contract].functions.keys()
                for function_key in function_keys:
                    function = functions[function_key]
                    function_body = function._node.body
                    if function_body:
                        statements = function_body.statements
                        ##### PROCEDURE RULE 1 ######
                        additional_lines = procedure_rule_1.check_rule(additional_lines, content, statements,
                                                                       function_key, function.arguments,
                                                                       function._node.loc)
                        first_for_statement = None
                        for statement in statements:
                            if isinstance(statement, str):
                                # would result in an AttributeError when there is no statement type. example: 'throw;'
                                first_for_statement = None
                                continue
                            if statement:
                                ###### LOGIC RULE 1 ######
                                if statement.type == 'IfStatement':
                                    additional_lines = logic_rule_1.check_rule(additional_lines, content, statement)
                                ###### LOGIC RULE 2 ######
                                if statement.type == 'VariableDeclarationStatement' and statement.variables \
                                        and len(statement.variables) == 1:
                                    for variable in statement.variables:
                                        if variable.type == 'VariableDeclaration' \
                                                and variable.typeName.type == 'ElementaryTypeName' \
                                                and variable.typeName.name == 'bool':
                                            additional_lines = logic_rule_2.check_rule(additional_lines, content,
                                                                                       statements, statement)
                                ###### LOOP RULE 1 ######
                                if statement.type == 'ForStatement':
                                    additional_lines = loop_rule_1.check_rule(additional_lines, content, statement,
                                                                              all_functions)
                                ###### LOOP RULE 2 ######
                                if statement.type in loop_statements:
                                    additional_lines = loop_rule_2.check_rule(additional_lines, content, statement)
                                ###### LOOP RULE 3 ######
                                if statement.type == 'ForStatement':
                                    additional_lines = loop_rule_3.check_rule(additional_lines, content, statement)
                                ###### LOOP RULE 4 ######
                                if statement.type == 'ForStatement':
                                    additional_lines = loop_rule_4.check_rule(additional_lines, content, statement)
                                ###### LOOP RULE 5 ######
                                if statement.type == 'ForStatement':
                                    additional_lines = loop_rule_5.check_rule(additional_lines, content, statement)
                                ###### LOOP RULE 6 ######
                                if statement.type == 'ForStatement':
                                    if first_for_statement is not None:
                                        additional_lines = loop_rule_6.check_rule(additional_lines, content,
                                                                                  first_for_statement, statement)
                                    first_for_statement = statement
                                else:
                                    first_for_statement = None
            # write output
            output_file.writelines(content)
            output_file.close()
        # summary of the findings
        print('########################################################################')
        print('#########             SUMMARY OF RESULTS                      #########')
        print('########################################################################')
        print('######### number of instances loop rule 1: ' + str(loop_rule_1.get_instance_counter()))
        print('######### number of instances loop rule 2: ' + str(loop_rule_2.get_instance_counter()))
        print('######### number of instances loop rule 3: ' + str(loop_rule_3.get_instance_counter()))
        print('######### number of instances loop rule 4: ' + str(loop_rule_4.get_instance_counter()))
        print('######### number of instances loop rule 5: ' + str(loop_rule_5.get_instance_counter()))
        print('######### number of instances loop rule 6: ' + str(loop_rule_6.get_instance_counter()))
        print('######### number of instances logic rule 1: ' + str(logic_rule_1.get_instance_counter()))
        print('######### number of instances logic rule 2: ' + str(logic_rule_2.get_instance_counter()))
        print('######### number of instances procedure rule 1: ' + str(procedure_rule_1.get_instance_counter()))
        print('########################################################################')
def analyse_files(tool, file, logs, now):
    try:
        cfg_path = os.path.abspath('config/tools/' + tool + '.yaml')
        with open(cfg_path, 'r', encoding='utf-8') as ymlfile:
            try:
                cfg = yaml.safe_load(ymlfile)
            except yaml.YAMLError as exc:
                print(exc)
                logs.write(str(exc))

        # create result folder with time
        results_folder = 'results/' + tool + '/' + now
        if not os.path.exists(results_folder):
            os.makedirs(results_folder)
            # os.makedirs(os.path.dirname(results_folder), exist_ok=True)

        # check if config file has all required fields
        if cfg['docker_image'] is None or 'default' not in cfg['docker_image']:
            logs.write(tool + ': default docker image not provided. please check your config file.\n')
            sys.exit(tool + ': default docker image not provided. please check your config file.')
        elif 'cmd' not in cfg or cfg['cmd'] is None:
            logs.write(tool + ': commands not provided. please check your config file.\n')
            sys.exit(tool + ': commands not provided. please check your config file.')

        # bind directory path instead of file path to allow imports in the same directory
        volume_bindings = mount_volumes(os.path.dirname(file), logs)

        file_name = os.path.basename(file)
        file_name = os.path.splitext(file_name)[0]

        start = time()

        (solc_version, solc_version_minor) = get_solc_verion(file, logs)
        if isinstance(solc_version, int) and solc_version < 5 and 'solc<5' in cfg['docker_image']:
            image = cfg['docker_image']['solc<5']
        # if there's no version or version >5, choose default
        else:
            image = cfg['docker_image']['default']

        if not client.images.list(image):
            pull_image(image, logs)

        cmd = cfg['cmd']
        if tool == 'mythril':
            with open(file, 'r', encoding='utf-8') as fd:
                try:
                    fst = parser.parse(fd.read())['children'][0]
                    solv = f"--solv {fst['value'].strip('^')}" if fst and fst['type'] == "PragmaDirective" else ""
                    cmd = cmd.format(solv=solv)
                except:
                    print('\x1b[1;33m' + 'WARNING: could not parse solidity file to get solc version' + '\x1b[0m')
                    logs.write('WARNING: could not parse solidity file to get solc version \n')
        if '{contract}' in cmd:
            cmd = cmd.replace('{contract}', '/' + file)
        else:
            cmd += ' /' + file

        container = None
        try:
            container = client.containers.run(image,
                                              cmd,
                                              detach=True,
                                              # cpu_quota=150000,
                                              volumes=volume_bindings)
            try:
                container.wait(timeout=(30 * 60))
            except Exception as e:
                pass
            output = container.logs().decode('utf8').strip()
            if (output.count('Solc experienced a fatal error') >= 1 or output.count('compilation failed') >= 1):
                print('\x1b[1;31m' + 'ERROR: Solc experienced a fatal error. Check the results file for more info' + '\x1b[0m')
                logs.write('ERROR: Solc experienced a fatal error. Check the results file for more info\n')
            end = time()
            parse_results(output, tool, file_name, container, cfg, logs, results_folder, start, end)
        finally:
            stop_container(container, logs)
            remove_container(container, logs)
    except (docker.errors.APIError, docker.errors.ContainerError, docker.errors.ImageNotFound) as err:
        print(err)
        logs.write(str(err) + '\n')
def parse_sol(sol_code):
    sourceUnit = parser.parse(sol_code)
    sourceUnitObject = parser.objectify(sourceUnit)
    return sourceUnitObject
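A short usage sketch for parse_sol(), assuming solidity_parser is importable as in the snippets above; the sample contract source is made up for illustration.

# Illustrative only: the contract below is a made-up example.
sample_src = '''
pragma solidity ^0.5.0;

contract Greeter {
    string public greeting = "hello";

    function setGreeting() public {
        greeting = "hi";
    }
}
'''

source_unit_object = parse_sol(sample_src)
# The objectified source unit exposes contracts and their functions by name,
# as the other snippets do via source_unit_object.contracts[contract].functions.
for contract_name, contract in source_unit_object.contracts.items():
    print(contract_name, list(contract.functions.keys()))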