def runContainer():
    container = self.dockerCl.containers.run(
        image=self.docker_image,
        command=cmd,
        detach=True,
        auto_remove=False,
        network_disabled=True,
        network_mode='none',
        tty=False,
        stdin_open=False,
        volumes={
            path_source[0]: {
                'bind': '/tmp/subject.sol',
                'mode': 'ro'
            }
        })
    # Purposely not checking the exit status
    container.wait()
    output = ''.join(
        line.decode(utf_8.getregentry().name)
        for line in container.logs(
            stdout=True, stderr=False, stream=True, tail='all'))
    STDERROut = ''.join(
        line.decode(utf_8.getregentry().name)
        for line in container.logs(
            stdout=False, stderr=True, stream=True, tail='all'))
    container.remove()
    return output, STDERROut
async def execTCs(args_execTC: Tuple[str, ...]) -> List[Union[bool, str]]:
    logger.trace(F'Executing test case with args: {args_execTC!r}')
    try:
        execTCRun = await asyncio.create_subprocess_exec(
            *args_execTC,
            stdout=subprocess.PIPE,
            stderr=sys.stderr,
        )
        data_stdout, _ = await execTCRun.communicate()
    finally:
        # Kill the subprocess if it is still running (e.g. on cancellation)
        if 'execTCRun' in vars() and execTCRun.returncode is None:
            execTCRun.kill()
    logger.trace('Finished executing TCRun')
    assert execTCRun.returncode == 0
    stdout = data_stdout.decode(utf_8.getregentry().name)
    try:
        tcRst = json.loads(stdout)
    except json.JSONDecodeError:
        logger.error(F'Expected JSON output, got: \n{stdout}')
        raise
    return tcRst
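# A minimal sketch of driving the coroutine above; the runner binary and its
# flags are hypothetical stand-ins for whatever `args_execTC` normally carries:
import asyncio

results = asyncio.run(execTCs(('./tc-runner', '--path=tests/tc1.json')))
# `results` is whatever JSON payload the runner prints,
# e.g. [True, False, 'NotRelevant']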
def runContainer():
    return self.dockerCl.containers.run(
        image=self.docker_image,
        command=cmd,
        entrypoint='/bin/bash -c',
        detach=False,
        auto_remove=True,
        network_disabled=True,
        network_mode='none',
        tty=False,
        stdin_open=False,
        stdout=True,
        stderr=False,
        volumes={
            next(iter(path_source)): {
                'bind': '/tmp/subject.sol',
                'mode': 'ro'
            }
        }).decode(utf_8.getregentry().name)
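# Both helpers above wrap docker-py's `containers.run`: the first runs detached
# and collects stdout/stderr separately; the second blocks and returns the
# stdout bytes directly. A minimal sketch of the blocking pattern, assuming a
# locally available image and a hypothetical host path:
import docker

client = docker.from_env()
logs = client.containers.run(
    image='alpine:3',                   # stand-in image
    command='cat /tmp/subject.sol',     # stand-in command
    network_disabled=True,
    network_mode='none',
    remove=True,                        # client-side cleanup once the run returns
    volumes={'/host/subject.sol': {'bind': '/tmp/subject.sol', 'mode': 'ro'}},
)
print(logs.decode('utf-8'))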
from codecs import CodecInfo
from encodings import utf_8
from io import StringIO

from .tokenizer import transform_string

utf8 = utf_8.getregentry()


def decode(input, errors='strict'):
    # utf_8.decode returns (decoded_text, bytes_consumed)
    cs, length = utf_8.decode(input, errors)
    return transform_string(cs), length


class IncrementalDecoder(utf_8.IncrementalDecoder):
    def decode(self, input, final=False):
        return transform_string(super().decode(input, final))


class StreamReader(utf_8.StreamReader):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.stream = StringIO(transform_string(self.stream.getvalue()))


def search_function(encoding):
    if encoding != 'quasiquotes':
        return None
    # Decoding goes through the transforming wrappers above;
    # encoding falls through to the plain UTF-8 codec.
    return CodecInfo(
        name='quasiquotes',
        encode=utf8.encode,
        decode=decode,
        incrementalencoder=utf8.incrementalencoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=utf8.streamwriter,
    )
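# The search function above follows the stdlib codec-registration protocol;
# a minimal sketch of wiring it up and decoding through the custom codec:
import codecs

codecs.register(search_function)

# Once registered, the codec is addressable by name like any built-in one:
source = b'print("hi")'.decode('quasiquotes')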
# -*- coding: UTF-8 -*-
import MySQLdb
from instance import Instance
from table import Table
from features import Type
import codecs
from encodings import utf_8
from impala.dbapi import connect
import keyColumn_RandomForest
import cw_filter
import sys
import env
from datetime import datetime

# Fall back to the UTF-8 codec for any unknown encoding name
codecs.register(lambda encoding: utf_8.getregentry())
# Python 2: re-expose setdefaultencoding, which is deleted at startup
reload(sys)
sys.setdefaultencoding('utf8')
sys.setrecursionlimit(1000000)

# scan input
args = sys.argv
if len(args) == 4:
    databaseName = args[1]
    tableName = args[2]
    colName = args[3]
else:
    sys.exit('Need three arguments: "databaseName", "tableName" and "colName"')

# read from mysql
dbArgs = [
    env.mysqlHost, env.mysqlPort, "root", "root", "userdb", "column_recommend"
]
def readFromStopWords(self):
    """Read stop words from the language-specific stopwords file."""
    # Use forward slashes so the relative path also works outside Windows
    file = ('textfiles/polish_stopwords.txt'
            if self.language == 'pl' else 'textfiles/english_stopwords.txt')
    with open(file, encoding=utf_8.getregentry().name) as f:
        self.stopWordsList = f.read().split('\n')
    find_tapa_in_index,
    load_tapas_index,
    TapaSchema,
    parse_tapa_location,
    parse_index_location,
    DEFAULT_INDEX_LOCATION,
    load_tapa_from_github,
)

TAPA_FILE = "tapa.py"
TEMPLATE_DIR = "template"
ASK_FUNCTION = "ask"
POST_INIT_FUNCTION = "post_init"
UTF_8 = utf_8.getregentry().name


def parse_path(path: str) -> Path:
    return Path(path).expanduser().resolve().absolute()


class App:
    def __init__(self, args):
        self.version = self._load_version()
        parser = self._init_parser(self.version)
        parser.parse_args(args, self)
        if not self.list and self.tapa is None:
            parser.error("Missing tapa name")
def readFile(self):
    """Read the contents of the configured text file."""
    with open(self.file, encoding=utf_8.getregentry().name) as f:
        self.string = f.read()
def get_encoding(arg: Any) -> str:
    if arg.encoding is not None:
        return arg.encoding
    return utf_8.getregentry().name
async def detect(self,
                 path_source: Sequence[Path],
                 targetContractName: Optional[str],
                 targetLocations: Optional[Sequence[CodeRange]] = None,
                 targetedVul: Optional[Sequence[str]] = None,
                 fastFail: bool = False,
                 **_extra_args) -> ProblemDetectorResult:
    """
    When fastFail is True, the result collapses to a single FailedTestCase
    as soon as any test case fails.
    When fastFail is False, the result contains one VulnerabilityInfo per
    relevant test case.
    """
    if targetContractName is None:
        raise ValueError('targetContractName must be provided')
    # We might want to check whether the abi of contract has been changed
    args_buildContract = F'{self.Cmd_solcJSON}'
    os.environ['R'] = path_source[0].read_text()
    os.environ['C'] = targetContractName
    # NOTE: asyncio subprocesses are byte-based; an encoding kwarg is invalid
    buildContractRun = await asyncio.create_subprocess_shell(
        args_buildContract,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=sys.stderr)
    stdout = ((await buildContractRun.communicate())[0]).decode(
        utf_8.getregentry().name)
    assert buildContractRun.returncode == 0, \
        F'unexpected exit status {buildContractRun.returncode}'
    bytecode = stdout
    if len(bytecode) == 0:
        logger.error(F'Compilation output: \n{stdout}')
        raise ValueError('Compiled bytecode is empty!')
    args_execTC = self.baseArgs_execTC + (
        F'--bin={bytecode}',
        F'--addr={self.addr}') + tuple(F'--path={p}' for p in self.paths_tc)
    if targetLocations is not None:
        args_execTC += (F'--LOCSTR={json.dumps(targetLocations)}', )
    async with self.ctxManagerExecTC:
        logger.debug(F'Start executing {len(self.paths_tc)} test cases')
        tcRsts_ = await self.execTCs(args_execTC)
    tcRsts: List[VulnerabilityInfo]
    if fastFail and tcRsts_ and not tcRsts_[0]:
        tcRsts = [FailedTestCase(name='EthereumTestCase_StateTest')]
    else:
        tcRsts = []
        for testFile, rst in zip(self.paths_tc, tcRsts_):
            vulInfoObj: Optional[VulnerabilityInfo] = None
            if rst is True:
                vulInfoObj = PassedTestCase(name=str(testFile))
            elif rst is False:
                vulInfoObj = FailedTestCase(name=str(testFile))
            elif rst == 'NotRelevant':
                vulInfoObj = None
            if vulInfoObj is not None:
                tcRsts.append(vulInfoObj)
    logger.debug(F'{len(tcRsts)} test cases finally executed')
    return tcRsts
def load_from(file_name):
    with open(path.join(project_directory, file_name),
              encoding=utf_8.getregentry().name) as f:
        return f.read()
__all__ = ['UTF_8']

from encodings import utf_8
from typing import Final

UTF_8: Final[str] = utf_8.getregentry().name
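# Across these snippets, `utf_8.getregentry().name` is just the canonical name
# of the built-in UTF-8 codec; a quick check of the equivalence:
from encodings import utf_8

# getregentry() returns the CodecInfo the stdlib registers for UTF-8,
# and its .name is the normalized codec name.
assert utf_8.getregentry().name == 'utf-8'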
async def __patchSource(
    self,
    path_ori_source: Sequence[Path],
    num_patched: int = 1,
    fault_specifier: Any = None,
    **extra_args
) -> AsyncGenerator[Tuple[PatchInfo, ...], Optional[RequestObject]]:
    """
    NOTE: This whole function is in critical region
    """
    unconsumedPatches: Deque[Dict[str, Union[PatchInfo, str]]] = deque()
    # Building CMD
    args = [
        str(self.path_sm), 'iter-gen-mutations', '--only-compilable=true',
        str(path_ori_source[0])
    ]
    if self.seed is not None:
        args.append(F'--seed={self.seed}')
    if fault_specifier is not None:
        args.append(F'--mutation-space={fault_specifier}')
    if self.output_mutation:
        args.append('--output-mutation')
    if self.for_node_types is not None:
        args.append('--for-node-types')
        args.extend(self.for_node_types)
    if self.replaceable_node_types is not None:
        args.append('--replaceable-node-types')
        args.extend(self.replaceable_node_types)
    logger.trace(F'Constructed base args for sm: {args!r}')
    if os.path.exists('/tmp/terminate_all'):
        os.unlink('/tmp/terminate_all')
    try:
        for space in self.spaceInfo:
            thisArgs = [
                *args,
                F'--patched-src-dir={self.spaceInfo[space]["tempPatchDir"]}'
            ]
            # These two options must be the last ones added
            thisArgs.append('--mutation_types')
            thisArgs.extend(space)
            thisArgs.append('--must-include-mutation-types')
            thisArgs.extend(space)
            logger.trace(F'Start sm for space {space} with args: {thisArgs!r}')
            # NOTE: asyncio subprocesses are byte-based; an encoding kwarg is invalid
            self.spaceInfo[space]['proc'] = await asyncio.create_subprocess_exec(
                *thisArgs,
                limit=2**128,
                stdin=PIPE,
                stdout=PIPE,
                stderr=sys.stderr)
            logger.debug(
                F'sm for space {space} started: pid={self.spaceInfo[space]["proc"].pid}')
        request = None
        while True:
            requestObj: RequestObject
            if request is None:
                requestObj = RequestObject_Random(num_mutations=num_patched)
            else:
                assert isinstance(request, RequestObject)
                requestObj = request
            requestDict = attr.asdict(requestObj,
                                      recurse=True,
                                      retain_collection_types=True)
            # For debugging
            logger.trace(F'sm input: {requestDict!r}')
            jsonStr = json.dumps(requestDict)
            OkPatches = [x for x in unconsumedPatches if x['json'] == jsonStr]
            if OkPatches:
                # Consume first
                unconsumedPatches.remove(OkPatches[0])
                request = yield (OkPatches[0]['payload'], )
                continue
            self.checkIfProcessesOkay()
            if not self.allRunningProcesses():
                break
            stdinDrainTasks = []
            for _, proc in self.allRunningProcesses():
                proc.stdin.write(bytes(jsonStr + os.linesep,
                                       utf_8.getregentry().name))
                stdinDrainTasks.append(proc.stdin.drain())
            await asyncio.gather(*stdinDrainTasks)
            readTasks = [
                asyncio.ensure_future(proc.stdout.readline())
                for _, proc in self.allRunningProcesses()
            ]
            _, pending = await asyncio.wait(readTasks,
                                            return_when=asyncio.FIRST_COMPLETED)
            # Ask the remaining sm processes to stop via their sentinel files
            for _, proc in self.allRunningProcesses():
                open(F'/tmp/terminate_{proc.pid}', 'w').close()
            open('/tmp/terminate_all', 'w').close()
            logger.trace('Created termination sentinel files')
            await asyncio.gather(*pending)
            rs = [x.result() for x in readTasks]
            os.unlink('/tmp/terminate_all')
            gotHax = False
            for space, r in zip(self.spaceInfo, rs):
                out = r.decode(utf_8.getregentry().name)
                if out == '':
                    raise RuntimeError(
                        F'Unexpected empty output from sm for space {space}')
                try:
                    resultObj = json.loads(out)
                except Exception:
                    logger.error(
                        F'sm for space {space} output is not JSON!'
                        F'{os.linesep}output: {out!r}')
                    raise
                assert isinstance(resultObj.get('Result'), str), \
                    F'Unexpected result object from sm for space {space}:\n{resultObj!r}'
                logger.trace(F'sm for space {space} OUTPUT: {resultObj!r}')
                # Status strings (spelling included) are kept verbatim as emitted by sm
                if resultObj['Result'] == 'AllSpaceExhasuted':
                    gotHax = True
                    continue
                elif resultObj['Result'] == 'SpaceExhasutedForAST':
                    newRst: Tuple[PatchInfo, ...] = ()
                else:
                    newRst = tuple(
                        PatchInfo(MutationSequence=mutationSeq,
                                  PatchedFilePath=filePath,
                                  ModifiedLocations=modifiedLocations
                                  if modifiedLocations != 'unknown' else None)
                        for mutationSeq, filePath, modifiedLocations in zip(
                            resultObj['NewMutationSequences'],
                            resultObj['PatchedFilePaths'],
                            resultObj['ModifiedLocations'],
                        ))
                for patch in newRst:
                    if patch.ModifiedLocations is None:
                        logger.debug(
                            F'{patch.PatchedFilePath} has unknown modified locations!')
                unconsumedPatches.extend(
                    {"payload": x, "json": jsonStr} for x in newRst)
            OkPatches = [x for x in unconsumedPatches if x['json'] == jsonStr]
            if not OkPatches:
                if not gotHax:
                    request = yield ()
                continue
            unconsumedPatches.remove(OkPatches[0])
            request = yield (OkPatches[0]['payload'], )
        return
    finally:
        self.killAllProcesses()