Example #1
0
    def processBitcode(self, bc, outResult, program_name, redisports,
                       OUT_FOLDER, onlybc):
        """Run the exploration + variant-generation pipeline for one bitcode.

        First compiles the untouched bitcode as the baseline candidate, then
        fans exploration jobs out over the configured REDIS instances, merges
        the replacement blocks they discover, generates one variant per key
        subset of the merged map, and persists both the exploration map and
        the variant metadata as JSON under OUT_FOLDER.

        Parameters:
            bc: input bitcode payload, passed through to the worker jobs.
            outResult: dict mutated in place; the baseline candidate is
                appended to outResult["candidates"] and worker jobs receive
                it to add their own entries.
            program_name: label used for output file names and log prefixes.
            redisports: list of REDIS ports; its length sets the parallelism.
            OUT_FOLDER: directory where wasm/JSON artifacts are written.
            onlybc: forwarded as generateOnlyBc — when truthy only the
                bitcode is generated, no wasm.

        Returns:
            dict(programs=<meta: wasm name -> size/sha>, count=<len(meta)>).
        """
        sha = set()      # distinct wasm hashes seen (baseline bookkeeping)
        meta = dict()    # wasm file name -> dict(size=..., sha=...)
        sizes = {}       # sha -> [size, []] (baseline bookkeeping)

        # Baseline: compile the untouched bitcode first so the original
        # program is always among the candidates.
        originalSha, originalSize, originalWasmName, _ = self.generateWasm(
            program_name, bc, OUT_FOLDER, program_name, generateOnlyBc=onlybc)
        sha.add(originalSha)
        sizes[originalSha] = [originalSize, []]

        meta[originalWasmName.split("/")[-1]] = dict(size=originalSize,
                                                     sha=originalSha)
        outResult["candidates"].append(
            dict(size=originalSize,
                 sha=originalSha,
                 name=originalWasmName.split("/")[-1]))

        try:

            futures = []
            order = list(
                map(lambda x: int(x), config["DEFAULT"]["order"].split(",")))
            LOGGER.info("ORDER", order)

            # Split the exploration levels evenly across the available REDIS
            # instances; each chunk becomes one processLevel job.
            works = self.chunkIt(order, len(redisports))

            for i, port in enumerate(redisports):
                job = levelPool.submit(self.processLevel, works[i],
                                       program_name, port, bc, OUT_FOLDER,
                                       onlybc, meta, outResult)
                futures.append(job)

            # BUGFIX: the configured exploration-timeout was read but never
            # handed to wait(), so exploration could block forever. Jobs that
            # miss the deadline land in `fail` and are simply not merged.
            timeout = config["DEFAULT"].getint("exploration-timeout")
            done, fail = wait(futures, timeout=timeout,
                              return_when=ALL_COMPLETED)
            levelPool.shutdown(False)

            # Merge the per-level exploration results into a single map:
            # code-block key -> deduplicated list of replacements.
            LOGGER.info(program_name, "Merging exploration results...")

            merging = {}

            for f in done:
                try:
                    r = f.result()
                    for k, v in r.items():
                        LOGGER.info(program_name,
                                    f"[{k}] {len(v)} code blocks")
                        for k1, v1 in v.items():
                            vSet = set(v1)
                            if k1 not in merging:
                                merging[k1] = []
                            merging[k1] += vSet
                            merging[k1] = list(set(merging[k1]))
                            LOGGER.info(
                                program_name,
                                f"\t - {len(merging[k1])} replacements")
                except Exception:
                    # A failed exploration job is logged but must not abort
                    # the merge of the remaining results.
                    LOGGER.error(program_name, traceback.format_exc())

            # TODO Separate both stages to support continuing on
            # Persist the merged exploration map (None entries filtered out).
            with open(f"{OUT_FOLDER}/{program_name}.exploration.json",
                      'w') as variantsFile:
                variantsFile.write(
                    json.dumps([[k, [v1 for v1 in v if v1 is not None]]
                                for k, v in merging.items()],
                               indent=4))

            # Generation stage: enumerate key subsets of the replacement map
            # and produce one variant per subset.
            LOGGER.info(
                program_name,
                f"Generating jobs for {len(redisports)} REDIS instances...")

            generationcount = 0
            futures = []
            variants = []

            showGenerationProgress = config["DEFAULT"].getboolean(
                "show-generation-progress")

            # Upper bound on the variant count: every key contributes
            # (replacements + 1) choices — the +1 keeps the original block.
            temptativeNumber = np.prod([len(v) + 1 for v in merging.values()])

            LOGGER.info(
                program_name,
                f"Temptative number of variants {temptativeNumber} (plus original). Expected ratio {len(redisports)} of programs in each iteration."
            )

            if showGenerationProgress:
                # The progress bar replaces regular logging while generating.
                LOGGER.disable()
                printProgressBar(
                    generationcount,
                    temptativeNumber,
                    suffix=f'             {generationcount}/{temptativeNumber}'
                )

            for subset in getIteratorByName("keysSubset")(merging):

                # Round-robin the subsets over the REDIS instances.
                job = generationPool.submit(
                    self.generateVariant, [subset], program_name, merging,
                    redisports[generationcount % len(redisports)], bc,
                    OUT_FOLDER, onlybc, meta, outResult, generationcount,
                    temptativeNumber)

                futures.append(job)
                generationcount += 1

                # Once every instance has one job, wait for the whole batch
                # before scheduling the next round.
                if generationcount % len(redisports) == 0:
                    generationStartTime = time.time_ns()
                    LOGGER.info(program_name,
                                "Executing parallel generation job...")
                    done, fail = wait(futures, return_when=ALL_COMPLETED)
                    generationEndTime = time.time_ns() - generationStartTime

                    futures = []

                    LOGGER.info(program_name,
                                f"Disposing job...{len(done)} {len(fail)}")

                    for f in done:
                        variants += f.result()

                    if showGenerationProgress:
                        # max(..., 1) guards against a zero-length interval
                        # from the ns clock (ZeroDivisionError otherwise).
                        speed = len(redisports) / max(generationEndTime, 1)
                        eta = temptativeNumber / speed / 1e9

                        printProgressBar(
                            len(variants),
                            temptativeNumber,
                            suffix=
                            f'  {generationcount}/{temptativeNumber} eta:{eta}s'
                        )

            # Drain the last (possibly partial) batch of generation jobs.
            LOGGER.info(program_name,
                        "Executing final parallel generation job...")
            done, fail = wait(futures, return_when=ALL_COMPLETED)
            futures = []
            LOGGER.info(program_name,
                        f"Disposing job...{len(done)} {len(fail)}")
            generationcount += len(done) + len(fail)

            for f in done:
                variants += f.result()

            if showGenerationProgress:
                printProgressBar(
                    len(variants),
                    temptativeNumber,
                    suffix=
                    f'  {generationcount}/{temptativeNumber}                       '
                )
                LOGGER.enable()

            # Persist the variant list with unique/total sha counts.
            LOGGER.info(program_name, "Saving metadata...")
            with open(f"{OUT_FOLDER}/{program_name}.variants.json",
                      'w') as variantsFile:
                variantsFile.write(
                    json.dumps(
                        {
                            "variants": variants,
                            "unique": len(set([v[0] for v in variants])),
                            "total": len([v[0] for v in variants])
                        },
                        indent=4))

        except BreakException:
            # NOTE(review): BreakException appears to be a cooperative
            # "stop early" signal from the worker side — confirm against its
            # raiser. Whatever was collected in `meta` so far is returned.
            pass

        return dict(programs=meta, count=len(meta.keys()))