Code Example #1
def start_profiling(func, filepath, statistical=True):
    import sys
    import pprofile
    if statistical:
        prof = pprofile.StatisticalProfile()
    else:
        prof = pprofile.Profile()

    def stop_profiling(prof, filepath):
        print('Writing profiling data: %s' % filepath, file=sys.stderr)
        print('You can use kcachegrind to analyze it.', file=sys.stderr)
        with open(filepath, 'w') as f:
            prof.callgrind(f)

    # This makes the `finally` block work as expected if we're terminated by
    # SIGTERM, which happens by default when running `timeout 30 stig ...`.
    # https://stackoverflow.com/a/42200623
    # https://mail.python.org/pipermail/python-ideas/2016-February/038474.html
    import signal

    class SigTerm(SystemExit):
        pass

    def sigterm(sig, frame):
        raise SigTerm

    signal.signal(signal.SIGTERM, sigterm)

    try:
        with prof():
            func()
    finally:
        stop_profiling(prof, filepath)
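
A hypothetical driver for the helper above (busy_workload and the output path are placeholders, not part of the original snippet):

def busy_workload():
    total = 0
    for i in range(10 ** 6):
        total += i * i

start_profiling(busy_workload, '/tmp/workload.callgrind', statistical=False)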
Code Example #2
File: cfg.py Project: xiaofeiguorex/Chestnut
def extract_syscalls(fname):
    profiler = pprofile.Profile()  # note: created but never enabled in this snippet
    # clear the function cache if it is not empty, otherwise we get random syscalls based on the order of dependencies being processed
    if function_cache:
        print("Clearing function cache")
        function_cache.clear()

    start_time()
    try:
        cfg = get_cfg(fname)
    except Exception:
        print("[-] angr could not extract the CFG from %s" % fname)
        return None
    stop_time("Getting CFG")

    insn = syscalls.init(fname)
    stop_time("Syscall Init")
    addrs = syscalls.find_syscall_locations(insn)
    stop_time("Syscall locations")
    sys_addrs = [x[1] for x in addrs]

    syslist = function_calling_syscalls(cfg, sys_addrs)
    stop_time("Syslist")
    callees = get_call_targets(cfg)
    stop_time("Callee list")

    syscaller = syscalls_per_function(cfg, callees, syslist)
    stop_time("Extract syscalls per function")

    whitelist = {}
    try:
        with open("function_whitelist.json") as wl:
            whitelist = json.loads(wl.read())
    except Exception:
        pass

    insn_to_syscall = {}
    used_syscalls = {}
    for fnc in syscaller:
        insn_list = []
        for sysc in syscaller[fnc]:
            for addr in addrs:
                if addr[1] == sysc:
                    if addr[0] not in insn_to_syscall:
                        insn_to_syscall[addr[0]] = syscalls.find_syscall_nr(
                            insn, addr[0])
                    insn_list.append(insn_to_syscall[addr[0]])
        if fnc in whitelist:
            print("Found %s in whitelist, adding %d syscall(s)" %
                  (fnc, len(whitelist[fnc])))
            insn_list += whitelist[fnc]
        used_syscalls[fnc] = sorted(list(set(insn_list))).copy()
    stop_time("Find syscall numbers")

    all_syscalls = set()
    for f in used_syscalls:
        all_syscalls.update(set(used_syscalls[f]))
    used_syscalls[":all"] = sorted(list(all_syscalls))

    return used_syscalls
Code Example #3
File: genjutsu_test.py Project: zeprone/genjutsu
import pprofile
import pytest


@pytest.fixture
def profiler(request):
    profile = pprofile.Profile()
    yield profile
    filename = request.config.getoption('--profile')
    with filename.open('w', encoding='utf-8') as out:
        profile.annotate(out)
    with filename.with_suffix('.callgrind').open('w') as out:
        profile.callgrind(out)
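
A sketch of how this fixture might be wired and consumed; registering --profile as a pathlib.Path option is an assumption inferred from the .open()/.with_suffix() calls above:

# conftest.py (sketch)
from pathlib import Path

def pytest_addoption(parser):
    parser.addoption('--profile', type=Path, default=Path('profile.txt'))

# test module (sketch): the code under test must run inside the profile context
def test_something(profiler):
    with profiler:
        do_work()  # placeholder for the code being measured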
Code Example #4
    def __init__(self, sample_mode=False, thread_mode=False, period=0.01):
        self._sample_mode = sample_mode

        if sample_mode:
            self._profiler = pprofile.StatisticThread(single=not thread_mode,
                                                      period=period)
        elif thread_mode:
            self._profiler = pprofile.ThreadProfile()
        else:
            self._profiler = pprofile.Profile()
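
For comparison, recent pprofile versions expose statistical profiling through StatisticalProfile (StatisticThread appears to be the older spelling of StatisticalThread); a minimal sketch in the style of the pprofile README, with do_work() as a placeholder:

import pprofile

prof = pprofile.StatisticalProfile()
with prof(period=0.01, single=True):  # sample every 10 ms, current thread only
    do_work()
prof.print_stats()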
Code Example #5
def profile_algorithm(algorithm_name, filename):
    dataset = _numeric_columns_from_csv(filename)
    dataset.dropna(axis=0, how='any', inplace=True)

    prof = pprofile.Profile()
    with prof:
        _benchmark_algorithm(algorithm_name, dataset)
    prof.dump_stats(filename="./calc/benchmarks/profiling.log")

    with open("./calc/benchmarks/profiling.log", 'r') as prof_file:
        with open("./calc/benchmarks/profiling_stripped.log",
                  'w') as stripped_file:
            for line in prof_file:
                # drop annotated lines whose time share is 0.00%
                line = re.sub(r'^.*\|.*0\.00%\|.*$\n', r'', line)
                stripped_file.write(line)
Code Example #6
        def inner_wrapper(*args, **kwargs):
            r.incr(key)
            call_num = int(r.get(key))
            if call_num > max_call_num or (call_num - 1) % step != 0:
                return func(*args, **kwargs)
            print_title = (' ' * 30 + f"-*-pprofile_print_stats-*-|{call_num}")

            prof = pprofile.Profile()
            with prof():
                result = func(*args, **kwargs)
            print('-' * 100)
            print(print_title)
            print('-' * 100)
            print('')
            _pprofile_dump(prof,
                           f"{PROFILE_ROOT_PATH}/cachegrind.out.{call_num}")
            return result
Code Example #7
        def inner_wrapper(*args, **kwargs):
            r.incr(key)
            call_num = int(r.get(key))
            if call_num > max_call_num or (call_num - 1) % step != 0:
                return func(*args, **kwargs)
            print_title = (' ' * 30 + f'-*-pprofile_print_stats-*-|{call_num}')

            prof = pprofile.Profile()
            with prof():
                result = func(*args, **kwargs)
            print('-' * 100)
            print(print_title)
            print('-' * 100)
            print('')
            _pprofile_dump(
                prof, f"/tmp/{PROJECT_NAME}/pp/cachegrind.out.{call_num}",
                call_num == 1)
            return result
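
_pprofile_dump is not shown in either snippet above; a plausible stand-in (hypothetical: the name of the helper's third parameter, the directory handling, and the meaning of the `call_num == 1` argument are guesses) could be:

import os

def _pprofile_dump(prof, path, overwrite=True):
    # Hypothetical helper: write kcachegrind-compatible output.
    # The `call_num == 1` argument above presumably decides whether an
    # output file left over from an earlier run should be replaced.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    if overwrite or not os.path.exists(path):
        with open(path, 'w') as f:
            prof.callgrind(f)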
Code Example #8
def line_profiler():
    profiler = None
    try:
        if settings.PROFILER["line-profiler"]:
            import pprofile

            if settings.PROFILER["line-profiler-type"] == "deterministic":
                profiler = pprofile.Profile()
            elif settings.PROFILER["line-profiler-type"] == "statistic":
                prof = pprofile.StatisticalProfile()
                profiler = prof(
                    period=0.001,  # Sample every 1ms
                    single=True,  # Only sample current thread
                )
    except ImportError:
        print("Unable to create line_profiler : ImportError")
    except Exception as e:
        print("Unable to create line_profiler : " + str(e))
    return profiler
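
Both branches return an object usable as a context manager (a minimal sketch; handle_request() is a placeholder). Note that in the "statistic" branch the StatisticalProfile that accumulates samples stays local to line_profiler(), so the caller can drive sampling but may not be able to print the collected stats afterwards:

profiler = line_profiler()
if profiler is not None:
    with profiler:  # works for Profile and for the statistical sampling context
        handle_request()
else:
    handle_request()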
Code Example #9
    def harvest(self):
        import pprofile
        profiler = pprofile.Profile()
        profiler.enable()
        objects = []
        div = self.soup.findAll('div', {'id': "mw-content-text"})[0]

        pattern = re.compile(r"\w*, \w*")  # raw string avoids invalid-escape warnings
        for tag in div.findAll('a'):
            name = re.findall(pattern, tag.text)
            if name:
                objects.append({
                    'name': name[0],
                })
        profiler.disable()
        with open('names_harvest.prof', 'w+') as out:
            profiler.callgrind(out)

        return objects
Code Example #10
File: runner.py Project: pums974/KalmanFilter
def run_all_tests_cases():
    """
        Run all the test cases
    :return:
    """
    for edp in test_cases():
        if profiling:
            profiler = pprofile.Profile()
            with profiler:
                run_a_test_case(edp)
            # profiler.print_stats()
            with open("profile." + edp.name, 'w') as fich:
                profiler.callgrind(fich)
            # cProfile.run("edp.run_test_case(graphs)", "profile." + edp.name)
            # p = pstats.Stats("profile." + edp.name)
            # p.sort_stats('time').print_stats(10)
            # p.sort_stats('cumulative').print_stats(10)
            # os.remove("profile." + edp.name)
        else:
            run_a_test_case(edp)
Code Example #11
def benchmark_dataloader(
    loader: DataLoader,
    max_iterations: int = -1,
    profile: bool = False,
    profile_callgrind: Optional[Path] = None,
) -> None:
    dataloader_iter = iter(loader)

    def run_dataloader():
        end_of_iter_time = time()
        total_iterations = (
            len(dataloader_iter)
            if max_iterations < 0
            else min(len(dataloader_iter), max_iterations)
        )
        for i, batch in enumerate(dataloader_iter):
            if 0 < max_iterations <= i:
                break
            start_of_iter_time = time()
            dataloader_duration_s = start_of_iter_time - end_of_iter_time
            examples_per_second = loader.batch_size / dataloader_duration_s

            print(
                "batch[{}/{}] {:.2f} examples/s".format(
                    i + 1, total_iterations, examples_per_second
                )
            )
            end_of_iter_time = start_of_iter_time

    if profile:
        prof = pprofile.Profile()
        with prof():
            run_dataloader()
        if profile_callgrind is not None:
            with open(str(profile_callgrind), "w", encoding="utf8") as f:
                prof.callgrind(f)
            print("Wrote callgrind profile log to {}".format(profile_callgrind))
        else:
            prof.print_stats()
    else:
        run_dataloader()
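
A possible invocation (sketch; the DataLoader itself comes from elsewhere):

from pathlib import Path

benchmark_dataloader(
    loader,  # an existing torch DataLoader
    max_iterations=100,
    profile=True,
    profile_callgrind=Path("dataloader.callgrind"),
)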
Code Example #12
  def profile(self, func, suffix='', args=(), kw=None):
    """Profile `func(*args, **kw)` with selected profiler,
      and dump output in a file called `func.__name__ + suffix`
    """
    if not kw:
      kw = {}

    if PROFILER == 'pprofile':
      import pprofile
      prof = pprofile.Profile()
    else:
      from cProfile import Profile
      prof = Profile()

    prof_file = '%s%s' % (func.__name__, suffix)
    try:
      os.unlink(prof_file)
    except OSError:
      pass
    prof.runcall(func, *args, **kw)
    prof.dump_stats(prof_file)
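
How to read the dump depends on the selected profiler (a sketch; 'my_func' stands for the generated file name): pprofile's dump_stats() writes its human-readable annotated output, while cProfile's writes binary data meant for pstats.

import pstats

# cProfile case: binary stats, inspect with pstats
stats = pstats.Stats('my_func')
stats.sort_stats('cumulative').print_stats(10)

# pprofile case: the file is already plain text
with open('my_func') as f:
    print(f.read())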
Code Example #13
 async def execute(self, message, profile=False):
     if message.author.id == self.user.id:
         return
     if message.content.startswith(self.profile.prefix):
         raw = message.content[len(self.profile.prefix):].split()
     else:
         raw = message.content.split(" ", 2)[1:]
     cmd = raw[0].lower()
     args = " ".join(raw[1:])
     args = re.compile(r'''((?:[^\s"']|"[^"]*"|'[^']*')+)''').split(
         args)[1::2]
     new = []
     for arg in args:
         new.append(arg.strip("\""))
     args = new
     for ext in self.extensions:
         for c in ext.commands:
             if c.name == cmd:
                 if bot.in_role_list(message.author, c.roles):
                     if profile:
                         import pprofile
                         profiler = pprofile.Profile()
                         with profiler():
                             await self.command(
                                 c, bot.Context(self, ext, message), args)
                         out = io.StringIO()
                         with redirect_stdout(out):
                             profiler.print_stats()
                         resp = out.getvalue().split("\n")
                         await bot.send_stats(c, self.profile.prefix, resp,
                                              raw, message.channel)
                     else:
                         await self.command(c,
                                            bot.Context(self, ext, message),
                                            args)
                 else:
                     await message.channel.send(
                         "You are not allowed to run that command.")
Code Example #14
File: MonteCarlo.py Project: WarField95/Prog3
import random, math, time
import pprofile

profiler = pprofile.Profile()


class MonteCarlo():
    def __init__(self, mode=0, runAmount=900):
        self.mode = mode
        self.runAmount = runAmount
        self.timeRand = []
        self.congruentialRand = None

    def getRandom(self, min=0, max=1000):
        return random.randint(min, max)

    def getCongruentialRand(self, a, b, m):
        if self.congruentialRand is None:
            self.congruentialRand = int(m / 2)  # r_0

        self.congruentialRand = (a * self.congruentialRand + b) % m  # r_n++

        return self.congruentialRand

    # Function to check if a random point is inside the circle quarter
    def checkIfInside(self):
        x = 0
        y = 0
        m = 1000
Code Example #15
File: pyheat.py Project: barleyj/pyheat
 def __profile_file(self):
     """Method used to profile the given file line by line."""
     self.line_profiler = pprofile.Profile()
     with open(self.pyfile.path, 'r') as fd:
         self.line_profiler.runfile(fd, {}, self.pyfile.path)
Code Example #16
def execute_wfc(
    filename: Optional[str] = None,
    tile_size: int = 1,
    pattern_width: int = 2,
    rotations: int = 8,
    output_size: Tuple[int, int] = (48, 48),
    ground: Optional[int] = None,
    attempt_limit: int = 10,
    output_periodic: bool = True,
    input_periodic: bool = True,
    loc_heuristic: Literal["lexical", "hilbert", "spiral", "entropy", "anti-entropy", "simple", "random"] = "entropy",
    choice_heuristic: Literal["lexical", "rarest", "weighted", "random"] = "weighted",
    visualize: bool = False,
    global_constraint: Literal[False, "allpatterns"] = False,
    backtracking: bool = False,
    log_filename: str = "log",
    logging: bool = False,
    global_constraints: None = None,
    log_stats_to_output: Optional[Callable[[Dict[str, Any], str], None]] = None,
    *,
    image: Optional[NDArray[np.integer]] = None,
) -> NDArray[np.integer]:
    timecode = datetime.datetime.now().isoformat().replace(":", ".")
    time_begin = time.perf_counter()
    output_destination = r"./output/"
    input_folder = r"./images/samples/"

    rotations -= 1  # change to zero-based

    input_stats = {
        "filename": str(filename),
        "tile_size": tile_size,
        "pattern_width": pattern_width,
        "rotations": rotations,
        "output_size": output_size,
        "ground": ground,
        "attempt_limit": attempt_limit,
        "output_periodic": output_periodic,
        "input_periodic": input_periodic,
        "location heuristic": loc_heuristic,
        "choice heuristic": choice_heuristic,
        "global constraint": global_constraint,
        "backtracking": backtracking,
    }

    # Load the image
    if filename:
        if image is not None:
            raise TypeError("Only filename or image can be provided, not both.")
        image = imageio.imread(input_folder + filename + ".png")[:, :, :3]  # TODO: handle alpha channels

    if image is None:
        raise TypeError("An image must be given.")

    # TODO: generalize this to more than the four cardinal directions
    direction_offsets = list(enumerate([(0, -1), (1, 0), (0, 1), (-1, 0)]))

    tile_catalog, tile_grid, _code_list, _unique_tiles = make_tile_catalog(image, tile_size)
    (
        pattern_catalog,
        pattern_weights,
        pattern_list,
        pattern_grid,
    ) = make_pattern_catalog_with_rotations(
        tile_grid, pattern_width, input_is_periodic=input_periodic, rotations=rotations
    )

    logger.debug("pattern catalog")

    # visualize_tiles(unique_tiles, tile_catalog, tile_grid)
    # visualize_patterns(pattern_catalog, tile_catalog, pattern_weights, pattern_width)
    # figure_list_of_tiles(unique_tiles, tile_catalog, output_filename=f"visualization/tilelist_{filename}_{timecode}")
    # figure_false_color_tile_grid(tile_grid, output_filename=f"visualization/tile_falsecolor_{filename}_{timecode}")
    if visualize and filename:
        figure_pattern_catalog(
            pattern_catalog,
            tile_catalog,
            pattern_weights,
            pattern_width,
            output_filename=f"visualization/pattern_catalog_{filename}_{timecode}",
        )

    logger.debug("profiling adjacency relations")
    if False:  # flip to True to profile adjacency extraction with pprofile
        import pprofile  # type: ignore
        profiler = pprofile.Profile()
        with profiler:
            adjacency_relations = adjacency_extraction(
                pattern_grid,
                pattern_catalog,
                direction_offsets,
                [pattern_width, pattern_width],
            )
        profiler.dump_stats(f"logs/profile_adj_{filename}_{timecode}.txt")
    else:
        adjacency_relations = adjacency_extraction(
            pattern_grid,
            pattern_catalog,
            direction_offsets,
            (pattern_width, pattern_width),
        )

    logger.debug("adjacency_relations")

    if visualize:
        figure_adjacencies(
            adjacency_relations,
            direction_offsets,
            tile_catalog,
            pattern_catalog,
            pattern_width,
            [tile_size, tile_size],
            output_filename=f"visualization/adjacency_{filename}_{timecode}_A",
        )
        # figure_adjacencies(adjacency_relations, direction_offsets, tile_catalog, pattern_catalog, pattern_width, [tile_size, tile_size], output_filename=f"visualization/adjacency_{filename}_{timecode}_B", render_b_first=True)

    logger.debug(f"output size: {output_size}\noutput periodic: {output_periodic}")
    number_of_patterns = len(pattern_weights)
    logger.debug(f"# patterns: {number_of_patterns}")
    decode_patterns = dict(enumerate(pattern_list))
    encode_patterns = {x: i for i, x in enumerate(pattern_list)}
    _encode_directions = {j: i for i, j in direction_offsets}

    adjacency_list: Dict[Tuple[int, int], List[Set[int]]] = {}
    for _, adjacency in direction_offsets:
        adjacency_list[adjacency] = [set() for _ in pattern_weights]
    # logger.debug(adjacency_list)
    for adjacency, pattern1, pattern2 in adjacency_relations:
        # logger.debug(adjacency)
        # logger.debug(decode_patterns[pattern1])
        adjacency_list[adjacency][encode_patterns[pattern1]].add(encode_patterns[pattern2])

    logger.debug(f"adjacency: {len(adjacency_list)}")

    time_adjacency = time.perf_counter()

    ### Ground ###

    ground_list: Optional[NDArray[np.int64]] = None
    if ground:
        ground_list = np.vectorize(lambda x: encode_patterns[x])(
            pattern_grid.flat[(ground - 1) :]
        )
    if ground_list is None or ground_list.size == 0:
        ground_list = None

    if ground_list is not None:
        ground_catalog = {
            encode_patterns[k]: v
            for k, v in pattern_catalog.items()
            if encode_patterns[k] in ground_list
        }
        if visualize:
            figure_pattern_catalog(
                ground_catalog,
                tile_catalog,
                pattern_weights,
                pattern_width,
                output_filename=f"visualization/patterns_ground_{filename}_{timecode}",
            )

    wave = makeWave(
        number_of_patterns, output_size[0], output_size[1], ground=ground_list
    )
    adjacency_matrix = makeAdj(adjacency_list)

    ### Heuristics ###

    encoded_weights: NDArray[np.float64] = np.zeros((number_of_patterns), dtype=np.float64)
    for w_id, w_val in pattern_weights.items():
        encoded_weights[encode_patterns[w_id]] = w_val
    choice_random_weighting: NDArray[np.float64] = np.random.random_sample(wave.shape[1:]) * 0.1

    pattern_heuristic: Callable[[NDArray[np.bool_], NDArray[np.bool_]], int] = lexicalPatternHeuristic
    if choice_heuristic == "rarest":
        pattern_heuristic = makeRarestPatternHeuristic(encoded_weights)
    if choice_heuristic == "weighted":
        pattern_heuristic = makeWeightedPatternHeuristic(encoded_weights)
    if choice_heuristic == "random":
        pattern_heuristic = makeRandomPatternHeuristic(encoded_weights)

    logger.debug(loc_heuristic)
    location_heuristic: Callable[[NDArray[np.bool_]], Tuple[int, int]] = lexicalLocationHeuristic
    if loc_heuristic == "anti-entropy":
        location_heuristic = makeAntiEntropyLocationHeuristic(choice_random_weighting)
    if loc_heuristic == "entropy":
        location_heuristic = makeEntropyLocationHeuristic(choice_random_weighting)
    if loc_heuristic == "random":
        location_heuristic = makeRandomLocationHeuristic(choice_random_weighting)
    if loc_heuristic == "simple":
        location_heuristic = simpleLocationHeuristic
    if loc_heuristic == "spiral":
        location_heuristic = makeSpiralLocationHeuristic(choice_random_weighting)
    if loc_heuristic == "hilbert":
        location_heuristic = makeHilbertLocationHeuristic(choice_random_weighting)

    ### Visualization ###

    (
        visualize_choice,
        visualize_wave,
        visualize_backtracking,
        visualize_propagate,
        visualize_final,
        visualize_after,
    ) = (None, None, None, None, None, None)
    if filename and visualize:
        (
            visualize_choice,
            visualize_wave,
            visualize_backtracking,
            visualize_propagate,
            visualize_final,
            visualize_after,
        ) = make_solver_visualizers(
            f"{filename}_{timecode}",
            wave,
            decode_patterns=decode_patterns,
            pattern_catalog=pattern_catalog,
            tile_catalog=tile_catalog,
            tile_size=[tile_size, tile_size],
        )
    if filename and logging:
        (
            visualize_choice,
            visualize_wave,
            visualize_backtracking,
            visualize_propagate,
            visualize_final,
            visualize_after,
        ) = make_solver_loggers(f"{filename}_{timecode}", input_stats.copy())
    if filename and logging and visualize:
        vis = make_solver_visualizers(
            f"{filename}_{timecode}",
            wave,
            decode_patterns=decode_patterns,
            pattern_catalog=pattern_catalog,
            tile_catalog=tile_catalog,
            tile_size=[tile_size, tile_size],
        )
        log = make_solver_loggers(f"{filename}_{timecode}", input_stats.copy())

        def visfunc(idx: int):
            def vf(*args, **kwargs):
                if vis[idx]:
                    vis[idx](*args, **kwargs)
                if log[idx]:
                    return log[idx](*args, **kwargs)

            return vf

        (
            visualize_choice,
            visualize_wave,
            visualize_backtracking,
            visualize_propagate,
            visualize_final,
            visualize_after,
        ) = [visfunc(x) for x in range(len(vis))]

    ### Global Constraints ###
    active_global_constraint = lambda wave: True
    if global_constraint == "allpatterns":
        active_global_constraint = make_global_use_all_patterns()
    logger.debug(active_global_constraint)
    combined_constraints = [active_global_constraint]

    def combinedConstraints(wave: NDArray[np.bool_]) -> bool:
        return all(fn(wave) for fn in combined_constraints)

    ### Solving ###

    time_solve_start = None
    time_solve_end = None

    solution_tile_grid = None
    logger.debug("solving...")
    attempts = 0
    while attempts < attempt_limit:
        attempts += 1
        time_solve_start = time.perf_counter()
        stats = {}
        # profiler = pprofile.Profile()
        # with profiler:
        # with PyCallGraph(output=GraphvizOutput(output_file=f"visualization/pycallgraph_{filename}_{timecode}.png")):
        try:
            solution = run(
                wave.copy(),
                adjacency_matrix,
                locationHeuristic=location_heuristic,
                patternHeuristic=pattern_heuristic,
                periodic=output_periodic,
                backtracking=backtracking,
                onChoice=visualize_choice,
                onBacktrack=visualize_backtracking,
                onObserve=visualize_wave,
                onPropagate=visualize_propagate,
                onFinal=visualize_final,
                checkFeasible=combinedConstraints,
            )
            if visualize_after:
                stats = visualize_after()
            # logger.debug(solution)
            # logger.debug(stats)
            solution_as_ids = np.vectorize(lambda x: decode_patterns[x])(solution)
            solution_tile_grid = pattern_grid_to_tiles(
                solution_as_ids, pattern_catalog
            )

            logger.debug("Solution:")
            # logger.debug(solution_tile_grid)
            if filename:
                render_tiles_to_output(
                    solution_tile_grid,
                    tile_catalog,
                    (tile_size, tile_size),
                    output_destination + filename + "_" + timecode + ".png",
                )

            time_solve_end = time.perf_counter()
            stats.update({"outcome": "success"})
        except StopEarly:
            logger.debug("Skipping...")
            stats.update({"outcome": "skipped"})
            raise
        except TimedOut:
            logger.debug("Timed Out")
            if visualize_after:
                stats = visualize_after()
            stats.update({"outcome": "timed_out"})
        except Contradiction as exc:
            logger.warning(f"Contradiction: {exc}")
            if visualize_after:
                stats = visualize_after()
            stats.update({"outcome": "contradiction"})
        finally:
            # profiler.dump_stats(f"logs/profile_{filename}_{timecode}.txt")
            outstats = {}
            outstats.update(input_stats)
            solve_duration = time.perf_counter() - time_solve_start
            if time_solve_end is not None:
                solve_duration = time_solve_end - time_solve_start
            adjacency_duration = time_solve_start - time_adjacency
            outstats.update(
                {
                    "attempts": attempts,
                    "time_start": time_begin,
                    "time_adjacency": time_adjacency,
                    "adjacency_duration": adjacency_duration,
                    "time solve start": time_solve_start,
                    "time solve end": time_solve_end,
                    "solve duration": solve_duration,
                    "pattern count": number_of_patterns,
                }
            )
            outstats.update(stats)
            if log_stats_to_output is not None:
                log_stats_to_output(outstats, output_destination + log_filename + ".tsv")
        if solution_tile_grid is not None:
            return tile_grid_to_image(solution_tile_grid, tile_catalog, (tile_size, tile_size))

    raise TimedOut("Attempt limit exceeded.")
Code Example #17
def main(daemon_mode=True):
    global prof
    # parse option
    parser = argparse.ArgumentParser()
    parser.add_argument('--pid',
                        action='store',
                        dest='pid',
                        default=None,
                        help='pid filename')
    parser.add_argument('--single',
                        action='store_true',
                        dest='singleMode',
                        default=False,
                        help='use single mode')
    parser.add_argument(
        '--hostname_file',
        action='store',
        dest='hostNameFile',
        default=None,
        help='to record the hostname where harvester is launched')
    parser.add_argument('--rotate_log',
                        action='store_true',
                        dest='rotateLog',
                        default=False,
                        help='rollover log files before launching harvester')
    parser.add_argument('--version',
                        action='store_true',
                        dest='showVersion',
                        default=False,
                        help='show version information and exit')
    parser.add_argument('--profile_output',
                        action='store',
                        dest='profileOutput',
                        default=None,
                        help='filename to save the results of profiler')
    parser.add_argument(
        '--profile_mode',
        action='store',
        dest='profileMode',
        default='s',
        help=
        'profile mode. s (statistic), d (deterministic), or t (thread-aware)')
    parser.add_argument(
        '--memory_logging',
        action='store_true',
        dest='memLogging',
        default=False,
        help='add information of memory usage in each logging message')
    parser.add_argument('--foreground',
                        action='store_true',
                        dest='foreground',
                        default=False,
                        help='run in the foreground not to be daemonized')
    options = parser.parse_args()
    # show version information
    if options.showVersion:
        print("Version : {0}".format(panda_pkg_info.release_version))
        print("Last commit : {0}".format(commit_timestamp.timestamp))
        return
    # check pid
    if options.pid is not None and os.path.exists(options.pid):
        print("ERROR: Cannot start since lock file {0} already exists".format(
            options.pid))
        return
    # uid and gid
    uid = pwd.getpwnam(harvester_config.master.uname).pw_uid
    gid = grp.getgrnam(harvester_config.master.gname).gr_gid
    # get umask
    umask = os.umask(0)
    os.umask(umask)
    # memory logging
    if options.memLogging:
        core_utils.enable_memory_profiling()
    # hostname
    if options.hostNameFile is not None:
        with open(options.hostNameFile, 'w') as f:
            f.write(socket.getfqdn())
    # rollover log files
    if options.rotateLog:
        core_utils.do_log_rollover()
        if hasattr(_logger.handlers[0], 'doRollover'):
            _logger.handlers[0].doRollover()
    if daemon_mode and not options.foreground:
        # redirect messages to stdout
        stdoutHandler = logging.StreamHandler(sys.stdout)
        stdoutHandler.setFormatter(_logger.handlers[0].formatter)
        _logger.addHandler(stdoutHandler)
        # collect streams not to be closed by daemon
        files_preserve = []
        for loggerName, loggerObj in iteritems(
                logging.Logger.manager.loggerDict):
            if loggerName.startswith('panda'):
                for handler in loggerObj.handlers:
                    if hasattr(handler, 'stream'):
                        files_preserve.append(handler.stream)
        sys.stderr = StdErrWrapper()
        # make daemon context
        dc = daemon.DaemonContext(stdout=sys.stdout,
                                  stderr=sys.stderr,
                                  uid=uid,
                                  gid=gid,
                                  umask=umask,
                                  files_preserve=files_preserve,
                                  pidfile=daemon.pidfile.PIDLockFile(
                                      options.pid))
    else:
        dc = DummyContext()
    with dc:
        # remove pidfile to prevent child processes crashing in atexit
        if not options.singleMode:
            dc.pidfile = None
        core_utils.set_file_permission(options.pid)
        core_utils.set_file_permission(logger_config.daemon['logdir'])
        _logger.info("start : version = {0}, last_commit = {1}".format(
            panda_pkg_info.release_version, commit_timestamp.timestamp))

        # stop event
        stopEvent = threading.Event()

        # profiler
        prof = None
        if options.profileOutput is not None:
            # run with profiler
            if options.profileMode == 'd':
                # deterministic
                prof = pprofile.Profile()
            elif options.profileMode == 't':
                # thread-aware
                prof = pprofile.ThreadProfile()
            else:
                # 's' (statistic) mode: note this falls back to cProfile, which is deterministic
                prof = cProfile.Profile()

        # post process for profiler
        def disable_profiler():
            global prof
            if prof is not None:
                # disable profiler
                prof.disable()
                # dump results
                prof.dump_stats(options.profileOutput)
                prof = None

        # signal handlers
        def catch_sigkill(sig, frame):
            disable_profiler()
            _logger.info('got signal={0}'.format(sig))
            try:
                os.remove(options.pid)
            except OSError:
                pass
            if os.getppid() == 1:
                os.killpg(os.getpgrp(), signal.SIGKILL)
            else:
                os.kill(os.getpid(), signal.SIGKILL)

        def catch_sigterm(sig, frame):
            stopEvent.set()
            try:
                os.remove(options.pid)
            except OSError:
                pass

        # set handler
        if daemon_mode:
            signal.signal(signal.SIGINT, catch_sigkill)
            signal.signal(signal.SIGHUP, catch_sigkill)
            signal.signal(signal.SIGTERM, catch_sigkill)
            signal.signal(signal.SIGUSR2, catch_sigterm)
        # start master
        master = Master(single_mode=options.singleMode,
                        stop_event=stopEvent,
                        daemon_mode=daemon_mode)
        if master is None:
            prof = None
        else:
            # enable profiler
            if prof is not None:
                prof.enable()
            # run master
            master.start()
            # disable profiler
            disable_profiler()
        if daemon_mode:
            _logger.info('terminated')
Code Example #18
File: embedded.py Project: xmorgan/pprofile
#!/usr/bin/env python
import threading
import pprofile
import time
import sys


def func():
    # Busy loop, so context switches happen
    end = time.time() + 1
    while time.time() < end:
        pass


# Single-threaded run
prof = pprofile.Profile()
with prof:
    func()
prof.annotate(sys.stdout, __file__)

# Dual-threaded run
t1 = threading.Thread(target=func)
prof = pprofile.Profile()
with prof:
    t1.start()
    func()
    t1.join()
prof.annotate(sys.stdout, __file__)
Code Example #19
    def wrapper(*args, **kwargs):
        """ Define the input object decorator.
        """
        # Get the function parameters
        arg_spec = inspect.getfullargspec(obj)  # getargspec was removed in Python 3.11
        defaults = [repr(item) for item in arg_spec.defaults or []]
        optional = dict(zip(reversed(arg_spec.args or []), reversed(defaults)))
        for name, value in kwargs.items():
            if name in optional:
                optional[name] = object_repr(value)
        mandatory = []
        self_parameter = None
        for index in range(len(arg_spec.args) - len(optional)):
            try:
                if index < len(args):
                    value = args[index]
                else:
                    value = kwargs[arg_spec.args[index]]
                if arg_spec.args[index] == "self":
                    self_parameter = value
                if arg_spec.args[index] == "cls":
                    args = args[1:]
                value_repr = object_repr(value)
                mandatory.append((arg_spec.args[index], value_repr))
            except:
                mandatory.append((arg_spec.args[index], None))
                raise

        # Create the function signature
        params = ["{0}={1}".format(name, val) for name, val in mandatory]
        params.extend(
            ["{0}={1}".format(name, val) for name, val in optional.items()])
        signature = "{0}({1})".format(obj.__name__, ", ".join(params))

        # Display a start call message
        module = obj.__module__
        package_name = module.split(".")[0]
        if is_method:
            obj_name = (module + "." + self_parameter.__class__.__name__ +
                        "." + obj.__name__)
        else:
            obj_name = module + "." + obj.__name__
        print("{0}\n[{1}] Calling {2}...\n{3}".format(80 * "_", package_name,
                                                      obj_name, signature))

        # Apply registered type-checking decorators if requested
        _obj = obj
        for decorator, registered_types in type_decorators:
            _obj = decorator(*registered_types)(_obj)

        # Call
        start_time = time.time()
        if use_profiler:
            profiler = pprofile.Profile()
            returncode = profiler.runcall(obj, *args, **kwargs)
        else:
            returncode = _obj(*args, **kwargs)
        duration = time.time() - start_time

        # Display execution profile
        if use_profiler:
            obj_code = inspect.getsourcelines(obj)[0]
            annotate(profiler, sys.stdout,
                     inspect.getmodule(obj).__file__, obj_code)

        # Display an end message
        msg = "{0:.1f}s, {1:.1f}min".format(duration, duration / 60.)
        print(max(0, (80 - len(msg))) * '_' + msg)

        return returncode
Code Example #20
def execute_wfc(filename,
                tile_size=0,
                pattern_width=2,
                rotations=8,
                output_size=[48, 48],
                ground=None,
                attempt_limit=10,
                output_periodic=True,
                input_periodic=True,
                loc_heuristic="lexical",
                choice_heuristic="lexical",
                visualize=True,
                global_constraint=False,
                backtracking=False,
                log_filename="log",
                logging=True,
                global_constraints=None,
                log_stats_to_output=None):
    timecode = f"{time.time()}"
    time_begin = time.time()
    output_destination = r"./output/"
    input_folder = r"./images/samples/"

    rotations -= 1  # change to zero-based

    input_stats = {
        "filename": filename,
        "tile_size": tile_size,
        "pattern_width": pattern_width,
        "rotations": rotations,
        "output_size": output_size,
        "ground": ground,
        "attempt_limit": attempt_limit,
        "output_periodic": output_periodic,
        "input_periodic": input_periodic,
        "location heuristic": loc_heuristic,
        "choice heuristic": choice_heuristic,
        "global constraint": global_constraint,
        "backtracking": backtracking
    }

    # Load the image
    img = imageio.imread(input_folder + filename + ".png")
    img = img[:, :, :3]  # TODO: handle alpha channels

    # TODO: generalize this to more than the four cardinal directions
    direction_offsets = list(enumerate([(0, -1), (1, 0), (0, 1), (-1, 0)]))

    tile_catalog, tile_grid, code_list, unique_tiles = make_tile_catalog(
        img, tile_size)
    pattern_catalog, pattern_weights, pattern_list, pattern_grid = make_pattern_catalog_with_rotations(
        tile_grid,
        pattern_width,
        input_is_periodic=input_periodic,
        rotations=rotations)

    print("pattern catalog")

    #visualize_tiles(unique_tiles, tile_catalog, tile_grid)
    #visualize_patterns(pattern_catalog, tile_catalog, pattern_weights, pattern_width)
    #figure_list_of_tiles(unique_tiles, tile_catalog, output_filename=f"visualization/tilelist_{filename}_{timecode}")
    #figure_false_color_tile_grid(tile_grid, output_filename=f"visualization/tile_falsecolor_{filename}_{timecode}")
    if visualize:
        figure_pattern_catalog(
            pattern_catalog,
            tile_catalog,
            pattern_weights,
            pattern_width,
            output_filename=
            f"visualization/pattern_catalog_{filename}_{timecode}")

    print("profiling adjacency relations")
    adjacency_relations = None

    if False:  # flip to True to profile adjacency extraction with pprofile
        profiler = pprofile.Profile()
        with profiler:
            adjacency_relations = adjacency_extraction(
                pattern_grid, pattern_catalog, direction_offsets,
                [pattern_width, pattern_width])
        profiler.dump_stats(f"logs/profile_adj_{filename}_{timecode}.txt")
    else:
        adjacency_relations = adjacency_extraction(
            pattern_grid, pattern_catalog, direction_offsets,
            [pattern_width, pattern_width])

    print("adjacency_relations")

    if visualize:
        figure_adjacencies(
            adjacency_relations,
            direction_offsets,
            tile_catalog,
            pattern_catalog,
            pattern_width, [tile_size, tile_size],
            output_filename=f"visualization/adjacency_{filename}_{timecode}_A")
        #figure_adjacencies(adjacency_relations, direction_offsets, tile_catalog, pattern_catalog, pattern_width, [tile_size, tile_size], output_filename=f"visualization/adjacency_{filename}_{timecode}_B", render_b_first=True)

    print(f"output size: {output_size}\noutput periodic: {output_periodic}")
    number_of_patterns = len(pattern_weights)
    print(f"# patterns: {number_of_patterns}")
    decode_patterns = dict(enumerate(pattern_list))
    encode_patterns = {x: i for i, x in enumerate(pattern_list)}
    encode_directions = {j: i for i, j in direction_offsets}

    adjacency_list = {}
    for i, d in direction_offsets:
        adjacency_list[d] = [set() for i in pattern_weights]
    #print(adjacency_list)
    for i in adjacency_relations:
        #print(i)
        #print(decode_patterns[i[1]])
        adjacency_list[i[0]][encode_patterns[i[1]]].add(encode_patterns[i[2]])

    print(f"adjacency: {len(adjacency_list)}")

    time_adjacency = time.time()

    ### Ground ###

    ground_list = []
    if ground:
        ground_list = np.vectorize(lambda x: encode_patterns[x])(
            pattern_grid.flat[(ground - 1):])
    if len(ground_list) < 1:
        ground_list = None

    if ground_list is not None:
        ground_catalog = {
            encode_patterns[k]: v
            for k, v in pattern_catalog.items()
            if encode_patterns[k] in ground_list
        }
        if visualize:
            figure_pattern_catalog(
                ground_catalog,
                tile_catalog,
                pattern_weights,
                pattern_width,
                output_filename=
                f"visualization/patterns_ground_{filename}_{timecode}")

    wave = makeWave(number_of_patterns,
                    output_size[0],
                    output_size[1],
                    ground=ground_list)
    adjacency_matrix = makeAdj(adjacency_list)

    ### Heuristics ###

    encoded_weights = np.zeros((number_of_patterns), dtype=np.float64)
    for w_id, w_val in pattern_weights.items():
        encoded_weights[encode_patterns[w_id]] = w_val
    choice_random_weighting = np.random.random(wave.shape[1:]) * 0.1

    pattern_heuristic = lexicalPatternHeuristic
    if choice_heuristic == "weighted":
        pattern_heuristic = makeWeightedPatternHeuristic(encoded_weights)
    if choice_heuristic == "random":
        pattern_heuristic = makeRandomPatternHeuristic(encoded_weights)

    print(loc_heuristic)
    location_heuristic = lexicalLocationHeuristic
    if loc_heuristic == "anti-entropy":
        location_heuristic = makeAntiEntropyLocationHeuristic(
            choice_random_weighting)
    if loc_heuristic == "entropy":
        location_heuristic = makeEntropyLocationHeuristic(
            choice_random_weighting)
    if loc_heuristic == "random":
        location_heuristic = makeRandomLocationHeuristic(
            choice_random_weighting)
    if loc_heuristic == "simple":
        location_heuristic = simpleLocationHeuristic
    if loc_heuristic == "spiral":
        location_heuristic = makeSpiralLocationHeuristic(
            choice_random_weighting)
    if loc_heuristic == "hilbert":
        location_heuristic = makeHilbertLocationHeuristic(
            choice_random_weighting)

    ### Visualization ###

    visualize_choice, visualize_wave, visualize_backtracking, visualize_propagate, visualize_final, visualize_after = None, None, None, None, None, None
    if visualize:
        visualize_choice, visualize_wave, visualize_backtracking, visualize_propagate, visualize_final, visualize_after = make_solver_visualizers(
            f"{filename}_{timecode}",
            wave,
            decode_patterns=decode_patterns,
            pattern_catalog=pattern_catalog,
            tile_catalog=tile_catalog,
            tile_size=[tile_size, tile_size])
    if logging:
        visualize_choice, visualize_wave, visualize_backtracking, visualize_propagate, visualize_final, visualize_after = make_solver_loggers(
            f"{filename}_{timecode}", input_stats.copy())
    if logging and visualize:
        vis = make_solver_visualizers(f"{filename}_{timecode}",
                                      wave,
                                      decode_patterns=decode_patterns,
                                      pattern_catalog=pattern_catalog,
                                      tile_catalog=tile_catalog,
                                      tile_size=[tile_size, tile_size])
        log = make_solver_loggers(f"{filename}_{timecode}", input_stats.copy())

        def visfunc(idx):
            def vf(*args, **kwargs):
                if vis[idx]:
                    vis[idx](*args, **kwargs)
                if log[idx]:
                    return log[idx](*args, **kwargs)

            return vf

        visualize_choice, visualize_wave, visualize_backtracking, visualize_propagate, visualize_final, visualize_after = [
            visfunc(x) for x in range(len(vis))
        ]

    ### Global Constraints ###
    active_global_constraint = lambda wave: True
    if global_constraint == "allpatterns":
        active_global_constraint = make_global_use_all_patterns()
    print(active_global_constraint)

    ### Search Depth Limit
    def makeSearchLengthLimit(max_limit):
        search_length_counter = 0

        def searchLengthLimit(wave):
            nonlocal search_length_counter
            search_length_counter += 1
            return search_length_counter <= max_limit

        return searchLengthLimit

    combined_constraints = [
        active_global_constraint,
        makeSearchLengthLimit(1200)
    ]

    def combinedConstraints(wave):
        return all(fn(wave) for fn in combined_constraints)

    ### Solving ###

    time_solve_start = None
    time_solve_end = None

    solution_tile_grid = None
    print("solving...")
    attempts = 0
    while attempts < attempt_limit:
        attempts += 1
        end_early = False
        time_solve_start = time.time()
        stats = {}
        profiler = pprofile.Profile()
        if True:
            #with profiler:
            #with PyCallGraph(output=GraphvizOutput(output_file=f"visualization/pycallgraph_{filename}_{timecode}.png")):
            try:
                solution = run(wave.copy(),
                               adjacency_matrix,
                               locationHeuristic=location_heuristic,
                               patternHeuristic=pattern_heuristic,
                               periodic=output_periodic,
                               backtracking=backtracking,
                               onChoice=visualize_choice,
                               onBacktrack=visualize_backtracking,
                               onObserve=visualize_wave,
                               onPropagate=visualize_propagate,
                               onFinal=visualize_final,
                               checkFeasible=combinedConstraints)
                if visualize_after:
                    stats = visualize_after()
                #print(solution)
                #print(stats)
                solution_as_ids = np.vectorize(lambda x: decode_patterns[x])(
                    solution)
                solution_tile_grid = pattern_grid_to_tiles(
                    solution_as_ids, pattern_catalog)

                print("Solution:")
                #print(solution_tile_grid)
                render_tiles_to_output(
                    solution_tile_grid, tile_catalog, [tile_size, tile_size],
                    output_destination + filename + "_" + timecode + ".png")

                time_solve_end = time.time()
                stats.update({"outcome": "success"})
                succeeded = True
            except StopEarly:
                print("Skipping...")
                end_early = True
                stats.update({"outcome": "skipped"})
            except TimedOut:
                print("Timed Out")
                if visualize_after:
                    stats = visualize_after()
                stats.update({"outcome": "timed_out"})
            except Contradiction:
                print("Contradiction")
                if visualize_after:
                    stats = visualize_after()
                stats.update({"outcome": "contradiction"})
        profiler.dump_stats(f"logs/profile_{filename}_{timecode}.txt")

        outstats = {}
        outstats.update(input_stats)
        solve_duration = time.time() - time_solve_start
        try:
            solve_duration = (time_solve_end - time_solve_start)
        except TypeError:
            pass
        adjacency_duration = 0
        try:
            adjacency_duration = time_solve_start - time_adjacency
        except TypeError:
            pass
        outstats.update({
            "attempts": attempts,
            "time_start": time_begin,
            "time_adjacency": time_adjacency,
            "adjacency_duration": adjacency_duration,
            "time solve start": time_solve_start,
            "time solve end": time_solve_end,
            "solve duration": solve_duration,
            "pattern count": number_of_patterns
        })
        outstats.update(stats)
        if log_stats_to_output is not None:
            log_stats_to_output(outstats,
                                output_destination + log_filename + ".tsv")
        if solution_tile_grid is not None:
            return solution_tile_grid
        if end_early:
            return None

    return None
Code Example #21
#%% Script
if __name__ == '__main__':

    do_board = 'none' # from {'none', 'small', 'large', 'both'}

    # convert board to numeric representation for efficiency
    board1 = knight.char_board_to_nums(knight.BOARD1)
    board2 = knight.char_board_to_nums(knight.BOARD2)

    # create the output folder
    folder = os.path.join(get_output_dir(), 'knight')
    setup_dir(folder)

    # create profiler
    profile = pprofile.Profile()

    # Small board
    if do_board in {'small', 'both'}:
        print('\nSolve the smaller board for the minimum length solution.')
        # enable/disable profiler while running solver
        profile.enable()
        moves3 = knight.solve_min_puzzle(board1)
        profile.disable()

        # print solution
        print(moves3)
        is_valid3 = knight.check_valid_sequence(board1, moves3, print_status=True)
        if is_valid3:
            knight.print_sequence(board1, moves3)