Example #1
    def run(self, mvWindow):
        """
        Plays the video, frame by frame, or wait for the video to be unpaused, until end of video or quitting.

        Lit la vidéo image par image ou attends une instruction de l'utilisateur.
        """
        self.timeController.reset()
        while not (self.playbackStatus.endReached or self.playbackStatus.quitting):
            if self.playbackStatus.play or self.playbackStatus.refreshNeeded:
                self.playbackStatus.refreshNeeded = False
                controlledTime = self.timeController.getControlledTime()
                util.timed(mvWindow.waitkey)(controlledTime, self.playbackStatus)

                imageSet = collections.OrderedDict()

                self.playbackStatus.endReached, frame = self.getFrame()
                if self.playbackStatus.endReached:
                    break
                
                imageSet["counting"] = frame
                imageSet["frame"] = frame
                imageSet["trackers"] = np.array(frame)

                self.processingTool.run(imageSet, self.frameIndex)

                advancementPercentage = self.cap.get(cv2.CAP_PROP_POS_FRAMES) / self.frameCount
                mvWindow.update(imageSet, advancementPercentage, self.processingTool.analyseData.segmenter)

                controlledTime = self.timeController.getControlledTime()
                mvWindow.waitkey(controlledTime, self.playbackStatus)
            else:
                duration = 0.05
                mvWindow.waitkey(duration, self.playbackStatus)
Example #2
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="8-input.txt")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        program = Program.parse(f)
        vm = VM(program)
        timed(vm.run)()
        print(vm.accumulator)
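The timed helper used throughout these examples is not defined on this page. What follows is a minimal sketch of a compatible wrapper, assuming it simply measures the call's wall-clock duration, prints it, and passes the wrapped function's return value through, which is consistent with how Examples 2, 3 and 5 through 8 use it; everything except the name timed is illustrative.

import time
from functools import wraps


def timed(func):
    """Print the wall-clock duration of each call and return its result (illustrative sketch)."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)      # run the wrapped call
        elapsed = time.perf_counter() - start
        print(f"{func.__name__} took {elapsed:.3f}s")
        return result                       # pass the return value through unchanged
    return wrapper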
Example #3
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="7-input.txt")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        rules = parse_rules(f)
        print(timed(find_needed_bags)('shiny gold', rules))
Example #4
def solve(year: int, day: int) -> None:
    click.echo(f'Year {year}, Day {day}')
    module = import_module(f'{year}.{day:02d}')
    data = read_input(year, day)

    tc1 = read_tc(year, day, 1)
    if tc1:
        test(module.solve_1, tc1)
    part_1_time, part_1_solution = timed(module.solve_1)(data)
    click.echo(f'Solution 1: {part_1_solution}, Took: {part_1_time}ms')

    tc2 = read_tc(year, day, 2)
    if tc2:
        test(module.solve_2, tc2)
    part_2_time, part_2_solution = timed(module.solve_2)(data)
    click.echo(f'Solution 2: {part_2_solution}, Took: {part_2_time}ms')
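Example 4 relies on a different contract: here timed returns a pair of (elapsed time, result) instead of printing. A compatible variant, again only a sketch since the project's actual helper is not reproduced here, assuming the elapsed time is reported in milliseconds as the "Took: ...ms" output suggests:

import time
from functools import wraps


def timed(func):
    """Return a wrapper that yields (elapsed_ms, result) for each call (illustrative sketch)."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed_ms = (time.perf_counter() - start) * 1000  # milliseconds, matching the "ms" suffix above
        return elapsed_ms, result
    return wrapper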
Example #5
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="8-input.txt")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        program = parse_program(f)
        print(timed(find_jmp_nop_swap)(program))
Example #6
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="9-input.txt")
    argparse.add_argument("--preamble-length", "-p", type=int, default=25, help="Use 5 for test input")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        code = [int(stripped) for stripped in (line.strip() for line in f) if stripped]

    print(timed(find_first_not_2sum)(code, preamble_length=args.preamble_length))
Example #7
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="10-input.txt")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        adapters = {
            int(stripped)
            for stripped in (line.strip() for line in f) if stripped
        }

    arrangements = timed(count_arrangements)(0, max(adapters) + 3, adapters)
    print(arrangements)
Example #8
def main():
    argparse = ArgumentParser()
    argparse.add_argument("file", nargs='?', type=str, default="10-input.txt")
    args = argparse.parse_args()
    with open(args.file, "r") as f:
        adapters = [
            int(stripped) for stripped in (line.strip() for line in f)
            if stripped
        ]

    distribution = timed(find_1_2_3_jolt_steps)(adapters)
    print(distribution)
    print(distribution[1] * distribution[3])
Example #9
    def run(self, im, frameIndex):
        """
        Executer l'analyse d'une trame.

        :param: np.array im: ensemble de trames
        :param: int frameIndex: index de chaque trame
        
        """
        runArgs = self.processingToolsConfig.backgroundSubtractor[0]["runArgs"]
        sub = util.timed(self.bgSub.apply)(image=im["frame"], **runArgs)
        im["fgMask"] = sub

        # Two-frame bitwise AND
        if self.last_fgMask is None:
            self.last_fgMask = im["fgMask"]
        if self.oneBeforeLast_fgMask is None:
            self.oneBeforeLast_fgMask = self.last_fgMask

        im["bitwise_fgMask_and"] = cv2.bitwise_and(im["fgMask"],
                                                   self.last_fgMask,
                                                   self.oneBeforeLast_fgMask)

        # erodeAndDilate
        ptc = self.processingToolsConfig
        mask = util.timed(self.erodeAndDilate)(
            im,
            eadPre=ptc.erodeAndDilatePreBitwiseAnd,
            eadPost=ptc.erodeAndDilatePostBitwiseAnd,
        )
        _ = mask

        # Blob Detector
        blobKeypoints = util.timed(self.blobDetection)(
            im, nameOfImageToUse="dilateC")

        # temporalDifferentiation
        frame = im["frame"]
        if self.last_frame is None:
            self.last_frame = frame
        last = self.last_frame
        if self.processingToolsConfig.temporalDifferentiation:
            im["temporal_xor"] = cv2.bitwise_xor(frame, last)
            im["tp_diff+128"] = frame - last + 128
            im["tp_diff+128>20"] = ((im["tp_diff+128"] > 20 + 128) *
                                    64).astype(np.uint8)
            im["tp_diff+128>20:sum"] = np.sum(im["tp_diff+128>20"],
                                              axis=-1).astype(np.uint8)
            im["tp_diff+128>20:s1"] = ((im["tp_diff+128>20:sum"] > 64) *
                                       255).astype(np.uint8)
            # im["tp_diff+128>20:s2"] = ((im["tp_diff+128>20:sum"] > 128) * 255).astype(np.uint8)
            del im["tp_diff+128"]
            del im["tp_diff+128>20:sum"]
            del im["tp_diff+128>20"]

        # opticalFlow
        if self.processingToolsConfig.opticalFlow:
            im["opticalFlowH"], im["opticalFlowV"] = util.timed(
                self.opticalFlow)(im["frame"])

        # Contour
        if self.processingToolsConfig.contour:
            util.timed(self.contour)(im, np.array(im["dilateC"]))

        # Tracking
        frame = im["frame"]
        util.timed(self.mvMultiTracker.mvTracking)(im, frameIndex, frame,
                                                   blobKeypoints)

        self.analyseData.tick()

        self.last_fgMask, self.oneBeforeLast_fgMask = im[
            "fgMask"], self.last_fgMask
        self.last_frame, self.oneBeforeLast_frame = im[
            "frame"], self.last_frame