Example #1
def test_prefetch_with_cache(
    nydus_anchor: NydusAnchor,
    nydus_scratch_image: RafsImage,
    nydus_scratch_parent_image: RafsImage,
    rafs_conf: RafsConf,
):
    parent_rootfs = nydus_scratch_parent_image.rootfs()
    upper_rootfs = nydus_scratch_image.rootfs()

    rafs_conf.enable_validation()
    rafs_conf.set_rafs_backend(Backend.OSS)
    rafs_conf.enable_rafs_blobcache()
    rafs_conf.enable_fs_prefetch(threads_count=4, merging_size=512 * 1024)
    rafs_conf.dump_rafs_conf()

    dist_parent = Distributor(parent_rootfs, 6, 4)
    dist_parent.generate_tree()
    dist_parent.put_directories(20)
    dist_parent.put_multiple_files(100, Size(64, Unit.KB))
    dist_parent.put_symlinks(30)
    dist_parent.put_hardlinks(20)

    dist_upper = Distributor(upper_rootfs, 3, 8)
    dist_upper.generate_tree()
    dist_upper.put_multiple_files(27, Size(3, Unit.MB))
    dist_upper.put_symlinks(5)

    # hint_files_parent = dist_parent.put_multiple_files(1000, Size(8, Unit.KB))
    # hint_files_parent = [os.path.join(parent_rootfs, p) for p in hint_files_parent]
    # hint_files_parent = "\n".join(hint_files_parent)

    nydus_scratch_parent_image.set_backend(Backend.OSS).create_image(
        readahead_policy="fs", readahead_files="/".encode())

    hint_files = dist_upper.put_multiple_files(1000, Size(8, Unit.KB))
    hint_files.extend(dist_upper.put_multiple_empty_files(200))

    hint_files = [os.path.join("/", p) for p in hint_files]
    hint_files = "\n".join(hint_files)

    nydus_scratch_image.set_backend(Backend.OSS).create_image(
        parent_image=nydus_scratch_parent_image,
        readahead_policy="fs",
        readahead_files=hint_files.encode(),
    )

    nydus_anchor.mount_overlayfs(
        [nydus_scratch_image.rootfs(),
         nydus_scratch_parent_image.rootfs()])

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.thread_num(5).mount()

    workload_gen = WorkloadGen(nydus_anchor.mount_point,
                               nydus_anchor.overlayfs)
    workload_gen.setup_workload_generator()

    assert workload_gen.verify_entire_fs()
    workload_gen.torture_read(5, 20)
    workload_gen.finish_torture_read()
Example #2
def test_build_image(nydus_anchor, nydus_scratch_image: RafsImage,
                     rafs_conf: RafsConf):
    """
    title: Build nydus image
    description: Build nydus image from rootfs, generating proper bootstrap and
    blob
    pass_criteria:
      - Image can be successfully built and mounted
      - Rafs can be unmounted after a small amount of read IO and attr
        operations
      - Try letting the image builder upload the blob itself.
    """

    dist = Distributor(nydus_scratch_image.rootfs(), 80, 1)
    dist.generate_tree()
    dist.put_directories(100)
    dist.put_hardlinks(90)
    dist.put_symlinks(200)
    dist.put_multiple_files(random.randint(20, 28), Size(10, Unit.MB))
    dist.put_multiple_chinese_files(random.randint(20, 28), Size(20, Unit.KB))

    Whiteout().whiteout_one_file(nydus_scratch_image.rootfs(),
                                 "i/am/troublemaker/foo")

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY).create_image()

    rafs_conf.set_rafs_backend(backend_type=Backend.BACKEND_PROXY)
    rafs_conf.dump_rafs_conf()

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())

    rafs.mount()
    assert wg.verify_entire_fs()
    rafs.umount()
Example #3
def test_large_file(nydus_anchor, compressor, backend, amplified_size):
    _tmp_dir = tempfile.TemporaryDirectory(dir=nydus_anchor.workspace)
    large_file_dir = _tmp_dir.name

    dist = Distributor(large_file_dir, 3, 3)
    dist.generate_tree()
    dist.put_single_file(Size(20, Unit.MB))
    dist.put_single_file(Size(10891, Unit.KB))
    dist.put_multiple_files(10, Size(2, Unit.MB))
    dist.put_multiple_files(10, Size(4, Unit.MB))

    image = RafsImage(nydus_anchor, large_file_dir, "bs_large", "blob_large")
    image.set_backend(backend).create_image(compressor=compressor)

    rafs_conf = (
        RafsConf(nydus_anchor, image)
        .enable_rafs_blobcache()
        .amplify_io(amplified_size)
        .set_rafs_backend(backend, image=image)
    )

    rafs = RafsMount(nydus_anchor, image, rafs_conf)
    rafs.thread_num(4).mount()

    workload_gen = WorkloadGen(nydus_anchor.mount_point, large_file_dir)

    workload_gen.setup_workload_generator()
    workload_gen.torture_read(8, 5)
    workload_gen.finish_torture_read()

    assert not workload_gen.io_error

    rafs.umount()
    image.clean_up()
Example #4
def test_specified_prefetch(
    nydus_anchor: NydusAnchor,
    rafs_conf: RafsConf,
    nydus_scratch_image: RafsImage,
    backend,
):
    """
    description:
        Nydusd can be given a list of files and directories when it starts.
        Then it can prefetch those files from the backend according to the list.
    """

    rafs_conf.set_rafs_backend(backend)
    rafs_conf.enable_fs_prefetch(prefetch_all=True)
    rafs_conf.enable_rafs_blobcache()
    rafs_conf.dump_rafs_conf()

    dist = Distributor(nydus_scratch_image.rootfs(), 8, 2)
    dist.generate_tree()
    dirs = dist.put_directories(20)
    dist.put_multiple_files(100, Size(64, Unit.KB))
    dist.put_symlinks(30)
    dist.put_hardlinks(20)
    dist.put_multiple_files(40, Size(64, Unit.KB))
    dist.put_single_file(Size(3, Unit.MB), name="test")

    nydus_scratch_image.set_backend(backend).create_image()

    prefetching_files = dirs
    prefetching_files += dist.files[:-10]
    prefetching_files += dist.dirs[:-5]
    prefetching_files += dist.symlinks[:-10]
    # Fuzz
    prefetching_files.append("/a/b/c/d")
    prefetching_files.append(os.path.join("/", "f/g/h/"))

    specified_dirs = " ".join(
        [os.path.join("/", d) for d in prefetching_files])

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.prefetch_files(specified_dirs).mount()
    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())

    nc = NydusAPIClient(rafs.get_apisock())
    wg.setup_workload_generator()
    blobcache_metrics = nc.get_blobcache_metrics()
    wg.torture_read(5, 10)

    while blobcache_metrics["prefetch_workers"] != 0:
        time.sleep(0.5)
        blobcache_metrics = nc.get_blobcache_metrics()

    begin = nc.get_backend_metrics()["read_amount_total"]
    time.sleep(1)
    end = nc.get_backend_metrics()["read_amount_total"]

    assert end == begin
    wg.finish_torture_read()
Example #5
def test_prefetch_with_cache(
    nydus_anchor,
    nydus_scratch_image: RafsImage,
    rafs_conf: RafsConf,
    thread_cnt,
    compressor,
    is_cache_compressed,
):
    """
    title: Prefetch from various backends
    description:
      - Enable rafs backend blob cache, as it is disabled by default
    pass_criteria:
      - Rafs can be mounted.
      - Rafs can be unmounted.
    """
    rafs_conf.enable_rafs_blobcache(is_compressed=is_cache_compressed)
    rafs_conf.set_rafs_backend(Backend.BACKEND_PROXY, prefix="object_prefix/")
    rafs_conf.enable_fs_prefetch(threads_count=4,
                                 bandwidth_rate=Size(40, Unit.MB).B)
    rafs_conf.dump_rafs_conf()

    dist = Distributor(nydus_scratch_image.rootfs(), 4, 4)
    dist.generate_tree()
    dist.put_directories(20)
    dist.put_multiple_files(40, Size(3, Unit.MB))
    dist.put_hardlinks(6)
    dist.put_multiple_chinese_files(random.randint(20, 28), Size(20, Unit.KB))

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY,
                                    prefix="object_prefix/").create_image(
                                        compressor=compressor,
                                        readahead_policy="fs",
                                        readahead_files="/".encode(),
                                    )

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.thread_num(4).mount()

    nc = NydusAPIClient(rafs.get_apisock())
    workload_gen = WorkloadGen(nydus_anchor.mount_point,
                               nydus_scratch_image.rootfs())
    m = nc.get_blobcache_metrics()
    time.sleep(0.3)
    assert m["prefetch_data_amount"] != 0

    workload_gen.setup_workload_generator()
    workload_gen.torture_read(thread_cnt, 10)

    assert NydusAnchor.check_nydusd_health()

    workload_gen.finish_torture_read()
    assert not workload_gen.io_error

    # In this way, we can check if nydusd is crashed.
    assert rafs.is_mounted()
    rafs.umount()
Example #6
def test_file_tail(nydus_anchor: NydusAnchor, nydus_scratch_image: RafsImage,
                   backend):
    """
    description: Read data from file tail
        - Create several files of different sizes
        - Punch hole to each file of which some should have hole tail
        - Create rafs image from test scratch directory.
        - Mount rafs
        - Do some test.
    """
    file_size_list = [
        Size(1, Unit.KB),
        Size(6, Unit.KB),
        Size(2, Unit.MB),
        Size(10034, Unit.KB),
    ]
    file_list = []

    dist = Distributor(nydus_anchor.scratch_dir, 2, 2)
    dist.generate_tree()

    for f_s in file_size_list:
        f_name = dist.put_single_file(f_s)
        file_list.append(f_name)
        # Punch hole
        with utils.pushd(nydus_anchor.scratch_dir):
            with open(f_name, "a+b") as f:
                fallocate(
                    f,
                    f_s.B - 500,
                    1000,
                    mode=FALLOC_FL_PUNCH_HOLE | FALLOC_FL_KEEP_SIZE,
                )

    nydus_scratch_image.set_backend(backend).create_image()

    rafs_conf = RafsConf(nydus_anchor, nydus_scratch_image)
    rafs_conf.set_rafs_backend(backend, image=nydus_scratch_image)

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()

    with utils.pushd(nydus_anchor.mount_point):
        for name in file_list:
            with open(name, "rb") as f:
                size = os.stat(name).st_size
                f.seek(size - 300)
                buf = f.read(1000)
                assert len(buf) == 300

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())
    for f in file_list:
        wg.verify_single_file(os.path.join(nydus_anchor.mount_point, f))

    assert not wg.io_error
Example #7
    def __init__(self, **kwargs):
        self.__NGCNo = kwargs['NGCNo']
        self.__RA_2000 = kwargs['RA_2000']
        self.__DEC_2000 = kwargs['DEC_2000']
        self.__Const = kwargs['Const']
        self.__ObjectType = kwargs['ObjectType']
        self.__Size = kwargs['Size']
        self.__Bmag = kwargs['Bmag']
        self.__Vmag = kwargs['Vmag']
        self.__AlsoCatalogedAs = kwargs['AlsoCatalogedAs']
        self.__PA = kwargs['PA']

        self.ra = EquatorialCoordinate(self.__RA_2000)
        self.dec = EquatorialCoordinate(self.__DEC_2000)

        # parse the size
        size_match = ngc_size_re.match(self.__Size)
        if size_match:
            major = size_match.groups()[0]
            minor = size_match.groups()[2]
            self.size = Size(major=float(major),
                             minor=(float(minor) if minor is not None else 0))
        else:
            self.size = Size(major=0, minor=0)

        # Some objects don't have a Vmag... I'm not sure why.
        try:
            self.magnitude = float(self.__Vmag)
        except ValueError as e:
            try:
                self.magnitude = float(self.__Bmag)
            except Exception as e:
                self.magnitude = 20

        try:
            self.angle = float(self.__PA)
        except ValueError:
            self.angle = None

        self.identifier = self.__NGCNo
        self.catalog = 'NGC'

        # Lookup the NGC type and map to one of our types
        self.type = ngc_object_types[self.__ObjectType]

        # The HCNGC Catalog gives us some aliases. Add them.
        # This is imperfect because the catalog doesn't use a consistent
        # separator between catalog and identifier.
        aliases = [
            re.split('[ -]', a.strip(), 1)
            for a in self.__AlsoCatalogedAs.split(',')
        ]
        for pair in aliases:
            self.add_alias(*reversed(pair))
Example #8
    def __call__(self, data, label, gt):
        #-----------------------------------------------------------------------
        # Check whether to sample or not
        #-----------------------------------------------------------------------
        if not self.sample:
            return data, label, gt

        #-----------------------------------------------------------------------
        # Retry sampling a couple of times
        #-----------------------------------------------------------------------
        source_boxes = anchors2array(gt.boxes, gt.imgsize)
        box = None
        box_arr = None
        for _ in range(self.max_trials):
            #-------------------------------------------------------------------
            # Sample a bounding box
            #-------------------------------------------------------------------
            scale = random.uniform(self.min_scale, self.max_scale)
            aspect_ratio = random.uniform(self.min_aspect_ratio,
                                          self.max_aspect_ratio)

            # width = scale * sqrt(ar) and height = scale / sqrt(ar), so clamp
            # ar into [scale**2, 1/scale**2] to keep both dimensions within 1
            aspect_ratio = max(aspect_ratio, scale**2)
            aspect_ratio = min(aspect_ratio, 1 / (scale**2))

            width = scale * sqrt(aspect_ratio)
            height = scale / sqrt(aspect_ratio)
            cx = 0.5 * width + random.uniform(0, 1 - width)
            cy = 0.5 * height + random.uniform(0, 1 - height)
            center = Point(cx, cy)
            size = Size(width, height)

            #-------------------------------------------------------------------
            # Check if the box satisfies the jaccard overlap constraint
            #-------------------------------------------------------------------
            box_arr = np.array(prop2abs(center, size, gt.imgsize))
            overlap = compute_overlap(box_arr, source_boxes, 0)
            if overlap.best and overlap.best.score >= self.min_jaccard_overlap:
                box = Box(None, None, center, size)
                break

        if box is None:
            return None

        #-----------------------------------------------------------------------
        # Crop the box and adjust the ground truth
        #-----------------------------------------------------------------------
        new_size = Size(box_arr[1] - box_arr[0], box_arr[3] - box_arr[2])
        w_off = -box_arr[0]
        h_off = -box_arr[2]
        data = data[box_arr[2]:box_arr[3], box_arr[0]:box_arr[1]]
        gt = transform_gt(gt, new_size, h_off, w_off)

        return data, label, gt
Example #9
def test_blob_prefetch(nydus_anchor: NydusAnchor,
                       nydus_scratch_image: RafsImage, readahead_policy):
    """
    description:
        For rafs, there are two types of prefetching.
        1. Prefetch files from the fs layer, which means each file is prefetched one by one.
        2. Prefetch directly from the backend/blob layer, which means a range will be fetched from the blob.
    """
    # Try to delete any access log since, if it is present, bootstrap blob prefetch won't work.
    utils.execute("rm -rf *.access", shell=True)

    dist = Distributor(nydus_scratch_image.rootfs(), 8, 2)
    dist.generate_tree()
    dist.put_directories(20)
    dist.put_multiple_files(100, Size(64, Unit.KB))
    dist.put_symlinks(30)
    dist.put_hardlinks(20)
    dist.put_multiple_files(40, Size(64, Unit.KB))

    utils.clean_pagecache()

    hint_files = dist.files[-40:]
    hint_files.extend(dist.symlinks[-20:])

    hint_files = [os.path.join("/", p) for p in hint_files]
    hint_files = "\n".join(hint_files)

    nydus_scratch_image.set_backend(Backend.LOCALFS).create_image(
        readahead_policy=readahead_policy,
        readahead_files=hint_files.encode(),
    )

    rafs_conf = RafsConf(nydus_anchor, nydus_scratch_image)
    rafs_conf.set_rafs_backend(Backend.LOCALFS, image=nydus_scratch_image)
    rafs_conf.enable_records_readahead(interval=1)
    rafs_conf.dump_rafs_conf()

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    with utils.timer("Mount elapse"):
        rafs.thread_num(7).mount()
    assert rafs.is_mounted()

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())

    # TODO: Run several parallel read workers against the mount_point
    wg.setup_workload_generator()
    wg.torture_read(5, 5)
    wg.finish_torture_read()

    utils.clean_pagecache()
Example #10
def test_blobcache_recovery(
    nydus_anchor: NydusAnchor,
    rafs_conf: RafsConf,
    nydus_scratch_image: RafsImage,
):
    rafs_conf.set_rafs_backend(Backend.BACKEND_PROXY)
    rafs_conf.enable_fs_prefetch()
    rafs_conf.enable_rafs_blobcache()
    rafs_conf.dump_rafs_conf()

    dist = Distributor(nydus_scratch_image.rootfs(), 8, 2)
    dist.generate_tree()
    dirs = dist.put_directories(20)
    dist.put_multiple_files(100, Size(64, Unit.KB))
    dist.put_symlinks(30)
    dist.put_hardlinks(20)
    dist.put_multiple_files(40, Size(64, Unit.KB))
    dist.put_single_file(Size(3, Unit.MB), name="test")

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY).create_image()

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.prefetch_files("/").mount()
    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())

    wg.setup_workload_generator()
    wg.torture_read(4, 4)

    # Hopefully, prefetch can be done in 5 seconds.
    time.sleep(5)

    wg.finish_torture_read()
    rafs.umount()

    rafs2 = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs2.mount()

    wg.torture_read(4, 4)
    time.sleep(0.5)

    nc = NydusAPIClient(rafs2.get_apisock())

    begin = nc.get_backend_metrics()["read_amount_total"]
    time.sleep(1)
    end = nc.get_backend_metrics()["read_amount_total"]

    assert end == begin == 0

    wg.finish_torture_read()
Example #11
def test_prefetch_without_cache(nydus_anchor: NydusAnchor,
                                nydus_scratch_image: RafsImage,
                                rafs_conf: RafsConf):
    """Files prefetch test

    1. relative hinted prefetch files
    2. absolute hinted prefetch files
    3. source rootfs root dir.
    """

    rafs_conf.enable_fs_prefetch().set_rafs_backend(Backend.BACKEND_PROXY)
    rafs_conf.dump_rafs_conf()

    dist = Distributor(nydus_scratch_image.rootfs(), 4, 4)
    dist.generate_tree()
    dist.put_directories(20)
    dist.put_multiple_files(40, Size(8, Unit.KB))
    dist.put_hardlinks(6)
    dist.put_multiple_chinese_files(random.randint(20, 28), Size(20, Unit.KB))

    hint_files = ["/"]
    hint_files.extend(dist.files)
    hint_files.extend(dist.dirs)
    hint_files.extend(dist.symlinks)
    hint_files.extend(dist.hardlinks)

    hint_files = [os.path.join("/", p) for p in hint_files]
    hint_files = "\n".join(hint_files)

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY).create_image(
        readahead_policy="fs", readahead_files=hint_files.encode())

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()
    assert rafs.is_mounted()

    workload_gen = WorkloadGen(nydus_anchor.mount_point,
                               nydus_scratch_image.rootfs())

    # TODO: Run several parallel read workers against the mount_point
    workload_gen.setup_workload_generator()
    workload_gen.torture_read(8, 5)
    workload_gen.finish_torture_read()

    assert NydusAnchor.check_nydusd_health()
    assert not workload_gen.io_error

    assert rafs.is_mounted()
    rafs.umount()
Example #12
def test_syscalls(
    nydus_anchor: NydusAnchor,
    nydus_scratch_image: RafsImage,
    rafs_conf: RafsConf,
):

    syscall_helper = "framework/test_syscalls"
    ret, _ = utils.execute(
        ["gcc", "framework/test_syscalls.c", "-o", syscall_helper],
        shell=False,
        print_output=True,
    )
    assert ret

    dist = Distributor(nydus_scratch_image.rootfs(), 2, 2)
    dist.generate_tree()
    dist.put_single_file(Size(8, Unit.KB),
                         pos=nydus_scratch_image.rootfs(),
                         name="xattr_no_kv")
    dist.put_single_file_with_xattr(
        Size(8, Unit.KB),
        ("trusted.nydus.key", ""),
        pos=nydus_scratch_image.rootfs(),
        name="xattr_empty_value",
    )
    dist.put_single_file_with_xattr(
        Size(8, Unit.KB),
        ("trusted.nydus.key", "1234567890"),
        pos=nydus_scratch_image.rootfs(),
        name="xattr_insufficient_buffer",
    )

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY).create_image()

    rafs_conf.enable_xattr().set_rafs_backend(Backend.BACKEND_PROXY)
    rafs_conf.dump_rafs_conf()

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()

    for no in [58]:
        ret, _ = utils.execute(
            [syscall_helper, nydus_anchor.mount_point,
             str(no)],
            shell=False,
            print_output=True,
        )
        assert ret
Example #13
    def add_detections(self, filename, boxes):
        fileid = os.path.basename(filename)
        fileid = ''.join(fileid.split('.')[:-1])
        img = cv2.imread(filename)
        img_size = Size(img.shape[1], img.shape[0])
        for conf, box in boxes:
            xmin, xmax, ymin, ymax = prop2abs(box.center, box.size, img_size)
            if xmin < 0:
                xmin = 0
            if xmin >= img_size.w:
                xmin = img_size.w - 1
            if xmax < 0:
                xmax = 0
            if xmax >= img_size.w:
                xmax = img_size.w - 1
            if ymin < 0:
                ymin = 0
            if ymin >= img_size.h:
                ymin = img_size.h - 1
            if ymax < 0:
                ymax = 0
            if ymax >= img_size.h:
                ymax = img_size.h - 1
            det = Detection(fileid, conf, float(xmin + 1), float(ymin + 1),
                            float(xmax + 1), float(ymax + 1))
            self.boxes[box.label].append(det)
Example #14
    def scratch(self, scratch_dir):
        """Put various kinds of xattr value into.
        1. Very long value
        2. a common short value
        3. Nothing resides in value field
        4. Single file, multiple pairs.
        5. \n (newline)
        6. whitespace
        7. 中文
        8. Binary
        9. Only key?
        """
        self.dist.put_symlinks(100)
        files_cnt = 20
        self.dist.put_multiple_files(files_cnt, Size(9, Unit.KB))
        self.scratch_dir = os.path.abspath(scratch_dir)
        self.source_files = {}
        self.source_xattrs = {}
        self.source_dirs = {}
        self.source_dirs_xattrs = {}
        self.encoding = "gb2312"

        self.xattr_pairs = 50 if utils.get_fs_type(
            os.getcwd()) == "xfs" else 20

        # TODO: Only key without values?
        with pushd(self.scratch_dir):
            for f in self.dist.files[-files_cnt:]:
                relative_path = os.path.relpath(f, start=self.scratch_dir)
                self.source_xattrs[relative_path] = {}
                for idx in range(0, self.xattr_pairs):
                    # TODO: Random this Key
                    k = f"trusted.nydus.{Distributor.generate_random_name(20, chinese=True)}"
                    v = f"_{Distributor.generate_random_length_name(20, chinese=True)}"
                    xattr.setxattr(f, k.encode(self.encoding),
                                   v.encode(self.encoding))
                    # Use relative or canonicalized names as key to locate
                    # path in source rootfs directory. So we verify if image is
                    # packed correctly.
                    self.source_files[relative_path] = os.path.abspath(f)
                    self.source_xattrs[relative_path][k] = v

        dir_cnt = 20
        self.dist.put_directories(dir_cnt)

        # Add xattr key-value pairs to directories.
        with pushd(self.scratch_dir):
            for d in self.dist.dirs[-dir_cnt:]:
                relative_path = os.path.relpath(d, start=self.scratch_dir)
                self.source_dirs_xattrs[relative_path] = {}
                for idx in range(0, self.xattr_pairs):
                    # TODO: Random this Key
                    k = f"trusted.{Distributor.generate_random_name(20)}"
                    v = f"{Distributor.generate_random_length_name(50)}"
                    xattr.setxattr(d, k, v.encode())
                    # Use relative or canonicalized names as key to locate
                    # path in source rootfs directory. So we verify if image is
                    # packed correctly.
                    self.source_dirs[relative_path] = os.path.abspath(d)
                    self.source_dirs_xattrs[relative_path][k] = v
Example #15
def test_limited_mem(nydus_anchor, rafs_conf, nydus_image):
    """
    description: Run nydusd in a memory limited environment.
        - Use `ulimit` to limit virtual memory nydusd can use.
        - Mount rafs
        - Torture rafs
    """

    rafs_conf.enable_rafs_blobcache()
    rafs_conf.dump_rafs_conf()

    rafs = RafsMount(nydus_anchor, nydus_image, rafs_conf)
    rafs.mount(limited_mem=Size(3, Unit.GB))

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_image.rootfs())

    wg.setup_workload_generator()
    wg.torture_read(8, 10)

    nydus_anchor.start_stats_checker()
    wg.finish_torture_read()
    nydus_anchor.stop_stats_checker()

    assert not wg.io_error
    assert nydus_anchor.check_nydusd_health()
Example #16
def put_files(dist: Distributor, f_type, count, size):
    """Example:
    depth: 4
    width: 4
    layers:
    - layer1:
        - size: 10KB
          type: regular
          count: 2000
        - size: 12MB
          type: regular
          count: 10
        - size: 90MB
          type: regular
          count: 1
        - type: symlink
          count: 100

    """

    logging.info("putting %s, count %d", f_type, count)
    if f_type == "regular":
        size_in_bytes = utils.parse_size(size)
        dist.put_multiple_files(count, Size(size_in_bytes))
    elif f_type == "dir":
        dist.put_directories(count)
    elif f_type == "symlink":
        dist.put_symlinks(count)
    elif f_type == "hardlink":
        dist.put_hardlinks(count)
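
A minimal usage sketch for put_files (assuming the spec is a YAML document in the format shown in the docstring above, loaded with pyyaml, and that dist is an already constructed Distributor; the file name layers.yaml is hypothetical):

import yaml

with open("layers.yaml") as f:  # hypothetical spec file
    spec = yaml.safe_load(f)

for layer in spec["layers"]:
    # Each list item maps a layer name to its list of file descriptions.
    for _layer_name, entries in layer.items():
        for entry in entries:
            # `size` only matters for regular files, so it may be absent.
            put_files(dist, entry["type"], entry["count"], entry.get("size"))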
Example #17
    def initialize(self):
        self.anchors = get_anchors_for_preset(self.preset)
        self.vheight = len(self.anchors)
        self.vwidth = self.num_classes + 5  # background class + location offsets
        self.img_size = Size(1000, 1000)
        self.anchors_arr = anchors2array(self.anchors, self.img_size)
        self.initialized = True
Example #18
    def __build_sample_list(self, root, annot_files, dataset_name):
        image_root = root + '/JPEGImages/'
        samples = []
        for fn in tqdm(annot_files, desc=dataset_name, unit='samples'):
            with open(fn, 'r') as f:
                doc = lxml.etree.parse(f)
                filename = image_root + doc.xpath(
                    '/annotation/filename')[0].text
                if not os.path.exists(filename):
                    continue
                img = cv2.imread(filename)
                imgsize = Size(img.shape[1], img.shape[0])
                boxes = []
                objects = doc.xpath('/annotation/object')
                for obj in objects:
                    label = obj.xpath('name')[0].text
                    if label == myObject:
                        xmin = int(float(obj.xpath('bndbox/xmin')[0].text))
                        xmax = int(float(obj.xpath('bndbox/xmax')[0].text))
                        ymin = int(float(obj.xpath('bndbox/ymin')[0].text))
                        ymax = int(float(obj.xpath('bndbox/ymax')[0].text))
                        center, size = abs2prop(xmin, xmax, ymin, ymax,
                                                imgsize)
                        box = Box(label, self.lname2id[label], center, size)
                        boxes.append(box)
                if not boxes:
                    continue
                sample = Sample(filename, boxes, imgsize)
                samples.append(sample)
        return samples
Example #19
    def __init__(self, **kwargs):
        self.__StarID = kwargs['StarID']
        self.__HIP = kwargs['HIP']
        self.__HD = kwargs['HD']
        self.__HR = kwargs['HR']
        self.__BayerFlamsteed = kwargs['BayerFlamsteed']
        self.__ProperName = kwargs['ProperName']
        self.__RA = kwargs['RA']
        self.__Dec = kwargs['Dec']
        self.__Mag = kwargs['Mag']
        self.__AbsMag = kwargs['AbsMag']
        self.__Spectrum = kwargs['Spectrum']
        self.__ColorIndex = kwargs['ColorIndex']

        # The HYG catalog contains HIP, HD, and HR identifiers; the
        # `catalog` property will correspond to the one we prefer for
        # the `identifier`.
        self.identifier = self.__HIP
        self.catalog = 'HIP'

        self.type = 0
        self.size = Size(-1, -1)

        # NOTE: HYG gives RA in hours and Dec in degrees.
        self.ra = EquatorialCoordinate(self.__RA, hours=True)
        self.dec = EquatorialCoordinate(self.__Dec, degrees=True)
        self.magnitude = float(self.__Mag)

        # HYG gives us a lot of aliases. Add them.
        self.add_alias(self.__HD, 'HD')
        self.add_alias(self.__HR, 'HR')
        self.add_alias(self.__ProperName)
Example #20
    def __call__(self, data, label, gt):
        #-----------------------------------------------------------------------
        # Calculate sizes and offsets
        #-----------------------------------------------------------------------
        ratio = random.uniform(1, self.max_ratio)  # 2

        orig_size = gt.imgsize  # 400,300
        new_size = Size(int(orig_size.w * ratio),
                        int(orig_size.h * ratio))  # 800, 600
        h_off = random.randint(0, new_size.h - orig_size.h)  # 0 ~ (600-300), e.g. 100
        w_off = random.randint(0, new_size.w - orig_size.w)  # 0 ~ (800-400), e.g. 50

        #-----------------------------------------------------------------------
        # Create the new image and place the input image in it
        #-----------------------------------------------------------------------
        imsi = Image.new('RGB', (new_size.w, new_size.h), self.mean_value)
        imsi.paste(data,
                   (w_off, h_off, w_off + orig_size.w, h_off + orig_size.h))

        #-----------------------------------------------------------------------
        # Transform the ground truth
        #-----------------------------------------------------------------------
        gt = transform_gt(gt, new_size, h_off, w_off)

        return imsi, label, gt
Example #21
    def mount(self, limited_mem=False, wait_mount=True, dump_config=True):
        """
        :limited_mem: a Size limiting the nydusd process's virtual memory usage
                      (converted to KB for `ulimit -v`) so as to inject some faults.
        """
        cmd = str(self).split()
        self.anchor.checker_sock = self.get_apisock()

        if dump_config:
            self.conf.dump_rafs_conf()

        if isinstance(limited_mem, Size):
            limit_kb = limited_mem.B // Size(1, Unit.KB).B
            cmd = f"ulimit -v {limit_kb};" + cmd

        _, p = utils.run(
            cmd,
            False,
            shell=False,
            stdout=self.anchor.logging_file,
            stderr=self.anchor.logging_file,
        )
        self.p = p

        if wait_mount:
            self._wait_for_mount()

        return self
Example #22
    def calc_file_md5(path):
        md5 = hashlib.md5()
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(Size(128, Unit.KB).B), b""):
                md5.update(block)

        return md5.digest()
Example #23
def test_different_partitions(nydus_anchor: NydusAnchor, rafs_conf):
    loop_file_1 = tempfile.NamedTemporaryFile(suffix="loop")
    loop_file_2 = tempfile.NamedTemporaryFile(suffix="loop")
    loop_mnt_1 = tempfile.TemporaryDirectory(dir=nydus_anchor.workspace)
    loop_mnt_2 = tempfile.TemporaryDirectory(dir=nydus_anchor.workspace)

    os.posix_fallocate(loop_file_1.fileno(), 0, Size(400, Unit.MB).B)
    os.posix_fallocate(loop_file_2.fileno(), 0, Size(400, Unit.MB).B)

    utils.execute(["mkfs.ext4", "-F", loop_file_1.name])
    utils.execute(["mkfs.ext4", "-F", loop_file_2.name])
    utils.execute(["mount", loop_file_1.name, loop_mnt_1.name])
    utils.execute(["mount", loop_file_2.name, loop_mnt_2.name])

    # TODO: Put more special files into
    dist1 = Distributor(loop_mnt_1.name, 5, 7)
    dist1.generate_tree()
    dist1.put_multiple_files(100, Size(12, Unit.KB))

    dist2 = Distributor(loop_mnt_2.name, 5, 7)
    dist2.generate_tree()
    dist2.put_symlinks(20)
    dist2.put_multiple_files(50, Size(12, Unit.KB))

    Whiteout.mirror_files(dist2.files[:20], loop_mnt_2.name, loop_mnt_1.name)

    parent_image = (
        RafsImage(nydus_anchor, loop_mnt_1.name)
        .set_backend(Backend.OSS)
        .create_image()
    )

    image = RafsImage(nydus_anchor, loop_mnt_2.name)
    image.set_backend(Backend.OSS).create_image(parent_image=parent_image)

    rafs_conf.set_rafs_backend(Backend.OSS)
    rafs = RafsMount(nydus_anchor, image, rafs_conf)
    rafs.mount()

    nydus_anchor.mount_overlayfs([image.rootfs(), parent_image.rootfs()])

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_anchor.overlayfs)
    wg.setup_workload_generator()
    wg.torture_read(5, 5)
    wg.finish_torture_read()

    utils.execute(["umount", loop_mnt_1.name])
    utils.execute(["umount", loop_mnt_2.name])

    nydus_anchor.umount_overlayfs()
Example #24
def decode_location(box, anchor):
    box[box > 100] = 100  # only happens in early training

    x = box[0] / 10 * anchor.size.w + anchor.center.x
    y = box[1] / 10 * anchor.size.h + anchor.center.y
    w = exp(box[2] / 5) * anchor.size.w
    h = exp(box[3] / 5) * anchor.size.h
    return Point(x, y), Size(w, h)
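
A quick sanity check of the decoding formula above, as a sketch: an all-zero prediction decodes back onto its anchor, since exp(0) = 1. The Anchor namedtuple is a hypothetical stand-in for the project's real anchor type; Point and Size are the same helpers used throughout these examples.

from collections import namedtuple
import numpy as np

Anchor = namedtuple("Anchor", ["center", "size"])  # hypothetical stand-in
anchor = Anchor(center=Point(0.5, 0.5), size=Size(0.2, 0.4))

# With zero offsets and zero log-scales the decoded box equals the anchor.
center, size = decode_location(np.zeros(4), anchor)
assert (center.x, center.y) == (0.5, 0.5)
assert (size.w, size.h) == (0.2, 0.4)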
Example #25
def test_layered_rebuild(
    nydus_anchor,
    nydus_scratch_image: RafsImage,
    nydus_scratch_parent_image: RafsImage,
    rafs_conf: RafsConf,
    backend,
):
    """
    title: Layered image rebuild
    description:
        - Parent and upper have files whose contents are exactly the same.
        - Use file stats to check if a file is overlayed.
        - Files with the same name but different modes.
        - Files with xattr in parent should be shadowed.
    pass_criteria:
        - Mount successfully.
        - No data corruption.
    """
    rafs_conf.set_rafs_backend(backend)
    rafs_conf.enable_rafs_blobcache()
    rafs_conf.dump_rafs_conf()

    parent_rootfs = nydus_scratch_parent_image.rootfs()
    upper_rootfs = nydus_scratch_image.rootfs()

    nydus_anchor.mount_overlayfs(
        [nydus_scratch_image.rootfs(),
         nydus_scratch_parent_image.rootfs()])

    shared_files = []

    dist_parent = Distributor(parent_rootfs, 6, 4)
    dist_parent.generate_tree()
    shared_files.extend(dist_parent.put_multiple_files(100, Size(64, Unit.KB)))
    shared_files.extend(dist_parent.put_symlinks(30))
    shared_files.extend(dist_parent.put_hardlinks(30))
    xattr_verifier = XattrVerifier(parent_rootfs, dist_parent)
    Whiteout.mirror_files(shared_files, parent_rootfs, upper_rootfs)

    xattr_verifier.scratch(parent_rootfs)

    nydus_scratch_parent_image.set_backend(backend).create_image()
    nydus_scratch_image.set_backend(backend).create_image(
        parent_image=nydus_scratch_parent_image)

    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()

    workload_gen = WorkloadGen(nydus_anchor.mount_point,
                               nydus_anchor.overlayfs)
    workload_gen.setup_workload_generator()

    xattr_verifier.verify(nydus_anchor.mount_point)

    assert workload_gen.verify_entire_fs()
    workload_gen.torture_read(5, 4)

    workload_gen.finish_torture_read()
Example #26
    def __init__(self):
        self.bg = pygame.image.load('game_1/game_1.jpg')
        self.body_frame = None
        self.body_index_frame = None
        self.solved = True
        self.time_angles = None
        self.threshold = 5  # in degrees

        self.player_size = Size(SCREEN_SIZE.W / 3, SCREEN_SIZE.H / 3)

        KINECT_EVENT_STREAM.subscribe(self.event_handler)
Example #27
    def _put_single_file(
        self,
        parent_dir,
        file_size: Size,
        specified_name=None,
        letters=False,
        chinese=False,
        name_len=32,
    ):
        if specified_name is None:
            name = Distributor.generate_random_length_name(
                name_len, suffix="regular", chinese=chinese
            )
        else:
            name = specified_name

        this_path = os.path.join(parent_dir, name)

        with pushd(parent_dir):
            if chinese:
                fd = os.open(name.encode("gb2312"), os.O_CREAT | os.O_RDWR)
            else:
                fd = os.open(name.encode("ascii"), os.O_CREAT | os.O_RDWR)

            if file_size.B != 0:
                left = file_size.B
                logging.debug("Putting file %s", this_path)
                while left:
                    length = Size(1, Unit.MB).B if Size(1, Unit.MB).B < left else left
                    if not letters:
                        left -= os.write(fd, os.urandom(length))
                    else:
                        picked_list = "".join(
                            random.choices(string.ascii_lowercase[1:4], k=length)
                        )
                        left -= os.write(fd, picked_list.encode())

            os.close(fd)

        self.files.append(self._relative_path_to_top(this_path))
        return name, this_path
Example #28
    def __build_sample_list(self, root, annot_files):
        """
        Build a list of samples for the VOC dataset (either trainval or test)
        """
        image_root = os.path.join(root, 'rgb-images')
        samples = []
        #-----------------------------------------------------------------------
        # Process each annotated sample
        #-----------------------------------------------------------------------
        for fn in tqdm(annot_files, desc='ucf_24_frame', unit='samples'):
            act = fn.split('/')[4]
            video = fn.split('/')[5]
            frame_id = fn.split('/')[-1][:-4]
            image_path = os.path.join(image_root, act, video,
                                      '{}.jpg'.format(frame_id))

            #---------------------------------------------------------------
            # Get the file dimensions
            #---------------------------------------------------------------
            if not os.path.exists(image_path):
                continue

            img = cv2.imread(image_path)
            imgsize = Size(img.shape[1], img.shape[0])

            #---------------------------------------------------------------
            # Get boxes for all the objects
            #---------------------------------------------------------------
            boxes = []
            with open(fn, 'r') as fin:
                objects = fin.readlines()
            for line in objects:
                line = line[:-1]
                #-----------------------------------------------------------
                # Get the properties of the box and convert them to the
                # proportional terms
                #-----------------------------------------------------------
                obj = line.split(' ')
                label = int(obj[0]) - 1
                xmin = int(float(obj[1]))
                ymin = int(float(obj[2]))
                xmax = int(float(obj[3]))
                ymax = int(float(obj[4]))

                center, size = abs2prop(xmin, xmax, ymin, ymax, imgsize)
                box = Box(self.lid2name[label], label, center, size)
                boxes.append(box)
            if not boxes:
                continue
            sample = Sample(image_path, boxes, imgsize)
            samples.append(sample)

        return samples
Example #29
    def _build_sample_list(self, root, annot_files):
        image_root = root + 'VOC_Jpeg/'
        image_seg_root = root + 'VOC_Segmentation/'
        samples = []

        for fn in tqdm(annot_files, unit='samples'):
            with open(fn, 'r') as f:
                doc = lxml.etree.parse(f)
                filename = image_root + doc.xpath(
                    '/annotation/filename')[0].text
                with open(fn, 'r') as f1:
                    doc1 = lxml.etree.parse(f1)
                    seg_gt = image_seg_root + doc1.xpath(
                        '/annotation/filename')[0].text
                    seg_gt = seg_gt.replace('jpg', 'png')
                    seg_gt_to_compare = seg_gt

                #---------------------------------------------------------------
                # Get the file dimensions
                #---------------------------------------------------------------
                if not os.path.exists(filename):
                    continue

                img = cv2.imread(filename)
                img_seg_gt = cv2.imread(seg_gt)
                imgsize = Size(img.shape[1], img.shape[0])

                #---------------------------------------------------------------
                # Get boxes for all the objects
                #---------------------------------------------------------------
                boxes = []
                objects = doc.xpath('/annotation/object')
                for obj in objects:
                    #-----------------------------------------------------------
                    # Get the properties of the box and convert them to the
                    # proportional terms
                    #-----------------------------------------------------------
                    label = obj.xpath('name')[0].text
                    xmin = int(float(obj.xpath('bndbox/xmin')[0].text))
                    xmax = int(float(obj.xpath('bndbox/xmax')[0].text))
                    ymin = int(float(obj.xpath('bndbox/ymin')[0].text))
                    ymax = int(float(obj.xpath('bndbox/ymax')[0].text))
                    center, size = abs2prop(xmin, xmax, ymin, ymax, imgsize)
                    box = Box(label, self.lname2id[label], center, size)
                    boxes.append(box)
                if not boxes:
                    continue
                sample = Sample(filename, boxes, imgsize, seg_gt,
                                seg_gt_to_compare)
                samples.append(sample)

        return samples
Example #30
    def __build_sample_list(self, root, dataset_name):
        """
        Build a list of samples for the VOC dataset (either trainval or test)
        """
        image_root = root + '/JPEGImages/'
        annot_root = root + '/Annotations/'
        annot_files = glob(annot_root + '/*xml')
        samples = []

        #-----------------------------------------------------------------------
        # Process each annotated sample
        #-----------------------------------------------------------------------
        for fn in tqdm(annot_files, desc=dataset_name, unit='samples'):
            with open(fn, 'r') as f:
                doc = lxml.etree.parse(f)
                filename = image_root + doc.xpath(
                    '/annotation/filename')[0].text

                #---------------------------------------------------------------
                # Get the file dimensions
                #---------------------------------------------------------------
                if not os.path.exists(filename):
                    continue

                img = cv2.imread(filename)
                imgsize = Size(img.shape[1], img.shape[0])

                #---------------------------------------------------------------
                # Get boxes for all the objects
                #---------------------------------------------------------------
                boxes = []
                objects = doc.xpath('/annotation/object')
                for obj in objects:
                    #-----------------------------------------------------------
                    # Get the properties of the box and convert them to the
                    # proportional terms
                    #-----------------------------------------------------------
                    label = obj.xpath('name')[0].text
                    xmin = int(float(obj.xpath('bndbox/xmin')[0].text))
                    xmax = int(float(obj.xpath('bndbox/xmax')[0].text))
                    ymin = int(float(obj.xpath('bndbox/ymin')[0].text))
                    ymax = int(float(obj.xpath('bndbox/ymax')[0].text))
                    center, size = abs2prop(xmin, xmax, ymin, ymax, imgsize)
                    box = Box(label, self.lname2id[label], center, size)
                    boxes.append(box)
                if not boxes:
                    continue
                sample = Sample(filename, boxes, imgsize)
                samples.append(sample)

        return samples