Example #1
0
    def update_image_2(self, container_name, image_name):
        """
        Update a container's image.
        :param container_name: :class:`str`, container name
        :param image_name: :class:`str`, the full image name, like alpine:3.3
        :return: :class:`bool`, True on success, otherwise False.
        """
        code, container = self.get_container(container_name)
        if code == httplib.NOT_FOUND:
            raise ContainerNotFound(container_name)
        elif code != httplib.OK:
            raise GeneralError(code)

        _, old_image_name, _ = utils.parse_image_name(container.image)
        repository, name, version = utils.parse_image_name(image_name)
        if not repository or repository.lower() != DOCKER_NEG:
            image_name = "{0}/{1}:{2}".format(DOCKER_NEG, name, version)

        if not repo.image_exists(name, tag=version):
            raise ImageNotFound("{0} is not in the docker.neg repository.".format(image_name))

        if old_image_name.lower() != name.lower():
            raise ImageConflict("{0} does not match the container's current image.".format(image_name))

        code, result = self.pull_image(name, version)
        if code != httplib.OK:
            raise GeneralError(
                "pull image {0}:{1} failure, status code {2}, result: {3}".format(name, version, code, result)
            )

        code, result = self.update(container_name, tag=version)
        if code != httplib.OK:
            raise GeneralError(
                "Update container {0} failure, status code {1}, result: {2}".format(container_name, code, result)
            )

        return True
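All of the examples on this page call a parse_image_name helper. Examples #1 through #3 expect it to return a (repository, name, version) triple for strings such as "docker.neg/alpine:3.3". The helper itself is not shown here, so the following is only a minimal sketch of the assumed behavior (the function body and defaults are illustrative, not the real utils module):

def parse_image_name(image):
    """Assumed behavior: split "repository/name:tag" into (repository, name, tag).

    Missing parts come back as empty strings, e.g. "alpine:3.3" -> ("", "alpine", "3.3").
    """
    repository, _, rest = image.rpartition('/')
    name, _, tag = rest.partition(':')
    return repository, name, tag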
Example #2
0
    def update_image(self, container_name, image_name):
        """
        Update a container's image.
        :param container_name: :class:`str`, container name
        :param image_name: :class:`str`, the full image name, like alpine:3.3
        :return: :class:`bool`, True on success, otherwise False.
        """
        code, container = self.get_container(container_name)
        if code != httplib.OK:
            self.logger.error(
                "Container %s is not exists. error code %s, error message %s", container_name, code, container
            )
            return False

        _, old_image_name, _ = utils.parse_image_name(container.image)
        repository, name, version = utils.parse_image_name(image_name)
        if not repository or repository.lower() != DOCKER_NEG:
            self.logger.error("You image %s must have a 'docker.neg/' prefix string", image_name)
            return False

        if not repo.image_exists(name, tag=version):
            self.logger.error("You image %s must be location in docker.neg repository.", image_name)
            return False

        if old_image_name.lower() != name.lower():
            self.logger.error("You image %s must be same with container's Image.", image_name, container.image)
            return False

        code, result = self.update(container_name, tag=version)
        if code != httplib.OK:
            self.logger.error(
                "Update container %s with image failure, code %s, result %s", container_name, code, result
            )
            return False

        return True
Example #3
0
    def create_container(
        self,
        name,
        image,
        hostname="dfis",
        networkmode="bridge",
        ports=None,
        volumes=None,
        env=None,
        restartpolicy="no",
        restartretrycount="2",
        command="",
    ):
        """testing

        :param name:
        :param image:
        :param hostname:
        :param networkmode: `class`:`str`, host | bridge
        :param ports: `class`:`list`, [{'type':'tcp', 'publicport':8080, 'privateport':80, 'ip':'0.0.0.0}]
        :param volumes: `class`:`list`, [{"containervolume":"/app-conf", "hostvolume":"/opt/app/app-conf"}]
        :param env: `class`:`list`, ["var=value", "var1=value1"]
        :param restartpolicy: `class`:`str`, always | on-failure | no(default)
        :param restartretrycount: 仅当 restartpolicy 是 on-failure 时才有用
        :param command:
        :return:
        """
        restartpolicy = restartpolicy.lower()
        repository, image_name, version = utils.parse_image_name(image)
        image = "{0}/{1}:{2}".format(DOCKER_NEG, image_name, version)
        body = dict(
            name=name,
            image=image,
            hostname=hostname,
            networkmode=networkmode,
            ports=ports or [],
            volumes=volumes or [],
            env=env or [],
            restartpolicy=restartpolicy,
            command=command,
        )
        if restartpolicy == "on-failure":
            body["restartretrycount"] = restartretrycount

        return "/dockerapi/v2/containers", body
Example #4
0
    def verify_image_hash(self, repoTag):
        """Return (signed, verified): whether any hash is registered for repoTag,
        and whether a registered hash matches the locally computed image hash."""
        _, namespace, _, _ = parse_image_name(repoTag)
        addresses = Hub().addresses(namespace).get(namespace)
        if not addresses:
            return False, False
        image_hash = self.get_image_hash_uint(repoTag)
        d = DaoHubVerify()
        signed = False
        verify = False
        for address in addresses:
            try:
                # queryImage returns the hash registered for repoTag at this address
                queried_hash = d.queryImage(address, repoTag)
                if queried_hash[0]:
                    signed = True
                if queried_hash[0] == image_hash:
                    verify = True
            except DecodingError:
                continue
        return signed, verify
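Note that Example #4 and Example #6 below unpack four values from parse_image_name, so they assume a different helper than the three-field utils.parse_image_name sketched earlier, presumably splitting a full reference such as "registry/namespace/name:tag". A hypothetical sketch of that variant (illustrative only):

def parse_image_name(repo_tag):
    """Assumed 4-field variant: "registry/namespace/name:tag" -> (registry, namespace, name, tag)."""
    name_part, _, tag = repo_tag.partition(':')
    parts = name_part.split('/')
    while len(parts) < 3:
        parts.insert(0, '')  # missing registry/namespace come back as ''
    return parts[0], parts[1], '/'.join(parts[2:]), tag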
Example #5
def create_data_file(data_file,
                     data_source,
                     tile_map,
                     channel='gfp',
                     drop_categories=None,
                     max_tiles=MAX_TILES,
                     max_timepoint=MAX_TIMEPOINT,
                     sequence_len=SEQUENCE_LEN):
    """ Create a data file

    :param Path data_file:
        The data file to write
    :param Path data_source:
        The base experiment directory to load
    :param dict tile_map:
        A map of experiment data to tile numbers and categories
    :param int max_tiles:
        If not None, the maximum number of tiles to select (rounded up by category)
    :param int max_timepoint:
        If not None, the maximum timepoint to use for a pair
    :param int sequence_len:
        How many sequential files to load for a given tile
    """

    if drop_categories is None:
        drop_categories = [0]

    # Collect all the tile data and group by category
    categories = {}
    for experiment in sorted(tile_map):
        category_map = tile_map[experiment]
        experiment_dir = data_source / experiment

        if not experiment_dir.is_dir():
            print('Skipping {}'.format(experiment_dir))
            continue
        channel_dir = guess_channel_dir(experiment_dir / 'Corrected',
                                        channel)[1]

        for tiledir in channel_dir.iterdir():
            if not tiledir.is_dir():
                continue
            tile_num = parse_tile_name(tiledir.name)['tile']
            if tile_num in category_map:
                categories.setdefault(category_map[tile_num],
                                      []).append(tiledir)

    # Drop the bad categories
    for drop_category in drop_categories:
        if drop_category in categories:
            del categories[drop_category]

    print('Number of tiles/category:')
    for category in sorted(categories):
        print('* {}: {}'.format(category, len(categories[category])))

    # How many tiles per category for a balanced selection
    min_tiles_per_cat = min([len(c) for c in categories.values()])
    if max_tiles is None:
        tiles_per_cat = min_tiles_per_cat
    else:
        tiles_per_cat = math.ceil(max_tiles / len(categories))
        tiles_per_cat = min([min_tiles_per_cat, tiles_per_cat])
    print('Loading {} tiles/category'.format(tiles_per_cat))

    # Select evenly from each category, with sequential timepoints, without replacement
    final_images = []
    for category in categories.values():
        target_tiles = random.sample(category, k=tiles_per_cat)  # without replacement
        for target_tile in target_tiles:
            target_images = [
                t for t in target_tile.iterdir()
                if t.is_file() and t.suffix in ('.tif', )
            ]
            target_images = [
                t for t in sorted(target_images)
                if parse_image_name(t.name)['timepoint'] <= max_timepoint
            ]
            target_index = random.randint(0, len(target_images) - sequence_len)
            final_images.extend(target_images[target_index:target_index +
                                              sequence_len])

    # Finally, shuffle everything one last time
    random.shuffle(final_images)

    # Write the results to a data file
    data_file.parent.mkdir(exist_ok=True, parents=True)
    with data_file.open('wt') as fp:
        fp.write('#Image,Rotation,HorizontalFlip' + os.linesep)
        for final_image in final_images:
            fp.write('{},{},{}{}'.format(final_image, random.randint(0, 3),
                                         random.randint(0, 1), os.linesep))
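In Example #5 the tile_map argument maps each experiment name to a dict of tile number to category label, and category 0 is dropped by default. A hypothetical invocation (all paths and experiment names below are made up for illustration) could look like:

from pathlib import Path

# experiment name -> {tile number: category label}; category 0 is dropped by default
tile_map = {
    '2019-01-15-experiment': {1: 1, 2: 2, 3: 0},
    '2019-01-22-experiment': {1: 2, 4: 1},
}
create_data_file(Path('training/data.csv'),
                 Path('/data/experiments'),
                 tile_map,
                 channel='gfp',
                 max_tiles=64,
                 sequence_len=5)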
Example #6
0
    def pull_image(self, repository, tag=None, username=None, password=None):
        _, _, _, _tag = parse_image_name(repository)
        if not tag:
            tag = _tag
        auth_config = None
        if username and password:
            auth_config = dict(username=username, password=password)
        resp = self.pull(repository,
                         tag,
                         stream=True,
                         insecure_registry=True,
                         auth_config=auth_config)

        def iter_json():
            for i in resp:
                i = i.strip()
                if not i:
                    continue
                try:
                    j = json.loads(i)
                    yield j
                except ValueError:
                    continue

        layers = {}
        for j in iter_json():
            if j.get('status') == 'Pulling fs layer':
                layers[j.get('id')] = {}
            elif layers or j.get('status') == 'Downloading':
                break
            elif 'error' in j:
                if 'not found' in j['error']:
                    raise NotFound(j)
                else:
                    raise APIException(j)

        def iter_progress():
            for _j in iter_json():
                if _j.get('status') == 'Downloading':
                    layers[_j.get('id')] = _j.get('progressDetail')
                    total = None
                    current = None
                    percent = None
                    # Only aggregate once every layer has reported progressDetail
                    if all(layers.values()):
                        total = sum(
                            [i.get('total', 0) for i in layers.values()])
                        current = sum(
                            [i.get('current', 0) for i in layers.values()])
                        if total:
                            percent = current * 100 / total
                    yield dict(layer_count=len(layers),
                               layers=layers,
                               current=current,
                               total=total,
                               percent=percent,
                               finished=False)

        task_id = 'p_%s' % gen_random_str(8)

        def consume():
            cache = Cache()
            for i in iter_progress():
                cache.set(task_id, i)
            cache.set(task_id, {'finished': True, 'percent': 100})

        gevent.spawn(consume)
        # consume()
        return task_id
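pull_image in Example #6 writes progress dictionaries into a Cache under a generated task id and returns that id immediately while a gevent worker consumes the pull stream. Assuming the Cache class also exposes a get() method (only set() appears above, so this is an assumption), a caller could poll for progress like this:

import time

def wait_for_pull(task_id, interval=1.0):
    # Poll the progress dict written by the gevent worker in pull_image.
    # Cache().get(task_id) is assumed to return the last value passed to Cache().set().
    cache = Cache()
    while True:
        progress = cache.get(task_id) or {}
        print('pull progress: %s%%' % progress.get('percent'))
        if progress.get('finished'):
            return progress
        time.sleep(interval)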