def insert_item(self, data, src, target, pedigree, contentsource):
        """Queue an item for handling by a later insert_version call.

        Items accumulate in self.inserts keyed by product then version,
        so insert_version can see every item of a version at once and
        choose a preferred one (e.g. prefer root.tar.xz when both it
        and squashfs are available).
        """
        product_name, version_name, item_name = pedigree
        per_version = self.inserts.setdefault(
            product_name, {}).setdefault(version_name, {})

        # Prefer an explicit ftype on the item itself; otherwise derive
        # it from the flattened product data.
        ftype = data.get('ftype')
        if 'ftype' not in data:
            flat = util.products_exdata(src, pedigree, include_top=False)
            ftype = flat.get('ftype')

        per_version[item_name] = (
            ftype, (data, src, target, pedigree, contentsource))
Exemple #2
0
    def get_product_items(self, itemfilter=None):
        """
        Yield flattened product items from this ContentSource that match
        the filter.

        :type itemfilter: Optional[SSFilter]
        """
        itemfilter = itemfilter or AndFilter()  # empty AndFilter is true

        raw = super().read()
        super().close()
        stream = json.loads(raw)
        assert stream.get('format') == 'products:1.0', \
            'simplestreams product stream is of supported version'

        expand_tree(stream)

        for product_name, product in stream.get('products', {}).items():
            for version_name, version in product.get('versions', {}).items():
                for item_name in version.get('items', {}):
                    pedigree = (product_name, version_name, item_name)
                    flattened = products_exdata(stream, pedigree)
                    item = self._extend_item_info(flattened)
                    if itemfilter.matches(item):
                        yield item
Exemple #3
0
    def insert_item(self, data, src, target, pedigree, contentsource):
        """Overridable from `BasicMirrorWriter`.

        Stores the item's content in self.store and hard-links it into
        place for the subarch(es) this product covers.
        """
        # Flatten the metadata for (product, version, item) into one dict.
        item = products_exdata(src, pedigree)
        checksums = item_checksums(data)
        # The sha256 checksum doubles as the content tag in the store.
        tag = checksums["sha256"]
        size = data["size"]
        ftype = item["ftype"]
        filename = os.path.basename(item["path"])
        if ftype == "archive.tar.xz":
            links = extract_archive_tar(
                self.store, filename, tag, checksums, size, contentsource
            )
        else:
            links = insert_file(
                self.store, filename, tag, checksums, size, contentsource
            )

        osystem = get_os_from_product(item)

        # link_resources creates a hardlink for every subarch. Every Ubuntu
        # product in a SimpleStream contains a list of subarches which list
        # what subarches are a subset of that subarch. For example Xenial
        # ga-16.04 has the subarches list hwe-{p,q,r,s,t,u,v,w},ga-16.04.
        # Kernel flavors are the same arch, the only difference is the kernel
        # config. So ga-16.04-lowlatency has the same subarch list as ga-16.04.
        # If we create hard links for all subarches a kernel flavor may
        # overwrite the generic kernel hard link. This happens if a kernel
        # flavor is processed after the generic kernel. Since MAAS doesn't use
        # the other hard links only create hard links for the subarch of the
        # product we have and a rolling link if it's a rolling kernel.
        if "subarch" in item:
            # MAAS uses the 'generic' subarch when it doesn't know which
            # subarch to use. This happens during enlistment and commissioning.
            # Allow the 'generic' kflavor to own the 'generic' hardlink. The
            # generic kernel should always be the ga kernel for xenial+,
            # hwe-<first letter of release> for older releases.
            if item.get("kflavor") == "generic" and (
                item["subarch"].startswith("ga-")
                or item["subarch"] == "hwe-%s" % item["release"][0]
            ):
                subarches = {item["subarch"], "generic"}
            else:
                subarches = {item["subarch"]}
        else:
            subarches = {"generic"}

        # NOTE(review): assumes 'subarch' is present (with at least two
        # "-"-separated parts) whenever 'rolling' is set; item["subarch"]
        # would raise KeyError in the {"generic"} branch above -- confirm
        # upstream guarantees this.
        if item.get("rolling", False):
            subarch_parts = item["subarch"].split("-")
            subarch_parts[1] = "rolling"
            subarches.add("-".join(subarch_parts))
        link_resources(
            snapshot_path=self.root_path,
            links=links,
            osystem=osystem,
            arch=item["arch"],
            release=item["release"],
            label=item["label"],
            subarches=subarches,
            bootloader_type=item.get("bootloader-type"),
        )
Exemple #4
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     """Record the flattened item, tagged with its download URL.

     src and target are top level products:1.0 trees; contentsource is
     a ContentSource when 'path' exists in data, else None.
     """
     data = s_util.products_exdata(src, pedigree)
     if 'path' in data:
         data['item_url'] = contentsource.url
     self.json_entries.append(data)
Exemple #5
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     """See `ObjectStoreMirrorWriter`."""
     super(MAASMirrorWriter, self).insert_item(
         data, src, target, pedigree, contentsource)
     flat = util.products_exdata(src, pedigree)
     path = data.get('path')
     if path is not None:
         self.extract_item(path, flat)
Exemple #6
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     # src/target are top level products:1.0 trees; contentsource is a
     # ContentSource when 'path' exists in data, otherwise None.
     flattened = s_util.products_exdata(src, pedigree)
     if 'path' in flattened:
         flattened.update({'item_url': contentsource.url})
     self.json_entries.append(flattened)
def _moditem(src, path, pedigree, modfunc):
    """Rewrite one products entry stored at 'path' in the 'src' mirror.

    Loads the tree, applies modfunc to the (unflattened) data found at
    pedigree, and stores the updated tree back.
    """
    store = src.objectstore
    tree = util.load_content(store.source(path).read())
    item = util.products_exdata(tree, pedigree, insert_fieldnames=False)
    util.products_set(tree, modfunc(item), pedigree)
    store.insert_content(path, util.dump_data(tree))
    def insert_item(self, data, src, target, pedigree, contentsource):
        """Overridable from `BasicMirrorWriter`.

        Registers this product's item in self.boot_images_dict under
        every ImageSpec it should be visible as.
        """
        item = products_exdata(src, pedigree)
        if self.validate_products and not validate_product(item, pedigree[0]):
            maaslog.warning("Ignoring unsupported product %s" % pedigree[0])
            return
        # NOTE: shadows the stdlib 'os' module name inside this method.
        os = get_os_from_product(item)
        arch = item["arch"]
        subarches = item.get("subarches", "generic")
        # Bootloaders are recorded with release=<bootloader-type> and a
        # 'bootloader' kflavor.
        if item.get("bootloader-type") is None:
            release = item["release"]
            kflavor = item.get("kflavor", "generic")
        else:
            release = item["bootloader-type"]
            kflavor = "bootloader"
        label = item["label"]
        base_image = ImageSpec(os, arch, None, kflavor, release, label)
        compact_item = clean_up_repo_item(item)

        if os == "ubuntu-core":
            # For Ubuntu Core we only want one entry per release/arch/gadget
            gadget = item.get("gadget_snap", "generic")
            kflavor = item.get("kernel_snap", "generic")
            release = "%s-%s" % (release, gadget)
            self.boot_images_dict.setdefault(
                base_image._replace(
                    subarch="generic", kflavor=kflavor, release=release
                ),
                compact_item,
            )
        else:
            # setdefault: do not clobber an entry already registered for
            # the same spec by an earlier product.
            for subarch in subarches.split(","):
                self.boot_images_dict.setdefault(
                    base_image._replace(subarch=subarch), compact_item
                )

            # HWE resources need to map to a specific resource, and not just to
            # any of the supported subarchitectures for that resource.
            subarch = item.get("subarch", "generic")
            self.boot_images_dict.set(
                base_image._replace(subarch=subarch), compact_item
            )

            if os == "ubuntu" and item.get("version") is not None:
                # HWE resources with generic, should map to the HWE that ships
                # with that release. Starting with Xenial kernels changed from
                # using the naming format hwe-<letter> to ga-<version>. Look
                # for both.
                hwe_archs = ["ga-%s" % item["version"], "hwe-%s" % release[0]]
                if subarch in hwe_archs and "generic" in subarches:
                    self.boot_images_dict.set(
                        base_image._replace(subarch="generic"), compact_item
                    )
Exemple #9
0
    def insert_item(self, data, src, target, pedigree, contentsource):
        """Overridable from `BasicMirrorWriter`.

        Stores the item's content in self.store and hard-links it into
        place for the subarch(es) this product covers.
        """
        # Flatten the metadata for (product, version, item) into one dict.
        item = products_exdata(src, pedigree)
        checksums = item_checksums(data)
        # The sha256 checksum doubles as the content tag in the store.
        tag = checksums['sha256']
        size = data['size']
        ftype = item['ftype']
        filename = os.path.basename(item['path'])
        if ftype == 'archive.tar.xz':
            links = extract_archive_tar(
                self.store, filename, tag, checksums, size, contentsource)
        elif ftype == 'root-image.gz':
            links = insert_root_image(
                self.store, tag, checksums, size, contentsource)
        else:
            links = insert_file(
                self.store, filename, tag, checksums, size, contentsource)

        osystem = get_os_from_product(item)

        # link_resources creates a hardlink for every subarch. Every Ubuntu
        # product in a SimpleStream contains a list of subarches which list
        # what subarches are a subset of that subarch. For example Xenial
        # ga-16.04 has the subarches list hwe-{p,q,r,s,t,u,v,w},ga-16.04.
        # Kernel flavors are the same arch, the only difference is the kernel
        # config. So ga-16.04-lowlatency has the same subarch list as ga-16.04.
        # If we create hard links for all subarches a kernel flavor may
        # overwrite the generic kernel hard link. This happens if a kernel
        # flavor is processed after the generic kernel. Since MAAS doesn't use
        # the other hard links only create hard links for the subarch of the
        # product we have and a rolling link if it's a rolling kernel.
        if 'subarch' in item:
            # MAAS uses the 'generic' subarch when it doesn't know which
            # subarch to use. This happens during enlistment and commissioning.
            # Allow the 'generic' kflavor to own the 'generic' hardlink.
            if item.get('kflavor') == 'generic':
                subarches = {item['subarch'], 'generic'}
            else:
                subarches = {item['subarch']}
        else:
            subarches = {'generic'}

        # NOTE(review): assumes 'subarch' is present (with at least two
        # "-"-separated parts) whenever 'rolling' is set; item['subarch']
        # would raise KeyError in the {'generic'} branch above -- confirm
        # upstream guarantees this.
        if item.get('rolling', False):
            subarch_parts = item['subarch'].split('-')
            subarch_parts[1] = 'rolling'
            subarches.add('-'.join(subarch_parts))
        link_resources(
            snapshot_path=self.root_path, links=links,
            osystem=osystem, arch=item['arch'], release=item['release'],
            label=item['label'], subarches=subarches,
            bootloader_type=item.get('bootloader-type'))
Exemple #10
0
    def remove_item(self, data, src, target, pedigree):
        """See `ObjectStoreMirrorWriter`.

        Remove items from our local mirror that are no longer available
        upstream.
        """
        if not self.delete:
            # Caller didn't ask for obsolete items to be deleted.
            return

        super(MAASMirrorWriter, self).remove_item(data, src, target, pedigree)
        meta = util.products_exdata(src, pedigree)

        name = get_target_name(**meta)
        tgt_admin_delete(name)
        conf_path = get_conf_path(self.local_path, name)
        remove(conf_path)

        shutil.rmtree(self._target_dir(meta))
Exemple #11
0
    def insert_item(self, data, src, target, pedigree, contentsource):
        """Insert item received.

        src and target are top level products:1.0
        data is src['products'][ped[0]]['versions'][ped[1]]['items'][ped[2]]
        contentsource is a ContentSource if 'path' exists in data or None

        Args:
            data: item entry from the source products tree (see above).
            src: top-level products:1.0 tree being mirrored from.
            target: top-level products:1.0 tree being mirrored to.
            pedigree: (product_name, version_name, item_name) tuple
                locating ``data`` within ``src``.
            contentsource: ContentSource for the item's content if
                'path' exists in data, else None.
        """
        data = s_util.products_exdata(src, pedigree)
        if 'path' in data:
            data.update({'item_url': contentsource.url})
        self.json_entries.append(data)
    def insert_item(self, data, src, target, pedigree, contentsource):
        """Fetch a local copy of the item (unless configured otherwise)
        and pass it, with its flattened metadata, to the 'insert_item'
        hook.  Returns whatever the hook invocation returns.
        """
        mdata = util.products_exdata(src, pedigree)

        tmp_path = None
        tmp_del = None
        extra = {}
        if 'path' in data:
            extra.update({'item_url': contentsource.url})
            if not self.config.get('item_skip_download', False):
                try:
                    # NOTE(review): tmp_del presumably marks whether the
                    # local copy is ours to delete -- confirm against
                    # util.get_local_copy.
                    (tmp_path, tmp_del) = util.get_local_copy(contentsource)
                    extra['path_local'] = tmp_path
                finally:
                    contentsource.close()

        try:
            ret = self.call_hook('insert_item', data=mdata, extra=extra)
        finally:
            # Remove the downloaded copy even when the hook raises.
            if tmp_del and os.path.exists(tmp_path):
                os.unlink(tmp_path)
        return ret
Exemple #13
0
def query_ptree(ptree, max_num=None, ifilters=None, path2url=None):
    """Flatten matching items out of a products tree.

    Walks products (sorted by name) and, per product, at most max_num
    versions (newest version name first).  Each item is flattened with
    products_exdata; items failing ifilters are dropped, and when
    path2url is given an 'item_url' is added for items with a 'path'.
    """
    results = []
    for prodname, proddata in sorted(ptree.get('products', {}).items()):
        if 'versions' not in proddata:
            continue
        versions = proddata['versions']
        for cur, vername in enumerate(sorted(versions, reverse=True)):
            if max_num is not None and cur >= max_num:
                break
            for itemname in sorted(versions[vername].get('items', {})):
                flat = sutil.products_exdata(
                    ptree, (prodname, vername, itemname))
                if (ifilters is not None and len(ifilters) > 0 and
                        not filters.filter_dict(ifilters, flat)):
                    continue
                if path2url and 'path' in flat:
                    flat['item_url'] = path2url(flat['path'])
                results.append(flat)
    return results
    def load_products(self, path=None, content_id=None):
        """
        Load metadata for all currently uploaded active images in Glance.

        Uses glance as the definitive store, but loads metadata from existing
        simplestreams indexes as well.
        """
        # NOTE(review): both parameters are effectively unused -- 'path' is
        # reassigned below before its first use and 'content_id' is never
        # read; self.content_id is used instead.
        my_cid = self.content_id

        # glance is the definitive store.  Any data loaded from the store
        # is secondary.
        store_t = None
        if self.store:
            try:
                path = self._cidpath(my_cid)
                store_t = util.load_content(self.store.source(path).read())
            except IOError as e:
                # A missing index is fine -- nothing has been stored yet.
                if e.errno != errno.ENOENT:
                    raise
        if not store_t:
            store_t = empty_iid_products(my_cid)

        glance_t = empty_iid_products(my_cid)

        images = self.gclient.images.list()
        for image in images:
            if self.glance_api_version == "1":
                image = image.to_dict()
                props = image['properties']
            else:
                # v2 images are already dict-like; copied (presumably
                # defensively -- props is only read below).
                props = copy.deepcopy(image)

            # Only consider active images owned by this tenant that were
            # published under our content id.
            if image['owner'] != self.tenant_id:
                continue

            if props.get('content_id') != my_cid:
                continue

            if image.get('status') != "active":
                LOG.warn("Ignoring inactive image %s with status '%s'" %
                         (image['id'], image.get('status')))
                continue

            source_content_id = props.get('source_content_id')

            product = props.get('product_name')
            version = props.get('version_name')
            item = props.get('item_name')
            if not (version and product and item and source_content_id):
                LOG.warn("%s missing required fields" % image['id'])
                continue

            # get data from the datastore for this item, if it exists
            # and then update that with glance data (just in case different)
            try:
                item_data = util.products_exdata(store_t, (
                    product,
                    version,
                    item,
                ),
                                                 include_top=False,
                                                 insert_fieldnames=False)
            except KeyError:
                item_data = {}

            # If original simplestreams-metadata is stored on the image,
            # use that as well.
            if 'simplestreams_metadata' in props:
                simplestreams_metadata = json.loads(
                    props.get('simplestreams_metadata'))
            else:
                simplestreams_metadata = {}
            item_data.update(simplestreams_metadata)

            item_data.update({'name': image['name'], 'id': image['id']})
            if 'owner_id' not in item_data:
                item_data['owner_id'] = self.tenant_id

            util.products_set(glance_t, item_data, (
                product,
                version,
                item,
            ))

        # Stamp every product with this writer's region and auth endpoint.
        for product in glance_t['products']:
            glance_t['products'][product]['region'] = self.region
            glance_t['products'][product]['endpoint'] = self.auth_url

        return glance_t
def filter_item(filters, data, src, pedigree):
    """Apply filter list to a products entity, flattening it first."""
    flat = util.products_exdata(src, pedigree)
    return filter_dict(filters, flat)
Exemple #16
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     # Flatten the entry and keep it as an item object.
     flat = util.products_exdata(src, pedigree)
     self.items.append(dict_to_item(flat))
 def remove_version(self, data, src, target, pedigree):
     # Hand the flattened version data to the 'remove_version' hook.
     flat = util.products_exdata(src, pedigree)
     return self.call_hook('remove_version', data=flat)
 def remove_item(self, data, src, target, pedigree):
     # Flattens from 'target' (not 'src'); presumably because the item
     # being removed exists only in the target -- confirm.
     flat = util.products_exdata(target, pedigree)
     return self.call_hook('remove_item', data=flat)
Exemple #19
0
    def sync_products(self, reader, path=None, src=None, content=None):
        """Synchronise products from a source tree into this target.

        Filters items, versions, and products; resolves which versions
        to add or remove; obtains item content via `reader`; and writes
        out the updated target index at the end.
        """
        (src, content) = _get_data_content(path, src, content, reader)

        util.expand_tree(src)

        check_tree_paths(src)

        content_id = src['content_id']
        target = self.load_products(path, content_id)
        if not target:
            target = util.stringitems(src)

        util.expand_tree(target)

        stree = src.get('products', {})
        if 'products' not in target:
            target['products'] = {}

        tproducts = target['products']

        filtered_products = []
        prodname = None

        # Apply filters to items before filtering versions
        for prodname, product in list(stree.items()):

            for vername, version in list(product.get('versions', {}).items()):
                for itemname, item in list(version.get('items', {}).items()):
                    pgree = (prodname, vername, itemname)
                    if not self.filter_item(item, src, target, pgree):
                        LOG.debug("Filtered out item: %s/%s", itemname, item)
                        # Prune now-empty versions/products as items drop out.
                        del stree[prodname]['versions'][vername]['items'][
                            itemname]
                        if not stree[prodname]['versions'][vername].get(
                                'items', {}):
                            del stree[prodname]['versions'][vername]
                        if not stree[prodname].get('versions', {}):
                            del stree[prodname]

        for prodname, product in stree.items():
            if not self.filter_product(product, src, target, (prodname, )):
                filtered_products.append(prodname)
                continue

            if prodname not in tproducts:
                tproducts[prodname] = util.stringitems(product)
            tproduct = tproducts[prodname]
            if 'versions' not in tproduct:
                tproduct['versions'] = {}

            src_filtered_items = []

            # Wraps filter_version, remembering which versions it dropped
            # so 'delete_filtered_items' handling below can act on them.
            def _filter(itemkey):
                ret = self.filter_version(product['versions'][itemkey], src,
                                          target, (prodname, itemkey))
                if not ret:
                    src_filtered_items.append(itemkey)
                return ret

            (to_add, to_remove) = util.resolve_work(
                src=list(product.get('versions', {}).keys()),
                target=list(tproduct.get('versions', {}).keys()),
                maxnum=self.config.get('max_items'),
                keep=self.config.get('keep_items'),
                itemfilter=_filter)

            LOG.info("%s/%s: to_add=%s to_remove=%s", content_id, prodname,
                     to_add, to_remove)

            tversions = tproduct['versions']
            skipped_versions = []
            for vername in to_add:
                version = product['versions'][vername]

                if vername not in tversions:
                    tversions[vername] = util.stringitems(version)

                added_items = []
                for itemname, item in version.get('items', {}).items():
                    pgree = (prodname, vername, itemname)

                    added_items.append(itemname)

                    ipath = item.get('path', None)
                    ipath_cs = None
                    if ipath and reader:
                        if self.checksumming_reader:
                            # Verify size/checksums while streaming content.
                            flat = util.products_exdata(src, pgree)
                            ipath_cs = cs.ChecksummingContentSource(
                                csrc=reader.source(ipath),
                                size=flat.get('size'),
                                checksums=checksum_util.item_checksums(flat))
                        else:
                            ipath_cs = reader.source(ipath)

                    self.insert_item(item, src, target, pgree, ipath_cs)

                if len(added_items):
                    # do not insert versions that had all items filtered
                    self.insert_version(version, src, target,
                                        (prodname, vername))
                else:
                    skipped_versions.append(vername)

            for vername in skipped_versions:
                if vername in tproduct['versions']:
                    del tproduct['versions'][vername]

            if self.config.get('delete_filtered_items', False):
                tkeys = tproduct.get('versions', {}).keys()
                for v in src_filtered_items:
                    if v not in to_remove and v in tkeys:
                        to_remove.append(v)
                LOG.info("After deletions %s/%s: to_add=%s to_remove=%s",
                         content_id, prodname, to_add, to_remove)

            # Remove obsolete versions item-by-item, then the version itself.
            for vername in to_remove:
                tversion = tversions[vername]
                for itemname in list(tversion.get('items', {}).keys()):
                    self.remove_item(tversion['items'][itemname], src, target,
                                     (prodname, vername, itemname))

                self.remove_version(tversion, src, target, (prodname, vername))
                del tversions[vername]

            self.insert_product(tproduct, src, target, (prodname, ))

        # FIXME: below will remove products if they're in target
        # (result of load_products) but not in the source products.
        # that could accidentally delete a lot.
        #
        del_products = []
        if self.config.get('delete_products', False):
            del_products.extend(
                [p for p in list(tproducts.keys()) if p not in stree])
        if self.config.get('delete_filtered_products', False):
            del_products.extend(
                [p for p in filtered_products if p not in stree])

        for prodname in del_products:
            # FIXME: we remove a product here, but unless that acts
            # recursively, nothing will remove the items in that product
            self.remove_product(tproducts[prodname], src, target, (prodname, ))
            del tproducts[prodname]

        self.insert_products(path, target, content)
 def insert_product(self, data, src, target, pedigree):
     # Flatten product data and forward it to the 'insert_product' hook.
     flat = util.products_exdata(src, pedigree)
     return self.call_hook('insert_product', data=flat)
 def _call_filter(self, name, src, pedigree):
     # Run the named hook on the flattened data; exit status 0 means
     # "keep", 1 means "filter out" (both are accepted return codes).
     flattened = util.products_exdata(src, pedigree)
     (rc, _output) = self.call_hook(name, data=flattened, rcs=[0, 1])
     return rc == 0
    def _insert_item(self, data, src, target, pedigree, contentsource):
        """
        Upload image into glance and add image metadata to simplestreams index.

        `data` is the metadata for a particular image file from the source:
            unused since all that data is present in the `src` entry for
            the corresponding image as well.
        `src` contains the entire simplestreams index from the image syncing
            source.
        `target` is the simplestreams index for currently available images
            in glance (generated by load_products()) to add this item to.
        `pedigree` is a "path" to get to the `data` for the image we desire,
            a tuple of (product_name, version_name, image_type).
        `contentsource` is a ContentSource to download the actual image data
            from.
        """
        # Extract and flatten metadata for a product image matching
        #   (product-name, version-name, image-type)
        # from the tuple `pedigree` in the source simplestreams index.
        flattened_img_data = util.products_exdata(src,
                                                  pedigree,
                                                  include_top=False)

        full_image_name = "{}{}".format(
            self.name_prefix,
            flattened_img_data.get('pubname', flattened_img_data.get('name')))
        if not full_image_name.endswith(flattened_img_data['item_name']):
            full_image_name += "-{}".format(flattened_img_data['item_name'])

        # Download images locally into a temporary file.
        tmp_path, new_size, new_md5 = self.download_image(
            contentsource, flattened_img_data)

        hypervisor_mapping = self.config.get('hypervisor_mapping', False)

        glance_props = self.create_glance_properties(target['content_id'],
                                                     src['content_id'],
                                                     flattened_img_data,
                                                     hypervisor_mapping)
        create_kwargs = self.prepare_glance_arguments(full_image_name,
                                                      flattened_img_data,
                                                      new_md5, new_size,
                                                      glance_props)

        target_sstream_item = self.adapt_source_entry(flattened_img_data,
                                                      hypervisor_mapping,
                                                      full_image_name, new_md5,
                                                      new_size)

        try:
            if self.glance_api_version == "1":
                # v1 takes the image content as a file object at create
                # time.  Open it in a context manager so the handle is
                # closed afterwards (previously it was leaked).
                with open(tmp_path, 'rb') as image_fp:
                    create_kwargs['data'] = image_fp
                    glance_image = self.gclient.images.create(**create_kwargs)
            else:
                # v2 applies properties via a separate update call after
                # creation, so strip them from the create arguments.
                _properties = create_kwargs.pop('properties')
                glance_image = self.gclient.images.create(**create_kwargs)

            target_sstream_item['id'] = glance_image.id

            if self.glance_api_version == "2":
                # Upload the content, then apply the retained properties.
                with open(tmp_path, 'rb') as image_fp:
                    self.gclient.images.upload(glance_image.id, image_fp)
                self.gclient.images.update(glance_image.id, **_properties)

            # Validate the image checksum and size. This will throw an
            # IOError if they do not match.
            self.validate_image(glance_image.id, new_md5, new_size)

            print("created %s: %s" % (glance_image.id, full_image_name))

        finally:
            # Always remove the downloaded temporary copy.
            if tmp_path and os.path.exists(tmp_path):
                os.unlink(tmp_path)

        util.products_set(target, target_sstream_item, pedigree)
        # We can safely ignore path and content arguments since they are
        # unused in insert_products below.
        self.insert_products(None, target, None)
Exemple #23
0
 def filter_version(self, data, src, target, pedigree):
     """Overridable from `BasicMirrorWriter`."""
     flat = products_exdata(src, pedigree)
     return self.product_mapping.contains(flat)
Exemple #24
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     # Record (pedigree, path, size) for entries with downloadable content.
     flat = util.products_exdata(src, pedigree)
     if 'size' in flat and 'path' in flat:
         self.downloading.append((pedigree, flat['path'], int(flat['size'])))
 def insert_item(self, data, src, target, pedigree, contentsource):
     # Track download size per pubname.
     flat = util.products_exdata(src, pedigree)
     if all(key in flat for key in ('size', 'path', 'pubname')):
         self.items[flat['pubname']] = int(flat['size'])
Exemple #26
0
 def remove_item(self, data, src, target, pedigree):
     # Record (pedigree, path, size) of entries scheduled for removal.
     flat = util.products_exdata(src, pedigree)
     if 'size' in flat and 'path' in flat:
         self.removing.append((pedigree, flat['path'], int(flat['size'])))
Exemple #27
0
 def insert_item(self, data, src, target, pedigree, contentsource):
     # Flatten metadata for this pedigree and store it as an item.
     self.items.append(dict_to_item(util.products_exdata(src, pedigree)))