def write_streams(out_d, trees, updated, namer=None, condense=True):
    """Write a simplestreams index plus one product file per content_id.

    :param out_d: root directory to write under.
    :param trees: mapping of content_id -> product tree.
    :param updated: timestamp passed through to generate_index.
    :param namer: naming strategy class; defaults to FileNamer.
    :param condense: when True, condense each tree before writing,
        keeping 'path', 'sha256', 'md5' and 'size' sticky on the items.
    :return: list of absolute filenames written (index first).
    """
    if namer is None:
        namer = FileNamer
    index = generate_index(trees, updated, namer)
    pending = [(namer.get_index_path(), index)]
    # Condense a copy so the caller's input trees are never mutated.
    trees = deepcopy(trees)
    for content_id, content in trees.items():
        if condense:
            util.products_condense(
                content, sticky=['path', 'sha256', 'md5', 'size'])
        pending.append((index['index'][content_id]['path'], content))
    written = []
    for relpath, payload in pending:
        dest = os.path.join(out_d, relpath)
        util.mkdir_p(os.path.dirname(dest))
        json_dump(payload, dest)
        written.append(dest)
    return written
Example #2
0
def write_juju_streams(out_d, trees, updated, sticky):
    """Write Juju-style streams using the Windows-friendly namer.

    Based on simplestreams.json2streams.write_juju_streams +
    simplestreams.generate_simplestreams.write_streams,
    but allows sticky to be specified.

    :param out_d: root directory to write under.
    :param trees: mapping of content_id -> product tree.
    :param updated: timestamp passed through to generate_index.
    :param sticky: keys to keep sticky during products_condense.
    :return: list of absolute filenames written (index first).
    """
    namer = WindowsFriendlyNamer
    index = generate_index(trees, updated, namer)
    outputs = [(namer.get_index_path(), index)]
    # Condense a copy so products_condense cannot mutate the input.
    trees = deepcopy(trees)
    for cid in trees:
        tree = trees[cid]
        util.products_condense(tree, sticky=sticky)
        outputs.append((index['index'][cid]['path'], tree))
    filenames = []
    for relname, payload in outputs:
        target = os.path.join(out_d, relname)
        util.mkdir_p(os.path.dirname(target))
        json_dump(payload, target)
        filenames.append(target)
    return filenames
Example #3
0
    def test_condense_unicode(self):
        """A unicode value repeated in every version hoists to the top."""
        versions = {'1': {'A': u'B'}, '2': {'A': u'B'}}
        tree = {'products': {'P1': {'versions': versions}}}
        expected = {
            'A': u'B',
            'products': {'P1': {'versions': {'1': {}, '2': {}}}},
        }
        util.products_condense(tree, top='products')
        self.assertEqual(tree, expected)
Example #4
0
    def test_default_top_is_version(self):
        """Without 'top', values condense only to the product level.

        The default top has to be version for backwards compat.
        """
        versions = {'1': {'A': 'B'}, '2': {'A': 'B'}}
        tree = {'products': {'P1': {'versions': versions}}}
        expected = {
            'products': {'P1': {'A': 'B',
                                'versions': {'1': {}, '2': {}}}},
        }
        util.products_condense(tree)
        self.assertEqual(tree, expected)
Example #5
0
    def test_repeats_removed(self):
        """A value already present on the product is deduplicated upward."""
        versions = {'1': {'A': 'B'}, '2': {'A': 'B'}}
        tree = {'products': {'P1': {'A': 'B', 'versions': versions}}}
        expected = {
            'A': 'B',
            'products': {'P1': {'versions': {'1': {}, '2': {}}}},
        }
        util.products_condense(tree, top='products')
        self.assertEqual(tree, expected)
Example #6
0
    def test_condense_no_arch(self):
        """Only versions whose items all share a value get it hoisted."""
        tree = {'products': {'P1': {'versions': {
            '1': {'items': {'thing1': {'arch': 'amd64'},
                            'thing2': {'arch': 'amd64'}}},
            '2': {'items': {'thing3': {}}}}}}}

        # Version '1' condenses 'arch'; version '2' has no 'arch' to hoist.
        expected = {'products': {'P1': {'versions': {
            '1': {'arch': 'amd64',
                  'items': {'thing1': {},
                            'thing2': {}}},
            '2': {'items': {'thing3': {}}}}}}}

        util.products_condense(tree)
        self.assertEqual(tree, expected)
Example #7
0
    def insert_products(self, path, target, content):
        """Insert or update one content_id's product tree in the store.

        Prunes and condenses a copy of ``target``, writes it to its
        per-content-id path, then inserts or updates
        ``streams/v1/index.json`` to reference it.

        NOTE(review): ``path`` and ``content`` are unused in this body;
        presumably the signature mirrors a common mirror-writer
        interface -- confirm against callers.
        """
        # Nothing to do without a backing store.
        if not self.store:
            return

        # Work on a copy: prune and condense mutate the tree in place.
        tree = copy.deepcopy(target)
        util.products_prune(tree, preserve_empty_products=True)

        # stop these items from copying up when we call condense
        sticky = ['ftype', 'md5', 'sha256', 'size', 'name', 'id']

        # LP: #1329805. Juju expects these on the item.
        if self.config.get('sticky_endpoint_region', True):
            sticky += ['endpoint', 'region']

        util.products_condense(tree, sticky=sticky)

        tsnow = util.timestamp()
        tree['updated'] = tsnow

        dpath = self._cidpath(tree['content_id'])
        LOG.info("writing data: %s", dpath)
        self.store.insert_content(dpath, util.dump_data(tree))

        # now insert or update an index
        ipath = "streams/v1/index.json"
        try:
            index = util.load_content(self.store.source(ipath).read())
        except IOError as exc:
            # A missing index is expected on first write and seeds a
            # fresh one; any other IOError is real and propagates.
            if exc.errno != errno.ENOENT:
                raise
            index = {
                "index": {},
                'format': 'index:1.0',
                'updated': util.timestamp()
            }

        # Point the index entry at the freshly written data file.
        index['index'][tree['content_id']] = {
            'updated': tsnow,
            'datatype': 'image-ids',
            'clouds': [{
                'region': self.region,
                'endpoint': self.auth_url
            }],
            'cloudname': self.cloudname,
            'path': dpath,
            'products': list(tree['products'].keys()),
            'format': tree['format'],
        }
        LOG.info("writing data: %s", ipath)
        self.store.insert_content(ipath, util.dump_data(index))
Example #8
0
def write_juju_streams(out_d, trees, updated, sticky):
    """Write Juju streams with caller-specified sticky keys.

    Based on simplestreams.json2streams.write_juju_streams +
    simplestreams.generate_simplestreams.write_streams,
    but allows sticky to be specified.

    :return: list of absolute filenames written (index first).
    """
    namer = WindowsFriendlyNamer
    index = generate_index(trees, updated, namer)
    # Deep-copy so products_condense does not modify the input trees.
    trees = deepcopy(trees)
    entries = [(namer.get_index_path(), index)]
    entries.extend(
        (index['index'][cid]['path'], tree)
        for cid, tree in trees.items())
    results = []
    for relname, payload in entries:
        if payload is not index:
            util.products_condense(payload, sticky=sticky)
        full_path = os.path.join(out_d, relname)
        util.mkdir_p(os.path.dirname(full_path))
        json_dump(payload, full_path)
        results.append(full_path)
    return results