def _make_details(self, feed_name, feed_file):
        """Parse an appliance feed XML file into Appliance rows and entry dicts.

        For every <entry>/<download> pair in the feed, build a merged dict
        (provider/feed metadata + download metadata) and a corresponding
        ``Appliance`` object.  All existing appliances for this feed's
        provider are deleted from the DB and replaced with the fresh data.

        :param feed_name: feed name, used to look up provider metadata.
        :param feed_file: path (or file object) of the feed XML document.
        :return: list of merged feed-entry dicts, one per <download> node.
        """
        feed_dom = xml.dom.minidom.parse(feed_file)
        a_list = []
        appliance_list = []

        def _parse_bool(text):
            # Safe replacement for eval() on feed-supplied "True"/"False"
            # strings: eval() would execute arbitrary feed content as code.
            return text.strip().lower() in ("true", "1")

        for entry in feed_dom.getElementsByTagName("entry"):
            info = {}
            # Provider-level metadata is identical for every entry in the feed.
            info["provider_id"] = self.get_provider_id(feed_name)
            info["provider"] = self.get_provider(feed_name)
            info["provider_url"] = self.get_provider_url(feed_name)
            info["provider_logo_url"] = self.get_logo_url(feed_name)

            for text in ("title", "id", "popularity_score", "description",
                         "short_description"):
                info[text] = getText(entry, text)

            populate_node(info, entry, "link", {"link": "href"})

            for download_node in entry.getElementsByTagName("download"):
                download_info = {}
                populate_attrs(download_info, download_node, {
                    "href": "href",
                    "type": "type"
                })

                populate_node(download_info, download_node, "platform",
                              {"platform": "name"})

                populate_node(
                    download_info, download_node, "package", {
                        "filename": "filename",
                        "compressed": "compressed",
                        "archive": "archive",
                        "size": "size",
                        "installed_size": "installed_size"
                    })
                populate_node(download_info, download_node, "kernel", {
                    "PAE": "PAE",
                    "arch": "arch",
                    "is_hvm": "is_hvm"
                })

                # normalize x86, x86_32 to x86
                if download_info.get("arch") and \
                        download_info["arch"].upper() == "X86_32":
                    download_info["arch"] = "x86"

                download_info["updated"] = getText(download_node, "updated")

                # Merge feed and download info into one feed entry; download
                # keys must not shadow keys already set from the feed.
                feed_info = dict(info)
                for k in download_info.keys():
                    if feed_info.get(k) is None:
                        feed_info[k] = download_info[k]
                    else:
                        # print() call form works on both Python 2 and 3.
                        print("ERROR : collision in feed and download entry")

                appliance = Appliance(to_unicode(info['title']))
                appliance.catalog_id = to_unicode(info['id'])
                appliance.provider_id = to_unicode(info['provider_id'])
                appliance.link_href = to_unicode(info['link'])
                appliance.description = info['description']
                appliance.popularity_score = to_unicode(
                    info['popularity_score'])
                appliance.short_description = info['short_description']
                # Empty string and None both mean "not specified" in the feed.
                if download_info['PAE']:
                    appliance.PAE = _parse_bool(download_info['PAE'])
                appliance.arch = to_unicode(download_info['arch'])
                appliance.archive = to_unicode(download_info['archive'])
                appliance.compression_type = to_unicode(
                    download_info['compressed'])
                appliance.download_href = to_unicode(download_info['href'])
                appliance.filename = to_unicode(download_info['filename'])
                appliance.installed_size = download_info['installed_size']
                if download_info['is_hvm']:
                    appliance.is_hvm = _parse_bool(download_info['is_hvm'])
                appliance.platform = to_unicode(download_info['platform'])
                appliance.size = download_info['size']
                appliance.type = to_unicode(download_info['type'])
                appliance.updated_date = to_unicode(download_info['updated'])
                appliance.version = to_unicode(
                    download_info.get('version', None))
                appliance_list.append(appliance)

                a_list.append(feed_info)

        provider_id = self.get_provider_id(feed_name)
        # delete existing appliances for this feed and add new data
        DBHelper().delete_all(
            Appliance, [], [Appliance.provider_id == to_unicode(provider_id)])
        DBHelper().add_all(appliance_list)

        return a_list
# Exemple #2
# 0
            LOGGER.error("Error fetching updates:"+to_str(ex))
            try:
                if os.path.exists(self.updates_file):
                    os.remove(self.updates_file)
            except:
                pass
            return update_items

        if os.path.exists(self.updates_file):
            updates_dom = xml.dom.minidom.parse(self.updates_file)
            for entry in updates_dom.getElementsByTagName("entry"):
                info = {}
                for text in ("title","link","description", "pubDate",
                             "product_id", "product_version","platform"):
                    info[text] = getText(entry, text)
                populate_node(info,entry,"link",
                          { "link" : "href"})
                update_items.append(info)

        # cleanup the file
        try:
            if os.path.exists(self.updates_file):
                os.remove(self.updates_file)
        except:
            pass

        return update_items

    def send_update_mails(self,updates):
        """Send notification e-mails about *updates* to the admin group.

        NOTE(review): the body appears truncated at this point in the file --
        only the group lookup and mailer construction are visible; confirm
        the remainder against the original source.
        """

        # Resolve the recipients: members of the 'adminGroup' group.
        grp=Group.by_group_name(to_unicode('adminGroup'))
        emailer=EmailManager()
    def _make_details(self, feed_name, feed_file):
        """Parse an appliance feed XML file into Appliance rows and entry dicts.

        For every <entry>/<download> pair in the feed, build a merged dict
        (provider/feed metadata + download metadata) and a corresponding
        ``Appliance`` object.  All existing appliances for this feed's
        provider are deleted from the DB and replaced with the fresh data.

        :param feed_name: feed name, used to look up provider metadata.
        :param feed_file: path (or file object) of the feed XML document.
        :return: list of merged feed-entry dicts, one per <download> node.
        """
        feed_dom = xml.dom.minidom.parse(feed_file)
        a_list = []
        appliance_list = []

        def _parse_bool(text):
            # Safe replacement for eval() on feed-supplied "True"/"False"
            # strings: eval() would execute arbitrary feed content as code.
            return text.strip().lower() in ("true", "1")

        for entry in feed_dom.getElementsByTagName("entry"):
            info = {}
            # Provider-level metadata is identical for every entry in the feed.
            info["provider_id"] = self.get_provider_id(feed_name)
            info["provider"] = self.get_provider(feed_name)
            info["provider_url"] = self.get_provider_url(feed_name)
            info["provider_logo_url"] = self.get_logo_url(feed_name)

            for text in ("title", "id", "popularity_score", "description", "short_description"):
                info[text] = getText(entry, text)

            populate_node(info, entry, "link", {"link": "href"})

            for download_node in entry.getElementsByTagName("download"):
                download_info = {}
                populate_attrs(download_info, download_node, {"href": "href", "type": "type"})

                populate_node(download_info, download_node, "platform", {"platform": "name"})

                populate_node(
                    download_info,
                    download_node,
                    "package",
                    {
                        "filename": "filename",
                        "compressed": "compressed",
                        "archive": "archive",
                        "size": "size",
                        "installed_size": "installed_size",
                    },
                )
                populate_node(
                    download_info, download_node, "kernel", {"PAE": "PAE", "arch": "arch", "is_hvm": "is_hvm"}
                )

                # normalize x86, x86_32 to x86
                if download_info.get("arch") and download_info["arch"].upper() == "X86_32":
                    download_info["arch"] = "x86"

                download_info["updated"] = getText(download_node, "updated")

                # Merge feed and download info into one feed entry; download
                # keys must not shadow keys already set from the feed.
                feed_info = dict(info)
                for k in download_info.keys():
                    if feed_info.get(k) is None:
                        feed_info[k] = download_info[k]
                    else:
                        # print() call form works on both Python 2 and 3.
                        print("ERROR : collision in feed and download entry")

                appliance = Appliance(to_unicode(info["title"]))
                appliance.catalog_id = to_unicode(info["id"])
                appliance.provider_id = to_unicode(info["provider_id"])
                appliance.link_href = to_unicode(info["link"])
                appliance.description = info["description"]
                appliance.popularity_score = to_unicode(info["popularity_score"])
                appliance.short_description = info["short_description"]
                # Empty string and None both mean "not specified" in the feed.
                if download_info["PAE"]:
                    appliance.PAE = _parse_bool(download_info["PAE"])
                appliance.arch = to_unicode(download_info["arch"])
                appliance.archive = to_unicode(download_info["archive"])
                appliance.compression_type = to_unicode(download_info["compressed"])
                appliance.download_href = to_unicode(download_info["href"])
                appliance.filename = to_unicode(download_info["filename"])
                appliance.installed_size = download_info["installed_size"]
                if download_info["is_hvm"]:
                    appliance.is_hvm = _parse_bool(download_info["is_hvm"])
                appliance.platform = to_unicode(download_info["platform"])
                appliance.size = download_info["size"]
                appliance.type = to_unicode(download_info["type"])
                appliance.updated_date = to_unicode(download_info["updated"])
                appliance.version = to_unicode(download_info.get("version", None))
                appliance_list.append(appliance)

                a_list.append(feed_info)

        provider_id = self.get_provider_id(feed_name)
        # delete existing appliances for this feed and add new data
        DBHelper().delete_all(Appliance, [], [Appliance.provider_id == to_unicode(provider_id)])
        DBHelper().add_all(appliance_list)

        return a_list
# Exemple #4
# 0
            LOGGER.error("Error fetching updates:" + to_str(ex))
            try:
                if os.path.exists(self.updates_file):
                    os.remove(self.updates_file)
            except:
                pass
            return update_items

        if os.path.exists(self.updates_file):
            updates_dom = xml.dom.minidom.parse(self.updates_file)
            for entry in updates_dom.getElementsByTagName("entry"):
                info = {}
                for text in ("title", "link", "description", "pubDate",
                             "product_id", "product_version", "platform"):
                    info[text] = getText(entry, text)
                populate_node(info, entry, "link", {"link": "href"})
                update_items.append(info)

        # cleanup the file
        try:
            if os.path.exists(self.updates_file):
                os.remove(self.updates_file)
        except:
            pass

        return update_items

    def send_update_mails(self, updates):
        """Send notification e-mails about *updates* to the admin group.

        NOTE(review): the body appears truncated at this point in the file --
        only the group lookup and mailer construction are visible; confirm
        the remainder against the original source.
        """

        # Resolve the recipients: members of the 'adminGroup' group.
        grp = Group.by_group_name(to_unicode('adminGroup'))
        emailer = EmailManager()