def fetch_catalog(self, catalog, url): feed_conf = self.get_conf_name(catalog) feed_conf_dir = os.path.dirname(feed_conf) mkdir2(self.local_node, feed_conf_dir) fetch_isp(url, feed_conf, "text/plain") print "fetched ", url, feed_conf return feed_conf
def retrieve_updates(self, guid):
    """Fetch the remote updates feed for *guid* and return update items.

    NOTE(review): update checking is currently DISABLED -- the first
    statement returns an empty list unconditionally, so everything below
    it is unreachable. The dead code is kept (and cleaned up: py2-only
    `except Exception, ex` replaced with `as ex` to match the sibling
    populate_appliances, and the bare `except:` narrowed to OSError) in
    case the feature is re-enabled.

    Returns a list of update items; always [] while disabled, and also
    [] on any fetch error.
    """
    return []  # kill-switch: updates disabled; code below is unreachable

    update_items = []
    try:
        # Fall back to a fresh temp file when the configured updates
        # file is not writable (e.g. read-only install location).
        if not os.access(self.updates_file, os.W_OK):
            t_handle, t_name = tempfile.mkstemp(prefix='updates.xml')
            self.updates_file = t_name
            os.close(t_handle)
        self.update_url += '?guid=' + guid
        fetch_isp(self.update_url, self.updates_file, '/xml')
    except Exception as ex:
        traceback.print_exc()
        LOGGER.error('Error fetching updates:' + to_str(ex))
        # Best-effort cleanup of a possibly partial download.
        try:
            if os.path.exists(self.updates_file):
                os.remove(self.updates_file)
        except OSError:
            pass
    return update_items
def populate_appliances(self, feed_name): feed = self.feeds.get(feed_name) if feed is None: return None cache_dir = self.get_feed_cache_dir(feed_name) utils.mkdir2(self.local_node, cache_dir) cache_file = self.get_feed_file_name(feed_name) feed_dest = os.path.join(cache_dir, cache_file) url = self.get_feed_url(feed_name) try: fetch_isp(url, feed_dest, "/xml") except Exception as ex: print "Error downloading feed ", url, ex print "Will try to use cached copy if available." details = [] if self.local_node.node_proxy.file_exists(feed_dest): details = self._make_details(feed_name, feed_dest) else: print "Skipping ", feed_dest, " not found." return details