def update(self):
    '''update the store with package list'''
    hostid = self.hosts.current_host['hostid']
    LOG.debug("Updating package list")
    packages = self.distro.compute_local_packagelist()
    LOG.debug("Creating the checksum")
    # A reliable checksum needs a stable serialization of the dict:
    # pprint.pformat() sorts dictionary keys, so its output does not
    # depend on insertion order. hashlib consumes bytes only, hence
    # the utf-8 encoding.
    digest = hashlib.sha224(pformat(packages).encode('utf-8')).hexdigest()
    LOG.debug("Package list need refresh")
    self.package_list[hostid] = {'valid': True, 'package_list': packages}
    package_list_path = os.path.join(
        self.hosts.get_currenthost_dir(),
        '%s_%s' % (PACKAGE_LIST_PREFIX, hostid))
    utils.save_json_file_update(
        package_list_path, self.package_list[hostid]['package_list'])
    # only rewrite the host file when the checksum actually changed
    if self.hosts.current_host['packages_checksum'] != digest:
        self.hosts.current_host['packages_checksum'] = digest
        self.hosts.save_current_host()
    LOG.debug("Update done")
def update(self):
    '''update the store with package list'''
    current = self.hosts.current_host
    hostid = current['hostid']
    LOG.debug("Updating package list")
    new_list = self.distro.compute_local_packagelist()
    LOG.debug("Creating the checksum")
    # pformat() guarantees sorted dictionary keys, which gives a
    # deterministic text rendering of the package dict; sha224 then
    # hashes the utf-8 bytes of that rendering.
    rendered = pformat(new_list).encode('utf-8')
    checksum = hashlib.sha224(rendered).hexdigest()
    LOG.debug("Package list need refresh")
    entry = {'valid': True, 'package_list': new_list}
    self.package_list[hostid] = entry
    utils.save_json_file_update(
        os.path.join(self.hosts.get_currenthost_dir(),
                     '%s_%s' % (PACKAGE_LIST_PREFIX, hostid)),
        entry['package_list'])
    # persist the new checksum on the host record only when it moved
    if current['packages_checksum'] != checksum:
        current['packages_checksum'] = checksum
        self.hosts.save_current_host()
    LOG.debug("Update done")
def save_current_host(self, arg=None):
    '''Save current host on disk'''
    LOG.debug("Save current host to disk")
    host_path = os.path.join(self._host_file_dir, HOST_DATA_FILENAME)
    utils.save_json_file_update(host_path, self.current_host)
def get_books__detail_from_source(self):
    '''Search the it-ebooks API for self.tags and return detailed book dicts.

    Pages through the search results (honouring self.limit_in_pages when
    positive), fetches the per-book detail record for every hit, merges it
    into each book dict, saves the list as a JSON file and returns it.

    Raises:
        ValueError: if self.tags is empty -- there is nothing to search for.
    '''
    # BUG FIX: the original did `raise('...')`, which raises a TypeError
    # ("exceptions must derive from BaseException") instead of signalling
    # the real problem. Raise a proper exception type with the same text.
    if not self.tags:
        raise ValueError(
            'length of "tags" is ZERO. function needs tags to search')
    # join tags once instead of the quadratic "+= ' ' + tag" loop
    tags = " ".join(self.tags).strip()

    finded_books = []
    # request pages
    START_PAGE = 1
    END_PAGE = 1
    CURRENT_PAGE = START_PAGE
    while CURRENT_PAGE <= END_PAGE:
        url = ('http://it-ebooks-api.info/v1/search/' + tags +
               '/page/' + str(CURRENT_PAGE))
        request = self.request(url)
        if CURRENT_PAGE == 1:
            # the first answer carries the grand total; the API serves
            # 10 results per page, so derive the page count from it
            self.total_result = request["Total"]
            self.all_pages = int(request['Total']) // 10 + 1
            # prepare END_PAGE
            if (self.limit_in_pages > 0) and (
                    self.all_pages > self.limit_in_pages):
                END_PAGE = self.limit_in_pages
            else:
                END_PAGE = self.all_pages
        # append new books
        finded_books.extend(request["Books"])
        CURRENT_PAGE += 1

    # extract other detail of books
    if self.show_progressbar:
        progressbar = Bar('Searching ', max=len(finded_books))
    for book_index in range(len(finded_books)):
        url = ("http://it-ebooks-api.info/v1/book/" +
               str(finded_books[book_index]["ID"]))
        other_details = self.request(url)
        for detail in other_details:
            if detail not in {"Error", "Time"}:
                if detail in {"Year", "ISBN", "isbn", "Page"}:
                    # need this for sorting
                    finded_books[book_index][detail] = int(
                        other_details[detail])
                else:
                    finded_books[book_index][detail] = \
                        other_details[detail]
        if self.show_progressbar:
            progressbar.next()
    if self.show_progressbar:
        progressbar.finish()

    # save data as json file
    name = 'books-%s-[%sfrom%s].json' % (tags.replace(" ", "-"),
                                         len(finded_books),
                                         self.total_result)
    save_json_file_update(name, finded_books)
    print('"%s" Saved!' % name)
    return finded_books
def add_hostid_pending_change(self, change):
    '''Pend a scheduled change for another host on disk

    change has a {hostid: {key: value, key2: value2}} format'''
    LOG.debug("Pend a change for another host on disk")
    # compute the path once; it is used for both the read and the write
    pending_path = os.path.join(self._host_file_dir,
                                PENDING_UPLOAD_FILENAME)
    try:
        with open(pending_path, 'r') as f:
            pending_changes = json.load(f)
    except (IOError, ValueError):
        # no pending file yet, or it is corrupt: start from scratch
        pending_changes = {}
    # merge existing changes with new ones (setdefault replaces the
    # non-idiomatic `if not hostid in ...` check-then-create dance)
    for hostid in change:
        pending_changes.setdefault(hostid, {}).update(change[hostid])
    utils.save_json_file_update(pending_path, pending_changes)
def update(self):
    '''update the store with package list'''
    hostid = self.hosts.current_host['hostid']
    LOG.debug("Updating package list")
    newpkg_list = self.distro.compute_local_packagelist()
    LOG.debug("Creating the checksum")
    # BUG FIX: hashlib.sha224(str(newpkg_list)) raises TypeError on
    # Python 3 (hashlib accepts bytes only), and str() of a dict is not
    # a reproducible representation since key order is unpredictable.
    # pprint.pformat() sorts dictionary keys, so its utf-8 encoding is
    # a stable hash input (matching the other update() variant).
    from pprint import pformat  # local import keeps the fix self-contained
    checksum = hashlib.sha224(
        pformat(newpkg_list).encode('utf-8')).hexdigest()
    LOG.debug("Package list need refresh")
    self.package_list[hostid] = {'valid': True, 'package_list': newpkg_list}
    utils.save_json_file_update(
        os.path.join(self.hosts.get_currenthost_dir(),
                     '%s_%s' % (PACKAGE_LIST_PREFIX, hostid)),
        self.package_list[hostid]['package_list'])
    # only rewrite the host record when the checksum actually changed
    if self.hosts.current_host['packages_checksum'] != checksum:
        self.hosts.current_host['packages_checksum'] = checksum
        self.hosts.save_current_host()
    LOG.debug("Update done")
def add_hostid_pending_change(self, change):
    '''Pend a scheduled change for another host on disk

    change has a {hostid: {key: value, key2: value2}} format'''
    LOG.debug("Pend a change for another host on disk")
    target = os.path.join(self._host_file_dir, PENDING_UPLOAD_FILENAME)
    # load whatever is already pending; a missing or unparsable file
    # simply means there is nothing pending yet
    try:
        with open(target, 'r') as stored:
            pending_changes = json.load(stored)
    except (IOError, ValueError):
        pending_changes = {}
    # merge existing changes with new ones
    for machine_id, new_keys in change.items():
        if machine_id not in pending_changes:
            pending_changes[machine_id] = {}
        pending_changes[machine_id].update(new_keys)
    utils.save_json_file_update(target, pending_changes)
def process_sync(self):
    '''start syncing what's needed if can sync

    process sync can be either started directly, or when can_sync changed

    Returns False to stop the periodic callback when syncing is disabled,
    True otherwise so the caller keeps rescheduling it.'''
    # we can't no more sync, removing the timeout
    if not self._can_sync:
        return False
    LOG.debug("Start processing sync")

    # Check server connection
    try:
        if self.infraclient.server_status() != 'ok':
            LOG.error("WebClient server answering but not available")
            return True
    except (APIError, socket.error, ValueError, ServerNotFoundError,
            BadStatusLine, RedirectLimit) as e:
        LOG.error("WebClient server answer error: %s", e)
        return True

    # Try to do every other hosts pending changes first (we will get fresh
    # data then)
    try:
        pending_upload_filename = os.path.join(
            self.hosts.get_currenthost_dir(), PENDING_UPLOAD_FILENAME)
        with open(pending_upload_filename, 'r') as f:
            pending_changes = json.load(f)
        # We're going to mutate the dictionary inside the loop, so we need
        # to make a copy of the keys dictionary view.
        for hostid in list(pending_changes.keys()):
            # now do action depending on what needs to be refreshed
            try:
                # we can only remove distant machines for now, not
                # register new ones
                try:
                    if not pending_changes[hostid].pop('share_inventory'):
                        LOG.debug('Removing machine %s requested as a '
                                  'pending change' % hostid)
                        self.infraclient.delete_machine(
                            machine_uuid=hostid)
                except APIError as e:
                    LOG.error("WebClient server doesn't want to remove "
                              "hostid (%s): %s" % (hostid, e))
                    # append it again to be done
                    pending_changes[hostid]['share_inventory'] = False
            except KeyError:
                pass
            # after all changes, is hostid still relevant?
            if not pending_changes[hostid]:
                pending_changes.pop(hostid)
        # no more change, remove the file
        if not pending_changes:
            LOG.debug(
                "No more pending changes remaining, removing the file")
            os.remove(pending_upload_filename)
        # update the remaining tasks
        else:
            utils.save_json_file_update(pending_upload_filename,
                                        pending_changes)
    except IOError:
        # no pending file yet: nothing scheduled for other hosts
        pass
    except ValueError:
        LOG.warning("The pending file is broken, ignoring")

    current_hostid = self.hosts.current_host['hostid']
    old_hosts = self.hosts.other_hosts
    hostlist_changed = None
    packagelist_changed = []
    logo_changed = []

    # Get all machines
    try:
        full_hosts_list = self.infraclient.list_machines()
    except APIError as e:
        LOG.error("Invalid machine list from server, stopping sync: %s"
                  % e)
        return True
    # split the server answer between the current host and the others
    other_hosts = {}
    distant_current_host = {}
    for machine in full_hosts_list:
        hostid = machine.pop("uuid")
        if hostid != current_hostid:
            other_hosts[hostid] = machine
        else:
            distant_current_host = machine

    # now refresh packages list for every hosts
    for hostid in other_hosts:
        # init the list as the infra can not send it
        if not "packages_checksum" in other_hosts[hostid]:
            other_hosts[hostid]["packages_checksum"] = None
        packagelist_filename = os.path.join(
            self.hosts.get_currenthost_dir(),
            '%s_%s' % (PACKAGE_LIST_PREFIX, hostid))
        if self.check_if_refresh_needed(old_hosts, other_hosts, hostid,
                                        'packages'):
            try:
                new_package_list = self.infraclient.list_packages(
                    machine_uuid=hostid)
                utils.save_json_file_update(packagelist_filename,
                                            new_package_list)
                # if already loaded, unload the package cache
                if self.package_handler:
                    try:
                        self.package_handler.package_list[hostid][
                            'valid'] = False
                    except KeyError:
                        pass
                packagelist_changed.append(hostid)
            except APIError as e:
                LOG.error("Invalid package data from server: %s", e)
                # keep the previous checksum (if any) so the refresh is
                # retried on the next sync
                try:
                    old_checksum = old_hosts[hostid]['packages_checksum']
                except KeyError:
                    old_checksum = None
                other_hosts[hostid]['packages_checksum'] = old_checksum

        # refresh the logo for every hosts as well
        # WORKING but not wanted on the isd side for now
        #if self.check_if_refresh_needed(old_hosts, other_hosts, hostid, 'logo'):
        #    try:
        #        logo_content = self.infraclient.get_machine_logo(machine_uuid=hostid)
        #        logo_file = open(os.path.join(self.hosts.get_currenthost_dir(), "%s_%s.png" % (LOGO_PREFIX, hostid)), 'wb+')
        #        logo_file.write(self.infraclient.get_machine_logo(machine_uuid=hostid))
        #        logo_file.close()
        #        logo_changed.append(hostid)
        #    except APIError, e:
        #        LOG.error ("Invalid data from server: %s", e)
        #        try:
        #            old_checksum = old_hosts[hostid]['logo_checksum']
        #        except KeyError:
        #            old_checksum = None
        #        other_hosts[hostid]['logo_checksum'] = old_checksum

    # Now that the package list and logo are successfully downloaded, save
    # the hosts metadata there. This removes as well the remaining package
    # list and logo
    LOG.debug("Check if other hosts metadata needs to be refreshed")
    if other_hosts != old_hosts:
        LOG.debug("Refresh new host")
        hostlist_changed = True
        other_host_filename = os.path.join(ONECONF_CACHE_DIR,
                                           current_hostid,
                                           OTHER_HOST_FILENAME)
        utils.save_json_file_update(other_host_filename, other_hosts)
        self.hosts.update_other_hosts()

    # now push current host
    if not self.hosts.current_host['share_inventory']:
        LOG.debug("Ensure that current host is not shared")
        try:
            self.infraclient.delete_machine(machine_uuid=current_hostid)
        except APIError as e:
            # just a debug message as it can be already not shared
            LOG.debug("Can't delete current host from infra: %s" % e)
    else:
        LOG.debug("Push current host to infra now")
        # check if current host changed
        try:
            if self.hosts.current_host['hostname'] != \
                    distant_current_host['hostname']:
                try:
                    self.infraclient.update_machine(
                        machine_uuid=current_hostid,
                        hostname=self.hosts.current_host['hostname'])
                    LOG.debug("Host data refreshed")
                except APIError as e:
                    LOG.error("Can't update machine: %s", e)
        except KeyError:
            # the server doesn't know this host yet: register it
            try:
                self.infraclient.update_machine(
                    machine_uuid=current_hostid,
                    hostname=self.hosts.current_host['hostname'])
                LOG.debug("New host registered done")
                distant_current_host = {'packages_checksum': None,
                                        'logo_checksum': None}
            except APIError as e:
                LOG.error("Can't register new host: %s", e)

        # local package list
        if self.check_if_push_needed(self.hosts.current_host,
                                     distant_current_host, 'packages'):
            local_packagelist_filename = os.path.join(
                self.hosts.get_currenthost_dir(),
                '%s_%s' % (PACKAGE_LIST_PREFIX, current_hostid))
            try:
                with open(local_packagelist_filename, 'r') as f:
                    self.infraclient.update_packages(
                        machine_uuid=current_hostid,
                        packages_checksum=self.hosts.current_host[
                            'packages_checksum'],
                        package_list=json.load(f))
            except (APIError, IOError) as e:
                LOG.error("Can't push current package list: %s", e)

        # local logo
        # WORKING but not wanted on the isd side for now
        #if self.check_if_push_needed(self.hosts.current_host, distant_current_host, 'logo'):
        #    logo_file = open(os.path.join(self.hosts.get_currenthost_dir(), "%s_%s.png" % (LOGO_PREFIX, current_hostid))).read()
        #    try:
        #        self.infraclient.update_machine_logo(machine_uuid=current_hostid, logo_checksum=self.hosts.current_host['logo_checksum'], logo_content=logo_file)
        #        LOG.debug ("refresh done")
        #    except APIError, e:
        #        LOG.error ("Error while pushing current logo: %s", e)

    # write the last sync date
    timestamp = str(time.time())
    content = {"last_sync": timestamp}
    utils.save_json_file_update(
        os.path.join(self.hosts.get_currenthost_dir(),
                     LAST_SYNC_DATE_FILENAME), content)

    # send dbus signal if needed events (just now so that we don't block
    # on remaining operations)
    if hostlist_changed:
        self.emit_new_hostlist()
    for hostid in packagelist_changed:
        self.emit_new_packagelist(hostid)
    for hostid in logo_changed:
        self.emit_new_logo(hostid)
    self.emit_new_latestsync(timestamp)

    # continue syncing in the main loop
    return True
def save_current_host(self, arg=None):
    '''Save current host on disk'''
    LOG.debug("Save current host to disk")
    destination = os.path.join(self._host_file_dir, HOST_DATA_FILENAME)
    utils.save_json_file_update(destination, self.current_host)
def process_sync(self):
    '''start syncing what's needed if can sync

    process sync can be either started directly, or when can_sync changed

    Returns False to stop the periodic callback when syncing is disabled,
    True otherwise so the caller keeps rescheduling it.'''
    # we can't no more sync, removing the timeout
    if not self._can_sync:
        return False
    LOG.debug("Start processing sync")

    # Check server connection
    try:
        if self.infraclient.server_status() != 'ok':
            LOG.error("WebClient server answering but not available")
            return True
    except (APIError, socket.error, ValueError, ServerNotFoundError,
            BadStatusLine, RedirectLimit) as e:
        LOG.error("WebClient server answer error: %s", e)
        return True

    # Try to do every other hosts pending changes first (we will get fresh
    # data then)
    try:
        pending_upload_filename = os.path.join(
            self.hosts.get_currenthost_dir(), PENDING_UPLOAD_FILENAME)
        with open(pending_upload_filename, 'r') as f:
            pending_changes = json.load(f)
        # We're going to mutate the dictionary inside the loop, so we need
        # to make a copy of the keys dictionary view.
        for hostid in list(pending_changes.keys()):
            # now do action depending on what needs to be refreshed
            try:
                # we can only remove distant machines for now, not
                # register new ones
                try:
                    if not pending_changes[hostid].pop('share_inventory'):
                        LOG.debug('Removing machine %s requested as a '
                                  'pending change' % hostid)
                        self.infraclient.delete_machine(
                            machine_uuid=hostid)
                except APIError as e:
                    LOG.error("WebClient server doesn't want to remove "
                              "hostid (%s): %s" % (hostid, e))
                    # append it again to be done
                    pending_changes[hostid]['share_inventory'] = False
            except KeyError:
                pass
            # after all changes, is hostid still relevant?
            if not pending_changes[hostid]:
                pending_changes.pop(hostid)
        # no more change, remove the file
        if not pending_changes:
            LOG.debug(
                "No more pending changes remaining, removing the file")
            os.remove(pending_upload_filename)
        # update the remaining tasks
        else:
            utils.save_json_file_update(pending_upload_filename,
                                        pending_changes)
    except IOError:
        # no pending file yet: nothing scheduled for other hosts
        pass
    except ValueError:
        LOG.warning("The pending file is broken, ignoring")

    current_hostid = self.hosts.current_host['hostid']
    old_hosts = self.hosts.other_hosts
    hostlist_changed = None
    packagelist_changed = []
    logo_changed = []

    # Get all machines
    try:
        full_hosts_list = self.infraclient.list_machines()
    except APIError as e:
        LOG.error("Invalid machine list from server, stopping sync: %s"
                  % e)
        return True
    # split the server answer between the current host and the others
    other_hosts = {}
    distant_current_host = {}
    for machine in full_hosts_list:
        hostid = machine.pop("uuid")
        if hostid != current_hostid:
            other_hosts[hostid] = machine
        else:
            distant_current_host = machine

    # now refresh packages list for every hosts
    for hostid in other_hosts:
        # init the list as the infra can not send it
        if not "packages_checksum" in other_hosts[hostid]:
            other_hosts[hostid]["packages_checksum"] = None
        packagelist_filename = os.path.join(
            self.hosts.get_currenthost_dir(),
            '%s_%s' % (PACKAGE_LIST_PREFIX, hostid))
        if self.check_if_refresh_needed(old_hosts, other_hosts, hostid,
                                        'packages'):
            try:
                new_package_list = self.infraclient.list_packages(
                    machine_uuid=hostid)
                utils.save_json_file_update(packagelist_filename,
                                            new_package_list)
                # if already loaded, unload the package cache
                if self.package_handler:
                    try:
                        self.package_handler.package_list[hostid][
                            'valid'] = False
                    except KeyError:
                        pass
                packagelist_changed.append(hostid)
            except APIError as e:
                LOG.error("Invalid package data from server: %s", e)
                # keep the previous checksum (if any) so the refresh is
                # retried on the next sync
                try:
                    old_checksum = old_hosts[hostid]['packages_checksum']
                except KeyError:
                    old_checksum = None
                other_hosts[hostid]['packages_checksum'] = old_checksum

        # refresh the logo for every hosts as well
        # WORKING but not wanted on the isd side for now
        #if self.check_if_refresh_needed(old_hosts, other_hosts, hostid, 'logo'):
        #    try:
        #        logo_content = self.infraclient.get_machine_logo(machine_uuid=hostid)
        #        logo_file = open(os.path.join(self.hosts.get_currenthost_dir(), "%s_%s.png" % (LOGO_PREFIX, hostid)), 'wb+')
        #        logo_file.write(self.infraclient.get_machine_logo(machine_uuid=hostid))
        #        logo_file.close()
        #        logo_changed.append(hostid)
        #    except APIError, e:
        #        LOG.error ("Invalid data from server: %s", e)
        #        try:
        #            old_checksum = old_hosts[hostid]['logo_checksum']
        #        except KeyError:
        #            old_checksum = None
        #        other_hosts[hostid]['logo_checksum'] = old_checksum

    # Now that the package list and logo are successfully downloaded, save
    # the hosts metadata there. This removes as well the remaining package
    # list and logo
    LOG.debug("Check if other hosts metadata needs to be refreshed")
    if other_hosts != old_hosts:
        LOG.debug("Refresh new host")
        hostlist_changed = True
        other_host_filename = os.path.join(ONECONF_CACHE_DIR,
                                           current_hostid,
                                           OTHER_HOST_FILENAME)
        utils.save_json_file_update(other_host_filename, other_hosts)
        self.hosts.update_other_hosts()

    # now push current host
    if not self.hosts.current_host['share_inventory']:
        LOG.debug("Ensure that current host is not shared")
        try:
            self.infraclient.delete_machine(machine_uuid=current_hostid)
        except APIError as e:
            # just a debug message as it can be already not shared
            LOG.debug("Can't delete current host from infra: %s" % e)
    else:
        LOG.debug("Push current host to infra now")
        # check if current host changed
        try:
            if self.hosts.current_host['hostname'] != \
                    distant_current_host['hostname']:
                try:
                    self.infraclient.update_machine(
                        machine_uuid=current_hostid,
                        hostname=self.hosts.current_host['hostname'])
                    LOG.debug("Host data refreshed")
                except APIError as e:
                    LOG.error("Can't update machine: %s", e)
        except KeyError:
            # the server doesn't know this host yet: register it
            try:
                self.infraclient.update_machine(
                    machine_uuid=current_hostid,
                    hostname=self.hosts.current_host['hostname'])
                LOG.debug("New host registered done")
                distant_current_host = {'packages_checksum': None,
                                        'logo_checksum': None}
            except APIError as e:
                LOG.error("Can't register new host: %s", e)

        # local package list
        if self.check_if_push_needed(self.hosts.current_host,
                                     distant_current_host, 'packages'):
            local_packagelist_filename = os.path.join(
                self.hosts.get_currenthost_dir(),
                '%s_%s' % (PACKAGE_LIST_PREFIX, current_hostid))
            try:
                with open(local_packagelist_filename, 'r') as f:
                    self.infraclient.update_packages(
                        machine_uuid=current_hostid,
                        packages_checksum=self.hosts.current_host[
                            'packages_checksum'],
                        package_list=json.load(f))
            except (APIError, IOError) as e:
                LOG.error("Can't push current package list: %s", e)

        # local logo
        # WORKING but not wanted on the isd side for now
        #if self.check_if_push_needed(self.hosts.current_host, distant_current_host, 'logo'):
        #    logo_file = open(os.path.join(self.hosts.get_currenthost_dir(), "%s_%s.png" % (LOGO_PREFIX, current_hostid))).read()
        #    try:
        #        self.infraclient.update_machine_logo(machine_uuid=current_hostid, logo_checksum=self.hosts.current_host['logo_checksum'], logo_content=logo_file)
        #        LOG.debug ("refresh done")
        #    except APIError, e:
        #        LOG.error ("Error while pushing current logo: %s", e)

    # write the last sync date
    timestamp = str(time.time())
    content = {"last_sync": timestamp}
    utils.save_json_file_update(
        os.path.join(self.hosts.get_currenthost_dir(),
                     LAST_SYNC_DATE_FILENAME), content)

    # send dbus signal if needed events (just now so that we don't block
    # on remaining operations)
    if hostlist_changed:
        self.emit_new_hostlist()
    for hostid in packagelist_changed:
        self.emit_new_packagelist(hostid)
    for hostid in logo_changed:
        self.emit_new_logo(hostid)
    self.emit_new_latestsync(timestamp)

    # continue syncing in the main loop
    return True