Example #1
 # excerpted method; assumes os, django.db.models.Q and the project's
 # logging_tools / package_repo / global_config are available in scope
 def rescan_repos(self, srv_com):
     def _read_file(f_name):
         # return non-empty, non-comment lines, closing the file handle properly
         with open(f_name, "r") as fh:
             return [
                 line.strip() for line in fh.read().split("\n") if line.strip() and not line.strip().startswith("#")
             ]
     self.log("debian scan")
     _src_list_dir = "/etc/apt"
     _src_list = _read_file(os.path.join(_src_list_dir, "sources.list"))
     _sub_dir = os.path.join(_src_list_dir, "sources.list.d")
     if os.path.isdir(_sub_dir):
         for entry in os.listdir(_sub_dir):
             _path = os.path.join(_sub_dir, entry)
             _src_list.extend(_read_file(_path))
     self.log("src_list has {}".format(logging_tools.get_plural("line", len(_src_list))))
     old_repos = set(package_repo.objects.all().values_list("name", flat=True))
     new_repos = []
     found_repos = []
     for _line in _src_list:
         # a plain entry has four fields: "deb <url> <distribution> <components>"
         _parts = _line.split()
         if len(_parts) == 4:
             if _parts[0] == "deb":
                 parsed, netloc, user, password = self._parse_url(_parts[1])
                 name = "{}{}".format(netloc, parsed.path)
                 try:
                     cur_repo = package_repo.objects.get(Q(name=name))
                 except package_repo.DoesNotExist:
                     cur_repo = package_repo(name=name)
                     new_repos.append(cur_repo)
                 found_repos.append(cur_repo)
                 old_repos -= {cur_repo.name}
                 cur_repo.alias = ""
                 cur_repo.deb_distribution = _parts[2]
                 cur_repo.deb_components = _parts[3]
                 cur_repo.repo_type = ""
                 cur_repo.username = user or ""
                 cur_repo.password = password or ""
                 cur_repo.url = "{}://{}{}".format(
                     parsed.scheme,
                     netloc,
                     parsed.path,
                 )
                 cur_repo.enabled = True
                 cur_repo.priority = 0
                 cur_repo.autorefresh = True
                 cur_repo.gpg_check = False
                 cur_repo.save()
             else:
                 self.log("skipping line with type {} ('{}')".format(_parts[0], _line), logging_tools.LOG_LEVEL_WARN)
         else:
             self.log("unparseable line '{}'".format(_line), logging_tools.LOG_LEVEL_ERROR)
     self.log("found {}".format(logging_tools.get_plural("new repository", len(new_repos))))
     if old_repos:
         self.log(
             "found {}: {}".format(
                 logging_tools.get_plural("old repository", len(old_repos)),
                 ", ".join(sorted(old_repos))
             ),
             logging_tools.LOG_LEVEL_ERROR
         )
         if global_config["DELETE_MISSING_REPOS"]:
             self.log(" ... removing them from DB", logging_tools.LOG_LEVEL_WARN)
             package_repo.objects.filter(Q(name__in=old_repos)).delete()
     if srv_com is not None:
         srv_com.set_result(
             "rescanned {}".format(logging_tools.get_plural("repository", len(found_repos)))
         )
         self.master_process.send_pool_message(
             "remote_call_async_result",
             str(srv_com),
         )
     # self.master_process._reload_searches()
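The interesting piece here is how a single sources.list entry becomes the repository fields; self._parse_url is not shown in the excerpt. A minimal standalone sketch of the same splitting, assuming _parse_url simply wraps urllib.parse.urlsplit and separates embedded credentials (parse_deb_line is a hypothetical helper, not part of the code above):

 from urllib.parse import urlsplit


 def parse_deb_line(line):
     # mirror the four-field handling above: "deb <url> <distribution> <components>"
     parts = line.split()
     if len(parts) != 4 or parts[0] != "deb":
         return None
     parsed = urlsplit(parts[1])
     # netloc without user:password, matching how name and url are built above
     netloc = parsed.hostname + (":{:d}".format(parsed.port) if parsed.port else "")
     return {
         "name": "{}{}".format(netloc, parsed.path),
         "url": "{}://{}{}".format(parsed.scheme, netloc, parsed.path),
         "deb_distribution": parts[2],
         "deb_components": parts[3],
         "username": parsed.username or "",
         "password": parsed.password or "",
     }


 print(parse_deb_line("deb http://user:secret@deb.debian.org/debian bookworm main"))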
Example #2
 # excerpted method; assumes lxml's etree, subprocess, django.db.models.Q and the
 # project's logging_tools / package_repo / package_service / global_config in scope
 def repo_scan_result(self, s_struct):
     self.log("got repo scan result")
     # repository list as XML, presumably produced by zypper's --xmlout mode
     repo_xml = etree.fromstring(s_struct.read())
     new_repos = []
     found_repos = []
     old_repos = set(package_repo.objects.all().values_list("name", flat=True))
     priority_found = False
     for repo in repo_xml.xpath(".//repo", smart_strings=False):
         if repo.getparent().tag == "service":
             service_xml = repo.getparent()
             try:
                 cur_srv = package_service.objects.get(Q(name=service_xml.attrib["name"]))
             except package_service.DoesNotExist:
                 cur_srv = package_service(
                     name=service_xml.attrib["name"],
                     alias=service_xml.attrib["alias"],
                     url=service_xml.attrib["url"],
                     type=service_xml.attrib["type"],
                     enabled=bool(int(service_xml.attrib["enabled"])),
                     autorefresh=bool(int(service_xml.attrib["autorefresh"])),
                 )
                 cur_srv.save()
         else:
             cur_srv = None
         try:
             cur_repo = package_repo.objects.get(Q(name=repo.attrib["name"]))
         except package_repo.DoesNotExist:
             cur_repo = package_repo(name=repo.attrib["name"])
             new_repos.append(cur_repo)
         found_repos.append(cur_repo)
         old_repos -= {cur_repo.name}
         cur_repo.alias = repo.attrib["alias"]
         cur_repo.repo_type = repo.attrib.get("type", "")
         if "priority" in repo.attrib:
             priority_found = True
         # 99 is zypper's default repository priority
         cur_repo.priority = int(repo.attrib.get("priority", "99"))
         cur_repo.enabled = bool(int(repo.attrib["enabled"]))
         cur_repo.autorefresh = bool(int(repo.attrib["autorefresh"]))
         cur_repo.gpg_check = bool(int(repo.attrib["gpgcheck"]))
         cur_repo.url = repo.findtext("url")
         cur_repo.service = cur_srv
         cur_repo.save()
     if not priority_found:
         self.log("no priorities defined in XML-output, rescanning using normal output", logging_tools.LOG_LEVEL_ERROR)
         _zypper_com = "/usr/bin/zypper lr -p"
         _stat, _out = subprocess.getstatusoutput(_zypper_com)
         if _stat:
             self.log("error scanning via '{}' ({:d}): {}".format(_zypper_com, _stat, _out), logging_tools.LOG_LEVEL_ERROR)
         else:
             # drop the header row and separator line of the table, then parse
             # the "|"-separated columns (name in column 3, priority in column 6)
             _lines = _out.strip().split("\n")[2:]
             for _line in _lines:
                 _parts = [_p.strip() for _p in _line.split("|")]
                 if len(_parts) == 6:
                     _name, _pri = (_parts[2], int(_parts[5]))
                     try:
                         cur_repo = package_repo.objects.get(Q(name=_name))
                     except package_repo.DoesNotExist:
                         self.log("no repository with name '{}' found".format(_name), logging_tools.LOG_LEVEL_ERROR)
                     else:
                         if _pri != cur_repo.priority:
                             self.log(
                                 "changing priority of {} from {:d} to {:d}".format(
                                     cur_repo.name,
                                     cur_repo.priority,
                                     _pri,
                                 )
                             )
                         cur_repo.priority = _pri
                         cur_repo.save()
     self.log("found {}".format(logging_tools.get_plural("new repository", len(new_repos))))
     if old_repos:
         self.log(
             "found {}: {}".format(
                 logging_tools.get_plural("old repository", len(old_repos)),
                 ", ".join(sorted(old_repos))
             ),
             logging_tools.LOG_LEVEL_ERROR
         )
         if global_config["DELETE_MISSING_REPOS"]:
             self.log(" ... removing them from DB", logging_tools.LOG_LEVEL_WARN)
             package_repo.objects.filter(Q(name__in=old_repos)).delete()
     if s_struct.srv_com is not None:
         s_struct.srv_com.set_result(
             "rescanned {}".format(logging_tools.get_plural("repository", len(found_repos)))
         )
         self.master_process.send_pool_message(
             "remote_call_async_result",
             str(s_struct.srv_com),
         )
     self.master_process._reload_searches()
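The plain-text fallback can be exercised on its own. A minimal sketch of the same zypper lr -p table parsing, assuming the usual six-column layout (# | Alias | Name | Enabled | Refresh | Priority; parse_zypper_table is a hypothetical helper):

 import subprocess


 def parse_zypper_table(output):
     # drop the header row and "---" separator, then split the "|"-separated columns
     priorities = {}
     for line in output.strip().split("\n")[2:]:
         parts = [p.strip() for p in line.split("|")]
         if len(parts) == 6:
             # column 3 is the repository name, column 6 its priority
             priorities[parts[2]] = int(parts[5])
     return priorities


 _stat, _out = subprocess.getstatusoutput("/usr/bin/zypper lr -p")
 if not _stat:
     print(parse_zypper_table(_out))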
Example #3
 def repo_scan_result(self, s_struct):
     self.log("got repo scan result")
     new_repos = []
     found_repos = []
     old_repos = set(package_repo.objects.all().values_list("name", flat=True))
     repo_list = []
     cur_repo_dict = {}
     for line in s_struct.read().split("\n"):
         # strip spaces
         line = line.strip()
         if ":" in line:
             key, value = line.split(":", 1)
             key = key.strip().lower()
             value = value.strip()
             if key.startswith("repo-"):
                 # strip the "repo-" prefix: "repo-id" -> "id", "repo-status" -> "status", ...
                 key = key[5:]
                 cur_repo_dict[key] = value
         else:
             # an empty line terminates the current repo block
             if cur_repo_dict:
                 repo_list.append(cur_repo_dict)
             cur_repo_dict = {}
     if cur_repo_dict:
         repo_list.append(cur_repo_dict)
     # map:
     # id ........ name
     # status .... disabled / enabled
     # baseurl ... url
     # name ...... alias
     for _dict in repo_list:
         try:
             cur_repo = package_repo.objects.get(Q(name=_dict["id"]))
         except package_repo.DoesNotExist:
             cur_repo = package_repo(name=_dict["id"])
             new_repos.append(cur_repo)
         repo_enabled = _dict.get("status", "disabled").lower() == "enabled"
         found_repos.append(cur_repo)
         old_repos -= {cur_repo.name}
         cur_repo.alias = _dict["name"]
         cur_repo.enabled = repo_enabled
         # keep only the first token in case baseurl lists several URLs
         cur_repo.url = _dict.get("baseurl", "http://").split()[0]
         cur_repo.gpg_check = False
         # "yum" is a dummy fallback for repos that do not report a type
         cur_repo.repo_type = _dict.get("type", "yum")
         cur_repo.save()
     self.log("found {}".format(logging_tools.get_plural("new repository", len(new_repos))))
     if old_repos:
         self.log(
             "found {}: {}".format(
                 logging_tools.get_plural("old repository", len(old_repos)),
                 ", ".join(sorted(old_repos))
             ),
             logging_tools.LOG_LEVEL_ERROR
         )
         if global_config["DELETE_MISSING_REPOS"]:
             self.log(" ... removing them from DB", logging_tools.LOG_LEVEL_WARN)
             package_repo.objects.filter(Q(name__in=old_repos)).delete()
     if s_struct.srv_com is not None:
         s_struct.srv_com.set_result(
             "rescanned {}".format(logging_tools.get_plural("repository", len(found_repos)))
         )
         self.master_process.send_pool_message(
             "remote_call_async_result",
             str(s_struct.srv_com),
         )
     self.master_process._reload_searches()
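The blank-line-delimited block parser at the top of this example is easy to lift out. A minimal self-contained sketch, assuming yum repoinfo-style output with Repo-id / Repo-name / Repo-status / Repo-baseurl lines (parse_repo_blocks and the sample text are hypothetical):

 def parse_repo_blocks(text):
     # split "Repo-key : value" lines into one dict per blank-line-separated block
     repo_list, cur = [], {}
     for line in text.split("\n"):
         line = line.strip()
         if ":" in line:
             key, value = line.split(":", 1)
             key = key.strip().lower()
             if key.startswith("repo-"):
                 cur[key[5:]] = value.strip()
         elif cur:
             repo_list.append(cur)
             cur = {}
     if cur:
         repo_list.append(cur)
     return repo_list


 sample = """Repo-id      : base
 Repo-name    : Base OS
 Repo-status  : enabled
 Repo-baseurl : http://mirror.example.com/os/

 Repo-id      : updates
 Repo-name    : Updates
 Repo-status  : disabled
 Repo-baseurl : http://mirror.example.com/updates/"""
 print(parse_repo_blocks(sample))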