Example #1
def main(pkgname, real_root):
    instdb = dbapi.InstallDB()
    filesdb = dbapi.FilesDB()

    # start remove operation
    repo, category, name, version = pkgname
    # initialize remove class
    rmpkg = Remove(repo, category, name, version, real_root)
    
    lpms.logger.info("removing %s/%s/%s-%s from %s" % \
            (repo, category, name, version, rmpkg.real_root))
    out.normal("removing %s/%s/%s-%s from %s" % \
            (repo, category, name, version, rmpkg.real_root))
    
    # remove the package content
    rmpkg.remove_content()
    # remove entries from the database
    package_id = instdb.find_package(package_repo=repo, package_category=category, \
            package_name=name, package_version=version).get(0).id
    instdb.database.delete_build_info(package_id)
    instdb.delete_conditional_versions(package_id=package_id)
    instdb.delete_inline_options(package_id=package_id)
    instdb.delete_package(package_repo=repo, package_category=category, \
            package_name=name, package_version=version, commit=True)
    # remove paths from files table
    filesdb.delete_item_by_pkgdata(category, name, version, commit=True)
    # unlock
    if shelltools.is_exists(cst.lock_file):
        shelltools.remove_file(cst.lock_file)
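A hedged invocation sketch for the remove entry point above; the repository and package names below are made-up placeholders, and the tuple layout follows the unpacking inside main():

# hypothetical call: remove app-editors/nano-2.2.6 from the "main" repo under the live root
main(("main", "app-editors", "nano", "2.2.6"), real_root="/")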
Example #2
def usage():
    out.normal("A tool that creates \'hashes\' file for lpms packages.")
    out.green("General Usage:\n")
    out.write(" $ lhashgen <spec name>\n")
    out.write("\nIf you do not give any package name with command, it scans the directory and operates all valid specs\n")
    out.write("\nUse \'--version\' to see program's version\n")
    lpms.terminate()
Example #3
def main():
    available_repositories = utils.available_repositories()
    for item in os.listdir(cst.repos):
        repo_conf = os.path.join(cst.repos, item, cst.repo_file)
        if os.access(repo_conf, os.F_OK):
            with open(repo_conf) as data:
                data = conf.ReadConfig(data.read().splitlines(), delimiter="@")
            if item in available_repositories:
                out.normal("%s [%s]" %
                           (item, out.color("enabled", "brightgreen")))
            else:
                out.normal("%s [%s]" %
                           (item, out.color("disabled", "brightred")))
            out.notify("system name: %s" % item)
            if hasattr(data, "name"):
                out.notify("development name: %s" % data.name)
            else:
                out.warn("development name is not defined!")
            if hasattr(data, "summary"):
                out.notify("summary: %s" % data.summary)
            else:
                out.warn("summary is not defined!")
            if hasattr(data, "maintainer"):
                out.notify("maintainer: %s" % data.maintainer)
            else:
                out.warn("maintainer is not defined!")
            out.write("\n")
Example #4
    def run_install(self):
        utils.xterm_title("(%s/%s) lpms: installing %s/%s-%s from %s" % (self.environment.index, self.environment.count, 
            self.environment.category, self.environment.name, self.environment.version, self.environment.repo))
        out.normal("installing %s to %s" % (self.environment.fullname, self.environment.install_dir))
        installed_file = os.path.join(os.path.dirname(os.path.dirname(
            self.environment.build_dir)), ".installed")

        if os.path.isfile(installed_file) and self.environment.resume_build:
            out.warn_notify("%s had been already installed." % self.environment.fullname)
            return True
        
        lpms.logger.info("installing to %s" % self.environment.build_dir)

        self.run_stage("install")
        if self.environment.docs is not None:
            for doc in self.environment.docs:
                if isinstance(doc, list) or isinstance(doc, tuple):
                    source_file, target_file = doc
                    namestr = self.environment.fullname if self.environment.slot != "0" else self.environment.name
                    target = self.environment.fix_target_path("/usr/share/doc/%s/%s" % (namestr, target_file))
                    source = os.path.join(self.environment.build_dir, source_file)
                #    self.environment.index, insfile(source, target)
                #else:
                #    self.environment.index, insdoc(doc)
        out.notify("%s has been installed." % self.environment.fullname)
        if not os.path.isfile(installed_file):
            touch(installed_file)
        if self.environment.stage == "install":
            lpms.terminate()
Example #5
    def run_configure(self):
        utils.xterm_title("(%s/%s) lpms: configuring %s/%s-%s from %s" %
                          (self.environment.index, self.environment.count,
                           self.environment.category, self.environment.name,
                           self.environment.version, self.environment.repo))

        out.normal("configuring source in %s" % self.environment.build_dir)

        configured_file = os.path.join(
            os.path.dirname(os.path.dirname(self.environment.build_dir)),
            ".configured")

        if os.path.isfile(configured_file) and self.environment.resume_build:
            out.warn_notify("%s had been already configured." %
                            self.environment.fullname)
            return True

        lpms.logger.info("configuring in %s" % self.environment.build_dir)

        self.run_stage("configure")
        out.notify("%s has been configured." % self.environment.fullname)

        if not os.path.isfile(configured_file):
            touch(configured_file)
        if self.environment.stage == "configure":
            lpms.terminate()
Example #6
    def clean_obsolete_content(self):
        '''Cleans obsolete content which belongs to previous installs'''
        if self.instdb.find_package(package_name=self.environment.name, \
                package_category=self.environment.category,
                package_slot=self.environment.slot):
            obsolete = self.compare_different_versions()
            if not obsolete:
                return
            out.normal("cleaning obsolete content")
            directories = []
            for item in obsolete:
                target = os.path.join(self.environment.real_root, item[0][1:])
                if not os.path.exists(target):
                    continue
                if os.path.islink(target):
                    os.unlink(target)
                elif os.path.isfile(target):
                    shelltools.remove_file(target)
                else:
                    directories.append(target)

            directories.reverse()
            for directory in directories:
                if not os.listdir(directory):
                    # Remove directory if it does not include anything
                    shelltools.remove_dir(directory)
Example #7
    def calculate_hashes(self):
        def write_archive_hash(urls, file_name):
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
                shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1, os.path.getsize(archive_path)))

        excepts = ('hashes',)  # one-element tuple; a bare string would also match substrings
        shelltools.remove_file("hashes")
        if len(self.files) == 0:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                if "src_url" in utils.metadata_parser(content["metadata"]).keys():
                    write_archive_hash(utils.metadata_parser(content["metadata"])["src_url"], f)
                elif "src_url" in content.keys():
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
            elif os.path.isdir(f):
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo("hashes", "%s %s %s" %  (path, utils.sha1sum(path), os.path.getsize(path)))
Example #8
    def usage(self):
        """
        Prints available commands with their descriptions.
        """
        out.normal("lpms -- %s Package Management System %s\n" % \
                (out.color("L", "red"), out.color("v"+__VERSION__, "green")))
        out.write("In order to build a package:\n\n")
        out.write(" # lpms <package-name> <extra-command>\n\n")
        out.write("To see extra commands use --help parameter.\n\n")
        out.write("Build related arguments:\n")
        for build_argument in self.build_arguments:
            if hasattr(build_argument, "short"):
                out.write(('%-29s %-10s : %s\n' % \
                        (out.color(build_argument.arg, 'green'), \
                        out.color(build_argument.short, 'green'), \
                        build_argument.description)))
            else:
                out.write(('%-32s : %s\n' % \
                        (out.color(build_argument.arg, 'green'), \
                        build_argument.description)))

        out.write("\nOther arguments:\n")
        for other_argument in self.other_arguments:
            if hasattr(other_argument, "short"):
                out.write(('%-29s %-10s : %s\n' % \
                        (out.color(other_argument.arg, 'green'), \
                        out.color(other_argument.short, 'green'), \
                        other_argument.description)))
            else:
                out.write(('%-32s : %s\n' % \
                        (out.color(other_argument.arg, 'green'), \
                        other_argument.description)))

        # Do nothing after showing help message
        sys.exit(0)
Example #9
    def main(self):
        out.normal("searching for %s\n" % self.keyword)

        for (repo, category, name, version, path) in self.search():
            try:
                replace = re.compile("(%s)" % "|".join([self.keyword]), re.I)
                out.write(
                    "%s/%s/%s-%s -- %s\n"
                    % (
                        out.color(repo, "green"),
                        out.color(category, "green"),
                        out.color(name, "green"),
                        out.color(version, "green"),
                        replace.sub(out.color(r"\1", "brightred"), path),
                    )
                )
            except:
                out.write(
                    "%s/%s/%s-%s -- %s\n"
                    % (
                        out.color(repo, "green"),
                        out.color(category, "green"),
                        out.color(name, "green"),
                        out.color(version, "green"),
                        path,
                    )
                )
Example #10
 def show_info(self, package):
     items = self.instdb.database.get_package_build_info(package.id)   
     template = (
             'Start_Time',
             'End Time',
             'Requestor',
             'Requestor ID',
             'HOST',
             'CFLAGS',
             'CXXFLAGS',
             'LDFLAGS',
             'JOBS',
             'CC',
             'CXX'
     )
     out.normal("Build information for %s/%s/%s-%s {%s:%s}" % (package.repo, \
             package.category, package.name, package.version, package.slot, package.arch))
     for index, item in enumerate(template, 1):
         if index in (1, 2):
             out.write("%s: %s\n" % (out.color(item, "green"), \
                     datetime.datetime.fromtimestamp(items[index]).strftime('%Y-%m-%d %H:%M:%S')))
             if index == 2:
                 delta = datetime.datetime.fromtimestamp(items[2])-\
                         datetime.datetime.fromtimestamp(items[1])
                 operation_time = str(round(float(delta.seconds)/60, 1))+" minutes" if delta.seconds >= 60 \
                         else str(delta.seconds)+" seconds"
                 out.write("%s: %s\n" % (out.color("Operation Time", "green"), operation_time))
             continue
         out.write("%s: %s\n" % (out.color(item, "green"), items[index]))
Example #11
def main():
    available_repositories = utils.available_repositories()
    for item in os.listdir(cst.repos):
        repo_conf = os.path.join(cst.repos, item, cst.repo_file)
        if os.access(repo_conf, os.F_OK):
            with open(repo_conf) as data:
                data = conf.ReadConfig(data.read().splitlines(), delimiter="@")
            if item in available_repositories:
                out.normal("%s [%s]" % (item, out.color("enabled", "brightgreen")))
            else:
                out.normal("%s [%s]" % (item, out.color("disabled", "brightred")))
            out.notify("system name: %s" % item)
            if hasattr(data, "name"):
                out.notify("development name: %s" % data.name)
            else:
                out.warn("development name is not defined!")
            if hasattr(data, "summary"):
                out.notify("summary: %s" % data.summary)
            else:
                out.warn("summary is not defined!")
            if hasattr(data, "maintainer"):
                out.notify("maintainer: %s" % data.maintainer)
            else:
                out.warn("maintainer is not defined!")
            out.write("\n")
Example #12
    def usage(self):
        """
        Prints available commands with their descriptions.
        """
        out.normal("lpms -- %s Package Management System %s\n" % \
                (out.color("L", "red"), out.color("v"+__VERSION__, "green")))
        out.write("In order to build a package:\n\n")
        out.write(" # lpms <package-name> <extra-command>\n\n")
        out.write("To see extra commands use --help parameter.\n\n")
        out.write("Build related arguments:\n")
        for build_argument in self.build_arguments:
            if hasattr(build_argument, "short"):
                out.write(('%-29s %-10s : %s\n' % \
                        (out.color(build_argument.arg, 'green'), \
                        out.color(build_argument.short, 'green'), \
                        build_argument.description)))
            else:
                out.write(('%-32s : %s\n' % \
                        (out.color(build_argument.arg, 'green'), \
                        build_argument.description)))

        out.write("\nOther arguments:\n")
        for other_argument in self.other_arguments:
            if hasattr(other_argument, "short"):
                out.write(('%-29s %-10s : %s\n' % \
                        (out.color(other_argument.arg, 'green'), \
                        out.color(other_argument.short, 'green'), \
                        other_argument.description)))
            else:
                out.write(('%-32s : %s\n' % \
                        (out.color(other_argument.arg, 'green'), \
                        other_argument.description)))

        # Do nothing after showing help message
        sys.exit(0)
Example #13
    def clean_obsolete_content(self):
        '''Cleans obsolete content which belongs to previous installs'''
        if self.instdb.find_package(package_name=self.environment.name, \
                package_category=self.environment.category,
                package_slot=self.environment.slot):
            obsolete = self.compare_different_versions()
            if not obsolete:
                return
            out.normal("cleaning obsolete content")
            directories = []
            for item in obsolete:
                target = os.path.join(self.environment.real_root, item[0][1:])
                if not os.path.exists(target):
                    continue
                if os.path.islink(target):
                    os.unlink(target)
                elif os.path.isfile(target):
                    shelltools.remove_file(target)
                else:
                    directories.append(target)

            directories.reverse()
            for directory in directories:
                if not os.listdir(directory):
                    # Remove directory if it does not include anything
                    shelltools.remove_dir(directory)
Example #14
def usage():
    out.normal("A tool that creates \'hashes\' file for lpms packages.")
    out.green("General Usage:\n")
    out.write(" $ lhashgen <spec name>\n")
    out.write(
        "\nIf you do not give any package name with command, it scans the directory and operates all valid specs\n"
    )
    out.write("\nUse \'--version\' to see program's version\n")
    lpms.terminate()
Example #15
 def usage(self):
     out.normal("Search given keywords in database")
     out.green("General Usage:\n")
     out.write(" $ lpms -s <keyword>\n")
     out.write("\nOther options:\n")
     for item in help_output:
         if len(item) == 2:
             out.write("%-28s: %s\n" %
                       (out.color(item[0], "green"), item[1]))
     lpms.terminate()
Example #16
 def run_pre_merge(self):
     out.normal("preparing system for the package...")
     pre_merge_file = os.path.join(os.path.dirname(os.path.dirname(self.environment.build_dir)), ".pre_merge")
     if os.path.isfile(pre_merge_file): 
         return True
     lpms.logger.info("running pre_merge function")
     # sandbox must be disabled
     self.environment.sandbox = False
     self.run_stage("pre_merge")
     if not os.path.isfile(pre_merge_file):
         touch(pre_merge_file)
Example #17
def reload_previous_repodb():
    dirname = os.path.dirname(cst.repos)
    for _file in os.listdir(dirname):
        if _file.startswith("repositorydb") and _file.count(".") == 2:
            shelltools.copy(os.path.join(dirname, _file), cst.repositorydb_path)
            from datetime import datetime
            timestamp = _file.split(".")[-1]
            previous_date = datetime.fromtimestamp(float(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
            out.normal("loading previous database copy: %s" %
                    out.color(previous_date, "red"))
            return
    out.error("no repodb backup found.")
Example #18
 def run_prepare(self):
     out.normal("preparing source...")
     prepared_file = os.path.join(os.path.dirname(os.path.dirname(self.environment.build_dir)), ".prepared")
     if os.path.isfile(prepared_file):
         out.warn_notify("%s had been already prepared." % self.environment.fullname)
         return True
     self.run_stage("prepare")
     out.notify("%s has been prepared." % self.environment.fullname)
     if not os.path.isfile(prepared_file):
         touch(prepared_file)
     if self.environment.stage == "prepare":
         lpms.terminate()
Example #19
    def main(self):
        parsed = self.pkgname.split("/")
        if len(parsed) == 3:
            repo, category, name = parsed
            name, version = utils.parse_pkgname(name)
            packages = self.instdb.find_package(
                    package_repo=repo,
                    package_category=category,
                    package_name=name,
                    package_version=version
                    )
        elif len(parsed) == 2:
            category, name = parsed
            name, version = utils.parse_pkgname(name)
            packages = self.instdb.find_package(
                    package_category=category,
                    package_name=name,
                    package_version=version
                    )
        elif len(parsed) == 1:
            name, version = utils.parse_pkgname(self.pkgname)
            packages = self.instdb.find_package(
                    package_name=name,
                    package_version=version
                    )
        else:
            out.error("%s could not be recognized." % self.pkgname)
            lpms.terminate()

        if not packages:
            out.error("%s not installed." % self.pkgname)
            lpms.terminate()
        
        for package in packages:        
            symdirs = {}
            out.normal("%s/%s/%s-%s" % (package.repo, package.category, \
                    package.name, package.version))
            content = self.filesdb.get_paths_by_package(package.name, \
                    category=package.category, version=package.version)
            for item in content:
                item = item[0].encode('UTF-8')
                if os.path.islink(item):
                    out.write("%s -> %s\n" % (out.color(item, "green"), os.readlink(item)))
                    if os.path.isdir(os.path.realpath(item)):
                        symdirs[os.path.realpath(item)+"/"] = item+"/"
                else:
                    out.write(item+"\n")
                if symdirs:
                    for symdir in symdirs:
                        if item.startswith(symdir):
                            out.write("%s -> %s\n" % (out.color(item.replace(symdir, \
                                    symdirs[symdir]), "brightwhite"), out.color(item, "brightwhite")))
Example #20
 def run_pre_merge(self):
     out.normal("preparing system for the package...")
     pre_merge_file = os.path.join(
         os.path.dirname(os.path.dirname(self.environment.build_dir)),
         ".pre_merge")
     if os.path.isfile(pre_merge_file):
         return True
     lpms.logger.info("running pre_merge function")
     # sandbox must be disabled
     self.environment.sandbox = False
     self.run_stage("pre_merge")
     if not os.path.isfile(pre_merge_file):
         touch(pre_merge_file)
Example #21
def system(*args, **kwargs):
    result = shelltools.system(" ".join(args), stage=current_stage, \
            sandbox=False if current_stage in cst.sandbox_exception_stages else None)
    if isinstance(result, bool): return result

    if len(result) == 2:
        if result[1]:
            logfile = "%s/build.log" % dirname(dirname(build_dir))
            if isfile(logfile):
                shelltools.remove_file(logfile)
            echo(result[1], logfile)
            out.normal("for detailed output, view %s" % logfile)
        return result[0]
    return result
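A hedged usage sketch for the system() wrapper above, as a spec's build stage might call it; install_dir is an assumed spec-environment variable that is not defined in this snippet:

def install():
    # forwards the joined command to shelltools.system() via the wrapper above
    system("make", "DESTDIR=%s" % install_dir, "install")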
Example #22
def reload_previous_repodb():
    dirname = os.path.dirname(cst.repos)
    for _file in os.listdir(dirname):
        if _file.startswith("repositorydb") and _file.count(".") == 2:
            shelltools.copy(os.path.join(dirname, _file),
                            cst.repositorydb_path)
            from datetime import datetime
            timestamp = _file.split(".")[-1]
            previous_date = datetime.fromtimestamp(
                float(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
            out.normal("loading previous database copy: %s" %
                       out.color(previous_date, "red"))
            return
    out.error("no repodb backup found.")
Example #23
 def run_prepare(self):
     out.normal("preparing source...")
     prepared_file = os.path.join(
         os.path.dirname(os.path.dirname(self.environment.build_dir)),
         ".prepared")
     if os.path.isfile(prepared_file):
         out.warn_notify("%s had been already prepared." %
                         self.environment.fullname)
         return True
     self.run_stage("prepare")
     out.notify("%s has been prepared." % self.environment.fullname)
     if not os.path.isfile(prepared_file):
         touch(prepared_file)
     if self.environment.stage == "prepare":
         lpms.terminate()
Example #24
    def main(self):
        parsed = self.pkgname.split("/")
        if len(parsed) == 3:
            repo, category, name = parsed
            name, version = utils.parse_pkgname(name)
            packages = self.instdb.find_package(
                package_repo=repo, package_category=category, package_name=name, package_version=version
            )
        elif len(parsed) == 2:
            category, name = parsed
            name, version = utils.parse_pkgname(name)
            packages = self.instdb.find_package(package_category=category, package_name=name, package_version=version)
        elif len(parsed) == 1:
            name, version = utils.parse_pkgname(self.pkgname)
            packages = self.instdb.find_package(package_name=name, package_version=version)
        else:
            out.error("%s could not be recognized." % self.pkgname)
            lpms.terminate()

        if not packages:
            out.error("%s not installed." % self.pkgname)
            lpms.terminate()

        for package in packages:
            symdirs = {}
            out.normal("%s/%s/%s-%s" % (package.repo, package.category, package.name, package.version))
            content = self.filesdb.get_paths_by_package(
                package.name, category=package.category, version=package.version
            )
            for item in content:
                item = item[0].encode("UTF-8")
                if os.path.islink(item):
                    out.write("%s -> %s\n" % (out.color(item, "green"), os.readlink(item)))
                    if os.path.isdir(os.path.realpath(item)):
                        symdirs[os.path.realpath(item) + "/"] = item + "/"
                else:
                    out.write(item + "\n")
                if symdirs:
                    for symdir in symdirs:
                        if item.startswith(symdir):
                            out.write(
                                "%s -> %s\n"
                                % (
                                    out.color(item.replace(symdir, symdirs[symdir]), "brightwhite"),
                                    out.color(item, "brightwhite"),
                                )
                            )
Example #25
    def main(self):
        out.normal("searching for %s\n" % self.keyword)

        for (repo, category, name, version, path) in self.search():
            try:
                replace = re.compile("(%s)" % '|'.join([self.keyword]), re.I)
                out.write(
                    "%s/%s/%s-%s -- %s\n" %
                    (out.color(repo, "green"), out.color(category, "green"),
                     out.color(name, "green"), out.color(version, "green"),
                     replace.sub(out.color(r"\1", "brightred"), path)))
            except:
                out.write(
                    "%s/%s/%s-%s -- %s\n" %
                    (out.color(repo, "green"), out.color(
                        category, "green"), out.color(
                            name, "green"), out.color(version, "green"), path))
Example #26
def set_parser(set_name):
    sets = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets",
                                     "%s.set" % set_name)
        if os.path.isfile(repo_set_file):
            sets.append(repo_set_file)

    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)

    if os.path.isfile(user_set_file):
        sets.append(user_set_file)

    if len(sets) > 1:
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))

        def ask():
            for c, s in enumerate(sets):
                out.write("	" + out.color(str(c + 1), "green") + ") " + s +
                          "\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")

        while True:
            ask()
            answer = sys.stdin.readline().strip()
            if answer == "Q" or answer == "q":
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue

            try:
                set_file = sets[int(answer) - 1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue
    elif len(sets) == 1:
        set_file = sets[0]
    else:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []

    return [line for line in file(set_file).read().strip().split("\n") \
            if not line.startswith("#") and line != ""]
Example #27
    def calculate_hashes(self):
        def write_archive_hash(urls, file_name):
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache,
                                            archive_name)
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
                shelltools.echo(
                    "hashes", "%s %s %s" %
                    (archive_name, sha1, os.path.getsize(archive_path)))

        excepts = ('hashes',)  # one-element tuple; a bare string would also match substrings
        shelltools.remove_file("hashes")
        if len(self.files) == 0:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo(
                    "hashes",
                    "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                if "src_url" in utils.metadata_parser(
                        content["metadata"]).keys():
                    write_archive_hash(
                        utils.metadata_parser(content["metadata"])["src_url"],
                        f)
                elif "src_url" in content.keys():
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
            elif os.path.isdir(f):
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo(
                        "hashes", "%s %s %s" %
                        (path, utils.sha1sum(path), os.path.getsize(path)))
Example #28
    def run_build(self):
        utils.xterm_title("(%s/%s) lpms: building %s/%s-%s from %s" % (self.environment.index, self.environment.count, 
            self.environment.category, self.environment.name, self.environment.version, self.environment.repo))
        out.normal("compiling source in %s" % self.environment.build_dir)
        built_file = os.path.join(os.path.dirname(os.path.dirname(
            self.environment.build_dir)), ".built")
        if os.path.isfile(built_file) and self.environment.resume_build:
            out.warn_notify("%s had been already built." % self.environment.fullname)
            return True
        
        lpms.logger.info("building in %s" % self.environment.build_dir)

        self.run_stage("build")
        out.notify("%s has been built." % self.environment.fullname)
        if not os.path.isfile(built_file):
            touch(built_file)
        if self.environment.stage == "build":
            lpms.terminate()
Example #29
 def run(self, instruct):
     all_packages = self.instdb.get_all_packages()
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             self.removable_packages.add((category, name, slot))
     packages = []
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             packages.append((category + "/" + name + "-" + version))
     if packages:
         out.normal("these package(s) is/are no longer required.")
         # FIXME: This is no good
         # I must find a new method to manage built-in variables and general purpose instructions
         instruct["ask"] = True
         api.remove_package(packages, instruct)
     else:
         out.warn("no package found.")
Example #30
def set_parser(set_name):
    sets = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets", "%s.set" % set_name)
        if os.path.isfile(repo_set_file):
            sets.append(repo_set_file)
            
    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)

    if os.path.isfile(user_set_file):
        sets.append(user_set_file)

    if len(sets) > 1:
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))
        def ask():
            for c, s in enumerate(sets):
                out.write("	"+out.color(str(c+1), "green")+") "+s+"\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")
            
        while True:
            ask()
            answer = sys.stdin.readline().strip()
            if answer == "Q" or answer == "q":
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue
            
            try:
                set_file = sets[int(answer)-1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue
    elif len(sets) == 1:
        set_file = sets[0]
    else:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []
    
    return [line for line in file(set_file).read().strip().split("\n") \
            if not line.startswith("#") and line != ""]
Example #31
        def collision_check():
            # TODO: This is a temporary solution. collision_check function
            # must be a reusable part for using in remove operation
            out.normal("checking file collisions...")
            lpms.logger.info("checking file collisions")
            collision_object = file_collisions.CollisionProtect(
                environment.category,
                environment.name,
                environment.slot,
                real_root=environment.real_root,
                source_dir=environment.install_dir)
            collision_object.handle_collisions()
            if collision_object.orphans:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "these files are orphan. the package will adopt the files:\n"
                )
                index = 0
                for orphan in collision_object.orphans:
                    out.notify(orphan)
                    index += 1
                    if index > 100:
                        # FIXME: the files must be logged
                        out.write(
                            out.color(" > ", "brightyellow") +
                            "...and many others.")
                        break

            if collision_object.collisions:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "file collisions detected:\n")
            for item in collision_object.collisions:
                (category, name, slot, version), path = item
                out.write(out.color(" -- ", "red")+category+"/"+name+"-"\
                        +version+":"+slot+" -> "+path+"\n")
            if collision_object.collisions and self.config.collision_protect:
                if environment.force_file_collision:
                    out.warn(
                        "Disregarding these collisions, you have been warned!")
                else:
                    return False
            return True
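A hedged sketch of how a caller might react to collision_check()'s return value; the surrounding merge flow is not shown here, so this is an assumption:

if not collision_check():
    out.error("file collisions detected, aborting.")
    lpms.terminate()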
Example #32
    def list_news(self):
        self.cursor.get_all_news()
        i = 0
        if not self.cursor.data:
            out.warn("no readable news.")
            return
        out.normal("readable messages listed:")
        out.write("index   repo            priority     summary\n")
        out.write("===============================================\n")
        for news in self.cursor.data:
            repo, metadata = news[:-1]
            if not "%s/%s" % (repo, metadata["summary"]) in self.read_items:
                read = out.color("*", "brightgreen")
            else:
                read = ""

            out.write("[%s]%s\t%-15s\t%-12s %s\n" % (out.color(str(i), "green"), read, repo, \
                    metadata["priority"], metadata["summary"]))
            i += 1
Example #33
 def run(self, instruct):
     all_packages = self.instdb.get_all_packages()
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             self.removable_packages.add((category, name, slot))
     packages = []
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             packages.append((category+"/"+name+"-"+version))
     if packages:
         out.normal("these package(s) is/are no longer required.")
         # FIXME: This is no good
         # I must find a new method to manage built-in variables and general purpose instructions 
         instruct["ask"] = True
         api.remove_package(packages, instruct)
     else:
         out.warn("no package found.")
Example #34
    def list_news(self):
        self.cursor.get_all_news()
        i = 0
        if not self.cursor.data:
            out.warn("no readable news.")
            return
        out.normal("readable messages listed:")
        out.write("index   repo            priority     summary\n")
        out.write("===============================================\n")
        for news in self.cursor.data:
            repo, metadata = news[:-1]
            if not "%s/%s" % (repo, metadata["summary"]) in self.read_items:
                read = out.color("*", "brightgreen")
            else:
                read = ""

            out.write("[%s]%s\t%-15s\t%-12s %s\n" % (out.color(str(i), "green"), read, repo, \
                    metadata["priority"], metadata["summary"]))
            i += 1
Example #35
def configure_pending(packages, instruct):
    '''Configure packages that were not configured after the merge operation'''

    if not utils.check_root(msg=False):
        lpms.terminate("you must be root.")

    root = instruct["real_root"]
    if not root:
        root = cst.root
        instruct["real_root"] = root

    pending_file = os.path.join(root, cst.configure_pending_file)

    failed = []

    if not os.access(pending_file, os.F_OK):
        lpms.terminate("there are no pending packages.")

    with open(pending_file, 'rb') as data:
        pending_packages = pickle.load(data)
        for package in pending_packages:
            repo, category, name, version = package
            spec = os.path.join(cst.repos, repo, category,
                                name) + "/" + name + "-" + version + ".py"
            out.normal("configuring %s/%s/%s-%s" %
                       (repo, category, name, version))
            if not os.access(spec, os.R_OK):
                out.warn("%s seems not exist or not readable. skipping..." %
                         spec)
                failed.append(package)
                continue
            if not initpreter.InitializeInterpreter(
                    package, instruct, ['post_install']).initialize():
                out.warn("%s/%s/%s-%s could not configured." %
                         (repo, category, name, version))
                failed.append(package)

    shelltools.remove_file(pending_file)
    if failed:
        with open(pending_file, 'wb') as data:
            pickle.dump(failed, data)
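A hedged invocation sketch for configure_pending(); the empty package list and the minimal instruct dictionary are assumptions based only on the keys used above:

# hypothetical call: configure every pending package under the default root
configure_pending([], {"real_root": None})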
Example #36
 def show_info(self, package):
     items = self.instdb.database.get_package_build_info(package.id)
     template = (
         "Start_Time",
         "End Time",
         "Requestor",
         "Requestor ID",
         "HOST",
         "CFLAGS",
         "CXXFLAGS",
         "LDFLAGS",
         "JOBS",
         "CC",
         "CXX",
     )
     out.normal(
         "Build information for %s/%s/%s-%s {%s:%s}"
         % (package.repo, package.category, package.name, package.version, package.slot, package.arch)
     )
     for index, item in enumerate(template, 1):
         if index in (1, 2):
             out.write(
                 "%s: %s\n"
                 % (
                     out.color(item, "green"),
                     datetime.datetime.fromtimestamp(items[index]).strftime("%Y-%m-%d %H:%M:%S"),
                 )
             )
             if index == 2:
                 delta = datetime.datetime.fromtimestamp(items[2]) - datetime.datetime.fromtimestamp(items[1])
                 operation_time = (
                     str(round(float(delta.seconds) / 60, 1)) + " minutes"
                     if delta.seconds >= 60
                     else str(delta.seconds) + " seconds"
                 )
                 out.write("%s: %s\n" % (out.color("Operation Time", "green"), operation_time))
             continue
         out.write("%s: %s\n" % (out.color(item, "green"), items[index]))
Example #37
    def run_install(self):
        utils.xterm_title("(%s/%s) lpms: installing %s/%s-%s from %s" %
                          (self.environment.index, self.environment.count,
                           self.environment.category, self.environment.name,
                           self.environment.version, self.environment.repo))
        out.normal("installing %s to %s" %
                   (self.environment.fullname, self.environment.install_dir))
        installed_file = os.path.join(
            os.path.dirname(os.path.dirname(self.environment.build_dir)),
            ".installed")

        if os.path.isfile(installed_file) and self.environment.resume_build:
            out.warn_notify("%s had been already installed." %
                            self.environment.fullname)
            return True

        lpms.logger.info("installing to %s" % self.environment.build_dir)

        self.run_stage("install")
        if self.environment.docs is not None:
            for doc in self.environment.docs:
                if isinstance(doc, list) or isinstance(doc, tuple):
                    source_file, target_file = doc
                    namestr = self.environment.fullname if self.environment.slot != "0" else self.environment.name
                    target = self.environment.fix_target_path(
                        "/usr/share/doc/%s/%s" % (namestr, target_file))
                    source = os.path.join(self.environment.build_dir,
                                          source_file)
                #    self.environment.index, insfile(source, target)
                #else:
                #    self.environment.index, insdoc(doc)
        out.notify("%s has been installed." % self.environment.fullname)
        if not os.path.isfile(installed_file):
            touch(installed_file)
        if self.environment.stage == "install":
            lpms.terminate()
Example #38
def configure_pending(packages, instruct):
    '''Configure packages that were not configured after the merge operation'''

    if not utils.check_root(msg=False):
        lpms.terminate("you must be root.")

    root = instruct["real_root"]
    if not root:
        root = cst.root
        instruct["real_root"] = root

    pending_file = os.path.join(root, cst.configure_pending_file)

    failed = []

    if not os.access(pending_file, os.F_OK):
        lpms.terminate("there are no pending packages.")

    with open(pending_file, 'rb') as data:
        pending_packages = pickle.load(data)
        for package in pending_packages:
            repo, category, name, version = package
            spec = os.path.join(cst.repos, repo, category, name)+"/"+name+"-"+version+".py"
            out.normal("configuring %s/%s/%s-%s" % (repo, category, name, version))
            if not os.access(spec, os.R_OK):
                out.warn("%s seems not exist or not readable. skipping..." % spec)
                failed.append(package)
                continue
            if not initpreter.InitializeInterpreter(package, instruct, ['post_install']).initialize():
                out.warn("%s/%s/%s-%s could not configured." % (repo, category, name, version))
                failed.append(package)

    shelltools.remove_file(pending_file)
    if failed:
        with open(pending_file, 'wb') as data:
            pickle.dump(failed, data)
Example #39
    def merge_package(self):
        '''Moves files to the target destination in the safest way possible.'''
        def get_perms(path):
            '''Get permissions of given path, it may be file or directory'''
            return {"uid": utils.get_uid(path),
                    "gid": utils.get_gid(path),
                    "mod": utils.get_mod(path)
            }
        out.normal("%s/%s/%s-%s:%s is merging to %s" % (self.environment.repo, self.environment.category, \
                self.environment.name, self.environment.version, self.environment.slot, \
                self.environment.real_root))
        # Remove files db entries for this package:slot if it exists
        self.filesdb.delete_item_by_pkgdata(self.environment.category, self.environment.name, \
            self.environment.previous_version, commit=True)

        # Remove file_relations db entries for this package:slot if it exists
        self.file_relationsdb.delete_item_by_pkgdata(self.environment.category, \
                self.environment.name, self.environment.previous_version, commit=True)

        # Merge the package, now
        walk_iter = os.walk(self.environment.install_dir, followlinks=True)
        while True:
            try:
                parent, directories, files = next(walk_iter)
                # TODO: Check the target path's permissions for writing or reading
                # Remove install_dir from parent to get real parent path
                pruned_parent = parent.replace(self.environment.install_dir, "")
                # create directories
                for directory in directories:
                    source = os.path.join(parent, directory)
                    target = os.path.join(self.environment.real_root, pruned_parent, directory)
                    real_target = "/".join([pruned_parent, directory])
                    if self.is_parent_symlink(target):
                        break
                    if os.path.islink(source):
                        self.symlinks.append(target+"/")
                        realpath = os.path.realpath(source)
                        if os.path.islink(target):
                            shelltools.remove_file(target)
                        # create real directory
                        if len(realpath.split(self.environment.install_dir)) > 1:
                            realpath = realpath.split(self.environment.install_dir)[1][1:]

                        shelltools.makedirs(os.path.join(self.environment.real_root, realpath))
                        # make symlink
                        if os.path.isdir(target):
                            shelltools.remove_dir(target)
                        elif os.path.isfile(target):
                            shelltools.remove_file(target)
                        shelltools.make_symlink(os.readlink(source), target)
                    else:
                        if os.path.isfile(target):
                            # TODO: Rename this file and warn the user
                            shelltools.remove_file(target)
                        shelltools.makedirs(target)
                    # Get permissions
                    perms = get_perms(source)
                    # if the path is a symlink, skip the permission handling
                    if not os.path.islink(source):
                        # Set permissions
                        shelltools.set_id(target, perms["uid"], perms["gid"])
                        shelltools.set_mod(target, perms["mod"])
                        # TODO: Common items?
                        # Add the item to filesdb
                        self.append_filesdb("dir", real_target, perms)
                    else:
                        # Add the item to filesdb
                        self.append_filesdb("link", real_target, perms, \
                                realpath=os.path.realpath(source))

                # Merge regular files to the target
                # Firstly, handle reserved files
                reserve_files = []
                if self.environment.reserve_files:
                    if isinstance(self.environment.reserve_files, basestring):
                        reserve_files.extend([f_item for f_item in self.environment.reserve_files.split(" ") \
                                if f_item != ""])
                    elif isinstance(self.environment.reserve_files, list) or isinstance(self.environment.reserve_files, tuple):
                        reserve_files.extend(self.environment.reserve_files)

                if os.path.isfile(os.path.join(cst.user_dir, cst.protect_file)):
                    with open(os.path.join(cst.user_dir, cst.protect_file)) as data:
                        for rf in data.readlines():
                            if not rf.startswith("#"):
                                reserve_files.append(rf.strip())

                # Here we are starting to merge
                for _file in files:
                    source = os.path.join(parent, _file)
                    target = os.path.join(self.environment.real_root, pruned_parent, _file)
                    real_target = "/".join([pruned_parent, _file])
                    if self.is_parent_symlink(target):
                        break
                    # Keep file relations for using after to handle reverse dependencies
                    if os.path.exists(source) and os.access(source, os.X_OK):
                        if utils.get_mimetype(source) in self.binary_filetypes:
                            self.file_relationsdb.append_query((
                                self.environment.repo,
                                self.environment.category,
                                self.environment.name,
                                self.environment.version,
                                target,
                                file_relations.get_depends(source))
                            )
                    # Strip binary files and keep them smaller
                    if self.strip_debug_symbols and utils.get_mimetype(source) in self.binary_filetypes:
                        utils.run_strip(source)
                    if self.environment.ignore_reserve_files:
                        reserve_files = []
                        self.environment.reserve_files = True

                    def add_file_item():
                        # Prevent code duplication
                        if not os.path.islink(target):
                            shelltools.set_id(target, perms["uid"], perms["gid"])
                            shelltools.set_mod(target, perms["mod"])
                            self.append_filesdb("file", real_target, perms, \
                                    sha1sum=utils.sha1sum(target),
                                    size = utils.get_size(source, dec=True)
                            )
                        else:
                            self.append_filesdb("link", real_target, perms,\
                                    realpath=os.path.realpath(source))

                    if self.environment.reserve_files is not False:
                        conf_file = os.path.join(pruned_parent, _file)
                        isconf = (_file.endswith(".conf") or _file.endswith(".cfg"))
                        def is_reserve():
                            if self.environment.ignore_reserve_files:
                                return False
                            elif not conf_file in reserve_files:
                                return False
                            return True

                        if os.path.exists(target) and not is_reserve():
                            if pruned_parent[0:4] == "/etc" or isconf:
                                if os.path.isfile(conf_file) and utils.sha1sum(source) != utils.sha1sum(conf_file):
                                    self.append_merge_conf(conf_file)
                                    target = target+".lpms-backup" 
                                    self.backup.append(target)

                        if os.path.exists(target) and is_reserve():
                            # The file is reserved.
                            # Adds to filesdb
                            add_file_item()
                            # We don't need the following operations
                            continue

                    if os.path.islink(source):
                        sha1 = False
                        realpath = os.readlink(source)
                        if self.environment.install_dir in realpath:
                            realpath = realpath.split(self.environment.install_dir)[1]

                        if os.path.isdir(target):
                            shelltools.remove_dir(target)
                        elif os.path.isfile(target) or os.path.islink(target):
                            shelltools.remove_file(target)
                        shelltools.make_symlink(realpath, target)
                    else:
                        sha1 = utils.sha1sum(source)
                        perms = get_perms(source)
                        shelltools.move(source, target)
                    # Adds to filesdb
                    add_file_item()
            except StopIteration as err:
                break

        self.file_relationsdb.insert_query(commit=True)
        self.filesdb.insert_query(commit=True)

        lpms.logger.info("%s/%s has been merged to %s." % (self.environment.category, self.environment.fullname, \
                self.environment.real_root))
Example #40
    def perform_operation(self):
        '''Handles command line arguments and drive building operation'''
        self.set_environment_variables()
        # Check /proc and /dev. These filesystems must be mounted 
        # to perform operations properly.
        for item in ('/proc', '/dev'):
            if not os.path.ismount(item):
                out.warn("%s is not mounted. You have been warned." % item)

        # clean source code extraction directory if it is wanted
        # TODO: check the following condition when resume functionality is back
        if self.instruction.clean_tmp:
            if self.instruction.resume_build is not None:
                out.warn("clean-tmp is disabled because of resume-build is enabled.")
            else:
                self.clean_temporary_directory()

        # we want to save the starting time of the build operation to calculate the build time
        # The starting point of logging
        lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" % (
            self.instruction.index,
            self.instruction.count,
            self.internals.env.repo,
            self.internals.env.category,
            self.internals.env.name,
            self.internals.env.version
            )
        )

        out.normal("(%s/%s) building %s/%s from %s" % (
            self.instruction.index,
            self.instruction.count,
            out.color(self.internals.env.category, "green"),
            out.color(self.internals.env.name+"-"+self.internals.env.version, "green"),
            self.internals.env.repo
            )
        )

        if self.internals.env.sandbox:
            lpms.logger.info("sandbox enabled build")
            out.notify("sandbox is enabled")
        else:
            lpms.logger.warning("sandbox disabled build")
            out.warn_notify("sandbox is disabled")

        # fetch packages which are in download_plan list
        if self.internals.env.src_url is not None:
            # preprocess url shortcuts such as $name, $version and etc
            self.parse_src_url_field()
            # if the package is revisioned, override build_dir and install_dir. 
            # remove revision number from these variables.
            if self.revisioned:
                for variable in ("build_dir", "install_dir"):
                    new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                            variable)).split(self.revision))
                    setattr(self.internals.env, variable, \
                            os.path.join(os.path.dirname(getattr(self.internals.env, \
                            variable)), new_variable))

            utils.xterm_title("lpms: downloading %s/%s/%s-%s" % (
                self.internals.env.repo,
                self.internals.env.category,
                self.internals.env.name,
                self.internals.env.version
                )
            )

            self.prepare_download_plan(self.internals.env.applied_options)

            if not fetcher.URLFetcher().run(self.download_plan):
                lpms.terminate("\nplease check the spec")

        if self.internals.env.applied_options is not None and self.internals.env.applied_options:
            out.notify("applied options: %s" %
                    " ".join(self.internals.env.applied_options))

        if self.internals.env.src_url is None and not self.extract_plan \
                and hasattr(self.internals.env, "extract"):
            # Workaround for #208
            self.internals.env.extract_nevertheless = True

        # Remove the previous sandbox log if it exists.
        if os.path.exists(cst.sandbox_log):
            shelltools.remove_file(cst.sandbox_log)

        # Enter the building directory
        os.chdir(self.internals.env.build_dir)

        # Manage ccache
        if hasattr(self.config, "ccache") and self.config.ccache:
            if utils.drive_ccache(config=self.config):
                out.notify("ccache is enabled.")
            else:
                out.warn("ccache could not be enabled. so you should check dev-util/ccache")

        self.internals.env.start_time = time.time()
        return True, self.internals.env
Example #41
0
def main(params):
    # determine operation type
    repo_name = None
    if params:
        repo_name = params[0]

    # create operation object
    operation = Update()

    repo_num = 0
    if repo_name is None:
        # firstly, lpms tries to create a copy of current repository database.
        db_backup()

        out.normal("updating repository database...")
        operation.repodb.database.begin_transaction()
        for repo_name in os.listdir(cst.repos):
            if not repo_name in utils.available_repositories():
                continue
            if os.path.isfile(
                    os.path.join(cst.repos, repo_name, "info/repo.conf")):
                out.write(out.color(" * ", "red") + repo_name + "\n")

                operation.update_repository(repo_name)
                repo_num += 1

        operation.repodb.database.commit()
        out.normal("%s repository(ies) is/are updated." % repo_num)
    else:
        if repo_name == ".":
            current_path = os.getcwd()
            for repo_path in [os.path.join(cst.repos, item) \
                    for item in utils.available_repositories()]:
                if current_path == repo_path or len(
                        current_path.split(repo_path)) == 2:
                    # convert it to a valid repo_name derived from the path
                    repo_name = current_path.split(cst.repos)[1][1:]
                    break
            if repo_name == ".":
                out.warn("%s does not seem a valid repository path." % \
                        out.color(current_path, "red"))
                lpms.terminate()

        if len(repo_name.split("/")) == 2:
            out.normal("updating %s" % repo_name)
            repo, category = repo_name.split("/")
            repo_path = os.path.join(cst.repos, repo)

            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            operation.repodb.database.begin_transaction()
            for pkg in os.listdir(os.path.join(repo_path, category)):
                try:
                    operation.update_package(repo_path,
                                             category,
                                             pkg,
                                             update=True)
                except IntegrityError:
                    continue
            operation.repodb.database.commit()

        elif len(repo_name.split("/")) == 3:
            version = None
            repo, category, name = repo_name.split("/")

            if repo.startswith("="):
                repo = repo[1:]
                try:
                    name, version = utils.parse_pkgname(name)
                except TypeError:
                    out.error("you should give a version number")
                    lpms.terminate()
            else:
                if utils.parse_pkgname(name) is not None and len(
                        utils.parse_pkgname(name)) == 2:
                    out.error("you must use %s" %
                              (out.color("=" + repo_name, "red")))
                    lpms.terminate()

            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            repo_path = os.path.join(cst.repos, repo)
            out.normal("updating %s/%s/%s" % (repo, category, name))
            operation.repodb.database.begin_transaction()
            operation.update_package(repo_path,
                                     category,
                                     name,
                                     my_version=version,
                                     update=True)
            operation.repodb.database.commit()

        else:
            if not repo_name in utils.available_repositories():
                out.error("%s is not a repository." %
                          out.color(repo_name, "red"))
                lpms.terminate()

            repo_dir = os.path.join(cst.repos, repo_name)
            if os.path.isdir(repo_dir):
                repo_path = os.path.join(repo_dir, cst.repo_file)
                if os.path.isfile(repo_path):
                    operation.repodb.database.begin_transaction()
                    out.normal("updating repository: %s" %
                               out.color(repo_name, "green"))
                    operation.update_repository(repo_name)
                    operation.repodb.database.commit()
                else:
                    lpms.terminate("repo.conf file could not found in %s" %
                                   repo_dir + "/info")
            else:
                lpms.terminate("repo.conf not found in %s" %
                               os.path.join(cst.repos, repo_name))

    out.normal("Total %s packages have been processed." %
               operation.packages_num)

    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if not name in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)

    # Close the database connection
    operation.repodb.database.close()
Example #42
0
 def usage(self):
     out.normal("Get information about given package")
     out.green("General Usage:\n")
     out.write(" $ lpms -i repo/category/pkgname\n")
     out.write("\nrepo and category keywords are optional.\n")
     lpms.terminate()
Example #43
0
 def usage(self):
     out.normal("news reader for lpms")
     out.write(
         "Use 'list' command to list available messages. For reading these messages, use read <id>\n"
     )
Example #44
0
    def merge_package(self):
        '''Moves files to the target destination in the safest way possible.'''
        def get_perms(path):
            '''Get permissions of the given path; it may be a file or a directory'''
            return {
                "uid": utils.get_uid(path),
                "gid": utils.get_gid(path),
                "mod": utils.get_mod(path)
            }
        out.normal("%s/%s/%s-%s:%s is merging to %s" % (self.environment.repo, self.environment.category, \
                self.environment.name, self.environment.version, self.environment.slot, \
                self.environment.real_root))
        # Remove files db entries for this package:slot if it exists
        self.filesdb.delete_item_by_pkgdata(self.environment.category, self.environment.name, \
            self.environment.previous_version, commit=True)

        # Remove file_relations db entries for this package:slot if it exists
        self.file_relationsdb.delete_item_by_pkgdata(self.environment.category, \
                self.environment.name, self.environment.previous_version, commit=True)

        # Merge the package, now
        walk_iter = os.walk(self.environment.install_dir, followlinks=True)
        while True:
            try:
                parent, directories, files = next(walk_iter)
                # TODO: Check the target path's permissions for writing or reading
                # Remove install_dir from parent to get real parent path
                pruned_parent = parent.replace(self.environment.install_dir,
                                               "")
                # create directories
                for directory in directories:
                    source = os.path.join(parent, directory)
                    target = os.path.join(self.environment.real_root,
                                          pruned_parent, directory)
                    real_target = "/".join([pruned_parent, directory])
                    if self.is_parent_symlink(target):
                        break
                    if os.path.islink(source):
                        self.symlinks.append(target + "/")
                        realpath = os.path.realpath(source)
                        if os.path.islink(target):
                            shelltools.remove_file(target)
                        # create real directory
                        if len(realpath.split(
                                self.environment.install_dir)) > 1:
                            realpath = realpath.split(
                                self.environment.install_dir)[1][1:]

                        shelltools.makedirs(
                            os.path.join(self.environment.real_root, realpath))
                        # make symlink
                        if os.path.isdir(target):
                            shelltools.remove_dir(target)
                        elif os.path.isfile(target):
                            shelltools.remove_file(target)
                        shelltools.make_symlink(os.readlink(source), target)
                    else:
                        if os.path.isfile(target):
                            # TODO: Rename this file and warn the user
                            shelltools.remove_file(target)
                        shelltools.makedirs(target)
                    # Get permissions
                    perms = get_perms(source)
                    # if path is a symlink, pass permission mumbo-jumbos
                    if not os.path.islink(source):
                        # Set permissions
                        shelltools.set_id(target, perms["uid"], perms["gid"])
                        shelltools.set_mod(target, perms["mod"])
                        # TODO: Common items?
                        # Add the item to filesdb
                        self.append_filesdb("dir", real_target, perms)
                    else:
                        # Add the item to filesdb
                        self.append_filesdb("link", real_target, perms, \
                                realpath=os.path.realpath(source))

                # Merge regular files to the target
                # Firstly, handle reserved files
                reserve_files = []
                if self.environment.reserve_files:
                    if isinstance(self.environment.reserve_files, basestring):
                        reserve_files.extend([f_item for f_item in self.environment.reserve_files.split(" ") \
                                if f_item != ""])
                    elif isinstance(self.environment.reserve_files,
                                    list) or isinstance(
                                        self.environment.reserve_files, tuple):
                        reserve_files.extend(self.environment.reserve_files)

                if os.path.isfile(os.path.join(cst.user_dir,
                                               cst.protect_file)):
                    with open(os.path.join(cst.user_dir,
                                           cst.protect_file)) as data:
                        for rf in data.readlines():
                            if not rf.startswith("#"):
                                reserve_files.append(rf.strip())

                # Here we are starting to merge
                for _file in files:
                    source = os.path.join(parent, _file)
                    target = os.path.join(self.environment.real_root,
                                          pruned_parent, _file)
                    real_target = "/".join([pruned_parent, _file])
                    if self.is_parent_symlink(target):
                        break
                    # Keep file relations for later use when handling reverse dependencies
                    if os.path.exists(source) and os.access(source, os.X_OK):
                        if utils.get_mimetype(source) in self.binary_filetypes:
                            self.file_relationsdb.append_query(
                                (self.environment.repo,
                                 self.environment.category,
                                 self.environment.name,
                                 self.environment.version, target,
                                 file_relations.get_depends(source)))
                    # Strip binary files and keep them smaller
                    if self.strip_debug_symbols and utils.get_mimetype(
                            source) in self.binary_filetypes:
                        utils.run_strip(source)
                    if self.environment.ignore_reserve_files:
                        reserve_files = []
                        self.environment.reserve_files = True

                    def add_file_item():
                        # Prevent code duplication
                        if not os.path.islink(target):
                            shelltools.set_id(target, perms["uid"],
                                              perms["gid"])
                            shelltools.set_mod(target, perms["mod"])
                            self.append_filesdb("file", real_target, perms, \
                                    sha1sum=utils.sha1sum(target),
                                    size = utils.get_size(source, dec=True)
                            )
                        else:
                            self.append_filesdb("link", real_target, perms,\
                                    realpath=os.path.realpath(source))

                    if self.environment.reserve_files is not False:
                        conf_file = os.path.join(pruned_parent, _file)
                        isconf = (_file.endswith(".conf")
                                  or _file.endswith(".cfg"))

                        def is_reserve():
                            if self.environment.ignore_reserve_files:
                                return False
                            elif not conf_file in reserve_files:
                                return False
                            return True

                        if os.path.exists(target) and not is_reserve():
                            if pruned_parent[0:4] == "/etc" or isconf:
                                if os.path.isfile(conf_file) and utils.sha1sum(
                                        source) != utils.sha1sum(conf_file):
                                    self.append_merge_conf(conf_file)
                                    target = target + ".lpms-backup"
                                    self.backup.append(target)

                        if os.path.exists(target) and is_reserve():
                            # The file is reserved.
                            # Adds to filesdb
                            add_file_item()
                            # We don't need the following operations
                            continue

                    if os.path.islink(source):
                        sha1 = False
                        realpath = os.readlink(source)
                        if self.environment.install_dir in realpath:
                            realpath = realpath.split(
                                self.environment.install_dir)[1]

                        if os.path.isdir(target):
                            shelltools.remove_dir(target)
                        elif os.path.isfile(target) or os.path.islink(target):
                            shelltools.remove_file(target)
                        shelltools.make_symlink(realpath, target)
                    else:
                        sha1 = utils.sha1sum(source)
                        perms = get_perms(source)
                        shelltools.move(source, target)
                    # Adds to filesdb
                    add_file_item()
            except StopIteration as err:
                break

        self.file_relationsdb.insert_query(commit=True)
        self.filesdb.insert_query(commit=True)

        lpms.logger.info("%s/%s has been merged to %s." % (self.environment.category, self.environment.fullname, \
                self.environment.real_root))
Example #45
0
    def perform_operation(self):
        '''Handles command line arguments and drives the building operation'''
        self.set_environment_variables()
        # Check /proc and /dev. These filesystems must be mounted
        # to perform operations properly.
        for item in ('/proc', '/dev'):
            if not os.path.ismount(item):
                out.warn("%s is not mounted. You have been warned." % item)

        # clean source code extraction directory if it is wanted
        # TODO: check the following condition when resume functionality is back
        if self.instruction.clean_tmp:
            if self.instruction.resume_build is not None:
                out.warn(
                    "clean-tmp is disabled because of resume-build is enabled."
                )
            else:
                self.clean_temporary_directory()

        # we want to save starting time of the build operation to calculate building time
        # The starting point of logging
        lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" %
                         (self.instruction.index, self.instruction.count,
                          self.internals.env.repo, self.internals.env.category,
                          self.internals.env.name, self.internals.env.version))

        out.normal(
            "(%s/%s) building %s/%s from %s" %
            (self.instruction.index, self.instruction.count,
             out.color(self.internals.env.category, "green"),
             out.color(
                 self.internals.env.name + "-" + self.internals.env.version,
                 "green"), self.internals.env.repo))

        if self.internals.env.sandbox:
            lpms.logger.info("sandbox enabled build")
            out.notify("sandbox is enabled")
        else:
            lpms.logger.warning("sandbox disabled build")
            out.warn_notify("sandbox is disabled")

        # fetch packages which are in download_plan list
        if self.internals.env.src_url is not None:
            # preprocess URL shortcuts such as $name and $version
            self.parse_src_url_field()
            # if the package is revisioned, override build_dir and install_dir.
            # remove revision number from these variables.
            if self.revisioned:
                for variable in ("build_dir", "install_dir"):
                    new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                            variable)).split(self.revision))
                    setattr(self.internals.env, variable, \
                            os.path.join(os.path.dirname(getattr(self.internals.env, \
                            variable)), new_variable))

            utils.xterm_title(
                "lpms: downloading %s/%s/%s-%s" %
                (self.internals.env.repo, self.internals.env.category,
                 self.internals.env.name, self.internals.env.version))

            self.prepare_download_plan(self.internals.env.applied_options)

            if not fetcher.URLFetcher().run(self.download_plan):
                lpms.terminate("\nplease check the spec")

        if self.internals.env.applied_options is not None and self.internals.env.applied_options:
            out.notify("applied options: %s" %
                       " ".join(self.internals.env.applied_options))

        if self.internals.env.src_url is None and not self.extract_plan \
                and hasattr(self.internals.env, "extract"):
            # Workaround for #208
            self.internals.env.extract_nevertheless = True

        # Remove the previous sandbox log if it exists.
        if os.path.exists(cst.sandbox_log):
            shelltools.remove_file(cst.sandbox_log)

        # Enter the building directory
        os.chdir(self.internals.env.build_dir)

        # Manage ccache
        if hasattr(self.config, "ccache") and self.config.ccache:
            if utils.drive_ccache(config=self.config):
                out.notify("ccache is enabled.")
            else:
                out.warn(
                    "ccache could not be enabled. so you should check dev-util/ccache"
                )

        self.internals.env.start_time = time.time()
        return True, self.internals.env
Example #46
0
import os
import sys

from lpms import out
from lpms import utils
from lpms import shelltools

from lpms.db import dbapi

filesdb = dbapi.FilesDB()
installdb = dbapi.InstallDB()

cmdline = sys.argv

if "--help" in cmdline:
    out.normal("A tool that to create reverse depends database from scratch.")
    out.write("To hide outputs use '--quiet' parameter.\n")

if "--quiet" in cmdline:
    out.normal("creating reverse depends database...")

if os.path.exists("/var/db/lpms/reverse_depends.db"):
    shelltools.remove_file("/var/db/lpms/reverse_depends.db")

reversedb = dbapi.ReverseDependsDB()

for package in installdb.get_all_names():
    reverse_repo, reverse_category, reverse_name = package
    versions = []
    map(
        lambda ver: versions.extend(ver),
        installdb.get_version(reverse_name, pkg_category=reverse_category).values())
Example #47
0
import os
import sys

from lpms import out
from lpms import utils
from lpms import shelltools
from lpms import file_relations

from lpms.db import dbapi

filesdb = dbapi.FilesDB()
installdb = dbapi.InstallDB()

cmdline = sys.argv

if "--help" in cmdline:
    out.normal("A tool that to create file relations database from scratch.")
    out.write("To hide outputs use '--quiet' parameter.\n")

if "--quiet" in cmdline:
    out.normal("creating file relations database...")

if os.path.exists("/var/db/lpms/file_relations.db"):
    shelltools.remove_file("/var/db/lpms/file_relations.db")

relationsdb = dbapi.FileRelationsDB()

for package in installdb.get_all_names():
    repo, category, name = package
    versions = []
    map(lambda ver: versions.extend(ver),
        installdb.get_version(name, pkg_category=category).values())
Example #48
0
def main(repos):
    db = dbapi.RepositoryDB()
    for repo in repos:
        out.normal("dropping %s" % repo)
        db.drop_repo(repo)
Example #49
0
    def package_mangler(self, **kwargs):
        def collision_check():
            # TODO: This is a temporary solution. collision_check function
            # must be a reusable part for using in remove operation
            out.normal("checking file collisions...")
            lpms.logger.info("checking file collisions")
            collision_object = file_collisions.CollisionProtect(
                environment.category,
                environment.name,
                environment.slot,
                real_root=environment.real_root,
                source_dir=environment.install_dir)
            collision_object.handle_collisions()
            if collision_object.orphans:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "these files are orphan. the package will adopt the files:\n"
                )
                index = 0
                for orphan in collision_object.orphans:
                    out.notify(orphan)
                    index += 1
                    if index > 100:
                        # FIXME: the files must be logged
                        out.write(
                            out.color(" > ", "brightyellow") +
                            "...and many others.")
                        break

            if collision_object.collisions:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "file collisions detected:\n")
            for item in collision_object.collisions:
                (category, name, slot, version), path = item
                out.write(out.color(" -- ", "red")+category+"/"+name+"-"\
                        +version+":"+slot+" -> "+path+"\n")
            if collision_object.collisions and self.config.collision_protect:
                if environment.force_file_collision:
                    out.warn(
                        "Disregarding these collisions, you have been warned!")
                else:
                    return False
            return True

        names = kwargs.get("names", self.request.names)
        # Prepare build environment
        out.normal("resolving dependencies")
        targets = api.resolve_dependencies(names, self.request.instruction)
        if self.request.instruction.pretend:
            out.write("\n")
            out.normal("these packages will be merged, respectively:\n")
            showplan.show(targets.packages,
                          targets.conflicts,
                          targets.options,
                          installdb=dbapi.InstallDB())
            out.write("\ntotal %s package(s) listed.\n\n" \
                    % out.color(str(len(targets.packages)), "green"))
            raise LpmsTerminate

        if self.request.instruction.ask:
            out.write("\n")
            out.normal("these packages will be merged, respectively:\n")
            showplan.show(targets.packages,
                          targets.conflicts,
                          targets.options,
                          installdb=dbapi.InstallDB())
            utils.xterm_title("lpms: confirmation request")
            out.write("\ntotal %s package(s) will be merged.\n\n" \
                    % out.color(str(len(targets.packages)), "green"))

            if not utils.confirm("Would you like to continue?"):
                # Reset terminal title and terminate lpms.
                utils.xterm_title_reset()
                raise LpmsTerminate

        self.request.instruction.count = len(targets.packages)
        for index, package in enumerate(targets.packages, 1):
            self.request.instruction.index = index
            retval, environment = api.prepare_environment(
                    package,
                    self.request.instruction,
                    dependencies=targets.dependencies[package.id] if package.id in \
                            targets.dependencies else None,
                    options=targets.options[package.id] if package.id in \
                            targets.options else None,
                    conditional_versions=targets.conditional_versions[package.id] \
                            if package.id in targets.conditional_versions else None,
                    conflicts=targets.conflicts[package.id] if package.id \
                            in targets.conflicts else None,
                    inline_option_targets=targets.inline_option_targets[package.id] \
                            if package.id in targets.inline_option_targets else None
            )
            if not retval:
                out.error(
                    "There are some errors while preparing environment to build FOO."
                )
                out.error(
                    "So you should submit a bug report to fix the issue.")
                raise LpmsTerminate("thanks to flying with lpms.")
            # Now, run package script(spec) for configuring, building and install
            retval, environment = self.interpreter.initialize(environment)
            if retval is False:
                out.error(
                    "There are some errors while building FOO from source.")
                out.error("Error messages should be seen above.")
                out.error(
                    "If you want to submit a bug report, please attatch BAR or send above messages in a proper way."
                )
                raise LpmsTerminate("thanks to flying with lpms.")
            elif retval is None:
                raise LpmsTerminate

            if not collision_check():
                out.error(
                    "File collisions detected. If you want to overwrite these files,"
                )
                out.error(
                    "You have to use --force-file-collisions parameter or disable collision_protect in configuration file."
                )
                raise LpmsTerminate("thanks to flying with lpms.")

            # Merge package to livefs
            if environment.not_merge:
                raise LpmsTerminate("not merging...")
            retval, environment = merge.Merge(environment).perform_operation()
            if not retval:
                raise LpmsTerminate("Some errors occured while merging %s" %
                                    environment.fullname)

            lpms.logger.info("finished %s/%s/%s-%s" %
                             (package.repo, package.category, package.name,
                              package.version))

            utils.xterm_title("lpms: %s/%s finished" %
                              (package.category, package.name))

            out.normal("Cleaning build directory")
            shelltools.remove_dir(os.path.dirname(environment.install_dir))
            catdir = os.path.dirname(os.path.dirname(environment.install_dir))
            if not os.listdir(catdir):
                shelltools.remove_dir(catdir)

            # There is no error, exitting...
            out.normal("Completed.")
Example #50
0
 def usage(self):
     out.normal("news reader for lpms")
     out.write("Use 'list' command to list available messages. For reading these messages, use read <id>\n")
Example #51
0
 def usage(self):
     out.normal("Get information about given package")
     out.green("General Usage:\n")
     out.write(" $ lpms -i repo/category/pkgname\n")
     out.write("\nrepo and category keywords are optional.\n")
     lpms.terminate()
Example #52
0
import os
import sys

from lpms import out
from lpms import utils
from lpms import shelltools

from lpms.db import dbapi

filesdb = dbapi.FilesDB()
installdb = dbapi.InstallDB()

cmdline = sys.argv

if "--help" in cmdline:
    out.normal("A tool that to create reverse depends database from scratch.")
    out.write("To hide outputs use '--quiet' parameter.\n")

if "--quiet" in cmdline:
    out.normal("creating reverse depends database...") 

if os.path.exists("/var/db/lpms/reverse_depends.db"):
    shelltools.remove_file("/var/db/lpms/reverse_depends.db")

reversedb = dbapi.ReverseDependsDB()

for package in installdb.get_all_names():
    reverse_repo, reverse_category, reverse_name = package
    versions = []
    map(lambda ver: versions.extend(ver), installdb.get_version(reverse_name, pkg_category=reverse_category).values())
    for reverse_version in versions:
Example #53
0
def main(params):
    # determine operation type
    repo_name = None
    if params:
        repo_name = params[0]

    # create operation object
    operation = Update()

    repo_num = 0 
    if repo_name is None:
        # firstly, lpms tries to create a copy of current repository database.
        db_backup()

        out.normal("updating repository database...")
        operation.repodb.database.begin_transaction()
        for repo_name in os.listdir(cst.repos):
            if not repo_name in utils.available_repositories():
                continue
            if os.path.isfile(os.path.join(cst.repos, repo_name, "info/repo.conf")):
                out.write(out.color(" * ", "red") + repo_name+"\n")
                
                operation.update_repository(repo_name)
                repo_num += 1

        operation.repodb.database.commit()
        out.normal("%s repository(ies) is/are updated." % repo_num)
    else:
        if repo_name == ".":
            current_path = os.getcwd()
            for repo_path in [os.path.join(cst.repos, item) \
                    for item in utils.available_repositories()]:
                if current_path == repo_path or len(current_path.split(repo_path)) == 2:
                    # convert it to a valid repo_name derived from the path
                    repo_name = current_path.split(cst.repos)[1][1:]
                    break
            if repo_name == ".":
                out.warn("%s does not seem a valid repository path." % \
                        out.color(current_path, "red"))
                lpms.terminate()

        if len(repo_name.split("/")) == 2:
            out.normal("updating %s" % repo_name)
            repo, category = repo_name.split("/")
            repo_path = os.path.join(cst.repos, repo)
            
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            operation.repodb.database.begin_transaction()
            for pkg in os.listdir(os.path.join(repo_path, category)):
                try:
                    operation.update_package(repo_path, category, pkg, update=True)
                except IntegrityError:
                    continue
            operation.repodb.database.commit()

        elif len(repo_name.split("/")) == 3:
            version = None
            repo, category, name = repo_name.split("/")
            
            if repo.startswith("="):
                repo = repo[1:]
                try:
                    name, version = utils.parse_pkgname(name)
                except TypeError:
                    out.error("you should give a version number")
                    lpms.terminate()
            else:
                if utils.parse_pkgname(name) is not None and len(utils.parse_pkgname(name)) == 2:
                    out.error("you must use %s" % (out.color("="+repo_name, "red")))
                    lpms.terminate()
            
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            repo_path = os.path.join(cst.repos, repo)
            out.normal("updating %s/%s/%s" % (repo, category, name))
            operation.repodb.database.begin_transaction()
            operation.update_package(repo_path, category, name, my_version = version, update = True)
            operation.repodb.database.commit()
        
        else:
            if not repo_name in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo_name, "red"))
                lpms.terminate()
            
            repo_dir = os.path.join(cst.repos, repo_name)
            if os.path.isdir(repo_dir):
                repo_path = os.path.join(repo_dir, cst.repo_file)
                if os.path.isfile(repo_path):
                    operation.repodb.database.begin_transaction()
                    out.normal("updating repository: %s" % out.color(repo_name, "green"))
                    operation.update_repository(repo_name)
                    operation.repodb.database.commit()
                else:
                    lpms.terminate("repo.conf file could not found in %s" % repo_dir+"/info")
            else:
                lpms.terminate("repo.conf not found in %s" % os.path.join(cst.repos, repo_name))

    out.normal("Total %s packages have been processed." % operation.packages_num)
    
    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if not name in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)
    
    # Close the database connection
    operation.repodb.database.close()