Example #1
0
def run_cmd(cmd, show=True, enable_sandbox=True):
    stdout = None
    stderr = None
    if enable_sandbox:
        # FIXME: getopt should not do this.
        # the verbosity of messages, defaults to 1
        # 1 - error
        # 2 - warning
        # 3 - normal
        # 4 - verbose
        # 5 - debug
        # 6 - crazy debug
        log_level = lpms.getopt("--sandbox-log-level", like=True)
        if log_level is None:
            log_level = "1"
        if log_level not in ('1', '2', '3', '4', '5', '6'):
            out.warn("%s is an invalid sandbox log level." % log_level)
        cmd = "%s --config=%s --log-level=%s --log-file=%s -- %s" % (cst.sandbox_app, cst.sandbox_config, \
                log_level, cst.sandbox_log, cmd)
    if not show:
        stdout = subprocess.PIPE
        stderr = subprocess.PIPE
    result = subprocess.Popen(cmd, shell=True, stdout=stdout, stderr=stderr)
    output, err = result.communicate()
    return result.returncode, output, err
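
A minimal, self-contained sketch of the capture pattern run_cmd builds on (the placeholder command is only illustrative):

import subprocess

def capture(cmd, show=True):
    # When show is False the output is piped and returned instead of being
    # written straight to the terminal, mirroring run_cmd above.
    pipe = None if show else subprocess.PIPE
    proc = subprocess.Popen(cmd, shell=True, stdout=pipe, stderr=pipe)
    output, err = proc.communicate()
    return proc.returncode, output, err

ret, output, err = capture("ls /tmp", show=False)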
Example #2
0
def system(cmd, show=False, stage=None, sandbox=None):
    cfg = conf.LPMSConfig()
    if sandbox is None:
        sandbox = bool(cfg.sandbox)
        # override the 'sandbox' variable if the user wants to modify it from the cli
        if lpms.getopt('--enable-sandbox'):
            sandbox = True
        elif lpms.getopt('--disable-sandbox'):
            sandbox = False
    if lpms.getopt("--verbose"):
        ret, output, err = run_cmd(cmd, True)
    elif (not cfg.print_output or lpms.getopt("--quiet")) \
            and not show:
        ret, output, err = run_cmd(cmd, show=False, enable_sandbox=sandbox)
    else:
        ret, output, err = run_cmd(cmd, show=True, enable_sandbox=sandbox)

    if ret != 0:
        if not conf.LPMSConfig().print_output or lpms.getopt("--quiet"):
            out.brightred("\n>> error messages:\n")
            out.write(err)
        out.warn("command failed: %s" % out.color(cmd, "red"))
        if stage and output and err:
            return False, output + err
        return False
    return True
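
A hedged usage sketch for system(); the import path and the command are assumptions, since the defining module is not shown in these snippets (other examples reference it as shelltools.system), and out.error is the logging helper used throughout:

# assumed import path; adjust to wherever system() actually lives
from lpms import shelltools

if not shelltools.system("make install", sandbox=False):
    out.error("make install failed")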
Example #3
0
    def function_collisions(self):
        '''Checks the build environment to deal with function collisions if primary_library is not defined'''
        if self.environment.primary_library: return
        preserved_names = [
                'extract',
                'prepare',
                'configure',
                'build',
                'install',
                'collision_check',
                'pre_merge',
                'post_install',
                'remove'
        ]

        race_list = {}
        for library in self.environment.libraries:
            for preserved_name in preserved_names:
                if preserved_name in self.environment.raw:
                    continue
                if library+"_"+preserved_name in self.environment.raw:
                    if preserved_name in race_list:
                        if not library in race_list[preserved_name]:
                            race_list[preserved_name].append(library)
                    else:
                        race_list.update({preserved_name: [library]})
                        
        result = [(key, race_list[key]) for key in race_list if len(race_list[key]) > 1]
        if result:
            out.warn("function collision detected in these stages. you should use primary_library keyword.")
            for item in result:
                stage, libraries = item
                out.notify(stage+": "+", ".join(libraries))
            lpms.terminate("please contact the package maintainer.")
Example #4
0
def conf(*args, **kwargs):
    '''Runs configure script with standard and given parameters'''
    conf_command = './configure'
    if "run_dir" in kwargs:
        conf_command = os.path.join(kwargs["run_dir"], "configure")

    if os.access(conf_command, os.F_OK):
        if os.access(conf_command, os.X_OK):
            args = '%s \
                --prefix=/%s \
                --build=%s \
                --mandir=/%s \
                --infodir=/%s \
                --datadir=/%s \
                --sysconfdir=/%s \
                --localstatedir=/%s \
                --libexecdir=/%s \
                --libdir=/%s \
                %s' % (conf_command, cst.prefix, \
                cfg.LPMSConfig().CHOST, cst.man, \
                cst.info, cst.data, \
                cst.conf, cst.localstate, \
                cst.libexec, cst.libdir, " ".join(args))
            args = [arg for arg in args.split(" ") if arg.strip()]
            out.notify("running %s" % "\n\t".join(args))
            if not system(" ".join(args)):
                raise BuildError("conf failed.")
        else:
            raise BuildError("configure script is not executable.")
    else:
        out.warn("no configure script found.")
Example #5
0
    def function_collisions(self):
        '''Checks the build environment to deal with function collisions if primary_library is not defined'''
        if self.environment.primary_library: return
        preserved_names = [
            'extract', 'prepare', 'configure', 'build', 'install',
            'collision_check', 'pre_merge', 'post_install',
            'remove'
        ]

        race_list = {}
        for library in self.environment.libraries:
            for preserved_name in preserved_names:
                if preserved_name in self.environment.raw:
                    continue
                if library + "_" + preserved_name in self.environment.raw:
                    if preserved_name in race_list:
                        if not library in race_list[preserved_name]:
                            race_list[preserved_name].append(library)
                    else:
                        race_list.update({preserved_name: [library]})

        result = [(key, race_list[key]) for key in race_list
                  if len(race_list[key]) > 1]
        if result:
            out.warn(
                "function collision detected in these stages. you should use primary_library keyword."
            )
            for item in result:
                stage, libraries = item
                out.notify(stage + ": " + ", ".join(libraries))
            lpms.terminate("please contact the package maintainer.")
Example #6
0
def system(cmd, show=False, stage=None, sandbox=None):
    cfg = conf.LPMSConfig()
    if sandbox is None:
        sandbox = bool(cfg.sandbox)
        # override the 'sandbox' variable if the user wants to modify it from the cli
        if lpms.getopt("--enable-sandbox"):
            sandbox = True
        elif lpms.getopt("--disable-sandbox"):
            sandbox = False
    if lpms.getopt("--verbose"):
        ret, output, err = run_cmd(cmd, True)
    elif (not cfg.print_output or lpms.getopt("--quiet")) and not show:
        ret, output, err = run_cmd(cmd, show=False, enable_sandbox=sandbox)
    else:
        ret, output, err = run_cmd(cmd, show=True, enable_sandbox=sandbox)

    if ret != 0:
        if not conf.LPMSConfig().print_output or lpms.getopt("--quiet"):
            out.brightred("\n>> error messages:\n")
            out.write(err)
        out.warn("command failed: %s" % out.color(cmd, "red"))
        if stage and output and err:
            return False, output + err
        return False
    return True
Example #7
0
def conf(*args, **kwargs):
    '''Runs configure script with standard and given parameters'''
    conf_command = './configure'
    if "run_dir" in kwargs:
        conf_command = os.path.join(kwargs["run_dir"], "configure")

    if os.access(conf_command, os.F_OK):
        if os.access(conf_command, os.X_OK):
            args = '%s \
                --prefix=/%s \
                --build=%s \
                --mandir=/%s \
                --infodir=/%s \
                --datadir=/%s \
                --sysconfdir=/%s \
                --localstatedir=/%s \
                --libexecdir=/%s \
                --libdir=/%s \
                %s' % (conf_command, cst.prefix, \
                cfg.LPMSConfig().CHOST, cst.man, \
                cst.info, cst.data, \
                cst.conf, cst.localstate, \
                cst.libexec, cst.libdir, " ".join(args))
            args = [arg for arg in args.split(" ") if arg.strip()]
            out.notify("running %s" % "\n\t".join(args))
            if not system(" ".join(args)):
                raise BuildError("conf failed.")
        else:
            raise BuildError("configure script is not executable.")
    else:
        out.warn("no configure script found.")
Example #8
0
def run_cmd(cmd, show=True, enable_sandbox=True):
    stdout = None
    stderr = None
    if enable_sandbox:
        # FIXME: getopt should not do this.
        # the verbosity of messages, defaults to 1
        # 1 - error
        # 2 - warning
        # 3 - normal
        # 4 - verbose
        # 5 - debug
        # 6 - crazy debug
        log_level = lpms.getopt("--sandbox-log-level", like=True)
        if log_level is None:
            log_level = "1"
        if log_level not in ("1", "2", "3", "4", "5", "6"):
            out.warn("%s is an invalid sandbox log level." % log_level)
        cmd = "%s --config=%s --log-level=%s --log-file=%s -- %s" % (
            cst.sandbox_app,
            cst.sandbox_config,
            log_level,
            cst.sandbox_log,
            cmd,
        )
    if not show:
        stdout = subprocess.PIPE
        stderr = subprocess.PIPE
    result = subprocess.Popen(cmd, shell=True, stdout=stdout, stderr=stderr)
    output, err = result.communicate()
    return result.returncode, output, err
Example #9
0
def available_repositories():
    if not os.path.isfile(cst.repo_conf):
        out.warn("%s not found!" % cst.repo_conf)
        return []

    with open(cst.repo_conf) as repo_file:
        return [repo[1:-1] for repo in repo_file.read().split("\n") \
                if repo.startswith("[") and repo.endswith("]")]
Example #11
0
 def sync(self):
     if self.git_repo():
         os.chdir(self.repo_path)
         if lpms.getopt("--reset"):
             out.warn("forcing git to overwrite local files")
             shelltools.system("%s reset --hard HEAD" % self.git_binary, sandbox=False)
             shelltools.system("%s clean -f -d" % self.git_binary, sandbox=False)
         shelltools.system("%s pull -f -u origin" % self.git_binary, sandbox=False)
     else:
         os.chdir(os.path.dirname(self.repo_path))
         shelltools.system("%s clone %s %s" % (self.git_binary, self.remote, self.repo), sandbox=False)
Example #12
0
def confirm(text):
    turns = 5
    while turns:
        turns -= 1
        out.warn(text+"["+out.color("yes", "green")+"/"+out.color("no", "red")+"]")
        answer = sys.stdin.readline().strip()
        if answer == "yes" or answer == "y" or answer == "":
            return True
        elif answer == "no" or answer == "n":
            return False
        out.write(out.color("Sorry, response " + answer + " not understood! yes/y or no/n\n", "red"))
Example #13
0
def sandbox_dirs():
    dirs = []
    sandbox_config = os.path.join(cst.config_dir, cst.sandbox_file)
    if not os.path.isfile(sandbox_config):
        out.warn("%s is not found! So this may be harmfull!" % sandbox_config)
        return dirs

    for line in open(sandbox_config):
        line = line.strip()
        if not line.startswith("#") and len(line) > 0:
            dirs.append(line)
    return dirs
Example #15
0
 def import_repo_news(self, repo):
     '''Imports news of given repository'''
     my_news_dir = os.path.join(cst.repos, repo, cst.news_dir)
     if not os.path.isdir(my_news_dir):
         return
     
     for news in os.listdir(my_news_dir):
         local = utils.import_script(os.path.join(my_news_dir, news))
         try:
             metadata = utils.metadata_parser(local["metadata"], keys=metadata_keys)
         except IndexError:
             out.warn("Syntax errors found in %s" % os.path.join(my_news_dir, news))
             continue
         self.data.append((repo, metadata, local["message"]))
Example #16
0
def confirm(text):
    turns = 5
    while turns:
        turns -= 1
        out.warn(text + "[" + out.color("yes", "green") + "/" +
                 out.color("no", "red") + "]")
        answer = sys.stdin.readline().strip()
        if answer == "yes" or answer == "y" or answer == "":
            return True
        elif answer == "no" or answer == "n":
            return False
        out.write(
            out.color(
                "Sorry, response " + answer +
                " not understood! yes/y or no/n\n", "red"))
Example #17
0
 def sync(self):
     if self.git_repo():
         os.chdir(self.repo_path)
         if lpms.getopt("--reset"):
             out.warn("forcing git to overwrite local files")
             shelltools.system("%s reset --hard HEAD" % self.git_binary,
                               sandbox=False)
             shelltools.system("%s clean -f -d" % self.git_binary,
                               sandbox=False)
         shelltools.system("%s pull -f -u origin" % self.git_binary,
                           sandbox=False)
     else:
         os.chdir(os.path.dirname(self.repo_path))
         shelltools.system("%s clone %s %s" %
                           (self.git_binary, self.remote, self.repo),
                           sandbox=False)
Example #18
0
    def import_repo_news(self, repo):
        '''Imports news of given repository'''
        my_news_dir = os.path.join(cst.repos, repo, cst.news_dir)
        if not os.path.isdir(my_news_dir):
            return

        for news in os.listdir(my_news_dir):
            local = utils.import_script(os.path.join(my_news_dir, news))
            try:
                metadata = utils.metadata_parser(local["metadata"],
                                                 keys=metadata_keys)
            except IndexError:
                out.warn("Syntax errors found in %s" %
                         os.path.join(my_news_dir, news))
                continue
            self.data.append((repo, metadata, local["message"]))
Example #19
0
def get_size(path, dec=False):
    if os.path.isfile(path):
        if dec:
            return decimal.Decimal(os.path.getsize(path) / (1024 * 1024.0))
        return os.path.getsize(path) / (1024 * 1024.0)
    else:
        foldersize = 0
        for path, dirs, files in os.walk(path):
            for f in files:
                filename = os.path.join(path, f)
                try:
                    foldersize += os.path.getsize(filename)
                except OSError:
                    out.warn("file size not calculated: %s" % filename)
        if dec:
            return decimal.Decimal(foldersize / (1024 * 1024.0))
        return foldersize / (1024 * 1024.0)
Example #20
0
def get_size(path, dec=False):
    if os.path.isfile(path):
        if dec:
            return decimal.Decimal(os.path.getsize(path)/(1024*1024.0))
        return os.path.getsize(path)/(1024*1024.0)
    else:
        foldersize = 0
        for path, dirs, files in os.walk(path):
            for f in files:
                filename = os.path.join(path, f)
                try:
                    foldersize += os.path.getsize(filename)
                except OSError:
                    out.warn("file size not calculated: %s" % filename)
        if dec:
            return decimal.Decimal(foldersize/(1024*1024.0))
        return foldersize/(1024*1024.0)
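
An illustrative call to get_size(); the path is a placeholder:

size_in_mib = get_size("/var/cache/lpms", dec=True)
out.notify("cache size: %.2f MiB" % size_in_mib)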
Example #21
0
def list_disk_pkgs(repo, category):
    '''Lists package names on the disk using repo and category name'''
    packages = []
    source_dir = os.path.join(cst.repos, repo, category)
    if not os.path.isdir(source_dir):
        out.warn("%s does not exist." % out.color(source_dir, "red"))
        return packages

    sources = os.listdir(source_dir)
    if not sources:
        out.warn("%s seems empty." % out.color(source_dir, "red"))
        return packages

    for source in sources:
        if glob.glob(os.path.join(source_dir, source) + "/*.py"):
            packages.append(source)
    return packages
Example #22
0
def list_disk_pkgs(repo, category):
    '''Lists package names on the disk using repo and category name'''
    packages = []
    source_dir = os.path.join(cst.repos, repo, category)
    if not os.path.isdir(source_dir):
        out.warn("%s does not exist." % out.color(source_dir, "red"))
        return packages

    sources = os.listdir(source_dir)
    if not sources:
        out.warn("%s seems empty." % out.color(source_dir, "red"))
        return packages

    for source in sources:
        if glob.glob(os.path.join(source_dir, source)+"/*.py"):
            packages.append(source)
    return packages
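
An illustrative call to list_disk_pkgs(); the repository and category names are placeholders:

for package_name in list_disk_pkgs("main", "app-editors"):
    out.write(package_name + "\n")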
Example #23
0
    def handle_arguments(self):
        """
        Parses arguments and sets some variables
        """
        def append_argument():
            if hasattr(available_argument, "action"):
                if not available_argument.action in self.router:
                    self.router.append(available_argument.action)
            else:
                setattr(self.instruction, available_argument.env_key, True)

        self.invalid = []
        for argument in self.arguments:
            if not argument.startswith("-"):
                self.names.append(argument)
                continue
            elif argument.startswith("--"):
                valid, value = False, None
                if "=" in argument:
                    argument, value = argument.split("=", 1)
                for available_argument in self.available_arguments:
                    if argument == available_argument.arg:
                        if value is not None:
                            self.argument_values[
                                available_argument.action] = value
                        append_argument()
                        valid = True
                        break
                if not valid:
                    self.invalid.append(argument)
            elif argument.startswith("-") and argument[1].isalpha():
                for item in argument[1:]:
                    valid = False
                    for available_argument in self.available_arguments:
                        cond = (hasattr(available_argument, "short") \
                                and available_argument.short[1:] == item)
                        if cond:
                            append_argument()
                            valid = True
                            break
                    if not valid:
                        self.invalid.append("-" + item)
        if self.invalid:
            out.warn("these commands seem invalid: %s" % \
                    ", ".join(self.invalid))
Example #24
0
    def handle_arguments(self):
        """
        Parses arguments and sets some variables
        """
        def append_argument():
            if hasattr(available_argument, "action"):
                if not available_argument.action in self.router:
                    self.router.append(available_argument.action)
            else:
                setattr(self.instruction, available_argument.env_key, True)

        self.invalid = []
        for argument in self.arguments:
            if not argument.startswith("-"):
                self.names.append(argument)
                continue
            elif argument.startswith("--"):
                valid, value = False, None
                if "=" in argument:
                    argument, value = argument.split("=", 1)
                for available_argument in self.available_arguments:
                    if argument == available_argument.arg:
                        if value is not None:
                            self.argument_values[available_argument.action] = value
                        append_argument()
                        valid = True
                        break
                if not valid:
                    self.invalid.append(argument)
            elif argument.startswith("-") and argument[1].isalpha():
                for item in argument[1:]:
                    valid = False
                    for available_argument in self.available_arguments:
                        cond = (hasattr(available_argument, "short") \
                                and available_argument.short[1:] == item)
                        if cond:                           
                            append_argument()
                            valid = True
                            break
                    if not valid:
                        self.invalid.append("-"+item)
        if self.invalid:
            out.warn("these commands seem invalid: %s" % \
                    ", ".join(self.invalid))
Example #25
0
 def run(self, instruct):
     all_packages = self.instdb.get_all_packages()
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             self.removable_packages.add((category, name, slot))
     packages = []
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             packages.append((category+"/"+name+"-"+version))
     if packages:
         out.normal("these package(s) is/are no longer required.")
         # FIXME: This is no good
         # I must find a new method to manage built-in variables and general purpose instructions 
         instruct["ask"] = True
         api.remove_package(packages, instruct)
     else:
         out.warn("no package found.")
Example #26
0
    def list_news(self):
        self.cursor.get_all_news()
        i = 0
        if not self.cursor.data:
            out.warn("no readable news.")
            return
        out.normal("readable messages listed:")
        out.write("index   repo            priority     summary\n")
        out.write("===============================================\n")
        for news in self.cursor.data:
            repo, metadata = news[:-1]
            if not "%s/%s" % (repo, metadata["summary"]) in self.read_items:
                read = out.color("*", "brightgreen")
            else:
                read = ""

            out.write("[%s]%s\t%-15s\t%-12s %s\n" % (out.color(str(i), "green"), read, repo, \
                    metadata["priority"], metadata["summary"]))
            i += 1
Example #28
0
 def run(self, instruct):
     all_packages = self.instdb.get_all_packages()
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             self.removable_packages.add((category, name, slot))
     packages = []
     for installed_item in all_packages:
         category, name, version, slot = installed_item[1:]
         if self.process_packages(category, name, version, slot) is False:
             packages.append((category + "/" + name + "-" + version))
     if packages:
         out.normal("these package(s) is/are no longer required.")
         # FIXME: This is no good
         # I must find a new method to manage built-in variables and general purpose instructions
         instruct["ask"] = True
         api.remove_package(packages, instruct)
     else:
         out.warn("no package found.")
Example #29
0
        def collision_check():
            # TODO: This is a temporary solution. collision_check function
            # must be a reusable part for using in remove operation
            out.normal("checking file collisions...")
            lpms.logger.info("checking file collisions")
            collision_object = file_collisions.CollisionProtect(
                environment.category,
                environment.name,
                environment.slot,
                real_root=environment.real_root,
                source_dir=environment.install_dir)
            collision_object.handle_collisions()
            if collision_object.orphans:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "these files are orphan. the package will adopt the files:\n"
                )
                index = 0
                for orphan in collision_object.orphans:
                    out.notify(orphan)
                    index += 1
                    if index > 100:
                        # FIXME: the files must be logged
                        out.write(
                            out.color(" > ", "brightyellow") +
                            "...and many others.")
                        break

            if collision_object.collisions:
                out.write(
                    out.color(" > ", "brightyellow") +
                    "file collisions detected:\n")
            for item in collision_object.collisions:
                (category, name, slot, version), path = item
                out.write(out.color(" -- ", "red")+category+"/"+name+"-"\
                        +version+":"+slot+" -> "+path+"\n")
            if collision_object.collisions and self.config.collision_protect:
                if environment.force_file_collision:
                    out.warn(
                        "Disregarding these collisions, you have been warned!")
                else:
                    return False
            return True
Example #30
0
        def inline_options_management(inline_options):
            # TODO: inline_options variable must be a set
            # Check inline options, if an option is not available for the package, warn the user
            # iterate over a copy because items may be removed from the list
            for inline_option in list(inline_options):
                if inline_option not in package.options:
                    out.warn("%s option is not available for %s/%s/%s-%s. The option is being removed..." % (
                        inline_option,
                        package.repo,
                        package.category,
                        package.name,
                        package.version
                    ))
                    inline_options.remove(inline_option)

            if inline_options:
                target = self.current_package.id if self.current_package is not \
                        None else self.parent_package.id
                my_package = package.category+"/"+package.name+"/"+package.slot
                if target in self.inline_option_targets:
                    if my_package in self.inline_option_targets[target]:
                        for option in inline_options:
                            self.inline_option_targets[target][my_package].add(option)
                    else:
                        self.inline_option_targets[target][my_package] = set(inline_options)
                else:
                    self.inline_option_targets[target] = {my_package: set(inline_options)}
                
                if package.id in self.inline_options:
                    if not package.id in self.package_options:
                        self.package_options[package.id] = set()
                    for option in inline_options:
                        if not option in self.inline_options[package.id]:
                            self.inline_options[package.id].append(option)
                            if package.id in self.package_options:
                                self.package_options[package.id].add(option)
                else:
                    self.inline_options[package.id] = inline_options
                    if package.id in self.package_options:
                        for inline_option in inline_options:
                            self.package_options[package.id].add(inline_option)
                    else:
                        self.package_options[package.id] = set(inline_options)
Example #31
0
def configure_pending(packages, instruct):
    '''Configures packages that were not configured after the merge operation'''

    if not utils.check_root(msg=False):
        lpms.terminate("you must be root.")

    root = instruct["real_root"]
    if not root:
        root = cst.root
        instruct["real_root"] = root

    pending_file = os.path.join(root, cst.configure_pending_file)

    failed = []

    if not os.access(pending_file, os.F_OK):
        lpms.terminate("there are no pending packages.")

    with open(pending_file, 'rb') as data:
        pending_packages = pickle.load(data)
        for package in pending_packages:
            repo, category, name, version = package
            spec = os.path.join(cst.repos, repo, category,
                                name) + "/" + name + "-" + version + ".py"
            out.normal("configuring %s/%s/%s-%s" %
                       (repo, category, name, version))
            if not os.access(spec, os.R_OK):
                out.warn("%s seems not exist or not readable. skipping..." %
                         spec)
                failed.append(package)
                continue
            if not initpreter.InitializeInterpreter(
                    package, instruct, ['post_install']).initialize():
                out.warn("%s/%s/%s-%s could not configured." %
                         (repo, category, name, version))
                failed.append(package)

    shelltools.remove_file(pending_file)
    if failed:
        with open(pending_file, 'wb') as data:
            pickle.dump(failed, data)
Example #32
0
def main():
    available_repositories = utils.available_repositories()
    for item in os.listdir(cst.repos):
        repo_conf = os.path.join(cst.repos, item, cst.repo_file)
        if os.access(repo_conf, os.F_OK):
            with open(repo_conf) as data:
                data = conf.ReadConfig(data.read().splitlines(), delimiter="@")
            if item in available_repositories:
                out.normal("%s [%s]" % (item, out.color("enabled", "brightgreen")))
            else:
                out.normal("%s [%s]" % (item, out.color("disabled", "brightred")))
            out.notify("system name: %s" % item)
            if hasattr(data, "name"):
                out.notify("development name: %s" % data.name)
            else:
                out.warn("development name is not defined!")
            if hasattr(data, "summary"):
                out.notify("summary: %s" % data.summary)
            else:
                out.warn("summary is not defined!")
            if hasattr(data, "maintainer"):
                out.notify("maintainer: %s" % data.maintainer)
            else:
                out.warn("maintainer is not defined!")
            out.write("\n")
Example #33
0
def main():
    available_repositories = utils.available_repositories()
    for item in os.listdir(cst.repos):
        repo_conf = os.path.join(cst.repos, item, cst.repo_file)
        if os.access(repo_conf, os.F_OK):
            with open(repo_conf) as data:
                data = conf.ReadConfig(data.read().splitlines(), delimiter="@")
            if item in available_repositories:
                out.normal("%s [%s]" %
                           (item, out.color("enabled", "brightgreen")))
            else:
                out.normal("%s [%s]" %
                           (item, out.color("disabled", "brightred")))
            out.notify("system name: %s" % item)
            if hasattr(data, "name"):
                out.notify("development name: %s" % data.name)
            else:
                out.warn("development name is not defined!")
            if hasattr(data, "summary"):
                out.notify("summary: %s" % data.summary)
            else:
                out.warn("summary is not defined!")
            if hasattr(data, "maintainer"):
                out.notify("maintainer: %s" % data.maintainer)
            else:
                out.warn("maintainer is not defined!")
            out.write("\n")
Example #34
0
def configure_pending(packages, instruct):
    '''Configures packages that were not configured after the merge operation'''

    if not utils.check_root(msg=False):
        lpms.terminate("you must be root.")

    root = instruct["real_root"]
    if not root:
        root = cst.root
        instruct["real_root"] = root

    pending_file = os.path.join(root, cst.configure_pending_file)

    failed = []

    if not os.access(pending_file, os.F_OK):
        lpms.terminate("there are no pending packages.")

    with open(pending_file, 'rb') as data:
        pending_packages = pickle.load(data)
        for package in pending_packages:
            repo, category, name, version = package
            spec = os.path.join(cst.repos, repo, category, name)+"/"+name+"-"+version+".py"
            out.normal("configuring %s/%s/%s-%s" % (repo, category, name, version))
            if not os.access(spec, os.R_OK):
                out.warn("%s seems not exist or not readable. skipping..." % spec)
                failed.append(package)
                continue
            if not initpreter.InitializeInterpreter(package, instruct, ['post_install']).initialize():
                out.warn("%s/%s/%s-%s could not configured." % (repo, category, name, version))
                failed.append(package)

    shelltools.remove_file(pending_file)
    if failed:
        with open(pending_file, 'wb') as data:
            pickle.dump(failed, data)
Example #35
0
def set_parser(set_name):
    sets = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets",
                                     "%s.set" % set_name)
        if os.path.isfile(repo_set_file):
            sets.append(repo_set_file)

    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)

    if os.path.isfile(user_set_file):
        sets.append(user_set_file)

    if len(sets) > 1:
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))

        def ask():
            for c, s in enumerate(sets):
                out.write("	" + out.color(str(c + 1), "green") + ") " + s +
                          "\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")

        while True:
            ask()
            answer = sys.stdin.readline().strip()
            if answer == "Q" or answer == "q":
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue

            try:
                set_file = sets[int(answer) - 1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue
    elif len(sets) == 1:
        set_file = sets[0]
    else:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []

    return [line for line in open(set_file).read().strip().split("\n") \
            if not line.startswith("#") and line != ""]
Example #36
0
def set_parser(set_name):
    sets = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets", "%s.set" % set_name)
        if os.path.isfile(repo_set_file):
            sets.append(repo_set_file)
            
    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)

    if os.path.isfile(user_set_file):
        sets.append(user_set_file)

    if len(sets) > 1:
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))
        def ask():
            for c, s in enumerate(sets):
                out.write("	"+out.color(str(c+1), "green")+") "+s+"\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")
            
        while True:
            ask()
            answer = sys.stdin.readline().strip()
            if answer == "Q" or answer == "q":
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue
            
            try:
                set_file = sets[int(answer)-1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue
    elif len(sets) == 1:
        set_file = sets[0]
    else:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []
    
    return [line for line in open(set_file).read().strip().split("\n") \
            if not line.startswith("#") and line != ""]
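
An illustrative call to set_parser(); the set name is a placeholder:

for atom in set_parser("desktop"):
    out.write(atom + "\n")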
Example #37
0
def remove_package(pkgnames, instruction):
    '''Triggers remove operation for given packages'''
    if instruction.like:
        # handle shortened package names
        database = dbapi.InstallDB()
        for item in instruction.like:
            query = database.db.cursor.execute("SELECT name FROM package where name LIKE ?", (item,))
            results = query.fetchall()
            if results:
                for result in results:
                    pkgnames.append(result[0])
        del database
    file_relationsdb = dbapi.FileRelationsDB()
    #try:
    packages = [GetPackage(pkgname, installdb=True).select() for pkgname in pkgnames]
    #except PackageNotFound as package_name:
    #    out.error("%s seems not installed." % package_name)
    #    lpms.terminate()

    instruction.count = len(packages)
    index = 0
    # FIXME: I must create a new reverse dependency handler implementation

    #if instruct["show-reverse-depends"]:
    #    instruct["ask"] = True
    #    # WARNING: the mechanism only shows directly reverse dependencies
    #    # supposing that if A is a reverse dependency of B and C is depends on A.
    #    # when the user removes B, A and C will be broken. But lpms will warn the user about A.
    #    broken_packages = []
    #    reversedb = dbapi.ReverseDependsDB()
    #    out.normal("resolving primary reverse dependencies...\n")
    #    for package in packages:
    #        category, name, version = package[1:]
    #        if lpms.getopt("--use-file-relations"):
    #            broken_packages.extend(file_relations.get_packages(category, name, version))
    #        else:
    #            broken_packages.extend(reversedb.get_reverse_depends(category, name))

    #    if broken_packages:
    #        out.warn("the following packages will be broken:\n")
    #        for broken_package in broken_packages:
    #            broken_repo, broken_category, broken_name, broken_version = broken_package
    #            out.write(" %s %s/%s/%s-%s\n" % (out.color(">", "brightred"), broken_repo, broken_category, \
    #                    broken_name, broken_version))
    #    else:
    #        out.warn("no reverse dependency found.")

    if instruction.ask:
        out.write("\n")
        for package in packages:
            out.write(" %s %s/%s/%s-%s\n" % (out.color(">", "brightgreen"), out.color(package.repo, "green"), 
                out.color(package.category, "green"), out.color(package.name, "green"), 
                out.color(package.version, "green")))
        utils.xterm_title("lpms: confirmation request")
        out.write("\nTotal %s package will be removed.\n\n" % out.color(str(instruction.count), "green"))
        if not utils.confirm("Would you like to continue?"):
            out.write("quitting...\n")
            utils.xterm_title_reset()
            lpms.terminate()

    realroot = instruction.new_root if instruction.new_root else cst.root
    config = conf.LPMSConfig()
    for package in packages:
        fdb = file_collisions.CollisionProtect(package.category, package.name, \
                package.slot, version=package.version, real_root=realroot)
        fdb.handle_collisions()
        if fdb.collisions:
            out.write(out.color(" > ", "brightyellow")+"file collisions detected while removing %s/%s/%s-%s\n\n" \
                    % (package.repo, package.category, package.name, package.version))
        for (c_package, c_path) in fdb.collisions:
            c_category, c_name, c_slot, c_version = c_package
            out.write(out.color(" -- ", "red")+c_category+"/"+c_name+"-"\
                    +c_version+":"+c_slot+" -> "+c_path+"\n")
            if fdb.collisions and config.collision_protect and not \
                    lpms.getopt('--force-file-collision'):
                        out.write("\nquitting... use '--force-file-collision' to continue.\n")
                        lpms.terminate()
        index += 1;
        instruction.index = index
        if not initpreter.InitializeInterpreter(package, instruction, ['remove'], remove=True).initialize():
            out.warn("an error occured during remove operation: %s/%s/%s-%s" % (package.repo, package.category, \
                    package.name, package.version))
        else:
            file_relationsdb.delete_item_by_pkgdata(package.category, package.name, package.version, commit=True)
Example #38
0
    def set_local_environment_variables(self):
        '''
        Sets environment variables such as CFLAGS, CXXFLAGS and LDFLAGS if the user
        defines a local file that contains them
        '''
        switches = ["ADD", "REMOVE", "GLOBAL"]
        for item in cst.local_env_variable_files:
            if not os.access(item, os.R_OK):
                continue
            variable_type = item.split("/")[-1].upper()
            with open(item) as data:
                for line in data.readlines():
                    add = []
                    remove = []
                    global_flags = []
                    if line.startswith("#"):
                        continue
                    myline = [i.strip() for i in line.split(" ")]
                    target = myline[0]
                    if len(target.split("/")) == 2:
                        if target != self.internals.env.category + "/" + self.internals.env.name:
                            continue
                    elif len(target.split("/")) == 1:
                        if target != self.internals.env.category:
                            if len(target.split("-")) == 1:
                                out.warn("warning: invalid line found in %s:" %
                                         item)
                                out.red("   " + line)
                            continue
                    else:
                        if len(target.split("-")) == 1:
                            out.warn("warning: invalid line found in %s:" %
                                     item)
                            out.red("   " + line)
                            continue

                    if variable_type == "ENV":
                        if myline[1] == "UNSET":
                            variable = myline[2]
                            if variable in os.environ:
                                del os.environ[variable]
                        else:
                            try:
                                variable, value = myline[1:]
                            except ValueError:
                                out.warn("warning: invalid line found in %s:" %
                                         item)
                                out.red("   " + line)
                            else:
                                os.environ[variable] = value

                    for switch in switches:
                        if not switch in myline[1:]:
                            continue
                        switch_index = myline.index(switch)
                        for word in myline[switch_index + 1:]:
                            if word in switches:
                                break
                            if switch == "GLOBAL":
                                global_flags.append(word)
                            if switch == "ADD":
                                add.append(word)
                            elif switch == "REMOVE":
                                remove.append(word)

                    if global_flags:
                        if variable_type in os.environ:
                            del os.environ[variable_type]
                            os.environ[variable_type] = " ".join(global_flags)
                    else:
                        if add:
                            if variable_type in os.environ:
                                current = os.environ[variable_type]
                                current += " " + " ".join(add)
                                os.environ[variable_type] = current
                            else:
                                out.warn("%s not defined in your environment" %
                                         variable_type)
                        if remove:
                            if variable_type in os.environ:
                                current = os.environ[variable_type]
                                new = [
                                    atom for atom in current.split(" ")
                                    if not atom in remove
                                ]
                                os.environ[variable_type] = " ".join(new)
                            else:
                                out.warn("%s not defined in your environment" %
                                         variable_type)
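
Inferred from the parsing logic above, a line in one of the files listed in cst.local_env_variable_files (for example a CFLAGS file) would look roughly like the following; the package atom and flags are made-up examples, and words must be separated by single spaces because the parser splits on a single space:

app-editors/vim ADD -O2 REMOVE -march=native
sys-devel GLOBAL -O2 -pipe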
Example #39
0
def remove_package(pkgnames, instruction):
    '''Triggers remove operation for given packages'''
    if instruction.like:
        # handle shortened package names
        database = dbapi.InstallDB()
        for item in instruction.like:
            query = database.db.cursor.execute(
                "SELECT name FROM package where name LIKE ?", (item, ))
            results = query.fetchall()
            if results:
                for result in results:
                    pkgnames.append(result[0])
        del database
    file_relationsdb = dbapi.FileRelationsDB()
    #try:
    packages = [
        GetPackage(pkgname, installdb=True).select() for pkgname in pkgnames
    ]
    #except PackageNotFound as package_name:
    #    out.error("%s seems not installed." % package_name)
    #    lpms.terminate()

    instruction.count = len(packages)
    index = 0
    # FIXME: I must create a new reverse dependency handler implementation

    #if instruct["show-reverse-depends"]:
    #    instruct["ask"] = True
    #    # WARNING: the mechanism only shows directly reverse dependencies
    #    # supposing that if A is a reverse dependency of B and C is depends on A.
    #    # when the user removes B, A and C will be broken. But lpms will warn the user about A.
    #    broken_packages = []
    #    reversedb = dbapi.ReverseDependsDB()
    #    out.normal("resolving primary reverse dependencies...\n")
    #    for package in packages:
    #        category, name, version = package[1:]
    #        if lpms.getopt("--use-file-relations"):
    #            broken_packages.extend(file_relations.get_packages(category, name, version))
    #        else:
    #            broken_packages.extend(reversedb.get_reverse_depends(category, name))

    #    if broken_packages:
    #        out.warn("the following packages will be broken:\n")
    #        for broken_package in broken_packages:
    #            broken_repo, broken_category, broken_name, broken_version = broken_package
    #            out.write(" %s %s/%s/%s-%s\n" % (out.color(">", "brightred"), broken_repo, broken_category, \
    #                    broken_name, broken_version))
    #    else:
    #        out.warn("no reverse dependency found.")

    if instruction.ask:
        out.write("\n")
        for package in packages:
            out.write(
                " %s %s/%s/%s-%s\n" % (out.color(
                    ">", "brightgreen"), out.color(package.repo, "green"),
                                       out.color(package.category, "green"),
                                       out.color(package.name, "green"),
                                       out.color(package.version, "green")))
        utils.xterm_title("lpms: confirmation request")
        out.write("\nTotal %s package will be removed.\n\n" %
                  out.color(str(instruction.count), "green"))
        if not utils.confirm("Would you like to continue?"):
            out.write("quitting...\n")
            utils.xterm_title_reset()
            lpms.terminate()

    realroot = instruction.new_root if instruction.new_root else cst.root
    config = conf.LPMSConfig()
    for package in packages:
        fdb = file_collisions.CollisionProtect(package.category, package.name, \
                package.slot, version=package.version, real_root=realroot)
        fdb.handle_collisions()
        if fdb.collisions:
            out.write(out.color(" > ", "brightyellow")+"file collisions detected while removing %s/%s/%s-%s\n\n" \
                    % (package.repo, package.category, package.name, package.version))
        for (c_package, c_path) in fdb.collisions:
            c_category, c_name, c_slot, c_version = c_package
            out.write(out.color(" -- ", "red")+c_category+"/"+c_name+"-"\
                    +c_version+":"+c_slot+" -> "+c_path+"\n")
            if fdb.collisions and config.collision_protect and not \
                    lpms.getopt('--force-file-collision'):
                out.write(
                    "\nquitting... use '--force-file-collision' to continue.\n"
                )
                lpms.terminate()
        index += 1
        instruction.index = index
        if not initpreter.InitializeInterpreter(
                package, instruction, ['remove'], remove=True).initialize():
            out.warn("an error occured during remove operation: %s/%s/%s-%s" % (package.repo, package.category, \
                    package.name, package.version))
        else:
            file_relationsdb.delete_item_by_pkgdata(package.category,
                                                    package.name,
                                                    package.version,
                                                    commit=True)
Example #40
0
    def update_package(self, repo_path, category, my_pkg, my_version=None, update=False):
        dataset = LCollect()
        # Register some variables to use after
        self.env.repo = os.path.basename(repo_path)
        self.env.category = category

        dataset.repo = self.env.repo
        dataset.category = category

        os.chdir(os.path.join(repo_path, category, my_pkg))
        for pkg in glob.glob("*"+cst.spec_suffix):
            script_path = os.path.join(repo_path, category, my_pkg, pkg)

            self.env.name, self.env.version = utils.parse_pkgname(pkg.split(cst.spec_suffix)[0])

            dataset.name = self.env.name
            dataset.version = self.env.version

            # FIXME: We must develop an upper class or environment to
            # handle these cases and prevent code duplication

            # Begins code duplication
            interphase = re.search(r'-r[0-9][0-9]', self.env.version)
            if not interphase:
                interphase = re.search(r'-r[0-9]', self.env.version)
            self.env.raw_version = self.env.version
            self.env.revision = ""
            # Now, set the real values of these variables if the package is revisioned.
            if interphase is not None and interphase.group():
                self.env.raw_version = self.env.version.replace(interphase.group(), "")
                self.env.revision = interphase.group()
            # End of code duplication

            self.env.__dict__["fullname"] = self.env.name+"-"+self.env.version

            if not self.import_script(script_path):
                out.error("an error occured while processing the spec: %s" \
                        % out.color(script_path, "red"))
                out.error("please report the above error messages to the package maintainer.")
                continue

            metadata = utils.metadata_parser(self.env.metadata)
            metadata.update({"name": self.env.name, "version": self.env.version})
            # This method checks metadata integrity.
            # It warns the user and skips the spec if it is broken.
            self.check_metadata_integrity(metadata)
            # These values are optional
            if not "options" in metadata:
                metadata.update({"options": None})
            if not "slot" in metadata:
                metadata.update({"slot": "0"})
            if not "src_url" in metadata:
                metadata.update({"src_url": None})

            if lpms.getopt("--verbose"):
                out.write("    %s-%s\n" % (self.env.name, self.env.version))
            
            try:
                dataset.summary = metadata['summary']
                dataset.homepage = metadata['homepage']
                dataset.license = metadata['license']
                dataset.src_uri = metadata['src_url']
                if metadata['options'] is None:
                    dataset.options = None
                else:
                    dataset.options = metadata['options'].split(" ")
                dataset.slot = metadata['slot']

            except KeyError as err:
                out.error("%s/%s/%s-%s: invalid metadata" % (repo_name, category, \
                        self.env.name, self.env.version))
                out.warn("repository update was failed and the repository database was removed.")
                out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
                lpms.terminate("good luck!")

            if update:
                self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                        package_name=self.env.name, package_version=self.env.version)

            static_depends_runtime = []
            static_depends_build = []
            static_depends_postmerge = []
            static_depends_conflict = []
            if 'depends' in self.env.__dict__.keys():
                deps = utils.depends_parser(self.env.depends)
                if 'runtime' in deps:
                    static_depends_runtime.extend(deps['runtime'])
                if 'build' in deps:
                    static_depends_build.extend(deps['build'])
                if 'common' in deps:
                    static_depends_runtime.extend(deps['common'])
                    static_depends_build.extend(deps['common'])
                if 'postmerge' in deps:
                    static_depends_postmerge.extend(deps['postmerge'])
                if 'conflict' in deps:
                    static_depends_conflict.extend(deps['conflict'])

            optional_depends_runtime = []
            optional_depends_build = []
            optional_depends_postmerge = []
            optional_depends_conflict = []
            for opt in ('opt_common', 'opt_conflict', 'opt_postmerge', 'opt_runtime', 'opt_build'):
                try:
                    deps = utils.parse_opt_deps(getattr(self.env, opt))
                    if opt.split("_")[1] == "runtime":
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "build":
                        optional_depends_build.append(deps)
                    elif opt.split("_")[1] == "common":
                        optional_depends_build.append(deps)
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "postmerge":
                        optional_depends_postmerge.append(deps)
                    elif opt.split("_")[1] == "conflict":
                        optional_depends_conflict.append(deps)
                    del deps
                except AttributeError:
                    continue

            dataset.optional_depends_runtime = optional_depends_runtime
            dataset.optional_depends_build = optional_depends_build
            dataset.optional_depends_postmerge = optional_depends_postmerge
            dataset.optional_depends_conflict = optional_depends_conflict

            dataset.static_depends_runtime = static_depends_runtime
            dataset.static_depends_build = static_depends_build
            dataset.static_depends_postmerge = static_depends_postmerge
            dataset.static_depends_conflict = static_depends_conflict

            if metadata['arch'] is not None:
                arches = metadata['arch'].split(" ")
                for arch in arches:
                    dataset.arch = arch
                    self.repodb.insert_package(dataset)
            else:
                dataset.arch = None
                self.repodb.insert_package(dataset)

            # remove optional keys
            for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                    'opt_conflict', 'opt_common', 'opt_postmerge'):
                try:
                    del self.env.__dict__[key]
                except KeyError:
                    pass
            self.packages_num += 1
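
A minimal standalone sketch of the dependency bucketing above, assuming utils.depends_parser returns a dict keyed by stage ('runtime', 'build', 'common', 'postmerge', 'conflict'); the sample input is made up for illustration.

def bucket_static_depends(parsed):
    # 'common' dependencies count as both build- and run-time dependencies;
    # every other stage maps straight onto its own bucket.
    buckets = {"runtime": [], "build": [], "postmerge": [], "conflict": []}
    for stage, deps in parsed.items():
        if stage == "common":
            buckets["runtime"].extend(deps)
            buckets["build"].extend(deps)
        elif stage in buckets:
            buckets[stage].extend(deps)
    return buckets

if __name__ == "__main__":
    # Hypothetical parser output, not real lpms data
    print(bucket_static_depends({"build": ["dev-util/cmake"], "common": ["sys-libs/zlib"]}))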
Example #41
0
    def perform_operation(self):
        '''Handles command line arguments and drives the building operation'''
        self.set_environment_variables()
        # Check /proc and /dev. These filesystems must be mounted
        # to perform operations properly.
        for item in ('/proc', '/dev'):
            if not os.path.ismount(item):
                out.warn("%s is not mounted. You have been warned." % item)

        # clean source code extraction directory if it is wanted
        # TODO: check the following condition when resume functionality is back
        if self.instruction.clean_tmp:
            if self.instruction.resume_build is not None:
                out.warn(
                    "clean-tmp is disabled because of resume-build is enabled."
                )
            else:
                self.clean_temporary_directory()

        # Save the starting time of the build operation to calculate the build duration.
        # The starting point of logging
        lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" %
                         (self.instruction.index, self.instruction.count,
                          self.internals.env.repo, self.internals.env.category,
                          self.internals.env.name, self.internals.env.version))

        out.normal(
            "(%s/%s) building %s/%s from %s" %
            (self.instruction.index, self.instruction.count,
             out.color(self.internals.env.category, "green"),
             out.color(
                 self.internals.env.name + "-" + self.internals.env.version,
                 "green"), self.internals.env.repo))

        if self.internals.env.sandbox:
            lpms.logger.info("sandbox enabled build")
            out.notify("sandbox is enabled")
        else:
            lpms.logger.warning("sandbox disabled build")
            out.warn_notify("sandbox is disabled")

        # fetch packages which are in download_plan list
        if self.internals.env.src_url is not None:
            # preprocess URL shortcuts such as $name and $version
            self.parse_src_url_field()
            # if the package is revisioned, override build_dir and install_dir.
            # remove revision number from these variables.
            if self.revisioned:
                for variable in ("build_dir", "install_dir"):
                    new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                            variable)).split(self.revision))
                    setattr(self.internals.env, variable, \
                            os.path.join(os.path.dirname(getattr(self.internals.env, \
                            variable)), new_variable))

            utils.xterm_title(
                "lpms: downloading %s/%s/%s-%s" %
                (self.internals.env.repo, self.internals.env.category,
                 self.internals.env.name, self.internals.env.version))

            self.prepare_download_plan(self.internals.env.applied_options)

            if not fetcher.URLFetcher().run(self.download_plan):
                lpms.terminate("\nplease check the spec")

        if self.internals.env.applied_options is not None and self.internals.env.applied_options:
            out.notify("applied options: %s" %
                       " ".join(self.internals.env.applied_options))

        if self.internals.env.src_url is None and not self.extract_plan \
                and hasattr(self.internals.env, "extract"):
            # Workaround for #208
            self.internals.env.extract_nevertheless = True

        # Remove the previous sandbox log if it exists.
        if os.path.exists(cst.sandbox_log):
            shelltools.remove_file(cst.sandbox_log)

        # Enter the building directory
        os.chdir(self.internals.env.build_dir)

        # Manage ccache
        if hasattr(self.config, "ccache") and self.config.ccache:
            if utils.drive_ccache(config=self.config):
                out.notify("ccache is enabled.")
            else:
                out.warn(
                    "ccache could not be enabled. so you should check dev-util/ccache"
                )

        self.internals.env.start_time = time.time()
        return True, self.internals.env
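
The revision handling above strips the revision suffix from the last component of build_dir and install_dir. A standalone sketch of that string surgery, with a hypothetical path and revision:

import os

def strip_revision(path, revision):
    # Remove the revision suffix (e.g. "-r1") from the basename while
    # keeping the parent directory untouched, as perform_operation does.
    new_basename = "".join(os.path.basename(path).split(revision))
    return os.path.join(os.path.dirname(path), new_basename)

if __name__ == "__main__":
    # Hypothetical build directory; prints /var/tmp/lpms/foo-1.2.3
    print(strip_revision("/var/tmp/lpms/foo-1.2.3-r1", "-r1"))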
Example #42
0
def run_strip(path):
    p = os.popen("/usr/bin/strip --strip-unneeded %s" % path)
    ret = p.close()
    if ret:
        out.warn("/usr/bin/strip/ --strip-unneeded command failed for %s" %
                 path)
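
run_strip shells out through os.popen; a hedged sketch of an equivalent built on subprocess, which avoids shell quoting problems when the path contains spaces (the path in the example call is hypothetical):

import subprocess

def run_strip_subprocess(path):
    # Same idea as run_strip, but with an argument list instead of a shell
    # command line. Assumes /usr/bin/strip exists, as in the original.
    result = subprocess.run(["/usr/bin/strip", "--strip-unneeded", path])
    if result.returncode != 0:
        print("/usr/bin/strip --strip-unneeded command failed for %s" % path)

if __name__ == "__main__":
    run_strip_subprocess("/tmp/example-binary")  # hypothetical path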
Example #43
0
def main(params):
    # determine operation type
    repo_name = None
    if params:
        repo_name = params[0]

    # create operation object
    operation = Update()

    repo_num = 0 
    if repo_name is None:
        # firstly, lpms tries to create a copy of current repository database.
        db_backup()

        out.normal("updating repository database...")
        operation.repodb.database.begin_transaction()
        for repo_name in os.listdir(cst.repos):
            if not repo_name in utils.available_repositories():
                continue
            if os.path.isfile(os.path.join(cst.repos, repo_name, "info/repo.conf")):
                out.write(out.color(" * ", "red") + repo_name+"\n")
                
                operation.update_repository(repo_name)
                repo_num += 1

        operation.repodb.database.commit()
        out.normal("%s repository(ies) is/are updated." % repo_num)
    else:
        if repo_name == ".":
            current_path = os.getcwd()
            for repo_path in [os.path.join(cst.repos, item) \
                    for item in utils.available_repositories()]:
                if current_path == repo_path or len(current_path.split(repo_path)) == 2:
                    # convert the path into a valid repo_name variable
                    repo_name = current_path.split(cst.repos)[1][1:]
                    break
            if repo_name == ".":
                out.warn("%s does not seem a valid repository path." % \
                        out.color(current_path, "red"))
                lpms.terminate()

        if len(repo_name.split("/")) == 2:
            out.normal("updating %s" % repo_name)
            repo, category = repo_name.split("/")
            repo_path = os.path.join(cst.repos, repo)
            
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            operation.repodb.database.begin_transaction()
            for pkg in os.listdir(os.path.join(repo_path, category)):
                try:
                    operation.update_package(repo_path, category, pkg, update=True)
                except IntegrityError:
                    continue
            operation.repodb.database.commit()

        elif len(repo_name.split("/")) == 3:
            version = None
            repo, category, name = repo_name.split("/")
            
            if repo.startswith("="):
                repo = repo[1:]
                try:
                    name, version = utils.parse_pkgname(name)
                except TypeError:
                    out.error("you should give a version number")
                    lpms.terminate()
            else:
                if utils.parse_pkgname(name) is not None and len(utils.parse_pkgname(name)) == 2:
                    out.error("you must use %s" % (out.color("="+repo_name, "red")))
                    lpms.terminate()
            
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            repo_path = os.path.join(cst.repos, repo)
            out.normal("updating %s/%s/%s" % (repo, category, name))
            operation.repodb.database.begin_transaction()
            operation.update_package(repo_path, category, name, my_version = version, update = True)
            operation.repodb.database.commit()
        
        else:
            if not repo_name in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo_name, "red"))
                lpms.terminate()
            
            repo_dir = os.path.join(cst.repos, repo_name)
            if os.path.isdir(repo_dir):
                repo_path = os.path.join(repo_dir, cst.repo_file)
                if os.path.isfile(repo_path):
                    operation.repodb.database.begin_transaction()
                    out.normal("updating repository: %s" % out.color(repo_name, "green"))
                    operation.update_repository(repo_name)
                    operation.repodb.database.commit()
                else:
                    lpms.terminate("repo.conf file could not found in %s" % repo_dir+"/info")
            else:
                lpms.terminate("repo.conf not found in %s" % os.path.join(cst.repos, repo_name))

    out.normal("Total %s packages have been processed." % operation.packages_num)
    
    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if not name in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)
    
    # Close the database connection
    operation.repodb.database.close()
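
main() dispatches on the number of '/'-separated components in its argument: no argument updates every repository, 'repo' updates one repository, 'repo/category' one category and 'repo/category/name' a single package. A tiny standalone sketch of that dispatch (the return strings are placeholders for the real update calls):

def classify_target(repo_name):
    # Mirror of the branch structure in main(); the strings stand in for
    # update_repository()/update_package() calls.
    if repo_name is None:
        return "update all repositories"
    parts = repo_name.split("/")
    if len(parts) == 2:
        return "update category %s in repository %s" % (parts[1], parts[0])
    if len(parts) == 3:
        return "update package %s/%s/%s" % tuple(parts)
    return "update repository %s" % repo_name

if __name__ == "__main__":
    for target in (None, "main", "main/app-editors", "main/app-editors/vim"):
        print(classify_target(target))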
Example #44
0
    def search(self):
        if not list(self.keyword) and lpms.getopt("--only-installed"):
            total = 0
            for package in self.instdb.get_all_names():
                repo, category, name = package
                version_data = self.instdb.get_version(name, repo_name=repo, \
                        pkg_category=category)
                total += 1
                for slot in version_data:
                    out.notify("%s/%s/%s [slot:%s] -> %s" % (repo, category, name, \
                            slot, ", ".join(version_data[slot])))
            out.write("\npackage count: %d\n" % total)
            lpms.terminate()

        if lpms.getopt("--help") or len(self.keyword) == 0:
            self.usage()

        available = True
        results = []
        if not lpms.getopt("--in-summary") and not lpms.getopt("--in-name"):
            self.cursor.execute(
                '''SELECT repo, category, name, version, summary, slot FROM \
                    package WHERE name LIKE (?) OR summary LIKE (?)''',
                ("%" + self.keyword + "%", "%" + self.keyword + "%"))
            results.extend(self.cursor.fetchall())
        elif lpms.getopt("--in-summary"):
            self.cursor.execute(
                '''SELECT repo, category, name, version, summary, slot FROM \
                    package WHERE summary LIKE (?)''',
                ("%" + self.keyword + "%", ))
            results.extend(self.cursor.fetchall())
        else:
            self.cursor.execute(
                '''SELECT repo, category, name, version, summary, slot FROM \
                    package WHERE name LIKE (?)''',
                ("%" + self.keyword + "%", ))
            results.extend(self.cursor.fetchall())

        if not results:
            # if there is no result, search the given keyword in the installed packages database
            connection = sqlite3.connect(cst.installdb_path)
            cursor = connection.cursor()
            cursor.execute(
                '''SELECT repo, category, name, version, summary, slot FROM \
                    package WHERE name LIKE (?) OR summary LIKE (?)''',
                ("%" + self.keyword + "%", "%" + self.keyword + "%"))
            results.extend(cursor.fetchall())
            if results:
                out.notify(
                    "these packages are installed but no longer available.")
                available = False

        packages = self.classificate_packages(results)

        for index, package in enumerate(packages, 1):
            category, name = package
            if lpms.getopt("--interactive"):
                out.write("[" + str(index) + "] " +
                          out.color(category, "green") + "/" +
                          out.color(name, "green") + " - ")
            else:
                out.write(
                    out.color(category, "green") + "/" +
                    out.color(name, "green") + " - ")
            items = {}
            for item in packages[package]:
                if item[0] in items:
                    items[item[0]].append(item[3])
                else:
                    items[item[0]] = [item[3]]
            for item in items:
                out.write(
                    out.color(item, "yellow") + "(" + ", ".join(items[item]) +
                    ") ")
            out.write("\n")
            out.write("    " + packages[package][0][4] + "\n")

        # show a dialog, let the user select packages and trigger the api's build function
        if results and lpms.getopt("--interactive"):
            my_packages = []

            def ask():
                out.write("\ngive number(s):\n")
                out.write(
                    "in order to select more than one package, use space between numbers:\n"
                )
                out.write("to exit, press Q or q.\n")

            while True:
                ask()
                answers = sys.stdin.readline().strip()
                if answers == "Q" or answers == "q":
                    lpms.terminate()
                else:
                    targets = set()
                    for answer in answers.split(" "):
                        if not answer.isdigit():
                            out.warn("%s is invalid. please give a number!" %
                                     out.color(answer, "red"))
                            continue
                        else:
                            targets.add(answer)
                try:
                    my_items = list(packages.keys())
                    for target in targets:
                        my_packages.append("/".join(my_items[int(target) - 1]))
                    break
                except (IndexError, ValueError):
                    out.warn("invalid command.")
                    continue

            if my_packages:
                api.pkgbuild(my_packages, self.instruct)
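
The search above wraps the keyword in LIKE patterns and queries the package table; a self-contained sketch of the same query against an in-memory SQLite database with a deliberately reduced, hypothetical schema:

import sqlite3

connection = sqlite3.connect(":memory:")
cursor = connection.cursor()
# Reduced stand-in for the real 'package' table
cursor.execute("CREATE TABLE package (repo TEXT, category TEXT, name TEXT, "
               "version TEXT, summary TEXT, slot TEXT)")
cursor.execute("INSERT INTO package VALUES (?, ?, ?, ?, ?, ?)",
               ("main", "app-editors", "vim", "8.2", "Vi IMproved", "0"))

keyword = "vim"  # hypothetical keyword
cursor.execute("SELECT repo, category, name, version, summary, slot FROM "
               "package WHERE name LIKE (?) OR summary LIKE (?)",
               ("%" + keyword + "%", "%" + keyword + "%"))
print(cursor.fetchall())
connection.close()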
Example #45
0
    def perform_operation(self):
        '''Handles command line arguments and drives the building operation'''
        self.set_environment_variables()
        # Check /proc and /dev. These filesystems must be mounted 
        # to perform operations properly.
        for item in ('/proc', '/dev'):
            if not os.path.ismount(item):
                out.warn("%s is not mounted. You have been warned." % item)

        # clean source code extraction directory if it is wanted
        # TODO: check the following condition when resume functionality is back
        if self.instruction.clean_tmp:
            if self.instruction.resume_build is not None:
                out.warn("clean-tmp is disabled because of resume-build is enabled.")
            else:
                self.clean_temporary_directory()

        # Save the starting time of the build operation to calculate the build duration.
        # The starting point of logging
        lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" % (
            self.instruction.index,
            self.instruction.count,
            self.internals.env.repo,
            self.internals.env.category,
            self.internals.env.name,
            self.internals.env.version
            )
        )

        out.normal("(%s/%s) building %s/%s from %s" % (
            self.instruction.index,
            self.instruction.count,
            out.color(self.internals.env.category, "green"),
            out.color(self.internals.env.name+"-"+self.internals.env.version, "green"),
            self.internals.env.repo
            )
        )

        if self.internals.env.sandbox:
            lpms.logger.info("sandbox enabled build")
            out.notify("sandbox is enabled")
        else:
            lpms.logger.warning("sandbox disabled build")
            out.warn_notify("sandbox is disabled")

        # fetch packages which are in download_plan list
        if self.internals.env.src_url is not None:
            # preprocess URL shortcuts such as $name and $version
            self.parse_src_url_field()
            # if the package is revisioned, override build_dir and install_dir. 
            # remove revision number from these variables.
            if self.revisioned:
                for variable in ("build_dir", "install_dir"):
                    new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                            variable)).split(self.revision))
                    setattr(self.internals.env, variable, \
                            os.path.join(os.path.dirname(getattr(self.internals.env, \
                            variable)), new_variable))

            utils.xterm_title("lpms: downloading %s/%s/%s-%s" % (
                self.internals.env.repo,
                self.internals.env.category,
                self.internals.env.name,
                self.internals.env.version
                )
            )

            self.prepare_download_plan(self.internals.env.applied_options)

            if not fetcher.URLFetcher().run(self.download_plan):
                lpms.terminate("\nplease check the spec")

        if self.internals.env.applied_options is not None and self.internals.env.applied_options:
            out.notify("applied options: %s" %
                    " ".join(self.internals.env.applied_options))

        if self.internals.env.src_url is None and not self.extract_plan \
                and hasattr(self.internals.env, "extract"):
            # Workaround for #208
            self.internals.env.extract_nevertheless = True

        # Remove the previous sandbox log if it exists.
        if os.path.exists(cst.sandbox_log):
            shelltools.remove_file(cst.sandbox_log)

        # Enter the building directory
        os.chdir(self.internals.env.build_dir)

        # Manage ccache
        if hasattr(self.config, "ccache") and self.config.ccache:
            if utils.drive_ccache(config=self.config):
                out.notify("ccache is enabled.")
            else:
                out.warn("ccache could not be enabled. so you should check dev-util/ccache")

        self.internals.env.start_time = time.time()
        return True, self.internals.env
Example #46
0
def touch(path):
    if os.path.isfile(path):
        out.warn("%s is already exist" % path)
        return
    open(path, 'w').close()
Example #47
0
def run_strip(path):
    p = os.popen("/usr/bin/strip --strip-unneeded %s" % path)
    ret = p.close()
    if ret:
        out.warn("/usr/bin/strip/ --strip-unneeded command failed for %s" % path)
Example #48
0
    def __init__(self, package):
        if len(package) > 1:
            out.warn("this command takes only one package name.")
        self.package = package[0]
        self.instdb = api.InstallDB()
Example #49
0
    def __init__(self, package):
        if len(package) > 1:
            out.warn("this command takes only one package name.")
        self.package = package[0]
        self.instdb = api.InstallDB()
Example #50
0
def touch(path):
    if os.path.isfile(path):
        out.warn("%s is already exist" % path)
        return
    open(path, "w").close()
Example #51
0
    def create_operation_plan(self):
        '''Resolve dependencies and prepares a convenient operation plan'''
        single_packages = PackageItem()
        for package in self.packages:
            self.parent_package = package
            self.current_package = None
            self.package_heap[package.id] = package
            dependencies = []
            package_dependencies = self.collect_dependencies(package)
            if not package_dependencies:
                single_packages.add(package)
                continue
            # Create a list that consists of parent and child items
            for dependency in package_dependencies:
                dependency.parent = package.category+"/"+package.name+"/"+package.slot
                dependencies.append((package.id, dependency))
            while True:
                buff = []
                for parent, dependency in dependencies:
                    self.current_package = dependency
                    self.parent_package = None
                    self.package_query.append((dependency.id, parent))
                    if dependency.id in self.processed:
                        if self.processed[dependency.id] == self.package_options.get(dependency.id, None):
                            # This package was processed and it has no option changes
                            continue

                    # Keep the package options to prevent extra transaction
                    self.processed[dependency.id] = self.package_options.get(dependency.id, None)

                    # Keep the package information for the next operations.
                    # We don't want to create a new transaction for it.
                    self.package_heap[dependency.id] = dependency

                    # Get its dependencies
                    package_collection = self.collect_dependencies(dependency)
                    if not package_collection:
                        # The item has no dependency
                        continue
                    # Create a list that consists of parent and child items
                    for item in package_collection:
                        item.parent = package.category+"/"+package.name+"/"+package.slot
                        buff.append((dependency.id, item))
                if not buff:
                    # End of the node
                    break
                dependencies = buff

        try:
            # Sort packages for building operation
            plan = sorter.topsort(self.package_query)
        except sorter.CycleError as err:
            answer, num_parents, children = err
            out.brightred("Circular dependency detected:\n")
            for items in sorter.find_cycles(parent_children=children):
                for item in items:
                    package = self.repodb.find_package(package_id=item).get(0)
                    out.write(package.repo+"/"+package.category+"/"+package.name+"-"\
                            +package.version+":"+package.slot+"  ")
            out.write("\n")
            raise DependencyError

        # This part detects inline option conflicts
        removed = {}
        option_conflict = set()
        for package_id in self.inline_option_targets:
            for target in self.inline_option_targets[package_id]:
                for option in self.inline_option_targets[package_id][target]:
                    if option.startswith("-"):
                        if option in removed:
                            removed[option].add((package_id, target))
                        else:
                            removed[option] = set([(package_id, target)])
                    else:
                        if "-"+option in removed:
                            for (my_pkg_id, my_target) in removed["-"+option]:
                                if my_target == target:
                                    option_conflict.add((my_target, \
                                            self.package_heap[package_id], \
                                            self.package_heap[my_pkg_id],\
                                            option))
        if option_conflict:
            out.error("option conflict detected:\n")
            for (pkg, add, remove, option) in option_conflict:
                out.error(out.color(option, "red")+" option on "+pkg+"\n")
                out.warn("%s/%s/%s/%s adds the option." % (add.repo, add.category, \
                        add.name, add.version))
                out.warn("%s/%s/%s/%s removes the option." % (remove.repo, remove.category, \
                        remove.name, remove.version))
            lpms.terminate()

        self.conditional_versions = {}
        for (key, values) in self.conditional_packages.items():
            for value in values:
                target_package = self.package_heap[key]
                my_item = {
                            "type": value["type"],
                            "version": value["version"],
                            "target": target_package.category+"/"+target_package.name+\
                                    "/"+target_package.slot,
                }
                if not value["owner_id"] in self.conditional_versions:
                    self.conditional_versions[value["owner_id"]] = [my_item]
                else:
                    self.conditional_versions[value["owner_id"]].append(my_item)

        # TODO: find a more robust way to implement the ignore-depends feature.
        if lpms.getopt("--ignore-deps"):
            result = LCollect()
            result.packages = self.packages
            result.dependencies = self.package_dependencies
            result.options = self.package_options
            result.inline_option_targets = self.inline_option_targets
            result.conditional_versions = self.conditional_versions
            result.conflicts = self.conflicts
            return result

        # Workaround for postmerge dependencies
        for (id_dependency, id_package) in self.postmerge_dependencies:
            plan.remove(id_dependency)
            plan.insert(plan.index(id_package)+1, id_dependency)

        final_plan = PackageItem()
        required_package_ids = [package.id for package in self.packages]
        for package_id in plan:
            package = self.package_heap[package_id]
            continue_conditional = False
            # If a package has a conditional decision point,
            # we should consider the condition
            if package.id not in self.conditional_packages:
                for c_package_id in self.conditional_packages:
                    c_package = self.package_heap[c_package_id]
                    if package.pk == c_package.pk:
                        continue_conditional = True
                        if package_id in required_package_ids:
                            final_plan.add_by_pk(c_package)
                            break
                if package_id in required_package_ids:
                    if continue_conditional is False:
                        final_plan.add_by_pk(package)
            if continue_conditional:
                continue
            installed_package = self.instdb.find_package(
                    package_category=package.category,
                    package_name=package.name,
                    package_slot=package.slot
            )
            if installed_package:
                if package.id in self.inline_options:
                    if installed_package.get(0).applied_options is None:
                        final_plan.add_by_pk(package)
                        continue
                    continue_inline = False
                    for inline_option in self.inline_options[package.id]:
                        if not inline_option in installed_package.get(0).applied_options:
                            final_plan.add_by_pk(package)
                            continue_inline = True
                            break
                    if continue_inline:
                        continue
                try:
                    conditional_versions_query = self.instdb.find_conditional_versions(
                            target=package.category+"/"+package.name+"/"+package.slot)
                    if conditional_versions_query:
                        for item in conditional_versions_query:
                            item.decision_point["package_id"]=item.package_id
                            if package.id in self.conditional_packages:
                                if not item.decision_point in self.conditional_packages[package.id]:
                                    self.conditional_packages[package.id].append(item.decision_point)
                            else:
                                self.conditional_packages[package.id] = [item.decision_point]
                    if package.id in self.conditional_packages:
                        decision_points = self.conditional_packages[package.id]
                        for decision_point in decision_points:
                            comparison = utils.vercmp(installed_package.get(0).version, \
                                        decision_point["version"])
                            if decision_point["type"] == ">=":
                                if self.handle_condition_conflict(decision_point, final_plan, \
                                        package.pk, ("<", ">"), (0, 1)) is False:
                                    continue
                                if not comparison in (1, 0) or package.id in required_package_ids:
                                    final_plan.add_by_pk(package)
                            elif decision_point["type"] == "<":
                                if self.handle_condition_conflict(decision_point, final_plan, \
                                        package.pk, (">", "<"), (0, -1)) is False:
                                    continue
                                if comparison != -1:
                                    final_plan.add_by_pk(package)
                            elif decision_point["type"] == ">":
                                if self.handle_condition_conflict(decision_point, final_plan, \
                                        package.pk, ("<", ">"), (0, 1)) is False:
                                    continue
                                if comparison != 1 or package.id in required_package_ids:
                                    final_plan.add_by_pk(package)
                            elif decision_point["type"] == "<=":
                                if self.handle_condition_conflict(decision_point, final_plan, \
                                        package.pk, (">", "<"), (0, -1)) is False:
                                    continue
                                if not comparison in (-1, 0) or package.id in required_package_ids:
                                    final_plan.add_by_pk(package)
                            elif decision_point["type"] == "==":
                                if comparison != 0 or package.id in required_package_ids:
                                    final_plan.add_by_pk(package)
                except ConditionConflict:
                    if not "owner_package" in decision_point:
                        conflict_package = self.instdb.find_package(package_id=\
                                decision_point["package_id"]).get(0)
                        decision_point["owner_package"] = conflict_package.repo+"/"+ \
                        conflict_package.category+"/"+ \
                        conflict_package.name+"/"+ \
                        conflict_package.version

                    out.error("while selecting a convenient version of %s, a conflict detected:\n" % \
                            out.color(package.pk, "red"))
                    out.notify(decision_point["owner_package"]+" wants "+\
                            decision_point["type"]+decision_point["version"])
                    out.notify(self.conflict_point["owner_package"]+" wants "+\
                            self.conflict_point["type"]+self.conflict_point["version"])
                    lpms.terminate("\nplease contact the package maintainers.")

                # Use new options if the package is effected
                if self.use_new_options and not package in final_plan:
                    if package.id in self.package_options:
                        for option in self.package_options[package.id]:
                            if not option in installed_package.get(0).applied_options:
                                final_plan.add_by_pk(package)
                                break
            else:
                final_plan.add_by_pk(package)

        # Some packages have no dependencies at all; insert them at the front of the plan.
        if single_packages:
            for single_package in single_packages:
                for item_id in plan:
                    if self.package_heap[item_id].pk == single_package.pk:
                        single_packages.remove(single_package)
                        break
            for single_package in single_packages:
                final_plan.insert_into(0, single_package)

        # Create LCollect object to manage package dependency data
        operation_plan = LCollect()
        operation_plan.packages = final_plan
        operation_plan.dependencies = self.package_dependencies
        operation_plan.options = self.package_options
        operation_plan.inline_option_targets = self.inline_option_targets
        operation_plan.conditional_versions = self.conditional_versions
        operation_plan.conflicts = self.conflicts
        return operation_plan
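
sorter.topsort orders package ids so that dependencies come before the packages that need them; a rough standalone equivalent using graphlib from the Python 3.9+ standard library, with invented ids. The (dependency_id, parent_id) tuples mirror the shape of self.package_query above.

from graphlib import TopologicalSorter

# Invented edges: package 1 depends on 2 and 3, package 2 depends on 4
package_query = [(2, 1), (3, 1), (4, 2)]  # (dependency_id, parent_id)

graph = {}
for dependency, parent in package_query:
    # graphlib expects node -> set of predecessors (things built first)
    graph.setdefault(parent, set()).add(dependency)

plan = list(TopologicalSorter(graph).static_order())
print(plan)  # dependencies always precede their parents, e.g. [4, 3, 2, 1]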
Example #52
0
    def update_package(self,
                       repo_path,
                       category,
                       my_pkg,
                       my_version=None,
                       update=False):
        dataset = LCollect()
        # Register some variables to use after
        self.env.repo = os.path.basename(repo_path)
        self.env.category = category

        dataset.repo = self.env.repo
        dataset.category = category

        os.chdir(os.path.join(repo_path, category, my_pkg))
        for pkg in glob.glob("*" + cst.spec_suffix):
            script_path = os.path.join(repo_path, category, my_pkg, pkg)

            self.env.name, self.env.version = utils.parse_pkgname(
                pkg.split(cst.spec_suffix)[0])

            dataset.name = self.env.name
            dataset.version = self.env.version

            # FIXME: We should develop an upper class or a shared environment
            # for these cases to prevent code duplication

            # Begins code duplication
            interphase = re.search(r'-r[0-9][0-9]', self.env.version)
            if not interphase:
                interphase = re.search(r'-r[0-9]', self.env.version)
            self.env.raw_version = self.env.version
            self.env.revision = ""
            # Now, set real values of these variables if package revisioned.
            if interphase is not None and interphase.group():
                self.env.raw_version = self.env.version.replace(
                    interphase.group(), "")
                self.env.revision = interphase.group()
            # End of code duplication

            self.env.__dict__[
                "fullname"] = self.env.name + "-" + self.env.version

            if not self.import_script(script_path):
                out.error("an error occured while processing the spec: %s" \
                        % out.color(script_path, "red"))
                out.error(
                    "please report the above error messages to the package maintainer."
                )
                continue

            metadata = utils.metadata_parser(self.env.metadata)
            metadata.update({
                "name": self.env.name,
                "version": self.env.version
            })
            # This method checks metadata integrity.
            # It warns the user and skips the spec if it is broken.
            self.check_metadata_integrity(metadata)
            # These values are optional
            if not "options" in metadata:
                metadata.update({"options": None})
            if not "slot" in metadata:
                metadata.update({"slot": "0"})
            if not "src_url" in metadata:
                metadata.update({"src_url": None})

            if lpms.getopt("--verbose"):
                out.write("    %s-%s\n" % (self.env.name, self.env.version))

            try:
                dataset.summary = metadata['summary']
                dataset.homepage = metadata['homepage']
                dataset.license = metadata['license']
                dataset.src_uri = metadata['src_url']
                if metadata['options'] is None:
                    dataset.options = None
                else:
                    dataset.options = metadata['options'].split(" ")
                dataset.slot = metadata['slot']

            except KeyError as err:
                out.error("%s/%s/%s-%s: invalid metadata" % (repo_name, category, \
                        self.env.name, self.env.version))
                out.warn(
                    "repository update was failed and the repository database was removed."
                )
                out.warn(
                    "you can run 'lpms --reload-previous-repodb' command to reload previous db version."
                )
                lpms.terminate("good luck!")

            if update:
                self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                        package_name=self.env.name, package_version=self.env.version)

            static_depends_runtime = []
            static_depends_build = []
            static_depends_postmerge = []
            static_depends_conflict = []
            if 'depends' in self.env.__dict__.keys():
                deps = utils.depends_parser(self.env.depends)
                if 'runtime' in deps:
                    static_depends_runtime.extend(deps['runtime'])
                if 'build' in deps:
                    static_depends_build.extend(deps['build'])
                if 'common' in deps:
                    static_depends_runtime.extend(deps['common'])
                    static_depends_build.extend(deps['common'])
                if 'postmerge' in deps:
                    static_depends_postmerge.extend(deps['postmerge'])
                if 'conflict' in deps:
                    static_depends_conflict.extend(deps['conflict'])

            optional_depends_runtime = []
            optional_depends_build = []
            optional_depends_postmerge = []
            optional_depends_conflict = []
            for opt in ('opt_common', 'opt_conflict', 'opt_postmerge',
                        'opt_runtime', 'opt_build'):
                try:
                    deps = utils.parse_opt_deps(getattr(self.env, opt))
                    if opt.split("_")[1] == "runtime":
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "build":
                        optional_depends_build.append(deps)
                    elif opt.split("_")[1] == "common":
                        optional_depends_build.append(deps)
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "postmerge":
                        optional_depends_postmerge.append(deps)
                    elif opt.split("_")[1] == "conflict":
                        optional_depends_conflict.append(deps)
                    del deps
                except AttributeError:
                    continue

            dataset.optional_depends_runtime = optional_depends_runtime
            dataset.optional_depends_build = optional_depends_build
            dataset.optional_depends_postmerge = optional_depends_postmerge
            dataset.optional_depends_conflict = optional_depends_conflict

            dataset.static_depends_runtime = static_depends_runtime
            dataset.static_depends_build = static_depends_build
            dataset.static_depends_postmerge = static_depends_postmerge
            dataset.static_depends_conflict = static_depends_conflict

            if metadata['arch'] is not None:
                arches = metadata['arch'].split(" ")
                for arch in arches:
                    dataset.arch = arch
                    self.repodb.insert_package(dataset)
            else:
                dataset.arch = None
                self.repodb.insert_package(dataset)

            # remove optional keys
            for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                    'opt_conflict', 'opt_common', 'opt_postmerge'):
                try:
                    del self.env.__dict__[key]
                except KeyError:
                    pass
            self.packages_num += 1
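
The three optional-key checks near the top of update_package can be written more compactly with dict.setdefault; a minimal sketch, with a hypothetical metadata dict standing in for what utils.metadata_parser returns:

# Hypothetical parsed metadata; only required keys are present
metadata = {"summary": "An example package", "homepage": "https://example.org"}

# Fill in the optional keys only when the spec did not provide them
metadata.setdefault("options", None)
metadata.setdefault("slot", "0")
metadata.setdefault("src_url", None)

print(metadata["slot"])     # "0"
print(metadata["options"])  # None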
Example #53
0
    def set_local_environment_variables(self):
        '''
        Sets environment variables such as CFLAGS, CXXFLAGS and LDFLAGS if the user
        defines a local file that contains them
        '''
        switches = ["ADD", "REMOVE", "GLOBAL"]
        for item in cst.local_env_variable_files:
            if not os.access(item, os.R_OK):
                continue
            variable_type = item.split("/")[-1].upper()
            with open(item) as data:
                for line in data.readlines():
                    add = []
                    remove = []
                    global_flags = []
                    if line.startswith("#"):
                        continue
                    myline = [i.strip() for i in line.split(" ")]
                    target = myline[0]
                    if len(target.split("/")) == 2:
                        if target != self.internals.env.category+"/"+self.internals.env.name:
                            continue
                    elif len(target.split("/")) == 1:
                        if target != self.internals.env.category:
                            if len(target.split("-")) == 1:
                                out.warn("warning: invalid line found in %s:" % item)
                                out.red("   "+line)
                            continue
                    else:
                        if len(target.split("-")) == 1:
                            out.warn("warning: invalid line found in %s:" % item)
                            out.red("   "+line)
                            continue

                    if variable_type == "ENV":
                        if myline[1] == "UNSET":
                            variable = myline[2]
                            if variable in os.environ:
                                del os.environ[variable]
                        else:
                            try:
                                variable, value = myline[1:]
                            except ValueError:
                                out.warn("warning: invalid line found in %s:" % item)
                                out.red("   "+line)
                            else:
                                os.environ[variable] = value

                    for switch in switches:
                        if not switch in myline[1:]:
                            continue
                        switch_index = myline.index(switch)
                        for word in myline[switch_index+1:]:
                            if word in switches: 
                                break
                            if switch == "GLOBAL":
                                global_flags.append(word)
                            if switch == "ADD":
                                add.append(word)
                            elif switch == "REMOVE":
                                remove.append(word)
                    
                    if global_flags:
                        if variable_type in os.environ:
                            del os.environ[variable_type]
                            os.environ[variable_type] = " ".join(global_flags)
                    else:
                        if add:
                            if variable_type in os.environ:
                                current = os.environ[variable_type]
                                current += " "+" ".join(add)
                                os.environ[variable_type] = current
                            else:
                                out.warn("%s not defined in your environment" % variable_type)
                        if remove:
                            if variable_type in os.environ:
                                current = os.environ[variable_type]
                                new = [atom for atom in current.split(" ") if not atom in remove]
                                os.environ[variable_type] = " ".join(new)
                            else:
                                out.warn("%s not defined in your environment" % variable_type)
Example #54
0
def main(params):
    # determine operation type
    repo_name = None
    if params:
        repo_name = params[0]

    # create operation object
    operation = Update()

    repo_num = 0
    if repo_name is None:
        # firstly, lpms tries to create a copy of current repository database.
        db_backup()

        out.normal("updating repository database...")
        operation.repodb.database.begin_transaction()
        for repo_name in os.listdir(cst.repos):
            if not repo_name in utils.available_repositories():
                continue
            if os.path.isfile(
                    os.path.join(cst.repos, repo_name, "info/repo.conf")):
                out.write(out.color(" * ", "red") + repo_name + "\n")

                operation.update_repository(repo_name)
                repo_num += 1

        operation.repodb.database.commit()
        out.normal("%s repository(ies) is/are updated." % repo_num)
    else:
        if repo_name == ".":
            current_path = os.getcwd()
            for repo_path in [os.path.join(cst.repos, item) \
                    for item in utils.available_repositories()]:
                if current_path == repo_path or len(
                        current_path.split(repo_path)) == 2:
                    # convert the path into a valid repo_name variable
                    repo_name = current_path.split(cst.repos)[1][1:]
                    break
            if repo_name == ".":
                out.warn("%s does not seem a valid repository path." % \
                        out.color(current_path, "red"))
                lpms.terminate()

        if len(repo_name.split("/")) == 2:
            out.normal("updating %s" % repo_name)
            repo, category = repo_name.split("/")
            repo_path = os.path.join(cst.repos, repo)

            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            operation.repodb.database.begin_transaction()
            for pkg in os.listdir(os.path.join(repo_path, category)):
                try:
                    operation.update_package(repo_path,
                                             category,
                                             pkg,
                                             update=True)
                except IntegrityError:
                    continue
            operation.repodb.database.commit()

        elif len(repo_name.split("/")) == 3:
            version = None
            repo, category, name = repo_name.split("/")

            if repo.startswith("="):
                repo = repo[1:]
                try:
                    name, version = utils.parse_pkgname(name)
                except TypeError:
                    out.error("you should give a version number")
                    lpms.terminate()
            else:
                if utils.parse_pkgname(name) is not None and len(
                        utils.parse_pkgname(name)) == 2:
                    out.error("you must use %s" %
                              (out.color("=" + repo_name, "red")))
                    lpms.terminate()

            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()

            repo_path = os.path.join(cst.repos, repo)
            out.normal("updating %s/%s/%s" % (repo, category, name))
            operation.repodb.database.begin_transaction()
            operation.update_package(repo_path,
                                     category,
                                     name,
                                     my_version=version,
                                     update=True)
            operation.repodb.database.commit()

        else:
            if not repo_name in utils.available_repositories():
                out.error("%s is not a repository." %
                          out.color(repo_name, "red"))
                lpms.terminate()

            repo_dir = os.path.join(cst.repos, repo_name)
            if os.path.isdir(repo_dir):
                repo_path = os.path.join(repo_dir, cst.repo_file)
                if os.path.isfile(repo_path):
                    operation.repodb.database.begin_transaction()
                    out.normal("updating repository: %s" %
                               out.color(repo_name, "green"))
                    operation.update_repository(repo_name)
                    operation.repodb.database.commit()
                else:
                    lpms.terminate("repo.conf file could not found in %s" %
                                   repo_dir + "/info")
            else:
                lpms.terminate("repo.conf not found in %s" %
                               os.path.join(cst.repos, repo_name))

    out.normal("Total %s packages have been processed." %
               operation.packages_num)

    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if not name in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)

    # Close the database connection
    operation.repodb.database.close()