def __init__(self, blade_root_dir, distcc_hosts_list=[]):
        # ccache
        self.blade_root_dir = blade_root_dir
        self.ccache_installed = self._check_ccache_install()

        # distcc
        self.distcc_env_prepared = False
        self.distcc_installed = self._check_distcc_install()
        if distcc_hosts_list:
            self.distcc_host_list = distcc_hosts_list
        else:
            self.distcc_host_list = os.environ.get("DISTCC_HOSTS", "")
        if self.distcc_installed and self.distcc_host_list:
            self.distcc_env_prepared = True
        if self.distcc_installed and not self.distcc_host_list:
            warning("DISTCC_HOSTS not set but you have " "distcc installed, will just build locally")
        self.distcc_log_file = os.environ.get("DISTCC_LOG", "")
        if self.distcc_log_file:
            info("distcc log: %s" % self.distcc_log_file)

        # dccc
        self.dccc_env_prepared = True
        self.dccc_master = os.environ.get("MASTER_HOSTS", "")
        self.dccc_hosts_list = os.environ.get("DISTLD_HOSTS", "")
        self.dccc_installed = self._check_dccc_install()
        if self.dccc_installed:
            if not self.dccc_master and not self.dccc_hosts_list:
                self.dccc_env_prepared = False
                warning("MASTER_HOSTS and DISTLD_HOSTS not set " "but you have dccc installed, will just build locally")
        else:
            self.dccc_env_prepared = False

        self.rules_buf = []
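A minimal sketch (not part of Blade; the class name and values below are made up) of preparing the environment variables this constructor reads before building:

import os

# Hypothetical distcc volunteers; DISTCC_HOSTS is read above when no explicit list is given.
os.environ.setdefault("DISTCC_HOSTS", "localhost/4 buildbox1/8 buildbox2/8")
# Optional distcc log path, also read above.
os.environ.setdefault("DISTCC_LOG", "/tmp/distcc.log")
# MASTER_HOSTS and DISTLD_HOSTS would be set the same way to enable dccc.

# env = BuildEnvironment("/path/to/workspace")  # assumed class name, for illustration only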
Example #2
 def _check_dccc_install():
     """Check dccc is installed or not. """
     home_dir = os.environ.get("HOME", "")
     if home_dir and os.path.exists(os.path.join(home_dir, "bin", "dccc")):
         info("dccc found")
         return True
     return False
Example #3
    def __init__(self, blade_root_dir, distcc_hosts_list=[]):
        # ccache
        self.blade_root_dir = blade_root_dir
        self.ccache_installed = self._check_ccache_install()

        # distcc
        self.distcc_env_prepared = False
        self.distcc_installed = self._check_distcc_install()
        if distcc_hosts_list:
            self.distcc_host_list = distcc_hosts_list
        else:
            self.distcc_host_list = os.environ.get('DISTCC_HOSTS', '')
        if self.distcc_installed and self.distcc_host_list:
            self.distcc_env_prepared = True
        if self.distcc_installed and not self.distcc_host_list:
            warning("DISTCC_HOSTS not set but you have "
                    "distcc installed, will just build locally")
        self.distcc_log_file = os.environ.get('DISTCC_LOG', '')
        if self.distcc_log_file:
            info("distcc log: %s" % self.distcc_log_file)

        # dccc
        self.dccc_env_prepared = True
        self.dccc_master = os.environ.get('MASTER_HOSTS', '')
        self.dccc_hosts_list = os.environ.get('DISTLD_HOSTS', '')
        self.dccc_installed = self._check_dccc_install()
        if self.dccc_installed:
            if not self.dccc_master and not self.dccc_hosts_list:
                self.dccc_env_prepared = False
                warning("MASTER_HOSTS and DISTLD_HOSTS not set "
                        "but you have dccc installed, will just build locally")
        else:
            self.dccc_env_prepared = False

        self.rules_buf = []
def _blade_action_postfunc(closing_message):
    """To do post jobs if blade's own actions failed to build. """
    info(closing_message)
    # Remember to write the dblite in case we need to re-link after the
    # last build failed. We should work out a way to avoid rebuilding
    # after a failure of our own builders or actions.
    SCons.SConsign.write()
Example #5
    def run_target(self, target_key):
        """Run one single target. """
        target = self.targets.get(target_key, {})
        if not target:
            error_exit("target %s:%s is not in the target databases" % (
                       target_key[0], target_key[1]))
        if target['type'] not in self.run_list:
            error_exit("target %s:%s is not a target that could run" % (
                       target_key[0], target_key[1]))
        self._prepare_run_env(target)
        old_pwd = get_cwd()
        cmd = "%s " % os.path.abspath(self._test_executable(target))
        if self.options.runargs:
            cmd += "%s" % self.options.runargs
        info("it will run '%s' " % cmd )
        sys.stdout.flush()

        target_dir = os.path.dirname(self._test_executable(target))
        os.chdir(target_dir)
        run_env = dict(os.environ)
        run_env['LD_LIBRARY_PATH'] = target_dir
        p = subprocess.Popen(cmd,
                             env=run_env,
                             shell=True)
        p.wait()
        os.chdir(old_pwd)
        return p.returncode
 def _check_dccc_install():
     """Check dccc is installed or not. """
     home_dir = os.environ.get("HOME", "")
     if home_dir and os.path.exists(os.path.join(home_dir, "bin", "dccc")):
         info("dccc found")
         return True
     return False
Example #7
    def run(self):
        """Run all the cc_test target programs. """
        failed_targets = []
        self._get_inctest_run_list()
        tests_run_list = []
        old_pwd = get_cwd()
        for target in self.targets.values():
            if not (target['type'] == 'cc_test' or
                    target['type'] == 'dynamic_cc_test'):
                continue
            if (not self.run_all) and target not in self.inctest_run_list:
                if not target.get('options', {}).get('always_run', False):
                    self.skipped_tests.append((target['path'], target['name']))
                    continue
            self._prepare_test_env(target)
            cmd = "%s --gtest_output=xml" % os.path.abspath(self._test_executable(target))
            if self.options.testargs:
                cmd = "%s %s" % (cmd, self.options.testargs)

            sys.stdout.flush() # flush pending output before the tests run, in case stdout is redirected

            test_env = dict(os.environ)
            test_env['LD_LIBRARY_PATH'] = self._runfiles_dir(target)
            test_env['GTEST_COLOR'] = 'yes' if blade_util.color_enabled else 'no'
            test_env['HEAPCHECK'] = target.get('options', {}).get('heap_check', '')
            tests_run_list.append((target,
                                   self._runfiles_dir(target),
                                   test_env,
                                   cmd))
        concurrent_jobs = 0
        if hasattr(self.options, 'test_jobs'):
            concurrent_jobs = self.options.test_jobs
        scheduler = TestScheduler(tests_run_list,
                                  concurrent_jobs,
                                  self.tests_run_map)
        scheduler.schedule_jobs()

        os.chdir(old_pwd)
        self._clean_test_env()
        info("%s Testing Summary %s" % (self.title_str, self.title_str))
        info("Run %d test targets" % scheduler.num_of_run_tests)

        failed_targets = scheduler.failed_targets
        if failed_targets:
            info("%d tests failed:" % len(failed_targets))
            for i in failed_targets:
                print "%s/%s, exit code: %s" % (
                    i["path"], i["name"], i["test_exit_code"])
                test_file_name = os.path.abspath(self._test_executable(i))
                # Do not skip failed test by default
                if self.cur_target_dict.has_key(test_file_name):
                    self.cur_target_dict[test_file_name] = (0, 0)
            info("%d tests passed" % (
                scheduler.num_of_run_tests - len(failed_targets)))
            self._finish_tests()
            return 1
        else:
            info("All tests passed!")
            self._finish_tests()
            return 0
Example #8
def _blade_action_postfunc(closing_message):
    """To do post jobs if blade's own actions failed to build. """
    info(closing_message)
    # Remember to write the dblite in case we need to re-link after the
    # last build failed. We should work out a way to avoid rebuilding
    # after a failure of our own builders or actions.
    SCons.SConsign.write()
Example #9
 def analyze_targets(self):
     """Expand the targets. """
     info("analyzing dependency graph...")
     self.deps_analyzer = DependenciesAnalyzer(self)
     self.deps_analyzer.analyze_deps()
     info("analyzing done.")
     return self.all_targets_expanded
Example #10
def clean(options):
    info("cleaning...(hint: please specify --generate-dynamic to clean your so)")
    p = subprocess.Popen(
    "scons --duplicate=soft-copy -c -s --cache-show", shell=True)
    p.wait()
    info("cleaning done.")
    return p.returncode
Example #11
 def analyze_targets(self):
     """Expand the targets. """
     info("analyzing dependency graph...")
     self.deps_analyzer = DependenciesAnalyzer(self)
     self.deps_analyzer.analyze_deps()
     info("analyzing done.")
     return self.all_targets_expanded
Example #12
    def schedule_jobs(self):
        """scheduler. """
        if self.num_of_tests <= 0:
            return True

        num_of_workers = self.__get_workers_num()
        info("spawn %d worker(s) to run tests" % num_of_workers)

        for i in self.tests_list:
            target = i[0]
            if target.get('options', {}).get('exclusive', False):
                self.exclusive_job_queue.put(i)
            else:
                self.job_queue.put(i)

        test_arg = [self.job_queue, num_of_workers > 1]
        for i in range(num_of_workers):
            t = WorkerThread((i), self._process_command, args=test_arg)
            t.start()
            self.threads.append(t)
        for t in self.threads:
            t.join()

        if not self.exclusive_job_queue.empty():
            info("spawn 1 worker to run exclusive tests")
            test_arg = [self.exclusive_job_queue, False]
            last_t = WorkerThread((num_of_workers), self._process_command, args=test_arg)
            last_t.start()
            last_t.join()

        self.print_summary()
        return True
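For reference, each job queued above is the 4-tuple assembled in the test runner's run() method: (target, run_dir, test_env, cmd). A minimal sketch with hypothetical values:

import os

# Shape of one scheduler job as assembled in run() above (all values hypothetical).
target = {'path': 'app/common', 'name': 'string_test', 'type': 'cc_test', 'options': {}}
run_dir = 'build64_release/app/common/string_test.runfiles'
test_env = dict(os.environ)
test_env['LD_LIBRARY_PATH'] = run_dir
test_env['GTEST_COLOR'] = 'no'
cmd = os.path.abspath(os.path.join(run_dir, 'string_test')) + ' --gtest_output=xml'
job = (target, run_dir, test_env, cmd)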
Example #13
 def generate_build_rules(self):
     """Generate the constructing rules. """
     info("generating build rules...")
     self.build_rules_generator = SconsRulesGenerator(
         'SConstruct', self.blade_path, self)
     rules_buf = self.build_rules_generator.generate_scons_script()
     info("generating done.")
     return rules_buf
Example #14
 def _show_skipped_tests_detail(self):
     """show tests skipped. """
     if not self.skipped_tests:
         return
     self.skipped_tests.sort()
     info("skipped tests")
     for target_key in self.skipped_tests:
         print "%s:%s" % (target_key[0], target_key[1])
Example #15
 def generate_build_rules(self):
     """Generate the constructing rules. """
     info("generating build rules...")
     self.build_rules_generator = SconsRulesGenerator('SConstruct',
                                                      self.blade_path, self)
     rules_buf = self.build_rules_generator.generate_scons_script()
     info("generating done.")
     return rules_buf
Example #16
 def __init__(self, worker_args, proc_func, args):
     """Init methods for this thread. """
     threading.Thread.__init__(self)
     self.worker_args = worker_args
     self.func_args = args
     self.job_handler = proc_func
     self.thread_id = int(self.worker_args)
     self.start_working_time = time.time()
     self.end_working_time = None
     self.ret = None
     info("blade test executor %d starts to work" % self.thread_id)
Example #17
 def _check_distcc_install():
     """Check distcc is installed or not. """
     p = subprocess.Popen("distcc --version",
                          env={},
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          shell=True,
                          universal_newlines=True)
     (stdout, stderr) = p.communicate()
     if p.returncode == 0:
         version_line = stdout.splitlines(True)[0]
         if version_line and version_line.find("distcc") != -1:
             info("distcc found")
             return True
     return False
Example #18
 def _check_ccache_install():
     """Check ccache is installed or not. """
     p = subprocess.Popen("ccache --version",
                          env=os.environ,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          shell=True,
                          universal_newlines=True)
     (stdout, stderr) = p.communicate()
     if p.returncode == 0:
         version_line = stdout.splitlines(True)[0]
         if version_line and version_line.find("ccache version") != -1:
             info("ccache found")
             return True
     return False
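The installation checks above shell out to '--version'; a minimal alternative sketch (not Blade's code) that only looks the tools up on PATH:

from distutils.spawn import find_executable

def _tool_installed(name):
    """Return True if the named tool is found on PATH."""
    return find_executable(name) is not None

# _tool_installed('ccache'), _tool_installed('distcc')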
 def _check_distcc_install():
     """Check distcc is installed or not. """
     p = subprocess.Popen(
         "distcc --version",
         env={},
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
         shell=True,
         universal_newlines=True,
     )
     (stdout, stderr) = p.communicate()
     if p.returncode == 0:
         version_line = stdout.splitlines(True)[0]
         if version_line and version_line.find("distcc") != -1:
             info("distcc found")
             return True
     return False
Example #20
def generate_resource_file(target, source, env):
    src_path = str(source[0])
    new_src_path = str(target[0])
    cmd = "xxd -i %s | sed 's/unsigned char /const char RESOURCE_/g' > %s" % (
        src_path, new_src_path)
    p = subprocess.Popen(cmd,
                         env={},
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate resource file")
    return p.returncode
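The xxd/sed pipeline above embeds a source file as a C array whose symbol is derived from its path (e.g. const char RESOURCE_app_data_txt[] for app/data.txt). A rough pure-Python sketch of the same idea (the exact symbol naming may differ slightly from xxd's output):

import re

def embed_resource(src_path, dst_path):
    """Write src_path out as a const char array, similar to the pipeline above."""
    data = open(src_path, "rb").read()
    symbol = re.sub(r"[^0-9a-zA-Z]", "_", src_path)
    # Python 2: iterating a str yields 1-char strings, hence ord(); on Python 3 bytes yield ints.
    body = ", ".join("0x%02x" % ord(c) for c in data)
    with open(dst_path, "w") as out:
        out.write("const char RESOURCE_%s[] = {%s};\n" % (symbol, body))
        out.write("unsigned int RESOURCE_%s_len = %d;\n" % (symbol, len(data)))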
 def _check_ccache_install():
     """Check ccache is installed or not. """
     p = subprocess.Popen(
         "ccache --version",
         env=os.environ,
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
         shell=True,
         universal_newlines=True,
     )
     (stdout, stderr) = p.communicate()
     if p.returncode == 0:
         version_line = stdout.splitlines(True)[0]
         if version_line and version_line.find("ccache version") != -1:
             info("ccache found")
             return True
     return False
def generate_resource_file(target, source, env):
    src_path = str(source[0])
    new_src_path = str(target[0])
    cmd = "xxd -i %s | sed 's/unsigned char /const char RESOURCE_/g' > %s" % (
           src_path, new_src_path)
    p = subprocess.Popen(
            cmd,
            env={},
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate resource file")
    return p.returncode
Example #23
    def _show_tests_detail(self):
        """show the tests detail after scheduling them. """
        sort_buf = []
        for key in self.tests_run_map.keys():
            costtime = self.tests_run_map.get(key, {}).get('costtime', 0)
            sort_buf.append((key, costtime))
        sort_buf.sort(key=lambda x : x[1])

        if self.tests_run_map.keys():
            info("%s Testing detail %s" %(self.title_str, self.title_str))
        for key, costtime in sort_buf:
            reason = self.tests_run_map.get(key, {}).get('reason', 'UNKNOWN')
            result = self.tests_run_map.get(key, {}).get('result',
                                                         'INTERRUPTED')
            if 'SIG' in result:
                result = "with %s" % result
            print info_str("%s triggered by %s, exit(%s), cost %.2f s" % (
                           key, reason, result, costtime))
Example #24
    def _run_job(self, job):
        """run job, do not redirect the output. """
        (target, run_dir, test_env, cmd) = job
        info("Running %s/%s" % (target['path'], target['name']))

        start_time = time.time()
        p = subprocess.Popen(cmd,
                             env=test_env,
                             cwd=run_dir,
                             shell=True)

        p.wait()
        cost_time = time.time() - start_time
        result = self.__get_result(p.returncode)
        info("%s/%s finished : %s\n" % (
             target['path'], target['name'], result))

        return (target, p.returncode, cost_time)
Example #25
    def load_targets(self):
        """Load the targets. """
        info("loading BUILDs...")
        if self.kwargs.get('blade_command', '') == 'query':
            working_dir = self.current_source_path

            if '...' not in self.command_targets:
                new_target_list = []
                for target in self.command_targets:
                    new_target_list.append("%s:%s" %
                                           self._get_normpath_target(target))
                self.command_targets = new_target_list
        else:
            working_dir = self.working_dir
        (self.direct_targets, self.all_command_targets) = load_targets(
            self.command_targets, working_dir, self.current_source_path, self)
        info("loading done.")
        return self.direct_targets, self.all_command_targets
Example #26
def create_fast_link_builders(env):
    """Creates fast link builders - Program and  SharedLibrary. """
    # Check requirement
    acquire_temp_place = "df | grep tmpfs | awk '{print $5, $6}'"
    p = subprocess.Popen(acquire_temp_place,
                         env=os.environ,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()

    # Do not try to overwrite builder with error
    if p.returncode:
        warning("you have link on tmp enabled, "
                "but the tmpfs check failed, will link normally.")
        return

    # No tmpfs to do fastlink, will not overwrite the builder
    if not std_out:
        warning(
            "you have link on tmp enabled, but there is no tmpfs mounted, will link normally.")
        return

    # Use the first one
    global linking_tmp_dir
    usage, linking_tmp_dir = tuple(std_out.splitlines(False)[0].split())

    # Do not try to do that if there is no memory space left
    usage = int(usage.replace("%", ""))
    if usage > 90:
        warning("you have link on tmp enabled, "
                "but there is not enough space on %s to make it." %
                linking_tmp_dir)
        return

    info("building in link on tmpfs mode")

    create_fast_link_sharelib_builder(env)
    create_fast_link_prog_builder(env)
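The df pipeline above is expected to print one 'use% mountpoint' pair per tmpfs line; a tiny sketch of the parsing that follows it, using assumed sample output:

# Hypothetical output of "df | grep tmpfs | awk '{print $5, $6}'"
sample_output = "1% /dev/shm\n40% /run\n"

# Same parsing as above: take the first tmpfs mount and its usage percentage.
usage, tmp_dir = sample_output.splitlines()[0].split()
usage = int(usage.replace("%", ""))   # -> 1
# tmp_dir -> "/dev/shm"; linking on tmpfs is skipped above when usage > 90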
Example #27
    def _run_job_redirect(self, job):
        """run job, redirect the output. """
        (target, run_dir, test_env, cmd) = job
        test_name = "%s/%s" % (target['path'], target['name'])

        info("Running %s" % test_name)
        start_time = time.time()
        p = subprocess.Popen(cmd,
                             env=test_env,
                             cwd=run_dir,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             shell=True)

        (stdoutdata, stderrdata) = p.communicate()
        cost_time = time.time() - start_time
        result = self.__get_result(p.returncode)
        print "%s\n%s%s\n" % (info_str("Output of %s" % test_name),
                stdoutdata, info_str("%s finished: %s" % (test_name, result)))

        return (target, p.returncode, cost_time)
Example #28
    def load_targets(self):
        """Load the targets. """
        info("loading BUILDs...")
        if self.kwargs.get('blade_command', '') == 'query':
            working_dir = self.current_source_path

            if '...' not in self.command_targets:
                new_target_list = []
                for target in self.command_targets:
                    new_target_list.append("%s:%s" %
                            self._get_normpath_target(target))
                self.command_targets = new_target_list
        else:
            working_dir = self.working_dir
        (self.direct_targets,
         self.all_command_targets) = load_targets(self.command_targets,
                                                  working_dir,
                                                  self.current_source_path,
                                                  self)
        info("loading done.")
        return self.direct_targets, self.all_command_targets
Example #29
 def tune_parallel_jobs_num(self):
     """Tune the jobs num. """
     user_jobs_num = self.options.jobs
     jobs_num = 0
     cpu_core_num = multiprocessing.cpu_count()
     if self.distcc_enabled and self.build_environment.distcc_env_prepared:
         jobs_num = int(1.5*len(self.build_environment.get_distcc_hosts_list())) + 1
         if jobs_num > 20:
             jobs_num = 20
         if jobs_num and self.options.jobs != jobs_num:
             self.options.jobs = jobs_num
     elif self.options.jobs < 1:
         if cpu_core_num <= 4:
             self.options.jobs = 2*cpu_core_num
         else:
             self.options.jobs = cpu_core_num
             if self.options.jobs > 8:
                 self.options.jobs = 8
     if self.options.jobs != user_jobs_num:
         info("tunes the parallel jobs number(-j N) to be %d" % (
                      self.options.jobs))
     return self.options.jobs
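A few worked instances of the tuning rule above (plain arithmetic, not Blade code):

# 6 distcc hosts: int(1.5 * 6) + 1 == 10 parallel jobs (the hard cap is 20)
assert int(1.5 * 6) + 1 == 10
# 2 local cores (<= 4): 2 * 2 == 4 jobs
assert 2 * 2 == 4
# 16 local cores (> 4): jobs start at 16 but are capped at 8
assert min(16, 8) == 8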
def create_fast_link_builders(env):
    """Creates fast link builders - Program and  SharedLibrary. """
    # Check requirement
    acquire_temp_place = "df | grep tmpfs | awk '{print $5, $6}'"
    p = subprocess.Popen(
                        acquire_temp_place,
                        env=os.environ,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        shell=True,
                        universal_newlines=True)
    std_out, std_err = p.communicate()

    # Do not try to overwrite builder with error
    if p.returncode:
        warning("you have link on tmp enabled, but it is not fullfilled to make it.")
        return

    # No tmpfs to do fastlink, will not overwrite the builder
    if not std_out:
        warning("you have link on tmp enabled, but there is no tmpfs to make it.")
        return

    # Use the first one
    global linking_tmp_dir
    usage, linking_tmp_dir = tuple(std_out.splitlines(False)[0].split())

    # Do not try to do that if there is no memory space left
    usage = int(usage.replace("%", ""))
    if usage > 90:
        warning("you have link on tmp enabled, "
                "but there is not enough space on %s to make it." % linking_tmp_dir)
        return

    info("building in link on tmpfs mode")

    create_fast_link_sharelib_builder(env)
    create_fast_link_prog_builder(env)
Example #31
 def query(self, targets):
     """Query the targets. """
     print_deps = hasattr(self.options, 'deps') and (self.options.deps)
     print_depended = hasattr(self.options,
                              'depended') and (self.options.depended)
     result_map = self.query_helper(targets)
     for key in result_map.keys():
         if print_deps:
             print "\n"
             deps = result_map[key][0]
             info("//%s:%s depends on the following targets:" %
                  (key[0], key[1]))
             for d in deps:
                 print "%s:%s" % (d[0], d[1])
         if print_depended:
             print "\n"
             depended_by = result_map[key][1]
             info("//%s:%s is depeneded by the following targets:" %
                  (key[0], key[1]))
             depended_by.sort(key=lambda x: x, reverse=False)
             for d in depended_by:
                 print "%s:%s" % (d[0], d[1])
     return 0
Example #32
 def tune_parallel_jobs_num(self):
     """Tune the jobs num. """
     user_jobs_num = self.options.jobs
     jobs_num = 0
     cpu_core_num = multiprocessing.cpu_count()
     if self.distcc_enabled and self.build_environment.distcc_env_prepared:
         jobs_num = int(
             1.5 * len(self.build_environment.get_distcc_hosts_list())) + 1
         if jobs_num > 20:
             jobs_num = 20
         if jobs_num and self.options.jobs != jobs_num:
             self.options.jobs = jobs_num
     elif self.options.jobs < 1:
         if cpu_core_num <= 4:
             self.options.jobs = 2 * cpu_core_num
         else:
             self.options.jobs = cpu_core_num
             if self.options.jobs > 8:
                 self.options.jobs = 8
     if self.options.jobs != user_jobs_num:
         info("tunes the parallel jobs number(-j N) to be %d" %
              (self.options.jobs))
     return self.options.jobs
Example #33
 def query(self, targets):
     """Query the targets. """
     print_deps = hasattr(self.options, 'deps') and (
                     self.options.deps)
     print_depended = hasattr(self.options, 'depended') and (
                     self.options.depended)
     result_map = self.query_helper(targets)
     for key in result_map.keys():
         if print_deps:
             print "\n"
             deps = result_map[key][0]
             info("//%s:%s depends on the following targets:" % (
                     key[0], key[1]))
             for d in deps:
                 print "%s:%s" % (d[0], d[1])
         if print_depended:
             print "\n"
             depended_by = result_map[key][1]
             info("//%s:%s is depeneded by the following targets:" % (
                     key[0], key[1]))
             depended_by.sort(key=lambda x:x, reverse=False)
             for d in depended_by:
                 print "%s:%s" % (d[0], d[1])
     return 0
Example #34
 def print_summary(self):
     """print the summary output of tests. """
     info("There are %d tests scheduled to run by scheduler" % (len(self.tests_list)))
Example #35
 def __process(self):
     """Private handler to handle one job. """
     info("blade worker %d starts to process" % self.thread_id)
     info("blade worker %d finish" % self.thread_id)
     return
 def purge(self, file_list):
     self.purge_cnt += 1
     if not file_list:
         return
     map(self.cache_remove, file_list)
     info("scons cache purged")
Example #37
    def __init__(self, targets, options, prebuilt_file_map={}, target_database={}):
        """Init method. """
        self.targets = targets
        self.build_dir = "build%s_%s" % (options.m, options.profile)
        self.options = options
        self.run_list = ['cc_binary',
                         'dynamic_cc_binary',
                         'cc_test',
                         'dynamic_cc_test']
        self.prebuilt_file_map = prebuilt_file_map
        self.target_database = target_database

        self.inctest_md5_file = ".blade.test.stamp"
        self.tests_detail_file = "./blade_tests_detail"
        self.run_all = False
        self.inctest_run_list = []
        self.testarg_dict = {}
        self.env_dict = {}
        self.cur_testarg_dict = {}
        self.cur_env_dict = {}
        self.inctest_md5_buffer = []
        self.target_dict = {}
        self.cur_target_dict = {}
        self.option_has_fulltest = False
        self.valid_inctest_time_interval = 86400
        self.last_inctest_time_dict = {}
        self.this_inctest_time_dict = {}
        self.tests_run_map = {}
        self.run_all_reason = ''
        self.title_str = '='*13
        self.skipped_tests = []
        if hasattr(self.options, 'fulltest'):
            self.option_has_fulltest = True
        if self.option_has_fulltest and (not self.options.fulltest):
            if os.path.exists(self.inctest_md5_file):
                for line in open(self.inctest_md5_file):
                    self.inctest_md5_buffer.append(line[:-1])
            buf_len = len(self.inctest_md5_buffer)
            if buf_len < 2 and buf_len > 0 :
                if os.path.exists(self.inctest_md5_file):
                    os.remove(self.inctest_md5_file)
                error_exit("bad incremental test md5 file, removed")
            if self.inctest_md5_buffer:
                self.testarg_dict = eval(self.inctest_md5_buffer[0])
                self.env_dict = eval(self.inctest_md5_buffer[1])
            if buf_len >= 3:
                self.target_dict = eval(self.inctest_md5_buffer[2])
            if buf_len >= 4:
                self.last_inctest_time_dict = eval(self.inctest_md5_buffer[3])
        if hasattr(self.options, 'testargs'):
            self.cur_testarg_dict['testarg'] = md5sum(self.options.testargs)
        else:
            self.cur_testarg_dict['testarg'] = None
        env_keys = os.environ.keys()
        env_keys = list(set(env_keys).difference(env_ignore_set))
        env_keys.sort()
        env_dict = {}
        for env_key in env_keys:
            env_dict[env_key] = os.environ[env_key]
        self.cur_env_dict['env'] = env_dict
        self.this_inctest_time_dict['inctest_time'] = time.time()

        if self.option_has_fulltest and (not self.options.fulltest):
            if self.cur_testarg_dict['testarg'] != (
                    self.testarg_dict.get('testarg', None)):
                self.run_all = True
                self.run_all_reason = 'ARGUMENT'
                info("all tests will run due to test arguments changed")

            new_env = self.cur_env_dict['env']
            old_env = self.env_dict.get('env', {})
            if isinstance(old_env, str): # For old test record
                old_env = {}
            if new_env != old_env:
                self.run_all = True
                self.run_all_reason = 'ENVIRONMENT'
                (new, old) = _diff_env(new_env, old_env)
                info("all tests will run due to test environments changed:")
                if new:
                    info("new environments: %s" % new)
                if old:
                    info("old environments: %s" % old)

            this_time = int(round(self.this_inctest_time_dict['inctest_time']))
            last_time = int(round(self.last_inctest_time_dict.get('inctest_time', 0)))
            interval = this_time - last_time

            if interval >= self.valid_inctest_time_interval or interval < 0:
                self.run_all = True
                self.run_all_reason = 'STALE'
                info("all tests will run due to all passed tests are invalid now")
        if self.option_has_fulltest and self.options.fulltest:
            self.run_all = True
            self.run_all_reason = 'FULLTEST'
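Judging from the reads above, the incremental-test stamp file holds one eval()-able value per line: the test-argument md5 dict, the recorded environment, the per-test state, and the last run time. A sketch of such contents with hypothetical values:

# Assumed stamp file layout, mirroring the reads above (values are hypothetical).
stamp_lines = [
    repr({'testarg': 'd41d8cd98f00b204e9800998ecf8427e'}),   # md5 of the test arguments
    repr({'env': {'PATH': '/usr/bin:/bin'}}),                # recorded environment
    repr({}),                                                # per-test state
    repr({'inctest_time': 1370000000.0}),                    # time of the last incremental run
]
stamp_example = "\n".join(stamp_lines) + "\n"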
Example #38
 def purge(self, file_list):
     self.purge_cnt += 1
     if not file_list:
         return
     map(self.cache_remove, file_list)
     info('scons cache purged')
Example #39
 def _show_skipped_tests_summary(self):
     """Show a summary of the skipped tests. """
     info("%d tests were skipped by incremental testing" % len(self.skipped_tests))
     info("to run all tests, please specify the --full-test argument")
Example #40
def generate_python_binary(target, source, env):
    setup_file = ''
    if not str(source[0]).endswith("setup.py"):
        warning("setup.py not existed to generate target %s, "
                "blade will generate a default one for you" % str(target[0]))
    else:
        setup_file = str(source[0])
    init_file = ''
    source_index = 2
    if not setup_file:
        source_index = 1
        init_file = str(source[0])
    else:
        init_file = str(source[1])

    init_file_dir = os.path.dirname(init_file)

    dep_source_list = []
    for s in source[source_index:]:
        dep_source_list.append(str(s))

    target_file = str(target[0])
    target_file_dir_list = target_file.split('/')
    target_profile = target_file_dir_list[0]
    target_dir = '/'.join(target_file_dir_list[0:-1])

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if setup_file:
        shutil.copyfile(setup_file, os.path.join(target_dir, 'setup.py'))
    else:
        target_name = os.path.basename(init_file_dir)
        if not target_name:
            error_exit("invalid package for target %s" % str(target[0]))
        # generate default setup.py for user
        setup_str = """
#!/usr/bin/env python
# This file was generated by blade

from setuptools import find_packages, setup


setup(
      name='%s',
      version='0.1.0',
      packages=find_packages(),
      zip_safe=True
)
""" % target_name
        default_setup_file = open(os.path.join(target_dir, 'setup.py'), "w")
        default_setup_file.write(setup_str)
        default_setup_file.close()

    package_dir = os.path.join(target_profile, init_file_dir)
    if os.path.exists(package_dir):
        shutil.rmtree(package_dir, ignore_errors=True)

    cmd = "cp -r %s %s" % (init_file_dir, target_dir)
    p = subprocess.Popen(cmd,
                         env={},
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to copy source files from %s to %s" %
                   (init_file_dir, target_dir))
        return p.returncode

    # copy file to package_dir
    for f in dep_source_list:
        dep_file_basename = os.path.basename(f)
        dep_file_dir = os.path.dirname(f)
        sub_dir = ''
        sub_dir_list = dep_file_dir.split('/')
        if len(sub_dir_list) > 1:
            sub_dir = '/'.join(dep_file_dir.split('/')[1:])
        if sub_dir:
            package_sub_dir = os.path.join(package_dir, sub_dir)
            if not os.path.exists(package_sub_dir):
                os.makedirs(package_sub_dir)
            sub_init_file = os.path.join(package_sub_dir, "__init__.py")
            if not os.path.exists(sub_init_file):
                sub_f = open(sub_init_file, "w")
                sub_f.close()
            shutil.copyfile(f, os.path.join(package_sub_dir,
                                            dep_file_basename))

    make_egg_cmd = "python setup.py bdist_egg"
    p = subprocess.Popen(make_egg_cmd,
                         env={},
                         cwd=target_dir,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate python binary in %s" % target_dir)
        return p.returncode
    return 0
def generate_python_binary(target, source, env):
    setup_file = ''
    if not str(source[0]).endswith("setup.py"):
        warning("setup.py not existed to generate target %s, "
                "blade will generate a default one for you" % str(target[0]))
    else:
        setup_file = str(source[0])
    init_file = ''
    source_index = 2
    if not setup_file:
        source_index = 1
        init_file = str(source[0])
    else:
        init_file = str(source[1])

    init_file_dir = os.path.dirname(init_file)

    dep_source_list = []
    for s in source[source_index:]:
        dep_source_list.append(str(s))

    target_file = str(target[0])
    target_file_dir_list = target_file.split('/')
    target_profile = target_file_dir_list[0]
    target_dir = '/'.join(target_file_dir_list[0:-1])

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if setup_file:
        shutil.copyfile(setup_file, os.path.join(target_dir, 'setup.py'))
    else:
        target_name = os.path.basename(init_file_dir)
        if not target_name:
            error_exit("invalid package for target %s" % str(target[0]))
        # generate default setup.py for user
        setup_str = """
#!/usr/bin/env python
# This file was generated by blade

from setuptools import find_packages, setup


setup(
      name='%s',
      version='0.1.0',
      packages=find_packages(),
      zip_safe=True
)
""" % target_name
        default_setup_file = open(os.path.join(target_dir, 'setup.py'), "w")
        default_setup_file.write(setup_str)
        default_setup_file.close()

    package_dir = os.path.join(target_profile, init_file_dir)
    if os.path.exists(package_dir):
        shutil.rmtree(package_dir, ignore_errors=True)

    cmd = "cp -r %s %s" % (init_file_dir, target_dir)
    p = subprocess.Popen(
            cmd,
            env={},
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to copy source files from %s to %s" % (
                   init_file_dir, target_dir))
        return p.returncode

    # copy file to package_dir
    for f in dep_source_list:
        dep_file_basename = os.path.basename(f)
        dep_file_dir = os.path.dirname(f)
        sub_dir = ''
        sub_dir_list = dep_file_dir.split('/')
        if len(sub_dir_list) > 1:
            sub_dir = '/'.join(dep_file_dir.split('/')[1:])
        if sub_dir:
            package_sub_dir = os.path.join(package_dir, sub_dir)
            if not os.path.exists(package_sub_dir):
                os.makedirs(package_sub_dir)
            sub_init_file = os.path.join(package_sub_dir, "__init__.py")
            if not os.path.exists(sub_init_file):
                sub_f = open(sub_init_file, "w")
                sub_f.close()
            shutil.copyfile(f, os.path.join(package_sub_dir, dep_file_basename))

    make_egg_cmd = "python setup.py bdist_egg"
    p = subprocess.Popen(
            make_egg_cmd,
            env={},
            cwd=target_dir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate python binary in %s" % target_dir)
        return p.returncode
    return 0
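For reference, setuptools' bdist_egg writes its output under dist/ of the directory where setup.py runs, so the egg built above ends up beneath the target directory. A small sketch with hypothetical paths:

import glob
import os

target_dir = "build64_release/app/py_hello"    # hypothetical blade output directory
eggs = glob.glob(os.path.join(target_dir, "dist", "*.egg"))
# e.g. eggs -> ['build64_release/app/py_hello/dist/py_hello-0.1.0-py2.7.egg']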