def _run_job_redirect(self, job, job_thread):
    """Run the job and redirect the output."""
    target, run_dir, test_env, cmd = job
    test_name = target.fullname
    shell = target.data.get('run_in_shell', False)
    if shell:
        cmd = subprocess.list2cmdline(cmd)
    timeout = target.data.get('test_timeout')
    self._show_progress(cmd)
    p = subprocess.Popen(cmd,
                         env=test_env,
                         cwd=run_dir,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         close_fds=True,
                         shell=shell)
    job_thread.set_job_data(p, test_name, timeout)
    stdout = p.communicate()[0]
    result = self._get_result(p.returncode)
    msg = 'Output of %s:\n%s\n%s finished: %s\n' % (
        test_name, stdout, test_name, result)
    if console.verbosity_le('quiet') and p.returncode != 0:
        console.error(msg, prefix=False)
    else:
        console.info(msg)
    console.flush()
    return p.returncode
def schedule_jobs(self):
    """Schedule test jobs onto the worker threads."""
    if not self.tests_list:
        return
    num_of_workers = self._get_workers_num()
    console.info('spawn %d worker(s) to run tests' % num_of_workers)
    for i in self.tests_list:
        target = i[0]
        if target.data.get('exclusive'):
            self.exclusive_job_queue.put(i)
        else:
            self.job_queue.put(i)
    quiet = console.verbosity_le('quiet')
    redirect = num_of_workers > 1 or quiet
    threads = []
    for i in range(num_of_workers):
        t = WorkerThread(i, self.job_queue, self._process_job, redirect)
        t.start()
        threads.append(t)
    self._wait_worker_threads(threads)
    if not self.exclusive_job_queue.empty():
        console.info('spawn 1 worker to run exclusive tests')
        last_t = WorkerThread(num_of_workers, self.exclusive_job_queue,
                              self._process_job, quiet)
        last_t.start()
        self._wait_worker_threads([last_t])
def main(_):
    # REVIEW josephz: This paradigm was copied from inference-hack.py
    # initialize_globals()
    sample_dir = "sample"
    # sample_names = ["new_test"]
    sample_names = ["rolling_in_the_deep"]
    post_processor = PostProcessor()
    post_processor.load_weights("weights.h5")
    # sample_names = ["perfect_features"]
    # sample_names = ["rolling_in_the_one_more_time"]
    for sample_name in sample_names:
        console.h1("Processing %s" % sample_name)
        console.time("total processing for " + sample_name)
        sample_path = sample_dir + "/" + sample_name
        style_path = sample_path + "/style.mp3"
        content_path = sample_path + "/content.mp3"
        stylized_img_path = sample_path + "/stylized.png"
        stylized_img_raw_path = sample_path + "/stylized_raw.png"
        stylized_audio_path = sample_path + "/stylized.mp3"
        stylized_audio_raw_path = sample_path + "/stylized_raw.mp3"

        # Read style audio to spectrograms.
        style_audio, style_sample_rate = conversion.file_to_audio(style_path)
        style_img, style_phase = conversion.audio_to_spectrogram(
            style_audio, fft_window_size=1536)

        # Read content audio to spectrograms.
        content_audio, content_sample_rate = conversion.file_to_audio(
            content_path)
        content_img, content_phase = conversion.audio_to_spectrogram(
            content_audio, fft_window_size=1536)

        stylized_img_raw, stylized_img = stylize(content_img, style_img,
                                                 content_phase, style_phase,
                                                 content_path, style_path,
                                                 post_processor)

        # Save raw stylized spectrogram and audio.
        stylized_audio_raw = conversion.amplitude_to_audio(
            stylized_img_raw, fft_window_size=1536, phase_iterations=15,
            phase=content_phase)
        conversion.image_to_file(stylized_img_raw, stylized_img_raw_path)
        conversion.audio_to_file(stylized_audio_raw, stylized_audio_raw_path)

        # Save post-processed stylized spectrogram and audio.
        stylized_audio = conversion.amplitude_to_audio(
            stylized_img, fft_window_size=1536, phase_iterations=15,
            phase=content_phase)
        # np.save("stylized_img.npy", stylized_img)
        # np.save("content_phase.npy", content_phase)
        conversion.image_to_file(stylized_img, stylized_img_path)
        conversion.audio_to_file(stylized_audio, stylized_audio_path)

        console.timeEnd("total processing for " + sample_name)
        console.info("Finished processing %s; saved to %s" %
                     (sample_name, stylized_audio_path))
def authenticate(serial=None, email=None, password=None):
    serial = raw_input("Serial:") if not serial else serial
    email = raw_input("E-mail:") if not email else email
    password = getpass.getpass("Password:") if not password else password
    if not serial or not email or not password:
        console.error("Serial, E-mail or Password Invalid!")
    else:
        http = urllib3.PoolManager()
        url = system_config['ROUTES'].get('auth')
        params = {'email': email, 'password': password}
        account = Account
        response = {}
        try:
            response = http.request('POST', url, params,
                                    encode_multipart=False)
        except Exception, e:
            console.error("Check your connection", exc_info=True)
            return None
        if response.status == 200:
            data = json.loads(response.data)
            return {
                'serial': serial,
                'account': account(data['email'],
                                   response.getheaders()['user-key'],
                                   data['apiKey'])
            }
        else:
            console.error(response.data, True)
            return None
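# A minimal usage sketch for authenticate(); the credential values below are
# placeholders, not real data, and only the keys visible in the return value
# above are relied on:
#
#   result = authenticate(serial="ABC123", email="user@example.com",
#                         password="secret")
#   if result:
#       console.info("Authenticated %s" % result['serial'])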
def _download_dependency(self, id, classifier):
    group, artifact, version = id.split(':')
    target_path = self._generate_jar_path(id)
    log, classpath = artifact + '__classpath.log', 'classpath.txt'
    log = os.path.join(target_path, log)
    if os.path.isfile(os.path.join(target_path, classpath)):
        if not version.endswith('-SNAPSHOT'):
            return True
        if os.path.isfile(log) and not self._is_log_expired(log):
            return True
    # if classifier:
    #     id = '%s:%s' % (id, classifier)
    #     # Currently analyzing dependencies of classifier jar
    #     # usually fails. Here when there is no classpath.txt
    #     # file but classpath.log exists, that means the failure
    #     # of analyzing dependencies last time
    #     if (not os.path.exists(os.path.join(target_path, classpath))
    #             and os.path.exists(log)):
    #         return False
    console.info('Downloading %s dependencies...' % id)
    pom = os.path.join(target_path, artifact + '-' + version + '.pom')
    cmd = ' '.join([self.__maven, 'dependency:build-classpath',
                    '-DincludeScope=runtime',
                    '-Dmdep.outputFile=%s' % classpath])
    cmd += ' -f %s > %s' % (pom, log)
    if subprocess.call(cmd, shell=True):
        console.warning('Error occurred when resolving %s dependencies. '
                        'Check %s for more details.' % (id, log))
        return False
    return True
def setup_scons_cache(self, options):
    """Setup scons cache."""
    cache_dir = getattr(options, 'cache_dir',
                        os.environ.get('BLADE_CACHE_DIR', '~/.bladescache'))
    if not cache_dir:  # '' to disable cache
        return
    cache_size = getattr(options, 'cache_size',
                         os.environ.get('BLADE_CACHE_SIZE', '2'))
    if cache_size == 'unlimited':
        cache_size = -1
    else:
        # The cache size is given in GiB, e.g. '2' means 2 GiB.
        cache_size = int(cache_size) * 1024 * 1024 * 1024
    cache_dir = os.path.expanduser(cache_dir)
    self._add_rule('CacheDir("%s")' % cache_dir)
    self._add_rule('scache_manager = build_environment.ScacheManager("%s", '
                   'cache_limit=%d)' % (cache_dir, cache_size))
    self._add_rule('Progress(scache_manager, interval=100)')
    console.info('using cache directory %s' % cache_dir)
    console.info('scache size %d' % cache_size)
def _show_skipped_tests(self):
    """Show skipped tests."""
    if self.skipped_tests:
        console.info('%d skipped tests:' % len(self.skipped_tests))
        self.skipped_tests.sort()
        for key in self.skipped_tests:
            console.info('//%s:%s' % key, prefix=False)
def _generate_java_coverage_report(self):
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not jacoco_home or not coverage_reporter:
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if coverage_data:
        cmd = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator'
               % (coverage_reporter, jacoco_libs)]
        cmd.append(report_dir)
        for data in coverage_data:
            cmd.append(','.join(data))
        cmd_str = ' '.join(cmd)
        console.info('Generating java coverage report')
        console.info(cmd_str)
        if subprocess.call(cmd_str, shell=True):
            console.warning('Failed to generate java coverage report')
def copy(self, tmp):
    # Get all dependent files
    if puke.FileSystem.exists('technicolor/%s' % self.name):
        console.info('Have dependent files to copy:')
        file_list = puke.FileList('technicolor/%s' % self.name, filter='*')
        for i in file_list.get():
            puke.copyfile(i, tmp)
def __dogit(packpath, dest, version = ""): # Require git on the system to have it puke.System.check_package('git') # If directory exist, then update the tree if puke.FileSystem.exists(dest): console.info('Updating') that = 'cd %s; git stash; git stash drop' % dest puke.sh(that, output=True) that = 'cd %s; git checkout master; git pull --rebase; ' % dest else: if not puke.FileSystem.exists(puke.FileSystem.dirname(dest)): puke.FileSystem.makedir(puke.FileSystem.dirname(dest)) console.info('Cloning') that = 'cd %s; git clone %s; ' % (puke.FileSystem.dirname(dest), packpath) if version: that = "%s git checkout %s" % (that, version) # Do the deed try: std = puke.Std() puke.sh(that, std=std, output=True) if std.err: raise std.err except: # if puke.FileSystem.exists(dest): # puke.FileSystem.remove(dest) console.error('Git operation failed! %s You need to manually fix or remove the directory.' % std.err)
def patch(self, tmp, target):
    # Get all patches for ourselves
    # XXX watch out: bicycle accident if 'technicolor' is missing
    patches = puke.FileList('technicolor/',
                            filter='*%s.all.*.patch,*%s.%s.*.patch'
                            % (self.name, self.name, target))
    for i in patches.get():
        console.info('Applying', puke.FileSystem.abspath(i))
        puke.patch(tmp, puke.FileSystem.abspath(i), p=0)
def parallel_jobs_num(self):
    """Tune the number of parallel jobs."""
    # User has the highest priority
    user_jobs_num = self.__options.jobs
    if user_jobs_num > 0:
        return user_jobs_num
    # Calculate the job number smartly
    jobs_num = 0
    distcc_enabled = configparse.blade_config.get_config(
        'distcc_config')['enabled']
    if distcc_enabled and self.build_environment.distcc_env_prepared:
        # Distcc doesn't cost much local cpu, so the job number can be
        # quite large.
        distcc_num = len(self.build_environment.get_distcc_hosts_list())
        jobs_num = min(max(int(1.5 * distcc_num), 1), 20)
    else:
        cpu_core_num = cpu_count()
        # Machines with more than 4 cores are usually shared by multiple
        # users, so set an upper bound to avoid interfering with them.
        jobs_num = min(2 * cpu_core_num, 8)
    if jobs_num != user_jobs_num:
        console.info('tune the parallel jobs number (-j N) to %d' % jobs_num)
    return jobs_num
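# A worked example of the heuristic above, as a standalone sketch (the helper
# name is made up, not part of the original class): with 10 distcc hosts the
# job number is min(max(int(1.5 * 10), 1), 20) = 15; on a 16-core box without
# distcc it is min(2 * 16, 8) = 8.
def _example_jobs_num(distcc_hosts_num, cpu_core_num):
    if distcc_hosts_num:
        return min(max(int(1.5 * distcc_hosts_num), 1), 20)
    return min(2 * cpu_core_num, 8)

assert _example_jobs_num(10, 16) == 15
assert _example_jobs_num(0, 16) == 8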
def execute(envcopy, command, path = "", preexec = "", silent = False): if path: envcopy.append('cd "%s"' % path) if preexec: envcopy.append(preexec) # envcopy.append('entryenv=`env`') # envcopy.append('if %s; then exitenv=`env`; if [[ "$entryenv" == "$exitenv" ]]; then exit 0; fi; echo "Env change!" >&2; echo $entryenv >&2; echo $exitenv >&2; fi; exit 1' % command) envcopy.append('if %s; then exit 0; fi; exit 1' % command) std = puke.Std() console.info('Running command:') for i in envcopy: console.info(i) puke.sh(envcopy, std=std, output = False) if std.code == 1: if silent: raise() console.debug("Monade shell stdout:", std.out) console.fail("Monade shell stderr: %s" % std.err) console.debug("Monade shell stdout:", std.out) if std.err: console.warn("Monade shell stderr:", std.err) return std
def _check_dccc_install():
    """Check whether dccc is installed."""
    home_dir = os.environ.get("HOME", "")
    if home_dir and os.path.exists(os.path.join(home_dir, "bin", "dccc")):
        console.info("dccc found")
        return True
    return False
def analyze_targets(self):
    """Expand the targets."""
    console.info("analyzing dependency graph...")
    self.deps_analyzer = DependenciesAnalyzer(self)
    self.deps_analyzer.analyze_deps()
    console.info("analyzing done.")
    return self.all_targets_expanded
def schedule_jobs(self):
    """Schedule test jobs onto the worker threads."""
    if self.num_of_tests <= 0:
        return True
    num_of_workers = self.__get_workers_num()
    console.info("spawn %d worker(s) to run tests" % num_of_workers)
    for i in self.tests_list:
        target = i[0]
        if target.get('options', {}).get('exclusive', False):
            self.exclusive_job_queue.put(i)
        else:
            self.job_queue.put(i)
    test_arg = [self.job_queue, num_of_workers > 1]
    for i in range(num_of_workers):
        t = WorkerThread(i, self._process_command, args=test_arg)
        t.start()
        self.threads.append(t)
    for t in self.threads:
        self._join_thread(t)
    if not self.exclusive_job_queue.empty():
        console.info("spawn 1 worker to run exclusive tests")
        test_arg = [self.exclusive_job_queue, False]
        last_t = WorkerThread(num_of_workers, self._process_command,
                              args=test_arg)
        last_t.start()
        self._join_thread(last_t)
    self.print_summary()
    return True
def _check_ccache_install():
    """Check whether ccache is installed."""
    CC = os.getenv('CC')
    CXX = os.getenv('CXX')
    # clang scan-build always fails with ccache.
    if (CC and os.path.basename(CC) == 'ccc-analyzer' and
            CXX and os.path.basename(CXX) == 'c++-analyzer'):
        console.info('ccache is disabled for scan-build')
        return False
    try:
        p = subprocess.Popen(
            ['ccache', '-V'],
            env=os.environ,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            universal_newlines=True)
        (stdout, stderr) = p.communicate()
        if p.returncode == 0:
            version_line = stdout.splitlines(True)[0]
            if version_line and version_line.find('ccache version') != -1:
                console.info('ccache found')
                return True
    except OSError:
        pass
    return False
def tune_parallel_jobs_num(self):
    """Tune the number of parallel jobs."""
    user_jobs_num = self.__options.jobs
    jobs_num = 0
    cpu_core_num = cpu_count()
    distcc_enabled = configparse.blade_config.get_config(
        'distcc_config')['enabled']
    if distcc_enabled and self.build_environment.distcc_env_prepared:
        jobs_num = int(
            1.5 * len(self.build_environment.get_distcc_hosts_list())) + 1
        if jobs_num > 20:
            jobs_num = 20
        if jobs_num and self.__options.jobs != jobs_num:
            self.__options.jobs = jobs_num
    elif self.__options.jobs < 1:
        if cpu_core_num <= 4:
            self.__options.jobs = 2 * cpu_core_num
        else:
            self.__options.jobs = cpu_core_num
            if self.__options.jobs > 8:
                self.__options.jobs = 8
    if self.__options.jobs != user_jobs_num:
        console.info('tune the parallel jobs number (-j N) to %d' %
                     (self.__options.jobs))
    return self.__options.jobs
def __init__(self, blade_root_dir, distcc_hosts_list=[]):
    # ccache
    self.blade_root_dir = blade_root_dir
    self.ccache_installed = self._check_ccache_install()

    # distcc
    self.distcc_env_prepared = False
    self.distcc_installed = self._check_distcc_install()
    if distcc_hosts_list:
        self.distcc_host_list = distcc_hosts_list
    else:
        self.distcc_host_list = os.environ.get('DISTCC_HOSTS', '')
    if self.distcc_installed and self.distcc_host_list:
        self.distcc_env_prepared = True
    if self.distcc_installed and not self.distcc_host_list:
        console.warning("DISTCC_HOSTS not set but you have "
                        "distcc installed, will just build locally")
    self.distcc_log_file = os.environ.get('DISTCC_LOG', '')
    if self.distcc_log_file:
        console.info("distcc log: %s" % self.distcc_log_file)

    # dccc
    self.dccc_env_prepared = True
    self.dccc_master = os.environ.get('MASTER_HOSTS', '')
    self.dccc_hosts_list = os.environ.get('DISTLD_HOSTS', '')
    self.dccc_installed = self._check_dccc_install()
    if self.dccc_installed:
        if not self.dccc_master and not self.dccc_hosts_list:
            self.dccc_env_prepared = False
            console.warning("MASTER_HOSTS and DISTLD_HOSTS not set "
                            "but you have dccc installed, "
                            "will just build locally")
    else:
        self.dccc_env_prepared = False

    self.rules_buf = []
def query(self, targets):
    """Query the targets."""
    print_deps = self.__options.deps
    print_depended = self.__options.depended
    dot_file = self.__options.output_to_dot
    print_dep_tree = self.__options.output_tree
    result_map = self.query_helper(targets)
    if dot_file:
        print_mode = 0
        if print_depended:
            print_mode = 1
        dot_file = os.path.join(self.__working_dir, dot_file)
        self.output_dot(result_map, print_mode, dot_file)
    else:
        if print_deps:
            if print_dep_tree:
                self.query_dependency_tree(targets)
            else:
                for key in result_map:
                    print
                    deps = result_map[key][0]
                    console.info('//%s:%s depends on the following targets:'
                                 % (key[0], key[1]))
                    for d in deps:
                        print '%s:%s' % (d[0], d[1])
        if print_depended:
            for key in result_map:
                print
                depended_by = result_map[key][1]
                console.info('//%s:%s is depended on by the following '
                             'targets:' % (key[0], key[1]))
                for d in depended_by:
                    print '%s:%s' % (d[0], d[1])
    return 0
def _blade_action_postfunc(closing_message):
    """Do post jobs when blade's own actions fail to build."""
    console.info(closing_message)
    # Remember to write the dblite in case of re-linking after a failed
    # build. We should elaborate a way to avoid rebuilding after a failure
    # of our own builders or actions.
    SCons.SConsign.write()
def query(self, targets):
    """Query the targets."""
    print_deps = getattr(self.__options, 'deps', False)
    print_depended = getattr(self.__options, 'depended', False)
    dot_file = getattr(self.__options, 'output_to_dot', '')
    result_map = self.query_helper(targets)
    if dot_file:
        print_mode = 0
        if print_deps:
            print_mode = 0
        if print_depended:
            print_mode = 1
        dot_file = os.path.join(self.__working_dir, dot_file)
        self.output_dot(result_map, print_mode, dot_file)
    else:
        if print_deps:
            for key in result_map:
                print '\n'
                deps = result_map[key][0]
                console.info('//%s:%s depends on the following targets:' %
                             (key[0], key[1]))
                for d in deps:
                    print '%s:%s' % (d[0], d[1])
        if print_depended:
            for key in result_map:
                print '\n'
                depended_by = result_map[key][1]
                console.info('//%s:%s is depended on by the following '
                             'targets:' % (key[0], key[1]))
                depended_by.sort()
                for d in depended_by:
                    print '%s:%s' % (d[0], d[1])
    return 0
def _check_code_style(opened_files):
    if not opened_files:
        return 0
    cpplint = configparse.blade_config.configs["cc_config"]["cpplint"]
    if not cpplint:
        console.info("cpplint disabled")
        return 0
    console.info("Begin to check code style for changed source code")
    p = subprocess.Popen(("%s %s" % (cpplint, " ".join(opened_files))),
                         shell=True)
    try:
        p.wait()
        if p.returncode:
            if p.returncode == 127:
                msg = ("Can't execute '{0}' to check style, you can config "
                       "the 'cpplint' option to be a valid cpplint path in "
                       "the 'cc_config' section of blade.conf or BLADE_ROOT, "
                       "or make sure '{0}' command is correct.").format(
                           cpplint)
            else:
                msg = "Please fix style warnings before submitting the code!"
            console.warning(msg)
    except KeyboardInterrupt, e:
        console.error(str(e))
        return 1
def _check_code_style(targets):
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))),
                         shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                msg = ("Can't execute '{0}' to check style, you can config "
                       "the 'cpplint' option to be a valid cpplint path in "
                       "the 'cc_config' section of blade.conf or BLADE_ROOT, "
                       "or make sure '{0}' command is correct.").format(
                           cpplint)
            else:
                msg = 'Please fix style warnings before submitting the code!'
            console.warning(msg)
    except KeyboardInterrupt, e:
        console.error(str(e))
        return 1
def getJsonFile(path, category):
    if os.path.isfile(path):
        with open(path, "r") as f:
            content = f.read()
        # console.info("Reading file " + path)
        try:
            jsonStuff = json.loads(content)
        except Exception as e:
            console.error("No " + category + " available.")
            jsonStuff = None
        return jsonStuff
    else:
        console.warning("Couldn't find " + str(path))
        with open(path, "w") as f:
            toFile = {category: []}
            f.write(json.dumps(toFile))
        console.info("File " + path + " created")
        return toFile
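# A minimal usage sketch for getJsonFile(); the path and category are made
# up. On the first run it creates {"tasks": []} at the given path, and on
# later runs it returns the parsed JSON:
#
#   data = getJsonFile("config/tasks.json", "tasks")
#   if data:
#       for task in data.get("tasks", []):
#           console.info(str(task))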
def __init__(self, work_dir, name, deps=[], roots=[]):
    """
    deps: e.g.
        current dir: glog
        BUILD_ROOT:  /glog/glog
        workspace:   //3rd/glog/glog
        external:    #glog
    """
    console.info('Loaded target: %s' % name)
    self.work_dir = work_dir
    self.name = name
    self.deps = to_list(deps)
    self.roots = to_list(roots)
    self.path = os.path.join(self.work_dir, self.name)
    self.full_name = self.path.replace(os.sep, NAME_SEP)
    self.env = ENV_PREFIX + self.full_name
    self.dep_paths = self.norm_deps(self.deps)
    self.all_dep_targets = []
    self.rules = []
def main(_):
    initialize_globals()
    # Scrape data from folder.
    RAW_DATA_DIR = "../data/studio_acapellas"
    PROCESSED_DATA_DIR = "../data/processed"
    # For each file, generate the data using sst methods and save it.
    for file_name in os.listdir(RAW_DATA_DIR):
        file_path = os.path.join(RAW_DATA_DIR, file_name)
        if file_path.endswith(".mp3"):
            processed_file_name = file_name.replace("mp3", "npy")  # haha
            # todo: rewrite all this using pathlib
            processed_file_path_x = PROCESSED_DATA_DIR + "/x/" + processed_file_name
            processed_file_path_y = PROCESSED_DATA_DIR + "/y/" + processed_file_name
            processed_file_path_style = PROCESSED_DATA_DIR + "/style/" + processed_file_name
            console.h1("Processing", file_path)
            x_arr, y_arr, style_arr = generate_data_arrs(file_path)
            console.stats(x_arr, "x_arr")
            console.stats(y_arr, "y_arr")
            console.stats(style_arr, "style_arr")
            # ipdb.set_trace()
            # For debugging, also save the arrays as images.
            io.imsave(processed_file_path_x + ".jpg", x_arr / x_arr.max())
            io.imsave(processed_file_path_y + ".jpg", y_arr / y_arr.max())
            np.save(processed_file_path_x, x_arr)
            np.save(processed_file_path_y, y_arr)
            np.save(processed_file_path_style, style_arr)
        else:
            console.info("Skipping", file_path)
def generate_build_rules(self):
    """Generate the build rules."""
    console.info('generating build rules...')
    generator = self.get_build_rules_generator()
    rules = generator.generate_build_script()
    console.info('generating done.')
    return rules
def _generate_java_coverage_report(self):
    config = configparse.blade_config.get_config('java_test_config')
    jacoco_home = config['jacoco_home']
    coverage_reporter = config['coverage_reporter']
    if not jacoco_home or not coverage_reporter:
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if coverage_data:
        cmd = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator'
               % (coverage_reporter, jacoco_libs)]
        cmd.append(report_dir)
        for data in coverage_data:
            cmd.append(','.join(data))
        cmd = ' '.join(cmd)
        console.info('Generating java coverage report')
        console.info(cmd)
        if subprocess.call(cmd, shell=True):
            console.warning('Failed to generate java coverage report')
def schedule_jobs(self):
    """Schedule test jobs onto the worker threads."""
    if self.num_of_tests <= 0:
        return
    num_of_workers = self.__get_workers_num()
    console.info('spawn %d worker(s) to run tests' % num_of_workers)
    for i in self.tests_list:
        target = i[0]
        if target.data.get('exclusive'):
            self.exclusive_job_queue.put(i)
        else:
            self.job_queue.put(i)
    redirect = num_of_workers > 1
    threads = []
    for i in range(num_of_workers):
        t = WorkerThread(i, self.job_queue, self._process_job, redirect)
        t.start()
        threads.append(t)
    self._wait_worker_threads(threads)
    if not self.exclusive_job_queue.empty():
        console.info('spawn 1 worker to run exclusive tests')
        last_t = WorkerThread(num_of_workers, self.exclusive_job_queue,
                              self._process_job, False)
        last_t.start()
        self._wait_worker_threads([last_t])
    self.print_summary()
def _setup_cache(self):
    if self.build_environment.ccache_installed:
        self.build_environment.setup_ccache_env()
    else:
        cache_dir = os.path.expanduser('~/.bladescache')
        cache_size = 4 * 1024 * 1024 * 1024
        if hasattr(self.options, 'cache_dir'):
            if not self.options.cache_dir:
                return
            cache_dir = self.options.cache_dir
        else:
            console.info('using default cache dir: %s' % cache_dir)
        if hasattr(self.options, 'cache_size') and (
                self.options.cache_size != -1):
            cache_size = self.options.cache_size
        self._add_rule('CacheDir("%s")' % cache_dir)
        self._add_rule('scache_manager = ScacheManager("%s", cache_limit=%d)'
                       % (cache_dir, cache_size))
        self._add_rule('Progress(scache_manager, interval=100)')
        self._add_rule('console.info("using cache directory %s")' % cache_dir)
        self._add_rule('console.info("scache size %d")' % cache_size)
def load(self, save_data_as_h5=True):
    def check_filename(f):
        return ((f.endswith(".mp3") or f.endswith("_all.wav"))
                and not f.startswith("."))

    h5_path = self.get_data_path()
    if os.path.isfile(h5_path):
        h5f = h5py.File(h5_path, "r")
        mashup = h5f["mashup"]
        vocal = h5f["vocal"]
        instrumental = h5f["instrumental"]
        self.track_names = [name.decode("utf8")
                            for name in h5f["names"]["track"]]
        self.mashup = dict(mashup)
        self.vocal = dict(vocal)
        self.instrumental = dict(instrumental)
    else:
        for dirPath, dirNames, file_names in os.walk(self.in_path):
            filtered_files = filter(check_filename, file_names)
            for file_name in filtered_files:
                name = file_name.replace("_all.wav", "")
                file_name = os.path.join(self.in_path, file_name)
                vocal_file = file_name.replace("_all.wav", "_vocal.wav")
                instrumental_file = file_name.replace("_all.wav",
                                                      "_instrumental.wav")
                if not all([os.path.exists(vocal_file),
                            os.path.exists(instrumental_file)]):
                    continue
                audio, sample_rate = conversion.load_audio_file(file_name)
                spectrogram = conversion.audio_file_to_stft(
                    audio, self.fft_window_size)
                mashup = spectrogram

                audio, sample_rate = conversion.load_audio_file(vocal_file)
                spectrogram = conversion.audio_file_to_stft(
                    audio, self.fft_window_size)
                vocal = spectrogram

                audio, sample_rate = conversion.load_audio_file(
                    instrumental_file)
                spectrogram = conversion.audio_file_to_stft(
                    audio, self.fft_window_size)
                instrumental = spectrogram

                console.info("Created spectrogram for", file_name,
                             "with shape", spectrogram.shape)
                self.mashup[name] = mashup
                self.vocal[name] = vocal
                self.instrumental[name] = instrumental
                self.track_names.append(name)
        console.info("Created", len(self.mashup), "total spectrograms")
        # Save to file
        if save_data_as_h5:
            self.save()
def _show_skipped_tests_detail(self):
    """Show the details of skipped tests."""
    if not self.skipped_tests:
        return
    self.skipped_tests.sort()
    console.info('skipped tests')
    for target_key in self.skipped_tests:
        print '%s:%s' % (target_key[0], target_key[1])
def analyze_targets(self):
    """Expand the targets."""
    console.info('analyzing dependency graph...')
    self.__sorted_targets_keys = analyze_deps(self.__build_targets)
    self.__targets_expanded = True
    console.info('analyzing done.')
    return self.__build_targets  # For test
def generate_build_rules(self):
    """Generate the build rules."""
    console.info("generating build rules...")
    self.build_rules_generator = SconsRulesGenerator('SConstruct',
                                                     self.blade_path, self)
    rules_buf = self.build_rules_generator.generate_scons_script()
    console.info("generating done.")
    return rules_buf
def generate_build_rules(self):
    """Generate the build rules."""
    console.info('generating build rules...')
    build_rules_generator = SconsRulesGenerator('SConstruct',
                                                self.__blade_path, self)
    rules_buf = build_rules_generator.generate_scons_script()
    console.info('generating done.')
    return rules_buf
def load_targets(self):
    """Load the targets."""
    console.info('loading BUILDs...')
    (self.__direct_targets,
     self.__all_command_targets,
     self.__build_targets) = load_targets(self.__command_targets,
                                          self.__root_dir,
                                          self)
    console.info('loading done.')
    return self.__direct_targets, self.__all_command_targets  # For test
def clean(options):
    console.info('cleaning...(hint: please specify --generate-dynamic to '
                 'clean your so)')
    p = subprocess.Popen('scons --duplicate=soft-copy -c -s --cache-show',
                         shell=True)
    p.wait()
    console.info('cleaning done.')
    return p.returncode
def __dopkg(path):
    console.info('Processing pkg')
    try:
        std = puke.Std()
        puke.sh('sudo installer -pkg %s -target /' % path, std=std)
        if std.err:
            raise Exception(std.err)
    except Exception:
        console.fail('Retry your task with the following stance: '
                     'sudo echo "lame workaround"; puke MYTASK')
def clean(options):
    console.info("cleaning...(hint: please specify --generate-dynamic to "
                 "clean your so)")
    scons_exe = os.environ.get('SCONS_EXE', 'scons')
    p = subprocess.Popen(
        "%s --duplicate=soft-copy -c -s --cache-show" % scons_exe,
        shell=True)
    p.wait()
    console.info("cleaning done.")
    return p.returncode
def create_config_file():
    account = authenticate()
    if account:
        device = None
        console.info("Checking device...")
        for dev in get_devices(account.get('account')):
            if dev.serial == account.get('serial'):
                device = dev
        console.info("Creating config file...")
        generate_config_file(account.get('account'), device)
def _run_job(self, job):
    """Run the job without redirecting its output."""
    (target, run_dir, test_env, cmd) = job
    console.info("Running %s" % cmd)
    p = subprocess.Popen(cmd, env=test_env, cwd=run_dir, close_fds=True)
    p.wait()
    result = self.__get_result(p.returncode)
    console.info("%s/%s finished : %s\n" % (
        target['path'], target['name'], result))
    return p.returncode
def __init__(self, worker_args, proc_func, args):
    """Init method for this thread."""
    threading.Thread.__init__(self)
    self.worker_args = worker_args
    self.func_args = args
    self.job_handler = proc_func
    self.thread_id = int(self.worker_args)
    self.start_working_time = time.time()
    self.end_working_time = None
    self.ret = None
    console.info("blade test executor %d starts to work" % self.thread_id)
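# The scheduler above only relies on each worker draining its queue. A
# minimal run() sketch matching this __init__, where func_args is assumed to
# be [job_queue, redirect] as passed by schedule_jobs, and the Python 2
# Queue module is imported; the real class may differ:
def run(self):
    job_queue, redirect = self.func_args
    while not job_queue.empty():
        try:
            job = job_queue.get_nowait()
        except Queue.Empty:
            break
        self.ret = self.job_handler(job, redirect)
    self.end_working_time = time.time()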
def main(blade_path):
    exit_code = 0
    try:
        start_time = time.time()
        exit_code = _main(blade_path)
        cost_time = int(time.time() - start_time)
        if exit_code == 0:
            console.info('success')
            console.info('cost time is %ss' %
                         datetime.timedelta(seconds=cost_time))
    except SystemExit, e:
        exit_code = e.code
    return exit_code
def __init__(self, name):
    yawn = puke.FileSystem.join('technicolor', '%s.yaml' % name)
    if not puke.FileSystem.exists(yawn):
        console.fail('The requested yawn (%s, at path %s) doesn\'t exist!'
                     % (name, yawn))
    data = yaml.load(puke.FileSystem.readfile(yawn))

    self.name = name
    # Extend this so that multiple targets can be supported
    self.done = {}

    if 'LICENSE' in data:
        self.license = data['LICENSE']
    else:
        console.error('We are missing license information!')
        self.license = 'UNKNOWN'

    if 'VERSION' in data:
        self.version = data['VERSION']
    else:
        console.warn('We are missing version information')
        self.version = 'No version specified!'

    if 'URL' in data:
        self.url = data['URL'].replace('{VERSION}', self.version)
        self.__checksum = data['CHECKSUM']
        if 'LOCAL' in data:
            self.local = data['LOCAL'].replace('{VERSION}', self.version)
        else:
            # Default local name: the last path segment of the url,
            # without its extension.
            self.local = self.url.split('/').pop().split('.')[0]
        self.production = data['PRODUCTION']
    else:
        console.info('This is a dummy package')
        self.url = ''
        self.__checksum = ''
        self.local = ''
        self.production = ''

    self.hack = False
    if 'HACK' in data:
        self.hack = data['HACK']

    if 'DEPENDS' in data:
        self.depends = data['DEPENDS']
        if isinstance(self.depends, str) and self.depends:
            self.depends = [self.depends]
    else:
        self.depends = []

    if 'TOOLCHAIN' in data:
        self.toolchain = data['TOOLCHAIN']
        if isinstance(self.toolchain, str) and self.toolchain:
            self.toolchain = [self.toolchain]
    else:
        self.toolchain = []
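# An illustrative technicolor/<name>.yaml that the constructor above parses;
# every value here is made up:
#
#   LICENSE: MIT
#   VERSION: 1.2.8
#   URL: http://example.com/foo-{VERSION}.tar.gz
#   CHECKSUM: d41d8cd98f00b204e9800998ecf8427e
#   PRODUCTION: lib/libfoo.a
#   DEPENDS: [zlib]
#   TOOLCHAIN: []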