def create_asset(target_dir):
  """Create the asset: download cmake and extract it into target_dir."""
  with utils.tmp_dir():
    fetch_cmd = ['wget', URL, '--output-document=cmake.tar.gz']
    subprocess.check_call(fetch_cmd)
    # Strip the top-level directory so target_dir holds the tree directly.
    extract_cmd = [
        'tar', '--extract', '--gunzip', '--file', 'cmake.tar.gz',
        '--directory', target_dir, '--strip-components', '1',
    ]
    subprocess.check_call(extract_cmd)
def create_asset(chrome_src_path, browser_executable, target_dir,
                 upload_to_partner_bucket):
  """Create the asset.

  Runs Skia's webpages_playback.py against the given browser to record SKP
  files, then copies the resulting .skp files into target_dir.

  Args:
    chrome_src_path: path to a Chromium checkout (resolved to a real path).
    browser_executable: browser binary to drive (resolved to a real path).
    target_dir: destination directory; created if it does not exist.
    upload_to_partner_bucket: if truthy, passes --upload_to_partner_bucket
        through to webpages_playback.py.
  """
  browser_executable = os.path.realpath(browser_executable)
  chrome_src_path = os.path.realpath(chrome_src_path)
  target_dir = os.path.realpath(target_dir)
  if not os.path.exists(target_dir):
    os.makedirs(target_dir)
  with utils.tmp_dir():
    if os.environ.get('CHROME_HEADLESS'):
      # Start Xvfb if running on a bot.
      try:
        subprocess.Popen(
            ['sudo', 'Xvfb', ':0', '-screen', '0', '1280x1024x24'])
      except Exception:
        # It is ok if the above command fails, it just means that DISPLAY=:0
        # is already up.
        pass
    webpages_playback_cmd = [
        'python', os.path.join(SKIA_TOOLS, 'skp', 'webpages_playback.py'),
        '--page_sets', 'all',
        '--browser_executable', browser_executable,
        '--non-interactive',
        '--output_dir', os.getcwd(),
        '--chrome_src_path', chrome_src_path,
    ]
    if upload_to_partner_bucket:
      webpages_playback_cmd.append('--upload_to_partner_bucket')
    print 'Running webpages_playback command:\n$ %s' % (
        ' '.join(webpages_playback_cmd))
    try:
      subprocess.check_call(webpages_playback_cmd)
    finally:
      # Clean up any leftover browser instances. This can happen if there are
      # telemetry crashes, processes are not always cleaned up appropriately by
      # the webpagereplay and telemetry frameworks.
      procs = subprocess.check_output(['ps', 'ax'])
      for line in procs.splitlines():
        if browser_executable in line:
          # First whitespace-delimited token of a 'ps ax' line is the PID.
          pid = line.strip().split(' ')[0]
          # Skip python processes so we don't kill this script itself or the
          # telemetry harness.
          if pid != str(os.getpid()) and not 'python' in line:
            try:
              subprocess.check_call(['kill', '-9', pid])
            except subprocess.CalledProcessError as e:
              # Best-effort cleanup: the process may already be gone.
              print e
          else:
            print 'Refusing to kill self.'
    # webpages_playback.py writes SKPs under playback/skps in the temp dir;
    # copy only the .skp files into the asset directory.
    src = os.path.join(os.getcwd(), 'playback', 'skps')
    for f in os.listdir(src):
      if f.endswith('.skp'):
        shutil.copyfile(os.path.join(src, f), os.path.join(target_dir, f))
def main():
  """Create the SVG asset in a temp dir, then upload it."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--local_svgs_dir', '-l', default='',
      help='Directory containing additional SVGs we want to upload.')
  parser.add_argument('--gsutil')
  args = parser.parse_args()
  with utils.tmp_dir():
    staging_dir = os.getcwd()
    create_cmd = [
        'python', os.path.join(common.FILE_DIR, 'create.py'),
        '-t', staging_dir, '-l', args.local_svgs_dir,
    ]
    upload_cmd = [
        'python', os.path.join(common.FILE_DIR, 'upload.py'),
        '-t', staging_dir,
    ]
    if args.gsutil:
      upload_cmd += ['--gsutil', args.gsutil]
    try:
      subprocess.check_call(create_cmd)
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def main():
  """Create and upload the asset for the given MSVS version."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--gsutil')
  parser.add_argument('--chrome_path')
  parser.add_argument('--msvs_version', required=True)
  args = parser.parse_args()
  with utils.tmp_dir():
    staging_dir = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, 'create.py')
    upload_script = os.path.join(common.FILE_DIR, 'upload.py')
    create_cmd = ['python', create_script, '-t', staging_dir,
                  '--msvs_version', args.msvs_version]
    if args.chrome_path:
      create_cmd += ['--chrome_path', args.chrome_path]
    upload_cmd = ['python', upload_script, '-t', staging_dir]
    if args.gsutil:
      upload_cmd += ['--gsutil', args.gsutil]
    try:
      subprocess.check_call(create_cmd)
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def main():
  """Create and upload the Windows Vulkan SDK asset.

  Requires --sdk_path; --runtime_path defaults to the system vulkan-1.dll.
  """
  if sys.platform != 'win32':
    print >> sys.stderr, 'This script only runs on Windows.'
    sys.exit(1)
  parser = argparse.ArgumentParser()
  parser.add_argument('--gsutil')
  parser.add_argument('--sdk_path', '-s', required=True)
  # BUG FIX: os.path.join("C:", "System32", "vulkan-1.dll") produced the
  # drive-relative path 'C:System32\vulkan-1.dll' (os.path.join inserts no
  # separator after a bare drive letter), and the runtime DLL lives under
  # C:\Windows\System32. Also, required=True made the default dead code;
  # dropping it lets the default actually apply while an explicit -r still
  # overrides it (backward-compatible for all existing callers).
  parser.add_argument(
      '--runtime_path', '-r',
      default=os.path.join('C:', os.sep, 'Windows', 'System32',
                           'vulkan-1.dll'))
  args = parser.parse_args()
  with utils.tmp_dir():
    cwd = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, 'create.py')
    upload_script = os.path.join(common.FILE_DIR, 'upload.py')
    try:
      cmd = ['python', create_script, '-t', cwd,
             '--sdk_path', args.sdk_path,
             '--runtime_path', args.runtime_path]
      subprocess.check_call(cmd)
      cmd = ['python', upload_script, '-t', cwd]
      if args.gsutil:
        cmd.extend(['--gsutil', args.gsutil])
      subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def main():
  """Create and upload the asset; Linux only."""
  if 'linux' not in sys.platform:
    sys.stderr.write('This script only runs on Linux.\n')
    sys.exit(1)
  parser = argparse.ArgumentParser()
  parser.add_argument('--gsutil')
  parser.add_argument('--lib_path', '-l', required=True)
  args = parser.parse_args()
  with utils.tmp_dir():
    staging_dir = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, 'create.py')
    upload_script = os.path.join(common.FILE_DIR, 'upload.py')
    try:
      subprocess.check_call(
          ['python', create_script, '-t', staging_dir, '-l', args.lib_path])
      upload_cmd = ['python', upload_script, '-t', staging_dir]
      if args.gsutil:
        upload_cmd += ['--gsutil', args.gsutil]
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def create_asset(target_dir): """Create the asset.""" # This is all a bit hacky. Rather than installing to a chroot, we just extract # all the packages to the target dir, then fix things up so that it can be # used in our recipes. with utils.tmp_dir(): # Download required Debian packages. subprocess.check_call(['apt-get', 'download'] + PKGS) for f in os.listdir('.'): subprocess.check_call(['dpkg-deb', '--extract', f, target_dir]) parent_dir = os.path.join(target_dir, 'usr') # Remove unnecessary files that cause problems with zipping (due to dangling # symlinks). os.remove(os.path.join(parent_dir, 'lib/gcc-cross/mips64el-linux-gnuabi64/7/libcc1.so')) shutil.rmtree(os.path.join(parent_dir, 'share')) # Remove usr/ prefix. for d in os.listdir(parent_dir): os.rename(os.path.join(parent_dir, d), os.path.join(target_dir, d)) os.rmdir(parent_dir) # Remove absolute paths in GNU ld scripts. lib_dir = os.path.join(target_dir, 'mips64el-linux-gnuabi64/lib') ld_script_token = 'OUTPUT_FORMAT(elf64-tradlittlemips)' ld_script_files = subprocess.check_output( ['grep', '--recursive', '--files-with-matches', '--binary-files=without-match', '--fixed-strings', ld_script_token, lib_dir]).split() abs_path = '/usr/mips64el-linux-gnuabi64/lib/' for f in ld_script_files: with open(f) as script: contents = script.read() contents = contents.replace(abs_path, '') with open(f, 'w') as script: script.write(contents)
def main():
  """Sync Chrome and Skia (unless checkouts are provided), build the Windows
  toolchain, and record the isolated hash in the Skia checkout."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--msvs_version', required=True)
  parser.add_argument('--chrome_path')
  parser.add_argument('--skia_path')
  args = parser.parse_args()
  isolate_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              'win_toolchain.isolate')
  with utils.print_timings():
    with utils.tmp_dir() as tmp_dir:
      chrome_path = args.chrome_path
      if not chrome_path:
        print(
            'Syncing Chrome from scratch. If you already have a checkout, '
            'specify --chrome_path to save time.')
        chrome_path = os.path.join(tmp_dir.name, 'src')
      if not os.path.isdir(chrome_path):
        utils.git_clone(REPO_CHROME, chrome_path)
      skia_path = args.skia_path
      if not skia_path:
        # BUG FIX: this message previously told the user to specify
        # --chrome_path, but the flag that skips the Skia sync is
        # --skia_path.
        print(
            'Syncing Skia from scratch. If you already have a checkout, '
            'specify --skia_path to save time.')
        skia_path = os.path.join(tmp_dir.name, 'skia')
      if not os.path.isdir(skia_path):
        utils.git_clone(REPO_SKIA, skia_path)
      isolated_hash = gen_toolchain(chrome_path, args.msvs_version,
                                    isolate_file)
      update_toolchain_file(skia_path, args.msvs_version, isolated_hash)
def create_asset(target_dir):
  """Create the asset: fetch the Go archive and unzip it into target_dir."""
  with utils.tmp_dir():
    # Renamed local from 'zipfile' to avoid shadowing the stdlib module.
    archive = os.path.join(os.getcwd(), 'go.zip')
    subprocess.check_call(["wget", '-O', archive, GO_URL])
    subprocess.check_call(["unzip", archive, "-d", target_dir])
def main():
  """Create and upload the SDK asset; Windows only."""
  if sys.platform != 'win32':
    sys.stderr.write('This script only runs on Windows.\n')
    sys.exit(1)
  parser = argparse.ArgumentParser()
  parser.add_argument('--gsutil')
  parser.add_argument('--sdk_path', '-s', required=True)
  args = parser.parse_args()
  with utils.tmp_dir():
    create_script = os.path.join(common.FILE_DIR, 'create.py')
    upload_script = os.path.join(common.FILE_DIR, 'upload.py')
    try:
      # Stage into an 'sdk' subdirectory of the temp dir.
      staging_dir = os.path.join(os.getcwd(), 'sdk')
      subprocess.check_call(['python', create_script, '-t', staging_dir,
                             '--sdk_path', args.sdk_path])
      upload_cmd = ['python', upload_script, '-t', staging_dir]
      if args.gsutil:
        upload_cmd += ['--gsutil', args.gsutil]
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def test_to_skip(self):
  """Round-trip zip/unzip while excluding entries via to_skip patterns."""
  with utils.tmp_dir():
    writer = test_utils.FileWriter(os.path.join(os.getcwd(), 'input'))
    # Build a tree containing both kept and skipped entries.
    writer.mkdir('.git')
    writer.write(os.path.join('.git', 'index'))
    writer.write('somefile')
    writer.write('.DS_STORE')
    writer.write('leftover.pyc')
    writer.write('.pycfile')
    # Zip with exclusions, then unzip.
    zip_utils.zip('input', 'test.zip', to_skip=['.git', '.DS*', '*.pyc'])
    zip_utils.unzip('test.zip', 'output')
    # Delete the excluded entries from the input tree so a plain tree
    # comparison validates the output. '.pycfile' stays: '*.pyc' matches
    # only the suffix.
    for victim in (os.path.join('.git', 'index'), '.git', '.DS_STORE',
                   'leftover.pyc'):
      writer.remove(victim)
    # Compare results.
    test_utils.compare_trees(self, 'input', 'output')
def upload_new_version(self, target_dir, commit=False):
  """Upload a new version and update the version file for the asset.

  Zips target_dir, copies the zip to the asset's GS location under the next
  version number, then writes that number to the version file in the Skia
  checkout (optionally committing and uploading a CL).

  Args:
    target_dir: directory whose contents become the new asset version.
    commit: if True, commit the version-file change on a branch and run
        'git cl upload'; otherwise just write and 'git add' the file.
  """
  version = self.get_next_version()
  target_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    zip_file = os.path.join(os.getcwd(), '%d.zip' % version)
    zip_utils.zip(target_dir, zip_file, blacklist=ZIP_BLACKLIST)
    gs_path = GS_PATH_TMPL % (self._gs_subdir, str(version))
    self._gs.copy(zip_file, gs_path)

    # Writes the new version number and stages the file in git.
    def _write_version():
      with open(self.version_file, 'w') as f:
        f.write(str(version))
      subprocess.check_call([utils.GIT, 'add', self.version_file])

    with utils.chdir(SKIA_DIR):
      if commit:
        # Commit on a temporary branch and upload a CL for review.
        with utils.git_branch():
          _write_version()
          subprocess.check_call([
              utils.GIT, 'commit', '-m', 'Update %s version' % self._name
          ])
          subprocess.check_call(
              [utils.GIT, 'cl', 'upload', '--bypass-hooks'])
      else:
        _write_version()
def main():
  """Sync Chrome and Skia (unless checkouts are provided), build the Windows
  toolchain, and record the isolated hash in the Skia checkout."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--msvs_version', required=True)
  parser.add_argument('--chrome_path')
  parser.add_argument('--skia_path')
  args = parser.parse_args()
  isolate_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              'win_toolchain.isolate')
  with utils.print_timings():
    with utils.tmp_dir() as tmp_dir:
      chrome_path = args.chrome_path
      if not chrome_path:
        print ('Syncing Chrome from scratch. If you already have a checkout, '
               'specify --chrome_path to save time.')
        chrome_path = os.path.join(tmp_dir.name, 'src')
      if not os.path.isdir(chrome_path):
        utils.git_clone(REPO_CHROME, chrome_path)
      skia_path = args.skia_path
      if not skia_path:
        # BUG FIX: this message previously told the user to specify
        # --chrome_path, but the flag that skips the Skia sync is
        # --skia_path.
        print ('Syncing Skia from scratch. If you already have a checkout, '
               'specify --skia_path to save time.')
        skia_path = os.path.join(tmp_dir.name, 'skia')
      if not os.path.isdir(skia_path):
        utils.git_clone(REPO_SKIA, skia_path)
      isolated_hash = gen_toolchain(chrome_path, args.msvs_version,
                                    isolate_file)
      update_toolchain_file(skia_path, args.msvs_version, isolated_hash)
def download_version(self, version, target_dir):
  """Download the specified version of the asset into target_dir."""
  remote_path = GS_PATH_TMPL % (self._gs_subdir, str(version))
  dest_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    local_zip = os.path.join(os.getcwd(), '%d.zip' % version)
    # Fetch the zipped version from GS, then expand it.
    self._gs.copy(remote_path, local_zip)
    zip_utils.unzip(local_zip, dest_dir)
def download(self, name, version, target_dir):
  """Download the named asset version from GS into target_dir."""
  remote_path = GS_PATH_TMPL % (GS_SUBDIR_TMPL % (self._gs_bucket, name),
                                str(version))
  dest_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    local_zip = os.path.join(os.getcwd(), "%d.zip" % version)
    self.copy(remote_path, local_zip)
    zip_utils.unzip(local_zip, dest_dir)
def upload(self, name, version, target_dir):
  """Zip target_dir and upload it to GS as the named asset version."""
  src_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    archive = os.path.join(os.getcwd(), "%d.zip" % version)
    zip_utils.zip(src_dir, archive, blacklist=ZIP_BLACKLIST)
    remote_path = GS_PATH_TMPL % (GS_SUBDIR_TMPL % (self._gs_bucket, name),
                                  str(version))
    self.copy(archive, remote_path)
def _write_to_storage(task): """Writes the specified compile task to Google storage.""" with utils.tmp_dir(): json_file = os.path.join(os.getcwd(), _get_task_file_name(task)) with open(json_file, 'w') as f: json.dump(task, f) subprocess.check_call(['gsutil', 'cp', json_file, '%s/' % _get_gs_bucket()]) print 'Created %s/%s' % (_get_gs_bucket(), os.path.basename(json_file))
def _json_output(self, cmd):
  """Run the given command, return the JSON output's 'result' list."""
  with utils.tmp_dir():
    out_path = os.path.join(os.getcwd(), 'output.json')
    self._run(cmd + ['--json-output', out_path])
    with open(out_path) as fh:
      # Missing 'result' key yields an empty list.
      return json.load(fh).get('result', [])
def create_asset(target_dir):
  """Create the asset: stream the cockroach tarball through tar and move
  the binary into target_dir."""
  with utils.tmp_dir():
    p1 = subprocess.Popen(["curl", URL], stdout=subprocess.PIPE)
    # BUG FIX: the original argv was ["tar", "-xzf" "-"] -- a missing comma
    # made the adjacent string literals concatenate into the single
    # argument '-xzf-'. GNU tar happened to tolerate it, but the intent is
    # clearly two arguments ('-xzf' reading from stdin '-').
    p2 = subprocess.Popen(["tar", "-xzf", "-"], stdin=p1.stdout)
    p1.stdout.close()  # Allow p1 to receive a SIGPIPE if p2 exits.
    _, _ = p2.communicate()
    shutil.move('./cockroach-v20.2.8.linux-amd64/cockroach', target_dir)
def upload(self, name, version, target_dir, extra_tags=None):
  """Zip target_dir (minus skip patterns) and upload it to GS."""
  src_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    archive = os.path.join(os.getcwd(), '%d.zip' % version)
    zip_utils.zip(src_dir, archive, to_skip=PATTERNS_TO_SKIP)
    remote_path = GS_PATH_TMPL % (GS_SUBDIR_TMPL % (self._gs_bucket, name),
                                  str(version))
    self.copy(archive, remote_path)
def upload(self, name, version, target_dir):
  """Upload to GS: zip the directory and copy it under the version key."""
  local_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    zipped = os.path.join(os.getcwd(), '%d.zip' % version)
    zip_utils.zip(local_dir, zipped, blacklist=ZIP_BLACKLIST)
    destination = GS_PATH_TMPL % (GS_SUBDIR_TMPL % (self._gs_bucket, name),
                                  str(version))
    self.copy(zipped, destination)
def download(self, name, version, target_dir):
  """Download from GS: fetch the version zip and expand it in place."""
  source = GS_PATH_TMPL % (GS_SUBDIR_TMPL % (self._gs_bucket, name),
                           str(version))
  destination = os.path.abspath(target_dir)
  with utils.tmp_dir():
    zipped = os.path.join(os.getcwd(), '%d.zip' % version)
    self.copy(source, zipped)
    zip_utils.unzip(zipped, destination)
def create_asset(chrome_src_path, browser_executable, target_dir,
                 upload_to_partner_bucket):
  """Create the asset.

  Runs Skia's webpages_playback.py against the given browser to record SKP
  files, copies the resulting .skp files into target_dir, and finally pulls
  the private SKPs from Google storage into target_dir as well.

  Args:
    chrome_src_path: path to a Chromium checkout (resolved to a real path).
    browser_executable: browser binary to drive (resolved to a real path).
    target_dir: destination directory; created if it does not exist.
    upload_to_partner_bucket: if truthy, passes --upload_to_partner_bucket
        through to webpages_playback.py.
  """
  browser_executable = os.path.realpath(browser_executable)
  chrome_src_path = os.path.realpath(chrome_src_path)
  target_dir = os.path.realpath(target_dir)
  if not os.path.exists(target_dir):
    os.makedirs(target_dir)
  with utils.tmp_dir():
    if os.environ.get('CHROME_HEADLESS'):
      # Start Xvfb if running on a bot.
      try:
        subprocess.Popen(['sudo', 'Xvfb', ':0', '-screen', '0',
                          '1280x1024x24'])
      except Exception:
        # It is ok if the above command fails, it just means that DISPLAY=:0
        # is already up.
        pass
    webpages_playback_cmd = [
        'python', os.path.join(SKIA_TOOLS, 'skp', 'webpages_playback.py'),
        '--page_sets', 'all',
        '--browser_executable', browser_executable,
        '--non-interactive',
        '--output_dir', os.getcwd(),
        '--chrome_src_path', chrome_src_path,
    ]
    if upload_to_partner_bucket:
      webpages_playback_cmd.append('--upload_to_partner_bucket')
    print 'Running webpages_playback command:\n$ %s' % (
        ' '.join(webpages_playback_cmd))
    try:
      subprocess.check_call(webpages_playback_cmd)
    finally:
      # Clean up any leftover browser instances. This can happen if there are
      # telemetry crashes, processes are not always cleaned up appropriately by
      # the webpagereplay and telemetry frameworks.
      procs = subprocess.check_output(['ps', 'ax'])
      for line in procs.splitlines():
        if browser_executable in line:
          # First whitespace-delimited token of a 'ps ax' line is the PID.
          pid = line.strip().split(' ')[0]
          # Skip python processes so we don't kill this script itself or the
          # telemetry harness.
          if pid != str(os.getpid()) and not 'python' in line:
            try:
              subprocess.check_call(['kill', '-9', pid])
            except subprocess.CalledProcessError as e:
              # Best-effort cleanup: the process may already be gone.
              print e
          else:
            print 'Refusing to kill self.'
    # webpages_playback.py writes SKPs under playback/skps in the temp dir;
    # copy only the .skp files into the asset directory.
    src = os.path.join(os.getcwd(), 'playback', 'skps')
    for f in os.listdir(src):
      if f.endswith('.skp'):
        shutil.copyfile(os.path.join(src, f), os.path.join(target_dir, f))
    # Copy over private SKPs from Google storage into the target_dir.
    subprocess.check_call([
        'gsutil', 'cp', os.path.join(PRIVATE_SKPS_GS, '*'), target_dir])
def create_asset(target_dir):
  """Create the asset: download the Vulkan SDK tarball and extract the
  x86_64 subtree into target_dir."""
  with utils.tmp_dir():
    archive = 'vulkansdk-linux.tar.gz'
    subprocess.check_call(['curl', SDK_URL, '--output', archive])
    # Only the <version>/x86_64 subtree is wanted; strip the two leading
    # path components so its contents land directly in target_dir.
    extract_cmd = [
        'tar', '--extract', '--verbose',
        '--file=%s' % archive,
        '--gunzip',
        '--directory=%s' % target_dir,
        '--strip-components=2',
        '%s/x86_64' % SDK_VERSION,
    ]
    subprocess.check_call(extract_cmd)
def download(self, name, version, target_dir):
  """Download a CIPD package via an 'ensure' manifest file."""
  manifest_line = "%s %s" % (CIPD_PACKAGE_NAME_TMPL % name,
                             TAG_VERSION_TMPL % version)
  install_root = os.path.abspath(target_dir)
  with utils.tmp_dir():
    manifest = os.path.join(os.getcwd(), "input")
    with open(manifest, "w") as fh:
      fh.write(manifest_line)
    self._run(["ensure", "--root", install_root, "--list", manifest])
def create_asset(target_dir):
  """Create the asset: extract the OpenCL headers from Debian packages."""
  with utils.tmp_dir():
    # Download required Debian packages.
    subprocess.check_call(['apt-get', 'download'] + PKGS)
    # Unpack each downloaded .deb into the current (temp) directory.
    for pkg in os.listdir('.'):
      subprocess.check_call(['dpkg-deb', '--extract', pkg, '.'])
    # Only usr/include/CL is wanted; move it into target_dir.
    shutil.move(os.path.join(os.getcwd(), 'usr', 'include', 'CL'),
                target_dir)
def create_asset(target_dir):
  """Clone bloaty at the pinned tag, build it, and install the binary."""
  with utils.tmp_dir():
    # Check out bloaty at TAG only (shallow, single branch).
    subprocess.check_call(['git', 'clone', '--depth', '1', '-b', TAG,
                           '--single-branch', REPO])
    os.chdir('bloaty')
    # Build bloaty.
    for build_step in (['cmake', '.'], ['make', '-j']):
      subprocess.check_call(build_step)
    shutil.move('./bloaty', target_dir)
def create_asset(target_dir):
  """Create the asset: fetch the Vulkan SDK and extract its x86_64 tree."""
  with utils.tmp_dir():
    archive_name = 'vulkansdk-linux.tar.gz'
    download_cmd = ['curl', SDK_URL, '--output', archive_name]
    # --strip-components=2 drops '<version>/x86_64' so the subtree's
    # contents land directly in target_dir.
    extract_cmd = [
        'tar', '--extract', '--verbose',
        '--file=%s' % archive_name,
        '--gunzip',
        '--directory=%s' % target_dir,
        '--strip-components=2',
        '%s/x86_64' % SDK_VERSION,
    ]
    for cmd in (download_cmd, extract_cmd):
      subprocess.check_call(cmd)
def start_wheel_analysis():
  """Run wheel tracking (or load cached results) and return lap timestamps.

  Results of analyze_video() are cached with dill in 'w_res.pkl' under the
  temp dir, so repeated runs skip the expensive analysis.
  """
  # NOTE(review): here tmp_dir() is called as a plain function returning a
  # path, unlike the context-manager usage elsewhere -- presumably a
  # different helper; confirm which tmp_dir is imported.
  wheel_tracking_file = os.path.join(tmp_dir(), 'w_res.pkl')
  if os.path.isfile(wheel_tracking_file):
    # Cache hit: reload previously pickled tracking results.
    # NOTE(review): file handles from open() are never closed here, and
    # dill.load on an untrusted file can execute arbitrary code.
    r = dill.load(open(wheel_tracking_file, 'rb'))
  else:
    r = analyze_video()
    dill.dump(r, open(wheel_tracking_file, 'wb'))
  a = extract_lap_frames(r)
  # Each element of 'a' appears to carry a frame index at position [1];
  # convert those frame indices to seconds. TODO confirm the tuple layout.
  frames_seconds = frames_to_seconds(np.array([c[1] for c in a]))
  print(frames_seconds)
  return frames_seconds
def _test_versions(self, store):
  """Versions accumulate with uploads and vanish with delete_contents."""
  with utils.tmp_dir():
    # Seed an input tree to upload.
    src_dir = os.path.join(os.getcwd(), 'input')
    _write_stuff(src_dir)
    name = self.asset_name
    self.assertEqual(store.get_available_versions(name), [])
    store.upload(name, 0, src_dir)
    self.assertEqual(store.get_available_versions(name), [0])
    store.upload(name, 1, src_dir)
    self.assertEqual(store.get_available_versions(name), [0, 1])
    # Deleting the asset's contents clears its version list.
    store.delete_contents(name)
    self.assertEqual(store.get_available_versions(name), [])
def create_asset(target_dir):
  """Create the asset: extract x86_64 libraries from Debian packages."""
  with utils.tmp_dir():
    # Download required Debian packages.
    subprocess.check_call(['apt-get', 'download'] + PKGS)
    # Unpack each downloaded .deb into the current (temp) directory.
    for pkg in os.listdir('.'):
      subprocess.check_call(['dpkg-deb', '--extract', pkg, '.'])
    # Move everything under usr/lib/x86_64-linux-gnu into target_dir.
    lib_dir = os.path.join(os.getcwd(), 'usr', 'lib', 'x86_64-linux-gnu')
    for entry in os.listdir(lib_dir):
      shutil.move(os.path.join(lib_dir, entry), target_dir)
def _test_upload_download(self, store):
  """An uploaded tree downloads back identical."""
  with utils.tmp_dir():
    # Create input files and directories.
    src_dir = os.path.join(os.getcwd(), 'input')
    _write_stuff(src_dir)
    # Upload version 0, then pull it back down into a fresh directory.
    store.upload(self.asset_name, 0, src_dir)
    dst_dir = os.path.join(os.getcwd(), 'output')
    store.download(self.asset_name, 0, dst_dir)
    # The round-tripped tree must match the original.
    test_utils.compare_trees(self, src_dir, dst_dir)
def test_upload_download(self):
  """Uploading a version and downloading it yields an identical tree."""
  with utils.tmp_dir():
    # Create input files and directories.
    src_dir = os.path.join(os.getcwd(), 'input')
    _write_stuff(src_dir)
    # Upload a new version, then download the current one.
    self.a.upload_new_version(src_dir)
    dst_dir = os.path.join(os.getcwd(), 'output')
    self.a.download_current_version(dst_dir)
    # The round-tripped tree must match the original.
    test_utils.compare_trees(self, src_dir, dst_dir)
def download(self, name, version, target_dir):
  """Download a CIPD package by writing and running an ensure manifest."""
  pkg = CIPD_PACKAGE_NAME_TMPL % name
  tag = TAG_VERSION_TMPL % version
  root = os.path.abspath(target_dir)
  with utils.tmp_dir():
    manifest_path = os.path.join(os.getcwd(), 'input')
    with open(manifest_path, 'w') as fh:
      fh.write('%s %s' % (pkg, tag))
    self._run(['ensure', '--root', root, '--list', manifest_path])
def create_asset(target_dir, msvs_version, chrome_path=None):
  """Create the asset, syncing Chrome first if no checkout is given."""
  if not os.path.isdir(target_dir):
    os.makedirs(target_dir)
  with utils.tmp_dir() as tmp_dir:
    if not chrome_path:
      print ('Syncing Chrome from scratch. If you already have a checkout, '
             'specify --chrome_path to save time.')
      chrome_path = os.path.join(tmp_dir.name, 'src')
    if not os.path.isdir(chrome_path):
      # Configure and sync a fresh managed Chromium checkout.
      for gclient_cmd in (
          [utils.GCLIENT, 'config', REPO_CHROME, '--managed'],
          [utils.GCLIENT, 'sync']):
        subprocess.check_call(gclient_cmd)
    gen_toolchain(chrome_path, msvs_version, target_dir)
def create_asset(target_dir):
  """Download, build, and install ccache into target_dir."""
  # configure --prefix requires an absolute path.
  install_prefix = os.path.abspath(target_dir)
  # Download and extract the source.
  with utils.tmp_dir():
    tarball = VERSION + ".tar.gz"
    # NOTE(review): the release URL hard-codes 3.7.7 while the filenames
    # are derived from VERSION -- keep the two in sync when updating.
    subprocess.check_call(
        ["wget", "-O", tarball,
         "https://github.com/ccache/ccache/releases/download/v3.7.7/"
         "ccache-3.7.7.tar.gz"])
    subprocess.check_call(["tar", "-xzf", tarball])
    os.chdir(VERSION)
    # Configure, build, and install into the target prefix.
    subprocess.check_call(
        ["./configure", "--disable-man", "--prefix=" + install_prefix])
    subprocess.check_call(["make"])
    subprocess.check_call(["make", "install"])
def create_asset(target_dir): """Create the asset.""" # Check out and build the Intel NEO driver. Following instructions here: # https://github.com/intel/compute-runtime/blob/master/documentation/BUILD_Ubuntu.md with utils.tmp_dir(): # Install build deps. neo_build_deps = [ 'ccache', 'flex', 'bison', 'clang-4.0', 'cmake', 'g++', 'git', 'patch', 'zlib1g-dev', 'autoconf', 'xutils-dev', 'libtool', 'pkg-config', 'libpciaccess-dev' ] apt_get_cmd = ['sudo', 'apt-get', 'install'] + neo_build_deps print('Running "%s"' % ' '.join(apt_get_cmd)) subprocess.check_call(apt_get_cmd) # Check out repos. for [repo, branch, local_name ] in [['llvm-mirror/clang', 'release_40', 'clang_source'], ['intel/opencl-clang', 'master', 'common_clang'], ['intel/llvm-patches', 'master', 'llvm_patches'], ['llvm-mirror/llvm', 'release_40', 'llvm_source'], ['intel/gmmlib', 'master', 'gmmlib'], ['intel/intel-graphics-compiler', 'master', 'igc'], ['KhronosGroup/OpenCL-Headers', 'master', 'opencl_headers'], ['intel/compute-runtime', 'master', 'neo']]: subprocess.check_call([ 'git', 'clone', '--depth', '1', '--branch', branch, 'https://github.com/' + repo, local_name ]) # Configure the build. build_dir = os.path.join(os.getcwd(), 'build') os.mkdir(build_dir) os.chdir(build_dir) subprocess.check_call([ 'cmake', '-DBUILD_TYPE=Release', '-DCMAKE_BUILD_TYPE=Release', '../neo' ]) # Build and package the library. subprocess.check_call( ['make', '-j%d' % multiprocessing.cpu_count(), 'package']) # Extract library and move necessary files to target_dir. We ignore the ICD # file because it's generated on the bot after we know the path to the CIPD # package. subprocess.check_call([ 'dpkg-deb', '--extract', 'intel-opencl-1.0-0.x86_64-igdrcl.deb', build_dir ]) lib_dir = os.path.join(build_dir, 'usr', 'local', 'lib') for f in os.listdir(lib_dir): shutil.move(os.path.join(lib_dir, f), target_dir)
def create_asset(target_dir, msvs_version, chrome_path=None):
  """Build the toolchain asset, syncing Chrome if no checkout is supplied."""
  if not os.path.isdir(target_dir):
    os.makedirs(target_dir)
  with utils.tmp_dir() as tmp_dir:
    checkout = chrome_path
    if not checkout:
      print(
          'Syncing Chrome from scratch. If you already have a checkout, '
          'specify --chrome_path to save time.')
      checkout = os.path.join(tmp_dir.name, 'src')
    if not os.path.isdir(checkout):
      # Set up a managed gclient checkout of Chromium, then sync it.
      subprocess.check_call(
          [utils.GCLIENT, 'config', REPO_CHROME, '--managed'])
      subprocess.check_call([utils.GCLIENT, 'sync'])
    gen_toolchain(checkout, msvs_version, target_dir)
def get_flutter_skps(target_dir):
  """Creates SKPs using Flutter's skp_generator tool.

  Documentation is at https://github.com/flutter/tests/tree/master/skp_generator
  """
  with utils.tmp_dir():
    utils.git_clone('https://github.com/flutter/tests.git', '.')
    os.chdir('skp_generator')
    subprocess.check_call(['bash', 'build.sh'])
    # Fix invalid SKP file names: replace every non-alphanumeric character
    # in the base name with an underscore.
    for skp_name in os.listdir('skps'):
      base = os.path.splitext(skp_name)[0]
      sanitized = ''.join(ch if ch.isalnum() else "_" for ch in base)
      if sanitized != base:
        os.rename(os.path.join('skps', skp_name),
                  os.path.join('skps', sanitized + '.skp'))
    copy_tree('skps', target_dir)
def main():
  """Create the asset in a temp dir and upload it."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--gsutil')
  args = parser.parse_args()
  with utils.tmp_dir():
    staging_dir = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, 'create.py')
    upload_script = os.path.join(common.FILE_DIR, 'upload.py')
    upload_cmd = ['python', upload_script, '-t', staging_dir]
    if args.gsutil:
      upload_cmd += ['--gsutil', args.gsutil]
    try:
      subprocess.check_call(['python', create_script, '-t', staging_dir])
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def test_zip_unzip(self):
  """A zip/unzip round trip preserves files, dirs, and permission bits."""
  with utils.tmp_dir():
    writer = test_utils.FileWriter(os.path.join(os.getcwd(), 'input'))
    # Create input files and directories with a spread of permissions.
    # (0o notation is equivalent to the old 0NNN octal literals.)
    writer.mkdir('mydir')
    writer.mkdir('anotherdir', 0o666)
    writer.mkdir('dir3', 0o600)
    writer.mkdir('subdir')
    writer.write('a.txt', 0o777)
    writer.write('b.txt', 0o751)
    writer.write('c.txt', 0o640)
    writer.write(os.path.join('subdir', 'd.txt'), 0o640)
    # Zip, unzip.
    zip_utils.zip('input', 'test.zip')
    zip_utils.unzip('test.zip', 'output')
    # Compare the inputs and outputs.
    test_utils.compare_trees(self, 'input', 'output')
def test_versions(self):
  """current/available/next versions advance together as uploads happen."""
  with utils.tmp_dir():
    # Create input files and directories.
    src_dir = os.path.join(os.getcwd(), 'input')
    _write_stuff(src_dir)

    def check(current, available, upcoming):
      # Assert the asset's full version state in one place.
      self.assertEqual(self.a.get_current_version(), current)
      self.assertEqual(self.a.get_available_versions(), available)
      self.assertEqual(self.a.get_next_version(), upcoming)

    check(-1, [], 0)
    self.a.upload_new_version(src_dir)
    check(0, [0], 1)
    self.a.upload_new_version(src_dir)
    check(1, [0, 1], 2)
def main():
  """Create the SVG asset (plus any local SVGs) and upload it."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      "--local_svgs_dir", "-l", default="",
      help="Directory containing additional SVGs we want to upload.")
  parser.add_argument("--gsutil")
  args = parser.parse_args()
  with utils.tmp_dir():
    workdir = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, "create.py")
    upload_script = os.path.join(common.FILE_DIR, "upload.py")
    try:
      subprocess.check_call(["python", create_script, "-t", workdir,
                             "-l", args.local_svgs_dir])
      upload_cmd = ["python", upload_script, "-t", workdir]
      if args.gsutil:
        upload_cmd.extend(["--gsutil", args.gsutil])
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def isolate_android_sdk(android_sdk_root):
  """Isolate the Android SDK and return the isolated hash."""
  repo_isolate_file = os.path.join(INFRA_BOTS_DIR, ISOLATE_FILE_NAME)
  with utils.tmp_dir():
    # Copy the SDK dir contents into a directory with a known name.
    sdk_dir = os.path.join(os.getcwd(), SDK_DIR_NAME)
    shutil.copytree(android_sdk_root, sdk_dir)
    isolate_file = os.path.join(os.getcwd(), ISOLATE_FILE_NAME)
    shutil.copyfile(repo_isolate_file, isolate_file)
    # Isolate the SDK.
    isolate_bin = get_isolate_binary()
    check_isolate_auth(isolate_bin)
    sdk_relpath = os.path.relpath(sdk_dir, os.path.dirname(isolate_file))
    archive_cmd = [
        isolate_bin, 'archive', '--quiet',
        '--isolate-server', 'https://isolateserver.appspot.com',
        '-i', isolate_file,
        '-s', 'android_sdk.isolated',
        '--extra-variable', 'ANDROID_SDK_DIR=%s' % sdk_relpath,
    ]
    raw_output = subprocess.check_output(archive_cmd).rstrip()
    # The first whitespace-delimited token of the output is the hash.
    return shlex.split(raw_output)[0]
def main():
  """Create and upload the asset for the required MSVS version."""
  parser = argparse.ArgumentParser()
  parser.add_argument("--gsutil")
  parser.add_argument("--chrome_path")
  parser.add_argument("--msvs_version", required=True)
  args = parser.parse_args()
  with utils.tmp_dir():
    workdir = os.getcwd()
    create_script = os.path.join(common.FILE_DIR, "create.py")
    upload_script = os.path.join(common.FILE_DIR, "upload.py")
    try:
      create_cmd = ["python", create_script, "-t", workdir,
                    "--msvs_version", args.msvs_version]
      if args.chrome_path:
        create_cmd.extend(["--chrome_path", args.chrome_path])
      subprocess.check_call(create_cmd)
      upload_cmd = ["python", upload_script, "-t", workdir]
      if args.gsutil:
        upload_cmd.extend(["--gsutil", args.gsutil])
      subprocess.check_call(upload_cmd)
    except subprocess.CalledProcessError:
      # Trap exceptions to avoid printing two stacktraces.
      sys.exit(1)
def test_blacklist(self):
  """Round-trip zip/unzip while excluding entries via blacklist patterns."""
  with utils.tmp_dir():
    # Create input files and directories, mixing kept and excluded names.
    writer = test_utils.FileWriter(os.path.join(os.getcwd(), 'input'))
    writer.mkdir('.git')
    writer.write(os.path.join('.git', 'index'))
    writer.write('somefile')
    writer.write('.DS_STORE')
    writer.write('leftover.pyc')
    writer.write('.pycfile')
    # Zip with the blacklist applied, then unzip.
    zip_utils.zip('input', 'test.zip', blacklist=['.git', '.DS*', '*.pyc'])
    zip_utils.unzip('test.zip', 'output')
    # Remove the blacklisted entries from the input tree so a plain tree
    # comparison validates the output ('.pycfile' survives: '*.pyc'
    # matches only the suffix).
    writer.remove(os.path.join('.git', 'index'))
    writer.remove('.git')
    writer.remove('.DS_STORE')
    writer.remove('leftover.pyc')
    # Compare results.
    test_utils.compare_trees(self, 'input', 'output')
def upload_new_version(self, target_dir, commit=False):
  """Upload a new version and update the version file for the asset.

  Zips target_dir, uploads the zip to GS under the next version number, and
  records that number in the asset's version file in the Skia checkout.

  Args:
    target_dir: directory whose contents become the new asset version.
    commit: if True, commit the version-file change on a branch and run
        'git cl upload'; otherwise just write and 'git add' the file.
  """
  version = self.get_next_version()
  target_dir = os.path.abspath(target_dir)
  with utils.tmp_dir():
    zip_file = os.path.join(os.getcwd(), '%d.zip' % version)
    zip_utils.zip(target_dir, zip_file, blacklist=ZIP_BLACKLIST)
    gs_path = GS_PATH_TMPL % (self._gs_subdir, str(version))
    self._gs.copy(zip_file, gs_path)

    # Writes the new version number and stages the file in git.
    def _write_version():
      with open(self.version_file, 'w') as f:
        f.write(str(version))
      subprocess.check_call([utils.GIT, 'add', self.version_file])

    with utils.chdir(SKIA_DIR):
      if commit:
        # Commit on a temporary branch and upload a CL for review.
        with utils.git_branch():
          _write_version()
          subprocess.check_call([
              utils.GIT, 'commit', '-m', 'Update %s version' % self._name])
          subprocess.check_call([utils.GIT, 'cl', 'upload',
                                 '--bypass-hooks'])
      else:
        _write_version()
def create_asset(target_dir):
  """Create the asset: download procdump and unzip it into target_dir."""
  with utils.tmp_dir():
    archive = "procdump.zip"
    subprocess.check_call(["curl", PROCDUMP_URL, "-o", archive])
    subprocess.check_call(["unzip", archive, "-d", target_dir])