def verify(self, target_dir):
    """Verify that the xattrs on files and directories under `target_dir`
    match the key-value pairs recorded at scratch time."""
    with pushd(target_dir):
        for f in self.source_files.keys():
            fp = os.path.join(target_dir, f)
            attrs = os.listxattr(path=fp, follow_symlinks=False)
            assert len(attrs) == self.xattr_pairs
            for k in self.source_xattrs[f].keys():
                v = os.getxattr(fp, k.encode(self.encoding)).decode(
                    self.encoding)
                assert v == self.source_xattrs[f][k]
            attrs = os.listxattr(fp, follow_symlinks=False)
            if self.encoding != "gb2312":
                for attr in attrs:
                    v = xattr.getxattr(f, attr)
                    assert attr in self.source_xattrs[f].keys()
                    assert v.decode(
                        self.encoding) == self.source_xattrs[f][attr]

    with pushd(target_dir):
        for d in self.source_dirs.keys():
            dp = os.path.join(target_dir, d)
            attrs = xattr.listxattr(dp)
            assert len(attrs) == self.xattr_pairs
            for attr in attrs:
                v = xattr.getxattr(d, attr)
                assert attr in self.source_dirs_xattrs[d].keys()
                assert v.decode(
                    self.encoding) == self.source_dirs_xattrs[d][attr]

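# The snippets in this file lean on a `pushd` (or `utils.pushd`) context manager
# that is not shown here. A minimal sketch of what it is assumed to do -- enter a
# directory on entry and restore the previous working directory on exit -- might
# look like this (the name `pushd` matches the call sites; the body is an
# assumption, not the project's actual implementation):
import os
from contextlib import contextmanager


@contextmanager
def pushd(new_dir):
    # Remember where we are, switch to `new_dir`, and always switch back,
    # even if the body of the `with` block raises.
    previous_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield os.getcwd()
    finally:
        os.chdir(previous_dir)
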
def scratch(self, scratch_dir):
    """Put various kinds of xattr values onto files and directories.

    1. Very long value
    2. A common short value
    3. Nothing resides in the value field
    4. Single file, multiple pairs
    5. "\\n" (newline)
    6. Whitespace
    7. Chinese characters (中文)
    8. Binary
    9. Only key?
    """
    self.dist.put_symlinks(100)
    files_cnt = 20
    self.dist.put_multiple_files(files_cnt, Size(9, Unit.KB))
    self.scratch_dir = os.path.abspath(scratch_dir)
    self.source_files = {}
    self.source_xattrs = {}
    self.source_dirs = {}
    self.source_dirs_xattrs = {}
    self.encoding = "gb2312"
    self.xattr_pairs = 50 if utils.get_fs_type(os.getcwd()) == "xfs" else 20
    # TODO: Only key without values?
    with pushd(self.scratch_dir):
        for f in self.dist.files[-files_cnt:]:
            relative_path = os.path.relpath(f, start=self.scratch_dir)
            self.source_xattrs[relative_path] = {}
            for idx in range(0, self.xattr_pairs):
                # TODO: Randomize this key
                k = f"trusted.nydus.{Distributor.generate_random_name(20, chinese=True)}"
                v = f"_{Distributor.generate_random_length_name(20, chinese=True)}"
                xattr.setxattr(f, k.encode(self.encoding), v.encode(self.encoding))
                # Use relative or canonicalized names as keys to locate
                # paths in the source rootfs directory, so we can verify
                # that the image is packed correctly.
                self.source_files[relative_path] = os.path.abspath(f)
                self.source_xattrs[relative_path][k] = v

    dir_cnt = 20
    self.dist.put_directories(dir_cnt)
    # Add xattr key-value pairs to directories.
    with pushd(self.scratch_dir):
        for d in self.dist.dirs[-dir_cnt:]:
            relative_path = os.path.relpath(d, start=self.scratch_dir)
            self.source_dirs_xattrs[relative_path] = {}
            for idx in range(0, self.xattr_pairs):
                # TODO: Randomize this key
                k = f"trusted.{Distributor.generate_random_name(20)}"
                v = f"{Distributor.generate_random_length_name(50)}"
                xattr.setxattr(d, k, v.encode())
                # Use relative or canonicalized names as keys to locate
                # paths in the source rootfs directory, so we can verify
                # that the image is packed correctly.
                self.source_dirs[relative_path] = os.path.abspath(d)
                self.source_dirs_xattrs[relative_path][k] = v

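# For reference, a standalone round trip of one gb2312-encoded xattr pair using
# only the standard library (Linux-only; the filesystem must support xattrs, the
# path is a made-up example, and the unprivileged "user." namespace is used here
# instead of "trusted.", which requires CAP_SYS_ADMIN):
import os

demo_path = "/tmp/xattr_demo"  # hypothetical scratch file
open(demo_path, "w").close()

key = "user.nydus." + "示例键"  # "example key"
value = "示例值"                # "example value"
os.setxattr(demo_path, key.encode("gb2312"), value.encode("gb2312"))
assert os.getxattr(demo_path, key.encode("gb2312")).decode("gb2312") == value
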
def install(should_identify=True):
    # Based on ideas from https://github.com/harvimt/quamash/blob/master/.travis.yml
    if should_identify:
        system_identify()
    td = Travis_Dispatcher()
    xqt(
        # Cached downloads
        'sudo mkdir -p /downloads',
        'sudo chmod a+rw /downloads')
    sip_ver = 'sip-4.17'
    if not isfile('/downloads/sip.tar.gz'):
        wget('http://downloads.sourceforge.net/project/pyqt/sip/{}/{}'.
             format(sip_ver, _gz(sip_ver)),
             '/downloads/sip.tar.gz')
    # _`pyqt_ver`: Select a PyQt version. See also qt5_Linux_ and qt5_OS_X_.
    pyqt_ver = '5.5.1'
    pyqt_gpl_ver = 'PyQt-gpl-' + pyqt_ver
    if not isfile('/downloads/pyqt5.tar.gz'):
        wget('http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-{}/{}'.
             format(pyqt_ver, _gz(pyqt_gpl_ver)),
             '/downloads/pyqt5.tar.gz')
    # Builds
    xqt('sudo mkdir -p /builds',
        'sudo chmod a+rw /builds')
    # Qt5
    td.qt5()
    # SIP. With Linux or OS_X, don't use the package manager to install these,
    # since they're installed for the system Python, not the pyenv version
    # we're testing with.
    with pushd('/builds'):
        xqt('tar xzf /downloads/sip.tar.gz --keep-newer-files')
        chdir(sip_ver)
        xqt('python configure.py',
            'make',
            'sudo make install')
    # PyQt5
    with pushd('/builds'):
        xqt('tar xzf /downloads/pyqt5.tar.gz --keep-newer-files')
        chdir(pyqt_gpl_ver)
        td.pyqt5_configure()
        xqt('make',
            'sudo make install')
    # PCRE
    td.pcre()
    # Qutepart
    if build_os == 'Linux':
        set_display()
        xqt('sh -e /etc/init.d/xvfb start')
    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py install')

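# The CI helpers `xqt` and `wget` used above are assumed to be thin wrappers
# around the shell: `xqt` runs each argument as a checked shell command, and
# `wget` fetches a URL to a destination path. A rough sketch under those
# assumptions (not the project's actual ci_utils implementation):
import subprocess


def xqt(*commands):
    # Echo and run each command through the shell, stopping on the first failure.
    for cmd in commands:
        print(cmd)
        subprocess.run(cmd, shell=True, check=True)


def wget(url, destination):
    # Delegate to the system wget so downloads can be retried/resumed.
    xqt('wget {} -O {}'.format(url, destination))
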
def test_file_tail(nydus_anchor: NydusAnchor, nydus_scratch_image: RafsImage, backend):
    """
    description: Read data from file tails
        - Create several files of different sizes
        - Punch a hole into each file; some of them end up with a hole at the tail
        - Create a rafs image from the test scratch directory
        - Mount rafs
        - Run the checks below
    """
    file_size_list = [
        Size(1, Unit.KB),
        Size(6, Unit.KB),
        Size(2, Unit.MB),
        Size(10034, Unit.KB),
    ]
    file_list = []

    dist = Distributor(nydus_anchor.scratch_dir, 2, 2)
    dist.generate_tree()

    for f_s in file_size_list:
        f_name = dist.put_single_file(f_s)
        file_list.append(f_name)
        # Punch a hole near the end of the file.
        with utils.pushd(nydus_anchor.scratch_dir):
            with open(f_name, "a+b") as f:
                fallocate(
                    f,
                    f_s.B - 500,
                    1000,
                    mode=FALLOC_FL_PUNCH_HOLE | FALLOC_FL_KEEP_SIZE,
                )

    nydus_scratch_image.set_backend(backend).create_image()

    rafs_conf = RafsConf(nydus_anchor, nydus_scratch_image)
    rafs_conf.set_rafs_backend(backend, image=nydus_scratch_image)
    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()

    with utils.pushd(nydus_anchor.mount_point):
        for name in file_list:
            with open(name, "rb") as f:
                size = os.stat(name).st_size
                f.seek(size - 300)
                buf = f.read(1000)
                assert len(buf) == 300

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())
    for f in file_list:
        wg.verify_single_file(os.path.join(nydus_anchor.mount_point, f))

    assert not wg.io_error

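# `fallocate` and the FALLOC_FL_* flags above come from a helper module that is
# not shown here. A minimal sketch of a compatible helper, assuming it simply
# wraps glibc's fallocate(2) for the (file object, offset, length, mode) call
# sites used in the test:
import ctypes
import ctypes.util
import os

FALLOC_FL_KEEP_SIZE = 0x01
FALLOC_FL_PUNCH_HOLE = 0x02

_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_libc.fallocate.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_long, ctypes.c_long]


def fallocate(fobj, offset, length, mode=0):
    # Punch holes / preallocate on the file's underlying descriptor.
    if _libc.fallocate(fobj.fileno(), mode, offset, length) != 0:
        err = ctypes.get_errno()
        raise OSError(err, os.strerror(err))
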
def ExportExcelData(AreaStr):
    if utils.pushd(_ReadExcelBuilderPath):
        cmd = 'call ' + _ReadExcelWriteLuaFileName + ' ' + AreaStr
        os.system(cmd)
        utils.popd()

    # Remove files the server does not need.
    if utils.pushd(r'..\..\server\gameserver\src\config'):
        cmd = 'call ' + 'del "cfg_strings*" /s /q'
        os.system(cmd)
        utils.popd()

    print('export lua reader for cs')
    clientPath = os.path.abspath(os.getcwd() + os.sep + _ClientPath)
    os.system('lua ' + _CSReaderForLuaExporter + ' "' + clientPath + '"' +
              ' "' + AreaStr + '"')

def checkInNetMessage():
    svn = pysvn.Client()

    print('update %s' % _NetMessageProtoPath)
    svn.update(_NetMessageProtoPath)
    print('update %s' % _ClientNetMessageFilePath)
    svn.update(_ClientNetMessageFilePath)
    print('update %s' % _ServerNetMessageFilePath)
    svn.update(_ServerNetMessageFilePath)

    if utils.pushd(_NetMessageBuilderPath):
        cmd = 'call ' + _NetMessageBuilderFileName
        os.system(cmd)
        utils.popd()

    if utils.queryYesNo('Upload to SVN Server ?'):
        print('commit %s' % _NetMessageProtoPath)
        checkInDir(svn, _NetMessageProtoPath)
        svn.checkin(_NetMessageProtoPath, 'PythonTool: update net proto source')

        print('commit %s' % _ClientNetMessageFilePath)
        checkInDir(svn, _ClientNetMessageFilePath)
        svn.checkin(_ClientNetMessageFilePath, 'PythonTool: update net proto for client')

        print('commit %s' % _ServerNetMessageFilePath)
        checkInDir(svn, _ServerNetMessageFilePath)
        svn.checkin(_ServerNetMessageFilePath, 'PythonTool: update net proto for server')

        print('commit %s' % _ClientLuaNetMessageFilePath)
        checkInDir(svn, _ClientLuaNetMessageFilePath)
        svn.checkin(_ClientLuaNetMessageFilePath, 'PythonTool: update net proto for lua')

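# Unlike the context-manager style sketched earlier, these tool scripts call
# `utils.pushd(path)` as a plain function that returns a truthy value on
# success and later call `utils.popd()` to go back. A rough sketch of such a
# pair, assuming a simple directory stack (the real `utils` module may differ):
import os

_dir_stack = []


def pushd(path):
    # Push the current directory and enter `path`; return False on failure.
    _dir_stack.append(os.getcwd())
    try:
        os.chdir(path)
        return True
    except OSError:
        _dir_stack.pop()
        return False


def popd():
    # Return to the most recently pushed directory, if any.
    if _dir_stack:
        os.chdir(_dir_stack.pop())
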
def resolve_deps(self, force=False):
    if force:
        self.dependencies = []
    if len(self.dependencies) > 0:
        return self.dependencies

    pip_install_opts = ['--no-install', '--verbose', '-e']
    rm('-r', '-f', "/tmp/pip_build_{0}".format(getuser()))
    with pushd(self.path):
        print('')
        print('Gathering package requirements ...')
        for line in pip('install', pip_install_opts, '.'):
            string = line.rstrip()
            match = re.search(r'Downloading/unpacking (.*?) \(from (.*?)\)', string)
            if match:
                self._add_dependency(match.group(1), dependents=match.group(2))
                continue
            match = re.search(
                r'Requirement already satisfied.*?: (.*?) in .*?\(from (.*?)\)',
                string)
            if match:
                self._add_dependency(match.group(1), dependents=match.group(2))
                continue
        print("... done. {0} records found.".format(len(self.dependencies)))
    return self.dependencies

def reset(self):
    with pushd(self.path):
        print('')
        print("Resetting Git repository in '{0}' ...".format(self.path))
        git('reset', '--hard')
        git('clean', '-f', '-d', '-x')
        print('... done')

def update(self):
    with pushd(self.path):
        print('')
        print("Updating Git repository in '{0}' ...".format(self.path))
        git('remote', 'update')
        git('pull', '--rebase')
        print('... done')

def verify(self, target_dir, source_dir):
    for links in self.dist.hardlinks.values():
        try:
            links_iter = iter(links)
            l = next(links_iter)
        except StopIteration:
            continue
        t_hl_path = os.path.join(target_dir, l)
        last_md5 = WorkloadGen.calc_file_md5(t_hl_path)
        last_stat = os.stat(t_hl_path)
        last_path = t_hl_path
        for l in links_iter:
            t_hl_path = os.path.join(target_dir, l)
            t_hl_md5 = WorkloadGen.calc_file_md5(t_hl_path)
            t_hl_stat = os.stat(t_hl_path)
            assert last_md5 == t_hl_md5
            assert (
                last_stat == t_hl_stat
            ), f"last hardlink path {last_path}, cur hardlink path {t_hl_path}"
            last_md5 = t_hl_md5
            last_stat = t_hl_stat
            last_path = t_hl_path

    with pushd(target_dir):
        assert (os.stat(os.path.join(
            target_dir, self.inner_hardlink_name)).st_nlink == 1)

def install():
    system_identify()
    # First, install OS-independent items. Install the development version of
    # CodeChat, rather than the (older) released version on PyPI that the pip
    # install would use.
    xqt('git clone https://github.com/bjones1/CodeChat.git')
    with pushd('CodeChat'):
        xqt('python setup.py install')
    xqt('python -m pip install -U -r requirements.txt')
    # Install OS-dependent items.
    cid = CI_Dispatcher()
    # The qutepart install script assumes the working directory is qutepart.
    with pushd('qutepart'):
        cid.install_qutepart()
    cid.install()

def nydus_image_output(self):
    with utils.pushd(os.path.join(self.work_dir, "bootstraps")):
        outputs = [
            o for o in os.listdir() if re.match(r".*json$", o) is not None
        ]
        outputs.sort(key=lambda x: int(x.split("-")[0]))
        with open(outputs[0], "r") as f:
            return json.load(f)

def checkout(self, branch=None):
    if not branch:
        raise Exception("No branch given to checkout")
    with pushd(self.path):
        print('')
        print("Switching to branch '{0}' ...".format(branch))
        git('checkout', branch)
        print('... done')

def collect_all_dirs(self):
    # In case this function is called more than once.
    if hasattr(self, "collected"):
        return
    self.collected = True
    self._collected_dirs = []
    self._collected_dirs.append(self.target_dir)
    with pushd(self.target_dir):
        self._collect_each_dir(self.target_dir, self.target_dir)

def main(argv):
    project_dir = get_project_dir()
    utils.cmd_exists("clang-format")
    utils.cmd_exists("yapf")
    with utils.pushd(project_dir):
        code_format(os.path.join(project_dir, "app"))
        code_format(os.path.join(project_dir, "experiments"))
        code_format(os.path.join(project_dir, "scripts"))

def test():
    if build_os == 'Windows':
        # The PATH can't be set in install_, since changes to the environment
        # get lost when Python quits.
        os.environ['PATH'] = CTAGS_VER + '\\;' + os.environ['PATH']
    else:
        qutepart_travis.set_display()
    with pushd('tests'):
        xqt('python run_all.py')

def _collect_each_dir(self, root_dir, parent_dir):
    files = os.listdir(parent_dir)
    with pushd(parent_dir):
        for one in files:
            st = os.lstat(one)
            if S_ISDIR(st.st_mode) and len(os.listdir(one)) != 0:
                realpath = os.path.realpath(one)
                self._collected_dirs.append(realpath)
                self._collect_each_dir(root_dir, one)
            else:
                continue

def __init__(self, path):
    if os.path.exists(path):
        self.path = path
    else:
        raise Exception("Path '{0}' does not exist.".format(path))
    self.dependencies = []
    with pushd(self.path):
        self.package_name = tail(python('setup.py', '--name'), '-1').rstrip()

def iter_all_files(self, file_op, dir_op=None):
    for (cur_dir, subdirs, files) in os.walk(
        self.target_dir, topdown=True, followlinks=False
    ):
        with pushd(cur_dir):
            for f in files:
                file_op(f)
            if dir_op is not None:
                for d in subdirs:
                    dir_op(d)

def locate_bootstrap(self):
    bootstraps_dir = os.path.join(self.work_dir, "bootstraps")
    with utils.pushd(bootstraps_dir):
        each_layers = os.listdir()
        if len(each_layers) == 0:
            return None
        each_layers = [l.split("-") for l in each_layers]
        each_layers.sort(key=lambda x: int(x[0]))
        return os.path.join(bootstraps_dir, "-".join(each_layers[-1]))

def status(self, long=False, show=False):
    opts = '--long' if long else '--short'
    with pushd(self.path):
        git_status = git('status', opts)
    if show:
        print('')
        print("'git status' in '{0}':".format(self.path))
        for line in git_status:
            print("| {0}".format(line.rstrip()))
    return git_status

def test_various_file_types(
    nydus_anchor: NydusAnchor, rafs_conf: RafsConf, nydus_scratch_image: RafsImage
):
    """
    description: Put various types of files into the rootfs.
        - Regular, dir, char, block, fifo, sock, symlink
    """
    with utils.pushd(nydus_scratch_image.rootfs()):
        fd = os.open("regular", os.O_CREAT | os.O_RDWR)
        os.close(fd)

        os.mkfifo("fifo")
        # NOTE: `2 ^ 64` is bitwise XOR (it evaluates to 66), not 2**64, so the
        # generated device numbers are small.
        os.mknod("blk", 0o600 | stat.S_IFBLK, device=random.randint(0, 2 ^ 64))
        os.mknod("char", 0o600 | stat.S_IFCHR, device=random.randint(0, 2 ^ 64))
        os.mknod("sock", 0o600 | stat.S_IFSOCK, device=random.randint(0, 2 ^ 64))
        os.symlink("regular", "symlink")

    nydus_scratch_image.set_backend(Backend.BACKEND_PROXY).create_image()

    rafs_conf.set_rafs_backend(Backend.BACKEND_PROXY)
    rafs = RafsMount(nydus_anchor, nydus_scratch_image, rafs_conf)
    rafs.mount()

    with utils.pushd(nydus_anchor.mount_point):
        assert os.path.exists("fifo")
        assert os.path.exists("blk")
        assert os.path.exists("char")
        assert os.path.exists("sock")
        assert os.path.exists("symlink")

    wg = WorkloadGen(nydus_anchor.mount_point, nydus_scratch_image.rootfs())
    wg.setup_workload_generator()

    assert wg.verify_entire_fs()

    wg.torture_read(2, 4)
    wg.finish_torture_read()

def main(argv):
    project_dir = get_project_dir()
    with utils.pushd(project_dir):
        test_installation = os.path.join(project_dir, "scripts", "detail",
                                         "test_installation.py")
        command = f"python {test_installation} --target windows"
        utils.execute(command)

        bootstrap = os.path.join(project_dir, "scripts", "bootstrap.py")
        command = f"python {bootstrap} --target windows"
        utils.execute(command)

        build = os.path.join(project_dir, "scripts", "build.py")
        command = f"python {build} --target windows"
        utils.execute(command)

def _random_pos_dir(self):
    level = randint(0, self.levels)
    with pushd(os.path.join(self.top_dir, random.choice(self.planted_tree_root))):
        while level:
            files = os.listdir()
            level -= 1
            files = [f for f in files if os.path.isdir(f)]
            if len(files) != 0:
                next_level = files[randint(0, len(files) - 1)]
            else:
                break
            os.chdir(next_level)
        return os.getcwd()

def mount_overlayfs(self, layers, base=os.getcwd()):
    """
    We usually use overlayfs as the verification directory, since some cases
    may scratch the original source directory.

    :source_dir: A directory acting as one layer of the overlayfs, from which
                 to build the image.
    :layers: The tail item of `layers` is the bottom layer.

    Cited:
    ```
    Multiple lower layers
    ---------------------

    Multiple lower layers can now be given using the colon (":") as a
    separator character between the directory names. For example:

      mount -t overlay overlay -o lowerdir=/lower1:/lower2:/lower3 /merged

    As the example shows, "upperdir=" and "workdir=" may be omitted. In
    that case the overlay will be read-only.

    The specified lower directories will be stacked beginning from the
    rightmost one and going left. In the above example lower1 will be the
    top, lower2 the middle and lower3 the bottom layer.
    ```
    """
    handled_layers = [l.replace(":", "\\:") for l in layers]

    if len(handled_layers) == 1:
        self.sticky_lower_dir = tempfile.TemporaryDirectory(dir=self.workspace)
        handled_layers.append(self.sticky_lower_dir.name)

    layers_set = ":".join(handled_layers)

    with utils.pushd(base):
        cmd = [
            "mount",
            "-t",
            "overlay",
            "-o",
            f"lowerdir={layers_set}",
            "rafs_ci_overlay",
            self.overlayfs,
        ]
        ret, _ = utils.execute(cmd)
        assert ret

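# A hypothetical call, assuming an object exposing the `workspace` and
# `overlayfs` attributes used above (the layer path is made up for
# illustration): a single read-only source rootfs is the only lower layer, so a
# throwaway lower directory is appended and the merged view appears at
# `self.overlayfs`:
#
#     self.mount_overlayfs(["/path/to/source_rootfs"], base="/tmp")
#
# Because "upperdir=" and "workdir=" are omitted, the resulting mount is
# read-only, which is exactly what a verification directory needs.
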
def scratch(self):
    self.dist.put_hardlinks(30)
    self.outer_source_name = "outer_source"
    self.inner_hardlink_name = "inner_hardlink"

    with pushd(os.path.dirname(os.path.realpath(self.dist.top_dir))):
        fd = os.open(self.outer_source_name, os.O_CREAT | os.O_RDWR)
        os.close(fd)
        os.link(
            self.outer_source_name,
            os.path.join(self.target, self.inner_hardlink_name),
        )
        assert (os.stat(os.path.join(self.target,
                                     self.inner_hardlink_name)).st_nlink == 2)

def install(should_identify=True):
    if should_identify:
        system_identify()
    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)
    # Download and install PyQt5. Only download if we don't have a cached copy
    # available.
    install_PyQt5 = os.path.join(DOWNLOADS, 'install-PyQt5.exe')
    if not isfile(install_PyQt5):
        wget('http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.5.1/'
             'PyQt5-5.5.1-gpl-Py3.4-Qt5.5.1-x32.exe',
             install_PyQt5)
    # See https://github.com/appveyor/ci/issues/363#issuecomment-148915001.
    xqt('REG ADD HKCU\\Software\\Python\\PythonCore\\3.4\\InstallPath /f /ve '
        '/t REG_SZ /d C:\\Python34',
        install_PyQt5 + ' /S')
    # Download and compile PCRE.
    pcre_ver = 'pcre-8.38'
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/8.38/' + pcre_zip,
             pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 10 2010"',
            'cmake --build . --config Release')
    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenerate the command below.
    xqt('python setup.py install --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))

def main():
    project_dir = get_project_dir()

    flatc_path = os.path.join(project_dir, ".vendor", "installed",
                              "msvc-2017-x64-windows-release", "bin", "flatc.exe")
    msg_fbs = os.path.join(project_dir, "app", "messages", "messages.fbs")
    udp_hdr_fbs = os.path.join(project_dir, "app", "messages", "udpheader.fbs")

    utils.check_path(flatc_path)
    utils.check_path(msg_fbs)
    utils.check_path(udp_hdr_fbs)

    gen_msgs = f"{flatc_path} --cpp {msg_fbs}"
    gen_udp_hdr = f"{flatc_path} --cpp {udp_hdr_fbs}"

    with utils.pushd(os.path.join(project_dir, "app", "messages")):
        utils.execute(gen_msgs.split())
        utils.execute(gen_udp_hdr.split())

def update_cache(self):
    if not self.test_cache():
        rm(self.path, '-rf')
        mkdir('-p', self.path)
        index_file_url = '/'.join([self.repo_url.url.geturl(), 'Packages.gz'])
        index_file_path = os.path.join(self.path, self.index_file)
        print("Downloading index file '{0}' --> '{1}' ...".format(
            index_file_url, index_file_path))
        try:
            with pushd(self.path):
                wget(index_file_url, '-O', self.index_file + '.gz')
                gzip('-d', self.index_file + '.gz')
        except Exception as err:
            print(str(err))
            self.broken = True

def put_directories(self, count):
    for i in range(0, count):
        dst_path = os.path.join(
            self._random_pos_dir(),
            Distributor.generate_random_name(30, suffix="dir"),
        )
        # `dst_path` can be very long, so create the directories one
        # component at a time.
        dst_relpath = os.path.relpath(dst_path, start=self.top_dir)
        with pushd(self.top_dir):
            for d in dst_relpath.split("/"):
                try:
                    os.chdir(d)
                except FileNotFoundError:
                    os.mkdir(d)
                    os.chdir(d)
        self.dirs.append(os.path.relpath(dst_path, start=self.top_dir))

    return self.dirs[-count:]

def install(should_identify=True):
    if should_identify:
        system_identify()
    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)
    # Download and compile PCRE.
    pcre_raw_ver = '8.39'
    pcre_ver = 'pcre-' + pcre_raw_ver
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/{}/{}'.
             format(pcre_raw_ver, pcre_zip),
             pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        # Per https://cmake.org/cmake/help/latest/generator/Visual%20Studio%2014%202015.html,
        # add the Win64 string for 64-bit Python.
        use_Win64 = ' Win64' if is_64bits else ''
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 14 2015{}"'.
            format(use_Win64),
            'cmake --build . --config Release')
    # First, build the Python C extensions in place so that Enki will have an
    # already-built qutepart, rather than needing to regenerate the command
    # below.
    xqt('python setup.py build_ext --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
    # Next, install it along with its dependencies. See comments at
    # ``install_requires`` on why this is necessary.
    xqt('python -m pip install -e .')
