def _adjustFileRef(self,fileRef,basedir): basename = ntpath.basename(fileRef['path']) dirname=ntpath.normpath(ntpath.join(basedir,ntpath.dirname(fileRef['path']))) retval=ntpath.join(dirname,basename) if os.path.sep == '/': #are we running in cygwin/Linux? retval = retval.replace(r'\\','/') return retval
def main():
    """Emit MSBuild <ItemGroup> fragments (folders plus linked content
    files) for the filesets described by the JSON config in argv[1].

    Python 2 code (print statements).
    """
    if len(sys.argv) <= 1 or sys.argv[1] == '--help':
        print usage_text
        return
    with open(sys.argv[1]) as f:
        config = json.load(f)
    basedir = os.path.abspath(config['basedir'])
    os.chdir(basedir)
    output_directories = set()
    output_files = []
    for fileset in config['files']:
        # Re-anchor at basedir before entering each fileset's source dir,
        # since fileset['source'] may be relative.
        os.chdir(basedir)
        os.chdir(fileset['source'])
        for pattern in fileset['patterns']:
            files = ant_glob(pattern)
            for filename in files:
                frompath = ntpath.normpath(ntpath.join(fileset['source'], filename))
                topath = ntpath.normpath(ntpath.join(fileset['target'], filename))
                # Every prefix of topath ending at a backslash is a folder
                # that must be declared in the project file.
                output_directories.update(topath[:index+1] for (index, ch) in enumerate(topath) if ch=='\\')
                output_files.append((frompath, topath))
    print " <ItemGroup>"
    for dirname in sorted(output_directories):
        print " <Folder Include={0} />".format(quoteattr(dirname))
    print " </ItemGroup>"
    print " <ItemGroup>"
    for (frompath, topath) in output_files:
        print " <Content Include=\"{0}\">\n <Link>{1}</Link>\n </Content>".format(escape(frompath), escape(topath))
    print " </ItemGroup>"
def do_cd(self, line):
    """Change the virtual working directory inside the filesystem image.

    Accepts forward or backward slashes; validates that the target exists
    and is a directory before committing the new pwd, listing it, and
    refreshing the prompt.  Python 2 code (uses the `string` module).
    """
    p = string.replace(line,'/','\\')
    oldpwd = self.pwd
    newPath = ntpath.normpath(ntpath.join(self.pwd,p))
    if newPath == self.pwd:
        # Nothing changed
        return
    common = ntpath.commonprefix([newPath,oldpwd])
    if common == oldpwd:
        # Target is below the current directory: resolve relative to it.
        res = self.findPathName(ntpath.normpath(p))
    else:
        res = self.findPathName(newPath)
    if res is None:
        logging.error("Directory not found")
        self.pwd = oldpwd
        return
    if res.isDirectory() == 0:
        logging.error("Not a directory!")
        self.pwd = oldpwd
        return
    else:
        self.currentINode = res
        self.do_ls('', False)
        self.pwd = ntpath.join(self.pwd,p)
        self.pwd = ntpath.normpath(self.pwd)
        self.prompt = self.pwd + '>'
def adjustDcfFileRef(self, dcf, basedir):
    """Rewrite every entry of dcf['fileRef'] in place, re-anchoring each
    path under *basedir* and converting separators on POSIX hosts.
    """
    for elem in dcf['fileRef']:
        basename = ntpath.basename(elem['path'])
        dirname = ntpath.normpath(ntpath.join(basedir, ntpath.dirname(elem['path'])))
        elem['path'] = ntpath.join(dirname, basename)
        if os.path.sep == '/':  # are we running in cygwin/Linux?
            # BUG FIX: r'\\' matches two literal backslashes, which
            # ntpath.join never emits; single backslashes must be replaced.
            elem['path'] = elem['path'].replace('\\', '/')
def sysprep(self): """Run the Cloudbase-Init.""" # TODO(mmicu): Restructure this method to have a more explicit name # that works for Windows Nano too exe_path = ntpath.join(self._BASE_DIR, "Python", "Scripts", "cloudbase-init.exe") config_unattend_path = ntpath.join( self._BASE_DIR, "conf", "cloudbase-init-unattend.conf") cmd = (r"& '{exe_path}' --config-file '{config_unattend}'").format( exe_path=exe_path, config_unattend=config_unattend_path) try: # Unattend phase self._client.run_remote_cmd(cmd, util.POWERSHELL) except exceptions.ArgusError as ex: LOG.debug("Exception in Unattend phase %s", ex) try: self._client.run_remote_cmd("Restart-Computer", util.POWERSHELL) except (IOError, winrm_exceptions.WinRMTransportError, winrm_exceptions.InvalidCredentialsError): # NOTE(mmicu): When we reboot the machine, it is possible to # have connectivity issues. # This fixes errors that stop scenarios from getting # created on different windows images. LOG.debug("Currently rebooting...") LOG.info("Wait for the machine to finish rebooting ...") self.wait_boot_completion()
def cygwin2nt(path):
    """Translate a Cygwin POSIX path into its Windows (NT) equivalent.

    /cygdrive/<letter>/... maps to <LETTER>:\\..., other absolute paths
    are rooted under C:\\cygwin, and relative paths just swap separators.
    """
    segments = path.split("/")
    if path.startswith("/cygdrive"):
        drive = segments[2].upper()
        return "%s:\\%s" % (drive, ntpath.join(*segments[3:]))
    if segments[0] == "":
        # Absolute path outside /cygdrive: anchor it at the Cygwin root.
        return ntpath.join("C:\\cygwin", *segments)
    return ntpath.join(*segments)
def _saveClassifiers(classifiers, path):
    """Persist *classifiers* to <config.CLASSIFIERS>/<path>/classifiers.pkl,
    creating the destination folder on demand."""
    target_dir = ntpath.join(config.CLASSIFIERS, path)
    # Create folder if it does not exist yet.
    if not ntpath.exists(target_dir):
        os.makedirs(target_dir)
    # Save models
    joblib.dump(classifiers, ntpath.join(target_dir, 'classifiers.pkl'))
def creteEmptyDir(folder):
    """Recreate *folder* as an empty directory.

    If it already exists, each contained file is made writable and removed,
    then the tree itself is deleted before being recreated.  (The name
    keeps the original typo for caller compatibility.)
    """
    if ntpath.exists(folder):
        for entry in os.listdir(folder):
            entry_path = ntpath.join(folder, entry)
            # Clear read-only bits so removal works on Windows.
            os.chmod(entry_path, stat.S_IWRITE)
            os.remove(entry_path)
        os.chmod(folder, stat.S_IWRITE)
        shutil.rmtree(folder)
        logging.debug('delete: {0}'.format(folder))
    os.makedirs(folder)
    logging.debug('create: {0}'.format(folder))
def posixToNT( path ):
    """Convert an absolute POSIX path into a Windows path under drive z:.

    e.g. ``/home/user/f.txt`` -> ``z:\\home\\user\\f.txt``.
    """
    accumulated = None
    head, tail = os.path.split(path)
    # Peel components off the tail end until only the root remains.
    while head not in (None, '', '/'):
        accumulated = tail if accumulated is None else ntpath.join(tail, accumulated)
        head, tail = os.path.split(head)
    return ntpath.join('z:\\', tail, accumulated)
def _config_specific_paths(self):
    """Populate the ConfigParser object with instance specific values."""
    # All paths are rooted at the guest's Cloudbase-Init install dir.
    cbinit_dir = introspect.get_cbinit_dir(self._execute)
    self.set_conf_value("bsdtar_path",
                        ntpath.join(cbinit_dir, r'bin\bsdtar.exe'))
    # Trailing backslashes are kept: these values name directories.
    self.set_conf_value("local_scripts_path",
                        ntpath.join(cbinit_dir, 'LocalScripts\\'))
    self.set_conf_value("logdir", ntpath.join(cbinit_dir, "log\\"))
    self.set_conf_value("mtools_path", ntpath.join(cbinit_dir, "bin\\"))
def test_windbg_version():
    """Fail if the dbghelp.dll loaded by winappdbg is the OS-shipped copy
    (System32/SysWoW64) rather than one from a WinDbg installation."""
    from winappdbg import System, win32
    dbghelp = System.load_dbghelp()
    pathname = win32.GetModuleFileNameEx(-1, dbghelp._handle)
    sysroot = os.getenv("SystemRoot")
    if not sysroot:
        # Fall back to the all-caps spelling used on some systems.
        sysroot = os.getenv("SYSTEMROOT")
    system = ntpath.join(sysroot, "System32")
    syswow = ntpath.join(sysroot, "SysWoW64")
    # Compare case-insensitively: NTFS paths are case-preserving only.
    if (pathname.lower().startswith(system.lower()) or
            pathname.lower().startswith(syswow.lower())):
        raise RuntimeError("WinDbg not found")
def _add_source_to_target(sftp, source_base_path, remote_base_path):
    """Mirror the local tree at *source_base_path* to *remote_base_path*
    over SFTP, creating missing directories and syncing regular files.
    """
    # Try to see which files/directories are missing remotely or have different size and create/copy.
    for (dirpath, entry_to_stat) in filtered_walk(
            source_base_path,
            ignore_patterns=source_ignore_patterns,
            compare_path=source_base_path):
        remote_dirpath = _convert_path(source_base_path, remote_base_path, dirpath)
        sftp.mkdir_if_not_exist(remote_dirpath)
        # First ensure sub-directories exist remotely.
        for dirname in [k for k, s in entry_to_stat.items() if stat.S_ISDIR(s.st_mode)]:
            sftp.mkdir_if_not_exist(rpath.join(remote_dirpath, dirname))
        # Then sync regular files; *.pyc are skipped in python_mode.
        # NOTE(review): `rpath` and `python_mode` are module-level names —
        # presumably the remote path module and a feature flag; confirm.
        for fname, f_stat in [(f, s) for f, s in entry_to_stat.items() if stat.S_ISREG(s.st_mode)]:
            if not python_mode or not fnmatch(fname, '*.pyc'):
                remote_path = rpath.join(remote_dirpath, fname)
                _sync_file(sftp, os.path.join(dirpath, fname), f_stat, remote_path)
def do_cd(self, s):
    """Change the remote shell's working directory and update the prompt."""
    self.execute_remote('cd ' + s)
    if len(self.__outputBuffer.strip('\r\n')) > 0:
        # Non-empty output from 'cd' indicates the command failed
        # (presumably an error message); show it and keep the old pwd.
        print(self.__outputBuffer)
        self.__outputBuffer = ''
    else:
        if PY2:
            self.__pwd = ntpath.normpath(ntpath.join(self.__pwd, s.decode(sys.stdin.encoding)))
        else:
            self.__pwd = ntpath.normpath(ntpath.join(self.__pwd, s))
        # Ask the remote shell for its canonical notion of the cwd.
        self.execute_remote('cd ')
        self.__pwd = self.__outputBuffer.strip('\r\n')
        self.prompt = (self.__pwd + '>')
        self.__outputBuffer = ''
def replace_code(self):
    """Replace the code of Cloudbase-Init."""
    if not CONFIG.argus.git_command:
        # Nothing to replace.
        return
    LOG.info("Replacing Cloudbase-Init's code "
             "with %s", CONFIG.argus.git_command)
    LOG.debug("Getting Cloudbase-Init location...")
    # Get Cloudbase-Init python location.
    python_dir = introspection.get_python_dir(self._execute)
    # Remove everything from the Cloudbase-Init installation.
    LOG.debug("Recursively removing Cloudbase-Init...")
    cloudbaseinit = ntpath.join(
        python_dir, "Lib", "site-packages", "cloudbaseinit")
    self._execute('rmdir "{}" /S /q'.format(cloudbaseinit),
                  command_type=util.CMD)
    # Clone the repository
    clone_res = self._backend.remote_client.manager.git_clone(
        repo_url=_CBINIT_REPO, location=_CBINIT_TARGET_LOCATION)
    if not clone_res:
        raise exceptions.ArgusError('Code repository could not '
                                    'be cloned.')
    # Run the command provided at cli.
    LOG.debug("Applying cli patch...")
    self._execute("cd {location} && {command}".format(
        location=_CBINIT_TARGET_LOCATION,
        command=CONFIG.argus.git_command), command_type=util.CMD)
    # Replace the code, by moving the code from Cloudbase-Init
    # to the installed location.
    LOG.debug("Replacing code...")
    # NOTE(review): the assembled PowerShell command has no space before
    # '-Recurse' ("...'{folder}'-Recurse") — confirm this is intended.
    self._execute('Copy-Item {location}\\cloudbaseinit \'{folder}\''
                  '-Recurse'.format(location=_CBINIT_TARGET_LOCATION,
                                    folder=cloudbaseinit),
                  command_type=util.POWERSHELL)
    # Auto-install packages from the new requirements.txt
    python = ntpath.join(python_dir, "python.exe")
    command = '"{folder}" -m pip install -r {location}\\requirements.txt'
    self._execute(command.format(folder=python,
                                 location=_CBINIT_TARGET_LOCATION),
                  command_type=util.CMD)
def inject_cbinit_config(self):
    """Inject the Cloudbase-Init config in the right place."""
    install_dir = introspection.get_cbinit_dir(self._execute)
    conf_dir = ntpath.join(install_dir, "conf")
    # Both the log and conf directories must exist before writing configs.
    for required in (ntpath.join(install_dir, "log"), conf_dir):
        self._make_dir_if_needed(required)
    self._cbinit_conf.apply_config(conf_dir)
    self._cbinit_unattend_conf.apply_config(conf_dir)
def full_path_from_sharename(share_name, path):
    """Map a UNC share name plus relative *path* back to a real filesystem path.

    Hosts named ``hostluna_drive_<letter>`` map back to ``<letter>:\\...``;
    hosts named ``hostluna_nfs*`` carry a base64-encoded POSIX prefix in the
    share component; anything else is joined onto the UNC name as-is.
    """
    # share_name is expected to look like \\<unc_host>\<unc_share>
    (_1, _2, unc_host, unc_share) = share_name.split('\\')
    if unc_host.startswith('hostluna_drive_'):
        drive_letter = unc_host.split('_')[2]
        path_prefix = drive_letter + ':\\'
        full_filename = ntpath.join(path_prefix, path)
    elif unc_host.startswith('hostluna_nfs'):
        # BUG FIX: b64decode returns bytes on Python 3; decode to str so
        # os.path.join of mixed bytes/str does not raise TypeError.
        path_prefix = base64.b64decode(unc_share).decode()
        path_suffix = path.replace('\\', '/')
        full_filename = os.path.join(path_prefix, path_suffix)
    else:
        full_filename = ntpath.join(share_name, path)
    return full_filename
def __init__(self, owner, section, key_path, default_value):
    """Resolve each directory entry of self._dirlist from *section*.

    '<<default>>' values fall back to the entry's rel_path.  Entries with
    a dependency are joined onto the dependency's resolved value — or a
    '<<dep>>' placeholder when that value is missing — using the path
    style of the owner's platform (ntpath vs posixpath).
    """
    # NOTE(review): key_path is unused in this body — confirm whether a
    # subclass consumes it or it can be dropped.
    values = {}
    for name, description, depend, rel_path in self._dirlist:
        values[name] = section.get(name, default=default_value)
    self._values = []
    for name, description, depend, rel_path in self._dirlist:
        value = values[name]
        if values[name] == '<<default>>':
            value = rel_path
        if depend is not None:
            if values[depend] is not None:
                # Dependency resolved: join beneath it, normalizing slashes.
                if owner.is_platform_windows:
                    value = ntpath.join(values[depend].replace('/', '\\'),
                                        value.replace('/', '\\'))
                else:
                    value = posixpath.join(values[depend], value)
            else:
                # Dependency unresolved: keep a symbolic placeholder.
                if owner.is_platform_windows:
                    value = '<<%s>>\\%s' % (depend, value.replace('/', '\\'))
                else:
                    value = '<<%s>>/%s' % (depend, value)
        if value is not None:
            if owner.is_platform_windows:
                value = ntpath.normpath(value.replace('/', '\\'))
            else:
                value = posixpath.normpath(value)
        self._values.append(
            _Terra3DDirectories._item(name, description, value)
        )
def get_cbinit_dir(execute_function):
    """Get the location of Cloudbase-Init from the instance."""
    arch = execute_function(
        '$ENV:PROCESSOR_ARCHITECTURE', command_type=util.POWERSHELL).strip()
    # 64-bit guests may host the install under either Program Files dir.
    candidates = [execute_function('echo "$ENV:ProgramFiles"',
                                   command_type=util.POWERSHELL)]
    if arch == 'AMD64':
        candidates.append(execute_function(
            'echo "${ENV:ProgramFiles(x86)}"', command_type=util.POWERSHELL))
    for candidate in candidates:
        candidate = candidate.strip()
        escaped = escape_path(candidate)
        present = execute_function(
            'Test-Path "{}\\Cloudbase` Solutions"'.format(escaped),
            command_type=util.POWERSHELL).strip().lower()
        if present == "true":
            return ntpath.join(candidate, "Cloudbase Solutions",
                               "Cloudbase-Init")
    raise exceptions.ArgusError('Cloudbase-Init installation directory'
                                ' not found')
def prepare_shortcuts(self):
    """Prepare shortcut files in the build directory.

    If entry_point is specified, write the script. If script is specified,
    copy to the build directory. Prepare target and parameters for these
    shortcuts.

    Also copies shortcut icons
    """
    files = set()
    for scname, sc in self.shortcuts.items():
        if not sc.get('target'):
            if sc.get('entry_point'):
                # Generated launcher: '.launch.py' (console) or
                # '.launch.pyw' (windowed).
                sc['script'] = script = scname.replace(' ', '_') + '.launch.py' \
                    + ('' if sc['console'] else 'w')
                self.write_script(sc['entry_point'],
                                  pjoin(self.build_dir, script),
                                  sc.get('extra_preamble', ''))
            else:
                shutil.copy2(sc['script'], self.build_dir)
            sc['target'] = 'py' if sc['console'] else 'pyw'
            # $INSTDIR is expanded by the NSIS installer at runtime.
            sc['parameters'] = '"%s"' % ntpath.join('$INSTDIR', sc['script'])
            files.add(os.path.basename(sc['script']))
        shutil.copy2(sc['icon'], self.build_dir)
        sc['icon'] = os.path.basename(sc['icon'])
        files.add(sc['icon'])
    self.install_files.extend([(f, '$INSTDIR') for f in files])
def nt_relpath(path, start=curdir):
    """Implement os.path.relpath for Windows, which Python 2.5 lacks."""
    from ntpath import abspath, splitunc, sep, pardir, join
    # NOTE(review): ntpath.splitunc was removed in Python 3.12 — this is
    # Python 2 era code; verify before running on modern interpreters.
    if not path:
        raise ValueError("no path specified")
    start_list = abspath(start).split(sep)
    path_list = abspath(path).split(sep)
    if start_list[0].lower() != path_list[0].lower():
        unc_path, rest = splitunc(path)
        unc_start, rest = splitunc(start)
        if bool(unc_path) ^ bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                             % (path, start))
        else:
            raise ValueError("path is on drive %s, start on drive %s"
                             % (path_list[0], start_list[0]))
    # Work out how much of the filepath is shared by start and path.
    for i in range(min(len(start_list), len(path_list))):
        if start_list[i].lower() != path_list[i].lower():
            break
    else:
        i += 1
    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
def child(self, name):
    """Create a child node called *name*, record it in self.dirs, return it."""
    child_path = name if self.path is None else ntpath.join(self.path, name)
    node = self.__class__(child_path)
    self.dirs.append(node)
    return node
def wbatch_log_vbm(*args):
    """Mirror a VBoxManage invocation into the generated Windows batch
    script, rewriting host-specific arguments (host-only adapter names,
    shared dir, image paths) into their Windows counterparts."""
    # NOTE(review): list(*args) unpacks args, so this only works when the
    # caller passes a single iterable argument — confirm all callers do.
    argl = list(*args)
    for index, arg in enumerate(argl):
        if re.match("--hostonlyadapter", arg):
            # The next arg is the host-only interface name -> change it
            argl[index+1] = '"' + vboxnet_to_win_adapter_num(argl[index+1]) + \
                '"'
        elif re.match("--hostpath", arg):
            # The next arg is the shared dir -> change it
            argl[index+1] = r'%SHAREDIR%'
        elif re.search(r"\.(iso|vdi)$", arg):
            # Fix path of ISO or VDI image
            img_name = os.path.basename(arg)
            argl[index] = ntpath.join("%IMGDIR%", img_name)
    # Have Windows echo what we are about to do
    wbatch_write("ECHO VBoxManage " + " ".join(argl))
    wbatch_write("VBoxManage " + " ".join(argl))
    # Abort if VBoxManage call raised errorlevel
    wbatch_write("IF %errorlevel% NEQ 0 GOTO :vbm_error")
    # Blank line for readability
    wbatch_write()
def test_inject_cbinit_config(self, mock_get_cbinit_dir, mock_make_dir):
    """inject_cbinit_config must create the log and conf directories and
    apply both the main and the unattend configuration once each."""
    mock_get_cbinit_dir.return_value = "fake dir"
    self._recipe._cbinit_conf = mock.Mock()
    self._recipe._cbinit_unattend_conf = mock.Mock()
    conf_dir = ntpath.join(mock_get_cbinit_dir.return_value, "conf")
    cbinit_dir = mock_get_cbinit_dir.return_value
    # Mirrors the directory list built inside inject_cbinit_config.
    needed_directories = [
        ntpath.join(cbinit_dir, "log"),
        conf_dir,
    ]
    self._recipe.inject_cbinit_config()
    self.assertEqual(mock_make_dir.call_count, len(needed_directories))
    (self._recipe._cbinit_conf.apply_config.
     assert_called_once_with(conf_dir))
    (self._recipe._cbinit_unattend_conf.apply_config.
     assert_called_once_with(conf_dir))
def OnStartup(self):
    """Discover the guest's temp dir and system drive, create the pkb
    working directory, and disable the guest firewall."""
    temp_out, _ = self.RemoteCommand('echo $env:TEMP')
    self.temp_dir = ntpath.join(temp_out.strip(), 'pkb')
    drive_out, _ = self.RemoteCommand('echo $env:SystemDrive')
    self.system_drive = drive_out.strip()
    self.RemoteCommand('mkdir %s' % self.temp_dir)
    self.DisableGuestFirewall()
def relntpath(path, start):
    """Compute a relative Windows path from *start* to *path*.

    Unlike ntpath.relpath: paths on different drives are returned
    unchanged instead of raising, a None start means the current dir,
    and the result is always anchored with a leading '.'.
    """
    import ntpath  # the windows version of os.path
    if start is None:
        start = os.getcwd()
    path, start = ntpath.normpath(path), ntpath.normpath(start)
    drive_p, tail_p = ntpath.splitdrive(path)
    drive_s, tail_s = ntpath.splitdrive(start)
    if not drive_p or not drive_s:
        # One side lacks a drive letter: compare the drive-less tails only.
        path, start = tail_p, tail_s
    elif drive_p != drive_s:
        # ntpath.relpath would raise here; deliberately give up instead.
        return path
    path_parts = path.replace("/", "\\").split("\\")
    start_parts = start.replace("/", "\\").split("\\")
    # Drop the common leading components.
    while path_parts and start_parts and path_parts[0] == start_parts[0]:
        path_parts.pop(0)
        start_parts.pop(0)
    # One '..' per remaining component of start.
    path_parts = [".."] * len(start_parts) + path_parts
    return ntpath.join(".", *path_parts)
def get_cbinit_dir(execute_function):
    """Get the location of cloudbase-init from the instance."""
    stdout = execute_function(
        '(Get-WmiObject Win32_OperatingSystem).'
        'OSArchitecture',
        command_type=util.POWERSHELL)
    architecture = stdout.strip()
    # On 64-bit guests the install may live under either Program Files dir.
    locations = [execute_function('powershell "$ENV:ProgramFiles"',
                                  command_type=util.CMD)]
    if architecture == '64-bit':
        location = execute_function(
            'powershell "${ENV:ProgramFiles(x86)}"',
            command_type=util.CMD)
        locations.append(location)
    for location in locations:
        location = location.strip()
        _location = escape_path(location)
        # The backtick escapes the space for PowerShell's Test-Path.
        status = execute_function(
            'Test-Path "{}\\Cloudbase` Solutions"'.format(_location),
            command_type=util.POWERSHELL).strip().lower()
        if status == "true":
            return ntpath.join(
                location,
                "Cloudbase Solutions",
                "Cloudbase-Init"
            )
    raise exceptions.ArgusError('cloudbase-init installation dir not found')
def relpath_win(self, path, start="."):
    """Return a relative version of a path"""
    SEP = "\\"
    if not path:
        raise ValueError("no path specified")
    start_parts = ntpath.abspath(start).split(SEP)
    path_parts = ntpath.abspath(path).split(SEP)
    if start_parts[0].lower() != path_parts[0].lower():
        # Different roots: distinguish UNC mismatch from drive mismatch.
        unc_path, _rest = ntpath.splitunc(path)
        unc_start, _rest = ntpath.splitunc(start)
        if bool(unc_path) ^ bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                             % (path, start))
        else:
            raise ValueError("path is on drive %s, start on drive %s"
                             % (path_parts[0], start_parts[0]))
    # Count the shared leading components (case-insensitive).
    shared = 0
    for a, b in zip(start_parts, path_parts):
        if a.lower() != b.lower():
            break
        shared += 1
    rel_parts = ['..'] * (len(start_parts) - shared) + path_parts[shared:]
    if not rel_parts:
        return "."
    return ntpath.join(*rel_parts)
def Install(vm):
    """Installs the psping package on the VM."""
    # Fetch and unpack psping into the VM's temp dir, then open the
    # benchmark port and raise the process priority.
    zip_path = ntpath.join(vm.temp_dir, PSPING_ZIP)
    vm.DownloadFile(PSPING_URL, zip_path)
    vm.UnzipFile(zip_path, vm.temp_dir)
    vm.AllowPort(TEST_PORT)
    vm.SetProcessPriorityToHighByFlag('psping.exe')
def download(self, uri, location):
    """Download *uri* to *location* on the guest using the bundled
    PowerShell download script, retrying on transient failures."""
    resource_path = ntpath.join(self._RESOURCE_DIRECTORY,
                                self._DOWNLOAD_SCRIPT)
    cmd = r"{script_path} -Uri {uri} -OutFile '{outfile}'".format(
        script_path=resource_path, uri=uri, outfile=location)
    self._client.run_command_with_retry(
        cmd, command_type=util.POWERSHELL)
def specific_prepare(self):
    """Copy the Nano-specific helper scripts into the guest resource
    directory before running the base class preparation."""
    if not self.is_dir(self._RESOURCE_DIRECTORY):
        self.mkdir(self._RESOURCE_DIRECTORY)
    resource_path = self._get_resource_path(self._COMMON)
    self._client.copy_file(
        resource_path, ntpath.join(self._RESOURCE_DIRECTORY, self._COMMON))
    LOG.info("Copy Download script for Windows NanoServer.")
    resource_path = self._get_resource_path(self._DOWNLOAD_SCRIPT)
    self._client.copy_file(
        resource_path,
        ntpath.join(self._RESOURCE_DIRECTORY, self._DOWNLOAD_SCRIPT))
    super(WindowsNanoActionManager, self).specific_prepare()
def AcquireWritePermissionsWindows(cls, vm):
    """Prepare boto file on a remote Windows instance.

    If the boto file specifies a service key file, copy that service key
    file to the VM and modify the .boto file on the VM to point to the
    copied file.

    Args:
      vm: gce virtual machine object.
    """
    boto_src = object_storage_service.FindBotoFile()
    boto_des = ntpath.join(vm.home_dir, posixpath.basename(boto_src))
    stdout, _ = vm.RemoteCommand(f'Test-Path {boto_des}')
    if 'True' in stdout:
        # Already uploaded by a previous run; nothing to do.
        return
    with open(boto_src) as f:
        boto_contents = f.read()
    match = re.search(r'gs_service_key_file\s*=\s*(.*)', boto_contents)
    if match:
        service_key_src = match.group(1)
        service_key_des = ntpath.join(vm.home_dir,
                                      posixpath.basename(service_key_src))
        # Rewrites the boto file locally to reference the remote key copy.
        boto_src = cls._PrepareGcsServiceKey(vm, boto_src,
                                             service_key_src, service_key_des)
    vm.PushFile(boto_src, boto_des)
def Parse(self, response, knowledge_base):
    """Yield OS pathspecs for executable file-backed memory regions
    reported in the response's JSON messages."""
    system_drive = artifact_utils.ExpandWindowsEnvironmentVariables(
        "%systemdrive%", knowledge_base)
    for message in json.loads(response.json_messages):
        if message[0] == "r":  # presumably 'r' marks result rows — verify
            protection = message[1].get("protection", {}).get("enum", "")
            # Only regions with EXECUTE protection are of interest.
            if "EXECUTE" not in protection:
                continue
            filename = message[1].get("filename", "")
            if filename and filename != "Pagefile-backed section":
                yield rdf_paths.PathSpec(
                    path=ntpath.normpath(ntpath.join(system_drive, filename)),
                    pathtype=rdf_paths.PathSpec.PathType.OS)
def OutputFilePath(in_path, out_path):
    '''
    creates a path to the resulting file
    @param in_path: Input raster path (string)
    @param out_path Input path to the destination folder (string)
    @return full_path Output directory + filename path (string)
    '''
    base = ntpath.basename(in_path)
    # Strip the extension and tag the name with the '_unified' suffix.
    stem = re.search(r'(.+)\.\w+', base).group(1)
    return ntpath.join(out_path + '/', stem + '_unified.tif')
def assembleOutputPath(out_param, input_path):
    """Resolve the output .hex path.

    When out_param is given it is absolutized and its parent directory is
    created if missing.  Otherwise the output sits next to the input file,
    swapping the input's 4-character extension (e.g. '.bin') for '.hex'.
    """
    if out_param:
        abs_out = ntpath.abspath(out_param)
        parent, _name = ntpath.split(abs_out)
        if not ntpath.exists(parent):
            os.mkdir(parent)
        return abs_out
    in_dir, in_name = ntpath.split(input_path)
    return ntpath.join(in_dir, in_name[:-4] + '.hex')
def ls(self, path, display=True):
    """List files matching *path* (or '*' for everything) relative to the
    current pwd on the active SMB share.  Python 2 code (old except
    syntax)."""
    self.check_share()
    if not path:
        pwd = ntpath.join(self.pwd, '*')
    else:
        pwd = ntpath.join(self.pwd, path)
    self.completion = []
    pwd = ntpath.normpath(pwd)
    try:
        files = self.smb.listPath(self.share, pwd)
    except SessionError, e:
        if not display:
            # Caller asked for a silent listing; swallow the error.
            pass
        elif e.getErrorCode() in (nt_errors.STATUS_OBJECT_NAME_NOT_FOUND,
                                  nt_errors.STATUS_NO_SUCH_FILE):
            logger.warn('File not found')
        else:
            logger.warn('Unable to list files: %s' % (e.getErrorString(), ))
        return
def FromDict(cls, arguments):
    """Build an instance from an *arguments* mapping; 'benchmark' is required."""
    benchmark = arguments.get('benchmark')
    if not benchmark:
        raise TypeError('Missing "benchmark" argument.')
    # The results path separator depends on the target platform.
    joiner = ntpath if _IsWindows(arguments) else posixpath
    results_filename = joiner.join(benchmark, 'perf_results.json')
    return cls(results_filename,
               arguments.get('chart'),
               arguments.get('tir_label'),
               arguments.get('trace'),
               arguments.get('statistic'))
def _construct_revision(self, branch_path, version_number):
    """Construct a revisioned path from a branch path and version ID.

    Args:
        branch_path (unicode):
            The path of a branch.

        version_number (unicode):
            The version number of the revision.

    Returns:
        unicode:
        The combined revision.
    """
    # cpath is a path-module alias chosen at module level (presumably
    # ntpath for this SCM's path style) — confirm against the imports.
    return cpath.join(branch_path, version_number)
def Install(vm):
    """Installs the nuttcp package on the VM."""
    zip_path = ntpath.join(vm.temp_dir, NUTTCP_ZIP)

    # Download is wrapped in a retry decorator to ride out flaky mirrors.
    @vm_util.Retry()
    def DownloadWithRetry():
        vm.DownloadFile(NUTTCP_URL, zip_path)

    try:
        DownloadWithRetry()
    except errors.VirtualMachine.RemoteCommandError as e:
        # The mirror to download nuttcp from is temporarily unavailable.
        raise errors.Benchmarks.KnownIntermittentError(
            'Failed to download nuttcp package: %s' % e)
    vm.UnzipFile(zip_path, vm.temp_dir)
def spider(self, share, root, maxdepth):
    """Recursively list *share* starting at *root*, writing every entry
    through self.outwriter; recursion stops when maxdepth goes negative
    or a directory cannot be listed."""
    if maxdepth < 0:
        return []
    try:
        files = self.ls(share, root)
    except Exception as e:
        # Best-effort crawl: log (if verbose) and skip unreadable dirs.
        if self.verbose:
            print ("Error in ls("+share+","+root+","+str(maxdepth)+") : " + str(e))
        return []
    for f in files:
        new_root = ntpath.join(root, f.get_longname())
        new_root = ntpath.normpath(new_root)
        self.outwriter.write(self.host, self.nbtname, share, f, new_root)
        if f.is_directory():
            self.spider(share, root + f.get_longname() + '\\', maxdepth - 1)
def pre_sysprep(self):
    """Patch the installed Cloudbase-Init so it drops a marker file when
    execution finishes."""
    # Patch the installation of Cloudbase-Init in order to create
    # a file when the execution ends. We're doing this instead of
    # monitoring the service, because on some OSes, just checking
    # if the service is stopped leads to errors, due to the
    # fact that the service starts later on.
    python_dir = introspection.get_python_dir(self._execute)
    cbinit = ntpath.join(python_dir, 'Lib', 'site-packages',
                         'cloudbaseinit')
    # Get the shell patching script and patch the installation.
    resource_location = "windows/patch_shell.ps1"
    params = r' "{}"'.format(cbinit)
    self._backend.remote_client.manager.execute_powershell_resource_script(
        resource_location=resource_location, parameters=params)
def test_realpath_symlink_loops(self):
    # Bug #930024, return the path unchanged if we get into an infinite
    # symlink loop.
    ABSTFN = ntpath.abspath(support.TESTFN)
    self.addCleanup(support.unlink, ABSTFN)
    self.addCleanup(support.unlink, ABSTFN + "1")
    self.addCleanup(support.unlink, ABSTFN + "2")
    self.addCleanup(support.unlink, ABSTFN + "y")
    self.addCleanup(support.unlink, ABSTFN + "c")
    self.addCleanup(support.unlink, ABSTFN + "a")
    # Resolved absolute paths carry the extended-length prefix.
    P = "\\\\?\\"
    # Self-loop: realpath must return the (prefixed) path unchanged.
    os.symlink(ABSTFN, ABSTFN)
    self.assertEqual(ntpath.realpath(ABSTFN), P + ABSTFN)
    # cycles are non-deterministic as to which path is returned, but
    # it will always be the fully resolved path of one member of the cycle
    os.symlink(ABSTFN + "1", ABSTFN + "2")
    os.symlink(ABSTFN + "2", ABSTFN + "1")
    expected = (P + ABSTFN + "1", P + ABSTFN + "2")
    self.assertIn(ntpath.realpath(ABSTFN + "1"), expected)
    self.assertIn(ntpath.realpath(ABSTFN + "2"), expected)
    self.assertIn(ntpath.realpath(ABSTFN + "1\\x"),
                  (ntpath.join(r, "x") for r in expected))
    # '..' components are resolved after following the cycle member.
    self.assertEqual(ntpath.realpath(ABSTFN + "1\\.."),
                     ntpath.dirname(ABSTFN))
    self.assertEqual(ntpath.realpath(ABSTFN + "1\\..\\x"),
                     ntpath.dirname(ABSTFN) + "\\x")
    os.symlink(ABSTFN + "x", ABSTFN + "y")
    self.assertEqual(
        ntpath.realpath(ABSTFN + "1\\..\\" + ntpath.basename(ABSTFN) + "y"),
        ABSTFN + "x")
    self.assertIn(
        ntpath.realpath(ABSTFN + "1\\..\\" + ntpath.basename(ABSTFN) + "1"),
        expected)
    # Relative symlink target that loops back through its own parent.
    os.symlink(ntpath.basename(ABSTFN) + "a\\b", ABSTFN + "a")
    self.assertEqual(ntpath.realpath(ABSTFN + "a"), P + ABSTFN + "a")
    os.symlink(
        "..\\" + ntpath.basename(ntpath.dirname(ABSTFN)) + "\\" +
        ntpath.basename(ABSTFN) + "c", ABSTFN + "c")
    self.assertEqual(ntpath.realpath(ABSTFN + "c"), P + ABSTFN + "c")
    # Test using relative path as well.
    self.assertEqual(ntpath.realpath(ntpath.basename(ABSTFN)), P + ABSTFN)
def run(self):
    """Load the target PE (or raw shellcode) and initialize the Windows
    loader state: profile-driven addresses, symbol tables, LDR list and
    the emulated command line."""
    self.init_dlls = ('ntdll.dll', 'kernel32.dll', 'user32.dll')
    self.sys_dlls = ('ntdll.dll', 'kernel32.dll', 'ucrtbase.dll')
    if self.ql.code:
        # Raw shellcode mode: there is no PE to parse.
        pe = None
        self.is_driver = False
    else:
        pe = pefile.PE(self.path, fast_load=True)
        self.is_driver = pe.is_driver()
    # Profile section name depends on target bitness (OS32 / OS64).
    ossection = f'OS{self.ql.arch.bits}'
    self.stack_address = self.ql.os.profile.getint(ossection, 'stack_address')
    self.stack_size = self.ql.os.profile.getint(ossection, 'stack_size')
    self.image_address = self.ql.os.profile.getint(ossection, 'image_address')
    self.dll_address = self.ql.os.profile.getint(ossection, 'dll_address')
    self.entry_point = self.ql.os.profile.getint(ossection, 'entry_point')
    # TEB/PEB anchor segment: FS on 32-bit, GS on 64-bit.
    self.structure_last_addr = {
        32: FS_SEGMENT_ADDR,
        64: GS_SEGMENT_ADDR
    }[self.ql.arch.bits]
    self.import_symbols = {}
    self.export_symbols = {}
    self.import_address_table = {}
    self.ldr_list = []
    self.pe_image_address = 0
    self.pe_image_size = 0
    self.dll_size = 0
    self.dll_last_address = self.dll_address
    # not used, but here to remain compatible with ql.do_bin_patch
    self.load_address = 0
    # The emulated binary pretends to live on the user's Desktop.
    cmdline = ntpath.join(self.ql.os.userprofile, 'Desktop', self.ql.targetname)
    cmdargs = ' '.join(f'"{arg}"' if ' ' in arg else arg for arg in self.argv[1:])
    self.filepath = bytes(f'{cmdline}\x00', "utf-8")
    self.cmdline = bytes(f'{cmdline} {cmdargs}\x00', "utf-8")
    self.load(pe)
def _export_textures_and_materials(blender_objects, saved_mod):
    """Build the ctypes texture-name and material arrays for a mod export.

    Returns an ExportedMaterials bundle containing the fixed-size texture
    path array, the material data array, and a mapping from
    blender_material.name to its material index.
    """
    textures = get_textures_from_blender_objects(blender_objects)
    blender_materials = get_materials_from_blender_objects(blender_objects)
    # Each texture path is stored in a fixed 64-byte char slot.
    textures_array = ((ctypes.c_char * 64) * len(textures))()
    materials_data_array = (MaterialData * len(blender_materials))()
    materials_mapping = {}  # blender_material.name: material_id
    texture_dirs = get_texture_dirs(saved_mod)
    default_texture_dir = get_default_texture_dir(saved_mod)
    for i, texture in enumerate(textures):
        texture_dir = texture_dirs.get(texture.name)
        if not texture_dir:
            texture_dir = default_texture_dir
            texture_dirs[texture.name] = texture_dir
        # TODO: no default texture_dir means the original mod had no textures
        file_name = os.path.basename(bpy.path.abspath(texture.image.filepath))
        file_path = ntpath.join(texture_dir, file_name)
        try:
            file_path = file_path.encode('ascii')
        except UnicodeEncodeError:
            raise RuntimeError(
                'Texture path {} is not in ascii'.format(file_path))
        if len(file_path) > 64:
            # TODO: what if relative path are used?
            raise RuntimeError(
                'File path to texture {} is longer than 64 characters'.format(
                    file_path))
        # The stored slot holds the path without its extension.
        file_path, _ = ntpath.splitext(file_path)
        textures_array[i] = (ctypes.c_char * 64)(*file_path)
    for mat_index, mat in enumerate(blender_materials):
        material_data = MaterialData()
        for texture_slot in mat.texture_slots:
            if not texture_slot or not texture_slot.texture:
                continue
            texture = texture_slot.texture
            # texture_indices expects index-1 based
            texture_index = textures.index(texture) + 1
            texture_code = blender_texture_to_texture_code(texture_slot)
            material_data.texture_indices[texture_code] = texture_index
        materials_data_array[mat_index] = material_data
        materials_mapping[mat.name] = mat_index
    return ExportedMaterials(textures_array, materials_data_array,
                             materials_mapping, textures, texture_dirs)
def do_get(self, src_path):
    """Download *src_path* (relative to the remote pwd) into the local cwd.

    The drive letter is mapped to its administrative share (e.g. ``C$``).
    On failure, any partially written local file is removed.
    """
    filename = None  # BUG FIX: keep bound for the except path (was NameError
                     # if an exception fired before assignment)
    try:
        import ntpath
        newPath = ntpath.normpath(ntpath.join(self.__pwd, src_path))
        drive, tail = ntpath.splitdrive(newPath)
        filename = ntpath.basename(tail)
        fh = open(filename, 'wb')
        try:
            # 'C:' -> 'C$' administrative share.
            self.__transferClient.getFile(drive[:-1] + '$', tail, fh.write)
        finally:
            # BUG FIX: close the handle even when the transfer fails.
            fh.close()
    except Exception as e:
        logging.error(str(e))
        if filename is not None and os.path.exists(filename):
            os.remove(filename)
def __setup_components(self):
    """Instantiate the emulated Windows OS services: registry, handle
    table, clipboard, fibers and the thread manager with its main thread."""
    # The registry hive lives under <windir>\registry on the rootfs.
    reghive = self.path.transform_to_real_path(
        ntpath.join(self.windir, 'registry'))
    self.handle_manager = handle.HandleManager()
    self.registry_manager = registry.RegistryManager(self.ql, reghive)
    self.clipboard = clipboard.Clipboard(self)
    self.fiber_manager = fiber.FiberManager(self.ql)
    main_thread = thread.QlWindowsThread(self.ql)
    self.thread_manager = thread.QlWindowsThreadManagement(
        self.ql, self, main_thread)
    # more handle manager
    new_handle = handle.Handle(obj=main_thread)
    self.handle_manager.append(new_handle)
def _get_libs(self, project_data):
    """Collect .lib search paths and library names from
    project_data['source_files_lib'] into 'lib_paths' / 'libraries'.

    Entries whose extension is not .lib are skipped with a debug message;
    directories are resolved against the workspace output dir.
    """
    project_data['lib_paths'] = []
    project_data['libraries'] = []
    for lib in project_data['source_files_lib']:
        lib_dir, lib_name = ntpath.split(lib)
        if os.path.splitext(lib_name)[1] != ".lib":
            self.logging.debug(
                "Found %s lib with non-valid extension (!=.lib)" % lib_name)
            continue
        resolved = ntpath.abspath(
            ntpath.join(self.workspace['output_dir']['path'], lib_dir))
        project_data['lib_paths'].append(resolved)
        project_data['libraries'].append(lib_name)
def __scmr_create(self, srvname, remote_file, displayname=None):
    '''
    Create the service
    '''
    logger.info('Creating the service %s' % srvname)
    if not displayname:
        displayname = srvname
    # The service binary lives on the writable share prepared earlier.
    self.__pathname = ntpath.join(DataStore.share_path, remote_file)
    # SCMR strings are NUL-terminated; demand-start so we control launch.
    scmr.hRCreateServiceW(self.__rpc, self.__mgr_handle,
                          '%s\x00' % srvname,
                          '%s\x00' % displayname,
                          lpBinaryPathName='%s\x00' % self.__pathname,
                          dwStartType=scmr.SERVICE_DEMAND_START)
def do_get(self, src_path):
    """Download *src_path* from the remote admin share into the secrets dir.

    Local copies are prefixed with ``monkey-``; a partially written file
    is deleted if the transfer fails.
    """
    local_file_path = None  # BUG FIX: keep bound for the except path (was
                            # NameError if resolution raised before assignment)
    try:
        import ntpath
        newPath = ntpath.normpath(ntpath.join(self.__pwd, src_path))
        drive, tail = ntpath.splitdrive(newPath)
        filename = ntpath.basename(tail)
        local_file_path = os.path.join(self.__secrets_dir.name, "monkey-" + filename)
        fh = open(local_file_path, "wb")
        try:
            LOG.info("Downloading %s\\%s" % (drive, tail))
            # 'C:' -> 'C$' administrative share.
            self.__transferClient.getFile(drive[:-1] + "$", tail, fh.write)
        finally:
            # BUG FIX: close the handle even when the transfer fails.
            fh.close()
    except Exception as e:
        LOG.error(str(e))
        if local_file_path is not None and os.path.exists(local_file_path):
            os.remove(local_file_path)
def svcexec(self, command, mode='SHARE', display=True):
    """Execute *command* on the target through a temporary Windows service.

    If the command's binary exists locally it is uploaded to the writable
    share first and removed afterwards.  mode='SERVER' spins up a local
    SMB server (requires admin/root for the privileged port).
    """
    if mode == 'SERVER' and not is_local_admin():
        err = (
            "keimpx needs to be run as Administrator/root to use svcshell. "
            "Privileged port is needed to run SMB server.")
        raise missingPermission(err)
    command_and_args = shlex.split(command)
    if os.path.exists(command_and_args[0]):
        # Local binary: stage it on the writable share.
        self.use(DataStore.writable_share)
        self.upload(command_and_args[0])
    self.__scmr_connect()
    try:
        if mode == 'SERVER':
            self.__serverThread = SMBServer(self.smbserver_share)
            self.__serverThread.daemon = True
            self.__serverThread.start()
        if os.path.exists(command_and_args[0]):
            # Point the service at the uploaded copy on the share.
            command = ntpath.join(DataStore.share_path,
                                  os.path.basename(command))
        self.svc_shell = SvcShell(self.__rpc, self.__mgr_handle, self.trans,
                                  self.smbserver_share, mode, display)
        self.svc_shell.onecmd(command)
        if mode == 'SERVER':
            self.__serverThread.stop()
    except SessionError as e:
        # traceback.print_exc()
        logger.error('SMB error: %s' % (e.getErrorString(), ))
    except KeyboardInterrupt as _:
        print()
        logger.info('User aborted')
    except Exception as e:
        # traceback.print_exc()
        logger.error(str(e))
    sys.stdout.flush()
    self.__scmr_disconnect()
    if os.path.exists(command_and_args[0]):
        # Clean up the uploaded binary.
        self.rm(os.path.basename(command_and_args[0]))
def CreateGraph(options):
    """Extend the run_test subgraph with one read_value vertex per attempt.

    Builds a 1:1 mapping between read_value tasks and run_test tasks and
    returns the combined subgraph.
    """
    if not isinstance(options, TaskOptions):
        raise ValueError(
            'options must be an instance of read_value.TaskOptions')
    subgraph = run_test.CreateGraph(options.test_options)

    # Pick the path-join flavour matching the target bot's OS.
    on_windows = read_value_quest.IsWindows(
        {'dimensions': options.test_options.dimensions})
    join = ntpath.join if on_windows else posixpath.join
    results_path = join(options.benchmark, 'perf_results.json')

    change = options.test_options.build_options.change
    change_id = find_isolate.ChangeId(change)
    for attempt in range(options.test_options.attempts):
        read_value_id = 'read_value_%s_%s' % (change_id, attempt)
        subgraph.vertices.append(
            task_module.TaskVertex(
                id=read_value_id,
                vertex_type='read_value',
                payload={
                    'benchmark': options.benchmark,
                    'mode': options.mode,
                    'results_filename': results_path,
                    'histogram_options': {
                        'grouping_label':
                            options.histogram_options.grouping_label,
                        'story': options.histogram_options.story,
                        'statistic': options.histogram_options.statistic,
                    },
                    'graph_json_options': {
                        'chart': options.graph_json_options.chart,
                        'trace': options.graph_json_options.trace
                    },
                    'change': change.AsDict(),
                    'index': attempt,
                }))
        subgraph.edges.append(
            task_module.Dependency(from_=read_value_id,
                                   to=run_test.TaskId(change_id, attempt)))
    return subgraph
def put_file(
    state,
    host,
    filename_or_io,
    remote_filename,
    print_output=False,
    print_input=False,
    remote_temp_filename=None,  # ignored
    **command_kwargs,
):
    """
    Upload file by chunking and sending base64 encoded via winrm
    """

    # TODO: fix this? Workaround for circular import
    from pyinfra.facts.windows_files import WindowsTempDir

    # Stage into a temp file first so a failed transfer never clobbers the
    # real destination; the name is derived from the target path's hash.
    staging_path = ntpath.join(
        host.get_fact(WindowsTempDir),
        "pyinfra-{0}".format(sha1_hash(remote_filename)),
    )

    if not _put_file(state, host, filename_or_io, staging_path):
        return False

    # Atomically move the staged file into place (w/sudo and/or su_user).
    move_cmd = "Move-Item -Path {0} -Destination {1} -Force".format(
        staging_path, remote_filename)
    status, _, stderr = run_shell_command(
        state,
        host,
        move_cmd,
        print_output=print_output,
        print_input=print_input,
        **command_kwargs,
    )

    if status is False:
        logger.error("File upload error: {0}".format("\n".join(stderr)))
        return False

    if print_output:
        click.echo(
            "{0}file uploaded: {1}".format(host.print_prefix, remote_filename),
            err=True,
        )

    return True
def prepare_shortcuts(self):
    """Prepare shortcut files in the build directory.

    If entry_point is specified, write the script.
    If script is specified, copy to the build directory.
    Prepare target and parameters for these shortcuts.

    Also copies shortcut icons
    """
    files = set()
    for scname, sc in self.shortcuts.items():
        if not sc.get('target'):
            if sc.get('entry_point'):
                # Derive launcher name from the shortcut name; a trailing
                # 'w' marks a GUI (no-console) launcher.
                sc['script'] = script = scname.replace(' ', '_') + '.launch.py' \
                                            + ('' if sc['console'] else 'w')

                # extra_preamble may be a filename or a file-like object.
                specified_preamble = sc.get('extra_preamble', None)
                if isinstance(specified_preamble, text_types):
                    # Filename
                    extra_preamble = io.open(specified_preamble,
                                             encoding='utf-8')
                elif specified_preamble is None:
                    extra_preamble = io.StringIO()  # Empty
                else:
                    # Passed a StringIO or similar object
                    extra_preamble = specified_preamble

                self.write_script(sc['entry_point'],
                                  pjoin(self.build_dir, script),
                                  extra_preamble.read().rstrip())
            else:
                shutil.copy2(sc['script'], self.build_dir)

            # Launch via the bundled interpreter or the system 'py'
            # launcher depending on how Python is being distributed.
            if self.py_format == 'bundled':
                target = '$INSTDIR\Python\python{}.exe'
            else:
                target = 'py{}'
            sc['target'] = target.format('' if sc['console'] else 'w')
            sc['parameters'] = '"%s"' % ntpath.join('$INSTDIR', sc['script'])
            files.add(os.path.basename(sc['script']))

        shutil.copy2(sc['icon'], self.build_dir)
        sc['icon'] = os.path.basename(sc['icon'])
        files.add(sc['icon'])

    self.install_files.extend([(f, '$INSTDIR') for f in files])
def _get_useful_paths(self, installer):
    """Get useful paths for installing Cloudbase-Init.

    :param installer: Name of the Cloudbase-Init installer.
    :type installer: str

    :return: A tuple containing the `url` for the Cloudbase-Init
        installer and the `zip_path` where we should download the
        installer.
    :rtype: tuple

    NOTE(review): the *installer* argument is ignored — it is
    immediately replaced by ``self._get_installer_name()`` — verify
    whether callers expect their value to be honored.
    """
    installer = self._get_installer_name()
    return (urlparse.urljoin(self._CBINIT_URL, installer),
            ntpath.join("C:\\", installer))
def dumpCachedHashes(self):
    """Decrypt the domain cached credentials (NL$ cache) from SECURITY.

    Walks the ``\\Cache`` values, decrypts each NL_RECORD with the NLKM
    key (AES for Vista+ hives, the legacy hash scheme otherwise) and
    appends ``user:hash:domainlong:domain:::`` lines to
    ``self.__cachedItems``.

    :return: the accumulated list of cached-credential strings, or None
        when no SECURITY file / cache entries are available.
    """
    if self.__securityFile is None:
        # No SECURITY file provided
        return

    # Let's first see if there are cached entries
    values = self.enumValues('\\Cache')
    if values is None:
        # No cache entries
        return
    try:
        # Remove unnecessary control value; ignore when absent.
        values.remove('NL$Control')
    except ValueError:
        pass

    self.__getLSASecretKey()
    self.__getNLKMSecret()

    for value in values:
        # logging.debug('Looking into %s' % value)
        record = NL_RECORD(self.getValue(ntpath.join('\\Cache', value))[1])
        # An all-zero CH field marks an empty/unused cache slot.
        if record['CH'] != 16 * '\x00':
            if self.__vistaStyle is True:
                plainText = self.__decryptAES(self.__NKLMKey[16:32],
                                              record['EncryptedData'],
                                              record['CH'])
            else:
                plainText = self.__decryptHash(self.__NKLMKey,
                                               record['EncryptedData'],
                                               record['CH'])
            encHash = plainText[:0x10]
            plainText = plainText[0x48:]
            userName = plainText[:record['UserLength']].decode('utf-16le')
            plainText = plainText[self.__pad(record['UserLength']):]
            domain = plainText[:record['DomainNameLength']].decode('utf-16le')
            plainText = plainText[self.__pad(record['DomainNameLength']):]
            domainLong = plainText[:self.__pad(record['FullDomainLength']
                                               )].decode('utf-16le')
            answer = "%s:%s:%s:%s:::" % (userName, encHash.encode('hex'),
                                         domainLong, domain)
            self.__cachedItems.append(answer)

    # BUG FIX: was `return __cachedItems` — a bare (unmangled) name that
    # raises NameError at runtime; the accumulator is an instance attribute.
    return self.__cachedItems
def publish(self):
    # Copy every regular file from sourceDir into intermediateDir, then
    # upload the set to the destination configured in training.ini —
    # either an FTP server or an S3 bucket — optionally opening each
    # uploaded .html page in a browser afterwards.
    sourceFiles = [
        f for f in listdir(self.sourceDir) if isfile(join(self.sourceDir, f))
    ]
    for source in sourceFiles:
        copyfile(join(self.sourceDir, source),
                 join(self.intermediateDir, source))

    config = ConfigParser.ConfigParser()
    config.readfp(open('training.ini'))
    # NOTE(review): `type` shadows the builtin; also the config file
    # handle is never explicitly closed.
    type = config.get("webPublish", "type")
    openAfter = (config.get("webPublish", "open_after") == "yes")
    destination = config.get("webPublish", "destination")

    if type == "FTP":
        session = ftplib.FTP("ftp.%s" % destination)
        session.login(self.domain, self.password)
    elif type == "AWS":
        s3 = boto.connect_s3(is_secure=False)
        bucket = s3.get_bucket(destination, validate=False)

    for source in sourceFiles:
        file = open(join(self.intermediateDir, source), 'rb')
        if type == "FTP":
            # NOTE(review): builds the "STOR <path>" FTP command with
            # os.path.join, so the separator between "wwwroot\" and the
            # target path depends on the local OS — confirm this produces
            # the intended command on all platforms.
            session.storbinary(
                join("STOR wwwroot\\",
                     ntpath.join(self.targetDir, source)), file)
        elif type == "AWS":
            k = Key(bucket)
            k.key = posixpath.join(self.targetDir, source)
            k.set_contents_from_file(file)
        if openAfter and ".html" in source:
            if self.targetDir == "":
                webbrowser.open("http://www.%s/%s" % (self.domain, source))
            else:
                webbrowser.open("http://www.%s/%s/%s" %
                                (self.domain, self.targetDir, source))
        file.close()

    if type == "FTP":
        session.quit()
def prepare_shortcuts(self):
    """Prepare shortcut files in the build directory.

    If entry_point is specified, write the script.
    If script is specified, copy to the build directory.
    Prepare target and parameters for these shortcuts.

    Also copies shortcut icons.
    """
    install_names = set()
    for scname, sc in self.shortcuts.items():
        if not sc.get("target"):
            if sc.get("entry_point"):
                # GUI launchers get a trailing 'w' (pythonw semantics).
                suffix = "" if sc["console"] else "w"
                script = scname.replace(" ", "_") + ".launch.py" + suffix
                sc["script"] = script

                # extra_preamble may be absent, a filename, or file-like.
                preamble_src = sc.get("extra_preamble", None)
                if preamble_src is None:
                    preamble = io.StringIO()  # Empty
                elif isinstance(preamble_src, str):
                    # Filename
                    preamble = io.open(preamble_src, encoding="utf-8")
                else:
                    # Passed a StringIO or similar object
                    preamble = preamble_src

                self.write_script(
                    sc["entry_point"],
                    pjoin(self.build_dir, script),
                    preamble.read().rstrip(),
                )
            else:
                shutil.copy2(sc["script"], self.build_dir)

            exe_template = "$INSTDIR\Python\python{}.exe"
            sc["target"] = exe_template.format("" if sc["console"] else "w")
            sc["parameters"] = '"-Es" "%s"' % ntpath.join(
                "$INSTDIR", sc["script"])
            install_names.add(os.path.basename(sc["script"]))

        shutil.copy2(sc["icon"], self.build_dir)
        sc["icon"] = os.path.basename(sc["icon"])
        install_names.add(sc["icon"])

    self.install_files.extend([(f, "$INSTDIR") for f in install_names])
def __init__(self):
    """Load settings/registry state, locate skin and avatar paths, and
    build the command-line argument parser."""
    self.first_run = False
    self._load_registry()
    self.settings = self._load_settings()
    # Skins live under the Steam install dir on Windows, a separate
    # directory on Linux.
    self.skins_dir = (
        ntpath.join(self.steam_dir, "skins")
        if self.system_os == "Windows"
        else os.path.join(self.steam_linux_dir, "skins")
    )
    self.steam_skins = self.get_steam_skins()
    self.default_avatar = os.path.join(self.changer_path,
                                       "avatars/avatar.png")
    self.parser = argparse.ArgumentParser(
        prog="main.pyw",
        usage="%(prog)s [options]",
        description="Program to quickly switch between steam accounts.",
    )
    self.args = self.arg_setup()
    self.parse(self.args)
def writeProject(self):
    # Emit the MSBuild project body: one <ItemGroup> of items (sources,
    # excluded files, Protoc custom-build steps) followed by the closing
    # Import/ImportGroup boilerplate. Lines use CRLF as MSBuild expects.
    self.writeHeader()
    f = self.project_file
    self.project_file.write(' <ItemGroup>\r\n')
    for item in self.items:
        path = winpath(os.path.relpath(item.path(), self.project_dir))
        props = ''
        tag = item.tag()
        if item.is_excluded():
            props = ' <ExcludedFromBuild>True</ExcludedFromBuild>\r\n'
        elif item.builder() == 'Object':
            props = ''
            # Per-configuration include directories, conditioned on
            # $(Configuration)|$(Platform). NOTE: `name`, `variant` and
            # `platform` feed the `% locals()` template substitution even
            # where they look unused.
            for config, output in xsorted(item.node.items()):
                name = config.name
                env = output.get_build_env()
                variant = config.variant
                platform = config.platform
                props += self.makeListTag(
                    self.extraRelPaths(xsorted(env['CPPPATH']),
                                       config.env['CPPPATH']), ' ',
                    'AdditionalIncludeDirectories',
                    ''' Condition="'$(Configuration)|$(Platform)'=='%(variant)s|%(platform)s'"'''
                    % locals(), True)
        elif item.builder() == 'Protoc':
            # Custom protoc build step per configuration. `name`,
            # `cpp_out` and `base_out` are consumed by the
            # V12CustomBuildProtoc template via `% locals()` — do not
            # remove them as "unused".
            for config, output in xsorted(item.node.items()):
                name = config.name
                out_dir = os.path.relpath(os.path.dirname(str(output)),
                                          self.project_dir)
                cpp_out = winpath(out_dir)
                out_parts = out_dir.split(os.sep)
                out_parts.append(
                    os.path.splitext(os.path.basename(item.path()))[0])
                base_out = ntpath.join(*out_parts)
                props += V12CustomBuildProtoc % locals()
        f.write(' <%(tag)s Include="%(path)s">\r\n' % locals())
        f.write(props)
        f.write(' </%(tag)s>\r\n' % locals())
    f.write(' </ItemGroup>\r\n')
    f.write(
        ' <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />\r\n'
        ' <ImportGroup Label="ExtensionTargets">\r\n'
        ' </ImportGroup>\r\n'
        '</Project>\r\n')
def unextend_path(self, extended_path):
    """Remove ClearCase revision and branch informations from path.

    ClearCase paths contain additional informations about branch
    and file version preceded by @@. This function remove this
    parts from ClearCase path to make it more readable
    For example this function convert extended path::

        /vobs/comm@@/main/122/network@@/main/55/sntp
        @@/main/4/src@@/main/1/sntp.c@@/main/8

    to the the to regular path::

        /vobs/comm/network/sntp/src/sntp.c

    Returns a ``(revision, path)`` tuple; paths with no ``@@`` marker
    are reported as HEAD.
    """
    if '@@' not in extended_path:
        return HEAD, extended_path

    # Result of regular expression search result is list of tuples. We must
    # flat this to one list. The best way is use list comprehension. b is
    # first because it frequently occure in tuples. Before that remove @@
    # from path.
    unextended_chunks = [
        b or a for a, b, foo in self.UNEXTENDED.findall(
            extended_path.replace('@@', ''))
    ]

    if sys.platform.startswith('win'):
        # Properly handle full (with drive letter) and UNC paths
        if unextended_chunks[0].endswith(':'):
            unextended_chunks[0] = '%s\\' % unextended_chunks[0]
        elif unextended_chunks[0] == '/' or unextended_chunks[0] == os.sep:
            unextended_chunks[0] = '\\\\'

    # Purpose of realpath is remove parts like /./ generated by
    # ClearCase when vobs branch was fresh created
    unextended_path = cpath.realpath(
        cpath.join(*unextended_chunks)
    )

    # The final @@-suffix is the element's own revision string.
    revision = extended_path.rsplit('@@', 1)[1]
    if revision.endswith('CHECKEDOUT'):
        revision = HEAD

    return (revision, unextended_path)
def do_list_snapshots(self, line):
    """Print the VSS snapshot timestamps available for the given path
    (relative paths are resolved against the current directory)."""
    # str.split always yields at least one element, so the first token
    # is safe to take even for an empty command line.
    target = line.split(' ')[0].replace('/', '\\')
    # Relative or absolute path?
    if not target.startswith('\\'):
        target = ntpath.join(self.pwd, target)
    snapshots = self.smb.listSnapshots(self.tid, target)
    if not snapshots:
        print("No snapshots found")
        return
    for timestamp in snapshots:
        print(timestamp)