def deploy(self, host, source, destination, _logger, use_rsync=False, dry_run=False):
    """Deploy a directory of PBS task files to the active remote host.

    Uploads *source* to *destination* on the remote host, then submits
    every ``*.pbs`` file found there via :meth:`sub`.

    Args:
        host: a host object providing ``upload`` and ``active_host``
        source: local directory containing the ``.pbs`` files to upload
        destination: target path on the remote host (``/tmp`` when ``None``)
        _logger: the logging logger
        use_rsync: if `True`, transfer with rsync instead of scp
        dry_run: if `True`, print the plan and exit without doing anything

    Returns:
        None
    """
    destination = '/tmp' if destination is None else destination
    if dry_run:
        print("Running deploy", source, "to", destination, "on",
              tuple(host.active_host[1:]))
        sys.exit(0)
    if not isdir(source):
        print("Error: directory %s does not exist" % source)
        sys.exit(1)
    # upload() expects a list of sources.
    host.upload([source], destination, _logger, use_rsync=use_rsync)
    self.sub(host, [destination + '/*.pbs'], True, destination, _logger)
    return
def run(self, inpath, outpath, n=sys.maxsize):
    """Classify *inpath* (a file or a directory) into *outpath*.

    Refuses to overwrite an existing output path.  *n* is kept for
    interface compatibility; it is not used here.

    Args:
        inpath: input file or directory to classify
        outpath: output path; must not already exist
        n: unused (interface compatibility)

    Raises:
        SystemExit: if *outpath* already exists.
    """
    if exists(outpath):
        # Use sys.exit instead of the site-provided exit() builtin, which
        # is absent under ``python -S`` and intended only for the REPL.
        sys.exit("Warning: output already exists")
    elif isdir(inpath):
        self.classify_directory(inpath, outpath)
    elif isfile(inpath):
        self.classify_file(inpath, outpath)
    # NOTE(review): an inpath that is neither file nor directory is
    # silently ignored here — preserved from the original behavior.
def gen_pbs_example(self, outdir, _logger, dry_run=False):
    """Copy the bundled pbsgen example files into *outdir*.

    Args:
        outdir: output directory (created when missing)
        _logger: the logging logger
        dry_run: if `True`, only print the planned paths and exit

    Returns:
        None
    """
    if not isdir(outdir):
        print("Directory %s does not exist, creating it" % outdir)
        os.makedirs(outdir)

    # Destination paths keep the bundled files' base names.
    template_dst = os.path.join(outdir, os.path.basename(self.pbs_template))
    sample_dst = os.path.join(outdir, os.path.basename(self.samplefile))
    map_dst = os.path.join(outdir, os.path.basename(self.mapfile))

    banner = "====================="
    print(banner)
    print("Output path : " + outdir)
    print("PBS Template: " + template_dst)
    print("Sample file : " + sample_dst)
    print("Mapping file: " + map_dst)
    print(banner)

    if dry_run:
        sys.exit(0)

    for src, dst in ((self.pbs_template, template_dst),
                     (self.samplefile, sample_dst),
                     (self.mapfile, map_dst)):
        copyfile(src, dst)
    print("Done.")
    return
def download(self, source, destination, _logger, use_rsync=False, dry_run=False):
    """Download files from the active remote host to the local machine.

    Builds and runs an ``scp`` (or ``rsync``) command line, so those tools
    must be available locally.

    Args:
        source: list of files (directories) on the remote host
        destination: destination directory on the local machine
        _logger: the logging logger
        use_rsync: if `True`, use rsync instead of scp
        dry_run: if `True`, print the plan and exit

    Returns:
        None
    """
    username, host, port = self.active_host[1:]
    if dry_run:
        print("Running download", ' '.join(source), "to", destination,
              "from", tuple(self.active_host[1:]))
        sys.exit(0)
    # Expand '~' once and reuse the expanded path everywhere below.
    local_dir = os.path.expanduser(destination)
    if not isdir(local_dir):
        os.makedirs(local_dir)
    # scp/rsync only treat the target as a directory when it ends with '/'.
    # (Was ``list(destination)[-1] != '/'`` — wasteful, and raised
    # IndexError for an empty destination string.)
    if not local_dir.endswith('/'):
        local_dir += '/'
    print("=> Starting downloading...", end="\n\n")
    start = datetime.now()
    if use_rsync:
        if sys.platform == 'win32':
            print("--rsync is disabled in Windows, please don't use it.")
            sys.exit(0)
        cmds = "rsync -azP -e 'ssh -p {port}' {username}@{host}:'{source}' {destination}".format(
            port=port, source=' '.join(source), username=username, host=host,
            destination=local_dir)
    else:
        cmds = "scp -pr -P {port} {username}@{host}:'{source}' {destination}".format(
            port=port, source=' '.join(source), username=username, host=host,
            destination=local_dir)
    _logger.info("Running " + cmds)
    run_res = run(cmds, shell=True)
    _logger.info("Status code: " + str(run_res.returncode))
    if run_res.returncode != 0:
        print("Error: an error occurred, please check the info!")
        sys.exit(run_res.returncode)
    taken = datetime.now() - start
    print("\n=> Finished downloading in %ss" % taken.seconds)
    return
def add_local():
    """Let the user pick a local folder and append it to the nzb_folder_list setting."""
    log("add_local:")
    dialog = xbmcgui.Dialog()
    nzb_file = dialog.browse(0, 'Pick a folder', 'video')
    # XBMC outputs utf-8
    path = unicode(nzb_file, 'utf-8')
    log("add_local: path: %s" % path)
    if not utils.isdir(path):
        return None
    # Append the raw selection to the ';'-separated folder list setting.
    folders = __settings__.getSetting("nzb_folder_list").split(';')
    folders.append(nzb_file)
    joined = ';'.join(folders)
    log("add_local: new_folder_list: %s" % joined)
    __settings__.setSetting("nzb_folder_list", joined)
    xbmc.executebuiltin("Container.Refresh")
def install(should_identify=True):
    """Provision the CI environment: install PyQt5 and build a static PCRE.

    Downloads are cached under DOWNLOADS so that re-runs skip the fetch.
    Finishes by installing this package, which also compiles its C
    extensions against the freshly built PCRE.

    Args:
        should_identify: if True, print system identification first.
    """
    if should_identify:
        system_identify()
    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)
    # Download and install PyQt5. Only download if we don't have a cached copy
    # available.
    install_PyQt5 = os.path.join(DOWNLOADS, 'install-PyQt5.exe')
    if not isfile(install_PyQt5):
        wget(
            'http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.5.1/'
            'PyQt5-5.5.1-gpl-Py3.4-Qt5.5.1-x32.exe',
            install_PyQt5)
    # Register Python 3.4's install path so the PyQt5 installer can find it.
    # See https://github.com/appveyor/ci/issues/363#issuecomment-148915001.
    xqt(
        'REG ADD HKCU\\Software\\Python\\PythonCore\\3.4\\InstallPath /f /ve '
        '/t REG_SZ /d C:\\Python34',
        install_PyQt5 + ' /S')
    # Download and compile PCRE.
    pcre_ver = 'pcre-8.38'
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget(
            'http://downloads.sourceforge.net/project/pcre/pcre/8.38/' + pcre_zip,
            pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    # Configure and build a static, JIT-enabled PCRE with MSVC 2010.
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        xqt(
            'cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 10 2010"',
            'cmake --build . --config Release')
    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenrate the command below.
    xqt('python setup.py install --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
def install(should_identify=True):
    """Prepare the build machine: PyQt5 installer plus a static PCRE build."""
    if should_identify:
        system_identify()

    # Cache directory for downloaded artifacts.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)

    # Fetch the PyQt5 installer unless a cached copy already exists.
    pyqt_installer = os.path.join(DOWNLOADS, 'install-PyQt5.exe')
    if not isfile(pyqt_installer):
        wget('http://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.5.1/'
             'PyQt5-5.5.1-gpl-Py3.4-Qt5.5.1-x32.exe', pyqt_installer)
    # Register Python 3.4 in the registry, then run the installer silently.
    # See https://github.com/appveyor/ci/issues/363#issuecomment-148915001.
    xqt('REG ADD HKCU\\Software\\Python\\PythonCore\\3.4\\InstallPath /f /ve '
        '/t REG_SZ /d C:\\Python34',
        pyqt_installer + ' /S')

    # Fetch and unpack the PCRE sources (cached like the installer above).
    pcre_dirname = 'pcre-8.38'
    pcre_archive = pcre_dirname + '.zip'
    pcre_archive_path = os.path.join(DOWNLOADS, pcre_archive)
    if not isfile(pcre_archive_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/8.38/' +
             pcre_archive, pcre_archive_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_archive_path))

    # Configure and build a static, JIT-enabled PCRE with MSVC 2010.
    with pushd(pcre_dirname):
        mkdir('build')
        chdir('build')
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 10 2010"',
            'cmake --build . --config Release')

    # Install, which also builds Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenrate the command below.
    xqt('python setup.py install --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_dirname, pcre_dirname))
def install(should_identify=True):
    """Build a static PCRE 8.42, compile the C extensions, then pip-install.

    Downloads are cached under DOWNLOADS so that re-runs skip the fetch.

    Args:
        should_identify: if True, print system identification first.
    """
    if should_identify:
        system_identify()
    # Create a place to store downloads.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)
    # Download and compile PCRE.
    pcre_raw_ver = '8.42'
    pcre_ver = 'pcre-' + pcre_raw_ver
    pcre_zip = pcre_ver + '.zip'
    pcre_zip_path = os.path.join(DOWNLOADS, pcre_zip)
    if not isfile(pcre_zip_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget(
            'http://downloads.sourceforge.net/project/pcre/pcre/{}/{}'.format(
                pcre_raw_ver, pcre_zip), pcre_zip_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(pcre_zip_path))
    # Configure and build a static, JIT-enabled PCRE with MSVC 2015.
    with pushd(pcre_ver):
        mkdir('build')
        chdir('build')
        # Per https://cmake.org/cmake/help/latest/generator/Visual%20Studio%2014%202015.html,
        # add the Win64 string for 64-bit Python.
        use_Win64 = ' Win64' if is_64bits else ''
        xqt(
            'cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 14 2015{}"'.format(
                use_Win64),
            'cmake --build . --config Release')
    # First, build Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenrate the command below.
    xqt('python setup.py build_ext --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(pcre_ver, pcre_ver))
    # Next, install it along with its dependencies. See comments at
    # ``install_requires`` on why this is necessary.
    xqt('python -m pip install -e .')
def install(should_identify=True):
    """Build a static PCRE 8.39, compile the C extensions, then pip-install."""
    if should_identify:
        system_identify()

    # Cache directory for downloaded artifacts.
    if not isdir(DOWNLOADS):
        mkdir(DOWNLOADS)

    # Fetch and unpack the PCRE sources (cached across runs).
    version = '8.39'
    src_dir = 'pcre-' + version
    archive = src_dir + '.zip'
    archive_path = os.path.join(DOWNLOADS, archive)
    if not isfile(archive_path):
        # Note: Don't use ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/,
        # because this sometimes hangs during download, causing the build to
        # fail. Instead, use the more reliable SourceForge mirror.
        wget('http://downloads.sourceforge.net/project/pcre/pcre/{}/{}'.format(
            version, archive), archive_path)
    # See https://sevenzip.osdn.jp/chm/cmdline/commands/extract_full.htm.
    xqt('7z x {} > nul'.format(archive_path))

    # Configure and build a static, JIT-enabled PCRE with MSVC 2015.
    with pushd(src_dir):
        mkdir('build')
        chdir('build')
        # Per https://cmake.org/cmake/help/latest/generator/Visual%20Studio%2014%202015.html,
        # add the Win64 string for 64-bit Python.
        arch_suffix = ' Win64' if is_64bits else ''
        xqt('cmake .. -DBUILD_SHARED_LIBS:BOOL=OFF -DPCRE_SUPPORT_UTF:BOOL=ON '
            '-DPCRE_SUPPORT_JIT:BOOL=ON -G "Visual Studio 14 2015{}"'.format(
                arch_suffix),
            'cmake --build . --config Release')

    # First, build Python C extensions. Use this instead of
    # ``build_ext`` so that Enki will have an already-installed qutepart,
    # rather than needing to regenrate the command below.
    xqt('python setup.py build_ext --include-dir={}/build '
        '--lib-dir={}/build/Release --force'.format(src_dir, src_dir))
    # Next, install it along with its dependencies. See comments at
    # ``install_requires`` on why this is necessary.
    xqt('python -m pip install -e .')
def sub(self, host, tasks, remote, workdir, _logger, dry_run=False):
    """Submit PBS tasks with ``qsub``.

    Args:
        host: a host object (used only when *remote* is `True`)
        tasks: a list of PBS files; glob patterns are supported
        remote: if `True`, the PBS files live on the active remote host
        workdir: working directory for qsub (defaults to ``/tmp`` remotely,
            the current directory locally)
        _logger: the logging logger
        dry_run: if `True`, only print the qsub commands

    Returns:
        The list of task files that were (or would be) submitted.
    """
    print('NOTE: PBS file must be LF mode (Unix), not CRLF mode (Windows)')
    print('====================================================')
    filelist = []
    if remote:
        pattern = ' '.join(tasks)
        host.connect()
        _logger.info('ls -p ' + pattern)
        host.channel.execute('ls -p ' + pattern)
        raw = host.get_result(print_info=False).split('\n')
        # 'ls -p' marks directories with a trailing '/' and prints 'dir:'
        # headers when listing directories; keep plain, non-blank entries.
        filelist = [
            f for f in raw
            if f.strip() and not (len(f) > 1 and f[-1] in ('/', ':'))
        ]
        _logger.info(filelist)
        if workdir is None:
            workdir = '/tmp'
        cmds = 'cd {}; for i in {}; do qsub $i; done'.format(
            workdir, ' '.join(filelist))
        if dry_run:
            print(cmds)
            sys.exit(0)
        _logger.info(cmds)
        host.cmd(cmds, _logger=_logger)
    else:
        if workdir is None:
            workdir = os.getcwd()
        for fp in tasks:
            matches = glob.glob(fp)
            if not matches:
                # Previously an empty glob result was silently skipped, so
                # a missing file/pattern never produced an error.
                print('Error: file %s does not exist.' % fp)
                sys.exit(1)
            for f in matches:
                if isdir(f):
                    print(
                        "Warning: directory %s is detected, note anything in it will be ignored to execute."
                        % f)
                elif isfile(f):
                    filelist.append(f)
                    cmds = 'cd ' + workdir + ';qsub ' + f
                    if dry_run:
                        print(cmds)
                    else:
                        _logger.info(cmds)
                        run(cmds, shell=True)
                else:
                    # Matched but neither file nor directory (e.g. broken link).
                    print('Error: file %s does not exist.' % f)
                    sys.exit(1)
    return filelist
def gen_pbs(self, template, samplefile, mapfile, outdir, _logger, pbs_mode=True, dry_run=False):
    """Generate a batch of (script) files (PBS tasks) from a template.

    Each row of *samplefile* produces one output file; *mapfile* maps
    template placeholders to column indexes of the sample row.

    Args:
        template: path to the template file
        samplefile: path to the sample file (first column must be unique)
        mapfile: path to the mapping file (two columns: label, column index)
        outdir: output directory (created when missing)
        _logger: the logging logger
        pbs_mode: if `True`, output files are named ``<sample>.pbs``
        dry_run: if `True`, print the plan and exit

    Returns:
        None
    """
    if not isdir(outdir):
        print("Directory %s does not exist, creating it" % outdir)
        os.makedirs(outdir)
    # Fail fast on missing inputs. Previously these only printed an error
    # and carried on, crashing later when the files were opened.
    for path in (template, samplefile, mapfile):
        if not isfile(path):
            print("Error: file %s does not exist" % path)
            sys.exit(1)
    print("=====================")
    print("Output path : " + outdir)
    if pbs_mode:
        print("PBS Template: " + template)
    else:
        print("Template: " + template)
    print("Sample file : " + samplefile)
    print("Mapping file: " + mapfile)
    print("=====================")
    if dry_run:
        sys.exit(0)
    print("=> Reading %s ..." % samplefile)
    sample_data = read_csv(samplefile)
    print("=> Reading %s ..." % mapfile)
    map_data = read_csv(mapfile)
    # The first column names the output file, so it must be unique.
    ids = {row[0] for row in sample_data}
    if len(sample_data) != len(ids):
        print("Error: the first column is not unique!")
        sys.exit(1)
    for row in map_data:
        if len(row) != 2:
            # Was "quired" and did not exit, letting a malformed row through.
            print("Error: only two columns are required in mapfile!")
            sys.exit(1)
        try:
            _ = int(row[1])
        except Exception:
            print(
                "Error: the second column must be (or can be transformed to) an integer!"
            )
            sys.exit(1)
    print("=> Reading %s ..." % template)
    with open(template, 'r') as f:
        temp_data = f.read()
    print("Generating...")
    for row in sample_data:
        if pbs_mode:
            pbsfile = os.path.join(outdir, row[0] + '.pbs')
        else:
            pbsfile = os.path.join(outdir, row[0])
        _logger.info("Generating %s" % pbsfile)
        content = temp_data
        for i in map_data:
            try:
                _logger.info("Replacing %s with %s" % (i[0], row[int(i[1])]))
                content = content.replace(i[0], row[int(i[1])])
            except Exception:
                print(
                    "Error: the second column out of range for label %s!"
                    % i[0])
                # Previously execution continued and wrote a half-substituted
                # file; abort instead of emitting wrong output.
                sys.exit(1)
        # LF line endings regardless of platform (PBS requires Unix mode).
        with io.open(pbsfile, 'w', encoding='utf-8', newline='\n') as f:
            f.write(content)
    print("Done.")
    return
def cmd(self, commands, _logger=None, run_file=False, data_dir=None, remote_file=False, dir='/tmp', prog=None, dry_run=False):
    """Run command(s) in active remote host using channel session

    Therefore, `open_channel` in `connect` method must be `True` before using it.

    Args:
        commands: commands/scripts run on active remote host
        _logger: the logging logger (required when run_file is True)
        run_file: if `True`, run scripts instead of commands
        data_dir: a path representing data directory
        remote_file: if `True`, collect input from remote host instead of local machine
        dir: Remote directory for storing local scripts (shadows the builtin
            name; kept for interface compatibility)
        prog: a string representing the program to run the commands
        dry_run: if `True`, dry run the code

    Returns:
        A string containing result information
    """
    if dry_run:
        print("Running", "files:" if run_file else "commands:", commands)
        sys.exit(0)
    if not run_file:
        # Plain command string: execute it directly over the channel.
        self.connect()
        self.channel.execute(commands)
    else:
        # Run scripts
        _logger.info(commands)
        scripts = commands  # commands are scripts here
        if remote_file:
            # Run remote scripts
            # Support some wildcards
            # *,?,{}
            wildcards = r'\*|\?|\{\}'
            matches = [
                re.compile(wildcards).search(i) is not None for i in scripts
            ]
            if any(matches):
                # Expand wildcard patterns on the remote side via 'ls'.
                commands_1 = list(map(lambda x: 'ls ' + x, scripts))
                commands_1 = ';'.join(commands_1)
                self.connect()
                self.channel.execute(commands_1)
                scripts = self.get_result(print_info=False).split('\n')
                if '' in scripts:
                    scripts.remove('')
            if prog is None:
                # No interpreter given: chmod each script, then run it.
                commands_1 = list(map(lambda x: 'chmod u+x ' + x, scripts))
                commands_1 = ';'.join(commands_1)
                commands_2 = ';'.join(scripts)
                commands = commands_1 + ';' + commands_2
            else:
                # Run each script through the given interpreter (e.g. 'bash').
                commands = list(
                    map(lambda x: '{} '.format(prog) + x, scripts))
                commands = ';'.join(commands)
            _logger.info(commands)
            self.connect()
            print("=> Getting results:")
            self.channel.execute(commands)
        else:
            # Run local scripts
            #
            # 1) upload
            self.upload(scripts, dir, _logger)
            if data_dir is not None:
                # NOTE(review): upload() appears to take a list of sources
                # elsewhere in this file, but data_dir is passed as a bare
                # path here — confirm this is intended.
                self.upload(data_dir, dir, _logger)
            # 2) get all file names
            if len(scripts) == 1:
                if isdir(scripts[0]):
                    # A single directory: run everything inside it; point the
                    # remote dir at the uploaded copy of that directory.
                    if list(scripts[0])[-1] == '/':
                        dir = os.path.join(
                            dir,
                            os.path.basename(os.path.dirname(scripts[0])))
                    else:
                        dir = os.path.join(dir, os.path.basename(scripts[0]))
                    scripts = glob.glob(scripts[0] + '/*')
            filelist = []
            for fp in scripts:
                _logger.info("fp:%s" % fp)
                fs = glob.glob(fp)
                _logger.info("fs:%s" % fs)
                for f in fs:
                    _logger.info("f:%s" % f)
                    if isdir(f):
                        print(
                            "Warning: directory %s is detected, note anything in it will be ignored to execute."
                            % f)
                    elif isfile(f):
                        filelist.append(f)
                    else:
                        print('Error: file %s does not exist.' % f)
                        sys.exit(1)
            # Keep only base names; the scripts now live under the remote dir.
            filelist = list(map(os.path.basename, filelist))
            _logger.info(filelist)
            # 3) run them one by one
            scripts = list(map(lambda x: '/'.join([dir, x]), filelist))
            if prog is None:
                commands_1 = list(map(lambda x: 'chmod u+x ' + x, scripts))
                commands_1 = ';'.join(commands_1)
                commands_2 = ';'.join(scripts)
                commands = commands_1 + ';' + commands_2
            else:
                commands = list(
                    map(lambda x: '{} '.format(prog) + x, scripts))
                commands = ';'.join(commands)
            _logger.info(commands)
            self.connect()
            print("=> Getting results:")
            self.channel.execute(commands)
    datalist = self.get_result()
    # Return a string containing output
    return "".join(datalist)
def remote_copy(sftp, file):
    """Return the contents of *file* read via *sftp*, or '' when it is a directory."""
    if isdir(file, sftp):
        return ''
    return sftp.open(file).read()
# Sync a source tree to a destination tree; either side may be local or a
# remote SFTP host (origin_src / origin_dst decide which).
print 'golem at work'
sftp_src = remote.sftpclient(args.source) if origin_src == 'remote' else None
sftp_dst = remote.sftpclient(args.destination) if origin_dst == 'remote' else None
root_src = remote.info(args.source).root if origin_src == 'remote' else os.path.abspath(args.source)
root_dst = remote.info(args.destination).root if origin_dst == 'remote' else os.path.abspath(args.destination)
# NOTE(review): this rebinds the name 'ignore' from a callable to its result,
# shadowing the function for the rest of the script — confirm intentional.
ignore = ignore(root_src + os.sep + 'config.json', sftp_src)
if args.backup:
    # Archive the entire destination tree before modifying it.
    print 'backuping folder ' + root_dst
    zip(root_dst, paths(root_dst, ignore, sftp_dst), args.backup, sftp_dst)
# Walk every path under the source root and mirror it into the destination.
for file_src in paths(root_src, ignore, sftp_src):
    file_dst = file_src.replace(root_src, root_dst)
    if utils.isdir(file_src, sftp_src):
        print 'creating folder ' + file_dst
        utils.mkdir(file_dst, sftp_dst)
    else:
        data_src = copy(file_src, sftp_src)
        if exists(file_dst):
            # Compare MD5 digests so unchanged files are skipped.
            data_dst = copy(file_dst, sftp_dst)
            md5_data_src = md5(data_src).digest()
            md5_data_dst = md5(data_dst).digest()
            if md5_data_src == md5_data_dst:
                print 'unchanged ' + file_dst
            else:
                print 'modificing ' + file_dst
                paste(file_dst, data_src, sftp_dst)
        else:
            print 'creating ' + file_dst