def relpath_win(self, path, start="."):
    """Return *path* expressed relative to *start*, using Windows path rules.

    Drive letters and name components compare case-insensitively. Raises
    ValueError for an empty path or when the two paths cannot be related
    (different drives, or UNC mixed with non-UNC).
    """
    sep = "\\"
    if not path:
        raise ValueError("no path specified")
    start_parts = ntpath.abspath(start).split(sep)
    path_parts = ntpath.abspath(path).split(sep)
    if start_parts[0].lower() != path_parts[0].lower():
        # No relative path exists across drives / UNC boundaries.
        unc_path, _ = ntpath.splitunc(path)
        unc_start, _ = ntpath.splitunc(start)
        if bool(unc_path) ^ bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                             % (path, start))
        raise ValueError("path is on drive %s, start on drive %s"
                         % (path_parts[0], start_parts[0]))
    # Length of the common (case-insensitive) component prefix.
    common = 0
    for left, right in zip(start_parts, path_parts):
        if left.lower() != right.lower():
            break
        common += 1
    hops = ['..'] * (len(start_parts) - common) + path_parts[common:]
    return ntpath.join(*hops) if hops else "."
def test_abspath(self):
    # ntpath.abspath() can only be used on a system with the "nt" module
    # (reasonably), so we protect this test with "import nt". This allows
    # the rest of the tests for the ntpath module to be run to completion
    # on any platform, since most of the module is intended to be usable
    # from any platform.
    # XXX this needs more tests
    try:
        import nt
    except ImportError:
        # check that the function is there even if we are not on Windows
        ntpath.abspath
    else:
        tester('ntpath.abspath("C:\\")', "C:\\")
    # Issue 3426: check that abspath returns unicode when the arg is
    # unicode and str when it's str, with both ASCII and non-ASCII cwds.
    # (Python 2 semantics: str/unicode are distinct types.)
    saved_cwd = os.getcwd()
    for cwd in (u'cwd', u'\xe7w\xf0'):
        try:
            os.mkdir(cwd)
            os.chdir(cwd)
            for path in ('', 'foo', 'f\xf2\xf2', '/foo', 'C:\\'):
                self.assertTrue(isinstance(ntpath.abspath(path), str))
            for upath in (u'', u'fuu', u'f\xf9\xf9', u'/fuu', u'U:\\'):
                self.assertTrue(isinstance(ntpath.abspath(upath), unicode))
        finally:
            # Always restore the original cwd and remove the scratch dir,
            # even if an assertion failed mid-loop.
            os.chdir(saved_cwd)
            os.rmdir(cwd)
def __init__(self, pathOfSouce):
    """Set up lexer state and output files for the given source file.

    Creates a ``target_<name>/lex/`` directory (next to the source on
    non-Linux systems, under the current directory on Linux) and opens
    ``err.lex`` and ``out.lex`` for writing.

    pathOfSouce: path to the source file to be lexed.
    """
    self.hasErrors = False
    canonicalFileName = ntpath.basename(pathOfSouce)
    # NOTE(review): find(".") returns -1 when the name has no dot, which
    # would silently drop the last character — confirm inputs always have
    # an extension.
    withoutExtention = canonicalFileName[0:canonicalFileName.find(".")]
    lexDirectory = "target_" + withoutExtention + "\\lex\\"
    self.lexFileHandler = LexerFileHandler(pathOfSouce)
    self.stateManager = StateManager()
    self.stateManager.setState("INICIO")
    # Absolute directory that contains the source file (absolute path with
    # the trailing file name sliced off).
    basepath = ntpath.abspath(
        pathOfSouce)[0:len(ntpath.abspath(pathOfSouce)) - len(canonicalFileName)]
    if platform.system() != 'Linux':
        # Windows-style layout next to the source file; makedirs may fail
        # if the directory already exists, which is fine.
        try:
            os.makedirs(basepath + lexDirectory)
        except OSError:
            pass
        self.errFile = open(basepath + lexDirectory + "err.lex", "+w")
        self.outFile = open(basepath + lexDirectory + "out.lex", "+w")
    else:
        # Linux: use a forward-slash path relative to the current directory.
        lexDirectory = "target_" + withoutExtention + "/lex/"
        try:
            os.makedirs(lexDirectory)
        except OSError:
            pass
        self.errFile = open(lexDirectory + "err.lex", "+w")
        self.outFile = open(lexDirectory + "out.lex", "+w")
def parse_args():
    """Parse command-line options for the VOD splitter.

    Returns a tuple ``(input_file, output_path, duration)`` where both
    paths are absolute (Windows-style, via ntpath) and *duration* is the
    clip length in seconds.
    """
    parser = argparse.ArgumentParser(
        description='Splitting Twitch VOD by a designated time interval.')
    parser.add_argument('-i', '--input', dest="input", required=True,
                        help='Input video file path')
    parser.add_argument('-o', '--output', dest="output", default=None,
                        help='Output video directory')
    parser.add_argument('-d', '--duration', dest="duration", default=1800,
                        type=int,
                        help='Duration of each video clip, in seconds')
    args = parser.parse_args()
    # Derive a default output location from the input file when none given.
    output_path = args.output
    if output_path is None:
        output_path = compute_default_output_path(args.input)
    return ntpath.abspath(args.input), ntpath.abspath(output_path), args.duration
def _compatrelpath_win(path, start=os.path.curdir):
    """Return a relative version of a path"""
    if not path:
        raise ValueError("no path specified")
    start_parts = ntpath.abspath(start).split(ntpath.sep)
    path_parts = ntpath.abspath(path).split(ntpath.sep)
    if start_parts[0].lower() != path_parts[0].lower():
        # Different drives, or UNC mixed with non-UNC: no relative path.
        unc_path, _ = ntpath.splitunc(path)
        unc_start, _ = ntpath.splitunc(start)
        if bool(unc_path) != bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                             % (path, start))
        raise ValueError("path is on drive %s, start on drive %s"
                         % (path_parts[0], start_parts[0]))
    # Find how many leading components the two paths share
    # (case-insensitive, as on Windows filesystems).
    shared = min(len(start_parts), len(path_parts))
    for idx in range(shared):
        if start_parts[idx].lower() != path_parts[idx].lower():
            shared = idx
            break
    rel_parts = [ntpath.pardir] * (len(start_parts) - shared) + path_parts[shared:]
    if not rel_parts:
        return ntpath.curdir
    return ntpath.join(*rel_parts)
def openSyntaxTree(self, pathOfSouce):
    """Open the syntax-tree JSON previously generated for *pathOfSouce*.

    Looks in ``target_<name>/syn/out.json`` (next to the source on
    non-Linux systems, under the current directory on Linux).

    Returns the open file object, or None when the file cannot be opened.
    """
    canonicalFileName = ntpath.basename(pathOfSouce)
    withoutExtention = canonicalFileName[0:canonicalFileName.find(".")]
    if platform.system() != 'Linux':
        synDirectory = "target_" + withoutExtention + "\\syn\\"
        # Directory of the source file: absolute path minus the file name.
        basepath = ntpath.abspath(
            pathOfSouce)[0:len(ntpath.abspath(pathOfSouce)) - len(canonicalFileName)]
        try:
            return open(basepath + synDirectory + "out.json", "r",
                        encoding='utf-8')
        except OSError:
            pass
    else:
        synDirectory = "target_" + withoutExtention + "/syn/"
        try:
            return open(synDirectory + "out.json", "r", encoding='utf-8')
        except OSError:
            pass
    return None
def getFile(url, filePath, savePath):
    """Download *filePath* from *url* via HTTP GET and save it to *savePath*.

    The remote path is URL-quoted before the request. The file is written
    into *savePath* using its base name; nothing is written unless the
    server answers 200 and the directory exists.
    """
    if (url[-1] != "/"):
        url += "/"
    filePath = urllib.parse.quote_plus(filePath)
    uri = url + filePath
    h = httplib2.Http(".cache")
    try:
        (resp_headers, content) = h.request(uri, "GET")
    # BUG FIX: was a bare `except:` which also swallowed KeyboardInterrupt
    # and programming errors; only network/HTTP failures should be caught.
    except Exception:
        print("Error! Can't send url request.")
        return
    if (resp_headers.status == 200):
        # NOTE(review): filePath was quote_plus()-encoded above, so any
        # path separators are already escaped; basename is then the whole
        # quoted string — confirm that is the intended saved file name.
        filename = ntpath.basename(filePath)
        if (not os.path.exists(savePath)):
            print("Can't save file. Directory [{}] does not exists.".
                  format(ntpath.abspath(savePath)))
            return
        # Context manager guarantees the handle is closed even on a write
        # error (the original leaked it on failure).
        with open(savePath + "\\" + filename, "wb") as f:
            f.write(content)
        print("Saved: [{}]".format(ntpath.abspath(savePath + "\\" + filename)))
    elif(resp_headers.status == 404):
        print("Error! Bad request. File does not exists")
    else:
        print("Error in saving file.")
    return
def nt_relpath(path, start=curdir): """Implementa os.path.relpath para Windows ya que en python 2.5 no esta implementada""" from ntpath import abspath, splitunc, sep, pardir, join if not path: raise ValueError("no path specified") start_list = abspath(start).split(sep) path_list = abspath(path).split(sep) if start_list[0].lower() != path_list[0].lower(): unc_path, rest = splitunc(path) unc_start, rest = splitunc(start) if bool(unc_path) ^ bool(unc_start): raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" % (path, start)) else: raise ValueError("path is on drive %s, start on drive %s" % (path_list[0], start_list[0])) # Work out how much of the filepath is shared by start and path. for i in range(min(len(start_list), len(path_list))): if start_list[i].lower() != path_list[i].lower(): break else: i += 1 rel_list = [pardir] * (len(start_list)-i) + path_list[i:] if not rel_list: return curdir return join(*rel_list)
def nt_relpath(path, start=curdir): """Implementa os.path.relpath para Windows ya que en python 2.5 no esta implementada""" from ntpath import abspath, splitunc, sep, pardir, join if not path: raise ValueError("no path specified") start_list = abspath(start).split(sep) path_list = abspath(path).split(sep) if start_list[0].lower() != path_list[0].lower(): unc_path, rest = splitunc(path) unc_start, rest = splitunc(start) if bool(unc_path) ^ bool(unc_start): raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" % (path, start)) else: raise ValueError("path is on drive %s, start on drive %s" % (path_list[0], start_list[0])) # Work out how much of the filepath is shared by start and path. for i in range(min(len(start_list), len(path_list))): if start_list[i].lower() != path_list[i].lower(): break else: i += 1 rel_list = [pardir] * (len(start_list) - i) + path_list[i:] if not rel_list: return curdir return join(*rel_list)
def _path_convert(path_str: str, path_type: str) -> str:
    """Return *path_str* as an absolute path in the requested flavor.

    path_type:
        None                     -> plain ``ntpath.abspath`` result
        'win'                    -> backslash separators
        'unix' / 'linux' / 'mac' -> forward-slash separators

    Raises ValueError for any other *path_type*. (Previously an
    unrecognized value silently fell through and returned None.)
    """
    abs_path = ntpath.abspath(path_str)
    if path_type is None:
        return abs_path
    if path_type == 'win':
        return abs_path.replace('/', '\\')
    if path_type in ('unix', 'linux', 'mac'):
        return abs_path.replace('\\', '/')
    raise ValueError("unsupported path_type: {!r}".format(path_type))
def __init__(self, path, isCli, noOutputs=True):
    """Run semantic analysis on the syntax tree of *path* and write results.

    Loads ``out.json`` from the syn phase, runs the Analyzer, optionally
    prints the tree, then writes ``err.sem`` / ``out.sem`` / ``tabla.sem``
    / ``out.json`` into ``target_<name>/sem/``.

    path:      source file whose syntax tree should be analyzed.
    isCli:     True when driven from the command line (CLI tree display).
    noOutputs: suppress console dumps of the analyzed tree when True.
    """
    # LOAD TREE
    jsonFile = self.openSyntaxTree(path)
    self.tree = json.load(jsonFile)
    self.analyzer = Analyzer(self.tree)
    if isCli and noOutputs == False:
        TreeUtils.cliDisplay(self.analyzer.tree)
    self.analyzer.mockTree()
    if not isCli and noOutputs == False:
        print(json.dumps(self.analyzer.tree, indent=2, sort_keys=False))
    # SAVE FILE STATEMENTS:
    pathOfSouce = path
    fileOpen = True
    canonicalFileName = ntpath.basename(pathOfSouce)
    withoutExtention = canonicalFileName[0:canonicalFileName.find(".")]
    synDirectory = "target_" + withoutExtention + "\\sem\\"
    # Directory of the source file (absolute path minus the file name).
    basepath = ntpath.abspath(
        pathOfSouce)[0:len(ntpath.abspath(pathOfSouce)) - len(canonicalFileName)]
    if platform.system() != 'Linux':
        try:
            os.makedirs(basepath + synDirectory)
        except OSError:
            pass
        errFile = open(basepath + synDirectory + "err.sem", "+w")
        outFile = open(basepath + synDirectory + "out.sem", "+w",
                       encoding='utf-8')
        tablaFile = open(basepath + synDirectory + "tabla.sem", "+w",
                         encoding='utf-8')
        jsonFile = open(basepath + synDirectory + "out.json", "+w",
                        encoding='utf-8')
    else:
        synDirectory = "target_" + withoutExtention + "/sem/"
        try:
            os.makedirs(synDirectory)
        except OSError:
            pass
        errFile = open(synDirectory + "err.sem", "+w")
        outFile = open(synDirectory + "out.sem", "+w", encoding='utf-8')
        # BUG FIX: previously opened with `basepath + synDirectory`, i.e. a
        # Windows-style path, on Linux — inconsistent with every other file
        # in this branch and pointing outside the target directory.
        tablaFile = open(synDirectory + "tabla.sem", "+w", encoding='utf-8')
        jsonFile = open(synDirectory + "out.json", "+w", encoding='utf-8')
    self.hasErrors = self.analyzer.hasErrors
    print(json.dumps(self.analyzer.tree, indent=2, sort_keys=False),
          file=jsonFile)
    print(self.analyzer.err, file=errFile)
    self.analyzer.tabla.fileDisplayTable(tablaFile)
def manage_offline_site(self, site, action_type, path=''):
    """
    Manage offline sites

    Configure, Export, or Delete a Polaris offline site.

    Variable
    *site* - offline site name
    *action_type* - choose from 'config', 'export', 'delete'
    *path* - location to export the offline site zip (including the file name)
    """
    logger.warn('manage_offline_site DEPRECATED')
    assert action_type in ('config', 'export', 'delete'), AssertionError("Invalid option {0}".format(action_type))
    # 'export' is the only action that requires a destination path.
    assert path is not '' if action_type == 'export' else True, AssertionError("Please provide path including file name if you want to export.")
    PolarisInterface.navi.open_site_management_window()
    logger.info('Select the offline site {0}.'.format(site))
    # This step was incorporated to handle site name changes from CamelCase to upper case
    try:
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['offline site']['id'].format(site)))
    except errorhandler.NoSuchElementException:
        # Fall back to the upper-cased site name.
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['offline site']['id'].format(site.upper())))
    logger.info('Click the {0} button for offline site {1}.'.format(action_type, site))
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['offline {0}'.format(action_type)]['id'].format(site)))
    if action_type == 'export':
        PolarisInterface.navi.ok_to_synchronize()
        logger.info('Offline site {0} is exported to {1}.'.format(site, ntpath.abspath(path)))
        self.file_dialog(ntpath.abspath(path), 'Save')
        PolarisInterface.navi.verify_popup_message(color='green')
    elif action_type == 'config':
        PolarisInterface.navi.ok_to_synchronize()
        try:
            # if IsolatedStorage is not cleared
            PolarisInterface.webdriver.click(PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['local site load']['id']))
        except errorhandler.NoSuchElementException:
            pass
    else:
        # 'delete' — presumably a red (warning) confirmation popup; confirm.
        PolarisInterface.navi.verify_popup_message(color='red')
    sleep(10)
    # windows that pop up after take action_type on the site needs to be handled within this function
    # otherwise can not close site management dialog unless make it a separate function in navigation.py
    logger.info('Close Site Management dialog')
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['wizard ok']['id']))
def take_system_snapshot(self, snapshot, polaris_logs='true', configuration_data='true', ecu_data='true'):
    """
    Takes a system snapshot

    Takes a system snapshot. Currently limited to taking all ECU data, if the
    option is selected
    """
    logger.warn('take_system_snapshot DEPRECATED')
    # Remove a stale archive with the same name, or make sure the target
    # directory exists for a fresh one.
    if ntpath.exists('{0}.zip'.format(ntpath.abspath(snapshot))):
        os.remove('{0}.zip'.format(ntpath.abspath(snapshot)))
    else:
        _dir = ntpath.dirname(ntpath.abspath(snapshot))
        if not ntpath.exists(_dir):
            os.makedirs(_dir)
    # Options arrive as strings (Robot Framework style); normalize case.
    _plogs = polaris_logs.lower()
    _cdata = configuration_data.lower()
    _edata = ecu_data.lower()
    _complete = list()  # one entry per selected category, verified at the end
    PolarisInterface.navi.go_to('take system snapshot')
    if _plogs == 'true' or _cdata == 'true' or _edata == 'true':
        # Custom snapshot: tick each requested category individually.
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup custom snapshot']['id']))
        if _plogs == 'true':
            _complete.append(_plogs)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup polaris logs']['id']))
        if _cdata == 'true':
            _complete.append(_cdata)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup config data']['id']))
        if _edata == 'true':
            _complete.append(_edata)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup ecu data']['id']))
    else:
        # Nothing selected: fall back to the default snapshot.
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup default snapshot']['id']))
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup start']['id']))
    self.file_dialog(ntpath.abspath(snapshot), 'Save')
    # NOTE(review): fixed 60 s wait — presumably long enough for the snapshot
    # to complete; polling the 'Complete' markers would be more reliable.
    sleep(60)
    # Each selected category should now show a 'Complete' marker.
    assert len(_complete) <= len(PolarisInterface.webdriver.find_elements_by_name('Complete')), \
        AssertionError('Unable to verify successful system snapshot')
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup close']['id']))
def analisis_personalizado():
    """Run a custom (user-driven) analysis via a small tkinter workflow.

    Asks the user to pick a tweets JSON file and a newscast (telediario)
    JSON file, loads both, extracts trends, and finally shows a window
    with one "Resultado" button per trend that opens its result table.
    """
    aup = tk.Tk(className=' Análisis personalizado')
    aup.geometry('400x300')
    aup.iconbitmap('res/icon2.ico')
    aup.configure(background='#a1dbcd')
    w = tk.Label(aup, text="Selecciona el fichero JSON con los tweets")
    w.grid(row=0, column=0, padx=0, pady=10)
    # Force the label to render before the (blocking) file dialog opens.
    aup.update_idletasks()
    aup.update()
    time.sleep(1)
    json_tweets = askopenfilename(
        parent=aup,
        initialdir=ntpath.abspath("json_tweets").replace("\\", "/"),
        title="Selecciona el archivo JSON",
        filetypes=(("JSON files", "*.json"), ("All files", "*.*")))
    w = tk.Label(aup, text="Selecciona el fichero JSON perteneciente al telediario")
    w.grid(row=0, column=0, padx=0, pady=10)
    aup.update_idletasks()
    aup.update()
    time.sleep(1)
    json_speech = askopenfilename(
        parent=aup,
        initialdir=ntpath.abspath("json_speech").replace("\\", "/"),
        title="Selecciona el archivo JSON",
        filetypes=(("JSON files", "*.json"), ("All files", "*.*")))
    dic_tweets, dic_speech = cargar(json_tweets, json_speech)
    trends = extract_trends(dic_tweets)
    # Replace the picker window with the results window.
    aup.destroy()
    aup = tk.Tk(className=' Resultados Análisis')
    aup.geometry('400x800')
    aup.iconbitmap('res/icon2.ico')
    aup.configure(background='#a1dbcd')
    for i in range(len(trends)):
        w = tk.Label(aup, text=trends[i])
        w.grid(row=i, column=0, padx=0, pady=10)
        # i=i binds the loop variable per-button (avoids the late-binding
        # closure pitfall).
        b = tk.Button(aup, text="Resultado",
                      command=lambda i=i: genera_tabla(trends[i],
                                                      dic_tweets[str(i + 1)],
                                                      dic_speech))
        b.grid(row=i, column=2, padx=0, pady=10)
        aup.update_idletasks()
        aup.update()
    aup.mainloop()
    return
def _offline_mode(self, dll, data_path):
    """Switch this client into or out of offline (local DLL) mode.

    When both *dll* and *data_path* are given, an offline DllSession is
    created only on the first call; later calls leave it untouched.
    Otherwise the client reverts to a fresh online session.

    Returns the currently active session.
    """
    going_offline = bool(dll and data_path)
    if not going_offline:
        self.offline = False
        self._make_new_session()
    elif not self.offline:
        self.session = DllSession(str(ntpath.abspath(dll)),
                                  str(ntpath.abspath(data_path)))
        self.offline = True
        # Keep only the '/api...' suffix of the URL — the DLL session
        # presumably serves endpoints relative to that root.
        self.base_url = self.base_url[self.base_url.find('/api'):]
    return self.session
def backup_system(self, path, timeout=15):
    """
    Backup a system

    Backup a system to the specified path.

    Variable
    *path* - location to export the offline site zip

    .. code:: robotframework

        *** Test Cases ***
        Sample
            Backup system    path=.//artifacts
    """
    # On a local instance, a stale archive with the same name would make the
    # export ambiguous — remove it first.
    if 'localhost' in PolarisInterface.hostname or '127.0.0.1' in PolarisInterface.hostname:
        if ntpath.exists('{0}.zip'.format(ntpath.abspath(path))):
            os.remove('{0}.zip'.format(ntpath.abspath(path)))
    # Open System Backup UI
    PolarisInterface.navi.go_to('backup system')
    # Find backup file Name textbox
    backupname_input = PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['backup file']['id'])
    # Remove the filename from the Path and save it to Backupfilename
    backup_filename = ntpath.splitext(ntpath.basename(path))[0]
    # Append '*' to the Start and end of the path
    path = "*{0}*".format(ntpath.abspath(ntpath.dirname(path)))
    # Send the path
    # NOTE(review): the directory path is typed and then immediately cleared —
    # presumably this drives the dialog's folder navigation before the bare
    # file name is entered; confirm against the UI behavior.
    backupname_input.send_keys(path)
    # Clear the field
    backupname_input.clear()
    # Send the File name
    backupname_input.send_keys(backup_filename)
    # Perform backup
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['wizard next']['id']))
    if timeout:
        # Wait (up to *timeout* seconds) for the finish button to be ready.
        PolarisInterface.webdriver.waitforclickable(ui_ref.mapping['wizard finish']['id'], int(timeout))
    # Finish the process
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(ui_ref.mapping['wizard finish']['id']))
def click_export():
    """Export the audio parts of the selected .tlsd file as .wav files.

    Requires at least one part in the 'recorded' or 'overdub' state;
    otherwise an error popup is shown and nothing is exported.
    """
    has_audio_part = any(
        button.part_state in ('recorded', 'overdub')
        for button in part_button.all_buttons_source)
    if not has_audio_part:
        message = ('\n\nIn order to export .wav, your .tlsd file must contain'
                   '\nat least one recorded or overdub part\n\n')
        popup_ok(message, PICS['IMG_ERR_EXPORT'])
        return
    wait_popup = popup_wait('Please wait while your .wav files are being extracted')
    selected = int(lb.curselection()[0])
    file_name = Full_Filenames[selected]
    data = trio.readBytes(file_name)
    parts_with_audio = trio.give_parts_with_audio_only(trio.getPartInfo(data))
    audio_blocks = trio.formAudioParts(parts_with_audio, data)
    errors = trio.write_wave_files(ntpath.basename(file_name), audio_blocks)
    wait_popup.kill()
    if errors:
        message = 'During .wav export, we encountered the following error(s):\n\n'
        message += '\n'.join(errors)
        popup_ok(message, PICS['IMG_ERR_EXPORT'])
    else:
        message = ('Your .wav files have been successfully extracted here:\n\n'
                   + str(ntpath.abspath(trio.wavDir)))
        popup_ok(message, PICS['IMG_OK'], 'ALL GOOD')
def __init__(self, input_files=[], output_file=[], formatting=None,
             lmax=10, **kwargs):
    """
    Parameters
    ----------
    input_files : list(str)
        a list of the input files (default None); entries that are not
        existing files are silently dropped.
    output_file : str
        output filepath - this file will have all pi/2 jumps removed to
        form continuous phase shifts
    lmax (optional) : int
        Maximum angular momentum quantum number to calculate and must be
        in the range 0 <= lmax <= 18.
    formatting (optional) : str
        output style - use either 'CLEED' or None (default None)

    Raises
    ------
    ValueError
        if lmax is outside the documented 0..18 range. (Previously an
        out-of-range value silently left ``self.lmax`` unset, causing a
        confusing AttributeError later.)
    """
    self.input_files = [
        filename for filename in input_files if os.path.isfile(filename)
    ]
    self.output_file = ntpath.abspath(str(output_file))
    lmax = int(lmax)
    if not 0 <= lmax <= 18:
        raise ValueError("lmax must be in the range 0 <= lmax <= 18, "
                         "got %d" % lmax)
    self.lmax = lmax
    self.set_format(formatting)
    # Any extra keyword arguments become instance attributes.
    self.__dict__.update(kwargs)
def __init__(self, filePath="", **kwargs):
    """
    Initializes the object by parsing the data in the given file path.
    Calls parent's __init__

    ARGS:
        self: reference to the object calling this method (i.e. Java's THIS)
        filePath: String, containing absolute path to raw data file
    """
    # An 'empty' instance skips all parsing (used when cloning/copying).
    if 'empty' in kwargs and kwargs['empty']:
        return
    # Quick little bit of formatting: on non-Windows systems, strip stray
    # backslashes left over from shell escaping.
    if system() != "Windows":
        filePath = path.abspath(filePath).replace("\\", "").strip()
    # Call the parent class' __init__
    eastAndWestServerIP = CrowdSource_EastWest
    File.__init__(self, filePath=filePath, eastWestIP=eastAndWestServerIP)
    self.loadCrowdSourceInfo()
    # Actually parsing the tests in this file
    self.findAndParseTCPTests()
    self.findAndParsePINGTests()
    self.findAndParseUDPTests()
    # This is one final check, to make sure that we have all 6 tests. If not,
    # then there was an unknown test of some kind, and we set our
    # ContainsErrors to True.
    # The 6 Tests are:
    #   2 PING TESTS (1 East, 1 West)
    #   2 TCP TESTS (1 East, 1 West)
    #   2 UDP 1 second TESTS (1 East, 1 West)
    if (len(self.TestsByNum) != 6) and not self.ContainsErrors:
        # BUG FIX: the message used to say "the 14 necessary tests", copied
        # from the FieldTest parser; this parser expects 6 (as the arithmetic
        # on the next line shows).
        specialMessage = (
            "There was an unknown error of some kind, and the 6 necessary" +
            " tests were not performed. There are " +
            str(6 - len(self.TestsByNum)) +
            " tests missing.")
        self._ErrorHandling__setErrorCode(404, specialMessage)
def get_projects_info_from_sln(fileName):
    """Parse a Visual Studio .sln file and return its C++ projects.

    Returns a dict mapping project GUID -> {'id', 'name',
    'projectFilename' (absolute), 'dependencies' (list of GUIDs)}.
    """
    basepath = ntpath.dirname(os.path.abspath(fileName))
    content = read_file(fileName)
    # Matches one Project(...) ... EndProject section (C++ project type GUID).
    section_re = re.compile(
        'Project\\("\\{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942\\}"\\)(.*?)EndProject\n',
        re.MULTILINE | re.DOTALL)
    quoted_re = re.compile('"(.*?)"')
    dep_re = re.compile('\\} = \\{([A-F0-9\\-]{0,})\\}')
    projects = {}
    for entry in section_re.findall(content):
        names = quoted_re.findall(entry)
        # A "real" project line carries exactly: name, filename, GUID.
        if len(names) != 3:
            continue
        dependencies = dep_re.findall(entry)
        # Project paths in the .sln are relative to the solution directory.
        project_filename = ntpath.abspath(ntpath.join(basepath, names[1]))
        project_id = names[2][1:-1]  # strip the surrounding { }
        projects[project_id] = {
            'id': project_id,
            'name': names[0],
            'projectFilename': project_filename,
            'dependencies': dependencies,
        }
    return projects
def test_realpath_cwd(self):
    # realpath() must expand short (8.3-style) directory names to their
    # long form — both in absolute paths and when the short/long/lowercased
    # directory is the current working directory.
    ABSTFN = ntpath.abspath(support.TESTFN)
    support.unlink(ABSTFN)
    support.rmtree(ABSTFN)
    os.mkdir(ABSTFN)
    self.addCleanup(support.rmtree, ABSTFN)
    test_dir_long = ntpath.join(ABSTFN, "MyVeryLongDirectoryName")
    os.mkdir(test_dir_long)
    test_dir_short = _getshortpathname(test_dir_long)
    test_file_long = ntpath.join(test_dir_long, "file.txt")
    test_file_short = ntpath.join(test_dir_short, "file.txt")
    with open(test_file_long, "wb") as f:
        f.write(b"content")
    # Absolute path through the short directory name resolves to long form.
    self.assertPathEqual(test_file_long, ntpath.realpath(test_file_short))
    # Relative resolution from the long, lowercased and short cwd spellings.
    with support.change_cwd(test_dir_long):
        self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
    with support.change_cwd(test_dir_long.lower()):
        self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
    with support.change_cwd(test_dir_short):
        self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
def read_audio_data_chunk(self, seconds_to_read=30):
    """ Read audio data in chunks.

    Generator: repeatedly reads *seconds_to_read* worth of frames from
    ``self.audio_file``, decodes each chunk, and yields it until the
    file is exhausted (the file is closed on exhaustion).

    Parameters
    ----------
    seconds_to_read: int
        define how many seconds will be read from file.

    Returns
    -------
    samples: [float]
        list of value of audio samples."""
    # Frames per chunk = seconds * frames-per-second.
    chunk_size = seconds_to_read * self.frame_rate
    total_length = round(self.audio_file.getnframes() / self.audio_file.getframerate(), 2)
    print('Read file {}'.format(ntpath.abspath(self.file_path)))
    print(('frame rate is {}, chunk size is {}'.format(self.frame_rate, chunk_size)))
    print('total length is {} s'.format(total_length))
    while True:
        start = self.audio_file.tell()
        print('Read samples from {} to {}'.format(start, start + chunk_size))
        samples = self.audio_file.readframes(chunk_size)
        print('Samples from {} to {} has been read'.format(start, self.audio_file.tell()))
        if not samples:
            # Empty read means end-of-file: close and stop the generator.
            print('End reading {}. Read {} frames '.format(ntpath.basename(self.file_path), self.audio_file.tell()))
            print('')
            self.audio_file.close()
            return
        print('')
        samples = self.decode_audio_chunk(samples)
        yield samples
def test_realpath_symlink_prefix(self):
    # realpath() must preserve the extended-length '\\?\' prefix when the
    # symlink target carries it, and must not add it otherwise.
    ABSTFN = ntpath.abspath(support.TESTFN)
    self.addCleanup(support.unlink, ABSTFN + "3")
    self.addCleanup(support.unlink, "\\\\?\\" + ABSTFN + "3.")
    self.addCleanup(support.unlink, ABSTFN + "3link")
    self.addCleanup(support.unlink, ABSTFN + "3.link")
    with open(ABSTFN + "3", "wb") as f:
        f.write(b'0')
    os.symlink(ABSTFN + "3", ABSTFN + "3link")
    # The '\\?\' prefix allows the trailing-dot name to survive.
    with open("\\\\?\\" + ABSTFN + "3.", "wb") as f:
        f.write(b'1')
    os.symlink("\\\\?\\" + ABSTFN + "3.", ABSTFN + "3.link")
    self.assertPathEqual(ntpath.realpath(ABSTFN + "3link"),
                         ABSTFN + "3")
    self.assertPathEqual(ntpath.realpath(ABSTFN + "3.link"),
                         "\\\\?\\" + ABSTFN + "3.")
    # Resolved paths should be usable to open target files
    with open(ntpath.realpath(ABSTFN + "3link"), "rb") as f:
        self.assertEqual(f.read(), b'0')
    with open(ntpath.realpath(ABSTFN + "3.link"), "rb") as f:
        self.assertEqual(f.read(), b'1')
    # When the prefix is included, it is not stripped
    self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3link"),
                         "\\\\?\\" + ABSTFN + "3")
    self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3.link"),
                         "\\\\?\\" + ABSTFN + "3.")
def do_cd(self, line):
    '''
    Pseudo Command: cd

    Description:
    Change the psuedo current working directory

    Note: The shell keeps a pseudo working directory that allows the user to change
    directory without changing the directory of the working process on sensor.

    Args:
    cd <Directory>
    '''
    self._needs_attached()
    # Normalize the argument against the pseudo-cwd, then make it absolute.
    target = ntpath.abspath(self._file_path_fixup(line))
    if self._stat(target) != "dir":
        print("Error: Path %s does not exist" % target)
        return
    # cwd never has a trailing \
    self.cwd = target[:-1] if target[-1:] == '\\' else target
    log.info("Changed directory to {0}".format(self.cwd))
def get_sets(self, set_type, files):
    """Load the data set whose absolute path contains *set_type*.

    Scans *files* in order and loads every match, so the LAST matching
    file wins (same as the original behavior). Returns None when no
    file path contains *set_type*.
    """
    loaded = None
    for candidate in files:
        if set_type in ntpath.abspath(candidate):
            loaded = self.load_data(candidate)
    return loaded
def __init__(self, filePath="", **kwargs):
    """
    Initializes the object by parsing the data in the given file path.
    Calls parent's __init__

    ARGS:
        filePath    String, containing absolute path to raw data file
    """
    # An 'empty' instance skips all parsing (used when cloning/copying).
    if 'empty' in kwargs and kwargs['empty']:
        return
    # Quick little bit of formatting: on non-Windows systems, strip stray
    # backslashes left over from shell escaping.
    if system() != "Windows":
        filePath = path.abspath(filePath).replace("\\", "").strip()
    # Call the parent class' __init__
    eastAndWestServerIP = FieldTest_EastWest
    File.__init__(self, filePath=filePath, eastWestIP=eastAndWestServerIP)
    self.loadFieldTestInfo()
    # Actually parsing the tests in the file
    self.findAndParseTCPTests()
    self.findAndParsePINGTests()
    self.findAndParseUDPTests()
    self.findAndParseTCRTTests()
    # This is one final check, to make sure that we have all 14 tests. If not,
    # then there was an unknown test of some kind, and we set our
    # _contains_Errors to True.
    # The 14 Tests are:
    #   2 PING TESTS (1 East, 1 West)
    #   4 TCP TESTS (2 East, 2 West)
    #   6 UDP 1 second TESTS (3 East, 3 West)
    #   2 UDP 5 second TESTS (1 East, 1 West)
    if (len(self.TestsByNum) != 14) and not self.ContainsErrors:
        specialMessage = ("There was an unknown error of some kind, and the 14 necessary" +
                          " tests were not performed. There are "+str(14-len(self.TestsByNum))+
                          " tests missing.")
        self._ErrorHandling__setErrorCode(404, specialMessage)
def __new__(cls, *args, **kwargs):
    """
    Before creating an instance of the given file as a parsed object, we
    want to check that the file is indeed a test file. This will see if
    the necessary text is in the first few lines. If not, then we return
    None, and the object is not created
    """
    if 'empty' in kwargs and kwargs['empty']:
        return File.__new__(cls)
    # Getting the file path that was passed in to the constructor
    if "filePath" in kwargs:
        fileLoc = kwargs["filePath"]
    else:
        fileLoc = args[0]
    # Removing any backslashes that may have been included in the file path
    # (escape residue on non-Windows systems).
    if system() != "Windows":
        fileLoc = path.abspath(fileLoc).replace("\\", "").strip()
    # Checking that the file is indeed a FieldTest File. If not, return None
    try:
        with open(fileLoc) as checkingFile:
            allText = checkingFile.read()
        # The first paragraph (up to the first blank line) must mention one
        # of the known headers.
        if all((string not in allText.split("\n\n")[0])
               for string in ["CPUC Tester", "CPUC Traceroute"]):
            if "DEBUG" in kwargs and kwargs["DEBUG"]:
                print("{} is not a Field Test output file. ".format(os.path.basename(fileLoc)) +
                      "It did not have the necessary header.", file=sys.stderr)
            return None
    # BUG FIX: was a bare `except:` that also swallowed programming errors
    # (and KeyboardInterrupt); only I/O failures mean "could not be read".
    except OSError:
        print(fileLoc + " is a file that could not be read.", file=sys.stderr)
        return None
    inst = File.__new__(cls)
    return inst
def writeBuffer(self):
    """Write the accumulated code buffer to 'code.tm'.

    The target directory is derived from the absolute path of the source
    file's base name (i.e. relative to the current working directory),
    using Windows-style separators.
    """
    canonicalFileName = ntpath.basename(self.filename)
    fpath = ntpath.abspath(canonicalFileName)
    # Replace the file name component with 'code.tm'.
    fpath = "\\".join(fpath.split("\\")[:-1]) + "\\code.tm"
    # BUG FIX: the file handle was never closed; the context manager
    # guarantees the buffer is flushed and the handle released.
    with open(fpath, '+w') as f:
        print(self.buffer, file=f, end='')
def get_templates():
    """Return the wizard template .zip files shipped with the toolchain."""
    wizard_dir = ntpath.abspath('toolchain/wizard')
    return [template_file
            for template_file in template.get_files(wizard_dir, 'zip')]
def test_realpath_relative(self):
    # A symlink created through a *relative* link path must still resolve
    # to the absolute real path of its target.
    ABSTFN = ntpath.abspath(support.TESTFN)
    open(ABSTFN, "wb").close()
    self.addCleanup(support.unlink, ABSTFN)
    self.addCleanup(support.unlink, ABSTFN + "1")
    os.symlink(ABSTFN, ntpath.relpath(ABSTFN + "1"))
    self.assertPathEqual(ntpath.realpath(ABSTFN + "1"), ABSTFN)
def test_realpath_strict(self):
    # Bug #43757: raise FileNotFoundError in strict mode if we encounter
    # a path that does not exist.
    ABSTFN = ntpath.abspath(os_helper.TESTFN)
    # A dangling symlink: its target (ABSTFN + "1") never exists.
    os.symlink(ABSTFN + "1", ABSTFN)
    self.addCleanup(os_helper.unlink, ABSTFN)
    self.assertRaises(FileNotFoundError, ntpath.realpath, ABSTFN, strict=True)
    # A plainly nonexistent path must raise too.
    self.assertRaises(FileNotFoundError, ntpath.realpath, ABSTFN + "2", strict=True)
def submitRecovery(self):
    """Reassemble a threshold-shared secret and append it to recovered files.

    Reads the source/destination fields from the GUI, collects matching
    shares from local ``*.bin`` files, reconstructs the secret with TSS,
    then appends the recovered key to every file in the source directory
    and reports success via a message box.
    """
    nSourceSpec = self.sourceSpec.get()
    nDestSpec = self.destSpec.get()
    # Source directory is rooted at C:\ (Windows-only layout).
    d = ntpath.abspath(ntpath.join('C:\\', nSourceSpec))
    if not os.path.exists(d):
        print('Directory does not exist')
    else:
        path = d
        rout = '*.*'
        files = glob.glob(os.path.join(path, rout))
        # Share containers: every .bin file in the current directory.
        list_of_files = glob.iglob(os.path.join('./*.bin'))
        shares = []
        # Cross product: every (share file, data file) pair.
        for ful, file in [(ful, file) for ful in list_of_files for file in files]:
            with open(ful, 'rb+') as d:
                dReader = d.read().splitlines()
                for share in dReader:
                    # Share record layout appears to be "<...>:<name>:<hex>".
                    # NOTE(review): lstrip(path) strips any leading characters
                    # that occur in `path` (a character set), not the prefix
                    # `path` itself — both sides are stripped the same way so
                    # matching may still work, but confirm removeprefix-like
                    # behavior was intended.
                    f = (share.split(':')[1].lstrip(path))
                    fils = (share.split(':')[2])
                    fil = file.lstrip(path)
                    if f == fil:
                        try:
                            # NOTE(review): eval() on file contents is a code
                            # execution risk if the .bin files are untrusted;
                            # ast.literal_eval would be safer.
                            deal = eval(fils)
                            deal = unhexlify(deal)
                            shares.append(deal)
                        except Exception:
                            print('%s share in %s could not be read' % (fil, ful))
        start = time.time()
        # strict_mode: fail rather than return a wrong secret on bad shares.
        key = TSS.reconstruct_secret(shares, strict_mode=True)
        key = str(key)
        end = time.time()
        processtime = end - start
        print('Time taken to recover secret:%f' % processtime)
        print('Writing files to %s directory' % nDestSpec)
        # Create/enter the destination directory; ignore if it exists.
        try:
            dirname = nDestSpec
            os.mkdir(dirname)
            os.chdir(dirname)
        except OSError:
            pass
        # Append the recovered key to each recovered file.
        for file in files:
            fil = file.lstrip(path)
            with open(fil, 'ab+') as foo:
                foo.write(key)
        tkinter.messagebox.showinfo(
            'Status Message',
            'Your Secret is successfully recovered, BACK to Main Menu')
def test_realpath_broken_symlinks(self):
    """realpath() resolves chains of broken/relative symlinks predictably.

    Builds a tree of dangling and self/parent-referencing links and checks
    both the str and bytes flavours of realpath against expected targets.
    """
    ABSTFN = ntpath.abspath(support.TESTFN)
    os.mkdir(ABSTFN)
    self.addCleanup(support.rmtree, ABSTFN)
    with support.change_cwd(ABSTFN):
        os.mkdir("subdir")
        os.chdir("subdir")
        os.symlink(".", "recursive")
        os.symlink("..", "parent")
        os.chdir("..")
        os.symlink(".", "self")
        # All of the following point at paths that do not exist.
        os.symlink("missing", "broken")
        os.symlink(r"broken\bar", "broken1")
        os.symlink(r"self\self\broken", "broken2")
        os.symlink(r"subdir\parent\subdir\parent\broken", "broken3")
        os.symlink(ABSTFN + r"\broken", "broken4")
        os.symlink(r"recursive\..\broken", "broken5")
        self.assertPathEqual(ntpath.realpath("broken"),
                             ABSTFN + r"\missing")
        self.assertPathEqual(ntpath.realpath(r"broken\foo"),
                             ABSTFN + r"\missing\foo")
        # bpo-38453: We no longer recursively resolve segments of relative
        # symlinks that the OS cannot resolve.
        self.assertPathEqual(ntpath.realpath(r"broken1"),
                             ABSTFN + r"\broken\bar")
        self.assertPathEqual(ntpath.realpath(r"broken1\baz"),
                             ABSTFN + r"\broken\bar\baz")
        self.assertPathEqual(ntpath.realpath("broken2"),
                             ABSTFN + r"\self\self\missing")
        self.assertPathEqual(
            ntpath.realpath("broken3"),
            ABSTFN + r"\subdir\parent\subdir\parent\missing")
        self.assertPathEqual(ntpath.realpath("broken4"),
                             ABSTFN + r"\missing")
        self.assertPathEqual(ntpath.realpath("broken5"),
                             ABSTFN + r"\missing")
        # Same expectations for the bytes API.
        self.assertPathEqual(ntpath.realpath(b"broken"),
                             os.fsencode(ABSTFN + r"\missing"))
        self.assertPathEqual(ntpath.realpath(rb"broken\foo"),
                             os.fsencode(ABSTFN + r"\missing\foo"))
        self.assertPathEqual(ntpath.realpath(rb"broken1"),
                             os.fsencode(ABSTFN + r"\broken\bar"))
        self.assertPathEqual(ntpath.realpath(rb"broken1\baz"),
                             os.fsencode(ABSTFN + r"\broken\bar\baz"))
        self.assertPathEqual(ntpath.realpath(b"broken2"),
                             os.fsencode(ABSTFN + r"\self\self\missing"))
        self.assertPathEqual(
            ntpath.realpath(rb"broken3"),
            os.fsencode(ABSTFN + r"\subdir\parent\subdir\parent\missing"))
        self.assertPathEqual(ntpath.realpath(b"broken4"),
                             os.fsencode(ABSTFN + r"\missing"))
        self.assertPathEqual(ntpath.realpath(b"broken5"),
                             os.fsencode(ABSTFN + r"\missing"))
def test_realpath_basic(self):
    """realpath() follows a simple symlink, for both str and bytes paths."""
    target = ntpath.abspath(support.TESTFN)
    link = target + "1"
    open(target, "wb").close()
    self.addCleanup(support.unlink, target)
    self.addCleanup(support.unlink, link)
    os.symlink(target, link)
    self.assertPathEqual(ntpath.realpath(link), target)
    self.assertPathEqual(ntpath.realpath(os.fsencode(link)),
                         os.fsencode(target))
def run_init_scripts(start=False, kill=False, other=False):
    """ Execute scripts in /etc/cumulus-init.d or C:\\cumulus\\init.d

    :type start: bool
    :param start: Run scripts starting with S
    :type kill: bool
    :param kill: Run scripts starting with K
    :type other: bool
    :param other: Run scripts not starting with S or K
    """
    init_dir = '/etc/cumulus-init.d'
    if sys.platform in ['win32', 'cygwin']:
        init_dir = 'C:\\cumulus\\init.d'
    # Run the post install scripts provided by the bundle
    if not ospath.exists(init_dir):
        LOGGER.info('No init scripts found in {}'.format(init_dir))
        return
    LOGGER.info('Running init scripts from {}'.format(init_dir))
    # Collect plain files only, sorted so scripts run in lexical order
    # (SysV-init style: S10foo before S20bar).
    filenames = []
    for filename in sorted(os.listdir(init_dir)):
        if ospath.isfile(ospath.join(init_dir, filename)):
            filenames.append(ospath.join(init_dir, filename))
    if start:
        for filename in filenames:
            if ospath.basename(filename)[0] == 'S':
                _run_command(ospath.abspath(filename))
    if kill:
        for filename in filenames:
            if ospath.basename(filename)[0] == 'K':
                _run_command(ospath.abspath(filename))
    if other:
        for filename in filenames:
            if ospath.basename(filename)[0] not in ['K', 'S']:
                _run_command(ospath.abspath(filename))
def startJob(self, path):
    """Submit the job description at *path* to Condor.

    :param path: path of the submit file handed to ``condor_submit``.
    :returns: ``False`` when the condor submission fails, otherwise ``None``.
    :raises FileNotFoundError: if *path* does not exist.
    """
    # Check that the file specified by path exists.
    absname = ntpath.abspath(path)
    # BUG FIX: ntpath.exists() signals a missing path by returning False --
    # it does not raise -- so the previous try/except never fired and
    # non-existent paths were silently submitted anyway.
    if not ntpath.exists(absname):
        logging.error("The path specified does not exist")
        raise FileNotFoundError(absname)
    # Submit to condor & return if false
    retVal = Manager.callCondor(self, ["condor_submit", path])
    if retVal == False:
        return False
def loadData(self, path):
    """Load the C3D acquisition at *path* with BTK and start playback.

    On any failure the current acquisition is cleared and the playback
    timer stopped. On success, caches frequency/frame/marker counts, finds
    the largest coordinate for view scaling, starts the timer at the
    acquisition's sample rate, and emits ``dataLoaded``.
    """
    if not ntpath.isfile(path):
        print 'Can not open ' + path
        self.acq = None
        self.timer.stop()
        return
    self.dataPath, self.dataFile = ntpath.split(ntpath.abspath(path))
    try:
        reader = btk.btkAcquisitionFileReader()
        reader.SetFilename(path)
        reader.Update()
        self.acq = reader.GetOutput()
    except RuntimeError:
        # BTK reports unreadable/unsupported files via RuntimeError.
        print 'File format is not valid ' + path
        self.acq = None
        self.timer.stop()
        return
    if self.acq:
        print 'C3D file loaded ' + path
        self.frequency = self.acq.GetPointFrequency()
        self.totalFrame = self.acq.GetPointFrameNumber()
        self.totalPoint = self.acq.GetPointNumber()
        print 'Sample Frequency :', self.acq.GetPointFrequency()
        print 'Total Frame :', self.acq.GetPointFrameNumber()
        print 'Marker Number :', self.acq.GetPointNumber()
        # Largest coordinate over all markers and frames, presumably used
        # to scale the 3D view.
        self.maxDataValue = 0
        for i in range(self.totalPoint):
            point = self.acq.GetPoint(i)
            for j in range(self.totalFrame):
                pos = point.GetValues()[j,:]
                if pos[0] > self.maxDataValue:
                    self.maxDataValue = pos[0]
                if pos[1] > self.maxDataValue:
                    self.maxDataValue = pos[1]
                if pos[2] > self.maxDataValue:
                    self.maxDataValue = pos[2]
        self.paused = False
        self.currentFrame = 0
        # Timer interval in ms so playback matches the capture frequency.
        self.timer.setInterval(int(1000 / self.frequency))
        self.timer.start()
        self.dataLoaded.emit()
def gen_thumbnail(clip, scalew, frameno=None, compw=1280, comph=720,
                  special_hack=True):
    """Render a PNG thumbnail for *clip* with ffmpeg.

    :param clip: object with an ``fname`` attribute (source video path).
    :param scalew: thumbnail width in pixels; height keeps compw:comph ratio.
    :param frameno: seek position handed to ffmpeg's ``-ss``; None picks a
        representative frame via the "thumbnail" filter.
    :param compw, comph: composition size used only to derive the aspect ratio.
    :param special_hack: rewrite the dxv/.mov path to its webm proxy, which
        ffmpeg can actually decode.
    :returns: path of the generated thumbnail PNG.
    """
    input_name = ntpath.abspath(clip.fname)
    if special_hack:
        input_name = input_name.replace('\\dxv\\', '\\webm\\').replace('.mov', '.webm')
    output_name = './scrot/{}.png'.format(ntpath.basename(clip.fname))
    scaleh = int(scalew * comph / compw)
    # BUG FIX: `if not frameno:` treated frame/timestamp 0 as "no frame";
    # compare against None so an explicit 0 seeks to the start.
    if frameno is None:
        command = 'ffmpeg -y -i "{0}" -vf "thumbnail" -q:v 2 -vframes 1 "./scrot/temp.png"'.format(input_name)
    else:
        command = 'ffmpeg -ss {1} -y -i "{0}" -q:v 2 -vframes 1 "./scrot/temp.png"'.format(input_name, frameno)
    print(command)
    # BUG FIX: '\#' was an invalid escape sequence (SyntaxWarning on modern
    # Python); a plain '#' separator was clearly intended.
    print('#' * 10)
    process = subprocess.Popen(command)  # -ss to seek to frame
    process.communicate()
    make_thumbnail('./scrot/temp.png', output_name, size=(scalew, scaleh))
    # If the non-hacked input produced nothing, retry through the webm proxy.
    if not special_hack:
        if not os.path.exists(output_name):
            return gen_thumbnail(clip, scalew, frameno, compw, comph,
                                 special_hack=True)
    return output_name
def validate(self):
    """Validate and normalise the configuration attributes in place.

    - Each directory property (plus ``playlist_dest`` when set) must point
      at an existing directory.
    - ``fb2k_source_mapping`` is normalised to an absolute Windows path
      with a trailing backslash.
    - ``max_size`` / ``min_free`` given as strings are converted to byte
      counts via ``size_str_to_bytes``.

    :raises IOError: when a directory property is missing or not a directory.
    :raises TypeError: when ``dry_run`` is not a bool.
    :raises ValueError: when ``min_free`` is negative.
    """
    dirprops = ["playlist_source", "source", "dest"]
    if self.playlist_dest is not None:
        dirprops.append("playlist_dest")
    for prop in dirprops:
        value = getattr(self, prop)
        if value is None or not os.path.isdir(value):
            raise IOError(prop + "=" + str(value) + " is not a directory")
    if self.fb2k_source_mapping is not None:
        # ntpath on purpose: foobar2000 paths are Windows paths even when
        # this runs elsewhere.
        self.fb2k_source_mapping = ntpath.abspath(self.fb2k_source_mapping)
        if not self.fb2k_source_mapping.endswith(ntpath.sep):
            self.fb2k_source_mapping = self.fb2k_source_mapping + ntpath.sep
    if not isinstance(self.dry_run, bool):
        raise TypeError("dry_run must be a bool")
    if self.max_size is not None:
        if not isinstance(self.max_size, int):
            self.max_size = self.size_str_to_bytes(self.max_size)
    if self.min_free is not None:
        if not isinstance(self.min_free, int):
            self.min_free = self.size_str_to_bytes(self.min_free)
        if self.min_free < 0:
            # BUG FIX: message read "must be grater than zero" -- typo, and
            # wrong semantics since the check deliberately allows zero.
            raise ValueError("min_free must not be negative")
def _wine_path(self, thing):
    """Translate *thing* to the path Wine sees.

    On Windows the absolute path is returned unchanged; elsewhere it is
    prefixed with Wine's ``Z:`` drive, which maps the host filesystem root.
    """
    abs_path = ntpath.abspath(thing)
    return abs_path if is_windows else "Z:%s" % abs_path
def get_song(self, windows_path):
    """Return the cached Song for *windows_path*, creating it on first use.

    The path is normalised with ``ntpath.abspath`` so different spellings
    of the same Windows path share one cache entry.
    """
    key = ntpath.abspath(windows_path)
    if key not in self.songs:
        self.songs[key] = Song(key, self.config)
    return self.songs[key]
# Excel-to-XML export driver (Python 2).
# NOTE(review): this chunk starts mid-script -- `xls_path`, `test`,
# `parseXMLCfg` and `convert` are defined earlier in the file; presumably
# `test(cond, msg, code)` aborts with `msg` when `cond` is false -- confirm.
cfg_path = sys.argv[2]
output = sys.argv[3]
# Validate the inputs: the Excel workbook and the XML export template.
test(os.path.exists(xls_path), "EXCEL文件[" + xls_path + "]不存在", 404)
test(re.search(r'\.xlsx?$', xls_path.lower()) != None,
     "[" + xls_path + "]不是EXCEL文件")
test(os.path.exists(cfg_path), "导表模板[" + cfg_path + "]不存在", 404)
test(cfg_path[-4:].lower() == ".xml", "导表模板[" + cfg_path + "]不是XML文件")
cfg = parseXMLCfg(cfg_path)
test(output != None, "XML输出目录为空");
# If `output` is a directory, derive the file name from the template name.
if output[-4:].lower() != ".xml":
    output = output + "/" + cfg.name + ".xml"
print ">>> " + output
# Ensure the output folder exists; normalise backslashes for portability.
folder = ntpath.abspath(ntpath.split(output)[0])
folder = folder.replace("\\", "/")
if not os.path.exists(folder):
    os.makedirs(folder)
result = "<?xml version='1.0' encoding='utf-8'?>\n" + convert(xls_path)
print "" + output + "\t-> size:" + format(len(result), ",")
f = open(output, 'wb')
f.write(result)
f.close()
# Collect the <blockquote> section of every .htm file in the working
# directory into one right-to-left HTML document.
import os
import glob
import ntpath  # kept: imported by the original script

os.chdir("./")
# FIX: `import os` was duplicated, file handles were never closed via
# `with`, an unused `filename` local was computed, and the document was
# built with quadratic `+=` concatenation.
parts = ['<html dir="rtl"><head><meta content="text/html" charset="utf-8"'
         ' http-equiv="Content-Type"></head><body>']
for page in glob.glob("*.htm"):
    with open(page, "r") as handle:
        text = handle.read()
    # find() semantics preserved exactly, including the -1 "not found" case.
    start = text.find("<blockquote>", 0)
    end = text.find("</blockquote>", start + 1)
    parts.append(text[start:end])
parts.append('</body></html>')
documents = ''.join(parts)
os.chdir("E:\\Personal\\Projects\\Mafatih\\Aviny - Test\\Result")
with open("AllFile.htm", "w") as out:
    out.write(documents)
def fix_symbol_store_path(symbol_store_path = None, remote = True, force = False):
    """
    Fix the symbol store path. Equivalent to the C{.symfix}
    command in Microsoft WinDbg.

    If the symbol store path environment variable hasn't been set, this
    method will provide a default one.

    @type  symbol_store_path: str or None
    @param symbol_store_path: (Optional) Symbol store path to set.

    @type  remote: bool
    @param remote: (Optional) Defines the symbol store path to set when the
        C{symbol_store_path} is C{None}.

        If C{True} the default symbol store path is set to the Microsoft
        symbol server. Debug symbols will be downloaded through HTTP.
        This gives the best results but is also quite slow.

        If C{False} the default symbol store path is set to the local
        cache only. This prevents debug symbols from being downloaded and
        is faster, but unless you've installed the debug symbols on this
        machine or downloaded them in a previous debugging session, some
        symbols may be missing.

        If the C{symbol_store_path} argument is not C{None}, this argument
        is ignored entirely.

    @type  force: bool
    @param force: (Optional) If C{True} the new symbol store path is set
        always. If C{False} the new symbol store path is only set if
        missing.

        This allows you to call this method preventively to ensure the
        symbol server is always set up correctly when running your script,
        but without messing up whatever configuration the user has.

    Example::
        from winappdbg import Debug, System

        def simple_debugger( argv ):

            # Instance a Debug object
            debug = Debug( MyEventHandler() )
            try:

                # Make sure the remote symbol store is set
                System.fix_symbol_store_path(remote = True, force = False)

                # Start a new process for debugging
                debug.execv( argv )

                # Wait for the debugee to finish
                debug.loop()

            # Stop the debugger
            finally:
                debug.stop()

    @rtype:  str or None
    @return: The previously set symbol store path if any,
        otherwise returns C{None}.
    """
    try:
        if symbol_store_path is None:
            # Pick the first local symbol cache directory that exists,
            # falling back to the current directory.
            local_path = "C:\\SYMBOLS"
            if not ntpath.isdir(local_path):
                local_path = "C:\\Windows\\Symbols"
                if not ntpath.isdir(local_path):
                    local_path = ntpath.abspath(".")
            if remote:
                # Cache locally, then fall back to the Microsoft server.
                symbol_store_path = (
                    "cache*;SRV*" + local_path + "*"
                    "http://msdl.microsoft.com/download/symbols"
                )
            else:
                symbol_store_path = "cache*;SRV*" + local_path
        previous = os.environ.get("_NT_SYMBOL_PATH", None)
        # Only overwrite the user's configuration when forced or unset.
        if not previous or force:
            os.environ["_NT_SYMBOL_PATH"] = symbol_store_path
        return previous
    # NOTE(review): Python 2 syntax; the broad except deliberately degrades
    # to a RuntimeWarning so debugging can proceed without symbols.
    except Exception, e:
        warnings.warn("Cannot fix symbol path, reason: %s" % str(e),
                      RuntimeWarning)
def gsimcli(stations_file, stations_header, no_data, stations_order,
            correct_method, detect_prob, detect_flag, detect_save, exe_path,
            par_file, outfolder, purge_sims, rad=0, correct_skew=None,
            correct_percentile=None, optional_stats=None, cores=None,
            dbgfile=None, print_status=False, skip_dss=False):
    """Main routine to run GSIMCLI homogenisation procedure in a set of
    stations.

    Parameters
    ----------
    stations_file : string or PointSet object
        Stations file path or PointSet instance.
    stations_header : boolean
        Stations file has the GSLIB standard header lines.
    no_data : number
        Missing data value.
    stations_order : array_like
        Stations' ID's in the order that they will be homogenised.
    correct_method : {'mean', 'median', 'skewness', 'percentile'} string,
        default 'mean'
        Method for the inhomogeneities correction:
            - mean: replace detected irregularities with the mean of
              simulated values;
            - median: replace detected irregularities with the median of
              simulated values;
            - skewness: use the sample skewness to decide whether detected
              irregularities will be replaced by the mean or by the median
              of simulated values.
            - percentile : replace detected irregularities with the
              percentile `100 * (1 - p)`, which is the same value used in
              the detection.
    detect_prob : float
        Probability value to build the detection interval centred in the
        local PDF.
    detect_flag : boolean
        DEPRECATED
    detect_save : boolean
        Save generated files in the procedure\: intermediary PointSet files
        containing candidate and reference stations, homogenised and
        simulated values, and DSS parameters files.
    exe_path : string
        DSS binary file path.
    par_file : string or DssParam object
        DSS parameters file path or DssParam instance.
    outfolder : string
        Directory to save the results.
    purge_sims : boolean
        Remove all simulated maps in the end.
    rad : number, default 0
        Tolerance radius used to search for neighbour nodes, used to
        calculate the local pdf's.
    correct_skew : float, optional
        Samples skewness threshold, used if `correct_method == 'skewness'`.
    correct_percentile: float, optional
        p value used if correct_method == 'percentile'.
    optional_stats :
        (undocumented) Passed through to ``hmg.detect`` -- TODO describe.
    cores : int, optional
        Maximum number of cores to be used. If None, it will use all
        available cores.
    dbgfile : string, optional
        Debug output file path. Write DSS console output to a file.
    print_status : boolean, default False
        Print some messages with the procedure status while it is running.
    skip_dss : boolean, default False
        Do not run DSS. Choose if the simulated maps are already in place
        and only the homogenisation process is needed.

    Returns
    -------
    homogenised_file : string
        Homogenised data file path. The generated file name ends with
        *_homogenised_data.csv*.
    dnumber_list : list of int
        Number of detected breakpoints in each candidate station.
    fnumber_list : list of int
        Number of missing data that were interpolated in each candidate
        station.
    """
    # `is_alive` is a module-level flag another thread can clear to abort.
    global is_alive
    if not cores or cores > mp.cpu_count():
        cores = mp.cpu_count()
    if print_status:
        print 'GSIMCLI using {0} cores'.format(cores)
    # load data and prepare the iterative process
    if isinstance(stations_file, gr.PointSet):
        stations_pset = stations_file
    else:
        stations_pset = gr.PointSet()
        stations_pset.load(stations_file, nd=no_data, header=stations_header)
    if isinstance(par_file, pdss.DssParam):
        dsspar = par_file
    else:
        dsspar = pdss.DssParam()
        dsspar.load_old(par_file)  # TODO: old
    dnumber_list = list()
    fnumber_list = list()
    # workaround for Qt forcing backslash
    if os.name == "nt":
        exe_path = ntpath.abspath(exe_path)
    commonpath = os.path.commonprefix((outfolder, exe_path))
    # start iterative process: homogenise one candidate station at a time.
    for i in xrange(len(stations_order)):
        if not is_alive:
            raise SystemError("process aborted")
        if print_status:
            print ('Processing candidate {0} out of {1} with ID {2}.'.
                   format(i + 1, len(stations_order), stations_order[i]))
        print "STATUS: candidate {0}".format(stations_order[i])
        # manage stations: split the current candidate from its references.
        candidate, references = hmg.take_candidate(stations_pset,
                                                   stations_order[i])
        # prepare and launch DSS
        basename = os.path.basename(outfolder)
        refname = basename + '_references_' + str(i) + '.prn'
        outname = basename + '_dss_map_st' + str(i) + '_sim.out'  # TODO: +1
        parname = basename + '_dss_par_st' + str(i) + '.par'
        candname = basename + '_candidate_' + str(i) + '.prn'
        reffile = os.path.join(outfolder, refname)
        outfile = os.path.join(outfolder, outname)
        # DSS runs with Windows-style paths relative to the common prefix.
        reffile_nt = ntpath.relpath(os.path.join(outfolder, refname),
                                    commonpath)
        outfile_nt = ntpath.relpath(os.path.join(outfolder, outname),
                                    commonpath)
        # workaround for mp_exec, it needs one less directory in the tree
        reffile_nt = reffile_nt[reffile_nt.index('\\') + 1:]
        outfile_nt = outfile_nt[outfile_nt.index('\\') + 1:]
        parfile = os.path.join(outfolder, parname)
        references.save(psetfile=reffile, header=False)
        if detect_save:
            candfile = os.path.join(outfolder, candname)
            candidate.save(psetfile=candfile, header=True)
        if not skip_dss:
            dsspar.update(['datapath', 'output'], [reffile_nt, outfile_nt])
            dsspar.save_old(parfile)  # TODO: old
            oldpar = pdss.DssParam()
            oldpar.load_old(parfile)
            oldpar.nsim = 1
            purge_temp = False
            # Launch realizations in batches of `cores` parallel runs.
            for sim in xrange(1, dsspar.nsim + 1, cores):
                if not is_alive:
                    raise SystemError("process aborted")
                if print_status:
                    print ('[{0}/{1}] Working on realization {2}'.
                           format(i + 1, len(stations_order), sim))
                print "STATUS: realization {0}".format(sim)
                # Last batch: let mp_exec purge its temporary files.
                if sim >= dsspar.nsim + 1 - cores:
                    purge_temp = True
                dss.mp_exec(dss_path=exe_path, par_path=oldpar, dbg=dbgfile,
                            output=outfile_nt, simnum=sim, cores=cores,
                            purge=purge_temp, totalsim=dsspar.nsim)
        # prepare detection
        intermediary_files = os.path.join(outfolder, basename +
                                          '_homogenised_' + str(i) + '.prn')
        dims = [dsspar.xx[0], dsspar.yy[0], dsspar.zz[0]]
        first_coord = [dsspar.xx[1], dsspar.yy[1], dsspar.zz[1]]
        cells_size = [dsspar.xx[2], dsspar.yy[2], dsspar.zz[2]]
        sim_maps = gr.GridFiles()
        sim_maps.load(outfile, dsspar.nsim, dims, first_coord, cells_size,
                      no_data, headerin=0)
        # detect and fix inhomogeneities
        if print_status:
            print 'Detecting inhomogeneities...'
        homogenisation = hmg.detect(grids=sim_maps, obs_file=candidate,
                                    method=correct_method, prob=detect_prob,
                                    flag=detect_flag, save=detect_save,
                                    outfile=intermediary_files, header=True,
                                    skewness=correct_skew, rad=rad,
                                    percentile=correct_percentile,
                                    optional_stats=optional_stats)
        homogenised, detected_number, filled_number = homogenisation
        if print_status:
            print 'Inhomogeneities detected: {0}'.format(detected_number)
        dnumber_list.append(detected_number)
        fnumber_list.append(filled_number)
        # prepare next iteration: fold the homogenised candidate back in.
        stations_pset = hmg.update_station(stations_pset, homogenised)
        if not detect_save:
            [os.remove(fpath) for fpath in
             [reffile, parfile]]  # , dsspar.transfile]]
        if purge_sims:
            sim_maps.purge()
        else:
            sim_maps.dump()
    # save results
    if print_status:
        print 'Process completed.'
        print 'Detections: ', ', '.join(map(str, dnumber_list))
        print 'Missing data filled: ', ', '.join(map(str, fnumber_list))
        print 'Saving results...'
    homogenised_file = os.path.join(outfolder, basename +
                                    '_homogenised_data.csv')
    hmg.save_output(pset_file=stations_pset, outfile=homogenised_file,
                    fformat='gsimcli', header=True, save_stations=True)
    return homogenised_file, dnumber_list, fnumber_list