def calcularDiff(ds, **kwargs):
    """Diff the SFTP file listing against the already-loaded file list.

    Reads the SFTP listing (BASH_OUT) and the list of already-loaded files
    (FILES_LOADED) into the module-level dicts ``sftp_files`` / ``loaded``,
    writes every not-yet-loaded name to FILES_UNLOADED, and returns a
    summary string with the count of files pending load.

    NOTE: lines are stored with their trailing newline, so keys in both
    dicts include '\\n' — presumably intentional since both files are
    written the same way; confirm.
    """
    print(ds)
    # Load the SFTP file names into a dict for O(1) membership checks.
    with open(config['FILES']['BASH_OUT']) as bo:
        for line in bo:
            sftp_files[line] = True

    # Create the loaded-files bookkeeping file if it does not exist yet.
    if not ntpath.exists(config['FILES']['FILES_LOADED']):
        open(config['FILES']['FILES_LOADED'], 'a').close()
    with open(config['FILES']['FILES_LOADED']) as fl:
        for line in fl:
            loaded[line] = True

    # Remove the already-loaded entries and persist only the pending ones,
    # so paramiko/bash can later download just what is missing.
    if not ntpath.exists(config['FILES']['FILES_UNLOADED']):
        open(config['FILES']['FILES_UNLOADED'], 'a').close()
    with open(config['FILES']['FILES_UNLOADED'], 'w') as f_unloaded:
        # Iterate over a copy: the original popped from the dict while
        # iterating .keys(), which raises RuntimeError on Python 3, and
        # used the long-removed dict.has_key().
        for key in list(sftp_files.keys()):
            if key in loaded:
                sftp_files.pop(key)
            else:
                f_unloaded.write(key)
    # The explicit f_unloaded.close() was redundant inside the 'with'.
    return "Cantidad de archivos no cargados: {}".format(len(sftp_files))
def extract_ui_coordinates(self, label='', reference=False):
    """
    Extract position of all UI components

    Extracts the bounding rectangles of all UI components in the current
    Polaris screen. Their reference position is then calculated based on
    the top-left corner (0, 0) coordinate.

    :param label: name under which to store the coordinates when
        ``reference`` is True (required in that case)
    :param reference: when True, persist the extracted coordinates into
        the JSON file at ``self.reference_ui_path`` under ``label``
    :returns: list of [x, y, w, h] boxes, each normalised by
        ``self.max_resolution`` and rounded to 4 decimals
    """
    ui_coordinates = list()
    # Bounding rect of the whole window; used as the reference frame.
    # NOTE(review): on Python 3 `map` returns an iterator, so the
    # reference_box[...] indexing below would fail — presumably this runs
    # on Python 2 where map() yields a list; confirm. Also note this rect
    # is not .split(',') like item_bounding_box below — verify
    # get_boundingrect's return type for the window object.
    reference_box = map(
        int,
        PolarisInterface.webdriver.get_boundingrect(
            PolarisInterface.window_ui))
    #logger.info('MAIN BOX {0}'.format(reference_box))
    for ui in PolarisInterface.webdriver.find_elements_by_xpath('.//*'):
        item_bounding_box = PolarisInterface.webdriver.get_boundingrect(ui)
        if item_bounding_box:
            # "x,y,w,h" string -> ints
            item_bounding_box = map(int, item_bounding_box.split(','))
            # Keep only elements whose top-left corner lies inside the
            # reference window rectangle.
            if reference_box[0] < item_bounding_box[0] <= reference_box[0] + reference_box[2] and \
                    reference_box[1] < item_bounding_box[1] <= reference_box[1] + reference_box[3]:
                # Translate to the window origin and normalise by the
                # maximum resolution so layouts compare across screens.
                relative_item_box = [
                    round(
                        float(item_bounding_box[0] - reference_box[0]) /
                        self.max_resolution[0], 4),
                    round(
                        float(item_bounding_box[1] - reference_box[1]) /
                        self.max_resolution[1], 4),
                    round(
                        float(item_bounding_box[2]) /
                        self.max_resolution[0], 4),
                    round(
                        float(item_bounding_box[3]) /
                        self.max_resolution[1], 4)
                ]
                ui_coordinates.append(relative_item_box)
                #logger.info('RELATIVE: {0} | ACTUAL: {1}'.format(relative_item_box, item_bounding_box), also_console=True)
    if reference:
        assert label, AssertionError('Please enter a reference label')
        json_ref = dict()
        # Save the json entry to the reference coordinate path
        target_path = ntpath.dirname(self.reference_ui_path)
        if not ntpath.exists(target_path):
            os.makedirs(target_path)
        # Merge into the existing reference file rather than overwriting
        # other labels.
        if ntpath.exists(self.reference_ui_path):
            with open(self.reference_ui_path, 'r') as f:
                data = f.read()
                json_ref = json.loads(data)
        json_ref[label] = ui_coordinates
        with open(self.reference_ui_path, 'w') as f:
            f.write(json.dumps(json_ref))
    return ui_coordinates
def take_system_snapshot(self, snapshot, polaris_logs='true',
                         configuration_data='true', ecu_data='true'):
    """
    Takes a system snapshot

    Takes a system snapshot. Currently limited to taking all ECU data,
    if the option is selected

    :param snapshot: output path (without extension) for the snapshot zip
    :param polaris_logs: 'true'/'false' string — include Polaris logs
    :param configuration_data: 'true'/'false' string — include config data
    :param ecu_data: 'true'/'false' string — include ECU data
    """
    logger.warn('take_system_snapshot DEPRECATED')
    # Remove a stale snapshot zip if present; otherwise make sure the
    # output directory exists.
    if ntpath.exists('{0}.zip'.format(ntpath.abspath(snapshot))):
        os.remove('{0}.zip'.format(ntpath.abspath(snapshot)))
    else:
        _dir = ntpath.dirname(ntpath.abspath(snapshot))
        if not ntpath.exists(_dir):
            os.makedirs(_dir)
    # Options arrive as strings (Robot Framework style); normalise case.
    _plogs = polaris_logs.lower()
    _cdata = configuration_data.lower()
    _edata = ecu_data.lower()
    # One entry per selected option; compared against the 'Complete'
    # indicators at the end.
    _complete = list()
    PolarisInterface.navi.go_to('take system snapshot')
    if _plogs == 'true' or _cdata == 'true' or _edata == 'true':
        # Custom snapshot: tick each requested data category.
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(
                ui_ref.mapping['backup custom snapshot']['id']))
        if _plogs == 'true':
            _complete.append(_plogs)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(
                    ui_ref.mapping['backup polaris logs']['id']))
        if _cdata == 'true':
            _complete.append(_cdata)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(
                    ui_ref.mapping['backup config data']['id']))
        if _edata == 'true':
            _complete.append(_edata)
            PolarisInterface.webdriver.click(
                PolarisInterface.webdriver.find_element_by_accessibility_id(
                    ui_ref.mapping['backup ecu data']['id']))
    else:
        # Nothing selected: fall back to the default snapshot preset.
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(
                ui_ref.mapping['backup default snapshot']['id']))
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['backup start']['id']))
    self.file_dialog(ntpath.abspath(snapshot), 'Save')
    # NOTE(review): fixed 60s wait — presumably the snapshot needs up to
    # a minute; a polling wait would be more robust. Confirm.
    sleep(60)
    assert len(_complete) <= len(
        PolarisInterface.webdriver.find_elements_by_name('Complete')), \
        AssertionError('Unable to verify successful system snapshot')
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['backup close']['id']))
def configure(args):
    """Populate the configuration objects.

    :type args: Namespace
    :param args: Parsed arguments from argparse
    :returns: The populated CONF object
    :raises ConfigurationException: if no configuration file was found

    Note: the original docstring documented a ``config_file`` parameter
    that does not exist; the config paths come from ``args.config`` or
    the platform defaults.
    """
    # Add custom configuration file path
    config_files = []
    if args.config:
        for config in args.config:
            config = ospath.expanduser(config)
            if ospath.exists(config):
                config_files.append(config)
                LOGGER.info('Added "{}" to config file list'.format(config))
                continue
            LOGGER.warning('Configuration file {} not found.'.format(config))
    else:
        # Platform defaults: system-wide, per-user, then current directory.
        if sys.platform in ['win32', 'cygwin']:
            config_files.append('C:\\cumulus.conf')
            config_files.append(ospath.expanduser('~\\.cumulus.conf'))
            config_files.append('{}\\cumulus.conf'.format(os.curdir))
        else:
            config_files.append('/etc/cumulus.conf')
            config_files.append(ospath.expanduser('~/.cumulus.conf'))
            config_files.append('{}/cumulus.conf'.format(os.curdir))

    # Get the include option from the general section
    config_files = __get_include_files(config_files) + config_files

    # Read config file
    conf_file_found = False
    for conf_file in config_files:
        if ospath.exists(conf_file):
            conf_file_found = True
            LOGGER.info('Reading configuration from {}'.format(conf_file))
    if not conf_file_found:
        raise ConfigurationException(
            'No configuration file found. Looked for {}'.format(
                ', '.join(config_files)))

    config = SafeConfigParser()
    config.read(config_files)

    # The original wrapped these in a no-op `except ConfigurationException:
    # raise`; the exception propagates identically without it.
    _populate_general(args, config)
    _populate_environments(args, config)
    _populate_stacks(args, config)
    _populate_bundles(args, config)

    return CONF
def copy_to_target(self):
    """
    Copies the files identified to the target identified
    :return:
    """
    if not ntpath.exists(self.target_root):
        os.makedirs(self.target_root)
    for fname in self.files:
        # Paths in self.files may carry leading/trailing backslashes.
        relative = fname.strip('\\')
        destination_dir = pjoin(self.target_root, ntpath.dirname(relative))
        if not ntpath.exists(destination_dir):
            os.makedirs(destination_dir)
        shutil.copy2(pjoin(self.source_root, relative),
                     pjoin(self.target_root, relative))
def startJob(self, path):
    """Submit the job file at *path* to condor.

    :param path: path to the condor submit file
    :returns: False when condor_submit reported failure; otherwise the
        implicit None (original behaviour preserved)
    :raises IOError: if *path* does not exist
    """
    # Check that the file specified by path exists.
    absname = ntpath.abspath(path)
    # Bug fix: the original wrapped ntpath.exists() in try/except, but
    # exists() returns a bool and does not raise — the check was dead
    # code and missing files were silently submitted anyway.
    if not ntpath.exists(absname):
        logging.error("The path specified does not exist")
        raise IOError("The path specified does not exist: {0}".format(absname))
    # Submit to condor & return if false
    retVal = Manager.callCondor(self, ["condor_submit", path])
    if retVal == False:
        return False
def _remove_old_files():
    """ Remove files from previous bundle

    Reads the cache file (one path per line, written by
    _store_bundle_files), deletes each listed file/link and prunes any
    directories left empty, then removes the cache file itself.
    """
    cache_file = '/var/local/cumulus-bundle-handler.cache'
    if sys.platform in ['win32', 'cygwin']:
        if not ospath.exists('C:\\cumulus\\cache'):
            os.makedirs('C:\\cumulus\\cache')
        cache_file = 'C:\\cumulus\\cache\\cumulus-bundle-handler.cache'

    if not ospath.exists(cache_file):
        LOGGER.info('No previous bundle files to clean up')
        return

    LOGGER.info('Removing old files and directories')

    with open(cache_file, 'r') as file_handle:
        for line in file_handle.readlines():
            line = line.replace('\n', '')

            if not ospath.exists(line):
                continue

            if ospath.isdir(line):
                try:
                    # removedirs only deletes empty directories; non-empty
                    # ones are deliberately left in place.
                    os.removedirs(line)
                    LOGGER.debug('Removing directory {}'.format(line))
                except OSError:
                    pass
            elif ospath.isfile(line):
                LOGGER.debug('Removing file {}'.format(line))
                os.remove(line)

                # Prune the parent directory chain if now empty.
                try:
                    os.removedirs(ospath.dirname(line))
                except OSError:
                    pass
            elif ospath.islink(line):
                LOGGER.debug('Removing link {}'.format(line))
                os.remove(line)

                try:
                    os.removedirs(ospath.dirname(line))
                except OSError:
                    pass
            else:
                LOGGER.warning('Unknown file type {}'.format(line))

    # Remove the cache file when done
    os.remove(cache_file)
def analyzeData(self):
    """Run the acoustic analysis for the selected files, then swap the
    setup window for the data window.

    Reads the paths and options from the UI widgets on ``self`` and
    stores the per-channel results in ``self.paramL`` / ``self.paramR``
    / ``self.nominalBands``.
    """
    # Analyze file data, hide setup window and show data window
    impulsePath = self.impulsePathBox.text()
    filterPath = self.filterPathBox.text()
    try:
        # For Impulse Response data
        if self.dataType == "IR" and ntpath.exists(impulsePath):
            self.paramL, self.paramR, self.nominalBands = ap.analyzeFile(
                impulsePath, None, self.b, self.truncate, self.smoothing,
                int(self.windowText.text()))
        # For Sweep data
        elif self.dataType == "sweep" and ntpath.exists(impulsePath) and ntpath.exists(filterPath):
            self.paramL, self.paramR, self.nominalBands = ap.analyzeFile(
                impulsePath, filterPath, self.b, self.truncate,
                self.smoothing, int(self.windowText.text()))
        # If file path doesn't exist, show error message
        else:
            error_dialog = QErrorMessage()
            error_dialog.showMessage('Invalid file path')
    except:
        # NOTE(review): bare except hides the real failure, and execution
        # falls through to the code below even on error, so self.paramL
        # may be missing/stale — consider narrowing the except and
        # returning early after showing the dialog.
        error_dialog = QErrorMessage()
        error_dialog.showMessage('Unknown error. Please try again')

    # Replace zeros (errors) with "--"
    def replaceZeros(myl):
        myl = list(myl)
        for idx, value in enumerate(myl):
            if value == 0:
                myl[idx] = "--"
        return myl

    def replaceParam(param):
        param.EDT = replaceZeros(param.EDT)
        param.T20 = replaceZeros(param.T20)
        param.T30 = replaceZeros(param.T30)
        param.EDTTt = replaceZeros(param.EDTTt)
        return param

    self.paramL = replaceParam(self.paramL)
    # Right channel is None for mono input.
    if self.paramR is not None:
        self.paramR = replaceParam(self.paramR)

    # Generate data window
    self.dataWindow = DataWindow(self.paramL, self.paramR,
                                 self.nominalBands, self.b, self.filePath,
                                 self.smoothing)
    # Hide setup and show data
    self.hide()
    self.dataWindow.show()
def read_av_info(id): if type(id) != type(0): id = int(id) if 0<=id<=174999: this_dir = av_dir elif 175000<=id<=290998: this_dir = av_dir2 elif 290999<=id<=469999: this_dir = av_dir3 elif 470000<=id<=539999: this_dir = av_dir4 else: this_dir = av_dir5 FILE = this_dir+'\\'+str(id)+'.json' FILE_EXIST = ntpath.exists(FILE) if FILE_EXIST: f = open(FILE,'r') jsoncon = f.readline() f.close() else: return 404 di = json.loads(jsoncon) if di.has_key('code') and di['code'] == -403: return 404 elif di.has_key('code') and di['code'] == -503: raise NameError, str(id) con = GetVideoInfo(id) with open(FILE) as f: print >> f,con di = json.loads(jsoncon) return di
def get_file(self, extended_path, revision=HEAD, **kwargs):
    """Return content of file or list content of directory"""
    if not extended_path:
        raise FileNotFoundError(extended_path, revision)

    if revision == PRE_CREATION:
        return ''

    if self.viewtype != self.VIEW_SNAPSHOT:
        # Dynamic view: the filesystem can be queried directly.
        if cpath.isdir(extended_path):
            return self.client.list_dir(extended_path, revision)
        if cpath.exists(extended_path):
            return self.client.cat_file(extended_path, revision)
        raise FileNotFoundError(extended_path, revision)

    # Snapshot view: strip the version spec to get the element path.
    # The trailing '@@' is required so the path names the element itself.
    element_path = extended_path.rsplit('@@', 1)[0] + '@@'
    element_kind = self._get_object_kind(element_path)

    if element_kind == 'directory element':
        raise SCMError('Directory elements are unsupported.')
    if element_kind == 'file element':
        return self.client.cat_file(extended_path, revision)

    raise FileNotFoundError(extended_path, revision)
def get_file(self, extended_path, revision=HEAD):
    """Return content of file or list content of directory"""
    if not extended_path:
        raise FileNotFoundError(extended_path, revision)

    if revision == PRE_CREATION:
        return ''

    if self.viewtype == self.VIEW_SNAPSHOT:
        # Strip the version specifier; the trailing '@@' must remain so
        # the path still names the element itself.
        element = extended_path.rsplit('@@', 1)[0] + '@@'
        element_kind = self._get_object_kind(element)

        if element_kind == 'directory element':
            raise SCMError('Directory elements are unsupported.')

        if element_kind != 'file element':
            raise FileNotFoundError(extended_path, revision)

        result = self.client.cat_file(extended_path, revision)
    elif cpath.isdir(extended_path):
        result = self.client.list_dir(extended_path, revision)
    elif cpath.exists(extended_path):
        result = self.client.cat_file(extended_path, revision)
    else:
        raise FileNotFoundError(extended_path, revision)

    return result
def renames(old, new):
    """renames(old, new)

    Super-rename: move *old* to *new*, creating any missing parent
    directories of *new* first. Afterwards, directories on the rightmost
    path segments of *old* are pruned until a nonempty directory (or the
    whole path) is reached.

    Note: this function can fail with the new directory structure made
    if you lack permissions needed to unlink the leaf directory or file.
    """
    target_dir, target_name = path.split(new)
    if target_dir and target_name and not path.exists(target_dir):
        makedirs(target_dir)
    rename(old, new)
    source_dir, source_name = path.split(old)
    if source_dir and source_name:
        try:
            # Prune now-empty directories left behind by the move.
            removedirs(source_dir)
        except OSError:
            pass
def _store_bundle_files(filenames, extraction_path):
    """ Store a list of bundle paths in the local cache file.

    :type filenames: list
    :param filenames: List of full paths for all paths in the bundle
    :type extraction_path: str
    :param extraction_path: Path to prefix all filenames with
    """
    cache_file = '/var/local/cumulus-bundle-handler.cache'
    if sys.platform in ['win32', 'cygwin']:
        if not ospath.exists('C:\\cumulus\\cache'):
            os.makedirs('C:\\cumulus\\cache')
        cache_file = 'C:\\cumulus\\cache\\cumulus-bundle-handler.cache'

    # 'with' replaces the original try/finally around an explicit close().
    with open(cache_file, 'a') as file_handle:
        for filename in filenames:
            if not filename:
                continue
            # Prefix with the extraction path using the platform separator.
            if sys.platform in ['win32', 'cygwin']:
                filename = '{}\\{}'.format(extraction_path, filename)
            else:
                filename = '{}/{}'.format(extraction_path, filename)
            file_handle.write('{}\n'.format(filename))
        LOGGER.debug('Stored bundle information in {}'.format(cache_file))
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Super-mkdir; create a leaf directory and all intermediate ones.
    Works like mkdir, except that any intermediate path segment (not
    just the rightmost) will be created if it does not exist. If the
    target directory already exists, raise an OSError if exist_ok is
    False. Otherwise no exception is raised.  This is recursive.

    """
    head, tail = path.split(name)
    if not tail:
        # Path ended with a separator; split again for the real leaf.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except FileExistsError:
            # Another thread/process won the race creating the parent.
            pass
    # 'xxx/newdir/.' exists whenever 'xxx/newdir' exists, so a trailing
    # '.' component means there is nothing left to create.
    dot = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
    if tail == dot:
        return
    try:
        mkdir(name, mode)
    except OSError:
        # Cannot rely on checking for EEXIST: the OS could report EACCES
        # or EROFS instead, so check the directory's existence directly.
        if not exist_ok or not path.isdir(name):
            raise
def renames(old, new):
    """renames(old, new)

    Super-rename; create directories as necessary and delete any left
    empty. Creation of any intermediate directories needed to make the
    new pathname good is attempted first; after the rename, rightmost
    path segments of the old name are pruned until either the whole path
    is consumed or a nonempty directory is found.

    Note: this function can fail with the new directory structure made
    if you lack permissions needed to unlink the leaf directory or file.
    """
    new_head, new_tail = path.split(new)
    if new_head and new_tail and not path.exists(new_head):
        makedirs(new_head)
    rename(old, new)
    old_head, old_tail = path.split(old)
    if old_head and old_tail:
        try:
            # Prune now-empty directories left behind by the move.
            removedirs(old_head)
        except error:
            pass
def main(image_path, output_path, distance, display_size,
         display_resolution, display_h_angle, display_v_angle,
         observer_view_direction, overwrite):
    """Filter *image_path* as seen on a simulated display and save the
    result to *output_path*.

    Builds a display perpendicular to the observer at the given distance,
    size, angular offsets and pixel resolution, then applies
    ``filter_image`` with the observer either looking straight at the
    display or along ``observer_view_direction`` (when it has 3
    components). Skips the whole computation if the output file already
    exists and *overwrite* is falsy.
    """
    image = ndimage.imread(image_path)
    observer_pos = np.array([0, 0, 0])
    display = make_perpendicular_display(display_size[0], display_size[1],
                                         distance,
                                         math.radians(display_h_angle),
                                         math.radians(display_v_angle),
                                         display_resolution[0],
                                         display_resolution[1])
    # Direction from the observer to the display centre (look-at case).
    observer_display_dir = display_center(display) - observer_pos
    observer_direct = ObserverSpecification(observer_pos,
                                            observer_display_dir,
                                            DEFAULT_FOVEAL_RESOLUTION)
    # Early out before the (expensive) filtering when not overwriting.
    if (not overwrite) and ntpath.exists(output_path):
        print('Image already exists.')
        return
    if len(observer_view_direction) != 3:
        print('No valid viewing direction giving. Assuming observer looking '
              'directly at target.')
        out_image = filter_image(image, display, observer_direct)
    else:
        # Explicit viewing direction: wrap as a 1x3 array for the spec.
        observer_forward_dir = np.array([observer_view_direction])
        observer_forward = ObserverSpecification(observer_pos,
                                                 observer_forward_dir,
                                                 DEFAULT_FOVEAL_RESOLUTION)
        out_image = filter_image(image, display, observer_forward)
    # filter_image presumably returns a sequence; only the first entry is
    # saved — confirm.
    misc.imsave(output_path, out_image[0])
    print('Saved image', output_path)
def ensure_dir_exist(directory):
    """Terminate the process with exit code -1 (after printing a
    diagnostic) unless *directory* names an existing directory."""
    checks = (
        (ntpath.exists(directory), "Directory '{}' does not exists"),
        (ntpath.isdir(directory), "Directory name '{}' is not a directory"),
    )
    for ok, message in checks:
        if not ok:
            print(message.format(directory))
            exit(-1)
def ensure_file_exist(filename):
    """Terminate the process with exit code -1 (after printing a
    diagnostic) unless *filename* names an existing regular file."""
    checks = (
        (ntpath.exists(filename), "Filename '{}' does not exists"),
        (ntpath.isfile(filename), "Filename '{}' is not file"),
    )
    for ok, message in checks:
        if not ok:
            print(message.format(filename))
            exit(-1)
def setFiles(self, files):
    """Validate that every entry in *files* exists before storing the
    list on the instance; on the first missing path, print an error to
    stderr and return without touching ``self.files``."""
    for candidate in files:
        if not ntpath.exists(str(candidate)):
            sys.stderr.write(
                'Error: "{0}" does not exist - aborting\n'.format(
                    str(candidate)))
            sys.stderr.flush()
            return
    self.files = files
def _saveClassifiers(classifiers, path):
    """Persist *classifiers* to ``<CLASSIFIERS>/<path>/classifiers.pkl``
    via joblib, creating the destination folder first when needed."""
    parts = [config.CLASSIFIERS, path, 'classifiers.pkl']
    target_dir = ntpath.join(*parts[:-1])
    # Create folder if does not exist
    if not ntpath.exists(target_dir):
        os.makedirs(target_dir)
    # Save models
    joblib.dump(classifiers, ntpath.join(*parts))
def creteEmptyDir(folder):
    """Ensure *folder* exists and is empty.

    If the folder exists, clear the read-only bit on each entry, delete
    the entries, then remove and recreate the folder. (The 'crete' typo
    in the name is kept for caller compatibility.)
    """
    if ntpath.exists(folder):
        for entry in os.listdir(folder):
            entry_path = ntpath.join(folder, entry)
            # Make the entry writable so os.remove succeeds on read-only files.
            os.chmod(entry_path, stat.S_IWRITE)
            os.remove(entry_path)
        os.chmod(folder, stat.S_IWRITE)
        shutil.rmtree(folder)
        logging.debug('delete: {0}'.format(folder))
    os.makedirs(folder)
    logging.debug('create: {0}'.format(folder))
def get_file(self, extended_path, revision=HEAD, **kwargs):
    """Return content of file or list content of directory.

    Args:
        extended_path (unicode):
            The path of the element, including revision information.

        revision (reviewboard.scmtools.core.Revision, optional):
            Revision information. This will be either
            :py:data:`~reviewboard.scmtools.core.PRE_CREATION` (new
            file), or :py:data:`~reviewboard.scmtools.core.HEAD`
            (signifying to use the revision information included in
            ``extended_path``).

        **kwargs (dict, optional):
            Additional unused keyword arguments.

    Returns:
        bytes:
        The contents of the element.

    Raises:
        reviewboard.scmtools.errors.FileNotFoundError:
            The given ``extended_path`` did not match a valid element.

        reviewboard.scmtools.errors.SCMError:
            Another error occurred.
    """
    if not extended_path:
        raise FileNotFoundError(extended_path, revision)

    if revision == PRE_CREATION:
        return ''

    if self.viewtype != self.VIEW_SNAPSHOT:
        # Dynamic view: query the filesystem directly.
        if cpath.isdir(extended_path):
            return self.client.list_dir(extended_path)
        if cpath.exists(extended_path):
            return self.client.cat_file(extended_path)
        raise FileNotFoundError(extended_path)

    # Snapshot view: strip the version spec to get the element path.
    # The '@@' at the end of the path is required.
    element_path = extended_path.rsplit('@@', 1)[0] + '@@'
    element_kind = self._get_element_kind(element_path)

    if element_kind == 'directory element':
        raise SCMError('Directory elements are unsupported.')
    if element_kind == 'file element':
        return self.client.cat_file(extended_path)

    raise FileNotFoundError(extended_path)
def get_contigs_count(path):
    """Count the contigs in the FASTA file at *path*.

    Counts lines containing '>' (matching the behaviour of the original
    ``grep -c '>'``), returning 0 when the file does not exist or cannot
    be read.

    :param path: path to the FASTA file
    :returns: int -- number of lines containing '>'
    """
    result = 0
    if ntpath.exists(path):
        try:
            # Security/portability fix: the original interpolated `path`
            # into a shell command with shell=True (shell-injection risk,
            # Unix-only). Counting matching lines in Python is equivalent
            # to grep -c '>', which counts lines containing a match.
            with open(path) as handle:
                result = sum(1 for line in handle if '>' in line)
        except OSError:
            # Mirror the original's silent fallback to 0 on failure.
            result = 0
    return result
def main(argv):
    """Start argv[2:] as a new process whose parent is the process given
    by argv[1] (a numeric PID or an image filename). Python 2 code."""
    # Print the banner.
    print "SelectMyParent: Start a program with a selected parent process"
    print "by Mario Vilas (mvilas at gmail.com)"
    print "based on a Didier Stevens tool (https://DidierStevens.com)"
    print

    # Check the command line arguments.
    if len(argv) < 3:
        script = os.path.basename(argv[0])
        print "  %s <pid> <process.exe> [arguments]" % script
        return

    # Request debug privileges.
    system = System()
    system.request_debug_privileges()

    # Parse the parent process argument.
    try:
        dwParentProcessId = HexInput.integer(argv[1])
    except ValueError:
        dwParentProcessId = None
    if dwParentProcessId is not None:
        # Numeric PID given: verify it exists (unless it is our own PID).
        dwMyProcessId = win32.GetProcessId(win32.GetCurrentProcess())
        if dwParentProcessId != dwMyProcessId:
            system.scan_processes_fast()
            if not system.has_process(dwParentProcessId):
                print "Can't find process ID %d" % dwParentProcessId
                return
    else:
        # Otherwise treat argv[1] as an image name and resolve its PID,
        # refusing ambiguous matches.
        system.scan_processes()
        process_list = system.find_processes_by_filename(argv[1])
        if not process_list:
            print "Can't find process %r" % argv[1]
            return
        if len(process_list) > 1:
            print "Too many processes found:"
            for process, name in process_list:
                print "\t%d:\t%s" % (process.get_pid(), name)
            return
        dwParentProcessId = process_list[0][0].get_pid()

    # Parse the target process argument.
    filename = argv[2]
    if not ntpath.exists(filename):
        try:
            # Fall back to searching the system PATH for the executable.
            filename = win32.SearchPath(None, filename, '.exe')[0]
        except WindowsError, e:
            print "Error searching for %s: %s" % (filename, str(e))
            return
    argv = list(argv)
    argv[2] = filename
def change_song_name(folder):
    """Rename every ``.mp3`` in *folder* to its ID3 title tag (eyed3).

    NOTE(review): files whose tag lacks a title keep their name, and two
    files with the same title would collide on os.rename — confirm the
    desired behaviour. Python 2 code (print statements).
    """
    # folder = 'C:/Users/Arvind/Desktop/Newfolder/'
    folder = folder + '/'
    print "name changing" + folder
    for x in glob.glob(folder + '*.mp3'):
        if ntpath.exists(x):
            # print os.remove(x)
            print x
            file_name = eyed3.load(x)
            # Guard against files eyed3 could not parse a title from.
            if hasattr(file_name, 'tag'):
                if hasattr(file_name.tag, 'title'):
                    os.rename(x, folder + file_name.tag.title + '.mp3')
def read_user_info(id):
    """Return user info for *id* as a dict.

    Reads the first line of a cached ``<user_dir>\\<id>.json`` file when
    present, otherwise fetches live data via ``GetuserInfo`` (the fetch
    result is not written back to the cache).
    """
    if type(id) is not str:
        id = str(id)
    cache_path = user_dir + '\\' + id + '.json'
    if ntpath.exists(cache_path):
        handle = open(cache_path, 'r')
        raw = handle.readline()
        handle.close()
    else:
        raw = GetuserInfo(int(id))
    return json.loads(raw)
def main(argv):
    """Start argv[2:] with the process named/identified by argv[1] as its
    parent. Duplicate of the other SelectMyParent main; Python 2 code."""
    # Print the banner.
    print "SelectMyParent: Start a program with a selected parent process"
    print "by Mario Vilas (mvilas at gmail.com)"
    print "based on a Didier Stevens tool (https://DidierStevens.com)"
    print

    # Check the command line arguments.
    if len(argv) < 3:
        script = os.path.basename(argv[0])
        print "  %s <pid> <process.exe> [arguments]" % script
        return

    # Request debug privileges.
    system = System()
    system.request_debug_privileges()

    # Parse the parent process argument.
    try:
        dwParentProcessId = HexInput.integer(argv[1])
    except ValueError:
        dwParentProcessId = None
    if dwParentProcessId is not None:
        # Numeric PID: verify it exists (unless it is our own PID).
        dwMyProcessId = win32.GetProcessId(win32.GetCurrentProcess())
        if dwParentProcessId != dwMyProcessId:
            system.scan_processes_fast()
            if not system.has_process(dwParentProcessId):
                print "Can't find process ID %d" % dwParentProcessId
                return
    else:
        # Image name: resolve to a single PID, refusing ambiguous matches.
        system.scan_processes()
        process_list = system.find_processes_by_filename(argv[1])
        if not process_list:
            print "Can't find process %r" % argv[1]
            return
        if len(process_list) > 1:
            print "Too many processes found:"
            for process, name in process_list:
                print "\t%d:\t%s" % (process.get_pid(), name)
            return
        dwParentProcessId = process_list[0][0].get_pid()

    # Parse the target process argument.
    filename = argv[2]
    if not ntpath.exists(filename):
        try:
            # Fall back to searching the system PATH for the executable.
            filename = win32.SearchPath(None, filename, '.exe')[0]
        except WindowsError, e:
            print "Error searching for %s: %s" % (filename, str(e))
            return
    argv = list(argv)
    argv[2] = filename
def backup_system(self, path, timeout=15):
    """
    Backup a system

    Backup a system to the specified path.

    Variable
    *path* - location to export the offline site zip

    .. code:: robotframework

        *** Test Cases ***
        Sample
            Backup system    path=.//artifacts
    """
    # Only delete a pre-existing zip when Polaris runs on this machine,
    # since the path is local.
    if 'localhost' in PolarisInterface.hostname or '127.0.0.1' in PolarisInterface.hostname:
        if ntpath.exists('{0}.zip'.format(ntpath.abspath(path))):
            os.remove('{0}.zip'.format(ntpath.abspath(path)))
    # Open System Backup UI
    PolarisInterface.navi.go_to('backup system')
    # Find backup file Name textbox
    backupname_input = PolarisInterface.webdriver.find_element_by_accessibility_id(
        ui_ref.mapping['backup file']['id'])
    # Remove the filename from the Path and save it to Backupfilename
    backup_filename = ntpath.splitext(ntpath.basename(path))[0]
    # Append '*' to the Start and end of the path
    path = "*{0}*".format(ntpath.abspath(ntpath.dirname(path)))
    # Send the path
    backupname_input.send_keys(path)
    # Clear the field
    backupname_input.clear()
    # Send the File name
    backupname_input.send_keys(backup_filename)
    # Perform backup
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['wizard next']['id']))
    # Wait until the finish button is clickable (timeout in seconds).
    if timeout:
        PolarisInterface.webdriver.waitforclickable(
            ui_ref.mapping['wizard finish']['id'], int(timeout))
    # Finish the process
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['wizard finish']['id']))
def MoveFile(From, To, Inc):
    """Move *From* to *To*, de-duplicating the target name on collision.

    When *To* already exists, recurses with an incremented counter so the
    file lands at ``name (N).ext`` for the first free N.

    :param From: source path
    :param To: desired destination path
    :param Inc: collision counter; pass 0 on the initial call
    :returns: the final destination path actually used
    """
    res = str()
    OriTo = To
    if Inc > 0:
        # Insert " (N)" before the extension for this collision attempt.
        To = ntpath.splitext(To)[0] + " (" + str(Inc) + ")" + ntpath.splitext(
            To)[1]
    if ntpath.exists(To):
        res = MoveFile(From, OriTo, Inc + 1)
    else:
        os.rename(From, To)
        res = To
    # Bug fix: the original computed `res` but fell off the end and
    # returned None, so callers never learned the final path.
    return res
def assembleOutputPath(out_param, input_path):
    """Resolve the output .hex path.

    If *out_param* is truthy, return its absolute path, creating the
    output directory when missing; otherwise derive the output name from
    *input_path* by swapping its extension for '.hex'.

    :param out_param: user-supplied output path, or a falsy value
    :param input_path: path to the input .bin file
    :returns: the output file path
    """
    if out_param:
        # Make sure that output directory exists if output param exists
        abs_out_path = ntpath.abspath(out_param)
        output_dir, output_filename = ntpath.split(abs_out_path)
        if not ntpath.exists(output_dir):
            # Robustness fix: makedirs (not mkdir) so a nested output
            # directory can be created in one go.
            os.makedirs(output_dir)
        return abs_out_path
    # Default output is same directory and name as input .bin file.
    output_dir, input_filename = ntpath.split(input_path)
    # Generalisation: splitext replaces the original [:-4] slice, which
    # silently assumed a four-character '.bin' suffix.
    output_filename = ntpath.splitext(input_filename)[0] + '.hex'
    return ntpath.join(output_dir, output_filename)
def makedirs(name, mode=0777):
    """makedirs(path [, mode=0777])

    Super-mkdir; create a leaf directory and all intermediate ones.
    Works like mkdir, except that any intermediate path segment (not
    just the rightmost) will be created if it does not exist.  This is
    recursive.

    """
    head, tail = path.split(name)
    if not tail:
        # Trailing separator: split again to get the real leaf component.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        # Recursively create missing parents first.
        # NOTE(review): racy — a concurrent creator makes this raise;
        # later versions guard the recursion with an errno.EEXIST check.
        makedirs(head, mode)
    mkdir(name, mode)
def _diff(self, old_file, new_file, xpatches=None, unified=True):
    """Calculate the diff.

    Content should be a list of strings with no endl. Supports exclude
    patches (list of list of strings with no endl). If the content is
    None, it is assumed that the file is binary. The file names
    (new_file and old_file) are only to be used as a header for a diff.
    Returns None if the files are different and binary. Otherwise
    returns a difference as a list of strings with no lineseps. The
    binary files which are equal also return an empty string."""
    old_content = None
    new_content = None

    # The content should have line endings removed from it!
    if cpath.isdir(new_file):
        # read directory content; the trailing '' gives the listing a
        # final newline when joined.
        old_content = sorted(os.listdir(old_file)) + ['']
        new_content = sorted(os.listdir(new_file)) + ['']
    elif cpath.exists(new_file):
        # returns None for binary file
        old_content = read_text_file(old_file)
        new_content = read_text_file(new_file)
    else:
        logging.debug("File %s does not exist or access is denied."
                      % new_file)
        return None

    # check if binary files and if they differ
    if old_content is None or new_content is None:
        # NOTE(review): these open() handles are never closed; consider
        # `with open(...)` here.
        old_crc = zlib.crc32(open(old_file).read())
        new_crc = zlib.crc32(open(new_file).read())
        if old_crc != new_crc:
            return None
        else:
            return u''

    # check if we need to exclude anything from the diff; patches are
    # applied in reverse order, keeping the last successful result.
    if xpatches:
        for patch in reversed(xpatches):
            patched = self._patch(new_content, patch)
            if patched:
                new_content = patched

    return self._content_diff(old_content, new_content, old_file, new_file,
                              unified=unified)
def get_file(self, extended_path, revision=HEAD):
    """Return content of file or list content of directory"""
    if not extended_path:
        raise FileNotFoundError(extended_path, revision)

    if revision == PRE_CREATION:
        return ''

    # Directories yield a listing, files their content; anything else is
    # missing or inaccessible.
    if cpath.isdir(extended_path):
        return self.client.list_dir(extended_path, revision)

    if cpath.exists(extended_path):
        return self.client.cat_file(extended_path, revision)

    raise FileNotFoundError(extended_path, revision)
def _generate_local_md5hash(filename):
    """ Get the MD5 hash of a local file

    :type filename: str
    :param filename: Path to the file to read
    :returns: str -- MD5 of the file, or None if the file does not exist
    """
    if not ospath.exists(filename):
        logger.warning('Unable to generate MD5 of local file {}. '
                       'File does not exist.'.format(filename))
        return None

    # 'with' closes the handle deterministically (the original leaked it),
    # and 'checksum' avoids shadowing the builtin hash().
    with open(filename, 'rb') as file_obj:
        checksum = hashlib.md5(file_obj.read()).hexdigest()
    logger.debug('Generated md5 checksum for {} ({})'.format(
        ospath.basename(filename), checksum))
    return checksum
def _do_diff(self, changeset): """Generate a unified diff for all files in the given changeset. Args: changeset (list): A list of changes. Returns: dict: A dictionary containing a ``diff`` key. """ # Sanitize all changesets of version 0 before processing changeset = self._sanitize_version_0_changeset(changeset) diff = [] for old_file, new_file in changeset: dl = [] # cpath.isdir does not work for snapshot views but this # information can be found using `cleartool describe`. if self.viewtype == 'snapshot': # ClearCase object path is file path + @@ object_path = new_file.split('@@')[0] + '@@' output = execute(['cleartool', 'describe', '-fmt', '%m', object_path]) object_kind = output.strip() isdir = object_kind == 'directory element' else: isdir = cpath.isdir(new_file) if isdir: dl = self._diff_directories(old_file, new_file) elif cpath.exists(new_file) or self.viewtype == 'snapshot': dl = self._diff_files(old_file, new_file) else: logging.error('File %s does not exist or access is denied.', new_file) continue if dl: diff.append(b''.join(dl)) return { 'diff': b''.join(diff), }
def do_diff(self, changeset):
    """Generates a unified diff for all files in the changeset."""
    diff = []
    for old_file, new_file in changeset:
        if cpath.isdir(new_file):
            chunk = self.diff_directories(old_file, new_file)
        elif cpath.exists(new_file):
            chunk = self.diff_files(old_file, new_file)
        else:
            logging.error("File %s does not exist or access is denied."
                          % new_file)
            continue

        if chunk:
            diff.append(''.join(chunk))

    return (''.join(diff), None)
def import_floorplan(self, path, target=ui_ref.mapping['site explorer floor']['id']):
    """
    Import floor plan

    Import a floor plan

    :param path: path to the floor plan file to import
    :param target: name of the floor node in the site tree
    :raises AssertionError: if *path* does not exist
    """
    assert ntpath.exists(path), IOError('Unable to find input file {0}'.format(path))
    _properties_tab = ui_ref.mapping['configure properties tab']['id']
    _expander = ui_ref.mapping['configure floor expander']['id']
    _import_floor = ui_ref.mapping['configure floor plan import']['id']
    _site_tree = ui_ref.mapping['site tree']['id']
    _tree = PolarisInterface.webdriver.find_element_by_accessibility_id(_site_tree)
    # Select the floor node and open its properties.
    PolarisInterface.webdriver.click(_tree.find_element_by_name(target))
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(_properties_tab))
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(_expander))
    _import_floor_element = PolarisInterface.webdriver.find_element_by_accessibility_id(_import_floor)
    # The click may have collapsed the expander; toggle it back open.
    if not _import_floor_element.is_displayed():
        PolarisInterface.webdriver.click(
            PolarisInterface.webdriver.find_element_by_accessibility_id(_expander))
    PolarisInterface.webdriver.click(_import_floor_element)
    PolarisInterface.webdriver.find_element_by_name('File name:').send_keys(
        '{0}\n'.format(ntpath.abspath(path)))
    try:
        ui_list = PolarisInterface.webdriver.find_elements_by_id('1')
        for ui in ui_list:
            if ui.get_attribute('Name') in ('Save', 'Open'):
                # Bug fix: the original called `click(click())`, invoking
                # an undefined name `click`; the intent is to click the
                # matched Save/Open button itself.
                PolarisInterface.webdriver.click(ui)
                break
    except errorhandler.NoSuchElementException:
        # Dialog button not found: fall back to confirming via keyboard.
        PolarisInterface.webdriver.send_keys(Keys.ENTER)
        PolarisInterface.webdriver.send_keys(Keys.ENTER)
    PolarisInterface.webdriver.double_click(_tree.find_element_by_name(target))
def restore_system(self, path):
    """
    Restore a system

    Restore a system from a specified path.

    Variable
    *path* - location to export the offline site zip

    .. code:: robotframework

        *** Test Cases ***
        Sample
            Restore system    path=.//artifacts
    """
    # NOTE(review): this deletes <path>.zip before restoring when running
    # locally, mirroring backup_system's cleanup — confirm it should not
    # instead verify the archive exists.
    if 'localhost' in PolarisInterface.hostname or '127.0.0.1' in PolarisInterface.hostname:
        if ntpath.exists('{0}.zip'.format(ntpath.abspath(path))):
            os.remove('{0}.zip'.format(ntpath.abspath(path)))
    logger.info('Go to Burger Menu Restore System')
    # Open System Backup UI
    PolarisInterface.navi.go_to('restore system')
    # Click restore file button
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['restore backup file']['id']))
    # Manage the file dialog UI
    self.file_dialog(ntpath.abspath(path), 'Open')
    # Perform backup
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['wizard next']['id']))
    # Validate successful backup
    PolarisInterface.navi.verify_popup_message(color='green')
    # Finish the process
    PolarisInterface.webdriver.click(
        PolarisInterface.webdriver.find_element_by_accessibility_id(
            ui_ref.mapping['wizard ok']['id']))
def run_init_scripts(start=False, kill=False, other=False):
    """
    Execute scripts in /etc/cumulus-init.d or C:\\cumulus\\init.d

    Scripts are executed in sorted filename order; all S scripts run
    first, then all K scripts, then the rest (when the matching flag
    is set).

    :type start: bool
    :param start: Run scripts starting with S
    :type kill: bool
    :param kill: Run scripts starting with K
    :type other: bool
    :param other: Run scripts not starting with S or K
    """
    init_dir = '/etc/cumulus-init.d'
    if sys.platform in ['win32', 'cygwin']:
        init_dir = 'C:\\cumulus\\init.d'

    # Run the post install scripts provided by the bundle
    if not ospath.exists(init_dir):
        LOGGER.info('No init scripts found in {}'.format(init_dir))
        return

    LOGGER.info('Running init scripts from {}'.format(init_dir))

    # Absolute paths of the regular files, in sorted (execution) order.
    filenames = [
        ospath.join(init_dir, filename)
        for filename in sorted(os.listdir(init_dir))
        if ospath.isfile(ospath.join(init_dir, filename))
    ]

    # Three separate passes on purpose: S scripts must all run before
    # K scripts, which run before everything else.
    if start:
        for filename in filenames:
            if ospath.basename(filename).startswith('S'):
                _run_command(ospath.abspath(filename))

    if kill:
        for filename in filenames:
            if ospath.basename(filename).startswith('K'):
                _run_command(ospath.abspath(filename))

    if other:
        for filename in filenames:
            if not ospath.basename(filename).startswith(('K', 'S')):
                _run_command(ospath.abspath(filename))
def makedirs(name, mode=0777): """makedirs(path [, mode=0777]) Super-mkdir; create a leaf directory and all intermediate ones. Works like mkdir, except that any intermediate path segment (not just the rightmost) will be created if it does not exist. This is recursive. """ from errno import EEXIST head, tail = path.split(name) if not tail: head, tail = path.split(head) if head and tail and not path.exists(head): try: makedirs(head, mode) except OSError, e: # be happy if someone already created the path if e.errno != EEXIST: raise if tail == curdir: # xxx/newdir/. exists if xxx/newdir exists return
def _get_extraction_path(bundle_type):
    """
    Returns the path to where the bundle should be extracted

    :type bundle_type: str
    :param bundle_type: Bundle type to download
    :returns: str -- Path
    """
    # Platform-specific default extraction root.
    path = '/'
    if sys.platform in ['win32', 'cygwin']:
        path = 'C:\\'

    # Optional per-bundle-type overrides, one "type -> path" entry per
    # line.  The last matching entry wins (preserved from the original).
    bundle_paths = config.get('bundle-extraction-paths').split('\n')
    if bundle_paths:
        for line in bundle_paths:
            if not line:
                continue
            try:
                for_bundle_type, extraction_path = line.split('->')
                for_bundle_type = for_bundle_type.strip()
                extraction_path = extraction_path.strip()

                if for_bundle_type == bundle_type:
                    path = extraction_path
            except ValueError:
                LOGGER.error(
                    'Error parsing bundle-extraction-paths: {}'.format(line))
                sys.exit(1)

    # BUG FIX: the existence check used to run on the platform default
    # *before* the configured override was resolved, so a configured path
    # was never created (and the "Created" message was logged before the
    # directory actually existed).  Ensure the path we return exists.
    if not ospath.exists(path):
        os.makedirs(path)
        LOGGER.debug('Created extraction path {}'.format(path))

    LOGGER.debug('Determined bundle extraction path to {} for {}'.format(
        path, bundle_type))
    return path
def open(filename, flag, mode=0777):
    """open(filename, flag [, mode=0777]) -> fd

    Open a file (for low level IO).

    Java-backed (Jython) implementation: decodes the POSIX-style flag
    bits and validates the requested combination before the file is
    actually opened.
    """
    # Decode the individual flag bits.
    reading = flag & O_RDONLY
    writing = flag & O_WRONLY
    updating = flag & O_RDWR
    creating = flag & O_CREAT
    truncating = flag & O_TRUNC
    exclusive = flag & O_EXCL
    sync = flag & O_SYNC          # NOTE(review): decoded but unused in this excerpt
    appending = flag & O_APPEND   # NOTE(review): decoded but unused in this excerpt

    # O_WRONLY combined with O_RDWR is contradictory.
    if updating and writing:
        raise OSError(errno.EINVAL, strerror(errno.EINVAL), filename)
    # Without O_CREAT the file must already exist.
    if not creating and not path.exists(filename):
        raise OSError(errno.ENOENT, strerror(errno.ENOENT), filename)

    # Normalize: O_RDWR implies writing; otherwise default to reading.
    if not writing:
        if updating:
            writing = True
        else:
            reading = True

    if truncating and not writing:
        # Explicitly truncate, writing will truncate anyway
        FileIO(filename, 'w').close()

    if exclusive and creating:
        # O_EXCL|O_CREAT: creation must fail if the file already exists;
        # Java's createNewFile() provides the atomic create-if-absent.
        try:
            if not File(sys.getPath(filename)).createNewFile():
                raise OSError(errno.EEXIST, strerror(errno.EEXIST), filename)
        except java.io.IOException, ioe:
            raise OSError(ioe)
    # NOTE(review): as captured here the function never constructs or
    # returns a file descriptor; the remainder of the implementation is
    # presumably outside this excerpt -- confirm against the full module.
def _do_diff(self, changeset):
    """Generates a unified diff for all files in the changeset."""
    # Version-0 entries must be sanitized before any diffing happens.
    changeset = self._sanitize_version_0_changeset(changeset)

    diffs = []
    for old_file, new_file in changeset:
        # cpath.isdir does not work for snapshot views, but the same
        # information is available through `cleartool describe`.
        if self.viewtype == 'snapshot':
            # The ClearCase object path is the file path up to and
            # including the '@@' separator.
            object_path = new_file.split('@@')[0] + '@@'
            object_kind = execute(["cleartool", "describe", "-fmt", "%m",
                                   object_path]).strip()
            isdir = (object_kind == 'directory element')
        else:
            isdir = cpath.isdir(new_file)

        if isdir:
            chunk = self._diff_directories(old_file, new_file)
        elif self.viewtype == 'snapshot' or cpath.exists(new_file):
            chunk = self._diff_files(old_file, new_file)
        else:
            logging.error("File %s does not exist or access is denied."
                          % new_file)
            continue

        if chunk:
            diffs.append(''.join(chunk))

    return {
        'diff': ''.join(diffs),
    }
def do_diff(self, params):
    """Generate a unified diff for each (original, modified) file pair.

    *params* is a flat list [orig1, mod1, orig2, mod2, ...] and is
    consumed destructively, two entries at a time.

    Returns a (diff, parent_diff) tuple; parent diffs are not
    supported, so the second element is always None.
    """
    # Diff returns "1" if differences were found.
    # Add the view name and view type to the description
    o = []           # accumulated diff text, one chunk per file pair
    Feol = False     # NOTE(review): never used below; kept as-is
    while len(params) > 0:
        # Read both original and modified files.
        onam = params.pop(0)
        mnam = params.pop(0)
        file_data = []
        do_rem = False
        # If the filename length is greater than 254 char for windows,
        # we copied the file to a temp file
        # because the open will not work for path greater than 254.
        # This is valid for the original and
        # modified files if the name size is > 254.
        for filenam in (onam, mnam):
            if cpath.exists(filenam) and self.viewtype == "dynamic":
                do_rem = False
                fn = filenam
            elif len(filenam) > 254 or self.viewtype == "snapshot":
                # Long paths and snapshot views are read from a hashed
                # copy previously placed in the temp directory.
                fn = self.get_filename_hash(filenam)
                fn = cpath.join(tempfile.gettempdir(), fn)
                do_rem = True
            if cpath.isdir(filenam):
                # Directories are "diffed" by comparing sorted listings.
                content = [
                    '%s\n' % s
                    for s in sorted(os.listdir(filenam))
                ]
                file_data.append(content)
            else:
                fd = open(cpath.normpath(fn))
                fdata = fd.readlines()
                fd.close()
                file_data.append(fdata)
                # If the file was temp, it should be removed.
                if do_rem:
                    os.remove(filenam)
        modi = file_data.pop()
        orig = file_data.pop()
        # For snapshot views, the local directories must be removed because
        # they will break the diff on the server. Just replacing
        # everything before the view name (including the view name) for
        # vobs do the work.
        if (self.viewtype == "snapshot"
            and (sys.platform.startswith('win')
                 or sys.platform.startswith('cygwin'))):
            vinfo = self.viewinfo.rstrip("\r\n")
            mnam = "c:\\\\vobs" + mnam[mnam.rfind(vinfo) + len(vinfo):]
            onam = "c:\\\\vobs" + onam[onam.rfind(vinfo) + len(vinfo):]
        # Call the diff lib to generate a diff.
        # The dates are bogus, since they don't matter anyway.
        # The only thing is that two spaces are needed to the server
        # so it can identify the headers correctly.
        # NOTE(review): the date strings below carry a single leading
        # space, which contradicts the comment above -- confirm whether
        # two spaces are required by the server.
        diff = difflib.unified_diff(orig, modi, onam, mnam,
                                    ' 2002-02-21 23:30:39.942229878 -0800',
                                    ' 2002-02-21 23:30:50.442260588 -0800',
                                    lineterm=' \n')
        # Transform the generator output into a string output
        # Use a comprehension instead of a generator,
        # so 2.3.x doesn't fail to interpret.
        diffstr = ''.join([str(l) for l in diff])
        # Workaround for the difflib no new line at end of file
        # problem.
        if not diffstr.endswith('\n'):
            diffstr = diffstr + ("\n\\ No newline at end of file\n")
        o.append(diffstr)
    ostr = ''.join(o)
    return (ostr, None)  # diff, parent_diff (not supported)
def get_extended_namespace(self, files):
    """
    Parses the file path to get the extended namespace

    For each file, builds the ClearCase extended path (every directory
    element annotated with its '@@<version>') so the exact revision can
    be addressed outside the view.  Returns the list of extended paths;
    checked-out files are returned without version decoration since they
    are only reachable inside the view.
    """
    versions = self.get_previous_version(files)
    evfiles = []
    hlist = []       # hashes of temp copies already scheduled (duplicate guard)
    for vkey in versions:
        # Verify if it is a checkedout file.
        if "CHECKEDOUT" in vkey:
            # For checkedout files just add it to the file list
            # since it cannot be accessed outside the view.
            splversions = vkey[:vkey.rfind("@@")]
            evfiles.append(splversions)
        else:
            # For checkedin files.
            ext_path = []
            ver = []
            fname = ""      # fname holds the file name without the version.
            (bpath, fpath) = cpath.splitdrive(vkey)
            if bpath:
                # Windows.
                # The version (if specified like file.c@@/main/1)
                # should be kept as a single string
                # so split the path and concat the file name
                # and version in the last position of the list.
                ver = fpath.split("@@")
                splversions = fpath[:vkey.rfind("@@")].split("\\")
                fname = splversions.pop()
                splversions.append(fname + ver[1])
            else:
                # Linux.
                if vkey.rfind("vobs") != -1:
                    bpath = vkey[:vkey.rfind("vobs") + 4]
                    fpath = vkey[vkey.rfind("vobs") + 5:]
                else:
                    bpath = vkey[:0]
                    fpath = vkey[1:]
                ver = fpath.split("@@")
                splversions = ver[0][:vkey.rfind("@@")].split("/")
                fname = splversions.pop()
                splversions.append(fname + ver[1])
            filename = splversions.pop()
            bpath = cpath.normpath(bpath + "/")
            elem_path = bpath
            for key in splversions:
                # For each element (directory) in the path,
                # get its version from clearcase.
                elem_path = cpath.join(elem_path, key)
                # This is the version to be appended to the extended
                # path list.
                this_version = self.util.execute(
                    ["cleartool", "desc", "-fmt", "%Vn",
                     cpath.normpath(elem_path)])
                if this_version:
                    ext_path.append(key + "/@@" + this_version + "/")
                else:
                    ext_path.append(key + "/")
            # This must be done in case we haven't specified
            # the version on the command line.
            ext_path.append(cpath.normpath(fname + "/@@" +
                            vkey[vkey.rfind("@@") + 2:len(vkey)]))
            epstr = cpath.join(bpath, cpath.normpath(''.join(ext_path)))
            evfiles.append(epstr)
            """
            In windows, there is a problem with long names(> 254).
            In this case, we hash the string and copy the unextended
            filename to a temp file whose name is the hash.
            This way we can get the file later on for diff.
            The same problem applies to snapshot views where the
            extended name isn't available. The previous file must be
            copied from the CC server to a local dir.
            """
            if cpath.exists(epstr):
                pass
            else:
                if len(epstr) > 254 or self.viewtype == "snapshot":
                    name = self.get_filename_hash(epstr)
                    # Check if this hash is already in the list
                    try:
                        # index() raising ValueError means the hash is new.
                        i = hlist.index(name)
                        die("ERROR: duplicate value %s : %s" % (name, epstr))
                    except ValueError:
                        hlist.append(name)
                    normkey = cpath.normpath(vkey)
                    td = tempfile.gettempdir()
                    # Cygwin case must transform a linux-like path to
                    # windows like path including drive letter
                    if 'cygdrive' in td:
                        where = td.index('cygdrive') + 9
                        drive_letter = td[where:where + 1] + ":"
                        td = cpath.join(drive_letter, td[where + 1:])
                    tf = cpath.normpath(cpath.join(td, name))
                    if cpath.exists(tf):
                        debug("WARNING: FILE EXISTS")
                        os.unlink(tf)
                    # Copy the revision from the CC server to the temp file.
                    self.util.execute(["cleartool", "get", "-to", tf,
                                       normkey])
                else:
                    die("ERROR: FILE NOT FOUND : %s" % epstr)
    return evfiles
# Create a snaphot of running processes. system = winappdbg.System() system.request_debug_privileges() system.scan_processes() # Get all processes that match the requested filenames. for filename in sys.argv[1:]: print "Looking for: %s" % filename for process, pathname in system.find_processes_by_filename(filename): pid = process.get_pid() bits = process.get_bits() print "Dumping memory for process ID %d (%d bits)" % (pid, bits) # Parse the database filename. dbfile = '%d.db' % pid if ntpath.exists(dbfile): counter = 1 while 1: dbfile = '%d_%.3d.db' % (pid, counter) if not ntpath.exists(dbfile): break counter += 1 del counter print "Creating database %s" % dbfile # Connect to the database and get a cursor. database = sqlite.connect(dbfile) cursor = database.cursor() # Create the table for the memory map. cursor.execute("""
def removeWhenExist(file):
    """Delete *file* if it exists; do nothing when it is already gone.

    Uses EAFP instead of the previous exists()/remove() pair, so a file
    vanishing between the check and the removal (TOCTOU race) no longer
    raises.  Any other OSError (e.g. *file* is a directory, or access is
    denied) still propagates, matching the original behaviour.
    """
    import errno
    try:
        os.remove(file)
    except OSError as e:
        # Only swallow "no such file" -- the case the original
        # ntpath.exists() guard was protecting against.
        if e.errno != errno.ENOENT:
            raise
try: process = Process(dwProcessId) process.open_handle() process.close_handle() except WindowsError, e: parser.error("can't open process %d: %s" % (dwProcessId, e)) attach_targets.append( process.get_pid() ) options.attach = attach_targets # Get the list of console programs to execute console_targets = list() for vector in options.console: if not vector: parser.error("bad use of --console") filename = vector[0] if not ntpath.exists(filename): try: filename = win32.SearchPath(None, filename, '.exe')[0] except WindowsError, e: parser.error("error searching for %s: %s" % (filename, str(e))) vector[0] = filename console_targets.append(vector) options.console = console_targets # Get the list of windowed programs to execute windowed_targets = list() for vector in options.windowed: if not vector: parser.error("bad use of --windowed") filename = vector[0] if not ntpath.exists(filename):
def _exists(path):
    """Return True when the pickled classifiers for *path* are on disk."""
    # The classifiers live under the configured root, in a per-path
    # subdirectory, always named 'classifiers.pkl'.
    pkl_path = ntpath.join(config.CLASSIFIERS, path, 'classifiers.pkl')
    return ntpath.exists(pkl_path)
def load_dbghelp(cls, pathname = None):
    """
    Load the specified version of the C{dbghelp.dll} library.

    This library is shipped with the Debugging Tools for Windows, and it's
    required to load debug symbols.

    If you don't specify the pathname, this method will try to find the
    location of the dbghelp.dll library despite Microsoft's efforts
    to keep us from using it, since they keep moving it around...

    This method can be useful for bundling dbghelp.dll in your scripts, so
    users won't need to have the Microsoft SDK installed.

    Example::

        from winappdbg import Debug

        def simple_debugger( argv ):

            # Instance a Debug object, passing it the event handler callback
            debug = Debug( my_event_handler )
            try:

                # Load a specific dbghelp.dll file
                debug.system.load_dbghelp("C:\Custom install path\dbghelp.dll")

                # Start a new process for debugging
                debug.execv( argv )

                # Wait for the debugee to finish
                debug.loop()

            # Stop the debugger
            finally:
                debug.stop()

    @see: U{http://msdn.microsoft.com/en-us/library/ms679294(VS.85).aspx}

    @type  pathname: str
    @param pathname:
        (Optional) Full pathname to the C{dbghelp.dll} library.
        If not provided this method will try to autodetect it.

    @rtype:  ctypes.WinDLL
    @return: Loaded instance of C{dbghelp.dll}.

    @raise NotImplementedError:
        This feature was not implemented for the current architecture.

    @raise WindowsError:
        An error occured while processing this request.
    """
    # If a pathname was given, just load the library and return.
    # Raise an exception on error.
    if pathname:
        dbghelp = ctypes.windll.LoadLibrary(pathname)

    # If no pathname was provided, we try to autodetect the install path for the SDK.
    else:

        # This is where we'll keep all the candidate libraries.
        # There may be more than one, so we'll sort out later which one to load.
        candidates = []

        # The Microsoft SDK always seems to be installed in the "Program Files (x86)" folder on
        # Intel 64 bit machines, and "Program Files" on every other platform.
        sysdrive = getenv("SystemDrive", "C:")
        if win32.arch == win32.ARCH_AMD64:
            basedir = "%s\\Program Files (x86)" % sysdrive
            basedir = getenv("ProgramFiles(x86)", basedir)
        else:
            basedir = "%s\\Program Files" % sysdrive
            basedir = getenv("ProgramFiles", basedir)

        # Let's try the oldest known location for dbghelp.dll.
        # Oh, those were the days, when this was the same across all versions.
        candidates.append(
            ntpath.join(basedir, "Debugging Tools for Windows (x86)", "dbghelp.dll"))

        # Then the debugger got embedded into the SDK. This path is different for each version.
        # The format is different too. And they bundled 32 and 64 bits together.
        # Then on later versions there's also binaries for other, incompatible architectures too???
        # I gave up on trying to make sense of it, let's just try all combinations to be safe.
        # (We only support x86 and x64 though. In the future we may have to update this.)
        # This StackOverflow answer helped me a lot: https://stackoverflow.com/a/24478856
        if win32.bits == 32:
            candidates.extend(glob.glob(
                ntpath.join(basedir, "Windows Kits", "*", "Debuggers", "x86", "dbghelp.dll")))
        else:
            candidates.extend(glob.glob(
                ntpath.join(basedir, "Windows Kits", "*", "Debuggers", "x64", "dbghelp.dll")))
        if win32.bits == 32:
            candidates.extend(glob.glob(
                ntpath.join(
                    basedir, "Microsoft SDKs", "Windows", "*", "Debuggers", "x86", "dbghelp.dll")))
        else:
            candidates.extend(glob.glob(
                ntpath.join(
                    basedir, "Microsoft SDKs", "Windows", "*", "Debuggers", "x64", "dbghelp.dll")))
        if win32.bits == 32:
            candidates.extend(glob.glob(
                ntpath.join(
                    basedir, "Microsoft", "Microsoft SDKs", "Windows", "*", "Debuggers", "x86", "dbghelp.dll")))
        else:
            candidates.extend(glob.glob(
                ntpath.join(
                    basedir, "Microsoft", "Microsoft SDKs", "Windows", "*", "Debuggers", "x64", "dbghelp.dll")))

        # All of the above only works for the scenario where the SDK was installed globally.
        # But after who knows what version they also allow installing the SDK on a user's home.
        # So we need to check the Windows Registry for that.
        # ...unfortunately the registry keys are just as chaotic and inconsistent as the default paths. :(
        # TODO: I feel too tired and angry to implement this right now. Will do it later. Pinky promise.

        # Now that we have a list of potential locations for dbghelp.dll, let's check them out.
        # The idea here is 1) test if the file exists, 2) read the metadata, 3) pick the best one.

        # Sort the list and remove duplicates (there shouldn't be any, but why not, it's fast anyway).
        candidates = sorted(set(candidates))

        # Discard any pathnames where the file cannot be found.
        candidates = [ x for x in candidates if ntpath.exists(x) ]

        # Get the metadata for each file found. Sort them by version, newer first.
        by_version = []
        for pathname in candidates:
            pBlock = win32.GetFileVersionInfoA(pathname)
            pBuffer, dwLen = win32.VerQueryValueA(pBlock, "\\")
            # Skip files whose version block is malformed rather than failing.
            if dwLen != ctypes.sizeof(win32.VS_FIXEDFILEINFO):
                #raise ctypes.WinError(win32.ERROR_BAD_LENGTH)
                continue
            pVersionInfo = ctypes.cast(pBuffer,
                                       ctypes.POINTER(win32.VS_FIXEDFILEINFO))
            VersionInfo = pVersionInfo.contents
            if VersionInfo.dwSignature != 0xFEEF04BD:
                #raise ctypes.WinError(win32.ERROR_BAD_ARGUMENTS)
                continue
            FileVersion = (VersionInfo.dwFileVersionMS, VersionInfo.dwFileVersionLS)
            ProductVersion = (VersionInfo.dwProductVersionMS, VersionInfo.dwProductVersionLS)
            # Use whichever of the two version numbers is higher as the sort key.
            if FileVersion > ProductVersion:
                by_version.append( (FileVersion, pathname) )
            else:
                by_version.append( (ProductVersion, pathname) )
        by_version.sort()
        by_version = by_version[::-1]

        # Try loading them all, starting with the newer versions.
        # Stop once we got one to load successfully.
        dbghelp = None
        for _, pathname in by_version:
            try:
                dbghelp = ctypes.windll.LoadLibrary(pathname)
                break
            except Exception:
                continue

        # If we couldn't load the SDK library, try the system default one.
        # It's an outdated version generally, but still better than nothing.
        # Issue a warning to let the user know they should install the SDK.
        if dbghelp is None:
            pathname = ntpath.join(
                getenv("WINDIR", "C:\WINDOWS"), "System32", "dbghelp.dll")
            try:
                dbghelp = ctypes.windll.LoadLibrary(pathname)
            except Exception:
                dbghelp = None

            # If no library could be loaded, fail with an exception.
            if dbghelp is None:
                msg = "Could not find a compatible dbghelp.dll in the system. Tried the following: %r"
                msg = msg % (candidates + [pathname],)
                raise NotImplementedError(msg)

            # If we loaded the system default, issue a warning.
            warnings.warn("Microsoft SDK not found, using the system default dbghelp.dll.")

    # Set it globally as the library to be used.
    ctypes.windll.dbghelp = dbghelp

    # Return the library.
    return dbghelp
'environment': None, 'version': None } OPTIONAL_OPTIONS = { 'bundle-extraction-paths': None, 'log-level': None } if sys.platform in ['win32', 'cygwin']: CONFIG_PATH = 'C:\\cumulus\\conf\\metadata.conf' else: CONFIG_PATH = '/etc/cumulus/metadata.conf' if not ospath.exists(CONFIG_PATH): print('Error: Configuration file not found: {}'.format(CONFIG_PATH)) sys.exit(1) CONF_FILE = SafeConfigParser() CONF_FILE.read(CONFIG_PATH) # Parse required options section = 'metadata' for option in REQUIRED_OPTIONS: try: CONFIG[option] = CONF_FILE.get(section, option) except NoOptionError: LOGGER.error('Missing required option {} in {}'.format( option, CONFIG_PATH)) sys.exit(1)
import sys
import ntpath

splitLen = 45000  # 45000 lines per file
outputBase = 'train_output'  # output.1.txt, output.2.txt, etc.

# Sanity check that the input directory is reachable before starting.
print(ntpath.exists('E:/Kaggle/Display Advertising Challenge/train/'))

count = 0
at = 0
dest = None
try:
    # BUG FIX: the source file and the last chunk file were never closed;
    # 'with' closes the source and the finally block closes the last chunk.
    # (The source handle was also named 'input', shadowing the builtin.)
    with open('E:/Kaggle/Display Advertising Challenge/train/train.csv', 'r') as source:
        for line in source:
            # Start a new chunk file every splitLen lines.
            if count % splitLen == 0:
                if dest:
                    dest.close()
                dest = open('E:/Kaggle/Display Advertising Challenge/train/'
                            + outputBase + str(at) + '.txt', 'w')
                at += 1
            dest.write(line)
            count += 1
finally:
    if dest:
        dest.close()
'': { 'handlers': ['console', 'file'], 'level': 'WARNING', 'propagate': True }, 'cumulus_bundle_handler': { 'handlers': ['console', 'file'], 'level': 'DEBUG', 'propagate': False } } } # Change the log file path on Windows systems if sys.platform in ['win32', 'cygwin']: if not ospath.exists('C:\\cumulus\\logs'): os.makedirs('C:\\cumulus\\logs') LOG_CONF['handlers']['file']['filename'] = \ 'C:\\cumulus\\logs\\cumulus-bundle-handler.log' logging.config.dictConfig(LOG_CONF) from cumulus_bundle_handler import config from cumulus_bundle_handler import bundle_manager from cumulus_bundle_handler import script_executor from cumulus_bundle_handler.command_line_options import ARGS as args # Read log level from the metadata.conf if config.get('log-level'): LOG_CONF['handlers']['console']['level'] = config.get('log-level').upper() LOG_CONF['handlers']['file']['level'] = config.get('log-level').upper()
__author__ = 'gerson64'
import sys
import ntpath

splitLen = 704098  # 704098 lines per file
fileName = 'E:/medicare/'  # NOTE(review): unused below; kept for compatibility
outputBase = 'medicare_records'  # output.1.txt, output.2.txt, etc.

# Sanity check that the input directory is reachable before starting.
print(ntpath.exists('E:/medicare/'))

count = 0
at = 0
dest = None
try:
    # BUG FIX: the source file and the last chunk file were never closed;
    # 'with' closes the source and the finally block closes the last chunk.
    # (The source handle was also named 'input', shadowing the builtin.)
    with open('E:/medicare/Medicare-Physician-and-Other-Supplier-PUF-CY2012.txt', 'r') as source:
        for line in source:
            # Start a new chunk file every splitLen lines.
            if count % splitLen == 0:
                if dest:
                    dest.close()
                dest = open('E:/medicare/splitfile/'
                            + outputBase + str(at) + '.txt', 'w')
                at += 1
            dest.write(line)
            count += 1
finally:
    if dest:
        dest.close()