def __init__(self, pathname):
    """Collect mp3 file(s) from a single file path or a directory path."""
    valid = ntpath.isdir(pathname) or ntpath.isfile(pathname)
    assert valid, "no file or directory %s exists" % pathname
    self.pathname = pathname
    self.audiofiles = []
    self.artwork = None
    self.clr = False
    if ntpath.isfile(pathname):
        # Single file: must carry an mp3 extension.
        extension = ntpath.basename(pathname).rsplit('.', 1)[-1]
        assert extension == "mp3", "file must be mp3 format"
        self.audiofiles.append(pathname)
    if ntpath.isdir(pathname):
        # Directory: collect every mp3 it contains.
        dir_files = utl.get_multiple(pathname, "mp3")
        assert dir_files, "No mp3 files in given directory"
        self.audiofiles.extend(dir_files)
def set_artwork(self, img_path):
    """Attach an artwork image to this object after validating it.

    Parameters
    ----------
    img_path : str
        Path to an existing .jpg or .png file.

    Raises
    ------
    AssertionError
        If the path is not an existing file or the extension is not
        jpg/png.
    """
    assert ntpath.isfile(
        img_path), 'pathname "%s" does not belong to a file' % img_path
    # BUG FIX: the original compared against ("jpg" or "png"), which
    # evaluates to just "jpg" and silently rejected every png file.
    extension = img_path.rsplit(".", 1)[-1]
    assert extension in ("jpg", "png"), "image must be jpg or png"
    self.artwork = img_path
def main():
    ''' Main function to output manifests to stdout.

    Parses CLI args, resolves the file list (with optional recursion),
    builds a manifest per file via fcheck.Checker, and prints the result
    as lines, CSV (with a header row), or a validated JSON document.
    '''
    parser = parse()
    args = parser.parse_args()
    if not args.recur:
        #Windows does not do wildcard expansion at the shell level
        if sys.platform == 'win32':
            files = [
                y for x in args.files for y in glob.glob(x)
                if ntpath.isfile(y)
            ]
        else:
            files = [x for x in args.files if os.path.isfile(x)]
    else:
        # Recursive mode: expand directories into contained files.
        files = recurse_files(args.files)
    output = []
    for num, fil in enumerate(files):
        testme = fcheck.Checker(fil)
        # CSV output gets column headers on the first record only.
        if args.out == 'csv' and num == 0:
            output.append(testme.manifest(headers=True, **vars(args)))
        else:
            output.append(testme.manifest(**vars(args)))
    if not args.out == 'json':
        print('\n'.join(output))
    else:
        # Each manifest entry is assumed to already be a JSON object string;
        # round-trip through json to validate before printing.
        outjson = ('{"files" :' + '[' + ','.join(output) + ']' + '}')
        outjson = json.dumps(json.loads(outjson))  #validate
        print(outjson)
def read_datafile(self, filename):
    """Read a discontinuous phase shift file into ``self.data``.

    Parameters
    ----------
    filename : str
        The path to the discontinuous phase shift file.

    Notes
    -----
    BUG FIX: the original body built a throwaway list of Nones, re-read
    an already-exhausted file handle, and finally loaded a hard-coded
    path (``C:\\Users\\Liam\\Desktop\\leedph.d``) instead of ``filename``.
    All of that dead code is removed and the given path is loaded.
    """
    if not ntpath.isfile(filename):
        # Silently skip missing files (original behaviour).
        return
    try:
        # NOTE(review): the dead code suggested fused negative numbers
        # ("1.0-2.0") may need splitting before parsing — confirm with a
        # real data file before adding that preprocessing.
        self.data = loadtxt(filename, dtype=float, comments="#")
    except IOError:
        # Unreadable file: leave any previously loaded data untouched.
        pass
def _get_rule_files(rules_folder, rule):
    """Return a mapping of rule filename -> path for compilable yara rules.

    Single-rule mode (``rule`` given) returns a one-entry dict, or None
    when the rule fails to compile.  Folder mode keeps every file in
    ``rules_folder`` that compiles; failures are warned about and skipped.
    """
    if not isinstance(rules_folder, str) and not isinstance(rule, str):
        raise TypeError(
            "Error: You must set a path containing rule files"
            " in rules_folder or the path to a rule file in rule.")
    collected = {}
    if rule is not None:
        # Single-rule mode: the rule must compile, otherwise bail out.
        try:
            yara.compile(filepath=rule)
        except yara.Error as er:
            utils.display_warning("skipping rule {}.\n{}".format(rule, er))
            return None
        collected[ntpath.basename(rule)] = rule
        return collected
    if ntpath.isdir(rules_folder):
        # Folder mode: try every regular file not already collected.
        for entry in os.listdir(rules_folder):
            candidate = ntpath.join(rules_folder, entry)
            if not ntpath.isfile(candidate) or entry in collected:
                continue
            try:
                yara.compile(filepath=candidate)
                collected[entry] = candidate
            except yara.Error as er:
                utils.display_warning(
                    "skipping rule {}.\n{}".format(candidate, er))
    return collected
def ensure_file_exist(filename):
    """Terminate the process unless ``filename`` is an existing regular file.

    Prints a diagnostic and exits with status -1 when the path does not
    exist or is not a regular file; returns None otherwise.
    """
    # Local import: the module-level import block is outside this view.
    # FIX: use sys.exit instead of the site-injected exit() builtin,
    # which is absent under ``python -S`` and in embedded interpreters.
    import sys
    if not ntpath.exists(filename):
        # FIX: message grammar ("does not exists" -> "does not exist").
        print("Filename '{}' does not exist".format(filename))
        sys.exit(-1)
    if not ntpath.isfile(filename):
        # FIX: message grammar ("is not file" -> "is not a file").
        print("Filename '{}' is not a file".format(filename))
        sys.exit(-1)
def restaurationPartie(maFlotte, saFlotte):  #OP
    """Restore both fleets from 'sauvegarde.bin' when a save file exists.

    Returns the (maFlotte, saFlotte) lines read from the save file, or
    False when no save file is present.
    """
    if ntpath.isfile('sauvegarde.bin'):
        # FIX: context manager guarantees the handle is closed
        # (the original opened the file and never closed it).
        with open('sauvegarde.bin', 'r') as fich:
            texte = fich.readlines()
        maFlotte = texte[0]
        saFlotte = texte[1]
        return maFlotte, saFlotte
    return False
def add_subs(self, sub_path):
    """Register an .srt subtitle file, keyed by its language tag.

    A filename of the form "name.lang.srt" is stored under "lang";
    any other .srt filename is stored under the empty-string key.
    """
    assert ntpath.isfile(
        sub_path), 'pathname "%s" does not belong to a file' % sub_path
    assert sub_path.rsplit(".", 1)[-1] == "srt", "subtitle must be srt"
    parts = ntpath.basename(sub_path).rsplit(".", 2)
    lang = parts[1] if len(parts) == 3 else ""
    self.__subs[lang] = sub_path
def set_input_files(self, input_files=[]):
    """Set the list of input filenames, keeping only paths that exist.

    Each name is first normalized through ``self.__fix_path``; names
    that do not point at an existing file are dropped.  When nothing
    survives the filtering, ``self.input_files`` is left untouched.

    Note: the mutable default is kept for interface compatibility; it
    is never mutated here, so it is safe.
    """
    if input_files:
        fixed = [self.__fix_path(filename) for filename in input_files]
        # BUG FIX: the original tested ntpath.isfile(file) — "file" was
        # never defined (a NameError on Python 3); test each filename.
        existing = [
            filename for filename in fixed if ntpath.isfile(filename)
        ]
        if existing:
            self.input_files = existing
def getView(self, fileName, data={}):
    """Render the template view ``<basePath><fileName>.py`` to stdout.

    Each line of the view file is treated as a string.Template and
    substituted with ``data`` before being printed.  Missing view files
    are silently ignored (original behaviour).

    Note: the mutable default ``data={}`` is kept for interface
    compatibility; it is only read, never mutated.
    """
    fullPathToView = self.basePath + str(fileName) + ".py"
    if isfile(fullPathToView):
        # FIX: the original opened the file and never closed it; the
        # context manager guarantees the handle is released.
        with open(fullPathToView) as fileHandle:
            for line in fileHandle:
                print(Template(line).substitute(data))
def __init__(self, pathname):
    """Wrap a single PDF file path and initialise merge/metadata state."""
    assert ntpath.isfile(
        pathname), "given pathname doest not belong to a file"
    self.pathname = pathname
    base = ntpath.basename(pathname)
    self.filename, self.extension = base.rsplit('.', 1)
    self.directory = ntpath.dirname(pathname)
    self.metadata = {}      # tag -> value for the tag
    self.toc = []           # entries of [hierarchy, "title", page]
    self.fileToMerge = {}
    self.addPagePdf = {}
    self.addPageIndex = {}
    self.watermark = {}
def __init__(self, pathname):
    """Collect mp4 file(s) from a single file path or a directory path."""
    assert (ntpath.isdir(pathname) or ntpath.isfile(pathname)
            ), "no file or directory %s exists" % pathname
    self.pathname = pathname
    self.videofiles = {}
    if ntpath.isfile(pathname):
        # Single file: must carry an mp4 extension.
        name, extension = ntpath.basename(pathname).rsplit('.', 1)
        assert extension == "mp4", "file must be mp4 format"
        self.videofiles[pathname] = name
        self.directory = ntpath.dirname(pathname)
    if ntpath.isdir(pathname):
        # Directory: map every contained mp4 path to its stem.
        dir_files = utl.get_multiple(pathname, "mp4")
        assert dir_files, "no mp4 files in given directory"
        for entry in dir_files:
            name, extension = ntpath.basename(entry).rsplit('.', 1)
            self.videofiles[entry] = name
        self.directory = pathname
    self.artwork = None
    self.clr = False
def update_paths(self):
    """Update the parameters related to file paths by prepending '..\\..\\'.

    Necessary to call the multiprocessing DSS launcher: paths that do
    not resolve to an existing file (and are not the 'no file' sentinel)
    are rebased two directories up using their basename.
    """
    # FIXME: not a pretty solution... code smell
    for name in ('datapath', 'corrpath', 'secpath'):
        current = getattr(self.par, name)
        if ntpath.isfile(current) or current == 'no file':
            continue
        setattr(self.par, name,
                ntpath.join('..\\..\\', ntpath.basename(current)))
def create_job(self, path, aggregator_id, downlink, downdate, md5=None,
               sha1=None, fuzzy=None, **kwargs):
    """Register an installer file as a new Job row in the database.

    Parameters:
        path: filesystem path to the installer; must be an existing file.
        aggregator_id, downlink, downdate: provenance stored on the Job.
        md5, sha1, fuzzy: optional precomputed digests; when any one is
            missing, all three are (re)computed from the file contents.
        **kwargs: accepted but unused here.

    Raises:
        Exception: when `path` does not point at an existing regular file,
            plus whatever the DB session raises (re-raised after rollback).
    """
    if not ntpath.isfile(path):
        raise Exception(
            "File %s does not exist or it is not a file." % path)

    # Calculate more info about the given installer.
    # NOTE(review): `size` is computed but never used in this method.
    size = ntpath.getsize(path)
    if md5 is None or sha1 is None or fuzzy is None:
        # Calculate the hash function and ssdeep values
        m = hashlib.md5()
        s = hashlib.sha1()
        with open(path, "rb") as f:
            # Stream in 4 KiB chunks so large installers are not loaded
            # into memory at once.
            for chunk in iter(lambda: f.read(4096), b""):
                m.update(chunk)
                s.update(chunk)
        md5 = m.hexdigest().lower()
        sha1 = s.hexdigest().lower()
        fuzzy = Fuzzy._fuzzy_hash_from_file(path).lower()
    fname = ntpath.basename(path)

    # Now add the data into the DB
    session = sessionmaker()
    try:
        new_job = Job(fname=fname,
                      aggregator_id=aggregator_id,
                      downlink=downlink,
                      downdate=downdate,
                      path=path,
                      md5=md5.lower(),
                      sha1=sha1.lower(),
                      fuzzy=fuzzy.lower())
        session.add(new_job)
        session.commit()
    except:
        # Roll back on any failure, then re-raise to the caller.
        session.rollback()
        raise
    finally:
        session.close()
def loadData(self, path):
    """Load a C3D acquisition file via btk and start the playback timer.

    On any failure the acquisition is cleared and the timer stopped.
    (Python 2 module: print statements.)
    """
    if not ntpath.isfile(path):
        print 'Can not open ' + path
        self.acq = None
        self.timer.stop()
        return
    self.dataPath, self.dataFile = ntpath.split(ntpath.abspath(path))
    try:
        reader = btk.btkAcquisitionFileReader()
        reader.SetFilename(path)
        reader.Update()
        self.acq = reader.GetOutput()
    except RuntimeError:
        # btk raises RuntimeError for unreadable or invalid C3D files.
        print 'File format is not valid ' + path
        self.acq = None
        self.timer.stop()
        return
    if self.acq:
        print 'C3D file loaded ' + path
        self.frequency = self.acq.GetPointFrequency()
        self.totalFrame = self.acq.GetPointFrameNumber()
        self.totalPoint = self.acq.GetPointNumber()
        print 'Sample Frequency :', self.acq.GetPointFrequency()
        print 'Total Frame :', self.acq.GetPointFrameNumber()
        print 'Marker Number :', self.acq.GetPointNumber()
        # Scan every coordinate of every marker/frame for the maximum
        # component value (presumably used for display scaling — TODO
        # confirm against the consumer of maxDataValue).
        self.maxDataValue = 0
        for i in range(self.totalPoint):
            point = self.acq.GetPoint(i)
            for j in range(self.totalFrame):
                pos = point.GetValues()[j, :]
                if pos[0] > self.maxDataValue:
                    self.maxDataValue = pos[0]
                if pos[1] > self.maxDataValue:
                    self.maxDataValue = pos[1]
                if pos[2] > self.maxDataValue:
                    self.maxDataValue = pos[2]
        self.paused = False
        self.currentFrame = 0
        # Timer period in ms derived from the point sample frequency.
        self.timer.setInterval(int(1000 / self.frequency))
        self.timer.start()
        self.dataLoaded.emit()
def _remove_old_files():
    """ Remove files from previous bundle

    Reads the cache file written by the previous bundle run (one path
    per line) and removes each listed file/link/directory, then removes
    the cache file itself.
    """
    cache_file = '/var/local/cumulus-bundle-handler.cache'
    if sys.platform in ['win32', 'cygwin']:
        if not ospath.exists('C:\\cumulus\\cache'):
            os.makedirs('C:\\cumulus\\cache')
        cache_file = 'C:\\cumulus\\cache\\cumulus-bundle-handler.cache'

    if not ospath.exists(cache_file):
        LOGGER.info('No previous bundle files to clean up')
        return

    LOGGER.info('Removing old files and directories')

    with open(cache_file, 'r') as file_handle:
        for line in file_handle.readlines():
            line = line.replace('\n', '')

            # Entries already gone are skipped silently.
            if not ospath.exists(line):
                continue

            if ospath.isdir(line):
                try:
                    # removedirs only deletes empty directory chains;
                    # non-empty directories are deliberately left alone.
                    os.removedirs(line)
                    LOGGER.debug('Removing directory {}'.format(line))
                except OSError:
                    pass
            elif ospath.isfile(line):
                LOGGER.debug('Removing file {}'.format(line))
                os.remove(line)

                # Prune the now-possibly-empty parent directory chain.
                try:
                    os.removedirs(ospath.dirname(line))
                except OSError:
                    pass
            elif ospath.islink(line):
                LOGGER.debug('Removing link {}'.format(line))
                os.remove(line)

                try:
                    os.removedirs(ospath.dirname(line))
                except OSError:
                    pass
            else:
                LOGGER.warning('Unknown file type {}'.format(line))

    # Remove the cache file when done
    os.remove(cache_file)
def _compute_executable(self):
    """Return the path of the Python executable for this runtime.

    On Windows, prefer whichever of the recorded executable path or the
    same basename inside the scripts directory actually exists (hack to
    take virtualenv layouts into account); fall back to — and on other
    platforms always use — the recorded executable path.
    """
    runtime = self._runtime_info
    if runtime.platform.os != WINDOWS:
        return runtime.executable
    candidates = (
        runtime.executable,
        ntpath.join(self.scriptsdir,
                    ntpath.basename(runtime.executable)),
    )
    for candidate in candidates:
        if ntpath.isfile(candidate):
            return candidate
    return runtime.executable
def verifyFilePath(file, extension=None):
    """Return the absolute path of ``file`` after validating it.

    Raises an Exception when the path is not an existing file, or when
    ``extension`` is given and the filename does not end with it.
    """
    abs_file = ntpath.abspath(file)
    # Guard clauses instead of the original nested if/else pyramid.
    if not ntpath.isfile(abs_file):
        raise Exception('Error: File is not valid! (' + abs_file + ')')
    if extension and not abs_file.endswith(extension):
        raise Exception('Error: File must be a ' + extension +
                        ' file! (' + abs_file + ')')
    return abs_file
def execute(self, context):
    """Upload the files listed in an upstream XCom to S3.

    Pulls a list of local file paths from XCom (when a source location
    is configured), uploads each to the configured bucket under the
    configured prefix, and pushes the resulting S3 keys back to XCom
    under the key 's3_files'.

    Raises FileNotFoundError when a listed path is not a regular file.
    """
    sent_files = list()
    if self.file_list_xcom_location is not None:
        file_list = context['task_instance'].xcom_pull(
            self.file_list_xcom_location)
    else:
        # No upstream list configured: nothing to upload, but the
        # (empty) result is still pushed to XCom below.
        file_list = list()
    for file in file_list:
        self.log.info("Working on file: %s", str(file))
        if not ntpath.isfile(file):
            raise FileNotFoundError
        basename = ntpath.basename(file)
        s3_key = self.get_s3_key(self.s3_prefix, basename)
        self.log.info("Sending %s to S3 bucket %s, key %s", file,
                      self.s3_bucket, s3_key)
        # replace=True: overwrite any existing object at this key.
        self.s3_hook.load_file(file, s3_key, self.s3_bucket, replace=True)
        sent_files.append(s3_key)
    context['task_instance'].xcom_push(key='s3_files', value=sent_files)
def add_email_attachments(msg, attachments):
    """Attach each file in ``attachments`` to the email message ``msg``.

    The MIME type is guessed from the filename; anything unguessable or
    compressed falls back to application/octet-stream.  ``attachments``
    may be None or empty, in which case nothing happens.
    """
    for path in attachments or []:
        assert ntpath.isfile(
            path), "Email attachments must be valid file paths"
        ctype, encoding = mimetypes.guess_type(path)
        if ctype is None or encoding is not None:
            # No guess could be made, or the file is encoded (compressed),
            # so use a generic bag-of-bits type.
            ctype = "application/octet-stream"
        maintype, subtype = ctype.split("/", 1)
        with open(path, "rb") as fp:
            msg.add_attachment(fp.read(),
                               maintype=maintype,
                               subtype=subtype,
                               filename=ntpath.basename(path))
def run_init_scripts(start=False, kill=False, other=False):
    """ Execute scripts in /etc/cumulus-init.d or C:\\cumulus\\init.d

    :type start: bool
    :param start: Run scripts starting with S
    :type kill: bool
    :param kill: Run scripts starting with K
    :type other: bool
    :param other: Run scripts not starting with S or K
    """
    init_dir = '/etc/cumulus-init.d'
    if sys.platform in ['win32', 'cygwin']:
        init_dir = 'C:\\cumulus\\init.d'

    # Run the post install scripts provided by the bundle
    if not ospath.exists(init_dir):
        LOGGER.info('No init scripts found in {}'.format(init_dir))
        return

    LOGGER.info('Running init scripts from {}'.format(init_dir))

    scripts = [
        ospath.join(init_dir, entry)
        for entry in sorted(os.listdir(init_dir))
        if ospath.isfile(ospath.join(init_dir, entry))
    ]

    # Run the categories in three separate passes to preserve ordering:
    # all S scripts first, then all K scripts, then everything else.
    if start:
        for script in scripts:
            if ospath.basename(script)[0] == 'S':
                _run_command(ospath.abspath(script))
    if kill:
        for script in scripts:
            if ospath.basename(script)[0] == 'K':
                _run_command(ospath.abspath(script))
    if other:
        for script in scripts:
            if ospath.basename(script)[0] not in ['K', 'S']:
                _run_command(ospath.abspath(script))
## Mandatory inputDataPathFound = False #print opts for o, a in opts: if o in ("-v", "--verbose"): myApp.verbose = True elif o in ("-h", "--help"): myApp.usage() sys.exit(0) elif o in ("-i", "--input"): myApp.inputDataPath = a inputDataPathFound = True else: assert False, "unhandled option" ## Mandatory Check if not inputDataPathFound: ## Is an input given? print "-i [file_path] or --input [file_path] was not given" myApp.usage() sys.exit(2) if not ntpath.isfile(myApp.inputDataPath): ## Is input a file? print "The path given as an input is not a file" myApp.usage() sys.exit(2) myApp.makeFolder()
# NOTE(review): this chunk assumes `opts` (getopt result), `usage`,
# `makeFolder`, `getData` and `train` are defined earlier in the file.
# Python 2 (print statements).
inputDataPathFound = False
for o, a in opts:
    if o in ("-v", "--verbose"):
        verbose = True
    elif o in ("-h", "--help"):
        usage()
        sys.exit(0)
    elif o in ("-i", "--input"):
        inputDataPath = a
        inputDataPathFound = True
    else:
        assert False, "Unhandled Option !"

## Mandatory Check
if not inputDataPathFound:  ## Is an input given?
    print "Error : -i [file_path] or --input [file_path] was not given"
    usage()
    sys.exit(2)
if not ntpath.isfile(inputDataPath):  ## Is input a file?
    print "The path given as an input is not a file"
    usage()
    sys.exit(2)

makeFolder()
getData(True)
# getData(False)
train(False)
# train()
""" print(FILESEC) parser = argparse.ArgumentParser() if __name__ == "__main__": parser.add_argument("path", type=str) parser.add_argument("-e", "--encrypt", action="store_true") parser.add_argument("-d", "--decrypt", action="store_true") parser.add_argument("-r", "--device", type=str) parser.add_argument("-f", "--forgot", action="store_true") parser.add_argument("-o", "--open", action="store_true") args = parser.parse_args() print(args.device) t = threading.Thread(target=animate) if (ntpath.isfile(args.path)): # encrypt if (args.encrypt): print( "use '-reverseTime-' if you want password to be in mm:hh format (24 hr fomat)" ) password = getpass.getpass("Password : "******"confirm Password : "******"File Size {fileSize} (bytes) estimated encrypted file size {estimatedFileSize} (bytes)" ) t2 = threading.Thread(
def load_dbghelp(cls, pathname=None):
    """
    Load the specified version of the C{dbghelp.dll} library.

    This library is shipped with the Debugging Tools for Windows, and it's
    required to load debug symbols.

    Normally you don't need to call this method, as WinAppDbg already tries
    to load the latest version automatically - but it may come in handy if
    the Debugging Tools are installed in a non standard folder.

    Example::
        from winappdbg import Debug

        def simple_debugger( argv ):

            # Instance a Debug object, passing it the event handler callback
            debug = Debug( my_event_handler )
            try:

                # Load a specific dbghelp.dll file
                debug.system.load_dbghelp("C:\Some folder\dbghelp.dll")

                # Start a new process for debugging
                debug.execv( argv )

                # Wait for the debugee to finish
                debug.loop()

            # Stop the debugger
            finally:
                debug.stop()

    @see: U{http://msdn.microsoft.com/en-us/library/ms679294(VS.85).aspx}

    @type  pathname: str
    @param pathname:
        (Optional) Full pathname to the C{dbghelp.dll} library.
        If not provided this method will try to autodetect it.

    @rtype:  ctypes.WinDLL
    @return: Loaded instance of C{dbghelp.dll}.

    @raise NotImplementedError: This feature was not implemented for the
        current architecture.

    @raise WindowsError: An error occured while processing this request.
    """
    # If an explicit pathname was not given, search for the library.
    if not pathname:

        # Under WOW64 we'll treat AMD64 as I386.
        arch = win32.arch
        if arch == win32.ARCH_AMD64 and win32.bits == 32:
            arch = win32.ARCH_I386

        # Check if the architecture is supported.
        if not arch in cls.__dbghelp_locations:
            msg = "Architecture %s is not currently supported."
            raise NotImplementedError(msg % arch)

        # Grab all versions of the library we can find.
        # Since some of the possible paths are dependent on the exact
        # version of the Windows SDK, we use wildcards instead.
        possible = []
        for pathname in cls.__dbghelp_locations[arch]:
            if "*" in pathname:
                possible.extend(glob.glob(pathname))
            else:
                possible.append(pathname)
        found = []
        for pathname in possible:
            if ntpath.isfile(pathname):
                try:
                    f_ver, p_ver = cls.get_file_version_info(pathname)[:2]
                except WindowsError:
                    msg = "Failed to parse file version metadata for: %s"
                    warnings.warn(msg % pathname)
                    # BUG FIX: without this continue, f_ver below was
                    # referenced while unbound (or stale from a previous
                    # iteration), raising NameError or recording a bogus
                    # version for this file.
                    continue
                if not f_ver:
                    f_ver = p_ver
                elif p_ver and p_ver > f_ver:
                    f_ver = p_ver
                found.append((f_ver, pathname))

        # If we found any, use the newest version.
        if found:
            found.sort()
            pathname = found.pop()[1]

        # If we didn't find any, trust the default DLL search algorithm.
        else:
            pathname = "dbghelp.dll"

    # Load the library.
    dbghelp = ctypes.windll.LoadLibrary(pathname)

    # Set it globally as the library to be used.
    ctypes.windll.dbghelp = dbghelp

    # Return the library.
    return dbghelp
# hour = 15 # Innovation to Reitz start_lat = 29.649981 start_lon = -82.332703 stop_lat = 29.646386 stop_lon = -82.34779 hour = 15 starts, stops = all_hubs.determine_routes(start_lat, start_lon, stop_lat, stop_lon, hour) print('Start hubs: ') for start in starts: print('Building: {0}\tAward: {1}\tDistance: {2} miles'.format( start['hub'].name, round(start['award'], 2), round(start['distance'], 4))) print('') print('Destination hubs: ') for stop in stops: print('Building: {0}\tAward: {1}\tDistance: {2} miles'.format( stop['hub'].name, round(stop['award'], 2), round(stop['distance'], 4))) if __name__ == "__main__": style_file = 'style/report_simple.mplstyle' if (ntpath.isfile(style_file)): plt.style.use(style_file) calculate_routes()
def update_event(self, inp=-1):
    """Write to output 0 whether the path on input 0 names an existing file."""
    path = self.input(0)
    self.set_output_val(0, ntpath.isfile(path))
def set_artwork(self, img_path):
    """Attach an artwork image path after validating it is a jpg/png file."""
    assert ntpath.isfile(img_path), 'pathname "%s" does not belong to a file' % img_path
    ext = img_path.rsplit(".", 1)[-1]
    assert ext in ("jpg", "png"), "image must be jpg or png"
    self.artwork = img_path
def getAttackFromFile(self, fileName):
    """
    Creates an attack object by parsing a CSV file.
    @param fileName: the file name of the CSV file to parse.
    @return: the attack object.
    @raise IOError: if the file has an invalid format or if no suitable
    parser plug-in is available.
    """
    # First: checks if the file is a regular file.
    if not ntpath.isfile(fileName):
        LOG.critical("The file '%s' is not a regular file.", fileName)
        raise IOError("The file '%s' is not a regular file." % fileName)
    # Second: checks the file name format.
    # Expected form: "<severity>-<attack type>-<id>.csv".
    # NOTE(review): the alternation lists "high" twice and has no
    # lowercase "very high" variant, so "very high" files fall through
    # to the no-match branch — confirm whether that is intentional.
    match = re.match(
        "^(Very Low|Very low|very low|Low|low|High|high|Very High|Very high|high)-(.+)?-(\d+)\.csv$",
        os.path.basename(fileName))
    if match:
        # Map the textual severity to the 1..4 scale; anything that is
        # not very-low/low/high (i.e. very high) maps to 4.
        severity = match.group(1).lower()
        if severity == "very low":
            severity = 1
        elif severity == "low":
            severity = 2
        elif severity == "high":
            severity = 3
        else:
            severity = 4
        attackType = match.group(2)
        identifier = int(match.group(3))
    else:
        # Unrecognised filename: default to max severity, use the file
        # stem as the attack type and no identifier.
        severity = 4
        attackType = os.path.splitext(ntpath.basename(fileName))[0]
        identifier = None
    anomaly_name = attackType
    # Finds a suitable parser.
    plugin = None
    for i in self.pluginManager.getPluginsOfCategory("Parser"):
        pluginFileName = i.details.get("Core", "FileName")
        if re.match(pluginFileName, attackType):
            plugin = i
            break
    if plugin is None:
        LOG.critical("No suitable attack event parser found.")
        raise IOError("No suitable attack event parser found")
    # Creates an attack object.
    attackType = plugin.details.get("Core", "Attack")
    attack = Attack(severity, attackType, identifier, anomaly_name)
    # Opens the file and read the events.
    count = 0
    with open(fileName, "rt") as csv:
        for line in csv:
            count += 1
            event = plugin.plugin_object.parse(fileName, count, line)
            if event is not None:
                attack.events.append(event)
    # Third: checks if there are some events.
    # NOTE(review): "<= 1" treats a one-line file (header only,
    # presumably) as empty — confirm against the CSV format.
    if count <= 1:
        LOG.critical("The file '%s' is empty.", fileName)
        raise IOError("The file '%s' is empty."
                      % fileName)
    LOG.info(
        "Parsed an attack of type '%s' with severity %d and containing %d events.",
        attack.type, attack.severity, len(attack.events))
    return attack
# Options in_file = "debug.txt" # default file name in case no args supplied greps = ["command line:", "vc_busy_counter", "send_allowed=1"] mixed_greps = False sorting = True def exit_verbose(s): print s sys.exit(1) if len(sys.argv) > 1: in_file = sys.argv[1] if not ntpath.isfile(in_file): exit_verbose("File {0} does not exist.".format(in_file)) else: exit_verbose("Usage: ./grepdebug.py filename") dir_name, base_name = ntpath.split(in_file) fn, ext = ntpath.splitext(base_name) if len(dir_name) > 0: dir_name += "/" out_file = ntpath.join(dir_name, fn + "_parsed" + ext) out_file_sorted = ntpath.join(dir_name, fn + "_parsed_sorted" + ext) with open(in_file, "rt") as fin, open(out_file, "wt") as fout: if mixed_greps: for line in fin:
# NOTE(review): the first lines below look like the tail of a method
# (they use `self`), and the chunk ends with an orphaned `else:` whose
# matching `if` was commented out — not valid Python as committed;
# reproduced as-is.
ext_locations.to_csv(path)

building_events = analysis.calculate_events_per_building()
ext_locations['total'] = self.locations['name'].map(building_events)
ext_locations.to_csv(path)


if __name__ == "__main__":
    style_file = '../data/style/tibs_plot_style.mplstyle'
    data_file = '../../data/outputwireless-logs-20120407.DHCP_ANON.csv'
    # data_file = '../data/outputwireless-logs-20120409.DHCP_ANON.csv'
    location_file = '../../data/prefix_lat_lon_name_category.csv'
    output_location_file = '../../data/density_locations_20120407.csv'
    # Apply the matplotlib style only when the style sheet is present.
    if (ntpath.isfile(style_file)):
        plt.style.use(style_file)
    # if (ntpath.isfile(data_file) and ntpath.isfile(location_file)):
    analysis = DHCPAnalysis(data_file, location_file)
    analysis.write_building_csv(output_location_file)
    # building_count = analysis.calculate_events_per_building()
    # fig, ax = plt.subplots()
    # max_value = np.amax(building_count)
    # plt.bar(range(len(building_count)), list(building_count.values()), align='edge', width=1)
    # plt.xticks(range(len(building_count)), list(building_count.keys()), rotation=90)
    # ax.set_ylim([0, max(building_count.values()) * 1.15])
    # plt.gcf().subplots_adjust(bottom=0.4)
    # plt.show()
    else:
# res = moveFile(From, ToBis, inc+1) # else: # os.rename(From, ToBis) # # FilesList.append(os.path.join(SuperPath, ntpath.basename(target))) # res = ToBis # return res SuperPath = input("[*] Veuillez saisir le chemin du tri : ") # print("[*] Veuillez saisir le chemin du tri : ./Test") # SuperPath = "./Test" print("Chargement ...") for FilePath in os.listdir(SuperPath): if ntpath.isfile(ntpath.join(SuperPath, FilePath)): try: Artiste = TinyTag.get(ntpath.join(SuperPath, FilePath)).artist FolderName = str(Artiste) except: print("(" + ntpath.basename(FilePath) + ") Erreur de lecture de l'artiste") else: try: FolderPath = os.path.join(SuperPath, FolderName) Path(FolderPath).mkdir(parents=True, exist_ok=True) os.rename( ntpath.join(SuperPath, FilePath), os.path.join(SuperPath, FolderName, ntpath.basename(FilePath))) except:
def arguments():
    """Parse CLI options, validate the input file, prepare the results
    folder, and kick off training.  (Python 2 module: print statements.)
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:v",
                                   ["help", "input=", "output=", "verbose"])
    except getopt.GetoptError as err:
        ## Error in arguments
        print str(err)
        usage()
        sys.exit(2)

    ## Default arguments
    inputDataPath = 'example.txt'
    outputDataPath = 'exampleOutput.txt'
    logDataPath = 'exampleLog.txt'
    verbose = False

    ## Mandatory
    inputDataPathFound = False
    outputDataPathFound = False
    logDataPathFound = False

    #print opts
    for o, a in opts:
        if o in ("-v", "--verbose"):
            verbose = True
        elif o in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif o in ("-i", "--input"):
            inputDataPath = a
            inputDataPathFound = True
        #elif o in ("-o", "--output"):
            #outputDataPath = a
            #outputDataPathFound = True
        #elif o in ("-l", "--log"):
            #logDataPath = a
            #logDataPathFound = True
        else:
            assert False, "unhandled option"

    ## Mandatory Check
    if not inputDataPathFound:  ## Is an input given?
        print "-i [file_path] or --input [file_path] was not given"
        usage()
        sys.exit(2)
    if not ntpath.isfile(inputDataPath):  ## Is input a file?
        print "The path given as an input is not a file"
        usage()
        sys.exit(2)

    ## Folder/File/Ext Names
    # Results are written to "<input stem>Results" next to the input.
    workingFolder, fileName = ntpath.split(inputDataPath)
    #fileName = ntpath.basename(inputDataPath) if '.' not in ntpath.basename(inputDataPath) else ntpath.basename(inputDataPath)
    fileName, ext = ntpath.splitext(fileName)
    #startTime = time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime())
    startTime = time.strftime("%x_%X")
    saveFolder = ntpath.join(workingFolder, fileName) + "Results"
    logsFile = startTime + "_Logs"
    loadFile = startTime + "_Load"
    if verbose:
        print "Starting Time : " + startTime
        print "workingFolder : " + workingFolder
        print "saveFolder : " + saveFolder
        print "fileName : " + fileName
        print "ext : " + ext

    ## Make dir
    if not ntpath.isdir(saveFolder):
        os.makedirs(saveFolder)

    train(inputDataPath)
def load_dbghelp(cls, pathname = None):
    """
    Load the specified version of the C{dbghelp.dll} library.

    This library is shipped with the Debugging Tools for Windows, and it's
    required to load debug symbols.

    Normally you don't need to call this method, as WinAppDbg already tries
    to load the latest version automatically - but it may come in handy if
    the Debugging Tools are installed in a non standard folder.

    Example::
        from winappdbg import Debug

        def simple_debugger( argv ):

            # Instance a Debug object, passing it the event handler callback
            debug = Debug( my_event_handler )
            try:

                # Load a specific dbghelp.dll file
                debug.system.load_dbghelp("C:\Some folder\dbghelp.dll")

                # Start a new process for debugging
                debug.execv( argv )

                # Wait for the debugee to finish
                debug.loop()

            # Stop the debugger
            finally:
                debug.stop()

    @see: U{http://msdn.microsoft.com/en-us/library/ms679294(VS.85).aspx}

    @type  pathname: str
    @param pathname:
        (Optional) Full pathname to the C{dbghelp.dll} library.
        If not provided this method will try to autodetect it.

    @rtype:  ctypes.WinDLL
    @return: Loaded instance of C{dbghelp.dll}.

    @raise NotImplementedError: This feature was not implemented for the
        current architecture.

    @raise WindowsError: An error occured while processing this request.
    """
    # If an explicit pathname was not given, search for the library.
    if not pathname:

        # Under WOW64 we'll treat AMD64 as I386.
        arch = win32.arch
        if arch == win32.ARCH_AMD64 and win32.bits == 32:
            arch = win32.ARCH_I386

        # Check if the architecture is supported.
        if not arch in cls.__dbghelp_locations:
            msg = "Architecture %s is not currently supported."
            raise NotImplementedError(msg % arch)

        # Grab all versions of the library we can find.
        found = []
        for pathname in cls.__dbghelp_locations[arch]:
            if ntpath.isfile(pathname):
                try:
                    f_ver, p_ver = cls.get_file_version_info(pathname)[:2]
                except WindowsError:
                    msg = "Failed to parse file version metadata for: %s"
                    warnings.warn(msg % pathname)
                    # BUG FIX: without this continue, f_ver below was
                    # referenced while unbound (or stale from a previous
                    # iteration), raising NameError or recording a bogus
                    # version for this file.
                    continue
                if not f_ver:
                    f_ver = p_ver
                elif p_ver and p_ver > f_ver:
                    f_ver = p_ver
                found.append( (f_ver, pathname) )

        # If we found any, use the newest version.
        if found:
            found.sort()
            pathname = found.pop()[1]

        # If we didn't find any, trust the default DLL search algorithm.
        else:
            pathname = "dbghelp.dll"

    # Load the library.
    dbghelp = ctypes.windll.LoadLibrary(pathname)

    # Set it globally as the library to be used.
    ctypes.windll.dbghelp = dbghelp

    # Return the library.
    return dbghelp
# NOTE(review): the loop below references `video_file` and
# `output_folder` and looks like the body of an __extract_images
# function whose header is outside this view; reproduced as committed.
current_frame = 0
extract_flow = cv2.VideoCapture(video_file)
while True:
    # reading from frame
    ret, frame = extract_flow.read()
    if ret:
        # if video is still left continue creating images
        name = os.path.join(output_folder, f'frame{current_frame}.jpg')
        # writing the extracted images
        cv2.imwrite(name, frame)
        # increasing counter so that it will
        # show how many frames are created
        current_frame += 1
    else:
        break
# Release all space and windows once done
extract_flow.release()


if __name__ == "__main__":
    # Build the mp4 from every file in ./input, then re-extract frames.
    video_file = os.path.join('output', 'video.mp4')
    __compress_to_mp4(video_file, [
        os.path.join('input', f) for f in listdir('input')
        if isfile(os.path.join('input', f))
    ])
    __extract_images('output', video_file)