def show_and_save(showplot=True, savefig=None, formats=FORMATS, pickleit=False, fig=None):
    """ Maximize the window if need to show it, save it if needed, and then show it or close it.

    - Inspired by https://tomspur.blogspot.fr/2015/08/publication-ready-figures-with.html#Save-the-figure

    :param showplot: if True, maximize the window and show the figure; otherwise close it.
    :param savefig: base file name (without extension) to save under, or None to skip saving.
    :param formats: iterable of file extensions to save the figure as (module default FORMATS).
    :param pickleit: if True (and fig given), also dump the raw figure object with pickle.
    :param fig: the figure object to pickle; only used when pickleit is True.
    """
    if showplot:
        maximizeWindow()
    if savefig is not None:
        if pickleit and fig is not None:
            form = "pickle"
            path = "{}.{}".format(savefig, form)
            print("Saving raw figure with format {}, to file '{}'...".format(form, path))  # DEBUG
            # binary mode: pickle streams are bytes
            with open(path, "bw") as f:
                pickle_dump(fig, f)
            print(" Saved! '{}' created of size '{}b', at '{:%c}' ...".format(path, getsize(path), datetime.fromtimestamp(getatime(path))))
        for form in formats:
            path = "{}.{}".format(savefig, form)
            print("Saving figure with format {}, to file '{}'...".format(form, path))  # DEBUG
            plt.savefig(path, bbox_inches=BBOX_INCHES)
            print(" Saved! '{}' created of size '{}b', at '{:%c}' ...".format(path, getsize(path), datetime.fromtimestamp(getatime(path))))
    try:
        # show blocks/loops the GUI; close releases the figure when not showing
        plt.show() if showplot else plt.close()
    except (TypeError, AttributeError):
        print("Failed to show the figure for some unknown reason...")  # DEBUG
def CheckAtime(fname, logfile):
    """Verify that reading *fname* updates its access time (atime).

    Compares minute-resolution atime stamps before/after a read, waits past
    the atime grace period, then re-checks; results go to *logfile* via
    o2tf.printlog.
    NOTE(review): relies on module-level `os`, `time`, `DEBUGON` and a
    module-level `atime` delay (seconds) — none defined locally; confirm.
    """
    from time import strftime, localtime
    from os.path import getatime
    from o2tf import printlog
    # atime before the read (minute resolution)
    Atime1 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    # read the file to trigger an atime update; output discarded
    os.system('cat %s >> /dev/null' % fname)
    Atime2 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    if Atime1 != Atime2:
        printlog('file %s - unexpected atime update - previous (%s),' \
                 ' current(%s)' % (fname, Atime1, Atime2), logfile, 0, '')
    # wait past the configured atime granularity before the final check
    time.sleep(atime + 1)
    Atime3 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    if Atime3 == strftime("%m %d %Y %H %M", localtime()):
        if DEBUGON:
            printlog('file %s - atime update successful - ' \
                     'previous (%s), current(%s)' % (fname, Atime1, Atime2),
                     logfile, 0, '')
    else:
        printlog('file %s - atime update failed - previous (%s),' \
                 ' current(%s), expected (%s)' % \
                 (fname, Atime1, Atime2, strftime("%m %d %Y %H %M", \
                 localtime())), logfile, 0, '')
def CheckAtime(fname, logfile):
    """Verify that reading *fname* updates its access time (atime).

    Duplicate of the earlier CheckAtime: stamps atime before/after a read,
    sleeps past the atime grace period, re-checks, and logs via printlog.
    NOTE(review): depends on module-level `os`, `time`, `DEBUGON` and an
    `atime` delay value not defined in this function; confirm.
    """
    from time import strftime, localtime
    from os.path import getatime
    from o2tf import printlog
    # atime before the read (minute resolution)
    Atime1 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    # read the file to trigger an atime update; output discarded
    os.system('cat %s >> /dev/null' % fname)
    Atime2 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    if Atime1 != Atime2:
        printlog('file %s - unexpected atime update - previous (%s),' \
                 ' current(%s)' % (fname, Atime1, Atime2), logfile, 0, '')
    # wait past the configured atime granularity before the final check
    time.sleep(atime+1)
    Atime3 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    if Atime3 == strftime("%m %d %Y %H %M", localtime()):
        if DEBUGON:
            printlog('file %s - atime update successful - ' \
                     'previous (%s), current(%s)' % (fname, Atime1, Atime2),
                     logfile, 0, '')
    else:
        printlog('file %s - atime update failed - previous (%s),' \
                 ' current(%s), expected (%s)' % \
                 (fname, Atime1, Atime2, strftime("%m %d %Y %H %M", \
                 localtime())), logfile, 0, '')
def getinfo(fn):
    """获取文件信息 — collect filesystem metadata for the entry described by *fn*.

    Args:
        fn: mapping with keys 'a' (absolute path) and 'f' (display name).

    Returns:
        -1 if the path does not exist; otherwise a dict with keys
        'l' (path), 'f' (name), 'a'/'c'/'m' (access/creation/modification
        timestamps, or 'N/A' on stat failure), 's' (size in bytes for files,
        'N/A' for directories) and 'i' ('f' file / 'd' directory).
        Paths that are neither file nor directory yield None, as before.

    Fixes: the four bare ``except:`` clauses (which also swallowed
    KeyboardInterrupt/SystemExit) are narrowed to OSError, and the
    duplicated file/directory branches are collapsed into one path.
    """
    def _probe(getter, target):
        # One metadata probe; OSError (permissions, races) degrades to 'N/A'.
        try:
            return getter(target)
        except OSError:
            return 'N/A'

    target = fn['a']
    if not exists(target):
        return -1
    if isfile(target):
        kind, size = 'f', _probe(getsize, target)
    elif isdir(target):
        # directories report no size, matching the original behavior
        kind, size = 'd', 'N/A'
    else:
        return None
    return {
        'l': target,
        'f': fn['f'],
        'a': _probe(getatime, target),
        'c': _probe(getctime, target),
        'm': _probe(getmtime, target),
        's': size,
        'i': kind,
    }
def get_metadata(self, simple_hash=True):
    '''
    Args:
        simple_hash: Boolean    => whether to only collect SHA256 hash or MD5 and SHA1 as well
    Returns:
        Container<String, Any>
        Container of metadata about this prefetch file:
            file_name: prefetch file name
            file_path: full path on local system
            file_size: size of file on local system
            md5hash: MD5 hash of prefetch file (None when simple_hash)
            sha1hash: SHA1 hash of prefetch file (None when simple_hash)
            sha2hash: SHA256 hash of prefetch file
            modify_time: last modification time of prefetch file on local system (UTC)
            access_time: last access time of prefetch file on local system (UTC)
            create_time: create time of prefetch file on local system (UTC)
    Preconditions:
        simple_hash is of type Boolean
    '''
    # NOTE: assert is stripped under -O; precondition only enforced in debug runs
    assert isinstance(simple_hash, bool), 'Simple_hash is of type Boolean'
    return Container(
        file_name=path.basename(self._filepath),
        file_path=path.abspath(self._filepath),
        file_size=path.getsize(self._filepath),
        # MD5/SHA1 are skipped (None) in simple mode; SHA256 is always computed
        md5hash=self._hash_file('md5') if not simple_hash else None,
        sha1hash=self._hash_file('sha1') if not simple_hash else None,
        sha2hash=self._hash_file('sha256'),
        # timestamps: tag with the local zone, then normalize to UTC
        modify_time=datetime.fromtimestamp(path.getmtime(self._filepath), tzlocal()).astimezone(tzutc()),
        access_time=datetime.fromtimestamp(path.getatime(self._filepath), tzlocal()).astimezone(tzutc()),
        create_time=datetime.fromtimestamp(path.getctime(self._filepath), tzlocal()).astimezone(tzutc())
    )
def main():
    """Demonstrate os.path inspection functions on this script's own path (Python 2)."""
    # script path, decoded from bytes using the filesystem encoding
    rootPath = sys_argv[0].decode(sys_get_fs_encoding())
    print 'RootPath\t', rootPath
    # absolute path of the file
    print 'path.abspath\t', path.abspath(rootPath)
    # file name only
    print 'path.basename\t', path.basename(rootPath)
    # check whether the file exists
    print 'path.exists\t', path.exists(rootPath)
    # directory containing the file
    print 'path.dirname\t', path.dirname(rootPath)
    # split off the extension
    print 'path.splitext\t', path.splitext(rootPath)
    # last *access* time (the original comment said "modification time";
    # getatime reports access time)
    print 'path.getatime\t', path.getatime(rootPath)
    # file size in bytes
    print 'path.getsize', path.getsize(rootPath)
    # normalize the path (slash/backslash cleanup)
    print 'path.normcase', path.normcase(rootPath)
    # position of the path relative to a base directory two levels up
    print 'path.relpath', path.relpath(rootPath, path.dirname(rootPath)+'/../../')
    # walk every file under the script's directory
    print 'path.walk(path, visit, arg)'
    for root, dirs, files in os.walk(path.dirname(rootPath)):
        print root
        for f in files:
            print os.path.join(root, f)
def runfile(self, index):
    ' run the choosed file '
    # whole file content, stripped (Python 2 `file` builtin)
    s = str(file(self.model.filePath(index), 'r').read().strip())
    f = str(self.model.filePath(index))
    # ctime is NOT crossplatform,metadata change on *nix,creation on Window
    # http://docs.python.org/library/os.path.html#os.path.getctime
    # Tooltip text: size / lines / chars / words / punctuation counts,
    # permission bits, atime/mtime and QFileInfo flags.
    # NOTE(review): N is presumably a module-level newline separator — confirm.
    m = ''.join((f, N, str(path.getsize(f) / 1024), ' Kilobytes', N,
                 str(len(file(f, 'r').readlines())), ' Lines', N,
                 str(len(s.replace(N, ''))), ' Characters', N,
                 str(len([a for a in sub('[^a-zA-Z0-9 ]', '', s).split(' ') if a != ''])), ' Words', N,
                 str(len([a for a in s if a in punctuation])), ' Punctuation', N,
                 oct(stat(f).st_mode)[-3:], ' Permissions', N,
                 time.ctime(path.getatime(f)), ' Accessed', N,
                 time.ctime(path.getmtime(f)), ' Modified', N,
                 'Owner: ', str(self.model.fileInfo(index).owner()), N,
                 'Is Writable: ', str(self.model.fileInfo(index).isWritable()), N,
                 'Is Executable: ', str(self.model.fileInfo(index).isExecutable()), N,
                 'Is Hidden: ', str(self.model.fileInfo(index).isHidden()), N,
                 'Is SymLink: ', str(self.model.fileInfo(index).isSymLink()), N,
                 'File Extension: ', str(self.model.fileInfo(index).suffix())
                 ))
    #print(m)
    self.preview.setToolTip(m)
    self.preview.setText(s)
    self.preview.resize(self.preview.size().width(), self.dock.size().height())
    # hand the file to the desktop's default opener
    self.process.start('xdg-open {}'.format(f))
    if not self.process.waitForStarted():
        print((" ERROR: Process {} Failed ! ".format(str(f))))
        return
def _get_attributes(self, context, buf):
    """Collect display attributes for one Vim buffer.

    Returns a dict with the buffer number, name, filetype, a timestamp
    (file atime if the buffer is backed by an existing file, otherwise
    "now") and a 4-character status string whose columns are:
    listed(' '/'u'), current('%')/alternate('#')/' ',
    active('a')/hidden('h')/' ', readonly('=')/modified('+')/
    nomodifiable('-')/' '.
    """
    attr = {
        'number': buf.number,
        'name': buf.name
    }
    attr.update({
        'filetype': buf.options['filetype'],
        # file-backed buffers: last access time; otherwise current time
        'timestamp': getatime(attr['name']) if exists(attr['name']) else time(),
        'status': '{0}{1}{2}{3}'.format(
            ' ' if self.vim.call('buflisted', attr['number']) else 'u',
            '%' if attr['number'] == context['__caller_bufnr']
            else '#' if attr['number'] == context['__alter_bufnr'] else ' ',
            # visible in a window -> active; loaded but not shown -> hidden
            'a' if self.vim.call('bufwinnr', attr['number']) > 0
            else 'h' if self.vim.call('bufloaded', attr['number']) != 0 else ' ',
            '=' if buf.options['readonly']
            else ('+' if buf.options['modified']
                  else '-' if buf.options['modifiable'] == 0 else ' ')
        )
    })
    return attr
def _get_attributes(self, context: UserContext,
                    buf: Buffer) -> typing.Dict[str, typing.Any]:
    """Collect display attributes for one Vim buffer (typed variant).

    Status string columns: listed(' '/'u'),
    current('%')/alternate('#')/' ', active('a')/hidden('h')/' ',
    readonly('=')/modified('+')/nomodifiable('-')/' '.
    """
    attr = {
        'number': buf.number,
        'name': buf.name
    }
    attr.update({
        'filetype': self.vim.call('getbufvar', buf.number, '&filetype'),
        # file-backed buffers: last access time; otherwise current time
        'timestamp': getatime(attr['name']) if exists(attr['name']) else time(),
        'status': '{}{}{}{}'.format(
            ' ' if self.vim.call('buflisted', attr['number']) else 'u',
            '%' if attr['number'] == context['__caller_bufnr']
            else '#' if attr['number'] == context['__alter_bufnr'] else ' ',
            # shown in some window -> active; loaded but not shown -> hidden
            'a' if self.vim.call('win_findbuf', attr['number'])
            else 'h' if self.vim.call('bufloaded', attr['number']) != 0 else ' ',
            '=' if buf.options['readonly']
            else '+' if buf.options['modified']
            else '-' if buf.options['modifiable'] == 0 else ' '
        )
    })
    return attr
def main():
    """Demonstrate os/os.path/time introspection on 'textfile.txt' in the CWD.

    Fix: the "modification time" line called path.getatime (last *access*
    time), contradicting both its comment and its printed label; it now uses
    path.getmtime like the elapsed-time computation below it.
    """
    # Print the name of the OS
    print(os.name)
    print()
    # Check for item existence and type
    print("Item exists: " + str(path.exists("textfile.txt")))
    print("Item is a file: " + str(path.isfile("textfile.txt")))
    print("Item is a directory: " + str(path.isdir("textfile.txt")))
    print()
    # Work with file paths
    print("Item path: " + str(path.realpath("textfile.txt")))
    print("Item path and name: " + str(path.split(path.realpath("textfile.txt"))))
    print()
    # Get the modification time (was getatime, which is access time)
    m_time = time.ctime(path.getmtime("textfile.txt"))
    print("Modification time: " + str(m_time))
    # Calculate how long ago the item was modified
    td = datetime.datetime.now() - datetime.datetime.fromtimestamp(
        path.getmtime("textfile.txt")
    )
    print("It has been " + str(td) + " since the file was modified")
    print("Or, " + str(td.total_seconds()) + " seconds")
def _get_attributes(self, context, buf):
    """Collect display attributes for one Vim buffer.

    Like the other _get_attributes variants: number, name, filetype,
    timestamp (file atime if the buffer file exists, else "now") and a
    4-char status string: listed(' '/'u'), current('%')/alternate('#'),
    active('a')/hidden('h'), readonly('=')/modified('+')/nomodifiable('-').
    """
    attr = {
        'number': buf.number,
        'name': buf.name
    }
    attr.update({
        'filetype': self.vim.call('getbufvar', buf.number, '&filetype'),
        # file-backed buffers: last access time; otherwise current time
        'timestamp': getatime(attr['name']) if exists(attr['name']) else time(),
        'status': '{0}{1}{2}{3}'.format(
            ' ' if self.vim.call('buflisted', attr['number']) else 'u',
            '%' if attr['number'] == context['__caller_bufnr']
            else '#' if attr['number'] == context['__alter_bufnr'] else ' ',
            # shown in some window -> active; loaded but not shown -> hidden
            'a' if self.vim.call('win_findbuf', attr['number'])
            else 'h' if self.vim.call('bufloaded', attr['number']) != 0 else ' ',
            '=' if buf.options['readonly']
            else '+' if buf.options['modified']
            else '-' if buf.options['modifiable'] == 0 else ' '
        )
    })
    return attr
def getMRUList(self):
    """Populate self.MRUList with mruClass entries parsed from every file
    directly inside self.MRUPath (KDE recent-documents .desktop format).

    Each entry records Name, URL and X-KDE-LastOpenedWith from the file
    plus the file's access/modification times as datetime objects.
    Malformed entries are reported and skipped (best-effort).
    """
    # only regular files directly inside the MRU directory
    fileList = [f for f in listdir(self.MRUPath) if isfile(join(self.MRUPath, f))]
    for foundFile in fileList:
        try:
            f = open(self.MRUPath + foundFile)
            tempMRU = mruOBJ.mruClass()
            # Extract info from within the file
            for line in f:
                # Clear trailing whitespaces
                line = line.rstrip()
                if line[0:4] == "Name":
                    tempMRU.name = line.split("=")[1]
                if line[0:3] == "URL":
                    tempMRU.URL = line.split("=")[1]
                if line[0:20] == "X-KDE-LastOpenedWith":
                    tempMRU.lastApp = line.split("=")[1]
            f.close()
            # Extract date info from OS (epoch)
            # then convert them to datetime objects
            epochTime = getatime(self.MRUPath + foundFile)
            tempMRU.accessDate = datetime.datetime.fromtimestamp(epochTime)
            epochTime = getmtime(self.MRUPath + foundFile)
            tempMRU.modifyDate = datetime.datetime.fromtimestamp(epochTime)
            # Append new element to internal list
            self.MRUList.append(tempMRU)
            # DEBUG - Show it!
            #tempMRU.show()
        except Exception, e:
            # best-effort: a malformed entry is reported, not fatal
            print "\n |(!)-> {}".format(e)
def prop(request, name):
    """HTTP view: return timestamp and size properties of file *name* under PATH.

    NOTE(review): *name* is joined onto PATH without sanitization — possible
    path traversal; confirm that callers/routing validate it.
    """
    filepath = path.join(PATH, name)
    response = {
        'File: ': filepath,
        'Access time: ': ctime(path.getatime(filepath)),
        'Modified time: ': ctime(path.getmtime(filepath)),
        # "change time": ctime is metadata-change on Unix, creation on Windows
        'Change time: ': ctime(path.getctime(filepath)),
        'Size: ': path.getsize(filepath)
    }
    return Response(response)
def getfiledata(file_path):
    """Describe the file at *file_path*.

    path.getatime returns the last access time as seconds since the epoch.

    :param file_path: path of the file to inspect
    :return: (last_access_time_since_epoch, size_in_bytes, file_path) tuple
    """
    return path.getatime(file_path), path.getsize(file_path), file_path
def zipExtractFile(zipFileName, elem, dstDir="."):
    """Extract *elem* from the ZIP archive into *dstDir* and restore the
    member's recorded modification time (the atime is kept as-is).

    NOTE(review): depends on module-level `tmpDir`, `args`, `os` — the
    "inflating" banner is only printed outside the temp directory.
    """
    with ZipFile(zipFileName) as zipFile:
        # stored timestamp of the member: (year, month, day, h, m, s)
        dateTimeTuple = zipFile.getinfo(elem).date_time
        newDateTime = datetime(*dateTimeTuple)
        zipFile.extract(elem, dstDir)
    if dstDir != tmpDir:
        if args.dir == "/":
            print " inflating: " + "/" + elem
        else:
            print " inflating: " + args.dir + "/" + elem
    # keep the current atime, set mtime to the archive's timestamp
    utime(dstDir + os.sep + elem,
          (getatime(dstDir + os.sep + elem), mktime(newDateTime.timetuple())))
def timeout(self):
    """Delete session files in TRB_SESS_DIR whose last access is older than
    the timeout, printing each deletion.

    Fix: the original rebound the local name ``time`` to a float
    (``time = time() - access_date``), so the second loop iteration crashed
    with "'float' object is not callable"; the age now uses its own name.
    NOTE(review): timeout is 1440 but is compared against *seconds* — it
    looks like it was meant to be minutes (24 h); confirm before changing.
    """
    print(HTTP_HTML, "\n")
    timeout = 1440
    ficheros = os.listdir(TRB_SESS_DIR)
    for fichero in ficheros:
        ruta = path.join(TRB_SESS_DIR, fichero)
        access_date = path.getatime(ruta)
        # seconds since the session file was last accessed
        age = time() - access_date
        if age > timeout:
            unlink(ruta)
            print(age, ruta)
def zipExtractAll(zipFileName, dstDir="."):
    """Extract every file from the archive into *dstDir*, restoring each
    member's recorded modification time, and return (fileList, dirList)
    as produced by readZIPFile.
    """
    # print >> stderr, "=> Extracting all files to < " + dstDir + " > to preserve the timestamps."
    fileList, dirList = readZIPFile(zipFileName)
    with ZipFile(zipFileName) as zipFile:
        for elem in fileList:
            # stored timestamp of the member: (year, month, day, h, m, s)
            dateTimeTuple = zipFile.getinfo(elem).date_time
            newDateTime = datetime(*dateTimeTuple)
            zipFile.extract(elem, dstDir)
            # keep the current atime, set mtime from the ZIP entry
            utime(dstDir + os.sep + elem,
                  (getatime(dstDir + os.sep + elem), mktime(newDateTime.timetuple())))
    return fileList, dirList
def compareFileTime(sourcePath, targetPath, flag='m'):
    """比较两个文件或文件夹的修改/创建/访问时间,返回时间差,单位为秒
    (Compare the modification/creation/access times of two paths and return
    the difference in whole seconds.)

    :param sourcePath: first file or directory
    :param targetPath: second file or directory
    :param flag:
        'm': modification,修改时间 (default; also used for unknown flags)
        'c': creation,创建时间
        'a': access,最后访问时间
    :return: int(source_time) - int(target_time)
    """
    # dispatch on flag; anything unrecognized falls back to mtime,
    # matching the original if/elif/else chain
    getters = {'c': path.getctime, 'a': path.getatime}
    getter = getters.get(flag, path.getmtime)
    return int(getter(sourcePath)) - int(getter(targetPath))
def expandregion(self):
    """Return the status lines of the most recently accessed VMs in this region.

    Scans <region>/<account>/<name> directories, sorts by last access time
    (newest first) and keeps at most self.statuscount entries, one status
    string per line.

    Fix: the sort key was ``path.getatime(path.join(self.region, account,
    x[1]))`` — but ``x[1]`` is already the full VM path, and ``account``
    had leaked from the loop (its last value), so the key pointed at a
    nonexistent nested path. It now stats ``x[1]`` directly.
    """
    vms = []
    for account in listdir(self.region):
        for name in listdir(path.join(self.region, account)):
            vms.append((self.statusofVM(account=account, name=name),
                        path.join(self.region, account, name)))
    # x = (status_line, full_vm_path); newest access first
    vms.sort(key=lambda x: path.getatime(x[1]), reverse=True)
    if len(vms) > self.statuscount:
        vms = vms[:self.statuscount]
    return '\n'.join([vm[0] for vm in vms])
def _get_data(self, interval='d'): update_required = self.check_for_update # assuming that update will be required output = pd.DataFrame() # time info time_now = datetime.now() weekday_now = datetime.weekday(time_now) is_weekend = True if weekday_now in (5, 6) else False # calculate expected date for last OHLC data, consider only weekdays # from Mo-Fr, assume that last update was one day earlier delta_days = (weekday_now - 4) if is_weekend else 0 expected_ohlc_time = time_now - timedelta(days=delta_days) # file with data for ticker exists if get_storage_status() and path.exists( self.csv_file_path(interval=interval)): timestamp_now = datetime.timestamp(time_now) timestamp_up = path.getatime(self.csv_file_path( interval=interval)) # CSV file modification time # CSV updated within last 24 hours or it is weekend (no new data) if (timestamp_now - timestamp_up < StockQuotes.update_period * 3600) or is_weekend: output = read_ohlcv_from_csv( self.csv_file_path(interval=interval)) last_ohlc_time = output.iloc[-1].name updated_data = last_ohlc_time.date( ) == expected_ohlc_time.date() session_time = time_now.hour < StockQuotes.update_hour and not is_weekend if updated_data or session_time: update_required = False if update_required: # update CSV file and read data new_output = self.download_ohlc_from_stooq(interval=interval) if not new_output.empty: # Updated data downloaded - update output output = new_output # save to CSV if get_storage_status(): new_output.to_csv(self.csv_file_path(interval=interval)) else: # Update error (Stooq: Exceeded the daily hits limit) pass if not output.empty: output.sort_index(ascending=True, inplace=True) return output
def procedure_file_metadata(file_location: Path) -> Dict:  # pragma: no cover
    """ A dict of:
    full_path
    last_modified
    last_accessed

    Timestamps are ctime-style strings; full_path is the resolved Path.
    """
    return {
        "last_modified": ctime(getmtime(file_location)),
        "last_accessed": ctime(getatime(file_location)),
        "full_path": file_location.resolve(),
    }
def get_dates(path):
    """Return (created, accessed, modified) for *path*.

    Each element is the timestamp formatted as 'DD Mon YYYY, HH:MM:SS',
    or the raised OSError instance when the corresponding stat failed.
    """
    fmt = '%d %b %Y, %H:%M:%S'

    def stamp(getter):
        # degrade to the exception object itself on stat failure,
        # mirroring the original per-timestamp try/except blocks
        try:
            return datetime.fromtimestamp(getter(path)).strftime(fmt)
        except OSError as err:
            return err

    return stamp(getctime), stamp(getatime), stamp(getmtime)
def dirInfo(self, path):
    """Print a table (name | size | access time | modification time) for every
    entry directly inside *path*.

    Fixes: the bare ``except`` is narrowed to OSError, and the
    modification-time column is now strftime-formatted like the first
    timestamp column instead of printing a raw epoch float.
    NOTE(review): the "Czas stworzenia" (creation time) column actually
    shows getatime (last access) — kept as-is; confirm the intent.
    """
    from os import listdir, chdir
    from os.path import getsize, getatime, getmtime
    from time import gmtime, strftime
    try:
        chdir(path)
        print('%50s | %10s | %30s | %30s' % ("Nazwa", "Rozmiar", "Czas stworzenia", "Czas modyfikacji"))
        for entry in listdir(path):
            print('%50s | %10i | %30s | %30s' % (
                entry,
                getsize(entry),
                strftime('%d-%m-%Y %H:%M:%S', gmtime(getatime(entry))),
                strftime('%d-%m-%Y %H:%M:%S', gmtime(getmtime(entry)))))
    except OSError:
        print('Error! Błędna ścieżka pliku.')
def CheckAtime0(fname, logfile):
    """Check that reading *fname* updates its atime to "now" (minute
    resolution), logging success (only when DEBUGON) or failure to
    *logfile* via o2tf.printlog.
    NOTE(review): relies on module-level `os` and `DEBUGON` — confirm.
    """
    from time import strftime, localtime
    from os.path import getatime
    from o2tf import printlog
    # atime before the read (minute resolution)
    Atime1 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    # read the file to trigger an atime update; output discarded
    os.system("cat %s >> /dev/null" % fname)
    Atime2 = strftime("%m %d %Y %H %M", localtime(getatime(fname)))
    if Atime2 == strftime("%m %d %Y %H %M", localtime()):
        if DEBUGON:
            printlog(
                "file %s - atime update successful - "
                "previous (%s), current(%s)" % (fname, Atime1, Atime2),
                logfile,
                0,
                "",
            )
    else:
        printlog(
            "file %s - atime update failed - previous (%s),"
            " current(%s), expected (%s)"
            % (fname, Atime1, Atime2, strftime("%m %d %Y %H %M", localtime())),
            logfile,
            0,
            "",
        )
def encrypt_and_store_file(self, args):
    """Encrypt one file into blobs, upload them, and return its manifest entry.

    :param args: (filename, current_count, total_count) tuple — packed into
        one argument for map-style callers; counts drive the progress bar.
    :return: {archived_name: {'blobs': [...], 'atime': ..., 'mtime': ...}}
    """
    filename, current_count, total_count = args
    _print_progress(current_count + 1, total_count + 1, filename)
    outputs = []
    for temp_path in self.generate_encrypted_blobs(filename):
        blob_name = path.basename(temp_path)
        self.blob_store.save_blob(blob_name, temp_path)
        outputs.append(blob_name)
        print(blob_name)
    return {
        self.archived_filename(filename): {
            'blobs': outputs,
            # original timestamps recorded so they can be restored later
            'atime': path.getatime(filename),
            'mtime': path.getmtime(filename),
        }
    }
def rename_file_by_time_accurate(file_path, order=0):
    """
    like function@rename_file_by_time()
    format: YearMonthDay-HourMinuteSecond
    if you want to use this function, please modify the code of main_process():
        if select == "6":
            folder_path_input = input_path_and_check()
            order = int(input(in_order_rename_time_cn))
            for file_path in get_all_files_path(folder_path_input, order)
                is_success = rename_file_by_time(folder_path_input, order)
                if is_success == 1:
                    count_s += 1
                count += 1
            report_result(count, count_s)
            return 1
    :param file_path: file path
    :param order: 2: modify time 3: assess time other: create time
    :return: 1:success 0: path error -1: same name file exists
    """
    try:
        file_type = path.splitext(file_path)[1]
        # get a float
        if order == 2:
            file_time = path.getmtime(file_path)
        elif order == 3:
            file_time = path.getatime(file_path)
        else:
            file_time = path.getctime(file_path)
        # format the time
        # time.localtime(time): getctime()->localtime
        file_time_format = time.strftime("%Y%m%d-%H%M%S", time.localtime(file_time))
        file_name_new = str(file_time_format) + file_type
        file_name_final = path.join(path.dirname(file_path), file_name_new)
        try:
            rename(file_path, file_name_final)
            print(out_mission_compete_cn % (path.basename(file_path), file_name_new))
            return 1
        except OSError:
            # a file with the timestamp-based name already exists
            print(out_same_name_error_cn)
            return -1
    except OSError:
        # file_path itself could not be stat'ed
        print(out_no_path_error_cn)
        return 0
def deliver(identifier): identifier = secure_filename(identifier) # Load info from db info = db.session.query(KmlInfo).get(identifier) # If identifier is invalid return 404 if info is None: abort(404) # make legend and density-dicts dicts = [] for key, color in json.loads(info.color_info).items(): grid = app.config['grids'][info.city].get(key, None) # remove the css '#' from color-string grid['color'] = color[1:] dicts.append(grid) # Create kml kml_path = path.join(app.config['RESULT_FOLDER'], '%s.kml' % identifier) if not path.exists(kml_path): density_kml( kml_path, info.city, dicts, app.config['borders'], scaling=lambda x: x**info.scaling, ) # file limiter results = listdir(app.config['RESULT_FOLDER']) if len(results) > app.config['FILE_LIMIT']: # Delete last accessed file access_list = [] for filename in results: filepath = path.join(app.config['RESULT_FOLDER'], filename) access_list.append((filepath, path.getatime(filepath))) # Sort by access time and take first element (least accessed) # Take first key of tuple (path) access_list.sort(key=itemgetter(1)) last_accessed_path = access_list[0][0] remove(last_accessed_path) return send_file(kml_path)
def deliver(identifier): identifier = secure_filename(identifier) # Load info from db info = db.session.query(KmlInfo).get(identifier) # If identifier is invalid return 404 if info is None: abort(404) # make legend and density-dicts dicts = [] for key, color in json.loads(info.color_info).items(): grid = app.config['grids'][info.city].get(key, None) # remove the css '#' from color-string grid['color'] = color[1:] dicts.append(grid) # Create kml kml_path = path.join(app.config['RESULT_FOLDER'], '%s.kml' % identifier) if not path.exists(kml_path): density_kml( kml_path, info.city, dicts, app.config['borders'], scaling=lambda x: x ** info.scaling, ) # file limiter results = listdir(app.config['RESULT_FOLDER']) if len(results) > app.config['FILE_LIMIT']: # Delete last accessed file access_list = [] for filename in results: filepath = path.join(app.config['RESULT_FOLDER'], filename) access_list.append((filepath, path.getatime(filepath))) # Sort by access time and take first element (least accessed) # Take first key of tuple (path) access_list.sort(key=itemgetter(1)) last_accessed_path = access_list[0][0] remove(last_accessed_path) return send_file(kml_path)
def file_info(in_file, out_file):
    """Build a human-readable report (size, mtime, atime, MD5) for the two
    given files and return it as one string.

    :param in_file: first file path to report on
    :param out_file: second file path to report on
    :return: multi-line report string, one section per file
    """
    from os import path
    from datetime import datetime
    # assemble the report as a list of fragments and join once at the end
    parts = []
    for f in (in_file, out_file):
        parts.append(f + '\n')
        parts.append(' Размер файла: ' + str(path.getsize(f)/1024) + ' Kb\n')
        parts.append(' Дата модификации: ' + datetime.fromtimestamp(path.getmtime(f)).strftime("%d-%m-%Y %H:%M:%S") + '\n')
        parts.append(' Дата обращения: ' + datetime.fromtimestamp(path.getatime(f)).strftime("%d-%m-%Y %H:%M:%S") + '\n')
        parts.append(' Контрольная сумма: ' + getMD5sum(f) + '\n')
        parts.append('--------------------------------------------------------------\n')
    return ''.join(parts)
def lista_restringida(ruta=getcwd(), dias=30):
    """List files under *ruta* accessed within the last *dias* days and
    smaller than a byte limit read interactively from the user; prints the
    matches (or a "none found" message).

    Fix: listdir() returns bare names, which the original passed straight to
    isfile/getatime/getsize — so the filter only worked when *ruta* was the
    current directory. Names are now joined onto *ruta* first.
    NOTE(review): the getcwd() default is evaluated once at import time —
    kept for interface compatibility.
    """
    from os.path import join
    s = int(
        input(
            'Ingrese el límite de tamaño (en bytes) que los archivos no deben superar:'
        ))
    # cutoff date: anything accessed on or before this is excluded
    t = datetime.now().date() - timedelta(days=dias)
    rutas_completas = (join(ruta, nombre) for nombre in listdir(ruta))
    lista = [
        x for x in rutas_completas
        if isfile(x)
        and datetime.fromtimestamp(getatime(x)).date() > t
        and getsize(x) < s
    ]
    if lista != []:
        for x in lista:
            print(x)
    else:
        print('No existen archivos con esa fecha y tamaño')
def _get_data(self): update_required = True # assuming that update will be required output = pd.DataFrame() # time info time_now = datetime.now() weekday_now = datetime.weekday(time_now) is_weekend = True if weekday_now in (5, 6) else False # calculate expected date for last OHLC data, consider only weekdays # from Mo-Fr, assume that last update was one day earlier delta_days = (weekday_now - 4) if is_weekend else 0 expected_ohlc_time = time_now - timedelta(days=delta_days) last_ohlc_time = expected_ohlc_time - timedelta(days=1) # file with data for ticker exists if path.exists(self.csv_file_path): timestamp_now = datetime.timestamp(time_now) timestamp_up = path.getatime( self.csv_file_path) # CSV file modification time # CSV updated within last 24 hours or it is weekend (no new data) if (timestamp_now - timestamp_up < 24 * 3600) or is_weekend: output = self.read_csv_file() last_ohlc_time = output.iloc[-1].name if last_ohlc_time.date() == expected_ohlc_time.date() or ( time_now.hour < 20 and not is_weekend): update_required = False if update_required: # update CSV file and read data new_output = self.download_ohlc_from_stooq() if not new_output.empty: # Updated data downloaded - save to CSV and update output new_output.to_csv(self.csv_file_path) output = new_output else: # Update error (Stooq: Exceeded the daily hits limit) pass if not output.empty: output.sort_index(ascending=True, inplace=True) return output
def save (self):
    """Persist the manifest table to self.filename (gzip + pickle) when it
    was modified, then pin the file's mtime to self.mtime (the newest
    source-file mtime) so staleness checks keep working.
    NOTE(review): block was single-line in the original; utime is placed
    after the ``with`` so the gzip close cannot clobber the restored mtime —
    confirm against upstream.
    """
    from os.path import exists, getatime, getmtime, normpath
    from os import utime
    import gzip
    import pickle as pickle
    from pygeode.progress import PBar
    if self.modified_table is True and self.filename is not None:
        with gzip.open(self.filename,'w') as f:
            # version marker first, then the pickled table blob
            pickle.dump(_MANIFEST_VERSION, f)
            blob = pickle.dumps(self.table)
            f.write(blob)
        # Set the modification time to the latest file that was used.
        atime = getatime(self.filename)
        utime(self.filename,(atime,self.mtime))
        self.modified_table = False
def get_dates(path):
    """Return (created, accessed, modified) for *path*.

    Each element is the timestamp formatted as 'DD Mon YYYY, HH:MM:SS', or
    the raised OSError instance when the corresponding stat call failed.
    """
    stamps = []
    # probe ctime, atime and mtime in order; each failure is recorded
    # as the exception object itself, as in the original try blocks
    for getter in (getctime, getatime, getmtime):
        try:
            stamps.append(datetime.fromtimestamp(
                getter(path)).strftime('%d %b %Y, %H:%M:%S'))
        except OSError as err:
            stamps.append(err)
    created, accessed, modified = stamps
    return created, accessed, modified
def rename_file_by_time(folder_path, order):
    """
    rename file by create/modify/assess time
    format: YearMonthDay-1/2/3/...
    order = 2: modify time
    order = 3: assess time
    order = others: create time
    :param folder_path: the folder of path
    :param order: the order
    :return: the amount of files (None if listing/sorting raised OSError)
    """
    try:
        serial_num = 1  # serial number
        count = 0
        # sort all files by the chosen timestamp so serial numbers follow it
        if order == 2:
            # the recent modify time of the files
            list_ordered = sorted(get_all_files_path(folder_path),
                                  key=lambda file_p: path.getmtime(file_p))
        elif order == 3:
            # the recent assess time of the files
            list_ordered = sorted(get_all_files_path(folder_path),
                                  key=lambda file_p: path.getatime(file_p))
        else:
            list_ordered = sorted(get_all_files_path(folder_path),
                                  key=lambda file_p: path.getctime(file_p))
        for file_path in list_ordered:
            file_type = path.splitext(file_path)[1]
            # get a float
            if order == 2:
                file_time = path.getmtime(file_path)
            elif order == 3:
                file_time = path.getatime(file_path)
            else:
                file_time = path.getctime(file_path)
            # format the time
            # time.localtime(time): getctime()->localtime
            file_time_format = time.strftime("%Y%m%d-", time.localtime(file_time))
            file_name_new = str(file_time_format) + str(serial_num) + file_type
            file_name_final = path.join(path.dirname(file_path), file_name_new)
            try:
                rename(file_path, file_name_final)
            except OSError:
                # target name already taken; reported but not fatal
                print(out_same_name_error_cn)
            print(out_mission_compete_cn % (path.basename(file_path), file_name_new))
            serial_num = serial_num + 1
            count = count + 1
        return count
    except OSError:
        print(out_error_happen_cn)
def save(self):
    """Persist the manifest table to self.filename (gzip + pickle) when it
    was modified, then pin the file's mtime to self.mtime (the newest
    source-file mtime) so staleness checks keep working. Duplicate of the
    other ``save``.
    NOTE(review): block was single-line in the original; utime is placed
    after the ``with`` so the gzip close cannot clobber the restored mtime —
    confirm against upstream.
    """
    from os.path import exists, getatime, getmtime, normpath
    from os import utime
    import gzip
    import pickle as pickle
    from pygeode.progress import PBar
    if self.modified_table is True and self.filename is not None:
        with gzip.open(self.filename, 'w') as f:
            # version marker first, then the pickled table blob
            pickle.dump(_MANIFEST_VERSION, f)
            blob = pickle.dumps(self.table)
            f.write(blob)
        # Set the modification time to the latest file that was used.
        atime = getatime(self.filename)
        utime(self.filename, (atime, self.mtime))
        self.modified_table = False
def main():
    """Demo: back up chakri.txt (content + metadata), print its timestamps,
    and zip its containing directory."""
    if(path.exists("chakri.txt")):
        src=path.realpath("chakri.txt");
        print(src)
        head, tail = path.split(src)
        print("path:" +head)
        print("path:" +tail)
        # copy content, then copy metadata (times/permissions) to the .bak
        dst=src+".bak"
        shutil.copy(src, dst)
        shutil.copystat(src,dst)
        t=time.ctime(path.getmtime("chakri.txt"))
        print(t)
        # mtime, ctime (metadata change / creation), then atime
        print(datetime.datetime.fromtimestamp(path.getmtime("chakri.txt")))
        print(datetime.datetime.fromtimestamp(path.getctime("chakri.txt")))
        print(datetime.datetime.fromtimestamp(path.getatime("chakri.txt")))
        #os.rename("chakri2.txt","chakri.txt")
        # zip the whole containing directory
        shutil.make_archive("chakri archive","zip",head)
def goRecursivelly(self, basepath, paths):
    """Depth-first scan of *basepath* (Python 2): append a Path.Path object
    (with ctime/mtime/atime/size filled in) for every entry to *paths*,
    recursing into subdirectories, and return the list.
    """
    for p in listdir(unicode(basepath)):
        fp = unicode(join(basepath, p))
        isDir = isdir(fp)
        np = Path.Path(fp, isDir)
        # stat-derived metadata on the project Path object
        np.ctime = getctime(fp)
        np.mtime = getmtime(fp)
        np.atime = getatime(fp)
        np.size = getsize(fp)
        # progress hook, if the caller registered one
        if not self.progressCallback is None:
            self.progressCallback(self, 'newPath', {'p': np, 'isDir': isDir})
        paths.append(np)
        if isDir:
            paths = self.goRecursivelly(fp, paths)
    return paths
def loadClass(self, transaction, path):
    """Compile (if stale) and load the PSP page class for *path*.

    The compiled .py file's mtime is pinned to the source's mtime, so any
    mismatch means the source changed and a recompile is needed.
    """
    className = self.computeClassName(path)
    classFile = join(self._cacheDir, className + ".py")
    mtime = getmtime(path)
    # recompile when no cached file exists or its mtime no longer matches
    if not exists(classFile) or getmtime(classFile) != mtime:
        context = Context.PSPCLContext(path)
        context.setClassName(className)
        context.setPythonFileName(classFile)
        context.setPythonFileEncoding(self._fileEncoding)
        clc = PSPCompiler.Compiler(context)
        sourceFiles = clc.compile()
        # Set the modification time of the compiled file
        # to be the same as the source file;
        # that's how we'll know if it needs to be recompiled:
        utime(classFile, (getatime(classFile), mtime))
        # Record all included files so we can spot any changes:
        for sourcefile in sourceFiles:
            self._recordFile(sourcefile)
    return self.loadClassFromFile(transaction, classFile, className)
def formatFileTime(filePath, flag='m'):
    """格式化文件或文件夹的修改/创建/访问时间
    (Format the modification/creation/access time of a file or directory.)

    :param filePath: file or directory path
    :param flag:
        'm': modification,修改时间 (default; also used for unknown flags)
        'c': creation,创建时间
        'a': access,最后访问时间
    :return: local time formatted as 'YYYY-MM-DD HH:MM:SS'
    """
    if flag == 'c':
        stamp = path.getctime(filePath)
    elif flag == 'a':
        stamp = path.getatime(filePath)
    else:
        stamp = path.getmtime(filePath)
    # struct_time's first six fields are (year, mon, mday, hour, min, sec)
    fields = time.localtime(stamp)[:6]
    return "%d-%02d-%02d %02d:%02d:%02d" % fields
def from_json(self, blob, append=False, _fix=False):
    """Load member data from JSON and (re)build the members list.

    :param blob: JSON text (str) or an open file-like object for json.load.
    :param append: if False, reset self._members before appending.
    :param _fix: internal — skip fix_dict_logs() when True.
    """
    if type(blob) is str:
        self.data = json.loads(blob)
        # string input: no file to stat, stamp with "now"
        self.last_update = datetime.fromtimestamp(
            time.time()).strftime('%d/%m/%Y')
    else:
        self.data = json.load(blob)
        # file input: stamp from the file's timestamp
        # NOTE(review): getatime is the *access* time, not modification —
        # confirm getmtime wasn't intended here.
        self.last_update = datetime.fromtimestamp(getatime(
            blob.name)).strftime('%d/%m/%Y')
    if not append:
        self._members = []
    # wrap each raw member record in a Player
    for member in self.data['tMembers']:
        player = Player(member)
        self._members.append(player)
    if not _fix:
        self.fix_dict_logs()
def u_dir_info(root, mod=False):
    '''
    return info from selected directory
    @param string root: absolute pathname
    @param boolean mod: if return only mtime
    @return: int mtime when mod is True, otherwise (int atime, int mtime);
        zero(s) when the path does not exist
    '''
    try:
        if mod:
            return int(path.getmtime(root))
        return int(path.getatime(root)), int(path.getmtime(root))
    except FileNotFoundError:
        # missing path degrades to zero timestamps instead of raising
        return 0 if mod else (0, 0)
def get_accessed_time(self, name):
    """Return the last-accessed time of template *name*.

    Resolves the template to a file via self._get_template_file and converts
    its atime with datetime_from_timestamp (presumably a tz-aware datetime —
    confirm against that helper).
    """
    return datetime_from_timestamp(getatime(self._get_template_file(name)))
from os import path

# Demo of os.path queries and joins (Python 2 print statements).
print "============================================================"
print "return verified path exists : ", path.exists("/home/anb")
# NOTE(review): getatime is the last *access* time, not the creation date
# the printed label claims; the runtime string is left untouched.
print "return date of create :", path.getatime("/home/anb")
print "return date of modified :", path.getmtime("/home/anb")
print "============================================================"
# join relative parts
print "path join : ", path.join("home", "pepe")
# absolute join starting at the filesystem root
print "path join : ", path.join("/", "home", "anb")
# Windows drive-style join
print "path join win : ", path.join("c:", "Users")
print "============================================================"
def _get_atime ( self ): try: return strftime( '%m/%d/%Y %I:%M:%S %p', localtime( getatime( self.file_name ) ) ) except: return ''
def accessed_time(self, name):
    """Return the last-access time of the stored file *name* as a naive
    local datetime (storage-API accessor; resolves via self.path)."""
    access_stamp = path.getatime(self.path(name))
    return datetime.fromtimestamp(access_stamp)
def get_atime(self, path):
    """Return the last-access time of *path* as a naive local datetime,
    after resolving it through self._resolve_path."""
    resolved = self._resolve_path(path)
    return datetime.fromtimestamp(getatime(resolved))
def test106():
    """Print this module's atime, mtime and ctime (ctime-formatted), then
    its size in bytes."""
    from time import ctime
    # the three timestamps, in the same order as the original prints
    for stamp in (path.getatime(__file__),
                  path.getmtime(__file__),
                  path.getctime(__file__)):
        print(ctime(stamp))
    print(path.getsize(__file__))
def _get_last_accessed ( self ): """ Returns the time at which the file was last accessed. """ return (getatime( self.absolute_path ) if self.exists else 0)
def atime(self):
    """Last access time of this path-like object, as a POSIX timestamp
    (self is used directly as the filesystem path)."""
    access_timestamp = ospath.getatime(self)
    return access_timestamp
print("\n")
# Atomically repoint the HYPER_ESTRAIER_DB_PATH symlink at the freshly
# built db_path: create a temp symlink alongside it, then rename() over
# the real name (rename is atomic on the same filesystem).
temp_db_path = localsettings.HYPER_ESTRAIER_DB_PATH + "_temp"
if path.islink(temp_db_path):
    os.remove(temp_db_path)
os.symlink(db_path, temp_db_path)
os.rename(temp_db_path, localsettings.HYPER_ESTRAIER_DB_PATH)
# Garbage-collect old index directories ("casket_*") in the same base dir
# that have not been accessed for over an hour.
db_base_dir = path.dirname(localsettings.HYPER_ESTRAIER_DB_PATH)
dirs = os.listdir(db_base_dir)
cur_time = time.time()
print("cur_time = %d\n" % cur_time)
for dir in dirs:
    # NOTE(review): `dir` shadows the builtin and is rebound to the full
    # path mid-loop; left unchanged here.
    if not dir.startswith("casket_"):
        continue
    dir = db_base_dir + "/" + dir
    atime = path.getatime(dir)
    delta_atime = cur_time - atime
    print("access time of %s = %d delta_atime = %d" % (dir, atime, delta_atime))
    if delta_atime > 60 * 60:
        # access time more than 1 hour ago
        print("deleting %s with delta_atime = %d" % (dir, delta_atime))
        shutil.rmtree(dir)
# Tutorial notes on dates/times and os.path (Python 2 print syntax).
import datetime
from datetime import date
import time

# NOTE(review): this rebinds the name `time` from the module to a date
# object — time.time() is unusable from here on.
time = date.fromtimestamp(time.time())  # converts time.time() to a readable date
print time
newDate = time.strftime("%d/%m/%y")  # reformats the date with strftime
print newDate
ad = date.fromordinal(10000)  # date for proleptic-Gregorian ordinal 10000 (days since 0001-01-01)
print ad
############################# OPERATING SYSTEM ###############
print 'operating system'
import os
from os import path
path2 = path.exists("C:")  # checks whether a directory exists
print path2
pathTime = path.getatime("C:")  # last *access* time (not modification, despite the name here)
print pathTime
#pathBytes = path.getsize{"C:"} #shows the size of a directory in bytes.
pathJoin = path.join("C:", "users")
print pathJoin
############################### LAST TIPS ###########################
#assign the modules and functions in variables.
#random.random() will be:
ran = random.random #parenthesis will be in the variable
# NOTE(review): `random` and `math` are never imported in this snippet,
# so the remaining lines raise NameError as written — confirm intent.
print ran() #parenthesis here
square = math.sqrt
print square(2)
def check_logs():
    """Scan configured directories for expected report files and mail an HTML summary.

    Reads ``check_mis_logs_config.ini`` (located next to this script) for:
    directories to scan (``paths_to_log``), file patterns to expect
    (``files_to_check``, each entry ``name;max_age_days``), report names, and
    SMTP settings.  For every pattern it checks that a non-empty, recent-enough
    file exists; failures are appended to an HTML ``message`` which is mailed
    at the end (or immediately on a fatal configuration error, before exit()).

    NOTE(review): paths use '\\\\' separators, so this is Windows-only.
    NOTE(review): freshness is judged by getatime (last *access* time);
    getmtime (modification time) may be what is actually intended — confirm.
    """
    # Log both to a file next to the script and to the console.
    log_file = path.dirname(path.realpath(__file__)) + '\\check_mis_logs.log'
    logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w',
                        format='%(asctime)s %(levelname)s: %(message)s',
                        datefmt='%Y.%m.%d %H:%M:%S')
    logging.getLogger().addHandler(logging.StreamHandler())
    logging.debug("started at: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    config_file = path.dirname(path.realpath(__file__)) + '\\check_mis_logs_config.ini'
    config_main_section = 'main'
    config_mail_section = 'mail'
    # Fallback mail settings; overwritten from the config file below.
    email_server = '172.16.6.6'
    email_login = '******'
    email_pass = '******'
    email_to = ['*****@*****.**']
    message = ''

    def mail():
        # Closure over the surrounding locals: reads their values at call
        # time, so settings re-assigned from the config file are picked up.
        __send_mail(email_server, email_login, email_pass, email_to, message, log_file)

    # Fatal: no config file — mail the (empty) report and bail out.
    if not path.exists(config_file):
        logging.error("config file '" + config_file + "' doesn\'t exist")
        logging.error("exiting due to error")
        mail()
        exit()
    paths_to_log = list()
    files_to_check = list()
    reports_name = list()
    logging.debug("reading the config file")
    try:
        config = configparser.ConfigParser()
        config.read(config_file, encoding="utf-8")
        # Multi-line config values become lists, one entry per line.
        paths_to_log = config[config_main_section]['paths_to_log'].splitlines()
        files_to_check = config[config_main_section]['files_to_check'].splitlines()
        reports_name = config[config_main_section]['reports_name'].splitlines()
        email_server = config[config_mail_section]['server_address']
        email_login = config[config_mail_section]['login']
        email_pass = config[config_mail_section]['password']
        email_to = config[config_mail_section]['to'].splitlines()
    except configparser.Error as err:
        # configparser.Error exposes a .message attribute.
        logging.error("configparser.Error " + err.message)
    except KeyError as err:
        logging.error("configparser.KeyError " + repr(err))
    # Fatal: config parsed but a required list is missing/empty.
    if not len(paths_to_log) or not len(files_to_check) or not len(reports_name):
        logging.error("variable 'paths_to_log', 'files_to_check' or 'reports_name' is empty")
        mail()
        exit()
    current_date = datetime.now()
    for p in paths_to_log:
        logging.info("analyzing directory: " + p)
        message += '"' + p + '"<br>'
        if not path.exists(p):
            logging.error("the path '" + p + "' doesn\'t exist")
            continue
        # Plain files only; subdirectories are ignored.
        files_in_folders = [f for f in listdir(p) if path.isfile(path.join(p, f))]
        for ftc in files_to_check:
            # Each entry is "substring;max_age_days".
            ftc = str(ftc).split(';')
            found = False
            for f in files_in_folders:
                if ftc[0] in f:
                    # NOTE(review): p + f concatenates without a separator —
                    # works only if p already ends with '\\'; confirm config.
                    f_size = path.getsize(p + f)
                    f_date = path.getatime(p + f)
                    f_dif = (current_date - datetime.fromtimestamp(f_date)).days
                    # Accept only non-empty files younger than the limit.
                    if f_size and f_dif < int(ftc[1]):
                        found = True
                        message += __check_file(p, f, reports_name.copy())
            if not found:
                logging.info("!!! ERROR: cannot find the file '" + ftc[0] + "'")
                message += '<font color="red">' + ftc[0] + " не удается найти файл</font><br>"
        message += "<br>"
    logging.debug("ended at: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    mail()
def sync(self, force = False):
    """Push locally new/changed files under ``self._syncdir`` to the server.

    Uses the local ``filetable`` DB (fileid, path, lastchange) as the record
    of what the server already has.  Protocol messages are sent over
    ``self._con`` as utf-8 strings: "5 <id>" announces an update, "4 <path>"
    a new file, "6 <timestamp>" the file's mtime, "16" aborts.  The server
    answers "0" for acknowledge.

    :param force: when True, walk and compare files even if the sync dir's
        atime suggests nothing changed.

    NOTE(review): SQL below is built by string concatenation; a path
    containing a quote breaks the statement (injection risk) — parameterized
    queries advised.  ``recieve`` is the (misspelled) project API name.
    """
    self._getFiles()
    c = self._dbcon.cursor()
    c.execute ("select max(lastchange) from filetable")
    # Initialize to a very old time, for the first time the syncronization runs
    stime = "1970-01-01 00:00:00"
    for row in c:
        if (not(row[0] == None)):
            stime = row[0]
    # Only do something if a file has changed
    time_format = "%Y-%m-%d %H:%M:%S"
    # TODO Checking st_atime for syncpath doesn't work in certain situations. Search for a better solution
    if (time.mktime(time.strptime(stime, time_format)) < getatime(self._syncdir) or force):
        c.execute ("select * from filetable")
        i = None
        # Index the DB rows by relative path for O(1) lookup during the walk.
        rows = {}
        for row in c:
            rows[row[1]] = row
        for root, dirs, files in os.walk(self._syncdir):
            for name in files:
                # Path relative to the sync root; this is the DB/protocol key.
                path = join(root, name)[len(self._syncdir):]
                if (path in rows):
                    # File exists, update or leave it alone
                    if (time.mktime(time.strptime(rows[path][2], time_format)) < getmtime(join(root, name))):
                        # File has to be updated
                        self._con.send(bytes("5 " + str(rows[path][0]), "utf8"))
                        changetime = None
                        if(self._con.recieve().decode("utf8") == "0"):
                            # Send timestamp
                            # TODO Check if everything is right with timezone etc.
                            changetime = datetime.datetime.fromtimestamp(getmtime(join(root,name)))
                            self._con.send(bytes("6 " + str(changetime), "utf8"))
                        else:
                            # Something went wrong
                            self._con.send(bytes("16", "utf8"))
                            exit()
                        print("Sending: " + join(root, name))
                        if (self._con.recieve().decode("utf8") == "0"):
                            self._sendFile(join(root, name))
                            print("Finished sending file")
                        # Just wait for the message, nothing else
                        if(self._con.recieve().decode("utf8") != "0"):
                            print ("No acknowlegement recieved. Exiting Thread")
                            self._con.close()
                            exit()
                        # Record the new mtime so the next sync skips this file.
                        c.execute("update filetable set lastchange = '" + str(changetime) + "' where fileid = " + str(rows[path][0]))
                else:
                    # File does not exist, send it to the server
                    self._con.send(bytes("4 " + path, "utf8"))
                    changetime = None
                    if(self._con.recieve().decode("utf8") == "0"):
                        # Send timestamp
                        # TODO Check if everything is right with timezone etc.
                        changetime = datetime.datetime.fromtimestamp(getmtime(join(root,name)))
                        self._con.send(bytes("6 " + str(changetime), "utf8"))
                    else:
                        print ("Error sending new file")
                        # Something went wrong
                        self._con.send(bytes("16", "utf8"))
                        exit()
                    # Send file
                    ack = self._con.recieve().decode("utf8")
                    if (ack == "0"):
                        print("Sending: " + join(root, name))
                        self._sendFile(join(root, name))
                        print("Finished sending file")
                        # Server assigns the id for the newly uploaded file.
                        fileid = self._con.recieve().decode("utf8")
                        c.execute("insert into filetable values (" + fileid + ", '" + path + "', '" + str(changetime) + "');")
                        self._dbcon.commit()
        # Commit the query, after all files have been checked
        self._dbcon.commit()
    c.close()