def get_backup_dir_list(monitoring_dir, ELAPSED_TIME):
    """Return the directories under *monitoring_dir* holding neglected files.

    A file is "neglected" when it has not been accessed for more than
    ELAPSED_TIME seconds (compared against ``st_atime``).

    Args:
        monitoring_dir: root directory scanned via ``fs.find('*', ...)``.
        ELAPSED_TIME: age threshold in seconds.

    Returns:
        list of unique directory paths (order not guaranteed — the
        original also funnelled the result through ``set()``).
    """
    now = int(time.time())
    # A set gives O(1) dedup; the original kept a list, did a linear
    # membership test per file, and then wrapped it in set() again anyway.
    backup_dirs = set()
    for f in fs.find('*', path=monitoring_dir):
        # NOTE(review): assumes fs.stat mirrors os.stat (st_atime = last
        # access time) — confirm against the fs helper module.
        if now - fs.stat(f).st_atime > ELAPSED_TIME:
            backup_dirs.add(fs.dirname(f))
    return list(backup_dirs)
def test_dirname_from_dir():
    """fs.dirname on 'foo/bar' should hand back the parent 'foo'."""
    parent = "foo"
    path = "{}/bar".format(parent)
    assert fs.dirname(path) == parent
def export_neglected_file_list(monitoring_dir, ROOT_DIR, LOG_DIR, backup_file_list):
    """Export *backup_file_list* to a dated log file under ROOT_DIR/LOG_DIR.

    For every neglected file a separator line, its file name and its
    directory are written.  All errors are reported to stdout and
    swallowed — this is best-effort logging, matching the original.

    Args:
        monitoring_dir: unused here; kept for interface compatibility.
        ROOT_DIR, LOG_DIR: components of the log directory path.
        backup_file_list: iterable of file paths to report.
    """
    # strftime('%Y%m%d') is zero-padded and therefore unambiguous and
    # sortable; the old str(year)+str(month)+str(day) turned
    # 2023-01-05 into "202315".
    today_str = datetime.datetime.today().strftime("%Y%m%d")
    export_name = today_str + "_neglected_files.log"
    log_dir = fs.join([ROOT_DIR, LOG_DIR])
    export_path = fs.join([ROOT_DIR, LOG_DIR, export_name])
    if not fs.exists(log_dir):
        try:
            fs.mkdir(log_dir)
        except Exception:
            print("Can't create LOG_DIR in Func:", export_neglected_file_list)
    try:
        fs.touch(export_path)
        # 'with' guarantees the handle is closed — the original leaked it.
        # NOTE(review): assumes fs.open returns a standard file object
        # usable as a context manager — confirm against the fs helper.
        with fs.open(export_path, 'w') as export_file:
            for f in backup_file_list:
                try:
                    export_file.write('================================================')
                    export_file.write('\n')
                    export_file.write(fs.filename(f))
                    export_file.write('\n')
                    export_file.write(fs.dirname(f))
                    export_file.write('\n')
                except Exception:
                    # fixed typo in the original message ("Cant' write")
                    print("Can't write export file in func: export_neglected_file_list")
    except Exception:
        print("cant export in func: export_neglected_file_list")
def set(key, data):
    """Pickle *data* into the cache file derived from *key*.

    Creates the parent directory on demand.  Returns pickle.dump's
    result (None), preserving the original signature.
    """
    # print 'storing %s => %s' % (key, data)
    filePath = _constructPathFromKey(key)
    parentPath = dirname(filePath)
    if not os.path.exists(parentPath):
        mkdirs(parentPath)
    # Pickle is a binary protocol: 'wb' is required on Python 3 — the
    # original text-mode 'w' makes pickle.dump raise TypeError.
    with open(filePath, 'wb') as fh:
        return pickle.dump(data, fh)
    # filePutContents(_constructPathFromKey(key), data)
def test_dirname_from_file():
    """Stripping the file name from 'foo/bar/test.txt' yields 'foo/bar'."""
    expected_dir = "foo/bar"
    full_path = "{}/{}".format(expected_dir, "test.txt")
    assert fs.dirname(full_path) == expected_dir
# Export one Caffe conv layer as ConvnetJS-style text files
# (<name>_filter.txt: one comma-joined row per filter; <name>_bias.txt).
print("Expected Shape: ", nb_filter, stack_size, nb_col, nb_row)
print("Found Shape: ", np.array(blobs[0].data).shape)
weights_p = blobs[0].data.astype(dtype=np.float32)
weights_b = blobs[1].data.astype(dtype=np.float32)
if len(weights_p.shape) > 2:
    # Caffe uses the shape f, (d, y, x)
    # ConvnetJS uses the shape f, (y, x, d)
    weights_p = np.swapaxes(np.swapaxes(weights_p, 3, 1), 2, 1)
    print("Converted to Shape: ", weights_p.shape)
weights = {
    'filter': weights_p.reshape((nb_filter, stack_size * nb_col * nb_row)).tolist(),
    'bias': weights_b.tolist()
}
filename = WEIGHTS_DIR + key + '.txt'
if not fs.exists(fs.dirname(filename)):
    fs.mkdir(fs.dirname(filename))
# Build the whole payload in memory and write it once — the original
# called fs.append per filter row, reopening the file every iteration.
# NOTE(review): assumes fs.write replaces the file's content (it was
# used that way with "" to truncate before appending) — confirm.
filter_lines = "\n".join(",".join(map(str, row)) for row in weights['filter'])
fs.write(fs.add_suffix(filename, "_filter"), filter_lines)
fs.write(fs.add_suffix(filename, "_bias"), ",".join(map(str, weights['bias'])))
import hashlib, os, sys

from fs import mkdirs, dirname  # , filePutContents, fileGetContents

try:
    import cPickle as pickle  # Python 2: C-accelerated pickle
except ImportError:           # Python 3: plain pickle is already fast
    import pickle

"""
@description A static singleton-esque file-based key/value cache.
@date 2012-04-19
"""

# Cache lives next to the invoking script, under .cache/wget.
CACHE_BASE_PATH = '%s/.cache/wget' % dirname(sys.argv[0])


def set(key, data):
    """Pickle *data* into the cache file for *key*, creating parents on demand."""
    # print 'storing %s => %s' % (key, data)
    filePath = _constructPathFromKey(key)
    parentPath = dirname(filePath)
    if not os.path.exists(parentPath):
        mkdirs(parentPath)
    # Pickle is a binary protocol: 'wb' is required on Python 3 — the
    # original text-mode 'w' makes pickle.dump raise TypeError.
    with open(filePath, 'wb') as fh:
        return pickle.dump(data, fh)
    # filePutContents(_constructPathFromKey(key), data)


def get(key):
    """Return the unpickled value cached for *key*, or None when absent."""
    try:
        # 'rb' to match the binary pickle stream written by set().
        with open(_constructPathFromKey(key), 'rb') as fh:
            return pickle.load(fh)
            # fileGetContents(_constructPathFromKey(key))
    except IOError:
        return None
# Export one Caffe conv layer as raw little/native-endian float32 binaries
# (<name>_filter.bin and <name>_bias.bin).
print("====> Layer: ", key)
print("Expected Shape: ", nb_filter, stack_size, nb_col, nb_row)
print("Found Shape: ", np.array(blobs[0].data).shape)
weights_p = blobs[0].data.reshape(
    (nb_filter, stack_size, nb_col, nb_row)).astype(dtype=np.float32)
weights_b = blobs[1].data.astype(dtype=np.float32)
if len(weights_p.shape) > 2:
    # Caffe uses the shape f, (d, y, x)
    # ConvnetJS uses the shape f, (y, x, d)
    weights_p = np.swapaxes(np.swapaxes(weights_p, 3, 1), 2, 1)
    print("Converted to Shape: ", weights_p.shape)
weights = {
    'filter': weights_p.reshape((nb_filter, stack_size * nb_col * nb_row)),
    'bias': weights_b
}
filename = WEIGHTS_DIR + key + '.bin'
# kept even though unused here — a later part of the script may read it
prev_shape = (nb_filter, stack_size, nb_col, nb_row)
if not fs.exists(fs.dirname(filename)):
    fs.mkdir(fs.dirname(filename))
# tobytes() replaces tostring(), which was deprecated in NumPy 1.19 and
# removed in NumPy 2.0; both produce the identical raw byte buffer.
with open(fs.add_suffix(filename, "_filter"), 'wb') as f:
    f.write(weights['filter'].astype(np.float32).tobytes())
with open(fs.add_suffix(filename, "_bias"), 'wb') as f:
    f.write(weights['bias'].astype(np.float32).tobytes())