def export_neglected_file_list(monitoring_dir, ROOT_DIR, LOG_DIR, backup_file_list):
    """Write the list of files that were NOT backed up to a dated log file.

    Creates <ROOT_DIR>/<LOG_DIR>/<YYYYMMDD>_neglected_files.log and writes a
    separator line, the file name and its directory for every entry.

    Args:
        monitoring_dir: unused here; kept for interface compatibility.
        ROOT_DIR: root directory under which LOG_DIR lives.
        LOG_DIR: sub-directory that receives the log file.
        backup_file_list: iterable of paths that were skipped by the backup.
    """
    # strftime gives a zero-padded stamp; the old str(year)+str(month)+str(day)
    # was ambiguous (2023-1-12 and 2023-11-2 both produced "2023112").
    today_str = datetime.datetime.today().strftime('%Y%m%d')
    export_name = today_str + "_neglected_files.log"
    log_dir_path = fs.join([ROOT_DIR, LOG_DIR])
    export_path = fs.join([ROOT_DIR, LOG_DIR, export_name])
    if not fs.exists(log_dir_path):
        try:
            fs.mkdir(log_dir_path)
        except Exception:
            # Best-effort: report and fall through (the open below will fail
            # and be reported too). Message fixed: the original printed the
            # function object itself.
            print("Can't create LOG_DIR in Func: export_neglected_file_list")
    try:
        fs.touch(export_path)
        out = fs.open(export_path, 'w')  # renamed: don't shadow builtin `file`
        try:
            for f in backup_file_list:
                try:
                    out.write('================================================')
                    out.write('\n')
                    out.write(fs.filename(f))
                    out.write('\n')
                    out.write(fs.dirname(f))
                    out.write('\n')
                except Exception:
                    print("Can't write export file in func: export_neglected_file_list")
        finally:
            out.close()  # the original leaked the handle
    except Exception:
        print("cant export in func: export_neglected_file_list")
def create_new_backup_root_dir(ROOT_DIR, TARGET_ROOT_DIR, backup_dir_name):
    """Create <ROOT_DIR>/<TARGET_ROOT_DIR>/<backup_dir_name> if it is missing.

    Failures are reported on stdout; the function never raises (best-effort,
    matching the rest of this backup tool).
    """
    backup_dir_path = fs.join([ROOT_DIR, TARGET_ROOT_DIR, backup_dir_name])
    if not fs.exists(backup_dir_path):
        try:
            fs.mkdir(backup_dir_path)
            print(backup_dir_path, "was successfully created")
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # KeyboardInterrupt / SystemExit.
            print("Cant create a backup directory in func: create_new_backup_root_dir")
def _save_block(data_obj, X, y_age, y_gender, idx=None):
    """Save one image block plus its age/gender label arrays under DATA_DST.

    With idx=None the arrays are stored without a block suffix (used for the
    validation set); otherwise '_%02d'-style suffixes are appended.
    """
    base = fs.join(DATA_DST, data_obj)
    if idx is None:
        np.save(base, X)
        np.save(fs.add_suffix(base, '_label_age'), y_age)
        np.save(fs.add_suffix(base, '_label_gender'), y_gender)
    else:
        np.save(fs.add_suffix(base, '_%02d' % idx), X)
        np.save(fs.add_suffix(base, '_label_age_%02d' % idx), y_age)
        np.save(fs.add_suffix(base, '_label_gender_%02d' % idx), y_gender)


def _convert_split(meta, data_obj, tag):
    """Convert one metadata split into SAMPLES_PER_SPLIT-sized .npy blocks."""
    num_blocks = math.ceil(len(meta['path']) / SAMPLES_PER_SPLIT)
    # "Sapmles" typo in the progress message fixed.
    print("  [%s] %i Samples. %i Blocks required" % (tag, len(meta['path']), num_blocks))
    for i in range(num_blocks):
        X, y_age, y_gender = utils.get_img_array(
            meta, DATA_SRC, age_classes, img_dim=INPUT_DIM,
            split=i, num_samples_per_split=SAMPLES_PER_SPLIT)
        _save_block(data_obj, X, y_age, y_gender, idx=i)
        # Release the block before converting the next one.
        del X, y_age, y_gender
        gc.collect()


def main():
    """Convert the wiki/imdb metadata into train/val/test .npy blocks.

    Train and test are chunked into SAMPLES_PER_SPLIT-sized blocks; the
    validation set is forced into a single block. The duplicated per-split
    save loops of the original are factored into _convert_split/_save_block.
    """
    if not fs.exists(DATA_DST):
        fs.mkdir(DATA_DST)
    meta_all = utils.shuffle_meta_data(
        utils.load_meta_data(DATA_SRC, WIKI_META_OBJ, IMDB_META_OBJ))
    train, test = utils.split_meta_data(meta_all, TRAIN_TEST_SPLIT)
    train, val = utils.split_meta_data(train, TRAIN_VAL_SPLIT)
    # Free the full metadata table before loading image data.
    del meta_all
    gc.collect()
    print("Converting blocks")
    _convert_split(train, TRAIN_DATA_OBJ, 'train')
    print("  [val] %i Samples. 1 Block forced" % (len(val['path'])))
    X_val, y_age, y_gender = utils.get_img_array(
        val, DATA_SRC, age_classes, img_dim=INPUT_DIM,
        num_samples_per_split=len(val['path']))
    _save_block(VAL_DATA_OBJ, X_val, y_age, y_gender)
    del X_val, y_age, y_gender
    gc.collect()
    _convert_split(test, TEST_DATA_OBJ, 'test')
def main():
    """Single-block variant: export train/val/test each as one .npy block.

    Block sizes come from the train_samples/val_samples/test_samples globals.
    The three near-identical convert/save/free sections of the original are
    factored into one local helper; the "Sapmles" typo is fixed.
    """
    if not fs.exists(DATA_DST):
        fs.mkdir(DATA_DST)
    meta_all = utils.shuffle_meta_data(
        utils.load_meta_data(DATA_SRC, WIKI_META_OBJ, IMDB_META_OBJ))
    train, test = utils.split_meta_data(meta_all, TRAIN_TEST_SPLIT)
    train, val = utils.split_meta_data(train, TRAIN_VAL_SPLIT)
    # Free the full metadata table before loading image data.
    del meta_all
    gc.collect()
    print("Converting blocks")

    def _export(meta, n_samples, data_obj, tag, indexed):
        # Convert one split, store arrays + labels, then free the memory.
        # indexed=True reproduces the '_00' block-suffix naming of the
        # original; indexed=False is the validation-set naming.
        print("  [%s] %i Samples" % (tag, n_samples))
        base = fs.join(DATA_DST, data_obj)
        if indexed:
            X, y_age, y_gender = utils.get_img_array(
                meta, DATA_SRC, age_classes, img_dim=INPUT_DIM,
                split=0, num_samples_per_split=n_samples)
            np.save(fs.add_suffix(base, '_%02d' % 0), X)
            np.save(fs.add_suffix(base, '_label_age_%02d' % 0), y_age)
            np.save(fs.add_suffix(base, '_label_gender_%02d' % 0), y_gender)
        else:
            X, y_age, y_gender = utils.get_img_array(
                meta, DATA_SRC, age_classes, img_dim=INPUT_DIM,
                num_samples_per_split=n_samples)
            np.save(base, X)
            np.save(fs.add_suffix(base, '_label_age'), y_age)
            np.save(fs.add_suffix(base, '_label_gender'), y_gender)
        del X, y_age, y_gender
        gc.collect()

    _export(train, train_samples, TRAIN_DATA_OBJ, 'train', indexed=True)
    _export(val, val_samples, VAL_DATA_OBJ, 'val', indexed=False)
    _export(test, test_samples, TEST_DATA_OBJ, 'test', indexed=True)
def create_archive_tree(monitoring_dir, ROOT_DIR, TARGET_ROOT_DIR, backup_dir_name, backup_dir_list):
    """Mirror each monitored directory under the backup root.

    For every path in backup_dir_list, strips the monitoring_dir prefix and
    recreates the remaining relative path under
    <ROOT_DIR>/<TARGET_ROOT_DIR>/<backup_dir_name>.
    """
    # re.escape: monitoring_dir is a literal path, not a regex. The original
    # compiled it verbatim, so paths containing regex metacharacters
    # (e.g. "C:\dir(1)" or anything with '.') matched incorrectly.
    # Also hoisted out of the loop — it does not depend on the loop variable.
    re_pattern = re.compile(re.escape(monitoring_dir))
    for source_full_path in backup_dir_list:
        source_path = re_pattern.sub('', source_full_path)
        archive_path = fs.join([ROOT_DIR, TARGET_ROOT_DIR, backup_dir_name, source_path])
        if not fs.exists(archive_path):
            try:
                fs.mkdir(archive_path)
            except Exception:
                # Narrowed from a bare except; keep the best-effort behavior.
                print(archive_path, "Can't create archive tree in func: create_archive_tree")
def add_to_report(text, params=None):
    """Append text (or a pretty-printed dict) to results/<timestamp>/report.txt.

    Args:
        text: a plain string, or a dict that will be pprint-ed with indent=2.
        params: optional dict; only the 'timestamp' key is read.
    """
    # params=None instead of params={}: a mutable default is shared across
    # calls and is a classic Python pitfall.
    params = params if params is not None else {}
    timestamp = params.get('timestamp')
    if not fs.exists('results/%s' % timestamp):
        fs.mkdir('results/%s' % timestamp)
    if isinstance(text, dict):
        # 'at+': append mode, creating the file if needed.
        with open('results/%s/report.txt' % timestamp, 'at+') as report:
            pprint.pprint(text, stream=report, indent=2)
    else:
        fs.append("results/%s/report.txt" % timestamp, text + '\n')
def test_mkdir():
    """fs.mkdir creates a single directory."""
    dir_name = "foo"
    path = os.path.join(TEST_DIR, dir_name)
    # Guard: the fixture directory must not pre-exist, or the test is vacuous.
    if os.path.exists(path):
        raise ValueError("Directory %s already exists!" % path)
    fs.mkdir(path)
    # Plain truth test; the original's `is True` relied on bool-singleton
    # identity, which is fragile and non-idiomatic.
    assert os.path.exists(path)
def test_mkdir_recursive_fail():
    """fs.mkdir(recursive=False) must fail when parent dirs are missing."""
    dir_name = "foo/bar/bal"
    path = os.path.join(TEST_DIR, dir_name)
    if os.path.exists(path):
        raise ValueError("Directory %s already exists!" % path)
    try:
        fs.mkdir(path, recursive=False)
    except FileNotFoundError:
        pass  # expected
    else:
        # Bug fix: the original try/except passed silently when NO exception
        # was raised, so a broken non-recursive mkdir went unnoticed.
        raise AssertionError("fs.mkdir unexpectedly succeeded for %s" % path)
def test_api_for_create_file():
    """Exercise artificial-file creation and removal through the Dir API."""
    path = fs.mkdir('root')
    fs.mkdir('root/code')
    fs.mkfile('root/code/main.c', contents=u'int main() { return 0; }')
    tree = ipe.load_tree(path)
    # We might want files/directories that are not on disk; 'create_file'
    # and 'create_dir' on the Dir class do that.
    tree.create_file('.emacs')
    # The file is now reachable through the tree.
    dotemacs = tree.get('.emacs')
    # Files we create are "artificial"; files that exist on disk are "natural".
    assert dotemacs.is_artificial
    assert tree.get('code/main.c').is_natural
    # Artificial files start with no content...
    assert dotemacs.contents is None
    # ...but we can set it ourselves.
    lisp = u'(setq tab-width 4)'
    dotemacs.contents = lisp
    assert dotemacs.contents == lisp
    # Creating a node whose name conflicts with an existing file must fail.
    try:
        tree.create_dir('.emacs')
    except ipe.DuplicateChild:
        pass  # expected
    else:
        # Bug fix: the original silently accepted the no-exception path.
        raise AssertionError('create_dir should have raised DuplicateChild')
    # Removing the artificial file frees the name again.
    tree.remove(dotemacs)
    assert not tree.contains(dotemacs)
    # And now creating the directory no longer conflicts.
    d = tree.create_dir('.emacs')
def test_store_trees():
    """A tree persisted with store_tree appears as real dirs/files on disk."""
    # Scratch output directory.
    out = fs.mkdir()
    assert isdir(out)
    # Minimal tree: root/a/b plus one file at the root and one deep inside.
    root = ipe.Dir('root')
    a = root.create_dir('a')
    b = a.create_dir('b')
    f1 = root.create_file('f1', contents='hello')
    f2 = b.create_file('f2', contents='world')
    # Persist the whole tree.
    ipe.store_tree(root, out)
    # Every node must now exist under `out`.
    for node in (a, b):
        assert isdir(join(out, node.path))
    for node in (f1, f2):
        assert isfile(join(out, node.path))
def test_creating_dir_fails_if_there_is_a_file_in_the_way():
    """CreateMissingDirectories must refuse to replace a file with a dir."""
    # Scratch output directory, plus a file named 'rock' inside it.
    out = fs.mkdir()
    rock = fs.mkfile(join(os.path.basename(out), 'rock'))
    assert isdir(out)
    assert os.path.isfile(rock)
    # A tree whose directory name collides with that file.
    root = ipe.Dir('root')
    root.create_dir('rock')
    # Storing with CreateMissingDirectories must fail on the collision.
    tool = ipe.storage.CreateMissingDirectories(out)
    try:
        ipe.top_down(root, tool)
    except RuntimeError:
        pass  # expected: the file blocks directory creation
    else:
        assert False, 'There is a file in the way!'
def test_store_different_types_of_contents():
    """Files accept unicode, iterables, generators, callables and file objects."""
    # Scratch output directory.
    out = fs.mkdir()
    assert isdir(out)
    # The reference text every content variant must produce.
    txt_contents = u'O ipê amarelo é a árvore nacional do Brasil.'
    # An iterable of lines...
    lst_contents = [txt_contents]

    # ...a generator function...
    def gen_contents():
        for line in lst_contents:
            yield line

    # ...a callable returning a generator function...
    def def_contents():
        return gen_contents

    # ...and an open file handle.
    fil_contents = open(fs.mkfile('tempfile', contents=txt_contents))
    # One file per content kind.
    root = ipe.Dir('root')
    variants = [
        root.create_file('f1', contents=txt_contents),
        root.create_file('f2', contents=lst_contents),
        root.create_file('f3', contents=gen_contents),
        root.create_file('f4', contents=def_contents),
        root.create_file('f5', contents=fil_contents),
    ]
    # Directories first (covered by its own test), then the files.
    ipe.top_down(root, ipe.storage.CreateMissingDirectories(out))
    ipe.top_down(root, ipe.storage.StoreFiles(out))
    # Every variant must round-trip to the same text.
    for node in variants:
        assert codecs.open(join(out, node.path), encoding='utf-8').read() == txt_contents
def test_load_dir():
    """load_tree mirrors a real on-disk directory hierarchy."""
    r = fs.mkdir('root')
    # Two nested branches.
    for d in ('root/vanilla', 'root/foo/bar'):
        fs.mkdir(d)
    # One file carries unicode contents; the rest are empty.
    fs.mkfile('root/a.txt')
    fs.mkfile('root/b.txt', contents=u'André')
    for name in ('root/vanilla/c.txt', 'root/vanilla/d.txt',
                 'root/foo/e.txt', 'root/foo/bar/f.txt', 'root/foo/bar/g.txt'):
        fs.mkfile(name)
    t = ipe.load_tree(r)
    # Deep lookup resolves to the right node.
    eq_(t.get('vanilla/c.txt').name, 'c.txt')
    # Navigating via a sibling's parent reaches the same node.
    eq_(t.get('foo/bar/g.txt').parent.get('f.txt').abspath,
        t.get('foo/bar/f.txt').abspath)
def test_load_empty_dir():
    """An empty directory loads as a childless root node."""
    d = fs.mkdir('empty')
    t = ipe.load_tree(d)
    eq_(t.name, 'empty')
    eq_(t.path, '')
    eq_(t.abspath, d)
    # Count the children lazily; an empty dir has none.
    eq_(sum(1 for _ in t.children()), 0)
def test_natural_file():
    """Natural files expose their on-disk contents as a read-only stream."""
    dn = fs.mkdir('natural-files')
    fn = fs.mkfile('natural-files/file.txt', contents=u'test')
    root = ipe.load_tree(dn)
    # This is a natural file.
    f = root.get('file.txt')
    # By default its contents is a file-like object.
    with f.contents as stream:
        # It is a read-only stream.
        try:
            stream.write('anything')
            assert False, 'The file should be read only.'
        except IOError:
            pass
        # And it must represent the contents of the file on disk.
        text = stream.read().decode('utf-8')
        assert text == u'test'
    # Replacing it with an UTF-8 string is OK.
    f.contents = u'replaced'
    assert f.contents == u'replaced'
    # It should not overwrite the contents of the file on disk, though.
    with open(fn, 'rb') as stream:
        text = stream.read().decode('utf-8')
        assert text == u'test'
    # Setting to None should not reset the file to the original state.
    f.contents = None
    # Fix: identity comparison for None instead of `== None`.
    assert f.contents is None
def test_creating_directories_using_the_built_in_tool():
    """CreateMissingDirectories creates every directory but no files."""
    # Scratch output directory.
    out = fs.mkdir()
    assert isdir(out)
    # A fully artificial tree mixing files and directories.
    root = ipe.Dir('root')
    a = root.create_dir('a')
    x = a.create_file('x')
    y = a.create_file('y')
    b = root.create_dir('b')
    z = b.create_file('z')
    c = root.create_dir('c')
    d = c.create_dir('d')
    # Run the built-in tool; it writes into `out`.
    ipe.top_down(root, ipe.storage.CreateMissingDirectories(out))
    # The tree root maps onto `out` itself, so no 'root' directory is made.
    assert not isdir(join(out, root.name))
    # All four directories were created...
    for node in (a, b, c, d):
        assert isdir(join(out, node.path))
    # ...and none of the files were.
    for node in (x, y, z):
        assert not exists(join(out, node.path))
def test_creating_files_with_the_built_in_tool():
    """StoreFiles writes each artificial file with its contents."""
    # Scratch output directory.
    out = fs.mkdir()
    assert isdir(out)
    # Artificial tree; each file's contents is simply its own name.
    root = ipe.Dir('root')
    a = root.create_dir('a')
    x = a.create_file('x', contents=u'x')
    y = a.create_file('y', contents=u'y')
    b = root.create_dir('b')
    z = b.create_file('z', contents=u'z')
    c = root.create_dir('c')
    d = c.create_dir('d')
    # Directories first (covered by its own test), then the files.
    ipe.top_down(root, ipe.storage.CreateMissingDirectories(out))
    ipe.top_down(root, ipe.storage.StoreFiles(out))
    # Each file exists and holds exactly its own name.
    for node, expected in ((x, u'x'), (y, u'y'), (z, u'z')):
        assert isfile(join(out, node.path))
        assert codecs.open(join(out, node.path), encoding='utf-8').read() == expected
def test_artificial_file():
    """Artificial files start empty and accept assigned contents."""
    dn = fs.mkdir('artificial-files')
    root = ipe.load_tree(dn)
    # An artificial (not-on-disk) file.
    f = root.create_file('hello.c')
    # No content until we provide some.
    assert f.contents is None
    f.contents = u'Hello!'
    # The assignment round-trips.
    assert f.contents == u'Hello!'
def main():
    # Smoke-test the toy filesystem module (Python 2 syntax): build a small
    # tree, write to two files through descriptors, read back, then suspend.
    fs.init('fs')
    fs.mkdir('a')
    fs.mkdir('b')
    fs.mkdir('a/c')
    # create(path, size): pre-allocate 20-byte files.
    fs.create('a/d.txt', 20)
    fs.create('a/c/e.txt', 20)
    fd1 = fs.open('a/d.txt', 'rw')
    fd2 = fs.open('a/c/e.txt', 'rw')
    fs.write(fd1, 'hello\nbye\n')
    fs.write(fd2, 'goodbye\n')
    print fs.read(fd2, 4)
    print fs.readlines(fd1)
    for f in fs.readlines(fd1):
        # Python 2: the trailing comma suppresses the newline.
        print(f),
    fs.close(fd1)
    fs.close(fd2)
    fs.suspend()
# NOTE(review): fragment — `key`, `blobs`, `nb_filter`, `stack_size`,
# `nb_col`, `nb_row` and `WEIGHTS_DIR` are defined outside this view;
# presumably this runs inside a loop over Caffe layers. Confirm against
# the enclosing code. Exports one layer's weights as comma-separated text.
print("Expected Shape: ", nb_filter, stack_size, nb_col, nb_row)
print("Found Shape: ", np.array(blobs[0].data).shape)
weights_p = blobs[0].data.astype(dtype=np.float32)
weights_b = blobs[1].data.astype(dtype=np.float32)
if len(weights_p.shape) > 2:
    # Caffe uses the shape f, (d, y, x)
    # ConvnetJS uses the shape f, (y, x, d)
    weights_p = np.swapaxes(np.swapaxes(weights_p, 3, 1), 2, 1)
    print("Converted to Shape: ", weights_p.shape)
weights = {
    'filter': weights_p.reshape((nb_filter, stack_size*nb_col*nb_row)).tolist(),
    'bias': weights_b.tolist()
}
filename = WEIGHTS_DIR + key + '.txt'
if not fs.exists(fs.dirname(filename)):
    fs.mkdir(fs.dirname(filename))
# Truncate the filter file, then append one row per filter; the last row
# gets no trailing newline.
fs.write(fs.add_suffix(filename, "_filter"), "")
for i, f_weights in enumerate(weights['filter']):
    if i == len(weights['filter']) - 1:
        fs.append(fs.add_suffix(filename, "_filter"), ",".join(map(str, f_weights)))
    else:
        fs.append(fs.add_suffix(filename, "_filter"), ",".join(map(str, f_weights)) + "\n")
fs.write(fs.add_suffix(filename, "_bias"), ",".join(map(str, weights['bias'])))
            // NOTE(review): fragment — the enclosing function (a level-by-level
            // folder-mapping loop) begins before this view; `count`, `mapping`,
            // `id`, `new_folder`, `folders` and `same_levels` come from there.
            count++
            mapping[id] = new_folder;
        } catch (e) {
            console.error('Error creating Folder:', e.message)
        }
    }))
    // Descend one level: collect the children of the folders just processed.
    same_levels = [].concat(...same_levels.map(v => folders.filter(vv => vv.parent === v.id)))
    }
    return mapping
}

// Create a local directory (with parents) unless it already exists.
async function create_local_folder(folderPath: string) {
    return new Promise((res, rej) => {
        if (!fs.existsSync(folderPath)) {
            fs.mkdir(folderPath, { recursive: true }, (err) => {
                if (err) rej(err);
                else res('');
            });
        } else res('');
    });
}

// Stream a Drive file's media content to `filePath`.
// NOTE(review): fragment — the body continues past this view.
async function downloadFile(file: any, drive: drive_v3.Drive, filePath: string) {
    return new Promise(async (resolve, reject) => {
        var dest = fs.createWriteStream(filePath);
        await drive.files.get({
            fileId: file.id,
            supportsAllDrives: true,
            alt: 'media'
import fs

# Smoke tests for the toy filesystem module (Python 2 syntax).
# Each call is wrapped in a bare try/except so a failure prints an
# ERROR line instead of aborting the script.
print "\nTESTING GETCWD() \n"
try:
    fs.getcwd()
except:
    print "ERROR: fs.getcwd()"
print "DONE"

print "\nTESTING MKDIR() \n"
try:
    fs.mkdir('a')
except:
    print "ERROR : fs.mkdir('a')"
try:
    fs.mkdir('a/ab')
except:
    print "ERROR : fs.mkdir('a/ab')"
try:
    fs.mkdir('a/ab/../ac')
except:
    # NOTE(review): the message path does not match the call above
    # ('a/ab/ab/ac' vs 'a/ab/../ac').
    print "ERROR : fs.mkdir('a/ab/ab/ac')"
# NOTE(review): chunk boundary — the handler for this try is cut off here.
try:
    fs.mkdir('a/../../f')
except:
# NOTE(review): fragment — `key`, `blobs`, the layer dimensions and
# WEIGHTS_DIR come from an enclosing loop outside this view. Binary (.bin)
# variant of the text exporter: writes raw float32 bytes instead of
# comma-separated text.
print("====> Layer: ", key)
print("Expected Shape: ", nb_filter, stack_size, nb_col, nb_row)
print("Found Shape: ", np.array(blobs[0].data).shape)
weights_p = blobs[0].data.reshape(
    (nb_filter, stack_size, nb_col, nb_row)).astype(dtype=np.float32)
weights_b = blobs[1].data.astype(dtype=np.float32)
if len(weights_p.shape) > 2:
    # Caffe uses the shape f, (d, y, x)
    # ConvnetJS uses the shape f, (y, x, d)
    weights_p = np.swapaxes(np.swapaxes(weights_p, 3, 1), 2, 1)
    print("Converted to Shape: ", weights_p.shape)
weights = {
    'filter': weights_p.reshape((nb_filter, stack_size * nb_col * nb_row)),
    'bias': weights_b
}
filename = WEIGHTS_DIR + key + '.bin'
# NOTE(review): prev_shape is assigned but not used in this fragment.
prev_shape = (nb_filter, stack_size, nb_col, nb_row)
if not fs.exists(fs.dirname(filename)):
    fs.mkdir(fs.dirname(filename))
with open(fs.add_suffix(filename, "_filter"), 'wb') as f:
    f.write(weights['filter'].astype(np.float32).tostring())
with open(fs.add_suffix(filename, "_bias"), 'wb') as f:
    f.write(weights['bias'].astype(np.float32).tostring())
def createCollection(self, respath):
    """Create a collection (directory) at *respath* via the fs helper."""
    fs.mkdir(respath)
import fs

def prn(x):
    # Label a node as Dir or File by whichever name attribute it has
    # (EAFP: Dir nodes carry dirName, File nodes carry fileName).
    try:
        return "Dir " + x.dirName
    except:
        return "File " + x.fileName

# file system tree #1
# now it is on the root directory.
fs.init('abc')
fs.mkdir('a')
fs.mkdir('a/b1')
print fs.listdir('a')
# now it is on the root directory.
# fs.chdir('a')
fs.chdir('/')
# create(path, size): pre-allocate a 17-byte file.
fs.create('fa', 17)
print fs.listdir('.')
# on a directory
fs.chdir('a')
import fs

# Exercise the toy filesystem's navigation API (Python 2 syntax).
fs.init('myfs')
# if __name__ == '__main__':
fs.init('myfs')
fs.getcwd()  # /
fs.mkdir('a0')
fs.mkdir('a0/a1')
fs.mkdir('a0/a1/a2')
fs.listdir()
fs.listdir('a0')
fs.mkdir('a0/b1')
fs.listdir('a0')
fs.chdir('a0/b1')  # /a0/b1
# Relative paths work for creation and listing too.
fs.create('../a1/a2.txt', 10)
fs.listdir('../a1')
fs.chdir('/a0/a1')  # /a0/a1
fs.getcwd()
# Creating an already-existing directory is expected to raise.
try:
    fs.mkdir('../a1')
except Exception, e:
    print e
# chdir into a missing directory is expected to raise.
try:
    fs.chdir('../c1')
except Exception, e:
    print e
fs.getcwd()
# deldir on /a0/a1 is expected to raise — presumably because it is the
# current directory and/or non-empty; confirm against the fs module.
try:
    fs.deldir('/a0/a1')
except Exception, e:
    print e
def test_create_some_files():
    """fs.mkdir and fs.mkfile accept nested relative paths."""
    d = fs.mkdir('hello/world')
    f = fs.mkfile('hello/world/again.txt')
    # The original bound the return values and never checked them; assert
    # they are real paths (both helpers return the created path elsewhere
    # in this suite).
    assert d is not None
    assert f is not None