def run():
    """Benchmark process_folder() on a local SSD copy versus the NAS share.

    Two passes over each storage location are timed back to back.  Only the
    first NAS camera folder is processed per pass (the loop breaks at once).

    NOTE(review): relies on module-level `process_folder` and the helpers
    module `f` (timing, get_camera_folders, get_path, get_root) defined
    elsewhere in this file — confirm they are in scope.
    """

    def nas_pass():
        # Time folder discovery on the NAS, then process only the first folder.
        with f.timing("get camera folders", units=1):
            folders, jsons, labels = f.get_camera_folders(f.get_path(), root=f.get_root())
        print(len(folders), " camera folders")
        for folder in folders:
            process_folder(folder)
            break  # was `if True: break` — only the first folder is needed

    ssd = "X:/cam_front_center"

    print("-----------------------------------------")
    print("local ssd")
    process_folder(ssd)  # on a local ssd.

    print("-----------------------------------------")
    print("nas")
    nas_pass()

    print("-----------------------------------------")
    print("local ssd")  # was `[print("local ssd")]` — pointless list literal removed
    process_folder(ssd)  # on a local ssd.

    print("-----------------------------------------")
    print("just read some files in a camera folder.")
    print("nas")
    nas_pass()
def get_files(units=None):
    """Return PNG filenames from the first camera folder found.

    Args:
        units: optional cap on the number of filenames returned;
            None means return everything from that folder.

    Returns:
        A (possibly empty) list of PNG file paths.

    NOTE(review): depends on module-level `bf` and `get_pngs`, which are
    defined elsewhere in this project — confirm their semantics.
    """
    images, jsons, labels = bf.get_camera_folders(bf.get_path(), root=bf.get_root())
    print(len(images), ' folders for images')
    l = []
    for folder in images:
        l = get_pngs(folder)
        break  # just do one now.
    # Removed a dead `folder = images[0]` assignment: its value was never
    # used and it raised IndexError whenever `images` was empty.
    return l if units is None else l[:units]
def run():
    """Time per-folder dataset construction over all camera folders.

    First sweep: build a tensor-slices dataset for each folder (timed).
    Second sweep: same, but additionally map/parse each file and write it out.

    NOTE(review): `f` and `tff` are project modules imported elsewhere in
    this file; grouping of the mapped/write statements under the timing
    context is assumed from the flattened source — confirm.
    """
    print("--------------------------------------------")
    camera_folders, _jsons, _labels = f.get_camera_folders(f.get_path(), root=f.get_root())
    title = "cars by folder:"

    def timed_sweep(write_files):
        # One pass over every camera folder; optionally map + write images.
        for index, camera_folder in enumerate(camera_folders):
            png_files = f.get_files(camera_folder, "*.png")
            file_count = len(png_files)
            print(index, "folder: " + camera_folder, "has:", file_count, "files.")
            label = ("folder: " + camera_folder + " [" + str(index) + "] has: "
                     + str(file_count) + " files.")
            with f.timing(label, file_count, title):
                dataset = tff.make_tensor_slices_dataset_list(png_files)
                if write_files:
                    mapped = dataset.map(tff.parse1and, tff.autotune)
                    tff.do_enumeration(mapped, parse2=tff.write_file)

    timed_sweep(False)
    print("----------------------------------------------")
    timed_sweep(True)
    print("--------------------------------------------")
def get_filenames_from_camera_folders(path, root=None):
    """Gather filename lists for every camera folder beneath `path`.

    Prints diagnostics: the folder count versus the expected 49, and two
    folders known to be problematic on this share.

    NOTE(review): depends on `f`, `join` (os.path), and
    `get_files_from_camera_folders`, all defined elsewhere in this file.
    """
    expected_count = 49
    print("getting directories from:", root, path)
    camera_folders = f.get_camera_folders(path, root=root)
    print(type(camera_folders), len(camera_folders), "camera folders, first:", camera_folders[0][0])
    if len(camera_folders) == expected_count:
        print("length is correct")
    else:
        print("length should be:", expected_count)
    # Known trouble spots, relative to `path` (absolute examples below).
    #missing=set(['L:/a2d2/camera_lidar_semantic/20181016_095036/camera/cam_front_center','L:/a2d2/camera_lidar_semantic/20181204_191844/camera/cam_front_center'])
    trouble_spots = [
        '20181016_095036/camera/cam_front_center',
        '20181204_191844/camera/cam_front_center',
    ]
    missing = [join(path, rel) for rel in trouble_spots]
    print("we expect problems with:", missing)
    # e.g. L:/a2d2/camera_lidar_semantic/20181016_095036/camera/cam_front_center
    #      L:/a2d2/camera_lidar_semantic/20181204_191844/camera/cam_front_center
    return get_files_from_camera_folders(list(zip(*camera_folders)))
# Script section: import TensorFlow (timed), sanity-check the filename lists,
# then build one dataset per camera folder and time enumerating them.
# NOTE(review): formatting reconstructed from a flattened line — which
# statements sit inside each `with`/`if` block is assumed; confirm against
# the original file.  `f` and `os` come from earlier in this file.
with f.timing("import tensorflow", 1):
    import tensorflow as tf
print(tf.__version__)
import tffunctions as tff

x, y, z = f.get_lists_of_filenames()
print("got (", len(x), len(y), len(z), ") files.", flush=True)

first = x[0]
print("first file:", first, os.path.exists(first))
path = f.path_head(first)
filename = f.path_leaf(first)
print(path, filename)

# Hard-coded sample path compared against the first filename as a sanity check.
maybe = 'L:/ss/sem/20180807_145028/camera/cam_front_center/20180807145028_camera_frontcenter_000000091.png'
if maybe == first:
    print("maybe =")
print("maybe file:", maybe, os.path.exists(maybe))

x, y, z = f.get_camera_folders(f.get_path(), root=f.get_root())
title = "cars by folder:"
#with f.timing("folder: "+folder+" ["+str(i)+"] has: "+str(len(files))+" files.",units,title):
#    ds=tff.make_tensor_slices_dataset_list(files)
filesets = f.getFilesets(x, "*.png")
print(len(filesets), "filesets.")
with f.timing("make datasets from filename sets.", len(x), title):
    datasets = tff.makeDatasets(filesets)
print(len(datasets), "datasets, type:", type(datasets))
tff.time_enumeration(datasets, units=len(datasets))  # was 1892?
# this will make the 44 datasets, one for each camera folder
# maybe this is enough for this file.
exit()

# NOTE(review): everything below is dead code — exit() above halts the script
# before these statements ever execute; concat_datasets also appears
# truncated in this chunk.
print("---------------------------------------")
def concat_datasets(datasets):
    ds0 = tf.data.Dataset.from_tensors(datasets[0])
# NOTE(review): this chunk begins mid-function — the `def` whose body ends at
# the `return` below (apparently a getffo-style files/folders/others helper)
# lies outside the visible source, so its tail is indented here by assumption.
    printffo(files, folders, others)
    return (files, folders, others)

def printijo(imageFiles, jsonFiles, otherFiles):
    # Show the first few entries of each file category for eyeballing.
    print("image_files:", imageFiles[:3])
    print("json_files:", jsonFiles[:3])
    print("others:", otherFiles[:3])

def getijo(l: list):
    # images, jsons, others - should only be given files.
    # Partition a flat list of file names into PNG images, JSON files, and
    # everything else (substring match, not extension match).
    imageFiles = [e for e in l if '.png' in e]
    maybeFiles = list(set(l) - set(imageFiles))
    jsonFiles = [e for e in maybeFiles if '.json' in e]
    otherFiles = list(set(maybeFiles) - set(jsonFiles))
    if len(maybeFiles) != 0:
        print("maybies:", maybeFiles[:3])
    printijo(imageFiles, jsonFiles, otherFiles)
    return imageFiles, jsonFiles, otherFiles

# Script section: walk three directory levels below the dataset root and
# classify the files found at the bottom level.
path = op.join(f.get_root(), f.get_path())  # these will be sorted
(folders, y, z) = f.get_camera_folders(path)
print('--------------------------')
f.ppl0(folders, name="folders from get camera folders", print_=True)
print('--------------------------')
files, folders, others = getffo(path)
print('--------------------------')
for folder in folders:
    (files2, folders2, others2) = getffo(op.join(path, folder))
    for folder2 in folders2:
        print("process folder:", folder2)
        files3, folders3, others3 = getffo(op.join(path, folder, folder2))
        # may need camera or lidar sub directory?
        for folder3 in folders3:
            # NOTE(review): prints folder2 again — possibly meant folder3.
            print("process folder:", folder2)
            files4, folders4, others4 = getffo(op.join(path, folder, folder2, folder3))
            # may need camera or lidar sub directory?
            (imageFiles, jsonFiles, otherFiles) = getijo(files4)
            # NOTE(review): chunk is truncated here — the body of this `if`
            # lies beyond the visible source.
            if True: