def test_make_zipfile_no_zlib(self):
    patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

    called = []
    zipfile_class = zipfile.ZipFile
    def fake_zipfile(*a, **kw):
        if kw.get('compression', None) == zipfile.ZIP_STORED:
            called.append((a, kw))
        return zipfile_class(*a, **kw)

    patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

    # create something to tar and compress
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    tarball = base_name + '.zip'
    self.assertEqual(called,
                     [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
    self.assertTrue(os.path.exists(tarball))
    with zipfile.ZipFile(tarball) as zf:
        self.assertEqual(sorted(zf.namelist()),
                         ['dist/file1', 'dist/file2', 'dist/sub/file3'])
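# A standalone sketch of the behavior these tests pin down (illustrative, and
# assumes a pre-3.12 Python where distutils still ships): make_zipfile takes a
# base name *without* extension, appends '.zip', and records member names
# relative to the current working directory -- hence the chdir into tmpdir.
import os
import tempfile
from distutils import archive_util

work = tempfile.mkdtemp()
os.mkdir(os.path.join(work, 'dist'))
with open(os.path.join(work, 'dist', 'file1'), 'w') as f:
    f.write('xxx')

old_cwd = os.getcwd()
os.chdir(work)
try:
    archive = archive_util.make_zipfile(os.path.join(work, 'archive'), 'dist')
finally:
    os.chdir(old_cwd)
print(archive)  # .../archive.zip, containing 'dist/file1'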
def build_final_zip_file(self):
    progress("Building final zip file from staging directory")
    package_name = "blenderseed-{0}-{1}-{2}".format(self.package_version,
                                                    self.settings.platform,
                                                    self.build_date)
    package_path = os.path.join(self.settings.output_dir, package_name)
    archive_util.make_zipfile(package_path, "blenderseed")
    info("Package path: {0}".format(package_path + ".zip"))
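# Hedged variant of the last two lines above: make_zipfile returns the path of
# the archive it wrote, so the ".zip" suffix need not be re-derived by hand
# (progress/info are this build script's own logging helpers):
#
#     archive_path = archive_util.make_zipfile(package_path, "blenderseed")
#     info("Package path: {0}".format(archive_path))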
def test_make_zipfile_no_zlib(self):
    patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

    called = []
    zipfile_class = zipfile.ZipFile
    def fake_zipfile(*a, **kw):
        if kw.get('compression', None) == zipfile.ZIP_STORED:
            called.append((a, kw))
        return zipfile_class(*a, **kw)

    patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

    # create something to tar and compress
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    tarball = base_name + '.zip'
    self.assertEqual(called,
                     [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
    self.assertTrue(os.path.exists(tarball))
    with zipfile.ZipFile(tarball) as zf:
        self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
def build_final_zip_file(self):
    package_name = "blenderseed-{0}-{1}".format(self.package_version,
                                                self.settings.platform)
    old_path = pushd(self.settings.package_temp_dir)
    archive_util.make_zipfile(
        os.path.join(self.settings.output_dir, package_name), ".")
    os.chdir(old_path)
def build_final_zip_file(self):
    package_name = "appleseed-maya{0}-{1}-{2}".format(self.settings.maya_version,
                                                      self.settings.plugin_version,
                                                      self.settings.platform)
    old_path = pushd(self.settings.package_output_path)
    archive_util.make_zipfile(
        os.path.join(self.settings.this_dir, package_name), ".")
    os.chdir(old_path)
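# These build scripts pair a pushd() helper with a manual os.chdir(old_path).
# A hedged sketch of a context-manager equivalent (illustrative, not the
# projects' actual helper), which restores the directory even if the body
# raises:
import os
from contextlib import contextmanager

@contextmanager
def pushd(path):
    old = os.getcwd()
    os.chdir(path)
    try:
        yield old
    finally:
        os.chdir(old)  # always restore the original working directory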
def test_make_zipfile(self):
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    tarball = base_name + '.zip'
    self.assertTrue(os.path.exists(tarball))
    with zipfile.ZipFile(tarball) as zf:
        self.assertEqual(sorted(zf.namelist()),
                         ['dist/file1', 'dist/file2', 'dist/sub/file3'])
def test_make_zipfile(self):
    # creating something to tar
    tmpdir = self.mkdtemp()
    self.write_file([tmpdir, 'file1'], 'xxx')
    self.write_file([tmpdir, 'file2'], 'xxx')

    tmpdir2 = self.mkdtemp()
    base_name = os.path.join(tmpdir2, 'archive')
    make_zipfile(base_name, tmpdir)

    # check if the compressed tarball was created
    tarball = base_name + '.zip'
    self.assertTrue(os.path.exists(tarball))
def test_make_zipfile(self):
    # creating something to tar
    tmpdir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    with change_cwd(tmpdir):
        make_zipfile(base_name, 'dist')

    # check if the compressed tarball was created
    tarball = base_name + '.zip'
    self.assertTrue(os.path.exists(tarball))
    with zipfile.ZipFile(tarball) as zf:
        self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
def upload_directory_to_vault(directory, vault):
    # create a local zip, which will become the Glacier archive; the base
    # name is random hex so repeated uploads don't collide (Python 2: str
    # objects support .encode('hex'))
    LOG.info('creating local zipfile of "%s"' % directory)
    try:
        # note: this local name shadows the stdlib `zipfile` module
        zipfile = archive_util.make_zipfile(os.urandom(14).encode('hex'),
                                            directory)
    except Exception:
        LOG.exception('could not create local zipfile, exception was:')
        raise SystemExit(1)
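# This snippet is Python 2 (the 'hex' codec on str). A hedged Python 3
# equivalent of the random archive base name:
import os
archive_base = os.urandom(14).hex()  # bytes.hex() replaces str.encode('hex')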
def test_make_zipfile_no_zlib(self):
    patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

    called = []
    zipfile_class = zipfile.ZipFile
    def fake_zipfile(*a, **kw):
        if kw.get('compression', None) == zipfile.ZIP_STORED:
            called.append((a, kw))
        return zipfile_class(*a, **kw)

    patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

    # create something to tar and compress
    tmpdir, tmpdir2, base_name = self._create_files()
    make_zipfile(base_name, tmpdir)

    tarball = base_name + '.zip'
    self.assertEqual(called,
                     [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
    self.assertTrue(os.path.exists(tarball))
def uploadall(self):
    """Upload all data to shared Google Drive folder."""
    old_dir = os.path.abspath(os.curdir)

    # Zip the tables directory
    os.chdir(os.path.dirname(self.tables_dir))
    base_name = os.path.basename(self.tables_dir)
    zip_name = archive_util.make_zipfile(base_name, base_name)

    # Upload the zipped file
    shutil.move(zip_name, os.path.join(UPLOADS_DIR, zip_name))
    os.chdir(old_dir)
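# Hedged alternative to the chdir-to-parent pattern above: shutil.make_archive
# accepts root_dir/base_dir directly, so member names come out relative without
# touching the process-wide working directory. zip_tables_dir is an
# illustrative helper, not part of the original code.
import os
import shutil

def zip_tables_dir(tables_dir):
    return shutil.make_archive(
        os.path.basename(tables_dir),           # creates '<name>.zip' in CWD
        'zip',
        root_dir=os.path.dirname(tables_dir),   # entered internally by make_archive
        base_dir=os.path.basename(tables_dir),  # member names start here
    )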
def make_ezipfile(base_name, base_dir, verbose=0, dry_run=0):
    fname = archive_util.make_zipfile(base_name, base_dir, verbose, dry_run)
    ofname = fname + ".old"
    os.rename(fname, ofname)
    of = open(ofname, "rb")  # the zip is binary data; text mode would corrupt it
    f = open(fname, "wb")
    f.write(EZIP_HEADER % base_dir)
    while True:
        data = of.read(8192)
        if not data:
            break
        f.write(data)
    f.close()
    of.close()
    os.unlink(ofname)
    os.chmod(fname, 0o755)  # mark the header-prefixed zip executable
    return fname
def make_ezipfile(base_name, base_dir, verbose=0, dry_run=0, **kwargs):
    fname = archive_util.make_zipfile(base_name, base_dir, verbose, dry_run)
    ofname = fname + ".old"
    os.rename(fname, ofname)
    of = open(ofname, "rb")  # the zip is binary data; text mode would corrupt it
    f = open(fname, "wb")
    f.write(EZIP_HEADER % base_dir)
    while True:
        data = of.read(8192)
        if not data:
            break
        f.write(data)
    f.close()
    # `zip -A` adjusts the archive's internal offsets to account for the
    # prepended header
    os.system("zip -A '%s'" % fname)
    of.close()
    os.unlink(ofname)
    os.chmod(fname, 0o755)
    return fname
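# Why `zip -A` appears only in the second variant: prepending EZIP_HEADER
# shifts every member's byte offset, and `zip -A` rewrites the central
# directory to compensate. Python's own zipfile locates the end-of-central-
# directory record from the file's tail, so it tolerates such a prefix even
# without the fix-up. A minimal, self-contained demonstration:
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('hello.txt', 'hi')

# Prepend a stub, as make_ezipfile does with EZIP_HEADER.
prefixed = b'#!/bin/sh\nexit 0\n' + buf.getvalue()

with zipfile.ZipFile(io.BytesIO(prefixed)) as zf:
    print(zf.read('hello.txt'))  # b'hi' -- readable despite the prefix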
def _make_ezipfile(base_name, base_dir, verbose=0, dry_run=0):
    """
    Zip all files in a directory.

    This is mainly used for zipping up Robot results or log files after
    the run.
    """
    fname = archive_util.make_zipfile(base_name, base_dir, verbose, dry_run)
    return fname
def do_collect(self, scriptname, targetdir, excludes=[], distlib='lib',
               zip_lib=False, verbose=False):
    """ See module documentation for an introduction and example.

        do_collect performs the actual work of this module.

        Arguments:

        scriptname  Path to your top-level application file. Can be
                    either relative or absolute.

        targetdir   Path to the target directory where the packaged
                    distribution will be placed. The distribution
                    consists of a loader script and a distribution
                    library (either a directory or a zip file). This
                    directory may not exist prior to invocation. If
                    it exists, it will be overridden.

        excludes    A list of module names for exclusion from the
                    distribution. For example, if you know all your
                    users have wxPython installed, you may want to
                    add 'wx' to this list - it will save a lot of
                    space.

        distlib     Name of the distribution library that will be
                    created in targetdir.

        zip_lib     True if you want the distribution library to be
                    zipped into a single file. False if you want it
                    to be an uncompressed directory.

                    Notes:

                    * While saving disk space, this option is likely
                      to hinder the start-up performance of the
                      script, because Python won't pre-compile the
                      .py files into .pyc files after the first load
                      if the .py files are in a zip archive.

                    * Due to a limitation of zipimport (Python's
                      built-in importer from zip files), your script
                      won't work after distribution if the library
                      contains extensions (.pyd & .pyo) or
                      console-less Windows scripts (.pyw). See the
                      documentation of zipimport for more
                      information.

        verbose     True to make do_collect print out its progress
                    to stdout. May be useful for the first time you
                    create a distribution for some application.

        Returns: Nothing. An exception may be thrown with an error
        message from one of the underlying method calls.
    """
    self.site_packages = os.path.normcase(
        distutils.sysconfig.get_python_lib(standard_lib=False))
    self.standard_lib = os.path.normcase(
        distutils.sysconfig.get_python_lib(standard_lib=True))
    self.sys_prefix = os.path.normcase(sys.prefix)
    self.verbose = verbose

    self.log("\nLibCollect v%s running in verbose mode\n" % version)

    # Initial preparation to create the lib directory
    #
    if os.path.exists(targetdir):
        self.log("Directory '%s' exists. Removing it." % targetdir)
        shutil.rmtree(targetdir)

    libdir = os.path.join(targetdir, distlib)
    self.log("Creating path '%s'" % libdir)
    mkpath(libdir)

    # Find the modules we need to collect
    #
    modules = self.find_modules(scriptname, excludes, verbose)
    self.log("Collecting modules into '%s'" % libdir)

    # Collect the modules in the lib directory
    #
    for modname, modtype, modfile in modules:
        modname_components = modname.split('.')
        if modtype == 'm':
            if len(modname_components) > 1:
                new_path = os.path.join(libdir, *modname_components[0:-1])
            else:
                new_path = libdir
        elif modtype == 'P':
            new_path = os.path.join(libdir, *modname_components)
        else:
            assert False
        mkpath(new_path)
        shutil.copy(modfile, new_path)

    os.chdir(targetdir)

    if zip_lib:
        self.log("Zipping directory '%s' into '%s'" % (libdir, libdir + '.zip'))
        make_zipfile(distlib, distlib)
        self.log("Removing directory '%s'" % libdir)
        shutil.rmtree(distlib)
        path_add = "os.path.join('" + distlib + ".zip', '" + distlib + "')"
    else:
        path_add = "'" + distlib + "'"

    # Create the loader script
    #
    self.log("Writing loader script: %s" % scriptname)
    loader = open(os.path.basename(scriptname), 'w')
    loader_name = os.path.splitext(scriptname)[0]
    loader.write("import os, sys, runpy\n")
    loader.write("sys.path.insert(0, %s)\n" % path_add)
    loader.write("runpy.run_module('%s', run_name=\"__main__\", alter_sys=True)\n"
                 % loader_name)
    loader.close()
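# The loader that do_collect writes boils down to three lines. A hedged
# example of its output with distlib='lib', zip_lib=True, and a hypothetical
# entry module named 'myapp':
import os, sys, runpy
sys.path.insert(0, os.path.join('lib.zip', 'lib'))
runpy.run_module('myapp', run_name="__main__", alter_sys=True)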
def preeditmovie(expt_raw_data_dir, expt_analyses_dir, positions, params):
    """
    Automated steps to perform prior to editing.
    """
    expt = os.path.basename(expt_analyses_dir)
    g = params['general']

    # First load or create log files for each position
    log.main(expt_raw_data_dir, expt_analyses_dir, positions, g['write_mode'])

    # Execute each position in succession
    for p in positions:
        # Update the terminal display
        read.updatelog(expt, p, 'preedit')
        print 'start position ' + p + ': ' + time.asctime()

        posn_raw_data_dir = os.path.join(expt_raw_data_dir, p)
        posn_analyses_dir = os.path.join(expt_analyses_dir, p)

        # Segmented files will be saved to a temporary directory
        temp_dir = os.path.join(posn_analyses_dir, 'temp')
        if g['write_mode'] == 0:
            read.rmkdir(temp_dir)
        else:
            read.cmkdir(temp_dir)

        # Pad with default parameters, and find frames to process
        frame_start, frame_stop = float('inf'), 0.
        for mode in MODES:
            print '---mode', mode
            d = params[mode]

            # Pad with default parameters as necessary
            d = eval('%s.workflow.fillparams(d)' % mode)

            # Find all .tif images of specified type in the given directory
            d['segment']['file_list'] = []
            for f in read.listfiles(posn_raw_data_dir, d['segment']['pattern']):
                j = read.getframenum(f, d['segment']['pattern'])
                if g['frame_range'][0] <= j < g['frame_range'][1]:
                    frame_start = min(frame_start, j)
                    frame_stop = max(frame_stop, j)
                    d['segment']['file_list'].append(f)
        frame_stop += 1

        # Create arguments for parallel processing
        args = [(posn_raw_data_dir, temp_dir, MODES, copy.deepcopy(params))
                for _ in range(g['num_procs'])]
        file_list = sorted(args[0][3]['phase']['segment']['file_list'])

        # # debug: select only a few files -BK
        # print 'initial frame stop', frame_stop
        # frame_stop = 500
        # file_list = file_list[:frame_stop]
        # # debug: select only a few files -BK

        inds = partition_indices(file_list, g['num_procs'])
        for (sta_ind, end_ind), arg in zip(inds, args):
            arg[3]['phase']['segment']['file_list'] = file_list[sta_ind:end_ind]

        # Process each block of frames in parallel
        parallel.main(preeditblock, args, g['num_procs'])
        print 'extract: ' + time.asctime()

        # Archive the output files into .zip files, then delete each .tif
        num_tifs = frame_stop - frame_start
        num_digits = int(np.ceil(np.log10(num_tifs + 1)))

        # Create new set of directories with pre-specified block size
        frames = range(frame_start, frame_stop - 1, g['block_size'])
        frames.append(frame_stop)
        block_frames = zip(frames[:-1], frames[1:])

        # Make directories to hold files, named according to frames
        read.cmkdir(os.path.join(posn_analyses_dir, 'blocks'))
        block_dirs = []
        for j1, j2 in block_frames:
            strs = [str(v).zfill(num_digits) for v in (j1, j2)]
            v = os.path.join(posn_analyses_dir, 'blocks',
                             'frame{}-{}'.format(*strs))
            os.mkdir(v)
            block_dirs.append(v)

        for m in MODES:
            # The segmented .tif files will be stored in a .zip file
            zip_name = m.capitalize() + 'Segment'
            [read.cmkdir(os.path.join(v, zip_name)) for v in block_dirs]

            # Find all segmented .tif images and transfer to the new directories
            d = params[m]
            for f in read.listfiles(temp_dir, d['segment']['pattern']):
                j = read.getframenum(f, d['segment']['pattern'])
                for i, (j1, j2) in enumerate(block_frames):
                    if j1 <= j < j2:
                        old_name = os.path.join(temp_dir, f)
                        zip_dir = os.path.join(block_dirs[i], zip_name)
                        shutil.move(old_name, zip_dir)

            # Zip each directory of segmented .tif files
            old_dir = os.path.abspath(os.curdir)
            for v in block_dirs:
                os.chdir(v)
                archive_util.make_zipfile(zip_name, zip_name)
                shutil.rmtree(zip_name)
                os.chdir(old_dir)

            # Make temporary directories for data outputs
            dat_name = m.capitalize() + 'Data'
            [read.cmkdir(os.path.join(v, dat_name)) for v in block_dirs]

            # Find all analyzed .pickle files and transfer to the new directories
            f, e = os.path.splitext(d['segment']['pattern'])
            dat_pattern = (f + '.pickle' + e[4:])
            for f in read.listfiles(temp_dir, dat_pattern):
                j = read.getframenum(f, dat_pattern)
                for i, (j1, j2) in enumerate(block_frames):
                    if j1 <= j < j2:
                        # Transfer each frame to the correct block
                        old_name = os.path.join(temp_dir, f)
                        dat_dir = os.path.join(block_dirs[i], dat_name)
                        shutil.move(old_name, dat_dir)

            # Concatenate each set of files into a DataFrame for each parameter
            for block_dir in block_dirs:
                dat_dir = os.path.join(block_dir, dat_name)
                data = []
                for u in os.listdir(dat_dir):
                    dat_file = os.path.join(dat_dir, u)
                    try:
                        d = read_pickle(dat_file)
                    except:
                        continue  # skip unreadable files rather than appending stale data
                    data.append(d)
                df = concat(data)
                df = df.reindex(sorted(df.index))
                for c in df.columns:
                    df[c].to_pickle(os.path.join(block_dir, c + '.pickle'))
                shutil.rmtree(dat_dir)
        print 'shuffle: ' + time.asctime()

        # Delete all temporary files
        shutil.rmtree(temp_dir)
        '''
        block_dirs = [os.path.join(posn_analyses_dir, 'blocks', v) for v in
                      os.listdir(os.path.join(posn_analyses_dir, 'blocks'))
                      if 'frame' in v]
        '''
        # Track the blocks in parallel
        args = []
        for v in block_dirs:
            output_file = os.path.join(v, 'Trace.pickle')
            if os.path.isfile(output_file):
                os.remove(output_file)
            args.append((v, output_file, params['phase']['track']))
        parallel.main(trackblock, args, g['num_procs'])
        print 'track: ' + time.asctime()

        # Stitch independently-tracked trajectories together
        stitchblocks(block_dirs, params['phase']['track'])
        print 'stitch: ' + time.asctime()

        # Collate the data for manual editing
        output_file = os.path.join(posn_analyses_dir, 'edits.pickle')
        collateblocks(block_dirs, output_file, params['phase']['collate'])
        print 'collate: ' + time.asctime()

        # Update the experiment log file
        read.updatelog(expt, p, 'preedit', expt_analyses_dir)
        print 'final: ' + time.asctime()
def zipFolder(source, target):
    # chdir to the parent so the archive's member names start at the folder name
    os.chdir(os.path.dirname(source))
    make_zipfile(target, './' + os.path.basename(source))
def zip():  # note: shadows the builtin zip() within this build script
    print("Creating zip")
    os.chdir(build_folder)
    make_zipfile(composite_mod_folder_name, composite_mod_folder_name)
    os.chdir("../")
def build_final_zip_file(self):
    progress("Building final zip file from staging directory")
    package_base_path = os.path.splitext(self.package_info.package_path)[0]
    archive_util.make_zipfile(package_base_path, "appleseed")