def update_video(video, uuid):
    response = question('Attempt to set UUID in title of video file, '
                        '`%s`?' % video, title='Update video?')
    if response == QMessageBox.StandardButton.Yes:
        try:
            f = mutagen.File(video)
            if ('\xa9nam' not in f.tags) or ('UUID' not in
                                             f.tags['\xa9nam']):
                f.tags['\xa9nam'] = 'DMF chip QC - UUID: %s' % uuid
                f.save()
                logging.info('wrote UUID to video title: `%s`', video)
        except Exception:
            logging.warning('Error setting video title.', exc_info=True)
    # Substitute UUID into output directory path as necessary.
    path_subs_dict = {'uuid': uuid}
    path_subs_dict.update(_date_subs_dict())
    output_dir_ = ph.path(output_dir % path_subs_dict).expand().realpath()
    output_dir_.makedirs_p()
    output_path = output_dir_.joinpath('%s.mp4' % uuid)
    if not output_path.exists() or overwrite or \
            (question('Output `%s` exists. Overwrite?' % output_path,
                      title='Overwrite?') ==
             QMessageBox.StandardButton.Yes):
        if output_path.exists():
            output_path.remove()
        ph.path(video).move(output_path)
        logging.info('moved video to : `%s`', output_path)
def documents_directory(platform_=None):
    if platform_ is None:
        platform_ = platform.platform()
    if platform_.startswith('Windows'):
        import ctypes
        from ctypes.wintypes import MAX_PATH

        try:
            import win32com.shell.shellcon as shellcon
            documents_csidl_code = shellcon.CSIDL_PERSONAL
        except ImportError:
            documents_csidl_code = 0x05  # CSIDL_PERSONAL
        dll = ctypes.windll.shell32
        buf = ctypes.create_unicode_buffer(MAX_PATH + 1)
        # Query Windows API to get `Documents` directory for current user.
        success = dll.SHGetSpecialFolderPathW(None, buf,
                                              documents_csidl_code, False)
        if not success:
            raise IOError('Could not determine user `Documents` directory.')
        return path(buf.value)
    elif platform_.startswith('Linux') or platform_.startswith('Darwin'):
        return path('~').expand()
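# Hedged usage sketch for `documents_directory` (assumes `path_helpers.path`
# is imported as `path`, as in the surrounding functions).  With no argument
# the current platform is detected; a platform string may also be passed
# explicitly, e.g., for testing.
docs_dir = documents_directory()
log_dir = docs_dir.joinpath('MicroDrop', 'logs')
log_dir.makedirs_p()  # Create intermediate directories as needed.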
def test_load_non_existant_dmf_device():
    """
    test loading DMF device file that doesn't exist
    """
    DmfDevice.load(path(__file__).parent / path('devices') /
                   path('no device'))
def save(self, filename=None):
    if filename is None:
        filename = self.filename
    # make sure that the parent directory exists
    path(filename).realpath().parent.makedirs_p()
    with open(filename, 'w') as f:
        self.data.write(outfile=f)
def on_experiment_log_changed(self, experiment_log):
    # Reset number of completed DStat experiments for each step.
    self.dstat_experiment_count_by_step = {}
    self.dstat_experiment_data = None

    app = get_app()
    app_values = self.get_app_values()
    calibrator_file = app_values.get('calibrator_file', '')
    data = {'calibrator_file': calibrator_file}
    if hasattr(app, 'experiment_log') and app.experiment_log:
        app.experiment_log.metadata[self.name] = data

    # Copy the calibrator file to the experiment log directory.
    if calibrator_file:
        if not path(calibrator_file).isfile():
            logger.error('Calibration file (%s) does not exist.' %
                         calibrator_file)
        else:
            try:
                output_path = (path(app.experiment_log.get_log_path()) /
                               self.name)
                if not output_path.isdir():
                    output_path.mkdir()
                path(calibrator_file).copy2(output_path / 'calibrator.csv')
            except Exception:
                logger.error('Could not copy calibration file to the '
                             'experiment log directory.', exc_info=True)
def __init__(self, in_file, in_file2=None, *args, **kwargs):
    super(RegistrationDemoGUI, self).__init__(*args, **kwargs)
    self.in_file = path(in_file)
    if in_file2:
        self.in_file2 = path(in_file2)
    else:
        self.in_file2 = None
def update(self):
    plugin_names = self.get_plugin_names()
    del self.plugins
    self.plugins = []
    for name in plugin_names:
        p = PluginController(self, name)
        # Skip the plugin if it has been marked for uninstall, or no
        # longer exists
        if p.get_plugin_path().abspath() in self.requested_deletions \
                or not p.get_plugin_path().isdir():
            continue
        self.plugins.append(p)

    # Save the list of path deletions to be processed on next app launch
    app = get_app()
    requested_deletion_path = (path(app.config.data['plugins']['directory'])
                               .joinpath('requested_deletions.yml'))
    requested_deletion_path.write_bytes(
        yaml.dump([p.abspath() for p in self.requested_deletions]))
    rename_queue_path = (path(app.config.data['plugins']['directory'])
                         .joinpath('rename_queue.yml'))
    rename_queue_path.write_bytes(
        yaml.dump([(p1.abspath(), p2.abspath())
                   for p1, p2 in self.rename_queue]))
    post_install_queue_path = (path(app.config.data['plugins']['directory'])
                               .joinpath('post_install_queue.yml'))
    post_install_queue_path.write_bytes(
        yaml.dump([p.abspath() for p in self.post_install_queue]))
def update(self):
    plugin_names = self.get_plugin_names()
    del self.plugins
    self.plugins = []
    for name in plugin_names:
        plugin_controller = PluginController(self, name)
        # Skip the plugin if it has been marked for uninstall, or no
        # longer exists
        if (plugin_controller.get_plugin_path().abspath() in
                self.requested_deletions) or \
                (not plugin_controller.get_plugin_path().isdir()):
            continue
        self.plugins.append(plugin_controller)

    # Save the list of path deletions to be processed on next app launch
    app = get_app()
    requested_deletion_path = (path(app.config.data['plugins']['directory'])
                               .joinpath('requested_deletions.yml'))
    requested_deletion_path.write_bytes(
        yaml.dump([p.abspath() for p in self.requested_deletions]))
    rename_queue_path = (path(app.config.data['plugins']['directory'])
                         .joinpath('rename_queue.yml'))
    rename_queue_path.write_bytes(
        yaml.dump([(p1.abspath(), p2.abspath())
                   for p1, p2 in self.rename_queue]))
    post_install_queue_path = (path(app.config.data['plugins']['directory'])
                               .joinpath('post_install_queue.yml'))
    post_install_queue_path.write_bytes(
        yaml.dump([p.abspath() for p in self.post_install_queue]))
def parse_args(args=None):
    '''Parses arguments, returns parsed argument namespace.'''
    if args is None:
        # Skip the program name in `sys.argv`.
        args = sys.argv[1:]

    parser = argparse.ArgumentParser(description='MicroDrop plugin '
                                     'Conda recipe builder')
    parser.add_argument('-s', '--source-dir', type=ph.path, nargs='?')
    parser.add_argument('-t', '--target-dir', type=ph.path, nargs='?')
    parser.add_argument('-p', '--package-name', nargs='?')
    parser.add_argument('-v', '--versionNumber', nargs='?')

    # Parse the provided argument list rather than implicitly re-reading
    # `sys.argv`.
    parsed_args = parser.parse_args(args)

    if not parsed_args.source_dir:
        parsed_args.source_dir = ph.path(os.environ['SRC_DIR'])
    if not parsed_args.target_dir:
        prefix_dir = ph.path(os.environ['PREFIX'])
        # Extract module name from Conda package name.
        #
        # For example, the module name for a package named
        # `microdrop.droplet_planning_plugin` would be
        # `droplet_planning_plugin`.
        module_name = os.environ['PKG_NAME'].split('.')[-1].replace('-', '_')
        parsed_args.target_dir = prefix_dir.joinpath('share', 'microdrop',
                                                     'plugins', 'available',
                                                     module_name)
    if not parsed_args.package_name:
        parsed_args.package_name = os.environ['PKG_NAME']
    if not parsed_args.versionNumber:
        parsed_args.versionNumber = None
    return parsed_args
def data_files():
    '''
    Collect data files for packages that are not supported out of the box.
    '''
    # Seems like `libzmq.pyd` needs to be copied to `dist` directory, *even
    # though* it is already automatically copied by `py2exe` to the name
    # `zmq.libzmq.pyd`.
    data_files_ = [('', [ph.path(zmq.__path__[0]).joinpath('libzmq.pyd')])]

    # Jupyter notebook templates and static files cannot be accessed within
    # the zip file, so they need to be copied.  The `nbformat` package
    # contains a `jsonschema` file which cannot be accessed within the zip
    # file, so it also needs to be copied.
    for path_i, module_i in ((ph.path(notebook.__path__[0]), ('templates', )),
                             (ph.path(notebook.__path__[0]), ('static', )),
                             (ph.path(nbformat.__path__[0]), tuple())):
        data_path = path_i.joinpath(*module_i)
        # Copy the template files to the collection dir.  Also add the
        # copied file to the list of compiled files so it will be included
        # in the zipfile.
        files = sorted(data_path.walkfiles())
        for parent_i, files_i in it.groupby(files, lambda x: x.parent):
            data_files_ += [(path_i.parent.relpathto(parent_i),
                             list(files_i))]
    return data_files_
def update(self):
    '''
    Update list of plugin controllers (one controller for each imported
    plugin in the ``microdrop.managed`` environment).

    .. notes::
        Also update **deletion**, **rename**, and **post-install** queue
        files.
    '''
    plugin_names = self.get_plugin_names()
    del self.plugins
    self.plugins = []
    for name in plugin_names:
        plugin_controller = PluginController(self, name)
        # Skip the plugin if it has been marked for uninstall, or no
        # longer exists
        if (plugin_controller.get_plugin_path().abspath() in
                self.requested_deletions) or \
                (not plugin_controller.get_plugin_path().isdir()):
            continue
        self.plugins.append(plugin_controller)

    # Save the list of path deletions to be processed on next app launch
    app = get_app()
    requested_deletion_path = (ph.path(app.config
                                       .data['plugins']['directory'])
                               .joinpath('requested_deletions.yml'))
    requested_deletion_path.write_bytes(
        yaml.dump([p.abspath() for p in self.requested_deletions]))
    rename_queue_path = (ph.path(app.config.data['plugins']['directory'])
                         .joinpath('rename_queue.yml'))
    rename_queue_path.write_bytes(
        yaml.dump([(p1.abspath(), p2.abspath())
                   for p1, p2 in self.rename_queue]))
def test_load_non_existant_protocol():
    """
    test loading protocol file that doesn't exist
    """
    Protocol.load(path(__file__).parent / path('protocols') /
                  path('no protocol'))
def main(output_dir):
    output_dir = path(output_dir)
    if not output_dir.isdir():
        output_dir.makedirs_p()
    elif list(output_dir.files()):
        raise IOError('Output directory exists and is not empty.')

    config_path = output_dir.joinpath('microdrop.ini')
    with config_path.open('wb') as output:
        template = jinja2.Template(config_template)
        config_str = template.render(output_dir=output_dir.name)
        output.write(config_str)

    py_exe = path(sys.executable).abspath()
    launcher_path = output_dir.joinpath('microdrop.bat')
    with launcher_path.open('wb') as output:
        template = jinja2.Template(launcher_template)
        launcher_str = template.render(working_dir=output_dir.abspath(),
                                       py_exe=py_exe,
                                       config_path=config_path.abspath())
        output.write(launcher_str)

    print 'Start MicroDrop with the following:'
    print '\n    %s' % launcher_path.abspath()
def on_remove_clicked(profile_row_i):
    dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION)
    dialog.set_icon_from_file(ICON_PATH)
    dialog.set_title('Remove profile')
    RESPONSE_REMOVE, RESPONSE_REMOVE_WITH_DATA, RESPONSE_CANCEL = range(3)
    dialog.add_buttons('_Remove', RESPONSE_REMOVE,
                       'Remove with _data', RESPONSE_REMOVE_WITH_DATA,
                       'Can_cel', RESPONSE_CANCEL)
    dialog.set_markup('Remove the following profile from list?\n\n'
                      '    {}\n\n'
                      '<b>"Remove with data"</b> removes profile from '
                      'list <b>and deletes the profile '
                      'directory</b>.'.format(profile_row_i.path))
    response = dialog.run()
    dialog.destroy()
    if response not in (RESPONSE_REMOVE, RESPONSE_REMOVE_WITH_DATA):
        return
    try:
        if response == RESPONSE_REMOVE_WITH_DATA:
            dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION)
            dialog.set_icon_from_file(ICON_PATH)
            dialog.set_title('Confirm profile delete')
            dialog.set_markup('Remove profile data (cannot be undone)?')
            dialog.add_buttons(gtk.STOCK_YES, gtk.RESPONSE_YES,
                               gtk.STOCK_NO, gtk.RESPONSE_NO)
            response = dialog.run()
            dialog.destroy()
            if response == gtk.RESPONSE_YES:
                ph.path(profile_row_i.path).rmtree()
            else:
                return
    except Exception, exception:
        gd.error(str(exception))
def test_load_non_existant_experiment_log():
    """
    test loading experiment log file that doesn't exist
    """
    ExperimentLog.load(path(__file__).parent / path('experiment_logs') /
                       path('no log'))
def test_import_device(root=None):
    if root is None:
        root = path(__file__).parent
    else:
        root = path(root)
    for i in range(6):
        yield _import_device, i, root
def on_button_clicked(self, widget, data=None):
    if callable(self.starting_dir):
        starting_dir = self.starting_dir()
    else:
        starting_dir = self.starting_dir
    if self.value:
        if path(self.value).isdir():
            starting_dir = path(self.value)
        elif path(self.value).parent.isdir():
            starting_dir = path(self.value).parent
    if self.mode == 'file':
        response, filepath = \
            self.browse_for_file('Select file path', action=self.action,
                                 starting_dir=starting_dir)
    elif self.mode == 'directory':
        response, filepath = \
            self.browse_for_file('Select directory', action=self.action,
                                 starting_dir=starting_dir)
    else:
        raise ValueError('[Filepath] Invalid mode: %s' % self.mode)
    if response == gtk.RESPONSE_OK:
        logging.info('got new filepath: %s' % filepath)
        self.value = path(filepath)
        self.emit('content-changed')
def test_fit_parameters_to_velocity_data():
    velocity_df = pd.read_csv(path(__file__).parent /
                              path('data_0/velocity_summary_data.csv'),
                              index_col=0)
    fit_df, outliers_df = fit_parameters_to_velocity_data(velocity_df,
                                                          eft=0.3,
                                                          cache_path=None)
    ref_df = pd.read_csv(path(__file__).parent /
                         path('data_0/fitted_params.csv'), index_col=0)

    # Per-parameter absolute tolerances for comparison against the
    # reference fit.
    test_dict = {
        'f_th_linear': 1e-10,
        'f_th_linear_error': 1e-10,
        'k_df_linear': 1e-10,
        'k_df_linear_error': 1e-10,
        'f_sat': 1e-10,
        'f_th_post_sat': 1e-10,
        'f_th_post_sat_error': 1e-10,
        'k_df_post_sat': 1e-10,
        'k_df_post_sat_error': 1e-10,
        'R2_post_sat': 1e-10,
        'R2_mkt': 1e-10,
        'f_th_mkt': 1e-6,
        'f_th_mkt_error': 1e-6,
        'Lambda': 1e-4,
        'Lambda_error': 1e-4,
        'k0': 1e-4,
        'k0_error': 1e-4,
        'max_sinh_arg': 1e-4,
    }
    for name, tol in test_dict.items():
        yield _check_param, fit_df, ref_df, name, tol
def generate_recipe(package_str, conda_package_name, template_root,
                    output_root, overwrite=False):
    '''
    Generate recipe for latest MicroDrop version on PyPI.
    '''
    package_name, releases = pip_helpers.get_releases(package_str, pre=True)
    versions = releases.keys()

    output_root = ph.path(output_root)
    template_root = ph.path(template_root)

    for version_i in versions[-1:]:
        output_root_i = output_root.joinpath('microdrop-{}'
                                             .format(version_i))
        output_root_i.makedirs_p()
        for template_file in template_root.files():
            output_path_i = output_root_i.joinpath(template_file.name)
            if output_path_i.isfile() and not overwrite:
                raise IOError('Output file exists {}. Use `-f` to '
                              'overwrite.'.format(output_path_i))
            with template_file.open('r') as template_fhandle:
                try:
                    template = jinja2.Template(template_fhandle.read())
                except Exception:
                    # Not a valid Jinja template; copy the file verbatim.
                    template_file.copy(output_path_i)
                else:
                    text_i = template.render(conda_package_name=
                                             conda_package_name,
                                             version=version_i)
                    with output_path_i.open('w') as output:
                        output.write(text_i)
def get_scripts(package_specs, package_scripts):
    '''Get list of script references.

    Any script name ending with the suffix ``-script`` will have the suffix
    removed in the resulting executable name.

    Parameters
    ----------
    package_specs : list[str]
        List of Conda package specifiers, e.g., from the dependencies
        section of a Conda environment YAML file.
    package_scripts : dict[str->list]
        Mapping from Conda package name to a list of scripts to bundle for
        that package.  Each entry is either a script path or a dictionary
        of `py2exe` script settings containing at least a ``script`` path.

    Returns
    -------
    list[str or dict]
        List of script references (paths or `py2exe` script dictionaries)
        for the specified packages.
    '''
    package_specs = package_specs[:]
    package_specs.insert(0, '__core__')

    scripts = []

    # Save shell environment.
    init_env = os.environ.copy()
    try:
        os.environ['PY2EXE_HELPERS'] = ph.path(__file__).parent
        for package_spec_i in package_specs:
            name_i = re.split(r'=+', package_spec_i)[0]
            if name_i in package_scripts:
                for script_ij in package_scripts[name_i]:
                    if isinstance(script_ij, dict):
                        script_path_ij = \
                            ph.path(script_ij['script']).expand()
                        script_ij = script_ij.copy()
                        script_ij['script'] = str(script_path_ij)
                    else:
                        script_path_ij = ph.path(script_ij).expand()
                        if script_path_ij.namebase.endswith('-script'):
                            # Strip the `-script` suffix from the executable
                            # name (slice off the suffix explicitly, since
                            # `str.rstrip` strips a character set rather
                            # than a suffix).
                            dest_base_ij = (script_path_ij
                                            .namebase[:-len('-script')])
                            script_ij = {'script': str(script_path_ij),
                                         'dest_base': dest_base_ij}
                        else:
                            script_ij = str(script_path_ij)
                    scripts.append(script_ij)
    finally:
        os.environ = init_env
    return scripts
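# Hedged usage sketch for `get_scripts` (the script paths and package names
# below are hypothetical examples, not taken from a real configuration).
example_scripts = {
    '__core__': ['~/scripts/microdrop-launcher-script.py'],
    'microdrop': [{'script': '~/scripts/microdrop-gui.py',
                   'icon_resources': [(0, 'microdrop.ico')]}],
}
scripts = get_scripts(['microdrop==2.0', 'numpy'], example_scripts)
# `microdrop-launcher-script.py` would yield an executable named
# `microdrop-launcher`.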
def home_dir():
    if platform.system() == 'Windows':
        from win32com.shell import shell, shellcon

        dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PERSONAL, 0, 0)
    else:
        dir = path('~').expand()
    return path(dir)
def __init__(self, dirpath, root=None):
    self.dirpath = path(dirpath)
    #self.id = _clean_id(self.dirpath)
    self.id = 'ID__%s' % hashlib.md5(self.dirpath).hexdigest().upper()
    if root:
        self.dirname = path(root) / self.dirpath.name
    else:
        self.dirname = self.dirpath.name
def app_data_dir():
    if os.name == 'nt':
        from win32com.shell import shell, shellcon

        app_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0)
    else:
        app_dir = path('~').expand()
    logging.debug('app_data_dir()=%s' % app_dir)
    return path(app_dir)
def base_path():
    p = path(__file__).parent.abspath()
    attempted_paths = []
    while p != path("/").abspath() and not p.joinpath("pygst_utils").isdir():
        attempted_paths.append(p)
        p = p.parent
    if not p:
        raise RuntimeError("cannot find server.exe (attempted paths: %s)" %
                           attempted_paths)
    return p.joinpath("pygst_utils")
def _init_plugins_dir(self):
    if self.data['plugins']['directory'] is None:
        self.data['plugins']['directory'] = (path(self['data_dir'])
                                             .joinpath('plugins'))
    plugins_directory = path(self.data['plugins']['directory'])
    if not plugins_directory.isdir():
        plugins_directory.makedirs_p()
    if not plugins_directory.joinpath('__init__.py').isfile():
        plugins_directory.joinpath('__init__.py').touch()
def home_dir():
    if os.name == 'nt':
        from win32com.shell import shell, shellcon

        dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PERSONAL, 0, 0)
    else:
        dir = path('~').expand()
    logging.debug('home_dir()=%s' % dir)
    return path(dir)
def __init__(self, filepath, root=None):
    self.filepath = path(filepath)
    self.guid = str(self._get_guid())
    #self.id = _clean_id(self.filepath)
    self.id = 'ID__%s' % hashlib.md5(self.filepath).hexdigest().upper()
    if root:
        self.filename = path(root) / self.filepath.name
    else:
        self.filename = self.filepath.name
    self.source = self.filepath
def dump_cpp_ast(env):
    project_dir = ph.path(env['PROJECT_DIR'])
    project_name = project_dir.name.replace('-', '__')
    lib_dir = project_dir.joinpath('lib', project_name)
    lib_dir.makedirs_p()
    main_c_file = ph.path(env['PROJECTSRC_DIR']).joinpath('main.cpp')
    cpp_ast_json = parse_cpp_ast(main_c_file, env)
    with lib_dir.joinpath('cpp_ast.json').open('w') as output:
        json.dump(cpp_ast_json, output, indent=2)
def base_path():
    # When executing from a frozen (pyinstaller) executable...
    if hasattr(sys, 'frozen'):
        return path(sys.executable).parent
    # Otherwise...
    try:
        script = path(__file__)
    except NameError:
        script = path(sys.argv[0])
    return script.parent
def build_pyx(self, pyx_source_path, module_name=None, module_dir=None,
              pyx_kwargs=None, **distutils_kwargs):
    if pyx_kwargs is None:
        pyx_kwargs = {}
    build_cache_root = self.build_cache_root
    if not build_cache_root.isdir():
        build_cache_root.makedirs_p()
    pyx_source_path = path(pyx_source_path).expand()
    if module_name is None:
        module_name = pyx_source_path.namebase
    build_dir = path(tempfile.mkdtemp(prefix='temp_%s__' % module_name,
                                      dir=build_cache_root))
    try:
        source_file = build_dir.joinpath(module_name + '.pyx')
        pyx_source_path.copy(source_file)
        # If there is a Cython `pxd` header, copy it to the build
        # directory.
        header_file = pyx_source_path.parent.joinpath(
            pyx_source_path.namebase + '.pxd')
        if header_file.isfile():
            header_file.copy(source_file.parent.joinpath(module_name +
                                                         '.pxd'))
        else:
            print '"%s" is not a file.' % header_file
        compile_result = compile(source_file, default_options, **pyx_kwargs)
        if module_dir is None:
            module_dir = build_cache_root.joinpath(module_name)
        else:
            module_dir = path(module_dir).expand()
        module_dir.makedirs_p()
        success = self.builder(compile_result.c_file, build_dir=module_dir,
                               **distutils_kwargs)
        if not success:
            raise RuntimeError('Error building extension: %s' %
                               compile_result.c_file)
        else:
            if module_dir not in sys.path:
                sys.path.insert(0, module_dir)
    except:
        print build_dir
        raise
    else:
        build_dir.rmtree()
    return module_dir, module_name
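# Hedged usage sketch for `build_pyx` (assumes `builder_` is an instance of
# the class defining this method, and `fast_math.pyx` is a hypothetical
# Cython source file).
module_dir, module_name = builder_.build_pyx('~/src/fast_math.pyx')
# `module_dir` was prepended to `sys.path` on success, so the compiled
# extension can be imported by name.
fast_math = __import__(module_name)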
def __init__(self, root_path='.'):
    self.root_path = path(root_path)
    if root_path is None:
        dir_node = path(os.getcwd())
        while not dir_node.dirs('.git') and dir_node:
            dir_node = dir_node.parent
        if not dir_node:
            raise GitError('No git root found.')
        self.root_path = dir_node
    self._git = None
    assert (self.root_path / path('.git')).exists()
def download_latest(self, package_name, output_dir, app_version=None):
    output_dir = path(output_dir)
    package_url = self.latest_package_url(package_name, app_version)
    package_full_url = '%s%s' % (self.server_url, package_url)
    data = urllib.urlopen(package_full_url).read()
    local_path = output_dir.joinpath(path(package_url).name)
    if not local_path.isfile():
        local_path.write_bytes(data)
        print 'Saved latest %s to %s' % (package_name, local_path)
    else:
        print 'File %s already exists - skipping download' % (local_path)
def __init__(self, root_path='.'):
    self.root_path = path(root_path)
    if root_path is None:
        dir_node = path(os.getcwd())
        while not dir_node.dirs('.git') and dir_node:
            dir_node = dir_node.parent
        if not dir_node:
            raise GitError('No git root found.')
        self.root_path = dir_node
    self._git = None
    assert self.root_path.dirs('.git')
def launch_profile(profile_path):
    '''
    1. If cached latest MicroDrop version is newer than currently installed
       version, prompt user to offer to upgrade.
    2. Launch MicroDrop using specified profile path.

    .. versionchanged:: 0.7.2
        Launch MicroDrop in an **activated** Conda environment.

    Parameters
    ----------
    profile_path : str
        File-system path to MicroDrop profile directory.

    Returns
    -------
    int
        Exit code from MicroDrop program.
    '''
    # Prompt user to upgrade MicroDrop if a newer version is available.
    check_version_cache_for_upgrade()

    # Launch MicroDrop using specified profile path.
    profile_path = ph.path(profile_path)
    verify_or_create_profile_version(profile_path)

    config_file = profile_path.joinpath('microdrop.ini')
    # Major version in `RELEASE-VERSION` file and major version of
    # installed MicroDrop package **match**.
    original_directory = ph.path(os.getcwd())
    try:
        # Change directory into the parent directory of the configuration
        # file.
        os.chdir(config_file.parent)
        return_code = None
        env = os.environ.copy()
        env['MICRODROP_PROFILE'] = str(profile_path)
        env['MICRODROP_CONFIG'] = str(config_file)
        # Return code of `5` indicates program should be restarted.
        while return_code is None or return_code == 5:
            # Launch MicroDrop and save return code.
            #
            # XXX Use `conda_activate_command` to launch MicroDrop in an
            # **activated** Conda environment.  See [issue #10][i10].
            #
            # [i10]: https://github.com/wheeler-microfluidics/microdrop-launcher/issues/10
            command = (ch.conda_activate_command() +
                       ['&', sys.executable, '-m', 'microdrop.microdrop',
                        '-c', config_file])
            return_code = sp.call(command, env=env, shell=True)
    finally:
        # Restore original working directory.
        os.chdir(original_directory)
    return return_code
def __init__(self, root_path='.'):
    self.root_path = path(root_path)
    if root_path is None:
        dir_node = path(os.getcwd())
        while ((not dir_node.dirs('.git') and not dir_node.files('.git'))
               and dir_node):
            dir_node = dir_node.parent
        if not dir_node:
            raise GitError('No git root found.')
        self.root_path = dir_node
    self._git = None
    assert self.root_path.dirs('.git') or self.root_path.files('.git')
def get_serial_ports():
    if os.name == 'nt':
        ports = _get_serial_ports_windows()
    else:
        ports = itertools.chain(ph.path('/dev').walk('ttyUSB*'),
                                ph.path('/dev').walk('ttyACM*'),
                                ph.path('/dev').walk('tty.usb*'))
    # sort list alphabetically
    ports_ = [port for port in ports]
    ports_.sort()
    for port in ports_:
        yield port
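# Hedged usage sketch for `get_serial_ports`: the generator yields candidate
# serial device paths in sorted order (e.g., `/dev/ttyUSB0` on Linux).
for port in get_serial_ports():
    print('found serial port: %s' % port)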
def test_find_outliers():
    velocity_df = pd.read_csv(path(__file__).parent /
                              path('data_0/velocity_summary_data.csv'),
                              index_col=0)
    fit_df, outliers_df = fit_parameters_to_velocity_data(velocity_df,
                                                          eft=0.3,
                                                          cache_path=None)
    ref_outliers_df = pd.read_csv(path(__file__).parent /
                                  path('data_0/outliers.csv'), index_col=0)

    # check that the outlier masks match
    assert np.all(outliers_df == ref_outliers_df)
def compress_core_action(self, target, source, env):
    import re

    #core_pattern = re.compile(r'build.*/core/'.replace('/', os.path.sep))
    core_pattern = re.compile(r'build.*core')
    core_files = (x for x in imap(str, source) if core_pattern.search(x))
    target_path = path(target[0]).abspath()
    if not target_path.parent.isdir():
        target_path.parent.makedirs_p()
    for core_file in core_files:
        core_file_path = path(core_file).abspath()
        print('[compress_core_action]', core_file_path,
              core_file_path.isfile())
        command = [self.AVR_BIN_PREFIX + 'ar', 'rcs', target_path,
                   core_file_path]
        run(command)
def save_dmf_device(self, save_as=False):
    '''
    Save device configuration.

    If `save_as=True`, we are saving a copy of the current device with a
    new name.

    .. versionchanged:: 2.33
        Deprecate ``rename`` keyword argument.  Use standard file chooser
        dialog to select device output path.
    '''
    app = get_app()

    default_path = app.config['dmf_device'].get('filepath')
    if save_as or default_path is None:
        default_path = (DEVICES_DIR.joinpath(ph.path(default_path).name)
                        if default_path
                        else DEVICES_DIR.joinpath('New device.svg'))
        try:
            output_path = \
                select_device_output_path(title='Please select location to'
                                          ' save device',
                                          default_path=default_path)
        except IOError:
            _L().debug('No output path was selected.')
            return
    else:
        output_path = default_path

    output_path = ph.path(output_path)

    # Convert device to SVG string.
    svg_unicode = app.dmf_device.to_svg()
    # Save the device to the new target directory.
    with output_path.open('wb') as output:
        output.write(svg_unicode)

    # Set saved device as first position in recent devices menu.
    self._update_recent(output_path)

    # Reset modified status, since save acts as a checkpoint.
    self.modified = False

    # Notify plugins that device has been saved.
    emit_signal('on_dmf_device_saved', [app.dmf_device, str(output_path)])
    self.load_device(output_path)
    return output_path
def compress_core_action(self, target, source, env):
    import re

    #core_pattern = re.compile(r'build.*/core/'.replace('/', os.path.sep))
    core_pattern = re.compile(r'build.*core')
    core_files = (x for x in imap(str, source) if core_pattern.search(x))
    target_path = path(target[0]).abspath()
    if not target_path.parent.isdir():
        target_path.parent.makedirs_p()
    for core_file in core_files:
        core_file_path = path(core_file).abspath()
        print '[compress_core_action]', core_file_path, \
            core_file_path.isfile()
        command = [self.AVR_BIN_PREFIX + 'ar', 'rcs', target_path,
                   core_file_path]
        run(command)
def get_arduino_version(p):
    while p and not (p / path('revisions.txt')).exists():
        p = p.parent
    if not p:
        print('Arduino install path not found.', file=sys.stderr)
        sys.exit(1)
    with open(p / path('revisions.txt'), 'r') as f:
        version = f.readline()
    match = re.search(r'ARDUINO (.*) - .*', version)
    if match:
        return match.groups()[0]
    else:
        return None
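# Hedged usage sketch for `get_arduino_version` (the install path is a
# hypothetical example).  The function walks up from the given path until a
# `revisions.txt` file is found, then parses the leading
# `ARDUINO <version> - ...` line.
version = get_arduino_version(path('/usr/share/arduino/lib'))
print('Arduino version: %s' % version)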
def main(repo_root):
    repo_root = ph.path(repo_root).realpath()
    output_dir = ph.path(os.getcwd()).realpath()
    try:
        os.chdir(repo_root)
        repo = git.repo.Repo()
        describe_match = CRE_DESCRIBE.match(repo.git.describe(['--tags',
                                                               '--dirty']))
        build_miniconda_exe('.miniconda-recipe', output_dir,
                            context=describe_match.groupdict())
    finally:
        os.chdir(output_dir)
def render_template_directory(template_root, output_root, context=None,
                              overwrite=False, **kwargs):
    '''
    Generate recipe for latest package version on PyPI.

    Parameters
    ----------
    template_root : str
        Path of template directory.  Each file in the template directory is
        rendered as a ``jinja2`` template with the specified
        :data:`context`.
    output_root : str
        Output directory path.
    context : dict, optional
        Context passed to each rendered template (additional keyword
        arguments are merged in).
    overwrite : bool, optional
        If ``True``, existing files in output directory are overwritten.
    '''
    import jinja2
    import path_helpers as ph

    output_root = ph.path(output_root)
    template_root = ph.path(template_root)

    output_root.makedirs_p()
    for template_file in template_root.files():
        output_path = output_root.joinpath(template_file.name)
        if output_path.isfile() and not overwrite:
            raise IOError('Output file exists {}. Use `-f` to overwrite.'
                          .format(output_path))
        with template_file.open('r') as template_fhandle:
            try:
                template = jinja2.Template(template_fhandle.read())
            except Exception:
                # Not a valid Jinja template; copy the file verbatim.
                template_file.copy(output_path)
            else:
                context = context or {}
                context = context.copy()
                context.update(kwargs)
                text = template.render(**context)
                with output_path.open('w') as output:
                    output.write(text)
    return output_root
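# Hedged usage sketch for `render_template_directory` (template directory,
# output directory, and context values are hypothetical examples; extra
# keyword arguments are merged into the template context).
rendered_dir = render_template_directory('recipe-templates/conda.template',
                                         'build/recipe',
                                         context={'version': '1.0.0'},
                                         package_name='my-plugin',
                                         overwrite=True)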
def build_template(major_version, output_path, cmd_func):
    '''
    Args
    ----
    major_version (int) : Major MicroDrop version number.
    output_path (str) : If an existing directory is specified, built file
        is moved to specified directory with filename from build.
        Otherwise, built file is renamed to specified output file path.
    cmd_func (function) : Call-back function that takes a recipe/output
        directory as a single argument and returns the path of the built
        file.

    Returns
    -------
    (path_helpers.path) : Path of the built file.
    '''
    package_str = 'microdrop>={}.0,<{}.0'.format(major_version,
                                                 major_version + 1)
    conda_package_name = 'microdrop-{}.0'.format(major_version)

    output_path = ph.path(output_path)

    # Get directory containing static templates.
    static_root = \
        ph.path(pkg.resource_filename('microdrop_conda_constructor',
                                      'static'))
    # Get directory for Conda MicroDrop package recipe template.
    template_root = static_root.joinpath('conda.microdrop.template')
    # Create temporary output directory (wrapped as a path so `dirs()` and
    # `rmtree()` below are available).
    output_root = ph.path(tmp.mkdtemp(prefix=template_root.name + '-'))
    try:
        # Generate recipe for specified MicroDrop version.
        mc.generate_recipe(package_str=package_str,
                           conda_package_name=conda_package_name,
                           template_root=template_root,
                           output_root=output_root)
        # Move built package archive to output path.
        archive_path = cmd_func(output_root.dirs()[0])
        if output_path.isdir():
            # Output path is directory.  Move archive to directory.
            output_path = output_path.joinpath(archive_path.name)
        archive_path.copy(output_path)
        return output_path
    finally:
        output_root.rmtree()
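# Hedged usage sketch for `build_template` (assumes a hypothetical
# `conda_build` call-back that builds the generated recipe directory and
# returns the path of the built archive).
def conda_build(recipe_dir):
    # ... invoke `conda build` on `recipe_dir` and return the archive path.
    return ph.path('builds').joinpath('microdrop-2.0-0.tar.bz2')

built_path = build_template(2, 'dist', conda_build)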
def get_definition_header(cpp_ast_json, type_):
    '''
    Parameters
    ----------
    cpp_ast_json : dict
        JSON-serializable C++ abstract syntax tree.
    type_ : str
        Name of C++ type defined within C++ abstract syntax tree.

    Returns
    -------
    path_helpers.path
        Path to header where type of variable is defined.

    Raises
    ------
    IOError
        If header containing type definition cannot be located.
    '''
    get_class_json = ch.clang_ast.get_class_factory(cpp_ast_json)
    get_typedef_json = get_typedef_factory(cpp_ast_json)

    if get_class_json(type_):
        node = get_class_json(type_)
    elif get_typedef_json(type_):
        node = get_typedef_json(type_)
    else:
        raise IOError('Definition header not found for type: {}'
                      .format(type_))
    return ph.path(py_.get(node, 'location.file')).realpath()
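# Hedged usage sketch for `get_definition_header` (assumes `cpp_ast_json`
# was produced as in `dump_cpp_ast` above; the type name `Node` is a
# hypothetical example).
header_path = get_definition_header(cpp_ast_json, 'Node')
print('`Node` is defined in: %s' % header_path)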
def _init_data_dir(self):
    # If no user data directory is set in the configuration file, select
    # default directory based on the operating system.
    if os.name == 'nt':
        default_data_dir = home_dir().joinpath('MicroDrop')
    else:
        default_data_dir = home_dir().joinpath('.microdrop')
    if 'data_dir' not in self.data:
        self.data['data_dir'] = default_data_dir
        warnings.warn('Using default MicroDrop user data path: %s' %
                      default_data_dir)
    if not path(self['data_dir']).isdir():
        warnings.warn('MicroDrop user data directory does not exist.')
        path(self['data_dir']).makedirs_p()
        warnings.warn('Created MicroDrop user data directory: %s' %
                      self['data_dir'])
def releases_info(versions, base_uri=''):
    '''
    Args
    ----
    versions (list) : List of version objects with `path()` and `url()`
        methods and `major`, `minor`, and `micro` attributes.

    Returns
    -------
    (dict) : Dictionary of releases info corresponding to the list of
        version objects.  See `RELEASES_SCHEMA` for format.
    '''
    from datetime import datetime

    from path_helpers import path

    root_directory = path(__file__).abspath()

    def version_release_info(v):
        file_path = root_directory.joinpath(v.path())
        upload_time = datetime.utcfromtimestamp(file_path.mtime).isoformat()
        return [{'upload_time': upload_time,
                 'url': base_uri + v.url(),
                 'size': file_path.size,
                 'filename': file_path.name,
                 'md5_digest': file_path.read_hexhash('md5')}]

    releases = dict([('%d.%d.%d' % (v.major, v.minor, v.micro),
                      version_release_info(v)) for v in versions])
    return releases
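# Hedged usage sketch for `releases_info` (`Version` is a hypothetical
# stand-in for the version objects described in the docstring).
class Version(object):
    def __init__(self, major, minor, micro):
        self.major, self.minor, self.micro = major, minor, micro

    def path(self):
        return 'releases/app-%d.%d.%d.zip' % (self.major, self.minor,
                                              self.micro)

    def url(self):
        return '/download/' + self.path()

info = releases_info([Version(1, 0, 0)], base_uri='http://example.com')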
def run_all_tests(self):
    '''
    Run all DropBot on-board self-diagnostic tests.

    Record test results as JSON and results summary as a Word document.

    .. versionadded:: 0.14

    .. versionchanged:: 0.16
        Prompt user to insert DropBot test board.
    '''
    results = db.self_test.self_test(self.control_board)
    results_dir = ph.path(self.diagnostics_results_dir)
    results_dir.makedirs_p()

    # Create unique output filenames based on current timestamp.
    timestamp = dt.datetime.now().isoformat().replace(':', '_')
    json_path = results_dir.joinpath('results-%s.json' % timestamp)
    report_path = results_dir.joinpath('results-%s.docx' % timestamp)

    # Write test results encoded as JSON.
    with json_path.open('w') as output:
        # XXX Use `json_tricks` rather than standard `json` to support
        # serializing [Numpy arrays and scalars][1].
        #
        # [1]: http://json-tricks.readthedocs.io/en/latest/#numpy-arrays
        output.write(json_tricks.dumps(results, indent=4))

    # Generate test result summary report as Word document.
    db.self_test.generate_report(results, output_path=report_path,
                                 force=True)
    # Launch Word document report.
    report_path.launch()
def flash(self, hex_path, extra_flags=None):
    hex_path = path(hex_path)
    flags = ["-c", self.protocol, "-b", str(self.baud_rate),
             "-p", self.microcontroller, "-P", "%s" % self.port,
             "-U", "flash:w:%s:i" % hex_path.name, "-C", "%(avrconf)s"]
    if extra_flags is not None:
        flags.extend(extra_flags)

    cwd = os.getcwd()
    try:
        os.chdir(hex_path.parent)
        stdout, stderr = self._run_command(flags)
    finally:
        os.chdir(cwd)
    return stdout, stderr
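# Hedged usage sketch for `flash` (assumes `uploader` is an instance of the
# class defining this method; the firmware path is a hypothetical example).
# The method temporarily changes into the hex file's directory, so only the
# file name appears in the `-U flash:w:...` flag.
stdout, stderr = uploader.flash('firmware/mega2560/firmware.hex',
                                extra_flags=['-V'])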
def save(self, filename=None, format='pickle'):
    if filename is None:
        log_path = self.get_log_path()
        filename = os.path.join(log_path, "data")
    else:
        log_path = path(filename).parent
    if self.data:
        out = deepcopy(self)
        # serialize plugin dictionaries to strings
        for i in range(len(out.data)):
            for plugin_name, plugin_data in out.data[i].items():
                if format == 'pickle':
                    out.data[i][plugin_name] = pickle.dumps(plugin_data)
                elif format == 'yaml':
                    out.data[i][plugin_name] = yaml.dump(plugin_data)
                else:
                    raise TypeError
        with open(filename, 'wb') as f:
            if format == 'pickle':
                pickle.dump(out, f, -1)
            elif format == 'yaml':
                yaml.dump(out, f)
            else:
                raise TypeError
    return log_path
def download_release_packages(repo_name, github_user, release, output_dir):
    '''
    Download Conda packages from GitHub release to ``artifacts`` output
    directory.

    In PowerShell, results can be uploaded using:

        dir artifacts\*\*.tar.bz2 | % {
            anaconda upload -i -u <anaconda user> $_.FullName }

    If package already exists on Conda channel, a prompt will ask to
    overwrite.
    '''
    output_dir = ph.path(output_dir)
    url = ('https://api.github.com/repos/{user}/{repo}/releases/{release}'
           .format(user=github_user, repo=repo_name, release=release))
    response = requests.get(url)
    response_obj = json.loads(response.text)

    platforms = ['noarch', 'win-32', 'win-64']

    for asset_i in response_obj['assets']:
        for platform_j in platforms:
            if asset_i['name'].startswith(platform_j):
                output_path = \
                    output_dir.joinpath(platform_j,
                                        asset_i['name'][len(platform_j)
                                                        + 1:])
                if output_path.isfile():
                    print('skipped:', output_path,
                          'since it already exists')
                    break
                response = requests.get(asset_i['browser_download_url'])
                if not response.ok:
                    raise IOError(response.reason)
                output_path.parent.makedirs_p()
                with output_path.open('wb') as output:
                    output.write(response.content)
                print('wrote:', asset_i['name'], '->', output_path)
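# Hedged usage sketch for `download_release_packages` (repository, user, and
# output directory are hypothetical examples; `latest` is a valid GitHub API
# release specifier).
download_release_packages('dropbot.py', 'wheeler-microfluidics', 'latest',
                          'artifacts')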