def test_export_coreml_with_predict(self):
    """Export each trained model to CoreML and check that CoreML predictions
    match turicreate predictions row-by-row on the training data.

    NOTE(review): assumes ``self.models`` / ``self.trains`` are parallel
    lists and that test index 1 is the stroke-based-input model — confirm
    against the fixture setup (not visible here).
    """
    for test_number in range(len(self.models)):
        feature = self.feature
        model = self.models[test_number]
        sf = self.trains[test_number]
        # Encode warm-start vs from-scratch in the temp-file name for
        # easier debugging of leftover artifacts.
        if self.warm_start:
            prefix = "pretrained" + str(test_number)
        else:
            prefix = "scratch" + str(test_number)
        filename = _mkstemp(prefix + ".mlmodel")[1]
        model.export_coreml(filename)
        mlmodel = _coremltools.models.MLModel(filename)
        # Predict with turicreate BEFORE converting the stroke column below,
        # so both backends see equivalent input.
        tc_preds = model.predict(sf)
        if test_number == 1:
            # stroke input: CoreML needs rasterized drawings, so render the
            # stroke data into a bitmap column in place.
            sf[feature] = _tc.drawing_classifier.util.draw_strokes(
                sf[self.feature])
        for row_number in range(len(sf)):
            core_ml_preds = mlmodel.predict({
                "drawing": sf[feature][row_number]._to_pil_image()
            })
            assert (core_ml_preds["classLabel"] == tc_preds[row_number])
        if test_number == 1:
            # Undo the in-place rasterization so later tests see the
            # original stroke column.
            sf = sf.remove_column(feature)
def mkstemp(self, *args, **kwargs):
    """Create a temp file via ``tempfile._mkstemp`` and log the call stack
    that created it, returning the ``(fd, path)`` tuple unchanged."""
    result = tempfile._mkstemp(*args, **kwargs)
    try:
        stack = "".join(traceback.format_stack())
        log.debug(("Opened tempfile %s with mkstemp:\n" % result[1]) + stack)
    except TypeError:
        # Never let debug logging interfere with temp-file creation.
        pass
    return result
def __init__(self, name, mode='w+b', bufsize=-1, chmod=None, copy_existing=True, fsync=True):
    """Open a hidden temporary sibling of *name* for atomic replacement.

    A temp file named ``.<basename>XXXXXX`` is created in the same
    directory as *name* (so a later rename is atomic on the same
    filesystem) and exposed as ``self.file``.

    Parameters
    ----------
    name : str
        Target path that will eventually be replaced.
    mode : str
        Mode passed to ``os.fdopen`` for the temp file.
    bufsize : int
        Buffering passed to ``os.fdopen``.
    chmod : int or None
        If given, permissions applied to the temp file.
    copy_existing : bool
        If True and *name* exists, copy its stat info and contents into
        the temp file, then rewind.
    fsync : bool
        Stored for use at close time.
    """
    self.name = name
    self.copy_existing = copy_existing
    self.fsync = fsync
    abs_fn = _os.path.abspath(name)
    path, fn = _os.path.split(abs_fn)
    prefix = '.' + fn
    fd, self.tmp_name = _mkstemp(prefix=prefix, dir=path)
    self.file = _os.fdopen(fd, mode, bufsize)
    if self.copy_existing and _os.path.exists(abs_fn):
        _shutil.copystat(abs_fn, self.tmp_name)
        # BUGFIX: read the existing file in binary mode. The original used
        # text mode, but _os.write() requires bytes on Python 3 and text
        # mode would also corrupt binary content / newlines.
        with open(abs_fn, 'rb') as existing_f:
            while True:
                buf = existing_f.read(16 * 1024)
                if not buf:
                    break
                _os.write(fd, buf)
        # Rewind so subsequent writes overwrite from the start.
        _os.lseek(fd, 0, _os.SEEK_SET)
    if chmod is not None:
        _os.fchmod(fd, chmod)
    self.close_called = False
def _create_env_file(self, env, taskcode):
    """Write a BELLHOP .env input file for *env* and return its base path.

    Companion .ati/.bty/.ssp/.sbp files are written alongside when the
    surface, bathymetry, 2D sound-speed profile or transmitter
    directionality require them.  The caller is responsible for deleting
    the generated files.
    """
    fh, fname = _mkstemp(suffix='.env')
    # Strip '.env'; all companion files share this base name.
    fname_base = fname[:-4]
    self._print(fh, "'" + env['name'] + "'")
    self._print(fh, "%0.6f" % (env['frequency']))
    self._print(fh, "1")
    svp = env['soundspeed']
    # Interpolation code: 'S' = spline, 'C' = piecewise linear.
    svp_interp = 'S' if env['soundspeed_interp'] == spline else 'C'
    if isinstance(svp, _pd.DataFrame):
        if len(svp.columns) > 1:
            # Multi-column profile: range-dependent SSP, needs a .ssp file
            # ('Q' option below).
            svp_interp = 'Q'
        else:
            # Single column: flatten to a plain (depth, speed) array.
            svp = _np.hstack((_np.array([svp.index]).T, _np.asarray(svp)))
    # Trailing '*' in the option string signals an altimetry (.ati) file.
    if env['surface'] is None:
        self._print(fh, "'%cVWT'" % svp_interp)
    else:
        self._print(fh, "'%cVWT*'" % svp_interp)
        self._create_bty_ati_file(fname_base + '.ati', env['surface'],
                                  env['surface_interp'])
    # Scalar depth means flat bathymetry; otherwise use the deepest sample.
    max_depth = env['depth'] if _np.size(env['depth']) == 1 else _np.max(
        env['depth'][:, 1])
    self._print(fh, "1 0.0 %0.6f" % (max_depth))
    if _np.size(svp) == 1:
        # Iso-velocity: same sound speed at surface and bottom.
        self._print(fh, "0.0 %0.6f /" % (svp))
        self._print(fh, "%0.6f %0.6f /" % (max_depth, svp))
    elif svp_interp == 'Q':
        # Range-dependent profile: first column inline, full grid in .ssp.
        for j in range(svp.shape[0]):
            self._print(fh, "%0.6f %0.6f /" % (svp.index[j], svp.iloc[j, 0]))
        self._create_ssp_file(fname_base + '.ssp', svp)
    else:
        # One "depth speed /" line per profile sample.
        for j in range(svp.shape[0]):
            self._print(fh, "%0.6f %0.6f /" % (svp[j, 0], svp[j, 1]))
    depth = env['depth']
    # '*' again marks an external bathymetry (.bty) file.
    if _np.size(depth) == 1:
        self._print(fh, "'A' %0.6f" % (env['bottom_roughness']))
    else:
        self._print(fh, "'A*' %0.6f" % (env['bottom_roughness']))
        self._create_bty_ati_file(fname_base + '.bty', depth,
                                  env['depth_interp'])
    # Bottom half-space line; density is divided by 1000 (presumably
    # kg/m^3 -> g/cc — confirm against the BELLHOP format description).
    self._print(
        fh, "%0.6f %0.6f 0.0 %0.6f %0.6f /" %
        (max_depth, env['bottom_soundspeed'], env['bottom_density'] / 1000,
         env['bottom_absorption']))
    self._print_array(fh, env['tx_depth'])
    self._print_array(fh, env['rx_depth'])
    # Ranges are written in km (hence / 1000).
    self._print_array(fh, env['rx_range'] / 1000)
    if env['tx_directionality'] is None:
        self._print(fh, "'" + taskcode + "'")
    else:
        # ' *' requests the source beam pattern (.sbp) file.
        self._print(fh, "'" + taskcode + " *'")
        self._create_sbp_file(fname_base + '.sbp', env['tx_directionality'])
    self._print(fh, "%d" % (env['nbeams']))
    self._print(fh, "%0.6f %0.6f /" % (env['min_angle'], env['max_angle']))
    # Ray-box limits padded 1% beyond max depth/range.  NOTE(review): this
    # literal was split by extraction in the original; reconstructed as a
    # single "0.0 %0.6f %0.6f" format to match the 2D variant elsewhere.
    self._print(
        fh, "0.0 %0.6f %0.6f" %
        (1.01 * max_depth, 1.01 * _np.max(env['rx_range']) / 1000))
    _os.close(fh)
    return fname_base
def test_export_coreml(self):
    """Export each model to CoreML and verify the user-defined metadata
    and short description round-trip correctly."""
    import coremltools
    import platform
    # Index 0 is the fully-trained model, index 1 stopped after 1 iteration.
    max_iters_ans = [str(self.max_iterations), "1"]
    warm_start_ans = "" if self.warm_start is None else self.warm_start
    for i, model in enumerate(self.models):
        filename = _mkstemp("bingo.mlmodel")[1]
        model.export_coreml(filename)
        # Load the model back from the CoreML model file
        coreml_model = coremltools.models.MLModel(filename)
        self.assertDictEqual(
            {
                "com.github.apple.turicreate.version": _tc.__version__,
                "com.github.apple.os.platform": platform.platform(),
                "target": self.target,
                "feature": self.feature,
                "type": "drawing_classifier",
                "warm_start": warm_start_ans,
                "max_iterations": max_iters_ans[i],
                "version": "2",
            },
            dict(coreml_model.user_defined_metadata),
        )
        expected_result = (
            "Drawing classifier created by Turi Create (version %s)"
            % (_tc.__version__))
        # assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(expected_result, coreml_model.short_description)
def test_export_coreml(self):
    """Export each model to CoreML and verify the user-defined metadata
    and short description round-trip correctly."""
    import coremltools
    import platform
    # Index 0 is the fully-trained model, index 1 stopped after 1 iteration.
    max_iters_ans = [str(self.max_iterations), '1']
    warm_start_ans = '' if self.warm_start is None else self.warm_start
    for i, model in enumerate(self.models):
        filename = _mkstemp("bingo.mlmodel")[1]
        model.export_coreml(filename)
        # Load the model back from the CoreML model file
        coreml_model = coremltools.models.MLModel(filename)
        self.assertDictEqual(
            {
                'com.github.apple.turicreate.version': _tc.__version__,
                'com.github.apple.os.platform': platform.platform(),
                'target': self.target,
                'feature': self.feature,
                'type': 'drawing_classifier',
                'warm_start': warm_start_ans,
                'max_iterations': max_iters_ans[i],
                'version': '2',
            },
            dict(coreml_model.user_defined_metadata))
        expected_result = 'Drawing classifier created by Turi Create (version %s)' \
            % (_tc.__version__)
        # assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(expected_result, coreml_model.short_description)
def __init__(self, dir=None, prefix=None, suffix=None, text=True):
    """Create a temporary file in *dir* (created on demand) and initialise
    the parent class with its path."""
    self.__PATH__.make_dir(dir)
    os_handle, tmp_path = _mkstemp(dir=dir, prefix=prefix, suffix=suffix, text=text)
    super().__init__(tmp_path)
    # Keep the raw OS descriptor so it can be closed/used later.
    self._os_level = os_handle
def mkstemp(suffix='', prefix='tmp', dir=TMP_DIRECTORY, text=False):
    """
    Wrapper around tempfile.mkstemp which defaults to creating temporary
    files in the MyPyTutor temporary directory.

    Arguments and return values as for tempfile.mkstemp
    """
    # Delegate directly; only the default directory differs from stdlib.
    fd_and_path = _mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
    return fd_and_path
def _show_static_images(f):
    """Render a bokeh figure *f* to a temporary PNG, display it inline via
    IPython, then delete the file."""
    fd, png_path = _mkstemp(suffix='.png')
    _os.close(fd)
    with _warnings.catch_warnings():
        # Silence the deprecation warning emitted by bokeh 0.12.16+.
        _warnings.simplefilter('ignore')
        _bio.export_png(f, png_path)
    import IPython.display as _ipyd
    _ipyd.display(_ipyd.Image(filename=png_path, embed=True))
    _os.unlink(png_path)
def supports(self, env=None, task=None):
    """Return whether the BELLHOP backend is usable, optionally checking
    that *env* is a 2D environment first."""
    if env is not None and env['type'] != '2D':
        return False
    # Probe by actually invoking bellhop on a throwaway base name.
    fd, probe_name = _mkstemp(suffix='.env')
    _os.close(fd)
    base = probe_name[:-4]
    self._unlink(base + '.env')
    available = self._bellhop(base)
    # Clean up any output bellhop produced during the probe.
    for ext in ('.prt', '.log'):
        self._unlink(base + ext)
    return available
def save(self, filepath, include_data=True):
    """
    Save the Plot object in a vega representation.

    Parameters
    ----------
    filepath : str
        Destination path; the extension selects the output format
        (.json, .png or .svg).
    include_data : bool, optional
        If True, the saved vega spec embeds the data (JSON output only;
        PNG/SVG always embed data for rendering).

    Raises
    ------
    ValueError
        If filepath is not a string.
    NotImplementedError
        If the extension is not .json, .png or .svg.

    Examples
    --------
    Suppose 'plt' is an Plot Object

    We can save it using:

    >>> plt.save('vega_spec.json')

    We can also save the vega representation of the plot without data:

    >>> plt.save('vega_spec.json', False)
    """
    if not isinstance(filepath, str):
        raise ValueError("filepath provided is not a string")
    if filepath.endswith(".json"):
        # save as vega json
        spec = self._get_vega(include_data=include_data)
        with open(filepath, 'w') as fp:
            _json.dump(spec, fp)
    elif filepath.endswith(".png") or filepath.endswith(".svg"):
        # save as png/svg: write the spec to a temp json file, then
        # convert it with the bundled node vg2png/vg2svg script.
        spec = self._get_vega(include_data=True)
        extension = filepath[-3:]
        temp_file_path = _mkstemp()[1]
        try:
            with open(temp_file_path, 'w') as fp:
                _json.dump(spec, fp)
            dirname = _os.path.dirname(__file__)
            relative_path_to_vg2png_vg2svg = "../vg2" + extension
            absolute_path_to_vg2png_vg2svg = _os.path.join(
                dirname, relative_path_to_vg2png_vg2svg)
            # NOTE(review): paths are interpolated into a shell command
            # unquoted; filepaths with spaces or shell metacharacters
            # will break or be interpreted by the shell.
            _os.system("node " + absolute_path_to_vg2png_vg2svg + " " +
                       temp_file_path + " " + filepath)
        finally:
            # Portable cleanup of the temp file the user didn't ask for
            # (the original shelled out to `rm`, which is non-portable).
            _os.remove(temp_file_path)
    else:
        raise NotImplementedError(
            "filename must end in .json, .svg, or .png")
def _create_env_file(self, env, taskcode):
    """Write a BELLHOP .env input file for *env* and return its base path.

    Companion .ati/.bty/.sbp files are written alongside when the surface,
    bathymetry or transmitter directionality require them.  The caller is
    responsible for deleting the generated files.
    """
    fh, fname = _mkstemp(suffix='.env')
    # Strip '.env'; all companion files share this base name.
    fname_base = fname[:-4]
    self._print(fh, "'" + env['name'] + "'")
    self._print(fh, "%0.4f" % (env['frequency']))
    self._print(fh, "1")
    # Interpolation code: 'S' = spline, 'C' = piecewise linear.  A trailing
    # '*' in the option string signals an external altimetry (.ati) file.
    if env['surface'] is None:
        self._print(
            fh, "'%cVWT'" % ('S' if env['soundspeed_interp'] == spline else 'C'))
    else:
        self._print(
            fh, "'%cVWT*'" % ('S' if env['soundspeed_interp'] == spline else 'C'))
        self._create_bty_ati_file(fname_base + '.ati', env['surface'],
                                  env['surface_interp'])
    # Scalar depth means flat bathymetry; otherwise use the deepest sample.
    max_depth = env['depth'] if _np.size(env['depth']) == 1 else _np.max(
        env['depth'][:, 1])
    self._print(fh, "1 0.0 %0.4f" % (max_depth))
    svp = env['soundspeed']
    if _np.size(svp) == 1:
        # Iso-velocity: same sound speed at surface and bottom.
        self._print(fh, "0.0 %0.4f /" % (svp))
        self._print(fh, "%0.4f %0.4f /" % (max_depth, svp))
    else:
        # One "depth speed /" line per profile sample.
        for j in range(svp.shape[0]):
            self._print(fh, "%0.4f %0.4f /" % (svp[j, 0], svp[j, 1]))
    depth = env['depth']
    # '*' again marks an external bathymetry (.bty) file.
    if _np.size(depth) == 1:
        self._print(fh, "'A' %0.4f" % (env['bottom_roughness']))
    else:
        self._print(fh, "'A*' %0.4f" % (env['bottom_roughness']))
        self._create_bty_ati_file(fname_base + '.bty', depth,
                                  env['depth_interp'])
    # Bottom half-space line; density is divided by 1000 (presumably
    # kg/m^3 -> g/cc — confirm against the BELLHOP format description).
    self._print(
        fh, "%0.4f %0.4f 0.0 %0.4f %0.4f /" %
        (max_depth, env['bottom_soundspeed'], env['bottom_density'] / 1000,
         env['bottom_absorption']))
    self._print_array(fh, env['tx_depth'])
    self._print_array(fh, env['rx_depth'])
    # Ranges are written in km (hence / 1000).
    self._print_array(fh, env['rx_range'] / 1000)
    if env['tx_directionality'] is None:
        self._print(fh, "'" + taskcode + "'")
    else:
        # ' *' requests the source beam pattern (.sbp) file.
        self._print(fh, "'" + taskcode + " *'")
        self._create_sbp_file(fname_base + '.sbp', env['tx_directionality'])
    self._print(fh, "%d" % (env['nbeams']))
    self._print(fh, "%0.4f %0.4f /" % (env['min_angle'], env['max_angle']))
    # Ray-box limits padded 1% beyond max depth/range.
    self._print(
        fh, "0.0 %0.4f %0.4f" %
        (1.01 * max_depth, 1.01 * _np.max(env['rx_range']) / 1000))
    _os.close(fh)
    return fname_base
def new_file(cls, path=None, dir=None, prefix=None, suffix=None, text=True):
    """Create and return a resolved instance backed by a file on disk.

    If *path* is None, a fresh temp file is created in *dir* (created on
    demand) and the OS descriptor is stashed on ``_os_level``; if *path*
    is path-like, the file is touched into existence instead.

    Raises
    ------
    TypeError
        If *path* is neither None nor path-like.
    """
    if path is None:
        cls.make_dir(dir)
        level, path = _mkstemp(dir=dir, prefix=prefix, suffix=suffix, text=text)
        lazy_file = cls(path)
        lazy_file.resolve()
        lazy_file._os_level = level
    elif _ispathlike(path):
        lazy_file = cls(path)
        lazy_file.touch()
        lazy_file.resolve()
    else:
        # Fixed garbled message: quote/space were misplaced in the original
        # ("'path 'argument should be a None, path or str object").
        raise TypeError("'path' argument should be None, a path or str object, not '%s'"
                        % type(path).__name__)
    return lazy_file
def mk_tmp_file(suffix: str = None, prefix: str = None, subdir: str = None,
                text: bool = False) -> _Tuple[int, str]:
    """Create temporary file

    Returns tuple of two items: file's descriptor and absolute path.
    """
    from pytsite import reg

    base_dir = reg.get('paths.tmp')
    if not base_dir:
        raise RuntimeError('Cannot determine temporary directory location')

    # Optionally place the file in a sub-directory, creating it on demand.
    target_dir = _path.join(base_dir, subdir) if subdir else base_dir
    if not _path.exists(target_dir):
        _makedirs(target_dir, 0o755)

    return _mkstemp(suffix, prefix, target_dir, text)
def write_concatenated_data_to_R_data_frame(self, output_file):
    """Creates a R data frame of the concatenated data and stores it in a
    RDS file.

    Parameters
    ----------
    output_file : str, optional
        name of RDS output file
        If not specified data will the save to {file_name}.csv

    Notes
    -----
    This method requires R and the Python package 'Rpy2'.
    """
    try:
        import rpy2.robjects as robjects
    except ImportError:
        # Narrowed from a bare `except:`; only a missing package is expected.
        message = ("Saving data to R data frame requires the "
                   "Python package 'Rpy2', which is not installed.")
        raise ImportError(message)
    if output_file is None:
        output_file = u"{0}.rds".format(self.file_name)
    # Round-trip through a temporary CSV that R can read.
    fl, tmp_file_name = _mkstemp()
    _os.close(fl)
    self.write_concatenated_data(output_file=tmp_file_name, delimiter=",")
    robjects.r(
        """data = read.csv("{0}", comment.char="#", na.strings=c("NA", "None"))""".format(
            tmp_file_name
        )
    )
    robjects.r("""str(data)""")
    print("write file: {0}".format(output_file))
    robjects.r("""saveRDS(data, file="{0}")""".format(output_file))
    try:
        _os.remove(tmp_file_name)
    except OSError:
        # Best-effort cleanup; leaving the temp file behind is harmless.
        pass
def write_concatenated_data_to_R_data_frame(self, output_file):
    """Creates a R data frame of the concatenated data and stores it in a
    RDS file.

    Parameters
    ----------
    output_file : str, optional
        name of RDS output file
        If not specified data will the save to {file_name}.csv

    Notes
    -----
    This method requires R and the Python package 'Rpy2'.
    """
    try:
        import rpy2.robjects as robjects
    except ImportError:
        # Narrowed from a bare `except:`; only a missing package is expected.
        message = "Saving data to R data frame requires the " +\
                  "Python package 'Rpy2', which is not installed."
        raise ImportError(message)
    if output_file is None:
        output_file = u"{0}.rds".format(self.file_name)
    # Round-trip through a temporary CSV that R can read.
    fl, tmp_file_name = _mkstemp()
    _os.close(fl)
    self.write_concatenated_data(output_file=tmp_file_name, delimiter=',')
    robjects.r('''data = read.csv("{0}", comment.char="#",
                na.strings=c("NA", "None"))'''.format(tmp_file_name))
    robjects.r('''str(data)''')
    print("write file: {0}".format(output_file))
    robjects.r('''saveRDS(data, file="{0}")'''.format(output_file))
    try:
        _os.remove(tmp_file_name)
    except OSError:
        # Best-effort cleanup; leaving the temp file behind is harmless.
        pass
def __init__(self, suffix='', prefix='tmp', dir=None, text=False):
    """Create a named temporary file and open it for writing (text or
    binary depending on *text*)."""
    super().__init__()
    self.fd, self.name = _mkstemp(suffix, prefix, dir, text)
    # Wrap the raw descriptor in a file object matching the text flag.
    mode = 'w' if text else 'wb'
    self.fp = os.fdopen(self.fd, mode)
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys from tempfile import mkstemp as _mkstemp from distutils.spawn import find_executable as _find_executable CHIMERA_BINARY = 'chimera.exe' CHIMERA_PREFIX = 'Chimera*' CHIMERA_LOCATIONS = map(os.getenv, ('PROGRAMFILES', 'PROGRAMFILES(X86)', 'PROGRAMW6432')) _fh, NULL = _mkstemp(suffix='.py') def _patch_envvars(*args, **kwargs): pass def _patch_libraries(*args, **kwargs): pass def _patch_paths(basedir, libdir, nogui=True): os.environ['PATH'] = ';'.join([os.path.join(basedir, 'bin'), os.path.join(basedir, 'bin', 'DLLs'), os.path.join(basedir, 'bin', 'lib'), os.environ['PATH']]) os.environ['PYTHONPATH'] = ';'.join( [os.path.join(basedir, 'share'), os.path.join(basedir, 'bin')] + (sys.path if nogui else []) +
def save(self, filepath):
    """
    A method for saving the Plot object in a vega representation

    Parameters
    ----------
    filepath: string
        The destination filepath where the plot object must be saved as.
        The extension of this filepath determines what format the plot
        will be saved as. Currently supported formats are JSON, PNG, and SVG.

    Examples
    --------
    Suppose 'plt' is an Plot Object

    We can save it using:

    >>> plt.save('vega_spec.json')

    We can also save the vega representation of the plot without data:

    >>> plt.save('vega_spec.json', False)

    We can save the plot as a PNG/SVG using:

    >>> plt.save('test.png')
    >>> plt.save('test.svg')
    """
    if type(filepath) != str:
        raise ValueError("filepath provided is not a string")
    if filepath.endswith(".json"):
        # save as vega json
        spec = self.get_vega(include_data=True)
        with open(filepath, 'w') as fp:
            _json.dump(spec, fp)
    elif filepath.endswith(".png") or filepath.endswith(".svg"):
        # save as png/svg, but json first
        spec = self.get_vega(include_data=True)
        EXTENSION_START_INDEX = -3
        extension = filepath[EXTENSION_START_INDEX:]
        temp_file_tuple = _mkstemp()
        temp_file_path = temp_file_tuple[1]
        with open(temp_file_path, 'w') as fp:
            _json.dump(spec, fp)
        dirname = _os.path.dirname(__file__)
        relative_path_to_vg2png_vg2svg = "../vg2" + extension
        absolute_path_to_vg2png_vg2svg = _os.path.join(
            dirname, relative_path_to_vg2png_vg2svg)
        # try node vg2[png|svg] json_filepath out_filepath
        (exitcode, stdout, stderr) = _run_cmdline(
            "node " + absolute_path_to_vg2png_vg2svg + " " +
            temp_file_path + " " + filepath)
        if exitcode == _NODE_NOT_FOUND_ERROR_CODE:
            # user doesn't have node installed
            # NOTE(review): this string literal was split by extraction in
            # the original; reconstructed as one sentence.
            raise RuntimeError(
                "Node.js not found. Saving as PNG and SVG" +
                " requires Node.js, please download and install Node.js " +
                "from here and try again: https://nodejs.org/en/download/")
        elif exitcode == _CANVAS_PREBUILT_NOT_FOUND_ERROR:
            # try to see if canvas-prebuilt is globally installed
            # if it is, then link it
            # if not, tell the user to install it
            (is_installed_exitcode,
             is_installed_stdout,
             is_installed_stderr) = _run_cmdline(
                "npm ls -g -json | grep canvas-prebuilt")
            if is_installed_exitcode == _SUCCESS:
                # npm link canvas-prebuilt
                link_exitcode, link_stdout, link_stderr = _run_cmdline(
                    "npm link canvas-prebuilt")
                if link_exitcode == _PERMISSION_DENIED_ERROR_CODE:
                    # They don't have permission, tell them.
                    raise RuntimeError(
                        link_stderr + '\n\n' +
                        "`npm link canvas-prebuilt` failed, " +
                        "Permission Denied.")
                elif link_exitcode == _SUCCESS:
                    # canvas-prebuilt link is now successful, so run the
                    # node vg2[png|svg] json_filepath out_filepath
                    # command again.
                    (exitcode, stdout, stderr) = _run_cmdline(
                        "node " + absolute_path_to_vg2png_vg2svg + " " +
                        temp_file_path + " " + filepath)
                    if exitcode != _SUCCESS:
                        # something else that we have not identified yet
                        # happened.
                        raise RuntimeError(stderr)
                else:
                    raise RuntimeError(link_stderr)
            else:
                raise RuntimeError(
                    "canvas-prebuilt not found. " +
                    "Saving as PNG and SVG requires canvas-prebuilt, " +
                    "please download and install canvas-prebuilt by " +
                    "running this command, and try again: " +
                    "`npm install -g canvas-prebuilt`")
        elif exitcode == _SUCCESS:
            pass
        else:
            raise RuntimeError(stderr)
        # delete temp file that user didn't ask for
        _run_cmdline("rm " + temp_file_path)
    else:
        raise NotImplementedError("filename must end in" +
                                  " .json, .svg, or .png")
#################################################################### ### NO MODIFICATIONS SHOULD BE NECESSARY BEYOND THIS POINT ### #################################################################### # ******************************************************************* from distutils.core import setup, Extension import os, os.path, glob import sys import re from tempfile import mktemp if sys.version_info < (2, 2): print >> sys.stderr, "You need at least python 2.2" sys.exit(1) try: # python >= 2.3 has better mktemp from tempfile import mkstemp as _mkstemp mktemp = lambda *args, **kwargs: _mkstemp(*args, **kwargs)[1] except ImportError: pass try: # subprocess is available from python >= 2.4 from subprocess import call except ImportError: def call(cmd, stdout=None): # ignore stdout redirection # could also escape via ``re.sub(r'[\"$*()?!{}[]]', r'\\\1', cmd[0])`` return os.spawnvp(os.P_WAIT, cmd[0], cmd) if PYTHON_INCLUDE_DIR is None and not USE_NUMERIC: try: import numpy
import sys, os, re import gc from pdb import pm # for debugging test failures try: import numpy from numpy.random import rand, randn toscalar = lambda a:a.item() except ImportError: import Numeric as numpy from MLab import rand, randn toscalar = lambda a:a.toscalar() from tempfile import mktemp try: # python >= 2.3 has better mktemp from tempfile import mkstemp as _mkstemp mktemp = lambda *args,**kwargs: _mkstemp(*args, **kwargs)[1] except ImportError: pass degensym_proxy = lambda s, rex=re.compile(r'(PROXY_VAL)\d+'): rex.sub(r'\1',s) import unittest TestCase = unittest.TestCase TestSuite = unittest.TestSuite try: import awmstest TestCase = awmstest.PermeableTestCase2 TestSuite = awmstest.RotatingTestSuite except ImportError: pass from awmstools import indexme, without from mlabwrap import *
def test_export_coreml(self): for model in self.models: filename = _mkstemp("bingo.mlmodel")[1] model.export_coreml(filename)