Example #1
0
    def setup_cache(self):
        """Prepare the JSON dump file; do nothing when logging is disabled.

        Returns an empty dict (the cache payload) when ``self.enabled`` is
        false; otherwise seeds ``self.dump_fn`` with an empty JSON object.
        """
        if not self.enabled:
            return {}

        # Seed the log file with an empty JSON object so later appends
        # always have a valid file to start from.
        with open(self.dump_fn, 'w') as handle:
            ioJSON.dump({}, handle, indent=2)
Example #2
0
def plot_graph(graph, scope=None, parent=None,
               excludes=(), d3page='fixedforce.html', minimal=False):
    """Open up a display of the graph in a browser window.

    graph: graph object accepted by _clean_graph/node_link_data
        The dependency graph to display.
    scope, parent:
        Passed through to _clean_graph to restrict/contextualize the view.
    excludes: tuple
        Node names to drop from the display.
    d3page: str
        HTML page (shipped next to this module) used to render the graph.
    minimal: bool
        Passed through to _clean_graph.

    NOTE: the temp directory is intentionally left behind so the browser
    can keep reading from it; the user is told to remove it.
    """
    tmpdir = tempfile.mkdtemp()
    fdir = os.path.dirname(os.path.abspath(__file__))
    shutil.copy(os.path.join(fdir, 'd3.js'), tmpdir)
    shutil.copy(os.path.join(fdir, d3page), tmpdir)

    graph = _clean_graph(graph, excludes=excludes,
                         scope=scope, parent=parent, minimal=minimal)
    data = node_link_data(graph)
    tmp = data.get('graph', [])
    data['graph'] = [dict(tmp)]

    startdir = os.getcwd()
    os.chdir(tmpdir)
    try:
        # write out the json as a javascript var
        # so we're not forced to start our own webserver
        # to avoid cross-site issues
        with open('__graph.js', 'w') as f:
            f.write("__mygraph__json = ")
            ioJSON.dump(data, f)
            f.write(";\n")

        # open URL in web browser
        wb = webbrowser.get()
        wb.open('file://' + os.path.join(tmpdir, d3page))
    except Exception as err:
        # FIX: parenthesized print works on both Python 2 and 3;
        # the original `print str(err)` is a SyntaxError on Python 3.
        print(str(err))
    finally:
        os.chdir(startdir)
        print("remember to remove temp directory '%s'" % tmpdir)
Example #3
0
    def _persist_input(self,
                       output_dir,
                       duration,
                       args,
                       kwargs,
                       this_duration_limit=0.5):
        """ Save a small summary of the call using json format in the
            output directory.

            output_dir: string
                directory where to write metadata.

            duration: float
                time taken by hashing input arguments, calling the wrapped
                function and persisting its output.

            args, kwargs: list and dict
                input arguments for wrapped function

            this_duration_limit: float
                Max execution time for this function before issuing a warning.

            Returns the metadata dict that was (best-effort) written to disk.
        """
        start_time = time.time()
        argument_dict = filter_args(self.func, self.ignore, args, kwargs)

        input_repr = dict((k, repr(v)) for k, v in argument_dict.items())
        # This can fail due to race-conditions with multiple
        # concurrent joblibs removing the file or the directory
        metadata = {"duration": duration, "input_args": input_repr}
        try:
            mkdirp(output_dir)
            with open(os.path.join(output_dir, 'metadata.json'), 'w') as f:
                ioJSON.dump(metadata, f)
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Persisting metadata is
            # deliberately best-effort, so ordinary errors are ignored,
            # but process-control exceptions must propagate.
            pass

        this_duration = time.time() - start_time
        if this_duration > this_duration_limit:
            # This persistence should be fast. It will not be if repr() takes
            # time and its output is large, because json.dump will have to
            # write a large file. This should not be an issue with numpy arrays
            # for which repr() always output a short representation, but can
            # be with complex dictionaries. Fixing the problem should be a
            # matter of replacing repr() above by something smarter.
            warnings.warn("Persisting input arguments took %.2fs to run.\n"
                          "If this happens often in your code, it can cause "
                          "performance problems \n"
                          "(results will be correct in all cases). \n"
                          "The reason for this is probably some large input "
                          "arguments for a wrapped\n"
                          " function (e.g. large strings).\n"
                          "THIS IS A JOBLIB ISSUE. If you can, kindly provide "
                          "the joblib's team with an\n"
                          " example so that they can fix the problem." %
                          this_duration,
                          stacklevel=5)
        return metadata
Example #4
0
 def teardown(self, name, ret_value, solver):
     """Dump the collected results to the JSON log file.

     Output is pretty-printed (indent=2) with keys sorted for stable diffs.
     The name/ret_value/solver arguments are accepted for the hook's
     signature but not used here.
     """
     with open(self.dump_fn, 'w') as out:
         ioJSON.dump(self.results, out, indent=2, sort_keys=True)
Example #5
0
 def save(self):
     """Persist the cache atomically: write a scratch file, then rename it
     over the real filename so readers never see a half-written file."""
     scratch = self.filename + ".new"
     with open(scratch, 'wb') as out:
         ioJSON.dump(self.cache, out)
     os.rename(scratch, self.filename)