Example No. 1
def fill_form_if_cloned(form):
    # is there a request to clone a job.
    from digits.webapp import scheduler
    clone = get_request_arg('clone')
    if clone is not None:
        clone_job = scheduler.get_job(clone)
        fill_form_from_job(clone_job, form)
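get_request_arg is not defined in this snippet; a minimal sketch of such a helper, assuming it is just a thin wrapper around Flask's query-string lookup (the actual DIGITS helper may differ), would be:

import flask


def get_request_arg(key):
    # Hypothetical helper: return the named query-string argument, or None when absent.
    return flask.request.args.get(key)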
Example No. 2
    def load_dataset(self):
        from digits.webapp import scheduler
        job = scheduler.get_job(self.dataset_id)
        assert job is not None, 'Cannot find dataset'
        self.dataset = job
        for task in self.tasks:
            task.dataset = job
Example No. 3
def visualize_network():
    """
    Returns a visualization of the custom network as a string of PNG data
    """
    framework = flask.request.args.get('framework')
    if not framework:
        raise werkzeug.exceptions.BadRequest('framework not provided')

    dataset = None
    if 'dataset_id' in flask.request.form:
        dataset = scheduler.get_job(flask.request.form['dataset_id'])

    fw = frameworks.get_framework_by_id(framework)
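    # The selected framework plug-in renders the custom network description.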
    ret = fw.get_network_visualization(
        desc=flask.request.form['custom_network'],
        dataset=dataset,
        solver_type=flask.request.form.get('solver_type'),
        use_mean=flask.request.form.get('use_mean'),
        crop_size=flask.request.form.get('crop_size'),
        num_gpus=flask.request.form.get('num_gpus'),
    )
    return ret
Example No. 4
def layer_visualizations(job_id):
    job = scheduler.get_job(job_id)
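    # Older pretrained-model jobs may not have a deploy file; generate one on demand.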
    if not job.has_deploy():
        job.tasks[0].write_deploy()

    return flask.render_template("pretrained_models/layer_visualizations.html",
                                 job=format_job_name(job))
Example No. 5
def models_customize():
    """Returns a customized file for the Model based on completed form fields"""
    network = request.args.get('network')
    if not network:
        return 'args.network not found!', 400

    networks_dir = os.path.join(os.path.dirname(digits.__file__), 'standard-networks')
    for filename in os.listdir(networks_dir):
        path = os.path.join(networks_dir, filename)
        if os.path.isfile(path):
            match = re.match(r'%s.prototxt' % network, filename)
            if match:
                with open(path) as infile:
                    return json.dumps({'network': infile.read()})
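    # Not a standard network file; treat the value as the ID of a previously trained job.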
    job = scheduler.get_job(network)
    snapshot = None
    try:
        epoch = int(request.form['snapshot_epoch'])
        for filename, e in job.train_task().snapshots:
            if e == epoch:
                snapshot = job.path(filename)
                break
    except:
        pass

    if job:
        return json.dumps({
            'network': text_format.MessageToString(job.train_task().network),
            'snapshot': snapshot
            })

    return 'ERROR: Network not found!', 400
Example No. 6
def download(job_id, extension):
    """
    Return a tarball of all files required to run the model
    """

    job = scheduler.get_job(job_id)

    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    epoch = -1
    # GET ?epoch=n
    if 'epoch' in flask.request.args:
        epoch = float(flask.request.args['epoch'])

    # POST ?snapshot_epoch=n (from form)
    elif 'snapshot_epoch' in flask.request.form:
        epoch = float(flask.request.form['snapshot_epoch'])

    # Write the stats of the job to json,
    # and store in tempfile (for archive)
    info = json.dumps(job.json_dict(verbose=False, epoch=epoch),
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))
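    # Keep the serialized stats in memory so they can be written straight into the archive.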
    info_io = io.BytesIO()
    info_io.write(info)

    task = job.train_task()
    snapshot_filename = None
    snapshot_filename = task.get_snapshot(epoch)

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            for path, name in job.download_files(epoch):
                tf.add(path, arcname=name)
            tf_info = tarfile.TarInfo("info.json")
            tf_info.size = len(info_io.getvalue())
            info_io.seek(0)
            tf.addfile(tf_info, info_io)
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            for path, name in job.download_files(epoch):
                zf.write(path, arcname=name)
            zf.writestr("info.json", info_io.getvalue())
    else:
        raise werkzeug.exceptions.BadRequest('Invalid extension')

    response = flask.make_response(b.getvalue())
    response.headers[
        'Content-Disposition'] = 'attachment; filename=%s_epoch_%s.%s' % (
            job.id(), epoch, extension)
    return response
Example No. 7
def image_classification_model_test_one():
    job = scheduler.get_job(request.args['job_id'])
    if not job:
        abort(404)

    image = None
    if request.form['image_url']:
        image = utils.image.load_image(request.form['image_url'])
    elif request.files['image_file']:
        with tempfile.NamedTemporaryFile() as outfile:
            request.files['image_file'].save(outfile.name)
            image = utils.image.load_image(outfile.name)
    if image is None:
        return 'There was a problem with the image.', 400
    task = job.train_task().dataset.train_db_task()
    image = utils.image.resize_image(image, task.image_dims[0], task.image_dims[1],
            channels = task.image_dims[2],
            resize_mode = task.resize_mode,
            )
    epoch = int(request.form['snapshot_epoch'])
    predictions, visualizations = job.train_task().infer_one(image, snapshot_epoch=epoch, layers='all')
    # take top 5
    predictions = [(p[0], round(100.0*p[1],2)) for p in predictions[:5]]
    # embed as html
    visualizations = [(
        v[0],
        utils.image.embed_image_html(v[1]),
        utils.image.embed_image_html(v[2]),
        )
        for v in visualizations]
    return render_template('models/images/classification/infer_one.html',
            image_src       = utils.image.embed_image_html(image),
            predictions     = predictions,
            visualizations  = visualizations,
            )
Example No. 8
def get_previous_network_snapshots():
    prev_network_snapshots = []
    for job_id, _ in get_previous_networks():
        job = scheduler.get_job(job_id)
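        # Offer a 'None' choice first, then the job's snapshots in reverse order.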
        e = [(0, "None")] + [(epoch, "Epoch #%s" % epoch) for _, epoch in reversed(job.train_task().snapshots)]
        prev_network_snapshots.append(e)
    return prev_network_snapshots
Example No. 9
def image_classification_trial_show_examples(job_id, class_example):
    job = scheduler.get_job(job_id)
        
    hit_results = job.hit_miss_results[str(class_example)]['hit']
    miss_results = job.hit_miss_results[str(class_example)]['miss']
        
    return render_template("trials/images/classification/images.html", class_example=class_example, miss_results=miss_results, hit_results=hit_results, job_id=job_id, job=job)
Example No. 10
def features_download(job_id, extension):
    """
    Return a tarball of all features extracted from the model
    """
    job = scheduler.get_job(job_id)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            tf.add(job.dir(), arcname='features')
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            zf.write(job.dir(), arcname='features')
    else:
        raise werkzeug.exceptions.BadRequest('Invalid extension')

    response = flask.make_response(b.getvalue())
    response.headers['Content-Disposition'] = 'attachment; filename=%s.%s' % (job.id(), extension)
    return response    
Example No. 11
def models_customize():
    """
    Returns a customized file for the ModelJob based on completed form fields
    """
    network = flask.request.args['network']
    if not network:
        raise werkzeug.exceptions.BadRequest('network not provided')

    networks_dir = os.path.join(os.path.dirname(digits.__file__), 'standard-networks')
    for filename in os.listdir(networks_dir):
        path = os.path.join(networks_dir, filename)
        if os.path.isfile(path):
            match = re.match(r'%s.prototxt' % network, filename)
            if match:
                with open(path) as infile:
                    return json.dumps({'network': infile.read()})
    job = scheduler.get_job(network)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    snapshot = None
    try:
        epoch = int(flask.request.form['snapshot_epoch'])
        for filename, e in job.train_task().snapshots:
            if e == epoch:
                snapshot = job.path(filename)
                break
    except:
        pass

    return json.dumps({
        'network': text_format.MessageToString(job.train_task().network),
        'snapshot': snapshot
        })
Example No. 12
def models_customize():
    """
    Returns a customized file for the ModelJob based on completed form fields
    """
    network = flask.request.args['network']
    framework = flask.request.args.get('framework')
    if not network:
        raise werkzeug.exceptions.BadRequest('network not provided')

    fw = frameworks.get_framework_by_id(framework)

    # can we find it in standard networks?
    network_desc = fw.get_standard_network_desc(network)
    if network_desc:
        return json.dumps({'network': network_desc})

    # not found in standard networks, looking for matching job
    job = scheduler.get_job(network)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    snapshot = None
    try:
        epoch = int(flask.request.form['snapshot_epoch'])
        for filename, e in job.train_task().snapshots:
            if e == epoch:
                snapshot = job.path(filename)
                break
    except:
        pass

    return json.dumps({
            'network': job.train_task().get_network_desc(),
            'snapshot': snapshot
            })
Example No. 13
    def on_status_update(self):
        """
        Called when StatusCls.status.setter is used
        """
        from digits.webapp import app, socketio

        # Send socketio updates
        message = {
            'task': self.html_id(),
            'update': 'status',
            'status': self.status.name,
            'css': self.status.css,
            'show': (self.status in [Status.RUN, Status.ERROR]),
            'running': self.status.is_running(),
        }
        with app.app_context():
            message['html'] = flask.render_template(
                'status_updates.html',
                updates=self.status_history,
                exception=self.exception,
                traceback=self.traceback,
            )

        socketio.emit(
            'task update',
            message,
            namespace='/jobs',
            room=self.job_id,
        )

        from digits.webapp import scheduler
        job = scheduler.get_job(self.job_id)
        if job:
            job.on_status_update()
Example No. 14
def inference_form(extension_id, job_id):
    """
    Returns a rendering of an inference form
    """
    inference_form_html = ""

    if extension_id != "all-default":
        extension_class = extensions.data.get_extension(extension_id)
        if not extension_class:
            raise RuntimeError("Unable to find data extension with ID=%s"
                               % job_id.dataset.extension_id)
        job = scheduler.get_job(job_id)
        if hasattr(job, 'extension_userdata'):
            extension_userdata = job.extension_userdata
        else:
            extension_userdata = {}
        extension_userdata.update({'is_inference_db': True})
        extension = extension_class(**extension_userdata)

        form = extension.get_inference_form()
        if form:
            template, context = extension.get_inference_template(form)
            inference_form_html = flask.render_template_string(template, **context)

    return inference_form_html
Example No. 15
def show(job_id):
    """
    Show a DatasetJob

    Returns JSON when requested:
        {id, name, directory, status}
    """
    job = scheduler.get_job(job_id)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    related_jobs = scheduler.get_related_jobs(job)

    if request_wants_json():
        return flask.jsonify(job.json_dict(True))
    else:
        if isinstance(job, dataset_images.ImageClassificationDatasetJob):
            return dataset_images.classification.views.show(
                job, related_jobs=related_jobs)
        elif isinstance(job, dataset_images.GenericImageDatasetJob):
            return dataset_images.generic.views.show(job,
                                                     related_jobs=related_jobs)
        elif isinstance(job, generic.GenericDatasetJob):
            return generic.views.show(job, related_jobs=related_jobs)
        else:
            raise werkzeug.exceptions.BadRequest('Invalid job type')
Example No. 16
    def on_status_update(self):
        """
        Called when StatusCls.status.setter is used
        """
        from digits.webapp import app, socketio

        # Send socketio updates
        message = {
                'task': self.html_id(),
                'update': 'status',
                'status': self.status.name,
                'css': self.status.css,
                'show': (self.status in [Status.RUN, Status.ERROR]),
                'running': self.status.is_running(),
                }
        with app.app_context():
            message['html'] = flask.render_template('status_updates.html',
                    updates     = self.status_history,
                    exception   = self.exception,
                    traceback   = self.traceback,
                    )

        socketio.emit('task update',
                message,
                namespace='/jobs',
                room=self.job_id,
                )

        from digits.webapp import scheduler
        job = scheduler.get_job(self.job_id)
        if job:
            job.on_status_update()
Example No. 17
def models_customize():
    """Returns a customized file for the Model based on completed form fields"""
    network = request.args.get('network')
    if not network:
        return 'args.network not found!', 400

    networks_dir = os.path.join(os.path.dirname(digits.__file__),
                                'standard-networks')
    for filename in os.listdir(networks_dir):
        path = os.path.join(networks_dir, filename)
        if os.path.isfile(path):
            match = re.match(r'%s.prototxt' % network, filename)
            if match:
                with open(path) as infile:
                    return json.dumps({'network': infile.read()})
    job = scheduler.get_job(network)
    snapshot = None
    try:
        epoch = int(request.form['snapshot_epoch'])
        for filename, e in job.train_task().snapshots:
            if e == epoch:
                snapshot = job.path(filename)
                break
    except Exception as e:
        snapshot = None
    if job:
        return json.dumps({
            'network': text_format.MessageToString(job.train_task().network),
            'snapshot': snapshot
        })

    return 'ERROR: Network not found!', 400
Example No. 18
def get_outputs():
    job = scheduler.get_job(flask.request.args["job_id"])

    # If older job, then create weights db file:
    if not job.has_weights():
        job.tasks[0].write_deploy()
        weights_job = run_weights_job(job, utils.auth.get_username())
        weights_job.wait_completion()
        # If failed to create weights, recommend re-uploading:
        status = weights_job.status.name
        if status is "Error":
            return flask.jsonify({
                "stats":
                status,
                "msg":
                "Could not generate weights, consider re-uploading job."
            })

    layers_with_outputs = []
    if os.path.isfile(job.get_filters_path()):
        f = h5py.File(job.get_filters_path(), 'r')
        layers_with_outputs = f.keys()

    return flask.jsonify({
        "model_def": job.get_model_def(True),
        "framework": job.framework,
        "layers_with_outputs": layers_with_outputs
    })
Example No. 19
def features_download(job_id, extension):
    """
    Return a tarball of all features extracted from the model
    """
    job = scheduler.get_job(job_id)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            tf.add(job.dir(), arcname='features')
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            zf.write(job.dir(), arcname='features')
    else:
        raise werkzeug.exceptions.BadRequest('Invalid extension')

    response = flask.make_response(b.getvalue())
    response.headers['Content-Disposition'] = 'attachment; filename=%s.%s' % (
        job.id(), extension)
    return response
Example No. 20
def get_previous_network_snapshots():
    prev_network_snapshots = []
    for job_id, _ in get_previous_networks():
        job = scheduler.get_job(job_id)
        e = [(0, 'None')] + [(epoch, 'Epoch #%s' % epoch)
                for _, epoch in reversed(job.train_task().snapshots)]
        prev_network_snapshots.append(e)
    return prev_network_snapshots
Example No. 21
def dataset_summary():
    job_id = request.args.get('job_id', '')
    if not job_id:
        return 'No job_id in request!'

    job = scheduler.get_job(job_id)

    return render_template('datasets/summary.html', dataset=job)
Example No. 22
def set_previous_network_snapshots(form):
    while len(form.previous_network_snapshots):
        form.previous_network_snapshots.pop_entry()

    for job_id, _ in get_previous_networks():
        job = scheduler.get_job(job_id)
        e = form.previous_network_snapshots.append_entry()
        e.choices = [('none', 'None')] + [(epoch, 'Epoch #%s' % epoch)
                for _, epoch in reversed(job.train_task().snapshots)]
Example No. 23
def dataset_summary():
    """
    Return a short HTML summary of a DatasetJob
    """
    job = scheduler.get_job(flask.request.args['job_id'])
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    return flask.render_template('datasets/summary.html', dataset=job)
Example No. 24
def image_classification_model_large_graph():
    """
    Show the loss/accuracy graph, but bigger
    """
    job = scheduler.get_job(request.args['job_id'])
    if not job:
        abort(404)

    return render_template('models/images/classification/large_graph.html', job=job)
Example No. 25
def image_classification_model_large_graph():
    """
    Show the loss/accuracy graph, but bigger
    """
    job = scheduler.get_job(flask.request.args['job_id'])
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    return flask.render_template('models/images/classification/large_graph.html', job=job)
Example No. 26
def get_previous_network_snapshots():
    prev_network_snapshots = []
    for job_id, _ in get_previous_networks():
        job = scheduler.get_job(job_id)
        e = [(0, "None")] + [(epoch, "Epoch #%s" % epoch) for _, epoch in reversed(job.train_task().snapshots)]
        if job.train_task().pretrained_model:
            e.insert(0, (-1, "Previous pretrained model"))
        prev_network_snapshots.append(e)
    return prev_network_snapshots
Example No. 27
def download(job_id, extension):
    """
    Return a tarball of all files required to run the model
    """

    job = scheduler.get_job(job_id)

    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    epoch = -1
    # GET ?epoch=n
    if 'epoch' in flask.request.args:
        epoch = float(flask.request.args['epoch'])

    # POST ?snapshot_epoch=n (from form)
    elif 'snapshot_epoch' in flask.request.form:
        epoch = float(flask.request.form['snapshot_epoch'])

    # Write the stats of the job to json,
    # and store in tempfile (for archive)
    info = json.dumps(job.json_dict(verbose=False,epoch=epoch), sort_keys=True, indent=4, separators=(',', ': '))
    temp = tempfile.NamedTemporaryFile()
    temp.write(info)
    temp.seek(0)

    task = job.train_task()
    snapshot_filename = None
    snapshot_filename = task.get_snapshot(epoch)

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            for path, name in job.download_files(epoch):
                tf.add(path, arcname=name)
            tf.add(temp.name,arcname="info.json")
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            for path, name in job.download_files(epoch):
                zf.write(path, arcname=name)
            zf.write(temp.name,arcname="info.json")
    else:
        raise werkzeug.exceptions.BadRequest('Invalid extension')

    # Close and delete temporary file
    temp.close()

    response = flask.make_response(b.getvalue())
    response.headers['Content-Disposition'] = 'attachment; filename=%s_epoch_%s.%s' % (job.id(), epoch, extension)
    return response
Example No. 28
def trials_show(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    if isinstance(job, trial_images.ImageClassificationTrialJob):
        return trial_images.classification.views.show(job)
    else:
        abort(404)
Example No. 29
def experiments_show(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    if isinstance(job, experiment_images.ImageClassificationExperimentJob):
        return experiment_images.classification.views.show(job)
    else:
        abort(404)
Example No. 30
def image_classification_model_large_graph():
    """
    Show the loss/accuracy graph, but bigger
    """
    job = scheduler.get_job(request.args['job_id'])
    if not job:
        abort(404)

    return render_template('models/images/classification/large_graph.html',
                           job=job)
Example No. 31
def get_previous_network_snapshots():
    prev_network_snapshots = []
    for job_id, _ in get_previous_networks():
        job = scheduler.get_job(job_id)
        e = [(0, 'None')] + [(epoch, 'Epoch #%s' % epoch)
                             for _, epoch in reversed(job.train_task().snapshots)]
        if job.train_task().pretrained_model:
            e.insert(0, (-1, 'Previous pretrained model'))
        prev_network_snapshots.append(e)
    return prev_network_snapshots
Example No. 32
def models_show(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    if isinstance(job, model_images.ImageClassificationModelJob):
        return model_images.classification.views.show(job)
    else:
        abort(404)
Example No. 33
def datasets_show(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    if isinstance(job, dataset_images.ImageClassificationDatasetJob):
        return dataset_images.classification.views.show(job)
    else:
        abort(404)
Example No. 34
def models_download(job_id, extension='tar.gz'):
    """
    Return a tarball of all files required to run the model
    """
    job = scheduler.get_job(job_id)

    if not job:
        return 'Job not found', 404

    epoch = -1
    # GET ?epoch=n
    if 'epoch' in request.args:
        epoch = float(request.args['epoch'])

    # POST ?snapshot_epoch=n (from form)
    elif 'snapshot_epoch' in request.form:
        epoch = float(request.form['snapshot_epoch'])

    task = job.train_task()

    snapshot_filename = None
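    # Default to the most recent snapshot unless a specific epoch was requested.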
    if epoch == -1 and len(task.snapshots):
        epoch = task.snapshots[-1][1]
        snapshot_filename = task.snapshots[-1][0]
    else:
        for f, e in task.snapshots:
            if e == epoch:
                snapshot_filename = f
                break
    if not snapshot_filename:
        return 'Invalid epoch', 400

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            for path, name in job.download_files(epoch):
                tf.add(path, arcname=name)
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            for path, name in job.download_files(epoch):
                zf.write(path, arcname=name)
    else:
        return 'Unrecognized extension "%s"' % extension, 400

    response = make_response(b.getvalue())
    response.headers[
        'Content-Disposition'] = 'attachment; filename=%s_epoch_%s.%s' % (
            job.id(), epoch, extension)
    return response
Example No. 35
def dataset_summary():
    """
    Return a short HTML summary of a DatasetJob
    """
    job_id = request.args.get('job_id', '')
    if not job_id:
        return 'No job_id in request!'

    job = scheduler.get_job(job_id)

    return render_template('datasets/summary.html', dataset=job)
Example No. 36
def image_classification_model_classify_one():
    """
    Classify one image and return the predictions, weights and activations
    """
    job = scheduler.get_job(request.args['job_id'])
    if not job:
        abort(404)

    image = None
    if 'image_url' in request.form and request.form['image_url']:
        image = utils.image.load_image(request.form['image_url'])
    elif 'image_file' in request.files and request.files['image_file']:
        with tempfile.NamedTemporaryFile() as outfile:
            request.files['image_file'].save(outfile.name)
            image = utils.image.load_image(outfile.name)
    else:
        return 'You must select an image to classify', 400

    # resize image
    db_task = job.train_task().dataset.train_db_task()
    height = db_task.image_dims[0]
    width = db_task.image_dims[1]
    if job.train_task().crop_size:
        height = job.train_task().crop_size
        width = job.train_task().crop_size
    image = utils.image.resize_image(
        image,
        height,
        width,
        channels=db_task.image_dims[2],
        resize_mode=db_task.resize_mode,
    )

    epoch = None
    if 'snapshot_epoch' in request.form:
        epoch = float(request.form['snapshot_epoch'])

    layers = 'none'
    if 'show_visualizations' in request.form and request.form[
            'show_visualizations']:
        layers = 'all'

    predictions, visualizations = job.train_task().infer_one(
        image, snapshot_epoch=epoch, layers=layers)
    # take top 5
    predictions = [(p[0], round(100.0 * p[1], 2)) for p in predictions[:5]]

    return render_template(
        'models/images/classification/classify_one.html',
        image_src=utils.image.embed_image_html(image),
        predictions=predictions,
        visualizations=visualizations,
    )
Example No. 37
def models_download(job_id, extension='tar.gz'):
    """
    Return a tarball of all files required to run the model
    """
    job = scheduler.get_job(job_id)

    if not job:
        return 'Job not found', 404

    epoch = -1
    # GET ?epoch=n
    if 'epoch' in request.args:
        epoch = float(request.args['epoch'])

    # POST ?snapshot_epoch=n (from form)
    elif 'snapshot_epoch' in request.form:
        epoch = float(request.form['snapshot_epoch'])

    task = job.train_task()

    snapshot_filename = None
    if epoch == -1 and len(task.snapshots):
        epoch = task.snapshots[-1][1]
        snapshot_filename = task.snapshots[-1][0]
    else:
        for f, e in task.snapshots:
            if e == epoch:
                snapshot_filename = f
                break
    if not snapshot_filename:
        return 'Invalid epoch', 400

    b = io.BytesIO()
    if extension in ['tar', 'tar.gz', 'tgz', 'tar.bz2']:
        # tar file
        mode = ''
        if extension in ['tar.gz', 'tgz']:
            mode = 'gz'
        elif extension in ['tar.bz2']:
            mode = 'bz2'
        with tarfile.open(fileobj=b, mode='w:%s' % mode) as tf:
            for path, name in job.download_files(epoch):
                tf.add(path, arcname=name)
    elif extension in ['zip']:
        with zipfile.ZipFile(b, 'w') as zf:
            for path, name in job.download_files(epoch):
                zf.write(path, arcname=name)
    else:
        return 'Unrecognized extension "%s"' % extension, 400

    response = make_response(b.getvalue())
    response.headers['Content-Disposition'] = 'attachment; filename=%s_epoch_%s.%s' % (job.id(), epoch, extension)
    return response
Example No. 38
def models_show_json(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    return jsonify({
        'id': job.id(),
        'name': job.name(),
        'status': job.status.name,
        'snapshots': [s[1] for s in job.train_task().snapshots],
        })
Example No. 39
def models_show_json(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    return jsonify({
        'id': job.id(),
        'name': job.name(),
        'status': job.status.name,
        'snapshots': [s[1] for s in job.train_task().snapshots],
    })
Example No. 40
def image_classification_trial_show_examples(job_id, class_example):
    job = scheduler.get_job(job_id)

    hit_results = job.hit_miss_results[str(class_example)]['hit']
    miss_results = job.hit_miss_results[str(class_example)]['miss']

    return render_template("trials/images/classification/images.html",
                           class_example=class_example,
                           miss_results=miss_results,
                           hit_results=hit_results,
                           job_id=job_id,
                           job=job)
Example No. 41
def run_max_activations():
    """ Run Gradient Ascent on a given layer and units """
    job = scheduler.get_job(flask.request.args["job_id"])
    args = flask.request.args
    layer_name = args["layer_name"]
    units = eval(args["units"])
    username = utils.auth.get_username()

    gradient_ascent_job = create_max_activation_job(job, username, layer_name,
                                                    units)

    return flask.jsonify({"stats": units, "job_id": gradient_ascent_job.id()})
Example No. 42
def models_download(job_id, extension):
    """
    Return a tarball of all files required to run the model
    """
    job = scheduler.get_job(job_id)
    if job is None:
        raise werkzeug.exceptions.NotFound("Job not found")

    epoch = -1
    # GET ?epoch=n
    if "epoch" in flask.request.args:
        epoch = float(flask.request.args["epoch"])

    # POST ?snapshot_epoch=n (from form)
    elif "snapshot_epoch" in flask.request.form:
        epoch = float(flask.request.form["snapshot_epoch"])

    task = job.train_task()

    snapshot_filename = None
    if epoch == -1 and len(task.snapshots):
        epoch = task.snapshots[-1][1]
        snapshot_filename = task.snapshots[-1][0]
    else:
        for f, e in task.snapshots:
            if e == epoch:
                snapshot_filename = f
                break
    if not snapshot_filename:
        raise werkzeug.exceptions.BadRequest("Invalid epoch")

    b = io.BytesIO()
    if extension in ["tar", "tar.gz", "tgz", "tar.bz2"]:
        # tar file
        mode = ""
        if extension in ["tar.gz", "tgz"]:
            mode = "gz"
        elif extension in ["tar.bz2"]:
            mode = "bz2"
        with tarfile.open(fileobj=b, mode="w:%s" % mode) as tf:
            for path, name in job.download_files(epoch):
                tf.add(path, arcname=name)
    elif extension in ["zip"]:
        with zipfile.ZipFile(b, "w") as zf:
            for path, name in job.download_files(epoch):
                zf.write(path, arcname=name)
    else:
        raise werkzeug.exceptions.BadRequest("Invalid extension")

    response = flask.make_response(b.getvalue())
    response.headers["Content-Disposition"] = "attachment; filename=%s_epoch_%s.%s" % (job.id(), epoch, extension)
    return response
Example No. 43
def image_classification_model_classify_one():
    """
    Classify one image and return the top 5 classifications

    Returns JSON when requested: {predictions: {category: confidence,...}}
    """
    job = scheduler.get_job(flask.request.args['job_id'])
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    image = None
    if 'image_url' in flask.request.form and flask.request.form['image_url']:
        image = utils.image.load_image(flask.request.form['image_url'])
    elif 'image_file' in flask.request.files and flask.request.files['image_file']:
        with tempfile.NamedTemporaryFile() as outfile:
            flask.request.files['image_file'].save(outfile.name)
            image = utils.image.load_image(outfile.name)
    else:
        raise werkzeug.exceptions.BadRequest('No image given')

    # resize image
    db_task = job.train_task().dataset.train_db_task()
    height = db_task.image_dims[0]
    width = db_task.image_dims[1]
    if job.train_task().crop_size:
        height = job.train_task().crop_size
        width = job.train_task().crop_size
    image = utils.image.resize_image(image, height, width,
            channels = db_task.image_dims[2],
            resize_mode = db_task.resize_mode,
            )

    epoch = None
    if 'snapshot_epoch' in flask.request.form:
        epoch = float(flask.request.form['snapshot_epoch'])

    layers = 'none'
    if 'show_visualizations' in flask.request.form and flask.request.form['show_visualizations']:
        layers = 'all'

    predictions, visualizations = job.train_task().infer_one(image, snapshot_epoch=epoch, layers=layers)
    # take top 5
    predictions = [(p[0], round(100.0*p[1],2)) for p in predictions[:5]]

    if request_wants_json():
        return flask.jsonify({'predictions': predictions})
    else:
        return flask.render_template('models/images/classification/classify_one.html',
                image_src       = utils.image.embed_image_html(image),
                predictions     = predictions,
                visualizations  = visualizations,
                )
Example No. 44
def models_show(job_id):
    """
    Show a ModelJob
    """
    job = scheduler.get_job(job_id)

    if job is None:
        abort(404)

    if isinstance(job, model_images.ImageClassificationModelJob):
        return model_images.classification.views.show(job)
    else:
        abort(404)
Example No. 45
def image_classification_model_classify_one():
    """
    Classify one image and return the predictions, weights and activations
    """
    job = scheduler.get_job(request.args['job_id'])
    if not job:
        abort(404)

    image = None
    if 'image_url' in request.form and request.form['image_url']:
        image = utils.image.load_image(request.form['image_url'])
    elif 'image_file' in request.files and request.files['image_file']:
        with tempfile.NamedTemporaryFile() as outfile:
            request.files['image_file'].save(outfile.name)
            image = utils.image.load_image(outfile.name)
    if image is None:
        return 'There was a problem with the image.', 400

    # resize image
    db_task = job.train_task().dataset.train_db_task()
    height = db_task.image_dims[0]
    width = db_task.image_dims[1]
    if job.train_task().crop_size:
        height = job.train_task().crop_size
        width = job.train_task().crop_size
    image = utils.image.resize_image(image, height, width,
            channels = db_task.image_dims[2],
            resize_mode = db_task.resize_mode,
            )

    epoch = None
    if 'snapshot_epoch' in request.form:
        epoch = float(request.form['snapshot_epoch'])

    predictions, visualizations = job.train_task().infer_one(image, snapshot_epoch=epoch, layers='all')
    # take top 5
    predictions = [(p[0], round(100.0*p[1],2)) for p in predictions[:5]]

    # embed as html
    visualizations = [(
        v[0],
        utils.image.embed_image_html(v[1]),
        utils.image.embed_image_html(v[2]),
        )
        for v in visualizations]
    return render_template('models/images/classification/classify_one.html',
            image_src       = utils.image.embed_image_html(image),
            predictions     = predictions,
            visualizations  = visualizations,
            )
Example No. 46
def display_images(job_id, key):
    # Display the Images
    job = scheduler.get_job(job_id)
    test_db_dir = job.path(job.test_db_task().db_name)
    env = lmdb.open(test_db_dir, readonly=True)
    #env = lmdb.open('/home/david/.digits/jobs/' + job_id + '/test_db', readonly=True)

    with env.begin() as txn:
        raw_datum = txn.get(str(key))
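    # The stored value is a serialized caffe Datum; decode it and stream its image data back.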
    datum = caffe.proto.caffe_pb2.Datum()
    datum.ParseFromString(raw_datum)
    stream = StringIO.StringIO(datum.data)

    return send_file(stream, mimetype='image/png')
Example No. 47
def create_pretrained_model(job_id,username,epoch):
    job = scheduler.get_job(job_id)

    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    # Write the stats of the job to json,
    # and store in tempfile (for archive)
    info = job.json_dict(verbose=False,epoch=epoch)

    task = job.train_task()
    snapshot_filename = None
    snapshot_filename = task.get_snapshot(epoch)

    # Set defaults:
    labels_path = None
    mean_path = None
    resize_mode = None

    if "labels file" in info:
        labels_path = os.path.join(task.dataset.dir(), info["labels file"])
    if "mean file" in info:
        mean_path = os.path.join(task.dataset.dir(),info["mean file"])
    if "image resize mode" in info:
        resize_mode = info["image resize mode"]


    model_file = os.path.join(job.dir(), str(task.model_file))

    # If the job doesn't contain a model file (it is too old), raise an exception:
    if not os.path.isfile(model_file):
        raise werkzeug.exceptions.BadRequest('Model file not found in job dir. Job may be too old for conversion.')

    job = PretrainedModelJob(
        snapshot_filename,
        model_file,
        labels_path,
        mean_path,
        info["framework"],
        info["image dimensions"][2],
        resize_mode,
        info["image dimensions"][0],
        info["image dimensions"][1],
        username = username,
        name = info["name"]
    )

    scheduler.add_job(job)
    return job
Example No. 48
def to_pretrained(job_id):
    job = scheduler.get_job(job_id)

    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    epoch = -1
    # GET ?epoch=n
    if 'epoch' in flask.request.args:
        epoch = float(flask.request.args['epoch'])

    # POST ?snapshot_epoch=n (from form)
    elif 'snapshot_epoch' in flask.request.form:
        epoch = float(flask.request.form['snapshot_epoch'])

    # Write the stats of the job to json,
    # and store in tempfile (for archive)
    info = job.json_dict(verbose=False,epoch=epoch)

    task = job.train_task()
    snapshot_filename = None
    snapshot_filename = task.get_snapshot(epoch)

    # Set defaults:
    labels_path = None
    resize_mode = None

    if "labels file" in info:
        labels_path = os.path.join(task.dataset.dir(), info["labels file"])
    if "image resize mode" in info:
        resize_mode = info["image resize mode"]

    job = PretrainedModelJob(
        snapshot_filename,
        os.path.join(job.dir(), task.model_file),
        labels_path,
        info["framework"],
        info["image dimensions"][2],
        resize_mode,
        info["image dimensions"][0],
        info["image dimensions"][1],
        username = auth.get_username(),
        name = info["name"]
    )

    scheduler.add_job(job)

    return flask.redirect(flask.url_for('digits.views.home',tab=3)), 302
Example No. 49
def customize():
    """
    Returns a customized file for the ModelJob based on completed form fields
    """
    network = flask.request.args['network']
    framework = flask.request.args.get('framework')
    if not network:
        raise werkzeug.exceptions.BadRequest('network not provided')

    fw = frameworks.get_framework_by_id(framework)

    # can we find it in standard networks?
    network_desc = fw.get_standard_network_desc(network)
    if network_desc:
        return json.dumps({'network': network_desc})

    # not found in standard networks, looking for matching job
    job = scheduler.get_job(network)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')

    snapshot = None
    epoch = float(flask.request.form.get('snapshot_epoch', 0))
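    # Epoch 0 means no snapshot, -1 selects the pretrained model, and any other value must match a saved snapshot.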
    if epoch == 0:
        pass
    elif epoch == -1:
        snapshot = job.train_task().pretrained_model
    else:
        for filename, e in job.train_task().snapshots:
            if e == epoch:
                snapshot = job.path(filename)
                break

    if isinstance(job, PretrainedModelJob):
        with open(job.get_model_def_path(), 'r') as model_def:
            network = model_def.read()
        snapshot = job.get_weights_path()
        python_layer = job.get_python_layer_path()
    else:
        network = job.train_task().get_network_desc()
        python_layer = None

    return json.dumps({
        'network': network,
        'snapshot': snapshot,
        'python_layer': python_layer
    })
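A pattern that recurs throughout these examples: scheduler.get_job returns None when no job matches the given ID, so callers check the result before using it. The sketch below only illustrates that guard and is not code from DIGITS; the view name and JSON payload are hypothetical.

import flask
import werkzeug.exceptions

from digits.webapp import scheduler


def show_job_status(job_id):
    # Hypothetical view: look up the job and return 404 when it does not exist.
    job = scheduler.get_job(job_id)
    if job is None:
        raise werkzeug.exceptions.NotFound('Job not found')
    # job.id() and job.status.name are used the same way in the examples above.
    return flask.jsonify({'id': job.id(), 'status': job.status.name})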