Example #1
0
def upload_image_file():
    """Handle an image uploaded in the ``file`` field of a POST request.

    Saves the upload under ``UPLOAD_FOLDER``, runs inference on it and
    renders ``result.html`` with the prediction.  Any other request
    (non-POST, or no file attached) renders a bare ``result.html``.

    FIX: removed leftover debug ``print`` calls and collapsed the two
    identical fallback branches into a single guard clause.
    """
    # NOTE(review): the route decorator is outside this block; `request`,
    # `user_name` and the folder constants are module-level names.
    if request.method != "POST" or "file" not in request.files:
        return render_template("result.html")

    # request.files is a MultiDict; take the first file posted as "file".
    file = request.files.getlist("file")[0]
    # secure_filename strips path components and unsafe characters.
    filename = secure_filename(file.filename)
    path = os.path.join(UPLOAD_FOLDER, filename)
    file.save(path)

    infer = Inference(user_name)
    result = infer(path)
    return render_template("result.html",
                           name=user_name,
                           result=result,
                           image_path=os.path.join(
                               SUB_UPLOAD_FOLDER, filename))
Example #2
0
import sys, os, shutil
sys.path.append(os.getcwd())
from src.inference import Inference
from flask import Flask, request, flash, redirect, render_template, url_for, Markup, send_file, send_from_directory
from werkzeug.utils import secure_filename

# Single inference engine instance shared by all request handlers.
infer = Inference()
app = Flask(__name__)
# NOTE(review): secret key is hard-coded in source — should be loaded from
# configuration/environment in production, not committed to the repo.
app.secret_key = "!@#$%^&*()a-=afs;'';312$%^&*k-[;.sda,./][p;/'=-0989#$%^&0976678v$%^&*(fdsd21234266OJ^&UOKN4odsbd#$%^&*(sadg7(*&^%32b342gd']"
# Directory where all uploaded files are stored.

UPLOAD_FOLDER = "static/uploadFolder"
# Expose the upload path through Flask's config as well.
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# Tracks the files loaded in memory during this process's lifetime.
list_of_uploaded_file = []


def clean_upload_folder(folder=None):
    """Delete the upload directory and everything inside it.

    Args:
        folder: Directory to remove; defaults to the module-level
            ``UPLOAD_FOLDER`` (backward compatible with the no-arg call).

    A missing directory is not an error — it is already "clean".
    """
    target = UPLOAD_FOLDER if folder is None else folder
    try:
        shutil.rmtree(target)
    except FileNotFoundError:
        # Nothing to delete; the desired end state already holds.
        pass


def make_directory(folder=None):
    """Create the upload directory (including parents) if missing.

    Args:
        folder: Directory to create; defaults to the module-level
            ``UPLOAD_FOLDER`` (backward compatible with the no-arg call).

    Idempotent: an existing directory is left untouched (``exist_ok``).
    """
    os.makedirs(UPLOAD_FOLDER if folder is None else folder, exist_ok=True)


@app.route('/')
def root():
    # clean the upload directory every time user use the website and create a new empty directory
Example #3
0
def infer():
    """Restore a trained model from a checkpoint and run inference.

    Loads the vocabulary dicts and the (cached or rebuilt) dataset
    objects, restores the TF graph saved at ``cfg.load_step`` and writes
    the predicted probabilities for the inference set to
    ``cfg.infer_result_path``.
    """
    # load infer data, need to fix
    #TODO
    loadFile = True
    ifLoad, data = False, None
    loaddict, dicts = load_file(cfg.dict_path, 'dict', 'pickle')
    if not loaddict:
        # FIX: was `raise (ValueError, 'dict load failed')` — raising a
        # tuple is a TypeError in Python 3 ("exceptions must derive from
        # BaseException"); raise the exception properly instead.
        raise ValueError('dict load failed')
    if loadFile:
        ifLoad, data = load_file(cfg.processed_path, 'processed data',
                                 'pickle')
    if not ifLoad or not loadFile:
        # Cache miss: rebuild train/dev/test dataset objects from the raw
        # files and persist them for the next run.
        train_data_obj = Dataset(cfg.train_data_path,
                                 'train',
                                 dicts=dicts,
                                 language_type='es',
                                 unlabeled_file_path=cfg.unlabeled_data_path,
                                 emb_file_path=cfg.emb_es_path)
        dev_data_obj = Dataset(cfg.dev_data_path, 'dev', dicts=dicts)
        test_data_obj = Dataset(cfg.test_data_path, 'test', dicts=dicts)

        save_file(
            {
                'train_data_obj': train_data_obj,
                'dev_data_obj': dev_data_obj,
                'test_data_obj': test_data_obj
            }, cfg.processed_path)

        # train_data_obj.save_dict(cfg.dict_path)
    else:
        # Cache hit: reuse the pickled dataset objects.
        train_data_obj = data['train_data_obj']
        dev_data_obj = data['dev_data_obj']
        test_data_obj = data['test_data_obj']

    infer_data_obj = Dataset(cfg.infer_data_path, 'infer', dicts=dicts)

    # Token embedding matrix is needed to rebuild the graph for restore.
    emb_mat_token = train_data_obj.emb_mat_token  # need to restore model

    with tf.variable_scope(network_type) as scope:
        if network_type in model_type_set:
            model = Model(emb_mat_token, len(train_data_obj.dicts['es']), 100,
                          scope.name)
    # NOTE(review): if network_type is not in model_type_set, `model` is
    # never bound and GraphHandler(model) raises NameError — confirm that
    # network_type is always validated upstream.

    graphHandler = GraphHandler(model)
    #evaluator = Evaluator(model)
    inference = Inference(model)

    # Cap GPU memory only when a fractional budget was configured.
    if cfg.gpu_mem < 1:
        gpu_options = tf.GPUOptions(
            per_process_gpu_memory_fraction=cfg.gpu_mem, allow_growth=True)
    else:
        gpu_options = tf.GPUOptions()
    graph_config = tf.ConfigProto(gpu_options=gpu_options,
                                  allow_soft_placement=True)
    sess = tf.Session(config=graph_config)
    graphHandler.initialize(sess)

    # Restore the checkpoint saved at the configured training step.
    saver = tf.train.Saver()
    step = cfg.load_step
    model_path = os.path.join(cfg.ckpt_dir,
                              'top_result_saver_step_%d.ckpt' % step)
    saver.restore(sess, model_path)
    logits_array, prob_array = inference.get_inference(sess, infer_data_obj)

    inference.save_inference(prob_array, cfg.infer_result_path)
Example #4
0
        random.seed(seed)
        np.random.seed(seed)
        torch.manual_seed(seed)
        if config['CUDA']:
            torch.backends.cudnn.deterministic = True
            torch.backends.cudnn.benchmark = False



def handle_model(engine, model_path="/data/egg/lego.egg"):
    """Load a model file, wrap its first mesh, and hand it to *engine*.

    The engine's renderable list is reset so the new mesh is the only
    thing drawn, after which the rendering loop is started.
    """
    loaded = engine.load_model(path=model_path)
    geometry = parse_model_geometry(loaded)

    # Only the first mesh of the parsed geometry gets rendered.
    renderable = RenderableMesh(geometry[0])
    engine.clear_renderables()
    engine.add_renderable(renderable)
    engine.start_rendering_loop()


# --- script entry: load config, seed RNGs, optionally build a renderer ---
config = load_config()
set_seed(config)

# Only construct the render engine when it is enabled in the config.
if config['RENDERING']['RENDERING_ENGINE_ON']:
    engine = RenderEngine(rendering_config=config['RENDERING'])
else:
    engine = None

# Inference runs headless when `engine` is None.
inferer = Inference(config=config, engine=engine)
inferer.infer()
Example #5
0
import sys, os, shutil
sys.path.append(os.getcwd())
from src.inference import Inference
from flask import Flask, request, flash, redirect, render_template, url_for, Markup, send_file, send_from_directory
from werkzeug.utils import secure_filename

# Inference backend; the model architecture name is taken from the first
# CLI argument, falling back to 'densenet' when none is given.
infer = None

args = list(sys.argv)
if len(args) > 1:
    infer = Inference(args[1])
else:
    infer = Inference('densenet')

app = Flask(__name__)
# NOTE(review): secret key is hard-coded in source — should be loaded from
# configuration/environment in production, not committed to the repo.
app.secret_key = "!@#$%^&*()a-=afs;'';312$%^&*k-[;.sda,./][p;/'=-0989#$%^&0976678v$%^&*(fdsd21234266OJ^&UOKN4odsbd#$%^&*(sadg7(*&^%32b342gd']"
# Upload paths: SUB_UPLOAD_FOLDER is the path as served to templates,
# UPLOAD_FOLDER the corresponding location on disk.

SUB_UPLOAD_FOLDER = "static/uploadFolder"
UPLOAD_FOLDER = "flask_api/" + SUB_UPLOAD_FOLDER
# Expose the upload path through Flask's config as well.
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# Tracks the files loaded in memory during this process's lifetime.
list_of_uploaded_file = []


def clean_upload_folder(folder=None):
    """Delete the upload directory tree; a missing directory is fine.

    Args:
        folder: Directory to remove; defaults to the module-level
            ``UPLOAD_FOLDER`` (backward compatible with the no-arg call).

    FIX: dropped the unused ``as e`` binding on the exception handler.
    """
    target = (UPLOAD_FOLDER + "/") if folder is None else folder
    try:
        shutil.rmtree(target)
    except FileNotFoundError:
        # Nothing to delete; the desired end state already holds.
        pass