Esempio n. 1
0
def upload():
    """Handle a POSTed model upload.

    Creates a model from the request, records it in Elasticsearch, and
    pushes it through an UploadProducer worker thread.

    Returns a JSON response describing the outcome, or None for non-POST
    requests (original behavior preserved).
    """
    if request.method == 'POST':
        # Forces multipart parsing; kept even though unused in case
        # create_model relies on the files already being read.
        uploaded_files = request.files.getlist("file")
        # .get() with a default replaces the original bare `except:`,
        # which silently swallowed every error, not just a missing key.
        isHaveOne = request.form.get('model_id', 'aaaaaaaaaa')

        model = create_model(request, session, isHaveOne)

        if not model.check_filetype():
            return jsonify(status='failed', message='filetype error')

        # Initial database object keyed by the model id.
        es_model = EsModel(meta={'id': model.id}, ip=request.remote_addr)
        es_model.save()

        producer = UploadProducer('Producer', uploading_queue, model_pool, app)
        if producer.add_task(model):
            producer.start()
            producer.join()
            return jsonify(name=model.id, status='success', message='uploaded')
        else:
            return jsonify(name=model.id, status='failed', message='waiting')
Esempio n. 2
0
def x2paddle(cmd, model_name, save_base_dir):
    """Run an x2paddle conversion command, log its output, and package
    the converted model.

    Args:
        cmd: Shell command string to execute.
        model_name: Base name of the model being converted.
        save_base_dir: Directory under which conversion output is written.

    Returns:
        A JSON response with the archive name, status, and captured log.
    """
    save_dir = os.path.join(save_base_dir, model_name)
    cmd_result = ''
    # NOTE(review): shell=True with a string-concatenated command is
    # injection-prone if any path component is user-controlled.
    # The context manager waits for the child and closes its pipes,
    # avoiding the zombie process / fd leak the original left behind.
    with Popen(cmd,
               stdout=PIPE,
               stderr=STDOUT,
               shell=True,
               universal_newlines=True) as p:
        for line in p.stdout:
            # Per-node progress lines are noise; skip them.
            if 'Converting node' in line:
                continue
            cmd_result += str(line).rstrip() + '<br/>\n'
            sys.stdout.flush()

    zip_dir = save_dir + '.tar.gz'
    es_model = EsModel.get(id=session.get('id'))
    es_model.update(log=cmd_result)

    # x2paddle writes __model__ only on success, so use it as the marker.
    if os.path.exists(os.path.join(save_dir, 'inference_model/__model__')):
        os.system('tar -C ' + save_base_dir + ' -zcvf ' + zip_dir + ' ' +
                  model_name)
        app.logger.info(model_name + ' convert success')
        return jsonify(name=model_name + '.tar.gz',
                       status='success',
                       cmd_result=cmd_result)
    else:
        app.logger.info(model_name + ' convert failed')
        return jsonify(name='', status='failed', cmd_result=cmd_result)
Esempio n. 3
0
def run_script(cmd, model_name, save_base_dir, id):
    """Run a conversion command, record its log, and archive the result.

    Args:
        cmd: Shell command string to execute.
        model_name: Base name of the model being converted.
        save_base_dir: Directory under which conversion output is written.
        id: Elasticsearch document id for this conversion job.

    Returns:
        A plain dict with status, archive name (on success), and log.
    """
    cmd_result = ''
    # Context manager waits for the child and closes its pipes —
    # the original never reaped the process (zombie / fd leak).
    # NOTE(review): shell=True on a concatenated string is injection-prone
    # if any component is user-controlled.
    with Popen(cmd,
               stdout=PIPE,
               stderr=STDOUT,
               shell=True,
               universal_newlines=True) as p:
        for line in p.stdout:
            # Skip noisy per-node progress output.
            if "Converting node" in line:
                continue
            cmd_result += str(line).rstrip() + '<br/>\n'
            sys.stdout.flush()

    zip_dir = os.path.join(save_base_dir, model_name + '.tar.gz')
    save_dir = os.path.join(save_base_dir, model_name)

    es_model = EsModel.get(id=id)
    es_model.update(log=cmd_result)
    es_model.update(save_dir=save_dir)

    # __model__ exists only when the conversion succeeded.
    if os.path.exists(os.path.join(save_dir, 'inference_model/__model__')):
        os.system('tar -C ' + save_base_dir + ' -cvzf ' + zip_dir + ' ' +
                  model_name)
        res = {
            'name': model_name + '.tar.gz',
            'status': 'success',
            'cmd_result': cmd_result
        }
        return res
    else:
        res = {'status': 'failed', 'cmd_result': cmd_result}
        return res
Esempio n. 4
0
    def save(self):
        """Persist an uploaded Caffe file and record its location.

        Tries the 'caffe_weight' entry first; if that key is absent,
        falls back to 'caffe_model' (original control flow preserved).
        Only the weight branch records upload_dir/file_size in
        Elasticsearch — NOTE(review): confirm the model branch should
        skip those updates.
        """
        updir = os.path.join(self.upload_base_dir, self.id)
        if not os.path.exists(updir):
            os.mkdir(updir)

        try:
            # KeyError replaces the original bare `except:` so genuine
            # I/O or Elasticsearch failures are no longer silently
            # rerouted into the caffe_model fallback.
            # NOTE(review): if self.files is a werkzeug MultiDict, a
            # missing key raises a KeyError subclass, so this stays
            # equivalent for the missing-key case.
            self.files['caffe_weight'].id = self.id
            caffe_weight = self.files['caffe_weight']
            caffe_weight_filename = secure_filename(caffe_weight.filename)
            caffe_weight_dir = os.path.join(updir, caffe_weight_filename)
            caffe_weight.save(caffe_weight_dir)
            self.files['caffe_weight'].upload_dir = caffe_weight_dir
            save_base_dir = os.path.join(self.convert_base_dir, self.id)
            save_dir = os.path.join(save_base_dir + '/' +
                                    self.files['caffe_weight'].filename)
            file_size = os.path.getsize(caffe_weight_dir)

            es_model = EsModel.get(id=self.id)
            es_model.update(upload_dir=caffe_weight_dir)
            es_model.update(file_size=file_size)

            self.save_dir = save_dir
        except KeyError:
            self.files['caffe_model'].id = self.id
            caffe_model = self.files['caffe_model']
            caffe_model_filename = secure_filename(caffe_model.filename)
            caffe_model_dir = os.path.join(updir, caffe_model_filename)
            caffe_model.save(caffe_model_dir)
            self.files['caffe_model'].upload_dir = caffe_model_dir
            save_base_dir = os.path.join(self.convert_base_dir, self.id)
            save_dir = os.path.join(save_base_dir + '/' +
                                    self.files['caffe_model'].filename)

            self.save_dir = save_dir
Esempio n. 5
0
def upload():
    """Handle a model file upload.

    Generates a fresh job id, records the uploader's IP in Elasticsearch,
    and runs the file through a Producer worker thread.

    Returns a JSON response in every path — the original fell off the end
    (returning None, which makes Flask raise a 500) for non-POST requests
    and for disallowed file types.
    """
    # Record the uploading user's IP address for the audit trail.
    app.logger.info('start upload')
    id = uuid.uuid4().hex
    es_model = EsModel(meta={'id': id}, ip=request.remote_addr)
    es_model.save()
    session['id'] = id
    if request.method == 'POST':
        file = request.files['file']
        if file and check_file_extension(file.filename):
            app.logger.info('file type is allow')
            producer = Producer('Producer', uploading_queue, uploaded_queue,
                                app)
            if producer.add_task(file):
                producer.start()
                producer.join()
            else:
                return jsonify(name=file.filename, status='waited')
            print(threading.enumerate())
            updir = os.path.join(basedir, 'upload/')
            updir = os.path.join(updir, id)
            es_model = EsModel.get(id=session.get('id'))
            es_model.update(models_dir=os.path.join(updir, file.filename))
            return jsonify(name=file.filename, status='success')
        # Fix: explicit response for a missing file or disallowed extension.
        return jsonify(status='failed', message='filetype error')
    # Fix: explicit response for non-POST requests.
    return jsonify(status='failed', message='POST required')
Esempio n. 6
0
def convert():
    """Dispatch a conversion request to x2paddle.

    Framework codes in the request payload:
        {0: 'tensorflow', 1: 'onnx', 2: 'caffe'}

    Returns the JSON response produced by x2paddle(), or a failure
    response for a missing model name or unknown framework code.
    """
    data = json.loads(request.get_data().decode('utf-8'))

    id = session.get('id')
    updir = os.path.join(basedir, 'upload/')
    updir = os.path.join(updir, id)
    es_model = EsModel.get(id=id)
    es_model.update(email=data['email'])
    es_model.update(framework=data['framework'])
    app.logger.info('start convert')

    if data['framework'] == '0':
        # tensorflow
        model_full_name = data['tf_name']
        if model_full_name == '':
            return jsonify(status='failed')
        save_base_dir = os.path.join(convert_base_dir, id)
        model_name = model_full_name.split('.')[0]
        save_dir = os.path.join(save_base_dir, model_name)
        model_path = os.path.join(updir, model_full_name)
        cmd = 'x2paddle' + ' --framework=tensorflow' + ' --model=' + model_path + ' --save_dir=' + save_dir
        return x2paddle(cmd, model_name, save_base_dir)

    elif data['framework'] == '1':
        # onnx
        model_full_name = data['onnx_name']
        if model_full_name == '':
            return jsonify(status='failed')
        save_base_dir = os.path.join(convert_base_dir, id)
        model_name = model_full_name.split('.')[0]
        save_dir = os.path.join(save_base_dir, model_name)
        model_path = os.path.join(updir, model_full_name)
        cmd = 'x2paddle' + ' --framework=onnx' + ' --model=' + model_path + ' --save_dir=' + save_dir
        return x2paddle(cmd, model_name, save_base_dir)

    elif data['framework'] == '2':
        # caffe — fix: the original tested '1' here (a duplicate of the
        # onnx branch), making this branch unreachable.
        caffe_weight_name = data['caffe_weight_name']
        caffe_model_name = data['caffe_model_name']
        if caffe_weight_name == '' or caffe_model_name == '':
            return jsonify(status='failed')
        save_base_dir = os.path.join(convert_base_dir, id)
        model_name = caffe_model_name.split('.')[0]
        save_dir = os.path.join(save_base_dir, model_name)
        weight_path = os.path.join(updir, caffe_weight_name)
        model_path = os.path.join(updir, caffe_model_name)
        cmd = 'x2paddle' + ' --framework=caffe' + ' --prototxt=' + model_path + ' --weight=' + weight_path + ' --save_dir=' + save_dir

        return x2paddle(cmd, model_name, save_base_dir)

    # Fix: explicit response for an unrecognized framework code instead
    # of falling off the end (None -> Flask 500).
    return jsonify(status='failed')
Esempio n. 7
0
def x2paddle():
    """Accept a POSTed model upload and hand it to the producer thread.

    Returns a JSON response for POST requests; non-POST requests get
    None, matching the original implicit fall-through.
    """
    if request.method != 'POST':
        return None

    model = create_model(request)
    if not model.check_filetype():
        return jsonify(status='failed', message='filetype error')

    # Create the Elasticsearch record for this conversion job.
    record = EsModel(meta={'id': model.id}, ip=request.remote_addr)
    record.save()
    session['id'] = model.id

    worker = Producer('Producer', uploading_queue, converted_pool, app)
    if not worker.add_task(model):
        return jsonify(name=model.id, status='failed', message='waiting')

    worker.start()
    print('uploading_queue size: ', uploading_queue.qsize())
    worker.join()
    return jsonify(worker.result)
Esempio n. 8
0
    def save(self):
        """Write the uploaded file into this model's upload directory and
        record its path and size in Elasticsearch."""
        target_dir = os.path.join(self.upload_base_dir, self.id)
        if not os.path.exists(target_dir):
            os.mkdir(target_dir)

        upload = self.file['object']
        safe_name = secure_filename(upload.filename)
        target_path = os.path.join(target_dir, safe_name)
        upload.save(target_path)
        self.file['upload_dir'] = target_path

        convert_dir = os.path.join(self.convert_base_dir, self.id)
        # NOTE(review): the save_dir uses the raw self.file['filename'],
        # while the file on disk uses the secure_filename() version —
        # confirm that mismatch is intentional.
        converted_target = os.path.join(convert_dir + '/' +
                                        self.file['filename'])
        size_bytes = os.path.getsize(target_path)

        record = EsModel.get(id=self.id)
        record.update(upload_dir=target_path)
        record.update(file_size=size_bytes)

        self.save_dir = converted_target
Esempio n. 9
0
def convert():
    """Record conversion metadata in Elasticsearch and queue the model
    for conversion via a ConvertProducer worker."""
    payload = json.loads(request.get_data().decode('utf-8'))
    model = get_model(payload['model_id'])

    # Publish the current model id at module level (original side effect).
    global model_id
    model_id = payload['model_id']

    record = EsModel.get(id=model.id)
    record.update(email=payload['email'])
    record.update(framework=payload['framework'])
    record.update(model_class=payload['model_class'])
    # NOTE(review): this stores a time.struct_time — confirm the ES
    # mapping accepts that representation.
    record.update(time=time.localtime(time.time()))

    producer = ConvertProducer('Producer', uploaded_queue, model_pool, app)
    if not producer.add_task(model):
        return jsonify(name=model.id, status='failed', message='waiting')

    producer.start()
    producer.join()
    return jsonify(model.result)