Exemplo n.º 1
0
# Example (Python 2) script: download the pagoda test set from Google Drive,
# extract it into the workspace, and prepare Caffe deploy files for testing.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
# Star import: presumably supplies set_workspace, dir, py_render_template and
# read_mean_data used below -- TODO confirm against utils.make_predictions.
from utils.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/pagoda")

# Google Drive file id, remote file name, and local destination path.
# NOTE(review): dir() shadows the builtin; it appears to be a path helper.
test_zip = GoogleFile('0B60FAQcEiqEyWUJ2dUhmUDllb1k', 'pagoda_data_test.zip',
                      dir('data/pagoda_data_test.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download test file"
google_download.download_file_from_google_drive(test_zip)
print "Finish"

print "Extracting test zip file"
unzip_with_progress(test_zip.file_path, dir("data"))
print "Finish"

print "\n\n------------------------TESTING PHRASE-----------------------------\n\n"

# Mean image produced during training; needed to normalize test images.
mean_proto = dir("data/mean.binaryproto")

caffe_deploy = dir("caffe_model/caffenet_deploy.prototxt")

# Render the deploy prototxt from its template before loading the model.
py_render_template("template/caffenet_deploy.template", caffe_deploy)

mean_data = read_mean_data(mean_proto)
Exemplo n.º 2
0
# Example (Python 2) script: download the pagoda training set from Google
# Drive, extract it, and build train/validation LMDB databases for Caffe.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
from utils.create_lmdb import CreateLmdb
# NOTE(review): Caffe is imported but not used in this snippet.
from utils.pycaffe import Caffe
# Star import: presumably supplies set_workspace and dir -- TODO confirm.
from utils.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/pagoda")

# Google Drive file id, remote file name, and local destination path.
train_zip = GoogleFile('0B60FAQcEiqEyWlY1UldpVU5hT2c', 'pagoda_dataset.zip',
                       dir('data/pagoda_dataset.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download train file"
google_download.download_file_from_google_drive(train_zip)
print "Finish"

print "Extracting train zip file"
unzip_with_progress(train_zip.file_path, dir("data"))
print "Finish"

# Destination directories for the LMDB databases consumed by Caffe.
train_lmdb = dir("data/train_lmdb")
validation_lmdb = dir("data/validation_lmdb")

lmdb = CreateLmdb()
# "pagoda" is presumably the class-label keyword used when splitting the
# extracted images into the two LMDBs -- TODO confirm in CreateLmdb.
lmdb.create_lmdb(dir("data/pagoda_dataset"), train_lmdb, validation_lmdb,
                 "pagoda")
Exemplo n.º 3
0
# Example (Python 2) script: download the cat/dog training set from Google
# Drive, extract it, and build train/validation LMDB databases for Caffe.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
from utils.create_lmdb import CreateLmdb
# NOTE(review): Caffe is imported but not used in this snippet.
from utils.pycaffe import Caffe
# Star import: presumably supplies set_workspace and dir -- TODO confirm.
from utils.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/cat_dog")

# Google Drive file id, remote file name, and local destination path.
train_zip = GoogleFile('0BzL8pCLanAIASFNnLUNEZFZHcmM', 'train.zip',
                       dir('data/train.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download train file"
google_download.download_file_from_google_drive(train_zip)
print "Finish"

print "Extracting train zip file"
unzip_with_progress(train_zip.file_path, dir("data"))
print "Finish"

# Destination directories for the LMDB databases consumed by Caffe.
train_lmdb = dir("data/train_lmdb")
validation_lmdb = dir("data/validation_lmdb")

lmdb = CreateLmdb()
# "cat" is presumably the class-label keyword used when splitting the
# extracted images into the two LMDBs -- TODO confirm in CreateLmdb.
lmdb.create_lmdb(dir("data/train"), train_lmdb, validation_lmdb, "cat")

# Mean image file path; snippet appears truncated before it is used.
mean_proto = dir("data/mean.binaryproto")
Exemplo n.º 4
0
# Example (Python 2) script: download both the cat/dog train and test archives
# from Google Drive and extract them. Snippet appears truncated mid-extraction.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
# NOTE(review): this variant imports from code.* / utils.caffe, unlike the
# other snippets which use utils.create_lmdb / utils.pycaffe.
from code.create_lmdb import CreateLmdb
from utils.caffe import Caffe
# Star import: presumably supplies set_workspace and dir -- TODO confirm.
from code.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/cat_dog")

# Google Drive file id, remote file name, and local destination path.
train_zip = GoogleFile('0BzL8pCLanAIASFNnLUNEZFZHcmM', 'train.zip',
                       dir('data/train.zip'))
test_zip = GoogleFile('0BzL8pCLanAIAZTlvcEs3U082U00', 'test1.zip',
                      dir('data/test1.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download test file"
google_download.download_file_from_google_drive(test_zip)
print "Finish"

print "Extracting test zip file"
unzip_with_progress(test_zip.file_path, dir("data"))
print "Finish"

print "Starting download train file"
google_download.download_file_from_google_drive(train_zip)
print "Finish"

# Snippet is cut off here; the matching unzip call is not visible.
print "Extracting train zip file"
Exemplo n.º 5
0
# Example (Python 2) script: download the cat/dog test set from Google Drive,
# extract it, and prepare Caffe deploy files for prediction.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
# Star import: presumably supplies set_workspace, dir, py_render_template and
# read_mean_data used below -- TODO confirm against utils.make_predictions.
from utils.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/cat_dog")

# Google Drive file id, remote file name, and local destination path.
test_zip = GoogleFile('0BzL8pCLanAIAZTlvcEs3U082U00', 'test1.zip',
                      dir('data/test1.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download test file"
google_download.download_file_from_google_drive(test_zip)
print "Finish"

print "Extracting test zip file"
unzip_with_progress(test_zip.file_path, dir("data"))
print "Finish"

print "\n\n------------------------TESTING PHRASE-----------------------------\n\n"

caffe_deploy = dir("caffe_model/caffenet_deploy.prototxt")

# Render the deploy prototxt from its template before loading the model.
py_render_template("template/caffenet_deploy.template", caffe_deploy)

# Mean image produced during training; needed to normalize test images.
mean_proto = dir("data/mean.binaryproto")

mean_data = read_mean_data(mean_proto)
Exemplo n.º 6
0
# Example (Python 2) script: download both the pagoda train and test archives
# from Google Drive and extract them into the workspace.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
from utils.zip_utils import unzip_with_progress
# NOTE(review): CreateLmdb and Caffe are imported but not used in this snippet;
# this variant imports from code.* / utils.caffe, unlike the other snippets.
from code.create_lmdb import CreateLmdb
from utils.caffe import Caffe
# Star import: presumably supplies set_workspace and dir -- TODO confirm.
from code.make_predictions import *

google_download = DownloadGoogleDrive()

# All relative paths passed to dir() below are resolved under this workspace.
set_workspace("data/pagoda")

# Google Drive file id, remote file name, and local destination path.
train_zip = GoogleFile('0BxsB7D9gLcdOQkdoQXRUMDdUUnM', 'train.zip', dir('data/train.zip'))
test_zip = GoogleFile('0BxsB7D9gLcdON3hYX2FFeVpQQlE', 'pagodatest.zip', dir('data/pagodatest.zip'))

print "\n\n------------------------PREPARE PHRASE----------------------------\n\n"

print "Starting download test file"
google_download.download_file_from_google_drive(test_zip)
print "Finish"

print "Extracting test zip file"
unzip_with_progress(test_zip.file_path, dir("data"))
print "Finish"

print "Starting download train file"
google_download.download_file_from_google_drive(train_zip)
print "Finish"

print "Extracting train zip file"
unzip_with_progress(train_zip.file_path, dir("data"))
print "Finish"
Exemplo n.º 7
0
# Example (Python 2) script: fetch the cuDNN 6.0 archive for CUDA 8.0 from
# Google Drive, then run the Caffe installation shell script.
from network.download_google_drive import DownloadGoogleDrive
from network.google_file import GoogleFile
# Star import: presumably supplies execute() used below -- TODO confirm.
from utils.utils import *

# Google Drive file id, remote file name, and local destination path.
# NOTE(review): variable is named cuCNN but the payload is cuDNN.
cuCNN = GoogleFile('0BzL8pCLanAIAUFFvcURwb3EwOHM',
                   'cudnn-8.0-linux-x64-v6.0.tgz',
                   "/tmp/cudnn-8.0-linux-x64-v6.0.tgz")
google_download = DownloadGoogleDrive()

google_download.download_file_from_google_drive(cuCNN)

# Make the installer executable, then run it from the current directory.
execute("chmod +x caffe_install.sh")
execute("./caffe_install.sh")
Exemplo n.º 8
0
def heobs_image_classification(template, max_iter, img_width, img_height, gpu_id, lr, stepsize, batchsize_train,
                               batchsize_test, trained_model, ws, test_id):
    # type: (str, int, int, int, str, float, int, int, int, str, Workspace, int) -> None
    """Run the full HeObs image-classification pipeline for one test run.

    Pipeline: download and extract the dataset (skipped when a previous
    caffe training log already exists), build train/validation LMDBs,
    compute image means, render the Caffe train/solver templates, train,
    then render the deploy template, predict over the test set, and export
    CSV/model/log artifacts into the workspace's result/ directory.

    All progress and log output is reported through put_message() tuples
    keyed by test_id; failures are logged the same way rather than raised.
    """
    try:
        put_message(("update", test_id, 1, 100, "starting..."))

        put_message(("log", test_id, "Working dir: %s" % ws.workspace("")))
        # Run relative to this module's directory so template/ paths resolve.
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        classes = ["being", "heritage", "scenery"]

        train_lmdb_path = ws.workspace("data/extracted/train_lmdb")

        validation_lmdb_path = ws.workspace("data/extracted/validation_lmdb")

        test_path = ws.workspace("data/extracted/test")

        # Will create after render
        caffe_train_model = ws.workspace("caffe_model/caffenet_train.prototxt")

        # Will create after render
        caffe_solver = ws.workspace("caffe_model/caffenet_solver.prototxt")

        caffe_log = ws.workspace("caffe_model/caffe_train.log")

        caffe_deploy = ws.workspace("caffe_model/caffenet_deploy.prototxt")

        snapshot_prefix = ws.workspace("caffe_model/snapshot")

        mean_proto = ws.workspace("data/mean.binaryproto")

        pycaffe = PyCaffe()

        # An existing log is treated as "dataset already prepared": skip
        # download/extract/LMDB/template rendering and go straight to train.
        if not os.path.isfile(caffe_log):

            train_zip = GoogleFile('0BzL8pCLanAIAd0hBV2NUVHpmckE', ws.workspace('data/heobs_large_dataset.zip'))

            put_message(("update", test_id, 2, 100, "downloading dataset..."))
            google_download = DownloadGoogleDrive()

            put_message(("log", test_id, "Starting download train file"))
            google_download.download_file_from_google_drive(train_zip)
            put_message(("log", test_id, "Finish"))

            put_message(("log", test_id, "Extracting train zip file"))
            put_message(("update", test_id, 10, 100, "extracting dataset..."))
            unzip_with_progress(train_zip.file_path, ws.workspace("data/extracted"))
            put_message(("log", test_id, "Finish"))

            put_message(("update", test_id, 15, 100, "creating lmdb..."))
            lmdb = CreateLmdb()
            lmdb.create_lmdb(ws.workspace("data/extracted/heobs_large_dataset"), train_lmdb_path, validation_lmdb_path,
                             classes, test_path, img_width, img_height)

            # NOTE(review): both calls write to the same mean_proto file, so
            # the validation mean overwrites the train mean -- confirm intent.
            put_message(("update", test_id, 20, 100, "computing train image mean..."))
            pycaffe.compute_image_mean(test_id, "lmdb", train_lmdb_path, mean_proto)
            put_message(("update", test_id, 25, 100, "computing test image mean..."))
            pycaffe.compute_image_mean(test_id, "lmdb", validation_lmdb_path, mean_proto)

            # Environment variable overrides the configured solver mode
            # (e.g. CPU vs GPU).
            solver_mode = constant.CAFFE_SOLVER
            if "CAFFE_SOLVER" in os.environ:
                solver_mode = os.environ['CAFFE_SOLVER']

            py_render_template("template/" + template + "/caffenet_train.template", caffe_train_model,
                               mean_file=mean_proto,
                               train_lmdb=train_lmdb_path, validation_lmdb=validation_lmdb_path,
                               batchsize_train=batchsize_train,
                               batchsize_test=batchsize_test,
                               num_output=len(classes))
            py_render_template("template/" + template + "/caffenet_solver.template", caffe_solver,
                               caffe_train_model=caffe_train_model,
                               max_iter=max_iter,
                               snapshot_prefix=snapshot_prefix,
                               learning_rate=lr,
                               stepsize=stepsize,
                               solver_mode=solver_mode)

        put_message(("log", test_id, "Starting to train"))
        put_message(("update", test_id, 30, 100, "starting to train..."))
        pycaffe.train(caffe_solver, caffe_log, gpu_id, trained_model, ws, test_id, max_iter)

        put_message(("log", test_id, "Train completed"))
        put_message(("log", test_id, "Starting to test"))

        # Cooperative shutdown flag set externally; bail out between phases.
        if sig_kill:
            return

        # The copied solver file doubles as a "test already done" marker.
        # NOTE(review): "slover" is presumably a typo for "solver"; kept
        # as-is because the same path is written below and may be read by
        # other components.
        if not os.path.isfile(ws.workspace("result/slover.prototxt")):
            put_message(("update", test_id, 90, 100, "starting to test..."))
            py_render_template("template/" + template + "/caffenet_deploy.template", caffe_deploy,
                               num_output=len(classes), img_width=img_width, img_height=img_height)

            set_caffe_gpu(gpu_id)
            put_message(("log", test_id, "Reading mean file"))
            put_message(("update", test_id, 91, 100, "reading mean file..."))
            mean_data = read_mean_data(mean_proto)

            put_message(("log", test_id, "Reading neural network model"))
            put_message(("update", test_id, 92, 100, "reading cnn model..."))
            net = read_model_and_weight(caffe_deploy, snapshot_prefix + "_iter_" + str(max_iter) + ".caffemodel")
            transformer = image_transformers(net, mean_data)

            put_message(("log", test_id, "Predicting..."))
            put_message(("update", test_id, 95, 100, "predicting..."))
            prediction = making_predictions(ws.workspace("data/extracted/test"), transformer, net, img_width,
                                            img_height)
            if sig_kill:
                return
            put_message(("update", test_id, 99, 100, "exporting data..."))
            put_message(("log", test_id, "Exporting result to csv"))
            export_to_csv(prediction, ws.workspace("result/test_result.csv"))

            put_message(("log", test_id, "Exporting predict result to folder"))
            export_data(prediction, ws.workspace("data/extracted/test"), ws.workspace("result/data"))

            put_message(("log", test_id, "Moving log"))
            shutil.copyfile(caffe_log, ws.workspace("result/caffe_train.log"))
            shutil.copyfile(caffe_train_model, ws.workspace("result/model/train_val.prototxt"))
            shutil.copyfile(caffe_deploy, ws.workspace("result/deploy.prototxt"))
            shutil.copyfile(caffe_solver, ws.workspace("result/slover.prototxt"))

        put_message(("log", test_id, "Test completed"))
        put_message(("done", test_id, "completed"))
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt and could block worker shutdown; narrowed so
        # those propagate while real errors are still logged.
        put_message(("log", test_id, traceback.format_exc()))