Example #1
import json
import pprint

import tensorflow as tf

# get_data, build_model, train_model, evaluate_model, LOGS,
# experiment_name, and git_version are helpers defined elsewhere in
# the source project.
def main(_config):
    data = get_data()
    model = build_model(data.X_train)

    model.compile(optimizer=tf.optimizers.Adam(0.01))
    train_model(model, (data.X_train, data.Y_train))

    metrics = evaluate_model(model, (data.X_test, data.Y_test))

    _dict = {**_config, **metrics, "commit": git_version()}
    with open(f"{LOGS}/{experiment_name()}.json", "w") as fp:
        json.dump(_dict, fp, indent=2)

    print("=" * 60)
    print(experiment_name())
    pprint.pprint(_dict)
    print("=" * 60)
Example #2
from __future__ import print_function
import time
import pdb
import logging
import sys
import platform
from datetime import datetime
import utils
from ForestCoverClassifier import ForestCoverClassifier

if __name__ == '__main__':
    runinfo = {}
    runinfo['platform'] = platform.uname()
    runinfo['git_rev'] = utils.git_version()
    runinfo['start_time'] = datetime.now()
    runinfo['end_time'] = None
    runinfo['params'] = {}
    print('Platform: ', runinfo['platform'])
    print('Git revision:', runinfo['git_rev'])
    print('Start Time: ', runinfo['start_time'])
    start_time = time.time()
    start_ctime = time.clock()  # time.clock() was removed in Python 3.8; use time.process_time() on modern Python
    try:
        fcc = ForestCoverClassifier(runinfo)
        # fcc.classify()
        # fcc.classifyNN()
        fcc.classifyNN_nolearn()
        end_time = time.time()
        end_ctime = time.clock()
        print('Success: wall time:  %f sec, processor time: %f sec' %
              (end_time - start_time, end_ctime - start_ctime))
    except Exception:
        # Truncated in the original listing; a minimal except clause is
        # assumed here so the try block parses.
        logging.exception('run failed')
        raise
Example #3
from __future__ import print_function
import time
import pdb
import logging
import sys
import platform
from datetime import datetime
import utils
from TableImporter import import_table

settings = "../settings.ini"

if __name__ == "__main__":
    runinfo = {}
    runinfo["platform"] = platform.uname()
    runinfo["git_rev"] = utils.git_version()
    runinfo["start_time"] = datetime.now()
    runinfo["end_time"] = None
    runinfo["params"] = {}
    print("Platform: ", runinfo["platform"])
    print("Git revision:", runinfo["git_rev"])
    print("Start Time: ", runinfo["start_time"])
    start_time = time.time()
    start_ctime = time.clock()  # time.clock() was removed in Python 3.8; use time.process_time() on modern Python
    try:
        import_table("PlacesTableImporter", settings)
        end_time = time.time()
        end_ctime = time.clock()
        print("Success: wall time:  %f sec, processor time: %f sec" % (end_time - start_time, end_ctime - start_ctime))
        runinfo["end_time"] = datetime.now()
        print("End Time: ", runinfo["end_time"])
Example #4
def test_git_version_extra(self):
    self._version_str = 'git version 1.7.0.3.295.gd8fa2'
    self.assertEqual((1, 7, 0, 3), utils.git_version())
Example #5
def test_git_version_4(self):
    self._version_str = 'git version 1.7.0.2'
    self.assertEqual((1, 7, 0, 2), utils.git_version())
Example #6
def test_git_version_3(self):
    self._version_str = 'git version 1.6.6'
    self.assertEqual((1, 6, 6, 0), utils.git_version())
Example #7
def test_git_version_none(self):
    self._version_str = 'not a git version'
    self.assertEqual(None, utils.git_version())
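
Examples #4 through #7 pin down the behavior these tests expect from
utils.git_version(): the 'git version ...' string is parsed into a
4-tuple of integers, short versions are zero-padded, trailing
non-numeric components are dropped, and unrecognized input yields None.
A sketch consistent with those assertions (the tested module's actual
code and signature may differ; here the version string is passed in
explicitly rather than read from 'git --version', which the tests
appear to patch via self._version_str):

import re

def git_version(version_str):
    # Parse "git version 1.7.0.2"-style output into a 4-tuple.
    match = re.match(r'git version (\d+(?:\.\d+)*)', version_str)
    if match is None:
        return None
    parts = [int(p) for p in match.group(1).split('.')]
    # Zero-pad short versions and drop extra components so the result
    # always has exactly four entries.
    return tuple((parts + [0, 0, 0])[:4])

With this sketch, git_version('git version 1.6.6') returns
(1, 6, 6, 0), matching Example #6, and the non-numeric '.gd8fa2' tail
in Example #4 is ignored by the regex.
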
Example #8
import logging
import os
import random

import numpy as np
import torch

# get_args, get_dataset, get_network, get_attack, get_defense,
# Lr_schedule, Trainer, and git_version are defined elsewhere in the
# source project.
def main():
    args = get_args()

    if not os.path.exists(args.fname):
        os.makedirs(args.fname)
    if not os.path.exists(args.checkpoints):
        os.makedirs(args.checkpoints)

    if args.tensorboard:
        from tensorboardX import SummaryWriter

        writer = SummaryWriter(args.fname[8:])
    else:
        import wandb

        wandb.init(
            project=args.project,
            name=args.fname.replace("/", "_")[8:],
            config=args.__dict__,
            settings=wandb.Settings(_disable_stats=True),
        )
        writer = None

    logger = logging.getLogger(__name__)
    logging.basicConfig(
        format="[%(asctime)s] - %(message)s",
        datefmt="%Y/%m/%d %H:%M:%S",
        level=logging.DEBUG,
        handlers=[
            logging.FileHandler(os.path.join(args.fname, "output.log")),
            logging.StreamHandler(),
        ],
    )

    logger.info(args)
    logger.info(git_version())

    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)

    dataset = get_dataset(args)

    args.mean = dataset.mean
    args.std = dataset.std
    args.dataset = dataset
    model = get_network(args)

    opt = torch.optim.SGD(model.parameters(),
                          args.lr,
                          momentum=0.9,
                          weight_decay=args.weight_decay)
    scheduler = Lr_schedule(
        opt, milestones=[int(v) for v in args.lr_adjust.split(",")], gamma=0.1)

    if args.resume_checkpoint != "":
        state = torch.load(args.resume_checkpoint)
        model.load_state_dict(state["state_dict"])
        opt.load_state_dict(state["optimizer"])
        args.epoch = state["epoch"] + 1

    if not args.no_amp:
        # from torch.cuda.amp.grad_scaler import GradScaler
        # scaler = GradScaler()
        # args.scaler = scaler

        from apex import amp

        model, opt = amp.initialize(model,
                                    opt,
                                    opt_level="O1",
                                    loss_scale=1.0,
                                    verbosity=False)
        args.amp = amp

    args.opt = opt

    attack = get_attack(args, model=model)
    defense = get_defense(args, model, attack)

    trainer = Trainer(
        args=args,
        model=model,
        dataset=dataset,
        logger=logger,
        optimizer=opt,
        scheduler=scheduler,
        attack=attack,
        writer=writer,
        defense=defense,
    )
    trainer.train()

    # logger.info("Begin evaluating last")
    # eval(model, args, dataset, logger)

    if not args.tensorboard:
        wandb.finish()