def run(self):  ## train the network with jac
    """Train the GLOnet generator (with Jacobian callbacks) and evaluate it.

    Loads hyper-parameters from ``Params.json`` in the hard-coded results
    directory, builds the Generator with an Adam optimizer and StepLR
    scheduler, optionally restores a checkpoint, trains via the module-level
    ``train`` routine using ``self.callable_fom`` / ``self.callable_jac`` as
    figure-of-merit and Jacobian callbacks, then generates sample devices.

    Raises:
        FileNotFoundError: if ``Params.json`` is missing from the results dir.
    """
    print('GLOnet can Start Here!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # NOTE(review): hard-coded Windows path — consider making this configurable.
    output_dir = 'E:\\LAB\\Project GLOnet-LumeircalAPI\\GLOnet-LumericalAPI\\results'
    restore_from = None

    json_path = os.path.join(output_dir, 'Params.json')
    # Raise instead of assert: asserts are stripped under `python -O`.
    if not os.path.isfile(json_path):
        raise FileNotFoundError("No json file found at {}".format(json_path))
    params = utils.Params(json_path)

    # Attach run-time attributes; numeric params may arrive as strings/floats.
    params.output_dir = output_dir
    params.cuda = torch.cuda.is_available()
    params.restore_from = restore_from
    params.numIter = int(params.numIter)
    params.noise_dims = int(params.noise_dims)
    params.gkernlen = int(params.gkernlen)
    params.step_size = int(params.step_size)

    # Create output sub-directories (idempotent).
    for sub in ('outputs', 'model', 'figures/histogram',
                'figures/deviceSamples'):
        os.makedirs(os.path.join(output_dir, sub), exist_ok=True)

    generator = Generator(params)
    ## save model
    #torch.save(generator,output_dir+'/net.pth')

    # Move to GPU when available.
    if params.cuda:
        generator.cuda()

    # Define the optimizer
    optimizer = torch.optim.Adam(generator.parameters(),
                                 lr=params.lr,
                                 betas=(params.beta1, params.beta2))

    # Define the scheduler (step-decay of the learning rate).
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=params.step_size,
                                                gamma=params.gamma)

    # Load model data (no-op here since restore_from is hard-coded to None;
    # kept for parity with the checkpoint-restoring variants of this driver).
    if restore_from is not None:
        params.checkpoint = utils.load_checkpoint(restore_from, generator,
                                                  optimizer, scheduler)
        logging.info('Model data loaded')

    # Train the model and save (numIter == 0 means evaluation-only run).
    if params.numIter != 0:
        logging.info('Start training')
        train(generator,
              optimizer,
              scheduler,
              params,
              func=self.callable_fom,
              jac=self.callable_jac,
              callback=self.callback)

    # Generate images and save
    logging.info('Start generating devices')
    evaluate(generator, func=self.callable_fom, numImgs=10, params=params)

    return
# ---- Beispiel #2 (scraped example separator; score: 0) ----
    # -- Fragment: interior of a training-driver function; `args`, `eng`,
    # -- `generator`, `params`, and `writer` are defined by the unseen header.

    # Define the optimizer: Adam over the generator's parameters.
    optimizer = torch.optim.Adam(generator.parameters(), lr=params.lr, betas=(params.beta1, params.beta2))

    # Define the scheduler: decay the learning rate by `gamma` every
    # `step_size` iterations.
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=params.step_size, gamma = params.gamma)


    # Load model data
    # NOTE(review): `restore_from` is undefined in this scope — the condition
    # tests `args.restore_from`, so the call almost certainly should pass
    # `args.restore_from` too (as written this raises NameError when taken).
    if args.restore_from is not None :
        params.checkpoint = utils.load_checkpoint(restore_from, generator, optimizer, scheduler)
        logging.info('Model data loaded')

    # Set the timer (wall-clock run-time reporting via utils.timer).
    timer=utils.timer()

    # Train the model and save (numIter == 0 means evaluation-only run).
    if params.numIter != 0 :
        logging.info('Start training')
        train(generator, optimizer, scheduler, eng, params)

    # Generate images and save
    logging.info('Start generating devices')
    evaluate(generator, eng, numImgs=500, params=params)

    # Report elapsed time and close the tensorboard writer.
    timer.out()
    writer.close()



# ---- Beispiel #3 (scraped example separator; score: 0) ----
def autosim(args, eng):
    """Train the GLOnet generator for one (wavelength, angle) scan point.

    Sets up logging, loads ``Params.json`` from ``args.output_dir``, builds
    the chosen generator version with an Adam optimizer and StepLR scheduler,
    optionally restores a checkpoint, trains, and evaluates 500 devices.

    Args:
        args: command-line namespace providing ``output_dir``,
            ``restore_from``, ``gen_ver``, ``wavelength`` and ``angle``.
        eng: simulation-engine handle forwarded to ``train``/``evaluate``.

    Raises:
        FileNotFoundError: if ``Params.json`` is missing from the output dir.
    """
    os.makedirs(args.output_dir, exist_ok=True)
    # Set the logger
    utils.set_logger(os.path.join(args.output_dir, 'train.log'))

    copyfile(args.output_dir)

    # Load parameters from json file.
    # Raise instead of assert: asserts are stripped under `python -O`.
    json_path = os.path.join(args.output_dir, 'Params.json')
    if not os.path.isfile(json_path):
        raise FileNotFoundError("No json file found at {}".format(json_path))
    params = utils.Params(json_path)

    # Add attributes to params (numeric params may arrive as strings/floats).
    params.output_dir = args.output_dir
    params.cuda = torch.cuda.is_available()
    params.restore_from = args.restore_from
    params.numIter = int(params.numIter)
    params.noise_dims = int(params.noise_dims)
    params.gkernlen = int(params.gkernlen)
    params.step_size = int(params.step_size)
    params.gen_ver = int(args.gen_ver)
    params.dime = 1
    if args.wavelength is not None:
        params.wavelength = int(args.wavelength)
    if args.angle is not None:
        params.angle = int(args.angle)

    # Build tools: tensorboard writer + best-result recorder.
    # (The original constructed utils.max_recorder() twice; once suffices.)
    writer = SummaryWriter(log_dir=r'./scan/runs')
    params.recorder = utils.max_recorder()
    params.writer = writer

    # Make output sub-directories (idempotent).
    for sub in ('outputs', 'model', 'figures/histogram',
                'figures/deviceSamples', 'figures/deviceSamples_max'):
        os.makedirs(os.path.join(args.output_dir, sub), exist_ok=True)
    # NOTE(review): assumes params.angle / params.wavelength exist even when
    # args.angle / args.wavelength are None — confirm Params.json provides them.
    os.makedirs(args.output_dir + '/deg{}_wl{}_gen_ver{}'.format(
        params.angle, params.wavelength, params.gen_ver),
                exist_ok=True)

    # Define the models (gen_ver selects the generator architecture).
    if params.gen_ver == 0:
        generator = Generator0(params)
    else:
        generator = Generator(params)

    # Move to gpu if possible
    if params.cuda:
        generator.cuda()

    # Define the optimizer
    optimizer = torch.optim.Adam(generator.parameters(),
                                 lr=params.lr,
                                 betas=(params.beta1, params.beta2))

    # Define the scheduler (step-decay of the learning rate).
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=params.step_size,
                                                gamma=params.gamma)

    # Load model data.
    # Bug fix: the original passed the undefined name `restore_from`, which
    # raised NameError whenever args.restore_from was set.
    if args.restore_from is not None:
        params.checkpoint = utils.load_checkpoint(args.restore_from, generator,
                                                  optimizer, scheduler)
        logging.info('Model data loaded')

    # Set the timer (wall-clock run-time reporting).
    timer = utils.timer()

    # Train the model and save (numIter == 0 means evaluation-only run).
    if params.numIter != 0:
        logging.info('Start training')
        train(generator, optimizer, scheduler, eng, params)

    # Generate images and save
    logging.info('Start generating devices')
    evaluate(generator, eng, numImgs=500, params=params)

    # Report elapsed time and close the tensorboard writer.
    timer.out()
    writer.close()
# ---- Beispiel #4 (scraped example separator; score: 0) ----
    # -- Fragment: interior of a GAN training driver; `params`, `dataloader`,
    # -- and `output_dir` come from the unseen enclosing scope.

    # Build the generator/discriminator pair.
    generator = Generator(params)
    discriminator = Discriminator(params)
    # Move both networks to GPU when available.
    if params.cuda:
        generator.cuda()
        discriminator.cuda()

    # Define the optimizers — separate Adam hyper-parameters for G and D.
    optimizer_G = torch.optim.Adam(generator.parameters(),
                                   lr=params.lr_gen,
                                   betas=(params.beta1_gen, params.beta2_gen))
    optimizer_D = torch.optim.Adam(discriminator.parameters(),
                                   lr=params.lr_dis,
                                   betas=(params.beta1_dis, params.beta2_dis))

    # train the model and save
    logging.info('Start training')
    loss_history = train((generator, discriminator),
                         (optimizer_G, optimizer_D), dataloader, params)

    # plot loss history and save
    utils.plot_loss_history(loss_history, output_dir)

    # Generate images over a grid: wavelengths 500–1300 step 50,
    # angles 35–85 step 5 (units presumably nm / degrees — confirm).
    wavelengths = [w for w in range(500, 1301, 50)]
    angles = [a for a in range(35, 86, 5)]

    logging.info(
        'Start generating devices for wavelength range {} to {} and angle range from {} to {} \n'
        .format(min(wavelengths), max(wavelengths), min(angles), max(angles)))
    evaluate(generator, wavelengths, angles, num_imgs=500, params=params)
# ---- Beispiel #5 (scraped example separator; score: 0) ----
    # -- Fragment: interior of a driver function; `optimizer_G`, `generator`,
    # -- `params`, `args`, and `eng` come from the unseen enclosing scope.

    # Step-decay learning-rate scheduler for the generator optimizer.
    scheduler_G = torch.optim.lr_scheduler.StepLR(optimizer_G,
                                                  step_size=params.step_size,
                                                  gamma=params.gamma)

    # load model data (restores generator/optimizer/scheduler state)
    if params.restore_from is not None:
        params.checkpoint = utils.load_checkpoint(params.restore_from,
                                                  generator, optimizer_G,
                                                  scheduler_G)
        logging.info('Model data loaded')

    # Mutually exclusive run modes: single test, grouped test, or train+eval.
    if args.test:
        test(generator, eng, numImgs=args.generate_num, params=params)

    elif args.test_group:
        test_group(generator,
                   eng,
                   numImgs=args.generate_num,
                   params=params,
                   test_num=args.test_num)

    else:
        # train the model and save (numIter == 0 means evaluation-only run)
        if params.numIter != 0:
            logging.info('Start training')
            train(generator, optimizer_G, scheduler_G, eng, params)

        # Generate images and save
        logging.info('Start generating devices for wavelength')
        evaluate(generator, eng, numImgs=args.generate_num, params=params)