Example No. 1
def train_epoch(_epoch, dataloader, model, loss_function):
    # note: relies on the module-level `optimizer`, `BATCH_SIZE` and `train_set`
    # switch to train mode -> enable regularization layers, such as Dropout
    model.train()
    running_loss = 0.0

    for i_batch, sample_batched in enumerate(dataloader, 1):

        # get the inputs (batch)
        inputs, labels, lengths, indices, get_aspect = sample_batched
        # at this point the get_aspect embedding has to be concatenated to the input
        # sort batch (for handling inputs of variable length)
        lengths, (inputs, labels), get_aspect = sort_cnn_batch(
            lengths, (inputs, labels), get_aspect)

        # move the batch to the GPU if one is available
        if torch.cuda.is_available():
            inputs = inputs.cuda()
            labels = labels.cuda()
        # 1 - zero the gradients
        optimizer.zero_grad()

        # 2 - forward pass: compute predicted y by passing x to the model
        outputs = model(inputs, get_aspect)
        # 3 - compute loss
        # convert one-hot targets to class indices for the loss function
        _, labels = labels.squeeze().max(dim=1)
        loss = loss_function(outputs, labels)

        # 4 - backward pass: compute gradients
        loss.backward()

        # 5 - update weights
        optimizer.step()

        running_loss += loss.item()

        # print statistics
        progress(loss=loss.item(),
                 epoch=_epoch,
                 batch=i_batch,
                 batch_size=BATCH_SIZE,
                 dataset_size=len(train_set))

    # return the average loss over the whole epoch
    return running_loss / i_batch
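
The `progress` helper that `train_epoch` calls is not shown in this example. Below is a minimal sketch of a compatible utility, assuming it only overwrites a single console line with the current batch statistics; the keyword names come from the call above, everything else is illustrative.

import sys

def progress(loss, epoch, batch, batch_size, dataset_size):
    # hypothetical stand-in: redraw one console line with the running stats
    n_batches = max(1, dataset_size // batch_size)
    seen = min(batch * batch_size, dataset_size)
    filled = min(30, int(30 * batch / n_batches))
    sys.stdout.write('\rEpoch {:>3} [{}{}] {}/{} - loss: {:.4f}'.format(
        epoch, '=' * filled, ' ' * (30 - filled), seen, dataset_size, loss))
    sys.stdout.flush()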
Example No. 2

This excerpt starts inside a per-artist matching loop: it keeps the most popular
candidate, records the match, and reports progress through helpers.progress.

                best_match = {}
                for a in possible_matches:
                    if best_match == {}:
                        best_match = a
                    elif a["popularity"] > best_match["popularity"]:
                        best_match = a
                found_match = best_match
                # print "best match: " + best_match["name"]
        else:
            covered_artists_file.write("%s\n" % artist)
            # print "none: " + artist

    if found_match != {}:
        write_obj = {
            'name': found_match["name"],
            'id': found_match["id"],
            'genres': found_match["genres"],
            'popularity': found_match["popularity"]
        }
        artist_info_file.write("%s\n" % write_obj)
        covered_artists_file.write("%s\n" % artist)
        found_artists_amount += 1
    counter += 1

    progress = ((counter + 0.0) / artist_amount) * 100
    helpers.progress(progress)

covered_artists_amount += counter

helpers.endProgress("  Artists covered: " + str(covered_artists_amount))
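
The `helpers` module is outside this excerpt. A minimal sketch of what `helpers.progress` and `helpers.endProgress` could look like, assuming `progress` receives a percentage and `endProgress` closes the bar with a summary message; the names match the call sites above, the bodies are only illustrative.

import sys

def progress(percent):
    # hypothetical: redraw a single-line bar for the given percentage (0-100)
    filled = max(0, min(50, int(percent / 2)))
    sys.stdout.write('\r[{}{}] {:5.1f}%'.format('#' * filled,
                                                '-' * (50 - filled), percent))
    sys.stdout.flush()

def endProgress(message):
    # hypothetical: complete the bar and print the closing message
    sys.stdout.write('\r[{}] 100.0%\n{}\n'.format('#' * 50, message))
    sys.stdout.flush()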
Example No. 3
# per-lepton flag buffers that will back the new output branches
_ewkLoose = np.full(nl_max, False, dtype=bool)
_ewkTight = np.full(nl_max, False, dtype=bool)
#output_tree.Branch('_ewkLoose', _ewkLoose, '_ewkLoose[_nL]/O')
#output_tree.Branch('_ewkTight', _ewkTight, '_ewkTight[_nL]/O')

if args.isTest:
    eventRange = xrange(250)
else:
    eventRange = xrange(Chain.GetEntries())

from objectSelection import isEwkLoose, isEwkTight

#output_file.cd('blackJackAndHookers')
# fill the new tree with events passing the chosen skim selection
for entry in eventRange:
    progress(entry, len(eventRange))
    Chain.GetEntry(entry)
    if args.skim == 'ewkino':
        if not isGoodEventEwkino(Chain): continue
    elif args.skim == 'tauAN':
        if not isGoodEventAN17_094(Chain): continue
    elif args.skim == 'FR':
        if not isGoodEventFakeRate(Chain): continue
    elif args.skim == 'jana':
        if not isGoodEventJana(Chain): continue

    # evaluate the EWK ID decisions for every lepton in the event
    for l in xrange(Chain._nL):
        _ewkLoose[l] = isEwkLoose(Chain, l)
        _ewkTight[l] = isEwkTight(Chain, l)
    output_tree.Fill()
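
In this example `progress` is called positionally with the current entry index and the total number of entries. A minimal sketch of such a counter-based reporter, assuming it only reprints when the integer percentage advances; this is an illustration, not the helper actually imported by the script.

import sys

def progress(current, total):
    # hypothetical: print only when the integer percentage changes
    total = max(1, total)
    done = int(100.0 * (current + 1) / total)
    before = int(100.0 * current / total)
    if done != before or current == 0:
        sys.stdout.write('\rProcessing entry {}/{} ({}%)'.format(
            current + 1, total, done))
        sys.stdout.flush()
    if current + 1 == total:
        sys.stdout.write('\n')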
Example No. 4
import logging

def log_optimization_init(i, T, res=0.1, logger=None):
    # log how many of the T model initializations have completed;
    # `progress` returns a value only when a new `res`-sized step is reached
    if logger is None:
        logger = logging.getLogger('root')
    v = progress(i, T, res=res)
    if v is not None:
        logger.info('%0.1f%% Models initialized', v * 100)
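
Judging from the call site, `progress(i, T, res=res)` returns the completed fraction only when a new `res`-sized step is crossed and `None` otherwise. A minimal sketch under that assumption (not the project's actual implementation), followed by a small usage loop.

def progress(i, total, res=0.1):
    # hypothetical gate: report the completed fraction only once per `res` step
    step = max(1, int(round(total * res)))   # items per reporting step
    if (i + 1) % step == 0 or i + 1 == total:
        return float(i + 1) / total
    return None

# example usage with log_optimization_init above
logging.basicConfig(level=logging.INFO)
for i in range(40):
    log_optimization_init(i, 40, res=0.25)   # logs at 25%, 50%, 75% and 100%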