Example 1
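# Count the commits up front so the progress bar knows its total.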
cursor.execute("SELECT COUNT(*) FROM commits")

print

progress.start(cursor.fetchone()[0], prefix="Fetching commits ...")

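# Fetch every (id, sha1) pair, recording each commit and ticking the progress bar per row.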
cursor.execute("SELECT id, sha1 FROM commits")

for commit_id, commit_sha1 in cursor:
    commits[commit_id] = commit_sha1
    pending_commits.add(commit_id)

    progress.update()

progress.end(" %d commits." % len(commits))

print

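# Look up the longest repository name and collect all repository ids in ascending order.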
cursor.execute("SELECT MAX(CHARACTER_LENGTH(name)) FROM repositories")

repository_name_length = cursor.fetchone()[0]

cursor.execute("SELECT id FROM repositories ORDER BY id ASC")

repositories = [repository_id for (repository_id,) in cursor]

def processCommits(process_commits):
    global commits

    processed_commits = set()
Example 2
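                # Load the review's commits; reviews whose meta-data references
                # missing commits are recorded and skipped.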
                try: commits = getReviewCommits(repository, getReview(branch_id))
                except KeyboardInterrupt: sys.exit(1)
                except:
                    progress.write("  review meta-data references missing commits")
                    incorrect_reviews.append((getReview(branch_id), "branches.head = %s" % branch_sha1))
                    continue

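                # A valid review branch should have exactly one head commit.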
                heads = commits.getHeads()

                if len(heads) > 1:
                    progress.write("  multiple heads: r/%d" % review_id)
                    continue

                head = heads.pop()

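                # Re-create the review branch ref, but only when force is set.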
                try:
                    if force: repository.run("update-ref", "refs/heads/%s" % branch_name, head.sha1, "0" * 40)
                    progress.write("  re-created review branch")
                except KeyboardInterrupt: sys.exit(1)
                except:
                    progress.write("  failed to re-create review branch")
                    incorrect_reviews.append((getReview(branch_id), "failed to re-create review branch"))

    progress.end(".")

if incorrect_reviews:
    print "\nReviews that need attention:"

    for review_id, message in incorrect_reviews:
        print "  %5d: %s" % (review_id, message)
Example 3
    def exists_in_db(branch_name):
        return branch_name in branches_in_db

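    # Walk the loose refs under refs/heads recursively; branches missing from
    # the database are reported and, when force is set, deleted.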
    def process(path, prefix=None):
        for entry in os.listdir(path):
            entry_path = os.path.join(path, entry)
            branch_name = os.path.join(prefix, entry) if prefix else entry
            if os.path.isdir(entry_path):
                process(entry_path, branch_name)
            elif not exists_in_db(branch_name):
                progress.write("WARNING[%s]: %s exists in the repository but not in the database!" % (repository.name, branch_name))
                if force: repository.run("update-ref", "-d", "refs/heads/%s" % branch_name)
                progress.write("  deleted from repository")
            processed.add(branch_name)

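    # Check the refs from the repository's ref listing the same way,
    # skipping names that were already handled.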
    for branch_name in refs.keys():
        if branch_name not in processed and not exists_in_db(branch_name):
            progress.write("WARNING[%s]: %s exists in the repository but not in the database!" % (repository.name, branch_name))
            if force: repository.run("update-ref", "-d", "refs/heads/%s" % branch_name)
            progress.write("  deleted from repository")

    process(heads_path)

    progress.end(".")

if incorrect_reviews:
    print "\nReviews that need attention:"

    for review_id, message in incorrect_reviews:
        print "  %5d: %s" % (review_id, message)
Example 4
def test_ndcg(net, data, num_users, ratio, save_file=None, gpus=[0]):
    """Evaluate net."""
    progress = ProgressBar()
    posi_scores = [[] for u in range(num_users)]
    posi_binary = [[] for u in range(num_users)]

    net.eval()
    parallel = len(gpus) > 1
    dtype = torch.FloatTensor if parallel else torch.cuda.FloatTensor
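    # With multiple GPUs, keep inputs on the CPU so data_parallel can scatter
    # them; with a single GPU, build CUDA tensors directly.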
    #data.loader.dataset.set_to_posi()
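    # First pass: score every user's positive outfits.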
    progress.reset(len(data.loader), messsage='Computing positive outfits')
    for idx, inputv in enumerate(data.loader):
        items_text, nega_text, items_img, nega_img, uidx = inputv
        text = tuple(Variable(v.type(dtype)) for v in items_text[0])
        img = tuple(Variable(v.type(dtype)) for v in items_img[0])
        uidx = uidx.view(-1, 1)
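        # One-hot encode the user index into a (batch, num_users) tensor.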
        uidxv = torch.zeros(uidx.shape[0], num_users).scatter_(1, uidx, 1.0)
        uidxv = Variable(uidxv.type(dtype))
        inputv = (text, img, uidxv)
        if parallel:
            scores, binary = data_parallel(net, inputv, gpus)
        else:
            scores, binary = net(*inputv)
        for n, u in enumerate(uidx.view(-1)):
            posi_binary[u].append(binary[n].item())
            posi_scores[u].append(scores[n].item())
        progress.forward()
    progress.end()
    # compute scores for negative outfits
    nega_scores = [[] for u in range(num_users)]
    nega_binary = [[] for u in range(num_users)]
    #data.loader.dataset.set_to_nega(ratio=6)
    progress.reset(len(data.loader), messsage='Computing negative outfits')
    for idx, inputv in enumerate(data.loader):
        posi_text, items_text, posi_img, items_img, uidx = inputv
        for i in range(ratio):
            text = tuple(Variable(v.type(dtype)) for v in items_text[i])
            img = tuple(Variable(v.type(dtype)) for v in items_img[i])
            uidx = uidx.view(-1, 1)
            uidxv = torch.zeros(uidx.shape[0], num_users).scatter_(1, uidx, 1.0)
            uidxv = Variable(uidxv.type(dtype))
            inputv = (text, img, uidxv)
            if parallel:
                scores, binary = data_parallel(net, inputv, gpus)
            else:
                scores, binary = net(*inputv)
            for n, u in enumerate(uidx.view(-1)):
                nega_binary[u].append(binary[n].item())
                nega_scores[u].append(scores[n].item())
        progress.forward()
    progress.end()
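    # Aggregate the per-user score lists into NDCG and ROC/AUC summaries.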
    mean_ndcg, avg_ndcg = metrics.NDCG(posi_scores, nega_scores)
    mean_ndcg_binary, avg_ndcg_binary = metrics.NDCG(
        posi_binary, nega_binary)
    aucs, mean_auc = metrics.ROC(posi_scores, nega_scores)
    aucs_binary, mean_auc_binary = metrics.ROC(posi_binary, nega_binary)
    results = dict(
        mean_ndcg=mean_ndcg,
        avg_ndcg=avg_ndcg,
        mean_ndcg_binary=mean_ndcg_binary,
        avg_ndcg_binary=avg_ndcg_binary,
        aucs=aucs,
        mean_auc=mean_auc,
        aucs_binary=aucs_binary,
        mean_auc_binary=mean_auc_binary)
    print('avg_mean_ndcg:{} avg_mean_auc:{}'.format(
        mean_ndcg.mean(), mean_auc))
    # Save the results, merging with any earlier run stored at the same path.
    if save_file:
        if os.path.exists(save_file):
            results.update(np.load(save_file))
        np.savez(save_file, **results)
    return results
Example 5
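        # Lazily create one 2D histogram per LT key parsed from the filename.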
        if "LT" in filename:
            h2key = pattern.match(filename).group(1)
            if h2key not in lt_h2:
                lt_h2[h2key] = ROOT.TH2F(args.name + h2key,
                                         args.name + ' ' + h2key, ncat, 0,
                                         ncat, ncat, 0, ncat)
        else:
            h2key = None
    progress.update(100. * i / chain.GetEntries())
    if entry.smearerCat[0] >= 0:
        x = cat1[entry.smearerCat[0]]
        y = cat2[entry.smearerCat[0]]
        h2.Fill(x, y)
        if h2key in lt_h2:
            lt_h2[h2key].Fill(x, y)
progress.end()

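# Draw and save the combined 2D category histogram, then one plot per LT key.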
h2.Draw("colz")
c.SetLogz()
c.SaveAs("plots/smearerCat_" + h2.GetName() + "_" + region_name + ".png")
for key in lt_h2:
    lt_h2[key].Draw("colz")
    c.SaveAs("plots/smearerCat_" + lt_h2[key].GetName() + "_" + region_name +
             ".png")

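# Build a table header with the LT keys in natural version order.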
from distutils.version import LooseVersion
header = "{} {:35} {:35} {:11}".format(args.name, "cat1", "cat2", args.name)
sorted_keys = sorted(lt_h2, key=LooseVersion)
for key in sorted_keys:
    header += " {:11}".format(key)