Example #1
File: gbd.py Project: Udopia/gbd
def cli_delete(api: GBD, args):
    if args.hashes and len(args.hashes) > 0:
        if args.force or confirm(
                "Delete attributes of given hashes from '{}'?".format(
                    args.name)):
            api.remove_attributes(args.name, args.hashes)
    elif args.force or confirm(
            "Delete feature '{}' and all associated attributes?".format(
                args.name)):
        api.delete_feature(args.name)
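The confirm() helper used here (and again in Example #6) is not part of this listing; a minimal sketch of such a prompt, assuming it simply asks on stdin and returns a boolean, might look like:

def confirm(prompt):
    # Hypothetical sketch; the project's actual confirm() helper is not shown in these examples.
    answer = input("{} [y/N] ".format(prompt))
    return answer.strip().lower() in ("y", "yes")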
Example #2
File: init.py Project: Udopia/gbd
def init_transform_cnf_to_kis(api: GBD, query, hashes, max_edges, max_nodes):
    api.database.create_feature('kis_local', permissive=True)
    api.database.create_feature('kis_nodes', "empty", permissive=True)
    api.database.create_feature('kis_edges', "empty", permissive=True)
    api.database.create_feature('kis_k', "empty", permissive=True)
    api.database.create_feature('cnf_to_kis', permissive=True)
    api.database.create_feature('kis_to_cnf', permissive=True)
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    run(api, resultset, transform_cnf_to_kis, {
        **api.get_limits(), 'max_edges': max_edges,
        'max_nodes': max_nodes
    })
Example #3
def par2(api: GBD, query, runtimes, timeout, divisor):
    for name in runtimes:
        times = api.query_search(query, [], [name])
        div = len(times) if divisor is None else divisor
        par2 = sum(float(time[1]) if util.is_number(time[1]) and float(time[1]) < timeout else 2*timeout for time in times) / div
        solved = sum(1 if util.is_number(time[1]) and float(time[1]) < timeout else 0 for time in times)
        print(str(round(par2, 2)) + " " + str(solved) + "/" + str(div) + " " + name)
    times = api.query_search(query, [], runtimes)
    div = len(times) if divisor is None else divisor
    vbs_par2 = sum([min(float(val) if util.is_number(val) and float(val) < timeout else 2*timeout for val in row[1:]) for row in times]) / div
    solved = sum(1 if t < timeout else 0 for t in [min(float(val) if util.is_number(val) else 2*timeout for val in row[1:]) for row in times])
    print(str(round(vbs_par2, 2)) + " " + str(solved) + "/" + str(div) + " VBS")
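Several of these examples guard float conversions with util.is_number. Its implementation is not shown here; a minimal sketch of such a check could be:

def is_number(value):
    # Hypothetical sketch, not the project's actual util.is_number.
    try:
        float(value)
        return True
    except (TypeError, ValueError):
        return False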
Example #4
File: gbd.py Project: Udopia/gbd
def cli_info(api: GBD, args):
    if args.name is None:
        for db_str in api.get_databases():
            if len(api.get_features(dbname=db_str)):
                print("Database: {}".format(db_str))
                feat = api.get_material_features(dbname=db_str)
                if len(feat):
                    print("Features: " + " ".join(feat))
                feat = api.get_virtual_features(dbname=db_str)
                if len(feat):
                    print("Virtuals: " + " ".join(feat))
    else:
        info = api.get_feature_info(args.name)
        for key in info:
            print("{}: {}".format(key, info[key]))
Example #5
def greedy_comb(api: GBD, query, runtimes, timeout, size):
    result = api.query_search(query, [], runtimes)
    result = [[float(val) if util.is_number(val) and float(val) < float(timeout) else 2*timeout for val in row] for row in result]
    runtimes.insert(0, "dummy")
    for comb in combinations(range(1, len(runtimes)), size):
        comb_par2 = sum([min(itemgetter(*comb)(row)) for row in result]) / len(result)
        print(str(itemgetter(*comb)(runtimes)) + ": " + str(comb_par2))
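For readers unfamiliar with the combination of itertools.combinations and operator.itemgetter used above, a small standalone illustration (toy data, not project code):

from itertools import combinations
from operator import itemgetter

row = [10.0, 3.2, 7.5, 1.1]  # one instance: dummy column followed by three solver runtimes
for comb in combinations(range(1, len(row)), 2):
    # itemgetter(*comb)(row) picks the runtimes of the chosen solver pair,
    # and min(...) gives the virtual-best runtime of that pair on this instance.
    print(comb, min(itemgetter(*comb)(row)))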
Example #6
File: init.py Project: Udopia/gbd
def init_local(api: GBD, root):
    clocal = util.prepend_context("local", api.context)
    api.database.create_feature(clocal, permissive=True)
    sanitize = [
        path[0] for path in api.query_search(group_by=clocal)
        if not isfile(path[0])
    ]
    if len(sanitize) and confirm(
            "{} files not found. Remove stale entries from local table?".
            format(len(sanitize))):
        for paths in slice_iterator(sanitize, 1000):
            api.database.delete_values("local", paths)
    resultset = []
    #if clocal in api.get_features():
    for suffix in config.suffix_list(api.context):
        for path in glob.iglob(root + "/**/*" + suffix, recursive=True):
            if not len(api.query_search("{}='{}'".format(clocal, path))):
                resultset.append(("", path))
    run(api, resultset, compute_hash, api.get_limits())
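slice_iterator() batches the stale paths before deletion; it is not shown in this listing. A plausible sketch (an assumption, not the project's code):

def slice_iterator(data, size):
    # Hypothetical sketch: yield successive chunks of at most `size` elements.
    for i in range(0, len(data), size):
        yield data[i:i + size]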
Example #7
File: init.py Project: Udopia/gbd
def init_networkit_features(api: GBD, query, hashes):
    try:
        import networkit as nk
    except ImportError as e:
        raise GBDException(
            "Module 'networkit' not found. Setup https://networkit.github.io/")
    nk.setNumberOfThreads(min(multiprocessing.cpu_count(), api.jobs))
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    for (hash, local) in resultset:
        result = networkit_features(hash, local, {})
        eprint(result['hashvalue'])
        for att in result['attributes']:
            eprint(att[1] + "=" + str(att[2]))
Example #8
File: server.py Project: Udopia/gbd
def quick_search_results():
    query = request_query(request)
    features = request_features(request)
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        try:
            rows = gbd_api.query_search(query, resolve=features)
            result = json.dumps(
                [dict(zip(["GBDhash"] + features, row)) for row in rows])
            return json_response(
                result, "Sending query results (Query='{}')".format(query),
                request.remote_addr)
        except (GBDException, DatabaseException) as err:
            return error_response("{}, {}".format(type(err), str(err)),
                                  request.remote_addr,
                                  errno=500)
Example #9
File: server.py Project: Udopia/gbd
def get_attribute(feature, hashvalue):
    app.logger.info("Resolving '{}' with feature '{}' for IP {}".format(
        hashvalue, feature, request.remote_addr))
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        try:
            records = gbd_api.query_search(hashes=[hashvalue],
                                           resolve=[feature])
            if len(records) == 0:
                return error_response("Hash '{}' not found".format(hashvalue),
                                      request.remote_addr)
            return records[0][1]
        except (GBDException, DatabaseException) as err:
            return error_response("{}, {}".format(type(err), str(err)),
                                  request.remote_addr,
                                  errno=500)
Example #10
File: server.py Project: Udopia/gbd
def get_csv_file(context='cnf'):
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        query = request_query(request)
        features = request_features(request)
        group = util.prepend_context("hash", context)
        try:
            results = gbd_api.query_search(query, [], features, group_by=group)
        except (GBDException, DatabaseException) as err:
            return error_response("{}, {}".format(type(err), str(err)),
                                  request.remote_addr,
                                  errno=500)
        titles = " ".join([group] + features) + "\n"
        content = "\n".join(
            [" ".join([str(entry) for entry in result]) for result in results])
        return file_response(titles + content, "query_result.csv", "text/csv",
                             request.remote_addr)
Example #11
File: server.py Project: Udopia/gbd
def get_file(hashvalue, context='cnf'):
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        hash = util.prepend_context("hash", context)
        local = util.prepend_context("local", context)
        filename = util.prepend_context("filename", context)
        records = gbd_api.query_search(hashes=[hashvalue],
                                       resolve=[local, filename],
                                       collapse="MIN",
                                       group_by=hash)
        if len(records) == 0:
            return error_response("Hash '{}' not found".format(hashvalue),
                                  request.remote_addr)
        path, file = operator.itemgetter(1, 2)(records[0])
        if not os.path.exists(path):
            return error_response("Files temporarily not accessible",
                                  request.remote_addr)
        return path_response(path, file, 'text/plain', request.remote_addr)
Example #12
File: server.py Project: Udopia/gbd
def get_url_file(context='cnf'):
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        query = request_query(request)
        try:
            result = gbd_api.query_search(query,
                                          group_by=util.prepend_context(
                                              "hash", context))
        except (GBDException, DatabaseException) as err:
            return error_response("{}, {}".format(type(err), str(err)),
                                  request.remote_addr,
                                  errno=500)
        content = "\n".join([
            flask.url_for("get_file",
                          hashvalue=row[0],
                          context=context,
                          _external=True) for row in result
        ])
        return file_response(content, "query_result.uri", "text/uri-list",
                             request.remote_addr)
Example #13
File: server.py Project: Udopia/gbd
def get_all_attributes(hashvalue, context='cnf'):
    app.logger.info("Listing all attributes of hashvalue {} for IP {}".format(
        hashvalue, request.remote_addr))
    with GBD(app.config['database'], verbose=app.config['verbose']) as gbd_api:
        features = app.config['features']['all']
        try:
            records = gbd_api.query_search(hashes=[hashvalue],
                                           resolve=features,
                                           group_by=util.prepend_context(
                                               "hash", context))
            if len(records) == 0:
                return error_response("Hash '{}' not found".format(hashvalue),
                                      request.remote_addr)
            return json_response(json.dumps(dict(zip(features, records[0]))),
                                 "Sending list of attributes",
                                 request.remote_addr)
        except (GBDException, DatabaseException) as err:
            return error_response("{}, {}".format(type(err), str(err)),
                                  request.remote_addr,
                                  errno=500)
Example #14
def optimal_comb(api: GBD, args):
    result = api.query_search(args.query, [], args.runtimes)
    result = [[
        int(float(val)) if is_number(val) and float(val) < float(args.tlim)
        else int(2 * args.tlim) for val in row[1:]
    ] for row in result]

    cnf = dimacs.DIMACSPrinter()
    _ACT = [cnf.create_literal() for _ in range(0, len(args.runtimes))]
    _MAX = get_bitvector(cnf, int(pow(2, _BW - 1) - 1))
    for c in encode_at_most_k_constraint_ltseq(cnf, args.size, _ACT):
        cnf.consume_clause(c)

    # encode row-wise minima
    MINS = []
    for row in result:
        i = 0
        B0_ = get_bitvector(cnf, int(row[0]))
        B0 = if_then_else(cnf, B0_, _MAX, _ACT[i])
        for rt in row[1:]:
            i = i + 1
            B1_ = get_bitvector(cnf, int(rt))
            B1 = if_then_else(cnf, B1_, _MAX, _ACT[i])
            Bcarry = get_carry_bits(cnf, B1, [-i for i in B0])
            Bmin = if_then_else(cnf, B0, B1, Bcarry[_BW - 1])
            B0 = Bmin
        MINS.append(B0)  # B0 is now minimum of row

    # encode sum of minima
    A = MINS[0]
    for B in MINS[1:]:
        SUM = get_sum_bits(cnf, A, B)
        A = SUM

    solver = Cadical(bootstrap_with=cnf.clauses, with_proof=False)
    result = solver.solve()
    if result == True:
        model = solver.get_model()
        print(slice_model(model, _ACT))
        print(decode_bitvector(slice_model(model, A)))
Example #15
File: test_init.py Project: Udopia/gbd
    def test_init_local(self):
        context = 'cnf'
        db = "/raid/gbd/meta.db"
        eprint("Sanitizing local path entries ... ")
        feature = "local" if context == 'cnf' else "{}.local".format(context)
        with GBD(db) as api:
            paths = [path[0] for path in api.query_search(group_by=feature)]
            eprint(paths)
            hashes = [hash[0] for hash in api.query_search()]
            eprint(hashes)
            feature = api.query_search(resolve=["family"])
            eprint(feature)
            values = api.query_search(hashes=[hashes[0]],
                                      resolve=["family"])[0][1]
            eprint(values)
            values = api.query_search(hashes=[hashes[0]],
                                      resolve=["local"])[0][1].split(',')
            eprint(values)
            records = api.query_search(hashes=[hashes[0]],
                                       resolve=["local", "filename"],
                                       collapse="MIN")
            eprint(records)
Example #16
def optimal_comb(api: GBD, query, runtimes, timeout, k):
    result = api.query_search(query, [], runtimes)
    result = [[
        int(float(val))
        if is_number(val) and float(val) < float(timeout) else int(2 * timeout)
        for val in row[1:]
    ] for row in result]
    dataset = pd.DataFrame(result, columns=runtimes)
    dataset = dataset[(dataset != 2 * timeout).any(axis='columns')]
    model = mip.Model()
    instance_solver_vars = [[
        model.add_var(f'x_{i}_{j}', var_type=mip.BINARY)
        for j in range(dataset.shape[1])
    ] for i in range(dataset.shape[0])]
    solver_vars = [
        model.add_var(f's_{j}', var_type=mip.BINARY)
        for j in range(dataset.shape[1])
    ]
    for var_list in instance_solver_vars:  # per-instance constraints
        model.add_constr(mip.xsum(var_list) == 1)
    for j in range(dataset.shape[1]):  # per-solver-constraints
        model.add_constr(
            mip.xsum(instance_solver_vars[i][j]
                     for i in range(dataset.shape[0])) <=
            dataset.shape[0] * solver_vars[j])  # "Implies" in Z3
    model.add_constr(mip.xsum(solver_vars) <= k)
    model.objective = mip.minimize(
        mip.xsum(instance_solver_vars[i][j] * int(dataset.iloc[i, j])
                 for i in range(dataset.shape[0])
                 for j in range(dataset.shape[1])))
    print(dataset.sum().min())
    print(model.optimize())
    print(model.objective_value)
    for index, item in enumerate([var.x for var in solver_vars]):
        if item > 0:
            print(runtimes[index])
Example #17
File: gbd.py Project: Udopia/gbd
def cli_create(api: GBD, args):
    api.create_feature(args.name, args.unique)
Example #18
def scatter(api: GBD, query, runtimes, timeout, groups):
    plt.rcParams.update({'font.size': 6})
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.set_aspect('equal', adjustable='box')
    plt.axline((0, 0), (1, 1), linewidth=0.5, color='grey', zorder=0)
    plt.axhline(y=timeout,
                xmin=0,
                xmax=1,
                linewidth=0.5,
                color='grey',
                zorder=0)
    plt.axvline(x=timeout,
                ymin=0,
                ymax=1,
                linewidth=0.5,
                color='grey',
                zorder=0)
    plt.xlabel(runtimes[0], fontsize=8)
    plt.ylabel(runtimes[1], fontsize=8)
    markers = itertools.cycle(plt.Line2D.markers.items())
    next(markers)
    next(markers)
    plt.rcParams['axes.prop_cycle'] = plt.cycler(color=coolors)
    if not groups:
        groups = []

    result = api.query_search(query, [], runtimes)
    dfall = pd.DataFrame(result, columns=["hash"] + runtimes)
    for r in runtimes:
        dfall[r] = pd.to_numeric(dfall[r], errors='coerce')
        dfall.loc[(dfall[r] >= timeout) | pd.isna(dfall[r]), r] = timeout
    print(dfall)

    plots = []
    title = []
    for g in groups:
        color = next(ax._get_lines.prop_cycler)['color']
        marker = next(markers)[0]

        result = api.query_search(query + " and (" + g + ")", [], runtimes)
        df = pd.DataFrame(result, columns=["hash"] + runtimes)
        for r in runtimes:
            df[r] = pd.to_numeric(df[r], errors='coerce')
            df.loc[(df[r] >= timeout) | pd.isna(df[r]), r] = timeout
        dfall = pd.concat([dfall, df]).drop_duplicates(keep=False)

        plots = plots + [
            plt.scatter(data=df,
                        x=runtimes[0],
                        y=runtimes[1],
                        c=color,
                        marker=marker,
                        alpha=0.7,
                        linewidth=0.7,
                        zorder=2)
        ]
        title = title + [g]

    plt.scatter(data=dfall,
                x=runtimes[0],
                y=runtimes[1],
                marker='.',
                alpha=0.7,
                linewidth=0.7,
                color="black",
                zorder=1)

    plt.legend(tuple(plots),
               tuple(title),
               scatterpoints=1,
               bbox_to_anchor=(0., 1.02, 1., .102),
               loc='lower left',
               ncol=5,
               mode="expand",
               borderaxespad=0.)
    plt.savefig('out.svg', transparent=True, bbox_inches='tight', pad_inches=0)
    plt.show()
Example #19
def cdf(api: GBD, query, runtimes, timeout, title):
    plt.rcParams.update({'font.size': 8})
    result = api.query_search(query, [], runtimes)
    result = [[
        float(val)
        if util.is_number(val) and float(val) < float(timeout) else timeout
        for val in row[1:]
    ] for row in result]
    df = pd.DataFrame(result)
    df.columns = runtimes
    df['vbs'] = df[runtimes].min(axis=1)
    print(df)

    plt.rcParams['axes.prop_cycle'] = plt.cycler(color=coolors2 + coolors)

    params = {
        'legend.fontsize': 'small',
        'axes.labelsize': 6,
        'axes.titlesize': 6,
        'xtick.labelsize': 6,
        'ytick.labelsize': 6,
        'axes.titlepad': 10
    }
    plt.rcParams.update(params)

    fig = plt.figure()
    ax = fig.add_subplot(111)

    plt.xlim(0, timeout + 100)
    plt.grid(linestyle='-', linewidth=.5)
    plt.axvline(x=timeout, linestyle='dashed', color='black', linewidth=.5)
    #plt.ylim(0, len(result))

    # Build Title
    if (title is None):
        title = []
        for elem in query.split('=')[1:]:
            parts = elem.strip().split(' ')
            title = title + [parts[0].replace('_', ' ').title()]
        ax.set_title(", ".join(title), fontsize=6, variant='small-caps')
    else:
        ax.set_title(title, fontsize=6, variant='small-caps')

    df2 = pd.DataFrame(index=range(timeout + 100), columns=runtimes)
    df2 = df2.fillna(0)  # fillna returns a new frame; keep the result
    for col in ['vbs'] + runtimes:
        df2[col] = [0] * (timeout + 100)
        for val in df[col]:
            if val < timeout:
                df2.loc[round(val), col] = df2[col][round(val)] + 1

        sum = 0
        for val in range(1, timeout + 100):
            df2.loc[val, col + "_"] = NaN
            if df2[col][val] != 0:
                df2.loc[val, col + "_"] = sum
            sum = sum + df2.loc[val, col]
            df2.loc[val, col] = sum

    markers = itertools.cycle([
        'o', 'v', '^', '<', '>', 'p', 'P', '*', 'h', 'H', '8', 'X', 'd', 'D',
        's'
    ])
    #next(markers)
    #next(markers)
    order = len(runtimes) + 1
    for col in ['vbs'] + runtimes:
        color = next(ax._get_lines.prop_cycler)['color']
        ax.plot(df2[col],
                zorder=order,
                linestyle='-',
                linewidth=.5,
                color=color)
        ax.plot(df2[str(col) + "_"],
                label=col,
                zorder=order,
                fillstyle='none',
                marker=next(markers)[0],
                alpha=.9,
                markeredgewidth=.5,
                markersize=3,
                drawstyle='steps-post',
                color=color)
        order = order - 1

    plt.legend(ncol=2, loc='lower right')
    plt.savefig('out.svg', transparent=True, bbox_inches='tight', pad_inches=0)
    plt.show()
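The nested loop above accumulates, per column, how many instances are solved within each integer second. The same step data for a single column can be derived more compactly; a sketch using numpy, assuming the df and timeout from this example:

import numpy as np

solved_times = np.sort(df.loc[df['vbs'] < timeout, 'vbs'].to_numpy())  # runtimes of instances solved within the timeout
counts = np.arange(1, len(solved_times) + 1)  # after solved_times[k-1] seconds, k instances are solved
# plt.step(solved_times, counts, where='post') would draw the corresponding CDF curve.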
Example #20
File: init.py Project: Udopia/gbd
def init_gate_features(api: GBD, query, hashes):
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    run(api, resultset, gate_features, api.get_limits())
Example #21
File: gbd.py Project: Udopia/gbd
def cli_set(api: GBD, args):
    api.set_attribute(args.assign[0], args.assign[1], None, args.hashes)
Example #22
File: gbd.py Project: Udopia/gbd
def main():
    parser = argparse.ArgumentParser(description='GBD Benchmark Database')

    parser.add_argument('-d',
                        "--db",
                        help='Specify database to work with',
                        default=os.environ.get('GBD_DB'),
                        nargs='?')
    parser.add_argument('-j',
                        "--jobs",
                        help='Specify number of parallel jobs',
                        default=1,
                        nargs='?')
    parser.add_argument(
        '-t',
        '--tlim',
        help=
        "Time limit (sec) per instance for 'init' sub-commands (also used for score calculation in 'eval' and 'plot')",
        default=5000,
        type=int)
    parser.add_argument(
        '-m',
        '--mlim',
        help="Memory limit (MB) per instance for 'init' sub-commands",
        default=2000,
        type=int)
    parser.add_argument(
        '-f',
        '--flim',
        help=
        "File size limit (MB) per instance for 'init' sub-commands which create files",
        default=1000,
        type=int)
    parser.add_argument(
        '-s',
        "--separator",
        help="Feature separator (delimiter used in import and output",
        choices=[" ", ",", ";"],
        default=" ")
    parser.add_argument(
        "--join-type",
        help="Join Type: treatment of missing values in queries",
        choices=["INNER", "OUTER", "LEFT"],
        default="LEFT")
    parser.add_argument(
        '-v',
        '--verbose',
        help='Print additional (or diagnostic) information to stderr',
        action='store_true')
    parser.add_argument(
        '-c',
        '--context',
        default='cnf',
        choices=config.contexts(),
        help=
        'Select context (affects hash-selection and feature-extraction in init)'
    )

    subparsers = parser.add_subparsers(help='Available Commands:',
                                       required=True,
                                       dest='gbd command')

    # INITIALIZATION
    parser_init = subparsers.add_parser('init', help='Initialize Database')
    parser_init_subparsers = parser_init.add_subparsers(
        help='Select Initialization Procedure:',
        required=True,
        dest='init what?')
    # init local paths:
    parser_init_local = parser_init_subparsers.add_parser(
        'local', help='Initialize Local Hash/Path Entries')
    parser_init_local.add_argument('path',
                                   type=directory_type,
                                   help="Path to benchmarks")
    parser_init_local.set_defaults(func=cli_init_local)
    # init base features:
    parser_init_base_features = parser_init_subparsers.add_parser(
        'base_features', help='Initialize Base Features')
    add_query_and_hashes_arguments(parser_init_base_features)
    parser_init_base_features.set_defaults(func=cli_init_base_features)
    # init gate features:
    parser_init_gate_features = parser_init_subparsers.add_parser(
        'gate_features', help='Initialize Gate Features')
    add_query_and_hashes_arguments(parser_init_gate_features)
    parser_init_gate_features.set_defaults(func=cli_init_gate_features)
    # generate kis instances from cnf instances:
    parser_init_cnf2kis = parser_init_subparsers.add_parser(
        'cnf2kis', help='Generate KIS Instances from CNF Instances')
    add_query_and_hashes_arguments(parser_init_cnf2kis)
    parser_init_cnf2kis.add_argument(
        '-e',
        '--max_edges',
        default=0,
        type=int,
        help='Maximum Number of Edges (0 = unlimited)')
    parser_init_cnf2kis.add_argument(
        '-n',
        '--max_nodes',
        default=0,
        type=int,
        help='Maximum Number of Nodes (0 = unlimited)')
    parser_init_cnf2kis.set_defaults(func=cli_init_cnf2kis)
    # init iso-hash:
    parser_init_iso = parser_init_subparsers.add_parser(
        'isohash',
        help='Initialize Isomorphic Hash (MD5 of sorted degree sequence)')
    add_query_and_hashes_arguments(parser_init_iso)
    parser_init_iso.set_defaults(func=cli_init_iso)

    # GBD HASH
    parser_hash = subparsers.add_parser('hash',
                                        help='Print hash for a single file')
    parser_hash.add_argument(
        '-c',
        '--context',
        default='cnf',
        choices=config.contexts(),
        help='Select context (affects hashes and features)')
    parser_hash.add_argument('path',
                             type=file_type,
                             help="Path to one benchmark")
    parser_hash.set_defaults(func=cli_hash)

    # GBD GET $QUERY
    parser_get = subparsers.add_parser(
        'get', help='Get data by query (or hash-list via stdin)')
    add_query_and_hashes_arguments(parser_get)
    parser_get.add_argument('-r',
                            '--resolve',
                            help='List of features to resolve against',
                            nargs='+')
    parser_get.add_argument(
        '-c',
        '--collapse',
        default='group_concat',
        choices=['group_concat', 'min', 'max', 'avg', 'count', 'sum'],
        help='Treatment of multiple values per hash (or grouping value resp.)')
    parser_get.add_argument('-g',
                            '--group_by',
                            default='hash',
                            help='Group by specified attribute value')
    parser_get.set_defaults(func=cli_get)

    # GBD SET
    parser_set = subparsers.add_parser(
        'set', help='Set specified attribute-value for query result')
    parser_set.add_argument('assign', type=key_value_type, help='key=value')
    add_query_and_hashes_arguments(parser_set)
    parser_set.set_defaults(func=cli_set)

    # CREATE/DELETE/MODIFY FEATURES
    parser_create = subparsers.add_parser('create',
                                          help='Create a new feature')
    parser_create.add_argument('name',
                               type=column_type,
                               help='Name of feature')
    parser_create.add_argument(
        '-u',
        '--unique',
        help='Unique constraint: specify default-value of feature')
    parser_create.set_defaults(func=cli_create)

    parser_delete = subparsers.add_parser(
        'delete',
        help=
        'Delete all values associated with given hashes (via argument or stdin) or remove feature if no hashes are given'
    )
    parser_delete.add_argument('--hashes',
                               help='Hashes',
                               nargs='*',
                               default=[])
    parser_delete.add_argument('name',
                               type=column_type,
                               help='Name of feature')
    parser_delete.add_argument('-f',
                               '--force',
                               action='store_true',
                               help='Do not ask for confirmation')
    parser_delete.set_defaults(func=cli_delete)

    parser_rename = subparsers.add_parser('rename', help='Rename feature')
    parser_rename.add_argument('old_name',
                               type=column_type,
                               help='Old name of feature')
    parser_rename.add_argument('new_name',
                               type=column_type,
                               help='New name of feature')
    parser_rename.set_defaults(func=cli_rename)

    # GET META INFO
    parser_info = subparsers.add_parser(
        'info', help='Print info about available features')
    parser_info.add_argument('name',
                             type=column_type,
                             help='Print info about specified feature',
                             nargs='?')
    parser_info.set_defaults(func=cli_info)

    # SCORE CALCULATION
    parser_eval = subparsers.add_parser('eval',
                                        help='Evaluate Runtime Features')
    parser_eval_subparsers = parser_eval.add_subparsers(
        help='Select Evaluation Procedure', required=True, dest='eval type')

    parser_eval_par2 = parser_eval_subparsers.add_parser(
        'par2', help='Calculate PAR-2 Score')
    add_query_and_hashes_arguments(parser_eval_par2)
    parser_eval_par2.add_argument('-r',
                                  '--runtimes',
                                  help='List of runtime features',
                                  nargs='+')
    parser_eval_par2.add_argument(
        '-d',
        '--divisor',
        type=int,
        help='Overwrite Divisor used for Averaging Scores',
        nargs='?')
    parser_eval_par2.set_defaults(func=cli_eval_par2)

    parser_eval_vbs = parser_eval_subparsers.add_parser('vbs',
                                                        help='Calculate VBS')
    add_query_and_hashes_arguments(parser_eval_vbs)
    parser_eval_vbs.add_argument('-r',
                                 '--runtimes',
                                 help='List of runtime features',
                                 nargs='+')
    parser_eval_vbs.set_defaults(func=cli_eval_vbs)

    parser_eval_comb = parser_eval_subparsers.add_parser(
        'comb', help='Calculate VBS of Solver Combinations')
    add_query_and_hashes_arguments(parser_eval_comb)
    parser_eval_comb.add_argument('-r',
                                  '--runtimes',
                                  help='List of runtime features',
                                  nargs='+')
    parser_eval_comb.add_argument('-k',
                                  '--size',
                                  default=2,
                                  type=int,
                                  help='Number of Solvers per Combination')
    parser_eval_comb.set_defaults(func=cli_eval_combinations)

    # PLOTS
    parser_plot = subparsers.add_parser('plot', help='Plot Runtimes')
    parser_plot_subparsers = parser_plot.add_subparsers(help='Select Plot',
                                                        required=True,
                                                        dest='plot type')

    parser_plot_scatter = parser_plot_subparsers.add_parser(
        'scatter', help='Scatter Plot')
    add_query_and_hashes_arguments(parser_plot_scatter)
    parser_plot_scatter.add_argument('-r',
                                     '--runtimes',
                                     help='Two runtime features',
                                     nargs=2)
    parser_plot_scatter.add_argument(
        '-g',
        '--groups',
        help='Highlight specific groups (e.g. family=cryptography)',
        nargs='+')
    parser_plot_scatter.set_defaults(func=cli_plot_scatter)

    parser_plot_cdf = parser_plot_subparsers.add_parser('cdf', help='CDF Plot')
    add_query_and_hashes_arguments(parser_plot_cdf)
    parser_plot_cdf.add_argument('-r',
                                 '--runtimes',
                                 help='List of runtime features',
                                 nargs='+')
    parser_plot_cdf.add_argument('--title', help='Plot Title')
    parser_plot_cdf.set_defaults(func=cli_plot_cdf)

    # GRAPHS
    parser_graph = subparsers.add_parser('graph', help='Visualize Formula')
    parser_graph.add_argument('path', type=file_type, help='CNF File')
    parser_graph.add_argument('proof', type=file_type, help='Proof File')
    parser_graph.set_defaults(func=cli_graph)

    # PARSE ARGUMENTS
    args = parser.parse_args()
    if not args.db:
        eprint("""Error: No database path is given. 
A database path can be given in two ways:
-- by setting the environment variable GBD_DB
-- by giving a path via --db=[path]
A database file containing some attributes of instances used in the SAT Competitions can be obtained at http://gbd.iti.kit.edu/getdatabase"""
               )
    elif len(sys.argv) > 1:
        try:
            with GBD(args.db, args.context, int(args.jobs), args.tlim,
                     args.mlim, args.flim, args.separator, args.join_type,
                     args.verbose) as api:
                if hasattr(args, 'hashes') and not sys.stdin.isatty():
                    if not args.hashes or len(args.hashes) == 0:
                        args.hashes = read_hashes()  # read hashes from stdin
                args.func(api, args)
        except GBDException as err:
            eprint("GBD Exception: " + str(err))
            sys.exit(1)
        except DatabaseException as err:
            eprint("Database Exception: " + str(err))
            sys.exit(1)
        except SchemaException as err:
            eprint("Schema Exception: " + str(err))
            sys.exit(1)
    else:
        parser.print_help()
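main() depends on two helpers that this listing does not include: add_query_and_hashes_arguments() and read_hashes(). Based on how args.query and args.hashes are consumed in the other examples, hypothetical sketches might look like this (assumptions, not the project's actual code):

import sys

def add_query_and_hashes_arguments(parser):
    # Hypothetical: a GBD query string plus an optional list of hashes.
    parser.add_argument('query', nargs='?', default=None,
                        help='GBD query, e.g. "family=cryptography"')
    parser.add_argument('--hashes', help='Hashes', nargs='*', default=[])

def read_hashes():
    # Hypothetical: read one hash per non-empty line from stdin.
    return [line.strip() for line in sys.stdin if line.strip()]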
Example #23
File: gbd.py Project: Udopia/gbd
def cli_rename(api: GBD, args):
    api.rename_feature(args.old_name, args.new_name)
Example #24
def vbs(api: GBD, query, runtimes, timeout, separator):
    result = api.query_search(query, [], runtimes)
    resultset = [(row[0], min(float(val) if util.is_number(val) else 2*timeout for val in row[1:])) for row in result]
    for result in resultset:
        print(separator.join([(str(item or '')) for item in result]))
Example #25
File: server.py Project: Udopia/gbd
def main():
    pwd = os.path.dirname(os.path.abspath(gbd_server.__file__))
    parser = argparse.ArgumentParser(
        description=
        'Web and micro services to access the global benchmark database.')
    parser.add_argument('-d',
                        "--db",
                        help='Specify database to work with',
                        default=os.environ.get('GBD_DB'),
                        nargs='?')
    parser.add_argument('-l',
                        "--logdir",
                        help='Specify logging dir',
                        default=os.environ.get('GBD_LOGGING_DIR') or pwd,
                        nargs='?')
    parser.add_argument('-p',
                        "--port",
                        help='Specify port on which to listen',
                        type=int)
    parser.add_argument('-v',
                        "--verbose",
                        help='Verbose Mode',
                        action='store_true')
    args = parser.parse_args()
    formatter = logging.Formatter(
        fmt=
        '[%(asctime)s, %(name)s, %(levelname)s] %(module)s.%(filename)s.%(funcName)s():%(lineno)d\n%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger().setLevel(logging.DEBUG)
    # Add sys.stdout to logging output
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    console_handler.setLevel(logging.INFO)
    logging.getLogger().addHandler(console_handler)
    # Add handler to write in rotating logging files
    file_handler = TimedRotatingFileHandler(os.path.join(
        args.logdir, "gbd-server-log"),
                                            when="midnight",
                                            backupCount=10)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.WARNING)
    logging.getLogger().addHandler(file_handler)
    global app
    if not args.db:
        app.logger.error("No Database Given")
        exit(1)
    app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)
    app.config['database'] = args.db
    app.config['verbose'] = args.verbose
    try:
        with GBD(app.config['database'], verbose=app.config['verbose']) as gbd:
            app.config['dbnames'] = gbd.get_databases()
            if "main" in app.config['dbnames']:
                app.config['dbnames'].remove("main")
            app.config['features'] = {'all': gbd.get_features()}
            for context in config.contexts():
                local = util.prepend_context("local", context)
                if local in app.config['features']['all']:
                    app.config['features']['all'].remove(local)
            app.config['dbpaths'] = dict()
            for db in app.config['dbnames']:
                if db != 'main':
                    app.config['features'][db] = gbd.get_features(dbname=db)
                    for context in config.contexts():
                        local = util.prepend_context("local", context)
                        if local in app.config['features'][db]:
                            app.config['features'][db].remove(local)
                    app.config['dbpaths'][db] = gbd.get_database_path(db)
    except Exception as e:
        app.logger.error(str(e))
        exit(1)
    app.static_folder = os.path.join(pwd, "static")
    app.template_folder = os.path.join(pwd, "templates-vue")
    #app.run(host='0.0.0.0', port=args.port)
    from waitress import serve
    serve(app, host="0.0.0.0", port=args.port or 5000)  # use --port if given, else default to 5000
Example #26
File: gbd.py Project: Udopia/gbd
def cli_get(api: GBD, args):
    resultset = api.query_search(args.query, args.hashes, args.resolve,
                                 args.collapse, args.group_by)
    for result in resultset:
        print(args.separator.join([(str(item or '')) for item in result]))
Example #27
File: init.py Project: Udopia/gbd
def init_iso_hash(api: GBD, query, hashes):
    if not api.feature_exists("isohash"):
        api.create_feature("isohash", "empty")
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    run(api, resultset, compute_iso_hash, api.get_limits())