def gate_features(hashvalue, filename, args):
    """Extract gate-structure features from *filename*.

    Returns a list of (feature-name, hashvalue, value) triples; float values
    that are whole numbers are narrowed to int before being returned.
    """
    eprint('Extracting gate features from {}'.format(filename))
    record = extract_gate_features(filename, args['tlim'], args['mlim'])
    eprint('Done with gate features from {}'.format(filename))
    triples = []
    for key, value in record.items():
        if isinstance(value, float) and value.is_integer():
            value = int(value)  # store 3.0 as 3
        triples.append((key, hashvalue, value))
    return triples
def gbd_hash_inner(file):
    # Compute the GBD hash of a CNF byte stream: an MD5 digest over the
    # normalized clause data only.  Digits and '-' are hashed with single
    # spaces between tokens; comment ('c') and header ('p') lines are skipped;
    # a trailing ' 0' clause delimiter is appended if the stream did not end
    # with one.  Reads one byte at a time, so the exact statement order of the
    # small state machine below is load-bearing.
    Tstart = time.time()
    space = False   # a separator was seen since the last hashed token
    skip = False    # currently inside a comment/header line
    start = True    # no token hashed yet (suppresses a leading space)
    cldelim = True  # last hashed token was a clause-terminating '0'
    hash_md5 = hashlib.md5()
    for byte in iter(lambda: file.read(1), b''):
        if not skip and (byte >= b'0' and byte <= b'9' or byte == b'-'):
            # a lone '0' token (preceded by space or at stream start) ends a clause
            cldelim = byte == b'0' and (space or start)
            start = False
            if space:
                space = False
                hash_md5.update(b' ')  # collapse any whitespace run to one space
            hash_md5.update(byte)
        elif byte <= b' ':
            space = not start  # remember non-leading space characters
            skip = skip and byte != b'\n' and byte != b'\r'  # comment line ended
        else:  #byte == b'c' or byte == b'p':
            skip = True  # do not hash comment and header line
    if not cldelim:
        hash_md5.update(b' 0')  # ensure the data ends on a clause delimiter
    Tend = time.time()
    eprint("Seconds to hash: {0:5.2f}".format(Tend - Tstart))
    return hash_md5.hexdigest()
def set_attribute(self, feature, value, hash_list, force):
    """Assign *value* to *feature* for every hash in *hash_list*.

    For unique features the value is REPLACEd (optionally after a forced
    DELETE of existing rows); for multi-valued features the (hash, value)
    pair is INSERTed.  Raises ValueError if *feature* is virtual or unknown.
    """
    if not feature in self.get_material_features():
        raise ValueError(
            "Attribute '{}' is not available (or virtual)".format(feature))
    # NOTE(review): values and hashes are interpolated directly into SQL text;
    # quotes in the data would break the statement (potential SQL injection).
    values = ', '.join(
        ['("{}", "{}")'.format(hash, value) for hash in hash_list])
    if self.database.table_unique(feature):
        if force:
            # drop existing rows first so REPLACE cannot trip the unique trigger
            self.database.submit(
                'DELETE FROM {} WHERE hash IN ("{}")'.format(
                    feature, '", "'.join(hash_list)))
        try:
            self.database.submit(
                'REPLACE INTO {} (hash, value) VALUES {}'.format(
                    feature, values))
        except sqlite3.IntegrityError as err:
            #thrown if existing value is not the default value or equal to the value to be set
            #requires the unique on insert-triggers introduced in version 3.0.9
            eprint(str(err) + ": Use the force!")
    else:
        try:
            self.database.submit(
                'INSERT INTO {} (hash, value) VALUES {}'.format(
                    feature, values))
        except Exception as err:
            #thrown if hash+value combination is already set
            #requires the unique constraint introduced in version 3.0.9
            eprint(err)
def transform_cnf_to_kis(cnfhash, cnfpath, args):
    """Transform the CNF at *cnfpath* into a k-ISP instance next to it.

    Returns cross-reference attribute triples linking the CNF and KIS hashes
    (plus KIS metadata on success).  Raises GBDException when arguments are
    missing or the target file already exists.
    """
    if not cnfhash or not cnfpath:
        raise GBDException(
            "Arguments missing: transform_cnf_to_kis({}, {})".format(
                cnfhash, cnfpath))
    # strip any known CNF suffix, then append the .kis suffix
    kispath = reduce(
        lambda path, suffix: path[:-len(suffix)]
        if path.endswith(suffix) else path, config.suffix_list('cnf'), cnfpath)
    kispath = kispath + ".kis"
    if isfile(kispath):
        raise GBDException("{} already exists. Aborting.".format(
            basename(kispath)))
    eprint('Transforming {} to k-ISP {}'.format(cnfpath, kispath))
    result = cnf2kis(cnfpath, kispath, args['max_edges'], args['max_nodes'],
                     args['tlim'], args['mlim'], args['flim'])
    if not "local" in result:
        if exists(kispath):
            # BUG FIX: os.path.remove does not exist (AttributeError at
            # runtime); os.remove is the correct call to delete the partial file.
            os.remove(kispath)
        eprint('''{} got {}. Aborting.'''.format(basename(kispath),
                                                 result['hash']))
        return [('cnf_to_kis', cnfhash, result['hash']),
                ('kis_to_cnf', result['hash'], cnfhash)]
    return [('kis_local', result['hash'], result['local']),
            ('kis_nodes', result['hash'], result['nodes']),
            ('kis_edges', result['hash'], result['edges']),
            ('kis_k', result['hash'], result['k']),
            ('cnf_to_kis', cnfhash, result['hash']),
            ('kis_to_cnf', result['hash'], cnfhash)]
def compute_degree_sequence_hash(hashvalue, filename):
    """Compute an MD5 over the sorted literal degree sequence of a CNF file.

    For each variable the pair (positive occurrences, negative occurrences)
    is reduced to (total, |pos - neg|), sorted, and hashed — an ordering that
    is invariant under variable renaming and polarity flips.
    Returns {'hashvalue': ..., 'attributes': [...]}.
    """
    eprint('Computing degree-sequence hash for {}'.format(filename))
    hash_md5 = hashlib.md5()
    degrees = dict()
    f = open_cnf_file(filename, 'rt')
    try:
        # FIX: close the file even if int() raises on malformed input
        for line in f:
            line = line.strip()
            if line and line[0] not in ['p', 'c']:
                for lit in line.split()[:-1]:  # drop the trailing clause '0'
                    num = int(lit)
                    tup = degrees.get(abs(num), (0, 0))
                    degrees[abs(num)] = (tup[0], tup[1] +
                                         1) if num < 0 else (tup[0] + 1,
                                                             tup[1])
    finally:
        f.close()
    degree_list = list(degrees.values())
    degree_list.sort(key=lambda t: (t[0] + t[1], abs(t[0] - t[1])))
    for t in degree_list:
        hash_md5.update(str(t[0] + t[1]).encode('utf-8'))
        hash_md5.update(b' ')
        hash_md5.update(str(abs(t[0] - t[1])).encode('utf-8'))
        hash_md5.update(b' ')
    return {
        'hashvalue': hashvalue,
        'attributes': [('REPLACE', 'degree_sequence_hash',
                        hash_md5.hexdigest())]
    }
def compute_clause_types(hashvalue, filename):
    """Count horn/positive/negative clauses plus variable and clause totals.

    Returns {'hashvalue': ..., 'attributes': [...]} with REPLACE records for
    clauses_horn, clauses_positive, clauses_negative, variables, clauses.
    Raises ValueError on an empty clause line.
    """
    eprint('Computing clause_types for {}'.format(filename))
    c_vars = 0
    c_clauses = 0
    c_horn = 0
    c_pos = 0
    c_neg = 0
    f = open_cnf_file(filename, 'rt')
    try:
        # FIX: the ValueError below used to leak the open file handle
        for line in f:
            line = line.strip()
            if line and line[0] not in ['p', 'c']:
                clause = [int(lit) for lit in line.split()[:-1]]
                if not len(clause):
                    raise ValueError("clause is empty: {}".format(line))
                c_vars = max(c_vars, max(abs(lit) for lit in clause))
                c_clauses += 1
                n_pos = sum(lit > 0 for lit in clause)
                if n_pos < 2:
                    c_horn += 1  # horn: at most one positive literal
                    if n_pos == 0:
                        c_neg += 1  # all literals negative
                if n_pos == len(clause):
                    c_pos += 1  # all literals positive
    finally:
        f.close()
    attributes = [('REPLACE', 'clauses_horn', c_horn),
                  ('REPLACE', 'clauses_positive', c_pos),
                  ('REPLACE', 'clauses_negative', c_neg),
                  ('REPLACE', 'variables', c_vars),
                  ('REPLACE', 'clauses', c_clauses)]
    return {'hashvalue': hashvalue, 'attributes': attributes}
def safe_run_results(api: GBD, result, check=False):
    """Persist each (feature, hash, value) triple in *result* via the api.

    When *check* is true, features missing from the database are created
    (type "empty") before their value is stored.
    """
    for attr in result:
        name = attr[0]
        hashv = attr[1]
        value = attr[2]
        eprint("Saving {}={} for {}".format(name, value, hashv))
        if check and name not in api.database.get_features():
            api.database.create_feature(name, "empty")
        api.database.set_values(name, value, [hashv])
def main():
    """Parse command-line options and start the GBD web service."""
    parser = argparse.ArgumentParser(description='Web- and Micro- Services to access global benchmark database.')
    parser.add_argument('-d', "--db", help='Specify database to work with', default=os.environ.get('GBD_DB'), nargs='?')
    parser.add_argument('-p', "--port", help='Specify port on which to listen', type=int)
    args = parser.parse_args()
    if not args.db:
        # guard clause: without a database path there is nothing to serve
        eprint("""No database path is given. 
A database path can be given in two ways: 
-- by setting the environment variable GBD_DB 
-- by giving a path via --db=[path] 
A database file containing some attributes of instances used in the SAT Competitions can be obtained at http://gbd.iti.kit.edu/getdatabase 
Don't forget to initialize each database with the paths to your benchmarks by using the init-command. """)
        return
    here = os.path.dirname(os.path.abspath(__file__))
    logging_path = os.path.join(here, "gbd-server-logs")
    if not os.path.exists(logging_path):
        os.makedirs(logging_path)
    logging.basicConfig(filename='{}/server.log'.format(logging_path), level=logging.DEBUG)
    logging.getLogger().addHandler(default_handler)
    global app
    app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)
    app.config['database'] = args.db
    server_dir = os.path.dirname(os.path.abspath(gbd_server.__file__))
    app.static_folder = os.path.join(server_dir, "static")
    app.template_folder = os.path.join(server_dir, "templates-vue")
    app.run(host='0.0.0.0', port=args.port)
def cli_rename(api: GbdApi, args):
    """Rename a feature; refuse when the old one is missing or the new exists."""
    if not api.feature_exists(args.old_name):
        eprint("Feature '{}' does not exist or is virtual".format(
            args.old_name))
        return
    if api.feature_exists(args.new_name):
        eprint("Feature '{}' does already exist".format(args.new_name))
        return
    api.rename_feature(args.old_name, args.new_name)
def init_database(self, path=None):
    """Re-register benchmark files found under *path* using self.jobs workers."""
    eprint('Initializing local path entries {} using {} cores'.format(
        path, self.jobs))
    cores = multiprocessing.cpu_count()
    if self.jobs == 1 and cores > 1:
        # hint that parallel initialization is available
        eprint("Activate parallel initialization using --jobs={}".format(cores))
    benchmark_administration.remove_benchmarks(self.database)
    benchmark_administration.register_benchmarks(self, self.database, path,
                                                 self.jobs)
def remove_benchmarks(database):
    """Drop 'local' path entries whose files no longer exist (after confirming)."""
    eprint("Sanitizing local path entries ... ")
    known = database.value_query("SELECT value FROM local")
    missing = [entry for entry in known if not isfile(entry)]
    if missing and confirm(
            "{} files not found, remove local path entries from database?".
            format(len(missing))):
        for entry in missing:
            eprint("File '{}' not found, removing path entry.".format(entry))
            database.submit(
                "DELETE FROM local WHERE value='{}'".format(entry))
def __init__(self, path_list, verbose=False):
    """Open every database in *path_list*, creating missing ones first.

    Existing database files are version-checked; non-existent paths are
    initialized with the current VERSION / HASH_VERSION.
    """
    self.paths = path_list
    self.verbose = verbose
    for db_path in self.paths:
        if os.path.isfile(db_path):
            # existing database: verify version compatibility
            self.check(db_path, VERSION, HASH_VERSION)
        else:
            eprint(
                "Initializing DB '{}' with version {} and hash-version {}".
                format(db_path, VERSION, HASH_VERSION))
            self.init(db_path, VERSION, HASH_VERSION)
def init_networkit_features(api: GBD, query, hashes):
    """Compute networkit graph features for instances matching *query*/*hashes*.

    Raises GBDException when the optional 'networkit' module is unavailable.
    """
    try:
        import networkit as nk
    except ImportError as e:
        # chain the original error so the root cause stays visible
        raise GBDException(
            "Module 'networkit' not found. Setup https://networkit.github.io/"
        ) from e
    nk.setNumberOfThreads(min(multiprocessing.cpu_count(), api.jobs))
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    for (hash, local) in resultset:
        result = networkit_features(hash, local, {})
        eprint(result['hashvalue'])
        for att in result['attributes']:
            # BUG FIX: attributes are tuples indexed by position; att["2"]
            # raised TypeError — the value lives at integer index 2.
            eprint(att[1] + "=" + att[2])
def calculate_par2_score(self, query, feature):
    """Print the PAR-2 score of runtime *feature* over instances matching *query*.

    Numeric values count as-is; non-numeric values (unsolved) are penalized
    with 2 * timeout and collected into the printed *penalized* set.
    Requires 'timeout' in the feature's meta-record; only warns when
    'memout' or 'machine' are missing.
    """
    info = self.database.meta_record(feature)
    if not "timeout" in info:
        eprint(
            "Time-limit 'timeout' missing in meta-record of table '{}'.".
            format(feature))
        eprint("Unable to calculate score.")
        return
    if not "memout" in info:
        eprint(
            "Memory-limit 'memout' missing in meta-record of table '{}'.".
            format(feature))
    if not "machine" in info:
        eprint(
            "Machine-id 'machine' missing in meta-record of table '{}'.".
            format(feature))
    timeout = int(info["timeout"])
    times = self.query_search(query, [], [feature])
    if not times:
        # BUG FIX: an empty result set previously crashed with
        # ZeroDivisionError on score / len(times)
        eprint("No results for query. Unable to calculate score.")
        return
    score = 0
    penalized = set()
    for time in times:
        if is_number(time[1]):
            score += int(time[1])
        else:
            score += 2 * timeout
            penalized.add(time[1])
    print(score / len(times))
    print(penalized)
def test_insert_values(self):
    """Values stored via set_values must be retrievable through GBDQuery."""
    os.remove(self.TDB)
    feat = "letter"
    table_name = self.TDBN + ".features"
    with Database([self.TDB], verbose=True) as db:
        db.create_feature(feat, "empty")
        db.set_values(feat, "a", ['1', '2', '3'])
        db.set_values(feat, "b", ['4', '5', '6'])
        builder = GBDQuery(db)
        rows = db.query(builder.build_query(resolve=[feat]))
        eprint(rows)
        assert (rows == [('1', 'a'), ('2', 'a'), ('3', 'a'), ('4', 'b'),
                         ('5', 'b'), ('6', 'b')])
        rows = db.query(builder.build_query("{}=a".format(feat),
                                            resolve=[feat]))
        assert (rows == [('1', 'a'), ('2', 'a'), ('3', 'a')])
def cli_delete(api: GbdApi, args):
    """Delete attribute values for given hashes, or the whole feature if none.

    Hashes may arrive as arguments or be piped via stdin; destructive actions
    ask for confirmation unless --force is set.
    """
    if not api.feature_exists(args.name):
        eprint("Feature '{}' does not exist or is virtual".format(args.name))
        return
    # fall back to hashes piped in via stdin
    if not args.hashes and not sys.stdin.isatty():
        args.hashes = read_hashes()
    if args.hashes:
        if args.force or confirm(
                "Delete attributes of given hashes from '{}'?".format(
                    args.name)):
            api.remove_attributes(args.name, args.hashes)
    elif args.force or confirm(
            "Delete feature '{}' and all associated attributes?".format(
                args.name)):
        api.remove_feature(args.name)
def init_features(self) -> typing.Dict[str, FeatureInfo]:
    """Merge features from all schemas into one name-indexed map.

    On a name collision the first occurrence wins, with one exception:
    for 'hash' columns a main-hash column replaces a non-main one.
    Other collisions are reported as warnings.
    """
    result = dict()
    for schema in self.schemas.values():
        for feature in schema.features:
            if feature.name not in result:
                result[feature.name] = feature
            elif feature.column == "hash":
                current = result[feature.name]
                if Schema.is_main_hash_column(
                        feature) and not Schema.is_main_hash_column(current):
                    result[feature.name] = feature
            else:
                eprint(
                    "Warning: Feature name collision on {}. Using first occurence in {}."
                    .format(feature.name, result[feature.name].database))
    return result
def register_benchmarks(api, database, root, jobs=1):
    """Walk *root* and hash every CNF file not yet registered in the database.

    Hashing is fanned out over a multiprocessing pool of at most *jobs*
    workers; results are stored via api.callback_set_attributes_locked.
    """
    pool = Pool(min(multiprocessing.cpu_count(), jobs))
    # str.endswith accepts a tuple — one call instead of an 'or' chain
    cnf_suffixes = (".cnf", ".cnf.gz", ".cnf.lzma", ".cnf.xz", ".cnf.bz2")
    # FIX: iterate with a distinct name instead of shadowing the 'root' parameter
    for dirpath, dirnames, filenames in os.walk(root):
        for filename in filenames:
            path = os.path.join(dirpath, filename)
            if path.endswith(cnf_suffixes):
                hashes = database.value_query(
                    "SELECT hash FROM local WHERE value = '{}'".format(path))
                if len(hashes) != 0:
                    eprint('Problem {} already hashed'.format(path))
                else:
                    pool.apply_async(
                        compute_hash,
                        args=(path, ),
                        callback=api.callback_set_attributes_locked)
    pool.close()
    pool.join()
def run(api: GBD, resultset, func, args: dict):
    """Apply func(hash, local, args) to every entry of *resultset*.

    With api.jobs == 1 the work runs sequentially; otherwise it is scheduled
    on a pebble process pool.  Missing features are only checked/created for
    the first result batch.
    """
    check_features = True
    if api.jobs == 1:
        for (hash, local) in resultset:
            safe_run_results(api, func(hash, local, args),
                             check=check_features)
            check_features = False
    else:
        with pebble.ProcessPool(min(multiprocessing.cpu_count(),
                                    api.jobs)) as pool:
            futures = [
                pool.schedule(func, (hash, local, args))
                for (hash, local) in resultset
            ]
            for future in as_completed(
                    futures):  #, timeout=api.tlim if api.tlim > 0 else None):
                try:
                    safe_run_results(api, future.result(),
                                     check=check_features)
                    check_features = False
                except pebble.ProcessExpired as e:
                    future.cancel()
                    eprint("{}: {}".format(e.__class__.__name__, e))
                except GBDException as e:
                    # might receive special handling in the future
                    eprint("{}: {}".format(e.__class__.__name__, e))
                except Exception as e:
                    eprint("{}: {}".format(e.__class__.__name__, e))
def check(self, path, version, hash_version):
    # Verify that the database at *path* matches the expected tool *version*
    # and *hash_version* (warnings only), then transparently upgrade legacy
    # data models by creating views/tables newer tool versions rely on.
    con = sqlite3.connect(path)
    cur = con.cursor()
    lst = cur.execute(
        "SELECT tbl_name FROM sqlite_master WHERE type='table'")
    tables = [x[0] for x in lst]
    if not "__version" in tables:
        eprint("WARNING: Version info not available in database {}".format(
            path))
        # NOTE(review): this early return leaves 'con' open — consider closing
        return
    __version = cur.execute(
        "SELECT version, hash_version FROM __version").fetchall()
    if __version[0][0] != version:
        eprint("WARNING: DB Version is {} but tool version is {}".format(
            __version[0][0], version))
    if __version[0][1] != hash_version:
        eprint(
            "WARNING: DB Hash-Version is {} but tool hash-version is {}".
            format(__version[0][1], hash_version))
    # upgrade legacy data-model
    if not "filename" in tables:
        # filename view: tail of each local path (strip everything up to the last '/')
        cur.execute(
            "CREATE VIEW IF NOT EXISTS filename (hash, value) AS SELECT hash, REPLACE(value, RTRIM(value, REPLACE(value, '/', '')), '') FROM local"
        )
    if not "hash" in tables:
        # hash view: identity mapping so 'hash' can be queried like a feature
        cur.execute(
            "CREATE VIEW IF NOT EXISTS hash (hash, value) AS SELECT DISTINCT hash, hash FROM local"
        )
    if not "__meta" in tables:
        cur.execute(
            "CREATE TABLE IF NOT EXISTS __meta (name TEXT UNIQUE, value BLOB)"
        )
    if not "__tags" in tables:
        cur.execute(
            "CREATE TABLE IF NOT EXISTS __tags (hash TEXT NOT NULL, name TEXT NOT NULL, value TEXT NOT NULL, CONSTRAINT all_unique UNIQUE(hash, name, value))"
        )
        # tags view: 'name_is_value' strings, padded with " " for untagged hashes
        cur.execute(
            '''CREATE VIEW IF NOT EXISTS tags (hash, value) AS SELECT hash, name || '_is_' || value as value FROM __tags UNION SELECT hash, " " FROM local WHERE NOT EXISTS (SELECT 1 FROM __tags WHERE __tags.hash = local.hash)'''
        )
    con.commit()
    con.close()
def compute_cnf_sanitation_info(hashvalue, filename):
    """Scan a CNF file and collect sanitation warnings/errors as attributes.

    Checks the 'p cnf' preamble, clause terminators, duplicate literals and
    compares declared vs. actual variable/clause counts.  Stops at the first
    unreadable clause.  Returns {'hashvalue': ..., 'attributes': [...]} where
    each attribute is ('INSERT', 'sanitation_info', message).
    """
    eprint('Computing sanitiation info for {}'.format(filename))
    f = open_cnf_file(filename, 'rt')
    attributes = [('INSERT', 'sanitation_info', 'checked')]
    lc = 0
    preamble = False
    decl_clauses = 0
    decl_variables = 0
    num_clauses = 0
    num_variables = 0
    try:
        # FIX: ensure the file is closed even if an unexpected error escapes
        for line in f:
            lc = lc + 1
            line = line.strip()
            if not line:
                attributes.append(('INSERT', 'sanitation_info',
                                   "Warning: empty line {}".format(lc)))
            elif line.startswith("p cnf"):
                if preamble:
                    attributes.append(
                        ('INSERT', 'sanitation_info',
                         "Warning: more than one preamble"))
                preamble = True
                header = line.split()
                if len(header) == 4:
                    try:
                        decl_variables = int(header[2])
                        decl_clauses = int(header[3])
                    except ValueError:
                        # FIX: catch only the conversion error, not everything
                        attributes.append(
                            ('INSERT', 'sanitation_info',
                             "Warning: unable to read preamble"))
                else:
                    attributes.append(('INSERT', 'sanitation_info',
                                       "Warning: unable to read preamble"))
            elif line[0] == 'c' and preamble:
                attributes.append(
                    ('INSERT', 'sanitation_info',
                     "Warning: comment after preamble in line {}".format(lc)))
            else:
                if not preamble:
                    attributes.append(('INSERT', 'sanitation_info',
                                       "Warning: preamble missing"))
                    preamble = True
                try:
                    clause = [int(part) for part in line.split()]
                    num_clauses = num_clauses + 1
                    num_variables = max(num_variables,
                                        max([abs(lit) for lit in clause]))
                    if 0 in clause[:-1]:
                        attributes.append(
                            ('INSERT', 'sanitation_info',
                             "Error: more than one clause in line {}".format(
                                 lc)))
                    if clause[-1] != 0:
                        attributes.append(
                            ('INSERT', 'sanitation_info',
                             "Error: clause not terminated in line {}".format(
                                 lc)))
                    if len(clause) > len(set(clause)):
                        attributes.append(
                            ('INSERT', 'sanitation_info',
                             "Error: redundant literals in line {}".format(
                                 lc)))
                except Exception as e:
                    attributes.append(
                        ('INSERT', 'sanitation_info',
                         "Error: clause not readable in line {}, {}".format(
                             lc, e)))
                    break
    finally:
        f.close()
    if decl_variables != num_variables:
        attributes.append(
            ('INSERT', 'sanitation_info',
             "Warning: {} variables declared, but found {} variables".format(
                 decl_variables, num_variables)))
    if decl_clauses != num_clauses:
        # BUG FIX: this message previously said "variables" although it
        # reports the clause-count mismatch
        attributes.append(
            ('INSERT', 'sanitation_info',
             "Warning: {} clauses declared, but found {} clauses".format(
                 decl_clauses, num_clauses)))
    return {'hashvalue': hashvalue, 'attributes': attributes}
def test_init_local(self):
    """Exercise the query_search variants against the local meta database."""
    context = 'cnf'
    db = "/raid/gbd/meta.db"
    eprint("Sanitizing local path entries ... ")
    feature = "local" if context == 'cnf' else "{}.local".format(context)
    with GBD(db) as api:
        paths = [row[0] for row in api.query_search(group_by=feature)]
        eprint(paths)
        hashes = [row[0] for row in api.query_search()]
        eprint(hashes)
        families = api.query_search(resolve=["family"])
        eprint(families)
        family_value = api.query_search(hashes=[hashes[0]],
                                        resolve=["family"])[0][1]
        eprint(family_value)
        local_values = api.query_search(hashes=[hashes[0]],
                                        resolve=["local"])[0][1].split(',')
        eprint(local_values)
        records = api.query_search(hashes=[hashes[0]],
                                   resolve=["local", "filename"],
                                   collapse="MIN")
        eprint(records)
def main():
    """Build the gbd command-line interface and dispatch the chosen sub-command."""
    parser = argparse.ArgumentParser(
        description='Access and maintain the global benchmark database.')
    parser.add_argument('-d', "--db", help='Specify database to work with',
                        default=os.environ.get('GBD_DB'), nargs='?')
    parser.add_argument('-j', "--jobs", help='Specify number of parallel jobs',
                        default=1, nargs='?')
    parser.add_argument(
        '-s', "--separator", choices=[" ", ",", ";"], default=" ",
        help="Feature separator (delimiter used in import and output)")
    parser.add_argument(
        '-t', "--join-type", choices=["INNER", "OUTER", "LEFT"], default="LEFT",
        help="Join Type: treatment of missing values in queries")
    parser.add_argument(
        '-v', '--verbose', action='store_true',
        help='Print additional (or diagnostic) information to stderr')
    subparsers = parser.add_subparsers(help='Available Commands:')
    # INITIALIZATION AND BOOTSTRAPPING
    parser_init = subparsers.add_parser('init', help='Initialize Database')
    parser_init_subparsers = parser_init.add_subparsers(
        help='Select Initialization Procedure:')
    # init local paths:
    parser_init_local = parser_init_subparsers.add_parser(
        'local', help='Initialize Local Hash/Path Entries')
    parser_init_local.add_argument('path', type=directory_type,
                                   help="Path to benchmarks")
    parser_init_local.set_defaults(func=cli_init_local)
    # init clause types:
    parser_init_ct = parser_init_subparsers.add_parser(
        'clause_types', help='Initialize Clause-Type Tables')
    parser_init_ct.add_argument('hashes', help='Hashes', nargs='+')
    parser_init_ct.set_defaults(func=cli_init_ct)
    # init degree_sequence_hash:
    parser_init_dsh = parser_init_subparsers.add_parser(
        'degree_sequence_hash', help='Initialize Degree-Sequence Hash')
    parser_init_dsh.add_argument('hashes', help='Hashes', nargs='+')
    parser_init_dsh.set_defaults(func=cli_init_dsh)
    # init sanitation info
    parser_init_sanitize = parser_init_subparsers.add_parser(
        'sanitize', help='Check Instances, Store Sanitation Info')
    parser_init_sanitize.add_argument('hashes', help='Hashes', nargs='+')
    parser_init_sanitize.set_defaults(func=cli_init_sanitize)
    # GBD HASH
    parser_hash = subparsers.add_parser('hash',
                                        help='Print hash for a single file')
    parser_hash.add_argument('path', type=file_type,
                             help="Path to one benchmark")
    parser_hash.set_defaults(func=cli_hash)
    # GET/SET ATTRIBUTES
    parser_get = subparsers.add_parser(
        'get', help='Get data by query (or hash-list via stdin)')
    parser_get.add_argument(
        'query',
        help=
        'Specify a query-string (e.g. "variables > 100 and path like %%mp1%%")',
        nargs='?')
    parser_get.add_argument('-r', '--resolve',
                            help='List of features to resolve against',
                            nargs='+')
    parser_get.add_argument(
        '-c', '--collapse', default='group_concat',
        choices=['group_concat', 'min', 'max', 'avg', 'count', 'sum'],
        help='Treatment of multiple values per hash (or grouping value resp.)')
    parser_get.add_argument('-g', '--group_by', default='hash',
                            help='Group by specified attribute value')
    parser_get.set_defaults(func=cli_get)
    parser_set = subparsers.add_parser(
        'set',
        help=
        'Set specified attribute-value for given hashes (via argument or stdin)'
    )
    parser_set.add_argument('hashes', help='Hashes', nargs='*')
    parser_set.add_argument('-n', '--name', type=column_type,
                            help='Feature name', required=True)
    parser_set.add_argument('-v', '--value', help='Attribute value',
                            required=True)
    parser_set.add_argument('-f', '--force', action='store_true',
                            help='Overwrite existing unique values')
    parser_set.set_defaults(func=cli_set)
    # IMPORT DATA FROM CSV
    parser_import = subparsers.add_parser(
        'import', help='Import attributes from csv-file')
    parser_import.add_argument('path', type=file_type, help="Path to csv-file")
    parser_import.add_argument('-k', '--key', type=column_type,
                               help="Name of the key column (gbd-hash)",
                               required=True)
    parser_import.add_argument('-s', '--source',
                               help="Name of source column in csv-file",
                               required=True)
    parser_import.add_argument('-t', '--target', type=column_type,
                               help="Name of target column (in database)",
                               required=True)
    parser_import.set_defaults(func=cli_import)
    # CREATE/DELETE/MODIFY FEATURES
    parser_create = subparsers.add_parser('create',
                                          help='Create a new feature')
    parser_create.add_argument('name', type=column_type,
                               help='Name of feature')
    parser_create.add_argument(
        '-u', '--unique',
        help='Unique constraint: specify default-value of feature')
    parser_create.set_defaults(func=cli_create)
    parser_delete = subparsers.add_parser(
        'delete',
        help=
        'Delete all values assiociated with given hashes (via argument or stdin) or remove feature if no hashes are given'
    )
    parser_delete.add_argument('hashes', help='Hashes', nargs='*')
    parser_delete.add_argument('name', type=column_type,
                               help='Name of feature')
    parser_delete.add_argument('-f', '--force', action='store_true',
                               help='Do not ask for confirmation')
    parser_delete.set_defaults(func=cli_delete)
    parser_rename = subparsers.add_parser('rename', help='Rename feature')
    parser_rename.add_argument('old_name', type=column_type,
                               help='Old name of feature')
    parser_rename.add_argument('new_name', type=column_type,
                               help='New name of feature')
    parser_rename.set_defaults(func=cli_rename)
    # HANDLE META-FEATURES (e.g. specify runtime meta-data like timeout/memout/machine)
    parser_info = subparsers.add_parser(
        'info', help='Print info about available features')
    parser_info.add_argument('name', type=column_type,
                             help='Print info about specified feature',
                             nargs='?')
    parser_info.set_defaults(func=cli_info)
    parser_info_set = subparsers.add_parser('info_set',
                                            help='Set feature meta-attributes')
    parser_info_set.add_argument('feature', type=column_type,
                                 help='Feature name')
    parser_info_set.add_argument('-n', '--name', type=column_type,
                                 help='Meta-feature name', required=True)
    parser_info_set.add_argument('-v', '--value', help='Meta-feature value',
                                 required=True)
    parser_info_set.set_defaults(func=cli_info_set)
    parser_info_clear = subparsers.add_parser(
        'info_clear', help='Clear feature meta-attributes')
    parser_info_clear.add_argument('feature', type=column_type,
                                   help='Feature name')
    parser_info_clear.add_argument('-n', '--name', type=column_type,
                                   help='Meta-feature name')
    parser_info_clear.set_defaults(func=cli_info_clear)
    # SCORE CALCULATION
    parser_par2 = subparsers.add_parser(
        'par2', help='Calculate PAR-2 score for given runtime feature')
    parser_par2.add_argument(
        'query',
        help=
        'Specify a query-string (e.g. "variables > 100 and path like %%mp1%%")',
        nargs='?')
    parser_par2.add_argument('name', type=column_type,
                             help='Name of runtime feature')
    parser_par2.set_defaults(func=cli_par2)
    # EVALUATE ARGUMENTS
    args = parser.parse_args()
    if not args.db:
        eprint("""No database path is given. 
A database path can be given in two ways: 
-- by setting the environment variable GBD_DB 
-- by giving a path via --db=[path] 
A database file containing some attributes of instances used in the SAT Competitions can be obtained at http://gbd.iti.kit.edu/getdatabase 
Initialize your database with local paths to your benchmark instances by using the init-command. 
""")
    elif len(sys.argv) > 1:
        # a sub-command was selected: run it inside an api context
        try:
            with GbdApi(args.db, int(args.jobs), args.separator,
                        args.join_type, args.verbose) as api:
                args.func(api, args)
        except AttributeError as e:
            # no func set: a parser without a sub-command was invoked
            eprint(e)
    else:
        parser.print_help()
def compute_hash(path):
    """Hash the benchmark at *path*; return a record registering its local path."""
    eprint('Hashing {}'.format(path))
    return {
        'hashvalue': gbd_hash(path),
        'attributes': [('INSERT', 'local', path)]
    }
def compute_iso_hash(hashvalue, filename, args):
    """Compute the isomorphism-invariant hash of *filename* as a single triple."""
    eprint('Computing iso hash for {}'.format(filename))
    return [('isohash', hashvalue, isohash(filename))]
def execute(self, q):
    """Run SQL statement *q* on the cursor, echoing it to stderr in verbose mode."""
    if self.verbose:
        eprint(q)
    self.cursor.execute(q)
def query(self, q):
    """Run SQL query *q* and return all result rows (echoing q when verbose)."""
    if self.verbose:
        eprint(q)
    return self.cursor.execute(q).fetchall()
def cli_create(api: GbdApi, args):
    """Create feature args.name unless a feature of that name already exists."""
    if api.feature_exists(args.name):
        eprint("Feature '{}' does already exist".format(args.name))
    else:
        api.create_feature(args.name, args.unique)
def execute(self, q):
    """Run SQL statement *q* and commit immediately (echoing q when verbose)."""
    if self.verbose:
        eprint(q)
    self.cursor.execute(q)
    self.connection.commit()
def compute_hash(nohashvalue, path, args):
    """Hash the benchmark at *path*; return a single ('local', hash, path) triple."""
    eprint('Hashing {}'.format(path))
    return [('local', gbd_hash(path), path)]