def create_table(self, db: Database, uid_key: str, keys: List[str]) -> Table:
    query = self.query
    if self.nrows is None:
        try:
            self.nrows = db.count(query)
        except (ValueError, KeyError) as e:
            error = ', '.join(['Bad query'] + list(e.args))
            from flask import flash
            flash(error)
            query = 'id=0'  # this will return no rows
            self.nrows = 0

    table = Table(db, uid_key)
    table.select(query, self.columns, self.sort, self.limit,
                 offset=self.page * self.limit,
                 show_empty_columns=True)
    table.format()
    assert self.columns is not None
    table.addcolumns = sorted(column for column in all_columns + keys
                              if column not in self.columns)
    return table
def test_hide_empty_columns():
    db = TestConnection()
    table = Table(db)
    for show in [True, False]:
        table.select('...', ['a', 'b', 'c'], '', 10, 0,
                     show_empty_columns=show)
        if show:
            assert table.columns == ['a', 'b', 'c']
        else:
            assert table.columns == ['a', 'b']
def index():
    global next_table_id

    table_id = int(request.args.get('x', '0'))
    if table_id not in tables:
        table_id = next_table_id
        next_table_id += 1
        query = ''
        columns = list(all_columns)
        sort = 'id'
        limit = 100
        opened = set()
    else:
        query, columns, sort, limit, opened = tables[table_id]

    if 'toggle' in request.args:
        column = request.args['toggle']
        if column in columns:
            columns.remove(column)
            if column == sort.lstrip('-'):
                sort = 'id'
        else:
            columns.append(column)
    elif 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
    elif 'query' in request.args:
        query = request.args['query'].encode()
        limit = int(request.args.get('limit', '0'))
        columns = list(all_columns)
        sort = 'id'
        opened = set()

    table = Table(connection)
    table.select(query, columns, sort, limit)
    tables[table_id] = query, table.columns, sort, limit, opened
    table.format(SUBSCRIPT)
    return render_template('table.html', t=table, query=query, sort=sort,
                           limit=limit, tid=table_id, opened=opened,
                           home=home)
def index():
    global next_table_id

    table_id = int(request.args.get('x', '0'))
    if table_id not in tables:
        table_id = next_table_id
        next_table_id += 1
        query = ''
        columns = list(all_columns)
        sort = 'id'
        limit = 100
        opened = set()
    else:
        query, columns, sort, limit, opened = tables[table_id]

    if 'toggle' in request.args:
        column = request.args['toggle']
        if column in columns:
            columns.remove(column)
            if column == sort.lstrip('-'):
                sort = 'id'
        else:
            columns.append(column)
    elif 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
    elif 'query' in request.args:
        query = request.args['query'].encode()
        limit = int(request.args.get('limit', '0'))
        columns = list(all_columns)
        sort = 'id'
        opened = set()

    table = Table(connection)
    table.select(query, columns, sort, limit)
    tables[table_id] = query, table.columns, sort, limit, opened
    table.format(SUBSCRIPT)
    return render_template('table.html', t=table, query=query, sort=sort,
                           limit=limit, tid=table_id, opened=opened)
def create_table(self, db: Database, uid_key: str) -> Table:
    query = self.query
    if self.nrows is None:
        try:
            self.nrows = db.count(query)
        except (ValueError, KeyError) as e:
            error = ', '.join(['Bad query'] + list(e.args))
            flash(error)
            query = 'id=0'  # this will return no rows
            self.nrows = 0

    table = Table(db, uid_key)
    table.select(query, self.columns, self.sort, self.limit,
                 offset=self.page * self.limit)
    table.format()
    table.addcolumns = sorted(column for column in all_columns + table.keys
                              if column not in table.columns)
    return table
def run(opts, args, verbosity):
    filename = args.pop(0)
    query = ','.join(args)

    if query.isdigit():
        query = int(query)

    if opts.add_keywords:
        add_keywords = opts.add_keywords.split(',')
    else:
        add_keywords = []

    if opts.delete_keywords:
        delete_keywords = opts.delete_keywords.split(',')
    else:
        delete_keywords = []

    add_key_value_pairs = {}
    if opts.add_key_value_pairs:
        for pair in opts.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            for type in [int, float]:
                try:
                    value = type(value)
                except ValueError:
                    pass
                else:
                    break
            add_key_value_pairs[key] = value

    if opts.delete_key_value_pairs:
        delete_key_value_pairs = opts.delete_key_value_pairs.split(',')
    else:
        delete_key_value_pairs = []

    con = connect(filename)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if opts.add_from_file:
        filename = opts.add_from_file
        if ':' in filename:
            calculator_name, filename = filename.split(':')
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        con.write(atoms, add_keywords, key_value_pairs=add_key_value_pairs)
        out('Added {0} from {1}'.format(atoms.get_chemical_formula(),
                                        filename))
        return

    if opts.count:
        n = 0
        for dct in con.select(query):
            n += 1
        print('%s' % plural(n, 'row'))
        return

    if opts.explain:
        for dct in con.select(query, explain=True, verbosity=verbosity,
                              limit=opts.limit):
            print(dct['explain'])
        return

    if opts.insert_into:
        con2 = connect(opts.insert_into)
        nkw = 0
        nkvp = 0
        nrows = 0
        for dct in con.select(query):
            keywords = dct.get('keywords', [])
            for keyword in add_keywords:
                if keyword not in keywords:
                    keywords.append(keyword)
                    nkw += 1
            kvp = dct.get('key_value_pairs', {})
            nkvp -= len(kvp)  # accumulate net number of added pairs
            kvp.update(add_key_value_pairs)
            nkvp += len(kvp)
            con2.write(dct, keywords, data=dct.get('data'), **kvp)
            nrows += 1
        out('Added %s and %s (%s updated)' %
            (plural(nkw, 'keyword'),
             plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_keywords or add_key_value_pairs:
        ids = [dct['id'] for dct in con.select(query)]
        nkw, nkv = con.update(ids, add_keywords, **add_key_value_pairs)
        out('Added %s and %s (%s updated)' %
            (plural(nkw, 'keyword'),
             plural(nkv, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - nkv, 'pair')))
        return

    if opts.delete:
        ids = [dct['id'] for dct in con.select(query)]
        if ids and not opts.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if raw_input(msg).lower() != 'yes':
                return
        con.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if delete_keywords or delete_key_value_pairs:
        ids = [dct['id'] for dct in con.select(query)]
        nkw, nkv = con.delete_keywords_and_key_value_pairs(
            ids, delete_keywords, delete_key_value_pairs)
        print('Removed %s and %s' %
              (plural(nkw, 'keyword'), plural(nkv, 'key-value pair')))
        return

    if opts.python_expression:
        for dct in con.select(query):
            row = eval(opts.python_expression, dct)
            if not isinstance(row, (list, tuple, np.ndarray)):
                row = [row]
            print(', '.join(str(x) for x in row))
        return

    if opts.long:
        dct = con.get(query)
        summary = Summary(dct)
        summary.write()
    else:
        if opts.open_web_browser:
            import ase.db.app as app
            app.connection = con
            app.app.run(host='0.0.0.0', debug=True)
        else:
            columns = list(all_columns)
            c = opts.columns
            if c:
                if c[0] == '+':
                    c = c[1:]
                elif c[0] != '-':
                    columns = []
                for col in c.split(','):
                    if col[0] == '-':
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip('+'))
            table = Table(con, verbosity, opts.cut)
            table.select(query, columns, opts.sort, opts.limit)
            if opts.csv:
                table.write_csv()
            else:
                table.write()
def index(project):
    global next_con_id

    # Backwards compatibility:
    project = request.args.get('project') or project

    if not projects:
        # First time: initialize list of projects
        for proj, db in sorted(databases.items()):
            meta = ase.db.web.process_metadata(db)
            db.meta = meta
            nrows = len(db)
            projects.append((proj, db.meta.get('title', proj), nrows))
            print('Initialized {proj}: {nrows} rows'.format(proj=proj,
                                                            nrows=nrows))

    if project is None and len(projects) > 1:
        return render_template('projects.html',
                               projects=projects,
                               home=home,
                               md=None,
                               ase_db_footer=ase_db_footer)

    if project is None:
        project = list(databases)[0]

    con_id = int(request.args.get('x', '0'))
    if con_id in connections:
        query, nrows, page, columns, sort, limit = connections[con_id]

    if con_id not in connections:
        # Give this connection a new id:
        con_id = next_con_id
        next_con_id += 1
        query = ['', {}, '']
        nrows = None
        page = 0
        columns = None
        sort = 'id'
        limit = 25

    db = databases.get(project)
    if db is None:
        return 'No such project: ' + project

    meta = db.meta

    if columns is None:
        columns = meta.get('default_columns')[:] or list(all_columns)

    if 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
        page = 0
    elif 'query' in request.args:
        dct = {}
        query = [request.args['query']]
        q = query[0]
        for special in meta['special_keys']:
            kind, key = special[:2]
            if kind == 'SELECT':
                value = request.args['select_' + key]
                dct[key] = convert_str_to_int_float_or_str(value)
                if value:
                    q += ',{}={}'.format(key, value)
            elif kind == 'BOOL':
                value = request.args['bool_' + key]
                dct[key] = convert_str_to_int_float_or_str(value)
                if value:
                    q += ',{}={}'.format(key, value)
            else:
                v1 = request.args['from_' + key]
                v2 = request.args['to_' + key]
                var = request.args['range_' + key]
                dct[key] = (v1, v2, var)
                if v1 or v2:
                    var = request.args['range_' + key]
                    if v1:
                        q += ',{}>={}'.format(var, v1)
                    if v2:
                        q += ',{}<={}'.format(var, v2)
        q = q.lstrip(',')
        query += [dct, q]
        sort = 'id'
        page = 0
        nrows = None
    elif 'limit' in request.args:
        limit = int(request.args['limit'])
        page = 0
    elif 'page' in request.args:
        page = int(request.args['page'])

    if 'toggle' in request.args:
        column = request.args['toggle']
        if column == 'reset':
            columns = meta.get('default_columns')[:] or list(all_columns)
        else:
            if column in columns:
                columns.remove(column)
                if column == sort.lstrip('-'):
                    sort = 'id'
                    page = 0
            else:
                columns.append(column)

    okquery = query

    if nrows is None:
        try:
            nrows = db.count(query[2])
        except (ValueError, KeyError) as e:
            flash(', '.join(['Bad query'] + list(e.args)))
            okquery = ('', {}, 'id=0')  # this will return no rows
            nrows = 0

    table = Table(db, meta.get('unique_key', 'id'))
    table.select(okquery[2], columns, sort, limit, offset=page * limit)

    con = Connection(query, nrows, page, columns, sort, limit)
    connections[con_id] = con

    if len(connections) > 1000:
        # Forget old connections:
        for cid in sorted(connections)[:200]:
            del connections[cid]

    table.format(SUBSCRIPT)

    addcolumns = [column for column in all_columns + table.keys
                  if column not in table.columns]

    return render_template('table.html',
                           project=project,
                           t=table,
                           md=meta,
                           con=con,
                           x=con_id,
                           home=home,
                           ase_db_footer=ase_db_footer,
                           pages=pages(page, nrows, limit),
                           nrows=nrows,
                           addcolumns=addcolumns,
                           row1=page * limit + 1,
                           row2=min((page + 1) * limit, nrows),
                           download_button=download_button)
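# Illustrative sketch (not part of ase.db; the helper name special_key_query
# is hypothetical): the index() view above builds query-string fragments from
# the "special key" form fields.  SELECT and BOOL fields contribute
# ',key=value', while range fields contribute ',var>=v1' and/or ',var<=v2'.
# This mirrors the logic in the view for a single special key.
def special_key_query(kind, key, args):
    q = ''
    if kind in ('SELECT', 'BOOL'):
        prefix = 'select_' if kind == 'SELECT' else 'bool_'
        value = args.get(prefix + key, '')
        if value:
            q += ',{}={}'.format(key, value)
    else:
        v1 = args.get('from_' + key, '')
        v2 = args.get('to_' + key, '')
        var = args.get('range_' + key, key)
        if v1:
            q += ',{}>={}'.format(var, v1)
        if v2:
            q += ',{}<={}'.format(var, v2)
    return q

# Example: special_key_query('RANGE', 'gap', {'from_gap': '1.0', 'to_gap': '',
# 'range_gap': 'gap'}) returns ',gap>=1.0'.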
def run(opts, args, verbosity):
    filename = args.pop(0)
    query = ",".join(args)

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if opts.add_key_value_pairs:
        for pair in opts.add_key_value_pairs.split(","):
            key, value = pair.split("=")
            add_key_value_pairs[key] = convert_str_to_float_or_str(value)

    if opts.delete_keys:
        delete_keys = opts.delete_keys.split(",")
    else:
        delete_keys = []

    con = connect(filename, use_lock_file=not opts.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if opts.analyse:
        con.analyse()
        return

    if opts.add_from_file:
        filename = opts.add_from_file
        if ":" in filename:
            calculator_name, filename = filename.split(":")
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        con.write(atoms, key_value_pairs=add_key_value_pairs)
        out("Added {0} from {1}".format(atoms.get_chemical_formula(),
                                        filename))
        return

    if opts.count:
        n = con.count(query)
        print("%s" % plural(n, "row"))
        return

    if opts.explain:
        for dct in con.select(query, explain=True, verbosity=verbosity,
                              limit=opts.limit, offset=opts.offset):
            print(dct["explain"])
        return

    if opts.insert_into:
        nkvp = 0
        nrows = 0
        with connect(opts.insert_into,
                     use_lock_file=not opts.no_lock_file) as con2:
            for dct in con.select(query):
                kvp = dct.get("key_value_pairs", {})
                nkvp -= len(kvp)
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                if opts.unique:
                    dct["unique_id"] = "%x" % randint(16 ** 31, 16 ** 32 - 1)
                con2.write(dct, data=dct.get("data"), **kvp)
                nrows += 1
        out("Added %s (%s updated)" %
            (plural(nkvp, "key-value pair"),
             plural(len(add_key_value_pairs) * nrows - nkvp, "pair")))
        out("Inserted %s" % plural(nrows, "row"))
        return

    if add_key_value_pairs or delete_keys:
        ids = [dct["id"] for dct in con.select(query)]
        m, n = con.update(ids, delete_keys, **add_key_value_pairs)
        out("Added %s (%s updated)" %
            (plural(m, "key-value pair"),
             plural(len(add_key_value_pairs) * len(ids) - m, "pair")))
        out("Removed", plural(n, "key-value pair"))
        return

    if opts.delete:
        ids = [dct["id"] for dct in con.select(query)]
        if ids and not opts.yes:
            msg = "Delete %s? (yes/No): " % plural(len(ids), "row")
            if input(msg).lower() != "yes":
                return
        con.delete(ids)
        out("Deleted %s" % plural(len(ids), "row"))
        return

    if opts.plot:
        if ":" in opts.plot:
            tags, keys = opts.plot.split(":")
            tags = tags.split(",")
        else:
            tags = []
            keys = opts.plot
        keys = keys.split(",")
        plots = collections.defaultdict(list)
        X = {}
        labels = []
        for row in con.select(query, sort=opts.sort):
            name = ",".join(row[tag] for tag in tags)
            x = row.get(keys[0])
            if x is not None:
                if isinstance(x, (unicode, str)):
                    if x not in X:
                        X[x] = len(X)
                        labels.append(x)
                    x = X[x]
                plots[name].append([x] + [row.get(key) for key in keys[1:]])
        import matplotlib.pyplot as plt
        for name, plot in plots.items():
            xyy = zip(*plot)
            x = xyy[0]
            for y, key in zip(xyy[1:], keys[1:]):
                plt.plot(x, y, label=name + key)
        if X:
            plt.xticks(range(len(labels)), labels, rotation=90)
        plt.legend()
        plt.show()
        return

    if opts.long:
        dct = con.get(query)
        summary = Summary(dct)
        summary.write()
    elif opts.json:
        dct = con.get(query)
        con2 = connect(sys.stdout, "json", use_lock_file=False)
        kvp = dct.get("key_value_pairs", {})
        con2.write(dct, data=dct.get("data"), **kvp)
    else:
        if opts.open_web_browser:
            import ase.db.app as app
            app.db = con
            app.app.run(host="0.0.0.0", debug=True)
        else:
            columns = list(all_columns)
            c = opts.columns
            if c and c.startswith("++"):
                keys = set()
                for row in con.select(query, limit=opts.limit,
                                      offset=opts.offset):
                    keys.update(row._keys)
                columns.extend(keys)
                if c[2:3] == ",":
                    c = c[3:]
                else:
                    c = ""
            if c:
                if c[0] == "+":
                    c = c[1:]
                elif c[0] != "-":
                    columns = []
                for col in c.split(","):
                    if col[0] == "-":
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip("+"))
            table = Table(con, verbosity, opts.cut)
            table.select(query, columns, opts.sort, opts.limit, opts.offset)
            if opts.csv:
                table.write_csv()
            else:
                table.write(query)
def run(opts, args, verbosity):
    filename = args.pop(0)
    query = ','.join(args)

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if opts.add_key_value_pairs:
        for pair in opts.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            for type in [int, float]:
                try:
                    value = type(value)
                except ValueError:
                    pass
                else:
                    break
            add_key_value_pairs[key] = value

    if opts.delete_keys:
        delete_keys = opts.delete_keys.split(',')
    else:
        delete_keys = []

    con = connect(filename, use_lock_file=not opts.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if opts.add_from_file:
        filename = opts.add_from_file
        if ':' in filename:
            calculator_name, filename = filename.split(':')
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        con.write(atoms, key_value_pairs=add_key_value_pairs)
        out('Added {0} from {1}'.format(atoms.get_chemical_formula(),
                                        filename))
        return

    if opts.count:
        n = con.count(query)
        print('%s' % plural(n, 'row'))
        return

    if opts.explain:
        for dct in con.select(query, explain=True, verbosity=verbosity,
                              limit=opts.limit, offset=opts.offset):
            print(dct['explain'])
        return

    if opts.insert_into:
        nkvp = 0
        nrows = 0
        with connect(opts.insert_into,
                     use_lock_file=not opts.no_lock_file) as con2:
            for dct in con.select(query):
                kvp = dct.get('key_value_pairs', {})
                nkvp -= len(kvp)  # accumulate net number of added pairs
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                con2.write(dct, data=dct.get('data'), **kvp)
                nrows += 1
        out('Added %s (%s updated)' %
            (plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_key_value_pairs or delete_keys:
        ids = [dct['id'] for dct in con.select(query)]
        m, n = con.update(ids, delete_keys, **add_key_value_pairs)
        out('Added %s (%s updated)' %
            (plural(m, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - m, 'pair')))
        out('Removed', plural(n, 'key-value pair'))
        return

    if opts.delete:
        ids = [dct['id'] for dct in con.select(query)]
        if ids and not opts.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if raw_input(msg).lower() != 'yes':
                return
        con.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if opts.python_expression:
        for dct in con.select(query):
            row = eval(opts.python_expression, dct)
            if not isinstance(row, (list, tuple, np.ndarray)):
                row = [row]
            print(', '.join(str(x) for x in row))
        return

    if opts.long:
        dct = con.get(query)
        summary = Summary(dct)
        summary.write()
    else:
        if opts.open_web_browser:
            import ase.db.app as app
            app.db = con
            app.app.run(host='0.0.0.0', debug=True)
        else:
            columns = list(all_columns)
            c = opts.columns
            if c:
                if c[0] == '+':
                    c = c[1:]
                elif c[0] != '-':
                    columns = []
                for col in c.split(','):
                    if col[0] == '-':
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip('+'))
            table = Table(con, verbosity, opts.cut)
            table.select(query, columns, opts.sort, opts.limit, opts.offset)
            if opts.csv:
                table.write_csv()
            else:
                table.write()
def index():
    global next_con_id

    con_id = int(request.args.get('x', '0'))
    if con_id not in connections:
        con_id = next_con_id
        next_con_id += 1
        query = ''
        columns = list(all_columns)
        sort = 'id'
        limit = 25
        opened = set()
        nrows = None
        page = 0
    else:
        query, nrows, page, columns, sort, limit, opened = connections[con_id]

    if 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
        page = 0
    elif 'query' in request.args:
        query = request.args['query'].encode()
        try:
            limit = max(1, min(int(request.args.get('limit', limit)), 200))
        except ValueError:
            pass
        sort = 'id'
        opened = set()
        page = 0
        nrows = None
    elif 'page' in request.args:
        page = int(request.args['page'])

    if 'toggle' in request.args:
        tcolumns = request.args['toggle'].split(',')
        if tcolumns == ['reset']:
            columns = list(all_columns)
        else:
            for column in tcolumns:
                if column in columns:
                    columns.remove(column)
                    if column == sort.lstrip('-'):
                        sort = 'id'
                        page = 0
                else:
                    columns.append(column)

    if nrows is None:
        nrows = db.count(query)

    table = Table(db)
    table.select(query, columns, sort, limit, offset=page * limit)

    con = Connection(query, nrows, page, columns, sort, limit, opened)
    connections[con_id] = con

    table.format(SUBSCRIPT)

    addcolumns = [column for column in all_columns + table.keys
                  if column not in table.columns]

    return render_template('table.html', t=table, con=con, cid=con_id,
                           home=home, pages=pages(page, nrows, limit),
                           nrows=nrows, addcolumns=addcolumns,
                           row1=page * limit + 1,
                           row2=min((page + 1) * limit, nrows))
def run(opts, args, verbosity):
    filename = args.pop(0)
    query = ','.join(args)

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if opts.add_key_value_pairs:
        for pair in opts.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            add_key_value_pairs[key] = convert_str_to_float_or_str(value)

    if opts.delete_keys:
        delete_keys = opts.delete_keys.split(',')
    else:
        delete_keys = []

    con = connect(filename, use_lock_file=not opts.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if opts.analyse:
        con.analyse()
        return

    if opts.add_from_file:
        filename = opts.add_from_file
        if ':' in filename:
            calculator_name, filename = filename.split(':')
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        con.write(atoms, key_value_pairs=add_key_value_pairs)
        out('Added {0} from {1}'.format(atoms.get_chemical_formula(),
                                        filename))
        return

    if opts.count:
        n = con.count(query)
        print('%s' % plural(n, 'row'))
        return

    if opts.explain:
        for dct in con.select(query, explain=True, verbosity=verbosity,
                              limit=opts.limit, offset=opts.offset):
            print(dct['explain'])
        return

    if opts.insert_into:
        nkvp = 0
        nrows = 0
        with connect(opts.insert_into,
                     use_lock_file=not opts.no_lock_file) as con2:
            for dct in con.select(query):
                kvp = dct.get('key_value_pairs', {})
                nkvp -= len(kvp)
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                if opts.unique:
                    dct['unique_id'] = '%x' % randint(16**31, 16**32 - 1)
                con2.write(dct, data=dct.get('data'), **kvp)
                nrows += 1
        out('Added %s (%s updated)' %
            (plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_key_value_pairs or delete_keys:
        ids = [dct['id'] for dct in con.select(query)]
        m, n = con.update(ids, delete_keys, **add_key_value_pairs)
        out('Added %s (%s updated)' %
            (plural(m, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - m, 'pair')))
        out('Removed', plural(n, 'key-value pair'))
        return

    if opts.delete:
        ids = [dct['id'] for dct in con.select(query)]
        if ids and not opts.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if input(msg).lower() != 'yes':
                return
        con.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if opts.plot_data:
        from ase.db.plot import dct2plot
        dct2plot(con.get(query).data, opts.plot_data)
        return

    if opts.plot:
        if ':' in opts.plot:
            tags, keys = opts.plot.split(':')
            tags = tags.split(',')
        else:
            tags = []
            keys = opts.plot
        keys = keys.split(',')
        plots = collections.defaultdict(list)
        X = {}
        labels = []
        for row in con.select(query, sort=opts.sort):
            name = ','.join(str(row[tag]) for tag in tags)
            x = row.get(keys[0])
            if x is not None:
                if isinstance(x, basestring):
                    if x not in X:
                        X[x] = len(X)
                        labels.append(x)
                    x = X[x]
                plots[name].append([x] + [row.get(key) for key in keys[1:]])
        import matplotlib.pyplot as plt
        for name, plot in plots.items():
            xyy = zip(*plot)
            x = xyy[0]
            for y, key in zip(xyy[1:], keys[1:]):
                plt.plot(x, y, label=name + ':' + key)
        if X:
            plt.xticks(range(len(labels)), labels, rotation=90)
        plt.legend()
        plt.show()
        return

    if opts.long:
        dct = con.get(query)
        summary = Summary(dct)
        summary.write()
    elif opts.json:
        dct = con.get(query)
        con2 = connect(sys.stdout, 'json', use_lock_file=False)
        kvp = dct.get('key_value_pairs', {})
        con2.write(dct, data=dct.get('data'), **kvp)
    else:
        if opts.open_web_browser:
            import ase.db.app as app
            app.db = con
            app.app.run(host='0.0.0.0', debug=True)
        else:
            columns = list(all_columns)
            c = opts.columns
            if c and c.startswith('++'):
                keys = set()
                for row in con.select(query, limit=opts.limit,
                                      offset=opts.offset):
                    keys.update(row._keys)
                columns.extend(keys)
                if c[2:3] == ',':
                    c = c[3:]
                else:
                    c = ''
            if c:
                if c[0] == '+':
                    c = c[1:]
                elif c[0] != '-':
                    columns = []
                for col in c.split(','):
                    if col[0] == '-':
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip('+'))
            table = Table(con, verbosity, opts.cut)
            table.select(query, columns, opts.sort, opts.limit, opts.offset)
            if opts.csv:
                table.write_csv()
            else:
                table.write(query)
def index():
    global next_con_id

    if not projects:
        # First time: initialize list of projects
        projects[:] = [(proj, d.metadata.get('title', proj))
                       for proj, d in sorted(databases.items())]

    con_id = int(request.args.get('x', '0'))
    if con_id in connections:
        project, query, nrows, page, columns, sort, limit = \
            connections[con_id]
        newproject = request.args.get('project')
        if newproject is not None and newproject != project:
            con_id = 0

    if con_id not in connections:
        # Give this connection a new id:
        con_id = next_con_id
        next_con_id += 1
        project = request.args.get('project', projects[0][0])
        query = ['', {}, '']
        nrows = None
        page = 0
        columns = None
        sort = 'id'
        limit = 25

    db = databases[project]

    if not hasattr(db, 'meta'):
        meta = ase.db.web.process_metadata(db)
        db.meta = meta
    else:
        meta = db.meta

    if columns is None:
        columns = meta.get('default_columns')[:] or list(all_columns)

    if 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
        page = 0
    elif 'query' in request.args:
        dct = {}
        query = [request.args['query']]
        q = query[0]
        for special in meta['special_keys']:
            kind, key = special[:2]
            if kind == 'SELECT':
                value = request.args['select_' + key]
                dct[key] = value
                if value:
                    q += ',{}={}'.format(key, value)
            elif kind == 'BOOL':
                value = request.args['bool_' + key]
                dct[key] = value
                if value:
                    q += ',{}={}'.format(key, value)
            else:
                v1 = request.args['from_' + key]
                v2 = request.args['to_' + key]
                var = request.args['range_' + key]
                dct[key] = (v1, v2, var)
                if v1 or v2:
                    var = request.args['range_' + key]
                    if v1:
                        q += ',{}>={}'.format(var, v1)
                    if v2:
                        q += ',{}<={}'.format(var, v2)
        q = q.lstrip(',')
        query += [dct, q]
        sort = 'id'
        page = 0
        nrows = None
    elif 'limit' in request.args:
        limit = int(request.args['limit'])
        page = 0
    elif 'page' in request.args:
        page = int(request.args['page'])

    if 'toggle' in request.args:
        column = request.args['toggle']
        if column == 'reset':
            columns = meta.get('default_columns')[:] or list(all_columns)
        else:
            if column in columns:
                columns.remove(column)
                if column == sort.lstrip('-'):
                    sort = 'id'
                    page = 0
            else:
                columns.append(column)

    okquery = query

    if nrows is None:
        try:
            nrows = db.count(query[2])
        except (ValueError, KeyError) as e:
            flash(', '.join(['Bad query'] + list(e.args)))
            okquery = ('', {}, 'id=0')  # this will return no rows
            nrows = 0

    table = Table(db)
    table.select(okquery[2], columns, sort, limit, offset=page * limit)

    con = Connection(project, query, nrows, page, columns, sort, limit)
    connections[con_id] = con

    if len(connections) > 1000:
        # Forget old connections:
        for cid in sorted(connections)[:200]:
            del connections[cid]

    table.format(SUBSCRIPT)

    addcolumns = [column for column in all_columns + table.keys
                  if column not in table.columns]

    return render_template('table.html',
                           project=project,
                           projects=projects,
                           t=table,
                           md=meta,
                           con=con,
                           x=con_id,
                           home=home,
                           pages=pages(page, nrows, limit),
                           nrows=nrows,
                           addcolumns=addcolumns,
                           row1=page * limit + 1,
                           row2=min((page + 1) * limit, nrows))
def main(args):
    verbosity = 1 - args.quiet + args.verbose
    query = ','.join(args.query)

    if args.sort.endswith('-'):
        args.sort = '-' + args.sort[:-1]

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if args.add_key_value_pairs:
        for pair in args.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            add_key_value_pairs[key] = convert_str_to_int_float_or_str(value)

    if args.delete_keys:
        delete_keys = args.delete_keys.split(',')
    else:
        delete_keys = []

    db = connect(args.database, use_lock_file=not args.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if args.analyse:
        db.analyse()
        return

    if args.add_from_file:
        filename = args.add_from_file
        if ':' in filename:
            calculator_name, filename = filename.split(':')
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        db.write(atoms, key_value_pairs=add_key_value_pairs)
        out('Added {0} from {1}'.format(atoms.get_chemical_formula(),
                                        filename))
        return

    if args.count:
        n = db.count(query)
        print('%s' % plural(n, 'row'))
        return

    if args.explain:
        for row in db.select(query, explain=True, verbosity=verbosity,
                             limit=args.limit, offset=args.offset):
            print(row['explain'])
        return

    if args.show_metadata:
        print(json.dumps(db.metadata, sort_keys=True, indent=4))
        return

    if args.set_metadata:
        with open(args.set_metadata) as fd:
            db.metadata = json.load(fd)
        return

    if args.insert_into:
        nkvp = 0
        nrows = 0
        with connect(args.insert_into,
                     use_lock_file=not args.no_lock_file) as db2:
            for row in db.select(query, sort=args.sort):
                kvp = row.get('key_value_pairs', {})
                nkvp -= len(kvp)
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                if args.unique:
                    row['unique_id'] = '%x' % randint(16**31, 16**32 - 1)
                db2.write(row, data=row.get('data'), **kvp)
                nrows += 1
        out('Added %s (%s updated)' %
            (plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_key_value_pairs or delete_keys:
        ids = [row['id'] for row in db.select(query)]
        m, n = db.update(ids, delete_keys, **add_key_value_pairs)
        out('Added %s (%s updated)' %
            (plural(m, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - m, 'pair')))
        out('Removed', plural(n, 'key-value pair'))
        return

    if args.delete:
        ids = [row['id'] for row in db.select(query)]
        if ids and not args.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if input(msg).lower() != 'yes':
                return
        db.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if args.plot_data:
        from ase.db.plot import dct2plot
        dct2plot(db.get(query).data, args.plot_data)
        return

    if args.plot:
        if ':' in args.plot:
            tags, keys = args.plot.split(':')
            tags = tags.split(',')
        else:
            tags = []
            keys = args.plot
        keys = keys.split(',')
        plots = collections.defaultdict(list)
        X = {}
        labels = []
        for row in db.select(query, sort=args.sort, include_data=False):
            name = ','.join(str(row[tag]) for tag in tags)
            x = row.get(keys[0])
            if x is not None:
                if isinstance(x, basestring):
                    if x not in X:
                        X[x] = len(X)
                        labels.append(x)
                    x = X[x]
                plots[name].append([x] + [row.get(key) for key in keys[1:]])
        import matplotlib.pyplot as plt
        for name, plot in plots.items():
            xyy = zip(*plot)
            x = xyy[0]
            for y, key in zip(xyy[1:], keys[1:]):
                plt.plot(x, y, label=name + ':' + key)
        if X:
            plt.xticks(range(len(labels)), labels, rotation=90)
        plt.legend()
        plt.show()
        return

    if args.json:
        row = db.get(query)
        db2 = connect(sys.stdout, 'json', use_lock_file=False)
        kvp = row.get('key_value_pairs', {})
        db2.write(row, data=row.get('data'), **kvp)
        return

    db.python = args.metadata_from_python_script
    db.meta = process_metadata(db, html=args.open_web_browser)

    if args.long:
        # Remove .png files so that new ones will be created.
        for func, filenames in db.meta.get('functions', []):
            for filename in filenames:
                try:
                    os.remove(filename)
                except OSError:  # Python 3 only: FileNotFoundError
                    pass
        row = db.get(query)
        summary = Summary(row, db.meta)
        summary.write()
    else:
        if args.open_web_browser:
            import ase.db.app as app
            app.databases['default'] = db
            app.app.run(host='0.0.0.0', debug=True)
        else:
            columns = list(all_columns)
            c = args.columns
            if c and c.startswith('++'):
                keys = set()
                for row in db.select(query, limit=args.limit,
                                     offset=args.offset,
                                     include_data=False):
                    keys.update(row._keys)
                columns.extend(keys)
                if c[2:3] == ',':
                    c = c[3:]
                else:
                    c = ''
            if c:
                if c[0] == '+':
                    c = c[1:]
                elif c[0] != '-':
                    columns = []
                for col in c.split(','):
                    if col[0] == '-':
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip('+'))
            table = Table(db, verbosity, args.cut)
            table.select(query, columns, args.sort, args.limit, args.offset)
            if args.csv:
                table.write_csv()
            else:
                table.write(query)
def index():
    global next_con_id

    con_id = int(request.args.get('x', '0'))
    if con_id not in connections:
        con_id = next_con_id
        next_con_id += 1
        query = ''
        columns = list(all_columns)
        sort = 'id'
        limit = 25
        opened = set()
        nrows = None
        page = 0
    else:
        query, nrows, page, columns, sort, limit, opened = connections[con_id]

    if 'sort' in request.args:
        column = request.args['sort']
        if column == sort:
            sort = '-' + column
        elif '-' + column == sort:
            sort = 'id'
        else:
            sort = column
        page = 0
    elif 'query' in request.args:
        query = request.args['query'].encode()
        try:
            limit = max(1, min(int(request.args.get('limit', limit)), 200))
        except ValueError:
            pass
        sort = 'id'
        opened = set()
        page = 0
        nrows = None
    elif 'page' in request.args:
        page = int(request.args['page'])

    if 'toggle' in request.args:
        tcolumns = request.args['toggle'].split(',')
        if tcolumns == ['reset']:
            columns = list(all_columns)
        else:
            for column in tcolumns:
                if column in columns:
                    columns.remove(column)
                    if column == sort.lstrip('-'):
                        sort = 'id'
                        page = 0
                else:
                    columns.append(column)

    if nrows is None:
        nrows = db.count(query)

    table = Table(db)
    table.select(query, columns, sort, limit, offset=page * limit)

    con = Connection(query, nrows, page, columns, sort, limit, opened)
    connections[con_id] = con

    table.format(SUBSCRIPT)

    addcolumns = [
        column for column in all_columns + table.keys
        if column not in table.columns
    ]

    return render_template('table.html', t=table, con=con, cid=con_id,
                           home=home, pages=pages(page, nrows, limit),
                           nrows=nrows, addcolumns=addcolumns,
                           row1=page * limit + 1,
                           row2=min((page + 1) * limit, nrows))
def run(opts, args, verbosity):
    filename = args.pop(0)
    query = ','.join(args)

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if opts.add_key_value_pairs:
        for pair in opts.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            add_key_value_pairs[key] = convert_str_to_int_float_or_str(value)

    if opts.delete_keys:
        delete_keys = opts.delete_keys.split(',')
    else:
        delete_keys = []

    con = connect(filename, use_lock_file=not opts.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if opts.analyse:
        con.analyse()
        return

    if opts.add_from_file:
        filename = opts.add_from_file
        if ':' in filename:
            calculator_name, filename = filename.split(':')
            atoms = get_calculator(calculator_name)(filename).get_atoms()
        else:
            atoms = ase.io.read(filename)
        con.write(atoms, key_value_pairs=add_key_value_pairs)
        out('Added {0} from {1}'.format(atoms.get_chemical_formula(),
                                        filename))
        return

    if opts.count:
        n = con.count(query)
        print('%s' % plural(n, 'row'))
        return

    if opts.explain:
        for row in con.select(query, explain=True, verbosity=verbosity,
                              limit=opts.limit, offset=opts.offset):
            print(row['explain'])
        return

    if opts.insert_into:
        nkvp = 0
        nrows = 0
        with connect(opts.insert_into,
                     use_lock_file=not opts.no_lock_file) as con2:
            for row in con.select(query):
                kvp = row.get('key_value_pairs', {})
                nkvp -= len(kvp)
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                if opts.unique:
                    row['unique_id'] = '%x' % randint(16**31, 16**32 - 1)
                con2.write(row, data=row.get('data'), **kvp)
                nrows += 1
        out('Added %s (%s updated)' %
            (plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_key_value_pairs or delete_keys:
        ids = [row['id'] for row in con.select(query)]
        m, n = con.update(ids, delete_keys, **add_key_value_pairs)
        out('Added %s (%s updated)' %
            (plural(m, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - m, 'pair')))
        out('Removed', plural(n, 'key-value pair'))
        return

    if opts.delete:
        ids = [row['id'] for row in con.select(query)]
        if ids and not opts.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if input(msg).lower() != 'yes':
                return
        con.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if opts.plot_data:
        from ase.db.plot import dct2plot
        dct2plot(con.get(query).data, opts.plot_data)
        return

    if opts.plot:
        if ':' in opts.plot:
            tags, keys = opts.plot.split(':')
            tags = tags.split(',')
        else:
            tags = []
            keys = opts.plot
        keys = keys.split(',')
        plots = collections.defaultdict(list)
        X = {}
        labels = []
        for row in con.select(query, sort=opts.sort):
            name = ','.join(str(row[tag]) for tag in tags)
            x = row.get(keys[0])
            if x is not None:
                if isinstance(x, basestring):
                    if x not in X:
                        X[x] = len(X)
                        labels.append(x)
                    x = X[x]
                plots[name].append([x] + [row.get(key) for key in keys[1:]])
        import matplotlib.pyplot as plt
        for name, plot in plots.items():
            xyy = zip(*plot)
            x = xyy[0]
            for y, key in zip(xyy[1:], keys[1:]):
                plt.plot(x, y, label=name + ':' + key)
        if X:
            plt.xticks(range(len(labels)), labels, rotation=90)
        plt.legend()
        plt.show()
        return

    if opts.long:
        row = con.get(query)
        summary = Summary(row)
        summary.write()
    elif opts.json:
        row = con.get(query)
        con2 = connect(sys.stdout, 'json', use_lock_file=False)
        kvp = row.get('key_value_pairs', {})
        con2.write(row, data=row.get('data'), **kvp)
    else:
        if opts.open_web_browser:
            import ase.db.app as app
            app.db = con
            app.app.run(host='0.0.0.0', debug=True)
        else:
            columns = list(all_columns)
            c = opts.columns
            if c and c.startswith('++'):
                keys = set()
                for row in con.select(query, limit=opts.limit,
                                      offset=opts.offset):
                    keys.update(row._keys)
                columns.extend(keys)
                if c[2:3] == ',':
                    c = c[3:]
                else:
                    c = ''
            if c:
                if c[0] == '+':
                    c = c[1:]
                elif c[0] != '-':
                    columns = []
                for col in c.split(','):
                    if col[0] == '-':
                        columns.remove(col[1:])
                    else:
                        columns.append(col.lstrip('+'))
            table = Table(con, verbosity, opts.cut)
            table.select(query, columns, opts.sort, opts.limit, opts.offset)
            if opts.csv:
                table.write_csv()
            else:
                table.write(query)
def main(args):
    verbosity = 1 - args.quiet + args.verbose
    query = ','.join(args.query)

    if args.sort.endswith('-'):
        # Allow using "key-" instead of "-key" for reverse sorting
        args.sort = '-' + args.sort[:-1]

    if query.isdigit():
        query = int(query)

    add_key_value_pairs = {}
    if args.add_key_value_pairs:
        for pair in args.add_key_value_pairs.split(','):
            key, value = pair.split('=')
            add_key_value_pairs[key] = convert_str_to_int_float_or_str(value)

    if args.delete_keys:
        delete_keys = args.delete_keys.split(',')
    else:
        delete_keys = []

    db = connect(args.database, use_lock_file=not args.no_lock_file)

    def out(*args):
        if verbosity > 0:
            print(*args)

    if args.analyse:
        db.analyse()
        return

    if args.show_keys:
        keys = defaultdict(int)
        for row in db.select(query):
            for key in row._keys:
                keys[key] += 1
        n = max(len(key) for key in keys) + 1
        for key, number in keys.items():
            print('{:{}} {}'.format(key + ':', n, number))
        return

    if args.show_values:
        keys = args.show_values.split(',')
        values = {key: defaultdict(int) for key in keys}
        numbers = set()
        for row in db.select(query):
            kvp = row.key_value_pairs
            for key in keys:
                value = kvp.get(key)
                if value is not None:
                    values[key][value] += 1
                    if not isinstance(value, str):
                        numbers.add(key)
        n = max(len(key) for key in keys) + 1
        for key in keys:
            vals = values[key]
            if key in numbers:
                print('{:{}} [{}..{}]'.format(key + ':', n,
                                              min(vals), max(vals)))
            else:
                print('{:{}} {}'.format(
                    key + ':', n,
                    ', '.join('{}({})'.format(v, n)
                              for v, n in vals.items())))
        return

    if args.add_from_file:
        filename = args.add_from_file
        configs = ase.io.read(filename)
        if not isinstance(configs, list):
            configs = [configs]
        for atoms in configs:
            db.write(atoms, key_value_pairs=add_key_value_pairs)
        out('Added ' + plural(len(configs), 'row'))
        return

    if args.count:
        n = db.count(query)
        print('%s' % plural(n, 'row'))
        return

    if args.explain:
        for row in db.select(query, explain=True, verbosity=verbosity,
                             limit=args.limit, offset=args.offset):
            print(row['explain'])
        return

    if args.show_metadata:
        print(json.dumps(db.metadata, sort_keys=True, indent=4))
        return

    if args.set_metadata:
        with open(args.set_metadata) as fd:
            db.metadata = json.load(fd)
        return

    if args.insert_into:
        nkvp = 0
        nrows = 0
        with connect(args.insert_into,
                     use_lock_file=not args.no_lock_file) as db2:
            for row in db.select(query, sort=args.sort):
                kvp = row.get('key_value_pairs', {})
                nkvp -= len(kvp)
                kvp.update(add_key_value_pairs)
                nkvp += len(kvp)
                if args.unique:
                    row['unique_id'] = '%x' % randint(16**31, 16**32 - 1)
                if args.strip_data:
                    db2.write(row.toatoms(), **kvp)
                else:
                    db2.write(row, data=row.get('data'), **kvp)
                nrows += 1
        out('Added %s (%s updated)' %
            (plural(nkvp, 'key-value pair'),
             plural(len(add_key_value_pairs) * nrows - nkvp, 'pair')))
        out('Inserted %s' % plural(nrows, 'row'))
        return

    if add_key_value_pairs or delete_keys:
        ids = [row['id'] for row in db.select(query)]
        M = 0
        N = 0
        with db:
            for id in ids:
                m, n = db.update(id, delete_keys=delete_keys,
                                 **add_key_value_pairs)
                M += m
                N += n
        out('Added %s (%s updated)' %
            (plural(M, 'key-value pair'),
             plural(len(add_key_value_pairs) * len(ids) - M, 'pair')))
        out('Removed', plural(N, 'key-value pair'))
        return

    if args.delete:
        ids = [row['id'] for row in db.select(query)]
        if ids and not args.yes:
            msg = 'Delete %s? (yes/No): ' % plural(len(ids), 'row')
            if input(msg).lower() != 'yes':
                return
        db.delete(ids)
        out('Deleted %s' % plural(len(ids), 'row'))
        return

    if args.plot_data:
        from ase.db.plot import dct2plot
        dct2plot(db.get(query).data, args.plot_data)
        return

    if args.plot:
        if ':' in args.plot:
            tags, keys = args.plot.split(':')
            tags = tags.split(',')
        else:
            tags = []
            keys = args.plot
        keys = keys.split(',')
        plots = defaultdict(list)
        X = {}
        labels = []
        for row in db.select(query, sort=args.sort, include_data=False):
            name = ','.join(str(row[tag]) for tag in tags)
            x = row.get(keys[0])
            if x is not None:
                if isinstance(x, basestring):
                    if x not in X:
                        X[x] = len(X)
                        labels.append(x)
                    x = X[x]
                plots[name].append([x] + [row.get(key) for key in keys[1:]])
        import matplotlib.pyplot as plt
        for name, plot in plots.items():
            xyy = zip(*plot)
            x = xyy[0]
            for y, key in zip(xyy[1:], keys[1:]):
                plt.plot(x, y, label=name + ':' + key)
        if X:
            plt.xticks(range(len(labels)), labels, rotation=90)
        plt.legend()
        plt.show()
        return

    if args.json:
        row = db.get(query)
        db2 = connect(sys.stdout, 'json', use_lock_file=False)
        kvp = row.get('key_value_pairs', {})
        db2.write(row, data=row.get('data'), **kvp)
        return

    db.python = args.metadata_from_python_script

    if args.long:
        db.meta = process_metadata(db, html=args.open_web_browser)
        row = db.get(query)
        summary = Summary(row, db.meta)
        summary.write()
        return

    if args.open_web_browser:
        try:
            import ase.db.app as app
        except ImportError:
            print('Please install Flask: pip install flask')
            return
        app.databases['default'] = db
        app.initialize_databases()
        app.app.run(host='0.0.0.0', debug=True)
        return

    if args.write_summary_files:
        prefix = args.write_summary_files
        db.meta = process_metadata(db, html=args.open_web_browser)
        ukey = db.meta.get('unique_key', 'id')
        for row in db.select(query):
            uid = row.get(ukey)
            summary = Summary(row, db.meta,
                              prefix='{}-{}-'.format(prefix, uid))
        return

    columns = list(all_columns)
    c = args.columns
    if c and c.startswith('++'):
        keys = set()
        for row in db.select(query, limit=args.limit, offset=args.offset,
                             include_data=False):
            keys.update(row._keys)
        columns.extend(keys)
        if c[2:3] == ',':
            c = c[3:]
        else:
            c = ''
    if c:
        if c[0] == '+':
            c = c[1:]
        elif c[0] != '-':
            columns = []
        for col in c.split(','):
            if col[0] == '-':
                columns.remove(col[1:])
            else:
                columns.append(col.lstrip('+'))
    table = Table(db, verbosity=verbosity, cut=args.cut)
    table.select(query, columns, args.sort, args.limit, args.offset)
    if args.csv:
        table.write_csv()
    else:
        table.write(query)
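# Illustrative sketch (not part of ase.db; the helper name apply_column_spec
# is hypothetical): the column-selection logic that the CLI versions above
# apply to the --columns option, extracted into a standalone helper.
# "+key" appends to the default columns, "-key" removes one, and a bare list
# replaces the defaults; the "++" prefix (handled separately above by
# scanning row._keys) is omitted here.
def apply_column_spec(spec, default_columns):
    columns = list(default_columns)
    c = spec
    if c:
        if c[0] == '+':
            c = c[1:]
        elif c[0] != '-':
            # No leading '+'/'-': start from an empty column list
            columns = []
        for col in c.split(','):
            if col[0] == '-':
                columns.remove(col[1:])
            else:
                columns.append(col.lstrip('+'))
    return columns

# Example: apply_column_spec('+age,-formula', ['id', 'formula', 'energy'])
# gives ['id', 'energy', 'age'], while apply_column_spec('id,energy', ...)
# gives just ['id', 'energy'].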
def index():
    global next_con_id

    con_id = int(request.args.get("x", "0"))
    if con_id not in connections:
        con_id = next_con_id
        next_con_id += 1
        query = ""
        columns = list(all_columns)
        sort = "id"
        limit = 25
        opened = set()
        nrows = None
        page = 0
    else:
        query, nrows, page, columns, sort, limit, opened = connections[con_id]

    if "sort" in request.args:
        column = request.args["sort"]
        if column == sort:
            sort = "-" + column
        elif "-" + column == sort:
            sort = "id"
        else:
            sort = column
        page = 0
    elif "query" in request.args:
        query = request.args["query"].encode()
        try:
            limit = max(1, min(int(request.args.get("limit", limit)), 200))
        except ValueError:
            pass
        sort = "id"
        opened = set()
        page = 0
        nrows = None
    elif "page" in request.args:
        page = int(request.args["page"])

    if "toggle" in request.args:
        tcolumns = request.args["toggle"].split(",")
        if tcolumns == ["reset"]:
            columns = list(all_columns)
        else:
            for column in tcolumns:
                if column in columns:
                    columns.remove(column)
                    if column == sort.lstrip("-"):
                        sort = "id"
                        page = 0
                else:
                    columns.append(column)

    if nrows is None:
        nrows = db.count(query)

    table = Table(db)
    table.select(query, columns, sort, limit, offset=page * limit)

    con = Connection(query, nrows, page, columns, sort, limit, opened)
    connections[con_id] = con

    table.format(SUBSCRIPT)

    addcolumns = [column for column in all_columns + table.keys
                  if column not in table.columns]

    return render_template(
        "table.html",
        t=table,
        con=con,
        cid=con_id,
        home=home,
        pages=pages(page, nrows, limit),
        nrows=nrows,
        addcolumns=addcolumns,
        row1=page * limit + 1,
        row2=min((page + 1) * limit, nrows),
    )
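# Illustrative sketch (not part of ase.db; the helper name toggle_sort is
# hypothetical): the sort-toggle cycle shared by the index() views above.
# Clicking a column header sorts ascending, a second click flips to
# descending (the '-' prefix), and a third click falls back to sorting
# by 'id'.
def toggle_sort(column, sort):
    if column == sort:
        return '-' + column  # ascending -> descending
    if '-' + column == sort:
        return 'id'          # descending -> back to the default
    return column            # anything else -> sort by this column

# Example: starting from sort='id', three clicks on 'energy' give
# 'energy', '-energy', 'id'.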