def get_by_project_id(project_id: int) -> List[Entity]:
    sql = """
        SELECT
            e.id, ie.origin_id, e.class_code, e.name, e.description, e.created, e.modified,
            e.system_type,
            array_to_json(
                array_agg((t.range_id, t.description)) FILTER (WHERE t.range_id IS NOT NULL)
            ) AS nodes
        FROM model.entity e
        LEFT JOIN model.link t ON e.id = t.domain_id AND t.property_code IN ('P2', 'P89')
        JOIN import.entity ie ON e.id = ie.entity_id
        WHERE ie.project_id = %(id)s
        GROUP BY e.id, ie.origin_id;"""
    g.execute(sql, {'id': project_id})
    entities = []
    for row in g.cursor.fetchall():
        entity = Entity(row)
        entity.origin_id = row.origin_id
        entities.append(entity)
    return entities
def log(priority_: str,
        type_: str,
        message: str,
        info: Union[str, Exception, None] = None) -> None:
    log_levels = app.config['LOG_LEVELS']
    priority = list(log_levels.keys())[list(log_levels.values()).index(priority_)]
    if int(session['settings']['log_level']) < priority:
        return
    info = '{method} {path}{info}'.format(
        path=request.path,
        method=request.method,
        info='\n' + str(info) if info else '')
    sql = """
        INSERT INTO web.system_log (priority, type, message, user_id, info)
        VALUES (%(priority)s, %(type)s, %(message)s, %(user_id)s, %(info)s)
        RETURNING id;"""
    params = {'priority': priority,
              'type': type_,
              'message': message,
              'user_id': current_user.id if hasattr(current_user, 'id') else None,
              'info': info}
    g.execute(sql, params)
def insert(form: FlaskForm, image_id: int, place_id: int, link_id: int) -> None:
    sql = """
        INSERT INTO web.map_overlay (image_id, place_id, link_id, bounding_box)
        VALUES (%(image_id)s, %(place_id)s, %(link_id)s, %(bounding_box)s);"""
    bounding_box = '[[{top_left_northing}, {top_left_easting}],' \
                   '[{bottom_right_northing}, {bottom_right_easting}]]'.format(
                       top_left_easting=form.top_left_easting.data,
                       top_left_northing=form.top_left_northing.data,
                       bottom_right_easting=form.bottom_right_easting.data,
                       bottom_right_northing=form.bottom_right_northing.data)
    g.execute(sql, {'image_id': image_id,
                    'place_id': place_id,
                    'link_id': link_id,
                    'bounding_box': bounding_box})
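
# Illustrative sketch only (not part of the mapper): the bounding box is stored as a
# string of two [northing, easting] corners built from the form fields above. The
# coordinate values below are invented; only the string layout matters.
example_bounding_box = '[[{top_left_northing}, {top_left_easting}],' \
                       '[{bottom_right_northing}, {bottom_right_easting}]]'.format(
                           top_left_northing=48.21, top_left_easting=16.36,
                           bottom_right_northing=48.19, bottom_right_easting=16.39)
# example_bounding_box == '[[48.21, 16.36],[48.19, 16.39]]'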
def get_by_codes(class_name: str) -> List[Entity]:
    # Possible class names: actor, event, place, reference, source
    if class_name == 'source':
        sql = EntityMapper.build_sql(nodes=True) + """
            WHERE e.class_code IN %(codes)s AND e.system_type = 'source content'
            GROUP BY e.id;"""
    elif class_name == 'reference':
        sql = EntityMapper.build_sql(nodes=True) + """
            WHERE e.class_code IN %(codes)s AND e.system_type != 'file'
            GROUP BY e.id;"""
    else:
        aliases = bool(class_name == 'actor'
                       and current_user.settings['table_show_aliases'])
        sql = EntityMapper.build_sql(nodes=class_name == 'event', aliases=aliases) + """
            WHERE e.class_code IN %(codes)s
            GROUP BY e.id;"""
    g.execute(sql, {'codes': tuple(app.config['CLASS_CODES'][class_name])})
    return [Entity(row) for row in g.cursor.fetchall()]
def insert(form):
    sql = """
        INSERT INTO web.user (username, real_name, info, email, active, password, group_id)
        VALUES (
            %(username)s, %(real_name)s, %(info)s, %(email)s, %(active)s, %(password)s,
            (SELECT id FROM web.group WHERE name LIKE %(group_name)s))
        RETURNING id;"""
    password = bcrypt.hashpw(form.password.data.encode('utf-8'),
                             bcrypt.gensalt()).decode('utf-8')
    g.execute(sql, {'username': form.username.data,
                    'real_name': form.real_name.data,
                    'info': form.description.data,
                    'email': form.email.data,
                    'active': form.active.data,
                    'group_name': form.group.data,
                    'password': password})
    return g.cursor.fetchone()[0]
def get_linked_entities(entity,
                        codes,
                        inverse: Optional[bool] = False,
                        nodes: Optional[bool] = False) -> list:
    from openatlas.models.entity import EntityMapper
    sql = """
        SELECT range_id AS result_id FROM model.link
        WHERE domain_id = %(entity_id)s AND property_code IN %(codes)s;"""
    if inverse:
        sql = """
            SELECT domain_id AS result_id FROM model.link
            WHERE range_id = %(entity_id)s AND property_code IN %(codes)s;"""
    g.execute(sql, {'entity_id': entity if type(entity) is int else entity.id,
                    'codes': tuple(codes if type(codes) is list else [codes])})
    ids = [element for (element,) in g.cursor.fetchall()]
    return EntityMapper.get_by_ids(ids, nodes=nodes)
def get_overview_counts() -> Dict[str, int]:
    sql = """
        SELECT
            SUM(CASE WHEN class_code = 'E33' AND system_type = 'source content' THEN 1 END)
                AS source,
            SUM(CASE WHEN class_code IN ('E7', 'E8') THEN 1 END) AS event,
            SUM(CASE WHEN class_code IN ('E21', 'E74', 'E40') THEN 1 END) AS actor,
            SUM(CASE WHEN class_code = 'E18' THEN 1 END) AS place,
            SUM(CASE WHEN class_code IN ('E31', 'E84') AND system_type != 'file' THEN 1 END)
                AS reference,
            SUM(CASE WHEN class_code = 'E22' THEN 1 END) AS find,
            SUM(CASE WHEN class_code = 'E31' AND system_type = 'file' THEN 1 END) AS file
        FROM model.entity;"""
    g.execute(sql)
    row = g.cursor.fetchone()
    return {col[0]: row[idx] for idx, col in enumerate(g.cursor.description)}
def delete_orphans(parameter: str) -> int:
    from openatlas.models.node import NodeMapper
    class_codes = tuple(app.config['CODE_CLASS'].keys())
    if parameter == 'orphans':
        class_codes = class_codes + ('E55',)
        sql_where = EntityMapper.sql_orphan + " AND e.class_code NOT IN %(class_codes)s"
    elif parameter == 'unlinked':
        sql_where = EntityMapper.sql_orphan + " AND e.class_code IN %(class_codes)s"
    elif parameter == 'types':
        count = 0
        for node in NodeMapper.get_node_orphans():
            node.delete()
            count += 1
        return count
    else:
        return 0
    sql = 'DELETE FROM model.entity WHERE id IN (' + sql_where + ');'
    g.execute(sql, {'class_codes': class_codes})
    return g.cursor.rowcount
def update(link_: Link) -> None:
    sql = """
        UPDATE model.link SET (property_code, domain_id, range_id, description, type_id,
            begin_from, begin_to, begin_comment, end_from, end_to, end_comment) =
            (%(property_code)s, %(domain_id)s, %(range_id)s, %(description)s, %(type_id)s,
             %(begin_from)s, %(begin_to)s, %(begin_comment)s, %(end_from)s, %(end_to)s,
             %(end_comment)s)
        WHERE id = %(id)s;"""
    g.execute(sql, {'id': link_.id,
                    'property_code': link_.property.code,
                    'domain_id': link_.domain.id,
                    'range_id': link_.range.id,
                    'type_id': link_.type.id if link_.type else None,
                    'description': link_.description,
                    'begin_from': DateMapper.datetime64_to_timestamp(link_.begin_from),
                    'begin_to': DateMapper.datetime64_to_timestamp(link_.begin_to),
                    'begin_comment': link_.begin_comment,
                    'end_from': DateMapper.datetime64_to_timestamp(link_.end_from),
                    'end_to': DateMapper.datetime64_to_timestamp(link_.end_to),
                    'end_comment': link_.end_comment})
def update_content(name: str, form) -> None:
    g.execute('BEGIN')
    try:
        for language in app.config['LANGUAGES'].keys():
            sql = 'DELETE FROM web.i18n WHERE name = %(name)s AND language = %(language)s'
            g.execute(sql, {'name': name, 'language': language})
            sql = """
                INSERT INTO web.i18n (name, language, text)
                VALUES (%(name)s, %(language)s, %(text)s);"""
            g.execute(sql, {'name': name,
                            'language': language,
                            'text': getattr(form, language).data.strip()})
        g.execute('COMMIT')
    except Exception as e:  # pragma: no cover
        g.execute('ROLLBACK')
        logger.log('error', 'database', 'transaction failed', e)
        flash(_('error transaction'), 'error')
def invalid_involvement_dates() -> list:
    """ Find invalid event participation dates, e.g. an attending actor who was born
    after the event ended, and return the corresponding links."""
    from openatlas.models.link import LinkMapper
    sql = """
        SELECT l.id FROM model.entity actor
        JOIN model.link l ON actor.id = l.range_id
            AND l.property_code IN ('P11', 'P14', 'P22', 'P23')
        JOIN model.entity event ON l.domain_id = event.id
        WHERE (actor.begin_from IS NOT NULL AND l.end_from IS NOT NULL
               AND actor.begin_from > l.end_from)
            OR (actor.begin_to IS NOT NULL AND l.end_to IS NOT NULL
                AND actor.begin_to > l.end_to)
            OR (actor.begin_from IS NOT NULL AND event.end_from IS NOT NULL
                AND actor.begin_from > event.end_from)
            OR (actor.begin_to IS NOT NULL AND event.end_to IS NOT NULL
                AND actor.begin_to > event.end_to);"""
    g.execute(sql)
    return [LinkMapper.get_by_id(row.id) for row in g.cursor.fetchall()]
def insert(entity: Entity, form: FlaskForm) -> None:
    for shape in ['point', 'line', 'polygon']:
        data = getattr(form, 'gis_' + shape + 's').data
        if not data:
            continue  # pragma: no cover
        for item in json.loads(data):
            # Don't save geom if coordinates are empty
            if not item['geometry']['coordinates'] \
                    or item['geometry']['coordinates'] == [[]]:
                continue  # pragma: no cover
            if item['properties']['shapeType'] != 'centerpoint':
                # Test for valid geom
                sql = """
                    SELECT st_isvalid(
                        public.ST_SetSRID(public.ST_GeomFromGeoJSON(%(geojson)s), 4326));"""
                g.execute(sql, {'geojson': json.dumps(item['geometry'])})
                if not g.cursor.fetchone()[0]:
                    raise InvalidGeomException
            sql = """
                INSERT INTO gis.{shape} (entity_id, name, description, type, geom) VALUES (
                    %(entity_id)s,
                    %(name)s,
                    %(description)s,
                    %(type)s,
                    public.ST_SetSRID(public.ST_GeomFromGeoJSON(%(geojson)s), 4326));
                """.format(shape=shape if shape != 'line' else 'linestring')
            g.execute(sql, {
                'entity_id': entity.id,
                'name': sanitize(item['properties']['name'], 'description'),
                'description': sanitize(item['properties']['description'], 'description'),
                'type': item['properties']['shapeType'],
                'geojson': json.dumps(item['geometry'])})
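
# Illustrative sketch only: one item in the shape the loop above expects after
# json.loads(form.gis_points.data). The coordinates, name and description are invented;
# a 'shapeType' other than 'centerpoint' would additionally trigger the st_isvalid check.
example_point_item = {
    'geometry': {'type': 'Point', 'coordinates': [16.37, 48.21]},
    'properties': {'name': 'Example point',
                   'description': 'Made-up sample data',
                   'shapeType': 'centerpoint'}}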
def get_by_id(entity_id: int,
              nodes: bool = False,
              aliases: bool = False,
              view_name: Optional[str] = None) -> Entity:
    from openatlas import logger
    if entity_id in g.nodes:  # pragma: no cover, just in case a node is requested
        return g.nodes[entity_id]
    sql = EntityMapper.build_sql(nodes, aliases) + ' WHERE e.id = %(id)s GROUP BY e.id;'
    g.execute(sql, {'id': entity_id})
    if g.cursor.rowcount < 1:  # pragma: no cover
        abort(418)
    entity = Entity(g.cursor.fetchone())
    if view_name and view_name != entity.view_name:
        # Entity was called from wrong view, abort!
        logger.log(
            'error', 'model',
            'entity ({id}) has view name "{view}", requested was "{request}"'.format(
                id=entity_id, view=entity.view_name, request=view_name))
        abort(422)
    return entity
def delete_by_entity(entity: Entity) -> None:
    g.execute('DELETE FROM gis.point WHERE entity_id = %(id)s;', {'id': entity.id})
    g.execute('DELETE FROM gis.linestring WHERE entity_id = %(id)s;', {'id': entity.id})
    g.execute('DELETE FROM gis.polygon WHERE entity_id = %(id)s;', {'id': entity.id})
def check_links() -> List[Dict[str, str]]:
    """ Check all existing links for CIDOC CRM validity and return the invalid ones."""
    from openatlas.util.util import link
    from openatlas.models.entity import EntityMapper
    sql = """
        SELECT DISTINCT l.property_code AS property, d.class_code AS domain,
            r.class_code AS range
        FROM model.link l
        JOIN model.entity d ON l.domain_id = d.id
        JOIN model.entity r ON l.range_id = r.id;"""
    g.execute(sql)
    invalid_links = []
    for row in g.cursor.fetchall():
        property_ = g.properties[row.property]
        domain_is_valid = property_.find_object('domain_class_code', row.domain)
        range_is_valid = property_.find_object('range_class_code', row.range)
        invalid_linking = []
        if not domain_is_valid or not range_is_valid:
            invalid_linking.append({'property': row.property,
                                    'domain': row.domain,
                                    'range': row.range})
        for item in invalid_linking:
            sql = """
                SELECT l.id, l.property_code, l.domain_id, l.range_id, l.description,
                    l.created, l.modified
                FROM model.link l
                JOIN model.entity d ON l.domain_id = d.id
                JOIN model.entity r ON l.range_id = r.id
                WHERE l.property_code = %(property)s
                    AND d.class_code = %(domain)s
                    AND r.class_code = %(range)s;"""
            g.execute(sql, {'property': item['property'],
                            'domain': item['domain'],
                            'range': item['range']})
            for row2 in g.cursor.fetchall():
                domain = EntityMapper.get_by_id(row2.domain_id)
                range_ = EntityMapper.get_by_id(row2.range_id)
                invalid_links.append({
                    'domain': link(domain) + ' (' + domain.class_.code + ')',
                    'property': link(g.properties[row2.property_code]),
                    'range': link(range_) + ' (' + range_.class_.code + ')'})
    return invalid_links
def get_log_for_advanced_view(entity_id: str) -> dict:
    sql = """
        SELECT ul.created, ul.user_id, ul.entity_id, u.username
        FROM web.user_log ul
        JOIN web.user u ON ul.user_id = u.id
        WHERE ul.entity_id = %(entity_id)s AND ul.action = %(action)s
        ORDER BY ul.created DESC LIMIT 1;"""
    g.execute(sql, {'entity_id': entity_id, 'action': 'insert'})
    row_insert = g.cursor.fetchone()
    g.execute(sql, {'entity_id': entity_id, 'action': 'update'})
    row_update = g.cursor.fetchone()
    sql = 'SELECT project_id, origin_id, user_id FROM import.entity WHERE entity_id = %(id)s;'
    g.execute(sql, {'id': entity_id})
    row_import = g.cursor.fetchone()
    project = ImportMapper.get_project_by_id(row_import.project_id) if row_import else None
    log = {'creator': UserMapper.get_by_id(row_insert.user_id) if row_insert else None,
           'created': row_insert.created if row_insert else None,
           'modifier': UserMapper.get_by_id(row_update.user_id) if row_update else None,
           'modified': row_update.created if row_update else None,
           'import_project': project,
           'import_user': UserMapper.get_by_id(row_import.user_id) if row_import else None,
           'import_origin_id': row_import.origin_id if row_import else None}
    return log
def get_all() -> Dict[str, Property]:
    sql = """
        SELECT id, code, comment, domain_class_code, range_class_code, name, name_inverse
        FROM model.property;"""
    g.execute(sql)
    properties = {row.code: Property(id=row.id,
                                     _name=row.name,
                                     _name_inverse=row.name_inverse,
                                     code=row.code,
                                     comment=row.comment,
                                     domain_class_code=row.domain_class_code,
                                     range_class_code=row.range_class_code,
                                     sub=[],
                                     super=[],
                                     i18n={},
                                     i18n_inverse={}) for row in g.cursor.fetchall()}
    g.execute('SELECT super_code, sub_code FROM model.property_inheritance;')
    for row in g.cursor.fetchall():
        properties[row.super_code].sub.append(row.sub_code)
        properties[row.sub_code].super.append(row.super_code)
    sql = """
        SELECT property_code, language_code, text, text_inverse
        FROM model.property_i18n
        WHERE language_code IN %(language_codes)s;"""
    g.execute(sql, {'language_codes': tuple(app.config['LANGUAGES'].keys())})
    for row in g.cursor.fetchall():
        properties[row.property_code].i18n[row.language_code] = row.text
        properties[row.property_code].i18n_inverse[row.language_code] = row.text_inverse
    return properties
def insert(code, name, system_type=None, description=None):
    from openatlas.util.util import sanitize
    if not name:  # pragma: no cover
        logger.log('error', 'database', 'Insert entity without name and date')
        return
    sql = """
        INSERT INTO model.entity (name, system_type, class_code, description)
        VALUES (%(name)s, %(system_type)s, %(code)s, %(description)s) RETURNING id;"""
    params = {'name': str(name).strip(),
              'code': code,
              'system_type': system_type.strip() if system_type else None,
              'description': sanitize(description, 'description') if description else None}
    g.execute(sql, params)
    return EntityMapper.get_by_id(g.cursor.fetchone()[0])
def update(entity: Entity) -> None:
    from openatlas.util.util import sanitize
    sql = """
        UPDATE model.entity SET (name, description, begin_from, begin_to, begin_comment,
            end_from, end_to, end_comment) =
            (%(name)s, %(description)s, %(begin_from)s, %(begin_to)s, %(begin_comment)s,
             %(end_from)s, %(end_to)s, %(end_comment)s)
        WHERE id = %(id)s;"""
    g.execute(sql, {
        'id': entity.id,
        'name': entity.name,
        'begin_from': DateMapper.datetime64_to_timestamp(entity.begin_from),
        'begin_to': DateMapper.datetime64_to_timestamp(entity.begin_to),
        'end_from': DateMapper.datetime64_to_timestamp(entity.end_from),
        'end_to': DateMapper.datetime64_to_timestamp(entity.end_to),
        'begin_comment': entity.begin_comment,
        'end_comment': entity.end_comment,
        'description': sanitize(entity.description, 'description')})
def insert(entity: 'Entity',
           property_code: str,
           range_: Union['Entity', List['Entity']],
           description: Optional[str] = None,
           inverse: bool = False,
           type_id: Optional[int] = None) -> List[int]:
    property_ = g.properties[property_code]
    entities = range_ if isinstance(range_, list) else [range_]
    new_link_ids = []
    for linked_entity in entities:
        domain = linked_entity if inverse else entity
        range_ = entity if inverse else linked_entity
        domain_error = True
        range_error = True
        if property_.find_object('domain_class_code', g.classes[domain.class_.code].code):
            domain_error = False
        if property_.find_object('range_class_code', g.classes[range_.class_.code].code):
            range_error = False
        if domain_error or range_error:
            text = _('error link') + ': ' + g.classes[domain.class_.code].code + ' > '
            text += property_code + ' > ' + g.classes[range_.class_.code].code
            logger.log('error', 'model', text)
            flash(text, 'error')
            continue
        sql = """
            INSERT INTO model.link (property_code, domain_id, range_id, description, type_id)
            VALUES (
                %(property_code)s, %(domain_id)s, %(range_id)s, %(description)s, %(type_id)s)
            RETURNING id;"""
        # Todo: build only one SQL statement and move execution out of the loop
        g.execute(sql, {'property_code': property_code,
                        'domain_id': domain.id,
                        'range_id': range_.id,
                        'description': description,
                        'type_id': type_id})
        new_link_ids.append(g.cursor.fetchone()[0])
    return new_link_ids
def get_all_nodes() -> Dict[int, Node]:
    """ Get and return all type and place nodes."""
    sql = """
        SELECT e.id, e.name, e.class_code, e.description, e.system_type, e.created,
            e.modified, es.id AS super_id,
            COUNT(l2.id) AS count, COUNT(l3.id) AS count_property
        FROM model.entity e
        -- Get super
        LEFT JOIN model.link l ON e.id = l.domain_id AND l.property_code = %(property_code)s
        LEFT JOIN model.entity es ON l.range_id = es.id
        -- Get count
        LEFT JOIN model.link l2 ON e.id = l2.range_id
            AND (l2.property_code = 'P2'
                OR (l2.property_code = 'P89' AND e.system_type = 'place location'))
        LEFT JOIN model.link l3 ON e.id = l3.type_id
        WHERE e.class_code = %(class_code)s
            AND (e.system_type IS NULL OR e.system_type != 'place location')
        GROUP BY e.id, es.id
        ORDER BY e.name;"""
    g.execute(sql, {'class_code': 'E55', 'property_code': 'P127'})
    types = g.cursor.fetchall()
    g.execute(sql, {'class_code': 'E53', 'property_code': 'P89'})
    places = g.cursor.fetchall()
    nodes = {}
    for row in types + places:
        node = Node(row)
        nodes[node.id] = node
        node.count = row.count + row.count_property
        node.count_subs = 0
        node.subs = []
        node.locked = False
        node.root = [row.super_id] if row.super_id else []
    NodeMapper.populate_subs(nodes)
    return nodes
def populate_subs(nodes: Dict[int, Node]) -> None:
    g.execute("SELECT id, name, extendable FROM web.form ORDER BY name ASC;")
    forms = {}
    for row in g.cursor.fetchall():
        forms[row.id] = {'id': row.id, 'name': row.name, 'extendable': row.extendable}
    sql = """
        SELECT h.id, h.name, h.multiple, h.system, h.directional, h.value_type, h.locked,
            (SELECT ARRAY(
                SELECT f.id FROM web.form f
                JOIN web.hierarchy_form hf ON f.id = hf.form_id
                    AND hf.hierarchy_id = h.id)) AS form_ids
        FROM web.hierarchy h;"""
    g.execute(sql)
    hierarchies = {row.id: row for row in g.cursor.fetchall()}
    for id_, node in nodes.items():
        if node.root:
            super_ = nodes[node.root[0]]
            super_.subs.append(id_)
            node.root = NodeMapper.get_root_path(nodes, node, node.root[0], node.root)
            node.system = False
            node.locked = nodes[node.root[0]].locked
        else:
            node.value_type = hierarchies[node.id].value_type
            node.directional = hierarchies[node.id].directional
            node.multiple = hierarchies[node.id].multiple
            node.system = hierarchies[node.id].system
            node.locked = hierarchies[node.id].locked
            node.forms = {form_id: forms[form_id]
                          for form_id in hierarchies[node.id].form_ids}
def get_by_object(object_) -> Union[None, dict]:
    ids = [object_.id]
    # Get overlays of parents
    if object_.system_type == 'find':
        stratigraphic_unit = object_.get_linked_entity('P46', True)
        ids.append(stratigraphic_unit.id)
        feature = stratigraphic_unit.get_linked_entity('P46', True)
        ids.append(feature.id)
        ids.append(feature.get_linked_entity('P46', True).id)
    elif object_.system_type == 'stratigraphic unit':
        feature = object_.get_linked_entity('P46', True)
        ids.append(feature.id)
        ids.append(feature.get_linked_entity('P46', True).id)
    elif object_.system_type == 'feature':
        ids.append(object_.get_linked_entity('P46', True).id)
    sql = """
        SELECT o.id, o.place_id, o.image_id, o.bounding_box, i.name
        FROM web.map_overlay o
        JOIN model.entity i ON o.image_id = i.id
        WHERE o.place_id IN %(place_ids)s;"""
    g.execute(sql, {'place_ids': tuple(ids)})
    return {row.image_id: Overlay(row) for row in g.cursor.fetchall()}
def get_network_json(params):
    """ Returns JSON data for d3.js."""
    properties = [code for code, param in params['properties'].items() if param['active']]
    classes = [code for code, param in params['classes'].items() if param['active']]
    if not classes:
        return None

    # Get edges
    entities = set()
    edges = ''
    if properties:
        sql = """
            SELECT l.domain_id, l.range_id FROM model.link l
            JOIN model.entity e ON l.domain_id = e.id
            WHERE property_code IN %(properties)s
                AND (e.system_type IS NULL OR e.system_type != 'file');"""
        g.execute(sql, {'properties': tuple(properties)})
        for row in g.cursor.fetchall():
            edges += "{{'source':'{domain_id}','target':'{range_id}'}},".format(
                domain_id=row.domain_id, range_id=row.range_id)
            entities.update([row.domain_id, row.range_id])

    # Get entities
    sql = "SELECT id, class_code, name FROM model.entity WHERE class_code IN %(classes)s;"
    g.execute(sql, {'classes': tuple(classes)})
    nodes = ''
    entities_already = set()
    for row in g.cursor.fetchall():
        if params['options']['orphans'] or row.id in entities:
            entities_already.add(row.id)
            nodes += "{{'id':'{id}','name':'{name}','color':'{color}'}},".format(
                id=row.id,
                name=truncate_string(row.name.replace("'", ""), span=False),
                color=params['classes'][row.class_code]['color'])

    # Get entities of links which weren't present in the class selection
    array_diff = [item for item in entities if item not in entities_already]
    if array_diff:
        sql = "SELECT id, class_code, name FROM model.entity WHERE id IN %(array_diff)s;"
        g.execute(sql, {'array_diff': tuple(array_diff)})
        for row in g.cursor.fetchall():
            color = ''
            if row.class_code in params['classes']:  # pragma: no cover
                color = params['classes'][row.class_code]['color']
            nodes += "{{'id':'{id}','name':'{name}','color':'{color}'}},".format(
                id=row.id,
                color=color,
                name=truncate_string(row.name.replace("'", ""), span=False))
    return "graph = {'nodes': [" + nodes + "], links: [" + edges + "]};" if nodes else None
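
# Illustrative sketch only: the rough shape of the params argument the function above
# reads. The property codes, class codes and colors are invented; in the application the
# structure is assembled by the network view from user input.
example_network_params = {
    'properties': {'P11': {'active': True}, 'P14': {'active': False}},
    'classes': {'E21': {'active': True, 'color': '#34B522'},
                'E18': {'active': True, 'color': '#FF0000'}},
    'options': {'orphans': False}}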
def sql_execute() -> str:
    path = app.config['EXPORT_FOLDER_PATH'] + '/sql'
    latest_file = None
    latest_file_date = None
    for file in [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))]:
        name = basename(file)
        if name == '.gitignore':
            continue
        file_date = datetime.utcfromtimestamp(os.path.getmtime(path + '/' + file))
        if not latest_file_date or file_date > latest_file_date:
            latest_file = file
            latest_file_date = file_date
    file_data = {'backup_to_old': True}
    if latest_file:
        yesterday = datetime.today() - timedelta(days=1)
        file_data['file'] = latest_file
        file_data['backup_to_old'] = yesterday > latest_file_date
        file_data['size'] = convert_size(os.path.getsize(path + '/' + latest_file))
        file_data['date'] = format_date(latest_file_date)
    response = ''
    form = SqlForm()
    if form.validate_on_submit() and not file_data['backup_to_old']:
        g.execute('BEGIN')
        try:
            g.execute(form.statement.data)
            response = '<p>Rows affected: {count}</p>'.format(count=g.cursor.rowcount)
            try:
                response += '<p>{rows}</p>'.format(rows=g.cursor.fetchall())
            except:  # pragma: no cover
                pass  # Assuming it was no SELECT statement, so returning just the row count
            g.execute('COMMIT')
            flash(_('SQL executed'), 'info')
        except Exception as e:
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            response = e
            flash(_('error transaction'), 'error')
    return render_template('sql/execute.html',
                           form=form,
                           response=response,
                           file_data=file_data)
def get_all() -> dict:
    g.execute("SELECT id, code, name FROM model.class;")
    classes = {row.code: ClassObject(row) for row in g.cursor.fetchall()}
    g.execute("SELECT super_code, sub_code FROM model.class_inheritance;")
    for row in g.cursor.fetchall():
        classes[row.super_code].sub.append(row.sub_code)
        classes[row.sub_code].super.append(row.super_code)
    sql = """
        SELECT class_code, language_code, attribute, text FROM model.class_i18n
        WHERE language_code IN %(language_codes)s;"""
    g.execute(sql, {'language_codes': tuple(app.config['LANGUAGES'].keys())})
    for row in g.cursor.fetchall():
        class_ = classes[row.class_code]
        if row.language_code not in class_.i18n:
            class_.i18n[row.language_code] = {}
        class_.i18n[row.language_code][row.attribute] = row.text
    return classes
def get_all() -> Dict:
    sql = """
        SELECT id, code, domain_class_code, range_class_code, name, name_inverse
        FROM model.property;"""
    g.execute(sql)
    properties = {row.code: Property(row) for row in g.cursor.fetchall()}
    g.execute('SELECT super_code, sub_code FROM model.property_inheritance;')
    for row in g.cursor.fetchall():
        properties[row.super_code].sub.append(row.sub_code)
        properties[row.sub_code].super.append(row.super_code)
    sql = """
        SELECT property_code, language_code, attribute, text FROM model.property_i18n
        WHERE language_code IN %(language_codes)s;"""
    g.execute(sql, {'language_codes': tuple(app.config['LANGUAGES'].keys())})
    for row in g.cursor.fetchall():
        property_ = properties[row.property_code]
        if row.language_code not in property_.i18n:
            property_.i18n[row.language_code] = {}
        property_.i18n[row.language_code][row.attribute] = row.text
    return properties
def get_all() -> Dict[str, ClassObject]:
    g.execute("SELECT id, code, name, comment FROM model.class;")
    classes: Dict[str, ClassObject] = {
        row.code: ClassObject(_name=row.name,
                              code=row.code,
                              id=row.id,
                              comment=row.comment,
                              i18n={},
                              sub=[],
                              super=[]) for row in g.cursor.fetchall()}
    g.execute("SELECT super_code, sub_code FROM model.class_inheritance;")
    for row in g.cursor.fetchall():
        classes[row.super_code].sub.append(row.sub_code)
        classes[row.sub_code].super.append(row.super_code)
    sql = """
        SELECT class_code, language_code, text FROM model.class_i18n
        WHERE language_code IN %(language_codes)s;"""
    g.execute(sql, {'language_codes': tuple(app.config['LANGUAGES'].keys())})
    for row in g.cursor.fetchall():
        classes[row.class_code].i18n[row.language_code] = row.text
    return classes
def remove_profile_image(entity_id: int) -> None:
    sql = 'DELETE FROM web.entity_profile_image WHERE entity_id = %(entity_id)s;'
    g.execute(sql, {'entity_id': entity_id})
def get_profile_image_id(id_: int) -> Optional[int]:
    sql = 'SELECT image_id FROM web.entity_profile_image WHERE entity_id = %(entity_id)s;'
    g.execute(sql, {'entity_id': id_})
    return g.cursor.fetchone()[0] if g.cursor.rowcount else None