def relation_update(id_, origin_id):
    """Show and process the form to update an actor-actor relation (OA7) link.

    id_ -- id of the link to update
    origin_id -- id of the entity from whose view the update was initiated;
                 used to decide link direction and where to redirect afterwards
    """
    link_ = LinkMapper.get_by_id(id_)
    # Re-fetch both ends as full entities (the link row may hold only stubs)
    domain = EntityMapper.get_by_id(link_.domain.id)
    range_ = EntityMapper.get_by_id(link_.range.id)
    origin = range_ if origin_id == range_.id else domain
    related = range_ if origin_id == domain.id else domain
    form = build_form(RelationForm, 'Actor Actor Relation', link_, request)
    del form.actor, form.insert_and_continue, form.origin_id
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # The update is implemented as delete followed by re-insert,
            # optionally with swapped direction if "inverse" was checked
            link_.delete()
            if form.inverse.data:
                link_id = related.link('OA7', origin, form.description.data)
            else:
                link_id = origin.link('OA7', related, form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
            flash(_('info update'), 'info')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        return redirect(url_for('actor_view', id_=origin.id) + '#tab-relation')
    # GET request: pre-fill the form from the existing link
    if origin.id == range_.id:
        form.inverse.data = True
    form.save.label.text = _('save')
    link_.set_dates()
    form.populate_dates(link_)
    return render_template('relation/update.html', origin=origin, form=form,
                           related=related)
def __init__(self, row, domain: 'Optional[Entity]' = None,
             range_: 'Optional[Entity]' = None) -> None:
    """Build a Link from a database row.

    domain/range_ -- optionally pass already fetched entities to avoid extra
        lookups. Note: these were annotated as bool which was wrong — the
        callers pass entity objects.
    """
    from openatlas.models.entity import EntityMapper
    self.id = row.id
    self.description = row.description
    self.property = g.properties[row.property_code]
    self.domain = domain if domain else EntityMapper.get_by_id(
        row.domain_id)
    self.range = range_ if range_ else EntityMapper.get_by_id(row.range_id)
    self.type = g.nodes[row.type_id] if row.type_id else None
    self.nodes = dict()  # type: Dict
    if hasattr(row, 'type_id') and row.type_id:
        self.nodes[g.nodes[row.type_id]] = None
    # Date attributes only exist if the query selected the date columns
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(
            row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
        # Human readable first/last year for display
        self.first = DateForm.format_date(
            self.begin_from, 'year') if self.begin_from else None
        self.last = DateForm.format_date(self.end_from,
                                         'year') if self.end_from else None
        self.last = DateForm.format_date(
            self.end_to, 'year') if self.end_to else self.last
def relation_update(id_, origin_id):
    """Update an actor-actor relation (OA7) link via form.

    id_ -- id of the link to update
    origin_id -- id of the entity the user navigated from; determines
                 direction handling and the redirect target
    """
    link_ = LinkMapper.get_by_id(id_)
    # Fetch full entities for both link ends
    domain = EntityMapper.get_by_id(link_.domain.id)
    range_ = EntityMapper.get_by_id(link_.range.id)
    origin = range_ if origin_id == range_.id else domain
    related = range_ if origin_id == domain.id else domain
    form = build_form(RelationForm, 'Actor Actor Relation', link_, request)
    del form.actor, form.insert_and_continue, form.origin_id
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # Replace the link: delete and re-create, honoring "inverse"
            link_.delete()
            if form.inverse.data:
                link_id = related.link('OA7', origin, form.description.data)
            else:
                link_id = origin.link('OA7', related, form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
            flash(_('info update'), 'info')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        return redirect(url_for('actor_view', id_=origin.id) + '#tab-relation')
    # GET request: prepare form display
    if origin.id == range_.id:
        form.inverse.data = True
    form.save.label.text = _('save')
    link_.set_dates()
    form.populate_dates(link_)
    return render_template('relation/update.html', origin=origin, form=form,
                           related=related)
def member_update(id_, origin_id):
    """Show and process the form to update a group membership (P107) link.

    id_ -- id of the link to update
    origin_id -- id of the entity (group or member) the user came from
    """
    link_ = LinkMapper.get_by_id(id_)
    domain = EntityMapper.get_by_id(link_.domain.id)
    range_ = EntityMapper.get_by_id(link_.range.id)
    origin = range_ if origin_id == range_.id else domain
    form = build_form(MemberForm, 'Member', link_, request)
    del form.actor, form.group, form.insert_and_continue
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # Update is implemented as delete followed by re-insert
            link_.delete()
            link_id = domain.link('P107', range_, form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        # Jump back to the tab matching the direction of membership
        tab = '#tab-member-of' if origin.id == range_.id else '#tab-member'
        return redirect(url_for('actor_view', id_=origin.id) + tab)
    form.save.label.text = _('save')
    link_.set_dates()
    form.populate_dates(link_)
    related = range_ if origin_id == domain.id else domain
    return render_template('member/update.html', origin=origin, form=form,
                           related=related)
def member_update(id_, origin_id):
    """Update a group membership (P107) link via form.

    id_ -- id of the link to update
    origin_id -- id of the entity (group or member) the user came from
    """
    link_ = LinkMapper.get_by_id(id_)
    domain = EntityMapper.get_by_id(link_.domain.id)
    range_ = EntityMapper.get_by_id(link_.range.id)
    origin = range_ if origin_id == range_.id else domain
    related = range_ if origin_id == domain.id else domain
    form = build_form(MemberForm, 'Member', link_, request)
    del form.actor, form.group, form.insert_and_continue
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # Replace the link: delete then re-insert with new values
            link_.delete()
            link_id = domain.link('P107', range_, form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        # Redirect to the tab matching the membership direction
        tab = '#tab-member-of' if origin.id == range_.id else '#tab-member'
        return redirect(url_for('actor_view', id_=origin.id) + tab)
    form.save.label.text = _('save')
    link_.set_dates()
    form.populate_dates(link_)
    return render_template('member/update.html', origin=origin, form=form,
                           related=related)
def test_export(self):
    """Exercise the SQL and CSV export views: page display, export creation,
    download and deletion of the generated dump files."""
    with app.app_context():
        self.login()

        # SQL export
        rv = self.app.get(url_for('export_sql'))
        assert b'Export SQL' in rv.data
        rv = self.app.post(url_for('export_sql'), follow_redirects=True)
        assert b'Data was exported as SQL' in rv.data
        # Dump filenames are prefixed with the current date
        date_string = DateMapper.current_date_for_filename()
        self.app.get(url_for('download_sql', filename=date_string + '_dump.sql'))
        rv = self.app.get(url_for('delete_sql', filename=date_string + '_dump.sql'),
                          follow_redirects=True)
        assert b'File deleted' in rv.data

        # CSV export with various option combinations
        rv = self.app.get(url_for('export_csv'))
        assert b'Export CSV' in rv.data
        rv = self.app.post(url_for('export_csv'), follow_redirects=True,
                           data={'zip': True, 'model_class': True, 'gis_point': True,
                                 'gis_format': 'wkt'})
        assert b'Data was exported as CSV' in rv.data
        rv = self.app.post(url_for('export_csv'), follow_redirects=True,
                           data={'model_class': True, 'timestamps': True,
                                 'gis_polygon': True, 'gis_format': 'postgis'})
        assert b'Data was exported as CSV' in rv.data
        rv = self.app.post(url_for('export_csv'), follow_redirects=True,
                           data={'model_class': True, 'timestamps': True,
                                 'gis_point': True, 'gis_polygon': True,
                                 'gis_format': 'coordinates'})
        assert b'Data was exported as CSV' in rv.data
        date_string = DateMapper.current_date_for_filename()
        self.app.get(url_for('download_csv', filename=date_string + '_csv.zip'))
        rv = self.app.get(url_for('delete_csv', filename=date_string + '_csv.zip'),
                          follow_redirects=True)
        assert b'File deleted' in rv.data
def admin_check_dates() -> str:
    """Render an admin page listing invalid date combinations
    (e.g. begin after end) for entities, links and involvements."""
    tables = {
        'link_dates': Table(['link', 'domain', 'range']),
        'involvement_dates': Table(
            ['actor', 'event', 'class', 'involvement', 'description']),
        'dates': Table(
            ['name', 'class', 'type', 'system type', 'created', 'updated',
             'description'])}
    for entity in DateMapper.get_invalid_dates():
        tables['dates'].rows.append([
            link(entity), link(entity.class_), entity.print_base_type(),
            entity.system_type, format_date(entity.created),
            format_date(entity.modified), truncate_string(entity.description)])
    for link_ in DateMapper.get_invalid_link_dates():
        label = ''
        if link_.property.code == 'OA7':  # pragma: no cover
            label = 'relation'
        elif link_.property.code == 'P107':  # pragma: no cover
            label = 'member'
        elif link_.property.code in ['P11', 'P14', 'P22', 'P23']:
            label = 'involvement'
        # NOTE(review): if a property code matches none of the branches above,
        # label stays '' and url_for('_update') would fail — presumably
        # get_invalid_link_dates only returns these codes; confirm.
        url = url_for(label + '_update', id_=link_.id, origin_id=link_.domain.id)
        tables['link_dates'].rows.append([
            '<a href="' + url + '">' + uc_first(_(label)) + '</a>',
            link(link_.domain), link(link_.range)])
    for link_ in DateMapper.invalid_involvement_dates():
        event = link_.domain
        actor = link_.range
        update_url = url_for('involvement_update', id_=link_.id,
                             origin_id=actor.id)
        data = ([link(actor), link(event), g.classes[event.class_.code].name,
                 link_.type.name if link_.type else '',
                 truncate_string(link_.description),
                 '<a href="' + update_url + '">' + uc_first(_('edit')) + '</a>'])
        tables['involvement_dates'].rows.append(data)
    return render_template('admin/check_dates.html', tables=tables)
def involvement_update(id_, origin_id):
    """Show and process the form to update an involvement link between an
    event (link domain) and an actor (link range).

    id_ -- id of the link to update
    origin_id -- id of the entity the user navigated from
    """
    link_ = LinkMapper.get_by_id(id_)
    event = EntityMapper.get_by_id(link_.domain.id)
    actor = EntityMapper.get_by_id(link_.range.id)
    origin = event if origin_id == event.id else actor
    form = build_form(ActorForm, 'Involvement', link_, request)
    form.save.label.text = _('save')
    del form.actor, form.event, form.insert_and_continue
    # Offer only involvement properties valid for the event's class
    form.activity.choices = [('P11', g.properties['P11'].name)]
    if event.class_.code in ['E7', 'E8', 'E12']:
        form.activity.choices.append(('P14', g.properties['P14'].name))
    if event.class_.code == 'E8':
        form.activity.choices.append(('P22', g.properties['P22'].name))
        form.activity.choices.append(('P23', g.properties['P23'].name))
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # Update is implemented as delete followed by re-insert
            link_.delete()
            link_id = event.link(form.activity.data, actor,
                                 form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        tab = 'actor' if origin.view_name == 'event' else 'event'
        return redirect(url_for(origin.view_name + '_view', id_=origin.id)
                        + '#tab-' + tab)
    # GET request: pre-fill the form from the existing link
    form.activity.data = link_.property.code
    form.description.data = link_.description
    link_.set_dates()
    form.populate_dates(link_)
    return render_template('involvement/update.html', origin=origin, form=form,
                           linked_object=event if origin_id != event.id else actor)
def relation_insert(origin_id):
    """Show and process the form to create actor-actor relation (OA7) links
    from the origin actor to one or more selected actors."""
    origin = EntityMapper.get_by_id(origin_id)
    form = build_form(RelationForm, 'Actor Actor Relation')
    form.origin_id.data = origin.id
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # form.actor.data holds a string representation of an id list
            for actor_id in ast.literal_eval(form.actor.data):
                if form.inverse.data:
                    link_id = LinkMapper.insert(actor_id, 'OA7', origin.id,
                                                form.description.data)
                else:
                    link_id = origin.link('OA7', actor_id,
                                          form.description.data)
                DateMapper.save_link_dates(link_id, form)
                NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
            flash(_('entity created'), 'info')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        if form.continue_.data == 'yes':
            # "Insert and continue": reopen the empty form
            return redirect(url_for('relation_insert', origin_id=origin_id))
        return redirect(url_for('actor_view', id_=origin.id) + '#tab-relation')
    return render_template('relation/insert.html', origin=origin, form=form)
def __init__(self, row: NamedTupleCursor.Record) -> None:
    """Build an Entity from a database row.

    Optional row attributes (nodes, aliases, dates) are only processed when
    present, so the same constructor serves differently shaped queries.
    """
    from openatlas.forms.date import DateForm
    self.id = row.id
    self.nodes: Dict['Node', str] = {}
    if hasattr(row, 'nodes') and row.nodes:
        for node in row.nodes:
            self.nodes[g.nodes[node['f1']]] = node[
                'f2']  # f1 = node id, f2 = value
    self.aliases: Dict[int, str] = {}
    if hasattr(row, 'aliases') and row.aliases:
        for alias in row.aliases:
            self.aliases[alias['f1']] = alias[
                'f2']  # f1 = alias id, f2 = alias name
        # Sort aliases alphabetically by name, ties broken by id
        self.aliases = OrderedDict(
            sorted(self.aliases.items(), key=lambda kv: (kv[1], kv[0])))
    self.name = row.name
    self.description = row.description if row.description else ''
    self.system_type = row.system_type
    self.created = row.created
    self.modified = row.modified
    # Date attributes default to None and are filled if the query selected them
    self.begin_from = None
    self.begin_to = None
    self.begin_comment = None
    self.end_from = None
    self.end_to = None
    self.end_comment = None
    self.note: Optional[
        str] = None  # User specific, private note for an entity
    self.origin_id: Optional[int] = None
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(
            row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
    # Human readable first/last year for display
    self.first = DateForm.format_date(
        self.begin_from, 'year') if self.begin_from else None
    self.last = DateForm.format_date(self.end_from,
                                     'year') if self.end_from else None
    self.last = DateForm.format_date(
        self.end_to, 'year') if self.end_to else self.last
    self.class_ = g.classes[row.class_code]
    self.view_name = ''  # Used to build URLs
    self.external_references: List[Link] = []
    if self.system_type == 'file':
        self.view_name = 'file'
    elif self.class_.code == 'E33' and self.system_type == 'source translation':
        self.view_name = 'translation'
    elif self.class_.code in app.config['CODE_CLASS']:
        self.view_name = app.config['CODE_CLASS'][self.class_.code]
    self.table_name = self.view_name  # Used to build tables
    if self.view_name == 'place':
        self.table_name = self.system_type.replace(' ', '-')
def __init__(self, row) -> None:
    """Build an Entity from a database row; abort with 418 if no row given.

    Optional row attributes (nodes, aliases, dates) are only processed when
    present in the row.
    """
    if not row:
        logger.log('error', 'model', 'invalid id')
        abort(418)
    self.id = row.id
    self.nodes = {}  # type: Dict
    if hasattr(row, 'nodes') and row.nodes:
        for node in row.nodes:
            self.nodes[g.nodes[node['f1']]] = node[
                'f2']  # f1 = node id, f2 = value
    self.aliases = {}  # type: Dict
    if hasattr(row, 'aliases') and row.aliases:
        for alias in row.aliases:
            self.aliases[alias['f1']] = alias[
                'f2']  # f1 = alias id, f2 = alias name
        # Sort aliases alphabetically by name, ties broken by id
        self.aliases = OrderedDict(
            sorted(self.aliases.items(), key=lambda kv: (kv[1], kv[0])))
    self.name = row.name
    self.root = None  # type: Optional[list]
    self.description = row.description if row.description else ''
    self.system_type = row.system_type
    self.created = row.created
    self.modified = row.modified
    # Date attributes default to None; filled only if the query selected dates
    self.begin_from = None
    self.begin_to = None
    self.begin_comment = None
    self.end_from = None
    self.end_to = None
    self.end_comment = None
    self.note = None  # type: Optional[str] # private, user specific note for an entity
    self.origin_id = None  # type: Optional[int]
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(
            row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
    # Human readable first/last year for display
    self.first = DateForm.format_date(
        self.begin_from, 'year') if self.begin_from else None
    self.last = DateForm.format_date(self.end_from,
                                     'year') if self.end_from else None
    self.last = DateForm.format_date(
        self.end_to, 'year') if self.end_to else self.last
    self.class_ = g.classes[row.class_code]
    self.view_name = None  # view_name is used to build urls
    self.external_references = []  # type: list
    if self.system_type == 'file':
        self.view_name = 'file'
    elif self.class_.code in app.config['CODE_CLASS']:
        self.view_name = app.config['CODE_CLASS'][self.class_.code]
    self.table_name = self.view_name  # table_name is used to build tables
    if self.view_name == 'place':
        self.table_name = self.system_type.replace(' ', '-')
def validate(self) -> bool:
    """Run the standard form validation and additionally reject date ranges
    whose begin lies after their end."""
    result = Form.validate(self)
    begin = DateMapper.form_to_datetime64(
        self.begin_year.data, self.begin_month.data, self.begin_day.data)
    end = DateMapper.form_to_datetime64(
        self.end_year.data, self.end_month.data, self.end_day.data, True)
    if begin and end and begin > end:
        result = False
        self.begin_year.errors.append(
            _('Begin dates cannot start after end dates.'))
    return result
def __init__(self, row) -> None:
    """Build an Entity from a database row; abort with 418 if no row given."""
    if not row:
        logger.log('error', 'model', 'invalid id')
        abort(418)
    self.id = row.id
    self.nodes = {}  # type: Dict
    if hasattr(row, 'nodes') and row.nodes:
        for node in row.nodes:
            self.nodes[g.nodes[node['f1']]] = node['f2']  # f1 = node id, f2 = value
    self.aliases = {}  # type: Dict
    if hasattr(row, 'aliases') and row.aliases:
        for alias in row.aliases:
            self.aliases[alias['f1']] = alias['f2']  # f1 = alias id, f2 = alias name
        # Sort aliases alphabetically by name, ties broken by id
        self.aliases = OrderedDict(sorted(self.aliases.items(),
                                          key=lambda kv: (kv[1], kv[0])))
    self.name = row.name
    self.root = None  # type: Optional[list]
    self.description = row.description if row.description else ''
    self.system_type = row.system_type
    self.created = row.created
    self.modified = row.modified
    # Date attributes default to None; filled only if the query selected dates
    self.begin_from = None
    self.begin_to = None
    self.begin_comment = None
    self.end_from = None
    self.end_to = None
    self.end_comment = None
    self.origin_id = None  # type: Optional[int]
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
    # Human readable first/last year for display
    self.first = DateForm.format_date(self.begin_from, 'year') if self.begin_from else None
    self.last = DateForm.format_date(self.end_from, 'year') if self.end_from else None
    self.last = DateForm.format_date(self.end_to, 'year') if self.end_to else self.last
    self.class_ = g.classes[row.class_code]
    self.view_name = None  # view_name is used to build urls
    self.external_references = []  # type: list
    if self.system_type == 'file':
        self.view_name = 'file'
    elif self.class_.code in app.config['CODE_CLASS']:
        self.view_name = app.config['CODE_CLASS'][self.class_.code]
    self.table_name = self.view_name  # table_name is used to build tables
    if self.view_name == 'place':
        self.table_name = self.system_type.replace(' ', '-')
def update(entity: Entity) -> None:
    """Persist name, description and date fields of an existing entity."""
    from openatlas.util.util import sanitize
    sql = """
        UPDATE model.entity SET (name, description, begin_from, begin_to,
            begin_comment, end_from, end_to, end_comment)
        = (%(name)s, %(description)s, %(begin_from)s, %(begin_to)s,
            %(begin_comment)s, %(end_from)s, %(end_to)s, %(end_comment)s)
        WHERE id = %(id)s;"""
    g.cursor.execute(sql, {
        'id': entity.id,
        'name': entity.name,
        # numpy datetime64 values are converted back to SQL timestamps
        'begin_from': DateMapper.datetime64_to_timestamp(entity.begin_from),
        'begin_to': DateMapper.datetime64_to_timestamp(entity.begin_to),
        'end_from': DateMapper.datetime64_to_timestamp(entity.end_from),
        'end_to': DateMapper.datetime64_to_timestamp(entity.end_to),
        'begin_comment': entity.begin_comment,
        'end_comment': entity.end_comment,
        'description': sanitize(entity.description, 'description')})
    debug_model['div sql'] += 1  # statement counter for debugging
def export_csv(form):
    """ Creates CSV file(s) in the export/csv folder, filename begins with current date.

    form -- export options: which tables, whether to zip, timestamp columns
            and the GIS geometry output format (wkt/coordinates/postgis)
    """
    import pandas.io.sql as psql
    date_string = DateMapper.current_date_for_filename()
    path = app.config['EXPORT_FOLDER_PATH'] + '/csv/'
    if form.zip.data:
        # Build the files in a temporary directory which gets zipped afterwards
        path = '/tmp/' + date_string + '_openatlas_csv_export'
        if os.path.exists(path):
            shutil.rmtree(path)  # pragma: no cover
        os.makedirs(path)
    # Table name -> columns to export; names map to schema.table via the
    # first underscore (e.g. model_entity -> model.entity)
    tables = {
        'model_class': ['id', 'name', 'code'],
        'model_class_inheritance': ['id', 'super_code', 'sub_code'],
        'model_entity': ['id', 'name', 'description', 'class_code',
                         'begin_from', 'begin_to', 'begin_comment',
                         'end_from', 'end_to', 'end_comment'],
        'model_link': ['id', 'property_code', 'domain_id', 'range_id',
                       'type_id', 'description', 'begin_from', 'begin_to',
                       'begin_comment', 'end_from', 'end_to', 'end_comment'],
        'model_property': ['id', 'code', 'range_class_code',
                           'domain_class_code', 'name', 'name_inverse'],
        'model_property_inheritance': ['id', 'super_code', 'sub_code'],
        'gis_point': ['id', 'entity_id', 'name', 'description', 'type'],
        'gis_linestring': ['id', 'entity_id', 'name', 'description', 'type'],
        'gis_polygon': ['id', 'entity_id', 'name', 'description', 'type']}
    gis_tables = ['gis_point', 'gis_linestring', 'gis_polygon']
    for table, fields in tables.items():
        if getattr(form, table).data:  # Only export tables selected in form
            if form.timestamps.data:
                fields.append('created')
                fields.append('modified')
            if table in gis_tables:
                # Choose how geometries are rendered in the CSV
                if form.gis_format.data == 'wkt':
                    fields.append("ST_AsText(geom)")
                elif form.gis_format.data == 'coordinates':
                    if table == 'gis_point':
                        fields.append(
                            "ST_X(geom) || ' ' || ST_Y(geom) AS coordinates")
                    else:
                        # For lines/polygons export a representative point
                        fields.append("""
                            ST_X(public.ST_PointOnSurface(geom)) || ' ' ||
                            ST_Y(public.ST_PointOnSurface(geom))
                            AS polygon_center_point""")
                else:
                    fields.append('geom')
            sql = "SELECT {fields} FROM {table};".format(
                fields=','.join(fields), table=table.replace('_', '.', 1))
            data_frame = psql.read_sql(sql, g.db)
            file_path = path + '/{date}_{name}.csv'.format(date=date_string,
                                                           name=table)
            data_frame.to_csv(file_path, index=False)
    if form.zip.data:
        # Add an info file, zip everything and remove the temporary directory
        info = 'CSV export from: {host}\n'.format(host=request.headers['Host'])
        info += 'Created: {date} by {user}\nOpenAtlas version: {version}'.format(
            date=date_string, user=current_user.username,
            version=app.config['VERSION'])
        with open(path + '/info.txt', "w") as file:
            print(info, file=file)
        zip_file = app.config['EXPORT_FOLDER_PATH'] + '/csv/' + date_string + '_csv'
        shutil.make_archive(zip_file, 'zip', path)
        shutil.rmtree(path)
    return
def set_dates(self, form: FlaskForm) -> None:
    """Copy begin and end dates from the form to this object.

    All date attributes are reset first so that cleared form fields result
    in None values.
    """
    self.begin_from = None
    self.begin_to = None
    self.begin_comment = None
    self.end_from = None
    self.end_to = None
    self.end_comment = None
    if form.begin_year_from.data:  # Only if begin year is set create a begin date or time span
        self.begin_from = DateMapper.form_to_datetime64(
            form.begin_year_from.data, form.begin_month_from.data,
            form.begin_day_from.data)
        self.begin_to = DateMapper.form_to_datetime64(
            form.begin_year_to.data, form.begin_month_to.data,
            form.begin_day_to.data, True)
        self.begin_comment = form.begin_comment.data
    if form.end_year_from.data:  # Only if end year is set create a year date or time span
        self.end_from = DateMapper.form_to_datetime64(
            form.end_year_from.data, form.end_month_from.data,
            form.end_day_from.data)
        self.end_to = DateMapper.form_to_datetime64(
            form.end_year_to.data, form.end_month_to.data,
            form.end_day_to.data, True)
        self.end_comment = form.end_comment.data
def set_dates(self, form):
    """Take begin and end dates from the form fields into this object.

    Every date attribute is cleared first, so emptied form fields end up
    as None. A date (or time span) is only created when the corresponding
    "from" year field was filled in.
    """
    for prefix in ('begin', 'end'):
        setattr(self, prefix + '_from', None)
        setattr(self, prefix + '_to', None)
        setattr(self, prefix + '_comment', None)
        if getattr(form, prefix + '_year_from').data:
            setattr(self, prefix + '_from', DateMapper.form_to_datetime64(
                getattr(form, prefix + '_year_from').data,
                getattr(form, prefix + '_month_from').data,
                getattr(form, prefix + '_day_from').data))
            setattr(self, prefix + '_to', DateMapper.form_to_datetime64(
                getattr(form, prefix + '_year_to').data,
                getattr(form, prefix + '_month_to').data,
                getattr(form, prefix + '_day_to').data, True))
            setattr(self, prefix + '_comment',
                    getattr(form, prefix + '_comment').data)
def involvement_insert(origin_id):
    """Show and process the form to link actors with events via involvement
    properties (P11/P14/P22/P23), starting from an event or an actor."""
    origin = EntityMapper.get_by_id(origin_id)
    view_name = get_view_name(origin)
    form = build_form(ActorForm, 'Involvement')
    # Depending on origin type, only the opposite entity can be selected
    if view_name == 'event':
        del form.event
    else:
        del form.actor
    # Offer only involvement properties valid for the origin's class
    form.activity.choices = [('P11', g.properties['P11'].name_inverse)]
    if origin.class_.code in ['E7', 'E8', 'E12']:
        form.activity.choices.append(('P14', g.properties['P14'].name_inverse))
    if origin.class_.code == 'E8':
        form.activity.choices.append(('P22', g.properties['P22'].name_inverse))
        form.activity.choices.append(('P23', g.properties['P23'].name_inverse))
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            if view_name == 'event':
                # Origin is the event; link each selected actor to it
                for actor_id in ast.literal_eval(form.actor.data):
                    link_id = origin.link(form.activity.data, actor_id,
                                          form.description.data)
                    DateMapper.save_link_dates(link_id, form)
                    NodeMapper.save_link_nodes(link_id, form)
            else:
                # Origin is the actor; link it to each selected event
                for event_id in ast.literal_eval(form.event.data):
                    link_id = LinkMapper.insert(event_id, form.activity.data,
                                                origin.id,
                                                form.description.data)
                    DateMapper.save_link_dates(link_id, form)
                    NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
            flash(_('entity created'), 'info')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        if form.continue_.data == 'yes':
            return redirect(url_for('involvement_insert', origin_id=origin_id))
        tab = 'actor' if view_name == 'event' else 'event'
        return redirect(
            url_for(view_name + '_view', id_=origin.id) + '#tab-' + tab)
    return render_template('involvement/insert.html', origin=origin, form=form)
def update(link_: Link) -> None:
    """Update an existing link's property, entities, description, type
    and dates in the database."""
    sql = """
        UPDATE model.link SET (property_code, domain_id, range_id,
            description, type_id, begin_from, begin_to, begin_comment,
            end_from, end_to, end_comment)
        = (%(property_code)s, %(domain_id)s, %(range_id)s, %(description)s,
            %(type_id)s, %(begin_from)s, %(begin_to)s, %(begin_comment)s,
            %(end_from)s, %(end_to)s, %(end_comment)s)
        WHERE id = %(id)s;"""
    g.execute(sql, {
        'id': link_.id,
        'property_code': link_.property.code,
        'domain_id': link_.domain.id,
        'range_id': link_.range.id,
        'type_id': link_.type.id if link_.type else None,
        'description': link_.description,
        # numpy datetime64 values are converted back to SQL timestamps
        'begin_from': DateMapper.datetime64_to_timestamp(link_.begin_from),
        'begin_to': DateMapper.datetime64_to_timestamp(link_.begin_to),
        'begin_comment': link_.begin_comment,
        'end_from': DateMapper.datetime64_to_timestamp(link_.end_from),
        'end_to': DateMapper.datetime64_to_timestamp(link_.end_to),
        'end_comment': link_.end_comment})
def __init__(self, row):
    """Build an Entity from a database row; abort with 418 if no row given."""
    if not row:
        logger.log('error', 'model', 'invalid id')
        abort(418)
    self.id = row.id
    self.nodes = dict()
    if hasattr(row, 'nodes') and row.nodes:
        for node in row.nodes:
            self.nodes[g.nodes[node['f1']]] = node['f2']  # f1 = node id, f2 = value
    self.name = row.name
    self.root = None
    self.description = row.description if row.description else ''
    self.system_type = row.system_type
    self.created = row.created
    self.modified = row.modified
    # Date attributes default to None; filled only if the query selected dates
    self.begin_from = None
    self.begin_to = None
    self.begin_comment = None
    self.end_from = None
    self.end_to = None
    self.end_comment = None
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
    # Human readable first/last year for display
    self.first = DateForm.format_date(self.begin_from, 'year') if self.begin_from else None
    self.last = DateForm.format_date(self.end_from, 'year') if self.end_from else None
    self.last = DateForm.format_date(self.end_to, 'year') if self.end_to else self.last
    self.class_ = g.classes[row.class_code]
    self.view_name = None  # view_name is used to build urls
    self.external_references = []  # Used in view info tab for display
    if self.system_type == 'file':
        self.view_name = 'file'
    elif self.class_.code in app.config['CODE_CLASS']:
        self.view_name = app.config['CODE_CLASS'][self.class_.code]
    self.table_name = self.view_name  # table_name is used to build tables
    if self.view_name == 'place':
        self.table_name = self.system_type.replace(' ', '-')
def __init__(self, row, domain=None, range_=None):
    """Build a Link from a database row.

    domain/range_ -- optionally pass already fetched entities to avoid
        additional lookups
    """
    from openatlas.models.entity import EntityMapper
    self.id = row.id
    self.description = row.description
    self.property = g.properties[row.property_code]
    self.domain = domain if domain else EntityMapper.get_by_id(row.domain_id)
    self.range = range_ if range_ else EntityMapper.get_by_id(row.range_id)
    self.type = g.nodes[row.type_id] if row.type_id else None
    self.nodes = dict()
    if hasattr(row, 'type_id') and row.type_id:
        self.nodes[g.nodes[row.type_id]] = None
    # Date attributes only exist if the query selected the date columns
    if hasattr(row, 'begin_from'):
        self.begin_from = DateMapper.timestamp_to_datetime64(row.begin_from)
        self.begin_to = DateMapper.timestamp_to_datetime64(row.begin_to)
        self.begin_comment = row.begin_comment
        self.end_from = DateMapper.timestamp_to_datetime64(row.end_from)
        self.end_to = DateMapper.timestamp_to_datetime64(row.end_to)
        self.end_comment = row.end_comment
        # Human readable first/last year for display
        self.first = DateForm.format_date(self.begin_from, 'year') if self.begin_from else None
        self.last = DateForm.format_date(self.end_from, 'year') if self.end_from else None
        self.last = DateForm.format_date(self.end_to, 'year') if self.end_to else self.last
def update(link):
    """Update an existing link's property, entities, description, type
    and dates in the database."""
    sql = """
        UPDATE model.link SET (property_code, domain_id, range_id,
            description, type_id, begin_from, begin_to, begin_comment,
            end_from, end_to, end_comment)
        = (%(property_code)s, %(domain_id)s, %(range_id)s, %(description)s,
            %(type_id)s, %(begin_from)s, %(begin_to)s, %(begin_comment)s,
            %(end_from)s, %(end_to)s, %(end_comment)s)
        WHERE id = %(id)s;"""
    g.cursor.execute(sql, {
        'id': link.id,
        'property_code': link.property.code,
        'domain_id': link.domain.id,
        'range_id': link.range.id,
        'type_id': link.type.id if link.type else None,
        'description': link.description,
        # numpy datetime64 values are converted back to SQL timestamps
        'begin_from': DateMapper.datetime64_to_timestamp(link.begin_from),
        'begin_to': DateMapper.datetime64_to_timestamp(link.begin_to),
        'begin_comment': link.begin_comment,
        'end_from': DateMapper.datetime64_to_timestamp(link.end_from),
        'end_to': DateMapper.datetime64_to_timestamp(link.end_to),
        'end_comment': link.end_comment})
    debug_model['link sql'] += 1  # statement counter for debugging
def involvement_update(id_, origin_id):
    """Show and process the form to update an involvement link between an
    event (link domain) and an actor (link range).

    id_ -- id of the link to update
    origin_id -- id of the entity the user navigated from
    """
    link_ = LinkMapper.get_by_id(id_)
    event = EntityMapper.get_by_id(link_.domain.id)
    actor = EntityMapper.get_by_id(link_.range.id)
    origin = event if origin_id == event.id else actor
    form = build_form(ActorForm, 'Involvement', link_, request)
    form.save.label.text = _('save')
    del form.actor, form.event, form.insert_and_continue
    # Offer only involvement properties valid for the event's class
    form.activity.choices = [('P11', g.properties['P11'].name)]
    if event.class_.code in ['E7', 'E8', 'E12']:
        form.activity.choices.append(('P14', g.properties['P14'].name))
    if event.class_.code == 'E8':
        form.activity.choices.append(('P22', g.properties['P22'].name))
        form.activity.choices.append(('P23', g.properties['P23'].name))
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            # Update is implemented as delete followed by re-insert
            link_.delete()
            link_id = event.link(form.activity.data, actor,
                                 form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        view_name = get_view_name(origin)
        tab = 'actor' if view_name == 'event' else 'event'
        return redirect(
            url_for(view_name + '_view', id_=origin.id) + '#tab-' + tab)
    # GET request: pre-fill the form from the existing link
    form.activity.data = link_.property.code
    form.description.data = link_.description
    link_.set_dates()
    form.populate_dates(link_)
    return render_template(
        'involvement/update.html', origin=origin, form=form,
        linked_object=event if origin_id != event.id else actor)
def member_insert(origin_id):
    """Show and process the form to add members (P107) to a group.

    origin_id -- id of the group entity the members get linked to
    """
    origin = EntityMapper.get_by_id(origin_id)
    form = build_form(MemberForm, 'Member')
    del form.group
    form.origin_id.data = origin.id
    if not form.validate_on_submit():
        # First display or failed validation: (re-)render the form
        return render_template('member/insert.html', origin=origin, form=form)
    g.cursor.execute('BEGIN')
    try:
        # form.actor.data is a string representation of a list of entity ids
        member_ids = ast.literal_eval(form.actor.data)
        for member in EntityMapper.get_by_ids(member_ids):
            new_link = origin.link('P107', member, form.description.data)
            DateMapper.save_link_dates(new_link, form)
            NodeMapper.save_link_nodes(new_link, form)
        g.cursor.execute('COMMIT')
        flash(_('entity created'), 'info')
    except Exception as e:  # pragma: no cover
        g.cursor.execute('ROLLBACK')
        logger.log('error', 'database', 'transaction failed', e)
        flash(_('error transaction'), 'error')
    if form.continue_.data == 'yes':
        # "Insert and continue": reopen an empty member form
        return redirect(url_for('member_insert', origin_id=origin_id))
    return redirect(url_for('actor_view', id_=origin.id) + '#tab-member')
def update(entity: Entity) -> None:
    """Write name, description, dates and date comments of *entity* back to the
    model.entity table."""
    from openatlas.util.util import sanitize
    sql = """
        UPDATE model.entity SET (name, description, begin_from, begin_to,
            begin_comment, end_from, end_to, end_comment) =
            (%(name)s, %(description)s, %(begin_from)s, %(begin_to)s,
            %(begin_comment)s, %(end_from)s, %(end_to)s, %(end_comment)s)
        WHERE id = %(id)s;"""
    g.execute(
        sql,
        {
            'id': entity.id,
            'name': entity.name,
            # numpy.datetime64 values are converted to timestamps for PostgreSQL
            'begin_from': DateMapper.datetime64_to_timestamp(entity.begin_from),
            'begin_to': DateMapper.datetime64_to_timestamp(entity.begin_to),
            'end_from': DateMapper.datetime64_to_timestamp(entity.end_from),
            'end_to': DateMapper.datetime64_to_timestamp(entity.end_to),
            'begin_comment': entity.begin_comment,
            'end_comment': entity.end_comment,
            # description is cleaned before storing
            'description': sanitize(entity.description, 'description')})
def involvement_insert(origin_id):
    """Display and process the form linking actors to an event or events to an actor.

    origin_id: id of the entity the user navigated from; whether it is an
    event or an actor decides the direction of the created links.
    """
    origin = EntityMapper.get_by_id(origin_id)
    form = build_form(ActorForm, 'Involvement')
    if origin.view_name == 'event':
        del form.event  # the origin already is the event
    else:
        del form.actor  # the origin already is the actor
    # P11 is always offered (inverse name, as seen from the actor side);
    # further property choices depend on the origin's class
    form.activity.choices = [('P11', g.properties['P11'].name_inverse)]
    if origin.class_.code in ['E7', 'E8', 'E12']:
        form.activity.choices.append(('P14', g.properties['P14'].name_inverse))
    if origin.class_.code == 'E8':
        form.activity.choices.append(('P22', g.properties['P22'].name_inverse))
        form.activity.choices.append(('P23', g.properties['P23'].name_inverse))
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            if origin.view_name == 'event':
                # Origin is the event: link every selected actor to it
                for actor in EntityMapper.get_by_ids(ast.literal_eval(form.actor.data)):
                    link_id = origin.link(form.activity.data, actor, form.description.data)
                    DateMapper.save_link_dates(link_id, form)
                    NodeMapper.save_link_nodes(link_id, form)
            else:
                # Origin is the actor: link it to every selected event
                for event in EntityMapper.get_by_ids(ast.literal_eval(form.event.data)):
                    link_id = event.link(form.activity.data, origin, form.description.data)
                    DateMapper.save_link_dates(link_id, form)
                    NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
            flash(_('entity created'), 'info')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        if form.continue_.data == 'yes':  # "insert and continue" was clicked
            return redirect(url_for('involvement_insert', origin_id=origin_id))
        tab = 'actor' if origin.view_name == 'event' else 'event'
        return redirect(url_for(origin.view_name + '_view', id_=origin.id) + '#tab-' + tab)
    return render_template('involvement/insert.html', origin=origin, form=form)
def validate(self) -> bool: valid = FlaskForm.validate(self) # Check date format, if valid put dates into a list called "dates" dates = {} for prefix in ['begin_', 'end_']: for postfix in ['_from', '_to']: if getattr(self, prefix + 'year' + postfix).data: date = DateMapper.form_to_datetime64( getattr(self, prefix + 'year' + postfix).data, getattr(self, prefix + 'month' + postfix).data, getattr(self, prefix + 'day' + postfix).data) if not date: getattr(self, prefix + 'day' + postfix).errors.append( _('not a valid date')) valid = False else: dates[prefix + postfix.replace('_', '')] = date # Check for valid date combination e.g. begin not after end if valid: for prefix in ['begin', 'end']: if prefix + '_from' in dates and prefix + '_to' in dates: if dates[prefix + '_from'] > dates[prefix + '_to']: field = getattr(self, prefix + '_day_from') field.errors.append( _('First date cannot be after second.')) valid = False if valid and 'begin_from' in dates and 'end_from' in dates: field = getattr(self, 'begin_day_from') if len(dates) == 4: # All dates are used if dates['begin_from'] > dates['end_from'] or dates[ 'begin_to'] > dates['end_to']: field.errors.append( _('Begin dates cannot start after end dates.')) valid = False else: first = dates['begin_to'] if 'begin_to' in dates else dates[ 'begin_from'] second = dates['end_from'] if 'end_from' in dates else dates[ 'end_to'] if first > second: field.errors.append( _('Begin dates cannot start after end dates.')) valid = False return valid
def insert(code, name, system_type=None, description=None, date=None):
    """Insert a new entity row and return the created entity.

    code: class code stored as class_code.
    name: entity name (may be None when a date is given).
    system_type, description: optional, stripped before storing.
    date: optional value stored as value_timestamp.
    Returns the freshly loaded entity, or None when neither name nor date
    was supplied.
    """
    if not name and not date:  # pragma: no cover
        logger.log('error', 'database', 'Insert entity without name and date')
        return  # Something went wrong so don't insert
    sql = """
        INSERT INTO model.entity
            (name, system_type, class_code, description, value_timestamp)
        VALUES
            (%(name)s, %(system_type)s, %(code)s, %(description)s, %(value_timestamp)s)
        RETURNING id;"""
    params = {
        # Bug fix: name may be None when only a date is given (the guard above
        # allows that), so an unconditional name.strip() raised AttributeError.
        # Fall back to the stringified date, matching the sibling insert()
        # implementation elsewhere in this file.
        'name': name.strip() if name else str(date),
        'code': code,
        'system_type': system_type.strip() if system_type else None,
        'description': description.strip() if description else None,
        'value_timestamp': DateMapper.datetime64_to_timestamp(date) if date else None}
    g.cursor.execute(sql, params)
    debug_model['div sql'] += 1
    return EntityMapper.get_by_id(g.cursor.fetchone()[0])
def export_sql():
    """Write a pg_dump of the database into the export/sql folder.

    The dump file name is prefixed with the current date. Returns True on
    success and False when launching pg_dump raised an exception.
    """
    # Todo: prevent exposing the database password to the process list
    config = app.config
    dump_path = '{path}/sql/{date}_dump.sql'.format(
        path=config['EXPORT_FOLDER_PATH'],
        date=DateMapper.current_date_for_filename())
    command = 'pg_dump -h {host} -d {database} -U {user} -p {port} -f {file}'.format(
        host=config['DATABASE_HOST'],
        database=config['DATABASE_NAME'],
        port=config['DATABASE_PORT'],
        user=config['DATABASE_USER'],
        file=dump_path)
    try:
        # Password is handed over via the environment, not the command line
        process = subprocess.Popen(
            command,
            shell=True,
            stdin=subprocess.PIPE,
            env={'PGPASSWORD': config['DATABASE_PASS']})
        process.wait()
    except Exception:  # pragma: no cover
        return False
    return True
def validate(self, extra_validators=None):
    """Validate the form including cross-checks of the date fields.

    Legacy variant using date_<name>_<item> field names; *extra_validators*
    is accepted for WTForms signature compatibility but not used here.
    Returns True when all entered dates parse and are consistently ordered.
    """
    valid = Form.validate(self)
    fields = {}  # put date form values in a dictionary
    for name in ['begin', 'end']:
        for item in ['year', 'month', 'day', 'year2', 'month2', 'day2']:
            value = getattr(self, 'date_' + name + '_' + item).data
            # Empty inputs become '' so the truthiness test below skips them
            fields[name + '_' + item] = int(value) if value else ''
    # Check date format, if valid put dates into a dictionary
    dates = {}
    for name in ['begin', 'end']:
        for postfix in ['', '2']:
            if fields[name + '_' + 'year' + postfix]:
                date = DateMapper.form_to_datetime64(
                    fields[name + '_' + 'year' + postfix],
                    fields[name + '_' + 'month' + postfix],
                    fields[name + '_' + 'day' + postfix])
                if not date:
                    field = getattr(self, 'date_' + name + '_' + 'day' + postfix)
                    field.errors.append(_('not a valid date'))
                    valid = False
                else:
                    dates[name + postfix] = date
    # Check for valid date combination e.g. begin not after end
    if valid:
        for name in ['begin', 'end']:
            if name in dates and name + '2' in dates:
                if dates[name] > dates[name + '2']:
                    field = getattr(self, 'date_' + name + '_day')
                    field.errors.append(_('First date cannot be after second.'))
                    valid = False
    if valid and 'begin' in dates and 'end' in dates:
        field = getattr(self, 'date_begin_day')
        if len(dates) == 4:  # All dates are used
            if dates['begin'] > dates['end'] or dates['begin2'] > dates['end2']:
                field.errors.append(_('Begin dates cannot start after end dates.'))
                valid = False
        else:
            # Only three dates present: compare the latest begin bound
            # with the earliest end bound
            first = dates['begin2'] if 'begin2' in dates else dates['begin']
            second = dates['end'] if 'end' in dates else dates['end2']
            if first > second:
                field.errors.append(_('Begin dates cannot start after end dates.'))
                valid = False
    return valid
def export_sql() -> bool:
    """Write a pg_dump of the database into the export/sql folder.

    The dump file name is prefixed with the current date. Returns True on
    success and False when launching pg_dump raised an exception.
    """
    # Todo: prevent exposing the database password to the process list
    config = app.config
    path = config['EXPORT_FOLDER_PATH'].joinpath(
        'sql', DateMapper.current_date_for_filename() + '_dump.sql')
    command = 'pg_dump -h {host} -d {database} -U {user} -p {port} -f {file}'.format(
        host=config['DATABASE_HOST'],
        database=config['DATABASE_NAME'],
        port=config['DATABASE_PORT'],
        user=config['DATABASE_USER'],
        file=path)
    try:
        # Password is handed over via the environment, not the command line
        process = subprocess.Popen(
            command,
            shell=True,
            stdin=subprocess.PIPE,
            env={'PGPASSWORD': config['DATABASE_PASS']})
        process.wait()
    except Exception:  # pragma: no cover
        return False
    return True
def validate(self, extra_validators=None): valid = Form.validate(self) # Check date format, if valid put dates into a list called "dates" dates = {} for prefix in ['begin_', 'end_']: for postfix in ['_from', '_to']: if getattr(self, prefix + 'year' + postfix).data: date = DateMapper.form_to_datetime64( getattr(self, prefix + 'year' + postfix).data, getattr(self, prefix + 'month' + postfix).data, getattr(self, prefix + 'day' + postfix).data) if not date: getattr(self, prefix + 'day' + postfix).errors.append(_('not a valid date')) valid = False else: dates[prefix + postfix.replace('_', '')] = date # Check for valid date combination e.g. begin not after end if valid: for prefix in ['begin', 'end']: if prefix + '_from' in dates and prefix + '_to' in dates: if dates[prefix + '_from'] > dates[prefix + '_to']: field = getattr(self, prefix + '_day_from') field.errors.append(_('First date cannot be after second.')) valid = False if valid and 'begin_from' in dates and 'end_from' in dates: field = getattr(self, 'begin_day_from') if len(dates) == 4: # All dates are used if dates['begin_from'] > dates['end_from'] or dates['begin_to'] > dates['end_to']: field.errors.append(_('Begin dates cannot start after end dates.')) valid = False else: first = dates['begin_to'] if 'begin_to' in dates else dates['begin_from'] second = dates['end_from'] if 'end_from' in dates else dates['end_to'] if first > second: field.errors.append(_('Begin dates cannot start after end dates.')) valid = False return valid
def insert(code, name, system_type=None, description=None, date=None):
    """Insert a new entity row and return the created entity.

    When a date is given, the stringified date is stored as the name and
    the converted timestamp as value_timestamp; otherwise the stripped
    name is used. Returns None when neither name nor date was supplied.
    """
    if not name and not date:  # pragma: no cover
        logger.log('error', 'database', 'Insert entity without name and date')
        return  # Something went wrong so don't insert
    sql = """
        INSERT INTO model.entity
            (name, system_type, class_code, description, value_timestamp)
        VALUES
            (%(name)s, %(system_type)s, %(code)s, %(description)s, %(value_timestamp)s)
        RETURNING id;"""
    g.cursor.execute(sql, {
        'name': str(date) if date else name.strip(),
        'code': code,
        'system_type': system_type.strip() if system_type else None,
        'description': description.strip() if description else None,
        'value_timestamp': DateMapper.datetime64_to_timestamp(date) if date else None})
    return EntityMapper.get_by_id(g.cursor.fetchone()[0])
def export_sql():
    """Write a pg_dump of the database into the export/sql folder.

    Only supported on POSIX systems. The dump file name is prefixed with
    the current date. Returns True on success, False otherwise.
    """
    # Todo: prevent exposing the database password to the process list
    if os.name != "posix":  # pragma: no cover
        # For other operating systems e.g. Windows, we would need adaptions here
        return False
    config = app.config
    dump_path = '{path}/sql/{date}_dump.sql'.format(
        path=config['EXPORT_FOLDER_PATH'],
        date=DateMapper.current_date_for_filename())
    command = 'pg_dump -h {host} -d {database} -U {user} -p {port} -f {file}'.format(
        host=config['DATABASE_HOST'],
        database=config['DATABASE_NAME'],
        port=config['DATABASE_PORT'],
        user=config['DATABASE_USER'],
        file=dump_path)
    try:
        # Password is handed over via the environment, not the command line
        process = subprocess.Popen(
            command,
            shell=True,
            stdin=subprocess.PIPE,
            env={'PGPASSWORD': config['DATABASE_PASS']})
        process.wait()
    except Exception:  # pragma: no cover
        return False
    return True
def search(form):
    """Execute a fulltext search for the term entered in *form*.

    Matches entity names (and optionally descriptions) against the term,
    restricted to the classes checked in the form, optionally to entities
    the current user worked on, and optionally filtered by begin/end dates.
    Returns a dict-values view of matching entities without duplicates.
    """
    if not form.term.data:
        return []
    sql = EntityMapper.build_sql() + """
        {user_clause}
        WHERE (LOWER(e.name) LIKE LOWER(%(term)s) {description_clause})
        AND {user_clause2} (""".format(
        user_clause="""
            LEFT JOIN web.user_log ul ON e.id = ul.entity_id """ if form.own.data else '',
        description_clause="""
            OR lower(e.description) LIKE lower(%(term)s) """ if form.desc.data else '',
        user_clause2=' ul.user_id = %(user_id)s AND ' if form.own.data else '')
    # Build one WHERE fragment per selected class/system type
    sql_where = []
    for name in form.classes.data:
        if name in ['source', 'event']:
            sql_where.append("e.class_code IN ({codes})".format(
                codes=str(app.config['CLASS_CODES'][name])[1:-1]))
        elif name == 'actor':
            codes = app.config['CLASS_CODES'][name] + ['E82']  # Add alias
            sql_where.append(" e.class_code IN ({codes})".format(codes=str(codes)[1:-1]))
        elif name == 'place':
            sql_where.append("(e.class_code = 'E41' OR e.system_type = 'place')")
        elif name == 'feature':
            sql_where.append("e.system_type = 'feature'")
        elif name == 'stratigraphic unit':
            sql_where.append("e.system_type = 'stratigraphic unit'")
        elif name == 'find':
            sql_where.append("e.class_code = 'E22'")
        elif name == 'reference':
            sql_where.append(" e.class_code IN ({codes}) AND e.system_type != 'file'".format(
                codes=str(app.config['CLASS_CODES']['reference'])[1:-1]))
        elif name == 'file':
            sql_where.append(" e.system_type = 'file'")
    sql += ' OR '.join(sql_where) + ") GROUP BY e.id ORDER BY e.name;"
    g.cursor.execute(sql, {'term': '%' + form.term.data + '%',
                           'user_id': current_user.id})
    debug_model['div sql'] += 1
    # Prepare date filter
    from_date = DateMapper.form_to_datetime64(form.begin_year.data,
                                              form.begin_month.data,
                                              form.begin_day.data)
    to_date = DateMapper.form_to_datetime64(form.end_year.data,
                                            form.end_month.data,
                                            form.end_day.data, True)
    # Refill form in case dates were completed
    if from_date:
        string = str(from_date)
        if string.startswith('-') or string.startswith('0000'):
            # Drop the leading character so the split below yields
            # year/month/day parts — presumably for BC years; confirm
            string = string[1:]
        parts = string.split('-')
        form.begin_month.raw_data = None
        form.begin_day.raw_data = None
        form.begin_month.data = int(parts[1])
        form.begin_day.data = int(parts[2])
    if to_date:
        string = str(to_date)
        if string.startswith('-') or string.startswith('0000'):
            string = string[1:]  # pragma: no cover
        parts = string.split('-')
        form.end_month.raw_data = None
        form.end_day.raw_data = None
        form.end_month.data = int(parts[1])
        form.end_day.data = int(parts[2])
    entities = []
    for row in g.cursor.fetchall():
        entity = None
        if row.class_code == 'E82':  # If found in actor alias
            entity = LinkMapper.get_linked_entity(row.id, 'P131', True)
        elif row.class_code == 'E41':  # If found in place alias
            entity = LinkMapper.get_linked_entity(row.id, 'P1', True)
        elif row.class_code == 'E18':
            # E18 rows only match when their system type was selected
            if row.system_type in form.classes.data:
                entity = Entity(row)
        else:
            entity = Entity(row)
        if not entity:  # pragma: no cover
            continue
        if not from_date and not to_date:
            entities.append(entity)
            continue
        # Date criteria present but entity has no dates
        if not entity.begin_from and not entity.begin_to and not entity.end_from \
                and not entity.end_to:
            if form.include_dateless.data:  # Include dateless entities
                entities.append(entity)
            continue
        # Check date criteria
        dates = [entity.begin_from, entity.begin_to, entity.end_from, entity.end_to]
        begin_check_ok = False
        if not from_date:
            begin_check_ok = True  # pragma: no cover
        else:
            # Any entity date at or after from_date satisfies the begin check
            for date in dates:
                if date and date >= from_date:
                    begin_check_ok = True
        end_check_ok = False
        if not to_date:
            end_check_ok = True  # pragma: no cover
        else:
            # Any entity date at or before to_date satisfies the end check
            for date in dates:
                if date and date <= to_date:
                    end_check_ok = True
        if begin_check_ok and end_check_ok:
            entities.append(entity)
    return {d.id: d for d in entities}.values()  # Remove duplicates before returning
def export_csv(form: FlaskForm) -> None:
    """ Creates CSV file(s) in the export/csv folder, filename begins with
    current date.

    One CSV per table checked in *form*; optional created/modified columns,
    optional geometry column formats for GIS tables. When form.zip.data is
    set, files go to a temporary folder that is zipped and removed.
    """
    import pandas.io.sql as psql
    date_string = DateMapper.current_date_for_filename()
    path = app.config['EXPORT_FOLDER_PATH'].joinpath('csv')
    if form.zip.data:
        # Write into a temporary folder first; zipped and removed below
        path = app.config['TMP_FOLDER_PATH'].joinpath(
            date_string + '_openatlas_csv_export')
        if os.path.exists(path):
            shutil.rmtree(path)  # pragma: no cover
        os.makedirs(path)
    # Exportable table name (underscore form) -> columns to select
    tables = {
        'model_class': ['id', 'name', 'code'],
        'model_class_inheritance': ['id', 'super_code', 'sub_code'],
        'model_entity': [
            'id', 'name', 'description', 'class_code', 'begin_from',
            'begin_to', 'begin_comment', 'end_from', 'end_to', 'end_comment'
        ],
        'model_link': [
            'id', 'property_code', 'domain_id', 'range_id', 'type_id',
            'description', 'begin_from', 'begin_to', 'begin_comment',
            'end_from', 'end_to', 'end_comment'
        ],
        'model_property': [
            'id', 'code', 'range_class_code', 'domain_class_code', 'name',
            'name_inverse'
        ],
        'model_property_inheritance': ['id', 'super_code', 'sub_code'],
        'gis_point': ['id', 'entity_id', 'name', 'description', 'type'],
        'gis_linestring': ['id', 'entity_id', 'name', 'description', 'type'],
        'gis_polygon': ['id', 'entity_id', 'name', 'description', 'type']
    }
    gis_tables = ['gis_point', 'gis_linestring', 'gis_polygon']
    for table, fields in tables.items():
        if getattr(form, table).data:  # export only tables selected in the form
            if form.timestamps.data:
                fields.append('created')
                fields.append('modified')
            if table in gis_tables:
                # Geometry column depending on the selected output format
                if form.gis_format.data == 'wkt':
                    fields.append("ST_AsText(geom)")
                elif form.gis_format.data == 'coordinates':
                    if table == 'gis_point':
                        fields.append(
                            "ST_X(geom) || ' ' || ST_Y(geom) AS coordinates")
                    else:
                        # Lines/polygons: export a representative point on
                        # the geometry's surface instead of raw coordinates
                        fields.append("""
                            ST_X(public.ST_PointOnSurface(geom)) || ' ' ||
                            ST_Y(public.ST_PointOnSurface(geom))
                            AS polygon_center_point""")
                else:
                    fields.append('geom')
            # table.replace('_', '.', 1): e.g. model_entity -> model.entity
            sql = "SELECT {fields} FROM {table};".format(
                fields=','.join(fields), table=table.replace('_', '.', 1))
            data_frame = psql.read_sql(sql, g.db)
            data_frame.to_csv(path.joinpath(date_string + '_' + table + '.csv'),
                              index=False)
    if form.zip.data:
        # Add an info file, zip the folder into export/csv and clean up
        info = 'CSV export from: {host}\n'.format(host=request.headers['Host'])
        info += 'Created: {date} by {user}\nOpenAtlas version: {version}'.format(
            date=date_string, user=current_user.username,
            version=app.config['VERSION'])
        with open(path.joinpath('info.txt'), "w") as file:
            print(info, file=file)
        zip_file = app.config['EXPORT_FOLDER_PATH'].joinpath(
            'csv', date_string + '_csv')
        shutil.make_archive(zip_file, 'zip', path)
        shutil.rmtree(path)
def search(form: FlaskForm) -> ValuesView[Entity]:
    """Execute a fulltext search for the term entered in *form*.

    Matches entity names (and optionally descriptions) against the term,
    restricted to the classes checked in the form, optionally to entities
    the current user worked on, and optionally filtered by begin/end dates.
    Returns a dict-values view of matching entities without duplicates.
    """
    if not form.term.data:
        return {}.values()
    sql = EntityMapper.build_sql() + """
        {user_clause}
        WHERE (LOWER(e.name) LIKE LOWER(%(term)s) {description_clause})
        AND {user_clause2} (""".format(
        user_clause="""
            LEFT JOIN web.user_log ul ON e.id = ul.entity_id """ if form.own.data else '',
        description_clause="""
            OR lower(e.description) LIKE lower(%(term)s) """ if form.desc.data else '',
        user_clause2=' ul.user_id = %(user_id)s AND ' if form.own.data else '')
    # Build one WHERE fragment per selected class/system type
    sql_where = []
    for name in form.classes.data:
        if name in ['source', 'event']:
            sql_where.append("e.class_code IN ({codes})".format(
                codes=str(app.config['CLASS_CODES'][name])[1:-1]))
        elif name == 'actor':
            codes = app.config['CLASS_CODES'][name] + ['E82']  # Add alias
            sql_where.append(" e.class_code IN ({codes})".format(
                codes=str(codes)[1:-1]))
        elif name == 'place':
            sql_where.append(
                "(e.class_code = 'E41' OR e.system_type = 'place')")
        elif name == 'feature':
            sql_where.append("e.system_type = 'feature'")
        elif name == 'stratigraphic unit':
            sql_where.append("e.system_type = 'stratigraphic unit'")
        elif name == 'find':
            sql_where.append("e.class_code = 'E22'")
        elif name == 'reference':
            sql_where.append(
                " e.class_code IN ({codes}) AND e.system_type != 'file'".format(
                    codes=str(app.config['CLASS_CODES']['reference'])[1:-1]))
        elif name == 'file':
            sql_where.append(" e.system_type = 'file'")
    sql += ' OR '.join(sql_where) + ") GROUP BY e.id ORDER BY e.name;"
    g.execute(sql, {
        'term': '%' + form.term.data + '%',
        'user_id': current_user.id
    })
    # Prepare date filter
    from_date = DateMapper.form_to_datetime64(form.begin_year.data,
                                              form.begin_month.data,
                                              form.begin_day.data)
    to_date = DateMapper.form_to_datetime64(form.end_year.data,
                                            form.end_month.data,
                                            form.end_day.data, True)
    # Refill form in case dates were completed
    if from_date:
        string = str(from_date)
        if string.startswith('-') or string.startswith('0000'):
            # Drop the leading character so the split below yields
            # year/month/day parts — presumably for BC years; confirm
            string = string[1:]
        parts = string.split('-')
        form.begin_month.raw_data = None
        form.begin_day.raw_data = None
        form.begin_month.data = int(parts[1])
        form.begin_day.data = int(parts[2])
    if to_date:
        string = str(to_date)
        if string.startswith('-') or string.startswith('0000'):
            string = string[1:]  # pragma: no cover
        parts = string.split('-')
        form.end_month.raw_data = None
        form.end_day.raw_data = None
        form.end_month.data = int(parts[1])
        form.end_day.data = int(parts[2])
    entities = []
    for row in g.cursor.fetchall():
        entity = None
        if row.class_code == 'E82':  # If found in actor alias
            entity = LinkMapper.get_linked_entity(row.id, 'P131', True)
        elif row.class_code == 'E41':  # If found in place alias
            entity = LinkMapper.get_linked_entity(row.id, 'P1', True)
        elif row.class_code == 'E18':
            # E18 rows only match when their system type was selected
            if row.system_type in form.classes.data:
                entity = Entity(row)
        else:
            entity = Entity(row)
        if not entity:  # pragma: no cover
            continue
        if not from_date and not to_date:
            entities.append(entity)
            continue
        # Date criteria present but entity has no dates
        if not entity.begin_from and not entity.begin_to and not entity.end_from \
                and not entity.end_to:
            if form.include_dateless.data:  # Include dateless entities
                entities.append(entity)
            continue
        # Check date criteria
        dates = [
            entity.begin_from, entity.begin_to, entity.end_from, entity.end_to
        ]
        begin_check_ok = False
        if not from_date:
            begin_check_ok = True  # pragma: no cover
        else:
            # Any entity date at or after from_date satisfies the begin check
            for date in dates:
                if date and date >= from_date:
                    begin_check_ok = True
        end_check_ok = False
        if not to_date:
            end_check_ok = True  # pragma: no cover
        else:
            # Any entity date at or before to_date satisfies the end check
            for date in dates:
                if date and date <= to_date:
                    end_check_ok = True
        if begin_check_ok and end_check_ok:
            entities.append(entity)
    return {d.id: d
            for d in entities}.values()  # Remove duplicates before returning
def save_dates(self, form):
    """Persist the date values entered in *form* for this entity
    (delegates to DateMapper.save_dates)."""
    DateMapper.save_dates(self, form)
def set_dates(self):
    """Populate self.dates via DateMapper.get_dates for this entity."""
    self.dates = DateMapper.get_dates(self)
def format_date(value, format_='medium'):
    """Return a display string for *value*.

    Falsy values yield ''. numpy.datetime64 values are converted via
    DateMapper.datetime64_to_timestamp; everything else is formatted with
    babel's dates.format_date using the session language.
    """
    if not value:
        return ''
    # isinstance instead of "type(value) is" — idiomatic and covers subclasses
    if isinstance(value, numpy.datetime64):
        return DateMapper.datetime64_to_timestamp(value)
    return dates.format_date(value, format=format_, locale=session['language'])
def set_dates(self):
    """Populate self.dates via DateMapper.get_link_dates for this link."""
    # Local import — presumably to avoid a circular import at load time; confirm
    from openatlas.models.date import DateMapper
    self.dates = DateMapper.get_link_dates(self)
def format_date(value):
    """Return a string for *value*.

    numpy.datetime64 values are converted via
    DateMapper.datetime64_to_timestamp; other truthy values are expected to
    have a .date() method (e.g. datetime) and yield an ISO date string;
    falsy values yield ''.
    """
    # isinstance instead of "type(value) is" — idiomatic and covers subclasses
    if isinstance(value, numpy.datetime64):
        return DateMapper.datetime64_to_timestamp(value)
    return value.date().isoformat() if value else ''
def save_dates(self, form):
    """Persist the date values entered in *form* for this entity
    (delegates to DateMapper.save_dates)."""
    DateMapper.save_dates(self, form)
def set_dates(self):
    """Populate self.dates via DateMapper.get_dates for this entity."""
    self.dates = DateMapper.get_dates(self)
def format_date(value, format_='medium'):
    """Return a display string for *value*.

    Empty/falsy input yields ''. numpy.datetime64 values are converted via
    DateMapper.datetime64_to_timestamp; everything else is formatted with
    dates.format_date (presumably babel) using the session language.
    """
    if value:
        if isinstance(value, numpy.datetime64):
            return DateMapper.datetime64_to_timestamp(value)
        return dates.format_date(value, format=format_, locale=session['language'])
    return ''