def test_export(self):
        with app.app_context():
            self.login()

            # SQL export: show form, trigger a dump, then download and delete it
            assert b'Export SQL' in self.app.get(url_for('export_sql')).data
            response = self.app.post(url_for('export_sql'), follow_redirects=True)
            assert b'Data was exported as SQL' in response.data
            sql_name = DateMapper.current_date_for_filename() + '_dump.sql'
            self.app.get(url_for('download_sql', filename=sql_name))
            response = self.app.get(url_for('delete_sql', filename=sql_name),
                                    follow_redirects=True)
            assert b'File deleted' in response.data

            # CSV export: show form, run all three GIS format variants,
            # then download and delete the resulting zip
            assert b'Export CSV' in self.app.get(url_for('export_csv')).data
            for payload in ({'zip': True, 'model_class': True,
                             'gis_point': True, 'gis_format': 'wkt'},
                            {'model_class': True, 'timestamps': True,
                             'gis_polygon': True, 'gis_format': 'postgis'},
                            {'model_class': True, 'timestamps': True, 'gis_point': True,
                             'gis_polygon': True, 'gis_format': 'coordinates'}):
                response = self.app.post(url_for('export_csv'),
                                         follow_redirects=True, data=payload)
                assert b'Data was exported as CSV' in response.data
            zip_name = DateMapper.current_date_for_filename() + '_csv.zip'
            self.app.get(url_for('download_csv', filename=zip_name))
            response = self.app.get(url_for('delete_csv', filename=zip_name),
                                    follow_redirects=True)
            assert b'File deleted' in response.data
# Example #2
 def export_csv(form):
     """ Creates CSV file(s) in the export/csv folder, filename begins with current date.

     One CSV file is written per table checked on the form. If the zip option
     is set, files are first built in a temporary directory, bundled into a
     single zip archive in the export folder and the build directory removed.

     Args:
         form: export form with one boolean field per table plus 'zip',
             'timestamps' and a 'gis_format' selection ('wkt', 'coordinates'
             or anything else for the raw geometry column).
     """
     import pandas.io.sql as psql
     import tempfile
     date_string = DateMapper.current_date_for_filename()
     path = app.config['EXPORT_FOLDER_PATH'] + '/csv/'
     if form.zip.data:
         # Use the system temp dir instead of a hardcoded '/tmp/' (portable)
         path = os.path.join(tempfile.gettempdir(),
                             date_string + '_openatlas_csv_export')
         if os.path.exists(path):
             shutil.rmtree(path)  # pragma: no cover
         os.makedirs(path)
     tables = {
         'model_class': ['id', 'name', 'code'],
         'model_class_inheritance': ['id', 'super_code', 'sub_code'],
         'model_entity': ['id', 'name', 'description', 'class_code', 'begin_from', 'begin_to',
                          'begin_comment', 'end_from', 'end_to', 'end_comment'],
         'model_link': ['id', 'property_code', 'domain_id', 'range_id', 'type_id', 'description',
                        'begin_from', 'begin_to', 'begin_comment', 'end_from', 'end_to',
                        'end_comment'],
         'model_property': ['id', 'code', 'range_class_code', 'domain_class_code', 'name',
                            'name_inverse'],
         'model_property_inheritance': ['id', 'super_code', 'sub_code'],
         'gis_point': ['id', 'entity_id', 'name', 'description', 'type'],
         'gis_linestring': ['id', 'entity_id', 'name', 'description', 'type'],
         'gis_polygon': ['id', 'entity_id', 'name', 'description', 'type']}
     gis_tables = ['gis_point', 'gis_linestring', 'gis_polygon']
     for table, fields in tables.items():
         if not getattr(form, table).data:
             continue  # table not selected on the form
         if form.timestamps.data:
             fields.extend(['created', 'modified'])
         if table in gis_tables:
             # Geometry is exported as WKT, as plain coordinates (polygons
             # use their center point), or as the raw geom column.
             if form.gis_format.data == 'wkt':
                 fields.append("ST_AsText(geom)")
             elif form.gis_format.data == 'coordinates':
                 if table == 'gis_point':
                     fields.append("ST_X(geom) || ' ' || ST_Y(geom) AS coordinates")
                 else:
                     fields.append("""
                              ST_X(public.ST_PointOnSurface(geom)) || ' ' ||
                              ST_Y(public.ST_PointOnSurface(geom)) AS polygon_center_point""")
             else:
                 fields.append('geom')
         # Table names map to schema.table via the first underscore,
         # e.g. 'model_entity' -> 'model.entity'
         sql = "SELECT {fields} FROM {table};".format(
             fields=','.join(fields), table=table.replace('_', '.', 1))
         data_frame = psql.read_sql(sql, g.db)
         file_name = '{date}_{name}.csv'.format(date=date_string, name=table)
         data_frame.to_csv(os.path.join(path, file_name), index=False)
     if form.zip.data:
         info = 'CSV export from: {host}\n'.format(host=request.headers['Host'])
         info += 'Created: {date} by {user}\nOpenAtlas version: {version}'.format(
             date=date_string, user=current_user.username, version=app.config['VERSION'])
         with open(os.path.join(path, 'info.txt'), "w") as file:
             print(info, file=file)
         zip_file = app.config['EXPORT_FOLDER_PATH'] + '/csv/' + date_string + '_csv'
         shutil.make_archive(zip_file, 'zip', path)
         shutil.rmtree(path)
 def export_sql():
     """ Creates a pg_dump file in the export/sql folder, filename begins with current date.

     Returns:
         True if pg_dump completed successfully, False otherwise.
     """
     path = '{path}/sql/{date}_dump.sql'.format(
         path=app.config['EXPORT_FOLDER_PATH'],
         date=DateMapper.current_date_for_filename())
     # Pass arguments as a list with shell=False so configuration values can
     # never be interpreted by a shell; the password travels via the
     # environment and is not visible in the process list.
     command = ['pg_dump',
                '-h', str(app.config['DATABASE_HOST']),
                '-d', str(app.config['DATABASE_NAME']),
                '-U', str(app.config['DATABASE_USER']),
                '-p', str(app.config['DATABASE_PORT']),
                '-f', path]
     try:
         # Merge os.environ so pg_dump is found via PATH and keeps HOME etc.;
         # check=True makes a failed dump report False instead of True.
         subprocess.run(command,
                        check=True,
                        stdin=subprocess.DEVNULL,
                        env={**os.environ, 'PGPASSWORD': app.config['DATABASE_PASS']})
     except Exception:  # pragma: no cover
         return False
     return True
 def export_sql() -> bool:
     """ Creates a pg_dump file in the export/sql folder, filename begins with current date."""
     # Todo: prevent exposing the database password to the process list
     file_name = DateMapper.current_date_for_filename() + '_dump.sql'
     path = app.config['EXPORT_FOLDER_PATH'].joinpath('sql', file_name)
     config = app.config
     command = 'pg_dump -h {} -d {} -U {} -p {} -f {}'.format(
         config['DATABASE_HOST'],
         config['DATABASE_NAME'],
         config['DATABASE_USER'],
         config['DATABASE_PORT'],
         path)
     try:
         # Password is handed to pg_dump via the environment only
         process = subprocess.Popen(command,
                                    shell=True,
                                    stdin=subprocess.PIPE,
                                    env={'PGPASSWORD': config['DATABASE_PASS']})
         process.wait()
     except Exception:  # pragma: no cover
         return False
     return True
# Example #5
 def export_sql():
     """ Creates a pg_dump file in the export/sql folder, filename begins with current date.

     Returns:
         True if pg_dump completed successfully, False otherwise
         (always False on non-POSIX systems).
     """
     if os.name != "posix":  # pragma: no cover
         return False  # For other operating systems e.g. Windows, we would need adaptions here
     path = '{path}/sql/{date}_dump.sql'.format(
         path=app.config['EXPORT_FOLDER_PATH'],
         date=DateMapper.current_date_for_filename())
     # Argument list + shell=False: configuration values cannot be shell
     # interpreted; the password travels via the environment instead of the
     # command line so it is not exposed in the process list.
     command = ['pg_dump',
                '-h', str(app.config['DATABASE_HOST']),
                '-d', str(app.config['DATABASE_NAME']),
                '-U', str(app.config['DATABASE_USER']),
                '-p', str(app.config['DATABASE_PORT']),
                '-f', path]
     try:
         # Merge os.environ so pg_dump is found via PATH and keeps HOME etc.;
         # check=True makes a failed dump report False instead of True.
         subprocess.run(command,
                        check=True,
                        stdin=subprocess.DEVNULL,
                        env={**os.environ, 'PGPASSWORD': app.config['DATABASE_PASS']})
     except Exception:  # pragma: no cover
         return False
     return True
 def export_csv(form: FlaskForm) -> None:
     """ Creates CSV file(s) in the export/csv folder, filename begins with current date."""
     import pandas.io.sql as psql
     date_string = DateMapper.current_date_for_filename()
     path = app.config['EXPORT_FOLDER_PATH'].joinpath('csv')
     if form.zip.data:
         # Build everything in a temporary folder which gets zipped afterwards
         path = app.config['TMP_FOLDER_PATH'].joinpath(
             date_string + '_openatlas_csv_export')
         if os.path.exists(path):
             shutil.rmtree(path)  # pragma: no cover
         os.makedirs(path)
     # All three GIS tables share the same base columns; copy the list so
     # per-table appends below stay independent.
     gis_fields = ['id', 'entity_id', 'name', 'description', 'type']
     tables = {
         'model_class': ['id', 'name', 'code'],
         'model_class_inheritance': ['id', 'super_code', 'sub_code'],
         'model_entity': [
             'id', 'name', 'description', 'class_code', 'begin_from',
             'begin_to', 'begin_comment', 'end_from', 'end_to',
             'end_comment'],
         'model_link': [
             'id', 'property_code', 'domain_id', 'range_id', 'type_id',
             'description', 'begin_from', 'begin_to', 'begin_comment',
             'end_from', 'end_to', 'end_comment'],
         'model_property': [
             'id', 'code', 'range_class_code', 'domain_class_code', 'name',
             'name_inverse'],
         'model_property_inheritance': ['id', 'super_code', 'sub_code'],
         'gis_point': list(gis_fields),
         'gis_linestring': list(gis_fields),
         'gis_polygon': list(gis_fields)}
     gis_tables = ('gis_point', 'gis_linestring', 'gis_polygon')
     for table, fields in tables.items():
         if not getattr(form, table).data:
             continue  # table was not selected on the form
         if form.timestamps.data:
             fields += ['created', 'modified']
         if table in gis_tables:
             gis_format = form.gis_format.data
             if gis_format == 'wkt':
                 fields.append("ST_AsText(geom)")
             elif gis_format == 'coordinates':
                 if table == 'gis_point':
                     fields.append(
                         "ST_X(geom) || ' ' || ST_Y(geom) AS coordinates")
                 else:
                     # Polygons/linestrings are reduced to a center point
                     fields.append("""
                              ST_X(public.ST_PointOnSurface(geom)) || ' ' ||
                              ST_Y(public.ST_PointOnSurface(geom)) AS polygon_center_point""")
             else:
                 fields.append('geom')
         # First underscore separates schema from table, e.g. model.entity
         sql = "SELECT {fields} FROM {table};".format(
             fields=','.join(fields), table=table.replace('_', '.', 1))
         frame = psql.read_sql(sql, g.db)
         frame.to_csv(path.joinpath(date_string + '_' + table + '.csv'),
                      index=False)
     if form.zip.data:
         info = 'CSV export from: {host}\n'.format(
             host=request.headers['Host'])
         info += 'Created: {date} by {user}\nOpenAtlas version: {version}'.format(
             date=date_string,
             user=current_user.username,
             version=app.config['VERSION'])
         with open(path.joinpath('info.txt'), "w") as file:
             print(info, file=file)
         archive = app.config['EXPORT_FOLDER_PATH'].joinpath(
             'csv', date_string + '_csv')
         shutil.make_archive(archive, 'zip', path)
         shutil.rmtree(path)