def test_error():
    """Invalid regular expressions raise InvalidPattern both at init time and on append."""
    with pytest.raises(InvalidPattern):
        RegexList(['*'])
    with pytest.raises(InvalidPattern):
        regex_rules = RegexList()
        regex_rules.append('**')
from etl.timeit import Timer
from etl.utils import get_all_tables, get_schema_fieldname

# Tables excluded from the migration: Django bookkeeping, auth/session
# plumbing and third-party app internals.  Raw strings keep ``\.`` a literal
# regex escape instead of a deprecated Python string escape (W605).
IGNORED_TABLES = RegexList([
    r'.*\.auth_user_groups',
    r'.*\.auth_user_user_permissions',
    r'.*\.users_userprofile',
    r'.*\.users_userprofile_countries_available',
    r'public\.account_.*',
    r'public\.authtoken_token',
    r'public\.celery_.*',
    r'public\.corsheaders_.*',
    r'public\.django_admin_log',
    r'public\.django_celery_.*',
    r'.*\.django_migrations',
    r'public\.django_session',
    r'public\.djcelery_.*',
    r'public\.environment_.*',
    r'public\.notification_notification',
    r'public\.post_office_.*',
    r'public\.reversion_.*',
    r'public\.socialaccount_.*',
    r'public\.spatial_ref_sys',
    r'public\.unicef_notification_.*',
    r'public\.waffle_.*',
])


def migrate_public(source: Engine, destination: Engine, echo=None):
    # NOTE(review): the body of this function is truncated in the visible
    # source; only the session-factory creation is shown here.
    DestinationSession = sessionmaker(destination)
import geoalchemy2  # noqa: F401
from django_regex.utils import RegexList
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from sqlalchemy import BOOLEAN, Column, Integer, MetaData, PrimaryKeyConstraint, String
from sqlalchemy.engine import Engine
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.sql import ColumnCollection
from sqlalchemy.sql.ddl import AddConstraint, CreateTable
from src.etl.utils import create_database, drop_database, get_all_tables, get_schema_fieldname
from etl.timeit import Timer

# Raw string keeps ``\.`` a regex escape instead of a deprecated string escape.
IGNORED_TABLES = RegexList(['public.spatial_ref_sys', r'.*\.reversion_.*'])


def syncronyze_extensions(source: Engine, destination: Engine):
    """Install on *destination* every PostgreSQL extension present on *source*
    (plus ``btree_gin``); extensions that already exist are skipped.
    """
    conn_from = source.connect()
    cur = conn_from.execute("SELECT * from pg_extension;")
    extensions = [line[0] for line in cur.fetchall()]
    extensions += ['btree_gin']
    conn_from.close()
    conn_to = destination.connect()
    # CREATE EXTENSION cannot run inside a transaction block.
    conn_to.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    for ext in extensions:
        try:
            conn_to.execute("CREATE EXTENSION %s;" % ext)
            print("Creating extension %s" % ext)
        except ProgrammingError as e:  # pragma: no cover
            if 'already exists' not in str(e):
                # NOTE(review): the source is truncated at this point.
                # Re-raising unexpected errors is the conventional handling
                # here -- confirm against the original file.
                raise
from django.core.management.commands.inspectdb import Command as BaseCommand
from django.db import connections
from django_regex.utils import RegexList

logger = logging.getLogger(__name__)

# Tables inspectdb must never generate models for.
# (The original "INGNORED" spelling is kept: other modules may import it.)
# Fixed: 'celery_.*' was listed twice; the duplicate is removed.
INGNORED_TABLES = RegexList([
    'auth_permission',
    'account_user_user_permissions',
    'auth_group_permissions',
    'django_cron_.*',
    'post_office_.*',
    'django_session',
    'drfpasswordless_.*',
    'celery_.*',
    'djcelery_.*',
    'django_celery_beat.*',
    'django_celery_results.*',
    'django_migrations',
    'django_admin_log',
    'unicef_notification_.*',
    'social_auth_.*',
])

# Generated model classes that must not be exposed through the API.
NO_API = RegexList([
    'AccountUserGroups',
    'AccountUser',
    'AccountUserUserPermissions',
    'AccountUserprofile',
    'Auth.*',
    'Django.*',
    # NOTE(review): this list is truncated in the visible source; the closing
    # bracket below was restored -- confirm no entries are missing.
])
def test_append_rex():
    """A pre-compiled pattern object can be appended directly."""
    regex_rules = RegexList()
    regex_rules.append(re.compile('^abc$'))
    assert 'abc' in regex_rules
    assert 'a' not in regex_rules
def test_init_rex():
    """Perl-style patterns with the /i flag match case-insensitively."""
    regex_rules = RegexList([perl.compile('/^abc$/i')])
    assert 'ABC' in regex_rules
    assert 'a' not in regex_rules
def test_repr():
    """str() shows the original pattern strings (u-prefixed on Python 2)."""
    regex_rules = RegexList(['.*', '[0-9]*'])
    major = sys.version_info[0]
    if major < 3:
        assert str(regex_rules) == "[u'.*', u'[0-9]*']"
    elif major == 3:
        assert str(regex_rules) == "['.*', '[0-9]*']"
def test_setitem():
    """Assigning a pattern string by index replaces the compiled rule."""
    regex_rules = RegexList([''])
    regex_rules[0] = '^abc$'
    assert 'abc' in regex_rules
    assert 'a' not in regex_rules
def test_append():
    """Appended pattern strings are compiled and matched like initial ones."""
    regex_rules = RegexList()
    regex_rules.append('^abc$')
    assert 'abc' in regex_rules
    assert 'a' not in regex_rules
def test_number():
    """Non-string values are matched too (1 is tested against ``\\d*``)."""
    rules = RegexList()
    # Raw string: '\d' in a plain literal is a deprecated escape (W605).
    rules.append(r'\d*')
    assert 1 in rules
    assert 'a' not in rules
def test_init():
    """Patterns passed to the constructor are compiled and usable at once."""
    regex_rules = RegexList(['^abc$'])
    assert 'abc' in regex_rules
    assert 'a' not in regex_rules
import re
from collections import OrderedDict
from django.utils.functional import cached_property
import sqlparse
from django_regex.utils import RegexList
from sqlparse.sql import Function, Identifier, IdentifierList, Where
from sqlparse.tokens import Keyword, Whitespace

# Tables shared across tenants.  The leading double quote is intentional:
# these patterns match quoted table names as they appear in raw SQL text.
SHARED_TABLES = RegexList([
    '"auth_.*',
    '"publics_.*',
    '"users_.*',
    '"categories_.*',
    '"django_content_type.*'
])

# Module-level cache; how it is populated is not visible in this chunk.
cache = {}


def clean_stm(sql):
    """Collapse CR/LF so the statement can be handled as a single line."""
    return sql.replace("\n", " ").replace("\r", " ")


class Parser:
    # NOTE(review): only __init__ is visible in this chunk; the class
    # presumably continues with parsing helpers further down the file.

    def __init__(self, sql):
        # Keep both the working copy and the (cleaned) original statement.
        self.raw_sql = self.original = clean_stm(sql)
        self.where = ""
        self._raw_tables = []
        # self._raw_order = []
        # self._raw_fields = []
        # self._raw_joins = []
        # self._raw_where = []
def handle(self, *connections, **options):
    """Dump the ``public`` schema of the selected database connection(s)
    via ``pg_dump`` (schema only), optionally restricted to the tables
    behind given models / explicit table names, optionally reformatted
    and syntax-highlighted.

    With no connection names, just lists the configured databases.
    """
    self.verbosity = options['verbosity']
    if not connections:
        self.stdout.write("\n".join(settings.DATABASES.keys()))
        return
    for conn_name in connections:
        engine = settings.DATABASES[conn_name]['ENGINE']
        # Only PostgreSQL-family engines are supported (pg_dump is used).
        if options['check_engine'] and engine not in [
            'django.contrib.gis.db.backends.postgis',
            'django.contrib.gis.db.backends.postgresql',
            'etools_datamart.apps.multitenant.postgresql'
        ]:
            raise CommandError("Engine not supported: '%s'" % engine)
        dbname = settings.DATABASES[conn_name]['NAME']
        uname = settings.DATABASES[conn_name]['USER']
        passw = settings.DATABASES[conn_name]['PASSWORD']
        host = settings.DATABASES[conn_name]['HOST']
        port = settings.DATABASES[conn_name]['PORT']
        cmds = [
            'pg_dump', dbname,
            '--schema', 'public',
            '--schema-only',
            '--no-owner',
            '--no-security-labels',
            '--no-synchronized-snapshots',
            '--no-tablespaces',
            '--no-privileges',
            '-U', uname,
            '-p', str(port),
            '-h', host,
        ]
        if passw:
            # NOTE(review): pg_dump's -W takes no argument -- it only forces
            # a password prompt.  Passing the password as the next argv
            # element likely does not work; PGPASSWORD in the environment
            # (or a .pgpass file) is the documented mechanism.  Confirm.
            cmds.extend(['-W', passw])
        if options['models']:
            # Restrict the dump to the db tables backing the requested
            # models, matched as regexes against every installed model's
            # db_table name.
            requested = RegexList(options['models'])
            models = apps.get_models()
            names = [m._meta.db_table for m in models]
            selection = [n for n in names if n in requested]
            for tablename in selection:
                cmds.extend(['-t', tablename])
        if options['tables']:
            # Explicit table names are passed through to pg_dump verbatim.
            for tablename in options['tables']:
                cmds.extend(['-t', tablename])
        if self.verbosity > 1 or options['dry_run']:
            self.stdout.write(" ".join(cmds))
        if not options['dry_run']:
            p = subprocess.Popen(cmds,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            output, errors = p.communicate()
            if errors:
                self.stderr.write(errors.decode())
            else:
                if options['format']:
                    # NOTE(review): ``output`` is bytes here; after
                    # sqlparse.format the non-highlight branch below still
                    # calls .decode() -- verify the bytes/str handling on
                    # the format-without-highlight path.
                    output = sqlparse.format(output,
                                             strip_comments=True,
                                             strip_whitespace=True,
                                             indent_width=2,
                                             output_format='sql',
                                             reindent=True,
                                             keyword_case='upper')
                lexer = SqlLexer()
                formatter = Terminal256Formatter(
                    style=options['style'])
                if options['highlight']:
                    output = highlight(output, lexer, formatter)
                else:
                    output = output.decode('utf8')
                self.stdout.write(output)