def get_uri():
    """Build a postgres:// URI from Django's DB connection parameters.

    Returns:
        str: a URI of the form ``postgres://user:password@host:port/database``.
    """
    from django.db import connection as db

    # Fetch the connection params once instead of once per URI component:
    # the original called get_connection_params() five times.
    params = db.get_connection_params()
    return (
        f"postgres://{params['user']}:{params['password']}"
        f"@{params['host']}:{params['port']}/{params['database']}"
    )
def import_data(release_dir):
    """Load the SNOMED full-release terminology files into the database."""

    def load_records(filename):
        # Resolve exactly one release file for this terminology type.
        pattern = os.path.join(
            release_dir, "Full", "Terminology", "sct2_" + filename + "*.txt"
        )
        paths = glob.glob(pattern)
        assert len(paths) == 1
        with open(paths[0]) as f:
            reader = csv.reader(f, delimiter="\t")
            next(reader)  # skip the header row
            for row in reader:
                row[1] = parse_date(row[1])  # effective_time
                row[2] = row[2] == "1"  # active
                yield row

    params = django_connection.get_connection_params()
    conn = sqlite3.connect(**params)
    conn.executemany(build_sql(Concept), load_records("Concept"))
    conn.executemany(build_sql(Description), load_records("Description"))
    conn.executemany(build_sql(Relationship), load_records("StatedRelationship"))
    conn.executemany(build_sql(Relationship), load_records("Relationship"))
    conn.commit()
    conn.close()
def handle(self, *args, **options):
    """Entry point for the management command.

    Validates the required options, builds a SQLAlchemy engine from
    Django's DB connection parameters, and runs the upload.

    Raises:
        ValueError: if ``data_file`` or ``reporting_year`` is missing.
    """
    # Validate explicitly instead of via assert + except AssertionError:
    # asserts are stripped when Python runs with -O, which would silently
    # skip this check.
    if not (options.get('data_file') and options.get('reporting_year')):
        raise ValueError('Please provide a data file and reporting year')
    self.reporting_year = options['reporting_year']
    self.amend = options.get('amend', False)
    self.prompt_for_delete = not options.get('no_input', False)
    self.update_index = not options.get('no_index', False)
    self.data_file = self.validate(options['data_file'])
    django_conn = connection.get_connection_params()
    # Map Django's param names onto SQLAlchemy URL keyword names.
    conn_kwargs = {
        'username': django_conn.get('user', ''),
        'password': django_conn.get('password', ''),
        'host': django_conn.get('host', ''),
        'port': django_conn.get('port', ''),
        'database': django_conn.get('database', ''),
    }
    self.engine = sa.create_engine(URL('postgresql', **conn_kwargs))
    self.upload()
def Login(request):
    """Render the login page; on POST, look up the submitted credentials."""
    if request.method == 'POST':
        loginpage = form.Loginform(request.POST)
        if loginpage.is_valid():
            user = loginpage.cleaned_data['Username']
            password = loginpage.cleaned_data['Password']
            # NOTE(review): credentials appear to be matched as plain text
            # against stored rows — confirm whether Django's auth framework
            # (hashed passwords) should be used here instead.
            db = connection.get_connection_params()['db']
            data = UserData.from_db(
                db, ['Username', 'password'], values=[user, password]
            )
            print(data)
            # redirect('signup',{'signup':form.SignUpFrom()})
    else:
        loginpage = form.Loginform()
    return render(request, 'Html/Login.html', {'login': loginpage})
def import_data(release_dir):
    """ Import NHSD CTV3 -> SNOMED concept maps """
    pattern = os.path.join(
        release_dir,
        "Mapping Tables",
        "Updated",
        "Clinically Assured",
        "ctv3sctmap2*.txt",
    )
    paths = glob.glob(pattern)
    assert len(paths) == 1
    with open(paths[0]) as f:
        reader = csv.DictReader(f, delimiter="\t")
        values = list(iter_values(reader))
    # UPSERT rows based on ID, using effective date to decide if a row should
    # overwrite an existing one.
    query = """
    INSERT INTO ctv3sctmap2_mapping(
        id, ctv3_concept_id, ctv3_term_id, ctv3_term_type, sct_concept_id,
        sct_description_id, map_status, effective_date, is_assured)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(id) DO UPDATE SET
        map_status=excluded.map_status,
        effective_date=excluded.effective_date,
        is_assured=excluded.is_assured
    WHERE excluded.effective_date > ctv3sctmap2_mapping.effective_date;
    """
    # Execute the upsert once per row from the release data.
    params = django_connection.get_connection_params()
    conn = sqlite3.connect(**params)
    conn.executemany(query, values)
    conn.commit()
    conn.close()
def setUpClass(cls):
    """
    Sets up a temporary test database for the whole test case.
    For regular Django tests, this is usually done by Django's test runner.
    """
    # Settings module must be in place before django.setup() configures apps.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ctf_gameserver.web.dev_settings')
    django.setup()
    # `interactive=False` causes the test database to be destroyed without
    # asking if it already exists; keep the old config so it can be restored
    # in teardown.
    cls._old_db_conf = setup_databases(verbosity=1, interactive=False)
    super().setUpClass()
    # Get a fresh raw DB connection with as little of Django's
    # pre-configuration as possible.
    cls.connection = connection.get_new_connection(connection.get_connection_params())
    # Ensure SQLite's default isolation level (without autocommit) is being
    # used — an empty string selects the sqlite3 module default.
    cls.connection.isolation_level = ''
def import_data(release_dir):
    """Load the SNOMED query table and history substitution table."""

    def _single_release_path(*parts):
        # Resolve exactly one file matching the pattern inside the release
        # directory; duplicated glob/assert boilerplate factored out.
        paths = glob.glob(os.path.join(release_dir, *parts))
        assert len(paths) == 1, paths
        return paths[0]

    def load_query_table_records():
        path = _single_release_path("Resources", "QueryTable", "xres2_*.txt")
        with open(path) as f:
            reader = csv.reader(f, delimiter="\t")
            next(reader)  # skip the header row
            yield from reader

    # Local helper renamed: original had a typo ("..._recods").
    def load_history_substitution_table_records():
        path = _single_release_path(
            "Resources", "HistorySubstitutionTable", "xres2_*.txt"
        )
        with open(path) as f:
            reader = csv.reader(f, delimiter="\t")
            next(reader)  # skip the header row
            for r in reader:
                # Convert "0"/"1" flag columns to booleans.
                r[5] = r[5] == "1"  # is_ambiguous
                r[11] = r[11] == "1"  # tlh_identical_flag
                r[12] = r[12] == "1"  # fsn_tagless_identical_flag
                r[13] = r[13] == "1"  # fsn_tag_identical_flag
                yield r

    connection_params = django_connection.get_connection_params()
    connection = sqlite3.connect(**connection_params)
    connection.executemany(build_sql(QueryTableRecord), load_query_table_records())
    connection.executemany(
        build_sql(HistorySubstitution),
        load_history_substitution_table_records(),
    )
    connection.commit()
    connection.close()
def handle(self, *args, **options):
    """Build the DB engine, then run the ETL for each requested endpoint."""
    params = connection.get_connection_params()
    # Map Django's connection params onto SQLAlchemy URL keyword names.
    self.engine = sa.create_engine(URL(
        'postgresql',
        username=params.get('user', ''),
        password=params.get('password', ''),
        host=params.get('host', ''),
        port=params.get('port', ''),
        database=params.get('database', ''),
    ))
    self.endpoints = options['endpoints'].split(',')
    self.refresh = options['refresh']
    if not self.refresh:
        # use the cached taxonomy file
        self.taxonomy_file = self.taxonomy_file_fmt.format(date='2018-09-12')
    for endpoint in self.endpoints:
        # Dispatch to the per-endpoint ETL method, e.g. `foo_etl`.
        etl = getattr(self, '{}_etl'.format(endpoint))
        etl()
def decorated_function(*args, **kwargs):
    """Open a fresh DB connection and cursor, handing the cursor to the wrapped function."""
    params = connection.get_connection_params()
    # Both context managers guarantee cleanup even if the call raises.
    with connection.get_new_connection(params) as local_connection:
        with local_connection.cursor() as local_cursor:
            return function(local_cursor, *args, **kwargs)
def create_database(connection_params):
    """Create the configured database via the default 'postgres' maintenance DB.

    Args:
        connection_params: dict of DB connection kwargs including 'database'.
    """
    params = connection_params.copy()
    database = params.pop('database')
    # Connect to the maintenance database; the target one does not exist yet.
    params['database'] = 'postgres'
    conn = Database.connect(**params)
    # CREATE DATABASE cannot run inside a transaction block.
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = conn.cursor()
    # NOTE: the database name comes from trusted settings; identifiers cannot
    # be passed as bound parameters to CREATE DATABASE.
    cursor.execute('CREATE DATABASE ' + database)
    cursor.close()
    conn.close()


if __name__ == '__main__':
    params = connection.get_connection_params()
    try:
        connection.ensure_connection()
    except OperationalError as error:
        # Fix: Python 3 exceptions have no `.message` attribute — the
        # original would raise AttributeError here; use str(error).
        if 'database "' + params['database'] + '" does not exist' in str(error):
            print('Creating database "' + params['database'] + '"')
            create_database(params)
        else:
            # Bare raise preserves the original traceback.
            raise
    # skipping if already some tables exists
    if connection.introspection.table_names():
        sys.exit(0)
    print('Loading inital schema...')
    # Fix: 'U' (universal newlines) mode was removed in Python 3.11; default
    # text mode has the same newline handling in Python 3.
    sql_file = open('schema.sql')
def __enter__(self):
    """Open a fresh autocommit DB connection, store it, and return it."""
    params = connection.get_connection_params()
    conn = connection.get_new_connection(params)
    conn.autocommit = True
    self.connection = conn
    return conn
def get_database_name():
    """Return the name of the database Django is configured to use."""
    params = connection.get_connection_params()
    return params['database']