def handle(self, *args, **options):
    """Walk the job table from newest to oldest id in chunks of ``delta``,
    cross-referencing error lines for jobs that have failure lines but no
    text-log-error metadata yet.

    Fix: the progress estimate previously ran even before any rows had been
    seen, crashing with a TypeError (``first_processed`` still ``None``) and
    risking ZeroDivisionError when the first processed id equals the minimum
    id; both cases are now guarded.
    """
    logger.debug("Backfill crossreference error lines command")
    with connection.cursor() as c:
        c.execute("""SELECT id FROM job ORDER by ID DESC LIMIT 1""")
        max_job_id = c.fetchone()[0]
        c.execute("""SELECT id FROM job ORDER by ID ASC LIMIT 1""")
        min_job_id = c.fetchone()[0]
    logger.info("Maximum job id %i", max_job_id)
    logger.info("Minimum job id %i", min_job_id)

    delta = 100000  # span of job ids examined per iteration
    job_id = max_job_id
    start_time = time.time()
    first_processed = None
    total_jobs = None

    while job_id > min_job_id:
        job_id -= delta
        # Reconnect per chunk: worker processes may have invalidated the
        # inherited connection.
        connection.connect()
        with connection.cursor() as c:
            c.execute("""SELECT job.id
FROM job
INNER JOIN text_log_step ON text_log_step.job_id = job.id
INNER JOIN text_log_error as tle ON tle.step_id = text_log_step.id
INNER JOIN failure_line ON job.guid = failure_line.job_guid
WHERE NOT EXISTS (
    SELECT 1
    FROM text_log_error
    INNER JOIN text_log_error_metadata ON text_log_error.id = text_log_error_metadata.text_log_error_id
    JOIN text_log_step ON text_log_step.id = text_log_error.step_id
    WHERE text_log_step.job_id = job.id)
AND job.id > %s
AND job.result NOT IN ('success', 'skipped', 'retry', 'usercancel', 'unknown', 'superseded')
GROUP BY job.id
ORDER BY job.id DESC;""", [job_id])
            rows = c.fetchall()
        connection.close()

        if first_processed is None and rows:
            first_processed = rows[0][0]
            total_jobs = float(first_processed - min_job_id)

        logger.info("Found %i rows", len(rows))
        with ProcessPoolExecutor(4) as executor:
            executor.map(_crossreference_job, (row[0] for row in rows))

        # Only estimate progress once something has actually been processed
        # and the id span is non-zero.
        if first_processed is not None and total_jobs:
            now = time.time()
            fraction_complete = float(first_processed - job_id) / total_jobs
            time_elapsed = now - start_time
            total_time = time_elapsed / fraction_complete
            time_remaining = total_time - time_elapsed
            logger.info("Estimated %i seconds remaining", time_remaining)
def wrapped(*args, **kwargs):
    # On Windows ("nt"), probe the Django connection before delegating;
    # a stale handle raises ProgrammingError and is replaced with a fresh one.
    running_windows = os.name.lower() == "nt"
    if running_windows:
        try:
            connection.cursor()  # cheap liveness probe
        except ProgrammingError:
            connection.connect()  # stale handle: reconnect
    return func(*args, **kwargs)
def wait_for_database(self, timeout=DEFAULT_TIMEOUT):
    """Poll the database once per second until it accepts connections.

    Returns True when the server is reachable -- including failures that
    prove the server is up but auth/pg_hba is misconfigured -- and False
    once *timeout* attempts have been exhausted.
    """
    ready = False
    for _attempt in range(timeout):
        try:
            connection.connect()
            ready = True
            break
        except Exception as exc:
            first_line = str(exc).splitlines()[0]
            # Server answered but rejected us: reachable for our purposes.
            if first_line.startswith(('fe_sendauth: no password supplied',
                                      'FATAL: no pg_hba.conf entry')):
                ready = True
                break
            # Anything other than the expected "still booting" states is
            # worth surfacing to the operator.
            if not first_line.startswith(
                    ('could not connect to server: Connection refused',
                     'FATAL: the database system is starting up')):
                self.stdout.write(self.style.ERROR(first_line))
        time.sleep(1)
    connection.close()
    return ready
def __create_background_job__(target, *args, **kwargs): """ Create an independant processus for make tiling """ # Fork 1 pid = os.fork() # Child execution if pid == 0: os.setsid() # Fork 2 pidd = os.fork() if pidd == 0: # Make new connection to db connection.connect() # Start action target(*args, **kwargs) # kill os._exit(0) else: # kill os._exit(0)
def check_db_connection():
    """Make sure a usable DB connection exists; reconnect when it does not."""
    try:
        if not connection.is_usable():
            connection.connect()
    except Exception:
        # is_usable() itself can blow up on a dead connection -- reconnect.
        connection.connect()
def pg_bus_conn(new_connection=False):
    '''
    Any listeners probably want to establish a new database connection,
    separate from the Django connection used for queries, because that will prevent
    losing connection to the channel whenever a .close() happens.

    Any publishers probably want to use the existing connection
    so that messages follow postgres transaction rules
    https://www.postgresql.org/docs/current/sql-notify.html

    Fix: the dedicated connection is now closed in a ``finally`` block, so it
    no longer leaks when the consumer raises while this generator is suspended.
    '''
    if new_connection:
        conf = settings.DATABASES['default']
        conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'],
                                user=conf['USER'], password=conf['PASSWORD'],
                                port=conf['PORT'], **conf.get("OPTIONS", {}))
        # Django connection.cursor().connection doesn't have autocommit=True on by default
        conn.set_session(autocommit=True)
    else:
        if pg_connection.connection is None:
            pg_connection.connect()
        if pg_connection.connection is None:
            raise RuntimeError(
                'Unexpectedly could not connect to postgres for pg_notify actions'
            )
        conn = pg_connection.connection

    pubsub = PubSub(conn)
    try:
        yield pubsub
    finally:
        # Only tear down connections we created ourselves.
        if new_connection:
            conn.close()
def indexAllteach(request):
    """Admin page: every (teacher, course) teaching assignment, paginated."""
    print("查询教师授课信息")
    page = request.GET.get('page', 1)
    if not ('sessionid' in request.COOKIES and request.session['role'] == 'admin'):
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
    teacher_id = request.session['id']
    connection.connect()
    cursor = connection.cursor()
    cursor.execute(
        "select teach.teacher_id, teacher_name, teach.course_id, course_name from \
        teach natural join teacher natural join course \
        order by teach.teacher_id, teach.course_id;")
    rows = cursor.fetchall()
    connection.close()
    records = [
        {"teacher_id": tid, 'teacher_name': tname,
         'course_id': cid, 'course_name': cname}
        for tid, tname, cid, cname in rows
    ]
    return render(request, 'admin7.html', pageBuilder(records, page))
def indexTDistSelect(request):
    """Teacher view: list the logged-in teacher's courses (dropdown data).

    Security fix: the query is now parameterized instead of %-interpolating
    the session id into the SQL string.
    """
    print("查询教师教授的课程")
    if 'sessionid' in request.COOKIES and request.session['role'] == 'teacher':
        teacher_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        cursor.execute("select course.course_id,course_name,credits \
            from course natural join teach \
            where teacher_id=%s", [teacher_id])
        result = cursor.fetchall()
        connection.close()
        result_list = [
            {"course_id": r[0], 'course_name': r[1], 'credits': r[2]}
            for r in result
        ]
        for item in result_list:
            print("课程ID:%s 课程名:%s" % (item['course_id'], item['course_name']))
        return render(request, 'teacher4-1.html', {"data": result_list})
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def fill_old_data(self, data):
    """Create *data* records in the temporary prod database, then restore the
    default database alias.

    Fix: restoration of the original database name now runs in ``finally``,
    so a failure inside ``create_records`` no longer leaves the 'default'
    alias pointed at the temporary database.
    """
    save_db_name = settings.DATABASES['default']['NAME']
    settings.DATABASES['default']['NAME'] = self.tmp_prod_database_name
    connection.connect()
    try:
        self.create_records(data)
    finally:
        settings.DATABASES['default']['NAME'] = save_db_name
        connection.connect()
def changeTGrade(request):  # enter / delete / modify grades of taught courses
    # Teacher-only view. Despite the name, only the 'update' operation is
    # implemented below; other values of `my_select` fall through to the
    # re-render.
    page=request.GET.get('page',1)
    if 'sessionid' in request.COOKIES and request.session['role'] == 'teacher':
        teacher_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        operation = request.POST.get('my_select')
        student_id = request.POST.get('student_id')
        course_id = request.POST.get('course_id')
        # NOTE(review): every query below %-interpolates POSTed values into
        # the SQL string -- SQL-injection prone; should be parameterized.
        cursor.execute("select * from student where student_id = '%s' " % (student_id))
        student = cursor.fetchall()
        cursor.execute("select * from course where course_id = '%s' " % (course_id))
        course = cursor.fetchall()
        if operation == 'update': # modify an existing grade
            grade = request.POST.get('grade')
            cursor.execute("select * from take \
                where course_id = '%s' and student_id = '%s'" % (course_id, student_id))
            grades = cursor.fetchall()
            error_count = 0
            if len(student) == 0:
                # student does not exist
                print("该学生不存在")
                messages.error(request,"该学生不存在")
                error_count += 1
            elif len(course) == 0:
                # course does not exist
                print("该课程不存在")
                messages.error(request,"该课程不存在")
                error_count += 1
            elif len(grades) ==0 and (error_count == 0):
                # student is not enrolled in this course
                print("该学生没有上此门课程")
                messages.error(request,"该学生没有上此门课程")
                error_count += 1
            elif (ifdigit(grade) == False) or ((ifdigit(grade) == True) and ((float(grade) < 0) or (float(grade) > 100))):
                # grade must be a number in [0, 100]
                print("请输入0到100之间的数字")
                messages.error(request,"请输入0到100之间的数字")
                error_count += 1
            elif error_count == 0:
                # all validations passed: persist the new grade
                grade = float(grade)
                cursor.execute('update take set \
                    grade = "%f" where (student_id = "%s") \
                    and (course_id = "%s")' % (grade, student_id, course_id))
        # Re-query everything this teacher grades to re-render the page.
        cursor.execute("select take.student_id,student_name,take.course_id,course_name,credits,grade \
            from student natural join course natural join take natural join teach \
            where teacher_id ='%s' \
            order by take.student_id, take.course_id;" % (teacher_id))
        result = cursor.fetchall()
        connection.close()
        result_list = []
        for r in result:
            result_list.append({"student_id":r[0],'student_name':r[1],'course_id':r[2],\
                'course_name':r[3],'credits':r[4],'grade':r[5]})
        return render(request, 'teacher3.html',pageBuilder(result_list,page))
    else:
        # not a logged-in teacher
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def get_connection(self):
    """Open, start and authenticate a STOMP connection to the brokers."""
    logger.info("[%s] Connecting to %s" % (self._consumer_name, str(self._brokers)))
    conn = stomp.Connection(host_and_ports=self._brokers,
                            use_ssl=self._use_ssl,
                            ssl_version=3)
    conn.start()
    conn.connect(self._user, self._password, wait=False)
    # Brief pause so the broker handshake can settle before first use.
    time.sleep(0.5)
    return conn
def changeallClass(request):
    """Admin view: add / update / delete a class record, then re-render the list.

    Security fix: all queries are now parameterized instead of %-interpolating
    POSTed values, closing SQL-injection holes on class_id/dept/major. The
    redundant ``elif error_count == 0`` guards (always true when reached,
    since error_count starts at 0 and the branches are exclusive) were
    simplified to plain ``else``.
    """
    page = request.GET.get('page', 1)
    if 'sessionid' in request.COOKIES and request.session['role'] == 'admin':
        teacher_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        operation = request.POST.get('my_select')
        class_id = request.POST.get('class_id')
        dept = request.POST.get('dept')
        major = request.POST.get('major')
        cursor.execute("select * from class where class_id = %s", [class_id])
        Class = cursor.fetchall()
        if operation == 'add':  # create
            if len(Class) != 0:
                print("此班级ID已经存在")
                messages.error(request, "此班级ID已经存在")
            else:
                cursor.execute("insert into class values (%s, %s, %s)",
                               [class_id, dept, major])
        elif operation == 'update':  # modify
            if len(Class) == 0:
                print("此班级ID不存在")
                messages.error(request, "此班级ID不存在")
            else:
                cursor.execute(
                    "update class set dept = %s, major = %s where class_id = %s",
                    [dept, major, class_id])
        elif operation == 'delete':  # remove
            if len(Class) == 0:
                print("此班级ID不存在")
                messages.error(request, "此班级ID不存在")
            else:
                cursor.execute("delete from class where class_id = %s", [class_id])
        # Re-query the full class list for re-rendering.
        cursor.execute("select * from class order by class_id;")
        result = cursor.fetchall()
        connection.close()
        result_list = [{"class_id": r[0], 'dept': r[1], 'major': r[2]}
                       for r in result]
        return render(request, 'admin5.html', pageBuilder(result_list, page))
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def clear_data(django_db_blocker):
    """Reconnect to the DB, then wipe all backend models (selenium user kept)."""
    logging.info("Reconnecting to database")
    with django_db_blocker.unblock():
        connection.connect()
    logging.info("Cleaning all data...")
    with django_db_blocker.unblock():
        DjangoUser.objects.exclude(username=SELENIUM_USERNAME).delete()
        backend_models = (m for m in apps.get_models()
                          if m.__module__ == 'backend.models')
        for model in backend_models:
            model.objects.all().delete()
def test_connect_one_down(self, mock_connect):
    "Ensure that connect() works when a peer is down but another is up."
    # First attempt fails, second succeeds.
    mock_connect.side_effect = [OperationalError(), mock.MagicMock()]
    # With at least one server up, connect() must not raise.
    connection.connect()
    # Afterwards exactly one peer sits in each bucket.
    self.assertEqual(1, len(connection.state.peers_up))
    self.assertEqual(1, len(connection.state.peers_down))
def switch_user(test=False):
    """Re-connect to database with either user 'tester' or 'caluma'."""
    if test:
        user, password = "******", "******"
    else:
        user, password = "caluma", "caluma"
    # Drop the current connection before swapping credentials.
    connection.close()
    db_conf = settings.DATABASES["default"]
    db_conf["USER"] = user
    db_conf["PASSWORD"] = password
    connection.connect()
def test_hooks_cleared_on_reconnect(self, track):
    # Queue a hook, then kill the connection mid-transaction.
    with atomic():
        track.do(1)
        connection.close()
    # A fresh connection must not inherit the hook queued above.
    connection.connect()
    with atomic():
        track.do(2)
    # Only the post-reconnect hook may have fired.
    track.assert_done([2])
def test_hooks_cleared_on_reconnect(self):
    # Queue a hook, then kill the connection mid-transaction.
    with transaction.atomic():
        self.do(1)
        connection.close()
    # A fresh connection must not inherit the hook queued above.
    connection.connect()
    with transaction.atomic():
        self.do(2)
    # Only the post-reconnect hook may have fired.
    self.assertDone([2])
def test_connect_all_down(self, mock_connect):
    "Ensure an error is raised when all peers are down."
    # Every connection attempt fails.
    mock_connect.side_effect = OperationalError()
    with self.assertRaises(DatabaseError):
        connection.connect()
    # Both peers should end up marked down, none up.
    self.assertEqual(0, len(connection.state.peers_up))
    self.assertEqual(2, len(connection.state.peers_down))
def check_connection():
    """
    Due to known and seemingly unresolved issue with celery, if a postgres
    connection drops and becomes unusable, it causes failure of tasks and
    all their signal handlers that use the DB:
    https://github.com/celery/celery/issues/621
    """
    try:
        connection.cursor()  # cheap probe of the current connection
        return
    except InterfaceError:
        pass
    # Connection is broken: discard it and open a fresh one.
    connection.close_if_unusable_or_obsolete()
    connection.connect()
def reconnect(): """ reconnects to db after OperationalError """ log.info("reconnecting...") while True: try: connection.connect() log.info("reconnected to db") return except (OperationalError, MySQLError) as e: log.error("Failed to reconnect, will retry...") time.sleep(5)
def execute(sql, params=None):
    """Run *sql* with *params* and return all result rows.

    :param sql: SQL statement to execute
    :param params: optional query parameters
    :return: list of result rows (``cursor.fetchall()``)

    Fix: the cursor is now a context manager and the connection is closed in
    ``finally``, so exceptions in execute/fetch no longer leak either one.
    """
    connection.connect()
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
            return cursor.fetchall()
    finally:
        connection.close()
def test_simple_dump_ls_restore(tmpdir, capsys, settings):
    """
    Tests a simple dump, ls, and restore, asserting that a user
    created after a dump is deleted upon restore
    """
    db_name = settings.DATABASES['default']['NAME']
    settings.PGCLONE_STORAGE_LOCATION = tmpdir.strpath
    # No dumps exist yet: ls prints nothing and restore must fail.
    call_command('pgclone', 'ls')
    assert capsys.readouterr().out == ''
    with pytest.raises(RuntimeError):
        call_command('pgclone', 'restore', db_name)
    # Create one user, dump, and verify the dump is listed.
    ddf.G('auth.User')
    call_command('pgclone', 'dump')
    call_command('pgclone', 'ls')
    assert capsys.readouterr().out == (
        f'{db_name}/2020_07_01_00_00_00_000000.default.dump\n'
    )
    # A second user created after the dump should vanish on restore.
    ddf.G('auth.User')
    assert User.objects.count() == 2
    call_command('pgclone', 'restore', db_name)
    # Reconnect after each restore before querying again (pattern used
    # throughout: restore invalidates the current connection).
    connection.connect()
    assert User.objects.count() == 1
    # Restoring by the explicit dump key yields the same state.
    call_command(
        'pgclone',
        'restore',
        f'{db_name}/2020_07_01_00_00_00_000000.default.dump',
    )
    connection.connect()
    assert User.objects.count() == 1
    # Do some basic error assertions
    with pytest.raises(pgclone.exceptions.ConfigurationError):
        call_command('pgclone', 'dump', '-c bad_config_name')
    with pytest.raises(pgclone.exceptions.ConfigurationError):
        call_command('pgclone', 'restore', db_name, '-c bad_config_name')
    # Try restoring with custom swap hooks
    call_command('pgclone', 'restore', db_name, '--pre-swap-hook', 'migrate')
    connection.connect()
    assert User.objects.count() == 1
    # Dump and restore while ignoring the user table
    with freezegun.freeze_time('2020-07-02'):
        call_command('pgclone', 'dump', '--exclude-model', 'auth.User')
    assert User.objects.count() == 1
    call_command('pgclone', 'restore', db_name)
    connection.connect()
    assert not User.objects.exists()
def signin(request):
    """Log a user in by comparing the POSTed email/password against the User table.

    Fixes over the original:
      * ``connection.connect(name='db.sqlite3')`` and
        ``connection.connect['db.sqlite3']`` were not valid Django calls
        (``connect()`` returns None, and subscripting a method raises) --
        the view crashed before running any query. It now uses the shared
        Django connection with two cursors.
      * The scan loop never advanced (``i+1`` instead of ``i += 1``),
        looping forever on a failed match, and started at index 1, silently
        skipping the first row. It now iterates every row.

    NOTE(review): passwords appear to be stored and compared in plain text,
    and the POST field is read as 'password ' (trailing space) -- presumably
    matching the form template; confirm both and fix upstream if possible.
    """
    connection.connect()
    cursor = connection.cursor()
    cursor2 = connection.cursor()
    cursor.execute('select email from User')
    cursor2.execute('select password from User')
    emails = list(map(itemgetter(0), cursor.fetchall()))
    passwords = list(map(itemgetter(0), cursor2.fetchall()))
    if request.method == 'POST':
        email = request.POST['email']
        password = request.POST['password ']
        for stored_email, stored_password in zip(emails, passwords):
            if stored_email == email and stored_password == password:
                return render(request, 'base.html', {'email': email})
        messages.info(request, 'check username or password')
        return redirect('signin')
    return render(request, 'signin.html')
def indexTeacher(request):
    """Teacher view: show the logged-in teacher's own profile row.

    Security fix: the query is now parameterized instead of %-interpolating
    the session id into the SQL string.
    """
    print("查询教师自己的信息")
    if 'sessionid' in request.COOKIES and request.session['role'] == 'teacher':
        teacher_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        cursor.execute("select * from teacher where teacher_id=%s", [teacher_id])
        result = cursor.fetchall()
        connection.close()
        result_list = []
        for r in result:
            # NOTE(review): column 1 is skipped -- presumably the password
            # field; confirm against the schema.
            result_list.append({"teacher_id": r[0], 'teacher_name': r[2], 'dept': r[3]})
        return render(request, 'teacher1.html', {"data": result_list})
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def indexAllClass(request):
    """Admin page: list every class record, paginated."""
    print("查询所有班级信息")
    page = request.GET.get('page', 1)
    if not ('sessionid' in request.COOKIES and request.session['role'] == 'admin'):
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
    teacher_id = request.session['id']
    connection.connect()
    cursor = connection.cursor()
    cursor.execute("select * from class order by class_id;")
    rows = cursor.fetchall()
    connection.close()
    data = [{"class_id": cid, 'dept': dept, 'major': major}
            for cid, dept, major in rows]
    return render(request, 'admin5.html', pageBuilder(data, page))
def get_connection(self, listener=None):
    """Build, start and authenticate a STOMP connection.

    Unless this client is send-only, a Listener is (lazily created and)
    attached before connecting.
    """
    if listener is None and not self._client_only:
        if self._listener is None:
            self._listener = Listener(self)
        listener = self._listener
    logger.info("[%s] Connecting to %s" % (self._consumer_name, str(self._brokers)))
    conn = stomp.Connection(host_and_ports=self._brokers,
                            use_ssl=self._use_ssl,
                            ssl_version=3)
    if not self._client_only:
        conn.set_listener(self._consumer_name, listener)
    conn.start()
    conn.connect(self._user, self._password, wait=False)
    # Brief pause so the broker handshake can settle before first use.
    time.sleep(0.5)
    return conn
def indexStudent(request):
    """Student view: show the logged-in student's own profile joined with class.

    Fix: the connection is now closed after the query, consistent with every
    sibling view in this module (previously it was left open).
    """
    print("查询学生自己的信息")
    if 'sessionid' in request.COOKIES and request.session['role'] == 'student':
        connection.connect()
        cursor = connection.cursor()
        # Already parameterized: the student id comes from the session.
        cursor.execute("select student_id,student_name,s.class_id,dept,major\
            from student as s,class as c\
            where s.student_id=%s and s.class_id=c.class_id;",
                       [request.session['id']])
        tmp = ('student_id', 'student_name', 'class_id', 'dept', 'major')  # returned field names
        result_list = []
        result = cursor.fetchone()
        connection.close()
        result_list.append(dict(zip(tmp, result)))
        return render(request, 'student1.html', {"data": result_list})
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def query_one(sql, params=None):
    """Run *sql* with *params* and return the first row as a column->value dict.

    :param sql: SQL statement to execute
    :param params: optional query parameters
    :return: dict mapping column name to value, or None when no row matched

    Fixes: an empty result previously crashed with a TypeError while
    iterating ``None``; it now returns None. Cursor/connection cleanup
    runs in ``finally`` so exceptions no longer leak them.
    """
    connection.connect()
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
            row = cursor.fetchone()
            if row is None:
                return None
            col_names = [desc[0] for desc in cursor.description]
            return dict(zip(col_names, row))
    finally:
        connection.close()
def indexAdmin(request):
    """Admin view: show the logged-in administrator's own profile.

    Security fix: the query is now parameterized instead of %-interpolating
    the session id into the SQL string.
    """
    print("查询管理员个人信息")
    if 'sessionid' in request.COOKIES and request.session['role'] == 'admin':
        admin_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        cursor.execute("select * from admin where admin_id=%s", [admin_id])
        result = cursor.fetchall()
        connection.close()
        result_list = []
        for r in result:
            # NOTE(review): column 1 is skipped -- presumably the password
            # field; confirm against the schema.
            result_list.append({"admin_id": r[0], 'admin_name': r[2]})
        for item in result_list:
            print("管理员ID:%s 姓名:%s" % (item['admin_id'], item['admin_name']))
        return render(request, 'admin1.html', {"data": result_list})
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def main():
    """Run the bot loop: process active subs every 8 seconds, surviving errors."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'behave.settings')
    import django
    django.setup()
    from reddit.tasks import process_active_subs
    logger.warning("14: Bot started")
    while True:
        try:
            process_active_subs()
        except OperationalError:
            # DB connection dropped -- re-establish and keep looping.
            connection.connect()
        except Exception as exc:
            logger.warning("15: " + str(exc))
        time.sleep(8)
def indexTCourse(request):
    """Teacher view: list the logged-in teacher's courses, paginated.

    Security fix: the query is now parameterized instead of %-interpolating
    the session id into the SQL string.
    """
    print("查询教师教授的课程")
    page = request.GET.get('page', 1)
    if 'sessionid' in request.COOKIES and request.session['role'] == 'teacher':
        teacher_id = request.session['id']
        connection.connect()
        cursor = connection.cursor()
        cursor.execute("select course.course_id,course_name,credits \
            from course natural join teach \
            where teacher_id=%s \
            order by course.course_id;", [teacher_id])
        result = cursor.fetchall()
        connection.close()
        result_list = []
        for r in result:
            result_list.append({"course_id": r[0], 'course_name': r[1], 'credits': r[2]})
        return render(request, 'teacher2.html', pageBuilder(result_list, page))
    else:
        print("用户身份不合法")
        return redirect('/pro/illegalUser/')
def refresh_django_db_connection():
    '''
    Re-establish Django's connection to the database.

    Python files shipped to the Spark context via Livy behave atypically
    around MySQL connection lifecycles: if jobs run farther apart than
    MySQL's `wait_timeout`, queries fail with
    (2006, 'MySQL server has gone away'). Calling this before a job
    guarantees a fresh connection between the Livy-context code and
    Django's MySQL connection.

    Args:
        None

    Returns:
        None
    '''
    connection.close()
    connection.connect()
def handle(self, *args, **options):
    """Refresh course metadata for each partner by running the loader pipeline.

    For every (optionally filtered) partner: obtain an OAuth access token,
    then run the data-loader stages either across process pools (behind the
    'parallel_refresh_pipeline' waffle switch) or serially.
    """
    max_workers = options.get('max_workers')

    # For each partner defined...
    partners = Partner.objects.all()

    # If a specific partner was indicated, filter down the set
    partner_code = options.get('partner_code')
    if partner_code:
        partners = partners.filter(short_code=partner_code)

    if not partners:
        raise CommandError('No partners available!')

    token_type = 'JWT'
    for partner in partners:
        logger.info('Retrieving access token for partner [{}]'.format(partner_code))

        try:
            access_token, __ = EdxRestApiClient.get_oauth_access_token(
                '{root}/access_token'.format(root=partner.oidc_url_root.strip('/')),
                partner.oidc_key,
                partner.oidc_secret,
                token_type=token_type
            )
        except Exception:
            logger.exception('No access token acquired through client_credential flow.')
            raise

        # NOTE(review): verify=False skips JWT signature verification --
        # presumably acceptable because the token was just issued above; confirm.
        username = jwt.decode(access_token, verify=False)['preferred_username']
        kwargs = {'username': username} if username else {}

        # The Linux kernel implements copy-on-write when fork() is called to create a new
        # process. Pages that the parent and child processes share, such as the database
        # connection, are marked read-only. If a write is performed on a read-only page
        # (e.g., closing the connection), it is then copied, since the memory is no longer
        # identical between the two processes. This leads to the following behavior:
        #
        # 1) Newly forked process
        #       parent
        #              -> connection (Django open, MySQL open)
        #       child
        #
        # 2) Child process closes the connection
        #       parent -> connection (*Django open, MySQL closed*)
        #       child  -> connection (Django closed, MySQL closed)
        #
        # Calling connection.close() from a child process causes the MySQL server to
        # close a connection which the parent process thinks is still usable. Since
        # the parent process thinks the connection is still open, Django won't attempt
        # to open a new one, and the parent ends up running a query on a closed connection.
        # This results in a 'MySQL server has gone away' error.
        #
        # To resolve this, we force Django to reconnect to the database before running any queries.
        connection.connect()

        # If no courses exist for this partner, this command is likely being run on a
        # new catalog installation. In that case, we don't want multiple threads racing
        # to create courses. If courses do exist, this command is likely being run
        # as an update, significantly lowering the probability of race conditions.
        courses_exist = Course.objects.filter(partner=partner).exists()
        is_threadsafe = courses_exist and waffle.switch_is_active('threaded_metadata_write')
        logger.info(
            'Command is{negation} using threads to write data.'.format(negation='' if is_threadsafe else ' not')
        )

        # Stages run in order; loaders inside a stage may run concurrently.
        # Each entry is (loader_class, api_url, max_workers_override).
        pipeline = (
            (
                (SubjectMarketingSiteDataLoader, partner.marketing_site_url_root, None),
                (SchoolMarketingSiteDataLoader, partner.marketing_site_url_root, None),
                (SponsorMarketingSiteDataLoader, partner.marketing_site_url_root, None),
                (PersonMarketingSiteDataLoader, partner.marketing_site_url_root, None),
            ),
            (
                (CourseMarketingSiteDataLoader, partner.marketing_site_url_root, None),
                (OrganizationsApiDataLoader, partner.organizations_api_url, None),
            ),
            (
                (CoursesApiDataLoader, partner.courses_api_url, None),
            ),
            (
                (EcommerceApiDataLoader, partner.ecommerce_api_url, 1),
                (ProgramsApiDataLoader, partner.programs_api_url, None),
            ),
            (
                (XSeriesMarketingSiteDataLoader, partner.marketing_site_url_root, None),
            ),
        )

        if waffle.switch_is_active('parallel_refresh_pipeline'):
            for stage in pipeline:
                with concurrent.futures.ProcessPoolExecutor() as executor:
                    for loader_class, api_url, max_workers_override in stage:
                        if api_url:
                            executor.submit(
                                execute_parallel_loader,
                                loader_class,
                                partner,
                                api_url,
                                access_token,
                                token_type,
                                (max_workers_override or max_workers),
                                is_threadsafe,
                                **kwargs,
                            )
        else:
            # Flatten pipeline and run serially.
            for loader_class, api_url, max_workers_override in itertools.chain(*(stage for stage in pipeline)):
                if api_url:
                    execute_loader(
                        loader_class,
                        partner,
                        api_url,
                        access_token,
                        token_type,
                        (max_workers_override or max_workers),
                        is_threadsafe,
                        **kwargs,
                    )
def setUp(self):
    # A previous test may have left the shared connection closed; reconnect
    # first to avoid "connection already closed" errors.
    connection.connect()
def run(self):
    """Thread entry point: reconnect to the DB, then process the data."""
    log = self.references['logger']
    log.info("Starting " + self.name)
    connection.connect()  # fresh DB connection for this thread
    self.process_data()
    log.info("Exiting " + self.name)