def test__has_access_descriptor_staff_lock(self):
    """Verify that the "visible_to_staff_only" flag takes precedence over the start date."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor

    # Without a start date: access follows the staff-lock flag.
    unit.visible_to_staff_only = True
    self.verify_access(unit, False)
    unit.visible_to_staff_only = False
    self.verify_access(unit, True)

    # Start date already passed: staff lock alone decides.
    unit.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    self.verify_access(unit, False)
    unit.visible_to_staff_only = False
    self.verify_access(unit, True)

    # Release date in the future: students are blocked either way.
    unit.start = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    self.verify_access(unit, False)
    unit.visible_to_staff_only = False
    self.verify_access(unit, False)
def test__has_access_descriptor_staff_lock(self):
    """Verify that "visible_to_staff_only" takes precedence over the start date."""
    unit = Mock()
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor

    def check(expect_student_access):
        """Assert the student outcome; staff must always have access."""
        self.assertEqual(
            expect_student_access,
            access._has_access_descriptor(
                self.anonymous_user, 'load', unit,
                course_key=self.course.course_key,
            ),
        )
        # staff always has access
        self.assertTrue(
            access._has_access_descriptor(
                self.course_staff, 'load', unit,
                course_key=self.course.course_key,
            )
        )

    # No start date set yet: staff lock alone decides.
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(True)

    # Start date in the past: staff lock alone decides.
    unit.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(True)

    # Release date in the future: students never get access.
    unit.start = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(False)
def test_get_course_start_date(self, enrollment_mock, get_user_mock):
    """Testing _get_course_start_date method."""
    now = datetime.now()
    course_id = 'course-v1:test+CS102+2019_T3'
    course_key = CourseKey.from_string(course_id)

    course = Mock()
    course.start = now - timedelta(days=1)
    course.self_paced = True

    user = Mock()
    get_user_mock.return_value = (user, Mock())

    enrollment = Mock()
    enrollment.created = now
    enrollment_mock.get_enrollment.return_value = enrollment

    # Self-paced course: the enrollment date is reported as the start.
    self.assertEqual(
        now.strftime('%Y-%m-%d'),
        self.base._get_course_start_date(course, 'test-email', course_id),  # pylint: disable=protected-access
    )
    enrollment_mock.get_enrollment.assert_called_with(user, course_key)

    # Instructor-paced course: the course's own start date is reported.
    course.self_paced = False
    self.assertEqual(
        (now - timedelta(days=1)).strftime('%Y-%m-%d'),
        self.base._get_course_start_date(course, 'test-email', course_id),  # pylint: disable=protected-access
    )
def get_mute_collaborator():
    """Build a MuteCollaborator stand-in whose lifecycle hooks are no-ops."""
    collaborator = Mock(spec=MuteCollaborator)
    collaborator.getConfig = get_mute_config
    collaborator.start = trivial
    collaborator.killWriter = trivial
    collaborator.killReader = trivial
    return collaborator
def test_filter_on_duration_no_user_feedback_if_ok(self):
    """No feedback should be emitted when the duration fits the window."""
    # Capture the executor's arguments so we can reach the inner closure.
    captured = []

    def capturing_executor(rg_list, filter_test):
        captured.append(rg_list)
        captured.append(filter_test)
        return rg_list

    filter_on_duration([], capturing_executor)

    window = Mock()
    window.start = datetime(2013, 10, 1)
    window.end = datetime(2013, 10, 3)
    req_group = Mock()
    request = Mock()
    request.duration = 1 * 24 * 3600  # one day fits inside the two-day window

    # Invoke the captured filter_test closure directly.
    captured[1](window, req_group, request)

    assert_equal(req_group.emit_rg_feedback.called, False)
def test_filter_on_duration_emits_user_feedback(self):
    """Feedback should be emitted when the duration exceeds the window."""
    # Capture the executor's arguments so we can reach the inner closure.
    captured = []

    def capturing_executor(rg_list, filter_test):
        captured.append(rg_list)
        captured.append(filter_test)
        return rg_list

    filter_on_duration([], capturing_executor)

    window = Mock()
    window.start = datetime(2013, 10, 1)
    window.end = datetime(2013, 10, 3)
    window.get_resource_name.return_value = 'elp'
    user_request = Mock()
    request = Mock()
    request.id = 1
    request.duration = 5 * 24 * 3600  # five days cannot fit a two-day window

    # Invoke the captured filter_test closure directly.
    captured[1](window, user_request, request)

    expected_msg = "Request %d Window (at elp) 2013-10-01 00:00:00 -> 2013-10-03 00:00:00 too small for duration '5 days, 0:00:00'" % request.id
    expected_tag = 'WindowTooSmall'
    user_request.emit_rg_feedback.assert_called_with(expected_msg, expected_tag)
def default_args():
    """Return a namespace-style mock carrying the default CLI argument values."""
    mock_args = Mock()
    mock_args.conf_file = '.lamvery.yml'
    mock_args.follow = False
    mock_args.interval = 1
    mock_args.start = '-1h'
    return mock_args
def create_mock_session():
    """Return a mock scrape session for the "test" region, background scrape type."""
    session = Mock()
    session.region = "test"
    session.scrape_type = constants.ScrapeType.BACKGROUND
    # Stamp the session with the current wall-clock start time.
    session.start = datetime.datetime.now()
    return session
def test__has_access_descriptor_staff_lock(self):
    """Verify that "visible_to_staff_only" takes precedence over the start date."""
    unit = Mock()
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor

    def check(expect_student_access):
        """Assert the student outcome; staff must always have access."""
        self.assertEqual(
            expect_student_access,
            access._has_access_descriptor(
                self.anonymous_user, "load", unit, course_key=self.course.course_key
            ),
        )
        # staff always has access
        self.assertTrue(
            access._has_access_descriptor(self.course_staff, "load", unit, course_key=self.course.course_key)
        )

    # No start date: the staff lock alone decides.
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(True)

    # Past start date: the staff lock alone decides.
    unit.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(True)

    # Future release date: students never get access.
    unit.start = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1)
    unit.visible_to_staff_only = True
    check(False)
    unit.visible_to_staff_only = False
    check(False)
def test__has_access_descriptor(self):
    # TODO: override DISABLE_START_DATES and test the start date branch of the method
    user = Mock()
    descriptor = Mock()
    # Put the start time one day in the past.
    descriptor.start = time.gmtime(time.time() - 86400)
    # Always returns true because DISABLE_START_DATES is set in test.py
    self.assertTrue(access._has_access_descriptor(user, descriptor, 'load'))
    # An unrecognized action must raise.
    self.assertRaises(
        ValueError, access._has_access_descriptor, user, descriptor, 'not_load_or_staff'
    )
def test__has_access_descriptor_when_not_in_preview_mode(self):
    """Tests that descriptor has no access when start date in future & without preview."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.visible_to_staff_only = False

    # Without a start date the unit is open.
    self.verify_access(unit, True)

    # A start date in the past keeps it open.
    unit.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    self.verify_access(unit, True)

    # A release date in the future closes it.
    unit.start = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1)
    self.verify_access(unit, False)
def test__has_access_descriptor_in_preview_mode(self, start):
    """Tests that descriptor has access in preview mode."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.visible_to_staff_only = False
    unit.start = start
    # Preview mode grants access regardless of the start date.
    self.verify_access(unit, True)
def test__has_access_descriptor(self):
    # TODO: override DISABLE_START_DATES and test the start date branch of the method
    user = Mock()
    descriptor = Mock()
    # Put the start time one day in the past.
    descriptor.start = time.gmtime(time.time() - 86400)
    # Always returns true because DISABLE_START_DATES is set in test.py
    self.assertTrue(access._has_access_descriptor(user, descriptor, "load"))
    # An unrecognized action must raise.
    self.assertRaises(ValueError, access._has_access_descriptor, user, descriptor, "not_load_or_staff")
def test__has_access_descriptor_beta_user(self):
    """A beta tester inside the early-access window can load a future-dated unit."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}
    unit.days_early_for_beta = 2
    unit.start = self.TOMORROW
    unit.visible_to_staff_only = False
    result = access._has_access_descriptor(
        self.beta_user, 'load', unit, course_key=self.course.id)
    self.assertTrue(bool(result))
def test__has_access_descriptor(self):
    # TODO: override DISABLE_START_DATES and test the start date branch of the method
    user = Mock()
    descriptor = Mock()
    # Put the start time one day in the past.
    descriptor.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    # Always returns true because DISABLE_START_DATES is set in test.py
    self.assertTrue(access._has_access_descriptor(user, descriptor, 'load'))
    # An unrecognized action must raise.
    self.assertRaises(
        ValueError, access._has_access_descriptor, user, descriptor, 'not_load_or_staff'
    )
def test__has_access_descriptor_staff_lock(self, visible_to_staff_only, start, expected_error_type=None):
    """Tests that "visible_to_staff_only" overrides start date."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.visible_to_staff_only = visible_to_staff_only
    unit.start = start
    # Access is expected exactly when no error type is anticipated.
    self.verify_access(unit, expected_error_type is None, expected_error_type)
def test__has_access_descriptor_when_not_in_preview_mode(self, start, expected_error_type):
    """Tests that descriptor has no access when start date in future & without preview."""
    unit = Mock(user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.visible_to_staff_only = False
    unit.start = start
    # Access is expected exactly when no error type is anticipated.
    self.verify_access(unit, expected_error_type is None, expected_error_type)
def test_index_iter_stop():
    """index_iter must raise ValueError when `stop` precedes the segment start.

    Fixes two issues in the original:
    - `.next()` is Python-2-only; on Python 3 it raises AttributeError instead
      of exercising the iterator, so the ValueError check never ran. Use the
      builtin `next()`, which works on both.
    - The local name `os` shadowed the `os` module; renamed to `searcher`.
    """
    searcher = OverlapSearcher("asdf")
    segment = Mock()
    segment.start = 11
    try:
        # stop=10 lies before segment.start=11, which is invalid.
        next(searcher.index_iter(segment, stop=10))
    except ValueError:
        assert True
    else:
        assert False
def get_contest():
    """Build a mock contest with random identity and a random time window."""
    contest = Mock()
    contest.id = get_int()
    contest.name = get_string()
    # Draws happen in the original order so any shared RNG stays in sync.
    begin = get_int(2 ** 11)
    length = get_int(2 ** 8)
    contest.start = make_datetime(begin)
    contest.stop = make_datetime(begin + length)
    contest.score_precision = 2
    contest.description = get_string()
    return contest
def get_contest():
    """Return a Mock contest populated with random metadata and timing."""
    contest = Mock()
    contest.id = get_int()
    contest.name = get_string()
    # Draws happen in the original order so any shared RNG stays in sync.
    begin = get_int(2**11)
    length = get_int(2**8)
    contest.start = make_datetime(begin)
    contest.stop = make_datetime(begin + length)
    contest.score_precision = 2
    contest.description = get_string()
    return contest
def test__has_access_descriptor_in_preview_mode(self, start):
    """Tests that descriptor has access in preview mode."""
    unit = Mock(location=self.course.location, user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.visible_to_staff_only = False
    unit.start = self.DATES[start]
    unit.merged_group_access = {}
    # Preview mode grants access regardless of the start date.
    self.verify_access(unit, True)
def test__has_access_descriptor(self):
    # TODO: override DISABLE_START_DATES and test the start date branch of the method
    learner = Mock()
    block = Mock()
    # Put the start time one day in the past.
    block.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    # Always returns true because DISABLE_START_DATES is set in test.py
    self.assertTrue(access._has_access_descriptor(learner, 'load', block))
    # An unrecognized action must raise.
    with self.assertRaises(ValueError):
        access._has_access_descriptor(learner, 'not_load_or_staff', block)
def test___init__():
    """SegmentChain should record segments and their cumulative start offsets."""
    segments = []
    for idx in range(3):
        segment = Mock()
        segment.start = idx * 3
        segment.stop = idx * 3 + 3
        segments.append(segment)

    chain = SegmentChain(segments)
    assert chain.segments == segments
    assert chain.segment_start == [0, 3, 6]

    # With no arguments both lists start out empty.
    empty_chain = SegmentChain()
    assert empty_chain.segments == empty_chain.segment_start == []
def test_must_start_container(self):
    """start() must look up the container by id and call its start method."""
    self.container.is_created.return_value = True
    docker_container = Mock()
    docker_container.start = Mock()
    self.mock_docker_client.containers.get.return_value = docker_container

    self.container.start()

    self.mock_docker_client.containers.get.assert_called_with(self.container.id)
    docker_container.start.assert_called_with()
def test__has_access_descriptor_staff_lock(self, visible_to_staff_only, start, expected_error_type=None):
    """Tests that "visible_to_staff_only" overrides start date."""
    unit = Mock(location=self.course.location, user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.category = "problem"
    unit.visible_to_staff_only = visible_to_staff_only
    unit.start = start
    unit.merged_group_access = {}
    # Access is expected exactly when no error type is anticipated.
    self.verify_access(unit, expected_error_type is None, expected_error_type)
def test__has_access_descriptor(self):
    # TODO: override DISABLE_START_DATES and test the start date branch of the method
    learner = Mock()
    block = Mock()
    # Put the start time one day in the past.
    block.start = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=1)
    # Always returns true because DISABLE_START_DATES is set in test.py
    self.assertTrue(access._has_access_descriptor(learner, "load", block))
    self.assertTrue(access._has_access_descriptor(learner, "instructor", block))
    # An unrecognized action must raise.
    with self.assertRaises(ValueError):
        access._has_access_descriptor(learner, "not_load_or_staff", block)
def test_profile_start_decorator_starts_profiling_for_target_function():
    """profile_start must call start(<fn name>) before the wrapped function runs."""
    profiler = Mock()
    profiler.start = Mock()
    profiler.intermediate = Mock()

    # NOTE: the decorated function must stay named `fn` — the decorator
    # passes its name to start(), which the assertions below rely on.
    @profile_start(profiler)
    def fn():
        profiler.intermediate()

    fn()

    first_call, second_call = profiler.mock_calls[0], profiler.mock_calls[1]
    expect(first_call).to.equal(call.start('fn'))
    expect(second_call).to.equal(call.intermediate())
    expect(profiler.mock_calls).to.have.length_of(2)
def test_profile_checkpoint_decorator():
    """profile_checkpoint must record checkpoint(profile_id) before the body runs."""
    profiler = Mock()
    profiler.start = Mock()
    profiler.intermediate = Mock()

    @profile_checkpoint(profiler, profile_id='id')
    def fn():
        profiler.intermediate()

    fn()

    first_call, second_call = profiler.mock_calls[0], profiler.mock_calls[1]
    expect(first_call).to.equal(call.checkpoint('id'))
    expect(second_call).to.equal(call.intermediate())
    expect(profiler.mock_calls).to.have.length_of(2)
def test_main_loop_successful_while(self):
    """main_loop should log via logger.info after taking a task from the tube."""
    config = self.get_config()

    worker = Mock()
    worker.start = Mock(return_value=1)
    task = Mock()
    task.task_id = Mock(return_value=1)
    tube = Mock()
    tube.take = Mock(return_value=task)
    queue = Mock()
    queue.tube = Mock(return_value=tube)

    # Patch collaborators in one statement instead of a nested pyramid.
    with patch('notification_pusher.tarantool_queue.Queue', Mock(return_value=queue)), \
         patch('notification_pusher.Greenlet', Mock(return_value=worker)), \
         patch('notification_pusher.break_func_for_test', Mock(return_value=True)), \
         patch('notification_pusher.logger', Mock()) as logger:
        notification_pusher.main_loop(config)
        self.assertTrue(logger.info.called)
def test__has_access_descriptor_staff_lock(self, visible_to_staff_only, start, expected_error_type=None):
    """Tests that "visible_to_staff_only" overrides start date."""
    should_have_access = expected_error_type is None
    unit = Mock(location=self.course.location, user_partitions=[])
    unit._class_tags = {}  # required by the detached-block check in _has_access_descriptor
    unit.category = "problem"
    unit.visible_to_staff_only = visible_to_staff_only
    unit.start = start
    unit.merged_group_access = {}
    self.verify_access(unit, should_have_access, expected_error_type)
def test_coverage(self, coverage_func):
    """Coverage plugin should start on setup() and stop/save on teardown()."""
    coverage_object = Mock()
    coverage_object.start = Mock()
    coverage_object.stop = Mock()
    coverage_object.save = Mock()
    coverage_func.return_value = coverage_object

    cov = Coverage('coverage', True, 'coverage')

    # Nothing happens until setup() is invoked.
    self.assertFalse(coverage_func.called)
    self.assertFalse(coverage_object.start.called)
    self.assertFalse(coverage_object.stop.called)
    self.assertFalse(coverage_object.save.called)

    # setup() creates the coverage object and starts it.
    cov.setup()
    coverage_func.assert_called_once_with()
    coverage_object.start.assert_called_once_with()
    self.assertFalse(coverage_object.stop.called)
    self.assertFalse(coverage_object.save.called)

    # teardown() stops collection and persists the data.
    cov.teardown()
    coverage_object.stop.assert_called_once_with()
    coverage_object.save.assert_called_once_with()
def test_read_and_persist(self, mock_persist, mock_session_update, mock_session_return, mock_task_manager):
    """GET /read_and_persist should persist, enqueue /release, and advance the phase."""
    started_at = datetime.datetime.now()
    session = Mock()
    session.region = "test"
    session.scrape_type = constants.ScrapeType.BACKGROUND
    session.start = started_at
    mock_session_return.return_value = session

    response = self.client.get(
        "/read_and_persist",
        query_string={"region": "test"},
        headers={"X-Appengine-Cron": "test-cron"},
    )

    self.assertEqual(response.status_code, 200)
    mock_persist.assert_called_once_with("test", started_at)
    mock_task_manager.return_value.create_scraper_phase_task.assert_called_once_with(
        region_code="test", url="/release")
    mock_session_update.assert_called_once_with(
        session, scrape_phase.ScrapePhase.RELEASE)
def test_read_and_persist(
        self, mock_persist, mock_session_update, mock_session_return, mock_enqueue):
    """GET /read_and_persist should persist, enqueue /release, and advance the phase."""
    started_at = datetime.datetime.now()
    session = Mock()
    session.region = 'test'
    session.scrape_type = constants.ScrapeType.BACKGROUND
    session.start = started_at
    mock_session_return.return_value = session

    response = self.client.get(
        '/read_and_persist',
        query_string={'region': 'test'},
        headers={'X-Appengine-Cron': "test-cron"},
    )

    self.assertEqual(response.status_code, 200)
    mock_persist.assert_called_once_with('test', started_at)
    mock_enqueue.assert_called_once_with(region_code='test', url='/release')
    mock_session_update.assert_called_once_with(
        session, scrape_phase.ScrapePhase.RELEASE)
async def maigret(username, site_dict, logger, query_notify=None, proxy=None,
                  timeout=None, is_parsing_enabled=False, id_type='username',
                  debug=False, forced=False, max_connections=100,
                  no_progressbar=False, cookies=None):
    """Main search func

    Checks for existence of username on certain sites.

    Keyword Arguments:
    username             -- Username string will be used for search.
    site_dict            -- Dictionary containing sites data.
    query_notify         -- Object with base type of QueryNotify().
                            This will be used to notify the caller about
                            query results.
    logger               -- Standard Python logger object.
    timeout              -- Time in seconds to wait before timing out request.
                            Default is no timeout.
    is_parsing_enabled   -- Extract additional info from account pages.
    id_type              -- Type of username to search.
                            Default is 'username', see all supported here:
                            https://github.com/soxoj/maigret/wiki/Supported-identifier-types
    max_connections      -- Maximum number of concurrent connections allowed.
                            Default is 100.
    no_progressbar       -- Displaying of ASCII progressbar during scanner.
    cookies              -- Filename of a cookie jar file to use for each request.

    Return Value:
    Dictionary containing results from report. Key of dictionary is the name
    of the social network site, and the value is another dictionary with
    the following keys:
        url_main:      URL of main site.
        url_user:      URL of user on site (if account exists).
        status:        QueryResult() object indicating results of test for
                       account existence.
        http_status:   HTTP status code of query which checked for existence on
                       site.
        response_text: Text that came back from request. May be None if
                       there was an HTTP error when checking for existence.
    """
    # Notify caller that we are starting the query.
    # A Mock notifier keeps the rest of the code unconditional when no
    # notifier was supplied.
    if not query_notify:
        query_notify = Mock()
    query_notify.start(username, id_type)

    # TODO: connector
    connector = ProxyConnector.from_url(
        proxy) if proxy else aiohttp.TCPConnector(ssl=False)
    # connector = aiohttp.TCPConnector(ssl=False)
    connector.verify_ssl = False

    cookie_jar = None
    if cookies:
        logger.debug(f'Using cookies jar file {cookies}')
        cookie_jar = await import_aiohttp_cookies(cookies)

    session = aiohttp.ClientSession(connector=connector,
                                    trust_env=True,
                                    cookie_jar=cookie_jar)

    # In debug mode, check outgoing connectivity (and the effective IP when
    # a proxy is in use) before starting the scan.
    if logger.level == logging.DEBUG:
        future = session.get(url='https://icanhazip.com')
        ip, status, check_error = await get_response(future, None, logger)
        if ip:
            logger.debug(f'My IP is: {ip.strip()}')
        else:
            logger.debug(f'IP requesting {check_error[0]}: {check_error[1]}')

    # Results from analysis of all sites
    results_total = {}

    # First create futures for all requests. This allows for the requests to run in parallel
    for site_name, site in site_dict.items():
        # Skip sites of a different identifier type and disabled sites
        # (unless the caller forces a full scan).
        if site.type != id_type:
            continue

        if site.disabled and not forced:
            logger.debug(f'Site {site.name} is disabled, skipping...')
            continue

        # Results from analysis of this specific site
        results_site = {}

        # Record URL of main site and username
        results_site['username'] = username
        results_site['parsing_enabled'] = is_parsing_enabled
        results_site['url_main'] = site.url_main
        results_site['cookies'] = cookie_jar and cookie_jar.filter_cookies(
            site.url_main) or None

        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 11.1; rv:55.0) Gecko/20100101 Firefox/55.0',
        }
        headers.update(site.headers)

        if 'url' not in site.__dict__:
            logger.error('No URL for site %s', site.name)

        # URL of user on site (if it exists)
        url = site.url.format(urlMain=site.url_main,
                              urlSubpath=site.url_subpath,
                              username=username)
        # workaround to prevent slash errors
        url = re.sub('(?<!:)/+', '/', url)

        # Don't make request if username is invalid for the site
        if site.regex_check and re.search(site.regex_check, username) is None:
            # No need to do the check at the site: this user name is not allowed.
            results_site['status'] = QueryResult(username, site_name, url,
                                                 QueryStatus.ILLEGAL)
            results_site["url_user"] = ""
            results_site['http_status'] = ""
            results_site['response_text'] = ""
            query_notify.update(results_site['status'])
        else:
            # URL of user on site (if it exists)
            results_site["url_user"] = url
            url_probe = site.url_probe
            if url_probe is None:
                # Probe URL is normal one seen by people out on the web.
                url_probe = url
            else:
                # There is a special URL for probing existence separate
                # from where the user profile normally can be found.
                url_probe = url_probe.format(
                    urlMain=site.url_main,
                    urlSubpath=site.url_subpath,
                    username=username,
                )

            # Append any site-specific query parameters.
            for k, v in site.get_params.items():
                url_probe += f'&{k}={v}'

            if site.check_type == 'status_code' and site.request_head_only:
                # In most cases when we are detecting by status code,
                # it is not necessary to get the entire body: we can
                # detect fine with just the HEAD response.
                request_method = session.head
            else:
                # Either this detect method needs the content associated
                # with the GET response, or this specific website will
                # not respond properly unless we request the whole page.
                request_method = session.get

            if site.check_type == "response_url":
                # Site forwards request to a different URL if username not
                # found. Disallow the redirect so we can capture the
                # http status from the original URL request.
                allow_redirects = False
            else:
                # Allow whatever redirect that the site wants to do.
                # The final result of the request will be what is available.
                allow_redirects = True

            # NOTE(review): the coroutine is created here but only awaited
            # later by the executor via update_site_dict_from_response.
            future = request_method(
                url=url_probe,
                headers=headers,
                allow_redirects=allow_redirects,
                timeout=timeout,
            )

            # Store future in data for access later
            # TODO: move to separate obj
            site.request_future = future

        # Add this site's results into final dictionary with all of the other results.
        results_total[site_name] = results_site

    # Build (callable, args, kwargs) tuples for the executor.
    coroutines = []
    for sitename, result_obj in results_total.items():
        coroutines.append(
            (update_site_dict_from_response,
             [sitename, site_dict, result_obj, logger, query_notify], {}))

    if no_progressbar:
        executor = AsyncioSimpleExecutor(logger=logger)
    else:
        executor = AsyncioProgressbarQueueExecutor(logger=logger,
                                                   in_parallel=max_connections,
                                                   timeout=timeout + 0.5)

    results = await executor.run(coroutines)

    await session.close()

    # TODO: move to separate function
    # Aggregate per-error-type counts for a summary log.
    errors = {}
    for el in results:
        if not el:
            continue
        _, r = el
        if r and isinstance(r, dict) and r.get('status'):
            if not isinstance(r['status'], QueryResult):
                continue
            err = r['status'].error
            if not err:
                continue
            errors[err.type] = errors.get(err.type, 0) + 1

    for err, count in sorted(errors.items(), key=lambda x: x[1], reverse=True):
        logger.warning(f'Errors of type "{err}": {count}')

    # Notify caller that all queries are finished.
    query_notify.finish()

    # Re-shape executor results ((sitename, result) pairs) into a dict.
    data = {}
    for result in results:
        # TODO: still can be empty
        if result:
            try:
                data[result[0]] = result[1]
            except Exception as e:
                logger.error(e, exc_info=True)
                logger.info(result)

    return data
async def maigret(
    username: str,
    site_dict: Dict[str, MaigretSite],
    logger,
    query_notify=None,
    proxy=None,
    timeout=3,
    is_parsing_enabled=False,
    id_type="username",
    debug=False,
    forced=False,
    max_connections=100,
    no_progressbar=False,
    cookies=None,
    retries=0,
) -> QueryResultWrapper:
    """Main search func

    Checks for existence of username on certain sites.

    Keyword Arguments:
    username             -- Username string will be used for search.
    site_dict            -- Dictionary containing sites data in MaigretSite objects.
    query_notify         -- Object with base type of QueryNotify().
                            This will be used to notify the caller about
                            query results.
    logger               -- Standard Python logger object.
    timeout              -- Time in seconds to wait before timing out request.
                            Default is 3 seconds.
    is_parsing_enabled   -- Extract additional info from account pages.
    id_type              -- Type of username to search.
                            Default is 'username', see all supported here:
                            https://github.com/soxoj/maigret/wiki/Supported-identifier-types
    max_connections      -- Maximum number of concurrent connections allowed.
                            Default is 100.
    no_progressbar       -- Displaying of ASCII progressbar during scanner.
    cookies              -- Filename of a cookie jar file to use for each request.

    Return Value:
    Dictionary containing results from report. Key of dictionary is the name
    of the social network site, and the value is another dictionary with
    the following keys:
        url_main:      URL of main site.
        url_user:      URL of user on site (if account exists).
        status:        QueryResult() object indicating results of test for
                       account existence.
        http_status:   HTTP status code of query which checked for existence on
                       site.
        response_text: Text that came back from request. May be None if
                       there was an HTTP error when checking for existence.
    """
    # notify caller that we are starting the query.
    # A Mock notifier keeps the rest of the code unconditional when no
    # notifier was supplied.
    if not query_notify:
        query_notify = Mock()
    query_notify.start(username, id_type)

    # make http client session
    connector = (ProxyConnector.from_url(proxy)
                 if proxy else aiohttp.TCPConnector(ssl=False))
    connector.verify_ssl = False

    cookie_jar = None
    if cookies:
        logger.debug(f"Using cookies jar file {cookies}")
        cookie_jar = await import_aiohttp_cookies(cookies)

    session = aiohttp.ClientSession(connector=connector,
                                    trust_env=True,
                                    cookie_jar=cookie_jar)

    # In debug mode, check outgoing connectivity before scanning.
    if logger.level == logging.DEBUG:
        await debug_ip_request(session, logger)

    # setup parallel executor
    executor: Optional[AsyncExecutor] = None
    if no_progressbar:
        executor = AsyncioSimpleExecutor(logger=logger)
    else:
        executor = AsyncioProgressbarQueueExecutor(logger=logger,
                                                   in_parallel=max_connections,
                                                   timeout=timeout + 0.5)

    # make options objects for all the requests
    options: QueryOptions = {}
    options["cookies"] = cookie_jar
    options["session"] = session
    options["parsing"] = is_parsing_enabled
    options["timeout"] = timeout
    options["id_type"] = id_type
    options["forced"] = forced

    # results from analysis of all sites
    all_results: Dict[str, QueryResultWrapper] = {}

    # `sites` tracks the set still to be (re)checked; it shrinks to only
    # the failed sites on each retry pass.
    sites = list(site_dict.keys())

    attempts = retries + 1
    while attempts:
        tasks_dict = {}

        for sitename, site in site_dict.items():
            if sitename not in sites:
                continue
            # Fallback result used when the check task itself fails.
            default_result: QueryResultWrapper = {
                'site': site,
                'status': QueryResult(
                    username,
                    sitename,
                    '',
                    QueryStatus.UNKNOWN,
                    error=CheckError('Request failed'),
                ),
            }
            tasks_dict[sitename] = (
                check_site_for_username,
                [site, username, options, logger, query_notify],
                {
                    'default': (sitename, default_result)
                },
            )

        cur_results = await executor.run(tasks_dict.values())

        # wait for executor timeout errors
        await asyncio.sleep(1)

        all_results.update(cur_results)

        # Only the sites that failed this pass are retried.
        sites = get_failed_sites(dict(cur_results))
        attempts -= 1

        if not sites:
            break

        if attempts:
            query_notify.warning(
                f'Restarting checks for {len(sites)} sites... ({attempts} attempts left)'
            )

    # closing http client session
    await session.close()

    # notify caller that all queries are finished
    query_notify.finish()

    return all_results