Example no. 1
def test_send_requests_slave(monkeypatch):
    load = [{
        "url": "http://shadowreader.example.com",
        "req_method": "POST",
        "request_uri": "/test",
    }]
    headers = {"User-Agent": "sr_pytest"}

    fut = FuturesSession().get("http://www.example.com")
    monkeypatch.setattr("libs.slave._send_request",
                        lambda a, b, c, d, e, f, g: fut)

    futs, timeouts, exceptions = slave.send_requests_slave(load=load,
                                                           delay=0.1,
                                                           random_delay=True,
                                                           headers=headers)

    assert len(futs) == 1

    fut = futs[0]["fut"]
    assert fut.result().status_code == 200
Example no. 2
def generate_requests(hosts, jolokia_port, jolokia_prefix):
    """Return a generator of requests to fetch the under replicated
    partition number from the specified hosts.

    :param hosts: list of brokers ip addresses
    :type hosts: list of strings
    :param jolokia_port: HTTP port for Jolokia
    :type jolokia_port: integer
    :param jolokia_prefix: HTTP prefix on the server for the Jolokia queries
    :type jolokia_prefix: string
    :returns: generator of requests
    """
    session = FuturesSession()
    for host in hosts:
        url = "http://{host}:{port}/{prefix}/read/{key}".format(
            host=host,
            port=jolokia_port,
            prefix=jolokia_prefix,
            key=UNDER_REPL_KEY,
        )
        yield host, session.get(url)
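The generator above only queues the HTTP calls; each yielded future blocks only when its result is read. A minimal consumption sketch, assuming illustrative hosts, the default Jolokia port 8778 and the standard Jolokia JSON envelope with a "value" field (none of which come from the snippet itself):

# Hypothetical usage of generate_requests; hosts, port and response shape are assumptions.
for host, future in generate_requests(["10.0.0.1", "10.0.0.2"], 8778, "jolokia"):
    response = future.result()  # blocks only until this host's request completes
    print(host, response.json().get("value"))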
Example no. 3
def main():

    session = FuturesSession()
    live_requests = []

    print("starting threads...")

    # make requests
    for req in __request_objects:
        request_params = {'request-target': req['target_filename']}
        future_request = session.get(__url, params=request_params)
        live_requests.append(future_request)

    print("\nwaiting for results...")

    # print results
    for req in live_requests:
        response = req.result()
        print('response status: {0}'.format(response.status_code))
        print(response.content)
        print("\n")
Example no. 4
    def __init__(self,
                 name: str,
                 api_key: str,
                 api_base_url: Optional[str] = None,
                 log_level: str = 'INFO',
                 api_max_reqs_per_sec: int = 10,
                 api_query_limit: int = 25,
                 logger: Optional[logging.Logger] = None,
                 session: Optional[FuturesSession] = None):
        self.name = name
        self.session = session or FuturesSession()
        self.logger = logger or logging.getLogger(self.__class__.__name__)
        self.logger.setLevel(getattr(logging, log_level))

        self.api_key = api_key
        self.api_base_url = api_base_url or 'https://openapi.etsy.com/v2'
        self.api_query_limit = api_query_limit
        self.api_request_timeout = (1 / api_max_reqs_per_sec) + 0.05

        self.total_pages: int = 0
        self.listings: list = []
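api_request_timeout stores the minimum spacing implied by the rate limit (1 / api_max_reqs_per_sec) plus a 50 ms safety margin. How the client actually uses it is not shown; a minimal pacing sketch under that assumption, where the helper name, the endpoint list and passing api_key as a query parameter are purely illustrative:

import time
from requests_futures.sessions import FuturesSession

def paced_gets(session: FuturesSession, base_url: str, api_key: str, paths, spacing: float):
    # Hypothetical pacing helper: queue one request per `spacing` seconds so the
    # burst rate stays under api_max_reqs_per_sec; api_key as a query parameter
    # is an assumption, not taken from the snippet.
    futures = []
    for path in paths:
        futures.append(session.get('{}/{}'.format(base_url, path),
                                   params={'api_key': api_key}))
        time.sleep(spacing)
    return futures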
Example no. 5
def schedule_svg_gen(mol, user):
    query = {'_id': mol['_id']}

    updates = {'$set': {'generating_svg': True}}

    super(MoleculeModel, MoleculeModel()).update(query, updates)

    base_url = openbabel_base_url()
    path = 'convert'
    output_format = 'svg'

    url = '/'.join([base_url, path, output_format])

    data = {'format': 'smi', 'data': mol['smiles']}

    session = FuturesSession()
    future = session.post(url, json=data)

    inchikey = mol['inchikey']
    future.add_done_callback(functools.partial(_finish_svg_gen, inchikey,
                                               user))
Example no. 6
    def __init__(self, app_name, elastic_url, headers=None, auth=None):
        """Initialize the ElasticLoggger class.

        Args:
            app_name:    Name of the application which is using current logger
            elastic_url: Url of elastic-http-input to push logs to (for eg. 'http://localhost:3332' )
            headers:     Since this is post request headers are required, defaults to {'content-type': 'application/json'}
            auth:        A tuple containing username and password: for eg.  ('myuser', 'mypassword')
        """
        self.elastic_url = elastic_url
        self.auth = auth
        self.headers = headers
        if not self.headers:
            self.headers = {'content-type': 'application/json'}

        self.debug_method = partial(self.__log, level='debug')
        self.error_method = partial(self.__log, level='error')
        self.info_method = partial(self.__log, level='info')
        self.exception_method = partial(self.__log, level='exception')
        self.session = FuturesSession(max_workers=10)
        self.app_name = app_name
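The partials above all route through a private __log method that this snippet does not show. A minimal sketch of what such a helper could look like, assuming it serializes one record and fires it off through the FuturesSession without blocking; the function name and payload fields are assumptions:

import json
from datetime import datetime

def push_log(session, elastic_url, app_name, message, level, headers=None, auth=None):
    # Hypothetical stand-in for the (unshown) ElasticLoggger.__log helper:
    # serialize one record and POST it via the FuturesSession, fire-and-forget.
    payload = {
        'app_name': app_name,
        'level': level,
        'message': message,
        '@timestamp': datetime.utcnow().isoformat(),
    }
    return session.post(elastic_url, data=json.dumps(payload),
                        headers=headers or {'content-type': 'application/json'},
                        auth=auth)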
Example no. 7
def do_queries(qis):
    to_do_qi = [qi for qi in qis if not qi.q.result]
    if not to_do_qi:
        return
    logger.info('BEGIN query ' + to_do_qi[0].q.url + ' (' +
                str(len(to_do_qi)) + ')')
    log_start = time.time()
    session = FuturesSession(max_workers=len(to_do_qi))
    futures = [
        session.get(qi.q.url,
                    params=qi.q.arguments,
                    headers=qi.q.headers,
                    background_callback=bg_cb) for qi in to_do_qi
    ]
    wait(futures)
    # for f in futures:
    #     print(f.result().url)
    for item in zip(to_do_qi, futures):
        item[0].q.result = item[1].result().data
    log_end = time.time()
    logger.info('END query; time=' + str(log_end - log_start))
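bg_cb is not defined in this snippet, but the loop reads a .data attribute off each finished response, so the callback presumably parses and attaches the payload. A minimal sketch, assuming the pre-1.0 requests-futures background_callback signature (session, response) and a JSON body (newer releases use standard response hooks instead):

def bg_cb(session, response):
    # Hypothetical callback: runs in the worker thread before .result() returns,
    # attaching the parsed body so the loop above can read response.data directly.
    response.data = response.json()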
Example no. 8
def send_ga_event(event, user):
    session = FuturesSession()
    payload = {
        'v': 1,
        'tid': settings.GOOGLE_TRACKING_ID,
        't': 'event',
        'ec': 'email',
        'ea': event.event_type,
        'cm': 'email',
    }
    if event.esp_event:
        payload['ua'] = event.esp_event.get('user-agent')
        payload['dt'] = event.esp_event.get('subject', [None])[0]
        payload['cn'] = event.esp_event.get('campaign_name', None)
        payload['cs'] = event.esp_event.get('campaign_source', None)
        payload['cc'] = payload['el'] = event.esp_event.get('email_id', None)
        payload['dp'] = "%s/%s" % (payload['cc'], event.event_type)
    else:
        logger.warn("No ESP event found for event: %s" % event.__dict__)
    logger.info("Sending mail event data Analytics: %s" % payload)
    session.post('https://www.google-analytics.com/collect', data=payload)
Example no. 9
    def get_async_parsers(self):
        fetchers = {}
        with futures.ThreadPoolExecutor(max_workers=6) as executor:
            session = FuturesSession(executor)
            session.headers.update(self.get_request_headers())
            # getter = partial(session.request, 'get',
            #                  background_callback=lambda s, r: self.parse_html(r))

            for type_, urls in self.urls.items():
                fetchers.update(
                    map(
                        lambda url:
                        (executor.submit(self.fetch_url, url), type_), urls))

            parsers = [
                executor.submit(self.parse_async,
                                fetcher.result(),
                                type=fetchers[fetcher])
                for fetcher in futures.as_completed(fetchers)
            ]
        return parsers
Example no. 10
    def get_all_issues(self, project_key, total, max_results):
        """Fetch all project issues."""
        log.debug("%s issues to fetch" % total)

        # Setup a session for concurrent fetching
        s = FuturesSession(executor=ThreadPoolExecutor(max_workers=4))
        s.auth = (self.username, self.password)
        s.params = {
            'jql': "project=%s" % project_key,
            'fields': 'summary,description,issuetype,status,resolution',
            'maxResults': max_results
        }
        s.headers = {'Content-Type': 'application/json'}

        def parse_json_cb(sess, resp):
            # Materialize a list so pages can be concatenated below (Python 3's map is lazy).
            resp.data = list(map(
                lambda item: {
                    'key': item['key'],
                    'summary': item['fields']['summary'],
                    'description': item['fields']['description'],
                    'type': item['fields']['issuetype']['name'],
                    'status': item['fields']['status']['name'],
                    'resolved': True if item['fields']['resolution'] else False
                },
                resp.json()['issues']))

        def get_issues(start_at=0):
            future = s.get("%s/search" % self.url,
                           params={'startAt': start_at},
                           background_callback=parse_json_cb)
            next_at = start_at + max_results
            log.debug("... %s/%s" % (min(next_at, total), total))
            if next_at < total:
                data = get_issues(next_at)
            else:
                return future.result().data
            return future.result().data + data

        issues = get_issues()
        return issues
Example no. 11
def _test_simple(integration: IntegrationSession, exporter: str):
    session = FuturesSession()
    # start polling collector for spans
    future = session.get(integration.poll_url)

    # execute instrumented program
    env = os.environ.copy()
    env["OTEL_TRACES_EXPORTER"] = exporter
    subprocess.check_call(
        ["python", f"{integration.rootdir}/simple/main.py"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        env=env,
    )

    # get result of poll and assert spans
    response = future.result()
    assert response.ok
    spans = response.json()
    assert len(spans) == 1
    span = spans[0]

    assert span["operationName"] == "custom span"

    tags = [
        {
            "key": "otel.library.name",
            "vStr": "simple"
        },
        {
            "key": "otel.library.version",
            "vStr": "0.1"
        },
        {
            "key": "otel.status_code",
            "vType": "INT64"
        },
    ]
    for tag in tags:
        assert tag in span["tags"]
Example no. 12
    def connect_write(self, source, whitelist, pragma=None):
        """Initialize a streaming write HTTP response. Manually connects the
    underlying file-handle. In the event of a network disconnection,
    use to manually reinitiate an HTTP session.

    Parameters
    ----------
    source : sbp.client.handler.Handler
      Iterable source of SBP messages.
    whitelist : [int]
      Whitelist of messages to write

    """
        headers = {
            'Device-Uid': self.device_uid,
            'Content-Type': BROKER_SBP_TYPE,
            'Pragma': pragma
        }
        if not pragma:
            del headers['Pragma']
        try:
            self.executor = ThreadPoolExecutor(max_workers=DEFAULT_POOLSIZE)
            self.write_session = FuturesSession(executor=self.executor)
            self.source = source.filter(whitelist)
            gen = (msg.pack() for msg, _ in self.source)
            self.write_session.put(self.url, data=gen, headers=headers)
            self.write_response = True
        except requests.exceptions.ConnectionError:
            msg = "Invalid request to %s with headers %s." % (self.url,
                                                              headers)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout:
            pass
        except requests.exceptions.RetryError:
            pass
        except requests.exceptions.ReadTimeout:
            msg = "Invalid request to %s with headers %s." % (self.url,
                                                              headers)
            warnings.warn(msg)
        return self.write_ok
Example no. 13
    def __fetch(self, first_item, last_item):
        payloads = self.__payload_generator(first_item, last_item)
        futures = []

        with FuturesSession(session=self.__api._session,
                            max_workers=10) as session:
            for payload in payloads:
                futures.append(
                    session.post(self.__api._url,
                                 payload,
                                 background_callback=self._parser))
                if len(futures) == 10:
                    break

            while futures:
                if self.__without_trash:
                    data = (x for x in futures[0].result().data
                            if x.reagents and x.products)
                else:
                    data = futures[0].result().data

                if self.__single_step:
                    for r in data:
                        r.meta['reaxys_data'] = meta = [
                            x for x in r.meta['reaxys_data']
                            if len(x['stages']) == 1 and 'steps' not in x
                            and x['stages'][0]
                        ]
                        if meta:
                            yield r
                else:
                    yield from data

                del futures[0]
                payload = next(payloads, None)
                if payload:
                    futures.append(
                        session.post(self.__api._url,
                                     payload,
                                     background_callback=self._parser))
Example no. 14
def check_trillians(signal_nr):
    print("Checking Trillians")

    trillians = Trillian.objects.all()
    info_requests = {}
    with FuturesSession(executor=ThreadPoolExecutor(max_workers=5)) as session:
        for trillian in trillians:
            if not trillian.token:
                print(
                    "Skipping Trillian {trillian.name} ({trillian.hostname})".
                    format(trillian=trillian))
                if trillian.is_alive:
                    trillian.is_alive = False
                    trillian.save()
                continue

            info_requests[trillian.pk] = trillian, session.request(
                method='GET',
                url='https://{}/api/v1/info/'.format(trillian.hostname),
                auth=TokenAuth(trillian.token),
                timeout=(5, 10))

        # Wait for all the responses to come back in
        info_responses = {}
        for pk, (trillian, request) in info_requests.items():
            try:
                info_responses[pk] = trillian, request.result()
            except ConnectionError:
                info_responses[pk] = trillian, None

    for trillian, response in info_responses.values():
        if response and response.status_code == requests.codes.ok:
            data = response.json()
            trillian.is_alive = True
            trillian.version = data['version']
            trillian.last_seen = timezone.now()
        else:
            trillian.is_alive = False

        trillian.save()
Example no. 15
def test_parse_results(mocker, tmpdir, generated_request,
                       google_directions_api_response):
    request = FuturesSession(max_workers=1).get('http://hello.com')
    mocker.patch.object(request,
                        'result',
                        return_value=google_directions_api_response)
    o_d = generated_request['path_nodes'][0], generated_request['path_nodes'][
        -1]
    api_requests = {o_d: generated_request}
    api_requests[o_d]['request'] = request
    api_requests[o_d]['timestamp'] = 12345

    api_requests = google_directions.parse_results(api_requests)
    assert_semantically_equal(
        api_requests, {
            ('107316', '107352'): {
                'path_nodes': [
                    '107316', '2440643031', '4307345276', '107317',
                    '4307345495', '4307345497', '25495448', '2503102618',
                    '107351', '5411344775', '2440651577', '2440651556',
                    '2440651552', '107352'
                ],
                'path_polyline':
                'ahmyHzvYGJyBbCGHq@r@EDIJGBu@~@SToAzAEFEDIJ',
                'origin': {
                    'lat': 51.5188864,
                    'lon': -0.1369442
                },
                'destination': {
                    'lat': 51.5208299,
                    'lon': -0.1391027
                },
                'timestamp':
                12345,
                'parsed_response': {
                    'google_speed': 3.7183098591549295,
                    'google_polyline': 'ahmyHzvYkCvCuCdDcBrB'
                }
            }
        })
Example no. 16
    def __init__(self,
                 stack,
                 output_orientation=DEFAULT_3D_ORIENTATION,
                 preferred_mirror=None,
                 timeout=1,
                 cache_items=DEFAULT_CACHE_ITEMS,
                 cache_bytes=DEFAULT_CACHE_BYTES,
                 broken_slice_handling=DEFAULT_BROKEN_SLICE_HANDLING,
                 cval=0,
                 auth=None,
                 threads=THREADS):
        """
        Note: for small numbers of tiles on fast internet connection, ImageFetcher may be faster

        Parameters
        ----------
        stack : Stack
        output_orientation : str or Orientation3D
            default Orientation3D.ZYX
        preferred_mirror : int or str or StackMirror or None
            default None
        timeout : float
            default 1
        cache_items : int or None
            default 10
        cache_bytes : int or None
            default None
        broken_slice_handling : str or BrokenSliceHandling
            default BrokenSliceHandling.FILL
        cval : int
            default 0
        threads : int
            default 10
        """
        super(ThreadedImageFetcher,
              self).__init__(stack, output_orientation, preferred_mirror,
                             timeout, cache_items, cache_bytes,
                             broken_slice_handling, cval, auth)
        self._session = FuturesSession(session=self._session,
                                       max_workers=threads)
Example no. 17
def add_all_matches(*years: Iterable[int]) -> None:
    """
    Given a list of years, analyzes all matches from those years.

    :param years:
        Sequence of one or more years
    """

    years = [*years]
    print("Executing for years: %s" % years)

    requester = FuturesSession(executor=ProcessPoolExecutor(30),
                               session=requests.Session())

    event_get = lambda e: requester.get(event_url_template(event=e.key),
                                        headers=__api_key)
    teams_get = lambda e: requester.get(event_teams_url_template(event=e.key),
                                        headers=__api_key)
    matches_get = lambda e: requester.get(
        event_matches_url_template(event=e.key), headers=__api_key)

    events = Event.objects.prefetch_related('alliances').filter(
        year__in=years).order_by('end_date').all()
    print("Starting {} HTTP requests split between {} processes.".format(
        3 * len(events), requester.executor._max_workers))
    matches_futures = [matches_get(e) for e in events]  # type: List[Future]
    event_futures = [event_get(e) for e in events]  # type: List[Future]
    event_teams_futures = [teams_get(e) for e in events]  # type: List[Future]

    print("Waiting on HTTP requests.")
    wait(matches_futures + event_futures + event_teams_futures)
    requester.executor.shutdown(wait=True)

    arg_list = zip(events, [
        list_of_matches_json_converter(f.result().json())
        for f in matches_futures
    ], event_futures, event_teams_futures)

    for args in arg_list:
        _add_matches_from_event(*args)
Example no. 18
def add_list_new() -> None:
    requester = FuturesSession(executor=ProcessPoolExecutor(30),
                               session=requests.session())
    api_key = settings.TBA_API_HEADERS

    team_list_get = lambda p: requester.get(team_by_page_url_template(page=p),
                                            headers=api_key)
    team_participation_get = lambda tn: requester.get(
        team_participation_url_template(team=tn), headers=api_key)

    page_range = get_page_range()

    print(
        "\nStarting %d HTTP requests for team lists, split between %d processes..."
        % (page_range[1] - page_range[0], requester.executor._max_workers))
    team_list_futures = [team_list_get(p) for p in range(*page_range)]
    print("Waiting...")
    wait(team_list_futures)
    print("Done!\n")

    teams_lists = map(lambda f: f.result().json(), team_list_futures)
    teams_data = [item for page_data in teams_lists for item in page_data]
    team_numbers = [*map(lambda t: t['team_number'], teams_data)]

    print(
        "Starting %d HTTP requests for team participation data, split between %d processes..."
        % (len(team_numbers), requester.executor._max_workers))
    team_participation_futures = [
        team_participation_get(tn) for tn in team_numbers
    ]
    print("Waiting...")
    wait(team_participation_futures)
    print("Done!\n")

    team_participations = map(lambda f: f.result().json(),
                              team_participation_futures)
    arg_list = zip(team_numbers, teams_data, team_participations)

    for args in arg_list:
        add_team(*args)
Example no. 19
def run(urls):
    t = getTimeStamp()
    start = getSec()
    data = {
        "temp": getTemp(),
        "humidity": getHumidity(),
        "longitude": getLongitude(),
        "latitude": getLatitude(),
        "elevation": getElevation(),
        "sensorID": getSensorID(),
        "sensorLocalTime": getTimeStamp()
    }

    with FuturesSession(max_workers=2) as session:
        for url in urls:
            #print(url)
            session.post(url, data)

    end = getSec()
    timeran = end - start
    print("script took {} seconds".format(timeran))
    displayText("{}".format(timeran))
Example no. 20
def get_restaurant_details(res_ids):
    url = 'https://developers.zomato.com/api/v2.1/restaurant'

    headers = {'user_key': '85955a2247d2beb1f5ecadf80fbc4666'}

    restaurants = []

    session = FuturesSession()
    futures = []

    for res_id in res_ids:
        params = {'res_id': res_id}
        futures.append(session.get(url, headers=headers, params=params))

    for future in cf.as_completed(futures):
        response = future.result()

        if response:
            body = response.json()
            try:
                restaurants.append(
                    view_models.Restaurant(
                        body['name'], body['location']['address'],
                        body['photos'][0]['photo']['url'], body['timings'],
                        body['price_range'],
                        body['user_rating']['aggregate_rating'],
                        body['menu_url']))
            except KeyError:
                restaurants.append(
                    view_models.Restaurant(
                        body['name'],
                        body['location']['address'],
                        '',  # randomly zomato just doesn't have photos
                        body['timings'],
                        body['price_range'],
                        body['user_rating']['aggregate_rating'],
                        body['menu_url']))

    return restaurants
Example no. 21
  def _download_artifacts(self, artifacts):
    future_session = FuturesSession(max_workers=4)
    response_futures = []
    for artifact in artifacts:
      artifact_url = '/'.join([artifact.repo_url, artifact.artifact_path])
      streaming_callback = functools.partial(self._background_stream, artifact)
      response_future = future_session.get(
        artifact_url,
        stream=True,
        background_callback=streaming_callback,
      )
      response_future._artifact = artifact
      response_futures.append(response_future)

    # Trigger exceptions raised by background workers
    for future in response_futures:
      try:
        future.result()
      except Exception as e:
        print("Failed to download artifact: {}".format(future._artifact))
        raise e
      sys.stderr.write('.')
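_background_stream is bound via functools.partial, so it receives the artifact first and then the usual (session, response) background_callback arguments. Its body is not shown; a minimal streaming sketch, written here as a plain function (in the class it would also take self), with artifact.local_path as an assumed attribute:

def _background_stream(artifact, session, response):
    # Hypothetical sketch of the bound callback: stream the body to disk chunk by
    # chunk in the worker thread; `artifact.local_path` is an assumption.
    response.raise_for_status()
    with open(artifact.local_path, 'wb') as fh:
        for chunk in response.iter_content(chunk_size=64 * 1024):
            fh.write(chunk)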
Example no. 22
    def _proxy_race(self, url, proxies_per_test, timeout, max_workers=10):
        proxies = [self.proxies_list.pop() for _ in range(proxies_per_test)]

        session = FuturesSession(max_workers=max_workers)
        futures = [(session.get(url,
                                headers=self.headers,
                                proxies=proxy,
                                timeout=timeout), proxy) for proxy in proxies]
        good_proxies = []
        response = None
        for future, proxy in futures:
            try:
                r = future.result()
            except Exception:
                logger.exception('Proxy race failed for proxy %s for url %s.'
                                 ' Moving to next proxy...' % (proxy, url))
            else:
                if r:
                    response = r
                    good_proxies.append(proxy)
        return response, good_proxies
Example no. 23
def get_issues_by_fixversion(
        fv,
        fields=[],
        search_url="http://cavcops01:9081/rest/api/2/search",
        username="******",
        password="******"):
    log("Connecting to Jira API...")

    session = FuturesSession()
    headers = {"content-type": "application/json"}

    data_dict = {
        "jql": f"fixversion = {fv} AND type != Sub-task ORDER BY key ASC",
        "maxResults": 1000,
        "fields": fields
    }
    data_json = json.dumps(data_dict)

    if len(fields) == 0:
        fields = "all"
    log(f"Making request with JQL = ' {data_dict['jql']} ' for fields {fields}"
        )

    search_future = session.post(search_url,
                                 data_json,
                                 auth=(username, password),
                                 headers=headers)
    search_resp = search_future.result()

    issues = []
    if search_resp.status_code < 300:
        issues = search_resp.json()["issues"]
        log(f"Found {len(issues)} Jira issue{'' if len(issues) == 1 else 's'}")
    else:
        log(f"Error occured while searching Jira")
        log(f"\tResponse Status Code: {search_resp.status_code}")
        log(f"\tError: {search_resp.text}")

    return issues
Example no. 24
def get_address(lat_lng):
    """ fetch results from google """
    url = "?".join(("https://maps.googleapis.com/maps/api/geocode/json",
                    "key={}&latlng={}".format(API_KEY, lat_lng)))
    print(lat_lng, file=sys.stderr)

    with FuturesSession() as s:
        r = s.get(url,
                  headers={
                      'User-Agent':
                      " ".join(
                          ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5)",
                           "AppleWebKit/537.36 (KHTML, like Gecko)",
                           "Chrome/83.0.4103.116 Safari/537.36"))
                  })
        try:
            return print_replacement_address(r.result().json())
        except Exception as e:
            print("ERROR: {}".format(e), file=sys.stderr)
            print("latlng -----> {}".format(lat_lng), file=sys.stderr)
            print("url -----> {}".format(url), file=sys.stderr)
            print("json -----> {}".format(r.result().json()), file=sys.stderr)
Example no. 25
def search_candidates_from_params(search_params,
                                  access_token,
                                  url_args=None,
                                  user_id=None,
                                  facets=None):
    """
    Calls the candidate_service's Search API with given search criteria and returns the Future object.
    We can get the result from future object by applying .result() on it.
    :param search_params: Search params or search criteria upon which candidates would be filtered.
    :param access_token: Oauth-based or JWT-based token
    :param  url_args:  accepted arguments sent via the url; e.g. "?user_ids=2,3,4"
    :param user_id: Id of logged-in user
    :param facets: string | comma-separated values for calculating and returning search facets.
                            Should be "none" if no facet is needed.
    :return: future object for search result based on search criteria.
    """
    if not access_token:
        jw_token = User.generate_jw_token(user_id=user_id)
        headers = {
            'Authorization': jw_token,
            'Content-Type': 'application/json'
        }
    else:
        access_token = access_token if 'Bearer' in access_token else 'Bearer %s' % access_token
        headers = {
            'Authorization': access_token,
            'Content-Type': 'application/json'
        }

    url = CandidateApiUrl.CANDIDATE_SEARCH_URI
    session = FuturesSession(max_workers=MAX_WORKERS)

    # Search facet selections
    url_args += '&facets={}'.format(facets)

    future = session.get(url=(url + url_args) if url_args else url,
                         params=search_params,
                         headers=headers)
    return future
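The function returns the Future unresolved, so the caller decides when to block. A minimal usage sketch; the search parameters and token shown are illustrative only:

# Hypothetical caller: the HTTP request is already in flight when the function returns.
future = search_candidates_from_params({'skills': 'python'}, 'Bearer abc123')
response = future.result()      # block only once the result is actually needed
candidates = response.json()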
Example no. 26
def release_all_locks(key_list, id):
    with FuturesSession(adapter_kwargs={'max_retries': 0}) as session:
        # Release locks
        unlock_payload = {'id': id, 'keys': key_list}
        unlock_futures = [
            session.post(address.rstrip() + "kv/release_locks/",
                         json=unlock_payload) for address in addresses
        ]
        count = 0
        for future in as_completed(unlock_futures):
            try:
                count += 1
                if (count <= final_count):
                    pass
                    #print(future.result())
                else:
                    #print("Majority ACKs received")
                    break
            except BaseException:
                count -= 1
                print('Server unavailable.')
                continue
Example no. 27
def multi_futures_app_request(app_ids,
                              headers=None,
                              verify=True,
                              params=None,
                              workers=s.CONCURRENT_REQUESTS):
    """
    :param app_ids: a list of app IDs.
    :param headers: a dictionary of custom headers to use.
    :param verify: bool for requesting SSL verification.
    :return: a list of all apps' detail data
    """
    session = FuturesSession(max_workers=workers)

    headers = default_headers() if headers is None else headers
    responses = [
        session.get(build_url('details', app_id),
                    headers=headers,
                    verify=verify,
                    params=params,
                    hooks={
                        'response': parse_app_details_response_hook,
                    }) for app_id in app_ids
    ]

    apps = []
    for i, response in enumerate(responses):
        try:
            result = response.result()
            app_json = result.app_details_data
            app_json.update({
                'app_id': app_ids[i],
                'url': result.url,
            })
            apps.append(app_json)
        except requests.exceptions.RequestException as e:
            log.error('Error occurred fetching {app}: {err}'.format(
                app=app_ids[i], err=str(e)))

    return apps
Example no. 28
    def get_subjurisdictions(self):
        """
        Returns a list of subjurisdictions depending on the level
        of the main jurisdiction. States always have counties, and
        counties and cities may have precincts.
        """

        subjurisdictions_url = self._get_subjurisdictions_url()
        if not subjurisdictions_url:
            json_url = self.url.replace('summary.html','json/electionsettings.json')
            try:
                r = requests.get(json_url)
                r.raise_for_status()
                jurisdictions = []
                counties = r.json()['settings']['electiondetails']['participatingcounties']
                jurisdictions = self._get_subjurisdictions_urls_from_json(counties)
                return jurisdictions
            except requests.exceptions.HTTPError:
                return []
        try:
            r = requests.get(subjurisdictions_url)
            r.raise_for_status()

            # Use a maximum of 10 workers.  Should we parameterize this?
            session = FuturesSession(max_workers=10)
            future_to_name = {}
            for url, name in self._scrape_subjurisdiction_paths(r.text):
                future = self._subjurisdiction_url_future(session, url)
                future_to_name[future] = name

            jurisdictions = []
            for future in concurrent.futures.as_completed(future_to_name):
                url = self._subjurisdiction_url_from_future(future)
                name = future_to_name[future]
                jurisdictions.append(Jurisdiction(url, 'county', name))

            return jurisdictions
        except requests.exceptions.HTTPError:
            return []
Example no. 29
def concurrent_get_first_comments(ticket_ids):
    session = FuturesSession()

    url_template = 'https://archivesupport.zendesk.com/api/v2/tickets/{}/audits.json'
    audit_ids = []

    for ticket_ids_chunk in chunked(ticket_ids, 1000):
        audit_futures = []
        for i, ticket_id in enumerate(ticket_ids_chunk):
            if ticket_id is None:
                audit_futures.append(None)
                continue
            logger.debug('getting audit future for ticket {}/{}'.format(
                i, len(ticket_ids_chunk)))
            audit_futures.append(
                session.get(url_template.format(ticket_id),
                            auth=HTTPBasicAuth(
                                env['ZENDESK_AGENT_ACCOUNT'] + "/token",
                                env['ZENDESK_API_KEY'])))
            time.sleep(60 / ZENDESK_API_RATE_LIMIT)

        for i, af in enumerate(audit_futures):
            if af is None:
                audit_ids.append(None)
                continue
            result = af.result()
            if result.status_code != 200:
                logger.error('ticket #{} bad status code {}: {}'.format(
                    ticket_ids_chunk[i], result.status_code, result.content))
                audit_ids.append(None)
                continue
            try:
                audit_ids.append(result.json()['audits'][0]['id'])
            except Exception as e:
                logger.error('while parsing result for #{} {}'.format(
                    ticket_ids_chunk[i], e))
                audit_ids.append(None)

    return audit_ids
Example no. 30
    def getPlaylist(self):
        self.playlist = []
        content = self._getInfo(1)
        self.total_num = content['ugc_total_count']
        self.nick_name = content['nickname']
        self.age = content['age']
        self.gender = content['gender']
        if self.total_num == 0:
            return self.playlist
        self._addDataToPlaylist(content['ugclist'])
        total_page = math.ceil(self.total_num / self.NUM_PER_PAGE)

        future_session = FuturesSession(max_workers=Object.REQUEST_WORKERS)
        futures = [self._getInfoAsync(i, future_session) for i in range(2, total_page + 1)]
        p = 1
        for future in as_completed(futures):
            p += 1
            content = future.result().text
            data = re.findall(r'[(](.*)[)]', content)[0]
            self._addDataToPlaylist(json.loads(data)['data']['ugclist'])
        self.playlist.sort()
        return self.playlist