Example #1
    def request_query(self, url: str, parameters: Dict) -> Dict:

        # session.get() returns a Future; .result() blocks for the Response
        session = sessions.FuturesSession(max_workers=self.__NB_WORKER)
        future = session.get(url, params=parameters)

        self.check_request_response(future)
        return future.result().json()
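
As a reference point for the examples on this page: on a FuturesSession, session.get() returns a concurrent.futures.Future that wraps a requests.Response. A minimal standalone sketch (the URL is only a placeholder):

from requests_futures.sessions import FuturesSession

session = FuturesSession(max_workers=4)
future = session.get('https://httpbin.org/get')  # returns immediately
response = future.result()                       # blocks until the response arrives
print(response.status_code, response.json()['url'])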
Example #2
def __init__(self, name):
    self.name = name
    self.main_channel_link = None
    self.main_channel_html = None
    self.subscribers = 0
    self.bad_words = 0
    self.session = sessions.FuturesSession(max_workers=10)
Example #3
@classmethod
def load(cls, session=None):
    """Initialize an instance using values from lbry.io."""
    if not session:
        session = sessions.FuturesSession()
    return cls(
        session,
        conf.ANALYTICS_ENDPOINT,
        utils.deobfuscate(conf.ANALYTICS_TOKEN)
    )
Example #4
def get_images(urls):
    imagesContent = []

    with sessions.FuturesSession() as session:
        futures = [session.get(url) for url in urls]
        # as_completed (from concurrent.futures) yields each future as it finishes
        for future in as_completed(futures):
            response = future.result()
            imagesContent.append(response.content)

    return imagesContent
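
Note that as_completed yields futures in completion order, so imagesContent above may not match the order of urls. If order matters, a sketch along these lines preserves it (same assumptions as the example):

from requests_futures.sessions import FuturesSession

def get_images_ordered(urls):
    with FuturesSession() as session:
        futures = [session.get(url) for url in urls]
        # iterating in dispatch order keeps results aligned with the input
        return [future.result().content for future in futures]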
Example #5
def get_html_async(urls, n_workers=10, use_cache=True):
    """:func:`get_html`, but async, give or take.

    Takes a list (or any iterable) of urls and returns a corresponding generator of htmls.
    The htmls have their scripts and styles removed and are stored in cache.

    Args:
        urls (iterable(str)): The urls to be accessed
        n_workers (:obj:`int`, optional): The number of workers.
        use_cache (:obj:`bool`, optional): Attempts to use the cache if True, otherwise it will fetch from sigarra.

    Returns:
        A str generator
    """
    global cache

    if cache is None:
        load_cache()

    urls = tuple(urls)

    remove_invalid_entries(urls)

    work_queue = [url for url in urls if url not in cache or not use_cache]

    if len(work_queue) > 0:
        with _sessions.FuturesSession(max_workers=n_workers) as session:

            futures = [session.get(url) for url in work_queue]

            for future in futures:
                response = future.result()

                if response.status_code != 200:
                    continue  # skip failed responses

                url = response.url
                html = response.text

                for match_rule, custom_treatment in _custom_treatments.items(
                ):  # Can we apply any custom treatment to the html?
                    if match_rule(url):
                        cache[url] = custom_treatment(html)
                        break
                else:  # If not, use the default treatment
                    cache[url] = _default_treatment(html)

    return (get_html(url) for url in urls)
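
A possible call site for the function above (the URLs are hypothetical; sigarra pages are what the docstring implies):

urls = ['https://sigarra.up.pt/feup/pt/pageA', 'https://sigarra.up.pt/feup/pt/pageB']  # hypothetical
for html in get_html_async(urls, n_workers=4):
    print(len(html))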
Example #6
def check_transaction(txid):
    """
    This function checks the transaction details by querying the BlockStream TestNet API

        @param: txid - list - This corresponds to the transaction ids to be queried
        @return: results - np array - The transaction details in array format
    """

    # Use FuturesSession to issue the HTTP requests in parallel (HTTPS)
    session = sessions.FuturesSession(max_workers=len(txid))
    # dispatch all requests first, then collect the responses
    futures = [session.get(i) for i in txid]
    responses = [future.result() for future in futures]

    # use map() to extract the required information about each txid
    results = np.array(list(map(json_parser, responses)))

    return results
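
The two-step dispatch/collect pattern above is the whole point of FuturesSession: writing session.get(i).result() inside the comprehension would block on each response before sending the next request, serializing the batch. A minimal sketch of the timing difference (placeholder endpoint):

from requests_futures.sessions import FuturesSession

urls = ['https://httpbin.org/delay/1'] * 5  # each response takes ~1s

session = FuturesSession(max_workers=5)
futures = [session.get(url) for url in urls]   # all five requests start at once
responses = [f.result() for f in futures]      # total wait is ~1s, not ~5s
print([r.status_code for r in responses])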
Example #7
def get_all_market_info(self):
    session = sessions.FuturesSession(session=self.s)
    results = {}
    for type_ in self.resource_id:
        if type_ == 'energy drink':
            continue
        results[type_] = session.get(f'http://rivalregions.com/storage/market/{self.resource_id[type_]}?{self.c}')
    for res in results:
        r = results[res].result()
        price, selling_amount, player_id, player_name, total_offers = self.parse_market_response(r, self.resource_id[res])
        results[res] = {'price': price,
                        'amount': selling_amount,
                        'player_id': player_id,
                        'player_name': player_name,
                        'total_offers': total_offers}
    return results
Example #8
def null_session(futures: bool = False) -> 'Union[Session, FuturesSession]':
    """No proxy session.

    Args:
        futures: Whether to return a :class:`requests_futures.FuturesSession`.

    Returns:
        Union[requests.Session, requests_futures.FuturesSession]:
        The session object with no proxy settings.

    """
    if futures:
        session = requests_futures_sessions.FuturesSession(
            max_workers=DARC_CPU)
    else:
        session = requests.Session()

    session.headers['User-Agent'] = default_user_agent()
    return session
Example #9
 def __init__(self, id, device_name, configFile, log):
     print("GpuDevice start init")
     threading.Thread.__init__(self)
     self.ready = False
     self.id = id
     self.batch_size = 2
     self.log = log
     self.cams = {}
     self.max_batch_size = 4
     self.config_mot = {
         'engine_path': 'models/yolov4_-1_3_416_416_dynamic.engine',
         'max_batch_size': self.max_batch_size
     }
     # with open('config/mot.json') as config_file:
     #     config_mot = json.load(config_file, cls=ConfigDecoder)
     #     self.config_mot = config['mot']
     self.detector = None
     self.server_URL = "http://localhost:8080/update?"
     self.session = sessions.FuturesSession(max_workers=2)
     self.proceedTime = 0
     self.device = str(device_name)
     try:
         self.config = configFile
         self.cnt = 0
         self.frame = []
         self.img_size = self.config['img_size']
         self._stopevent = threading.Event()
         self.ready = True
         # self.isRunning = False
         self.log.debug(device_name + " with name " + str(self.id) +
                        " created ok id:" + str(self.device))
         self.session.get(self.server_URL + "cmd=GpuStart&name=" + str(id) +
                          "&status=OK")
         self.start()
      except Exception:
          print("GpuDevice init: cannot start GPU for " + str(self.id) + " ",
                self.device, self.config)
         # traceback.print_exception(*sys.exc_info())
         self.session.get(self.server_URL + "cmd=GpuStart&name=" + str(id) +
                          "&status=error&module=Gpu")
         print(sys.exc_info())
         self.kill()
Example #10
    def __init__(self, targetserver, hec_token, eventgen_name=None):
        self._name = 'eventgen_splunk_hec_logger'
        self.targetserver = targetserver
        self.hec_token = hec_token
        self.host = socket.gethostname()
        self.pid = os.getpid()
        self.events = []
        self.send = True
        self.os = platform.platform()
        self.system_username = subprocess.getoutput('whoami')  # the `commands` module is Python 2 only
        self.eventgen_name = eventgen_name
        atexit.register(self._stopFlushTimer)

        self.log = logging.getLogger(self._name)
        self.log.setLevel(logging.DEBUG)
        self.log.info("SplunkHECHandler logger is initialized")

        # Best-effort lookup of a non-loopback local IP address
        try:
            self.ip = [
                l for l in ([
                    ip
                    for ip in socket.gethostbyname_ex(socket.gethostname())[2]
                    if not ip.startswith("127.")
                ][:1], [[(s.connect(('8.8.8.8', 53)), s.getsockname()[0],
                          s.close()) for s in [
                              socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                          ]][0][1]]) if l
            ][0][0]
        except Exception:
            self.ip = "unknown"

        self.session = sessions.FuturesSession(max_workers=32)

        if not targetserver or not hec_token:
            self.log.warning(
                "Please provide valid targetserver and hec_token in default/eventgen_engine.conf."
            )
            self.send = False

        super(SplunkHECHandler, self).__init__()

        self.timer = self._flushAndRepeatTimer()
Example #11
    def get(self):
        session = sessions.FuturesSession(max_workers=10)
        httpFutures = []

        serviceRefAndStopIdArray = [('B', 1693, 'B'), ('L', 286, 'L all'),
                                    ('L', 224, 'L ret')]
        text = ''
        for serviceRefAndStopId in serviceRefAndStopIdArray:
            serviceRef = serviceRefAndStopId[0]
            stopId = serviceRefAndStopId[1]
            #response = taoBusTimesQuery(serviceRef, stopId)
            query = buildTaoBusTimesQuery(serviceRef, stopId)
            httpFutures.append(session.get(query))

        for i, future in enumerate(httpFutures):
            serviceRefAndStopId = serviceRefAndStopIdArray[i]
            serviceRef = serviceRefAndStopId[0]
            stopId = serviceRefAndStopId[1]
            stopLabel = serviceRefAndStopId[2]
            httpResponse = future.result()
            resObj = json.loads(httpResponse.text)
            if len(resObj['busTimes']) > 0:
                response = resObj['busTimes'][0]
            else:
                response = None

            text += '%s: ' % (stopLabel if stopLabel else serviceRef)
            if response:
                timeDatas = response['timeDatas']
                for timeData in timeDatas:
                    text += '%d ' % timeData['minutes']

                serviceDisruption = response['serviceDisruption']
                if serviceDisruption:
                    text += '\n/!\\ %s' % serviceDisruption
            else:
                text += 'no data!'

            text += '\n'

        return txtPagesBuilder(text)
Example #12
def i2p_session(futures: bool = False) -> 'Union[Session, FuturesSession]':
    """I2P (.i2p) session.

    Args:
        futures: Whether to return a :class:`requests_futures.FuturesSession`.

    Returns:
        Union[requests.Session, requests_futures.FuturesSession]:
        The session object with I2P proxy settings.

    See Also:
        * :data:`darc.proxy.i2p.I2P_REQUESTS_PROXY`

    """
    if futures:
        session = requests_futures_sessions.FuturesSession(
            max_workers=DARC_CPU)
    else:
        session = requests.Session()

    session.headers['User-Agent'] = default_user_agent(proxy='I2P')
    session.proxies.update(I2P_REQUESTS_PROXY)
    return session
Example #13
def check_transaction(datatype, txid):
    """
    This function checks the transaction details by querying the BlockStream TestNet API

        Keyword Arguments:
            datatype (str): The data type of txid ('str' or 'arr')
            txid (str/list): This corresponds to the transaction id(s) to be queried

        Return:
            results (np array): The transaction details in array format
    """

    if datatype == 'str':
        url = "https://blockstream.info/testnet/api/tx/"
        req = requests.get(url + txid)
        results = np.array(list(json_parser(req)))

    elif datatype == 'arr':
        session = sessions.FuturesSession(max_workers=len(txid))
        # dispatch every request before collecting results so they run in parallel
        futures = [session.get(i) for i in txid]
        responses = [future.result() for future in futures]
        results = np.array(list(map(json_parser, responses)))

    return results
Example #14
    def get_html_async(self, urls, n_workers: int = 10):
        """:func:`Credentials.get_html`, but async, give or take.

        Takes a list (or any iterable) of urls and returns a corresponding generator of htmls.
        The htmls have their scripts and styles removed and are stored in cache.

        Args:
            urls (iterable(str)): The urls to be accessed
            n_workers (:obj:`int`, optional): The number of workers.
        
        Returns:
            A str generator
        """

        urls = tuple(urls)

        work_queue = (url for url in urls if url not in self.cache)

        with _sessions.FuturesSession(max_workers=n_workers) as session:
            session.cookies.update(
                self.session.cookies
            )  # copy the cookies from the object's session into this asynchronous one

            futures = [session.get(url) for url in work_queue]

            for future in futures:
                response = future.result()

                if response.status_code != 200:
                    continue  # skip failed responses

                url = response.url
                html = response.text

                self.cache[url] = _utils.trim_html(html)

        return (self.get_html(url) for url in urls)
Example #15
 def __init__(self, camConfig, gpuConfig, device_id, cam_id, log, vc_device="/CPU:0"):
     threading.Thread.__init__(self)
     self.log = log      
     self.log.debug("VideoCapture start init stream object " + str(cam_id))
     self.totalFrames = 0
     self.vc_device = vc_device
     self.server_URL = "http://localhost:8080/update?"
     self.startTime = int(time.time())
      self.pipeline_str = 'rtspsrc location={} latency={} ! rtph264depay ! h264parse ! queue leaky=1 ! decodebin ! videoconvert ! appsink sync=false'
     self.cur_frame_cnt = 0
     self.proceed_frames_cnt = 0
     self.proceedTime = [0, 0]
     self.session = sessions.FuturesSession(max_workers=2)
     self.outFrame = np.array([])
     self.isDrow = False
     self.error_counter_default = 10
     self.error_counter = 10
     self.clients = []
     self.out_color = (252, 35, 240)
     self.in_color = (52, 235, 240)
     self.text_color = (255, 255, 0)
     self.intersections = {}
     self.save_video_res = None
     self.id = str(cam_id)
     self.device_id = device_id        
      self.uid = np.append(
          np.random.choice(
              np.frombuffer(b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', dtype=np.uint8),
              7), np.uint8(device_id))
     try:
         self.config = camConfig
         print("VideoCapture stream config", self.config)
         self.url = self.config['url']
         if 'type' in self.config: self.type = self.config['type']
         else: self.type = 0 
         self.isFromFile = self.config['isFromFile']
         self.cap = None
         self.buffer = 200
         self.img_size = int(gpuConfig['img_size'])
         print("img size", self.img_size) 
         self.cur_proceed_frame = None
         self.max_hum_w = int(self.img_size/2)
         self.GPUconfig = gpuConfig
         self.frame_res = (self.img_size, self.img_size)
         self.skip_frames = int(self.config['skip_frames'])
         self.batch_size = int(self.config['batch_size'])
         self.path_track = int(self.config['path_track'])
         self.save_video_flag = self.config['save_video_flag']
         print("save_video_flag", self.save_video_flag, cam_id)
         self.display_video_flag = self.config['display_video_flag']
         if(self.config['save_video_flag'] or self.config['display_video_flag']):
             self.isDrow = True
             self.save_video_res = tuple(self.config['save_video_res'])
         print("save video res", self.save_video_res)
         scale = self.img_size/416.0
         self.borders = self.config['borders']
         print("scale", scale, self.borders)
         if self.borders:
             for key in self.borders:
                 self.borders[key][0] = [int(self.borders[key][0][0]*scale),int(self.borders[key][0][1]*scale)]
                 self.borders[key][1] = [int(self.borders[key][1][0]*scale),int(self.borders[key][1][1]*scale)]
                 self.intersections[key] = [0, 0]
         self.out = None
         print("start load encoder", os.path.join('models',self.config['encoder_filename']))
         self.encoder = gdet.create_box_encoder(
           os.path.join('models', 'mars-small128.onnx'), 
           batch_size=self.batch_size, 
           device=self.vc_device)
         print("encider is ok")
         if self.save_video_flag:            
             outFile = self.config['save_path'] +"_"+ str(self.startTime)+".avi"
             if outFile == '': outFile =  'video/'+str(self.id)+"_"+ str(self.startTime)+"_auto.avi"
             self.log.info("!!!!!!Save out video to file " + outFile)
             self.out = cv2.VideoWriter(outFile, cv2.VideoWriter_fourcc(*'XVID'), 5, self.save_video_res)
         print("VideoCapture stream start load tracker")
         self.tracker = Tracker(nn_matching.NearestNeighborDistanceMetric("cosine", self.config['max_cosine_distance'], None))
         print("VideoCapture stream tracker ok")
         if not self.isFromFile:
             #print("start read from camera")
              url = self.pipeline_str.format(self.url, self.buffer)
             print("url", self.url,"\n", url)
             #self.q = VideoCaptureStream(self.url, self.frame_res)
             self.q = VideoCaptureStream(url, self.frame_res)
             print("self.q", self.q)
         else:
             #print("start read from file")
             self.q = VideoCaptureFile(self.url, self.frame_res, self.skip_frames)
             self.totalFrames = self.q.totalFrames
             print("created stream ok")
             #if client is not None:
             #    client.send_json({'OK':["startStream", self.id]})
         self.session.get(self.server_URL+'cmd=startStream&name='+self.id+'&status=OK')
         self._stopevent = threading.Event()
      except Exception:
          print("VideoStream err:", sys.exc_info())
          # self.session.get(self.server_URL+'cmd=startStream&name='+self.id+'&status=error')
          self.log.debug("VideoStream: cannot start video stream for " + str(camConfig))
          # print("VideoStream err types",type(traceback.print_exception(*sys.exc_info())), type(sys.exc_info()))
          self.id = None
Example #16
def reload_solr_cores() -> None:
    for core_name in _SOLR_CORE_NAMES:
        sessions.FuturesSession().get(
            _SOLR_RELOAD_URL.format(core_name),
            background_callback=_core_reload_callback)
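
The background_callback argument above only exists in requests-futures releases before 1.0; current versions replaced it with standard requests response hooks. A sketch of the equivalent call on a recent version, assuming the old callback took the usual (session, response) signature:

def _core_reload_hook(response, *args, **kwargs):
    # hooks receive the finished Response plus extra requests kwargs
    _core_reload_callback(None, response)

for core_name in _SOLR_CORE_NAMES:
    sessions.FuturesSession().get(
        _SOLR_RELOAD_URL.format(core_name),
        hooks={'response': _core_reload_hook})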
Example #17
@classmethod
def new_instance(cls, session=None):
    """Initialize an instance using values from the configuration."""
    if not session:
        session = sessions.FuturesSession()
    return cls(session, conf.settings['ANALYTICS_ENDPOINT'],
               utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']))
Example #18
    def __init__(self, configFileName):
        #threading.Thread.__init__(self)
        self.gpusActiveList = {}  # running devices
        self.gpusList = {} # available devices
        self.gpusConfigList = {} #  devices configs
        self.streamsConfigList = {}
        self.camsList = {} # running streams on gpus
        f = logging.Formatter('[L:%(lineno)d]# %(levelname)-8s [%(asctime)s]  %(message)s', datefmt = '%d-%m-%Y %H:%M:%S')
        # Add file rotating handler, with level DEBUG
        fileLog = logging.handlers.RotatingFileHandler('manager.log', 'a', 1000000, 5)
        fileLog.setLevel(logging.INFO)
        fileLog.setFormatter(f)
        logging.getLogger().addHandler(fileLog)

 #       physical_devices = tf.config.experimental.list_physical_devices('GPU')
 #       tf.config.set_visible_devices(physical_devices[0], 'GPU')
 #       tf.config.experimental.set_memory_growth(physical_devices[0], True)

        self.log = logging.getLogger('appManager')
        self._stopevent = threading.Event()
        self.isGPU = True
        self.server_URL = "http://localhost:8080/update?"
        # self.camIdFrame = []
        self.isGpuStarted = False
        try:
            self.session = sessions.FuturesSession(max_workers=2)
            self.config = self.loadConfig(configFileName, 'Gpus_manager_')
            if self.config:
                self.db_path = "db.wingu.sqlite3"
                self.db_table_name = "stats"
                self.config['gpu_configs'] = {}
                self.config['streams_configs'] = {}
                print("gpus manager config", self.config)
                #gpus = tf.config.experimental.list_physical_devices('GPU')
                if self.isGPU:
                #    print("gpus", gpus)
                #    print("len=", len(self.config['gpus_configs_list']), len(gpus))
                #    self.isGPU = True
                    nvidia_smi.nvmlInit()
                    self.gpuInfo = {}
                    index = 0
                    gpu_id = 0
                    #if len(gpus) <= len(self.config['gpus_configs_list']):
                    #    print("gpus_configs_list, gpus config is ok", self.config['gpus_configs_list'])
                    #    for index, gpu_id in enumerate(gpus):
                    print("key", index, gpu_id)
                    name = "/GPU:" + str(index)
                    device = str(self.config['gpus_configs_list'][index])
                    print("gpus name", name, device)
                    self.gpusList[name] = index
                    print("gpus 333", self.config['gpus_configs_list'][index])
                    cfg = self.loadConfig(self.config['gpus_configs_list'][index], "Gpu_")
                    print("gpus cfg", cfg)
                    if cfg is not None:
                        cfg['device_id'] = name
                        cfg['fileName'] = device
                        self.gpusConfigList[name] = cfg
                        if device in self.config['autostart_gpus_list']:
                            self.startGpu(cfg, name)
                            self.gpuInfo[name] = index
                            print("gpu", name, index, self.gpuInfo)
                else:
                    name = "/CPU:0"
                    self.gpusList[name] = 0  # Init for CPU
                    cfg = self.loadConfig(self.config['cpu_config'], "GPU_")
                    if cfg is not None:
                        cfg['device_id'] = name
                        cfg['fileName'] = self.config['cpu_config']
                        self.gpusConfigList['cpu'] = cfg
                        if self.config['autostart_gpus_list'] is not None:
                            self.startGpu(cfg, name)
                self.log.debug("GPUsmanager Active GPUs list: "+" ".join(self.gpusActiveList.keys()))
                for stream in self.config['streams']:
                    stream = str(stream)
                    cfg = self.loadConfig(stream, "Stream_")
                    if cfg is not None:
                        self.streamsConfigList[stream] = cfg
                        if self.config['autostart_streams']:
                            if stream in self.config['autostart_streams']:
                                self.startStream(stream)
                self.ready = True
                try:
                    self.log.debug("GPUsmanager try to autostart "+ stream)
                    time.sleep(3)
                except Exception:
                    self.log.debug("GPUsmanager exception autostart "+ stream)

                self.session.get(self.server_URL+"cmd=startManager&status=OK&name=Init&module=Manager")
                # self.start()
            else:
                self.log.error("GPUsmanager: cannot load config for GPUs Manager")
        except Exception:
            self.log.error("GPUsmanager: cannot start GPUs manager")
            print(sys.exc_info())
            self.kill()
Example #19
# encoding: utf-8

from flask import Flask, render_template
import json
import os
import re
from requests_futures import sessions

import plot

app = Flask(__name__)
session = sessions.FuturesSession()

POLL_URL = f"https://api.gh-polls.com/poll/{os.environ['POLL_ID']}/"
DEFAULT_INFO_URL = "https://matplotlib.org//tutorials/introductory/customizing.html"


def init_styles(styles_filename):
    """Load the styles dict from JSON and construct the necessary URL properties"""

    with open(styles_filename, 'rb') as f:
        styles = json.load(f)

    for (style, style_properties) in styles.items():
        style_properties['poll_img_url'] = POLL_URL + style
        style_properties['info_url'] = style_properties.get(
            'info_url', DEFAULT_INFO_URL)
        style_properties[
            'poll_vote_url'] = style_properties['poll_img_url'] + '/vote'
        style_properties['votes'] = 0

    return styles
Example #20
def session_document_loader(secure=False, **kwargs):
    """
    Create a Requests document loader.

    Can be used to setup extra Requests args such as verify, cert, timeout,
    or others.

    :param secure: require all requests to use HTTPS (default: False).
    :param **kwargs: extra keyword args for Requests get() call.

    :return: the RemoteDocument loader function.
    """
    from pyld.jsonld import (
        JsonLdError,
        urllib_parse,
        parse_link_header,
        LINK_HEADER_REL,
    )

    import requests
    import string  # needed for the URL validation below

    # Manually toggled alternatives: exactly one of the following blocks
    # should be active to choose the session implementation.
    if True:
        import requests_cache

        requests_cache.install_cache("demo_cache")
        session = requests_cache.CachedSession()

    if False:
        from requests_futures import sessions

        session = sessions.FuturesSession()

    if False:
        # enable http/2 (note: the HTTP20Adapter created below is immediately
        # overwritten by the plain HTTPAdapter)
        from hyper.contrib import HTTP20Adapter

        adapter = HTTP20Adapter()
        adapter = requests.adapters.HTTPAdapter(pool_connections=100,
                                                pool_maxsize=100)
        session.mount("https://", adapter)
        # session.mount('https://w3id.org', adapter)
        # session.mount('https://ontopia-lodview.pdnd.italia.it', adapter)

    def loader(url):
        """
        Retrieves JSON-LD at the given URL.

        :param url: the URL to retrieve.

        :return: the RemoteDocument.
        """
        try:
            # validate URL
            pieces = urllib_parse.urlparse(url)
            if (not all([pieces.scheme, pieces.netloc])
                    or pieces.scheme not in ["http", "https"]
                    or set(pieces.netloc) >
                    set(string.ascii_letters + string.digits + "-.:")):
                raise JsonLdError(
                    'URL could not be dereferenced; only "http" and "https" '
                    "URLs are supported.",
                    "jsonld.InvalidUrl",
                    {"url": url},
                    code="loading document failed",
                )
            if secure and pieces.scheme != "https":
                raise JsonLdError(
                    "URL could not be dereferenced; secure mode enabled and "
                    'the URL\'s scheme is not "https".',
                    "jsonld.InvalidUrl",
                    {"url": url},
                    code="loading document failed",
                )
            headers = {"Accept": "application/ld+json, application/json"}
            response = session.get(url, headers=headers, **kwargs)

            doc = {
                "contextUrl": None,
                "documentUrl": response.url,
                "document": response.json(),
            }
            content_type = response.headers.get("content-type")
            link_header = response.headers.get("link")
            if link_header and content_type != "application/ld+json":
                link_header = parse_link_header(link_header).get(
                    LINK_HEADER_REL)
                # only 1 related link header permitted
                if isinstance(link_header, list):
                    raise JsonLdError(
                        "URL could not be dereferenced, it has more than one "
                        "associated HTTP Link Header.",
                        "jsonld.LoadDocumentError",
                        {"url": url},
                        code="multiple context link headers",
                    )
                if link_header:
                    doc["contextUrl"] = link_header["target"]
            return doc
        except JsonLdError as e:
            raise e
        except Exception as cause:
            raise JsonLdError(
                "Could not retrieve a JSON-LD document from the URL.",
                "jsonld.LoadDocumentError",
                code="loading document failed",
                cause=cause,
            )

    return loader
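
A possible way to wire this loader into pyld (the context URL is a placeholder; timeout is passed through to session.get via **kwargs):

from pyld import jsonld

jsonld.set_document_loader(session_document_loader(secure=True, timeout=10))
expanded = jsonld.expand('https://example.com/data.jsonld')  # placeholder URL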
Example #21
@contextmanager  # from contextlib; without it the generator cannot be used in a `with` block
def report_time(test):
    t0 = time.time()
    yield
    print("Time needed for `%s' called: %.2fs" % (test, time.time() - t0))


with report_time("serialized"):
    for i in range(TRIES):
        requests.get(URL)

session = requests.Session()
with report_time("Session"):
    for i in range(TRIES):
        session.get(URL)

session = sessions.FuturesSession(max_workers=2)
with report_time("FuturesSession w/ 2 workers"):
    futures = [session.get(URL) for i in range(TRIES)]
    for f in futures:
        f.result()

session = sessions.FuturesSession(max_workers=TRIES)
with report_time("FuturesSession w/ max workers"):
    futures = [session.get(URL) for i in range(TRIES)]
    for f in futures:
        f.result()


async def get(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.read()  # the original snippet is truncated here; a plausible completion
Example #22
def get_async(*urls):
    session = sessions.FuturesSession()
    # materialize the lazy map so every request is dispatched before any .result() blocks
    futures = list(map(session.get, urls))
    return [future.result() for future in futures]
Example #23
def _query_requests(self):
    session = sessions.FuturesSession(max_workers=len(self._urls_to_query))
    # dispatch every request first, then gather the responses in parallel
    futures = [session.get(url) for url in self._urls_to_query]
    self._requests = [future.result() for future in futures]