Example #1
    def run(self, dry_run=False):
        if dry_run:
            # Dry run: just count the requests that would be issued.
            return sum(1 for _ in self.perform_requests(True))

        self._executor = ThreadPoolExecutor(self.concurrency)
        self.session = requests.Session()
        # Size the connection pool to the worker count so threads do
        # not contend for pooled connections.
        adapter = requests.adapters.HTTPAdapter(
            pool_connections=self.concurrency, pool_maxsize=self.concurrency)
        self.session.mount('http://', adapter)
        self.session.mount('https://', adapter)

        t0 = time()
        last_report = time()
        i = 0
        r = None
        for r in self.perform_requests():
            if r is not True:  # True is a sentinel yield, not a response
                i += 1
                self.ui.info('{} responses sent | time elapsed {}s'.format(
                    i, time() - t0))

                if time() - last_report > REPORT_INTERVAL:
                    self.progress_queue.put(
                        (ProgressQueueMsg.NETWORK_PROGRESS, {
                            "processed": self.n_requests,
                            "retried": self.n_retried,
                            "consumed": self.n_consumed,
                            "rusage": get_rusage(),
                        }))
                    last_report = time()

        self.progress_queue.put((ProgressQueueMsg.NETWORK_DONE, {
            "ret": r,
            "processed": self.n_requests,
            "retried": self.n_retried,
            "consumed": self.n_consumed,
            "rusage": get_rusage(),
        }))
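For reference, the session setup above follows the standard requests pattern of sizing the urllib3 connection pool to match the worker count; a minimal standalone sketch:

import requests
from requests.adapters import HTTPAdapter

concurrency = 8  # match the executor's worker count
session = requests.Session()
adapter = HTTPAdapter(pool_connections=concurrency, pool_maxsize=concurrency)
session.mount('http://', adapter)
session.mount('https://', adapter)
# Each worker thread can now hold a pooled connection without
# evicting another thread's connection.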
Example #2
    def mass_upsert(self, upserts):
        """
        Execute multiple upserts

        :param upserts: list of upserts
        :return: list of results
        """

        sqlResults = []

        # One single-worker executor and one transaction per upsert;
        # the pool serializes the writes.
        for upsert in upserts:
            with ThreadPoolExecutor(1) as executor, self.transaction() as tx:
                sqlResults += [executor.submit(tx.upsert, *upsert).result()]

        sickrage.srCore.srLogger.db("{} UPSERTS executed".format(
            len(sqlResults)))

        return sqlResults
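A hedged usage sketch; the exact argument tuple that tx.upsert expects is not shown above, so the values here are purely illustrative:

# `db` is an instance of the class defining mass_upsert; the tuples
# below are hypothetical (table, keys, values) arguments for tx.upsert.
results = db.mass_upsert([
    ('tv_shows', {'indexer_id': 1}, {'status': 'Continuing'}),
    ('tv_shows', {'indexer_id': 2}, {'status': 'Ended'}),
])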
Example #3
def consume_from_balancer(balancer, playlists, destination, encrypt=False):
    '''
    Consume all active playlist resources from ``balancer`` and
    report status to it.
    '''
    def consume_resource(playlist_resource):
        m3u8_uri = "{server}:{port}{path}".format(
            server=playlist_resource.server.server,
            port=playlist_resource.server.port,
            path=playlists['streams'][playlist_resource.key]['input-path'])
        try:
            segments_modified = consume(m3u8_uri, destination, encrypt)
        except (httplib.HTTPException, urllib2.HTTPError, IOError,
                OSError) as err:
            logging.warning(u'Notifying error for resource %s: %s' %
                            (m3u8_uri, err))
            balancer.notify_error()
        else:
            if segments_modified:
                logging.info('Notifying content modified: %s' % m3u8_uri)
                balancer.notify_modified()
                m3u8_path = os.path.join(
                    build_full_path(destination, m3u8_uri),
                    os.path.basename(m3u8_uri))
                transcode_playlist(playlists, playlist_resource.key,
                                   segments_modified, m3u8_path)
            else:
                logging.debug('Content not modified: %s' % m3u8_uri)

    try:
        with ThreadPoolExecutor(max_workers=NUM_THREAD_WORKERS) as executor:
            list(
                executor.map(consume_resource,
                             balancer.actives,
                             timeout=CONSUME_TIMEOUT))
    except TimeoutError:
        balancer.notify_error()
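The try/except matters because the iterator returned by executor.map raises a TimeoutError as soon as a result is not available within timeout; a standalone sketch of that behavior:

import time
from concurrent.futures import ThreadPoolExecutor, TimeoutError

def slow(seconds):
    time.sleep(seconds)
    return seconds

with ThreadPoolExecutor(max_workers=2) as executor:
    try:
        # Raises TimeoutError while iterating: the 5-second task
        # cannot deliver its result within 1 second.
        print(list(executor.map(slow, [0.1, 5], timeout=1)))
    except TimeoutError:
        print('a worker exceeded the timeout')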
Example #4
# (imports inferred from the names used below; the original snippet
# omits them)
import json
from time import sleep

import tornado.web
from tornado import gen
from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor


class PlayApi(tornado.web.RequestHandler):
    # Shared pool for all requests handled by this class.
    executor = ThreadPoolExecutor(max_workers=4)

    def initialize(self):
        self.set_header('Content-Type', 'application/json')

    @gen.coroutine
    def get(self):
        # Fire and forget: play() is queued on the executor and the
        # response is written immediately.
        self.play()
        self.write(json.dumps({'success': 0}))
        self.flush()

    @run_on_executor
    def play(self):
        """Publish line segments in a background task."""
        # `cs` is assumed to be a module-level list of
        # {'lat': ..., 'lon': ...} points; it is not shown here.
        for i, c in enumerate(cs):
            if i == 0:
                # First point: a zero-length segment starting and
                # ending at the same coordinates.
                line = {
                    'type': 'path',
                    'lat1': c['lat'],
                    'lon1': c['lon'],
                    'lat2': c['lat'],
                    'lon2': c['lon'],
                }
            else:
                line = {
                    'type': 'path',
                    'lat1': cs[i - 1]['lat'],
                    'lon1': cs[i - 1]['lon'],
                    'lat2': c['lat'],
                    'lon2': c['lon'],
                }

            kwargs = {'message': json.dumps(line)}
            # `pc` is the application's publisher client (assumed;
            # not shown in this snippet).
            self.application.pc.publish_message(**kwargs)
            print(" [x] Sent: %s" % kwargs['message'])
            sleep(1)  # throttle to one segment per second
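Because play() is wrapped in run_on_executor it returns a future; if the response should wait for publishing to finish instead of firing and forgetting, the coroutine can yield that future (a sketch):

    @gen.coroutine
    def get(self):
        # Suspend the coroutine (not the ioloop) until play() completes.
        yield self.play()
        self.write(json.dumps({'success': 0}))
        self.flush()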
Example #5
from functools import wraps, partial
from tornado import gen, concurrent
from tornado import ioloop

# Suppress the known DeprecationWarning raised by the futures backport.
import warnings, exceptions
warnings.filterwarnings("ignore", "The futures package has been deprecated.*",
                        exceptions.DeprecationWarning, "futures")
import futures

import logging
import sys
from futures import ThreadPoolExecutor

EXECUTOR = ThreadPoolExecutor(100)

_LINE = '%' * 40


def safe_return_future(func):
    '''
        Identical to tornado.gen.return_future, plus thread
        safety: the callback is executed on the ioloop thread.
    '''
    @wraps(func)
    def exec_func(*args, **kwargs):
        future = concurrent.TracebackFuture()

        # The original example is truncated at this point; what follows
        # is a plausible completion modelled on tornado's return_future.
        # The wrapped function receives a `callback` argument, and the
        # future is resolved via the ioloop so the callback may safely
        # fire from a worker thread.
        def on_done(*cb_args):
            result = cb_args[0] if len(cb_args) == 1 else cb_args
            ioloop.IOLoop.instance().add_callback(
                partial(future.set_result, result))

        kwargs['callback'] = on_done
        func(*args, **kwargs)
        return future

    return exec_func
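A hedged usage sketch (read_config is hypothetical): the wrapped function takes a callback, which it may fire from a pool thread, and coroutines simply yield the returned future:

@safe_return_future
def read_config(path, callback):
    # The callback may be invoked from a worker thread; the decorator
    # marshals the result back onto the ioloop.
    EXECUTOR.submit(lambda: callback(open(path).read()))

@gen.coroutine
def show_config():
    text = yield read_config('/etc/hosts')
    print(text)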
Example #6
# -*- coding: UTF-8 -*-
__author__ = "rody800"

from futures import ThreadPoolExecutor
from functools import partial, wraps
import time
import tornado.ioloop
import tornado.web
''' This example reads a file on the server asynchronously in response
    to an HTTP GET request, without blocking other HTTP requests.
'''

tpexe = ThreadPoolExecutor(max_workers=2)


class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("This is index page")


class FileHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self, filename):
        # Read the file on a pool thread, then hop back onto the
        # ioloop thread to write the response.
        tpexe.submit(partial(self.readfile, filename)).add_done_callback(
            lambda future: tornado.ioloop.IOLoop.instance().add_callback(
                partial(self.callback_func, future)))

    def callback_func(self, future):
        # Runs on the ioloop thread, so touching the handler is safe.
        self.write(future.result())
        self.finish()

    def readfile(self, filename):
        # Blocking read, executed on the thread pool.  The original
        # snippet calls self.readfile without showing it; this is an
        # assumed minimal version.
        with open(filename) as f:
            return f.read()
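A hedged wiring sketch to make the example runnable; the URL patterns are assumed, since the original does not show them:

if __name__ == '__main__':
    application = tornado.web.Application([
        (r'/', IndexHandler),
        (r'/file/(.+)', FileHandler),
    ])
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()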
Example #7
    utc = utc[:-6]  # ignore timezone
    soup.decompose()
    return [url, utc, loc]


#def process_user(username, fullname):
#    print u'creating thread for user; username={}; full name={}'.format(username, fullname)
#    result = get_user(username, fullname)
#    f = open('github/{}.json'.format(username), 'w')
#    f.write(json.dumps(result, indent=4))
#    f.close()

if __name__ == '__main__':
    from guppy import hpy
    h = hpy()
    executor = ThreadPoolExecutor(max_workers=THREADS)
    thread = None
    for subdirs, dirs, files in os.walk('stackoverflow/'):
        i = 0
        for filename in files:
            username = filename[:-5]
            github_filename = 'github/{}.csv'.format(username)
            if os.path.isfile('{}.tmp'.format(github_filename)):
                os.remove('{}.tmp'.format(github_filename))
            if os.path.isfile(github_filename):
                print u"skip {}".format(username)
                continue
            f = codecs.open('stackoverflow/{}'.format(filename), 'r', 'utf-8')
            data = json.load(f)
            f.close()
            fullname = data['answerer']['name']
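The snippet is truncated at this point; judging from the commented-out process_user above and the thread variable initialized earlier, the loop presumably continues by submitting each user to the executor, roughly:

            # Hypothetical continuation (the original is truncated):
            thread = executor.submit(process_user, username, fullname)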
Example #8
    except easywebdav.client.OperationFailed:
        print('The root folder already exists; continuing.')

    # Check whether a folder with the same name already exists
    try:
        webdav.mkdir(mainfolder + '/' + name)
    except easywebdav.client.OperationFailed:
        print('The folder already exists; choose a different file name.')
        sys.exit()

    # Upload script
    # Create the top-level folders

    print('Creating the top-level folders.')

    with ThreadPoolExecutor(max_workers=maxconnect) as poolglav:
        results = [poolglav.submit(mkdirglav, nomerpapok)
                   for nomerpapok in range(1, glav + 1)]

    # Create the inner folders (a workaround; reimplement via the pool later)

    print('Creating the inner folders.')

    if size > maxname:
        for a in range(0, vspom - 1):
            for dlinaimeni in range(0, i):
                list.append(outsymbol(byte_file(proidennoe + dlinaimeni, 1, file)[0]))
            # NB: `list` here is a module-level list variable that
            # shadows the builtin of the same name.
            s = "".join([str(list[uli]) for uli in range(len(list))])
            #poolvspom.submit(mkdirvspom,s)
            #poolvspom.shutdown
            pot = threading.Thread(target=mkdirvspom, args=[s])
            threads.append(pot)
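The comment above marks the thread-per-folder loop as a workaround to be redone via the pool, and the commented-out poolvspom.submit lines suggest the intended shape, roughly:

    # Sketch of the pool-based variant hinted at above; the folder
    # names `s` would be built exactly as in the loop.
    with ThreadPoolExecutor(max_workers=maxconnect) as poolvspom:
        for s in inner_folder_names():  # hypothetical name generator
            poolvspom.submit(mkdirvspom, s)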