Example #1
def tif_to_jp2(
    _threads,
    _app,
    _source,
    _destination,
    _broken,
    _options,
    _verbose
    ):
 
    testApp(_app)

    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_source):
        subpath = root.replace(_source, '').lstrip('/')
        if _broken not in subpath:  
            jp2Path = os.path.join(_destination, subpath)
            makeDir(jp2Path)
            if any(".tif" in s for s in files):
                print >>emaillog, 'Converting contents of ' + subpath + ' from TIF to JP2'
            for file in files:
                if file.endswith('.tif'):
                    tiff = os.path.join(root, file)
                    jp2 = os.path.join(_destination, subpath,
                                       os.path.splitext(file)[0] + '.jp2')
                    tiffcopy = os.path.join(_destination, subpath, file)
                    command = _app + ' -i ' + tiff + ' -o ' + jp2 + ' ' \
                        + _options
                    command_post = 'shutil.move(\'' + tiff + '\',\'' + tiffcopy + '\')'
                    if _verbose == True:
                        print 'Creating ' + jp2
                    t.add_task(executeConversion, command, command_post, tiff,
                               _destination, _broken, file, jp2)
        t.await_completion()
Example #2
def jp2_to_jpeg(
    _threads,
    _app,
    _source,
    _destination,
    _broken,
    _jpegs,
    _verbose
    ):
    testApp(_app)

    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_destination):
        subpath = root.replace(_destination, '').lstrip('/')
        if _broken not in subpath:
            if any(".jp2" in s for s in files):
                print >>emaillog, 'Converting contents of ' + subpath + ' from JP2 to JPEG'
            for (output_file, size) in _jpegs:
                for file in files:
                    if file.endswith('.jp2'):
                        jp2 = os.path.join(root, file)
                        newfile = os.path.join(root,
                                  os.path.splitext(file)[0]) + '_' \
                                  + output_file
                        command = _app + ' -size ' + size + " " + jp2 \
                                  + ' -resize ' + size + ' ' + newfile
                        if _verbose == True:
                            print 'Creating ' + newfile
                        t.add_task(executeConversion, command, None, jp2,
                                   _source, _broken, file, newfile)
                t.await_completion()
Example #3
def pool_time(thread_num):
    start = time.clock()
    tp = ThreadPool(thread_num)
    for i in range(5):
        tp.add_task(time.sleep, i)
    tp.wait_completion()
    return time.clock() - start
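
Note that time.clock() was deprecated in Python 3.3 and removed in 3.8, so this benchmark as written only runs on Python 2. A minimal Python 3 sketch of the same measurement, assuming the same ThreadPool interface, swaps in time.perf_counter():

import time

def pool_time_py3(thread_num):
    # Wall-clock time for the pool to drain five sleep tasks.
    start = time.perf_counter()
    tp = ThreadPool(thread_num)
    for i in range(5):
        tp.add_task(time.sleep, i)
    tp.wait_completion()
    return time.perf_counter() - start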
Example #4
def jp2_to_jpeg(_threads, _app, _source, _destination, _broken, _jpegs,
                _verbose):
    testApp(_app)

    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_destination):
        subpath = root.replace(_destination, '').lstrip('/')
        if _broken not in subpath:
            if any(".jp2" in s for s in files):
                print >> emaillog, 'Converting contents of ' + subpath + ' from JP2 to JPEG'
            for (output_file, size) in _jpegs:
                for file in files:
                    if file.endswith('.jp2'):
                        jp2 = os.path.join(root, file)
                        newfile = os.path.join(root,
                                  os.path.splitext(file)[0]) + '_' \
                                  + output_file
                        command = _app + ' -size ' + size + " " + jp2 \
                                  + ' -resize ' + size + ' ' + newfile
                        if _verbose == True:
                            print 'Creating ' + newfile
                        t.add_task(executeConversion, command, None, jp2,
                                   _source, _broken, file, newfile)
                t.await_completion()
Example #5
def tif_to_jp2(_threads, _app, _source, _destination, _broken, _options,
               _verbose):

    testApp(_app)

    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_source):
        subpath = root.replace(_source, '').lstrip('/')
        if _broken not in subpath:
            jp2Path = os.path.join(_destination, subpath)
            makeDir(jp2Path)
            if any(".tif" in s for s in files):
                print >> emaillog, 'Converting contents of ' + subpath + ' from TIF to JP2'
            for file in files:
                if file.endswith('.tif'):
                    tiff = os.path.join(root, file)
                    jp2 = os.path.join(_destination, subpath,
                                       os.path.splitext(file)[0] + '.jp2')
                    tiffcopy = os.path.join(_destination, subpath, file)
                    command = _app + ' -i ' + tiff + ' -o ' + jp2 + ' ' \
                        + _options
                    command_post = 'shutil.move(\'' + tiff + '\',\'' + tiffcopy + '\')'
                    if _verbose == True:
                        print 'Creating ' + jp2
                    t.add_task(executeConversion, command, command_post, tiff,
                               _destination, _broken, file, jp2)
        t.await_completion()
Example #6
def iterate(_source, _ignore, _patron, _patron_zip, _threads):
    print 'Descend into ' + _source

    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_source):
        t.add_task(patron_bundle, _patron, _patron_zip, root)
    t.await_completion()
Example #7
def iterate(_source, _ignore, _patron, _patron_zip, _threads):
    print 'Descend into ' + _source
 
    t = ThreadPool(_threads)

    for (root, dirs, files) in os.walk(_source):
        t.add_task(patron_bundle, _patron, _patron_zip, root)
    t.await_completion()
Example #8
def articles():
    error = None
    if request.method == 'POST':
        query = request.form['query'].lower()
        query = query.replace(" ", "%20")
        if query in app.cache:
            return Response(json.dumps(app.cache[query]), mimetype='application/json')
        url = GOOGLE_NEWS_RSS+query
        response = requests.get(url).text

        # Get article links from the RSS
        links = []
        utils.find_links(links, response)
        if len(links) > 2:
            links = links[2:]

        # Get article titles from the RSS
        titles = []
        utils.find_titles(titles, response)
        if len(titles) > 2:
            titles = titles[2:]

        num_links = len(links)
        num_titles = len(titles)
        if num_titles < num_links:
            links = links[:num_titles]
        if num_links < num_titles:
            titles = titles[:num_links]

        articles = {}
        pool = ThreadPool(num_links)
        for i in range(num_links):
            pool.add_task(
                utils.get_article_sentiment, links[i], titles[i], articles)
        pool.wait_completion()

        sentiments = []

        result = {}
        result["articles"] = []
        index = 0
        for key in articles:
            info = {}
            info["id"] = index
            index += 1
            info["title"] = articles[key]["title"]
            info["link"] = key
            sentiments.append(articles[key]["sentiment"])
            info["sentiment"] = articles[key]["sentiment"]
            info["snippet"] = articles[key]["snippet"]
            result["articles"].append(info)

        average_sentiment = utils.average_sentiment(
            sentiments,
            len(sentiments))
        result["sentiment"] = average_sentiment
        app.cache[query] = deepcopy(result)
        return Response(json.dumps(result), mimetype='application/json')
Example #9
def sentiment():
    error = None
    if request.method == 'POST':
        query = request.form['query']
        query = query.replace(" ", "%20")
        if query in app.sentiment_cache:
            return Response(
                json.dumps(app.sentiment_cache[query]),
                mimetype='application/json')
        url = GOOGLE_NEWS_RSS+query
        response = requests.get(url).text

        # Get article links from the RSS
        links = []
        utils.find_links(links, response)
        if len(links) > 2:
            links = links[2:]

        # Get article titles from the RSS
        titles = []
        utils.find_titles(titles, response)
        if len(titles) > 2:
            titles = titles[2:]

        num_links = len(links)
        num_titles = len(titles)
        if num_titles > 3:
            num_titles = 3
        if num_titles < num_links:
            links = links[:num_titles]
        if num_links > 3:
            num_links = 3
        if num_links < num_titles:
            titles = titles[:num_links]

        articles = {}
        pool = ThreadPool(num_links)
        for i in range(num_links):
            pool.add_task(
                utils.get_article_sentiment, links[i], titles[i], articles)
        pool.wait_completion()

        sentiments = []

        result = {}
        for key in articles:
            print query, ':', articles[key]["sentiment"]
            sentiments.append(articles[key]["sentiment"])

        average_sentiment = utils.average_sentiment(
            sentiments,
            len(sentiments))
        print average_sentiment
        result["sentiment"] = average_sentiment
        result["query"] = request.form['query']
        app.sentiment_cache[query] = deepcopy(result)
        return Response(json.dumps(result), mimetype='application/json')
Example #10
def main1():
    thread_pool = ThreadPool(20)
    thread_pool.start()
    session = get_session()
    topic_query = \
        session.query(VideoTopic).filter(VideoTopic.video_type == 1)
    for topic in topic_query:
        thread_pool.add_task(job, topic.henhen_id)
    session.close()
    thread_pool.wait_done()
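
This pool variant separates task submission from an explicit start() that launches the workers; wait_done() then blocks until the queue drains. Some callers start first, as here; others queue every task and only then call start() (Examples #22 and #30, which call wait_completion() instead). The real class is not shown on this page, so the following is only a sketch of what that interface implies; the extra constructor arguments used in Examples #22 and #30 (a worker name and a per-worker helper class) are omitted:

import threading
from queue import Queue

class ThreadPool:
    def __init__(self, num_workers):
        self.tasks = Queue()
        self._num_workers = num_workers

    def add_task(self, func, *args):
        # Tasks may be queued before any worker exists.
        self.tasks.put((func, args))

    def start(self):
        for _ in range(self._num_workers):
            threading.Thread(target=self._work, daemon=True).start()

    def _work(self):
        while True:
            func, args = self.tasks.get()
            try:
                func(*args)
            except Exception:
                pass  # keep the worker alive if a task fails
            finally:
                self.tasks.task_done()

    def wait_done(self):
        # Block until every queued task has been processed.
        self.tasks.join()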
Example #11
def test_results(self):
    def my_add(a, b):
        return a + b

    tp = ThreadPool(5)
    for i in range(5):
        tp.add_task(my_add, i, i)
    d = tp.wait_completion()
    vals = sorted(d.values())
    assert vals == [0, 2, 4, 6, 8]
Example #12
def main():
    pool = ThreadPool(20)
    pool.start()
    session = get_session()
    topic_query = session.query(PicTopic).filter(PicTopic.pic_type
            == 'dongmantupian').order_by(PicTopic.id.desc())
    for pic_topic in topic_query:
        pool.add_task(dump_job, pic_topic)
    session.close()

    pool.wait_done()
Example #13
class BaseBot:
    def __init__(self, service):
        self.service = service
        self.error_count = 0
        self.thread_pool = ThreadPool()

    # Claimed to be thread-safe, but whether that is actually guaranteed
    # needs review; consider switching to a callback pattern
    def handle_event(self, events):
        for event in events:
            if isinstance(event, ChatInitEvent):
                #print(event.user_index)
                self.handle_entered_room(event.team_index, event.room_index)

            elif isinstance(event, ChatMessageEvent):
                chat = self.service.get_chat_summary(event.room_index,
                                                     event.msg_index)
                if chat:
                    self.handle_chat(event.team_index, event.room_index, chat)

            elif isinstance(event, UserDropEvent) \
                    and self.service.my_index == event.user_index:
                logger.error("봇 계정이 탈퇴되었습니다.")
                sys.exit()
            elif isinstance(event, UserPasswordChangedEvent) \
                    and self.service.my_index == event.user_index:
                logger.error("봇 계정의 비밀번호가 바뀌었습니다.")
                sys.exit()

    def run(self):
        while True:
            try:
                events = self.service.get_events()
                if events:
                    self.thread_pool.add_task(self.handle_event, events)
                time.sleep(self.service.config['lp_idle_time'])
            except Exception as e:
                self.error_count += 1
                if self.error_count > 3:
                    logger.error("오류가 발생했습니다. 프로그램을 종료합니다.")
                    #sys.exit()
                else:
                    logger.error("오류가 발생했습니다.:" + e)
                    time.sleep(5)

    def handle_entered_room(self, team_index, room_index):
        raise NotImplementedError()

    def handle_chat(self, team_index, room_index, chat):
        raise NotImplementedError()
Example #14
def main():
    thread_pool = ThreadPool(50)
    thread_pool.start()
    video_type = '7'
    base_url = 'http://www.toutoulu.com/vodlist/%s_%s.html'

    # init task

    for page_num in range(1, page_info[video_type] + 1):
        url = base_url % (video_type, page_num)
        print 'add task %s' % url
        thread_pool.add_task(thread_pool_job, url, video_type)

    thread_pool.wait_done()
Example #15
def test_thread_pool():
    """
    thread pool should be able to handle task processing
    """
    thread_pool = ThreadPool()
    result = []

    def populate_result_task():
        result.extend([i for i in range(0, 10)])
        return

    thread_pool.add_task(populate_result_task)
    thread_pool.tasks.join()
    thread_pool.terminate_all_workers()
    assert result == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
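
This test (and Examples #16, #18, #26, #28) exercises a pool that exposes add_task, a public tasks queue, and terminate_all_workers. That class is not reproduced on this page; below is only a minimal sketch of the daemon-worker pool such an interface suggests, not the actual implementation:

import threading
from queue import Queue, Empty

class ThreadPool:
    def __init__(self, num_workers=4):
        self.tasks = Queue()
        self._stop = threading.Event()
        for _ in range(num_workers):
            threading.Thread(target=self._work, daemon=True).start()

    def _work(self):
        # Poll with a timeout so terminate_all_workers() can take effect,
        # and swallow task exceptions so one failing task does not kill
        # the worker (the behavior Example #18 asserts).
        while not self._stop.is_set():
            try:
                func, args, kwargs = self.tasks.get(timeout=0.1)
            except Empty:
                continue
            try:
                func(*args, **kwargs)
            except Exception:
                pass
            finally:
                self.tasks.task_done()

    def add_task(self, func, *args, **kwargs):
        self.tasks.put((func, args, kwargs))

    def terminate_all_workers(self):
        self._stop.set()

The call sequence used in the tests — add_task, tasks.join() to wait for the queue to drain, then terminate_all_workers() — maps directly onto this structure.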
Example #16
class DnsServer:
    def __init__(self, config_name="config.json"):
        self.__config = ServerConfig(config_name)
        logging.basicConfig(filename=self.__config.log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        self.thread_pool = ThreadPool()
        cache_dir = Path.cwd() / self.__config.cache_dir
        self.cache = CacheStorage(cache_dir)
        self.request_handler = RequestHandler(self.cache)

    def run(self):
        """Binds, listens, processing DNS requests on socket"""
        signal.signal(signal.SIGINT, self.__handle_exit)
        s = socket(AF_INET, SOCK_DGRAM)
        s.bind((self.__config.server_host, self.__config.server_port))
        s.settimeout(self.__config.server_timeout)
        logging.info(
            f'Launched at {self.__config.server_host}:{self.__config.server_port}'
        )
        while True:
            try:
                data, addr = s.recvfrom(self.__config.recv_buff_size)
            except SystemExit as e:
                s.close()
                break
            except Exception as e:
                logging.info(str(e))
                s.close()
                break

            self.thread_pool.add_task(self.__process_request, data, addr, s)

    def __process_request(self, data, addr, s_socket):
        response = self.request_handler.handle_query(data)
        s_socket.sendto(response, addr)

    def __handle_exit(self, signal, frame):
        logging.info("Received SIGINT, shutting down threads...")
        print("shutting down...")
        self.thread_pool.tasks.join()
        self.thread_pool.terminate_all_workers()
        logging.info("Threads stopped, updating cache")
        self.cache.cleanup()
        sys.exit(0)
Example #17
def scrap_connections():
    areas = set()
    airports = []
    with open('csv_files/top_100_airports.csv', 'rb') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            for airport in row:
                airports.append(airport)

    origins = dests = airports

    dates = []

    for i in range(100):
        new_date = datetime.date(2014, 11, 2) + timedelta(days=i)
        dates.append(new_date)

    pool = ThreadPool(20)
    pool.start()
    for dest in dests:
        for origin in origins:
            if dest != origin:
                date = dates[randint(0, len(dates)-1)]
                pool.add_task(get_connections, origin, dest, date)
                areas.add(get_area(origin, dest))

    pool.wait_completion()

    #arrange all connections in single set per area instead of list
    areas_conn = dict()
    for area in areas:
        conn_list = flight_resp_dal.get_connections_in_area(area)
        connections = set()
        for conn in conn_list:
            connections.update(set(conn))
        areas_conn[area] = connections

    flight_resp_dal.clean_areas_to_connections_table()

    for area in areas:
        flight_resp_dal.add_connections_to_area(area, areas_conn[area])
Example #18
def test_thread_pool_with_exception():
    """
    thread pool should be able to handle task processing
    even if there were exceptions in some tasks
    """
    thread_pool = ThreadPool()
    result = []

    def throw_ex_task():
        raise Exception()

    def populate_result_task():
        result.extend([i for i in range(0, 10)])
        return

    thread_pool.add_task(throw_ex_task)
    thread_pool.add_task(populate_result_task)

    thread_pool.tasks.join()
    thread_pool.terminate_all_workers()

    assert result == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Example #19
def proxy_server(host, port):
    addr = host, port

    tp = ThreadPool(WORKERS)

    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            sock.bind(addr)
            sock.listen(WORKERS)

            print('Listening on', addr)

            while True:
                if not select.select([sock], [], [], 1)[0]:
                    continue

                conn, client_addr = sock.accept()
                tp.add_task(proxy_connection, conn, client_addr)
    except socket.error:
        print('Cannot run the server')

    tp.wait_completion()
Example #20
class VayantConnector(object):
    def __init__(self):
        self.flights_resp_dal = FlightsRespDAL()
        self.pool = ThreadPool(30)
        self.pool.start()


    def get_flight_price_async(self, trip):
        response = AsyncResponse(self.do_after_done)
        self.pool.add_task(self.calculate_flight_info, trip, response)
        return response

    def get_flight_from_cache(self, key):

        cached_resp = self.flights_resp_dal.get(key)
        while self.flights_resp_dal.has_key(key) and cached_resp is None:
            time.sleep(5)
            cached_resp = self.flights_resp_dal.get(key)
        return cached_resp

    def calculate_flight_info(self, trip, response):
        resp = self.get_flights_info(trip)
        response.set_response_value(resp)

    def get_flights_info(self, trip):
        resp = None

        key = self._create_cache_key_from_trip(trip)
        cached_resp = self.get_flight_from_cache(key)
        if cached_resp:
            return cached_resp

        try:
            self.flights_resp_dal.set(key, None)

            request_json = self.build_trip_request(trip)

            header = {"Content-Type": "application/JSON ", "Accept-encoding": "gzip"}
            req = urllib2.Request("http://fs-json.demo.vayant.com:7080/", data=json.dumps(request_json), headers=header)
            response = urllib2.urlopen(req)
            resp = self._decompress_and_extract_json(response)

            trip = self._get_flights_from_vayant_response(resp)
            if not trip:
                self.flights_resp_dal.remove(key)
                return None

            self.flights_resp_dal.set(key, trip)
        finally:
            if not resp:
                self.flights_resp_dal.remove(key)

        return resp

    def _create_cache_key_from_trip(self, trip):
        key = ""
        for single_trip in trip:
            key += single_trip["Origin"][0] + "-"
            key += single_trip["Destination"][0] + "-"
            for date in single_trip["DepartureDates"]:
                key += date["Date"] + "-"
            key += ":"
        return key


    def _decompress_and_extract_json(self, response):
        decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)

        json_resp = ""

        while True:
            data = response.read(8192)
            if not data: break
            if response.info().get('Content-Encoding') == 'gzip':
                data = decompressor.decompress(data)
            json_resp += data

        return json.loads(json_resp)


    # def print_single_flight(self, flight):
    #     response = "\n"
    #     response += flight["Fares"][0]["Origin"] + " -> " + flight["Fares"][0]["Destination"] + ":\n"
    #     response += "total price = {}".format(flight['Price']['Total']['Amount']) + "\n"
    #     response += "flights details: "+ "\n"
    #     for leg in flight["Flights"]:
    #         response += "\t" + leg["Origin"] + " -> " + leg["Destination"] + ":"+ "\n"
    #         response += "\t departure: " + leg["Departure"]+ "\n"
    #         response += "\t arrival: " + leg["Arrival"]+ "\n"
    #         if self.flights_resp_dal.get_airline(leg["OperatingCarrier"]) is not None:
    #             response += "\t carrier: " + self.flights_resp_dal.get_airline(leg["OperatingCarrier"])+ "\n"
    #         else:
    #             response += "\t carrier: " + leg["OperatingCarrier"]+ "\n"
    #     return response


    def get_departure_flight_date(self, trip_response):
        return trip_response['Flights'][0]['Departure'][0:10]

    def get_return_flight_date(self, trip_response):
        return trip_response['Flights'][-1]['Departure'][0:10]

    def extract_cheapest_price(self, resp):
        sorted_response = sorted(resp['Journeys'], key=lambda trip: trip[0]['Price']['Total']['Amount'])
        try:
            return sorted_response[0][0]['Price']['Total']['Amount']
        except:
            print "ERROR getting the price", resp, sorted_response
            return 0

    def get_connections_list(self, trip):
        connections = set()
        for single in trip['Journeys']:
            if len(single[0]["Flights"]) == 2:
                if single[0]["Flights"][0]["Destination"] == single[0]["Flights"][1]["Origin"]:
                    connections.add(single[0]["Flights"][0]["Destination"])
        return connections

    def get_flight(self, trip, index):
        try:
            x = trip['Journeys'][index][0]
            return x
        except:
            return None

    def get_price(self, flight):
        if flight:
            return flight['Price']['Total']['Amount']
        return 99999

    def get_dest_flights_in_two_way(self, trip, connection):
        for i in xrange(len(trip['Flights'])-1):
            if trip['Flights'][i]['Destination'] == connection:
                return trip['Flights'][i], trip['Flights'][i + 1]

        return None, None

    def get_price_round_trip(self, origin, dest, depart_dates, arrive_dates, get_full_response=False):
        first_trip = self.build_trip(origin, dest, depart_dates, 1)
        second_trip = self.build_trip(dest, origin, arrive_dates, 2)
        return self.get_flight_price_async([first_trip, second_trip])



    def get_price_one_way(self, origin, dest, depart_dates):
        first_trip = self.build_trip(origin, dest, depart_dates, 1)
        return self.get_flight_price_async([first_trip])


    def do_after_done(self, resp):
        trip_data = resp
        if trip_data and trip_data.has_key("Journeys") and trip_data['Journeys'] and len(trip_data['Journeys']) > 0:
            return self.extract_cheapest_price(trip_data), trip_data

        return (None, None)
Example #21
def entities():
    error = None
    if request.method == 'POST':
        query = request.form['query'].lower()
        query = query.replace(" ", "%20")
        if query in app.entity_cache:
            return Response(json.dumps(app.entity_cache[query]), mimetype='application/json')
        url = GOOGLE_NEWS_RSS+query
        response = requests.get(url).text

        # Get article links from the RSS
        links = []
        utils.find_links(links, response)
        if len(links) > 1:
            links = links[2:]

        # Get article titles from the RSS
        titles = []
        utils.find_titles(titles, response)
        if len(titles) > 1:
            titles = titles[2:]

        num_links = len(links)
        entities = []
        pool = ThreadPool(num_links)
        for i in range(num_links):
            pool.add_task(
                utils.get_article_entities, links[i], entities)
        pool.wait_completion()

        result = {}
        num_entities = len(entities)


        entity_dict = defaultdict(int)
        for entity_list in entities:
            for entity in entity_list:
                entity_dict[entity] += 1
        entities_list = reversed(list(sorted(entity_dict.items(), key=lambda x: x[1])))
        entities_list = [x[0] for x in entities_list]
        if len(entities_list) > 5:
            entities_list = entities_list[:5]

        fix_entities = False
        if not entities_list:
            fix_entities = True

        if num_entities > 0:
            entities_set = entities[0]
            if fix_entities:
                if len(entities[0]) > 3:
                    entities_list = [word.title() for word in list(entities[0])[:3]]
                else:
                    entities_list = [word.title() for word in entities[0]]
            result[0] = [word.title() for word in entities[0]]
            for i in range(1, num_entities):
                if fix_entities and len(entities_list) < 3:
                    if len(entities[i]) + len(entities_list) > 3:
                        entities_list.extend([word.title() for word in list(entities[i])[:3-len(entities_list)]])
                    else:
                        entities_list.extend([word.title() for word in entities[i]])
                result[i] = [word.title() for word in entities[i]]

        result["entities"] = entities_list
        app.entity_cache[query] = deepcopy(result)


        return Response(json.dumps(result), mimetype='application/json')
Example #22
return_dates = [depart_date + timedelta(days=21) for depart_date in depart_dates]
return_dates += [depart_date + timedelta(days=7) for depart_date in depart_dates]
return_dates += [depart_date + timedelta(days=80) for depart_date in depart_dates]

if __name__ == "__main__":

    pool = ThreadPool(20, "flight_checker", FlightChecker)

    for dest in dest_list:
        for single_connection in connections_list:
            for origin in origin_list:
                for depart_date in depart_dates:
                    for return_date in return_dates:
                        if depart_date < return_date:
                            if origin != dest != single_connection != origin:
                                pool.add_task(check_flights, origin, dest, single_connection, depart_date, return_date,
                                              final_prices)
    print "Number of tasks", pool.task_number
    pool.start()
    print "Waiting for completion"
    pool.wait_completion()

    print final_prices
    print len(final_prices)
    for cities, price in final_prices.iteritems():
        if True:#"Round Trip" not in price[0]:
            print "{}, {}, price = {}, flights information is: \n".format(cities, price[0], price[1])
            for flight in price[2]:
                Pricer("Vayant").flights_provider.print_single_flight(flight)


Example #23

import threading
from thread_pool import ThreadPool


# Function to be executed by the thread pool
def sum_function(*args):
    _sum_ = 0
    thread = threading.currentThread().getName()
    for arg in args:
        _sum_ += arg
    print(thread, "sum:", _sum_)


# Instantiate a thread pool with 50 worker threads
pool = ThreadPool(50)

# Add the functions to be executed by the threadpool
for _ in range(900):
    pool.add_task(sum_function, 1, 2, 3)

# This is to prevent the threads from still running in the background
pool.kill_all_threads()
Example #24
import re
import os

hh = hackhttp.hackhttp(hackhttp.httpconpool(500))
tp = ThreadPool(500)
package = "wooyun"

if not os.path.exists(package):
    os.mkdir(package)


def vlun(wid):
    print "[+]%s" % wid
    if os.path.isfile(wid + ".html"):
        return
    _, _, html, _, _ = hh.http(url="http://wooyun.org/bugs/%s" % wid,
                               cookcookie=False)
    open(package + "/" + wid + '.html', 'wb').write(html)


def catalog(page):
    _, _, html, _, _ = hh.http(
        url="http://wooyun.org/bugs/new_public/page/%d" % page,
        cookcookie=False)
    for wid in re.findall(r'href="/bugs/(wooyun-\d+-\d+)">', html):
        tp.add_task(vlun, wid)
    if page > 0:
        tp.add_task(catalog, page - 1)


tp.add_task(catalog, 1925)
Example #25
    pool_size = [x for x in range(min_thread, max_thread, thread_step)]
    # Create dict with thread sizes to keep track of time
    for thread_count in pool_size:
        times[thread_count] = []

    for i in pool_size:
        if ovrld.overloaded:
            i = ovrld.opt_work_threads
        if need_count and i == pool_size[-1] \
                or need_count and ovrld.overloaded and i == ovrld.opt_work_threads:
            clients = i * calculate_needed()
        else:
            clients = i * count

        pool = ThreadPool(i)

        # Clients is the final goal, it'll run the thread count for "count" iterations
        # count is from config
        sched_clients += clients
        while clients:
            # Change to your desired function... 
            pool.add_task(time_event, xmlrpc_call, i)
            clients -= 1
            total_clients += 1
            if errors.error_count > errors_threshold:
                quit()
        pool.wait_completion()
        avg_time = sum(times[i]) / len(times[i])
        ovrld.calc_time(avg_time, i)

    quit()
Example #26
class HttpServer:
    __request_parser: RequestParser = RequestParser()
    __request_processor: RequestHandler = RequestHandler()

    def __init__(self, config_name="config.json"):
        self.__config = ServerConfig(config_name)
        logging.basicConfig(filename=self.__config.log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        self.thread_pool = ThreadPool()

    def run(self):
        """Binds, listens, processing HTTP requests on socket"""
        s = socket(AF_INET, SOCK_STREAM)
        s.bind((self.__config.host, self.__config.port))
        s.listen(self.__config.queue_size)
        logging.info(f'Launched at {self.__config.port}')
        while True:
            try:
                client_connection, _ = s.accept()
            except Exception as e:
                logging.info(e)
                s.close()
                break
            client_connection.settimeout(self.__config.max_req_time)
            self.thread_pool.add_task(self.__route_request, client_connection)

    def __route_request(self, client):
        """Routes request to handler if exists, then closes the connection"""
        if self.__config.proxy_pass_mode:
            __proxy_passer = ProxyPasser(client)
            __proxy_passer.run()
            return
        while True:
            try:
                raw_request = self.__read_from_socket(client)
            except timeout:
                logging.info("Caught timeout waiting for socket connection")
                break
            except ReadSocketError:
                bad_response = Response(code=400)
                client.sendall(bad_response.get_bytes())
                client.close()
                logging.info(f'Failed to read request. Returned response'
                             f' {bad_response.code}')
                return
            req = self.__request_parser.parse(raw_request)
            if req.method == "GET":
                response_func, response = self.__request_processor.handle_get(
                    req)
                logging.info(f'Received GET {req.path}, '
                             f'returned response {response.code}')
                response_func(client=client)
            if "Connection" not in req.headers \
                    or req.headers["Connection"].lower() != "keep-alive":
                break
        client.close()

    def __read_from_socket(self, client):
        """Reads request data from socket. If request method or protocol
        are not supported, rejects it"""
        result = bytearray()
        req = None
        head_len = 0
        total_len = None
        while not total_len or head_len < total_len:
            chunk = client.recv(8192)
            if not chunk:
                break
            result += chunk
            head_len += len(chunk)
            if not req:
                req = self.__request_parser.try_get_headers(result)
                if not req:
                    continue
            if req.method not in self.__config.supported_methods or \
                    req.proto not in self.__config.supported_protos:
                logging.info(f'Received unsupported request {req}')
                raise ReadSocketError("Request of this type not supported")
            total_len = req.headers.get("Content-Length")
            if not total_len:
                break
        return result
Example #27
    # Create dict with thread sizes to keep track of time
    for thread_count in pool_size:
        times[thread_count] = []

    for i in pool_size:
        if ovrld.overloaded:
            i = ovrld.opt_work_threads
        if need_count and i == pool_size[-1] \
                or need_count and ovrld.overloaded and i == ovrld.opt_work_threads:
            clients = i * calculate_needed()
        else:
            clients = i * count

        pool = ThreadPool(i)

        # Clients is the final goal, it'll run the thread count for "count" iterations
        # count is from config
        sched_clients += clients
        while clients:
            # Change to your desired function...
            pool.add_task(time_event, xmlrpc_call, i)
            clients -= 1
            total_clients += 1
            if errors.error_count > errors_threshold:
                quit()
        pool.wait_completion()
        avg_time = sum(times[i]) / len(times[i])
        ovrld.calc_time(avg_time, i)

    quit()
Example #28
class ProxyServer:
    __request_parser: RequestParser = RequestParser()
    __response_parser: ResponseParser = ResponseParser()

    def __init__(self, config_name="config.json"):
        self.__config = ServerConfig(config_name)
        logging.basicConfig(filename=self.__config.log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        self.thread_pool = ThreadPool()

    def run(self):
        """Binds, listens, processing HTTP requests on socket"""
        signal.signal(signal.SIGINT, self.__handle_exit)
        s = socket(AF_INET, SOCK_STREAM)
        s.bind((self.__config.host, self.__config.port))
        s.listen(self.__config.queue_size)
        logging.info(f'Launched at {self.__config.port}')
        while True:
            try:
                client_connection, _ = s.accept()
            except SystemExit as e:
                s.close()
                break
            except Exception as e:
                logging.info(e)
                s.close()
                break
            # client_connection.settimeout(self.__config.max_req_time)
            self.thread_pool.add_task(self.__route_request, client_connection)

    def __route_request(self, client):
        """Routes request to handler if exists, then closes the connection"""
        proxy_passer = ProxyPasser(client)
        proxy_passer.run()

    def __read_from_socket(self, sock, is_response):
        """Reads request data from socket. If request method or protocol
        are not supported, rejects it"""
        result = bytearray()
        headers = None
        head_len = 0
        total_len = None
        while not total_len or head_len < total_len:
            chunk = sock.recv(8192)
            if not chunk:
                break
            result += chunk
            head_len += len(chunk)
            if not headers:
                headers = self.try_get_headers(result, is_response)
                if not headers:
                    continue
            total_len_raw = headers.get("Content-Length")
            if total_len_raw:
                total_len = int(total_len_raw)
            else:
                continue
        return result

    def try_get_headers(self, data, is_response):
        try:
            if is_response:
                headers, parsed_msg = self.__response_parser.parse(data)
            else:
                headers, parsed_msg = self.__request_parser.parse(data)
            if parsed_msg:
                return headers
        except ValueError:
            return None

    def __handle_exit(self, signal, frame):
        logging.info("Received SIGINT, shutting down threads...")
        print("shutting down...")
        self.thread_pool.tasks.join()
        self.thread_pool.terminate_all_workers()
        logging.info("Threads stopped")
        sys.exit(0)
Example #29
        topo.del_switchs()
        result = topo.buildNet_Topo(interval)
        if result:
            docker = docker_operate()
            docker.resolve(interval)
    else:
        docker = docker_operate()
        docker.resolve(interval)
        if re == 1:
            topo.del_switchs()
    print interval


if __name__ == '__main__':
    client = zmq.Context()
    print 'Connect to server......'
    socket = client.socket(zmq.REQ)
    socket.connect("tcp://127.0.0.1:6662")
    dealThread = ThreadPool(3)
    while True:
        # get message
        socket.send("word")
        print 'listen......'
        message = socket.recv(102400)
        data = json.loads(message)

        # deal(data)
        dealThread.add_task(deal, data)
        # socket.send("word")
        # thread.start_new_thread(deal,(data,))
Example #30
from thread_pool import ThreadPool
from db.flights_resp import FlightsRespDAL
from flights_data.flight_checks import FlightChecker
from datetime import date

airports_error = []

def check_flights(origin, dest, depart_date, flight_checker):
    if not flight_checker.pricer.get_price_one_way(origin, dest, str(depart_date))[0]:
        airports_error.append(dest)


pool = ThreadPool(20, "flight_checker", FlightChecker)

flight_resp_dal = FlightsRespDAL()

depart = date(2014, 8, 2)
dests = flight_resp_dal.get_all_airports()
print len(dests)
for dest in dests:
    pool.add_task(check_flights, "JFK", dest, depart)
pool.start()
pool.wait_completion()

print airports_error

for y in airports_error:
    flight_resp_dal.airport_collection.remove({"airport_code": y})
Example #31
titles = []
utils.find_titles(titles, response)
if len(titles) > 2:
    titles = titles[2:]

num_links = len(links)
num_titles = len(titles)
if num_titles < num_links:
    links = links[:num_titles]
if num_links < num_titles:
    titles = titles[:num_links]

articles = {}
pool = ThreadPool(num_links)
for i in range(num_links):
    pool.add_task(
        utils.get_article_sentiment, links[i], titles[i], articles)
pool.wait_completion()

sentiments = []

result = {}
result["articles"] = []
for key in articles:
    info = {}
    info["title"] = articles[key]["title"]
    info["link"] = key
    sentiments.append(articles[key]["sentiment"])
    info["sentiment"] = articles[key]["sentiment"]
    info["snippet"] = articles[key]["snippet"]
    result["articles"].append(info)
Example #32
import re
import os

hh = hackhttp.hackhttp(hackhttp.httpconpool(500))
tp = ThreadPool(500)
package = "wooyun"

if not os.path.exists(package):
    os.mkdir(package)


def vlun(wid):
    print "[+]%s" % wid
    if os.path.isfile(wid + ".html"):
        return
    _, _, html, _, _ = hh.http(
        url="http://wooyun.org/bugs/%s" % wid, cookcookie=False)
    open(package + "/" + wid + '.html', 'wb').write(html)


def catalog(page):
    _, _, html, _, _ = hh.http(
        url="http://wooyun.org/bugs/new_public/page/%d" % page,
        cookcookie=False)
    for wid in re.findall(r'href="/bugs/(wooyun-\d+-\d+)">', html):
        tp.add_task(vlun, wid)
    if page > 0:
        tp.add_task(catalog, page - 1)

tp.add_task(catalog, 1925)
Example #33
import logging
from threading import Thread
import asynctask
from thread_pool import ThreadPool

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s|%(levelname)s|%(threadName)s|%(message)s',
)


def foo(x):
    if x == 3:
        raise Exception('More than 2 but less than 4?')
    logging.info(x)


if __name__ == '__main__':
    thread_pool = ThreadPool(4)
    for i in xrange(100):
        thread_pool.add_task(func=foo, args=(i, ))
    thread_pool.join_and_stop()