Example #1
    def _fetch_birth_dates(self, id_list, **kwargs):
        if len(id_list) == 0:
            return pd.DataFrame([], columns=['player_id', 'birth'])

        logger.debug("[%s] entry found with empty birth date" % (len(id_list)))

        def make_fetcher(q):
            def fetch(id_list):
                #q = self._converter(name="Birthday fetcher")
                result = list(
                    map(lambda x: self._sofa_req.parse_player_birtdate(x),
                        id_list))
                # Feed each (player_id, birth) pair back through the converter
                _ = list(
                    map(lambda x: q.update_player_birthday(x[0], x[1]),
                        result))
                # Create a dataframe from the tuples
                #q.get()
                return pd.DataFrame(result, columns=['player_id', 'birth'])

            return fetch

        q = self._converter(name="Birthday fetcher")
        fnc = make_fetcher(q)
        if self._get_config('multithreading'):
            threads = self._get_config('num_of_threads')
            splitted_id_list = split(id_list, threads)
            with TPE(max_workers=threads) as pool:
                df_list = list(pool.map(lambda x: fnc(x), splitted_id_list))
        else:
            df_list = [fnc(id_list)]

        return pd.concat(df_list)
Example #2
    def __init__(self, done_signal, loop):

        # Python is very picky about its event loops. Make sure to
        # schedule everything happening in RedPy onto the same loop.
        self._loop = loop
        self._loop.set_default_executor(TPE())
        # The transport gives us the STDIN and STDOUT of the RedPy Rust
        # process. It is set by the connection_made callback; always
        # assert it is present before use.
        self._transport = None

        # Once we are done we can shutdown the whole shebang.
        self._done_signal = done_signal

        # We read from the RedPy Rust process's STDOUT in an event-driven
        # manner. This buffer accumulates the received bytes for the next message(s).
        self._receive_buf = bytearray()

        # We assign task_ids when scheduling tasks
        self._next_task_id = 0

        # A mapping from ongoing tasks to futures blocking on the availability
        # of the result
        self._submitted_tasks = {}

        super().__init__()
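The constructor above pins a ThreadPoolExecutor onto the event loop as its default executor. For context, a minimal sketch of how a default executor is then used via run_in_executor; every name below is illustrative, not from the source:

import asyncio
from concurrent.futures import ThreadPoolExecutor as TPE

def blocking_io():
    # stand-in for blocking work (illustrative)
    return 42

async def demo():
    loop = asyncio.get_running_loop()
    loop.set_default_executor(TPE())
    # Passing None selects the loop's default executor set above
    result = await loop.run_in_executor(None, blocking_io)
    print(result)

asyncio.run(demo())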
Example #3
def _process_news(feeds: Tuple[FeedEntity]) -> Tuple[FeedEntity]:
    with TPE(max_workers=THREADS_QUANTITY) as executor:
        futures = tuple(
            executor.submit(_mark_late_news, n) for f in feeds for n in f.news)

        for future in as_completed(futures, timeout=THREAD_TIMEOUT):
            future.result()

        return feeds
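One thing worth spelling out about the pattern above: the timeout passed to as_completed bounds the entire iteration, not each individual future, and a TimeoutError is raised if anything is still pending when it expires. A minimal illustration (not from the source):

import time
from concurrent.futures import ThreadPoolExecutor as TPE, as_completed
from concurrent.futures import TimeoutError as FuturesTimeout

with TPE(max_workers=2) as executor:
    futures = [executor.submit(time.sleep, d) for d in (0.1, 5)]
    try:
        for future in as_completed(futures, timeout=1):
            future.result()
    except FuturesTimeout:
        print('some futures were still pending after 1 s')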
Example #4
def thscrape(arg):
    key, urls = arg
    try:
        with TPE(max_workers=4) as exe:
            exe.map(thmap,
                    zip(itertools.cycle(range(4)), urls),
                    timeout=15,
                    chunksize=1)
    except Exception as ex:
        print(ex)
Example #5
def _get_feeds_data(feeds: Tuple[FeedEntity]) -> Tuple[FeedEntity]:
    with TPE(max_workers=THREADS_QUANTITY) as executor:
        futures = tuple(executor.submit(_get_feed_raw_data, f) for f in feeds)

        for future in as_completed(futures, timeout=THREAD_TIMEOUT):
            future.result()

        logging.info('Feeds were received.')

        return feeds
Example #6
    def progress(self):
        self.get_mdt_info()

        self.temp_list = {'head': [], 'value': []}
        with open(os.path.join(self.main_path, 'sql.sql'),
                  encoding='utf-8-sig') as f:
            self.sql_scr = f.read()
        with TPE(1) as executor:
            executor.map(self.executer, self.dt_point_list)
        self.executer_head(self.dt_point_list[1])
        self.writer()
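A subtlety in the snippet above: Executor.map submits eagerly but returns a lazy result iterator, so an exception raised inside self.executer is only re-raised when the results are consumed; here they never are, so worker errors vanish silently. A small self-contained illustration:

from concurrent.futures import ThreadPoolExecutor as TPE

def might_fail(x):
    if x == 2:
        raise ValueError(x)
    return x

with TPE(1) as executor:
    results = executor.map(might_fail, range(4))
    # The ValueError only surfaces once iteration reaches x == 2
    for r in results:
        print(r)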
Example #7
def grabText(folder):
    croppedImgList = os.listdir(folder)
    # Pre-allocate the results list to a fixed length
    results = [0] * len(croppedImgList)
    # Thread pool setup
    with TPE(multiprocessing.cpu_count()*4) as executor:
        for i in range(len(croppedImgList)):
            path = os.path.join(folder,croppedImgList[i])
            executor.submit(ocring,path,i,results)
    return results
Example #8
    def work():
        print('hello', os.getpid())
        from concurrent.futures import ThreadPoolExecutor as TPE

        def task(arg):
            time.sleep(0.5)
            print('Thread:', arg)

        pool = TPE(5)  # a pool of 5 worker threads
        for i in range(100):
            # each submit grabs an idle thread from the pool
            pool.submit(task, i)
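Note that the pool above is never shut down and no future is ever awaited, so work is still in flight when work() returns. The context-manager form waits for completion; a minimal sketch of the same loop using it:

import time
from concurrent.futures import ThreadPoolExecutor as TPE

def task(arg):
    time.sleep(0.5)
    print('Thread:', arg)

# Exiting the `with` block shuts the pool down and joins all tasks
with TPE(5) as pool:
    for i in range(100):
        pool.submit(task, i)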
Example #9
    def _split_fetch_merge(self, drv_list, df, fnc):
        threads = self._get_config('num_of_threads')
        group_df = df.groupby('player_id', group_keys=False)
        splitted_df = split(group_df, threads)

        df_list = []
        for element in splitted_df:
            merged_element = zip(drv_list, element)
            with TPE(max_workers=threads) as pool:
                df_list.append(
                    pool.map(lambda x: fnc(x[0], x[1][1]), merged_element))

        return pd.concat(df_list)
Example #10
    def execute(self, transformer: Callable, upstream: Iterable) -> Generator:
        input_queue = Queue(maxsize=self.threads)
        output_queue = Queue()

        collector = TPE(1, 'thread-pool-executor-collector')
        collector.submit(self._collect, upstream, input_queue)

        workers = TPE(self.threads, 'thread-pool-executor-worker')
        for _ in range(self.threads):
            workers.submit(self._work, input_queue, transformer, output_queue)

        iterator = iterate_until_none(output_queue.get, self.threads)
        try:
            for output in iterator:
                yield output
        finally:
            workers.shutdown(wait=False)
            time.sleep(0.1)
            for thread in workers._threads:
                interrupt_queue_thread(output_queue, thread)
            close_iterator(iterator)

        collector.shutdown()
        workers.shutdown()
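The helpers iterate_until_none, interrupt_queue_thread, and close_iterator are not shown in this example or the next one. A plausible reading of iterate_until_none, consistent with how it is called here (a queue getter plus an optional count of expected end-of-stream sentinels), might look like this; treat it as an assumption, not the project's actual code:

def iterate_until_none(getter, sentinel_count=1):
    # Yield items from getter() until `sentinel_count` None values
    # (end-of-stream markers posted by the workers) have been seen.
    seen = 0
    while seen < sentinel_count:
        item = getter()
        if item is None:
            seen += 1
            continue
        yield item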
Example #11
    def execute(self, transformer: Callable, upstream: Iterable) -> Generator:
        queue = Queue(maxsize=1)
        background = TPE(1, 'background-executor')
        background.submit(self._work, upstream, transformer, queue)

        iterator = iterate_until_none(queue.get)
        try:
            for output in iterator:
                yield output
        finally:
            background.shutdown(wait=False)
            time.sleep(0.1)
            for thread in background._threads:
                interrupt_queue_thread(queue, thread)
            close_iterator(iterator)
Example #12
    def _fetch_date(self, curr_date, *args, **kwargs):
        tournaments = self._get_tournaments(curr_date)

        self.info("Fetching %s tournament from date %s" %  (len(tournaments), curr_date))
        if self._get_config('multithreading'):
            threads = self._get_config('num_of_threads')
            with TPE(max_workers=threads) as pool:
                lst = pool.map(lambda x: self._fetch_tournament(x, date=curr_date, *args, **kwargs), tournaments)
            matches, player_stats = list(map(list, zip(*lst)))            
            return pd.concat(matches), pd.concat(player_stats)

        else:
            lst = list(map(lambda x: self._fetch_tournament(x, date=curr_date, *args, **kwargs), tournaments))
            matches, player_stats = list(map(list, zip(*lst)))
            return pd.concat(matches), pd.concat(player_stats)
Example #13
def main(THREADS):
    futures = []
    start = time()
    with TPE(max_workers=THREADS) as executor:
        for _ in range(THREADS):
            futures.append(executor.submit(use_server))
    # The `with` block above already waits for every submitted future
    # to complete, so no explicit polling is needed here.

    result = [future.result() for future in futures]

    print('{} threaded predictions of {} lines took {:.4} seconds'.format(
        THREADS, len(X_test),
        time() - start))
    return result
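As noted in the comment above, the with block already joins the pool. When futures must outlive their executor block, concurrent.futures.wait blocks without spinning the CPU, unlike polling future.done() in a loop. A minimal sketch (not the author's code):

from concurrent.futures import ThreadPoolExecutor as TPE, wait

def run_all(fn, n):
    pool = TPE(max_workers=n)
    futures = [pool.submit(fn) for _ in range(n)]
    wait(futures)              # blocks until all futures finish
    pool.shutdown()
    return [f.result() for f in futures]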
Example #14
    def fastest(self):
        """
        Retrieves ipv4 dns nameservers,
        and get fastest (<100ms) for your location/network.
        """
        nameservers = self._get_nameservers()
        with TPE() as executor:
            results = []
            for res in tqdm(executor.map(self._check, nameservers),
                            total=len(nameservers)):
                tim = res.get('time')
                if tim:
                    results.append(res)

        for res in sorted(results, key=lambda row: row['time']):
            ip = res.get('ip_address')
            name = res.get('name') or '-'
            tim = res.get('time') or '-'
            print(f'{ip:<15} {tim:>3} ms {name}')
Example #15
    def _do_fetch(self, start_date, end_date, *args, **kwargs):
        # Get the database query as dataframe
        query_df = self._query_executor.get_matches_where_odds_are_null(
            start_date, end_date)
        seasons = query_df['season'].unique()
        tournaments = query_df['tournament'].unique()

        if self._get_config('multithreading'):
            # Multithreading
            param_list = []
            # For loop one thread
            for tr in tournaments:
                # Filter for the tournament
                filtered_df = query_df[(query_df['tournament'] == tr)]
                for season in seasons:
                    # Filter for the season
                    season_filtered_df = filtered_df[(
                        filtered_df['season'] == season)]
                    param_list.append(tuple([tr, season, season_filtered_df]))

            # player_id_gen = split_into(player_ids, cpu_count() * 5)
            with TPE(max_workers=self._get_config(
                    'num_of_threads')) as worker_pool:
                res_list = worker_pool.map(lambda x: self._process(x),
                                           param_list)
                return pd.concat(res_list)

        else:
            # For loop one thread
            res_list = []
            for tr in tournaments:
                # Filter for the tournament
                filtered_df = query_df[(query_df['tournament'] == tr)]
                for season in seasons:
                    # Filter for the season into a new frame so the
                    # tournament-level frame isn't clobbered between iterations
                    season_filtered_df = filtered_df[(
                        filtered_df['season'] == season)]
                    res_list.append(
                        self._process(tuple([tr, season, season_filtered_df])))

            return pd.concat(res_list)
Example #16
    def execute(self, transformer: Callable, upstream: Iterable) -> Generator:
        manager = mp.Manager()
        manager.register('None', type(None))
        input_queue = manager.Queue(maxsize=self.processes)
        output_queue = manager.Queue()

        collector = TPE(1, 'multiprocessing-executor-collector')
        collector.submit(self._collect, upstream, input_queue)

        with mp.Pool(self.processes) as pool:
            for _ in range(self.processes):
                pool.apply_async(MultiProcessingExecutor._work,
                                 args=(input_queue, transformer, output_queue))
            iterator = iterate_until_none(output_queue.get, self.processes)
            try:
                for output in iterator:
                    yield output
            finally:
                close_iterator(iterator)

        collector.shutdown()
Example #17
def multithreader(proxyList):
    httpsProxy = []
    httpProxy = []

    with TPE(max_workers=50) as executor:
        futures = [executor.submit(proxyCheck, uri) for uri in proxyList]
        for future in as_completed(futures):
            output = future.result()
            if len(output[0]) == 1:
                httpsProxy.append(output[0].pop(0))
            elif len(output[1]) == 1:
                httpProxy.append(output[1].pop(0))

    proxyLogger.info(
        f'{len(httpsProxy) + len(httpProxy)} active proxies, '
        f'{len(httpsProxy)} are HTTPS capable and {len(httpProxy)} for HTTP')

    combinedProxies = httpsProxy + httpProxy

    for item in combinedProxies:
        file_write(item)


    return httpsProxy, httpProxy
Example #18
    def fetch_matches(self, event_ids, **kwargs):
        event_ids = listify(event_ids)
        q = kwargs.get('converter', self._converter())
        try:
            # logger.info("Tournament: \'%s\' has %s number of events" % (tr_name, len(event_ids)))
            event_info = map(lambda x: self._req.parse_event(x), event_ids)
            lineups_info = map(lambda x: self._req.parse_lineups_event(x), event_ids)
            player_ids = list()
            try:
                for event, lineup in zip(event_info, lineups_info):
                    # Update the tournaments and season database
                    q.convert_tournaments(event['event'])
                    q.convert_season(event['event']['season'])

                    # Update the teams database
                    q.convert_teams(event['event']['homeTeam'])
                    q.convert_teams(event['event']['awayTeam'])

                    try:
                        home = [h['player'] for h in lineup['homeTeam']['lineupsSorted']]
                        away = [a['player'] for a in lineup['awayTeam']['lineupsSorted']]
                    except KeyError:
                        continue

                    # Convert stadium
                    q.convert_stadium_ref(event)
                    # Convert the referee data
                    q.convert_referee(event)
                    # Convert the match event
                    q.convert_match(event, get_nested(event, 'event', 'tournament', 'uniqueId'))
                    # Get the odds data
                    odds_json = self._req.parse_match_odds(get_nested(event, 'event', 'id'))
                    # Convert the odds
                    q.convert_match_odds(get_nested(event, 'event', 'id'), odds_json)
                    # Convert match statistics
                    q.convert_match_statistic(event)
                    # Convert players
                    players = home + away
                    for pl in players:
                        # Convert the player references
                        q.convert_player_ref(pl)
                        player_ids.append((event['event']['id'], pl['id']))
                    # Convert team lineup
                    try:
                        match_id = event['event']['id']
                        home_id = event['event']['homeTeam']['id']
                        away_id = event['event']['awayTeam']['id']
                        home_lineup = lineup['homeTeam']
                        away_lineup = lineup['awayTeam']

                        team_lineups = zip([home_id, away_id], [home_lineup, away_lineup])

                        for team_id, team_lineup in team_lineups:
                            # Convert manager
                            q.convert_manager(team_lineup)
                            # Convert team lineup
                            q.convert_team_lineup(match_id, team_id, team_lineup)
                            try:
                                for lineup_element in team_lineup['lineupsSorted']:
                                    # Convert the player lineups
                                    q.convert_player_lineup(match_id, team_id, lineup_element)
                            except KeyError:
                                continue

                    except KeyError:
                        pass

            except Exception as err:
                tb = traceback.format_exc()
                logger.error(tb)

            # logger.info("Tournament: \'%s\' has %s number of players" % (tr_name, len(player_ids)))

            # player_id_gen = split_into(player_ids, cpu_count() * 5)
            with TPE() as worker_pool:
                # player_stats_getter = create_worker(SofaScore.parse_player_stat)
                player_stats = worker_pool.map(lambda x: self._req.parse_player_stat(x), player_ids)

            for player in player_stats:
                try:
                    match_id = player['eventData']['id']
                    player_id = player['player']['id']
                except (KeyError, TypeError) as err:
                    continue

                # Convert the player statistics
                q.convert_player_stats(match_id, player_id, player)

        except Exception as err:
            tb = traceback.format_exc()
            logger.error(tb)
            # continue
        finally:
            # NOTE: returning from `finally` swallows any in-flight exception
            return q.get()
Example #19
# PACKET_SIZE = [4, 16, 64]
PACKET_SIZE = [32]
BYTE_DELAY = [0, 1/2000, 1/1000, 1/500]  # seconds of delay per byte

if __name__ == "__main__":
    # Reset to start baud after fail
    # set_module_baud(PORT_UART, 19200, 9600)
    # os.remove(PORT_BLE)

    log = Log('results/log.csv')

    for baud in baud_to_test:
        print(f'\nTesting baud: {baud}')

        set_module_baud(PORT_UART, prev, baud)
        prev = baud

        with TPE(max_workers=3) as executor:
            futb = executor.submit(run_ble_serial)
            sleep(3)

            for size in PACKET_SIZE:
                for delay in BYTE_DELAY:
                    run_test(executor, log, Dir.BLE_UART(), baud, size, size*delay)
                    run_test(executor, log, Dir.UART_BLE(), baud, size, size*delay)

            signal_serial_end()

    set_module_baud(PORT_UART, prev, baud_to_test[0])
    log.close()
Example #20
    async def profile(self, ctx, target: discord.User = None):
        '''Who are you again? You can check this!
        
        PIL code written by Suhail6octoling#9775'''
        if not target:
            target = ctx.author
        if not self.bot.profiles.get(target.id, None):
            await ctx.send(
                f'{str(target) + " does" if target.id != ctx.author.id else "You do"} not have a profile.'
            )
            return
        level, money, xp, note = self.bot.profiles[target.id].values()
        #await ctx.send(f'level: {level}, money: {money}, note: {note}, xp: {xp}')
        xptonext = (level**2) * 100 + 10
        progress = xp / xptonext * 100
        image = Image.new("RGBA", (768, 250), (0, 0, 0, 51))

        def drawavatar():
            try:
                # rstrip() strips a set of characters, not a suffix, so
                # drop the query string from the avatar URL instead
                pfp = Image.open(
                    requests.get(str(target.avatar_url).split('?', 1)[0],
                                 stream=True).raw)
                pfp = pfp.convert("RGBA")
                pfp = pfp.resize((128, 128))
                image.alpha_composite(pfp, dest=(8, 8))
            except Exception as e:
                raise e

        def drawxp():
            bar = Image.open("xpbar-empty.png").convert()
            image.alpha_composite(bar, dest=(8, 144))
            bar = image_tint(bar, colour.hsl2hex(
                ((level % 36) / 36, 0.8, 0.5)))
            bar = bar.crop((0, 0, round(752 * progress / 100), 104))
            image.alpha_composite(bar, dest=(8, 144))

        def drawtext(array):
            draw.text(xy=(array[0][0], array[0][1]),
                      text=array[1],
                      fill=array[2],
                      font=array[3])

        font = ImageFont.truetype("calibri.ttf", 32)
        level_font = ImageFont.truetype("calibri.ttf", 72)
        draw = ImageDraw.Draw(image)
        white = (255, 255, 255)
        black = (200, 200, 200)
        listie = [[(144, 8), "Profile for:", black, font],
                  [(398, 8), "Money:", black, font],
                  [(540, 8), "Level:", black, font],
                  [(144, 105), "XP:", black, font],
                  [(144, 58), str(target), white, font],
                  [(398, 58), "£" + str(money), white, font],
                  [(215, 105), f'({round(xp)}/{xptonext}, {progress:.2f}%)',
                   white, font], [(650, 8),
                                  str(level), white, level_font]]
        with TPE(max_workers=9) as executor:
            for thing in listie:
                executor.submit(drawtext, thing)
            executor.submit(drawxp)
        drawavatar()
        imgByteArr = io.BytesIO()
        image.save(imgByteArr, format="PNG")
        await ctx.send('"' + note + '"' if note else '',
                       file=discord.File(imgByteArr.getvalue(),
                                         filename="level.png"))
        """ def generatetext(*,level,money,note,xp):
    def fetch_day_events(date: str):
        try:
            day, events = SofaScore().parse_by_date(date)
            tournaments = events['sportItem']['tournaments']
        except Exception as e:
            print(e)
        for tr in tournaments:
            try:
                events = tr['events']
            except KeyError:
                print('events of tournament cannot be fetched')
                continue
            event_ids = list()
            for ev in events:
                try:
                    event_ids.append(ev['id'])
                    home = TeamHandler(date, ev['homeTeam'])
                    away = TeamHandler(date, ev['awayTeam'])
                except KeyError:
                    print('Key Error occurred while adding teams to the event')
                    continue
                except Exception as e:
                    print(e)
            event_id_gen = split_into(event_ids, cpu_count() * 5)
            with TPE() as worker_pool:
                event_getter = create_worker(SofaScore().parse_event)
                lineup_getter = create_worker(SofaScore().parse_lineups_event)

                event_info = [
                    x for x in worker_pool.map(event_getter, event_id_gen)
                ]
                lineups_info = [
                    x for x in worker_pool.map(lineup_getter, event_id_gen)
                ]

            stdout.write('all matches of {:40s} played on {} fetched\n'.format(
                tr['tournament']['uniqueName'], date))
            stdout.flush()
            player_ids = list()
            try:
                for ev, lineup in zip(event_info, lineups_info):
                    e_id, event = ev[0]
                    event_id, l = lineup[0]
                    home, away = l.values()
                    try:
                        home = [h['player'] for h in home['lineupsSorted']]
                        away = [a['player'] for a in away['lineupsSorted']]
                    except KeyError:
                        continue
                    EventHandler(date, event, (home, away))
                    players = home + away
                    for pl in players:
                        PlayerHandler(date, pl)
                        player_ids.append((event_id, pl['id']))
            except Exception as e:
                print(e)
                continue
            player_id_gen = split_into(player_ids, cpu_count() * 5)
            with TPE() as worker_pool:
                player_stats_getter = create_worker(
                    SofaScore().parse_player_stat)
                player_stats = worker_pool.map(player_stats_getter,
                                               player_id_gen)
            for s in player_stats:
                try:
                    e_id, p_id, stat = s[0]
                except Exception as e:
                    print(e)
                    continue
                PlayerHandler.add_event(p_id, e_id, stat)
        stdout.write('{:50s} {}\n'.format('date has finished:', date))
        stdout.flush()
Example #22
    with open(IR_TPC_R_CMAP_path,
              'rb') as cmap_file, open(IR_TPC_R_NORM_path, 'rb') as norm_file:
        IR_TPC_R_CMAP = pk.load(cmap_file, fix_imports=True, encoding='latin1')
        IR_TPC_R_NORM = pk.load(norm_file, fix_imports=True, encoding='latin1')

    # put in paths
    save_folder = 'images/2020/m6'
    read_from = 'ncfiles/2020/m6.txt'

    # Initialize processing class
    ABI_proc = ABI_Process(center_lat,
                           center_lon,
                           buffer,
                           save_directory=save_folder,
                           cmap=IR_TPC_R_CMAP,
                           norm=IR_TPC_R_NORM)

    with open(read_from, 'r') as file:
        lines = file.readlines()

    def process(line):
        fs = s3fs.S3FileSystem(anon=True)
        line = line.rstrip()
        fobj = fs.open(line)
        ds = xr.open_dataset(fobj, engine='h5netcdf')
        ABI_proc.channel_proc(ds)

    with TPE(max_workers=min(8, len(lines))) as pool:
        list(pool.map(process, lines))
Example #23
def main():
    with TPE(max_workers=16) as exe:
        r = [r for r in exe.map(calc, list(range(1, 16)))]
    print(r)
Example #24
from funcs_for_test import make_3_dim_list
from concurrent.futures import ThreadPoolExecutor as TPE
from memory_profiler import memory_usage
# Samples this process's memory for ~1 s (i.e. before the pool below runs)
mem = memory_usage(-1, interval=.2, timeout=1)
n = 200
t = 8

tpe = TPE(max_workers=4)

r = tpe.map(make_3_dim_list, [
    n,
] * t)
next(r)
print(mem)
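As the comment above notes, memory_usage(-1, ...) samples before the pool does its work. memory_profiler can also sample while a callable runs by passing a (func, args, kwargs) tuple; a sketch reusing this example's make_3_dim_list, n, and t:

def run_pool():
    with TPE(max_workers=4) as pool:
        # list(...) forces evaluation inside the sampling window
        return list(pool.map(make_3_dim_list, [n] * t))

samples = memory_usage((run_pool, (), {}), interval=.2)
print(max(samples))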
Example #25
lines = file_read.split('\n')

namelist = []
seqlist = []


def add_list(item):
    # NOTE: lines.index(item) returns the first match, so duplicate lines
    # are misclassified, and appends from worker threads land in
    # nondeterministic order, so namelist and seqlist may drift out of sync.
    if lines.index(item) % 2 == 0:
        name_s = item.split('\t')[0]
        cname = name_s.split('>')[1]
        namelist.append(cname)
    else:
        seqlist.append(item)


with TPE(16) as executor:
    executor.map(add_list, lines)

print(len(namelist), 'proteins')
oldt = time.time()
#print(seqlist)
seqlen = len(namelist)
q = np.zeros(seqlen)


#'''
def one2all(item):
    nameid = namelist.index(item)
    # print(nameid,seqlen)
    for i in range(nameid + 1, seqlen):
        #  print(i)
        pass  # loop body truncated in the source listing
Example #26
            result = is_table_have_same_set_of_headers(
                db_settings=db_settings,
                csvfile=csvfs[0],
                table=args.mysql_table,
                mysql_errors=args.mysql_errors)
            if result:
                print(
                    'Existing table has the same set of fields as the input CSV file.')
            else:
                sys.exit(
                    'Existing table has a different set of fields than the input CSV file.\n'
                    +
                    'Use --overwrite-mysql-table to recreate the table with the correct set of fields'
                )

        executor = TPE(max_workers=args.threads)
        for csvf in csvfs:
            executor.submit(loadcsv,
                            db_settings=db_settings,
                            csvfile=csvf,
                            table=args.mysql_table,
                            mysql_errors=args.mysql_errors)

        print('All archives were unpacked. CSV files were put in the queue.')
        executor.shutdown()

    # Removing tar temp file
    print('Removing temp folder.')
    shutil.rmtree(args.temp_dir)

    if args.dry_run:
Example #27
def main(url_list):
    with TPE(max_workers=5) as executor:
        futures = [executor.submit(downloader, link) for link in url_list]
        for future in as_completed(futures):
            download_logger.info(
                f'Thread Closed for Attachment ID {future.result()}')
Example #28
    def _fetch_fifa_stat(self, df):
        # Make groups with player ids
        group_df = df.groupby('player_id', group_keys=False)

        def make_fetcher(q):
            def fetch(player_id, grp):
                #q = self._converter(name="Player ID: %s" % player_id)
                try:
                    #player_id = int(grp['player_id'].unique()[0])
                    # pd.np was removed in pandas 1.0+; use numpy directly
                    # (assumes `import numpy as np` elsewhere in the module)
                    grp.replace({np.nan: None}, inplace=True)
                    try:
                        fifa_idx = int(grp['fifa_id'].unique()[0])
                    except Exception:
                        fifa_idx = None
                    birth = grp['birth'].tolist()[0]
                    player_name = grp['name'].unique()[0]
                    player_name_short = grp['short'].unique()[0]

                    logger.debug("For player ID: %s - Fetching FiFaStats" %
                                 player_id)
                    fifa_stats, fifa_idx = self._fifa_req.parse_fifa_stats(
                        birth=birth,
                        fifa_idx=fifa_idx,
                        name=player_name,
                        short=player_name_short)
                    logger.debug(
                        "For player ID: %s - [%s] FiFa stat found." %
                        (player_id,
                         len(fifa_stats) if fifa_stats is not None else 0))

                    if fifa_stats is not None:
                        player_matches_df = self._query_executor.get_all_match_for_player_id(
                            player_id)
                        if len(player_matches_df.index) > 0:
                            # Make sure the dates are in descending order
                            fifa_stats.sort(reverse=True, key=lambda x: x[0])
                            # List of stats and IDs
                            queue_list = []
                            # Initial first date
                            first_date = datetime.now().date()
                            for fifa_date, fifa_stat in fifa_stats:
                                filtered_df = player_matches_df[
                                    (player_matches_df['date'] < pd.Timestamp(
                                        first_date))
                                    & (player_matches_df['date'] >
                                       pd.Timestamp(fifa_date))]

                                match_ids = filtered_df['match_id'].tolist()
                                first_date = fifa_date
                                if len(match_ids) > 0:
                                    queue_list.append(
                                        (player_id, fifa_stat, match_ids))

                            for player_id, fifa_stat, match_ids in queue_list:
                                for match_id in match_ids:
                                    q.update_fifa_stat(player_id, match_id,
                                                       fifa_stat)
                    else:
                        q.update_has_fifa_stat(player_id, False)
                    # Update the fifa index also
                    if fifa_idx is not None:
                        q.update_fifa_id(player_id, fifa_idx)

                except Exception as err:
                    tb = traceback.format_exc()
                    logger.error(tb)
                #return q.get()

            return fetch

        q = self._converter(name="FifaStat fetcher")
        fnc = make_fetcher(q)
        if self._get_config('multithreading'):
            threads = self._get_config('num_of_threads')
            with TPE(max_workers=threads) as pool:
                pool.map(lambda x: fnc(x[0], x[1]), group_df)
        else:
            list(map(lambda x: fnc(x[0], x[1]), group_df))
        return q.get()