def update_score_odds(self, txt_trows, match_id):
    """Create/update a tsbd.betscoreline per (score, odd) row for a match.

    Each row carries a two-char score string at row[0] and the odd at
    row[1]. Returns the list of betscoreline records.
    """
    lines = []
    for row in txt_trows:
        score_home, score_away = int(row[0][0]), int(row[0][1])
        odd_value = float(row[1])
        # Resolve (or create) the canonical betscore record first.
        score_rec_id = get_or_create_object_sosanh(
            self, 'tsbd.betscore',
            {'betscore1': score_home, 'betscore2': score_away})[0].id
        line = get_or_create_object_sosanh(
            self, 'tsbd.betscoreline',
            {'betscore_id': score_rec_id, 'match_id': match_id},
            {'odd': odd_value})
        lines.append(line)
    return lines
def get_team_and_date(self, match_link, add_update_dict, is_set_must_get_time = True):
    """Resolve both team names and the kickoff date/time for a match.

    Values already present in ``add_update_dict`` ('home', 'away',
    'time', 'date') are used as-is; anything missing is scraped from the
    match page. Returns a 7-tuple:
    (team_dict, match_date, str_time, home, away, soup, html)
    where soup/html are None when no fetch was needed.
    """
    soup = None
    html = None
    # Decide whether the match page must be fetched: needed when a team
    # name is missing, or when time (and, when is_set_must_get_time is
    # False, also date) is not supplied by the caller.
    if is_set_must_get_time:
        check_time_for_get_soup = 'time' not in add_update_dict
    else:
        check_time_for_get_soup = 'time' not in add_update_dict and 'date' not in add_update_dict
    is_get_soup = any(['home' not in add_update_dict, 'away' not in add_update_dict, check_time_for_get_soup])
    if is_get_soup:
        html = request_html(match_link)
        soup = BeautifulSoup(html, 'html.parser')
        # soup = get_soup(match_link)
    if 'home' not in add_update_dict:
        home = soup.select('div#scr_home a')[0].get_text()
        # Strip a trailing ranking suffix such as " [3]" from the name.
        home = re.sub('\s+\[\d+\]', '', home)
    else:
        home = add_update_dict['home']
    if 'away' not in add_update_dict:
        away = soup.select('div#scr_away a')[0].get_text()
        # Away names carry the ranking as a "[3] " prefix instead.
        away = re.sub('\[\d+\]\s+', '', away)
    else:
        away = add_update_dict['away']
    home = home.strip()
    away = away.strip()
    if 'time' in add_update_dict:
        begin_time = add_update_dict['time']
        # Input is local time; 7h are subtracted (presumably UTC+7 ->
        # UTC for storage — TODO confirm against the server timezone).
        dtime = datetime.strptime(begin_time,'%d/%m/%Y %H:%M') - timedelta(hours=7)
        str_time = fields.Datetime.to_string(dtime)
        match_date = dtime.date()
        str_date = fields.Date.to_string(dtime)
    else:
        if is_set_must_get_time:
            begin_time = soup.select('div#scr_start')[0].get_text()
            # Drop the fixed-width label prefix before the date/time text.
            begin_time = begin_time[9:]
            dtime = datetime.strptime(begin_time,'%d/%m/%Y %H:%M') - timedelta(hours=7)
            str_time = fields.Datetime.to_string(dtime)
            match_date = dtime.date()
            str_date = fields.Date.to_string(match_date)
        else:
            # Date-only mode: no time component is produced.
            match_date = datetime.strptime(add_update_dict['date'],'%d/%m/%Y')
            str_date = fields.Date.to_string(match_date)
            str_time = None
    team1_id = get_or_create_object_sosanh(self,'tsbd.team',{'name':home})
    team2_id = get_or_create_object_sosanh(self,'tsbd.team',{'name':away})
    team_dict = {'team1': team1_id.id,
                 'team2': team2_id.id,
                 # 'time':str_time,
                 'date':str_date,
                 }
    return team_dict, match_date, str_time,home,away, soup, html
def map_predict_id(self):
    """Attach a tsbd.predict record to every automatic nd-line.

    The site is inferred from the line's link; only lines that already
    have a match and are in state 'tu_dong' are processed.
    """
    for line in self.ndline_ids:
        if 'bongdanet' in line.link:
            site_name = 'bdnet'
        elif 'aegoal' in line.link:
            site_name = 'aegoal'
        else:
            site_name = 'bdp.com'
        if line.match_id and line.state == 'tu_dong':
            site_id = get_or_create_object_sosanh(
                self, 'tsbd.site', {'name': site_name}).id
            predict = get_or_create_object_sosanh(
                self, 'tsbd.predict',
                {'link': line.link, 'match_id': line.match_id.id},
                {'site_id': site_id,
                 'predict_score1': line.score1,
                 'predict_score2': line.score2,
                 'state': line.state})
            line.predict_id = predict
def get_events(fix_id, self, home, away, event_soup=None):
    """Parse match events (goals and the half-time marker) from the
    fixture-casting table and return a list of tsbd.event ids.

    Fixes vs. the previous version:
    - passing ``event_soup`` used to leave ``soup`` undefined and raise
      NameError; the provided soup is now actually used;
    - ``minute``/``adding_time`` could be unbound or stale from an
      earlier row when minute parsing failed; they now default to False
      for every row;
    - the ``event_soup`` parameter is no longer shadowed by the per-row
      icon lookup, and the bare ``except`` is narrowed to ValueError.
    """
    if event_soup is None:
        event_soup = get_soup_of_events(fix_id, home, away)
    soup = event_soup
    rows = soup.select('div.fixture_casting table tr')
    events = []
    for tr in rows:
        score_soup = tr.select('td:nth-of-type(3)')
        minute_soup = tr.select('td:nth-of-type(1)')
        if score_soup:
            # Minute cell looks like "45'" or "45'+2'"; split off the
            # added time and drop the apostrophes.
            minutes = minute_soup[0].get_text().split('+')
            minute = False
            adding_time = False
            try:
                minute = int(minutes[0].replace("'", ''))
                if len(minutes) == 2:
                    adding_time = int(minutes[1].replace("'", ''))
            except ValueError:
                pass
            str_score = score_soup[0].get_text()
            if str_score:
                # A non-empty score cell marks a goal event.
                des = tr.select('td:nth-of-type(4)')[0].get_text()
                str_scores = str_score.split(':')
                score1 = int(str_scores[0])
                score2 = int(str_scores[1])
                update_dict = {'des': des, 'current_time': minute,
                               'adding_time': adding_time}
                print('update_dict', update_dict)
                score_event = get_or_create_object_sosanh(
                    self, 'tsbd.event',
                    {'event': 'goal', 'match_id': self.id,
                     'score1': score1, 'score2': score2},
                    update_dict).id
                events.append(score_event)
            # The 45.gif icon marks the end of the first half; its
            # description embeds the half-time score as "a:b".
            icon_soup = tr.select('td:nth-of-type(2) img')
            if icon_soup:
                src = icon_soup[0].get('src', None)
                if src == 'img/45.gif':
                    des = tr.select('td:nth-of-type(4)')[0].get_text()
                    m = re.search(r'(\d+):(\d+)', des)
                    score1, score2 = int(m.group(1)), int(m.group(2))
                    update_dict = {'des': des, 'score1': score1,
                                   'score2': score2, 'current_time': minute,
                                   'adding_time': adding_time}
                    print('update_dict', update_dict)
                    haftime_event = get_or_create_object_sosanh(
                        self, 'tsbd.event',
                        {'event': 'h1_finish', 'match_id': self.id},
                        update_dict).id
                    events.append(haftime_event)
    return events
def nhan_dinh_a_match_aegoal(self, *arg, **karg):
    """Scrape one aegoal prediction article and record a tsbd.ndlline.

    Expects karg['atuple'] = (link, _, ((team1, team2), datetime)).
    Returns the predicted score tuple (or falsy when none was parsed).
    """
    link = karg.get('link')
    if not link:
        # Debug path: parse a saved local HTML file instead of fetching.
        # NOTE(review): this branch never assigns ngay/dt/team1_2, so the
        # get_or_create call below would raise NameError — presumably it
        # is only used for manual testing; confirm before relying on it.
        print ('Not link***********')
        file = open('/media/sf_C_DRIVE/D4/dl/testfile_link1.html','r')
        html = file.read()
        soup = BeautifulSoup(html, 'html.parser')
    else:
        atuple = karg.get('atuple')
        link = atuple[0]
        team_1_2_date = atuple[2]
        team1_2 = team_1_2_date[0]
        dt = team_1_2_date[1]
        ngay = dt.date()
        dt = fields.Datetime.to_string(dt)
        print ('Co link************')
        rs = request_html(link)
        soup = BeautifulSoup(rs, 'html.parser')
    # The article body holds the prediction text; du_doan_ti_so extracts
    # the predicted score from it.
    rs = soup.select('div.box-text-detail')[0].get_text()
    ti_so = du_doan_ti_so(rs)
    # raise UserError(ti_so)
    update_dict = {'nd_id':self.id}
    if ti_so:
        update_dict_more = { 'score1':ti_so[0],'score2':ti_so[1], 'state':'tu_dong'}
    else:
        # No score found — flag the line for manual reading.
        update_dict_more = {'state':'can_read_du_doan'}
    update_dict.update(update_dict_more)
    ndlline = get_or_create_object_sosanh(self,'tsbd.ndlline',
        {'link':link, 'ngay':ngay,'ngay_gio':dt,'team1':team1_2[0],'team2':team1_2[1]},
        update_dict)
    return ti_so
def leeching_a_match_function(self, match_link, is_write=True, is_get_thong_ke=True, add_update_dict=None):
    """Leech one match page: resolve teams/date, build the update dict,
    upsert the tsbd.match record and its score-odd lines.

    Fix: ``add_update_dict`` previously defaulted to a shared mutable
    ``{}``; it now defaults to None and a fresh dict per call.
    Returns the tsbd.match record id.
    """
    if add_update_dict is None:
        add_update_dict = {}
    team_and_begintime, match_date, str_time = get_team_date(
        self, match_link, add_update_dict)
    search_dict = team_and_begintime
    update_dict, txt_trows = get_update_dict(
        self, match_link, match_date,
        is_get_thong_ke=is_get_thong_ke,
        add_update_dict=add_update_dict,
        str_time=str_time)
    match_id = get_or_create_object_sosanh(self, 'tsbd.match',
                                           search_dict, update_dict)
    bet_ScoreLines = update_score_odds(self, txt_trows, match_id.id)
    self.log = bet_ScoreLines
    return match_id.id
def leeching_a_match_function(self, match_link, IS_GET_STATISTICS_MATCH=False,
                              add_update_dict=None, is_get_events=False):
    """Leech one match (newer variant): teams/date, statistics, odds,
    lineup and optionally the event list.

    Fix: ``add_update_dict`` previously defaulted to a shared mutable
    ``{}``; it now defaults to None and a fresh dict per call.
    Returns the tsbd.match record id.
    """
    if add_update_dict is None:
        add_update_dict = {}
    fix_id = get_fix_id(match_link)
    team_and_begintime, match_date, str_time, home, away, match_soup, html = get_team_and_date(
        self, match_link, add_update_dict)
    search_dict = team_and_begintime
    thong_ke_dict = self.get_tk(fix_id, home, away)
    update_dict, score_odd_lst_strows = get_update_dict(
        self, match_link, match_date,
        IS_GET_STATISTICS_MATCH=IS_GET_STATISTICS_MATCH,
        add_update_dict=add_update_dict,
        str_time=str_time,
        match_soup=match_soup,
        thong_ke_dict=thong_ke_dict)
    match_id = get_or_create_object_sosanh(self, 'tsbd.match',
                                           search_dict, update_dict)
    # The lineup is fetched from a separate page and written afterwards.
    lineup_dict = self.gen_lineup_new(match_link, search_dict, match_id.id)
    match_id.write(lineup_dict)
    if is_get_events:
        events = get_events(fix_id, match_id, home, away)
        match_id.write({'event_ids': [(6, 0, events)]})
    bet_ScoreLines = update_score_odds(self, score_odd_lst_strows, match_id.id)
    return match_id.id
def get_team_date(self, match_link, add_update_dict):
    """Resolve team names and kickoff date/time (older 3-tuple variant).

    Values present in ``add_update_dict`` are used as-is; missing ones
    are scraped from the match page. Returns
    (team_dict, match_date, str_time); str_time is None in date-only
    mode.

    NOTE(review): this variant calls ``datetime.datetime.strptime``
    while get_team_and_date calls ``datetime.strptime`` — the two
    functions apparently live in modules with different datetime
    imports; confirm the import style of this module.
    """
    # Fetch the page only when something must be scraped.
    if any(['home' not in add_update_dict, 'away' not in add_update_dict, ('time' not in add_update_dict and 'date' not in add_update_dict)]):
        if match_link == False:
            raise UserError('not match_link but add_update_dict %s'%add_update_dict)
        soup = get_soup(match_link)
    else:
        pass
    if 'home' not in add_update_dict:
        home = soup.select('div#scr_home a')[0].get_text()
        # Strip a trailing ranking suffix such as " [3]".
        home = re.sub('\s+\[\d+\]', '', home)
    else:
        home = add_update_dict['home']
    if 'away' not in add_update_dict:
        away = soup.select('div#scr_away a')[0].get_text()
        # Away names carry the ranking as a "[3] " prefix instead.
        away = re.sub('\[\d+\]\s+', '', away)
    else:
        away = add_update_dict['away']
    if 'time' not in add_update_dict and 'date' not in add_update_dict:
        begin_time = soup.select('div#scr_start')[0].get_text()
        # Drop the fixed-width label prefix before the date/time text.
        begin_time = begin_time[9:]
        # 7h subtracted — presumably local UTC+7 -> UTC; confirm.
        dtime = datetime.datetime.strptime(begin_time,'%d/%m/%Y %H:%M') - timedelta(hours=7)
        str_time = fields.Datetime.to_string(dtime)
        match_date = dtime.date()
        str_date = fields.Date.to_string(match_date)
    else:
        if 'time' in add_update_dict:
            begin_time = add_update_dict['time']
            dtime = datetime.datetime.strptime(begin_time,'%d/%m/%Y %H:%M') - timedelta(hours=7)
            str_time = fields.Datetime.to_string(dtime)
            match_date = dtime.date()
            str_date = fields.Date.to_string(dtime)
        else:
            # Date-only mode: no time component is produced.
            match_date = datetime.datetime.strptime(add_update_dict['date'],'%d/%m/%Y')
            str_date = fields.Date.to_string(match_date)
            str_time = None
    team1_id = get_or_create_object_sosanh(self,'tsbd.team',{'name':home})
    team2_id = get_or_create_object_sosanh(self,'tsbd.team',{'name':away})
    team_dict = {'team1': team1_id.id,
                 'team2': team2_id.id,
                 # 'time':str_time,
                 'date':str_date,
                 }
    return team_dict, match_date, str_time
def bxh_function(self, int_round):
    """Recompute standings rows (tsbd.bxh) for this cate up to int_round.

    Writes one bxh row per team for the round, then assigns the three
    ranking columns (stt, stt_bet_over, bet_stt) by their sort orders.
    """
    # Group-stage cates ("Bảng ...") link matches via bang_id instead.
    link_field = 'bang_id' if u'ảng' in self.name else 'cate_id'
    domain = [(link_field, '=', self.id), ('state', '!=', u'Chưa bắt đầu')]
    if int_round:
        domain.append(('round', '<=', int_round))
    match_ids = self.env['tsbd.match'].search(domain)
    # raise UserError(u'%s--%s'%(len(match_ids),domain))
    cate_teams = set(match_ids.mapped('team1.id') + match_ids.mapped('team2.id'))
    bxh_dict = self.gen_bxh_dict(cate_teams, domain, int_round)
    # self.add_diem_doi_dau_new(cate_id, int_round, bxh_dict)
    for team, ateam_bxh_dict in bxh_dict.items():
        ateam_bxh_dict['cate_id'] = self.id
        get_or_create_object_sosanh(
            self, 'tsbd.bxh',
            {'team_id': team, 'cate_id': self.id, 'round': int_round},
            ateam_bxh_dict, is_must_update=True)
    base_domain = [('cate_id', '=', self.id), ('round', '=', int_round)]
    ranked = self.env['tsbd.bxh'].search(
        base_domain, order='diem desc, hsbt desc, score_sum desc')
    for pos, rec in enumerate(ranked, start=1):
        rec.stt = pos
    for pos, rec in enumerate(
            ranked.sorted(key=lambda r: r.bet_over, reverse=True), start=1):
        rec.stt_bet_over = pos
    bet_ranked = self.env['tsbd.bxh'].search(base_domain, order='bet_diem desc')
    for pos, rec in enumerate(bet_ranked, start=1):
        rec.bet_stt = pos
def bxh(self):
    """Recompute standings for this cate, including head-to-head points.

    Writes one tsbd.bxh row per team, then — for every group of teams
    level on points — recomputes points from their mutual matches into
    diem_dd, and finally assigns ranking positions (stt).
    """
    # Group-stage cates ("Bảng ...") link matches via bang_id instead.
    link_field = 'bang_id' if u'ảng' in self.name else 'cate_id'
    domain = [(link_field, '=', self.id), ('state', '!=', u'Chưa bắt đầu')]
    match_ids = self.env['tsbd.match'].search(domain)
    cate_teams = match_ids.mapped('team1.id') + match_ids.mapped('team2.id')
    bxh_dict = self.gen_bxh_dict(cate_teams, domain)
    copied_keys = ('home_t', 'away_t', 'home_h', 'away_h',
                   'home_tg', 'home_th', 'away_tg', 'away_th',
                   'home_match_number', 'away_match_number')
    for team, ateam_bxh_dict in bxh_dict.items():
        new_ateam = {key: ateam_bxh_dict[key] for key in copied_keys}
        new_ateam['cate_id'] = self.id
        get_or_create_object_sosanh(
            self, 'tsbd.bxh',
            {'team_id': team, 'cate_id': self.id},
            new_ateam, is_must_update=True)
    # Head-to-head tiebreak: for each points value shared by more than
    # one team, recompute points over only their mutual matches.
    rg_rs = self.env['tsbd.bxh'].read_group(
        [('cate_id', '=', self.id)], ['team_id', 'diem'], ['diem'], lazy=False)
    for ateam_rg in (grp for grp in rg_rs if grp['__count'] > 1):
        diem = ateam_rg['diem']
        team_ids = self.env['tsbd.bxh'].search(
            [('diem', '=', diem), ('cate_id', '=', self.id)]).mapped('team_id.id')
        dd_domain = [(link_field, '=', self.id),
                     ('state', '!=', u'Chưa bắt đầu'),
                     ('team1', 'in', team_ids), ('team2', 'in', team_ids)]
        doi_dau_bxh_dict = self.gen_bxh_dict(team_ids, dd_domain)
        for team, ex_team_bxh_dict in doi_dau_bxh_dict.items():
            get_or_create_object_sosanh(
                self, 'tsbd.bxh',
                {'team_id': team, 'cate_id': self.id},
                {'diem_dd': ex_team_bxh_dict['diem']}, is_must_update=True)
    ranked = self.env['tsbd.bxh'].search(
        [('cate_id', '=', self.id)], order='diem desc, diem_dd desc')
    for pos, rec in enumerate(ranked, start=1):
        rec.stt = pos
def get_update_dict(self, match_link, match_date, is_get_thong_ke=True,
                    add_update_dict=None, str_time=None):
    """Build the field-update dict for a match: cate/group, score and
    status, odds, period, link, time, and (optionally) the linked
    statistics matches.

    Fix: ``add_update_dict`` previously defaulted to a shared mutable
    ``{}``; it now defaults to None and a fresh dict per call.
    Returns (update_dict, score_odd_lst_strows).
    """
    if add_update_dict is None:
        add_update_dict = {}
    update_dict = {}
    fix_id = get_fix_id(match_link)
    cate_id = False
    if 'cate' in add_update_dict:
        cate_id = get_or_create_object_sosanh(
            self, 'tsbd.cate', {'name': add_update_dict['cate']}).id
    elif match_link:
        # The query string after '?' identifies the league/season.
        cate_ex = re.search(r'\?(.*$)', match_link).group(1)
        cate_id = get_cate(self, fix_id, cate_ex, match_date)
    if cate_id:
        update_dict['cate_id'] = cate_id
    if match_link:
        soup = get_soup(match_link)
        grp_rnd_info = soup.select('div#grp_rnd_info')
        if grp_rnd_info:
            grp_rnd_info = grp_rnd_info[0].get_text()
            # "Bảng ..." marks a group-stage label; store it as a
            # sub-cate linked to the main cate.
            if u'ảng' in grp_rnd_info:
                update_dict['bang_id'] = get_or_create_object_sosanh(
                    self, 'tsbd.cate',
                    {'name': grp_rnd_info, 'cate_id': cate_id}).id
    score_and_status_dict = get_score(fix_id, add_update_dict=add_update_dict)
    update_dict.update(score_and_status_dict)
    score_odd_lst_strows = []
    if match_link:
        odds_adict, score_odd_lst_strows = get_odds(fix_id)
        update_dict.update(odds_adict)
    if 'period' in add_update_dict:
        update_dict['period_id'] = get_or_create_object_sosanh(
            self, 'tsbd.period', {'name': add_update_dict['period']}).id
    update_dict['match_link'] = match_link or add_update_dict.get('match_link')
    if str_time:
        update_dict['time'] = str_time
    if is_get_thong_ke and match_link:
        # Fetch the companion "&Data=stat" page and link its matches.
        match_link = re.sub('&Data=(.*?)$', '', match_link)
        statictis_link = match_link + '&Data=stat'
        statictis_match_ids = self.leech_all_match_function(
            statictis_link, is_write=False, break_count=6,
            is_get_thong_ke=False, take_match_not_link=False)
        update_dict.update(
            {'statictis_match_ids': [(6, 0, statictis_match_ids)]})
    return update_dict, score_odd_lst_strows
def get_update_dict(self, match_link, match_date, IS_GET_STATISTICS_MATCH=False,
                    add_update_dict=None, str_time=None, match_soup=None,
                    thong_ke_dict=None):
    """Build the field-update dict for a match (newer variant that can
    reuse a pre-fetched soup and merge pre-computed statistics).

    Fix: ``add_update_dict`` and ``thong_ke_dict`` previously defaulted
    to shared mutable ``{}``; they now default to None and fresh dicts
    per call.
    Returns (update_dict, score_odd_lst_strows).
    """
    if add_update_dict is None:
        add_update_dict = {}
    if thong_ke_dict is None:
        thong_ke_dict = {}
    update_dict = {}
    fix_id = get_fix_id(match_link)
    cate_id = False
    if 'cate' in add_update_dict:
        cate_id = get_or_create_object_sosanh(
            self, 'tsbd.cate', {'name': add_update_dict['cate']}).id
    else:
        # The query string after '?' identifies the league/season.
        league_season = re.search(r'\?(.*$)', match_link).group(1)
        cate_id = get_cate(self, fix_id, league_season, match_date)
    if cate_id:
        update_dict['cate_id'] = cate_id
    if not match_soup:
        match_soup = get_soup(match_link)
    bang_soup = match_soup.select('div#grp_rnd_info')
    if bang_soup:
        bang_name = bang_soup[0].get_text().replace(':', '')
        # "Bảng ..." marks a group-stage label; store it as a sub-cate.
        if u'ảng' in bang_name:
            update_dict['bang_id'] = get_or_create_object_sosanh(
                self, 'tsbd.cate',
                {'name': bang_name, 'cate_id': cate_id}).id
    score_and_status_dict = get_score(fix_id, add_update_dict=add_update_dict)
    update_dict.update(score_and_status_dict)
    odds_adict, score_odd_lst_strows = get_odds(fix_id)
    update_dict.update(odds_adict)
    if 'period' in add_update_dict:
        update_dict['period_id'] = get_or_create_object_sosanh(
            self, 'tsbd.period', {'name': add_update_dict['period']}).id
    update_dict['match_link'] = match_link or add_update_dict.get('match_link')
    if str_time:
        update_dict['time'] = str_time
    if IS_GET_STATISTICS_MATCH:
        # Fetch the companion "&Data=stat" page and link its matches.
        match_link = re.sub('&Data=(.*?)$', '', match_link)
        statictis_link = match_link + '&Data=stat'
        statictis_match_ids = self.leech_all_match_function(
            statictis_link, is_write=False, break_count=6,
            IS_GET_STATISTICS_MATCH=False)
        update_dict.update(
            {'statictis_match_ids': [(6, 0, statictis_match_ids)]})
    update_dict.update(thong_ke_dict)
    return update_dict, score_odd_lst_strows
def get_cate(self, fix_id, league_season, match_date, soup=None):
    """Resolve (creating if needed) the tsbd.cate for a fixture's league.

    When no soup is supplied, the "other matches" AJAX page for the
    given league/season and date is fetched. Returns the cate id, or
    False when no league header row could be read.
    """
    if not soup:
        template_link = (
            u'http://bongdaso.com/_OtherMatches.aspx?FixtureID=%s&{}'
            u'&CountryRegionID=-1&PlayingDate={}'
        ).format(league_season, match_date.strftime('%d-%m-%Y'))
        soup = get_soup_ajax_link(fix_id, template_link)
    try:
        name = soup.select('table tr.ls')[0].get_text().strip()
    except:
        name = False
    if not name:
        return False
    return get_or_create_object_sosanh(self, 'tsbd.cate', {'name': name}).id
def nhan_dinh_a_match_bondaplus(self, *arg, **karg):
    """Scrape one bongdaplus prediction article and record a tsbd.ndlline.

    Fixes vs. the previous version:
    - the 7-hour timezone offset (local -> stored time) was subtracted
      TWICE (once when building dt and once again afterwards); it is now
      applied exactly once, consistent with get_team_and_date and
      get_team_date;
    - the builtin ``str`` is no longer shadowed by the title variable.
    """
    link = karg.get('link')
    if not link:
        # Debug path: parse a saved local HTML file instead of fetching.
        print('Not link***********')
        file = open('/media/sf_C_DRIVE/D4/dl/testfile_link1.html', 'r')
        html = file.read()
        soup = BeautifulSoup(html, 'html.parser')
    else:
        print('Co link************')
        rs = request_html(link)
        soup = BeautifulSoup(rs, 'html.parser')
    title = soup.select('h1.tit')[0].get_text()
    print('title **', title)
    # Team names appear in the title in several variants: "... X vs Y,",
    # "... X và Y,", or after a colon at the end of the title.
    rs = re.search(r'Nhận định bóng đá (.+?) vs (.+?),', title)
    if not rs:
        rs = re.search(r'Nhận định bóng đá (.+?) và (.+?),', title)
    if not rs:
        rs = re.search(r'Nhận định bóng đá.*?: (.+?) vs (.+?)$', title)
    if not rs:
        rs = re.search(r'Nhận định bóng đá.*?: (.+?) và (.+?)$', title)
    team1 = rs.group(1).strip()
    team2 = rs.group(2).strip()
    # Kickoff looks like "19h30 ... ngày 12/5" (minutes may be absent).
    rs_search = re.search(r'(\d+)h(\d*).*?ngày\s+(\d+)/(\d+)', title)
    parts = [int_a_minute(i) for i in
             (rs_search.group(1), rs_search.group(2),
              rs_search.group(3), rs_search.group(4))]
    # Subtract the 7h offset exactly once (was applied twice before).
    dt = datetime(year=datetime.now().year, month=parts[3], day=parts[2],
                  hour=parts[0], minute=parts[1]) - timedelta(hours=7)
    ngay = dt.date()
    dt = fields.Datetime.to_string(dt)
    update_dict = {'ngay': ngay, 'ngay_gio': dt, 'nd_id': self.id}
    try:
        score1, score2 = self.du_doan(soup)
        update_dict.update(
            {'score1': score1, 'score2': score2, 'state': 'tu_dong'})
    except FETCHERROR:
        # No parsable prediction — flag the line for manual reading.
        update_dict.update({'state': 'can_read_du_doan'})
    ndlline = get_or_create_object_sosanh(
        self, 'tsbd.ndlline',
        {'link': link, 'team1': team1, 'team2': team2},
        update_dict)
def get_cate(self, fix_id, cate_ex, match_date, soup=None):
    """Resolve (creating if needed) the tsbd.cate for a fixture.

    BUGFIX: the league name was cleaned with ``name.split()`` — which
    returns a LIST — instead of ``name.strip()``; the sibling get_cate
    implementation uses strip(), and a list would otherwise be stored
    as the cate name. Also narrows the bare ``except`` to Exception.
    Returns the cate id, or False when no league header row was found.
    """
    if not soup:
        # template_link = u'http://bongdaso.com/_OtherMatches.aspx?FixtureID=%s&LeagueID=1&SeasonID=106&CountryRegionID=-1&PlayingDate=02-02-2019&Data=casting'
        template_link = (
            u'http://bongdaso.com/_OtherMatches.aspx?FixtureID=%s&{}'
            u'&CountryRegionID=-1&PlayingDate={}'
        ).format(cate_ex, match_date.strftime('%d-%m-%Y'))
        soup = get_soup_ajax_link(fix_id, template_link)
    try:
        name = soup.select('table tr.ls')[0].get_text()
        name = name.strip()
    except Exception:
        name = False
    if name:
        cate_id = get_or_create_object_sosanh(
            self, 'tsbd.cate', {'name': name}).id
    else:
        cate_id = False
    return cate_id
def link_oc_(self):
    """Derive and assign site_id from the domain portion of self.link."""
    if not self.link:
        return
    # Pull the host out of "...//host/..." and drop a leading "www.".
    domain_match = re.search('//(.+?)/', self.link)
    site_name = domain_match.group(1).replace('www.', '')
    self.site_id = get_or_create_object_sosanh(
        self, 'tsbd.site', {'name': site_name})
def gen_lineup_new(self, match_link, search_dict, match_id):
    """Scrape the lineup page for both teams and build the
    playerline_ids update dict for a match.

    For each side (home/away) the per-team lineup sub-page is fetched,
    starters and substitutes are parsed, tsbd.player records are
    created/updated (number, name, image, birthday), and one
    tsbd.playerline per player is linked to the match.
    Returns {'playerline_ids': [(6, 0, [...])]}.
    """
    # Normalize the URL so it points at the lineup tab.
    match_link = match_link.replace('Data=Odds', 'Data=lineup').replace(
        'Data=Casting', 'Data=lineup')
    if 'Data=lineup' not in match_link:
        match_link = match_link + '&Data=lineup'
    html = request_html(match_link)
    lineup_dict = {}
    playerlines = []
    # (regex extracting the per-team lineup URL, team id, side label)
    for patern in [
        ("'_HomeLineup_','(.*?)'", search_dict['team1'], 'home'),
        ("'_AwayLineup_','(.*?)'", search_dict['team2'], 'away')
    ]:
        rs = re.search(patern[0], html)
        rs = 'http://bongdaso.com/' + rs.group(1)
        rs = request_html(rs)
        soup = BeautifulSoup(rs, 'html.parser')
        rs = soup.select('div.squad_table table tr')
        # Rows before the separator are starters; rows after are subs.
        da_chinhs = []
        da_phus = []
        alist = da_chinhs
        for count, tr in enumerate(rs):
            if count != 0:  # row 0 is the table header
                if tr.get('class') == ['fixture_separator']:
                    alist = da_phus
                    continue
                gt = tr.get_text()
                number = tr.select('td:nth-of-type(1)')[0].get_text()
                try:
                    number = int(number)
                except:
                    # Shirt number missing/unparseable — search by name only.
                    number = False
                    print('tr**', tr)
                # player_name_tr = tr.select('td:nth-of-type(2) div')[0]
                player_name_tr = tr.select('td:nth-of-type(2)')[0]
                name = player_name_tr.get_text()
                if number:
                    adict_search = {'number': int(number), 'name': name}
                else:
                    adict_search = {'name': name}
                adict_update = {}
                # The name cell's id points at a hidden tooltip div with
                # the player's photo and personal details.
                player_id = player_name_tr.get('id')
                if player_id:
                    player_id = player_id.replace('player_', 'player_tip_')
                    player_id_soup = soup.select('div#%s' % player_id)[0]
                    image_soup = player_id_soup.select(
                        'div.boxBody > table > tr:nth-of-type(1) > td:nth-of-type(1) img'
                    )  #[0].get_text()
                    if image_soup:
                        image_soup = image_soup[0]
                        image_link = image_soup['src']
                        # NOTE(review): this replace is a no-op as written —
                        # presumably it was meant to unescape '&amp;' to '&';
                        # confirm against the page markup.
                        image_link = image_link.replace('&', '&')
                        image_link = 'http://bongdaso.com/' + image_link
                    else:
                        image_link = False
                    trs = player_id_soup.select(
                        'div.boxBody > table > tr:nth-of-type(1) > td:nth-of-type(2) tr'
                    )  #[0].get_text()
                    if image_link:
                        adict_update['image_link'] = image_link
                    # NOTE(review): this inner loop shadows the outer
                    # count/tr loop variables; row 1 of the tooltip table
                    # holds the birthday.
                    for count, tr in enumerate(trs):
                        if count == 0:
                            continue
                        if count == 1:
                            td2 = tr.select(
                                'td:nth-of-type(2)')[0].get_text()
                            dt = datetime.strptime(td2, '%d/%m/%Y')
                            adict_update[
                                'birthday'] = fields.Date.to_string(dt)
                alist.append((adict_search, adict_update))
        # Upsert players and link them to the match per role.
        for da_chinh_or_du_bi in [(da_chinhs, 'da_chinh'), (da_phus, 'du_bi')]:
            players = map(
                lambda i: get_or_create_object_sosanh(
                    self, 'tsbd.player', i[0], i[1]).id,
                da_chinh_or_du_bi[0])
            a_playerlines = map(
                lambda i: get_or_create_object_sosanh(
                    self, 'tsbd.playerline', {
                        'player_id': i,
                        'team_id': patern[1],
                        'home_or_away': patern[2],
                        'da_chinh_hay_du_bi': da_chinh_or_du_bi[1],
                        'match_id': match_id
                    }).id, players)
            playerlines += list(a_playerlines)
    lineup_dict['playerline_ids'] = [(6, 0, playerlines)]
    return lineup_dict