Ejemplo n.º 1
0
def cek_norek(bank: str, norek: str):
    """Look up a bank account number via the account-check API.

    Args:
        bank: Bank code identifying the institution.
        norek: Account number to verify.

    Returns:
        On success, the dict produced by ``format_data()`` for the API
        response. If either argument is empty, an error dict with keys
        'err', 'message' and 'date' is returned instead.
    """
    # Empty strings are falsy; guard both parameters up front.
    if not bank or not norek:
        return {'err': True, 'message': 'one of the parameter cannot be empty', 'date': datetime.now()}

    uri = api_cek_nomor_rekening
    payload = {'bankcode': bank, 'acc': norek}

    # NOTE(review): no timeout or HTTP status check here — a hanging endpoint
    # blocks indefinitely and a non-JSON error page raises from .json();
    # confirm the caller handles those cases.
    result = requests.post(uri, data=payload).json()

    return format_data(result, norek, bank)
Ejemplo n.º 2
0
def update_game_logs(url):
    """
	Update game logs of a certain player. 
	For prototype let's use Isaiah Thomas as an example. 
	"""
    # Check date to make sure because we're only getting
    # the lateest game.
    today_date = date.today()
    try:
        html_file = urlopen(url)
    except:
        print "Game log url does not exist. Skipping this player.", url
        return None
    soup = BeautifulSoup(html_file)
    player_name = soup.find_all("h1")[0].text
    team = helper.get_team(soup)
    # The 6th (counting from 1) tr element in the html texts
    # is the latest_game.
    latest_game = soup.find_all("tr")[5]
    raw_texts = latest_game.find_all("td")
    game_date_str = helper.get_date(str(raw_texts[0]))
    year, month, day = game_date_str.split("-")
    ONE_DAY = timedelta(1)
    # Transforming the scraped date into an date object.
    game_date = date(int(year), int(month), int(day))
    # On account of me being in China which is a day ahead of US time.
    if today_date - game_date == ONE_DAY:
        opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts = helper.format_data(
            raw_texts)
        log = Log(player_name, game_date, team, opponent, minutes, fgm, fga,
                  fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk, stl, foul,
                  turnover, pts)
        print game_date, player_name, team
        return log
Ejemplo n.º 3
0
def get_previous_game_logs(url):
    """
	Get previous game logs of a certain player. 
	"""
    existing_names = helper.check_existing_csv()
    try:
        html_file = urlopen(url)
    except:
        print "Game log url does not exist. Skipping this player."
        return None
    soup = BeautifulSoup(html_file)
    player_name = soup.find_all("h1")[0].text
    if player_name in existing_names:
        print "Skipping players that were scraped."
        return None
    games = soup.find_all("tr")
    team = helper.get_team(soup)
    logs = []
    for game in games:
        raw_texts = game.find_all("td")
        if helper.validate_texts(str(raw_texts[0])):
            game_date = helper.get_date(str(raw_texts[0]))
            opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts = helper.format_data(
                raw_texts)
            log = Log(player_name, game_date, team, opponent, minutes, fgm,
                      fga, fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk,
                      stl, foul, turnover, pts)
            print game_date, player_name, team
            logs.append(log)
    return logs
Ejemplo n.º 4
0
def update_game_logs(url):
	"""
	Update game logs of a certain player. 
	For prototype let's use Isaiah Thomas as an example. 
	"""
	# Check date to make sure because we're only getting 
	# the lateest game. 
	today_date = date.today()
	try:
		html_file = urlopen(url)
	except: 
		print "Game log url does not exist. Skipping this player.", url
		return None
	soup = BeautifulSoup(html_file)
	player_name = soup.find_all("h1")[0].text
	team = helper.get_team(soup)
	# The 6th (counting from 1) tr element in the html texts 
	# is the latest_game.
	latest_game = soup.find_all("tr")[5]
	raw_texts = latest_game.find_all("td")
	game_date_str = helper.get_date(str(raw_texts[0]))
	year, month, day = game_date_str.split("-")
	ONE_DAY = timedelta(1)
	# Transforming the scraped date into an date object. 
	game_date = date(int(year), int(month), int(day))
	# On account of me being in China which is a day ahead of US time.
	if today_date - game_date == ONE_DAY:
		opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts = helper.format_data(raw_texts)
		log = Log(player_name, game_date, team, opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, 
						ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts)
		print game_date, player_name, team
		return log 
Ejemplo n.º 5
0
def get_previous_game_logs(url):
	"""
	Get previous game logs of a certain player. 
	"""
	existing_names = helper.check_existing_csv()
	try:
		html_file = urlopen(url)
	except: 
		print "Game log url does not exist. Skipping this player."
		return None
	soup = BeautifulSoup(html_file)
	player_name = soup.find_all("h1")[0].text
	if player_name in existing_names:
		print "Skipping players that were scraped."
		return None
	games = soup.find_all("tr")
	team = helper.get_team(soup)
	logs = []
	for game in games:
		raw_texts = game.find_all("td")
		if helper.validate_texts(str(raw_texts[0])):
			game_date = helper.get_date(str(raw_texts[0])) 
			opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts = helper.format_data(raw_texts)
			log = Log(player_name, game_date, team, opponent, minutes, fgm, fga, fgp, tpm, tpa, tpp, 
							ftm, fta, ftp, reb, ast, blk, stl, foul, turnover, pts)
			print game_date, player_name, team
			logs.append(log)
	return logs