Esempio n. 1
0
def scraped_insert(db_client, l_race_key):
    """Scrape the race program for one race key and insert it into the DB.

    Args:
        db_client: DB client exposing execute_insert(sql, params).
        l_race_key: race key list — presumably [dated, place, round]
            (TODO confirm against callers).

    Returns:
        True when a header was scraped and rows were inserted,
        False when the scrape produced no header.
    """
    url = scraping.Scraping.url_program
    scraping_client = scraping.Scraping(url, [l_race_key[0]], [l_race_key[1]],
                                        [l_race_key[2]])
    scraping_client.get_raceinfo_by_url_program()
    # An empty header means the scrape found nothing for this key.
    if not scraping_client.out_l_header:
        return False
    # INSERT the header row.
    db_client.execute_insert(sql.Sql.insert_W_RACE_HEAD,
                             l_race_key + scraping_client.out_l_header)
    # INSERT one row per racer; car numbers are 1-based.
    racer_rows = zip(scraping_client.out_l_racer,
                     scraping_client.out_l_represent,
                     scraping_client.out_l_hande,
                     scraping_client.out_l_trialrun,
                     scraping_client.out_l_deviation,
                     scraping_client.out_l_position_x,
                     scraping_client.out_l_position_y)
    for car_no, row in enumerate(racer_rows, start=1):
        db_client.execute_insert(sql.Sql.insert_W_RACE_INFO,
                                 l_race_key + [car_no, *row])
    return True
Esempio n. 2
0
def scraped_insert_result(db_client, l_race_key):
    """Scrape the race result for one race key and insert it into the DB.

    Args:
        db_client: DB client exposing execute_insert(sql, params).
        l_race_key: race key list — presumably [dated, place, round]
            (TODO confirm against callers).

    Returns:
        "" on success; otherwise an error message string (the scraper's
        own message, or the text of the exception raised during insert).
    """
    url = scraping.Scraping.url_result
    scraping_client = scraping.Scraping(url, [l_race_key[0]], [l_race_key[1]],
                                        [l_race_key[2]])
    scraping_client.get_raceresult_by_url_result()
    # No ranking data means the scrape failed; surface its message.
    if not scraping_client.out_l_rank:
        return scraping_client.out_err_msg
    try:
        # INSERT the payoff row; pad the car-number list to 8 entries so
        # 7-car races still fill every column.
        l_param = l_race_key + scraping_client.out_l_payoff + (
            scraping_client.out_l_car_no + [None])[:8]
        db_client.execute_insert(sql.Sql.insert_T_RACE_PAYOFF, l_param)
        # INSERT one result row per car.
        for car_no, racetime, starttime, rank in zip(
                scraping_client.out_l_car_no,
                scraping_client.out_l_racetime,
                scraping_client.out_l_starttime,
                scraping_client.out_l_rank):
            db_client.execute_insert(
                sql.Sql.insert_T_RACE_RESULT,
                l_race_key + [car_no, racetime, starttime, rank])
        return ""
    except Exception as e:
        # Boundary: report the error text to the caller instead of raising.
        print(str(e))
        return str(e)
Esempio n. 3
0
def scraped_insert(db_client, dated):
    """Scrape the places racing on *dated* and seed T_MONTHLY_LOG.

    For each place found, inserts rows for rounds 1-12 with the fixed
    trailing values 0 and None.
    """
    scraping_client = scraping.Scraping(
        scraping.Scraping.url_program, [dated],
        ['isesaki', 'kawaguchi', 'hamamatsu', 'sanyou', 'iizuka'], ['1'])
    scraping_client.get_place_by_url_program()
    for place in scraping_client.out_list_place:
        for round_no in range(1, 13):
            db_client.execute_insert(sql.Sql.insert_T_MONTHLY_LOG,
                                     [dated, place, round_no, 0, None])
Esempio n. 4
0
def scraped_insert(db_client, dated):
    """Scrape the places racing on *dated* and insert them into W_PLACE.

    Returns:
        True when at least one place was scraped and inserted,
        False when the scrape came back empty.
    """
    scraping_client = scraping.Scraping(
        scraping.Scraping.url_program, [dated],
        ['isesaki', 'kawaguchi', 'hamamatsu', 'sanyou', 'iizuka'], ['1'])
    scraping_client.get_place_by_url_program()
    # An empty place list means the scrape produced nothing.
    if len(scraping_client.out_list_place) == 0:
        return False
    # INSERT one row per place.
    for place in scraping_client.out_list_place:
        db_client.execute_insert(sql.Sql.insert_W_PLACE, [dated, place])
    return True
def lambda_handler(event, context):
    """Predict the finishing order of one race and store the result in S3.

    Reads "place" and "round" from event["queryStringParameters"]; the date
    is always taken inside Lambda (today) so all functions agree on it.

    Returns:
        An API-Gateway-style dict: statusCode 200 with the prediction JSON
        on success, statusCode 400 with the error message on any failure.
    """
    try:
        # Request parameters ("race_round" avoids shadowing builtin round()).
        place = event["queryStringParameters"]["place"]
        race_round = event["queryStringParameters"]["round"]

        # Today's date (taken here, not from the request).
        today = datetime.datetime.today()
        dated = today.strftime("%Y-%m-%d")

        # Scrape the race program.
        url = scraping.Scraping.url_program
        scraping_client = scraping.Scraping(url, [dated], [place],
                                            [race_round])
        scraping_client.get_raceinfo_by_url_program()

        # Validate the scrape before analyzing (same order as before:
        # no data, not an 8-car race, no trial-run time, bad track).
        if len(scraping_client.out_l_header) == 0:
            raise Exception(message.Message.ios_err01)
        if scraping_client.out_l_header[7] != "8車制":
            raise Exception(message.Message.ios_err02)
        if scraping_client.out_l_trialrun[0] is None:
            raise Exception(message.Message.ios_err03)
        if scraping_client.out_l_header[6] != "良走路":
            raise Exception(message.Message.ios_err04)

        # Model selection by the maximum horizontal position value.
        if int(max(scraping_client.out_l_position_x)) > 4:
            anaylize_client = anaylize.Sklearn("RandomForestRegressor")
        else:
            anaylize_client = anaylize.Sklearn("LinearRegression")

        # BUG FIX: s3_client was previously created inside the racer loop,
        # so the final put_file raised NameError when there were no racers
        # (and a new client was built on every iteration). Create it once.
        s3_client = awsmanagement.S3()
        l_d_result = []
        train_count = 0

        for i in range(len(scraping_client.out_l_racer)):
            # Training data: one CSV per racer, fetched from S3.
            l_d_train_data = list(
                csv.DictReader(
                    s3_client.get_file(
                        scraping_client.out_l_racer[i] + ".csv",
                        awsmanagement.S3.s3_bucket_data_train)))
            # Accumulate the number of training rows for reporting.
            train_count += len(l_d_train_data)

            # Test data: this racer's scraped program values.
            l_d_test_date = [{
                "車番": i + 1,
                "ハンデ": scraping_client.out_l_hande[i],
                "試走タイム": scraping_client.out_l_trialrun[i],
                "試走偏差": scraping_client.out_l_deviation[i],
                "横ポジション": scraping_client.out_l_position_x[i],
                "縦ポジション": scraping_client.out_l_position_y[i],
                "気温": scraping_client.out_l_header[3],
                "湿度": scraping_client.out_l_header[4],
                "走路温度": scraping_client.out_l_header[5]
            }]

            # Predict the race time (training data comes from the S3 CSVs).
            result = anaylize_client.execute_anaylize(
                l_d_train_data, l_d_test_date, "競争タイム",
                ["試走タイム", "横ポジション", "縦ポジション", "走路温度"])[0]

            # Gun-to-goal time = race time / 100 * (distance + handicap).
            result2 = result / 100 * (
                int(scraping_client.out_l_header[1]) +
                int(scraping_client.out_l_hande[i]))
            # Keep a dict per racer so the list can be sorted into a
            # finishing order below.
            l_d_result.append({
                "車番": str(i + 1),
                "選手名": scraping_client.out_l_racer[i],
                "競争タイム": str(result),
                "並替用": result2
            })

        # Sort by the predicted gun-to-goal time to get the finishing order.
        l_d_result.sort(key=lambda x: x['並替用'])

        # Build the CSV via join (the old += loop was quadratic).
        csv_lines = ["訓練レース数,車番,選手名,競争タイム"]
        for d_result in l_d_result:
            csv_lines.append(",".join([
                str(train_count),
                str(d_result["車番"]),
                d_result["選手名"],
                str(d_result["競争タイム"])
            ]))
        csv_value = "\n".join(csv_lines) + "\n"

        # Map the place code to the Japanese name used as the S3 prefix.
        place_kana = {
            "kawaguchi": "川口",
            "isesaki": "伊勢崎",
            "hamamatsu": "浜松",
            "iizuka": "飯塚",
            "sanyou": "山陽"
        }.get(place, "その他")

        s3_client.put_file(place_kana + "/" + race_round + "R.csv",
                           csv_value,
                           awsmanagement.S3.s3_bucket_data_anaylize)

        # Success response.
        return {
            'statusCode': 200,
            'body': json.dumps({
                "message": "ok",
                "TrainCount": str(train_count),
                "Anaylize": l_d_result
            })
        }

    except Exception as e:
        print(str(e))
        # Error response (CORS headers kept as before).
        return {
            "headers": {
                "Access-Control-Allow-Origin": "*",
                "Access-Control-Allow-Credentials": "true"
            },
            'statusCode': 400,
            'body': json.dumps({"message": str(e)})
        }
Esempio n. 6
0
def lambda_handler(event, context):
    """Refresh the daily S3 race data.

    Deletes yesterday's race-info and analysis-result files, then scrapes
    today's places and exports each 8-car race program (rounds 1-12) as a
    CSV file to the daily bucket.

    Returns:
        {'statusCode': 200, 'body': json of "ok"} on success,
        {'statusCode': 400, 'body': json of "ng"} on any failure.
    """
    try:
        s3_client = awsmanagement.S3()

        # Delete yesterday's race-info files.
        for file_name in s3_client.get_filelist(
                awsmanagement.S3.s3_bucket_data_daily):
            s3_client.delete_file(file_name,
                                  awsmanagement.S3.s3_bucket_data_daily)

        # Delete yesterday's analysis-result files.
        for file_name in s3_client.get_filelist(
                awsmanagement.S3.s3_bucket_data_anaylize):
            s3_client.delete_file(file_name,
                                  awsmanagement.S3.s3_bucket_data_anaylize)

        # Today's date.
        today = datetime.datetime.today()
        dated = today.strftime("%Y-%m-%d")

        # Places that have races today.
        url = scraping.Scraping.url_program
        scraping_client = scraping.Scraping(
            url, [dated],
            ['isesaki', 'kawaguchi', 'hamamatsu', 'sanyou', 'iizuka'], ['1'])
        scraping_client.get_place_by_url_program()

        # Place code -> Japanese name used as the S3 key prefix.
        d_place_kana = {
            "kawaguchi": "川口",
            "isesaki": "伊勢崎",
            "hamamatsu": "浜松",
            "iizuka": "飯塚",
            "sanyou": "山陽"
        }

        # Scrape every round of every place and export 8-car races.
        # ("race_round" avoids shadowing the builtin round().)
        for place in scraping_client.out_list_place:
            for race_round in [str(n) for n in range(1, 13)]:
                scraping_client = scraping.Scraping(url, [dated], [place],
                                                    [race_round])
                scraping_client.get_raceinfo_by_url_program()

                # Skip rounds with no program data.
                if len(scraping_client.out_l_header) == 0:
                    continue
                # Skip races that are not 8-car races.
                if scraping_client.out_l_header[7] != "8車制":
                    continue

                # Build the CSV via join (the old += loop was quadratic).
                csv_lines = ["place,round,car_no,hande,position_x,position_y"]
                for i in range(8):
                    csv_lines.append(",".join([
                        place, race_round,
                        str(i + 1),
                        str(scraping_client.out_l_hande[i]),
                        str(scraping_client.out_l_position_x[i]),
                        str(scraping_client.out_l_position_y[i])
                    ]))
                csv_value = "\n".join(csv_lines) + "\n"

                # Upload under the Japanese place name.
                place_kana = d_place_kana.get(place, "その他")
                s3_client.put_file(
                    place_kana + "/" + race_round + "R.csv", csv_value,
                    awsmanagement.S3.s3_bucket_data_daily)

        # Success response.
        return {'statusCode': 200, 'body': json.dumps("ok")}

    except Exception as e:
        print(str(e))
        # Error response.
        return {'statusCode': 400, 'body': json.dumps("ng")}