import numpy
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import time
from random import randint
import analise_auxiliar

# Load pass ("Pass") samples from every Division-B team and stack them
# row-wise into a single array.
_pass_patterns = [
    "ROBOCUP-2021-VIRTUAL/DIVISION-B/ER_FORCE/ATA/*Pass.csv",
    "ROBOCUP-2021-VIRTUAL/DIVISION-B/KIKS/ATA/*Pass.csv",
    "ROBOCUP-2021-VIRTUAL/DIVISION-B/RoboCin/ATA/*Pass.csv",
    "ROBOCUP-2021-VIRTUAL/DIVISION-B/RoboFEI/ATA/*Pass.csv",
    "ROBOCUP-2021-VIRTUAL/DIVISION-B/TIGERs_Mannheim/ATA/*Pass.csv",
]
array_passe: numpy.ndarray = numpy.concatenate(
    [analise_auxiliar.get_array_from_pattern(p) for p in _pass_patterns]
)

# Split the raw array into features X and target y.  1.12 is a scaling
# factor forwarded to the helper — its exact meaning lives in
# analise_auxiliar (TODO confirm).
X, y = analise_auxiliar.get_x_y_passes(array_passe, 1.12)

# Sweep variable (0..499) plus per-point train/test score accumulators.
x_axis: numpy.ndarray = numpy.arange(500, dtype=numpy.uint16)
score_train: numpy.ndarray = numpy.zeros(x_axis.shape, dtype=numpy.float64)
score_test: numpy.ndarray = numpy.zeros(x_axis.shape, dtype=numpy.float64)

cofs = None  # will hold fitted model coefficients once the loop below runs

start: float = time.time()  # wall-clock start for timing the sweep
for j, i in enumerate(x_axis):
# ===== Example #2 (scraped snippet separator) =====
# (snippet score: 0)
import numpy
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor
from matplotlib import pyplot
import joblib
import time
from random import randint
import analise_auxiliar

# Use the dark theme for every subsequent matplotlib figure.
pyplot.style.use('dark_background')

# Kick ("Chute") samples from the two RoboCup 2019 teams, stacked row-wise.
_chute_patterns = (
    "ROBOCUP-2019/ER_FORCE/ATA/*Chute.csv",
    "ROBOCUP-2019/ZJUNlict/ATA/*Chute.csv",
)
array_chute: numpy.ndarray = numpy.concatenate(
    [analise_auxiliar.get_array_from_pattern(p) for p in _chute_patterns]
)

# Column 0 is the regression target; columns 1-3 are the features.
y: numpy.ndarray = array_chute[:, 0]
X: numpy.ndarray = array_chute[:, [1, 2, 3]]

model_out: DecisionTreeRegressor = DecisionTreeRegressor(
    criterion='mse',
    splitter='best',
    max_depth=3,
    min_samples_split=100 * 1e-3,
    min_samples_leaf=100 * 1e-3,
    min_weight_fraction_leaf=100 * 1e-3,
    max_features='auto',
    random_state=randint(0, 1000),
    max_leaf_nodes=5,
# ===== Example #3 (scraped snippet separator) =====
# (snippet score: 0)
        if numpy.size(var) == 0:
            continue

        for j, _var in enumerate(
                distances_weights[i]
        ):  # apply the weight function for each distance

            if (_var >= MAXIMUM_DISTANCE):
                distances_weights[i][j] = 0.0001
                continue
            distances_weights[i][j] = 1 - _var / MAXIMUM_DISTANCE

    return distances_weights


# All pass ("Passe") samples aggregated across teams.
array_passe: numpy.ndarray = analise_auxiliar.get_array_from_pattern(
    "ALL/*Passe.csv")

# Column 0 is the regression target; columns 1-8 are the features.
# FIX: the original index list [1, 2, 3, 4, 4, 6, 7, 8] duplicated column 4
# and skipped column 5 — almost certainly a typo; use the full 1..8 range.
y: numpy.ndarray = array_passe[:, 0]
X: numpy.ndarray = array_passe[:, [1, 2, 3, 4, 5, 6, 7, 8]]

# Fit a 20-nearest-neighbour regressor with the custom linear distance
# weighting defined above, then persist the model for later use.
knn_out: KNeighborsRegressor = KNeighborsRegressor(
    n_neighbors=20, weights=customized_weights_linear, n_jobs=1).fit(X, y)

joblib.dump(knn_out, "models/avaliacao_passe_knn_with_weights.sav")

# Sweep variable (1..49) plus per-point train/test score accumulators.
x_axis: numpy.ndarray = numpy.fromiter(range(1, 50), dtype=numpy.uint16)
score_train: numpy.ndarray = numpy.full(x_axis.shape, 0, dtype=numpy.float64)
score_test: numpy.ndarray = numpy.full(x_axis.shape, 0, dtype=numpy.float64)

start: float = time.time()  # wall-clock start for timing the sweep
# ===== Example #4 (scraped snippet separator) =====
# (snippet score: 0)
import numpy
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor
import joblib
import time
from random import randint
import analise_auxiliar

# Pass ("Passe") samples from the LARC 2020 virtual competition.
array_passe: numpy.ndarray = analise_auxiliar.get_array_from_pattern(
    "LARC-2020-VIRTUAL/ALL/*Passe.csv")

# Feature/target split.  1.02 is a scaling factor forwarded to the helper —
# its exact meaning lives in analise_auxiliar (TODO confirm).
X, y = analise_auxiliar.get_x_y_passes(array_passe, 1.02)

# Hand-tuned hyper-parameters for the final tree.  NOTE(review): several of
# these kwargs ('mse', 'auto', presort, min_impurity_split) are from an older
# sklearn API — kept verbatim for compatibility with the pinned version.
_tree_params = dict(
    criterion='mse',
    splitter='best',
    max_depth=4,
    min_samples_split=33 * 1e-3,
    min_samples_leaf=80 * 1e-3,
    min_weight_fraction_leaf=87e-3,
    max_features='auto',
    random_state=38,
    max_leaf_nodes=6,
    min_impurity_decrease=0,
    min_impurity_split=None,
    presort='deprecated',
    ccp_alpha=40,
)
tree_out: DecisionTreeRegressor = DecisionTreeRegressor(
    **_tree_params).fit(X, y)

# Persist the fitted model for later use.
joblib.dump(tree_out, "models/avaliacao_passe_tree.sav")

# Sweep variable: 1, 11, 21, ..., 991.
x_axis: numpy.ndarray = numpy.arange(1, 1000, 10, dtype=numpy.uint16)