# Example 1 (score: 0)
def main(queryfile: str, features: Optional[str] = None, halving: bool = False, save: bool = True):
    """Run a hyper-parameter grid search for every model matching a query.

    Loads a JSON model-selection query from *queryfile*, fetches the matching
    models, clears any previously stored parameters for them, then performs a
    (optionally halving) grid search on each model in turn.

    Args:
        queryfile: Path to a JSON file containing the model query.
        features: Optional feature-selection spec forwarded to the search.
        halving: Use successive-halving search instead of exhaustive search.
        save: Persist the search results (forwarded to ``grid_search``).
    """
    service = GridSearchService()
    models = ModelService()
    with open(queryfile, 'r') as f:
        query = json.load(f)
    search_models = models.query_models(query)
    # NOTE(review): parameters are cleared even when save=False; earlier
    # (commented-out) code only cleared them when saving — confirm intent.
    models.clear_parameters(query)
    # Lazy %-style args so formatting is skipped when the level is disabled.
    logging.info("[i] %d models to train", len(search_models))
    for i, m in enumerate(search_models):
        logging.info("==[%d/%d]== MODEL: %s %s %s %s =====",
                     i + 1, len(search_models), m.symbol, m.dataset, m.target, m.pipeline)
        mp = service.create_parameters_search(m, split=0.7, features=features)
        logging.info("[%s] Start grid search", get_timestamp())
        mp = service.grid_search(m, mp, sync=True, verbose=1, n_jobs=8, halving=halving, save=save)
        logging.info("[%s] End grid search", get_timestamp())
# Example 2 (score: 0)
def model_index(features: Optional[bool] = True,
                parameters: Optional[bool] = True,
                tests: Optional[bool] = True,
                query: Optional[dict] = Body(...),
                service: ModelService = Depends(ModelService)):
    """Clear stored model data matching *query*.

    Each flag selects one category to clear; the per-category results from
    the service are collected into the returned mapping.
    """
    cleanup_ops = (
        ('features', features, service.clear_features),
        ('parameters', parameters, service.clear_parameters),
        ('tests', tests, service.clear_tests),
    )
    return {key: op(query) for key, enabled, op in cleanup_ops if enabled}