import json
import time

# ModelService is provided by the surrounding project; its import is not
# shown in the original snippet.

def main(queryfile: str):
    # Load the query document that selects which tests to clear.
    with open(queryfile, 'r') as f:
        query = json.load(f)
    service = ModelService()
    # Announce the destructive operation and give the operator a few
    # seconds to abort before it runs.
    print(f"Clearing {query}")
    time.sleep(5)
    service.clear_tests(query)
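
A minimal usage sketch, assuming the query file holds a flat JSON filter document (the keys below are illustrative, not the service's actual schema):

# Hypothetical usage: write a query file, then hand it to main().
import json

with open('query.json', 'w') as f:
    json.dump({'symbol': 'BTCUSD', 'dataset': 'ohlcv'}, f)  # illustrative keys

main('query.json')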
Example #2
from typing import Optional

from fastapi import Body, Depends

# ModelService is project-specific; its import is not shown in the snippet.

def model_index(features: Optional[bool] = True,
                parameters: Optional[bool] = True,
                tests: Optional[bool] = True,
                query: Optional[dict] = Body(...),
                service: ModelService = Depends(ModelService)):
    # Each flag toggles one class of artifacts to clear; the request body
    # carries the query document selecting the affected models.
    result = {}
    if features:
        result['features'] = service.clear_features(query)
    if parameters:
        result['parameters'] = service.clear_parameters(query)
    if tests:
        result['tests'] = service.clear_tests(query)
    return result
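
The route decorator is not part of this snippet, so the mount point is unknown; below is a hedged client-side sketch, assuming the endpoint is exposed as POST /model/clear on a local instance:

# Hypothetical client call; path, port, and body schema are assumptions.
import requests

resp = requests.post(
    'http://localhost:8000/model/clear',
    params={'features': True, 'parameters': False, 'tests': True},
    json={'symbol': 'BTCUSD'},  # illustrative query document
)
print(resp.json())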
Example #3
import json
import logging

from typing import Optional

# ModelService, MessageException, and get_timestamp come from the
# surrounding project; their imports are not shown in the original snippet.

def main(queryfile: str,
         features: Optional[str] = None,
         parameters: Optional[str] = None,
         save: Optional[bool] = True):
    models = ModelService()
    with open(queryfile, 'r') as f:
        query = json.load(f)
    if save:
        # Drop previous test results for the matched models before re-running.
        models.clear_tests(query)
    test_models = models.query_models(query)
    logging.info("[i] {} models to test".format(len(test_models)))
    failed = []
    for i, m in enumerate(test_models):
        logging.info("==[{}/{}]== MODEL: {} {} {} {} =====".format(
            i + 1, len(test_models), m.symbol, m.dataset, m.target,
            m.pipeline))
        # One test per sliding-window size. A 60-day window (T1) was also
        # created here in an earlier revision but is disabled in the
        # original code.
        tests = [
            models.create_model_test(model=m,
                                     split=0.7,
                                     step={'days': 1},
                                     window={'days': days},
                                     parameters=parameters,
                                     features=features)
            for days in (90, 180, 240)
        ]
        try:
            for label, t in zip(('T2', 'T3', 'T4'), tests):
                logging.info("[{}] {} Start {}".format(get_timestamp(),
                                                       m.symbol, label))
                models.test_model(m, t, sync=True)
        except MessageException as e:
            logging.error("[!] " + e.message)
            failed.append((m.dict(), *(t.dict() for t in tests)))
        except Exception as e:
            logging.exception("[!] " + str(e))
            failed.append((m.dict(), *(t.dict() for t in tests)))

        logging.info("[{}] Done".format(m.symbol))
    # Persist the failures so the run can be inspected or retried later.
    with open('test-failed.json', 'w') as f:
        json.dump(failed, f)
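
The type-hinted signature with plain defaults suggests a CLI wrapper; a minimal entry-point sketch, assuming Typer is the intended runner (this is an assumption, argparse would serve equally well):

# Hypothetical entry point; Typer is not shown in the original snippet.
import typer

if __name__ == '__main__':
    typer.run(main)

Invoked as, e.g., python test_models.py query.json --no-save (the module name is illustrative).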