def update_default_model(filename: str) -> jsonify:
    """Update a default model's description and function parameters.

    Reads the new description and function parameters from the JSON body,
    validates the PATCH request, rebuilds the model wrapper and applies
    the update.

    Args:
        filename: name of the stored model to update (route parameter).

    Returns:
        Either the validation-error response, or a (jsonify, status) tuple
        whose message is the GET URI of the updated model.
    """
    description = request.json[Constants.DESCRIPTION_FIELD_NAME]
    function_parameters = request.json[Constants.FUNCTION_PARAMETERS_NAME]

    request_errors = analyse_patch_request_errors(
        request_validator,
        data,
        filename,
        function_parameters)
    if request_errors is not None:
        return request_errors

    # The module path and class name are recovered from the stored model,
    # not from the request body.
    module_path, class_name = data.get_module_and_class_from_a_model(filename)

    metadata_creator = Metadata(database)
    default_model = DefaultModel(database, filename, metadata_creator,
                                 module_path, class_name, storage)
    default_model.update(description, function_parameters)

    # NOTE(review): '{filename}' restores a redacted '(unknown)' placeholder;
    # it matches the route parameter and mirrors the sibling POST handler,
    # which interpolates the model name at the same position.
    return (
        jsonify({
            Constants.MESSAGE_RESULT:
                f'{Constants.MICROSERVICE_URI_GET}{filename}'
                f'{Constants.MICROSERVICE_URI_GET_PARAMS}'
        }),
        Constants.HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_model():
    """Build a classification model from the datasets named in the request.

    Validates the training/test filenames and classifier names from the
    JSON body, builds the model, and returns the URIs of the prediction
    files with a 201 status.
    """
    db_url = os.environ[DATABASE_URL]
    db_replica_set = os.environ[DATABASE_REPLICA_SET]
    db_name = os.environ[DATABASE_NAME]

    training_filename = request.json[TRAINING_FILENAME]
    testing_filename = request.json[TEST_FILENAME]
    classifiers_name = request.json[CLASSIFIERS_NAME]

    database = Database(
        db_url,
        db_replica_set,
        os.environ[DATABASE_PORT],
        db_name,
    )
    request_validator = UserRequest(database)

    request_errors = analyse_request_errors(
        request_validator, training_filename, testing_filename,
        classifiers_name)
    if request_errors is not None:
        return request_errors

    # Collection URLs for the training and test datasets.
    database_url_training, database_url_test = (
        Database.collection_database_url(db_url, db_name, collection,
                                         db_replica_set)
        for collection in (training_filename, testing_filename)
    )

    metadata_creator = Metadata(database, training_filename, testing_filename)
    model_builder = Model(database, metadata_creator,
                          database_url_training, database_url_test)
    model_builder.build(request.json[MODELING_CODE_NAME], classifiers_name)

    return (
        jsonify({
            MESSAGE_RESULT: create_prediction_files_uri(
                classifiers_name, testing_filename)}),
        HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_default_model() -> jsonify:
    """Register a new default model described by the JSON request body.

    Returns either the validation-error response, or a (jsonify, status)
    tuple whose message is the GET URI of the created model.
    """
    body = request.json
    model_name = body[Constants.MODEL_FIELD_NAME]
    description = body[Constants.DESCRIPTION_FIELD_NAME]
    module_path = body[Constants.MODULE_PATH_FIELD_NAME]
    class_name = body[Constants.CLASS_FIELD_NAME]
    class_parameters = body[Constants.FUNCTION_PARAMETERS_NAME]

    request_errors = analyse_post_request_errors(
        request_validator,
        model_name,
        module_path,
        class_name,
        class_parameters)
    if request_errors is not None:
        return request_errors

    default_model = DefaultModel(database, model_name, Metadata(database),
                                 module_path, class_name, storage)
    default_model.create(description, class_parameters)

    result_uri = (f'{Constants.MICROSERVICE_URI_GET}{model_name}'
                  f'{Constants.MICROSERVICE_URI_GET_PARAMS}')
    return (
        jsonify({Constants.MESSAGE_RESULT: result_uri}),
        Constants.HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_projection():
    """Create a projection of a stored dataset onto the requested fields.

    Connects to the database described by the environment, validates the
    request, runs the projection and returns its GET URI with 201.
    """
    db_url = os.environ[DATABASE_URL]
    db_replica_set = os.environ[DATABASE_REPLICA_SET]
    db_name = os.environ[DATABASE_NAME]

    source_filename = request.json[PARENT_FILENAME_NAME]
    projection_filename = request.json[PROJECTION_FILENAME_NAME]
    projection_fields = request.json[FIELDS_NAME]

    database = Database(
        db_url,
        db_replica_set,
        os.environ[DATABASE_PORT],
        db_name,
    )
    request_validator = UserRequest(database)

    request_errors = analyse_request_errors(
        request_validator, source_filename, projection_filename,
        projection_fields)
    if request_errors is not None:
        return request_errors

    # Source and destination collection URLs.
    database_url_input, database_url_output = (
        Database.collection_database_url(db_url, db_name, collection,
                                         db_replica_set)
        for collection in (source_filename, projection_filename)
    )

    projection = Projection(Metadata(database), database_url_input,
                            database_url_output)
    projection.create(source_filename, projection_filename,
                      projection_fields)

    return (
        jsonify({
            MESSAGE_RESULT:
                MICROSERVICE_URI_GET + projection_filename +
                MICROSERVICE_URI_GET_PARAMS
        }),
        HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_execution() -> jsonify:
    """Create an explore or transform execution from the request payload.

    Reads the service type from the query string and the execution
    attributes from the JSON body, validates them, runs the execution and
    returns the URI where its result can be retrieved, with a 201 status.
    """
    service_type = request.args.get(Constants.TYPE_PARAM_NAME)
    filename = request.json[Constants.NAME_FIELD_NAME]
    description = request.json[Constants.DESCRIPTION_FIELD_NAME]
    module_path = request.json[Constants.MODULE_PATH_FIELD_NAME]
    class_name = request.json[Constants.CLASS_FIELD_NAME]
    class_parameters = request.json[Constants.CLASS_PARAMETERS_FIELD_NAME]
    class_method_name = request.json[Constants.METHOD_FIELD_NAME]
    method_parameters = request.json[Constants.METHOD_PARAMETERS_FIELD_NAME]

    request_errors = analyse_post_request_errors(
        request_validator,
        filename,
        module_path,
        class_name,
        class_parameters,
        class_method_name,
        method_parameters)
    if request_errors is not None:
        return request_errors

    # Pick the storage backend that matches the requested service type;
    # stays None for any other type, as in the original logic.
    storage = None
    if service_type in (Constants.EXPLORE_TENSORFLOW_TYPE,
                        Constants.EXPLORE_SCIKITLEARN_TYPE):
        storage = explore_storage
    elif service_type in (Constants.TRANSFORM_TENSORFLOW_TYPE,
                          Constants.TRANSFORM_SCIKITLEARN_TYPE):
        storage = transform_storage

    data = Data(database, storage)
    parameters = Parameters(database, data)
    metadata_creator = Metadata(database)

    execution = Execution(
        database, filename, service_type, storage, metadata_creator,
        module_path, class_name, class_parameters, parameters)
    execution.create(class_method_name, method_parameters, description)

    # BUG FIX: response_params previously defaulted to None, so the URI of
    # an explore execution ended with the literal text "None". An empty
    # string keeps transform URIs unchanged and fixes explore URIs.
    response_params = ''
    if service_type in (Constants.TRANSFORM_TENSORFLOW_TYPE,
                        Constants.TRANSFORM_SCIKITLEARN_TYPE):
        response_params = Constants.MICROSERVICE_URI_GET_PARAMS

    # NOTE(review): '{filename}' restores a redacted '(unknown)' placeholder;
    # it matches the created execution's name used throughout this handler.
    return (
        jsonify({
            Constants.MESSAGE_RESULT:
                f'{Constants.MICROSERVICE_URI_SWITCHER[service_type]}'
                f'{filename}{response_params}'
        }),
        Constants.HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def update_execution(filename: str) -> jsonify:
    """Update an existing explore or transform execution.

    Reads the service and tool types from the query string and the new
    method/description from the JSON body, validates the PATCH request,
    re-runs the execution and returns its GET URI with a 201 status.

    Args:
        filename: name of the stored execution to update (route parameter).
    """
    service_type = request.args.get(Constants.TYPE_PARAM_NAME)
    tool_type = request.args.get(Constants.TOOL_PARAM_NAME)
    description = request.json[Constants.DESCRIPTION_FIELD_NAME]
    class_method_name = request.json[Constants.METHOD_FIELD_NAME]
    method_parameters = request.json[Constants.METHOD_PARAMETERS_FIELD_NAME]

    # Pick the storage backend for the requested service type; stays None
    # for any other type, as in the original logic.
    storage = None
    if service_type == Constants.EXPLORE_TYPE:
        storage = explore_storage
    elif service_type == Constants.TRANSFORM_TYPE:
        storage = transform_storage

    data = Data(database, storage)

    request_errors = analyse_patch_request_errors(
        request_validator,
        data,
        filename,
        class_method_name,
        method_parameters)
    if request_errors is not None:
        return request_errors

    # Module path, class name and class parameters come from the stored
    # execution, not from the request body.
    module_path, class_name = data.get_module_and_class(filename)
    class_parameters = data.get_class_parameters(filename)

    parameters = Parameters(database, data)
    metadata_creator = Metadata(database)

    execution = Execution(
        database, filename, service_type, storage, metadata_creator,
        module_path, class_name, class_parameters, parameters)
    execution.update(class_method_name, method_parameters, description)

    # BUG FIX: response_params previously defaulted to None, so the URI of
    # an explore execution ended with the literal text "None". An empty
    # string keeps transform URIs unchanged and fixes explore URIs.
    response_params = ''
    if service_type == Constants.TRANSFORM_TYPE:
        response_params = Constants.MICROSERVICE_URI_GET_PARAMS

    # NOTE(review): '{filename}' restores a redacted '(unknown)' placeholder;
    # it matches the route parameter of this PATCH handler.
    return (
        jsonify({
            Constants.MESSAGE_RESULT:
                f'{Constants.MICROSERVICE_URI_SWITCHER[service_type]}'
                f'{tool_type}/{filename}{response_params}'
        }),
        Constants.HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_projection():
    """Project a stored dataset onto a subset of its fields.

    Uses the module-level database configuration; validates the request,
    creates the projection collection and returns its GET URI with 201.
    """
    parent_filename = request.json[PARENT_FILENAME_NAME]
    projection_filename = request.json[PROJECTION_FILENAME_NAME]
    projection_fields = request.json[FIELDS_NAME]

    request_errors = analyse_request_errors(
        request_validator,
        parent_filename,
        projection_filename,
        projection_fields)
    if request_errors is not None:
        return request_errors

    # Source and destination collection URLs.
    database_url_input, database_url_output = (
        Database.collection_database_url(
            database_url, database_name, collection, database_replica_set)
        for collection in (parent_filename, projection_filename)
    )

    projection = Projection(Metadata(database), database_url_input,
                            database_url_output)
    projection.create(parent_filename, projection_filename,
                      projection_fields)

    result_uri = (f'{MICROSERVICE_URI_GET}{projection_filename}'
                  f'{MICROSERVICE_URI_GET_PARAMS}')
    return (
        jsonify({MESSAGE_RESULT: result_uri}),
        HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
def create_histogram():
    """Build a histogram file from the fields of a stored dataset.

    Connects to the database described by the environment, validates the
    request, writes the histogram file and returns its GET URI with 201.
    """
    source_filename = request.json[PARENT_FILENAME_NAME]
    histogram_filename = request.json[HISTOGRAM_FILENAME_NAME]
    fields_name = request.json[FIELDS_NAME]

    database = Database(
        os.environ[DATABASE_URL],
        os.environ[DATABASE_REPLICA_SET],
        os.environ[DATABASE_PORT],
        os.environ[DATABASE_NAME],
    )

    request_errors = analyse_request_errors(
        UserRequest(database), source_filename, histogram_filename,
        fields_name)
    if request_errors is not None:
        return request_errors

    histogram = Histogram(database, Metadata(database))
    histogram.create_file(source_filename, histogram_filename, fields_name)

    return (
        jsonify({
            MESSAGE_RESULT:
                MICROSERVICE_URI_GET + histogram_filename +
                MICROSERVICE_URI_GET_PARAMS}),
        HTTP_STATUS_CODE_SUCCESS_CREATED,
    )
from utils import UserRequest, Database, ObjectStorage, Data, Metadata from typing import Union from constants import Constants app = Flask(__name__) database = Database( os.environ[Constants.DATABASE_URL], os.environ[Constants.DATABASE_REPLICA_SET], int(os.environ[Constants.DATABASE_PORT]), os.environ[Constants.DATABASE_NAME], ) request_validator = UserRequest(database) storage = ObjectStorage(database) data = Data(database, storage) metadata_creator = Metadata(database) parameters_handler = Parameters(database, data) @app.route(Constants.MICROSERVICE_URI_PATH, methods=["POST"]) def create_execution() -> jsonify: service_type = request.args.get(Constants.TYPE_FIELD_NAME) model_name = request.json[Constants.MODEL_NAME_FIELD_NAME] parent_name = request.json[Constants.PARENT_NAME_FIELD_NAME] filename = request.json[Constants.NAME_FIELD_NAME] description = request.json[Constants.DESCRIPTION_FIELD_NAME] class_method = request.json[Constants.METHOD_FIELD_NAME] method_parameters = request.json[Constants.METHOD_PARAMETERS_FIELD_NAME] request_errors = analyse_post_request_errors(
from flask import jsonify, request, Flask import os from database import Dataset, Csv, Generic from utils import Database, UserRequest, Metadata from constants import Constants import json app = Flask(__name__) database_connector = Database(os.environ[Constants.DATABASE_URL], os.environ[Constants.DATABASE_REPLICA_SET], int(os.environ[Constants.DATABASE_PORT]), os.environ[Constants.DATABASE_NAME]) request_validator = UserRequest(database_connector) metadata_creator = Metadata(database_connector) @app.route(Constants.MICROSERVICE_URI_PATH, methods=["POST"]) def create_file(): service_type = request.args.get(Constants.TYPE_FIELD_NAME) url = request.json[Constants.URL_FIELD_NAME] filename = request.json[Constants.FILENAME_FIELD_NAME] request_errors = analyse_request_errors(request_validator, filename, url) if request_errors is not None: return request_errors if service_type == Constants.DATASET_CSV_TYPE: file_downloader = Csv(database_connector, metadata_creator) else:
DATABASE_NAME = "DATABASE_NAME" DATABASE_REPLICA_SET = "DATABASE_REPLICA_SET" MICROSERVICE_URI_GET = "/api/learningOrchestra/v1/explore/histogram/" MICROSERVICE_URI_GET_PARAMS = "?query={}&limit=10&skip=0" app = Flask(__name__) database = Database( os.environ[DATABASE_URL], os.environ[DATABASE_REPLICA_SET], os.environ[DATABASE_PORT], os.environ[DATABASE_NAME], ) request_validator = UserRequest(database) metadata = Metadata(database) @app.route("/histograms", methods=["POST"]) def create_histogram(): parent_filename = request.json[PARENT_FILENAME_NAME] histogram_filename = request.json[HISTOGRAM_FILENAME_NAME] fields_name = request.json[FIELDS_NAME] request_errors = analyse_request_errors(request_validator, parent_filename, histogram_filename, fields_name) if request_errors is not None: return request_errors histogram = Histogram(database, metadata)
mesh_normal_consistency, ) import numpy as np from model import Generator, Discriminator, ContrastiveLoss import cv2 from utils import project_mesh_silhouette, Metadata, project_mesh from NOMO import Nomo from torch.utils.data import DataLoader from tqdm import tqdm import matplotlib.pyplot as plt from pytorch3d.structures import join_meshes_as_batch, Meshes, Textures angle = 0 n = 0 meta = Metadata() print('loading data....') transformed_dataset = Nomo(folder=meta.path) dataloader = DataLoader(transformed_dataset, batch_size=meta.batch_size, shuffle=True) print('done') for i, sample in enumerate(dataloader): sample = sample break # smpl_mesh = load_objs_as_meshes([os.path.join(meta.path, 'male.obj')], device=meta.device) verts, faces_idx, _ = load_obj(os.path.join(meta.path, 'male.obj')) verts.requires_grad = True faces = faces_idx.verts_idx
FIRST_ARGUMENT = 0 DATABASE_URL = "DATABASE_URL" DATABASE_PORT = "DATABASE_PORT" DATABASE_NAME = "DATABASE_NAME" DATABASE_REPLICA_SET = "DATABASE_REPLICA_SET" MICROSERVICE_URI_GET = "/api/learningOrchestra/v1/dataset/" MICROSERVICE_URI_GET_PARAMS = "?query={}&limit=20&skip=0" app = Flask(__name__) database = Database(os.environ[DATABASE_URL], os.environ[DATABASE_REPLICA_SET], os.environ[DATABASE_PORT], os.environ[DATABASE_NAME]) request_validator = UserRequest(database) metadata_handler = Metadata(database) @app.route('/fieldTypes', methods=["PATCH"]) def change_data_type(): parent_filename = request.json[PARENT_FILENAME_NAME] field_types_names = request.json[FIELD_TYPES_NAMES] request_errors = analyse_request_errors(request_validator, parent_filename, field_types_names) if request_errors is not None: return request_errors data_type_converter = DataType(database, metadata_handler) data_type_converter.convert_existent_file(parent_filename,
# RC camera-car server bootstrap: parse the listen port, configure logging,
# then either run access-point onboarding or tear the access point down.

# Command-line interface: the only positional argument is the TCP port.
parser = argparse.ArgumentParser(description='RC Camera Car Server.')
parser.add_argument('port', type=int, help='port to listen on')
args = parser.parse_args()

PORT = args.port
# presumably the device's own address while it runs as a WiFi access
# point — TODO confirm against the AP configuration scripts
AP_HOST = '192.168.4.1'
# empty host string — presumably binds on all interfaces once on WLAN;
# note it is not referenced again in this visible chunk
WLAN_HOST = ''

# Log to a file so the headless device keeps a persistent record.
logging.basicConfig(
    filename='server.log',
    level=logging.DEBUG,
    format='%(asctime)s, msg: %(message)s, level: %(levelname)s',
    datefmt='%m/%d/%Y %H:%M:%S'
)

metadata = Metadata()

# Choose the server mode from current connectivity.
connected = handlers.handle_check_connection()

if not connected:
    logging.debug('starting access point functionality')
    # Access point functionality that only handles returning nearby SSIDs and
    # connecting to WiFi
    server = Server(AP_HOST, PORT, metadata)
    server.add_handler(MsgType.GET_SSID, handlers.handle_get_ssid)
    server.add_handler(MsgType.WIFI_CONN, handlers.handle_connect_wifi)
    # Blocks here serving requests; nothing after this runs in this branch.
    server.receive_forever()
else:
    logging.debug('stopping access point')
    # Stop access point
    subprocess.run('./shell-scripts/stop-ap.sh')