def _create(self, request):
    data = {
        'algo_key': request.data.get('algo_key'),
        'rank': request.data.get('rank'),
        'compute_plan_id': request.data.get('compute_plan_id', ''),
        'in_models_keys': request.data.getlist('in_models_keys'),
        'worker': request.data.get('worker'),
        'tag': request.data.get('tag', '')
    }

    serializer = self.get_serializer(data=data)
    serializer.is_valid(raise_exception=True)

    # Get aggregatetuple pkhash to handle 408 timeout in invoke_ledger
    args = serializer.get_args(serializer.validated_data)

    try:
        data = query_ledger(fcn='createAggregatetuple', args=args)
    except LedgerConflict as e:
        raise LedgerException({'message': str(e.msg), 'pkhash': e.pkhash}, e.status)
    except LedgerError as e:
        raise LedgerException({'message': str(e.msg)}, e.status)
    else:
        pkhash = data.get('key')
        return self.commit(serializer, pkhash)
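# Illustrative sketch (assumption, not the original code): the LedgerException
# raised above carries the response payload and HTTP status so that a public
# `create` entry point can unwrap it into an HTTP response, roughly as below.
# The attribute names `data` and `st` are assumptions about this codebase.
def _example_create_entry_point(self, request, *args, **kwargs):
    try:
        return self._create(request)
    except LedgerException as e:
        # return the payload (including the pkhash on conflict/timeout) as-is
        return Response(e.data, status=e.st)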
def list(self, request, *args, **kwargs):
    try:
        data = query_ledger(fcn='queryAlgos', args=[])
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    # guard against an empty ledger response, as in the data sample list view
    data = data if data else []
    algos_list = [data]

    # parse filters
    query_params = request.query_params.get('search', None)
    if query_params is not None:
        try:
            algos_list = filter_list(
                object_type='algo',
                data=data,
                query_params=query_params)
        except LedgerError as e:
            return Response({'message': str(e.msg)}, status=e.status)
        except Exception as e:
            logging.exception(e)
            return Response(
                {'message': f'Malformed search filters {query_params}'},
                status=status.HTTP_400_BAD_REQUEST)

    for group in algos_list:
        for algo in group:
            replace_storage_addresses(request, algo)

    return Response(algos_list, status=status.HTTP_200_OK)
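# Illustrative sketch (assumption, not the original helper): replace_storage_addresses
# is assumed to rewrite the ledger's storage addresses into absolute URIs served by
# the current node, so clients download assets from the node they queried. The route
# names below are hypothetical.
from django.urls import reverse

def replace_storage_addresses_sketch(request, algo):
    algo['description']['storageAddress'] = request.build_absolute_uri(
        reverse('substrapp:algo-description', args=[algo['key']]))  # hypothetical route
    algo['content']['storageAddress'] = request.build_absolute_uri(
        reverse('substrapp:algo-file', args=[algo['key']]))  # hypothetical route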
def create(self, request, *args, **kwargs):
    # rely on serializer to parse and validate request data
    serializer = self.get_serializer(data=dict(request.data))
    serializer.is_valid(raise_exception=True)

    # get compute_plan_id to handle 408 timeout in next invoke ledger request
    args = serializer.get_args(serializer.validated_data)
    try:
        ledger_response = query_ledger(fcn='createComputePlan', args=args)
    except LedgerConflict as e:
        error = {'message': str(e.msg), 'pkhash': e.pkhash}
        return Response(error, status=e.status)
    except LedgerError as e:
        error = {'message': str(e.msg)}
        return Response(error, status=e.status)

    compute_plan_id = ledger_response.get('computePlanID')

    # create compute plan in ledger
    try:
        data = serializer.create(serializer.validated_data)
    except LedgerError as e:
        error = {'message': str(e.msg), 'computePlanID': compute_plan_id}
        return Response(error, status=e.status)

    # send successful response
    headers = self.get_success_headers(data)
    # renamed from `status` to avoid shadowing the rest_framework status module
    success_status = get_success_create_code()
    return Response(data, status=success_status, headers=headers)
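# Illustrative sketch (assumption): get_success_create_code presumably returns
# 201 when ledger writes are synchronous and 202 when the write is only queued.
# The LEDGER_SYNC_ENABLED setting name is an assumption, not confirmed by this module.
from django.conf import settings

def get_success_create_code_sketch():
    if getattr(settings, 'LEDGER_SYNC_ENABLED', True):
        return status.HTTP_201_CREATED
    return status.HTTP_202_ACCEPTED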
def list(self, request, *args, **kwargs):
    try:
        data = query_ledger(fcn='queryDataSamples', args=[])
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    data = data if data else []

    return Response(data, status=status.HTTP_200_OK)
def list(self, request, *args, **kwargs):
    try:
        nodes = query_ledger(fcn=self.ledger_query_call)
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    current_node_id = get_owner()

    for node in nodes:
        node.update({
            'isCurrent': node['id'] == current_node_id,
        })

    return Response(nodes, status=status.HTTP_200_OK)
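# Example of the response shape produced by the node list above
# (the node ids are made up; only 'id' and the added 'isCurrent' flag
# are implied by the code):
#
#   [
#       {'id': 'nodeA', 'isCurrent': True},
#       {'id': 'nodeB', 'isCurrent': False},
#   ]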
def _create(self, request):
    data = {
        'algo_key': request.data.get('algo_key'),
        'data_manager_key': request.data.get('data_manager_key'),
        'rank': request.data.get('rank'),
        'compute_plan_id': request.data.get('compute_plan_id', ''),
        'in_head_model_key': request.data.get('in_head_model_key', ''),
        'in_trunk_model_key': request.data.get('in_trunk_model_key', ''),
        'out_trunk_model_permissions': {
            'public': False,
            'authorized_ids': request.data.getlist(
                'out_trunk_model_permissions_authorized_ids', []),
        },
        'train_data_sample_keys': request.data.getlist('train_data_sample_keys'),
        'tag': request.data.get('tag', '')
    }

    serializer = self.get_serializer(data=data)
    serializer.is_valid(raise_exception=True)

    # Get compositetraintuple pkhash to handle 408 timeout in invoke_ledger
    args = serializer.get_args(serializer.validated_data)

    try:
        data = query_ledger(fcn='createCompositeTraintuple', args=args)
    except LedgerConflict as e:
        raise LedgerException({'message': str(e.msg), 'pkhash': e.pkhash}, e.status)
    except LedgerError as e:
        raise LedgerException({'message': str(e.msg)}, e.status)
    else:
        pkhash = data.get('key')
        return self.commit(serializer, pkhash)
def leaderboard(self, request, pk):
    sort = request.query_params.get('sort', 'desc')

    try:
        validate_pk(pk)
    except Exception as e:
        return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST)

    try:
        validate_sort(sort)
    except Exception as e:
        return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST)

    try:
        leaderboard = query_ledger(fcn='queryObjectiveLeaderboard', args={
            'objectiveKey': pk,
            'ascendingOrder': sort == 'asc',
        })
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    return Response(leaderboard, status=status.HTTP_200_OK)
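# Illustrative sketch (assumption, not the original validator): given how it is
# called above, validate_sort presumably just rejects anything other than the
# two accepted sort orders.
def validate_sort_sketch(sort):
    if sort not in ('asc', 'desc'):
        raise Exception(f"Invalid sort value '{sort}', expected 'asc' or 'desc'")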
def list(self, request, *args, **kwargs):
    try:
        data = query_ledger(fcn='queryAggregatetuples', args=[])
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    aggregatetuple_list = [data]

    query_params = request.query_params.get('search', None)
    if query_params is not None:
        try:
            aggregatetuple_list = filter_list(
                object_type='aggregatetuple',
                data=data,
                query_params=query_params)
        except LedgerError as e:
            return Response({'message': str(e.msg)}, status=e.status)
        except Exception as e:
            logging.exception(e)
            return Response(
                {'message': f'Malformed search filters {query_params}'},
                status=status.HTTP_400_BAD_REQUEST)

    return Response(aggregatetuple_list, status=status.HTTP_200_OK)
def _create(self, request):
    data = {
        'objective_key': request.data.get('objective_key'),
        'traintuple_key': request.data.get('traintuple_key'),
        'data_manager_key': request.data.get('data_manager_key', ''),
        'test_data_sample_keys': request.data.getlist('test_data_sample_keys'),
        'tag': request.data.get('tag', '')
    }

    serializer = self.get_serializer(data=data)
    serializer.is_valid(raise_exception=True)

    # Get traintuple pkhash to handle 408 timeout in invoke_ledger
    args = serializer.get_args(serializer.validated_data)

    try:
        data = query_ledger(fcn='createTesttuple', args=args)
    except LedgerConflict as e:
        raise LedgerException({'message': str(e.msg), 'pkhash': e.pkhash}, e.status)
    except LedgerError as e:
        raise LedgerException({'message': str(e.msg)}, e.status)
    else:
        pkhash = data.get('key')
        return self.commit(serializer, pkhash)
def filter_list(object_type, data, query_params):
    filters = get_filters(query_params)

    object_list = []

    for user_filter in filters:
        for filter_key, subfilters in user_filter.items():
            if filter_key not in AUTHORIZED_FILTERS[object_type]:
                raise Exception(
                    f'Not authorized filter key {filter_key} for asset {object_type}')

            # Will be appended to object_list after being filtered
            filtered_list = data

            if _same_nature(filter_key, object_type):
                # Filter by own asset
                if filter_key == 'model':
                    for attribute, val in subfilters.items():
                        filtered_list = [x for x in filtered_list
                                         if _get_model_tuple(x).get(attribute) in val]
                elif filter_key == 'objective':
                    for attribute, val in subfilters.items():
                        if attribute == 'metrics':  # specific to nested metrics
                            filtered_list = [x for x in filtered_list
                                             if x[attribute]['name'] in val]
                        else:
                            filtered_list = [x for x in filtered_list
                                             if x[attribute] in val]
                else:
                    for attribute, val in subfilters.items():
                        filtered_list = [x for x in filtered_list
                                         if x.get(attribute) in val]
            else:
                # Filter by other asset

                # Get other asset list
                filtering_data = query_ledger(fcn=FILTER_QUERIES[filter_key], args=[])
                filtering_data = filtering_data if filtering_data else []

                if filter_key in ('algo', 'composite_algo', 'aggregate_algo'):
                    for attribute, val in subfilters.items():
                        filtering_data = [x for x in filtering_data
                                          if x[attribute] in val]
                        hashes = [x['key'] for x in filtering_data]

                        if object_type == 'model':
                            filtered_list = [x for x in filtered_list
                                             if _get_model_tuple(x)['algo']['hash'] in hashes]

                elif filter_key == 'model':
                    for attribute, val in subfilters.items():
                        filtering_data = [x for x in filtering_data
                                          if x['outModel'] is not None
                                          and x['outModel'][attribute] in val]

                        if object_type == 'algo':
                            hashes = [x['algo']['hash'] for x in filtering_data]
                            filtered_list = [x for x in filtered_list
                                             if x['key'] in hashes]
                        elif object_type == 'dataset':
                            hashes = [x['objective']['hash'] for x in filtering_data]
                            filtered_list = [x for x in filtered_list
                                             if x['objectiveKey'] in hashes]
                        elif object_type == 'objective':
                            hashes = [x['objective']['hash'] for x in filtering_data]
                            filtered_list = [x for x in filtered_list
                                             if x['key'] in hashes]

                elif filter_key == 'dataset':
                    for attribute, val in subfilters.items():
                        filtering_data = [x for x in filtering_data
                                          if x[attribute] in val]
                        hashes = [x['key'] for x in filtering_data]

                        if object_type == 'model':
                            filtered_list = [
                                x for x in filtered_list
                                if _get_model_tuple(x).get('dataset', {}).get('openerHash') in hashes]
                        elif object_type == 'objective':
                            objectiveKeys = [x['objectiveKey'] for x in filtering_data]
                            filtered_list = [
                                x for x in filtered_list
                                if x['key'] in objectiveKeys
                                or (x['testDataset']
                                    and x['testDataset']['dataManagerKey'] in hashes)]

                elif filter_key == 'objective':
                    for attribute, val in subfilters.items():
                        if attribute == 'metrics':  # specific to nested metrics
                            filtering_data = [x for x in filtering_data
                                              if x[attribute]['name'] in val]
                        else:
                            filtering_data = [x for x in filtering_data
                                              if x[attribute] in val]

                        hashes = [x['key'] for x in filtering_data]

                        if object_type == 'model':
                            filtered_list = [x for x in filtered_list
                                             if _get_model_tuple(x)['objective']['hash'] in hashes]
                        elif object_type == 'dataset':
                            filtered_list = [x for x in filtered_list
                                             if x['objectiveKey'] in hashes]

            object_list.append(filtered_list)

    return object_list
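# Example usage (the '<asset>:<attribute>:<value>' search syntax is an
# assumption about what get_filters parses; it is not defined in this module):
#
#   algos = query_ledger(fcn='queryAlgos', args=[])
#   groups = filter_list(
#       object_type='algo',
#       data=algos,
#       query_params='algo:name:my_algo')
#
# Each element of `groups` is the input data narrowed by one user filter;
# the list views above iterate over these groups before responding.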