def test_get_remote_file(self):
    content = str(self.script.read())
    remote_file = {
        'storageAddress': 'localhost',
        'hash': compute_hash(content)
    }

    with mock.patch('substrapp.utils.get_computed_hash') as mget_computed_hash:
        pkhash = compute_hash(content)
        mget_computed_hash.return_value = content, pkhash

        content_remote, pkhash_remote = get_remote_file(remote_file)

        self.assertEqual(pkhash_remote, get_hash(self.script))
        self.assertEqual(content_remote, content)

    with mock.patch('substrapp.utils.get_computed_hash') as mget_computed_hash:
        content = content + ' FAIL'
        pkhash = compute_hash(content)
        mget_computed_hash.return_value = content, pkhash

        with self.assertRaises(Exception):
            # contents (by pkhash) are different
            get_remote_file(remote_file)
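# The test above patches substrapp.utils.get_computed_hash so no HTTP call is
# made; get_remote_file is expected to compare the hash computed over the
# downloaded content against remote_file['hash'] and raise when they differ.
# For reference only, compute_hash is assumed here to be a plain SHA-256
# hexdigest over the content; this is an illustrative sketch, not the
# project's actual implementation.
import hashlib


def compute_hash_sketch(content):
    """Hypothetical stand-in for compute_hash: SHA-256 hexdigest of the bytes."""
    if isinstance(content, str):
        content = content.encode('utf-8')
    return hashlib.sha256(content).hexdigest()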
def _download_remote_file(self, channel_name, storage_address, asset):
    node_id = asset['owner']
    auth = authenticate_outgoing_request(node_id)

    r = get_remote_file(
        channel_name,
        storage_address,
        auth,
        stream=True,
        headers={HTTP_HEADER_PROXY_ASSET: 'True'}
    )

    if not r.ok:
        return Response(
            {'message': f'Cannot proxify asset from node {asset["owner"]}: {str(r.text)}'},
            status=r.status_code
        )

    response = CustomFileResponse(
        streaming_content=(chunk for chunk in r.iter_content(512 * 1024)),
        status=r.status_code
    )

    for header in r.headers:
        # We don't use hop-by-hop headers since they are incompatible with WSGI
        if not is_hop_by_hop(header):
            response[header] = r.headers.get(header)

    return response
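# The header copy above relies on is_hop_by_hop to drop connection-level
# headers (Connection, Keep-Alive, Transfer-Encoding, ...), which a WSGI
# application is not allowed to set. Assuming is_hop_by_hop comes from the
# standard library's wsgiref.util (an assumption about this module's imports),
# the filtering can be exercised in isolation like this:
from wsgiref.util import is_hop_by_hop

upstream_headers = {
    'Content-Type': 'application/octet-stream',
    'Content-Disposition': 'attachment; filename="model.tar.gz"',
    'Transfer-Encoding': 'chunked',   # hop-by-hop: must not be forwarded
    'Connection': 'keep-alive',       # hop-by-hop: must not be forwarded
}
forwarded = {k: v for k, v in upstream_headers.items() if not is_hop_by_hop(k)}
# forwarded keeps only Content-Type and Content-Disposition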
def get_model(subtuple):
    model_content, model_computed_hash = None, None

    if subtuple.get('model', None) is not None:
        model_content, model_computed_hash = get_remote_file(
            subtuple['model'], subtuple['model']['traintupleKey'])

    return model_content, model_computed_hash
def get_models(subtuple):
    models_content, models_computed_hash = [], []

    if subtuple.get('inModels', None) is not None:
        for subtuple_model in subtuple['inModels']:
            model_content, model_computed_hash = get_remote_file(
                subtuple_model, subtuple_model['traintupleKey'])
            models_content.append(model_content)
            models_computed_hash.append(model_computed_hash)

    return models_content, models_computed_hash
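# Illustrative call of get_models: the subtuple shape below is inferred from
# the code above (each in-model carries at least a 'traintupleKey' plus
# whatever get_remote_file needs, e.g. a 'storageAddress' and 'hash'). It is
# an example, not a schema taken from the ledger, and get_remote_file is
# patched so nothing is actually downloaded.
from unittest import mock

example_subtuple = {
    'inModels': [
        {'traintupleKey': 'abc123', 'storageAddress': 'http://node-1/model/abc123/file/', 'hash': 'deadbeef'},
        {'traintupleKey': 'def456', 'storageAddress': 'http://node-2/model/def456/file/', 'hash': 'cafebabe'},
    ]
}

with mock.patch(f'{__name__}.get_remote_file', return_value=(b'model-bytes', 'deadbeef')):
    contents, hashes = get_models(example_subtuple)
    # contents -> [b'model-bytes', b'model-bytes'], hashes -> ['deadbeef', 'deadbeef']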
def download_file(self, request, django_field, ledger_field=None):
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    pk = self.kwargs[lookup_url_kwarg]

    try:
        asset = get_object_from_ledger(pk, self.ledger_query_call)
    except LedgerError as e:
        return Response({'message': str(e.msg)}, status=e.status)

    if not self._has_access(request.user, asset):
        return Response({'message': 'Unauthorized'},
                        status=status.HTTP_403_FORBIDDEN)

    if get_owner() == asset['owner']:
        # The asset is stored on this node: serve the local file directly.
        obj = self.get_object()
        data = getattr(obj, django_field)
        response = CustomFileResponse(
            open(data.path, 'rb'),
            as_attachment=True,
            filename=os.path.basename(data.path)
        )
    else:
        # The asset belongs to another node: proxy the download from its owner.
        node_id = asset['owner']
        auth = authenticate_outgoing_request(node_id)

        if not ledger_field:
            ledger_field = django_field

        r = get_remote_file(asset[ledger_field]['storageAddress'], auth, stream=True)
        if not r.ok:
            return Response({
                'message': f'Cannot proxify asset from node {asset["owner"]}: {str(r.text)}'
            }, status=r.status_code)

        response = CustomFileResponse(
            streaming_content=(chunk for chunk in r.iter_content(512 * 1024)),
            status=r.status_code
        )

        for header in r.headers:
            # We don't use hop-by-hop headers since they are incompatible with WSGI
            if not is_hop_by_hop(header):
                response[header] = r.headers.get(header)

    return response
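# CustomFileResponse is project-specific; the local-file branch above maps
# onto Django's stock FileResponse, which already sets Content-Disposition
# when as_attachment/filename are given. A minimal, dependency-light
# illustration of that branch (the path is a placeholder, and plain
# FileResponse is assumed to be an acceptable stand-in here):
import os
from django.http import FileResponse


def serve_local_file_sketch(path):
    return FileResponse(
        open(path, 'rb'),
        as_attachment=True,
        filename=os.path.basename(path),
    )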
def get_objective(subtuple):
    from substrapp.models import Objective

    objective_hash = subtuple['objective']['hash']

    try:
        # check whether the objective already exists in the local db
        objective = Objective.objects.get(pk=objective_hash)
    except Objective.DoesNotExist:
        objective = None

    # the objective may exist without its metrics file: fetch it if needed
    if objective is None or not objective.metrics:
        content, computed_hash = get_remote_file(subtuple['objective']['metrics'])

        objective, _ = Objective.objects.update_or_create(
            pkhash=objective_hash, validated=True)

        try:
            f = tempfile.TemporaryFile()
            f.write(content)
            # update objective in local db for later use
            objective.metrics.save('metrics.py', f)
        except Exception as e:
            logging.error('Failed to save objective metrics in local db for later use')
            raise e

    return objective
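# Writing the metrics through a TemporaryFile works because Django's storage
# layer wraps the raw file object and seeks back to the start before reading
# it. An equivalent, slightly more direct alternative (a sketch, not what the
# project does) is to hand the raw bytes to ContentFile:
from django.core.files.base import ContentFile


def save_metrics_sketch(objective, content):
    # content is the raw bytes returned by get_remote_file
    objective.metrics.save('metrics.py', ContentFile(content))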
def get_algo(subtuple):
    algo_content, algo_computed_hash = get_remote_file(subtuple['algo'])
    return algo_content, algo_computed_hash