def add_file_to_dataset_view(user_data, cache):
    """Add the uploaded file to cloned repository.

    Reads a ``DatasetAddRequest`` payload from the Flask ``request.json``,
    resolves the user's cached project clone, and adds each referenced file
    to the dataset named by ``short_name``.

    Remote URLs are handed off to an async job queue; local references
    (``file_id`` from the upload cache, or ``file_path`` relative to the
    project) are added synchronously and pushed.

    Returns an RPC result response, or an error response on an invalid file
    reference or a failed git sync.
    """
    # NOTE(review): ctx schema (project_id, commit_message, short_name,
    # files, create_dataset, force) comes from DatasetAddRequest — confirm
    # against the schema definition.
    ctx = DatasetAddRequest().load(request.json)
    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx['project_id'])

    # Default commit message when the client did not supply one.
    if not ctx['commit_message']:
        ctx['commit_message'] = 'service: dataset add {0}'.format(
            ctx['short_name'])

    local_paths = []
    for _file in ctx['files']:
        local_path = None

        if 'file_url' in _file:
            # Remote files are downloaded/added asynchronously: create a
            # tracking job and enqueue the worker, then skip the local path
            # handling below.
            commit_message = '{0}{1}'.format(ctx['commit_message'],
                                             _file['file_url'])
            job = cache.make_job(user)
            # Expose the job id in the response so the client can poll it.
            _file['job_id'] = job.job_id
            with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
                queue.enqueue(dataset_add_remote_file, user_data, job.job_id,
                              project.project_id, ctx['create_dataset'],
                              commit_message, ctx['short_name'],
                              _file['file_url'])
            continue

        if 'file_id' in _file:
            # File previously uploaded to the service cache.
            file = cache.get_file(user, _file['file_id'])
            local_path = file.abs_path
        elif 'file_path' in _file:
            # Path relative to the cloned project root.
            local_path = project.abs_path / Path(_file['file_path'])

        if not local_path or not local_path.exists():
            return error_response(
                INVALID_PARAMS_ERROR_CODE,
                'invalid file reference: {0}'.format(json.dumps(_file)))

        # Record each added file name in the commit message.
        ctx['commit_message'] += ' {0}'.format(local_path.name)
        local_paths.append(str(local_path))

    # Only commit/push when at least one file was handled synchronously
    # (remote URLs were enqueued above and do not touch the repo here).
    if local_paths:
        with chdir(project.abs_path):
            add_file(local_paths, ctx['short_name'],
                     create=ctx['create_dataset'], force=ctx['force'],
                     commit_message=ctx['commit_message'])

        try:
            _, ctx['remote_branch'] = repo_sync(Repo(project.abs_path),
                                                remote='origin')
        except GitCommandError:
            return error_response(INTERNAL_FAILURE_ERROR_CODE,
                                  'repo sync failed')

    return result_response(DatasetAddResponseRPC(), ctx)
def jsonld(client, tags):
    """Format dataset tags as JSON-LD.

    :param client: LocalClient instance.
    :param tags: Dataset tags.
    """
    from renku.core.models.json import dumps

    # Serialize every tag to its JSON-LD form, then dump the whole list.
    documents = []
    for tag in tags:
        documents.append(tag.as_jsonld())
    return dumps(documents, indent=2)
def jsonld(client, datasets, **kwargs):
    """Format datasets as JSON-LD."""
    documents = []
    for dataset in datasets:
        # Serialize relative to each dataset's reference directory so that
        # contained paths resolve from the current working directory.
        base = os.path.relpath('.', start=str(dataset.__reference__.parent))
        documents.append(asjsonld(dataset, basedir=base))
    return dumps(documents, indent=2)
def jsonld(client, records, **kwargs):
    """Format dataset files as JSON-LD.

    :param client: LocalClient instance.
    :param records: Filtered collection.
    """
    from renku.core.models.json import dumps

    # Collect the JSON-LD form of every record before dumping.
    payload = []
    for record in records:
        payload.append(record.as_jsonld())
    return dumps(payload, indent=2)
def add_file_to_dataset_view(user, cache):
    """Add the uploaded file to cloned repository.

    Reads a ``DatasetAddRequest`` payload from the Flask ``request.json``,
    resolves the user's cached project clone, and adds the referenced files
    to the dataset named by ``dataset_name``, committing and syncing the
    repository afterwards.

    Returns an RPC result response, or an error response when the project
    clone is missing, a file reference is invalid, or the git sync fails.
    """
    # NOTE(review): ctx schema (project_id, commit_message, dataset_name,
    # files, create_dataset) comes from DatasetAddRequest — confirm against
    # the schema definition.
    ctx = DatasetAddRequest().load(request.json)
    user = cache.ensure_user(user)
    project = cache.get_project(user, ctx['project_id'])

    # The project must already be cloned into the cache.
    if not project.abs_path.exists():
        return error_response(
            INVALID_PARAMS_ERROR_CODE,
            'invalid project_id: {0}'.format(ctx['project_id']))

    # Default commit message when the client did not supply one.
    if not ctx['commit_message']:
        ctx['commit_message'] = 'service: dataset add {0}'.format(
            ctx['dataset_name'])

    local_paths = []
    for _file in ctx['files']:
        local_path = None
        if 'file_id' in _file:
            # File previously uploaded to the service cache.
            file = cache.get_file(user, _file['file_id'])
            local_path = file.abs_path
        elif 'file_path' in _file:
            # Path relative to the cloned project root.
            local_path = project.abs_path / Path(_file['file_path'])

        if not local_path or not local_path.exists():
            return error_response(
                INVALID_PARAMS_ERROR_CODE,
                'invalid file reference: {0}'.format(json.dumps(_file)))

        # Record each added file name in the commit message.
        ctx['commit_message'] += ' {0}'.format(local_path.name)
        local_paths.append(str(local_path))

    with chdir(project.abs_path):
        add_file(local_paths, ctx['dataset_name'],
                 create=ctx['create_dataset'],
                 commit_message=ctx['commit_message'])

    # Push the commit; repo_sync here signals failure via a falsy return
    # rather than an exception.
    if not repo_sync(project.abs_path):
        return error_response(INTERNAL_FAILURE_ERROR_CODE,
                              'repo sync failed')

    return result_response(DatasetAddResponseRPC(), ctx)
def jsonld(client, datasets, **kwargs):
    """Format datasets as JSON-LD."""
    # Serialize every dataset to its JSON-LD form, then dump the list.
    serialized = []
    for dataset in datasets:
        serialized.append(dataset.as_jsonld())
    return dumps(serialized, indent=2)
def add_file_to_dataset_view(user_data, cache):
    """Add the uploaded file to cloned repository.

    Reads a ``DatasetAddRequest`` payload from the Flask ``request.json``,
    resolves the user's cached project clone, and adds each referenced file
    to the dataset named by ``name``.

    Remote URLs are handed off to an async job queue; local references
    (``file_id`` from the upload cache, or ``file_path`` relative to the
    project) are added synchronously and pushed.

    Returns an RPC result response, or an error response on an invalid file
    reference or a failed git sync.
    """
    # NOTE(review): ctx schema (project_id, commit_message, name, files,
    # create_dataset, force, client_extras) comes from DatasetAddRequest —
    # confirm against the schema definition.
    ctx = DatasetAddRequest().load(request.json)
    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx["project_id"])

    # Default commit message when the client did not supply one.
    if not ctx["commit_message"]:
        ctx["commit_message"] = "service: dataset add {0}".format(ctx["name"])

    local_paths = []
    for _file in ctx["files"]:
        local_path = None

        if "file_url" in _file:
            # Remote files are downloaded/added asynchronously: create a
            # tracking job with metadata, enqueue the worker, then skip the
            # local path handling below.
            commit_message = "{0}{1}".format(ctx["commit_message"],
                                             _file["file_url"])
            job = cache.make_job(
                user,
                project=project,
                job_data={
                    "renku_op": "dataset_add_remote_file",
                    "client_extras": ctx.get("client_extras")
                },
            )
            # Expose the job id in the response so the client can poll it.
            _file["job_id"] = job.job_id
            with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
                queue.enqueue(
                    dataset_add_remote_file,
                    user_data,
                    job.job_id,
                    project.project_id,
                    ctx["create_dataset"],
                    commit_message,
                    ctx["name"],
                    _file["file_url"],
                )
            continue

        if "file_id" in _file:
            # File previously uploaded to the service cache.
            file = cache.get_file(user, _file["file_id"])
            local_path = file.abs_path
        elif "file_path" in _file:
            # Path relative to the cloned project root.
            local_path = project.abs_path / Path(_file["file_path"])

        if not local_path or not local_path.exists():
            return error_response(
                INVALID_PARAMS_ERROR_CODE,
                "invalid file reference: {0}".format(json.dumps(_file)))

        # Record each added file name in the commit message.
        ctx["commit_message"] += " {0}".format(local_path.name)
        local_paths.append(str(local_path))

    # Only commit/push when at least one file was handled synchronously
    # (remote URLs were enqueued above and do not touch the repo here).
    if local_paths:
        with chdir(project.abs_path):
            add_file(
                local_paths,
                ctx["name"],
                create=ctx["create_dataset"],
                force=ctx["force"],
                commit_message=ctx["commit_message"],
            )

        try:
            _, ctx["remote_branch"] = repo_sync(Repo(project.abs_path),
                                                remote="origin")
        except GitCommandError:
            return error_response(INTERNAL_FAILURE_ERROR_CODE,
                                  "repo sync failed")

    return result_response(DatasetAddResponseRPC(), ctx)