Example #1
def migrate_project_view(user_data, cache):
    """Migrate specified project."""
    ctx = ProjectMigrateRequest().load(request.json)
    user = cache.ensure_user(user_data)

    project = cache.get_project(user, ctx["project_id"])
    commit_message = ctx.get("commit_message", None)

    if ctx.get("is_delayed", False):
        job = cache.make_job(user,
                             project=project,
                             job_data={
                                 "renku_op": "migrate_job",
                                 "client_extras": ctx.get("client_extras")
                             })

        with enqueue_retry(MIGRATIONS_JOB_QUEUE) as queue:
            queue.enqueue(
                migrate_job,
                user_data,
                project.project_id,
                job.job_id,
                commit_message,
            )

        return result_response(ProjectMigrateAsyncResponseRPC(), job)

    messages, was_migrated = execute_migration(project, commit_message)
    response = {"messages": messages, "was_migrated": was_migrated}

    if was_migrated:
        _, response["remote_branch"] = repo_sync(Repo(project.abs_path),
                                                 remote="origin")

    return result_response(ProjectMigrateResponseRPC(), response)
Example #2
    def to_response(self):
        """Execute controller flow and serialize to service response."""
        if "project_id" in self.ctx:
            return result_response(MigrationsCheckCtrl.RESPONSE_SERIALIZER,
                                   self.local())

        elif "git_url" in self.ctx:
            return result_response(MigrationsCheckCtrl.RESPONSE_SERIALIZER,
                                   self.remote())
Example #3
def job_details(user_data, cache, job_id):
    """Show details for a specific job."""
    user = cache.ensure_user(user_data)

    job = cache.get_job(user, job_id)

    if not job or not job.project_id:
        return result_response(JobDetailsResponseRPC(), None)

    job.project = cache.get_project(user, job.project_id)
    return result_response(JobDetailsResponseRPC(), job)
Example #4
def unlink_file_view(user_data, cache):
    """Unlink a file from a dataset."""
    ctx = DatasetUnlinkRequest().load(request.json)

    include = ctx.get('include_filter')
    exclude = ctx.get('exclude_filter')

    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx['project_id'])

    if ctx.get('commit_message') is None:
        if include and exclude:
            filters = '-I {0} -X {1}'.format(include, exclude)
        elif not include and exclude:
            filters = '-X {0}'.format(exclude)
        else:
            filters = '-I {0}'.format(include)

        ctx['commit_message'] = ('service: unlink dataset {0} {1}'.format(
            ctx['short_name'], filters))

    with chdir(project.abs_path):
        records = file_unlink(short_name=ctx['short_name'],
                              include=include,
                              exclude=exclude,
                              yes=True,
                              interactive=False,
                              commit_message=ctx['commit_message'])

        unlinked = [record.path for record in records]

    return result_response(DatasetUnlinkResponseRPC(), {'unlinked': unlinked})
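Several of these views run repository operations inside "with chdir(project.abs_path)". A minimal sketch of such a helper, assuming a contextlib-based implementation (the actual renku utility may differ):

import contextlib
import os


@contextlib.contextmanager
def chdir(path):
    """Temporarily switch the working directory, restoring it on exit."""
    cwd = os.getcwd()
    os.chdir(str(path))
    try:
        yield
    finally:
        os.chdir(cwd)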
Example #5
def edit_dataset_view(user_data, cache):
    """Edit dataset metadata."""
    ctx = DatasetEditRequest().load(request.json)

    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx['project_id'])

    if ctx.get('commit_message') is None:
        ctx['commit_message'] = 'service: dataset edit {0}'.format(
            ctx['short_name'])

    with chdir(project.abs_path):
        edited, warnings = edit_dataset(ctx['short_name'],
                                        ctx.get('title'),
                                        ctx.get('description'),
                                        ctx.get('creators'),
                                        keywords=ctx.get('keywords'),
                                        commit_message=ctx['commit_message'])

    return result_response(
        DatasetEditResponseRPC(), {
            'edited': {field: ctx.get(field)
                       for field in edited},
            'warnings': warnings
        })
Example #6
def import_dataset_view(user_data, cache):
    """Import a dataset view."""
    user = cache.ensure_user(user_data)
    ctx = DatasetImportRequest().load(request.json)
    project = cache.get_project(user, ctx['project_id'])

    user_job = {
        'job_id': uuid.uuid4().hex,
        'state': USER_JOB_STATE_ENQUEUED,
    }
    job = cache.make_job(user, user_job, locked=project.project_id)

    with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
        queue.enqueue(
            dataset_import,
            user_data,
            user_job['job_id'],
            project.project_id,
            ctx['dataset_uri'],
            short_name=ctx.get('short_name'),
            extract=ctx.get('extract', False),
            timeout=int(os.getenv('WORKER_DATASET_JOBS_TIMEOUT', 1800)),
            result_ttl=int(os.getenv('WORKER_DATASET_JOBS_RESULT_TTL', 500)))

    return result_response(DatasetImportResponseRPC(), job)
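enqueue_retry is used throughout as a context manager that yields a job queue. A rough sketch of what such a helper could look like, assuming an rq/Redis backend; the retry strategy here is an assumption, not the actual implementation:

import contextlib

from redis import Redis
from rq import Queue


@contextlib.contextmanager
def enqueue_retry(queue_name, attempts=3):
    """Yield an rq Queue, retrying the Redis connection a few times."""
    last_error = None
    for _ in range(attempts):
        try:
            connection = Redis()
            connection.ping()  # fail fast if Redis is unreachable
            break
        except Exception as error:
            last_error = error
    else:
        raise last_error
    yield Queue(queue_name, connection=connection)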
Example #7
def edit_dataset_view(user_data, cache):
    """Edit dataset metadata."""
    ctx = DatasetEditRequest().load(request.json)

    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx["project_id"])

    if ctx.get("commit_message") is None:
        ctx["commit_message"] = "service: dataset edit {0}".format(ctx["name"])

    with chdir(project.abs_path):
        edited, warnings = edit_dataset(
            ctx["name"],
            ctx.get("title"),
            ctx.get("description"),
            ctx.get("creators"),
            keywords=ctx.get("keywords"),
            commit_message=ctx["commit_message"],
        )

    return result_response(
        DatasetEditResponseRPC(), {
            "edited": {field: ctx.get(field)
                       for field in edited},
            "warnings": warnings
        })
Example #8
def project_clone(user, cache):
    """Clone a remote repository."""
    ctx = ProjectCloneContext().load(
        {**request.json, **user},
        unknown=EXCLUDE,
    )
    local_path = make_project_path(user, ctx)
    user = cache.ensure_user(user)

    if local_path.exists():
        shutil.rmtree(str(local_path))

        for project in cache.get_projects(user):
            if project.git_url == ctx['git_url']:
                project.delete()

    local_path.mkdir(parents=True, exist_ok=True)
    renku_clone(ctx['url_with_auth'],
                local_path,
                depth=ctx['depth'],
                raise_git_except=True,
                config={
                    'user.name': ctx['fullname'],
                    'user.email': ctx['email'],
                })

    project = cache.make_project(user, ctx)

    return result_response(ProjectCloneResponseRPC(), project)
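For reference, a request body that would drive this view; the field names are inferred from the ctx keys used above, the values are hypothetical, and url_with_auth, fullname and email are filled in server-side from the schema and user data:

clone_request = {
    "git_url": "https://gitlab.example.org/namespace/project.git",
    "depth": 1,
}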
Example #9
def import_dataset_view(user_data, cache):
    """Import a dataset view."""
    user = cache.ensure_user(user_data)
    ctx = DatasetImportRequest().load(request.json)
    project = cache.get_project(user, ctx['project_id'])

    if project is None or not project.abs_path.exists():
        return error_response(
            INVALID_PARAMS_ERROR_CODE,
            'invalid project_id: {0}'.format(ctx['project_id']))

    user_job = {
        'job_id': uuid.uuid4().hex,
        'state': USER_JOB_STATE_ENQUEUED,
    }
    job = cache.make_job(user, user_job)

    with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
        queue.enqueue(
            dataset_import,
            user_data,
            user_job['job_id'],
            ctx['project_id'],
            ctx['dataset_uri'],
            short_name=ctx.get('short_name'),
            extract=ctx.get('extract', False),
            timeout=int(os.getenv('WORKER_DATASET_JOBS_TIMEOUT', 1800)),
            result_ttl=int(os.getenv('WORKER_DATASET_JOBS_RESULT_TTL', 500)))

    return result_response(DatasetImportResponseRPC(), job)
Example #10
def test_result_response_with_empty_tuple(svc_client):
    """Test result response with empty value."""
    response = result_response(DatasetListResponseRPC(), ()).json

    assert response
    assert {'result'} == set(response.keys())
    assert {} == response['result']
Example #11
def test_result_response_with_empty_dict(svc_client):
    """Test result response with empty value."""
    response = result_response(DatasetListResponseRPC(), {}).json

    assert response
    assert {"result"} == set(response.keys())
    assert {} == response["result"]
Example #12
def test_result_response_with_none(svc_client):
    """Test result response with None value."""
    response = result_response(DatasetListResponseRPC(), None).json

    assert response
    assert {'result'} == set(response.keys())
    assert response['result'] is None
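Taken together, the three tests above pin down the response envelope: the payload is serialized under a single "result" key, empty values collapse to an empty object, and None passes through unchanged. A minimal sketch consistent with that behavior, assuming marshmallow serializers and Flask's jsonify (not the actual implementation):

from flask import jsonify


def result_response(serializer, data):
    """Serialize data and wrap it in the {"result": ...} envelope."""
    if data is None:
        return jsonify({"result": None})
    if not data:
        return jsonify({"result": {}})
    return jsonify({"result": serializer.dump(data)})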
Example #13
def unlink_file_view(user_data, cache):
    """Unlink a file from a dataset."""
    ctx = DatasetUnlinkRequest().load(request.json)

    include = ctx.get("include_filter")
    exclude = ctx.get("exclude_filter")

    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx["project_id"])

    if ctx.get("commit_message") is None:
        if include and exclude:
            filters = "-I {0} -X {1}".format(include, exclude)
        elif not include and exclude:
            filters = "-X {0}".format(exclude)
        else:
            filters = "-I {0}".format(include)

        ctx["commit_message"] = "service: unlink dataset {0} {1}".format(
            ctx["name"], filters)

    with chdir(project.abs_path):
        records = file_unlink(
            name=ctx["name"],
            include=include,
            exclude=exclude,
            yes=True,
            interactive=False,
            commit_message=ctx["commit_message"],
        )

        unlinked = [record.path for record in records]

    return result_response(DatasetUnlinkResponseRPC(), {"unlinked": unlinked})
Example #14
def import_dataset_view(user_data, cache):
    """Import a dataset view."""
    user = cache.ensure_user(user_data)
    ctx = DatasetImportRequest().load(request.json)
    project = cache.get_project(user, ctx["project_id"])
    job = cache.make_job(user,
                         project=project,
                         job_data={
                             "renku_op": "dataset_import",
                             "client_extras": ctx.get("client_extras")
                         })

    with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
        queue.enqueue(
            dataset_import,
            user_data,
            job.job_id,
            project.project_id,
            ctx["dataset_uri"],
            name=ctx.get("name"),
            extract=ctx.get("extract", False),
            timeout=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)),
            result_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)),
        )

    return result_response(DatasetImportResponseRPC(), job)
Example #15
def add_file_to_dataset_view(user_data, cache):
    """Add the uploaded file to cloned repository."""
    ctx = DatasetAddRequest().load(request.json)
    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx['project_id'])

    if not ctx['commit_message']:
        ctx['commit_message'] = 'service: dataset add {0}'.format(
            ctx['short_name'])

    local_paths = []
    for _file in ctx['files']:
        local_path = None

        if 'file_url' in _file:
            commit_message = '{0}{1}'.format(ctx['commit_message'],
                                             _file['file_url'])

            job = cache.make_job(user)
            _file['job_id'] = job.job_id

            with enqueue_retry(DATASETS_JOB_QUEUE) as queue:
                queue.enqueue(dataset_add_remote_file, user_data, job.job_id,
                              project.project_id, ctx['create_dataset'],
                              commit_message, ctx['short_name'],
                              _file['file_url'])
            continue

        if 'file_id' in _file:
            file = cache.get_file(user, _file['file_id'])
            local_path = file.abs_path

        elif 'file_path' in _file:
            local_path = project.abs_path / Path(_file['file_path'])

        if not local_path or not local_path.exists():
            return error_response(
                INVALID_PARAMS_ERROR_CODE,
                'invalid file reference: {0}'.format(json.dumps(_file)))

        ctx['commit_message'] += ' {0}'.format(local_path.name)
        local_paths.append(str(local_path))

    if local_paths:
        with chdir(project.abs_path):
            add_file(local_paths,
                     ctx['short_name'],
                     create=ctx['create_dataset'],
                     force=ctx['force'],
                     commit_message=ctx['commit_message'])

            try:
                _, ctx['remote_branch'] = repo_sync(Repo(project.abs_path),
                                                    remote='origin')
            except GitCommandError:
                return error_response(INTERNAL_FAILURE_ERROR_CODE,
                                      'repo sync failed')

    return result_response(DatasetAddResponseRPC(), ctx)
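A request body covering the three file-reference variants this view handles; the field names come straight from the ctx keys read above, while the values are hypothetical:

add_request = {
    "project_id": "abc123",
    "short_name": "my-dataset",
    "create_dataset": True,
    "force": False,
    "commit_message": "",
    "files": [
        {"file_id": "deadbeef"},  # previously uploaded file in the cache
        {"file_path": "data/local.csv"},  # path inside the cloned project
        {"file_url": "https://example.org/remote.csv"},  # enqueued as a job
    ],
}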
Example #16
def list_projects_view(user, cache):
    """List cached projects."""
    projects = [
        project for project in cache.get_projects(cache.ensure_user(user))
        if project.abs_path.exists()
    ]

    return result_response(ProjectListResponseRPC(), {"projects": projects})
Example #17
def test_result_response(svc_client):
    """Test result response utility."""
    ctx = {'datasets': [{'name': 'my-dataset'}]}
    response = result_response(DatasetListResponseRPC(), ctx).json

    assert response
    assert {'result'} == set(response.keys())
    assert {'datasets'} == set(response['result'].keys())
    assert ctx == response['result']
Example #18
    def to_response(self):
        """Serialize to service version response."""
        return result_response(
            VersionCtrl.RESPONSE_SERIALIZER,
            {
                "latest_version": __version__,
                "supported_project_version": SUPPORTED_PROJECT_VERSION,
            },
        )
Example #19
def list_datasets_view(user, cache):
    """List all datasets in project."""
    ctx = DatasetListRequest().load(request.args)
    project = cache.get_project(cache.ensure_user(user), ctx['project_id'])

    with chdir(project.abs_path):
        ctx['datasets'] = list_datasets()

    return result_response(DatasetListResponseRPC(), ctx)
Example #20
def list_dataset_files_view(user, cache):
    """List files in a dataset."""
    ctx = DatasetFilesListRequest().load(request.args)
    project = cache.get_project(cache.ensure_user(user), ctx['project_id'])

    with chdir(project.abs_path):
        ctx['files'] = list_files(datasets=[ctx['short_name']])

    return result_response(DatasetFilesListResponseRPC(), ctx)
Example #21
def project_clone_view(user_data):
    """Clone a remote repository."""
    project_data = ProjectCloneContext().load(
        {**user_data, **request.json},
        unknown=EXCLUDE,
    )
    project = _project_clone(user_data, project_data)

    return result_response(ProjectCloneResponseRPC(), project)
Example #22
    def to_response(self):
        """Execute controller flow and serialize to service response."""
        self.ctx["datasets"] = []

        if "project_id" in self.ctx:
            self.ctx["datasets"] = self.local()

        elif "git_url" in self.ctx:
            self.ctx["datasets"] = self.remote()

        return result_response(DatasetsListCtrl.RESPONSE_SERIALIZER, self.ctx)
Example #23
def list_uploaded_files_view(user, cache):
    """List uploaded files ready to be added to projects."""
    user = cache.ensure_user(user)

    files = [f for f in cache.get_files(user) if f.exists()]

    response = {
        "files": sorted(files, key=lambda rec: (rec.is_dir, rec.relative_path))
    }

    return result_response(FileListResponseRPC(), response)
Example #24
def read_manifest_from_template(user, cache):
    """Read templates from the manifest file of a template repository."""
    project_data = ManifestTemplatesRequest().load(
        {**user, **request.args},
        unknown=EXCLUDE,
    )
    project = _project_clone(user, project_data)
    manifest = read_template_manifest(project.abs_path)

    return result_response(ManifestTemplatesResponseRPC(),
                           {"templates": manifest})
Example #25
def list_jobs(user_data, cache):
    """List user created jobs."""
    user = cache.ensure_user(user_data)

    jobs = []
    for job in cache.get_jobs(user):
        if job.project_id:
            job.project = cache.get_project(user, job.project_id)

        jobs.append(job)

    return result_response(JobListResponseRPC(), {"jobs": jobs})
Example #26
def migration_check_project_view(user_data, cache):
    """Migrate specified project."""
    user = cache.ensure_user(user_data)
    project = cache.get_project(user, request.json['project_id'])

    with chdir(project.abs_path):
        migration_required, project_supported = migrations_check()

    return result_response(
        ProjectMigrationCheckResponseRPC(), {
            'migration_required': migration_required,
            'project_supported': project_supported
        })
Example #27
def list_dataset_files_view(user, cache):
    """List files in a dataset."""
    ctx = DatasetFilesListRequest().load(request.args)
    project = cache.get_project(cache.ensure_user(user), ctx['project_id'])

    if not project.abs_path.exists():
        return error_response(INVALID_PARAMS_ERROR_CODE,
                              'invalid project_id argument')

    with chdir(project.abs_path):
        ctx['files'] = list_files(datasets=[ctx['dataset_name']])

    return result_response(DatasetFilesListResponseRPC(), ctx)
Example #28
def test_result_response(svc_client):
    """Test result response utility."""
    ctx = {"datasets": [{"name": "my-dataset"}]}
    response = result_response(DatasetListResponseRPC(), ctx).json

    assert response
    assert {"result"} == set(response.keys())
    assert {"datasets"} == set(response["result"].keys())

    expected = ctx["datasets"][0]
    received = response["result"]["datasets"][0]

    assert expected["name"] == received["name"]
Example #29
def add_file_to_dataset_view(user, cache):
    """Add the uploaded file to cloned repository."""
    ctx = DatasetAddRequest().load(request.json)
    user = cache.ensure_user(user)
    project = cache.get_project(user, ctx['project_id'])

    if not project.abs_path.exists():
        return error_response(
            INVALID_PARAMS_ERROR_CODE,
            'invalid project_id: {0}'.format(ctx['project_id']))

    if not ctx['commit_message']:
        ctx['commit_message'] = 'service: dataset add {0}'.format(
            ctx['dataset_name'])

    local_paths = []
    for _file in ctx['files']:
        local_path = None

        if 'file_id' in _file:
            file = cache.get_file(user, _file['file_id'])
            local_path = file.abs_path

        elif 'file_path' in _file:
            local_path = project.abs_path / Path(_file['file_path'])

        if not local_path or not local_path.exists():
            return error_response(
                INVALID_PARAMS_ERROR_CODE,
                'invalid file reference: {0}'.format(json.dumps(_file)))

        ctx['commit_message'] += ' {0}'.format(local_path.name)
        local_paths.append(str(local_path))

    with chdir(project.abs_path):
        add_file(local_paths,
                 ctx['dataset_name'],
                 create=ctx['create_dataset'],
                 commit_message=ctx['commit_message'])

        if not repo_sync(project.abs_path):
            return error_response(INTERNAL_FAILURE_ERROR_CODE,
                                  'repo sync failed')

    return result_response(DatasetAddResponseRPC(), ctx)
Example #30
def migrate_project_view(user_data, cache):
    """Migrate specified project."""
    user = cache.ensure_user(user_data)
    project = cache.get_project(user, request.json['project_id'])

    messages = []

    def collect_message(msg):
        """Collect migration message."""
        messages.append(msg)

    with chdir(project.abs_path):
        was_migrated = migrate_project(progress_callback=collect_message)

    return result_response(ProjectMigrateResponseRPC(), {
        'messages': messages,
        'was_migrated': was_migrated
    })
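Finally, a client-side example of consuming one of these RPC endpoints; the route and authentication scheme are assumptions, but the {"result": ...} envelope matches every view above:

import requests

reply = requests.get(
    "https://renkulab.example/api/renku/datasets.list",  # hypothetical route
    params={"project_id": "abc123"},
    headers={"Authorization": "Bearer <token>"},
)
datasets = reply.json()["result"]["datasets"]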