Example #1
    def test_is_private_updated_by_world_permissions(self):
        """For backward compatibility, is_private should reflect absence of world-GET"""

        from pillar.api.utils import remove_private_keys, dumps

        project_url = '/api/projects/%s' % self.project_id
        put_project = remove_private_keys(self.project)

        # Create admin user.
        self._create_user_with_token(['admin'],
                                     'admin-token',
                                     user_id='cafef00dbeefcafef00dbeef')

        # Make the project public by granting world GET access.
        put_project['permissions']['world'] = ['GET']
        put_project['is_private'] = True  # Should be overridden by the world-GET permission.

        resp = self.client.put(project_url,
                               data=dumps(put_project),
                               headers={
                                   'Authorization':
                                   self.make_header('admin-token'),
                                   'Content-Type': 'application/json',
                                   'If-Match': self.project['_etag']
                               })
        self.assertEqual(200, resp.status_code, resp.data)

        with self.app.test_request_context():
            projects = self.app.data.driver.db['projects']
            db_proj = projects.find_one(self.project_id)
            self.assertEqual(['GET'], db_proj['permissions']['world'])
            self.assertFalse(db_proj['is_private'])

        # Make the project private
        put_project['permissions']['world'] = []

        resp = self.client.put(project_url,
                               data=dumps(put_project),
                               headers={
                                   'Authorization':
                                   self.make_header('admin-token'),
                                   'Content-Type': 'application/json',
                                   'If-Match': db_proj['_etag']
                               })
        self.assertEqual(200, resp.status_code, resp.data)

        with self.app.test_request_context():
            projects = self.app.data.driver.db['projects']
            db_proj = projects.find_one(self.project_id)
            self.assertEqual([], db_proj['permissions']['world'])
            self.assertTrue(db_proj['is_private'])
Example #2
    def test_add_remove_user(self):
        from pillar.api.projects import utils as proj_utils
        from pillar.api.utils import dumps

        project_mng_user_url = '/api/p/users'

        # Use our API to add user to group
        payload = {
            'project_id': self.project_id,
            'user_id': self.other_user_id,
            'action': 'add'
        }

        resp = self.client.post(project_mng_user_url,
                                data=dumps(payload),
                                content_type='application/json',
                                headers={
                                    'Authorization': self.make_header('token'),
                                    'If-Match': self.project['_etag']
                                })
        self.assertEqual(200, resp.status_code, resp.data)

        # Check if the user is now actually member of the group.
        with self.app.test_request_context():
            users = self.app.data.driver.db['users']

            db_user = users.find_one(self.other_user_id)
            admin_group = proj_utils.get_admin_group(self.project)

            self.assertIn(admin_group['_id'], db_user['groups'])

        # Update payload to remove the user we just added
        payload['action'] = 'remove'

        resp = self.client.post(project_mng_user_url,
                                data=dumps(payload),
                                content_type='application/json',
                                headers={
                                    'Authorization': self.make_header('token'),
                                    'If-Match': self.project['_etag']
                                })
        self.assertEqual(200, resp.status_code, resp.data)

        # Check if the user is now actually removed from the group.
        with self.app.test_request_context():
            users = self.app.data.driver.db['users']

            db_user = users.find_one(self.other_user_id)
            self.assertNotIn(admin_group['_id'], db_user['groups'])
Example #3
    def test_edits_by_nonowner_admin(self):
        """Any admin should be able to edit any project."""

        from pillar.api.utils import remove_private_keys, PillarJSONEncoder
        dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)

        # Create test project.
        project = self._create_user_and_project(['subscriber'])
        project_id = project['_id']
        project_url = '/api/projects/%s' % project_id

        # Create test user.
        self._create_user_with_token(['admin'],
                                     'admin-token',
                                     user_id='cafef00dbeefcafef00dbeef')

        # Admin user should be able to PUT.
        put_project = remove_private_keys(project)
        put_project['name'] = 'โครงการปั่นเมฆ'

        resp = self.client.put(project_url,
                               data=dumps(put_project),
                               headers={
                                   'Authorization':
                                   self.make_header('admin-token'),
                                   'Content-Type': 'application/json',
                                   'If-Match': project['_etag']
                               })
        self.assertEqual(200, resp.status_code, resp.data)

    def test_edits_by_nonowner_subscriber(self):
        """A subscriber should only be able to edit their own projects."""

        from pillar.api.utils import remove_private_keys, PillarJSONEncoder
        dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)

        # Create test project.
        project = self._create_user_and_project(['subscriber'])
        project_id = project['_id']
        project_url = '/api/projects/%s' % project_id

        # Create test user.
        my_user_id = 'cafef00dbeefcafef00dbeef'
        self._create_user_with_token(['subscriber'], 'mortal-token', user_id=my_user_id)

        # Regular subscriber should not be able to do this.
        put_project = remove_private_keys(project)
        put_project['name'] = 'Болту́н -- нахо́дка для шпио́на.'
        put_project['user'] = my_user_id
        resp = self.client.put(project_url,
                               data=dumps(put_project),
                               headers={'Authorization': self.make_header('mortal-token'),
                                        'Content-Type': 'application/json',
                                        'If-Match': project['_etag']})
        self.assertEqual(403, resp.status_code, resp.data)
Example #5
def download_task_and_log(storage_path: str, task_id: str):
    """Downloads task + task log and stores them."""

    import gzip
    import pymongo

    task_oid = bson.ObjectId(task_id)
    log.info('Archiving task %s to %s', task_oid, storage_path)

    tasks_coll = current_flamenco.db('tasks')
    logs_coll = current_flamenco.db('task_logs')

    task = tasks_coll.find_one({'_id': task_oid})
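    # Fetch the log chunks in arrival order, so concatenating them reproduces the original log.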
    logs = logs_coll.find({
        'task': task_oid
    }).sort([
        ('received_on_manager', pymongo.ASCENDING),
        ('_id', pymongo.ASCENDING),
    ])

    # Save the task as JSON
    spath = pathlib.Path(storage_path)
    task_path = spath / f'task-{task_id}.json'
    with open(task_path, mode='w', encoding='utf8') as outfile:
        outfile.write(dumps(task, indent=4, sort_keys=True))

    # Get the task log bits and write to compressed file.
    log_path = spath / f'task-{task_id}.log.gz'
    with gzip.open(log_path, mode='wb') as outfile:
        for log_entry in logs:
            outfile.write(log_entry['log'].encode())
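
The gzip archive written above can be read back with the standard library alone. A minimal sketch, assuming only the path layout used by the function above (the helper name is illustrative):

import gzip
import pathlib


def read_archived_log(storage_path: str, task_id: str) -> str:
    """Reads back the compressed task log written by download_task_and_log()."""
    log_path = pathlib.Path(storage_path) / f'task-{task_id}.log.gz'
    with gzip.open(log_path, mode='rb') as infile:
        return infile.read().decode('utf8')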
Example #6
def create_test_job(manager_id, user_email, project_url):
    """Creates a test job for a given manager."""

    from pillar.api.utils import dumps, str2id

    manager_id = str2id(manager_id)
    authentication.force_cli_user()

    # Find user
    users_coll = current_app.db()['users']
    user = users_coll.find_one({'email': user_email}, projection={'_id': 1})
    if not user:
        raise ValueError('User with email %r not found' % user_email)

    # Find project
    projs_coll = current_app.db()['projects']
    proj = projs_coll.find_one({'url': project_url},
                               projection={'_id': 1})
    if not proj:
        log.error('Unable to find project url=%s', project_url)
        return 1

    # Create the job
    job = flamenco.current_flamenco.job_manager.api_create_job(
        'CLI test job',
        'Test job created from the server CLI',
        'sleep',
        {
            'frames': '1-30, 40-44',
            'chunk_size': 13,
            'time_in_seconds': 3,
        },
        proj['_id'], user['_id'], manager_id)

    log.info('Job created:\n%s', dumps(job, indent=4))
Example #7
def create_manager(email, name, description):
    """Creates a Flamenco manager."""

    from pillar.api.utils import dumps

    authentication.force_cli_user()
    mngr_doc, account, token = flamenco.setup.create_manager(email, name, description)

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    print()
    print('Created a new manager:')
    print(dumps(mngr_doc, indent=4))
Example #8
def create_test_job(manager_id, user_email, project_url):
    """Creates a test job for a given manager."""

    from pillar.api.utils import dumps, str2id

    manager_id = str2id(manager_id)
    authentication.force_cli_user()

    # Find user
    users_coll = current_app.db()['users']
    user = users_coll.find_one({'email': user_email}, projection={'_id': 1})
    if not user:
        raise ValueError('User with email %r not found' % user_email)

    # Find project
    projs_coll = current_app.db()['projects']
    proj = projs_coll.find_one({'url': project_url}, projection={'_id': 1})
    if not proj:
        log.error('Unable to find project url=%s', project_url)
        return 1

    # Create the job
    job = flamenco.current_flamenco.job_manager.api_create_job(
        'CLI test job', 'Test job created from the server CLI', 'sleep', {
            'frames': '1-30, 40-44',
            'chunk_size': 13,
            'time_in_seconds': 3,
        }, proj['_id'], user['_id'], manager_id)

    log.info('Job created:\n%s', dumps(job, indent=4))
Example #9
    def test_compress_flamenco_task_log(self):
        from pillar.api.utils import dumps
        from flamenco.celery import job_archival

        self._perform_task_updates()

        expected_log = ''.join(
            40 * f'This is batch {batch_idx} mülti→line log entry\n'
            for batch_idx in range(3))

        task_id = self.task_ids[1]
        test_task = self.flamenco.db('tasks').find_one({'_id': bson.ObjectId(task_id)})

        with tempfile.TemporaryDirectory() as tempdir:
            # Perform the task log compression
            storage_path = pathlib.Path(tempdir)
            job_archival.download_task_and_log(tempdir, task_id)

            # Check that the files are there and contain the correct data.
            task_log_file = storage_path / f'task-{task_id}.log.gz'
            with gzip.open(task_log_file) as infile:
                contents = infile.read().decode()
                self.assertEqual(expected_log, contents)

            task_contents_file = storage_path / f'task-{task_id}.json'
            expected_task = json.loads(dumps(test_task))

            with task_contents_file.open() as infile:
                read_task = json.load(infile)
                self.assertEqual(set(expected_task.keys()), set(read_task.keys()))
                self.assertEqual(expected_task, read_task)
Example #10
    def client_request(self,
                       method,
                       path,
                       qs=None,
                       expected_status=200,
                       auth_token=None,
                       json=None,
                       data=None,
                       headers=None,
                       files=None,
                       content_type=None,
                       etag=None):
        """Performs a HTTP request to the server."""

        from pillar.api.utils import dumps
        import json as mod_json

        headers = headers or {}
        if auth_token is not None:
            headers['Authorization'] = self.make_header(auth_token)

        if json is not None:
            data = dumps(json)
            headers['Content-Type'] = 'application/json'

        if etag is not None:
            if method in {'PUT', 'PATCH', 'DELETE'}:
                headers['If-Match'] = etag
            elif method == 'GET':
                headers['If-None-Match'] = etag
            else:
                raise ValueError(
                    'Not sure what to do with etag and method %s' % method)

        if files:
            data = data or {}
            content_type = 'multipart/form-data'
            data.update(files)

        resp = self.client.open(path=path,
                                method=method,
                                data=data,
                                headers=headers,
                                content_type=content_type,
                                query_string=self.join_url_params(qs))
        self.assertEqual(
            expected_status, resp.status_code,
            'Expected status %i but got %i. Response: %s' %
            (expected_status, resp.status_code, resp.data))

        def get_json():
            if resp.mimetype != 'application/json':
                raise TypeError('Unable to load JSON from mimetype %r' %
                                resp.mimetype)
            return mod_json.loads(resp.data)

        resp.json = get_json
        resp.get_json = get_json

        return resp
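
A minimal usage sketch of the helper above, as it might appear inside a test that mixes in this class; the path, etag, and token values are placeholders, not taken from this codebase:

        # Hypothetical PUT of a JSON document, asserting a 200 response.
        resp = self.client_request('PUT',
                                   '/api/some-resource/12345',  # placeholder URL
                                   json={'name': 'new name'},
                                   etag='placeholder-etag',     # sent as If-Match for PUT
                                   auth_token='token',
                                   expected_status=200)
        payload = resp.get_json()  # raises TypeError if the response is not JSON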
Example #11
def create_manager(email, name, description):
    """Creates a Flamenco manager."""

    from pillar.api.utils import dumps

    authentication.force_cli_user()
    mngr_doc, account, token = flamenco.setup.create_manager(email, name, description)

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    print()
    print('Created a new manager:')
    print(dumps(mngr_doc, indent=4))
Example #12
    def _post(self, data):
        from pillar.api.utils import dumps

        return self.client.post('/api/service/badger',
                                data=dumps(data),
                                headers={
                                    'Authorization': self.make_header(self.badger_token),
                                    'Content-Type': 'application/json',
                                })
Example #13
    def test_remove_self(self):
        """Every user should be able to remove themselves from a project,
         regardless of permissions.
         """

        from pillar.api.projects import utils as proj_utils
        from pillar.api.utils import dumps

        project_mng_user_url = '/api/p/users'

        # Use our API to add user to group
        payload = {
            'project_id': self.project_id,
            'user_id': self.other_user_id,
            'action': 'add'
        }

        resp = self.client.post(
            project_mng_user_url,
            data=dumps(payload),
            content_type='application/json',
            headers={'Authorization': self.make_header('token')})
        self.assertEqual(200, resp.status_code, resp.data)

        # Update payload to remove the user we just added, and call it as that user.
        payload['action'] = 'remove'

        resp = self.client.post(
            project_mng_user_url,
            data=dumps(payload),
            content_type='application/json',
            headers={'Authorization': self.make_header('other-token')})
        self.assertEqual(200, resp.status_code, resp.data)

        # Check if the user is now actually removed from the group.
        with self.app.test_request_context():
            users = self.app.data.driver.db['users']

            db_user = users.find_one(self.other_user_id)
            admin_group = proj_utils.get_admin_group(self.project)
            self.assertNotIn(admin_group['_id'], db_user['groups'])
Example #14
    def test_editing_as_admin(self):
        """Test that we can set all fields as admin."""

        from pillar.api.utils import remove_private_keys, PillarJSONEncoder
        dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)

        project_info = self._create_user_and_project(['subscriber', 'admin'])
        project_url = '/api/projects/%(_id)s' % project_info

        resp = self.client.get(project_url)
        project = json.loads(resp.data.decode('utf-8'))

        # Create another user we can try and assign the project to.
        other_user_id = 'f00dd00df00dd00df00dd00d'
        self._create_user_with_token(['subscriber'],
                                     'other-token',
                                     user_id=other_user_id)

        # Admin user should be able to PUT everything.
        put_project = remove_private_keys(project)
        put_project['url'] = 'very-offensive-url'
        put_project['description'] = 'Blender je besplatan set alata za izradu interaktivnog 3D ' \
                                     'sadržaja pod različitim operativnim sustavima.'
        put_project['name'] = 'โครงการปั่นเมฆ'
        put_project['summary'] = 'Это переведена на Google'
        put_project['is_private'] = False
        put_project['status'] = 'pending'
        put_project['category'] = 'software'
        put_project['user'] = other_user_id

        resp = self.client.put(project_url,
                               data=dumps(put_project),
                               headers={
                                   'Authorization': self.make_header('token'),
                                   'Content-Type': 'application/json',
                                   'If-Match': project['_etag']
                               })
        self.assertEqual(200, resp.status_code, resp.data)

        # Re-fetch from database to see which fields actually made it there.
        # equal to put_project -> changed in DB
        # equal to project -> not changed in DB
        resp = self.client.get('/api/projects/%s' % project['_id'])
        db_proj = json.loads(resp.data)
        self.assertEqual(put_project['url'], db_proj['url'])
        self.assertEqual(put_project['description'], db_proj['description'])
        self.assertEqual(put_project['name'], db_proj['name'])
        self.assertEqual(put_project['summary'], db_proj['summary'])
        self.assertEqual(put_project['is_private'], db_proj['is_private'])
        self.assertEqual(put_project['status'], db_proj['status'])
        self.assertEqual(put_project['category'], db_proj['category'])
        self.assertEqual(put_project['user'], db_proj['user'])

    def create_test_node(self, project_id, status_code=201):
        from pillar.api.utils import dumps

        node = {
            'project': project_id,
            'node_type': 'group',
            'name': 'test group node',
            'user': self.user_id,
            'properties': {},
        }

        resp = self.client.post('/api/nodes', data=dumps(node),
                                headers={'Authorization': self.make_header('token'),
                                         'Content-Type': 'application/json'})
        self.assertEqual(status_code, resp.status_code, resp.data)
Example #16
    def test_compress_flamenco_task_log(self):
        from pillar.api.utils import dumps
        from flamenco.celery import job_archival

        # Make sure there are log entries.
        for batch_idx in range(3):
            now = datetime.datetime.now(tz=bson.tz_util.utc)
            update_batch = [{
                '_id': str(bson.ObjectId()),
                'task_id': task_id,
                'activity': f'testing logging batch {batch_idx}',
                'log':
                40 * f'This is batch {batch_idx} mülti→line log entry\n',
                'received_on_manager': now
            } for task_id in self.task_ids]
            self.post(
                f'/api/flamenco/managers/{self.mngr_id}/task-update-batch',
                json=update_batch,
                auth_token=self.mngr_token)

        expected_log = ''.join(
            40 * f'This is batch {batch_idx} mülti→line log entry\n'
            for batch_idx in range(3))

        task_id = self.task_ids[1]
        test_task = self.flamenco.db('tasks').find_one(
            {'_id': bson.ObjectId(task_id)})

        with tempfile.TemporaryDirectory() as tempdir:
            # Perform the task log compression
            storage_path = pathlib.Path(tempdir)
            job_archival.download_task_and_log(tempdir, task_id)

            # Check that the files are there and contain the correct data.
            task_log_file = storage_path / f'task-{task_id}.log.gz'
            with gzip.open(task_log_file) as infile:
                contents = infile.read().decode()
                self.assertEqual(expected_log, contents)

            task_contents_file = storage_path / f'task-{task_id}.json'
            expected_task = json.loads(dumps(test_task))

            with task_contents_file.open() as infile:
                read_task = json.load(infile)
                self.assertEqual(set(expected_task.keys()),
                                 set(read_task.keys()))
                self.assertEqual(expected_task, read_task)
Example #17
    def _test_user(self, test_node):
        from pillar.api.utils import dumps

        resp = self.client.post('/api/nodes',
                                data=dumps(test_node),
                                headers={
                                    'Authorization': self.make_header('token'),
                                    'Content-Type': 'application/json'
                                })
        self.assertEqual(201, resp.status_code, resp.data)
        created = json.loads(resp.data)
        resp = self.client.get(
            '/api/nodes/%s' % created['_id'],
            headers={'Authorization': self.make_header('token')})
        self.assertEqual(200, resp.status_code, resp.data)
        json_node = json.loads(resp.data)
        self.assertEqual(str(self.user_id), json_node['user'])
Example #18
    def test_compress_flamenco_task_log(self):
        from pillar.api.utils import dumps
        from flamenco.celery import job_archival

        # Make sure there are log entries.
        for batch_idx in range(3):
            now = datetime.datetime.now(tz=bson.tz_util.utc)
            update_batch = [
                {'_id': str(bson.ObjectId()),
                 'task_id': task_id,
                 'activity': f'testing logging batch {batch_idx}',
                 'log': 40 * f'This is batch {batch_idx} mülti→line log entry\n',
                 'received_on_manager': now}
                for task_id in self.task_ids
            ]
            self.post(f'/api/flamenco/managers/{self.mngr_id}/task-update-batch',
                      json=update_batch,
                      auth_token=self.mngr_token)

        expected_log = ''.join(
            40 * f'This is batch {batch_idx} mülti→line log entry\n'
            for batch_idx in range(3))

        task_id = self.task_ids[1]
        test_task = self.flamenco.db('tasks').find_one({'_id': bson.ObjectId(task_id)})

        with tempfile.TemporaryDirectory() as tempdir:
            # Perform the task log compression
            storage_path = pathlib.Path(tempdir)
            job_archival.download_task_and_log(tempdir, task_id)

            # Check that the files are there and contain the correct data.
            task_log_file = storage_path / f'task-{task_id}.log.gz'
            with gzip.open(task_log_file) as infile:
                contents = infile.read().decode()
                self.assertEqual(expected_log, contents)

            task_contents_file = storage_path / f'task-{task_id}.json'
            expected_task = json.loads(dumps(test_task))

            with task_contents_file.open() as infile:
                read_task = json.load(infile)
                self.assertEqual(set(expected_task.keys()), set(read_task.keys()))
                self.assertEqual(expected_task, read_task)
Example #19
def create_service_account(email,
                           service_roles,
                           service_definition,
                           *,
                           full_name: str = None):
    from pillar.api import service
    from pillar.api.utils import dumps

    account, token = service.create_service_account(
        email,
        service_roles,
        service_definition,
        full_name=full_name,
    )

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    return account, token
Example #20
def download_task_and_log(storage_path: str, task_id: str):
    """Downloads task + task log and stores them."""

    import gzip
    import pymongo

    task_oid = bson.ObjectId(task_id)
    log.info('Archiving task %s to %s', task_oid, storage_path)

    tasks_coll = current_flamenco.db('tasks')
    logs_coll = current_flamenco.db('task_logs')

    task = tasks_coll.find_one({'_id': task_oid})

    # Use the exact same sort as we've created an index for.
    logs = logs_coll.find({
        'task': task_oid
    }).sort([
        ('task', pymongo.ASCENDING),
        ('received_on_manager', pymongo.ASCENDING),
    ])

    # Save the task as JSON
    spath = pathlib.Path(storage_path)
    task_path = spath / f'task-{task_id}.json'
    with open(task_path, mode='w', encoding='utf8') as outfile:
        outfile.write(dumps(task, indent=4, sort_keys=True))

    # Get the task log bits and write to compressed file.
    log_path = spath / f'task-{task_id}.log.gz'
    with gzip.open(log_path, mode='wb') as outfile:
        for log_entry in logs:
            try:
                log_contents = log_entry['log']
            except KeyError:
                # No 'log' in this log entry. Bit weird, but we shouldn't crash on it.
                continue
            outfile.write(log_contents.encode())
Example #21
def download_task_and_log(storage_path: str, task_id: str):
    """Downloads task + task log and stores them."""

    import gzip
    import pymongo

    task_oid = bson.ObjectId(task_id)
    log.info('Archiving task %s to %s', task_oid, storage_path)

    tasks_coll = current_flamenco.db('tasks')
    logs_coll = current_flamenco.db('task_logs')

    task = tasks_coll.find_one({'_id': task_oid})

    # Use the exact same sort as we've created an index for.
    logs = logs_coll.find({'task': task_oid}).sort([
        ('task', pymongo.ASCENDING),
        ('received_on_manager', pymongo.ASCENDING),
    ])

    # Save the task as JSON
    spath = pathlib.Path(storage_path)
    task_path = spath / f'task-{task_id}.json'
    with open(task_path, mode='w', encoding='utf8') as outfile:
        outfile.write(dumps(task, indent=4, sort_keys=True))

    # Get the task log bits and write to compressed file.
    log_path = spath / f'task-{task_id}.log.gz'
    with gzip.open(log_path, mode='wb') as outfile:
        for log_entry in logs:
            try:
                log_contents = log_entry['log']
            except KeyError:
                # No 'log' in this log entry. Bit weird, but we shouldn't crash on it.
                continue
            outfile.write(log_contents.encode())
Example #22
        def test_for(node, expected_picture_id):
            # Create the node
            resp = self.client.post('/api/nodes',
                                    data=dumps(node),
                                    headers={
                                        'Authorization':
                                        self.make_header('token'),
                                        'Content-Type': 'application/json'
                                    })
            self.assertEqual(resp.status_code, 201, resp.data)
            node_id = json.loads(resp.data)['_id']

            # Test that the node has the attached file as picture.
            resp = self.client.get(
                '/api/nodes/%s' % node_id,
                headers={'Authorization': self.make_header('token')})
            self.assertEqual(resp.status_code, 200, resp.data)
            json_node = json.loads(resp.data)

            if expected_picture_id:
                self.assertEqual(ObjectId(json_node['picture']),
                                 expected_picture_id)
            else:
                self.assertNotIn('picture', json_node)
Example #23
def archive_job(job_id: str):
    """Archives a given job.

    - Sets job status "archiving" (if not already that status).
    - For each task, de-chunks the task logs and gz-compresses them.
    - Creates a ZIP file with the job+task definitions in JSON and compressed logs.
    - Uploads the ZIP to the project's file storage.
    - Records the link of the ZIP in the job document.
    - Deletes the tasks and task logs in MongoDB.
    - Sets the job status to "archived".
    """
    import tempfile
    import celery

    try:
        job_oid = bson.ObjectId(job_id)
    except bson.errors.InvalidId as ex:
        log.error('%s', ex)
        return

    jobs_coll = current_flamenco.db('jobs')
    job = jobs_coll.find_one({'_id': job_oid})
    if job is None:
        log.info('Job %s does not exist, not archiving', job_oid)
        return

    if job['status'] == 'archived':
        log.info('Job %s already archived, not archiving again', job_oid)
        return

    log.info('Archiving job %s', job_oid)

    # Create a temporary directory for the file operations.
    storage_path = tempfile.mkdtemp(prefix=f'job-archival-{job_id}-')
    zip_path = pathlib.Path(storage_path) / f'flamenco-job-{job_id}.zip'
    log.info('Job archival path: %s', storage_path)

    # TODO: store the ZIP link in the job JSON in MongoDB.

    # Write the job to JSON.
    pre_archive_status = job.get('pre_archive_status')
    if pre_archive_status:
        job['status'] = pre_archive_status
        del job['pre_archive_status']

    job_json_path = pathlib.Path(storage_path) / f'job-{job_id}.json'
    with job_json_path.open(mode='w', encoding='utf8') as outfile:
        outfile.write(dumps(job, indent=4, sort_keys=True))

    # Set job status to 'archiving'.
    res = current_flamenco.job_manager.api_set_job_status(job_oid, 'archiving')
    if res.matched_count != 1:
        raise ArchivalError(
            f'Unable to update job {job_oid}, matched count={res.matched_count}'
        )

    # Run each task log compression in a separate Celery task.
    tasks_coll = current_flamenco.db('tasks')
    tasks = tasks_coll.find({'job': job_oid}, {'_id': 1})
    tasks_count = tasks_coll.count_documents({'job': job_oid})

    # The chain of everything except downloading tasks & logs. Celery can't handle empty
    # groups, so we have to be careful in constructing the download_tasks group.
    chain = (create_upload_zip.si(str(job['project']), storage_path,
                                  str(zip_path)) | update_mongo.s(job_id)
             | cleanup.si(storage_path))

    if tasks_count:
        download_tasks = celery.group(
            *(download_task_and_log.si(storage_path, str(task['_id']))
              for task in tasks))
        chain = download_tasks | chain

    chain()
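
The group-into-chain composition above uses Celery's canvas primitives. A minimal, self-contained sketch of the same shape; the task names, broker, and backend are illustrative and not part of this codebase:

from celery import Celery, group

app = Celery('archival-sketch', broker='memory://', backend='cache+memory://')


@app.task
def download(item_id: str) -> str:
    # Stand-in for download_task_and_log.si(...).
    return item_id


@app.task
def zip_and_upload(_group_result=None) -> str:
    # Stand-in for create_upload_zip.si(...); like the immutable .si()
    # signatures above, it ignores the results of the preceding group.
    return 'zipped'


@app.task
def cleanup(_previous=None) -> str:
    return 'cleaned up'


# A group piped into a chain: the downloads can run in parallel, and the
# follow-up steps only start once the whole group has finished (Celery turns
# this into a chord, which is why a result backend is configured).
workflow = group(download.si(i) for i in ('a', 'b', 'c')) | zip_and_upload.si() | cleanup.si()
# workflow.delay() would enqueue it on a running worker.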
Example #24
def archive_job(job_id: str):
    """Archives a given job.

    - Sets job status "archiving" (if not already that status).
    - For each task, de-chunks the task logs and gz-compresses them.
    - Creates a ZIP file with the job+task definitions in JSON and compressed logs.
    - Uploads the ZIP to the project's file storage.
    - Records the link of the ZIP in the job document.
    - Deletes the tasks and task logs in MongoDB.
    - Sets the job status to "archived".
    """
    import tempfile
    import celery

    try:
        job_oid = bson.ObjectId(job_id)
    except bson.errors.InvalidId as ex:
        log.error('%s', ex)
        return

    jobs_coll = current_flamenco.db('jobs')
    job = jobs_coll.find_one({'_id': job_oid})
    if job is None:
        log.info('Job %s does not exist, not archiving', job_oid)
        return

    if job['status'] == 'archived':
        log.info('Job %s already archived, not archiving again', job_oid)
        return

    log.info('Archiving job %s', job_oid)

    # Create a temporary directory for the file operations.
    storage_path = tempfile.mkdtemp(prefix=f'job-archival-{job_id}-')
    zip_path = pathlib.Path(storage_path) / f'flamenco-job-{job_id}.zip'
    log.info('Job archival path: %s', storage_path)

    # TODO: store the ZIP link in the job JSON in MongoDB.

    # Write the job to JSON.
    pre_archive_status = job.get('pre_archive_status')
    if pre_archive_status:
        job['status'] = pre_archive_status
        del job['pre_archive_status']

    job_json_path = pathlib.Path(storage_path) / f'job-{job_id}.json'
    with job_json_path.open(mode='w', encoding='utf8') as outfile:
        outfile.write(dumps(job, indent=4, sort_keys=True))

    # Set job status to 'archiving'.
    res = current_flamenco.job_manager.api_set_job_status(job_oid, 'archiving')
    if res.matched_count != 1:
        raise ArchivalError(f'Unable to update job {job_oid}, matched count={res.matched_count}')

    # Run each task log compression in a separate Celery task.
    tasks_coll = current_flamenco.db('tasks')
    tasks = tasks_coll.find({'job': job_oid}, {'_id': 1})

    # The chain of everything except downloading tasks & logs. Celery can't handle empty
    # groups, so we have to be careful in constructing the download_tasks group.
    chain = (
            create_upload_zip.si(str(job['project']), storage_path, str(zip_path)) |
            update_mongo.s(job_id) |
            cleanup.si(storage_path)
    )

    # Cursor.count() is deprecated in newer PyMongo; count on the collection instead.
    if tasks_coll.count_documents({'job': job_oid}):
        download_tasks = celery.group(*(
            download_task_and_log.si(storage_path, str(task['_id']))
            for task in tasks))
        chain = download_tasks | chain

    chain()
Example #25
    def test_autocreate_home_project_with_succubus_role(self):
        from pillar.api.utils import dumps

        # Implicitly create user by token validation.
        self.mock_blenderid_validate_happy()
        resp = self.client.get(
            '/api/users/me',
            headers={'Authorization': self.make_header('token')})
        self.assertEqual(200, resp.status_code, resp.data)
        user_id = ObjectId(json.loads(resp.data)['_id'])

        # Grant succubus role, which should allow creation of a read-only home project.
        self.badger(TEST_EMAIL_ADDRESS, {'succubus', 'homeproject'}, 'grant')

        resp = self.client.get(
            '/api/bcloud/home-project',
            headers={'Authorization': self.make_header('token')})
        self.assertEqual(200, resp.status_code)
        json_proj = json.loads(resp.data)
        self.assertEqual('home', json_proj['category'])
        self.assertEqual('home', json_proj['url'])

        # Check that the admin group of the project only has GET permissions.
        self.assertEqual({'GET'},
                         set(json_proj['permissions']['groups'][0]['methods']))
        project_id = ObjectId(json_proj['_id'])
        admin_group_id = json_proj['permissions']['groups'][0]['group']

        # Check that a Blender Sync node was created automatically.
        expected_node_permissions = {
            'users': [],
            'groups': [
                {
                    'group': ObjectId(admin_group_id),
                    'methods': ['GET', 'PUT', 'POST', 'DELETE']
                },
            ],
            'world': []
        }
        with self.app.test_request_context(
                headers={'Authorization': self.make_header('token')}):
            nodes_coll = self.app.data.driver.db['nodes']
            node = nodes_coll.find_one({
                'project': project_id,
                'node_type': 'group',
                'name': 'Blender Sync',
            })
            self.assertIsNotNone(node)

            # Check that the node itself has write permissions for the admin group.
            node_perms = node['permissions']
            self.assertEqual(node_perms, expected_node_permissions)
            sync_node_id = node['_id']

        # Check that we can create a group node inside the sync node.
        sub_sync_node = {
            'project': project_id,
            'node_type': 'group',
            'parent': sync_node_id,
            'name': '2.77',
            'user': user_id,
            'description': 'Sync folder for Blender 2.77',
            'properties': {
                'status': 'published'
            },
        }
        resp = self.client.post('/api/nodes',
                                data=dumps(sub_sync_node),
                                headers={
                                    'Authorization': self.make_header('token'),
                                    'Content-Type': 'application/json'
                                })
        self.assertEqual(201, resp.status_code, resp.data)
        sub_node_info = json.loads(resp.data)

        # Check the explicit node-level permissions are copied.
        # These aren't returned by the POST to Eve, so we have to check them in the DB manually.
        with self.app.test_request_context(
                headers={'Authorization': self.make_header('token')}):
            nodes_coll = self.app.data.driver.db['nodes']
            sub_node = nodes_coll.find_one(ObjectId(sub_node_info['_id']))

            node_perms = sub_node['permissions']
            self.assertEqual(node_perms, expected_node_permissions)
Example #26
import logging

from eve.methods.get import get
from eve.utils import config as eve_config
from flask import Blueprint, request, current_app
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError

from pillar.api import utils
from pillar.api.utils.authentication import current_user_id
from pillar.api.utils.authorization import require_login
from pillar.auth import current_user

FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)
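# Eve expects the projection and sort query arguments as JSON strings, hence utils.dumps().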
TL_PROJECTION = utils.dumps({
    'name': 1,
    'url': 1,
    'permissions': 1,
})
TL_SORT = utils.dumps([('name', 1)])

TEXTURE_LIBRARY_QUERY_ARGS = {
    eve_config.QUERY_PROJECTION: TL_PROJECTION,
    eve_config.QUERY_SORT: TL_SORT,
    'max_results': 'null',  # this needs to be there, or we get a KeyError.
}

blueprint = Blueprint('blender_cloud.texture_libs', __name__)
log = logging.getLogger(__name__)


def keep_fetching_texture_libraries(proj_filter):