Example #1
 def show(id):
     # Convert the incoming id to an ObjectId and fetch the matching document
     ingredient = Ingredient.find_one({
         "_id": bson.ObjectId(oid=str(id))
     })
     return dumps(ingredient)  # serialize the document to JSON
Example #2
 def handle_Str(self, node):
     return bson.ObjectId(node.s)
Example #3
def containers(data_builder, as_admin, file_form, api_db):
    """Populate DB with test dataset including deleted and non-deleted entries."""
    p_1 = data_builder.create_project()
    s_1_1 = data_builder.create_session(project=p_1)
    c_1_1_1 = data_builder.create_collection()
    an_1_1_1 = as_admin.post(
        '/sessions/' + s_1_1 + '/analyses',
        files=file_form('analysis.csv', meta={
            'label': 'no-job',
            'inputs': [{'name': 'analysis.csv'}],
        })).json()['_id']
    ac_1_1_1 = data_builder.create_acquisition(session=s_1_1)
    ac_1_1_2 = data_builder.create_acquisition(session=s_1_1)
    s_1_2 = data_builder.create_session(project=p_1)
    ac_1_2_1 = data_builder.create_acquisition(session=s_1_2)
    p_2 = data_builder.create_project()
    s_2_1 = data_builder.create_session(project=p_2)
    ac_2_1_1 = data_builder.create_acquisition(session=s_2_1)
    assert as_admin.post('/acquisitions/' + ac_1_1_1 + '/files',
                         files=file_form('f_1_1_1_1')).ok
    assert as_admin.post('/acquisitions/' + ac_1_1_1 + '/files',
                         files=file_form('f_1_1_1_2')).ok
    assert as_admin.post('/acquisitions/' + ac_2_1_1 + '/files',
                         files=file_form('f_2_1_1_1')).ok

    assert as_admin.delete('/sessions/' + s_1_1 + '/analyses/' + an_1_1_1).ok
    assert as_admin.delete('/collections/' + c_1_1_1).ok
    assert as_admin.delete('/acquisitions/' + ac_1_1_1 + '/files/f_1_1_1_1').ok
    assert as_admin.delete('/acquisitions/' + ac_1_1_1).ok
    assert as_admin.delete('/sessions/' + s_1_1).ok
    assert as_admin.delete('/projects/' + p_1).ok

    containers = attrdict.AttrDict(
        p_1=p_1,
        s_1_1=s_1_1,
        c_1_1_1=c_1_1_1,
        an_1_1_1=an_1_1_1,
        ac_1_1_1=ac_1_1_1,
        ac_1_1_2=ac_1_1_2,
        s_1_2=s_1_2,
        ac_1_2_1=ac_1_2_1,
        p_2=p_2,
        s_2_1=s_2_1,
        ac_2_1_1=ac_2_1_1,
    )

    def is_deleted(cont_key, filename=None):
        cont_name = {
            'p': 'projects',
            's': 'sessions',
            'ac': 'acquisitions',
            'an': 'analyses',
            'c': 'collections',
        }[cont_key.split('_')[0]]
        url = '/{}/{}'.format(cont_name, containers[cont_key])
        if filename is None:
            return as_admin.get(url).status_code == 404
        else:
            return as_admin.get(url + '/files/' + filename).status_code == 404

    containers['is_deleted'] = is_deleted
    yield containers
    api_db.analyses.delete_one({'_id': bson.ObjectId(an_1_1_1)})
Example #4
from my_mongodb import MyMongo
import zlib
import bson
from cStringIO import StringIO
# f = open('../test1/tianyancha/detail.html')
# s = f.read()
# content = StringIO(f.read())

db = MyMongo().get_db()


# db['test'].save({'source': bson.binary.Binary(zlib.compress(s))})
# db['test'].save({'source': content.getvalue()})
print ''
print db['raw_result_gzcc'].find({'_id': {'$gt': bson.ObjectId('5bd90a48c02bfe18842558e9')}}).count()
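
The script above is Python 2 (print statement, cStringIO) and relies on Cursor.count(), which modern PyMongo has removed. A hedged Python 3 equivalent of the final query, assuming the same db handle:

n = db['raw_result_gzcc'].count_documents(
    {'_id': {'$gt': bson.ObjectId('5bd90a48c02bfe18842558e9')}})
print(n)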
Example #5
def decode(obj: Any) -> Any:
    if "__datetime__" in obj:
        return datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
    if "__object_id__" in obj:
        return bson.ObjectId(obj["as_str"])
    return obj
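
A hedged round-trip sketch showing decode() used as a json object_hook; the encode() helper below is an assumption, written to mirror the tags decode() looks for:

import datetime
import json
import bson

def encode(obj):
    # Tag datetimes and ObjectIds so they survive JSON serialization
    if isinstance(obj, datetime.datetime):
        return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")}
    if isinstance(obj, bson.ObjectId):
        return {"__object_id__": True, "as_str": str(obj)}
    raise TypeError("unsupported type: {!r}".format(type(obj)))

doc = {"_id": bson.ObjectId(), "created": datetime.datetime.utcnow()}
payload = json.dumps(doc, default=encode)
restored = json.loads(payload, object_hook=decode)
assert isinstance(restored["_id"], bson.ObjectId)
assert isinstance(restored["created"], datetime.datetime)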
Example #6
 async def convert(self, ctx, argument):
     try:
         return bson.ObjectId(argument)
     except bson.errors.InvalidId:
         raise commands.BadArgument
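
This signature matches a discord.py commands.Converter. A minimal sketch of how such a converter is typically wired into a command (the ObjectIdConverter and show_doc names are assumptions):

import bson
import discord
from discord.ext import commands

class ObjectIdConverter(commands.Converter):
    async def convert(self, ctx, argument):
        try:
            return bson.ObjectId(argument)
        except bson.errors.InvalidId:
            raise commands.BadArgument('"{}" is not a valid ObjectId'.format(argument))

bot = commands.Bot(command_prefix='!', intents=discord.Intents.default())

@bot.command()
async def show_doc(ctx, doc_id: ObjectIdConverter):
    # doc_id arrives here already converted to bson.ObjectId
    await ctx.send('Looking up document {}'.format(doc_id))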
Example #7
 def __project_refs(self, project_id, operation):
     project_id = bson.ObjectId(project_id)
     return self.db.collection.find_one_and_update(
         {"_id": self.user_id},
         {operation: {"project_refs": project_id}})
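
Presumably `operation` is a MongoDB array-update operator; a sketch of the update documents this helper builds (the ObjectId value is illustrative):

import bson

project_id = bson.ObjectId('5a14cf0e36ed1e17a55f1e35')

# operation="$addToSet" attaches the project ref without duplicates:
update = {'$addToSet': {'project_refs': project_id}}

# operation="$pull" detaches it again:
update = {'$pull': {'project_refs': project_id}}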
Example #8
 def make_metadata(self, **kwargs):
     kwargs = du.defaults(kwargs, {
         'hash_': b'\xa5\xc9\x08\xaf$\x0b\x116',
         'source_type': imeta.ImageSourceType.SYNTHETIC,
         'environment_type': imeta.EnvironmentType.INDOOR_CLOSE,
         'light_level': imeta.LightingLevel.WELL_LIT,
         'time_of_day': imeta.TimeOfDay.DAY,
         'camera_pose': tf.Transform((1, 3, 4), (0.2, 0.8, 0.2, -0.7)),
         'right_camera_pose': tf.Transform((-10, -20, -30), (0.9, -0.7, 0.5, -0.3)),
         'intrinsics': cam_intr.CameraIntrinsics(700, 700, 654.2, 753.3, 400, 300),
         'right_intrinsics': cam_intr.CameraIntrinsics(700, 710, 732.1, 612.3, 400, 300),
         'lens_focal_distance': 5,
         'aperture': 22,
         'simulator': bson.ObjectId('5a14cf0e36ed1e17a55f1e35'),
         'simulation_world': 'TestSimulationWorld',
         'lighting_model': imeta.LightingModel.LIT,
         'texture_mipmap_bias': 1,
         'normal_maps_enabled': True,
         'roughness_enabled': True,
         'geometry_decimation': 0.8,
         'procedural_generation_seed': 16234,
         'labelled_objects': [
             imeta.LabelledObject(
                 class_names=('cup',),
                 bounding_box=(142, 280, 54, 78),
                 label_color=(2, 227, 34),
                 relative_pose=tf.Transform(location=(-246, 468, 4),
                                            rotation=(0.2, 0.3, 0.4)),
                 object_id='LabelledObject-68478'),
             imeta.LabelledObject(
                 class_names=('car',),
                 bounding_box=(542, 83, 63, 123),
                 label_color=(26, 12, 212),
                 relative_pose=tf.Transform(location=(61, -717, 161),
                                            rotation=(0.7, 0.6, 0.3)),
                 object_id='LabelledObject-8246'),
             imeta.LabelledObject(
                 class_names=('cow',),
                 bounding_box=(349, 672, 124, 208),
                 label_color=(162, 134, 163),
                 relative_pose=tf.Transform(location=(286, -465, -165),
                                            rotation=(0.9, 0.1, 0.5)),
                 object_id='LabelledObject-56485')
         ],
         'average_scene_depth': 90.12,
         'base_image': self.parent_image,
         'transformation_matrix': np.array([[0.19882871, 0.58747441, 0.90084303],
                                            [0.6955363, 0.48193339, 0.09503605],
                                            [0.20549805, 0.6110534, 0.61145574]])
     })
     return imeta.ImageMetadata(**kwargs)
Example #9
    def test_clone(self):
        alt_metadata = {
            'hash_': [b'\x1f`\xa8\x8aR\xed\x9f\x0b'],
            'source_type': [imeta.ImageSourceType.REAL_WORLD],
            'environment_type': [
                imeta.EnvironmentType.INDOOR,
                imeta.EnvironmentType.OUTDOOR_URBAN,
                imeta.EnvironmentType.OUTDOOR_LANDSCAPE
            ],
            'light_level': [
                imeta.LightingLevel.PITCH_BLACK, imeta.LightingLevel.DIM,
                imeta.LightingLevel.EVENLY_LIT, imeta.LightingLevel.BRIGHT
            ],
            'time_of_day': [
                imeta.TimeOfDay.DAWN, imeta.TimeOfDay.MORNING,
                imeta.TimeOfDay.AFTERNOON, imeta.TimeOfDay.TWILIGHT,
                imeta.TimeOfDay.NIGHT
            ],
            'camera_pose': [tf.Transform((12, 13, 14), (-0.5, 0.3, 0.8, -0.9))],
            'right_camera_pose': [tf.Transform((11, 15, 19), (-0.2, 0.4, 0.6, -0.8))],
            'intrinsics': [cam_intr.CameraIntrinsics(900, 910, 894.7, 861.2, 640, 360)],
            'right_intrinsics': [cam_intr.CameraIntrinsics(900, 890, 760.45, 405.1, 640, 360)],
            'lens_focal_distance': [22],
            'aperture': [1.2],
            'simulator': [bson.ObjectId()],
            'simulation_world': ['TestSimulationWorld2'],
            'lighting_model': [imeta.LightingModel.UNLIT],
            'texture_mipmap_bias': [2],
            'normal_maps_enabled': [False],
            'roughness_enabled': [False],
            'geometry_decimation': [0.3],
            'procedural_generation_seed': [7329],
            'average_scene_depth': [102.33],
            'base_image': [mock.create_autospec(core.image.Image)],
            'transformation_matrix': [np.random.uniform(0, 1, (3, 3))],
            'labelled_objects': [
                tuple(),
                (imeta.LabelledObject(class_names=('cup', ),
                                      bounding_box=(142, 280, 54, 78),
                                      label_color=(2, 227, 34),
                                      relative_pose=tf.Transform(
                                          location=(-246, 468, 4),
                                          rotation=(0.2, 0.3, 0.4)),
                                      object_id='LabelledObject-68478'),
                 imeta.LabelledObject(class_names=('cat', ),
                                      bounding_box=(542, 83, 63, 123),
                                      label_color=(26, 12, 212),
                                      relative_pose=tf.Transform(
                                          location=(61, -717, 161),
                                          rotation=(0.7, 0.6, 0.3)),
                                      object_id='LabelledObject-8246'),
                 imeta.LabelledObject(class_names=('cow', ),
                                      bounding_box=(349, 672, 124, 208),
                                      label_color=(162, 134, 163),
                                      relative_pose=tf.Transform(
                                          location=(286, -465, -165),
                                          rotation=(0.9, 0.1, 0.5)),
                                      object_id='LabelledObject-56485')),
                (imeta.LabelledObject(class_names=('cup', ),
                                      bounding_box=(142, 12, 54, 78),
                                      label_color=(2, 227, 34),
                                      relative_pose=tf.Transform(
                                          location=(-246, 468, 4),
                                          rotation=(0.2, 0.3, 0.4)),
                                      object_id='LabelledObject-68478'),
                 imeta.LabelledObject(class_names=('car', ),
                                      bounding_box=(542, 83, 63, 123),
                                      label_color=(26, 12, 212),
                                      relative_pose=tf.Transform(
                                          location=(61, -717, 161),
                                          rotation=(0.7, 0.6, 0.3)),
                                      object_id='LabelledObject-8246'),
                 imeta.LabelledObject(class_names=('cow', ),
                                      bounding_box=(349, 672, 124, 208),
                                      label_color=(162, 134, 163),
                                      relative_pose=tf.Transform(
                                          location=(286, -465, -165),
                                          rotation=(0.9, 0.1, 0.5)),
                                      object_id='LabelledObject-56485')),
                (imeta.LabelledObject(class_names=('cup', ),
                                      bounding_box=(142, 280, 54, 78),
                                      label_color=(2, 227, 34),
                                      relative_pose=tf.Transform(
                                          location=(-246, 468, 4),
                                          rotation=(0.2, 0.3, 0.4)),
                                      object_id='LabelledObject-68478'),
                 imeta.LabelledObject(class_names=('car', ),
                                      bounding_box=(542, 83, 63, 123),
                                      label_color=(26, 12, 212),
                                      relative_pose=tf.Transform(
                                          location=(61, -717, 161),
                                          rotation=(0.7, 0.6, 0.3)),
                                      object_id='LabelledObject-8246'),
                 imeta.LabelledObject(class_names=('cow', ),
                                      bounding_box=(349, 672, 124, 208),
                                      label_color=(162, 134, 255),
                                      relative_pose=tf.Transform(
                                          location=(286, -465, -165),
                                          rotation=(0.9, 0.1, 0.5)),
                                      object_id='LabelledObject-56485'))
            ]
        }
        a = self.make_metadata()
        b = a.clone()
        self.assert_metadata_equal(a, b)

        # Change one key at a time, and make sure the clone no longer compares equal on that field
        for key, values in alt_metadata.items():
            for val in values:
                b = a.clone(**{key: val})

                if key == 'hash_':
                    self.assertEqual(val, b.hash)
                    self.assertNotEqual(a.hash, b.hash)
                else:
                    self.assertEqual(a.hash, b.hash)
                if key == 'source_type':
                    self.assertEqual(val, b.source_type)
                    self.assertNotEqual(a.source_type, b.source_type)
                else:
                    self.assertEqual(a.source_type, b.source_type)
                if key == 'environment_type':
                    self.assertEqual(val, b.environment_type)
                    self.assertNotEqual(a.environment_type, b.environment_type)
                else:
                    self.assertEqual(a.environment_type, b.environment_type)
                if key == 'light_level':
                    self.assertEqual(val, b.light_level)
                    self.assertNotEqual(a.light_level, b.light_level)
                else:
                    self.assertEqual(a.light_level, b.light_level)
                if key == 'time_of_day':
                    self.assertEqual(val, b.time_of_day)
                    self.assertNotEqual(a.time_of_day, b.time_of_day)
                else:
                    self.assertEqual(a.time_of_day, b.time_of_day)
                if key == 'camera_pose':
                    self.assertEqual(val, b.camera_pose)
                    self.assertNotEqual(a.camera_pose, b.camera_pose)
                else:
                    self.assertEqual(a.camera_pose, b.camera_pose)
                if key == 'right_camera_pose':
                    self.assertEqual(val, b.right_camera_pose)
                    self.assertNotEqual(a.right_camera_pose,
                                        b.right_camera_pose)
                else:
                    self.assertEqual(a.right_camera_pose, b.right_camera_pose)
                if key == 'intrinsics':
                    self.assertEqual(val, b.camera_intrinsics)
                    self.assertNotEqual(a.camera_intrinsics,
                                        b.camera_intrinsics)
                else:
                    self.assertEqual(a.camera_intrinsics, b.camera_intrinsics)
                    self.assertEqual(a.width, b.width)
                    self.assertEqual(a.height, b.height)
                if key == 'right_intrinsics':
                    self.assertEqual(val, b.right_camera_intrinsics)
                    self.assertNotEqual(a.right_camera_intrinsics,
                                        b.right_camera_intrinsics)
                else:
                    self.assertEqual(a.right_camera_intrinsics,
                                     b.right_camera_intrinsics)
                if key == 'lens_focal_distance':
                    self.assertEqual(val, b.lens_focal_distance)
                    self.assertNotEqual(a.lens_focal_distance,
                                        b.lens_focal_distance)
                else:
                    self.assertEqual(a.lens_focal_distance,
                                     b.lens_focal_distance)
                if key == 'aperture':
                    self.assertEqual(val, b.aperture)
                    self.assertNotEqual(a.aperture, b.aperture)
                else:
                    self.assertEqual(a.aperture, b.aperture)
                if key == 'simulation_world':
                    self.assertEqual(val, b.simulation_world)
                    self.assertNotEqual(a.simulation_world, b.simulation_world)
                else:
                    self.assertEqual(a.simulation_world, b.simulation_world)
                if key == 'lighting_model':
                    self.assertEqual(val, b.lighting_model)
                    self.assertNotEqual(a.lighting_model, b.lighting_model)
                else:
                    self.assertEqual(a.lighting_model, b.lighting_model)
                if key == 'texture_mipmap_bias':
                    self.assertEqual(val, b.texture_mipmap_bias)
                    self.assertNotEqual(a.texture_mipmap_bias,
                                        b.texture_mipmap_bias)
                else:
                    self.assertEqual(a.texture_mipmap_bias,
                                     b.texture_mipmap_bias)
                if key == 'normal_maps_enabled':
                    self.assertEqual(val, b.normal_maps_enabled)
                    self.assertNotEqual(a.normal_maps_enabled,
                                        b.normal_maps_enabled)
                else:
                    self.assertEqual(a.normal_maps_enabled,
                                     b.normal_maps_enabled)
                if key == 'roughness_enabled':
                    self.assertEqual(val, b.roughness_enabled)
                    self.assertNotEqual(a.roughness_enabled,
                                        b.roughness_enabled)
                else:
                    self.assertEqual(a.roughness_enabled, b.roughness_enabled)
                if key == 'geometry_decimation':
                    self.assertEqual(val, b.geometry_decimation)
                    self.assertNotEqual(a.geometry_decimation,
                                        b.geometry_decimation)
                else:
                    self.assertEqual(a.geometry_decimation,
                                     b.geometry_decimation)
                if key == 'procedural_generation_seed':
                    self.assertEqual(val, b.procedural_generation_seed)
                    self.assertNotEqual(a.procedural_generation_seed,
                                        b.procedural_generation_seed)
                else:
                    self.assertEqual(a.procedural_generation_seed,
                                     b.procedural_generation_seed)
                if key == 'labelled_objects':
                    self.assertEqual(val, b.labelled_objects)
                    self.assertNotEqual(a.labelled_objects, b.labelled_objects)
                else:
                    self.assertEqual(a.labelled_objects, b.labelled_objects)
                if key == 'average_scene_depth':
                    self.assertEqual(val, b.average_scene_depth)
                    self.assertNotEqual(a.average_scene_depth,
                                        b.average_scene_depth)
                else:
                    self.assertEqual(a.average_scene_depth,
                                     b.average_scene_depth)
Example #10
    def post(self, cont_name):
        self.config = self.container_handler_configurations[cont_name]
        self.storage = self.config['storage']
        mongo_validator, payload_validator = self._get_validators()

        payload = self.request.json_body
        # Validate the input payload
        payload_validator(payload, 'POST')
        if cont_name == 'subjects':
            if 'project' not in payload:
                # The new POST /subjects reuses the json schema used for "embedded"
                # subject creation, but additionally requires project in the payload
                raise APIValidationException('project required')
            subject_code = payload.get('code') or payload.get('label')
            if not subject_code:
                raise APIValidationException('label or code required')

            if self.storage.get_all_el(
                {
                    'project': bson.ObjectId(payload['project']),
                    'code': subject_code,
                }, None, {'_id': 1}):
                raise APIValidationException(
                    'subject code "{}" already exists in project {}'.format(
                        subject_code, payload['project']))

        # Load the parent container in which the new container will be created
        # to check permissions.
        parent_container, parent_id_property = self._get_parent_container(
            payload)
        # Always add the id of the parent to the container
        payload[parent_id_property] = parent_container['_id']
        # If the new container is a session add the group of the parent project in the payload
        if cont_name == 'sessions':
            payload['group'] = parent_container['group']
        # Optionally inherit permissions of a project from the parent group. The default behaviour
        # for projects is to give admin permissions to the requestor.
        # The default for other containers is to inherit.
        if self.is_true('inherit') and cont_name == 'projects':
            payload['permissions'] = parent_container.get('permissions')
        elif cont_name == 'projects':
            payload['permissions'] = [{
                '_id': self.uid,
                'access': 'admin'
            }] if self.uid else []

            # Unsorted projects are reserved for reaper uploads
            if payload['label'] in PROJECT_BLACKLIST:
                self.abort(
                    400,
                    'The project "{}" can\'t be created as it is integral within the API'
                    .format(payload['label']))
        else:
            payload['permissions'] = parent_container.get('permissions', [])
        # Created and modified timestamps are added here to the payload
        payload['created'] = payload['modified'] = datetime.datetime.utcnow()
        if payload.get('timestamp'):
            payload['timestamp'] = dateutil.parser.parse(payload['timestamp'])
        permchecker = self._get_permchecker(parent_container=parent_container)

        if cont_name == 'projects':
            # Validate any changes to storage providers
            providers.validate_provider_updates({}, payload.get('providers'),
                                                self.user_is_admin)

        # Handle embedded subjects for backwards-compatibility
        if cont_name == 'sessions':
            self._handle_embedded_subject(payload, parent_container)

        # Execute the actual request: mongo_validator validates the payload that
        # will create the new container, and permchecker checks permissions
        result = mongo_validator(permchecker(self.storage.exec_op))(
            'POST', payload=payload)
        if result.acknowledged:
            return {'_id': result.inserted_id}
        else:
            self.abort(
                404, 'Element not added in container {}'.format(
                    self.storage.cont_name))
Example #11
    def put(self, cont_name, **kwargs):
        _id = kwargs.pop('cid')
        self.config = self.container_handler_configurations[cont_name]
        self.storage = self.config['storage']
        container = self._get_container(_id)
        mongo_validator, payload_validator = self._get_validators()

        payload = self.request.json_body
        payload_validator(payload, 'PUT')

        # Check if any payload keys are propagated properties; add them to r_payload
        rec = False
        r_payload = {}
        prop_keys = set(payload.keys()).intersection(
            set(self.config.get('propagated_properties', [])))
        if prop_keys:
            rec = True
            for key in prop_keys:
                r_payload[key] = payload[key]

        if cont_name == 'projects':
            # Validate any changes to storage providers
            providers.validate_provider_updates(container,
                                                payload.get('providers'),
                                                self.user_is_admin)

        if cont_name == 'subjects':
            # Check for code collision if changing code/label or moving to a new project
            # TODO: Minor duplication of code below, resolve when ability to edit subject
            # via session is resolved
            current_project, _ = self._get_parent_container(container)
            target_project, _ = self._get_parent_container(payload)
            # Use the target project if the subject is moving, else its current project
            project_id = (target_project or current_project)['_id']
            # Use the new code/label if it is changing, else the current one
            subject_code = (payload.get('code') or payload.get('label')
                            or container.get('code') or container.get('label'))

            # Check for a subject code collision first when changing project and/or subject code
            if subject_code and self.storage.get_all_el(
                    {
                        'project': project_id,
                        'code': subject_code,
                        # If neither code nor project changed, match only other subjects
                        '_id': {'$ne': container['_id']},
                    }, None, {'_id': 1}):
                raise APIValidationException(
                    'subject code "{}" already exists in project {}'.format(
                        subject_code, project_id))

            payload['code'] = subject_code
            payload['label'] = subject_code

        # Handle embedded subjects for backwards-compatibility
        if cont_name == 'sessions':
            current_project, _ = self._get_parent_container(container)
            target_project, _ = self._get_parent_container(payload)
            project_id = (target_project or current_project)['_id']

            current_subject = container['subject']
            payload_subject = payload.get('subject', {})
            target_subject_id = payload_subject.get('_id')
            target_subject_code = payload_subject.get(
                'code') or payload_subject.get('label')
            subject_code = target_subject_code or container['subject'].get(
                'code')
            subject_storage = containerstorage.SubjectStorage()

            # Check for a subject code collision first when changing project and/or subject code
            if ((target_project and project_id != current_project['_id'])
                    or (target_subject_code
                        and subject_code != current_subject.get('code'))):

                if subject_storage.get_all_el(
                    {
                        'project': project_id,
                        'code': subject_code
                    }, None, {'_id': 1}):
                    raise APIValidationException(
                        'subject code "{}" already exists in project {}'.
                        format(subject_code, project_id))

            # Handle changing subject id (moving session to another subject)
            if target_subject_id:
                target_subject = subject_storage.get_container(
                    target_subject_id)

                # If payload also contains project, verify that the target_subject is in it
                if target_project and project_id != target_subject['project']:
                    raise APIValidationException(
                        'subject {} is not in project {}'.format(
                            target_subject_id, project_id))

                # Make sure session.project is also updated
                if not target_project:
                    payload['project'] = target_subject['project']
                    target_project, _ = self._get_parent_container(payload)

            # Handle changing project (moving session and subject to another project)
            # * Copy subject into target project if there are other sessions on it
            # * Move if this is the only session on it (else branch)
            elif (target_project and project_id != current_project['_id']
                  ) and config.db.sessions.count(
                      {'subject': container['subject']['_id']}) > 1:
                subject = copy.deepcopy(container['subject'])
                subject.pop('parents')
                # Still apply any embedded subject changes
                subject.update(payload_subject)
                # A fresh _id causes new subject creation via extract_subject
                subject['_id'] = bson.ObjectId()
                payload['subject'] = subject

            # Enable embedded subject updates via session updates: match on subject._id
            else:
                payload.setdefault('subject', {})['_id'] = container['subject']['_id']

            self._handle_embedded_subject(payload, target_project
                                          or current_project)

        # Check if we are updating the parent container of the element (ie we are moving the container)
        # In this case, we will check permissions on it.
        target_parent_container, parent_id_property = self._get_parent_container(
            payload)
        if target_parent_container:
            if cont_name in ['sessions', 'acquisitions']:
                payload[parent_id_property] = bson.ObjectId(
                    payload[parent_id_property])
                parent_perms = target_parent_container.get('permissions', [])
                payload['permissions'] = parent_perms

            if cont_name == 'sessions':
                payload['group'] = target_parent_container['group']
                # Propagate permissions down to acquisitions
                rec = True
                r_payload['permissions'] = parent_perms

        payload['modified'] = datetime.datetime.utcnow()
        if payload.get('timestamp'):
            payload['timestamp'] = dateutil.parser.parse(payload['timestamp'])

        permchecker = self._get_permchecker(container, target_parent_container)

        # Specifies whether the metadata fields should be replaced or patched with the payload values
        replace_metadata = self.get_param('replace_metadata', default=False)
        # Execute the actual request: mongo_validator validates the payload that
        # will update the container, and permchecker checks permissions
        result = mongo_validator(permchecker(self.storage.exec_op))(
            'PUT',
            _id=_id,
            payload=payload,
            recursive=rec,
            r_payload=r_payload,
            replace_metadata=replace_metadata)

        if result.modified_count == 1:
            return {'modified': result.modified_count}
        else:
            self.abort(
                404, 'Element not updated in container {} {}'.format(
                    self.storage.cont_name, _id))
Example #12
    def get_jobs(self, cid):
        # Only enabled for sessions container type per url rule in api.py
        self.config = self.container_handler_configurations["sessions"]
        self.storage = self.config['storage']
        cont = self._get_container(cid,
                                   projection={
                                       'files': 0,
                                       'metadata': 0
                                   },
                                   get_children=True)

        permchecker = self._get_permchecker(cont)

        permchecker(noop)('GET', cid)

        analyses = AnalysisStorage().get_analyses(None, 'session', cont['_id'])
        acquisitions = cont.get('acquisitions', [])

        # Get query params
        states = self.request.GET.getall('states')
        tags = self.request.GET.getall('tags')
        join_cont = 'containers' in self.request.params.getall('join')
        join_gears = 'gears' in self.request.params.getall('join')
        limit = int(self.request.params.get('limit', 10000))
        skip = int(self.request.params.get('skip', 0))

        cont_refs = [
            containerutil.ContainerReference(cont_type, str(cont_id))
            for cont_type, cont_id in (
                [('session', cont['_id'])] +
                [('analysis', an['_id']) for an in analyses] +
                [('acquisition', aq['_id']) for aq in acquisitions])
        ]
        jobs = Queue.search_containers(cont_refs,
                                       states=states,
                                       tags=tags,
                                       limit=limit,
                                       skip=skip)

        unique_jobs = {}
        gear_ids = set()
        for job in jobs:
            if job['_id'] not in unique_jobs:
                clean_job = remove_potential_phi_from_job(job)
                unique_jobs[job['_id']] = Job.load(clean_job)
                if clean_job.get('gear_id') and clean_job['gear_id'] not in gear_ids:
                    gear_ids.add(clean_job['gear_id'])

        #response = {'jobs': sorted(unique_jobs.values(), key=lambda job: job.created)}
        response = {'jobs': unique_jobs.values()}
        if join_gears:
            gears = config.db.gears.find({
                '_id': {
                    '$in': [bson.ObjectId(gear_id) for gear_id in gear_ids]
                }
            })
            response['gears'] = {str(gear['_id']): gear for gear in gears}
        if join_cont:
            # create a map of analyses and acquisitions by _id
            containers = {
                str(cont['_id']): cont
                for cont in analyses + acquisitions
            }
            for container in containers.itervalues():
                # No need to return perm arrays
                container.pop('permissions', None)
            response['containers'] = containers
        return response
Example #13
 def validate(cls, v: Any) -> bson.ObjectId:
     if isinstance(v, (bson.ObjectId, cls)):
         return v
     if isinstance(v, str) and bson.ObjectId.is_valid(v):
         return bson.ObjectId(v)
     raise TypeError("invalid ObjectId specified")
Example #14
 def delete(id):
     # Build an ObjectId from the recipe id and delete that document
     delete = Ingredient.delete_one({
         "_id": bson.ObjectId(oid=str(id))
     })
     delete_count = delete.deleted_count
     return 'Deleted ' + dumps(delete_count) + ' document(s)'
Example #15
 def find_by_session_id(self, session_id, *args, **kwargs):
     """Returns matched TopicStore documents collected in the same session"""
     if isinstance(session_id, str):
         session_id = bson.ObjectId(session_id)
     return self.find({"_ts_meta.session": session_id}, *args, **kwargs)
Example #16
def ObjectIdSilent(oid=None):
    if oid is not None:
        oid = str(oid)
    if oid is None or len(oid) != 32:
        return bson.ObjectId(oid)
    return oid
Example #17
class MailMessage(object):
    FOLDER_INBOX = bson.ObjectId('4f168258e6cdaf1620000000')
    FOLDER_SENT = bson.ObjectId('4f16826ce6cdaf1620000001')
    FOLDER_SPAM = bson.ObjectId('4f16826ce6cdaf1620000002')
    FOLDER_DRAFT = bson.ObjectId('4f16826ce6cdaf1620000003')
    # Warning: the Deleted folder is not a real folder; it exists only on the client side
    FOLDER_DELETED = bson.ObjectId('4f16826ce6cdaf1620000004')

    def __init__(self,
                 owner,
                 text,
                 company_rek='',
                 subject='No subject',
                 folder=FOLDER_DRAFT,
                 headers=None,
                 attaches=None,
                 input_number=0,
                 output_number=0,
                 send_date=None,
                 is_important=False,
                 is_official=False,
                 is_auto_answer=False,
                 is_read=False,
                 is_deleted=False):

        # Evaluate the timestamp per call instead of once at import time
        send_date = send_date or mongodb_datetime_now()

        # Attachment name, attachment id, attachment size, ...
        attaches = attaches or []

        # FROM:/TO: header entries
        headers = headers or [{
            'FROM': owner,
            'TO': '',
            'DATE': send_date
        }]

        self._id = None
        self.owner = owner
        # FROM USER COMPANY if user has many companies - REK
        self.company_rek = company_rek
        self.text = text
        self.subject = subject
        self.folder = folder
        self.headers = headers
        self.attaches = attaches
        self.input_number = input_number
        self.output_number = output_number
        self.send_date = send_date
        self.is_important = is_important
        self.is_official = is_official
        self.is_auto_answer = is_auto_answer
        self.is_read = is_read
        self.is_deleted = is_deleted

    def __eq__(self, other):
        if not isinstance(other, MailMessage):
            return False
        return (self.owner == other.owner
                and self.company_rek == other.company_rek
                and self.text == other.text
                and self.folder == other.folder
                and self._id == other._id
                and self.headers == other.headers
                and self.attaches == other.attaches
                and self.input_number == other.input_number
                and self.output_number == other.output_number
                and self.send_date == other.send_date
                and self.is_important == other.is_important
                and self.is_official == other.is_official
                and self.is_auto_answer == other.is_auto_answer
                and self.is_read == other.is_read
                and self.is_deleted == other.is_deleted)

    def save(self):
        if self._id:
            if messages_manager.update_message(self._id, self):
                result = self._id
            else:
                raise rek.mail_messages.messages_exceptions.CantSaveMessage(
                    'Can not save message')
        else:
            result = messages_manager.save_message(self)
        return result

    def clone(self):
        return messages_manager.save_message(self)

    def drop(self):
        messages_manager.del_message(self._id, self.owner)

    def _get_last_recipient(self):
        if len(self.headers):
            return self.headers[-1][u'TO']
        else:
            return ''

    to = property(_get_last_recipient)

    def _get_last_sender(self):  #sender
        if len(self.headers):
            header_item = self.headers[-1]
            return header_item['FROM']
        else:
            return self.owner

    sender = property(_get_last_sender)

    def set_sender_and_recipient(self, sender, to, send_date=None):
        if (self.to == to or self.to == '') and self.sender == sender:
            # edit the last header in place
            self.update_or_add_last_header(to)
        else:
            # otherwise add a new header
            self.add_new_header(sender, to, send_date)

    def add_new_header(self, sender=None, to='', send_date=None):
        # A new message saved as a draft has no FROM or TO header data yet
        sender = sender or self.owner
        send_date = send_date or mongodb_datetime_now()
        return self.headers.append({
            u'FROM': sender,
            u'TO': to,
            u'DATE': send_date
        })

    def update_or_add_last_header(self, to, sender=None):
        sender = sender or self.owner
        if len(self.headers):
            last_header = self.headers[-1]
            last_header[u'TO'] = to
            last_header[u'FROM'] = sender
            return True
        else:
            return self.add_new_header(sender, to)
Example #18
def ParseObjectId(oid):
    if oid:
        return bson.ObjectId(str(oid))
Example #19
 def reset_active_project(self):
     return self.db.collection.find_one_and_update(
         {"_id": self.user_id}, {"$set": {"active_project": None}})
Example #20
    def verify_otp_code(self, code, remote_ip=None):
        if remote_ip:
            doc = self.otp_cache_collection.find_one({
                '_id': bson.ObjectId(self.id),
            })

            if doc:
                _, hash_salt, cur_otp_hash = doc['otp_hash'].split('$')
                hash_salt = base64.b64decode(hash_salt)
            else:
                hash_salt = os.urandom(8)
                cur_otp_hash = None

            otp_hash = hashlib.sha512()
            otp_hash.update(code + remote_ip)
            otp_hash.update(hash_salt)
            otp_hash = base64.b64encode(otp_hash.digest())

            if otp_hash == cur_otp_hash:
                self.otp_cache_collection.update(
                    {'_id': bson.ObjectId(self.id)},
                    {'$currentDate': {'timestamp': True}})
                return True

            otp_hash = '$'.join((
                '1',
                base64.b64encode(hash_salt),
                otp_hash,
            ))

        otp_secret = self.otp_secret
        padding = 8 - len(otp_secret) % 8
        if padding != 8:
            otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
        otp_secret = base64.b32decode(otp_secret.upper())
        valid_codes = []
        epoch = int(time.time() / 30)
        for epoch_offset in range(-1, 2):
            value = struct.pack('>q', epoch + epoch_offset)
            hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
            offset = ord(hmac_hash[-1]) & 0x0F
            truncated_hash = hmac_hash[offset:offset + 4]
            truncated_hash = struct.unpack('>L', truncated_hash)[0]
            truncated_hash &= 0x7FFFFFFF
            truncated_hash %= 1000000
            valid_codes.append('%06d' % truncated_hash)
        if code not in valid_codes:
            return False

        response = self.otp_collection.update(
            {
                '_id': {
                    'user_id': self.id,
                    'code': code,
                },
            },
            {'$currentDate': {'timestamp': True}},
            upsert=True)
        if response['updatedExisting']:
            return False

        if remote_ip:
            self.otp_cache_collection.update(
                {'_id': bson.ObjectId(self.id)},
                {
                    '$set': {'otp_hash': otp_hash},
                    '$currentDate': {'timestamp': True},
                },
                upsert=True)

        return True
Example #21
 def __init__(self, user_id):
     self.user_id = bson.ObjectId(user_id)
     self.db = DB("users")
Example #22
 def get_pk(self, pk):
     try:
         return bson.ObjectId(pk)
     except bson.errors.InvalidId:
         raise NotFoundError()
Example #23
    def create_el(self, analysis, parent_type, parent_id, origin, uid=None):
        """
        Create an analysis.
        * Fill defaults if not provided
        * Flatten input filerefs using `FileReference.get_file()`
        If `analysis` has a `job` key, create a "job-based" analysis:
            * Analysis inputs will be copied from the job inputs
            * Create analysis and job, both referencing each other
            * Remove the analysis again if the job can't be enqueued
        """
        parent_type = containerutil.singularize(parent_type)
        parent = self.get_parent(
            None, cont={'parent': {
                'type': parent_type,
                'id': parent_id
            }})
        defaults = {
            '_id': bson.ObjectId(),
            'parent': {
                'type': parent_type,
                'id': bson.ObjectId(parent_id)
            },
            'created': datetime.datetime.utcnow(),
            'modified': datetime.datetime.utcnow(),
            'user': origin.get('id')
        }

        for key in defaults:
            analysis.setdefault(key, defaults[key])
        if 'public' in parent:
            analysis.setdefault('public', parent['public'])

        job = analysis.pop('job', None)
        if job is not None:
            if parent_type not in [
                    'project', 'session', 'subject', 'acquisition'
            ]:
                raise APIValidationException(
                    reason='Cannot create analysis via job at the {} level'.format(parent_type))
            analysis.setdefault('inputs', [])
            for key, fileref_dict in job['inputs'].iteritems():
                analysis['inputs'].append(fileref_dict)

        # Verify and flatten input filerefs
        for i, fileref_dict in enumerate(analysis.get('inputs', [])):
            try:
                fileref = containerutil.create_filereference_from_dictionary(
                    fileref_dict)
            except KeyError:
                # Legacy analyses already have fileinfos as inputs instead of filerefs
                pass
            else:
                analysis['inputs'][i] = fileref.get_file()

        if analysis.get('info') is not None:
            analysis['info'] = util.mongo_sanitize_fields(analysis['info'])

        result = super(AnalysisStorage, self).create_el(analysis, origin)
        if not result.acknowledged:
            raise APIStorageException(
                'Analysis not created for container {} {}'.format(
                    parent_type, parent_id))

        if job is not None:
            # Create job
            job['destination'] = {
                'type': 'analysis',
                'id': str(analysis['_id'])
            }
            tags = job.get('tags', [])
            if 'analysis' not in tags:
                tags.append('analysis')
                job['tags'] = tags

            try:
                job = Queue.enqueue_job(job, origin, perm_check_uid=uid)
                job.insert()

                # Copy gear info and add id
                gear_info = job.gear_info.copy()
                gear_info['id'] = job.gear_id

                self.update_el(analysis['_id'], {
                    'job': job.id_,
                    'gear_info': gear_info
                }, None)
            except:
                # NOTE #775 remove unusable analysis - until jobs have a 'hold' state
                self.delete_el(analysis['_id'])
                raise

        return result
Example #24
def read_data(temp_id, backupi):
	db2 = client.odm
	collection = db2.html_jobs
	post_id = bson.ObjectId(temp_id)
	job = collection.find_one({"_id": post_id})

	url1 = str(job['url'])
	url = str(job['cat_url'].encode('utf-8'))
	id1 = str(job['step'])
	afterid = str(job['afterurl'])

	dataset_keyword2 = str(job['identifier'])
	if ',' in dataset_keyword2:
		dataset_keyword3 = dataset_keyword2.split(',')
		dataset_keyword = dataset_keyword3[0]
		dataset_keyword1 = dataset_keyword3[1]
	else:
		dataset_keyword = dataset_keyword2
		dataset_keyword1 = "nothingatall"
	
	title = job['title']
	title1 = title.split('@/@')
	if title1[0] != "":
		commands.append(title1[0])
	ckantitle = title1[0]

	def collect(field):
		# Each job field is stored as "value@/@kind"; route the value into
		# the commands/label/links lists based on its kind.
		parts = job[field].split('@/@')
		value, kind = parts[0], parts[1]
		if value != '':
			if kind == 'value':
				commands.append(value)
			elif kind == 'label':
				label.append(value)
			elif kind == 'link':
				links.append(value)
		return value

	ckannotes = collect('notes')
	ckanauthor = collect('author')
	ckancountry = collect('country')
	ckantemporalcoverage = collect('temporal_coverage')
	ckandate_released = collect('date_released')
	ckanauthor_email = collect('author_email')
	ckantags = collect('tags')
	ckanresource = collect('resource')
	ckanlicense = collect('license')
	ckandate_updated = collect('date_updated')
	ckanorganization = collect('organization')
	ckanmaintainer_email = collect('maintainer_email')
	ckanstate = collect('state')
	ckancity = collect('city')
	ckanExtrasCategory = collect('category')
	ckanExtrasFrequency = collect('frequency')
	ckanExtrasLanguage = job['language']  # stored as a plain value, no @/@ suffix
	ckanMaintainer = collect('maintainer')
	try:
		step = int(id1)
	except ValueError:
		step = ""

	# call StartHarvestProcedure with everything gathered above
	dataset_urls = StartHarvestProcedure(
		commands, label, links, url, step, afterid, endpoint,
		dataset_keyword, dataset_keyword1, ckannotes, ckanlicense,
		ckanresource, ckantags, ckanauthor_email, ckanauthor, ckantitle,
		ckandate_updated, ckanExtrasCategory, ckanExtrasFrequency,
		ckanExtrasLanguage, ckanMaintainer, ckandate_released, ckancountry,
		ckantemporalcoverage, backupi, ckanorganization,
		ckanmaintainer_email, ckanstate, ckancity)
	return dataset_urls
Example #25
    def setUp(self):
        super(RuleEnforcementPermissionsResolverTestCase, self).setUp()

        register_internal_trigger_types()

        # Create some mock users
        user_1_db = UserDB(name='1_role_rule_pack_grant')
        user_1_db = User.add_or_update(user_1_db)
        self.users['custom_role_rule_pack_grant'] = user_1_db

        user_2_db = UserDB(name='1_role_rule_grant')
        user_2_db = User.add_or_update(user_2_db)
        self.users['custom_role_rule_grant'] = user_2_db

        user_3_db = UserDB(name='custom_role_pack_rule_all_grant')
        user_3_db = User.add_or_update(user_3_db)
        self.users['custom_role_pack_rule_all_grant'] = user_3_db

        user_4_db = UserDB(name='custom_role_rule_all_grant')
        user_4_db = User.add_or_update(user_4_db)
        self.users['custom_role_rule_all_grant'] = user_4_db

        user_5_db = UserDB(name='custom_role_rule_modify_grant')
        user_5_db = User.add_or_update(user_5_db)
        self.users['custom_role_rule_modify_grant'] = user_5_db

        user_6_db = UserDB(name='rule_pack_rule_create_grant')
        user_6_db = User.add_or_update(user_6_db)
        self.users['rule_pack_rule_create_grant'] = user_6_db

        user_7_db = UserDB(name='rule_pack_rule_all_grant')
        user_7_db = User.add_or_update(user_7_db)
        self.users['rule_pack_rule_all_grant'] = user_7_db

        user_8_db = UserDB(name='rule_rule_create_grant')
        user_8_db = User.add_or_update(user_8_db)
        self.users['rule_rule_create_grant'] = user_8_db

        user_9_db = UserDB(name='rule_rule_all_grant')
        user_9_db = User.add_or_update(user_9_db)
        self.users['rule_rule_all_grant'] = user_9_db

        user_10_db = UserDB(name='custom_role_rule_list_grant')
        user_10_db = User.add_or_update(user_10_db)
        self.users['custom_role_rule_list_grant'] = user_10_db

        # Create some mock resources on which permissions can be granted
        rule_1_db = RuleDB(pack='test_pack_1',
                           name='rule1',
                           action={'ref': 'core.local'},
                           trigger='core.st2.key_value_pair.create')
        rule_1_db = Rule.add_or_update(rule_1_db)
        self.resources['rule_1'] = rule_1_db

        rule_enforcement_1_db = RuleEnforcementDB(
            trigger_instance_id=str(bson.ObjectId()),
            execution_id=str(bson.ObjectId()),
            rule={
                'ref': rule_1_db.ref,
                'uid': rule_1_db.uid,
                'id': str(rule_1_db.id)
            })
        rule_enforcement_1_db = RuleEnforcement.add_or_update(
            rule_enforcement_1_db)
        self.resources['rule_enforcement_1'] = rule_enforcement_1_db

        rule_2_db = RuleDB(pack='test_pack_1', name='rule2')
        rule_2_db = Rule.add_or_update(rule_2_db)
        self.resources['rule_2'] = rule_2_db

        rule_enforcement_2_db = RuleEnforcementDB(
            trigger_instance_id=str(bson.ObjectId()),
            execution_id=str(bson.ObjectId()),
            rule={
                'ref': rule_2_db.ref,
                'uid': rule_2_db.uid,
                'id': str(rule_2_db.id)
            })
        rule_enforcement_2_db = RuleEnforcement.add_or_update(
            rule_enforcement_2_db)
        self.resources['rule_enforcement_2'] = rule_enforcement_2_db

        rule_3_db = RuleDB(pack='test_pack_2', name='rule3')
        rule_3_db = Rule.add_or_update(rule_3_db)
        self.resources['rule_3'] = rule_3_db

        rule_enforcement_3_db = RuleEnforcementDB(
            trigger_instance_id=str(bson.ObjectId()),
            execution_id=str(bson.ObjectId()),
            rule={
                'ref': rule_3_db.ref,
                'uid': rule_3_db.uid,
                'id': str(rule_3_db.id)
            })
        rule_enforcement_3_db = RuleEnforcement.add_or_update(
            rule_enforcement_3_db)
        self.resources['rule_enforcement_3'] = rule_enforcement_3_db

        # Create some mock roles with associated permission grants
        # Custom role - "rule_view" grant on the parent pack (pack_1)
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['pack_1'].get_uid(),
            resource_type=ResourceType.PACK,
            permission_types=[PermissionType.RULE_VIEW])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_3_db = RoleDB(name='custom_role_rule_pack_grant',
                           permission_grants=permission_grants)
        role_3_db = Role.add_or_update(role_3_db)
        self.roles['custom_role_rule_pack_grant'] = role_3_db

        # Custom role - "rule_view" grant on rule_3
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['rule_3'].get_uid(),
            resource_type=ResourceType.RULE,
            permission_types=[PermissionType.RULE_VIEW])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_4_db = RoleDB(name='custom_role_rule_grant',
                           permission_grants=permission_grants)
        role_4_db = Role.add_or_update(role_4_db)
        self.roles['custom_role_rule_grant'] = role_4_db

        # Custom role - "rule_all" grant on a parent rule pack
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['pack_1'].get_uid(),
            resource_type=ResourceType.PACK,
            permission_types=[PermissionType.RULE_ALL])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_4_db = RoleDB(name='custom_role_pack_rule_all_grant',
                           permission_grants=permission_grants)
        role_4_db = Role.add_or_update(role_4_db)
        self.roles['custom_role_pack_rule_all_grant'] = role_4_db

        # Custom role - "rule_all" grant on a rule
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['rule_1'].get_uid(),
            resource_type=ResourceType.RULE,
            permission_types=[PermissionType.RULE_ALL])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_4_db = RoleDB(name='custom_role_rule_all_grant',
                           permission_grants=permission_grants)
        role_4_db = Role.add_or_update(role_4_db)
        self.roles['custom_role_rule_all_grant'] = role_4_db

        # Custom role - "rule_modify" on role_1
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['rule_1'].get_uid(),
            resource_type=ResourceType.RULE,
            permission_types=[PermissionType.RULE_MODIFY])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_5_db = RoleDB(name='custom_role_rule_modify_grant',
                           permission_grants=permission_grants)
        role_5_db = Role.add_or_update(role_5_db)
        self.roles['custom_role_rule_modify_grant'] = role_5_db

        # Custom role - "rule_create" grant on pack_1
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['pack_1'].get_uid(),
            resource_type=ResourceType.PACK,
            permission_types=[PermissionType.RULE_CREATE])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_6_db = RoleDB(name='rule_pack_rule_create_grant',
                           permission_grants=permission_grants)
        role_6_db = Role.add_or_update(role_6_db)
        self.roles['rule_pack_rule_create_grant'] = role_6_db

        # Custom role - "rule_all" grant on pack_1
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['pack_1'].get_uid(),
            resource_type=ResourceType.PACK,
            permission_types=[PermissionType.RULE_ALL])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_7_db = RoleDB(name='rule_pack_rule_all_grant',
                           permission_grants=permission_grants)
        role_7_db = Role.add_or_update(role_7_db)
        self.roles['rule_pack_rule_all_grant'] = role_7_db

        # Custom role - "rule_create" grant on rule_1
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['rule_1'].get_uid(),
            resource_type=ResourceType.RULE,
            permission_types=[PermissionType.RULE_CREATE])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_8_db = RoleDB(name='rule_rule_create_grant',
                           permission_grants=permission_grants)
        role_8_db = Role.add_or_update(role_8_db)
        self.roles['rule_rule_create_grant'] = role_8_db

        # Custom role - "rule_all" grant on rule_1
        grant_db = PermissionGrantDB(
            resource_uid=self.resources['rule_1'].get_uid(),
            resource_type=ResourceType.RULE,
            permission_types=[PermissionType.RULE_ALL])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_9_db = RoleDB(name='rule_rule_all_grant',
                           permission_grants=permission_grants)
        role_9_db = Role.add_or_update(role_9_db)
        self.roles['rule_rule_all_grant'] = role_9_db

        # Custom role - "rule_list" grant
        grant_db = PermissionGrantDB(
            resource_uid=None,
            resource_type=None,
            permission_types=[PermissionType.RULE_LIST])
        grant_db = PermissionGrant.add_or_update(grant_db)
        permission_grants = [str(grant_db.id)]
        role_10_db = RoleDB(name='custom_role_rule_list_grant',
                            permission_grants=permission_grants)
        role_10_db = Role.add_or_update(role_10_db)
        self.roles['custom_role_rule_list_grant'] = role_10_db

        # Create some mock role assignments
        user_db = self.users['custom_role_rule_pack_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_rule_pack_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['custom_role_rule_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_rule_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['custom_role_pack_rule_all_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_pack_rule_all_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['custom_role_rule_all_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_rule_all_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['custom_role_rule_modify_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_rule_modify_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['rule_pack_rule_create_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['rule_pack_rule_create_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['rule_pack_rule_all_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['rule_pack_rule_all_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['rule_rule_create_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['rule_rule_create_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['rule_rule_all_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['rule_rule_all_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)

        user_db = self.users['custom_role_rule_list_grant']
        role_assignment_db = UserRoleAssignmentDB(
            user=user_db.name,
            role=self.roles['custom_role_rule_list_grant'].name,
            source='assignments/%s.yaml' % user_db.name)
        UserRoleAssignment.add_or_update(role_assignment_db)
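
The three RuleEnforcementDB fixtures above differ only in the rule they reference; a minimal sketch (hypothetical _make_enforcement, assuming the same st2 model classes imported by this test) of how they could be built in one place:

def _make_enforcement(rule_db):
    # Hypothetical helper; mirrors the fixture blocks above.
    # Fresh bson.ObjectIds stand in for real trigger-instance and
    # execution references, so the helper is safe to call repeatedly.
    enforcement_db = RuleEnforcementDB(
        trigger_instance_id=str(bson.ObjectId()),
        execution_id=str(bson.ObjectId()),
        rule={
            'ref': rule_db.ref,
            'uid': rule_db.uid,
            'id': str(rule_db.id)
        })
    return RuleEnforcement.add_or_update(enforcement_db)

# Usage sketch: self.resources['rule_enforcement_1'] = _make_enforcement(rule_1_db)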
Beispiel #26
0
 def test_save_id(self):
     doc = {'_id': bson.ObjectId(), 'x': 1}
     self.bind.db.coll.save(doc)
Beispiel #27
0
    def setUp(self):
        self.guid = "urn:newsml:localhost:2016-09-12T12:11:40.160498:7237e59f-c42d-4865-aee5-e364aeb2966a"

        self.archived_only_data = [
            {
                "_id": bson.ObjectId("588c1b901d41c805dce70df0"),
                "guid": self.guid,
                "item_id": self.guid,
                "_current_version": 3,
                "type": "text",
                "abstract": "test",
                "state": "fetched",
                "slugline": "slugline",
                "headline": "headline",
                "flags": {"marked_archived_only": True},
                "subject": [{"qcode": "17004000", "name": "Statistics"}],
                "body_html": "Test Document body",
                "source": "AAP",
                "linked_in_packages": [
                    {
                        "package": "urn:newsml:localhost:2017-01-28T15:28:33.535974:30c01757-cfb5-4985-a8f4-c3ecb253a244",
                        "package_type": "takes",
                    }
                ],
            },
            {
                "_id": bson.ObjectId("57d224de069b7f038e9d2a53"),
                "guid": "urn:newsml:localhost:2016-08-30T06:15:35.379754:3d68cc4c-1f16-4a7f-bfca-92ca50bcdd8f",
                "item_id": "urn:newsml:localhost:2016-08-30T06:15:35.379754:3d68cc4c-1f16-4a7f-bfca-92ca50bcdd8f",
                "_current_version": 4,
                "type": "text",
                "abstract": "test",
                "state": "fetched",
                "slugline": "slugline",
                "headline": "headline",
                "flags": {"marked_archived_only": True},
                "subject": [{"qcode": "17004000", "name": "Statistics"}],
                "body_html": "Test Document body",
                "source": "AAP",
            },
            {
                "_id": "213456",
                "guid": self.guid,
                "item_id": "urn:newsml:localhost:2017-01-28T15:28:33.535974:30c01757-cfb5-4985-a8f4-c3ecb253a244",
                "_current_version": 1,
                "type": "composite",
                "abstract": "test",
                "state": "fetched",
                "slugline": "slugline",
                "headline": "headline",
                "groups": [
                    {"id": "root", "refs": [{"idRef": "main"}], "role": "grpRole:NEP"},
                    {
                        "id": "main",
                        "refs": [
                            {
                                "residRef": "urn:newsml:localhost:2016-09-12T12:11:40.160498:7237e59f-c42d-4865-aee5-e364aeb2966a",
                                "guid": "urn:newsml:localhost:2016-09-12T12:11:40.160498:7237e59f-c42d-4865-aee5-e364aeb2966a",
                                "itemClass": "icls:text",
                                "headline": "Testing takes",
                                "type": "text",
                                "renditions": {},
                                "_current_version": 3,
                                "is_published": True,
                                "slugline": "Takes test",
                                "sequence": 1,
                                "location": "archived",
                            },
                            {
                                "residRef": "urn:newsml:localhost:2016-08-30T06:15:35.379754:3d68cc4c-1f16-4a7f-bfca-92ca50bcdd8f",
                                "guid": "urn:newsml:localhost:2016-08-30T06:15:35.379754:3d68cc4c-1f16-4a7f-bfca-92ca50bcdd8f",
                                "itemClass": "icls:text",
                                "headline": "Testing takes",
                                "type": "text",
                                "renditions": {},
                                "_current_version": 4,
                                "is_published": True,
                                "slugline": "Takes test",
                                "sequence": 2,
                                "location": "archived",
                            },
                        ],
                        "role": "grpRole:main",
                    },
                ],
            },
        ]

        self.archivedService = get_resource_service("archived")
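
Note that the fixture mixes ObjectId _ids with a plain string id ("213456"); a defensive lookup sketch (hypothetical find_archived, assuming a pymongo collection) that handles both forms:

def find_archived(collection, item_id):
    # Hypothetical helper, not part of the test: the fixture stores
    # both ObjectId and plain-string _ids, so try the ObjectId form
    # first and fall back to a plain string match.
    if bson.ObjectId.is_valid(item_id):
        return collection.find_one({'_id': bson.ObjectId(item_id)})
    return collection.find_one({'_id': item_id})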
Beispiel #28
0
def test_online_analysis(data_builder, as_admin, as_drone, file_form, api_db):
    gear = data_builder.create_gear(gear={'inputs': {'csv': {'base': 'file'}}})
    group = data_builder.create_group()
    project = data_builder.create_project()
    session = data_builder.create_session()
    acquisition = data_builder.create_acquisition()
    assert as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form('input.csv')).ok

    # Try to create job-based analysis with invalid fileref
    r = as_admin.post('/sessions/' + session + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': gear,
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'nosuch.csv'}}}
    })
    assert r.status_code == 404

    # Try to create job-based analysis with invalid gear id
    r = as_admin.post('/sessions/' + session + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': '000000000000000000000000',
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'input.csv'}}}
    })
    assert r.status_code == 404

    # Try to create job-based analysis at group level
    r = as_admin.post('/groups/' + group + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': gear,
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'input.csv'}}}
    })
    # There is no route for group analyses (hence the 404), even though the handler/dao layer itself would allow them
    assert r.status_code == 404

    # Create analysis job at project level
    r = as_admin.post('/projects/' + project + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': gear,
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'input.csv'}}}
    })
    assert r.ok
    analysis = r.json()['_id']

    # Test that permission updates don't make it to analyses
    r = as_admin.post('/projects/' + project + '/permission', json={
        '_id': '*****@*****.**',
        'access': 'ro'})

    # Verify job was created with it
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    job = r.json().get('job')
    assert job
    assert not r.json().get('permissions')

    # Engine upload
    r = as_drone.post('/engine',
        params={'level': 'analysis', 'id': analysis, 'job': job},
        files=file_form('output.csv', meta={'type': 'tabular data'}))
    assert r.ok

    check_files(as_admin, analysis, 'files', 'output.csv')
    api_db.analyses.delete_one({'_id': bson.ObjectId(analysis)})

    # Create job-based analysis at acquisition level
    r = as_admin.post('/acquisitions/' + acquisition + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': gear,
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'input.csv'}}}
    })
    assert r.ok
    analysis = r.json()['_id']

    # Verify job was created with it
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    job = r.json().get('job')
    assert job

    # Engine upload
    r = as_drone.post('/engine',
        params={'level': 'analysis', 'id': analysis, 'job': job},
        files=file_form('output.csv', meta={'type': 'tabular data'}))
    assert r.ok

    check_files(as_admin, analysis, 'files', 'output.csv')
    api_db.analyses.delete_one({'_id': bson.ObjectId(analysis)})

    # Create job-based analysis
    r = as_admin.post('/sessions/' + session + '/analyses', json={
        'label': 'online',
        'job': {'gear_id': gear,
                'inputs': {'csv': {'type': 'acquisition', 'id': acquisition, 'name': 'input.csv'}}}
    })
    assert r.ok
    analysis = r.json()['_id']

    # Verify job was created with it
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    job = r.json().get('job')
    assert job

    # Verify that gear info was stored
    gear_info = r.json().get('gear_info')
    assert gear_info.get('id') == gear
    assert gear_info.get('name')
    assert gear_info.get('version') == '0.0.1'

    check_files(as_admin, analysis, 'inputs', 'input.csv')

    # Try manual upload - not allowed for job-based analysis
    r = as_admin.post('/analyses/' + analysis + '/files', files=file_form('output.csv'))
    assert r.status_code == 400

    # Engine upload
    r = as_drone.post('/engine',
        params={'level': 'analysis', 'id': analysis, 'job': job},
        files=file_form('output.csv', meta={'type': 'tabular data'}))
    assert r.ok

    check_files(as_admin, analysis, 'files', 'output.csv')

    # Start the queued jobs so the analysis job can be failed and retried
    r = as_drone.get('/jobs/next')
    assert r.ok
    r = as_drone.get('/jobs/next')
    assert r.ok
    r = as_drone.get('/jobs/next')
    assert r.ok

    # Set the job to failed
    r = as_drone.put('/jobs/' + job, json={'state': 'failed'})
    assert r.ok

    r = as_admin.post('/jobs/' + job + '/retry', params={'ignoreState': True, 'root': True})
    assert r.ok
    retried_job_id = r.json()['_id']

    r = as_drone.get('/jobs/next')
    assert r.ok

    # Verify the analysis now references the retried job
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    assert r.json().get('job') == retried_job_id

    # Engine upload
    r = as_drone.post('/engine',
        params={'level': 'analysis', 'id': analysis, 'job': retried_job_id},
        files=file_form('output.csv', meta={'type': 'tabular data'}))
    assert r.ok
    check_files(as_admin, analysis, 'files', 'output.csv')

    # Verify the analysis still references the retried job after the engine upload
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    assert r.json().get('job') == retried_job_id

    r = as_admin.get('/jobs/' + retried_job_id)
    assert r.ok
    assert r.json()['saved_files'] != []

    check_files(as_admin, analysis, 'files', 'output.csv')

    api_db.analyses.delete_one({'_id': bson.ObjectId(analysis)})
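
check_files is defined elsewhere in this test module; a plausible sketch of its contract (an assumption, not the suite's actual definition), assuming it asserts that the named file appears in the given list field of the analysis document:

def check_files(as_admin, analysis, key, filename):
    # Plausible sketch only -- the real helper lives elsewhere in
    # the suite. Fetch the analysis and assert `filename` is present
    # in its `key` list ('inputs' or 'files').
    r = as_admin.get('/analyses/' + analysis)
    assert r.ok
    assert filename in [f['name'] for f in r.json().get(key, [])]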
Beispiel #29
0
 def _deserialize(self, value, attr, data, **kwargs):
     try:
         return bson.ObjectId(value)
     except BSONError:
         raise ValidationError('invalid ObjectId `%s`' % value)
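
A minimal usage sketch, assuming this _deserialize belongs to a custom marshmallow field (the class name ObjectIdField and the schema are hypothetical):

import bson
from bson.errors import BSONError
from marshmallow import Schema, ValidationError, fields


class ObjectIdField(fields.Field):
    # Wraps the _deserialize above in a complete field class;
    # bson.errors.InvalidId subclasses BSONError, so malformed
    # ids surface as marshmallow validation errors.
    def _deserialize(self, value, attr, data, **kwargs):
        try:
            return bson.ObjectId(value)
        except BSONError:
            raise ValidationError('invalid ObjectId `%s`' % value)


class TodoSchema(Schema):
    _id = ObjectIdField()

# Usage sketch: TodoSchema().load({'_id': '5f0c2f9e8b9a9c3d4e5f6a7b'})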
Beispiel #30
0
def get_todo_by_id(id):
    todos = mongo.db.todos
    todo = todos.find_one({'_id': bson.ObjectId(oid=str(id))})

    sanitized = json.loads(json_util.dumps(todo))
    return jsonify(sanitized)
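
A defensive variant (sketch only, assuming the same Flask/PyMongo setup as above) that returns 404 for malformed ids instead of raising a server error:

from bson.errors import InvalidId

def get_todo_by_id_safe(id):
    # Hypothetical variant of the handler above: bson.ObjectId raises
    # InvalidId for malformed input, which we map to a 404 response.
    try:
        oid = bson.ObjectId(str(id))
    except InvalidId:
        return jsonify({'error': 'invalid id'}), 404
    todo = mongo.db.todos.find_one({'_id': oid})
    return jsonify(json.loads(json_util.dumps(todo)))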