Example #1
def put_item(collection, item):
    item_id = item['_id']
    internal_fields = ['_id', '_etag', '_updated', '_created']
    for field in internal_fields:
        item.pop(field, None)
    # print item
    # print type(item_id)
    p = put_internal(collection, item, **{'_id': item_id})
    if p[0]['_status'] == 'ERR':
        print(p)
        print(item)
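The snippet above detects failure by inspecting p[0]['_status']. The other examples on this page unpack put_internal's return value as a (response, last_modified, etag, status) tuple, so the HTTP status can be checked directly. A minimal sketch under that assumption (not part of the original project):

from eve.methods.put import put_internal

def put_item_checked(collection, item):
    item_id = item.pop('_id')
    for field in ('_etag', '_updated', '_created'):
        item.pop(field, None)
    response, _, _, status = put_internal(collection, item, _id=item_id)
    if status not in (200, 201):
        raise RuntimeError('PUT on %s/%s failed with status %i: %s'
                           % (collection, item_id, status, response))
    return response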
Example #2
File: put.py Project: kidaa/eve
    def test_put_internal(self):
        # test that put_internal is available and working properly.
        test_field = 'ref'
        test_value = "9876543210987654321098765"
        data = {test_field: test_value}
        with self.app.test_request_context(self.item_id_url):
            r, _, _, status = put_internal(
                self.known_resource, data, concurrency_check=False,
                **{'_id': self.item_id})
        db_value = self.compare_put_with_get(test_field, r)
        self.assertEqual(db_value, test_value)
        self.assert200(status)
Example #3
def create_file_doc_for_upload(project_id, uploaded_file):
    """Creates a secure filename and a document in MongoDB for the file.

    The (project_id, filename) tuple should be unique. If such a document already
    exists, it is updated with the new file.

    :param uploaded_file: file from request.files['form-key']
    :type uploaded_file: werkzeug.datastructures.FileStorage
    :returns: a tuple (file_id, filename, status), where 'filename' is the internal
            filename used on GCS.
    """

    project_id = ObjectId(project_id)

    # Hash the filename with path info to get the internal name. This should
    # be unique for the project.
    # internal_filename = uploaded_file.filename
    _, ext = os.path.splitext(uploaded_file.filename)
    internal_filename = uuid.uuid4().hex + ext

    # For now, we don't support overwriting files, and create a new one every time.
    # # See if we can find a pre-existing file doc.
    # files = current_app.data.driver.db['files']
    # file_doc = files.find_one({'project': project_id,
    #                            'name': internal_filename})
    file_doc = None

    # TODO: at some point do name-based and content-based content-type sniffing.
    new_props = {
        'filename': uploaded_file.filename,
        'content_type': uploaded_file.mimetype,
        'length': uploaded_file.content_length,
        'project': project_id,
        'status': 'uploading'
    }

    if file_doc is None:
        # Create a file document on MongoDB for this file.
        file_doc = create_file_doc(name=internal_filename, **new_props)
        file_fields, _, _, status = post_internal('files', file_doc)
    else:
        file_doc.update(new_props)
        # The lookup must identify the document being replaced; without it
        # put_internal has nothing to match on.
        file_fields, _, _, status = put_internal(
            'files', remove_private_keys(file_doc), _id=file_doc['_id'])

    if status not in (200, 201):
        log.error(
            'Unable to create new file document in MongoDB, status=%i: %s',
            status, file_fields)
        raise InternalServerError()

    return file_fields['_id'], internal_filename, status
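Note that the examples on this page disagree about post_internal's arity: this one unpacks four values, while a later example unpacks five (newer Eve versions also return the response headers). Indexing instead of unpacking tolerates both, assuming only that the first element is the response document and the fourth is the HTTP status, as every example here does:

result = post_internal('files', file_doc)
file_fields, status = result[0], result[3]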
Example #4
        def perform_test(file_id, expected_type):
            node_doc = {
                'picture': file_id_image,
                'description': '',
                'project': project_id,
                'node_type': 'asset',
                'user': user_id,
                'properties': {
                    'status': 'published',
                    'tags': [],
                    'order': 0,
                    'categories': ''
                },
                'name': 'My first test node'
            }

            with self.app.test_request_context():
                g.current_user = {
                    'user_id': user_id,
                    # This group is hardcoded in the EXAMPLE_PROJECT.
                    'groups': [ObjectId('5596e975ea893b269af85c0e')],
                    'roles': {u'subscriber', u'admin'}
                }
                nodes = self.app.data.driver.db['nodes']

                # Create the node.
                r, _, _, status = post_internal('nodes', node_doc)
                self.assertEqual(status, 201, r)
                node_id = r['_id']

                # Get from database to check its default content type.
                db_node = nodes.find_one(node_id)
                self.assertNotIn('content_type', db_node['properties'])

                # PUT it again, without a file -- should be blocked.
                self.assertRaises(UnprocessableEntity,
                                  put_internal,
                                  'nodes',
                                  node_doc,
                                  _id=node_id)

                # PUT it with a file.
                node_doc['properties']['file'] = str(file_id)
                r, _, _, status = put_internal('nodes', node_doc, _id=node_id)
                self.assertEqual(status, 200, r)

                # Get from database to test the final node.
                db_node = nodes.find_one(node_id)
                self.assertEqual(expected_type,
                                 db_node['properties']['content_type'])
Example #5
def classes():
    app_config_ori = deepcopy(app.config)
    app.config['PAGINATION_DEFAULT'] = 9999
    app.config['DOMAIN']['attendances_tutors'].update({
        'embedded_fields': [
            'attendance',
            'attendance.class_',
            'attendance.class_.branch',
            'attendance.module',
        ]
    })

    users, *_ = get_internal('users', **{'role': 'tutor'})
    users = users['_items']

    for user in users:
        lookup = {
            'tutor': user['id'],
        }
        attendances, *_ = get_internal('attendances_tutors', **lookup)
        attendances = attendances['_items']
        count_attendances = len(attendances)
        attendances = map(_class_avg_rating, attendances)

        attendances = _group_by_class(attendances)

        payload = {
            '_items': attendances,
            'meta': {
                'total': len(attendances),
                'total_item': count_attendances,
            }
        }
        key = 'progress_classes_%s' % user['id']
        try:
            lookup = {'key': key}
            res, *_ = getitem_internal('caches', **lookup)
            payload = {'value': render_json(payload)}
            res, *_ = put_internal('caches', payload, **{'id': res['id']})
        except NotFound:
            payload = {'key': key, 'value': render_json(payload)}
            res, *_ = post_internal('caches', payload)

    app.config = app_config_ori
    # print(res)
    return jsonify({})
Example #6
    def put_internal(self,
                     resource: str,
                     payload=None,
                     concurrency_check=False,
                     skip_validation=False,
                     **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.put import put_internal

        url = self.config['URLS'][resource]
        path = '%s/%s/%s' % (self.api_prefix, url, lookup['_id'])
        with self.__fake_request_url_rule('PUT', path):
            return put_internal(resource,
                                payload=payload,
                                concurrency_check=concurrency_check,
                                skip_validation=skip_validation,
                                **lookup)[:4]
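A hypothetical call site for the wrapper above; self.config and self.api_prefix suggest it lives on the Flask application object, so the names below are illustrative rather than taken from the original project:

from flask import current_app

doc = remove_private_keys(project_doc)
response, _, _, status = current_app.put_internal(
    'projects', payload=doc, _id=project_id)
assert status == 200, response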
Example #7
    def upgrade(node_type, old_ids):
        print("Node {0}".format(node_type['name']))
        node_name = node_type['name']
        if node_name in old_ids:
            node_type = mix_node_type(old_ids[node_name], node_type)
            node_id = node_type['_id']

            # Remove internal fields that would cause a validation error.
            internal_fields = ['_id', '_etag', '_updated', '_created']
            for field in internal_fields:
                node_type.pop(field, None)

            p = put_internal('node_types', node_type, **{'_id': node_id})
            print(p)

        else:
            print("Making the node")
            print(node_type)
            post_item('node_types', node_type)
Example #8
def setup_for_film(project_url):
    """Add Blender Cloud extension_props specific for film projects.

    Returns the updated project.
    """

    projects_collection = current_app.data.driver.db['projects']

    # Find the project in the database.
    project = projects_collection.find_one({'url': project_url})
    if not project:
        raise RuntimeError('Project %s does not exist.' % project_url)

    # Set default extension properties. Be careful not to overwrite any properties that
    # are already there.
    all_extension_props = project.setdefault('extension_props', {})
    cloud_extension_props = {
        'category': 'film',
        'theme_css': '',
        # The accent color (can be 'blue', '#FFBBAA', or 'rgba(1, 1, 1, 1)')
        'theme_color': '',
        'is_in_production': False,
        'video_url': '',  # Oembeddable url
        'poster': None,  # File ObjectId
        'logo': None,  # File ObjectId
        # TODO(fsiddi) when we introduce other setup_for_* in Blender Cloud, make available
        # at a higher scope
        'is_featured': False,
    }

    all_extension_props.setdefault(EXTENSION_NAME, cloud_extension_props)

    project_id = ObjectId(project['_id'])
    project = remove_private_keys(project)
    result, _, _, status_code = put_internal('projects',
                                             project,
                                             _id=project_id)

    if status_code != 200:
        raise RuntimeError("Can't update project %s, issues: %s"
                           % (project_id, result))

    log.info('Project %s was updated for Blender Cloud.', project_url)
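Several examples on this page strip Eve's internal fields with remove_private_keys before calling put_internal. A minimal sketch of such a helper, assuming all it must do is drop the underscore-prefixed keys (the project's real implementation may handle more cases):

def remove_private_keys(doc):
    # Return a copy of the document without Eve-internal keys such as
    # '_id', '_etag', '_updated' and '_created'.
    return {key: value for key, value in doc.items()
            if not key.startswith('_')}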
Example #9
def _update_project(project):
    """Updates a project in the database, or SystemExit()s.

    :param project: the project data, should be the entire project document
    :type: dict
    :return: the project
    :rtype: dict
    """

    from pillar.api.utils import remove_private_keys

    project_id = ObjectId(project['_id'])
    project = remove_private_keys(project)
    result, _, _, status_code = put_internal('projects',
                                             project,
                                             _id=project_id)

    if status_code != 200:
        raise RuntimeError("Can't update project %s, issues: %s"
                           % (project_id, result))
Example #10
def _update_project(project_uuid, project):
    """Updates a project in the database, or SystemExit()s.

    :param project_uuid: UUID of the project
    :type: str
    :param project: the project data, should be the entire project document
    :type: dict
    :return: the project
    :rtype: dict
    """

    from application.utils import remove_private_keys

    project_id = ObjectId(project_uuid)
    project = remove_private_keys(project)
    result, _, _, _ = put_internal('projects', project, _id=project_id)

    if result['_status'] != 'OK':
        log.error("Can't update project %s, issues: %s", project_uuid, result['_issues'])
        raise SystemExit()
Example #11
    def on_update_host(updates, original):
        """
        Called by EVE HOOK (app.on_update_host)

        On update host, if not template, remove in '_template_fields' fields in updates because
        we update these fields, so they are now not dependant of template

        :param updates: modified fields
        :type updates: dict
        :param original: original fields
        :type original: dict
        :return: None
        """
        if g.get('ignore_hook_patch', False):
            return
        if not original['_is_template']:
            ignore_schema_fields = [
                'realm', '_template_fields', '_templates', '_is_template',
                '_templates_with_services'
            ]
            template_fields = original['_template_fields']
            do_put = False
            for (field_name, _) in iteritems(updates):
                if field_name not in ignore_schema_fields:
                    if field_name in template_fields:
                        del template_fields[field_name]
                        do_put = True
            if do_put:
                lookup = {"_id": original['_id']}
                putdata = deepcopy(original)
                putdata['_template_fields'] = template_fields
                del putdata['_etag']
                del putdata['_updated']
                del putdata['_created']
                # concurrency_check=False, skip_validation=False
                response = put_internal('host', putdata, False, False,
                                        **lookup)
                # put_internal stored a fresh _etag; propagate it so the
                # in-flight update does not fail its concurrency check.
                updates['_etag'] = response[0]['_etag']
                original['_etag'] = response[0]['_etag']
Example #12
def before_update(resource_name, item, payload):

    if resource_name == 'drivings':
        driving = json.loads(item.get_data())

        if 'driver' in driving and driving['driver'] == 'CLEAN':
            original = get_document(resource_name, False, **{"_id": payload['_id']})
            new = {}
            for k in original:
                new[k] = original[k]

            new['customer'] = str(new['customer'])
            del new['_created']
            del new['driver']
            del new['_updated']
            del new['_etag']
            del new['_id']

            del driving['driver']

            for k in driving:
                new[k] = driving[k]

            # concurrency_check=False, skip_validation=True
            return put_internal(resource_name, new, False, True, **{"_id": payload['_id']})
Example #13
def validate_create_user(blender_id_user_id, token, oauth_subclient_id):
    """Validates a user against Blender ID, creating the user in our database.

    :param blender_id_user_id: the user ID at the BlenderID server.
    :param token: the OAuth access token.
    :param oauth_subclient_id: the subclient ID, or empty string if not a subclient.
    :returns: (user in MongoDB, HTTP status 200 or 201)
    """

    # Verify with Blender ID
    log.debug('Storing token for BlenderID user %s', blender_id_user_id)
    user_info, token_expiry = validate_token(blender_id_user_id, token, oauth_subclient_id)

    if user_info is None:
        log.debug('Unable to verify token with Blender ID.')
        return None, None

    # Blender ID can be queried without user ID, and will always include the
    # correct user ID in its response.
    log.debug('Obtained user info from Blender ID: %s', user_info)
    blender_id_user_id = user_info['id']

    # Store the user info in MongoDB.
    db_user = find_user_in_db(blender_id_user_id, user_info)

    r = {}
    for retry in range(5):
        if '_id' in db_user:
            # Update the existing user
            attempted_eve_method = 'PUT'
            db_id = db_user['_id']
            r, _, _, status = put_internal('users', remove_private_keys(db_user),
                                           _id=db_id)
            if status == 422:
                log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
                          status, db_id, remove_private_keys(db_user), r)
        else:
            # Create a new user, retry for non-unique usernames.
            attempted_eve_method = 'POST'
            r, _, _, status = post_internal('users', db_user)

            db_id = r['_id']
            db_user.update(r)  # update with database/eve-generated fields.

        if status == 422:
            # Probably non-unique username, so retry a few times with different usernames.
            log.info('Error creating new user: %s', r)
            username_issue = r.get('_issues', {}).get(u'username', '')
            if u'not unique' in username_issue:
                # Retry
                db_user['username'] = authentication.make_unique_username(db_user['email'])
                continue

        # Saving was successful, or at least didn't break on a non-unique username.
        break
    else:
        # The for-loop's else runs only when no 'break' happened,
        # i.e. when every retry failed.
        log.error('Unable to create new user %s: %s', db_user, r)
        return abort(500)

    if status not in (200, 201):
        log.error('internal response from %s to Eve: %r %r', attempted_eve_method, status, r)
        return abort(500)

    # Store the token in MongoDB.
    authentication.store_token(db_id, token, token_expiry, oauth_subclient_id)

    return db_user, status
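The retry loop above relies on Python's for/else: the else clause runs only when the loop finishes without hitting break, i.e. when every attempt failed. A self-contained illustration (try_once is a hypothetical stand-in for the save operation):

def try_once():
    return False  # hypothetical operation; returns True on success

for attempt in range(5):
    if try_once():
        break  # success skips the else clause below
else:
    # reached only when no 'break' happened
    raise RuntimeError('all 5 attempts failed')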
Example #14
def do_melwin_update(app):

    app.logger.info("[MELWIN] Updater started")
    use_pickle = False

    result = {'replaced': 0, 'created': 0, 'errors': 0, 'error_ids': []}

    try:

        with app.test_request_context('api/v1/melwin/users'):

            m = Melwin()  # needed for geocoding below, even when the pickle is used

            try:
                if not use_pickle:
                    raise FileNotFoundError

                with open("persons.p", "rb") as f:
                    persons = pickle.load(f)
                    app.logger.info("[MELWIN] Using local person pickle file")

            except FileNotFoundError:
                app.logger.info("[MELWIN] requesting data from Melwin")
                persons = m.get_all()
                with open("persons.p", "wb") as f:
                    pickle.dump(persons, f)
            except Exception:
                # Without person data there is nothing to update; let the
                # outer handler log and warn.
                app.logger.exception("[MELWIN] Could not load person data")
                raise

            for key, user in persons.items():
                # Iterate over every person from Melwin.

                try:
                    existing_user, _, _, status = getitem_internal(resource='melwin/users', **{'id': user['id']})
                    if not existing_user or status != 200:
                        existing_user = None
                except Exception:
                    app.logger.info("[MELWIN] No existing user %i" % user['id'])
                    existing_user = None

                if existing_user is None \
                        or user['location']['street'] != existing_user['location']['street'] \
                        or user['location']['zip'] != existing_user['location']['zip'] \
                        or user['location']['country'] != existing_user['location']['country'] \
                        or user['location']['city'] != existing_user['location']['city']:

                    app.logger.info("[MELWIN] Geocoding %i" % user['id'])
                    try:
                        geo = m.get_geo(user['location']['street'],
                                        user['location']['city'],
                                        user['location']['zip'],
                                        user['location']['country'])
                        if geo is not None:
                            user['location'].update(
                                # GeoJSON point order is [longitude, latitude]
                                {'geo': {"type": "Point", "coordinates": [geo.longitude, geo.latitude]}})
                            user['location'].update({'geo_type': geo.raw['type']})
                            user['location'].update({'geo_class': geo.raw['class']})
                            user['location'].update({'geo_importance': float(geo.raw['importance'])})
                            user['location'].update({'geo_place_id': int(geo.raw['place_id'])})
                    except Exception:
                        app.logger.error("[MELWIN] Geocoding for user %i failed" % user['id'])


                if 'fullname' not in user:
                    user.update({'fullname': "%s %s" % (user['firstname'], user['lastname'])})

                lookup = dict({})

                try:
                    lookup = dict({'id': key})
                    r, _, _, status = put_internal(resource='melwin/users', payload=user, concurrency_check=False,
                                                   skip_validation=False, **lookup)

                    if status == 200:
                        result['replaced'] += 1
                    elif status == 201:
                        result['created'] += 1
                    else:
                        app.logger.info("[MELWIN] Status %i for put_internal" % status)

                except KeyError:
                    r, _, _, status, header = post_internal(resource='melwin/users', payl=user, skip_validation=True)

                    if status == 201:
                        result['created'] += 1

                except Exception:
                    result['errors'] += 1
                    result['error_ids'].append(user['id'])
                    app.logger.error("[MELWIN] Error for user %i" % user['id'])


    except Exception:
        # Major error: log it and warn via SMS.
        from ext.notifications.sms import Sms
        app.logger.exception("[MELWIN] Error updating users from Melwin")
        sms = Sms()
        sms.send(mobile=sms.get_warn_sms(), message="[%s] %s" % (500, "Error updating users from Melwin"))
        result['errors'] += 1

    app.logger.info("[MELWIN] Updater finished (created: %i updated: %i errors: %i)" %
                    (result['created'], result['replaced'], result['errors']))
    # Restart in one day!
    threading.Timer(get_timer(), do_melwin_update, [app]).start()
Example #15
def zencoder_notifications():
    """

    See: https://app.zencoder.com/docs/guides/getting-started/notifications#api_version_2

    """
    if current_app.config['ENCODING_BACKEND'] != 'zencoder':
        log.warning(
            'Received notification from Zencoder but app not configured for Zencoder.'
        )
        return abort(403)

    if not current_app.config['DEBUG']:
        # If we are in production, look for the Zencoder header secret
        try:
            notification_secret_request = request.headers[
                'X-Zencoder-Notification-Secret']
        except KeyError:
            log.warning('Received Zencoder notification without secret.')
            return abort(401)
        # If the header is found, check it against the one in the config
        notification_secret = current_app.config[
            'ZENCODER_NOTIFICATIONS_SECRET']
        if notification_secret_request != notification_secret:
            log.warning(
                'Received Zencoder notification with incorrect secret.')
            return abort(401)

    # Cast request data into a dict
    data = request.get_json()

    if log.isEnabledFor(logging.DEBUG):
        from pprint import pformat
        log.debug('Zencoder job JSON: %s', pformat(data))

    files_collection = current_app.data.driver.db['files']
    # Find the file object based on processing backend and job_id
    zencoder_job_id = data['job']['id']
    lookup = {
        'processing.backend': 'zencoder',
        'processing.job_id': str(zencoder_job_id)
    }
    file_doc = files_collection.find_one(lookup)
    if not file_doc:
        log.warning('Unknown Zencoder job id %r', zencoder_job_id)
        # In debug mode return 200 OK so Zencoder stops retrying; in
        # production return 404 so it keeps trying, which is what we want.
        return "Not found, but that's okay.", 200 if current_app.config[
            'DEBUG'] else 404

    file_id = ObjectId(file_doc['_id'])
    # Remove internal keys (so that we can run put internal)
    file_doc = utils.remove_private_keys(file_doc)

    # Update processing status
    job_state = data['job']['state']
    file_doc['processing']['status'] = job_state

    if job_state == 'failed':
        log.warning('Zencoder job %i for file %s failed.', zencoder_job_id,
                    file_id)
        # Log what Zencoder told us went wrong.
        for output in data['outputs']:
            if not any('error' in key for key in output):
                continue
            log.warning('Errors for output %s:', output['url'])
            for key in output:
                if 'error' in key:
                    log.info('    %s: %s', key, output[key])

        file_doc['status'] = 'failed'
        put_internal('files', file_doc, _id=file_id)
        return "You failed, but that's okay.", 200

    log.info('Zencoder job %s for file %s completed with status %s.',
             zencoder_job_id, file_id, job_state)

    # For every variation encoded, try to update the file object
    root, _ = os.path.splitext(file_doc['file_path'])

    for output in data['outputs']:
        video_format = output['format']
        # Change the zencoder 'mpeg4' format to 'mp4' used internally
        video_format = 'mp4' if video_format == 'mpeg4' else video_format

        # Find a variation matching format and resolution
        variation = next(
            (v for v in file_doc['variations']
             if v['format'] == video_format and v['width'] == output['width']),
            None)
        # Fall back to a variation matching just the format
        if variation is None:
            variation = next((v for v in file_doc['variations']
                              if v['format'] == video_format), None)
        if variation is None:
            log.warning(
                'Unable to find variation for video format %s for file %s',
                video_format, file_id)
            continue

        # Rename the file to include the now-known size descriptor.
        size = size_descriptor(output['width'], output['height'])
        new_fname = '{}-{}.{}'.format(root, size, video_format)

        # Rename on Google Cloud Storage
        try:
            rename_on_gcs(file_doc['project'], '_/' + variation['file_path'],
                          '_/' + new_fname)
        except Exception:
            log.warning(
                'Unable to rename GCS blob %r to %r. Keeping old name.',
                variation['file_path'],
                new_fname,
                exc_info=True)
        else:
            variation['file_path'] = new_fname

        # TODO: calculate md5 on the storage
        variation.update({
            'height': output['height'],
            'width': output['width'],
            'length': output['file_size_in_bytes'],
            'duration': data['input']['duration_in_ms'] / 1000,
            'md5': output['md5_checksum']
            or '',  # they don't do MD5 for GCS...
            'size': size,
        })

    file_doc['status'] = 'complete'

    # Force an update of the links on the next load of the file.
    file_doc['link_expires'] = datetime.datetime.now(
        tz=tz_util.utc) - datetime.timedelta(days=1)

    put_internal('files', file_doc, _id=file_id)

    return '', 204
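The handler above names each encoded variation with a size_descriptor helper. A plausible sketch, assuming it simply turns the output dimensions into the familiar vertical-resolution label (the project's real helper may behave differently):

def size_descriptor(width, height):
    # e.g. 1920x1080 -> '1080p'
    return '%ip' % height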
Example #16
def process_file(gcs, file_id, local_file):
    """Process the file by creating thumbnails, sending to Zencoder, etc.

    :param file_id: '_id' key of the file
    :type file_id: ObjectId or str
    :param local_file: locally stored file, or None if no local processing is needed.
    :type local_file: file
    """

    file_id = ObjectId(file_id)

    # Fetch the src_file document from MongoDB.
    files = current_app.data.driver.db['files']
    src_file = files.find_one(file_id)
    if not src_file:
        log.warning('process_file(%s): no such file document found, ignoring.',
                    file_id)
        return
    src_file = utils.remove_private_keys(src_file)

    # Update the 'format' field from the content type.
    # TODO: overrule the content type based on file extension & magic numbers.
    mime_category, src_file['format'] = src_file['content_type'].split('/', 1)

    # Prevent video handling for non-admins.
    if not user_has_role(u'admin') and mime_category == 'video':
        if src_file['format'].startswith('x-'):
            xified = src_file['format']
        else:
            xified = 'x-' + src_file['format']

        src_file['content_type'] = 'application/%s' % xified
        mime_category = 'application'
        log.info('Not processing video file %s for non-admin user', file_id)

    # Run the required processor, based on the MIME category.
    processors = {
        'image': _process_image,
        'video': _process_video,
    }

    try:
        processor = processors[mime_category]
    except KeyError:
        log.info(
            "POSTed file %s was of type %r, which isn't thumbnailed/encoded.",
            file_id, mime_category)
        src_file['status'] = 'complete'
    else:
        log.debug('process_file(%s): marking file status as "processing"',
                  file_id)
        src_file['status'] = 'processing'
        update_file_doc(file_id, status='processing')

        try:
            processor(gcs, file_id, local_file, src_file)
        except Exception:
            log.warning(
                'process_file(%s): error when processing file, resetting status to '
                '"queued_for_processing"',
                file_id,
                exc_info=True)
            update_file_doc(file_id, status='queued_for_processing')
            return

    # Update the original file with additional info, e.g. image resolution
    r, _, _, status = put_internal('files', src_file, _id=file_id)
    if status not in (200, 201):
        log.warning(
            'process_file(%s): status %i when saving processed file info to MongoDB: %s',
            file_id, status, r)
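The try/except KeyError dispatch above can also be written with dict.get. A generic, self-contained sketch of that alternative (handler names are placeholders, not from the project):

def handle_image(doc):
    doc['status'] = 'image-processed'

def handle_video(doc):
    doc['status'] = 'video-processed'

processors = {'image': handle_image, 'video': handle_video}

doc = {'content_type': 'application/pdf'}
mime_category = doc['content_type'].split('/', 1)[0]

processor = processors.get(mime_category)
if processor is None:
    doc['status'] = 'complete'  # nothing registered for this category
else:
    processor(doc)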