Esempio n. 1
0
def thumbnail_viewer(resource):
    """Return a JPEG thumbnail of the given resource at the requested width.

    Looks up a cached Thumbnail record for (resource, width); on a cache miss,
    computes the thumbnail from the resource contents, stores it, and returns
    it. Width comes from the `width` query arg (default 100).
    """
    width = int(request.args.get('width', 100))  # fix(soon): safe int conversion (bad input raises ValueError)
    # fix(later): switch back to .one() query after fix issues with duplicates
    # .first() fetches the row in a single query (count()+index was two)
    thumbnail = Thumbnail.query.filter(Thumbnail.resource_id == resource.id,
                                       Thumbnail.width == width).first()
    if thumbnail is not None:
        thumbnail_contents = thumbnail.data
    else:
        contents = read_resource(resource)
        # fix(later): if this returns something other than requested width,
        # we'll keep missing the cache
        (thumbnail_contents, thumbnail_width,
         thumbnail_height) = compute_thumbnail(contents, width)
        thumbnail = Thumbnail()
        thumbnail.resource_id = resource.id
        thumbnail.width = thumbnail_width
        thumbnail.height = thumbnail_height
        thumbnail.format = 'jpg'
        thumbnail.data = thumbnail_contents
        db.session.add(thumbnail)
        db.session.commit()
    return Response(response=thumbnail_contents,
                    status=200,
                    mimetype='image/jpeg')
def _create_file(file_name, creation_timestamp, modification_timestamp,
                 file_data):
    """Create (or reuse/undelete) a file resource at the given path and store
    its contents as a new resource revision.

    `file_name` is a slash-separated path; intermediate folders are created
    as needed. For .png/.jpg files a thumbnail is also computed and stored.
    Returns the Resource record for the file.
    """
    # rpartition handles a name with no slash correctly: path == '' and the
    # whole string is the file name (the old rfind/slice code produced
    # path == file_name[:-1] in that case, mangling the folder lookup)
    path, _, short_file_name = file_name.rpartition('/')
    folder = _create_folders(path)

    # check for an existing (non-deleted) resource with the same name
    try:
        resource = Resource.query.filter(Resource.parent_id == folder.id,
                                         Resource.name == short_file_name,
                                         not_(Resource.deleted)).one()
        new_resource = False
    except NoResultFound:

        # create new resource record
        resource = Resource()
        resource.parent_id = folder.id
        resource.organization_id = folder.organization_id
        resource.name = short_file_name
        resource.creation_timestamp = creation_timestamp
        resource.type = Resource.FILE
        new_resource = True

    # update or init resource record (also undeletes a previously-deleted file)
    resource.deleted = False
    resource.modification_timestamp = modification_timestamp
    if resource.type != Resource.SEQUENCE:
        system_attributes = (json.loads(resource.system_attributes)
                             if resource.system_attributes else {})
        system_attributes['hash'] = hashlib.sha1(file_data).hexdigest()
        system_attributes['size'] = len(file_data)
        resource.system_attributes = json.dumps(system_attributes)
    if new_resource:
        db.session.add(resource)
        db.session.commit()

    # write file contents to a resource revision (possibly bulk storage)
    add_resource_revision(resource, modification_timestamp, file_data)
    db.session.commit()

    # compute thumbnail for images
    # fix(soon): handle more types, capitalizations
    if file_name.endswith(('.png', '.jpg')):
        for width in [120]:  # fix(soon): what will be our standard sizes?
            # fix(later): if this returns something other than requested
            # width, we'll keep missing the cache
            (thumbnail_contents, thumbnail_width,
             thumbnail_height) = compute_thumbnail(file_data, width)
            thumbnail = Thumbnail()
            thumbnail.resource_id = resource.id
            thumbnail.width = thumbnail_width
            thumbnail.height = thumbnail_height
            thumbnail.format = 'jpg'
            thumbnail.data = thumbnail_contents
            db.session.add(thumbnail)
        db.session.commit()
    return resource
Esempio n. 3
0
def update_sequence_value(resource, resource_path, timestamp, value, emit_message=True):
    """Record a new value for a sequence resource.

    Stores the value as a resource revision unless it arrives sooner than the
    sequence's min_storage_interval (seconds) after the last stored value.
    For image sequences, also updates a child thumbnail sequence. When
    `emit_message` is True, posts a short-lived 'sequence_update' message for
    subscribers to the folder containing the sequence (the message is sent
    even when the value itself is throttled and not stored).
    """
    # parse the system attributes JSON once (it was previously parsed twice,
    # and the first, unguarded parse crashed on an empty attribute string)
    system_attributes = json.loads(resource.system_attributes) if resource.system_attributes else {}
    data_type = system_attributes['data_type']

    # determine min interval between stored updates
    min_storage_interval = system_attributes.get('min_storage_interval')
    if min_storage_interval is None:
        if data_type == Resource.TEXT_SEQUENCE:
            min_storage_interval = 0  # record every text entry (e.g. log lines)
        else:
            min_storage_interval = 50

    # prep sequence update message data
    if emit_message:
        message_params = {
            'id': resource.id,
            'name': resource_path,  # full/absolute path of the sequence
            'timestamp': timestamp.isoformat() + 'Z',
        }
        if data_type != Resource.IMAGE_SEQUENCE:  # for images we'll send revision IDs
            message_params['value'] = value  # fix(soon): json.dumps crashes if this included binary data

    # if too soon since last update, don't store a new value (but do still send out an update message)
    if min_storage_interval == 0 or timestamp >= resource.modification_timestamp + datetime.timedelta(seconds=min_storage_interval):
        resource_revision = add_resource_revision(resource, timestamp, value.encode())
        resource.modification_timestamp = timestamp

        # create thumbnails for image sequences
        if data_type == Resource.IMAGE_SEQUENCE:
            max_width = 240
            name = 'thumbnail-%d-x' % max_width
            thumbnail_contents = compute_thumbnail(value, max_width)[0]
            try:
                thumbnail_resource = Resource.query.filter(Resource.parent_id == resource.id, Resource.name == name, not_(Resource.deleted)).one()
            except NoResultFound:
                thumbnail_resource = create_sequence(resource, name, Resource.IMAGE_SEQUENCE)
            thumbnail_revision = add_resource_revision(thumbnail_resource, timestamp, thumbnail_contents)
            if emit_message:
                message_params['revision_id'] = resource_revision.id
                message_params['thumbnail_revision_id'] = thumbnail_revision.id

    # create a short lived update message for subscribers to the folder containing this sequence
    if emit_message:
        folder_path = resource_path.rsplit('/', 1)[0]
        message_queue.add(
            folder_id=resource.parent_id, folder_path=folder_path, message_type='sequence_update', parameters=message_params, timestamp=timestamp)
Esempio n. 4
0
    def post(self):
        """Create a resource via HTTP POST.

        Expects a 'path' (or 'parent') arg naming the parent folder and a
        'type' arg giving the Resource type code. FILE contents come from an
        uploaded 'file' or base64 'contents'/'data' args; xls/xlsx uploads
        are converted to csv. SEQUENCE and REMOTE_FOLDER types get
        type-specific system attributes; CONTROLLER_FOLDER creation also
        provisions a controller status record, a log sequence, and a folder
        of status sequences. Aborts 400 on missing/invalid input, 403 on
        insufficient permissions; returns 'Resource already exists.' error
        dict when a non-deleted resource with the same name exists.
        Returns {'status': 'ok', 'id': <new resource id>} on success.
        """
        args = request.values

        # get parent
        path = args.get('path', args.get('parent'))  # fix(soon): decide whether to use path or parent
        if not path:
            abort(400)
        parent_resource = find_resource(path)  # expects leading slash
        if not parent_resource:
            # parent missing: allow creation if the caller has write access
            # to the organization (first path component), then create folders
            try:  # fix(soon): need to traverse up tree to check permissions, not just check org permissions
                org_name = path.split('/')[1]
                org_resource = Resource.query.filter(Resource.name == org_name, Resource.parent_id == None, Resource.deleted == False).one()
                if access_level(org_resource.query_permissions()) < ACCESS_LEVEL_WRITE:
                    abort(403)
            except NoResultFound:
                abort(403)
            _create_folders(path.strip('/'))
            parent_resource = find_resource(path)
            if not parent_resource:
                abort(400)

        # make sure we have write access to parent
        if access_level(parent_resource.query_permissions()) < ACCESS_LEVEL_WRITE:
            abort(403)

        # get main parameters
        file = request.files.get('file', None)
        name = file.filename if file else args['name']
        # NOTE(review): 'type' shadows the builtin; consider renaming when refactoring
        type = int(args['type'])  # fix(soon): safe int conversion

        # get timestamps (accept both snake_case and camelCase arg names)
        if 'creation_timestamp' in args:
            creation_timestamp = parse_json_datetime(args['creation_timestamp'])
        elif 'creationTimestamp' in args:
            creation_timestamp = parse_json_datetime(args['creationTimestamp'])
        else:
            creation_timestamp = datetime.datetime.utcnow()
        if 'modification_timestamp' in args:
            modification_timestamp = parse_json_datetime(args['modification_timestamp'])
        elif 'modificationTimestamp' in args:
            modification_timestamp = parse_json_datetime(args['modificationTimestamp'])
        else:
            modification_timestamp = creation_timestamp

        # check for existing resource
        try:
            resource = Resource.query.filter(Resource.parent_id == parent_resource.id, Resource.name == name, Resource.deleted == False).one()
            return {'message': 'Resource already exists.', 'status': 'error'}  # fix(soon): return 400 status code
        except NoResultFound:
            pass

        # create resource
        r = Resource()
        r.parent_id = parent_resource.id
        r.organization_id = parent_resource.organization_id
        r.name = name
        r.type = type
        r.creation_timestamp = creation_timestamp
        r.modification_timestamp = modification_timestamp
        if type == Resource.FILE:  # temporarily mark resource as deleted in case we fail to create resource revision record
            r.deleted = True
        else:
            r.deleted = False
        if 'user_attributes' in args:
            r.user_attributes = args['user_attributes']  # we assume that the attributes are already a JSON string

        # handle sub-types
        if type == Resource.FILE:

            # get file contents (if any) from request
            if file:
                # NOTE(review): cStringIO is Python 2 only; io.BytesIO is the py3 equivalent
                stream = cStringIO.StringIO()
                file.save(stream)
                data = stream.getvalue()
            else:
                data = base64.b64decode(args.get('contents', args.get('data', '')))  # fix(clean): remove contents version

            # convert files to standard types/format
            # fix(soon): should give the user a warning or ask for confirmation
            if name.endswith('xls') or name.endswith('xlsx'):
                data = convert_xls_to_csv(data)
                # NOTE(review): rsplit('.') with no maxsplit splits on every dot,
                # so 'a.b.xlsx' becomes 'a.csv'; confirm rsplit('.', 1) wasn't intended
                name = name.rsplit('.')[0] + '.csv'
                r.name = name
            if name.endswith('csv') or name.endswith('txt'):
                data = convert_new_lines(data)

            # compute other file attributes
            system_attributes = {
                'hash': hashlib.sha1(data).hexdigest(),
                'size': len(data),
            }
            if 'file_type' in args:  # fix(soon): can we remove this? current just using for markdown files
                system_attributes['file_type'] = args['file_type']
            r.system_attributes = json.dumps(system_attributes)
        elif type == Resource.SEQUENCE:
            data_type = int(args['data_type'])  # fix(soon): safe convert to int
            system_attributes = {
                'max_history': 10000,
                'data_type': data_type,
            }
            if args.get('decimal_places', '') != '':
                system_attributes['decimal_places'] = int(args['decimal_places'])  # fix(soon): safe convert to int
            if args.get('min_storage_interval', '') != '':
                min_storage_interval = int(args['min_storage_interval'])  # fix(soon): safe convert to int
            else:
                if data_type == Resource.TEXT_SEQUENCE:
                    min_storage_interval = 0  # default to 0 seconds for text sequences (want to record all log entries)
                else:
                    min_storage_interval = 50  # default to 50 seconds for numeric and image sequences
            if args.get('units'):
                system_attributes['units'] = args['units']
            system_attributes['min_storage_interval'] = min_storage_interval
            r.system_attributes = json.dumps(system_attributes)
        elif type == Resource.REMOTE_FOLDER:
            r.system_attributes = json.dumps({
                'remote_path': args['remote_path'],
            })

        # save resource record (must commit here so the resource has an ID)
        db.session.add(r)
        db.session.commit()

        # save file contents (after we have resource ID) and compute thumbnail if needed
        if type == Resource.FILE:
            add_resource_revision(r, r.creation_timestamp, data)
            r.deleted = False  # now that we have successfully created a revision, we can make the resource live
            db.session.commit()

            # compute thumbnail
            # fix(soon): recompute thumbnail on resource update
            if name.endswith('.png') or name.endswith('.jpg'):  # fix(later): handle more types, capitalizations
                for width in [120]:  # fix(later): what will be our standard sizes?
                    (thumbnail_contents, thumbnail_width, thumbnail_height) = compute_thumbnail(data, width)  # fix(later): if this returns something other than requested width, we'll keep missing the cache
                    thumbnail = Thumbnail()
                    thumbnail.resource_id = r.id
                    thumbnail.width = thumbnail_width
                    thumbnail.height = thumbnail_height
                    thumbnail.format = 'jpg'
                    thumbnail.data = thumbnail_contents
                    db.session.add(thumbnail)

        # handle the case of creating a controller; requires creating some additional records
        elif type == Resource.CONTROLLER_FOLDER:

            # create controller status record
            controller_status = ControllerStatus()
            controller_status.id = r.id
            controller_status.client_version = ''
            controller_status.web_socket_connected = False
            controller_status.watchdog_notification_sent = False
            controller_status.attributes = '{}'
            db.session.add(controller_status)
            db.session.commit()

            # create log sequence
            create_sequence(r, 'log', Resource.TEXT_SEQUENCE, max_history = 10000)

            # create a folder for status sequences
            status_folder = Resource()
            status_folder.parent_id = r.id
            status_folder.organization_id = r.organization_id
            status_folder.name = 'status'
            status_folder.type = Resource.BASIC_FOLDER
            status_folder.creation_timestamp = datetime.datetime.utcnow()
            status_folder.modification_timestamp = status_folder.creation_timestamp
            db.session.add(status_folder)
            db.session.commit()

            # create status sequences
            create_sequence(status_folder, 'free_disk_space', Resource.NUMERIC_SEQUENCE, max_history = 10000, units = 'bytes')
            create_sequence(status_folder, 'processor_usage', Resource.NUMERIC_SEQUENCE, max_history = 10000, units = 'percent')
            create_sequence(status_folder, 'messages_sent', Resource.NUMERIC_SEQUENCE, max_history = 10000)
            create_sequence(status_folder, 'messages_received', Resource.NUMERIC_SEQUENCE, max_history = 10000)
            create_sequence(status_folder, 'serial_errors', Resource.NUMERIC_SEQUENCE, max_history = 10000)

        return {'status': 'ok', 'id': r.id}