# Example 1
class Store(models.Model):
    """Cache model that keeps previously converted files.

    Each record stores the converted payload together with the checksum
    of the source file, so a conversion can be looked up and reused
    instead of being recomputed.
    """

    _name = 'muk_converter.store'
    _description = 'Converter Store'

    #----------------------------------------------------------
    # Database
    #----------------------------------------------------------

    # Display name, derived from filename and checksum (stored).
    name = fields.Char(string="Name", compute="_compute_name", store=True)
    # Timestamp of the last time this cached conversion was used.
    used_date = fields.Datetime(string="Used on", default=fields.Datetime.now)
    # Checksum of the source file (cache lookup key).
    checksum = fields.Char(string="Checksum", required=True)
    # Target format of the conversion.
    format = fields.Char(string="Format", required=True)
    # Filename of the converted content.
    content_fname = fields.Char(string="Filename", required=True)
    # Converted payload, persisted as a PostgreSQL large object.
    content = LargeObject(string="Data", required=True)

    #----------------------------------------------------------
    # Read
    #----------------------------------------------------------

    @api.depends('checksum', 'content_fname')
    def _compute_name(self):
        """Build the display name as ``<filename> (<checksum>)``."""
        for rec in self:
            rec.name = "%s (%s)" % (rec.content_fname, rec.checksum)
# Example 2
class LargeObjectDataModel(models.Model):
    """Concrete ``muk_dms.data`` back-end that persists file content as
    a PostgreSQL large object."""

    _name = 'muk_dms.data_lobject'
    _description = 'Large Object Data Model'
    _inherit = 'muk_dms.data'

    #----------------------------------------------------------
    # Database
    #----------------------------------------------------------

    # Payload, persisted as a PostgreSQL large object.
    data = LargeObject(string="Content")

    #----------------------------------------------------------
    # Abstract Implementation
    #----------------------------------------------------------

    @api.multi
    def type(self):
        """Identifier of this storage back-end."""
        return "lobject"

    @api.multi
    def content(self):
        """Return the stored payload for a single record.

        With ``bin_size`` in the context only the human-readable size is
        returned; otherwise the data comes back base64 encoded.
        """
        self.ensure_one()
        ctx = ({'bin_size': True} if self.env.context.get('bin_size')
               else {'base64': True})
        return self.with_context(ctx).data

    @api.multi
    def update(self, values):
        """Write new content; only the ``content`` key is relevant."""
        if 'content' in values:
            self.write({'data': values['content']})
# Example 3
class Dummy(models.Model):
    """Minimal model used to exercise the ``LargeObject`` field type in
    the ``muk_test_fields_lobject`` test suite."""

    _name = 'muk_test_fields_lobject.dummy'

    # Original filename of the stored payload.
    content_fname = fields.Char(string="Filename")
    # Binary payload, persisted as a PostgreSQL large object.
    content = LargeObject(string="Data")
# Example 4
class LObjectIrAttachment(models.Model):
    """Extend ``ir.attachment`` with a PostgreSQL large object storage
    back-end (storage location name: ``lobject``)."""

    _inherit = 'ir.attachment'

    #----------------------------------------------------------
    # Database
    #----------------------------------------------------------

    # Attachment payload when stored as a PostgreSQL large object.
    store_lobject = LargeObject(string="Data")

    #----------------------------------------------------------
    # Helper
    #----------------------------------------------------------

    @api.model
    def _get_datas_inital_vals(self):
        """Also reset ``store_lobject`` when an attachment's storage
        fields are reinitialised before a rewrite."""
        vals = super(LObjectIrAttachment, self)._get_datas_inital_vals()
        vals.update({'store_lobject': False})
        return vals

    #----------------------------------------------------------
    # Function
    #----------------------------------------------------------

    @api.model
    def storage_locations(self):
        """Register 'lobject' as an available storage location."""
        locations = super(LObjectIrAttachment, self).storage_locations()
        locations.append('lobject')
        return locations

    @api.model
    def force_storage(self):
        """Migrate binary attachments not yet stored as large objects.

        :raises AccessError: if the current user is not an administrator.
        """
        if not self.env.user._is_admin():
            raise AccessError(
                _('Only administrators can execute this action.'))
        if self._storage() != 'lobject':
            return super(LObjectIrAttachment, self).force_storage()
        else:
            storage_domain = {
                'lobject': ('store_lobject', '=', False),
            }
            # NOTE(review): the tautological '|' on res_field appears to
            # be there to match field-attached records as well, bypassing
            # ir.attachment's default res_field filtering — confirm.
            record_domain = [
                '&', ('type', '=', 'binary'),
                '&', storage_domain[self._storage()], '|',
                ('res_field', '=', False), ('res_field', '!=', False)
            ]
            self.search(record_domain).migrate()
            return True

    #----------------------------------------------------------
    # Read
    #----------------------------------------------------------

    @api.depends('store_lobject')
    def _compute_datas(self):
        """Expose the large object payload through the ``datas`` field.

        ``bin_size`` in the context yields a human readable size instead
        of the (base64 encoded) content.
        """
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_lobject:
                if bin_size:
                    attach.datas = attach.with_context({
                        'human_size': True
                    }).store_lobject
                else:
                    attach.datas = attach.with_context({
                        'base64': True
                    }).store_lobject
            else:
                # Not stored as a large object: defer to the standard
                # computation for this record.
                super(LObjectIrAttachment, attach)._compute_datas()

    #----------------------------------------------------------
    # Create, Write, Delete
    #----------------------------------------------------------

    @api.multi
    def _inverse_datas(self):
        """Persist ``datas``; for 'lobject' storage, decode the payload
        into ``store_lobject`` and clean up the previous location.

        The sequence is order-sensitive: initial vals are reset, updated
        from the new binary data, written via the *base* Model.write (to
        bypass every override), and only then is the old storage cleaned.
        """
        location = self._storage()
        for attach in self:
            if location == 'lobject':
                value = attach.datas
                bin_data = base64.b64decode(value) if value else b''
                vals = self._get_datas_inital_vals()
                vals = self._update_datas_vals(vals, attach, bin_data)
                vals['store_lobject'] = bin_data
                clean_vals = self._get_datas_clean_vals(attach)
                # Direct base-class write: skip all write() overrides.
                models.Model.write(attach.sudo(), vals)
                self._clean_datas_after_write(clean_vals)
            else:
                super(LObjectIrAttachment, attach)._inverse_datas()
# Example 5
class LObjectIrAttachment(models.Model):
    """Extend ``ir.attachment`` with a PostgreSQL large object storage
    back-end and a chunked, per-transaction ``force_storage``
    migration."""

    _inherit = 'ir.attachment'

    # Attachment payload when stored as a PostgreSQL large object.
    store_lobject = LargeObject(string="Data")

    def _force_storage_prepare_chunks(self):
        """ Technical method to select attachments that need to be migrated
            This method automatically splits attachments into chunks,
            to speed up migration.

            :return list: list of chunks where each chunk is list of attachment ids
                          [[1,2,3],[40, 42, 12,33], ...]
        """
        CHUNK_SIZE = 100
        # The tautological '|' on res_field matches field-attached
        # records as well (bypasses the default res_field filter).
        attachments = self.search(
            ['|', ['res_field', '=', False], ['res_field', '!=', False]])
        storage = self._storage()
        chunks = []
        current_chunk = []
        for attach in attachments:
            # Detect storage_type of attachment
            if attach.db_datas:
                current = 'db'
            elif attach.store_lobject:
                current = 'lobject'
            elif attach.store_fname:
                current = 'file'
            else:
                current = None

            if storage != current:
                # This attachment needs migration, thus adding it to result
                current_chunk += [attach.id]
                if len(current_chunk) >= CHUNK_SIZE:
                    chunks += [current_chunk]
                    current_chunk = []

        if current_chunk:
            chunks += [current_chunk]
        return chunks

    @api.model
    def force_storage(self):
        """Migrate all attachments to the configured storage, one chunk
        per transaction: commit after a successful chunk, roll back and
        re-raise on any error.

        :raises AccessError: if the current user is not an administrator.
        """
        if not self.env.user._is_admin():
            raise AccessError(
                _('Only administrators can execute this action.'))

        # Do migration by chunks to make it faster.
        # FIX: compute the chunk list once and iterate it — the original
        # called _force_storage_prepare_chunks() a second time inside
        # enumerate(), re-scanning every attachment for no benefit.
        chunks_to_migrate = self._force_storage_prepare_chunks()
        for chunk_index, chunk in enumerate(chunks_to_migrate):
            # Here we need to process each chunk in a new transaction.
            # When all attachments in chunk processed, then commit.
            # In case of any errors - rollback
            with api.Environment.manage():
                with odoo.registry(self.env.cr.dbname).cursor() as new_cr:
                    new_env = api.Environment(new_cr, self.env.uid,
                                              self.env.context.copy())
                    attachments = new_env['ir.attachment'].browse(chunk)
                    try:
                        for index, attach in enumerate(attachments):
                            _logger.info(
                                "Migrate Attachment %s of %s [chunk %s of %s]",
                                index, len(attachments), chunk_index,
                                len(chunks_to_migrate))
                            # Re-writing datas routes the payload through
                            # _inverse_datas into the configured storage.
                            attach.write({'datas': attach.datas})
                    except Exception:
                        _logger.error("Cannot migrate attachments.",
                                      exc_info=True)
                        new_cr.rollback()
                        raise
                    else:
                        new_cr.commit()

    @api.depends('store_fname', 'db_datas', 'store_lobject')
    def _compute_datas(self):
        """Expose the large object payload through the ``datas`` field;
        with ``bin_size`` in the context return the raw field value."""
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_lobject:
                if bin_size:
                    attach.datas = attach.store_lobject
                else:
                    attach.datas = attach.with_context({
                        'base64': True
                    }).store_lobject
            else:
                super(LObjectIrAttachment, attach)._compute_datas()

    def _inverse_datas(self):
        """Persist ``datas``; for 'lobject' storage decode the payload
        into ``store_lobject`` and delete any previous file-store entry.
        """
        location = self._storage()
        for attach in self:
            if location == 'lobject':
                value = attach.datas
                bin_data = base64.b64decode(value) if value else b''
                vals = {
                    'file_size':
                    len(bin_data),
                    'checksum':
                    self._compute_checksum(bin_data),
                    'index_content':
                    self._index(bin_data, attach.datas_fname, attach.mimetype),
                    'store_fname':
                    False,
                    'db_datas':
                    False,
                    'store_lobject':
                    bin_data,
                }
                fname = attach.store_fname
                # Write via super() to bypass this model's own logic.
                super(LObjectIrAttachment, attach.sudo()).write(vals)
                if fname:
                    self._file_delete(fname)
            else:
                super(LObjectIrAttachment, attach)._inverse_datas()
                # It is required to set 'store_lobject' to false, because it is
                # used in muk_dms_attachment to detect storage type of
                # attachment, thus it is impossible to detect attachments that
                # need migration 'LObject -> File' or 'LObject -> smthng'
                attach.write({'store_lobject': False})

    def _compute_mimetype(self, values):
        """Fall back to the record's stored mimetype, then to a guess
        from the filename, when the standard computation is generic."""
        mimetype = super(LObjectIrAttachment, self)._compute_mimetype(values)
        if not mimetype or mimetype == 'application/octet-stream':
            mimetype = None
            for attach in self:
                if attach.mimetype:
                    mimetype = attach.mimetype
                if not mimetype and attach.datas_fname:
                    mimetype = mimetypes.guess_type(attach.datas_fname)[0]
        return mimetype or 'application/octet-stream'
# Example 6
class LObjectIrAttachment(models.Model):
    """Extend ``ir.attachment`` with a PostgreSQL large object storage
    back-end (storage location name: ``lobject``)."""

    _inherit = "ir.attachment"

    # ----------------------------------------------------------
    # Database
    # ----------------------------------------------------------

    # Attachment payload when stored as a PostgreSQL large object.
    store_lobject = LargeObject(string="Data")

    # ----------------------------------------------------------
    # Helper
    # ----------------------------------------------------------

    @api.model
    def _get_storage_domain(self, storage):
        """Return the domain matching attachments NOT yet held by the
        given *storage* location."""
        if storage == "lobject":
            return [("store_lobject", "=", False)]
        return super(LObjectIrAttachment, self)._get_storage_domain(storage)

    # ----------------------------------------------------------
    # Function
    # ----------------------------------------------------------

    @api.model
    def storage_locations(self):
        """Register 'lobject' as an available storage location."""
        locations = super(LObjectIrAttachment, self).storage_locations()
        locations.append("lobject")
        return locations

    # ----------------------------------------------------------
    # Read
    # ----------------------------------------------------------

    @api.depends("store_lobject")
    def _compute_datas(self):
        """Expose the large object payload through the ``datas`` field.

        ``bin_size`` in the context yields a human readable size instead
        of the (base64 encoded) content.
        """
        bin_size = self._context.get("bin_size")
        for attach in self:
            if attach.store_lobject:
                if bin_size:
                    attach.datas = attach.with_context({
                        "human_size": True
                    }).store_lobject
                else:
                    attach.datas = attach.with_context({
                        "base64": True
                    }).store_lobject
            else:
                super(LObjectIrAttachment, attach)._compute_datas()

    # ----------------------------------------------------------
    # Create, Write, Delete
    # ----------------------------------------------------------

    def _get_datas_related_values(self, data, mimetype):
        """Build the storage-specific write values for *data* (a base64
        string) and *mimetype*."""
        if self._storage() == "lobject":
            bin_data = base64.b64decode(data) if data else b""
            values = {
                "file_size": len(bin_data),
                "checksum": self._compute_checksum(bin_data),
                "index_content": self._index(bin_data, mimetype),
                "store_lobject": bin_data,
                "store_fname": False,
                "db_datas": False,
            }
            return values
        # FIX: the original delegated via the undefined name ``attach``
        # (NameError on every non-lobject storage); the recordset in
        # scope here is ``self``.
        return super(LObjectIrAttachment,
                     self)._get_datas_related_values(data, mimetype)
# Example 7
class LObjectIrAttachment(models.Model):
    """Store ``ir.attachment`` payloads as PostgreSQL large objects."""

    _inherit = 'ir.attachment'

    # Attachment payload when stored as a PostgreSQL large object.
    store_lobject = LargeObject(
        string="Data")

    @api.model
    def storage_locations(self):
        """Register 'lobject' as an available storage location."""
        return super(LObjectIrAttachment, self).storage_locations() + ['lobject']

    @api.model
    def force_storage(self):
        """Migrate attachments not yet stored as large objects.

        :raises AccessError: if the current user is not an administrator.
        """
        if not self.env.user._is_admin():
            raise AccessError(_('Only administrators can execute this action.'))
        if self._storage() != 'lobject':
            return super(LObjectIrAttachment, self).force_storage()
        # Every record whose payload is not yet a large object; the '|'
        # on res_field also matches field-attached records.
        pending_domain = [
            '&', ('store_lobject', '=', False),
            '|', ('res_field', '=', False), ('res_field', '!=', False)
        ]
        self.search(pending_domain).migrate()
        return True

    @api.depends('store_lobject')
    def _compute_datas(self):
        """Expose the large object payload through the ``datas`` field."""
        want_size = self._context.get('bin_size')
        for record in self:
            if not record.store_lobject:
                # No large object stored: fall back to the standard path.
                super(LObjectIrAttachment, record)._compute_datas()
            elif want_size:
                record.datas = record.store_lobject
            else:
                record.datas = record.with_context({'base64': True}).store_lobject

    def _inverse_datas(self):
        """Persist ``datas``; for 'lobject' storage decode it into
        ``store_lobject`` and drop any previous file-store entry."""
        storage = self._storage()
        for record in self:
            if storage != 'lobject':
                super(LObjectIrAttachment, record)._inverse_datas()
                continue
            raw = record.datas
            payload = base64.b64decode(raw) if raw else b''
            new_vals = {
                'file_size': len(payload),
                'checksum': self._compute_checksum(payload),
                'index_content': self._index(payload, record.datas_fname, record.mimetype),
                'store_fname': False,
                'db_datas': False,
                'store_lobject': payload,
            }
            previous_fname = record.store_fname
            # Write via super() to bypass this model's own write logic.
            super(LObjectIrAttachment, record.sudo()).write(new_vals)
            if previous_fname:
                self._file_delete(previous_fname)
# Example 8
class LObjectIrAttachment(models.Model):
    """Extend ``ir.attachment`` with a PostgreSQL large object storage
    back-end (storage location name: ``lobject``)."""

    _inherit = 'ir.attachment'

    # Attachment payload when stored as a PostgreSQL large object.
    store_lobject = LargeObject(string="Data")

    @api.model
    def force_storage(self):
        """Rewrite every attachment so its payload is routed (via
        ``_inverse_datas``) into the configured storage back-end.

        :raises AccessError: if the current user is not an administrator.
        :return: True
        """
        if not self.env.user._is_admin():
            raise AccessError(
                _('Only administrators can execute this action.'))
        # The tautological '|' on res_field matches field-attached
        # records as well (bypasses the default res_field filter).
        attachments = self.search(
            ['|', ['res_field', '=', False], ['res_field', '!=', False]])
        for index, attach in enumerate(attachments):
            # FIX: pass the format arguments to the logger lazily
            # instead of eagerly %-formatting the translated string —
            # no work is done when INFO logging is disabled.
            _logger.info(
                _("Migrate Attachment %s of %s"), index, len(attachments))
            attach.write({'datas': attach.datas})
        return True

    @api.depends('store_fname', 'db_datas', 'store_lobject')
    def _compute_datas(self):
        """Expose the large object payload through the ``datas`` field;
        with ``bin_size`` in the context return the raw field value."""
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_lobject:
                if bin_size:
                    attach.datas = attach.store_lobject
                else:
                    attach.datas = attach.with_context({
                        'base64': True
                    }).store_lobject
            else:
                super(LObjectIrAttachment, attach)._compute_datas()

    def _inverse_datas(self):
        """Persist ``datas``; for 'lobject' storage decode the payload
        into ``store_lobject`` and delete any previous file-store entry.
        """
        location = self._storage()
        for attach in self:
            if location == 'lobject':
                value = attach.datas
                bin_data = base64.b64decode(value) if value else b''
                vals = {
                    'file_size':
                    len(bin_data),
                    'checksum':
                    self._compute_checksum(bin_data),
                    'index_content':
                    self._index(bin_data, attach.datas_fname, attach.mimetype),
                    'store_fname':
                    False,
                    'db_datas':
                    False,
                    'store_lobject':
                    bin_data,
                }
                fname = attach.store_fname
                # Write via super() to bypass this model's own logic.
                super(LObjectIrAttachment, attach.sudo()).write(vals)
                if fname:
                    self._file_delete(fname)
            else:
                super(LObjectIrAttachment, attach)._inverse_datas()

    def _compute_mimetype(self, values):
        """Fall back to the record's stored mimetype, then to a guess
        from the filename, when the standard computation is generic."""
        mimetype = super(LObjectIrAttachment, self)._compute_mimetype(values)
        if not mimetype or mimetype == 'application/octet-stream':
            mimetype = None
            for attach in self:
                if attach.mimetype:
                    mimetype = attach.mimetype
                if not mimetype and attach.datas_fname:
                    mimetype = mimetypes.guess_type(attach.datas_fname)[0]
        return mimetype or 'application/octet-stream'