Example #1
    def session_info(self):
        res = super(IrHttp, self).session_info()

        Config = self.env["ir.config_parameter"].sudo()
        try:
            database_limit_size = int(
                Config.get_param("database_limit_size", 0))
        except ValueError:
            return res

        if not database_limit_size:
            return res

        self.env.cr.execute("select pg_database_size(%s)",
                            [self.env.cr.dbname])
        database_size = self.env.cr.fetchone()[0]

        filestore_size = get_directory_size(
            self.env["ir.attachment"]._filestore())

        total_size = database_size + filestore_size
        if total_size > database_limit_size:
            res["database_block_message"] = "Database size exceeded ({} / {})".format(
                human_size(total_size),
                human_size(database_limit_size),
            )
        elif total_size > database_limit_size * 0.9:
            res["database_block_message"] = "Database size is about to be exceeded ({} / {})".format(
                human_size(total_size),
                human_size(database_limit_size),
            )
            res["database_block_is_warning"] = True

        return res
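
The get_directory_size() helper called above is defined elsewhere in that module. As an illustrative sketch only (not that module's actual code), such a helper typically walks the filestore directory and sums the size of every regular file:

import os


def get_directory_size(path):
    # Total size, in bytes, of all regular files under ``path``.
    total = 0
    for dirpath, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            file_path = os.path.join(dirpath, filename)
            if os.path.isfile(file_path):
                total += os.path.getsize(file_path)
    return total
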
Example #2
 def _compute_db_statistics(self):
     for rec in self:
         data = prepare_db_statistic_data(self.env.cr.dbname)
         data.update({
             'db_storage': tools.human_size(data['db_storage']),
             'file_storage': tools.human_size(data['file_storage']),
         })
         data = {f: data[f] for f in STAT_FIELDS}
         rec.update(data)
Example #3
 def content(self):
     if self.env.context.get('bin_size'):
         file_path = self._build_path()
         return human_size(self._read_size(file_path))
     else:
         file_path = self._build_path()
         return self._read_file(file_path)
Example #4
 def convert_to_cache(self, value, record, validate=True):
     if value and isinstance(value, str) and os.path.exists(value):
         try:
             with open(value, "rb") as file:
                 if record._context.get("human_size"):
                     return human_size(file.seek(0, 2))
                 elif record._context.get("bin_size"):
                     return file.seek(0, 2)
                 elif record._context.get("path"):
                     return value
                 elif record._context.get("bytes"):
                     return file.read()
                 elif record._context.get("stream"):
                     temp = tempfile.TemporaryFile()
                     while True:
                         chunk = file.read(4096)
                         if not chunk:
                             temp.seek(0)
                             return temp
                         temp.write(chunk)
                 elif record._context.get("checksum"):
                     checksum = hashlib.sha1()
                     while True:
                         chunk = file.read(4096)
                         if not chunk:
                             return checksum.hexdigest()
                         checksum.update(chunk)
                 else:
                     return base64.b64encode(file.read())
         except (IOError, OSError):
             _logger.warning("Reading file from %s failed!",
                             value, exc_info=True)
     return None if value is False else value
Example #5
 def content(self):
     self.ensure_one()
     file_path = self._build_path()
     if self.env.context.get('bin_size'):
         return human_size(self._read_size(file_path))
     else:
         return self._read_file(file_path)
Example #6
 def convert_to_record(self, value, record):
     if value and isinstance(value, int):
         lobject = record.env.cr._cnx.lobject(value, "rb")
         if record._context.get("human_size"):
             return human_size(lobject.seek(0, 2))
         elif record._context.get("bin_size"):
             return lobject.seek(0, 2)
         elif record._context.get("oid"):
             return lobject.oid
         elif record._context.get("bytes"):
             return lobject.read()
         elif record._context.get("stream"):
             file = tempfile.TemporaryFile()
             while True:
                 chunk = lobject.read(4096)
                 if not chunk:
                     file.seek(0)
                     return file
                 file.write(chunk)
         elif record._context.get("checksum"):
             checksum = hashlib.sha1()
             while True:
                 chunk = lobject.read(4096)
                 if not chunk:
                     return checksum.hexdigest()
                 checksum.update(chunk)
         else:
             return base64.b64encode(lobject.read())
     return None if value is False else value
Example #7
 def convert_to_record(self, value, record):
     if value and isinstance(value, str) and os.path.exists(value):
         try:
             with open(value, 'rb') as file:
                 if record._context.get('human_size'):
                     return human_size(file.seek(0, 2))
                 elif record._context.get('bin_size'):
                     return file.seek(0, 2)
                 elif record._context.get('path'):
                     return value
                 elif record._context.get('base64'):
                     return base64.b64encode(file.read())
                 elif record._context.get('stream'):
                     temp = tempfile.TemporaryFile()
                     while True:
                         chunk = file.read(4096)
                         if not chunk:
                             temp.seek(0)
                             return temp
                         temp.write(chunk)
                 elif record._context.get('checksum'):
                     checksum = hashlib.sha1()
                     while True:
                         chunk = file.read(4096)
                         if not chunk:
                             return checksum.hexdigest()
                         checksum.update(chunk)
                 else:
                     return file.read()
         except (IOError, OSError):
             _logger.warning("Reading file from %s failed!", value, exc_info=True)
     return value
Example #8
 def convert_to_record(self, value, record):
     if value and isinstance(value, int):
         lobject = record.env.cr._cnx.lobject(value, 'rb')
         if record._context.get('human_size'):
             return human_size(lobject.seek(0, 2))
         elif record._context.get('bin_size'):
             return lobject.seek(0, 2)
         elif record._context.get('oid'):
             return lobject.oid
         elif record._context.get('base64'):
             return base64.b64encode(lobject.read())
         elif record._context.get('stream'):
             file = tempfile.TemporaryFile()
             while True:
                 chunk = lobject.read(4096)
                 if not chunk:
                     file.seek(0)
                     return file
                 file.write(chunk)
         elif record._context.get('checksum'):
             checksum = hashlib.md5()
             while True:
                 chunk = lobject.read(4096)
                 if not chunk:
                     return checksum.hexdigest()
                 checksum.update(chunk)
         else:
             return lobject.read()
     return value
Example #9
 def test_index_size(self):
     self.assertTrue(self.partner_binding.data)
     self.assertEqual(
         self.partner_binding.data_size,
         human_size(self.partner_binding._get_bytes_size()),
     )
     self.partner_binding.data = {}
     self.assertEqual(self.partner_binding.data_size, "2.00 bytes")
Example #10
    def _compute_datas(self):
        if self._context.get('bin_size'):
            for attach in self:
                attach.datas = human_size(attach.file_size)
            return

        for attach in self:
            attach.datas = base64.b64encode(attach.raw or b'')
Example #11
 def _file_read(self, fname, bin_size=False):
     full_path = self._full_path(fname)
     r = ''
     try:
         if bin_size:
             r = human_size(os.path.getsize(full_path))
         else:
             r = base64.b64encode(open(full_path, 'rb').read())
     except (IOError, OSError):
         _logger.info("_read_file reading %s", full_path, exc_info=True)
     return r
Example #12
 def _file_read(self, fname, bin_size=False):
     full_path = self._full_path(fname)
     r = ''
     try:
         if bin_size:
             r = human_size(os.path.getsize(full_path))
         else:
             r = base64.b64encode(open(full_path, 'rb').read())
     except (IOError, OSError):
         pass
     return r
Example #13
 def _file_read(self, fname, bin_size=False):
     full_path = self._full_path(fname)
     r = ''
     try:
         if bin_size:
             r = human_size(os.path.getsize(full_path))
         else:
             r = base64.b64encode(open(full_path, 'rb').read())
     except (IOError, OSError):
         _logger.info("_read_file reading %s", full_path, exc_info=True)
     return r
Example #14
 def _file_read(self, full_path, fname, bin_size=False):
     import_config = self.env['import.config'].search([], limit=1)
     source_path = str(import_config.source_path)
     destination_path = str(import_config.destination_path)
     full_path = source_path
     r = ''
     try:
         if bin_size:
             r = human_size(os.path.getsize(full_path))
         else:
             r = base64.b64encode(open(full_path, 'rb').read())
     except (IOError, OSError):
         _logger.info("_read_file reading %s", full_path, exc_info=True)
     return r
Example #15
    def _file_read(self, fname, bin_size=False):
        if not fname.startswith(PREFIX):
            return super(IrAttachment, self)._file_read(fname, bin_size)

        bucket = self.env["res.config.settings"].get_google_cloud_storage_bucket()

        file_id = fname[len(PREFIX) :]
        _logger.debug("reading file with id {}".format(file_id))

        blob = bucket.get_blob(file_id)

        if bin_size:
            return human_size(blob.size)
        else:
            return base64.b64encode(blob.download_as_string())
Example #16
    def _file_read(self, fname, bin_size=False):
        if not fname.startswith(PREFIX):
            return super(IrAttachment, self)._file_read(fname, bin_size)

        bucket = self.env["res.config.settings"].get_s3_bucket()

        file_id = fname[len(PREFIX):]
        _logger.debug("reading file with id {}".format(file_id))

        obj = bucket.Object(file_id)
        data = obj.get()

        if bin_size:
            return human_size(data["ContentLength"])
        else:
            return base64.b64encode(b"".join(data["Body"]))
Example #17
 def convert_to_record(self, value, record):
     if value and isinstance(value, int):
         lobject = record.env.cr._cnx.lobject(value, 'rb')
         if record._context.get('human_size'):
             return human_size(lobject.seek(0, 2))
         elif record._context.get('bin_size'):
             return lobject.seek(0, 2)
         elif record._context.get('oid'):
             return lobject.oid
         elif record._context.get('stream'):
             file = tempfile.TemporaryFile()
             while True:
                 chunk = lobject.read(4096)
                 if not chunk:
                     file.seek(0)
                     return file
                 file.write(chunk)
         else:
             return lobject.read()
     return value
Example #18
    def _file_read(self, fname, bin_size=False):
        def file_not_found(fname):
            raise UserError(
                _('''Error while reading file %s.
                Maybe it was removed or its permissions changed.
                Please refresh the list.''') % fname)

        self.ensure_one()
        r = ''
        directory = self.directory_id.get_dir()
        full_path = directory + fname
        if not (directory and os.path.isfile(full_path)):
            file_not_found(fname)
        try:
            if bin_size:
                r = human_size(os.path.getsize(full_path))
            else:
                r = base64.b64encode(open(full_path, 'rb').read())
        except (IOError, OSError):
            _logger.info("_read_file reading %s", fname, exc_info=True)
        return r
Example #19
 def _compute_human_file_size(self):
     for record in self:
         record.human_file_size = human_size(record.file_size)
Example #20
 def _compute_data_size(self):
     for rec in self:
         rec.data_size = human_size(rec._get_bytes_size())
Example #21
 def get_human_size(self):
     return human_size(self.size)
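
Every example above ultimately wraps a raw byte count with odoo.tools.human_size, which formats it as a human-readable string such as "2.00 bytes" or "1.50 Kb" (and returns False for a falsy value). As a minimal sketch, assuming a hypothetical demo.document model with an integer file_size column, a computed display field along the lines of the snippets above looks like this:

from odoo import api, fields, models
from odoo.tools import human_size


class DemoDocument(models.Model):
    _name = "demo.document"  # hypothetical model, for illustration only
    _description = "Demo Document"

    file_size = fields.Integer(help="Size in bytes")
    human_file_size = fields.Char(compute="_compute_human_file_size")

    @api.depends("file_size")
    def _compute_human_file_size(self):
        for record in self:
            # human_size() returns False for a falsy size, so fall back to an empty string
            record.human_file_size = human_size(record.file_size) or ""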