Example #1
def collect(ctx):
    logger = ctx.obj['logger']

    import os  # presumably module-level in the original; added so the snippet runs standalone
    from odoo.modules import get_modules, get_module_path
    from odoo.tools.osutil import listdir
    from odooku.backends import get_backend

    s3_backend = get_backend('s3')

    for module in get_modules():
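        # RESERVED is assumed to be a module-level constant in the source
        # (see the wiring sketch after Example #2)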
        if module in RESERVED:
            logger.warning("Module name %s clashes with a reserved key",
                           module)
            continue
        static_dir = os.path.join(get_module_path(module), 'static')
        if os.path.exists(static_dir):
            for filename in listdir(static_dir, True):
                path = os.path.join(static_dir, filename)
                url = os.path.join(module, 'static', filename)
                logger.info("Uploading %s", url)
                s3_backend.client.upload_file(
                    path,
                    s3_backend.bucket,
                    url,
                    ExtraArgs={
                        'ACL': 'public-read',
                        'CacheControl': 'max-age=%d, public' % s3_backend.cache_time,
                    }
                )
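
All of these examples obtain a shared backend via `get_backend('s3')` and use only its `client`, `bucket`, and `cache_time` attributes. As a rough sketch (the class name and constructor are assumptions, not odooku's actual API), such a backend could be a thin wrapper around a boto3 client:

import boto3


class S3Backend(object):
    # Hypothetical stand-in for the object returned by get_backend('s3').
    def __init__(self, bucket, cache_time=3600, **client_kwargs):
        self.client = boto3.client('s3', **client_kwargs)
        self.bucket = bucket
        self.cache_time = cache_time  # seconds, used for the Cache-Control header above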
Example #2
def restore(ctx, db_name, copy, update, s3_file):
    config = ctx.obj['config']

    if update:
        config['update']['all'] = 1

    from odooku.backends import get_backend
    from odoo.api import Environment
    from odoo.service.db import restore_db

    s3_backend = get_backend('s3')

    with tempfile.NamedTemporaryFile(delete=False) as t:
        if s3_file:
            s3_backend.client.download_fileobj(s3_backend.bucket, s3_file, t)
        else:
            # Read the dump from stdin; the binary buffer yields bytes for
            # the temp file (assumes Python 3)
            while True:
                chunk = sys.stdin.buffer.read(CHUNK_SIZE)
                if not chunk:
                    break
                t.write(chunk)
        t.close()

        with Environment.manage():
            restore_db(
                db_name,
                t.name,
                copy=copy
            )

        os.unlink(t.name)
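
Examples #1 through #3 lean on names that the snippets never define: `RESERVED`, `CHUNK_SIZE`, the `os`/`sys`/`tempfile` imports, and a click context object carrying `logger` and `config`. The sketch below shows one way that wiring could look; the constant values, group name, and config shape are assumptions, not odooku's actual configuration:

import logging
import os        # os/sys/tempfile are used by the command bodies above
import sys
import tempfile

import click

RESERVED = ('web',)     # hypothetical: bucket keys odooku reserves
CHUNK_SIZE = 16 * 1024  # hypothetical buffer size for the streaming loops


@click.group()
@click.pass_context
def cli(ctx):
    # The commands above read their logger and config from ctx.obj.
    ctx.obj = {
        'logger': logging.getLogger('odooku'),
        'config': {'update': {}},  # assumed shape; restore() sets ['update']['all']
    }


@cli.command('restore')
@click.argument('db_name')
@click.option('--copy', is_flag=True)
@click.option('--update', is_flag=True)
@click.option('--s3-file')
@click.pass_context
def restore_command(ctx, db_name, copy, update, s3_file):
    restore(ctx, db_name, copy, update, s3_file)  # the function from Example #2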
Example #3
def dump(ctx, db_name, s3_file):
    config = ctx.obj['config']

    from odooku.backends import get_backend
    from odoo.api import Environment
    from odoo.service.db import dump_db

    s3_backend = get_backend('s3')

    with tempfile.TemporaryFile() as t:
        with Environment.manage():
            dump_db(db_name, t)

        t.seek(0)
        if s3_file:
            s3_backend.client.upload_fileobj(t, s3_backend.bucket, s3_file)
        else:
            # Pipe the dump to stdout; write through the binary buffer so
            # bytes pass straight through (assumes Python 3)
            while True:
                chunk = t.read(CHUNK_SIZE)
                if not chunk:
                    break
                sys.stdout.buffer.write(chunk)
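
A side note on the two streaming loops: Examples #2 and #3 hand-roll the same chunked copy, which the standard library's `shutil.copyfileobj` already provides. An equivalent sketch, using the same binary streams and the temp file `t` from those examples:

import shutil
import sys

# restore path: stream stdin into the open temp file
shutil.copyfileobj(sys.stdin.buffer, t)

# dump path: stream the rewound temp file out to stdout
shutil.copyfileobj(t, sys.stdout.buffer)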
Example #4
def session_store(self):
    redis = get_backend('redis')
    if redis:
        _logger.info("HTTP sessions stored in redis")
        return RedisSessionStore(redis, session_class=OpenERPSession)
    else:
        path = odoo.tools.config.session_dir
        _logger.info("HTTP sessions stored locally in: %s", path)
        return werkzeug.contrib.sessions.FilesystemSessionStore(
            path, session_class=OpenERPSession)
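
The `RedisSessionStore` used in Example #4 is odooku's own class and its implementation isn't shown here. As a rough sketch of the idea, a werkzeug session store backed by redis could look like the following; the key layout, expiry, and pickle serialization are assumptions:

import pickle

from werkzeug.contrib.sessions import SessionStore


class RedisSessionStore(SessionStore):
    # Sketch only: stores each session as a pickled dict under its sid,
    # expiring after a fixed lifetime.
    def __init__(self, redis, session_class=None, expire=3600):
        super(RedisSessionStore, self).__init__(session_class=session_class)
        self.redis = redis
        self.expire = expire  # assumed session lifetime in seconds

    def save(self, session):
        self.redis.setex(session.sid, self.expire,
                         pickle.dumps(dict(session)))

    def delete(self, session):
        self.redis.delete(session.sid)

    def get(self, sid):
        if not self.is_valid_key(sid):
            return self.new()
        data = self.redis.get(sid)
        if data is None:
            return self.session_class({}, sid, True)
        return self.session_class(pickle.loads(data), sid, False)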
Example #5

import os
import logging

from odoo import api, fields, models, tools, _

from botocore.exceptions import ClientError

from odooku.backends import get_backend

_logger = logging.getLogger(__name__)

s3_backend = get_backend('s3')


class S3Error(Exception):
    pass


class S3NoSuchKey(S3Error):
    pass
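

# Illustrative only (not part of the original module): a helper showing how
# boto3's ClientError might map onto the two exceptions defined above.
def _s3_get(key):
    try:
        obj = s3_backend.client.get_object(Bucket=s3_backend.bucket, Key=key)
    except ClientError as e:
        if e.response['Error']['Code'] == 'NoSuchKey':
            raise S3NoSuchKey(key)
        raise S3Error(str(e))
    return obj['Body'].read()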


class IrAttachment(models.Model):

    _inherit = 'ir.attachment'

    s3_exists = fields.Boolean(string='Exists in S3 bucket', default=None)

    @api.depends('store_fname', 'db_datas')
    def _compute_datas(self):
        bin_size = self._context.get('bin_size')