class BindingDataMixin(models.AbstractModel):
    _name = 'binding.data.mixin'

    updated = fields.Boolean()
    data = fields.Serialized()

    def _synchronize_magento_record(self, backend_id):
        backend = self.env['magento.backend'].browse(backend_id)
        with backend.work_on(self._name) as work:
            adapter = work.component(usage='backend.adapter')
            records = self.search([
                ('backend_id', '=', backend_id),
                ('updated', '=', False),
            ])
            total = len(records)
            missing_ids = []
            for idx, record in enumerate(records):
                if idx % 10 == 0:
                    _logger.info('progress {} / {}'.format(idx, total))
                try:
                    data = {}
                    for storeview in backend.mapped(
                            'website_ids.store_ids.storeview_ids'):
                        data[storeview.code] = adapter.read(
                            record.external_id,
                            storeview_id=storeview.external_id)
                except Exception as e:
                    _logger.warning(
                        'Failed to read %s from Magento: %s',
                        record.external_id, e)
                    missing_ids.append(record.id)
                    continue
                record.write({'updated': True, 'data': data})
                self._cr.commit()
        return missing_ids
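
A minimal usage sketch (from an Odoo shell), assuming a hypothetical
magento.product.product binding model that inherits this mixin:

# Hypothetical call; the binding model name and backend id are assumptions.
backend = env['magento.backend'].browse(1)
missing_ids = env['magento.product.product']._synchronize_magento_record(backend.id)
# Each binding that could be read now stores {storeview code: adapter.read() result}
# in its serialized `data` field and is flagged `updated`; the ids of unreadable
# bindings are returned as missing_ids.
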
class ShopinvaderVariant(models.Model):
    _inherit = ['shopinvader.variant', 'se.binding']
    _name = 'shopinvader.variant'
    _description = 'Shopinvader Variant'

    index_id = fields.Many2one(compute="_compute_index",
                               store=True,
                               required=False)

    price = fields.Serialized(compute='_compute_price',
                              string='Shopinvader Price')

    def _compute_price(self):
        for record in self:
            record.price = {}
            for role in record.backend_id.role_ids:
                fposition = role.fiscal_position_ids
                if len(fposition) > 0:
                    fposition = fposition[0]
                record.price[role.code] = record._get_price(
                    role.pricelist_id, fposition)

    @api.depends('lang_id', 'backend_id.se_backend_id')
    def _compute_index(self):
        for record in self:
            se_backend = record.backend_id.se_backend_id
            if se_backend:
                record.index_id = self.env['se.index'].search([
                    ('backend_id', '=', se_backend.id),
                    ('lang_id', '=', record.lang_id.id),
                    ('model_id.model', '=', record._name),
                ], limit=1)
            else:
                record.index_id = False
class BindingDataMixin(models.AbstractModel):
    _name = 'binding.data.mixin'

    data = fields.Serialized()

    @api.model
    def fields_get(self, allfields=None, attributes=None):
        res = super(BindingDataMixin, self).fields_get(allfields=allfields,
                                                       attributes=attributes)
        if 'data' in res:
            res['data']['translate'] = True
        return res

    def write(self, vals):
        if 'data' in vals:
            self.ensure_one()
            lang = self._context.get('lang', 'en_US')
            data = self.data
            data[lang] = vals['data']
            vals['data'] = data
        return super(BindingDataMixin, self).write(vals)

    @api.model
    def create(self, vals):
        if 'data' in vals:
            lang = self._context.get('lang', 'en_US')
            vals['data'] = {lang: vals['data']}
        return super(BindingDataMixin, self).create(vals)
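
A sketch of the per-language structure this mixin builds, assuming `binding`
is a record inheriting binding.data.mixin (run from an Odoo shell):

binding.with_context(lang='en_US').write({'data': {'name': 'Chair'}})
binding.with_context(lang='fr_FR').write({'data': {'name': 'Chaise'}})
# The serialized field now keeps one entry per language:
# binding.data == {'en_US': {'name': 'Chair'}, 'fr_FR': {'name': 'Chaise'}}
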
Example #4
class tms_host_group(models.Model):

    _name = "tms.host.group"
    _description = "Host Group"

    # Columns
    name = fields.Char('Name', size=256, required=True)
    config = fields.Serialized('Config',
                               help="Config variable used by Ansible")
Example #5
class Data(models.Model):
    _name = 'pubsub.data'

    ref = fields.Id(string='Ref', required=True)
    type = fields.Char(string='Type', required=True)
    data = fields.Serialized(string='Data')

    def _get_headers(self, ids):
        record = self.env['pubsub.header'].search_read([('id', 'in', ids)])

        headers = {}

        # Iterate over the headers and add them to the object
        for header in record:
            headers[header.get('field_id')[1]] = header.get('value')

        # Set `Content-Type` to `application/json` regardless of what is set in the settings
        headers['Content-Type'] = 'application/json'

        return headers

    def _in_type_list(self, type, ids):
        # Check if the type is associated with the subscription
        length = self.env['pubsub.type'].search_count([('id', 'in', ids),
                                                       ('type', '=', type)])

        return length > 0

    def _publish(self, vals):
        # Copy the `vals` object and change `ref` to `id`
        data = copy.copy(vals)
        data['id'] = data['ref']

        del data['ref']

        result = self.env['pubsub.subscription'].search_read([])

        for r in result:
            types = r.get('type_ids')

            if len(types) == 0 or self._in_type_list(data['type'], types):
                _logger.info('invoke')

                url = r.get('url')
                headers = self._get_headers(r.get('header_ids'))

                requests.post(url, headers=headers, data=json.dumps(data))

    @api.model
    def create(self, vals):
        rec = super(Data, self).create(vals)

        # Publish to all the subscriptions
        self._publish(vals)

        return rec
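
A sketch of the publish flow (from an Odoo shell), assuming at least one
pubsub.subscription record with a URL and headers is already configured:

env['pubsub.data'].create({
    'ref': 42,
    'type': 'invoice.created',
    'data': {'amount': 100.0},
})
# create() calls _publish(), which renames `ref` to `id` and POSTs the values
# as JSON to every subscription whose type list is empty or contains the type.
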
Example #6
class Sparse(models.Model):
    _name = 'test_new_api.sparse'

    data = fields.Serialized()
    boolean = fields.Boolean(sparse='data')
    integer = fields.Integer(sparse='data')
    float = fields.Float(sparse='data')
    char = fields.Char(sparse='data')
    selection = fields.Selection([('one', 'One'), ('two', 'Two')],
                                 sparse='data')
    partner = fields.Many2one('res.partner', sparse='data')
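
A short sketch of the sparse-field behaviour (from an Odoo shell with this test
model installed): values written to the sparse fields are persisted inside the
serialized `data` column instead of dedicated columns.

record = env['test_new_api.sparse'].create({
    'boolean': True,
    'integer': 5,
    'char': 'foo',
})
record.integer  # reads back 5 as usual
record.data     # == {'boolean': True, 'integer': 5, 'char': 'foo'}
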
Example #7
class TestSparse(models.TransientModel):
    _name = 'sparse_fields.test'

    data = fields.Serialized()
    boolean = fields.Boolean(sparse='data')
    integer = fields.Integer(sparse='data')
    float = fields.Float(sparse='data')
    char = fields.Char(sparse='data')
    selection = fields.Selection([('one', 'One'), ('two', 'Two')],
                                 sparse='data')
    partner = fields.Many2one('res.partner', sparse='data')
Example #8
class ProductTemplate(models.Model):
    _inherit = 'product.template'

    price_quantity_tiers = fields.Serialized(
        compute='_compute_price_quantity_tiers',
        help='Unit prices by pricelist and minimum quantity',
    )

    @api.multi
    def _compute_price_quantity_tiers(self):
        pricelists = self.env['product.pricelist'].search([('id', '>', 0)])

        for record in self:
            if not record.product_variant_ids:
                continue

            results = {}

            for pricelist in pricelists:
                pricelist_items = self.env['product.pricelist.item'].search([
                    ('pricelist_id', '=', pricelist.id),
                    ('product_tmpl_id', '=', record.id),
                    '|',
                    ('date_start', '<=', fields.Date.today()),
                    ('date_start', '=', False),
                    '|',
                    ('date_end', '>=', fields.Date.today()),
                    ('date_end', '=', False),
                ])

                min_quantities = set([])
                for price in pricelist_items:
                    min_quantities.add(
                        1 if price.min_quantity < 1 else price.min_quantity
                    )
                min_quantities.add(1)

                list_results = set([])
                for min_qty in min_quantities:
                    res = pricelist.price_rule_get(
                        record.product_variant_ids[0].id, min_qty
                    )
                    if res[pricelist.id][1] in pricelist_items.ids \
                            or min_qty == 1:
                        list_results.add((min_qty, res[pricelist.id][0]))

                list_results = sorted(list(list_results))
                # A tier with min qty of 1 should not exist on its own
                if len(list_results) == 1 and list_results[0][0] == 1:
                    list_results = []
                results[pricelist.id] = list_results

            record.price_quantity_tiers = results
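
The computed value maps each pricelist id to a sorted list of
(minimum quantity, unit price) tuples; a sketch of the shape, with illustrative
numbers only:

example_price_quantity_tiers = {
    1: [(1, 20.0), (10, 18.0), (50, 15.0)],  # pricelist with quantity breaks
    2: [],  # pricelist that only had a qty-1 tier, emptied on purpose (see above)
}
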
class TmsDockerRepoTags(models.Model):
    _name = "tms.docker.repo.tags"
    _description = "Tms docker repository tags"
    _order = "create_date desc"

    name = fields.Char('Name')
    create_date = fields.Datetime('Creation Date')
    size = fields.Float('Size (MB)')
    active = fields.Boolean('Active', default=True)
    tms_docker_repo_id = fields.Many2one('tms.docker.repo',
                                         string='Tms docker repository')
    db_file = fields.Char('Database file')
    meta_data = fields.Serialized('Meta Data', readonly=True)
Example #10
class ShopinvaderVariant(models.Model):
    _inherit = 'shopinvader.variant'

    hierarchical_categories = fields.Serialized(
        compute='_compute_shopinvader_category',
        string='Hierarchical Categories')

    def _compute_shopinvader_category(self):
        super(ShopinvaderVariant, self)._compute_shopinvader_category()

        def get_full_name(categ):
            result = []
            while categ:
                result.insert(0, categ.name)
                categ = categ.parent_id
            return ' > '.join(result)

        for record in self:
            record.hierarchical_categories = {}
            for categ in record.shopinvader_categ_ids:
                record.hierarchical_categories['lvl%s' % categ.level] =\
                    get_full_name(categ.record_id)
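
The resulting serialized value maps a level key to the full ' > ' separated
category path; an illustrative shape:

example_hierarchical_categories = {
    'lvl0': 'Furniture',
    'lvl1': 'Furniture > Chairs',
}
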
Example #11
class ShopinvaderVariant(models.Model):
    _inherit = 'shopinvader.variant'

    rating = fields.Serialized(compute='_compute_rating', string='Rating')

    def _compute_rating(self):
        for record in self:
            reviews = []
            distribution = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0}
            for rating in record.rating_ids:
                if rating.state == 'approved':
                    reviews.append({
                        'nickname': rating.nickname,
                        'name': rating.name,
                        'comment': rating.comment,
                        'rating': rating.rating,
                        'product_code': rating.product_id.default_code,
                    })
                    distribution[rating.rating] += 1
            if reviews:
                count = len(reviews)
                average = sum([c['rating'] for c in reviews]) / count
                record.rating = {
                    'reviews': reviews,
                    'summary': {
                        'average': average,
                        'count': count,
                        'distribution': distribution,
                    }
                }
            else:
                record.rating = {}
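
A sketch of the serialized rating structure produced above (values are
illustrative only):

example_rating = {
    'reviews': [{
        'nickname': 'jdoe',
        'name': 'Great chair',
        'comment': 'Solid build',
        'rating': 5,
        'product_code': 'CHAIR-01',
    }],
    'summary': {
        'average': 5.0,
        'count': 1,
        'distribution': {1: 0, 2: 0, 3: 0, 4: 0, 5: 1},
    },
}
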
class KeychainBackend(models.AbstractModel):
    _name = 'keychain.backend'
    _backend_name = None

    password = fields.Char(compute="_compute_password",
                           inverse="_inverse_password",
                           required=True)
    data = fields.Serialized(compute="_compute_keychain",
                             inverse="_inverse_keychain")

    def _get_technical_name(self):
        return '%s,%s' % (self._name, self.id)

    def _get_existing_keychain(self):
        self.ensure_one()
        return self.env['keychain.account'].retrieve([
            ('namespace', '=', self._backend_name),
            ('technical_name', '=', self._get_technical_name())
        ])

    def _prepare_keychain(self):
        env = config.get('running_env')
        return {
            'name': "%s %s" % (self.name, env),
            'technical_name': self._get_technical_name(),
            'namespace': self._backend_name,
            'environment': env,
        }

    def _get_keychain_account(self):
        self.ensure_one()
        account = self._get_existing_keychain()
        if not account:
            vals = self._prepare_keychain()
            account = self.env['keychain.account'].create(vals)
        return account

    def _inverse_password(self):
        for record in self:
            account = record._get_keychain_account()
            if record.password and record.password != '******':
                account.clear_password = record.password

    def _compute_password(self):
        for record in self:
            account = record._get_existing_keychain()
            if account and account.password:
                record.password = "******"
            else:
                record.password = ""

    def _inverse_keychain(self):
        for record in self:
            account = record._get_keychain_account()
            account.data = account._serialize_data(record.data)

    def _compute_keychain(self):
        for record in self:
            account = record._get_existing_keychain()
            if account:
                record.data = account.get_data()
            else:
                record.data = {}
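
A sketch of a concrete backend using this mixin; the model, its fields and its
namespace below are assumptions, not part of the snippet above:

from odoo import fields, models


class AcmeBackend(models.Model):
    # Hypothetical concrete backend; _backend_name must match the namespace
    # used on the corresponding keychain.account records.
    _name = 'acme.backend'
    _inherit = 'keychain.backend'
    _backend_name = 'acme_backend'

    name = fields.Char(required=True)

# Writing `password` hands the clear password to the keychain.account record
# (its clear_password field); reading it back only shows the '******' mask.
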
class BaseFieldSerializedTestModel(models.Model):
    _name = 'base.field.serialized.test.model'

    serialized = fields.Serialized('Serialized')
Example #14
class ShopinvaderVariant(models.Model):
    _name = 'shopinvader.variant'
    _description = 'Shopinvader Variant'
    _inherits = {
        'shopinvader.product': 'shopinvader_product_id',
        'product.product': 'record_id'}

    default_code = fields.Char(
        related='record_id.default_code',
        readonly=True)
    shopinvader_product_id = fields.Many2one(
        'shopinvader.product',
        required=True,
        ondelete='cascade')
    record_id = fields.Many2one(
        'product.product',
        required=True,
        ondelete='cascade')
    object_id = fields.Integer(
        compute='_compute_object_id',
        store=True)
    shopinvader_categ_ids = fields.Many2many(
        comodel_name='shopinvader.category',
        compute='_compute_shopinvader_category',
        string='Shopinvader Categories')
    images = fields.Serialized(
        compute='_compute_image',
        string='Shopinvader Image')
    variant_count = fields.Integer(
        related='product_variant_count')
    attributes = fields.Serialized(
        compute='_compute_attributes',
        string='Shopinvader Attributes')
    main = fields.Boolean(
        compute='_compute_main_product')
    redirect_url_key = fields.Serialized(
        compute='_compute_redirect_url_key',
        string='Redirect Url Keys')

    @api.depends('record_id')
    def _compute_object_id(self):
        for record in self:
            record.object_id = record.record_id.id

    def _compute_redirect_url_key(self):
        for record in self:
            res = []
            for url in record.redirect_url_key_ids:
                res.append(url.url_key)
            record.redirect_url_key = res

    def _get_categories(self):
        self.ensure_one()
        return self.categ_id

    def _compute_shopinvader_category(self):
        for record in self:
            ids = []
            categs = record._get_categories()
            for categ in categs:
                parents = self.env['shopinvader.category'].search([
                    ('parent_left', '<=', categ.parent_left),
                    ('parent_right', '>=', categ.parent_right),
                    ('backend_id', '=', record.backend_id.id),
                    ])
                ids += parents.ids
            record.shopinvader_categ_ids = ids

    def _compute_image(self):
        for record in self:
            images = []
            for image in record.record_id.image_ids:
                res = {'original': image.url}
                for resize in record.backend_id.product_image_resize_ids:
                    res[resize.key] = \
                        image.get_thumbnail_from_resize(resize).url
                images.append(res)
            record.images = images

    def _compute_attributes(self):
        for record in self:
            attributes = dict()
            for att_value in record.attribute_value_ids:
                sanitized_key = sanitize_attr_name(att_value.attribute_id)
                attributes[sanitized_key] = att_value.name
            record.attributes = attributes

    def _get_price(self, pricelist, fposition):
        self.ensure_one()
        return self._get_price_per_qty(1, pricelist, fposition)

    def _extract_price_from_onchange(self, pricelist, onchange_vals):
        tax_ids = onchange_vals['value']['tax_id']
        tax_included = False
        if tax_ids:
            # Be careful: we only take into account the first tax when
            # determining whether the price is tax included or tax excluded.
            # This may not work in the multi-tax case.
            tax = self.env['account.tax'].browse(tax_ids[0])
            tax_included = tax.price_include
        prec = self.env['decimal.precision'].precision_get('Product')
        return {
            'value': round(onchange_vals['value']['price_unit'], prec),
            'tax_included': tax_included,
            }

    def _get_price_per_qty(self, qty, pricelist, fposition):
        # Get a partner in order to avoid the raise in the onchange;
        # the selected partner will not impact the result
        partner = self.env['res.partner'].search([], limit=1)
        result = self.env['sale.order.line'].product_id_change(
            pricelist.id, self.record_id.id, qty=qty,
            fiscal_position=fposition.id, partner_id=partner.id)
        return self._extract_price_from_onchange(pricelist, result)

    def _compute_main_product(self):
        for record in self:
            if record.record_id \
                    == record.product_tmpl_id.product_variant_ids[0]:
                record.main = True
            else:
                record.main = False
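
For reference, the serialized fields computed above take shapes like the
following (values are illustrative only):

example_images = [
    {'original': '/media/chair.png', 'small': '/media/chair_small.png'},
]
example_attributes = {'color': 'Red', 'size': 'L'}
example_redirect_url_key = ['old-chair-url', 'even-older-chair-url']
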
Example #15
class tms_host(models.Model):

    _name = "tms.host"
    _description = "Host"
    _order = 'operating_system_id'
    _inherit = ['mail.thread']

    @api.one
    @api.onchange('physical_host_id', 'container_id')
    def on_change_physical_host_container(self):
        """
        F#13948 - auto compute ssh port for host (virtual machine)
        """
        if self.virtual_machine:
            self.ip = self.env['ir.config_parameter'].get_param(
                'default_ip_host', '') + str(self.container_id)

        temp = self.config.copy()
        node_add = self.physical_host_id and \
            self.physical_host_id.host_address or False
        if node_add:
            if 'eu' in node_add:
                temp['timezone'] = 'Europe/Berlin'
                self.config = temp
            else:
                temp['timezone'] = 'Asia/Ho_Chi_Minh'
                self.config = temp

        if self.virtual_machine and \
                self.virtualization_id and \
                self.virtualization_id.name == 'Proxmox':
            suggest_ssh_port = '{AREA_CODE}{NODE_NUMBER}{CONTAINER_ID}'
            if self.physical_host_id and \
                    self.physical_host_id.name:
                host_name_prefix = self.physical_host_id.name[:2]
                host_name_suffix = self.physical_host_id.name[-2:]
                if host_name_prefix == 'eu':
                    suggest_ssh_port = suggest_ssh_port.replace(
                        '{AREA_CODE}', '1')
                elif host_name_prefix == 'vn':
                    suggest_ssh_port = suggest_ssh_port.replace(
                        '{AREA_CODE}', '2')
                if host_name_suffix.isnumeric():
                    suggest_ssh_port = suggest_ssh_port.replace(
                        '{NODE_NUMBER}',
                        str(int(host_name_suffix)))
            if self.container_id >= 0:
                suggest_ssh_port = suggest_ssh_port.replace(
                    '{CONTAINER_ID}', str(self.container_id))
            self.port = '{' not in suggest_ssh_port and \
                suggest_ssh_port or '22'
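            # Illustrative result: physical host 'eu03' with container 105 gives
            # AREA_CODE '1', NODE_NUMBER '3', CONTAINER_ID '105' -> port '13105'.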

    @api.multi
    def _get_num_of_vm(self):
        for host in self:
            host.num_VM = len(host.virtual_host_ids)

    @api.multi
    @api.depends('name', 'virtual_machine', 'physical_host_id')
    def _get_physical_host(self):
        """
            get physical host for virtual machine
            if host is virtual machine then get Physical Host
            if host is not virtual machine then get name of this Host
        """
        # looking for all virtual host belong to this physical_host and
        # including it
        hosts = self.search(["|", ('physical_host_id', 'in', self._ids),
                             ('id', 'in', self._ids)])
        for host in hosts:
            if host.virtual_machine:
                host.physical_host = host.physical_host_id.name
            else:
                host.physical_host = host.name

    @api.model
    def get_grouped_host(self):
        """
        Retrieve hosts grouped by physical host
        """

        self._cr.execute('''
        SELECT ph.name, h.name, h.host_address, h.port, h.config
        FROM tms_host AS h
        JOIN tms_host AS ph ON h.physical_host_id = ph.id
        ORDER BY ph.name
        ''')
        return self._cr.fetchall()

    @api.model
    def get_authorized_keys(self):
        """
        Retrieve all authorized keys for hosts
        Note: used by configuration manager tools (Ansible)

        Format:
        results = {
            'host-name': {
                'admin': [list of admin users' keys],
                'user': [list of host users' keys]
            }
        }
        """
        # Read the private token from the config file
        # Query GitLab with the number of users per page set to 400
        results = {}
        private_token_key_config = config.get('private_token') or ''
        if private_token_key_config:
            # Get all user from gitlab
            payload = {
                'private_token': private_token_key_config,
                'per_page': 400
            }
            r = requests.get(
                "https://gitlab.trobz.com/api/v3/users",
                data=payload, verify=False
            )
            gitlab_users = json.loads(r.text) or []

            # First, get all ssh keys from Admin users
            # and Sysadmin users who have full access.
            self._cr.execute('''
                SELECT login
                FROM res_users
                WHERE is_sysadmin = TRUE
                    AND has_full_sysadmin_access = TRUE
                    AND active = TRUE
                    AND id != 1
                ORDER BY id;
            ''')
            admin_usernames = [row[0] for row in self._cr.fetchall()]

            host_users = []
            admin_keys = []
            for gitlab_user in gitlab_users:
                if gitlab_user['state'] == 'active':
                    req = requests.get(
                        "https://gitlab.trobz.com/api/v3/users/" +
                        str(gitlab_user['id']) + "/keys",
                        data=payload, verify=False
                    )
                    ssh_vals = json.loads(req.text) or []
                    ssh_keys = []
                    for val in ssh_vals:
                        if 'key' in val:
                            ssh_keys.append(val['key'])
                            if gitlab_user['username'] in admin_usernames:
                                admin_keys.append(val['key'])
                    host_users.append({
                        'login': gitlab_user['username'],
                        'ssh_key': ssh_keys
                    })

            self._cr.execute('''
                SELECT h.name, u.login, u.is_sysadmin
                FROM tms_host AS h
                LEFT JOIN host_users_rel AS hu ON hu.host_id = h.id
                LEFT JOIN res_users AS u ON u.id = hu.user_id
                WHERE (u.is_trobz_member = TRUE AND u.active = TRUE)
                OR u.login IS NULL
                AND h.state != 'deleted'
                ORDER BY h.name, u.id;
            ''')
            # result[0] => host name
            # result[1] => user login name
            # result[2] => is_sysadmin
            for result in self._cr.fetchall():
                if result[0] not in results:
                    # All sysadmin users
                    # who have full access are admins on this host.
                    results[result[0]] = {'admin': admin_keys, 'user': []}

                if result[1]:
                    for host_user in host_users:
                        # All users who are responsible to maintain
                        # odoo instances are users on this host.
                        if result[1] == host_user['login']:
                            results[result[0]]['user'] = list(
                                set(results[result[0]]['user'] +
                                    host_user['ssh_key']))
                            # All sysadmin users who don't have full access,
                            # but are responsible to maintain this host are
                            # admins on this host.
                            if result[2]:
                                results[result[0]]['admin'] = list(
                                    set(results[result[0]]['admin'] +
                                        host_user['ssh_key']))
        else:
            logging.info("You don't have a token key in the config file!")

        return results

    @api.constrains('container_id', 'virtual_machine', 'physical_host_id')
    def check_container_id(self):
        '''
        For Host (Virtual Machine) inside a Physical Host:
            - Container ID between 1 and 255
            - F#13552 - Unique container_id
                (only for Hosts that belong to the same Physical Host)
        '''
        for host in self:
            if host.virtual_machine and host.physical_host_id:
                if host.container_id < 1 or host.container_id > 255:
                    raise Warning(
                        _('You have to input Container ID between 1 and 255 !')
                    )
                # find existing Hosts in Physical Host
                # using the Container ID
                existed_hosts = self.search([
                    ('virtual_machine', '=', True),
                    ('physical_host_id', '=', host.physical_host_id.id),
                    ('container_id', '=', host.container_id),
                    ('id', '!=', host.id),
                    ('state', '!=', 'deleted')
                ])
                if existed_hosts:
                    raise Warning(
                        _('The Container ID should be unique for '
                          'Hosts that belong to the same Physical Host! '
                          'Container ID %s has been used for Host %s!' % (
                              host.container_id, existed_hosts[0].name))
                    )

    @api.multi
    def _get_groups(self):
        for item in self:
            if item.group_ids:
                item.groups = [{
                    'name': group.name,
                    'config': group.config,
                } for group in item.group_ids]
            else:
                item.groups = []

    # Columns
    name = fields.Char('Host Name', size=256, required=True,
                       track_visibility='onchange')
    host_address = fields.Char('Host Address', size=256, required=True,
                               track_visibility='onchange')
    port = fields.Char('SSH Port', size=256, required=True,
                       track_visibility='onchange')
    ip = fields.Char(
        'IP', size=64, help='For nodes it is the IP address of the node, '
                            'for VM it is the Virtual IP of the VM',
        track_visibility='onchange')
    internal_ip = fields.Char('Internal IP', size=64)
    using_pgbouncer = fields.Boolean(
        string='Using PgBouncer', default=False)
    pgbouncer_port = fields.Char('PgBouncer Port')
    operating_system_id = fields.Many2one(
        'tms.operating.system', string='Operating System',
        track_visibility='onchange')
    # Adjustments of the Hosts
    service = fields.Char('Services', size=64)
    group_ids = fields.Many2many(
        "tms.host.group", "host_host_group_rel", "host_id", "group_id",
        string='Groups', help="Host group, used by ansible",
        track_visibility='onchange')
    groups = fields.Serialized(
        compute='_get_groups', string='Group list',
        readonly=True)
    user_ids = fields.Many2many(
        "res.users", "host_users_rel", "host_id", "user_id",
        string='Users', track_visibility='onchange',
        help="User with permissions to deploy on the host.",
        domain=[('is_trobz_member', '=', True)])
    instance_ids = fields.One2many(
        'tms.instance', 'host_id', string='Instances')
    config = fields.Serialized(
        string='Config', default={"timezone": "Asia/Ho_Chi_Minh"})
    type = fields.Selection([
        ('production', 'Production'), ('staging', 'Staging'),
        ('integration', 'Integration'), ('test', 'Test'),
        ('demo', 'Demo'), ('unknown', 'Unknown'), ('node', 'Node'),
        ('utils', 'Utils')
    ], string='Type', required=True, track_visibility='onchange')
    action_required = fields.Boolean('Action Required',
                                     track_visibility='onchange')
    backup_checking = fields.Date('Audit Date',
                                  track_visibility='onchange')
    update = fields.Datetime('Update', readonly=True)
    virtual_machine = fields.Boolean('Virtual Machine',
                                     track_visibility='onchange')
    virtualization_id = fields.Many2one('tms.host.virtualization',
                                        string='Virtualization',
                                        track_visibility='onchange')
    physical_host_id = fields.Many2one('tms.host', string='Physical Host',
                                       track_visibility='onchange')
    virtual_host_ids = fields.One2many('tms.host', 'physical_host_id',
                                       string='Virtual Machines')
    num_VM = fields.Integer(
        compute='_get_num_of_vm', string='Number of VM')
    # the temp for group by physical host
    physical_host = fields.Char(
        compute='_get_physical_host', string='Physical Host',
        store=True)
    state = fields.Selection(
        [
            ('active', 'Active'), ('exception', 'Exception'),
            ('asleep', 'Asleep'), ('deleted', 'Deleted')
        ], string='Status', track_visibility='onchange',
        help='For the field status:\n'
        '- Active: The host is active and in a normal state.\n'
        '- Exception: Something is wrong (ex: We lost the connection '
        'to host). This is set manually.\n'
        '- Asleep: This host is temporarily disabled.\n'
        '- Deleted: We do not handle this host anymore, but we keep its record.')
    is_managed_by_trobz = fields.Boolean(
        string='Managed by Trobz', help='Instances managed by Trobz.',
        track_visibility='onchange')
    container_id = fields.Integer(
        string='Container ID', help='required for Virtual Machines')
    # Resources
    processors = fields.Integer(string='Processors',
                                track_visibility='onchange')
    ram = fields.Integer(string='Memory (RAM in MB)',
                         track_visibility='onchange')
    disk_size = fields.Integer(string='Disk Size (GB)',
                               track_visibility='onchange')
    swap = fields.Integer(string='Swap (MB)',
                          track_visibility='onchange')
    allow_edit_users = fields.Boolean(
        'Allow Edit Users', compute='_compute_allow_edit_users')
    awx_job_history_ids = fields.One2many(
        comodel_name='tms.awx.job.history',
        inverse_name='host_id', string='AWX Job History')

    _sql_constraints = [
        ('host_unique', 'unique(name)', 'This host already exists!'),
        ('host_add_unique', 'unique(host_address)',
         'This host address already exists!'),
    ]

    _defaults = {
        'port': '22',
        'state': 'active',
        'is_managed_by_trobz': True,
    }

    @api.multi
    def _compute_allow_edit_users(self):
        '''
        Users can only add users to hosts related to their projects
        '''
        user = self.env.user
        for host in self:
            allow = False
            if user.id == SUPERUSER_ID or user.has_full_sysadmin_access:
                allow = True
            else:
                # Apply for PM without has_full_sysadmin_access
                project_owners = host.instance_ids.\
                    mapped('project_id').\
                    mapped('owner_id')
                if user in project_owners:
                    allow = True
            host.allow_edit_users = allow

        return True

    @api.multi
    def write(self, vals):
        """
        when change virtual machine, clear value of field physical host
        or virtual host
        """
        if 'virtual_machine' in vals:
            if vals['virtual_machine']:
                virtual_machines = self.search(
                    [('physical_host_id', '=', self._ids)])
                virtual_machines.write({'physical_host_id': False})
            else:
                vals['physical_host_id'] = False
        # User of group TMS Add User To Host can edit users of host only
        current_user = self.env['res.users'].browse(self._uid)
        if not current_user.has_full_sysadmin_access and \
                current_user.has_groups('tms_modules.group_add_user_to_host'):
            if 'bypass_security' not in self._context and \
                    not ('user_ids' in vals and len(vals) == 1):
                raise Warning(
                    _('You are only allowed to modify the users of the host!'))

        vals['update'] = time.strftime("%Y-%m-%d %H:%M:%S")
        return super(tms_host, self).write(vals)

    @api.multi
    def name_get(self):
        return [(host.id, u"{0} ({1})".format(
            host.name, host.physical_host_id.name)) if host.physical_host_id
            else (host.id, u"{0}".format(host.name)) for host in self]
class ShopinvaderCategory(models.Model):
    _name = 'shopinvader.category'
    _description = 'Shopinvader Category'
    _inherit = ['locomotive.binding', 'abstract.url']
    _inherits = {'product.category': 'record_id'}

    record_id = fields.Many2one('product.category',
                                required=True,
                                ondelete='cascade')
    object_id = fields.Integer(compute='_compute_object_id', store=True)
    lang_id = fields.Many2one('res.lang', 'Lang', required=True)
    seo_title = fields.Char()
    meta_description = fields.Char()
    meta_keywords = fields.Char()
    subtitle = fields.Char()
    short_description = fields.Html()
    description = fields.Html()
    images = fields.Serialized(compute='_compute_image',
                               string='Shopinvader Image')
    shopinvader_parent_id = fields.Many2one('shopinvader.category',
                                            'Shopinvader Parent',
                                            compute='_compute_parent_category',
                                            store=True)
    shopinvader_child_ids = fields.Many2many('shopinvader.category',
                                             string='Shopinvader Children',
                                             compute='_compute_child_category')
    level = fields.Integer(compute='_compute_level')
    redirect_url_key = fields.Serialized(compute='_compute_redirect_url_key',
                                         string='Redirect Url Keys')

    _sql_constraints = [
        ('record_uniq', 'unique(backend_id, record_id, lang_id)',
         'A category can only have one binding by backend.'),
    ]

    def _compute_redirect_url_key(self):
        for record in self:
            res = []
            for url in record.redirect_url_key_ids:
                res.append(url.url_key)
            record.redirect_url_key = res

    def _compute_image(self):
        for record in self:
            images = []
            for image in record.record_id.image_ids:
                res = {'original': image.url}
                for resize in record.backend_id.categ_image_resize_ids:
                    res[resize.key] = \
                        image.get_thumbnail_from_resize(resize).url
                images.append(res)
            record.images = images

    @api.depends('record_id')
    def _compute_object_id(self):
        for record in self:
            record.object_id = record.record_id.id

    @api.depends('parent_id.shopinvader_bind_ids')
    def _compute_parent_category(self):
        for record in self:
            for binding in record.parent_id.shopinvader_bind_ids:
                if binding.backend_id == record.backend_id:
                    record.shopinvader_parent_id = binding
                    break

    def _compute_child_category(self):
        for record in self:
            record.shopinvader_child_ids = self.search([
                ('record_id.parent_id', '=', record.record_id.id),
                ('backend_id', '=', record.backend_id.id),
            ])

    def _build_url_key(self):
        key = super(ShopinvaderCategory, self)._build_url_key()
        if self.parent_id and self.shopinvader_parent_id:
            # TODO using self.shopinvader_parent_id.url_key fail...
            if self.shopinvader_parent_id.url_builder == 'manual':
                parent_url = self.shopinvader_parent_id.manual_url_key
            else:
                parent_url = self.shopinvader_parent_id._build_url_key()
            key = '/'.join([parent_url, key])
        return key

    @api.depends('url_builder', 'record_id.name',
                 'shopinvader_parent_id.url_key')
    def _compute_url(self):
        return super(ShopinvaderCategory, self)._compute_url()

    @api.depends('shopinvader_parent_id.level')
    def _compute_level(self):
        for record in self:
            record.level = 0
            parent = record.shopinvader_parent_id
            while parent:
                record.level += 1
                parent = parent.shopinvader_parent_id
Example #17
class ImportOdooDatabase(models.Model):
    _name = 'import.odoo.database'
    _description = 'An Odoo database to import'

    url = fields.Char(required=True)
    database = fields.Char(required=True)
    user = fields.Char(default='admin', required=True)
    password = fields.Char(default='admin')
    import_line_ids = fields.One2many(
        'import.odoo.database.model',
        'database_id',
        string='Import models',
    )
    import_field_mappings = fields.One2many(
        'import.odoo.database.field',
        'database_id',
        string='Field mappings',
    )
    cronjob_id = fields.Many2one(
        'ir.cron',
        string='Import job',
        readonly=True,
        copy=False,
    )
    cronjob_running = fields.Boolean(compute='_compute_cronjob_running')
    status_data = fields.Serialized('Status', readonly=True, copy=False)
    status_html = fields.Html(
        compute='_compute_status_html',
        readonly=True,
        sanitize=False,
    )
    duplicates = fields.Selection(
        [
            ('skip', 'Skip existing'),
            ('overwrite', 'Overwrite existing'),
            ('overwrite_empty', 'Overwrite empty fields'),
        ],
        'Duplicate handling',
        default='skip',
        required=True,
    )

    @api.multi
    def action_import(self):
        """Create a cronjob to run the actual import"""
        self.ensure_one()
        if self.cronjob_id:
            return self.cronjob_id.write({
                'numbercall': 1,
                'doall': True,
                'active': True,
            })
        return self.write({
            'cronjob_id': self._create_cronjob().id,
        })

    @api.multi
    def _run_import(self, commit=True, commit_threshold=100):
        """Run the import as cronjob, commit often"""
        self.ensure_one()
        if not self.password:
            return
        # model name: [ids]
        remote_ids = {}
        # model name: count
        remote_counts = {}
        # model name: count
        done = {}
        # mapping_key: local_id
        idmap = {}
        # mapping_key: local_id
        # these are records created or linked when we need to fill a required
        # field, but the local record is not yet created
        dummies = {}
        # model name: [local_id]
        # this happens when we create a dummy we can throw away again
        to_delete = {}
        # dummy_instance
        dummy_instances = []
        remote = self._get_connection()
        self.write({'password': False})
        if commit and not tools.config['test_enable']:
            # pylint: disable=invalid-commit
            self.env.cr.commit()
        for model_line in self.import_line_ids:
            model = model_line.model_id
            remote_ids[model.model] = remote.execute(
                model.model, 'search',
                tools.safe_eval(model_line.domain)
                if model_line.domain else [])
            remote_counts[model.model] = len(remote_ids[model.model])
        self.write({
            'status_data': {
                'counts': remote_counts,
                'ids': remote_ids,
                'error': None,
                'dummies': None,
                'done': {},
            }
        })
        if commit and not tools.config['test_enable']:
            # pylint: disable=invalid-commit
            self.env.cr.commit()
        for model_line in self.import_line_ids:
            model = self.env[model_line.model_id.model]
            done[model._name] = 0
            chunk_len = commit and (commit_threshold or 1) or len(
                remote_ids[model._name])

            for start_index in range(
                    len(remote_ids[model._name]) // chunk_len + 1):
                index = start_index * chunk_len
                ids = remote_ids[model._name][index:index + chunk_len]
                context = ImportContext(
                    remote,
                    model_line,
                    ids,
                    idmap,
                    dummies,
                    dummy_instances,
                    to_delete,
                    field_context(None, None, None),
                )
                try:
                    self._run_import_model(context)
                except:  # noqa: E722
                    # pragma: no cover
                    error = traceback.format_exc()
                    self.env.cr.rollback()
                    self.write({
                        'status_data':
                        dict(self.status_data, error=error),
                    })
                    # pylint: disable=invalid-commit
                    self.env.cr.commit()
                    raise
                done[model._name] += len(ids)
                self.write({'status_data': dict(self.status_data, done=done)})

                if commit and not tools.config['test_enable']:
                    # pylint: disable=invalid-commit
                    self.env.cr.commit()
        missing = {}
        for dummy_model, remote_id in dummies.keys():
            if remote_id:
                missing.setdefault(dummy_model, []).append(remote_id)
        self.write({
            'status_data':
            dict(self.status_data, dummies=dict(missing)),
        })
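        # Sketch of status_data at this point (model names and counts are
        # illustrative): {'counts': {'res.partner': 120}, 'ids': {...},
        # 'done': {'res.partner': 120}, 'error': None,
        # 'dummies': {'res.partner': [7]}}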

    @api.multi
    def _run_import_model(self, context):
        """Import records of a configured model"""
        model = self.env[context.model_line.model_id.model]
        fields = self._run_import_model_get_fields(context)
        for data in context.remote.execute(model._name, 'read', context.ids,
                                           fields.keys()):
            self._run_import_get_record(
                context,
                model,
                data,
                create_dummy=False,
            )
            if (model._name, data['id']) in context.idmap:
                # one of our mappings hit, create an xmlid to persist
                # this knowledge
                self._create_record_xmlid(
                    model, context.idmap[(model._name, data['id'])],
                    data['id'])
                if self.duplicates == 'skip':
                    # there's a mapping for this record, nothing to do
                    continue
            data = self._run_import_map_values(context, data)
            _id = data['id']
            record = self._create_record(context, model, data)
            self._run_import_model_cleanup_dummies(
                context,
                model,
                _id,
                record.id,
            )

    @api.multi
    def _create_record(self, context, model, record):
        """Create a record, add an xmlid"""
        _id = record.pop('id')
        xmlid = '%d-%s-%d' % (
            self.id,
            model._name.replace('.', '_'),
            _id or 0,
        )
        record = self._create_record_filter_fields(model, record)
        new = self.env.ref('base_import_odoo.%s' % xmlid, False)
        if new and new.exists():
            if self.duplicates == 'overwrite_empty':
                record = {
                    key: value
                    for key, value in record.items() if not new[key]
                }
            new.with_context(
                **self._create_record_context(model, record)).write(record)
            _logger.debug('Updated record %s', xmlid)
        else:
            new = model.with_context(
                **self._create_record_context(model, record)).create(record)
            self._create_record_xmlid(model, new.id, _id)
            _logger.debug('Created record %s', xmlid)
        context.idmap[mapping_key(model._name, _id)] = new.id
        return new

    def _create_record_xmlid(self, model, local_id, remote_id):
        xmlid = '%d-%s-%d' % (
            self.id,
            model._name.replace('.', '_'),
            remote_id or 0,
        )
        if self.env.ref('base_import_odoo.%s' % xmlid, False):
            return
        return self.env['ir.model.data'].create({
            'name': xmlid,
            'model': model._name,
            'module': 'base_import_odoo',
            'res_id': local_id,
            'noupdate': True,
            'import_database_id': self.id,
            'import_database_record_id': remote_id,
        })

    def _create_record_filter_fields(self, model, record):
        """Return a version of record with unknown fields for model removed"""
        return {
            key: value
            for key, value in record.items() if key in model._fields
        }

    def _create_record_context(self, model, record):
        """Return a context that is used when creating a record"""
        context = {
            'tracking_disable': True,
        }
        if model._name == 'res.users':
            context['no_reset_password'] = True
        return context

    @api.multi
    def _run_import_get_record(
        self,
        context,
        model,
        record,
        create_dummy=True,
    ):
        """Find the local id of some remote record. Create a dummy if not
        available"""
        _id = context.idmap.get((model._name, record['id']))
        logged = False
        if not _id:
            _id = context.dummies.get((model._name, record['id']))
            if _id:
                context.dummy_instances.append(
                    dummy_instance(*(context.field_context + (_id, ))))
        else:
            logged = True
            _logger.debug(
                'Got %s(%d[%d]) from idmap',
                model._model,
                _id,
                record['id'] or 0,
            )
        if not _id:
            _id = self._run_import_get_record_mapping(
                context,
                model,
                record,
                create_dummy=create_dummy,
            )
        elif not logged:
            logged = True
            _logger.debug(
                'Got %s(%d[%d]) from dummies',
                model._model,
                _id,
                record['id'],
            )
        if not _id:
            xmlid = self.env['ir.model.data'].search([
                ('import_database_id', '=', self.id),
                ('import_database_record_id', '=', record['id']),
                ('model', '=', model._name),
            ], limit=1)
            if xmlid:
                _id = xmlid.res_id
                context.idmap[(model._name, record['id'])] = _id
        elif not logged:
            logged = True
            _logger.debug(
                'Got %s(%d[%d]) from mappings',
                model._model,
                _id,
                record['id'],
            )
        if not _id and create_dummy:
            _id = self._run_import_create_dummy(
                context,
                model,
                record,
                forcecreate=record['id']
                not in self.status_data['ids'].get(model._name, []))
        elif _id and not logged:
            _logger.debug(
                'Got %s(%d[%d]) from xmlid',
                model._model,
                _id,
                record['id'],
            )
        return _id

    @api.multi
    def _run_import_get_record_mapping(
        self,
        context,
        model,
        record,
        create_dummy=True,
    ):
        current_field = self.env['ir.model.fields'].search([
            ('name', '=', context.field_context.field_name),
            ('model_id.model', '=', context.field_context.record_model),
        ])
        mappings = self.import_field_mappings.filtered(
            lambda x: (
                x.mapping_type == 'fixed'
                and x.model_id.model == model._name
                and (not x.field_ids or current_field in x.field_ids)
                and x.local_id
                and (x.remote_id == record['id'] or not x.remote_id)
            ) or (
                x.mapping_type == 'by_field'
                and x.model_id.model == model._name
            ))
        _id = None
        for mapping in mappings:
            if mapping.mapping_type == 'fixed':
                assert mapping.local_id
                _id = mapping.local_id
                context.idmap[(model._name, record['id'])] = _id
                break
            elif mapping.mapping_type == 'by_field':
                assert mapping.field_ids
                if len(record) == 1:
                    # just the id of a record we haven't seen yet.
                    # read the whole record from remote to check if
                    # this can be mapped to an existing record
                    record = context.remote.execute(
                        model._name,
                        'read',
                        record['id'],
                        mapping.field_ids.mapped('name'),
                    ) or None
                    if not record:
                        continue
                    if isinstance(record, list):
                        record = record[0]
                domain = [(field.name, '=', record.get(field.name))
                          for field in mapping.field_ids
                          if record.get(field.name)]
                if len(domain) < len(mapping.field_ids):
                    # play it save, only use mapping if we really select
                    # something specific
                    continue
                records = model.with_context(active_test=False).search(
                    domain,
                    limit=1,
                )
                if records:
                    _id = records.id
                    context.idmap[(model._name, record['id'])] = _id
                    break
            else:
                raise exceptions.UserError(_('Unknown mapping'))
        return _id

    @api.multi
    def _run_import_create_dummy(
        self,
        context,
        model,
        record,
        forcecreate=False,
    ):
        """Either misuse some existing record or create an empty one to satisfy
        required links"""
        dummy = model.search([
            ('id', 'not in', [
                v for (model_name, remote_id), v in context.dummies.items()
                if model_name == model._name
            ] + [
                mapping.local_id for mapping in self.import_field_mappings
                if mapping.model_id.model == model._name and mapping.local_id
            ]),
        ], limit=1)
        if dummy and not forcecreate:
            context.dummies[mapping_key(model._name, record['id'])] = dummy.id
            context.dummy_instances.append(
                dummy_instance(*(context.field_context + (dummy.id, ))))
            _logger.debug(
                'Using %d as dummy for %s(%d[%d]).%s[%d]',
                dummy.id,
                context.field_context.record_model,
                context.idmap.get(context.field_context.record_id, 0),
                context.field_context.record_id,
                context.field_context.field_name,
                record['id'],
            )
            return dummy.id
        required = [
            name for name, field in model._fields.items() if field.required
        ]
        defaults = model.default_get(required)
        values = {'id': record['id']}
        for name, field in model._fields.items():
            if name not in required or defaults.get(name):
                continue
            value = None
            if field.type in ['char', 'text', 'html']:
                value = '/'
            elif field.type in ['boolean']:
                value = False
            elif field.type in ['integer', 'float']:
                value = 0
            elif model._fields[name].type in ['date', 'datetime']:
                value = '2000-01-01'
            elif field.type in ['many2one']:
                if name in model._inherits.values():
                    continue
                new_context = context.with_field_context(
                    model._name, name, record['id'])
                value = self._run_import_get_record(
                    new_context,
                    self.env[model._fields[name].comodel_name],
                    {'id': record.get(name, [None])[0]},
                )
            elif field.type in ['selection'] and not callable(field.selection):
                value = field.selection[0][0]
            elif field.type in ['selection'] and callable(field.selection):
                value = field.selection(model)[0][0]
            values[name] = value
        dummy = self._create_record(context, model, values)
        del context.idmap[mapping_key(model._name, record['id'])]
        context.dummies[mapping_key(model._name, record['id'])] = dummy.id
        context.to_delete.setdefault(model._name, [])
        context.to_delete[model._name].append(dummy.id)
        context.dummy_instances.append(
            dummy_instance(*(context.field_context + (dummy.id, ))))
        _logger.debug(
            'Created %d as dummy for %s(%d[%d]).%s[%d]',
            dummy.id,
            context.field_context.record_model,
            context.idmap.get(context.field_context.record_id, 0),
            context.field_context.record_id or 0,
            context.field_context.field_name,
            record['id'],
        )
        return dummy.id

    @api.multi
    def _run_import_map_values(self, context, data):
        model = self.env[context.model_line.model_id.model]
        for field_name in list(data.keys()):
            if not isinstance(model._fields[field_name],
                              fields._Relational) or not data[field_name]:
                continue
            if model._fields[field_name].type == 'one2many':
                # don't import one2many fields; use a separate import line
                # configuration for them
                data.pop(field_name)
                continue
            ids = data[field_name] if (model._fields[field_name].type !=
                                       'many2one') else [data[field_name][0]]
            new_context = context.with_field_context(model._name, field_name,
                                                     data['id'])
            comodel = self.env[model._fields[field_name].comodel_name]
            data[field_name] = [
                self._run_import_get_record(
                    new_context,
                    comodel,
                    {'id': _id},
                    create_dummy=model._fields[field_name].required
                    or any(m.model_id.model == comodel._name
                           for m in self.import_line_ids),
                ) for _id in ids
            ]
            data[field_name] = filter(None, data[field_name])
            if model._fields[field_name].type == 'many2one':
                if data[field_name]:
                    data[field_name] = data[field_name] and data[field_name][0]
                else:
                    data[field_name] = None
            else:
                data[field_name] = [(6, 0, data[field_name])]
        for mapping in self.import_field_mappings:
            if mapping.model_id.model != model._name:
                continue
            if mapping.mapping_type == 'unique':
                for field in mapping.field_ids:
                    value = data.get(field.name, '')
                    counter = 1
                    while model.with_context(active_test=False).search([
                        (field.name, '=', data.get(field.name, value)),
                    ]):
                        data[field.name] = '%s (%d)' % (value, counter)
                        counter += 1
            elif mapping.mapping_type == 'by_reference':
                res_model = data.get(mapping.model_field_id.name)
                res_id = data.get(mapping.id_field_id.name)
                update = {
                    mapping.model_field_id.name: None,
                    mapping.id_field_id.name: None,
                }
                if res_model in self.env.registry and res_id:
                    new_context = context.with_field_context(
                        model._name, res_id, data['id'])
                    record_id = self._run_import_get_record(
                        new_context,
                        self.env[res_model], {'id': res_id},
                        create_dummy=False)
                    if record_id:
                        update.update({
                            mapping.model_field_id.name: res_model,
                            mapping.id_field_id.name: record_id,
                        })
                data.update(update)
        return data
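
    # Illustrative mapping behaviour (assumed remote values): a many2one read
    # from the remote database as [42, 'Azure Interior'] is replaced by the
    # matching local id returned by _run_import_get_record(), a many2many list
    # of remote ids becomes a single (6, 0, [local_ids]) command, and one2many
    # values are dropped entirely and must be imported via their own model
    # line.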

    @api.multi
    def _run_import_model_get_fields(self, context):
        return {
            name: field
            for name, field in self.env[
                context.model_line.model_id.model]._fields.items()
            if not field.compute or field.related
        }

    @api.multi
    def _run_import_model_cleanup_dummies(self, context, model, remote_id,
                                          local_id):
        if (model._name, remote_id) not in context.dummies:
            return
        for instance in context.dummy_instances:
            key = mapping_key(instance.model_name, instance.remote_id)
            if key not in context.idmap:
                continue
            dummy_id = context.dummies[(model._name, remote_id)]
            record_model = self.env[instance.model_name]
            comodel = record_model._fields[instance.field_name].comodel_name
            if comodel != model._name or instance.dummy_id != dummy_id:
                continue
            record = record_model.browse(context.idmap[key])
            field_name = instance.field_name
            _logger.debug(
                'Replacing dummy %d on %s(%d).%s with %d',
                dummy_id,
                record_model._name,
                record.id,
                field_name,
                local_id,
            )
            if record._fields[field_name].type == 'many2one':
                record.write({field_name: local_id})
            elif record._fields[field_name].type == 'many2many':
                record.write({field_name: [
                    (3, dummy_id),
                    (4, local_id),
                ]})
            else:
                raise exceptions.UserError(
                    _('Unhandled field type %s') %
                    record._fields[field_name].type)
            context.dummy_instances.remove(instance)
            if dummy_id in context.to_delete:
                model.browse(dummy_id).unlink()
                _logger.debug('Deleting dummy %d', dummy_id)
        if (model._name, remote_id) in context.dummies:
            del context.dummies[(model._name, remote_id)]

    def _get_connection(self):
        self.ensure_one()
        url = urlparse(self.url)
        hostport = url.netloc.split(':')
        if len(hostport) == 1:
            hostport.append('80')
        host, port = hostport
        if not odoorpc:  # pragma: no cover
            raise exceptions.UserError(
                _('Please install the "odoorpc" library in your environment'))
        remote = odoorpc.ODOO(
            host,
            protocol='jsonrpc+ssl' if url.scheme == 'https' else 'jsonrpc',
            port=int(port),
        )
        remote.login(self.database, self.user, self.password)
        return remote
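
    # A minimal usage sketch (assuming the remote instance exposes
    # 'res.partner'); the model and fields below are illustrative only:
    #
    #   remote = self._get_connection()
    #   partner_ids = remote.env['res.partner'].search([], limit=5)
    #   for vals in remote.env['res.partner'].read(partner_ids, ['name']):
    #       _logger.debug('remote partner %(id)s: %(name)s', vals)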

    @api.constrains('url', 'database', 'user', 'password')
    @api.multi
    def _constrain_url(self):
        for this in self:
            if this == self.env.ref('base_import_odoo.demodb', False):
                continue
            if tools.config['test_enable']:
                continue
            if not this.password:
                continue
            this._get_connection()

    @api.depends('status_data')
    @api.multi
    def _compute_status_html(self):
        for this in self:
            if not this.status_data:
                continue
            this.status_html = self.env.ref(
                'base_import_odoo.view_import_odoo_database_qweb').render(
                    {'object': this})

    @api.depends('cronjob_id')
    @api.multi
    def _compute_cronjob_running(self):
        for this in self:
            if not this.cronjob_id:
                continue
            try:
                with self.env.cr.savepoint():
                    self.env.cr.execute(
                        'select id from "%s" where id=%%s for update nowait' %
                        self.env['ir.cron']._table,
                        (this.cronjob_id.id, ),
                        log_exceptions=False,
                    )
            except psycopg2.OperationalError:
                this.cronjob_running = True

    @api.multi
    def _create_cronjob(self):
        self.ensure_one()
        return self.env['ir.cron'].create({
            'name': self.display_name,
            'model': self._name,
            'function': '_run_import',
            'doall': True,
            'args': str((self.ids, )),
        })

    @api.multi
    def name_get(self):
        return [(this.id, '%s@%s, %s' % (this.user, this.url, this.database))
                for this in self]
Example #18
0
class TmsInstance(models.SecureModel):
    _name = "tms.instance"
    _inherit = ['mail.thread']
    _order = "name"

    _sql_constraints = [
        ('instance_unique', 'unique (name)', 'This instance already exists!'),
        ('host_xmlrpc_unique', 'unique (host_id,xmlrpc_port)',
         'This xmlrpc port already exists on that host!')
    ]

    # ========================================================================
    # FIELD DEFINITIONS
    # ========================================================================

    https_password = fields.Secure(  # @UndefinedVariable
        string="HTTP Authen Test Password",
        security="_security_https_password")

    name = fields.Char('Instance Name', size=256, required=True,
                       track_visibility='onchange')
    mail_instance = fields.Char(compute='_get_mail', string='Mail')
    project_id = fields.Many2one('tms.project', 'Project name',
                                 required=True, track_visibility='onchange')

    milestone_id = fields.Many2one(
        'tms.milestone', 'Milestone',
        track_visibility='onchange',
        required=True)

    host_id = fields.Many2one('tms.host', 'Host',
                              required=True, track_visibility='onchange')

    physical_host_id = fields.Many2one('tms.host',
                                       related='host_id.physical_host_id',
                                       string="Node", store=True)

    parent_bzr_repository_suffix = fields.Char(
        'Parent bzr Repository Suffix',
        size=64,
        help='A possible value is "-stable" '
        'if we want to use the repositories '
        'in {project_name}-stable instead of '
        'the ones in {project_name}.')

    server_type = fields.Selection(
        server_type_array, 'Server Type', required=True,
        track_visibility='onchange')

    url = fields.Char('URL', size=256, required=True,
                      track_visibility='onchange')

    xmlrpc_port = fields.Char('xmlrpc port', size=256,
                              track_visibility='onchange')

    psql_host = fields.Char('PostgreSQL Host', size=120, required=True,
                            track_visibility='onchange', default='localhost')

    psql_port = fields.Char('PostgreSQL Port', size=120, required=True,
                            track_visibility='onchange', default='5432')

    psql_user = fields.Char('PostgreSQL User', size=120, required=True,
                            track_visibility='onchange')

    psql_pass = fields.Char('PostgreSQL Pass', size=120, required=True,
                            track_visibility='onchange')

    is_set_up_domain = fields.Boolean(
        'Domain',
        help='Create the instance for: {projectname}.tms.com')

    is_set_up_http_authentication = fields.Boolean(
        'HTTP Authentication',
        help='Set HTTP authentication on '
        '{projectname}.tms.com for (denis, jc, '
        'tam, TPM, {projectname})')

    is_set_up_https = fields.Boolean('https')

    is_set_up_xmlrpc = fields.Boolean('xmlrpc')

    is_set_up_ssh_access = fields.Boolean('ssh access',
                                          help='Set SSH access & '
                                          'passwordless login on '
                                          '{projectname}.tms.com for '
                                          '(denis, jc, TPM, {projectname})')

    is_project_manager = fields.Boolean(
        compute='compute_is_project_manager',
        string='Is Project Manager')

    state = fields.Selection(
        instance_state_array,
        string='Status', required=True,
        default='active',
        help='Sleep: the instance is active but is facing temporary '
        'issues not under our control. Inactive: This instance will '
        'not be used anymore. Deleted: This instance has been removed '
        'from our servers.',
        track_visibility='onchange'
    )

    datetime_test = fields.Datetime('Last Test', readonly=True)

    note = fields.Text('Note')

    last_error = fields.Text('Last Error')

    operating_system = fields.Char(compute='compute_operating_system',
                                   method=True, string="Operating System",
                                   store=True)

    ssh_port = fields.Char(compute='compute_ssh_port', string="SSH Port",
                           store=True)

    custom_parameter = fields.Serialized('Custom parameter',
                                         track_visibility='onchange')

    instance_database_ids = fields.One2many('instance.database',
                                            'tms_instance_id',
                                            string='Instance Database')

    active = fields.Boolean('Active', default=True,
                            help="When unchecked, the instance will not "
                            "be visible in the user interface unless the "
                            "search filters specify that you want to "
                            "display non-active records (this a native "
                            "behavior of Odoo)")

    test_instance = fields.Selection(
        test_instance_array,
        default='access_login',
        required=True, string='Test instance',
        help="- Access only: Test if instance is up by connecting xmlrpc "
        "and test database names\n"
        "- Access and login: Test if instance is up by connecting xmlrpc "
        "and can be login into admin account and test database names\n"
        "- None: Don't test status of the instance, "
        "don't check the database names.",
        track_visibility='onchange'
    )

    xmlrpc_url = fields.Char(compute='compute_xmlrpc_url',
                             string="XML-RPC URL")

    https_login = fields.Char('HTTP Auth Test Login', size=64)

    proj_owner_id = fields.Many2one('res.users',
                                    related='project_id.owner_id',
                                    string="Project's Owner",
                                    store=True)
    team_id = fields.Many2one(string='Team', related='project_id.team_id',
                              store=True)
    team_manager_id = fields.Many2one(
        string="Team Manager", related='project_id.team_id.team_manager',
        store=True)

    backend_ip = fields.Char('Backend IP')

    backend_port = fields.Char('Backend Port', default='8069')

    ssl = fields.Boolean(
        'SSL',
        default=True,
        help="SSL termination is handled by nginx"
    )

    http_auth = fields.Boolean('HTTP Auth', default=True)

    htpasswd_file = fields.Char('htpasswd file')

    instance_user_ids = fields.Many2many(
        comodel_name='res.users',
        relation='tms_instance_user_rel',
        column1='instance_id',
        column2='user_id',
        string="Users of Instance",
        help="list users who can access to this instance "
        "( to generate htpasswd file)")

    multi_host = fields.Boolean(string="is Multi-host?",
                                track_visibility='onchange')

    haproxy_host_id = fields.Many2one('tms.host', string="HA Proxy Host",
                                      track_visibility='onchange')

    front_end_ids = fields.Many2many(
        comodel_name='tms.host',
        relation="tms_instance_host_front_end_rel",
        column1="instance_id",
        column2="host_id",
        string="Front End",
        track_visibility='onchange')

    back_end_ids = fields.Many2many(
        comodel_name='tms.host',
        relation="tms_instance_host_back_end_rel",
        column1="instance_id",
        column2="host_id",
        string="Back Office",
        track_visibility='onchange')

    database_ids = fields.One2many(
        'multi.host.database', 'instance_id', string="Databases",
        track_visibility='onchange')

    nfs_host_id = fields.Many2one(
        'tms.host', string="NFS Host",
        track_visibility='onchange')

    awx_job_history_ids = fields.One2many(
        comodel_name='tms.awx.job.history',
        inverse_name='instance_id', string='AWX Job History')

    @api.multi
    def write(self, vals):
        res = super(TmsInstance, self).write(vals)
        if vals.get('database_ids', False):
            for instance in self:
                databases = instance.database_ids
                master_no = len(databases.filtered('master'))
                if master_no > 1:
                    raise Warning('Only one database can be set as master.')
        return res

    # ========================================================================
    # COMPUTE FUNCTION DEFINITIONS
    # ========================================================================

    @api.multi
    def _get_mail(self):
        mail_content = ''
        numb = 1

        # Only check on active instances with databases defined
        domain_build = [
            ('state', 'in', ['active']),
            ('instance_database_ids', '!=', False),
            ('test_instance', '=', 'access_login')
        ]
        instances = self.search(domain_build)
        logging.info('{0} instance(s) have databases that need '
                     'to be compared'.format(len(instances)))

        for instance in instances:
            list_db_in_instance, list_db_in_tms_instances = \
                self.get_lst_db_instance(instance)
            if not list_db_in_instance and not list_db_in_tms_instances:
                continue

            mail_content += '<div><b>' + str(numb) + '.'
            mail_content += instance.name + '</b>:</div>'
            if list_db_in_instance and list_db_in_tms_instances:
                mail_content += '<ul>'
                mail_content += '<li>Database(s) in instance:' + \
                    ','.join(list_db_in_instance) + '</li>'
                mail_content += '<li>Database(s) in TMS:' + \
                    ','.join(list_db_in_tms_instances) + '</li>'
                mail_content += '</ul>'
            else:
                mail_content += '<ul>'
                mail_content += "<li><font color=red>Cannot access" +\
                    " the instance!</font></li>"
                mail_content += '</ul>'
            numb += 1

        for record in self:
            record.mail_instance = mail_content

    @api.model
    def get_lst_db_instance(self, instance):
        # Get instance address to test - including port
        # Get the https login and password
        https_login = instance.https_login or 'guest'
        https_password = instance.read_secure(
            fields=['https_password'])[0].get('https_password', 'n0-@pplY')
        inject_idx = instance.xmlrpc_url.find('://')
        uri_base = '%s://%s:%s@%s' % (instance.xmlrpc_url[:inject_idx],
                                      https_login, https_password,
                                      instance.xmlrpc_url[inject_idx + 3:])

        # List to store database for each instances and in TMS
        instance_databases_list = []

        # Databases configured for instance in TMS
        tms_instances_databases_list = [
            db_info.name for db_info in instance.instance_database_ids]
        # log in uri
        message = ''
        error_stack = []
        try:
            # Link to instance's db service, should be:
            # http://<user>:<pass>@<host-name>:<port>/xmlrpc/db
            # can be changed by later versions (odoo)
            conn = xmlrpclib.ServerProxy(uri_base + '/xmlrpc/db')
            instance_databases_list = conn.list()

        except Exception as e:
            logging.warning('Failed at sock common')
            message += 'Warning: Failed at sock common\n'
            error_stack.append(e)
            return [], []

        # Compare 2 database list before calling check_database to improve the
        # performance
        tms_instances_databases_list = sorted(tms_instances_databases_list)
        instance_databases_list = sorted(instance_databases_list)
        compare_list = set(tms_instances_databases_list).symmetric_difference(
            instance_databases_list)
        if not compare_list:
            return [], []

        list_db_in_tms_instances = self.check_database(
            tms_instances_databases_list, instance_databases_list
        )
        list_db_in_instance = self.check_database(
            instance_databases_list, tms_instances_databases_list
        )
        return list_db_in_instance, list_db_in_tms_instances
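
    # Illustrative uri_base (assumed values): with xmlrpc_url
    # 'https://foo-production.trobz.com:443', https_login 'guest' and an
    # https_password of 'secret', the basic-auth URL built above is
    # 'https://guest:[email protected]:443', and the database
    # list is then read from '<uri_base>/xmlrpc/db'.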

    @api.multi
    def compute_is_project_manager(self):
        user = self.env["res.users"].browse(self._uid)
        user_group_ids = [group.id for group in user.groups_id]
        pm_group_ids = self.env["res.groups"].search(
            [('name', '=', 'TMS Activity Viewer')])
        if pm_group_ids and user_group_ids:
            for record in self:
                record.is_project_manager = pm_group_ids[0].id in \
                    user_group_ids

    @api.multi
    @api.depends(
        "host_id",
        "host_id.operating_system_id",
        "host_id.operating_system_id.name")
    def compute_operating_system(self):
        for instance in self:
            instance.operating_system = \
                instance.host_id \
                and instance.host_id.operating_system_id \
                and instance.host_id.operating_system_id.name \
                or False

    @api.multi
    @api.depends(
        "host_id",
        "host_id.port")
    def compute_ssh_port(self):
        for instance in self:
            ssh_port = instance.host_id and instance.host_id.port or False
            instance.ssh_port = ssh_port

    @api.multi
    def compute_xmlrpc_url(self):

        for instance in self:

            # Normal authentication, use custom xmlrpc port
            if instance.xmlrpc_port:
                uri_base = '%s:%s' % (instance.url, instance.xmlrpc_port)

                if uri_base[:4] != 'http':
                    uri_base = 'http://%s' % uri_base

            # 8113: Special uri for the instances which require https
            # authentication
            else:
                uri_base = '%s:443' % instance.url
                if uri_base[:5] != 'https':
                    uri_base = 'https://' + uri_base

            instance.xmlrpc_url = uri_base
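
    # Illustrative results (assumed field values):
    #   url='erp.example.com', xmlrpc_port='8069' -> 'http://erp.example.com:8069'
    #   url='erp.example.com', xmlrpc_port=False  -> 'https://erp.example.com:443'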

    # ========================================================================
    # ONCHANGE FUNCTION DEFINITIONS
    # ========================================================================

    @api.onchange('server_type')
    def on_change_server_type(self):
        for instance in self:
            project = instance.project_id
            server_type = instance.server_type
            if not project or not server_type:
                return {}
            name = 'openerp-%s-%s' % (project.name, server_type)
            if server_type == 'production':
                url = '%s.trobz.com' % (project.name)
            else:
                url = '%s-%s.trobz.com' % (project.name, server_type)
            instance.name = name
            instance.url = url
            instance.psql_user = name
            instance.psql_pass = name
            # F#13799 - htpasswd_file field on instance should be editable
            if project and server_type:
                htpasswd_file = '/usr/local/var/auth/htpasswd_%s_%s' %\
                    (project.name, server_type)
                instance.htpasswd_file = htpasswd_file

    @api.onchange('state')
    def on_change_state(self):
        for instance in self:
            if instance.state in ('inactive', 'deleted'):
                instance.active = False
            else:
                instance.active = True

    @api.onchange('project_id')
    def on_change_project_id(self):
        for instance in self:
            project = instance.project_id
            server_type = instance.server_type
            # F#13799 - htpasswd_file field on instance should be editable
            if project and server_type:
                htpasswd_file = '/usr/local/var/auth/htpasswd_%s_%s' %\
                    (project.name, server_type)
                instance.htpasswd_file = htpasswd_file

    @api.onchange('host_id')
    def on_change_host(self):
        for instance in self:
            host = instance.host_id
            if host:
                instance.backend_ip = '10.26.%d.y' % host.container_id
                instance.ssh_port = host.port
            else:
                instance.backend_ip = False
                instance.ssh_port = False

    # ========================================================================
    # FORM BUTTON FUNCTION DEFINITIONS
    # ========================================================================

    @api.multi
    def button_request_in_ticket(self):
        # FIXME: should we consider to remove this function, this is not used
        ticket_pool = self.env['tms.ticket']
        for instance in self:
            ticket_pool.create({
                'summary': u'Configure the instance {0}'.format(instance.name)
            })

    # ========================================================================
    # Daily Check List Instance Databases scheduler (email: Test List DB)
    # ========================================================================

    @api.model
    def check_database(self, list_check, list_to_check):
        RENDER_COLOR = '<font color=red>%s</font>'
        list_new = []
        for i in list_check:
            if i not in list_to_check:
                i = RENDER_COLOR % i
            list_new.append(i)
        return list_new
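
    # Illustrative call (assumed data): entries present in list_check but
    # missing from list_to_check are wrapped in the red <font> tag so they
    # stand out in the notification mail:
    #   check_database(['foo_prod', 'foo_stage'], ['foo_prod'])
    #   -> ['foo_prod', '<font color=red>foo_stage</font>']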

    @api.model
    def run_scheduler_compare_instance_in_tms_and_database(self):
        logging.info('[Scheduler] [Start] Compare list of databases in TMS '
                     'and in instances')

        domain_build = [
            ('state', 'in', ['active']),
            ('instance_database_ids', '!=', False),
            ('test_instance', '!=', 'none')
        ]
        instances = self.search(domain_build)

        for instance in instances:
            list_db_in_instance, list_db_in_tms_instances = \
                self.get_lst_db_instance(instance)
            if not list_db_in_instance and not list_db_in_tms_instances:
                continue

            # send notification email
            template = self.env.ref(
                'tms_modules.daily_instances_db_template'
            )
            template._send_mail_asynchronous(instance.id)

        logging.info('[Scheduler] [End] Compare list of databases '
                     'in TMS and in instances')
        return True

    # ========================================================================
    # Test Instances Scheduler (email: Instance Down Mail)
    # ========================================================================

    @api.model
    def run_test_instance_scheduler(self):
        logging.info("run_test_instance_scheduler: start")
        try:
            self.button_test_all_instances()
        except Exception as e:
            logging.error("Error in run_test_instance_scheduler: %s" % str(e))
        logging.info("run_test_instance_scheduler: end")
        return True

    @api.multi
    def button_test(self):
        """
        Test instance.
        """
        logging.info('Entered button_test of tms_instance '
                     '(to test availability of instances)...')

        # Number of attempts and Sleep time between each attempt
        NB_ATTEMPS = 2
        TIME_SLEEP = 3
        error_stack = []

        # message for test instance log
        message = ''

        for instance in self:
            message = ''
            vals = {}

            # get the db_login, db_password and db_name
            # from instance database field
            db_login = '******'

            if instance.test_instance == 'access_login':
                # check if a database is created for this instance
                if not (instance.instance_database_ids and
                        instance.instance_database_ids.ids or False):
                    logging.warning(
                        'No database defined for the instance %s.'
                        % instance.name
                    )
                    continue

                # only check the first database in the list
                instance_db = instance.instance_database_ids and \
                    instance.instance_database_ids[0] or False

                if instance_db:
                    # extract db name and password from first line
                    db_name = instance_db.name
                    db_password = instance_db\
                        .read_secure(fields=['password'])[0]\
                        .get('password', 'n0-@pplY')
            else:
                db_name = 'unnamed'
                db_password = '******'

            # Get the https login and password (from instance)
            https_login = instance.https_login or 'guest'
            https_password = instance\
                .read_secure(fields=['https_password'])[0]\
                .get('https_password', 'n0-@pplY')

            inject_idx = instance.xmlrpc_url.find('://')
            uri_base = '%s://%s:%s@%s' % (instance.xmlrpc_url[:inject_idx],
                                          https_login, https_password,
                                          instance.xmlrpc_url[inject_idx + 3:])

            # To remove the https_login_pass from the instance down message
            https_real_pass = '******' % https_password
            https_replace_pass = '******'
            logging.info('Checking ' + instance.xmlrpc_url)

            for i in range(NB_ATTEMPS):
                instance_exc = None
                state = 'active'

                # Try to reach the instance
                try:
                    sock_common = xmlrpclib.ServerProxy(
                        '%s/xmlrpc/common' % uri_base)
                except Exception as e:
                    logging.warning('Failed at sock common')
                    message += 'Warning: Failed at sock common\n'
                    state = 'exception'
                    instance_exc = str(e).replace(
                        https_real_pass, https_replace_pass
                    )

                # Try to login at the instance
                try:
                    if instance.test_instance == 'access_login':
                        logging.info(
                            'Trying to login as Admin into instance %s...' %
                            instance.name)
                        message += 'Info: Trying to login as Admin into '\
                            'instance %s...\n' % instance.name
                        connection = sock_common.login(
                            db_name, db_login, db_password)
                        if not connection:
                            raise Exception(
                                'Could not login to the instance %s' %
                                instance.name)
                        else:
                            # reset state and message then break, no more try
                            # after successful check
                            state, message = 'active', ''
                            break
                    elif instance.test_instance == 'access':
                        try:
                            connection = sock_common.login(
                                'test_access_only', 'test', 'test')
                        except Exception as e:
                            if 'FATAL:  database "test_access_only" ' +\
                                    'does not exist' in str(e):
                                state, message = 'active', ''
                                break
                            else:
                                raise Exception(
                                    'Could not connect to the instance %s' %
                                    instance.name)
                except Exception as e:
                    # TODO: to remove the password from the exception
                    exc_str = str(e).replace(https_real_pass,
                                             https_replace_pass)
                    logging.warning('ATTEMPT FAILED: Could not connect '
                                    'to the instance %s: %s' %
                                    (instance.name, exc_str))
                    message += 'Warning: ATTEMPT FAILED: Could not connect '\
                               'to the instance %s: %s' % (instance.name,
                                                           exc_str)
                    state = 'exception'
                    instance_exc = exc_str
                finally:
                    sock_common = None

                if state == 'exception':
                    logging.warning('ATTEMPT FAILED: Tried %s time(s), '
                                    'trying again ...' % (i + 1,))
                    if i < NB_ATTEMPS - 1:
                        time.sleep(TIME_SLEEP)
                    elif i == NB_ATTEMPS - 1:
                        error_stack.append(instance_exc)

            # after N times to check on the instance ==> if instance down
            if state == 'exception':
                logging.error('The instance %s is down !!!' % instance.name)
                message += 'Error: The instance %s is down !!!\n'\
                    % instance.name
                logging.error('Could not connect to the instance %s after %s '
                              'attempts!!' % (instance.name, NB_ATTEMPS))
                message += 'Error: Could not connect to the instance %s '\
                    'after %s attempts!!\n' % (instance.name, NB_ATTEMPS)
                logging.error('uri_base: %s, db_name: %s, db_login: %s, '
                              'db_password: ****' % (instance.xmlrpc_url,
                                                     db_name, db_login))
                message += 'Error: uri_base: %s, db_name: %s, '\
                    'db_login: %s, db_password: ****\n' % (instance.xmlrpc_url,
                                                           db_name, db_login)
                logging.error(error_stack)
                vals['last_error'] = message
                state = 'exception'
            else:
                logging.info(
                    'Connect to the instance %s successful!' % instance.name)
                state = 'active'
                vals['last_error'] = False

            vals.update({
                'state': state, 'datetime_test': datetime.now()
            })
            logging.info('Writing new data to current instance: %s' % vals)

            instance.write(vals)  # write test data to instance

    @api.model
    def button_test_all_instances(self):
        """
        Test all active instances and notify when some are down.
        """

        logging.info('Entered button_test of tms_instance '
                     '(to test availability of instances)...')

        instances = self.search(
            [('state', 'in', ('active', 'exception')),
             ('test_instance', '!=', 'none')]
        )
        for instance in instances:
            instance.button_test()
            # commit the write transaction (before sending email)
            self.env.cr.commit()

        down_instances = self.search([
            ('state', '=', 'exception'),
            ('test_instance', '!=', 'none')
        ])
        if down_instances and down_instances.ids:
            # context should be passed in email process
            context = {'instances_down_ids': down_instances.ids}

            # send email to inform number of down instances
            email_template = self.env.ref(
                'tms_modules.tms_instance_down_mail_template'
            )
            email_template.with_context(context).send_mail(
                down_instances[0].id)

        logging.info('Leaving button_test of tms_instance '
                     '(to test availability of instances)...')

    @api.multi
    def get_mail_down_instances_subject(self):
        """
        This function is called by the email template
        to prepare the subject of the email.
        """
        context = self._context and self._context.copy() or {}

        # build domain to get only down instances
        domain = [
            ('state', '=', 'exception'), ('test_instance', '!=', 'none')
        ]
        if context.get('instances_down_ids'):
            domain.append(('id', 'in', context.get('instances_down_ids')))

        # get down instances
        down_instances = self.search(domain)

        if not down_instances.ids:
            return 'No instance down'

        nb_production_instance_down = 0
        nb_instance_down = 0

        for down_instance in down_instances:
            nb_instance_down += 1
            if down_instance.name[-11:] == '-production':
                nb_production_instance_down += 1

        if nb_production_instance_down == 0:
            return '%s Instance(s) down, none from production' %\
                str(nb_instance_down)

        return '%s Instance(s) down, including %s from production' %\
            (str(nb_instance_down), str(nb_production_instance_down))
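
    # e.g. with three instances in state 'exception', one of them named
    # 'openerp-foo-production' (assumed data), the subject reads:
    # '3 Instance(s) down, including 1 from production'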

    @api.multi
    def get_mail_down_instances(self):
        """
        Ticket #1075: send the list of down instances.
        """
        context = self._context and self._context.copy() or {}
        domain = [('state', '=', 'exception'), ('test_instance', '!=', 'none')]
        if context.get('instances_down_ids'):
            domain.append(('id', 'in', context.get('instances_down_ids')))

        down_instances = self.search(domain)
        if not down_instances:
            return 'No instance down'

        config_pool = self.env['ir.config_parameter']
        base_url = config_pool.get_param(
            'web.base.url',
            default='https://tms.trobz.com')
        base_url = u'{0}#model=tms.instance&id='.format(base_url)

        # 3051
        list_instances_production, list_instances_down = [], []

        # Get name of down instances - classified through instance type
        for instance in down_instances:
            if "production" in instance.name:
                list_instances_production.append(instance.name)
            else:
                list_instances_down.append(instance.name)

        # predefined mail templates and mail contents
        mail_content = u''
        list_template = u"<ol>{0}</ol>"
        details_template = u"<li>{0}: {1}</li>"
        instance_down_contents, instance_down_details = u"", u""

        # compose list of down production instance names
        for instance in list_instances_production:
            instance_down_contents += u'<li>{0} down</li>'.format(instance)

        # compose list of down staging/integration instances name
        for instance in list_instances_down:
            instance_down_contents += u'<li>{0} down</li>'.format(instance)

        # compose list of down instance names
        mail_content += list_template.format(instance_down_contents)

        # detailed information section for down instances
        mail_content += u'More details:<br />'

        # information to be displayed in details, read from database
        for instance in down_instances:
            # name of the down instance first - followed by the details
            instance_down_details += u'<li style="margin-bottom: 15px;">'
            instance_down_details += u'<span>{0} down</span>'.format(
                instance.name)
            # begin detailed information
            instance_down_details += u'<ul style="padding-left: 20px;">'
            instance_down_details += details_template.format(
                "URL", instance.url)
            instance_db_list = [
                db_info.name for db_info in instance.instance_database_ids]
            instance_down_details += details_template.format(
                "Database info", str(instance_db_list))
            instance_down_details += details_template.format(
                "XML-RPC port", instance.xmlrpc_port)
            instance_down_details += details_template.format(
                "Last error", escape(instance.last_error))
            instance_down_details += details_template.format(
                "Link to TMS", base_url + str(instance.id))
            instance_down_details += u'</ul>'
            # end detailed information
            instance_down_details += u'</li>'

        # compose list of down instance details
        mail_content += list_template.format(instance_down_details)

        return mail_content

    @api.multi
    def get_sysadmin_tpm_email_list(self):
        """
        Get the emails of users who are active Trobz members,
        have a job in (Manager, Technical Project Manager, System Admin)
        and belong to a department in (Management, OpenErp, Sysadmin).
        """

        departments = self.env['hr.department'].search(
            [('name', 'in', ('Management', 'OpenErp', 'Sysadmin'))])
        if not departments:
            return '[email protected],[email protected]'
        jobs = self.env['hr.job'].search([
            ('name', 'in', ('Manager', 'Technical Project Manager',
                            'System Admin'))])
        if not jobs:
            return '[email protected],[email protected]'
        employee_obj = self.env['hr.employee']
        employees = employee_obj.search(
            [('job_id', 'in', jobs.ids),
             ('department_id', 'in', departments.ids)])
        if not employees:
            return '[email protected],[email protected]'
        mail_list = ['*****@*****.**']
        for employee in employees:
            if employee and employee.user_id\
                    and employee.user_id.active\
                    and employee.user_id.is_trobz_member\
                    and employee.user_id.email:
                mail_list.append(employee.user_id.email)
        if mail_list:
            all_mail = ",".join(mail_list) + ",[email protected]"
        else:
            all_mail = '[email protected],[email protected]'
        return all_mail

    @api.multi
    def get_admin_tpm_sysadmin_email(self):
        result = ""
        res_users_obj = self.env['res.users']
        res_groups_obj = self.env['res.groups']
        admin_tpm_profs = res_groups_obj.search(
            [('name', 'in', ('Admin Profile',
                             'Sysadmin Profile',
                             'Technical Project Manager Profile')),
             ('is_profile', '=', True)])
        res_users = res_users_obj.search(
            [('group_profile_id', 'in', admin_tpm_profs.ids)])
        mail_list = []
        for user in res_users:
            if user.email:
                mail_list.append(user.email)
        if mail_list:
            result = ",".join(mail_list)
        else:
            result = '*****@*****.**'
        return result

    # ========================================================================
    # OTHER FUNCTIONS
    # ========================================================================

    @api.model
    def _check_password_security(self, instance):
        """
        @param instance: recordset tms.instance
        @return:
            - Admin profile, return True
            - TPM/FC profiles and in the supporters, return True
            - The rest, return False
        """
        if self._uid == SUPERUSER_ID:
            return True
        config_pool = self.env['ir.config_parameter']
        user_profile = self.env.user.group_profile_id
        # If this user is a Sysadmin and has full access
        if user_profile.is_sysadmin and self.env.user.has_full_sysadmin_access:
            return True
        # If this user is a Sysadmin but doesn't have full access,
        # check if he is in the list of users of the host of this instance
        if user_profile.is_sysadmin\
                and self.env.user.id in instance.host_id.user_ids.ids:
            return True
        # If this user is not a sysadmin, he must be in the list of supporters
        db_instance_profiles = config_pool.get_param(
            'db_instance_profiles')
        db_instance_profiles = safe_eval(db_instance_profiles)

        if user_profile.name not in db_instance_profiles:
            return False
        project_supporters = instance and\
            instance.project_id.project_supporter_rel_ids.ids\
            or []
        if self.env.user.id not in project_supporters:
            return False
        return True

    @api.multi
    def _security_https_password(self):
        """
        Only allow Admin/TPM or FC profiles to read/update the password field.
        """
        is_allow = False
        for rec in self:
            if self._check_password_security(rec):
                is_allow = True
            else:
                is_allow = False
                break
        return is_allow

    @api.model
    def run_get_module_quality_check_scheduler(self):
        instance_ids = self.search([
            ('state', '=', 'active')]
        )
        self.button_module_quality_check(instance_ids)
        self.env['email.template']._send_mail_asynchronous(
            self._uid, 'Module Quality Check Result')
        return True

    def _build_result_detail(self, result, result_details):
        detail_dict = {}
        for detail in result_details:
            module_name = result[detail['quality_check_id'][0]]['name']
            module_score = result[detail['quality_check_id'][0]]['final_score']
            if module_name not in detail_dict:
                detail_dict[module_name] = [
                    u'<li> Module {0}: {1}</li>'.format(
                        module_name, module_score)
                ]

        result_detail = u'Last update: {0}<br/>'.format(datetime.now())\
            + u'<li>Modules: </li><ul>'
        for detail in detail_dict.values():
            detail_str = u'<br />  '.join(detail)
            result_detail += detail_str
        result_detail += u'</ul>'

        return result_detail

    @api.model
    def migrate_database_instance(self):
        logging.info('==== START migrate database instance ====')
        tms_instance_datas = self.search([])
        instance_db_env = self.env['instance.database']
        for data in tms_instance_datas:
            if data.databases:
                databases = data.databases.split(",")
                for database in databases:
                    dict_data = database.split(":")
                    vals = {'name': dict_data[0],
                            'tms_instance_id': data.id}
                    new_data = instance_db_env.create(vals)
                    if len(dict_data) > 1:
                        vals.update({'password': dict_data[1]})
                        new_data.secure_write(vals)
        logging.info('==== END migrate database instance  ====')
        return True

    @api.multi
    def get_databases_list(self):
        """
        Get a list of databases for each given instance.
        """
        databases_lst = {}  # {instance: list of databases}
        for instance in self:
            db_names = [db_info.name
                        for db_info in instance.instance_database_ids]
            databases_lst.update({
                instance.name: db_names
            })
        return databases_lst
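
    # Illustrative return value (assumed records):
    #   {'openerp-foo-production': ['foo_prod'],
    #    'openerp-foo-staging': ['foo_stage', 'foo_demo']}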

    @api.multi
    def get_instance_info(self, context=None):
        """
        Get the state, server type, first database name and its cleartext
        password for each instance.
        """
        databases_lst = {}
        for instance in self:
            instance_db = instance.instance_database_ids and \
                instance.instance_database_ids[0] or False
            if instance_db:
                db_name = instance_db.name
                db_password = instance_db.read_secure(fields=['password'])
                databases_lst.update({
                    instance.name: [
                        instance.state, instance.server_type,
                        db_name, db_password[0]['password']]
                })
        return databases_lst