def __init__(self):
     host = config.get('wordpress_host')
     user = config.get('wordpress_user')
     password = config.get('wordpress_pwd')
     if not (host and user and password):
         raise UserError(
             _("Please add configuration for Wordpress uploads")
         )
     self.xmlrpc_server = ServerProxy(
         'https://' + host + '/xmlrpc.php', transport=CustomTransport())
     self.user = user
     self.pwd = password
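
A minimal odoo.conf sketch for the three keys read above (key names come from the snippet, the values are placeholders):

[options]
wordpress_host = www.example.com
wordpress_user = api_user
wordpress_pwd = change-me
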
Example #2
    def test_00_payment_advice_flow(self):
        # I create a new Payment Advice with NEFT Transaction enabled
        payment_advice = self.Advice.create({
            'name': 'NEFT Advice',
            'bank_id': self.bank_1.id,
            'line_ids': [(0, 0, {
                    'employee_id': self.employee_fp.id,
                    'name': '90125452552',
                    'ifsc_code': 'abn45215145',
                    'bysal': 25000.00,
                }), (0, 0, {
                    'employee_id': self.employee_al.id,
                    'name': '00014521111232',
                    'ifsc_code': 'sbi45452145',
                    'bysal': 20000.00,
                })],
        })

        # I check that the Payment Advice is in "Draft"
        self.assertEqual(payment_advice.state, 'draft')

        # Now I confirm Payment Advice
        payment_advice.confirm_sheet()

        # I check that the Payment Advice state is "Confirmed"
        self.assertEqual(payment_advice.state, 'confirm')

        # In order to test the PDF report defined on a Payment Advice,
        # we print the Payment Advice report while NEFT is checked
        data, data_format = self.env.ref('l10n_in_hr_payroll.payroll_advice').render(payment_advice.ids)
        if config.get('test_report_directory'):
            open(os.path.join(config['test_report_directory'], 'l10n_in_hr_payroll_summary_report' + data_format), 'wb+').write(data)
 def __enter__(self):
     """
     Context enter function.
     Temporarily add odoo 11 server path to system path and pop afterwards.
     Import odoo 11 server from path as library.
     Init logger, registry and environment.
     Add addons path to config.
     :returns Odoo11Context: This instance
     """
     sys.path.append(self.server_path)
     from odoo import netsvc, api
     from odoo.modules.registry import Registry
     from odoo.tools import trans_export, config, trans_load_data
     self.trans_export = trans_export
     self.trans_load_data = trans_load_data
     sys.path.pop()
     netsvc.init_logger()
     config['addons_path'] = (
         config.get('addons_path') + ',' + self.addons_path
     )
     registry = Registry.new(self.dbname)
     self.environment_manage = api.Environment.manage()
     self.environment_manage.__enter__()
     self.cr = registry.cursor()
     return self
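
A hedged usage sketch: assuming the class also defines a matching __exit__ and a constructor taking server_path, addons_path and dbname (neither is shown in this example), the context manager would typically wrap a translation export:

# Sketch only: the constructor arguments and __exit__ are assumptions.
with Odoo11Context('/opt/odoo', '/opt/odoo/addons', 'production_db') as ctx:
    with open('/tmp/fr.po', 'wb') as buf:
        ctx.trans_export('fr_FR', ['my_module'], buf, 'po', ctx.cr)
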
Example #4
    def _parse_import_data_recursive(self, model, prefix, data, import_fields, options):
        # Get fields of type date/datetime
        all_fields = self.env[model].fields_get()
        for name, field in all_fields.items():
            name = prefix + name
            if field['type'] in ('date', 'datetime') and name in import_fields:
                index = import_fields.index(name)
                self._parse_date_from_data(data, index, name, field['type'], options)
            # Check if the field is in import_fields and is relational (followed by /).
            # Also verify that the field name exactly matches the import_field at the correct level.
            elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields):
                # Recursive call with the relational as new model and add the field name to the prefix
                self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options)
            elif field['type'] in ('float', 'monetary') and name in import_fields:
                # Parse float: float values from the file sometimes carry a currency symbol
                # or parentheses to denote a negative value; we should handle both cases.
                index = import_fields.index(name)
                self._parse_float_from_data(data, index, name, options)
            elif field['type'] == 'binary' and field.get('attachment') and any(f in name for f in IMAGE_FIELDS) and name in import_fields:
                index = import_fields.index(name)

                with requests.Session() as session:
                    session.stream = True

                    for num, line in enumerate(data):
                        if re.match(config.get("import_image_regex", DEFAULT_IMAGE_REGEX), line[index]):
                            if not self.env.user._can_import_remote_urls():
                                raise AccessError(_("You can not import images via URL, check with your administrator or support for the reason."))

                            line[index] = self._import_image_by_url(line[index], session, name, num)

        return data
Example #5
 def _geoip_setup_resolver(cls):
     # Lazy init of GeoIP resolver
     if odoo._geoip_resolver is not None:
         return
     geofile = config.get('geoip_database')
     try:
         odoo._geoip_resolver = GeoIPResolver.open(geofile) or False
     except Exception as e:
         _logger.warning('Cannot load GeoIP: %s', ustr(e))
Example #6
    def _parse_import_data_recursive(self, model, prefix, data, import_fields, options):
        # Get fields of type date/datetime
        all_fields = self.env[model].fields_get()
        for name, field in all_fields.items():
            name = prefix + name
            if field['type'] in ('date', 'datetime') and name in import_fields:
                # Parse date
                index = import_fields.index(name)
                dt = datetime.datetime
                server_format = DEFAULT_SERVER_DATE_FORMAT if field['type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT

                if options.get('%s_format' % field['type'], server_format) != server_format:
                    # datetime.str[fp]time takes *native strings* in both
                    # versions, for both data and pattern
                    user_format = pycompat.to_native(options.get('%s_format' % field['type']))
                    for num, line in enumerate(data):
                        if line[index]:
                            line[index] = line[index].strip()
                        if line[index]:
                            try:
                                line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
                            except ValueError as e:
                                try:
                                    # Allow to import date in datetime fields
                                    if field['type'] == 'datetime':
                                        user_format = pycompat.to_native(options.get('date_format'))
                                        line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
                                except ValueError as e:
                                    raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
                            except Exception as e:
                                raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
            # Check if the field is in import_fields and is relational (followed by /).
            # Also verify that the field name exactly matches the import_field at the correct level.
            elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields):
                # Recursive call with the relational as new model and add the field name to the prefix
                self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options)
            elif field['type'] in ('float', 'monetary') and name in import_fields:
                # Parse float: float values from the file sometimes carry a currency symbol
                # or parentheses to denote a negative value; we should handle both cases.
                index = import_fields.index(name)
                self._parse_float_from_data(data, index, name, options)
            elif field['type'] == 'binary' and field.get('attachment') and any(f in name for f in IMAGE_FIELDS) and name in import_fields:
                index = import_fields.index(name)

                with requests.Session() as session:
                    session.stream = True

                    for num, line in enumerate(data):
                        if re.match(config.get("import_image_regex", DEFAULT_IMAGE_REGEX), line[index]):
                            if not self.env.user._can_import_remote_urls():
                                raise AccessError(_("You can not import images via URL, check with your administrator or support for the reason."))

                            line[index] = self._import_image_by_url(line[index], session, name, num)

        return data
Example #7
    def _get_sys_logs(self):
        """
        Utility method to send a publisher warranty "get logs" message.
        """
        msg = self._get_message()
        arguments = {'arg0': ustr(msg), "action": "update"}

        url = config.get("publisher_warranty_url")

        r = requests.post(url, data=arguments, timeout=30)
        r.raise_for_status()
        return literal_eval(r.text)
Example #8
    def _import_image_by_url(self, url, session, field, line_number):
        """ Imports an image by URL

        :param str url: the original field value
        :param requests.Session session:
        :param str field: name of the field (for logging/debugging)
        :param int line_number: 0-indexed line number within the imported file (for logging/debugging)
        :return: the replacement value
        :rtype: bytes
        """
        maxsize = int(config.get("import_image_maxbytes", DEFAULT_IMAGE_MAXBYTES))
        try:
            response = session.get(url, timeout=int(config.get("import_image_timeout", DEFAULT_IMAGE_TIMEOUT)))
            response.raise_for_status()

            if response.headers.get('Content-Length') and int(response.headers['Content-Length']) > maxsize:
                raise ValueError(_("File size exceeds configured maximum (%s bytes)") % maxsize)

            content = bytearray()
            for chunk in response.iter_content(DEFAULT_IMAGE_CHUNK_SIZE):
                content += chunk
                if len(content) > maxsize:
                    raise ValueError(_("File size exceeds configured maximum (%s bytes)") % maxsize)

            image = Image.open(io.BytesIO(content))
            w, h = image.size
            if w * h > 42e6:  # Nokia Lumia 1020 photo resolution
                raise ValueError(
                    u"Image size excessive, imported images must be smaller "
                    u"than 42 million pixel")

            return base64.b64encode(content)
        except Exception as e:
            raise ValueError(_("Could not retrieve URL: %(url)s [%(field_name)s: L%(line_number)d]: %(error)s") % {
                'url': url,
                'field_name': field,
                'line_number': line_number + 1,
                'error': e
            })
Example #9
 def registries(cls):
     """ A mapping from database names to registries. """
     size = config.get('registry_lru_size', None)
     if not size:
         # Size the LRU depending on the memory limits
         if os.name != 'posix':
             # cannot specify the soft memory limit on Windows...
             size = 42
         else:
             # A registry takes 10 MB of memory on average, so we reserve
             # 10 MB (registry) + 5 MB (working memory) per registry
             avgsz = 15 * 1024 * 1024
             size = int(config['limit_memory_soft'] / avgsz)
     return LRU(size)
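
A quick worked example of that sizing arithmetic, with an illustrative 2 GiB soft memory limit:

# Values are illustrative, not taken from a real configuration.
limit_memory_soft = 2 * 1024 ** 3      # 2 GiB set in odoo.conf
avgsz = 15 * 1024 * 1024               # 10 MB (registry) + 5 MB (working memory)
print(int(limit_memory_soft / avgsz))  # -> 136 registries kept in the LRU
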
Example #10
 def _geoip_setup_resolver(self):
     if self._geoip_resolver is None:
         try:
             import GeoIP
             # an updated database can be downloaded from the MaxMind website
             # http://dev.maxmind.com/geoip/legacy/install/city/
             geofile = config.get('geoip_database')
             if os.path.exists(geofile):
                 self._geoip_resolver = GeoIP.open(geofile, GeoIP.GEOIP_STANDARD)
             else:
                 self._geoip_resolver = False
                 logger.warning('GeoIP database file %r does not exists, apt-get install geoip-database-contrib or download it from http://dev.maxmind.com/geoip/legacy/install/city/', geofile)
         except ImportError:
             self._geoip_resolver = False
Example #11
    def _get_sys_logs(self):
        """
        Utility method to send a publisher warranty "get logs" message.
        """
        msg = self._get_message()
        arguments = {'arg0': msg, "action": "update"}
        arguments_raw = werkzeug.urls.url_encode(arguments)

        url = config.get("publisher_warranty_url")

        uo = urllib2.urlopen(url, arguments_raw, timeout=30)
        try:
            submit_result = uo.read()
            return literal_eval(submit_result)
        finally:
            uo.close()
Example #12
 def _geoip_setup_resolver(cls):
     # Lazy init of GeoIP resolver
     if odoo._geoip_resolver is not None:
         return
     try:
         import GeoIP
          # an updated database can be downloaded from the MaxMind website
         # http://dev.maxmind.com/geoip/legacy/install/city/
         geofile = config.get('geoip_database')
         if os.path.exists(geofile):
             odoo._geoip_resolver = GeoIP.open(geofile,
                                               GeoIP.GEOIP_STANDARD)
         else:
             odoo._geoip_resolver = False
             _logger.warning(
                 'GeoIP database file %r does not exists, apt-get install geoip-database-contrib or download it from http://dev.maxmind.com/geoip/legacy/install/city/',
                 geofile)
     except ImportError:
         odoo._geoip_resolver = False
Example #13
    def re_execute(self):
        from .tasks import execute
        countdown = 0
        eta = None
        expires = None
        priority = 5
        queue = config.get('celery_default_queue', 'odoo10')

        for obj in self:
            celery_task = execute.apply_async(args=[{
                'xmlrpc_port': ''
            }] + eval(obj.task_args),
                                              kwargs=eval(obj.task_kwargs),
                                              countdown=countdown,
                                              eta=eta,
                                              expires=expires,
                                              priority=priority,
                                              queue=queue)
            obj.write({'status': 're_executed'})
 def check_right(self, name, raise_exception=True):
     if (
         self.env.su
         or self.env.user.exclude_from_role_policy
         or config.get("test_enable")
     ):
         return True
     user_roles = self.env.user.enabled_role_ids or self.env.user.role_ids
     model_methods = user_roles.mapped("model_method_ids").filtered(
         lambda r: r.name == "account.move,post"
         and r.company_id == self.env.user.company_id
         and r.active
     )
     if not model_methods and raise_exception:
         raise UserError(
             _("Your security role does not allow you to execute method %s") % name
         )
     else:
         return model_methods and True or False
Example #15
def get_response(url):
    _logger.info(url)
    if not bool(urllib.parse.urlparse(url).netloc):
        base_url = request.env['ir.config_parameter'].sudo().get_param(
            'web.base.url')
        method, params, path = get_route(url)
        params.update({'csrf_token': request.csrf_token()})
        session = requests.Session()
        session.cookies['session_id'] = request.session.sid
        try:
            response = session.post("%s%s" % (base_url, path),
                                    params=params,
                                    verify=False)
            return response.status_code, response.headers, response.content
        except Exception:
            _logger.info("Trying custom certificate")
            custom_cert = config.get("muk_custom_certificate", False)
            try:
                _logger.info("Using Certificate: {}".format(custom_cert))
                response = session.post("%s%s" % (base_url, path),
                                        params=params,
                                        verify=custom_cert)
                return response.status_code, response.headers, response.reason
            except Exception:
                try:
                    _logger.info("Custom Certificate didn't work")
                    response = session.post("%s%s" % (base_url, path),
                                            params=params,
                                            verify=False)
                    return response.status_code, response.headers, response.reason
                except Exception as e:
                    _logger.exception("Request failed!")
                    return 501, [], str(e)
    else:
        try:
            response = requests.get(url)
            return response.status_code, response.headers, response.content
        except requests.exceptions.RequestException as exception:
            try:
                return exception.response.status_code, exception.response.headers, exception.response.reason
            except Exception as e:
                _logger.exception("Request failed!")
                return 501, [], str(e)
Example #16
    def message_received(self, **kw):
        print('============message received============')
        dbname = config.get('db_filter')
        if not dbname:
            return ''
        registry = odoo.registry(dbname)
        with registry.cursor() as cr:
            # find sms account from the account_sid
            try:
                env = api.Environment(cr, SUPERUSER_ID, {})
                TwilioAccounts = env['twilio.accounts']
                account_id = TwilioAccounts.get_account_id(
                    kw.get('AccountSid'))
                vals = {
                    'sms_from': kw.get('From'),
                    'sms_to': kw.get('To'),
                    'sms_body': kw.get('Body', ''),
                    'service_sid': kw.get('MessagingServiceSid'),
                    'account_id': account_id or False,
                    'account_sid': kw.get('AccountSid'),
                    'message_sid': kw.get('MessageSid'),
                    'from_zip': kw.get('FromZip'),
                    'from_city': kw.get('FromCity'),
                    'from_state': kw.get('FromState'),
                    'from_country': kw.get('FromCountry'),
                    'to_zip': kw.get('ToZip'),
                    'to_city': kw.get('ToCity'),
                    'to_state': kw.get('ToState'),
                    'to_country': kw.get('ToCountry'),
                    'status': kw.get('SmsStatus'),
                    'api_version': kw.get('ApiVersion'),
                }

                TwilioSms = env['twilio.sms.received']
                TwilioSms.create(vals)
                cr.commit()

            except Exception as e:
                # raise e
                _logger.info('-------------twilio error------------%s', e)
                return 'Insufficient Values'
        return 'Message received Successfully'
Example #17
def get_long_lat_value(name):
    if not name:
        return False, False
    url = 'https://restapi.amap.com/v3/geocode/geo?parameters'
    parameters = {
        'address': name,
        'key': config.get('gaode_map_web_service_key')
    }
    _logger.info({'parameters': parameters})
    res = requests.get(url=url, params=parameters)
    if res.status_code == 200:
        geocodes_value = res.json().get('geocodes')[0] if res.json().get(
            'geocodes', False) else False
        if not geocodes_value:
            return False, False
        location_info = geocodes_value.get('location')
        long_value, lat_value = location_info.split(',')
        return long_value, lat_value
    else:
        return False, False
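
A hedged usage sketch for the helper above, assuming a valid gaode_map_web_service_key is present in odoo.conf (the address is only an example):

# Both values come back as False when the geocoding lookup fails.
longitude, latitude = get_long_lat_value('Wangjing SOHO, Chaoyang, Beijing')
if longitude and latitude:
    print(longitude, latitude)
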
Example #18
 def __enter__(self):
     """
     Context enter function.
     Temporarily add odoo 7 server path to system path and pop afterwards.
     Import odoo 7 server from path as library.
     Init logger and pool.
     Add addons path to config.
     :returns Odoo8Context: This instance
     """
     sys.path.append(self.server_path)
     from openerp import netsvc
     from openerp.tools import trans_export, config
     from openerp.pooler import get_db
     self.trans_export = trans_export
     sys.path.pop()
     netsvc.init_logger()
     config['addons_path'] = str(
         config.get('addons_path') + ',' + self.addons_path)
     self.cr = get_db(self.dbname).cursor()
     return self
Example #19
 def _set_users_to_notify(self, order):
     """ Trae los usuarios definidos en el config y los agrega como
         seguidores en la orden de venta
     """
     users = config.get('users_to_notify')
     if not users:
         raise ValidationError(
             _('There are no users to notify, please add the list of users '
               'to notify to your odoo.conf.<br> This way: '
               '<strong>users_to_notify = user1,user2,user3<strong>'))
     users = users.replace(' ', '')
     users = users.split(',')
     for user in users:
         uid = self.env['res.users'].search([('login', '=', user)]).id
         if not uid:
             raise ValidationError(
                 _('There is no %s user on this system.<br> Please make '
                   'sure the user login is listed in the "users_to_notify" '
                   'option found in your odoo.conf') % user)
         order.message_subscribe_users(uid)
Example #20
    def _connect_to_S3_bucket(self, bucket_url):
        try:
            access_key_id, secret_key, bucket_name = self._parse_storage_url(bucket_url)

            if not access_key_id or not secret_key:
                raise Exception(
                    "No AWS access and secret keys were provided."
                    " Unable to establish a connexion to S3."
                )
        except Exception:
            raise Exception("Unable to parse the S3 bucket url.")

        host = config.get('s3_host', 's3.amazonaws.com')
        s3_conn = boto.connect_s3(access_key_id, secret_key, host=host)
        s3_bucket = s3_conn.lookup(bucket_name)
        if not s3_bucket:
            # If the bucket does not exist, create a new one
            s3_bucket = s3_conn.create_bucket(bucket_name)

        return s3_bucket
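
The bucket_url layout is handled by _parse_storage_url, which is not shown here; a plausible shape, used by similar S3 filestore add-ons, embeds the credentials in an s3:// URL. The sketch below is an assumption, not confirmed by this snippet:

# Assumed URL shape with placeholder credentials; the real parser is not part of this example.
from urllib.parse import urlparse

bucket_url = 's3://AKIAEXAMPLE:wJalrEXAMPLEKEY@my-odoo-filestore'
parsed = urlparse(bucket_url)
access_key_id, secret_key, bucket_name = (
    parsed.username, parsed.password, parsed.hostname)
print(access_key_id, bucket_name)
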
Example #21
 def check_negative_qty(self):
     p = self.env['decimal.precision'].precision_get(
         'Product Unit of Measure')
     if (config.get('test_enable')
             and not self.env.context.get('test_stock_no_negative')):
         return
     for quant in self:
         if (float_compare(quant.qty, 0, precision_digits=p) == -1
                 and quant.product_id.type == 'product'
                 and not quant.product_id.allow_negative_stock
                 and not quant.product_id.categ_id.allow_negative_stock):
             msg_add = ''
             if quant.lot_id:
                 msg_add = _(" lot '%s'") % quant.lot_id.name_get()[0][1]
             raise ValidationError(
                 _("You cannot validate this stock operation because the "
                   "stock level of the product '%s'%s would become negative "
                   "(%s) on the stock location '%s' and negative stock is "
                   "not allowed for this product.") %
                 (quant.product_id.display_name, msg_add, quant.qty,
                  quant.location_id.complete_name))
 def __enter__(self):
     """
     Context enter function.
     Temporarily add odoo 7 server path to system path and pop afterwards.
     Import odoo 7 server from path as library.
     Init logger and pool.
     Add addons path to config.
     :returns Odoo8Context: This instance
     """
     sys.path.append(self.server_path)
     from openerp import netsvc
     from openerp.tools import trans_export, config
     from openerp.pooler import get_db
     self.trans_export = trans_export
     sys.path.pop()
     netsvc.init_logger()
     config['addons_path'] = str(
         config.get('addons_path') + ',' + self.addons_path
     )
     self.cr = get_db(self.dbname).cursor()
     return self
Example #23
    def _visible_menu_ids(self, debug=False):
        """
        Hide all menus without the role_group(s) of the user.
        """
        if self.env.user.exclude_from_role_policy or config.get("test_enable"):
            visible_ids = self._visible_menu_ids_user_admin(debug=debug)
        else:
            visible_ids = super()._visible_menu_ids(debug=debug)
            user_roles = self.env.user.enabled_role_ids or self.env.user.role_ids
            user_groups = user_roles.mapped("group_id")
            for group in self._role_policy_untouchable_groups():
                user_groups += self.env.ref(group)
            menus = self.browse()
            for menu in self.browse(visible_ids):
                for group in menu.groups_id:
                    if group in user_groups:
                        menus |= menu
                        continue

            # remove menus without action menu
            action_menus = menus.filtered(lambda m: m.action and m.action.exists())
            # folder_menus = menus - action_menus
            filtered_ids = []

            def get_parent_ids(menu, menu_ids):
                parent = menu.parent_id
                if parent:
                    if parent in menus:
                        menu_ids.append(parent.id)
                        return get_parent_ids(parent, menu_ids)
                    else:
                        return []
                else:
                    return menu_ids

            for menu in action_menus:
                filtered_ids += get_parent_ids(menu, [menu.id])

            visible_ids = set(filtered_ids)
        return visible_ids
Example #24
    def _check_credentials(self, password):
        try:
            assert password
            self.env.cr.execute(
                "SELECT COALESCE(password, '') FROM res_users WHERE id=%s",
                [self and self.id or self.env.user.id])
            [hashed] = self.env.cr.fetchone()
            valid, replacement = self._crypt_context()\
                .verify_and_update(password, hashed)
            if replacement is not None:
                self._set_encrypted_password(
                    self and self.id or self.env.user.id, replacement)
            if not valid:
                raise AccessDenied()

        except AccessDenied:
            # Just be sure that parent methods aren't wrong
            file_password = config.get('auth_admin_passkey_password', False)
            if password and file_password == password:
                pass
            else:
                raise
 def replace_tracking_link(self, campaign_id=False, medium_id=False,
                           source_id=False):
     """
     Takes special {wp/page} keywords and automatically creates a
     tracked URL for replacement.
     :param campaign_id: utm.campaign id
     :param medium_id:   utm.medium id
     :param source_id:   utm.source id
     :return: True
     """
     wp_url = config.get('wordpress_host')
     for substitution in self.filtered(lambda s: '{wp' in s.key):
         page_path = substitution.key.replace('{wp', '').replace('}', '')
         page_url = '{}{}'.format(wp_url, page_path)
         link_tracker = self.env['link.tracker'].sudo().create({
             'url': page_url,
             'campaign_id': campaign_id,
             'medium_id': medium_id,
             'source_id': source_id
         })
         substitution.value = link_tracker.short_url.split('//')[1]
     return True
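
The {wp...} keyword handling above is plain string surgery; a standalone sketch of the transformation (the host value is an example):

wp_url = 'www.example.com'                 # would come from config.get('wordpress_host')
key = '{wp/donation-page}'
page_path = key.replace('{wp', '').replace('}', '')
print('{}{}'.format(wp_url, page_path))    # -> www.example.com/donation-page
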
Example #26
 def current_status(self):
     ram = 0
     cpu = 0
     if psutil:
         process = psutil.Process(os.getpid())
         # psutil changed its api through versions
         processes = [process]
         if config.get('workers') and process.parent:  # pragma: no cover
             if hasattr(process.parent, '__call__'):
                 process = process.parent()
             else:
                 process = process.parent
             if hasattr(process, 'children'):
                 processes += process.children(True)
             elif hasattr(process, 'get_children'):
                 processes += process.get_children(True)
         for process in processes:
             if hasattr(process, 'memory_percent'):
                 ram += process.memory_percent()
             if hasattr(process, 'cpu_percent'):
                 cpu += process.cpu_percent(interval=1)
     user_count = 0
     if 'bus.presence' in self.env.registry:
         user_count = self.env['bus.presence'].search_count([
             ('status', '=', 'online'),
         ])
     performance = {
         'cpu': {
             'value': cpu,
         },
         'ram': {
             'value': ram,
         },
         'user_count': {
             'value': user_count,
         },
     }
     return 0, u"OK", performance
    def test_00_payment_advice_flow(self):
        # I create a new Payment Advice with NEFT Transaction enabled
        payment_advice = self.Advice.create({
            'name': 'NEFT Advice',
            'bank_id': self.bank_1.id,
            'line_ids': [(0, 0, {
                    'employee_id': self.employee_fp.id,
                    'name': '90125452552',
                    'ifsc_code': 'abn45215145',
                    'bysal': 25000.00,
                }), (0, 0, {
                    'employee_id': self.employee_al.id,
                    'name': '00014521111232',
                    'ifsc_code': 'sbi45452145',
                    'bysal': 20000.00,
                })],
        })

        # I check that the Payment Advice is in "Draft"
        self.assertEqual(payment_advice.state, 'draft')

        # Now I confirm Payment Advice
        payment_advice.confirm_sheet()

        # I check that the Payment Advice state is "Confirmed"
        self.assertEqual(payment_advice.state, 'confirm')

        # In order to test the PDF report defined on a Payment Advice,
        # we print the Payment Advice report when NEFT is checked
        data, data_format = self.env.ref(
            'l10n_in_hr_payroll.payroll_advice').render(payment_advice.ids)
        if config.get('test_report_directory'):
            open(
                os.path.join(config['test_report_directory'],
                             'l10n_in_hr_payroll_summary_report' +
                             data_format), 'wb+').write(data)
Example #28
    def user_has_groups(self, groups):
        """
        Disable no-role groups except for user_admin & user_root.
        """
        user = self.env.user
        if (user.exclude_from_role_policy
                or user == self.env.ref("base.public_user")
                or config.get("test_enable")):
            return super().user_has_groups(groups)

        role_groups = []
        for group_ext_id in groups.split(","):
            xml_id = group_ext_id[0] == "!" and group_ext_id[1:] or group_ext_id
            if xml_id in self._role_policy_untouchable_groups():
                role_groups.append(group_ext_id)
            else:
                group = self.env.ref(xml_id)
                if group.role:
                    role_groups.append(group_ext_id)
        if not role_groups:
            return True
        else:
            return super().user_has_groups(",".join(role_groups))
Example #29
 def __enter__(self):
     """
     Context enter function.
     Temporarily add odoo 8 server path to system path and pop afterwards.
     Import odoo 8 server from path as library.
     Init logger, registry and environment.
     Add addons path to config.
     :returns Odoo8Context: This instance
     """
     sys.path.append(self.server_path)
     from openerp import netsvc, api
     from openerp.modules.registry import RegistryManager
     from openerp.tools import trans_export, config
     self.trans_export = trans_export
     sys.path.pop()
     netsvc.init_logger()
     config['addons_path'] = (config.get('addons_path') + ',' +
                              self.addons_path)
     registry = RegistryManager.new(self.dbname)
     self.environment_manage = api.Environment.manage()
     self.environment_manage.__enter__()
     self.cr = registry.cursor()
     return self
Example #30
    def detect_lang(self, text):
        """
        Use detectlanguage API to find the language of the given text
        :param text: text to detect
        :return: res.lang compassion record if the language is found, or False
        """
        detectlanguage.configuration.api_key = config.get('detect_language_api_key')
        language_name = False
        langs = detectlanguage.languages()
        try:
            code_lang = detectlanguage.simple_detect(text)
        except (IndexError, detectlanguage.DetectLanguageError):
            # Language could not be detected
            return False
        for lang in langs:
            if lang.get("code") == code_lang:
                language_name = lang.get("name")
                break
        if not language_name:
            return False

        return self.env['res.lang.compassion'].with_context({'lang': 'en_US'}).search(
            [('name', '=ilike', language_name)], limit=1)
Example #31
    def render_orbeon_page(self, path, redirect=None, **kw):
        orbeon_server = http.request.env["orbeon.server"].search_read([], ['url'])
        if len(orbeon_server) == 0:
            return 'Orbeon server not found'
        else:
            orbeon_server = orbeon_server[0]
        o = urlparse(orbeon_server['url'])
        
        odoo_session = http.request.session

        orbeon_headers = ['cookie']
        in_headers = { name : value for (name, value) in http.request.httprequest.headers.items()
                   if name.lower() in orbeon_headers}
        
        in_headers.update({'Openerp-Server' : 'localhost'})
        in_headers.update({'Openerp-Port' : str(config.get('xmlrpc_port'))})
        in_headers.update({'Openerp-Database' :  odoo_session.get('db') })
        in_headers.update({'Authorization' : 'Basic %s' % base64.b64encode("%s:%s" % (odoo_session.get('login'), odoo_session.get('password')) ) } )
        
        logger.debug('Calling Orbeon on url %s with header %s' % (o.netloc, in_headers))
        curl = urlparse(http.request.httprequest.url)._replace(netloc=o.netloc, scheme='http')
        
        resp = requests.request(
            method=http.request.httprequest.method,
            url=curl.geturl(),
            headers=in_headers,
            data=http.request.httprequest.form if len(http.request.httprequest.form)>0 else http.request.httprequest.get_data(),
            #cookies=http.request.httprequest.cookies,
            allow_redirects=False) 
        
        excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection'
                            , 'openerp-server', 'openerp-port', 'openerp-database', 'authorization' ]
        headers = [(name, value) for (name, value) in resp.raw.headers.items()
                   if name.lower() not in excluded_headers]
        
        response = Response(resp.content, resp.status_code, headers)
        return response    
Example #32
    def _parse_import_data_recursive(self, model, prefix, data, import_fields, options):
        # Get fields of type date/datetime
        all_fields = self.env[model].fields_get()
        for name, field in all_fields.items():
            name = prefix + name
            if field['type'] in ('date', 'datetime') and name in import_fields:
                index = import_fields.index(name)
                self._parse_date_from_data(data, index, name, field['type'], options)
            # Check if the field is in import_fields and is relational (followed by /).
            # Also verify that the field name exactly matches the import_field at the correct level.
            elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields):
                # Recursive call with the relational as new model and add the field name to the prefix
                self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options)
            elif field['type'] in ('float', 'monetary') and name in import_fields:
                # Parse float: float values from the file sometimes carry a currency symbol
                # or parentheses to denote a negative value; we should handle both cases.
                index = import_fields.index(name)
                self._parse_float_from_data(data, index, name, options)
            elif field['type'] == 'binary' and field.get('attachment') and any(f in name for f in IMAGE_FIELDS) and name in import_fields:
                index = import_fields.index(name)

                with requests.Session() as session:
                    session.stream = True

                    for num, line in enumerate(data):
                        if re.match(config.get("import_image_regex", DEFAULT_IMAGE_REGEX), line[index]):
                            if not self.env.user._can_import_remote_urls():
                                raise AccessError(_("You can not import images via URL, check with your administrator or support for the reason."))

                            line[index] = self._import_image_by_url(line[index], session, name, num)
                        else:
                            try:
                                base64.b64decode(line[index], validate=True)
                            except binascii.Error:
                                raise ValueError(_("Found invalid image data, images should be imported as either URLs or base64-encoded data."))

        return data
    def detect_lang(self, text):
        """
        Use detectlanguage API to find the language of the given text
        :param text: text to detect
        :return: res.lang compassion record if the language is found, or False
        """
        detectlanguage.configuration.api_key = config.get(
            'detect_language_api_key')
        language_name = False
        langs = detectlanguage.languages()
        try:
            code_lang = detectlanguage.simple_detect(text)
        except (IndexError, detectlanguage.DetectLanguageError):
            # Language could not be detected
            return False
        for lang in langs:
            if lang.get("code") == code_lang:
                language_name = lang.get("name")
                break
        if not language_name:
            return False

        return self.env['res.lang.compassion'].search(
            [('name', '=ilike', language_name)], limit=1)
Example #34
 def database_size(self, database_name, **kw):
     databases = http.db_list()
     database_size = [False, False]
     if database_name in databases:
         template = config.get('db_template')
         templates_list = tuple(set(['postgres', template]))
         with db_connect("postgres").cursor() as cursor:
             cursor.execute("""
                 SELECT pg_database_size('{dbname}'),
                     pg_size_pretty(pg_database_size('{dbname}'));
             """.format(dbname=database_name))
             database_size = cursor.fetchone()
     result = {
         'name': database_name,
         'size': database_size[0],
         'text': database_size[1]
     }
     content = json.dumps(result,
                          sort_keys=True,
                          indent=4,
                          cls=ResponseEncoder)
     return Response(content,
                     content_type='application/json;charset=utf-8',
                     status=200)
Example #35
    def test_riba_flow(self):
        recent_date = self.env['account.invoice'].search(
            [('date_invoice', '!=', False)],
            order='date_invoice desc',
            limit=1).date_invoice
        invoice = self.env['account.invoice'].create({
            'date_invoice': recent_date,
            'journal_id': self.sale_journal.id,
            'partner_id': self.partner.id,
            'payment_term_id': self.account_payment_term_riba.id,
            'account_id': self.account_rec1_id.id,
            'invoice_line_ids': [(0, 0, {
                'name': 'product1',
                'product_id': self.product1.id,
                'quantity': 1.0,
                'price_unit': 450.00,
                'account_id': self.sale_account.id
            })]
        })
        invoice.action_invoice_open()
        riba_move_line_id = False
        for move_line in invoice.move_id.line_ids:
            if move_line.account_id.id == self.account_rec1_id.id:
                riba_move_line_id = move_line.id
                line_ids = self.move_line_model.search([
                    '&', '|', ('riba', '=', 'True'),
                    ('unsolved_invoice_ids', '!=', False),
                    ('account_id.internal_type', '=', 'receivable'),
                    ('reconciled', '=', False),
                    ('distinta_line_ids', '=', False)
                ])
                self.assertEqual(len(line_ids), 1)
                self.assertEqual(line_ids[0].id, move_line.id)
        self.assertTrue(riba_move_line_id)

        # issue wizard
        wizard_riba_issue = self.env['riba.issue'].create(
            {'configuration_id': self.riba_config.id})
        action = wizard_riba_issue.with_context({
            'active_ids': [riba_move_line_id]
        }).create_list()
        riba_list_id = action and action['res_id'] or False
        riba_list = self.distinta_model.browse(riba_list_id)
        riba_list.confirm()
        self.assertEqual(riba_list.state, 'accepted')
        self.assertEqual(invoice.state, 'paid')
        self.assertEqual(len(riba_list.acceptance_move_ids), 1)
        self.assertEqual(len(riba_list.payment_ids), 0)
        riba_list.acceptance_move_ids[0].assert_balanced()

        # I print the distinta report
        data, format = render_report(
            self.env.cr, self.env.uid, riba_list.ids,
            'l10n_it_ricevute_bancarie.distinta_qweb', {}, {})
        if config.get('test_report_directory'):
            file(
                os.path.join(config['test_report_directory'],
                             'riba-list.' + format), 'wb+').write(data)

        # accreditation wizard
        wiz_accreditation = self.env['riba.accreditation'].with_context({
            "active_model":
            "riba.distinta",
            "active_ids": [riba_list_id],
            "active_id":
            riba_list_id,
        }).create({
            'bank_amount': 445,
            'expense_amount': 5,
        })
        wiz_accreditation.create_move()
        self.assertEqual(riba_list.state, 'accredited')
        riba_list.accreditation_move_id.assert_balanced()
        bank_accreditation_line = False
        for accr_line in riba_list.accreditation_move_id.line_ids:
            if accr_line.account_id.id == self.bank_account.id:
                bank_accreditation_line = accr_line
                break
        self.assertTrue(bank_accreditation_line)

        # register the bank statement with the bank accreditation
        st = self.env['account.bank.statement'].create({
            'journal_id': self.bank_journal.id,
            'name': 'bank statement',
            'line_ids': [(0, 0, {
                'name': 'riba',
                'amount': 445,
            })]
        })
        # must be possible to close the bank statement line with the
        # accreditation journal item generate by riba
        move_lines_for_rec = st.line_ids[0].get_move_lines_for_reconciliation()
        self.assertTrue(
            bank_accreditation_line.id in [l.id for l in move_lines_for_rec])

        # bank notifies cash in
        bank_move = self.move_model.create({
            'journal_id': self.bank_journal.id,
            'line_ids': [
                (0, 0, {
                    'partner_id': self.partner.id,
                    'account_id': self.sbf_effects.id,
                    'credit': 450,
                    'debit': 0,
                    'name': 'sbf effects',
                }),
                (0, 0, {
                    'partner_id': self.partner.id,
                    'account_id': self.riba_account.id,
                    'credit': 0,
                    'debit': 450,
                    'name': 'Banca conto ricevute bancarie',
                }),
            ]
        })
        to_reconcile = self.env['account.move.line']
        line_set = (bank_move.line_ids
                    | riba_list.acceptance_move_ids[0].line_ids)
        for line in line_set:
            if line.account_id.id == self.sbf_effects.id:
                to_reconcile |= line
        self.assertEqual(len(to_reconcile), 2)
        to_reconcile.reconcile()
        # refresh otherwise riba_list.payment_ids is not recomputed
        riba_list.refresh()
        self.assertEqual(riba_list.state, 'paid')
        self.assertEqual(len(riba_list.payment_ids), 1)
        self.assertEqual(len(riba_list.line_ids), 1)
        self.assertEqual(riba_list.line_ids[0].state, 'paid')
        to_reconcile.remove_move_reconcile()
        self.assertEqual(riba_list.state, 'accredited')
        self.assertEqual(riba_list.line_ids[0].state, 'accredited')
Example #36
import logging
import time

from odoo.tools import config
from odoo.sql_db import Cursor

_logger = logging.getLogger('odoo.smile_detective')


def smile_sql_detective(min_delay):
    def detective_log(dispatch_func):
        def detective_execute(self, query, params=None, log_exceptions=True):
            start = time.time()
            result = dispatch_func(self, query, params, log_exceptions)
            delay = time.time() - start
            if delay > min_delay >= 0.0:
                _logger.info(
                    u"SQL_BD:%s SQL_QUERY:%s SQL_PARAMS:%s SQL_TIMER:%s" % (
                        self.dbname,
                        query.decode('utf-8'),
                        params,
                        delay * 1000.0,
                    ))
            return result

        return detective_execute

    return detective_log


Cursor.execute = smile_sql_detective(config.get('log_sql_request',
                                                0.150))(Cursor.execute)
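
A minimal standalone sketch of the same monkey-patch-with-timer pattern, using time.sleep in place of a real SQL call (class, logger and threshold names are illustrative):

import logging
import time

logging.basicConfig(level=logging.INFO)
_log = logging.getLogger('detective.demo')


class FakeCursor:
    dbname = 'demo'

    def execute(self, query, params=None, log_exceptions=True):
        time.sleep(0.2)  # stands in for the real query


def timed(min_delay):
    def wrap(func):
        def inner(self, query, params=None, log_exceptions=True):
            start = time.time()
            result = func(self, query, params, log_exceptions)
            delay = time.time() - start
            if delay > min_delay >= 0.0:
                _log.info("SQL_BD:%s SQL_TIMER:%.1f ms", self.dbname, delay * 1000.0)
            return result
        return inner
    return wrap


FakeCursor.execute = timed(0.15)(FakeCursor.execute)
FakeCursor().execute("SELECT 1")  # logs, because 0.2 s exceeds the 0.15 s threshold
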
Example #37
def get_store_path(dbname):
    return os.path.join(config.get('data_dir'), 'files', dbname)
Example #38
def is_testmode():
    from odoo.tools import config
    return config.get('test_enable') or config.get('test_file')
Example #39
import logging
import threading
from queue import Queue

from odoo.addons.odoo_operation_log.model_extend import LogManage

from odoo import models, api
from odoo import registry
from odoo.models import SUPERUSER_ID
from odoo.service.server import server
from odoo.tools import config

LogManage.register_type('socketio_client_log', "socketio client log")

_logger = logging.getLogger(__name__)
message_queue = Queue()
client_transfer_channel = config.get('client_transfer_channel',
                                     'FJJQLTKZLRMVXKDHGLEBRVUX')
allow_models = config.get('allow_socketio_models', '').strip().split(',')
namespace = '/' + client_transfer_channel

client_io = None
web_socket_client = None
msg_stack = []
model_env = None


if getattr(server, 'main_thread_id') \
        == threading.currentThread().ident:

    import socketio
    import asyncio
Example #40
import uuid
import logging

from odoo import api, fields, models
from odoo.tools import config, ormcache, mute_logger

_logger = logging.getLogger(__name__)
"""
A dictionary holding some configuration parameters to be initialized when the database is created.
"""
_default_parameters = {
    "database.secret": lambda: str(uuid.uuid4()),
    "database.uuid": lambda: str(uuid.uuid1()),
    "database.create_date": fields.Datetime.now,
    "web.base.url": lambda: "http://localhost:%s" % config.get('http_port'),
    "base.login_cooldown_after": lambda: 10,
    "base.login_cooldown_duration": lambda: 60,
}


class IrConfigParameter(models.Model):
    """Per-database storage of configuration key-value pairs."""
    _name = 'ir.config_parameter'
    _description = 'System Parameter'
    _rec_name = 'key'
    _order = 'key'

    key = fields.Char(required=True, index=True)
    value = fields.Text(required=True)
Example #41
import uuid
import logging

from odoo import api, fields, models
from odoo.tools import config, ormcache, mute_logger, pycompat

_logger = logging.getLogger(__name__)

"""
A dictionary holding some configuration parameters to be initialized when the database is created.
"""
_default_parameters = {
    "database.secret": lambda: pycompat.text_type(uuid.uuid4()),
    "database.uuid": lambda: pycompat.text_type(uuid.uuid1()),
    "database.create_date": fields.Datetime.now,
    "web.base.url": lambda: "http://localhost:%s" % config.get('xmlrpc_port'),
}


class IrConfigParameter(models.Model):
    """Per-database storage of configuration key-value pairs."""
    _name = 'ir.config_parameter'
    _rec_name = 'key'

    key = fields.Char(required=True, index=True)
    value = fields.Text(required=True)

    _sql_constraints = [
        ('key_uniq', 'unique (key)', 'Key must be unique.')
    ]
import logging
import uuid

from odoo import api, fields, models
from odoo.tools import config, ormcache, mute_logger

_logger = logging.getLogger(__name__)
"""
A dictionary holding some configuration parameters to be initialized when the database is created.
"""
_default_parameters = {
    "database.secret":
    lambda: (str(uuid.uuid4()), ['base.group_erp_manager']),
    "database.uuid":
    lambda: (str(uuid.uuid1()), []),
    "database.create_date":
    lambda: (fields.Datetime.now(), ['base.group_user']),
    "web.base.url":
    lambda: ("http://localhost:%s" % config.get('xmlrpc_port'), []),
}


class IrConfigParameter(models.Model):
    """Per-database storage of configuration key-value pairs."""
    _name = 'ir.config_parameter'
    _rec_name = 'key'

    key = fields.Char(required=True, index=True)
    value = fields.Text(required=True)
    group_ids = fields.Many2many('res.groups',
                                 'ir_config_parameter_groups_rel',
                                 'icp_id',
                                 'group_id',
                                 string='Groups')
Example #43
import logging

from odoo import api, models, fields
from odoo.tools import config

_logger = logging.getLogger(__name__)

try:
    import firebase_admin
    from firebase_admin import credentials
    from firebase_admin import messaging
except ImportError as e:
    _logger.warning("Please install the PIP package firebase_admin")

try:
    firebase_credentials = credentials.Certificate(
        config.get("google_application_credentials"))
    firebase_app = firebase_admin.initialize_app(
        credential=firebase_credentials)
except (KeyError, ValueError) as e:
    firebase_app = None
    _logger.warning(
        "google_application_credentials is not correctly configured "
        "in odoo.conf")


class FirebaseRegistration(models.Model):
    """
    This represents a device that we can send push notifications to. It is
    characterized by the Firebase token and an optional partner.
    It provides functions to send messages to the device.
    """
Example #44
    def test_00_payslip_flow(self):
        """ Testing payslip flow and report printing """
        # I create an employee Payslip
        richard_payslip = self.env['hr.payslip'].create({
            'name': 'Payslip of Richard',
            'employee_id': self.richard_emp.id
        })

        payslip_input = self.env['hr.payslip.input'].search([('payslip_id', '=', richard_payslip.id)])
        # I assign the amount to Input data
        payslip_input.write({'amount': 5.0})

        # I verify the payslip is in draft state
        self.assertEqual(richard_payslip.state, 'draft', 'State not changed!')

        context = {
            "lang": "en_US", "tz": False, "active_model": "ir.ui.menu",
            "department_id": False, "section_id": False,
            "active_ids": [self.ref("hr_payroll.menu_department_tree")],
            "active_id": self.ref("hr_payroll.menu_department_tree")
        }
        # I click on 'Compute Sheet' button on payslip
        richard_payslip.with_context(context).compute_sheet()

        # Then I click on the 'Confirm' button on payslip
        richard_payslip.action_payslip_done()

        # I verify that the payslip is in done state
        self.assertEqual(richard_payslip.state, 'done', 'State not changed!')

        # I want to check refund payslip so I click on refund button.
        richard_payslip.refund_sheet()

        # I check on new payslip Credit Note is checked or not.
        payslip_refund = self.env['hr.payslip'].search([('name', 'like', 'Refund: '+ richard_payslip.name), ('credit_note', '=', True)])
        self.assertTrue(bool(payslip_refund), "Payslip not refunded!")

        # I want to generate a payslip from Payslip run.
        payslip_run = self.env['hr.payslip.run'].create({
            'date_end': '2011-09-30',
            'date_start': '2011-09-01',
            'name': 'Payslip for Employee'
        })

        # I create record for generating the payslip for this Payslip run.

        payslip_employee = self.env['hr.payslip.employees'].create({
            'employee_ids': [(4, self.richard_emp.ids)]
        })

        # I generate the payslip by clicking on the Generate button of the wizard.
        payslip_employee.with_context(active_id=payslip_run.id).compute_sheet()

        # I open Contribution Register and from there I print the Payslip Lines report.
        self.env['payslip.lines.contribution.register'].create({
            'date_from': '2011-09-30',
            'date_to': '2011-09-01'
        })

        # I print the payslip report
        data, format = render_report(self.env.cr, self.env.uid, richard_payslip.ids, 'hr_payroll.report_payslip', {}, {})
        if config.get('test_report_directory'):
            file(os.path.join(config['test_report_directory'], 'hr_payroll-payslip.'+ format), 'wb+').write(data)

        # I print the payslip details report
        data, format = render_report(self.env.cr, self.env.uid, richard_payslip.ids, 'hr_payroll.report_payslipdetails', {}, {})
        if config.get('test_report_directory'):
            file(os.path.join(config['test_report_directory'], 'hr_payroll-payslipdetails.'+ format), 'wb+').write(data)

        # I print the contribution register report
        context = {'model': 'hr.contribution.register', 'active_ids': [self.ref('hr_payroll.hr_houserent_register')]}
        test_reports.try_report_action(self.env.cr, self.env.uid, 'action_payslip_lines_contribution_register', context=context, our_module='hr_payroll')
Example #45
def unoconv_environ():
    env = os.environ.copy()
    uno_path = config.get('uno_path', False)
    if uno_path:
        env['UNO_PATH'] = config['uno_path']   
    return env
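
A hedged usage sketch, assuming the environment is meant to be handed to a unoconv subprocess (the command line is an example, not taken from the module):

import subprocess

# Convert a document to PDF with the UNO_PATH-aware environment prepared above.
subprocess.run(['unoconv', '-f', 'pdf', '/tmp/report.odt'],
               env=unoconv_environ(), check=True)
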
Example #46
import json
import urllib
import logging

from werkzeug import exceptions

from odoo import _, http, release
from odoo.http import request, Response
from odoo.tools import misc, config

from odoo.addons.muk_rest import validators, tools
from odoo.addons.muk_rest.tools.common import parse_value
from odoo.addons.muk_utils.tools.json import ResponseEncoder, RecordEncoder

_logger = logging.getLogger(__name__)
_csrf = config.get('rest_csrf', False)

class ModelController(http.Controller):

    #----------------------------------------------------------
    # Inspection
    #----------------------------------------------------------
    
    @http.route([
        '/api/field_names',
        '/api/field_names/<string:model>',
    ], auth="none", type='http', methods=['GET'])
    @tools.common.parse_exception
    @tools.common.ensure_database
    @tools.common.ensure_module()
    @tools.security.protected()
 def _compute_host(self):
     host = config.get('wordpress_host')
     if not host:
         raise MissingError(_('Missing wordpress_host in odoo config file'))
     for registration in self:
         registration.host = host