def new(cls, db_name, force_demo=False, status=None, update_module=False):
    """Build and return the registry for *db_name*, applying pending upgrades first.

    Wraps the native registry factory: each upgrade found by the upgrade
    manager runs pre_load(), optionally forces a module upgrade pass, then
    post_load().  When ``stop_after_upgrades`` is configured the server
    process exits (0 on success, 1 on failure) instead of returning.
    """
    upgrades = False
    try:
        code_at_creation = False
        with cls.upgrade_manager(db_name) as upgrade_manager:
            if upgrade_manager.db_in_creation:
                # Remember the code version present at DB creation so it
                # can be stamped on the registry below.
                code_at_creation = upgrade_manager.code_version
            upgrades = bool(upgrade_manager.upgrades)
            for upgrade in upgrade_manager.upgrades:
                _logger.info('loading %s upgrade...', upgrade.version)
                upgrade.pre_load()
                if upgrade.modules_to_upgrade:
                    registry = native_new(db_name)
                    upgrade.force_modules_upgrade(registry)
                    # Rebuild with update_module=True so the modules
                    # flagged above are actually upgraded.
                    native_new(db_name, update_module=True)
                upgrade.post_load()
                _logger.info('%s upgrade successfully loaded', upgrade.version)
        # Final registry build with the caller's original options.
        registry = native_new(db_name, force_demo, status, update_module)
        registry.set_db_version(code_at_creation)
        if upgrades and config.get('stop_after_upgrades'):
            _logger.info('Stopping OpenERP server')
            os._exit(0)
        return registry
    except Exception, e:
        if upgrades and config.get('stop_after_upgrades'):
            _logger.error(_get_exception_message(e))
            _logger.critical('Upgrade FAILED')
            _logger.info('Stopping OpenERP server')
            os._exit(1)
        raise e
def mq_server():
    """Consume jobs from the 'taobao_stream' beanstalk tube forever.

    Each job body is a pickled ``(name, args, kwds)`` triple; ``name`` is
    resolved through NAME2FUNC and called.  Jobs are deleted on success or
    permanent failure, and released (retried after 1s) on transient
    failures such as serialization conflicts.
    """
    beanstalk = beanstalkc.Connection(
        host=config.get('beanstalkd_interface', 'localhost'),
        port=int(config.get('beanstalkd_port', 11300)))
    beanstalk.watch('taobao_stream')
    beanstalk.ignore('default')
    while True:
        try:
            job = beanstalk.reserve()
        except:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            # BUG FIX: `1 / 1000` is integer division (== 0) under
            # Python 2 (assuming no `from __future__ import division` at
            # file top -- not visible here), which made this a busy loop.
            # Sleep 1 ms as intended.
            time.sleep(0.001)
            continue
        try:
            name, args, kwds = cPickle.loads(job.body)
            func = NAME2FUNC.get(name)
        except:
            # Undecodable job body: log and drop it permanently.
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
            continue
        try:
            func(*args, **kwds)
            job.delete()
        except TOPException:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except osv.except_osv:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except TransactionRollbackError:
            # "could not serialize access due to concurrent update":
            # transient DB conflict, retry the job after a short delay.
            job.release(delay=1)
        except DataError:
            # Invalid "UTF8" byte sequence: the payload itself is bad,
            # retrying cannot help -- drop the job.
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.release(delay=1)
        finally:
            time.sleep(0.001)  # was `1 / 1000` == 0 on Python 2
def new(cls, db_name, force_demo=False, status=None, update_module=False, pooljobs=True):
    """Build and return the registry for *db_name*, applying pending upgrades first.

    Variant of the upgrade-aware factory that also forwards *pooljobs*;
    intermediate registry builds during the upgrade loop pass
    ``pooljobs=False`` so cron workers are not started mid-upgrade.
    When ``stop_after_upgrades`` is configured the process exits
    (0 on success, 1 on failure) instead of returning.
    """
    upgrades = False
    try:
        code_at_creation = False
        with cls.upgrade_manager(db_name) as upgrade_manager:
            if upgrade_manager.db_in_creation:
                # Code version at DB creation, stamped on the registry below.
                code_at_creation = upgrade_manager.code_version
            upgrades = bool(upgrade_manager.upgrades)
            for upgrade in upgrade_manager.upgrades:
                _logger.info('loading %s upgrade...', upgrade.version)
                upgrade.pre_load()
                if upgrade.modules_to_upgrade:
                    registry = native_new(db_name, pooljobs=False)
                    upgrade.force_modules_upgrade(registry)
                    # Rebuild with update_module=True so the modules
                    # flagged above are actually upgraded.
                    native_new(db_name, update_module=True, pooljobs=False)
                upgrade.post_load()
                _logger.info('%s upgrade successfully loaded', upgrade.version)
        # Final registry build with the caller's original options.
        registry = native_new(db_name, force_demo, status, update_module, pooljobs)
        registry.set_db_version(code_at_creation)
        if upgrades and config.get('stop_after_upgrades'):
            _logger.info('Stopping OpenERP server')
            os._exit(0)
        return registry
    except Exception, e:
        if upgrades and config.get('stop_after_upgrades'):
            _logger.error(_get_exception_message(e))
            _logger.critical('Upgrade FAILED')
            _logger.info('Stopping OpenERP server')
            os._exit(1)
        raise e
class document_ftp_configuration(osv.osv_memory):
    """Configuration wizard (res.config step) that records the FTP
    host:port users should connect to for DMS access, and points the
    'browse' URL action at it."""
    _name = 'document.ftp.configuration'
    _description = 'Auto Directory Configuration'
    _inherit = 'res.config'
    _rec_name = 'host'
    _columns = {
        'host': fields.char(
            'Address', size=64,
            help="Server address or IP and port to which users should connect to for DMS access",
            required=True),
    }
    _defaults = {
        # Default to the FTP server settings from the server config file.
        'host': config.get('ftp_server_host', 'localhost') + ':' + config.get('ftp_server_port', '8021'),
    }

    def execute(self, cr, uid, ids, context=None):
        # Rewrite the document_ftp browse action URL to the chosen host,
        # namespaced by the current database name.
        conf = self.browse(cr, uid, ids[0], context=context)
        data_pool = self.pool.get('ir.model.data')
        # Update the action for FTP browse.
        aid = data_pool._get_id(cr, uid, 'document_ftp', 'action_document_browse')
        aid = data_pool.browse(cr, uid, aid, context=context).res_id
        self.pool.get('ir.actions.act_url').write(
            cr, uid, [aid],
            {'url': 'ftp://' + (conf.host or 'localhost:8021') + '/' + cr.dbname + '/'})
def open_document(backend, document_url):
    """Authenticate with the backend's Google service-account credentials
    and open the spreadsheet at *document_url*.

    On connection or lookup failure, raises a user-facing ``Warning``
    unless the ``debug_mode`` config option is set, in which case the
    original exception is re-raised for debugging.
    """
    # Auhentification (sic) -- build signed-JWT credentials from the
    # backend's stored p12 key.
    private_key = base64.b64decode(backend.p12_key)
    credentials = SignedJwtAssertionCredentials(backend.email, private_key, SCOPE)
    try:
        gc = gspread.authorize(credentials)
    except ServerNotFoundError:
        if config.get('debug_mode'):
            raise
        raise Warning(SHEET_APP, _("Check your internet connection.\n"
                                   "Impossible to establish a connection "
                                   "with Google Services"))
    try:
        document = gc.open_by_url(document_url)
    except NoValidUrlKeyFound:
        if config.get('debug_mode'):
            raise
        raise Warning(SHEET_APP, _('Google Drive: No valid key found in URL'))
    except SpreadsheetNotFound:
        if config.get('debug_mode'):
            raise
        raise Warning(SHEET_APP, _("Spreadsheet Not Found"
                                   "\n\nResolution\n----------------\n"
                                   "Check URL file & sharing options with it "
                                   "with this google user:\n\n%s" % backend.email))
    except Exception as e:
        # Catch-all for any other gspread/google failure.
        if config.get('debug_mode'):
            raise
        raise Warning(SHEET_APP, _("Google Drive: %s" % e.message))
    return document
def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
    """HTTP(S) client for the server's WebDAV interface.

    Resolves host/port from the httpd/httpsd misc config sections,
    falling back to the xmlrpc(s) options; raises when the webdav
    service is disabled in the config.
    """
    if use_ssl:
        self.host = config.get_misc('httpsd', 'interface', False)
        self.port = config.get_misc('httpsd', 'port', 8071)
        if not self.host:
            self.host = config.get('xmlrpcs_interface')
            self.port = config.get('xmlrpcs_port')
    else:
        self.host = config.get_misc('httpd', 'interface')
        self.port = config.get_misc('httpd', 'port', 8069)
        if not self.host:
            self.host = config.get('xmlrpc_interface')
            # xmlrpc_port may be unset; keep the httpd default then.
            self.port = config.get('xmlrpc_port') or self.port
    if self.host == '0.0.0.0' or not self.host:
        # Wildcard bind address is not connectable from a client;
        # use loopback instead.
        self.host = '127.0.0.1'
    self.port = int(self.port)
    if not config.get_misc('webdav','enable',True):
        raise Exception("WebDAV is disabled, cannot continue")
    self.davpath = '/' + config.get_misc('webdav','vdir','webdav')
    self.user = user
    self.passwd = passwd
    self.dbg = dbg
    self.timeout = timeout or 5.0  # seconds, tests need to respond pretty fast!
    self.hdrs = {}
    if useragent:
        self.set_useragent(useragent)
def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
    """HTTP(S) client for the server's WebDAV interface.

    Resolves host/port from the httpd/httpsd misc config sections,
    falling back to the xmlrpc(s) options; raises when the webdav
    service is disabled in the config.
    """
    if use_ssl:
        self.host = config.get_misc("httpsd", "interface", False)
        self.port = config.get_misc("httpsd", "port", 8071)
        if not self.host:
            self.host = config.get("xmlrpcs_interface")
            self.port = config.get("xmlrpcs_port")
    else:
        self.host = config.get_misc("httpd", "interface")
        self.port = config.get_misc("httpd", "port", 8069)
        if not self.host:
            self.host = config.get("xmlrpc_interface")
            # xmlrpc_port may be unset; keep the httpd default then.
            self.port = config.get("xmlrpc_port") or self.port
    if self.host == "0.0.0.0" or not self.host:
        # Wildcard bind address is not connectable from a client;
        # use loopback instead.
        self.host = "127.0.0.1"
    self.port = int(self.port)
    if not config.get_misc("webdav", "enable", True):
        raise Exception("WebDAV is disabled, cannot continue")
    self.davpath = "/" + config.get_misc("webdav", "vdir", "webdav")
    self.user = user
    self.passwd = passwd
    self.dbg = dbg
    self.timeout = timeout or 5.0  # seconds, tests need to respond pretty fast!
    self.hdrs = {}
    if useragent:
        self.set_useragent(useragent)
def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
    """HTTP(S) client for the server's WebDAV interface.

    Resolves host/port from the httpd/httpsd misc config sections,
    falling back to the xmlrpc(s) options; raises when the webdav
    service is disabled in the config.
    """
    if use_ssl:
        self.host = config.get_misc('httpsd', 'interface', False)
        self.port = config.get_misc('httpsd', 'port', 8071)
        if not self.host:
            self.host = config.get('xmlrpcs_interface')
            self.port = config.get('xmlrpcs_port')
    else:
        self.host = config.get_misc('httpd', 'interface')
        self.port = config.get_misc('httpd', 'port', 8069)
        if not self.host:
            self.host = config.get('xmlrpc_interface')
            # xmlrpc_port may be unset; keep the httpd default then.
            self.port = config.get('xmlrpc_port') or self.port
    if self.host == '0.0.0.0' or not self.host:
        # Wildcard bind address is not connectable from a client;
        # use loopback instead.
        self.host = '127.0.0.1'
    self.port = int(self.port)
    if not config.get_misc('webdav', 'enable', True):
        raise Exception("WebDAV is disabled, cannot continue")
    self.davpath = '/' + config.get_misc('webdav', 'vdir', 'webdav')
    self.user = user
    self.passwd = passwd
    self.dbg = dbg
    self.timeout = timeout or 5.0  # seconds, tests need to respond pretty fast!
    self.hdrs = {}
    if useragent:
        self.set_useragent(useragent)
def send_email(self, cr, uid, message, mail_server_id=None, smtp_server=None,
               smtp_port=None, smtp_user=None, smtp_password=None,
               smtp_encryption=None, smtp_debug=False, context=None):
    """Send *message*, honouring test/staging mail options.

    ``block_email``   -- drop every outgoing mail (pretend success).
    ``redirect_mail`` -- rewrite the To/Cc/Bcc headers to the
                         configured address before sending.
    Otherwise delegates to the standard mail server implementation.
    """
    # BUG FIX: the original called the non-existent message.key() and
    # then referenced an undefined name ``keys`` (NameError as soon as
    # redirect_mail was set).  Fetch the header names once, correctly.
    keys = message.keys()
    if config.get('block_email', False):
        # Silently drop the mail; report success to the caller.
        return True
    if config.get('redirect_mail', False):
        redirect_address = config.get('redirect_mail')
        if "To" in keys:
            message.replace_header('To', redirect_address)
        if "Cc" in keys:
            message.replace_header('Cc', redirect_address)
        if "Bcc" in keys:
            message.replace_header('Bcc', redirect_address)
    return super(MailServer, self).send_email(
        cr, uid, message, mail_server_id, smtp_server, smtp_port,
        smtp_user, smtp_password, smtp_encryption, smtp_debug, context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
def start_server():
    """Start the document FTP server in a daemon thread.

    Skipped entirely in multiprocess mode.  When ftp_server_host is the
    literal string 'none' the server is not started.  A passive-port
    range may be supplied as 'low:high' in ftp_server_passive_ports.
    """
    if openerp.multi_process:
        _logger.info("FTP disabled in multiprocess mode")
        return
    HOST = config.get('ftp_server_host', '127.0.0.1')
    PORT = int(config.get('ftp_server_port', '8021'))
    PASSIVE_PORTS = None
    pps = config.get('ftp_server_passive_ports', '').split(':')
    if len(pps) == 2:
        PASSIVE_PORTS = int(pps[0]), int(pps[1])

    class ftp_server(threading.Thread):
        def run(self):
            autho = authorizer.authorizer()
            ftpserver.FTPHandler.authorizer = autho
            ftpserver.max_cons = 300
            ftpserver.max_cons_per_ip = 50
            ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
            if PASSIVE_PORTS:
                ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS
            # Route the FTP library's logging into our logger; drop
            # per-line logs entirely.
            ftpserver.log = lambda msg: _logger.info(msg)
            ftpserver.logline = lambda msg: None
            ftpserver.logerror = lambda msg: _logger.error(msg)
            ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler)
            ftpd.serve_forever()

    if HOST.lower() == 'none':
        _logger.info("\n Server FTP Not Started\n")
    else:
        _logger.info("\n Serving FTP on %s:%s\n" % (HOST, PORT))
        ds = ftp_server()
        ds.daemon = True
        ds.start()
def check_credentials(self, cr, uid, password):
    """Validate *password* with one-time-password (OTP) enforcement.

    Order of checks:
      1. the master ``admin_passwd`` always passes,
      2. with ``otp_override`` set, fall back to the standard check,
      3. otherwise both the OTP code (from the request params) and the
         standard credential check must pass.
    """
    # BUG FIX: the original emitted two _logger.warn() calls whose
    # %-formatted arguments *executed* check_otp() and
    # super().check_credentials() eagerly, just to log their results.
    # Besides leaking authentication details into the log, the eager
    # super() call raised on a wrong password before the admin_passwd
    # override below could run.  The debug logging has been removed.
    if password == config.get('admin_passwd', False):
        # admin_passwd overrides
        return
    elif config.get('otp_override', False):
        return super(res_users, self).check_credentials(cr, uid, password)
    else:
        return (self.check_otp(cr, uid, password, request.params.get('otp_code'))
                and super(res_users, self).check_credentials(cr, uid, password))
def _start_stream_thread(self, cr, uid, shops):
    """Spawn taobao stream threads for every shop that has streaming enabled.

    Creates ``taobao_stream_thread_limit`` (default 1) threads per
    eligible shop, named 'taobao_app_<app_key><i>'.  A no-op when the
    ``taobao_stream_service`` config option is disabled.
    """
    if not config.get('taobao_stream_service', True):
        return
    for shop in shops:
        if shop.taobao_app_key and shop.enable_taobao_stream:
            for i in range(int(config.get('taobao_stream_thread_limit', 1))):
                # Thread name uniquely identifies the shop + slot.
                shop_thread_name = 'taobao_app_' + shop.taobao_app_key + str(i)
                self._create_stream_thread(cr, uid, shop_thread_name, shop)
def _func(func, *args, **kwds):
    """Enqueue a call as a pickled (name, args, kwds) triple on the
    'taobao_stream' beanstalk tube.

    The module-level connection is created lazily on first use.
    NOTE(review): ``name`` is not defined in this function -- presumably
    captured from an enclosing decorator scope; confirm against the
    surrounding code.
    """
    global beanstalk
    if beanstalk is None:
        beanstalk = beanstalkc.Connection(
            host=config.get('beanstalkd_interface', 'localhost'),
            port=int(config.get('beanstalkd_port', 11300)))
        beanstalk.use('taobao_stream')
    s = cPickle.dumps((name, args, kwds))
    beanstalk.put(s)
def _start_stream_thread(self, cr, uid, shops):
    """Spawn taobao stream threads for every shop that has streaming enabled.

    Creates ``taobao_stream_thread_limit`` (default 1) threads per
    eligible shop, named 'taobao_app_<app_key><i>'.  A no-op when the
    ``taobao_stream_service`` config option is disabled.
    """
    if not config.get('taobao_stream_service', True):
        return
    for shop in shops:
        if shop.taobao_app_key and shop.enable_taobao_stream:
            for i in range(int(config.get('taobao_stream_thread_limit', 1))):
                # Thread name uniquely identifies the shop + slot.
                shop_thread_name = 'taobao_app_' + shop.taobao_app_key + str(
                    i)
                self._create_stream_thread(cr, uid, shop_thread_name, shop)
def edit_database(self, cr, uid, obj, context=None):
    """Return an act_url action opening the tenant database's login page.

    The host is derived from the database name (underscores become
    dots); databases without a '.com' host fall back to the configured
    local IP.  NOTE(review): the URL embeds the tenant admin key in
    plain text as a query parameter -- confirm this is acceptable.
    """
    host = obj.database.replace('_', '.')
    if '.com' not in host:
        host = config.get('local_ip')
    params = (host, obj.database, config.get('tenant_passwd'))
    url = 'http://%s/login?db=%s&login=admin&key=%s' % params
    return {
        'type': 'ir.actions.act_url',
        'target': 'self',
        'name': 'Edit Database',
        'url': url
    }
def count_lines_of_code():
    """Run `cloc` over each addons path and append its report to a
    '<dirname>.cloc' file inside that path.

    Returns False (and warns) when no addons_path is configured.
    """
    _logger.info('Counting lines of code...')
    if not config.get('addons_path'):
        _logger.warning('Incomplete config file: no addons_path...')
        return False
    # NOTE(review): stripping every space would break a path containing
    # spaces -- presumably addons_path entries never do; confirm.
    for path in config.get('addons_path').replace(' ', '').split(','):
        filename = '%s.cloc' % path.split('/')[-1]
        with open(os.path.join(path, filename), 'a') as f:
            cmd = ['cloc', path]
            try:
                f.write(subprocess.check_output(cmd))
            except subprocess.CalledProcessError, e:
                # cloc exited non-zero: keep whatever output it produced.
                f.write(e.output)
def initialize_raven(config):
    """Create and return a raven (Sentry) client from Odoo config options.

    When ``sentry_enabled`` is set, also installs an OdooSentryHandler on
    the logging system at the configured level, optionally filtering out
    user-facing errors.
    """
    client_dsn = config.get('sentry_client_dsn', '').strip()
    enabled = config.get('sentry_enabled', True)
    report_user_errors = config.get('sentry_report_user_errors', False)
    include_extra_context = config.get('sentry_include_context', True)
    level = config.get('sentry_logging_level', DEFAULT_LOG_LEVEL)
    environment = config.get('sentry_environment')
    auto_log_stacks = config.get('sentry_auto_log_stacks', False)
    odoo_dir = config.get('sentry_odoo_dir')
    client = Client(
        client_dsn,
        # The sys.excepthook integration is skipped; logging is wired up
        # explicitly below instead.
        install_sys_hook=False,
        release=get_odoo_commit(odoo_dir),
        environment=environment,
        auto_log_stacks=auto_log_stacks,
    )
    # Fall back to the default level for unknown level names.
    if level not in LOG_LEVEL_MAP:
        level = DEFAULT_LOG_LEVEL
    if enabled:
        handler = OdooSentryHandler(
            include_extra_context,
            client=client,
            level=LOG_LEVEL_MAP[level],
        )
        if not report_user_errors:
            handler.addFilter(UserErrorFilter())
        setup_logging(handler)
    return client
def select_products(self, cr, uid, ids, context=None):
    """Pull the seller and all of its commodity products from the master
    catalog, then open a 'partner.to.request' form pre-filled with one
    line per product (first variant, quantity = its MOQ).
    """
    obj_ids = self.pull_from_catalog(cr, uid, ids[0])
    obj = self.browse(cr, uid, obj_ids[0])
    pt = self.pool.get('product.template.commodity')
    # Product ids live on the master database; query it remotely.
    product_ids = connector.call(config.get('db_master'),
                                 'product.template.commodity', 'search',
                                 [('seller_id', '=', ids[0])])
    lines = []
    for pid in product_ids:
        lpid = pt.pull_from_catalog(cr, uid, pid)
        product = pt.browse(cr, uid, lpid[0])
        lines.append((0, 0, {
            'product_commodity_id': product.id,
            'variant_commodity_id': product.variant_ids[0].id,
            'quantity': product.variant_ids[0].moq
        }))
    vals = {'name': obj.name, 'partner_id': obj.id, 'line_ids': lines}
    ptr_id = self.pool.get('partner.to.request').create(cr, uid, vals)
    return {
        'type': 'ir.actions.act_window',
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'partner.to.request',
        'res_id': ptr_id,
        'target': 'new'
    }
def onchange_name(self, cr, uid, ids, name=False, context=None):
    """On change of the picking *name*: locate exactly one outgoing
    picking tied to a sale order and pre-fill the wizard values.

    Returns {} when no name is given; returns a warning (and clears the
    name) when zero or multiple pickings match, or when the matched
    picking is already done/cancelled.  Otherwise returns sale/invoice
    data, computed weights, and one move line per stock move.
    """
    if (not name):
        return {}
    if name:
        picking_obj = self.pool.get('stock.picking.out')
        mov_obj = self.pool.get('stock.move')
        sale_obj = self.pool.get('sale.order')
        picking_ids = picking_obj.search(cr, uid, [
            ('name', '=', name),
            ('sale_id', '!=', False)
        ], context=context)
        if not picking_ids:
            # "no matching sale order found"
            warning = {
                'title': _('未找到匹配的销售订单'),
                'message': name,
            }
            return {'warning': warning, 'value': {'name': False}}
        if isinstance(picking_ids, (int, long)):
            picking_ids = [picking_ids]
        if len(picking_ids) != 1:
            # "multiple matching sale orders found"
            warning = {
                'title': _('找到多个匹配的销售订单'),
                'message': name,
            }
            return {'warning': warning, 'value': {'name': False}}
        picking_data = picking_obj.browse(cr, uid, picking_ids[0], context=context)
        if picking_data.state in ['done', 'cancel']:
            # "this order was already processed, please re-enter"
            warning = {
                'title': _('该订单已处理,请重新录入'),
                'message': name,
            }
            return {'warning': warning, 'value': {'name': False}}
        vals = {}
        vals.update({
            'sale_order_id': picking_data.sale_id.id,
            'picking_id': picking_ids[0],
            'inv_amount': picking_data.sale_id.inv_amount,
            'inv_content': picking_data.sale_id.inv_content,
            'inv_payee': picking_data.sale_id.inv_payee,
            'inv_type': picking_data.sale_id.inv_type,
            'inv_state': picking_data.sale_id.inv_state,
            'okgj_box': False,
            'goods_weight': picking_data.sale_id.order_weight,
            # Total weight = goods + configured box weight (grams).
            'all_weight': picking_data.sale_id.order_weight + float(config.get('okgj_box_weight', '3840'))})
        move_ids = []
        for one_line in picking_data.move_lines:
            # One editable wizard line per stock move, starting untreated
            # with the full quantity still to verify.
            move_ids.append((0, 0, {
                'move_id': one_line.id,
                'product_id': one_line.product_id.id,
                'prodlot_id': one_line.prodlot_id.id or False,
                'product_qty': one_line.product_qty,
                'product_verify_qty': 0,
                'product_extra_qty': 0,
                'treat_state': 'todo',
                'product_diff_qty': one_line.product_qty,}))
        vals.update({'move_ids': move_ids})
        return {'value': vals}
def _start_worker_thread(self, cr, uid):
    """Start the pool of taobao worker daemon threads.

    Spawns up to ``taobao_worker_thread_limit`` (default 4) threads named
    'taobao_worker_<i>', each running ``mq_server``; slots whose thread
    already exists (matched by name among live threads) are skipped.
    """
    for i in range(int(config.get('taobao_worker_thread_limit', 4))):
        thread_name = 'taobao_worker_%s' % str(i)
        thread_exist = False
        for thread in threading.enumerate():
            if thread.getName() == thread_name:
                thread_exist = True
                break
        if not thread_exist:
            from taobao_base import mq_server
            t = threading.Thread(target=mq_server, args=[], name=thread_name)
            # Daemon thread: don't block interpreter shutdown.
            t.setDaemon(True)
            t.start()
            # BUG FIX: `50/1000` is integer division (== 0) under
            # Python 2 (assuming no future-division import at file top),
            # so the intended 50 ms stagger between thread starts never
            # happened.
            time.sleep(0.05)
def _amount_in_words(self, currency_id):
    """Return self.amount spelled out for check printing, e.g.
    '**** one hundred and 23/100 Dollars'.

    Language preference: partner's lang, then context lang, then the
    server config.  Short results are left-padded with '*' to roughly
    80 characters.
    """
    context = self._context.copy()
    lang = context.get('lang', config.get('lang', None))
    if self.partner_id:
        lang = self.partner_id.lang or lang
    if lang:
        context['lang'] = lang
    env = self.env(context=context)
    amount = self.amount
    currency = self.env['res.currency'].with_env(env).browse(currency_id)
    if lang:
        try:
            amount_in_word = num2words(int(amount), lang=lang)
        except NotImplementedError:
            # num2words lacks this language; fall back to its default.
            amount_in_word = num2words(int(amount))
    else:
        amount_in_word = num2words(int(amount))
    currency_name = currency.print_on_check
    # Cents expressed as a /100 fraction, per check-writing convention.
    cents = int(amount * 100) % 100
    res = _(u'{amount} and {cents}/100 {currency}').format(
        amount=amount_in_word,
        cents=cents,
        currency=currency_name,
    )
    # Pad with asterisks to deter alteration of the printed amount.
    if len(res) < 79:
        res = u" ".join([u"*" * (80 - len(res)), res])
    return res
def get_dbname(self):
    """Return the database name API calls should be served against.

    Prefers the ``api_db`` config option; otherwise takes the first
    database the server lists.  An empty/falsy name comes back as None.
    """
    if config.get('api_db'):
        dbname = config['api_db']
    else:
        # No explicit choice: default to the first available database.
        dbname = openerp.service.db.exp_list(True)[0]
    return dbname or None
def select_products(self, cr, uid, ids, context=None):
    """Pull the seller and all of its commodity products from the master
    catalog, then open a 'partner.to.request' form pre-filled with one
    line per product (first variant, quantity = its MOQ).
    """
    obj_ids = self.pull_from_catalog(cr, uid, ids[0])
    obj = self.browse(cr, uid, obj_ids[0])
    pt = self.pool.get("product.template.commodity")
    # Product ids live on the master database; query it remotely.
    product_ids = connector.call(
        config.get("db_master"), "product.template.commodity", "search",
        [("seller_id", "=", ids[0])]
    )
    lines = []
    for pid in product_ids:
        lpid = pt.pull_from_catalog(cr, uid, pid)
        product = pt.browse(cr, uid, lpid[0])
        lines.append(
            (
                0,
                0,
                {
                    "product_commodity_id": product.id,
                    "variant_commodity_id": product.variant_ids[0].id,
                    "quantity": product.variant_ids[0].moq,
                },
            )
        )
    vals = {"name": obj.name, "partner_id": obj.id, "line_ids": lines}
    ptr_id = self.pool.get("partner.to.request").create(cr, uid, vals)
    return {
        "type": "ir.actions.act_window",
        "view_type": "form",
        "view_mode": "form",
        "res_model": "partner.to.request",
        "res_id": ptr_id,
        "target": "new",
    }
def _default_secret_key(self, record):
    """ Get secret key, this method should be overridden to provide
    custom secret key for each field instance if user wants to get
    secret key somewhere other than configuration file.

    + Secret key can be auto-generated as shown below:
    ```
    os.urandom(BLOCK_SIZE) # where BLOCK SIZE is the block size of cipher object
    ```
    + Using CLI
    ```
    dd if=/dev/urandom bs=16 count=1 2>/dev/null | md5sum | cut -d' ' -f1
    ```
    @param record: recordset
    @return: the configured key as unicode
    @raise Warning: when field_secure_secret_key is not configured
    """
    _logger.debug("Calling default get secret key")
    key_config = config.get('field_secure_secret_key')
    # Config values may be byte strings; normalise to unicode.
    key_config = key_config and key_config.decode('utf-8')
    if not key_config:
        raise Warning(
            'Missing secret key',
            'No secret key configured'
        )
    return key_config
def _geoip_setup_resolver(self):
    """Lazily open the GeoIP database named by the ``geoip_database``
    config option.  Load failures are logged, never raised."""
    if self._geoip_resolver is None:
        geofile = config.get('geoip_database')
        try:
            # `or False` keeps a failed open from staying None, so we
            # don't retry the load on every request.
            self._geoip_resolver = GeoIPResolver.open(geofile) or False
        except Exception as e:
            logger.warning('Cannot load GeoIP: %s', ustr(e))
def publish_product(self, cr, uid, ids, context=None):
    """Publish the first product in *ids* through the cenit flow, marking
    it 'sellable' when the flow execution succeeds.  Returns True.
    """
    product = self.browse(cr, uid, ids[0])
    # BUG FIX: context defaults to None, so context.update(...) raised
    # AttributeError whenever no context was supplied; it also mutated
    # the caller's dict in place.  Work on a copy instead.
    context = dict(context or {})
    context.update({'partner_db': config.get('main_database')})
    res = self.pool.get('cenit.flow').execute(cr, uid, product, context)
    if res:
        self.write(cr, uid, product.id, {'state': 'sellable'})
    return True
def pull_from_catalog_by(self, cr, uid, field, value):
    """Find the catalog record on the master DB where *field* == *value*
    and pull it into the local database.

    @raise ValidationError: when no catalog record matches
    """
    domain = [(field, '=', value)]
    oids = connector.call(config.get('db_master'), self._name, 'search', domain)
    if not oids:
        raise ValidationError('''It could not find the object %s in catalog''' % value)
    # Only the first match is pulled.
    return self.pull_from_catalog(cr, uid, oids[0])
def _func(func, *args, **kwds):
    """Enqueue a call as a pickled (name, args, kwds) triple on the
    'taobao_stream' beanstalk tube.

    The module-level connection is created lazily on first use.
    NOTE(review): ``name`` is not defined in this function -- presumably
    captured from an enclosing decorator scope; confirm against the
    surrounding code.
    """
    global beanstalk
    if beanstalk is None:
        beanstalk = beanstalkc.Connection(host=config.get('beanstalkd_interface', 'localhost'), port= int(config.get('beanstalkd_port', 11300)))
        beanstalk.use('taobao_stream')
    s = cPickle.dumps((name, args, kwds))
    beanstalk.put(s)
def search_read(self, cr, uid, domain=None, fields=None, offset=0, limit=None, order=None, context=None):
    """search_read that can be proxied to a remote catalog database.

    When the context names a 'catalog_db' config key, the call is
    forwarded to that database through the connector; otherwise the
    normal local implementation runs.
    """
    args = (domain, fields, offset, limit, order, context)
    context = context or {}
    if context.get('catalog_db', False):
        # context['catalog_db'] is a config *key*, not the DB name itself.
        db = config.get(context['catalog_db'])
        return connector.call(db, self._name, 'search_read', *args)
    return super(CatalogMixin, self).search_read(cr, uid, *args)
def onchange_partner_in_marketplace(self):
    """When a partner is selected, pull all of that seller's commodity
    templates from the master catalog database into the local one."""
    if self.partner_id:
        domain = [('seller_id.name', '=', self.partner_id.name)]
        pids = connector.call(config.get('db_master'), 'product.template.commodity', 'search', domain)
        for pid in pids:
            self.env['product.template.commodity'].pull_from_catalog(pid)
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
    """read_group that can be proxied to a remote catalog database.

    When the context names a 'catalog_db' config key, the call is
    forwarded to that database through the connector; otherwise the
    normal local implementation runs.
    """
    args = (domain, fields, groupby, offset, limit, context, orderby, lazy)
    context = context or {}
    if context.get('catalog_db', False):
        # context['catalog_db'] is a config *key*, not the DB name itself.
        db = config.get(context['catalog_db'])
        return connector.call(db, self._name, 'read_group', *args)
    return super(CatalogMixin, self).read_group(cr, uid, *args)
def _set_website_url(self, cr, uid, context=None):
    """ Set the parameter listed in _default_parameters.

    Seeds 'website_payment.base.url' (only if absent) with a localhost
    URL built from the configured xmlrpc port.
    """
    key = 'website_payment.base.url'
    ids = self.search(cr, SUPERUSER_ID, [('key','=',key)])
    if not ids:
        self.set_param(cr, SUPERUSER_ID, key, "http://localhost:%s" % config.get('xmlrpc_port'))
def exists_partner(self):
    """Look up this record's partner among res.users on the master DB.

    Returns the first matching {'database', 'partner_id'} dict, or
    False when the partner has no user row there.
    """
    assert hasattr(self, 'partner_id'), "The object doesn't have partner_id"
    data = connector.call(config.get('db_master'), 'res.users', 'search_read', [('organization', '=', self.partner_id.name)], ['database', 'partner_id'])
    return data and data[0] or False
def mq_server():
    """Consume jobs from the 'taobao_stream' beanstalk tube forever.

    Each job body is a pickled ``(name, args, kwds)`` triple; ``name`` is
    resolved through NAME2FUNC and called.  Jobs are deleted on success
    or permanent failure; serialization conflicts are released for retry.
    Unknown failures are deleted (dropped) in this variant.
    """
    beanstalk = beanstalkc.Connection(host=config.get('beanstalkd_interface', 'localhost'), port= int(config.get('beanstalkd_port', 11300)))
    beanstalk.watch('taobao_stream')
    beanstalk.ignore('default')
    while True:
        try:
            job = beanstalk.reserve()
        except:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            # BUG FIX: `10/1000` is integer division (== 0) under
            # Python 2 (assuming no future-division import at file top),
            # making this a busy loop.  Sleep 10 ms as intended.
            time.sleep(0.01)
            continue
        try:
            name, args, kwds = cPickle.loads(job.body)
            func = NAME2FUNC.get(name)
        except:
            # Undecodable job body: log and drop it permanently.
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
            continue
        try:
            func(*args, **kwds)
            job.delete()
        except TOPException:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except osv.except_osv:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except TransactionRollbackError:
            # "could not serialize access due to concurrent update":
            # transient DB conflict, retry the job after a short delay.
            job.release(delay = 1)
        except DataError:
            # Invalid "UTF8" byte sequence: payload itself is bad,
            # retrying cannot help -- drop the job.
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            job.delete()
        except:
            import traceback
            exc = traceback.format_exc()
            _logger.error(exc)
            # This variant deliberately drops (not retries) unknown
            # failures to avoid poison-message loops.
            job.delete()
        finally:
            time.sleep(0.01)  # was `10/1000` == 0 on Python 2
def _get_steps_path(self):
    """Return the release-steps directory from the 'releases' config key.

    Prints an explanatory message and exits the process when the key is
    not configured.
    """
    path = config.get("releases", None)
    if path:
        return path
    # Hard requirement: without it there is nothing to run against.
    print(
        "Setup 'releases' in config file, it should be"
        " the absoulte path of release steps directory"
    )
    sys.exit(1)
def _func(*args, **kwds):
    """Wrapper: run the wrapped chart renderer *func*, then rewrite the
    SVG it produced to use a configurable font and convert the buffer to
    PDF in place."""
    func(*args, **kwds)
    # The renderer wrote SVG into this file-like output buffer.
    fio = args[0]._T__out_fname
    svg = fio.getvalue()
    # Force a configurable font (default Simsun) for CJK rendering.
    svg = re.sub(
        r'font-family:[\w]+;',
        'font-family:%s;' % (FONTNAME or config.get('pychart_ttfont_name', 'Simsun'), ),
        svg)
    # Rewind/clear the buffer and replace its contents with the PDF.
    fio.truncate(0)
    cairosvg.surface.PDFSurface.convert(bytestring=svg, write_to=fio)
def change_password(username, password, connection=None):
    """Set *username*'s password (stored as an MD5 hex digest) in the
    external Docguarder database over ODBC.

    *connection*, when given, supplies the 'dsn'/'username'/'password'
    ODBC credentials; otherwise they are read from the guarder_* config
    options.
    """
    connect_params = connection
    if not connect_params:
        connect_params = {
            'dsn': config.get('guarder_dsn', ''),
            'username': config.get('guarder_sa', ''),
            'password': config.get('guarder_password', ''),
        }
    # NOTE(review): MD5 is a weak password hash; kept for compatibility
    # with the existing hs_user schema.
    new_password = hashlib.md5(password).hexdigest()
    con_string = 'DSN=%(dsn)s;UID=%(username)s;PWD=%(password)s;DATABASE=Docguarder;' % connect_params
    cnxn = pyodbc.connect(con_string)
    cursor = cnxn.cursor()
    # SECURITY FIX: the original interpolated username (and the hash)
    # straight into the SQL string via `% vars()`, allowing SQL
    # injection through *username*.  Use a parameterized query instead.
    cursor.execute(
        "UPDATE dbo.hs_user set col_pword=? where col_loginname=?",
        new_password, username)
    cnxn.commit()
def _set_website_url(self, cr, uid, context=None):
    """ Set the parameter listed in _default_parameters.

    Seeds 'website_payment.base.url' (only if absent) with a localhost
    URL built from the configured xmlrpc port.
    """
    key = 'website_payment.base.url'
    ids = self.search(cr, SUPERUSER_ID, [('key', '=', key)])
    if not ids:
        self.set_param(cr, SUPERUSER_ID, key, "http://localhost:%s" % config.get('xmlrpc_port'))
class CeleryConfig():
    """Celery configuration built from Odoo config: broker URL plus the
    default queue and one extra queue per non-blank entry in the
    comma-separated ``celery_queues`` option."""
    BROKER_URL = config.get('celery_broker_url')
    CELERY_DEFAULT_QUEUE = celery_default_queue
    CELERY_QUEUES = (Queue(celery_default_queue, Exchange(celery_default_queue), routing_key=celery_default_queue), )
    # Extend the tuple with one Queue per configured extra queue name.
    for queue in filter(lambda q: q.strip(), celery_queues.split(",")):
        CELERY_QUEUES = CELERY_QUEUES + \
            (Queue(queue, Exchange(queue), routing_key=queue),)
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
    """read that can be proxied to a remote catalog database.

    When the context names a 'catalog_db' config key, the call is
    forwarded to that database through the connector; otherwise the
    normal local implementation runs.
    """
    args = (ids, fields, context, load)
    context = context or {}
    if context.get('catalog_db', False):
        # context['catalog_db'] is a config *key*, not the DB name itself.
        db = config.get(context['catalog_db'])
        res = connector.call(db, self._name, 'read', *args)
        return res
    return super(CatalogMixin, self).read(cr, uid, *args)
def get_oauth2_redirect_uri(self, request, db):
    """Return the OAuth2 redirect URI.

    Prefers an explicit 'auth_oauth2.redirect_uri' config option;
    otherwise builds one from the database's web.base.url parameter
    plus the controller path and login method.
    """
    url = config.get('auth_oauth2.redirect_uri', False)
    if not url:
        registry = RegistryManager.get(db)
        ir_conf = registry.get('ir.config_parameter')
        with registry.cursor() as cr:
            # get_param is read as the superuser (uid 1).
            url = (ir_conf.get_param(cr, 1, 'web.base.url')
                   + CONTROLER_PATH + '/' + LOGIN_METHOD)
    return url
def send_email(self, cr, uid, message, mail_server_id=None, smtp_server=None,
               smtp_port=None, smtp_user=None, smtp_password=None,
               smtp_encryption=None, smtp_debug=False, context=None):
    """Send *message*, honouring test/staging mail options.

    ``block_email``   -- drop every outgoing mail (pretend success).
    ``redirect_mail`` -- rewrite the To/Cc/Bcc headers to the
                         configured address before sending.
    Otherwise delegates to the standard mail server implementation.
    """
    # BUG FIX: the original called the non-existent message.key() and
    # then referenced an undefined name ``keys`` (NameError as soon as
    # redirect_mail was set).  Fetch the header names once, correctly.
    keys = message.keys()
    if config.get('block_email', False):
        # Silently drop the mail; report success to the caller.
        return True
    if config.get('redirect_mail', False):
        redirect_address = config.get('redirect_mail')
        if "To" in keys:
            message.replace_header('To', redirect_address)
        if "Cc" in keys:
            message.replace_header('Cc', redirect_address)
        if "Bcc" in keys:
            message.replace_header('Bcc', redirect_address)
    return super(MailServer, self).send_email(
        cr, uid, message, mail_server_id, smtp_server, smtp_port,
        smtp_user, smtp_password, smtp_encryption, smtp_debug, context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
def unpublish_from_master(self):
    """Delete this record's counterpart(s) on the master database and,
    on success, clear the local 'published' flag.

    The target model defaults to this model but can be overridden via a
    'master_model' key in the context.  Matching is by name.
    """
    db_master = config.get('db_master')
    model = self.env.context.get('master_model', self._name)
    domain = [('name', '=', self.name)]
    oids = connector.call(db_master, model, 'search', domain)
    res = connector.call(db_master, model, 'unlink', oids)
    if res:
        self.write({'published': False})
    return res
def telegram_spawn():
    """Start the Telegram bot polling thread, if an API key is configured.

    Stores the bot in the module-level BOT and runs telegram_thread in a
    daemon thread; logs a warning when no API key is set.
    """
    global BOT
    # NOTE(review): this throwaway strptime call presumably pre-imports
    # _strptime to dodge the known strptime-in-threads import race
    # (python.org issue 7980) -- confirm.
    datetime.datetime.strptime('2012-01-01', '%Y-%m-%d')
    telegram_apikey = config.get('telegram_apikey')
    telegram_none_stop = config.get('telegram_none_stop', False)
    telegram_interval = config.get('telegram_interval', 0)
    telegram_block = config.get('telegram_block', True)
    if telegram_apikey:
        BOT = telebot.TeleBot(telegram_apikey)

        def target():
            # Closure forwards the polling options to the worker loop.
            telegram_thread(BOT, telegram_none_stop, telegram_interval, telegram_block)

        t = threading.Thread(target=target, name="openerp.service.telegrambot")
        t.setDaemon(True)
        t.start()
        _logger.info('ThreadedServer:: telegram-bot started!')
    else:
        _logger.warning("Telegram server not started! Please specify an bot api key!")
def _dispatch(self):
    """Website-aware HTTP dispatch.

    Resolves the handler, enables website processing (including for 404s,
    which may be language-prefixed routes), resolves GeoIP once per
    session, authenticates, sets up language/timezone context, and
    strips a leading language code from unmatched paths before
    rerouting.  Nested structure below was reconstructed from a
    line-collapsed source -- review indentation against upstream.
    """
    first_pass = not hasattr(request, 'website')
    request.website = None
    func = None
    try:
        func, arguments = self._find_handler()
        request.website_enabled = func.routing.get('website', False)
    except werkzeug.exceptions.NotFound:
        # either we have a language prefixed route, either a real 404
        # in all cases, website processes them
        request.website_enabled = True
    request.website_multilang = request.website_enabled and func and func.routing.get('multilang', True)
    if 'geoip' not in request.session:
        record = {}
        if self.geo_ip_resolver is None:
            try:
                import GeoIP
                # updated database can be downloaded on MaxMind website
                # http://dev.maxmind.com/geoip/legacy/install/city/
                geofile = config.get('geoip_database', '/usr/share/GeoIP/GeoLiteCity.dat')
                if os.path.exists(geofile):
                    self.geo_ip_resolver = GeoIP.open(geofile, GeoIP.GEOIP_STANDARD)
                else:
                    self.geo_ip_resolver = False
                    logger.warning('GeoIP database file %r does not exists', geofile)
            except ImportError:
                self.geo_ip_resolver = False
        if self.geo_ip_resolver and request.httprequest.remote_addr:
            record = self.geo_ip_resolver.record_by_addr(request.httprequest.remote_addr) or {}
        # Cache the (possibly empty) GeoIP record on the session.
        request.session['geoip'] = record
    if request.website_enabled:
        if func:
            self._authenticate(func.routing['auth'])
        else:
            self._auth_method_public()
        request.redirect = lambda url: werkzeug.utils.redirect(url_for(url))
        request.website = request.registry['website'].get_current_website(request.cr, request.uid, context=request.context)
        if first_pass:
            request.lang = request.website.default_lang_code
        request.context['lang'] = request.lang
        if not request.context.get('tz'):
            request.context['tz'] = request.session['geoip'].get('time_zone')
        if not func:
            # No handler matched: maybe the path carries a language prefix.
            path = request.httprequest.path.split('/')
            langs = [lg[0] for lg in request.website.get_languages()]
            if path[1] in langs:
                request.lang = request.context['lang'] = path.pop(1)
                path = '/'.join(path) or '/'
                if request.lang == request.website.default_lang_code:
                    # If language is in the url and it is the default
                    # language, redirect to url without language so
                    # google doesn't see duplicate content
                    return request.redirect(path + '?' + request.httprequest.query_string)
                return self.reroute(path)
    return super(ir_http, self)._dispatch()
def edit_template(self, cr, uid, ids, context=None):
    """Return an act_url action opening the template database's login page.

    NOTE(review): the URL embeds hard-coded admin credentials
    (login=admin&key=admin) -- anyone who sees this URL can log in to
    the template database; confirm this is acceptable.
    """
    obj = self.browse(cr, uid, ids[0])
    d = config.get('local_url')
    url = '%s/login?db=%s&login=admin&key=admin' % (d, obj.template)
    return {
        'type': 'ir.actions.act_url',
        'target': 'self',
        'name': 'Edit Template',
        'url': url
    }
def set_param(self, key, value, groups=False):
    """Write a config parameter, but refuse to overwrite 'web.base.url'
    when the server config sets ``web_base_url_force``.

    Returns True when the update is skipped, otherwise the parent's
    return value.
    """
    groups = groups or []
    force_url = odoo_config.get('web_base_url_force', False)
    if key == 'web.base.url' and force_url:
        _logger.info("key 'web.base.url' : skipping regular update.")
        return True
    else:
        return super(IrconfigParameter, self).set_param(key, value, groups=groups)
def FindCustomFonts():
    """Fill the __foundFonts list with those filenames, whose fonts
    can be found in the reportlab ttf font path.

    This process needs only be done once per loading of this module,
    it is cached.  But, if the system admin adds some font in the
    meanwhile, the server must be restarted eventually.
    """
    dirpath = []
    log = logging.getLogger('report.fonts')
    global __foundFonts
    __foundFonts = {}
    searchpath = []
    # Config-supplied paths come first, then per-platform defaults,
    # then reportlab's own search path.
    if config.get('fonts_search_path'):
        searchpath += map(str.strip, config.get('fonts_search_path').split(','))
    local_platform = platform.system()
    if local_platform in TTFSearchPathMap:
        searchpath += TTFSearchPathMap[local_platform]
    # Append the original search path of reportlab (at the end)
    searchpath += rl_config.TTFSearchPath
    # Perform the search for font files ourselves, as reportlab's
    # TTFOpenFile is not very good at it.
    for dirglob in searchpath:
        dirglob = os.path.expanduser(dirglob)
        for dirname in glob.iglob(dirglob):
            abp = os.path.abspath(dirname)
            if os.path.isdir(abp):
                dirpath.append(abp)
    # Map each wanted font filename to the first directory containing it.
    for k, (name, font, filename, mode) in enumerate(CustomTTFonts):
        if filename in __foundFonts:
            continue
        for d in dirpath:
            abs_filename = os.path.join(d, filename)
            if os.path.exists(abs_filename):
                log.debug("Found font %s at %s", filename, abs_filename)
                __foundFonts[filename] = abs_filename
                break
def create(self, vals):
    """Create a B2B flow record.

    When no name is supplied, a b2b.flow row tagged with this client's
    database name is created on the master database and its generated
    name is used here.
    """
    db = config.get('db_master')
    if 'name' not in vals:
        registry = openerp.modules.registry.RegistryManager.get(db)
        with registry.cursor() as db_cr:
            bf = registry['b2b.flow']
            wals = {'client': self.env.cr.dbname}
            b2b_flow_id = bf.create(db_cr, SUPERUSER_ID, wals)
            # The master DB assigns the flow name (e.g. via a sequence).
            flow = bf.browse(db_cr, SUPERUSER_ID, b2b_flow_id).name
        vals['name'] = flow
    return super(B2BFlow, self).create(vals)
def publish_to_partner(self):
    """Publish this record into the partner's own database.

    The partner's database name is looked up from res.users on the
    master DB (matched by organization).  Returns False when no user
    row is found; a no-op (implicit None) when 'partner_db' is already
    set in the context.
    """
    if not self.env.context.get('partner_db', False):
        assert hasattr(self, 'partner_id'), "Field partner_id not present."
        data = connector.call(config.get('db_master'), 'res.users', 'search_read', [('organization', '=', self.partner_id.name)], ['database'])
        if not data:
            return False
        db = data[0]['database']
        # Re-run publish() with the resolved target database in context.
        return self.with_context(partner_db=db).publish()
def setup_redis_connection():
    """Return the current redis connection, creating and pushing one
    when none exists.

    An explicit ``redis_url`` config option takes precedence over the
    redis library defaults.
    """
    redis_conn = get_current_connection()
    if not redis_conn:
        if config.get('redis_url', False):
            oorq_log('Connecting to redis using redis_url: %s' % config['redis_url'])
            redis_conn = from_url(config['redis_url'])
        else:
            oorq_log('Connecting to redis using defaults')
            redis_conn = Redis()
        # Make the new connection the library-wide current one.
        push_connection(redis_conn)
    return redis_conn