def get_cursor(self, create_session_store_db=True):
    """Return an open cursor on the session-store database, creating the
    database and the ``sessionstore`` table on first use.

    :param create_session_store_db: when True, a missing database is created
        (from the configured ``db_template``) and the call is retried once.
    :return: an open cursor on the session-store database
    :raises Exception: whatever ``db_connect`` raised, when the retry is
        already exhausted (``create_session_store_db=False``)
    """
    db_name = config.get("session_store_db", "session_store")
    try:
        con = db_connect(db_name)
        cr = con.cursor()
    except Exception:
        if not create_session_store_db:
            # Second attempt already failed: propagate the real error.
            raise
        # NOTE: a stray `db_connect("postgres")` call whose result was
        # discarded has been removed here (dead connection churn).
        with closing(db_connect("postgres").cursor()) as cr:
            cr.autocommit(True)  # avoid transaction block
            cr.execute(
                """CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """
                % (db_name, config["db_template"]))
        return self.get_cursor(create_session_store_db=False)
    cr.execute("""
        CREATE TABLE IF NOT EXISTS sessionstore (
            id varchar(40),
            data bytea
        );
    """)
    cr.commit()
    return cr
def init_database():
    """Create the target database from ``db_template`` if it is unreachable.

    ``db_name`` is a free variable resolved at module level — presumably the
    configured database name; TODO confirm against the enclosing module.
    """
    try:
        # Probe whether we can connect to the database at all.
        db_connect(db_name).cursor().close()
    except Exception:
        # NOTE: a stray `db_connect('postgres')` call whose result was
        # discarded has been removed here (dead connection churn).
        with closing(db_connect('postgres').cursor()) as cr:
            cr.autocommit(True)  # CREATE DATABASE cannot run in a transaction
            cr.execute(
                """CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """
                % (db_name, config['db_template']))
            cr.commit()
def list_db_incompatible(databases):
    """Return the subset of *databases* incompatible with this Odoo version.

    :param databases: a list of existing PostgreSQL database names
    :return: list of database names that are incompatible
    """
    incompatible = []
    expected = '.'.join(str(part) for part in version_info[:2])
    for name in databases:
        with closing(db_connect(name).cursor()) as cr:
            cr.execute(
                "SELECT 1 FROM information_schema.tables WHERE table_name='ir_module_module'"
            )
            if not cr.fetchone():
                # No module table at all: not an Odoo database we can use.
                incompatible.append(name)
            else:
                cr.execute(
                    "SELECT latest_version FROM ir_module_module WHERE name=%s",
                    ('base', ))
                base_version = cr.fetchone()[0]  # e.g. 10.saas~15
                if '.'.join(base_version.split('.')[:2]) != expected:
                    incompatible.append(name)
        # release connection
        odoo.sql_db.close_db(name)
    return incompatible
def send_whatsapp_automatic(self):
    # Send a WhatsApp status message (paid / outstanding) for each invoice in
    # the recordset to the invoice partner and all of its child contacts,
    # then log the result as a mail.message.
    # NOTE(review): opens a second cursor on the same database and commits on
    # it mid-loop — presumably to persist progress even if a later send
    # fails; verify this is intentional.
    for inv in self:
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        MailMessage = self.env['mail.message']
        WhatsappComposeMessage = self.env['whatsapp.compose.message']
        # Pick the template matching the invoice payment state.
        if inv.invoice_payment_state == 'paid':
            template_id = self.env.ref('aos_whatsapp_account.invoice_paid_status', raise_if_not_found=False)
        else:
            template_id = self.env.ref('aos_whatsapp_account.invoice_outstanding_status', raise_if_not_found=False)
        # Only proceed when a WhatsApp server exists and is authenticated.
        if self._get_whatsapp_server() and self._get_whatsapp_server().status == 'authenticated':
            KlikApi = self._get_whatsapp_server().klikapi()
            KlikApi.auth()
            template = template_id.generate_email(inv.id)
            body = template.get('body')
            subject = template.get('subject')
            try:
                # Substitute the partner name placeholder in the body.
                body = body.replace('_PARTNER_', inv.partner_id.name)
            except:
                _logger.warning('Failed to send Message to WhatsApp number %s', inv.partner_id.whatsapp)
            if inv.partner_id:
                partners = inv.partner_id
                if inv.partner_id.child_ids:
                    #ADDED CHILD FROM PARTNER
                    for partner in inv.partner_id.child_ids:
                        partners += partner
                attachment_ids = []
                chatIDs = []
                message_data = {}
                send_message = {}
                status = 'error'
                for partner in partners:
                    # Only partners with a country and a WhatsApp number.
                    if partner.country_id and partner.whatsapp:
                        #SEND MESSAGE
                        whatsapp = partner._formatting_mobile_number()
                        message_data = {
                            'phone': whatsapp,
                            'body': html2text.html2text(body) + inv.get_link(),
                        }
                        # Prefer an existing chat id over the raw phone number.
                        if partner.chat_id:
                            message_data.update({'chatId': partner.chat_id, 'phone': ''})
                        data_message = json.dumps(message_data)
                        send_message = KlikApi.post_request(method='sendMessage', data=data_message)
                        if send_message.get('message')['sent']:
                            chatID = send_message.get('chatID')
                            status = 'send'
                            # Remember the chat id for future sends.
                            partner.chat_id = chatID
                            chatIDs.append(chatID)
                            _logger.warning('Success to send Message to WhatsApp number %s', whatsapp)
                        else:
                            status = 'error'
                            _logger.warning('Failed to send Message to WhatsApp number %s', whatsapp)
                        # Persist per-partner progress on the side cursor.
                        new_cr.commit()
                        #time.sleep(3)
                AllchatIDs = ';'.join(chatIDs)
                # Build and store the log message summarizing all sends.
                vals = WhatsappComposeMessage._prepare_mail_message(self.env.user.id, AllchatIDs, inv and inv.id, 'account.move', body, message_data, subject, partners.ids, attachment_ids, send_message, status)
                #vals = WhatsappComposeMessage._prepare_mail_message(self.env.user.id, AllchatIDs, [inv.id], 'account.invoice', body, message_data, subject, partners.ids, attachment_ids, send_message, status)
                MailMessage.sudo().create(vals)
                new_cr.commit()
                #time.sleep(3)
def _install_modules(self, db_name, modules):
    """Install the selected uninstalled modules on *db_name*.

    :param db_name: name of the target database
    :param modules: extra search-domain leaves selecting the modules
    :raises NotImplementedError: when this deployment is not of type 'local'
    """
    if self.type != 'local':
        raise NotImplementedError()
    connection = sql_db.db_connect(db_name)
    with api.Environment.manage(), connection.cursor() as cursor:
        environment = api.Environment(cursor, SUPERUSER_ID, {})
        # Odoo's translation system may consult odoo.http.request and end up
        # switching currentThread().dbname to the saas master value, so shadow
        # the request with None for the duration of the install.
        _request_stack.push(None)
        domain = [('state', '=', 'uninstalled')] + modules
        to_install = environment['ir.module.module'].search(domain)
        with turn_off_tests():
            to_install.button_immediate_install()
        # Some magic to force reloading registry in other workers.
        environment.registry.registry_invalidated = True
        environment.registry.signal_changes()
        # return request back
        _request_stack.pop()
def empty_cursor_pool(self):
    """Roll back the request transaction, close every pooled connection to the
    current database, and give the request a brand-new cursor.

    Closing the pool first avoids 'InterfaceError: connection already closed'
    from waiting transactions. The fresh cursor picks up the current
    os.environ (e.g. 'PGOPTIONS' used to log SQL statements into
    postgres.log); calling this method again re-reads the environment, which
    is useful to reset 'PGOPTIONS'.
    """
    request.cr.rollback()
    # Only connections to the current database are closed here.
    dsn = sql_db.connection_info_for(request.cr.dbname)
    sql_db._Pool.close_all(dsn[1])
    connection = sql_db.db_connect(request.cr.dbname)
    request._cr = connection.cursor()
def list_db_incompatible(databases):
    """Return the subset of *databases* incompatible with this Odoo version.

    :param databases: a list of existing PostgreSQL database names
    :return: list of database names that are incompatible
    """
    incompatible = []
    expected = '.'.join(str(part) for part in version_info[:2])
    for name in databases:
        with closing(db_connect(name).cursor()) as cr:
            if not odoo.tools.table_exists(cr, 'ir_module_module'):
                incompatible.append(name)
                continue
            cr.execute("SELECT latest_version FROM ir_module_module WHERE name=%s", ('base',))
            row = cr.fetchone()
            if not row or not row[0]:
                incompatible.append(name)
            elif '.'.join(row[0].split('.')[:2]) != expected:  # e.g. 10.saas~15
                incompatible.append(name)
    for name in incompatible:
        # release connection
        odoo.sql_db.close_db(name)
    return incompatible
def __exit__(self, *args):
    """Stop all collectors and, when a database was given, persist the
    collected profile as an ``ir_profile`` row; always restore gc state and
    drop the per-thread profiler params on the way out."""
    try:
        for collector in self.collectors:
            collector.stop()
        self.duration = time.time() - self.start_time
        self._add_file_lines(self.init_stack_trace)
        if self.db:
            # pylint: disable=import-outside-toplevel
            from odoo.sql_db import db_connect # only import from odoo if/when needed.
            with db_connect(self.db).cursor() as cr:
                values = {
                    "name": self.description,
                    "session": self.profile_session,
                    "create_date": datetime.datetime.now(),
                    "init_stack_trace": json.dumps(_format_stack(self.init_stack_trace)),
                    "duration": self.duration,
                    "entry_count": self.entry_count(),
                }
                # Each collector contributes its entries under its own column.
                for collector in self.collectors:
                    if collector.entries:
                        values[collector.name] = json.dumps(collector.entries)
                # Column names come from collector names, so build the INSERT
                # with psycopg2 sql.Identifier to quote them safely.
                query = sql.SQL("INSERT INTO {}({}) VALUES %s RETURNING id").format(
                    sql.Identifier("ir_profile"),
                    sql.SQL(",").join(map(sql.Identifier, values)),
                )
                cr.execute(query, [tuple(values.values())])
                profile_id = cr.fetchone()[0]
                _logger.info('ir_profile %s (%s) created', profile_id, self.profile_session)
    finally:
        # Cleanup must run even if persisting the profile failed.
        if self.disable_gc:
            gc.enable()
        if self.params:
            del self.init_thread.profiler_params
def perf_cursor(dbname):
    """Return a cursor on the companion ``<dbname>_perf`` database,
    creating that database first if it does not exist yet."""
    perf_db = '%s_perf' % dbname
    try:
        _create_empty_database(perf_db)
    except DatabaseExists:
        # Already created by a previous call: nothing to do.
        pass
    return db_connect(perf_db).cursor()
def ks_run_query(self):
    """Execute each record's custom SQL query on a dedicated cursor and store
    the result rows as JSON in ``ks_query_result``.

    Datetime/date cells are serialized with the standard Odoo server formats.
    Only records of calculation type 'query' (and not of item type
    ks_tile/ks_kpi) with a non-empty query are processed; others get False.

    :raises ValidationError: when the query fails or returns nothing to fetch
    """
    for rec in self:
        with api.Environment.manage():
            if rec.ks_data_calculation_type == 'query' \
                    and rec.ks_dashboard_item_type not in ['ks_tile', 'ks_kpi'] \
                    and rec.ks_custom_query:
                ks_query = rec.ks_custom_query
                # BUGFIX: initialize new_env before the try block — previously
                # a failure while connecting raised NameError in `finally`,
                # masking the real error.
                new_env = None
                try:
                    conn = sql_db.db_connect(self.env.cr.dbname)
                    new_env = api.Environment(conn.cursor(), self.env.uid, self.env.context)
                    new_env.cr.execute(ks_query)
                    records = new_env.cr.dictfetchall()
                except ProgrammingError as e:
                    if e.args[0] == 'no results to fetch':
                        raise ValidationError(
                            _("You can only read the Data from Database"))
                    else:
                        raise ValidationError(_(e))
                except Exception as e:
                    raise ValidationError(_(e))
                finally:
                    if new_env is not None:
                        new_env.cr.close()
                # Make date/datetime values JSON-serializable.
                for res in records:
                    for key in res:
                        if type(res[key]).__name__ == 'datetime':
                            res[key] = res[key].strftime(
                                DEFAULT_SERVER_DATETIME_FORMAT)
                        elif type(res[key]).__name__ == 'date':
                            res[key] = res[key].strftime(
                                DEFAULT_SERVER_DATE_FORMAT)
                rec.ks_query_result = json.dumps(records)
            else:
                rec.ks_query_result = False
def log_call(self, args=None, kwargs=None, res='', err=''):
    """Persist one profiled call into the ``<db>_perf`` companion database,
    unless it completed faster than ``self.min_duration``."""
    tm = time.time() - self.ts
    if tm < self.min_duration:
        # Too fast to be interesting: skip logging entirely.
        return
    vals = {
        'path': self.path,
        'date': datetime.fromtimestamp(self.ts).strftime(DATETIME_FORMAT),
        'uid': self.uid,
        'model': self.model,
        'method': self.method,
        'total_time': tm,
        'db_time': self.db_tm,
        'db_count': self.db_nb,
        'args': self._format_args(args, kwargs),
        'result': self._format_res(res),
        'error': get_exception_message(err),
        'stats': self.stats,
        'db_stats': repr(self.db_stats),
        'slow_queries': repr(self.slow_queries),
        'slow_recomputation': repr(self.slow_recomputation),
    }
    PerfLog = self.env['ir.logging.perf.log']
    # Build (column, placeholder[, value]) triples; the id column has no
    # bound value — it is filled by nextval() inline in the SQL.
    updates = [('id', "nextval('%s')" % PerfLog._sequence)]
    for col in vals:
        field = PerfLog._fields[col]
        updates.append((col, field.column_format, field.convert_to_column(vals[col], PerfLog)))
    # Write on the perf database, not the business database.
    with db_connect(self.db + '_perf').cursor() as cr:
        columns = ', '.join('"%s"' % u[0] for u in updates)
        values = ', '.join(u[1] for u in updates)
        query = 'INSERT INTO ir_logging_perf_log (%s) VALUES (%s)' % (columns, values)
        # Only 3-tuples carry a bound parameter (id does not).
        params = [u[2] for u in updates if len(u) > 2]
        cr.execute(query, tuple(params))
def import_lang(self):
    """Import a translation file (.csv/.po/.pot) for this wizard's language.

    Activates (or creates) the res.lang record, then loads the uploaded file
    into the translation tables.

    :return: True on success
    :raises UserError: when the file is malformed or its format is unknown
    """
    this = self[0]
    with TemporaryFile('wb+') as buf:
        try:
            buf.write(base64.decodebytes(this.data))
            # now we determine the file format
            buf.seek(0)
            fileformat = os.path.splitext(this.filename)[-1][1:].lower()
            Lang = self.env["res.lang"]
            lang = Lang._activate_lang(self.code) or Lang._create_lang(
                self.code, lang_name=self.name
            )
            tools.trans_load_data(
                this._cr, buf, fileformat, this.code, overwrite=self.overwrite
            )
        except ProgrammingError as e:
            _logger.exception('File unsuccessfully imported, due to a malformed file.')
            # FIX: removed a throwaway cursor that was opened here
            # (db_connect(...).cursor()) and never used; also use
            # this.filename for consistency with the handler below
            # (self.filename would break on a multi-record wizard).
            raise UserError(_('File %r not imported due to a malformed file.\n\n'
                              'This issue can be caused by duplicates entries who are referring to the same field. '
                              'Please check the content of the file you are trying to import.\n\n'
                              'Technical Details:\n%s') % (this.filename, tools.ustr(e)))
        except Exception as e:
            _logger.exception('File unsuccessfully imported, due to format mismatch.')
            raise UserError(
                _('File %r not imported due to format mismatch or a malformed file.'
                  ' (Valid formats are .csv, .po, .pot)\n\nTechnical Details:\n%s') % \
                (this.filename, tools.ustr(e))
            )
    return True
def whatsapp_message_resend(self, company_id):
    """Retry every failed WhatsApp message log entry against the remote API
    using the given company's token, updating each log's status.

    NOTE(review): this method rebinds ``self.env`` to a new Environment on a
    fresh cursor, and the ``finally`` closes ``self.env.cr`` — after the
    rebind that is the new cursor, but if db_connect itself fails it closes
    the caller's original cursor instead; verify this is intended.
    """
    try:
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        uid, context = self.env.uid, self.env.context
        with api.Environment.manage():
            self.env = api.Environment(new_cr, uid, context)
            # Only retry messages that previously errored out.
            message_log_rec = self.env['whatsapp.message.log'].search([
                ('status', '=', 'error')
            ])
            token_value = {'token': company_id.api_token}
            for log_rec in message_log_rec:
                request_meeting = requests.post(
                    log_rec.link,
                    data=log_rec.data,
                    params=token_value,
                    headers={'Content-Type': 'application/json'})
                if request_meeting.status_code == 200:
                    log_rec.write({
                        'msg_date': datetime.now(),
                        'message': request_meeting.text,
                        'status': 'send'
                    })
                else:
                    log_rec.write({
                        'msg_date': datetime.now(),
                        'message': request_meeting.text,
                        'status': 'error'
                    })
            # Persist all log updates made on the dedicated cursor.
            new_cr.commit()
    finally:
        self.env.cr.close()
def getNewEnv(self):
    """Return a new Environment on a fresh cursor to the current database.

    The environment runs as user id 1 (superuser) with ``bulk_import=True``
    added to the context. The caller is responsible for closing the cursor.

    :return: a new ``api.Environment`` bound to a fresh cursor
    """
    # FIX: dropped the unused local `uid` — the environment is deliberately
    # built with user id 1, so reading self.env.uid was misleading dead code.
    ctx = self.env.context.copy()
    ctx.update({'bulk_import': True})
    new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
    return api.Environment(new_cr, 1, ctx)
def install_modules(self, template_id, template_operator_id):
    """Install the template's modules (plus the mandatory ones) on the
    operator database, then move the operator to 'post_init' and schedule
    the post-init job."""
    self.ensure_one()
    wanted = MANDATORY_MODULES + [module.name for module in template_id.template_module_ids]
    domain = [('name', 'in', wanted)]
    if self.type == 'local':
        db = sql_db.db_connect(template_operator_id.operator_db_name)
        with api.Environment.manage(), db.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            # Odoo's translation system may consult odoo.http.request and end
            # up switching currentThread().dbname to the saas master value,
            # so shadow the request with None during the install.
            _request_stack.push(None)
            pending = env['ir.module.module'].search(
                [('state', '=', 'uninstalled')] + domain)
            pending.button_immediate_install()
            # Some magic to force reloading registry in other workers.
            env.registry.registry_invalidated = True
            env.registry.signal_changes()
            # return request back
            _request_stack.pop()
    template_operator_id.state = 'post_init'
    self.with_delay().post_init(template_id, template_operator_id)
def restore_via_odoo_backup_sh(self, master_pwd, backup_file_name, name, encryption_password, copy=False):
    """Download a backup from cloud storage, optionally GPG-decrypt it, and
    restore it as database *name*; auto-backup crons in the restored database
    are then deactivated. Renders the backup list page with an error message
    on failure, redirects to the database manager on success."""
    if config['admin_passwd'] != master_pwd:
        return env.get_template("backup_list.html").render(
            error="Incorrect master password")
    if os.path.exists(config.filestore(name)):
        return env.get_template("backup_list.html").render(
            error=
            'Filestore for database "{}" already exists. Please choose another database name'
            .format(name))
    cloud_params = self.get_cloud_params(request.httprequest.url, call_from='frontend')
    backup_object = BackupCloudStorage.get_object(
        cloud_params, filename=backup_file_name)
    # Spool the downloaded backup into a temporary file.
    backup_file = tempfile.NamedTemporaryFile()
    backup_file.write(backup_object['Body'].read())
    # Encrypted backups carry an '.enc' suffix in the first '|' segment.
    if backup_file_name.split('|')[0][-4:] == '.enc':
        if not encryption_password:
            raise UserError(
                _('The backup are encrypted. But encryption password is not found. Please check your module settings.'
                  ))
        # GnuPG ignores the --output parameter with an existing file object as value
        decrypted_backup_file = tempfile.NamedTemporaryFile()
        decrypted_backup_file_name = decrypted_backup_file.name
        os.unlink(decrypted_backup_file_name)
        backup_file.seek(0)
        r = gnupg.GPG().decrypt_file(backup_file,
                                     passphrase=encryption_password,
                                     output=decrypted_backup_file_name)
        if not r.ok:
            error = 'gpg: {0}'.format(r.status)
            if not r.valid:
                error += ". Maybe wrong password?"
            return env.get_template("backup_list.html").render(error=error)
        # From here on, work with the decrypted file.
        backup_file = open(decrypted_backup_file_name, 'rb')
    try:
        db.restore_db(name, backup_file.name, str2bool(copy))
        # Make all auto backup cron records inactive
        with closing(db_connect(name).cursor()) as cr:
            cr.autocommit(True)
            try:
                cr.execute("""
                    UPDATE ir_cron SET active=false
                    WHERE active=true
                    AND id IN (SELECT ir_cron_id FROM odoo_backup_sh_config_cron);
                    UPDATE odoo_backup_sh_config SET active=false WHERE active=true;
                """)
            except Exception:
                # Best effort: the restored DB may not have these tables.
                pass
        return http.local_redirect('/web/database/manager')
    except Exception as e:
        error = "Database restore error: %s" % (str(e) or repr(e))
        return env.get_template("backup_list.html").render(error=error)
    finally:
        # Always remove the (possibly decrypted) temporary backup file.
        os.unlink(backup_file.name)
def grant_asterisk_access(self):
    """Grant the Asterisk database role full access to the CDR table and its
    id sequence on the current database.

    ``ASTERISK_ROLE`` is a module-level constant (not user input), so the
    plain %-interpolation into the GRANT statement is safe here.
    """
    cr = sql_db.db_connect(self.env.cr.dbname).cursor()
    try:
        # FIX: wrap in try/finally so the cursor is closed even when a GRANT
        # fails (previously it leaked on error).
        cr.execute("GRANT ALL on asterisk_cdr to %s" % ASTERISK_ROLE)
        cr.execute("GRANT ALL on asterisk_cdr_id_seq to %s" % ASTERISK_ROLE)
        cr.commit()
    finally:
        cr.close()
def _create_database(self):
    """Create ``self.dbname`` with unicode encoding from 'template0'."""
    with db_connect("postgres").cursor() as cursor:
        # CREATE DATABASE is not allowed inside a transaction block.
        cursor.autocommit(True)
        statement = """
            CREATE DATABASE {dbname}
                ENCODING 'unicode'
                TEMPLATE 'template0';
        """.format(dbname=self.dbname)
        cursor.execute(statement)
def _setup_database(self, raise_exception=True):
    """Ensure the working database and its table exist, creating both when
    the first connection attempt fails.

    :param raise_exception: kept for interface compatibility; currently
        unused by the implementation (NOTE(review): confirm whether failures
        after database creation should honor it).
    """
    try:
        with db_connect(self.dbname, allow_uri=True).cursor() as cursor:
            cursor.autocommit(True)
            self._create_table(cursor)
    except Exception:
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        self._create_database()
        self._setup_database()
def _process_job(cls, job_cr, job, cron_cr):
    """Run one cron job while holding the locks that keep mutually exclusive
    crons from executing at the same time; always release them afterwards."""
    db = sql_db.db_connect(cls.pool._db.dbname)
    locked_crons = cls._lock_mutually_exclusive_cron(db, job['id'])
    try:
        return super(IrCron, cls)._process_job(job_cr, job, cron_cr)
    finally:
        locked_crons.close()
        _logger.debug("released blocks for cron job %s" % job['cron_name'])
def _table_exist(self):
    """Check that the companion perf database exists and contains this
    model's table (or a view of it).

    :return: False when the perf database is missing, otherwise the matched
        row count (truthy iff the table/view exists)
    """
    perf_dbname = self._cr.dbname + '_perf'
    # FIX: parameterized query instead of %-interpolating the database name
    # into the SQL string.
    self._cr.execute("SELECT datname from pg_database WHERE datname=%s",
                     (perf_dbname, ))
    if not self._cr.rowcount:
        return False
    query = "SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s"
    with db_connect(perf_dbname).cursor() as new_cr:
        new_cr.execute(query, (self._table, ))
        return new_cr.rowcount
def create(self, vals):
    """Create a product request; when the 'portal.database' parameter is set,
    default ``partner_id`` to the partner of user 1 on that remote database.

    :param vals: values for the new record
    :return: the created record
    """
    config_id = self.env['ir.config_parameter'].sudo().get_param(
        'portal.database')
    if config_id:
        new_cr = db_connect(str(config_id)).cursor()
        try:
            # FIX: the cursor to the portal database was never closed
            # before (connection leak); close it once the read is done.
            new_env = Environment(new_cr, SUPERUSER_ID, {})
            user_id = new_env['res.users'].search([('id', '=', 1)])
            if user_id:
                vals.update({'partner_id': user_id.partner_id.id})
        finally:
            new_cr.close()
    return super(Productrequest, self).create(vals)
def _open_connection(self, create_db=True):
    """Open an autocommit cursor on ``self.dbname``, creating the database
    and retrying once when the first attempt fails.

    :param create_db: when False, a connection failure is re-raised instead
        of triggering database creation
    """
    try:
        connection = db_connect(self.dbname, allow_uri=True)
        self.cursor = connection.cursor()
        self.cursor.autocommit(True)
    except Exception:
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        if not create_db:
            raise
        self._create_database()
        return self._open_connection(create_db=False)
def __init__(self, db_name):
    """Bind to *db_name* on an autocommit cursor and collect the pending
    upgrades plus the module lists they imply."""
    self.db_name = db_name
    self.db = sql_db.db_connect(db_name)
    self.cr = self.db.cursor()
    self.cr.autocommit(True)
    self.upgrades = self._get_upgrades()
    # Deduplicated union of every upgrade's modules_to_upgrade.
    module_names = set()
    for upgrade in self.upgrades:
        module_names.update(upgrade.modules_to_upgrade)
    self.modules_to_upgrade = list(module_names)
    # Creation-time modules come from the most recent upgrade, if any.
    if self.upgrades:
        self.modules_to_install_at_creation = \
            self.upgrades[-1].modules_to_install_at_creation or []
    else:
        self.modules_to_install_at_creation = []
def _get_modules_list(dbname):
    """Return the names of the modules installed in *dbname*, ordered by
    (sequence, name).

    Modules in state 'to upgrade' are included for compatibility with
    versions older than 7.0, where updating a module was done in two steps:
    1) mark the module to upgrade, 2) upgrade all marked modules.
    """
    with closing(sql_db.db_connect(dbname).cursor()) as cr:
        cr.execute(
            "SELECT name from ir_module_module WHERE state IN "
            "('installed', 'to upgrade') ORDER BY sequence, name")
        return [row[0] for row in cr.fetchall()]
def ks_query_validate(self, ks_query):
    """Dry-run *ks_query* on a dedicated cursor to validate it.

    :param ks_query: SQL text to validate
    :raises UserError: wrapping whatever error the query execution produced
    """
    with api.Environment.manage():
        # BUGFIX: initialize new_env before the try block — previously a
        # failure while connecting raised NameError inside `finally`,
        # masking the real error.
        new_env = None
        try:
            conn = sql_db.db_connect(self.env.cr.dbname)
            new_env = api.Environment(conn.cursor(), self.env.uid, self.env.context)
            new_env.cr.execute(ks_query)
        except Exception as e:
            raise UserError(_(e))
        finally:
            if new_env is not None:
                new_env.cr.close()
def exp_drop(db_name):
    """Drop *db_name*, then remove any file-based DMS filestores that the
    database was using."""
    with closing(sql_db.db_connect(db_name).cursor()) as cr:
        env = api.Environment(cr, SUPERUSER_ID, {})
        file_settings = env['muk_dms.settings'].search([('save_type', '=', 'file')])
        filestore_paths = file_settings.mapped('complete_base_path')
    res = exp_drop.super(db_name)
    for path in filestore_paths:
        if os.path.exists(path):
            # Best effort: ignore filesystem errors while removing the tree.
            shutil.rmtree(path, ignore_errors=True)
    return res
def get_count_db(user):
    """Return how many PostgreSQL databases are owned by *user*."""
    with closing(sql_db.db_connect('postgres').cursor()) as cr:
        cr.execute(
            """
            SELECT count(datname)
            FROM pg_database d
            LEFT JOIN pg_user u ON d.datdba = usesysid
            WHERE u.usename = %s;
            """, (user, ))
        return cr.fetchone()[0]
def install_modulo_lista(self, modulos):
    """Install every uninstalled module named in *modulos* on the database
    named after this record, marking the record as busy first."""
    self.state = 'installing_modules'
    domain = [('name', 'in', modulos)]
    db = sql_db.db_connect(self.name)
    with api.Environment.manage(), db.cursor() as cr:
        env = api.Environment(cr, SUPERUSER_ID, {})
        pending = env['ir.module.module'].search(
            [('state', '=', 'uninstalled')] + domain)
        pending.button_immediate_install()
def _batch_invoice_payment(self):
    """Create payments for every wizard record on a dedicated cursor and
    commit them in one go.

    NOTE(review): this rebinds ``self.env`` to an Environment on a fresh
    cursor; the ``finally`` then closes ``self.env.cr`` — the new cursor
    after the rebind, but the caller's original cursor if db_connect itself
    failed. Verify this is intended.
    """
    try:
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        uid, context = self.env.uid, self.env.context
        with api.Environment.manage():
            self.env = api.Environment(new_cr, uid, context)
            for reg_pay_wiz in self:
                reg_pay_wiz.create_payments()
            # Persist all created payments on the dedicated cursor.
            new_cr.commit()
            return True
    finally:
        self.env.cr.close()
def _get_db(self): # find current DB based on thread/worker db name (see netsvc) db_name = getattr(threading.currentThread(), 'dbname', None) if db_name: return sql_db.db_connect(db_name)