def _initialize_db(serv, id, db_name, demo, lang, user_password): cr = None try: serv.actions[id]['progress'] = 0 cr = sql_db.db_connect(db_name).cursor() openerp.modules.db.initialize(cr) # TODO this should be removed as it is done by pooler.restart_pool. tools.config['lang'] = lang cr.commit() cr.close() pool = pooler.restart_pool(db_name, demo, serv.actions[id], update_module=True)[1] cr = sql_db.db_connect(db_name).cursor() if lang: modobj = pool.get('ir.module.module') mids = modobj.search(cr, 1, [('state', '=', 'installed')]) modobj.update_translations(cr, 1, mids, lang) cr.execute('UPDATE res_users SET password=%s, context_lang=%s, active=True WHERE login=%s', ( user_password, lang, 'admin')) cr.execute('SELECT login, password, name ' \ ' FROM res_users ' \ ' ORDER BY login') serv.actions[id].update(users=cr.dictfetchall(), clean=True) cr.commit() cr.close() except Exception, e: serv.actions[id].update(clean=False, exception=e) _logger.exception('CREATE DATABASE failed:') serv.actions[id]['traceback'] = traceback.format_exc() if cr: cr.close()
def _initialize_db(serv, id, db_name, demo, lang, user_password):
    """Initialize the freshly-created database *db_name*: install modules,
    load translations and set the admin password/language via the ORM.

    Progress and results are reported through serv.actions[id]; any
    exception is recorded there instead of being propagated.
    """
    cr = None
    try:
        serv.actions[id]['progress'] = 0
        cr = sql_db.db_connect(db_name).cursor()
        openerp.modules.db.initialize(cr)
        # TODO this should be removed as it is done by pooler.restart_pool.
        tools.config['lang'] = lang
        cr.commit()
        cr.close()
        pool = pooler.restart_pool(db_name, demo, serv.actions[id],
                                   update_module=True)[1]
        cr = sql_db.db_connect(db_name).cursor()
        if lang:
            # Load translations for every installed module.
            modobj = pool.get('ir.module.module')
            mids = modobj.search(cr, SUPERUSER_ID, [('state', '=', 'installed')])
            modobj.update_translations(cr, SUPERUSER_ID, mids, lang)
        # update admin's password and lang
        values = {'password': user_password, 'lang': lang}
        pool.get('res.users').write(cr, SUPERUSER_ID, [SUPERUSER_ID], values)
        cr.execute('SELECT login, password FROM res_users ORDER BY login')
        serv.actions[id].update(users=cr.dictfetchall(), clean=True)
        cr.commit()
        cr.close()
    except Exception, e:
        serv.actions[id].update(clean=False, exception=e)
        _logger.exception('CREATE DATABASE failed:')
        serv.actions[id]['traceback'] = traceback.format_exc()
        if cr:
            cr.close()
def tr(src, ttype):
    # We try to do the same as the _(), but without the frame
    # inspection, since we aready are wrapping an osv function
    # trans_obj = self.get('ir.translation') cannot work yet :(
    # NOTE(review): args, kwargs, dbname and self come from the enclosing
    # wrapper's scope (this is a closure over the wrapped osv call).
    ctx = {}
    if not kwargs:
        # Positional call: the context, if present, is the last dict arg.
        if args and isinstance(args[-1], dict):
            ctx = args[-1]
    elif isinstance(kwargs, dict):
        ctx = kwargs.get('context', {})
    uid = 1
    if args and isinstance(args[0], (long, int)):
        uid = args[0]
    lang = ctx and ctx.get('lang')
    if not (lang or hasattr(src, '__call__')):
        # No language and nothing callable: nothing to translate.
        return src
    # We open a *new* cursor here, one reason is that failed SQL
    # queries (as in IntegrityError) will invalidate the current one.
    cr = False
    if hasattr(src, '__call__'):
        # callable. We need to find the right parameters to call
        # the orm._sql_message(self, cr, uid, ids, context) function,
        # or we skip..
        # our signature is f(osv_pool, dbname [,uid, obj, method, args])
        try:
            if args and len(args) > 1:
                obj = self.get(args[1])
                if len(args) > 3 and isinstance(
                        args[3], (long, int, list)):
                    ids = args[3]
                else:
                    ids = []
            cr = sql_db.db_connect(dbname).cursor()
            return src(obj, cr, uid, ids, context=(ctx or {}))
        except Exception:
            pass
        finally:
            if cr:
                cr.close()
        # so that the original SQL error will
        # be returned, it is the best we have.
        return False
    try:
        cr = sql_db.db_connect(dbname).cursor()
        res = translate(cr, name=False, source_type=ttype,
                        lang=lang, source=src)
        if res:
            return res
        else:
            return src
    finally:
        if cr:
            cr.close()
def tr(src, ttype):
    # We try to do the same as the _(), but without the frame
    # inspection, since we aready are wrapping an osv function
    # trans_obj = self.get('ir.translation') cannot work yet :(
    # NOTE(review): args, kwargs, dbname and self are free variables from
    # the enclosing wrapper (closure over the wrapped osv call).
    ctx = {}
    if not kwargs:
        # Positional call: context, if any, is the trailing dict argument.
        if args and isinstance(args[-1], dict):
            ctx = args[-1]
    elif isinstance(kwargs, dict):
        ctx = kwargs.get("context", {})
    uid = 1
    if args and isinstance(args[0], (long, int)):
        uid = args[0]
    lang = ctx and ctx.get("lang")
    if not (lang or hasattr(src, "__call__")):
        # Nothing to translate and nothing to call: return unchanged.
        return src
    # We open a *new* cursor here, one reason is that failed SQL
    # queries (as in IntegrityError) will invalidate the current one.
    cr = False
    if hasattr(src, "__call__"):
        # callable. We need to find the right parameters to call
        # the orm._sql_message(self, cr, uid, ids, context) function,
        # or we skip..
        # our signature is f(osv_pool, dbname [,uid, obj, method, args])
        try:
            if args and len(args) > 1:
                obj = self.get(args[1])
                if len(args) > 3 and isinstance(args[3], (long, int, list)):
                    ids = args[3]
                else:
                    ids = []
            cr = sql_db.db_connect(dbname).cursor()
            return src(obj, cr, uid, ids, context=(ctx or {}))
        except Exception:
            pass
        finally:
            if cr:
                cr.close()
        # so that the original SQL error will be returned,
        # it is the best we have.
        return False
    try:
        cr = sql_db.db_connect(dbname).cursor()
        res = translate(cr, name=False, source_type=ttype,
                        lang=lang, source=src)
        if res:
            return res
        else:
            return src
    finally:
        if cr:
            cr.close()
def _run_other_tests(dbname, modules, ignore): _logger.info('Running tests other than unit...') db = sql_db.db_connect(dbname) with closing(db.cursor()) as cr: test_files_by_module = _get_test_files_by_module(modules) for module in test_files_by_module: ignored_files = ignore.get(module, []) if ignored_files == 'all': ignored_files = test_files_by_module[module] for filename in test_files_by_module[module]: vals = { 'module': module, 'file': filename, } if filename in ignored_files\ or not _file_in_requested_directories(get_module_path(module)): vals['result'] = 'ignored' _write_log(vals) continue start = time.time() try: _run_test(cr, module, filename) except Exception, e: vals['duration'] = time.time() - start vals['result'] = 'error' vals['code'] = e.__class__.__name__ vals['exception'] = '\n%s' % _get_exception_message(e) if filename.endswith('.yml'): vals['exception'] += '\n%s' % _build_error_message() _write_log(vals) else: vals['duration'] = time.time() - start vals['result'] = 'success' _write_log(vals) cr.rollback()
def exp_list_posix_user():
    """Rewrite/simplified version of openerp.service.exp_list()

    Lists all databases owned by the current posix user instead of the
    db_user from odoo config; runbot creates databases with the posix
    user, hence the ownership check against that account.

    :returns list of databases owned by the posix user
    """
    posix_user = pwd.getpwuid(os.getuid())[0]
    excluded = {'template0', 'template1', 'postgres',
                tools.config['db_template']}
    with closing(sql_db.db_connect('postgres').cursor()) as cr:
        cr.execute(
            """ SELECT datname
                FROM pg_database
                WHERE datdba=(
                    SELECT usesysid
                    FROM pg_user
                    WHERE usename=%s
                )
                AND datname NOT IN %s order by datname""",
            (posix_user, tuple(excluded)))
        return sorted(tools.ustr(row[0]) for row in cr.fetchall())
def _get_modules_list(dbname):
    """Return the names of all modules installed in database *dbname*.

    Modules in state 'to upgrade' are included for compatibility with
    versions older than 7.0, where updating a module was done in two
    steps: 1) mark the module to upgrade, 2) upgrade all marked modules.
    """
    connection = sql_db.db_connect(dbname)
    with closing(connection.cursor()) as cr:
        cr.execute("SELECT name from ir_module_module WHERE state IN ('installed', 'to upgrade')")
        return [row[0] for row in cr.fetchall()]
def logout(self, redirect='/web'):
    """Log the current user out, recording an access-control 'OUT' event
    in control.access before the session is destroyed.

    Event logging is best-effort: any failure is logged and ignored so
    the logout itself always succeeds.
    """
    cr = None
    try:
        superuser = openerp.SUPERUSER_ID
        uid = request.session.uid
        login = request.session.login
        url = request.httprequest.base_url
        ip = request.httprequest.remote_addr
        session_id = request.session_id
        dbname = request.session.db
        # Get database connexion
        db = sql_db.db_connect(dbname)  # You can get the db name from config
        cr = db.cursor()
        pool = pooler.get_pool(cr.dbname)
        pool.get('control.access').create(cr, superuser, {
            'user_id': uid,
            'user_name': login,
            'url': url,
            'ip': ip,
            'db': dbname,
            'type': 'OUT',
            'session': session_id,
        })
        cr.commit()
    except Exception:
        # Best-effort only; the original printed "error logout" and
        # leaked the cursor on failure.
        _logger.exception('error logout')
    finally:
        if cr is not None:
            cr.close()
    request.session.logout(keep_db=True)
    return werkzeug.utils.redirect(redirect, 303)
def empty_cursor_pool(self):
    """This method cleans (rollback) all current transactions over actual
    cursor in order to avoid errors with waiting transactions.
        - request.cr.rollback()
    Also connections on current database's only are closed by the next
    statement
        - dsn = openerp.sql_db.dsn(request.cr.dbname)
        - openerp.sql_db._Pool.close_all(dsn[1])
    Otherwise next error will be trigger
        'InterfaceError: connection already closed'
    Finally new cursor is assigned to the request object, this cursor will
    take the os.environ setted.
    In this case the os.environ is setted with all 'PGOPTIONS' required to
    log all sql transactions in postgres.log file.
    If this method is called one more time, it will create a new cursor
    and take the os.environ again, this is usefully if we want to reset
    'PGOPTIONS'
    """
    # Abort any transaction pending on the request's current cursor.
    request.cr.rollback()
    # Close every pooled connection to the current database only.
    dsn = sql_db.dsn(request.cr.dbname)
    sql_db._Pool.close_all(dsn[1])
    # Hand the request a brand-new cursor; the fresh connection picks up
    # the current os.environ (e.g. PGOPTIONS).
    db = sql_db.db_connect(request.cr.dbname)
    request._cr = db.cursor()
def exp_drop(self, db_name): if db_name not in self.exp_list(True): return False openerp.modules.registry.RegistryManager.delete(db_name) sql_db.close_db(db_name) db = sql_db.db_connect('postgres') cr = db.cursor() cr.autocommit(True) # avoid transaction block try: # Try to terminate all other connections that might prevent # dropping the database try: # PostgreSQL 9.2 renamed pg_stat_activity.procpid to pid: # http://www.postgresql.org/docs/9.2/static/release-9-2.html#AEN110389 pid_col = 'pid' if cr._cnx.server_version >= 90200 else 'procpid' cr.execute("""SELECT pg_terminate_backend(%(pid_col)s) FROM pg_stat_activity WHERE datname = %%s AND %(pid_col)s != pg_backend_pid()""" % {'pid_col': pid_col}, (db_name,)) except Exception: pass try: cr.execute('DROP DATABASE "%s"' % db_name) except Exception, e: _logger.error('DROP DB: %s failed:\n%s', db_name, e) raise Exception("Couldn't drop database %s: %s" % (db_name, e)) else:
def import_validator(self, csv_import_data, task_id=None):
    """Import contact rows from *csv_import_data* into contact.manager.

    Runs on a dedicated cursor so each row can be committed independently
    of the caller's transaction.

    :param csv_import_data: iterable of rows; columns 1..5 map to name,
        age, address, phone and prueba (column 0 is ignored)
    :param task_id: unused, kept for interface compatibility
    :return: True
    """
    with api.Environment.manage():
        new_cr = db_connect(self.env.cr.dbname).cursor()
        self.env = api.Environment(new_cr, self.env.uid, self.env.context)
        total_records = 0
        try:
            obj_csv_manager = self.env['contact.manager']
            for row in csv_import_data:
                total_records += 1
                data = {
                    'name': row[1],
                    'age': row[2],
                    'address': row[3],
                    'phone': row[4],
                    'prueba': row[5],
                }
                obj_csv_manager.create(data)
                # Commit per row so a later failure does not roll back the
                # records already imported.
                self.env.cr.commit()
            _logger.info('%s records inserted', total_records)
        finally:
            # Always release the dedicated cursor (the original leaked it
            # when a row failed to import).
            if not self.env.cr.closed:
                self.env.cr.close()
    return True
def exp_list(self, document=False):
    """Return the sorted list of database names visible to this server.

    Raises AccessDenied when database listing is disabled in the
    configuration (unless *document* is set). Any error while querying
    PostgreSQL yields an empty list.
    """
    if not tools.config['list_db'] and not document:
        raise openerp.exceptions.AccessDenied()
    chosen_template = tools.config['db_template']
    templates_list = tuple(set(['template0', 'template1', 'postgres', chosen_template]))
    cr = sql_db.db_connect('postgres').cursor()
    try:
        try:
            # Figure out which PostgreSQL role owns our databases.
            db_user = tools.config["db_user"]
            if not db_user and os.name == 'posix':
                import pwd
                db_user = pwd.getpwuid(os.getuid())[0]
            if not db_user:
                cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
                row = cr.fetchone()
                db_user = row and str(row[0])
            if db_user:
                cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in %s order by datname", (db_user, templates_list))
            else:
                cr.execute("select datname from pg_database where datname not in %s order by datname", (templates_list,))
            databases = [tools.ustr(row[0]) for row in cr.fetchall()]
        except Exception:
            # Listing is best-effort: on any error pretend there is nothing.
            databases = []
    finally:
        cr.close()
    databases.sort()
    return databases
def get_unused_indexes(self, name):
    """Return the non-unique indexes of database *name* that have never
    been scanned (idx_scan = 0), i.e. candidates for removal.

    :param name: database name
    :return: list of dicts with keys 'table' and 'unused_index'
    """
    cr = sql_db.db_connect(name).cursor()
    try:
        cr.execute("""SELECT relid::regclass as table, indexrelid::regclass as unused_index
                      FROM pg_stat_user_indexes
                      JOIN pg_index USING (indexrelid)
                      WHERE idx_scan = 0 AND indisunique IS FALSE
                      AND pg_relation_size(relid::regclass) > 0;""")
        return cr.dictfetchall()
    finally:
        # The original never closed the cursor; always release it.
        cr.close()
def pentaho_temp_users_unlink(self):
    """Delete temporary pentaho users in an independent transaction.

    The work runs on a fresh cursor so it can be committed regardless of
    the state of the caller's transaction.
    """
    with api.Environment.manage():
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        try:
            new_env = self.env(new_cr, self.env.uid, {})
            self.with_env(new_env)._pentaho_temp_users_unlink()
            new_cr.commit()
        finally:
            # Release the cursor even if the unlink fails (the original
            # leaked it on exception).
            new_cr.close()
def db_list(self):
    """Get the list of available databases, with FTPd support"""
    s = openerp.service.db
    result = s.exp_list(document=True)
    self.db_name_list = []
    for db_name in result:
        cr = None
        try:
            try:
                cr = sql_db.db_connect(db_name).cursor()
                # Skip databases that are not (yet) OpenERP databases.
                cr.execute("SELECT 1 FROM pg_class WHERE relkind = 'r' AND relname = 'ir_module_module'")
                if not cr.fetchone():
                    continue
                # Only offer databases where document_ftp is installed or
                # about to be.
                cr.execute("SELECT id FROM ir_module_module WHERE name = 'document_ftp' AND state IN ('installed', 'to install', 'to upgrade') ")
                if cr.fetchone():
                    self.db_name_list.append(db_name)
                cr.commit()
            except Exception:
                self._log.warning('Cannot use db "%s".', db_name)
        finally:
            if cr is not None:
                cr.close()
    return self.db_name_list
def logout(self, redirect='/web'):
    """Log the current user out, recording an access-control 'OUT' event
    in control.access before the session is destroyed.

    Event logging is best-effort: any failure is logged and ignored so
    the logout itself always succeeds.
    """
    cr = None
    try:
        superuser = openerp.SUPERUSER_ID
        uid = request.session.uid
        login = request.session.login
        url = request.httprequest.base_url
        ip = request.httprequest.remote_addr
        session_id = request.session_id
        dbname = request.session.db
        # Get database connexion
        db = sql_db.db_connect(
            dbname)  # You can get the db name from config
        cr = db.cursor()
        pool = pooler.get_pool(cr.dbname)
        pool.get('control.access').create(
            cr, superuser, {
                'user_id': uid,
                'user_name': login,
                'url': url,
                'ip': ip,
                'db': dbname,
                'type': 'OUT',
                'session': session_id,
            })
        cr.commit()
    except Exception:
        # Best-effort only; the original printed "error logout" and
        # leaked the cursor on failure.
        _logger.exception('error logout')
    finally:
        if cr is not None:
            cr.close()
    request.session.logout(keep_db=True)
    return werkzeug.utils.redirect(redirect, 303)
def index(self, **kw):
    """Redirect the visitor to the shop route configured for the request
    host, falling back to the 'default_shop_redirect' parameter and
    finally to '/home'.
    """
    redirection_url = None
    dbname = request.session.db
    uid = openerp.SUPERUSER_ID
    context = request.session.context
    serverhost = request.httprequest.host
    # Get database connexion
    cr = None
    try:
        db = sql_db.db_connect(
            dbname)  # You can get the db name from config
        cr = db.cursor()
        pool = pooler.get_pool(cr.dbname)
        ids = pool.get('shop.redirect').search(
            cr, uid, [('serverhost', 'ilike', serverhost)],
            limit=1, context=context)
        shop_redirect = pool.get('shop.redirect').browse(
            cr, uid, ids, context=context)[0]
        redirection_url = shop_redirect.route if shop_redirect != None else None
        default_shop_redirect = pool.get('ir.config_parameter').get_param(
            cr, uid, 'default_shop_redirect', context=context)
        redirection_url = default_shop_redirect if not redirection_url and default_shop_redirect else redirection_url
    except Exception:
        # Best effort: fall through to the default redirection.
        pass
    finally:
        # The original leaked the cursor when an exception occurred
        # before cr.close(); always release it.
        if cr is not None:
            cr.close()
    if not redirection_url:
        redirection_url = '/home'
    return request.redirect(redirection_url)
def run(self): cr, uid, context = self.env.args if not context.get('do_not_use_new_cursor'): intervention_cr = sql_db.db_connect(cr.dbname).cursor() else: intervention_cr = cr intervention_vals = {} try: _logger.info( 'Running script: %s\nCode:\n%s' % (self.name.encode('utf-8'), self.code.encode('utf-8'))) result = self.with_env(self.env(cr=intervention_cr))._run() self.console = result[0][0] if not context.get('do_not_use_new_cursor') and context.get( 'test_mode'): _logger.info('TEST MODE: Script rollbacking') intervention_cr.rollback() elif not context.get('do_not_use_new_cursor'): intervention_cr.commit() _logger.info('Script execution SUCCEEDED: %s\n' % (self.name.encode('utf-8'), )) except Exception, e: _logger.error('Script execution FAILED: %s\nError:\n%s' % (self.name.encode('utf-8'), _get_exception_message(e).encode('utf-8'))) import sys, traceback print "Exception in user code:" print '-' * 60 traceback.print_exc(file=sys.stdout) print '-' * 60
def exp_list(self, document=False):
    """Return the sorted list of database names owned by the configured
    (or posix) PostgreSQL user, connecting through 'template1'.

    :raises Exception: 'AccessDenied' when db listing is disabled
    """
    if not tools.config['list_db'] and not document:
        raise Exception('AccessDenied')
    db = sql_db.db_connect('template1')
    cr = db.cursor()
    try:
        try:
            # Figure out which PostgreSQL role owns our databases.
            db_user = tools.config["db_user"]
            if not db_user and os.name == 'posix':
                import pwd
                db_user = pwd.getpwuid(os.getuid())[0]
            if not db_user:
                cr.execute("select decode(usename, 'escape') from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
                res = cr.fetchone()
                db_user = res and str(res[0])
            if db_user:
                cr.execute("select decode(datname, 'escape') from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres') order by datname", (db_user,))
            else:
                cr.execute("select decode(datname, 'escape') from pg_database where datname not in('template0', 'template1','postgres') order by datname")
            res = [str(name) for (name,) in cr.fetchall()]
        except Exception:
            # Listing is best-effort: any error yields an empty list.
            res = []
    finally:
        cr.close()
    res.sort()
    return res
def index(self, **kw):
    """Redirect the visitor to the shop route configured for the request
    host, falling back to the 'default_shop_redirect' parameter and
    finally to '/home'.
    """
    redirection_url = None
    dbname = request.session.db
    uid = openerp.SUPERUSER_ID
    context = request.session.context
    serverhost = request.httprequest.host
    # Get database connexion
    cr = None
    try:
        db = sql_db.db_connect(dbname)  # You can get the db name from config
        cr = db.cursor()
        pool = pooler.get_pool(cr.dbname)
        ids = pool.get('shop.redirect').search(cr, uid, [('serverhost', 'ilike', serverhost)], limit=1, context=context)
        shop_redirect = pool.get('shop.redirect').browse(cr, uid, ids, context=context)[0]
        redirection_url = shop_redirect.route if shop_redirect != None else None
        default_shop_redirect = pool.get('ir.config_parameter').get_param(cr, uid, 'default_shop_redirect', context=context)
        redirection_url = default_shop_redirect if not redirection_url and default_shop_redirect else redirection_url
    except Exception:
        # Best effort: fall through to the default redirection.
        pass
    finally:
        # The original leaked the cursor when an exception occurred
        # before cr.close(); always release it.
        if cr is not None:
            cr.close()
    if not redirection_url:
        redirection_url = '/home'
    return request.redirect(redirection_url)
def set_factory_cost(self, line):
    """Spread the cost of *line* over every production lot finished in
    the line's [start_date, end_date) period.

    The line's unit price times quantity is converted into a cost per kg
    of total finished production and added to the unit cost of every
    affected lot; the lot's feed events are updated accordingly. All work
    happens on a dedicated cursor, committed on success.

    :param line: cost line (start_date, end_date, product_qty)
    :return: True
    """
    new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
    uid, context = self.env.uid, self.env.context
    try:
        with api.Environment.manage():
            new_env = api.Environment(new_cr, uid, context)
            productions = new_env['mrp.production'].search(
                [('date_finished', '>=', line.start_date),
                 ('date_finished', '<', line.end_date)])
            moves = []
            afected_lots = []
            total_feed = 0
            for production in productions:
                total_feed = total_feed + production.product_qty
                for mov in production.move_created_ids2:
                    moves.append(mov)
            amount = self.get_unit_price(line, new_env) * line.product_qty
            # NOTE(review): raises ZeroDivisionError when no production was
            # finished in the period (total_feed == 0) — confirm intended.
            cost_per_kg = amount / total_feed
            for mov in moves:
                for quant in mov.quant_ids:
                    lot = quant.lot_id
                    if lot.id not in afected_lots:
                        afected_lots.append(lot.id)
                        lot.unit_cost = lot.unit_cost + cost_per_kg
                        self.set_afected_feed_events(
                            lot, cost_per_kg, line, new_env)
            new_env.cr.commit()
    finally:
        # Release the dedicated cursor even on failure (the original
        # leaked it when an exception occurred before close()).
        new_cr.close()
    return True
def run(self, cr, uid, ids, context=None): context = context or {} if isinstance(ids, (int, long)): ids = [ids] intervention_obj = self.pool.get('smile.script.intervention') for script in self.browse(cr, uid, ids, context): if not context.get('test_mode'): if script.state != 'validated': raise orm.except_orm(_('Error!'), _('You can only run validated scripts!')) if script.automatic_dump: self.dump_database(cr) intervention_id = intervention_obj.create(cr, uid, {'script_id': script.id, 'test_mode': context.get('test_mode')}, context) logger = SmileDBLogger(cr.dbname, 'smile.script.intervention', intervention_id, uid) if not context.get('do_not_use_new_cursor'): intervention_cr = sql_db.db_connect(cr.dbname).cursor() else: intervention_cr = cr intervention_vals = {} try: _logger.info('Running script: %s\nCode:\n%s' % (script.name.encode('utf-8'), script.code.encode('utf-8'))) result = self._run(intervention_cr, uid, script, intervention_id, logger, context) if not context.get('do_not_use_new_cursor') and context.get('test_mode'): logger.info('TEST MODE: Script rollbacking') intervention_cr.rollback() elif not context.get('do_not_use_new_cursor'): intervention_cr.commit() intervention_vals.update({'state': 'done', 'result': result}) _logger.info('Script execution SUCCEEDED: %s\n' % (script.name.encode('utf-8'),)) except Exception, e: intervention_vals.update({'state': 'exception', 'result': _get_exception_message(e)}) _logger.error('Script execution FAILED: %s\nError:\n%s' % (script.name.encode('utf-8'), _get_exception_message(e).encode('utf-8'))) finally:
def exp_list(self, document=False):
    """Return the sorted list of database names owned by the configured
    (or posix) PostgreSQL user, connecting through 'template1' and
    excluding the template databases.

    :raises openerp.exceptions.AccessDenied: when db listing is disabled
    """
    if not tools.config['list_db'] and not document:
        raise openerp.exceptions.AccessDenied()
    chosen_template = tools.config['db_template']
    templates_list = tuple(set(['template0', 'template1', 'postgres', chosen_template]))
    db = sql_db.db_connect('template1')
    cr = db.cursor()
    try:
        try:
            # Figure out which PostgreSQL role owns our databases.
            db_user = tools.config["db_user"]
            if not db_user and os.name == 'posix':
                import pwd
                db_user = pwd.getpwuid(os.getuid())[0]
            if not db_user:
                cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
                res = cr.fetchone()
                db_user = res and str(res[0])
            if db_user:
                cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in %s order by datname", (db_user, templates_list))
            else:
                cr.execute("select datname from pg_database where datname not in %s order by datname", (templates_list,))
            res = [str(name) for (name,) in cr.fetchall()]
        except Exception:
            # Listing is best-effort: any error yields an empty list.
            res = []
    finally:
        cr.close()
    res.sort()
    return res
def write_log(self, import_operation='', import_file_name=''): """ This function will write log for a task. """ if CSVHolder.debug: _logger.info("Got request to write new log ") with api.Environment.manage(): self.env = api.Environment( db_connect(self.env.cr.dbname).cursor(), self.env.uid, self.env.context) if CSVHolder.debug: _logger.info(" Finally came to write log") try: data = { 'import_operation': import_operation, 'import_file_name': import_file_name } rec = self.create(data) self.env.cr.commit() if CSVHolder.debug: _logger.info(" Done write log" + str(rec)) except Exception, e: if CSVHolder.debug: _logger.info(" Error in write_log() " + str(e)) finally:
def exp_drop(self, db_name): if not self.exp_db_exist(db_name): return False openerp.modules.registry.RegistryManager.delete(db_name) sql_db.close_db(db_name) db = sql_db.db_connect('template1') cr = db.cursor() cr.autocommit(True) # avoid transaction block try: # Try to terminate all other connections that might prevent # dropping the database try: cr.execute("""SELECT pg_terminate_backend(procpid) FROM pg_stat_activity WHERE datname = %s AND procpid != pg_backend_pid()""", (db_name,)) except Exception: pass try: cr.execute('DROP DATABASE "%s"' % db_name) except Exception, e: _logger.error('DROP DB: %s failed:\n%s', db_name, e) raise Exception("Couldn't drop database %s: %s" % (db_name, e)) else:
def db_list(self):
    """Get the list of available databases, with FTPd support"""
    s = netsvc.ExportService.getService("db")
    result = s.exp_list(document=True)
    self.db_name_list = []
    for db_name in result:
        db, cr = None, None
        try:
            try:
                db = sql_db.db_connect(db_name)
                cr = db.cursor()
                # Skip databases that are not (yet) OpenERP databases.
                cr.execute("SELECT 1 FROM pg_class WHERE relkind = 'r' AND relname = 'ir_module_module'")
                if not cr.fetchone():
                    continue
                # Only offer databases where document_ftp is installed or
                # about to be.
                cr.execute(
                    "SELECT id FROM ir_module_module WHERE name = 'document_ftp' AND state IN ('installed', 'to install', 'to upgrade') "
                )
                res = cr.fetchone()
                if res and len(res):
                    self.db_name_list.append(db_name)
                cr.commit()
            except Exception:
                self._log.warning('Cannot use db "%s".', db_name)
        finally:
            # finally also runs on the 'continue' above.
            if cr is not None:
                cr.close()
    return self.db_name_list
def exp_drop(self, db_name): if db_name not in self.exp_list(True): return False openerp.modules.registry.RegistryManager.delete(db_name) sql_db.close_db(db_name) db = sql_db.db_connect('postgres') cr = db.cursor() cr.autocommit(True) # avoid transaction block try: # Try to terminate all other connections that might prevent # dropping the database try: # PostgreSQL 9.2 renamed pg_stat_activity.procpid to pid: # http://www.postgresql.org/docs/9.2/static/release-9-2.html#AEN110389 pid_col = 'pid' if cr._cnx.server_version >= 90200 else 'procpid' cr.execute( """SELECT pg_terminate_backend(%(pid_col)s) FROM pg_stat_activity WHERE datname = %%s AND %(pid_col)s != pg_backend_pid()""" % {'pid_col': pid_col}, (db_name, )) except Exception: pass try: cr.execute('DROP DATABASE "%s"' % db_name) except Exception, e: _logger.error('DROP DB: %s failed:\n%s', db_name, e) raise Exception("Couldn't drop database %s: %s" % (db_name, e)) else:
def _create_empty_database(self, name):
    """Create database *name* from template0 with unicode encoding."""
    cr = sql_db.db_connect('template1').cursor()
    try:
        cr.autocommit(True)  # avoid transaction block
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name)
    finally:
        cr.close()
def __init__(self, db_name):
    """Open a cursor on *db_name* and snapshot the state needed to decide
    whether/how the database should be upgraded.
    """
    self.db_name = db_name
    self.db = sql_db.db_connect(db_name)
    # NOTE(review): the cursor is kept open for the object's lifetime;
    # confirm who is responsible for closing it.
    self.cr = self.db.cursor()
    # Presumably: whether the database is still being created — verify
    # against _get_db_in_creation (not visible here).
    self.db_in_creation = self._get_db_in_creation()
    # Version of the running code vs. version recorded in the database.
    self.code_version = self._get_code_version()
    self.db_version = self._get_db_version()
    # Upgrade steps applicable between db_version and code_version
    # (presumably — see _get_upgrades).
    self.upgrades = self._get_upgrades()
def grant_asterisk_access(self):
    """Grant the 'asterisk' database role full access to the CDR table
    and its id sequence, so the Asterisk server can write call records.
    """
    cr = sql_db.db_connect(self.env.cr.dbname).cursor()
    try:
        cr.execute("GRANT ALL on asterisk_cdr to asterisk")
        cr.execute("GRANT ALL on asterisk_cdr_id_seq to asterisk")
        cr.commit()
    finally:
        # Always release the cursor (the original leaked it when a GRANT
        # failed, e.g. if the role does not exist).
        cr.close()
def exp_restore(self, db_name, data, replace_all_emails=False, new_email='',
                replace_all_passwd=False, new_passwd='',
                disable_all_crons=False):
    """Restore a pg_dump into a fresh database *db_name*, optionally
    anonymizing it afterwards.

    :param data: file object with the dump, fed to pg_restore's stdin
    :param replace_all_emails: overwrite every stored email with *new_email*
    :param replace_all_passwd: overwrite every user password with *new_passwd*
    :param disable_all_crons: deactivate all ir_cron entries
    :raises Exception: if the database already exists or pg_restore fails
    :return: True
    """
    try:
        self._set_pg_psw_env_var()
        if self.exp_db_exist(db_name):
            _logger.warning('RESTORE DB: %s already exists' % (db_name,))
            raise Exception("Database already exists")
        self._create_empty_database(db_name)
        cmd = ['pg_restore', '--no-owner']
        if tools.config['db_user']:
            # NOTE(review): this option was corrupted in the source
            # (redaction residue); reconstructed from the sibling options.
            cmd.append('--username=' + tools.config['db_user'])
        if tools.config['db_host']:
            cmd.append('--host=' + tools.config['db_host'])
        if tools.config['db_port']:
            cmd.append('--port=' + str(tools.config['db_port']))
        cmd.append('--dbname=' + db_name)
        if subprocess.call(cmd, stdin=data):
            raise Exception("Couldn't restore database")
        _logger.info('RESTORE DB: %s' % (db_name))

        db = sql_db.db_connect(db_name)
        cr = db.cursor()
        try:
            # Build the optional anonymization script; mogrify only quotes
            # the values, the statements run in one execute below.
            clean_up_query = ''
            if replace_all_emails:
                clean_up_query += cr.mogrify('''
                    UPDATE res_users SET email=%(new_email)s, user_email=%(new_email)s;
                    UPDATE res_partner_address SET email=%(new_email)s;
                    UPDATE email_template SET email_to=%(new_email)s, email_from=%(new_email)s;
                    UPDATE mail_message SET email_to=%(new_email)s, email_from=%(new_email)s, email_cc=%(new_email)s, email_bcc=%(new_email)s;''', (locals()))
            if replace_all_passwd:
                clean_up_query += cr.mogrify('''
                    UPDATE res_users SET password=%(new_passwd)s;''', (locals()))
            if disable_all_crons:
                clean_up_query += cr.mogrify('''
                    UPDATE ir_cron SET active=false;''', (locals()))
            if self.document_addon_installed(cr):
                clean_up_query += """UPDATE document_storage SET path=(SELECT substring(path FROM '%s.*%s')) || %r;""" % (os.sep, os.sep, str(locals().get('db_name', 'dev')))
            if clean_up_query:
                cr.execute(clean_up_query)
                cr.commit()
        finally:
            # Always release the cursor (the original leaked it when a
            # clean-up statement failed).
            cr.close()
        return True
    finally:
        self._unset_pg_psw_env_var()
def get_duplicated_indexes(self, name):
    """Return groups of indexes of database *name* that cover exactly the
    same columns (pg_index rows sharing indrelid and indkey).

    Creates the helper aggregate 'array_accum' on first use.

    :param name: database name
    :return: list of dicts with keys 'table' and 'duplicated_indexes'
    """
    cr = sql_db.db_connect(name).cursor()
    try:
        cr.execute("SELECT 0 FROM pg_proc WHERE proname = 'array_accum' AND proisagg;")
        if not cr.rowcount:
            cr.execute("CREATE AGGREGATE array_accum (anyelement) (sfunc = array_append, stype = anyarray, initcond = '{}');")
        cr.execute("""SELECT indrelid::regclass as table, array_accum(indexrelid::regclass) as duplicated_indexes
                      FROM pg_index
                      GROUP BY indrelid, indkey HAVING count(*) > 1;""")
        return cr.dictfetchall()
    finally:
        # The original never closed the cursor; always release it.
        cr.close()
def _create_empty_database(self, name):
    """Create database *name* from the configured db_template with
    unicode encoding."""
    chosen_template = tools.config['db_template']
    cr = sql_db.db_connect('template1').cursor()
    try:
        cr.autocommit(True)  # avoid transaction block
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (name, chosen_template))
    finally:
        cr.close()
def get_unused_indexes(self, name):
    """Return the non-unique indexes of database *name* that have never
    been scanned (idx_scan = 0), i.e. candidates for removal.

    :param name: database name
    :return: list of dicts with keys 'table' and 'unused_index'
    """
    cr = sql_db.db_connect(name).cursor()
    try:
        cr.execute(
            """SELECT relid::regclass as table, indexrelid::regclass as unused_index
               FROM pg_stat_user_indexes
               JOIN pg_index USING (indexrelid)
               WHERE idx_scan = 0 AND indisunique IS FALSE
               AND pg_relation_size(relid::regclass) > 0;"""
        )
        return cr.dictfetchall()
    finally:
        # The original never closed the cursor; always release it.
        cr.close()
def thread_method(dbname):
    """Post the pending notification on a fresh cursor after *delay*
    seconds (delay, uid, context and r come from the enclosing scope).

    Runs in a worker thread, hence the dedicated cursor/environment.
    """
    sleep(delay)
    cursor = sql_db.db_connect(dbname).cursor()
    try:
        with api.Environment.manage():
            env = api.Environment(cursor, uid, context)
            env.user.post_notification(**r)
            cursor.commit()
    finally:
        # Always release the cursor (the original leaked it when
        # post_notification raised).
        cursor.close()
    return True
def f():
    """Deferred YAML import: wait *delay* seconds, then replay
    *file_name* into module *module_name* on a fresh cursor (all three
    come from the enclosing scope)."""
    time.sleep(delay)
    cr = fp = None
    try:
        cr = sql_db.db_connect(db_name).cursor()
        fp = misc.file_open(file_name)
        convert_yaml_import(cr, module_name, fp, {}, 'update', True)
    finally:
        # Release whichever resources were actually acquired.
        if fp:
            fp.close()
        if cr:
            cr.close()
def update_module(self, db_name, module, rel_id=False): """ Actualiza un modulo sobre la base de datos """ # Crea un nuevo registro db = ws() context={} uid = 1 mod_obj = self.pool.get('ir.module.module') data_obj = self.pool.get('ir.model.data') mod_rel_obj = self.pool.get('admon.database.module.relation') # Valida que exista la base de datos db_exist = db.exp_db_exist(db_name) if db_exist == False: raise osv.except_osv(_('Error!'),_("la base de datos que intenta modificar no existe '%s'")%(db_name,)) db = sql_db.db_connect(db_name) cr = db.cursor() cr.autocommit(True) # avoid transaction block try: # Revisa si el modulo existe en la base de datos mod_ids = mod_obj.search(cr, uid, [('name','=',module)], context=context) if mod_ids: # Revisa si el modulo esta instalado mod = mod_obj.browse(cr, uid, mod_ids[0], context=context) print "************ update modules ********** ",db_name, " - ", module, " - ", mod.state if mod.state == 'installed': print "********** update ", db_name, " ************** ", module mod_obj.button_immediate_upgrade(cr, uid, [mod.id], context=context) elif mod.state == 'to_install': print "********** to install ", db_name, " ************** ", module mod.obj.button_install_cancel(cr, uid, [mod.id], context=context) mod_obj.button_immediate_install(cr, uid, [mod.id], context=context) elif mod.state == 'uninstalled': print "********** install ", db_name, " ************* ", module mod_obj.button_immediate_install(cr, uid, [mod.id], context=context) elif mod.state == 'to_upgrade': print "********** to upgrade ", db_name, " ************** ", module mod.obj.button_upgrade_cancel(cr, uid, [mod.id], context=context) mod_obj.button_immediate_upgrade(cr, uid, [mod.id], context=context) else: # Si ocurre un error pone el modulo como cancelado mod.obj.button_upgrade_cancel(cr, uid, [mod.id], context=context) # Pone el campo de if rel_id: update_date = time.strftime('%Y-%m-%d %H:%M:%S') mod_rel_obj.write(cr, uid, [rel_id], {'state':'noupdated', 
'date_update':update_date}) print "*********** error de instalacion de modulo ************* " raise osv.except_osv(_('Error!'),_("No se pudo instalar el modulo '%s' en la base de datos '%s', realice el proceso directo de la base de datos")%(module,db_name)) finally: #print "************* finaliza conexion - ", db_name, " *************** " cr.close() #sql_db.close_db(db_name) return True
def exp_duplicate_database(self, db_original_name, db_name):
    """Clone database *db_original_name* into a new database *db_name*
    by using it as a PostgreSQL template."""
    _logger.info('Duplicate database `%s` to `%s`.', db_original_name, db_name)
    # Close pooled connections to the source db so PostgreSQL accepts it
    # as a template.
    sql_db.close_db(db_original_name)
    cr = sql_db.db_connect('postgres').cursor()
    try:
        cr.autocommit(True)  # avoid transaction block
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (db_name, db_original_name))
    finally:
        cr.close()
    return True
def import_sale_order(self):
    """Check the connection to the remote sale-order source on a
    dedicated cursor.

    :raises Warning: when connecting or testing the connection fails
    """
    new_cr = None
    try:
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        uid, context = self.env.uid, self.env.context
        with api.Environment.manage():
            self.env = api.Environment(new_cr, uid, context)
            self.test_connection()
    except Exception:
        raise Warning(_('Facing a problems while importing sale order!'))
    finally:
        # Close only the dedicated cursor: the original closed
        # self.env.cr, which is still the *caller's* cursor when the
        # failure happens before the environment swap.
        if new_cr is not None and not new_cr.closed:
            new_cr.close()
def __call__(self, serv, id, db_name, demo, lang, user_password='******'):
    """Create-database worker (legacy netsvc-logging variant): initialize
    *db_name*, load translations and set the admin password directly in
    SQL. Results and errors are recorded in serv.actions[id].

    NOTE(review): the user_password default looks like redaction residue
    from the published source — confirm the real default value.
    """
    cr = None
    try:
        serv.actions[id]['progress'] = 0
        cr = sql_db.db_connect(db_name).cursor()
        openerp.modules.db.initialize(cr)
        # TODO this should be removed as it is done by pooler.restart_pool.
        tools.config['lang'] = lang
        cr.commit()
        cr.close()
        pool = pooler.restart_pool(db_name, demo, serv.actions[id], update_module=True)[1]
        cr = sql_db.db_connect(db_name).cursor()
        if lang:
            # Load translations for every installed module.
            modobj = pool.get('ir.module.module')
            mids = modobj.search(cr, 1, [('state', '=', 'installed')])
            modobj.update_translations(cr, 1, mids, lang)
        # Set the admin password/language directly in SQL.
        cr.execute('UPDATE res_users SET password=%s, context_lang=%s, active=True WHERE login=%s', (
            user_password, lang, 'admin'))
        cr.execute('SELECT login, password, name ' \
                   '  FROM res_users ' \
                   ' ORDER BY login')
        serv.actions[id]['users'] = cr.dictfetchall()
        serv.actions[id]['clean'] = True
        cr.commit()
        cr.close()
    except Exception, e:
        serv.actions[id]['clean'] = False
        serv.actions[id]['exception'] = e
        import traceback
        e_str = StringIO()
        traceback.print_exc(file=e_str)
        traceback_str = e_str.getvalue()
        e_str.close()
        netsvc.Logger().notifyChannel('web-services', netsvc.LOG_ERROR, 'CREATE DATABASE\n%s' % (traceback_str))
        serv.actions[id]['traceback'] = traceback_str
        if cr:
            cr.close()
def get_missing_indexes(self, name):
    """Return sequential-scan statistics hinting at missing indexes.

    Queries ``pg_stat_all_tables`` on database ``name`` for public tables
    larger than ~80kB where sequential scans outnumber index scans.

    :param name: database name to connect to.
    :return: list of dicts (table, too_much_seq, index, table_size,
             seq_scan, idx_scan), worst offenders first.
    """
    cr = sql_db.db_connect(name).cursor()
    # try/finally: the original leaked the cursor (never closed it).
    try:
        cr.execute("""SELECT relname as table,
                             seq_scan-idx_scan as too_much_seq,
                             case when seq_scan-idx_scan>0 THEN 'Missing Index?' ELSE 'OK' END as index,
                             pg_relation_size(relname::regclass) as table_size,
                             seq_scan, idx_scan
                      FROM pg_stat_all_tables
                      WHERE schemaname='public'
                        AND pg_relation_size(relname::regclass)>80000
                      ORDER BY too_much_seq DESC;""")
        return cr.dictfetchall()
    finally:
        cr.close()
def get_cursor(self):
    """Open a cursor on the configured log database.

    Ensures the ``sessionstore`` table exists (idempotent DDL) before
    handing the open cursor back; closing it is the caller's job.
    """
    log_db = config.get('log_db')
    cursor = db_connect(log_db).cursor()
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS sessionstore (
            id varchar(40),
            data bytea
        );
    """)
    cursor.commit()
    return cursor
def get_duplicated_indexes(self, name):
    """Return groups of indexes that duplicate each other.

    Ensures the ``array_accum`` aggregate exists (older PostgreSQL
    releases lack ``array_agg``), then groups ``pg_index`` rows by
    (indrelid, indkey) to find tables indexed twice on the same columns.

    :param name: database name to connect to.
    :return: list of dicts (table, duplicated_indexes).
    """
    cr = sql_db.db_connect(name).cursor()
    # try/finally: the original leaked the cursor (never closed it).
    try:
        cr.execute(
            "SELECT 0 FROM pg_proc WHERE proname = 'array_accum' AND proisagg;")
        if not cr.rowcount:
            cr.execute(
                "CREATE AGGREGATE array_accum (anyelement) (sfunc = array_append, stype = anyarray, initcond = '{}');"
            )
        cr.execute(
            """SELECT indrelid::regclass as table,
                      array_accum(indexrelid::regclass) as duplicated_indexes
               FROM pg_index
               GROUP BY indrelid, indkey
               HAVING count(*) > 1;""")
        return cr.dictfetchall()
    finally:
        cr.close()
def get_cursor(self):
    """Return an open cursor on the log database.

    Lazily creates the ``sessionstore`` table if missing; the caller
    owns (and must close) the returned cursor.
    """
    connection = db_connect(config.get('log_db'))
    cr = connection.cursor()
    ddl = """
        CREATE TABLE IF NOT EXISTS sessionstore (
            id varchar(40),
            data bytea
        );
    """
    cr.execute(ddl)
    cr.commit()
    return cr
def exp_drop(self, db_name): openerp.modules.registry.RegistryManager.delete(db_name) sql_db.close_db(db_name) db = sql_db.db_connect('template1') cr = db.cursor() cr.autocommit(True) # avoid transaction block try: try: cr.execute('DROP DATABASE "%s"' % db_name) except Exception, e: _logger.error('DROP DB: %s failed:\n%s', db_name, e) raise Exception("Couldn't drop database %s: %s" % (db_name, e)) else:
def exp_rename(self, old_name, new_name): openerp.modules.registry.RegistryManager.delete(old_name) sql_db.close_db(old_name) db = sql_db.db_connect('postgres') cr = db.cursor() cr.autocommit(True) # avoid transaction block try: try: cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name)) except Exception, e: _logger.error('RENAME DB: %s -> %s failed:\n%s', old_name, new_name, e) raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e)) else:
def import_product_product(self):
    """Import Shopify products (templates + variants) into Odoo.

    Opens a fresh cursor/environment, validates the Shopify connection,
    then creates a product.template (with image and category when
    available) and one product.product per variant for every Shopify
    product not yet imported (matched on ``shopify_product_id``).

    :raises Warning: if any step of the import fails.
    """
    new_cr = None
    try:
        # (removed leftover debug `print ":::::::::::"`)
        new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
        uid, context = self.env.uid, self.env.context
        with api.Environment.manage():
            self.env = api.Environment(new_cr, uid, context)
            self.test_connection()
            session = ConnectorSession(self.env.cr, self.env.uid, context=self.env.context)
            self.import_products_from_date = datetime.now()
            products = shopify.Product.find()
            for product in products:
                vals_product_tmpl = {}
                dict_attr = product.__dict__['attributes']
                # Skip products already imported earlier.
                if not session.search('product.template', [('shopify_product_id', '=', dict_attr['id'])]):
                    image_urls = [getattr(i, 'src') for i in product.images]
                    if len(image_urls) > 0:
                        # Fetch the first image and store it base64-encoded.
                        photo = base64.encodestring(urllib2.urlopen(image_urls[0]).read())
                        vals_product_tmpl.update({'image_medium': photo})
                    custom_collection = shopify.CustomCollection.find(product_id=dict_attr['id'])
                    if custom_collection:
                        for categ in custom_collection:
                            product_cate_obj = session.search('product.category', [('shopify_product_cate_id', '=', categ.__dict__['attributes']['id'])])
                            if product_cate_obj:
                                vals_product_tmpl.update({'categ_id': product_cate_obj[0]})
                    vals_product_tmpl.update({'name': dict_attr['title'],
                                              'type': 'consu',
                                              'shopify_product_id': dict_attr['id'],
                                              'description': dict_attr['body_html'],
                                              'state': 'add'})
                    product_tid = session.create('product.template', vals_product_tmpl)
                    # Commit per product so a late failure keeps earlier work.
                    new_cr.commit()
                    variants = dict_attr['variants']
                    for variant in variants:
                        dict_variant = variant.__dict__['attributes']
                        u = session.create('product.product', {'product_tmpl_id': product_tid,
                                                               'product_sfy_variant_id': dict_variant['id']})
                        new_cr.commit()
    # Was a bare `except:` — that also swallowed KeyboardInterrupt/SystemExit.
    except Exception:
        raise Warning(_('Facing a problems while importing product!'))
    finally:
        # Close only the cursor we opened; the original closed
        # `self.env.cr`, which was the caller's cursor if db_connect()
        # failed before the environment was swapped.
        if new_cr is not None:
            new_cr.close()
def _create_empty_database(self, name):
    """Create database ``name`` from the configured template.

    :param name: name of the database to create.
    :raises openerp.exceptions.Warning: if a database with that name
        already exists.
    """
    db = sql_db.db_connect('postgres')
    cr = db.cursor()
    # try/finally around everything: the original opened the cursor,
    # ran the existence check, and raised BEFORE its try/finally —
    # leaking the cursor on the "already exists" path.
    try:
        chosen_template = tools.config['db_template']
        cr.execute("""SELECT datname FROM pg_database WHERE datname = %s """, (name,))
        if cr.fetchall():
            raise openerp.exceptions.Warning(" %s database already exists!" % name )
        # CREATE DATABASE cannot run inside a transaction block.
        cr.autocommit(True)
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (name, chosen_template))
    finally:
        cr.close()
def run_dialer(self):
    # Drive an outbound dialing campaign: originate calls for each contact,
    # record CDRs, then mark the dialer done and notify the Stasis app.
    # NOTE(review): several names used below ('contacts', 'channel',
    # 'channelId', 'otherChannelId', 'client') are never defined in this
    # function — the implementation looks incomplete / work-in-progress;
    # confirm before relying on it.
    context = self.env.context
    uid = self.env.uid
    # Dedicated cursor so commits here don't interfere with the caller's
    # transaction; self.env is rebound onto it for the duration.
    cr = sql_db.db_connect(self.env.cr.dbname).cursor()
    with api.Environment.manage():
        self.env = env = api.Environment(cr, uid, context)
        dialer = self
        cr.commit()
        cr.commit()  # NOTE(review): duplicated commit — presumably leftover debugging
        # Get possible call load based on simult restriction
        channel_count = env['asterisk.dialer.channel'].search_count([('dialer', '=', dialer.id)])
        cr.commit()
        cr.commit()  # NOTE(review): duplicated commit again
        # NOTE(review): call_limit is computed but never used below.
        call_limit = dialer.simult - channel_count
        for contact in contacts:
            # Check cancel request:
            # NOTE(review): cancel request is detected but not acted upon
            # (no break/return) — verify intended behavior.
            if dialer.cancel_request:
                pass
            print 'DOING CONTACT', contact
            # Generate channel ids
            # NOTE(review): no code actually generates 'channel' /
            # 'channelId' / 'otherChannelId' here — missing implementation?
            print 'CREATED CHANNEL', channel
            env.cr.commit()
            # Create cdr
            cdr = env['asterisk.dialer.cdr'].create({
                'dialer': dialer.id,
                'channel_id': channelId,
                'other_channel_id': otherChannelId,
                'phone': contact.phone,
                'name': contact.name,
                'status': 'PROGRESS',
                'start_time': datetime.datetime.now(),
            })
            print 'CREATED CDR', cdr
            env.cr.commit()
        print 'SETTING DONE'
        # Mark the campaign finished and persist it.
        dialer.state = 'done'
        print 'STATE', dialer['state']
        env.cr.commit()
        env.cr.close()
        # Ask the ARI/Stasis dialer application for this record to shut down.
        client.events.userEvent(eventName='exit_request', application='odoo-dialer-%s' % self.id)
        print 'SENT exit_request to Stasis app'