def esale_export_attachment_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export product attachments through django.connect.

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name rather than self.pool:
        # this runs in its own thread with its own cursor.
        product = pool.get('django.connect').ssh_command(
            cr, uid, sale, values, context)
        cr.commit()
    finally:
        # Always release the cursor, even if ssh_command raises.
        cr.close()
    if product:
        LOGGER.notifyChannel('e-Sale', netsvc.LOG_INFO,
                             "Attachment Export Running.")
        return True
    LOGGER.notifyChannel('e-Sale', netsvc.LOG_ERROR,
                         "Error connection to server.")
    return False
def core_sync_products_thread(self, db_name, uid, magento_app, products, store_view, context=None):
    """Thread worker: create/synchronise Magento products.

    :param db_name: database for the thread-local cursor
    :param magento_app: Magento APP ID (int); rebound below to its browse record
    :param products: list of product dicts (each has a 'type' key)
    :param store_view: Magento store view passed through to product creation
    :param context: dict
    :return: True
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    # NOTE(review): self.pool is used together with the thread-local cursor;
    # presumably both refer to the same registry — confirm before changing.
    magento_app = self.pool.get('magento.app').browse(cr, uid, magento_app)
    # We have list first product simple and after product configurable
    # First, only create product configurable (and product simple related in this product configurable)
    # After, create product simple. If this simple was created, skip
    for product in products:
        if product['type'] == 'configurable':
            self.pool.get('product.product').magento_create_product_type(cr, uid, magento_app, product, store_view, context)
    #Uncomment second part import only configurable products (id from to)
    for product in products:
        if product['type'] != 'configurable':
            self.pool.get('product.product').magento_create_product_type(cr, uid, magento_app, product, store_view, context)
    LOGGER.notifyChannel('Magento App', netsvc.LOG_INFO,
                         "End Sync Products magento app %s." % (magento_app.name))
    self.pool.get('magento.log').create_log(cr, uid, magento_app,
                                            'product.product', 0, '', 'done',
                                            _('Finish Import/Update products') )
    # NOTE(review): no cr.commit() before close — presumably the called
    # methods commit internally; verify, otherwise work is rolled back.
    cr.close()
    return True
def zoook_export_manufacturers_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export manufacturers through django.connect.

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name (not self.pool) — this runs
        # on its own thread/cursor.
        manufacturer = pool.get('django.connect').ssh_command(
            cr, uid, sale, values, context)
        cr.commit()
    finally:
        # Release the cursor even when ssh_command raises.
        cr.close()
    if manufacturer:
        self._logger.info("Manufacturers Export Running.")
        return True
    self._logger.error("Error connection to server.")
    return False
def _get_cursor(self, dbname):
    """Return an open cursor for *dbname*, reusing the cached one when possible."""
    cached = self._dbname_to_cr.get(dbname)
    if cached and not cached.closed:
        return cached
    # No usable cached cursor: open a fresh one and remember it.
    database, _unused_pool = pooler.get_db_and_pool(dbname, pooljobs=False)
    fresh = database.cursor()
    self._dbname_to_cr[dbname] = fresh
    return fresh
def zoook_export_images_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export images through django.connect.

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name (not self.pool) — thread-local.
        image = pool.get('django.connect').ssh_command(
            cr, uid, sale, values, context)
        cr.commit()
    finally:
        # Release the cursor even when ssh_command raises.
        cr.close()
    if image:
        LOGGER.notifyChannel('ZoooK Connection', netsvc.LOG_INFO,
                             "Image Export Running.")
        return True
    LOGGER.notifyChannel('ZoooK Connection', netsvc.LOG_ERROR,
                         "Error connection to server.")
    return False
def _run_process(self, db_name, uid, data, context):
    """Run the etl.job.process scheduler on a dedicated cursor.

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when ``run_scheduler`` raises (matches the sibling
    _procure_calculation_* helpers in this file).
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        process_obj = pool.get('etl.job.process')
        process_obj.run_scheduler(cr, uid, False, use_new_cursor=cr.dbname,
                                  context=context)
    finally:
        cr.close()
    return {}
def get_cr(self, uri):
    """Log in against the database named in *uri* and return (cr, uid, pool, parts)."""
    local = self.uri2local(uri)
    segments = local.split('/')
    dbname = segments[1]
    uid = security.login(dbname, dav_auth.auth['user'], dav_auth.auth['pwd'])
    database, pool = pooler.get_db_and_pool(dbname)
    return database.cursor(), uid, pool, segments[1:]
def _poolJobs(self, dbname, check=False):
    """Run due ir_cron jobs and store a per-job report (Smile addition).

    Fixes: the original opened a ``try:`` after creating the cursor with no
    matching except/finally at all, and never closed the cursor. A handler
    mirroring the sibling _poolJobs variants and a ``finally: cr.close()``
    are added.
    """
    # Added by Smile: report template, filled with start/end time per job.
    report = """Here is the action scheduling report. Start Time: %s End Time: %s """
    try:
        db, pool = pooler.get_db_and_pool(dbname)
    except Exception:
        # Database unavailable (dropped, initialising, ...).
        return False
    cr = db.cursor()
    try:
        if not pool._init:
            now = datetime.now()
            cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
            for job in cr.dictfetchall():
                # Added by Smile
                start_time = time.strftime('%Y-%m-%d %H:%M:%S')
                try:
                    nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                    numbercall = job['numbercall']
                    ok = False
                    # Replay every missed slot; numbercall < 0 means unlimited.
                    while nextcall < now and numbercall:
                        if numbercall > 0:
                            numbercall -= 1
                        if not ok or job['doall']:
                            self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                        if numbercall:
                            nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
                        ok = True
                    addsql = ''
                    if not numbercall:
                        # Job exhausted: deactivate it.
                        addsql = ', active=False'
                    cr.execute("update ir_cron set nextcall=%s, numbercall=%s" + addsql + " where id=%s",
                               (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
                    # Added by Smile
                    report += "No errors"
                except Exception as e:
                    report += "Exception: \n" + tools.ustr(e)
                end_time = time.strftime('%Y-%m-%d %H:%M:%S')
                self.write(cr, 1, job['id'], {'report': report % (start_time, end_time)})
        cr.commit()
        cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active')
        next_call = cr.dictfetchone()['min_next_call']
        if next_call:
            next_call = time.mktime(time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
        else:
            # No active cron job found: poll again in one hour.
            next_call = int(time.time()) + 3600
        if not check:
            self.setAlarm(self._poolJobs, next_call, dbname, dbname)
    except Exception as ex:
        # Added handler: the original ``try`` block was unterminated.
        netsvc.Logger().notifyChannel('cron', netsvc.LOG_WARNING,
                                      'Exception in cron:' + str(ex))
    finally:
        cr.close()
def _poolJobs(self, db_name, check=False):
    """Cron scheduler: run every due ir_cron job, then re-arm the alarm.

    :param db_name: database whose ir_cron table is polled
    :param check: when True, process due jobs without re-arming the alarm
    :return: False when the database/pool cannot be obtained
    """
    try:
        db, pool = pooler.get_db_and_pool(db_name)
    except:
        # Database may be unavailable (dropped, still initialising, ...).
        return False
    cr = db.cursor()
    try:
        if not pool._init:
            now = datetime.now()
            cr.execute(
                'select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority'
            )
            for job in cr.dictfetchall():
                nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                numbercall = job['numbercall']
                ok = False
                # Replay every missed occurrence; numbercall < 0 means
                # "unlimited", positive values count down per run.
                while nextcall < now and numbercall:
                    if numbercall > 0:
                        numbercall -= 1
                    # doall: re-run each missed slot; otherwise run only once.
                    if not ok or job['doall']:
                        self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                    if numbercall:
                        nextcall += _intervalTypes[job['interval_type']](
                            job['interval_number'])
                    ok = True
                addsql = ''
                if not numbercall:
                    # Job exhausted: deactivate it.
                    addsql = ', active=False'
                cr.execute(
                    "update ir_cron set nextcall=%s, numbercall=%s" + addsql +
                    " where id=%s",
                    (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
            cr.commit()
            cr.execute(
                'select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active and nextcall>=now()'
            )
            next_call = cr.dictfetchone()['min_next_call']
            if next_call:
                next_call = time.mktime(
                    time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
            else:
                # No active cron job found: check again in one hour.
                next_call = int(time.time()) + 3600
            if not check:
                self.setAlarm(self._poolJobs, next_call, db_name, db_name)
        # NOTE(review): the cursor is never closed on any path here —
        # looks like a leak shared with the sibling variants; confirm.
    except Exception, ex:
        logger = netsvc.Logger()
        logger.notifyChannel('cron', netsvc.LOG_WARNING,
                             'Exception in cron:' + str(ex))
def _procure_calculation_procure(self, db_name, uid, data, context):
    """Confirm pending procurements on a dedicated cursor for *db_name*."""
    database, registry = pooler.get_db_and_pool(db_name)
    cr = database.cursor()
    try:
        registry.get('mrp.procurement')._procure_confirm(
            cr, uid, use_new_cursor=cr.dbname, context=context)
    finally:
        # The cursor belongs to this call only — always release it.
        cr.close()
    return {}
def get_cr(self, uri):
    """Resolve *uri* to (cursor, uid, pool, remaining path parts)."""
    rel = self.uri2local(uri)
    dbname = rel.split('/')[1]
    # Authenticate with the credentials cached by the DAV auth module.
    uid = security.login(dbname, dav_auth.auth['user'], dav_auth.auth['pwd'])
    database, pool = pooler.get_db_and_pool(dbname)
    cursor = database.cursor()
    remainder = rel.split('/')[1:]
    return cursor, uid, pool, remainder
def _procure_calculation_all(self, db_name, uid, data, context):
    """Run the full procurement scheduler on a dedicated cursor.

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when ``run_scheduler`` raises — consistent with the sibling
    _procure_calculation_* helpers in this file.
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        proc_obj = pool.get('mrp.procurement')
        automatic = data['form']['automatic']
        proc_obj.run_scheduler(cr, uid, automatic=automatic,
                               use_new_cursor=cr.dbname, context=context)
    finally:
        cr.close()
    return {}
def send_email(self, req, error, origin, prefix, db_name):
    """Send a support e-mail describing *error* via mail.message.

    Fix: the cursor was never committed nor closed (leaked on every call
    and any records written by send_email_support were rolled back).
    """
    context = req.session.eval_context(req.context)
    db, pool = pooler.get_db_and_pool(req.session._db)
    cr = db.cursor()
    try:
        mail_obj = pool.get('mail.message')
        res = mail_obj.send_email_support(cr, req.session._uid, error, origin,
                                          prefix, db_name, context=context)
        cr.commit()
    finally:
        cr.close()
    return res
def close(self, *args, **kwargs):
    """Flush buffered content through the directory-content write hook, then close.

    Fix: use the registry (`pool`) obtained for ``self.dbname`` instead of
    ``self.pool`` — the local pool is fetched on the line above precisely
    because this may run outside the original registry context.
    """
    db, pool = pooler.get_db_and_pool(self.dbname)
    cr = db.cursor()
    cr.commit()
    try:
        # Dispatch to process_write_<extension> on document.directory.content.
        handler = getattr(pool.get('document.directory.content'),
                          'process_write_' + self.node.content.extension[1:])
        handler(cr, self.uid, self.node, self.getvalue())
    finally:
        cr.commit()
        cr.close()
    return StringIO.StringIO.close(self, *args, **kwargs)
def get_transaction(self, dbname, uid, transaction_id):
    """Get transaction for all XML-RPC.

    Opens a fresh cursor on *dbname*, wraps it in a WSCursor and maps it
    to *transaction_id*.
    """
    database = pooler.get_db_and_pool(dbname)[0]
    ws_cursor = WSCursor(database.cursor())
    self.log(netsvc.LOG_INFO,
             'Creating a new transaction ID: %s TID: %s PID: %s'
             % (transaction_id, ws_cursor.psql_tid, ws_cursor.psql_pid))
    return {transaction_id: ws_cursor}
def get_transaction(self, dbname, uid, transaction_id):
    """Get transaction for all XML-RPC.

    Returns a one-entry mapping from *transaction_id* to a WSCursor
    backed by a brand-new cursor on *dbname*.
    """
    connection = pooler.get_db_and_pool(dbname)[0]
    wrapped = WSCursor(connection.cursor())
    message = ('Creating a new transaction ID: %s TID: %s PID: %s'
               % (transaction_id, wrapped.psql_tid, wrapped.psql_pid))
    self.log(netsvc.LOG_INFO, message)
    return {transaction_id: wrapped}
def _procure_calculation_orderpoint(self, db_name, uid, data, context):
    """Confirm orderpoint-driven procurements on a dedicated cursor."""
    database, registry = pooler.get_db_and_pool(db_name)
    cr = database.cursor()
    try:
        registry.get('mrp.procurement')._procure_orderpoint_confirm(
            cr, uid,
            automatic=data['form']['automatic'],
            use_new_cursor=cr.dbname,
            context=context)
    finally:
        cr.close()
    return {}
def close(self, *args, **kwargs):
    """Write the buffer out via the matching process_write_* hook, then close.

    Fix: the registry obtained from pooler for ``self.dbname`` is now
    actually used (the previous code fetched ``pool`` but called
    ``self.pool``), so the correct per-database registry is hit.
    """
    db, pool = pooler.get_db_and_pool(self.dbname)
    cr = db.cursor()
    cr.commit()
    try:
        getattr(pool.get('document.directory.content'),
                'process_write_' + self.node.content.extension[1:])(
                    cr, self.uid, self.node, self.getvalue())
    finally:
        cr.commit()
        cr.close()
    return StringIO.StringIO.close(self, *args, **kwargs)
def get_attachments_func(browse_object):
    """ Returns the attachments for one browse_object

    Opens its own cursor on self.dbname (closed over from the enclosing
    scope). NOTE(review): the cursor is never closed here — presumably
    because the returned browse records read lazily through it; confirm
    the intended lifetime before adding a close().
    """
    db, pool = pooler.get_db_and_pool(self.dbname)
    cr = db.cursor()
    att_facade = pool.get('ir.attachment')
    # pylint: disable-msg=W0212
    attachment_ids = att_facade.search(
        cr, self.uid, [('res_model', '=', browse_object._name),
                       ('res_id', '=', browse_object.id)])
    return att_facade.browse(cr, self.uid, attachment_ids)
def execute(self, db, uid, obj, method, *args, **kw):
    """Run *method* on *obj* inside a fresh cursor.

    Commits on success, rolls back on any exception, and always closes
    the cursor before returning or propagating.
    """
    database, pool = pooler.get_db_and_pool(db)
    cr = database.cursor()
    try:
        try:
            result = pool.execute_cr(cr, uid, obj, method, *args, **kw)
            cr.commit()
        except Exception:
            cr.rollback()
            raise
    finally:
        cr.close()
    return result
def get_attachments_func(browse_object):
    """ Returns the attachments for one browse_object

    Looks up ir.attachment records whose res_model/res_id point at
    *browse_object*, using a fresh cursor on self.dbname (from the
    enclosing scope). NOTE(review): cursor intentionally (?) left open —
    the returned browse records appear to depend on it; verify.
    """
    db, pool = pooler.get_db_and_pool(self.dbname)
    cr = db.cursor()
    att_facade = pool.get('ir.attachment')
    # pylint: disable-msg=W0212
    attachment_ids = att_facade.search(cr, self.uid, [
        ('res_model', '=', browse_object._name),
        ('res_id', '=', browse_object.id)
    ])
    return att_facade.browse(cr, self.uid, attachment_ids)
def _poolJobs(self, db_name, check=False):
    """Cron runner (debug-aware variant): execute every due ir_cron job
    and re-arm the wake-up alarm.

    :param db_name: database to poll
    :param check: when True, do not re-arm the alarm
    :return: False if the database/pool is unavailable
    """
    try:
        db, pool = pooler.get_db_and_pool(db_name)
    except:
        return False
    cr = db.cursor()
    try:
        if not pool._init:
            now = datetime.now()
            cr.execute('SELECT * FROM ir_cron '
                       'WHERE numbercall<>0 AND active AND nextcall<=now() '
                       'ORDER BY priority', debug=self._debug)
            for job in cr.dictfetchall():
                nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                numbercall = job['numbercall']
                ok = False
                # Replay every missed slot; numbercall < 0 means unlimited.
                while nextcall < now and numbercall:
                    if numbercall > 0:
                        numbercall -= 1
                    # doall: run once per missed slot; otherwise only once.
                    if not ok or job['doall']:
                        self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                    if numbercall:
                        nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
                    ok = True
                addsql = ''
                if not numbercall:
                    # Job exhausted: deactivate it.
                    addsql = ', active=False'
                cr.execute("UPDATE ir_cron "
                           "SET nextcall=%s, numbercall=%s"+addsql+ \
                           " WHERE id=%s",
                           (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']),
                           debug=self._debug)
            cr.commit()
            cr.execute('SELECT min(nextcall) AS min_next_call FROM ir_cron '
                       'WHERE numbercall<>0 AND active ', debug=self._debug)
            next_call = cr.dictfetchone()['min_next_call']
            if next_call:
                next_call = time.mktime(time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
            else:
                # No active cron job found: check again in one hour.
                next_call = int(time.time()) + 3600
            if not check:
                self.setAlarm(self._poolJobs, next_call, db_name, db_name)
        # NOTE(review): the cursor is never closed — apparent leak shared
        # with the sibling _poolJobs variants; confirm before changing.
    except Exception, ex:
        self._logger.warning('Exception in cron:', exc_info=True)
def _poolJobs(self, db_name, check=False):
    """Legacy (mx.DateTime-based) cron runner: execute due ir_cron jobs
    and schedule the next wake-up via setAlarm.
    """
    try:
        db, pool = pooler.get_db_and_pool(db_name)
    except:
        return False
    if pool._init:
        # retry in a few minutes
        # NOTE(review): 600 is fed straight to setAlarm below, while the
        # other branch passes an absolute epoch timestamp — confirm the
        # unit setAlarm expects before relying on this branch.
        next_call = 600
    else:
        now = DateTime.now()
        try:
            cr = db.cursor()
            cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
            for job in cr.dictfetchall():
                nextcall = DateTime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                numbercall = job['numbercall']
                ok = False
                # Replay every missed occurrence; numbercall < 0 = unlimited.
                while nextcall < now and numbercall:
                    if numbercall > 0:
                        numbercall -= 1
                    if not ok or job['doall']:
                        self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                    if numbercall:
                        nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
                    ok = True
                addsql = ''
                if not numbercall:
                    # Job exhausted: deactivate it.
                    addsql = ', active=False'
                cr.execute("update ir_cron set nextcall=%s, numbercall=%s"+addsql+" where id=%s", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
            cr.commit()
        finally:
            # NOTE(review): if db.cursor() itself raised, `cr` is unbound
            # here (NameError); commit-in-finally also persists partial
            # work after an error — kept as-is pending confirmation.
            cr.commit()
            cr.close()
        #
        # Can be improved to do at the min(min(nextcalls), time()+next_call)
        # But is this an improvement ?
        #
        cr = db.cursor()
        cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active and nextcall>=now()')
        next_call = cr.dictfetchone()['min_next_call']
        cr.close()
        if next_call:
            next_call = time.mktime(time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
        else:
            # No active cron job found: check again in one hour.
            next_call = int(time.time()) + 3600
    if not check:
        self.setAlarm(self._poolJobs, next_call, db_name, db_name)
def get_userinfo(self, user, pw):
    """Authenticate a WebDAV/HTTP user against the database named in the path.

    Caches the credentials in the module-level ``auth`` dict on success.
    :return: True when login succeeded
    """
    print "\tAuth", user, pw
    print "-" * 80
    # Derive the database name from the first path segment when not set yet.
    if not self.db_name or self.db_name == "":
        self.db_name = self.path.split("/")[1]
    # NOTE(review): the incoming credentials are overwritten with a masked
    # literal before login — this looks like a redacted hard-coded account;
    # confirm the intended values before touching this.
    user = "******"
    pw = ""
    db, pool = pooler.get_db_and_pool(self.db_name)
    res = security.login(self.db_name, user, pw)
    print "\tAuth", user, pw, res
    if res:
        auth["user"] = user
        auth["pwd"] = pw
    return bool(res)
def close(self, *args, **kwargs):
    """Flush the buffer contents into the linked ir.attachment, then close."""
    database, pool = pooler.get_db_and_pool(self.dbname)
    cr = database.cursor()
    cr.commit()
    try:
        content = self.getvalue()
        pool.get('ir.attachment').write(cr, self.uid, [self.ressource_id], {
            'datas': base64.encodestring(content),
            'file_size': len(content),
        })
    finally:
        cr.commit()
        cr.close()
    StringIO.StringIO.close(self, *args, **kwargs)
def get_userinfo(self, user, pw):
    """Authenticate against the database taken from the request path and
    cache the credentials in the module-level ``auth`` dict.

    :return: True when login succeeded
    """
    print '\tAuth', user, pw
    print '-' * 80
    # First path segment names the database when none is set yet.
    if not self.db_name or self.db_name == '':
        self.db_name = self.path.split('/')[1]
    # NOTE(review): credentials replaced with a masked literal before login;
    # looks like a redacted hard-coded account — verify intended values.
    user = '******'
    pw = ''
    db, pool = pooler.get_db_and_pool(self.db_name)
    res = security.login(self.db_name, user, pw)
    print '\tAuth', user, pw, res
    if res:
        auth['user'] = user
        auth['pwd'] = pw
    return bool(res)
def update_printers_status(self, db_name, uid, context):
    """Poll CUPS and refresh the status of every printing.printer record.

    Fixes: the registry obtained for *db_name* (``pool``) is now used
    instead of ``self.pool`` (this runs on a worker thread with its own
    cursor), and the bare ``except:`` around the CUPS probe is narrowed to
    ``except Exception`` while keeping the best-effort behaviour.
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        connection = cups.Connection()
        printers = connection.getPrinters()
        server_error = False
    except Exception:
        # CUPS unreachable: mark every printer as server-error below.
        server_error = True
    # CUPS printer-state -> our status field.
    mapping = {3: 'available', 4: 'printing', 5: 'error'}
    try:
        # Skip update to avoid the thread being created again
        ctx = context.copy()
        ctx['skip_update'] = True
        printer_obj = pool.get('printing.printer')
        ids = printer_obj.search(cr, uid, [], context=ctx)
        for printer in printer_obj.browse(cr, uid, ids, context=ctx):
            vals = {}
            if server_error:
                status = 'server-error'
            elif printer.system_name in printers:
                info = printers[printer.system_name]
                status = mapping.get(info['printer-state'], 'unknown')
                vals = {
                    'model': info.get('printer-make-and-model', False),
                    'location': info.get('printer-location', False),
                    'uri': info.get('device-uri', False),
                }
            else:
                status = 'unavailable'
            vals['status'] = status
            printer_obj.write(cr, uid, [printer.id], vals, context)
        cr.commit()
    except Exception:
        cr.rollback()
        raise
    finally:
        cr.close()
    # Allow the next refresh thread to be spawned.
    with self.lock:
        self.updating = False
        self.last_update = time.time()
def __init__(self, dbname, model_name, res_id, uid=0):
    """Reserve a log pid from smile_log_seq and record the logger context.

    Fix: the cursor is created *before* the try block — previously a
    failure in ``db.cursor()`` left ``cr`` unbound and the ``finally``
    clause raised NameError instead of the real error.
    """
    assert isinstance(uid, (int, long)), 'uid should be an integer'
    self._logger = logging.getLogger('smile_log')
    db, pool = pooler.get_db_and_pool(dbname, pooljobs=False)
    pid = 0
    cr = db.cursor()
    try:
        cr.execute("select nextval('smile_log_seq')")
        res = cr.fetchone()
        pid = res and res[0] or 0
    finally:
        cr.close()
    self._logger_start = datetime.datetime.now()
    self._logger_args = {'dbname': dbname, 'model_name': model_name,
                         'res_id': res_id, 'uid': uid, 'pid': pid}
def get_cr(self, path):
    """Return (cursor, uid, pool) for the FTP *path*, or None when not db-scoped."""
    normalized = self.ftpnorm(path)
    if normalized == '/':
        return None
    dbname = normalized.split('/')[1]
    if dbname not in self.db_list():
        return None
    try:
        database, pool = pooler.get_db_and_pool(dbname)
    except:
        raise OSError(1, 'Operation not permited.')
    cursor = database.cursor()
    uid = security.login(dbname, self.username, self.password)
    if not uid:
        raise OSError(2, 'Authentification Required.')
    return cursor, uid, pool
def execute(self, db, uid, obj, method, *args, **kw):
    """Dispatch *method* on *obj* under a fresh cursor; commit or roll back.

    Underscore-prefixed methods are rejected before dispatch.
    """
    database, pool = pooler.get_db_and_pool(db)
    cr = database.cursor()
    try:
        try:
            # Private methods are never callable over the wire.
            if method.startswith('_'):
                raise except_osv(
                    'Access Denied',
                    'Private methods (such as %s) cannot be called remotely.' % (method,))
            res = pool.execute_cr(cr, uid, obj, method, *args, **kw)
            if res is None:
                self.logger.warning(
                    'The method %s of the object %s can not return `None` !',
                    method, obj)
            cr.commit()
        except Exception:
            cr.rollback()
            raise
    finally:
        cr.close()
    return res
def begin(self, dbname, uid, passwd):
    """Starts a transaction for XML-RPC.

    Registers a new WSCursor under this uid and returns its psql tid.
    """
    security.check(dbname, uid, passwd)
    self.cursors.setdefault(uid, {})
    database = pooler.get_db_and_pool(dbname)[0]
    ws_cursor = WSCursor(database.cursor())
    # NOTE: the tid doubles as the transaction id in this log line.
    self.log(netsvc.LOG_INFO,
             'Creating a new transaction ID: %s TID: %s PID: %s' % (
                 ws_cursor.psql_tid, ws_cursor.psql_tid, ws_cursor.psql_pid))
    self.cursors[uid][ws_cursor.psql_tid] = ws_cursor
    return ws_cursor.psql_tid
def execute(self, dbname, uid, passwd, transaction_id, obj, method, *args, **kw):
    """Executes code with transaction_id.

    Fix: the computed result is now returned — the previous revision
    dropped it and always returned None, unlike the sibling
    implementations in this file that end with ``return res``.
    """
    security.check(dbname, uid, passwd)
    sync_cursor = self.get_cursor(uid, transaction_id)
    cursor = sync_cursor.cursor
    pool = pooler.get_db_and_pool(dbname)[1]
    try:
        self.log(netsvc.LOG_DEBUG,
                 'Executing from transaction ID: %s TID: %s PID: %s'
                 % (transaction_id, sync_cursor.psql_tid, sync_cursor.psql_pid))
        res = pool.execute_cr(cursor, uid, obj, method, *args, **kw)
    except Exception as exc:
        # Roll the open transaction back before propagating the error.
        self.rollback(dbname, uid, passwd, transaction_id)
        raise exc
    return res
def execute(self, dbname, uid, passwd, transaction_id, obj, method, *args, **kw):
    """Executes code with transaction_id.

    Fix: adds the missing ``return res`` — without it the method always
    returned None and callers never saw the execute_cr result (compare
    the sibling variants that do return it).
    """
    security.check(dbname, uid, passwd)
    sync_cursor = self.get_cursor(uid, transaction_id)
    cursor = sync_cursor.cursor
    pool = pooler.get_db_and_pool(dbname)[1]
    try:
        self.log(
            netsvc.LOG_DEBUG, 'Executing from transaction ID: %s TID: %s PID: %s'
            % (transaction_id, sync_cursor.psql_tid, sync_cursor.psql_pid))
        res = pool.execute_cr(cursor, uid, obj, method, *args, **kw)
    except Exception as exc:
        # Roll back before propagating.
        self.rollback(dbname, uid, passwd, transaction_id)
        raise exc
    return res
def begin(self, dbname, uid, passwd):
    """Starts a transaction for XML-RPC.

    The transaction id is the stringified psql tid of a fresh WSCursor.
    """
    security.check(dbname, uid, passwd)
    self.cursors.setdefault(uid, {})
    database = pooler.get_db_and_pool(dbname)[0]
    ws_cursor = WSCursor(database.cursor())
    tid = str(ws_cursor.psql_tid)
    self.log(netsvc.LOG_INFO,
             'Creating a new transaction ID: %s TID: %s PID: %s' % (
                 tid, tid, ws_cursor.psql_pid))
    self.cursors[uid][tid] = ws_cursor
    return tid
def get_cr(self, path):
    """Return (cursor, uid, pool) for *path*, or None for root/self entries."""
    path = _to_unicode(path)
    if path and path in ('/', '.'):
        return None
    dbname = path.split('/')[1]
    try:
        # Refresh the cached database list lazily.
        if not len(self.db_name_list):
            self.db_name_list = self.db_list()
        if dbname not in self.db_name_list:
            return None
        database, pool = pooler.get_db_and_pool(dbname)
    except:
        raise OSError(1, 'Operation not permited.')
    cursor = database.cursor()
    uid = self.server.check_security(dbname, self.server.username, self.server.key)
    if not uid:
        raise OSError(2, 'Authentification Required.')
    return cursor, uid, pool
def close(self, *args, **kwargs):
    """Persist the buffer into its ir.attachment record, then close the stream."""
    database, pool = pooler.get_db_and_pool(self.dbname)
    self.buf = ''
    cr = database.cursor()
    cr.commit()
    try:
        payload = self.getvalue()
        pool.get('ir.attachment').write(cr, self.uid, [self.ressource_id], {
            'datas': base64.encodestring(payload),
            'file_size': len(payload),
        })
    finally:
        cr.commit()
        cr.close()
    return StringIO.StringIO.close(self, *args, **kwargs)
def init_postgis(cursor):
    """Bootstrap PostGIS support in the current database (idempotent).

    Returns {} immediately when spatial_ref_sys already exists.
    NOTE(review): postgis_sql and spatial_ref_sys_sql are read here but
    not executed within this snippet — presumably consumed later; verify.
    """
    ## Create language may fail and it can be normal
    cursor.execute("SELECT tablename from pg_tables where tablename='spatial_ref_sys';")
    check = cursor.fetchone()
    if check:
        return {}
    # Separate connection/cursor so the DDL below has its own transaction.
    db, pool = pooler.get_db_and_pool(cursor.dbname)
    mycursor = db.cursor()
    p = addons.get_module_resource('base_geoengine', 'postgis_sql','postgis.sql')
    postgis_sql = open(p).read()
    p = addons.get_module_resource('base_geoengine', 'postgis_sql','spatial_ref_sys.sql')
    spatial_ref_sys_sql = open(p).read()
    try:
        mycursor.execute('CREATE LANGUAGE plpgsql');
        mycursor.commit()
    except Exception, exc:
        # Language may already exist — best-effort, just warn.
        mycursor.rollback()
        logger.warning('Can not create LANGUAGE plpgsql')
def _poolJobs(self, db_name, check=False):
    """Run due ir_cron jobs for *db_name* and re-arm the scheduler alarm.

    Fixes: the cursor is now always closed via try/finally (it used to
    leak on every call), and the ``except ... as`` syntax already used
    elsewhere in this file replaces the legacy comma form.

    :param check: when True, process due jobs without re-arming the alarm
    :return: False when the database/pool cannot be obtained
    """
    try:
        db, pool = pooler.get_db_and_pool(db_name)
    except Exception:
        return False
    cr = db.cursor()
    try:
        if not pool._init:
            now = datetime.now()
            cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
            for job in cr.dictfetchall():
                nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                numbercall = job['numbercall']
                ok = False
                # Replay all missed occurrences; numbercall < 0 = unlimited,
                # positive values count down to deactivation.
                while nextcall < now and numbercall:
                    if numbercall > 0:
                        numbercall -= 1
                    if not ok or job['doall']:
                        self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                    if numbercall:
                        nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
                    ok = True
                addsql = ''
                if not numbercall:
                    # Job exhausted: deactivate it.
                    addsql = ', active=False'
                cr.execute("update ir_cron set nextcall=%s, numbercall=%s" + addsql + " where id=%s",
                           (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
            cr.commit()
            cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active and nextcall>=now()')
            next_call = cr.dictfetchone()['min_next_call']
            if next_call:
                next_call = time.mktime(time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
            else:
                # No active cron job found: poll again in one hour.
                next_call = int(time.time()) + 3600
            if not check:
                self.setAlarm(self._poolJobs, next_call, db_name, db_name)
    except Exception as ex:
        # Keep the original swallow-and-log behaviour.
        logger = netsvc.Logger()
        logger.notifyChannel('cron', netsvc.LOG_WARNING, 'Exception in cron:' + str(ex))
    finally:
        cr.close()
def get_cr(self, uri, allow_last=False):
    """ Split the uri, grab a cursor for that db """
    last_auth = self.parent.auth_provider.last_auth
    dbname, rest = self.get_db(uri, rest_ret=True, allow_last=allow_last)
    path_parts = rest.split('/') if rest else []
    if not dbname:
        return None, None, None, False, path_parts
    # if dbname was in our uri, we should have authenticated
    # against that.
    assert last_auth == dbname, " %s != %s" % (last_auth, dbname)
    creds = self.parent.auth_provider.auth_creds.get(dbname, False)
    if not creds:
        self.parent.auth_provider.checkRequest(self.parent, uri, dbname)
        creds = self.parent.auth_provider.auth_creds[dbname]
    user, passwd, dbn2, uid = creds
    database, pool = pooler.get_db_and_pool(dbname)
    return database.cursor(), uid, pool, dbname, path_parts
def get_cr(self, uri, allow_last=False):
    """ Split the uri, grab a cursor for that db """
    previous_auth = self.parent.auth_proxy.last_auth
    dbname, tail = self.get_db(uri, rest_ret=True, allow_last=allow_last)
    segments = tail.split('/') if tail else []
    if not dbname:
        return None, None, None, False, segments
    # if dbname was in our uri, we should have authenticated
    # against that.
    assert previous_auth == dbname, " %s != %s" % (previous_auth, dbname)
    credentials = self.parent.auth_proxy.auth_creds.get(dbname, False)
    if not credentials:
        self.parent.auth_proxy.checkRequest(self.parent, uri, dbname)
        credentials = self.parent.auth_proxy.auth_creds[dbname]
    user, passwd, dbn2, uid = credentials
    database, pool = pooler.get_db_and_pool(dbname)
    cr = database.cursor()
    return cr, uid, pool, dbname, segments
def get_cr(self, path):
    """Authenticate *path*'s database and return (cursor, uid, pool) or None."""
    path = _to_unicode(path)
    if path and path in ('/', '.'):
        return None
    dbname = path.split('/')[1]
    try:
        # Cache the server's database list on first use.
        if not len(self.db_name_list):
            self.db_name_list = self.db_list()
        if dbname not in self.db_name_list:
            return None
        database, registry = pooler.get_db_and_pool(dbname)
    except:
        raise OSError(1, 'Operation not permited.')
    cursor = database.cursor()
    uid = self.server.check_security(dbname, self.server.username,
                                     self.server.key)
    if not uid:
        raise OSError(2, 'Authentification Required.')
    return cursor, uid, registry
def core_sync_products_thread(self, db_name, uid, magento_app, products, store_view, context=None):
    """Thread worker: synchronise Magento products in two passes.

    Pass 1 creates configurable products (which also creates their related
    simple products); pass 2 creates the remaining simple products.

    :param magento_app: Magento APP ID (int), rebound to its browse record
    :param products: list of product dicts with a 'type' key
    :return: True
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    # NOTE(review): self.pool is used with the thread-local cursor —
    # presumably the same registry as `pool`; confirm before changing.
    magento_app = self.pool.get('magento.app').browse(cr, uid, magento_app)
    # We have list first product simple and after product configurable
    # First, only create product configurable (and product simple related in this product configurable)
    # After, create product simple. If this simple was created, skip
    for product in products:
        if product['type'] == 'configurable':
            self.pool.get('product.product').magento_create_product_type(
                cr, uid, magento_app, product, store_view, context)
    #Uncomment second part import only configurable products (id from to)
    for product in products:
        if product['type'] != 'configurable':
            self.pool.get('product.product').magento_create_product_type(
                cr, uid, magento_app, product, store_view, context)
    LOGGER.notifyChannel(
        'Magento App', netsvc.LOG_INFO,
        "End Sync Products magento app %s." % (magento_app.name))
    self.pool.get('magento.log').create_log(
        cr, uid, magento_app, 'product.product', 0, '', 'done',
        _('Finish Import/Update products'))
    # NOTE(review): cursor closed without an explicit commit — presumably
    # the called methods commit internally; verify.
    cr.close()
    return True
def zoook_export_manufacturers_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export manufacturers through django.connect (ZoooK).

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name (not self.pool) — this runs
        # on its own thread/cursor.
        manufacturer = pool.get('django.connect').ssh_command(cr, uid, sale,
                                                              values, context)
        cr.commit()
    finally:
        # Release the cursor even when ssh_command raises.
        cr.close()
    if manufacturer:
        LOGGER.notifyChannel('ZoooK Connection', netsvc.LOG_INFO,
                             "Manufacturers Export Running.")
        return True
    LOGGER.notifyChannel('ZoooK Connection', netsvc.LOG_ERROR,
                         "Error connection to server.")
    return False
def updateMetaInfo(db_name, uid, ids):
    """Re-extract and store metainfo for the given ir.attachment ids.

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when extraction or the UPDATE raises.
    """
    # As we're creating a new transaction, if update is executed in another
    # thread and very fast, it may not get latest changes. So we wait a
    # couple of seconds before updating meta information.
    time.sleep(2)
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        att_obj = pool.get('ir.attachment')
        # Ensure all ids still exist when data is actually updated: this runs
        # in another process, so records may have been removed meanwhile.
        ids = att_obj.search(cr, uid, [('id', 'in', ids)])
        for attachment in att_obj.browse(cr, uid, ids):
            metainfo = att_obj.extractMetaInfo(attachment.datas) or ''
            # Raw SQL so the last-modification time does not change; the
            # field is readonly in the GUI so no conflicts can occur.
            cr.execute("UPDATE ir_attachment SET metainfo=%s WHERE id=%s",
                       (metainfo, attachment.id))
        cr.commit()
    finally:
        cr.close()
def init_postgis(cursor):
    """Idempotent PostGIS bootstrap for the database behind *cursor*.

    Bails out with {} when the spatial_ref_sys table already exists.
    NOTE(review): the two SQL files are read but never executed in this
    visible snippet — presumably used further on; verify.
    """
    ## Create language may fail and it can be normal
    cursor.execute(
        "SELECT tablename from pg_tables where tablename='spatial_ref_sys';")
    check = cursor.fetchone()
    if check:
        return {}
    # Fresh connection/cursor so the DDL has its own transaction.
    db, pool = pooler.get_db_and_pool(cursor.dbname)
    mycursor = db.cursor()
    p = addons.get_module_resource('base_geoengine', 'postgis_sql',
                                   'postgis.sql')
    postgis_sql = open(p).read()
    p = addons.get_module_resource('base_geoengine', 'postgis_sql',
                                   'spatial_ref_sys.sql')
    spatial_ref_sys_sql = open(p).read()
    try:
        mycursor.execute('CREATE LANGUAGE plpgsql')
        mycursor.commit()
    except Exception, exc:
        # Language may already exist — best-effort, just warn.
        mycursor.rollback()
        logger.warning('Can not create LANGUAGE plpgsql')
def zoook_export_images_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export images through django.connect.

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name (not self.pool) — thread-local.
        image = pool.get('django.connect').ssh_command(cr, uid, sale, values,
                                                       context)
        cr.commit()
    finally:
        # Release the cursor even when ssh_command raises.
        cr.close()
    if image:
        self._logger.info("Image Export Running.")
        return True
    self._logger.error("Error connection to server.")
    return False
def esale_export_attachment_thread(self, db_name, uid, sale, values, context=None):
    """Thread worker: export product attachments through django.connect.

    :param db_name: database to open a thread-local cursor on
    :param sale: Sale Shop ID (int)
    :param values: dict of export parameters
    :param context: dict
    :return: True on success, False when the server connection failed
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        # Use the registry fetched for db_name (not self.pool) — thread-local.
        product = pool.get('django.connect').ssh_command(cr, uid, sale, values,
                                                         context)
        cr.commit()
    finally:
        # Release the cursor even when ssh_command raises.
        cr.close()
    if product:
        self._logger.info("Attachment Export Running.")
        return True
    self._logger.error("Error connection to server.")
    return False
def execute(self, db, uid, obj, method, *args, **kw):
    """Remote dispatch entry point: run *method* under a new cursor.

    Commits on success, rolls back on any exception, and always closes
    the cursor. Underscore-prefixed methods are rejected.
    """
    database, registry = pooler.get_db_and_pool(db)
    cr = database.cursor()
    try:
        try:
            if method.startswith('_'):
                raise except_osv(
                    'Access Denied',
                    'Private methods (such as %s) cannot be called remotely.'
                    % (method, ))
            res = registry.execute_cr(cr, uid, obj, method, *args, **kw)
            if res is None:
                self.logger.warning(
                    'The method %s of the object %s can not return `None` !',
                    method, obj)
            cr.commit()
        except Exception:
            cr.rollback()
            raise
    finally:
        cr.close()
    return res
def execute(self, dbname, uid, passwd, transaction_id, obj, method, *args, **kw):
    """Executes code with transaction_id."""
    security.check(dbname, uid, passwd)
    ws_cursor = self.get_cursor(uid, transaction_id)
    pool = pooler.get_db_and_pool(dbname)[1]
    try:
        self.log(netsvc.LOG_DEBUG,
                 'Executing from transaction ID: %s TID: %s PID: %s'
                 % (transaction_id, ws_cursor.psql_tid, ws_cursor.psql_pid))
        res = pool.execute_cr(ws_cursor.cursor, uid, obj, method, *args, **kw)
    except Exception as exc:
        # Log the full traceback; the transaction is deliberately NOT
        # rolled back here (the rollback call was disabled upstream).
        import traceback
        self.log(netsvc.LOG_ERROR,
                 'Error within a transaction:\n' + traceback.format_exc())
        raise
    return res
def execute(self, dbname, uid, passwd, transaction_id, obj, method, *args, **kw):
    """Executes code with transaction_id."""
    # Transaction ids are keyed as strings in the cursor registry.
    transaction_id = str(transaction_id)
    security.check(dbname, uid, passwd)
    ws_cursor = self.get_cursor(uid, transaction_id)
    registry = pooler.get_db_and_pool(dbname)[1]
    try:
        self.log(netsvc.LOG_DEBUG,
                 'Executing from transaction ID: %s TID: %s PID: %s'
                 % (transaction_id, ws_cursor.psql_tid, ws_cursor.psql_pid))
        res = registry.execute_cr(ws_cursor.cursor, uid, obj, method,
                                  *args, **kw)
    except Exception as exc:
        # Traceback is logged; transaction intentionally left open
        # (rollback call disabled upstream).
        import traceback
        self.log(netsvc.LOG_ERROR,
                 'Error within a transaction:\n' + traceback.format_exc())
        raise
    return res
def create_zipcodes(self, db_name, uid, ids, res, context):
    """Import Spanish cities and zip codes (15000 zip codes can take minutes).

    Fixes: the cursor is now committed on success and always closed —
    previously it was neither, so the imported data was silently rolled
    back and the cursor leaked; the py2-only ``except IOError, e`` comma
    syntax is also replaced.
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        if res['city_module'] == 'uninstalled':
            # city module not installed: fall back to default toponyms.
            self._create_defaults(cr, uid, context)
        else:
            # city module installed: load the full zipcode XML, if present.
            try:
                fp = tools.file_open(
                    os.path.join('l10n_es_toponyms',
                                 'l10n_es_toponyms_zipcodes.xml'))
            except IOError:
                fp = None
            if fp:
                idref = {}
                tools.convert_xml_import(cr, 'l10n_es_toponyms', fp, idref,
                                         'init', noupdate=True)
                # NOTE(review): recover step kept nested under `if fp` as in
                # the previous revision — confirm it should not also run
                # when the XML file is missing.
                if res['city_info_recover'] == 'yes':
                    res = self._recover_zipcodes(cr, uid, context)
        cr.commit()
    finally:
        cr.close()
def proxy_db_list():
    """List all databases behind proxy.

    Returns the local database names plus every database reachable through
    the 'proxy.remotes' records of each local database. Remote databases
    are cached in _PROXY_SERVERS (name -> (host, port)); entries for
    servers that did not answer this time are dropped from the cache.

    :return: list of database names (local names first, remotes appended)
    """
    res = []
    uid = 1
    up_servers = []
    for dbname in netsvc.SERVICES['db'].list():
        res.append(dbname)
        # Skip incompatible databases (old 4.2 schemas, etc.).
        # fix: narrowed bare except so Ctrl-C / SystemExit still propagate
        try:
            dba, pool = pooler.get_db_and_pool(dbname)
        except Exception:
            continue
        # Check the proxies configured in this database.
        cursor = dba.cursor()
        try:
            proxy_obj = pool.get('proxy.remotes')
            pr_ids = proxy_obj.search(cursor, uid, [])
            for proxy in proxy_obj.browse(cursor, uid, pr_ids):
                dbsock = xmlrpclib.ServerProxy(
                    'http://%s:%i/xmlrpc/db' % (proxy.host, proxy.port))
                try:
                    for remote_db in dbsock.list():
                        res.append(remote_db)
                        up_servers.append(remote_db)
                        _PROXY_SERVERS[remote_db] = (proxy.host, proxy.port)
                except Exception:
                    # Remote server unreachable: best-effort, try the next one.
                    continue
        except Exception:
            # Database without the proxy model (or any lookup error): ignore.
            pass
        finally:
            cursor.close()
    # Drop cached servers that were unreachable this round.
    # list() so we can delete while iterating (safe on Py2 and Py3).
    for proxy_db in list(_PROXY_SERVERS.keys()):
        if proxy_db not in up_servers:
            del _PROXY_SERVERS[proxy_db]
    return res
def magento_export_prices_templates_stepbystep(self, db_name, uid, magentoapp, saleshop, ids, context=None):
    """Export product-template prices to Magento, one template at a time.

    :param db_name: database name (str)
    :param uid: user id (int)
    :param magentoapp: magento.app ID (int)
    :param saleshop: sale.shop ID (int)
    :param ids: product.template IDs to export (list)
    :param context: OpenERP context dict (mutated: magento_app/shop added)
    :return: True
    """
    if len(ids) == 0:
        LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
            "End Product Templates Prices Export")
        return True
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    magento_external_referential_obj = self.pool.get(
        'magento.external.referential')
    magento_app = self.pool.get('magento.app').browse(cr, uid, magentoapp)
    context['magento_app'] = magento_app
    shop = self.pool.get('sale.shop').browse(cr, uid, saleshop)
    context['shop'] = shop
    # Decimal precision used to render the price string.
    # fix: this lookup was previously duplicated.
    decimal = self.pool.get('decimal.precision').precision_get(
        cr, uid, 'Sale Price')
    with Product(magento_app.uri, magento_app.username,
            magento_app.password) as product_api:
        for product in self.pool.get('product.template').browse(
                cr, uid, ids, context):
            LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
                "Waiting OpenERP ID %s...." % (product.id))
            # Map the OpenERP template to its Magento id, if already exported.
            mgn_id = magento_external_referential_obj.check_oerp2mgn(
                cr, uid, magento_app, 'product.template', product.id)
            if mgn_id:
                mgn_id = magento_external_referential_obj.\
                    get_external_referential(
                        cr, uid, [mgn_id])[0]['mgn_id']
            price = ''
            if not mgn_id:
                # Product does not exist in Magento yet: force-create it first.
                LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
                    "Force create product ID %s" % (product.id))
                mgn_id = self.magento_export_product_templates_stepbystep(
                    cr.dbname, uid, magento_app.id, [product.id], context)
            # Price source: shop pricelist if configured, else list price.
            if shop.magento_sale_price == 'pricelist' and shop.pricelist_id:
                price = self.pool.get('product.pricelist').price_get(
                    cr, uid, [shop.pricelist_id.id], product.id,
                    1.0)[shop.pricelist_id.id]
            else:
                price = product.list_price
            if shop.magento_tax_include:
                price_compute_all = self.pool.get('account.tax').compute_all(
                    cr, uid, product.taxes_id, price, 1,
                    address_id=None, product=product, partner=None)
                price = price_compute_all['total_included']
            if price:
                price = '%.*f' % (decimal, price)  # apply decimal precision
            data = {'price': price}
            product_api.update(mgn_id, data)
            LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
                "Update Product Template Prices: %s. OpenERP ID %s, Magento ID %s"
                % (price, product.id, mgn_id))
    LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
        "End Product Template Prices Export")
    # fix: the cursor was previously left open and uncommitted; close it
    # like the sibling thread methods do.
    cr.commit()
    cr.close()
    return True
def magento_export_trademarks_stepbystep(self, db_name, uid, magentoapp, ids, context=None):
    """Create partner trademarks as manufacturer attribute options in Magento.

    :param db_name: database name (str)
    :param uid: user id (int)
    :param magentoapp: magento.app ID (int)
    :param ids: res.partner IDs (trademarks) to export (list)
    :param context: OpenERP context dict (mutated: magento_app added)
    :return: True
    """
    if len(ids) == 0:
        LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
            "End Trademarks Export")
        return True
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    magento_manufacturer_obj = self.pool.get('magento.manufacturer')
    magento_app = self.pool.get('magento.app').browse(cr, uid, magentoapp)
    context['magento_app'] = magento_app
    manufacturer_name = magento_app.manufacturer_name
    magento_log_obj = self.pool.get('magento.log')
    request = []
    with ProductAttribute(magento_app.uri, magento_app.username,
            magento_app.password) as product_attribute_api:
        for trademark in self.pool.get('res.partner').browse(
                cr, uid, ids, context):
            LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
                "Waiting OpenERP ID %s...." % (trademark.id))
            mgn_id = magento_manufacturer_obj.search(cr, uid, [
                ('manufacturer_id', '=', trademark.id),
                ('magento_app_id', '=', magento_app.id)])
            if mgn_id:
                continue  # already exported for this Magento app
            data = {
                'label': [{
                    'store_id': [0],
                    'value': trademark.name,
                }],
            }
            # Guard-style raises replace the former nested if/else pyramid;
            # any failure (API error, empty result, option not found) lands
            # in the handler below exactly as before.
            # fix: narrowed the former bare except so Ctrl-C still propagates.
            try:
                result = product_attribute_api.createOption(
                    manufacturer_name, data)
                if not result:
                    raise Exception()
                # Re-read the options to recover the Magento id of the
                # option we just created (matched by label).
                options = product_attribute_api.options(manufacturer_name)
                mgn_id = None
                for option in options:
                    if option['label'] == trademark.name:
                        mgn_id = option['value']
                        break
                if not mgn_id:
                    raise Exception()
                vals = {
                    'magento_app_id': magento_app.id,
                    'manufacturer_id': trademark.id,
                    'value': mgn_id,
                    'label': trademark.name,
                }
                magento_manufacturer_obj.create(cr, uid, vals)
                LOGGER.notifyChannel('Magento Attribute Manufacturer',
                    netsvc.LOG_INFO,
                    "Manufacturer %s create" % (trademark.id))
                magento_log_obj.create_log(cr, uid, magento_app,
                    'magento.manufacturer', trademark.id, mgn_id, 'done',
                    _('Successfully export trademark: %s')
                    % (trademark.name))
            except Exception:
                message = _(
                    'Error: Magento Tradename: %s. OpenERP ID: %s, Magento ID %s'
                ) % (trademark.name, trademark.id, mgn_id)
                LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_ERROR,
                    message)
                magento_log_obj.create_log(cr, uid, magento_app,
                    'magento.manufacturer', trademark.id, mgn_id, 'error',
                    message)
                request.append(message)
            # Persist each trademark independently.
            cr.commit()
    LOGGER.notifyChannel('Magento Sale Shop', netsvc.LOG_INFO,
        "End Trademarks Export")
    self.pool.get('magento.app').set_request(cr, uid, magento_app, request)
    cr.close()
    return True
def _asiento_cierre(self, db_name, uid, data, context):
    """Create the fiscal-year closing move and its opening counterpart.

    Builds one account.move that closes every non-P&L account (per its
    close_method) in the closing period, books the year's result against
    the configured P&L account, then creates the reversed opening move in
    the opening period via revert_move.

    :param db_name: database name (str)
    :param uid: user id (int)
    :param data: wizard data; data['form'] holds ejercicio_cierre_id,
        periodo_cierre_id, periodo_apertura_id, diario_cierre, cuenta_pyg,
        nombre_asiento
    :param context: OpenERP context dict (mutated: fiscalyear added)
    :return: {} (wizard convention)
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    ejercicio_cierre_id = data['form']['ejercicio_cierre_id']
    apuntes = []  # move lines as (0, 0, vals) one2many commands
    periodo = pool.get('account.period').browse(
        cr, uid, data['form']['periodo_cierre_id'])
    # All closable accounts: everything except expense/income/view types.
    cr.execute("select id from account_account WHERE type not in ('expense','income','view') ORDER BY code")
    ids = map(lambda x: x[0], cr.fetchall())
    # Restrict balance computations to the fiscal year being closed.
    context['fiscalyear'] = ejercicio_cierre_id
    saldo_asiento = 0.0  # running balance of the closing move
    for account in pool.get('account.account').browse(cr, uid, ids, context):
        if account.close_method=='none' or account.type == 'view':
            continue
        if account.close_method=='balance' or account.close_method == 'unreconciled':
            # One aggregated line per account (skip near-zero balances).
            if abs(account.balance)>0.0001:
                linea = {
                    # and/or idiom: positive balance -> credit, else 0/False
                    'credit': account.balance>0 and account.balance,
                    'debit': account.balance<0 and -account.balance,
                    'name': account.name,
                    'date': periodo.date_stop,
                    'journal_id': data['form']['diario_cierre'],
                    'period_id': periodo.id,
                    'account_id': account.id
                }
                saldo_asiento += account.balance
                apuntes.append((0,0,linea))
        if account.close_method=='detail':
            # Copy every move line of the year, paged 100 rows at a time.
            offset = 0
            limit = 100
            while True:
                cr.execute('select name,quantity,debit,credit,account_id,ref,amount_currency,currency_id,blocked,partner_id,date_maturity,date_created from account_move_line where account_id=%d and period_id in (select id from account_period where fiscalyear_id=%d) order by id limit %d offset %d', (account.id,ejercicio_cierre_id, limit, offset))
                result = cr.dictfetchall()
                if not result:
                    break
                for linea in result:
                    # Re-date each copied line into the closing period.
                    linea.update({
                        'date': periodo.date_stop,
                        'journal_id': data['form']['diario_cierre'],
                        'period_id': periodo.id,
                    })
                    saldo_asiento += linea['debit'] - linea['credit']
                    apuntes.append((0,0,linea))
                offset += limit
    # NOTE(review): debug print left in place intentionally (doc-only pass).
    print saldo_asiento
    # Balancing line: book the year's result against the P&L account.
    movimiento = {
        'debit': (saldo_asiento >=0 ) and saldo_asiento or 0.0,
        'credit': (saldo_asiento <0 ) and -saldo_asiento or 0.0,
        'name': 'Resultado del ejercicio',
        'date': periodo.date_stop,
        'journal_id': data['form']['diario_cierre'],
        'period_id': periodo.id,
        'account_id': data['form']['cuenta_pyg'],
    }
    apuntes.append((0,0,movimiento))
    asiento = {'name': data['form']['nombre_asiento'], 'line_id': apuntes, 'journal_id': data['form']['diario_cierre'], 'period_id': periodo.id,} #'date':periodo.date_stop
    asiento_id = pool.get('account.move').create(cr,uid,asiento)
    print "Asiento cierre id:" + str(asiento_id)
    # Opening move: the closing move reversed into the opening period.
    apertura_id = revert_move(cr, uid, data, asiento_id,
        data['form']['diario_cierre'], data['form']['periodo_apertura_id'],
        pool.get('account.period').browse(
            cr,uid,data['form']['periodo_apertura_id']).date_start,
        True, context)
    print "Asiento apertura id:" + str(apertura_id)
    # Validation deliberately left to the user:
    #pool.get('account.move').button_validate(cr,uid,[asiento_id, apertura_id],context)
    # NOTE(review): cr.commit() returns None, so `result` is always None;
    # the cursor is also never closed here — confirm the wizard caller owns it.
    result = cr.commit()
    return {}
# Server startup: bring up the RPC services, then preload the configured
# databases (optionally running a YAML test file against each).
import service.http_server

# Only start network services on a normal run — not when the server was
# launched for a one-shot task (init/translate) that exits afterwards.
if not ( tools.config["stop_after_init"] or \
    tools.config["translate_in"] or \
    tools.config["translate_out"] ):
    service.http_server.init_servers()
    service.http_server.init_xmlrpc()
    service.http_server.init_static_http()
    import service.netrpc_server
    service.netrpc_server.init_servers()

if tools.config['db_name']:
    # Preload each requested database (comma-separated list), applying
    # module init/update if requested; cron jobs are started afterwards.
    for dbname in tools.config['db_name'].split(','):
        db, pool = pooler.get_db_and_pool(dbname,
            update_module=tools.config['init'] or tools.config['update'],
            pooljobs=False)
        cr = db.cursor()
        if tools.config["test_file"]:
            logger.info('loading test file %s', tools.config["test_file"])
            # Tests run in 'test' mode and are rolled back, never committed.
            tools.convert_yaml_import(cr, 'base',
                file(tools.config["test_file"]), {}, 'test', True)
            cr.rollback()
        # Start the cron scheduler for this database (pooljobs=False above
        # deferred it until after the optional test run).
        pool.get('ir.cron')._poolJobs(db.dbname)
        cr.close()
#----------------------------------------------------------