def get(self, ids, model, name, values=None):
    """
    Replace reference values that point to removed records by False.

    :param ids: a list of record ids
    :param model: a string with the name of the model
    :param name: a string with the name of the reference field
    :param values: a list of dictionaries with the read values
        (each item carries at least the keys ``id`` and ``name``)
    :return: a dictionary with ids as key and values as value
    """
    pool = Pool()
    if values is None:
        values = {}
    res = {}
    for value in values:
        res[value['id']] = value[name]
    # Group the referenced ids per target model to check their existence
    # with a single search per model.
    ref_to_check = {}
    for record_id in ids:
        if record_id not in res:
            res[record_id] = None
            continue
        if not res[record_id]:
            continue
        ref_model, ref_id = res[record_id].split(',', 1)
        if not ref_model:
            continue
        try:
            ref_id = int(ref_id)
        except ValueError:
            # Not an integer id: nothing to check, leave the value as is.
            continue
        if ref_id < 0:
            continue
        # Normalize the stored value to "model,id".
        res[record_id] = ref_model + ',' + str(ref_id)
        ref_to_check.setdefault(ref_model, (set(), []))
        ref_to_check[ref_model][0].add(ref_id)
        ref_to_check[ref_model][1].append(record_id)

    # Check if reference ids still exist, ignoring active flags and
    # access rules (run as root).
    with contextlib.nested(Transaction().set_context(active_test=False),
            Transaction().set_user(0)):
        for ref_model, (ref_ids, record_ids) in ref_to_check.iteritems():
            if ref_model not in pool.object_name_list():
                # The target model itself no longer exists.
                res.update(dict((i, False) for i in record_ids))
                continue
            ref_obj = pool.get(ref_model)
            found_ids = ref_obj.search([('id', 'in', list(ref_ids))],
                order=[])
            refs = [ref_model + ',' + str(ref_id) for ref_id in found_ids]
            for i in record_ids:
                if res[i] not in refs:
                    res[i] = False
    return res
def _inherits_reload(self):
    """
    Reconstruct _inherit_fields from the models listed in _inherits,
    then cascade the reload to every model that inherits from this one.
    """
    pool = Pool()
    new_fields = {}
    for model_name, link_field in self._inherits.iteritems():
        inherited = pool.get(model_name)
        # Start from what the parent model itself inherits.
        new_fields.update(inherited._inherit_fields)
        # Direct columns of the parent model.
        for fname in inherited._columns.keys():
            new_fields[fname] = (model_name, link_field,
                inherited._columns[fname])
        # Fields the parent inherits override its plain columns.
        for fname in inherited._inherit_fields.keys():
            new_fields[fname] = (model_name, link_field,
                inherited._inherit_fields[fname][2])
    self._inherit_fields = new_fields
    self._reset_columns()
    self._update_rpc()
    # Update objects that use this one so their _inherits fields follow.
    for obj_name in pool.object_name_list():
        obj = pool.get(obj_name)
        if self._name in obj._inherits:
            obj._inherits_reload()
def run(self):
    "Run the server and never return"
    from trytond.backend import Database
    from trytond.pool import Pool
    from trytond.monitor import monitor
    # True when started only to initialize/update databases: the server
    # then exits instead of serving requests.
    update = bool(CONFIG['init'] or CONFIG['update'])
    init = {}
    # Stop cleanly on SIGINT/SIGTERM (and SIGQUIT where available);
    # SIGUSR1 triggers a restart.
    signal.signal(signal.SIGINT, lambda *a: self.stop())
    signal.signal(signal.SIGTERM, lambda *a: self.stop())
    if hasattr(signal, 'SIGQUIT'):
        signal.signal(signal.SIGQUIT, lambda *a: self.stop())
    if hasattr(signal, 'SIGUSR1'):
        signal.signal(signal.SIGUSR1, lambda *a: self.restart())
    if CONFIG['pidfile']:
        with open(CONFIG['pidfile'], 'w') as fd_pid:
            fd_pid.write("%d" % (os.getpid()))
    if CONFIG['psyco']:
        # Optional psyco JIT acceleration.
        import psyco
        psyco.full()
    if not CONFIG["db_name"] \
            and bool(CONFIG['init'] or CONFIG['update']):
        raise Exception('Missing database option!')
    if not update:
        self.start_servers()
    # First pass: create missing databases (--init) or check that the
    # configured databases are Tryton ones.
    for db_name in CONFIG["db_name"]:
        init[db_name] = False
        database = Database(db_name).connect()
        cursor = database.cursor()
        try:
            if CONFIG['init']:
                if not cursor.test():
                    self.logger.info("init db")
                    Database.init(cursor)
                    init[db_name] = True
                cursor.commit()
            elif not cursor.test():
                raise Exception("'%s' is not a Tryton database!" % db_name)
        finally:
            cursor.close()
    # Second pass: load each pool, with translatable languages when
    # updating.
    for db_name in CONFIG["db_name"]:
        if update:
            cursor = Database(db_name).connect().cursor()
            try:
                if not cursor.test():
                    raise Exception("'%s' is not a Tryton database!" \
                        % db_name)
                cursor.execute('SELECT code FROM ir_lang ' \
                    'WHERE translatable')
                lang = [x[0] for x in cursor.fetchall()]
            finally:
                cursor.close()
        else:
            lang = None
        Pool(db_name).init(update=update, lang=lang)
    for kind in ('init', 'update'):
        CONFIG[kind] = {}
    # For each freshly created database, ask for and store the admin
    # password (salted, SHA-1 hashed).
    for db_name in CONFIG['db_name']:
        if init[db_name]:
            while True:
                password = getpass('Admin Password for %s: ' % db_name)
                password2 = getpass('Admin Password Confirmation: ')
                if password != password2:
                    sys.stderr.write('Admin Password Confirmation ' \
                        'doesn\'t match Admin Password!\n')
                    continue
                if not password:
                    sys.stderr.write('Admin Password is required!\n')
                    continue
                break
            database = Database(db_name).connect()
            cursor = database.cursor()
            try:
                # 8-character alphanumeric salt prepended to the hash
                # input before hashing.
                salt = ''.join(random.sample(
                    string.letters + string.digits, 8))
                password += salt
                if hashlib:
                    password = hashlib.sha1(password).hexdigest()
                else:
                    # Fallback for Pythons without hashlib.
                    password = sha.new(password).hexdigest()
                cursor.execute('UPDATE res_user ' \
                    'SET password = %s, salt = %s ' \
                    'WHERE login = \'admin\'', (password, salt))
                cursor.commit()
            finally:
                cursor.close()
    if update:
        # Init/update mode: done, exit instead of serving.
        self.logger.info('Update/Init succeed!')
        logging.shutdown()
        sys.exit(0)
    # Main loop: start one cron thread per database (at most one alive at
    # a time per database) and optionally watch sources for auto-reload.
    threads = {}
    while True:
        if CONFIG['cron']:
            for dbname in Pool.database_list():
                thread = threads.get(dbname)
                if thread and thread.is_alive():
                    continue
                pool = Pool(dbname)
                # Non-blocking acquire: skip this database if the pool
                # is busy (e.g. being initialized).
                if not pool.lock.acquire(0):
                    continue
                try:
                    if 'ir.cron' not in pool.object_name_list():
                        continue
                    cron_obj = pool.get('ir.cron')
                finally:
                    pool.lock.release()
                thread = threading.Thread(
                    target=cron_obj.run,
                    args=(dbname,), kwargs={})
                thread.start()
                threads[dbname] = thread
        if CONFIG['auto_reload']:
            # Poll the source monitor every second; restart on change.
            for _ in range(60):
                if monitor():
                    self.restart()
                time.sleep(1)
        else:
            time.sleep(60)
def _validate(self, ids):
    """
    Validate the records with the given ids.

    Runs, in order: the model's _constraints methods, then per-field
    checks for domain, states['required'], required, size and digits.
    Raises a user error on the first violation found.

    :param ids: a list of record ids to validate
    """
    pool = Pool()
    # When running as root on behalf of a real user (user in context),
    # re-run the validation as that user so access rules apply.
    if (Transaction().user == 0
            and Transaction().context.get('user')):
        with Transaction().set_user(Transaction().context.get('user')):
            return self._validate(ids)
    # Model-level constraints: (method_name, error_name) pairs.
    for field in self._constraints:
        if not getattr(self, field[0])(ids):
            self.raise_user_error(field[1])
    # Evaluate field checks with the user's preferences as context
    # (skipped for root or when res.user is not installed).
    if not 'res.user' in pool.object_name_list() \
            or Transaction().user == 0:
        ctx_pref = {
        }
    else:
        user_obj = pool.get('res.user')
        ctx_pref = user_obj.get_preferences(context_only=True)

    def is_pyson(test):
        # True if test is, or (recursively) contains, a PYSON expression.
        if isinstance(test, PYSON):
            return True
        if isinstance(test, (list, tuple)):
            for i in test:
                if isinstance(i, PYSON):
                    return True
                if isinstance(i, (list, tuple)):
                    if is_pyson(i):
                        return True
        return False

    with Transaction().set_context(ctx_pref):
        records = self.browse(ids)
        for field_name, field in self._columns.iteritems():
            # Read-only function fields are not stored: nothing to check.
            if isinstance(field, fields.Function) and \
                    not field.setter:
                continue
            # validate domain
            if (field._type in ('many2one', 'many2many', 'one2many',
                        'one2one')
                    and field.domain):
                if field._type in ('many2one', 'one2many'):
                    relation_obj = pool.get(field.model_name)
                else:
                    relation_obj = field.get_target()
                if is_pyson(field.domain):
                    # Dynamic domain: decode and search per record,
                    # since the domain may depend on record values.
                    pyson_domain = PYSONEncoder().encode(field.domain)
                    for record in records:
                        env = EvalEnvironment(record, self)
                        env.update(Transaction().context)
                        env['current_date'] = datetime.datetime.today()
                        env['time'] = time
                        env['context'] = Transaction().context
                        env['active_id'] = record.id
                        domain = PYSONDecoder(env).decode(pyson_domain)
                        relation_ids = []
                        if record[field_name]:
                            if field._type in ('many2one',):
                                relation_ids.append(record[field_name].id)
                            else:
                                relation_ids.extend(
                                    [x.id for x in record[field_name]])
                        if relation_ids and not relation_obj.search([
                                'AND',
                                [('id', 'in', relation_ids)],
                                domain,
                                ]):
                            self.raise_user_error(
                                'domain_validation_record',
                                error_args=self._get_error_args(
                                    field_name))
                else:
                    # Static domain: collect all related ids and check
                    # them with a single search.
                    relation_ids = []
                    for record in records:
                        if record[field_name]:
                            if field._type in ('many2one',):
                                relation_ids.append(record[field_name].id)
                            else:
                                relation_ids.extend(
                                    [x.id for x in record[field_name]])
                    if relation_ids:
                        find_ids = relation_obj.search([
                            'AND',
                            [('id', 'in', relation_ids)],
                            field.domain,
                            ])
                        if not set(relation_ids) == set(find_ids):
                            self.raise_user_error(
                                'domain_validation_record',
                                error_args=self._get_error_args(
                                    field_name))
            # validate states required
            if field.states and 'required' in field.states:
                if is_pyson(field.states['required']):
                    pyson_required = PYSONEncoder().encode(
                        field.states['required'])
                    for record in records:
                        env = EvalEnvironment(record, self)
                        env.update(Transaction().context)
                        env['current_date'] = datetime.datetime.today()
                        env['time'] = time
                        env['context'] = Transaction().context
                        env['active_id'] = record.id
                        required = PYSONDecoder(env).decode(pyson_required)
                        # NOTE: dynamic branch only rejects None, while
                        # the static branch rejects any falsy value.
                        if required and record[field_name] is None:
                            self.raise_user_error(
                                'required_validation_record',
                                error_args=self._get_error_args(
                                    field_name))
                else:
                    if field.states['required']:
                        for record in records:
                            if not record[field_name]:
                                self.raise_user_error(
                                    'required_validation_record',
                                    error_args=self._get_error_args(
                                        field_name))
            # validate required
            if field.required:
                for record in records:
                    # Only "empty" markers fail: missing relation, None
                    # or False; other falsy values (0, '') are accepted.
                    if (isinstance(record[field_name], (BrowseRecordNull,
                                    type(None), type(False)))
                            and not record[field_name]):
                        self.raise_user_error(
                            'required_validation_record',
                            error_args=self._get_error_args(field_name))
            # validate size
            if hasattr(field, 'size') and field.size:
                for record in records:
                    if len(record[field_name] or '') > field.size:
                        self.raise_user_error(
                            'size_validation_record',
                            error_args=self._get_error_args(field_name))

            def digits_test(value, digits, field_name):
                # Check that value fits in digits[1] decimal places.
                def raise_user_error():
                    self.raise_user_error('digits_validation_record',
                        error_args=self._get_error_args(field_name))
                if value is None:
                    return
                if isinstance(value, Decimal):
                    if (value.quantize(Decimal(str(10.0 ** -digits[1])))
                            != value):
                        raise_user_error()
                elif CONFIG.options['db_type'] != 'mysql':
                    # Float rounding check, skipped on MySQL.
                    if not (round(value, digits[1]) == float(value)):
                        raise_user_error()
            # validate digits
            if hasattr(field, 'digits') and field.digits:
                if is_pyson(field.digits):
                    # Digits may depend on record values: decode per record.
                    pyson_digits = PYSONEncoder().encode(field.digits)
                    for record in records:
                        env = EvalEnvironment(record, self)
                        env.update(Transaction().context)
                        env['current_date'] = datetime.datetime.today()
                        env['time'] = time
                        env['context'] = Transaction().context
                        env['active_id'] = record.id
                        digits = PYSONDecoder(env).decode(pyson_digits)
                        digits_test(record[field_name], digits, field_name)
                else:
                    for record in records:
                        digits_test(record[field_name], field.digits,
                            field_name)
class BrowseRecord(object):
    '''
    An object that represents record defined by a ORM object.

    Field values are fetched lazily on attribute/item access and cached
    in two layers: a transaction-wide cache keyed by model name, and a
    local cache shared with the sibling records of the same browse call.
    '''

    def __init__(self, record_id, model, ids, local_cache):
        # :param record_id: id of this record (must be in ids when ids
        #     is non-empty)
        # :param model: the ORM object defining the record
        # :param ids: sibling ids read together for batch prefetching
        # :param local_cache: LRUDict shared by the sibling records
        assert isinstance(ids, list)
        if ids:
            assert record_id in ids
        self._cursor = Transaction().cursor
        self._user = Transaction().user
        self.id = record_id
        self._model = model
        self._model_name = self._model._name
        self._context = Transaction().context
        self._pool = Pool()
        self._ids = ids
        # Transaction-wide cache, one LRUDict of records per model name.
        cache = self._cursor.get_cache(self._context)
        if model._name not in cache:
            cache[model._name] = LRUDict(RECORD_CACHE_SIZE)
        self._data = cache[model._name]
        self._cache = cache
        assert isinstance(local_cache, LRUDict)
        self._local_data = local_cache

    def __getitem__(self, name):
        # Return the value of field `name`, reading it (and prefetching
        # other eager fields for the sibling ids) on a cache miss.
        # Access to LRUDict must be atomic
        result = self._local_data.get(self.id, {}).get(name)
        if (self.id in self._local_data
                and name in self._local_data[self.id]):
            return result
        result = self._data.get(self.id, {}).get(name)
        if self.id in self._data and name in self._data[self.id]:
            return result

        # build the list of fields we will fetch
        # fetch the definition of the field which was asked for
        if name in self._model._columns:
            col = self._model._columns[name]
        elif name in self._model._inherit_fields:
            col = self._model._inherit_fields[name][2]
        elif hasattr(self._model, name):
            # Plain attribute/method of the model, not a field.
            return getattr(self._model, name)
        else:
            raise Exception('Error', 'Programming error: field "%s" ' \
                'does not exist in model "%s"!' \
                % (name, self._model._name))

        ffields = {}
        if col.loading == 'eager':
            # Prefetch other eager fields too, limited by the threshold
            # and by field read-access rights.
            field_access_obj = self._pool.get('ir.model.field.access')
            fread_accesses = {}
            for inherit_name in self._model._inherits:
                inherit_obj = self._pool.get(inherit_name)
                fread_accesses.update(field_access_obj.check(inherit_name,
                    inherit_obj._columns.keys(), 'read', access=True))
            fread_accesses.update(field_access_obj.check(self._model._name,
                self._model._columns.keys(), 'read', access=True))
            # Fields the user may not read (except the requested one).
            to_remove = set(x for x, y in fread_accesses.iteritems()
                if not y and x != name)

            # Budget of fields to fetch in one read; inherited fields
            # get whatever the direct columns leave over.
            threshold = BROWSE_FIELD_TRESHOLD
            inherit_threshold = threshold - len(self._model._columns)

            def not_cached(item):
                # Keep fields not yet in either cache for this record.
                fname, field = item
                return (fname not in self._data.get(self.id, {})
                    and fname not in self._local_data.get(self.id, {}))

            def to_load(item):
                # Keep eager fields the user is allowed to read.
                fname, field = item
                return (field.loading == 'eager'
                    and fname not in to_remove)

            def overrided(item):
                # Inherited fields shadowed by a direct column.
                fname, field = item
                return fname in self._model._columns

            if inherit_threshold > 0:
                ifields = ((fname, field)
                    for fname, (_, _, field)
                    in self._model._inherit_fields.iteritems())
                ifields = ifilterfalse(overrided,
                    ifilter(to_load, ifilter(not_cached, ifields)))
                ifields = islice(ifields, 0, inherit_threshold)
                ffields.update(ifields)
                threshold -= inherit_threshold

            ifields = ifilter(to_load,
                ifilter(not_cached,
                    self._model._columns.iteritems()))
            ifields = islice(ifields, 0, threshold)
            ffields.update(ifields)
        # The requested field is always fetched.
        ffields[name] = col

        # add datetime_field
        for field in ffields.values():
            if hasattr(field, 'datetime_field') and field.datetime_field:
                if field.datetime_field in self._model._columns:
                    datetime_field = self._model._columns[field.datetime_field]
                else:
                    datetime_field = self._model._inherit_fields[
                        field.datetime_field][2]
                ffields[field.datetime_field] = datetime_field

        def filter_(id_):
            # Skip sibling ids whose value for `name` is already cached.
            if (id_ in self._local_data
                    and name in self._local_data[id_]):
                return False
            if id_ in self._data and name in self._data[id_]:
                return False
            return True
        # Rotate the sibling ids so this record comes first, then cap
        # the batch at the cursor's IN clause limit.
        index = self._ids.index(self.id)
        ids = chain(islice(self._ids, index, None),
            islice(self._ids, 0, max(index - 1, 0)))
        ids = list(islice(ifilter(filter_, ids), self._cursor.IN_MAX))
        model2ids = {}
        model2cache = {}
        # read the data
        with contextlib.nested(Transaction().set_cursor(self._cursor),
                Transaction().set_user(self._user),
                Transaction().set_context(self._context)):
            # create browse records for 'remote' models
            for data in self._model.read(ids, ffields.keys()):
                for i, j in ffields.iteritems():
                    model = None
                    if (hasattr(j, 'model_name')
                            and j.model_name in
                            self._pool.object_name_list()):
                        model = self._pool.get(j.model_name)
                    elif hasattr(j, 'get_target'):
                        model = j.get_target()
                    if model and j._type in ('many2one', 'one2one'):
                        # Empty relation (but not a numeric 0 id) becomes
                        # a BrowseRecordNull placeholder.
                        if (not data[i]
                                and not (isinstance(data[i], (int, long))
                                    and not isinstance(data[i],
                                        type(False)))):
                            data[i] = BrowseRecordNull()
                        else:
                            _datetime = None
                            if (hasattr(j, 'datetime_field')
                                    and j.datetime_field):
                                _datetime = data[j.datetime_field]
                            with Transaction().set_context(
                                    _datetime=_datetime):
                                # Accumulate target ids per model so the
                                # new records can batch-read together.
                                ids = model2ids.setdefault(model, [])
                                ids.append(data[i])
                                local_cache = model2cache.setdefault(model,
                                    LRUDict(RECORD_CACHE_SIZE))
                                data[i] = BrowseRecord(data[i], model,
                                    ids, local_cache)
                    elif (model and j._type in ('one2many', 'many2many')):
                        _datetime = None
                        if hasattr(j, 'datetime_field') and j.datetime_field:
                            _datetime = data[j.datetime_field]
                        with Transaction().set_context(
                                _datetime=_datetime):
                            ids = model2ids.setdefault(model, [])
                            ids.extend(data[i])
                            local_cache = model2cache.setdefault(model,
                                LRUDict(RECORD_CACHE_SIZE))
                            data[i] = BrowseRecordList(
                                BrowseRecord(x, model, ids, local_cache)
                                for x in data[i])
                    # Function-field values and browse wrappers go to the
                    # local cache only; plain values go to the shared one.
                    if (isinstance(j, fields.Function)
                            or isinstance(data[i], (BrowseRecord,
                                BrowseRecordList))):
                        if data['id'] == self.id and i == name:
                            result = data[i]
                        self._local_data.setdefault(data['id'], {})[i] = \
                            data[i]
                        del data[i]
                self._data.setdefault(data['id'], {}).update(data)
                if data['id'] == self.id and name in data:
                    result = data[name]
        return result

    def __getattr__(self, name):
        # TODO raise an AttributeError exception
        return self[name]

    def __contains__(self, name):
        # True when `name` is a field (direct or inherited) or an
        # attribute of the model.
        return (name in self._model._columns) \
            or (name in self._model._inherit_fields) \
            or hasattr(self._model, name)

    def __hasattr__(self, name):
        return name in self

    def __int__(self):
        return self.id

    def __str__(self):
        return "BrowseRecord(%s, %d)" % (self._model_name, self.id)

    def __eq__(self, other):
        # Equality/hash are by (model name, id) pair.
        return (self._model_name, self.id) == (other._model_name, other.id)

    def __ne__(self, other):
        return (self._model_name, self.id) != (other._model_name, other.id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    def __hash__(self):
        return hash((self._model_name, self.id))

    def __nonzero__(self):
        # A BrowseRecord is always truthy (cf. BrowseRecordNull).
        return True

    __repr__ = __str__

    def setLang(self, lang):
        # Switch the record's language: stash current cached values under
        # the previous language and restore any values already cached for
        # the new one (otherwise start from an empty cache entry).
        self._context = self._context.copy()
        prev_lang = self._context.get('language') or CONFIG['language']
        self._context['language'] = lang
        for cache in (self._cache, {self._model_name: self._local_data}):
            language_cache = cache.setdefault('_language_cache', {})
            for model in cache:
                if model == '_language_cache':
                    continue
                for record_id in cache[model]:
                    language_cache.setdefault(prev_lang,
                        LRUDict(MODEL_CACHE_SIZE)).setdefault(model,
                            LRUDict(RECORD_CACHE_SIZE))[record_id] = \
                        cache[model][record_id]
                    if lang in language_cache \
                            and model in language_cache[lang] \
                            and record_id in language_cache[lang][model]:
                        cache[model][record_id] = \
                            language_cache[lang][model][record_id]
                    else:
                        cache[model][record_id] = {}

    def get_eval(self, name):
        # Like __getitem__ but flattened for expression evaluation:
        # records become ids, lists of records become lists of ids,
        # null placeholders become False.
        res = self[name]
        if isinstance(res, BrowseRecord):
            res = res.id
        if isinstance(res, BrowseRecordList):
            res = res.get_eval()
        if isinstance(res, BrowseRecordNull):
            res = False
        return res
self.logger.info('Update/Init succeed!') logging.shutdown() sys.exit(0) threads = {} while True: if CONFIG['cron']: for dbname in Pool.database_list(): thread = threads.get(dbname) if thread and thread.is_alive(): continue pool = Pool(dbname) if not pool.lock.acquire(0): continue try: if 'ir.cron' not in pool.object_name_list(): continue Cron = pool.get('ir.cron') finally: pool.lock.release() thread = threading.Thread( target=Cron.run, args=(dbname,), kwargs={}) thread.start() threads[dbname] = thread if CONFIG['auto_reload']: for _ in range(60): if monitor(): self.restart() time.sleep(1) else: