def _assert(self, record):
    """Deleting *record* must succeed under the current access setup."""
    model = Pool().get(self.model_name)
    model.delete([record])
def _assert_raises(self, record):
    """Deleting *record* must be denied with an AccessError."""
    model = Pool().get(self.model_name)
    with self.assertRaises(AccessError):
        model.delete([record])
def default_currency():
    """Return the currency id of the company in context, if any."""
    company_id = Transaction().context.get('company')
    if not company_id:
        return None
    Company = Pool().get('company.company')
    return Company(company_id).currency.id
def _assert(self, record):
    """Creating a record must succeed under the current access setup.

    Note: *record* is accepted for signature parity with the other
    assertion helpers but is not used here.
    """
    model = Pool().get(self.model_name)
    model.create([{}])
def transition_start(self):
    """Wizard transition: turn selected purchase requests into purchases.

    Flow:
    1. If the user just answered the "ask party" step, write the chosen
       party onto every matching request, then reset the step's fields.
    2. If any remaining request still lacks both a purchase line and a
       party, loop back to the 'ask_party' step.
    3. Otherwise group the requests (by the keys from
       _group_purchase_key / _group_purchase_line_key), build one
       purchase per group plus its lines, save everything and update
       the requests' state.

    Returns the next wizard state: 'ask_party' or 'end'.
    """
    pool = Pool()
    Request = pool.get('purchase.request')
    Purchase = pool.get('purchase.purchase')
    Line = pool.get('purchase.line')
    Date = pool.get('ir.date')
    requests = Request.browse(Transaction().context['active_ids'])
    if (getattr(self.ask_party, 'party', None)
            and getattr(self.ask_party, 'company', None)):
        # The user answered the ask_party step: propagate the chosen
        # party to every request that matches the asked product and
        # description and has no party/purchase line yet.
        def compare_string(first, second):
            # Treat None and '' as equal descriptions.
            return (first or '') == (second or '')

        def to_write(request):
            return (not request.purchase_line
                and not request.party
                and request.product == self.ask_party.product
                and compare_string(
                    request.description, self.ask_party.description))
        reqs = list(filter(to_write, requests))
        if reqs:
            Request.write(reqs, {
                    'party': self.ask_party.party.id,
                    })
        # Reset the step so the next iteration asks about a new product.
        self.ask_party.product = None
        self.ask_party.description = None
        self.ask_party.party = None
        self.ask_party.company = None

    def to_ask_party(request):
        return not request.purchase_line and not request.party
    reqs = filter(to_ask_party, requests)
    if any(reqs):
        # At least one request still has no party: ask the user again.
        return 'ask_party'

    today = Date.today()
    # Only requests not yet linked to a purchase line are processed.
    requests = [r for r in requests if not r.purchase_line]
    keyfunc = partial(self._group_purchase_key, requests)
    # groupby requires the iterable sorted by the same key.
    requests = sorted(requests, key=keyfunc)
    purchases = []
    lines = []
    for key, grouped_requests in groupby(requests, key=keyfunc):
        grouped_requests = list(grouped_requests)
        try:
            purchase_date = min(r.purchase_date
                for r in grouped_requests
                if r.purchase_date)
        except ValueError:
            # No request in the group has a purchase date.
            purchase_date = today
        if purchase_date < today:
            # Never schedule a purchase in the past.
            purchase_date = today
        purchase = Purchase(purchase_date=purchase_date)
        # key is a sequence of (field, value) pairs for the purchase.
        for f, v in key:
            setattr(purchase, f, v)
        purchases.append(purchase)
        for line_key, line_requests in groupby(
                grouped_requests, key=self._group_purchase_line_key):
            line_requests = list(line_requests)
            line = self.compute_purchase_line(
                line_key, line_requests, purchase)
            line.purchase = purchase
            line.requests = line_requests
            lines.append(line)
    Purchase.save(purchases)
    Line.save(lines)
    Request.update_state(requests)
    return 'end'
def create_stock_moves(cls, roundings, lines):
    """Create and confirm the stock moves consumed during roundings.

    For each rounding, one move is built per medicament, medical
    supply and vaccine line, from the hospitalization location to the
    patient's customer location.  All moves are created in one call
    and then set to state 'done' with today's effective date.

    :param roundings: iterable of rounding records (used as move origin)
    :param lines: dict with keys 'medicaments', 'supplies', 'vaccines',
        each an iterable of line records carrying quantity and lot
    :raises UserError: if any line's lot has passed its expiration date
    :return: True
    """
    pool = Pool()
    Move = pool.get('stock.move')
    Date = pool.get('ir.date')
    today = Date.today()

    def move_vals(rounding, line, product, quantity, expired_message):
        # Build the values dict for one stock move; refuse expired lots.
        vals = {
            'origin': str(rounding),
            'product': product.id,
            'uom': product.default_uom.id,
            'quantity': quantity,
            'from_location': rounding.hospitalization_location.id,
            'to_location': (
                rounding.name.patient.name.customer_location.id),
            'unit_price': product.list_price,
            }
        if line.lot:
            if (line.lot.expiration_date
                    and line.lot.expiration_date < today):
                raise UserError(expired_message)
            vals['lot'] = line.lot.id
        return vals

    moves = []
    for rounding in roundings:
        # Medicament lines: the product is medicament.medicament.name.
        for medicament in lines['medicaments']:
            moves.append(move_vals(
                    rounding, medicament, medicament.medicament.name,
                    medicament.quantity, 'Expired medicaments'))
        # Medical supply lines: the product is medical_supply.product.
        for medical_supply in lines['supplies']:
            moves.append(move_vals(
                    rounding, medical_supply, medical_supply.product,
                    medical_supply.quantity, 'Expired supplies'))
        # Vaccine lines: the product is vaccine.vaccine.
        for vaccine in lines['vaccines']:
            moves.append(move_vals(
                    rounding, vaccine, vaccine.vaccine,
                    vaccine.quantity, 'Expired vaccines'))
    new_moves = Move.create(moves)
    Move.write(new_moves, {
            'state': 'done',
            'effective_date': today,
            })
    return True
def ldap_sync(self, ldap_server, username, password):
    """Synchronize 'auroville.asyncto' records from an LDAP directory.

    Binds to *ldap_server* with *username*/*password* over STARTTLS,
    fetches every inetOrgPerson entry under the people subtree and
    creates or updates the matching record, keyed on 'asynctoid'.
    Any failure is logged; nothing is raised to the caller.
    """
    log = logging.getLogger('logfile')
    try:
        log.warning('LDAP Syncronization')
        conn = ldap.initialize(ldap_server)
        # NOTE(review): certificate validation is disabled here —
        # confirm this is intended for this deployment.
        conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT,
            ldap.OPT_X_TLS_NEVER)
        conn.protocol_version = ldap.VERSION3
        conn.start_tls_s()
        conn.simple_bind_s(username, password)
        baseDN = "ou=people,dc=asyncto,dc=auroville,dc=org,dc=in"
        searchScope = ldap.SCOPE_SUBTREE
        retrieveAttributes = None
        searchFilter = "objectClass=inetOrgPerson"
        ldap_result_id = conn.search(baseDN, searchScope,
            searchFilter, retrieveAttributes)
        # Collect asynchronous search results until exhausted.
        result_set = []
        while True:
            result_type, result_data = conn.result(ldap_result_id, 0)
            if not result_data:
                break
            if result_type == ldap.RES_SEARCH_ENTRY:
                result_set.append(result_data)
        # Flatten each (dn, attrs) entry into the record values used
        # by the auroville.asyncto model.
        result_parsed = []
        for result_data in result_set:
            for entry in result_data:
                # entry is (dn, attributes); keep only the first value
                # of each attribute, with lower-cased keys.
                attrs = {k.lower(): v[0] for k, v in entry[1].items()}
                item = {
                    'asynctoid': self.__get_ldap_attr(attrs, 'uid'),
                    'aurovillename': self.__get_ldap_attr(attrs, 'cn'),
                    'name': self.__get_ldap_attr(attrs, 'givenname'),
                    'surname': self.__get_ldap_attr(attrs, 'sn'),
                    'address': self.__get_ldap_attr(attrs, 'street'),
                    'telephone': self.__get_ldap_attr(
                        attrs, 'telephonenumber'),
                    'email': self.__get_ldap_attr(attrs, 'mail'),
                    'presence': self.__get_ldap_attr(
                        attrs, 'departmentnumber'),
                    'status': self.__get_ldap_attr(
                        attrs, 'employeetype'),
                    'contactperson': self.__get_ldap_attr(
                        attrs, 'manager'),
                    'deleteflag': self.__get_ldap_attr(attrs, 'title'),
                    'ldapid': self.__get_ldap_attr(attrs, 'dn'),
                    'masterlistid': self.__get_ldap_attr(
                        attrs, 'postalcode'),
                    'fsid': self.__get_ldap_attr(
                        attrs, 'facsimiletelephonenumber'),
                    'tsid': self.__get_ldap_attr(attrs, 'pager'),
                    }
                result_parsed.append(item)
        # Upsert: update existing records by asynctoid, create the rest.
        asyncto_obj = Pool().get('auroville.asyncto')
        for item in result_parsed:
            asyncto_id = asyncto_obj.search([
                    ('asynctoid', '=', item['asynctoid']),
                    ])
            if asyncto_id:
                asyncto_obj.write(asyncto_id, item)
            else:
                asyncto_obj.create(item)
        log.warning('LDAP %i records' % len(result_parsed))
    except Exception as e:
        # Best-effort sync: log with traceback instead of raising.
        log.exception(e)
def _assert2(self, record):
    """Reading and searching on field2 must be allowed."""
    model = Pool().get('test.access')
    model.read([record.id], ['field2'])
    model.search([('field2', '=', 'test')])
def default_forecast_date():
    """Default the forecast date to today."""
    pool = Pool()
    Date = pool.get('ir.date')
    return Date.today()
def write(cls, *args):
    """Write users, then invalidate the cached rule domains."""
    super(User, cls).write(*args)
    # Rule domains may depend on user values; restart the cache on
    # the domain_get method.
    Rule = Pool().get('ir.rule')
    Rule._domain_get_cache.clear()
def migrate_property(model_name, field_names, ValueModel, value_names,
        parent=None, fields=None):
    "Migrate property from model_name.field_name to ValueModel.value_name"
    # Values in ir_property are stored as 'model,id' strings; this
    # migration parses them, aggregates one row per (parent, fields)
    # combination and inserts the result into ValueModel's table.
    pool = Pool()
    Field = pool.get('ir.model.field')
    Model = pool.get('ir.model')
    TableHandler = backend.get('TableHandler')
    # Nothing to migrate if the legacy table is already gone.
    if not TableHandler.table_exist('ir_property'):
        return
    cursor = Transaction().connection.cursor()
    field = Field.__table__()
    model = Model.__table__()
    table = ValueModel.__table__()

    if fields is None:
        fields = []
    # Accept a single name or a list for both field and value names.
    if isinstance(field_names, str):
        field_names = [field_names]
    if isinstance(value_names, str):
        value_names = [value_names]

    def split_value(value):
        # 'model,value' -> 'value'
        return value.split(',')[1]
    # How to cast the raw property string per destination field type.
    cast_funcs = {
        'numeric': lambda v: Decimal(split_value(v)) if v else None,
        'integer': lambda v: int(split_value(v)) if v else None,
        'float': lambda v: float(split_value(v)) if v else None,
        'char': lambda v: split_value(v) if v else None,
        'selection': lambda v: split_value(v) if v else None,
        'many2one': lambda v: int(split_value(v)) if v else None,
        'reference': lambda v: v,
        }

    casts = []
    queries = []
    # One sub-query per migrated field; each selects the property value
    # into its own column and NULL into the others, so the UNION can be
    # aggregated with MAX into a single row per key.
    for field_name, value_name in zip(field_names, value_names):
        value_field = getattr(ValueModel, value_name)
        casts.append(cast_funcs[value_field._type])

        property_ = Table('ir_property')
        columns = [
            Literal(None).as_(f) if f != value_name
            else property_.value.as_(value_name)
            for f in value_names]
        if parent:
            # Per-record properties: res is 'model_name,id'.
            columns.append(property_.res.as_(parent))
            where = property_.res.like(model_name + ',%')
        else:
            # Default (model-wide) properties have no res.
            where = property_.res == Null
        columns.extend([Column(property_, f).as_(f) for f in fields])
        query = property_.join(field,
            condition=property_.field == field.id
            ).join(model,
            condition=field.model == model.id
            ).select(*columns,
                where=where
                & (field.name == field_name)
                & (model.model == model_name))
        queries.append(query)

    union = Union(*queries)
    columns = [Max(Column(union, f)).as_(f) for f in value_names]
    if parent:
        columns.append(Column(union, parent).as_(parent))
        pcolumns = [Column(union, parent)]
    else:
        pcolumns = []
    vcolumns = [Column(union, f).as_(f) for f in fields]
    cursor.execute(*union.select(*(columns + vcolumns),
            group_by=pcolumns + vcolumns))

    columns = [Column(table, f) for f in value_names]
    if parent:
        pcolumns = [Column(table, parent)]
    else:
        pcolumns = []
    vcolumns = [Column(table, f) for f in fields]
    values = []
    # Row layout: value columns first, then the optional parent column,
    # then the extra fields.
    l = len(value_names)
    for row in cursor.fetchall():
        value = [c(v) for v, c in zip(row, casts)]
        if parent:
            # parent is stored as 'model_name,id'; keep the integer id.
            value.append(int(row[l].split(',')[1]) if row[l] else None)
            i = 1
        else:
            i = 0
        value.extend(row[l + i:])
        values.append(value)
    if (values and not (
                # No property defined
                len(values) == 1
                and all(x is None for x in values[0][:len(columns)]))):
        # Delete previous migrated values
        cursor.execute(*table.delete())
        cursor.execute(*table.insert(columns + pcolumns + vcolumns,
                values=values))
def write(cls, appointments, values):
    """Write appointments, keeping the linked caldav event in sync.

    Two cases per appointment with an event:
    - the health professional is unchanged: mirror date, patient and
      comment changes onto the existing calendar event;
    - the health professional changes: create a new event on the new
      professional's calendar, store its id in *values* and delete the
      old event.
    NOTE(review): *values* is mutated inside the loop ('event' key), so
    with several appointments the last created event id is written to
    all of them — confirm callers only pass one appointment here.
    """
    pool = Pool()
    Event = pool.get('calendar.event')
    Patient = pool.get('gnuhealth.patient')
    Healthprof = pool.get('gnuhealth.healthprofessional')
    for appointment in appointments:
        # Update caldav event
        if appointment.event and ('healthprof' not in values):
            if 'appointment_date' in values:
                Event.write([appointment.event], {
                    'dtstart': values['appointment_date'],
                    })
            if 'appointment_date_end' in values:
                Event.write([appointment.event], {
                    'dtend': values['appointment_date_end'],
                    })
            if 'patient' in values:
                patient = Patient(values['patient'])
                Event.write([appointment.event], {
                    'summary': patient.name.rec_name,
                    })
            if 'comments' in values:
                Event.write([appointment.event], {
                    'description': values['comments'],
                    })
        else:
            # Move the event to the new health professional
            if appointment.event and ('healthprof' in values):
                current_event = [appointment.event]
                if appointment.healthprof.name.internal_user:
                    healthprof = Healthprof(values['healthprof'])
                    if healthprof.name.internal_user.calendar:
                        # Health professional has calendar
                        patient = appointment.patient.name.rec_name
                        comments = ''
                        # Prefer incoming values, fall back to the
                        # appointment's current data.
                        if 'comments' in values:
                            comments = values['comments']
                        else:
                            comments = appointment.comments
                        if 'appointment_date' in values:
                            appointment_date = values['appointment_date']
                        else:
                            appointment_date = appointment.appointment_date
                        if 'appointment_date_end' in values:
                            appointment_date_end = values[
                                'appointment_date_end']
                        else:
                            appointment_date_end = \
                                appointment.appointment_date_end
                        events = Event.create([{
                            'dtstart': appointment_date,
                            'dtend': appointment_date_end,
                            'calendar':
                                healthprof.name.internal_user.calendar.id,
                            'summary': patient,
                            'description': comments,
                            }])
                        values['event'] = events[0].id
                    # Delete the event from the current health
                    # professional after it has been transferred to
                    # the new healthprof
                    Event.delete(current_event)
    return super(Appointment, cls).write(appointments, values)
def default_employee():
    """Return the employee id of the current user, if one is set."""
    User = Pool().get('res.user')
    employee = User(Transaction().user).employee
    if employee:
        return employee.id
    return None
def _assert(self, record):
    """Reading field1 and searching must be allowed."""
    model = Pool().get(self.model_name)
    model.read([record.id], ['field1'])
    model.search([])
def write(cls, companies, values, *args):
    """Write companies, then invalidate the cached rule domains."""
    super(Company, cls).write(companies, values, *args)
    # Rule domains may depend on company values; restart the cache on
    # the domain_get method.
    Rule = Pool().get('ir.rule')
    Rule._domain_get_cache.clear()
def group(self):
    """Return the single group the current user belongs to.

    Raises ValueError if the user is in zero or several groups, since
    exactly one match is unpacked.
    """
    Group = Pool().get('res.group')
    matches = Group.search([('users', '=', Transaction().user)])
    group, = matches
    return group
def _assert2(self, record):
    """Writing field2 must be allowed."""
    model = Pool().get('test.access')
    model.write([record], {'field2': 'test'})
def multivalue_model(cls, field):
    """Return the model storing *field*'s multivalue data.

    'reconciliation_seq' is kept on the dedicated sequences model;
    everything else is delegated to the parent implementation.
    """
    if field == 'reconciliation_seq':
        return Pool().get('cash_bank.configuration.sequences')
    return super(Configuration, cls).multivalue_model(field)
def _assert_raises2(self, record):
    """Writing field2 must be denied with an AccessError."""
    model = Pool().get('test.access')
    with self.assertRaises(AccessError):
        model.write([record], {'field2': 'test'})
def generate_requests(cls, products=None, warehouses=None):
    """
    For each product compute the purchase request that must be
    created today to meet product outputs.
    If products is specified it will compute the purchase requests
    for the selected products.
    If warehouses is specified it will compute the purchase request
    necessary for the selected warehouses.
    """
    pool = Pool()
    OrderPoint = pool.get('stock.order_point')
    Product = pool.get('product.product')
    Location = pool.get('stock.location')
    User = pool.get('res.user')
    company = User(Transaction().user).company
    if warehouses is None:
        # fetch warehouses:
        warehouses = Location.search([
                ('type', '=', 'warehouse'),
                ])
    warehouse_ids = [w.id for w in warehouses]
    # fetch order points
    order_points = OrderPoint.search([
            ('warehouse_location', '!=', None),
            ('company', '=', company.id if company else None),
            ])
    # index them by product
    product2ops = {}
    product2ops_other = {}
    for order_point in order_points:
        if order_point.type == 'purchase':
            dict_ = product2ops
        else:
            dict_ = product2ops_other
        dict_[
            (order_point.warehouse_location.id, order_point.product.id)
            ] = order_point

    if products is None:
        # fetch goods and assets
        # ordered by ids to speedup reduce_ids in products_by_location
        products = Product.search([
                ('type', 'in', ['goods', 'assets']),
                ('consumable', '=', False),
                ('purchasable', '=', True),
                ], order=[('id', 'ASC')])
    product_ids = [p.id for p in products]
    # aggregate product by minimum supply date
    date2products = {}
    for product in products:
        min_date, max_date = cls.get_supply_dates(product)
        date2products.setdefault((min_date, max_date), []).append(product)

    # compute requests
    new_requests = []
    # Fixed: dict.iteritems() does not exist on Python 3; use items().
    for dates, dates_products in date2products.items():
        min_date, max_date = dates
        for sub_products in grouped_slice(dates_products):
            sub_products = list(sub_products)
            product_ids = [p.id for p in sub_products]
            with Transaction().set_context(forecast=True,
                    stock_date_end=min_date or datetime.date.max):
                pbl = Product.products_by_location(warehouse_ids,
                    product_ids, with_childs=True)
            for warehouse_id in warehouse_ids:
                min_date_qties = defaultdict(lambda: 0,
                    ((x, pbl.pop((warehouse_id, x), 0))
                        for x in product_ids))
                # Do not compute shortage for product
                # with different order point
                product_ids = [p.id for p in sub_products
                    if (warehouse_id, p.id) not in product2ops_other]
                # Search for shortage between min-max
                shortages = cls.get_shortage(warehouse_id, product_ids,
                    min_date, max_date, min_date_qties=min_date_qties,
                    order_points=product2ops)

                for product in sub_products:
                    if product.id not in shortages:
                        continue
                    shortage_date, product_quantity = shortages[product.id]
                    if shortage_date is None or product_quantity is None:
                        continue
                    order_point = product2ops.get(
                        (warehouse_id, product.id))
                    # generate request values
                    request = cls.compute_request(product,
                        warehouse_id, shortage_date, product_quantity,
                        company, order_point)
                    new_requests.append(request)

    # delete purchase requests without a purchase line
    products = set(products)
    reqs = cls.search([
            ('purchase_line', '=', None),
            ('origin', 'like', 'stock.order_point,%'),
            ])
    reqs = [r for r in reqs if r.product in products]
    cls.delete(reqs)
    new_requests = cls.compare_requests(new_requests)
    cls.create_requests(new_requests)
def model(self):
    """Return the ir.model record named by model_access_name.

    Raises ValueError if zero or several records match, since exactly
    one result is unpacked.
    """
    Model = Pool().get('ir.model')
    matches = Model.search([('model', '=', self.model_access_name)])
    record, = matches
    return record
def default_warehouse(cls):
    """Return the warehouse id when the field domain yields exactly one."""
    Location = Pool().get('stock.location')
    candidates = Location.search(cls.warehouse.domain)
    if len(candidates) == 1:
        return candidates[0].id
    return None
'TRYTOND_LOGGING_LEVEL', default=logging.ERROR)) if logging_config: logging.config.fileConfig(logging_config) else: logformat = ('%(process)s %(thread)s [%(asctime)s] ' '%(levelname)s %(name)s %(message)s') level = max(logging_level, logging.NOTSET) logging.basicConfig(level=level, format=logformat) logging.captureWarnings(True) if os.environ.get('TRYTOND_COROUTINE'): from gevent import monkey monkey.patch_all() from trytond.pool import Pool # noqa: E402 from trytond.wsgi import app # noqa: E402 Pool.start() # TRYTOND_CONFIG it's managed by importing config db_names = os.environ.get('TRYTOND_DATABASE_NAMES') if db_names: # Read with csv so database name can include special chars reader = csv.reader(StringIO(db_names)) threads = [] for name in next(reader): thread = threading.Thread(target=Pool(name).init) thread.start() threads.append(thread) for thread in threads: thread.join()