def _backup_on_s3(self):
    """Push the generated backup file to the configured AWS S3 bucket."""
    # Both credential fields must be set before talking to AWS.
    if self.access_key_id is False or self.secret_access_key is False:
        raise exceptions.MissingError(
            "AWS S3: You need to add a AccessKey and a SecretAccessKey!")
    if self.s3_bucket_name is False:
        raise exceptions.MissingError(
            "AWS S3: You need to add a BucketName!")
    s3_client = boto3.client(
        's3',
        aws_access_key_id=self.access_key_id,
        aws_secret_access_key=self.secret_access_key)
    path, content = self.get_path_and_content()
    try:
        s3_client.put_object(Bucket=self.s3_bucket_name, Body=content, Key=path)
    except botocore.exceptions.ClientError as client_err:
        raise exceptions.ValidationError(
            "AWS S3: " + client_err.response['Error']['Message'])
    # Only reached when the upload succeeded (the except branch re-raises).
    message = (
        "AWS S3: No Error during upload. You can find the backup under the bucket {0} with the "
        "name {1}".format(self.s3_bucket_name, path))
    self.set_last_fields(message, path=path)
    self.message_post(message)
def _backup_on_s3(self):
    """Send the current backup to AWS S3 and record the outcome via the
    shared messaging helper."""
    missing_creds = (self.access_key_id is False
                     or self.secret_access_key is False)
    if missing_creds:
        raise exceptions.MissingError(
            "AWS S3: You need to add a AccessKey and a SecretAccessKey!")
    if self.s3_bucket_name is False:
        raise exceptions.MissingError(
            "AWS S3: You need to add a BucketName!")
    path, content = self.get_path_and_content()
    s3 = boto3.client(
        's3',
        aws_access_key_id=self.access_key_id,
        aws_secret_access_key=self.secret_access_key)
    try:
        s3.put_object(Bucket=self.s3_bucket_name, Body=content, Key=path)
    except botocore.exceptions.ClientError as client_err:
        raise exceptions.ValidationError(
            "AWS S3: " + client_err.response['Error']['Message'])
    else:
        # Success path: delegate message logging to the shared helper.
        self.send_and_set_message('AWS S3', path,
                                  bucket_name=self.s3_bucket_name)
def send_sms(username, password, message, quote_sms=False, **kwargs):
    '''
    Sends the SMS
    @param username: bulkSMS EAPI username
    @param password: bulkSMS EAPI password
    @param message: The body of the SMS message.
    @param quote_sms: when True, request a quote instead of sending
    @param **kwargs: Valid keys are on the VALID_PARAMS_SEND_SMS
    @return: True on success
    @raise exceptions.MissingError: when a mandatory argument is falsy
    @raise exceptions.UserError: when an unknown kwarg is supplied
    '''
    if not username or not password or not message:
        raise exceptions.MissingError('Provide username password and message.')
    for param in kwargs:
        if param not in VALID_PARAMS_SEND_SMS:
            raise exceptions.UserError(
                msg='''Paramerer {0} is invalid, valid params for sending an SMS are {1}'''
                .format(param, VALID_PARAMS_SEND_SMS))
    # Build the params dict first: dict.update() returns None, so the
    # original inline ``{...}.update(kwargs)`` silently sent params=None.
    params = {'username': username, 'password': password, 'message': message}
    params.update(kwargs)
    try:
        request = requests.Request(
            'POST',
            '{EAPI}/submission/{type}/2/2.0'
            .format(EAPI=EAPI,
                    type='send_sms' if not quote_sms else 'quote_sms'),
            params=params)
        # Session.send() only accepts a PreparedRequest, not a raw Request.
        response = requests.Session().send(request.prepare())
        # The EAPI status code is the first '|'-separated field of the
        # response body; response.text is a str, so compare against strings
        # (the original compared against the ints 0/1, which never matched).
        if response.ok and response.text.split('|')[0] in ('0', '1'):
            return True
        raise BulkSMSExceptions.GenericError(response.text.split('|'))
    except HTTPError as error:
        raise exceptions.Warning(msg='''An error occured while sending SMS. Info: {} \n {}'''.format(error.response.status_code, error.response.reason))
def get_collection_data(self, name, version=None):
    """Fetch a cross shared collection from Cenit by name (and optional
    version) and return its full payload."""
    cenit_api = self.env['cenit.api']
    params = {'name': name}
    if version:
        params['shared_version'] = version
    else:
        # No explicit version: ask Cenit for the latest shared_version only.
        params['sort_by'] = 'shared_version'
        params['limit'] = 1
    listing = cenit_api.get(
        "/setup/cross_shared_collection", params=params
    ).get("cross_shared_collections", False)
    if not isinstance(listing, list):
        raise exceptions.ValidationError(
            "Hey!! something wicked just happened")
    if len(listing) != 1:
        raise exceptions.MissingError(
            "Required '%s [%s]' not found in Cenit"
            % (name, version or "any"))
    # Second round-trip retrieves the complete collection by its id.
    return cenit_api.get(
        "/setup/cross_shared_collection/%s" % listing[0]['id'])
def create_order(self):
    """Create a sale.order with one line per selected product template and
    return a window action opening the new order."""
    if not len(self.partner_id):
        raise exceptions.MissingError(
            'You need to add a partner/customer to do this action!')
    order = self.env['sale.order'].create({
        'partner_id': self.partner_id.id,
    })
    line_model = self.env['sale.order.line']
    for product in self.product_template_ids:
        line_model.create({
            'order_id': order.id,
            'name': product.name,
            'product_id': product.id,
            'product_uom': product.uom_id.id,
            'product_uom_qty': 1,
        })
    # Keep a reference to the created order on the wizard record.
    self.created_sale_order = order
    return {
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'sale.order',
        'res_id': order.id,
        'view_id': False,
        'type': 'ir.actions.act_window',
        'target': 'current',
        'nodestroy': True,
    }
def send_message(self, partner, text):
    """ Send message to user.
    :param partner: res.partner record
    :param text: the message content
    :return: True if send message success
    :raises exceptions.MissingError: when the partner cannot be messaged
    """
    recipient_id = partner.psid
    page_id = partner.page_id
    # Both identifiers are needed to deliver a message. The original used
    # ``and`` (only failed when *both* were missing) and also forgot the
    # ``raise``, so invalid partners fell through to the API call.
    if not page_id or not recipient_id:
        raise exceptions.MissingError("Can't send message to this partner.")
    access_token = self._get_page_access_token(page_id)
    response = Bot(access_token).send_text_message(recipient_id, text)
    if response.get('error'):
        # Format arguments were swapped in the original: the placeholder in
        # Partner(%s) is the recipient, the trailing one the error payload.
        raise exceptions.MissingError(
            "Can't send message to Partner(%s).\n%s"
            % (recipient_id, response['error']))
    return True
def _get_page_access_token(self, page_id):
    """ Get page access token to do page's manipulation.
    :param page_id: ID of Facebook Page
    :return: string: access token
    :raises exceptions.MissingError: when the user access token is expired
        and the page tokens cannot be refreshed
    """
    # Cached token for this page, if any (empty recordset yields False).
    page_access_token = self.env['omi.fb.page'].search([('page_id', '=', page_id)], limit=1).access_token
    try:
        # Probe the token with a cheap /me call; GraphAPIError means it is
        # invalid or expired.
        facebook.GraphAPI(access_token=page_access_token, version=self.version).get_object("me")
        return page_access_token
    except facebook.GraphAPIError as exc:
        _logger.info("Maybe page(%s) access token is expired.\n%s" % (page_id, exc))
        # Fall back to the user-level token and refresh every page token.
        access_token = self._get_access_token()
        try:
            graph = facebook.GraphAPI(access_token=access_token, version=self.version)
            accounts_data = graph.get_connections('me', 'accounts')
            # Upsert the fresh token of each page the user manages.
            for page in accounts_data['data']:
                page_token = self.env['omi.fb.page'].search([('page_id', '=', page['id'])], limit=1)
                if page_token:
                    page_token.write({'access_token': page['access_token']})
                else:
                    self.env['omi.fb.page'].create({
                        'page_id': page['id'],
                        'access_token': page['access_token'],
                        'name': page['name'],
                    })
            # Retry once with the refreshed cache. NOTE(review): if the
            # requested page_id is not among the user's accounts this
            # recursion repeats until the Graph call fails — confirm upstream
            # guarantees page_id belongs to the user.
            return self._get_page_access_token(page_id)
        except facebook.GraphAPIError as exc:
            _logger.info("Maybe user's access token is expired.\n%s" % exc)
            raise exceptions.MissingError("User access token is expired.\n%s" % exc)
def prepare_line(self, name, debit_amount, credit_amount, account_code,
                 cresus_tax_code, analytic_account_code, tax_ids):
    """Build a move-line values dict from imported Cresus columns.

    Raises MissingError when *account_code* matches no account.
    """
    account = self.env['account.account'].search(
        [('code', '=', account_code)], limit=1)
    if not account:
        raise exceptions.MissingError(
            _("No account with code %s") % account_code)
    vals = {
        'name': name,
        'debit': debit_amount,
        'credit': credit_amount,
        'account_id': account.id,
    }
    if cresus_tax_code:
        # Only price-included taxes participate in the Cresus mapping.
        tax = self.env['account.tax'].search(
            [('tax_cresus_mapping', '=', cresus_tax_code),
             ('price_include', '=', True)], limit=1)
        vals['tax_line_id'] = tax.id
    if analytic_account_code:
        analytic = self.env['account.analytic.account'].search(
            [('code', '=', analytic_account_code)], limit=1)
        vals['analytic_account_id'] = analytic.id
    if tax_ids:
        # (4, id, 0) links each existing tax record without replacing others.
        vals['tax_ids'] = [(4, tax_id, 0) for tax_id in tax_ids]
    return vals
def web_select_pos(self, cash_register=None, **kw):
    """Controller: route the user to an open POS session, open the requested
    cash register, or render the POS selector page.

    :param cash_register: optional pos.config id to open directly
    :return: redirect to /pos/web, or the rendered selector page
    :raises exceptions.MissingError: when the requested register does not
        exist or is already occupied
    """
    ensure_db()
    if not request.session.uid:
        # Anonymous visitor: force authentication first.
        return werkzeug.utils.redirect('/web/login', 303)
    else:
        users = request.env['res.users'].browse([request.session.uid])
        # Does the current user already have an opened session?
        pos_session = request.env['pos.session'].search([
            ('user_id', '=', users.id),
            ('state', '=', 'opened')
        ])
        if pos_session:
            return http.redirect_with_hash('/pos/web')
        elif cash_register:
            config = request.env['pos.config'].search([('id', '=', cash_register)])
            # sudo(): opened sessions of other users must be visible too.
            pos_session = request.env['pos.session'].sudo().search([
                ('config_id', '=', config.id),
                ('state', '=', 'opened')
            ])
            if config and not pos_session:
                session_id = config.open_session_cb()
                # open_session_cb leaves the session in 'opening_control'.
                pos_session = request.env['pos.session'].search([
                    ('config_id', '=', config.id),
                    ('state', '=', 'opening_control')
                ])
                if config.cash_control:
                    pos_session.write({'opening_balance': True})
                session_open = pos_session.action_pos_session_open()
                return http.redirect_with_hash('/pos/web')
            else:
                raise exceptions.MissingError(
                    _('La caja especificada no existe o esta ocupada. Por favor contacte con el administrador'
                      ))
        # No open session and no register requested: build the selector list.
        pos_list = []
        pos_ids = request.env['pos.config'].search([
            ('company_id', '=', request.env.user.company_id.id)
        ])
        for pos in pos_ids:
            occupied = False
            user = ''
            # A non-closed session marks the register as occupied.
            pos_session = request.env['pos.session'].sudo().search(
                [('config_id', '=', pos.id), ('state', '!=', 'closed')],
                limit=1, order='id desc')
            if pos_session:
                occupied = True
                user = pos_session.user_id.name
            pos_list.append({
                'occupied': occupied,
                'pos': pos,
                'user': user
            })
        response = request.render('flexibite_com_advance.pos_selector',
                                  {'pos_list': pos_list})
        # Forbid framing of the selector page.
        response.headers['X-Frame-Options'] = 'DENY'
        return response
def send_batch(username, password, batch_data, **kwargs):
    '''
    Submits a batch of SMS messages through the BulkSMS EAPI.
    @param username: bulkSMS EAPI username
    @param password: bulkSMS EAPI password
    @param batch_data: CSV payload, one message per row
    @param **kwargs: Valid keys are on VALID_PARAMS_SEND_BATCH
    @return: True on success
    '''
    if not username or not password or not batch_data:
        raise exceptions.MissingError('Provide username password and batch_data')
    _csv = csv.DictReader(batch_data)
    # DictReader.fieldnames is a property, not a callable — calling it as
    # ``fieldnames()`` raised TypeError in the original.
    for field in _csv.fieldnames:
        if field not in VALID_PARAMS_SEND_BATCH:
            # Message now reports the list actually checked against
            # (VALID_FIELDS_BATCH was an undefined name).
            raise exceptions.MissingError(
                msg='''Paramerer {0} is invalid, valid params for sending an SMS are {1}'''
                .format(field, VALID_PARAMS_SEND_BATCH))
    for param in kwargs:
        if param not in VALID_PARAMS_SEND_BATCH:
            raise exceptions.UserError('''Parameter {0} is invalid valid params for sending batch SMS are {1} '''.format(
                param, VALID_PARAMS_SEND_BATCH))
    # dict.update() returns None, so the original inline
    # ``{...}.update(kwargs)`` sent ``params=None``.
    params = {'username': username, 'password': password,
              'batch_data': batch_data}
    params.update(kwargs)
    try:
        request = requests.Request(
            'POST',
            '{EAPI}/submission/send_batch/1/1.0'.format(EAPI=EAPI),
            params=params)
        # Session.send() only accepts a PreparedRequest.
        response = requests.Session().send(request.prepare())
        # Status code is the first '|'-separated field of the (str) body;
        # '0'/'1' mean accepted. The original compared against ints.
        if response.ok and response.text.split('|')[0] in ('0', '1'):
            return True
        raise BulkSMSExceptions.GenericError(response.text.split('|'))
    except HTTPError as error:
        raise exceptions.Warning(
            msg='''An error occured while sending batch SMS. Info {} \n {}'''.format(error.response.status_code, error.response.reason))


def quote_sms(username, password, message, **kwargs):
    '''
    Returns a quote for the SMS that is to be send.
    @param username: BulkSMS username
    @param password: BulkSMS password
    @param message: Your message
    @param param: **kwargs See method's documentation on BulkSMS.com
    '''
    return send_sms(username, password, message, quote_sms=True, **kwargs)
def renumber(self):
    """Renumber all the posted moves on the given journal and periods.

    :return dict: Window action to open the renumbered moves, to review them.
    :raises exceptions.MissingError: when no posted move matches the filters
    """
    # Accumulators of sequences / date-ranges already reset, so each one is
    # rewound to self.number_next at most once.
    reset_sequences = self.env["ir.sequence"]
    reset_ranges = self.env["ir.sequence.date_range"]
    _logger.debug("Searching for account moves to renumber.")
    move_ids = self.env['account.move'].search(
        [('journal_id', 'in', self.journal_ids.ids),
         ('date', '>=', self.date_from),
         ('date', '<=', self.date_to),
         ('state', '=', 'posted')],
        order='date, id')
    if not move_ids:
        raise exceptions.MissingError(
            _('No records found for your selection!'))
    _logger.debug("Renumbering %d account moves.", len(move_ids))
    for move in move_ids:
        sequence = move.journal_id.sequence_id
        if sequence not in reset_sequences:
            if sequence.use_date_range:
                # Date-range sequences: reset only the range covering the
                # move's date, once.
                date_range = self.env["ir.sequence.date_range"].search([
                    ("sequence_id", "=", sequence.id),
                    ("date_from", "<=", move.date),
                    ("date_to", ">=", move.date)
                ])
                if date_range and date_range not in reset_ranges:
                    date_range.number_next = self.number_next
                    reset_ranges |= date_range
            else:
                sequence.number_next = self.number_next
                reset_sequences |= sequence
        # Generate (using our own get_id) and write the new move number;
        # ir_sequence_date makes next_by_id pick the range for move.date.
        move.name = (sequence.with_context(
            ir_sequence_date=move.date).next_by_id())
    _logger.debug("%d account moves renumbered.", len(move_ids))
    return {
        'type': 'ir.actions.act_window',
        'name': _("Renumbered account moves"),
        'res_model': 'account.move',
        'domain': [("id", "in", move_ids.ids)],
        'view_type': 'form',
        'view_mode': 'tree',
        'context': self.env.context,
        'target': 'current',
    }
def upload_existing(self):
    """Upload every stored (non-URL) attachment matching the configured
    condition to the S3 bucket, then turn each attachment into a URL record.

    :raises exceptions.MissingError: when the S3 credentials are incomplete
    :raises exceptions.UserError: when the S3 upload fails
    """
    condition = self.s3_condition and safe_eval(self.s3_condition, mode="eval") or []
    domain = [('type', '!=', 'url'), ('id', '!=', 0)] + condition
    attachments = self.env['ir.attachment'].search(domain)
    attachments = attachments._filter_protected_attachments()
    if attachments:
        s3 = self.env['ir.attachment']._get_s3_resource()
        if not s3:
            raise exceptions.MissingError(
                _("Some of the S3 connection credentials are missing.\n Don't forget to click the ``[Apply]`` button after any changes you've made"
                  ))
        for attach in attachments:
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b''
            # Content-addressed key: sha1 of the raw payload.
            fname = hashlib.sha1(bin_data).hexdigest()
            bucket_name = self.s3_bucket
            try:
                s3.Bucket(bucket_name).put_object(
                    Key=fname,
                    Body=bin_data,
                    ACL='public-read',
                    ContentType=attach.mimetype,
                )
            except Exception as e:
                # Python 3 exceptions have no ``.message`` attribute; pass
                # the exception itself (as the sibling implementation does).
                raise exceptions.UserError(e)
            # Rewrite the attachment as a URL pointing at the S3 object and
            # drop the database payload.
            vals = {
                'file_size': len(bin_data),
                'checksum': attach._compute_checksum(bin_data),
                'index_content': attach._index(bin_data, attach.datas_fname, attach.mimetype),
                'store_fname': fname,
                'db_datas': False,
                'type': 'url',
                'url': attach._get_s3_object_url(s3, bucket_name, fname),
            }
            attach.write(vals)
def upload_existing(self):
    """Upload every stored (non-URL) attachment matching the configured
    condition to the S3 bucket, then turn each attachment into a URL record.

    :raises exceptions.MissingError: when the S3 credentials are incomplete
    :raises exceptions.UserError: when the S3 upload fails
    """
    condition = (self.s3_condition and
                 safe_eval(self.s3_condition, mode="eval") or [])
    domain = [("type", "!=", "url"), ("id", "!=", 0)] + condition
    attachments = self.env["ir.attachment"].search(domain)
    attachments = attachments._filter_protected_attachments()
    if attachments:
        s3 = self.env["ir.attachment"]._get_s3_resource()
        if not s3:
            raise exceptions.MissingError(
                _("Some of the S3 connection credentials are missing.\n Don't forget to click the ``[Apply]`` button after any changes you've made"
                  ))
        for attach in attachments:
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b""
            # Content-addressed key: sha1 of the raw payload.
            fname = hashlib.sha1(bin_data).hexdigest()
            bucket_name = self.s3_bucket
            try:
                s3.Bucket(bucket_name).put_object(
                    Key=fname,
                    Body=bin_data,
                    ACL="public-read",
                    ContentType=attach.mimetype,
                )
            except Exception as e:
                raise exceptions.UserError(e)
            # Rewrite the attachment as a URL pointing at the S3 object and
            # drop the database payload.
            vals = {
                "file_size": len(bin_data),
                "checksum": attach._compute_checksum(bin_data),
                "index_content": attach._index(bin_data, attach.datas_fname,
                                               attach.mimetype),
                "store_fname": fname,
                "db_datas": False,
                "type": "url",
                "url": attach._get_s3_object_url(s3, bucket_name, fname),
            }
            attach.write(vals)
def action_transmettre_achat(self):
    """Transmit the purchase request to the purchase (ACH) department.

    Sets the state to 'transmitted' when the department has a responsible
    user, otherwise raises; optionally sends the e-mail notification.

    :raises exceptions.MissingError: when the ACH department has no manager
    """
    # self.ensure_one()
    # self.generateQuotation()
    # limit=1 instead of [0]: indexing an empty recordset raised IndexError
    # before the ``if department`` guard could ever run.
    department = self.env['hr.department'].search(
        [('code', '=', 'ACH')], limit=1)
    result = False
    if department:
        if department.manager_id.user_id:
            # self.chief_purchase_id = department.manager_id.user_id
            self.state = 'transmitted'
        else:
            raise exceptions.MissingError(
                _("Le Service Achat n'a aucun responsable définit.\nVeuillez le signifier à l'administrateur."
                  ))
    if self.alerte_mail:
        result = self.send_notification('besoin_achat_notif', self._context)
def upload_existing(self):
    """Upload every stored (non-URL) attachment to the configured GCE
    bucket and turn each attachment into a public URL record.

    :raises exceptions.MissingError: when the GCE credentials are incomplete
    """
    import base64  # local import: keeps this fix self-contained
    domain = [('type', '!=', 'url'), ('id', '!=', 0)]
    attachments = self.env['ir.attachment'].search(domain)
    attachments = attachments._filter_protected_attachments()
    if attachments:
        bucket_name = self.env['ir.attachment']._get_gce_settings(
            'gce.bucket', 'GCE_BUCKET')
        bucket = self.env['ir.attachment']._get_gce_resource()
        if not bucket:
            raise exceptions.MissingError(
                _("Some of the GCE credentials are missing.\n Don't forget to click the ``[Apply]`` button after any changes you've made"
                  ))
        for attachment in attachments:
            value = attachment.datas
            # value.decode('base64') is Python 2 only; use base64.b64decode
            # as the S3 variant of this method already does.
            bin_data = base64.b64decode(value) if value else b''
            # Content-addressed key: sha1 of the raw payload (the original
            # computed the same digest twice into two variables).
            fname = hashlib.sha1(bin_data).hexdigest()
            blob = bucket.blob(fname)
            blob.upload_from_string(bin_data, attachment.mimetype)
            blob.make_public()
            vals = {
                'file_size': len(bin_data),
                'checksum': attachment._compute_checksum(bin_data),
                'index_content': attachment._index(bin_data,
                                                   attachment.datas_fname,
                                                   attachment.mimetype),
                'store_fname': fname,
                'db_datas': False,
                'type': 'url',
                'url': attachment._get_gce_object_url(bucket_name, fname),
            }
            attachment.write(vals)
        self.env.cr.commit()
def validateAction(self):
    """Create one bank-statement line per wizard line on the selected
    statement, then mark the originating purchase request as done."""
    if not self.bank_statement_id:
        raise exceptions.MissingError(
            _("Vous devez obligatoirement ajouter une caisse afin de valider les éecritures"
              ))
    statement_line_model = self.env['account.bank.statement.line']
    for wizard_line in self.line_ids:
        statement_line_model.create({
            'date': self.date_payment,
            'name': self.name,
            'partner_id': wizard_line.partner_id.id,
            'ref': wizard_line.ref,
            'amount': wizard_line.amount,
            'statement_id': self.bank_statement_id.id,
        })
    active_id = self._context.get('active_id')
    if active_id:
        # Close the purchase request the wizard was launched from.
        self.env['purchase.exp.achat'].browse(active_id).write(
            {'state': 'done'})
def send_product(self):
    """Add self.product_id to every subscription order (subs_to >= date)
    that already contains the product taken from the action context."""
    self.ensure_one()
    order_model = self.env['sale.order']
    order_line_model = self.env['sale.order.line']
    product_model = self.env['product.product']
    product_id = self._context.get('active_id', False)
    if self._context.get('active_model') == 'product.template':
        # Translate the template id from the context into a variant id.
        variant = product_model.search([
            ('product_tmpl_id', '=', self._context.get('active_id', False))
        ], limit=1)
        product_id = variant.id
    if not product_id:
        raise exceptions.MissingError('Product_id does not exist.')
    orders = order_model.search([
        ('subs_to', '>=', self.date),
        ('order_line.product_id', '=', product_id)
    ])
    for order in orders:
        matching = order.order_line.filtered(
            lambda l: l.product_id.id == product_id)
        # Total quantity of the matched product on this order.
        qty = sum(line.product_uom_qty for line in matching)
        new_line = order_line_model.create({
            'product_id': self.product_id.id,
            'product_uom_qty': qty,
            'order_id': order.id
        })
        # Trigger the standard onchanges so price/uom/description are set.
        new_line.product_id_change()
        new_line.product_uom_change()
    return True
def get_rnc(self, fiscal_id):
    """Look up an RNC/fiscal id on the configured online validation service.

    :param fiscal_id: taxpayer identifier to validate
    :return: decoded JSON payload on HTTP 200, False otherwise
    :raises exceptions.MissingError: when ``api_marcos`` is not configured
    """
    config_parameter = self.env['ir.config_parameter'].sudo()
    api_marcos = config_parameter.get_param("api_marcos")
    if not api_marcos:
        raise exceptions.MissingError(
            u"Debe configurar la URL de validacón en línea")
    # get_param returns the string "False" when the parameter is unset.
    http_proxy = config_parameter.get_param("http_proxy")
    https_proxy = config_parameter.get_param("https_proxy")
    proxies = {}
    if http_proxy != "False":
        proxies.update({"http": http_proxy})
    # Copy-paste bug in the original: the https entry was guarded by the
    # *http* proxy setting, so an https proxy alone was never applied.
    if https_proxy != "False":
        proxies.update({"https": https_proxy})
    res = requests.get('{}/rnc/{}'.format(api_marcos, fiscal_id),
                       proxies=proxies)
    if res.status_code == 200:
        return res.json()
    else:
        return False
def _prepare_lines(self):
    """Resolve the bill of material for the product and return the exploded
    component lines."""
    # Use the explicit BoM when set, otherwise look one up for the product.
    bom = self.bom_id or self.env['mrp.bom']._bom_find(product=self.product_id)
    if not bom:
        raise exceptions.MissingError(
            _("Cannot find a bill of material for this product."))
    # Remember the resolved BoM and its routing on the record.
    self.write({
        'bom_id': bom.id,
        'routing_id': bom.routing_id.id,
    })
    # Convert the requested quantity into the BoM's unit of measure.
    factor = self.product_uom_id._compute_quantity(
        self.product_qty, bom.product_uom_id)
    _boms_done, lines_done = bom.explode(
        self.product_id, factor / bom.product_qty)
    return lines_done
def import_data_types(self):
    """Import Cenit data types from the uploaded JSON file (``self.b_file``).

    For each entry the referenced ir.model, namespace and schema must
    already exist; matching data types are updated in place (their domain
    lines, triggers and lines are wiped and recreated), new ones created.

    :return: True
    :raises UserError: when the uploaded file is not valid JSON
    :raises exceptions.MissingError: when a referenced model/namespace/schema
        is absent
    """
    self.ensure_one()
    irmodel_pool = self.env['ir.model']
    schema_pool = self.env['cenit.schema']
    namespace_pool = self.env['cenit.namespace']
    datatype_pool = self.env['cenit.data_type']
    line_pool = self.env['cenit.data_type.line']
    domain_pool = self.env['cenit.data_type.domain_line']
    trigger_pool = self.env['cenit.data_type.trigger']
    try:
        # b_file is the base64-encoded upload from the wizard form.
        data_file = base64.decodebytes(self.b_file).decode("utf-8")
        json_data = json.loads(data_file)
    except Exception as e:
        _logger.exception('File unsuccessfully imported, due to format mismatch.')
        raise UserError(_(
            'File not imported due to format mismatch or a malformed file. (Valid format is .json)\n\nTechnical Details:\n%s') % tools.ustr(
            e))
    for data in json_data:
        odoo_model = data['model']
        namespace = data['namespace']
        schema = data['schema']
        # Resolve the three symbolic references into database ids.
        domain = [('model', '=', odoo_model)]
        candidates = irmodel_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s module installed" % odoo_model
            )
        odoo_model = candidates.id
        domain = [('name', '=', namespace)]
        candidates = namespace_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s namespace in Namespaces" % namespace
            )
        namespace = candidates.id
        domain = [('name', '=', schema), ('namespace', '=', namespace)]
        candidates = schema_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s schema in Schemas" % schema
            )
        schema = candidates.id
        vals = {'name': data['name'], 'model': odoo_model,
                'namespace': namespace, 'schema': schema}
        # Upsert by name: update the existing data type or create a new one.
        dt = datatype_pool.search([('name', '=', data['name'])])
        updt = False
        if dt:
            dt.write(vals)
            updt = True
        else:
            dt = datatype_pool.create(vals)
        if updt:
            # Replace-all semantics: drop old sub-records before recreating.
            for d in dt.domain:
                d.unlink()
            for d in dt.triggers:
                d.unlink()
            for d in dt.lines:
                d.unlink()
        for domain in data['domains']:
            vals = {'data_type': dt.id, 'field': domain['field'],
                    'value': domain['value'], 'op': domain['op']}
            domain_pool.create(vals)
        for trigger in data['triggers']:
            vals = {'data_type': dt.id, 'name': trigger['name'],
                    'cron_lapse': trigger['cron_lapse'],
                    'cron_units': trigger['cron_units'],
                    'cron_restrictions': trigger['cron_restrictions'],
                    'cron_name': trigger['cron_name']}
            trigger_pool.create(vals)
        for line in data['lines']:
            # 'reference' names another data type; an empty search yields
            # candidate.id == False.
            domain = [('name', '=', line['reference'])]
            candidate = datatype_pool.search(domain)
            vals = {
                'data_type': dt.id,
                'name': line['name'],
                'value': line['value'],
                'line_type': line['line_type'],
                'line_cardinality': line['line_cardinality'],
                'primary': line['primary'],
                'inlined': line['inlined'],
                'reference': candidate.id
            }
            line_pool.create(vals)
        dt.sync_rules()
    return True
def send_product(self):
    """Add self.product_id to every subscription order active at
    ``self.subs_date`` that already contains the product from the action
    context, carrying over quantity and the best discount.

    :return: True
    :raises exceptions.MissingError: when no product id can be resolved
    """
    self.ensure_one()
    SaleOrderModel = self.env['sale.order']
    SaleOrderLineModel = self.env['sale.order.line']
    ProductProductModel = self.env['product.product']
    product_id = self._context.get('active_id', False)
    if self._context.get('active_model') == 'product.template':
        # Translate the template id from the context into a variant id.
        product = ProductProductModel.search([
            ('product_tmpl_id', '=', self._context.get('active_id', False))
        ], limit=1)
        product_id = product.id
    if not product_id:
        raise exceptions.MissingError('Product_id does not exist.')
    _logger.info('SEND PRODUCT')
    _logger.info("product (%s %s)" % (product_id,
                                      ProductProductModel.browse(product_id).name))
    # Orders whose subscription window covers subs_date (open-ended when
    # subs_to is not set) and that already carry the product.
    domain = [
        ('subs_from', '<=', self.subs_date),
        ('order_line.product_id', '=', product_id),
        '|', ('subs_to', '=', False), ('subs_to', '>=', self.subs_date),
    ]
    _logger.info("Domain (%s)" % (domain))
    orders = SaleOrderModel.search(domain)
    _logger.info("Sale oders (%s)" % (orders))
    for order in orders:
        lines = order.order_line.filtered(
            lambda x: x.product_id.id == product_id)
        qty = 0
        discount = []
        for line in lines:
            qty += line.product_uom_qty
            discount.append(line.discount)
        # New line inherits the summed quantity and the highest discount of
        # the matched lines.
        new_line = SaleOrderLineModel.create({
            'product_id': self.product_id.id,
            'product_uom_qty': qty,
            'order_id': order.id,
            'discount': max(discount)
        })
        # Trigger the standard onchanges so price/uom/description are set.
        new_line.product_id_change()
        new_line.product_uom_change()
    return True
def find_account(code):
    """Return the single account matching *code*, raising MissingError
    when none exists."""
    match = account_obj.search([('code', '=', code)], limit=1)
    if not match:
        raise exceptions.MissingError(
            _("No account with code %s") % code)
    return match
def _standardise_data(self, data, importer):
    """ This function split one line of the spreadsheet into multiple lines.
    Winbiz just writes one line per move.

    Generator: groups consecutive spreadsheet rows sharing the same
    ``pièce`` number into one move (via ``prepare_move``), yielding a move
    each time the piece number changes and once more at the end.

    :param data: iterable of row dicts from the Winbiz spreadsheet
    :param importer: helper providing ``parse_date``
    :raises exceptions.MissingError: unknown journal / account / tax
    """
    tax_obj = self.env['account.tax']
    journal_obj = self.env['account.journal']
    account_obj = self.env['account.account']

    def find_account(code):
        # Resolve an account code; hard error when unknown.
        res = account_obj.search([('code', '=', code)], limit=1)
        if not res:
            raise exceptions.MissingError(
                _("No account with code %s") % code)
        return res

    if self.enable_account_based_line_merging:
        # Optionally merge lines on the same account before building a move.
        my_prepare_move = (lambda lines, journal, date, ref:
                           prepare_move(account_line_merge(lines),
                                        journal, date, ref))
    else:
        my_prepare_move = prepare_move

    # loop state:
    #   incomplete  - the counterpart line of a 'Multiple' row, still
    #                 accumulating amounts from following rows
    #   previous_*  - attributes of the move currently being assembled
    incomplete = None
    previous_pce = None
    previous_date = None
    previous_journal = None
    previous_tax = None
    lines = []
    # self.index is kept for error reporting elsewhere (1-based row number).
    for self.index, winbiz_item in enumerate(data, 1):
        if previous_pce not in (None, winbiz_item[u'pièce']):
            # Piece number changed: flush the accumulated move.
            yield my_prepare_move(lines, previous_journal, previous_date,
                                  ref=previous_pce)
            lines = []
            incomplete = None
        previous_pce = winbiz_item[u'pièce']
        previous_date = importer.parse_date(winbiz_item[u'date'])
        journal = journal_obj.search(
            [('winbiz_mapping', '=', winbiz_item[u'journal'])], limit=1)
        if not journal:
            raise exceptions.MissingError(
                _(u"No journal ‘%s’") % winbiz_item[u'journal'])
        previous_journal = journal

        # tvatyp: 0 no vat was applied (internal transfers for example)
        #         1 there is vat but it's not on this line
        #         2 sales vat
        #         3 purchases vat
        #        -1 pure vat
        tvatyp = int(winbiz_item['ecr_tvatyp'])
        if tvatyp > 1:
            if tvatyp == 2:
                scope = 'sale'
            else:
                assert tvatyp == 3
                scope = 'purchase'
            # tvabn: 2 = tax included in the amount, 1 = tax excluded.
            tvabn = int(winbiz_item['ecr_tvabn'])
            if tvabn == 2:
                included = True
            else:
                assert tvabn == 1
                included = False
            tax = tax_obj.search([
                ('amount', '=', winbiz_item['ecr_tvatx']),
                ('price_include', '=', included),
                ('type_tax_use', '=', scope)], limit=1)
            if not tax:
                raise exceptions.MissingError(
                    _("No tax found with amount = %r and type = %r")
                    % (winbiz_item['ecr_tvatx'], scope))
        else:
            tax = None
        if int(winbiz_item['ecr_tvatyp']) < 0:
            # Pure-VAT row: it belongs to the tax of the preceding row.
            assert previous_tax is not None
            originator_tax = previous_tax
        else:
            originator_tax = None
        previous_tax = tax
        amount = float(winbiz_item[u'montant'])
        recto_line = verso_line = None
        if winbiz_item[u'cpt_débit'] != 'Multiple':
            account = find_account(winbiz_item[u'cpt_débit'])
            if incomplete is not None and incomplete.account == account:
                # Fold this amount into the pending 'Multiple' counterpart.
                incomplete.amount -= amount
            else:
                recto_line = prepare_line(
                    name=winbiz_item[u'libellé'].strip(),
                    amount=(-amount),
                    account=account,
                    originator_tax=originator_tax)
                # ecr_tvadc says which side ('d'ebit/'c'redit) carries the tax.
                if winbiz_item['ecr_tvadc'] == 'd':
                    recto_line.tax = tax
                lines.append(recto_line)
        if winbiz_item[u'cpt_crédit'] != 'Multiple':
            account = find_account(winbiz_item[u'cpt_crédit'])
            if incomplete is not None and incomplete.account == account:
                incomplete.amount += amount
            else:
                verso_line = prepare_line(
                    name=winbiz_item[u'libellé'].strip(),
                    amount=amount,
                    account=account,
                    originator_tax=originator_tax)
                if winbiz_item['ecr_tvadc'] == 'c':
                    verso_line.tax = tax
                lines.append(verso_line)
        # A 'Multiple' side marks the opposite line as the open counterpart
        # that subsequent rows will adjust.
        if winbiz_item[u'cpt_débit'] == 'Multiple':
            assert incomplete is None
            incomplete = verso_line
        if winbiz_item[u'cpt_crédit'] == 'Multiple':
            assert incomplete is None
            incomplete = recto_line
    # Flush the last move.
    yield my_prepare_move(lines, previous_journal, previous_date,
                          ref=previous_pce)
def import_mappings_data(self, json_data):
    """Import Cenit data-type mappings from already-parsed JSON data.

    Same upsert logic as the file-based importer: resolve model/namespace/
    schema references, update-or-create each data type (wiping its domain
    lines, triggers and lines first on update), then recreate sub-records.

    :param json_data: list of mapping dicts
    :return: True
    :raises exceptions.MissingError: when a referenced model/namespace/schema
        is absent
    """
    irmodel_pool = self.env['ir.model']
    schema_pool = self.env['cenit.schema']
    namespace_pool = self.env['cenit.namespace']
    datatype_pool = self.env['cenit.data_type']
    line_pool = self.env['cenit.data_type.line']
    domain_pool = self.env['cenit.data_type.domain_line']
    trigger_pool = self.env['cenit.data_type.trigger']
    for data in json_data:
        odoo_model = data['model']
        namespace = data['namespace']
        schema = data['schema']
        # Resolve the three symbolic references into database ids.
        domain = [('model', '=', odoo_model)]
        candidates = irmodel_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s module installed" % odoo_model)
        odoo_model = candidates.id
        domain = [('name', '=', namespace)]
        candidates = namespace_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s namespace in Namespaces" % namespace)
        namespace = candidates.id
        domain = [('name', '=', schema), ('namespace', '=', namespace)]
        candidates = schema_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s schema in Schemas" % schema)
        schema = candidates.id
        vals = {
            'name': data['name'],
            'model': odoo_model,
            'namespace': namespace,
            'schema': schema
        }
        # Upsert by name.
        dt = datatype_pool.search([('name', '=', data['name'])])
        updt = False
        if dt:
            dt.write(vals)
            updt = True
        else:
            dt = datatype_pool.create(vals)
        if updt:
            # Replace-all semantics: drop old sub-records before recreating.
            for d in dt.domain:
                d.unlink()
            for d in dt.triggers:
                d.unlink()
            for d in dt.lines:
                d.unlink()
        for domain in data['domains']:
            vals = {
                'data_type': dt.id,
                'field': domain['field'],
                'value': domain['value'],
                'op': domain['op']
            }
            domain_pool.create(vals)
        for trigger in data['triggers']:
            vals = {
                'data_type': dt.id,
                'name': trigger['name'],
                'cron_lapse': trigger['cron_lapse'],
                'cron_units': trigger['cron_units'],
                'cron_restrictions': trigger['cron_restrictions'],
                # 'cron_name': trigger['cron_name']
            }
            trigger_pool.create(vals)
        for line in data['lines']:
            # 'reference' names another data type; an empty search yields
            # candidate.id == False.
            domain = [('name', '=', line['reference'])]
            candidate = datatype_pool.search(domain)
            vals = {
                'data_type': dt.id,
                'name': line['name'],
                'value': line['value'],
                'line_type': line['line_type'],
                'line_cardinality': line['line_cardinality'],
                'primary': line['primary'],
                'inlined': line['inlined'],
                'reference': candidate.id
            }
            line_pool.create(vals)
        dt.sync_rules()
    return True
def renumber(self):
    """Renumber (in raw SQL) all moves of one journal within the selected
    period, propagate the new names to invoices, payments and bank-statement
    lines, and reset the journal sequence to the next free number.

    NOTE(review): the SQL is assembled by string concatenation from record
    values (period code/dates, journal id). These come from the database,
    not end users, but parameterized queries (cr.execute(sql, params)) would
    still be safer — confirm and refactor.

    :return dict: window action showing a confirmation message
    """
    sequence = self.journal_id.sequence_id
    # Rename moves of the period as '<period>-<row_number padded>' ordered
    # by accounting date / ref.
    self.env.cr.execute("""
        update account_move set name = '""" + str(self.periodo.code.split('/')[0]) + """' || '-' || LPAD(T.CORRELATIVO::text,""" + str(sequence.padding) + """,'0')
        from ( SELECT row_number() OVER () AS CORRELATIVO,* FROM (
            SELECT ID,DATE,FECHA_CONTABLE,NAME,REF,JOURNAL_ID FROM ACCOUNT_MOVE
            WHERE FECHA_CONTABLE BETWEEN '""" + str(self.periodo.date_start) + """' AND '""" + str(self.periodo.date_stop) + """'
            AND JOURNAL_ID=""" + str(self.journal_id.id) + """
            ORDER BY FECHA_CONTABLE, ref ,LEFT(NAME,2) )TT ) T
        where T.id = account_move.id """)
    # Next free correlative = max(row_number) + 1 for the same selection.
    self.env.cr.execute("""
        select max(CORRELATIVO)+1 from ( SELECT row_number() OVER () AS CORRELATIVO,* FROM (
            SELECT ID,DATE,FECHA_CONTABLE,NAME,REF,JOURNAL_ID FROM ACCOUNT_MOVE
            WHERE FECHA_CONTABLE BETWEEN '""" + str(self.periodo.date_start) + """' AND '""" + str(self.periodo.date_stop) + """'
            AND JOURNAL_ID=""" + str(self.journal_id.id) + """
            ORDER BY FECHA_CONTABLE,LEFT(NAME,2) )TT ) X """)
    res = self.env.cr.fetchall()
    default = 1
    for i in res:
        default = i[0]
    if sequence.use_date_range:
        date_range = self.env["ir.sequence.date_range"].search([
            ("sequence_id", "=", sequence.id),
            ("date_from", "=", self.periodo.date_start),
            ("date_to", ">=", self.periodo.date_stop)
        ])
        if len(date_range) > 1:
            raise exceptions.MissingError(
                _('Existe dos intervalos de fecha que se cruzan en el diario '
                  + self.journal_id.name))
        else:
            # NOTE(review): date_range[0] raises IndexError when the search
            # returns no range — confirm a range always exists here.
            date_range[0].number_next_actual = default
            date_range[0].number_next = default
    else:
        sequence.number_next_actual = default
    # Propagate the new move names to the linked invoices.
    self.env.cr.execute("""
        update account_invoice set move_name = T.NAME
        from ( SELECT row_number() OVER () AS CORRELATIVO,* FROM (
            SELECT ID,DATE,FECHA_CONTABLE,NAME,REF,JOURNAL_ID FROM ACCOUNT_MOVE
            WHERE FECHA_CONTABLE BETWEEN '""" + str(self.periodo.date_start) + """' AND '""" + str(self.periodo.date_stop) + """'
            AND JOURNAL_ID=""" + str(self.journal_id.id) + """
            ORDER BY FECHA_CONTABLE,LEFT(NAME,2) )TT ) T
        where T.id = account_invoice.move_id """)
    # Propagate to payments (matched through their move lines).
    self.env.cr.execute("""
        update account_payment set move_name = T.NAME,payment_reference = T.NAME
        from ( SELECT row_number() OVER () AS CORRELATIVO,* FROM (
            SELECT ap.ID,am.NAME FROM ACCOUNT_MOVE am
            INNER JOIN ACCOUNT_MOVE_LINE aml on aml.move_id = am.id
            INNER JOIN account_payment ap on ap.id = aml.payment_id
            WHERE am.FECHA_CONTABLE BETWEEN '""" + str(self.periodo.date_start) + """' AND '""" + str(self.periodo.date_stop) + """'
            AND am.JOURNAL_ID=""" + str(self.journal_id.id) + """
            ORDER BY am.FECHA_CONTABLE,LEFT(am.NAME,2) )TT ) T
        where T.id = account_payment.id """)
    # Propagate to bank statement lines.
    self.env.cr.execute("""
        update account_bank_statement_line set move_name = T.NAME
        from ( SELECT row_number() OVER () AS CORRELATIVO,* FROM (
            SELECT statement_line_id as ID,DATE,FECHA_CONTABLE,NAME,REF,JOURNAL_ID FROM ACCOUNT_MOVE
            WHERE FECHA_CONTABLE BETWEEN '""" + str(self.periodo.date_start) + """' AND '""" + str(self.periodo.date_stop) + """'
            AND JOURNAL_ID=""" + str(self.journal_id.id) + """
            and statement_line_id is not null
            ORDER BY FECHA_CONTABLE,LEFT(NAME,2) )TT ) T
        where T.id = account_bank_statement_line.id """)
    lineas = []
    #self.env.cr.execute("""
    #    SELECT ID,DATE,FECHA_CONTABLE,NAME,REF,JOURNAL_ID FROM ACCOUNT_MOVE
    #    WHERE FECHA_CONTABLE BETWEEN '""" +str(self.periodo.date_start)+ """' AND '""" +str(self.periodo.date_stop)+ """'
    #    AND JOURNAL_ID=""" +str(self.journal_id.id)+ """
    #    ORDER BY FECHA_CONTABLE,LEFT(NAME,2)
    #    """)
    #for i in self.env.cr.fetchall():
    #    lineas.append(i[0])
    #return {
    #    'type': 'ir.actions.act_window',
    #    'name': _("Renumbered account moves"),
    #    'res_model': 'account.move',
    #    'domain': [("id", "in", lineas)],
    #    'view_type': 'form',
    #    'view_mode': 'tree',
    #    'context': self.env.context,
    #    'target': 'current',
    #}
    # Show a simple "done" dialog instead of the renumbered-moves list.
    contextn = dict(self._context or {})
    contextn['message'] = "Generado Exitosamente"
    return {
        'name': 'Finalizado',
        'type': 'ir.actions.act_window',
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'sh.message.wizard',
        'views': [(False, 'form')],
        'target': 'new',
        'context': contextn,
    }