def build_ctx_periods(self, period_from_id, period_to_id):
    """Return the ids of every period lying between two periods (inclusive).

    :param period_from_id: id of the first period of the range
    :param period_to_id: id of the last period of the range
    :return: list of account.period ids covering the range
    :raises osv.except_osv: if the two periods belong to different companies
        or if the start period is after the end period.
    """
    if period_from_id == period_to_id:
        return [period_from_id]
    period_from = self.browse(period_from_id)
    period_date_start = period_from.date_start
    company1_id = period_from.company_id.id
    period_to = self.browse(period_to_id)
    period_date_stop = period_to.date_stop
    company2_id = period_to.company_id.id
    if company1_id != company2_id:
        raise osv.except_osv(
            _('Error!'),
            _('You should choose the periods that belong to the same company.'))
    if period_date_start > period_date_stop:
        # Fixed message typo: "precede then end" -> "precede the end".
        raise osv.except_osv(
            _('Error!'),
            _('Start period should precede the end period.'))
    # /!\ We do not include a criterion on the company_id field below, to allow producing consolidated reports
    # on multiple companies. It will only work when start/end periods are selected and no fiscal year is chosen.
    # For period_from = January we want to exclude the opening period (it shares the
    # same date_start), unless period_from itself is the special opening period.
    if period_from.special:
        return self.search([('date_start', '>=', period_date_start),
                            ('date_stop', '<=', period_date_stop)])
    return self.search([('date_start', '>=', period_date_start),
                        ('date_stop', '<=', period_date_stop),
                        ('special', '=', False)])
def generate_refresh_token(self):
    """Refresh the current user's Office365 OAuth2 access token.

    Exchanges the stored ``refresh_token`` against Microsoft's v2.0 token
    endpoint and updates ``token``, ``refresh_token`` and ``expires_in``
    (milliseconds since epoch) on the current user.

    :raises osv.except_osv: if the Office365 settings are incomplete or the
        token endpoint does not return an access token.
    """
    if not self.env.user.refresh_token:
        # Nothing to refresh for users that never authenticated.
        return
    settings = self.env['office.settings'].search([])
    settings = settings[0] if settings else settings
    if not settings.client_id or not settings.redirect_url or not settings.secret:
        raise osv.except_osv(
            _("Error!"), (_("Please ask admin to add Office365 settings!")))
    header = {'Content-Type': 'application/x-www-form-urlencoded'}
    # Let requests form-encode the payload: the previous string concatenation
    # broke whenever a token/secret contained URL-reserved characters.
    response = requests.post(
        'https://login.microsoftonline.com/common/oauth2/v2.0/token',
        data={
            'grant_type': 'refresh_token',
            'refresh_token': self.env.user.refresh_token,
            'redirect_uri': settings.redirect_url,
            'client_id': settings.client_id,
            'client_secret': settings.secret,
        },
        headers=header)
    # Parse the JSON body directly; the old str(bytes)[2:][:-1] slicing
    # mangled escape sequences and any non-ASCII content.
    response = response.json()
    if 'access_token' not in response:
        # With proper JSON decoding the description contains real CR/LF
        # characters; also keep the legacy literal "\\r\\n" replacement.
        response["error_description"] = response[
            "error_description"].replace("\r\n", " ").replace("\\r\\n", " ")
        raise osv.except_osv(
            ("Error!"),
            (response["error"] + " " + response["error_description"]))
    self.env.user.token = response['access_token']
    self.env.user.refresh_token = response['refresh_token']
    # Timestamp of issuance in milliseconds; consumers add the lifetime.
    self.env.user.expires_in = int(round(time.time() * 1000))
def unlink(self):
    """Delete activities, removing their Office365 counterpart first.

    For each activity linked to an Outlook task (``office_id`` set), the task
    is deleted through the Microsoft Graph beta API; any HTTP status other
    than 204 (deleted) or 404 (already gone) aborts the whole unlink.
    A bus notification is sent for overdue activities so the web client
    refreshes its activity counters.
    """
    for activity in self:
        if activity.office_id:
            # Remove the counterpart Outlook task via Microsoft Graph.
            response = requests.delete(
                'https://graph.microsoft.com/beta/me/outlook/tasks/' + activity.office_id,
                headers={
                    'Host': 'outlook.office.com',
                    'Authorization': 'Bearer {0}'.format(self.env.user.token),
                    'Accept': 'application/json',
                    'Content-Type': 'application/json',
                    'X-Target-URL': 'http://outlook.office.com',
                    'connection': 'keep-Alive'
                })
            # 204 = deleted, 404 = task no longer exists; both are fine.
            if response.status_code != 204 and response.status_code != 404:
                raise osv.except_osv(
                    _("Office365 SYNC ERROR"),
                    (_("Error: " + str(response.status_code))))
        # Tell the owner's web client an (overdue) activity disappeared.
        if activity.date_deadline <= fields.Date.today():
            self.env['bus.bus'].sendone(
                (self._cr.dbname, 'res.partner', activity.user_id.partner_id.id),
                {
                    'type': 'activity_updated',
                    'activity_deleted': True
                })
    return super(CustomActivity, self).unlink()
def save_data(self):
    """Store the Office365 app credentials on the current user.

    Copies client id / secret / redirect URL from the wizard onto the user,
    wipes every previously cached OAuth artefact (code, tokens, addresses)
    so the next login starts from scratch, commits, and pops a confirmation
    wizard. Any failure is surfaced as a ValidationError.
    """
    try:
        if not self.client_id or not self.redirect_url or not self.secret:
            raise osv.except_osv(
                _("Wrong Credentials!"),
                (_("Please Check your Credentials and try again")))
        user = self.env.user
        user.redirect_url = self.redirect_url
        user.client_id = self.client_id
        user.secret = self.secret
        # Invalidate every cached OAuth artefact in one sweep.
        for stale in ('code', 'token', 'refresh_token', 'expires_in',
                      'office365_email', 'office365_id_address'):
            setattr(user, stale, None)
        self.env.cr.commit()
        ctx = dict(self._context)
        # self.env['office.usersettings'].login_url
        ctx['message'] = 'Successfully Saved!'
        return self.message_wizard(ctx)
    except Exception as e:
        raise ValidationError(_(str(e)))
def action_move_create(self):
    """Assign the SII (Chilean fiscal) document number before posting.

    First pass: for invoices with a documented journal and no SII number yet,
    pull the next number from the journal's sequence (customer documents) or
    reuse the supplier invoice number (vendor documents) and stamp the move
    name. Then the standard move creation runs, and a second pass writes the
    SII data onto the invoice and its account move.

    NOTE(review): ``sii_document_number`` is a plain local shared between the
    two loops — with several invoices in ``self`` the last value from the
    first loop is applied to every invoice in the second, and it is unbound
    if the first loop never assigned it. Left as-is; confirm intent before
    batch use.
    """
    for obj_inv in self:
        invtype = obj_inv.type
        if obj_inv.journal_document_class_id and not obj_inv.sii_document_number:
            if invtype in ('out_invoice', 'out_refund'):
                if not obj_inv.journal_document_class_id.sequence_id:
                    raise osv.except_osv(
                        _('Error!'),
                        _('Please define sequence on the journal related documents to this invoice.'))
                # Customer documents draw the folio from the journal sequence.
                sii_document_number = obj_inv.journal_document_class_id.sequence_id.next_by_id()
                prefix = obj_inv.journal_document_class_id.sii_document_class_id.doc_code_prefix or ''
                move_name = (prefix + str(sii_document_number)).replace(' ', '')
                obj_inv.write({'move_name': move_name})
            elif invtype in ('in_invoice', 'in_refund'):
                # Vendor documents reuse the supplier's own invoice number.
                sii_document_number = obj_inv.supplier_invoice_number
    super(account_invoice, self).action_move_create()
    for obj_inv in self:
        invtype = obj_inv.type  # NOTE(review): unused in this second pass
        if obj_inv.journal_document_class_id and not obj_inv.sii_document_number:
            obj_inv.write({'sii_document_number': sii_document_number})
            document_class_id = obj_inv.sii_document_class_id.id
            # Propagate the fiscal data onto the generated account move.
            guardar = {
                'document_class_id': document_class_id,
                'sii_document_number': obj_inv.sii_document_number,
                'no_rec_code': obj_inv.no_rec_code,
                'iva_uso_comun': obj_inv.iva_uso_comun,
            }
            obj_inv.move_id.write(guardar)
    return True
def get_test(self, cr, uid, ids, employee_id, context=None):
    """Raise a (placeholder) warning as soon as any payslip exists for one
    of the browsed employees. Old-API helper; returns None otherwise.
    """
    payslip_model = self.pool.get('hr.payslip')
    for employee in self.browse(cr, uid, ids, context=context):
        found = payslip_model.search(
            cr, uid, [('employee_id', '=', employee.id)], context=context)
        if found:
            raise osv.except_osv(_("test"), _("test"))
def write(self, vals):
    """Reject the write when a payslip overlaps an existing one.

    For every payslip being written, compare its [date_from, date_to] range
    against the other payslips of the same employee; if any pair of ranges
    overlaps (>= 1 shared day) for the same contract category, raise.
    Otherwise delegate to the standard write.

    NOTE(review): ``emp_obj`` is unused, and the method returns True instead
    of the super() result — kept as-is.
    """
    emp_obj = self.env['hr.employee']
    trouver = False  # French: "found" — set when an overlap is detected
    for payslip in self:
        employee = payslip.employee_id
        list_payslips = employee.slip_ids
        date_from = datetime.strptime(payslip.date_from, '%Y-%m-%d')
        date_to = datetime.strptime(payslip.date_to, '%Y-%m-%d')
        Range = namedtuple('Range', ['start', 'end'])
        r1 = Range(start=date_from, end=date_to)
        new_list = []
        # Collect the employee's other payslips (skip the one being written).
        if (len(list_payslips) != 1):
            for slip in list_payslips:
                if slip.id != payslip.id:
                    new_list.append(slip)
        for slip in new_list:
            old_date_from = datetime.strptime(slip.date_from, '%Y-%m-%d')
            old_date_to = datetime.strptime(slip.date_to, '%Y-%m-%d')
            r2 = Range(start=old_date_from, end=old_date_to)
            # Overlap length in days (inclusive); > 0 means the periods share
            # at least one day.
            result = (min(r1.end, r2.end) - max(r1.start, r2.start)).days + 1
            if result > 0 and slip.contract_id.categorie_id == payslip.contract_id.categorie_id:
                trouver = True
    if trouver == True:
        # "The employee already has a payslip for this period"
        raise osv.except_osv(_('Warning'), _("L'employé possède déjà un bulletin pour cette période"))
    else:
        super(hr_payslip, self).write(vals)
    return True
def product_meli_upload_multi_images(self):
    """Upload every extra product image to MercadoLibre (Python 2 code).

    Iterates ``product_image_ids``, posts each image to the MELI /pictures
    endpoint as JPEG, collects the returned picture ids, stores them on
    ``meli_multi_imagen_id`` and returns the id list. Raises on any MELI
    upload error.

    NOTE(review): ``product.product_image_ids == None`` is never true for an
    Odoo recordset (an empty one is falsy, not None), so the early-return
    guard is effectively dead — confirm and switch to truthiness if desired.
    """
    company = self.env.user.company_id
    product_obj = self.env['product.product']
    product = self
    CLIENT_ID = company.mercadolibre_client_id
    CLIENT_SECRET = company.mercadolibre_secret_key
    ACCESS_TOKEN = company.mercadolibre_access_token
    REFRESH_TOKEN = company.mercadolibre_refresh_token
    # meli = Meli(client_id=CLIENT_ID, client_secret=CLIENT_SECRET, access_token=ACCESS_TOKEN, refresh_token=REFRESH_TOKEN)
    if product.product_image_ids == None:
        return {'status': 'error', 'message': 'no images to upload'}
    image_ids = []
    c = 0
    # loop over images
    for product_image in product.product_image_ids:
        if (product_image.image):
            print "product_image.image:" + str(product_image.image)
            imagebin = base64.b64decode(product_image.image)
            #files = { 'file': ('image.png', imagebin, "image/png"), }
            files = { 'file': ('image.jpg', imagebin, "image/jpeg"), }
            # `meli` is expected to be bound at module scope (see the
            # commented-out constructor above).
            response = meli.upload("/pictures", files,
                                   {'access_token': meli.access_token})
            print "meli upload:" + response.content
            rjson = response.json()
            if ("error" in rjson):
                raise osv.except_osv(
                    _('MELI WARNING'),
                    _('No se pudo cargar la imagen en MELI! Error: %s , Mensaje: %s, Status: %s'
                      ) % (
                          rjson["error"],
                          rjson["message"],
                          rjson["status"],
                      ))
                #return { 'status': 'error', 'message': 'not uploaded'}
            else:
                image_ids += [{'id': rjson['id']}]
                c = c + 1
    print "image_ids:" + str(image_ids)
    # Stored as the string repr of the id list.
    product.write({"meli_multi_imagen_id": "%s" % (image_ids)})
    return image_ids
def test_connectiom(self):
    """Check the configured Shopify credentials by fetching the current shop.

    Builds the authenticated admin URL from the record's api_key/password/
    shop_name and tries ``shopify.Shop.current()``. Raises ValidationError
    on failure; on success raises an osv popup — the standard Odoo trick to
    show a dialog from a button handler.
    """
    shop_url = "https://%s:%s@%s.myshopify.com/admin" % (
        self.api_key, self.api_password, self.shop_name)
    shopify.ShopifyResource.set_site(shop_url)
    shopify.Session.setup(api_key=self.api_key, secret=self.api_secret_key)
    try:
        shop = shopify.Shop.current()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        raise ValidationError('Connection Failed >> invalid credentials')
    raise osv.except_osv(("Success!"), (" Connection Successful !"))
def action_draft(self):
    """Re-open the selected accounting periods by forcing them to 'draft'.

    Refuses to touch any period whose fiscal year is already closed; the
    state change itself is done with raw SQL, bypassing the ORM write path.
    """
    if any(period.fiscalyear_id.state == 'done' for period in self):
        raise osv.except_osv(
            _('Warning!'),
            _('You can not re-open a period which belongs to closed fiscal year'))
    #cr.execute('update account_journal_period set state=%s where period_id in %s', (mode, tuple(ids),))
    self.env.cr.execute(
        'update account_period set state=%s where id in %s',
        ('draft', tuple(self.ids),))
    return True
def add_mo_product(self):
    """Add new move.

    Adds the wizard's product to the manufacturing order taken from the
    context (``mo_id`` or ``active_id``). If a compatible raw-material move
    already exists (same product, not cancelled/done, and not make-to-order)
    its quantity, completion percentage and unit factor are increased;
    otherwise a brand-new consume line is created.

    NOTE(review): the guard raises when either context key is missing even
    though the lookup below accepts either one — ``and`` may have been
    intended instead of ``or``. Confirm against callers.

    @return: True.
    """
    if self.env.context is None:
        self.env.context = {}
    if not self.env.context.get('mo_id', False) or not self.env.context.get(
            'active_id', False):
        raise osv.except_osv(_('Exception!'),
                             _('Can not create the Move related to MO'))
    new_move = self.browse(self.ids)[0]
    mrp_obj = self.env['mrp.production']
    production = mrp_obj.browse(
        self.env.context.get('mo_id', False)
        or self.env.context.get('active_id', False))
    for move in production.move_raw_ids:
        if (move.product_id.id == new_move.product_id.id) and (move.state not in ('cancel', 'done')):
            if move.procure_method != 'make_to_order':
                # If it is make-to-order it must be added on a NEW line,
                # not merged into this one.
                qty_in_line_uom = self.product_qty
                old_move = self.env['stock.move'].browse(move.id)
                new_qty = old_move.product_qty + new_move.product_qty
                # NEW IN OVERRIDE #####
                # Compute real_p (completion percentage of the MO quantity).
                real_p = 0
                if production.product_qty > 0:
                    real_p = ((move.product_qty + qty_in_line_uom) * 100) / production.product_qty
                vals = {
                    'real_p': real_p,
                    'product_uom_qty': move.product_qty + qty_in_line_uom,
                    'unit_factor': new_qty / (production.product_qty - production.qty_produced),
                }
                #############################
                self.env['stock.move'].browse(move.id).write(vals)
                self.add_stock_move_lots_line(new_move)
                break
    else:
        # for/else: no mergeable move was found (or loop never broke) —
        # create a fresh consume line for the product.
        self.add_production_consume_line(new_move, production)
        self.add_stock_move_lots_line(new_move)
    return True
def unlink(self):
    """Delete meetings, removing their Office365 calendar events first.

    For each record with an ``office_id`` (and the user's delete flag set):
    refresh the OAuth token if it is older than one hour, resolve the first
    Graph calendar, then delete the matching event before unlinking.

    NOTE(review): several oddities are kept verbatim — ``for self in events``
    rebinds ``self`` per record; ``res`` is unbound (NameError) when the
    recordset is empty; and ``super(...).unlink(self)`` passes the record as
    an explicit argument, so each iteration unlinks via the parent with that
    record. Confirm before refactoring.
    """
    events = self
    for self in events:
        if self.office_id and self.env.user.event_del_flag:
            if self.env.user.expires_in:
                # expires_in is stored in milliseconds; tokens live ~1 hour.
                expires_in = datetime.fromtimestamp(
                    int(self.env.user.expires_in) / 1e3)
                expires_in = expires_in + timedelta(seconds=3600)
                nowDateTime = datetime.now()
                if nowDateTime > expires_in:
                    self.env['res.users'].generate_refresh_token()
            header = {
                'Authorization': 'Bearer {0}'.format(self.env.user.token),
                'Content-Type': 'application/json'
            }
            # Resolve the user's calendars; 'value' missing means the token
            # is no longer accepted.
            response = requests.get(
                'https://graph.microsoft.com/v1.0/me/calendars',
                headers={
                    'Host': 'outlook.office.com',
                    'Authorization': 'Bearer {0}'.format(self.env.user.token),
                    'Accept': 'application/json',
                    'X-Target-URL': 'http://outlook.office.com',
                    'connection': 'keep-Alive'
                }).content
            if 'value' not in json.loads(
                    (response.decode('utf-8'))).keys():
                raise osv.except_osv(("Access Token Expired!"),
                                     (" Please Regenerate Access Token !"))
            calendars = json.loads((response.decode('utf-8')))['value']
            # Always the first calendar — presumably the default one.
            calendar_id = calendars[0]['id']
            response = requests.delete(
                'https://graph.microsoft.com/v1.0/me/calendars/' +
                calendar_id + '/events/' + self.office_id,
                headers=header)
            if response.status_code == 204:
                _logger.info('successfull deleted event ' + self.name +
                             "from Office365 Calendar")
            # res = super(CustomMeeting, self).unlink(self)
            res = super(CustomMeeting, self).unlink(self)
        else:
            res = super(CustomMeeting, self).unlink(self)
    return res
def _reconcile_fy_closing(cr, uid, ids, context=None): """ This private function manually do the reconciliation on the account_move_line given as `ids´, and directly through psql. It's necessary to do it this way because the usual `reconcile()´ function on account.move.line object is really resource greedy (not supposed to work on reconciliation between thousands of records) and it does a lot of different computation that are useless in this particular case. """ # check that the reconcilation concern journal entries from only one company cr.execute( 'select distinct(company_id) from account_move_line where id in %s', (tuple(ids), )) if len(cr.fetchall()) > 1: raise osv.except_osv( _('Warning!'), _('The entries to reconcile should belong to the same company.' )) r_id = self.pool.get('account.move.reconcile').create( cr, uid, { 'type': 'auto', 'opening_reconciliation': True }) cr.execute( 'update account_move_line set reconcile_id = %s where id in %s', ( r_id, tuple(ids), )) # reconcile_ref deptends from reconcile_id but was not recomputed obj_acc_move_line._store_set_values(cr, uid, ids, ['reconcile_ref'], context=context) obj_acc_move_line.invalidate_cache(cr, uid, ['reconcile_id'], ids, context=context) return r_id
def generate_token(self, code):
    """Exchange an OAuth2 authorization code for Office365 tokens.

    Posts the code to Microsoft's v2.0 token endpoint, mirrors the user's
    Outlook master categories into ``calendar.event.type`` records, fetches
    the user profile, and triggers calendar import.

    :param code: authorization code obtained from the Microsoft login flow
    :return: dict with token/refresh_token/expires_in plus user identifiers,
        or a dict containing an ``error`` key on failure.
    """
    # Initialized before the try: the except handler stores the error in it,
    # which used to raise NameError when the failure happened before ``data``
    # was first assigned inside the try body.
    data = {}
    try:
        settings = self.env['office.settings'].search([])
        settings = settings[0] if settings else settings
        if not settings.client_id or not settings.redirect_url or not settings.secret:
            raise osv.except_osv(
                _("Error!"), (_("Please ask admin to add Office365 settings!")))
        header = {'Content-Type': 'application/x-www-form-urlencoded'}
        response = requests.post(
            'https://login.microsoftonline.com/common/oauth2/v2.0/token',
            data='grant_type=authorization_code&code=' + code +
                 '&redirect_uri=' + settings.redirect_url + '&client_id=' +
                 settings.client_id + '&client_secret=' + settings.secret,
            headers=header).content
        # Decode once: the old code parsed the body twice and then re-parsed
        # it through a str(bytes)[2:][:-1] hack that mangled escapes.
        token_payload = json.loads(response.decode('utf-8'))
        if token_payload.get('error'):
            raise UserError(
                'Invalid Credentials . Please! Check your credential and regenerate the code and try again!'
            )
        data['token'] = token_payload['access_token']
        data['refresh_token'] = token_payload['refresh_token']
        data['expires_in'] = token_payload['expires_in']
        categories = requests.get(
            'https://graph.microsoft.com/v1.0/me/outlook/masterCategories',
            headers={
                'Host': 'outlook.office.com',
                'Authorization': 'Bearer {0}'.format(data['token']),
                'Accept': 'application/json',
                'X-Target-URL': 'http://outlook.office.com',
                'connection': 'keep-Alive'
            }).content
        category = json.loads(categories)
        odoo_categ = self.env['calendar.event.type']
        if 'value' in category:
            # Mirror each Outlook master category into an Odoo event type.
            for categ in category['value']:
                vals = {
                    'categ_id': categ['id'],
                    'color': categ['color'],
                    'name': categ['displayName'],
                }
                existing = odoo_categ.search([
                    '|', ('categ_id', '=', categ['id']),
                    ('name', '=', categ['displayName'])
                ])
                if existing:
                    # Fix: write on the matched records; the old code wrote
                    # on the empty ``odoo_categ`` recordset, a silent no-op.
                    existing.write(vals)
                else:
                    odoo_categ.create(vals)
        profile = json.loads(
            (requests.get('https://graph.microsoft.com/v1.0/me',
                          headers={
                              'Host': 'outlook.office.com',
                              'Authorization': 'Bearer {0}'.format(data['token']),
                              'Accept': 'application/json',
                              'X-Target-URL': 'http://outlook.office.com',
                              'connection': 'keep-Alive'
                          }).content.decode('utf-8')))
        if profile:
            data['userPrincipalName'] = profile['userPrincipalName']
            data['office365_id_address'] = 'outlook_' + profile[
                'id'].upper() + '@outlook.com'
        if 'token' in data:
            self.get_calendars(data)
        return data
    except Exception as e:
        _logger.error(e)
        data['error'] = e
        return data
def product_meli_upload_image(self):
    """Upload the product's main image to MercadoLibre and link it.

    Posts the image to the MELI /pictures endpoint as JPEG, stores the
    returned picture id/URL on the product and, when the product is already
    published (``meli_id`` set), attaches the picture to the item.

    :return: status dict ({'status': ..., 'message': ...}).
    :raises osv.except_osv: if MELI rejects the upload.
    """
    company = self.env.user.company_id
    product_obj = self.env['product.product']
    product = self
    CLIENT_ID = company.mercadolibre_client_id
    CLIENT_SECRET = company.mercadolibre_secret_key
    ACCESS_TOKEN = company.mercadolibre_access_token
    REFRESH_TOKEN = company.mercadolibre_refresh_token
    # meli = Meli(client_id=CLIENT_ID, client_secret=CLIENT_SECRET, access_token=ACCESS_TOKEN, refresh_token=REFRESH_TOKEN)
    # Truthiness covers None, False and empty binary; the old explicit
    # ``== None or == False`` comparisons missed the empty case.
    if not product.image:
        return {'status': 'error', 'message': 'no image to upload'}
    imagebin = base64.b64decode(product.image)
    imageb64 = product.image
    # files = [ ('images', ('image_medium', imagebin, "image/png")) ]
    files = {'file': ('image.jpg', imagebin, "image/jpeg"), }
    response = meli.upload("/pictures", files,
                           {'access_token': meli.access_token})
    rjson = response.json()
    if ("error" in rjson):
        # The unreachable ``return`` that used to follow this raise was
        # removed (dead code).
        raise osv.except_osv(
            _('MELI WARNING'),
            _('No se pudo cargar la imagen en MELI! Error: %s , Mensaje: %s, Status: %s'
              ) % (
                  rjson["error"],
                  rjson["message"],
                  rjson["status"],
              ))
    _logger.info(rjson)
    if ("id" in rjson):
        # Store the picture id and its first rendition URL.
        product.write({
            "meli_imagen_id": rjson["id"],
            "meli_imagen_link": rjson["variations"][0]["url"]
        })
        # Attach the picture to the published item, if any.
        if product.meli_id:
            response = meli.post("/items/" + product.meli_id + "/pictures",
                                 {'id': rjson["id"]},
                                 {'access_token': meli.access_token})
        else:
            return {
                'status': 'warning',
                'message': 'uploaded but not assigned'
            }
    return {'status': 'success', 'message': 'uploaded and assigned'}
def data_save(self, cr, uid, ids, context=None):
    """
    This function close account fiscalyear and create entries in new fiscalyear
    @param cr: the current row, from the database cursor,
    @param uid: the current user's ID for security checks,
    @param ids: List of Account fiscalyear close state's IDs

    NOTE(review): old-API (cr/uid) wizard; ``map(...)`` below returns a list
    on Python 2 — on Python 3 it would be a lazy (always-truthy) iterator,
    so this code presumably targets Python 2. Confirm before porting.
    """
    def _reconcile_fy_closing(cr, uid, ids, context=None):
        """
        This private function manually do the reconciliation on the
        account_move_line given as `ids´, and directly through psql. It's
        necessary to do it this way because the usual `reconcile()´ function
        on account.move.line object is really resource greedy (not supposed
        to work on reconciliation between thousands of records) and it does
        a lot of different computation that are useless in this particular
        case.
        """
        # check that the reconcilation concern journal entries from only one company
        cr.execute(
            'select distinct(company_id) from account_move_line where id in %s',
            (tuple(ids), ))
        if len(cr.fetchall()) > 1:
            raise osv.except_osv(
                _('Warning!'),
                _('The entries to reconcile should belong to the same company.'
                  ))
        r_id = self.pool.get('account.move.reconcile').create(
            cr, uid, {
                'type': 'auto',
                'opening_reconciliation': True
            })
        cr.execute(
            'update account_move_line set reconcile_id = %s where id in %s', (
                r_id,
                tuple(ids),
            ))
        # reconcile_ref depends on reconcile_id but was not recomputed by
        # the direct SQL update above
        obj_acc_move_line._store_set_values(cr, uid, ids, ['reconcile_ref'],
                                            context=context)
        obj_acc_move_line.invalidate_cache(cr, uid, ['reconcile_id'], ids,
                                           context=context)
        return r_id

    obj_acc_period = self.pool.get('account.period')
    obj_acc_fiscalyear = self.pool.get('account.fiscalyear')
    obj_acc_journal = self.pool.get('account.journal')
    obj_acc_move = self.pool.get('account.move')
    obj_acc_move_line = self.pool.get('account.move.line')
    obj_acc_account = self.pool.get('account.account')
    obj_acc_journal_period = self.pool.get('account.journal.period')
    currency_obj = self.pool.get('res.currency')
    data = self.browse(cr, uid, ids, context=context)
    if context is None:
        context = {}
    fy_id = data[0].fy_id.id

    # Period sets bracketing the two fiscal years: everything strictly
    # before the new year, and everything strictly after the old one.
    cr.execute(
        "SELECT id FROM account_period WHERE date_stop < (SELECT date_start FROM account_fiscalyear WHERE id = %s)",
        (str(data[0].fy2_id.id), ))
    fy_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
    cr.execute(
        "SELECT id FROM account_period WHERE date_start > (SELECT date_stop FROM account_fiscalyear WHERE id = %s)",
        (str(fy_id), ))
    fy2_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
    if not fy_period_set or not fy2_period_set:
        raise osv.except_osv(
            _('User Error!'),
            _('The periods to generate opening entries cannot be found.'))
    period = obj_acc_period.browse(cr, uid, data[0].period_id.id,
                                   context=context)
    new_fyear = obj_acc_fiscalyear.browse(cr, uid, data[0].fy2_id.id,
                                          context=context)
    old_fyear = obj_acc_fiscalyear.browse(cr, uid, fy_id, context=context)
    new_journal = data[0].journal_id.id
    new_journal = obj_acc_journal.browse(cr, uid, new_journal,
                                         context=context)
    company_id = new_journal.company_id.id

    # The opening journal must be fully configured and centralized.
    if not new_journal.default_credit_account_id or not new_journal.default_debit_account_id:
        raise osv.except_osv(
            _('User Error!'),
            _('The journal must have default credit and debit account.'))
    if (not new_journal.centralisation) or new_journal.entry_posted:
        raise osv.except_osv(
            _('User Error!'),
            _('The journal must have centralized counterpart without the Skipping draft state option checked.'
              ))

    # delete existing move and move lines if any (re-running the wizard)
    move_ids = obj_acc_move.search(cr, uid,
                                   [('journal_id', '=', new_journal.id),
                                    ('period_id', '=', period.id)])
    if move_ids:
        move_line_ids = obj_acc_move_line.search(
            cr, uid, [('move_id', 'in', move_ids)])
        obj_acc_move_line._remove_move_reconcile(
            cr, uid, move_line_ids, opening_reconciliation=True,
            context=context)
        obj_acc_move_line.unlink(cr, uid, move_line_ids, context=context)
        obj_acc_move.unlink(cr, uid, move_ids, context=context)

    cr.execute("SELECT id FROM account_fiscalyear WHERE date_stop < %s",
               (str(new_fyear.date_start), ))
    result = cr.dictfetchall()
    fy_ids = [x['id'] for x in result]
    query_line = obj_acc_move_line._query_get(
        cr, uid, obj='account_move_line', context={'fiscalyear': fy_ids})
    # create the opening move
    vals = {
        'name': '/',
        'ref': '',
        'period_id': period.id,
        'date': period.date_start,
        'journal_id': new_journal.id,
    }
    move_id = obj_acc_move.create(cr, uid, vals, context=context)

    # 1. report of the accounts with defferal method == 'unreconciled'
    cr.execute('''
       SELECT a.id
       FROM account_account a
       LEFT JOIN account_account_type t ON (a.user_type_id = t.id)
       WHERE a.active
         AND a.type not in ('view', 'consolidation')
         AND a.company_id = %s
         AND t.close_method = %s''', (
        company_id,
        'unreconciled',
    ))
    account_ids = map(lambda x: x[0], cr.fetchall())
    if account_ids:
        # Carry over every unreconciled line of those accounts.
        cr.execute('''
           INSERT INTO account_move_line (
                name, create_uid, create_date, write_uid, write_date,
                statement_id, journal_id, currency_id, date_maturity,
                partner_id, blocked, credit, state, debit, ref, account_id,
                period_id, date, move_id, amount_currency, quantity,
                product_id, company_id)
             (SELECT name, create_uid, create_date, write_uid, write_date,
                statement_id, %s,currency_id, date_maturity, partner_id,
                blocked, credit, 'draft', debit, ref, account_id,
                %s, (%s) AS date, %s, amount_currency, quantity, product_id,
                company_id
              FROM account_move_line
              WHERE account_id IN %s
                AND ''' + query_line + ''' AND reconcile_id IS NULL)''', (
            new_journal.id,
            period.id,
            period.date_start,
            move_id,
            tuple(account_ids),
        ))
        # We have also to consider all move_lines that were reconciled
        # on another fiscal year, and report them too
        cr.execute('''
           INSERT INTO account_move_line (
                name, create_uid, create_date, write_uid, write_date,
                statement_id, journal_id, currency_id, date_maturity,
                partner_id, blocked, credit, state, debit, ref, account_id,
                period_id, date, move_id, amount_currency, quantity,
                product_id, company_id)
             (SELECT b.name, b.create_uid, b.create_date, b.write_uid,
                b.write_date, b.statement_id, %s, b.currency_id,
                b.date_maturity, b.partner_id, b.blocked, b.credit, 'draft',
                b.debit, b.ref, b.account_id, %s, (%s) AS date, %s,
                b.amount_currency, b.quantity, b.product_id, b.company_id
              FROM account_move_line b
              WHERE b.account_id IN %s
                AND b.reconcile_id IS NOT NULL
                AND b.period_id IN (''' + fy_period_set + ''')
                AND b.reconcile_id IN
                    (SELECT DISTINCT(reconcile_id)
                     FROM account_move_line a
                     WHERE a.period_id IN (''' + fy2_period_set + ''')))''', (
            new_journal.id,
            period.id,
            period.date_start,
            move_id,
            tuple(account_ids),
        ))
        self.invalidate_cache(cr, uid, context=context)

    # 2. report of the accounts with defferal method == 'detail'
    cr.execute('''
       SELECT a.id
       FROM account_account a
       LEFT JOIN account_account_type t ON (a.user_type_id= t.id)
       WHERE a.active
         AND a.type not in ('view', 'consolidation')
         AND a.company_id = %s
         AND t.close_method = %s''', (
        company_id,
        'detail',
    ))
    account_ids = map(lambda x: x[0], cr.fetchall())
    if account_ids:
        # Carry over every line of those accounts, reconciled or not.
        cr.execute('''
           INSERT INTO account_move_line (
                name, create_uid, create_date, write_uid, write_date,
                statement_id, journal_id, currency_id, date_maturity,
                partner_id, blocked, credit, state, debit, ref, account_id,
                period_id, date, move_id, amount_currency, quantity,
                product_id, company_id)
             (SELECT name, create_uid, create_date, write_uid, write_date,
                statement_id, %s,currency_id, date_maturity, partner_id,
                blocked, credit, 'draft', debit, ref, account_id,
                %s, (%s) AS date, %s, amount_currency, quantity, product_id,
                company_id
              FROM account_move_line
              WHERE account_id IN %s
                AND ''' + query_line + ''')
                ''', (
            new_journal.id,
            period.id,
            period.date_start,
            move_id,
            tuple(account_ids),
        ))
        self.invalidate_cache(cr, uid, context=context)

    # 3. report of the accounts with defferal method == 'balance'
    cr.execute('''
       SELECT a.id
       FROM account_account a
       LEFT JOIN account_account_type t ON (a.user_type_id = t.id)
       WHERE a.active
         AND a.type not in ('view', 'consolidation')
         AND a.company_id = %s
         AND t.close_method = %s''', (
        company_id,
        'balance',
    ))
    account_ids = map(lambda x: x[0], cr.fetchall())
    query_1st_part = """
            INSERT INTO account_move_line (
                 debit, credit, name, date, move_id, journal_id, period_id,
                 account_id, currency_id, amount_currency, company_id, state) VALUES """
    query_2nd_part = ""
    query_2nd_part_args = []
    # One VALUES tuple per account with a non-zero balance, batched into a
    # single INSERT.
    for account in obj_acc_account.browse(cr, uid, account_ids,
                                          context={'fiscalyear': fy_id}):
        company_currency_id = self.pool.get('res.users').browse(
            cr, uid, uid).company_id.currency_id
        if not currency_obj.is_zero(cr, uid, company_currency_id,
                                    abs(account.balance)):
            if query_2nd_part:
                query_2nd_part += ','
            query_2nd_part += "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            query_2nd_part_args += (
                account.balance > 0 and account.balance or 0.0,
                account.balance < 0 and -account.balance or 0.0,
                data[0].report_name,
                period.date_start,
                move_id,
                new_journal.id,
                period.id,
                account.id,
                account.currency_id and account.currency_id.id or None,
                account.foreign_balance if account.currency_id else 0.0,
                account.company_id.id,
                'draft')
    if query_2nd_part:
        cr.execute(query_1st_part + query_2nd_part,
                   tuple(query_2nd_part_args))
        self.invalidate_cache(cr, uid, context=context)

    # validate and centralize the opening move
    obj_acc_move.validate(cr, uid, [move_id], context=context)

    # reconcile all the move.line of the opening move
    ids = obj_acc_move_line.search(
        cr, uid, [('journal_id', '=', new_journal.id),
                  ('period_id.fiscalyear_id', '=', new_fyear.id)])
    if ids:
        reconcile_id = _reconcile_fy_closing(cr, uid, ids, context=context)
        # set the creation date of the reconcilation at the first day of the
        # new fiscalyear, in order to have good figures in the aged trial
        # balance
        self.pool.get('account.move.reconcile').write(
            cr, uid, [reconcile_id], {'create_date': new_fyear.date_start},
            context=context)

    # create the journal.period object and link it to the old fiscalyear
    new_period = data[0].period_id.id
    ids = obj_acc_journal_period.search(
        cr, uid, [('journal_id', '=', new_journal.id),
                  ('period_id', '=', new_period)])
    if not ids:
        ids = [
            obj_acc_journal_period.create(
                cr, uid, {
                    'name':
                    (new_journal.name or '') + ':' + (period.code or ''),
                    'journal_id': new_journal.id,
                    'period_id': period.id
                })
        ]
    cr.execute(
        'UPDATE account_fiscalyear '
        'SET end_journal_period_id = %s '
        'WHERE id = %s', (ids[0], old_fyear.id))
    obj_acc_fiscalyear.invalidate_cache(cr, uid, ['end_journal_period_id'],
                                        [old_fyear.id], context=context)

    return {'type': 'ir.actions.act_window_close'}