def action_validate(self):
    """Approve the leave requests in ``self`` (first or second approval).

    Moves each request from ``confirm``/``validate1`` to ``validate``,
    records the approving employee, and triggers the actual leave
    processing.  For category-type holidays, one leave per employee of
    the category is created and validated in cascade.

    :raises UserError: if a request is not in an approvable state, or if
        a non-manager attempts the second approval.
    :return: always ``True``
    """
    self._check_security_action_validate()
    # Employee record of the current user; used as first/second approver.
    current_employee = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(
                _('Leave request must be confirmed in order to approve it.'
                  ))
        # Second approval ('validate1' -> 'validate') is restricted to
        # HR managers.
        if holiday.state == 'validate1' and not holiday.env.user.has_group(
                'hr_holidays.group_hr_holidays_manager'):
            raise UserError(
                _('Only an HR Manager can apply the second approval on leave requests.'
                  ))
        holiday.write({'state': 'validate'})
        # Record who approved: second approver on double-validation
        # types, first approver otherwise.
        if holiday.double_validation:
            holiday.write({'second_approver_id': current_employee.id})
        else:
            holiday.write({'first_approver_id': current_employee.id})
        if holiday.holiday_type == 'employee' and holiday.type == 'remove':
            # A real leave (not an allocation): create the calendar
            # entries / meetings.
            holiday._validate_leave_request()
        elif holiday.holiday_type == 'category':
            # Fan out: create one individual leave per employee of the
            # category, then approve/validate them as a batch.
            leaves = self.env['hr.holidays']
            for employee in holiday.category_id.employee_ids:
                values = holiday._prepare_create_by_category(employee)
                leaves += self.with_context(
                    mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].double_validation:
                leaves.action_validate()
    return True
def _parse_date_from_data(self, data, index, name, field_type, options):
    """Normalize the date/datetime column ``index`` of ``data`` in place.

    Each cell is parsed with the user-supplied format(s) from
    ``options`` and rewritten as the server's canonical string
    representation.

    :param data: list of rows (lists); mutated in place
    :param index: column position holding the date values
    :param name: column label, used in error messages only
    :param field_type: 'date' or 'datetime' (selects the serializer)
    :param options: dict with 'date_format' and 'datetime_format' keys
    :raises ValueError: with a user-readable message on the first cell
        that matches neither format
    """
    dt = datetime.datetime
    # Serializer matching the target field type.
    fmt = fields.Date.to_string if field_type == 'date' else fields.Datetime.to_string
    d_fmt = options.get('date_format')
    dt_fmt = options.get('datetime_format')
    for num, line in enumerate(data):
        if not line[index]:
            # Empty cell: leave untouched.
            continue
        v = line[index].strip()
        try:
            # first try parsing as a datetime if it's one
            if dt_fmt and field_type == 'datetime':
                try:
                    line[index] = fmt(dt.strptime(v, dt_fmt))
                    continue
                except ValueError:
                    # Fall through to the plain date format below.
                    pass
            # otherwise try parsing as a date whether it's a date
            # or datetime
            line[index] = fmt(dt.strptime(v, d_fmt))
        except ValueError as e:
            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
        except Exception as e:
            # Catch-all for unexpected failures (e.g. bad format string)
            # so the user still gets column/line context.
            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
def create_debit(self):
    """Create one debit note per move in ``move_ids`` and return an
    action window showing the new move(s).

    Each debit note is a copy of its source move (business links
    included via context) and gets a chatter message linking back to
    the origin.
    """
    self.ensure_one()
    created = self.env['account.move']
    # copy sale/purchase links
    sources = self.move_ids.with_context(include_business_fields=True)
    for source in sources:
        debit_note = source.copy(
            default=self._prepare_default_values(source))
        origin_link = " <a href=# data-oe-model=account.move data-oe-id=%d>%s</a>" % (
            source.id, source.name)
        debit_note.message_post(
            body=_("This debit note was created from:") + origin_link)
        created |= debit_note
    action = {
        'name': _('Debit Notes'),
        'type': 'ir.actions.act_window',
        'res_model': 'account.move',
    }
    # Open the single note in form view, or list them all.
    if len(created) == 1:
        action['view_mode'] = 'form'
        action['res_id'] = created.id
    else:
        action['view_mode'] = 'tree,form'
        action['domain'] = [('id', 'in', created.ids)]
    return action
def assemble_tasks(self, docids, data, report, ctx):
    """Render an Aeroo report and, if needed, convert it through the
    Aeroo DOCS service.

    :param docids: ids of the records to print
    :param data: report data dict
    :param report: ir.actions.report record
    :param ctx: rendering context
    :return: tuple (binary content, mimetype, filename)
    :raises ConnectionError: when conversion fails and fallback to the
        unconverted format is disabled on the report
    """
    code = report.out_format.code
    result = self.single_report(docids, data, report, ctx)
    print_report_name = 'report'
    # A custom file name expression only applies to single-record prints.
    if report.print_report_name and not len(docids) > 1:
        obj = self.env[report.model].browse(docids)
        print_report_name = safe_eval(
            report.print_report_name, {'object': obj, 'time': time})
    if report.in_format == code:
        # No conversion required: return the raw render as-is.
        filename = '%s.%s' % (
            print_report_name, mime_dict[report.in_format])
        return result[0], result[1], filename
    else:
        try:
            self.get_docs_conn()
            result = self._generate_doc(result[0], report)
            filename = '%s.%s' % (
                print_report_name, mime_dict[report.out_format.code])
            return result, mime_dict[code], filename
        except Exception as e:
            _logger.exception(_("Aeroo DOCS error!\n%s") % str(e))
            if report.disable_fallback:
                # BUG FIX: was ``_logger.exception(e[0])`` — exceptions
                # are not subscriptable on Python 3 and that line itself
                # raised TypeError, masking the real error.
                _logger.exception(str(e))
                raise ConnectionError(_('Could not connect Aeroo DOCS!'))
    # only if fallback: return the unconverted in-format document.
    filename = '%s.%s' % (print_report_name, mime_dict[report.in_format])
    return result[0], result[1], filename
def _get_mts_mto_rule(self):
    """Build the values for the MTS+MTO procurement rule of this
    warehouse.

    Looks up the generic MTS+MTO route (by XML id, falling back to a
    name search) and combines it with the warehouse's existing MTO rule
    and the MTS rule of its delivery route.

    :return: dict of values suitable for creating a procurement.rule
    :raises exceptions.Warning: if the route or either rule is missing
    """
    self.ensure_one()
    route_model = self.env['stock.location.route']
    pull_model = self.env['procurement.rule']
    try:
        mts_mto_route = self.env.ref('stock_mts_mto_rule.route_mto_mts')
    except ValueError:
        # BUG FIX: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit and programming errors.
        # ``env.ref`` raises ValueError for a missing XML id.
        mts_mto_route = route_model.search([
            ('name', 'like', 'Make To Order + Make To Stock')
        ])
    if not mts_mto_route:
        raise exceptions.Warning(
            _('Can\'t find any generic MTS+MTO route.'))
    if not self.mto_pull_id:
        raise exceptions.Warning(
            _('Can\'t find MTO Rule on the warehouse'))
    mts_rules = pull_model.search([
        ('location_src_id', '=', self.lot_stock_id.id),
        ('route_id', '=', self.delivery_route_id.id)
    ])
    if not mts_rules:
        raise exceptions.Warning(
            _('Can\'t find MTS Rule on the warehouse'))
    return {
        'name': self._format_routename(route_type='mts_mto'),
        'route_id': mts_mto_route.id,
        # split_procurement dispatches between the MTS and MTO sub-rules.
        'action': 'split_procurement',
        'mto_rule_id': self.mto_pull_id.id,
        'mts_rule_id': mts_rules[0].id,
        'warehouse_id': self.id,
        'location_id': self.mto_pull_id.location_id.id,
        'picking_type_id': self.mto_pull_id.picking_type_id.id,
    }
def _check_training_dup(self):
    """Validate the training date range.

    Rejects trainings that start in the past and ranges where the end
    date precedes the start date.

    :raises ValidationError: on either violation
    """
    # Dates are stored as strings; compare against today's date string.
    if self.training_start_date < datetime.datetime.now().\
            strftime(DEFAULT_SERVER_DATE_FORMAT):
        raise ValidationError(_("You can't create past training!"))
    if self.training_start_date > self.training_end_date:
        # BUG FIX: user-facing message had a typo ("greated").
        raise ValidationError(
            _("End Date should be greater than Start date of Training!"))
def write(self, vals):
    """Override write to enforce French anti-fraud (inalterability)
    rules on pos.order.

    For companies subject to the French certification: forbid modifying
    protected fields once an order is paid/done/invoiced, forbid
    overwriting an existing hash/sequence number, and assign the hash
    chain values when an order first reaches a posted state.
    """
    has_been_posted = False
    for order in self:
        if order.company_id._is_accounting_unalterable():
            # write the hash and the secure_sequence_number when posting or invoicing an pos.order
            if vals.get('state') in ['paid', 'done', 'invoiced']:
                has_been_posted = True
            # restrict the operation in case we are trying to write a forbidden field
            if (order.state in ['paid', 'done', 'invoiced'] and
                    set(vals).intersection(ORDER_FIELDS)):
                raise UserError(
                    _('According to the French law, you cannot modify a point of sale order. Forbidden fields: %s.'
                      ) % ', '.join(ORDER_FIELDS))
            # restrict the operation in case we are trying to overwrite existing hash
            if (order.l10n_fr_hash and 'l10n_fr_hash' in vals) or (
                    order.l10n_fr_secure_sequence_number and
                    'l10n_fr_secure_sequence_number' in vals):
                raise UserError(
                    _('You cannot overwrite the values ensuring the inalterability of the point of sale.'
                      ))
    res = super(pos_order, self).write(vals)
    # write the hash and the secure_sequence_number when posting or invoicing a pos order
    if has_been_posted:
        # Only orders not yet hashed get a sequence number and hash;
        # the hash chains on the company's dedicated sequence.
        for order in self.filtered(
                lambda o: o.company_id._is_accounting_unalterable() and
                not (o.l10n_fr_secure_sequence_number or o.l10n_fr_hash)):
            new_number = order.company_id.l10n_fr_pos_cert_sequence_id.next_by_id(
            )
            vals_hashing = {
                'l10n_fr_secure_sequence_number': new_number,
                'l10n_fr_hash': order._get_new_hash(new_number)
            }
            res |= super(pos_order, order).write(vals_hashing)
    return res
def onchange_birthday(self):
    """Reject a birthday that is today or in the future.

    Clears the field and returns an onchange warning dict when the
    entered date is not strictly in the past; returns None otherwise.
    """
    if not self.birthday:
        return
    birth_date = datetime.strptime(
        self.birthday, DEFAULT_SERVER_DATE_FORMAT)
    if birth_date >= datetime.today():
        self.birthday = False
        return {
            'warning': {
                'title': _('User Alert !'),
                'message': _('Date of Birth must be less than today!'),
            }
        }
def write(self, vals):
    """Override write to enforce French anti-fraud (inalterability)
    rules on account.move.

    For companies subject to the French certification: forbid modifying
    protected fields once a move is posted, forbid overwriting an
    existing hash/sequence number, and assign the hash chain values
    when a move is first posted.
    """
    has_been_posted = False
    for move in self:
        if move.company_id._is_accounting_unalterable():
            # write the hash and the secure_sequence_number when posting an account.move
            if vals.get('state') == 'posted':
                has_been_posted = True
            # restrict the operation in case we are trying to write a forbidden field
            if (move.state == "posted" and
                    set(vals).intersection(MOVE_FIELDS)):
                raise UserError(
                    _("According to the French law, you cannot modify a journal entry in order for its posted data to be updated or deleted. Unauthorized field: %s."
                      ) % ', '.join(MOVE_FIELDS))
            # restrict the operation in case we are trying to overwrite existing hash
            if (move.l10n_fr_hash and 'l10n_fr_hash' in vals) or (
                    move.l10n_fr_secure_sequence_number and
                    'l10n_fr_secure_sequence_number' in vals):
                raise UserError(
                    _('You cannot overwrite the values ensuring the inalterability of the accounting.'
                      ))
    res = super(AccountMove, self).write(vals)
    # write the hash and the secure_sequence_number when posting an account.move
    if has_been_posted:
        # Only moves not yet hashed get a sequence number and hash;
        # the hash chains on the company's secure sequence.
        for move in self.filtered(
                lambda m: m.company_id._is_accounting_unalterable() and
                not (m.l10n_fr_secure_sequence_number or m.l10n_fr_hash)):
            new_number = move.company_id.l10n_fr_secure_sequence_id.next_by_id(
            )
            vals_hashing = {
                'l10n_fr_secure_sequence_number': new_number,
                'l10n_fr_hash': move._get_new_hash(new_number)
            }
            res |= super(AccountMove, move).write(vals_hashing)
    return res
def em_ask_analytic_tag(self, options, command, record, is_from=None,
                        is_to=None):
    """Ask the messenger user to pick an analytic tag via inline
    keyboard buttons.

    :param options: handler options dict; mutated to register the
        custom reply payload
    :param command: command object providing ``keyboard_buttons``
    :param record: record the reply will be attached to
    :param is_from: when truthy, this is the 'from' side of a transfer
        (the payable/expense choice is hidden)
    :param is_to: when truthy, this is the 'to' side of a transfer
        (the receivable/income choice is hidden)
    :return: the list of button dicts that were sent
    """
    data = {
        'action': ASK_ANALYTIC_TAG,
        'record_id': record.id,
    }
    if is_from or is_to:
        data['transfer'] = 'from' if is_from else 'to'
    TAG2STRING = {
        TAG_LIQUIDITY: _("Account"),
        TAG_PAYABLE: _("Expense"),
        TAG_RECEIVABLE: _("Income"),
    }
    if is_from:
        del TAG2STRING[TAG_PAYABLE]
    if is_to:
        del TAG2STRING[TAG_RECEIVABLE]
    buttons = [{
        'text': name,
        # BUG FIX: was ``dict(data.items() + [('tag_ref', tag_ref)])``
        # which is Python-2-only (dict views are not addable on Py3).
        # ``dict(data, tag_ref=tag_ref)`` builds the same merged dict.
        'callback_data': dict(data, tag_ref=tag_ref)
    } for tag_ref, name in TAG2STRING.items()]
    command.keyboard_buttons(options, buttons, row_width=1)
    options['handle_reply']['custom_reply'] = data
    return buttons
def create_journals(self, journal_ids):
    """Create the journal documents for the given journals based on the
    company's fiscal responsability letters.

    :param journal_ids: ids of account.journal records to process
    :raises orm.except_orm: if the journal's company has no
        responsability configured
    """
    for journal in self.env['account.journal'].browse(journal_ids):
        responsability = journal.company_id.responsability_id
        if not responsability.id:
            raise orm.except_orm(
                _('Your company has not setted any responsability'),
                _('Please, set your company responsability in the company partner before continue.'
                  ))
        journal_type = journal.type
        # BUG FIX: letter_ids was unbound (or stale from a previous
        # iteration) for journal types outside the two branches below.
        letter_ids = []
        if journal_type in ['sale', 'sale_refund']:
            letter_ids = [x.id for x in responsability.issued_letter_ids]
        elif journal_type in ['purchase', 'purchase_refund']:
            letter_ids = [x.id for x in responsability.received_letter_ids]
        if journal_type == 'sale':
            for doc_type in ['invoice', 'credit_note', 'debit_note']:
                self.create_journal_document(letter_ids, doc_type,
                                             journal.id)
        elif journal_type == 'purchase':
            for doc_type in [
                    'invoice', 'debit_note', 'credit_note', 'invoice_in'
            ]:
                self.create_journal_document(letter_ids, doc_type,
                                             journal.id)
def website_set_ga_data(self, website_id, ga_client_id, ga_analytics_key):
    """Store the Google Analytics credentials on a website's settings.

    :param website_id: id of the target website (falsy -> current one)
    :param ga_client_id: Google OAuth client id (must end with
        '.apps.googleusercontent.com')
    :param ga_analytics_key: Google Analytics key
    :return: True on success, or an {'error': {...}} dict describing
        the access or validation failure
    """
    if not request.env.user.has_group('base.group_system'):
        return {
            'error': {
                'title': _('Access Error'),
                'message': _('You do not have sufficient rights to perform that action.'
                             ),
            }
        }
    # BUG FIX: guard against a falsy client id before calling
    # ``endswith`` — previously a None client id raised AttributeError
    # instead of returning the validation error.
    if not ga_analytics_key or not ga_client_id or not ga_client_id.endswith(
            '.apps.googleusercontent.com'):
        return {
            'error': {
                'title': _('Incorrect Client ID / Key'),
                'message': _('The Google Analytics Client ID or Key you entered seems incorrect.'
                             ),
            }
        }
    Website = request.env['website']
    current_website = website_id and Website.browse(
        website_id) or Website.get_current_website()
    request.env['res.config.settings'].create({
        'google_management_client_id': ga_client_id,
        'google_analytics_key': ga_analytics_key,
        'website_id': current_website.id,
    }).execute()
    return True
def _interval_dates(self, frequency, company):
    """ Method used to compute the theoretical date from which account
    move lines should be fetched
    @param {string} frequency: a valid value of the selection field on
        the object (daily, monthly, annually) frequencies are literal
        (daily means 24 hours and so on)
    @param {recordset} company: the company for which the closing is done
    @return {dict} the theoretical date from which account move lines
        are fetched.
        date_stop date to which the move lines are fetched, always now()
        the dates are in their Flectra Database string representation
    """
    import calendar

    date_stop = datetime.utcnow()
    interval_from = None
    name_interval = ''
    if frequency == 'daily':
        interval_from = date_stop - timedelta(days=1)
        name_interval = _('Daily Closing')
    elif frequency == 'monthly':
        month_target = date_stop.month > 1 and date_stop.month - 1 or 12
        year_target = month_target < 12 and date_stop.year or date_stop.year - 1
        # BUG FIX: ``replace(month=...)`` raised ValueError when the
        # current day does not exist in the previous month (e.g. running
        # on March 31 -> February 31).  Clamp to the month's last day.
        last_day = calendar.monthrange(year_target, month_target)[1]
        interval_from = date_stop.replace(
            year=year_target, month=month_target,
            day=min(date_stop.day, last_day))
        name_interval = _('Monthly Closing')
    elif frequency == 'annually':
        year_target = date_stop.year - 1
        # BUG FIX: same issue on February 29 of a leap year -> clamp.
        last_day = calendar.monthrange(year_target, date_stop.month)[1]
        interval_from = date_stop.replace(
            year=year_target, day=min(date_stop.day, last_day))
        name_interval = _('Annual Closing')
    return {
        'interval_from': FieldDateTime.to_string(interval_from),
        'date_stop': FieldDateTime.to_string(date_stop),
        'name_interval': name_interval
    }
def _validate_range(self):
    """Constraint: each date range must be well ordered and, unless its
    type allows overlap, must not intersect another active range of the
    same company and type.

    :raises ValidationError: on an inverted range or a forbidden overlap
    """
    for this in self:
        start = fields.Date.from_string(this.date_start)
        end = fields.Date.from_string(this.date_end)
        if start > end:
            raise ValidationError(
                _("%s is not a valid range (%s > %s)") % (
                    this.name, this.date_start, this.date_end))
        if this.type_id.allow_overlap:
            # Overlaps are explicitly permitted for this type.
            continue
        # here we use a plain SQL query to benefit of the daterange
        # function available in PostgreSQL
        # (http://www.postgresql.org/docs/current/static/rangetypes.html)
        # '[]' makes both bounds inclusive; the id != %s clause excludes
        # the record being validated itself.
        SQL = """
            SELECT
                id
            FROM
                date_range dt
            WHERE
                DATERANGE(dt.date_start, dt.date_end, '[]') &&
                    DATERANGE(%s::date, %s::date, '[]')
                AND dt.id != %s
                AND dt.active
                AND dt.company_id = %s
                AND dt.type_id=%s;"""
        self.env.cr.execute(SQL, (this.date_start,
                                  this.date_end,
                                  this.id,
                                  this.company_id.id or None,
                                  this.type_id.id))
        res = self.env.cr.fetchall()
        if res:
            dt = self.browse(res[0][0])
            raise ValidationError(
                _("%s overlaps %s") % (this.name, dt.name))
def get_empty_list_help(self, help):
    """Build the "no applications yet" help HTML for the recruitment
    kanban/list view.

    When called from a job position that has a configured mail alias,
    the alias email address is appended as a mailto link so recruiters
    can share it with applicants.

    :param help: original help text (unused; replaced wholesale)
    :return: HTML string with %-placeholders already substituted
    """
    if 'active_id' in self.env.context and self.env.context.get(
            'active_model') == 'hr.job':
        alias_id = self.env['hr.job'].browse(
            self.env.context['active_id']).alias_id
    else:
        alias_id = False
    nocontent_values = {
        'help_title': _('No application yet'),
        'para_1': _('Let people apply by email to save time.'),
        'para_2': _('Attachments, like resumes, get indexed automatically.'),
    }
    nocontent_body = """
<p class="o_view_nocontent_empty_folder">%(help_title)s</p>
<p>%(para_1)s<br/>%(para_2)s</p>"""
    # Only show the alias when it is fully configured (domain + name).
    if alias_id and alias_id.alias_domain and alias_id.alias_name:
        email = alias_id.display_name
        email_link = "<a href='mailto:%s'>%s</a>" % (email, email)
        nocontent_values['email_link'] = email_link
        nocontent_body += """<p class="o_copy_paste_email">%(email_link)s</p>"""
    return nocontent_body % nocontent_values
def _notification_recipients(self, message, groups):
    """ Handle HR users and officers recipients that can validate or
    refuse holidays directly from email. """
    groups = super(Holidays, self)._notification_recipients(message, groups)
    self.ensure_one()
    hr_actions = []
    # Approve is only offered while awaiting (first) validation.
    if self.state == 'confirm':
        app_action = self._notification_link_helper(
            'controller', controller='/hr_holidays/validate')
        hr_actions += [{'url': app_action, 'title': _('Approve')}]
    # Refuse stays available through both validation steps.
    if self.state in ['confirm', 'validate', 'validate1']:
        ref_action = self._notification_link_helper(
            'controller', controller='/hr_holidays/refuse')
        hr_actions += [{'url': ref_action, 'title': _('Refuse')}]
    # Prepend a dedicated group so HR holiday users get the action
    # buttons before the generic recipient groups are considered.
    new_group = ('group_hr_holidays_user',
                 lambda partner: bool(partner.user_ids) and any(
                     user.has_group('hr_holidays.group_hr_holidays_user')
                     for user in partner.user_ids), {
                         'actions': hr_actions,
                     })
    return [new_group] + groups
def _notify_get_groups(self, msg_vals=None):
    """ Handle HR users and officers recipients that can validate or
    refuse holidays directly from email. """
    groups = super(HolidaysAllocation, self)._notify_get_groups(msg_vals=msg_vals)
    msg_vals = msg_vals or {}
    self.ensure_one()
    hr_actions = []
    # Approve is only offered while awaiting (first) validation.
    if self.state == 'confirm':
        app_action = self._notify_get_action_link(
            'controller', controller='/allocation/validate', **msg_vals)
        hr_actions += [{'url': app_action, 'title': _('Approve')}]
    # Refuse stays available through both validation steps.
    if self.state in ['confirm', 'validate', 'validate1']:
        ref_action = self._notify_get_action_link(
            'controller', controller='/allocation/refuse', **msg_vals)
        hr_actions += [{'url': ref_action, 'title': _('Refuse')}]
    holiday_user_group_id = self.env.ref(
        'hr_holidays.group_hr_holidays_user').id
    # Prepend a group matched via the partner-data dict (newer
    # notification API) so HR users get the action buttons first.
    new_group = ('group_hr_holidays_user',
                 lambda pdata: pdata['type'] == 'user' and
                 holiday_user_group_id in pdata['groups'], {
                     'actions': hr_actions,
                 })
    return [new_group] + groups
def format_failure_reason(self):
    """Return a human-readable description of this record's failure.

    Known failure types resolve to their selection label; the UNKNOWN
    type is rendered with the free-form ``failure_reason`` text.
    """
    self.ensure_one()
    if self.failure_type == 'UNKNOWN':
        return _("Unknown error") + ": %s" % (self.failure_reason or '')
    selection_labels = dict(type(self).failure_type.selection)
    return selection_labels.get(self.failure_type, _('No Error'))
def _warn_template_error(self, scheduler, exception):
    """Email the event stakeholders about a mail-scheduler failure.

    Recipients are the event organizer, the event responsible and the
    last writer of the template.  Sending is throttled to roughly once
    per hour for fast-interval schedulers.

    :param scheduler: the event.mail scheduler that failed
    :param exception: the exception raised during template rendering
    """
    # We warn ~ once by hour ~ instead of every 10 min if the interval unit is more than 'hours'.
    if random.random() < 0.1666 or scheduler.interval_unit in ('now', 'hours'):
        ex_s = exception_to_unicode(exception)
        try:
            event, template = scheduler.event_id, scheduler.template_id
            emails = list(set([event.organizer_id.email, event.user_id.email,
                               template.write_uid.email]))
            # BUG FIX: both strings were interpolated *before* being
            # passed to _(), so the translation lookup used the already
            # substituted text and could never match a translation.
            # Translate the template first, then interpolate.
            subject = _("WARNING: Event Scheduler Error for event: %s") % event.name
            body = _("""Event Scheduler for:
  - Event: %s (%s)
  - Scheduled: %s
  - Template: %s (%s)

Failed with error:
  - %s

You receive this email because you are:
  - the organizer of the event,
  - or the responsible of the event,
  - or the last writer of the template.""") % (
                event.name, event.id, scheduler.scheduled_date,
                template.name, template.id, ex_s)
            email = self.env['ir.mail_server'].build_email(
                email_from=self.env.user.email,
                email_to=emails,
                subject=subject, body=body,
            )
            self.env['ir.mail_server'].send_email(email)
        except Exception as e:
            # Best effort: never let the warning email itself crash the cron.
            _logger.error("Exception while sending traceback by email: %s.\n Original Traceback:\n%s", e, exception)
            pass
def install_report(self, cr, uid, ids, context=None):
    """Install an Aeroo report definition from the uploaded XML file.

    Legacy (old-API) method: decodes the attached file, imports it as
    XML data, registers the resulting report as a contextual print
    action and returns an action window filtered on the new report.

    NOTE(review): ``base64.decodestring`` returns bytes while
    ``StringIO`` expects text on Python 3 — this method presumably runs
    on a Python 2 codebase; verify before porting.
    """
    report_obj = self.pool.get('ir.actions.report')
    this = self.browse(cr, uid, ids[0], context=context)
    # Service names must be unique system-wide.
    if report_obj.search(cr, uid, [('report_name','=',this.name)], context=context):
        raise osv.except_osv(_('Warning!'), _('Report with service name "%s" already exist in system!') % this.name)
    fd = StringIO()
    fd.write(base64.decodestring(this.file))
    fd.seek(0)
    # Load the report definition as module data (noupdate so later
    # module upgrades do not overwrite it).
    convert_xml_import(cr, 'report_aeroo', fd, {}, 'init', noupdate=True)
    fd.close()
    self.write(cr, uid, ids, {'state':'done'}, context=context)
    # [-1]: if several records share the name, take the newest one.
    report_id = report_obj.search(cr, uid, [('report_name','=',this.name)], context=context)[-1]
    report = report_obj.browse(cr, uid, report_id, context=context)
    # Bind the report to the "Print" menu of its model.
    event_id = self.pool.get('ir.values').set_action(cr, uid, report.report_name,
        'client_print_multi', report.model, 'ir.actions.report,%d' % report_id)
    if report.report_wizard:
        report._set_report_wizard(report.id)
    mod_obj = self.pool.get('ir.model.data')
    act_obj = self.pool.get('ir.actions.act_window')
    # Reuse the generic aeroo report tree action, restricted to the
    # freshly installed report.
    mod_id = mod_obj.search(cr, uid, [('name', '=', 'action_aeroo_report_xml_tree')])[0]
    res_id = mod_obj.read(cr, uid, mod_id, ['res_id'])['res_id']
    act_win = act_obj.read(cr, uid, res_id, [])
    act_win['domain'] = [('id','=',report_id)]
    return act_win
def _message_notification_recipients(self, message, recipients):
    """Relabel the record-access button of notification emails.

    The generic button title is replaced by "See Leave" for leave
    requests ('remove' type) or "See Allocation" for allocations.
    """
    result = super(Holidays, self)._message_notification_recipients(
        message, recipients)
    record = self.env[message.model].browse(message.res_id)
    if record.type == 'remove':
        title = _("See Leave")
    else:
        title = _("See Allocation")
    for group_values in result.values():
        button = group_values.get('button_access')
        if button:
            button['title'] = title
    return result
def _check_hash_integrity(self, company_id):
    """Checks that all posted or invoiced pos orders have still the same
    data as when they were posted and raises an error with the result.
    """
    def build_order_info(order):
        # Summary row: [date_order, sequence number, name, receipt ref,
        # write_date] — indices 0..4 are used below by position.
        entry_reference = _('(Receipt ref.: %s)')
        order_reference_string = order.pos_reference and entry_reference % order.pos_reference or ''
        return [ctx_tz(order, 'date_order'),
                order.l10n_fr_secure_sequence_number, order.name,
                order_reference_string, ctx_tz(order, 'write_date')]

    # Hash chain order is the secure sequence number, ascending.
    orders = self.search([('state', 'in', ['paid', 'done', 'invoiced']),
                          ('company_id', '=', company_id),
                          ('l10n_fr_secure_sequence_number', '!=', 0)],
                         order="l10n_fr_secure_sequence_number ASC")
    if not orders:
        raise UserError(
            _('There isn\'t any order flagged for data inalterability yet for the company %s. This mechanism only runs for point of sale orders generated after the installation of the module France - Certification CGI 286 I-3 bis. - POS'
              ) % self.env.user.company_id.name)
    previous_hash = u''
    start_order_info = []
    # Re-compute every hash in chain order; any mismatch means the
    # order data was altered after posting.
    for order in orders:
        if order.l10n_fr_hash != order._compute_hash(
                previous_hash=previous_hash):
            raise UserError(
                _('Corrupted data on point of sale order with id %s.') %
                order.id)
        previous_hash = order.l10n_fr_hash
    orders_sorted_date = orders.sorted(lambda o: o.date_order)
    start_order_info = build_order_info(orders_sorted_date[0])
    end_order_info = build_order_info(orders_sorted_date[-1])
    report_dict = {
        'start_order_name': start_order_info[2],
        'start_order_ref': start_order_info[3],
        'start_order_date': start_order_info[0],
        'end_order_name': end_order_info[2],
        'end_order_ref': end_order_info[3],
        'end_order_date': end_order_info[0]
    }
    # Raise on success
    raise UserError(
        _('''Successful test !

The point of sale orders are guaranteed to be in their original and inalterable state
From: %(start_order_name)s %(start_order_ref)s recorded on %(start_order_date)s
To: %(end_order_name)s %(end_order_ref)s recorded on %(end_order_date)s

For this report to be legally meaningful, please download your certification from your customer account on flectrahq.com (Only for Flectra Enterprise users).''') % report_dict)
def set_activity_approved(self):
    """Approve the (single) activity sheet.

    Only timesheet users/managers may approve; the sheet must currently
    be in the 'unapproved' state.

    :raises UserError: when the user lacks the group or the sheet is
        not awaiting approval
    """
    self.ensure_one()
    if not self.env.user.has_group(
            'hr_timesheet.group_hr_timesheet_user'):
        raise UserError(_('Activities can only be Approved by Managers.'))
    not_submitted = self.filtered(
        lambda sheet: sheet.state != 'unapproved')
    if not_submitted:
        raise UserError(_("Kindly submit your activity first!."))
    self.write({'state': 'approved'})
class BarcodeRule(models.Model):
    """Extend barcode rules with the point-of-sale specific rule types."""

    _inherit = 'barcode.rule'

    # Additional barcode interpretations used by the POS frontend.
    type = fields.Selection(selection_add=[
        ('weight', _('Weighted Product')),
        ('price', _('Priced Product')),
        ('discount', _('Discounted Product')),
        ('client', _('Client')),
        ('cashier', _('Cashier')),
    ])
def _compute_duration_display(self):
    """Compute a short human-readable duration such as '8 hours' or
    '1.5 days', depending on the allocation's request unit."""
    for allocation in self:
        if allocation.type_request_unit == 'hour':
            amount = allocation.number_of_hours_display
            unit = _('hours')
        else:
            amount = allocation.number_of_days_display
            unit = _('days')
        # %g trims trailing zeros (e.g. '8' instead of '8.00').
        allocation.duration_display = '%g %s' % (
            float_round(amount, precision_digits=2), unit)
class BarcodeRule(models.Model):
    """Extend barcode rules with the manufacturing-specific rule types."""

    _inherit = 'barcode.rule'

    # Barcode interpretations used on the MRP shop floor.
    type = fields.Selection(selection_add=[
        ('mrp_order', _('Production Order')),
        ('mrp_operation', _('Production Operation')),
        ('mrp_worker', _('Worker')),
        ('mrp_group', _('Work Order Group')),
    ])
def get_suggested_controllers(self):
    """ Returns a tuple (name, url, icon).
        Where icon can be a module name, or a path
    """
    return [
        (_('Homepage'), url_for('/'), 'website'),
        (_('Contact Us'), url_for('/contactus'), 'website_crm'),
    ]
def record_objects(self):
    """Collect 'copy' queries for the records of the selected models
    that match the wizard's date filter, then open the follow-up wizard.

    :return: an act_window dict opening either the recording info form
        (records found, recording data passed via context) or the
        "nothing recorded" message view
    """
    data = self.read([])[0]
    check_date = data['check_date']
    filter_cond = data['filter_cond']
    mod_obj = self.env['ir.model']
    recording_data = []
    for obj_id in data['objects']:
        obj_name = mod_obj.browse(obj_id).model
        obj_pool = self.env[obj_name]
        # BUG FIX: search_condition was left unbound when filter_cond
        # held an unexpected value; default to "all records".
        search_condition = []
        if filter_cond == 'created':
            search_condition = [('create_date', '>', check_date)]
        elif filter_cond == 'modified':
            search_condition = [('write_date', '>', check_date)]
        elif filter_cond == 'created_modified':
            search_condition = [
                '|',
                ('create_date', '>', check_date),
                ('write_date', '>', check_date),
            ]
        # Models without log access have no create/write dates to
        # filter on ("'x' in dir(...)" replaced by getattr).
        if not getattr(obj_pool, '_log_access', True):
            search_condition = []
        # Skip abstract / non-table models.
        if not getattr(obj_pool, '_auto', True):
            continue
        dbname = self.env.cr.dbname
        for record in obj_pool.search(search_condition):
            args = (dbname, self.env.user.id, obj_name, 'copy',
                    record.id, {})
            recording_data.append(('query', args, {}, record.id))
    # Both outcomes open the same wizard model; only the view (and the
    # context payload) differ.  The duplicated return dicts were merged.
    if recording_data:
        res_id = self.env.ref('base_module_record.info_start_form_view').id
        self = self.with_context({'recording_data': recording_data})
    else:
        res_id = self.env.ref(
            'base_module_record.module_recording_message_view').id
    return {
        'name': _('Module Recording'),
        'context': self._context,
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'base.module.record.objects',
        'views': [(res_id, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
def copy_doc(self, res_id, template_id, name_gdocs, res_model):
    """Copy a Google Drive template and attach the new document to a
    record.

    Fetches the template's parent folder, copies the template with a
    description linking back to the record, creates an ir.attachment of
    type 'url', and opens write permissions (anyone with the link, plus
    the current user's email when set).

    :param res_id: id of the record to attach the document to
    :param template_id: Google Drive id of the template file
    :param name_gdocs: title for the copied document
    :param res_model: model name of the target record
    :return: dict with 'id' (attachment id) and 'url' keys on success,
        empty dict when the copy response has no alternateLink
    :raises UserError: when the template cannot be fetched
    """
    google_web_base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
    access_token = self.get_access_token()
    # Copy template in to drive with help of new access token
    request_url = "https://www.googleapis.com/drive/v2/files/%s?fields=parents/id&access_token=%s" % (template_id, access_token)
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    try:
        req = requests.post(request_url, headers=headers, timeout=TIMEOUT)
        req.raise_for_status()
        parents_dict = req.json()
    except requests.HTTPError:
        raise UserError(_("The Google Template cannot be found. Maybe it has been deleted."))
    record_url = "Click on link to open Record in Flectra\n %s/?db=%s#id=%s&model=%s" % (google_web_base_url, self._cr.dbname, res_id, res_model)
    data = {
        "title": name_gdocs,
        "description": record_url,
        "parents": parents_dict['parents']
    }
    request_url = "https://www.googleapis.com/drive/v2/files/%s/copy?access_token=%s" % (template_id, access_token)
    headers = {
        'Content-type': 'application/json',
        'Accept': 'text/plain'
    }
    # resp, content = Http().request(request_url, "POST", data_json, headers)
    req = requests.post(request_url, data=json.dumps(data), headers=headers, timeout=TIMEOUT)
    req.raise_for_status()
    content = req.json()
    res = {}
    if content.get('alternateLink'):
        res['id'] = self.env["ir.attachment"].create({
            'res_model': res_model,
            'name': name_gdocs,
            'res_id': res_id,
            'type': 'url',
            'url': content['alternateLink']
        }).id
        # Commit in order to attach the document to the current object instance, even if the permissions has not been written.
        self._cr.commit()
        res['url'] = content['alternateLink']
        key = self._get_key_from_url(res['url'])
        # Share with "anyone with the link" as writer.
        request_url = "https://www.googleapis.com/drive/v2/files/%s/permissions?emailMessage=This+is+a+drive+file+created+by+Flectra&sendNotificationEmails=false&access_token=%s" % (key, access_token)
        data = {'role': 'writer', 'type': 'anyone', 'value': '', 'withLink': True}
        try:
            req = requests.post(request_url, data=json.dumps(data), headers=headers, timeout=TIMEOUT)
            req.raise_for_status()
        except requests.HTTPError:
            raise self.env['res.config.settings'].get_config_warning(_("The permission 'reader' for 'anyone with the link' has not been written on the document"))
        if self.env.user.email:
            # Best effort: also grant the current user explicit write
            # access; failures are deliberately ignored.
            data = {'role': 'writer', 'type': 'user', 'value': self.env.user.email}
            try:
                requests.post(request_url, data=json.dumps(data), headers=headers, timeout=TIMEOUT)
            except requests.HTTPError:
                pass
    return res
def name_get(self):
    """Display name for leaves and allocations.

    Leave requests ('remove') show the leave/status name and duration
    (short form when the context flag 'short_name' is set); allocation
    requests show the status, duration and employee.
    """
    result = []
    for leave in self:
        if leave.type != 'remove':
            display = _("Allocation of %s : %.2f day(s) To %s") % (
                leave.holiday_status_id.name,
                leave.number_of_days_temp,
                leave.employee_id.name)
        elif self.env.context.get('short_name'):
            display = _("%s : %.2f day(s)") % (
                leave.name or leave.holiday_status_id.name,
                leave.number_of_days_temp)
        else:
            display = _("%s on %s : %.2f day(s)") % (
                leave.employee_id.name or leave.category_id.name,
                leave.holiday_status_id.name,
                leave.number_of_days_temp)
        result.append((leave.id, display))
    return result