def on_trash(self):
    global_defaults = dataent.get_doc("Global Defaults")
    if global_defaults.current_fiscal_year == self.name:
        dataent.throw(_("You cannot delete Fiscal Year {0}. Fiscal Year {0} is set as default in Global Settings").format(self.name))

    dataent.cache().delete_value("fiscal_years")
def resolve_redirect(path):
    '''
    Resolve redirects from hooks

    Example:

        website_redirects = [
            # absolute location
            {"source": "/from", "target": "https://mysite/from"},

            # relative location
            {"source": "/from", "target": "/main"},

            # use regex
            {"source": r"/from/(.*)", "target": r"/main/\1"}
            # use r as a string prefix if you use regex groups or want to escape any string literal
        ]
    '''
    redirects = dataent.get_hooks('website_redirects')
    if not redirects:
        return

    redirect_to = dataent.cache().hget('website_redirects', path)
    if redirect_to:
        dataent.flags.redirect_location = redirect_to
        raise dataent.Redirect

    for rule in redirects:
        pattern = rule['source'].strip('/ ') + '$'
        if re.match(pattern, path):
            redirect_to = re.sub(pattern, rule['target'], path)
            dataent.flags.redirect_location = redirect_to
            dataent.cache().hset('website_redirects', path, redirect_to)
            raise dataent.Redirect
def update_user_settings_data(user_setting, fieldname, old, new,
        condition_fieldname=None, condition_values=None):
    data = user_setting.get("data")
    if data:
        update = False
        data = json.loads(data)
        for view in ['List', 'Gantt', 'Kanban', 'Calendar', 'Image', 'Inbox', 'Report']:
            view_settings = data.get(view)
            if view_settings and view_settings.get("filters"):
                view_filters = view_settings.get("filters")
                for view_filter in view_filters:
                    if condition_fieldname and view_filter[filter_dict[condition_fieldname]] != condition_values:
                        continue
                    if view_filter[filter_dict[fieldname]] == old:
                        view_filter[filter_dict[fieldname]] = new
                        update = True

        if update:
            dataent.db.sql("update __UserSettings set data=%s where doctype=%s and user=%s",
                (json.dumps(data), user_setting.doctype, user_setting.user))

            # clear that user's settings from the redis cache
            dataent.cache().hset('_user_settings',
                '{0}::{1}'.format(user_setting.doctype, user_setting.user), None)
def build_page(path):
    if not getattr(dataent.local, "path", None):
        dataent.local.path = path

    context = get_context(path)

    if context.source:
        html = dataent.render_template(context.source, context)
    elif context.template:
        if path.endswith('min.js'):
            html = dataent.get_jloader().get_source(dataent.get_jenv(), context.template)[0]
        else:
            html = dataent.get_template(context.template).render(context)

    if '{index}' in html:
        html = html.replace('{index}', get_toc(context.route))

    if '{next}' in html:
        html = html.replace('{next}', get_next_link(context.route))

    # html = dataent.get_template(context.base_template_path).render(context)

    if can_cache(context.no_cache):
        page_cache = dataent.cache().hget("website_page", path) or {}
        page_cache[dataent.local.lang] = html
        dataent.cache().hset("website_page", path, page_cache)

    return html
def update_password(new_password, logout_all_sessions=0, key=None, old_password=None):
    result = test_password_strength(new_password, key, old_password)
    feedback = result.get("feedback", None)

    if feedback and not feedback.get('password_policy_validation_passed', False):
        handle_password_test_fail(result)

    res = _get_user_for_update_password(key, old_password)
    if res.get('message'):
        return res['message']
    else:
        user = res['user']

    _update_password(user, new_password, logout_all_sessions=int(logout_all_sessions))

    user_doc, redirect_url = reset_user_data(user)

    # get redirect url from cache
    redirect_to = dataent.cache().hget('redirect_after_login', user)
    if redirect_to:
        redirect_url = redirect_to
        dataent.cache().hdel('redirect_after_login', user)

    dataent.local.login_manager.login_as(user)

    if user_doc.user_type == "System User":
        return "/desk"
    else:
        return redirect_url if redirect_url else "/"
def google_callback(code=None, state=None, account=None):
    redirect_uri = get_request_site_address(True) + "?cmd=dataent.integrations.doctype.gcalendar_settings.gcalendar_settings.google_callback"
    if account is not None:
        dataent.cache().hset("gcalendar_account", "GCalendar Account", account)

    doc = dataent.get_doc("GCalendar Settings")

    if code is None:
        return {
            'url': 'https://accounts.google.com/o/oauth2/v2/auth?access_type=offline&response_type=code&prompt=consent&client_id={}&include_granted_scopes=true&scope={}&redirect_uri={}'.format(doc.client_id, SCOPES, redirect_uri)
        }
    else:
        try:
            account = dataent.get_doc("GCalendar Account",
                dataent.cache().hget("gcalendar_account", "GCalendar Account"))
            data = {
                'code': code,
                'client_id': doc.client_id,
                'client_secret': doc.get_password(fieldname='client_secret', raise_exception=False),
                'redirect_uri': redirect_uri,
                'grant_type': 'authorization_code'
            }
            r = requests.post('https://www.googleapis.com/oauth2/v4/token', data=data).json()
            dataent.db.set_value("GCalendar Account", account.name, "authorization_code", code)
            if 'access_token' in r:
                dataent.db.set_value("GCalendar Account", account.name, "session_token", r['access_token'])
            if 'refresh_token' in r:
                dataent.db.set_value("GCalendar Account", account.name, "refresh_token", r['refresh_token'])
            dataent.db.commit()
            dataent.local.response["type"] = "redirect"
            dataent.local.response["location"] = "/integrations/gcalendar-success.html"
            return
        except Exception as e:
            dataent.throw(str(e))
def confirm_otp_token(login_manager, otp=None, tmp_id=None):
    '''Confirm otp matches.'''
    if not otp:
        otp = dataent.form_dict.get('otp')

    if not otp:
        if two_factor_is_enabled_for_(login_manager.user):
            return False
        return True

    if not tmp_id:
        tmp_id = dataent.form_dict.get('tmp_id')

    hotp_token = dataent.cache().get(tmp_id + '_token')
    otp_secret = dataent.cache().get(tmp_id + '_otp_secret')
    if not otp_secret:
        raise ExpiredLoginException(_('Login session expired, refresh page to retry'))

    hotp = pyotp.HOTP(otp_secret)
    if hotp_token:
        if hotp.verify(otp, int(hotp_token)):
            dataent.cache().delete(tmp_id + '_token')
            return True
        else:
            login_manager.fail(_('Incorrect Verification code'), login_manager.user)

    totp = pyotp.TOTP(otp_secret)
    if totp.verify(otp):
        # show qr code only once
        if not dataent.db.get_default(login_manager.user + '_otplogin'):
            dataent.db.set_default(login_manager.user + '_otplogin', 1)
            delete_qrimage(login_manager.user)
        return True
    else:
        login_manager.fail(_('Incorrect Verification code'), login_manager.user)
def run_webhooks(doc, method):
    '''Run webhooks for this method'''
    if dataent.flags.in_import or dataent.flags.in_patch or dataent.flags.in_install:
        return

    if dataent.flags.webhooks_executed is None:
        dataent.flags.webhooks_executed = {}

    if dataent.flags.webhooks is None:
        # load webhooks from cache
        webhooks = dataent.cache().get_value('webhooks')
        if webhooks is None:
            # query webhooks
            webhooks_list = dataent.get_all('Webhook',
                fields=["name", "webhook_docevent", "webhook_doctype"])

            # make webhooks map for cache
            webhooks = {}
            for w in webhooks_list:
                webhooks.setdefault(w.webhook_doctype, []).append(w)
            dataent.cache().set_value('webhooks', webhooks)

        dataent.flags.webhooks = webhooks

    # get webhooks for this doctype
    webhooks_for_doc = dataent.flags.webhooks.get(doc.doctype, None)

    if not webhooks_for_doc:
        # no webhooks, quit
        return

    def _webhook_request(webhook):
        if webhook.name not in dataent.flags.webhooks_executed.get(doc.name, []):
            dataent.enqueue("dataent.integrations.doctype.webhook.webhook.enqueue_webhook",
                enqueue_after_commit=True, doc=doc, webhook=webhook)

            # keep list of webhooks executed for this doc in this request
            # so that we don't run the same webhook for the same document multiple times
            # in one request
            dataent.flags.webhooks_executed.setdefault(doc.name, []).append(webhook.name)

    event_list = ["on_update", "after_insert", "on_submit", "on_cancel", "on_trash"]

    if not doc.flags.in_insert:
        # value change is not applicable in insert
        event_list.append('on_change')
        event_list.append('before_update_after_submit')

    for webhook in webhooks_for_doc:
        event = method if method in event_list else None
        if event and webhook.webhook_docevent == event:
            _webhook_request(webhook)
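# Hedged sketch (plain Python, not framework code): shape of the two structures that
# run_webhooks() above relies on. The doctype "Sales Order", webhook name "notify-crm"
# and document name "SO-0001" are hypothetical examples; only the keys mirror the fields
# queried above (name, webhook_docevent, webhook_doctype).
_webhooks_cache = {
    # cached under dataent.cache().get_value('webhooks'), keyed by doctype
    "Sales Order": [
        {"name": "notify-crm", "webhook_docevent": "on_update", "webhook_doctype": "Sales Order"},
    ],
}
_webhooks_executed = {
    # per-request dedup map on dataent.flags.webhooks_executed, keyed by document name;
    # a second on_update for SO-0001 in the same request is skipped because the
    # webhook name is already listed here
    "SO-0001": ["notify-crm"],
}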
def set_user_info(self, resume=False):
    # set sid again
    dataent.local.cookie_manager.init_cookies()

    self.full_name = " ".join(filter(None, [self.info.first_name, self.info.last_name]))

    if self.info.user_type == "Website User":
        dataent.local.cookie_manager.set_cookie("system_user", "no")
        if not resume:
            dataent.local.response["message"] = "No App"
            dataent.local.response["home_page"] = get_website_user_home_page(self.user)
    else:
        dataent.local.cookie_manager.set_cookie("system_user", "yes")
        if not resume:
            dataent.local.response['message'] = 'Logged In'
            dataent.local.response["home_page"] = "/desk"

    if not resume:
        dataent.response["full_name"] = self.full_name

    # redirect information
    redirect_to = dataent.cache().hget('redirect_after_login', self.user)
    if redirect_to:
        dataent.local.response["redirect_to"] = redirect_to
        dataent.cache().hdel('redirect_after_login', self.user)

    dataent.local.cookie_manager.set_cookie("full_name", self.full_name)
    dataent.local.cookie_manager.set_cookie("user_id", self.user)
    dataent.local.cookie_manager.set_cookie("user_image", self.info.user_image or "")
def load_lang(lang, apps=None):
    """Combine all translations from `.csv` files in all `apps`.
    For derivative languages (es-GT), take translations
    from the base language (es) and then update translations
    from the child (es-GT)"""

    if lang == 'en':
        return {}

    out = dataent.cache().hget("lang_full_dict", lang, shared=True)
    if not out:
        out = {}
        for app in (apps or dataent.get_all_apps(True)):
            path = os.path.join(dataent.get_pymodule_path(app), "translations", lang + ".csv")
            out.update(get_translation_dict_from_file(path, lang, app) or {})

        if '-' in lang:
            parent = lang.split('-')[0]
            parent_out = load_lang(parent)
            parent_out.update(out)
            out = parent_out

        dataent.cache().hset("lang_full_dict", lang, out, shared=True)

    return out or {}
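# Hedged illustration (plain Python, not framework code) of the derivative-language merge
# described in load_lang()'s docstring: the parent dict is loaded first, then the child's
# entries override it. The translation strings below are hypothetical examples.
_es = {"Submit": "Enviar", "Cancel": "Cancelar"}   # base language (es)
_es_gt = {"Cancel": "Anular"}                      # child language (es-GT)
_merged = dict(_es)
_merged.update(_es_gt)
# _merged == {"Submit": "Enviar", "Cancel": "Anular"}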
def get_defaults_for(parent="__default"):
    """get all defaults"""
    defaults = dataent.cache().hget("defaults", parent)

    if defaults is None:
        # sort descending because first default must get precedence
        res = dataent.db.sql("""select defkey, defvalue from `tabDefaultValue`
            where parent = %s order by creation""", (parent,), as_dict=1)

        defaults = dataent._dict({})
        for d in res:
            if d.defkey in defaults:
                # listify
                if not isinstance(defaults[d.defkey], list) and defaults[d.defkey] != d.defvalue:
                    defaults[d.defkey] = [defaults[d.defkey]]

                if d.defvalue not in defaults[d.defkey]:
                    defaults[d.defkey].append(d.defvalue)

            elif d.defvalue is not None:
                defaults[d.defkey] = d.defvalue

        dataent.cache().hset("defaults", parent, defaults)

    return defaults
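# Hedged illustration (plain Python, not framework code) of the "listify" behaviour in
# get_defaults_for(): a defkey seen once stays a scalar, a defkey seen with several
# distinct defvalues becomes a list. The key and company names are hypothetical examples.
# rows: [("fiscal_year", "2018-19"), ("company", "Alpha Inc"), ("company", "Beta Inc")]
_defaults_example = {
    "fiscal_year": "2018-19",              # single default stays a scalar
    "company": ["Alpha Inc", "Beta Inc"],  # repeated defkey with distinct values is listified
}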
def get_all_page_context_from_doctypes():
    '''Get all doctype generated routes (for sitemap.xml)'''
    routes = dataent.cache().get_value("website_generator_routes")
    if not routes:
        routes = get_page_info_from_doctypes()
        dataent.cache().set_value("website_generator_routes", routes)

    return routes
def get_workflow_name(doctype):
    workflow_name = dataent.cache().hget('workflow', doctype)
    if workflow_name is None:
        workflow_name = dataent.db.get_value("Workflow",
            {"document_type": doctype, "is_active": 1}, "name")
        dataent.cache().hset('workflow', doctype, workflow_name or '')

    return workflow_name
def update_contact_cache(contacts):
    cached_contacts = dataent.cache().hget("contacts", dataent.session.user) or []

    uncached_contacts = [d for d in contacts if d not in cached_contacts]
    cached_contacts.extend(uncached_contacts)

    dataent.cache().hset("contacts", dataent.session.user, cached_contacts)
def get_cached_user_pass():
    '''Get user and password if set.'''
    user = pwd = None
    tmp_id = dataent.form_dict.get('tmp_id')
    if tmp_id:
        user = dataent.safe_decode(dataent.cache().get(tmp_id + '_usr'))
        pwd = dataent.safe_decode(dataent.cache().get(tmp_id + '_pwd'))
    return (user, pwd)
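# Hedged sketch (plain Python, not framework code): the temporary cache keys that the
# two-factor flow derives from tmp_id, as used by confirm_otp_token() and
# get_cached_user_pass() above. The tmp_id value is a hypothetical example.
_tmp_id = "a1b2c3"
_tfa_cache_keys = [
    _tmp_id + "_usr",         # cached username
    _tmp_id + "_pwd",         # cached password
    _tmp_id + "_otp_secret",  # OTP secret for HOTP/TOTP verification
    _tmp_id + "_token",       # HOTP counter token, deleted after a successful verify
]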
def update_invalid_login(self, user):
    last_login_tried = get_last_tried_login_data(user)

    failed_count = 0
    if last_login_tried > get_datetime():
        failed_count = get_login_failed_count(user)

    dataent.cache().hset('login_failed_count', user, failed_count + 1)
def insert_session_record(self):
    dataent.db.sql("""insert into tabSessions
        (sessiondata, user, lastupdate, sid, status, device)
        values (%s, %s, NOW(), %s, 'Active', %s)""",
        (str(self.data['data']), self.data['user'], self.data['sid'], self.device))

    # also add to memcache
    dataent.cache().hset("session", self.data.sid, self.data)
def get_scheduler_events(event):
    '''Get scheduler events from hooks and integrations'''
    scheduler_events = dataent.cache().get_value('scheduler_events')
    if not scheduler_events:
        scheduler_events = dataent.get_hooks("scheduler_events")
        dataent.cache().set_value('scheduler_events', scheduler_events)

    return scheduler_events.get(event) or []
def clear_global_cache():
    from dataent.website.render import clear_cache as clear_website_cache

    clear_doctype_cache()
    clear_website_cache()
    dataent.cache().delete_value(["app_hooks", "installed_apps",
        "app_modules", "module_app", "notification_config",
        'system_settings', 'scheduler_events', 'time_zone',
        'webhooks', 'active_domains', 'active_modules'])
    dataent.setup_module_map()
def get_user_permissions(user=None):
    '''Get all user permissions for the user as a dict of doctype'''
    # if this is called from client-side,
    # user can access only his/her user permissions
    if dataent.request and dataent.local.form_dict.cmd == 'get_user_permissions':
        user = dataent.session.user

    if not user:
        user = dataent.session.user

    if user == "Administrator":
        return {}

    cached_user_permissions = dataent.cache().hget("user_permissions", user)

    if cached_user_permissions is not None:
        return cached_user_permissions

    out = {}

    def add_doc_to_perm(perm, doc_name):
        # group rules for each type
        # for example if allow is "Customer", then build all allowed customers
        # in a list
        if not out.get(perm.allow):
            out[perm.allow] = []

        out[perm.allow].append(dataent._dict({
            'doc': doc_name,
            'applicable_for': perm.get('applicable_for')
        }))

    try:
        for perm in dataent.get_all('User Permission',
                fields=['allow', 'for_value', 'applicable_for'],
                filters=dict(user=user)):

            meta = dataent.get_meta(perm.allow)
            add_doc_to_perm(perm, perm.for_value)

            if meta.is_nested_set():
                descendants = dataent.db.get_descendants(perm.allow, perm.for_value)
                for doc in descendants:
                    add_doc_to_perm(perm, doc)

        out = dataent._dict(out)
        dataent.cache().hset("user_permissions", user, out)
    except dataent.SQLError as e:
        if e.args[0] == 1146:
            # called from patch
            pass

    return out
def execute():
    dataent.cache().delete_value('doctypes_with_global_search')
    doctypes_with_global_search = get_doctypes_with_global_search(with_child_tables=False)
    for i, doctype in enumerate(doctypes_with_global_search):
        update_progress_bar("Updating Global Search", i, len(doctypes_with_global_search))
        rebuild_for_doctype(doctype)
def update_global_search(doc):
    """
    Add values marked with `in_global_search` to `global_search_queue` from given doc

    :param doc: Document to be added to global search
    """
    if dataent.local.conf.get('disable_global_search'):
        return

    if doc.docstatus > 1 or (doc.meta.has_field("enabled") and not doc.get("enabled")) \
            or doc.get("disabled"):
        return

    content = []
    for field in doc.meta.get_global_search_fields():
        if doc.get(field.fieldname) and field.fieldtype != "Table":
            content.append(get_formatted_value(doc.get(field.fieldname), field))

    tags = (doc.get('_user_tags') or '').strip()
    if tags:
        content.extend(list(filter(lambda x: x, tags.split(','))))

    # Get children
    for child in doc.meta.get_table_fields():
        for d in doc.get(child.fieldname):
            if d.parent == doc.name:
                for field in d.meta.get_global_search_fields():
                    if d.get(field.fieldname):
                        content.append(get_formatted_value(d.get(field.fieldname), field))

    if content:
        published = 0
        if hasattr(doc, 'is_website_published') and doc.meta.allow_guest_to_view:
            published = 1 if doc.is_website_published() else 0

        title = (doc.get_title() or '')[:int(varchar_len)]
        route = doc.get('route') if doc else ''

        value = dict(
            doctype=doc.doctype,
            name=doc.name,
            content=' ||| '.join(content or ''),
            published=published,
            title=title,
            route=route
        )

        try:
            # append to search queue if connected
            dataent.cache().lpush('global_search_queue', json.dumps(value))
        except redis.exceptions.ConnectionError:
            # not connected, sync directly
            sync_value(value)
def get_fiscal_years(transaction_date=None, fiscal_year=None, label="Date",
        verbose=1, company=None, as_dict=False):
    fiscal_years = dataent.cache().hget("fiscal_years", company) or []
    if not fiscal_years:
        # if year start date is 2012-04-01, year end date should be 2013-03-31 (hence subdate)
        cond = ""
        if fiscal_year:
            cond += " and fy.name = {0}".format(dataent.db.escape(fiscal_year))
        if company:
            cond += """
                and (not exists (select name
                    from `tabFiscal Year Company` fyc
                    where fyc.parent = fy.name)
                or exists(select company
                    from `tabFiscal Year Company` fyc
                    where fyc.parent = fy.name
                    and fyc.company=%(company)s)
                )
            """

        fiscal_years = dataent.db.sql("""
            select
                fy.name, fy.year_start_date, fy.year_end_date
            from
                `tabFiscal Year` fy
            where
                disabled = 0 {0}
            order by
                fy.year_start_date desc""".format(cond), {
                "company": company
            }, as_dict=True)

        dataent.cache().hset("fiscal_years", company, fiscal_years)

    if transaction_date:
        transaction_date = getdate(transaction_date)

    for fy in fiscal_years:
        matched = False
        if fiscal_year and fy.name == fiscal_year:
            matched = True

        if (transaction_date and getdate(fy.year_start_date) <= transaction_date
                and getdate(fy.year_end_date) >= transaction_date):
            matched = True

        if matched:
            if as_dict:
                return (fy,)
            else:
                return ((fy.name, fy.year_start_date, fy.year_end_date),)

    error_msg = _("""{0} {1} not in any active Fiscal Year.""").format(label, formatdate(transaction_date))
    if verbose == 1:
        dataent.msgprint(error_msg)
    raise FiscalYearError(error_msg)
def sync_global_search():
    """
    Insert / update values from `global_search_queue` into __global_search.
    This is called via the job scheduler.
    """
    while dataent.cache().llen('global_search_queue') > 0:
        value = json.loads(dataent.cache().lpop('global_search_queue').decode('utf-8'))
        sync_value(value)
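# Hedged sketch (plain Python, assuming `json` is imported as in the functions above):
# shape of a single queue entry as produced by update_global_search() and consumed by
# sync_global_search(). The doctype, name, title and content are hypothetical examples;
# the keys mirror the dict built in update_global_search().
_queued_value = json.dumps({
    "doctype": "Note",
    "name": "NOTE-0001",
    "content": "Weekly plan ||| discuss cache invalidation",
    "published": 0,
    "title": "Weekly plan",
    "route": "",
})
# sync_global_search() json.loads() this string and hands the resulting dict to sync_value()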
def on_update(self):
    for df in self.meta.get("fields"):
        if df.fieldtype not in no_value_fields:
            dataent.db.set_default(df.fieldname, self.get(df.fieldname))

    if self.language:
        set_default_language(self.language)

    dataent.cache().delete_value('system_settings')
    dataent.cache().delete_value('time_zone')
    dataent.local.system_settings = {}
def get_user_translations(lang):
    out = dataent.cache().hget('lang_user_translations', lang)
    if out is None:
        out = {}
        for fields in dataent.get_all('Translation',
                fields=["source_name", "target_name"], filters={'language': lang}):
            out.update({fields.source_name: fields.target_name})
        dataent.cache().hset('lang_user_translations', lang, out)
    return out
def get_last_tried_login_data(user, get_last_login=False):
    locked_account_time = dataent.cache().hget('locked_account_time', user)
    if get_last_login and locked_account_time:
        return locked_account_time

    last_login_tried = dataent.cache().hget('last_login_tried', user)
    if not last_login_tried or last_login_tried < get_datetime():
        last_login_tried = get_datetime() + datetime.timedelta(seconds=60)

    dataent.cache().hset('last_login_tried', user, last_login_tried)
    return last_login_tried
def get_user_svg_from_cache():
    '''Get User and SVG code from cache.'''
    key = get_query_key()
    totp_uri = dataent.cache().get_value("{}_uri".format(key))
    user = dataent.cache().get_value("{}_user".format(key))
    if not totp_uri or not user:
        dataent.throw(_('Page has expired!'), dataent.PermissionError)
    if not dataent.db.exists('User', user):
        dataent.throw(_('Not Permitted'), dataent.PermissionError)
    user = dataent.get_doc('User', user)
    svg = get_qr_svg_code(totp_uri)
    return (user, svg)
def delete_session(sid=None, user=None, reason="Session Expired"):
    from dataent.core.doctype.activity_log.feed import logout_feed

    dataent.cache().hdel("session", sid)
    dataent.cache().hdel("last_db_session_update", sid)

    if sid and not user:
        user_details = dataent.db.sql("""select user from tabSessions where sid=%s""",
            sid, as_dict=True)
        if user_details:
            user = user_details[0].get("user")

    logout_feed(user, reason)
    dataent.db.sql("""delete from tabSessions where sid=%s""", sid)
    dataent.db.commit()
def sync(doc, method=None):
    """Sync Webhook under Mandrill account"""
    if not (doc.enable_outgoing and doc.service == "Mandrill"
            and doc.smtp_server and doc.email_id and doc.password):
        return

    session = requests.Session()
    if not webhook_exists(doc, session):
        add_webhook(doc, session)

    # always clear key cache
    dataent.cache().delete_value("mandrill_webhook_keys")