Example #1
    def add_sidebars(self):
        '''Add _sidebar.json in each folder in docs'''
        for basepath, folders, files in os.walk(self.docs_path):  # pylint: disable=unused-variable
            with open(os.path.join(basepath, '_sidebar.json'),
                      'w') as sidebarfile:
                sidebarfile.write(
                    dataent.as_json([{
                        "title": "Search Docs ...",
                        "type": "input",
                        "route": "/search_docs"
                    }, {
                        "title": "Docs Home",
                        "route": "/docs"
                    }, {
                        "title": "User Guide",
                        "route": "/docs/user"
                    }, {
                        "title": "Server API",
                        "route": "/docs/current/api"
                    }, {
                        "title": "Models (Reference)",
                        "route": "/docs/current/models"
                    }, {
                        "title": "Improve Docs",
                        "route": "{0}/tree/develop/{1}/docs".format(
                            self.docs_config.source_link, self.app)
                    }]))
Example #2
File: error.py Project: dataent/dataent
def make_error_snapshot(exception):
    if dataent.conf.disable_error_snapshot:
        return

    logger = dataent.logger(__name__, with_more_info=False)

    try:
        error_id = '{timestamp:s}-{ip:s}-{hash:s}'.format(
            timestamp=cstr(datetime.datetime.now()),
            ip=dataent.local.request_ip or '127.0.0.1',
            hash=dataent.generate_hash(length=3))
        snapshot_folder = get_error_snapshot_path()
        dataent.create_folder(snapshot_folder)

        snapshot_file_path = os.path.join(snapshot_folder,
                                          "{0}.json".format(error_id))
        snapshot = get_snapshot(exception)

        with open(encode(snapshot_file_path), 'wb') as error_file:
            error_file.write(encode(dataent.as_json(snapshot)))

        logger.error('New Exception collected with id: {}'.format(error_id))

    except Exception as e:
        logger.error('Could not take error snapshot: {0}'.format(e),
                     exc_info=True)
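
A typical call site wraps the failing work in a try/except, records a snapshot and re-raises. The sketch below is purely illustrative; do_work and handle_request are hypothetical names, only make_error_snapshot comes from the snippet above:

def do_work():
    raise ValueError('something went wrong')  # stand-in for real work


def handle_request():
    # hypothetical caller: capture a snapshot of the failure, then re-raise
    try:
        do_work()
    except Exception as exc:
        make_error_snapshot(exc)
        raise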
Example #3
def get_site_info():
    from dataent.utils.user import get_system_managers
    from dataent.core.doctype.user.user import STANDARD_USERS
    from dataent.email.queue import get_emails_sent_this_month

    # only get system users
    users = dataent.get_all('User',
                            filters={
                                'user_type': 'System User',
                                'name': ('not in', STANDARD_USERS)
                            },
                            fields=[
                                'name', 'enabled', 'last_login', 'last_active',
                                'language', 'time_zone'
                            ])
    system_managers = get_system_managers(only_name=True)
    for u in users:
        # tag system managers
        u.is_system_manager = 1 if u.name in system_managers else 0
        u.full_name = get_fullname(u.name)
        u.email = u.name
        del u['name']

    system_settings = dataent.db.get_singles_dict('System Settings')
    space_usage = dataent._dict((dataent.local.conf.limits
                                 or {}).get('space_usage', {}))

    kwargs = {
        "fields": ["user", "creation", "full_name"],
        "filters": {
            "Operation": "Login",
            "Status": "Success"
        },
        "limit": "10"
    }

    site_info = {
        'installed_apps': get_installed_apps_info(),
        'users': users,
        'country': system_settings.country,
        'language': system_settings.language or 'english',
        'time_zone': system_settings.time_zone,
        'setup_complete': cint(system_settings.setup_complete),
        'scheduler_enabled': system_settings.enable_scheduler,

        # usage
        'emails_sent': get_emails_sent_this_month(),
        'space_used': flt((space_usage.total or 0) / 1024.0, 2),
        'database_size': space_usage.database_size,
        'backup_size': space_usage.backup_size,
        'files_size': space_usage.files_size,
        'last_logins': dataent.get_all("Activity Log", **kwargs)
    }

    # from other apps
    for method_name in dataent.get_hooks('get_site_info'):
        site_info.update(dataent.get_attr(method_name)(site_info) or {})

    # dumps -> loads to prevent datatype conflicts
    return json.loads(dataent.as_json(site_info))
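
The closing dumps -> loads round trip is why dataent.as_json is used here: values such as datetime that json.dumps rejects by default come back as plain strings, so the returned dict holds only JSON-safe types. A small standard-library illustration of the same idea (default=str is a stand-in, not the framework's actual serializer):

import datetime
import json

record = {'last_login': datetime.datetime(2019, 1, 1, 10, 30)}

# json.dumps(record) alone raises TypeError; a default handler avoids that
round_tripped = json.loads(json.dumps(record, default=str))
print(round_tripped)  # {'last_login': '2019-01-01 10:30:00'}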
Example #4
    def add_comment(self,
                    comment_type,
                    text=None,
                    comment_by=None,
                    link_doctype=None,
                    link_name=None):
        """Add a comment to this document.

		:param comment_type: e.g. `Comment`. See Communication for more info."""

        if comment_type == 'Comment':
            out = dataent.get_doc({
                "doctype": "Communication",
                "communication_type": "Comment",
                "sender": comment_by or dataent.session.user,
                "comment_type": comment_type,
                "reference_doctype": self.doctype,
                "reference_name": self.name,
                "content": text or comment_type,
                "link_doctype": link_doctype,
                "link_name": link_name
            }).insert(ignore_permissions=True)
        else:
            out = dataent.get_doc(
                dict(doctype='Version',
                     ref_doctype=self.doctype,
                     docname=self.name,
                     data=dataent.as_json(
                         dict(comment_type=comment_type, comment=text))))
            if comment_by:
                out.owner = comment_by
            out.insert(ignore_permissions=True)
        return out
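
A hypothetical call on a loaded document could look like this; the doctype, name and email are illustrative, only the add_comment signature comes from the snippet above:

# illustrative usage: attach a comment to an existing document
doc = dataent.get_doc('ToDo', 'TODO-0001')  # hypothetical doctype and name
doc.add_comment('Comment', text='Reviewed and approved',
                comment_by='reviewer@example.com')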
Example #5
    def insert_many(self, docs):
        '''Insert multiple documents to the remote server

		:param docs: List of dict or Document objects to be inserted in one request'''
        return self.post_request({
            "cmd": "dataent.client.insert_many",
            "docs": dataent.as_json(docs)
        })
Example #6
    def submit(self, doc):
        '''Submit remote document

		:param doc: dict or Document object to be submitted remotely'''
        return self.post_request({
            "cmd": "dataent.client.submit",
            "doc": dataent.as_json(doc)
        })
Example #7
    def bulk_update(self, docs):
        '''Bulk update documents remotely

		:param docs: List of dict or Document objects to be updated remotely (by `name`)'''
        return self.post_request({
            "cmd": "dataent.client.bulk_update",
            "docs": dataent.as_json(docs)
        })
Example #8
    def insert(self, doc):
        '''Insert a document to the remote server

		:param doc: A dict or Document object to be inserted remotely'''
        res = self.session.post(self.url + "/api/resource/" +
                                doc.get("doctype"),
                                data={"data": dataent.as_json(doc)},
                                verify=self.verify,
                                headers=self.headers)
        return self.post_process(res)
Example #9
    def set_diff(self, old, new):
        '''Set the data property with the diff of the docs if present'''
        diff = get_diff(old, new)
        if diff:
            self.ref_doctype = new.doctype
            self.docname = new.name
            self.data = dataent.as_json(diff)
            return True
        else:
            return False
Example #10
    def update(self, doc):
        '''Update a remote document

		:param doc: dict or Document object to be updated remotely. `name` is mandatory for this'''
        url = self.url + "/api/resource/" + doc.get("doctype") + "/" + doc.get(
            "name")
        res = self.session.put(url,
                               data={"data": dataent.as_json(doc)},
                               verify=self.verify,
                               headers=self.headers)
        return self.post_process(res)
Example #11
def export_languages_json():
    '''Export list of all languages'''
    languages = dataent.db.get_all('Language',
                                   fields=['name', 'language_name'])
    languages = [{'name': d.language_name, 'code': d.name} for d in languages]

    languages.sort(key=lambda a: a['code'])

    with open(dataent.get_app_path('dataent', 'geo', 'languages.json'),
              'w') as f:
        f.write(dataent.as_json(languages))
Example #12
    def get_value(self, doctype, fieldname=None, filters=None):
        '''Returns a value form a document

		:param doctype: DocType to be queried
		:param fieldname: Field to be returned (default `name`)
		:param filters: dict or string for identifying the record'''
        return self.get_request({
            "cmd": "dataent.client.get_value",
            "doctype": doctype,
            "fieldname": fieldname or "name",
            "filters": dataent.as_json(filters)
        })
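
The remote-client wrappers above (insert_many, submit, bulk_update, insert, update, get_value) are normally used through a client object. The sketch below is a hypothetical session; the DataentClient name, URL and credentials are assumptions, not taken from the snippets:

# illustrative usage of the remote-client wrappers shown above
client = DataentClient('https://remote.example.com',
                       'user@example.com', 'secret')  # hypothetical
client.insert({'doctype': 'ToDo', 'description': 'Ship release'})
client.bulk_update([{'doctype': 'ToDo', 'name': 'TODO-0001',
                     'description': 'Ship release (updated)'}])
print(client.get_value('ToDo', 'description', {'name': 'TODO-0001'}))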
Example #13
def export_customizations(module,
                          doctype,
                          sync_on_migrate=0,
                          with_permissions=0):
    """Export Custom Field and Property Setter for the current document to the app folder.
		This will be synced with bench migrate"""

    sync_on_migrate = cint(sync_on_migrate)
    with_permissions = cint(with_permissions)

    if not dataent.get_conf().developer_mode:
        raise Exception('Not developer mode')

    custom = {
        'custom_fields': [],
        'property_setters': [],
        'custom_perms': [],
        'doctype': doctype,
        'sync_on_migrate': sync_on_migrate
    }

    def add(_doctype):
        custom['custom_fields'] += dataent.get_all('Custom Field',
                                                   fields='*',
                                                   filters={'dt': _doctype})
        custom['property_setters'] += dataent.get_all(
            'Property Setter', fields='*', filters={'doc_type': _doctype})

    add(doctype)

    if with_permissions:
        custom['custom_perms'] = dataent.get_all('Custom DocPerm',
                                                 fields='*',
                                                 filters={'parent': doctype})

    # also update the custom fields and property setters for all child tables
    for d in dataent.get_meta(doctype).get_table_fields():
        export_customizations(module, d.options, sync_on_migrate,
                              with_permissions)

    if custom["custom_fields"] or custom["property_setters"] or custom[
            "custom_perms"]:
        folder_path = os.path.join(get_module_path(module), 'custom')
        if not os.path.exists(folder_path):
            os.makedirs(folder_path)

        path = os.path.join(folder_path, scrub(doctype) + '.json')
        with open(path, 'w') as f:
            f.write(dataent.as_json(custom))

        dataent.msgprint(
            _('Customizations for <b>{0}</b> exported to:<br>{1}').format(
                doctype, path))
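
An illustrative invocation; the module and doctype names are assumptions. Per the code above, this would write the exported JSON to the module's custom folder as scrub(doctype) + '.json':

# illustrative only: export customizations of a doctype into a module folder
export_customizations('My Module', 'Sales Invoice',
                      sync_on_migrate=1, with_permissions=1)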
Example #14
def call(fn, *args, **kwargs):
    """
	Call the given dataent function with the passed args and kwargs and return its output as JSON-safe data
	Parameters:
		fn: dataent function to be called

	Returns:
		The output of the called function, round-tripped through JSON so that only serializable types remain

	Example:
		via terminal:
			bench --site epaas.local execute dataent.utils.call --args '''["dataent.get_all", "Activity Log"]''' --kwargs '''{"fields": ["user", "creation", "full_name"], "filters":{"Operation": "Login", "Status": "Success"}, "limit": "10"}'''
	"""
    return json.loads(dataent.as_json(dataent.call(fn, *args, **kwargs)))
Example #15
def create_json_gz_file(data, dt, dn):
    # Storing data in a CSV file causes information loss
    # Reports like P&L Statement were completely unusable because of this
    json_filename = '{0}.json.gz'.format(
        dataent.utils.data.format_datetime(dataent.utils.now(), "Y-m-d-H:M"))
    encoded_content = dataent.safe_encode(dataent.as_json(data))

    # GZip compression seems to reduce storage requirements by 80-90%
    compressed_content = gzip_compress(encoded_content)
    save_file(fname=json_filename,
              content=compressed_content,
              dt=dt,
              dn=dn,
              folder=None,
              is_private=False)
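
Reading such an attachment back is the mirror image; a minimal standard-library sketch (the file name is hypothetical):

import gzip
import json

# illustrative: decompress and parse a file produced by create_json_gz_file
with open('2019-01-01-10:30.json.gz', 'rb') as f:  # hypothetical file name
    data = json.loads(gzip.decompress(f.read()).decode('utf-8'))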
Example #16
File: logger.py Project: dataent/dataent
def get_more_info_for_log():
	'''Adds Site, Form Dict into log entry'''
	more_info = []
	site = getattr(dataent.local, 'site', None)
	if site:
		more_info.append('Site: {0}'.format(site))

	form_dict = getattr(dataent.local, 'form_dict', None)
	if form_dict:
		more_info.append('Form Dict: {0}'.format(dataent.as_json(form_dict)))

	if more_info:
		# to append a \n
		more_info = more_info + ['']

	return '\n'.join(more_info)
Example #17
def get_db_count(*args):
    """
	Pass a doctype or a series of doctypes to get the count of docs in them
	Parameters:
		*args: Variable length argument list of doctype names whose doc count you need

	Returns:
		dict: A dict with the count values.

	Example:
		via terminal:
			bench --site epaas.local execute dataent.utils.get_db_count --args "['DocType', 'Communication']"
	"""
    db_count = {}
    for doctype in args:
        db_count[doctype] = dataent.db.count(doctype)

    return json.loads(dataent.as_json(db_count))
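
The same function can also be called directly from Python; the doctypes below are illustrative:

# illustrative direct call; the bench command in the docstring is equivalent
counts = get_db_count('DocType', 'Communication')
print(counts)  # {'DocType': <count>, 'Communication': <count>}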
Example #18
def write_document_file(doc, record_module=None, create_init=True):
    newdoc = doc.as_dict(no_nulls=True)

    # strip out default fields from children
    for df in doc.meta.get_table_fields():
        for d in newdoc.get(df.fieldname):
            for fieldname in dataent.model.default_fields:
                if fieldname in d:
                    del d[fieldname]

    module = record_module or get_module_name(doc)

    # create folder
    folder = create_folder(module, doc.doctype, doc.name, create_init)

    # write the data file
    fname = scrub(doc.name)
    with open(os.path.join(folder, fname + ".json"), 'w+') as txtfile:
        txtfile.write(dataent.as_json(newdoc))
Example #19
def emit_via_redis(event, message, room):
    """Publish real-time updates via redis

	:param event: Event name, like `task_progress` etc.
	:param message: JSON message object. For async must contain `task_id`
	:param room: name of the room"""
    r = get_redis_server()

    try:
        r.publish(
            'events',
            dataent.as_json({
                'event': event,
                'message': message,
                'room': room
            }))
    except redis.exceptions.ConnectionError:
        # print(dataent.get_traceback())
        pass
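
On the consuming side, a subscriber on the same 'events' channel would decode the JSON payload. A minimal redis-py sketch; the connection details are assumptions and the codebase's real consumer is not shown here:

import json
import redis

# illustrative subscriber for the 'events' channel published above
conn = redis.Redis(host='localhost', port=6379)  # hypothetical connection
pubsub = conn.pubsub()
pubsub.subscribe('events')
for item in pubsub.listen():
    if item['type'] == 'message':
        payload = json.loads(item['data'])
        print(payload['event'], payload['room'], payload['message'])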
Example #20
def export_json(doctype,
                path,
                filters=None,
                or_filters=None,
                name=None,
                order_by="creation asc"):
    def post_process(out):
        del_keys = ('modified_by', 'creation', 'owner', 'idx')
        for doc in out:
            for key in del_keys:
                if key in doc:
                    del doc[key]
            for k, v in doc.items():
                if isinstance(v, list):
                    for child in v:
                        for key in del_keys + ('docstatus', 'doctype',
                                               'modified', 'name'):
                            if key in child:
                                del child[key]

    out = []
    if name:
        out.append(dataent.get_doc(doctype, name).as_dict())
    elif dataent.db.get_value("DocType", doctype, "issingle"):
        out.append(dataent.get_doc(doctype).as_dict())
    else:
        for doc in dataent.get_all(doctype,
                                   fields=["name"],
                                   filters=filters,
                                   or_filters=or_filters,
                                   limit_page_length=0,
                                   order_by=order_by):
            out.append(dataent.get_doc(doctype, doc.name).as_dict())
    post_process(out)

    dirname = os.path.dirname(path)
    if not os.path.exists(dirname):
        path = os.path.join('..', path)

    with open(path, "w") as outfile:
        outfile.write(dataent.as_json(out))
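
A hypothetical invocation that exports matching records to a fixture file; the doctype, path and filters are illustrative:

# illustrative only
export_json('ToDo', 'fixtures/todo.json',
            filters={'status': 'Open'},
            order_by='creation asc')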
Example #21
File: error.py Project: dataent/dataent
def collect_error_snapshots():
    """Scheduled task to collect error snapshots from files and push into Error Snapshot table"""
    if dataent.conf.disable_error_snapshot:
        return

    try:
        path = get_error_snapshot_path()
        if not os.path.exists(path):
            return

        for fname in os.listdir(path):
            fullpath = os.path.join(path, fname)

            try:
                with open(fullpath, 'r') as filedata:
                    data = json.load(filedata)

            except ValueError:
                # empty file
                os.remove(fullpath)
                continue

            for field in ['locals', 'exception', 'frames']:
                data[field] = dataent.as_json(data[field])

            doc = dataent.new_doc('Error Snapshot')
            doc.update(data)
            doc.save()

            dataent.db.commit()

            os.remove(fullpath)

        clear_old_snapshots()

    except Exception as e:
        make_error_snapshot(e)

        # prevent creation of unlimited error snapshots
        raise
Example #22
    def as_json(self):
        return dataent.as_json(self.as_dict())
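
Every snippet on this page ultimately delegates to dataent.as_json. Its real implementation is not reproduced here, but a minimal sketch of such a helper, assuming it wraps json.dumps with a fallback handler for dates and Decimals, might look roughly like this:

import datetime
import decimal
import json


def as_json_sketch(obj, indent=1):
    # hypothetical stand-in for dataent.as_json, not the actual implementation
    def handler(value):
        if isinstance(value, (datetime.date, datetime.datetime)):
            return str(value)
        if isinstance(value, decimal.Decimal):
            return float(value)
        raise TypeError('Object of type {0} is not JSON serializable'.format(
            type(value).__name__))

    return json.dumps(obj, indent=indent, sort_keys=True, default=handler)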
Example #23
	def receive(self, test_mails=None):
		"""Called by scheduler to receive emails from this EMail account using POP3/IMAP."""
		def get_seen(status):
			if not status:
				return None
			seen = 1 if status == "SEEN" else 0
			return seen

		if self.enable_incoming:
			uid_list = []
			exceptions = []
			seen_status = []
			uid_reindexed = False

			if dataent.local.flags.in_test:
				incoming_mails = test_mails
			else:
				email_sync_rule = self.build_email_sync_rule()

				email_server = None
				try:
					email_server = self.get_incoming_server(in_receive=True, email_sync_rule=email_sync_rule)
				except Exception:
					dataent.log_error(title=_("Error while connecting to email account {0}").format(self.name))

				if not email_server:
					return

				emails = email_server.get_messages()
				if not emails:
					return

				incoming_mails = emails.get("latest_messages", [])
				uid_list = emails.get("uid_list", [])
				seen_status = emails.get("seen_status", [])
				uid_reindexed = emails.get("uid_reindexed", False)

			for idx, msg in enumerate(incoming_mails):
				uid = None if not uid_list else uid_list[idx]
				try:
					args = {
						"uid": uid,
						"seen": None if not seen_status else get_seen(seen_status.get(uid, None)),
						"uid_reindexed": uid_reindexed
					}
					communication = self.insert_communication(msg, args=args)

				except SentEmailInInbox:
					dataent.db.rollback()

				except Exception:
					dataent.db.rollback()
					log('email_account.receive')
					if self.use_imap:
						self.handle_bad_emails(email_server, uid, msg, dataent.get_traceback())
					exceptions.append(dataent.get_traceback())

				else:
					dataent.db.commit()
					if communication:
						attachments = [d.file_name for d in communication._attachments]
						communication.notify(attachments=attachments, fetched_from_email_account=True)

			#notify if user is linked to account
			if len(incoming_mails)>0 and not dataent.local.flags.in_test:
				dataent.publish_realtime('new_email', {"account":self.email_account_name, "number":len(incoming_mails)})

			if exceptions:
				raise Exception(dataent.as_json(exceptions))
Example #24
def sendmail(communication_name,
             print_html=None,
             print_format=None,
             attachments=None,
             recipients=None,
             cc=None,
             bcc=None,
             lang=None,
             session=None,
             print_letterhead=None):
    try:

        if lang:
            dataent.local.lang = lang

        if session:
            # hack to enable access to private files in PDF
            session['data'] = dataent._dict(session['data'])
            dataent.local.session.update(session)

        if print_letterhead:
            dataent.flags.print_letterhead = print_letterhead

        # up to 3 retries
        for i in range(3):
            try:
                communication = dataent.get_doc("Communication",
                                                communication_name)
                communication._notify(print_html=print_html,
                                      print_format=print_format,
                                      attachments=attachments,
                                      recipients=recipients,
                                      cc=cc,
                                      bcc=bcc)

            except pymysql.InternalError as e:
                # deadlock, try again
                if e.args[0] == ER.LOCK_DEADLOCK:
                    dataent.db.rollback()
                    time.sleep(1)
                    continue
                else:
                    raise
            else:
                break

    except:
        traceback = log(
            "dataent.core.doctype.communication.email.sendmail",
            dataent.as_json({
                "communication_name": communication_name,
                "print_html": print_html,
                "print_format": print_format,
                "attachments": attachments,
                "recipients": recipients,
                "cc": cc,
                "bcc": bcc,
                "lang": lang
            }))
        dataent.logger(__name__).error(traceback)
        raise