Example #1
def celery_task_failure_email(**kwargs):
    """This event handler is for reporting by email when an exception error in celery."""

    subject = "ERROR Celery Task {sender.name}".format(**kwargs)
    message = """
Task Name: {sender.name}
Task ID: {task_id}
Task args: {args}
Task kwargs: {kwargs}

raised exception:
{exception!r}

full traceback:
{einfo}
""".format(**kwargs)

    # Log the full message because email is not sent in the dev environment
    Logger.error(message)

    # Log a short alert line because long texts usually cannot be parsed by the log server
    Logger.error("An exception error has occurred")

    # Send an email so that admins can receive errors
    mail_admins(subject, message)
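
Wiring sketch (not part of the snippet above): assuming the standard Celery signals API, a handler with this signature is connected to the task_failure signal, which supplies the sender, task_id, args, kwargs, exception, and einfo keyword arguments it formats.

from celery.signals import task_failure

# Register the handler; Celery will call it whenever any task fails.
task_failure.connect(celery_task_failure_email)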
Example #2
def index(request, obj_id):
    aclbase_obj, error = get_obj_with_check_perm(request.user, ACLBase, obj_id, ACLType.Full)
    if error:
        return error
    target_obj = aclbase_obj.get_subclass_object()

    # Some types of objects (e.g. Attribute) need the object that refers to
    # target_obj in order to show breadcrumb navigation.
    parent_obj = None
    try:
        if isinstance(target_obj, Attribute):
            parent_obj = target_obj.parent_entry
        elif isinstance(target_obj, EntityAttr):
            parent_obj = target_obj.parent_entity
    except StopIteration:
        Logger.warning("failed to get related parent object")

    context = {
        "object": target_obj,
        "parent": parent_obj,
        "acltypes": [{"id": x.id, "name": x.label} for x in ACLType.all()],
        "roles": [
            {
                "id": x.id,
                "name": x.name,
                "description": x.description,
                "current_permission": x.get_current_permission(target_obj),
            }
            for x in Role.objects.filter(is_active=True)
            if request.user.is_superuser or x.is_belonged_to(request.user)
        ],
    }
    return render(request, "edit_acl.html", context)
Example #3
    def is_authenticated(kls, username, password):
        try:
            c = ldap3.Connection(
                CONF_LDAP['SERVER_ADDRESS'],
                user=CONF_LDAP['USER_FILTER'].format(username=username),
                password=password)
            return c.bind()
        except LDAPException as e:
            Logger.error(str(e))

            return False
Example #4
    def run(self, will_delay=True):
        method_table = self.method_table()
        if self.operation not in method_table:
            Logger.error('Job %s has invalid operation type' % self.id)
            return

        # initiate job processing
        method = method_table[self.operation]
        if will_delay:
            return method.delay(self.id)
        else:
            return method(self.id)
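
For context, a sketch of the dispatch table that run() consults. The task names are taken from other examples on this page, but the operation constants, import path, and method body are assumptions:

    def method_table(self):
        # Hypothetical mapping from operation constants to Celery task functions;
        # the real constants and module path in the project may differ.
        from entry.tasks import create_entry_attrs, edit_entry_attrs

        return {
            self.OP_CREATE_ENTRY: create_entry_attrs,
            self.OP_EDIT_ENTRY: edit_entry_attrs,
        }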
Example #5
def index(request, obj_id):
    user = User.objects.get(id=request.user.id)
    aclbase_obj, error = get_object_with_check_permission(
        user, ACLBase, obj_id, ACLType.Full)
    if error:
        return error
    target_obj = aclbase_obj.get_subclass_object()

    # get ACLTypeID of target_obj if a permission is set
    def get_current_permission(member):
        permissions = [
            x for x in member.permissions.all()
            if x.get_objid() == target_obj.id
        ]
        if permissions:
            return permissions[0].get_aclid()
        else:
            return 0

    # Some types of objects (e.g. Attribute) need the object that refers to
    # target_obj in order to show breadcrumb navigation.
    parent_obj = None
    try:
        if isinstance(target_obj, Attribute):
            parent_obj = target_obj.parent_entry
        elif isinstance(target_obj, EntityAttr):
            parent_obj = target_obj.parent_entity
    except StopIteration:
        Logger.warning('failed to get related parent object')

    context = {
        'object': target_obj,
        'parent': parent_obj,
        'acltypes': [{'id': x.id, 'name': x.label} for x in ACLType.all()],
        'members': [{
            'id': x.id,
            'name': x.username,
            'current_permission': get_current_permission(x),
            'type': 'user',
        } for x in User.objects.filter(is_active=True)] + [{
            'id': x.id,
            'name': x.name,
            'current_permission': get_current_permission(x),
            'type': 'group',
        } for x in Group.objects.filter(is_active=True)],
    }
    return render(request, 'edit_acl.html', context)
Example #6
    def _do_import(resource, iter_data):
        results = []
        for data in iter_data:
            try:
                result = resource.import_data_from_request(data, user)

                results.append({'result': result, 'data': data})
            except RuntimeError as e:
                Logger.warning(('(%s) %s ' % (resource, data)) + str(e))

        if results:
            resource.after_import_completion(results)
Example #7
def edit_entry_attrs(self, job_id):
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Update the job status first to prevent this job from being executed twice
        job.update(Job.STATUS["PROCESSING"])

        user = User.objects.get(id=job.user.id)
        entry = Entry.objects.get(id=job.target.id)

        recv_data = json.loads(job.params)

        for info in recv_data["attrs"]:
            if info["id"]:
                attr = Attribute.objects.get(id=info["id"])
            else:
                entity_attr = EntityAttr.objects.get(id=info["entity_attr_id"])
                attr = entry.attrs.filter(schema=entity_attr,
                                          is_active=True).first()
                if not attr:
                    attr = entry.add_attribute_from_base(entity_attr, user)

            try:
                converted_value = _convert_data_value(attr, info)
            except ValueError as e:
                Logger.warning("(%s) attr_data: %s" % (e, str(info)))
                continue

            # Check whether a new value is specified
            if not attr.is_updated(converted_value):
                continue

            # Add a new AttributeValue instance to the Attribute instance
            attr.add_value(user, converted_value)

        if custom_view.is_custom("after_edit_entry", entry.schema.name):
            custom_view.call_custom("after_edit_entry", entry.schema.name,
                                    recv_data, user, entry)

        # update the entry information in Elasticsearch
        entry.register_es()

        # clear the flag indicating that this entry is being edited
        entry.del_status(Entry.STATUS_EDITING)

        # update job status and save it
        job.update(Job.STATUS["DONE"])

        # run a job to notify the entry-update event
        job_notify_event = Job.new_notify_update_entry(user, entry)
        job_notify_event.run()
Example #8
    def is_authenticated(kls, username, password):
        try:
            o = ldap.initialize(CONF_LDAP["SERVER_ADDRESS"])
            o.protocol_version = ldap.VERSION3
            o.simple_bind_s(
                who=CONF_LDAP["USER_FILTER"].format(username=username),
                cred=password)
            o.unbind_s()
            return True
        except ldap.INVALID_CREDENTIALS:
            return False
        except ldap.LDAPError as e:
            Logger.error(str(e))

            return False
Example #9
def main():
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "airone.settings")
    os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")

    try:
        from configurations.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?") from exc

    for extension in settings.AIRONE["EXTENSIONS"]:
        try:
            importlib.import_module("%s.settings" % extension)
        except ImportError:
            Logger.warning("Failed to load settings %s" % extension)

    execute_from_command_line(sys.argv)
Example #10
def celery_task_failure_email(**kwargs):
    """This event handler is for reporting by email when an exception error in celery."""

    subject = "ERROR Celery Task {sender.name}".format(**kwargs)
    message = """
Task Name: {sender.name}
Task ID: {task_id}
Task args: {args}
Task kwargs: {kwargs}

raised exception:
{exception!r}

full traceback:
{einfo}
""".format(**kwargs)

    # Log the full message because email is not sent in the dev environment
    Logger.error(message)
    mail_admins(subject, message)
Example #11
    def authenticate(self, request, username=None, password=None):

        # Check authentication against the local database first.
        user = User.objects.filter(username=username,
                                   authenticate_type=User.AUTH_TYPE_LOCAL,
                                   is_active=True).first()
        if user and user.check_password(password):
            return user
        elif user:
            # This early return avoids sending an authentication request to LDAP
            # when the user exists in the local database but the password is wrong.
            Logger.info("Failed to authenticate user(%s) in local" % username)
            return None

        if not hasattr(settings, "AUTH_CONFIG"):
            Logger.warn(
                '"AUTH_CONFIG" parameter is necessary in airone/settings.py')
            return None

        # If local authentication fails, check it with LDAP server.
        if self.is_authenticated(username, password):
            # This creates an LDAP-authenticated user if necessary. Users
            # authenticated by LDAP are distinguished by the 'authenticate_type'
            # parameter of the User object.
            (user, _) = User.objects.get_or_create(
                **{
                    "username": username,
                    "authenticate_type": User.AUTH_TYPE_LDAP,
                })
        else:
            Logger.info("Failed to authenticate user(%s) in LDAP" % username)

        return user
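
For context (a sketch, not part of the snippet): a backend like this is enabled through Django's AUTHENTICATION_BACKENDS setting; the dotted path below is an assumption for illustration.

# settings.py (sketch)
AUTHENTICATION_BACKENDS = [
    "airone.auth.ldap.LDAPBackend",  # hypothetical module path to the class above
]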
Example #12
def edit_entry_attrs(self, job_id):
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Update the job status first to prevent this job from being executed twice
        job.update(Job.STATUS['PROCESSING'])

        user = User.objects.get(id=job.user.id)
        entry = Entry.objects.get(id=job.target.id)

        recv_data = json.loads(job.params)
        for info in recv_data['attrs']:
            attr = Attribute.objects.get(id=info['id'])

            try:
                converted_value = _convert_data_value(attr, info)
            except ValueError as e:
                Logger.warning('(%s) attr_data: %s' % (e, str(info)))
                continue

            # Check whether a new value is specified
            if not attr.is_updated(converted_value):
                continue

            # Add a new AttributeValue instance to the Attribute instance
            attr.add_value(user, converted_value)

        if custom_view.is_custom("after_edit_entry", entry.schema.name):
            custom_view.call_custom("after_edit_entry", entry.schema.name,
                                    recv_data, user, entry)

        # update the entry information in Elasticsearch
        entry.register_es()

        # clear the flag indicating that this entry is being edited
        entry.del_status(Entry.STATUS_EDITING)

        # update job status and save it
        job.update(Job.STATUS['DONE'])
Example #13
    def authenticate(self, username=None, password=None):
        # Check authentication against the local database first.
        user = User.objects.filter(username=username,
                                   authenticate_type=User.AUTH_TYPE_LOCAL,
                                   is_active=True).first()
        if user and user.check_password(password):
            return user
        elif user:
            # This early return avoids sending an authentication request to LDAP
            # when the user exists in the local database but the password is wrong.
            Logger.info('Failed to authenticate user(%s) in local' % username)
            return None

        if not hasattr(settings, 'AUTH_CONFIG'):
            Logger.warn(
                '"AUTH_CONFIG" parameter is necessary in airone/settings.py')
            return None

        # If local authentication fails, check it with LDAP server.
        try:
            user_dn = None
            with ldap3.Connection(CONF_LDAP['SERVER_ADDRESS'],
                                  auto_bind=True) as conn:
                if conn.search(search_base=CONF_LDAP['BASE_DN'],
                               search_scope=ldap3.SUBTREE,
                               search_filter=CONF_LDAP['SEARCH_FILTER'].format(
                                   username=username)):

                    user_dn = conn.entries[0].entry_dn

            if user_dn:
                with ldap3.Connection(CONF_LDAP['SERVER_ADDRESS'],
                                      user=user_dn,
                                      password=password,
                                      auto_bind=True) as conn:

                    # This creates an LDAP-authenticated user if necessary. Users
                    # authenticated by LDAP are distinguished by the
                    # 'authenticate_type' parameter of the User object.
                    (user, _) = User.objects.update_or_create(
                        **{
                            'username': username,
                            'authenticate_type': User.AUTH_TYPE_LDAP,
                        })
                    return user

        except ldap_exceptions.LDAPException as e:
            Logger.warn('Failed to authenticate user(%s) in LDAP server(%s)' %
                        (username, e))
Example #14
def make_search_results(
    user: User,
    res: Dict[str, Any],
    hint_attrs: List[Dict[str, str]],
    limit: int,
    hint_referral: str,
) -> Dict[str, str]:
    """Acquires and returns the attribute values held by each search result

    When a referral-entry condition is specified, the referred entries are
    retrieved. The attribute names and attribute values that matched the
    condition are also obtained.

    Do the following:
    1. Keep a list of IDs of all entries that were found by Elasticsearch.
    2. If a referral-entry filter has been entered, perform the following
       processing; otherwise, get the Entry objects directly from the
       Elasticsearch search results.

       2-1. If only blank characters are entered as the referral filter,
            keep only entries that are not referred to by any other entry.
       2-2. Otherwise, keep only entries that are referred to by entries
            whose name contains the filter string.
       2-3. Get the Entry objects from the entry IDs obtained above.

    3. Get the attributes of each entry, up to the maximum number of
       displayed items, from the Elasticsearch search results.
    4. For each attribute of an acquired entry, extract the attribute value
       according to the attribute type.
    5. When all entries have been processed, return the search results.

    Args:
        user (User): User who executes the search
        res (dict[str, Any]): Search results returned by Elasticsearch
        hint_attrs (list(dict[str, str])): A list of attribute names and search keywords
        limit (int): Maximum number of search results to return
        hint_referral (str): Input value used to filter by referral entries.
            Used only for advanced searches.

    Returns:
        dict[str, str]: A set of attributes and attribute values associated with the entry
            that was hit in the search

    """
    from entry.models import Entry, AttributeValue

    # set numbers of found entries
    results = {
        "ret_count": res["hits"]["total"],
        "ret_values": [],
    }

    # get django objects from the hit information from Elasticsearch
    hit_entry_ids = [x["_id"] for x in res["hits"]["hits"]]
    if isinstance(hint_referral, str) and hint_referral:
        # When the hint_referral parameter is specified, keep only the results
        # that are referred to by entries matching that condition.

        if (CONFIG.EMPTY_SEARCH_CHARACTER == hint_referral
                or CONFIG.EMPTY_SEARCH_CHARACTER_CODE == hint_referral):

            hit_entry_ids_num = [int(x) for x in hit_entry_ids]
            filtered_ids = set(hit_entry_ids_num) - set(
                AttributeValue.objects.filter(
                    Q(
                        referral__id__in=hit_entry_ids,
                        parent_attr__is_active=True,
                        is_latest=True,
                    )
                    | Q(
                        referral__id__in=hit_entry_ids,
                        parent_attr__is_active=True,
                        parent_attrv__is_latest=True,
                    )).values_list("referral_id", flat=True))

        else:

            filtered_ids = AttributeValue.objects.filter(
                Q(
                    parent_attr__parent_entry__name__iregex=
                    prepend_escape_character(
                        CONFIG.ESCAPE_CHARACTERS_REFERRALS_ENTRY,
                        hint_referral),
                    referral__id__in=hit_entry_ids,
                    is_latest=True,
                )
                | Q(
                    parent_attr__parent_entry__name__iregex=
                    prepend_escape_character(
                        CONFIG.ESCAPE_CHARACTERS_REFERRALS_ENTRY,
                        hint_referral),
                    referral__id__in=hit_entry_ids,
                    parent_attrv__is_latest=True,
                )).values_list("referral", flat=True)

        hit_entries = Entry.objects.filter(pk__in=filtered_ids, is_active=True)

        # reset the matched count to the number of results filtered by the hint_referral parameter
        results["ret_count"] = len(hit_entries)
    else:
        hit_entries = Entry.objects.filter(id__in=hit_entry_ids,
                                           is_active=True)

    hit_infos: Dict = {}
    for entry in hit_entries:
        if len(hit_infos) >= limit:
            break

        hit_infos[entry] = [
            x["_source"] for x in res["hits"]["hits"]
            if int(x["_id"]) == entry.id
        ][0]

    for (entry, entry_info) in sorted(hit_infos.items(),
                                      key=lambda x: x[0].name):
        ret_info: Dict[str, Any] = {
            "entity": {
                "id": entry.schema.id,
                "name": entry.schema.name
            },
            "entry": {
                "id": entry.id,
                "name": entry.name
            },
            "attrs": {},
        }

        # When the 'hint_referral' parameter is specified, return referred entries for each result
        if hint_referral is not False:
            ret_info["referrals"] = [{
                "id": x.id,
                "name": x.name,
                "schema": x.schema.name,
            } for x in entry.get_referred_objects()]

        # Check whether the user has permission to read the Entry
        if entry_info["is_readble"] or user.has_permission(
                entry, ACLType.Readable):
            ret_info["is_readble"] = True
        else:
            ret_info["is_readble"] = False
            results["ret_values"].append(ret_info)
            continue

        # format attribute values according to their type
        for attrinfo in entry_info["attr"]:
            # Skip attributes other than the target ones
            if attrinfo["name"] not in [x["name"] for x in hint_attrs]:
                continue

            if attrinfo["name"] in ret_info["attrs"]:
                ret_attrinfo = ret_info["attrs"][attrinfo["name"]]
            else:
                ret_attrinfo = ret_info["attrs"][attrinfo["name"]] = {}

            # if the target attribute is an array type, its values are stored in a list
            if attrinfo["name"] not in ret_info["attrs"]:
                if attrinfo["type"] & AttrTypeValue["array"]:
                    ret_info["attrs"][attrinfo["name"]] = []
                else:
                    ret_info["attrs"][attrinfo["name"]] = ret_attrinfo

            # Check whether the user has permission to read the EntityAttr
            if attrinfo["name"] not in [
                    x["name"] for x in hint_attrs if x["is_readble"]
            ]:
                ret_attrinfo["is_readble"] = False
                continue

            # Check whether the user has permission to read the Attribute
            if not attrinfo["is_readble"]:
                attr = entry.attrs.filter(schema__name=attrinfo["name"],
                                          is_active=True).first()
                if not attr:
                    Logger.warning(
                        "Non exist Attribute (entry:%s, name:%s) is registered in ESS."
                        % (entry.id, attrinfo["name"]))
                    continue

                if not user.has_permission(attr, ACLType.Readable):
                    ret_attrinfo["is_readble"] = False
                    continue

            ret_attrinfo["is_readble"] = True

            ret_attrinfo["type"] = attrinfo["type"]
            if (attrinfo["type"] == AttrTypeValue["string"]
                    or attrinfo["type"] == AttrTypeValue["text"]):

                if attrinfo["value"]:
                    ret_attrinfo["value"] = attrinfo["value"]
                elif "date_value" in attrinfo and attrinfo["date_value"]:
                    ret_attrinfo["value"] = attrinfo["date_value"].split(
                        "T")[0]

            elif attrinfo["type"] == AttrTypeValue["boolean"]:
                ret_attrinfo["value"] = attrinfo["value"]

            elif attrinfo["type"] == AttrTypeValue["date"]:
                ret_attrinfo["value"] = attrinfo["date_value"]

            elif (attrinfo["type"] == AttrTypeValue["object"]
                  or attrinfo["type"] == AttrTypeValue["group"]):
                ret_attrinfo["value"] = {
                    "id": attrinfo["referral_id"],
                    "name": attrinfo["value"],
                }

            elif attrinfo["type"] == AttrTypeValue["named_object"]:
                if attrinfo["key"] == attrinfo["value"] == attrinfo[
                        "referral_id"] == "":
                    continue
                ret_attrinfo["value"] = {
                    attrinfo["key"]: {
                        "id": attrinfo["referral_id"],
                        "name": attrinfo["value"],
                    }
                }

            elif attrinfo["type"] & AttrTypeValue["array"]:
                if "value" not in ret_attrinfo:
                    ret_attrinfo["value"] = []

                # If there is no value, it will be skipped.
                if attrinfo["key"] == attrinfo["value"] == attrinfo[
                        "referral_id"] == "":
                    if "date_value" not in attrinfo:
                        continue

                if attrinfo["type"] & AttrTypeValue["named"]:
                    ret_attrinfo["value"].append({
                        attrinfo["key"]: {
                            "id": attrinfo["referral_id"],
                            "name": attrinfo["value"],
                        }
                    })

                elif attrinfo["type"] & AttrTypeValue["string"]:
                    if "date_value" in attrinfo:
                        ret_attrinfo["value"].append(
                            attrinfo["date_value"].split("T")[0])
                    else:
                        ret_attrinfo["value"].append(attrinfo["value"])

                elif attrinfo["type"] & (AttrTypeValue["object"]
                                         | AttrTypeValue["group"]):
                    ret_attrinfo["value"].append({
                        "id": attrinfo["referral_id"],
                        "name": attrinfo["value"]
                    })

        results["ret_values"].append(ret_info)

    return results
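
For reference, a minimal illustration of the Elasticsearch response shape this function reads, inferred from the keys accessed above; all names and values are made up.

# Illustrative only: the fields make_search_results() reads from `res`.
res = {
    "hits": {
        "total": 1,
        "hits": [
            {
                "_id": "123",                  # Entry primary key as a string
                "_source": {
                    "is_readble": True,        # note: key is spelled this way in the index
                    "attr": [
                        {
                            "name": "hostname",
                            "type": 0,         # one of the AttrTypeValue constants
                            "key": "",
                            "value": "web01",
                            "referral_id": "",
                            "is_readble": True,
                        },
                    ],
                },
            },
        ],
    },
}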
Example #15
    'ENABLE_PROFILE': True,
    'CONCURRENCY': 1,
    'VERSION': 'unknown',
    'FILE_STORE_PATH': '/tmp/airone_app',
    'AUTO_COMPLEMENT_USER': '******',
    'DB_SLAVES': ['default'],
    'DB_MASTER': 'default',
    'EXTENSIONS': [],
}

# load extension settings individually
for extension in AIRONE['EXTENSIONS']:
    try:
        importlib.import_module('%s.settings' % extension)
    except ImportError:
        Logger.warning('Failed to load settings %s' % extension)

try:
    proc = subprocess.Popen("cd %s && git describe --tags" % BASE_DIR, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    outs, errs = proc.communicate(timeout=1)
    # if `git describe --tags` prints some string to stdout, use the result as version
    # else use 'unknown' as version (e.g. untagged git repository)
    if outs != b'':
        AIRONE['VERSION'] = outs.strip()
    else:
        logging.getLogger(__name__).warning('could not describe airone version from git')

    # create a directory to store temporary file for applications
    if not os.path.exists(AIRONE['FILE_STORE_PATH']):
        os.makedirs(AIRONE['FILE_STORE_PATH'])
Example #16
"""
WSGI config for airone project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""

import importlib
import os

from configurations.wsgi import get_wsgi_application
from django.conf import settings

from airone.lib.log import Logger

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "airone.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")

for extension in settings.AIRONE["EXTENSIONS"]:
    try:
        importlib.import_module("%s.settings" % extension)
    except ImportError:
        Logger.warning("Failed to load settings %s" % extension)

application = get_wsgi_application()
Example #17
from django.conf.urls import url

from airone.lib.log import Logger
from group import views as group_views
from user import views

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^edit/(\d+)$', views.edit, name='edit'),
    url(r'^do_edit/(\d+)$', views.do_edit, name='do_edit'),
    url(r'^edit_passwd/(\d+)$', views.edit_passwd, name='edit_passwd'),
    url(r'^do_edit_passwd/(\d+)$', views.do_edit_passwd,
        name='do_edit_passwd'),
    url(r'^do_su_edit_passwd/(\d+)$',
        views.do_su_edit_passwd,
        name='do_su_edit_passwd'),
    url(r'^create$', views.create, name='create'),
    url(r'^do_create$', views.do_create, name='do_create'),
    url(r'^do_delete/(\d+)$', views.do_delete, name='do_delete'),
    url(r'^export/$', group_views.export, name='export'),
]

try:
    from custom_view.user.urls import override_urlpatterns

    urlpatterns = override_urlpatterns(urlpatterns)
except ImportError:
    Logger.info("There is no URL dispatcher of custom-view")
Example #18
def create_entry_attrs(self, job_id):
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Update the job status first to prevent this job from being executed twice
        job.update(Job.STATUS['PROCESSING'])

        user = User.objects.filter(id=job.user.id).first()
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if not entry or not user:
            # Abort when specified entry doesn't exist
            job.update(Job.STATUS['CANCELED'])
            return

        recv_data = json.loads(job.params)
        # Create new Attribute objects based on the specified values
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            # skip for unpermitted attributes
            if not entity_attr.is_active or not user.has_permission(
                    entity_attr, ACLType.Readable):
                continue

            # This creates an Attribute object that contains AttributeValues.
            # add_attribute_from_base may return None when the target Attribute
            # instance has already been created or is being created by another
            # process. In that case, this job does nothing for that Attribute instance.
            attr = entry.add_attribute_from_base(entity_attr, user)
            if not attr or not any(
                [int(x['id']) == attr.schema.id for x in recv_data['attrs']]):
                continue

            # When job is canceled during this processing, abort it after deleting the created entry
            if job.is_canceled():
                entry.delete()
                return

            # make an initial AttributeValue object if the initial value is specified
            attr_data = [
                x for x in recv_data['attrs'] if int(x['id']) == attr.schema.id
            ][0]

            # register new AttributeValue to the "attr"
            try:
                attr.add_value(user, _convert_data_value(attr, attr_data))
            except ValueError as e:
                Logger.warning('(%s) attr_data: %s' % (e, str(attr_data)))

        # Delete duplicate attrs because this processing may execute concurrently
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            if entry.attrs.filter(schema=entity_attr,
                                  is_active=True).count() > 1:
                query = entry.attrs.filter(schema=entity_attr, is_active=True)
                query.exclude(id=query.first().id).delete()

        if custom_view.is_custom("after_create_entry", entry.schema.name):
            custom_view.call_custom("after_create_entry", entry.schema.name,
                                    recv_data, user, entry)

        # register entry information to Elasticsearch
        entry.register_es()

        # clear the flag indicating that this entry is being created
        entry.del_status(Entry.STATUS_CREATING)

        # update the job status and save it unless the target job has been canceled
        if not job.is_canceled():
            job.update(Job.STATUS['DONE'])

    elif job.is_canceled():
        # When job is canceled before starting, created entry should be deleted.
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if entry:
            entry.delete()
Example #19
    def check(self, msg=''):
        if self._is_enable():
            AIRONE_LOGGER.info('(Profiling result: %fs) %s' %
                               (time() - self.start_time, msg))
Example #20
from django.conf.urls import url, include

from . import views
from .user import views as user_views
from .entity.urls import urlpatterns as entity_urlpatterns
from .entry.urls import urlpatterns as entry_urlpatterns
from .job.urls import urlpatterns as job_urlpatterns
from airone.lib.log import Logger

urlpatterns = [
    url(r'^user/access_token$', user_views.AccessTokenAPI.as_view()),
    url(r'^entity/', include(entity_urlpatterns)),
    url(r'^entry/', include(entry_urlpatterns)),
    url(r'^job/', include(job_urlpatterns)),
]

# Custom view is prioritized to handle if it exists.
try:
    from custom_view.api_v1.urls import urlpatterns as custom_patterns
    urlpatterns.append(url(r'^advanced/', include(custom_patterns)))
except ImportError:
    Logger.info('advanced API endpoints are unavailable')

try:
    from custom_view.api_v1 import views as custom_views
    urlpatterns.append(url(r'^entry$', custom_views.CustomEntryAPI.as_view()))
except ImportError:
    urlpatterns.append(url(r'^entry$', views.EntryAPI.as_view()))
Example #21
from django.conf.urls import include, url

from airone.lib.log import Logger

from . import views
from .entity.urls import urlpatterns as entity_urlpatterns
from .entry.urls import urlpatterns as entry_urlpatterns
from .job.urls import urlpatterns as job_urlpatterns
from .user import views as user_views

urlpatterns = [
    url(r"^user/access_token$", user_views.AccessTokenAPI.as_view()),
    url(r"^entity/", include(entity_urlpatterns)),
    url(r"^entry/", include(entry_urlpatterns)),
    url(r"^job/", include(job_urlpatterns)),
]

# Custom view is prioritized to handle if it exists.
try:
    from custom_view.api_v1.urls import urlpatterns as custom_patterns

    urlpatterns.append(url(r"^advanced/", include(custom_patterns)))
except ImportError:
    Logger.info("advanced API endpoints are unavailable")

try:
    from custom_view.api_v1 import views as custom_views

    urlpatterns.append(url(r"^entry$", custom_views.CustomEntryAPI.as_view()))
except ImportError:
    urlpatterns.append(url(r"^entry$", views.EntryAPI.as_view()))
Example #22
def _do_import_entries(job):
    user = job.user

    entity = Entity.objects.get(id=job.target.id)
    if not user.has_permission(entity, ACLType.Writable):
        job.update(
            **{
                'status':
                Job.STATUS['ERROR'],
                'text':
                'Permission denied to import. '
                'You need Writable permission for "%s"' % entity.name
            })
        return

    whole_data = json.loads(job.params).get(entity.name)
    if not whole_data:
        job.update(
            **{
                'status': Job.STATUS['ERROR'],
                'text': 'Uploaded file has no entry data of %s' % entity.name
            })
        return

    # resolve the custom_view handler once to avoid calling the check method on every loop iteration
    custom_view_handler = None
    if custom_view.is_custom("after_import_entry", entity.name):
        custom_view_handler = 'after_import_entry'

    job.update(Job.STATUS['PROCESSING'])

    total_count = len(whole_data)
    # create or update entry
    for (index, entry_data) in enumerate(whole_data):
        job.text = 'Now importing... (progress: [%5d/%5d])' % (index + 1,
                                                               total_count)
        job.save(update_fields=['text'])

        # abort processing when job is canceled
        if job.is_canceled():
            return

        entry = Entry.objects.filter(name=entry_data['name'],
                                     schema=entity).first()
        if not entry:
            entry = Entry.objects.create(name=entry_data['name'],
                                         schema=entity,
                                         created_user=user)

            # create job to notify create event to the WebHook URL
            job_notify = Job.new_notify_create_entry(user, entry)

        elif not user.has_permission(entry, ACLType.Writable):
            continue

        else:
            # create job to notify edit event to the WebHook URL
            job_notify = Job.new_notify_update_entry(user, entry)

        entry.complement_attrs(user)
        for attr_name, value in entry_data['attrs'].items():
            # If the user doesn't have readable permission for the target
            # Attribute, it won't have been created, so skip it.
            if not entry.attrs.filter(schema__name=attr_name).exists():
                continue

            # There should be only one EntityAttr that is specified by name and Entity.
            # If there are multiple EntityAttrs, it is an abnormal situation. In that
            # case, abort import processing for this entry and report it as an error.
            attr_query = entry.attrs.filter(schema__name=attr_name,
                                            is_active=True,
                                            schema__parent_entity=entry.schema)
            if attr_query.count() > 1:
                Logger.error(
                    '[task.import_entry] Abnormal entry was detected(%s:%d)' %
                    (entry.name, entry.id))
                break

            attr = attr_query.last()
            if (not user.has_permission(attr.schema, ACLType.Writable)
                    or not user.has_permission(attr, ACLType.Writable)):
                continue

            input_value = attr.convert_value_to_register(value)
            if user.has_permission(
                    attr.schema,
                    ACLType.Writable) and attr.is_updated(input_value):
                attr.add_value(user, input_value)

            # call custom-view processing corresponding to import entry
            if custom_view_handler:
                custom_view.call_custom(custom_view_handler, entity.name, user,
                                        entry, attr, value)

        # register entry to the Elasticsearch
        entry.register_es()

        # run notification job
        job_notify.run()

    if not job.is_canceled():
        job.update(status=Job.STATUS['DONE'], text='')
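
For reference, a minimal illustration of the payload that job.params is expected to contain, inferred from the keys read in the loop above; entity, entry, and attribute names are made up.

import json

# Illustrative only: JSON keyed by the Entity name, each item describing one
# entry to create or update with its attribute values.
example_params = json.dumps({
    "Server": [
        {
            "name": "web01",
            "attrs": {
                "ip_addr": "192.0.2.10",
                "owner": "alice",
            },
        },
    ],
})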