def delete_multiple_resources(request, group_id):
    """Detach each posted resource from the given group.

    Reads the "collection[]" id list from the POST body.  For every node,
    drops ``group_id`` (and the requesting author's own group id) from the
    node's ``group_set``.  A node left with no group, or one that still has
    a non-empty ``collection_set``, is parked in the "Trash" group and its
    status set to DELETED.  Responds with the original id list as JSON.
    """
    resource_ids = request.POST.getlist("collection[]", '')
    author = node_collection.one({
        '_type': 'Author',
        'name': unicode(request.user.username)
    })
    group_obj = node_collection.find_one({"_id": ObjectId(group_id)})
    trash_group = node_collection.find_one({"name": "Trash"})
    files_list_obj = []  # kept for parity with the original implementation
    group_oid = ObjectId(group_id)
    for resource_id in resource_ids:
        node_obj = node_collection.find_one({"_id": ObjectId(resource_id)})
        if group_oid in node_obj.group_set:
            node_obj.group_set.remove(group_oid)
        if ObjectId(author._id) in node_obj.group_set:
            node_obj.group_set.remove(ObjectId(author._id))
        node_obj.save()
        if not node_obj.group_set:
            # Orphaned node: move it to the Trash group and flag it.
            if trash_group._id not in node_obj.group_set:
                node_obj.group_set.append(trash_group._id)
                node_obj.status = u"DELETED"
        if node_obj.collection_set:
            if trash_group._id not in node_obj.group_set:
                node_obj.group_set.append(trash_group._id)
                node_obj.status = u"DELETED"
        node_obj.save()
    return HttpResponse(json.dumps(resource_ids))
def delete_multiple_resources(request,group_id):
    # NOTE(review): byte-for-byte duplicate of the delete_multiple_resources
    # defined immediately above; being the later definition, this copy wins
    # at import time and shadows the other.  Consider removing one of them.
    """Detach each posted resource ("collection[]" ids) from ``group_id``.

    Nodes left with an empty group_set, or carrying a collection_set, are
    moved to the "Trash" group and marked DELETED.  Returns the posted id
    list serialized as JSON.
    """
    files_list = request.POST.getlist("collection[]", '')
    auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    group_obj = node_collection.find_one({"_id":ObjectId(group_id)})  # fetched but unused
    trash_group = node_collection.find_one({"name":"Trash"})
    files_list_obj = []  # never populated; dead variable
    for each in files_list:
        node_obj = node_collection.find_one({"_id":ObjectId(each)})
        if ObjectId(group_id) in node_obj.group_set:
            node_obj.group_set.remove(ObjectId(group_id))
        # Also detach from the requesting author's personal group, if present.
        if ObjectId(auth._id) in node_obj.group_set:
            node_obj.group_set.remove(ObjectId(auth._id))
        node_obj.save()
        if not node_obj.group_set:
            # Add Trash group _id to node_obj's group_set
            if trash_group._id not in node_obj.group_set:
                node_obj.group_set.append(trash_group._id)
                node_obj.status = u"DELETED"
        if node_obj.collection_set:
            if trash_group._id not in node_obj.group_set:
                node_obj.group_set.append(trash_group._id)
                node_obj.status = u"DELETED"
        node_obj.save()
    return HttpResponse(json.dumps(files_list))
def delete_task(request, group_name, _id): """This method will delete task object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_name) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_name}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_name) except: group_name, group_id = get_group_name_id(group_name) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({ '_type': 'GAttribute', 'subject': node._id }) relations = triple_collection.find({ '_type': 'GRelation', 'subject': node._id }) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() if len(node.post_node) > 0: for each in node.post_node: sys_each_postnode = node_collection.find_one({'_id': each}) member_of_name = node_collection.find_one({ '_id': sys_each_postnode.member_of[0] }).name if member_of_name == "Task": sys_each_postnode.prior_node.remove(node._id) sys_each_postnode.save(groupid=group_id) if member_of_name == "task_update_history": sys_each_postnode.delete(groupid=group_id) node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect( reverse('task', kwargs={'group_name': group_name}))
def trash_resource(request,group_id,node_id):
    ''' Delete Action.
    This method removes the group_id from the node's group_set.
    Iff node's group_set is empty, send to Trash group.
    '''
    auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    gst_base_unit = node_collection.one({'_type': 'GSystemType', 'name': 'base_unit'})
    node_obj = node_collection.find_one({"_id":ObjectId(node_id)})
    group_obj = node_collection.find_one({"_id":ObjectId(group_id)})
    trash_group = node_collection.find_one({"name":"Trash"});
    response_dict = {}
    response_dict['success'] = False
    # Already in Trash: this is a second delete -- purge the node for real.
    if trash_group._id in node_obj.group_set:
        try:
            if node_obj._id:
                delete_node(ObjectId(node_obj._id),deletion_type=1)
                response_dict['success'] = True
        except Exception as e:
            # NOTE(review): failure is silently swallowed; caller only sees
            # success=False in the JSON payload.
            pass
        return HttpResponse(json.dumps(response_dict))
    # Detach the node from this group and from the author's personal group.
    if ObjectId(group_id) in node_obj.group_set:
        node_obj.group_set.remove(ObjectId(group_id))
    if ObjectId(auth._id) in node_obj.group_set:
        node_obj.group_set.remove(ObjectId(auth._id))
    node_obj.save()
    if not node_obj.group_set:
        # Add Trash group _id to node_obj's group_set
        if trash_group._id not in node_obj.group_set:
            node_obj.group_set.append(trash_group._id)
            node_obj.status = u"DELETED"
    if node_obj.collection_set:
        if trash_group._id not in node_obj.group_set:
            node_obj.group_set.append(trash_group._id)
            node_obj.status = u"DELETED"
    node_obj.save()
    # Response shape depends on where the delete was triggered from:
    # plain "True" for base-unit content, a redirect for Pages, JSON for
    # Files and CourseEventGroup contexts, else back to the dashboard.
    if gst_base_unit._id in node_obj.group_set:
        return HttpResponse("True")
    elif "Page" in node_obj.member_of_names_list and not "CourseEventGroup" in group_obj.member_of_names_list:
        return HttpResponseRedirect(reverse('page', kwargs={'group_id': group_id}))
    elif "File" in node_obj.member_of_names_list and not "CourseEventGroup" in group_obj.member_of_names_list:
        # NOTE(review): response_dict still says success=False here even
        # though the node was trashed -- confirm whether callers rely on it.
        return HttpResponse(json.dumps(response_dict))
    elif "CourseEventGroup" in group_obj.member_of_names_list:
        response_dict = {'success': True }
        return HttpResponse(json.dumps(response_dict))
    else:
        return HttpResponseRedirect(reverse('group_dashboard', kwargs={'group_id': group_id}))
def adminDashboardEdit(request):
    """Edit an admin-dashboard object from a posted JSON description.

    Expects an AJAX POST carrying ``objectjson``: a JSON object with the
    node ``id``, its new ``fields.title``, and optional multi-valued fields
    (group, member_of, collection_set, attribute_type_set,
    relation_type_set), each encoded as a comma separated string of
    "Label - <ObjectId>" entries.

    Returns a StreamingHttpResponse with a success message, the error text
    on failure, or an empty body for non-AJAX requests.
    """
    def _oid_list(value):
        # "Label - <id>,Label - <id>" -> [ObjectId(id), ...]; skips blanks.
        return [ObjectId(chunk.split(" ")[-1])
                for chunk in value.split(",") if chunk]

    def _node_list(value):
        # Same parsing, but resolves each id to its node document.
        return [node_collection.find_one(ObjectId(chunk.split(" ")[-1]))
                for chunk in value.split(",") if chunk]

    try:
        if request.is_ajax() and request.method == "POST":
            objectjson = json.loads(request.POST['objectjson'])
            node = node_collection.one({'_id': ObjectId(objectjson['id'])})
            node.name = objectjson['fields']['title']
            for key, value in objectjson['fields'].items():
                if key == "group":
                    node['group_set'] = _oid_list(value)
                if key == "member_of":
                    node['member_of'] = _oid_list(value)
                if key == "collection_set":
                    node['collection_set'] = _oid_list(value)
                if key == "attribute_type_set":
                    node['attribute_type_set'] = _node_list(value)
                if key == "relation_type_set":
                    node['relation_type_set'] = _node_list(value)
            # BUG FIX: the original called node.save(groupid=group_id), but
            # no ``group_id`` exists in this scope (NameError at save time).
            # Use the node's own first group, if any.
            groupid = node['group_set'][0] if node['group_set'] else None
            node.save(groupid=groupid)
            return StreamingHttpResponse(node.name + " edited successfully")
        # Non-AJAX/GET requests previously fell through returning None,
        # which Django rejects; answer with an empty response instead.
        return StreamingHttpResponse("")
    except Exception as e:
        # str(e): StreamingHttpResponse expects an iterable of strings,
        # not the exception object itself.
        return StreamingHttpResponse(str(e))
def adminDashboardEdit(request):
    '''edit class's objects

    Duplicate definition of adminDashboardEdit (also present earlier in this
    file).  Expects an AJAX POST with an ``objectjson`` payload: the node
    id, its new title, and multi-valued fields each encoded as a comma
    separated string of "Label - <ObjectId>" entries.  Responds with a
    StreamingHttpResponse (success message or error text).
    '''
    try:
        if request.is_ajax() and request.method == "POST":
            objectjson = json.loads(request.POST['objectjson'])
            node = node_collection.one({'_id': ObjectId(objectjson['id'])})
            node.name = objectjson['fields']['title']
            for key, value in objectjson['fields'].items():
                if key == "group":
                    typelist = []
                    for eachvalue in value.split(","):
                        if eachvalue:
                            typelist.append(ObjectId(eachvalue.split(" ")[-1]))
                    node['group_set'] = typelist
                if key == "member_of":
                    typelist = []
                    for eachvalue in value.split(","):
                        if eachvalue:
                            typelist.append(ObjectId(eachvalue.split(" ")[-1]))
                    node['member_of'] = typelist
                if key == "collection_set":
                    typelist = []
                    for eachvalue in value.split(","):
                        if eachvalue:
                            typelist.append(ObjectId(eachvalue.split(" ")[-1]))
                    node['collection_set'] = typelist
                if key == "attribute_type_set":
                    # These two sets store full node documents, not bare ids.
                    typelist = []
                    for eachvalue in value.split(","):
                        if eachvalue:
                            typelist.append(node_collection.find_one(
                                ObjectId(eachvalue.split(" ")[-1])))
                    node['attribute_type_set'] = typelist
                if key == "relation_type_set":
                    typelist = []
                    for eachvalue in value.split(","):
                        if eachvalue:
                            typelist.append(node_collection.find_one(
                                ObjectId(eachvalue.split(" ")[-1])))
                    node['relation_type_set'] = typelist
            # BUG FIX: original referenced an undefined ``group_id``
            # (NameError).  Fall back to the node's own first group.
            groupid = node['group_set'][0] if node['group_set'] else None
            node.save(groupid=groupid)
            return StreamingHttpResponse(node.name + " edited successfully")
        # Previously fell through returning None for non-AJAX requests.
        return StreamingHttpResponse("")
    except Exception as e:
        # BUG FIX: pass the message, not the exception object --
        # StreamingHttpResponse needs an iterable of strings.
        return StreamingHttpResponse(str(e))
def delete_task(request, group_name, _id): """This method will delete task object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_name) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_name}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_name) except: group_name, group_id = get_group_name_id(group_name) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({'_type': 'GAttribute', 'subject': node._id}) relations = triple_collection.find({'_type': 'GRelation', 'subject': node._id}) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() if len(node.post_node) > 0 : for each in node.post_node : sys_each_postnode = node_collection.find_one({'_id': each}) member_of_name = node_collection.find_one({'_id': sys_each_postnode.member_of[0]}).name if member_of_name == "Task" : sys_each_postnode.prior_node.remove(node._id) sys_each_postnode.save(groupid=group_id) if member_of_name == "task_update_history": sys_each_postnode.delete(groupid=group_id) node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(reverse('task', kwargs={'group_name': group_name }))
def videoDashboard(request, group_id, video_id):
    """Render the video dashboard for a group.

    ``group_id`` may be an ObjectId string or a group name; ``video_id``
    defaults to the "Video" GSystemType when absent.  Lists every File
    node in the group that is a member of that type.
    """
    try:
        group_id = ObjectId(group_id)
    except:
        # Not a raw ObjectId -- treat it as a group name and look it up.
        group_name, group_id = get_group_name_id(group_id)

    if video_id is None:
        video_gst = node_collection.find_one({'_type': "GSystemType",
                                              "name": "Video"})
        if video_gst:
            video_id = str(video_gst._id)

    uploaded_ids = request.GET.getlist('var', "")
    video_nodes = node_collection.find({
        '_type': 'File',
        'member_of': {'$all': [ObjectId(video_id)]},
        'group_set': {'$all': [group_id]},
    })
    context = RequestContext(request, {
        'videoCollection': video_nodes,
        'already_uploaded': uploaded_ids,
        'newgroup': group_id,
    })
    return render_to_response("ndf/videoDashboard.html", context)
def graphs(request, group_id):
    """Render the graph-visualization page for a group.

    ``group_id`` may be a real ObjectId string or a group name; when it is
    not a valid ObjectId it is resolved first as a Group name and then, as
    a fallback, as the requesting user's Author node id.
    """
    if not ObjectId.is_valid(group_id):
        group_node = node_collection.find_one({'_type': "Group",
                                               "name": group_id})
        author = node_collection.one({
            '_type': 'Author',
            'name': unicode(request.user.username)
        })
        if group_node:
            group_id = str(group_node._id)
        elif author:
            group_id = str(author._id)
    payload = {'group_id': group_id, 'groupid': group_id}
    return render_to_response("ndf/visualize.html", payload,
                              context_instance=RequestContext(request))
def process_request(self, request):
    """Attach the current user's Author node to the request.

    Middleware hook: looks up the Author document whose ``created_by``
    matches the logged-in user's id and stores it as ``request.author``
    (None when no such Author exists).
    """
    author_query = {'_type': 'Author', 'created_by': request.user.id}
    request.author = node_collection.find_one(author_query)
def adminDashboardClass(request, class_name="GSystem"):
    ''' fetching class's objects

    Builds the admin dashboard listing for one node class.  POST performs a
    case-insensitive name search within the posted class; GET lists all
    nodes of ``class_name``.  The per-row detail dict shape depends on the
    class (GSystem/File rows carry Group, GAttribute/GRelation rows are
    minimal, everything else carries the type/collection/attribute/relation
    summaries).
    '''
    group_set = ""
    if request.method=="POST":
        search = request.POST.get("search","")
        classtype = request.POST.get("class","")
        nodes = node_collection.find({'name':{'$regex':search, '$options': 'i' },'_type':classtype})
    else:
        nodes = node_collection.find({'_type':class_name})
    objects_details = []
    for each in nodes:
        member = []
        # (duplicate initialisation kept from the original)
        member = []
        member_of_list = []
        collection_list = []
        attribute_type_set = []
        relation_type_set = []
        if class_name == "GSystemType":
            # Resolve referenced ids to display names for the detail columns.
            for members in each.member_of:
                member.append(node_collection.one({ '_id': members}).name)
            for coll in each.collection_set:
                collection_list.append(node_collection.one({ '_id': coll}).name)
            for at_set in each.attribute_type_set:
                attribute_type_set.append(at_set.name)
            for rt_set in each.relation_type_set:
                relation_type_set.append(rt_set.name)
        if class_name in ("GSystem","File"):
            # NOTE(review): find_one is issued twice per group id (filter +
            # value) -- works, but doubles the lookups.
            group_set = [node_collection.find_one({"_id":eachgroup}).name for eachgroup in each.group_set if node_collection.find_one({"_id":eachgroup}) ]
            objects_details.append({"Id":each._id,"Title":each.name,"Type":", ".join(member),"Author":User.objects.get(id=each.created_by).username,"Group":", ".join(group_set),"Creation":each.created_at})
        elif class_name in ("GAttribute","GRelation"):
            objects_details.append({"Id":each._id,"Title":each.name,"Type":"","Author":"","Creation":""})
        else:
            objects_details.append({"Id":each._id,"Title":each.name,"Type":", ".join(member),"Author":User.objects.get(id=each.created_by).username,"Creation":each.created_at,'member_of':", ".join(member_of_list), "collection_list":", ".join(collection_list), "attribute_type_set":", ".join(attribute_type_set), "relation_type_set":", ".join(relation_type_set)})
    # Sidebar data: all groups and all system types.
    groups = []
    group = node_collection.find({'_type':"Group"})
    for each in group:
        groups.append({'id':each._id,"title":each.name})
    systemtypes = []
    systemtype = node_collection.find({'_type':"GSystemType"})
    for each in systemtype:
        systemtypes.append({'id':each._id,"title":each.name})
    # Default group context: the "home" group, when present.
    groupid = ""
    group_obj= node_collection.find({'$and':[{"_type":u'Group'},{"name":u'home'}]})
    if group_obj:
        groupid = str(group_obj[0]._id)
    template = "ndf/adminDashboard.html"
    variable = RequestContext(request, {'class_name':class_name, "nodes":objects_details, "Groups":groups, "systemtypes":systemtypes, "url":"data", "groupid":groupid})
    return render_to_response(template, variable)
def find_file_from_media_url(source_attr):
    """Resolve a media/readDoc URL to its File node and export its details.

    Used while taking a dump: "media/..." paths are matched against the
    original/mid/thumbnail relative urls stored under ``if_file``;
    "readDoc/<id>/..." paths are resolved by node id.  On a hit, the node
    is handed to get_file_node_details().  Progress and errors go to the
    module-level ``log_file``.
    """
    try:
        global log_file
        log_file.write("\n find_file_from_media_url invoked for: " + str(source_attr))
        if "media" in source_attr:
            source_attr = source_attr.split("media/")[-1]
            file_node = node_collection.find_one({"$or": [{'if_file.original.relurl': source_attr}, {'if_file.mid.relurl': source_attr},{'if_file.thumbnail.relurl': source_attr}]})
        elif "readDoc" in source_attr:
            split_src = source_attr.split('/')
            node_id = split_src[split_src.index('readDoc') + 1]
            file_node = node_collection.one({'_id': ObjectId(node_id)})
        # NOTE(review): if source_attr matches neither branch, file_node is
        # unbound here and the NameError is swallowed by the except below.
        if file_node:
            log_file.write("\n media file_node gs found: " + str(file_node._id) )
            get_file_node_details(file_node)
    except Exception as find_file_from_media_url_err:
        error_log = "\n !!! Error found while taking dump in find_file_from_media_url() ."
        error_log += "\nError: " + str(find_file_from_media_url_err)
        print "\n Error: ", error_log
        log_file.write(error_log)
        # NOTE(review): error_log is printed twice (above and below).
        print error_log
        pass
def delete_thread(request,group_id,forum_id,node_id):
    """ Changing status of thread to HIDDEN

    Soft-deletes a forum thread (status -> HIDDEN), notifies every group
    member plus the group creator, and re-renders the forum details page.
    """
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(node_id):
        thread=node_collection.one({'_id':ObjectId(node_id)})
    else:
        # NOTE(review): returning None here makes Django raise; presumably
        # invalid ids never reach this view -- confirm with the url conf.
        return
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    # Resolve group_id when it is a name rather than an ObjectId string.
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group","name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
            if auth:
                group_id = str(auth._id)
            else:
                pass
    # Soft delete: flip the thread's status to HIDDEN.
    op = node_collection.collection.update({'_id': ObjectId(node_id)}, {'$set': {'status': u"HIDDEN"}})
    node=node_collection.one({'_id':ObjectId(node_id)})
    forum_threads = []
    # Remaining (non-hidden) replies under this forum, oldest first.
    exstng_reply = node_collection.find({'$and':[{'_type':'GSystem'},{'prior_node':ObjectId(forum._id)}],'status':{'$nin':['HIDDEN']}})
    exstng_reply.sort('created_at')
    forum_node=node_collection.one({'_id':ObjectId(forum_id)})
    for each in exstng_reply:
        forum_threads.append(each.name)  # NOTE(review): collected but unused
    # send notifications to all group members
    colg=node_collection.one({'_id':ObjectId(group_id)})
    for each in colg.author_set:
        if each != colg.created_by:
            bx=get_userobject(each)
            if bx:
                activity=request.user.username+" -deleted thread "
                prefix=" in the forum "+forum_node.name
                link="http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
                msg=activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
                # no_check=forum_notification_status(group_id,auth._id)
                # if no_check:
                ret = set_notif_val(request,group_id,msg,activity,bx)
    # The group creator gets a separate notification.
    activity=request.user.username+" -deleted thread "
    prefix=" in the forum "+forum_node.name
    bx=get_userobject(colg.created_by)
    if bx:
        link="http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
        msg=activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
        # no_check=forum_notification_status(group_id,auth._id)
        # if no_check:
        ret = set_notif_val(request,group_id,msg,activity,bx)
    # send notification code ends here
    variables = RequestContext(request,{ 'forum':forum, 'groupid':group_id,'group_id':group_id, 'forum_created_by':User.objects.get(id=forum.created_by).username })
    return render_to_response("ndf/forumdetails.html",variables)
def group_summary(request,group_id):
    ''' Renders the summary of all the activities done by the members of the Group

    Refreshes the analytics collection for the group, computes the top-3
    most active members, overall content counts (forums, threads, replies,
    files, pages, courses) and last-7-days activity counts, then renders
    ndf/analytics_group_summary.html.
    '''
    group_name, group_id = get_group_name_id(group_id)
    # Bring the analytics collection up to date for every group member.
    query("group",{ "group_id" : group_id })
    data = {}
    # Activities per user name, most active first.
    pipe = [{'$match' : { 'group_id' : str(group_id)}}, {'$group': {'_id': '$user.name', 'num_of_activities': {'$sum': 1}}}]
    sorted_list = analytics_collection.aggregate(pipeline=pipe)
    # NOTE(review): indexing aggregate(...)['result'] is the pre-3.0 pymongo
    # API; newer drivers return a CommandCursor instead -- confirm driver.
    sorted_list_acc_activities = sorted(sorted_list['result'],key = lambda k:k[u'num_of_activities'],reverse=True)
    data['active_users'] = []
    i=0
    for doc in sorted_list_acc_activities:
        data['active_users'].append({ "name" : (doc[u'_id']) , "activities" : doc[u'num_of_activities'] } )
        i+=1
        if i==3:  # top three only
            break
    Course = node_collection.find_one({"_type":"GSystemType","name":"Course"})
    CourseEventGroup = node_collection.find_one({"_type":"GSystemType","name":"CourseEventGroup"})
    TwistGst = node_collection.find_one({"_type":"GSystemType","name":"Twist"})
    # Lifetime content counts for the group.
    data['forums'] = db['Nodes'].find({"url":"forum", "group_set":ObjectId(group_id)}).count()
    data['threads'] = db['Nodes'].find({"member_of":ObjectId(TwistGst._id),"group_set":ObjectId(group_id)}).count()
    regx=re.compile("^Reply of:.*")
    data['replies'] = db['Nodes'].find({"name": regx,"group_set":ObjectId(group_id)}).count()
    data['files'] = db['Nodes'].find({"url":"file", "group_set":ObjectId(group_id)}).count()
    data['pages'] = db['Nodes'].find({"url":"page", "group_set":ObjectId(group_id)}).count()
    data['total_activities'] = analytics_collection.find({ "group_id" : unicode(group_id)}).count()
    data['Courses'] = node_collection.find({"type_of":Course._id}).count()
    data['announce_courses'] = node_collection.find({"prior_node":ObjectId(group_id),"member_of":CourseEventGroup._id}).count()
    # Activity in the trailing seven days, per content kind.
    data['recent'] = {}
    specific_date = datetime.datetime.now() - datetime.timedelta(days=7)
    data['recent']['forums'] = analytics_collection.find({"action.key": {"$in" : ['create', 'edit']}, "group_id": str(group_id), "obj.forum" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    data['recent']['threads'] = analytics_collection.find({"action.key": {"$in" : ['create', 'edit']}, "group_id": str(group_id), "obj.thread" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    data['recent']['replies'] = analytics_collection.find({"action.key": {"$in" : ['add']}, "group_id": str(group_id), "obj.reply" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    data['recent']['files'] = analytics_collection.find({"action.key": {"$in" : ['create', 'edit']}, "group_id": str(group_id), "obj.file" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    data['recent']['pages'] = analytics_collection.find({"action.key": {"$in" : ['create', 'edit']}, "group_id": str(group_id), "obj.page" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    data['recent']['create_edit_course'] = analytics_collection.find({"action.key": {"$in" : ['create', 'edit']}, "group_id": str(group_id), "obj.course" : { '$exists' : 'true'},"timestamp":{'$gte':specific_date}}).count()
    return render (request ,"ndf/analytics_group_summary.html", { "data" : data, "group_id" : group_id, "groupid" : group_id})
def user_template_view(request, group_id):
    """Render the user's activity card view (ndf/task_card_view.html).

    Gathers assigned tasks and notification settings, then discards them
    (see the NOTE below) and shows the user's four most recently updated
    GSystem/Group/File nodes instead.
    """
    auth_group = None
    group_list=[]
    group_cur = node_collection.find({'_type': "Group", 'name': {'$nin': ["home", request.user.username]}}).limit(4)
    for i in group_cur:
        group_list.append(i)  # NOTE(review): collected but never used below
    blank_list = []
    # Tasks assigned to the current user (via the Assignee attribute).
    attributetype_assignee = node_collection.find_one({"_type": 'AttributeType', 'name':'Assignee'})
    attr_assignee = triple_collection.find({"_type": "GAttribute", "attribute_type.$id":attributetype_assignee._id, "object_value":request.user.username})
    for attr in attr_assignee:
        task_node = node_collection.find_one({'_id': attr.subject})
        blank_list.append(task_node)
    # The user's notification settings.
    notification_object = notification.NoticeSetting.objects.filter(user_id=request.user.id)
    for each in notification_object:
        ntid = each.notice_type_id
        ntype = notification.NoticeType.objects.get(id=ntid)
        label = ntype.label.split("-")[0]
        blank_list.append({'label':label, 'display': ntype.display})
    blank_list.reverse()
    # NOTE(review): this reset throws away the tasks and notification
    # entries collected above -- looks like leftover code; confirm intent.
    blank_list = []
    activity = ""
    # Four most recently updated nodes created or modified by this user.
    activity_user = node_collection.find({'$and':[{'$or':[{'_type':'GSystem'},{'_type':'Group'},{'_type':'File'}]}, {'$or':[{'created_by':request.user.id}, {'modified_by':request.user.id}]}] }).sort('last_update', -1).limit(4)
    for each in activity_user:
        if each.created_by == each.modified_by:
            if each.last_update == each.created_at:
                activity = 'created'
            else:
                activity = 'modified'
        else:
            activity = 'created'
        if each._type == 'Group':
            blank_list.append(each)
        else:
            member_of = node_collection.find_one({"_id": each.member_of[0]})  # unused lookup
            blank_list.append(each)
    print blank_list
    template = "ndf/task_card_view.html"
    #variable = RequestContext(request, {'TASK_inst': self_task,'group_name':group_name,'group_id': group_id, 'groupid': group_id,'send':send})
    variable = RequestContext(request, {'TASK_inst':blank_list,'group_name':group_id,'group_id': group_id, 'groupid': group_id})
    return render_to_response(template, variable)
def output(request, group_id, meetingid):
    """Render the meeting page (ndf/newmeeting.html) for a group.

    Resolves ``group_id`` from a group name -- falling back to the
    requesting user's Author node -- when it is not a valid ObjectId,
    and passes the meeting id straight through to the template.
    """
    meeting_ref = meetingid
    if not ObjectId().is_valid(group_id):
        named_group = node_collection.find_one({'_type': "Group",
                                                "name": group_id})
        author = node_collection.one({
            '_type': 'Author',
            'name': unicode(request.user.username)
        })
        if named_group:
            group_id = str(named_group._id)
        elif author:
            group_id = str(author._id)
    else:
        # Valid ObjectId: fetch the group record (result unused; lookup
        # kept for parity with the original implementation).
        node_collection.find_one({'_type': "Group",
                                  "_id": ObjectId(group_id)})
    context = {
        'group_id': group_id,
        'groupid': group_id,
        'appId': app._id,
        'newmeetingid': meeting_ref,
    }
    return render_to_response("ndf/newmeeting.html", context,
                              context_instance=RequestContext(request))
def delete_observation(request, group_id, app_id=None, app_name=None, app_set_id=None, slug=None):
    """Delete one map marker (observation) from an app-set element.

    Authenticated users may delete any marker; anonymous users may only
    delete markers recorded in their own session cookie.  Responds with a
    JSON pair: [remaining marker count, status/err message].
    """
    user_type = request.POST["user"]
    user_session_id = request.POST["user_session_id"]
    marker_geojson = request.POST["marker_geojson"]
    # The marker arrives as a Python-literal GeoJSON string.
    marker_geojson = ast.literal_eval(marker_geojson)
    marker_ref = marker_geojson['properties']['ref']
    is_cookie_supported = request.session.test_cookie_worked()
    operation_performed = ""
    app_set_element = node_collection.find_one({
        '_id': ObjectId(app_set_id),
        'group_set': { '$all': [ObjectId(group_id)] }
    })
    # for anonymous user: verify the marker was created in this session.
    anonymous_flag = False
    if (user_type == "anonymous" and is_cookie_supported):
        # Cookie format assumed: "<session_id>,<ref>,<ref>,..." -- the
        # leading segment must match the posted session id.
        cookie_added_markers = request.session.get('anonymous_added_markers')
        if (cookie_added_markers != None) and (
                cookie_added_markers[:cookie_added_markers.find(",")] == user_session_id):
            if cookie_added_markers.find(marker_ref) > 0:
                anonymous_flag = True
            else:
                operation_performed = "You have not created this marker or you had lost your session !"
        else:
            operation_performed = "You have not created this marker or you had lost your session !"
    else:
        operation_performed = "You have not created this marker or we think you had disabled support for cookies !"
    if (user_type == "authenticated") or anonymous_flag:
        # NOTE(review): removes from the list while iterating it; safe only
        # if at most one entry matches marker_ref -- confirm uniqueness.
        for each in app_set_element.location:
            if each['properties']['ref'] == marker_ref:
                app_set_element.location.remove(each)
                app_set_element.save(groupid=group_id)
                operation_performed = "marker_deleted"
    response_data = [len(app_set_element.location), operation_performed]
    response_data = json.dumps(response_data)
    return StreamingHttpResponse(response_data)
def task_collection(request,group_name,task_id=None,each_page=1):
    """Render a paginated list of a task's sub-tasks.

    For every node in the task's collection_set, gathers the Status,
    start/end time, Priority, Assignee and Estimated_time attribute values
    and shows them ten per page in ndf/task_list_view.html.
    """
    ins_objectid = ObjectId()
    choice=0
    task=[]
    # Resolve group_name to a group id when it is not already an ObjectId.
    if ins_objectid.is_valid(group_name) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_name})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
            if auth:
                group_id = str(auth._id)
            else:
                pass
    # NOTE(review): when group_name IS a valid ObjectId, group_id is never
    # assigned and the RequestContext below raises NameError -- confirm the
    # url conf always passes a group *name* here.
    collection_task=[]
    node = node_collection.one({'_id': ObjectId(task_id)})
    attr_value = {}
    at_list = ["Status", "start_time", "Priority", "end_time", "Assignee", "Estimated_time"]
    for each in node.collection_set:
        attr_value = {}
        new = node_collection.one({'_id': ObjectId(each)})
        # Collect each attribute's value (None when not set on the node).
        for attrvalue in at_list:
            attributetype_key = node_collection.find_one({"_type": 'AttributeType', 'name': attrvalue})
            attr = triple_collection.find_one({"_type": "GAttribute", "subject": new._id, "attribute_type": attributetype_key._id})
            if attr:
                attr_value.update({attrvalue:attr.object_value})
            else:
                attr_value.update({attrvalue:None})
        attr_value.update({'id':each})
        attr_value.update({'Name':new.name})
        collection_task.append(dict(attr_value))
    # Ten sub-tasks per page.
    paged_resources = Paginator(collection_task,10)
    files_list = []
    for each_resource in (paged_resources.page(each_page)).object_list:
        files_list.append(each_resource)
    template = "ndf/task_list_view.html"
    variable = RequestContext(request, {'TASK_inst':files_list,'group_name':group_name,"page_info":paged_resources,'page_no':each_page, 'group_id': group_id, 'groupid': group_id,'choice':choice,'status':'None','task':task_id})
    return render_to_response(template, variable)
def module(request, group_id, module_id=None): """ * Renders a list of all 'modules' available within the database. """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group","name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_id) except: group_name, group_id = get_group_name_id(group_id) if module_id is None: module_ins = node_collection.find_one({'_type':"GSystemType", "name":"Module"}) if module_ins: module_id = str(module_ins._id) if request.method == "POST": # Module search view title = GST_MODULE.name search_field = request.POST['search_field'] module_coll = node_collection.find({'member_of': {'$all': [ObjectId(GST_MODULE._id)]}, '$or': [{'name': {'$regex': search_field, '$options': 'i'}}, {'tags': {'$regex':search_field, '$options': 'i'}}], 'group_set': {'$all': [ObjectId(group_id)]} }).sort('last_update', -1) # module_nodes_count = course_coll.count() return render_to_response("ndf/module.html", {'title': title, 'appId':app._id, 'searching': True, 'query': search_field, 'module_coll': module_coll, 'groupid':group_id, 'group_id':group_id }, context_instance=RequestContext(request) ) elif GST_MODULE._id == ObjectId(module_id): # Module list view title = GST_MODULE.name module_coll = node_collection.find({'member_of': {'$all': [ObjectId(module_id)]}, 'group_set': {'$all': [ObjectId(group_id)]}}) template = "ndf/module.html" variable = RequestContext(request, {'title': title, 'appId':app._id, 'module_coll': module_coll, 'group_id': group_id, 'groupid': group_id}) return render_to_response(template, variable)
def create_task(request, task_id, group_id):
    """Create a Task GSystem node (or load an existing one) from POST data.

    When ``task_id`` is empty a new node is created and populated with the
    common GSystem fields; the watchers list (comma separated user ids)
    replaces the node's author_set.  When ``parent`` is posted, prior/post
    node links are wired between the task and its parent.  Returns the
    saved task node.
    """
    if task_id:
        # Edit path: load the existing task.
        task_node = node_collection.one({
            '_type': u'GSystem',
            '_id': ObjectId(task_id)
        })
        edit_task_node = task_node
    else:
        task_node = node_collection.collection.GSystem()
    # Raw form fields (several are only consumed by get_node_common_fields /
    # later attribute handling -- kept as-is).
    name = request.POST.get("name", "")
    content_org = request.POST.get("content_org", "")
    parent = request.POST.get("parent", "")
    Status = request.POST.get("Status", "")
    Start_date = request.POST.get("start_time", "")
    Priority = request.POST.get("Priority", "")
    Due_date = request.POST.get("end_time", "")
    Assignee = request.POST.get("Assignee", "")
    Estimated_time = request.POST.get("Estimated_time", "")
    watchers = request.POST.get("watchers", "")
    GST_TASK = node_collection.one({'_type': "GSystemType", 'name': 'Task'})
    tag = ""
    field_value = []
    file_name = (request.POST.get("files_name"))
    if not task_id:  # create
        get_node_common_fields(request, task_node, group_id, GST_TASK)
    # Adding watchers to node's author_set
    if watchers:
        task_node.author_set = []
        user_to_be_notified = []
        for each_watchers in watchers.split(','):
            bx = User.objects.get(id=int(each_watchers))
            if bx:
                task_node.author_set.append(bx.id)
                # Adding to list which holds user's to be notified about the task
                if bx not in user_to_be_notified:
                    user_to_be_notified.append(bx)
    task_node.save(groupid=group_id)
    if parent:
        # prior node saving: link only on creation, but always point the
        # parent's post_node at this task.
        if not task_id:
            task_node.prior_node = [ObjectId(parent)]
        parent_object = node_collection.find_one({'_id': ObjectId(parent)})
        parent_object.post_node = [task_node._id]
        parent_object.save(groupid=group_id)
        task_node.save(groupid=group_id)
    return task_node
def parse_content(path, content_soup): """ This will fill: OEBPS/Images OEBPS/Audios OEBPS/Videos Steps: 1. Update links 2. Copy media file object """ # all_a = content_soup.find_all('a', href=True) # ==== updating media elements ==== all_src = content_soup.find_all(src=True) # Fetching the files for each_src in all_src: src_attr = each_src["src"] file_node = None if src_attr.startswith("/media"): # file src_attr = src_attr.split("media/")[-1] file_extension = src_attr.rsplit(".",1)[-1] file_node = node_collection.find_one({"$or": [{'if_file.original.relurl': src_attr}, {'if_file.mid.relurl': src_attr},{'if_file.thumbnail.relurl': src_attr}]}) if "readDoc" in src_attr: split_src = src_attr.split('/') node_id = split_src[split_src.index('readDoc') + 1] file_node = node_collection.one({'_id': ObjectId(node_id)}) if file_node: mimetype_val = file_node.if_file.mime_type.lower() # mimetype can be audio|video|image # file_name = slugify(file_node.name) + "." + file_extension file_name = file_node.name file_loc = None if "image" in mimetype_val: file_loc = "Images" elif "video" in mimetype_val: file_loc = "Videos" elif "audio" in mimetype_val: file_loc = "Audios" elif "text" in mimetype_val: file_loc = "Misc" each_src["src"] = (os.path.join('..',file_loc, file_name)) shutil.copyfile("/data/media/" + file_node['if_file']['original']['relurl'], os.path.join(oebps_path, file_loc, file_name)) create_update_content_file(file_name, file_loc, mimetype_val, is_non_html=True) # ==== updating assessment iframes ==== # ==== updating App iframes ==== return content_soup
def create_task(request, task_id, group_id):
    """Create a new Task GSystem node from POST data, or load an existing one.

    NOTE(review): this is a byte-for-byte duplicate (modulo whitespace) of the
    ``create_task`` defined earlier in this module; at import time this later
    definition shadows the earlier one.  Confirm which copy is intended and
    remove the other.

    NOTE(review): recovered from a single collapsed line; statement nesting is
    the most plausible reconstruction — verify save() placement against VCS.
    """
    if task_id:
        # Edit flow: load the existing Task node.
        task_node = node_collection.one({'_type': u'GSystem', '_id': ObjectId(task_id)})
        edit_task_node = task_node  # NOTE(review): assigned but never used below
    else:
        # Create flow: start from a blank GSystem document.
        task_node = node_collection.collection.GSystem()
    # Raw POST fields; several are unused here (see earlier duplicate).
    name = request.POST.get("name", "")
    content_org = request.POST.get("content_org", "")
    parent = request.POST.get("parent", "")
    Status = request.POST.get("Status", "")
    Start_date = request.POST.get("start_time", "")
    Priority = request.POST.get("Priority", "")
    Due_date = request.POST.get("end_time", "")
    Assignee = request.POST.get("Assignee", "")
    Estimated_time = request.POST.get("Estimated_time", "")
    watchers = request.POST.get("watchers", "")
    GST_TASK = node_collection.one({'_type': "GSystemType", 'name': 'Task'})
    tag = ""
    field_value = []
    file_name = (request.POST.get("files_name"))
    if not task_id:  # create
        get_node_common_fields(request, task_node, group_id, GST_TASK)
        # Adding watchers to node's author_set
        if watchers:
            task_node.author_set = []
            user_to_be_notified = []
            # watchers is a comma-separated string of User ids.
            for each_watchers in watchers.split(','):
                bx = User.objects.get(id=int(each_watchers))
                if bx:
                    task_node.author_set.append(bx.id)
                    # Adding to list which holds user's to be notified about the task
                    if bx not in user_to_be_notified:
                        user_to_be_notified.append(bx)
        task_node.save(groupid=group_id)
    if parent:
        # prior node saving: link this task under its parent task.
        if not task_id:
            task_node.prior_node = [ObjectId(parent)]
        parent_object = node_collection.find_one({'_id': ObjectId(parent)})
        parent_object.post_node = [task_node._id]
        parent_object.save(groupid=group_id)
        task_node.save(groupid=group_id)
    return task_node
def find_file_from_media_url(source_attr):
    """Resolve a ``/media/...`` URL to its File node.

    The part of *source_attr* after ``media/`` is the stored relative url;
    it is matched against any of the three stored size variants (original,
    mid, thumbnail).  Returns the matching node, or None if none matches.
    """
    rel_url = source_attr.split("media/")[-1]
    extension = rel_url.rsplit(".", 1)[-1]  # kept for parity; not used below
    variant_fields = (
        'if_file.original.relurl',
        'if_file.mid.relurl',
        'if_file.thumbnail.relurl',
    )
    lookup = {"$or": [{field: rel_url} for field in variant_fields]}
    return node_collection.find_one(lookup)
def query(analytics_type, details):
    '''
    This function checks the Analytics data (for a user) in
    analytics_collection and gets the time to which the query set is updated.
    Based on the time, it fetches raw data from the Benchmark collection and
    hands it over to normalize() to do the filtering and redundancy check.

    In case the analytics_type is 'group', the function resolves the members
    of the group and calls itself recursively for each user, to update the
    analytics_collection.

    Parameters:
        analytics_type -- "user" or anything else (treated as "group").
        details        -- dict with key 'username' (user mode) or
                          'group_id' (group mode).

    Always returns 1.
    '''
    if analytics_type == "user":
        # Most recent analytics timestamp already recorded for this user.
        cursor = analytics_collection.find({"user.name": str(details['username'])}).sort("timestamp", -1).limit(1)
        # Sentinel "beginning of time" used when no analytics exist yet.
        latest_timestamp = datetime.datetime(1900, 1, 1)
        if cursor is None:
            pass
        else:
            for doc in cursor:
                latest_timestamp = doc['timestamp']
                break  # limit(1) — only the newest document matters
        # Only benchmark rows newer than what we already processed.
        raw_data = benchmark_collection.find({"user": details['username'], "last_update": {"$gt": latest_timestamp}}).sort("last_update", -1)
        if raw_data is None:
            pass
        else:
            normalize(raw_data)
    else:
        # Group mode: recurse once per member (authors + admins).
        group_id = details['group_id']
        group_node = node_collection.find_one({"_id": ObjectId(group_id)})
        if group_node is not None:
            member_list = group_node[u'author_set'] + group_node[u'group_admin']
            for member in member_list:
                author = node_collection.find_one({"_type": "Author", "created_by": int(member)})
                if author is not None:
                    query("user", {"username": author[u'name']})
    return 1
def output(request, group_id, meetingid):
    """Render the meeting page, resolving *group_id* from a name if needed.

    Parameters:
        request   -- Django HttpRequest.
        group_id  -- group ObjectId string, group name, or author name.
        meetingid -- meeting identifier passed straight through to template.

    NOTE(review): ``app`` is not defined in this function — it appears to be a
    module-level global (the GSystemType for this app); confirm, otherwise the
    render call raises NameError.
    """
    newmeetingid = meetingid
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        # group_id is a name: try Group by name first, then Author.
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        # NOTE(review): this first `auth` lookup is immediately shadowed in the
        # else-branch below and otherwise unused — looks like dead code.
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    else:
        # Already a valid ObjectId; lookup result is unused (dead code kept).
        group_ins = node_collection.find_one({'_type': "Group", "_id": ObjectId(group_id)})
        pass
    #template = "https://chatb/#"+meetingid
    return render_to_response("ndf/newmeeting.html", {
        'group_id': group_id,
        'appId': app._id,
        'groupid': group_id,
        'newmeetingid': newmeetingid
    }, context_instance=RequestContext(request))
def delete_observation(request, group_id, app_set_id=None, app_id=None, app_name=None, slug=None):
    """Delete one observation marker from an app-set element's location list.

    Anonymous users may only delete markers recorded in their own session
    cookie; authenticated users may delete any marker by ref.

    POST fields: 'user' (user type), 'user_session_id', 'marker_geojson'
    (a GeoJSON-ish dict literal whose properties.ref identifies the marker).

    Returns a StreamingHttpResponse with JSON
    ``[remaining_location_count, status_message]``.
    """
    user_type = request.POST["user"]
    user_session_id = request.POST["user_session_id"]
    marker_geojson = request.POST["marker_geojson"]
    # The client sends a Python-literal dict; literal_eval is safe (no eval).
    marker_geojson = ast.literal_eval(marker_geojson)
    marker_ref = marker_geojson['properties']['ref']
    is_cookie_supported = request.session.test_cookie_worked()
    operation_performed = ""
    app_set_element = node_collection.find_one({'_id': ObjectId(app_set_id), 'group_set': {'$all': [ObjectId(group_id)]}})
    # for anonymous user
    anonymous_flag = False
    if (user_type == "anonymous" and is_cookie_supported):
        # Cookie format (from the code below): "<session_id>,<ref>,<ref>,..."
        cookie_added_markers = request.session.get('anonymous_added_markers')
        if (cookie_added_markers != None) and (cookie_added_markers[:cookie_added_markers.find(",")] == user_session_id):
            if cookie_added_markers.find(marker_ref) > 0:
                anonymous_flag = True
            else:
                operation_performed = "You have not created this marker or you had lost your session !"
        else:
            operation_performed = "You have not created this marker or you had lost your session !"
    else:
        # Also reached for authenticated users; overwritten below on success.
        operation_performed = "You have not created this marker or we think you had disabled support for cookies !"
    if (user_type == "authenticated") or anonymous_flag:
        # NOTE(review): removes from the list while iterating it — safe only
        # because at most one matching element is expected per ref; confirm.
        for each in app_set_element.location:
            if each['properties']['ref'] == marker_ref:
                app_set_element.location.remove(each)
                app_set_element.save(groupid=group_id)
                operation_performed = "marker_deleted"
    response_data = [len(app_set_element.location), operation_performed]
    response_data = json.dumps(response_data)
    return StreamingHttpResponse(response_data)
def Gid(group):
    """Resolve *group* to an id string.

    If *group* is already a valid ObjectId string it is returned unchanged.
    Otherwise it is treated as a name: first looked up as a Group, then as
    an Author; the matching node's id (as a string) is returned.  If nothing
    matches, the original value is returned as-is.
    """
    group_id = group
    if ObjectId().is_valid(group_id):
        return group_id
    matching_group = node_collection.find_one({'_type': "Group", "name": group_id})
    if matching_group:
        return str(matching_group._id)
    #auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    matching_author = node_collection.one({'_type': 'Author', 'name': group_id})
    if matching_author:
        return str(matching_author._id)
    return group_id
def graphs(request, group_id):
    """Render the visualization page, resolving *group_id* from a name.

    If *group_id* is not a valid ObjectId it is treated as a group name; if
    no group matches, it falls back to the requesting user's Author node id.
    """
    # HttpResponseRedirect("ndf/visualize.html",
    # {
    # 'groupid':group_id, 'group_id':group_id,
    # },
    #
    # context_instance=RequestContext(request)
    # )
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        # NOTE(review): this first lookup is shadowed below and unused.
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    return render_to_response("ndf/visualize.html",
                              {'group_id': group_id, 'groupid': group_id},
                              context_instance=RequestContext(request))
def user_activity(request, group_id):
    """Render a page listing nodes the requesting user created or modified.

    Queries GSystem, Group and File nodes where the user is creator or
    modifier, newest first, and renders them via ndf/User_Activity.html.
    """
    # BUG FIX: the original queried '_type': 'group' (lowercase), which never
    # matches — every other query in this module stores the type as 'Group',
    # and the loop below compares each._type == 'Group'.
    activity_user = node_collection.find({
        '$and': [
            {'$or': [{'_type': 'GSystem'},
                     {'_type': 'Group'},
                     {'_type': 'File'}]},
            {'$or': [{'created_by': request.user.id},
                     {'modified_by': request.user.id}]}
        ]
    }).sort('last_update', -1)
    blank_list = []
    for each in activity_user:
        # Classify the activity.  NOTE: `activity` is not passed to the
        # template; kept for parity with the original behaviour.
        if each.created_by == each.modified_by:
            if each.last_update == each.created_at:
                activity = 'created'
            else:
                activity = 'modified'
        else:
            activity = 'created'
        # Removed a dead node_collection.find_one lookup on each.member_of[0]
        # whose result was never used (and raised IndexError on empty
        # member_of); both branches simply collect the node.
        blank_list.append(each)
    template = "ndf/User_Activity.html"
    #variable = RequestContext(request, {'TASK_inst': self_task,'group_name':group_name,'group_id': group_id, 'groupid': group_id,'send':send})
    variable = RequestContext(request, {
        'user_activity': blank_list,
        'group_name': group_id,
        'group_id': group_id,
        'groupid': group_id
    })
    return render_to_response(template, variable)
def dashbard_activity(group_id, url, doc):
    """Record a dashboard/profile view in the analytics collection.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path split into segments; url[1] holds the group
                    or user id, url[3] distinguishes the dashboard type.
        doc      -- normalized benchmark document (user, timestamps, ...).

    Returns 1 if an analytics document was saved, 0 otherwise.

    NOTE(review): the function name is misspelled ("dashbard"); kept because
    callers elsewhere reference it by this name.
    NOTE(review): the outer except also fires when url has fewer than 4
    segments (IndexError on url[3]) — then url[1] is treated as a user id.
    """
    analytics_doc = initialize_analytics_obj(doc, group_id, 'dashboard')
    try:
        if (url[3] == 'group'):
            try:
                group_name, group_id = get_group_name_id(url[1])
                if group_name != None:
                    # Named group dashboard.
                    analytics_doc.action = {"key": "view", "phrase": "viewed dashboard of"}
                    analytics_doc.obj['dashboard']['id'] = group_id
                    analytics_doc.obj['dashboard']['name'] = group_name
                    analytics_doc.save()
                    return 1
                else:
                    # Personal (user) group dashboard.
                    analytics_doc.action = {"key": "view", "phrase": "viewed User group's Dashboard"}
                    analytics_doc.obj['dashboard']['id'] = url[1]
                    analytics_doc.obj['dashboard']['name'] = doc[u'user']
                    analytics_doc.save()
                    return 1
            except:
                pass
    except:
        # Fallback: url[1] is a numeric user id -> profile view.
        try:
            user = node_collection.find_one({"_type": "Author", "created_by": int(url[1])})
            try:
                analytics_doc.action = {"key": "view", "phrase": "viewed profile of"}
                analytics_doc.obj['dashboard']['id'] = url[1]
                analytics_doc.obj['dashboard']['name'] = user[u'name']
                analytics_doc.save();
                return 1
            except:
                pass
        except:
            pass
    return 0
def user_activity(request, group_id):
    """Render a page listing nodes the requesting user created or modified.

    NOTE: duplicates the earlier user_activity definition in this module
    (this later definition wins at import time).

    Queries GSystem, Group and File nodes where the user is creator or
    modifier, newest first, and renders them via ndf/User_Activity.html.
    """
    # BUG FIX: the original queried '_type': 'group' (lowercase), which never
    # matches — every other query in this module stores the type as 'Group',
    # and the loop below compares each._type == 'Group'.
    activity_user = node_collection.find({
        '$and': [
            {'$or': [{'_type': 'GSystem'},
                     {'_type': 'Group'},
                     {'_type': 'File'}]},
            {'$or': [{'created_by': request.user.id},
                     {'modified_by': request.user.id}]}
        ]
    }).sort('last_update', -1)
    blank_list = []
    for each in activity_user:
        # Classify the activity.  NOTE: `activity` is not passed to the
        # template; kept for parity with the original behaviour.
        if each.created_by == each.modified_by:
            if each.last_update == each.created_at:
                activity = 'created'
            else:
                activity = 'modified'
        else:
            activity = 'created'
        # Removed a dead node_collection.find_one lookup on each.member_of[0]
        # whose result was never used (and raised IndexError on empty
        # member_of); both branches simply collect the node.
        blank_list.append(each)
    template = "ndf/User_Activity.html"
    #variable = RequestContext(request, {'TASK_inst': self_task,'group_name':group_name,'group_id': group_id, 'groupid': group_id,'send':send})
    variable = RequestContext(request, {
        'user_activity': blank_list,
        'group_name': group_id,
        'group_id': group_id,
        'groupid': group_id
    })
    return render_to_response(template, variable)
def forum_activity(group_id, url, doc):
    '''
    The function analyzes the forum activities of the user. It takes in the
    raw normalized document from the normalize() function and analyzes the
    doc for activities like create, delete, view forums, thread, reply etc.
    The analyzed data is stored in the Analytics collection.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path split into segments; url[3] is either an
                    action keyword (delete/thread/edit_forum/...) or a forum
                    ObjectId.
        doc      -- normalized benchmark document (user, last_update, ...).

    Returns 1 if an analytics document was saved, 0 otherwise.
    '''
    if ins_objectid.is_valid(url[3]) is False:
        if (url[3] == "delete"):
            # --- delete: forum / thread / reply, distinguished by url[4] ---
            if ins_objectid.is_valid(url[4]) is True:
                # Delete of a forum; only record it if it actually got hidden.
                n = node_collection.find_one({"_id": ObjectId(url[4])})
                if n['status'] == "HIDDEN" or n['status'] == "DELETED":
                    analytics_doc = initialize_analytics_obj(doc, group_id, 'forum')
                    analytics_doc.action = {'key': 'delete', 'phrase': 'deleted a'}
                    analytics_doc.obj['forum']['id'] = url[4];
                    forum_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                    analytics_doc.obj['forum']['name'] = forum_node[u'name']
                    analytics_doc.obj['forum']['url'] = forum_node[u'url']
                    analytics_doc.save();
                    return 1
            elif url[4] == "thread":
                # Delete of a thread: url[5] is the forum id, url[6] the thread id.
                if ins_objectid.is_valid(url[6]) is True:
                    n = node_collection.find_one({"_id": ObjectId(url[6])})
                    if n['status'] == "HIDDEN" or n['status'] == "DELETED":
                        analytics_doc = initialize_analytics_obj(doc, group_id, 'thread')
                        analytics_doc.action = {'key': 'delete', 'phrase': 'deleted a'}
                        # NOTE(review): action[2] sets integer key 2 on the
                        # action dict — inconsistent with the key/phrase shape
                        # used everywhere else; likely vestigial.
                        analytics_doc.action[2] = 'thread'
                        # NOTE(review): `.args` here — every sibling branch
                        # writes `.obj`; suspected typo, left unchanged.
                        analytics_doc.args['thread']['id'] = ObjectId(url[6]);
                        thread_node = db['Nodes'].find_one({"_id": ObjectId(url[6])})
                        analytics_doc.obj['thread']['name'] = thread_node[u'name']
                        # NOTE(review): writes the thread url under
                        # obj['forum']['url'] — suspected typo for
                        # obj['thread']['url']; left unchanged.
                        analytics_doc.obj['forum']['url'] = thread_node[u'url']
                        forum_node = db['Nodes'].find_one({"_id": ObjectId(url[5])})
                        analytics_doc.obj['thread']['forum'] = {"id": forum_node[u'_id'], "name": forum_node[u'name'], "url": forum_node[u'url']};
                        analytics_doc.save();
                        return 1
            elif url[4] == "reply":
                # Delete of a reply: url[5]=forum, url[6]=thread, url[7]=reply.
                if ins_objectid.is_valid(url[7]) is True:
                    n = node_collection.find_one({"_id": ObjectId(url[7])})
                    if n['status'] == "HIDDEN" or n['status'] == "DELETED":
                        analytics_doc = initialize_analytics_obj(doc, group_id, 'reply')
                        analytics_doc.action = {'key': 'delete', 'phrase': 'deleted a'}
                        analytics_doc.obj['reply']['id'] = url[7];
                        reply_node = db['Nodes'].find_one({"_id": ObjectId(url[7])})
                        analytics_doc.obj['reply']['name'] = reply_node[u'name'];
                        analytics_doc.obj['reply']['url'] = reply_node[u'url']
                        thread_node = db['Nodes'].find_one({"_id": ObjectId(url[6])})
                        analytics_doc.obj['reply']['thread'] = {"id": thread_node[u'_id'], "name": thread_node[u'name'], "url": thread_node[u'url']};
                        forum_node = db['Nodes'].find_one({"_id": ObjectId(url[5])})
                        analytics_doc.obj['reply']['forum'] = {"id": forum_node[u'_id'], "name": forum_node[u'name'], "url": forum_node[u'url']};
                        analytics_doc.save();
                        return 1
        elif url[3] == "thread":
            # --- view of a thread: url[4] is the thread id ---
            try:
                if ins_objectid.is_valid(url[4]) is True:
                    analytics_doc = initialize_analytics_obj(doc, group_id, 'thread')
                    analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
                    analytics_doc.obj['thread']['id'] = ObjectId(url[4]);
                    thread_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                    analytics_doc.obj['thread']['name'] = thread_node[u'name'];
                    analytics_doc.obj['thread']['url'] = thread_node[u'url'];
                    analytics_doc.save();
                    return 1
            except:
                pass
        elif url[3] == "edit_forum":
            # --- forum edit (only counted when the request actually POSTed) ---
            if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                analytics_doc = initialize_analytics_obj(doc, group_id, 'forum')
                analytics_doc.action = {'key': 'edit', 'phrase': 'edited a'}
                analytics_doc.obj['forum']['id'] = ObjectId(url[4]);
                forum_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                analytics_doc.obj['forum']['name'] = forum_node[u'name'];
                analytics_doc.obj['forum']['url'] = forum_node[u'url'];
                analytics_doc.save();
                return 1
        elif url[3] == "edit_thread":
            # --- thread edit: url[4] is the forum id, url[5] the thread id ---
            if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                analytics_doc = initialize_analytics_obj(doc, group_id, 'thread')
                analytics_doc.action = {'key': 'edit', 'phrase': 'edited a'}
                forum_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                analytics_doc.obj['thread']['forum'] = {"id": forum_node[u'_id'], "name": forum_node[u'name'], "url": forum_node[u'url']};
                analytics_doc.obj['thread']['id'] = ObjectId(url[5]);
                thread_node = db['Nodes'].find_one({"_id": ObjectId(url[5])})
                analytics_doc.obj['thread']['name'] = thread_node[u'name'];
                analytics_doc.obj['thread']['url'] = thread_node[u'url'];
                analytics_doc.save();
                return 1
        elif url[3] == 'add_node':
            # --- a reply was posted ---
            if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                analytics_doc = initialize_analytics_obj(doc, group_id, 'reply')
                analytics_doc.action = {'key': 'add', 'phrase': 'added a'}
                analytics_doc.save();
                return 1
    else:
        # url[3] is a forum ObjectId: thread creation, forum creation or view.
        n = node_collection.find_one({"_id": ObjectId(url[3])})
        try:
            if url[4] == 'thread':
                if url[5] == 'create':
                    if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                        try:
                            author = node_collection.find_one({"_type": "Author", "name": doc[u'user']})
                        except:
                            pass
                        # NOTE(review): if the lookup above raised, `author`
                        # is unbound here and this line raises NameError,
                        # which the outer except turns into the forum
                        # create/view fallback below.
                        if author:
                            try:
                                threads = node_collection.find({"url": "forum/thread", "created_by": author[u'created_by']})
                                for created_thread in threads:
                                    # Heuristic: a thread created within 5s of
                                    # this request is the one it created.
                                    if (doc[u'last_update'] - created_thread[u'created_at']).seconds < 5:
                                        analytics_doc = initialize_analytics_obj(doc, group_id, 'thread')
                                        analytics_doc.action = {'key': 'create', 'phrase': 'created a'}
                                        analytics_doc.obj['thread']['id'] = str(created_thread[u'_id']);
                                        thread_node = db['Nodes'].find_one({"_id": created_thread[u'_id']})
                                        analytics_doc.obj['thread']['name'] = thread_node[u'name'];
                                        analytics_doc.obj['thread']['url'] = thread_node[u'url'];
                                        forum_node = db['Nodes'].find_one({"_id": ObjectId(url[3])})
                                        analytics_doc.obj['thread']['forum'] = {"id": forum_node[u'_id'], "name": forum_node[u'name'], "url": forum_node[u'url']};
                                        analytics_doc.save();
                                        return 1
                            except:
                                pass
        except:
            # Fallback (e.g. IndexError when url has no 4th segment):
            # distinguish forum creation from a plain forum view using the
            # same 5-second heuristic.
            author_id = n[u'created_by']
            auth = node_collection.find_one({"_type": "Author", "created_by": author_id})
            if auth:
                if auth[u'name'] == doc[u'user']:
                    created_at = n[u'created_at']
                    if (doc[u'last_update'] - created_at).seconds < 5:
                        analytics_doc = initialize_analytics_obj(doc, group_id, 'forum')
                        analytics_doc.action = {'key': 'create', 'phrase': 'created a'}
                        analytics_doc.obj['forum']['id'] = ObjectId(url[3]);
                        forum_node = db['Nodes'].find_one({"_id": ObjectId(url[3])})
                        analytics_doc.obj['forum']['name'] = forum_node[u'name'];
                        analytics_doc.obj['forum']['url'] = forum_node[u'url'];
                        analytics_doc.save();
                        return 1
                    else:
                        analytics_doc = initialize_analytics_obj(doc, group_id, 'forum')
                        analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
                        analytics_doc.obj['forum']['id'] = ObjectId(url[3]);
                        forum_node = db['Nodes'].find_one({"_id": ObjectId(url[3])})
                        analytics_doc.obj['forum']['name'] = forum_node[u'name'];
                        analytics_doc.obj['forum']['url'] = forum_node[u'url'];
                        analytics_doc.save();
                        return 1
    return 0
def task_activity(group_id, url, doc):
    """Record task activity (create / view / edit / delete) in analytics.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path segments; url[3] is an action keyword or a
                    task ObjectId, url[4] (when present) a task id.
        doc      -- normalized benchmark document (user, last_update, ...).

    Returns 1 if an analytics document was saved, 0 otherwise.
    """
    analytics_doc = initialize_analytics_obj(doc, group_id, 'task')
    if ins_objectid.is_valid(url[3]) is False:
        if (url[3] == "delete_task"):
            if ins_objectid.is_valid(url[4]) is True:
                analytics_doc.action = {"key": "delete", "phrase": "deleted a"}
                analytics_doc.obj['task']['id'] = url[4]
                # Best effort: keep recording the delete even if the node is gone.
                try:
                    task_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                    analytics_doc.obj['task']['name'] = task_node[u'name']
                    analytics_doc.obj['task']['url'] = task_node[u'url']
                except:
                    pass
                analytics_doc.save();
                return 1
        elif url[3] == "edit":
            if ins_objectid.is_valid(url[4]) is True:
                # Only count edits that actually POSTed form data.
                if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                    analytics_doc.action = {"key": "edit", "phrase": "edited a"}
                    analytics_doc.obj['task']['id'] = url[4]
                    try:
                        task_node = db['Nodes'].find_one({"_id": ObjectId(url[4])})
                        analytics_doc.obj['task']['name'] = task_node[u'name']
                        analytics_doc.obj['task']['url'] = task_node[u'url']
                    except:
                        pass
                    analytics_doc.save();
                    return 1
        elif url[3] == "task":
            if url[4] == "saveimage":
                if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                    analytics_doc.action = {"key": "save_image", "phrase": "saved an"}
                    #analytics_doc.obj['task']['id'] = url[4]
                    # NOTE(review): returns 1 without saving the analytics doc
                    # — every sibling branch calls save() first; confirm.
                    return 1
    else:
        # url[3] is a task ObjectId: distinguish create from view via the
        # 5-second created_at heuristic used throughout this module.
        n = node_collection.find_one({"_id": ObjectId(url[3])})
        try:
            author_id = n[u'created_by']
            auth = node_collection.find_one({"_type": "Author", "created_by": author_id})
            if auth[u'name'] == doc[u'user']:
                created_at = n[u'created_at']
                if (doc[u'last_update'] - created_at).seconds < 5:
                    analytics_doc.action = {"key": "create", "phrase": "created a"}
                    analytics_doc.obj['task']['id'] = url[3]
                    analytics_doc.obj['task']['name'] = n[u'name']
                    analytics_doc.obj['task']['url'] = n[u'url']
                    analytics_doc.save();
                    return 1
                else:
                    analytics_doc.action = {"key": "view", "phrase": "viewed a"}
                    analytics_doc.obj['task']['id'] = url[3]
                    analytics_doc.obj['task']['name'] = n[u'name']
                    analytics_doc.obj['task']['url'] = n[u'url']
                    analytics_doc.save();
                    return 1
        except:
            # Node/author lookup failed (or viewer is not the creator):
            # record a bare view with only the id.
            analytics_doc.action = {"key": "view", "phrase": "viewed a"}
            analytics_doc.obj['task']['id'] = url[3]
            analytics_doc.save();
            return 1
    return 0
def course_activity(group_id, url, doc):
    '''
    This function updates the analytics_collection database with the new
    activities done on the courses of MetaStudio, and also records whether
    the course is created, edited or viewed.  We check the status in the
    Nodes collection of the database, and we assume that if the difference
    between last_update and created_at is less than 5 seconds then this
    request created the course.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path segments; url[3] is "create"/"course_detail"/
                    "edit" or a course ObjectId, url[4] a course id.
        doc      -- normalized benchmark document (user, last_update, ...).

    Returns 1 on the normal path, 0 when a lookup/save failed.
    '''
    analytics_doc = initialize_analytics_obj(doc, group_id, 'course')
    if (url[3] == "create"):
        if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
            try:
                author = node_collection.find_one({'_type': 'Author', 'name': doc[u'user']})
                cursor = node_collection.find({'url': 'course', 'created_by': author[u'created_by']})
                for course_created in cursor:
                    # 5-second heuristic: this request created the course.
                    if (doc[u'last_update'] - course_created[u'created_at']).seconds < 5:
                        analytics_doc.action = {'key': 'create', 'phrase': 'created a'}
                        analytics_doc.obj['course']['id'] = ObjectId(course_created[u'_id'])
                        analytics_doc.obj['course']['name'] = str(course_created[u"name"])
                        analytics_doc.obj['course']['url'] = course_created[u'url']
                        analytics_doc.save()
            except:
                return 0
    elif (url[3] == "course_detail"):
        if (ins_objectid.is_valid(url[4])):
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            try:
                analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
                analytics_doc.obj['course']['id'] = ObjectId(url[4])
                analytics_doc.obj['course']['name'] = str(n[u"name"])
                # BUG FIX: was course_created[u'url'], a name not defined in
                # this branch — the resulting NameError was swallowed by the
                # except below, so the analytics doc was never saved.
                analytics_doc.obj['course']['url'] = n[u'url']
                analytics_doc.save()
            except Exception:
                return 0
    elif (url[3] == "edit"):
        if (ins_objectid.is_valid(url[4])):
            if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
                n = node_collection.find_one({"_id": ObjectId(url[4])})
                try:
                    analytics_doc.action = {'key': 'edit', 'phrase': 'edited a'}
                    analytics_doc.obj['course']['id'] = ObjectId(url[4])
                    analytics_doc.obj['course']['name'] = str(n[u"name"])
                    # BUG FIX: was course_created[u'url'] (undefined here) —
                    # same silent-NameError problem as above.
                    analytics_doc.obj['course']['url'] = n[u'url']
                    analytics_doc.save()
                except Exception:
                    return 0
    elif (ins_objectid.is_valid(url[3])):
        n = node_collection.find_one({"_id": ObjectId(url[3])})
        try:
            analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
            # BUG FIX: was ObjectId(url[4]) — the node was fetched by url[3],
            # and url[4] need not exist for this route (IndexError was being
            # swallowed, skipping the save).
            analytics_doc.obj['course']['id'] = ObjectId(url[3])
            analytics_doc.obj['course']['name'] = str(n[u"name"])
            analytics_doc.obj['course']['url'] = n[u'url']
            analytics_doc.save()
        except Exception:
            return 0
    return 1
def file_activity(group_id, url, doc):
    '''
    This function updates the analytics_collection database with the new
    activities done on the files of MetaStudio: whether the file is viewed,
    edited, deleted, downloaded or uploaded.  We check the status in the
    Nodes collection of the database, and we assume that if the difference
    between last_update and created_at is less than 5 seconds then this
    request uploaded the file.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path segments; url[3] is an action keyword or a
                    file ObjectId, url[4] (when present) a file id.
        doc      -- normalized benchmark document (user, last_update, ...).

    Returns 1 on the normal path, 0 when a lookup/save failed.
    '''
    analytics_doc = initialize_analytics_obj(doc, group_id, 'file')
    if (url[3] == "submit"):
        # Upload: find the file(s) this user just created (5-second heuristic).
        try:
            author = node_collection.find_one({'_type': 'Author', 'name': doc[u'user']})
            cursor = node_collection.find({'url': 'file', 'created_by': author[u'created_by']})
            for file_created in cursor:
                if (doc[u'last_update'] - file_created[u'created_at']).seconds < 5:
                    analytics_doc.action = {'key': 'create', 'phrase': 'created a'}
                    analytics_doc.obj['file']['id'] = ObjectId(file_created[u'_id'])
                    analytics_doc.obj['file']['type'] = str(file_created[u"mime_type"])
                    analytics_doc.obj['file']['name'] = str(file_created[u"name"])
                    analytics_doc.obj['file']['url'] = file_created[u'url']
                    analytics_doc.save()
        except:
            return 0
    elif (url[3] == "readDoc"):
        # Download of the raw document.
        n = node_collection.find_one({"_id": ObjectId(url[4])})
        try:
            analytics_doc.action = {'key': 'download', 'phrase': 'downloaded a'}
            analytics_doc.obj['file']['id'] = ObjectId(url[4])
            analytics_doc.obj['file']['type'] = str(n[u"mime_type"])
            analytics_doc.obj['file']['name'] = str(n[u"name"])
            analytics_doc.obj['file']['url'] = n[u'url']
            analytics_doc.save()
        except Exception:
            return 0
    elif url[3] == "details":
        # Detail-page view.
        if (ins_objectid.is_valid(url[4])):
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            try:
                analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
                analytics_doc.obj['file']['id'] = ObjectId(url[4])
                analytics_doc.obj['file']['type'] = str(n[u"mime_type"])
                analytics_doc.obj['file']['name'] = str(n[u"name"])
                analytics_doc.obj['file']['url'] = n[u'url']
                analytics_doc.save()
            except Exception:
                return 0
    elif (ins_objectid.is_valid(url[3])):
        # Direct view: url[3] itself is the file id.
        n = node_collection.find_one({"_id": ObjectId(url[3])})
        try:
            analytics_doc.action = {'key': 'view', 'phrase': 'viewed a'}
            # BUG FIX: was ObjectId(url[4]) — the node was fetched by url[3],
            # and url[4] need not exist for this route (the IndexError was
            # swallowed by the except, skipping the save).
            analytics_doc.obj['file']['id'] = ObjectId(url[3])
            analytics_doc.obj['file']['type'] = str(n[u"mime_type"])
            analytics_doc.obj['file']['name'] = str(n[u"name"])
            analytics_doc.obj['file']['url'] = n[u'url']
            analytics_doc.save()
        except Exception:
            return 0
    elif (url[3] == "delete"):
        if ins_objectid.is_valid(url[4]) is True:
            # Only record the delete if the node really got hidden/deleted.
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            if n['status'] == "HIDDEN" or n['status'] == "DELETED":
                analytics_doc.action = {'key': 'delete', 'phrase': 'deleted a'}
                analytics_doc.obj['file']['id'] = ObjectId(url[4])
                analytics_doc.obj['file']['type'] = str(n[u"mime_type"])
                analytics_doc.obj['file']['name'] = str(n[u"name"])
                analytics_doc.obj['file']['url'] = n[u'url']
                analytics_doc.save()
    elif (url[3] == "edit" or url[3] == "edit_file"):
        # Only count edits that actually POSTed form data.
        if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
            if ins_objectid.is_valid(url[4]) is True:
                n = node_collection.find_one({"_id": ObjectId(url[4])})
                analytics_doc.action = {'key': 'edit', 'phrase': 'edited'}
                analytics_doc.obj['file']['id'] = str(url[4])
                analytics_doc.obj['file']['type'] = str(n[u"mime_type"])
                analytics_doc.obj['file']['name'] = str(n[u"name"])
                analytics_doc.obj['file']['url'] = n[u'url']
                analytics_doc.save()
    return 1
def create_task_at_rt(request, rt_list, at_list, task_node, assign, group_name, group_id):
    """Attach relations/attributes to a Task node and notify assignees.

    Parameters:
        request    -- Django HttpRequest carrying the task form fields.
        rt_list    -- RelationType names to create GRelations for.
        at_list    -- AttributeType names to create GAttributes for.
        task_node  -- the Task GSystem node (already saved).
        assign     -- assignee User id or list of ids.
        group_name -- group name used to build the notification URL.
        group_id   -- group id used for saving and notification scoping.

    Side effects: creates GRelation/GAttribute documents and sends one
    "Task reported" notification per assignee.  Returns None.
    """
    file_id = (request.POST.get("files"))
    file_name = (request.POST.get("files_name"))  # NOTE(review): unused below
    # --- relations ---
    for each in rt_list:
        rel_type_node = node_collection.one({'_type': "RelationType", 'name': each})
        field_value_list = None
        if rel_type_node["object_cardinality"] > 1:
            # Multi-valued relation: value may arrive as a JSON list string
            # or as repeated form fields.
            field_value_list = request.POST.get(rel_type_node["name"], "")
            if "[" in field_value_list and "]" in field_value_list:
                field_value_list = json.loads(field_value_list)
            else:
                field_value_list = request.POST.getlist(rel_type_node["name"])
        else:
            field_value_list = request.POST.getlist(rel_type_node["name"])
        # rel_type_node_type = "GRelation"
        # Coerce each raw form value to the relation's object type.
        for i, field_value in enumerate(field_value_list):
            field_value = parse_template_data(rel_type_node.object_type, field_value, field_instance=rel_type_node)
            field_value_list[i] = field_value
        task_gs_triple_instance = create_grelation(task_node._id, node_collection.collection.RelationType(rel_type_node), field_value_list)
    # --- attributes ---
    for each in at_list:
        field_value = []
        if request.POST.get(each, ""):
            attributetype_key = node_collection.find_one({"_type": 'AttributeType', 'name': each})
            subject = task_node._id
            object_value = ""
            if each == 'Assignee':
                # Assignee comes from the `assign` argument, not the form.
                if type(assign) == list:
                    object_value = assign
                else:
                    field_value.append(assign)
                    object_value = field_value
            else:
                field_value = request.POST.get(each, "")
                date_format_string = ""
                if each in ["start_time", "end_time"]:
                    date_format_string = "%d/%m/%Y"
                # NOTE(review): eval() of a DB-stored type name — acceptable
                # only if AttributeType.data_type is trusted/admin-controlled;
                # flagging for a safer lookup-table dispatch.
                field_value = parse_template_data(eval(attributetype_key["data_type"]), field_value, date_format_string=date_format_string)
                # newattribute.object_value = field_value
                object_value = field_value
            # newattribute.save()
            ga_node = create_gattribute(subject, attributetype_key, object_value)
    # --- optional uploaded file attachment ---
    if request.FILES.getlist('UploadTask'):
        attributetype_key = node_collection.find_one({"_type": 'AttributeType', 'name': 'Upload_Task'})
        ga_node = create_gattribute(task_node._id, attributetype_key, file_id)
    # --- notifications: normalize assign into a list of user ids ---
    assignee_list = []
    assignee_list_id = []
    if type(assign) == list:
        assignee_list_id = assign
    else:
        assignee_list_id.append(assign)
    user_to_be_notified = []
    if assignee_list_id:
        for eachuser in assignee_list_id:
            if eachuser:
                bx = User.objects.get(id=int(eachuser))
                assignee_list.append(bx.username)
                user_to_be_notified.append(bx)
    for eachuser in user_to_be_notified:
        activ = "Task reported"
        msg = "Task '" + task_node.name + \
            "' has been reported by " + request.user.username + \
            "\n - Status: " + request.POST.get('Status', '') + \
            "\n - Assignee: " + ", ".join(assignee_list) + \
            "\n - Url: http://" + site_domain + "/" + group_name.replace(" ", "%20").encode('utf8') + "/task/" + str(task_node._id)
        set_notif_val(request, group_id, msg, activ, eachuser)
def page_activity(group_id, url, doc):
    '''
    This function updates the Analytic_col database with the new activities
    done on the page of MetaStudio; to see whether the page was published or
    deleted we check the status in the Nodes collection of the database.  We
    also assume that if the difference between last_update and created_at is
    less than 5 seconds then this request created the page, else it viewed it.

    Parameters:
        group_id -- group the activity belongs to.
        url      -- request path segments; url[3] is an action keyword or a
                    page ObjectId, url[4] (when present) a page id.
        doc      -- normalized benchmark document (user, last_update, ...).

    Returns 1 if an analytics document was saved, 0 otherwise.
    '''
    analytics_doc = initialize_analytics_obj(doc, group_id, 'page')
    if url[3] == "delete":
        if ins_objectid.is_valid(url[4]) is True:
            # Only record the delete if the node really got hidden/deleted.
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            if n['status'] == "HIDDEN" or n['status'] == "DELETED":
                analytics_doc.action = {'key': 'delete', "phrase": "deleted a"}
                analytics_doc.obj['page']['id'] = url[4]
                analytics_doc.obj['page']['name'] = n[u'name']
                analytics_doc.obj['page']['url'] = n[u'url']
                analytics_doc.save()
                return 1
    elif url[3] == "page_publish":
        if ins_objectid.is_valid(url[4]) is True:
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            if n['status'] == "PUBLISHED":
                analytics_doc.action = {'key': 'publish', "phrase": "published a"}
                analytics_doc.obj['page']['id'] = url[4]
                analytics_doc.obj['page']['name'] = n[u'name']
                analytics_doc.obj['page']['url'] = n[u'url']
                analytics_doc.save()
                return 1
    elif url[3] == "edit":
        # Only count edits that actually POSTed form data.
        if u'has_data' in doc.keys() and doc[u'has_data']["POST"] == True:
            n = node_collection.find_one({"_id": ObjectId(url[4])})
            analytics_doc.action = {'key': 'edit', "phrase": "edited a"}
            analytics_doc.obj['page']['id'] = url[4]
            analytics_doc.obj['page']['name'] = n[u'name']
            analytics_doc.obj['page']['url'] = n[u'url']
            analytics_doc.save()
            return 1
    else:
        # url[3] is (expected to be) a page ObjectId: distinguish creation
        # from a plain view via the 5-second heuristic.
        try:
            n = node_collection.find_one({"_id": ObjectId(url[3])})
            author_id = n[u'created_by']
            auth = node_collection.find_one({"_type": "Author", "created_by": author_id})
            if auth[u'name'] == doc[u'user']:
                if (doc[u'last_update'] - n[u'created_at']).seconds < 5:
                    analytics_doc.action = {"key": "create", "phrase": "created a"}
                    analytics_doc.obj['page']['id'] = ObjectId(url[3])
                    analytics_doc.obj['page']['name'] = n[u'name']
                    analytics_doc.obj['page']['url'] = n[u'url']
                    analytics_doc.save()
                    return 1
                else:
                    analytics_doc.action = {'key': "view", "phrase": "viewed a"}
                    analytics_doc.obj['page']['id'] = ObjectId(url[3])
                    analytics_doc.obj['page']['name'] = n[u'name']
                    analytics_doc.obj['page']['url'] = n[u'url']
                    analytics_doc.save()
                    return 1
            else:
                # Viewer is not the page creator: plain view.
                analytics_doc.action = {'key': "view", "phrase": "viewed a"}
                analytics_doc.obj['page']['id'] = ObjectId(url[3])
                analytics_doc.obj['page']['name'] = n[u'name']
                analytics_doc.obj['page']['url'] = n[u'url']
                analytics_doc.save()
                return 1
        except Exception:
            pass
    return 0
def all_observations(request, group_id, app_id=None):
    """Render the Observation landing page with every observation app-set.

    Resolves *group_id* from a name (falling back to the user's Author id),
    resolves *app_id* to the 'Observation' GSystemType when not given, then
    collects each observation type in the group together with its markers
    and the metadata of files attached to those markers.
    """
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False:
        # group_id is a name: try Group by name, else fall back to Author id.
        group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
        # NOTE(review): this first `auth` lookup is shadowed below and unused.
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    else:
        pass
    if app_id is None:
        app_ins = node_collection.find_one({'_type': "GSystemType", "name": "Observation"})
        if app_ins:
            app_id = str(app_ins._id)
    # app is GSystemType Observation
    app = node_collection.find_one({"_id": ObjectId(app_id)})
    app_name = app.name
    app_collection_set = []
    file_metadata = []
    # retriving each GSystemType in Observation e.g. Plant Obs.., Rain Fall etc.
    for each in app.collection_set:
        app_set_element = node_collection.find_one({'_id': ObjectId(each), 'group_set': {'$all': [ObjectId(group_id)]}})
        # Individual observations e.g. Rain Fall
        if app_set_element:
            locs = len(app_set_element.location)
            locations = app_set_element.location
            for loc in locations:
                # creating list of ObjectId's of file GSystem.
                # attached_files is stored as a Python-literal list string.
                files_list = ast.literal_eval(loc["properties"].get("attached_files", '[]'))
                for file_id in files_list:
                    # executes only if files_list has at least ObjectId
                    if ObjectId.is_valid(file_id) and file_id:
                        # for preventing duplicate dict forming
                        if not file_id in [d['id'] for d in file_metadata]:
                            file_obj = node_collection.one({'_type': 'File', "_id": ObjectId(file_id)})
                            # print file_id, "===", type(file_id)
                            temp_dict = {}
                            temp_dict['id'] = file_obj._id.__str__()
                            temp_dict['name'] = file_obj.name
                            temp_dict['mimetype'] = file_obj.mime_type
                            file_metadata.append(temp_dict)
            # app_element_content_objects = node_collection.find({'member_of':ObjectId(each), 'group_set':{'$all': [ObjectId(group_id)]}})
            # obj_count = app_element_content_objects.count()
            app_collection_set.append({
                "id": str(app_set_element._id),
                "name": app_set_element.name,
                "locations": json.dumps(locations),
                "total_locations": locs
            })
    # Cookie probe used later by delete_observation for anonymous users.
    request.session.set_test_cookie()
    return render_to_response("ndf/observation.html", {
        'app_collection_set': app_collection_set,
        'groupid': group_id,
        'group_id': group_id,
        'app_name': app_name,
        'app_id': app_id,
        'template_view': 'landing_page_view',
        'map_type': 'all_app_markers',
        'file_metadata': json.dumps(file_metadata)
    }, context_instance=RequestContext(request))
def group_members(request, group_id):
    '''
    Renders the list of members sorted on the basis of their contributions
    (activity count from the analytics collection) in the group.
    '''
    group_name, group_id = get_group_name_id(group_id)
    # Record this "group" page visit in the analytics store.
    query("group", {"group_id": group_id})
    # Group analytics records by user name and count each user's
    # activities, then sort members by activity count (descending).
    pipe = [{'$match': {'group_id': str(group_id)}},
            {'$group': {'_id': '$user.name', 'num_of_activities': {'$sum': 1}}}]
    sorted_list = analytics_collection.aggregate(pipeline=pipe)
    sorted_list_acc_activities = sorted(sorted_list['result'], key=lambda k: k[u'num_of_activities'], reverse=True)
    # Per-resource-kind query specs used to count each member's
    # contributions; 'replies' is matched by name pattern, the rest by url.
    computing_urls = [
        {'key': 'forums', 'url': 'forum', 'status': 'DRAFT'},
        {'key': 'threads', 'url': 'forum/thread', 'status': 'DRAFT'},
        {'key': 'files', 'url': 'file', 'status': 'PUBLISHED'},
        {'key': 'pages', 'url': 'page', 'status': 'PUBLISHED'},
        {'key': 'tasks', 'url': 'task', 'status': 'DRAFT'},
        {'key': 'replies', 'name': re.compile("^Reply of:.*"), 'status': 'DRAFT'}
    ]
    list_of_members = []
    for member in sorted_list_acc_activities:
        member_doc = {}
        member_doc['count'] = member[u'num_of_activities']
        author = node_collection.find_one({"_type": "Author", "name": member[u'_id']})
        member_doc['name'] = member[u'_id']
        try:
            member_doc['email'] = author[u'email']
        except:
            # author may be None or lack an email; leave the key out.
            pass
        for entity in computing_urls:
            member_doc[entity['key']] = 0
            if entity['key'] == 'replies':
                try:
                    nodes = node_collection.find({"name": entity['name'], "group_set": ObjectId(group_id), "created_by": author[u'created_by'], "status": entity[u'status']}).count()
                    member_doc[entity['key']] = nodes
                except:
                    # best-effort: a missing author leaves the count at 0.
                    pass
            else:
                try:
                    nodes = node_collection.find({"url": entity['url'], "group_set": ObjectId(group_id), "created_by": author[u'created_by'], "status": entity[u'status']}).count()
                    member_doc[entity['key']] = nodes
                except:
                    pass
        list_of_members.append(member_doc)
    return render(request, "ndf/analytics_group_members.html", {"data": list_of_members, "group_id": group_id, "groupid": group_id})
def find_file_from_media_url(source_attr): source_attr = source_attr.split("media/")[-1] file_extension = source_attr.rsplit(".",1)[-1] file_node = node_collection.find_one({"$or": [{'if_file.original.relurl': source_attr}, {'if_file.mid.relurl': source_attr},{'if_file.thumbnail.relurl': source_attr}]}) return file_node
def adminDesignerDashboardClass(request, class_name='GSystemType'):
    '''
    Fetches objects of the given class (GSystemType / AttributeType /
    RelationType / GSystem) for the admin-designer dashboard — optionally
    filtered by a POSTed case-insensitive name search — and builds a
    per-object summary dict in `objects_details`.
    '''
    if request.method == "POST":
        search = request.POST.get("search", "")
        classtype = request.POST.get("class", "")
        # Case-insensitive name search within the POSTed class type.
        nodes = node_collection.find({'name': {'$regex': search, '$options': 'i'}, '_type': classtype}).sort('last_update', -1)
    else:
        nodes = node_collection.find({'_type': class_name}).sort('last_update', -1)
    objects_details = []
    for each in nodes:
        try:
            user_name = User.objects.get(id=each.created_by).username
        except Exception, e:
            print e
            user_name = None
        member = []
        member_of_list = []
        collection_list = []
        attribute_type_set = []
        relation_type_set = []
        if class_name in ("GSystemType", "AttributeType", "RelationType"):
            # NOTE(review): member_of_list and member are filled with the
            # same names by two identical loops — presumably historical.
            for e in each.member_of:
                member_of_list.append(node_collection.one({'_id': e}).name)
            for members in each.member_of:
                member.append(node_collection.one({'_id': members}).name)
            if class_name == "GSystemType":
                for at_set in each.attribute_type_set:
                    attribute_type_set.append(at_set.name)
                for rt_set in each.relation_type_set:
                    relation_type_set.append(rt_set.name)
                objects_details.append({
                    "Id": each._id,
                    "Title": each.name,
                    "Type": ", ".join(member),
                    "Author": user_name,
                    "Creation": each.created_at,
                    'member_of': ", ".join(member_of_list),
                    "collection_list": ", ".join(collection_list),
                    "attribute_type_set": ", ".join(attribute_type_set),
                    "relation_type_set": ", ".join(relation_type_set)
                })
            else:
                if class_name in ("AttributeType", "RelationType"):
                    objects_details.append({
                        "Id": each._id,
                        "Title": each.name,
                        "Type": ", ".join(member),
                        "Author": user_name,
                        "Creation": each.created_at,
                        'member_of': ", ".join(member_of_list),
                        "collection_list": ", ".join(collection_list)
                    })
        else:
            if class_name == "GSystem":
                group_set = [node_collection.find_one({"_id": eachgroup}).name for eachgroup in each.group_set if node_collection.find_one({"_id": eachgroup})]
                mem_ty = []
                if each.member_of:
                    for e in each.member_of:
                        mem_ty.append(str(e))
                    k = mem_ty[0]
                else:
                    k = None
                member = []
                objects_details.append({
                    "Id": each._id,
                    "Title": each.name,
                    "Alt_Title": each.altnames,
                    "Mem": k,
                    "Type": ", ".join(member),
                    "collection_list": ", ".join(collection_list),
                    # NOTE(review): "Type" appears twice in this literal;
                    # in Python the later entry silently wins.
                    "Type": ", ".join(member),
                    "Author": user_name,
                    "Group": ", ".join(group_set),
                    "Creation": each.created_at
                })
    # NOTE(review): no return/render is visible within this chunk — either
    # the view continues beyond this excerpt or it returns None. Confirm
    # against the full file.
def parse_data_create_gsystem(json_file_path):
    '''
    Bulk-import File GSystems from a JSON document list.

    Reads `json_file_path` (a JSON array of row-dicts), normalizes keys and
    values, creates one File GSystem per row, then attaches language,
    collection membership, thumbnail, discussion thread, and any
    GAttribute/GRelation values found in columns that are not core node
    fields. Per-row failures are logged to `log_error_rows` and do not stop
    the run.
    '''
    json_file_content = ""
    try:
        with open(json_file_path) as json_file:
            json_file_content = json_file.read()
        json_documents_list = json.loads(json_file_content)
        # Initiating empty node obj to learn the core field names/structure.
        node = node_collection.collection.File()
        node_keys = node.keys()
        node_structure = node.structure
        json_documents_list_spaces = json_documents_list
        json_documents_list = []
        # Removes leading and trailing spaces from keys as well as values;
        # keys are also lower-cased for case-insensitive matching below.
        for json_document_spaces in json_documents_list_spaces:
            json_document = {}
            for key_spaces, value_spaces in json_document_spaces.iteritems():
                json_document[key_spaces.strip().lower()] = value_spaces.strip()
            json_documents_list.append(json_document)
    except Exception as e:
        error_message = "\n!! While parsing the file ("+json_file_path+") got following error...\n " + str(e)
        log_print(error_message)
        # NOTE(review): raising a plain string is a TypeError on Python 2.6+;
        # presumably this should raise an Exception instance — confirm.
        raise error_message
    for i, json_document in enumerate(json_documents_list):
        # Row numbers are 1-based plus a header row, hence i + 2.
        info_message = "\n\n\n********** Processing row number : ["+ str(i + 2) + "] **********"
        log_print(info_message)
        try:
            parsed_json_document = {}
            attribute_relation_list = []
            for key in json_document.iterkeys():
                parsed_key = key.lower()
                if parsed_key in node_keys:
                    # Core node field: apply per-field defaults/casting.
                    # created_by: resolve username -> id, default to NROER team.
                    if parsed_key == "created_by":
                        if json_document[key]:
                            temp_user_id = get_user_id(json_document[key].strip())
                            if temp_user_id:
                                parsed_json_document[parsed_key] = temp_user_id
                            else:
                                parsed_json_document[parsed_key] = nroer_team_id
                        else:
                            parsed_json_document[parsed_key] = nroer_team_id
                    # contributors: comma-separated usernames -> user ids.
                    elif parsed_key == "contributors":
                        if json_document[key]:
                            contrib_list = json_document[key].split(",")
                            temp_contributors = []
                            for each_user in contrib_list:
                                user_id = get_user_id(each_user.strip())
                                if user_id:
                                    temp_contributors.append(user_id)
                            parsed_json_document[parsed_key] = temp_contributors
                        else:
                            parsed_json_document[parsed_key] = [nroer_team_id]
                    # tags: cast to the node's declared structure type.
                    elif (parsed_key == "tags") and json_document[key]:
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                    # member_of: every imported resource is a File GSystem.
                    elif parsed_key == "member_of":
                        parsed_json_document[parsed_key] = [file_gst._id]
                    else:
                        # Remaining core fields: generic cast to declared type.
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                else:
                    # key is not in node_keys: treat as a candidate
                    # attribute/relation column, resolved after creation.
                    parsed_json_document[key] = json_document[key]
                    attribute_relation_list.append(key)
            # calling method to create the File GSystem for this row
            node_obj = create_resource_gsystem(parsed_json_document, i)
            nodeid = node_obj._id if node_obj else None
            # ----- for updating language -----
            # NOTE(review): eval() on spreadsheet content — assumes the
            # 'language' cell holds a trusted Python literal; confirm.
            node_lang = get_language_tuple(eval(parsed_json_document['language']))
            if node_obj and node_obj.language != node_lang:
                update_res = node_collection.collection.update(
                    {'_id': ObjectId(nodeid), 'language': {'$ne': node_lang}},
                    {'$set': {'language': node_lang}},
                    upsert=False, multi=False
                )
                if update_res['updatedExisting']:
                    node_obj.reload()
                    info_message = "\n\n- Update to language of resource: " + str(update_res)
                    log_print(info_message)
                    info_message = "\n\n- Now language of resource updates to: " + str(node_obj.language)
                    log_print(info_message)
            # ----- END of updating language -----
            collection_name = parsed_json_document.get('collection', '')
            if collection_name and nodeid:
                # Attach the new node to a same-named collection in the home group.
                collection_node = node_collection.find_one({
                    'member_of': {'$in': [file_gst._id]},
                    'group_set': {'$in': [home_group._id]},
                    'name': unicode(collection_name)
                })
                if collection_node:
                    add_to_collection_set(collection_node, nodeid)
            thumbnail_url = parsed_json_document.get('thumbnail')
            if thumbnail_url and nodeid:
                try:
                    info_message = "\n\n- Attaching thumbnail to resource\n"
                    log_print(info_message)
                    attach_resource_thumbnail(thumbnail_url, nodeid, parsed_json_document, i)
                except Exception, e:
                    print e
            # create thread node (discussion thread for the resource)
            if isinstance(nodeid, ObjectId):
                thread_result = create_thread_obj(nodeid)
            # starting processing for the attributes and relations saving
            if isinstance(nodeid, ObjectId) and attribute_relation_list:
                node = node_collection.one({"_id": ObjectId(nodeid)})
                gst_possible_attributes_dict = node.get_possible_attributes(file_gst._id)
                relation_list = []
                json_document['name'] = node.name
                # Setting attributes: any column not matching a possible
                # attribute is deferred to relation_list.
                for key in attribute_relation_list:
                    is_relation = True
                    for attr_key, attr_value in gst_possible_attributes_dict.iteritems():
                        if key == attr_key:
                            is_relation = False
                            if json_document[key]:
                                info_message = "\n- For GAttribute parsing content | key: '" + attr_key + "' having value: '" + json_document[key] + "'"
                                log_print(info_message)
                                cast_to_data_type(json_document[key], attr_value['data_type'])
                                if attr_value['data_type'] == "curricular":
                                    # setting int values for CR/XCR
                                    if json_document[key] == "CR":
                                        json_document[key] = 1
                                    elif json_document[key] == "XCR":
                                        json_document[key] = 0
                                    else:
                                        # needs to be confirmed
                                        json_document[key] = 0
                                json_document[key] = cast_to_data_type(json_document[key], attr_value['data_type'])
                                subject_id = node._id
                                # Case-insensitive exact match on name or altnames.
                                attribute_type_node = node_collection.one({
                                    '_type': "AttributeType",
                                    '$or': [
                                        {'name': {'$regex': "^"+attr_key+"$", '$options': 'i'}},
                                        {'altnames': {'$regex': "^"+attr_key+"$", '$options': 'i'}}
                                    ]
                                })
                                object_value = json_document[key]
                                ga_node = None
                                info_message = "\n- Creating GAttribute ("+node.name+" -- "+attribute_type_node.name+" -- "+str(json_document[key])+") ...\n"
                                log_print(info_message)
                                ga_node = create_gattribute(subject_id, attribute_type_node, object_value)
                                info_message = "- Created ga_node : "+ str(ga_node.name) + "\n"
                                log_print(info_message)
                                # To break outer for loop as key found
                                break
                            else:
                                error_message = "\n!! DataNotFound: No data found for field ("+str(attr_key)+") while creating GSystem ( -- "+str(node.name)+")\n"
                                log_print(error_message)
                    if is_relation:
                        relation_list.append(key)
                if not relation_list:
                    # No possible relations defined for this node
                    info_message = "\n!! ("+str(node.name)+"): No possible relations defined for this node.\n"
                    log_print(info_message)
                    # NOTE(review): this return aborts the whole import for
                    # the remaining rows, not just this row — confirm intent.
                    return
                gst_possible_relations_dict = node.get_possible_relations(file_gst._id)
                # processing each entry in relation_list
                for key in relation_list:
                    is_relation = True
                    for rel_key, rel_value in gst_possible_relations_dict.iteritems():
                        if key == rel_key:
                            is_relation = False
                            if json_document[key]:
                                # most often the data is a hierarchy separated by ":"
                                if ":" in json_document[key]:
                                    formatted_list = []
                                    temp_teaches_list = json_document[key].replace("\n", "").split(":")
                                    for v in temp_teaches_list:
                                        formatted_list.append(v.strip())
                                    right_subject_id = []
                                    rsub_id = get_id_from_hierarchy(formatted_list)
                                    hierarchy_output = None
                                    # checking every item in hierarchy exists and leaf node's _id found
                                    if rsub_id:
                                        right_subject_id.append(rsub_id)
                                        json_document[key] = right_subject_id
                                    else:
                                        error_message = "\n!! While creating teaches rel: Any one of the item in hierarchy"+ str(json_document[key]) +"does not exist in Db. \n!! So relation: " + str(key) + " cannot be created.\n"
                                        log_print(error_message)
                                        break
                                else:
                                    # sometimes a direct leaf-node may be present without hierarchy and ":"
                                    # NOTE(review): list(str.strip()) yields a list
                                    # of characters, not of names — looks like a
                                    # latent bug; confirm against callers.
                                    formatted_list = list(json_document[key].strip())
                                    right_subject_id = []
                                    right_subject_id.append(_get_id_from_hierarchy(formatted_list))
                                    json_document[key] = right_subject_id
                                info_message = "\n- For GRelation parsing content | key: " + str(rel_key) + " -- " + str(json_document[key])
                                log_print(info_message)
                                for right_subject_id in json_document[key]:
                                    subject_id = node._id
                                    # Here we are appending list of ObjectIds of GSystemType's type_of field
                                    # along with the ObjectId of GSystemType's itself (whose GSystem is getting created).
                                    # This is because some of the RelationType's are holding Base class's ObjectId
                                    # and not that of the Derived one's.
                                    # Deliberately keeping GSystemType's ObjectId first in the list
                                    # And hence, used $in operator in the query!
                                    rel_subject_type = []
                                    rel_subject_type.append(file_gst._id)
                                    if file_gst.type_of:
                                        rel_subject_type.extend(file_gst.type_of)
                                    relation_type_node = node_collection.one({'_type': "RelationType",
                                        '$or': [{'name': {'$regex': "^"+rel_key+"$", '$options': 'i'}},
                                                {'altnames': {'$regex': "^"+rel_key+"$", '$options': 'i'}}],
                                        'subject_type': {'$in': rel_subject_type}
                                    })
                                    right_subject_id_or_list = []
                                    right_subject_id_or_list.append(ObjectId(right_subject_id))
                                    nodes = triple_collection.find({'_type': "GRelation",
                                        'subject': subject_id,
                                        'relation_type.$id': relation_type_node._id
                                    })
                                    # sending list of all the possible right subjects to relation
                                    for n in nodes:
                                        if not n.right_subject in right_subject_id_or_list:
                                            right_subject_id_or_list.append(n.right_subject)
                                    info_message = "\n- Creating GRelation ("+ str(node.name)+ " -- "+ str(rel_key)+ " -- "+ str(right_subject_id_or_list)+") ..."
                                    log_print(info_message)
                                    gr_node = create_grelation(subject_id, relation_type_node, right_subject_id_or_list)
                                    info_message = "\n- Grelation processing done.\n"
                                    log_print(info_message)
                                # To break outer for loop if key found
                                break
                            else:
                                error_message = "\n!! DataNotFound: No data found for relation ("+ str(rel_key)+ ") while creating GSystem (" + str(file_gst.name) + " -- " + str(node.name) + ")\n"
                                log_print(error_message)
                                break
            else:
                info_message = "\n!! Either resource is already created or file is already saved into filehive/DB or file not found"
                log_print(info_message)
                continue
        except Exception as e:
            error_message = "\n While creating ("+str(json_document['name'])+") got following error...\n " + str(e)
            print "!!!!!!!!!!!!EEEEEEEERRRRRRRRRRRRRROOOOOOORRRRRRRRRRRRR......................"
            # Record full row details for the post-run error report.
            file_error_msg = "\n========================" + " Row No : " + str(i + 2) + " ========================\n"
            file_error_msg += "- Name : " + json_document["name"] + "\n"
            file_error_msg += "- File Name: " + json_document["file_name"] + "\n"
            file_error_msg += "- ERROR : " + str(e) + "\n\n"
            file_error_msg += "- Following are the row details : \n\n" + unicode(json.dumps(json_document, sort_keys=True, indent=4, ensure_ascii=False)) + "\n"
            file_error_msg += "============================================================\n\n\n"
            log_error_rows.append(file_error_msg)
            log_print(error_message)
def trash_resource(request, group_id, node_id):
    '''
    Delete Action. This method removes the group_id from the node's
    group_set. Iff node's group_set is empty, send to Trash group.
    A node already in Trash is purged permanently (deletion_type=1).
    '''
    auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
    gst_base_unit = node_collection.one({'_type': 'GSystemType', 'name': 'base_unit'})
    node_obj = node_collection.find_one({"_id": ObjectId(node_id)})
    group_obj = node_collection.find_one({"_id": ObjectId(group_id)})
    trash_group = node_collection.find_one({"name": "Trash"})
    response_dict = {}
    response_dict['success'] = False
    # Already trashed: delete permanently and report JSON status.
    if trash_group._id in node_obj.group_set:
        try:
            if node_obj._id:
                delete_node(ObjectId(node_obj._id), deletion_type=1)
                response_dict['success'] = True
        except Exception as e:
            # best-effort: failure leaves success=False in the response
            pass
        return HttpResponse(json.dumps(response_dict))
    # Detach the node from the current group and from the user's own group.
    if ObjectId(group_id) in node_obj.group_set:
        node_obj.group_set.remove(ObjectId(group_id))
    if ObjectId(auth._id) in node_obj.group_set:
        node_obj.group_set.remove(ObjectId(auth._id))
    node_obj.save()
    if not node_obj.group_set:
        # Add Trash group _id to node_obj's group_set
        if trash_group._id not in node_obj.group_set:
            node_obj.group_set.append(trash_group._id)
            node_obj.status = u"DELETED"
    if node_obj.collection_set:
        if trash_group._id not in node_obj.group_set:
            node_obj.group_set.append(trash_group._id)
            node_obj.status = u"DELETED"
    node_obj.save()
    # Response shape depends on the node's type and the group's type.
    if gst_base_unit._id in node_obj.group_set:
        return HttpResponse("True")
    elif "Page" in node_obj.member_of_names_list and not "CourseEventGroup" in group_obj.member_of_names_list:
        return HttpResponseRedirect(reverse('page', kwargs={'group_id': group_id}))
    elif "File" in node_obj.member_of_names_list and not "CourseEventGroup" in group_obj.member_of_names_list:
        # NOTE(review): at this point response_dict still has success=False
        # even though the detach succeeded — confirm intended.
        return HttpResponse(json.dumps(response_dict))
    elif "CourseEventGroup" in group_obj.member_of_names_list:
        response_dict = {'success': True}
        return HttpResponse(json.dumps(response_dict))
    else:
        return HttpResponseRedirect(reverse('group_dashboard', kwargs={'group_id': group_id}))
def save_observation(request, group_id, app_id=None, app_name=None, app_set_id=None, slug=None):
    '''
    Create or update a geo-located marker on an observation GSystem.

    A marker GeoJSON arrives in POST; when its properties carry a "ref"
    the existing marker with that ref is replaced (edit), otherwise a new
    marker is appended (create). Anonymous users may only edit markers
    they created in the same session, tracked via the
    'anonymous_added_markers' session value ("<session_id>,<ref>,<ref>...").
    '''
    user_type = request.POST["user"]
    user_session_id = request.POST["user_session_id"]
    marker_geojson = request.POST["marker_geojson"]
    marker_geojson = ast.literal_eval(marker_geojson)
    is_cookie_supported = request.session.test_cookie_worked()
    operation_performed = ""
    unique_token = str(ObjectId())
    cookie_added_markers = ""
    app_set_element = node_collection.find_one({'_id': ObjectId(app_set_id), 'group_set': {'$all': [ObjectId(group_id)]}})
    # to update existing location
    if "ref" in marker_geojson['properties']:
        marker_ref = marker_geojson['properties']['ref']
        if app_set_element:
            # for anonymous user: verify session ownership of the marker
            anonymous_flag = False
            if (user_type == "anonymous" and is_cookie_supported):
                cookie_added_markers = request.session.get('anonymous_added_markers', "")
                if (cookie_added_markers != None) and (cookie_added_markers[:cookie_added_markers.find(",")] == user_session_id):
                    if cookie_added_markers.find(marker_ref) > 0:
                        anonymous_flag = True
                    else:
                        operation_performed = "You have not created this marker or you had lost your session !"
                else:
                    operation_performed = "You have not created this marker or you are had lost your session !"
            else:
                # NOTE(review): this message is also set for authenticated
                # users, then overwritten with "edit" below — confirm.
                operation_performed = "You have not created this marker or we think you had disabled support for cookies !"
            if (user_type == "authenticated") or anonymous_flag:
                # Replace the existing marker with the same ref.
                for each in app_set_element.location:
                    if each['properties']['ref'] == marker_ref:
                        app_set_element.location.remove(each)
                app_set_element.location.append(marker_geojson)
                app_set_element.save(groupid=group_id)
                unique_token = marker_ref
                operation_performed = "edit"
    # to create/add new location
    else:
        marker_geojson['properties']['ref'] = unique_token
        if app_set_element:
            app_set_element.location.append(marker_geojson)
            app_set_element.save(groupid=group_id)
            operation_performed = "create_new"
            # for anonymous user: remember the new marker in the session
            if user_type == "anonymous" and is_cookie_supported:
                cookie_added_markers = request.session.get('anonymous_added_markers', "")
                if cookie_added_markers == None or cookie_added_markers[:cookie_added_markers.find(",")] != user_session_id:
                    # new or foreign session: start a fresh tracking string
                    cookie_added_markers = user_session_id + "," + unique_token
                elif cookie_added_markers[:cookie_added_markers.find(",")] == user_session_id:
                    cookie_added_markers += "," + unique_token
                request.session['anonymous_added_markers'] = cookie_added_markers
    response_data = [len(app_set_element.location), unique_token, operation_performed, str(cookie_added_markers)]
    response_data = json.dumps(response_data)
    return StreamingHttpResponse(response_data, {'anonymous_added_markers': cookie_added_markers})
def imageDashboard(request, group_id, image_id=None, page_no=1): from gnowsys_ndf.settings import GSTUDIO_NO_OF_OBJS_PP ''' fetching image acording to group name ''' # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_id) except: group_name, group_id = get_group_name_id(group_id) if image_id is None: image_ins = node_collection.find_one({ '_type': "GSystemType", "name": "Image" }) if image_ins: image_id = str(image_ins._id) # img_col = node_collection.find({'_type': 'File', 'member_of': {'$all': [ObjectId(image_id)]}, 'group_set': ObjectId(group_id)}).sort("last_update", -1) files_cur = node_collection.find( { '_type': { '$in': ["GSystem"] }, 'member_of': file_gst._id, 'group_set': { '$all': [ObjectId(group_id)] }, 'if_file.mime_type': { '$regex': 'image' } # 'created_by': {'$in': gstaff_users}, # '$or': [ # { # }, # { # '$or': [ # {'access_policy': u"PUBLIC"}, # { # '$and': [ # {'access_policy': u"PRIVATE"}, # {'created_by': request.user.id} # ] # } # ], # } # { 'collection_set': {'$exists': "true", '$not': {'$size': 0} }} # ] }, { 'name': 1, '_id': 1, 'fs_file_ids': 1, 'member_of': 1, 'mime_type': 1, 'if_file': 1 }).sort("last_update", -1) # print "file count\n\n\n",files_cur.count() # image_page_info = paginator.Paginator(files_cur, page_no, GSTUDIO_NO_OF_OBJS_PP) template = "ndf/ImageDashboard.html" already_uploaded = request.GET.getlist('var', "") variable = RequestContext( request, { 'imageCollection': files_cur, 'already_uploaded': already_uploaded, 'groupid': group_id, 'group_id': group_id }) return render_to_response(template, variable)
def page(request, group_id, app_id=None, page_no=1):
    """Renders a list of all 'Page-type-GSystems' available within the database.

    Three modes:
      * POST        -- name/tag search within the group's pages;
      * app_id is the Page GSystemType -- paginated listing of the group's pages;
      * otherwise   -- app_id is a single page's id: detail view.
    """
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    if app_id is None:
        app_ins = node_collection.find_one({'_type': "GSystemType", "name": "Page"})
        if app_ins:
            app_id = str(app_ins._id)
    from gnowsys_ndf.settings import GSTUDIO_NO_OF_OBJS_PP
    content = []
    version = []
    con = []
    group_object = node_collection.one({'_id': ObjectId(group_id)})
    # Code for user shelf
    shelves = []
    shelf_list = {}
    auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
    if request.method == "POST":
        # Search mode: match name OR tags, respecting access policy
        # (PUBLIC, or PRIVATE owned by the requesting user).
        title = gst_page.name
        search_field = request.POST['search_field']
        page_nodes = node_collection.find({
            'member_of': {'$all': [ObjectId(app_id)]},
            '$or': [
                {'$and': [
                    {'name': {'$regex': search_field, '$options': 'i'}},
                    {'$or': [
                        {'access_policy': u"PUBLIC"},
                        {'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
                    ]}
                ]},
                {'$and': [
                    {'tags': {'$regex': search_field, '$options': 'i'}},
                    {'$or': [
                        {'access_policy': u"PUBLIC"},
                        {'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
                    ]}
                ]}
            ],
            'group_set': {'$all': [ObjectId(group_id)]},
            'status': {'$nin': ['HIDDEN']}
        }).sort('last_update', -1)
        paginator_pages = paginator.Paginator(page_nodes, page_no, GSTUDIO_NO_OF_OBJS_PP)
        return render_to_response("ndf/page_list.html", {
            'title': title,
            'appId': app._id,
            'shelf_list': shelf_list,
            'shelves': shelves,
            'searching': True,
            'query': search_field,
            'page_nodes': page_nodes,
            'groupid': group_id,
            'group_id': group_id,
            'page_info': paginator_pages
        }, context_instance=RequestContext(request))
    elif gst_page._id == ObjectId(app_id):
        # Listing mode: app_id is the Page GSystemType itself.
        group_type = node_collection.one({'_id': ObjectId(group_id)})
        group_info = group_type_info(group_id)
        node = node_collection.find({'member_of': ObjectId(app_id)})
        title = gst_page.name
        """
        Below query returns only those documents:
        (a) which are pages,
        (b) which belongs to given group,
        (c) which has status either as DRAFT or PUBLISHED, and
        (d) which has access_policy either as PUBLIC or if PRIVATE
            then it's created_by must be the logged-in user
        """
        page_nodes = node_collection.find({
            'member_of': {'$all': [ObjectId(app_id)]},
            'group_set': {'$all': [ObjectId(group_id)]},
            '$or': [
                {'access_policy': u"PUBLIC"},
                {'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
            ],
            'status': {'$nin': ['HIDDEN']}
        }).sort('last_update', -1)
        paginator_pages = paginator.Paginator(page_nodes, page_no, GSTUDIO_NO_OF_OBJS_PP)
        return render_to_response("ndf/page_list.html", {
            'title': title,
            'appId': app._id,
            'shelf_list': shelf_list,
            'shelves': shelves,
            'page_nodes': page_nodes,
            'groupid': group_id,
            'group_id': group_id,
            'page_info': paginator_pages
        }, context_instance=RequestContext(request))
    else:
        # Page Single instance view
        page_node = node_collection.one({"_id": ObjectId(app_id)})
        thread_node = None
        allow_to_comment = None
        annotations = None
        if page_node:
            annotations = json.dumps(page_node.annotations)
            page_node.get_neighbourhood(page_node.member_of)
            thread_node, allow_to_comment = node_thread_access(group_id, page_node)
        return render_to_response('ndf/page_details.html', {
            'node': page_node,
            'node_has_thread': thread_node,
            'appId': app._id,
            'group_id': group_id,
            'shelf_list': shelf_list,
            'allow_to_comment': allow_to_comment,
            'annotations': annotations,
            'shelves': shelves,
            'groupid': group_id
        }, context_instance=RequestContext(request))
def save_quizitem_response(user_id, quiz_type_val, user_action, user_given_ans, thread_obj, node_obj, group_id, qip_gst, curr_datetime):
    '''
    Persist a user's answer ("check" or "submit") to a quiz item.

    Finds (or creates) the user's QuizItemPost GSystem for the item's
    thread, links it into the thread's post_node/prior_node graph, then
    appends {timestamp: answer} to the relevant answer-history attribute
    (quizitempost_user_submitted_ans / quizitempost_user_checked_ans).

    Returns a tuple (old_submitted_ans, user_ans, new_list); all three may
    reflect partial state when an exception was swallowed below.
    '''
    try:
        new_list = []
        old_submitted_ans = None
        user_ans = None
        user_name = User.objects.get(pk=int(user_id)).username
        already_ans_obj = node_collection.find_one({'member_of': qip_gst._id, 'created_by': user_id, 'prior_node': thread_obj._id})
        if already_ans_obj:
            already_submitted_ans = get_attribute_value(node_id=already_ans_obj._id, attr_name="quizitempost_user_submitted_ans", get_data_type=False, use_cache=False)
            # check whether user has already checked or submitted ans
            user_ans = already_ans_obj
        else:
            # First response from this user for this quiz item: build the post node.
            user_ans = node_collection.collection.GSystem()
            user_ans.created_by = user_id
            user_ans.modified_by = user_id
            user_ans.contributors.append(user_id)
            user_ans.member_of.append(qip_gst._id)
            user_ans.group_set.append(group_id)
            if user_ans and (node_obj._id not in user_ans.prior_node):
                user_ans.prior_node.append(node_obj._id)
            user_ans.origin = [{'thread_id': thread_obj._id, 'prior_node_id_of_thread': node_obj._id}]
            user_ans.status = u"PUBLISHED"
            user_ans.name = unicode("Answer_of:" + str(node_obj.name) + "-Answer_by:" + str(user_name))
            user_ans.save()
        if user_id not in thread_obj.author_set:
            thread_obj.author_set.append(user_id)
            thread_obj.save()
        if thread_obj._id not in user_ans.prior_node:
            # add user's post/reply obj to thread obj's post_node
            node_collection.collection.update({'_id': user_ans._id}, {'$push': {'prior_node': thread_obj._id}}, upsert=False, multi=False)
            user_ans.reload()
        if user_ans._id not in thread_obj.post_node:
            # add thread obj to user's post/reply prior_node
            node_collection.collection.update({'_id': thread_obj._id}, {'$push': {'post_node': user_ans._id}}, upsert=False, multi=False)
            thread_obj.reload()
        if user_given_ans and user_ans:
            if quiz_type_val == "Short-Response":
                # Short responses are always treated as submissions.
                if already_ans_obj:
                    old_submitted_ans = get_attribute_value(node_id=user_ans._id, attr_name="quizitempost_user_submitted_ans", get_data_type=False, use_cache=False)
                    if old_submitted_ans != "None" and old_submitted_ans != "" and old_submitted_ans:
                        # continue the existing answer history
                        new_list = old_submitted_ans
                new_list.append({str(curr_datetime): user_given_ans})
                if new_list:
                    create_gattribute(user_ans._id, qip_user_submitted_ans_AT, new_list)
            else:
                if user_given_ans:
                    if user_action == "check":
                        if already_ans_obj:
                            old_checked_ans = get_attribute_value(node_id=user_ans._id, attr_name="quizitempost_user_checked_ans", get_data_type=False, use_cache=False)
                            if old_checked_ans != "None" and old_checked_ans != "":
                                new_list = old_checked_ans
                        new_list.append({str(curr_datetime): user_given_ans})
                        if new_list:
                            create_gattribute(user_ans._id, qip_user_checked_ans_AT, new_list)
                    elif user_action == "submit":
                        if already_ans_obj:
                            old_submitted_ans = get_attribute_value(node_id=user_ans._id, attr_name="quizitempost_user_submitted_ans", get_data_type=False, use_cache=False)
                            if old_submitted_ans != "None" and old_submitted_ans != "" and old_submitted_ans:
                                new_list = old_submitted_ans
                        new_list.append({str(curr_datetime): user_given_ans})
                        if new_list:
                            create_gattribute(user_ans._id, qip_user_submitted_ans_AT, new_list)
        user_ans.reload()
    except Exception as save_quizitem_response_err:
        # NOTE(review): if the exception fires before the first lines of the
        # try, the returned names may be unbound — confirm against callers.
        pass
        print "\nError occurred in save_quizitem_response(). ", save_quizitem_response_err
    return old_submitted_ans, user_ans, new_list
def task_details(request, group_name, task_id):
    """Renders given task's details.

    Resolves ``group_name`` (a Group name or ObjectId string, falling back
    to the requesting user's Author node), gathers the task's attributes
    (Status, times, Priority, Assignee usernames, etc.), its parent,
    subtasks and update history from post_node, its "has_type" relation
    and watcher list, and renders "ndf/task_details.html".
    """
    group_id = None
    # Resolve the group: by name if not a valid ObjectId, else by _id.
    if ObjectId.is_valid(group_name) is False:
        group_ins = node_collection.find_one({'_type': "Group", "name": group_name})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
    elif ObjectId.is_valid(group_name) is True:
        group_ins = node_collection.find_one({'_type': "Group", "_id": ObjectId(group_name)})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
    if group_ins:
        group_id = str(group_ins._id)
    else:
        # No such group: fall back to the user's Author node as the "group".
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if auth:
            group_id = str(auth._id)
    task_node = node_collection.one({'_type': u'GSystem', '_id': ObjectId(task_id)})
    # AttributeType names collected into blank_dict for the template.
    at_list = ["Status", "start_time", "Priority", "end_time", "Assignee", "Estimated_time", "Upload_Task"]
    blank_dict = {}
    history = []
    subtask = []
    for each in at_list:
        attributetype_key = node_collection.find_one({"_type": 'AttributeType', 'name': each})
        attr = triple_collection.find_one({"_type": "GAttribute", "subject": task_node._id, "attribute_type": attributetype_key._id})
        if attr:
            if attributetype_key.name == "Assignee":
                # Assignee stores user ids; convert to unique usernames.
                u_list = []
                for each_id in attr.object_value:
                    u = User.objects.get(id=each_id)
                    if u:
                        if u.username not in u_list:
                            u_list.append(u.username)
                blank_dict[each] = u_list
            else:
                blank_dict[each] = attr.object_value
    if task_node.prior_node:
        # First prior node is treated as the parent task.
        blank_dict['parent'] = node_collection.one({'_id': task_node.prior_node[0]}).name
    if task_node.post_node:
        # post_node holds both subtasks and task_update_history entries.
        for each_postnode in task_node.post_node:
            sys_each_postnode = node_collection.find_one({'_id': each_postnode})
            sys_each_postnode_user = User.objects.get(id=sys_each_postnode.created_by)
            member_of_name = node_collection.find_one({'_id': sys_each_postnode.member_of[0]}).name
            if member_of_name == "Task":
                subtask.append({'id': str(sys_each_postnode._id), 'name': sys_each_postnode.name, 'created_by': sys_each_postnode_user.username, 'created_at': sys_each_postnode.created_at})
            if member_of_name == "task_update_history":
                if sys_each_postnode.altnames == None:
                    postnode_task = '[]'
                else:
                    postnode_task = sys_each_postnode.altnames
                # NOTE(review): eval() of stored altnames — assumes the field
                # holds a trusted Python-literal list; ast.literal_eval would
                # be safer. Confirm the writers of this field.
                history.append({'id': str(sys_each_postnode._id), 'name': sys_each_postnode.name, 'created_by': sys_each_postnode_user.username, 'created_at': sys_each_postnode.created_at, 'altnames': eval(postnode_task), 'content': sys_each_postnode.content})
    if task_node.collection_set:
        blank_dict['collection'] = 'True'
    # Appending TaskType to blank_dict, i.e. "has_type" relationship
    if task_node.relation_set:
        for rel in task_node.relation_set:
            if "has_type" in rel and rel["has_type"]:
                task_type = node_collection.one({'_id': rel["has_type"][0]}, {'name': 1})
                if task_type:
                    blank_dict["has_type"] = task_type["name"]
                break
    # Appending Watchers to blank_dict, i.e. values of node's author_set field
    if task_node.author_set:
        watchers_list = []
        for eachid in task_node.author_set:
            if eachid not in watchers_list:
                watchers_list.append(eachid)
        blank_dict["Watchers"] = watchers_list
    # Newest history entries first.
    history.reverse()
    var = {'title': task_node.name, 'group_id': group_id, 'appId': app._id, 'groupid': group_id, 'group_name': group_name, 'node': task_node, 'history': history, 'subtask': subtask}
    var.update(blank_dict)
    variables = RequestContext(request, var)
    template = "ndf/task_details.html"
    return render_to_response(template, variables)
def discussion_reply(request, group_id, node_id):
    """Create a reply node in a discussion thread (AJAX endpoint).

    Reads ``prior_node_id`` (the post being replied to) and
    ``reply_text_content`` from POST, creates a GSystem of type
    ``reply_st`` holding the reply (plus any uploaded files as its
    collection_set), updates comment counters for the thread's origin
    resource, sends a notification to the thread creator, and returns a
    JSON array describing the saved reply. Returns ``["no_content"]``
    when the reply text is empty; raises on any other failure.
    """
    try:
        group_id = ObjectId(group_id)
    except:
        # group_id was a group name, not an ObjectId string.
        group_name, group_id = get_group_name_id(group_id)
    try:
        group_object = node_collection.one({'_id': ObjectId(group_id)})
        prior_node = request.POST.get("prior_node_id", "")
        content_org = request.POST.get("reply_text_content", "")  # reply content
        node = node_collection.one({"_id": ObjectId(node_id)})
        gs_type_node_id = None
        if u'Twist' not in node.member_of_names_list:
            # node_id is a resource, not the thread itself: resolve its thread.
            grel_dict = get_relation_value(node_id, 'thread_of', True)
            node = grel_dict['grel_node']
        if node.prior_node:
            # The thread's prior_node points at the resource it discusses.
            gs_type_node_id = node.prior_node[0]
        # process and save node if it reply has content
        if content_org:
            user_id = int(request.user.id)
            user_name = unicode(request.user.username)
            # creating empty GST and saving it
            reply_obj = node_collection.collection.GSystem()
            reply_obj.name = unicode("Reply of:" + str(prior_node))
            reply_obj.status = u"PUBLISHED"
            reply_obj.created_by = user_id
            reply_obj.modified_by = user_id
            reply_obj.contributors.append(user_id)
            reply_obj.member_of.append(ObjectId(reply_st._id))
            reply_obj.prior_node.append(ObjectId(prior_node))
            reply_obj.group_set.append(ObjectId(group_id))
            reply_obj.content_org = unicode(content_org)
            filename = slugify(unicode("Reply of:" + str(prior_node))) + "-" + user_name + "-"
            # reply_obj.content = org2html(content_org, file_prefix=filename)
            reply_obj.content = content_org
            # Remember the reply's origin (resource node + thread node).
            if gs_type_node_id:
                reply_obj.origin.append({'prior_node_id_of_thread': ObjectId(gs_type_node_id)})
            if node_id:
                reply_obj.origin.append({'thread_id': ObjectId(node_id)})
            # ----- attach any uploaded files to the reply -----
            upload_files_count = int(request.POST.get("upload_cnt", 0))
            lst = []
            lstobj_collection = []
            usrid = int(request.user.id)
            if upload_files_count > 0:
                # Inherit access policy from the thread, defaulting to PUBLIC.
                try:
                    thread_obj = node_collection.one({'_id': ObjectId(prior_node)})
                    if thread_obj.access_policy:
                        access_policy = thread_obj.access_policy
                    else:
                        access_policy = u'PUBLIC'
                except:
                    access_policy = u'PUBLIC'
                for key, value in request.FILES.items():
                    fname = unicode(value.__dict__['_name'])
                    fileobj, fs = save_file(value, fname, usrid, group_id, "", "", username=unicode(request.user.username), access_policy=access_policy, count=0, first_object="", oid=True)
                    # save_file may return a list; element 1 is the ObjectId then.
                    if type(fileobj) == list:
                        obid = str(list(fileobj)[1])
                    else:
                        obid = str(fileobj)
                    try:
                        file_obj = node_collection.find_one({'_id': ObjectId(obid)})
                        lstobj_collection.append(file_obj._id)
                    except:
                        pass
                    # Moderated groups (except CourseEventGroup) need a moderator task per upload.
                    if "CourseEventGroup" not in group_object.member_of_names_list:
                        if group_object.edit_policy == 'EDITABLE_MODERATED':
                            t = create_moderator_task(request, file_obj.group_set[0], file_obj._id, on_upload=True)
            reply_obj.collection_set = lstobj_collection
            # saving the reply obj
            reply_obj.save()
            # Update Counter collection for the discussed resource.
            if gs_type_node_id:
                gs_type_node = node_collection.one({'_id': gs_type_node_id})
                active_user_ids_list = [request.user.id]
                if GSTUDIO_BUDDY_LOGIN:
                    active_user_ids_list += Buddy.get_buddy_userids_list_within_datetime(request.user.id, datetime.now())
                # removing redundancy of user ids:
                active_user_ids_list = dict.fromkeys(active_user_ids_list).keys()
                Counter.add_comment_pt(resource_obj_or_id=gs_type_node, current_group_id=group_id, active_user_id_or_list=active_user_ids_list)
            # (A large block of legacy, commented-out per-file/per-note Counter
            # bookkeeping previously lived here; superseded by
            # Counter.add_comment_pt above.)
            formated_time = reply_obj.created_at.strftime("%B %d, %Y, %I:%M %p")
            # Build [id, mime_type, name] triplets for attached files.
            files = []
            for each_coll_item in reply_obj.collection_set:
                temp_list = []
                temp = node_collection.one({'_id': ObjectId(each_coll_item)}, {'mime_type': 1, 'name': 1})
                temp_list.append(str(temp['_id']))
                temp_list.append(str(temp['mime_type']))
                temp_list.append(str(temp['name']))
                files.append(temp_list)
            user_names = reply_obj.user_details_dict["contributors"]
            is_grp_admin = False
            if request.user.id in group_object.group_admin:
                is_grp_admin = True
            # JSON payload layout: [status_info, reply_id, prior_node,
            # html_content, org_content, user_id, user_name, created_at,
            # files, is_grp_admin]
            reply = json.dumps(["reply_saved", str(reply_obj._id), str(reply_obj.prior_node[0]), reply_obj.content, reply_obj.content_org, user_id, user_names, formated_time, files, is_grp_admin], cls=DjangoJSONEncoder)
            # ---------- mail/notification sending -------
            try:
                node_creator_user_obj = User.objects.get(id=node.created_by)
                node_creator_user_name = node_creator_user_obj.username
                if int(request.user.id) not in node.author_set:
                    node.author_set.append(int(request.user.id))
                    node.save()
                site = Site.objects.get(pk=1)
                site = site.name.__str__()
                from_user = user_name
                to_user_list = [node_creator_user_obj]
                msg = "\n\nDear " + node_creator_user_name + ",\n\n" + \
                    "A reply has been added in discussion under the " + \
                    node.member_of_names_list[0] + " named: '" + \
                    node.name + "' by '" + user_name + "'."
                activity = "Discussion Reply"
                render_label = render_to_string(
                    "notification/label.html",
                    {"activity": activity, "conjunction": "-", "link": "url_link"}
                )
                notification.create_notice_type(render_label, msg, "notification")
                notification.send(to_user_list, render_label, {"from_user": from_user})
            except Exception as notification_err:
                # Notification failure must not fail the reply itself.
                print "\n Unable to send notification", notification_err
            # ---------- END of mail/notification sending ---------
            return HttpResponse(reply)
        else:
            # no reply content
            return HttpResponse(json.dumps(["no_content"]))
    except Exception as e:
        error_message = "\n DiscussionReplyCreateError: " + str(e) + "\n"
        raise Exception(error_message)
    # Unreachable: kept as a defensive fallback.
    return HttpResponse(json.dumps(["Server Error"]))
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_EDUCATIONAL_LEVEL
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_CURRICULAR
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_AUDIENCE
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_TEXT_COMPLEXITY
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_LANGUAGES

# Module-level node handles, fetched once from MongoDB at import time.
# Full GSystemType node for "File".
GST_FILE = node_collection.one({'_type': 'GSystemType', 'name': u'File'})
# Full GSystemType node for "Pandora_video".
pandora_video_st = node_collection.one(
    {'$and': [{'_type': 'GSystemType'}, {'name': 'Pandora_video'}]})
# _id-only projections of frequently-used GSystemTypes.
file_id = node_collection.find_one({'_type': "GSystemType", "name": "File"}, {"_id": 1})
page_id = node_collection.find_one({'_type': "GSystemType", "name": "Page"}, {"_id": 1})
theme_gst_id = node_collection.find_one({'_type': "GSystemType", "name": "Theme"}, {"_id": 1})
group_gst_id = node_collection.find_one({'_type': "GSystemType", "name": "Group"}, {"_id": 1})
# bson moved between pymongo versions; fall back for old pymongo.
try:
    from bson import ObjectId
except ImportError:  # old pymongo
    from pymongo.objectid import ObjectId
from gnowsys_ndf.ndf.models import File
''' -- imports from application folders/files -- '''
from gnowsys_ndf.settings import META_TYPE, GAPPS, MEDIA_ROOT
from gnowsys_ndf.ndf.models import node_collection
from gnowsys_ndf.ndf.views.methods import get_node_common_fields, create_grelation_list, get_execution_time, delete_grelation, create_grelation
from gnowsys_ndf.ndf.views.methods import get_node_metadata, node_thread_access, create_thread_for_node
from gnowsys_ndf.ndf.management.commands.data_entry import create_gattribute
from gnowsys_ndf.ndf.templatetags.ndf_tags import get_relation_value, get_file_obj
from gnowsys_ndf.ndf.views.methods import get_node_metadata, get_node_common_fields, create_gattribute, get_page, get_execution_time, set_all_urls, get_group_name_id

# Module-level node handles fetched once at import time.
gapp_mt = node_collection.one({'_type': "MetaType", 'name': META_TYPE[0]})
GST_IMAGE = node_collection.one({'member_of': gapp_mt._id, 'name': GAPPS[3]})
image_ins = node_collection.find_one({'_type': "GSystemType", "name": "Image"})
file_gst = node_collection.find_one({"_type": "GSystemType", "name": "File"})


@get_execution_time
def imageDashboard(request, group_id, image_id=None, page_no=1):
    # NOTE(review): only the beginning of this view is visible in this
    # chunk — the body below is an import plus commented-out group
    # resolution; the remainder presumably follows elsewhere. Confirm.
    from gnowsys_ndf.settings import GSTUDIO_NO_OF_OBJS_PP
    ''' fetching image acording to group name '''
    # ins_objectid = ObjectId()
    # if ins_objectid.is_valid(group_id) is False :
    #     group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
    #     auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    #     if group_ins:
    #         group_id = str(group_ins._id)
def check_filter(request, group_name, choice=1, status='New', each_page=1):
    """Return a filtered, paginated list of Task nodes for a group.

    ``choice`` selects the filter: 1 = all visible tasks, 2 = completed
    (Resolved/Closed), 3 = created by the requesting user, 4 = assigned
    to the requesting user, 5 = pending (not completed/rejected and not
    past end_time), 6 = tasks whose Status equals ``status``. Visibility:
    a task is shown to its creator, its assignee, or the group owner.
    Renders "ndf/task_list_view.html" page ``each_page`` (10 per page).
    """
    # AttributeType names gathered per task for the template.
    at_list = ["Status", "start_time", "Priority", "end_time", "Assignee", "Estimated_time"]
    blank_dict = {}
    history = []
    subtask = []
    group_name = group_name
    task = []
    # Resolve group: ObjectId string or group name.
    try:
        group_id = ObjectId(group_name)
    except:
        group_name, group_id = get_group_name_id(group_name)
    # section to get the Tasks
    group = node_collection.find_one({'_id': ObjectId(group_id)})
    GST_TASK = node_collection.one({'_type': "GSystemType", 'name': 'Task'})
    attributetype_key1 = node_collection.find_one({"_type": 'AttributeType', 'name': 'Assignee'})
    Completed_Status_List = ['Resolved', 'Closed']
    title = "Task"
    TASK_inst = node_collection.find({'member_of': {'$all': [GST_TASK._id]}})
    task_list = []
    message = ""
    send = "This group doesn't have any files"
    # First pass: names of tasks that have subtasks (non-empty collection_set).
    sub_task_name = []
    for each in TASK_inst:
        if (each.collection_set):
            sub_task_name.append(each.name)
    TASK_inst.rewind()
    # every one see only task created by them and assigned to them
    # only group owner can see all the task
    for each in TASK_inst:
        attr_value = {}
        for attrvalue in at_list:
            attributetype_key = node_collection.find_one({"_type": 'AttributeType', 'name': attrvalue})
            attr = triple_collection.find_one({"_type": "GAttribute", "subject": each._id, "attribute_type": attributetype_key._id})
            # attr1: is the requesting user (by username) an Assignee of this task?
            attr1 = triple_collection.find_one({"_type": "GAttribute", "subject": each._id, "attribute_type": attributetype_key1._id, "object_value": request.user.username})
            if attr:
                if attrvalue == "Assignee":
                    # Convert stored user ids to unique usernames.
                    uname_list = []
                    for uid in attr.object_value:
                        u = User.objects.get(id=int(uid))
                        if u:
                            if u.username not in uname_list:
                                uname_list.append(u.username)
                    attr_value.update({attrvalue: uname_list})
                else:
                    attr_value.update({attrvalue: attr.object_value})
            else:
                attr_value.update({attrvalue: None})
        attr_value.update({'id': each._id})
        if each.created_by == request.user.id:
            attr_value.update({'owner': 'owner'})
        else:
            attr_value.update({'owner': 'assignee'})
        attr_value.update({'Name': each.name})
        attr_value.update({'collection': each.collection_set})
        # Visibility gate: assignee, creator, or group owner.
        if attr1 or each.created_by == request.user.id or group.created_by == request.user.id:
            if ((each.name in sub_task_name and (not each.collection_set) == False) or each.name not in sub_task_name or attr1):
                if int(choice) == int(1):
                    task_list.append(dict(attr_value))
                if int(choice) == int(2):
                    message = "No Completed Task"
                    if attr_value['Status'] in Completed_Status_List:
                        task_list.append(dict(attr_value))
                if int(choice) == int(3):
                    message = "No Task Created"
                    auth1 = node_collection.one({'_type': 'Author', 'created_by': each.created_by})
                    if auth1:
                        if request.user.username == auth1.name:
                            task_list.append(dict(attr_value))
                if int(choice) == int(4):
                    message = "Nothing Assigned"
                    # Re-query by user id (Assignee may store ids, not usernames).
                    attr1 = triple_collection.find_one({"_type": "GAttribute", "subject": each._id, "attribute_type": attributetype_key1._id, "object_value": request.user.id})
                    if attr1:
                        task_list.append(dict(attr_value))
                if int(choice) == int(5):
                    message = "No Pending Task"
                    if attr_value['Status'] not in Completed_Status_List:
                        if attr_value['Status'] != 'Rejected':
                            if attr_value['end_time'] != "--":
                                # Pending only while end_time has not passed.
                                # if (attr_value['end_time'] > unicode(datetime.date.today())) is False:
                                if (attr_value['end_time'] > datetime.datetime.today()) is False:
                                    task_list.append(dict(attr_value))
                            else:
                                task_list.append(dict(attr_value))
                if int(choice) == int(6):
                    message = "No" + " " + status + " " + "Task"
                    if attr_value['Status'] == status:
                        task_list.append(dict(attr_value))
    # Paginate 10 tasks per page.
    paged_resources = Paginator(task_list, 10)
    files_list = []
    for each_resource in (paged_resources.page(each_page)).object_list:
        files_list.append(each_resource)
    count_list = []
    TASK_inst.rewind()
    count = len(task_list)
    template = "ndf/task_list_view.html"
    variable = RequestContext(request, {'TASK_inst': files_list, 'group_name': group_name, 'appId': app._id, 'group_id': group_id, 'groupid': group_id, 'send': message, 'count': count, 'TASK_obj': TASK_inst, "page_info": paged_resources, 'page_no': each_page, 'choice': choice, 'status': status})
    return render_to_response(template, variable)
def person_create_edit(request, group_id, app_id, app_set_id=None, app_set_instance_id=None, app_name=None):
    """Creates/Modifies document of given person-type.

    GET renders a create/edit form for the person GSystemType identified
    by ``app_set_id`` (editing the existing node when
    ``app_set_instance_id`` is given). POST saves the node's base fields,
    its attribute/relation fields from ``property_order_list``, generates
    a student enrollment code for new Student nodes, links the node to
    its Author via "has_login", manages Program Officer group-admin
    subscriptions, and redirects to the instance-detail view.

    NOTE(review): indentation below is reconstructed from a collapsed
    source line; the nesting of the POST branch follows the standard
    gstudio layout — confirm against upstream before relying on it.
    """
    auth = None
    # Resolve group_id: group name -> _id, else fall back to Author node.
    if ObjectId.is_valid(group_id) is False:
        group_ins = node_collection.one({'_type': "Group", "name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
            else:
                pass
    # Resolve the application node (by name or by id).
    app = None
    if app_id is None:
        app = node_collection.one({'_type': "GSystemType", 'name': app_name})
        if app:
            app_id = str(app._id)
    else:
        app = node_collection.one({'_id': ObjectId(app_id)})
        app_name = app.name
    app_set = ""
    app_collection_set = []
    title = ""
    person_gst = None
    person_gs = None
    college_node = None
    college_id = None
    student_enrollment_code = u""
    create_student_enrollment_code = False
    existing_colg = []
    registration_date = None
    property_order_list = []
    template = ""
    template_prefix = "mis"
    if request.user:
        if auth is None:
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
        # Person types the user may create depend on their agency type.
        agency_type = auth.agency_type
        agency_type_node = node_collection.one({'_type': "GSystemType", 'name': agency_type}, {'collection_set': 1})
        if agency_type_node:
            for eachset in agency_type_node.collection_set:
                app_collection_set.append(node_collection.one({"_id": eachset}, {'_id': 1, 'name': 1, 'type_of': 1}))
    # College group check: present only when the group has a "group_of" relation.
    college_node = node_collection.one({"_id": ObjectId(group_id), "relation_set.group_of": {"$exists": True}}, {"relation_set.group_of": 1})
    if app_set_id:
        person_gst = node_collection.one({'_type': "GSystemType", '_id': ObjectId(app_set_id)}, {'name': 1, 'type_of': 1})
        # Template name derived from the type name, e.g. "ndf/student_create_edit.html".
        template = "ndf/" + person_gst.name.strip().lower().replace(' ', '_') + "_create_edit.html"
        title = person_gst.name
        person_gs = node_collection.collection.GSystem()
        person_gs.member_of.append(person_gst._id)
        if app_set_instance_id:
            # Editing an existing person node.
            person_gs = node_collection.one({'_type': "GSystem", '_id': ObjectId(app_set_instance_id)})
        property_order_list = get_property_order_with_value(person_gs)  #.property_order
        if request.method == "POST":
            # New Student nodes get an enrollment code after save.
            if person_gst.name == "Student" and "_id" not in person_gs:
                create_student_enrollment_code = True
            # [A] Save person-node's base-field(s)
            is_changed = get_node_common_fields(request, person_gs, group_id, person_gst)
            if is_changed:
                # Remove this when publish button is setup on interface
                person_gs.status = u"PUBLISHED"
                person_gs.save(is_changed=is_changed, groupid=group_id)
            # Remember colleges the person was already in charge of (for later cleanup).
            for each_rel in person_gs.relation_set:
                if each_rel and "officer_incharge_of" in each_rel:
                    existing_colg = each_rel["officer_incharge_of"]
            if college_node:
                # College-group members are also added to the MIS_admin group.
                mis_admin = node_collection.one({"_type": "Group", "name": "MIS_admin"}, {"_id": 1})
                node_collection.collection.update({"_id": person_gs._id}, {"$addToSet": {"group_set": mis_admin._id}}, upsert=False, multi=False)
            # [B] Store AT and/or RT field(s) of given person-node (i.e., person_gs)
            for tab_details in property_order_list:
                for field_set in tab_details[1]:
                    # Fetch only Attribute field(s) / Relation field(s)
                    if '_id' in field_set:
                        field_instance = node_collection.one({'_id': field_set['_id']})
                        fi_name = field_instance["name"]
                        field_instance_type = type(field_instance)
                        if field_instance_type in [AttributeType, RelationType]:
                            field_data_type = field_set['data_type']
                            # Fetch field's value depending upon AT/RT and Parse
                            # fetched-value depending upon that field's data-type
                            if field_instance_type == AttributeType:
                                if "File" in field_instance["validators"]:
                                    # Special case: AttributeTypes that require a file
                                    # instance as value; the file document's ObjectId is used.
                                    user_id = request.user.id
                                    if fi_name in request.FILES:
                                        field_value = request.FILES[fi_name]
                                    else:
                                        field_value = ""
                                    # Below 0th index is used because that function
                                    # returns tuple(ObjectId, bool-value)
                                    if field_value != '' and field_value != u'':
                                        file_name = person_gs.name + " -- " + field_instance["altnames"]
                                        content_org = ""
                                        tags = ""
                                        field_value = save_file(field_value, file_name, user_id, group_id, content_org, tags, access_policy="PRIVATE", count=0, first_object="", oid=True)[0]
                                else:
                                    # Other AttributeTypes
                                    if fi_name in request.POST:
                                        field_value = request.POST[fi_name]
                                    # Date-like fields parse with type-specific formats.
                                    if fi_name in ["12_passing_year", "degree_passing_year"]:  #, "registration_year"]:
                                        field_value = parse_template_data(field_data_type, field_value, date_format_string="%Y")
                                    elif fi_name in ["dob", "registration_date"]:
                                        field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y")
                                        registration_date = field_value
                                    else:
                                        field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
                                if field_value:
                                    person_gs_triple_instance = create_gattribute(person_gs._id, node_collection.collection.AttributeType(field_instance), field_value)
                            else:
                                # RelationType: gather one or many object values.
                                if field_instance["object_cardinality"] > 1:
                                    field_value_list = request.POST.get(fi_name, "")
                                    if "[" in field_value_list and "]" in field_value_list:
                                        field_value_list = json.loads(field_value_list)
                                    else:
                                        field_value_list = request.POST.getlist(fi_name)
                                else:
                                    field_value_list = request.POST.getlist(fi_name)
                                if META_TYPE[3] in field_instance.member_of_names_list:
                                    # If Binary relationship found: [id, id, ...]
                                    for i, field_value in enumerate(field_value_list):
                                        field_value = parse_template_data(field_data_type, field_value, field_instance=field_instance, date_format_string="%m/%d/%Y %H:%M")
                                        field_value_list[i] = field_value
                                else:
                                    # Relationship other than Binary; e.g. Triadic:
                                    # [[id, id, ...], [id, id, ...], ...]
                                    for i, field_value_inner_list in enumerate(field_value_list):
                                        for j, field_value in enumerate(field_value_inner_list):
                                            field_value = parse_template_data(field_data_type, field_value, field_instance=field_instance, date_format_string="%m/%d/%Y %H:%M")
                                            field_value_list[i][j] = field_value
                                person_gs_triple_instance = create_grelation(person_gs._id, node_collection.collection.RelationType(field_instance), field_value_list)
            # Setting enrollment code for student node only while creating it
            if create_student_enrollment_code:
                # Create enrollment code for student node only while registering a new node
                for rel in college_node.relation_set:
                    if rel and "group_of" in rel:
                        college_id = rel["group_of"][0]
                student_enrollment_code = get_student_enrollment_code(college_id, person_gs._id, registration_date, ObjectId(group_id))
                enrollment_code_at = node_collection.one({"_type": "AttributeType", "name": "enrollment_code"})
                try:
                    ga_node = create_gattribute(person_gs._id, enrollment_code_at, student_enrollment_code)
                except Exception as e:
                    print "\n StudentEnrollmentCreateError: " + str(e) + "!!!"
            # [C] Code to link GSystem Node and Author node via "has_login" relationship;
            # and Subscribe the Author node to College group if user "Program Officer"
            person_gs.reload()
            auth_node = None
            # Find the Author whose email matches the person's email_id attribute.
            for attr in person_gs.attribute_set:
                if "email_id" in attr:
                    if attr["email_id"]:
                        auth_node = node_collection.one({'_type': "Author", 'email': attr["email_id"].lower()})
                        break
            if auth_node:
                has_login_rt = node_collection.one({'_type': "RelationType", 'name': "has_login"})
                if has_login_rt:
                    # Linking GSystem Node and Author node via "has_login" relationship;
                    gr_node = create_grelation(person_gs._id, has_login_rt, auth_node._id)
                    # Set author_agency field's value of author node as "Program Officer"
                    # Required to identify at time of log-in in order to display
                    # required modules defined for Program Officers under MIS GApp
                    if auth_node.agency_type != u"Program Officer":
                        auth_node.agency_type = u"Program Officer"
                        auth_node.save(groupid=group_id)
                    if "Program Officer" in person_gs.member_of_names_list:
                        # If Person node (GSystem) is of Program Officer type
                        # then only go for subscription
                        college_id_list = []
                        # Fetch College's ObjectId to which Program Officer is
                        # assigned (via "officer_incharge_of")
                        for rel in person_gs.relation_set:
                            if "officer_incharge_of" in rel:
                                if rel["officer_incharge_of"]:
                                    for college_id in rel["officer_incharge_of"]:
                                        if college_id not in college_id_list:
                                            college_id_list.append(college_id)
                                    break  # break outer-loop (of relation_set)
                        if college_id_list:
                            # PO may be assigned to more than one college; collect
                            # each college's private group (via "has_group").
                            college_cur = node_collection.find({'_id': {'$in': college_id_list}}, {'relation_set.has_group': 1})
                            college_group_id_list = []
                            for college in college_cur:
                                for rel in college.relation_set:
                                    if rel and "has_group" in rel:
                                        if rel["has_group"]:
                                            if rel["has_group"][0] not in college_group_id_list:
                                                college_group_id_list.append(rel["has_group"][0])
                                        break  # break inner-loop (college.relation_set)
                            if college_group_id_list:
                                # Make the PO an admin of each college group.
                                res = node_collection.collection.update({'_id': {'$in': college_group_id_list}}, {'$addToSet': {'group_admin': auth_node.created_by}}, upsert=False, multi=True)
                            old_college_id_list = []
                            if existing_colg:
                                if len(existing_colg) > len(college_id_list):
                                    # PO was removed from some colleges: revoke admin there.
                                    for each_colg_id in existing_colg:
                                        if each_colg_id not in college_id_list:
                                            old_college_id_list.append(each_colg_id)
                                    old_college_cur = node_collection.find({'_id': {'$in': old_college_id_list}}, {'relation_set.has_group': 1})
                                    old_college_group_id_list = []
                                    for college in old_college_cur:
                                        for rel in college.relation_set:
                                            if rel and "has_group" in rel:
                                                if rel["has_group"]:
                                                    if rel["has_group"][0] not in old_college_group_id_list:
                                                        old_college_group_id_list.append(rel["has_group"][0])
                                                break  # break inner-loop (college.relation_set)
                                    if old_college_group_id_list:
                                        # Pull the PO from group_admin/author_set of old groups.
                                        res = node_collection.collection.update({'_id': {'$in': old_college_group_id_list}, '$or': [{'group_admin': auth_node.created_by}, {'author_set': auth_node.created_by}]}, {'$pull': {'group_admin': auth_node.created_by, 'author_set': auth_node.created_by}}, upsert=False, multi=True)
                # (Legacy commented-out code that reset agency_type to u"Other"
                # lived here; removed as the college groups are PRIVATE.)
                auth_node.reload()
            return HttpResponseRedirect(reverse('mis:mis_app_instance_detail', kwargs={'group_id': group_id, "app_id": app_id, "app_set_id": app_set_id, "app_set_instance_id": unicode(person_gs._id)}))
    default_template = "ndf/person_create_edit.html"
    context_variables = {'groupid': group_id, 'group_id': group_id, 'app_id': app_id, 'app_name': app_name, 'app_collection_set': app_collection_set, 'app_set_id': app_set_id, 'title': title, 'property_order_list': property_order_list}
    if person_gst and person_gst.name in ["Voluntary Teacher", "Master Trainer"]:
        nussd_course_type = node_collection.one({'_type': "AttributeType", 'name': "nussd_course_type"}, {'_type': 1, '_id': 1, 'data_type': 1, 'complex_data_type': 1, 'name': 1, 'altnames': 1})
        if nussd_course_type["data_type"] == "IS()":
            # Below code does little formatting, for example:
            # data_type: "IS()" complex_value: [u"ab", u"cd"] dt: "IS(u'ab', u'cd')"
            dt = "IS("
            for v in nussd_course_type.complex_data_type:
                dt = dt + "u'" + v + "'" + ", "
            dt = dt[:(dt.rfind(", "))] + ")"
            nussd_course_type["data_type"] = dt
        # NOTE(review): eval of a DB-stored data_type string — assumed to be
        # a trusted IS(...) expression defined by administrators; confirm.
        nussd_course_type["data_type"] = eval(nussd_course_type["data_type"])
        nussd_course_type["value"] = None
        context_variables['nussd_course_type'] = nussd_course_type
    if app_set_instance_id:
        # Load the node's neighbourhood so relation values are accessible
        # as attributes for template display.
        person_gs.get_neighbourhood(person_gs.member_of)
        if "trainer_teaches_course_in_college" in person_gs:
            # Flatten (course, college) pairs into display tuples:
            # (course_type, course_name, college_name, course_id, college_id, university_name)
            l = []
            for each_course_college in person_gs.trainer_teaches_course_in_college:
                # Fetch Course Type (i.e. nussd_course_type)
                ct = ""
                for each_attr in each_course_college[0].attribute_set:
                    if "nussd_course_type" in each_attr and each_attr:
                        ct = each_attr["nussd_course_type"]
                        break
                univ_name = ""
                for each_rel in each_course_college[1].relation_set:
                    if "college_affiliated_to" in each_rel and each_rel:
                        univ = node_collection.find_one({"_id": {"$in": each_rel["college_affiliated_to"]}})
                        univ_name = univ.name if univ else ""
                l.append((ct, each_course_college[0].name, each_course_college[1].name, each_course_college[0]._id.__str__(), each_course_college[1]._id.__str__(), univ_name))
            if l:
                person_gs.trainer_teaches_course_in_college = l
    context_variables['node'] = person_gs
    try:
        # Type-specific template first, generic person template as fallback.
        return render_to_response([template, default_template], context_variables, context_instance=RequestContext(request))
    except TemplateDoesNotExist as tde:
        error_message = "\n PersonCreateEditViewError: This html template (" + str(tde) + ") does not exists !!!\n"
        raise Http404(error_message)
    except Exception as e:
        error_message = "\n PersonCreateEditViewError: " + str(e) + " !!!\n"
        raise Exception(error_message)
def create_edit_task(request, group_name, task_id=None, task=None, count=0):
    """Create a new Task or edit an existing one.

    On POST with no ``task_id``, creates one Task node per assignee (plus a
    parent Task collecting them when there are several assignees), or a single
    group-assigned Task; on POST with a ``task_id``, delegates the changes to
    ``update()``.  Either way it then redirects to the task-details page.
    On GET it fills ``blank_dict`` with the task's current attribute/relation
    values (edit mode) and renders the create/edit form.

    Args:
        request: Django HttpRequest.
        group_name: a Group name or a Group ObjectId hex string; when no
            Group matches, the requesting user's Author node is used instead.
        task_id: hex ObjectId string of an existing Task (edit mode) or None.
        task: not used in this function body — TODO confirm against callers.
        count: not used in this function body — TODO confirm against callers.

    Returns:
        HttpResponseRedirect (POST) or the rendered
        ``ndf/task_create_edit.html`` response (GET).
    """
    edit_task_node = ""       # NOTE(review): assigned but never used below
    parent_task_check = ""    # NOTE(review): assigned but never used below
    userlist = []
    # Resolve the group: group_name may be a plain name or an ObjectId string.
    if ObjectId.is_valid(group_name) is False:
        group_ins = node_collection.find_one({
            '_type': "Group",
            "name": group_name
        })
        auth = node_collection.one({
            '_type': 'Author',
            'name': unicode(request.user.username)
        })
    elif ObjectId.is_valid(group_name) is True:
        group_ins = node_collection.find_one({
            '_type': "Group",
            "_id": ObjectId(group_name)
        })
        auth = node_collection.one({
            '_type': 'Author',
            'name': unicode(request.user.username)
        })
    if group_ins:
        group_id = str(group_ins._id)
    else:
        # No matching Group: fall back to the user's Author node as the group.
        auth = node_collection.one({
            '_type': 'Author',
            'name': unicode(request.user.username)
        })
        if auth:
            group_id = str(auth._id)
        # NOTE(review): if neither group_ins nor auth exists, group_id stays
        # unbound and the code below raises NameError — confirm that callers
        # guarantee an authenticated user.
    blank_dict = {}
    collection_set_ids = []
    userlist = []  # NOTE(review): re-initialised; still unused afterwards
    # Attribute-type names handled by this form.
    at_list = [
        "Status", "start_time", "Priority", "end_time", "Assignee",
        "Estimated_time", "Upload_Task"
    ]  # fields
    # Relation-type names handled by this form.
    rt_list = ["has_type"]

    if request.method == "POST":
        # create or edit
        if not task_id:
            # create
            task_type = request.POST.get("assignees", "")
            Assignees = request.POST.get("Assignee", "").split(',')
            # NOTE(review): int('') raises ValueError when the Assignee field
            # is empty — presumably the form always submits at least one id;
            # verify against the template.
            Assignees = [int(x) for x in Assignees]
            if task_type != "Group Assignees":
                # One Task node per (non-zero) assignee id.
                for i in Assignees:
                    if i:
                        task_node = create_task(request, task_id, group_id)
                        create_task_at_rt(request, rt_list, at_list,
                                          task_node, i, group_name, group_id)
                        collection_set_ids.append(ObjectId(task_node._id))
                if len(Assignees) > 1:
                    # Extra umbrella Task (assigned to the creator) whose
                    # collection_set groups the per-assignee tasks.
                    task_node = create_task(request, task_id, group_id)
                    task_node.collection_set = collection_set_ids
                    task_node.save(groupid=group_id)
                    create_task_at_rt(request, rt_list, at_list, task_node,
                                      request.user.id, group_name, group_id)
            else:
                # Single Task assigned to the whole list at once.
                task_node = create_task(request, task_id, group_id)
                create_task_at_rt(request, rt_list, at_list, task_node,
                                  Assignees, group_name, group_id)
        else:
            # update
            task_node = node_collection.one({
                '_type': u'GSystem',
                '_id': ObjectId(task_id)
            })
            update(request, rt_list, at_list, task_node, group_id, group_name)
        return HttpResponseRedirect(
            reverse('task_details',
                    kwargs={
                        'group_name': group_name,
                        'task_id': str(task_node._id)
                    }))

    # Filling blank_dict in below if block (GET in edit mode: pre-populate the
    # form with the task's stored attribute values).
    if task_id:
        task_node = node_collection.one({
            '_type': u'GSystem',
            '_id': ObjectId(task_id)
        })
        for each in at_list:
            attributetype_key = node_collection.find_one({
                "_type": 'AttributeType',
                'name': each
            })
            attr = triple_collection.find_one({
                "_type": "GAttribute",
                "subject": task_node._id,
                "attribute_type": attributetype_key._id
            })
            if attr:
                if each == "Upload_Task":
                    # object_value holds a stringified list of file names
                    # and/or file ObjectIds; resolve each to a File node name.
                    file_list = []
                    new_list = []
                    files = str(attr.object_value).split(',')
                    for i in files:
                        # Strip the list/repr punctuation left by str() above.
                        files_name = str(i.strip(' [](\'u\' '))
                        new_list.append(files_name)
                    ins_objectid = ObjectId()
                    for i in new_list:
                        if ins_objectid.is_valid(i) is False:
                            filedoc = node_collection.find({
                                '_type': 'File',
                                'name': unicode(i)
                            })
                        else:
                            filedoc = node_collection.find({
                                '_type': 'File',
                                '_id': ObjectId(i)
                            })
                        if filedoc:
                            # NOTE(review): loop variable shadows the outer
                            # `i`; harmless here since `i` isn't reused after.
                            for i in filedoc:
                                file_list.append(i.name)
                    blank_dict[each] = json.dumps(file_list)
                    blank_dict['select'] = json.dumps(new_list)
                else:
                    blank_dict[each] = attr.object_value
        if task_node.prior_node:
            # Parent task (only the first prior_node is surfaced).
            pri_node = node_collection.one({'_id': task_node.prior_node[0]})
            blank_dict['parent'] = pri_node.name
            blank_dict['parent_id'] = str(pri_node._id)
        # Appending TaskType to blank_dict, i.e. "has_type" relationship
        if task_node.relation_set:
            for rel in task_node.relation_set:
                for k in rel:
                    blank_dict[k] = rel[k]
        blank_dict["node"] = task_node
        # Build a comma-separated string of assignee usernames.
        # NOTE(review): raises KeyError if the task has no stored "Assignee"
        # attribute — presumably create_task_at_rt always sets one; verify.
        Assignee = ""
        for i in blank_dict["Assignee"]:
            Assignee_name = (User.objects.get(id=int(i)))
            Assignee = Assignee_name.username + "," + Assignee
        blank_dict["Assignee_name"] = Assignee
        # Appending Watchers to blank_dict, i.e. values of node's author_set
        # field
        if task_node.author_set:
            watchers_list = []
            for eachid in task_node.author_set:
                if eachid not in watchers_list:
                    watchers_list.append(eachid)
            blank_dict["Watchers"] = watchers_list

    # Fetch Task Type list values (choices for the has_type dropdown) from
    # the "TaskType" GList node's collection_set.
    glist = node_collection.one({
        '_type': "GSystemType",
        'name': "GList"
    }, {'name': 1})
    task_type_node = node_collection.one(
        {
            '_type': "GSystem",
            'member_of': glist._id,
            'name': "TaskType"
        }, {'collection_set': 1})
    task_type_list = []
    for task_type_id in task_type_node.collection_set:
        task_type = node_collection.one({'_id': task_type_id}, {'name': 1})
        if task_type:
            if task_type not in task_type_list:
                task_type_list.append(task_type)
    var = {
        'title': 'Task',
        'task_type_choices': task_type_list,
        'group_id': group_id,
        'groupid': group_id,
        'group_name': group_name,
        # `app` is presumably the module-level Task GSystemType node —
        # TODO confirm; it is not defined in this function.
        'appId': app._id,
        # 'node': task_node, 'task_id': task_id
        'task_id': task_id
    }
    var.update(blank_dict)
    context_variables = var
    return render_to_response("ndf/task_create_edit.html",
                              context_variables,
                              context_instance=RequestContext(request))
try: from bson import ObjectId except ImportError: # old pymongo from pymongo.objectid import ObjectId # from gnowsys_ndf.ndf.models import File ''' -- imports from application folders/files -- ''' from gnowsys_ndf.settings import META_TYPE, GAPPS # , MEDIA_ROOT from gnowsys_ndf.ndf.models import node_collection # , triple_collection from gnowsys_ndf.ndf.views.methods import get_node_common_fields, get_node_metadata, create_thread_for_node from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, delete_grelation from gnowsys_ndf.ndf.templatetags.ndf_tags import get_relation_value from gnowsys_ndf.ndf.views.methods import get_execution_time, create_grelation_list, node_thread_access, get_group_name_id gapp_mt = node_collection.one({'_type': "MetaType", 'name': META_TYPE[0]}) GST_VIDEO = node_collection.one({'member_of': gapp_mt._id, 'name': GAPPS[4]}) file_gst = node_collection.find_one( { "_type" : "GSystemType","name":"File" } ) @get_execution_time def videoDashboard(request, group_id): # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group","name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass
def update(request, rt_list, at_list, task_node, group_id, group_name):
    """Apply POSTed changes to an existing Task node.

    For each relation type in ``rt_list`` and attribute type in ``at_list``,
    compares the submitted value with the stored one, records a human-readable
    description of every change in ``change_list``, writes the new value
    (via ``create_grelation`` / ``create_gattribute`` / ``attr.save``),
    notifies assignees and watchers, and appends a ``task_update_history``
    node to the task's ``post_node``.

    Args:
        request: Django HttpRequest (POST carries the form fields).
        rt_list: relation-type names to process (e.g. ``["has_type"]``).
        at_list: attribute-type names to process (Status, start_time, ...).
        task_node: the Task GSystem node being updated.
        group_id: group ObjectId string used for ``save(groupid=...)``.
        group_name: group name used to build the notification URL.

    Returns:
        None; all effects are database writes and notifications.
    """
    file_id = (request.POST.get("files"))
    file_name = (request.POST.get("files_name"))
    user_to_be_notified = []
    assignee_list = []
    change_list = []

    # ---- Relation-type fields (e.g. has_type) ----
    for each in rt_list:
        rel_type_node = node_collection.one({
            '_type': "RelationType",
            'name': each
        })
        field_value_list = None
        if rel_type_node["object_cardinality"] > 1:
            # Multi-valued relation: value may arrive as a JSON list string.
            field_value_list = request.POST.get(rel_type_node["name"], "")
            if "[" in field_value_list and "]" in field_value_list:
                field_value_list = json.loads(field_value_list)
            else:
                field_value_list = request.POST.getlist(rel_type_node["name"])
        else:
            field_value_list = request.POST.getlist(rel_type_node["name"])
        # Normalise each submitted value to the relation's object type.
        for i, field_value in enumerate(field_value_list):
            field_value = parse_template_data(rel_type_node.object_type,
                                              field_value,
                                              field_instance=rel_type_node)
            field_value_list[i] = field_value

        # Names of the currently related nodes, for the change message.
        old_value = []
        for rel in task_node.relation_set:
            for k in rel:
                if rel_type_node.name == k:
                    vals_cur = node_collection.find({'_id': {
                        '$in': rel[k]
                    }}, {'name': 1})
                    for v_node in vals_cur:
                        old_value.append(v_node.name)
                    break  # matching key found; stop scanning this rel dict

        # Names of the newly submitted nodes.
        new_value = []
        vals_cur = node_collection.find({'_id': {
            '$in': field_value_list
        }}, {'name': 1})
        for v_node in vals_cur:
            new_value.append(v_node.name)
            break  # NOTE(review): only the first name is recorded, so the
            # change message (and the != comparison) sees at most one new
            # value even for multi-valued relations — confirm intent.

        if old_value != new_value:
            change_list.append(
                each.encode('utf8') + ' changed from ' +
                ", ".join(old_value) + ' to ' +
                ", ".join(new_value))  # updated details
        # Write the relation regardless of whether it changed.
        task_gs_triple_instance = create_grelation(
            task_node._id,
            node_collection.collection.RelationType(rel_type_node),
            field_value_list)
    task_node.reload()

    # ---- Attribute-type fields ----
    for each in at_list:
        if request.POST.get(each, ""):
            attributetype_key = node_collection.find_one({
                "_type": 'AttributeType',
                'name': each
            })
            attr = triple_collection.find_one({
                "_type": "GAttribute",
                "subject": task_node._id,
                "attribute_type": attributetype_key._id
            })
            if each == "Assignee":
                # Assignee is multi-valued: a list of user ids.
                field_value = request.POST.getlist(each, "")
                for i, val in enumerate(field_value):
                    field_value[i] = int(val)
                assignee_list_id = field_value
                for eachuser in assignee_list_id:
                    bx = User.objects.get(id=int(eachuser))
                    if bx:
                        if bx.username not in assignee_list:
                            assignee_list.append(bx.username)
                        # Adding to list which holds user's to be notified
                        # about the task
                        if bx not in user_to_be_notified:
                            user_to_be_notified.append(bx)
            else:
                field_value = request.POST.get(each, "")
            date_format_string = ""
            if each in ["start_time", "end_time"]:
                date_format_string = "%d/%m/%Y"
            # NOTE(review): eval() of the DB-stored data_type string — this
            # trusts AttributeType documents completely; flagging rather than
            # changing, since the whole codebase relies on this pattern.
            field_value = parse_template_data(
                eval(attributetype_key["data_type"]),
                field_value,
                date_format_string=date_format_string)
            if attr:
                # already attribute exist
                if not attr.object_value == field_value:
                    # change_list.append(each.encode('utf8')+' changed from '+attr.object_value.encode('utf8')+' to '+request.POST.get(each,"").encode('utf8')) # updated details
                    if attributetype_key["data_type"] == "datetime.datetime":
                        change_list.append(
                            each.encode('utf8') + ' changed from ' +
                            attr.object_value.strftime("%d/%m/%Y") +
                            ' to ' +
                            field_value.strftime("%d/%m/%Y")
                        )  # updated details
                    else:
                        change_list.append(
                            each.encode('utf8') + ' changed from ' +
                            str(attr.object_value) + ' to ' +
                            str(field_value))  # updated details
                    attr.object_value = field_value
                    attr.save(groupid=group_id)
            else:
                # attributetype_key = node_collection.find_one({"_type":'AttributeType', 'name':each})
                # newattribute = triple_collection.collection.GAttribute()
                # newattribute.subject = task_node._id
                # newattribute.attribute_type = attributetype_key
                # newattribute.object_value = request.POST.get(each,"")
                # newattribute.object_value = field_value
                # newattribute.save()
                ga_node = create_gattribute(task_node._id, attributetype_key,
                                            field_value)
                # change_list.append(each.encode('utf8')+' set to '+request.POST.get(each,"").encode('utf8')) # updated details
                change_list.append(
                    each.encode('utf8') + ' set to ' +
                    str(field_value))  # updated details
        elif each == 'Upload_Task':
            # Upload_Task arrives in the "files"/"files_name" POST fields, so
            # it is handled even when POST[each] itself is empty.
            attributetype_key = node_collection.find_one({
                "_type": 'AttributeType',
                'name': 'Upload_Task'
            })
            attr = triple_collection.find_one({
                "_type": "GAttribute",
                "subject": task_node._id,
                "attribute_type": attributetype_key._id
            })
            if attr:
                value = get_file_node(attr.object_value)
                change_list.append(
                    each.encode('utf8') + ' changed from ' +
                    str(value).strip('[]') + ' to ' + str(file_name))
                # attr.object_value=file_id
                # attr.save()
                ga_node = create_gattribute(attr.subject, attributetype_key,
                                            file_id)
            else:
                # newattribute = node_collection.collection.GAttribute()
                # newattribute.subject = task_node._id
                # newattribute.attribute_type = attributetype_key
                # newattribute.object_value = file_id
                # newattribute.save()
                ga_node = create_gattribute(task_node._id, attributetype_key,
                                            file_id)
                change_list.append(
                    each.encode('utf8') + ' set to ' +
                    file_name.encode('utf8'))  # updated details

    # Watchers (author_set) are notified in addition to assignees.
    # userobj = User.objects.get(id=task_node.created_by)
    # if userobj and userobj not in user_to_be_notified:
    #     user_to_be_notified.append(userobj)
    for each_author in task_node.author_set:
        each_author = User.objects.get(id=each_author)
        if each_author and each_author not in user_to_be_notified:
            user_to_be_notified.append(each_author)

    # Sending notification to all watchers about the updates of the task
    for eachuser in user_to_be_notified:
        activ = "task updated"
        # `site_domain` is presumably a module-level setting — TODO confirm;
        # it is not defined in this function.
        msg = "Task '" + task_node.name + \
            "' has been updated by " + request.user.username + \
            "\n - Changes: " + str(change_list).strip('[]') + \
            "\n - Status: " + request.POST.get('Status','') + \
            "\n - Assignee: " + ", ".join(assignee_list) + \
            "\n - Url: http://" + site_domain + "/" + group_name.replace(" ","%20").encode('utf8') + "/task/" + str(task_node._id)
        # NOTE(review): eachuser is a User object, so this looks up
        # username=<User instance>; Django coerces via __str__, but passing
        # eachuser.username would be the intent — confirm before changing.
        bx = User.objects.get(username=eachuser)
        set_notif_val(request, group_id, msg, activ, bx)

    # Record the change set as a task_update_history node linked via
    # prior_node/post_node.
    # NOTE(review): `content_org` is not defined anywhere in this function —
    # unless it is a module-level global this line raises NameError; it looks
    # like `content_org = request.POST.get("content_org", "")` was lost.
    # Confirm against the module before fixing.
    if change_list or content_org:
        GST_task_update_history = node_collection.one({
            '_type': "GSystemType",
            'name': 'task_update_history'
        })
        update_node = node_collection.collection.GSystem()
        get_node_common_fields(request, update_node, group_id,
                               GST_task_update_history)
        if change_list:
            update_node.altnames = unicode(str(change_list))
        else:
            update_node.altnames = unicode('[]')
        update_node.prior_node = [task_node._id]
        update_node.name = unicode(task_node.name + "-update_history")
        # First save assigns _id; the node is then renamed to include it and
        # saved again.
        update_node.save(groupid=group_id)
        update_node.name = unicode(task_node.name + "-update_history-" +
                                   str(update_node._id))
        update_node.save(groupid=group_id)
        task_node.post_node.append(update_node._id)
        task_node.save(groupid=group_id)

    # patch
    GST_TASK = node_collection.one({
        '_type': "GSystemType",
        'name': 'Task'
    })
    get_node_common_fields(request, task_node, group_id, GST_TASK)
    task_node.save(groupid=group_id)