def delete_task(request, group_name, _id): """This method will delete task object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_name) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_name}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_name) except: group_name, group_id = get_group_name_id(group_name) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({ '_type': 'GAttribute', 'subject': node._id }) relations = triple_collection.find({ '_type': 'GRelation', 'subject': node._id }) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() if len(node.post_node) > 0: for each in node.post_node: sys_each_postnode = node_collection.find_one({'_id': each}) member_of_name = node_collection.find_one({ '_id': sys_each_postnode.member_of[0] }).name if member_of_name == "Task": sys_each_postnode.prior_node.remove(node._id) sys_each_postnode.save(groupid=group_id) if member_of_name == "task_update_history": sys_each_postnode.delete(groupid=group_id) node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect( reverse('task', kwargs={'group_name': group_name}))
def delete_batch(request, group_id, _id):
    """Delete a batch node together with every triple attached to it.

    Incoming relations (where the batch appears as right_subject) are
    also scrubbed from the owning node's cached relation_set before the
    GRelation document itself is removed. Redirects back to the group's
    batch listing afterwards.
    """
    group_name, group_id = get_group_name_id(group_id)

    batch_node = node_collection.one({'_id': ObjectId(_id)})

    # Relations pointing AT this node (it is the right subject).
    incoming_rels = triple_collection.find({
        "_type": "GRelation", "right_subject": batch_node._id
    })
    for rel in incoming_rels:
        # The left-subject node caches this relation inside its
        # relation_set field; pull our ObjectId out before dropping
        # the GRelation document.
        node_collection.collection.update(
            {'_id': rel.subject,
             'relation_set.' + rel.relation_type.name: {'$exists': True}},
            {'$pull': {'relation_set.$.' + rel.relation_type.name: batch_node._id}},
            upsert=False, multi=False)
        rel.delete()

    # Triples where this node is the (left) subject: relations and
    # attributes alike are deleted.
    outgoing_rels = triple_collection.find({
        "_type": "GRelation", "subject": batch_node._id
    })
    own_attributes = triple_collection.find({
        "_type": "GAttribute", "subject": batch_node._id
    })
    for triple in list(outgoing_rels) + list(own_attributes):
        triple.delete()

    # Finally remove the batch node itself.
    batch_node.delete()
    return HttpResponseRedirect(reverse('batch', kwargs={'group_id': group_id}))
def delete_task(request, group_name, _id): """This method will delete task object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_name) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_name}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_name) except: group_name, group_id = get_group_name_id(group_name) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({'_type': 'GAttribute', 'subject': node._id}) relations = triple_collection.find({'_type': 'GRelation', 'subject': node._id}) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() if len(node.post_node) > 0 : for each in node.post_node : sys_each_postnode = node_collection.find_one({'_id': each}) member_of_name = node_collection.find_one({'_id': sys_each_postnode.member_of[0]}).name if member_of_name == "Task" : sys_each_postnode.prior_node.remove(node._id) sys_each_postnode.save(groupid=group_id) if member_of_name == "task_update_history": sys_each_postnode.delete(groupid=group_id) node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(reverse('task', kwargs={'group_name': group_name }))
def get_triple_data(node_id):
    '''
    Gets all data stored in triples for this node.
    Fetches GAttributes as well as GRelations, dumps each triple, and
    also dumps any node(s) referenced by the triple's value field
    (object_value for GAttribute, right_subject for GRelation).
    '''
    triple_query = {"_type": {'$in': ["GAttribute", "GRelation"]},
                    "subject": node_id}
    node_gattr_grel_cur = triple_collection.find(triple_query)
    if node_gattr_grel_cur:
        for each_triple_node in node_gattr_grel_cur:
            # Bug fix: reset per triple — previously a value set for one
            # triple could leak into the next iteration.
            fetch_value = None
            dump_node(node=each_triple_node, collection_name=triple_collection)
            # Bug fix: compare strings with ==, not "is". Identity
            # comparison of strings is an implementation detail and
            # could silently skip both branches.
            if each_triple_node._type == "GAttribute":
                fetch_value = "object_value"
            elif each_triple_node._type == "GRelation":
                fetch_value = "right_subject"
            if fetch_value:
                value = each_triple_node[fetch_value]
                # Dump referenced node(s) when the value is ObjectId-typed.
                if type(value) == list and all(
                        isinstance(each_obj_value, ObjectId)
                        for each_obj_value in value):
                    dump_node(node_id_list=value,
                              collection_name=node_collection)
                elif isinstance(value, ObjectId):
                    dump_node(node_id=value, collection_name=node_collection)
def batch_detail(request, group_id):
    """AJAX view: return the published members of a batch as JSON.

    Expects a GET parameter "batch_id" (stringified ObjectId of the
    batch node). Responds with a JSON list of the nodes related to the
    batch through the "has_batch_member" relation with PUBLISHED status.

    NOTE(review): a non-AJAX or non-GET request falls through and
    implicitly returns None (Django will raise) — preserved as-is.
    """
    group_name, group_id = get_group_name_id(group_id)
    # Removed unused local `new_batch_node` (was assigned, never read).
    if request.is_ajax() and request.method == "GET":
        batch_id = request.GET.get("batch_id", '')
        student_coll = []
        node = node_collection.one({'_id': ObjectId(batch_id)})
        rt_has_batch_member = node_collection.one({
            '_type': 'RelationType',
            'name': 'has_batch_member'
        })
        relation_coll = triple_collection.find({
            '_type': 'GRelation',
            'relation_type': rt_has_batch_member._id,
            'subject': node._id,
            'status': u'PUBLISHED'
        })
        for each in relation_coll:
            n = node_collection.one({'_id': ObjectId(each.right_subject)})
            student_coll.append(n)
        return HttpResponse(json.dumps(student_coll, cls=NodeJSONEncoder))
def detail(request, group_id, _id):
    """Render the batch detail page listing all published batch members."""
    batch_node = node_collection.one({'_id': ObjectId(_id)})
    has_batch_member_rt = node_collection.one({
        '_type': 'RelationType',
        'name': 'has_batch_member'
    })
    # All PUBLISHED has_batch_member relations whose subject is this batch.
    member_grels = triple_collection.find({
        '_type': 'GRelation',
        'relation_type': has_batch_member_rt._id,
        'subject': batch_node._id,
        'status': u'PUBLISHED'
    })
    student_coll = [
        node_collection.one({'_id': ObjectId(grel.right_subject)})
        for grel in member_grels
    ]
    context = RequestContext(request, {
        'node': batch_node,
        'node_name_human_readable': (batch_node.name).replace('_', ' '),
        'appId': app._id,
        'groupid': group_id,
        'group_id': group_id,
        'title': GST_BATCH.name,
        'student_coll': student_coll
    })
    return render_to_response("ndf/batch_detail.html", context)
def main():
    """Create Hindi translation nodes for app nodes listed in a CSV.

    Reads (name, translated_name) pairs from schema_file_csv, and for
    every matching GSystem node that is a member of `gapp`, ensures a
    translated node exists and is linked via a "translation_of"
    GRelation. Relies on module globals: schema_file_csv, gapp,
    node_collection, triple_collection, create_grelation.
    """
    columns = defaultdict(list)
    translation_dict={}
    get_translation_rt = node_collection.one({'_type': 'RelationType', 'name': u"translation_of"})
    with open(schema_file_csv, 'rb') as f:
        reader = csv.reader(f)
        i = 1  # NOTE(review): unused; immediately shadowed by the loop below.
        reader.next()  # skip the CSV header row
        # Transpose rows into columns: columns[0] = names, columns[1] = translations.
        for row in reader:
            for (i,v) in enumerate(row):
                columns[i].append(v)
        translation_dict=dict(zip(columns[0],columns[1]))
    print translation_dict,"dict"
    for k, v in translation_dict.items():
        app_items = node_collection.find({'name':k})
        for each in list(app_items):
            # Only consider nodes that are members of the target app type.
            get_node = node_collection.one({'_id': ObjectId(each._id), 'member_of': gapp._id})
            if get_node:
                name=v.decode('utf-8')
                print get_node.name
                # Existing translation_of relation for this node, if any.
                node_rt = triple_collection.find({'_type': "GRelation", 'subject': get_node._id, 'relation_type': get_translation_rt._id})
                if node_rt.count() > 0:
                    node = node_collection.one({'_id': ObjectId(node_rt[0].right_subject) })
                else:
                    node = None
                    node_rt = None
                if node is None:
                    # No translated node yet: create a DRAFT Hindi GSystem.
                    node = node_collection.collection.GSystem()
                    node.name = unicode(name)
                    node.access_policy = u"PUBLIC"
                    node.contributors.append(1)
                    node.created_by = 1
                    #node.group_set.append(get_group._id)
                    node.language = u"hi"
                    node.member_of.append(gapp._id)
                    node.modified_by = 1
                    node.status = u"DRAFT"
                    node.save()
                    print "\nTranslated Node ",node.name," created successfully\n"
                else:
                    print "\nTranslated node ",node.name," already exists\n"
                if node_rt is None:
                    # Link original -> translated node via translation_of.
                    relation_type = node_collection.one({'_type': 'RelationType', 'name':'translation_of'})
                    gr_node = create_grelation(each._id, relation_type, node._id)
                    # grelation = triple_collection.collection.GRelation()
                    # grelation.relation_type=relation_type
                    # grelation.subject=each._id
                    # grelation.right_subject=node._id
                    # grelation.name=u""
                    # grelation.save()
                    print "\nGRelation for node ",node.name," created sucessfully!!"
                else:
                    print "\nGRelation for node ",node.name," already exists\n"
def delete_module(request, group_id, _id): """This method will delete module object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_id) except: group_name, group_id = get_group_name_id(group_id) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({ '_type': 'GAttribute', 'subject': node._id }) relations = triple_collection.find({ '_type': 'GRelation', 'subject': node._id }) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(pageurl)
def get_triple_data(node_id):
    '''
    Gets all data stored in triples for this node.
    Fetches GAttrtibutes as wells as GRelations.
    '''
    # Logging variant: writes a trace of every step to the module-level
    # log_file. Exact log ordering is part of this function's contract.
    try:
        global log_file
        log_file.write("\n get_triple_data invoked for: " + str(node_id))
        triple_query = {
            "_type": {
                '$in': ["GAttribute", "GRelation"]
            },
            "subject": ObjectId(node_id)
        }
        node_gattr_grel_cur = triple_collection.find(triple_query)
        if node_gattr_grel_cur:
            for each_triple_node in node_gattr_grel_cur:
                # Reset for each triple; decided by the triple's _type below.
                fetch_value = None
                dump_node(node=each_triple_node, collection_name=triple_collection)
                # Get ObjectIds in object_value fields
                if each_triple_node._type == u"GAttribute":
                    fetch_value = "object_value"
                elif each_triple_node._type == u"GRelation":
                    fetch_value = "right_subject"
                log_file.write("\n fetch_value: " + str(fetch_value))
                if fetch_value == "right_subject":
                    log_file.write("\n Picking up right-subject nodes.\n\t " + str(each_triple_node[fetch_value]))
                # Dump referenced node(s) when the value holds ObjectId(s).
                if type(each_triple_node[fetch_value]) == list and all(
                        isinstance(each_obj_value, ObjectId)
                        for each_obj_value in each_triple_node[fetch_value]):
                    log_file.write("\n List: " + str(True))
                    dump_node(node_id_list=each_triple_node[fetch_value],
                              collection_name=node_collection)
                elif isinstance(each_triple_node[fetch_value], ObjectId):
                    log_file.write("\n ObjectId: " + str(True))
                    dump_node(node_id=each_triple_node[fetch_value],
                              collection_name=node_collection)
        log_file.write("\n get_triple_data finished for: " + str(node_id))
    except Exception as get_triple_data_err:
        # Best-effort: log and swallow so a single bad triple does not
        # abort the surrounding dump run.
        error_log = "\n !!! Error found while taking triple data in get_triple_data() ."
        error_log += "\nError: " + str(get_triple_data_err)
        print "\n Error: ", error_log
        log_file.write(str(error_log))
        # NOTE(review): error_log is printed twice (above and below).
        print error_log
        pass
def detail(request, group_id, _id):
    """Render the batch detail page with its published members."""
    batch = node_collection.one({'_id': ObjectId(_id)})
    member_rt = node_collection.one({
        '_type': 'RelationType',
        'name': 'has_batch_member'
    })
    # Legacy DBRef-style lookup keyed on relation_type.$id.
    grels = triple_collection.find({
        '_type': 'GRelation',
        'relation_type.$id': member_rt._id,
        'subject': batch._id,
        'status': u'PUBLISHED'
    })
    student_coll = []
    for grel in grels:
        member = node_collection.one({'_id': ObjectId(grel.right_subject)})
        student_coll.append(member)
    variable = RequestContext(request, {
        'node': batch,
        'node_name_human_readable': (batch.name).replace('_', ' '),
        'appId': app._id,
        'groupid': group_id,
        'group_id': group_id,
        'title': GST_BATCH.name,
        'student_coll': student_coll
    })
    return render_to_response("ndf/batch_detail.html", variable)
def delete_batch(request, group_id, _id): group_name, group_id = get_group_name_id(group_id) # if ObjectId.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group","name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass node = node_collection.one({'_id': ObjectId(_id)}) left_relations = triple_collection.find({"_type": "GRelation", "subject": node._id}) right_relations = triple_collection.find({"_type": "GRelation", "right_subject": node._id}) attributes = triple_collection.find({"_type": "GAttribute", "subject": node._id}) for eachobject in right_relations: # If given node is used in relationship with any other node (as right_subject) # Then this node's ObjectId must be removed from relation_set field of other node node_collection.collection.update( {'_id': eachobject.subject, 'relation_set.' + eachobject.relation_type.name: {'$exists': True}}, {'$pull': {'relation_set.$.' + eachobject.relation_type.name: node._id}}, upsert=False, multi=False ) eachobject.delete() all_associates = list(left_relations) + list(attributes) # Deleting GAttributes and GRelations where given node is used as left subject for eachobject in all_associates: eachobject.delete() # Finally deleting given node node.delete() return HttpResponseRedirect(reverse('batch', kwargs={'group_id': group_id}))
def save_batch(request, group_id):
    """AJAX view (POST): create a batch or add members to an existing one.

    POST parameters:
    ac_id -- ObjectId string of the course the batch belongs to
    batch_name -- name for a new batch (used when batch_id is empty)
    batch_id -- ObjectId string of an existing batch (skip creation)
    user_list[] -- ObjectId strings of users to add as batch members

    Returns a JSON dict with keys: success, and (on creation)
    new_batch_created / new_batch_node_name / new_batch_node_id.
    """
    # def save_batch(batch_name, user_list, group_id, request, ac_id):
    group_name, group_id = get_group_name_id(group_id)
    response_dict = {"success": False}
    # new_batch_node = None
    rt_has_batch_member = node_collection.one({'_type': 'RelationType', 'name': 'has_batch_member'})
    if request.is_ajax() and request.method == "POST":
        ac_id = request.POST.get("ac_id", '')
        batch_name = request.POST.get("batch_name", '')
        batch_id = request.POST.get("batch_id", '')
        user_list = request.POST.getlist("user_list[]", '')
        # create_new_batch = request.POST.get("create_new_batch", '')
        # response_dict["old_batches"] = find_batches_of_ac(ac_id)
        user_list = [ObjectId(each) for each in user_list]
        all_batches_in_grp = []
        if not batch_id:
            # No existing batch given: create a fresh GSystem batch node.
            # b_node = node_collection.one({'member_of':GST_BATCH._id,'name':unicode(batch_name)})
            b_node = node_collection.collection.GSystem()
            b_node.member_of.append(GST_BATCH._id)
            b_node.created_by = int(request.user.id)
            b_node.group_set.append(ObjectId(group_id))
            b_node.name = batch_name
            b_node['altnames'] = batch_name.replace('_', ' ')
            b_node.contributors.append(int(request.user.id))
            b_node.modified_by = int(request.user.id)
            b_node.save(groupid=group_id)
            all_batches_in_grp.append(b_node._id)
            rt_group_has_batch = node_collection.one({'_type': 'RelationType', 'name': 'group_has_batch'})
            relation_coll = triple_collection.find({'_type': 'GRelation', 'relation_type.$id': rt_group_has_batch._id,'subject':ObjectId(group_id)})
            for each in relation_coll:
                all_batches_in_grp.append(each.right_subject)
            # to get all batches of the group
            rt_has_course = node_collection.one({'_type': 'RelationType', 'name': 'has_course'})
            # Re-assert group -> batches and link the batch to its course.
            create_grelation(ObjectId(group_id), rt_group_has_batch, all_batches_in_grp)
            create_grelation(b_node._id, rt_has_course, ObjectId(ac_id))
            response_dict['new_batch_created'] = True
            response_dict['new_batch_node_name'] = b_node.name
            response_dict['new_batch_node_id'] = str(b_node._id)
        else:
            response_dict['new_batch_created'] = False
            b_node = node_collection.one({'_id': ObjectId(batch_id)})
        if user_list:
            # Attach the selected users as batch members.
            create_grelation(b_node._id, rt_has_batch_member, user_list)
        response_dict['success'] = True
    return HttpResponse(json.dumps(response_dict, cls=NodeJSONEncoder))
def delete_module(request, group_id, _id): """This method will delete module object and its Attribute and Relation """ # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_id) except: group_name, group_id = get_group_name_id(group_id) pageurl = request.GET.get("next", "") try: node = node_collection.one({'_id': ObjectId(_id)}) if node: attributes = triple_collection.find({'_type': 'GAttribute', 'subject': node._id}) relations = triple_collection.find({'_type': 'GRelation', 'subject': node._id}) if attributes.count() > 0: for each in attributes: triple_collection.one({'_id': each['_id']}).delete() if relations.count() > 0: for each in relations: triple_collection.one({'_id': each['_id']}).delete() node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(pageurl)
def batch_detail(request, group_id):
    """AJAX view: return the PUBLISHED members of a batch as a JSON list.

    Expects GET parameter "batch_id" (stringified ObjectId).
    NOTE(review): non-AJAX/non-GET requests fall through and return None.
    """
    group_name, group_id = get_group_name_id(group_id)
    # NOTE(review): new_batch_node is assigned but never used.
    new_batch_node = None
    if request.is_ajax() and request.method == "GET":
        batch_id = request.GET.get("batch_id", '')
        student_coll = []
        node = node_collection.one({'_id':ObjectId(batch_id)})
        rt_has_batch_member = node_collection.one({'_type':'RelationType','name':'has_batch_member'})
        # Legacy DBRef-style lookup keyed on relation_type.$id.
        relation_coll = triple_collection.find({'_type':'GRelation','relation_type.$id':rt_has_batch_member._id,'subject':node._id,'status':u'PUBLISHED'})
        for each in relation_coll:
            n = node_collection.one({'_id':ObjectId(each.right_subject)})
            student_coll.append(n)
        return HttpResponse(json.dumps(student_coll, cls=NodeJSONEncoder))
def show_translation(request, group_id, node_id, lang):
    '''
    for VIEW/READ: show translated provided node to provided LANG CODE
    lang could be either proper/full language-name/language-code
    '''
    node = translated_node_id = None
    # The translation is recorded as a "translation_of" GRelation tagged
    # with the target language tuple.
    translation_grel = triple_collection.one({
        '_type': 'GRelation',
        'subject': ObjectId(node_id),
        'relation_type': rt_translation_of._id,
        'language': get_language_tuple(lang),
        # 'status': 'PUBLISHED'
    })
    if translation_grel:
        node = Node.get_node_by_id(translation_grel.right_subject)
        translated_node_id = node._id

    # Collect every OTHER translation of the same source node.
    remaining_grels = triple_collection.find({
        '_type': u'GRelation',
        'subject': ObjectId(node_id),
        'relation_type': rt_translation_of._id,
        'right_subject': {'$nin': [translated_node_id]}
    })
    other_ids = [grel.right_subject for grel in remaining_grels]
    other_translations = node_collection.find({'_id': {'$in': other_ids}})

    resolved_group_id = Group.get_group_name_id(group_id)[1]
    return render_to_response("ndf/translate_detail.html", {
        'group_id': resolved_group_id,
        'groupid': resolved_group_id,
        'source_node_id': node_id,
        'source_node_obj': Node.get_node_by_id(node_id),
        'node': node,
        'other_translations': other_translations,
        'card_url_name': 'show_translation',
    }, context_instance=RequestContext(request))
def handle(self, *args, **options): pandora_video_st = node_collection.one({'_type': 'GSystemType','name': 'Pandora_video'}) source_id_at=node_collection.one({'$and':[{'name':'source_id'},{'_type':'AttributeType'}]}) if pandora_video_st and source_id_at: member_set=node_collection.find({'$and':[{'member_of': {'$all': [ObjectId(pandora_video_st._id)]}}, {'_type':'File'}]}) gattribute=triple_collection.find({'_type':'GAttribute', 'attribute_type':source_id_at._id }) for each in member_set: each.delete() print "Video ",each.name," removed from member_of successfully !!\n" for each in gattribute: each.delete() print "Video ",each.name," removed from attribute_set successfully !!\n"
def get_triple_data(node_id): ''' Gets all data stored in triples for this node. Fetches GAttrtibutes as wells as GRelations. ''' try: global log_file log_file.write("\n get_triple_data invoked for: " + str(node_id)) triple_query = {"_type": {'$in': ["GAttribute", "GRelation"]}, "subject": ObjectId(node_id)} node_gattr_grel_cur = triple_collection.find(triple_query) if node_gattr_grel_cur: for each_triple_node in node_gattr_grel_cur: fetch_value = None dump_node(node=each_triple_node, collection_name=triple_collection) # Get ObjectIds in object_value fields if each_triple_node._type == u"GAttribute": fetch_value = "object_value" elif each_triple_node._type == u"GRelation": fetch_value = "right_subject" log_file.write("\n fetch_value: " + str(fetch_value)) if fetch_value == "right_subject": log_file.write("\n Picking up right-subject nodes.\n\t " + str(each_triple_node[fetch_value])) if type(each_triple_node[fetch_value]) == list and all(isinstance(each_obj_value, ObjectId) for each_obj_value in each_triple_node[fetch_value]): log_file.write("\n List: " + str(True)) dump_node(node_id_list=each_triple_node[fetch_value], collection_name=node_collection) elif isinstance(each_triple_node[fetch_value], ObjectId): log_file.write("\n ObjectId: " + str(True)) dump_node(node_id=each_triple_node[fetch_value], collection_name=node_collection) log_file.write("\n get_triple_data finished for: " + str(node_id)) except Exception as get_triple_data_err: error_log = "\n !!! Error found while taking triple data in get_triple_data() ." error_log += "\nError: " + str(get_triple_data_err) print "\n Error: ", error_log log_file.write(error_log) print error_log pass
def user_template_view(request, group_id):
    """Render the user's task-card view with their recent activity.

    Gathers (1) the user's assigned tasks, (2) their notification
    settings, then (3) their four most recent created/modified nodes —
    but see the review notes below: only (3) actually reaches the
    template.
    """
    auth_group = None  # NOTE(review): assigned but never used.
    group_list=[]
    group_cur = node_collection.find({'_type': "Group", 'name': {'$nin': ["home", request.user.username]}}).limit(4)
    for i in group_cur:
        group_list.append(i)  # NOTE(review): group_list is never used afterwards.
    blank_list = []
    # Tasks assigned to the current user (via the Assignee attribute).
    attributetype_assignee = node_collection.find_one({"_type": 'AttributeType', 'name':'Assignee'})
    attr_assignee = triple_collection.find({"_type": "GAttribute", "attribute_type.$id":attributetype_assignee._id, "object_value":request.user.username})
    for attr in attr_assignee :
        task_node = node_collection.find_one({'_id': attr.subject})
        blank_list.append(task_node)
    # The user's notification settings.
    notification_object = notification.NoticeSetting.objects.filter(user_id=request.user.id)
    for each in notification_object:
        ntid = each.notice_type_id
        ntype = notification.NoticeType.objects.get(id=ntid)
        label = ntype.label.split("-")[0]
        blank_list.append({'label':label, 'display': ntype.display})
    blank_list.reverse()
    # NOTE(review): this reset discards everything gathered above (tasks
    # and notifications); only the activity items below reach the
    # template. Looks unintentional — confirm before changing.
    blank_list = []
    activity = ""
    # Four most recently updated nodes the user created or modified.
    activity_user = node_collection.find({'$and':[{'$or':[{'_type':'GSystem'},{'_type':'Group'},{'_type':'File'}]}, {'$or':[{'created_by':request.user.id}, {'modified_by':request.user.id}]}] }).sort('last_update', -1).limit(4)
    for each in activity_user:
        # NOTE(review): `activity` is computed but never used afterwards.
        if each.created_by == each.modified_by :
            if each.last_update == each.created_at:
                activity = 'created'
            else :
                activity = 'modified'
        else :
            activity = 'created'
        if each._type == 'Group':
            blank_list.append(each)
        else :
            # NOTE(review): member_of is fetched but never used.
            member_of = node_collection.find_one({"_id": each.member_of[0]})
            blank_list.append(each)
    print blank_list
    template = "ndf/task_card_view.html"
    #variable = RequestContext(request, {'TASK_inst': self_task,'group_name':group_name,'group_id': group_id, 'groupid': group_id,'send':send})
    variable = RequestContext(request, {'TASK_inst':blank_list,'group_name':group_id,'group_id': group_id, 'groupid': group_id})
    return render_to_response(template, variable)
def details(request, group_id, topic_id):
    """Render the WikiData topic-detail page for the selected topic.

    Arguments:
    request -- HttpRequest object
    group_id -- group name or stringified ObjectId of the group
    topic_id -- stringified ObjectId of the topic GSystem

    Returns the rendered "ndf/wikidata.html" response.
    """
    # group_id may already be a valid ObjectId string; otherwise
    # resolve the (name, id) pair through the helper.
    try:
        group_id = ObjectId(group_id)
    except Exception:
        group_name, group_id = get_group_name_id(group_id)

    selected_topic = node_collection.one({"_type": u"GSystem", "_id": ObjectId(topic_id)})
    topic_coll = node_collection.find({"_type": u"GSystem"})
    topic_count = topic_coll.count()

    attribute_set = triple_collection.find({"_type": u"GAttribute", "subject": ObjectId(topic_id)})

    # Flatten possible relations to {relation_name: right_subject_name}.
    # NOTE(review): when a relation has several subjects, only the LAST
    # one survives in the dict — preserved from the original behavior.
    relation_set = selected_topic.get_possible_relations(selected_topic.member_of)
    relation_set_dict = {}
    for rk, rv in relation_set.iteritems():
        if rv["subject_or_right_subject_list"]:
            for v in rv["subject_or_right_subject_list"]:
                relation_set_dict[rk] = v["name"]

    # Was the obfuscated `flag=0==1`; four unused context locals removed.
    flag = False
    template = "ndf/wikidata.html"
    return render(request, template, {
        'title': "WikiData Topics",
        'topic_coll': topic_coll,
        'selected_topic': selected_topic,
        'attribute_set': attribute_set,
        'relation_set': relation_set_dict,
        'groupid': group_id,
        'group_id': group_id,
        'flag': flag,
        'topic_count': topic_count,
        'node': selected_topic
    })
def show_translation(request, group_id, node_id, lang):
    '''
    for VIEW/READ: show translated provided node to provided LANG CODE
    lang could be either proper/full language-name/language-code
    '''
    node = translated_node_id = None
    # The translation is stored as a "translation_of" GRelation tagged
    # with the target language tuple.
    grel_node = triple_collection.one({
        '_type': 'GRelation',
        'subject': ObjectId(node_id),
        'relation_type': rt_translation_of._id,
        'language': get_language_tuple(lang),
        # 'status': 'PUBLISHED'
    })
    if grel_node:
        node = Node.get_node_by_id(grel_node.right_subject)
        translated_node_id = node._id
    # code to show other translations
    other_translations_grels = triple_collection.find({
        '_type': u'GRelation',
        'subject': ObjectId(node_id),
        'relation_type': rt_translation_of._id,
        'right_subject': {'$nin': [translated_node_id]}
    })
    other_translations = node_collection.find({'_id': {'$in': [r.right_subject for r in other_translations_grels]} })
    # --- END of code to show other translations
    return render_to_response("ndf/translate_detail.html", {
        'group_id': Group.get_group_name_id(group_id)[1],
        'groupid': Group.get_group_name_id(group_id)[1],
        'source_node_id': node_id,
        'source_node_obj': Node.get_node_by_id(node_id),
        'node': node,
        'other_translations': other_translations,
        'card_url_name': 'show_translation',
    }, context_instance=RequestContext(request))
def handle(self, *args, **options):
    """Management command: delete all Pandora_video File nodes and all
    GAttributes of the source_id AttributeType."""
    pandora_video_st = node_collection.one({
        '_type': 'GSystemType',
        'name': 'Pandora_video'
    })
    source_id_at = node_collection.one(
        {'$and': [{
            'name': 'source_id'
        }, {
            '_type': 'AttributeType'
        }]})
    if pandora_video_st and source_id_at:
        # All File nodes that are members of the Pandora_video type.
        member_set = node_collection.find({
            '$and': [{
                'member_of': {
                    '$all': [ObjectId(pandora_video_st._id)]
                }
            }, {
                '_type': 'File'
            }]
        })
        # All GAttributes carrying a source_id value.
        # NOTE(review): not filtered by subject — deletes source_id
        # attributes of ALL nodes, not just Pandora videos; confirm intent.
        gattribute = triple_collection.find({
            '_type': 'GAttribute',
            'attribute_type': source_id_at._id
        })
        for each in member_set:
            each.delete()
            print "Video ", each.name, " removed from member_of successfully !!\n"
        for each in gattribute:
            each.delete()
            print "Video ", each.name, " removed from attribute_set successfully !!\n"
def parse_data_create_gsystem(json_file_path):
    """Bulk-import File GSystems from a JSON dump (one dict per row).

    Reads `json_file_path` (a JSON list of row-dicts, typically exported
    from a spreadsheet: row numbers in log messages are i + 2), then for
    each row:
      1. Splits keys into core node fields (matching an empty File
         node's keys) vs. attribute/relation keys.
      2. Creates the File GSystem via create_resource_gsystem(), then
         patches its `language`, collection membership, and thumbnail.
      3. Creates a discussion thread for the node.
      4. Creates GAttributes and GRelations for the remaining keys,
         matching them case-insensitively against the possible
         attributes/relations of `file_gst`.

    Per-row failures are caught, appended to the module-level
    `log_error_rows`, and logged via log_print(); parsing failures of
    the file itself abort the whole run.
    """
    json_file_content = ""

    try:
        with open(json_file_path) as json_file:
            json_file_content = json_file.read()
        json_documents_list = json.loads(json_file_content)

        # Initiating empty node obj and other related data variables
        node = node_collection.collection.File()
        node_keys = node.keys()
        node_structure = node.structure
        # print "\n\n---------------", node_keys

        json_documents_list_spaces = json_documents_list
        json_documents_list = []

        # Removes leading and trailing spaces from keys as well as values
        # (keys are also lower-cased so later matching is case-insensitive).
        for json_document_spaces in json_documents_list_spaces:
            json_document = {}
            for key_spaces, value_spaces in json_document_spaces.iteritems():
                json_document[key_spaces.strip().lower()] = value_spaces.strip()
            json_documents_list.append(json_document)

    except Exception as e:
        error_message = "\n!! While parsing the file ("+json_file_path+") got following error...\n " + str(e)
        log_print(error_message)
        # NOTE(review): raising a *string* is a TypeError on Python >= 2.6;
        # this should raise an Exception instance — confirm and fix.
        raise error_message

    for i, json_document in enumerate(json_documents_list):
        # Row numbering assumes a header row in the original sheet.
        info_message = "\n\n\n********** Processing row number : ["+ str(i + 2) + "] **********"
        log_print(info_message)
        try:
            parsed_json_document = {}
            attribute_relation_list = []

            for key in json_document.iterkeys():
                parsed_key = key.lower()
                if parsed_key in node_keys:
                    # print parsed_key
                    # adding the default field values e.g: created_by, member_of
                    # created_by: fall back to nroer_team_id when the cell is
                    # empty or the named user is unknown.
                    if parsed_key == "created_by":
                        if json_document[key]:
                            temp_user_id = get_user_id(json_document[key].strip())
                            if temp_user_id:
                                parsed_json_document[parsed_key] = temp_user_id
                            else:
                                parsed_json_document[parsed_key] = nroer_team_id
                        else:
                            parsed_json_document[parsed_key] = nroer_team_id
                        # print "---", parsed_json_document[parsed_key]
                    # contributors: comma-separated user names -> user ids.
                    elif parsed_key == "contributors":
                        if json_document[key]:
                            contrib_list = json_document[key].split(",")
                            temp_contributors = []
                            for each_user in contrib_list:
                                user_id = get_user_id(each_user.strip())
                                if user_id:
                                    temp_contributors.append(user_id)
                            parsed_json_document[parsed_key] = temp_contributors
                        else:
                            parsed_json_document[parsed_key] = [nroer_team_id]
                        # print "===", parsed_json_document[parsed_key]
                    # tags:
                    elif (parsed_key == "tags") and json_document[key]:
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                        # print parsed_json_document[parsed_key]
                    # member_of: every imported node becomes a File GSystem.
                    elif parsed_key == "member_of":
                        parsed_json_document[parsed_key] = [file_gst._id]
                        # print parsed_json_document[parsed_key]
                    # --- END of adding the default field values
                    else:
                        # parsed_json_document[parsed_key] = json_document[key]
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                        # print parsed_json_document[parsed_key]
                    # --- END of processing for remaining fields
                else:
                    # key is not in the node_keys: remember it for the
                    # attribute/relation pass below.
                    parsed_json_document[key] = json_document[key]
                    attribute_relation_list.append(key)
                    # print "key : ", key
            # --END of for loop ---

            # calling method to create File GSystems
            node_obj = create_resource_gsystem(parsed_json_document, i)
            nodeid = node_obj._id if node_obj else None
            # print "nodeid : ", nodeid

            # ----- for updating language -----
            # NOTE(review): eval() on spreadsheet-supplied text is unsafe
            # (arbitrary code execution) — presumably the cell holds a
            # python-literal tuple/string; confirm and prefer ast.literal_eval.
            node_lang = get_language_tuple(eval(parsed_json_document['language']))
            # print "============= :", node_lang
            # print "============= lang :", node_obj.language
            if node_obj and node_obj.language != node_lang:
                update_res = node_collection.collection.update(
                    {'_id': ObjectId(nodeid), 'language': {'$ne': node_lang}},
                    {'$set': {'language': node_lang}},
                    upsert=False, multi=False
                )
                if update_res['updatedExisting']:
                    node_obj.reload()
                    info_message = "\n\n- Update to language of resource: " + str(update_res)
                    log_print(info_message)
                    info_message = "\n\n- Now language of resource updates to: " + str(node_obj.language)
                    log_print(info_message)
                    # print "============= lang :", node_obj.language
            # ----- END of updating language -----

            # Optional: attach the node to a named collection in the home group.
            collection_name = parsed_json_document.get('collection', '')
            if collection_name and nodeid:
                collection_node = node_collection.find_one({
                    # '_type': 'File',
                    'member_of': {'$in': [file_gst._id]},
                    'group_set': {'$in': [home_group._id]},
                    'name': unicode(collection_name)
                })
                if collection_node:
                    add_to_collection_set(collection_node, nodeid)

            # Optional: download/attach a thumbnail (best-effort; errors
            # are printed and swallowed so the row still imports).
            thumbnail_url = parsed_json_document.get('thumbnail')
            # print "thumbnail_url : ", thumbnail_url
            if thumbnail_url and nodeid:
                try:
                    info_message = "\n\n- Attaching thumbnail to resource\n"
                    log_print(info_message)
                    attach_resource_thumbnail(thumbnail_url, nodeid, parsed_json_document, i)
                except Exception, e:
                    print e

            # print type(nodeid), "-------", nodeid, "\n"
            # create thread node
            if isinstance(nodeid, ObjectId):
                thread_result = create_thread_obj(nodeid)

            # starting processing for the attributes and relations saving
            if isinstance(nodeid, ObjectId) and attribute_relation_list:
                node = node_collection.one({ "_id": ObjectId(nodeid) })
                gst_possible_attributes_dict = node.get_possible_attributes(file_gst._id)
                # print gst_possible_attributes_dict
                relation_list = []
                json_document['name'] = node.name

                # Write code for setting atrributes: each leftover key that
                # matches a possible attribute becomes a GAttribute; the
                # rest are collected as relation candidates.
                for key in attribute_relation_list:
                    is_relation = True
                    # print "\n", key, "----------\n"
                    for attr_key, attr_value in gst_possible_attributes_dict.iteritems():
                        # print "\n", attr_key,"======", attr_value
                        if key == attr_key:
                            # print key
                            is_relation = False
                            # setting value to "0" for int, float, long (to avoid casting error)
                            # if (attr_value['data_type'] in [int, float, long]) and (not json_document[key]):
                            #     json_document[key] = 0
                            if json_document[key]:
                                # print "key : ", key, "\nvalue : ",json_document[key]
                                info_message = "\n- For GAttribute parsing content | key: '" + attr_key + "' having value: '" + json_document[key] + "'"
                                log_print(info_message)
                                # NOTE(review): the result of this call is
                                # discarded — the actual cast happens a few
                                # lines below; this line looks redundant.
                                cast_to_data_type(json_document[key], attr_value['data_type'])
                                if attr_value['data_type'] == "curricular":
                                    # setting int values for CR/XCR
                                    if json_document[key] == "CR":
                                        json_document[key] = 1
                                    elif json_document[key] == "XCR":
                                        json_document[key] = 0
                                    else:
                                        # needs to be confirm
                                        json_document[key] = 0
                                    # json_document[key] = bool(int(json_document[key]))
                                # print attr_value['data_type'], "@@@@@@@@@ : ", json_document[key]
                                json_document[key] = cast_to_data_type(json_document[key], attr_value['data_type'])
                                # print key, " !!!!!!!!! : ", json_document[key]
                                subject_id = node._id
                                # print "\n-----\nsubject_id: ", subject_id
                                # Case-insensitive match on AttributeType
                                # name or altnames.
                                attribute_type_node = node_collection.one({
                                    '_type': "AttributeType",
                                    '$or': [
                                        {'name': {'$regex': "^"+attr_key+"$", '$options': 'i'} },
                                        {'altnames': {'$regex': "^"+attr_key+"$", '$options': 'i'} }
                                    ]
                                })
                                # print "\nattribute_type_node: ", attribute_type_node.name
                                object_value = json_document[key]
                                # print "\nobject_value: ", object_value
                                ga_node = None
                                info_message = "\n- Creating GAttribute ("+node.name+" -- "+attribute_type_node.name+" -- "+str(json_document[key])+") ...\n"
                                log_print(info_message)
                                ga_node = create_gattribute(subject_id, attribute_type_node, object_value)
                                info_message = "- Created ga_node : "+ str(ga_node.name) + "\n"
                                log_print(info_message)
                                # To break outer for loop as key found
                                break
                            else:
                                error_message = "\n!! DataNotFound: No data found for field ("+str(attr_key)+") while creating GSystem ( -- "+str(node.name)+")\n"
                                log_print(error_message)
                            # ---END of if (key == attr_key)
                    if is_relation:
                        relation_list.append(key)

                if not relation_list:
                    # No possible relations defined for this node
                    info_message = "\n!! ("+str(node.name)+"): No possible relations defined for this node.\n"
                    log_print(info_message)
                    # NOTE(review): this `return` aborts the whole import,
                    # skipping every remaining row — confirm `continue`
                    # was not intended.
                    return
                gst_possible_relations_dict = node.get_possible_relations(file_gst._id)

                # processing each entry in relation_list
                # print "=== relation_list : ", relation_list
                for key in relation_list:
                    is_relation = True
                    for rel_key, rel_value in gst_possible_relations_dict.iteritems():
                        if key == rel_key:
                            # if key == "teaches":
                            is_relation = False
                            if json_document[key]:
                                # most often the data is hierarchy sep by ":"
                                if ":" in json_document[key]:
                                    formatted_list = []
                                    temp_teaches_list = json_document[key].replace("\n", "").split(":")
                                    # print "\n temp_teaches", temp_teaches
                                    for v in temp_teaches_list:
                                        formatted_list.append(v.strip())
                                    right_subject_id = []
                                    # print "~~~~~~~~~~~", formatted_list
                                    # rsub_id = _get_id_from_hierarchy(formatted_list)
                                    rsub_id = get_id_from_hierarchy(formatted_list)
                                    # print "=== rsub_id : ", rsub_id
                                    hierarchy_output = None
                                    # checking every item in hierarchy exist and leaf node's _id found
                                    if rsub_id:
                                        right_subject_id.append(rsub_id)
                                        json_document[key] = right_subject_id
                                        # print json_document[key]
                                    else:
                                        error_message = "\n!! While creating teaches rel: Any one of the item in hierarchy"+ str(json_document[key]) +"does not exist in Db. \n!! So relation: " + str(key) + " cannot be created.\n"
                                        log_print(error_message)
                                        break
                                # sometimes direct leaf-node may be present without hierarchy and ":"
                                else:
                                    # NOTE(review): list(<str>) splits the
                                    # value into single characters —
                                    # presumably [value] was intended;
                                    # confirm against _get_id_from_hierarchy.
                                    formatted_list = list(json_document[key].strip())
                                    right_subject_id = []
                                    right_subject_id.append(_get_id_from_hierarchy(formatted_list))
                                    json_document[key] = right_subject_id
                                # print "\n----------", json_document[key]
                                info_message = "\n- For GRelation parsing content | key: " + str(rel_key) + " -- " + str(json_document[key])
                                log_print(info_message)
                                # print list(json_document[key])
                                # perform_eval_type(key, json_document, "GSystem", "GSystem")
                                for right_subject_id in json_document[key]:
                                    # print "\njson_document[key]: ", json_document[key]
                                    subject_id = node._id
                                    # print "subject_id : ", subject_id
                                    # print "node.name: ", node.name
                                    # Here we are appending list of ObjectIds of GSystemType's type_of field
                                    # along with the ObjectId of GSystemType's itself (whose GSystem is getting created)
                                    # This is because some of the RelationType's are holding Base class's ObjectId
                                    # and not that of the Derived one's
                                    # Delibrately keeping GSystemType's ObjectId first in the list
                                    # And hence, used $in operator in the query!
                                    rel_subject_type = []
                                    rel_subject_type.append(file_gst._id)
                                    if file_gst.type_of:
                                        rel_subject_type.extend(file_gst.type_of)
                                    relation_type_node = node_collection.one({'_type': "RelationType",
                                        '$or': [{'name': {'$regex': "^"+rel_key+"$", '$options': 'i'}},
                                                {'altnames': {'$regex': "^"+rel_key+"$", '$options': 'i'}}],
                                        'subject_type': {'$in': rel_subject_type}
                                    })
                                    right_subject_id_or_list = []
                                    right_subject_id_or_list.append(ObjectId(right_subject_id))
                                    nodes = triple_collection.find({'_type': "GRelation",
                                        'subject': subject_id,
                                        'relation_type.$id': relation_type_node._id
                                    })
                                    # sending list of all the possible right subject to relation
                                    for n in nodes:
                                        if not n.right_subject in right_subject_id_or_list:
                                            right_subject_id_or_list.append(n.right_subject)
                                    info_message = "\n- Creating GRelation ("+ str(node.name)+ " -- "+ str(rel_key)+ " -- "+ str(right_subject_id_or_list)+") ..."
                                    log_print(info_message)
                                    gr_node = create_grelation(subject_id, relation_type_node, right_subject_id_or_list)
                                    info_message = "\n- Grelation processing done.\n"
                                    log_print(info_message)
                                # To break outer for loop if key found
                                break
                            else:
                                error_message = "\n!! DataNotFound: No data found for relation ("+ str(rel_key)+ ") while creating GSystem (" + str(file_gst.name) + " -- " + str(node.name) + ")\n"
                                log_print(error_message)
                                break
                # print relation_list
            else:
                info_message = "\n!! Either resource is already created or file is already saved into filehive/DB or file not found"
                log_print(info_message)
                continue
        except Exception as e:
            # Per-row failure: record full row details and keep importing.
            error_message = "\n While creating ("+str(json_document['name'])+") got following error...\n " + str(e)
            print "!!!!!!!!!!!!EEEEEEEERRRRRRRRRRRRRROOOOOOORRRRRRRRRRRRR......................"
            # file_error_msg = "\nFile with following details got an error: \n"
            file_error_msg = "\n========================" + " Row No : " + str(i + 2) + " ========================\n"
            # file_error_msg += "- Row No : " + str(i + 2) + "\n"
            file_error_msg += "- Name : " + json_document["name"] + "\n"
            file_error_msg += "- File Name: " + json_document["file_name"] + "\n"
            file_error_msg += "- ERROR : " + str(e) + "\n\n"
            file_error_msg += "- Following are the row details : \n\n" + unicode(json.dumps(json_document, sort_keys=True, indent=4, ensure_ascii=False)) + "\n"
            file_error_msg += "============================================================\n\n\n"
            log_error_rows.append(file_error_msg)
            log_print(error_message)
def lesson_create_edit(request, group_id, unit_group_id=None):
    '''
    creation as well as edit of lessons

    POST params: name, lesson_id (edit), sel_lesson_lang, unit_id,
    lesson_desc.

    returns following (as JSON HttpResponse):
    {
      'success': <BOOL: 0 or 1>,
      'unit_hierarchy': <unit hierarchy json>,
      'msg': <error msg or objectid of newly created obj>
    }

    Fix: lesson_obj.save() was called with the keyword ``group_id=``;
    every other .save() in this module passes ``groupid=`` — corrected.
    '''
    # parent unit id: POSTed unit_id wins over the URL parameter.
    lesson_id = request.POST.get('lesson_id', None)
    lesson_language = request.POST.get('sel_lesson_lang', '')
    unit_id_post = request.POST.get('unit_id', '')
    lesson_content = request.POST.get('lesson_desc', '')
    unit_group_id = unit_id_post if unit_id_post else unit_group_id

    # getting parent unit object
    unit_group_obj = Group.get_group_name_id(unit_group_id, get_obj=True)
    result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': ''}

    if request.method == "POST":
        # lesson name
        lesson_name = request.POST.get('name', '').strip()
        if not lesson_name:
            msg = 'Name can not be empty.'
            result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': msg}

        # check for uniqueness of name among siblings
        # unit_cs: unit collection_set
        unit_cs_list = unit_group_obj.collection_set
        unit_cs_objs_cur = Node.get_nodes_by_ids_list(unit_cs_list)
        if unit_cs_objs_cur:
            unit_cs_names_list = [u.name for u in unit_cs_objs_cur]

        if not lesson_id and unit_cs_objs_cur and lesson_name in unit_cs_names_list:
            # same name activity: reject creation (uniqueness is currently
            # checked for "en" nodes only; per-language uniqueness is a TODO).
            msg = u'Activity with same name exists in lesson: ' + unit_group_obj.name
            result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': msg}
        elif lesson_id and ObjectId.is_valid(lesson_id):
            # Update of an existing lesson.
            # For a non-"en" language, redirect the edit onto the
            # translated node (creating it when it does not exist yet).
            if lesson_language != "en":
                node = translated_node_id = None
                grel_node = triple_collection.one({
                    '_type': 'GRelation',
                    'subject': ObjectId(lesson_id),
                    'relation_type': rt_translation_of._id,
                    'language': get_language_tuple(lesson_language),
                    # 'status': 'PUBLISHED'
                })
                if grel_node:
                    # grelation found: translated node exists; edit it.
                    lesson_id = grel_node.right_subject
                else:
                    # grelation NOT found: create the translated node and
                    # link it (plus all existing translations) via a
                    # translation_of GRelation.
                    user_id = request.user.id
                    new_lesson_obj = node_collection.collection.GSystem()
                    new_lesson_obj.fill_gstystem_values(
                        name=lesson_name,
                        content=lesson_content,
                        member_of=gst_lesson_id,
                        group_set=unit_group_obj._id,
                        created_by=user_id,
                        status=u'PUBLISHED')
                    if lesson_language:
                        language = get_language_tuple(lesson_language)
                        new_lesson_obj.language = language
                    new_lesson_obj.save(groupid=group_id)

                    trans_grel_list = [ObjectId(new_lesson_obj._id)]
                    trans_grels = triple_collection.find(
                        {'_type': 'GRelation',
                         'relation_type': rt_translation_of._id,
                         'subject': ObjectId(lesson_id)},
                        {'_id': 0, 'right_subject': 1})
                    for each_rel in trans_grels:
                        trans_grel_list.append(each_rel['right_subject'])
                    create_grelation(lesson_id, rt_translation_of, trans_grel_list, language=language)

            lesson_obj = Node.get_node_by_id(lesson_id)
            if lesson_obj and (lesson_obj.name != lesson_name):
                trans_lesson = get_lang_node(lesson_obj._id, lesson_language)
                if trans_lesson:
                    # NOTE(review): trans_lesson is renamed but never
                    # saved (only lesson_obj is) — confirm this is intended.
                    trans_lesson.name = lesson_name
                else:
                    lesson_obj.name = lesson_name
                # FIX: was save(group_id=group_id); the save API keyword
                # used everywhere else in this module is `groupid`.
                lesson_obj.save(groupid=group_id)
                unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
                msg = u'Lesson name updated.'
                result_dict = {
                    'success': 1,
                    'unit_hierarchy': unit_structure,
                    'msg': str(lesson_obj._id)
                }
            else:
                unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
                msg = u'Nothing to update.'
                result_dict = {
                    'success': 1,
                    'unit_hierarchy': unit_structure,
                    'msg': msg
                }
        else:
            # creating a fresh lesson object under the parent unit.
            user_id = request.user.id
            new_lesson_obj = node_collection.collection.GSystem()
            new_lesson_obj.fill_gstystem_values(
                name=lesson_name,
                content=lesson_content,
                member_of=gst_lesson_id,
                group_set=unit_group_obj._id,
                created_by=user_id,
                status=u'PUBLISHED')
            if lesson_language:
                language = get_language_tuple(lesson_language)
                new_lesson_obj.language = language
            new_lesson_obj.save(groupid=group_id)
            unit_group_obj.collection_set.append(new_lesson_obj._id)
            unit_group_obj.save(groupid=group_id)
            unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
            msg = u'Added lesson under lesson: ' + unit_group_obj.name
            result_dict = {
                'success': 1,
                'unit_hierarchy': unit_structure,
                'msg': str(new_lesson_obj._id)
            }

    return HttpResponse(json.dumps(result_dict))
def data_review_save(request, group_id):
    '''
    Method to save each and every data-row edit of data review app.

    Reads a JSON `node_details` payload from POST, merges it back into
    request.POST (with keys renamed for get_node_common_fields), then
    updates the file node's common fields, metadata attributes, and its
    `teaches`/`assesses` relations, building an edit summary along the
    way. Any change flips the node to DRAFT unless an authorized user
    explicitly set a status. Returns the node's final status.

    Fix: the `assesses` branch created/compared the relation with
    ``teaches_list`` instead of ``assesses_list`` (copy-paste bug).
    '''
    userid = request.user.pk
    # Resolve group: ObjectId string used directly, otherwise by name.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    group_obj = node_collection.one({"_id": ObjectId(group_id)})

    node_oid = request.POST.get("node_oid", "")
    node_details = request.POST.get("node_details", "")
    node_details = json.loads(node_details)

    # updating some key names of dictionary as per get_node_common_fields.
    node_details["lan"] = node_details.pop("language")
    node_details["prior_node_list"] = node_details.pop("prior_node")
    node_details["login-mode"] = node_details.pop("access_policy")
    status = node_details.pop("status")
    # node_details["collection_list"] = node_details.pop("collection") for future use

    # Making copy of POST QueryDict instance.
    # To make it mutable and fill in node_details value/s.
    post_req = request.POST.copy()
    # removing node_details dict from req
    post_req.pop('node_details')
    # adding values to post req
    post_req.update(node_details)
    # overwriting request.POST with newly created QueryDict instance post_req
    request.POST = post_req

    file_node = node_collection.one({"_id": ObjectId(node_oid)})

    if request.method == "POST":
        edit_summary = []
        file_node_before = file_node.copy()  # copying before it is getting modified
        is_changed = get_node_common_fields(request, file_node, group_id, GST_FILE)

        # Record every core field that changed.
        for key, val in file_node_before.iteritems():
            if file_node_before[key] != file_node[key]:
                temp_edit_summ = {}
                temp_edit_summ["name"] = "Field: " + key
                temp_edit_summ["before"] = file_node_before[key]
                temp_edit_summ["after"] = file_node[key]
                edit_summary.append(temp_edit_summ)

        # to fill/update attributes of the node and get updated attrs as return
        # NOTE(review): is_changed=True is passed unconditionally here —
        # confirm the local is_changed was not intended instead.
        ga_nodes = get_node_metadata(request, file_node, is_changed=True)
        if len(ga_nodes):
            is_changed = True
            # adding the edit attribute name in summary
            for each_ga in ga_nodes:
                temp_edit_summ = {}
                temp_edit_summ["name"] = "Attribute: " + each_ga["node"]["attribute_type"]["name"]
                temp_edit_summ["before"] = each_ga["before_obj_value"]
                temp_edit_summ["after"] = each_ga["node"]["object_value"]
                edit_summary.append(temp_edit_summ)

        teaches_list = request.POST.get('teaches', '')  # get the teaches list
        prev_teaches_list = request.POST.get("teaches_prev", "")  # get the before-edit teaches list
        # check if teaches list exist means nodes added/removed for teaches relation_type
        # also check for if previous teaches list made empty with prev_teaches_list
        if (teaches_list != '') or prev_teaches_list:
            teaches_list = teaches_list.split(",") if teaches_list else []
            teaches_list = [ObjectId(each_oid) for each_oid in teaches_list]
            relation_type_node = node_collection.one({'_type': "RelationType", 'name': 'teaches'})
            gr_nodes = create_grelation(file_node._id, relation_type_node, teaches_list)
            gr_nodes_oid_list = [ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes] if gr_nodes else []
            prev_teaches_list = prev_teaches_list.split(",") if prev_teaches_list else []
            prev_teaches_list = [ObjectId(each_oid) for each_oid in prev_teaches_list]
            if len(gr_nodes_oid_list) == len(prev_teaches_list) and set(gr_nodes_oid_list) == set(prev_teaches_list):
                pass
            else:
                # Relation set actually changed: summarize before/after
                # using the human-readable part of each GRelation name.
                rel_nodes = triple_collection.find({'_type': "GRelation",
                    'subject': file_node._id,
                    'relation_type': relation_type_node._id
                })
                rel_oid_name = {}
                for each in rel_nodes:
                    temp = {}
                    temp[each.right_subject] = each.name
                    rel_oid_name.update(temp)
                is_changed = True
                temp_edit_summ = {}
                temp_edit_summ["name"] = "Relation: Teaches"
                temp_edit_summ["before"] = [rel_oid_name[each_oid].split(" -- ")[2] for each_oid in prev_teaches_list]
                temp_edit_summ["after"] = [rel_oid_name[each_oid].split(" -- ")[2] for each_oid in gr_nodes_oid_list]
                edit_summary.append(temp_edit_summ)

        assesses_list = request.POST.get('assesses_list', '')
        if assesses_list != '':
            assesses_list = assesses_list.split(",")
            assesses_list = [ObjectId(each_oid) for each_oid in assesses_list]
            relation_type_node = node_collection.one({'_type': "RelationType", 'name': 'assesses'})
            # FIX: was create_grelation(..., teaches_list) — the assesses
            # relation must be built from assesses_list.
            gr_nodes = create_grelation(file_node._id, relation_type_node, assesses_list)
            gr_nodes_oid_list = [ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes]
            # FIX: likewise compare against assesses_list, not teaches_list.
            if len(gr_nodes_oid_list) == len(assesses_list) and set(gr_nodes_oid_list) == set(assesses_list):
                pass
            else:
                is_changed = True

        # changing status to draft even if attributes/relations are changed
        if is_changed:
            file_node.status = unicode("DRAFT")
            file_node.modified_by = userid
            if userid not in file_node.contributors:
                file_node.contributors.append(userid)

        # checking if user is authenticated to change the status of node
        if status and ((group_obj.is_gstaff(request.user)) or (userid in group_obj.author_set)):
            if file_node.status != status:
                file_node.status = unicode(status)
                file_node.modified_by = userid
                if userid not in file_node.contributors:
                    file_node.contributors.append(userid)
                is_changed = True

        if is_changed:
            file_node.save(groupid=group_id)

    return HttpResponse(file_node.status)
def theme_topic_create_edit(request, group_id, app_set_id=None): ##################### # ins_objectid = ObjectId() # if ins_objectid.is_valid(group_id) is False : # group_ins = node_collection.find_one({'_type': "Group", "name": group_id}) # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if group_ins: # group_id = str(group_ins._id) # else : # auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) # if auth : # group_id = str(auth._id) # else : # pass try: group_id = ObjectId(group_id) except: group_name, group_id = get_group_name_id(group_id) ###################### nodes_dict = [] create_edit = True themes_hierarchy = False themes_list_items = "" themes_cards = "" title = "" node = "" theme_topic_node = "" drawers = None drawer = None app_id = None nodes_list = [] parent_nodes_collection = "" translate=request.GET.get('translate','') app_GST = node_collection.find_one({"_id":ObjectId(app_set_id)}) if app_GST._id != theme_GST._id: app_obj = node_collection.one({'_id': ObjectId(app_GST.member_of[0])}) else: app_obj = theme_GST if app_obj: app_id = app_obj._id shelves = [] shelf_list = {} auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) }) if auth: has_shelf_RT = node_collection.one({'_type': 'RelationType', 'name': u'has_shelf' }) shelf = triple_collection.find({'_type': 'GRelation', 'subject': ObjectId(auth._id), 'relation_type.$id': has_shelf_RT._id}) shelf_list = {} if shelf: for each in shelf: shelf_name = node_collection.one({'_id': ObjectId(each.right_subject)}) shelves.append(shelf_name) shelf_list[shelf_name.name] = [] for ID in shelf_name.collection_set: shelf_item = node_collection.one({'_id': ObjectId(ID) }) shelf_list[shelf_name.name].append(shelf_item.name) else: shelves = [] if request.method == "POST": if app_GST: create_edit = True themes_list_items = "" root_themes = [] root_themes_id = [] nodes_list = [] name = request.POST.get('name') 
collection_list = request.POST.get('collection_list','') prior_node_list = request.POST.get('prior_node_list','') teaches_list = request.POST.get('teaches_list','') assesses_list = request.POST.get('assesses_list','') # To find the root nodes to maintain the uniquness while creating and editing themes nodes = node_collection.find({'member_of': {'$all': [theme_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if each.collection_set: for k in each.collection_set: nodes_list.append(k) nodes.rewind() for each in nodes: if each._id not in nodes_list: root_themes.append(each.name) root_themes_id.append(each._id) if app_GST.name == "Theme" or app_GST.name == "Topic" or translate == "true": # For creating new themes & Topics themes_list_items = False create_edit = False themes_hierarchy = False themes_cards = True if name or translate == "true": if not name.upper() in (theme_name.upper() for theme_name in root_themes) or translate == "true": if translate != "true": theme_topic_node = node_collection.collection.GSystem() # get_node_common_fields(request, theme_topic_node, group_id, app_GST) theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, app_GST),groupid=group_id) if translate == "true": global list_trans_coll list_trans_coll = [] coll_set1=get_coll_set(app_GST._id) for each in coll_set1: theme_topic_node = node_collection.collection.GSystem() if "Theme" in each.member_of_names_list: app_obj = theme_GST if "theme_item" in each.member_of_names_list: app_obj = theme_item_GST if "topic" in each.member_of_names_list: app_obj = topic_GST theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, app_obj, each),groupid=group_id) coll_set_dict[each._id]=theme_topic_node._id relation_type = node_collection.one({'_type':'RelationType', 'name':'translation_of'}) # grelation=collection.GRelation() # grelation.relation_type=relation_type # grelation.subject=each._id # 
grelation.right_subject=theme_topic_node._id # grelation.name=u"" # grelation.save() gr_node = create_grelation(each._id, relation_type, theme_topic_node._id) for each in coll_set1: #if "Theme" in each.member_of_names_list: if each.collection_set: for collset in each.collection_set: p=coll_set_dict[each._id] parent_node = node_collection.one({'_id':ObjectId(str(p))}) n= coll_set_dict[collset] sub_node = node_collection.one({'_id':ObjectId(str(n))}) parent_node.collection_set.append(sub_node._id) parent_node.save(groupid=group_id) # To return themes card view for listing theme nodes after creating new Themes nodes.rewind() nodes_dict = nodes else: themes_list_items = False create_edit = False themes_hierarchy = True theme_topic_node = node_collection.one({'_id': ObjectId(app_GST._id)}) # For edititng themes if theme_GST._id in app_GST.member_of and translate != "true": # To find themes uniqueness within the context of its parent Theme collection, while editing theme name root_themes = [] nodes = node_collection.find({'member_of': {'$all': [theme_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: root_themes.append(each.name) if name: if name.upper() != theme_topic_node.name.upper(): if not name.upper() in (theme_name.upper() for theme_name in root_themes): # get_node_common_fields(request, theme_topic_node, group_id, theme_GST) theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, theme_GST),groupid=group_id) else: theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, theme_GST),groupid=group_id) if translate != "true": # For storing and maintaning collection order if collection_list != '': theme_topic_node.collection_set = [] collection_list = collection_list.split(",") i = 0 while (i < len(collection_list)): node_id = ObjectId(collection_list[i]) if node_collection.one({"_id": node_id}): theme_topic_node.collection_set.append(node_id) i = i+1 
theme_topic_node.save(groupid=group_id) # End of storing collection title = theme_GST.name nodes.rewind() nodes_dict = nodes # This will return to Themes Hierarchy themes_list_items = False create_edit = False themes_hierarchy = False themes_cards = True elif theme_item_GST._id in app_GST.member_of and translate != "true": title = "Theme Item" dict_drawer = {} dict2 = [] node = app_GST prior_theme_collection = [] parent_nodes_collection = "" # To display the theme-topic drawer while create or edit theme checked = "theme_item" # drawers = get_drawers(group_id, node._id, node.collection_set, checked) # Code for fetching drawer2 for k in node.collection_set: obj = node_collection.one({'_id': ObjectId(k) }) dict2.append(obj) dict_drawer['2'] = dict2 # drawers = dict_drawer # End of code for drawer2 drawer = dict_drawer['2'] # To find themes uniqueness within the context of its parent Theme collection, while editing theme item nodes = node_collection.find({'member_of': {'$all': [theme_item_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if app_GST._id in each.collection_set: for k in each.collection_set: prior_theme = node_collection.one({'_id': ObjectId(k) }) prior_theme_collection.append(prior_theme.name) parent_nodes_collection = json.dumps(prior_theme_collection) if not prior_theme_collection: root_nodes = node_collection.find({'member_of': {'$all': [theme_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for k in root_nodes: if app_GST._id in k.collection_set: root_themes = [] root_themes_id = [] for l in k.collection_set: objs = node_collection.one({'_id': ObjectId(l)}) root_themes.append(objs.name) root_themes_id.append(objs._id) # End of finding unique theme names for editing name # For adding a sub-theme-items and maintianing their uniqueness within their context nodes_list = [] for each in app_GST.collection_set: sub_theme = node_collection.one({'_id': ObjectId(each) }) nodes_list.append(sub_theme.name) nodes_list = 
json.dumps(nodes_list) # End of finding unique sub themes if name: if name.upper() != theme_topic_node.name.upper(): # If "Name" has changed if theme_topic_node._id in root_themes_id: # If editing node in root theme items if not name.upper() in (theme_name.upper() for theme_name in root_themes): # get_node_common_fields(request, theme_topic_node, group_id, theme_GST) theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, theme_item_GST),groupid=group_id) else: # If editing theme item in prior_theme_collection hierarchy if not name.upper() in (theme_name.upper() for theme_name in prior_theme_collection): # get_node_common_fields(request, theme_topic_node, group_id, theme_GST) theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, theme_item_GST),groupid=group_id) else: # If name not changed but other fields has changed theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, theme_item_GST),groupid=group_id) if translate != "true" and collection_list: # For storing and maintaning collection order if collection_list != '': theme_topic_node.collection_set = [] collection_list = collection_list.split(",") i = 0 while (i < len(collection_list)): node_id = ObjectId(collection_list[i]) if node_collection.one({"_id": node_id}): theme_topic_node.collection_set.append(node_id) i = i+1 theme_topic_node.save(groupid=group_id) # End of storing collection # This will return to Themes items edit if theme_topic_node: theme_topic_node.reload() node = theme_topic_node create_edit = True themes_hierarchy = False # For editing topics elif topic_GST._id in app_GST.member_of: root_topics = [] nodes_list = [] # To find the root nodes to maintain the uniquness while creating and editing topics nodes = node_collection.find({'member_of': {'$all': [topic_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if each.collection_set: for k in each.collection_set: 
nodes_list.append(k) nodes.rewind() for each in nodes: if each._id not in nodes_list: root_topics.append(each.name) # End of finding the root nodes if name: if theme_topic_node.name != name: topic_name = theme_topic_node.name if not name.upper() in (theme_name.upper() for theme_name in root_topics): theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, topic_GST),groupid=group_id) elif topic_name.upper() == name.upper(): theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, topic_GST),groupid=group_id) else: theme_topic_node.save(is_changed=get_node_common_fields(request, theme_topic_node, group_id, topic_GST),groupid=group_id) if collection_list: # For storing and maintaning collection order if collection_list != '': theme_topic_node.collection_set = [] collection_list = collection_list.split(",") i = 0 while (i < len(collection_list)): node_id = ObjectId(collection_list[i]) if node_collection.one({"_id": node_id}): theme_topic_node.collection_set.append(node_id) i = i+1 theme_topic_node.save(groupid=group_id) title = topic_GST.name # To fill the metadata info while creating and editing topic node metadata = request.POST.get("metadata_info", '') if metadata: # Only while metadata editing if metadata == "metadata": if theme_topic_node: get_node_metadata(request,theme_topic_node) # End of filling metadata if prior_node_list != '': theme_topic_node.prior_node = [] prior_node_list = prior_node_list.split(",") i = 0 while (i < len(prior_node_list)): node_id = ObjectId(prior_node_list[i]) if node_collection.one({"_id": node_id}): theme_topic_node.prior_node.append(node_id) i = i+1 theme_topic_node.save(groupid=group_id) if teaches_list !='': teaches_list=teaches_list.split(",") create_grelation_list(theme_topic_node._id,"teaches",teaches_list) if assesses_list !='': assesses_list=assesses_list.split(",") create_grelation_list(theme_topic_node._id,"assesses",assesses_list) # This will return to 
edit topic if theme_topic_node: theme_topic_node.reload() node = theme_topic_node create_edit = True themes_hierarchy = False else: app_node = None nodes_list = [] app_GST = node_collection.find_one({"_id":ObjectId(app_set_id)}) # print "\napp_GST in else: ",app_GST.name,"\n" if app_GST: # For adding new Theme & Topic if app_GST.name == "Theme" or app_GST.name == "Topic" or translate == True: print "22222" title = app_GST.name node = "" root_themes = [] # To find the root nodes to maintain the uniquness while creating new themes nodes = node_collection.find({'member_of': {'$all': [app_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if each.collection_set: for k in each.collection_set: nodes_list.append(k) nodes.rewind() for each in nodes: if each._id not in nodes_list: root_themes.append(each.name) root_themes = json.dumps(root_themes) nodes_list = root_themes # End of finding unique root level Themes else: if theme_GST._id in app_GST.member_of: title = "Theme" node = app_GST prior_theme_collection = [] parent_nodes_collection = "" drawer = [] # End of editing Themes # For editing theme item if theme_item_GST._id in app_GST.member_of: title = "Theme Item" dict_drawer = {} dict2 = [] node = app_GST prior_theme_collection = [] parent_nodes_collection = "" # To display the theme-topic drawer while create or edit theme checked = "theme_item" # drawers = get_drawers(group_id, node._id, node.collection_set, checked) for k in node.collection_set: obj = node_collection.one({'_id': ObjectId(k) }) dict2.append(obj) dict_drawer['2'] = dict2 drawer = dict_drawer['2'] # To find themes uniqueness within the context of its parent Theme collection, while editing theme name nodes = node_collection.find({'member_of': {'$all': [theme_item_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if app_GST._id in each.collection_set: for k in each.collection_set: prior_theme = node_collection.one({'_id': ObjectId(k) }) 
prior_theme_collection.append(prior_theme.name) parent_nodes_collection = json.dumps(prior_theme_collection) # End of finding unique theme names for editing name # For adding a sub-themes and maintianing their uniqueness within their context for each in app_GST.collection_set: sub_theme = node_collection.one({'_id': ObjectId(each) }) nodes_list.append(sub_theme.name) nodes_list = json.dumps(nodes_list) # End of finding unique sub themes # for editing topic elif topic_GST._id in app_GST.member_of: title = topic_GST.name node = app_GST prior_theme_collection = [] parent_nodes_collection = "" node.get_neighbourhood(node.member_of) # To find topics uniqueness within the context of its parent Theme item collection, while editing topic name nodes = node_collection.find({'member_of': {'$all': [theme_item_GST._id]},'group_set':{'$all': [ObjectId(group_id)]}}) for each in nodes: if app_GST._id in each.collection_set: for k in each.collection_set: prior_theme = node_collection.one({'_id': ObjectId(k) }) prior_theme_collection.append(prior_theme.name) parent_nodes_collection = json.dumps(prior_theme_collection) # End of finding unique theme names for editing name if translate: global list_trans_coll list_trans_coll = [] trans_coll_list = get_coll_set(str(app_GST._id)) print LANGUAGES return render_to_response("ndf/translation_page.html", {'group_id': group_id,'groupid': group_id,'title': title, 'node': app_GST, 'lan':LANGUAGES, 'list1':trans_coll_list },context_instance = RequestContext(request) ) if title == "Topic": return render_to_response("ndf/node_edit_base.html", {'group_id': group_id,'groupid': group_id, 'drawer': drawer, 'themes_cards': themes_cards, 'shelf_list': shelf_list,'shelves': shelves, 'create_edit': create_edit, 'themes_hierarchy': themes_hierarchy,'app_id': app_id,'appId':app._id, 'nodes_list': nodes_list,'title': title,'node': node, 'parent_nodes_collection': parent_nodes_collection, 'theme_GST_id': theme_GST._id,'theme_item_GST_id': theme_item_GST._id, 
'topic_GST_id': topic_GST._id, 'themes_list_items': themes_list_items,'nodes':nodes_dict,'lan':LANGUAGES },context_instance = RequestContext(request) ) return render_to_response("ndf/theme.html", {'group_id': group_id,'groupid': group_id, 'drawer': drawer, 'themes_cards': themes_cards, 'theme_GST':theme_GST, 'theme_GST':theme_GST, 'shelf_list': shelf_list,'shelves': shelves, 'create_edit': create_edit, 'themes_hierarchy': themes_hierarchy,'app_id': app_id,'appId':app._id, 'nodes_list': nodes_list,'title': title,'node': node, 'parent_nodes_collection': parent_nodes_collection, 'theme_GST_id': theme_GST._id,'theme_item_GST_id': theme_item_GST._id, 'topic_GST_id': topic_GST._id, 'themes_list_items': themes_list_items,'nodes':nodes_dict,'lan':LANGUAGES },context_instance = RequestContext(request) )
def handle(self, *args, **options): # Keep latest changes in field(s) to be added at top # adding 'if_file' in GSystem instances: # 'if_file': { # 'mime_type': None, # 'original': {'_id': None, 'relurl': None}, # 'mid': {'_id': None, 'relurl': None}, # 'thumbnail': {'_id': None, 'relurl': None} # }, gsres = node_collection.collection.update( {"_type": {"$in": [u"GSystem", u"File", u"Group"]}, "if_file": {"$exists": False}}, { "$set": { "if_file": { "mime_type": None, "original": {"id": None, "relurl": None}, "mid": {"id": None, "relurl": None}, "thumbnail": {"id": None, "relurl": None}, } } }, upsert=False, multi=True, ) if gsres["updatedExisting"]: # and gsres['nModified']: print "\n Added 'if_file' field to " + gsres["n"].__str__() + " GSystem instances." # -------------------------------------------------------------------------- # Adding <'origin': []> field to all objects and inheritance of GSystem class # fetching all GSystem and it's inheritance class objects # all_gsystem_inherited_nodes = node_collection.find({'_type': {'$in': [u'GSystem', u'File', u'Group']}, 'origin': {'$exists': False} }) res = node_collection.collection.update( {"_type": {"$in": [u"GSystem", u"File", u"Group"]}, "origin": {"$exists": False}}, {"$set": {"origin": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Added 'origin' field to " + res["n"].__str__() + " GSystem instances." 
# ----------------------------------------------------------------------------- # Updating language fields data type: # - Firstly, replacing None to ('en', 'English') node_collection.collection.update( { "_type": { "$in": [ "AttributeType", "RelationType", "MetaType", "ProcessType", "GSystemType", "GSystem", "File", "Group", "Author", ] }, "language": {"$in": [None, "", u""]}, }, {"$set": {"language": ("en", "English")}}, upsert=False, multi=True, ) all_nodes = node_collection.find( { "_type": { "$in": [ "AttributeType", "RelationType", "MetaType", "ProcessType", "GSystemType", "GSystem", "File", "Group", "Author", ] } } ) all_languages = list(LANGUAGES) + OTHER_COMMON_LANGUAGES all_languages_concanated = reduce(lambda x, y: x + y, all_languages) # iterating over each document in the cursor: # - Secondly, replacing invalid language values to valid tuple from settings for each_node in all_nodes: if each_node.language and (each_node.language in all_languages_concanated): for each_lang in all_languages: if each_node.language in each_lang: # printing msg without checking update result for performance. print "Updated language field of: ", each_node.name print "\tFrom", each_node.language, " to: ", each_lang, "\n" node_collection.collection.update( {"_id": each_node._id}, {"$set": {"language": each_lang}}, upsert=False, multi=False ) # --- END of Language processing --- # adding all activated and logged-in user's id into author_set of "home" and "desk" group --- all_authors = node_collection.find({"_type": "Author"}) authors_list = [auth.created_by for auth in all_authors] # updating author_set of desk and home group w.ref. 
to home group's author_set home_group = node_collection.one({"_type": "Group", "name": "home"}) prev_home_author_set = home_group.author_set total_author_set = list(set(authors_list + home_group.author_set)) result = node_collection.collection.update( {"_type": "Group", "name": {"$in": [u"home", u"desk"]}, "author_set": {"$ne": total_author_set}}, {"$set": {"author_set": total_author_set}}, upsert=False, multi=True, ) if result["updatedExisting"]: # and result['nModified']: home_group.reload() print "\n Updated author_set of 'home' and 'desk' group:" + "\n\t - Previously it was : " + str( len(prev_home_author_set) ) + " users." "\n\t - Now it's updated to : " + str(len(home_group.author_set)) + " users." # -------------------------------------------------------------------------- # 'group_admin' of group should not be empty. So updating one for [] with creator of group. all_groups = node_collection.find({"_type": "Group"}) for each_group in all_groups: if not each_group.group_admin: res = node_collection.collection.update( {"_id": ObjectId(each_group._id)}, {"$set": {"group_admin": [each_group.created_by]}}, upsert=False, multi=False, ) if res["updatedExisting"]: each_group.reload() print "updated group_admin of: " + each_group.name + " from [] to :" + unicode( each_group.group_admin ) # -------------------------------------------------------------------------- # removing <'partner': bool> field from Group objects res = node_collection.collection.update( {"_type": {"$in": ["Group"]}}, {"$unset": {"partner": False}}, upsert=False, multi=True ) if res["updatedExisting"]: # and res['nModified']: print "\n Removed 'partner' field from " + res["n"].__str__() + " Group instances." 
# -------------------------------------------------------------------------- # Adding <'moderation_level': -1> field to Group objects res = node_collection.collection.update( { "_type": {"$in": ["Group"]}, "edit_policy": {"$nin": ["EDITABLE_MODERATED"]}, "moderation_level": {"$exists": False}, }, {"$set": {"moderation_level": -1}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Added 'moderation_level' field to " + res["n"].__str__() + " Group instances." # ----------------------------------------------------------------------------- # Replacing invalid value of agency_type field belonging to Author node by "Other" res = node_collection.collection.update( {"_type": "Author", "agency_type": {"$nin": GSTUDIO_AUTHOR_AGENCY_TYPES}}, {"$set": {"agency_type": u"Other"}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Replacing invalid value of agency_type field belonging to Author node by 'Other'" + "... #" + res[ "n" ].__str__() + " records updated." 
# ----------------------------------------------------------------------------- # From existing RelationType instance(s), finding Binary relationships # and Setting their "member_of" field's value as "Binary" (MetaType) mt_binary = node_collection.one({"_type": "MetaType", "name": "Binary"}) if mt_binary: res = node_collection.collection.update( {"_type": "RelationType", "object_type.0": {"$not": {"$type": 4}}}, {"$set": {"member_of": [mt_binary._id]}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res["nModified"]: print "\n 'member_of' field updated in following RelationType " + "instance(s) representing 'Binary Relationships':", res[ "n" ] # Replacing object_type of "trainer_of_course" & "master_trainer_of_course" # relationship from "Announced Course" to "NUSSD Course" nussd_course = node_collection.one({"_type": "GSystemType", "name": "NUSSD Course"}) if nussd_course: nussd_course_id = nussd_course._id res = node_collection.collection.update( {"_type": "RelationType", "name": "trainer_of_course", "object_value": {"$nin": [nussd_course_id]}}, {"$set": {"object_type": [nussd_course_id]}}, upsert=False, multi=False, ) if res["updatedExisting"]: # and res['nModified']: print "\n Replaced object_type of 'trainer_of_course' relationship" + " from 'Announced Course' to 'NUSSD Course'." res = node_collection.collection.update( { "_type": "RelationType", "name": "master_trainer_of_course", "object_value": {"$nin": [nussd_course_id]}, }, {"$set": {"object_type": [nussd_course_id]}}, upsert=False, multi=False, ) if res["updatedExisting"]: # and res['nModified']: print "\n Replaced object_type of 'master_trainer_of_course' relationship" + " from 'Announced Course' to 'NUSSD Course'." 
# Appending attribute_type_set and relation_type_set fields to existing MetaType nodes res = node_collection.collection.update( {"_type": "MetaType", "attribute_type_set": {"$exists": False}, "relation_type_set": {"$exists": False}}, {"$set": {"attribute_type_set": [], "relation_type_set": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Appending attribute_type_set and relation_type_set fields to existing MetaType nodes." # Renames RelaionType names -- "has_corresponding_task" to "has_current_approval_task" res = node_collection.collection.update( {"_type": "RelationType", "name": u"has_corresponding_task"}, {"$set": {"name": u"has_current_approval_task"}}, upsert=False, multi=False, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'name' field updated of RelationType (Renamed from has_corresponding_task to has_current_approval_task)" # Replaces "for_acourse" RelationType's object_cardinality field's value from 1 to 100 res = node_collection.collection.update( {"_type": "RelationType", "name": "for_acourse"}, {"$set": {"object_cardinality": 100}}, upsert=False, multi=False, ) if res["updatedExisting"]: # and res['nModified']: print "\n Replaced 'for_acourse' RelationType's 'object_cardinality' field's value from 1 to 100." 
file_gst = node_collection.one({"_type": "GSystemType", "name": "File"}) pandora_video_st = node_collection.one({"_type": "GSystemType", "name": "Pandora_video"}) # Update the url field of all nodes # if pandora_video_st: # nodes = node_collection.find({'member_of': {'$nin':[pandora_video_st._id],'$in':[file_gst._id]},'access_policy':'PUBLIC' }) # site = Site.objects.get(pk=1) # site = site.domain.__str__() # site = "127.0.0.1:8000" if (site == u'example.com') else site # count = 0 # for each in nodes: # grp_name = node_collection.one({'_id': ObjectId(each.group_set[0]) }).name # if "/" in each.mime_type: # filetype = each.mime_type.split("/")[1] # url_link = "http://" + site + "/" + grp_name.replace(" ","%20").encode('utf8') + "/file/readDoc/" + str(each._id) + "/" + str(each.name) + "." + str(filetype) # if each.url != unicode(url_link): # node_collection.collection.update({'_id':each._id},{'$set':{'url': unicode(url_link) }}) # count = count + 1 # if count: # print "\n 'url' field updated in following no. of documents: ", count # Update pandora videos 'member_of', 'created_by', 'modified_by', 'contributors' field if User.objects.filter(username="******").exists(): auth_id = User.objects.get(username="******").pk if auth_id and pandora_video_st: res = node_collection.collection.update( {"_type": "File", "member_of": {"$in": [pandora_video_st._id]}, "created_by": {"$ne": auth_id}}, { "$set": { "created_by": auth_id, "modified_by": auth_id, "member_of": [file_gst._id, pandora_video_st._id], }, "$push": {"contributors": auth_id}, }, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'created_by, modified_by & contributors' field updated for pandora videos in following no. 
of documents: ", res[ "n" ] # Update prior_node for each node in DB who has its collection_set all_nodes = node_collection.find( {"_type": {"$in": ["GSystem", "File", "Group"]}, "collection_set": {"$exists": True, "$not": {"$size": 0}}} ) count = 0 for each in all_nodes: if each.collection_set: for l in each.collection_set: obj = node_collection.one({"_id": ObjectId(l)}) if obj: if each._id not in obj.prior_node: node_collection.collection.update( {"_id": obj._id}, {"$push": {"prior_node": ObjectId(each._id)}} ) count = count + 1 if count: print "\n prior_node field updated in following no. of documents: ", count # Updating names (Stripped) in all theme , theme_items and topic documents theme_GST = node_collection.one({"_type": "GSystemType", "name": "Theme"}) theme_item_GST = node_collection.one({"_type": "GSystemType", "name": "theme_item"}) topic_GST = node_collection.one({"_type": "GSystemType", "name": "Topic"}) if theme_GST and theme_item_GST and topic_GST: nodes = node_collection.find({"member_of": {"$in": [theme_GST._id, theme_item_GST._id, topic_GST._id]}}) count = 0 for each in nodes: if each.name != each.name.strip(): node_collection.collection.update( {"_id": ObjectId(each._id)}, {"$set": {"name": each.name.strip()}} ) count = count + 1 if count: print "\n Name field updated (Stripped) in following no. of documents: ", count # Update's "status" field from DRAFT to PUBLISHED for all TYPE's node(s) res = node_collection.collection.update( {"_type": {"$in": ["MetaType", "GSystemType", "RelationType", "AttributeType"]}, "status": u"DRAFT"}, {"$set": {"status": u"PUBLISHED"}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'status' field updated for all TYPE's node(s) in following no. 
of documents: ", res["n"] # Update object_value of GAttribute(s) of "Assignee" AttributeType # Find those whose data-type is not list/Array # Replace those as list of value(s) assignee_at = node_collection.one({"_type": "AttributeType", "name": "Assignee"}) assignee_at = False if assignee_at: res = 0 assignee_cur = triple_collection.find({"_type": "GAttribute", "attribute_type.$id": assignee_at._id}) for each in assignee_cur: # If string ul_sv = [] if type(each.object_value) in [str, unicode]: if "," in each.object_value and "[" in each.object_value and "]" in each.object_value: ul_sv = each.object_value.strip("[]").replace(", ", ",").replace(" ,", ",").split(",") elif "," in each.object_value: ul_sv = each.object_value.replace(", ", ",").replace(" ,", ",").split(",") elif "[" in each.object_value or "]" in each.object_value: ul_sv = each.object_value.strip("[]").split(",") ul_id = [] for u in ul_sv: if not u.isdigit(): user = User.objects.get(username=u) else: user = User.objects.get(id=int(u)) if user: if user.id not in ul_id: ul_id.append(user.id) upres = triple_collection.collection.update( {"_id": each._id}, {"$set": {"object_value": ul_id}}, upsert=False, multi=False ) res += upres["n"] # If list elif type(each.object_value) == list: ul_id = [] for u in each.object_value: if type(u) in [str, unicode] and not u.isdigit(): if u.strip("[]"): user = User.objects.get(username=u) elif type(u) in [str, unicode] and u.isdigit(): if u.strip("[]"): user = User.objects.get(id=int(u)) else: user = User.objects.get(id=int(u)) if user: if user.id not in ul_id: ul_id.append(user.id) upres = triple_collection.collection.update( {"_id": each._id}, {"$set": {"object_value": ul_id}}, upsert=False, multi=False ) res += upres["n"] if res: print "\n Updated following no. 
of Assignee GAttribute document(s): ", res # Updates already created has_profile_pic grelations' status - Except latest one (PUBLISHED) others' are set to DELETED has_profile_pic = node_collection.one({"_type": "RelationType", "name": u"has_profile_pic"}) op = triple_collection.collection.aggregate( [ {"$match": {"_type": "GRelation", "relation_type.$id": has_profile_pic._id}}, { "$group": { "_id": {"auth_id": "$subject"}, "pp_data": {"$addToSet": {"gr_id": "$_id", "status": "$status"}}, } }, ] ) res = 0 for each in op["result"]: auth_id = each["_id"]["auth_id"] pub_id = None pub_res = 0 del_id = [] del_res = 0 for l in each["pp_data"]: if l["status"] == u"PUBLISHED": pub_id = l["gr_id"] else: del_id.append(l["gr_id"]) if not pub_id: pub_id = each["pp_data"][len(each["pp_data"]) - 1]["gr_id"] pub_res = node_collection.collection.update( {"_id": pub_id}, {"$set": {"status": u"PUBLISHED"}}, upsert=False, multi=False ) pub_res = pub_res["n"] del_id.pop() del_res = node_collection.collection.update( {"_id": {"$in": del_id}}, {"$set": {"status": u"DELETED"}}, upsert=False, multi=True ) if pub_res or del_res["n"]: res += 1 if res: print "\n Updated following no. of has_profile_pic GRelation document(s): ", res # Updates the value of object_cardinality to 100. So that teaches will behave as 1:M (one-to-many) relation. teaches = node_collection.one({"_type": "RelationType", "name": "teaches"}) res = node_collection.collection.update( {"_id": teaches._id, "object_cardinality": {"$ne": 100}}, {"$set": {"object_cardinality": 100}}, upsert=False, multi=False, ) if res["updatedExisting"]: print "\n 'teaches' RelationType updated with object_cardinality: 100. Changed document: ", res["n"] else: print "\n 'teaches' RelationType: no need to update." 
# Replacing object_type of "has_course" relationship from "NUSSD Course" to "Announced Course" ann_course = node_collection.one({"_type": "GSystemType", "name": "Announced Course"}) if ann_course: res = node_collection.collection.update( {"_type": "RelationType", "name": "has_course"}, {"$set": {"object_type": [ann_course._id]}}, upsert=False, multi=False, ) if res["updatedExisting"]: # and res['nModified']: print "\n Replaced object_type of 'has_course' relationship from 'NUSSD Course' to 'Announced Course'." # Adds "relation_set" field (with default value as []) to all documents belonging to GSystems. res = node_collection.collection.update( { "_type": { "$nin": [ "MetaType", "GSystemType", "RelationType", "AttributeType", "GRelation", "GAttribute", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder", ] }, "relation_set": {"$exists": False}, }, {"$set": {"relation_set": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'relation_set' field added to following no. of documents: ", res["n"] # Adds "attribute_set" field (with default value as []) to all documents belonging to GSystems. res = node_collection.collection.update( { "_type": { "$nin": [ "MetaType", "GSystemType", "RelationType", "AttributeType", "GRelation", "GAttribute", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder", ] }, "attribute_set": {"$exists": False}, }, {"$set": {"attribute_set": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'attribute_set' field added to following no. of documents: ", res["n"] # Adds "license" field (with default value as "") to all documents belonging to GSystems. 
res = node_collection.collection.update( { "_type": { "$nin": [ "MetaType", "GSystemType", "RelationType", "AttributeType", "GRelation", "GAttribute", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder", ] }, "license": {"$exists": False}, }, {"$set": {"license": None}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'license' field added to following no. of documents: ", res["n"] # Adding "Agency_type" field adding to group documents with default values res = node_collection.collection.update( {"_type": {"$in": ["Group"]}, "agency_type": {"$exists": False}}, {"$set": {"agency_type": "Project"}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'agency_type' field added to 'Group' documents totalling to : ", res["n"] # Adding "Agency_type" field adding to author documents with default values res = node_collection.collection.update( {"_type": {"$in": ["Author"]}, "agency_type": {"$exists": False}}, {"$set": {"agency_type": "Others"}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'agency_type' field added to 'Author' documents totalling to : ", res["n"] # Modify language field with unicode value if any document has language with dict datatype res = node_collection.collection.update({"language": {}}, {"$set": {"language": u""}}, upsert=False, multi=True) # Removing existing "cr_or_xcr" field with no default value res = node_collection.collection.update( {"_type": {"$in": ["Group"]}, "cr_or_xcr": {"$exists": True}}, {"$unset": {"cr_or_xcr": False}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Already existing 'cr_or_xcr' field removed from documents totalling to : ", res["n"] # Adding "curricular" field with no default value res = node_collection.collection.update( {"_type": {"$in": ["Group"]}, "curricular": {"$exists": False}}, {"$set": {"curricular": False}}, upsert=False, multi=True, ) 
if res["updatedExisting"]: # and res['nModified']: print "\n 'curricular' field added to all Group documents totalling to : ", res["n"] # Removing existing "partners" field with no default value res = node_collection.collection.update( {"_type": {"$in": ["Group"]}, "partners": {"$exists": True}}, {"$unset": {"partners": False}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n Already existing 'partners' field removed from documents totalling to : ", res["n"] # # Adding "partner" field with no default value # res = node_collection.collection.update({'_type': {'$in': ['Group']}, 'partner': {'$exists': False}}, # {'$set': {'partner': False }}, # upsert=False, multi=True # ) # if res['updatedExisting']: # and res['nModified']: # print "\n 'partner' field added to all Group documents totalling to : ", res['n'] # Adding "preferred_languages" field with no default value res = node_collection.collection.update( {"_type": {"$in": ["Author"]}, "preferred_languages": {"$exists": False}}, {"$set": {"preferred_languages": {}}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'preferred_languages' field added to all author documents totalling to : ", res["n"] # Adding "rating" field with no default value res = node_collection.collection.update( { "_type": { "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"] }, "rating": {"$exists": False}, }, {"$set": {"rating": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'rating' field added to following no. 
of documents: ", res["n"] # Adds 'subject_scope', 'attribute_type_scope', 'object_value_scope' field (with default value as "") to all documents which belongs to GAttribute res = node_collection.collection.update( {"_type": {"$in": ["Group", "Author"]}, "group_admin": {"$exists": False}}, {"$set": {"group_admin": []}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'group_admin' field added to following no. of documents: ", res["n"] # Adds 'subject_scope', 'attribute_type_scope', 'object_value_scope' field (with default value as "") to all documents which belongs to GAttribute res = triple_collection.collection.update( { "_type": "GAttribute", "subject_scope": {"$exists": False}, "attribute_type_scope": {"$exists": False}, "object_value_scope": {"$exists": False}, }, {"$set": {"subject_scope": "", "attribute_type_scope": "", "object_value_scope": ""}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'subject_scope', 'attribute_type_scope', 'object_value_scope' fields added to following no. of documents: ", res[ "n" ] # Adds 'subject_scope', 'relation_type_scope', 'right_subject_scope' field (with default value as "") to all documents which belongs to GRelation res = triple_collection.collection.update( { "_type": "GRelation", "subject_scope": {"$exists": False}, "relation_type_scope": {"$exists": False}, "right_subject_scope": {"$exists": False}, }, {"$set": {"subject_scope": "", "relation_type_scope": "", "right_subject_scope": ""}}, upsert=False, multi=True, ) if res["updatedExisting"]: # and res['nModified']: print "\n 'subject_scope', 'relation_type_scope', 'right_subject_scope' fields added to following no. 
of documents: ", res[ "n" ]

# --- One-time schema migration: back-fill newer Node fields on legacy documents. ---
# Each update below adds one field (with a default) to every document that
# predates it, skipping meta/derived collections (GAttribute, GRelation,
# ReducedDocs, ToReduceDocs, IndexedWordList, node_holder, ...).
# NOTE(review): Python 2 script using raw pymongo collection.update(); the
# "updatedExisting" flag is only informative — updates are idempotent.

# Adds "annotations" field (with default value as []) to all documents belonging to GSystems
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": [
                "MetaType",
                "GSystemType",
                "RelationType",
                "AttributeType",
                "GRelation",
                "GAttribute",
                "ReducedDocs",
                "ToReduceDocs",
                "IndexedWordList",
                "node_holder",
            ]
        },
        "annotations": {"$exists": False},
    },
    {"$set": {"annotations": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n annotations field added to following no. of documents: ", res["n"]

# Adds "group_set" field (with default value as []) to all documents except those which belongs to either GAttribute or GRelation
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "group_set": {"$exists": False},
    },
    {"$set": {"group_set": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n group_set field added to following no. of documents: ", res["n"]

# Adds "property_order" field (with default value as []) to all documents except those which belongs to either GAttribute or GRelation
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "property_order": {"$exists": False},
    },
    {"$set": {"property_order": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n property_order field added to following no. of documents: ", res["n"]

# Adding "modified_by" field with None as it's default value
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "modified_by": {"$exists": False},
    },
    {"$set": {"modified_by": None}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n modified_by field added to following no. of documents: ", res["n"]

# Adding "complex_data_type" field with empty list as it's default value
# (only AttributeType documents carry this field)
res = node_collection.collection.update(
    {"_type": "AttributeType", "complex_data_type": {"$exists": False}},
    {"$set": {"complex_data_type": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n complex_data_type field added to following no. of documents: ", res["n"]

# Adding "post_node" field with empty list as it's default value
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "post_node": {"$exists": False},
    },
    {"$set": {"post_node": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n post_node field added to following no. of documents: ", res["n"]

# Adding "collection_set" field with empty list as it's default value
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "collection_set": {"$exists": False},
    },
    {"$set": {"collection_set": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n collection_set field added to following no. of documents: ", res["n"]

# Adding "location" field with no default value
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "location": {"$exists": False},
    },
    {"$set": {"location": []}},
    upsert=False,
    multi=True,
)
if res["updatedExisting"]:  # and res['nModified']:
    print "\n location field added to following no. of documents: ", res["n"], "\n"

# Adding "language" field with no default value
res = node_collection.collection.update(
    {
        "_type": {
            "$nin": ["GAttribute", "GRelation", "ReducedDocs", "ToReduceDocs", "IndexedWordList", "node_holder"]
        },
        "language": {"$exists": False},
    },
    {"$set": {"language": unicode("")}},
    upsert=False,
    multi=True,
)

# Adding "access_policy" field
# For Group documents, access_policy value is set depending upon their
# group_type values, i.e. either PRIVATE/PUBLIC whichever is there
node_collection.collection.update(
    {"_type": "Group", "group_type": "PRIVATE"},
    {"$set": {"access_policy": u"PRIVATE"}},
    upsert=False,
    multi=True,
)
node_collection.collection.update(
    {"_type": "Group", "group_type": "PUBLIC"}, {"$set": {"access_policy": u"PUBLIC"}}, upsert=False, multi=True
)

# For Non-Group documents which doesn't consits of access_policy field, add it with PUBLIC as it's default value
node_collection.collection.update(
    {
        "_type": {
            "$nin": [
                "Group",
                "GAttribute",
                "GRelation",
                "ReducedDocs",
                "ToReduceDocs",
                "IndexedWordList",
                "node_holder",
            ]
        },
        "access_policy": {"$exists": False},
    },
    {"$set": {"access_policy": u"PUBLIC"}},
    upsert=False,
    multi=True,
)
# Normalise existing access_policy values to unicode PUBLIC/PRIVATE.
node_collection.collection.update(
    {
        "_type": {
            "$nin": [
                "Group",
                "GAttribute",
                "GRelation",
                "ReducedDocs",
                "ToReduceDocs",
                "IndexedWordList",
                "node_holder",
            ]
        },
        "access_policy": {"$in": [None, "PUBLIC"]},
    },
    {"$set": {"access_policy": u"PUBLIC"}},
    upsert=False,
    multi=True,
)
node_collection.collection.update(
    {
        "_type": {
            "$nin": [
                "Group",
                "GAttribute",
                "GRelation",
                "ReducedDocs",
                "ToReduceDocs",
                "IndexedWordList",
                "node_holder",
            ]
        },
        "access_policy": "PRIVATE",
    },
    {"$set": {"access_policy": u"PRIVATE"}},
    upsert=False,
    multi=True,
)

# NOTE(review): gstpage_node/gstwiki are fetched but only used by the
# commented-out loop below — presumably kept for future re-runs.
gstpage_node = node_collection.find_one({"name": "Page"})
gstwiki = node_collection.find_one({"name": "Wiki page"})
# page_nodes = node_collection.find({"member_of":gstpage_node._id})
# for i in page_nodes:
#     if gstwiki._id not in i.type_of:
#         i.type_of.append(gstwiki._id)
#         i.save()
#     else:
#         print i.name,"Page already Updated"

# Back-fill proficiency fields on Author documents that lack either one.
nodes = node_collection.find(
    {
        "_type": "Author",
        "$or": [{"language_proficiency": {"$exists": False}}, {"subject_proficiency": {"$exists": False}}],
    }
)
for i in nodes:
    node_collection.collection.update(
        {"_id": ObjectId(i._id)},
        {"$set": {"language_proficiency": "", "subject_proficiency": ""}},
        upsert=False,
        multi=False,
    )
    print i.name, "Updated !!"

# Add attributes to discussion thread for every page node.
# If thread does not exist, create it.
# pages_files_not_updated = []
"""
Commented on Dec 5 2015 [email protected] to avoid unnecessary processing.
This is a one-time script

pages_files_not_updated = {}
page_gst = node_collection.one( { '_type': "GSystemType", 'name': "Page" })
file_gst = node_collection.one( { '_type': "GSystemType", 'name': "File" })
page_file_cur = node_collection.find( { 'member_of': {'$in':[page_gst._id, file_gst._id]} ,
    'status': { '$in': [u'DRAFT', u'PUBLISHED']}} ).sort('last_update', -1)
has_thread_rt = node_collection.one({"_type": "RelationType", "name": u"has_thread"})
twist_gst = node_collection.one({'_type': 'GSystemType', 'name': 'Twist'})
reply_gst = node_collection.one({'_type': 'GSystemType', 'name': 'Reply'})
rel_resp_at = node_collection.one({'_type': 'AttributeType', 'name': 'release_response'})
thr_inter_type_at = node_collection.one({'_type': 'AttributeType', 'name': 'thread_interaction_type'})
discussion_enable_at = node_collection.one({"_type": "AttributeType", "name": "discussion_enable"})
all_count = page_file_cur.count()
print "\n Total pages and files found : ", all_count
print "\n Processing " + str(all_count) + " will take time. Plase hold on ...\n"
for idx, each_node in enumerate(page_file_cur):
    try:
        # print "Processing #",idx, " of ",all_count
        print ".",
        # print "\nPage# ",idx, "\t - ", each_node._id, '\t - ' , each_node.name, each_node.attribute_set
        release_response_val = True
        interaction_type_val = unicode('Comment')
        userid = each_node.created_by
        thread_obj = node_collection.one({"_type": "GSystem", "member_of": ObjectId(twist_gst._id),
            "prior_node": ObjectId(each_node._id)})
        release_response_status = False
        thread_interaction_type_status = False
        discussion_enable_status = False
        has_thread_status = False
        # if get_attribute_value(each_node._id,"discussion_enable") != "":
        #     discussion_enable_status = True
        if get_relation_value(each_node._id,"has_thread") != ("",""):
            has_thread_status = True
        if thread_obj:
            reply_cur = node_collection.find({'prior_node': each_node._id, 'member_of': reply_gst._id})
            if reply_cur:
                for each_rep in reply_cur:
                    node_collection.collection.update({'_id': each_rep._id},
                        {'$set':{'prior_node':[thread_obj._id]}}, upsert = False, multi = False)
                    each_rep.reload()
            # creating GRelation
            if not has_thread_status:
                gr = create_grelation(each_node._id, has_thread_rt, thread_obj._id)
                each_node.reload()
            if get_attribute_value(thread_obj._id,"release_response") != "":
                release_response_status = True
            if get_attribute_value(thread_obj._id,"thread_interaction_type") != "":
                thread_interaction_type_status = True
            if not release_response_status:
                if release_response_val:
                    create_gattribute(thread_obj._id, rel_resp_at, release_response_val)
                    thread_obj.reload()
            if not thread_interaction_type_status:
                if interaction_type_val:
                    create_gattribute(thread_obj._id, thr_inter_type_at, interaction_type_val)
                    thread_obj.reload()
            # print "\nThread_obj updated with new attr", thread_obj.attribute_set, '\n\n'
        else:
            thread_obj = node_collection.one({"_type": "GSystem", "member_of": ObjectId(twist_gst._id),
                "relation_set.thread_of": ObjectId(each_node._id)})
            if thread_obj:
                if get_attribute_value(each_node._id,"discussion_enable") != True:
                    create_gattribute(each_node._id, discussion_enable_at, True)
            else:
                if get_attribute_value(each_node._id,"discussion_enable") != False:
                    create_gattribute(each_node._id, discussion_enable_at, False)
                    # print "\n\n discussion_enable False"
    except Exception as e:
        pages_files_not_updated[str(each_node._id)] = str(e)
        print "\n\nError occurred for page ", each_node._id, "--", each_node.name,"--",e
        # print e, each_node._id
        pass
print "\n------- Discussion thread for Page and File GST successfully completed-------\n"
print "\n\n Pages/Files that were not able to updated\t", pages_files_not_updated
"""

# Correct Eventype and CollegeEventtype Node by setting their modified by field
glist = node_collection.one({"_type": "GSystemType", "name": "GList"})
node = node_collection.find({"member_of": ObjectId(glist._id), "name": {"$in": ["Eventtype", "CollegeEvents"]}})
for i in node:
    i.modified_by = 1
    i.save()
    print "Updated", i.name, "'s modified by feild from null to 1"
def uDashboard(request, group_id):
    """Render a user's dashboard page (ndf/uDashboard.html).

    ``group_id`` may be either a numeric Django user id or a group
    name/ObjectId string; both resolve to an Author node.  The view gathers
    per-user counts of Pages/Files/Forums/Quizzes/Threads/Replies, pending
    Tasks, groups, a recent-activity feed, collaborator info, profile
    pictures, and an activity-rating pie chart, then renders the template.
    """
    try:
        # Fast path: group_id is a plain integer user id.
        usrid = int(group_id)
        auth = node_collection.one({'_type': "Author", 'created_by': usrid})
    except:
        # Otherwise treat group_id as a group/author name or ObjectId string.
        auth = get_group_name_id(group_id, get_obj=True)
        usrid = auth.created_by
        group_id = auth._id
    # Fetching user group of current user & then reassigning group_id with it's corresponding ObjectId value
    group_name = auth.name
    usrname = auth.name
    date_of_join = auth['created_at']
    # current_user = request.user.pk
    # NOTE(review): current_user is aliased to the dashboard owner's id, not
    # request.user — so the "current_user != usrid" branch below can never
    # fire here; presumably a leftover from an earlier version. TODO confirm.
    current_user = usrid
    has_profile_pic = None
    profile_pic_image = None
    current_user_obj = None
    usr_fname = None
    usr_lname = None
    success_state = True
    old_profile_pics = []
    is_already_selected = None
    task_gst = node_collection.one({'_type': "GSystemType", 'name': "Task"})
    if current_user:
        exclued_from_public = ""
        if int(current_user) == int(usrid):
            # Owner sees both public and private resources.
            Access_policy = ["PUBLIC", "PRIVATE"]
        if int(current_user) != int(usrid):
            Access_policy = ["PUBLIC"]
    else:
        # Anonymous viewer: public only, and Task nodes are excluded.
        Access_policy = ["PUBLIC"]
        exclued_from_public = ObjectId(task_gst._id)
    dashboard_count = {}
    group_list = []
    user_activity = []

    # --- Per-resource-type cursors for counts (HIDDEN nodes excluded). ---
    page_gst = node_collection.one({'_type': "GSystemType", 'name': 'Page'})
    page_cur = node_collection.find({
        'member_of': {
            '$all': [page_gst._id]
        },
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })
    file_cur = node_collection.find({
        '_type': u"File",
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })
    forum_gst = node_collection.one({"_type": "GSystemType", "name": "Forum"})
    forum_count = node_collection.find({
        "_type": "GSystem",
        "member_of": forum_gst._id,
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })
    quiz_gst = node_collection.one({"_type": "GSystemType", "name": "Quiz"})
    quiz_count = node_collection.find({
        "_type": "GSystem",
        "member_of": quiz_gst._id,
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })
    thread_gst = node_collection.one({"_type": "GSystemType", "name": "Twist"})
    thread_count = node_collection.find({
        "_type": "GSystem",
        "member_of": thread_gst._id,
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })
    reply_gst = node_collection.one({"_type": "GSystemType", "name": "Reply"})
    reply_count = node_collection.find({
        "_type": "GSystem",
        "member_of": reply_gst._id,
        'created_by': int(usrid),
        "status": {
            "$nin": ["HIDDEN"]
        }
    })

    # --- Open tasks assigned to the owner (10 most recently updated). ---
    task_cur = ""
    if current_user:
        if int(current_user) == int(usrid):
            task_cur = node_collection.find({
                'member_of': task_gst._id,
                'attribute_set.Status': {
                    '$in': ["New", "In Progress"]
                },
                'attribute_set.Assignee': usrid
            }).sort('last_update', -1).limit(10)
            dashboard_count.update({'Task': task_cur.count()})
        current_user_obj = User.objects.get(id=current_user)
        usr_fname = current_user_obj.first_name
        usr_lname = current_user_obj.last_name

    # Groups the user administers or authors (excluding "home" and the
    # user's own author-group), 10 most recently updated.
    group_cur = node_collection.find({
        '_type': "Group",
        'name': {
            '$nin': ["home", auth.name]
        },
        "access_policy": {
            "$in": Access_policy
        },
        '$or': [{
            'group_admin': int(usrid)
        }, {
            'author_set': int(usrid)
        }]
    }).sort('last_update', -1).limit(10)
    dashboard_count.update({'group': group_cur.count()})

    # user activity gives all the activities of the users
    # NOTE(review): '_type': 'group' is lowercase while Group documents are
    # stored as 'Group' elsewhere in this file — that $or arm likely matches
    # nothing. TODO confirm against the collection.
    activity = ""
    activity_user = node_collection.find({
        '$and': [{
            '$or': [{
                '_type': 'GSystem'
            }, {
                '_type': 'group'
            }, {
                '_type': 'File'
            }]
        }, {
            "access_policy": {
                "$in": Access_policy
            }
        }, {
            'status': {
                '$in': [u"DRAFT", u"PUBLISHED"]
            }
        }, {
            'member_of': {
                '$nin': [exclued_from_public]
            }
        }, {
            '$or': [{
                'created_by': int(usrid)
            }, {
                'modified_by': int(usrid)
            }]
        }]
    }).sort('last_update', -1).limit(10)
    a_user = []
    dashboard_count.update({'activity': activity_user.count()})
    #for i in activity_user:
    #    if i._type != 'Batch' or i._type != 'Course' or i._type != 'Module':
    #        a_user.append(i)
    #loop replaced by a list comprehension
    # NOTE(review): the chained "!=" with "or" is a tautology (always True),
    # so Batch/Course/Module are NOT actually filtered out; the intended
    # condition is presumably i._type not in ('Batch','Course','Module').
    # Left as-is to preserve current behaviour — fix needs a product decision.
    a_user = [
        i for i in activity_user
        if (i._type != 'Batch' or i._type != 'Course' or i._type != 'Module')
    ]
    #a temp. variable which stores the lookup for append method
    user_activity_append_temp = user_activity.append
    for each in a_user:
        # Classify the activity; note `activity` is computed but only the
        # node itself is appended to user_activity.
        if each.created_by == each.modified_by:
            if each.last_update == each.created_at:
                activity = 'created'
            else:
                activity = 'modified'
        else:
            activity = 'created'
        if each._type == 'Group':
            user_activity_append_temp(each)
        else:
            member_of = node_collection.find_one({"_id": each.member_of[0]})
            user_activity_append_temp(each)
    '''
    notification_list=[]
    notification_object = notification.NoticeSetting.objects.filter(user_id=int(ID))
    for each in notification_object:
        ntid = each.notice_type_id
        ntype = notification.NoticeType.objects.get(id=ntid)
        label = ntype.label.split("-")[0]
        notification_list.append(label)

    Retrieving Tasks Assigned for User (Only "New" and "In Progress")
    user_assigned = []
    attributetype_assignee = node_collection.find_one({"_type":'AttributeType', 'name':'Assignee'})
    attr_assignee = triple_collection.find(
        {"_type": "GAttribute", "attribute_type.$id": attributetype_assignee._id,
         "object_value": request.user.id}
    ).sort('last_update', -1).limit(10)
    dashboard_count.update({'Task':attr_assignee.count()})
    for attr in attr_assignee :
        task_node = node_collection.one({'_id':attr.subject})
        if task_node:
            user_assigned.append(task_node)

    task_cur gives the task asigned to users
    '''
    # Resources in this group to which the user contributed.
    obj = node_collection.find({
        '_type': {
            '$in': [u"GSystem", u"File"]
        },
        'contributors': int(usrid),
        'group_set': {
            '$all': [ObjectId(group_id)]
        }
    })
    collab_drawer = []
    #a temp. variable which stores the lookup for append method
    collab_drawer_append_temp = collab_drawer.append
    """
    To populate collaborators according to their latest modification of particular resource:
    """
    for each in obj.sort('last_update', -1):
        for val in each.contributors:
            name = User.objects.get(pk=val).username
            collab_drawer_append_temp({
                'usrname': name,
                'Id': val,
                'resource': each.name
            })
    shelves = []
    datavisual = []
    shelf_list = {}
    show_only_pie = True

    # --- Profile picture: current one from relation_set, plus any DELETED
    # --- (older) has_profile_pic relations for history display.
    if not profile_pic_image:
        if auth:
            for each in auth.relation_set:
                if "has_profile_pic" in each:
                    profile_pic_image = node_collection.one({
                        '_type': "GSystem",
                        '_id': each["has_profile_pic"][0]
                    })
                    break
    has_profile_pic_rt = node_collection.one({
        '_type': 'RelationType',
        'name': unicode('has_profile_pic')
    })
    all_old_prof_pics = triple_collection.find({
        '_type': "GRelation",
        "subject": auth._id,
        'relation_type.$id': has_profile_pic_rt._id,
        'status': u"DELETED"
    })
    if all_old_prof_pics:
        for each_grel in all_old_prof_pics:
            n = node_collection.one({'_id': ObjectId(each_grel.right_subject)})
            if n not in old_profile_pics:
                old_profile_pics.append(n)

    # --- Activity-rating data for the pie chart. ---
    # NOTE(review): datavisual rates threads with the CREATION rating, while
    # total_activity_rating below rates threads with the REPLY rating —
    # inconsistent; confirm which is intended before changing either.
    forum_create_rate = forum_count.count() * GSTUDIO_RESOURCES_CREATION_RATING
    file_create_rate = file_cur.count() * GSTUDIO_RESOURCES_CREATION_RATING
    page_create_rate = page_cur.count() * GSTUDIO_RESOURCES_CREATION_RATING
    quiz_create_rate = quiz_count.count() * GSTUDIO_RESOURCES_CREATION_RATING
    reply_create_rate = reply_count.count() * GSTUDIO_RESOURCES_REPLY_RATING
    thread_create_rate = thread_count.count(
    ) * GSTUDIO_RESOURCES_CREATION_RATING
    datavisual.append({"name": "Forum", "count": forum_create_rate})
    datavisual.append({"name": "File", "count": file_create_rate})
    datavisual.append({"name": "Page", "count": page_create_rate})
    datavisual.append({"name": "Quiz", "count": quiz_create_rate})
    datavisual.append({"name": "Reply", "count": reply_create_rate})
    datavisual.append({"name": "Thread", "count": thread_create_rate})
    datavisual.append({
        "name": "Registration",
        "count": GSTUDIO_RESOURCES_REGISTRATION_RATING
    })
    total_activity_rating = GSTUDIO_RESOURCES_REGISTRATION_RATING + (
        page_cur.count() + file_cur.count() + forum_count.count() +
        quiz_count.count()) * GSTUDIO_RESOURCES_CREATION_RATING + (
            thread_count.count() + reply_count.count()) * GSTUDIO_RESOURCES_REPLY_RATING
    return render_to_response("ndf/uDashboard.html", {
        'usr': current_user,
        'username': usrname,
        'user_id': usrid,
        'success': success_state,
        'usr_fname': usr_fname,
        'usr_lname': usr_lname,
        'DOJ': date_of_join,
        'author': auth,
        'group_id': group_id,
        'groupid': group_id,
        'group_name': group_name,
        'current_user_obj': current_user_obj,
        'already_set': is_already_selected,
        'user_groups': group_cur,
        'prof_pic_obj': profile_pic_image,
        'user_task': task_cur,
        'group_count': group_cur.count(),
        'page_count': page_cur.count(),
        'file_count': file_cur.count(),
        'user_activity': user_activity,
        'dashboard_count': dashboard_count,
        'show_only_pie': show_only_pie,
        'datavisual': json.dumps(datavisual),
        'total_activity_rating': total_activity_rating,
        'old_profile_pics': old_profile_pics,
        'site_name': GSTUDIO_SITE_NAME,
    }, context_instance=RequestContext(request))
def custom_app_view(request, group_id, app_name, app_id=None, app_set_id=None, app_set_instance_id=None):
    """ custom view for custom GAPPS

    Renders ndf/custom_template_for_app.html in one of three modes,
    depending on which ids are supplied:
      * app only (app_id/looked up from app_name)      -> app menu
      * app_set_id                                     -> listing of nodes of that set
      * app_set_instance_id (+ app_set_id)             -> single-instance detail view,
        including its GAttributes, GRelations and property_order-driven display fields.
    """
    #ins_objectid = ObjectId()
    #if ins_objectid.is_valid(group_id) is False :
    #group_ins = node_collection.find_one({'_type': "Group","name": group_id})
    #auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    #if group_ins:
    #group_id = str(group_ins._id)
    #else :
    #auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    #if auth :
    #group_id = str(auth._id)
    #else :
    #pass
    try:
        # group_id may already be a valid ObjectId string.
        group_id = ObjectId(group_id)
    except:
        # Otherwise resolve it as a group name.
        group_name, group_id = get_group_name_id(group_id)
    if app_id is None:
        if app_name == "partners":
            # URL slug -> stored GSystemType name.
            app_name = "Partners"
        app_ins = node_collection.find_one({'_type':"GSystemType", "name":app_name})
        if app_ins:
            app_id = str(app_ins._id)
    # Template-state defaults; the branches below fill in whichever mode applies.
    app_collection_set = []
    nodes_dict = []
    atlist = []
    rtlist = []
    app = node_collection.find_one({"_id":ObjectId(app_id)})
    app_set = ""
    nodes = ""
    nodes_dict = ""
    app_menu = ""
    app_set_template = ""
    app_set_instance_template = ""
    app_set_instance_name = ""
    app_set_name = ""
    title = ""
    tags = ""
    content = ""
    location = ""
    system = None
    system_id = ""
    system_type = ""
    system_mime_type = ""
    property_display_order = []
    # The app's collection_set members become the left-hand menu entries.
    for eachset in app.collection_set:
        app_set = node_collection.find_one({"_id":eachset})
        app_collection_set.append({"id": str(app_set._id), "name": app_set.name})
    if app_set_id:
        # Listing mode: show nodes that are members of the selected set.
        classtype = ""
        app_set_template = "yes"
        systemtype = node_collection.find_one({"_id":ObjectId(app_set_id)})
        systemtype_name = systemtype.name
        title = systemtype_name
        if request.method=="POST":
            # Name search (case-insensitive regex) across all groups.
            search = request.POST.get("search","")
            classtype = request.POST.get("class","")
            nodes = list(node_collection.find({'name':{'$regex':search, '$options': 'i'},'member_of': {'$all': [systemtype._id]}}))
        else :
            nodes = list(node_collection.find({'member_of': {'$all': [systemtype._id]},'group_set':{'$all': [ObjectId(group_id)]}}))
        nodes_dict = []
        for each in nodes:
            nodes_dict.append({"id":str(each._id), "name":each.name, "created_by":User.objects.get(id=each.created_by).username, "created_at":each.created_at})
    else :
        app_menu = "yes"
        title = app_name
    if app_set_instance_id :
        # Detail mode: show one instance with its attributes/relations.
        app_set_instance_template = "yes"
        app_set_template = ""
        systemtype_attributetype_set = []
        systemtype_relationtype_set = []
        system = node_collection.find_one({"_id":ObjectId(app_set_instance_id)})
        systemtype = node_collection.find_one({"_id":ObjectId(app_set_id)})
        for each in systemtype.attribute_type_set:
            systemtype_attributetype_set.append({"type":each.name,"type_id":str(each._id),"value":each.data_type})
        for each in systemtype.relation_type_set:
            systemtype_relationtype_set.append({"rt_name":each.name,"type_id":str(each._id)})
        # Collect this instance's GAttribute values...
        for eachatset in systemtype_attributetype_set :
            for eachattribute in triple_collection.find({"_type":"GAttribute", "subject":system._id, "attribute_type.$id":ObjectId(eachatset["type_id"])}):
                atlist.append({"type":eachatset["type"],"type_id":eachatset["type_id"],"value":eachattribute.object_value})
        # ...and its GRelations (resolving right_subject to a name/id pair).
        for eachrtset in systemtype_relationtype_set :
            for eachrelation in triple_collection.find({"_type":"GRelation", "subject":system._id, "relation_type.$id":ObjectId(eachrtset["type_id"])}):
                right_subject = node_collection.find_one({"_id":ObjectId(eachrelation.right_subject)})
                rtlist.append({"type":eachrtset["rt_name"],"type_id":eachrtset["type_id"],"value_name": right_subject.name,"value_id":str(right_subject._id)})
        # To support consistent view
        # Render fields tab-by-tab following the node's property_order,
        # formatting by the field's declared structure type.
        property_order = system.property_order
        system.get_neighbourhood(systemtype._id)
        for tab_name, fields_order in property_order:
            display_fields = []
            for field, altname in fields_order:
                if system.structure[field] == bool:
                    display_fields.append((altname, ("Yes" if system[field] else "No")))
                elif not system[field]:
                    # Empty value: show as-is and skip type-specific formatting.
                    display_fields.append((altname, system[field]))
                    continue
                elif system.structure[field] == datetime.datetime:
                    display_fields.append((altname, system[field].date()))
                elif type(system.structure[field]) == list:
                    if system[field]:
                        if type(system.structure[field][0]) == ObjectId:
                            # List of linked nodes: display their names.
                            name_list = []
                            for right_sub_dict in system[field]:
                                name_list.append(right_sub_dict.name)
                            display_fields.append((altname, ", ".join(name_list)))
                        else:
                            display_fields.append((altname, ", ".join(system[field])))
                else:
                    display_fields.append((altname, system[field]))
            property_display_order.append((tab_name, display_fields))
        # End of code
        tags = ",".join(system.tags)
        content = system.content
        location = system.location
        app_set_name = systemtype.name
        system_id = system._id
        system_type = system._type
        if system_type == 'File':
            system_mime_type = system.mime_type
        app_set_instance_name = system.name
        title = systemtype.name +"-" +system.name
    template = "ndf/custom_template_for_app.html"
    variable = RequestContext(request, {'groupid':group_id, 'app_name':app_name, 'app_id':app_id, "app_collection_set":app_collection_set,"app_set_id":app_set_id,"nodes":nodes_dict, "app_menu":app_menu, "app_set_template":app_set_template, "app_set_instance_template":app_set_instance_template, "app_set_name":app_set_name, "app_set_instance_name":app_set_instance_name, "title":title, "app_set_instance_atlist":atlist, "app_set_instance_rtlist":rtlist, 'tags':tags, 'location':location, "content":content, "system_id":system_id,"system_type":system_type,"mime_type":system_mime_type, "app_set_instance_id":app_set_instance_id , "node":system, 'group_id':group_id, "property_display_order": property_display_order})
    return render_to_response(template, variable)
Hence this may not work for every server
'''
import re
from gnowsys_ndf.ndf.models import node_collection,triple_collection,Node
from bs4 import BeautifulSoup
from bson import ObjectId

# --- One-time script: find GSystem activity nodes (and their translations)
# --- whose HTML content contains faulty Paint.activity hrefs, to fix them.
# NOTE(review): the visible portion ends after soup.find_all('a'); the
# actual href-rewriting step appears to be truncated/elsewhere.

'''To identify the href without "/"'''
regx1 = '^/sugar/activities/Paint.activity/'
# ObjectId of the 'translation_of' RelationType (Node helper returns (name, id)).
trnslnof_gst_id = Node.get_name_id_from_type('translation_of','RelationType')[1]
# Hard-coded ObjectIds of the two affected activity nodes.
activities_list = list(map(ObjectId,['59425d1c4975ac013cccbba3','59425e4d4975ac013cccbcb4']))
trnsnds_list = []
# Pull in every translation of each activity node as well.
for each in activities_list:
    trnsnds_list.append(triple_collection.find({'_type':'GRelation','relation_type':trnslnof_gst_id,'subject':each}).distinct('right_subject'))
activities_list.extend(eachid for eachlst in trnsnds_list for eachid in eachlst)
grgsystmnds = node_collection.find({'_type':'GSystem', '_id':{'$in':activities_list}})
#for each in grgsystemnds:
#    each.get_relation_right_subject_nodes('translation_of')

'''To fetch the faulty hrefs and update them accordingly.'''
for index,each_nd in enumerate(grgsystmnds,start =1):
    #print index,each_nd._id,str(each_nd.content)
    soup = BeautifulSoup(each_nd.content)
    findflg = soup.find_all('a')
def parse_data_create_gsystem(json_file_path):
    """Create File GSystems (plus their GAttributes/GRelations) from a JSON dump.

    ``json_file_path`` points at a JSON array of row-dicts (typically exported
    from a spreadsheet).  For each row this: normalises keys/values, creates
    the File GSystem via create_resource_gsystem(), then creates GAttribute
    triples for columns matching the File GST's possible attributes and
    GRelation triples (notably the ':'-separated "teaches" hierarchy) for the
    rest.  Progress and errors are printed and accumulated in the
    module-level ``log_list``.
    """
    json_file_content = ""
    try:
        with open(json_file_path) as json_file:
            json_file_content = json_file.read()
        json_documents_list = json.loads(json_file_content)
        # Process data in proper format
        node = node_collection.collection.File()
        node_keys = node.keys()
        node_structure = node.structure
        json_documents_list_spaces = json_documents_list
        json_documents_list = []
        # Removes leading and trailing spaces from keys as well as values
        for json_document_spaces in json_documents_list_spaces:
            json_document = {}
            for key_spaces, value_spaces in json_document_spaces.iteritems():
                json_document[key_spaces.strip().lower()] = value_spaces.strip()
            json_documents_list.append(json_document)
    except Exception as e:
        error_message = "\n!! While parsing the file ("+json_file_path+") got following error...\n " + str(e)
        log_list.append(str(error_message))
        # NOTE(review): raising a str is invalid in modern Python 2/3 — this
        # will itself raise TypeError; should raise an Exception instance.
        raise error_message

    for i, json_document in enumerate(json_documents_list):
        info_message = "\n\n\n******************** Processing row number : ["+ str(i)+ "] ********************"
        print info_message
        log_list.append(str(info_message))
        try:
            # print "base_folder : ", json_document["base_folder"]
            # print "file_name : ", json_document["file_name"]
            is_base_folder = check_folder_exists(json_document["file_name"], json_document["base_folder"])
            # print "is_base_folder : ", is_base_folder
            if is_base_folder:
                info_message = "- File gsystem holding collection is created. Having name : '" + str(json_document["base_folder"]) + "' and ObjectId : '" + str(is_base_folder) + "'"
                print info_message
                log_list.append(info_message)
            # Split row columns into core node fields vs attribute/relation columns.
            parsed_json_document = {}
            attribute_relation_list = []
            for key in json_document.iterkeys():
                parsed_key = key.lower()
                if parsed_key in node_keys:
                    # print parsed_key
                    # adding the default field values like: created_by, member_of, ...
                    # created_by:
                    if parsed_key == "created_by":
                        if json_document[key]:
                            temp_user_id = get_user_id(json_document[key].strip())
                            if temp_user_id:
                                parsed_json_document[parsed_key] = temp_user_id
                            else:
                                # Unknown username: fall back to the NROER team account.
                                parsed_json_document[parsed_key] = nroer_team_id
                        else:
                            # parsed_json_document[parsed_key] = get_user_id("nroer_team")
                            parsed_json_document[parsed_key] = nroer_team_id
                        # print "---", parsed_json_document[parsed_key]
                    # contributors:
                    elif parsed_key == "contributors":
                        if json_document[key]:
                            contrib_list = json_document[key].split(",")
                            temp_contributors = []
                            for each_user in contrib_list:
                                user_id = get_user_id(each_user.strip())
                                if user_id:
                                    temp_contributors.append(user_id)
                            parsed_json_document[parsed_key] = temp_contributors
                        else:
                            parsed_json_document[parsed_key] = [nroer_team_id]
                        # print "===", parsed_json_document[parsed_key]
                    # tags:
                    elif (parsed_key == "tags") and json_document[key]:
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                        # tag_list = json_document[key].replace("\n", "").split(",")
                        # temp_tag_list = []
                        # for each_tag in tag_list:
                        #     if each_tag:
                        #         temp_tag_list.append(each_tag.strip())
                        # parsed_json_document[parsed_key] = temp_tag_list
                        # print parsed_json_document[parsed_key]
                    # member_of:
                    elif parsed_key == "member_of":
                        parsed_json_document[parsed_key] = [file_gst._id]
                        # print parsed_json_document[parsed_key]
                    else:
                        # parsed_json_document[parsed_key] = json_document[key]
                        parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key))
                    # --- END of processing for remaining fields
                else:
                    # Non-core column: treat as candidate attribute/relation.
                    parsed_json_document[key] = json_document[key]
                    attribute_relation_list.append(key)

            # calling method to create File GSystems
            nodeid = create_resource_gsystem(parsed_json_document)
            # print type(nodeid), "nodeid ------- : ", nodeid, "\n"

            # starting processing for the attributes and relations saving
            if isinstance(nodeid, ObjectId) and attribute_relation_list:
                node = node_collection.one({
                    "_id": ObjectId(nodeid)
                })
                gst_possible_attributes_dict = node.get_possible_attributes(file_gst._id)
                # print gst_possible_attributes_dict
                relation_list = []
                json_document['name'] = node.name
                # Write code for setting atrributes
                for key in attribute_relation_list:
                    is_relation = True
                    # print "\n", key, "----------\n"
                    for attr_key, attr_value in gst_possible_attributes_dict.iteritems():
                        # print "\n", attr_key,"======", attr_value
                        if key == attr_key:
                            # print key
                            is_relation = False
                            # setting value to "0" for int, float, long (to avoid casting error)
                            # if (attr_value['data_type'] in [int, float, long]) and (not json_document[key]):
                            #     json_document[key] = 0
                            if json_document[key]:
                                info_message = "\n- For GAttribute parsing content | key: '" + attr_key + "' having value: '" + json_document[key] + "'"
                                print info_message
                                log_list.append(str(info_message))
                                # NOTE(review): this bare call discards its
                                # result; the assignment a few lines below
                                # repeats it — presumably leftover.
                                cast_to_data_type(json_document[key], attr_value['data_type'])
                                if attr_value['data_type'] == "curricular":
                                    # setting int values for CR/XCR
                                    if json_document[key] == "CR":
                                        json_document[key] = 1
                                    elif json_document[key] == "XCR":
                                        json_document[key] = 0
                                    else:
                                        # needs to be confirm
                                        json_document[key] = 0
                                json_document[key] = cast_to_data_type(json_document[key], attr_value['data_type'])
                                # if attr_value['data_type'] == basestring:
                                #     info_message = "\n- For GAttribute parsing content | key: " + attr_key + " -- value: " + json_document[key]
                                #     print info_message
                                #     log_list.append(str(info_message))
                                # elif attr_value['data_type'] == unicode:
                                #     json_document[key] = unicode(json_document[key])
                                # elif attr_value['data_type'] == bool:
                                #     # setting int values for CR/XCR
                                #     if json_document[key] == "CR":
                                #         json_document[key] = 1
                                #     elif json_document[key] == "XCR":
                                #         json_document[key] = 0
                                #     json_document[key] = bool(int(json_document[key]))
                                # elif attr_value['data_type'] == datetime.datetime:
                                #     json_document[key] = datetime.datetime.strptime(json_document[key], "%d/%m/%Y")
                                # elif attr_value['data_type'] == int:
                                #     json_document[key] = int(json_document[key])
                                # elif attr_value['data_type'] == float:
                                #     json_document[key] = float(json_document[key])
                                # elif attr_value['data_type'] == long:
                                #     json_document[key] = long(json_document[key])
                                # elif type(attr_value['data_type']) == IS:
                                #     for op in attr_value['data_type']._operands:
                                #         if op.lower() == json_document[key].lower():
                                #             json_document[key] = op
                                # elif (attr_value['data_type'] in [list, dict]) or (type(attr_value['data_type']) in [list, dict]):
                                #     if "," not in json_document[key]:
                                #         # Necessary to inform perform_eval_type() that handle this value as list
                                #         json_document[key] = "\"" + json_document[key] + "\", "
                                #     else:
                                #         formatted_value = ""
                                #         for v in json_document[key].split(","):
                                #             formatted_value += "\""+v.strip(" ")+"\", "
                                #         json_document[key] = formatted_value
                                # perform_eval_type(key, json_document, "GSystem")
                                subject_id = node._id
                                # print "\n-----\nsubject_id: ", subject_id
                                # Case-insensitive match on name OR altnames.
                                attribute_type_node = node_collection.one({'_type': "AttributeType",
                                    '$or': [{'name': {'$regex': "^"+attr_key+"$", '$options': 'i'}},
                                            {'altnames': {'$regex': "^"+attr_key+"$", '$options': 'i'}}]
                                })
                                # print "\nattribute_type_node: ", attribute_type_node.name
                                object_value = json_document[key]
                                # print "\nobject_value: ", object_value
                                ga_node = None
                                info_message = "\n- Creating GAttribute ("+node.name+" -- "+attribute_type_node.name+" -- "+str(json_document[key])+") ...\n"
                                print info_message
                                log_list.append(str(info_message))
                                ga_node = create_gattribute(subject_id, attribute_type_node, object_value)
                                info_message = "- Created ga_node : "+ str(ga_node.name) + "\n"
                                print info_message
                                log_list.append(str(info_message))
                            # To break outer for loop as key found
                            break
                        else:
                            error_message = "\n!! DataNotFound: No data found for field ("+str(attr_key)+") while creating GSystem ( -- "+str(node.name)+")\n"
                            print error_message
                            log_list.append(str(error_message))
                        # ---END of if (key == attr_key)
                    if is_relation:
                        relation_list.append(key)
                # print "relation_list : ", relation_list
                if not relation_list:
                    # No possible relations defined for this node
                    info_message = "\n!! ("+str(node.name)+"): No possible relations defined for this node.\n"
                    print info_message
                    log_list.append(str(info_message))
                    # NOTE(review): this `return` aborts the whole import,
                    # skipping all remaining rows — `continue` looks intended.
                    return
                gst_possible_relations_dict = node.get_possible_relations(file_gst._id)
                # processing each entry in relation_list
                for key in relation_list:
                    is_relation = True
                    for rel_key, rel_value in gst_possible_relations_dict.iteritems():
                        if key == rel_key:
                            # commented because teaches is only relation being used for time being
                            # if key == "teaches":
                            is_relation = False
                            if json_document[key]:
                                # -----------------------------
                                hierarchy_output = None
                                def _get_id_from_hierarchy(hier_list, oid=None):
                                    '''
                                    Returns the last hierarchical element's ObjectId.
                                    Arguments to be passes is list of unicode names.
                                    e.g.
                                    hier_list = [u'NCF', u'Science', u'Physical world', u'Materials', u'States of matter', u'Liquids']
                                    '''
                                    # Walks the Theme/ThemeItem/Topic tree one level per
                                    # recursive call, constraining each lookup to the
                                    # previous node's collection_set.
                                    # NOTE(review): uses `global hierarchy_output` from a
                                    # nested def (Python 2 has no nonlocal) — the local
                                    # hierarchy_output above is shadowed, not updated.
                                    if len(hier_list) >= 2:
                                        # print hier_list, "len(hier_list) : ", len(hier_list)
                                        try:
                                            if oid:
                                                curr_oid = node_collection.one({
                                                    "_id": oid
                                                })
                                                # print "curr_oid._id", curr_oid._id
                                            else:
                                                curr_oid = node_collection.one({
                                                    "name": hier_list[0],
                                                    'group_set': {'$all': [ObjectId(home_group._id)]},
                                                    'member_of': {'$in': [ObjectId(theme_gst._id), ObjectId(theme_item_gst._id), ObjectId(topic_gst._id)]}
                                                })
                                            if curr_oid:
                                                next_oid = node_collection.one({
                                                    "name": hier_list[1],
                                                    'group_set': {'$all': [ObjectId(home_group._id)]},
                                                    'member_of': {'$in': [ObjectId(theme_item_gst._id), ObjectId(topic_gst._id)]},
                                                    '_id': {'$in': curr_oid.collection_set }
                                                })
                                                # print "||||||", next_oid.name
                                                hier_list.remove(hier_list[0])
                                                # print "calling _get_id_from_hierarchy(", hier_list,", ", next_oid._id,")"
                                                _get_id_from_hierarchy(hier_list, next_oid._id)
                                            else:
                                                error_message = "!! ObjectId of curr_oid does not found."
                                                print error_message
                                                log_list.append(error_message)
                                        except Exception as e:
                                            error_message = "\n!! Error in getting _id from teaches hierarchy. " + str(e)
                                            print error_message
                                            log_list.append(error_message)
                                    else:
                                        global hierarchy_output
                                        hierarchy_output = oid if oid else None
                                    return hierarchy_output
                                # -----------------------------
                                # most often the data is hierarchy sep by ":"
                                if ":" in json_document[key]:
                                    formatted_list = []
                                    temp_teaches_list = json_document[key].replace("\n", "").split(":")
                                    # print "\n temp_teaches", temp_teaches
                                    for v in temp_teaches_list:
                                        formatted_list.append(v.strip())
                                    right_subject_id = []
                                    rsub_id = _get_id_from_hierarchy(formatted_list) if formatted_list else None
                                    # print hierarchy_output," |||||||||||||||||||", rsub_id
                                    # checking every item in hierarchy exist and leaf node's _id found
                                    if rsub_id:
                                        right_subject_id.append(rsub_id)
                                        json_document[key] = right_subject_id
                                        # print json_document[key]
                                    else:
                                        error_message = "\n!! While creating teaches rel: Any one of the item in hierarchy"+ str(json_document[key]) +"does not exist in Db. \n!! So relation: " + str(key) + "cannot be created.\n"
                                        log_list.append(error_message)
                                        break
                                # sometimes direct leaf-node may be present without hierarchy and ":"
                                else:
                                    # NOTE(review): list(str) splits into characters,
                                    # not [name] — presumably should be a one-element
                                    # list; confirm against the data format.
                                    formatted_list = list(json_document[key].strip())
                                    right_subject_id = []
                                    right_subject_id.append(_get_id_from_hierarchy(formatted_list))
                                    json_document[key] = right_subject_id
                                # print "\n----------", json_document[key]
                                info_message = "\n- For GRelation parsing content | key: " + str(rel_key) + " -- " + str(json_document[key])
                                print info_message
                                log_list.append(str(info_message))
                                # print list(json_document[key])
                                # perform_eval_type(key, json_document, "GSystem", "GSystem")
                                for right_subject_id in json_document[key]:
                                    # print "\njson_document[key]: ", json_document[key]
                                    subject_id = node._id
                                    rel_subject_type = []
                                    rel_subject_type.append(file_gst._id)
                                    if file_gst.type_of:
                                        rel_subject_type.extend(file_gst.type_of)
                                    relation_type_node = node_collection.one({'_type': "RelationType",
                                        '$or': [{'name': {'$regex': "^"+rel_key+"$", '$options': 'i'}},
                                                {'altnames': {'$regex': "^"+rel_key+"$", '$options': 'i'}}],
                                        'subject_type': {'$in': rel_subject_type}
                                    })
                                    right_subject_id_or_list = []
                                    right_subject_id_or_list.append(ObjectId(right_subject_id))
                                    nodes = triple_collection.find({'_type': "GRelation",
                                        'subject': subject_id,
                                        'relation_type.$id': relation_type_node._id
                                    })
                                    # sending list of all the possible right subject to relation
                                    for n in nodes:
                                        if not n.right_subject in right_subject_id_or_list:
                                            right_subject_id_or_list.append(n.right_subject)
                                    info_message = "\n- Creating GRelation ("+ str(node.name)+ " -- "+ str(rel_key)+ " -- "+ str(right_subject_id_or_list)+") ..."
                                    print info_message
                                    log_list.append(str(info_message))
                                    gr_node = create_grelation(subject_id, relation_type_node, right_subject_id_or_list)
                                    info_message = "\n- Grelation processing done.\n"
                                    print info_message
                                    log_list.append(str(info_message))
                            # To break outer for loop if key found
                            break
                        else:
                            error_message = "\n!! DataNotFound: No data found for relation ("+ str(rel_key)+ ") while creating GSystem (" + str(file_gst.name) + " -- " + str(node.name) + ")\n"
                            print error_message
                            log_list.append(str(error_message))
                            break
                # print relation_list
            else:
                # node object or attribute_relation_list does not exist.
                info_message = "\n!! Either resource is already created -- OR -- file is already saved into gridfs/DB -- OR -- file does not exists."
                print info_message
                log_list.append(str(info_message))
                continue
        except Exception as e:
            # Row-level failure: log and move on to the next row.
            error_message = "\n While creating ("+str(json_document['name'])+") got following error...\n " + str(e)
            print error_message  # Keep it!
            log_list.append(str(error_message))
if node_id: asset_content_obj = node_collection.one({'_id': ObjectId(node_id)}) else: asset_content_obj = node_collection.collection.GSystem() asset_content_obj.fill_gstystem_values(request=request, uploaded_file=files[0], **kwargs) asset_content_obj.fill_node_values(**kwargs) asset_content_obj.save(groupid=group_id) asset_contents_list = [asset_content_obj._id] rt_has_asset_content = node_collection.one({ '_type': 'RelationType', 'name': 'has_assetcontent' }) asset_grels = triple_collection.find({'_type': 'GRelation', \ 'relation_type': rt_has_asset_content._id,'subject': asset_obj._id}, {'_id': 0, 'right_subject': 1}) for each_asset in asset_grels: asset_contents_list.append(each_asset['right_subject']) create_grelation(asset_obj._id, rt_has_asset_content, asset_contents_list) active_user_ids_list = [request.user.id] if GSTUDIO_BUDDY_LOGIN: active_user_ids_list += Buddy.get_buddy_userids_list_within_datetime( request.user.id, datetime.datetime.now()) # removing redundancy of user ids: active_user_ids_list = dict.fromkeys(active_user_ids_list).keys() counter_objs_cur = Counter.get_counter_objs_cur(active_user_ids_list, group_id) # counter_obj = Counter.get_counter_obj(request.user.id, group_id)
def clean_structure():
    '''
    This function perform cleaning activities.

    One-shot maintenance/migration routine. In order, it:
      1. Copies each Django User's email into the matching Author node.
      2. Rebuilds attribute_set/relation_set of every GSystem/File/Group/Author
         node from the GAttribute/GRelation triples (source of truth).
      3. Rectifies string-typed start_time/end_time task attributes into
         datetime values.
      4. Normalises type_of to a list, strips accidental 'n' fields, and
         backfills modified_by/contributors.
      5. Deletes the obsolete 'profile_pic' GSystemType, converts Author
         groups, and adds the visited_location field.
      6. Seeds IndexedWordList documents for map-reduce and registers the
         Task GST in start_time/end_time subject_type.

    Relies on module-level globals: node_collection, triple_collection,
    db, log_list, User, ObjectId, datetime.
    '''
    # Setting email_id field of Author class =========================================
    info_message = "\n\nSetting email_id field of following document(s) of Author class...\n"
    print info_message
    log_list.append(info_message)
    users = User.objects.all()
    for each in users:
        try:
            auth_node = node_collection.one({'_type': "Author", 'created_by': each.id})
            if auth_node:
                # Raw collection update; res['n'] is the matched-document count.
                res = node_collection.collection.update(
                    {'_id': auth_node._id},
                    {'$set': {'email': each.email}},
                    upsert=False, multi=False
                )
                if res['n']:
                    auth_node.reload()
                    info_message = "\n Author node's (" + str(auth_node._id) + " -- " + auth_node.name + ") email field updated with following value: " + auth_node.email
                else:
                    info_message = "\n Author node's (" + str(auth_node._id) + " -- " + auth_node.name + ") email field update failed !!!"
                log_list.append(info_message)
            else:
                # NOTE(review): auth_node is None here, so auth_node.name raises
                # AttributeError; it is swallowed by the except below, but the
                # intended message is never logged — likely meant each.username.
                info_message = "\n No author node exists with this name (" + auth_node.name + ") !!!"
                log_list.append(info_message)
        except Exception as e:
            error_message = "\n Author node has multiple records... " + str(e) + "!!!"
            log_list.append(error_message)
            continue

    # Setting attribute_set & relation_set ==================
    info_message = "\n\nSetting attribute_set & relation_set for following document(s)...\n"
    print info_message
    log_list.append(info_message)
    # ------------------------------------------------------------------------------------
    # Fetch all GSystems (including File, Group & Author as well; as these are sub-types of GSystem)
    # ------------------------------------------------------------------------------------
    # Keeping timeout=False, as cursor may exceeds it's default time i.e. 10 mins for which it remains alive
    # Needs to be expicitly close
    # to fix broken documents which are having partial/outdated attributes/relations in their attribute_set/relation_set.
    # first make their attribute_set and relation_set empty and them fill them with latest key-values.
    gsystem_list = ["GSystem", "File", "Group", "Author"]
    # gsystem_list = ["Group", "Author"]
    node_collection.collection.update(
        {'_type': {'$in': gsystem_list}, 'attribute_set': {'$exists': True}, 'relation_set': {'$exists': True}},
        {'$set': {'attribute_set': [], 'relation_set': []}},
        upsert=False, multi=True
    )
    gs = node_collection.find({'_type': {'$in': gsystem_list},
                               '$or': [{'attribute_set': []}, {'relation_set': []}]
                               }, timeout=False)
    gs_count = gs.count()
    # for each_gs in gs:
    gs_iter = iter(gs)
    for i, each_gs in enumerate(gs_iter):
        attr_list = []  # attribute-list
        rel_list = []  # relation-list
        inv_rel_list = []  # inverse-relation-list
        # print " .",
        print " \n Processing node #.", (i+1), " out of ", gs_count, " ... ",
        if each_gs.member_of_names_list:
            info_message = "\n\n >>> " + str(each_gs.name) + " ("+str(each_gs.member_of_names_list[0])+")"
        else:
            info_message = "\n\n >>> " + str(each_gs.name) + " (ERROR: member_of field is not set properly for this document -- "+str(each_gs._id)+")"
        log_list.append(info_message)

        # ------------------------------------------------------------------------------------
        # Fetch all attributes, if created in GAttribute Triple
        # Key-value pair will be appended only for those whose entry would be found in GAttribute Triple
        # ------------------------------------------------------------------------------------
        ga = triple_collection.collection.aggregate([
            {'$match': {'_type': "GAttribute", 'subject': each_gs._id, 'status': u"PUBLISHED"}},
            {'$project': {'_id': 0, 'key_val': '$attribute_type', 'value_val': '$object_value'}}
        ])

        # ------------------------------------------------------------------------------------
        # Fetch all relations, if created in GRelation Triple
        # Key-value pair will be appended only for those whose entry would be found in GRelation Triple
        # ------------------------------------------------------------------------------------
        gr = triple_collection.collection.aggregate([
            {'$match': {'_type': "GRelation", 'subject': each_gs._id, 'status': u"PUBLISHED"}},
            {'$project': {'_id': 0, 'key_val': '$relation_type', 'value_val': '$right_subject'}}
        ])

        # ------------------------------------------------------------------------------------
        # Fetch all inverse-relations, if created in GRelation Triple
        # Key-value pair will be appended only for those whose entry would be found in GRelation Triple
        # ------------------------------------------------------------------------------------
        inv_gr = triple_collection.collection.aggregate([
            {'$match': {'_type': "GRelation", 'right_subject': each_gs._id, 'status': u"PUBLISHED"}},
            {'$project': {'_id': 0, 'key_val': '$relation_type', 'value_val': '$subject'}}
        ])

        # NOTE(review): the code indexes ga["result"], i.e. it assumes the
        # pre-pymongo-3 aggregate() return shape ({'result': [...]}) — confirm
        # against the installed pymongo version.
        if ga:  # If any GAttribute found
            # ------------------------------------------------------------------------------------
            # Setting up attr_list
            # ------------------------------------------------------------------------------------
            # print "\n"
            for each_gar in ga["result"]:
                if each_gar:
                    # key_val is a DBRef to the AttributeType; dereference for its name.
                    key_node = db.dereference(each_gar["key_val"])
                    # print "\t", key_node["name"], " -- ", each_gar["value_val"]
                    # Append corresponding GAttribute as key-value pair in given attribute-list
                    # key: attribute-type name
                    # value: object_value from GAttribute document
                    attr_list.append({key_node["name"]: each_gar["value_val"]})

        if gr:  # If any GRelation (relation) found
            # ------------------------------------------------------------------------------------
            # Setting up rel_list
            # ------------------------------------------------------------------------------------
            for each_grr in gr["result"]:
                if each_grr:
                    key_node = db.dereference(each_grr["key_val"])
                    # Append corresponding GRelation as key-value pair in given relation-list
                    # key: name field's value of relation-type's document
                    # value: right_subject field's value of GRelation document
                    if not rel_list:
                        rel_list.append({key_node["name"]: [each_grr["value_val"]]})
                    else:
                        # Merge multiple right-subjects under one relation name.
                        key_found = False
                        for each in rel_list:
                            if key_node["name"] in each:
                                each[key_node["name"]].append(each_grr["value_val"])
                                key_found = True
                        if not key_found:
                            rel_list.append({key_node["name"]: [each_grr["value_val"]]})

        if inv_gr:  # If any GRelation (inverse-relation) found
            # ------------------------------------------------------------------------------------
            # Setting up inv_rel_list
            # ------------------------------------------------------------------------------------
            for each_grr in inv_gr["result"]:
                if each_grr:
                    key_node = db.dereference(each_grr["key_val"])
                    # Append corresponding GRelation as key-value pair in given inverse-relation-list
                    # key: inverse_name field's value of relation-type's document
                    # value: subject field's value of GRelation document
                    if not inv_rel_list:
                        inv_rel_list.append({key_node["inverse_name"]: [each_grr["value_val"]]})
                    else:
                        key_found = False
                        for each in inv_rel_list:
                            if key_node["inverse_name"] in each:
                                each[key_node["inverse_name"]].append(each_grr["value_val"])
                                key_found = True
                        if not key_found:
                            inv_rel_list.append({key_node["inverse_name"]: [each_grr["value_val"]]})

        # Build one log entry summarising what was collected for this node.
        info_message = ""
        if attr_list:
            info_message += "\n\n\tAttributes: " + str(attr_list)
        else:
            info_message += "\n\n\tAttributes: No attribute found!"
        if rel_list:
            info_message += "\n\n\tRelations: " + str(rel_list)
        else:
            info_message += "\n\n\tRelations: No relation found!"
        if inv_rel_list:
            info_message += "\n\n\tInverse-Relations: " + str(inv_rel_list)
        else:
            info_message += "\n\n\tInverse-Relations: No inverse-relation found!"
        log_list.append(info_message)

        # ------------------------------------------------------------------------------------
        # Finally set attribute_set & relation_set of current GSystem with modified attr_list & rel_list respectively
        # ------------------------------------------------------------------------------------
        res = node_collection.collection.update(
            {'_id': each_gs._id},
            {'$set': {'attribute_set': attr_list, 'relation_set': (rel_list + inv_rel_list)}},
            upsert=False, multi=False
        )
        if res['n']:
            info_message = "\n\n\t" + str(each_gs.name) + " updated succesfully !"
            log_list.append(info_message)
            print " -- attribute_set & relation_set updated succesfully !"

    # ------------------------------------------------------------------------------------
    # Close cursor object if still alive
    # ------------------------------------------------------------------------------------
    if gs.alive:
        info_message = "\n\n GSystem-Cursor state (before): " + str(gs.alive)
        log_list.append(info_message)
        gs.close()
        info_message = "\n\n GSystem-Cursor state (after): " + str(gs.alive)
        log_list.append(info_message)
    print "\n Setting attribute_set & relation_set completed succesfully !"

    # Rectify start_time & end_time of task ==================
    start_time = node_collection.one({'_type': "AttributeType", 'name': "start_time"})
    end_time = node_collection.one({'_type': "AttributeType", 'name': "end_time"})
    info_message = "\n\nRectifing start_time & end_time of following task(s)...\n"
    print info_message
    log_list.append(info_message)
    # $type 9 is the BSON date type: select values that are NOT already dates.
    invalid_dates_cur = triple_collection.find({'attribute_type': {'$in': [start_time._id, end_time._id]},
                                                'object_value': {'$not': {'$type': 9}}})
    for each in invalid_dates_cur:
        date_format_string = ""
        old_value = ""
        new_value = ""
        attribute_type_node = each.attribute_type
        # Sniff the date format from the separators present in the string value.
        if "-" in each.object_value and ":" in each.object_value:
            date_format_string = "%m-%d-%Y %H:%M"
        elif "/" in each.object_value and ":" in each.object_value:
            date_format_string = "%m/%d/%Y %H:%M"
        elif "-" in each.object_value:
            date_format_string = "%m-%d-%Y"
        elif "/" in each.object_value:
            date_format_string = "%m/%d/%Y"
        if date_format_string:
            old_value = each.object_value
            info_message = "\n\n\t" + str(each._id) + " -- " + str(old_value)
            res = triple_collection.collection.update({'_id': each._id},
                                                      {'$set': {'object_value': datetime.strptime(each.object_value, date_format_string)}},
                                                      upsert=False, multi=False
                                                      )
            if res['n']:
                print " .",
                each.reload()
                new_value = each.object_value
                info_message += " >> " + str(new_value)
                log_list.append(info_message)
                # Keep the denormalised copy in the subject node's attribute_set in sync.
                res = node_collection.collection.update({'_id': each.subject, 'attribute_set.'+attribute_type_node.name: old_value},
                                                        {'$set': {'attribute_set.$.'+attribute_type_node.name: new_value}},
                                                        upsert=False, multi=False
                                                        )
                if res["n"]:
                    info_message = "\n\n\tNode's (" + str(each.subject) + ") attribute_set (" + attribute_type_node.name + ") updated succesfully."
                    log_list.append(info_message)

    # Update type_of field to list
    type_of_cursor=node_collection.find({'type_of':{'$exists':True}})
    for object_cur in type_of_cursor:
        if type(object_cur['type_of']) == ObjectId or object_cur['type_of'] == None:
            if type(object_cur['type_of']) == ObjectId :
                # Wrap a bare ObjectId into a single-element list.
                node_collection.collection.update({'_id':object_cur['_id']},{'$set':{'type_of':[object_cur['type_of']]}})
            else :
                # None becomes an empty list.
                node_collection.collection.update({'_id':object_cur['_id']},{'$set':{'type_of':[]}})

    # Removes n attribute if created accidently in existsing documents
    node_collection.collection.update({'n': {'$exists': True}}, {'$unset': {'n': ""}}, upsert=False, multi=True)

    # Updates wherever modified_by field is None with default value as either first contributor or the creator of the resource
    modified_by_cur = node_collection.find({'_type': {'$nin': ['GAttribute', 'GRelation', 'node_holder', 'ToReduceDocs', 'ReducedDocs', 'IndexedWordList']}, 'modified_by': None})
    # NOTE(review): `.count` without parentheses is a bound method, so this
    # comparison is always True in Python 2 — probably meant `.count()`.
    # Harmless here (the for-loop simply iterates zero documents), but misleading.
    if modified_by_cur.count > 0:
        for n in modified_by_cur:
            if u'required_for' not in n.keys():
                if "contributors" in n:
                    if n.contributors:
                        node_collection.collection.update({'_id': n._id}, {'$set': {'modified_by': n.contributors[0]}}, upsert=False, multi=False)
                    else:
                        if n.created_by:
                            node_collection.collection.update({'_id': n._id}, {'$set': {'modified_by': n.created_by, 'contributors': [n.created_by]}}, upsert=False, multi=False)
                        else:
                            print "\n Please set created_by value for node (", n._id, " -- ", n._type, " : ", n.name, ")\n"

    # Updating faulty modified_by and contributors values (in case of user-group and file documents)
    cur = node_collection.find({'_type': {'$nin': ['node_holder', 'ToReduceDocs', 'ReducedDocs', 'IndexedWordList']}, 'modified_by': {'$exists': True}})
    for n in cur:
        # By faulty, it means modified_by and contributors has 1 as their values
        # 1 stands for superuser
        # Instead of this value should be the creator of that resource
        # (even this is applicable only if created_by field of that resource holds some value)
        if u'required_for' not in n.keys():
            if not n.created_by:
                print "\n Please set created_by value for node (", n._id, " -- ", n._type, " : ", n.name, ")"
            else:
                if n.created_by not in n.contributors:
                    node_collection.collection.update({'_id': n._id},
                                                      {'$set': {'modified_by': n.created_by, 'contributors': [n.created_by]}
                                                       },
                                                      upsert=False, multi=False)

    # For delete the profile_pic as GST
    profile_pic_obj = node_collection.one({'_type': 'GSystemType','name': u'profile_pic'})
    if profile_pic_obj:
        profile_pic_obj.delete()
        print "\n Deleted GST document of profile_pic.\n"

    # For adding visited_location field (default value set as []) in User Groups.
    try:
        author = node_collection.one({'_type': "GSystemType", 'name': "Author"})
        if author:
            auth_cur = node_collection.find({'_type': 'Group', 'member_of': author._id })
            if auth_cur.count() > 0:
                # Convert Group documents that are members of Author GST into Author _type.
                for each in auth_cur:
                    node_collection.collection.update({'_id': each._id},
                                                      {'$set': {'_type': "Author"} },
                                                      upsert=False, multi=False)
                    print "\n Updated user group: ", each.name
            cur = node_collection.find({'_type': "Author", 'visited_location': {'$exists': False}})
            author_cur = node_collection.find({'_type': 'Author'})
            if author_cur.count() > 0:
                # Backfill default group policies on Author groups missing group_type.
                for each in author_cur:
                    if each.group_type == None:
                        node_collection.collection.update({'_id': each._id},
                                                          {'$set': {'group_type': u"PUBLIC", 'edit_policy': u"NON_EDITABLE", 'subscription_policy': u"OPEN"} },
                                                          upsert=False, multi=False)
                        print "\n Updated user group policies: ", each.name
            if cur.count():
                print "\n"
                for each in cur:
                    node_collection.collection.update({'_type': "Author", '_id': each._id}, {'$set': {'visited_location': []}}, upsert=False, multi=True)
                    print " 'visited_location' field added to Author group (" + each.name + ")\n"
        else:
            error_message = "\n Exception while creating 'visited_location' field in Author class.\n Author GSystemType doesn't exists!!!\n"
            raise Exception(error_message)
    except Exception as e:
        print str(e)

    # INSERTED FOR MAP_REDUCE
    allIndexed = node_collection.find({"_type": "IndexedWordList", "required_for" : "storing_indexed_words"})
    if allIndexed.count() == 0:
        # Seed the 27 word-bucket documents (one per alphabet bucket + misc).
        print "\n Inserting indexes"
        j=1
        while j<=27:
            obj = node_collection.collection.IndexedWordList()
            obj.word_start_id = float(j)
            obj.words = {}
            obj.required_for = u'storing_indexed_words'
            obj.save()
            j+=1

    # Adding Task GST into start_time and end_time ATs subject_type
    start_time = node_collection.one({'_type': u'AttributeType', 'name': u'start_time'})
    end_time = node_collection.one({'_type': u'AttributeType', 'name': u'end_time'})
    task = node_collection.find_one({'_type':u'GSystemType', 'name':u'Task'})
    if task:
        if start_time:
            if not task._id in start_time.subject_type :
                start_time.subject_type.append(task._id)
                start_time.save()
        if end_time:
            if not task._id in end_time.subject_type :
                end_time.subject_type.append(task._id)
                end_time.save()
def translate(request, group_id, node_id, lang, translated_node_id=None, **kwargs):
    '''
    for EDIT: translate provided node to provided LANG CODE
    lang could be either proper/full language-name/language-code
    `node_id` is _id of source node.

    GET  -> renders "ndf/translate_form.html" (pre-filled with the existing
            translation, if one is found via the translation_of GRelation).
    POST -> creates the translated node if needed, saves form data into it,
            and (re)creates the translation_of GRelation from the source node
            to all its translations; redirects to 'show_translation'.
    '''
    group_name, group_id = Group.get_group_name_id(group_id)
    language = get_language_tuple(lang)
    source_obj = Node.get_node_by_id(node_id)
    existing_grel = translate_grel = translated_node = None
    if translated_node_id:
        translated_node = Node.get_node_by_id(translated_node_id)
    else:
        # get translated_node via an existing translation_of relation
        existing_grel = triple_collection.one({
            '_type': 'GRelation',
            'subject': ObjectId(node_id),
            'relation_type': rt_translation_of._id,
            'language': language
        })
        if existing_grel:
            # get existing translated_node
            translated_node = Node.get_node_by_id(existing_grel.right_subject)
            translate_grel = existing_grel
    if request.method == 'GET':
        return render_to_response("ndf/translate_form.html", {
            'group_id': group_id,
            'node_obj': translated_node,
            'source_obj': source_obj,
            'post_url': reverse('translate', kwargs={
                'group_id': group_id,
                'node_id': node_id,
                'lang': lang,
            }),
            'cancel_url': reverse('show_translation', kwargs={
                'group_id': group_id,
                'node_id': node_id,
                'lang': lang,
            })
        }, context_instance=RequestContext(request))
    elif request.method == 'POST':  # explicit `if` check for `POST`
        if not translated_node:
            # create a new translated node by copying source_obj's data
            if source_obj._type == "Group":
                # A Group cannot be deep-copied into a translation directly:
                # build a fresh GSystem and copy all fields except the
                # identity/typing ones.
                translated_node = node_collection.collection.GSystem()
                exclude_fields = [
                    '_id', 'member_of', '_type', 'type_of',
                    'modified_by', 'prior_node', 'post_node'
                ]
                for each in translated_node:
                    if each not in exclude_fields:
                        translated_node[each] = source_obj[each]
                translated_node.group_set.append(source_obj._id)
            else:
                translated_node = source_obj.__deepcopy__()
                translated_node['_id'] = ObjectId()
            translated_node.fill_gstystem_values(request=request, language=language, **kwargs)
        trans_alt_name = request.POST.get('altnames', None)
        # FIX: previously `unicode(trans_alt_name)` was applied unconditionally,
        # which stored the literal string u"None" when the form omitted 'altnames'.
        translated_node.altnames = unicode(trans_alt_name) if trans_alt_name else u''
        translated_node.member_of = [ObjectId(trans_node_gst_id)]
        # FIX: keyword spelled `groupid` for consistency with every other
        # .save() call in this file (e.g. sys_each_postnode.save(groupid=...));
        # the previous `group_id=` spelling did not match that convention.
        translated_node.save(groupid=group_id)
        if not existing_grel:
            # Gather all existing translations of the source node so the
            # (re)created grelation lists every right_subject.
            trans_grel_list = [ObjectId(translated_node._id)]
            trans_grels = triple_collection.find(
                {'_type': 'GRelation',
                 'relation_type': rt_translation_of._id,
                 'subject': ObjectId(node_id)},
                {'_id': 0, 'right_subject': 1})
            for each_rel in trans_grels:
                trans_grel_list.append(each_rel['right_subject'])
            translate_grel = create_grelation(node_id, rt_translation_of, trans_grel_list, language=language)
        # page_gst_name, page_gst_id = Node.get_name_id_from_type('Page', 'GSystemType')
        # return HttpResponseRedirect(reverse('page_details', kwargs={'group_id': group_id, 'app_id': page_gst_id }))
        # return HttpResponseRedirect(reverse('all_translations', kwargs={'group_id': group_id, 'node_id': node_id }))
        return HttpResponseRedirect(
            reverse('show_translation', kwargs={
                'group_id': group_id,
                'node_id': node_id,
                'lang': lang
            }))
def update_registration_year():
    '''
    This renames registration_year to registration_date and updates
    existing value(s), if exists.

    Flow: locate the "registration_year" AttributeType; if absent the
    migration already ran and we just log and return. Otherwise rename it
    to "registration_date" (with new altnames/validators) and replace any
    placeholder GAttribute values of 2014-01-01 with 2014-09-02.
    '''
    # Locate the AttributeType that still carries the old name.
    reg_year_at = node_collection.one({'_type': "AttributeType", 'name': "registration_year"})
    if not reg_year_at:
        # Already renamed on a previous run -- nothing to do.
        reg_year_at = node_collection.one({'_type': "AttributeType", 'name': "registration_date"})
        info_message = "\n Already updated -- " + reg_year_at.name + " (" + str(reg_year_at._id) + ") !\n"
        log_list.append(info_message)
        return

    info_message = "\n Before update: " + str(reg_year_at._id) + " -- " + reg_year_at.name + " -- " + str(reg_year_at["validators"])
    log_list.append(info_message)

    rename_res = node_collection.collection.update(
        {'_id': reg_year_at._id},
        {'$set': {'name': u"registration_date",
                  'altnames': u"Date of Registration",
                  'validators': [u"m/d/Y", u"date_month_day_year", u"MM/DD/YYYY"]
                  }},
        upsert=False, multi=False
    )

    # Guard clause: if the rename did not match a document, abort loudly.
    if not rename_res['n']:
        error_message = "Something went wrong while updating AttributeType - " + reg_year_at.name + " (" + str(reg_year_at._id) + ")"
        log_list.append(error_message)
        raise Exception(error_message)

    reg_year_at.reload()
    info_message = "\n After update: " + str(reg_year_at._id) + " -- " + reg_year_at.name + " -- " + str(reg_year_at["validators"]) + "\n"
    log_list.append(info_message)

    # If AttributeType is updated successfully, then look out for any existing value(s)
    # With value as datetime.datetime(2014, 1, 1, 0, 0)
    # If found, replace it with datetime.datetime(2014, 9, 2, 0, 0)
    value_cur = triple_collection.find({'_type': "GAttribute", 'attribute_type': reg_year_at._id})
    total_values = value_cur.count()
    if not total_values:
        info_message = "\n No documents exist for update !!\n"
        log_list.append(info_message)
        return

    info_message = "\n No. of existing value(s) found: " + str(total_values)
    log_list.append(info_message)
    placeholder_date = datetime.datetime(2014, 1, 1, 0, 0)
    for each_ga in value_cur:
        if each_ga.object_value != placeholder_date:
            continue
        actual_date = datetime.datetime(2014, 9, 2, 0, 0)
        upd_res = triple_collection.collection.update(
            {'_id': each_ga._id},
            {'$set': {'object_value': actual_date,
                      'name': each_ga.name.replace("2014-01-01 00:00:00", str(actual_date))
                      }},
            upsert=False, multi=False
        )
        if upd_res['n']:
            each_ga.reload()
            info_message = "\n Updated: " + each_ga.name
        else:
            info_message = "\n Not updated: " + each_ga.name + " !!"
        log_list.append(info_message)
def group_dashboard(request, group_id):
    """ This view returns data required for group's dashboard.

    Gathers the group's current & previously-used (soft-deleted) profile
    pictures, banner picture, and any StudentCourseEnrollment nodes pending
    approval, then renders "ndf/group_dashboard.html".
    """
    has_profile_pic = None
    profile_pic_image = None
    old_profile_pics = []
    has_profile_pic_str = ""
    is_already_selected = None
    # group_id may arrive as an ObjectId string or as a group name.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    group_obj = node_collection.one({"_id": ObjectId(group_id)})
    has_profile_pic_rt = node_collection.one({'_type': 'RelationType',
                                              'name': unicode('has_profile_pic')
                                              })
    # Soft-deleted has_profile_pic relations record previously-used pics.
    all_old_prof_pics = triple_collection.find({'_type': "GRelation",
                                                "subject": group_obj._id,
                                                'relation_type.$id': has_profile_pic_rt._id,
                                                'status': u"DELETED"})
    if all_old_prof_pics:
        for each_grel in all_old_prof_pics:
            n = node_collection.one({'_id': ObjectId(each_grel.right_subject)})
            old_profile_pics.append(n)
    banner_pic = ""
    for each in group_obj.relation_set:
        if "has_profile_pic" in each:
            if each["has_profile_pic"]:
                profile_pic_image = node_collection.one(
                    {'_type': {"$in": ["GSystem", "File"]},
                     '_id': each["has_profile_pic"][0]}
                )
        if "has_Banner_pic" in each:
            if each["has_Banner_pic"]:
                banner_pic = node_collection.one(
                    {'_type': {"$in": ["GSystem", "File"]},
                     '_id': each["has_Banner_pic"][0]}
                )
    # Approve StudentCourseEnrollment view
    approval = False
    enrollment_details = []
    enrollment_columns = []
    sce_gst = node_collection.one({'_type': "GSystemType", 'name': "StudentCourseEnrollment"})
    if sce_gst:
        # Get StudentCourseEnrollment nodes which are there for approval
        sce_cur = node_collection.find({
            'member_of': sce_gst._id,
            'group_set': ObjectId(group_id),
            # "attribute_set.enrollment_status": {"$nin": [u"OPEN"]},
            "attribute_set.enrollment_status": {"$in": [u"PENDING", "APPROVAL"]},
            'status': u"PUBLISHED"
        }, {
            'member_of': 1
        })
        if sce_cur.count():
            approval = True
            enrollment_columns = ["College", "Course", "Status", "Enrolled",
                                  "Remaining", "Approved", "Rejected"]
            for sce_gs in sce_cur:
                sce_gs.get_neighbourhood(sce_gs.member_of)
                data = {}
                # approve_task = sce_gs.has_corresponding_task[0]
                approve_task = sce_gs.has_current_approval_task[0]
                approve_task.get_neighbourhood(approve_task.member_of)
                data["Status"] = approve_task.Status
                # Check for corresponding task's status
                # Continue with next if status is found as "Closed"
                # As we listing only 'In Progress'/'New' task(s)
                if data["Status"] == "Closed":
                    continue
                data["_id"] = str(sce_gs._id)
                data["College"] = sce_gs.for_college[0].name
                if len(sce_gs.for_acourse) > 1:
                    # It means it's a Foundation Course's (FC) enrollment.
                    # FIX: the original initialised unused start_enroll/end_enroll
                    # while reading start_time/end_time without initialisation,
                    # which raised NameError whenever the course's attribute_set
                    # lacked those attributes.
                    start_time = None
                    end_time = None
                    for each in sce_gs.for_acourse[0].attribute_set:
                        if not each:
                            pass
                        elif "start_time" in each:
                            start_time = each["start_time"]
                        elif "end_time" in each:
                            end_time = each["end_time"]
                    if start_time and end_time:
                        data["Course"] = "Foundation_Course" + "_" + start_time.strftime("%d-%b-%Y") + "_" + end_time.strftime("%d-%b-%Y")
                    else:
                        # Defensive fallback when either boundary date is absent.
                        data["Course"] = "Foundation_Course"
                else:
                    # Courses other than FC
                    data["Course"] = sce_gs.for_acourse[0].name
                # data["Completed On"] = sce_gs.completed_on.strftime("%d/%m/%Y")
                remaining_count = None
                enrolled_list = []
                approved_list = []
                rejected_list = []
                if sce_gs.has_key("has_enrolled"):
                    if sce_gs["has_enrolled"]:
                        enrolled_list = sce_gs["has_enrolled"]
                if sce_gs.has_key("has_approved"):
                    if sce_gs["has_approved"]:
                        approved_list = sce_gs["has_approved"]
                if sce_gs.has_key("has_rejected"):
                    if sce_gs["has_rejected"]:
                        rejected_list = sce_gs["has_rejected"]
                data["Enrolled"] = len(enrolled_list)
                data["Approved"] = len(approved_list)
                data["Rejected"] = len(rejected_list)
                remaining_count = len(enrolled_list) - (len(approved_list) + len(rejected_list))
                data["Remaining"] = remaining_count
                enrollment_details.append(data)
    page = '1'
    return render_to_response(
        "ndf/group_dashboard.html",
        {
            'group_id': group_id, 'groupid': group_id,
            'old_profile_pics': old_profile_pics,
            'approval': approval,
            'enrollment_columns': enrollment_columns,
            'enrollment_details': enrollment_details,
            'prof_pic_obj': profile_pic_image,
            'banner_pic': banner_pic,
            'page': page
        },
        context_instance=RequestContext(request)
    )
def lesson_create_edit(request, group_id, unit_group_id=None):
    '''
    creation as well as edit of lessons

    Dispatch (POST only; GET falls through to the default result_dict):
      - no `lesson_id` + duplicate name among unit's children -> failure msg
      - valid `lesson_id`                                      -> update/rename
        (for non-"en" languages a translated node is looked up via the
        translation_of GRelation, or created if missing)
      - otherwise                                              -> create fresh lesson

    returns following:
    {
        'success': <BOOL: 0 or 1>,
        'unit_hierarchy': <unit hierarchy json>,
        'msg': <error msg or objectid of newly created obj>
    }
    '''
    # parent_group_name, parent_group_id = Group.get_group_name_id(group_id)
    # parent unit id
    lesson_id = request.POST.get('lesson_id', None)
    lesson_language = request.POST.get('sel_lesson_lang','')
    unit_id_post = request.POST.get('unit_id', '')
    lesson_content = request.POST.get('lesson_desc', '')
    # print "lesson_id: ", lesson_id
    # print "lesson_language: ", lesson_language
    # print "unit_id_post: ", unit_id_post
    # POSTed unit id takes precedence over the URL parameter.
    unit_group_id = unit_id_post if unit_id_post else unit_group_id
    # getting parent unit object
    unit_group_obj = Group.get_group_name_id(unit_group_id, get_obj=True)
    result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': ''}
    if request.method == "POST":
        # lesson name
        lesson_name = request.POST.get('name', '').strip()
        if not lesson_name:
            # NOTE(review): no early return here -- execution continues into
            # the branches below even with an empty name.
            msg = 'Name can not be empty.'
            result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': msg}
            # return HttpResponse(0)
        # check for uniqueness of name
        # unit_cs: unit collection_set
        unit_cs_list = unit_group_obj.collection_set
        unit_cs_objs_cur = Node.get_nodes_by_ids_list(unit_cs_list)
        if unit_cs_objs_cur:
            unit_cs_names_list = [u.name for u in unit_cs_objs_cur]
        if not lesson_id and unit_cs_objs_cur and lesson_name in unit_cs_names_list:
            # same name activity
            # currently following logic was only for "en" nodes.
            # commented and expecting following in future:
            # check for uniqueness w.r.t language selected within all sibling lessons's translated nodes
            # lesson_obj = Node.get_node_by_id(lesson_id)
            # if lesson_language != lesson_obj.language[0]:
            #     if lesson_language:
            #         language = get_language_tuple(lesson_language)
            #         lesson_obj.language = language
            #         lesson_obj.save()
            msg = u'Activity with same name exists in lesson: ' + unit_group_obj.name
            result_dict = {'success': 0, 'unit_hierarchy': [], 'msg': msg}
        elif lesson_id and ObjectId.is_valid(lesson_id):
            # Update
            # getting default, "en" node:
            if lesson_language != "en":
                node = translated_node_id = None
                # Look for an existing translation of this lesson in the
                # requested language.
                grel_node = triple_collection.one({
                    '_type': 'GRelation',
                    'subject': ObjectId(lesson_id),
                    'relation_type': rt_translation_of._id,
                    'language': get_language_tuple(lesson_language),
                    # 'status': 'PUBLISHED'
                })
                if grel_node:
                    # grelation found.
                    # transalated node exists.
                    # edit of existing translated node.
                    # node = Node.get_node_by_id(grel_node.right_subject)
                    # translated_node_id = node._id
                    lesson_id = grel_node.right_subject
                else:
                    # grelation NOT found.
                    # create transalated node.
                    user_id = request.user.id
                    new_lesson_obj = node_collection.collection.GSystem()
                    new_lesson_obj.fill_gstystem_values(name=lesson_name,
                                                        content=lesson_content,
                                                        member_of=gst_lesson_id,
                                                        group_set=unit_group_obj._id,
                                                        created_by=user_id,
                                                        status=u'PUBLISHED')
                    # print new_lesson_obj
                    if lesson_language:
                        language = get_language_tuple(lesson_language)
                        new_lesson_obj.language = language
                    new_lesson_obj.save(groupid=group_id)
                    # Collect existing translations so the recreated grelation
                    # lists every translated node.
                    trans_grel_list = [ObjectId(new_lesson_obj._id)]
                    trans_grels = triple_collection.find({'_type': 'GRelation', \
                        'relation_type': rt_translation_of._id,'subject': ObjectId(lesson_id)},{'_id': 0, 'right_subject': 1})
                    for each_rel in trans_grels:
                        trans_grel_list.append(each_rel['right_subject'])
                    # translate_grel = create_grelation(node_id, rt_translation_of, trans_grel_list, language=language)
                    create_grelation(lesson_id, rt_translation_of, trans_grel_list, language=language)
            lesson_obj = Node.get_node_by_id(lesson_id)
            if lesson_obj and (lesson_obj.name != lesson_name):
                trans_lesson = get_lang_node(lesson_obj._id,lesson_language)
                if trans_lesson:
                    # NOTE(review): trans_lesson.name is set but trans_lesson is
                    # never saved -- only lesson_obj.save() is called below, so
                    # this rename may be lost. Verify intended behavior.
                    trans_lesson.name = lesson_name
                else:
                    lesson_obj.name = lesson_name
                # if lesson_language:
                #     language = get_language_tuple(lesson_language)
                #     lesson_obj.language = language
                # NOTE(review): kwarg is spelled `group_id` here but `groupid`
                # in the other save() calls of this function -- confirm which
                # spelling save() actually honours.
                lesson_obj.save(group_id=group_id)
                unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
                msg = u'Lesson name updated.'
                result_dict = {'success': 1, 'unit_hierarchy': unit_structure, 'msg': str(lesson_obj._id)}
            else:
                unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
                msg = u'Nothing to update.'
                result_dict = {'success': 1, 'unit_hierarchy': unit_structure, 'msg': msg}
        else:
            # creating a fresh lesson object
            user_id = request.user.id
            new_lesson_obj = node_collection.collection.GSystem()
            new_lesson_obj.fill_gstystem_values(name=lesson_name,
                                                content=lesson_content,
                                                member_of=gst_lesson_id,
                                                group_set=unit_group_obj._id,
                                                created_by=user_id,
                                                status=u'PUBLISHED')
            # print new_lesson_obj
            if lesson_language:
                language = get_language_tuple(lesson_language)
                new_lesson_obj.language = language
            new_lesson_obj.save(groupid=group_id)
            # Register the new lesson as a child of the parent unit.
            unit_group_obj.collection_set.append(new_lesson_obj._id)
            unit_group_obj.save(groupid=group_id)
            unit_structure = get_unit_hierarchy(unit_group_obj, request.LANGUAGE_CODE)
            msg = u'Added lesson under lesson: ' + unit_group_obj.name
            result_dict = {'success': 1, 'unit_hierarchy': unit_structure, 'msg': str(new_lesson_obj._id)}
            # return HttpResponse(json.dumps(unit_structure))
            # return HttpResponse(1)
    return HttpResponse(json.dumps(result_dict))
def uDashboard(request, group_id):
    """Render the user-dashboard page for the user identified by `group_id`.

    `group_id` may be either the integer django user-id (the ``created_by``
    of an Author node) or a group-name/ObjectId resolvable through
    get_group_name_id().  Collects the user's resources (pages, files,
    forums, quizzes, threads, replies), groups, open tasks, recent activity,
    profile-picture info and an activity-rating pie chart, then renders
    "ndf/uDashboard.html".
    """
    try:
        # group_id supplied as an integer user-id
        usrid = int(group_id)
        auth = node_collection.one({'_type': "Author", 'created_by': usrid})
    except:
        # otherwise resolve the Author node from a group-name / ObjectId
        auth = get_group_name_id(group_id, get_obj=True)
        usrid = auth.created_by
        group_id = auth._id
    # Fetching user group of current user & then reassigning group_id with
    # it's corresponding ObjectId value
    group_name = auth.name
    usrname = auth.name
    date_of_join = auth['created_at']
    # current_user = request.user.pk
    current_user = usrid
    profile_pic_image = None
    current_user_obj = None
    usr_fname = None
    usr_lname = None
    success_state = True
    old_profile_pics = []
    is_already_selected = None
    task_gst = node_collection.one(
        {'_type': "GSystemType", 'name': "Task"}
    )
    if current_user:
        exclued_from_public = ""
        # NOTE(review): current_user is assigned from usrid just above, so
        # the first branch is always taken here; kept as-is pending
        # clarification of the original intent.
        if int(current_user) == int(usrid):
            Access_policy = ["PUBLIC", "PRIVATE"]
        if int(current_user) != int(usrid):
            Access_policy = ["PUBLIC"]
    else:
        # anonymous view: public resources only, and exclude Task nodes
        Access_policy = ["PUBLIC"]
        exclued_from_public = ObjectId(task_gst._id)
    dashboard_count = {}
    user_activity = []
    # cursors over this user's visible (non-HIDDEN) resources, per type
    page_gst = node_collection.one({'_type': "GSystemType", 'name': 'Page'})
    page_cur = node_collection.find({'member_of': {'$all': [page_gst._id]},
                                     'created_by': int(usrid),
                                     "status": {"$nin": ["HIDDEN"]}})
    file_cur = node_collection.find({'_type': u"File",
                                     'created_by': int(usrid),
                                     "status": {"$nin": ["HIDDEN"]}})
    forum_gst = node_collection.one({"_type": "GSystemType", "name": "Forum"})
    forum_count = node_collection.find({"_type": "GSystem",
                                        "member_of": forum_gst._id,
                                        'created_by': int(usrid),
                                        "status": {"$nin": ["HIDDEN"]}})
    quiz_gst = node_collection.one({"_type": "GSystemType", "name": "Quiz"})
    quiz_count = node_collection.find({"_type": "GSystem",
                                       "member_of": quiz_gst._id,
                                       'created_by': int(usrid),
                                       "status": {"$nin": ["HIDDEN"]}})
    thread_gst = node_collection.one({"_type": "GSystemType", "name": "Twist"})
    thread_count = node_collection.find({"_type": "GSystem",
                                         "member_of": thread_gst._id,
                                         'created_by': int(usrid),
                                         "status": {"$nin": ["HIDDEN"]}})
    reply_gst = node_collection.one({"_type": "GSystemType", "name": "Reply"})
    reply_count = node_collection.find({"_type": "GSystem",
                                        "member_of": reply_gst._id,
                                        'created_by': int(usrid),
                                        "status": {"$nin": ["HIDDEN"]}})
    task_cur = ""
    if current_user:
        if int(current_user) == int(usrid):
            # latest 10 open ("New"/"In Progress") tasks assigned to the user
            task_cur = node_collection.find(
                {'member_of': task_gst._id,
                 'attribute_set.Status': {'$in': ["New", "In Progress"]},
                 'attribute_set.Assignee': usrid}
            ).sort('last_update', -1).limit(10)
            dashboard_count.update({'Task': task_cur.count()})
        current_user_obj = User.objects.get(id=current_user)
        usr_fname = current_user_obj.first_name
        usr_lname = current_user_obj.last_name
    # latest 10 groups the user administers or belongs to, excluding "home"
    # and the user's own author-group
    group_cur = node_collection.find(
        {'_type': "Group", 'name': {'$nin': ["home", auth.name]},
         "access_policy": {"$in": Access_policy},
         '$or': [{'group_admin': int(usrid)},
                 {'author_set': int(usrid)}]}).sort('last_update', -1).limit(10)
    dashboard_count.update({'group': group_cur.count()})
    # user activity gives all the activities of the users
    activity = ""
    activity_user = node_collection.find(
        {'$and': [{'$or': [{'_type': 'GSystem'}, {'_type': 'group'},
                           {'_type': 'File'}]},
                  {"access_policy": {"$in": Access_policy}},
                  {'status': {'$in': [u"DRAFT", u"PUBLISHED"]}},
                  {'member_of': {'$nin': [exclued_from_public]}},
                  {'$or': [{'created_by': int(usrid)},
                           {'modified_by': int(usrid)}]}]
         }).sort('last_update', -1).limit(10)
    dashboard_count.update({'activity': activity_user.count()})
    # FIX: the original filter used `!=` tests joined with `or`
    # (i._type != 'Batch' or i._type != 'Course' or ...), which is always
    # True and so excluded nothing; `not in` expresses the intended
    # exclusion of Batch/Course/Module documents.
    a_user = [i for i in activity_user
              if i._type not in ('Batch', 'Course', 'Module')]
    # a temp. variable which stores the lookup for append method
    user_activity_append_temp = user_activity.append
    for each in a_user:
        if each.created_by == each.modified_by:
            if each.last_update == each.created_at:
                activity = 'created'
            else:
                activity = 'modified'
        else:
            activity = 'created'
        if each._type == 'Group':
            user_activity_append_temp(each)
        else:
            member_of = node_collection.find_one({"_id": each.member_of[0]})
            user_activity_append_temp(each)
    # resources in this group to which the user has contributed; used to
    # populate the collaborators drawer
    obj = node_collection.find(
        {'_type': {'$in': [u"GSystem", u"File"]},
         'contributors': int(usrid),
         'group_set': {'$all': [ObjectId(group_id)]}}
    )
    collab_drawer = []
    # a temp. variable which stores the lookup for append method
    collab_drawer_append_temp = collab_drawer.append
    # Populate collaborators according to their latest modification of a
    # particular resource.
    for each in obj.sort('last_update', -1):
        for val in each.contributors:
            name = User.objects.get(pk=val).username
            collab_drawer_append_temp({'usrname': name, 'Id': val,
                                       'resource': each.name})
    shelves = []
    datavisual = []
    shelf_list = {}
    show_only_pie = True
    # resolve the current profile picture from the author's relation_set
    if not profile_pic_image:
        if auth:
            for each in auth.relation_set:
                if "has_profile_pic" in each:
                    profile_pic_image = node_collection.one(
                        {'_type': "GSystem",
                         '_id': each["has_profile_pic"][0]}
                    )
                    break
    # collect previously used (now DELETED) profile pictures
    has_profile_pic_rt = node_collection.one(
        {'_type': 'RelationType', 'name': unicode('has_profile_pic')})
    all_old_prof_pics = triple_collection.find(
        {'_type': "GRelation", "subject": auth._id,
         'relation_type.$id': has_profile_pic_rt._id,
         'status': u"DELETED"})
    if all_old_prof_pics:
        for each_grel in all_old_prof_pics:
            n = node_collection.one({'_id': ObjectId(each_grel.right_subject)})
            if n not in old_profile_pics:
                old_profile_pics.append(n)
    # weighted counts for the activity pie-chart
    forum_create_rate = forum_count.count() * GSTUDIO_RESOURCES_CREATION_RATING
    file_create_rate = file_cur.count() * GSTUDIO_RESOURCES_CREATION_RATING
    page_create_rate = page_cur.count() * GSTUDIO_RESOURCES_CREATION_RATING
    quiz_create_rate = quiz_count.count() * GSTUDIO_RESOURCES_CREATION_RATING
    reply_create_rate = reply_count.count() * GSTUDIO_RESOURCES_REPLY_RATING
    thread_create_rate = thread_count.count() * GSTUDIO_RESOURCES_CREATION_RATING
    datavisual.append({"name": "Forum", "count": forum_create_rate})
    datavisual.append({"name": "File", "count": file_create_rate})
    datavisual.append({"name": "Page", "count": page_create_rate})
    datavisual.append({"name": "Quiz", "count": quiz_create_rate})
    datavisual.append({"name": "Reply", "count": reply_create_rate})
    datavisual.append({"name": "Thread", "count": thread_create_rate})
    datavisual.append({"name": "Registration",
                       "count": GSTUDIO_RESOURCES_REGISTRATION_RATING})
    # NOTE(review): the total weighs threads with REPLY_RATING while the
    # chart above weighs them with CREATION_RATING -- confirm which is
    # intended before changing either.
    total_activity_rating = GSTUDIO_RESOURCES_REGISTRATION_RATING + \
        (page_cur.count() + file_cur.count() + forum_count.count() +
         quiz_count.count()) * GSTUDIO_RESOURCES_CREATION_RATING + \
        (thread_count.count() + reply_count.count()) * GSTUDIO_RESOURCES_REPLY_RATING
    return render_to_response(
        "ndf/uDashboard.html",
        {
            'usr': current_user, 'username': usrname, 'user_id': usrid,
            'success': success_state, 'usr_fname': usr_fname,
            'usr_lname': usr_lname, 'DOJ': date_of_join, 'author': auth,
            'group_id': group_id, 'groupid': group_id,
            'group_name': group_name, 'current_user_obj': current_user_obj,
            'already_set': is_already_selected, 'user_groups': group_cur,
            'prof_pic_obj': profile_pic_image, 'user_task': task_cur,
            'group_count': group_cur.count(), 'page_count': page_cur.count(),
            'file_count': file_cur.count(), 'user_activity': user_activity,
            'dashboard_count': dashboard_count,
            'show_only_pie': show_only_pie,
            'datavisual': json.dumps(datavisual),
            'total_activity_rating': total_activity_rating,
            'old_profile_pics': old_profile_pics,
            'site_name': GSTUDIO_SITE_NAME,
        },
        context_instance=RequestContext(request)
    )
def translate(request, group_id, node_id, lang, translated_node_id=None, **kwargs):
    '''
    For EDIT: translate the provided node into the provided language.

    `lang` may be either a full language name or a language code.
    `node_id` is the _id of the source node.
    `translated_node_id`, when given, points at an already-existing
    translation; otherwise an existing "translation_of" GRelation for
    this language is looked up, and on POST a fresh translated node is
    created if none is found.
    '''
    group_name, group_id = Group.get_group_name_id(group_id)
    language = get_language_tuple(lang)
    source_obj = Node.get_node_by_id(node_id)
    existing_grel = translate_grel = translated_node = None
    if translated_node_id:
        translated_node = Node.get_node_by_id(translated_node_id)
    else:
        # get translated_node: look for an existing translation_of
        # GRelation from the source node in this language
        existing_grel = triple_collection.one({
            '_type': 'GRelation',
            'subject': ObjectId(node_id),
            'relation_type': rt_translation_of._id,
            'language': language
        })
        if existing_grel:
            # get existing translated_node
            translated_node = Node.get_node_by_id(existing_grel.right_subject)
            translate_grel = existing_grel
    if request.method == 'GET':
        # show the translation form, pre-filled with any existing translation
        return render_to_response("ndf/translate_form.html", {
            'group_id': group_id,
            'node_obj': translated_node,
            'source_obj': source_obj,
            'post_url': reverse('translate', kwargs={
                'group_id': group_id,
                'node_id': node_id,
                'lang': lang,
            })
        }, context_instance=RequestContext(request))
    elif request.method == 'POST':
        # explicit `if` check for `POST`
        if not translated_node:
            # create a new translated node
            # translated_node = node_collection.collection.GSystem()
            # copy source_obj's data into a new node
            if source_obj._type == "Group":
                # Groups cannot be deep-copied wholesale; copy field-by-field
                # except identity/typing/lineage fields
                translated_node = node_collection.collection.GSystem()
                exclude_fields = ['_id','member_of','_type','type_of','modified_by','prior_node','post_node']
                for each in translated_node:
                    if each not in exclude_fields:
                        translated_node[each] = source_obj[each]
                translated_node.group_set.append(source_obj._id)
            else:
                translated_node = source_obj.__deepcopy__()
                # new identity for the copy
                translated_node['_id'] = ObjectId()
            translated_node.fill_gstystem_values(request=request, language=language, **kwargs)
            trans_alt_name = request.POST.get('altnames', None)
            translated_node.altnames = unicode(trans_alt_name)
            # NOTE(review): `trans_node_gst_id` is not defined in this
            # function -- presumably a module-level GSystemType id; verify.
            translated_node.member_of = [ObjectId(trans_node_gst_id)]
        translated_node.save(group_id=group_id)
        if not existing_grel:
            # link source and all of its translations (old + new) through a
            # single translation_of GRelation for this language
            trans_grel_list = [ObjectId(translated_node._id)]
            trans_grels = triple_collection.find({'_type': 'GRelation', \
                'relation_type': rt_translation_of._id,'subject': ObjectId(node_id)},{'_id': 0, 'right_subject': 1})
            for each_rel in trans_grels:
                trans_grel_list.append(each_rel['right_subject'])
            translate_grel = create_grelation(node_id, rt_translation_of, trans_grel_list, language=language)
        # page_gst_name, page_gst_id = Node.get_name_id_from_type('Page', 'GSystemType')
        # return HttpResponseRedirect(reverse('page_details', kwargs={'group_id': group_id, 'app_id': page_gst_id }))
        # return HttpResponseRedirect(reverse('all_translations', kwargs={'group_id': group_id, 'node_id': node_id }))
        return HttpResponseRedirect(reverse('show_translation', kwargs={'group_id': group_id, 'node_id': node_id, 'lang': lang }))
def update_registration_year():
    '''
    Rename the "registration_year" AttributeType to "registration_date"
    (with new altnames and date validators) and migrate any existing
    GAttribute values of 2014-01-01 00:00 to 2014-09-02 00:00.
    Appends progress messages to the module-level `log_list`.
    Raises Exception if the AttributeType rename itself fails.
    '''
    # Fetch the AttributeType (registration_year) & rename it using update command
    ryat = node_collection.one({
        '_type': "AttributeType", 'name': "registration_year"
    })
    if not ryat:
        # It means already updated don't do anything
        ryat = node_collection.one({
            '_type': "AttributeType", 'name': "registration_date"
        })
        info_message = "\n Already updated -- " + ryat.name + " (" + str(
            ryat._id) + ") !\n"
        log_list.append(info_message)
        return
    info_message = "\n Before update: " + str(
        ryat._id) + " -- " + ryat.name + " -- " + str(ryat["validators"])
    log_list.append(info_message)
    res = node_collection.collection.update({'_id': ryat._id}, {
        '$set': {
            'name': u"registration_date",
            'altnames': u"Date of Registration",
            'validators': [u"m/d/Y", u"date_month_day_year", u"MM/DD/YYYY"]
        }
    }, upsert=False, multi=False)
    if res['n']:
        ryat.reload()
        info_message = "\n After update: " + str(
            ryat._id) + " -- " + ryat.name + " -- " + str(
                ryat["validators"]) + "\n"
        log_list.append(info_message)
        # If AttributeType is updated successfully, then look out for any existing value(s)
        # With value as datetime.datetime(2014, 1, 1, 0, 0)
        # If found, replace it with datetime.datetime(2014, 9, 2, 0, 0)
        ry_cur = triple_collection.find({
            '_type': "GAttribute", 'attribute_type': ryat._id
        })
        c = ry_cur.count()
        if c:
            info_message = "\n No. of existing value(s) found: " + str(c)
            log_list.append(info_message)
            for each in ry_cur:
                if each.object_value == datetime.datetime(2014, 1, 1, 0, 0):
                    d = datetime.datetime(2014, 9, 2, 0, 0)
                    # update both the stored value and the human-readable
                    # name that embeds the old date string
                    res = triple_collection.collection.update(
                        {'_id': each._id}, {
                            '$set': {
                                'object_value': d,
                                'name': each.name.replace("2014-01-01 00:00:00", str(d))
                            }
                        },
                        upsert=False, multi=False)
                    if res['n']:
                        # n = triple_collection.one({'_id': each._id})
                        each.reload()
                        info_message = "\n Updated: " + each.name  #n.name
                    else:
                        info_message = "\n Not updated: " + each.name + " !!"
                    log_list.append(info_message)
        else:
            info_message = "\n No documents exist for update !!\n"
            log_list.append(info_message)
    else:
        error_message = "Something went wrong while updating AttributeType - " + ryat.name + " (" + str(
            ryat._id) + ")"
        log_list.append(error_message)
        raise Exception(error_message)
def save_batch(request, group_id):
    '''
    Ajax POST endpoint: create a new batch (GSystem of type GST_BATCH) in
    the given group, or reuse an existing one identified by "batch_id",
    and attach the posted users to it via "has_batch_member" GRelations.
    Returns a JSON dict with at least "success" and, on creation, the new
    batch's name and id.
    '''
    # def save_batch(batch_name, user_list, group_id, request, ac_id):
    group_name, group_id = get_group_name_id(group_id)
    response_dict = {"success": False}
    # new_batch_node = None
    rt_has_batch_member = node_collection.one({
        '_type': 'RelationType', 'name': 'has_batch_member'
    })
    if request.is_ajax() and request.method == "POST":
        ac_id = request.POST.get("ac_id", '')          # announced-course id
        batch_name = request.POST.get("batch_name", '')
        batch_id = request.POST.get("batch_id", '')    # empty => create new
        user_list = request.POST.getlist("user_list[]", '')
        # create_new_batch = request.POST.get("create_new_batch", '')
        # response_dict["old_batches"] = find_batches_of_ac(ac_id)
        user_list = [ObjectId(each) for each in user_list]
        all_batches_in_grp = []
        if not batch_id:
            # create a fresh batch node owned by the requesting user
            # b_node = node_collection.one({'member_of':GST_BATCH._id,'name':unicode(batch_name)})
            b_node = node_collection.collection.GSystem()
            b_node.member_of.append(GST_BATCH._id)
            b_node.created_by = int(request.user.id)
            b_node.group_set.append(ObjectId(group_id))
            b_node.name = batch_name
            b_node['altnames'] = batch_name.replace('_', ' ')
            b_node.contributors.append(int(request.user.id))
            b_node.modified_by = int(request.user.id)
            b_node.save(groupid=group_id)
            all_batches_in_grp.append(b_node._id)
            rt_group_has_batch = node_collection.one({
                '_type': 'RelationType', 'name': 'group_has_batch'
            })
            relation_coll = triple_collection.find({
                '_type': 'GRelation',
                'relation_type': rt_group_has_batch._id,
                'subject': ObjectId(group_id)
            })
            for each in relation_coll:
                all_batches_in_grp.append(each.right_subject)
            # to get all batches of the group
            rt_has_course = node_collection.one({
                '_type': 'RelationType', 'name': 'has_course'
            })
            # re-assert group->batches (old + new) and link batch->course
            create_grelation(ObjectId(group_id), rt_group_has_batch,
                             all_batches_in_grp)
            create_grelation(b_node._id, rt_has_course, ObjectId(ac_id))
            response_dict['new_batch_created'] = True
            response_dict['new_batch_node_name'] = b_node.name
            response_dict['new_batch_node_id'] = str(b_node._id)
        else:
            # reuse the existing batch
            response_dict['new_batch_created'] = False
            b_node = node_collection.one({'_id': ObjectId(batch_id)})
        if user_list:
            create_grelation(b_node._id, rt_has_batch_member, user_list)
        response_dict['success'] = True
    return HttpResponse(json.dumps(response_dict, cls=NodeJSONEncoder))
def topic_detail_view(request, group_id, app_Id=None):
    '''
    Render the detail page of a Topic node (`app_Id`), building the
    breadcrumb trail from the "nav_li" GET parameter (a comma-separated
    list of node ids from the theme map) and the logged-in user's shelves.
    '''
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    obj = node_collection.one({'_id': ObjectId(app_Id)})
    app = node_collection.one({'_id': ObjectId(obj.member_of[0])})
    app_id = app._id
    topic = "Topic"
    theme_id = None
    prior_obj = None
    # First get the navigation list till topic from theme map
    nav_l = request.GET.get('nav_li', '')
    breadcrumbs_list = []
    nav_li = ""
    # a temp. variable which stores the lookup for append method
    breadcrumbs_list_append_temp = breadcrumbs_list.append
    if nav_l:
        nav_li = nav_l
        nav_l = str(nav_l).split(",")
        # create beadcrumbs list from navigation list sent from template.
        for each in nav_l:
            each_obj = node_collection.one({'_id': ObjectId(each) })
            # Theme object needs to be added in breadcrumbs for full navigation path from theme to topic
            # "nav_l" doesnt includes theme object since its not in tree hierarchy level,
            # hence Match the first element and get its prior node which is theme object, to include it in breadcrumbs list
            # print "!!!!!!!!! ", each_obj.name
            if each == nav_l[0]:
                if each_obj.prior_node:
                    theme_obj = node_collection.one({'_id': ObjectId(each_obj.prior_node[0] ) })
                    theme_id = theme_obj._id
                    breadcrumbs_list_append_temp( (str(theme_obj._id), theme_obj.name) )
            breadcrumbs_list_append_temp( (str(each_obj._id), each_obj.name) )
    if obj:
        if obj.prior_node:
            prior_obj = node_collection.one({'_id': ObjectId(obj.prior_node[0]) })
    ###shelf###
    shelves = []
    # a temp. variable which stores the lookup for append method
    shelves_append_temp = shelves.append
    shelf_list = {}
    auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
    if auth:
        # collect the user's shelves (has_shelf GRelations) and, per shelf,
        # the names of the items it holds
        has_shelf_RT = node_collection.one({'_type': 'RelationType', 'name': u'has_shelf' })
        shelf = triple_collection.find({'_type': 'GRelation', 'subject': ObjectId(auth._id), 'relation_type.$id': has_shelf_RT._id})
        shelf_list = {}
        if shelf:
            for each in shelf:
                shelf_name = node_collection.one({'_id': ObjectId(each.right_subject)})
                shelves_append_temp(shelf_name)
                shelf_list[shelf_name.name] = []
                # a temp. variable which stores the lookup for append method
                shelf_list_shlefname_append_temp = shelf_list[shelf_name.name].append
                for ID in shelf_name.collection_set:
                    shelf_item = node_collection.one({'_id': ObjectId(ID) })
                    shelf_list_shlefname_append_temp(shelf_item.name)
        else:
            shelves = []
    # print "theme_id: ", theme_id
    return render_to_response('ndf/topic_details.html',
                              { 'node': obj,'app_id': app_id,"theme_id": theme_id,
                                "prior_obj": prior_obj,
                                'group_id': group_id,'shelves': shelves,'topic': topic,
                                'nav_list':nav_li, 'groupid':group_id,'shelf_list': shelf_list,
                                'breadcrumbs_list': breadcrumbs_list },
                              context_instance = RequestContext(request)
                              )
def main():
    '''
    Create Hindi translation nodes for GAPP documents listed in the CSV at
    module-level `schema_file_csv` (column 0: original name, column 1:
    translated name; header row skipped).  For every matching node that is
    a member of `gapp`, either reuse or create a translated GSystem node
    (language "hi", status DRAFT) and link it through a "translation_of"
    GRelation.
    '''
    columns = defaultdict(list)
    translation_dict = {}
    get_translation_rt = node_collection.one({
        '_type': 'RelationType', 'name': u"translation_of"
    })
    with open(schema_file_csv, 'rb') as f:
        reader = csv.reader(f)
        i = 1
        reader.next()  # skip the header row
        for row in reader:
            # transpose: columns[j] collects every value of CSV column j
            for (i, v) in enumerate(row):
                columns[i].append(v)
    translation_dict = dict(zip(columns[0], columns[1]))
    print translation_dict, "dict"
    for k, v in translation_dict.items():
        app_items = node_collection.find({'name': k})
        for each in list(app_items):
            # only translate nodes that are members of the GAPP type
            get_node = node_collection.one({
                '_id': ObjectId(each._id),
                'member_of': gapp._id
            })
            if get_node:
                name = v.decode('utf-8')
                print get_node.name
                # existing translation_of relations from this node
                node_rt = triple_collection.find({
                    '_type': "GRelation",
                    'subject': get_node._id,
                    'relation_type': get_translation_rt._id
                })
                if node_rt.count() > 0:
                    node = node_collection.one(
                        {'_id': ObjectId(node_rt[0].right_subject)})
                else:
                    node = None
                    node_rt = None
                if node is None:
                    # no translated node yet -- create one
                    node = node_collection.collection.GSystem()
                    node.name = unicode(name)
                    node.access_policy = u"PUBLIC"
                    node.contributors.append(1)
                    node.created_by = 1
                    #node.group_set.append(get_group._id)
                    node.language = u"hi"
                    node.member_of.append(gapp._id)
                    node.modified_by = 1
                    node.status = u"DRAFT"
                    node.save()
                    print "\nTranslated Node ", node.name, " created successfully\n"
                else:
                    print "\nTranslated node ", node.name, " already exists\n"
                if node_rt is None:
                    # no relation yet -- create the translation_of GRelation
                    relation_type = node_collection.one({
                        '_type': 'RelationType', 'name': 'translation_of'
                    })
                    gr_node = create_grelation(each._id, relation_type, node._id)
                    # grelation = triple_collection.collection.GRelation()
                    # grelation.relation_type=relation_type
                    # grelation.subject=each._id
                    # grelation.right_subject=node._id
                    # grelation.name=u""
                    # grelation.save()
                    print "\nGRelation for node ", node.name, " created sucessfully!!"
                else:
                    print "\nGRelation for node ", node.name, " already exists\n"
def clean_structure(): ''' This function perform cleaning activities. ''' # Setting email_id field of Author class ========================================= info_message = "\n\nSetting email_id field of following document(s) of Author class...\n" print info_message log_list.append(info_message) users = User.objects.all() for each in users: try: auth_node = node_collection.one({'_type': "Author", 'created_by': each.id}) if auth_node: res = node_collection.collection.update( {'_id': auth_node._id}, {'$set': {'email': each.email}}, upsert=False, multi=False ) if res['n']: auth_node.reload() info_message = "\n Author node's (" + str(auth_node._id) + " -- " + auth_node.name + ") email field updated with following value: " + auth_node.email else: info_message = "\n Author node's (" + str(auth_node._id) + " -- " + auth_node.name + ") email field update failed !!!" log_list.append(info_message) else: info_message = "\n No author node exists with this name (" + auth_node.name + ") !!!" log_list.append(info_message) except Exception as e: error_message = "\n Author node has multiple records... " + str(e) + "!!!" log_list.append(error_message) continue # Setting attribute_set & relation_set ================== info_message = "\n\nSetting attribute_set & relation_set for following document(s)...\n" print info_message log_list.append(info_message) # ------------------------------------------------------------------------------------ # Fetch all GSystems (including File, Group & Author as well; as these are sub-types of GSystem) # ------------------------------------------------------------------------------------ # Keeping timeout=False, as cursor may exceeds it's default time i.e. 10 mins for which it remains alive # Needs to be expicitly close # to fix broken documents which are having partial/outdated attributes/relations in their attribute_set/relation_set. # first make their attribute_set and relation_set empty and them fill them with latest key-values. 
gsystem_list = ["GSystem", "File", "Group", "Author"] # gsystem_list = ["Group", "Author"] node_collection.collection.update( {'_type': {'$in': gsystem_list}, 'attribute_set': {'$exists': True}, 'relation_set': {'$exists': True}}, {'$set': {'attribute_set': [], 'relation_set': []}}, upsert=False, multi=True ) gs = node_collection.find({'_type': {'$in': gsystem_list}, '$or': [{'attribute_set': []}, {'relation_set': []}] }, timeout=False) gs_count = gs.count() # for each_gs in gs: gs_iter = iter(gs) for i, each_gs in enumerate(gs_iter): attr_list = [] # attribute-list rel_list = [] # relation-list inv_rel_list = [] # inverse-relation-list # print " .", print " \n Processing node #.", (i+1), " out of ", gs_count, " ... ", if each_gs.member_of_names_list: info_message = "\n\n >>> " + str(each_gs.name) + " ("+str(each_gs.member_of_names_list[0])+")" else: info_message = "\n\n >>> " + str(each_gs.name) + " (ERROR: member_of field is not set properly for this document -- "+str(each_gs._id)+")" log_list.append(info_message) # ------------------------------------------------------------------------------------ # Fetch all attributes, if created in GAttribute Triple # Key-value pair will be appended only for those whose entry would be found in GAttribute Triple # ------------------------------------------------------------------------------------ ga = triple_collection.collection.aggregate([ {'$match': {'_type': "GAttribute", 'subject': each_gs._id, 'status': u"PUBLISHED"}}, {'$project': {'_id': 0, 'key_val': '$attribute_type', 'value_val': '$object_value'}} ]) # ------------------------------------------------------------------------------------ # Fetch all relations, if created in GRelation Triple # Key-value pair will be appended only for those whose entry would be found in GRelation Triple # ------------------------------------------------------------------------------------ gr = triple_collection.collection.aggregate([ {'$match': {'_type': "GRelation", 'subject': 
each_gs._id, 'status': u"PUBLISHED"}}, {'$project': {'_id': 0, 'key_val': '$relation_type', 'value_val': '$right_subject'}} ]) # ------------------------------------------------------------------------------------ # Fetch all inverse-relations, if created in GRelation Triple # Key-value pair will be appended only for those whose entry would be found in GRelation Triple # ------------------------------------------------------------------------------------ inv_gr = triple_collection.collection.aggregate([ {'$match': {'_type': "GRelation", 'right_subject': each_gs._id, 'status': u"PUBLISHED"}}, {'$project': {'_id': 0, 'key_val': '$relation_type', 'value_val': '$subject'}} ]) if ga: # If any GAttribute found # ------------------------------------------------------------------------------------ # Setting up attr_list # ------------------------------------------------------------------------------------ # print "\n" for each_gar in ga["result"]: if each_gar: key_node = db.dereference(each_gar["key_val"]) # print "\t", key_node["name"], " -- ", each_gar["value_val"] # Append corresponding GAttribute as key-value pair in given attribute-list # key: attribute-type name # value: object_value from GAttribute document attr_list.append({key_node["name"]: each_gar["value_val"]}) if gr: # If any GRelation (relation) found # ------------------------------------------------------------------------------------ # Setting up rel_list # ------------------------------------------------------------------------------------ for each_grr in gr["result"]: if each_grr: key_node = db.dereference(each_grr["key_val"]) # Append corresponding GRelation as key-value pair in given relation-list # key: name field's value of relation-type's document # value: right_subject field's value of GRelation document if not rel_list: rel_list.append({key_node["name"]: [each_grr["value_val"]]}) else: key_found = False for each in rel_list: if key_node["name"] in each: 
each[key_node["name"]].append(each_grr["value_val"]) key_found = True if not key_found: rel_list.append({key_node["name"]: [each_grr["value_val"]]}) if inv_gr: # If any GRelation (inverse-relation) found # ------------------------------------------------------------------------------------ # Setting up inv_rel_list # ------------------------------------------------------------------------------------ for each_grr in inv_gr["result"]: if each_grr: key_node = db.dereference(each_grr["key_val"]) # Append corresponding GRelation as key-value pair in given inverse-relation-list # key: inverse_name field's value of relation-type's document # value: subject field's value of GRelation document if not inv_rel_list: inv_rel_list.append({key_node["inverse_name"]: [each_grr["value_val"]]}) else: key_found = False for each in inv_rel_list: if key_node["inverse_name"] in each: each[key_node["inverse_name"]].append(each_grr["value_val"]) key_found = True if not key_found: inv_rel_list.append({key_node["inverse_name"]: [each_grr["value_val"]]}) info_message = "" if attr_list: info_message += "\n\n\tAttributes: " + str(attr_list) else: info_message += "\n\n\tAttributes: No attribute found!" if rel_list: info_message += "\n\n\tRelations: " + str(rel_list) else: info_message += "\n\n\tRelations: No relation found!" if inv_rel_list: info_message += "\n\n\tInverse-Relations: " + str(inv_rel_list) else: info_message += "\n\n\tInverse-Relations: No inverse-relation found!" 
log_list.append(info_message) # ------------------------------------------------------------------------------------ # Finally set attribute_set & relation_set of current GSystem with modified attr_list & rel_list respectively # ------------------------------------------------------------------------------------ res = node_collection.collection.update( {'_id': each_gs._id}, {'$set': {'attribute_set': attr_list, 'relation_set': (rel_list + inv_rel_list)}}, upsert=False, multi=False ) if res['n']: info_message = "\n\n\t" + str(each_gs.name) + " updated succesfully !" log_list.append(info_message) print " -- attribute_set & relation_set updated succesfully !" # ------------------------------------------------------------------------------------ # Close cursor object if still alive # ------------------------------------------------------------------------------------ if gs.alive: info_message = "\n\n GSystem-Cursor state (before): " + str(gs.alive) log_list.append(info_message) gs.close() info_message = "\n\n GSystem-Cursor state (after): " + str(gs.alive) log_list.append(info_message) print "\n Setting attribute_set & relation_set completed succesfully !" 
# Rectify start_time & end_time of task ================== start_time = node_collection.one({'_type': "AttributeType", 'name': "start_time"}) end_time = node_collection.one({'_type': "AttributeType", 'name': "end_time"}) info_message = "\n\nRectifing start_time & end_time of following task(s)...\n" print info_message log_list.append(info_message) invalid_dates_cur = triple_collection.find({'attribute_type.$id': {'$in': [start_time._id, end_time._id]}, 'object_value': {'$not': {'$type': 9}}}) for each in invalid_dates_cur: date_format_string = "" old_value = "" new_value = "" attribute_type_node = each.attribute_type if "-" in each.object_value and ":" in each.object_value: date_format_string = "%m-%d-%Y %H:%M" elif "/" in each.object_value and ":" in each.object_value: date_format_string = "%m/%d/%Y %H:%M" elif "-" in each.object_value: date_format_string = "%m-%d-%Y" elif "/" in each.object_value: date_format_string = "%m/%d/%Y" if date_format_string: old_value = each.object_value info_message = "\n\n\t" + str(each._id) + " -- " + str(old_value) res = triple_collection.collection.update({'_id': each._id}, {'$set': {'object_value': datetime.strptime(each.object_value, date_format_string)}}, upsert=False, multi=False ) if res['n']: print " .", each.reload() new_value = each.object_value info_message += " >> " + str(new_value) log_list.append(info_message) res = node_collection.collection.update({'_id': each.subject, 'attribute_set.'+attribute_type_node.name: old_value}, {'$set': {'attribute_set.$.'+attribute_type_node.name: new_value}}, upsert=False, multi=False ) if res["n"]: info_message = "\n\n\tNode's (" + str(each.subject) + ") attribute_set (" + attribute_type_node.name + ") updated succesfully." 
log_list.append(info_message) # Update type_of field to list type_of_cursor=node_collection.find({'type_of':{'$exists':True}}) for object_cur in type_of_cursor: if type(object_cur['type_of']) == ObjectId or object_cur['type_of'] == None: if type(object_cur['type_of']) == ObjectId : node_collection.collection.update({'_id':object_cur['_id']},{'$set':{'type_of':[object_cur['type_of']]}}) else : node_collection.collection.update({'_id':object_cur['_id']},{'$set':{'type_of':[]}}) # Removes n attribute if created accidently in existsing documents node_collection.collection.update({'n': {'$exists': True}}, {'$unset': {'n': ""}}, upsert=False, multi=True) # Updates wherever modified_by field is None with default value as either first contributor or the creator of the resource modified_by_cur = node_collection.find({'_type': {'$nin': ['GAttribute', 'GRelation', 'node_holder', 'ToReduceDocs', 'ReducedDocs', 'IndexedWordList']}, 'modified_by': None}) if modified_by_cur.count > 0: for n in modified_by_cur: if u'required_for' not in n.keys(): if "contributors" in n: if n.contributors: node_collection.collection.update({'_id': n._id}, {'$set': {'modified_by': n.contributors[0]}}, upsert=False, multi=False) else: if n.created_by: node_collection.collection.update({'_id': n._id}, {'$set': {'modified_by': n.created_by, 'contributors': [n.created_by]}}, upsert=False, multi=False) else: print "\n Please set created_by value for node (", n._id, " -- ", n._type, " : ", n.name, ")\n" # Updating faulty modified_by and contributors values (in case of user-group and file documents) cur = node_collection.find({'_type': {'$nin': ['node_holder', 'ToReduceDocs', 'ReducedDocs', 'IndexedWordList']}, 'modified_by': {'$exists': True}}) for n in cur: # By faulty, it means modified_by and contributors has 1 as their values # 1 stands for superuser # Instead of this value should be the creator of that resource # (even this is applicable only if created_by field of that resource holds some value) if 
u'required_for' not in n.keys(): if not n.created_by: print "\n Please set created_by value for node (", n._id, " -- ", n._type, " : ", n.name, ")" else: if n.created_by not in n.contributors: node_collection.collection.update({'_id': n._id}, {'$set': {'modified_by': n.created_by, 'contributors': [n.created_by]} }, upsert=False, multi=False) # For delete the profile_pic as GST profile_pic_obj = node_collection.one({'_type': 'GSystemType','name': u'profile_pic'}) if profile_pic_obj: profile_pic_obj.delete() print "\n Deleted GST document of profile_pic.\n" # For adding visited_location field (default value set as []) in User Groups. try: author = node_collection.one({'_type': "GSystemType", 'name': "Author"}) if author: auth_cur = node_collection.find({'_type': 'Group', 'member_of': author._id }) if auth_cur.count() > 0: for each in auth_cur: node_collection.collection.update({'_id': each._id}, {'$set': {'_type': "Author"} }, upsert=False, multi=False) print "\n Updated user group: ", each.name cur = node_collection.find({'_type': "Author", 'visited_location': {'$exists': False}}) author_cur = node_collection.find({'_type': 'Author'}) if author_cur.count() > 0: for each in author_cur: if each.group_type == None: node_collection.collection.update({'_id': each._id}, {'$set': {'group_type': u"PUBLIC", 'edit_policy': u"NON_EDITABLE", 'subscription_policy': u"OPEN"} }, upsert=False, multi=False) print "\n Updated user group policies: ", each.name if cur.count(): print "\n" for each in cur: node_collection.collection.update({'_type': "Author", '_id': each._id}, {'$set': {'visited_location': []}}, upsert=False, multi=True) print " 'visited_location' field added to Author group (" + each.name + ")\n" else: error_message = "\n Exception while creating 'visited_location' field in Author class.\n Author GSystemType doesn't exists!!!\n" raise Exception(error_message) except Exception as e: print str(e) # INSERTED FOR MAP_REDUCE allIndexed = node_collection.find({"_type": 
"IndexedWordList", "required_for" : "storing_indexed_words"}) if allIndexed.count() == 0: print "\n Inserting indexes" j=1 while j<=27: obj = node_collection.collection.IndexedWordList() obj.word_start_id = float(j) obj.words = {} obj.required_for = u'storing_indexed_words' obj.save() j+=1 # Adding Task GST into start_time and end_time ATs subject_type start_time = node_collection.one({'_type': u'AttributeType', 'name': u'start_time'}) end_time = node_collection.one({'_type': u'AttributeType', 'name': u'end_time'}) task = node_collection.find_one({'_type':u'GSystemType', 'name':u'Task'}) if task: if start_time: if not task._id in start_time.subject_type : start_time.subject_type.append(task._id) start_time.save() if end_time: if not task._id in end_time.subject_type : end_time.subject_type.append(task._id) end_time.save()
def data_review_save(request, group_id):
    """Save a single data-row edit submitted by the data-review app.

    Reads the edited node details (a JSON object in POST["node_details"]),
    renames incoming keys to the names expected by get_node_common_fields(),
    applies field / attribute / relation changes to the File node, records
    an edit summary, and returns the node's resulting status as plain text.

    Arguments:
        request  -- Django HttpRequest (POST expected).
        group_id -- group ObjectId string or group name.

    Returns:
        HttpResponse whose body is the node's status string.
    """
    userid = request.user.pk

    # Resolve group: accept either an ObjectId string or a group name.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)

    group_obj = node_collection.one({"_id": ObjectId(group_id)})

    node_oid = request.POST.get("node_oid", "")
    node_details = request.POST.get("node_details", "")
    node_details = json.loads(node_details)
    # print "node_details : ", node_details

    # Rename keys as per what get_node_common_fields() expects.
    node_details["lan"] = node_details.pop("language")
    node_details["prior_node_list"] = node_details.pop("prior_node")
    node_details["login-mode"] = node_details.pop("access_policy")
    status = node_details.pop("status")
    # node_details["collection_list"] = node_details.pop("collection")  # for future use

    # Rebuild request.POST as a mutable QueryDict carrying node_details
    # values, so downstream helpers can read them as ordinary POST fields.
    post_req = request.POST.copy()
    post_req.pop('node_details')
    post_req.update(node_details)
    request.POST = post_req
    # print "\n---\n", request.POST, "\n---\n"

    license = request.POST.get('license', '')
    file_node = node_collection.one({"_id": ObjectId(node_oid)})

    if request.method == "POST":
        edit_summary = []
        # Copy BEFORE modification so changed fields can be diffed below.
        file_node_before = file_node.copy()
        is_changed = get_node_common_fields(request, file_node, group_id, GST_FILE)

        for key in file_node_before.iterkeys():
            if file_node_before[key] != file_node[key]:
                edit_summary.append({
                    "name": "Field: " + key,
                    "before": file_node_before[key],
                    "after": file_node[key],
                })

        # Fill/update attributes of the node; returns the updated GAttributes.
        ga_nodes = get_node_metadata(request, file_node, is_changed=True)
        if len(ga_nodes):
            is_changed = True
            # Record each edited attribute in the summary.
            for each_ga in ga_nodes:
                edit_summary.append({
                    "name": "Attribute: " + each_ga["node"]["attribute_type"]["name"],
                    "before": each_ga["before_obj_value"],
                    "after": each_ga["node"]["object_value"],
                })

        teaches_list = request.POST.get('teaches', '')  # the edited teaches list
        prev_teaches_list = request.POST.get("teaches_prev", "")  # before-edit teaches list

        # Nodes were added/removed for the 'teaches' relation, or the previous
        # teaches list was made empty.
        if (teaches_list != '') or prev_teaches_list:
            teaches_list = teaches_list.split(",") if teaches_list else []
            teaches_list = [ObjectId(each_oid) for each_oid in teaches_list]
            relation_type_node = node_collection.one({
                '_type': "RelationType",
                'name': 'teaches'
            })
            gr_nodes = create_grelation(file_node._id, relation_type_node,
                                        teaches_list)
            gr_nodes_oid_list = [
                ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes
            ] if gr_nodes else []

            prev_teaches_list = prev_teaches_list.split(",") if prev_teaches_list else []
            prev_teaches_list = [ObjectId(each_oid) for each_oid in prev_teaches_list]

            # Only record a change when the resulting set differs from the
            # previous one.
            if not (len(gr_nodes_oid_list) == len(prev_teaches_list) and
                    set(gr_nodes_oid_list) == set(prev_teaches_list)):
                rel_nodes = triple_collection.find({
                    '_type': "GRelation",
                    'subject': file_node._id,
                    'relation_type': relation_type_node._id
                })
                # Map right_subject oid -> GRelation name ("a -- b -- c").
                rel_oid_name = {}
                for each in rel_nodes:
                    rel_oid_name[each.right_subject] = each.name
                is_changed = True
                edit_summary.append({
                    "name": "Relation: Teaches",
                    "before": [
                        rel_oid_name[each_oid].split(" -- ")[2]
                        for each_oid in prev_teaches_list
                    ],
                    "after": [
                        rel_oid_name[each_oid].split(" -- ")[2]
                        for each_oid in gr_nodes_oid_list
                    ],
                })

        assesses_list = request.POST.get('assesses_list', '')
        if assesses_list != '':
            assesses_list = assesses_list.split(",")
            assesses_list = [ObjectId(each_oid) for each_oid in assesses_list]
            relation_type_node = node_collection.one({
                '_type': "RelationType",
                'name': 'assesses'
            })
            # BUG FIX: this previously passed teaches_list (copy-paste error),
            # which replaced the node's 'assesses' relations with the teaches
            # targets and compared against the wrong list.
            gr_nodes = create_grelation(file_node._id, relation_type_node,
                                        assesses_list)
            # Guard against a None/empty return, same as the teaches branch.
            gr_nodes_oid_list = [
                ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes
            ] if gr_nodes else []
            if not (len(gr_nodes_oid_list) == len(assesses_list) and
                    set(gr_nodes_oid_list) == set(assesses_list)):
                is_changed = True

        # Any field/attribute/relation change demotes the node back to DRAFT.
        if is_changed:
            file_node.status = unicode("DRAFT")
            file_node.modified_by = userid
            if userid not in file_node.contributors:
                file_node.contributors.append(userid)

        # Only gstaff or group authors may set the status explicitly.
        if status and ((group_obj.is_gstaff(request.user)) or
                       (userid in group_obj.author_set)):
            if file_node.status != status:
                file_node.status = unicode(status)
                file_node.modified_by = userid
                if userid not in file_node.contributors:
                    file_node.contributors.append(userid)
                is_changed = True

        if is_changed:
            file_node.save(groupid=group_id)

    # print edit_summary
    return HttpResponse(file_node.status)
def theme_topic_create_edit(request, group_id, app_set_id=None):
    """Create or edit Theme / Theme-Item / Topic nodes (and their translations).

    POST handles creation, editing, collection ordering and translation of
    theme/topic nodes; GET renders the corresponding create/edit forms.
    Dispatches to one of three templates:
      - ndf/translation_page.html  when 'translate' is requested,
      - ndf/node_edit_base.html    when the resulting title is "Topic",
      - ndf/theme.html             otherwise.

    NOTE(review): indentation reconstructed from a whitespace-mangled source;
    branch nesting follows the apparent intent of the surrounding code.
    """
    #####################
    # Legacy group-resolution code kept for reference:
    # it resolved group_id via a Group/Author lookup by name before
    # get_group_name_id() existed.
    #####################
    # Resolve group: accept either an ObjectId string or a group name.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    ######################

    # Template-state flags and accumulators used by all branches below.
    nodes_dict = []
    create_edit = True
    themes_hierarchy = False
    themes_list_items = ""
    themes_cards = ""
    title = ""
    node = ""
    theme_topic_node = ""
    drawers = None
    drawer = None
    app_id = None
    nodes_list = []
    parent_nodes_collection = ""
    translate = request.GET.get('translate', '')

    # app_set_id is either the Theme GST itself (create) or a concrete
    # theme/theme-item/topic node being edited.
    app_GST = node_collection.find_one({"_id": ObjectId(app_set_id)})
    if app_GST._id != theme_GST._id:
        app_obj = node_collection.one({'_id': ObjectId(app_GST.member_of[0])})
    else:
        app_obj = theme_GST
    if app_obj:
        app_id = app_obj._id

    # Build the logged-in user's shelves ({shelf name: [item names]}).
    shelves = []
    shelf_list = {}
    auth = node_collection.one({
        '_type': 'Author',
        'name': unicode(request.user.username)
    })
    if auth:
        has_shelf_RT = node_collection.one({
            '_type': 'RelationType',
            'name': u'has_shelf'
        })
        shelf = triple_collection.find({
            '_type': 'GRelation',
            'subject': ObjectId(auth._id),
            'relation_type': has_shelf_RT._id
        })
        shelf_list = {}
        if shelf:
            for each in shelf:
                shelf_name = node_collection.one(
                    {'_id': ObjectId(each.right_subject)})
                shelves.append(shelf_name)
                shelf_list[shelf_name.name] = []
                for ID in shelf_name.collection_set:
                    shelf_item = node_collection.one({'_id': ObjectId(ID)})
                    shelf_list[shelf_name.name].append(shelf_item.name)
        else:
            shelves = []

    if request.method == "POST":
        if app_GST:
            create_edit = True
            themes_list_items = ""
            root_themes = []
            root_themes_id = []
            nodes_list = []
            name = request.POST.get('name')
            collection_list = request.POST.get('collection_list', '')
            prior_node_list = request.POST.get('prior_node_list', '')
            teaches_list = request.POST.get('teaches_list', '')
            assesses_list = request.POST.get('assesses_list', '')

            # Find the root themes (those not inside any collection_set) to
            # keep names unique while creating and editing themes.
            nodes = node_collection.find({
                'member_of': {
                    '$all': [theme_GST._id]
                },
                'group_set': {
                    '$all': [ObjectId(group_id)]
                }
            })
            for each in nodes:
                if each.collection_set:
                    for k in each.collection_set:
                        nodes_list.append(k)
            nodes.rewind()
            for each in nodes:
                if each._id not in nodes_list:
                    root_themes.append(each.name)
                    root_themes_id.append(each._id)

            if app_GST.name == "Theme" or app_GST.name == "Topic" or translate == "true":
                # --- Creating new Themes & Topics (or translating a tree) ---
                themes_list_items = False
                create_edit = False
                themes_hierarchy = False
                themes_cards = True
                if name or translate == "true":
                    # Enforce unique root-theme names (translation bypasses it).
                    if not name.upper() in (theme_name.upper()
                                            for theme_name in root_themes
                                            ) or translate == "true":
                        if translate != "true":
                            theme_topic_node = node_collection.collection.GSystem()
                            # get_node_common_fields(request, theme_topic_node, group_id, app_GST)
                            theme_topic_node.save(
                                is_changed=get_node_common_fields(
                                    request, theme_topic_node, group_id,
                                    app_GST),
                                groupid=group_id)
                        if translate == "true":
                            # Translate the whole collection tree rooted at
                            # app_GST: clone every node, then rebuild the
                            # parent/child links among the clones.
                            global list_trans_coll
                            list_trans_coll = []
                            coll_set1 = get_coll_set(app_GST._id)
                            for each in coll_set1:
                                theme_topic_node = node_collection.collection.GSystem()
                                # Pick the clone's GST from the source node's type.
                                if "Theme" in each.member_of_names_list:
                                    app_obj = theme_GST
                                if "theme_item" in each.member_of_names_list:
                                    app_obj = theme_item_GST
                                if "topic" in each.member_of_names_list:
                                    app_obj = topic_GST
                                theme_topic_node.save(
                                    is_changed=get_node_common_fields(
                                        request, theme_topic_node, group_id,
                                        app_obj, each),
                                    groupid=group_id)
                                # coll_set_dict maps source node id -> clone id.
                                # NOTE(review): presumably a module-level dict — verify.
                                coll_set_dict[each._id] = theme_topic_node._id
                                relation_type = node_collection.one({
                                    '_type': 'RelationType',
                                    'name': 'translation_of'
                                })
                                # (replaced a hand-built GRelation with create_grelation)
                                gr_node = create_grelation(
                                    each._id, relation_type,
                                    theme_topic_node._id)
                            # Rebuild collection hierarchy among the clones.
                            for each in coll_set1:
                                # if "Theme" in each.member_of_names_list:
                                if each.collection_set:
                                    for collset in each.collection_set:
                                        p = coll_set_dict[each._id]
                                        parent_node = node_collection.one(
                                            {'_id': ObjectId(str(p))})
                                        n = coll_set_dict[collset]
                                        sub_node = node_collection.one(
                                            {'_id': ObjectId(str(n))})
                                        parent_node.collection_set.append(
                                            sub_node._id)
                                        parent_node.save(groupid=group_id)
                # Return the themes card view listing theme nodes.
                nodes.rewind()
                nodes_dict = nodes
            else:
                # --- Editing an existing node ---
                themes_list_items = False
                create_edit = False
                themes_hierarchy = True
                theme_topic_node = node_collection.one(
                    {'_id': ObjectId(app_GST._id)})

                if theme_GST._id in app_GST.member_of and translate != "true":
                    # Editing a Theme: enforce name uniqueness among all themes.
                    root_themes = []
                    nodes = node_collection.find({
                        'member_of': {
                            '$all': [theme_GST._id]
                        },
                        'group_set': {
                            '$all': [ObjectId(group_id)]
                        }
                    })
                    for each in nodes:
                        root_themes.append(each.name)
                    if name:
                        if name.upper() != theme_topic_node.name.upper():
                            # Name changed: save only if the new name is unique.
                            if not name.upper() in (
                                    theme_name.upper()
                                    for theme_name in root_themes):
                                # get_node_common_fields(request, theme_topic_node, group_id, theme_GST)
                                theme_topic_node.save(
                                    is_changed=get_node_common_fields(
                                        request, theme_topic_node, group_id,
                                        theme_GST),
                                    groupid=group_id)
                        else:
                            # Name unchanged but other fields may have changed.
                            theme_topic_node.save(
                                is_changed=get_node_common_fields(
                                    request, theme_topic_node, group_id,
                                    theme_GST),
                                groupid=group_id)
                    if translate != "true":
                        # Store and maintain the collection order.
                        if collection_list != '':
                            theme_topic_node.collection_set = []
                            collection_list = collection_list.split(",")
                            i = 0
                            while (i < len(collection_list)):
                                node_id = ObjectId(collection_list[i])
                                if node_collection.one({"_id": node_id}):
                                    theme_topic_node.collection_set.append(node_id)
                                i = i + 1
                            theme_topic_node.save(groupid=group_id)
                    title = theme_GST.name
                    nodes.rewind()
                    nodes_dict = nodes
                    # Return to the Themes hierarchy (card view).
                    themes_list_items = False
                    create_edit = False
                    themes_hierarchy = False
                    themes_cards = True

                elif theme_item_GST._id in app_GST.member_of and translate != "true":
                    # Editing a Theme Item.
                    title = "Theme Item"
                    dict_drawer = {}
                    dict2 = []
                    node = app_GST
                    prior_theme_collection = []
                    parent_nodes_collection = ""
                    # Display the theme-topic drawer while creating/editing.
                    checked = "theme_item"
                    # drawers = get_drawers(group_id, node._id, node.collection_set, checked)
                    # Fetch drawer2: the node's direct children.
                    for k in node.collection_set:
                        obj = node_collection.one({'_id': ObjectId(k)})
                        dict2.append(obj)
                    dict_drawer['2'] = dict2
                    # drawers = dict_drawer
                    drawer = dict_drawer['2']

                    # Uniqueness within the parent theme-item's collection.
                    nodes = node_collection.find({
                        'member_of': {
                            '$all': [theme_item_GST._id]
                        },
                        'group_set': {
                            '$all': [ObjectId(group_id)]
                        }
                    })
                    for each in nodes:
                        if app_GST._id in each.collection_set:
                            for k in each.collection_set:
                                prior_theme = node_collection.one(
                                    {'_id': ObjectId(k)})
                                prior_theme_collection.append(prior_theme.name)
                            parent_nodes_collection = json.dumps(
                                prior_theme_collection)
                    if not prior_theme_collection:
                        # No theme-item parent: the parent is a root Theme.
                        root_nodes = node_collection.find({
                            'member_of': {
                                '$all': [theme_GST._id]
                            },
                            'group_set': {
                                '$all': [ObjectId(group_id)]
                            }
                        })
                        for k in root_nodes:
                            if app_GST._id in k.collection_set:
                                root_themes = []
                                root_themes_id = []
                                for l in k.collection_set:
                                    objs = node_collection.one(
                                        {'_id': ObjectId(l)})
                                    root_themes.append(objs.name)
                                    root_themes_id.append(objs._id)
                    # Sub-theme-item names, for uniqueness within this item.
                    nodes_list = []
                    for each in app_GST.collection_set:
                        sub_theme = node_collection.one(
                            {'_id': ObjectId(each)})
                        nodes_list.append(sub_theme.name)
                    nodes_list = json.dumps(nodes_list)

                    if name:
                        if name.upper() != theme_topic_node.name.upper():
                            # Name changed.
                            if theme_topic_node._id in root_themes_id:
                                # Editing a node among root theme items.
                                if not name.upper() in (
                                        theme_name.upper()
                                        for theme_name in root_themes):
                                    theme_topic_node.save(
                                        is_changed=get_node_common_fields(
                                            request, theme_topic_node,
                                            group_id, theme_item_GST),
                                        groupid=group_id)
                            else:
                                # Editing within a prior_theme_collection hierarchy.
                                if not name.upper() in (theme_name.upper(
                                ) for theme_name in prior_theme_collection):
                                    theme_topic_node.save(
                                        is_changed=get_node_common_fields(
                                            request, theme_topic_node,
                                            group_id, theme_item_GST),
                                        groupid=group_id)
                        else:
                            # Name unchanged but other fields may have changed.
                            theme_topic_node.save(
                                is_changed=get_node_common_fields(
                                    request, theme_topic_node, group_id,
                                    theme_item_GST),
                                groupid=group_id)
                    if translate != "true" and collection_list:
                        # Store and maintain the collection order.
                        if collection_list != '':
                            theme_topic_node.collection_set = []
                            collection_list = collection_list.split(",")
                            i = 0
                            while (i < len(collection_list)):
                                node_id = ObjectId(collection_list[i])
                                if node_collection.one({"_id": node_id}):
                                    theme_topic_node.collection_set.append(node_id)
                                i = i + 1
                            theme_topic_node.save(groupid=group_id)
                    # Return to the theme-item edit view.
                    if theme_topic_node:
                        theme_topic_node.reload()
                        node = theme_topic_node
                    create_edit = True
                    themes_hierarchy = False

                elif topic_GST._id in app_GST.member_of:
                    # Editing a Topic.
                    root_topics = []
                    nodes_list = []
                    # Root topics, for name uniqueness.
                    nodes = node_collection.find({
                        'member_of': {
                            '$all': [topic_GST._id]
                        },
                        'group_set': {
                            '$all': [ObjectId(group_id)]
                        }
                    })
                    for each in nodes:
                        if each.collection_set:
                            for k in each.collection_set:
                                nodes_list.append(k)
                    nodes.rewind()
                    for each in nodes:
                        if each._id not in nodes_list:
                            root_topics.append(each.name)

                    if name:
                        if theme_topic_node.name != name:
                            topic_name = theme_topic_node.name
                            if not name.upper() in (
                                    theme_name.upper()
                                    for theme_name in root_topics):
                                theme_topic_node.save(
                                    is_changed=get_node_common_fields(
                                        request, theme_topic_node, group_id,
                                        topic_GST),
                                    groupid=group_id)
                            elif topic_name.upper() == name.upper():
                                theme_topic_node.save(
                                    is_changed=get_node_common_fields(
                                        request, theme_topic_node, group_id,
                                        topic_GST),
                                    groupid=group_id)
                        else:
                            theme_topic_node.save(
                                is_changed=get_node_common_fields(
                                    request, theme_topic_node, group_id,
                                    topic_GST),
                                groupid=group_id)
                    if collection_list:
                        # Store and maintain the collection order.
                        if collection_list != '':
                            theme_topic_node.collection_set = []
                            collection_list = collection_list.split(",")
                            i = 0
                            while (i < len(collection_list)):
                                node_id = ObjectId(collection_list[i])
                                if node_collection.one({"_id": node_id}):
                                    theme_topic_node.collection_set.append(
                                        node_id)
                                i = i + 1
                            theme_topic_node.save(groupid=group_id)
                    title = topic_GST.name

                    # Fill metadata while creating/editing the topic node.
                    metadata = request.POST.get("metadata_info", '')
                    if metadata:
                        # Only while metadata editing.
                        if metadata == "metadata":
                            if theme_topic_node:
                                get_node_metadata(request, theme_topic_node)

                    if prior_node_list != '':
                        theme_topic_node.prior_node = []
                        prior_node_list = prior_node_list.split(",")
                        i = 0
                        while (i < len(prior_node_list)):
                            node_id = ObjectId(prior_node_list[i])
                            if node_collection.one({"_id": node_id}):
                                theme_topic_node.prior_node.append(node_id)
                            i = i + 1
                        theme_topic_node.save(groupid=group_id)
                    if teaches_list != '':
                        teaches_list = teaches_list.split(",")
                        create_grelation_list(theme_topic_node._id, "teaches",
                                              teaches_list)
                    if assesses_list != '':
                        assesses_list = assesses_list.split(",")
                        create_grelation_list(theme_topic_node._id, "assesses",
                                              assesses_list)
                    # Return to the topic edit view.
                    if theme_topic_node:
                        theme_topic_node.reload()
                        node = theme_topic_node
                    create_edit = True
                    themes_hierarchy = False
    else:
        # --- GET: render the create/edit forms ---
        app_node = None
        nodes_list = []
        app_GST = node_collection.find_one({"_id": ObjectId(app_set_id)})
        # print "\napp_GST in else: ",app_GST.name,"\n"
        if app_GST:
            # Adding a new Theme / Topic.
            if app_GST.name == "Theme" or app_GST.name == "Topic" or translate == True:
                print "22222"
                title = app_GST.name
                node = ""
                root_themes = []
                # Root nodes, for uniqueness while creating new themes.
                nodes = node_collection.find({
                    'member_of': {
                        '$all': [app_GST._id]
                    },
                    'group_set': {
                        '$all': [ObjectId(group_id)]
                    }
                })
                for each in nodes:
                    if each.collection_set:
                        for k in each.collection_set:
                            nodes_list.append(k)
                nodes.rewind()
                for each in nodes:
                    if each._id not in nodes_list:
                        root_themes.append(each.name)
                root_themes = json.dumps(root_themes)
                nodes_list = root_themes
            else:
                if theme_GST._id in app_GST.member_of:
                    # Editing a Theme.
                    title = "Theme"
                    node = app_GST
                    prior_theme_collection = []
                    parent_nodes_collection = ""
                    drawer = []
                if theme_item_GST._id in app_GST.member_of:
                    # Editing a Theme Item.
                    title = "Theme Item"
                    dict_drawer = {}
                    dict2 = []
                    node = app_GST
                    prior_theme_collection = []
                    parent_nodes_collection = ""
                    # Display the theme-topic drawer while create/edit.
                    checked = "theme_item"
                    # drawers = get_drawers(group_id, node._id, node.collection_set, checked)
                    for k in node.collection_set:
                        obj = node_collection.one({'_id': ObjectId(k)})
                        dict2.append(obj)
                    dict_drawer['2'] = dict2
                    drawer = dict_drawer['2']
                    # Uniqueness within the parent theme-item's collection.
                    nodes = node_collection.find({
                        'member_of': {
                            '$all': [theme_item_GST._id]
                        },
                        'group_set': {
                            '$all': [ObjectId(group_id)]
                        }
                    })
                    for each in nodes:
                        if app_GST._id in each.collection_set:
                            for k in each.collection_set:
                                prior_theme = node_collection.one(
                                    {'_id': ObjectId(k)})
                                prior_theme_collection.append(prior_theme.name)
                            parent_nodes_collection = json.dumps(
                                prior_theme_collection)
                    # Sub-theme names, for uniqueness within this item.
                    for each in app_GST.collection_set:
                        sub_theme = node_collection.one(
                            {'_id': ObjectId(each)})
                        nodes_list.append(sub_theme.name)
                    nodes_list = json.dumps(nodes_list)
                elif topic_GST._id in app_GST.member_of:
                    # Editing a Topic.
                    title = topic_GST.name
                    node = app_GST
                    prior_theme_collection = []
                    parent_nodes_collection = ""
                    node.get_neighbourhood(node.member_of)
                    # Uniqueness within the parent theme-item's collection.
                    nodes = node_collection.find({
                        'member_of': {
                            '$all': [theme_item_GST._id]
                        },
                        'group_set': {
                            '$all': [ObjectId(group_id)]
                        }
                    })
                    for each in nodes:
                        if app_GST._id in each.collection_set:
                            for k in each.collection_set:
                                prior_theme = node_collection.one(
                                    {'_id': ObjectId(k)})
                                prior_theme_collection.append(prior_theme.name)
                            parent_nodes_collection = json.dumps(
                                prior_theme_collection)

    if translate:
        # Render the translation page for the collection tree.
        global list_trans_coll
        list_trans_coll = []
        trans_coll_list = get_coll_set(str(app_GST._id))
        print LANGUAGES
        return render_to_response(
            "ndf/translation_page.html", {
                'group_id': group_id,
                'groupid': group_id,
                'title': title,
                'node': app_GST,
                'lan': LANGUAGES,
                'list1': trans_coll_list
            },
            context_instance=RequestContext(request))

    if title == "Topic":
        # NOTE(review): 'app' is not defined in this function —
        # presumably a module-level name; verify before relying on 'appId'.
        return render_to_response("ndf/node_edit_base.html", {
            'group_id': group_id,
            'groupid': group_id,
            'drawer': drawer,
            'themes_cards': themes_cards,
            'shelf_list': shelf_list,
            'shelves': shelves,
            'create_edit': create_edit,
            'themes_hierarchy': themes_hierarchy,
            'app_id': app_id,
            'appId': app._id,
            'nodes_list': nodes_list,
            'title': title,
            'node': node,
            'parent_nodes_collection': parent_nodes_collection,
            'theme_GST_id': theme_GST._id,
            'theme_item_GST_id': theme_item_GST._id,
            'topic_GST_id': topic_GST._id,
            'themes_list_items': themes_list_items,
            'nodes': nodes_dict,
            'lan': LANGUAGES
        },
                                  context_instance=RequestContext(request))

    return render_to_response("ndf/theme.html", {
        'group_id': group_id,
        'groupid': group_id,
        'drawer': drawer,
        'themes_cards': themes_cards,
        # NOTE(review): 'theme_GST' key is duplicated in the original source;
        # kept as-is (the second assignment wins — harmless but redundant).
        'theme_GST': theme_GST,
        'theme_GST': theme_GST,
        'shelf_list': shelf_list,
        'shelves': shelves,
        'create_edit': create_edit,
        'themes_hierarchy': themes_hierarchy,
        'app_id': app_id,
        'appId': app._id,
        'nodes_list': nodes_list,
        'title': title,
        'node': node,
        'parent_nodes_collection': parent_nodes_collection,
        'theme_GST_id': theme_GST._id,
        'theme_item_GST_id': theme_item_GST._id,
        'topic_GST_id': topic_GST._id,
        'themes_list_items': themes_list_items,
        'nodes': nodes_dict,
        'lan': LANGUAGES
    },
                              context_instance=RequestContext(request))
asset_content_obj = None if node_id: asset_content_obj = node_collection.one({'_id': ObjectId(node_id)}) else: asset_content_obj = node_collection.collection.GSystem() asset_content_obj.fill_gstystem_values(request=request, uploaded_file=files[0], **kwargs) asset_content_obj.fill_node_values(**kwargs) asset_content_obj.save(groupid=group_id) asset_contents_list = [asset_content_obj._id] rt_has_asset_content = node_collection.one({'_type': 'RelationType', 'name': 'has_assetcontent'}) asset_grels = triple_collection.find({'_type': 'GRelation', \ 'relation_type': rt_has_asset_content._id,'subject': asset_obj._id}, {'_id': 0, 'right_subject': 1}) for each_asset in asset_grels: asset_contents_list.append(each_asset['right_subject']) create_grelation(asset_obj._id, rt_has_asset_content, asset_contents_list) active_user_ids_list = [request.user.id] if GSTUDIO_BUDDY_LOGIN: active_user_ids_list += Buddy.get_buddy_userids_list_within_datetime(request.user.id, datetime.datetime.now()) # removing redundancy of user ids: active_user_ids_list = dict.fromkeys(active_user_ids_list).keys() counter_objs_cur = Counter.get_counter_objs_cur(active_user_ids_list, group_id) # counter_obj = Counter.get_counter_obj(request.user.id, group_id) for each_counter_obj in counter_objs_cur: each_counter_obj['file']['created'] += 1
def topic_detail_view(request, group_id, app_Id=None):
    """Render the detail page of a single Topic node.

    Builds the breadcrumb trail from the 'nav_li' GET parameter (prefixing
    the owning Theme, which is not part of the tree-level navigation list),
    resolves the topic's prior node, collects the logged-in user's shelves,
    and renders ndf/topic_details.html.
    """
    # Resolve group: accept either an ObjectId string or a group name.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)

    obj = node_collection.one({'_id': ObjectId(app_Id)})
    app = node_collection.one({'_id': ObjectId(obj.member_of[0])})
    app_id = app._id
    topic = "Topic"
    theme_id = None
    prior_obj = None

    # Navigation list (comma-separated ObjectIds) from theme map to topic.
    nav_l = request.GET.get('nav_li', '')
    breadcrumbs_list = []
    nav_li = ""
    if nav_l:
        nav_li = nav_l
        id_sequence = str(nav_l).split(",")
        first_id = id_sequence[0]
        for oid_str in id_sequence:
            crumb_node = node_collection.one({'_id': ObjectId(oid_str)})
            # The Theme itself is not in nav_l (it sits above the tree
            # hierarchy), so prepend the first element's prior node — the
            # Theme — to get the full theme-to-topic path.
            if oid_str == first_id and crumb_node.prior_node:
                theme_obj = node_collection.one(
                    {'_id': ObjectId(crumb_node.prior_node[0])})
                theme_id = theme_obj._id
                breadcrumbs_list.append((str(theme_obj._id), theme_obj.name))
            breadcrumbs_list.append((str(crumb_node._id), crumb_node.name))

    if obj and obj.prior_node:
        prior_obj = node_collection.one(
            {'_id': ObjectId(obj.prior_node[0])})

    # --- shelf ---
    # Collect the user's shelves as {shelf name: [item names]}.
    shelves = []
    shelf_list = {}
    auth = node_collection.one({
        '_type': 'Author',
        'name': unicode(request.user.username)
    })
    if auth:
        has_shelf_RT = node_collection.one({
            '_type': 'RelationType',
            'name': u'has_shelf'
        })
        shelf = triple_collection.find({
            '_type': 'GRelation',
            'subject': ObjectId(auth._id),
            'relation_type': has_shelf_RT._id
        })
        shelf_list = {}
        if shelf:
            for shelf_grel in shelf:
                shelf_node = node_collection.one(
                    {'_id': ObjectId(shelf_grel.right_subject)})
                shelves.append(shelf_node)
                item_names = shelf_list[shelf_node.name] = []
                for item_id in shelf_node.collection_set:
                    item_node = node_collection.one({'_id': ObjectId(item_id)})
                    item_names.append(item_node.name)
        else:
            shelves = []

    # print "theme_id: ", theme_id
    return render_to_response('ndf/topic_details.html', {
        'node': obj,
        'app_id': app_id,
        "theme_id": theme_id,
        "prior_obj": prior_obj,
        'group_id': group_id,
        'shelves': shelves,
        'topic': topic,
        'nav_list': nav_li,
        'groupid': group_id,
        'shelf_list': shelf_list,
        'breadcrumbs_list': breadcrumbs_list
    },
                              context_instance=RequestContext(request))
def parse_data_create_gsystem(json_file_path): json_file_content = "" try: with open(json_file_path) as json_file: json_file_content = json_file.read() json_documents_list = json.loads(json_file_content) # Initiating empty node obj and other related data variables node = node_collection.collection.GSystem() node_keys = node.keys() node_structure = node.structure # print "\n\n---------------", node_keys json_documents_list_spaces = json_documents_list json_documents_list = [] # Removes leading and trailing spaces from keys as well as values for json_document_spaces in json_documents_list_spaces: json_document = {} for key_spaces, value_spaces in json_document_spaces.iteritems(): json_document[key_spaces.strip().lower()] = value_spaces.strip() json_documents_list.append(json_document) except Exception as e: error_message = "\n!! While parsing the file ("+json_file_path+") got following error...\n " + str(e) log_print(error_message) raise error_message for i, json_document in enumerate(json_documents_list): info_message = "\n\n\n********** Processing row number : ["+ str(i + 2) + "] **********" log_print(info_message) try: parsed_json_document = {} attribute_relation_list = [] for key in json_document.iterkeys(): parsed_key = key.lower() if parsed_key in node_keys: # print parsed_key # adding the default field values e.g: created_by, member_of # created_by: if parsed_key == "created_by": if json_document[key]: temp_user_id = get_user_id(json_document[key].strip()) if temp_user_id: parsed_json_document[parsed_key] = temp_user_id else: parsed_json_document[parsed_key] = nroer_team_id else: parsed_json_document[parsed_key] = nroer_team_id # print "---", parsed_json_document[parsed_key] # contributors: elif parsed_key == "contributors": if json_document[key]: contrib_list = json_document[key].split(",") temp_contributors = [] for each_user in contrib_list: user_id = get_user_id(each_user.strip()) if user_id: temp_contributors.append(user_id) 
parsed_json_document[parsed_key] = temp_contributors else: parsed_json_document[parsed_key] = [nroer_team_id] # print "===", parsed_json_document[parsed_key] # tags: elif (parsed_key == "tags") and json_document[key]: parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key)) # print parsed_json_document[parsed_key] # member_of: elif parsed_key == "member_of": parsed_json_document[parsed_key] = [file_gst._id] # print parsed_json_document[parsed_key] # --- END of adding the default field values else: # parsed_json_document[parsed_key] = json_document[key] parsed_json_document[parsed_key] = cast_to_data_type(json_document[key], node_structure.get(parsed_key)) # print parsed_json_document[parsed_key] # --- END of processing for remaining fields else: # key is not in the node_keys parsed_json_document[key] = json_document[key] attribute_relation_list.append(key) # print "key : ", key # --END of for loop --- # calling method to create File GSystems node_obj = create_resource_gsystem(parsed_json_document, i) nodeid = node_obj._id if node_obj else None # print "nodeid : ", nodeid # ----- for updating language ----- # node_lang = get_language_tuple(eval(parsed_json_document['language'])) # print "============= :", node_lang # print "============= lang :", node_obj.language # if node_obj and node_obj.language != node_lang: # update_res = node_collection.collection.update( # {'_id': ObjectId(nodeid), 'language': {'$ne': node_lang}}, # {'$set': {'language': node_lang}}, # upsert=False, # multi=False # ) # if update_res['updatedExisting']: # node_obj.reload() # info_message = "\n\n- Update to language of resource: " + str(update_res) # log_print(info_message) # info_message = "\n\n- Now language of resource updates to: " + str(node_obj.language) # log_print(info_message) # print "============= lang :", node_obj.language # ----- END of updating language ----- collection_name = parsed_json_document.get('collection', '') if 
collection_name and nodeid: collection_node = node_collection.find_one({ # '_type': 'File', 'member_of': {'$in': [file_gst._id]}, 'group_set': {'$in': [home_group._id]}, 'name': unicode(collection_name) }) if collection_node: add_to_collection_set(collection_node, nodeid) thumbnail_url = parsed_json_document.get('thumbnail') # print "thumbnail_url : ", thumbnail_url if (thumbnail_url and nodeid) and (thumbnail_url != parsed_json_document.get('thumbnail') ): try: info_message = "\n\n- Attaching thumbnail to resource\n" log_print(info_message) attach_resource_thumbnail(thumbnail_url, nodeid, parsed_json_document, i) except Exception, e: print e # print type(nodeid), "-------", nodeid, "\n" if (thumbnail_url == parsed_json_document.get('resource_link')) and (warehouse_group._id in node_obj.group_set) : for i,each_groupid in enumerate(node_obj.group_set): if each_groupid == warehouse_group._id: node_obj.group_set.pop(i) if home_group._id not in node_obj.group_set: node_obj.group_set.append(home_group._id) node_obj.save() # create thread node if isinstance(nodeid, ObjectId): thread_result = create_thread_obj(nodeid) # starting processing for the attributes and relations saving if isinstance(nodeid, ObjectId) and attribute_relation_list: node = node_collection.one({ "_id": ObjectId(nodeid) }) gst_possible_attributes_dict = node.get_possible_attributes(file_gst._id) # print gst_possible_attributes_dict relation_list = [] json_document['name'] = node.name # Write code for setting atrributes for key in attribute_relation_list: is_relation = True # print "\n", key, "----------\n" for attr_key, attr_value in gst_possible_attributes_dict.iteritems(): # print "\n", attr_key,"======", attr_value if key == attr_key: # print key is_relation = False # setting value to "0" for int, float, long (to avoid casting error) # if (attr_value['data_type'] in [int, float, long]) and (not json_document[key]): # json_document[key] = 0 if json_document[key]: # print "key : ", key, "\nvalue : 
",json_document[key] info_message = "\n- For GAttribute parsing content | key: '" + attr_key + "' having value: '" + json_document[key] + "'" log_print(info_message) cast_to_data_type(json_document[key], attr_value['data_type']) if attr_value['data_type'] == "curricular": # setting int values for CR/XCR if json_document[key] == "CR": json_document[key] = 1 elif json_document[key] == "XCR": json_document[key] = 0 else: # needs to be confirm json_document[key] = 0 # json_document[key] = bool(int(json_document[key])) # print attr_value['data_type'], "@@@@@@@@@ : ", json_document[key] json_document[key] = cast_to_data_type(json_document[key], attr_value['data_type']) # print key, " !!!!!!!!! : ", json_document[key] subject_id = node._id # print "\n-----\nsubject_id: ", subject_id attribute_type_node = node_collection.one({ '_type': "AttributeType", '$or': [ {'name': {'$regex': "^"+attr_key+"$", '$options': 'i'} }, {'altnames': {'$regex': "^"+attr_key+"$", '$options': 'i'} } ] }) # print "\nattribute_type_node: ", attribute_type_node.name object_value = json_document[key] # print "\nobject_value: ", object_value ga_node = None info_message = "\n- Creating GAttribute ("+node.name+" -- "+attribute_type_node.name+" -- "+str(json_document[key])+") ...\n" log_print(info_message) ga_node = create_gattribute(subject_id, attribute_type_node, object_value) info_message = "- Created ga_node : "+ str(ga_node.name) + "\n" log_print(info_message) # To break outer for loop as key found break else: error_message = "\n!! DataNotFound: No data found for field ("+str(attr_key)+") while creating GSystem ( -- "+str(node.name)+")\n" log_print(error_message) # ---END of if (key == attr_key) if is_relation: relation_list.append(key) if not relation_list: # No possible relations defined for this node info_message = "\n!! 
("+str(node.name)+"): No possible relations defined for this node.\n" log_print(info_message) return gst_possible_relations_dict = node.get_possible_relations(file_gst._id) # processing each entry in relation_list # print "=== relation_list : ", relation_list for key in relation_list: is_relation = True for rel_key, rel_value in gst_possible_relations_dict.iteritems(): if key == rel_key: # if key == "teaches": is_relation = False if json_document[key]: # most often the data is hierarchy sep by ":" if ":" in json_document[key]: formatted_list = [] temp_teaches_list = json_document[key].replace("\n", "").split(":") # print "\n temp_teaches", temp_teaches for v in temp_teaches_list: formatted_list.append(v.strip()) right_subject_id = [] # print "~~~~~~~~~~~", formatted_list # rsub_id = _get_id_from_hierarchy(formatted_list) rsub_id = get_id_from_hierarchy(formatted_list) # print "=== rsub_id : ", rsub_id hierarchy_output = None # checking every item in hierarchy exist and leaf node's _id found if rsub_id: right_subject_id.append(rsub_id) json_document[key] = right_subject_id # print json_document[key] else: error_message = "\n!! While creating teaches rel: Any one of the item in hierarchy"+ str(json_document[key]) +"does not exist in Db. \n!! 
So relation: " + str(key) + " cannot be created.\n" log_print(error_message) break # sometimes direct leaf-node may be present without hierarchy and ":" else: formatted_list = list(json_document[key].strip()) right_subject_id = [] right_subject_id.append(_get_id_from_hierarchy(formatted_list)) json_document[key] = right_subject_id # print "\n----------", json_document[key] info_message = "\n- For GRelation parsing content | key: " + str(rel_key) + " -- " + str(json_document[key]) log_print(info_message) # print list(json_document[key]) # perform_eval_type(key, json_document, "GSystem", "GSystem") for right_subject_id in json_document[key]: # print "\njson_document[key]: ", json_document[key] subject_id = node._id # print "subject_id : ", subject_id # print "node.name: ", node.name # Here we are appending list of ObjectIds of GSystemType's type_of field # along with the ObjectId of GSystemType's itself (whose GSystem is getting created) # This is because some of the RelationType's are holding Base class's ObjectId # and not that of the Derived one's # Delibrately keeping GSystemType's ObjectId first in the list # And hence, used $in operator in the query! 
rel_subject_type = [] rel_subject_type.append(file_gst._id) if file_gst.type_of: rel_subject_type.extend(file_gst.type_of) relation_type_node = node_collection.one({'_type': "RelationType", '$or': [{'name': {'$regex': "^"+rel_key+"$", '$options': 'i'}}, {'altnames': {'$regex': "^"+rel_key+"$", '$options': 'i'}}], 'subject_type': {'$in': rel_subject_type} }) right_subject_id_or_list = [] right_subject_id_or_list.append(ObjectId(right_subject_id)) nodes = triple_collection.find({'_type': "GRelation", 'subject': subject_id, 'relation_type': relation_type_node._id }) # sending list of all the possible right subject to relation for n in nodes: if not n.right_subject in right_subject_id_or_list: right_subject_id_or_list.append(n.right_subject) info_message = "\n- Creating GRelation ("+ str(node.name)+ " -- "+ str(rel_key)+ " -- "+ str(right_subject_id_or_list)+") ..." log_print(info_message) gr_node = create_grelation(subject_id, relation_type_node, right_subject_id_or_list) info_message = "\n- Grelation processing done.\n" log_print(info_message) # To break outer for loop if key found break else: error_message = "\n!! DataNotFound: No data found for relation ("+ str(rel_key)+ ") while creating GSystem (" + str(file_gst.name) + " -- " + str(node.name) + ")\n" log_print(error_message) break # print relation_list else: info_message = "\n!! Either resource is already created or file is already saved into filehive/DB or file not found" log_print(info_message) continue except Exception as e: error_message = "\n While creating ("+str(json_document['name'])+") got following error...\n " + str(e) print "!!!!!!!!!!!!EEEEEEEERRRRRRRRRRRRRROOOOOOORRRRRRRRRRRRR......................" 
# file_error_msg = "\nFile with following details got an error: \n" file_error_msg = "\n========================" + " Row No : " + str(i + 2) + " ========================\n" # file_error_msg += "- Row No : " + str(i + 2) + "\n" file_error_msg += "- Name : " + json_document["name"] + "\n" file_error_msg += "- File Name: " + json_document["file_name"] + "\n" file_error_msg += "- ERROR : " + str(e) + "\n\n" file_error_msg += "- Following are the row details : \n\n" + unicode(json.dumps(json_document, sort_keys=True, indent=4, ensure_ascii=False)) + "\n" file_error_msg += "============================================================\n\n\n" log_error_rows.append(file_error_msg) log_print(error_message)
def user_template_view(request, group_id): auth_group = None group_list = [] group_cur = node_collection.find({ '_type': "Group", 'name': { '$nin': ["home", request.user.username] } }).limit(4) for i in group_cur: group_list.append(i) blank_list = [] attributetype_assignee = node_collection.find_one({ "_type": 'AttributeType', 'name': 'Assignee' }) attr_assignee = triple_collection.find({ "_type": "GAttribute", "attribute_type.$id": attributetype_assignee._id, "object_value": request.user.username }) for attr in attr_assignee: task_node = node_collection.find_one({'_id': attr.subject}) blank_list.append(task_node) notification_object = notification.NoticeSetting.objects.filter( user_id=request.user.id) for each in notification_object: ntid = each.notice_type_id ntype = notification.NoticeType.objects.get(id=ntid) label = ntype.label.split("-")[0] blank_list.append({'label': label, 'display': ntype.display}) blank_list.reverse() blank_list = [] activity = "" activity_user = node_collection.find({ '$and': [{ '$or': [{ '_type': 'GSystem' }, { '_type': 'Group' }, { '_type': 'File' }] }, { '$or': [{ 'created_by': request.user.id }, { 'modified_by': request.user.id }] }] }).sort('last_update', -1).limit(4) for each in activity_user: if each.created_by == each.modified_by: if each.last_update == each.created_at: activity = 'created' else: activity = 'modified' else: activity = 'created' if each._type == 'Group': blank_list.append(each) else: member_of = node_collection.find_one({"_id": each.member_of[0]}) blank_list.append(each) print blank_list template = "ndf/task_card_view.html" #variable = RequestContext(request, {'TASK_inst': self_task,'group_name':group_name,'group_id': group_id, 'groupid': group_id,'send':send}) variable = RequestContext( request, { 'TASK_inst': blank_list, 'group_name': group_id, 'group_id': group_id, 'groupid': group_id }) return render_to_response(template, variable)
def details(request, group_id, topic_id):
    """Render the WikiData detail page for one topic.

    Parameters:
        request  -- Django HttpRequest
        group_id -- group ObjectId string or group name
        topic_id -- ObjectId (string) of the topic GSystem

    Removed four RequestContext/dict objects that were built but never
    used, and replaced the obfuscated "flag = 0 == 1" with False; the
    rendered output is unchanged.
    """
    # Resolve a group name to its ObjectId when the value is not one.
    try:
        group_id = ObjectId(group_id)
    except Exception:
        group_name, group_id = get_group_name_id(group_id)

    selected_topic = node_collection.one({
        "_type": u"GSystem",
        "_id": ObjectId(topic_id)
    })
    topic_coll = node_collection.find({"_type": u"GSystem"})
    topic_count = topic_coll.count()

    template = "ndf/wikidata.html"

    attribute_set = triple_collection.find({
        "_type": u"GAttribute",
        "subject": ObjectId(topic_id)
    })
    relation_set = selected_topic.get_possible_relations(
        selected_topic.member_of)

    # Map each relation name to a subject name.  NOTE(review): when a
    # relation offers several possible subjects only the last one survives
    # -- behaviour preserved from the original implementation.
    relation_set_dict = {}
    for rk, rv in relation_set.iteritems():
        if rv["subject_or_right_subject_list"]:
            for v in rv["subject_or_right_subject_list"]:
                relation_set_dict[rk] = v["name"]

    flag = False

    return render(
        request, template, {
            'title': "WikiData Topics",
            'topic_coll': topic_coll,
            'selected_topic': selected_topic,
            'attribute_set': attribute_set,
            'relation_set': relation_set_dict,
            'groupid': group_id,
            'group_id': group_id,
            'flag': flag,
            'topic_count': topic_count,
            'node': selected_topic
        })
def group_dashboard(request, group_id):
    """Return data required for a group's dashboard.

    Parameters:
        request  -- Django HttpRequest
        group_id -- group ObjectId string or group name

    Collects the group's current profile/banner pictures, its previously
    used (status DELETED) profile pictures, and -- when a
    StudentCourseEnrollment GSystemType exists -- the table of
    enrollments awaiting approval.
    """
    has_profile_pic = None      # unused below; kept for compatibility
    profile_pic_image = None    # current profile picture node, if any
    old_profile_pics = []       # previously used (DELETED) profile pics
    has_profile_pic_str = ""    # unused below; kept for compatibility
    is_already_selected = None  # unused below; kept for compatibility
    # Resolve a group name to its ObjectId when the value is not one.
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    group_obj = node_collection.one({"_id": ObjectId(group_id)})
    has_profile_pic_rt = node_collection.one({
        '_type': 'RelationType',
        'name': unicode('has_profile_pic')
    })
    # GRelations to older profile pictures are soft-deleted, not removed.
    all_old_prof_pics = triple_collection.find({
        '_type': "GRelation",
        "subject": group_obj._id,
        'relation_type.$id': has_profile_pic_rt._id,
        'status': u"DELETED"
    })
    # NOTE(review): a cursor is always truthy, so this guard never skips.
    if all_old_prof_pics:
        for each_grel in all_old_prof_pics:
            n = node_collection.one({'_id': ObjectId(each_grel.right_subject)})
            old_profile_pics.append(n)
    banner_pic = ""
    # Pull the current profile and banner pictures out of relation_set.
    for each in group_obj.relation_set:
        if "has_profile_pic" in each:
            if each["has_profile_pic"]:
                profile_pic_image = node_collection.one({
                    '_type': {
                        "$in": ["GSystem", "File"]
                    },
                    '_id': each["has_profile_pic"][0]
                })
        if "has_Banner_pic" in each:
            if each["has_Banner_pic"]:
                banner_pic = node_collection.one({
                    '_type': {
                        "$in": ["GSystem", "File"]
                    },
                    '_id': each["has_Banner_pic"][0]
                })
    # Approve StudentCourseEnrollment view
    approval = False
    enrollment_details = []
    enrollment_columns = []
    sce_gst = node_collection.one({
        '_type': "GSystemType",
        'name': "StudentCourseEnrollment"
    })
    if sce_gst:
        # Get StudentCourseEnrollment nodes which are there for approval.
        sce_cur = node_collection.find(
            {
                'member_of': sce_gst._id,
                'group_set': ObjectId(group_id),
                # "attribute_set.enrollment_status": {"$nin": [u"OPEN"]},
                "attribute_set.enrollment_status": {
                    "$in": [u"PENDING", "APPROVAL"]
                },
                'status': u"PUBLISHED"
            },
            {'member_of': 1})
        if sce_cur.count():
            approval = True
            enrollment_columns = [
                "College", "Course", "Status", "Enrolled", "Remaining",
                "Approved", "Rejected"
            ]
            for sce_gs in sce_cur:
                # Populate relation/attribute fields on the node in place.
                sce_gs.get_neighbourhood(sce_gs.member_of)
                data = {}
                # approve_task = sce_gs.has_corresponding_task[0]
                approve_task = sce_gs.has_current_approval_task[0]
                approve_task.get_neighbourhood(approve_task.member_of)
                data["Status"] = approve_task.Status
                # Check the corresponding task's status and continue with
                # the next node if "Closed" -- only 'In Progress'/'New'
                # tasks are listed.
                if data["Status"] == "Closed":
                    continue
                data["_id"] = str(sce_gs._id)
                data["College"] = sce_gs.for_college[0].name
                if len(sce_gs.for_acourse) > 1:
                    # More than one announced course means it is a
                    # Foundation Course (FC) enrollment.
                    start_enroll = None
                    end_enroll = None
                    for each in sce_gs.for_acourse[0].attribute_set:
                        if not each:
                            pass
                        elif "start_time" in each:
                            start_time = each["start_time"]
                        elif "end_time" in each:
                            end_time = each["end_time"]
                    # NOTE(review): start_time/end_time are only bound when
                    # the attribute_set carries them (start_enroll/end_enroll
                    # above are never used) -- a missing attribute would
                    # raise NameError here; confirm data guarantees this.
                    data[
                        "Course"] = "Foundation_Course" + "_" + start_time.strftime(
                            "%d-%b-%Y") + "_" + end_time.strftime("%d-%b-%Y")
                else:
                    # Courses other than FC.
                    data["Course"] = sce_gs.for_acourse[0].name
                # data["Completed On"] = sce_gs.completed_on.strftime("%d/%m/%Y")
                remaining_count = None
                enrolled_list = []
                approved_list = []
                rejected_list = []
                if sce_gs.has_key("has_enrolled"):
                    if sce_gs["has_enrolled"]:
                        enrolled_list = sce_gs["has_enrolled"]
                if sce_gs.has_key("has_approved"):
                    if sce_gs["has_approved"]:
                        approved_list = sce_gs["has_approved"]
                if sce_gs.has_key("has_rejected"):
                    if sce_gs["has_rejected"]:
                        rejected_list = sce_gs["has_rejected"]
                data["Enrolled"] = len(enrolled_list)
                data["Approved"] = len(approved_list)
                data["Rejected"] = len(rejected_list)
                # Remaining = enrolled not yet approved or rejected.
                remaining_count = len(enrolled_list) - (len(approved_list) +
                                                        len(rejected_list))
                data["Remaining"] = remaining_count
                enrollment_details.append(data)
    page = '1'
    return render_to_response("ndf/group_dashboard.html", {
        'group_id': group_id,
        'groupid': group_id,
        'old_profile_pics': old_profile_pics,
        'approval': approval,
        'enrollment_columns': enrollment_columns,
        'enrollment_details': enrollment_details,
        'prof_pic_obj': profile_pic_image,
        'banner_pic': banner_pic,
        'page': page
    }, context_instance=RequestContext(request))