def imageDashboard(request, group_id, image_id=None):
    """Render the image dashboard: File nodes that are members of the
    "Image" GSystemType within the given group.

    ``group_id`` may arrive as a group name or an author name instead of
    an ObjectId string; in that case it is resolved to the node's id.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    # Fall back to the "Image" GSystemType when no image id was supplied.
    if image_id is None:
        image_ins = collection.Node.find_one({'_type': "GSystemType", "name": "Image"})
        if image_ins:
            image_id = str(image_ins._id)
    img_col = collection.GSystem.find({
        'member_of': {'$all': [ObjectId(image_id)]},
        '_type': 'File',
        'group_set': {'$all': [ObjectId(group_id)]},
    })
    already_uploaded = request.GET.getlist('var', "")
    context = RequestContext(request, {
        'imageCollection': img_col,
        'already_uploaded': already_uploaded,
        'groupid': group_id,
        'group_id': group_id,
    })
    return render_to_response("ndf/ImageDashboard.html", context)
def publish_group(request, group_id, node):
    """Publish a group node: copy the edited page content onto it, set its
    status to PUBLISHED, record the editor, and render the group dashboard.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        # group_id may be a group name or an author name; resolve to an id.
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    target = collection.Node.one({'_id': ObjectId(node)})
    page_node, version = get_page(request, target)
    target.content = page_node.content
    target.content_org = page_node.content_org
    target.status = unicode("PUBLISHED")
    target.modified_by = int(request.user.id)
    target.save()
    context = {'group_id': group_id, 'node': target, 'groupid': group_id}
    return render_to_response("ndf/groupdashboard.html", context,
                              context_instance=RequestContext(request))
def daterange(duration):
    """Build (current, previous) ObjectId-based time-window filters.

    ``current`` matches ids newer than ``now - duration``; ``previous``
    matches the window of the same length immediately before that.
    """
    now = datetime.utcnow()
    window_start = oid.from_datetime(now - duration)
    prev_start = oid.from_datetime(now - duration * 2)
    current = pk > window_start
    previous = (pk > prev_start) & (pk < window_start)
    return current, previous
def delete_module(request, group_id, _id): """This method will delete module object and its Attribute and Relation """ ins_objectid = ObjectId() if ins_objectid.is_valid(group_id) is False : group_ins = collection.Node.find_one({'_type': "Group","name": group_id}) auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if group_ins: group_id = str(group_ins._id) else : auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if auth : group_id = str(auth._id) else : pass pageurl = request.GET.get("next", "") try: node = collection.Node.one({'_id':ObjectId(_id)}) if node: attributes = collection.Triple.find({'_type':'GAttribute','subject':node._id}) relations = collection.Triple.find({'_type':'GRelation','subject':node._id}) if attributes.count() > 0: for each in attributes: collection.Triple.one({'_id':each['_id']}).delete() if relations.count() > 0: for each in relations: collection.Triple.one({'_id':each['_id']}).delete() node.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(pageurl)
def display_thread(request, group_id, thread_id, forum_id=None):
    """Render the detail page for a forum thread.

    The parent forum is located through the thread's ``prior_node`` list;
    if no forum is found (or any error occurs) the view falls through and
    returns None.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    try:
        thread = collection.Node.one({'_id': ObjectId(thread_id)})
        forum = ""
        # The last prior_node that is a member of forum_st wins.
        for each in thread.prior_node:
            forum = collection.GSystem.one({'$and': [{'member_of': {'$all': [forum_st._id]}},
                                                     {'_id': ObjectId(each)}]})
        if forum:
            creator_name = User.objects.get(id=forum.created_by).username
            variables = RequestContext(request, {
                'forum': forum,
                'thread': thread,
                'groupid': group_id,
                'group_id': group_id,
                'eachrep': thread,
                'user': request.user,
                'forum_created_by': creator_name,
            })
            return render_to_response("ndf/thread_details.html", variables)
    except:
        # NOTE(review): bare except silently swallows all errors and the
        # view then returns None — consider narrowing and logging.
        pass
def delete_sentry(request, group_id, node_id):
    """Hide a bibliography entry (set its status to HIDDEN) and redirect to
    the ``view_entry`` page of its entry type.

    The redirect target id is the first GSystemType in the entry's
    ``member_of`` list.
    """
    # Resolve group_id when it arrives as a group/author name, not an id.
    if not ObjectId.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    gst_entry = collection.Node.one({'_id': ObjectId(node_id)})
    # member_of holds ObjectIds; take the first one directly. The original
    # sliced the list's repr ("[ObjectId('…')]"[11:-3]) and carried a dead
    # `list(s)` call whose result was discarded.
    gst_bibtex = unicode(gst_entry.member_of[0])
    op = collection.update({'_id': ObjectId(node_id)},
                           {'$set': {'status': u"HIDDEN"}})
    return HttpResponseRedirect(reverse('view_entry',
                                        kwargs={'group_id': group_id, 'node_id': gst_bibtex}))
def delete_file(request, group_id, _id): """Delete file and its data """ ins_objectid = ObjectId() if ins_objectid.is_valid(group_id) is False : group_ins = collection.Node.find_one({'_type': "Group","name": group_id}) auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if group_ins: group_id = str(group_ins._id) else : auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if auth : group_id = str(auth._id) else : pass file_collection = db[File.collection_name] auth = collection.Node.one({'_type': u'Group', 'name': unicode(request.user.username) }) pageurl = request.GET.get("next", "") try: cur = file_collection.File.one({'_id':ObjectId(_id)}) rel_obj = collection.GRelation.one({'subject': ObjectId(auth._id), 'right_subject': ObjectId(_id) }) if rel_obj : rel_obj.delete() if cur.fs_file_ids: for each in cur.fs_file_ids: cur.fs.files.delete(each) cur.delete() except Exception as e: print "Exception:", e return HttpResponseRedirect(pageurl)
def readDoc(request, _id, group_id, file_name=""):
    """Stream the primary GridFS file of a File node as an HTTP response.

    Returns an empty response when the node, its file-id list, or the
    stored GridFS file is missing.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    file_node = collection.File.one({"_id": ObjectId(_id)})
    # Guard all three conditions, then serve the first stored file.
    if (file_node is not None and file_node.fs_file_ids
            and file_node.fs.files.exists(file_node.fs_file_ids[0])):
        grid_fs_obj = file_node.fs.files.get(ObjectId(file_node.fs_file_ids[0]))
        return HttpResponse(grid_fs_obj.read(), content_type=grid_fs_obj.content_type)
    return HttpResponse("")
def getFileThumbnail(request, group_id, _id):
    """Return the thumbnail of a File node as an HTTP response.

    ``fs_file_ids[0]`` holds the original file and index 1 the thumbnail
    (as used here and in ``readDoc``). Responds with an empty body when
    the node, the thumbnail entry, or the GridFS file is missing.
    """
    # Resolve group_id when it arrives as a group/author name, not an id.
    if not ObjectId.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    file_node = collection.File.one({"_id": ObjectId(_id)})
    # Guard the list length explicitly: the original indexed [1] after only
    # a truthiness check, raising IndexError for nodes with a single file.
    if (file_node is not None and file_node.fs_file_ids
            and len(file_node.fs_file_ids) > 1
            and file_node.fs.files.exists(file_node.fs_file_ids[1])):
        f = file_node.fs.files.get(ObjectId(file_node.fs_file_ids[1]))
        return HttpResponse(f.read(), content_type=f.content_type)
    return HttpResponse("")
def view_sentry(request, group_id, node_id):
    """Display a single bibliography entry: its name, note (content_org)
    and BibTex_entry attribute value, titled with its entry type's name.
    """
    # Resolve group_id when it arrives as a group/author name, not an id.
    if not ObjectId.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    GST_SENTRY = collection.Node.one({'_id': ObjectId(node_id)})
    GST_one = collection.Node.one({'_type': 'AttributeType', 'name': 'BibTex_entry'})
    # member_of holds ObjectIds; use the first one directly. The original
    # sliced the list's repr ("[ObjectId('…')]"[11:-3]), carried a dead
    # `list(s)` call, an unused `gst_id`, and a leftover debug print.
    gst_bibtex = collection.Node.one({'_id': GST_SENTRY.member_of[0]})
    GST_SAttribute = collection.Node.one({'subject': GST_SENTRY._id,
                                          'attribute_type.$id': GST_one._id})
    Bibtex = GST_SAttribute.object_value
    gst_note = GST_SENTRY.content_org
    variable = RequestContext(request, {'name': GST_SENTRY.name,
                                        'gst_note': gst_note,
                                        'Bibtex': Bibtex,
                                        'group_id': group_id,
                                        'groupid': group_id,
                                        'title': gst_bibtex.name})
    return render_to_response("ndf/view_sentry.html", variable)
def file_edit(request, group_id, _id):
    """Edit a File node: save common fields on POST, otherwise render the
    edit form.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    file_node = collection.File.one({"_id": ObjectId(_id)})
    if request.method != "POST":
        return render_to_response("ndf/document_edit.html",
                                  {'node': file_node, 'group_id': group_id, 'groupid': group_id},
                                  context_instance=RequestContext(request))
    get_node_common_fields(request, file_node, group_id, GST_FILE)
    file_node.save()
    return HttpResponseRedirect(reverse('file_detail',
                                        kwargs={'group_id': group_id, '_id': file_node._id}))
def delete_item(self, activity_id, **kwargs):
    """Delete an activity record plus every copy of it fanned out to other
    user/project timelines.

    Requires neighborhood admin access; raises HTTPGone when the id no
    longer resolves to an activity.
    """
    require_access(c.project.neighborhood, 'admin')
    activity = Activity.query.get(_id=ObjectId(activity_id))
    if not activity:
        raise exc.HTTPGone
    # Copies share obj/target/actor/verb/tags but carry their own ids.
    # Those ids were minted close together in time, so restrict the scan
    # to a +/- 1 hour ObjectId window for an efficient search.
    created_at = activity._id.generation_time
    window = timedelta(hours=1)
    copies = Activity.query.find({
        '_id': {
            '$gt': ObjectId.from_datetime(created_at - window),
            '$lt': ObjectId.from_datetime(created_at + window),
        },
        'obj': activity.obj,
        'target': activity.target,
        'actor': activity.actor,
        'verb': activity.verb,
        'tags': activity.tags,
    }).all()
    log.info('Deleting %s copies of activity record: %s %s %s',
             len(copies), activity.actor.activity_url, activity.verb,
             activity.obj.activity_url)
    for copy in copies:
        copy.query.delete()
    return {'success': True}
def publish_page(request, group_id, node):
    """Publish a page node.

    Moderated groups (``post_node`` set) get the status change saved via
    the 'UnderModeration' path; otherwise the edited content is copied
    onto the node before publishing. Redirects to the page detail view.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    page = collection.Node.one({'_id': ObjectId(node)})
    group = collection.Node.one({'_id': ObjectId(group_id)})
    if group.post_node:
        page.status = unicode("PUBLISHED")
        page.save('UnderModeration')
    else:
        edited, version = get_page(request, page)
        page.content = edited.content
        page.content_org = edited.content_org
        page.status = unicode("PUBLISHED")
        page.modified_by = int(request.user.id)
        page.save()
    return HttpResponseRedirect(reverse('page_details',
                                        kwargs={'group_id': group_id, 'app_id': page._id}))
def video_detail(request, group_id, _id):
    """Show a single video File node; if ``_id`` turns out to be a
    GSystemType, delegate to the video dashboard instead.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    vid_node = collection.File.one({"_id": ObjectId(_id)})
    if vid_node._type == "GSystemType":
        return videoDashboard(request, group_id, _id)
    video_obj = request.GET.get("vid_id", "")
    context = {'node': vid_node,
               'group_id': group_id,
               'groupid': group_id,
               'video_obj': video_obj}
    return render_to_response("ndf/video_detail.html", context,
                              context_instance=RequestContext(request))
def display_forum(request, group_id, forum_id):
    """Show a forum's detail page.

    If ``forum_id`` is actually the forum GSystemType itself, delegate to
    the module-level ``forum`` view. The original bound the fetched node
    to a local named ``forum``, shadowing that view and making the
    delegate call ``forum(request, …)`` a TypeError; the local is renamed
    to ``forum_node`` to restore the intended dispatch. The duplicate
    second fetch of the same node (``forum_object``) is dropped.
    """
    forum_node = collection.Node.one({'_id': ObjectId(forum_id)})
    usrname = User.objects.get(id=forum_node.created_by).username
    # Resolve group_id when it arrives as a group/author name, not an id.
    if not ObjectId.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    if forum_node._type == "GSystemType":
        return forum(request, group_id, forum_id)
    variables = RequestContext(request, {'forum': forum_node,
                                         'groupid': group_id,
                                         'group_id': group_id,
                                         'forum_created_by': usrname})
    return render_to_response("ndf/forumdetails.html", variables)
def load(self, **kwargs):
    """
    Save all passed-in data to the object, converting ID objects and
    strings into ObjectIds / DBRefs for reference fields.

    :param kwargs: field name -> value data to set on self
    :return: self
    """
    for k, v in kwargs.items():
        # Only coerce the 'id' key or keys that map to ReferenceFields
        # on the underlying model; everything else is set verbatim.
        if k == 'id' or (k in self.model._fields.keys()
                         and isinstance(self.model._fields[k], db.ReferenceField)):
            # NOTE(review): `idify` is not defined in this function or its
            # parameters — this branch raises NameError when reached for a
            # non-'id' key. Presumably it was meant to be a keyword flag;
            # confirm against callers before fixing.
            if idify or k == 'id':
                if not isinstance(v, ObjectId):
                    if isinstance(v, str):
                        v = ObjectId(v)
                    else:
                        # Fall back to the object's own id attribute.
                        v = getattr(v, 'id')
            elif issubclass(v.__class__, db.Document):
                v = v.to_dbref()
            elif isinstance(v, str):
                # String for a reference field: build a DBRef keyed by the
                # field name.
                v = DBRef(k, ObjectId(v))
            elif hasattr(v, 'model'):
                # Wrapper object: rebuild the model from its data and
                # reference it.
                v = v.model(**v.data()).to_dbref()
            else:
                v = None
        setattr(self, k, v)
    return self
def objs_gen(infos, depth=0, depth_lim=2):
    # Recursively resolve cached object references, at most `depth_lim`
    # levels deep. Relies on module-level state: `list_generator` (turns
    # infos into objects), `oids` (set of ids already scheduled) and
    # `obj_pool` (id -> fetched object cache).
    # print(depth)
    objs = list_generator(infos)
    if not objs:
        return objs
    if depth == depth_lim:
        return objs
    # Collect cache entries that still need fetching.
    toget = []
    for each in objs:
        if each is None:
            continue
        for ev in each.cache_info.values():
            if not ObjectId.is_valid(ev[0]):
                continue
            # NOTE(review): membership is tested on the raw value `ev[0]`
            # but ObjectId(ev[0]) is what gets added below — if `oids`
            # only ever holds ObjectIds this check never hits; confirm.
            if ev[0] in oids:
                continue
            toget.append(ev)
            oids.add(ObjectId(ev[0]))
    tmps = objs_gen(toget, depth + 1)  # get_obj
    for each in tmps:  # add obj in tmps to pool
        if each is None:
            continue
        obj_pool[each._id] = each
    for each in objs:  # add obj in obj_pool to objs
        if each is None:
            continue
        for ek, ev in each.cache_info.items():
            if not ObjectId.is_valid(ev[0]):
                continue
            ev = ObjectId(ev[0])
            if ev in obj_pool:
                each.set_cache(ek, obj_pool[ev])
    return objs
def get(self, itemid=None):
    """Fetch one of the current user's documents as JSON.

    Looks up by ``itemid`` first; failing that, falls back to the
    ``surveyid``/``surveyname`` request arguments (a survey name is
    resolved to its id through the SurveyDQ collection). Returns ``{}``
    when nothing matches.
    """
    found = None
    if ObjectId.is_valid(itemid):
        itemid = ObjectId(itemid)
        found = self.model.find_one({"_id": itemid, "created.by": current_user.id})
    if found is None:
        parser = reqparse.RequestParser()
        parser.add_argument("surveyid", type=str, store_missing=False)
        parser.add_argument("surveyname", type=unicode, store_missing=False)
        args = parser.parse_args()
        sid = args.get("surveyid")
        sname = args.get("surveyname")
        if not sid and not sname:
            return jsonify({})
        if sname:
            # Resolve a survey name to its id.
            surveyobj = self.db["SurveyDQ"].find_one({"name": sname})
            if surveyobj:
                sid = str(surveyobj["_id"])
        if sid:
            # Oldest matching record wins (ascending _id).
            oldest_first = [("_id", pymongo.ASCENDING)]
            if ObjectId.is_valid(sid):
                found = self.model.find_one({"surveyid": ObjectId(sid),
                                             "created.by": current_user.id},
                                            sort=oldest_first)
            if not found:
                found = self.model.find_one({"surveyname": sid,
                                             "created.by": current_user.id},
                                            sort=oldest_first)
    return jsonify(crudmgo.model_to_json(found, is_single=True) if found else {})
def edit_group(request, group_id):
    """Edit a group's properties.

    On POST, saves common fields, mirrors ``access_policy`` onto
    ``group_type`` and redirects to the group-change view; otherwise
    renders the edit form.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    page_node = gs_collection.GSystem.one({"_id": ObjectId(group_id)})
    if request.method == "POST":
        get_node_common_fields(request, page_node, group_id, gst_group)
        # Keep group_type in lockstep with the chosen access policy.
        if page_node.access_policy in ("PUBLIC", "PRIVATE"):
            page_node.group_type = page_node.access_policy
        page_node.save()
        group_id = page_node._id
        return HttpResponseRedirect(reverse('groupchange', kwargs={'group_id': group_id}))
    page_node, ver = get_page(request, page_node)
    return render_to_response("ndf/edit_group.html",
                              {'node': page_node, 'groupid': group_id, 'group_id': group_id},
                              context_instance=RequestContext(request))
def delete_thread(request,group_id,forum_id,node_id):
    """Change the status of a forum thread to HIDDEN and notify the group.

    Every group member (and the group creator) receives a notification
    message; finally the forum detail page is re-rendered.
    """
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(node_id) :
        thread=node_collection.one({'_id':ObjectId(node_id)})
    else:
        # Invalid thread id: nothing to do (view returns None).
        return
    forum = node_collection.one({'_id': ObjectId(forum_id)})
    # group_id may be a group name or an author name; resolve it to an id.
    if ins_objectid.is_valid(group_id) is False :
        group_ins = node_collection.find_one({'_type': "Group","name": group_id})
        auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
        if group_ins:
            group_id = str(group_ins._id)
        else :
            auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
            if auth :
                group_id = str(auth._id)
            else :
                pass
    # Hide the thread, then re-read it for use in the notification text.
    op = node_collection.collection.update({'_id': ObjectId(node_id)}, {'$set': {'status': u"HIDDEN"}})
    node=node_collection.one({'_id':ObjectId(node_id)})
    forum_threads = []
    # Remaining (non-hidden) replies under the forum, oldest first.
    exstng_reply = node_collection.find({'$and':[{'_type':'GSystem'},{'prior_node':ObjectId(forum._id)}],'status':{'$nin':['HIDDEN']}})
    exstng_reply.sort('created_at')
    forum_node=node_collection.one({'_id':ObjectId(forum_id)})
    for each in exstng_reply:
        forum_threads.append(each.name)
    #send notifications to all group members
    colg=node_collection.one({'_id':ObjectId(group_id)})
    for each in colg.author_set:
        if each != colg.created_by:
            bx=get_userobject(each)
            if bx:
                activity=request.user.username+" -deleted thread "
                prefix=" in the forum "+forum_node.name
                link="http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
                msg=activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
                # no_check=forum_notification_status(group_id,auth._id)
                # if no_check:
                ret = set_notif_val(request,group_id,msg,activity,bx)
    # Notify the group creator separately.
    activity=request.user.username+" -deleted thread "
    prefix=" in the forum "+forum_node.name
    bx=get_userobject(colg.created_by)
    if bx:
        link="http://"+sitename+"/"+str(colg._id)+"/forum/"+str(forum_node._id)
        msg=activity+"-"+node.name+prefix+"- in the group '"+colg.name+"' created by you."+"'\n"+"Please visit "+link+" to see the forum."
        # no_check=forum_notification_status(group_id,auth._id)
        # if no_check:
        ret = set_notif_val(request,group_id,msg,activity,bx)
    #send notification code ends here
    variables = RequestContext(request,{ 'forum':forum, 'groupid':group_id,'group_id':group_id, 'forum_created_by':User.objects.get(id=forum.created_by).username })
    return render_to_response("ndf/forumdetails.html",variables)
def hongbao():
    """Periodically aggregate users who sent hongbao (red packet)
    passphrases to the official account.

    Rule: a user sends a recognized passphrase to receiverId 10000;
    recently-registered senders who have not yet been paid are collected
    into an email report, then progress markers are written back to
    redis.
    """
    # Fix: `timedelta` is used below but the original only imported
    # `datetime` here, raising NameError unless the module happened to
    # import it elsewhere.
    from datetime import datetime, timedelta
    from bson import ObjectId
    import re

    redis = _redis_client()
    # Users that have already received a red packet.
    processed_users = set(json.loads(redis.get('viae/viae.provisional.hongbao/processed_users') or '[]'))
    # Progress timestamp (UTC) of the previous run.
    utc_tz = timezone('UTC')
    processed_since = redis.get('viae/viae.provisional.hongbao/processed_ts')
    logger.info('Processing from %s' % processed_since)
    processed_since = datetime.strptime(processed_since, '%Y-%m-%d %H:%M:%S').replace(tzinfo=utc_tz)
    dummy_id = ObjectId.from_datetime(processed_since)
    # Which users sent a passphrase since the last run.
    pattern = re.compile(u'(体验旅行派APP领现金红包|新用户口令|领新用户红包|从微信过来领红包|下单送北京大房免费住)', re.IGNORECASE)
    sender_list = mongo_hedy.Message.distinct('senderId',
                                              {'_id': {'$gt': dummy_id},
                                               'receiverId': 10000,
                                               'contents': pattern})
    # Senders must not already be processed, and must have registered
    # recently (ObjectId window starting 7 days before the progress mark).
    final_senders = {}
    user_dummy_id = ObjectId.from_datetime(processed_since - timedelta(days=7))
    for s in filter(lambda v: v not in processed_users, sender_list):
        u = mongo_yunkai.UserInfo.find_one({'userId': s, '_id': {'$gt': user_dummy_id}},
                                           {'userId': 1, 'nickName': 1})
        if not u:
            continue
        final_senders[u['userId']] = u
    if final_senders:
        # Build the report: one section per user with all their messages.
        sections = []
        for uid, user in sorted(final_senders.items(), key=lambda v: v[0]):
            messages = mongo_hedy.Message.find({'senderId': uid, 'receiverId': 10000},
                                               {'contents': 1})
            c = '\n'.join([tmp['contents'] for tmp in messages])
            sections.append(u'%d: %s\n%s\n\n' % (uid, user['nickName'], c))
            processed_users.add(uid)
        email_contents = ''.join(sections).strip()
        from viae.job import send_email_to_group, send_email
        logger.info('Sending hongbao stats')
        send_email_to_group(groups='MARKETPLACE', subject=u'红包申请统计', body=email_contents)
        # Persist progress with a 7-day expiry (default).
        # NOTE(review): the progress markers are only advanced when there
        # were senders in this run — confirm that is intended.
        expire = 7 * 24 * 3600
        redis.set('viae/viae.provisional.hongbao/processed_users',
                  json.dumps(list(processed_users)), expire)
        redis.set('viae/viae.provisional.hongbao/processed_ts',
                  (datetime.utcnow() - timedelta(minutes=20)).replace(tzinfo=utc_tz).strftime('%Y-%m-%d %H:%M:%S'),
                  expire)
def find_invalid_logs():
    """Yield WIKI_DELETED NodeLogs whose ObjectId creation time postdates
    their recorded ``date``.
    """
    deleted_query = Q('action', 'eq', NodeLog.WIKI_DELETED)
    for log in NodeLog.find(deleted_query):
        # Derive a naive UTC datetime from the ObjectId's embedded
        # generation timestamp.
        created = ObjectId(log._id).generation_time
        created = created.replace(tzinfo=None) - created.utcoffset()
        if created > log.date:
            yield log
def fix_invalid_log(log):
    """Reset ``log.date`` to the UTC time encoded in its ObjectId, then
    save the log.
    """
    oid_time = ObjectId(log._id).generation_time
    naive_utc = oid_time.replace(tzinfo=None) - oid_time.utcoffset()
    # Write through the field descriptor with safe=False so the
    # assignment bypasses the usual validation path.
    NodeLog._fields['date'].__set__(log, naive_utc, safe=False)
    log.save()
def post(self, *args, **kwargs): """->This method will be called when a post request on url-/api/sendsms will be made. ->This method will get array of numbers,test,message and it will send the http request to text local API. ->The response received from textlocal will be checked and if successful then it will be stored in database else a reponse with status code 400 is sent. :param args: :param kwargs: """ # TODO # handle exceptions with transactions - will not arise try: validate_result = validator(self.parse_request_body()) if validate_result is True: if self.result["currentRound"] < 3: if self.result["currentRound"] == self.get_json_body_argument("currentRound"): teams = self.get_json_body_argument("teams") test = self.get_json_body_argument("test", default=False) numbers = [team["mobileNumber"] for team in teams] numbers_str_list = ','.join(map(str, numbers)) round_number = str(int(self.result['currentRound']) + 1) custom = self.result["_id"].__str__() + "_" + round_number sms_id = ObjectId() message = get_round_message( round_number, self.result['eventName'], self.get_json_body_argument('date'), self.get_json_body_argument('time'), self.get_json_body_argument('venue') ) response = yield send_textlocal_sms(message, numbers_str_list, round_number, custom, test) if response["status"] == "success": document = dict( _id=sms_id, numbers=numbers, message=message, eventId=self.result["_id"], round=round_number, test=test ) yield self.db.sms.insert(document) for team in teams: yield self.db.participants.update({"_id": ObjectId(team["_id"])}, {"$set": {"round" + round_number: "q"}}) yield self.db.events.update({"_id": self.result["_id"]}, {"$inc": {"currentRound": 1}}) self.respond(sms_id.__str__(), 200) else: self.respond(response, 400) else: self.respond("current round conflicts", 400) else: self.respond("no more round in event", 400) else: self.respond(validate_result, 400) except Exception as e: self.respond(e.__str__(), 500)
def create_edit_page(request, group_id, node_id=None):
    """Create a new page node or edit an existing one (``node_id`` given).

    POST saves the common fields and redirects to the page detail view;
    GET renders the create/edit form with the list of existing page names.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    context_variables = {'title': gst_page.name,
                         'group_id': group_id,
                         'groupid': group_id}
    available_nodes = collection.Node.find({'_type': u'GSystem',
                                            'member_of': ObjectId(gst_page._id)})
    nodes_list = [each.name for each in available_nodes]
    if node_id:
        page_node = collection.Node.one({'_type': u'GSystem', '_id': ObjectId(node_id)})
    else:
        page_node = collection.GSystem()
    if request.method == "POST":
        get_node_common_fields(request, page_node, group_id, gst_page)
        page_node.save()
        return HttpResponseRedirect(reverse('page_details',
                                            kwargs={'group_id': group_id,
                                                    'app_id': page_node._id}))
    if node_id:
        page_node, ver = get_page(request, page_node)
        context_variables['node'] = page_node
        context_variables['groupid'] = group_id
        context_variables['group_id'] = group_id
    context_variables['nodes_list'] = json.dumps(nodes_list)
    return render_to_response("ndf/page_create_edit.html",
                              context_variables,
                              context_instance=RequestContext(request))
def resource_list(request,group_id,app_id=None):
    """Render a list of all resources (files, docs, images, videos) in a
    group for the 'Browse Resource' app.
    """
    # group_id may be a group name or an author name; resolve it to an id.
    ins_objectid = ObjectId()
    if ins_objectid.is_valid(group_id) is False :
        group_ins = collection.Node.find_one({'_type': "Group","name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) })
        if group_ins:
            group_id = str(group_ins._id)
        else :
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) })
            if auth :
                group_id = str(auth._id)
            else :
                pass
    # Default the app id to the 'Browse Resource' GSystemType.
    if app_id is None:
        app_ins = collection.Node.find_one({'_type':'GSystemType', 'name': 'Browse Resource'})
        if app_ins:
            app_id = str(app_ins._id)
    # if GST_browse_resource._id == ObjectId(app_id):
    """
    * Renders a list of all 'Resources(XCR)' available within the database.
    """
    title = GST_browse_resource.name
    file_id = GST_FILE._id
    # All File nodes in the group with at least one stored GridFS file…
    files = collection.Node.find({'member_of': {'$all': [ObjectId(file_id)]},
                                  '_type': 'File',
                                  'fs_file_ids':{'$ne': []},
                                  'group_set': {'$all': [ObjectId(group_id)]}}).sort("last_update", -1)
    # …split into documents (not image/video), images, and videos.
    docCollection = collection.Node.find({'member_of': {'$nin': [ObjectId(GST_IMAGE._id), ObjectId(GST_VIDEO._id)]},
                                          '_type': 'File','fs_file_ids': {'$ne': []},
                                          'group_set': {'$all': [ObjectId(group_id)]}}).sort("last_update", -1)
    imageCollection = collection.Node.find({'member_of': {'$all': [ObjectId(GST_IMAGE._id)]},
                                            '_type': 'File','fs_file_ids': {'$ne': []},
                                            'group_set': {'$all': [ObjectId(group_id)]}}).sort("last_update", -1)
    videoCollection = collection.Node.find({'member_of': {'$all': [ObjectId(GST_VIDEO._id)]},
                                            '_type': 'File','fs_file_ids': {'$ne': []},
                                            'group_set': {'$all': [ObjectId(group_id)]}}).sort("last_update", -1)
    already_uploaded = request.GET.getlist('var', "")
    pandora_video_st=collection.Node.one({'$and':[{'name':'Pandora_video'},{'_type':'GSystemType'}]})
    # NOTE(review): `source_id_at` and `pandora_video_id` are computed but
    # never used below; `source_id_set` is always passed empty.
    source_id_at=collection.Node.one({'$and':[{'name':'source_id'},{'_type':'AttributeType'}]})
    pandora_video_id=[]
    source_id_set=[]
    get_member_set=collection.Node.find({'$and':[{'member_of': {'$all': [ObjectId(pandora_video_st._id)]}},{'_type':'File'}]})
    return render_to_response("ndf/resource_list.html",
                              {'title': title,
                               'files': files,
                               'sourceid':source_id_set,
                               'wetube_videos':get_member_set,
                               'docCollection': docCollection,
                               'imageCollection': imageCollection,
                               'videoCollection': videoCollection,
                               'already_uploaded': already_uploaded,
                               'groupid': group_id,
                               'group_id':group_id },
                              context_instance = RequestContext(request))
def module(request, group_id, module_id=None):
    """Render the 'Module' nodes of a group.

    POST performs a name/tag regex search; GET lists all modules when
    ``module_id`` resolves to the Module GSystemType.
    """
    ins_objectid = ObjectId()
    if not ins_objectid.is_valid(group_id):
        group_ins = collection.Node.find_one({'_type': "Group", "name": group_id})
        auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
        if group_ins:
            group_id = str(group_ins._id)
        else:
            auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username)})
            if auth:
                group_id = str(auth._id)
    # Default to the "Module" GSystemType when no id was supplied.
    if module_id is None:
        module_ins = collection.Node.find_one({'_type': "GSystemType", "name": "Module"})
        if module_ins:
            module_id = str(module_ins._id)
    if request.method == "POST":
        # Module search view: case-insensitive match on name or tags.
        title = GST_MODULE.name
        search_field = request.POST['search_field']
        module_coll = collection.Node.find({
            'member_of': {'$all': [ObjectId(GST_MODULE._id)]},
            '$or': [{'name': {'$regex': search_field, '$options': 'i'}},
                    {'tags': {'$regex': search_field, '$options': 'i'}}],
            'group_set': {'$all': [ObjectId(group_id)]}
        }).sort('last_update', -1)
        return render_to_response("ndf/module.html",
                                  {'title': title,
                                   'searching': True,
                                   'query': search_field,
                                   'module_coll': module_coll,
                                   'groupid': group_id,
                                   'group_id': group_id},
                                  context_instance=RequestContext(request))
    elif GST_MODULE._id == ObjectId(module_id):
        # Module list view.
        title = GST_MODULE.name
        module_coll = collection.GSystem.find({'member_of': {'$all': [ObjectId(module_id)]},
                                               'group_set': {'$all': [ObjectId(group_id)]}})
        variable = RequestContext(request, {'title': title,
                                            'module_coll': module_coll,
                                            'group_id': group_id,
                                            'groupid': group_id})
        return render_to_response("ndf/module.html", variable)
def to_foreign(self, obj, name, value):  # pylint:disable=unused-argument
    """Coerce *value* into an OID (ObjectId).

    Accepts an existing OID, a datetime (absolute time), a timedelta
    (relative to utcnow), a mapping carrying an '_id' key, or anything
    unicode()-convertible as a last resort.
    """
    if isinstance(value, OID):
        result = value
    elif isinstance(value, datetime):
        result = OID.from_datetime(value)
    elif isinstance(value, timedelta):
        result = OID.from_datetime(datetime.utcnow() + value)
    elif isinstance(value, MutableMapping) and '_id' in value:
        result = OID(value['_id'])
    else:
        result = OID(unicode(value))
    return result
def app_selection(request,group_id): print "grpid=",group_id ins_objectid = ObjectId() if ins_objectid.is_valid(group_id) is False : group_ins = collection.Node.find_one({'_type': "Group","name": group_id}) auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if group_ins: group_id = str(group_ins._id) else : auth = collection.Node.one({'_type': 'Author', 'name': unicode(request.user.username) }) if auth : group_id = str(auth._id) else : pass try: grp=collection.Node.one({"_id":ObjectId(group_id)}) if request.method == "POST": lst=[] apps_to_set = request.POST['apps_to_set'] apps_list=apps_to_set.split(",") if apps_list: for each in apps_list: if each: obj=collection.Node.one({'_id':ObjectId(each)}) lst.append(obj); gattribute=collection.Node.one({'$and':[{'_type':'GAttribute'},{'attribute_type.$id':at_apps_list._id},{'subject':grp._id}]}) if gattribute: gattribute.delete() if lst: create_attribute=collection.GAttribute() create_attribute.attribute_type=at_apps_list create_attribute.subject=grp._id create_attribute.object_value=lst create_attribute.save() return HttpResponse("Success") else: list_apps=[] if not at_apps_list: return HttpResponse("Failure") poss_atts=grp.get_possible_attributes(at_apps_list._id) if poss_atts: list_apps=poss_atts['apps_list']['object_value'] st = get_all_gapps() print "inapp_list view",st,list_apps data_list=set_drawer_widget(st,list_apps) return HttpResponse(json.dumps(data_list)) except Exception as e: print "Error in app_selection "+str(e)
def edit_note(note_id):
    """Update body, subject, and modification time of one of the current
    user's notes; responds 400 on bad input, 404 when the note isn't the
    user's.
    """
    user_obj = model.User.objects(username=login.username())[0]
    # Validate id format first, then ownership.
    if not ObjectId.is_valid(note_id):
        abort(400)
    if not model.Note.objects(pk=note_id, author=user_obj):
        abort(404)
    try:
        body = request.json['body']
        subject = request.json['subject']
    except:
        # NOTE(review): bare except — any malformed/absent JSON body maps
        # to a 400.
        abort(400)
    # Empty strings leave the corresponding field untouched.
    if body != '':
        model.Note.objects(pk=note_id, author=user_obj)[0].update(set__body=body)
    if subject != '':
        model.Note.objects(pk=note_id, author=user_obj)[0].update(set__subject=subject)
    model.Note.objects(pk=note_id, author=user_obj)[0].update(
        set__modification_time=datetime.now())
    return jsonify({
        'success': True,
        'message': 'Note was edited',
        'data': None,
    })
def activity_details():
    """ Looks for tools data in SYNC_DATA_PATH, postgres dump data in OUTPUT_DIR

    Builds a per-user timeline of activity-player visits (gstudio Mongo
    'Benchmarks' collection) plus tool logs, aggregates it per date, and
    appends one CSV row per user to school_filename(). Returns the
    number of data rows written.

    NOTE(review): reconstructed from a whitespace-mangled source; the
    loop nesting shown here is the most plausible reading — verify
    against version control before relying on it.
    """
    file_name = school_filename()
    # Generate the list of users that have logged in, from the pg_dump_all.sql
    # file.
    # 1) Look for "COPY auth_user "
    # 2) Start reading rows
    # 3) Match all rows where the last_login != date_joined
    # 4) Stop at the first "--" line
    #
    # Return a list of tuples [(username, userID)]
    user_list = generate_user_list()
    full_user_list = generate_user_list(full_list=True)
    all_user_data = {}
    for user_data in user_list:
        username = unicode(user_data[0].strip())
        user_id = user_data[1]
        if username not in all_user_data:
            all_user_data[username] = [user_id]
        # activity_in_regex_pattern = '.*/course/activity_player.*'
        # activity_out_regex_pattern = '.*/course.*|.*my-desk.*|.*explore.*|.*tools/tool-page.*|.*course/content.*'
        activity_in_text_search = 'activity_player'
        # activity_out_text_search = 'course my-desk explore tool-page content'
        # NOTE(review): a new MongoClient is created per user — could be
        # hoisted out of the loop, kept as-is here.
        MC = MongoClient(host=MONGO_DB_HOST, port=MONGO_DB_PORT)
        GSTUDIO_DB = MC['gstudio-mongodb']
        # This is timing out when there are many records,
        # so set the `no_cursor_timeout`
        # optional flag.
        # Changed from $regex to $text, which requires a
        # $text index on `calling_url` to be set...
        all_visits = GSTUDIO_DB['Benchmarks'].find(
            {
                '$text': {'$search': activity_in_text_search},
                'user': username
            },
            no_cursor_timeout=True).sort('last_update', -1)
        print "\nTotal activity-player visits for {0}: {1}".format(
            username, all_visits.count())
        # Once we know the sequence of Activity / Tool interaction,
        # we need to aggregate the data into the desired format
        # user_row = []  # prepend username and user_id later
        user_nav_out_by_session = get_all_nav_out_events_by_session(
            username, GSTUDIO_DB)
        for each_visit in all_visits:
            # Temporarily store each of these in row_data.
            # We'll also add in the tool activity to row_data.
            # Then sort by visited_on.
            row_blob = {
                'unit': 'NA',
                'visited_on': 'NA',
                'language': 'NA',
                'lesson': 'NA',
                'activity': 'NA',
                'timespent': 'NA',
                'buddies': 'NA',
                'out_action': 'NA',
                '_type': 'activity'
            }
            # last_update is saved as IST via Django, even though
            # it looks like UTC in MongoDB:
            # ISODate("2017-06-18T04:49:46.243Z")
            # Because they create the Benchmark last_updated field
            # with `datetime.datetime.now()`, which is system time.
            # So to make it consistent with the tool logging, we'll
            # convert it to IST...
            visited_on = convert_utc_to_ist(each_visit['last_update'])
            # if visited_on.date() >= start_date.date():
            row_blob['visited_on'] = visited_on
            locale = 'en'
            if 'locale' in each_visit:
                locale = each_visit['locale']
            row_blob['language'] = str(locale)
            calling_url_str = each_visit['calling_url']
            # URLs of the shape /<unit>/.../<lesson>/<activity>/ carry
            # the ids needed to name the unit/lesson/activity.
            if (calling_url_str.startswith('/') and calling_url_str.endswith('/')):
                splitted_results = calling_url_str.split('/')
                if len(splitted_results) == 7:
                    unit_id = splitted_results[1]
                    lesson_id = splitted_results[4]
                    activity_id = splitted_results[5]
                    unit_node = get_group_name_id(unit_id, get_obj=True)
                    lesson_node = GSTUDIO_DB['Nodes'].find_one(
                        {'_id': ObjectId(lesson_id)})
                    activity_node = GSTUDIO_DB['Nodes'].find_one(
                        {'_id': ObjectId(activity_id)})
                    if not lesson_node:
                        lesson_name = 'Deleted lesson? Unknown name.'
                    else:
                        lesson_name = lesson_node['name']
                    if not activity_node:
                        activity_name = 'Deleted activity? Unknown name.'
                    else:
                        activity_name = activity_node['name']
                    if not unit_node:
                        unit_name = 'Deleted unit? Unknown name.'
                    else:
                        unit_name = unit_node['name']
                        if 'altnames' in unit_node and unit_node['altnames']:
                            unit_name = unit_node['altnames']
                    row_blob.update({
                        'unit': slugify(unit_name),
                        'lesson': slugify(lesson_name),
                        'activity': slugify(activity_name)
                    })
            # Using $regex is slow and kills performance...
            # 4-5 seconds per query.
            # Wonder how we can speed it up while preserving
            # the data captured.
            # Changed from $regex to $text, which requires a
            # $text index on `calling_url` to be set...
            # Manually checking, it seems to capture the same data.
            # nav_out_action_cur = GSTUDIO_DB['Benchmarks'].find(
            #     {'last_update': {'$gte': each_visit['last_update']},
            #      '_id': {'$ne': each_visit['_id']},
            #      'user': username,
            #      'session_key': each_visit['session_key'],
            #      '$text': {
            #          '$search': activity_out_text_search
            #      }}, {'last_update': 1,
            #           'name': 1}).sort('last_update', 1).limit(1)
            nav_out_action_cur = find_next_nav_out_action(
                user_nav_out_by_session, each_visit)
            if nav_out_action_cur is not None:
                # if nav_out_action_cur.count():
                # nav_out_obj = nav_out_action_cur[0]
                nav_out_obj = nav_out_action_cur
                end_time = convert_utc_to_ist(nav_out_obj['last_update'])
                # Time spent = gap between this visit and the next
                # navigation-away event in the same session.
                timespent = (end_time - visited_on).total_seconds()
                print " Time spent: ", timespent, " seconds."
                row_blob.update({
                    'timespent': str(timespent),
                    'out_action': nav_out_obj['name']
                })
            else:
                print(
                    " ## Unable to track time spent "
                    "on this activity. ##")
            # Get the buddy ID from the username, using the CSVs
            buddies_obj = GSTUDIO_DB['Buddies'].find_one({
                'loggedin_userid': int(user_id),
                'session_key': str(each_visit['session_key'])
            })
            if buddies_obj:
                auth_id_list = buddies_obj['buddy_in_out'].keys()
                buddies_names = get_names_list_from_obj_id_list(
                    auth_id_list, u'Author')
                row_blob.update({'buddies': buddies_names})
                # Credit the same row_blob to each buddy's timeline too.
                for buddy in buddies_names:
                    if buddy not in all_user_data:
                        # how do we get the buddy's user ID here?
                        all_user_data[buddy] = [
                            get_buddy_id(full_user_list, buddy)
                        ]
                    all_user_data[buddy].append(row_blob)
            else:
                print(
                    "## Unable to track time spent "
                    "on this activity. ##")
            all_user_data[username].append(row_blob)
        all_visits.close()
    for username, user_row in all_user_data.items():
        user_id = user_row[0]
        # Now also check for tool logs
        # Do this here because buddies may not have ``all_visits``
        # from above, so they won't get their log files checked if
        # we leave this log-check above.
        for directory, subdirectory, files in os.walk(SYNC_DATA_PATH):
            if ('gstudio_tools_logs' in directory
                    and user_log_file_in(user_id, files)):
                full_path = os.path.join(directory,
                                         get_user_log_filename(user_id, files))
                user_row += get_tool_logs(full_path)
        # Don't need to do separate check for buddies, because gstudio
        # seems to log each buddy's tool events separately.
    # Once we've collected all the user data + logs, go through
    # each one and sort them.
    # Grab all of the found dates, so that we can add
    # filler data, so each user row in the output
    # has columns that line up.
    all_dates = get_all_dates_from_data(all_user_data)
    # Keep in mind that index 0 is the user_id
    row_data = []
    for username, user_row in all_user_data.items():
        user_id = user_row[0]
        del user_row[0]
        # Now all the data is in user_row, let's sort them in ascending
        # order by the `visited_on` key.
        user_row = sorted(user_row, key=lambda k: k['visited_on'])
        # Aggregate the data into five columns per date
        user_row = aggregate_user_data(user_row, all_dates)
        # We need to convert the datetime objects to nice strings
        # user_row = [serialize_datetime(r) for r in user_row]
        # Prepend the username and userID to the row
        # user_row = [username, user_id] + [data for r in user_row
        #                                   for data in extract_data(r)]
        user_row = [username, user_id] + user_row
        row_data.append(user_row)
    with open(file_name, 'a') as file_handle:
        activity_writer = csv.writer(file_handle, dialect='excel')
        column_list = ['Username', 'UserID']
        for one_date in all_dates:
            column_list += [
                'Date ({0})'.format(str(one_date)),
                'Total Timestamps',
                'Total Timespent (HH:MM:SS.microsec)',
                'Activities Viewed',
                'Activity Timestamps'
            ]
        activity_writer.writerow(column_list)
        # write the row_data out to the activity_writer
        for row in row_data:
            activity_writer.writerow(row)
    return len(row_data)
def get_one_group(self):
    """Fetch the group document for ``self.object_id``.

    The document is round-tripped through ``json_util`` so BSON-specific
    values (ObjectId, dates) come back as plain JSON-compatible types.
    """
    app.logger.debug("get_activity %s,%s" % (self.object_id, self.user_id))
    document = group_collection.find_one({'_id': ObjectId(self.object_id)})
    serialized = json.dumps(document, default=json_util.default)
    app.logger.debug("get_activity %s" % serialized)
    return json.loads(serialized)
def post(self, *args, **kwargs):
    """Forward (re-dispatch) a task to another courier.

    Form arguments: ``t_id`` — task id, ``c_id`` — target courier id.
    The task is forwarded only while it is 'waiting'/'dispatched' and
    none of its subtasks has progressed past scheduling; otherwise an
    error page is rendered.
    """
    task_id = ObjectId(self.get_argument('t_id'))
    courier_id = ObjectId(self.get_argument('c_id'))
    task = yield self.task_model.GetTaskFromId(task_id)
    subtask_condition = {
        '_id': {
            '$in': task['subtasks']
        }
    }
    # Any subtask already in-flight or finished blocks re-scheduling.
    all_subtasks_can_schedule = True
    subtasks = yield self.subtask_model.find(subtask_condition).to_list(None)
    for subtask in subtasks:
        if subtask['status'] in ['delivering', 'confirmed', 'done', 'lock_for_confirm']:
            all_subtasks_can_schedule = False
            break
    if all_subtasks_can_schedule and task['status'] in ['waiting', 'dispatched']:
        courier = yield self.courier_model.GetCourierFromId(courier_id)
        condition = {
            '_id': task_id
        }
        updater = {
            '$set': {
                'status': 'dispatched',
                # milliseconds since epoch
                'dispatched_time': int(time.time() * 1000),
                'courier_id': courier['_id'],
                'courier_name': courier['name'],
                'courier_mobile': courier['mobile']
            }
        }
        result = yield self.task_model.update(condition, updater)
        if result['updatedExisting'] and result['ok'] == 1:
            # Cascade the dispatch to every subtask.
            subtask_condition = {
                '_id': {
                    '$in': task['subtasks']
                }
            }
            subtask_updater = {
                '$set': {
                    'status': 'dispatched',
                    'dispatched_time': int(time.time() * 1000)
                }
            }
            yield self.subtask_model.update(subtask_condition, subtask_updater, multi=True)
            # set order sending
            order_ids = [ObjectId(t['express_no']) for t in subtasks]
            yield self.order_model.update(
                {
                    '_id': {
                        '$in': order_ids
                    }
                },
                {
                    '$set': {
                        'status': 'sending',
                        'courier_id': courier['_id'],
                        'courier_name': courier['name'],
                        'courier_mobile': courier['mobile']
                    }
                },
                multi=True
            )
            # Notify the new courier by SMS.
            sms_args = {}
            logging.info('[TaskForward] sms send to: ' + str(courier['mobile']))
            yield SendSM().Action(courier['mobile'], sms.SMS_NEW_TASK_NOTIFICATION, **sms_args)
            self.redirect('/task_detail?id=%s&task_forward=1&courier_id=%s' % (str(task_id), courier['_id']))
        else:
            # Task row was not updated — report failure.
            data = {
                'task_id': task_id,
                'flag': 'error',
                'message': '转单失败'
            }
            self.render('task_forward.html', data=data)
    else:
        # Task not in a forwardable state.
        data = {
            'task_id': task_id,
            'flag': 'error',
            'message': '转单失败:该状态下任务不可转'
        }
        self.render('task_forward.html',
                    data=data)
def get_user_data():
    """Fetch one hard-coded person document from the remote ipn_db.

    NOTE(review): both the Mongo host and the ObjectId are hard-coded —
    presumably a throwaway/debug helper; confirm before reuse.
    """
    client = MongoClient('185.4.30.75', 27017)
    # client = MongoClient('localhost', 27017)
    database = client.ipn_db
    cursor = database.person.find({'_id': ObjectId('560121abcbf62c13d4567f0d')})
    return cursor[0]
def test_duplicate(self):
    """Duplicating into a desk/stage must not mark the copy as a
    translation when the language is unchanged (en template, en item):
    'translated_from' must be absent on the new archive item.
    """
    # Destination desk with a default content profile/template.
    self.app.data.insert(
        'desks', [{
            '_id': ObjectId('5d385f17fe985ec5e1a78b49'),
            'name': 'Politic Desk',
            'default_content_profile': 'belga_text',
            'default_content_template': 'content_template_1',
            'desk_language': 'fr',
            'source': 'politic'
        }])
    # Incoming stage belonging to that desk.
    self.app.data.insert('stages', [{
        '_id': ObjectId('5d385f31fe985ec67a0ca583'),
        'name': 'Incoming Stage',
        'default_incoming': True,
        'desk_order': 2,
        'content_expiry': None,
        'working_stage': False,
        'is_visible': True,
        'desk': ObjectId('5d385f17fe985ec5e1a78b49')
    }])
    # Template supplying the default metadata (subject, keywords, language).
    self.app.data.insert('content_templates', [{
        '_id': 'content_template_1',
        'template_name': 'belga text',
        'is_public': True,
        'data': {
            'profile': 'belga_text',
            'type': 'text',
            'pubstatus': 'usable',
            'format': 'HTML',
            'headline': '',
            'subject': [
                {
                    'name': 'INT/GENERAL',
                    'qcode': 'INT/GENERAL',
                    'parent': 'INT',
                    'scheme': 'services-products'
                },
                {
                    'name': 'default',
                    'qcode': 'default',
                    'scheme': 'distribution'
                },
            ],
            'language': 'en',
            'keywords': ['some', 'keyword'],
            'body_html': ''
        },
        'template_type': 'create',
    }])
    item = {
        '_id': 'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564',
        'guid': 'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564',
        'headline': 'test headline',
        'slugine': 'test slugline',  # NOTE(review): likely a typo for 'slugline' — kept as-is
        'state': 'published',
        'type': 'text',
        'keywords': ['foo', 'bar'],
        'language': 'en'
    }
    self.app.data.insert('archive', [item])
    # set_default_metadata_with_translate signals completion by raising
    # StopDuplication once the duplicate has been created.
    self.assertRaises(StopDuplication,
                      set_default_metadata_with_translate,
                      item,
                      dest_desk_id=ObjectId('5d385f17fe985ec5e1a78b49'),
                      dest_stage_id=ObjectId('5d385f31fe985ec67a0ca583'))
    archive_service = get_resource_service('archive')
    new_item = archive_service.find_one(
        req=None,
        original_id=
        'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564'
    )
    self.assertNotIn('translated_from', new_item)
def edit_media(user_id=None):
    """Edit an existing media record (its uploaded file and/or metadata).

    :param user_id: pass user_id = 0 when the media belongs to the
        admin side (the system).
    :return: a dict with msg / msg_type / custom_status
    """
    if user_id is None:
        user_id = current_user.str_id
    media_id = request.argget.all("id")
    name = request.argget.all("name")
    link = request.argget.all("link")
    link_name = request.argget.all("link_name")
    link_open_new_tab = str_to_num(request.argget.all("link_open_new_tab", 1))
    title = request.argget.all("title")
    text = request.argget.all("text", "")
    text_html = request.argget.all("text_html", "")
    category_id = request.argget.all("category_id")
    s, r = arg_verify([("id", media_id)], required=True)
    if not s:
        return r
    s, r = arg_verify([(gettext("name"), name)], required=True)
    if not s:
        return r
    old_media = mdbs["web"].db.media.find_one({"_id": ObjectId(media_id)})
    # If this is only a picture update, store the uploaded image and return.
    if request.files:
        data = file_upload(return_key=True,
                           prefix="multimedia/{}/".format(old_media["type"]))
        if data["msg_type"] != "s":
            return data
        else:
            # Delete the old picture.
            file_del(old_media["url"])
            temp_url = None
            if "keys" in data:
                # Keep the last uploaded key as the new URL.
                for key in data["keys"]:
                    temp_url = key
            if temp_url:
                mdbs["web"].db.media.update_one(
                    {
                        "_id": ObjectId(media_id),
                        "user_id": user_id},
                    {"$set": {"url": temp_url}}
                )
                data = {
                    "msg": gettext("Update picture successfully"),
                    "msg_type": "s",
                    "custom_status": 201}
            else:
                data = {
                    "msg": gettext("Failed to update"),
                    "msg_type": "e",
                    "custom_status": 400}
        return data
    category = "Default"
    not_updated_category = False
    if category_id is None:
        # Do not update the category.
        not_updated_category = True
    elif category_id and category_id.lower() != "default":
        media_category = mdbs["web"].db.category.find_one(
            {"_id": ObjectId(category_id)})
        if media_category:
            category = media_category["name"]
    elif category_id.lower() == "default":
        category_id = ""
    # Handle updates of the remaining fields; reject a rename that would
    # collide with another media record of the same type.
    query = {
        "name": name,
        "type": old_media["type"],
        "_id": {
            "$ne": ObjectId(media_id)}}
    if mdbs["web"].db.media.find_one(query):
        type_alias = old_media["type"]
        for k, v in get_config("category", "CATEGORY_TYPE").items():
            if v == old_media["type"]:
                type_alias = k
                break
        data = {
            "msg": gettext('The type "{}" exists in the name "{}"').format(type_alias, name),
            "msg_type": "w",
            "custom_status": 403
        }
    else:
        # Collect the images referenced by text_html.
        old_imgs = old_media.get("text_imgs", [])
        if text_html:
            srcs = richtext_extract_img(richtext=text_html)
        else:
            srcs = []
        text_imgs = clean_tempfile(user_id=current_user.str_id,
                                   type="image",
                                   old_file=old_imgs,
                                   keey_file=srcs)
        info = {
            "name": name,
            "link": link,
            "link_name": link_name,
            "link_open_new_tab": link_open_new_tab,
            "title": title,
            "text": text,
            "text_html": text_html,
            "text_imgs": text_imgs
        }
        if not not_updated_category:
            info["category_id"] = category_id
            info["category"] = category
        r = mdbs["web"].db.media.update_one(
            {"_id": ObjectId(media_id), "user_id": user_id},
            {"$set": info})
        if r.modified_count:
            data = {
                "msg": gettext("Modify the success"),
                "msg_type": "s",
                "custom_status": 201}
        else:
            data = {
                "msg": gettext("The content is not modified"),
                "msg_type": "w",
                "custom_status": 400}
    return data
def remove():
    """Delete the task whose ``_id`` was submitted, then redirect home.

    NOTE(review): a missing/invalid ``_id`` makes ObjectId() raise —
    presumably the form always supplies one; confirm.
    """
    key = request.values.get("_id")
    # Collection.remove() is deprecated and removed in PyMongo 4;
    # delete_one matches the sibling helpers in this file.
    todos.delete_one({"_id": ObjectId(key)})
    return redirect("/")
def gridfs_del_data(self, obj_id):
    """Delete the GridFS file whose id is *obj_id*."""
    oid = ObjectId(obj_id)
    self.fs.delete(oid)
def gridfs_get_data(self, obj_id):
    """Return the raw bytes of the uploaded GridFS file *obj_id*."""
    grid_out = self.fs.get(ObjectId(obj_id))
    return grid_out.read()
def __init__(self, event: Dict):
    """Parse a device shadow event.

    Uses the 'reported' section when present, otherwise 'desired';
    extracts the device id and on/off status and keeps the raw event.
    """
    state = event["state"]
    if "reported" in state:
        self.action = "reported"
    else:
        self.action = "desired"
    payload = state[self.action]
    self.device_id = ObjectId(payload["device_id"])
    self.status = bool(payload["is_on"])
    self.raw_event = event
def gridfs_get_crawldata(self, obj_id):
    """Return the raw bytes of the stored crawl task *obj_id*."""
    grid_out = self.fs_crawl.get(ObjectId(obj_id))
    return grid_out.read()
def timer_fence(self, event: Dict) -> None:
    """Start a timer fence for the device named in the reported event.

    Does nothing when the device has no timer_fence configured
    (falsy or zero).
    """
    dev_id = event["state"]["reported"]["device_id"]
    device = self.devices_repository.get(ObjectId(dev_id))
    fence = device.timer_fence
    if not fence or fence == 0:
        return
    self.iot_repository.start_timer_fence(event, dev_id, fence)
def update_amazon_listing(self, data, product_id):
    """Updates a product using various sources of data.

    :param data: one update source (dict) or a sequence of them; each
        source is either an API-call result ({'action', 'params',
        'results'}) or a raw dict merged into the product as-is.
    :param product_id: the product's Mongo _id (str or ObjectId).
    :returns: the product id as a string.
    :raises ValueError: when no product with that id exists or an
        API-call source has an unrecognized 'action'.
    """
    # Local import: the file may only `import collections`, and the
    # `collections.Sequence` alias used before was removed in Py 3.10.
    from collections.abc import Sequence

    collection = self.db.products
    product_id = ObjectId(product_id)
    product = collection.find_one({'_id': product_id}, projection={'sku': 1})
    try:
        product_asin = product['sku']
    except (TypeError, KeyError):
        # BUGFIX: `product` is None for an unknown id (TypeError on
        # subscript) or lacks 'sku' (KeyError). The original caught
        # AttributeError, which neither case raises, so the intended
        # ValueError was never produced.
        raise ValueError(f'Invalid product id: {product_id}')

    # Separate the data sources into API call results and raw updates
    if not isinstance(data, Sequence):
        data = [data]
    api_calls = [
        source for source in data if 'action' in source and 'params' in source
    ]
    raw_updates = [source for source in data if source not in api_calls]

    # Process API calls first
    for api_call in api_calls:
        call_type = api_call['action']
        if call_type == 'ItemLookup':
            try:
                product.update(api_call['results'][product_asin])
            except KeyError:
                logger.debug(
                    f"API call {call_type} does not contain results for {product_asin}, ignoring..."
                )
        elif call_type == 'GetCompetitivePricingForASIN':
            try:
                landed_price = api_call['results'][product_asin].get(
                    'landed_price', None)
                listing_price = api_call['results'][product_asin].get(
                    'listing_price', None)
                shipping = api_call['results'][product_asin].get(
                    'shipping', None)
                # NOTE(review): when landed_price is absent, listing_price
                # and shipping are both assumed present — otherwise this
                # raises TypeError. Preserved as original behavior.
                product[
                    'price'] = landed_price if landed_price is not None else listing_price + shipping
                product['offers'] = api_call['results'][product_asin].get(
                    'offers', None)
            except KeyError:
                logger.debug(
                    f"API call {call_type} does not contain results for {product_asin}, ignoring..."
                )
        elif call_type == 'GetMyFeesEstimate':
            try:
                product['price'] = api_call['results'][product_asin]['price']
                product['market_fees'] = api_call['results'][product_asin][
                    'total_fees']
            except KeyError:
                logger.debug(
                    f"API call {call_type} does not contain results for {product_asin}, ignoring..."
                )
                continue
        else:
            raise ValueError(f"Unrecognized API call: {call_type}")

    # Process raw updates
    for raw_data in raw_updates:
        product.update(raw_data)

    # Write the accumulated fields back to the DB
    collection.find_one_and_update(
        filter={'_id': product_id},
        update={'$set': {
            **product
        }},
    )
    return str(product_id)
def get_group_name_id(group_name_or_id, get_obj=False):
    '''
    Taken from https://github.com/gnowledge/gstudio/blob/master/
    gnowsys-ndf/gnowsys_ndf/ndf/views/methods.py

    - This method takes possible group name/id as an argument and
      returns (group-name and id) or group object.

    - If no second argument is passed, as method name suggests,
      returned result is "group_name" first and "group_id" second.

    - When we need the entire group object, just pass second argument
      as boolian) True. In the case group object will be returned.

    Example 1: res_group_name, res_group_id = get_group_name_id(
        group_name_or_id)
    - "res_group_name" will contain name of the group.
    - "res_group_id" will contain _id/ObjectId of the group.

    Example 2: res_group_obj = get_group_name_id(group_name_or_id,
        get_obj=True)
    - "res_group_obj" will contain entire object.

    Optimization Tip: before calling this method, try to cast group_id
    to ObjectId as follows (or copy paste following snippet at start of
    function or wherever there is a need):
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)
    '''
    cache = Cache('/tmp/clix-research-data')
    # if cached result exists return it
    if not get_obj:
        slug = slugify(group_name_or_id)
        # for unicode strings like hindi-text slugify doesn't works
        # NOTE(review): parses as ('get_group_name_id_' + str(slug)) if
        # slug else str(...) — intended, given the comment above.
        cache_key = 'get_group_name_id_' + str(slug) if slug else str(
            abs(hash(group_name_or_id)))
        cache_result = cache.get(cache_key)
        if cache_result:
            return (cache_result[0], ObjectId(cache_result[1]))
    # ---------------------------------
    MC = MongoClient(host=MONGO_DB_HOST, port=MONGO_DB_PORT)
    GSTUDIO_DB = MC['gstudio-mongodb']
    # case-1: argument - "group_name_or_id" is ObjectId
    if ObjectId.is_valid(group_name_or_id):
        group_obj = GSTUDIO_DB['Nodes'].find_one(
            {"_id": ObjectId(group_name_or_id)})
        # checking if group_obj is valid
        if group_obj:
            # if (group_name_or_id == group_obj._id):
            group_id = group_name_or_id
            group_name = group_obj['name']
            if get_obj:
                return group_obj
            else:
                # setting cache with both ObjectId and group_name
                cache.set(cache_key, (group_name, group_id), 60 * 60)
                cache_key = u'get_group_name_id_' + slugify(group_name)
                cache.set(cache_key, (group_name, group_id), 60 * 60)
                return group_name, group_id
    # case-2: argument - "group_name_or_id" is group name
    else:
        group_obj = GSTUDIO_DB['Nodes'].find_one({
            "_type": {
                "$in": ["Group", "Author"]
            },
            "name": unicode(group_name_or_id)
        })
        # checking if group_obj is valid
        if group_obj:
            # if (group_name_or_id == group_obj.name):
            group_name = group_name_or_id
            group_id = group_obj['_id']
            if get_obj:
                return group_obj
            else:
                # setting cache with both ObjectId and group_name
                cache.set(cache_key, (group_name, group_id), 60 * 60)
                cache_key = u'get_group_name_id_' + slugify(group_name)
                cache.set(cache_key, (group_name, group_id), 60 * 60)
                return group_name, group_id
    # Fall-through: nothing matched.
    if get_obj:
        return None
    else:
        return None, None
{'$group': {'_id': '$symbol', 'deleted': {'$first': '$metadata.deleted'}}}, {'$match': {'deleted': {'$ne': True}}}, {'$project': {'_id': 0, 'symbol': '$_id'}} ] versions.aggregate.assert_called_once_with(pipeline) def test_snapshot_duplicate_raises_exception(): vs = create_autospec(VersionStore, _snapshots=Mock()) with pytest.raises(DuplicateSnapshotException) as e: vs._snapshots.find_one.return_value = True VersionStore.snapshot(vs, 'symbol') assert "Snapshot 'symbol' already exists" in str(e.value) TPL_VERSION = {'_id': ObjectId('5a2ffdf817f7041a4ff1aa82'), 'base_version_id': ObjectId('5a2ffd5917f70412ca78d80a'), 'append_count': 0, 'dtype_metadata': { 'index': ['index'], 'columns': ['A', 'B', 'C', 'D']}, 'segment_count': 1, 'symbol': 'SYM_E', 'up_to': 3, 'metadata': None, 'shape': [-1], 'version': 6, 'type': 'pandasdf', 'append_size': 0 }
def getObjectID(self, _id):
    """Convert *_id* (string or ObjectId) into a BSON ObjectId."""
    oid = ObjectId(_id)
    return oid
def add_media(user_id=None):
    """Create one or more media records, optionally storing uploaded files.

    :param user_id: pass user_id = 0 when the media belongs to the
        admin side (the system).
    :return: a dict with msg / msg_type / custom_status
    """
    if user_id is None:
        user_id = current_user.str_id
    batch = request.argget.all("batch", False)
    name = request.argget.all("name")
    link = request.argget.all("link")
    link_open_new_tab = str_to_num(request.argget.all("link_open_new_tab", 1))
    link_name = request.argget.all("link_name")
    title = request.argget.all("title")
    text = request.argget.all("text", "")
    text_html = request.argget.all("text_html", "")
    ctype = request.argget.all("ctype")
    category_id = request.argget.all("category_id")
    data = {}
    category = "Default"
    if category_id and category_id.lower() != "default":
        media_category = mdbs["web"].db.category.find_one(
            {"_id": ObjectId(category_id)})
        if media_category:
            category = media_category["name"]
    elif not category_id or category_id.lower() == "default":
        category_id = ""
    s, r = arg_verify([(gettext("type"), ctype)],
                      only=get_config("category", "CATEGORY_TYPE").values())
    if not s:
        return r
    s, r = arg_verify([(gettext("name"), name)], required=True)
    # In batch mode the name is generated per file, so it may be empty here.
    if not s and not batch:
        return r
    # If files were uploaded, store them first.
    if request.files:
        data = file_upload(return_key=True,
                           prefix="multimedia/{}/".format(ctype))
        if data["msg_type"] != "s":
            return data
    # Reject a duplicate (name, type) pair — but only outside batch mode.
    if not batch and mdbs["web"].db.media.find_one({"name": name, "type": ctype}):
        type_alias = ctype
        for k, v in get_config("category", "CATEGORY_TYPE").items():
            if v == ctype:
                type_alias = k
                break
        data = {
            "msg": gettext('The type "{}" exists in the name "{}"').format(type_alias, name),
            "msg_type": "w",
            "custom_status": 403}
    else:
        # Collect the images referenced by text_html.
        text_imgs = []
        if text_html:
            srcs = richtext_extract_img(richtext=text_html)
        else:
            srcs = []
        text_imgs = clean_tempfile(user_id=current_user.str_id,
                                   type="image",
                                   keey_file=srcs)
        info = {
            "category": category,
            "category_id": category_id,
            "link": link,
            "link_open_new_tab": link_open_new_tab,
            "link_name": link_name,
            "title": title,
            "text": text,
            "text_html": text_html,
            "text_imgs": text_imgs,
            "type": ctype,
            "time": time.time(),
            "user_id": user_id
        }
        if "keys" in data:
            # One record per uploaded file, each with a unique name.
            # NOTE(review): the same `info` dict is mutated and re-inserted
            # per key; insert_one also adds an `_id` to it each round.
            for key in data["keys"]:
                rand_name = "{}_{}".format(name, uuid1())
                info["name"] = rand_name
                info["url"] = key
                mdbs["web"].db.media.insert_one(info)
            data["msg"] = gettext("{} uploaded successfully").format(
                ctype.capitalize())
        else:
            info["name"] = name
            info["url"] = None
            mdbs["web"].db.media.insert_one(info)
            data["msg"] = gettext("Added successfully").format(
                ctype.capitalize())
        data["msg_type"] = "s"
        data["custom_status"] = 201
    return data
# Users # Vacany User if returned["email"] == []: if returned["phone"] == []: user_object_id = 100000000000000000000000 else: check = userdb.find_one({"phones" : returned["phone"]}) if check is None: new_user_info = { "phones" : returned["phone"], "company_id" : ObjectId(f"{company_object_id}"), "created_at" : datetime.datetime.utcnow() } userdb.insert(new_user_info) user_object_id = userdb.find_one({"phones" : returned["phone"]}) user_object_id = user_object_id["_id"] print(user_object_id) else: user_object_id = userdb.find_one({"phones" : returned["phone"]}) user_object_id = user_object_id["_id"] print(user_object_id) else: if returned["phone"] == []: check = userdb.find_one({"email" : returned["email"][0]}) if check is None: new_user_info = {
def update():
    """Render the update form for the task matching the posted ``_id``."""
    doc_id = request.values.get("_id")
    matching = todos.find({"_id": ObjectId(doc_id)})
    return render_template('update.html', tasks=matching, h=heading, t=title)
from epidermal import db
from epidermal.cleanup_old_datasets import find_old_datasets
from bson import ObjectId
from datetime import datetime

if __name__ == '__main__':
    # Bump one specific dataset's date_accessed (upsert=False: only
    # touch it if it already exists) — presumably to keep it out of the
    # old-dataset sweep below; confirm against cleanup_old_datasets.
    db.datasets.update_one(
        {'_id': ObjectId('5d19863fbca3b773ba9601a4')},
        {'$set': {
            'date_accessed': datetime(year=2019, month=5, day=31)
        }},
        upsert=False)
    # Dry run: only print the names of datasets that would be deleted.
    for dbid in find_old_datasets():
        print 'del', db.get_dataset_by_id(dbid)['name']
def enhance_coverage(planning, item, users):
    """Annotate *item* with assignee/coverage details from a planning doc.

    Appends provider names to item['assignees']/item['text_assignees'],
    collects assigned users into *users*, and for completed text
    coverages pulls the published story's first paragraph into
    item['published_archive_items'].

    NOTE(review): `desks`, `text_users`, `text_desks` and
    `archive_service` are not defined here or in the parameters —
    presumably this is a closure over an enclosing scope (or they are
    module globals); confirm before reusing standalone.
    """
    for c in (planning.get('coverages') or []):
        is_text = c.get('planning', {}).get('g2_content_type', '') == 'text'
        completed = (c.get('assigned_to') or {}).get('state') == ASSIGNMENT_WORKFLOW_STATE.COMPLETED
        assigned_to = c.get('assigned_to') or {}
        user = None
        desk = None
        if assigned_to.get('coverage_provider'):
            item['assignees'].append(assigned_to['coverage_provider']['name'])
            if is_text and not completed:
                item['text_assignees'].append(assigned_to['coverage_provider']['name'])
        elif assigned_to.get('user'):
            user = assigned_to['user']
            users.append(user)
        elif assigned_to.get('desk'):
            desk = assigned_to.get('desk')
            desks.append(desk)
        # Get abstract from related text item if coverage is 'complete'
        if is_text:
            if completed:
                results = list(archive_service.get_from_mongo(req=None, lookup={
                    'assignment_id': ObjectId(
                        c['assigned_to']['assignment_id']),
                    'state': {'$in': ['published', 'corrected']},
                    'pubstatus': 'usable',
                    'rewrite_of': None
                }))
                if len(results) > 0:
                    item['published_archive_items'].append({
                        'archive_text': get_first_paragraph_text(results[0].get('abstract')) or '',
                        'archive_slugline': results[0].get('slugline') or ''
                    })
            elif c.get('news_coverage_status', {}).get('qcode') == 'ncostat:int':
                # Coverage intended but not yet complete: remember who
                # still owes the text.
                if user:
                    text_users.append(user)
                else:
                    text_desks.append(desk)
    item['contacts'] = get_contacts_from_item(item)
def to_python(self, value):
    """Coerce *value* into a BSON ObjectId."""
    oid = ObjectId(value)
    return oid
class FooOneExtended(FooSpanAnnotation):
    # Marker subclass used by the tests; adds no behavior.
    pass


class FooTwoExtended(FooSpanAnnotation):
    # Second marker subclass; adds no behavior.
    pass


class BlahDocAnnotation(DocumentAnnotation):
    # Document-level annotation carrying one extra int property.
    prop_c: int = 42


# Fixed, pre-generated ObjectId strings so fixtures stay deterministic
# across test runs.
ANNOTATION_IDS = [
    str(ObjectId('5d657267b2870f18471ad12e')),
    str(ObjectId('5d657267b2870f18471ad12f')),
    str(ObjectId('5d657267b2870f18471ad130')),
    str(ObjectId('5d657267b2870f18471ad131')),
    str(ObjectId('5d657267b2870f18471ad132')),
    str(ObjectId('5d657267b2870f18471ad133')),
    str(ObjectId('5d657267b2870f18471ad134'))
]


def test_typesys():
    """Each annotation base class reports its expected scope."""
    assert Annotation().scope == AnnotationScope.UNKNOWN
    assert SpannedAnnotation().scope == AnnotationScope.SPAN
    assert DocumentAnnotation().scope == AnnotationScope.DOCUMENT
# update portal portalPack = customerPack.portal_package content = portalPack.package.read() local_package_path ='../media/temp/portal_%s.tar.gz' % portalPack.svn_version try: fp = open(local_package_path, 'wb') fp.write(content) fp.close() print 'successfully download protal install package to ../media/temp/' except: print '下载portal包失败' return {'success': False, 'msg': '下载portal安装包失败'} for machine in customerPack.machines: # 建立连接 client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) print 'machine info. host: %s, username: %s, password: %s' %(machine.host, machine.username, machine.password) client.connect(machine.host, username=machine.username, password=machine.password) res = portal_update(client, customerPack.customer, machine, local_package_path) res_list.append(res) if not res['success']: return res_list # 关闭 ssh 连接 client.close() os.remove(local_package_path) return res_list if __name__ == "__main__": update_root_path = '/home/rzrk/update/' customerPack = CustomerPackage.objects.get(pk=ObjectId("53cf7b8efa03212e28ce1a2c")) res_list = update_customer_packages(customerPack) print res_list
def test_belga_keywords(self):
    """Duplicating with translate must keep 'belga-keywords' subjects:
    the item's subject list survives the duplication unchanged.
    """
    # Destination desk with a default content profile/template.
    self.app.data.insert(
        'desks', [{
            '_id': ObjectId('5d385f17fe985ec5e1a78b49'),
            'name': 'Politic Desk',
            'default_content_profile': 'belga_text',
            'default_content_template': 'content_template_1',
            'desk_language': 'fr',
            'source': 'politic'
        }])
    # Incoming stage belonging to that desk.
    self.app.data.insert('stages', [{
        '_id': ObjectId('5d385f31fe985ec67a0ca583'),
        'name': 'Incoming Stage',
        'default_incoming': True,
        'desk_order': 2,
        'content_expiry': None,
        'working_stage': False,
        'is_visible': True,
        'desk': ObjectId('5d385f17fe985ec5e1a78b49')
    }])
    # Vocabulary defining the belga-keywords CV with translations.
    self.app.data.insert('vocabularies', [{
        "_id": "belga-keywords",
        "display_name": "Belga Keywords",
        "type": "manageable",
        "selection_type": "multi selection",
        "unique_field": "qcode",
        "schema": {
            "name": {},
            "qcode": {},
            "translations": {}
        },
        "service": {
            "all": 1
        },
        "items": [{
            "name": "BRIEF",
            "qcode": "BRIEF",
            "is_active": True,
            "translations": {
                "name": {
                    "nl": "BRIEF",
                    "fr": "BRIEF"
                }
            }
        }, {
            "name": "PREVIEW",
            "qcode": "PREVIEW",
            "is_active": True,
            "translations": {
                "name": {
                    "nl": "VOORBERICHT",
                    "fr": "AVANT-PAPIER"
                }
            }
        }]
    }])
    # Template supplying the default metadata.
    self.app.data.insert('content_templates', [{
        '_id': 'content_template_1',
        'template_name': 'belga text',
        'is_public': True,
        'data': {
            'profile': 'belga_text',
            'type': 'text',
            'pubstatus': 'usable',
            'format': 'HTML',
            'headline': '',
            'language': 'en',
            'keywords': ['some', 'keyword'],
            'body_html': ''
        },
        'template_type': 'create',
    }])
    item = {
        '_id': 'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564',
        'guid': 'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564',
        'headline': 'test headline',
        'slugine': 'test slugline',  # NOTE(review): likely a typo for 'slugline' — kept as-is
        'state': 'published',
        'type': 'text',
        "subject": [{
            'name': 'BRIEF',
            'qcode': 'BRIEF',
            'translations': {
                'name': {
                    'nl': 'BRIEF',
                    'fr': 'BRIEF'
                }
            },
            'scheme': 'belga-keywords'
        }],
        'keywords': ['foo', 'bar'],
        'language': 'fr'
    }
    self.app.data.insert('archive', [item])
    # StopDuplication signals that the duplicate was created.
    self.assertRaises(StopDuplication,
                      set_default_metadata_with_translate,
                      item,
                      dest_desk_id=ObjectId('5d385f17fe985ec5e1a78b49'),
                      dest_stage_id=ObjectId('5d385f31fe985ec67a0ca583'))
    archive_service = get_resource_service('archive')
    new_item = archive_service.find_one(
        req=None,
        original_id=
        'urn:newsml:localhost:5000:2019-12-10T14:43:46.224107:d13ac5ae-7f43-4b7f-89a5-2c6835389564'
    )
    self.assertEqual(item["subject"], new_item["subject"])
def get_nypd(self, nypd_id):
    """Look up an NYPD record by id; return None when not found."""
    document = self.filtered_nypd.find_one({c.ID: ObjectId(nypd_id)})
    if document is None:
        return None
    return nypd_from_document(document)
def _get_valid_id(self, id):
    """Return *id* as an ObjectId when valid, else the value unchanged."""
    try:
        oid = ObjectId(id)
    except InvalidId:
        return id
    return oid
def remove_data(document_id):
    """Delete the document with *document_id*; return the ack flag."""
    result = collection.delete_one({'_id': ObjectId(document_id)})
    return result.acknowledged
'$gte': todayTimeStamp, '$lte': endTodayTimeStamp } }, SELECT=['diallist_id']) # if row['account_number'] == '0020020000001184': # print(diallistDetail) if diallistDetail != None: diallist = mongodb.getOne( MONGO_COLLECTION=diallist_collection, WHERE={'_id': diallistDetail['diallist_id']}, SELECT=['group_id']) if diallist != None: group = mongodb.getOne( MONGO_COLLECTION=group_collection, WHERE={'_id': ObjectId(diallist['group_id'])}, SELECT=['lead']) if group != None: if row['COMPANY'] == '': row['COMPANY'] = str(group['lead']) user = _mongodb.getOne( MONGO_COLLECTION=user_collection, WHERE={'extension': str(group['lead'])}, SELECT=['agentname']) if user != None: row['COMPANY'] += '-' + user['agentname'] # print(row['COMPANY']) else: diallist = mongodb.getOne( MONGO_COLLECTION=diallistDetail_collection,
def gridfs_del_crawldata(self, obj_id):
    """Delete the stored crawl task *obj_id*.

    (The original comment said "get crawl task"; this method deletes.)
    """
    oid = ObjectId(obj_id)
    self.fs_crawl.delete(oid)