Example #1
 def get_last_1day_period_activity(self):
     # Fetch the last 24 hours of activity from the feed backend and
     # return the per-period counts from the histogram facet.
     f = Feed()
     data = []
     result = json.loads(f.get_last_1day_period_activity())
     entries = result['facets']['histo1']['entries']
     if entries:
         data = [entry["count"] for entry in entries]
     return data
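
For reference, the parsing above assumes an Elasticsearch date-histogram facet response shaped roughly like the sketch below. The facets/histo1/entries/count keys match the code; the time key and the bucket values are assumed from the standard facet format and are illustrative only:

 sample_result = {
     "facets": {
         "histo1": {
             "entries": [
                 {"time": 1400000000000, "count": 7},   # one bucket per period
                 {"time": 1400003600000, "count": 12},
             ]
         }
     }
 }
 # get_last_1day_period_activity() would then return [7, 12].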
Example #2
 def put_native_post(self, lat, lon, text, image_data_url, file_extn, cityname):
     # Save the uploaded image to disk, then index the post document.
     f = Feed()
     user_id = 0
     username = '******'
     current_utc = Date().get_utcnow_str()
     # Build a unique filename from the user id and a UTC timestamp.
     filename = '{0}_{1}.{2}'.format(user_id, Date().get_utcnow_number(), file_extn)
     uploaded_img_url = '/static/uploads/{0}'.format(filename)
     # Only index the document if the image was saved successfully.
     if Img().save(uploaded_img_url, image_data_url):
         f.create_native_document(user_id, '/static/images/user_placeholder.png', text, lat, lon,
                                  current_utc, cityname, uploaded_img_url, username)
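
A hypothetical call site for reference; the PostService class name, the coordinates, and the truncated data URL are invented for illustration:

 svc = PostService()  # hypothetical enclosing class
 svc.put_native_post(40.7128, -74.0060, "Hello from NYC",
                     "data:image/png;base64,iVBOR...", "png", "New York")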
Example #3
 def categorize_post(self, document_id, category_id):
     # Look up the post and, if found, tag it with the given category.
     f = Feed()
     data = self.get_post_by_id(document_id)
     if not data:
         return False
     d_index = data["_index"]
     d_doctype = data["_type"]
     d_id = document_id
     f.categorize_by_document_id(d_index, d_doctype, d_id, category_id)
     return True
Example #4
 def like_post(self, document_id, increment):
     # Adjust a post's up-vote count by the given increment.
     f = Feed()
     data = self.get_post_by_id(document_id)
     if not data:
         return None
     d_index = data["_index"]
     d_doctype = data["_type"]
     d_id = document_id
     fields = data["fields"]
     fields = self.increment_upvote(fields, increment)
     # TODO: convert the delete/create pair below into an UPDATE, like categorize_post()
     f.delete_by_document_id(d_index, d_doctype, d_id)
     f.create_document(index_name=d_index, doc_type=d_doctype, document_id=d_id,
                       json_body=json.dumps(fields))
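
As the TODO notes, the delete/create pair can be replaced by a partial update. Below is a minimal sketch of what such a helper on Feed might look like, assuming Feed wraps the elasticsearch-py client; the es attribute and the helper name are assumptions, not the project's actual API:

 def update_fields_by_document_id(self, index_name, doc_type, document_id, fields):
     # Hypothetical Feed helper; assumes self.es is an elasticsearch-py client.
     # A partial update merges the given fields into the stored document
     # without a delete/re-index round trip.
     self.es.update(index=index_name, doc_type=doc_type, id=document_id,
                    body={"doc": fields})

like_post() could then call f.update_fields_by_document_id(d_index, d_doctype, d_id, fields) in place of the delete/create pair.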
Example #5
 def get_feed_around_coord(self, from_datetime, coord, q_from, q_size, encoded_tags, radius, sort, filterdays):
     # Query the feed for posts near a coordinate and map the raw
     # Elasticsearch hits into Post objects.
     f = Feed()
     data = []
     result = json.loads(f.get_feed_around_coord(from_datetime, coord, q_from, q_size, encoded_tags,
                                                 radius, sort, filterdays))
     if result["hits"]["total"] > 0:
         for p in result["hits"]["hits"]:
             field = p["fields"]
             try:
                 url_util = Url()
                 media_url = url_util.get_url_from_string(field.get("content_img_url")[0])
                 if not media_url:
                     # Fall back to any URL embedded in the text field.
                     media_url = url_util.get_url_from_string(field.get("text")[0].encode("utf-8"))
                 data.append(Post(p["_id"], field.get("post_id")[0],
                                  field.get("text")[0].encode("utf-8"),
                                  Date().get_obj(field.get("@timestamp")[0]),
                                  media_url, field.get("user_img_url")[0],
                                  field.get("type")[0], field.get("user_id")[0],
                                  field.get("place_name")[0], field.get("coord")[0],
                                  field.get("username")[0], field.get("up_votes")[0],
                                  0, field.get("distance")[0]))
             except Exception as e:
                 # The fetcher engine and logstash must ensure that clean data
                 # conforming to the Post object gets into Elasticsearch.
                 logging.exception(e)
                 logging.exception(p)
     return data
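
For context, each hit is expected to carry its fields as single-element lists (Elasticsearch returns requested fields as arrays, hence the [0] indexing above). A sketch of the shape this loop consumes; the field names come from the code, while the values are illustrative only:

 sample_hit = {
     "_id": "abc123",
     "fields": {
         "post_id": ["42"],
         "text": ["hello world"],
         "@timestamp": ["2015-01-01T00:00:00Z"],
         "content_img_url": ["http://example.com/pic.jpg"],
         "user_img_url": ["/static/images/user_placeholder.png"],
         "type": ["native"],
         "user_id": ["0"],
         "place_name": ["New York"],
         "coord": ["40.7128,-74.0060"],
         "username": ["someuser"],
         "up_votes": [3],
         "distance": [1.2],
     },
 }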