def sync_associations(): session = db_session() update_query = Question.__table__.update().values(is_associated=True).\ where(and_(Question.is_associated==False, Question.question_id.in_(select([Association.soen_id]).\ distinct().\ as_scalar()))) session.execute(update_query) session.commit() session.close() session = db_session() reverse_update_query = Question.__table__.update().values(is_associated=False).\ where(and_(Question.is_associated==True, ~Question.question_id.in_(select([Association.soen_id]).\ distinct().\ as_scalar()))) session.execute(reverse_update_query) session.commit() session.close() print "All associations were synced"
def update_most_viewed():
    """Fold uncounted QuestionViewHistory rows into "most viewed" Questions.

    Works in frames of 1000 rows.  Each processed history row is flagged
    counted=True and committed, which is what makes the fixed offset(0)
    below yield a fresh frame on every loop iteration.  Finishes by
    syncing association flags.
    """
    reader_session = db_session()
    # Total rows still to process (counted==False and above the view threshold).
    question_count = reader_session.query(func.count(QuestionViewHistory.id)).\
        filter(and_(QuestionViewHistory.counted==False,
                    QuestionViewHistory.view_count>=MINIMUM_VIEW_COUNT_TO_ADD)).\
        scalar()
    # Same filter as the count; re-executed each frame via .offset(0).limit(...).
    query = reader_session.query(QuestionViewHistory.id,
                                 QuestionViewHistory.question_id,
                                 QuestionViewHistory.view_count).\
        filter(and_(QuestionViewHistory.counted==False,
                    QuestionViewHistory.view_count>=MINIMUM_VIEW_COUNT_TO_ADD))
    frame_size = 1000
    progress_index = 0
    counter = 0
    print "Questions to update: %s, frame size: %s" % (question_count, frame_size)
    # <= so the final partial frame is still fetched.
    while counter <= question_count:
        # offset(0) is intentional: rows committed as counted=True drop out of
        # the filter, so the "first" frame is always the next unprocessed one.
        # NOTE(review): assumes the reader session sees the writer's commits —
        # depends on the transaction isolation level; confirm for this DB.
        all_questions = query.offset(0).limit(frame_size).all()
        counter = counter + frame_size
        # NOTE(review): "wiriter" is a typo for "writer"; local-only, harmless.
        wiriter_session = db_session()
        for question in all_questions:
            record_id, question_id, view_count = question
            # Upsert the aggregate "most viewed" question row.
            most_viewed_question = wiriter_session.query(Question).filter_by(
                question_type=Question.question_type_most_viewed).filter_by(
                question_id=question_id).first()
            if most_viewed_question is None:
                most_viewed_question = Question(
                    Question.question_type_most_viewed, question_id, view_count)
                wiriter_session.add(most_viewed_question)
            else:
                most_viewed_question.view_count += view_count
            # Mark the history row as consumed so it leaves the query filter.
            qh = wiriter_session.query(QuestionViewHistory).filter_by(
                id=record_id).first()
            qh.counted = True
            wiriter_session.add(qh)
            print_progress_bar(progress_index, question_count,
                               prefix='Progress:', suffix='Complete')
            progress_index += 1
        # Commit per frame; required for the offset(0) pagination to advance.
        wiriter_session.commit()
        wiriter_session.close()
    print "All questions were counted"
    sync_associations()
def setting_string():
    """Build a comma-separated "soen=soint" mapping string.

    Only the first pair seen for any given soen or soint id is kept,
    and pairs where either id is not positive are skipped.

    Returns:
        str like "1=10,2=20" (empty string when there are no pairs).
    """
    pg_session = db_session()
    try:
        pairs = pg_session.query(Association.soen_id,
                                 Association.soint_id).distinct().all()
    finally:
        # Always release the session, even if the query raises.
        pg_session.close()
    parts = []
    seen_soen = set()
    seen_soint = set()
    for soen, soint in pairs:
        # O(1) membership tests replace the original O(n) list scan.
        if soen in seen_soen or soint in seen_soint:
            continue
        if int(soen) > 0 and int(soint) > 0:
            parts.append(str(soen) + "=" + str(soint))
            seen_soen.add(soen)
            seen_soint.add(soint)
    # join() fixes the quadratic += concatenation and the index/count
    # bookkeeping that could leave a trailing comma when late pairs
    # were skipped as duplicates.
    return ",".join(parts)
def all(site_id):
    """Return every Activity for *site_id*, oldest first.

    NOTE(review): shadows the builtin ``all``; presumably accessed via its
    module namespace by callers, so the name is kept.
    """
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Activity).filter_by(site_id=site_id).order_by(
            asc(Activity.creation_date)).all()
    finally:
        session.close()
def by_id(user_id):
    """Return the User with *user_id*, or None when absent."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(User).filter_by(id=user_id).order_by(
            desc(User.creation_date)).first()
    finally:
        session.close()
def all_for_activity(activity_id):
    """Return all Actions of *activity_id*, oldest first."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Action).filter_by(
            activity_id=activity_id).order_by(asc(Action.creation_date)).all()
    finally:
        session.close()
def activists(activity_id):
    """Return the distinct Users that performed an Action in *activity_id*."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(User).join(Action).filter(
            Action.activity_id == activity_id).distinct().all()
    finally:
        session.close()
def update(self):
    """Scan the CSV dump, derive question length/word-count bounds, persist them.

    Reads every row of ``self.filename`` (column 5 is the question body),
    collects body lengths and tokenized word counts, takes the values at the
    1/divider_coefficient and (divider_coefficient-1)/divider_coefficient
    positions of each sorted list as min/max bounds, and stores everything
    as a DBStaticAssessment row.

    NOTE(review): raises IndexError on an empty CSV (original behavior kept);
    the misspelled attribute names (mimimum_/vocaboalary) are part of the
    object's public state and therefore preserved.
    """
    with open(self.filename, 'rt', encoding="utf8") as csvfile:
        question_lengths = list()
        vocaboalary_sizes = list()
        csv_reader = csv.reader(csvfile, delimiter=',')
        for row in csv_reader:
            _, _, _, _, body, _ = row
            self.question_count += 1
            question_lengths.append(len(body))
            # Tokenize with the same parameters used for real messages,
            # so the collected statistics stay comparable.
            vocaboalary = process_text(body, True, 2)
            vocaboalary = tf.compat.as_str(vocaboalary).split()
            vocaboalary_sizes.append(len(vocaboalary))
    question_lengths = sorted(question_lengths)
    question_lengths_length = len(question_lengths)
    vocaboalary_sizes = sorted(vocaboalary_sizes)
    vocaboalary_sizes_length = len(vocaboalary_sizes)
    # Lower bound at 1/d of the distribution, upper bound at (d-1)/d.
    self.mimimum_question_length = question_lengths[
        question_lengths_length // self.divider_coefficient]
    self.maximum_question_length = question_lengths[
        question_lengths_length // self.divider_coefficient *
        (self.divider_coefficient - 1)]
    self.mimimum_question_word_count = vocaboalary_sizes[
        vocaboalary_sizes_length // self.divider_coefficient]
    self.maximum_question_word_count = vocaboalary_sizes[
        vocaboalary_sizes_length // self.divider_coefficient *
        (self.divider_coefficient - 1)]
    static_assessment = DBStaticAssessment(
        self.question_count, self.mimimum_question_length,
        self.maximum_question_length, self.mimimum_question_word_count,
        self.maximum_question_word_count)
    session = db_session()
    try:
        # try/finally fixes a session leak when add/commit raises.
        session.add(static_assessment)
        session.commit()
    finally:
        session.close()
def by_id(action_id):
    """Return the Action with *action_id*, or None when absent."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Action).filter_by(id=action_id).order_by(
            asc(Action.creation_date)).first()
    finally:
        session.close()
def last(site_id):
    """Return the newest Post for *site_id*, or None when there is none."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Post).filter_by(site_id=site_id).order_by(
            desc(Post.creation_date)).first()
    finally:
        session.close()
def ban_user(user_id):
    """Toggle the suspended flag of a user.  Moderator-only endpoint.

    Moderators that are not already banned cannot be targeted.  Responds
    with a JSON status payload in every case.
    """
    if g.user is None or g.user.role != 'moderator':
        abort(404)
    try:
        user_id = int(user_id)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures are expected.
        return jsonify(**{"status": False, "msg": gettext("Wrong params")})
    another_user = User.by_id(user_id)
    if another_user is None or (another_user.role == 'moderator'
                                and not another_user.is_banned):
        return jsonify(**{"status": False, "msg": gettext("Wrong params")})
    current_status = another_user.is_banned
    another_user.is_banned = not another_user.is_banned
    pg_session = db_session()
    try:
        # try/finally fixes a session leak when commit raises.
        pg_session.add(another_user)
        pg_session.commit()
    finally:
        pg_session.close()
    return jsonify(
        **{
            "status": True,
            "msg": gettext("The user was %s" %
                           (gettext("suspended")
                            if not current_status else gettext("unsuspended")))
        })
def by_id(event_id):
    """Return the Event with *event_id*, or None when absent."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Event).filter_by(id=event_id).order_by(
            asc(Event.creation_date)).first()
    finally:
        session.close()
def last_by_user(user_id):
    """Return the most recent Action of *user_id*, or None when absent."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Action).filter_by(user_id=user_id).order_by(
            desc(Action.creation_date)).first()
    finally:
        session.close()
def by_user_and_action(user_id, action_id):
    """Return the oldest Verification for (*user_id*, *action_id*), or None."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Verification).filter_by(
            user_id=user_id, action_id=action_id).order_by(
            asc(Verification.creation_date)).first()
    finally:
        session.close()
def by_site_id_and_activity_type(site_id, activity_type):
    """Return the oldest Activity for (*site_id*, *activity_type*), or None."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Activity).filter_by(
            site_id=site_id, activity_type=activity_type).order_by(
            asc(Activity.creation_date)).first()
    finally:
        session.close()
def get_last(model_name):
    """Return the dump filename of the newest TFModel named *model_name*.

    Returns:
        The ``dump_filename`` of the highest-id matching row, or None when
        no such model exists (the original crashed with AttributeError).
    """
    session = db_session()
    try:
        model = session.query(TFModel).filter(
            TFModel.model_name == model_name).order_by(desc(
                TFModel.id)).first()
    finally:
        # try/finally fixes a session leak when the query raises.
        session.close()
    return model.dump_filename if model is not None else None
def by_meta_post_id(meta_post_id, site_id):
    """Return the oldest Event for (*meta_post_id*, *site_id*), or None."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Event).filter(
            Event.meta_post_id == meta_post_id,
            Event.site_id == site_id).order_by(
            asc(Event.creation_date)).first()
    finally:
        session.close()
def paginate_type(type_id, page_num, per_page=2):
    """Paginate (extra, added) columns of JSONObjectData rows of *type_id*.

    Rows are ordered newest-first by ``added``; returns whatever
    paginate_helper() produces for the requested page.
    """
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        query = session.query(JSONObjectData.extra.label('extra'),
                              JSONObjectData.added.label('added')).\
            filter(JSONObjectData.type_id == type_id).\
            order_by(desc(JSONObjectData.added))
        return paginate_helper(query, page_num, per_page)
    finally:
        session.close()
def user_attend_times(user_id, site_id):
    """Count non-canceled attendee records of *user_id* on *site_id* events."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(func.count(Activist.id)).join(Event).filter(
            Activist.user_id == user_id,
            Activist.role == Activist.role_attendee,
            Activist.canceled == False,
            Event.site_id == site_id).scalar()
    finally:
        session.close()
def attendees(event_id):
    """Return Users attending *event_id* (not canceled), oldest signup first."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        # filter_by after join() applies to Activist, the joined entity.
        return session.query(User).join(Activist).filter_by(
            event_id=event_id, role=Activist.role_attendee,
            canceled=False).order_by(asc(Activist.creation_date)).all()
    finally:
        session.close()
def by_user_and_event(user_id, event_id):
    """Return the oldest attendee Activist row for (*user_id*, *event_id*)."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(Activist).filter_by(
            event_id=event_id, user_id=user_id,
            role=Activist.role_attendee).order_by(
            asc(Activist.creation_date)).first()
    finally:
        session.close()
def paginate_skipped(page_num, per_page=15):
    """Paginate analysed-and-skipped SiteComments, newest first."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        query = session.query(SiteComment).\
            filter(SiteComment.analysed != None).\
            filter(SiteComment.skipped != None).\
            order_by(desc(SiteComment.creation_date))
        return paginate_helper(query, page_num, per_page)
    finally:
        session.close()
def coordinators(activity_id):
    """Return Users coordinating *activity_id* (not canceled), oldest first."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        # filter_by after join() applies to Activist, the joined entity.
        return session.query(User).join(Activist).filter_by(
            activity_id=activity_id, role=Activist.role_coordinator,
            canceled=False).order_by(asc(Activist.creation_date)).all()
    finally:
        session.close()
def is_attendee(user_id, event_id):
    """Return True when *user_id* has an active attendee row for *event_id*."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        result = session.query(func.count(Activist.id)).filter_by(
            user_id=user_id, event_id=event_id,
            role=Activist.role_attendee, canceled=False).scalar()
    finally:
        session.close()
    # The comparison already yields a bool; no ternary needed.
    return result > 0
def user_coordinate_times(user_id, site_id):
    """Count non-canceled coordinator records of *user_id* on *site_id*."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(func.count(Activist.id)).join(Activity).filter(
            Activist.user_id == user_id,
            Activist.role == Activist.role_coordinator,
            Activist.canceled == False,
            Activity.site_id == site_id).distinct().scalar()
    finally:
        session.close()
def skipped_comments():
    """Return all analysed-and-skipped SiteComments, newest first."""
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        return session.query(SiteComment).\
            filter(SiteComment.analysed != None).\
            filter(SiteComment.skipped != None).\
            order_by(desc(SiteComment.creation_date)).all()
    finally:
        session.close()
def all_extra(type_id, limit=None, offset=None):
    """Return (extra, added) pairs for *type_id*, newest first.

    Args:
        limit/offset: optional paging applied only when provided.
    """
    session = db_session()
    try:
        # try/finally fixes a session leak when the query raises.
        query = session.query(
            JSONObjectData.extra.label('extra'),
            JSONObjectData.added.label('added')).filter(
            JSONObjectData.type_id == type_id).order_by(
            desc(JSONObjectData.added))
        if limit is not None:
            query = query.limit(limit)
        if offset is not None:
            query = query.offset(offset)
        return query.all()
    finally:
        session.close()
def attend_event(event_id):
    """Apply to / withdraw from an event for the logged-in user.

    Expects a boolean ``attend`` query parameter.  Creates an attendee
    Activist row, or flips ``canceled`` on an existing one.  Responds
    with a JSON status payload in every case.
    """
    if g.user is None or g.user.is_banned:
        abort(404)
    try:
        event_id = int(event_id)
    except (TypeError, ValueError):
        # Original let int() raise an unhandled 500; respond like the
        # sibling ban_user() bad-param path instead.
        return jsonify(**{"status": False, "msg": gettext("Wrong params")})
    attend = request.args.get("attend", None)
    if event_id <= 0 or attend is None:
        return jsonify(**{"status": False, "msg": gettext("Wrong params")})
    try:
        # Parse "true"/"false" into a bool.
        attend = json.loads(attend.lower())
    except ValueError:
        # Narrowed from a bare except: json.loads raises ValueError.
        return jsonify(**{"status": False, "msg": gettext("Wrong params")})
    event = Event.by_id(event_id)
    if event is None:
        return jsonify(**{"status": False, "msg": gettext("Invalid params.")})
    if event.date < datetime.datetime.now():
        # NOTE(review): msgid kept byte-identical ("No one cannot apply") so
        # existing gettext translations keep resolving.
        return jsonify(
            **{
                "status": False,
                "msg": gettext(
                    "This event has already happened. No one cannot apply on it."
                )
            })
    activist = Activist.by_user_and_event(g.user.id, event.id)
    if activist is None:
        if not attend:
            # Nothing to cancel — the user was never on the list.
            return jsonify(
                **{
                    "status": True,
                    "msg": gettext("You are not on the list. No need to do anything!")
                })
        activist = Activist(g.user.id, None, event_id, Activist.role_attendee)
    else:
        activist.canceled = not attend
        activist.updated_date = datetime.datetime.now()
    pg_session = db_session()
    try:
        # try/finally fixes a session leak when commit raises.
        pg_session.add(activist)
        pg_session.commit()
    finally:
        pg_session.close()
    return jsonify(
        **{
            "status": True,
            "msg": gettext("Your application has been saved. Thank you!")
        })
def start_analysis_loop():
    """Consume chat messages forever, grouping them into Statements and analysing.

    Pulls (channel, user, message) tuples off message_queue, merges each into
    a recent open Statement for the same channel/user (or creates one), runs
    do_analyse() after every message, and keeps one deferred do_analyse
    scheduled for after the statement's expected completion time.
    """
    static_assessment.load()
    # Time windows derived from the longest known question length.
    min_to_end_stmnt = static_assessment.maximum_question_length // letters_per_min
    sec_to_end_stmnt = static_assessment.maximum_question_length // letters_per_second
    print("[start_analysis_loop] min to end: ", min_to_end_stmnt,
          ", sec to end: ", sec_to_end_stmnt)
    # Analyse all that was not up to now.
    do_analyse()
    current_exec_event = None
    while True:
        # Blocks until the next message arrives.
        channel_id, user_id, message_id, message, creation_time = message_queue.get(
        )
        # A statement is considered "still open" if created within the last
        # min_to_end_stmnt minutes and updated recently enough for the
        # message's length (typing-speed heuristic).
        created_ago = creation_time - datetime.timedelta(
            minutes=min_to_end_stmnt)
        updated_ago = creation_time - datetime.timedelta(
            seconds=(len(message) // letters_per_second))
        # NOTE(review): flag is set below but never read afterwards.
        new_statment_was_created = False
        session = db_session()
        stmnt = Statement.query.\
            filter(and_(Statement.channel_id==channel_id,
                        Statement.user_id==user_id,
                        Statement.created>created_ago,
                        Statement.updated>updated_ago)).\
            first()
        if stmnt is None:
            # No open statement — start a new one from this message.
            stmnt = Statement(channel_id, user_id, message_id, creation_time)
            session.add(stmnt)
            new_statment_was_created = True
            print("New stmnt (msgid): ", message_id)
        else:
            # Extend the open statement with this message.
            print("Update stmnt: ", stmnt.id)
            update_query = Statement.__table__.update().values(
                updated=creation_time, last_msg_id=message_id).\
                where(Statement.id==stmnt.id)
            session.execute(update_query)
        session.commit()
        session.close()
        do_analyse()
        if current_exec_event is not None:
            # Replace the previously scheduled deferred analysis.
            scheduler.cancel(current_exec_event)
            current_exec_event = None
        else:
            # No event tracked but the scheduler is not empty — state drifted.
            if not scheduler.empty():
                print("[start_analysis_loop] SYNC ERROR...")
        # Re-arm a deferred analysis for shortly after the statement should end.
        current_exec_event = scheduler.enter(sec_to_end_stmnt + 5, 1,
                                             do_analyse)
        # Non-blocking: run whatever is already due, then continue consuming.
        scheduler.run(blocking=False)
def sync_telegram(self):
    """Resume content fetching for all telegram channels.

    For each channel, finds the highest message_id already stored and
    attaches it to the channel as ``min_id`` (0 when nothing is stored),
    then hands the channels to get_content() with the message callback.

    NOTE(review): despite its name, ``min_id`` holds the MAX stored id —
    presumably the lower bound for the next fetch; the attribute name is
    kept because get_content() reads it.
    """
    channels = self.update_telegram_channels()
    session = db_session()
    try:
        # try/finally fixes a session leak when a query raises.
        for channel in channels:
            last_id = session.query(TelegramTextMessage.message_id).\
                filter(TelegramTextMessage.channel_id == channel.id).\
                order_by(desc(TelegramTextMessage.message_id)).\
                limit(1).scalar()
            # scalar() yields None for empty channels; normalize to 0.
            setattr(channel, "min_id", last_id if last_id else 0)
    finally:
        session.close()
    self.get_content(channels, self.telegram_on_message_callback)