def batch_create_projects(app, data, batch_size=100, is_create=False, action=None):
    projects = []
    for split_data in batch_split(data, batch_size):
        projs = create_projects(app, split_data, is_create=is_create)
        projects.extend(projs)
        if action:
            action(projs)
    return projects

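# NOTE: every batch_* helper in this module relies on batch_split from
# app.utils.commons, whose implementation is not shown here. A minimal
# sketch of the assumed behavior (yield fixed-size slices of a sequence):
#
#     def batch_split(items, batch_size=100):
#         for i in range(0, len(items), batch_size):
#             yield items[i:i + batch_size]
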
def sync_user_statuses(user_statuses):
    # Group (user_type, uid, online) tuples by app name.
    app_user_statuses = {}
    for user_status in user_statuses:
        app_name, user_type, uid = parse_app_user_id_from_xchat_uid(user_status['user'])
        online = user_status['status'] == 'online'
        app_user_statuses.setdefault(app_name, []).append((user_type, uid, online))

    # Push the status updates per app, in batches of 100.
    for app_name, statuses in app_user_statuses.items():
        app = App.query.filter_by(name=app_name).one_or_none()
        if app is None:
            continue
        for split_statuses in batch_split(statuses, 100):
            update_user_statuses(app, split_statuses)

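# Example payload, inferred from the field access above; the exact xchat
# uid format is whatever parse_app_user_id_from_xchat_uid expects:
#
#     sync_user_statuses([
#         {'user': '<xchat uid>', 'status': 'online'},
#         {'user': '<xchat uid>', 'status': 'offline'},
#     ])
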
def create_projects(app_name, batch_size):
    import sys
    import json
    from app.utils.commons import batch_split
    from app.service.models import App
    from app.biz import app as app_biz

    app = App.query.filter_by(name=app_name).one()

    def action(projs):
        logger.info('created projects: %d', len(projs))

    # Read one JSON object per line from stdin and create the projects in batches.
    for lines in batch_split(sys.stdin.readlines(), batch_size=batch_size * 100):
        data = [json.loads(line) for line in lines]
        logger.info('create projects: %d', len(data))
        app_biz.batch_create_projects(app, data, batch_size=batch_size, is_create=True, action=action)

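# Typical invocation, assuming this function is registered as a management/CLI
# command (the command name and registration are not shown here): feed one
# JSON object per line on stdin, e.g.
#
#     cat projects.jsonl | <manage-command> create_projects my_app 100
#
# Lines are read in chunks of batch_size * 100 and then created in
# sub-batches of batch_size by app_biz.batch_create_projects.
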
def _sync_proj_xchat_msgs(proj, xchat_msg=None):
    proj_xchat = proj.xchat
    synced_count = 0
    # Fast path: the pushed message is exactly the next one we expect.
    if xchat_msg and proj_xchat.msg_id + 1 == xchat_msg.id:
        _new_proj_xchat_msg(proj, (xchat_msg,))
        synced_count += 1

    try:
        msgs, has_more, no_more = xchat_client.fetch_chat_msgs(
            proj_xchat.chat_id, lid=proj_xchat.msg_id, limit=10000)
        for split_msgs in batch_split(msgs, 100):
            _new_proj_xchat_msg(proj, [parse_xchat_msg_from_data(msg) for msg in split_msgs])
        synced_count += len(msgs)
        if has_more:
            # Keep pulling until xchat reports no more messages.
            synced_count += _sync_proj_xchat_msgs(proj)
    except Exception:
        logger.exception('sync proj msgs error: %d', proj.id)
    return synced_count

def _sync_proj_xchat_migrated_msgs(proj):
    synced_count = 0
    proj_xchat = proj.xchat
    # Nothing was migrated into this chat, so there is nothing to back-fill.
    if proj_xchat.start_msg_id <= 0:
        return synced_count

    try:
        msgs, has_more, no_more = xchat_client.fetch_chat_msgs(
            proj_xchat.chat_id, rid=proj_xchat.start_msg_id + 1, limit=100000, desc=True)
        for split_msgs in batch_split(msgs, 200):
            _insert_proj_xchat_msg(proj, [parse_xchat_msg_from_data(msg) for msg in split_msgs])
        synced_count += len(msgs)
        if has_more:
            synced_count += _sync_proj_xchat_migrated_msgs(proj)
    except Exception:
        logger.exception('sync proj migrated msgs error: %d', proj.id)
    return synced_count

def _migrate_proj_msgs(proj, msgs, start_msg_id=None, start_delta=None, batch_size=200):
    import arrow
    from sqlalchemy import desc
    from app.utils.commons import batch_split
    from app import xchat_client
    from app.service.models import Message
    from app.task import tasks

    xchat = proj.xchat
    if start_msg_id is not None:
        if xchat.start_msg_id < start_msg_id:
            logger.info('do_migrate_msgs: migrated %s, %s', proj.id, xchat.chat_id)
            return

    # msgs are assumed to be ordered newest-first: msgs[0] carries the latest
    # timestamp, msgs[-1] the earliest.
    rt = arrow.get(int(msgs[0]['ts'])).datetime
    lt = arrow.get(int(msgs[-1]['ts'])).datetime

    # Drop messages whose timestamps fall inside a time range we have already stored.
    lt_msg = proj.messages.filter(Message.ts >= lt, Message.ts <= rt).order_by(Message.ts).first()
    rt_msg = proj.messages.filter(Message.ts >= lt, Message.ts <= rt).order_by(desc(Message.ts)).first()
    if lt_msg and rt_msg:
        l_ts = arrow.get(lt_msg.ts).timestamp
        r_ts = arrow.get(rt_msg.ts).timestamp
        msgs = [msg for msg in msgs if not (l_ts <= int(msg['ts']) <= r_ts)]

    if len(msgs) <= 0:
        return

    count = 0
    for split_msgs in batch_split(msgs, batch_size):
        ok, n = xchat_client.insert_chat_msgs(xchat.chat_id, split_msgs, start_delta=start_delta)
        if start_delta is not None:
            # Advance the insertion offset only when the caller supplied one.
            start_delta += len(split_msgs)
        if ok:
            count += n
        else:
            break

    if count > 0:
        tasks.try_sync_proj_xchat_migrated_msgs.delay(proj.id)
    logger.info('do_migrate_msgs: %s, %s, %s', proj.id, xchat.chat_id, count)

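# The overlap filter above is the subtle step: any incoming message whose
# timestamp falls inside the range already covered by stored messages is
# dropped before inserting into xchat. A self-contained illustration with
# plain integer timestamps (not the actual Message model):
#
#     l_ts, r_ts = 100, 200          # earliest/latest ts already stored
#     msgs = [{'ts': 90}, {'ts': 150}, {'ts': 210}]
#     kept = [m for m in msgs if not (l_ts <= int(m['ts']) <= r_ts)]
#     assert kept == [{'ts': 90}, {'ts': 210}]
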
def batch_create_or_update_staffs(app, data):
    for split_data in batch_split(data, 100):
        app_m.create_or_update_staffs(app, split_data)


def batch_create_or_update_customers(app, data):
    for split_data in batch_split(data, 100):
        app_m.create_or_update_customers(app, split_data)


def batch_update_projects(app, data):
    for split_data in batch_split(data, 100):
        update_projects(app, split_data)