def run_fetch(fabid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db='fab_migration')
        import_priority = pmig.sql_x("SELECT priority FROM fab_meta WHERE id = %s", (fabid,))
        if import_priority:
            log('updating existing record')
            pmig.sql_x("UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s",
                       (ipriority['creation_failed'], now(), fabid))
        else:
            print "%s does not seem to exist" % (fabid)
        pmig.close()
        print 'failed to grab %s' % (fabid,)
        return False
    try:
        if fetch(fabid):
            try:
                pmig = phabdb.phdb(db='fab_migration')
                pandmupdate = "UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s"
                pmig.sql_x(pandmupdate, (ipriority['creation_success'], now(), fabid))
                print time.time()
                print 'done with %s' % (fabid,)
            except:
                return False
            return True
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        print 'failed to grab %s (%s)' % (fabid, e)
        return run_fetch(fabid, tries=tries)
def run_create(rtid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.rtmigrate_db)
        import_priority = pmig.sql_x("SELECT priority FROM rt_meta WHERE id = %s", (rtid,))
        if import_priority:
            pmig.sql_x("UPDATE rt_meta SET priority=%s, modified=%s WHERE id = %s",
                       (ipriority['creation_failed'], now(), rtid))
        else:
            elog("%s does not seem to exist" % (rtid))
        elog('failed to create %s' % (rtid,))
        pmig.close()
        return False
    try:
        return create(rtid)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to create %s (%s)' % (rtid, e))
        return run_create(rtid, tries=tries)
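# The run_fetch/run_create/run_update wrappers in these scripts all repeat one
# retry shape: call the worker, and on exception sleep, decrement `tries`, and
# recurse, recording a failure priority once tries reaches zero. Below is a
# minimal sketch of that shared pattern as a reusable helper; `run_with_retry`,
# `worker`, and `on_exhausted` are illustrative names and are not part of the
# migration code, which keeps one wrapper per worker instead.
def run_with_retry(worker, ident, tries=1, delay=5, on_exhausted=None):
    if tries == 0:
        if on_exhausted is not None:
            # e.g. mark the row as failed in the relevant *_meta table
            on_exhausted(ident)
        return False
    try:
        return worker(ident)
    except Exception:
        import traceback
        traceback.print_exc(file=sys.stdout)
        time.sleep(delay)
        return run_with_retry(worker, ident, tries=tries - 1,
                              delay=delay, on_exhausted=on_exhausted)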
def fetch(bugid):
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    server = xmlrpclib.ServerProxy(config.Bugzilla_url, use_datetime=True)
    token_data = server.User.login({'login': config.Bugzilla_login,
                                    'password': config.Bugzilla_password})
    token = token_data['token']
    kwargs = {'ids': [bugid], 'Bugzilla_token': token}
    # grabbing one bug at a time for now
    buginfo = server.Bug.get(kwargs)['bugs']
    buginfo = buginfo[0]
    # some bugs have header data but no actual content
    # https://bugzilla.wikimedia.org/show_bug.cgi?id=32738
    com = server.Bug.comments(kwargs)['bugs'][str(bugid)]['comments']
    # convert datetimes for json
    buginfo['last_change_time'] = datetime_to_epoch(buginfo['last_change_time'])
    buginfo['creation_time'] = datetime_to_epoch(buginfo['creation_time'])
    if 'flags' in buginfo:
        for flag in buginfo['flags']:
            for k, v in flag.iteritems():
                if isinstance(v, datetime.datetime):
                    flag[k] = datetime_to_epoch(v)
    for c in com:
        c['creation_time'] = datetime_to_epoch(c['creation_time'])
        c['time'] = datetime_to_epoch(c['time'])
    # set ticket status for priority import
    status = bzlib.status_convert(buginfo['status'], buginfo['resolution'])
    if status == 'open':
        creation_priority = ipriority['unresolved']
    else:
        creation_priority = ipriority['na']
    current = pmig.sql_x("SELECT * from bugzilla_meta where id = %s", bugid)
    if current:
        update_values = (creation_priority, json.dumps(buginfo),
                         json.dumps(com), now(), bugid)
        vlog('update: ' + str(update_values))
        pmig.sql_x("UPDATE bugzilla_meta SET priority=%s, header=%s, comments=%s, modified=%s WHERE id = %s",
                   update_values)
    else:
        insert_values = (bugid, creation_priority, json.dumps(buginfo),
                         json.dumps(com), now(), now())
        vlog('insert: ' + str(insert_values))
        sql = "INSERT INTO bugzilla_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)"
        pmig.sql_x(sql, insert_values)
    pmig.close()
    return True
def run_create(bugid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.bzmigrate_db,
                           user=config.bzmigrate_user,
                           passwd=config.bzmigrate_passwd)
        import_priority = pmig.sql_x("SELECT priority FROM bugzilla_meta WHERE id = %s", (bugid,))
        if import_priority:
            pmig.sql_x("UPDATE bugzilla_meta SET priority=%s WHERE id = %s",
                       (ipriority['update_failed'], bugid))
        else:
            elog("%s does not seem to exist" % (bugid))
        pmig.close()
        elog('failed to create %s' % (bugid,))
        return False
    try:
        return create(bugid)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to create %s (%s)' % (bugid, e))
        return run_create(bugid, tries=tries)
def run_fetch(bugid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.bzmigrate_db,
                           user=config.bzmigrate_user,
                           passwd=config.bzmigrate_passwd)
        current = pmig.sql_x("SELECT * from bugzilla_meta where id = %s", bugid)
        if current:
            update_values = (ipriority['fetch_failed'], '', '', now(), bugid)
            pmig.sql_x("UPDATE bugzilla_meta SET priority=%s, header=%s, comments=%s, modified=%s WHERE id = %s",
                       update_values)
        else:
            insert_values = (bugid, ipriority['fetch_failed'], '', '', now(), now())
            pmig.sql_x("INSERT INTO bugzilla_meta (id, priority, header, comments, modified, created) VALUES (%s, %s, %s, %s, %s, %s)",
                       insert_values)
        pmig.close()
        elog('failed to grab %s' % (bugid,))
        return False
    try:
        return fetch(bugid)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to fetch %s (%s)' % (bugid, e))
        return run_fetch(bugid, tries=tries)
def main():
    phab = Phabricator(config.phab_user,
                       config.phab_cert,
                       config.phab_host)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    server = xmlrpclib.ServerProxy(config.Bugzilla_url, use_datetime=True)
    bzdata = open("data/bugzilla.yaml", 'r')
    bzdata_yaml = yaml.load(bzdata)
    #product = 'Wikimedia'
    kwargs = {'Bugzilla_login': config.Bugzilla_login,
              'Bugzilla_password': config.Bugzilla_password}
    products = server.Product.get_selectable_products(kwargs)['ids']
    print products
    for p in products:
        kwargs = {'ids': p,
                  'Bugzilla_login': config.Bugzilla_login,
                  'Bugzilla_password': config.Bugzilla_password}
        pi = server.Product.get(kwargs)['products'][0]
        print pi['name'], pi['description']
        for c in pi['components']:
            # the original format string had three placeholders but was given
            # two values; include the component description to match
            pname = "\n%s-%s\n\n%s" % (pi['name'], c['name'], c['description'])
            print pname
def run_update(user, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.rtmigrate_db,
                           user=config.rtmigrate_user,
                           passwd=config.rtmigrate_passwd)
        current = phabdb.get_user_migration_history(user['user'], pmig)
        if current:
            failed = ipriority['update_failed_comments']
            log(phabdb.set_user_relations_priority(failed, user['user'], pmig))
        else:
            log('%s user does not exist to update' % (user['user']))
        pmig.close()
        elog('final fail to update %s' % (user['user'],))
        return False
    try:
        return update(user)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to update %s' % (user,))
        return run_update(user, tries=tries)
def main():
    def remove(bugid):
        notice("Removing bugid %s" % (bugid,))
        log(util.remove_issue_by_bugid(bugid, bzlib.prepend))

    if not util.can_edit_ref:
        # no single bug is in scope yet, so do not reference one in the error
        elog('reference field not editable on this install')
        sys.exit(1)
    if 'failed' in sys.argv:
        priority = ipriority['creation_failed']
    else:
        priority = 0
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    bugs = return_bug_list(dbcon=pmig, priority=priority)
    pmig.close()
    # Serious business
    if 'failed' in sys.argv or '-r' in sys.argv:
        for b in bugs:
            remove(b)
    from multiprocessing import Pool
    pool = Pool(processes=int(config.bz_createmulti))
    _ = pool.map(run_create, bugs)
    complete = len(filter(bool, _))
    failed = len(_) - complete
    print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
def run_fetch(fabid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db='fab_migration')
        import_priority = pmig.sql_x("SELECT priority FROM fab_meta WHERE id = %s", (fabid,))
        if import_priority:
            log('updating existing record')
            pmig.sql_x("UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s",
                       (ipriority['fetch_failed'], now(), fabid))
        else:
            insert_values = (fabid, ipriority['fetch_failed'], 'nan', 'nan', now(), now())
            pmig.sql_x("INSERT INTO fab_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)",
                       insert_values)
        pmig.close()
        log('failed to grab %s' % (fabid,))
        return False
    try:
        if fetch(fabid):
            vlog(str(time.time()))
            log('done fetching %s' % (fabid,))
            return True
    except Exception as e:
        tries -= 1
        time.sleep(5)
        import traceback
        traceback.print_exc(file=sys.stdout)
        log('failed to grab %s (%s)' % (fabid, e))
        return run_fetch(fabid, tries=tries)
def get_user_histories(verified):
    histories = []
    pmig = phabdb.phdb(db=config.fabmigrate_db,
                       user=config.fabmigrate_user,
                       passwd=config.fabmigrate_passwd)
    #print 'verified', verified
    for v in verified:
        vlog(str(v))
        # Get user history from old fab system
        saved_history = phabdb.get_user_migration_history(v[1], pmig)
        if not saved_history:
            log('%s verified email has no saved history' % (v[1],))
            continue
        log('%s is being processed' % (v[1],))
        history = {}
        history['user'] = v[1]
        history['userphid'] = v[0]
        history['assigned'] = saved_history[0]
        history['cc'] = saved_history[1]
        history['author'] = saved_history[2]
        history['created'] = saved_history[3]
        history['modified'] = saved_history[4]
        histories.append(history)
    pmig.close()
    return [util.translate_json_dict_items(d) for d in histories]
def run_update(bugid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.rtmigrate_db)
        current = pmig.sql_x("SELECT * from task_relations where id = %s", bugid)
        if current:
            pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                       (ipriority['creation_failed'], json.dumps([]), now(), bugid))
        else:
            sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
            pmig.sql_x(sql, (bugid, ipriority['creation_failed'], json.dumps([]), now()))
        pmig.close()
        elog('final fail to update %s' % (bugid,))
        return False
    try:
        return update(bugid)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to update %s' % (bugid,))
        return run_update(bugid, tries=tries)
def populate(bugid):
    def add_comment_ref(owner):
        """Adds an issue reference to a user for later updating their comments."""
        ouser = pmig.sql_x("SELECT user FROM user_relations_comments WHERE user = %s", (owner,))
        if ouser:
            jcommed = pmig.sql_x("SELECT issues FROM user_relations_comments WHERE user = %s", (owner,))
            if jcommed and any(tflatten(jcommed)):
                issues = json.loads(jcommed[0][0])
            else:
                issues = []
            if bugid not in issues:
                log("Comment reference %s to %s" % (str(bugid), owner))
                issues.append(bugid)
                pmig.sql_x("UPDATE user_relations_comments SET issues=%s, modified=%s WHERE user = %s",
                           (json.dumps(issues), now(), owner))
        else:
            issues = json.dumps([bugid])
            insert_values = (owner, issues, now(), now())
            pmig.sql_x("INSERT INTO user_relations_comments (user, issues, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    issue = pmig.sql_x("SELECT id FROM bugzilla_meta WHERE id = %s", bugid)
    if not issue:
        log('issue %s does not exist for user population' % (bugid,))
        return True
    fpriority = pmig.sql_x("SELECT priority FROM bugzilla_meta WHERE id = %s", bugid)
    if fpriority[0] == ipriority['fetch_failed']:
        log('issue %s was not fetched successfully for user population (failed fetch)' % (bugid,))
        return True
    current = pmig.sql_x("SELECT comments, xcomments, modified FROM bugzilla_meta WHERE id = %s", bugid)
    if current:
        comments, xcomments, modified = current[0]
    else:
        log('%s not present for migration' % (bugid,))
        return True
    com = json.loads(comments)
    xcom = json.loads(xcomments)
    commenters = [c['author'] for c in com if c['count'] > 0]
    commenters = set(commenters)
    log("commenters for issue %s: %s" % (bugid, str(commenters)))
    for c in commenters:
        add_comment_ref(c)
    pmig.close()
    return True
def update(user):
    phab = Phabricator(config.phab_user,
                       config.phab_cert,
                       config.phab_host)
    pmig = phabdb.phdb(db=config.rtmigrate_db,
                       user=config.rtmigrate_user,
                       passwd=config.rtmigrate_passwd)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    if phabdb.is_bot(user['userphid']):
        log("%s is a bot no action" % (user['user']))
        return True
    epriority = phabdb.get_user_relations_comments_priority(user['user'], pmig)
    if epriority and len(epriority[0]) > 0:
        if epriority[0][0] == ipriority['update_success_comments']:
            log('Skipping %s as already updated' % (user['user']))
            return True
    if not user['issues']:
        log("%s has no issues to update" % (user['user'],))
        return True
    for i in user['issues']:
        comdetails = pmig.sql_x("SELECT comments, xcomments FROM rt_meta WHERE id = %s", (int(i),))
        jcom, jxcom = comdetails[0]
        coms = json.loads(jcom)
        xcoms = json.loads(jxcom)
        for key, xi in xcoms.iteritems():
            content = xi['content']
            if xi["creator"] == user['user']:
                log("Updating comment %s for %s" % (xi['xctransaction'], user['user']))
                phabdb.set_comment_author(xi['xctransaction'], user['userphid'])
                phabdb.set_comment_content(xi['xctransaction'], xi['content'])
    current = phabdb.get_user_migration_comment_history(user['user'], pmig)
    if current:
        success = ipriority['update_success_comments']
        log(phabdb.set_user_relations_comments_priority(success, user['user'], pmig))
    else:
        log('%s user does not exist to update' % (user['user']))
        return False
    pmig.close()
    log(util.purge_cache())
    return True
def update(bugid):
    pmig = phabdb.phdb(db=config.rtmigrate_db)
    epriority = pmig.sql_x("SELECT priority from task_relations where id = %s", bugid)
    if epriority and epriority[0] == ipriority["update_success"]:
        log("skipping %s as blockers already updated" % (bugid,))
        return True
    query = "SELECT header FROM rt_meta WHERE id = %s"
    header = pmig.sql_x(query, (bugid,))
    if not header:
        log("no header found for %s" % (bugid,))
        return "missing"

    def extref(ticket):
        refid = phabdb.reference_ticket("%s%s" % (rtlib.prepend, ticket))
        if not refid:
            return ""
        return refid[0]

    blocker_ref = extref(bugid)
    tinfo = json.loads(header[0][0])
    if "refers_to" in tinfo["links"] and tinfo["links"]["refers_to"]:
        refers_to = []
        for b in tinfo["links"]["refers_to"]:
            refersto_ref = extref(b)
            if not refersto_ref:
                continue
            refers_to.append(phabdb.get_task_id_by_phid(refersto_ref))
        if refers_to:
            refers_block = "\n\n**Refers To:**\n"
            refers_block += "\n".join(["{T%s}" % r for r in refers_to])
            log(phabdb.append_to_task_description(blocker_ref, refers_block))
    if "refers_toby" in tinfo["links"] and tinfo["links"]["refers_toby"]:
        refers_toby = []
        for b in tinfo["links"]["refers_toby"]:
            referstoby_ref = extref(b)
            if not referstoby_ref:
                continue
            refers_toby.append(phabdb.get_task_id_by_phid(referstoby_ref))
        if refers_toby:
            refer_block = "\n\n**Referred To By:**\n"
            refer_block += "\n".join(["{T%s}" % r for r in refers_toby])
            log(phabdb.append_to_task_description(blocker_ref, refer_block))
    return True
def update(user):
    phab = Phabricator(config.phab_user,
                       config.phab_cert,
                       config.phab_host)
    pmig = phabdb.phdb(host=config.dbhost,
                       db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    if phabdb.is_bot(user['userphid']):
        log("%s is a bot no action" % (user['user']))
        return True
    epriority = phabdb.get_user_relations_priority(user['user'], pmig)
    if epriority and len(epriority[0]) > 0:
        if epriority[0][0] == ipriority['update_success']:
            log('Skipping %s as already updated' % (user['user']))
            return True
    # 'author': [409, 410, 411, 404, 412],
    # 'cc': [221, 69, 203, 268, 261, 8],
    # 'created': 1410276037L,
    # 'modified': 1410276060L,
    # 'assigned': [97, 64, 150, 59, 6],
    # 'userphid': 'PHID-USER-4hsexplytovmqmcb7tq2',
    # 'user': u'*****@*****.**'}
    if user['assigned']:
        for ag in user['assigned']:
            vlog(phabm.sync_assigned(user['userphid'], ag, bzlib.prepend))
    if user['author']:
        for a in user['author']:
            vlog(phabm.synced_authored(user['userphid'], a, bzlib.prepend))
    if user['cc']:
        for ccd in user['cc']:
            vlog(phabdb.add_task_cc_by_ref(user['userphid'], ccd, bzlib.prepend))
    current = phabdb.get_user_migration_history(user['user'], pmig)
    if current:
        log(phabdb.set_user_relations_priority(ipriority['update_success'], user['user'], pmig))
    else:
        elog('%s user does not exist to update' % (user['user']))
        return False
    pmig.close()
    return True
def update(bugid):
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    epriority = pmig.sql_x("SELECT priority from task_relations where id = %s", bugid)
    if epriority and epriority[0] == ipriority['update_success']:
        log('skipping %s as blockers already updated' % (bugid,))
        return True
    hq = "SELECT header FROM bugzilla_meta WHERE id = %s"
    header = pmig.sql_x(hq, (bugid,))
    if not header:
        elog('no header found for %s' % (bugid,))
        return False

    def extref(ticket):
        refid = phabdb.reference_ticket("%s%s" % (bzlib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(bugid)
    tinfo = json.loads(header[0][0])
    if not tinfo['blocks']:
        log("%s doesn't block anything" % (str(bugid),))
        return True
    for b in tinfo["blocks"]:
        blocked_ref = extref(b)
        log("%s is blocking %s" % (blocker_ref, blocked_ref))
        if blocked_ref:
            log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
        else:
            log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    vlog('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = pmig.sql_x("SELECT * from task_relations where id = %s", bugid)
    if current:
        pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                   (ipriority['update_success'], json.dumps(blocks), now(), bugid))
    else:
        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
        pmig.sql_x(sql, (bugid, ipriority['update_success'], json.dumps(blocks), now()))
    pmig.close()
    return True
def update(id):
    fabdb = phabdb.phdb(db='fab_migration')
    epriority = fabdb.sql_x("SELECT priority from task_relations where id = %s", id)
    if epriority and epriority[0] == ipriority['creation_success']:
        log('Skipping %s as blockers already updated' % (id,))
        return True
    hq = "SELECT header FROM fab_meta WHERE id = %s"
    header = fabdb.sql_x(hq, (id,))
    if not header:
        vlog('no header found for %s' % (id,))
        return True

    def extref(ticket):
        refid = phabdb.reference_ticket("%s%s" % (fablib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(id)
    tinfo = json.loads(header[0])
    vlog(tinfo)
    for b in tinfo['xblocking']:
        blocked_ref = extref(b)
        log("%s is blocking %s" % (blocker_ref, blocked_ref))
        if blocked_ref:
            log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
        else:
            log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    log('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = fabdb.sql_x("SELECT * from task_relations where id = %s", id)
    if current:
        fabdb.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                    (ipriority['creation_success'], json.dumps(blocks), now(), id))
    else:
        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
        fabdb.sql_x(sql, (id, ipriority['creation_success'], json.dumps(blocks), now()))
    fabdb.close()
    return True
def get_user_histories(verified):
    histories = []
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    for v in verified:
        vlog(str(v))
        saved_history = phabdb.get_user_migration_comment_history(v[1], pmig)
        if not saved_history:
            log('%s verified email has no saved history' % (v[1],))
            continue
        log('%s is being processed' % (v[1],))
        history = {}
        history['user'] = v[1]
        history['userphid'] = v[0]
        history['issues'] = saved_history[0]
        history['created'] = saved_history[1]
        history['modified'] = saved_history[2]
        histories.append(history)
    pmig.close()
    return [util.translate_json_dict_items(d) for d in histories]
def run_fetch(tid, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.rtmigrate_db,
                           user=config.rtmigrate_user,
                           passwd=config.rtmigrate_passwd)
        insert_values = (tid, ipriority['fetch_failed'], '', '', now(), now())
        pmig.sql_x("INSERT INTO rt_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)",
                   insert_values)
        pmig.close()
        elog('failed to grab %s' % (tid,))
        return False
    try:
        return fetch(tid)
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to grab %s (%s)' % (tid, e))
        return run_fetch(tid, tries=tries)
def run_update(fabid, tries=1):
    if tries == 0:
        log('final fail to grab %s' % (fabid,))
        pmig = phabdb.phdb(db='fab_migration')
        current = pmig.sql_x("SELECT * from task_relations where id = %s", fabid)
        if current:
            pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                       (ipriority['creation_failed'], json.dumps([]), now(), fabid))
        else:
            sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
            pmig.sql_x(sql, (fabid, ipriority['creation_failed'], json.dumps([]), now()))
        pmig.close()
        return False
    try:
        if update(fabid):
            log('%s done with %s' % (str(int(time.time())), fabid,))
            return True
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        log('failed to grab %s (%s)' % (fabid, e))
        return run_update(fabid, tries=tries)
def run_update(user, tries=1):
    if tries == 0:
        pmig = phabdb.phdb(db=config.fabmigrate_db,
                           user=config.fabmigrate_user,
                           passwd=config.fabmigrate_passwd)
        current = phabdb.get_user_migration_history(user['user'], pmig)
        if current:
            log(phabdb.set_user_relations_priority(ipriority['update_failed'], user['user'], pmig))
        else:
            log('%s user does not exist to update' % (user['user']))
        pmig.close()
        log('final fail to update %s' % (user['user'],))
        return False
    try:
        if update(user):
            log('%s done with %s' % (str(int(time.time())), user,))
            return True
    except Exception as e:
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        log('failed to update %s (%s)' % (user, e))
        return run_update(user, tries=tries)
def update(bugid):
    pmig = phabdb.phdb(db=config.rtmigrate_db)
    epriority = pmig.sql_x("SELECT priority from task_relations where id = %s", bugid)
    if epriority and epriority[0] == ipriority['update_success']:
        log('skipping %s as blockers already updated' % (bugid,))
        return True
    query = "SELECT header FROM rt_meta WHERE id = %s"
    header = pmig.sql_x(query, (bugid,))
    if not header:
        elog('no header found for %s' % (bugid,))
        return 'missing'

    def extref(ticket):
        refid = phabdb.reference_ticket("%s%s" % (rtlib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(bugid)
    tinfo = json.loads(header[0][0])
    upstream = []
    if 'parent' in tinfo['links']:
        upstream += tinfo['links']['parent']
    if 'blocks' in tinfo['links']:
        upstream += tinfo['links']['blocks']
    if upstream:
        for b in upstream:
            blocked_ref = extref(b)
            log("%s is blocking %s" % (blocker_ref, blocked_ref))
            if blocked_ref:
                log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
            else:
                log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    vlog('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = pmig.sql_x("SELECT * from task_relations WHERE id = %s", bugid)
    if current:
        pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                   (ipriority['update_success'], json.dumps(blocks), now(), bugid))
    else:
        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
        pmig.sql_x(sql, (bugid, ipriority['update_success'], json.dumps(blocks), now()))
    pmig.close()
    return True
def populate(bugid):
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    issue = pmig.sql_x("SELECT id FROM bugzilla_meta WHERE id = %s", bugid)
    if not issue:
        log('issue %s does not exist for user population' % (bugid,))
        return True
    fpriority = pmig.sql_x("SELECT priority FROM bugzilla_meta WHERE id = %s", bugid)
    if fpriority[0] == ipriority['fetch_failed']:
        log('issue %s was not fetched successfully for user population (failed fetch)' % (bugid,))
        return True
    current = pmig.sql_x("SELECT priority, header, comments, created, modified FROM bugzilla_meta WHERE id = %s", bugid)
    if current:
        import_priority, buginfo, com, created, modified = current[0]
    else:
        log('%s not present for migration' % (bugid,))
        return True
    bzdata = open("data/bugzilla.yaml", 'r')
    bzdata_yaml = yaml.load(bzdata)
    mlists = bzdata_yaml['assigned_to_lists'].split(' ')
    vlog(mlists)
    header = json.loads(buginfo)
    vlog(str(header))
    relations = {}
    relations['author'] = header["creator"]
    relations['cc'] = header['cc']
    if header['assigned_to'] not in mlists:
        vlog("adding assignee %s to %s" % (header['assigned_to'], bugid))
        relations['owner'] = header['assigned_to']
    else:
        vlog("skipping %s assigned to %s" % (bugid, header['assigned_to']))
        relations['owner'] = ''
    for k, v in relations.iteritems():
        if relations[k]:
            relations[k] = filter(bool, v)

    def add_owner(owner):
        ouser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (owner,))
        if ouser:
            jassigned = pmig.sql_x("SELECT assigned FROM user_relations WHERE user = %s", (owner,))
            jflat = tflatten(jassigned)
            if any(jflat):
                assigned = json.loads(jassigned[0][0])
            else:
                assigned = []
            if bugid not in assigned:
                log("Assigning %s to %s" % (str(bugid), owner))
                assigned.append(bugid)
                vlog("owner %s" % (str(assigned),))
                pmig.sql_x("UPDATE user_relations SET assigned=%s, modified=%s WHERE user = %s",
                           (json.dumps(assigned), now(), owner))
        else:
            vlog('inserting new record')
            assigned = json.dumps([bugid])
            insert_values = (owner, assigned, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, assigned, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_author(author):
        euser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (relations['author'],))
        if euser:
            jauthored = pmig.sql_x("SELECT author FROM user_relations WHERE user = %s", (relations['author'],))
            jflat = tflatten(jauthored)
            if any(jflat):
                authored = json.loads(jauthored[0][0])
            else:
                authored = []
            if bugid not in authored:
                authored.append(bugid)
                vlog("author %s" % (str(authored),))
                pmig.sql_x("UPDATE user_relations SET author=%s, modified=%s WHERE user = %s",
                           (json.dumps(authored), now(), relations['author']))
        else:
            vlog('inserting new record')
            authored = json.dumps([bugid])
            insert_values = (relations['author'], authored, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, author, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_cc(ccuser):
        eccuser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (ccuser,))
        if eccuser:
            jcc = pmig.sql_x("SELECT cc FROM user_relations WHERE user = %s", (ccuser,))
            jflat = tflatten(jcc)
            if any(jflat):
                cc = json.loads(jcc[0][0])
            else:
                cc = []
            if bugid not in cc:
                cc.append(bugid)
                vlog("cc %s" % (str(cc),))
                pmig.sql_x("UPDATE user_relations SET cc=%s, modified=%s WHERE user = %s",
                           (json.dumps(cc), now(), ccuser))
        else:
            vlog('inserting new record')
            cc = json.dumps([bugid])
            insert_values = (ccuser, cc, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, cc, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    if relations['author']:
        add_author(relations['author'])
    if relations['owner']:
        add_owner(relations['owner'])
    if relations['cc']:
        for u in filter(bool, relations['cc']):
            add_cc(u)
    pmig.close()
    return True
def dbcon(db):
    return phabdb.phdb(db=db,
                       host=c.dbslave,
                       user=c.phuser_user,
                       passwd=c.phuser_passwd)
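# A minimal usage sketch for the dbcon() helper above, showing the
# connect / sql_x / close contract the surrounding scripts rely on.
# The database name and bug id here are illustrative only; sql_x and
# close are the same phabdb.phdb methods used throughout this file.
def example_dbcon_usage():
    con = dbcon('bugzilla_migration')  # hypothetical database name
    try:
        row = con.sql_x("SELECT id FROM bugzilla_meta WHERE id = %s", (2001,))
        return bool(row)
    finally:
        con.close()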
def fetch(PHABTICKETID):
    PHABTICKETID = int(PHABTICKETID)
    parser = ConfigParser.SafeConfigParser()
    parser_mode = 'oldfab'
    parser.read(configfile)
    oldfab = Phabricator(parser.get(parser_mode, 'user'),
                         parser.get(parser_mode, 'cert'),
                         parser.get(parser_mode, 'host'))
    # dummy instance of phabapi
    phabm = phabmacros('', '', '')
    # assign newphab instance as self.con for dummyphab
    phabm.con = oldfab
    """
    <Result: {u'authorPHID': u'PHID-USER-qbtllnzb6pwl3ttzqa3m',
              u'status': u'open',
              u'phid': u'PHID-TASK-qr3fpbtk6kdx4slhgnsd',
              u'description': u'',
              u'objectName': u'T10',
              u'title': u'Get icinga alerts into logstash',
              u'priorityColor': u'red',
              u'dependsOnTaskPHIDs': [],
              u'auxiliary': [],
              u'uri': u'http://fab.wmflabs.org/T10',
              u'ccPHIDs': [u'PHID-USER-qbtllnzb6pwl3ttzqa3m'],
              u'isClosed': False,
              u'dateModified': u'1399311492',
              u'ownerPHID': None,
              u'statusName': u'Open',
              u'dateCreated': u'1391716779',
              u'projectPHIDs': [u'PHID-PROJ-5ncvaivs3upngr7ijqy2'],
              u'id': u'10',
              u'priority': u'High'}>
    """
    tinfo = oldfab.maniphest.info(task_id=PHABTICKETID).response
    vlog(tinfo)
    if 'objectName' in tinfo:
        log("Fetching %s" % (tinfo['objectName']))
    comments = comments_by_task(tinfo['phid'])
    for i, c in comments.iteritems():
        comments[i]['xcommenter'] = dict(oldfab.user.info(phid=c['xuserphid']))
    ordered_comments = collections.OrderedDict(sorted(comments.items()))
    vlog(str(ordered_comments))
    """
    <Result: {u'userName': u'bd808',
              u'phid': u'PHID-USER-qbtllnzb6pwl3ttzqa3m',
              u'realName': u'Bryan Davis',
              u'roles': [u'admin', u'verified', u'approved', u'activated'],
              u'image': u'http://fab.wmflabs.org/file/data/fijwoqt62w6atpond4vb/PHID-FILE-37htsfegn7bnlfvzwsts/profile-profile-gravatar',
              u'uri': u'http://fab.wmflabs.org/p/bd808/'}>
    """
    authorInfo = oldfab.user.info(phid=tinfo['authorPHID'])
    tinfo['xauthor'] = phabdb.email_by_userphid(authorInfo['phid'])
    lauthor = tinfo['xauthor'] or 'no author'
    vlog('author: ' + lauthor)
    ccs = []
    if tinfo['ccPHIDs']:
        for c in tinfo['ccPHIDs']:
            ccInfo = oldfab.user.info(phid=c)
            ccs.append(phabdb.email_by_userphid(ccInfo['phid']))
    tinfo['xccs'] = ccs
    vlog('ccs: ' + str(ccs))
    if tinfo['ownerPHID']:
        tinfo['xowner'] = phabdb.email_by_userphid(tinfo['ownerPHID'])
    else:
        tinfo['xowner'] = None
    """
    u'data': {u'PHID-PROJ-5ncvaivs3upngr7ijqy2': {u'phid': u'PHID-PROJ-5ncvaivs3upngr7ijqy2',
                                                  u'name': u'logstash',
                                                  u'dateCreated': u'1391641549',
                                                  u'members': [u'PHID-USER-65zhggegfvhojb4nynay'],
                                                  u'id': u'3',
                                                  u'dateModified': u'1398282408',
                                                  u'slugs': [u'logstash']}},
    u'slugMap': []}>
    """
    project_names = []
    associated_projects = tinfo['projectPHIDs']
    vlog('associated projects: %s' % (str(tinfo['projectPHIDs'])))
    # if we try to query for an empty list we get back ALLLLLL
    if associated_projects:
        pinfo = oldfab.project.query(phids=associated_projects)
        if pinfo['data']:
            for p in pinfo['data'].values():
                project_names.append(p['name'])

    def norm(pname):
        return pname.lower().replace(' ', '_').replace('-', '_')

    norm_projects = [norm(p) for p in fablib.saved]
    saved_projects = [norm(p) in norm_projects for p in project_names]
    if not any(saved_projects):
        print "Skipping %s as it's not in a saved project" % (PHABTICKETID)
        return True
    vlog('project names: ' + str(project_names))
    tinfo['xprojects'] = project_names
    status = tinfo['status']
    if status != 'open':
        creation_priority = ipriority['na']
    else:
        creation_priority = ipriority['unresolved']
    blocked_tasks = phabdb.get_tasks_blocked(tinfo['phid'])
    if blocked_tasks:
        blocked = []
        block_details = oldfab.maniphest.find(phids=blocked_tasks)
        for k, v in block_details.iteritems():
            blocked.append(v['id'])
    else:
        blocked = []
    vlog('blocking: %s' % (str(blocked)))
    tinfo['xblocking'] = blocked
    pmig = phabdb.phdb(db='fab_migration')
    current = pmig.sql_x("SELECT * from fab_meta where id = %s", PHABTICKETID)
    if current:
        log('updating current record %s' % (PHABTICKETID,))
        update_values = (creation_priority, json.dumps(tinfo),
                         json.dumps(comments), now(), PHABTICKETID)
        pmig.sql_x("UPDATE fab_meta SET priority=%s, header=%s, comments=%s, modified=%s WHERE id = %s",
                   update_values)
    else:
        log('inserting new record %s' % (PHABTICKETID,))
        insert_values = (PHABTICKETID, creation_priority, json.dumps(tinfo),
                         json.dumps(comments), now(), now())
        pmig.sql_x("INSERT INTO fab_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)",
                   insert_values)
    pmig.close()
    return True
def create(bugid):
    phab = Phabricator(config.phab_user,
                       config.phab_cert,
                       config.phab_host)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    current = pmig.sql_x("SELECT priority, header, comments, created, modified FROM bugzilla_meta WHERE id = %s",
                         (bugid,))
    if current:
        import_priority, buginfo, com, created, modified = current[0]
    else:
        elog('%s not present for migration' % (bugid,))
        return False

    def get_ref(id):
        refexists = phabdb.reference_ticket('%s%s' % (bzlib.prepend, id))
        if refexists:
            return refexists[0]

    buginfo = json.loads(buginfo)
    com = json.loads(com)
    bugid = int(bugid)
    vlog(bugid)
    vlog(buginfo)
    ticket = get_ref(bugid)
    print 'TICKET ', ticket

    def is_sensitive(name):
        return name.strip().lower().startswith('security')

    def project_security_settings(pname):
        if is_sensitive(pname):
            ephid = phabdb.get_project_phid('security')
            edit = ephid
        else:
            edit = 'users'
        view = 'public'
        return edit, view

    server = xmlrpclib.ServerProxy(config.Bugzilla_url, use_datetime=True)
    token_data = server.User.login({'login': config.Bugzilla_login,
                                    'password': config.Bugzilla_password})
    token = token_data['token']
    # http://www.bugzilla.org/docs/tip/en/html/api/Bugzilla/WebService/Bug.html#attachments
    kwargs = {'ids': [bugid], 'Bugzilla_token': token}
    # list of projects to add to ticket
    ptags = []
    if buginfo['status'] == 'VERIFIED':
        vlog("Adding 'verified' to %s" % (ticket,))
        ptags.append(('verified', 'tags'))
    if buginfo['status'].lower() == 'patch_to_review':
        vlog("Adding 'Patch-For-Review' to %s" % (ticket,))
        ptags.append(('Patch-For-Review', 'tags', 'green'))
    log("status recognized as %s" % (buginfo['status'],))
    phids = []
    for p in ptags:
        edit, view = project_security_settings(p[0])
        phid = phabm.ensure_project(p[0], edit=edit, view=view)
        phids.append(phid)
        if p[1] is not None:
            vlog("setting project %s icon to %s" % (p[0], p[1]))
            set_project_icon(p[0], icon=p[1])
    for phid in phids:
        phabdb.set_related_project(ticket, phid)
    pmig.close()
    return True
def populate(rtid):
    pmig = phabdb.phdb(db=config.rtmigrate_db,
                       user=config.rtmigrate_user,
                       passwd=config.rtmigrate_passwd)
    issue = pmig.sql_x("SELECT id FROM rt_meta WHERE id = %s", rtid)
    if not issue:
        log("issue %s does not exist for user population" % (rtid,))
        return "missing"
    fpriority = pmig.sql_x("SELECT priority FROM rt_meta WHERE id = %s", rtid)
    if fpriority[0] == ipriority["fetch_failed"]:
        log("issue %s was not fetched successfully for user population (failed fetch)" % (rtid,))
        return True
    current = pmig.sql_x("SELECT priority, header, comments, created, modified FROM rt_meta WHERE id = %s", rtid)
    if current:
        import_priority, buginfo, com, created, modified = current[0]
    else:
        log("%s not present for migration" % (rtid,))
        return True
    header = json.loads(buginfo)
    vlog(str(header))
    relations = {}
    relations["author"] = rtlib.user_lookup(header["Creator"])
    ccusers = header["AdminCc"].split(",") + header["Cc"].split(",")
    relations["cc"] = ccusers
    relations["cc"] = [cc.strip() for cc in relations["cc"] if cc]
    # RT uses a literal "Nobody" for unassigned tickets
    if header["Owner"] == "Nobody":
        relations["owner"] = ""
    else:
        relations["owner"] = rtlib.user_lookup(header["Owner"])
    for k, v in relations.iteritems():
        if relations[k]:
            relations[k] = filter(bool, v)

    def add_owner(owner):
        ouser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (owner,))
        if ouser:
            jassigned = pmig.sql_x("SELECT assigned FROM user_relations WHERE user = %s", (owner,))
            jflat = tflatten(jassigned)
            if any(jflat):
                assigned = json.loads(jassigned[0][0])
            else:
                assigned = []
            if rtid not in assigned:
                log("Assigning %s to %s" % (str(rtid), owner))
                assigned.append(rtid)
                vlog("owner %s" % (str(assigned),))
                pmig.sql_x("UPDATE user_relations SET assigned=%s, modified=%s WHERE user = %s",
                           (json.dumps(assigned), now(), owner))
        else:
            vlog("inserting new record")
            assigned = json.dumps([rtid])
            insert_values = (owner, assigned, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, assigned, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_author(author):
        euser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (relations["author"],))
        if euser:
            jauthored = pmig.sql_x("SELECT author FROM user_relations WHERE user = %s", (relations["author"],))
            jflat = tflatten(jauthored)
            if any(jflat):
                authored = json.loads(jauthored[0][0])
            else:
                authored = []
            if rtid not in authored:
                authored.append(rtid)
                vlog("author %s" % (str(authored),))
                pmig.sql_x("UPDATE user_relations SET author=%s, modified=%s WHERE user = %s",
                           (json.dumps(authored), now(), relations["author"]))
        else:
            vlog("inserting new record")
            authored = json.dumps([rtid])
            insert_values = (relations["author"], authored, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, author, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_cc(ccuser):
        eccuser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (ccuser,))
        if eccuser:
            jcc = pmig.sql_x("SELECT cc FROM user_relations WHERE user = %s", (ccuser,))
            jflat = tflatten(jcc)
            if any(jflat):
                cc = json.loads(jcc[0][0])
            else:
                cc = []
            if rtid not in cc:
                cc.append(rtid)
                vlog("cc %s" % (str(cc),))
                pmig.sql_x("UPDATE user_relations SET cc=%s, modified=%s WHERE user = %s",
                           (json.dumps(cc), now(), ccuser))
        else:
            vlog("inserting new record")
            cc = json.dumps([rtid])
            insert_values = (ccuser, cc, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, cc, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    if relations["author"]:
        add_author(relations["author"])
    if relations["owner"]:
        add_owner(relations["owner"])
    if relations["cc"]:
        for u in filter(bool, relations["cc"]):
            add_cc(u)
    pmig.close()
    return True
def fetch(PHABTICKETID):
    PHABTICKETID = int(PHABTICKETID)
    parser = ConfigParser.SafeConfigParser()
    parser_mode = 'phab'
    parser.read(configfile)
    phab = Phabricator(parser.get(parser_mode, 'username'),
                       parser.get(parser_mode, 'certificate'),
                       parser.get(parser_mode, 'host'))
    # dummy instance of phabapi
    phabm = phabmacros('', '', '')
    phabm.con = phab
    pmig = phabdb.phdb(db='fab_migration')
    issue = pmig.sql_x("SELECT id FROM fab_meta WHERE id = %s", PHABTICKETID)
    if not issue:
        log('%s not present for migration' % (PHABTICKETID,))
        return True
    exists = phabdb.reference_ticket('%s%s' % (fablib.prepend, PHABTICKETID))
    if exists:
        log('reference ticket %s already exists' % (PHABTICKETID,))
        return True
    tid, import_priority, header, com, created, modified = pmig.sql_x("SELECT * FROM fab_meta WHERE id = %s",
                                                                      PHABTICKETID)
    vlog('priority: %d' % (import_priority,))
    tinfo = json.loads(header)
    comments = json.loads(com)
    proj_phids = []
    for pn in tinfo['xprojects']:
        proj_phids.append(phabm.ensure_project(pn))
    vlog(proj_phids)
    priorities = {"Unbreak Now!": 100,
                  "Needs Triage": 90,
                  "High": 80,
                  "Normal": 50,
                  "Low": 25,
                  "Needs Volunteer": 10,
                  0: 10,
                  '0': 10}
    newticket = phab.maniphest.createtask(title=tinfo['title'],
                                          description=tinfo['description'],
                                          projectPHIDs=proj_phids,
                                          priority=priorities[tinfo['priority']],
                                          auxiliary={"std:maniphest:external_reference": "fl%s" % (PHABTICKETID,)})
    phabdb.set_task_ctime(newticket['phid'], tinfo['dateCreated'])
    log('setting ctime of %s for %s' % (tinfo['dateCreated'], newticket['id']))
    log('Created phab ticket %s for %s' % (newticket['id'], PHABTICKETID))
    vlog(newticket)
    # 0 {'xcommenter': {u'userName': u'uvhooligan',
    #                   u'phid': u'PHID-USER-lb2dbts4cdunqxzjqf2d',
    #                   u'realName': u'Un Ver Hooligan',
    #                   u'roles': [u'unverified', u'approved', u'activated'],
    #                   u'image': u'http://fabapi.wmflabs.org/res/phabricator/3eb28cd9/rsrc/image/avatar.png',
    #                   u'uri': u'http://fabapi.wmflabs.org/p/uvhooligan/'},
    #    'created': 1409875492L, 'xuseremail': None,
    #    'text': 'hi guys I hate email', 'last_edit': 1409875492L,
    #    'xuserphid': 'PHID-USER-lb2dbts4cdunqxzjqf2d'}
    csorted = sorted(comments.values(), key=lambda k: k['created'])
    for k, v in enumerate(csorted):
        created = epoch_to_datetime(v['created'])
        user = v['xcommenter']['userName']
        comment_body = "**%s** wrote on `%s`\n\n%s" % (user, created, v['text'])
        vlog(phabm.task_comment(newticket['id'], comment_body))
    if tinfo["status"] == "wontfix":
        tinfo["status"] = 'resolved'
    if tinfo['status'] != 'open':
        log('set status %s' % (tinfo['status']))
        vlog(phabm.task_comment(newticket['id'], '//importing issue status//'))
        vlog(phabm.set_status(newticket['id'], tinfo['status']))
    phabdb.set_task_mtime(newticket['phid'], tinfo['dateModified'])
    log('setting modtime of %s for %s' % (tinfo['dateModified'], newticket['id']))
    pmig.close()
    time.sleep(1)
    return True
def fetch(fabid):
    ausers = {}
    pmig = phabdb.phdb(db='fab_migration')
    issue = pmig.sql_x("SELECT id FROM fab_meta WHERE id = %s", fabid)
    if not issue:
        log('issue %s does not exist for user population' % (fabid,))
        return True
    fpriority = pmig.sql_x("SELECT priority FROM fab_meta WHERE id = %s", fabid)
    if fpriority[0] == ipriority['fetch_failed']:
        log('issue %s was not fetched successfully for user population (failed fetch)' % (fabid,))
        return True
    tid, import_priority, jheader, com, created, modified = pmig.sql_x("SELECT * FROM fab_meta WHERE id = %s", fabid)
    header = json.loads(jheader)
    vlog(str(header))
    relations = {}
    relations['author'] = header['xauthor']
    relations['cc'] = header['xccs']
    relations['owner'] = header['xowner']
    for k, v in relations.iteritems():
        if relations[k]:
            relations[k] = filter(bool, v)

    def add_owner(owner):
        ouser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (owner,))
        if ouser:
            jassigned = pmig.sql_x("SELECT assigned FROM user_relations WHERE user = %s", (owner,))
            if jassigned[0]:
                assigned = json.loads(jassigned[0])
            else:
                assigned = []
            if fabid not in assigned:
                log("Assigning %s to %s" % (str(fabid), owner))
                assigned.append(fabid)
                vlog("owner %s" % (str(assigned),))
                pmig.sql_x("UPDATE user_relations SET assigned=%s, modified=%s WHERE user = %s",
                           (json.dumps(assigned), now(), owner))
        else:
            vlog('inserting new record')
            assigned = json.dumps([fabid])
            insert_values = (owner, assigned, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, assigned, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_author(author):
        euser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (relations['author'],))
        if euser:
            jauthored = pmig.sql_x("SELECT author FROM user_relations WHERE user = %s", (relations['author'],))
            if jauthored[0]:
                authored = json.loads(jauthored[0])
            else:
                authored = []
            if fabid not in authored:
                authored.append(fabid)
                vlog("author %s" % (str(authored),))
                pmig.sql_x("UPDATE user_relations SET author=%s, modified=%s WHERE user = %s",
                           (json.dumps(authored), now(), relations['author']))
        else:
            vlog('inserting new record')
            authored = json.dumps([fabid])
            insert_values = (relations['author'], authored, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, author, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    def add_cc(ccuser):
        eccuser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (ccuser,))
        if eccuser:
            jcc = pmig.sql_x("SELECT cc FROM user_relations WHERE user = %s", (ccuser,))
            if jcc[0]:
                cc = json.loads(jcc[0])
            else:
                cc = []
            if fabid not in cc:
                cc.append(fabid)
                vlog("cc %s" % (str(cc),))
                pmig.sql_x("UPDATE user_relations SET cc=%s, modified=%s WHERE user = %s",
                           (json.dumps(cc), now(), ccuser))
        else:
            vlog('inserting new record')
            cc = json.dumps([fabid])
            insert_values = (ccuser, cc, now(), now())
            pmig.sql_x("INSERT INTO user_relations (user, cc, created, modified) VALUES (%s, %s, %s, %s)",
                       insert_values)

    if relations['author']:
        add_author(relations['author'])
    if relations['owner']:
        add_owner(relations['owner'])
    if relations['cc']:
        for u in filter(bool, relations['cc']):
            add_cc(u)
    pmig.close()
    return True
def main():
    parser = argparse.ArgumentParser(description='Updates user header metadata from bugzilla')
    parser.add_argument('-a', action="store_true", default=False)
    parser.add_argument('-e', action="store", dest='email')
    parser.add_argument('-m', action="store", dest="starting_epoch", default=None)
    parser.add_argument('-v', action="store_true", default=False)
    args = parser.parse_args()
    pmig = phabdb.phdb(db=config.rtmigrate_db,
                       user=config.rtmigrate_user,
                       passwd=config.rtmigrate_passwd)
    if args.a:
        starting_epoch = phabdb.get_user_relations_last_finish(pmig)
        users, finish_epoch = phabdb.get_verified_users(starting_epoch, config.bz_updatelimit)
    elif args.email:
        users = phabdb.get_verified_user(args.email)
        starting_epoch = 0
        finish_epoch = 0
    elif args.starting_epoch:
        users, finish_epoch = phabdb.get_verified_users(args.starting_epoch)
        starting_epoch = args.starting_epoch
    else:
        parser.print_help()
        sys.exit(1)
    if not any(users):
        log("Exiting as there are no new verified users")
        sys.exit()
    histories = get_user_histories(filter(bool, users))
    user_count = len(histories)
    icounts = []
    for u in histories:
        c = 0
        if u['cc']:
            c += len(u['cc'])
        if u['author']:
            c += len(u['author'])
        if u['assigned']:
            c += len(u['assigned'])
        icounts.append(c)
    issue_count = sum(icounts)
    log("User Count %s" % (str(user_count)))
    log("Issue Count %s" % (str(issue_count)))
    pid = os.getpid()
    source = util.source_name(sys.argv[0])
    phabdb.user_relations_start(pid, source, int(time.time()), ipriority['na'],
                                starting_epoch, user_count, issue_count, pmig)
    from multiprocessing import Pool
    pool = Pool(processes=int(config.bz_updatemulti))
    _ = pool.map(run_update, histories)
    complete = len(filter(bool, _))
    failed = len(_) - complete
    phabdb.user_relations_finish(pid, int(time.time()), ipriority['update_success'],
                                 finish_epoch, complete, failed, pmig)
    pmig.close()
    pm = phabmacros(config.phab_user, config.phab_cert, config.phab_host)
    vlog(util.update_blog(source, complete, failed, user_count, issue_count, pm))
    print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)