def run_fetch(fabid, tries=1):
    """Fetch a Fab task with retries; when retries run out, record the
    fetch_failed priority in fab_meta and give up."""
    if tries == 0:
        # Retries exhausted: persist the failure so it can be retried later.
        db = phabdb.phdb(db='fab_migration')
        found = db.sql_x("SELECT priority FROM fab_meta WHERE id = %s", (fabid,))
        if not found:
            row = (fabid, ipriority['fetch_failed'], 'nan', 'nan', now(), now())
            db.sql_x("INSERT INTO fab_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)", row)
        else:
            log('updating existing record')
            db.sql_x("UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s",
                     (ipriority['fetch_failed'], now(), fabid))
        db.close()
        log('failed to grab %s' % (fabid,))
        return False
    try:
        if fetch(fabid):
            vlog(str(time.time()))
            log('done fetching %s' % (fabid,))
            return True
    except Exception as e:
        import traceback
        traceback.print_exc(file=sys.stdout)
        log('failed to grab %s (%s)' % (fabid, e))
        time.sleep(5)
        return run_fetch(fabid, tries=tries - 1)
def add_comment_ref(owner):
    """ adds an issue reference to a user or later updating their comments """
    existing = pmig.sql_x("SELECT user FROM user_relations_comments WHERE user = %s", (owner,))
    if not existing:
        # First reference for this user: create the row with a fresh list.
        pmig.sql_x("INSERT INTO user_relations_comments (user, issues, created, modified) VALUES (%s, %s, %s, %s)",
                   (owner, json.dumps([bugid]), now(), now()))
        return
    raw = pmig.sql_x("SELECT issues FROM user_relations_comments WHERE user = %s", (owner,))
    if raw and any(tflatten(raw)):
        issues = json.loads(raw[0][0])
    else:
        issues = []
    if bugid in issues:
        return
    log("Comment reference %s to %s" % (str(bugid), owner))
    issues.append(bugid)
    pmig.sql_x("UPDATE user_relations_comments SET issues=%s, modified=%s WHERE user = %s",
               (json.dumps(issues), now(), owner))
def run_fetch(bugid, tries=1):
    """Fetch a Bugzilla bug with retries.

    Returns fetch()'s result on success; after the final retry, records
    the fetch_failed priority in bugzilla_meta and returns False.
    """
    if tries == 0:
        # Out of retries: persist the failed state so the bug can be
        # found and re-fetched later.
        pmig = phabdb.phdb(db=config.bzmigrate_db,
                           user=config.bzmigrate_user,
                           passwd=config.bzmigrate_passwd)
        current = pmig.sql_x("SELECT * from bugzilla_meta \
                             where id = %s", bugid)
        if current:
            # Blank out header/comments along with the failed priority.
            update_values = (ipriority['fetch_failed'], '', '', now(), bugid)
            pmig.sql_x("UPDATE bugzilla_meta SET priority=%s, \
                       header=%s, \
                       comments=%s, \
                       modified=%s \
                       WHERE id = %s", update_values)
        else:
            # NOTE(review): column order here is (modified, created) while
            # sibling inserts use (created, modified); harmless only because
            # both values are now() -- confirm intended.
            insert_values = (bugid, ipriority['fetch_failed'], '', '', now(), now())
            pmig.sql_x("INSERT INTO bugzilla_meta \
                       (id, priority, header, comments, modified, created) \
                       VALUES (%s, %s, %s, %s, %s, %s)", insert_values)
        pmig.close()
        elog('failed to grab %s' % (bugid,))
        return False
    try:
        return fetch(bugid)
    except Exception as e:
        # Log the traceback, back off, and recurse with one fewer try.
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to fetch %s (%s)' % (bugid, e))
        return run_fetch(bugid, tries=tries)
def add_owner(owner):
    """Append bugid to the user's assigned-issue list, inserting the
    user_relations row when it does not exist yet."""
    known = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (owner,))
    if not known:
        vlog('inserting new record')
        serialized = json.dumps([bugid])
        pmig.sql_x("INSERT INTO user_relations (user, assigned, created, modified) VALUES (%s, %s, %s, %s)",
                   (owner, serialized, now(), now()))
        return
    raw = pmig.sql_x("SELECT assigned FROM user_relations WHERE user = %s", (owner,))
    assigned = json.loads(raw[0][0]) if any(tflatten(raw)) else []
    if bugid not in assigned:
        log("Assigning %s to %s" % (str(bugid), owner))
        assigned.append(bugid)
        vlog("owner %s" % (str(assigned),))
        pmig.sql_x("UPDATE user_relations SET assigned=%s, modified=%s WHERE user = %s",
                   (json.dumps(assigned), now(), owner))
def run_fetch(fabid, tries=1): if tries == 0: pmig = phabdb.phdb(db='fab_migration') import_priority = pmig.sql_x("SELECT priority FROM fab_meta WHERE id = %s", (fabid,)) if import_priority: log('updating existing record') pmig.sql_x("UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s", (ipriority['creation_failed'], now(), fabid)) else: print "%s does not seem to exist" % (fabid) pmig.close() print 'failed to grab %s' % (fabid,) return False try: if fetch(fabid): try: pmig = phabdb.phdb(db='fab_migration') pandmupdate = "UPDATE fab_meta SET priority=%s, modified=%s WHERE id = %s" pmig.sql_x(pandmupdate, (ipriority['creation_success'], now(), fabid)) print time.time() print 'done with %s' % (fabid,) except: return False return True except Exception as e: import traceback tries -= 1 time.sleep(5) traceback.print_exc(file=sys.stdout) print 'failed to grab %s (%s)' % (fabid, e) return run_fetch(fabid, tries=tries)
def run_update(bugid, tries=1):
    """Retry wrapper around update(); after the final failure record the
    creation_failed priority (with an empty blocks list) in task_relations.
    """
    if tries == 0:
        # Retries exhausted: upsert the failed state so the ticket can be
        # located and re-run later.
        pmig = phabdb.phdb(db=config.rtmigrate_db)
        current = pmig.sql_x("SELECT * from \
                             task_relations \
                             where id = %s", bugid)
        if current:
            pmig.sql_x("UPDATE task_relations \
                       SET priority=%s, \
                       blocks=%s, \
                       modified=%s \
                       WHERE id = %s",
                       (ipriority['creation_failed'], json.dumps([]), now(), bugid))
        else:
            sql = "INSERT INTO task_relations \
                  (id, priority, blocks, modified) \
                  VALUES (%s, %s, %s, %s)"
            pmig.sql_x(sql, (bugid, ipriority['creation_failed'], json.dumps([]), now()))
        pmig.close()
        elog('final fail to update %s' % (bugid,))
        return False
    try:
        return update(bugid)
    except Exception as e:
        # Log the traceback, back off, and recurse with one fewer try.
        # (e itself is not included in the elog message here.)
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to update %s' % (bugid,))
        return run_update(bugid, tries=tries)
def update(bugid):
    """Publish 'blocks' relationships for one migrated Bugzilla task.

    Reads the cached header JSON from bugzilla_meta, links every ticket in
    its 'blocks' list via external references, then upserts the resulting
    blocked-task list into task_relations.  Returns True on success (or
    nothing to do), False when no cached header exists.
    """
    pmig = phabdb.phdb(db=config.bzmigrate_db, user=config.bzmigrate_user, passwd=config.bzmigrate_passwd)
    epriority = pmig.sql_x("SELECT priority from task_relations where id = %s", bugid)
    # NOTE(review): header below is read as header[0][0] (row tuple), yet
    # epriority[0] is compared directly to an int -- this looks always
    # False; confirm sql_x's return shape before relying on the skip.
    if epriority and epriority[0] == ipriority['update_success']:
        log('skipping %s as blockers already updated' % (bugid,))
        return True
    hq = "SELECT header FROM bugzilla_meta WHERE id = %s"
    header = pmig.sql_x(hq, (bugid,))
    if not header:
        elog('no header found for %s' % (bugid,))
        return False

    def extref(ticket):
        # Map a source bug id to its Phabricator task reference ('' if absent).
        refid = phabdb.reference_ticket("%s%s" % (bzlib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(bugid)
    tinfo = json.loads(header[0][0])
    if not tinfo['blocks']:
        log("%s doesn't block anything" % (str(bugid),))
        return True
    for b in tinfo["blocks"]:
        blocked_ref = extref(b)
        log("%s is blocking %s" % (blocker_ref, blocked_ref))
        if blocked_ref:
            log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
        else:
            log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    # Re-read the authoritative blocked list from Phabricator and persist it.
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    vlog('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = pmig.sql_x("SELECT * from task_relations where id = %s", bugid)
    if current:
        pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                   (ipriority['update_success'], json.dumps(blocks), now(), bugid))
    else:
        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
        pmig.sql_x(sql, (bugid, ipriority['update_success'], json.dumps(blocks), now()))
    pmig.close()
    return True
def run_create(rtid, tries=1):
    """Retry wrapper around create(); after the final failure mark the
    rt_meta row creation_failed."""
    if tries == 0:
        pmig = phabdb.phdb(db=config.rtmigrate_db)
        import_priority = pmig.sql_x("SELECT priority \
                                     FROM rt_meta \
                                     WHERE id = %s", \
                                     (rtid,))
        if import_priority:
            pmig.sql_x("UPDATE rt_meta \
                       SET priority=%s, modified=%s \
                       WHERE id = %s",
                       (ipriority['creation_failed'], now(), rtid))
        else:
            # No meta row to flag; nothing to update.
            elog("%s does not seem to exist" % (rtid))
        elog('failed to create %s' % (rtid,))
        pmig.close()
        return False
    try:
        return create(rtid)
    except Exception as e:
        # Log the traceback, back off, and recurse with one fewer try.
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to grab %s (%s)' % (rtid, e))
        return run_create(rtid, tries=tries)
def fetch(bugid):
    """Pull one bug (header + comments) from Bugzilla over XML-RPC and
    cache both as JSON in the bugzilla_meta table.  Returns True."""
    pmig = phabdb.phdb(db=config.bzmigrate_db,
                       user=config.bzmigrate_user,
                       passwd=config.bzmigrate_passwd)
    server = xmlrpclib.ServerProxy(config.Bugzilla_url, use_datetime=True)
    token_data = server.User.login({'login': config.Bugzilla_login,
                                    'password': config.Bugzilla_password})
    token = token_data['token']
    kwargs = {'ids': [bugid], 'Bugzilla_token': token}
    #grabbing one bug at a time for now
    buginfo = server.Bug.get(kwargs)['bugs']
    buginfo = buginfo[0]
    # some bugs have header data but no actual content
    # https://bugzilla.wikimedia.org/show_bug.cgi?id=32738
    com = server.Bug.comments(kwargs)['bugs'][str(bugid)]['comments']
    #have to do for json
    # XML-RPC (use_datetime=True) hands back datetime objects, which json
    # cannot serialize -- convert every timestamp to an epoch value first.
    buginfo['last_change_time'] = datetime_to_epoch(buginfo['last_change_time'])
    buginfo['creation_time'] = datetime_to_epoch(buginfo['creation_time'])
    if 'flags' in buginfo:
        for flag in buginfo['flags']:
            for k, v in flag.iteritems():
                if isinstance(v, datetime.datetime):
                    flag[k] = datetime_to_epoch(v)
    for c in com:
        c['creation_time'] = datetime_to_epoch(c['creation_time'])
        c['time'] = datetime_to_epoch(c['time'])
    # set ticket status for priority import
    status = bzlib.status_convert(buginfo['status'], buginfo['resolution'])
    if status == 'open':
        creation_priority = ipriority['unresolved']
    else:
        creation_priority = ipriority['na']
    # Upsert: update the cached row when present, otherwise insert it.
    current = pmig.sql_x("SELECT * from bugzilla_meta where id = %s", bugid)
    if current:
        update_values = (creation_priority, json.dumps(buginfo), json.dumps(com), now(), bugid)
        vlog('update: ' + str(update_values))
        pmig.sql_x("UPDATE bugzilla_meta SET priority=%s, header=%s, comments=%s, modified=%s WHERE id = %s",
                   update_values)
    else:
        insert_values = (bugid, creation_priority, json.dumps(buginfo), json.dumps(com), now(), now())
        vlog('insert: ' + str(insert_values))
        sql = "INSERT INTO bugzilla_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)"
        pmig.sql_x(sql, insert_values)
    pmig.close()
    return True
def update(id):
    """Publish blocker relationships for one migrated Fab task.

    Reads the cached header JSON from fab_meta, links every ticket in its
    'xblocking' list to this task via external references, then upserts
    the resulting blocked-task list into task_relations.  Returns True
    (also when there is nothing to do).
    """
    fabdb = phabdb.phdb(db='fab_migration')
    epriority = fabdb.sql_x("SELECT priority from task_relations where id = %s", id)
    # NOTE(review): epriority[0] is a row tuple; comparing it to an int
    # looks always-False.  The same pattern exists in the bz/rt updaters,
    # so confirm sql_x's return shape before changing it.
    if epriority and epriority[0] == ipriority['creation_success']:
        log('Skipping %s as blockers already updated' % (id,))
        return True
    hq = "SELECT header FROM fab_meta WHERE id = %s"
    header = fabdb.sql_x(hq, (id,))
    if not header:
        vlog('no header found for %s' % (id,))
        return True

    def extref(ticket):
        # Map a source ticket id to its Phabricator task reference ('' if absent).
        refid = phabdb.reference_ticket("%s%s" % (fablib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(id)
    # BUG FIX: sql_x returns rows of tuples, so the JSON text is the first
    # column of the first row.  json.loads(header[0]) passed a tuple; the
    # bugzilla/rt equivalents already use header[0][0].
    tinfo = json.loads(header[0][0])
    vlog(tinfo)
    for b in tinfo['xblocking']:
        blocked_ref = extref(b)
        log("%s is blocking %s" % (blocker_ref, blocked_ref))
        if blocked_ref:
            log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
        else:
            log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    # Re-read the authoritative blocked list from Phabricator and persist it.
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    log('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = fabdb.sql_x("SELECT * from task_relations where id = %s", id)
    if current:
        fabdb.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                    (ipriority['creation_success'], json.dumps(blocks), now(), id))
    else:
        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
        fabdb.sql_x(sql, (id, ipriority['creation_success'], json.dumps(blocks), now()))
    fabdb.close()
    return True
def add_cc(ccuser):
    """Append rtid to the user's cc list, inserting the user_relations
    row when it does not exist yet."""
    row = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (ccuser,))
    if not row:
        vlog("inserting new record")
        pmig.sql_x(
            "INSERT INTO user_relations (user, cc, created, modified) VALUES (%s, %s, %s, %s)",
            (ccuser, json.dumps([rtid]), now(), now())
        )
        return
    raw = pmig.sql_x("SELECT cc FROM user_relations WHERE user = %s", (ccuser,))
    cc = json.loads(raw[0][0]) if any(tflatten(raw)) else []
    if rtid not in cc:
        cc.append(rtid)
        vlog("cc %s" % (str(cc),))
        pmig.sql_x("UPDATE user_relations SET cc=%s, modified=%s WHERE user = %s",
                   (json.dumps(cc), now(), ccuser))
def add_author(author):
    """Record fabid in the author's authored-issue list (user_relations).

    Note: reads the author from the enclosing scope's relations['author'];
    the `author` parameter is kept for interface compatibility.
    """
    euser = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (relations['author'],))
    if euser:
        jauthored = pmig.sql_x("SELECT author FROM user_relations WHERE user = %s", (relations['author'],))
        # BUG FIX: sql_x returns rows of tuples.  The old code did
        # json.loads(jauthored[0]) (a tuple) and indexed [0] without an
        # empty-result guard.  Use the tflatten/[0][0] pattern of the
        # sibling add_owner/add_cc/add_author helpers.
        jflat = tflatten(jauthored)
        if any(jflat):
            authored = json.loads(jauthored[0][0])
        else:
            authored = []
        if fabid not in authored:
            authored.append(fabid)
            vlog("author %s" % (str(authored),))
            pmig.sql_x("UPDATE user_relations SET author=%s, modified=%s WHERE user = %s",
                       (json.dumps(authored), now(), relations['author']))
    else:
        vlog('inserting new record')
        authored = json.dumps([fabid])
        insert_values = (relations['author'], authored, now(), now())
        pmig.sql_x("INSERT INTO user_relations (user, author, created, modified) VALUES (%s, %s, %s, %s)",
                   insert_values)
def run_fetch(tid, tries=1):
    """Fetch an RT ticket with retries; when retries run out, insert a
    fetch_failed placeholder row into rt_meta and return False."""
    if tries == 0:
        # NOTE(review): unlike the bugzilla variant this always INSERTs and
        # never checks for an existing row -- a previously-seen id may hit
        # a duplicate-key error; confirm intended.
        pmig = phabdb.phdb(db=config.rtmigrate_db,
                           user=config.rtmigrate_user,
                           passwd=config.rtmigrate_passwd)
        insert_values = (tid, ipriority['fetch_failed'], '', '', now(), now())
        pmig.sql_x("INSERT INTO rt_meta \
                   (id, priority, header, comments, created, modified) \
                   VALUES (%s, %s, %s, %s, %s, %s)", insert_values)
        pmig.close()
        elog('failed to grab %s' % (tid,))
        return False
    try:
        return fetch(tid)
    except Exception as e:
        # Log the traceback, back off, and recurse with one fewer try.
        import traceback
        tries -= 1
        time.sleep(5)
        traceback.print_exc(file=sys.stdout)
        elog('failed to grab %s (%s)' % (tid, e))
        return run_fetch(tid, tries=tries)
def add_author(author):
    """Record rtid in the author's authored-issue list (user_relations).

    Reads the author from the enclosing scope's relations["author"].
    """
    existing = pmig.sql_x("SELECT user FROM user_relations WHERE user = %s", (relations["author"],))
    if not existing:
        vlog("inserting new record")
        serialized = json.dumps([rtid])
        pmig.sql_x(
            "INSERT INTO user_relations (user, author, created, modified) VALUES (%s, %s, %s, %s)",
            (relations["author"], serialized, now(), now())
        )
        return
    raw = pmig.sql_x("SELECT author FROM user_relations WHERE user = %s", (relations["author"],))
    authored = json.loads(raw[0][0]) if any(tflatten(raw)) else []
    if rtid in authored:
        return
    authored.append(rtid)
    vlog("author %s" % (str(authored),))
    pmig.sql_x(
        "UPDATE user_relations SET author=%s, modified=%s WHERE user = %s",
        (json.dumps(authored), now(), relations["author"]),
    )
def run_update(fabid, tries=1):
    """Retry wrapper around update(); after the final failure record the
    creation_failed priority (empty blocks list) in task_relations."""
    if tries == 0:
        log('final fail to grab %s' % (fabid,))
        db = phabdb.phdb(db='fab_migration')
        exists = db.sql_x("SELECT * from task_relations where id = %s", fabid)
        empty_blocks = json.dumps([])
        if exists:
            db.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
                     (ipriority['creation_failed'], empty_blocks, now(), fabid))
        else:
            db.sql_x("INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)",
                     (fabid, ipriority['creation_failed'], empty_blocks, now()))
        db.close()
        return False
    try:
        if update(fabid):
            log('%s done with %s' % (str(int(time.time())), fabid,))
            return True
    except Exception as e:
        import traceback
        traceback.print_exc(file=sys.stdout)
        log('failed to grab %s (%s)' % (fabid, e))
        time.sleep(5)
        return run_update(fabid, tries=tries - 1)
def update(bugid):
    """Publish block relationships for one migrated RT ticket.

    Reads the cached RT header from rt_meta, links 'parent' and 'blocks'
    tickets via external references, then upserts the resulting
    blocked-task list into task_relations.  Returns True on success,
    'missing' when no cached header exists.
    """
    pmig = phabdb.phdb(db=config.rtmigrate_db)
    epriority = pmig.sql_x("SELECT priority \
                           from task_relations \
                           where id = %s", bugid)
    # NOTE(review): header below is read as header[0][0] (row tuple), yet
    # epriority[0] is compared directly to an int -- this looks always
    # False; confirm sql_x's return shape before relying on the skip.
    if epriority and epriority[0] == ipriority['update_success']:
        log('skipping %s as blockers already updated' % (bugid,))
        return True
    query = "SELECT header FROM rt_meta WHERE id = %s"
    header = pmig.sql_x(query, (bugid,))
    if not header:
        elog('no header found for %s' % (bugid,))
        return 'missing'

    def extref(ticket):
        # Map an RT ticket number to its Phabricator task reference ('' if absent).
        refid = phabdb.reference_ticket("%s%s" % (rtlib.prepend, ticket))
        if not refid:
            return ''
        return refid[0]

    blocker_ref = extref(bugid)
    tinfo = json.loads(header[0][0])
    # Both 'parent' and 'blocks' links count as upstream tickets.
    upstream = []
    if 'parent' in tinfo['links']:
        upstream += tinfo['links']['parent']
    if 'blocks' in tinfo['links']:
        upstream += tinfo['links']['blocks']
    if upstream:
        for b in upstream:
            blocked_ref = extref(b)
            log("%s is blocking %s" % (blocker_ref, blocked_ref))
            if blocked_ref:
                log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
            else:
                log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
    # Re-read the authoritative blocked list from Phabricator and persist it.
    blocks = phabdb.get_tasks_blocked(blocker_ref)
    vlog('%s is blocking %s' % (blocker_ref, str(blocks)))
    current = pmig.sql_x("SELECT * \
                         from task_relations \
                         WHERE id = %s", bugid)
    if current:
        pmig.sql_x("UPDATE task_relations \
                   SET priority=%s, blocks=%s, modified=%s \
                   WHERE id = %s",
                   (ipriority['update_success'], json.dumps(blocks), now(), bugid))
    else:
        sql = "INSERT INTO task_relations \
              (id, priority, blocks, modified) \
              VALUES (%s, %s, %s, %s)"
        pmig.sql_x(sql, (bugid, ipriority['update_success'], json.dumps(blocks), now()))
    pmig.close()
    return True
def create(bugid):
    """Create a Phabricator task from a cached Bugzilla bug.

    Reads the cached header/comment JSON from bugzilla_meta, uploads
    attachments, builds the description and comment stream, creates the
    maniphest task, and records success back into bugzilla_meta.
    Returns True on success (or when the task already exists / is a
    skipped security issue), False when the bug was never cached.
    """
    phab = Phabricator(config.phab_user, config.phab_cert, config.phab_host)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    pmig = phabdb.phdb(db=config.bzmigrate_db, user=config.bzmigrate_user, passwd=config.bzmigrate_passwd)
    current = pmig.sql_x("SELECT priority, \
                         header, \
                         comments, \
                         created, \
                         modified \
                         FROM bugzilla_meta WHERE id = %s", (bugid,))
    if current:
        import_priority, buginfo, com, created, modified = current[0]
    else:
        pmig.close()
        elog('%s not present for migration' % (bugid,))
        return False

    def get_ref(id):
        # NOTE(review): closes pmig as a side effect when the reference
        # already exists -- intended because the function returns True
        # right after, but surprising; confirm before reuse.
        refexists = phabdb.reference_ticket('%s%s' % (bzlib.prepend, id))
        if refexists:
            pmig.close()
            return refexists

    if get_ref(bugid):
        log('reference ticket %s already exists' % (bugid,))
        return True
    buginfo = json.loads(buginfo)
    com = json.loads(com)
    bugid = int(bugid)
    vlog(bugid)
    vlog(buginfo)
    buginfo["secstate"] = 'none'
    # And the relevant herald rule must be in place.
    if bzlib.is_sensitive(buginfo["product"]):
        buginfo["secstate"] = 'security-bug'
        log("found security-bug issue %s" % (bugid,))
        #this allows testing while respecting security queue
        if config.bz_security.lower() != 'true':
            log("ignoring security issue %s" % (bugid,))
            return True
    server = xmlrpclib.ServerProxy(config.Bugzilla_url, use_datetime=True)
    token_data = server.User.login({'login': config.Bugzilla_login,
                                    'password': config.Bugzilla_password})
    token = token_data['token']
    #http://www.bugzilla.org/docs/tip/en/html/api/Bugzilla/WebService/Bug.html#attachments
    kwargs = {'ids': [bugid], 'Bugzilla_token': token}
    bzdata = open("data/bugzilla.yaml", 'r')
    bzdata_yaml = yaml.load(bzdata)
    tag_keys = bzdata_yaml['keywords_to_tags'].split(' ')
    mlists = bzdata_yaml['assigned_to_lists'].split(' ')
    vlog("Mailinglists: " + str(mlists))
    attached = server.Bug.attachments(kwargs)['bugs'][str(bugid)]
    vlog("bz attached items %s" % (str(attached)))
    #process ticket uploads to map attach id to phab file id
    uploads = {}
    for a in attached:
        vlog("processing bz attachment %s" % (str(a)))
        if a['is_private']:
            vlog('ignoring private attachment: %s' % (str(a)))
            a['ignore'] = 'private'
        elif a['is_obsolete'] == 1:
            vlog('ignoring obsolete attachment: %s' % (str(a)))
            a['ignore'] = 'obsolete'
        else:
            # Security bugs get the security project's view policy.
            if buginfo["secstate"] == 'none':
                viewpolicy = 'public'
            else:
                viewpolicy = phabdb.get_project_phid('security')
            try:
                #upload = phabm.upload_file(a['file_name'], a['data'].data)
                upload = phabm.upload_file(a['file_name'], a['data'].data, viewpolicy)
                a['phid'] = upload['phid']
                a['name'] = upload['name']
                a['objectName'] = upload['objectName']
            except Exception as e:
                print "\n%s ATTACHMENT CORRUPTED -- %s\n" % (str(bugid), str(a))
                print "%s --" % (str(e))
                a['ignore'] = 'corrupt on retrieval'
        # Ignored attachments are recorded too; comments check 'ignore'.
        uploads[a['id']] = a
    log('%s attachment count: %s' % (bugid, str(len(uploads.keys()))))
    vlog("phab upload details: %s" % (str(uploads)))
    #list of projects to add to ticket
    ptags = []
    #mask emails for public consumption
    buginfo['cc'] = [c.split('@')[0] for c in buginfo['cc']]
    # Convert bugzilla source to phabricator
    buginfo['status'] = bzlib.status_convert(buginfo['status'], buginfo['resolution'])
    buginfo['priority'] = bzlib.priority_convert(buginfo['priority'])
    if '-d' in sys.argv:
        with open('dump', 'w') as d:
            d.write(str(json.dumps(buginfo)))
    if buginfo['status'].lower() == 'patch_to_review':
        ptags.append(('patch_to_review', 'tags', 'green'))
    if buginfo['status'] == 'verified':
        ptags.append(('verified', 'tags'))
    if buginfo['cf_browser'] not in ['---', "Other"]:
        btag = "Browser_Support_%s" % (buginfo['cf_browser'].replace(' ', '-'),)
        log('Adding browser tag: %s' % (btag,))
        ptags.append((btag, 'tags'))
    if buginfo['target_milestone'] != '---':
        log('Creating milestone: %s' % (buginfo['target_milestone'],))
        ptags.append((buginfo['target_milestone'], 'truck'))
    #set defaults to be overridden by sec if needed
    buginfo['viewPolicy'] = 'public'
    buginfo['editPolicy'] = 'users'
    buginfo['project'] = bzlib.sanitize_project_name(buginfo["product"], buginfo["component"])
    vlog(buginfo['project'])
    ptags.append((buginfo['project'], None))
    title = buginfo['summary']
    # Normalize each raw comment dict and sanitize per security state.
    clean_com = []
    for c in com:
        if not isinstance(c, dict):
            c = ast.literal_eval(c)
        clean_c = bzlib.build_comment(c, buginfo['secstate'])
        clean_com.append(clean_c)
    log('project: ' + buginfo['project'])
    try:
        # strip out comment 0 as description
        description = clean_com[0]
        del clean_com[0]
    except IndexError:
        log("%s has no comment 0" % (str(bugid)))
        # some random tasks were created at a point in bugzilla
        # history with metadata but no comment 0
        # https://bugzilla.wikimedia.org/show_bug.cgi?id=32056
        description = {'author': buginfo['creator'].split('@')[0],
                       'text': '//this issue has no description//',
                       'creation_time': buginfo['creation_time']}
    created = epoch_to_datetime(description['creation_time'])
    desc_block = "**Author:** `%s`\n\n**Description:**\n%s\n" % (description['author'], description['text'])
    # https://phabricator.wikimedia.org/T694
    desc_tail = '--------------------------'
    desc_tail += "\n**Version**: %s" % (buginfo['version'])
    desc_tail += "\n**Severity**: %s" % (buginfo['severity'] or 'none')
    if buginfo['op_sys'] != 'All':
        desc_tail += "\n**OS**: %s" % (buginfo['op_sys'])
    if "platform" in buginfo and buginfo['platform'] != 'All':
        desc_tail += "\n**Platform**: %s" % (buginfo['platform'])
    if buginfo['whiteboard']:
        desc_tail += "\n**Whiteboard**: %s" % (buginfo['whiteboard'])
    if buginfo['url']:
        desc_tail += "\n**URL**: %s" % (buginfo['url'])
    if buginfo['see_also']:
        desc_tail += "\n**See Also**:\n%s" % ('\n'.join(buginfo['see_also']))
    # Reference the description's attachment (if any) in the tail.
    attachments = ''
    if 'attachment' in description:
        attached = int(description['attachment'])
        if attached in uploads:
            cattached = uploads[int(description['attachment'])]
            if 'objectName' in cattached:
                attachments = "\n\n**Attached**: {%s}" % (cattached['objectName'])
            if 'ignore' in cattached:
                attachments = "\n\n//attachment %s ignored as %s//" % (cattached['file_name'], cattached['ignore'])
        else:
            attachments = "\n\n//attachment missing in source//"
    desc_tail += attachments
    full_description = desc_block + '\n' + desc_tail
    keys = buginfo['keywords']
    for k in keys:
        if k in tag_keys:
            if k == 'ops':
                k = 'operations'
            ptags.append((k, 'tags'))

    def project_security_settings(pname):
        # Sensitive projects are edit-restricted to the security project.
        if bzlib.is_sensitive(pname):
            ephid = phabdb.get_project_phid('security')
            edit = ephid
        else:
            edit = 'public'
        view = 'public'
        return edit, view

    # Ensure every project exists and collect its PHID.
    phids = []
    for p in ptags:
        edit, view = project_security_settings(p[0])
        phid = phabm.ensure_project(p[0], edit=edit, view=view)
        phids.append(phid)
        if p[1] is not None:
            vlog("setting project %s icon to %s" % (p[0], p[1]))
            set_project_icon(p[0], icon=p[1])
    log("ptags: " + str(ptags))
    vlog("phids: " + str(phids))
    #buginfo'assigned_to': u'*****@*****.**'
    assignee = buginfo['assigned_to']
    ccphids = []
    if assignee in mlists:
        ccphids.append(mailinglist_phid(assignee))
    # viewPolicy = buginfo['viewPolicy'],
    # editPolicy = buginfo['editPolicy'],
    vlog("Ticket Info: %s" % (desc_block,))
    ticket = phab.maniphest.createtask(title=buginfo['summary'],
                                       description=full_description,
                                       projectPHIDs=phids,
                                       ccPHIDs=ccphids,
                                       priority=buginfo['priority'],
                                       auxiliary={"std:maniphest:external_reference": "bz%s" % (bugid,),
                                                  "std:maniphest:security_topic": "%s" % (buginfo["secstate"],)})
    log("Created task: T%s (%s)" % (ticket['id'], ticket['phid']))
    botphid = phabdb.get_phid_by_username(config.phab_user)
    phabdb.set_task_title_transaction(ticket['phid'], botphid, buginfo['viewPolicy'], buginfo['editPolicy'])
    phabdb.set_task_ctime(ticket['phid'], int(buginfo['creation_time'].split('.')[0]))
    # Replay every remaining comment, backdating each transaction.
    fmt_comments = {}
    for c in clean_com:
        fmt_comment = {}
        created = epoch_to_datetime(c['creation_time'])
        comment_header = "**%s** wrote:\n\n" % (c['author'],)
        comment_body = c['text']
        attachments = ''
        if 'attachment' in c:
            attached = int(c['attachment'])
            if attached in uploads:
                cattached = uploads[int(c['attachment'])]
                if 'objectName' in cattached:
                    attachments += "\n\n**Attached**: {%s}" % (cattached['objectName'])
                if 'ignore' in cattached:
                    attachments += "\n\n//attachment %s ignored as %s//" % (cattached['file_name'], cattached['ignore'])
            else:
                attachments += "\n\n//attachment missing in source//"
        fmt_comment['xpreamble'] = comment_header
        fmt_comment['xattached'] = attachments
        phabm.task_comment(ticket['id'], comment_header + comment_body + attachments)
        ctransaction = phabdb.last_comment(ticket['phid'])
        phabdb.set_comment_time(ctransaction, c['creation_time'])
        fmt_comment['xctransaction'] = ctransaction
        fmt_comments[c['count']] = fmt_comment
    if buginfo['status'] != 'open':
        log("setting status for T%s to %s" % (ticket['id'], buginfo['status']))
        phabdb.set_issue_status(ticket['phid'], buginfo['status'])
    phabdb.set_task_mtime(ticket['phid'], int(buginfo['last_change_time'].split('.')[0]))
    # Persist the comment-transaction map and mark the migration done.
    xcomments = json.dumps(fmt_comments)
    pmig.sql_x("UPDATE bugzilla_meta \
               SET xcomments=%s WHERE id = %s", (xcomments, bugid))
    pmig.sql_x("UPDATE bugzilla_meta \
               SET priority=%s, modified=%s WHERE id = %s",
               (ipriority['creation_success'], now(), bugid))
    pmig.close()
    return True
def fetch(PHABTICKETID):
    """Pull one task from the old Fab Phabricator instance via conduit and
    cache its header/comments as JSON in the fab_meta table.

    Returns True, including when the task is skipped because it belongs to
    no saved project.
    """
    PHABTICKETID = int(PHABTICKETID)
    # Conduit credentials for the old instance come from the config file.
    parser = ConfigParser.SafeConfigParser()
    parser_mode = 'oldfab'
    parser.read(configfile)
    oldfab = Phabricator(parser.get(parser_mode, 'user'),
                         parser.get(parser_mode, 'cert'),
                         parser.get(parser_mode, 'host'))
    #dummy instance of phabapi
    phabm = phabmacros('', '', '')
    #assign newphab instance as self.con for dummyphab
    phabm.con = oldfab
    """ <Result: {u'authorPHID': u'PHID-USER-qbtllnzb6pwl3ttzqa3m', u'status': u'open', u'phid': u'PHID-TASK-qr3fpbtk6kdx4slhgnsd', u'description': u'', u'objectName': u'T10', u'title': u'Get icinga alerts into logstash', u'priorityColor': u'red', u'dependsOnTaskPHIDs': [], u'auxiliary': [], u'uri': u'http://fab.wmflabs.org/T10', u'ccPHIDs': [u'PHID-USER-qbtllnzb6pwl3ttzqa3m'], u'isClosed': False, u'dateModified': u'1399311492', u'ownerPHID': None, u'statusName': u'Open', u'dateCreated': u'1391716779', u'projectPHIDs': [u'PHID-PROJ-5ncvaivs3upngr7ijqy2'], u'id': u'10', u'priority': u'High'}> """
    tinfo = oldfab.maniphest.info(task_id=PHABTICKETID).response
    vlog(tinfo)
    if 'objectName' in tinfo:
        log("Fetching %s" % (tinfo['objectName']))
    # Attach commenter details to every comment.
    comments = comments_by_task(tinfo['phid'])
    for i, c in comments.iteritems():
        comments[i]['xcommenter'] = dict(oldfab.user.info(phid=c['xuserphid']))
    ordered_comments = collections.OrderedDict(sorted(comments.items()))
    vlog(str(ordered_comments))
    """ <Result: {u'userName': u'bd808', u'phid': u'PHID-USER-qbtllnzb6pwl3ttzqa3m', u'realName': u'Bryan Davis', u'roles': [u'admin',u'verified', u'approved', u'activated'], u'image': u'http://fab.wmflabs.org/file/data/fijwoqt62w6atpond4vb/PHID-FILE-37htsfegn7bnlfvzwsts/profile-profile-gravatar', u'uri': u'http://fab.wmflabs.org/p/bd808/'}> """
    # Resolve author / cc / owner PHIDs to email addresses.
    authorInfo = oldfab.user.info(phid=tinfo['authorPHID'])
    tinfo['xauthor'] = phabdb.email_by_userphid(authorInfo['phid'])
    lauthor = tinfo['xauthor'] or 'no author'
    vlog('author: ' + lauthor)
    ccs = []
    if tinfo['ccPHIDs']:
        for c in tinfo['ccPHIDs']:
            ccInfo = oldfab.user.info(phid=c)
            ccs.append(phabdb.email_by_userphid(ccInfo['phid']))
    tinfo['xccs'] = ccs
    vlog('ccs: ' + str(ccs))
    if tinfo['ownerPHID']:
        tinfo['xowner'] = phabdb.email_by_userphid(tinfo['ownerPHID'])
    else:
        tinfo['xowner'] = None
    """ u'data': {u'PHID-PROJ-5ncvaivs3upngr7ijqy2': {u'phid': u'PHID-PROJ-5ncvaivs3upngr7ijqy2', u'name': u'logstash', u'dateCreated': u'1391641549', u'members': [u'PHID-USER-65zhggegfvhojb4nynay'], u'id': u'3', u'dateModified': u'1398282408', u'slugs': [u'logstash']}}, u'slugMap': []}> """
    project_names = []
    associated_projects = tinfo['projectPHIDs']
    vlog('associated projects: %s' % (str(tinfo['projectPHIDs'])))
    #if we try to query for an empty list we get back ALLLLLL
    if associated_projects:
        pinfo = oldfab.project.query(phids=associated_projects)
        if pinfo['data']:
            for p in pinfo['data'].values():
                project_names.append(p['name'])

    def norm(pname):
        # Normalize project names for case/separator-insensitive matching.
        return pname.lower().replace(' ', '_').replace('-', '_')

    # Only migrate tickets that belong to at least one saved project.
    norm_projects = [norm(p) for p in fablib.saved]
    saved_projects = [norm(p) in norm_projects for p in project_names]
    if not any(saved_projects):
        print "Skipping %s as it's not in a saved project" % (PHABTICKETID)
        return True
    vlog('project names: ' + str(project_names))
    tinfo['xprojects'] = project_names
    status = tinfo['status']
    if status != 'open':
        creation_priority = ipriority['na']
    else:
        creation_priority = ipriority['unresolved']
    # Collect ids of tasks this one blocks.
    blocked_tasks = phabdb.get_tasks_blocked(tinfo['phid'])
    if blocked_tasks:
        blocked = []
        block_details = oldfab.maniphest.find(phids=blocked_tasks)
        for k, v in block_details.iteritems():
            blocked.append(v['id'])
    else:
        blocked = []
    vlog('blocking: %s' % (str(blocked)))
    tinfo['xblocking'] = blocked
    # Upsert the cached ticket into fab_meta.
    pmig = phabdb.phdb(db='fab_migration')
    current = pmig.sql_x("SELECT * from fab_meta where id = %s", PHABTICKETID)
    if current:
        log('updating current record %s' % (PHABTICKETID,))
        update_values = (creation_priority, json.dumps(tinfo), json.dumps(comments), now(), PHABTICKETID)
        pmig.sql_x("UPDATE fab_meta SET priority=%s, header=%s, comments=%s, modified=%s WHERE id = %s",
                   update_values)
    else:
        log('inserting new record %s' % (PHABTICKETID,))
        insert_values = (PHABTICKETID, creation_priority, json.dumps(tinfo), json.dumps(comments), now(), now())
        pmig.sql_x("INSERT INTO fab_meta (id, priority, header, comments, created, modified) VALUES (%s, %s, %s, %s, %s, %s)",
                   insert_values)
    pmig.close()
    return True
def fetch(tid):
    """Fetch one RT ticket (metadata, history, links) over REST and cache
    it as JSON in the rt_meta table.

    Returns True on success, or a skip-reason string: 'missing', 'denied'
    or 'disabled'.
    """
    response = resource.RTResource(config.rt_url, config.rt_login, config.rt_passwd,
                                   authenticators.CookieAuthenticator)
    log("fetching issue %s" % (tid,))
    tinfo = response.get(path="ticket/%s" % (tid,))
    history = response.get(path="ticket/%s/history?format=l" % (tid,))
    links = response.get(path="ticket/%s/links/show" % (tid,))
    vlog(tinfo)
    if re.search(r'\#\sTicket\s\d+\sdoes\snot\sexist.$', tinfo.strip()):
        log("Skipped as source missing for %s" % (tid,))
        return 'missing'
    # some private todo's and such
    if 'You are not allowed to display' in tinfo:
        log("Skipped as access denied for %s" % (tid,))
        return 'denied'
    #breaking detailed history into posts
    #23/23 (id/114376/total)
    comments = re.split(r"\d+\/\d+\s+\(id\/.\d+\/total\)", history)
    comments = [c.rstrip('#').rstrip('--') for c in comments]
    # we get back freeform text and create a dict
    dtinfo = {}
    link_dict = rtlib.links_to_dict(links)
    dtinfo['links'] = link_dict
    for cv in tinfo.strip().splitlines():
        if not cv:
            continue
        cv_kv = re.split(':', cv, 1)
        if len(cv_kv) > 1:
            k = cv_kv[0]
            v = cv_kv[1]
            dtinfo[k.strip()] = v.strip()
    vlog("Enabled queues: %s" % (str(rtlib.enabled)))
    if dtinfo['Queue'] not in rtlib.enabled:
        log("Skipped as disabled queue for %s (%s)" % (str(tid), dtinfo['Queue']))
        return 'disabled'
    if dtinfo['Status'] == 'resolved':
        creation_priority = ipriority['na']
    else:
        creation_priority = ipriority['unresolved']
    com = json.dumps(comments)
    tinfo = json.dumps(dtinfo)
    pmig = phabdb.phdb(db=config.rtmigrate_db,
                       user=config.rtmigrate_user,
                       passwd=config.rtmigrate_passwd)
    current = pmig.sql_x("SELECT * from rt_meta where id = %s", tid)
    if current:
        # BUG FIX: the final WHERE id placeholder previously received now()
        # instead of tid, so the UPDATE targeted the wrong row.  Sibling
        # fetchers (bugzilla) end their update tuples with the id.
        update_values = (creation_priority, tinfo, com, now(), tid)
        pmig.sql_x("UPDATE rt_meta SET priority=%s, \
                   header=%s, \
                   comments=%s, \
                   modified=%s \
                   WHERE id = %s", update_values)
        vlog('update: ' + str(update_values))
    else:
        insert_values = (tid, creation_priority, tinfo, com, now(), now())
        pmig.sql_x("INSERT INTO rt_meta \
                   (id, priority, header, comments, created, modified) \
                   VALUES (%s, %s, %s, %s, %s, %s)", insert_values)
    pmig.close()
    return True
def create(rtid):
    """Create a Phabricator Maniphest task from a cached RT ticket.

    Reads the ticket previously cached in rt_meta by fetch(), downloads and
    uploads its attachments, creates the task (with a WMF-NDA view policy),
    replays each RT comment as a task comment with backdated timestamps,
    and records the formatted comments and success priority back in rt_meta.

    :param rtid: RT ticket id (int or str)
    :returns: True on success (or when the ticket is skipped because its
              reference already exists / its queue is disabled), 'missing'
              when rt_meta has no row for rtid, False on invalid data or a
              missing view policy
    :raises Exception: when the RT attachments endpoint returns nothing
    """
    phab = Phabricator(config.phab_user,
                       config.phab_cert,
                       config.phab_host)
    phabm = phabmacros('', '', '')
    phabm.con = phab
    pmig = phdb(db=config.rtmigrate_db)
    response = resource.RTResource(config.rt_url,
                                   config.rt_login,
                                   config.rt_passwd,
                                   authenticators.CookieAuthenticator)
    current = pmig.sql_x("SELECT priority, header, \
                          comments, created, modified \
                          FROM rt_meta WHERE id = %s", (rtid,))
    if current:
        import_priority, rtinfo, com, created, modified = current[0]
    else:
        log('%s not present for migration' % (rtid,))
        return 'missing'
    if not rtinfo:
        log("ignoring invalid data for issue %s" % (rtid,))
        return False

    def get_ref(id):
        # returns the existing Phab reference for this RT id, if any
        refexists = phabdb.reference_ticket('%s%s' % (rtlib.prepend, id))
        if refexists:
            return refexists

    if get_ref(rtid):
        log('reference ticket %s already exists' % (rtid,))
        return True

    def remove_sig(content):
        # NOTE(review): currently unused helper; kept for parity
        return re.split('--\s?\n', content)[0]

    def uob(obj, encoding='utf-8'):
        """ unicode or bust"""
        if isinstance(obj, basestring):
            if not isinstance(obj, unicode):
                obj = unicode(obj, encoding)
        return obj

    def sanitize_text(line):
        # Convert a raw email/RT line into literal remarkup (%%%...%%%),
        # preserving quoted lines; returns None for droppable whitespace.
        if line.strip() and not line.lstrip().startswith('>'):
            # in remarkup having '--' on a new line seems to bold last
            # line so signatures really cause issues
            if all(map(lambda c: c in '-', line.strip())):
                return '%%%{0}%%%'.format(line.strip())
            elif line.strip() == '-------- Original Message --------':
                return '%%%{0}%%%'.format(line.strip())
            elif line.strip() == '---------- Forwarded message ----------':
                return '%%%{0}%%%'.format(unicode(line.strip()))
            elif line.strip().startswith('#'):
                return uob('%%%') + uob(line.strip()) + uob('%%%')
            else:
                return uob('%%%') + uob(line).strip() + uob('%%%')
        elif line.strip().startswith('>'):
            quoted_content = line.lstrip('>').strip()
            if not quoted_content.lstrip('>').strip():
                return line.strip()
            if all(map(lambda c: c in '-',
                       quoted_content.lstrip('>').strip())):
                return "> ~~"
            else:
                return uob(line.strip())
        else:
            vlog("ignoring content line %s" % (line,))
            return None

    botphid = phabdb.get_phid_by_username(config.phab_user)
    viewpolicy = phabdb.get_project_phid('WMF-NDA')
    if not viewpolicy:
        elog("View policy group not present: %s" % (viewpolicy,))
        return False
    # Example:
    # id: ticket/8175/attachments\n
    # Attachments: 141490: (Unnamed) (multipart/mixed / 0b),
    #              141491: (Unnamed) (text/html / 23b),
    #              141492: 0jp9B09.jpg (image/jpeg / 117.4k),
    attachments = response.get(path="ticket/%s/attachments/" % (rtid,))
    if not attachments:
        raise Exception("no attachment response: %s" % (rtid))
    history = response.get(path="ticket/%s/history?format=l" % (rtid,))
    rtinfo = json.loads(rtinfo)
    comments = json.loads(com)
    vlog(rtid)
    vlog(rtinfo)
    # split each cached comment into its text body and attachment listing
    comment_dict = {}
    for i, c in enumerate(comments):
        cwork = {}
        comment_dict[i] = cwork
        attachsplit = c.split('Attachments:')
        if len(attachsplit) > 1:
            body, attached = attachsplit[0], attachsplit[1]
        else:
            # '0' marks "no attachments" for the regex findall below
            body, attached = c, '0'
        comment_dict[i]['text_body'] = unicode(body)
        comment_dict[i]['attached'] = attached
    # Example comment body:
    # Ticket: 8175\nTimeTaken: 0\n
    # Type: Create\nField:
    # Data: \nDescription: Ticket created by cpettet\n\n
    # Content: test ticket description\n\n\n
    # Creator: cpettet\nCreated: 2014-08-21 21:21:38\n\n'}
    params = {'id': 'id:(.*)',
              'ticket': 'Ticket:(.*)',
              'timetaken': 'TimeTaken:(.*)',
              'content': 'Content:(.*)',
              'creator': 'Creator:(.*)',
              'description': 'Description:(.*)',
              'created': 'Created:(.*)',
              'ovalue': 'OldValue:(.*)',
              'nvalue': 'NewValue:(.*)'}
    for k, v in comment_dict.iteritems():
        text_body = v['text_body']
        comment_dict[k]['body'] = {}
        for paramkey, regex in params.iteritems():
            value = re.search(regex, text_body)
            if value:
                comment_dict[k]['body'][paramkey] = value.group(1).strip()
            else:
                comment_dict[k]['body'][paramkey] = None
        # multi-line Content: needs a real split, not the one-line regex
        if 'Content' in text_body:
            content = text_body.split('Content:')[1]
            content = content.split('Creator:')
            comment_dict[k]['body']['content'] = content
        creator = comment_dict[k]['body']['creator']
        if creator and '@' in creator:
            comment_dict[k]['body']['creator'] = rtlib.sanitize_email(creator)
        # 15475: untitled (18.7k)
        comment_attachments = re.findall('(\d+):\s', v['attached'])
        comment_dict[k]['body']['attached'] = comment_attachments
    # due to the nature of the RT api sometimes whitespacing becomes
    # a noise comment
    if not any(comment_dict[comment_dict.keys()[0]]['body'].values()):
        vlog('dropping %s comment' % (str(comment_dict[comment_dict.keys()[0]],)))
        del comment_dict[0]

    # attachments into a dict
    def attach_to_kv(attachments_output):
        attached = re.split('Attachments:', attachments_output, 1)[1]
        ainfo = {}
        for at in attached.strip().splitlines():
            if not at:
                continue
            k, v = re.split(':', at, 1)
            ainfo[k.strip()] = v.strip()
        return ainfo

    ainfo = attach_to_kv(attachments)
    # lots of junk attachments from emailing comments and ticket creation
    ainfo_f = {}
    for k, v in ainfo.iteritems():
        if '(Unnamed)' not in v:
            ainfo_f[k] = v
    # taking attachment text and convert to tuple (name, content type, size)
    ainfo_ext = {}
    comments = re.split("\d+\/\d+\s+\(id\/.\d+\/total\)", history)
    for k, v in ainfo_f.iteritems():
        # Handle general attachment case:
        # NO: 686318802.html (application/octet-stream / 19.5k),
        # YES: Summary_686318802.pdf (application/unknown / 215.3k),
        extract = re.search('(.*)\.(\S{3,4})\s\((.*)\s\/\s(.*)\)', v)
        # due to goofy email handling of signature/x-header/meta info
        # it seems they sometimes become malformed attachments. Such as
        # when a response into rt was directed to a mailinglist
        # Example:
        # ->Attached Message Part (text/plain / 158b)
        if extract:
            fdetails = extract.groups()
        if not extract and v.startswith('Attached Message Part'):
            continue
        if not extract:
            # fallback for extensionless names: (name, '', type, size)
            extract = re.match('(\S+)\s\((.*)\/(.*)\),.*', v)
            if not extract:
                elog("attachment CORRUPT or FAILED extraction: %s %s (%s)" % (k,
                                                                              v,
                                                                              rtid))
                continue
            fdetails = extract.group(1), '', extract.group(2), extract.group(3)
        if not fdetails:
            elog("attachment CORRUPT or FAILED extraction: %s %s (%s)" % (k,
                                                                          v,
                                                                          rtid))
            continue
        ainfo_ext[k] = fdetails
        vlog(ainfo_ext[k])
    # NOTE(review): extension whitelist currently only informational -- the
    # enforcement below is commented out
    attachment_types = ['pdf', 'jpeg', 'asc', 'tgz', 'csr', 'jpg', 'png',
                        'xls', 'xls', 'csv', 'docx', 'gif', 'html', 'htm',
                        'txt', 'diff', 'log', 'zip', 'rtf', 'tmpl', 'vcf',
                        'pub', 'sql', 'odt', 'p7s', 'iso', 'ods', 'conf',
                        'doc', 'xff', 'eml']
    # Uploading attachments
    dl = []
    # ('Quote Summary_686318802', 'pdf', 'application/unknown', '215.3k')
    uploaded = {}
    for k, v in ainfo_ext.iteritems():
        # vendors have this weird habit of capitalizing extension names
        file_extension = v[1].lower()
        # make sure we can handle the extension type otherwise
        #if file_extension not in attachment_types:
        #    elog("Unknown Exception (%s) %s %s" % (rtid, v, file_extension))
        #    #raise Exception('unknown extension: %s (%s)' % (v, rtid))
        full = "ticket/%s/attachments/%s/content" % (rtid, k)
        vcontent = response.get(path=full,
                                headers={'Content-Type': v[2],
                                         'Content-Length': v[3]})
        # PDF's don't react well to stripping header -- fine without it
        if file_extension.strip() == 'pdf':
            sanscontent = ''.join(vcontent.readlines())
        else:
            vcontent = vcontent.readlines()
            sanscontent = ''.join(vcontent[2:])
        if file_extension:
            fname = "%s.%s" % (v[0], file_extension)
        else:
            fname = v[0]
        upload = phabm.upload_file(fname, sanscontent, viewpolicy)
        uploaded[k] = upload
    if rtinfo['Queue'] not in rtlib.enabled:
        log("%s not in an enabled queue" % (rtid,))
        return True
    ptags = []
    # In a practical sense ops-requests seemed to get tagged
    # with straight Operations group in Phab so we backfill
    # this for consistency.
    if rtinfo['Queue'] == 'ops-requests':
        ptags.append('operations')
    pname = rtlib.project_translate(rtinfo['Queue'])
    ptags.append(pname)
    phids = []
    for p in ptags:
        phids.append(phabm.ensure_project(p))
    rtinfo['xpriority'] = rtlib.priority_convert(rtinfo['Priority'])
    rtinfo['xstatus'] = rtlib.status_convert(rtinfo['Status'])
    import collections
    # {'ovalue': u'open',
    #  'description': u"Status changed from 'open' to 'resolved' by robh",
    #  'nvalue': None, 'creator': u'robh', 'attached': [],
    #  'timetaken': u'0', 'created': u'2011-07-01 02:47:24',
    #  'content': [u' This transaction appears to have no content\n', u'
    #  robh\nCreated: 2011-07-01 02:47:24\n'],
    #  'ticket': u'1000', 'id': u'23192'}
    ordered_comments = collections.OrderedDict(sorted(comment_dict.items()))
    upfiles = uploaded.keys()
    # much like bugzilla comment 0 is the task description
    header = comment_dict[comment_dict.keys()[0]]
    del comment_dict[comment_dict.keys()[0]]
    dtext_san = []
    dtext_list = header['body']['content'][0].splitlines()
    for t in dtext_list:
        dtext_san.append(sanitize_text(rtlib.shadow_emails(t)))
    dtext = '\n'.join(filter(None, dtext_san))
    full_description = "**Author:** `%s`\n\n**Description:**\n%s\n" % (rtinfo['Creator'].strip(),
                                                                       dtext)
    hafound = header['body']['attached']
    header_attachments = []
    for at in hafound:
        if at in upfiles:
            header_attachments.append('{F%s}' % uploaded[at]['id'])
    if 'CF.{Bugzilla ticket}' in rtinfo and rtinfo['CF.{Bugzilla ticket}'] or header_attachments:
        full_description += '\n__________________________\n\n'
    if 'CF.{Bugzilla ticket}' in rtinfo and rtinfo['CF.{Bugzilla ticket}']:
        obzurl = 'https://old-bugzilla.wikimedia.org/show_bug.cgi?id='
        obz = "[[ %s%s | %s ]]" % (obzurl,
                                   rtinfo['CF.{Bugzilla ticket}'],
                                   rtinfo['CF.{Bugzilla ticket}'],)
        # migrated bugzilla ids were offset by 2000 in Phabricator
        bzref = int(rtinfo['CF.{Bugzilla ticket}'].strip())
        newbzref = bzref + 2000
        full_description += "**Bugzilla Ticket**: %s => %s\n" % (obz,
                                                                 '{T%s}' % (newbzref,))
    if header_attachments:
        full_description += '\n'.join(header_attachments)
    vlog("Ticket Info: %s" % (full_description,))
    ticket = phab.maniphest.createtask(title=rtinfo['Subject'],
                                       description=full_description,
                                       projectPHIDs=phids,
                                       ccPHIDs=[],
                                       priority=rtinfo['xpriority'],
                                       auxiliary={"std:maniphest:external_reference": "rt%s" % (rtid,)})
    # XXX: perms
    phabdb.set_task_title_transaction(ticket['phid'],
                                      botphid,
                                      'public',
                                      'public')
    phabdb.set_task_ctime(ticket['phid'],
                          rtlib.str_to_epoch(rtinfo['Created']))
    phabdb.set_task_policy(ticket['phid'], viewpolicy)
    #vlog(str(ordered_comments))
    fmt_comments = {}
    for comment, contents in comment_dict.iteritems():
        fmt_comment = {}
        dbody = contents['body']
        if dbody['content'] is None and dbody['creator'] is None:
            continue
        elif dbody['content'] is None:
            content = 'no content found'
        else:
            mailsan = rtlib.shadow_emails(dbody['content'][0])
            content_literal = []
            for c in mailsan.splitlines():
                content_literal.append(sanitize_text(c))
            content = '\n'.join(filter(None, content_literal))
        # In case of attachment but not much else
        if not content and dbody['attached']:
            content = True
        void_content = 'This transaction appears to have no content'
        if not content == True and void_content in content:
            content = None
        auto_actions = ['Outgoing email about a comment recorded by RT_System',
                        'Outgoing email recorded by RT_System']
        if dbody['description'] in auto_actions:
            vlog("ignoring comment: %s/%s" % (dbody['description'], content))
            continue
        preamble = ''
        cbody = ''
        if content:
            if dbody['creator'] is None:
                dbody['creator'] = '//creator field not set in source//'
            preamble += "`%s wrote:`\n\n" % (dbody['creator'].strip(),)
            if content == True:
                content = ''
            cbody += "%s" % (content.strip() or '//no content//',)
        if dbody['nvalue'] or dbody['ovalue']:
            value_update = ''
            value_update_text = rtlib.shadow_emails(dbody['description'])
            value_update_text = value_update_text.replace('fsck.com-rt',
                                                          'https')
            relations = ['Reference by ticket',
                         'Dependency by',
                         'Reference to ticket',
                         'Dependency on',
                         'Merged into ticket',
                         'Membership in']
            states = ['open', 'resolved', 'new', 'stalled']
            if any(map(lambda x: x in dbody['description'], relations)):
                value_update = value_update_text
            elif re.search('tags\s\S+\sadded', dbody['description']):
                value_update = "%s added tag %s" % (dbody['creator'],
                                                    dbody['nvalue'])
            elif re.search('Taken\sby\s\S+', dbody['description']):
                value_update = "Issue taken by **%s**" % (dbody['creator'],)
            else:
                value_update = "//%s//" % (value_update_text,)
            cbody += value_update
        afound = contents['body']['attached']
        cbody_attachments = []
        for a in afound:
            if a in upfiles:
                cbody_attachments.append('{F%s}' % uploaded[a]['id'])
        if cbody_attachments:
            cbody += '\n__________________________\n\n'
            cbody += '\n'.join(cbody_attachments)
        fmt_comment['xattached'] = cbody_attachments
        phabm.task_comment(ticket['id'], preamble + cbody)
        ctransaction = phabdb.last_comment(ticket['phid'])
        try:
            created = rtlib.str_to_epoch_comments(dbody['created'])
        except (ValueError, TypeError):
            # A handful of issues seems to show NULL creation times
            # for no reason: see 1953 for example of NULL
            # 3001 for example of None
            elog("Could not determine comment time for %s" % (rtid,))
            # BUGFIX: bind the local `created` (not dbody['created']) so
            # set_comment_time below gets the fallback instead of raising
            # NameError / reusing a stale value from a prior iteration.
            created = rtlib.str_to_epoch(rtinfo['Created'])
        phabdb.set_comment_time(ctransaction, created)
        fmt_comment['xctransaction'] = ctransaction
        fmt_comment['preamble'] = preamble
        fmt_comment['content'] = cbody
        fmt_comment['created'] = created
        fmt_comment['author'] = dbody['creator']
        fmt_comment['creator'] = rtlib.user_lookup(dbody['creator'])
        # XXX TRX both ways?
        cid = len(fmt_comments.keys()) + 1
        fmt_comment['count'] = cid
        fmt_comments[cid] = fmt_comment
    if rtinfo['Status'].lower() != 'open':
        log('setting %s to status %s' % (rtid, rtinfo['xstatus'].lower()))
        phabdb.set_issue_status(ticket['phid'], rtinfo['xstatus'].lower())
    log("Created task: T%s (%s)" % (ticket['id'], ticket['phid']))
    phabdb.set_task_mtime(ticket['phid'],
                          rtlib.str_to_epoch(rtinfo['LastUpdated']))
    xcomments = json.dumps(fmt_comments)
    pmig.sql_x("UPDATE rt_meta SET xcomments=%s WHERE id = %s",
               (xcomments, rtid))
    pmig.sql_x("UPDATE rt_meta SET priority=%s, modified=%s WHERE id = %s",
               (ipriority['creation_success'], now(), rtid))
    pmig.close()
    return True