author    cpettet <rush@wikimedia.org>    2014-12-16 16:03:11 -0600
committer cpettet <rush@wikimedia.org>    2014-12-16 16:03:11 -0600
commit    39d05f179c749aa14cdf9c7fc5e0c95efb5a29da (patch)
tree      633b5e4a6c255a7a668ac7e11a7ebffad7d32065 /archive
parent    a339253851065809235d3b1dcd9e6eb315e58152 (diff)
rt cleanup and user updates for header and comments
Diffstat (limited to 'archive')
-rwxr-xr-x  archive/fab_update_tasks.py   92
-rwxr-xr-x  archive/fab_update_user.py   206
2 files changed, 298 insertions, 0 deletions
diff --git a/archive/fab_update_tasks.py b/archive/fab_update_tasks.py
new file mode 100755
index 0000000..d6a7bf7
--- /dev/null
+++ b/archive/fab_update_tasks.py
@@ -0,0 +1,92 @@
+import sys
+import time
+import json
+import multiprocessing
+from wmfphablib import phabdb
+from wmfphablib import log
+from wmfphablib import vlog
+from wmfphablib import now
+from wmfphablib import return_bug_list
+from wmfphablib import fablib
+from wmfphablib import ipriority
+
+
+def update(id):
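+    # Re-create blocking relationships for one migrated fab task: load the
+    # saved ticket header, resolve the external references for this task and
+    # each task it blocks, and record the outcome in task_relations.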
+    fabdb = phabdb.phdb(db='fab_migration')
+
+    epriority = fabdb.sql_x("SELECT priority from task_relations where id = %s", id)
+    if epriority and epriority[0] == ipriority['creation_success']:
+        log('Skipping %s as blockers already updated' % (id,))
+        return True
+
+    hq = "SELECT header FROM fab_meta WHERE id = %s"
+    header = fabdb.sql_x(hq, (id,))
+    if not header:
+        vlog('no header found for %s' % (id,))
+        return True
+
+    def extref(ticket):
+        refid = phabdb.reference_ticket("%s%s" % (fablib.prepend, ticket))
+        if not refid:
+            return ''
+        return refid[0]
+
+    blocker_ref = extref(id)
+    tinfo = json.loads(header[0])
+    vlog(tinfo)
+    for b in tinfo['xblocking']:
+        blocked_ref = extref(b)
+        log("%s is blocking %s" % (blocker_ref, blocked_ref))
+        if blocked_ref:
+            log(phabdb.set_blocked_task(blocker_ref, blocked_ref))
+        else:
+            log('%s is missing blocker %s' % (blocked_ref, blocker_ref))
+
+    blocks = phabdb.get_tasks_blocked(blocker_ref)
+    log('%s is blocking %s' % (blocker_ref, str(blocks)))
+    current = fabdb.sql_x("SELECT * from task_relations where id = %s", id)
+    if current:
+        fabdb.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
+                    (ipriority['creation_success'], json.dumps(blocks), now(), id))
+    else:
+        sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
+        fabdb.sql_x(sql, (id, ipriority['creation_success'], json.dumps(blocks), now()))
+    fabdb.close()
+    return True
+
+
+def run_update(fabid, tries=1):
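+    # Retry wrapper around update() used by the worker pool; once the retry
+    # budget is exhausted the failure is recorded in task_relations.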
+    if tries == 0:
+        log('final fail to grab %s' % (fabid,))
+        pmig = phabdb.phdb(db='fab_migration')
+        current = pmig.sql_x("SELECT * from task_relations where id = %s", fabid)
+        if current:
+            pmig.sql_x("UPDATE task_relations SET priority=%s, blocks=%s, modified=%s WHERE id = %s",
+                       (ipriority['creation_failed'], json.dumps([]), now(), fabid))
+        else:
+            sql = "INSERT INTO task_relations (id, priority, blocks, modified) VALUES (%s, %s, %s, %s)"
+            pmig.sql_x(sql, (fabid, ipriority['creation_failed'], json.dumps([]), now()))
+        pmig.close()
+        return False
+    try:
+        if update(fabid):
+            log('%s done with %s' % (str(int(time.time())), fabid,))
+            return True
+    except Exception as e:
+        import traceback
+        tries -= 1
+        time.sleep(5)
+        traceback.print_exc(file=sys.stdout)
+        log('failed to grab %s (%s)' % (fabid, e))
+        return run_update(fabid, tries=tries)
+
+
+
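+# Process the returned list of ids with a small worker pool and report how
+# many updates succeeded and failed.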
+bugs = return_bug_list()
+log("Count %s" % (str(len(bugs))))
+from multiprocessing import Pool
+pool = Pool(processes=2)
+_ = pool.map(run_update, bugs)
+complete = len(filter(bool, _))
+failed = len(_) - complete
+print 'completed %s, failed %s' % (complete, failed)
diff --git a/archive/fab_update_user.py b/archive/fab_update_user.py
new file mode 100755
index 0000000..8d8440c
--- /dev/null
+++ b/archive/fab_update_user.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python
+import os
+import argparse
+import time
+import json
+import multiprocessing
+import sys
+import collections
+from phabricator import Phabricator
+from wmfphablib import Phab as phabmacros
+from wmfphablib import phabdb
+from wmfphablib import log
+from wmfphablib import config
+from wmfphablib import util
+from wmfphablib import vlog
+from wmfphablib import epoch_to_datetime
+from wmfphablib import now
+from wmfphablib import return_bug_list
+from wmfphablib import ipriority
+
+
+def update(user):
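+    # Replay a verified user's saved fab relationships (assigned tasks,
+    # authored tasks, cc list) onto the migrated Phabricator tasks, then
+    # mark the user as updated in the migration history.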
+    vlog(user)
+    phab = Phabricator(config.phab_user,
+                       config.phab_cert,
+                       config.phab_host)
+
+    pmig = phabdb.phdb(db=config.fabmigrate_db,
+                       user=config.fabmigrate_user,
+                       passwd=config.fabmigrate_passwd)
+
+    phabm = phabmacros('', '', '')
+    phabm.con = phab
+
+    if phabdb.is_bot(user['userphid']):
+        log("%s is a bot, no action needed" % (user['user']))
+        return True
+
+    epriority = phabdb.get_user_relations_priority(user['user'], pmig)
+    if epriority and epriority[0] == ipriority['update_success']:
+        log('Skipping %s as already updated' % (user['user']))
+        return True
+
+    # Example of a saved user history record:
+    # 'author': [409, 410, 411, 404, 412],
+    # 'cc': [221, 69, 203, 268, 261, 8],
+    # 'created': 1410276037L,
+    # 'modified': 1410276060L,
+    # 'assigned': [97, 64, 150, 59, 6],
+    # 'userphid': 'PHID-USER-4hsexplytovmqmcb7tq2',
+    # 'user': u'chase.mp@xxx.com'}
+
+    if user['assigned']:
+        for ag in user['assigned']:
+            vlog(phabm.sync_assigned(user['userphid'], ag, 'fl'))
+
+    if user['author']:
+        for a in user['author']:
+            vlog(phabm.synced_authored(user['userphid'], a, 'fl'))
+
+    if user['cc']:
+        for ccd in user['cc']:
+            vlog(phabdb.add_task_cc_by_ref(user['userphid'], ccd))
+
+    current = phabdb.get_user_migration_history(user['user'], pmig)
+    if current:
+        log(phabdb.set_user_relations_priority(ipriority['update_success'], user['user'], pmig))
+    else:
+        log('%s user does not exist to update' % (user['user']))
+        return False
+    pmig.close()
+    return True
+
+def run_update(user, tries=1):
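+    # Retry wrapper around update() used by the worker pool; once retries
+    # run out the user is marked update_failed in the migration history.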
+    if tries == 0:
+        pmig = phabdb.phdb(db=config.fabmigrate_db,
+                           user=config.fabmigrate_user,
+                           passwd=config.fabmigrate_passwd)
+        current = phabdb.get_user_migration_history(user['user'], pmig)
+        if current:
+            log(phabdb.set_user_relations_priority(ipriority['update_failed'], user['user'], pmig))
+        else:
+            log('%s user does not exist to update' % (user['user']))
+        pmig.close()
+        log('final fail to update %s' % (user['user'],))
+        return False
+    try:
+        if update(user):
+            log('%s done with %s' % (str(int(time.time())), user,))
+            return True
+    except Exception as e:
+        import traceback
+        tries -= 1
+        time.sleep(5)
+        traceback.print_exc(file=sys.stdout)
+        log('failed to update %s (%s)' % (user, e))
+        return run_update(user, tries=tries)
+
+def get_user_histories(verified):
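+    # Build one history dict (user, userphid, assigned, cc, author, created,
+    # modified) per verified (phid, email) pair from the saved migration
+    # data; entries with no saved history are skipped.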
+    histories = []
+    pmig = phabdb.phdb(db=config.fabmigrate_db,
+                       user=config.fabmigrate_user,
+                       passwd=config.fabmigrate_passwd)
+    #print 'verified', verified
+    for v in verified:
+        vlog(str(v))
+        # Get user history from old fab system
+        saved_history = phabdb.get_user_migration_history(v[1], pmig)
+        if not saved_history:
+            log('%s verified email has no saved history' % (v[1],))
+            continue
+        log('%s is being processed' % (v[1],))
+        history = {}
+        history['user'] = v[1]
+        history['userphid'] = v[0]
+        history['assigned'] = saved_history[0]
+        history['cc'] = saved_history[1]
+        history['author'] = saved_history[2]
+        history['created'] = saved_history[3]
+        history['modified'] = saved_history[4]
+        histories.append(history)
+    pmig.close()
+    return [util.translate_json_dict_items(d) for d in histories]
+
+
+def main():
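+    # Select verified users (-a: since the last recorded run, -e: a single
+    # email, -m: since a given epoch), build their histories, update them in
+    # parallel, and record the run's start and finish state.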
+    parser = argparse.ArgumentParser(description='Updates user metadata from fab')
+    parser.add_argument('-a', action="store_true", default=False)
+    parser.add_argument('-e', action="store", dest='email')
+    parser.add_argument('-m', action="store", dest="starting_epoch", default=None)
+    parser.add_argument('-v', action="store_true", default=False)
+    args = parser.parse_args()
+
+    pmig = phabdb.phdb(db=config.fabmigrate_db,
+                       user=config.fabmigrate_user,
+                       passwd=config.fabmigrate_passwd)
+
+    if args.a:
+        starting_epoch = phabdb.get_user_relations_last_finish(pmig)
+        users, finish_epoch = phabdb.get_verified_users(starting_epoch, config.fab_limit)
+    elif args.email:
+        users = phabdb.get_verified_user(args.email)
+        starting_epoch = 0
+        finish_epoch = 0
+    elif args.starting_epoch:
+        users, finish_epoch = phabdb.get_verified_users(args.starting_epoch)
+        starting_epoch = args.starting_epoch
+    else:
+        parser.print_help()
+        sys.exit(1)
+
+    if not any(users):
+        log("Exiting as there are no new verified users")
+        sys.exit()
+
+    histories = get_user_histories(filter(bool, users))
+    user_count = len(histories)
+
+    icounts = []
+    for u in histories:
+        c = 0
+        if u['cc']:
+            c += len(u['cc'])
+        if u['author']:
+            c += len(u['author'])
+        if u['assigned']:
+            c += len(u['assigned'])
+        icounts.append(c)
+    issue_count = sum(icounts)
+
+    log("User Count %s" % (str(user_count)))
+    log("Issue Count %s" % (str(issue_count)))
+
+    if user_count == 0:
+        log("Exiting as there are no new verified users")
+        sys.exit()
+
+    pid = os.getpid()
+    source = util.source_name(sys.argv[0])
+    phabdb.user_relations_start(pid,
+                                source,
+                                int(time.time()),
+                                ipriority['na'],
+                                starting_epoch,
+                                user_count, issue_count, pmig)
+
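+    # Fan the per-user updates out over a worker pool sized by config.fab_multi.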
+    from multiprocessing import Pool
+    pool = Pool(processes=config.fab_multi)
+    _ = pool.map(run_update, histories)
+    complete = len(filter(bool, _))
+    failed = len(_) - complete
+    phabdb.user_relations_finish(pid,
+                                 int(time.time()),
+                                 ipriority['update_success'],
+                                 finish_epoch,
+                                 complete,
+                                 failed,
+                                 pmig)
+
+    pm = phabmacros(config.phab_user, config.phab_cert, config.phab_host)
+    vlog(util.update_blog(source, complete, failed, user_count, issue_count, pm))
+
+    print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
+    pmig.close()
+
+if __name__ == '__main__':
+    main()