aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorcpettet <rush@wikimedia.org>2014-10-17 12:02:16 -0500
committercpettet <rush@wikimedia.org>2014-10-17 12:02:16 -0500
commit34704424ca1b5e14546df2a331ce0d419908812a (patch)
treea002d5eb22239ebc655685e3b2e205d715c93af5
parent9de6eb39b3aed74286474603db4944e6b268fff3 (diff)
cleanup and blog updates
-rwxr-xr-xbugzilla_create.py4
-rwxr-xr-xbugzilla_populate_user_relations_comments_table.py4
-rwxr-xr-xbugzilla_update_user_comments.py35
-rwxr-xr-xbugzilla_update_user_header.py30
-rw-r--r--dump1
-rwxr-xr-xfab_update_user.py42
-rwxr-xr-xwmfphablib/__init__.py42
-rwxr-xr-xwmfphablib/bzlib.py90
-rwxr-xr-xwmfphablib/phabapi.py25
-rwxr-xr-xwmfphablib/phabdb.py41
-rw-r--r--wmfphablib/util.py48
11 files changed, 235 insertions, 127 deletions
diff --git a/bugzilla_create.py b/bugzilla_create.py
index 81f8d08..65f7ecb 100755
--- a/bugzilla_create.py
+++ b/bugzilla_create.py
@@ -3,7 +3,6 @@ import multiprocessing
import time
import yaml
import ast
-import base64
import json
import sys
import xmlrpclib
@@ -26,7 +25,6 @@ from wmfphablib import util
from wmfphablib import datetime_to_epoch
from wmfphablib import epoch_to_datetime
from wmfphablib import ipriority
-from email.parser import Parser
def create(bugid):
@@ -83,7 +81,7 @@ def create(bugid):
a['name'] = upload['name']
a['objectName'] = upload['objectName']
uploads[a['id']] = a
- log('Attachment count: ' + str(len(uploads.keys())))
+ log('%s attachment count: %s' % (bugid, str(len(uploads.keys()))))
#list of projects to add to ticket
ptags = []
diff --git a/bugzilla_populate_user_relations_comments_table.py b/bugzilla_populate_user_relations_comments_table.py
index af6e380..cefc5e3 100755
--- a/bugzilla_populate_user_relations_comments_table.py
+++ b/bugzilla_populate_user_relations_comments_table.py
@@ -21,7 +21,7 @@ from wmfphablib import return_bug_list
def populate(bugid):
def add_comment_ref(owner):
- """ adds an issue reference to a user
+ """ adds an issue reference to a user for later updating their comments
"""
ouser = pmig.sql_x("SELECT user FROM user_relations_comments WHERE user = %s", (owner,))
if ouser:
@@ -69,7 +69,7 @@ def populate(bugid):
xcom = json.loads(xcomments)
commenters = [c['author'] for c in com if c['count'] > 0]
commenters = set(commenters)
- log("Commenters for issue %s: %s" % (bugid, str(commenters)))
+ log("commenters for issue %s: %s" % (bugid, str(commenters)))
for c in commenters:
add_comment_ref(c)
pmig.close()
diff --git a/bugzilla_update_user_comments.py b/bugzilla_update_user_comments.py
index 764db05..f98ff72 100755
--- a/bugzilla_update_user_comments.py
+++ b/bugzilla_update_user_comments.py
@@ -119,25 +119,6 @@ def get_user_histories(verified):
pmig.close()
return [util.translate_json_dict_items(d) for d in histories]
-def get_verified_users(modtime, limit=None):
- #Find the task in new Phabricator that matches our lookup
- verified = phabdb.get_verified_emails(modtime=modtime, limit=limit)
- create_times = [v[2] for v in verified]
- try:
- newest = max(create_times)
- except ValueError:
- newest = modtime
- return verified, newest
-
-def get_verified_user(email):
- phid, email, is_verified = phabdb.get_user_email_info(email)
- log("Single verified user: %s, %s, %s" % (phid, email, is_verified))
- if is_verified:
- return [(phid, email)]
- else:
- log("%s is not a verified email" % (email,))
- return [()]
-
def main():
parser = argparse.ArgumentParser(description='Updates user header metadata from bugzilla')
@@ -153,13 +134,13 @@ def main():
if args.a:
starting_epoch = phabdb.get_user_relations_last_finish(pmig)
- users, finish_epoch = get_verified_users(starting_epoch, config.fab_limit)
+ users, finish_epoch = phabdb.get_verified_users(starting_epoch, config.fab_limit)
elif args.email:
- users = get_verified_user(args.email)
+ users = phabdb.get_verified_user(args.email)
starting_epoch = 0
finish_epoch = 0
elif args.starting_epoch:
- users, finish_epoch = get_verified_users(args.starting_epoch)
+ users, finish_epoch = phabdb.get_verified_users(args.starting_epoch)
starting_epoch = args.starting_epoch
else:
parser.print_help()
@@ -184,9 +165,11 @@ def main():
log("Issue Count %s" % (str(issue_count)))
pid = os.getpid()
+ source = util.source_name(sys.argv[0])
phabdb.user_relations_start(pid,
+ source,
int(time.time()),
- 0,
+ ipriority['na'],
starting_epoch,
user_count, issue_count, pmig)
@@ -203,8 +186,12 @@ def main():
complete,
failed,
pmig)
- print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
+
+ pm = phabmacros(config.phab_user, config.phab_cert, config.phab_host)
+ vlog(util.update_blog(source, complete, failed, user_count, issue_count, pm))
+
pmig.close()
+ print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
if __name__ == '__main__':
main()
diff --git a/bugzilla_update_user_header.py b/bugzilla_update_user_header.py
index deba274..b079e15 100755
--- a/bugzilla_update_user_header.py
+++ b/bugzilla_update_user_header.py
@@ -122,24 +122,6 @@ def get_user_histories(verified):
pmig.close()
return [util.translate_json_dict_items(d) for d in histories]
-def get_verified_users(modtime, limit=None):
- #Find the task in new Phabricator that matches our lookup
- verified = phabdb.get_verified_emails(modtime=modtime, limit=limit)
- create_times = [v[2] for v in verified]
- try:
- newest = max(create_times)
- except ValueError:
- newest = modtime
- return verified, newest
-
-def get_verified_user(email):
- phid, email, is_verified = phabdb.get_user_email_info(email)
- log("Single specified user: %s, %s, %s" % (phid, email, is_verified))
- if is_verified:
- return [(phid, email)]
- else:
- log("%s is not a verified email" % (email,))
- return [()]
def main():
parser = argparse.ArgumentParser(description='Updates user header metadata from bugzilla')
@@ -155,13 +137,13 @@ def main():
if args.a:
starting_epoch = phabdb.get_user_relations_last_finish(pmig)
- users, finish_epoch = get_verified_users(starting_epoch, config.fab_limit)
+ users, finish_epoch = phabdb.get_verified_users(starting_epoch, config.fab_limit)
elif args.email:
- users = get_verified_user(args.email)
+ users = phabdb.get_verified_user(args.email)
starting_epoch = 0
finish_epoch = 0
elif args.starting_epoch:
- users, finish_epoch = get_verified_users(args.starting_epoch)
+ users, finish_epoch = phabdb.get_verified_users(args.starting_epoch)
starting_epoch = args.starting_epoch
else:
parser.print_help()
@@ -190,9 +172,11 @@ def main():
log("Issue Count %s" % (str(issue_count)))
pid = os.getpid()
+ source = util.source_name(sys.argv[0])
phabdb.user_relations_start(pid,
+ source,
int(time.time()),
- 0,
+ ipriority['na'],
starting_epoch,
user_count, issue_count, pmig)
@@ -210,6 +194,8 @@ def main():
failed,
pmig)
pmig.close()
+ pm = phabmacros(config.phab_user, config.phab_cert, config.phab_host)
+ vlog(util.update_blog(source, complete, failed, user_count, issue_count, pm))
print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
if __name__ == '__main__':
diff --git a/dump b/dump
new file mode 100644
index 0000000..09618cf
--- /dev/null
+++ b/dump
@@ -0,0 +1 @@
+{"whiteboard": "", "classification": "Unclassified", "creator": "wikipedia@mail.trumpkin.de", "cc": ["bugzillas+padREMOVETHISdu"], "depends_on": [], "cf_platform": "---", "is_open": false, "keywords": ["need-parsertest"], "cf_browser": "---", "id": 1001, "severity": "normal", "is_confirmed": true, "is_creator_accessible": true, "priority": 50, "platform": "All", "version": "unspecified", "see_also": [], "status": "resolved", "product": "MediaWiki", "blocks": [343], "qa_contact": "", "creation_time": "1102170540.0", "component": "Parser", "groups": [], "target_milestone": "---", "is_cc_accessible": true, "url": "http://de.wikipedia.org/w/index.php?title=Benutzer:Habakuk/Bilder&oldid=3609588", "cf_hugglebeta": "---", "summary": "In ext. links don't change \"\\\" to \"/\"", "op_sys": "All", "flags": [], "assigned_to": "wikibugs-l@lists.wikimedia.org", "update_token": "1412964339-4xwMydiZQdPsr-rrTPZRox1pUdhzk7wNLzN1dGT1S0Q", "resolution": "FIXED", "last_change_time": "1119618256.0"} \ No newline at end of file
diff --git a/fab_update_user.py b/fab_update_user.py
index 1e84909..8d8440c 100755
--- a/fab_update_user.py
+++ b/fab_update_user.py
@@ -121,32 +121,6 @@ def get_user_histories(verified):
pmig.close()
return [util.translate_json_dict_items(d) for d in histories]
-def get_verified_users(modtime, limit=None):
- #Find the task in new Phabricator that matches our lookup
- verified = phabdb.get_verified_emails(modtime=modtime, limit=limit)
- create_times = [v[2] for v in verified]
- try:
- newest = max(create_times)
- except ValueError:
- newest = modtime
- return verified, newest
-
-def get_verified_user(email):
- phid, email, is_verified = phabdb.get_user_email_info(email)
- log("Single verified user: %s, %s, %s" % (phid, email, is_verified))
- if is_verified:
- return [(phid, email)]
- else:
- log("%s is not a verified email" % (email,))
- return [()]
-
-def last_finish():
- pmig = phabdb.phdb(db=config.fabmigrate_db,
- user=config.fabmigrate_user,
- passwd=config.fabmigrate_passwd)
- pmig.close()
- ftime = phabdb.get_user_relations_last_finish(pmig)
- return ftime or 1
def main():
parser = argparse.ArgumentParser(description='Updates user metadata from fab')
@@ -162,13 +136,13 @@ def main():
if args.a:
starting_epoch = phabdb.get_user_relations_last_finish(pmig)
- users, finish_epoch = get_verified_users(starting_epoch, config.fab_limit)
+ users, finish_epoch = phabdb.get_verified_users(starting_epoch, config.fab_limit)
elif args.email:
- users = get_verified_user(args.email)
+ users = phabdb.get_verified_user(args.email)
starting_epoch = 0
finish_epoch = 0
elif args.starting_epoch:
- users, finish_epoch = get_verified_users(args.starting_epoch)
+ users, finish_epoch = phabdb.get_verified_users(args.starting_epoch)
starting_epoch = args.starting_epoch
else:
parser.print_help()
@@ -200,13 +174,15 @@ def main():
log("Existing as there are no new verified users")
sys.exit()
-
pid = os.getpid()
+ source = util.source_name(sys.argv[0])
phabdb.user_relations_start(pid,
+ source,
int(time.time()),
- 0,
+ ipriority['na'],
starting_epoch,
user_count, issue_count, pmig)
+
from multiprocessing import Pool
pool = Pool(processes=config.fab_multi)
_ = pool.map(run_update, histories)
@@ -219,6 +195,10 @@ def main():
complete,
failed,
pmig)
+
+ pm = phabmacros(config.phab_user, config.phab_cert, config.phab_host)
+ vlog(util.update_blog(source, complete, failed, user_count, issue_count, pm))
+
print '%s completed %s, failed %s' % (sys.argv[0], complete, failed)
pmig.close()
diff --git a/wmfphablib/__init__.py b/wmfphablib/__init__.py
index c41bde8..28ee7b6 100755
--- a/wmfphablib/__init__.py
+++ b/wmfphablib/__init__.py
@@ -2,15 +2,20 @@ import os
import sys
import syslog
import datetime
-import bzwmfphab as bzlib
+import bzlib
import rtlib
import fablib
+import time
+from util import log
+from util import vlog
+from util import errorlog
+from util import datetime_to_epoch
+from util import epoch_to_datetime
from phabapi import phabapi as Phab
from phabdb import phdb
from phabdb import mailinglist_phid
from phabdb import set_project_icon
from config import cfile as configfile
-import time
def now():
return int(time.time())
@@ -42,40 +47,7 @@ def return_bug_list():
log("Bugs count: %d" % (len(bugs)))
return bugs
-def datetime_to_epoch(date_time):
- return str((date_time - datetime.datetime(1970,1,1)).total_seconds())
-
-def epoch_to_datetime(epoch, timezone='UTC'):
- return str((datetime.datetime.fromtimestamp(int(float(epoch))
- ).strftime('%Y-%m-%d %H:%M:%S'))) + " (%s)" % (timezone,)
-
-def errorlog(msg):
- msg = unicode(msg)
- try:
- syslog.syslog(msg)
- print >> sys.stderr, msg
- except:
- print 'error logging, well...error output'
-
-def log(msg):
- msg = unicode(msg)
- if '-v' in ''.join(sys.argv):
- try:
- syslog.syslog(msg)
- print msg
- except:
- print 'error logging output'
-
-def vlog(msg):
- msg = unicode(msg)
- if '-vv' in ''.join(sys.argv):
- try:
- print '-> ', msg
- except:
- print 'error logging output'
-
def save_attachment(name, data):
f = open(name, 'wb')
f.write(data)
f.close()
-
diff --git a/wmfphablib/bzlib.py b/wmfphablib/bzlib.py
new file mode 100755
index 0000000..ffce4ff
--- /dev/null
+++ b/wmfphablib/bzlib.py
@@ -0,0 +1,90 @@
+import re
+
+prepend = 'bz'
+security_mask = '_hidden_'
+
+def build_comment(c):
+ """ takes a native bz comment dict and outputs
+ a dict ready for processing into phab
+ """
+ clean_c = {}
+ clean_c['author'] = c['author'].split('@')[0]
+ clean_c['creation_time'] = str(c['creation_time'])
+ clean_c['creation_time'] = int(float(c['creation_time']))
+ if c['author'] != c['creator']:
+ clean_c['creator'] = c['creator'].split('@')[0]
+
+ clean_c['count'] = c['count']
+ if c['count'] == 0:
+ clean_c['bug_id'] = c['bug_id']
+
+ if c['is_private']:
+ c['text'] = security_mask
+
+ attachment = find_attachment_in_comment(c['text'])
+ if attachment:
+ fmt_text = []
+ text = c['text'].splitlines()
+ for t in text:
+ if not t.startswith('Created attachment'):
+ fmt_text.append(t)
+ c['text'] = '\n'.join(fmt_text)
+ clean_c['attachment'] = attachment
+ clean_c['text'] = c['text']
+ return clean_c
+
+def find_attachment_in_comment(text):
+ a = re.search('Created\sattachment\s(\d+)', text)
+ if a:
+ return a.group(1)
+ else:
+ return ''
+
+def status_convert(bz_status):
+ """
+ UNCONFIRMED (default) Open + Needs Triage (default)
+ NEW Open
+ ASSIGNED open
+ PATCH_TO_REVIEW open
+ NEED_INFO needs_info
+ RESOLVED FIXED resolved
+ RESOLVED INVALID invalid
+ RESOLVED WONTFIX declined
+ RESOLVED WORKSFORME resolved
+ RESOLVED DUPLICATE closed
+
+ needs_info stalled
+ resolved closed
+ invalid no historical value will be purged eventually (spam, etc)
+ declined we have decided not to -- even though we could
+ """
+
+ statuses = {'new': 'open',
+ 'resolved': 'resolved',
+ 'reopened': 'open',
+ 'closed': 'resolved',
+ 'need_info': 'needs_info',
+ 'verified': 'resolved',
+ 'assigned': 'open',
+ 'unconfirmed': 'open',
+ 'patch_to_review': 'open'}
+
+ return statuses[bz_status.lower()]
+
+def priority_convert(bz_priority):
+ """
+ "100" : "Unbreak Now!",
+ "90" : "Needs Triage",
+ "80" : "High",
+ "50" : "Normal",
+ "25" : "Low",
+ "10" : "Needs Volunteer",
+ """
+ priorities = {'unprioritized': 90,
+ 'immediate': 100,
+ 'highest': 100,
+ 'high': 80,
+ 'normal': 50,
+ 'low': 25,
+ 'lowest': 10}
+ return priorities[bz_priority.lower()]
diff --git a/wmfphablib/phabapi.py b/wmfphablib/phabapi.py
index 20c79f3..e6c1be1 100755
--- a/wmfphablib/phabapi.py
+++ b/wmfphablib/phabapi.py
@@ -1,15 +1,19 @@
-def log(m):
- pass
-
import base64
import phabricator
from phabricator import Phabricator
import phabdb
+from . import log
+from . import vlog
+from . import errorlog as elog
class phabapi:
def __init__(self, user, cert, host):
+ self.user = user
+ self.cert = cert
+ self.host = host
+
if host:
self.con = Phabricator(username=user,
certificate=cert,
@@ -17,6 +21,17 @@ class phabapi:
else:
self.con = None
+
+ def blog_update(self, botname, title, body):
+ blogphid = phabdb.get_bot_blog(botname)
+ if blogphid is None:
+ elog('blogphid is none')
+ return
+ return self.con.phame.createpost(blogPHID=blogphid,
+ body=body,
+ title=title,
+ phameTitle=title)
+
def sync_assigned(self, userphid, id, prepend):
refs = phabdb.reference_ticket('%s%s' % (prepend, id))
if not refs:
@@ -27,7 +42,7 @@ class phabapi:
log('current owner found for => %s' % (str(id),))
return current
log('assigning T%s to %s' % (str(id), userphid))
- return self.con.maniphest.update(phid=refs[0], ownerPHID=userphid)
+ return phabdb.set_issue_assigned(refs[0], userphid)
def synced_authored(self, phid, id, ref):
refs = phabdb.reference_ticket('%s%s' % (ref, id))
@@ -65,7 +80,7 @@ class phabapi:
existing_proj = self.con.project.query(names=[project_name])
if not existing_proj['data']:
- log('need to make: ' + project_name)
+ log('need to create project(s) ' + project_name)
try:
new_proj = self.con.project.create(name=project_name, members=pmembers)
#XXX: Bug where we have to specify a members array!
diff --git a/wmfphablib/phabdb.py b/wmfphablib/phabdb.py
index 33d1305..d1f721c 100755
--- a/wmfphablib/phabdb.py
+++ b/wmfphablib/phabdb.py
@@ -16,20 +16,20 @@ from config import fabmigrate_passwd
def get_user_relations_last_finish(dbcon):
#get_user_relations_last_finish(pmig)
- fin = dbcon.sql_x("SELECT max(finish_epoch) from user_relations_job", ())
+ fin = dbcon.sql_x("SELECT max(finish_epoch) from user_relations_jobs", ())
try:
return int(fin[0][0])
except:
return 1
-def user_relations_start(pid, start, status, start_epoch, user_count, issue_count, dbcon):
- insert_values = (pid, start, status, start_epoch, user_count, issue_count, int(time.time()))
- query = "INSERT INTO user_relations_job (pid, start, status, start_epoch, user_count, issue_count, modified) VALUES (%s, %s, %s, %s, %s, %s, %s)"
+def user_relations_start(pid, source, start, status, start_epoch, user_count, issue_count, dbcon):
+ insert_values = (pid, source, start, status, start_epoch, user_count, issue_count, int(time.time()))
+ query = "INSERT INTO user_relations_jobs (pid, source, start, status, start_epoch, user_count, issue_count, modified) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
return dbcon.sql_x(query, insert_values)
def user_relations_finish(pid, finish, status, finish_epoch, completed, failed, dbcon):
update_values = (finish, status, finish_epoch, completed, failed, int(time.time()), pid)
- return dbcon.sql_x("UPDATE user_relations_job SET finish=%s, status=%s, finish_epoch=%s, completed=%s , failed=%s, modified=%s WHERE pid = %s",
+ return dbcon.sql_x("UPDATE user_relations_jobs SET finish=%s, status=%s, finish_epoch=%s, completed=%s , failed=%s, modified=%s WHERE pid = %s",
update_values)
def get_user_relations_priority(user, dbcon):
@@ -80,6 +80,13 @@ def get_user_migration_comment_history(user, dbcon):
return ()
return saved_history[0]
+def get_bot_blog(botname):
+ p = phdb(db='phabricator_phame', user=phuser_user, passwd=phuser_passwd)
+ _ = p.sql_x("SELECT phid from phame_blog where name=%s", ('%s_updates' % (botname,)), limit=1)
+ p.close()
+ if _ is not None and len(_[0]) > 0:
+ return _[0][0]
+
def is_bot(userphid):
p = phdb(db='phabricator_user', user=phuser_user, passwd=phuser_passwd)
isbot = p.sql_x("SELECT isSystemAgent from user where phid=%s", (userphid,), limit=1)
@@ -104,6 +111,11 @@ def set_issue_status(taskphid, status):
p.sql_x("UPDATE maniphest_task SET status=%s WHERE phid=%s", (status, taskphid))
p.close()
+def set_issue_assigned(taskphid, userphid):
+ p = phdb(db='phabricator_maniphest', user=phuser_user, passwd=phuser_passwd)
+ p.sql_x("UPDATE maniphest_task SET ownerPHID=%s WHERE phid=%s", (userphid, taskphid))
+ p.close()
+
def set_comment_content(transxphid, content):
"""set manual content for a comment
:param transxphid: str
@@ -219,6 +231,15 @@ def get_user_relations():
return ''
return _
+def get_verified_user(email):
+ phid, email, is_verified = get_user_email_info(email)
+ log("Single specified user: %s, %s, %s" % (phid, email, is_verified))
+ if is_verified:
+ return [(phid, email)]
+ else:
+ log("%s is not a verified email" % (email,))
+ return [()]
+
def get_user_email_info(emailaddress):
p = phdb(db='phabricator_user', user=phuser_user, passwd=phuser_passwd)
sql = "SELECT userPHID, address, isVerified from user_email where address=%s"
@@ -227,6 +248,16 @@ def get_user_email_info(emailaddress):
p.close()
return _[0] or ''
+def get_verified_users(modtime, limit=None):
+ #Find the task in new Phabricator that matches our lookup
+ verified = get_verified_emails(modtime=modtime, limit=limit)
+ create_times = [v[2] for v in verified]
+ try:
+ newest = max(create_times)
+ except ValueError:
+ newest = modtime
+ return verified, newest
+
def get_verified_emails(modtime=0, limit=None):
p = phdb(db='phabricator_user', user=phuser_user, passwd=phuser_passwd)
sql = "SELECT userPHID, address, dateModified from user_email where dateModified > %s and isVerified = 1"
diff --git a/wmfphablib/util.py b/wmfphablib/util.py
index a34f601..302146a 100644
--- a/wmfphablib/util.py
+++ b/wmfphablib/util.py
@@ -1,6 +1,54 @@
+import os
import sys
import json
import subprocess
+import config
+import time
+import datetime
+import syslog
+
+def datetime_to_epoch(date_time):
+ return str((date_time - datetime.datetime(1970,1,1)).total_seconds())
+
+def epoch_to_datetime(epoch, timezone='UTC'):
+ return str((datetime.datetime.fromtimestamp(int(float(epoch))
+ ).strftime('%Y-%m-%d %H:%M:%S'))) + " (%s)" % (timezone,)
+
+def errorlog(msg):
+ msg = unicode(msg)
+ try:
+ syslog.syslog(msg)
+ print >> sys.stderr, msg
+ except:
+ print 'error logging, well...error output'
+
+def log(msg):
+ msg = unicode(msg)
+ if '-v' in ''.join(sys.argv):
+ try:
+ syslog.syslog(msg)
+ print msg
+ except:
+ print 'error logging output'
+
+def vlog(msg):
+ msg = unicode(msg)
+ if '-vv' in ''.join(sys.argv):
+ try:
+ print '-> ', msg
+ except:
+ print 'error logging output'
+
+def update_blog(source, complete, failed, user_count, issue_count, apicon):
+ title = "%s completed %s / failed %s" % (epoch_to_datetime(time.time()),
+ complete,
+ failed)
+ print title
+ body = "%s:\nUsers updated: %s\nIssues affected: %s" % (source, user_count, issue_count)
+ return apicon.blog_update(apicon.user, title, body)
+
+def source_name(path):
+ return os.path.basename(path.strip('.py'))
def can_edit_ref():
f = open('/srv/phab/phabricator/conf/local/local.json', 'r').read()