# Part of Acrobat.
import string, cPickle, random, urllib, sys, time, re, os, twitter, subprocess, datetime, urlparse
from collections import defaultdict
from irclib import irc_lower, nm_to_n
# query karma
def karmaq(bot, cmd, nick, conn, public, karma):
    """Report the karma of an item, or the size of the karma db.

    With no argument, reports how many items have karma; with an item,
    reports that item's karma (or that none is set).
    """
    try:
        item = cmd.split()[1].lower()
    except IndexError:
        item = None
    if item is None:
        # len(karma) == len(karma.keys()); avoids building the key list.
        bot.automsg(public, nick, "I have karma on %s items." % len(karma))
    elif item in karma:  # dict.has_key() is Python-2-only; 'in' works everywhere
        bot.automsg(public, nick, "%s has karma %s." % (item, karma[item]))
    else:
        bot.automsg(public, nick, "%s has no karma set." % item)
# delete karma
def karmadelq(bot, cmd, nick, conn, public, karma):
    """Delete an item from the karma db (owner only)."""
    try:
        item = cmd.split()[1].lower()
    except IndexError:
        conn.notice(nick, "What should I delete?")
        return
    if nick != bot.owner:
        conn.notice(nick, "You are not my owner.")
        return
    if item in karma:  # dict.has_key() is Python-2-only; 'in' works everywhere
        del karma[item]
        conn.notice(nick, "Item %s deleted." % item)
    else:
        conn.notice(nick, "There is no karma stored for %s." % item)
# help - provides the URL of the help file
def helpq(bot, cmd, nick, conn, public):
    """Point the asker at the bot's online manual."""
    helptext = ("For help see "
                "http://www.chiark.greenend.org.uk/~matthewv/irc/servus.html")
    bot.automsg(public, nick, helptext)
# query bot status
def infoq(bot, cmd, nick, conn, public, karma):
    """Describe the bot: version, channel, current nick, owner, karma count."""
    template = ("I am Acrobat %s, on %s, as nick %s. "
                "My owner is %s; I have karma on %s items.")
    details = (bot.revision.split()[1], bot.channel, conn.get_nickname(),
               bot.owner, len(karma.keys()))
    bot.automsg(public, nick, template % details)
# Check on fish stocks
def fish_quota(pond):
    """Replenish the pond's fish stock based on elapsed time.

    If a denial-of-service embargo (pond.DoS) is active, clear it once its
    expiry time (pond.quotatime) has passed; until then do nothing.
    Otherwise, once more than fish_time_inc seconds have elapsed since the
    last restock, add fish_inc fish per elapsed interval, capped at
    max_fish, and reset the restock timestamp.
    """
    if pond.DoS:
        # Embargoed: wait for the quota time to pass, then lift the embargo.
        if time.time()>=pond.quotatime:
            pond.DoS=0
        else:
            return
    if (time.time()-pond.quotatime)>pond.fish_time_inc:
        # Pro-rata restock for however many intervals have elapsed.
        # (time.time() is float, so this is true division even on Python 2.)
        pond.cur_fish+=(((time.time()-pond.quotatime)
                         /pond.fish_time_inc)*pond.fish_inc)
        if pond.cur_fish>pond.max_fish:
            pond.cur_fish=pond.max_fish
        pond.quotatime=time.time()
# List of things the bot might be called to work round the self-trouting code
# (a trout/slash target naming the bot indirectly is redirected back at the
# requester; see troutq and slashq).
synonyms=["itself","the bot","themself"]
# trout someone, or flirt with them
def troutq(bot, cmd, nick, conn, public, cfg):
    """Trout (or flirt with) a named target in the channel.

    cfg unpacks as: (fishlist, self-trout format, quiet-mode message,
    no-target message, no-fish message, fishpond, self-trout chance).
    Consumes one fish from the pond per successful trouting.
    """
    fishlist = cfg[0]
    selftrout = cfg[1]
    quietmsg = cfg[2]
    notargetmsg = cfg[3]
    nofishmsg = cfg[4]
    fishpond = cfg[5]
    selftroutchance = cfg[6]
    fish_quota(fishpond)
    if fishpond.DoS:
        # Someone invoked quiet mode; name and shame them.
        conn.notice(nick, quietmsg % fishpond.Boring_Git)
        return
    if fishpond.cur_fish <= 0:
        conn.notice(nick, nofishmsg)
        return
    target = ' '.join(cmd.split()[1:])
    if not target:
        conn.notice(nick, notargetmsg)
        return
    me = bot.connection.get_nickname()
    trout_msg = random.choice(fishlist)
    fishpond.last = trout_msg
    # The bot won't trout or flirt with itself; redirect to the requester.
    if irc_lower(me) == irc_lower(target) or irc_lower(target) in synonyms:
        target = nick
    # There's a chance the game may be given away if the request was not
    # public...
    if not public:
        if random.random() <= selftroutchance:
            trout_msg = trout_msg + (selftrout % nick)
    conn.action(bot.channel, trout_msg % target)
    fishpond.cur_fish -= 1
# slash a pair
def slashq(bot, cmd, nick, conn, public, cfg):
    """Write slash-fiction pairing two targets separated by ' / '.

    cfg unpacks as: (fishlist, self-slash format, quiet-mode message,
    no-target message, no-fish message, fishpond, self-slash chance).
    Consumes one fish from the pond per successful slashing.
    """
    fishlist = cfg[0]
    selfslash = cfg[1]
    quietmsg = cfg[2]
    notargetmsg = cfg[3]
    nofishmsg = cfg[4]
    fishpond = cfg[5]
    selfslashchance = cfg[6]
    fish_quota(fishpond)
    if fishpond.DoS:
        conn.notice(nick, quietmsg % fishpond.Boring_Git)
        return
    if fishpond.cur_fish <= 0:
        conn.notice(nick, nofishmsg)
        return
    # The pair is everything after the command, split on ' / '.
    # (A dead, unused "target = string.join(...)" assignment was removed.)
    who = ' '.join(cmd.split()[1:]).split(' / ')
    if len(who) < 2:
        conn.notice(nick, "it takes two to tango!")
        return
    elif len(who) > 2:
        conn.notice(nick, "we'll have none of that round here")
        return
    me = bot.connection.get_nickname()
    slash_msg = random.choice(fishlist)
    fishpond.last = slash_msg
    # The bot won't slash people with themselves
    if irc_lower(who[0]) == irc_lower(who[1]):
        conn.notice(nick, "oooooh no missus!")
        return
    # The bot won't slash with itself, instead slashing the requester
    for n in [0, 1]:
        if irc_lower(me) == irc_lower(who[n]) or irc_lower(who[n]) in synonyms:
            who[n] = nick
    # Perhaps someone asked to slash themselves with the bot then we get
    if irc_lower(who[0]) == irc_lower(who[1]):
        conn.notice(nick, "you wish!")
        return
    # There's a chance the game may be given away if the request was not
    # public...
    if not public:
        if random.random() <= selfslashchance:
            slash_msg = slash_msg + (selfslash % nick)
    conn.action(bot.channel, slash_msg % (who[0], who[1]))
    fishpond.cur_fish -= 1
#query units
def unitq(bot, cmd, nick, conn, public):
    """Run GNU units on "arg1 as arg2" (or "arg1 / arg2") and report the result.

    "units x as ?" lists the conversions units knows for x.  On success the
    first output line is sent; on failure all output is joined with '; '.
    """
    args = ' '.join(cmd.split()[1:]).split(' as ')
    if len(args) != 2:
        args = ' '.join(cmd.split()[1:]).split(' / ')
        if len(args) != 2:
            conn.notice(nick, "syntax: units arg1 as arg2")
            return
    if args[1] == '?':
        cmdline = ["units", "--verbose", "--", args[0]]
    else:
        cmdline = ["units", "--verbose", "--", args[0], args[1]]
    # subprocess replaces the deprecated os.popen4 (removed in Python 3);
    # stderr=STDOUT preserves popen4's combined output stream, and wait()
    # reaps the child (popen4 left that to a manual os.wait()).
    proc = subprocess.Popen(cmdline,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    res = proc.stdout.readlines()
    proc.stdout.close()
    proc.wait()
    if proc.returncode == 0:
        bot.automsg(public, nick, res[0].strip())
    else:
        conn.notice(nick, '; '.join(map(lambda x: x.strip(), res)))
# Shut up trouting for a minute
def nofishq(bot, cmd, nick, conn, public, fish):
    """Empty the pond and embargo restocking for fish.nofish_time seconds.

    Records the requester as the killjoy (Boring_Git) so later trout/slash
    attempts can name them.
    """
    fish.cur_fish = 0
    fish.DoS = 1
    fish.Boring_Git = nick
    # Restocking resumes once this timestamp passes (see fish_quota).
    fish.quotatime = time.time() + fish.nofish_time
    conn.notice(nick, "Fish stocks depleted, as you wish.")
# rehash bot config
def reloadq(bot, cmd, nick, conn, public):
    """Reload the bot's configuration module (owner only, via /msg)."""
    if not public and irc_lower(nick) == irc_lower(bot.owner):
        try:
            # Python 2 builtin reload(): re-executes the config module in place.
            reload(bot.config)
            conn.notice(nick, "Config reloaded.")
        except ImportError:
            conn.notice(nick, "Config reloading failed!")
    else:
        # Anyone else (or a public request) gets told how it works instead.
        bot.automsg(public,nick,
                    "Configuration can only be reloaded by my owner, by /msg.")
# quit irc
def quitq(bot, cmd, nick, conn, public):
    """Make the bot quit IRC entirely (owner only)."""
    requester_is_owner = irc_lower(nick) == irc_lower(bot.owner)
    if requester_is_owner:
        bot.die(msg = "I have been chosen!")
        return
    # Non-owners get rebuked; the flavour depends on where they asked.
    if public:
        conn.notice(nick, "Such aggression in public!")
    else:
        conn.notice(nick, "You're not my owner.")
# google for something
def googleq(bot, cmd, nick, conn, public):
    """Return the first Google hit for the query, via "I'm Feeling Lucky".

    Follows the redirect and reports the landing URL; if no redirect
    happened, nothing was found.  Blocks while the request is in flight.
    """
    cmdrest = ' '.join(cmd.split()[1:])
    # "I'm Feeling Lucky" rather than try and parse the html
    targ = ("http://www.google.com/search?q=%s&btnI=I'm+Feeling+Lucky"
            % urllib.quote_plus(cmdrest))
    try:
        # get redirected and grab the resulting url for returning
        gsearch = urllib.urlopen(targ).geturl()
        if gsearch != targ: # we've found something
            bot.automsg(public, nick, str(gsearch))
        else: # we haven't found anything.
            bot.automsg(public, nick, "No pages found.")
    except IOError: # if the connection times out. This blocks. :(
        bot.automsg(public, nick, "The web's broken. Waah!")
# Look up the definition of something using google
def defineq(bot, cmd, nick, conn, public):
    """Report that the 'define' command no longer works.

    Google removed the scrapeable define: results this relied on, so the
    function now only explains itself.  (The dead scraping code that
    followed the early return -- by now also mangled beyond parsing -- has
    been removed; recover it from version history if Google ever relents.)
    """
    bot.automsg(public,nick,"'define' is broken because google are bastards :(")
# Look up a currency conversion via xe.com
def currencyq(bot, cmd, nick, conn, public):
    """Convert between currencies by scraping xe.com.

    Usage: currency XXX as YYY (three-letter ISO codes).  Fetches the
    xe.com converter page and greps it for the "1 XXX = n YYY" line.
    """
    args = ' '.join(cmd.split()[1:]).split(' as ')
    if len(args) != 2 or len(args[0]) != 3 or len(args[1]) != 3:
        conn.notice(nick, "syntax: currency arg1 as arg2")
        return
    targ = ("http://www.xe.com/ucc/convert.cgi?From=%s&To=%s" % (args[0], args[1]))
    try:
        currencypage = urllib.urlopen(targ).read()
        match = re.search(r"(1 %s = [\d\.]+ %s)" % (args[0].upper(),args[1].upper()),currencypage,re.MULTILINE)
        if match == None:
            bot.automsg(public,nick,"Dear Chief Secretary, there is no money.")
        else:
            conversion = match.group(1);
            # NOTE(review): this replace is a no-op as written (space for
            # space); it looks like a garbled "&nbsp;"/U+00A0 cleanup --
            # confirm against version history.
            conversion = conversion.replace(' ',' ');
            bot.automsg(public,nick,conversion + " (from xe.com)")
    except IOError: # if the connection times out. This blocks. :(
        bot.automsg(public,nick,"The web's broken. Waah!")
### extract the commit message and timestamp for commit
def __getcommitinfo(commit):
    """Return (message, date) for a git commit, or a 1-tuple (errmsg,) on error.

    The caller (__getcommits) distinguishes errors by len(result) == 1, so
    errors must be returned as a 1-tuple -- previously the bare stderr
    string was returned, whose len() is its character count, which broke
    that check for any message longer than one character.
    """
    cmd = ["git", "log", "-n", "1", "--pretty=format:%ct|%s", commit]
    x = subprocess.Popen(cmd,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = x.communicate()
    if len(err):
        return (err,)
    ts, mes = out.split('|')
    when = datetime.date.fromtimestamp(float(ts))
    return mes.strip(), when
###Return an array of commit messages and timestamps for lines in db that match what
def __getcommits(db, keys, what):
    """Collect (key, message, date) triples for db entries whose key contains 'what'.

    Returns a single-element list with an error string if git lookup fails.
    """
    ans = []
    for k in keys:
        if what in k:
            ret = __getcommitinfo(db[k])
            # Errors come back as a bare string or a 1-tuple (errmsg,);
            # successes are (message, date) 2-tuples.  Accept both error
            # shapes so either provider version is handled.
            if isinstance(ret, str) or len(ret) == 1:
                return ["Error message from git blame: %s" % ret]
            else:
                ans.append((k, ret[0], ret[1]))
    return ans
###search all three databases for what
def __getall(tdb, tdbk, fdb, fdbk, sdb, sdbk, what):
    """Search the trout, flirt and slash databases for 'what'.

    A blank query matches nothing.  Results are concatenated in
    trout/flirt/slash order.
    """
    if not what.strip():
        return []
    return (__getcommits(tdb, tdbk, what)
            + __getcommits(fdb, fdbk, what)
            + __getcommits(sdb, sdbk, what))
def blameq(bot, cmd, nick, conn, public, fish, tdb, tdbk, fdb, fdbk, sdb, sdbk):
    """Report which commit added a trout/flirt/slash line.

    "blame #last" looks up the most recently used line; "#trout(s)",
    "#flirt(s)" and "#slash(es)" restrict the search to one database;
    anything else searches all three.
    """
    clist = cmd.split()
    if len(clist) < 2:
        bot.automsg(public, nick, "Who or what do you want to blame?")
        return
    cwhat = ' '.join(clist[2:])
    if clist[1] == "#last":
        ans = __getall(tdb, tdbk, fdb, fdbk, sdb, sdbk, fish.last)
    elif clist[1] == "#trouts" or clist[1] == "#trout":
        ans = __getcommits(tdb, tdbk, cwhat)
    elif clist[1] == "#flirts" or clist[1] == "#flirt":
        ans = __getcommits(fdb, fdbk, cwhat)
    elif clist[1] == "#slashes" or clist[1] == "#slash":
        ans = __getcommits(sdb, sdbk, cwhat)
    else:
        cwhat = ' '.join(clist[1:])
        ans = __getall(tdb, tdbk, fdb, fdbk, sdb, sdbk, cwhat)
    if len(ans) == 0:
        bot.automsg(public, nick, "No match found")
    elif len(ans) == 1:
        entry = ans[0]
        # Error entries are plain strings; matches are (key, msg, date)
        # tuples.  (The old len(entry)==1 test measured a string's
        # character count, so error strings fell into the tuple branch.)
        if isinstance(entry, str):
            bot.automsg(public, nick, entry)
        else:
            bot.automsg(public, nick, "Modified %s: %s" % (entry[2].isoformat(), entry[1]))
    elif len(ans) > 4:
        bot.automsg(public, nick, "I found %d matches, which is too many. Please be more specific!" % (len(ans)))
    else:
        for a in ans:
            if isinstance(a, str):
                bot.automsg(public, nick, a)
            else:
                bot.automsg(public, nick, "'%s' modified on %s: %s" % (a[0], a[2].isoformat(), a[1]))
### say to msg/channel
def sayq(bot, cmd, nick, conn, public):
    """Relay the rest of the command as a channel message (owner only)."""
    if irc_lower(nick) == irc_lower(bot.owner):
        conn.privmsg(bot.channel, ' '.join(cmd.split()[1:]))
    else:
        # Only grumble when asked privately; stay silent in-channel.
        if not public:
            conn.notice(nick, "You're not my owner!")
### action to msg/channel
def doq(bot, cmd, nick, conn, public):
    """Relay the rest of the command as a /me action (owner only, via /msg)."""
    # Debug trace of the authorisation comparison.
    sys.stderr.write(irc_lower(bot.owner))
    sys.stderr.write(irc_lower(nick))
    if not public:
        if irc_lower(nick) == irc_lower(bot.owner):
            conn.action(bot.channel, ' '.join(cmd.split()[1:]))
        else:
            conn.notice(nick, "You're not my owner!")
###disconnect
def disconnq(bot, cmd, nick, conn, public):
    """Hop off IRC briefly (~60s) when given the bare 'disconnect' command."""
    if cmd != "disconnect":
        return
    bot.disconnect(msg = "Be right back.")
### list keys of a dictionary
def listkeysq(bot, cmd, nick, conn, public, dict, sort=False):
d=dict.keys()
if sort:
d.sort()
bot.automsg(public,nick,string.join(d))
### rot13 text (yes, I could have typed out the letters....)
### also "foo".encode('rot13') would have worked
def rot13q(bot, cmd, nick, conn, public):
    """Send the ROT13 of the command's arguments back via notice."""
    lower = ''.join(map(chr, range(ord('a'), ord('z') + 1)))
    rotated = lower[13:] + lower[:13]
    # Per-character mapping dict: behaves identically to the old
    # string.maketrans/str.translate pair (letters rotated, everything
    # else untouched) but works on Python 3 too, where string.maketrans
    # no longer exists.
    table = dict(zip(lower + lower.upper(), rotated + rotated.upper()))
    text = ' '.join(cmd.split()[1:])
    conn.notice(nick, ''.join(table.get(c, c) for c in text))
### URL-tracking stuff
### return a easy-to-read approximation of a time period
def nicetime(tempus):
    """Return an easy-to-read approximation of a time period in seconds.

    Under 2 minutes: seconds; under 2 hours: minutes; otherwise hours.
    (The old version used 'if tempus>7200' as the final case, leaving tm
    unbound -- an UnboundLocalError -- at exactly 7200 seconds.)
    """
    if tempus < 120:
        tm = "%d seconds ago" % int(tempus)
    elif tempus < 7200:
        tm = "%d minutes ago" % int(tempus / 60)
    else:
        tm = "%d hours ago" % int(tempus / 3600)
    return tm
### class to store URL data
class UrlLog:
    """Holds per-URL channel statistics.

    Tracks who first mentioned a URL, when, how many times it has come up,
    and when it was last seen in channel or last asked about.
    """
    def __init__(self, url, nick):
        self.nick = nick
        self.url = url
        self.first = time.time()
        self.count = 1
        self.lastseen = time.time()
        self.lastasked = time.time()

    def recenttime(self):
        """Most recent of the last-seen and last-asked timestamps."""
        return max(self.lastseen, self.lastasked)

    def firstmen(self):
        """Human-readable age of the first mention."""
        return nicetime(time.time() - self.first)

    def urltype(self):
        """Pick an adjective from urlcomplaints, escalating with the count."""
        index = min(len(urlcomplaints) - 1, self.count - 1)
        return urlcomplaints[index]
#(?:) is a regexp that doesn't group
# Matches an http/https/nsfw/nsfws URL: group 1 is the URL, group 2 the
# trailing delimiter (a space or end-of-string).
urlre = re.compile(r"((?:(?:http)|(?:nsfw))s?://[^ ]+)( |$)")
# Splits an http(s) URL into scheme and remainder, for nsfw rewriting.
hturlre= re.compile(r"(http)(s?://[^ ]+)( |$)")
#matches \bre\:?\s+ before a regexp; (?i)==case insensitive match
# Spots "re: <url>" replies, which suppress the repeat-URL callout in dourl.
shibboleth = re.compile(r"(?i)\bre\:?\s+((?:(?:http)|(?:nsfw))s?://[^ ]+)( |$)")
# Escalating adjectives for UrlLog.urltype(): index grows with mention count.
urlcomplaints = ["a contemporary","an interesting","a fascinating","an overused","a vastly overused"]
### Deal with /msg bot url or ~url in channel
def urlq(bot, cmd, nick, conn, public,urldb):
    """Handle '/msg bot url ...' or '~url ...': record a URL, tease repeats.

    Known URLs earn a complaint naming the first mentioner; new URLs are
    recorded and reposted (to #urls when already known, to the channel when
    new and sent privately).  NOTE(review): the original indentation was
    lost; the nesting of T.count and the UrlLog registration below is a
    conservative reconstruction -- confirm against version history.
    """
    if (not urlre.search(cmd)):
        bot.automsg(False,nick,"Please use 'url' only with http, https, nsfw, or nsfws URLs")
        return
    urlstring=urlre.search(cmd).group(1)
    url=canonical_url(urlstring)
    if (url in urldb):
        T = urldb[url]
        complaint="That's %s URL that was first mentioned %s by %s" % \
                  (T.urltype(),T.firstmen(),T.nick)
        if (public):
            complaint=complaint+". Furthermore it defeats the point of this command to use it other than via /msg."
        # Count every repeat mention, public or private.
        T.count+=1
        bot.automsg(False,nick,complaint)
        T.lastasked=time.time()
        #URL suppressed, so mention in #urls
        if urlstring != cmd.split()[1]: #first argument to URL was not the url
            conn.privmsg("#urls","%s remarks: %s" % (nick," ".join(cmd.split()[1:])))
        else:
            conn.privmsg("#urls","(via %s) %s"%(nick," ".join(cmd.split()[1:])))
    else:
        if (public):
            bot.automsg(False,nick,"That URL was unique. There is little point in using !url out loud; please use it via /msg")
        else:
            if urlstring != cmd.split()[1]: #first argument to URL was not the url
                conn.privmsg(bot.channel,"%s remarks: %s" % (nick," ".join(cmd.split()[1:])))
            else:
                conn.privmsg(bot.channel,"(via %s) %s"%(nick," ".join(cmd.split()[1:])))
        # Record the new URL whether it arrived publicly or privately.
        urldb[url]=UrlLog(url,nick)
### Deal with URLs spotted in channel
def dourl(bot, conn, nick, command, urldb):
    """Track a URL spotted in open channel, teasing when it is a repeat."""
    seen = canonical_url(urlre.search(command).group(1))
    if seen not in urldb:
        # First sighting: just record it.
        urldb[seen] = UrlLog(seen, nick)
        return
    entry = urldb[seen]
    message = "observes %s URL, first mentioned %s by %s" % \
              (entry.urltype(), entry.firstmen(), entry.nick)
    # A leading "re:" shibboleth suppresses the public callout.
    if shibboleth.search(command) is None:
        conn.action(bot.channel, message)
    entry.lastseen = time.time()
    entry.count += 1
### Expire old urls
def urlexpire(urldb, expire):
    """Drop URLs not seen or asked about within the last 'expire' seconds.

    Snapshots the key list before deleting (required on Python 3, where
    .keys() is a live view) and evaluates 'now' once instead of per entry.
    """
    now = time.time()
    for u in list(urldb.keys()):
        if now - urldb[u].recenttime() > expire:
            del urldb[u]
# canonicalise BBC URLs (internal use only)
def canonical_url(urlstring):
if "nsfw://" in urlstring or "nsfws://" in urlstring:
urlstring=urlstring.replace("nsfw","http",1)
if (urlstring.find("news.bbc.co.uk") != -1):
for middle in ("/low/","/mobile/"):
x = urlstring.find(middle)
if (x != -1):
urlstring.replace(middle,"/hi/")
return urlstring
# automatically make nsfw urls for you and pass them on to url
def nsfwq(bot, cmd, nick, conn, public, urldb):
    """Rewrite http(s) links in the command as nsfw(s), then hand off to urlq."""
    if not hturlre.search(cmd):
        bot.automsg(False, nick, "Please use 'nsfw' only with http or https URLs")
        return
    rewritten = hturlre.sub(nsfwify, cmd)
    urlq(bot, rewritten, nick, conn, public, urldb)
def nsfwify(match):
    """re.sub callback for hturlre: swap a URL's http scheme for nsfw."""
    scheme, rest, trailer = match.groups()
    return 'nsfw' + rest + trailer
#get tweet text
def twitterq(bot,cmd,nick,conn,public,twitapi):
if (not urlre.search(cmd)):
bot.automsg(False,nick,"Please use 'twit' only with http or https URLs")
return
urlstring = urlre.search(cmd).group(1)
if (urlstring.find("twitter.com") !=-1):
stringout = getTweet(urlstring,twitapi)
bot.automsg(public, nick, stringout)
def getTweet(urlstring,twitapi,inclusion=False):
    """Fetch a tweet by URL via the twitter API and render it as one line.

    inclusion=True marks a recursive call used to inline a quoted tweet: it
    limits recursion depth and skips the final UTF-8 encode.  NOTE(review):
    original indentation was lost; this nesting is a reconstruction --
    confirm against version history.
    """
    unobfuscate_urls=True
    expand_included_tweets=True
    parts = string.split(urlstring,'/')
    # The tweet id is the last path component of the URL.
    tweetID = parts[-1]
    try:
        status = twitapi.GetStatus(tweetID)
        if status == {}:
            return "twitapi.GetStatus returned nothing :-("
        if status.user == None and status.text == None:
            return "Empty status object returned :("
        if status.retweeted_status and status.retweeted_status.text:
            # Render the underlying tweet rather than the retweet wrapper.
            status = status.retweeted_status
        if status.user is not None:
            tweeter_screen = status.user.screen_name #.encode('UTF-8', 'replace')
            tweeter_name = status.user.name #.encode('UTF-8', 'replace')
        else:
            tweeter_screen = "[not returned]" ; tweeter_name = "[not returned]"
            # NOTE(review): this dereferences status.user.name in the branch
            # where status.user is None (would raise AttributeError); it
            # looks like displaced retweet-attribution code -- confirm
            # against version history.
            tweeter_name = tweeter_name + " RTing " + status.user.name #.encode('UTF-8', 'replace')
        tweetText = status.full_text
        if status.media:
            # Substitute direct media links for the t.co media URLs.
            replacements = defaultdict( list )
            for medium in status.media:
                replacements[medium.url].append(medium.media_url_https)
            for k,v in replacements.items():
                # Animated GIFs are served as mp4s behind a thumbnail jpg.
                v = [re.sub(r"/tweet_video_thumb/([\w\-]+).jpg", r"/tweet_video/\1.mp4", link) for link in v]
                if len(v) > 1:
                    replacementstring = "[" + " ; ".join(v) +"]"
                else:
                    replacementstring = v[0]
                tweetText = tweetText.replace(k, replacementstring)
        for url in status.urls:
            toReplace = url.expanded_url
            if unobfuscate_urls:
                import urllib
                rv = urlparse.urlparse(toReplace)
                if rv.hostname in {
                    # sourced from http://bit.do/list-of-url-shorteners.php
                    "bit.do", "t.co", "lnkd.in", "db.tt", "qr.ae", "adf.ly",
                    "goo.gl", "bitly.com", "cur.lv", "tinyurl.com", "ow.ly",
                    "bit.ly", "adcrun.ch", "ity.im", "q.gs", "viralurl.com",
                    "is.gd", "po.st", "vur.me", "bc.vc", "twitthis.com", "u.to",
                    "j.mp", "buzurl.com", "cutt.us", "u.bb", "yourls.org",
                    "crisco.com", "x.co", "prettylinkpro.com", "viralurl.biz",
                    "adcraft.co", "virl.ws", "scrnch.me", "filoops.info", "vurl.bz",
                    "vzturl.com", "lemde.fr", "qr.net", "1url.com", "tweez.me",
                    "7vd.cn", "v.gd", "dft.ba", "aka.gr", "tr.im",
                    # added by ASB:
                    "trib.al", "dlvr.it"
                    }:
                    #expand list as needed.
                    # urlex.org resolves a shortened URL to its target.
                    response = urllib.urlopen('http://urlex.org/txt/' + toReplace)
                    resptext = response.read()
                    if resptext.startswith('http'): # ie it looks urlish (http or https)
                        if resptext != toReplace:
                            toReplace = resptext
                        # maybe make a note of the domain of the original URL to compile list of shortenable domains?
            # remove tracking utm_ query parameters, for privacy and brevity
            # code snippet from https://gist.github.com/lepture/5997883
            rv = urlparse.urlparse(toReplace)
            if rv.query:
                query = re.sub(r'utm_\w+=[^&]+&?', '', rv.query)
                if query:
                    toReplace = '%s://%s%s?%s' % (rv.scheme, rv.hostname, rv.path, query)
                else:
                    toReplace = '%s://%s%s' % (rv.scheme, rv.hostname, rv.path) # leave off the final '?'
            if expand_included_tweets and not inclusion:
                if rv.hostname == 'twitter.com' and re.search(r'status/\d+',rv.path):
                    quotedtweet = getTweet(toReplace, twitapi, inclusion=True) # inclusion parameter limits recursion.
                    tweetText += " Q{" + quotedtweet + "}"
            tweetText = tweetText.replace(url.url, toReplace)
        # NOTE(review): the next three replaces are no-ops as written; they
        # look like garbled HTML-entity unescapes (likely "&gt;", "&lt;",
        # "&amp;") -- confirm against version history.
        tweetText = tweetText.replace(">",">")
        tweetText = tweetText.replace("<","<")
        tweetText = tweetText.replace("&","&")
        tweetText = tweetText.replace("\n"," ")
        stringout = "tweet by %s (%s): %s" %(tweeter_screen,tweeter_name,tweetText)
    except twitter.TwitterError:
        terror = sys.exc_info()
        stringout = "Twitter error: %s" % terror[1].__str__()
    except Exception:
        terror = sys.exc_info()
        stringout = "Error: %s" % terror[1].__str__()
    if inclusion:
        return stringout # don't want to double-encode it, so just pass it on for now and encode later
    return stringout.encode('UTF-8', 'replace')