# Part of Acrobat.
-import string, cPickle, random, urllib, sys, time, re, os, twitter, subprocess, datetime, urlparse
+import string, cPickle, random, urllib, sys, time, re, os, twitter, subprocess, datetime, urlparse, hashlib
from collections import defaultdict
from irclib import irc_lower, nm_to_n
+try:
+ from blame_filter import bfd
+except ImportError:
+ bfd = None
+
# query karma
def karmaq(bot, cmd, nick, conn, public, karma):
try:
me = bot.connection.get_nickname()
trout_msg = random.choice(fishlist)
fishpond.last=trout_msg
+ fishpond.last_cfg=cfg
# The bot won't trout or flirt with itself;
if irc_lower(me) == irc_lower(target) or irc_lower(target) in synonyms:
target = nick
me = bot.connection.get_nickname()
slash_msg = random.choice(fishlist)
fishpond.last=slash_msg
+ fishpond.last_cfg=cfg
# The bot won't slash people with themselves
if irc_lower(who[0]) == irc_lower(who[1]):
conn.notice(nick, "oooooh no missus!")
return(err)
ts,mes=out.split('|')
+ mes=mes.strip()
+ md5mes=hashlib.md5(mes).hexdigest()
+ if bfd and md5mes in bfd:
+ mes=bfd[md5mes]
when=datetime.date.fromtimestamp(float(ts))
- return mes.strip(), when
+ return mes, when
###Return an array of commit messages and timestamps for lines in db that match what
def __getcommits(db,keys,what):
sans=__getcommits(sdb,sdbk,what)
return tans+fans+sans
-def blameq(bot,cmd,nick,conn,public,fish,tdb,tdbk,fdb,fdbk,sdb,sdbk):
+def blameq(bot,cmd,nick,conn,public,fish,cfgs):
+ tdb,tdbk,x = cfgs[0][7] # urgh, magic, to support magic knowledge below
+ fdb,fdbk,x = cfgs[1][7]
+ sdb,sdbk,x = cfgs[2][7]
clist=cmd.split()
if len(clist) < 2:
bot.automsg(public,nick,"Who or what do you want to blame?")
return
cwhat=' '.join(clist[2:])
if clist[1]=="#last":
- ans=__getall(tdb,tdbk,fdb,fdbk,sdb,sdbk,fish.last)
+ if fish.last_cfg is None:
+ bot.automsg(public,nick,"Nothing")
+ return
+ xdb,xdbk,kindsfile = fish.last_cfg[7]
+ ans=__getcommits(xdb,xdbk,fish.last)
elif clist[1]=="#trouts" or clist[1]=="#trout":
ans=__getcommits(tdb,tdbk,cwhat)
elif clist[1]=="#flirts" or clist[1]=="#flirt":
self.nick=nick
self.url=url
self.first=time.time()
+ self.localfirst=time.localtime(self.first)
self.count=1
self.lastseen=time.time()
self.lastasked=time.time()
def recenttime(self):
return max(self.lastseen,self.lastasked)
def firstmen(self):
- return nicetime(time.time()-self.first)
+ n=time.localtime(time.time())
+ s="%02d:%02d" % (self.localfirst.tm_hour,self.localfirst.tm_min)
+ if n.tm_yday != self.localfirst.tm_yday:
+ s+=time.strftime(" on %d %B", self.localfirst)
+ return s
def urltype(self):
z=min(len(urlinfos)-1, self.count-1)
return urlinfos[z]
# Matches an http(s) URL anywhere in a message; groups: scheme ("http"),
# the rest of the URL, and the trailing delimiter (space or end of line).
hturlre= re.compile(r"(http)(s?://[^ ]+)( |$)")
#matches \bre\:?\s+ before a regexp; (?i)==case insensitive match
shibboleth = re.compile(r"(?i)\bre\:?\s+((?:(?:http)|(?:nsfw))s?://[^ ]+)( |$)")
#How long (in s) to wait since the most recent mention before commenting
url_repeat_time = 300
urlinfos = ["a new",
"a fascinating",
"an interesting",
url=canonical_url(urlstring)
if (url in urldb):
T = urldb[url]
- complaint="That's %s URL that was first mentioned %s by %s" % \
- (T.urltype(),T.firstmen(),T.nick)
+ comment="I saw that URL in scrool, first mentioned by %s at %s" % \
+ (T.nick,T.firstmen())
if (public):
- complaint=complaint+". Furthermore it defeats the point of this command to use it other than via /msg."
+ comment=comment+". Furthermore it defeats the point of this command to use it other than via /msg."
T.count+=1
- bot.automsg(False,nick,complaint)
+ bot.automsg(False,nick,comment)
T.lastasked=time.time()
#URL suppressed, so mention in #urls
if urlstring != cmd.split()[1]: #first argument to URL was not the url
if urlstring in urldb:
T=urldb[urlstring]
- message="observes %s URL, first mentioned %s by %s" % \
- (T.urltype(),T.firstmen(),T.nick)
- if shibboleth.search(command)==None:
+ message="saw that URL in scrool, first mentioned by %s at %s" % \
+ (T.nick,T.firstmen())
+ if shibboleth.search(command)==None and \
+ time.time() - T.lastseen > url_repeat_time:
conn.action(bot.channel, message)
T.lastseen=time.time()
T.count+=1
unobfuscate_urls=True
expand_included_tweets=True
stringsout=[]
-
- parts = string.split(urlstring,'/')
- tweetID = parts[-1]
+
+ path = urlparse.urlparse(urlstring).path
+ tweetID = path.split('/')[-1]
try:
status = twitapi.GetStatus(tweetID)
if status == {}: