[ypp-sc-tools.db-live.git] / yoweb-scrape
#!/usr/bin/python

import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)

import os
import time
import urllib
import urllib2
import errno
import sys
import re as regexp
from optparse import OptionParser

from BeautifulSoup import BeautifulSoup

opts = None


puzzles = ('Swordfighting/Bilging/Sailing/Rigging/Navigating'+
        '/Battle Navigation/Gunning/Carpentry/Rumble/Treasure Haul'+
        '/Drinking/Spades/Hearts/Treasure Drop/Poker/Distilling'+
        '/Alchemistry/Shipwrightery/Blacksmithing/Foraging').split('/')

standingvals = ('Able/Distinguished/Respected/Master'+
                '/Renowned/Grand-Master/Legendary/Ultimate').split('/')

pirate_ref_re = regexp.compile('^/yoweb/pirate\\.wm')

max_pirate_namelen = 20


def debug(m):
        if opts.debug:
                print >>sys.stderr, m

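# Fetcher retrieves yoweb pages over HTTP and caches them on disk so that
# repeated queries do not hammer the puzzlepirates.com servers.  Cached
# pages live in cachedir as files named '#<url-quoted-url>#'; entries older
# than opts.expire_age are removed whenever the cache is scanned.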
class Fetcher:
        def __init__(self, ocean, cachedir):
                debug('Fetcher init %s' % cachedir)
                self.ocean = ocean
                self.cachedir = cachedir
                try: os.mkdir(cachedir)
                except (OSError,IOError), oe:
                        if oe.errno != errno.EEXIST: raise
                self._cache_scan(time.time())

        def _cache_scan(self, now):
                # returns list of ages, unsorted
                ages = []
                debug('Fetcher   scan_cache')
                for leaf in os.listdir(self.cachedir):
                        if not leaf.startswith('#'): continue
                        path = self.cachedir + '/' + leaf
                        try: s = os.stat(path)
                        except (OSError,IOError), oe:
                                if oe.errno != errno.ENOENT: raise
                                continue
                        age = now - s.st_mtime
                        if age > opts.expire_age:
                                debug('Fetcher    expire %d %s' % (age, path))
                                try: os.remove(path)
                                except (OSError,IOError), oe:
                                        if oe.errno != errno.ENOENT: raise
                                continue
                        ages.append(age)
                return ages

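        # Crude rate limiting: the cache entries double as a record of recent
        # fetches.  The most recent entry must be at least 1s old, the next
        # at least 3s, then 6s, 10.5s, ... (min_age grows as
        # (min_age+1)*1.5 per step); if any entry is newer than its
        # threshold we sleep until the condition would hold.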
        def _rate_limit_cache_clean(self, now):
                ages = self._cache_scan(now)
                ages.sort()
                debug('Fetcher   ages ' + `ages`)
                min_age = 1
                need_wait = 0
                for age in ages:
                        if age < min_age:
                                debug('Fetcher   morewait min=%d age=%d' %
                                        (min_age, age))
                                need_wait = max(need_wait, min_age - age)
                        min_age += 1
                        min_age *= 1.5
                if need_wait > 0:
                        debug('Fetcher   wait %d' % need_wait)
                        time.sleep(need_wait)

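        # fetch() serves a URL from the cache if a sufficiently fresh copy
        # exists; otherwise it downloads it, writing to a per-pid temporary
        # file and renaming into place so a concurrent run never sees a
        # half-written cache entry.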
        def fetch(self, url, max_age):
                debug('Fetcher fetch %s' % url)
                cache_corename = urllib.quote_plus(url)
                cache_item = "%s/#%s#" % (self.cachedir, cache_corename)
                try: f = file(cache_item, 'r')
                except (OSError,IOError), oe:
                        if oe.errno != errno.ENOENT: raise
                        f = None
                now = time.time()
                # clamp the requested freshness: never refetch more often
                # than min_max_age, never accept entries older than expire_age
                max_age = max(opts.min_max_age, min(max_age, opts.expire_age))
                if f is not None:
                        s = os.fstat(f.fileno())
                        age = now - s.st_mtime
                        if age > max_age:
                                debug('Fetcher  stale')
                                f = None
                if f is not None:
                        data = f.read()
                        f.close()
                        debug('Fetcher  cached %d > %d' % (max_age, age))
                        return data

                debug('Fetcher  fetch')
                self._rate_limit_cache_clean(now)

                stream = urllib2.urlopen(url)
                data = stream.read()
                cache_tmp = "%s/#%s~%d#" % (
                        self.cachedir, cache_corename, os.getpid())
                f = file(cache_tmp, 'w')
                f.write(data)
                f.close()
                os.rename(cache_tmp, cache_item)
                debug('Fetcher  stored')
                return data

        def yoweb(self, kind, tail, max_age):
                url = 'http://%s.puzzlepirates.com/yoweb/%s%s' % (
                        self.ocean, kind, tail)
                return self.fetch(url, max_age)

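# SoupLog accumulates warning messages produced while scraping, so callers
# can report everything that looked odd about a page in one go.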
class SoupLog:
        def __init__(self):
                self.msgs = [ ]
        def msg(self, m):
                self.msgs.append(m)
        def soupm(self, obj, m):
                self.msg(m + '; in ' + `obj`)
        def needs_msgs(self, child_souplog):
                self.msgs += child_souplog.msgs
                child_souplog.msgs = [ ]

def soup_text(obj):
        str = ''.join(obj.findAll(text=True))
        return str.strip()

class SomethingSoupInfo(SoupLog):
        def __init__(self, kind, tail, max_age):
                SoupLog.__init__(self)
                html = fetcher.yoweb(kind, tail, max_age)
                self._soup = BeautifulSoup(html,
                        convertEntities=BeautifulSoup.HTML_ENTITIES
                        )

class PirateInfo(SomethingSoupInfo):
        # Public data members:
        #  pi.standings = { 'Treasure Haul': 0, ... }
        #       (value is an index into standingvals, 0 = Able .. 7 = Ultimate)
        #  pi.crew = (id, name)
        #  pi.flag = (id, name)
        #  pi.msgs = [ 'message describing problem with scrape' ]

        def __init__(self, pirate, max_age=300):
                SomethingSoupInfo.__init__(self,
                        'pirate.wm?target=', pirate, max_age)
                self._find_standings()
                self.crew = self._find_crewflag('crew',
                        '^/yoweb/crew/info\\.wm')
                self.flag = self._find_crewflag('flag',
                        '^/yoweb/flag/info\\.wm')

        def _find_standings(self):
                imgs = self._soup.findAll('img',
                        src=regexp.compile('/yoweb/images/stat.*'))
                # The cell beside each stat image looks like
                # "<something>/<standing>", optionally followed by
                # "(ocean-wide <standing>)"; capture the relevant standing word.
                re = regexp.compile(
u'\\s*\\S*/([-A-Za-z]+)\\s*$|\\s*\\S*/\\S*\\s*\\(ocean\\-wide(?:\\s|\\xa0)+([-A-Za-z]+)\\)\\s*$'
                        )
                standings = { }

                for skill in puzzles:
                        standings[skill] = [ ]

                skl = SoupLog()

                for img in imgs:
                        try: puzzle = img['alt']
                        except KeyError: continue

                        if not puzzle in puzzles:
                                skl.soupm(img, 'unknown puzzle: "%s"' % puzzle)
                                continue
                        key = img.findParent('td')
                        if key is None:
                                skl.soupm(img, 'puzzle at root! "%s"' % puzzle)
                                continue
                        valelem = key.findNextSibling('td')
                        if valelem is None:
                                skl.soupm(key, 'puzzle missing sibling "%s"'
                                        % puzzle)
                                continue
                        valstr = soup_text(valelem)
                        match = re.match(valstr)
                        if match is None:
                                skl.soupm(key, ('puzzle "%s" unparseable'+
                                        ' standing "%s"') % (puzzle, valstr))
                                continue
                        standing = match.group(match.lastindex)
                        standings[puzzle].append(standing)

                self.standings = { }

                for puzzle in puzzles:
                        sl = standings[puzzle]
                        if len(sl) > 1:
                                skl.msg('puzzle "%s" multiple standings %s' %
                                                (puzzle, `sl`))
                                continue
                        if not len(sl):
                                skl.msg('puzzle "%s" no standing found' % puzzle)
                                continue
                        standing = sl[0]
                        for i in range(0, len(standingvals)):
                                if standing == standingvals[i]:
                                        self.standings[puzzle] = i
                        if not puzzle in self.standings:
                                skl.msg('puzzle "%s" unknown standing "%s"' %
                                        (puzzle, standing))

                # propagate the scrape warnings if any standing was missing
                if not all([puzzle in self.standings for puzzle in puzzles]):
                        self.needs_msgs(skl)

        def _find_crewflag(self, cf, yoweb_re):
                things = self._soup.findAll('a', href=regexp.compile(yoweb_re))
                if len(things) != 1:
                        self.msg('zero or several %s id references found' % cf)
                        return None
                thing = things[0]
                id_re = '\\b%sid\\=(\\w+)$' % cf
                id_haystack = thing['href']
                match = regexp.compile(id_re).search(id_haystack)
                if match is None:
                        self.soupm(thing, ('incomprehensible %s id ref'+
                                ' (%s in %s)') % (cf, id_re, id_haystack))
                        return None
                name = soup_text(thing)
                return (match.group(1), name)

        def __str__(self):
                return `(self.crew, self.flag, self.standings, self.msgs)`

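# Example use, once the global fetcher has been set up as main() does
# (pirate name and values purely illustrative):
#   pi = PirateInfo('Somepirate')
#   print pi.standings.get('Gunning')  # e.g. 3, i.e. standingvals[3] == 'Master'
#   print pi.crew                      # e.g. ('1234567', 'Some Crew')
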
class CrewInfo(SomethingSoupInfo):
        # Public data members:
        #  ci.crew = [ ('Captain',        ['Pirate', ...]),
        #              ('Senior Officer', [...]),
        #               ... ]
        #  ci.msgs = [ 'message describing problem with scrape' ]

        def __init__(self, crewid, max_age=300):
                SomethingSoupInfo.__init__(self,
                        'crew/info.wm?crewid=', crewid, max_age)
                self._find_crew()

        def _find_crew(self):
                self.crew = []
                capts = self._soup.findAll('img',
                        src='/yoweb/images/crew-captain.png')
                if len(capts) != 1:
                        self.msg('crew members: no. of captain images != 1')
                        return
                tbl = capts[0]
                while not tbl.find('a', href=pirate_ref_re):
                        tbl = tbl.findParent('table')
                        if not tbl:
                                self.msg('crew members: cannot find table')
                                return
                current_rank_crew = None
                crew_rank_re = regexp.compile('/yoweb/images/crew')
                for row in tbl.contents:
                        # walk direct children only (like findAll(recursive=False))
                        if isinstance(row, unicode):
                                continue

                        is_rank = row.find('img', attrs={'src': crew_rank_re})
                        if is_rank:
                                rank = soup_text(row)
                                current_rank_crew = []
                                self.crew.append((rank, current_rank_crew))
                                continue
                        for cell in row.findAll('a', href=pirate_ref_re):
                                if current_rank_crew is None:
                                        self.soupm(cell, 'crew members: crew'
                                                ' before rank')
                                        continue
                                current_rank_crew.append(soup_text(cell))

        def __str__(self):
                return `(self.crew, self.msgs)`

class StandingsTable:
        def __init__(self, use_puzzles=puzzles):
                self._puzzles = use_puzzles
                self.s = ''

        def _pline(self, pirate, puzstrs):
                self.s += '%-*s' % (max_pirate_namelen, pirate)
                for v in puzstrs:
                        self.s += ' %-*.*s' % (5,5, v)
                self.s += '\n'

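        # Each cell is the first letter of the standing (lower-case below
        # Master), one '*' per two standing levels and a '+' for an odd
        # level: Able -> 'a', Master -> 'M*+', Grand-Master -> 'G**+',
        # Ultimate -> 'U***+'.  Unknown standings come out as '?'.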
        def _puzstr(self, pi, puzzle):
                if not isinstance(puzzle,list): puzzle = [puzzle]
                try: standing = max([pi.standings[p] for p in puzzle])
                except KeyError: return '?'
                c1 = standingvals[standing][0]
                if standing < 3: c1 = c1.lower() # 3 = Master
                hashes = '*' * (standing / 2)
                equals = '+' * (standing % 2)
                return c1 + hashes + equals

        def headings(self):
                # abbreviate puzzle names so they fit the 5-character columns
                def puzn_redact(name):
                        if isinstance(name,list):
                                return '/'.join(
                                        ["%.2s" % puzn_redact(n)
                                         for n in name])
                        spc = name.find(' ')
                        if spc < 0: return name
                        return name[0:min(4,spc)] + name[spc+1:]
                self._pline('', map(puzn_redact, self._puzzles))
        def literalline(self, line):
                self.s += line + '\n'
        def pirate(self, pirate):
                pi = PirateInfo(pirate, 600)
                puzstrs = [self._puzstr(pi,puz) for puz in self._puzzles]
                self._pline(pirate, puzstrs)

        def results(self):
                return self.s

def do_pirate(pirates, bu):
        print '{'
        for pirate in pirates:
                info = PirateInfo(pirate)
                print '%s: %s,' % (`pirate`, info)
        print '}'

def prep_crew_of(args, bu, max_age=300):
        if len(args) != 1: bu('crew-of takes one pirate name')
        pi = PirateInfo(args[0], max_age)
        return CrewInfo(pi.crew[0], max_age)

def do_crew_of(args, bu):
        ci = prep_crew_of(args, bu)
        print ci

def do_standings_crew_of(args, bu):
        ci = prep_crew_of(args, bu, 60)
        tab = StandingsTable()
        tab.headings()
        for (rank, members) in ci.crew:
                if not members: continue
                tab.literalline('%s:' % rank)
                for p in members:
                        tab.pirate(p)
        print tab.results()

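# main() parses the command line, dispatches to the do_<mode> function named
# by the first argument (hyphens become underscores), and sets up the global
# fetcher used by all of the scraping classes above.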
def main():
        global opts, fetcher

        pa = OptionParser(
'''usage: .../yoweb-scrape [OPTION...] ACTION [ARGS...]
actions:
 yoweb-scrape [--ocean OCEAN ...] pirate PIRATE
 yoweb-scrape [--ocean OCEAN ...] crew-of PIRATE
 yoweb-scrape [--ocean OCEAN ...] standings-crew-of PIRATE
''')
        ao = pa.add_option
        ao('-O','--ocean',dest='ocean', metavar='OCEAN',
                default='ice',
                help='select ocean OCEAN')
        ao('--cache-dir', dest='cache_dir', metavar='DIR',
                default='~/.yoweb-scrape-cache',
                help='cache yoweb pages in DIR')
        ao('-D','--debug', action='store_true', dest='debug', default=False,
                help='enable debugging output')
        ao('-q','--quiet', action='store_true', dest='quiet',
                help='suppress warning output')
        (opts,args) = pa.parse_args()

        if len(args) < 1:
                pa.error('need a mode argument')

        mode = args[0]
        # 'crew-of' -> do_crew_of; literal underscores in the mode name are
        # turned into '#' first so they cannot name a handler directly
        mode_fn_name = 'do_' + mode.replace('_','#').replace('-','_')
        try: mode_fn = globals()[mode_fn_name]
        except KeyError: pa.error('unknown mode "%s"' % mode)

        # fixed parameters
        opts.min_max_age = 60
        opts.expire_age = 3600

        if opts.cache_dir.startswith('~/'):
                opts.cache_dir = os.getenv('HOME') + opts.cache_dir[1:]

        fetcher = Fetcher(opts.ocean, opts.cache_dir)

        mode_fn(args[1:], pa.error)

main()