chiark / gitweb /
add .gitignore
[ypp-sc-tools.db-live.git] / yoweb-scrape
1 #!/usr/bin/python
2
3 import signal
4 signal.signal(signal.SIGINT, signal.SIG_DFL)
5
6 import os
7 import time
8 import urllib
9 import urllib2
10 import errno
11 import sys
12 import re as regexp
13 from optparse import OptionParser
14
15 from BeautifulSoup import BeautifulSoup
16
# Parsed command-line options (optparse Values); populated in main().
opts = None

# Every duty/skill name that can appear on a pirate's yoweb page.
duties = ('Swordfighting/Bilging/Sailing/Rigging/Navigating'+
        '/Battle Navigation/Gunning/Carpentry/Rumble/Treasure Haul'+
        '/Drinking/Spades/Hearts/Treasure Drop/Poker/Distilling'+
        '/Alchemistry/Shipwrightery/Blacksmithing/Foraging').split('/')

# Standing names in ascending order; PirateInfo stores the index of a
# pirate's standing in this list.
standingvals = ('Able/Distinguished/Respected/Master/Renowned'+
                '/Grand-Master/Legendary/Ultimate').split('/')

# Matches href values linking to a pirate's own yoweb page.
pirate_ref_re = regexp.compile('^/yoweb/pirate\\.wm')
28
def debug(m):
        """Write a diagnostic line to stderr, only when --debug is on."""
        if not opts.debug:
                return
        print >>sys.stderr, m
32
33 class Fetcher:
34         def __init__(self, ocean, cachedir):
35                 debug('Fetcher init %s' % cachedir)
36                 self.ocean = ocean
37                 self.cachedir = cachedir
38                 try: os.mkdir(cachedir)
39                 except (OSError,IOError), oe:
40                         if oe.errno != errno.EEXIST: raise
41                 self._cache_scan(time.time())
42
43         def _cache_scan(self, now):
44                 # returns list of ages, unsorted
45                 ages = []
46                 debug('Fetcher   scan_cache')
47                 for leaf in os.listdir(self.cachedir):
48                         if not leaf.startswith('#'): continue
49                         path = self.cachedir + '/' + leaf
50                         try: s = os.stat(path)
51                         except (OSError,IOError), oe:
52                                 if oe.errno != errno.ENOENT: raise
53                                 continue
54                         age = now - s.st_mtime
55                         if age > opts.max_age:
56                                 debug('Fetcher    expire %d %s' % (age, path))
57                                 try: os.remove(path)
58                                 except (OSError,IOError), oe:
59                                         if oe.errno != errno.ENOENT: raise
60                                 continue
61                         ages.append(age)
62                 return ages
63
64         def _rate_limit_cache_clean(self, now):
65                 ages = self._cache_scan(now)
66                 ages.sort()
67                 debug('Fetcher   ages ' + `ages`)
68                 min_age = 1
69                 need_wait = 0
70                 for age in ages:
71                         if age < min_age:
72                                 debug('Fetcher   morewait min=%d age=%d' %
73                                         (min_age, age))
74                                 need_wait = max(need_wait, min_age - age)
75                         min_age *= 2
76                         min_age += 1
77                 if need_wait > 0:
78                         debug('Fetcher   wait %d' % need_wait)
79                         time.sleep(need_wait)
80
81         def fetch(self, url):
82                 debug('Fetcher fetch %s' % url)
83                 cache_corename = urllib.quote_plus(url)
84                 cache_item = "%s/#%s#" % (self.cachedir, cache_corename)
85                 try: f = file(cache_item, 'r')
86                 except (OSError,IOError), oe:
87                         if oe.errno != errno.ENOENT: raise
88                         f = None
89                 now = time.time()
90                 if f is not None:
91                         s = os.fstat(f.fileno())
92                         if now > s.st_mtime + opts.max_age:
93                                 debug('Fetcher  stale')
94                                 f = None
95                 if f is not None:
96                         data = f.read()
97                         f.close()
98                         debug('Fetcher  cached')
99                         return data
100
101                 debug('Fetcher  fetch')
102                 self._rate_limit_cache_clean(now)
103
104                 stream = urllib2.urlopen(url)
105                 data = stream.read()
106                 cache_tmp = "%s/#%s~%d#" % (
107                         self.cachedir, cache_corename, os.getpid())
108                 f = file(cache_tmp, 'w')
109                 f.write(data)
110                 f.close()
111                 os.rename(cache_tmp, cache_item)
112                 debug('Fetcher  stored')
113                 return data
114
115         def yoweb(self, kind, tail):
116                 url = 'http://%s.puzzlepirates.com/yoweb/%s%s' % (
117                         self.ocean, kind, tail)
118                 return self.fetch(url)
119
class SoupLog:
        """Accumulates messages describing problems found while scraping."""
        def __init__(self):
                self.msgs = []
        def msg(self, m):
                """Record one problem message."""
                self.msgs.append(m)
        def soupm(self, obj, m):
                """Record m, annotated with the soup object it refers to."""
                self.msg(m + '; in ' + repr(obj))
        def needs_msgs(self, child_souplog):
                """Adopt (and clear) all messages gathered by child_souplog."""
                self.msgs.extend(child_souplog.msgs)
                child_souplog.msgs = []
130
def soup_text(obj):
        """Return all text contained in soup element obj, concatenated and
        stripped of leading/trailing whitespace.

        (Renamed the local: the original shadowed the builtin `str`.)"""
        text = ''.join(obj.findAll(text=True))
        return text.strip()
134
class SomethingSoupInfo(SoupLog):
        """Base class: fetch one yoweb page and parse it into self.soup."""
        def __init__(self, kind, tail):
                SoupLog.__init__(self)
                page = fetcher.yoweb(kind, tail)
                self.soup = BeautifulSoup(
                        page,
                        convertEntities=BeautifulSoup.HTML_ENTITIES)
142
class PirateInfo(SomethingSoupInfo):
        # Public data members:
        #  pi.standings = { 'Treasure Haul': 3, ... }
        #       (value is an index into standingvals, not the name)
        #  pi.crew = (id, name) or None
        #  pi.flag = (id, name) or None
        #  pi.msgs = [ 'message describing problem with scrape' ]

        def __init__(self, pirate):
                SomethingSoupInfo.__init__(self,
                        'pirate.wm?target=', pirate)
                self._find_standings()
                self.crew = self._find_crewflag('crew',
                        '^/yoweb/crew/info\\.wm')
                self.flag = self._find_crewflag('flag',
                        '^/yoweb/flag/info\\.wm')

        def _find_standings(self):
                # Each duty appears as a stat image whose alt text names the
                # duty; the standing is the text of the following <td>.
                imgs = self.soup.findAll('img',
                        src=regexp.compile('/yoweb/images/stat.*'))
                re = regexp.compile(
u'\\s*\\S*/([-A-Za-z]+)\\s*$|\\s*\\S*/\\S*\\s*\\(ocean\\-wide(?:\\s|\\xa0)+([-A-Za-z]+)\\)\\s*$'
                        )
                standings = { }

                for skill in duties:
                        standings[skill] = [ ]

                # Collect scrape problems separately; only propagated if a
                # duty ends up without a standing (see bottom).
                skl = SoupLog()

                for img in imgs:
                        try: duty = img['alt']
                        except KeyError: continue

                        if not duty in duties:
                                skl.soupm(img, 'unknown duty: "%s"' % duty)
                                continue
                        key = img.findParent('td')
                        if key is None:
                                skl.soupm(img, 'duty at root! "%s"' % duty)
                                continue
                        valelem = key.findNextSibling('td')
                        if valelem is None:
                                skl.soupm(key, 'duty missing sibling "%s"'
                                        % duty)
                                continue
                        valstr = soup_text(valelem)
                        match = re.match(valstr)
                        if match is None:
                                skl.soupm(key, ('duty "%s" unparseable'+
                                        ' standing "%s"') % (duty, valstr))
                                continue
                        standing = match.group(match.lastindex)
                        standings[duty].append(standing)

                self.standings = { }

                for duty in duties:
                        sl = standings[duty]
                        if len(sl) > 1:
                                skl.msg('duty "%s" multiple standings %s' %
                                                (duty, repr(sl)))
                                continue
                        if not sl:
                                skl.msg('duty "%s" no standing found' % duty)
                                continue
                        standing = sl[0]
                        # BUGFIX: the original looped over
                        # range(0, len(standingvals)-1), which skipped the
                        # last entry, so 'Ultimate' was never recognised.
                        if standing in standingvals:
                                self.standings[duty] = \
                                        standingvals.index(standing)
                        else:
                                skl.msg('duty "%s" unknown standing "%s"' %
                                        (duty, standing))

                # Only surface the detailed problems if some duty ended up
                # with no numeric standing at all.
                for duty in duties:
                        if not duty in self.standings:
                                self.needs_msgs(skl)
                                break

        def _find_crewflag(self, cf, yoweb_re):
                # Find the single <a> linking to a crew/flag info page and
                # extract (id, name); returns None (with a message) if the
                # link is absent, ambiguous, or unparseable.
                things = self.soup.findAll('a', href=regexp.compile(yoweb_re))
                if len(things) != 1:
                        self.msg('zero or several %s id references found' % cf)
                        return None
                thing = things[0]
                id_re = '\\b%sid\\=(\\w+)$' % cf
                id_haystack = thing['href']
                match = regexp.compile(id_re).search(id_haystack)
                if match is None:
                        self.soupm(thing, ('incomprehensible %s id ref'+
                                ' (%s in %s)') % (cf, id_re, id_haystack))
                        return None
                name = soup_text(thing)
                return (match.group(1), name)

        def __str__(self):
                return repr((self.crew, self.flag, self.standings, self.msgs))
239
class CrewInfo(SomethingSoupInfo):
        # Public data members:
        #  ci.crew = [ ('Captain',        ['Pirate', ...]),
        #              ('Senior Officer', [...]),
        #               ... ]
        #  ci.msgs = [ 'message describing problem with scrape' ]

        def __init__(self, crewid):
                # Fetch and parse /yoweb/crew/info.wm?crewid=<crewid>,
                # then scrape the member roster out of it.
                SomethingSoupInfo.__init__(self,
                        'crew/info.wm?crewid=', crewid)
                self._find_crew()

        def _find_crew(self):
                # Populate self.crew from the roster table.  The table is
                # located by finding the (unique) captain image and walking
                # up to the nearest enclosing <table> that contains pirate
                # links; any failure records a message and leaves
                # self.crew as scraped so far.
                self.crew = []
                capts = self.soup.findAll('img',
                        src='/yoweb/images/crew-captain.png')
                if len(capts) != 1:
                        self.msg('crew members: no. of captain images != 1')
                        return
                tbl = capts[0]
                while not tbl.find('a', href=pirate_ref_re):
                        tbl = tbl.findParent('table')
                        if not tbl:
                                self.msg('crew members: cannot find table')
                                return
                current_rank_crew = None
                crew_rank_re = regexp.compile('/yoweb/images/crew')
                for row in tbl.contents:
                        # findAll(recurse=False)
                        # Skip bare text nodes between table rows.
                        if isinstance(row, unicode):
                                continue

                        # A row containing a crew-rank image starts a new
                        # rank section; the row's text is the rank name.
                        is_rank = row.find('img', attrs={'src': crew_rank_re})
                        if is_rank:
                                rank = soup_text(row)
                                current_rank_crew = []
                                self.crew.append((rank, current_rank_crew))
                                continue
                        # Otherwise, each pirate link in the row belongs to
                        # the most recently seen rank.
                        for cell in row.findAll('a', href=pirate_ref_re):
                                if current_rank_crew is None:
                                        self.soupm(cell, 'crew members: crew'
                                                ' before rank')
                                        continue
                                current_rank_crew.append(soup_text(cell))

        def __str__(self):
                return `(self.crew, self.msgs)`
287
288 def do_pirate(pirates, bu):
289         print '{'
290         for pirate in pirates:
291                 info = PirateInfo(pirate)
292                 print '%s: %s,' % (`pirate`, info)
293         print '}'
294
def prep_crew_of(args, bu):
        """Return a CrewInfo for the crew of the one pirate named in args.

        bu is the usage-error callback (pa.error); it is invoked on bad
        arguments, or when the pirate has no detectable crew (previously
        this crashed with a TypeError on pi.crew[0])."""
        if len(args) != 1: bu('crew-of takes one pirate name')
        pi = PirateInfo(args[0])
        if pi.crew is None:
                bu('pirate "%s" has no crew' % args[0])
        return CrewInfo(pi.crew[0])
299
300 def do_crew_of(args, bu):
301         ci = prep_crew_of(args, bu)
302         print ci
303
304 #def do_dutytab_crew_of(pirates, badusage):
305 #       if len(pirates) != 1: badusage('dutytab-crew-of takes one pirate name')
306
def main():
        """Parse the command line, set up globals, dispatch to a do_* mode."""
        global opts, fetcher

        pa = OptionParser(
'''usage: .../yoweb-scrape [OPTION...] ACTION [ARGS...]
actions:
 yoweb-scrape [--ocean OCEAN ...] pirate PIRATE
 yoweb-scrape [--ocean OCEAN ...] crew-of PIRATE
 yoweb-scrape [--ocean OCEAN ...] dutytab-crew-of PIRATE
''')
        ao = pa.add_option
        ao('-O','--ocean',dest='ocean', metavar='OCEAN',
                default='ice',
                help='select ocean OCEAN')
        ao('--cache-dir', dest='cache_dir', metavar='DIR',
                default='~/.yoweb-scrape-cache',
                help='cache yoweb pages in DIR')
        ao('-D','--debug', action='store_true', dest='debug', default=False,
                help='enable debugging output')
        ao('-q','--quiet', action='store_true', dest='quiet',
                help='suppress warning output')
        (opts,args) = pa.parse_args()

        if len(args) < 1:
                pa.error('need a mode argument')

        mode = args[0]
        # Map e.g. "crew-of" -> do_crew_of.  Replacing '_' with '#' first
        # guarantees a literal underscore in a mode name can never match.
        mode_fn_name = 'do_' + mode.replace('_','#').replace('-','_')
        try: mode_fn = globals()[mode_fn_name]
        except KeyError: pa.error('unknown mode "%s"' % mode)

        # fixed parameters
        opts.max_age = 240      # seconds before a cached page is stale

        # Use expanduser instead of hand-rolled HOME substitution: it also
        # handles '~user' and an unset HOME (previously a TypeError).
        opts.cache_dir = os.path.expanduser(opts.cache_dir)

        fetcher = Fetcher(opts.ocean, opts.cache_dir)

        mode_fn(args[1:], pa.error)
347
348 main()