chiark / gitweb /
routesearch: fix up web performance parameterisation
[ypp-sc-tools.db-test.git] / yarrg / yppedia-ocean-scraper
index 401de9bd6925fe60b904aa5f09b2791f99fc76a7..68efa38f6781980c63308a55b55b4334195873d5 100755 (executable)
@@ -37,11 +37,42 @@ signal.signal(signal.SIGINT, signal.SIG_DFL)
 import sys
 import os
 import urllib
-import urllib2
 import re as regexp
+import subprocess
 from optparse import OptionParser
 from BeautifulSoup import BeautifulSoup
 
+
+# Workaround: force stdout to the locale's preferred encoding.
+import codecs
+import locale
+def fix_stdout():
+    sys.stdout = codecs.EncodedFile(sys.stdout, locale.getpreferredencoding())
+    def null_decode(input, errors='strict'):
+        return input, len(input)
+    sys.stdout.decode = null_decode
+# From
+#  http://ewx.livejournal.com/457086.html?thread=3016574
+#
+# lightly modified.
+# See also Debian #415968.
+fix_stdout()
+
+
+# User agent:
+class YarrgURLopener(urllib.FancyURLopener):
+       base_version= urllib.URLopener().version
+       proc= subprocess.Popen(
+               ["./database-info-fetch", "useragentstringmap",
+                base_version, "manual islands/topology fetch"],
+               shell=False,
+               stderr=None,
+               stdout=subprocess.PIPE,
+               )
+       version = proc.communicate()[0].rstrip('\n');
+       assert(proc.returncode is not None and proc.returncode == 0)
+urllib._urlopener = YarrgURLopener()
+
 ocean = None
 soup = None
 opts = None
@@ -53,15 +84,19 @@ def debug(k,v):
 
 def fetch():
        global soup
-       url = ('http://yppedia.puzzlepirates.com/%s_Ocean' %
-               urllib.quote(ocean,''))
+       if opts.chart:
+               url_base = 'index.php?title=Template:Map:%s_Ocean&action=edit'
+       else:
+               url_base = '%s_Ocean'
+       url = ('http://yppedia.puzzlepirates.com/' +
+                       (url_base % urllib.quote(ocean,'')))
        debug('fetching',url)
-       dataf = urllib2.urlopen(url)
+       dataf = urllib.urlopen(url)
        debug('fetched',dataf)
        soup = BeautifulSoup(dataf)
 
 
-title_arch_re = regexp.compile('(\\S+) Archipelago \\((\\S+)\\)$')
+title_arch_re = regexp.compile('(\\S.*\\S) Archipelago \\((\\S+)\\)$')
 title_any_re = regexp.compile('(\\S.*\\S) \((\\S+)\\)$')
 href_img_re = regexp.compile('\\.png$')
 
@@ -78,9 +113,20 @@ def title_arch_ok(t):
        if o is None: return False
        return o == ocean
 
-def parse():
-       firstarch = soup.find('a', attrs = {'title': title_arch_ok})
-       debug('fa',firstarch)
+def parse_chart():
+       ta = soup.find('textarea')
+       debug('ta',ta)
+       s = ta.string
+       debug('s',s)
+       s = regexp.sub(r'\&lt\;', '<', s)
+       s = regexp.sub(r'\&gt\;', '>', s)
+       s = regexp.sub(r'\&quot\;', '"', s)
+       s = regexp.sub(r'\&amp\;', '&', s)
+       debug('s',s)
+       return s
+
+def parse_ocean():
+       content = soup.find('div', attrs = {'id': 'content'})
 
        def findall_title_arch_ok(t):
                return t.findAll('a', attrs = {'title': title_arch_ok})
@@ -89,7 +135,7 @@ def parse():
                if u.name != 'table': return False
                return len(findall_title_arch_ok(u)) > 1
 
-       archestable = firstarch.findParent('table', attrs={'border':'1'})
+       archestable = content.findChild('table', attrs={'border':'1'})
        debug('at',archestable)
 
        archsoups = []
@@ -107,8 +153,8 @@ def parse():
                debug('links',links)
                if not links: continue
                (a,o) = title_arch_info(links[0]['title'])
+               debug('arch-ocean', (a,o))
                assert(o == ocean)
-               debug('arch', a)
                assert(a not in arches)
                isles = []
                for link in links[1:]:
@@ -136,8 +182,11 @@ def main():
        global opts
 
        pa = OptionParser(
-               '''usage: .../yppedia-ocean-scraper [--debug] OCEAN''')
+'''usage: .../yppedia-ocean-scraper [--debug] [--chart] OCEAN''')
        ao = pa.add_option
+
+       ao('--chart', action='store_true', dest='chart',
+               help='print chart source rather than arch/island info')
        ao('--debug', action='count', dest='debug', default=0,
                help='enable debugging output')
 
@@ -148,7 +197,10 @@ def main():
        ocean = args[0]
 
        fetch()
-       parse()
-       output()
+       if opts.chart:
+               print parse_chart()
+       else:
+               parse_ocean()
+               output()
 
 main()