# helper program for getting information from yppedia
-# This is part of ypp-sc-tools, a set of third-party tools for assisting
-# players of Yohoho Puzzle Pirates.
+# This is part of the YARRG website. YARRG is a tool and website
+# for assisting players of Yohoho Puzzle Pirates.
#
# Copyright (C) 2009 Ian Jackson <ijackson@chiark.greenend.org.uk>
#
# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# GNU Affero General Public License for more details.
#
-# You should have received a copy of the GNU General Public License
+# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Yohoho and Puzzle Pirates are probably trademarks of Three Rings and
import sys
import os
import urllib
-import urllib2
import re as regexp
+import subprocess
from optparse import OptionParser
from BeautifulSoup import BeautifulSoup
+
+# For fuck's sake!
+import codecs
+import locale
+def fix_stdout():
+ sys.stdout = codecs.EncodedFile(sys.stdout, locale.getpreferredencoding())
+ def null_decode(input, errors='strict'):
+ return input, len(input)
+ sys.stdout.decode = null_decode
+# From
+# http://ewx.livejournal.com/457086.html?thread=3016574
+# lightly modified.
+# See also Debian #415968.
+fix_stdout()
+
+
+# User agent:
+class YarrgURLopener(urllib.FancyURLopener):
+ base_version= urllib.URLopener().version
+ proc= subprocess.Popen(
+ ["./database-info-fetch", "useragentstringmap",
+ base_version, "manual islands/topology fetch"],
+ shell=False,
+ stderr=None,
+ stdout=subprocess.PIPE,
+ )
+ version = proc.communicate()[0].rstrip('\n');
+ assert(proc.returncode is not None and proc.returncode == 0)
+urllib._urlopener = YarrgURLopener()
+
ocean = None
soup = None
opts = None
def fetch():
global soup
- url = ('http://yppedia.puzzlepirates.com/%s_Ocean' %
- urllib.quote(ocean,''))
+ if opts.chart:
+ url_base = 'index.php?title=Template:Map:%s_Ocean&action=edit'
+ else:
+ url_base = '%s_Ocean'
+ url = ('http://yppedia.puzzlepirates.com/' +
+ (url_base % urllib.quote(ocean,'')))
debug('fetching',url)
- dataf = urllib2.urlopen(url)
+ dataf = urllib.urlopen(url)
debug('fetched',dataf)
soup = BeautifulSoup(dataf)
-title_arch_re = regexp.compile('(\\S+) Archipelago \\((\\S+)\\)$')
+title_arch_re = regexp.compile('(\\S.*\\S) Archipelago \\((\\S+)\\)$')
title_any_re = regexp.compile('(\\S.*\\S) \((\\S+)\\)$')
href_img_re = regexp.compile('\\.png$')
if o is None: return False
return o == ocean
-def parse():
- firstarch = soup.find('a', attrs = {'title': title_arch_ok})
- debug('fa',firstarch)
+def parse_chart():
+ ta = soup.find('textarea')
+ debug('ta',ta)
+ s = ta.string
+ debug('s',s)
+ s = regexp.sub(r'\&lt\;', '<', s)
+ s = regexp.sub(r'\&gt\;', '>', s)
+ s = regexp.sub(r'\&quot\;', '"', s)
+ s = regexp.sub(r'\&amp\;', '&', s)
+ debug('s',s)
+ return s
+
+def parse_ocean():
+ content = soup.find('div', attrs = {'id': 'content'})
def findall_title_arch_ok(t):
return t.findAll('a', attrs = {'title': title_arch_ok})
if u.name != 'table': return False
return len(findall_title_arch_ok(u)) > 1
- archestable = firstarch.findParent('table', attrs={'border':'1'})
+ archestable = content.findChild('table', attrs={'border':'1'})
debug('at',archestable)
archsoups = []
debug('links',links)
if not links: continue
(a,o) = title_arch_info(links[0]['title'])
+ debug('arch-ocean', (a,o))
assert(o == ocean)
- debug('arch', a)
assert(a not in arches)
isles = []
for link in links[1:]:
global opts
pa = OptionParser(
- '''usage: .../yppedia-ocean-scraper [--debug] OCEAN''')
+'''usage: .../yppedia-ocean-scraper [--debug] [--chart] OCEAN''')
ao = pa.add_option
+
+ ao('--chart', action='store_true', dest='chart',
+ help='print chart source rather than arch/island info')
ao('--debug', action='count', dest='debug', default=0,
help='enable debugging output')
ocean = args[0]
fetch()
- parse()
- output()
+ if opts.chart:
+ print parse_chart()
+ else:
+ parse_ocean()
+ output()
main()