X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~yarrgweb/git?p=ypp-sc-tools.db-test.git;a=blobdiff_plain;f=yarrg%2Fyppedia-ocean-scraper;h=d55f7acab83d7d0a03ae21655059e4277e1e9068;hp=30d0c4a8de8d895292879a074a6bf654ecdec4c5;hb=b14effcf3ea7900eb4198f6e50112da751f29add;hpb=efeac1885cd0211754b34e173a5efb74b6cbedb2

diff --git a/yarrg/yppedia-ocean-scraper b/yarrg/yppedia-ocean-scraper
index 30d0c4a..d55f7ac 100755
--- a/yarrg/yppedia-ocean-scraper
+++ b/yarrg/yppedia-ocean-scraper
@@ -2,22 +2,22 @@
 
 # helper program for getting information from yppedia
 
-# This is part of ypp-sc-tools, a set of third-party tools for assisting
-# players of Yohoho Puzzle Pirates.
+# This is part of the YARRG website.  YARRG is a tool and website
+# for assisting players of Yohoho Puzzle Pirates.
 #
 # Copyright (C) 2009 Ian Jackson <ijackson@chiark.greenend.org.uk>
 #
 # This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
+# GNU Affero General Public License for more details.
 #
-# You should have received a copy of the GNU General Public License
+# You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 #
 # Yohoho and Puzzle Pirates are probably trademarks of Three Rings and
@@ -37,11 +37,42 @@ signal.signal(signal.SIGINT, signal.SIG_DFL)
 import sys
 import os
 import urllib
-import urllib2
 import re as regexp
+import subprocess
 from optparse import OptionParser
 from BeautifulSoup import BeautifulSoup
 
+
+# For fuck's sake!
+import codecs
+import locale
+def fix_stdout():
+    sys.stdout = codecs.EncodedFile(sys.stdout, locale.getpreferredencoding())
+    def null_decode(input, errors='strict'):
+        return input, len(input)
+    sys.stdout.decode = null_decode
+# From
+#  http://ewx.livejournal.com/457086.html?thread=3016574
+#  http://ewx.livejournal.com/457086.html?thread=3016574
+# lightly modified.
+# See also Debian #415968.
+fix_stdout()
+
+
+# User agent:
+class YarrgURLopener(urllib.FancyURLopener):
+    base_version= urllib.URLopener().version
+    proc= subprocess.Popen(
+        ["./database-info-fetch", "useragentstringmap",
+         base_version, "manual islands/topology fetch"],
+        shell=False,
+        stderr=None,
+        stdout=subprocess.PIPE,
+        )
+    version = proc.communicate()[0].rstrip('\n');
+    assert(proc.returncode is not None and proc.returncode == 0)
+urllib._urlopener = YarrgURLopener()
+
 ocean = None
 soup = None
 opts = None
@@ -60,7 +91,7 @@ def fetch():
     url = ('http://yppedia.puzzlepirates.com/' +
            (url_base % urllib.quote(ocean,'')))
     debug('fetching',url)
-    dataf = urllib2.urlopen(url)
+    dataf = urllib.urlopen(url)
     debug('fetched',dataf)
     soup = BeautifulSoup(dataf)
 
@@ -89,13 +120,13 @@ def parse_chart():
     debug('s',s)
     s = regexp.sub(r'\&lt\;', '<', s)
     s = regexp.sub(r'\&gt\;', '>', s)
+    s = regexp.sub(r'\&quot\;', '"', s)
     s = regexp.sub(r'\&amp\;', '&', s)
     debug('s',s)
     return s
 
 def parse_ocean():
-    firstarch = soup.find('a', attrs = {'title': title_arch_ok})
-    debug('fa',firstarch)
+    content = soup.find('div', attrs = {'id': 'content'})
 
     def findall_title_arch_ok(t):
        return t.findAll('a', attrs = {'title': title_arch_ok})
@@ -104,7 +135,7 @@
        if u.name != 'table': return False
        return len(findall_title_arch_ok(u)) > 1
 
-    archestable = firstarch.findParent('table', attrs={'border':'1'})
+    archestable = content.findChild('table', attrs={'border':'1'})
     debug('at',archestable)
 
     archsoups = []
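
The fix_stdout() hunk above works around a Python 2 quirk: when stdout is redirected to a pipe or file, sys.stdout.encoding is unset and printing a non-ASCII unicode string raises UnicodeEncodeError. The following is a minimal, self-contained sketch of the problem and a common workaround, using codecs.getwriter rather than the codecs.EncodedFile wrapper the patch installs; the island name is invented for illustration.

# Sketch only (Python 2): the stdout encoding problem fix_stdout() avoids.
# codecs.getwriter is a simpler stand-in here for the EncodedFile wrapper
# used in the patch; the island name below is made up.
import sys
import codecs
import locale

sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)

island = u'Isla A\u00f1il'   # hypothetical island name with a non-ASCII char
print island                 # safe even when stdout is a pipe, e.g. `| cat`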
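
The YarrgURLopener hunk relies on a Python 2 urllib detail: urllib.urlopen() goes through the module-level _urlopener object, and FancyURLopener takes its User-Agent header from the class's version attribute. Here is a minimal sketch of that mechanism; the ExampleOpener name and version string are invented, whereas the patch builds its string by running ./database-info-fetch.

# Sketch only (Python 2): replacing urllib._urlopener changes the
# User-Agent that urllib.urlopen() sends.  ExampleOpener and its version
# string are illustrative, not the patch's actual values.
import urllib

class ExampleOpener(urllib.FancyURLopener):
    # URLopener's constructor installs ('User-Agent', self.version),
    # so overriding the class attribute is enough.
    version = 'example-ocean-scraper/0.1 (' + urllib.URLopener().version + ')'

urllib._urlopener = ExampleOpener()

f = urllib.urlopen('http://yppedia.puzzlepirates.com/')
print f.info().getheader('Content-Type')
f.close()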
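
The parse_ocean() change stops locating the archipelago table via the first archipelago link's parent and instead looks it up as a child of the page's div with id "content". A minimal sketch of that lookup with the BeautifulSoup 3 API the script imports, run against a tiny hand-written document rather than a fetched yppedia page:

# Sketch only: the div#content -> child table lookup used by the new
# parse_ocean(), against an invented inline document (BeautifulSoup 3 API).
from BeautifulSoup import BeautifulSoup

html = '''
<div id="content">
  <h1>Some Ocean</h1>
  <table border="1"><tr><td>archipelago grid goes here</td></tr></table>
</div>
'''

soup = BeautifulSoup(html)
content = soup.find('div', attrs={'id': 'content'})
archestable = content.findChild('table', attrs={'border': '1'})
print archestable.td.string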