3 # helper program for getting information from yppedia
5 # This is part of the YARRG website. YARRG is a tool and website
6 # for assisting players of Yohoho Puzzle Pirates.
8 # Copyright (C) 2009 Ian Jackson <ijackson@chiark.greenend.org.uk>
10 # This program is free software: you can redistribute it and/or modify
11 # it under the terms of the GNU Affero General Public License as
12 # published by the Free Software Foundation, either version 3 of the
13 # License, or (at your option) any later version.
15 # This program is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 # GNU Affero General Public License for more details.
20 # You should have received a copy of the GNU Affero General Public License
21 # along with this program. If not, see <http://www.gnu.org/licenses/>.
23 # Yohoho and Puzzle Pirates are probably trademarks of Three Rings and
24 # are used without permission. This program is not endorsed or
25 # sponsored by Three Rings.
28 yppedia-ocean-scraper is part of ypp-sc-tools Copyright (C) 2009 Ian Jackson
29 This program comes with ABSOLUTELY NO WARRANTY; this is free software,
30 and you are welcome to redistribute it under certain conditions. For
31 details, read the top of the yppedia-ocean-scraper file.
# Restore the default SIGINT disposition so that Ctrl-C kills the script
# immediately instead of raising KeyboardInterrupt somewhere inside
# urllib/BeautifulSoup.
signal.signal(signal.SIGINT, signal.SIG_DFL)
42 from optparse import OptionParser
43 from BeautifulSoup import BeautifulSoup
# Make sys.stdout accept unicode by encoding writes in the locale's
# preferred encoding.  codecs.EncodedFile would normally also *decode*
# data written to it; null_decode below disables that step so that
# already-encoded byte strings pass through unchanged.
# (See the Debian bug / livejournal links referenced nearby.)
sys.stdout = codecs.EncodedFile(sys.stdout, locale.getpreferredencoding())
def null_decode(input, errors='strict'):
    # Identity "decoder" following the codecs protocol: return the
    # input unchanged and report that all of it was consumed.
    return input, len(input)
sys.stdout.decode = null_decode
55 # http://ewx.livejournal.com/457086.html?thread=3016574
56 # http://ewx.livejournal.com/457086.html?thread=3016574
58 # See also Debian #415968.
class YarrgURLopener(urllib.FancyURLopener):
    # URL opener whose User-Agent ("version") string is obtained by
    # asking the local ./database-info-fetch helper to map urllib's
    # default version string, so yppedia sees an identifiable client.
    base_version= urllib.URLopener().version
    proc= subprocess.Popen(
        ["./database-info-fetch", "useragentstringmap",
        base_version, "manual islands/topology fetch"],
        # NOTE(review): several lines of this Popen call are missing
        # from this excerpt (including its closing parenthesis) --
        # confirm against the full file.
        stdout=subprocess.PIPE,
    # Helper's single output line (trailing newline stripped) becomes
    # the opener's User-Agent; it must have exited successfully.
    version = proc.communicate()[0].rstrip('\n');
    assert(proc.returncode is not None and proc.returncode == 0)
# Install as the opener used by urllib.urlopen().
urllib._urlopener = YarrgURLopener()
# NOTE(review): tail of a debug() helper whose "def" line is missing
# from this excerpt; `v` in backquotes is Python 2 shorthand for
# repr(v).
print >>sys.stderr, k,`v`
# Build the URL (or local file path) of the wiki-source ("action=edit")
# page for this ocean's map template, fetch it, and parse it.
url_base = 'index.php?title=Template:Map:%s_Ocean&action=edit'
url_base = url_base % urllib.quote(ocean,'')
if opts.localhtml is None:
    url = ('http://yppedia.puzzlepirates.com/' + url_base)
    dataf = urllib.urlopen(url)
    debug('fetched',dataf)
    # NOTE(review): the "else:" introducing this branch is missing from
    # this excerpt; this line reads a previously saved local copy of
    # the page instead of fetching over HTTP.
    dataf = file(opts.localhtml + '/' + url_base, 'r')
soup = BeautifulSoup(dataf)
# Pre-compiled patterns for classifying yppedia link titles:
#   title_arch_re matches "<Arch> Archipelago (<Ocean>)" page titles,
#   title_any_re  matches any "<Name> (<Ocean>)" page title,
#   href_img_re   matches image (.png) hrefs, which are skipped later.
# Raw strings are used so every escape is spelled the same way; the
# original mixed '\\(' and '\(' forms, which are equivalent but
# confusingly inconsistent.
title_arch_re = regexp.compile(r'(\S.*\S) Archipelago \((\S+)\)$')
title_any_re = regexp.compile(r'(\S.*\S) \((\S+)\)$')
href_img_re = regexp.compile(r'\.png$')
def title_arch_info(t):
    # Parse a link title of the form "<Arch> Archipelago (<Ocean>)".
    # Returns the pair (arch, ocean), or (None, None) if t is absent
    # or does not look like an archipelago page title.
    if t is None: return (None,None)
    m = title_arch_re.match(t)
    if not m: return (None,None)
    # As excerpted the function fell off the end here, returning None
    # even on a successful match; return the two captured groups as
    # the documented (arch, ocean) pair.
    return (m.group(1), m.group(2))
def title_arch_ok(t):
    # Predicate used as a BeautifulSoup attribute filter: true iff the
    # link title t names an archipelago in the ocean being scraped.
    (a,o) = title_arch_info(t)
    if o is None: return False
    # As excerpted the function fell off the end here (returning None,
    # i.e. never a true value); restore the check that the title's
    # ocean is the one we are scraping.  TODO confirm against the
    # full file.
    return o == ocean
# The wiki source of the map template lives in the edit page's
# <textarea>; the substitutions below undo its HTML entity escaping.
ta = soup.find('textarea')
# NOTE(review): the lines between finding the textarea and these
# substitutions are missing from this excerpt; s is presumably the
# textarea's text content.  The four patterns below look like they
# were originally the entities &lt; &gt; &quot; &amp; but have been
# garbled in extraction -- confirm against the full file before
# relying on them.
s = regexp.sub(r'\<\;', '<', s)
s = regexp.sub(r'\>\;', '>', s)
s = regexp.sub(r'\"\;', '"', s)
s = regexp.sub(r'\&\;', '&', s)
# The page body proper lives inside <div id="content">; search within
# it rather than the whole document.
content = soup.find('div', attrs = {'id': 'content'})
def findall_title_arch_ok(t):
    # Collect every <a> element under t whose title attribute passes
    # the title_arch_ok predicate (i.e. names an archipelago).
    anchors = t.findAll('a', attrs = {'title': title_arch_ok})
    return anchors
def is_archestable(u):
    # True iff u is a <table> element containing more than one
    # archipelago link -- i.e. it is the table of archipelagoes.
    if u.name != 'table':
        return False
    arch_links = findall_title_arch_ok(u)
    return len(arch_links) > 1
# Locate the bordered table holding the archipelago columns and
# flatten its rows into a list of <td> cells, one per archipelago.
archestable = content.findChild('table', attrs={'border':'1'})
debug('at',archestable)
# NOTE(review): the initialisation of archsoups (presumably
# "archsoups = []") is on a line missing from this excerpt.
for row in archestable.findAll('tr',recursive=False):
    archsoups += row.findAll('td',recursive=False)
debug('ac',archsoups)
# NOTE(review): these two lines are the tails of two helper functions
# whose "def" lines are missing from this excerpt.  The first looks
# like a predicate for cells whose text mentions "Large"; the second
# climbs from a node to its enclosing element that satisfies
# is_island.  Confirm against the full file.
return len(v.findAll(text = regexp.compile('.*Large'))) > 0
return u.findParent(is_island)
# Walk each archipelago cell: the first link names the archipelago
# itself, the remaining links are its islands.
for arch in archsoups:
    links = arch.findAll('a', href=True)
    if not links: continue
    # First link's title gives the archipelago name and its ocean.
    (a,o) = title_arch_info(links[0]['title'])
    debug('arch-ocean', (a,o))
    # NOTE(review): at least one line is missing here in this excerpt,
    # probably a check that o == ocean.
    assert(a not in arches)
    # NOTE(review): arches[a] is presumably initialised (to []) on a
    # missing line here.
    for link in links[1:]:
        # Image links inside the cell are not islands; skip them.
        if href_img_re.search(link['href']): continue
        m = title_any_re.match(link['title'])
        # Every island title must belong to the ocean being scraped.
        assert(m.group(2) == ocean)
        # NOTE(review): island is presumably m.group(1), assigned and
        # appended to arches[a] on lines missing from this excerpt.
        debug('island', island)
# NOTE(review): fragment of the output loop; the surrounding code
# (iteration over arches and the print statements) is missing from
# this excerpt.
for island in arches[a]:
# Command-line handling.  NOTE(review): the OptionParser construction
# line ("pa = OptionParser(") and the "ao = pa.add_option" binding are
# missing from this excerpt.
'''usage: .../yppedia-ocean-scraper [--debug] [--chart] OCEAN''')
ao('--chart', action='store_true', dest='chart',
   help='print chart source rather than arch/island info')
ao('--debug', action='count', dest='debug', default=0,
   help='enable debugging output')
ao('--local-html-dir', action='store', dest='localhtml',
   help='get yppedia pages from local directory LOCALHTML'+
   ' instead of via HTTP')
(opts,args) = pa.parse_args()
# Print the licence banner, then insist on an ocean argument.
# NOTE(review): the condition guarding this error path (presumably
# "if len(args) != 1:") is on a line missing from this excerpt.
print >>sys.stderr, copyright_info
pa.error('need an ocean argument')