From ccd0343d131799985f18cf5ac6c3c963bed230aa Mon Sep 17 00:00:00 2001
From: "Sean B. Palmer"
Date: Tue, 17 May 2011 00:04:14 +0100
Subject: Added multilingual wikipedia search capability.

---
 modules/wikipedia.py | 33 ++++++++++++++++++++-------------
 1 file changed, 20 insertions(+), 13 deletions(-)

diff --git a/modules/wikipedia.py b/modules/wikipedia.py
index 30a23f3..4a4a11b 100755
--- a/modules/wikipedia.py
+++ b/modules/wikipedia.py
@@ -10,9 +10,9 @@ http://inamidst.com/phenny/
 import re, urllib
 import web
 
-wikiuri = 'http://en.wikipedia.org/wiki/%s'
-wikisearch = 'http://en.wikipedia.org/wiki/Special:Search?' \
-    + 'search=%s&fulltext=Search'
+wikiuri = 'http://%s.wikipedia.org/wiki/%s'
+# wikisearch = 'http://%s.wikipedia.org/wiki/Special:Search?' \
+#    + 'search=%s&fulltext=Search'
 
 r_tr = re.compile(r'(?ims)<tr[^>]*>.*?</tr>')
 r_paragraph = re.compile(r'(?ims)<p[^>]*>.*?</p>|<li[^>]*>.*?</li>')
@@ -59,30 +59,30 @@ def search(term):
       return uri[len('http://en.wikipedia.org/wiki/'):]
    else: return term
 
-def wikipedia(term, last=False):
+def wikipedia(term, language='en', last=False):
    global wikiuri
    if not '%' in term:
       if isinstance(term, unicode):
          t = term.encode('utf-8')
       else: t = term
       q = urllib.quote(t)
-      u = wikiuri % q
+      u = wikiuri % (language, q)
       bytes = web.get(u)
-   else: bytes = web.get(wikiuri % term)
+   else: bytes = web.get(wikiuri % (language, term))
    bytes = r_tr.sub('', bytes)
 
    if not last:
       r = r_redirect.search(bytes[:4096])
       if r:
          term = urllib.unquote(r.group(1))
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
 
    paragraphs = r_paragraph.findall(bytes)
 
    if not paragraphs:
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
 
    # Pre-process
@@ -115,7 +115,7 @@ def wikipedia(term, last=False):
    if not m:
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
 
    sentence = m.group(0)
@@ -130,14 +130,14 @@ def wikipedia(term, last=False):
        or ('or add a request for it' in sentence)):
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
 
    sentence = '"' + sentence.replace('"', "'") + '"'
    sentence = sentence.decode('utf-8').encode('utf-8')
    wikiuri = wikiuri.decode('utf-8').encode('utf-8')
    term = term.decode('utf-8').encode('utf-8')
-   return sentence + ' - ' + (wikiuri % term)
+   return sentence + ' - ' + (wikiuri % (language, term))
 
 def wik(phenny, input):
    origterm = input.groups()[1]
@@ -146,12 +146,19 @@ def wik(phenny, input):
    origterm = origterm.encode('utf-8')
 
    term = urllib.unquote(origterm)
+   language = 'en'
+   if term.startswith(':') and (' ' in term):
+      a, b = term.split(' ', 1)
+      a = a.lstrip(':')
+      if a.isalpha():
+         language, term = a, b
    term = term[0].upper() + term[1:]
    term = term.replace(' ', '_')
 
-   try: result = wikipedia(term)
+   try: result = wikipedia(term, language)
    except IOError:
-      error = "Can't connect to en.wikipedia.org (%s)" % (wikiuri % term)
+      args = (language, wikiuri % (language, term))
+      error = "Can't connect to %s.wikipedia.org (%s)" % args
       return phenny.say(error)
 
    if result is not None:
-- 
cgit v1.2.3-1-g7c22
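For reference, a minimal sketch of how the new language prefix behaves, based on the
parsing added in the wik() hunk above. The helper name parse_language and the sample
inputs are illustrative only and not part of the patch; the module itself does this
parsing inline:

    def parse_language(term, default='en'):
        # A leading ':xx ' token picks the wiki subdomain; otherwise the
        # default language is kept, mirroring the logic added to wik().
        language = default
        if term.startswith(':') and (' ' in term):
            a, b = term.split(' ', 1)
            a = a.lstrip(':')
            if a.isalpha():
                language, term = a, b
        return language, term

    wikiuri = 'http://%s.wikipedia.org/wiki/%s'

    # ".wik :fr Zidane" -> ('fr', 'Zidane'); ".wik Zidane" -> ('en', 'Zidane')
    print(parse_language(':fr Zidane'))
    print(wikiuri % parse_language(':de Berlin'))

With this change, ".wik :de Berlin" would query de.wikipedia.org, while a plain
".wik Berlin" keeps the existing English default.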