
Commit

convert _wiki_request query_params from kwarg to regular argument
we may want to overhaul _wiki_request in the future to take meta arguments,
which we can't do if it assumes all kwargs are for the request. also, since
arguments passed to _wiki_request are currently all dict literals anyway, so the
kwarg style isn't especially helpful
goldsmith committed May 15, 2014
1 parent 90bb26b commit 2c7b0e2
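The change itself is mechanical: callers now pass the parameter dict as a single positional argument instead of splatting it into keyword arguments. A minimal sketch of the two calling styles (`make_request` is a stand-in helper, not part of the library):

def make_request(params):
  '''Stand-in for the real HTTP call; just echoes the parameters.'''
  return params

# old style: the dict is unpacked, so every keyword argument is assumed
# to be an API query parameter
def _wiki_request_old(**params):
  return make_request(params)

# new style: the dict travels as one positional argument, leaving the
# keyword namespace free for future meta arguments (e.g. a timeout)
def _wiki_request_new(params):
  return make_request(params)

search_params = {'list': 'search', 'srsearch': 'physics'}
assert _wiki_request_old(**search_params) == _wiki_request_new(search_params)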
Showing 4 changed files with 23 additions and 23 deletions.
2 changes: 1 addition & 1 deletion tests/geosearch_test.py
@@ -14,7 +14,7 @@ class _wiki_request(object):
calls = defaultdict(int)

@classmethod
- def __call__(cls, **params):
+ def __call__(cls, params):
cls.calls[params.__str__()] += 1
return mock_data["_wiki_request calls"][tuple(sorted(params.items()))]

2 changes: 1 addition & 1 deletion tests/page_test.py
@@ -7,7 +7,7 @@


# mock out _wiki_request
- def _wiki_request(**params):
+ def _wiki_request(params):
return mock_data["_wiki_request calls"][tuple(sorted(params.items()))]
wikipedia._wiki_request = _wiki_request

2 changes: 1 addition & 1 deletion tests/search_test.py
@@ -13,7 +13,7 @@ class _wiki_request(object):
calls = defaultdict(int)

@classmethod
- def __call__(cls, **params):
+ def __call__(cls, params):
cls.calls[params.__str__()] += 1
return mock_data["_wiki_request calls"][tuple(sorted(params.items()))]

40 changes: 20 additions & 20 deletions wikipedia/wikipedia.py
@@ -89,14 +89,14 @@ def search(query, results=10, suggestion=False):
search_params = {
'list': 'search',
'srprop': '',
- 'srlimit': results
+ 'srlimit': results,
+ 'limit': results,
+ 'srsearch': query
}
if suggestion:
search_params['srinfo'] = 'suggestion'
- search_params['srsearch'] = query
- search_params['limit'] = results

- raw_results = _wiki_request(**search_params)
+ raw_results = _wiki_request(search_params)

if 'error' in raw_results:
if raw_results['error']['info'] in ('HTTP request timed out.', 'Pool queue is full'):
@@ -142,7 +142,7 @@ def geosearch(latitude, longitude, title=None, results=10, radius=1000):
if title:
search_params['titles'] = title

- raw_results = _wiki_request(**search_params)
+ raw_results = _wiki_request(search_params)

if 'error' in raw_results:
if raw_results['error']['info'] in ('HTTP request timed out.', 'Pool queue is full'):
@@ -173,7 +173,7 @@ def suggest(query):
}
search_params['srsearch'] = query

- raw_result = _wiki_request(**search_params)
+ raw_result = _wiki_request(search_params)

if raw_result['query'].get('searchinfo'):
return raw_result['query']['searchinfo']['suggestion']
@@ -198,7 +198,7 @@ def random(pages=1):
'rnlimit': pages,
}

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
titles = [page['title'] for page in request['query']['random']]

if len(titles) == 1:
@@ -241,7 +241,7 @@ def summary(title, sentences=0, chars=0, auto_suggest=True, redirect=True):
else:
query_params['exintro'] = ''

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
summary = request['query']['pages'][pageid]['extract']

return summary
@@ -328,7 +328,7 @@ def load(self, redirect=True, preload=False):
else:
query_params['pageids'] = self.pageid

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)

pages = request['query']['pages']
pageid = list(pages.keys())[0]
@@ -351,7 +351,7 @@ def load(self, redirect=True, preload=False):
'titles': self.title
}

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)

extract = request['query']['pages'][pageid]['extract']

@@ -378,7 +378,7 @@ def load(self, redirect=True, preload=False):
query_params['pageids'] = self.pageid
else:
query_params['titles'] = self.title
- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
html = request['query']['pages'][pageid]['revisions'][0]['*']

lis = BeautifulSoup(html).find_all('li')
@@ -408,7 +408,7 @@ def html(self):
'titles': self.title
}

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
self._html = request['query']['pages'][self.pageid]['revisions'][0]['*']

return self._html
@@ -429,7 +429,7 @@ def content(self):
query_params['titles'] = self.title
else:
query_params['pageids'] = self.pageid
- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
self._content = request['query']['pages'][self.pageid]['extract']
self._revision_id = request['query']['pages'][self.pageid]['revisions'][0]['revid']
self._parent_id = request['query']['pages'][self.pageid]['revisions'][0]['parentid']
@@ -484,7 +484,7 @@ def summary(self):
else:
query_params['pageids'] = self.pageid

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
self._summary = request['query']['pages'][self.pageid]['extract']

return self._summary
@@ -507,7 +507,7 @@ def images(self):
else:
query_params['pageids'] = self.pageid

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)

image_keys = request['query']['pages'].keys()
images = (request['query']['pages'][key] for key in image_keys)
@@ -527,7 +527,7 @@ def coordinates(self):
'titles': self.title,
}

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)

coordinates = request['query']['pages'][self.pageid]['coordinates']

@@ -552,7 +552,7 @@ def references(self):
else:
query_params['pageids'] = self.pageid

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)

links = request['query']['pages'][self.pageid]['extlinks']
relative_urls = (link['*'] for link in links)
@@ -591,7 +591,7 @@ def links(self):
params = request.copy()
params.update(lastContinue)

- request = _wiki_request(**params)
+ request = _wiki_request(params)
self._links.extend([link['title'] for link in request['query']['pages'][self.pageid]['links']])

if 'continue' not in request:
@@ -617,7 +617,7 @@ def sections(self):
else:
query_params['pageid'] = self.pageid

- request = _wiki_request(**query_params)
+ request = _wiki_request(query_params)
self._sections = [section['line'] for section in request['parse']['sections']]

return self._sections
@@ -657,7 +657,7 @@ def donate():
webbrowser.open('https://donate.wikimedia.org/w/index.php?title=Special:FundraiserLandingPage', new=2)


- def _wiki_request(**params):
+ def _wiki_request(params):
'''
Make a request to the Wikipedia API using the given search parameters.
Returns a parsed dict of the JSON response.
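For context, the meta-argument overhaul mentioned in the commit message could eventually look something like the sketch below. This is hypothetical, not part of this commit: `timeout` is an invented meta argument, and the body is a simplified stand-in for the real request logic.

import requests

API_URL = 'http://en.wikipedia.org/w/api.php'

def _wiki_request(params, timeout=None):
  '''
  Hypothetical future signature: `params` carries the API query
  parameters, while keyword arguments like `timeout` configure the
  request itself -- impossible under the old **params signature,
  where every kwarg was treated as a query parameter.
  '''
  params['format'] = 'json'
  params['action'] = 'query'
  # timeout applies to the HTTP call, not the API query
  return requests.get(API_URL, params=params, timeout=timeout).json()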