searxng/searx/engines/github.py

60 lines
1.3 KiB
Python
Raw Normal View History

2014-09-02 15:37:47 +00:00
## Github (It)
#
2014-09-02 15:37:47 +00:00
# @website https://github.com/
# @provide-api yes (https://developer.github.com/v3/)
#
2014-09-02 15:37:47 +00:00
# @using-api yes
# @results JSON
# @stable yes (using api)
# @parse url, title, content
2013-10-20 19:53:49 +00:00
from urllib import urlencode
from json import loads
2013-10-22 21:17:22 +00:00
from cgi import escape
2013-10-20 19:53:49 +00:00
2014-09-02 15:37:47 +00:00
# engine dependent config
2013-10-20 19:53:49 +00:00
# engine dependent config: category this engine answers for
categories = ['it']

# search-url: GitHub repository search API, most-starred first;
# {query} is filled with the urlencoded search term
search_url = 'https://api.github.com/search/repositories?sort=stars&order=desc&{query}'  # noqa
# ask the API for text-match metadata in its preview media type
accept_header = 'application/vnd.github.preview.text-match+json'
2013-10-20 19:53:49 +00:00
2014-09-02 15:37:47 +00:00
# do search-request
2013-10-20 19:53:49 +00:00
def request(query, params):
    """Fill *params* in-place for the GitHub search request and return it.

    Sets the request URL (with the urlencoded search term) and the Accept
    header that selects the API's text-match preview media type.
    """
    encoded_query = urlencode({'q': query})
    params['url'] = search_url.format(query=encoded_query)

    headers = params['headers']
    headers['Accept'] = accept_header

    return params
2014-09-02 15:37:47 +00:00
# get response from search-request
2013-10-20 19:53:49 +00:00
def response(resp):
    """Parse the GitHub repository-search JSON response into searx results.

    Each result dict carries the repository's html_url as url, its name as
    title, and the HTML-escaped description (truncated to 500 characters)
    as content.  Returns an empty list when the response carries no items
    (e.g. error or rate-limit responses).
    """
    results = []

    search_res = loads(resp.text)

    # check if items are received; error responses have no 'items' key
    # (idiomatic 'not in' instead of 'not ... in ...')
    if 'items' not in search_res:
        return []

    # parse results
    for res in search_res['items']:
        title = res['name']
        url = res['html_url']

        # description may be null or missing entirely; .get avoids a
        # KeyError on responses that omit the key
        description = res.get('description')
        if description:
            content = escape(description[:500])
        else:
            content = ''

        # append result
        results.append({'url': url,
                        'title': title,
                        'content': content})

    # return results
    return results