'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''


from lxml import etree
from json import loads
from urllib.parse import urlencode

from requests import RequestException

from searx import settings
from searx.poolrequests import get as http_get
from searx.exceptions import SearxEngineResponseException

def get(*args, **kwargs):
    if 'timeout' not in kwargs:
        kwargs['timeout'] = settings['outgoing']['request_timeout']
    kwargs['raise_for_httperror'] = True
    return http_get(*args, **kwargs)


def dbpedia(query, lang):
    # dbpedia autocompleter
    autocomplete_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'

    response = get(autocomplete_url + urlencode(dict(QueryString=query)))

    results = []

    if response.ok:
        dom = etree.fromstring(response.content)
        results = dom.xpath('//Result/Label//text()')

    return results


def duckduckgo(query, lang):
    # duckduckgo autocompleter
    url = 'https://ac.duckduckgo.com/ac/?{0}&type=list'

    resp = loads(get(url.format(urlencode(dict(q=query)))).text)
    if len(resp) > 1:
        return resp[1]
    return []


def google(query, lang):
    # google autocompleter
    autocomplete_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'

    response = get(autocomplete_url + urlencode(dict(hl=lang, q=query)))

    results = []

    if response.ok:
        dom = etree.fromstring(response.text)
        results = dom.xpath('//suggestion/@data')

    return results


def startpage(query, lang):
    # startpage autocompleter
    url = 'https://startpage.com/do/suggest?{query}'

    resp = get(url.format(query=urlencode({'query': query}))).text.split('\n')
    if len(resp) > 1:
        return resp
    return []


def swisscows(query, lang):
    # swisscows autocompleter
    url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'

    resp = loads(get(url.format(query=urlencode({'query': query}))).text)
    return resp


def qwant(query, lang):
    # qwant autocompleter (additional parameters: lang=en_en&count=xxx)
    url = 'https://api.qwant.com/api/suggest?{query}'

    resp = get(url.format(query=urlencode({'q': query, 'lang': lang})))

    results = []

    if resp.ok:
        data = loads(resp.text)
        if data['status'] == 'success':
            for item in data['data']['items']:
                results.append(item['value'])

    return results


def wikipedia(query, lang):
    # wikipedia autocompleter
    url = 'https://' + lang + '.wikipedia.org/w/api.php?action=opensearch&{0}&limit=10&namespace=0&format=json'

    resp = loads(get(url.format(urlencode(dict(search=query)))).text)
    if len(resp) > 1:
        return resp[1]
    return []


backends = {'dbpedia': dbpedia,
            'duckduckgo': duckduckgo,
            'google': google,
            'startpage': startpage,
            'swisscows': swisscows,
            'qwant': qwant,
            'wikipedia': wikipedia
            }


def search_autocomplete(backend_name, query, lang):
    backend = backends.get(backend_name)
    if backend is None:
        return []

    try:
        return backend(query, lang)
    except (RequestException, SearxEngineResponseException):
        return []
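

# Illustrative usage sketch (not part of the original module): running this
# file directly would fetch completions for the term "python" from the
# DuckDuckGo backend and print them. The backend name, query, and language
# below are arbitrary example values, and the sketch assumes a working searx
# installation whose settings define outgoing.request_timeout plus network
# access to the completer; on any request error search_autocomplete simply
# returns an empty list.
if __name__ == '__main__':
    for suggestion in search_autocomplete('duckduckgo', 'python', 'en'):
        print(suggestion)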