# SPDX-License-Identifier: AGPL-3.0-or-later
"""
 Wikipedia (Web)
"""

from urllib.parse import quote
from json import loads
from lxml.html import fromstring
from searx.utils import match_language, searx_useragent
from searx.network import raise_for_httperror

# about
about = {
    "website": 'https://www.wikipedia.org/',
    "wikidata_id": 'Q52',
    "official_api_documentation": 'https://en.wikipedia.org/api/',
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

# when set, searx adds an Accept-Language header derived from the user's
# locale to each request
send_accept_language_header = True

# search-url
search_url = 'https://{language}.wikipedia.org/api/rest_v1/page/summary/{title}'
supported_languages_url = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'
language_variants = {"zh": ("zh-cn", "zh-hk", "zh-mo", "zh-my", "zh-sg", "zh-tw")}
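# For example, the summary endpoint for query "Paris" on the English
# Wikipedia resolves to: https://en.wikipedia.org/api/rest_v1/page/summary/Paris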


# set language in base_url
# (supported_languages and language_aliases are provided by searx when the
# engine is loaded)
def url_lang(lang):
    lang_pre = lang.split('-')[0]
    if lang_pre == 'all' or lang_pre not in supported_languages and lang_pre not in language_aliases:
        return 'en'
    return match_language(lang, supported_languages, language_aliases).split('-')[0]
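# Illustrative behaviour (assuming the usual injected language tables):
#   url_lang('fr-CA') -> 'fr'
#   url_lang('all')   -> 'en'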


# do search-request
def request(query, params):
    if query.islower():
        # page titles usually start with a capital letter; title-case
        # all-lowercase queries to improve the hit rate
        query = query.title()

    language = url_lang(params['language'])
    params['url'] = search_url.format(title=quote(query), language=language)

    params['headers']['User-Agent'] = searx_useragent()
    # HTTP errors are handled in response() below
    params['raise_for_httperror'] = False
    params['soft_max_redirects'] = 2

    return params
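# Rough usage sketch (hypothetical values; searx builds params itself):
#   params = {'language': 'en-US', 'headers': {}}
#   request('garlic bread', params)
#   params['url'] is then
#   'https://en.wikipedia.org/api/rest_v1/page/summary/Garlic%20Bread'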


# get response from search-request
def response(resp):
    if resp.status_code == 404:
        # no article matches the requested title: simply return no results
        return []

    if resp.status_code == 400:
        # the REST API answers 400 when the title contains invalid characters;
        # treat that specific error as "no results" instead of failing
        try:
            api_result = loads(resp.text)
        except ValueError:
            pass
        else:
            if (
                api_result['type'] == 'https://mediawiki.org/wiki/HyperSwitch/errors/bad_request'
                and api_result['detail'] == 'title-invalid-characters'
            ):
                return []

    raise_for_httperror(resp)

    results = []
    api_result = loads(resp.text)

    # skip disambiguation pages
    if api_result.get('type') != 'standard':
        return []

    title = api_result['title']
    wikipedia_link = api_result['content_urls']['desktop']['page']

    # first result: a plain link to the article
    results.append({'url': wikipedia_link, 'title': title})

    # second result: an infobox built from the page summary
    results.append(
        {
            'infobox': title,
            'id': wikipedia_link,
            'content': api_result.get('extract', ''),
            'img_src': api_result.get('thumbnail', {}).get('source'),
            'urls': [{'title': 'Wikipedia', 'url': wikipedia_link}],
        }
    )

    return results
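# Abridged sketch of the summary payload consumed by response() (field names
# follow the REST v1 response; the values here are made up):
#   {"type": "standard", "title": "Paris",
#    "extract": "Paris is the capital and most populous city of France ...",
#    "thumbnail": {"source": "https://upload.wikimedia.org/..."},
#    "content_urls": {"desktop": {"page": "https://en.wikipedia.org/wiki/Paris"}}}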


# get supported languages from their site
def _fetch_supported_languages(resp):
    supported_languages = {}
    dom = fromstring(resp.text)
    tables = dom.xpath('//table[contains(@class,"sortable")]')
    for table in tables:
        # exclude header row
        trs = table.xpath('.//tr')[1:]
        for tr in trs:
            td = tr.xpath('./td')
            # table columns: td[1] = English name, td[2] = local name,
            # td[3] = language code, td[4] = article count
            code = td[3].xpath('./a')[0].text
            name = td[2].xpath('./a')[0].text
            english_name = td[1].xpath('./a')[0].text
            articles = int(td[4].xpath('./a/b')[0].text.replace(',', ''))
            # exclude languages with too few articles
            if articles >= 100:
                supported_languages[code] = {"name": name, "english_name": english_name}

    return supported_languages
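
# Shape of the returned mapping (illustrative entries):
#   {'en': {'name': 'English', 'english_name': 'English'},
#    'de': {'name': 'Deutsch', 'english_name': 'German'}, ...}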