"""
 DuckDuckGo (Web)

 @website     https://duckduckgo.com/
 @provide-api yes (https://duckduckgo.com/api),
              but not all results from search-site

 @using-api   no
 @results     HTML (using search portal)
 @stable      no (HTML can change)
 @parse       url, title, content

 @todo        rewrite to api
"""

from lxml.html import fromstring
from json import loads
from searx.utils import extract_text, match_language, eval_xpath

# engine dependent config
categories = ['general']
paging = False
language_support = True
supported_languages_url = 'https://duckduckgo.com/util/u172.js'
time_range_support = True
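
# language_aliases maps the language codes used inside searx to the region codes
# DuckDuckGo expects where the two differ (e.g. 'ja' corresponds to 'jp-JP' below).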
language_aliases = {
    'ar-SA': 'ar-XA',
    'es-419': 'es-XL',
    'ja': 'jp-JP',
    'ko': 'kr-KR',
    'sl-SI': 'sl-SL',
    'zh-TW': 'tzh-TW',
    'zh-HK': 'tzh-HK'
}

# search-url
url = 'https://html.duckduckgo.com/html'
time_range_dict = {'day': 'd',
                   'week': 'w',
                   'month': 'm'}

# specific xpath variables
result_xpath = '//div[@class="result results_links results_links_deep web-result "]'  # noqa
url_xpath = './/a[@class="result__a"]/@href'
title_xpath = './/a[@class="result__a"]'
content_xpath = './/a[@class="result__snippet"]'
correction_xpath = '//div[@id="did_you_mean"]//a'
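
# NOTE: the selectors above target the HTML served by the portal configured in `url`;
# per the module docstring (@stable no), the markup can change and may need updating.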


# match query's language to a region code that duckduckgo will accept
def get_region_code(lang, lang_list=None):
    if lang == 'all':
        return None

    lang_code = match_language(lang, lang_list or [], language_aliases, 'wt-WT')
    lang_parts = lang_code.split('-')

    # country code goes first
    return lang_parts[1].lower() + '-' + lang_parts[0].lower()
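
# Illustrative example only (not called by the engine itself): assuming 'en-US' is in
# supported_languages, get_region_code('en-US', supported_languages) returns 'us-en'
# (country code first), while get_region_code('all') returns None so no region is set.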


def request(query, params):
    if params['time_range'] is not None and params['time_range'] not in time_range_dict:
        return params

    params['url'] = url
    params['method'] = 'POST'
    params['data']['b'] = ''
    params['data']['q'] = query
    params['data']['df'] = ''

    region_code = get_region_code(params['language'], supported_languages)
    if region_code:
        params['data']['kl'] = region_code
        params['cookies']['kl'] = region_code
    if params['time_range'] in time_range_dict:
        params['data']['df'] = time_range_dict[params['time_range']]

    return params
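
# Sketch of the outgoing request built above, assuming params['language'] == 'en-US'
# and params['time_range'] == 'week' (illustrative values, not fixtures):
#   params['url']    == 'https://html.duckduckgo.com/html'
#   params['method'] == 'POST'
#   params['data']   == {'b': '', 'q': query, 'df': 'w', 'kl': 'us-en'}
#   params['cookies']['kl'] == 'us-en'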


# get response from search-request
def response(resp):
    results = []

    doc = fromstring(resp.text)

    # parse results
    for i, r in enumerate(eval_xpath(doc, result_xpath)):
        if i >= 30:
            break
        try:
            res_url = eval_xpath(r, url_xpath)[-1]
        except:
            continue

        if not res_url:
            continue

        title = extract_text(eval_xpath(r, title_xpath))
        content = extract_text(eval_xpath(r, content_xpath))

        # append result
        results.append({'title': title,
                        'content': content,
                        'url': res_url})

    # parse correction
    for correction in eval_xpath(doc, correction_xpath):
        # append correction
        results.append({'correction': extract_text(correction)})

    # return results
    return results
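
# Every appended entry is a plain dict: regular results have the shape
# {'title': ..., 'content': ..., 'url': ...}, spelling suggestions the shape
# {'correction': ...}.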


# get supported languages from their site
def _fetch_supported_languages(resp):

    # response is a js file with regions as an embedded object
    response_page = resp.text
    response_page = response_page[response_page.find('regions:{') + 8:]
    response_page = response_page[:response_page.find('}') + 1]

    regions_json = loads(response_page)
    supported_languages = map((lambda x: x[3:] + '-' + x[:2].upper()), regions_json.keys())

    return list(supported_languages)
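
# Example of the key transformation above, assuming the fetched JS exposes a region
# key such as 'us-en': 'us-en'[3:] + '-' + 'us-en'[:2].upper() evaluates to 'en-US'.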