| 
									
										
										
										
											2021-01-13 11:31:25 +01:00
										 |  |  | # SPDX-License-Identifier: AGPL-3.0-or-later | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  | """The JSON engine is a *generic* engine with which it is possible to configure
 | 
					
						
							|  |  |  | engines in the settings. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | .. todo:: | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |    - The JSON engine needs documentation!! | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |    - The parameters of the JSON engine should be adapted to those of the XPath | 
					
						
							|  |  |  |      engine. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | """
 | 
					
						
							| 
									
										
										
										
											2021-01-13 11:31:25 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-10-05 12:52:08 +02:00
										 |  |  | from collections.abc import Iterable | 
					
						
							| 
									
										
										
										
											2016-11-30 18:43:03 +01:00
										 |  |  | from json import loads | 
					
						
							| 
									
										
										
										
											2020-08-06 17:42:46 +02:00
										 |  |  | from urllib.parse import urlencode | 
					
						
							| 
									
										
										
										
											2021-02-10 16:40:03 +01:00
										 |  |  | from searx.utils import to_string, html_to_text | 
					
						
							| 
									
										
										
										
											2016-11-30 18:43:03 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
# --- engine settings, overridden per engine in settings.yml ---

# Search URL template; formatted with {query} and, when paging is
# enabled, {pageno} (see request() below).
search_url = None
# Slash-separated JSON path to the result URL.
url_query = None
# Prefix prepended to every result URL.
url_prefix = ""
# Slash-separated JSON path to the result content.
content_query = None
# Slash-separated JSON path to the result title.
title_query = None
# When True, strip HTML markup from content / title values.
content_html_to_text = False
title_html_to_text = False
# Enable pagination support ({pageno} in search_url).
paging = False
# Slash-separated JSON path to a list of suggestions (optional).
suggestion_query = ''
# Slash-separated JSON path to the list of result objects; when empty,
# the top-level JSON value itself is iterated as the result list.
results_query = ''

cookies = {}
headers = {}
'''Some engines might offer different result based on cookies or headers.
Possible use-case: To set safesearch cookie or header to moderate.'''

# parameters for engines with paging support
#
# number of results on each page
# (only needed if the site requires not a page number, but an offset)
page_size = 1
# number of the first page (usually 0 or 1)
first_page_num = 1
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | def iterate(iterable): | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |     if isinstance(iterable, dict): | 
					
						
							|  |  |  |         items = iterable.items() | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     else: | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |         items = enumerate(iterable) | 
					
						
							|  |  |  |     for index, value in items: | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |         yield str(index), value | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | def is_iterable(obj): | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |     if isinstance(obj, str): | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  |         return False | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     return isinstance(obj, Iterable) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  | def parse(query):  # pylint: disable=redefined-outer-name | 
					
						
							|  |  |  |     q = []  # pylint: disable=invalid-name | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     for part in query.split('/'): | 
					
						
							|  |  |  |         if part == '': | 
					
						
							|  |  |  |             continue | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |         q.append(part) | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     return q | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  | def do_query(data, q):  # pylint: disable=invalid-name | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     ret = [] | 
					
						
							| 
									
										
										
										
											2014-02-11 13:13:51 +01:00
										 |  |  |     if not q: | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |         return ret | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     qkey = q[0] | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  |     for key, value in iterate(data): | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |         if len(q) == 1: | 
					
						
							|  |  |  |             if key == qkey: | 
					
						
							|  |  |  |                 ret.append(value) | 
					
						
							|  |  |  |             elif is_iterable(value): | 
					
						
							|  |  |  |                 ret.extend(do_query(value, q)) | 
					
						
							|  |  |  |         else: | 
					
						
							|  |  |  |             if not is_iterable(value): | 
					
						
							|  |  |  |                 continue | 
					
						
							|  |  |  |             if key == qkey: | 
					
						
							|  |  |  |                 ret.extend(do_query(value, q[1:])) | 
					
						
							|  |  |  |             else: | 
					
						
							|  |  |  |                 ret.extend(do_query(value, q)) | 
					
						
							|  |  |  |     return ret | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | def query(data, query_string): | 
					
						
							|  |  |  |     q = parse(query_string) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     return do_query(data, q) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-20 02:31:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  | def request(query, params):  # pylint: disable=redefined-outer-name | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     query = urlencode({'q': query})[2:] | 
					
						
							| 
									
										
										
										
											2016-07-16 11:26:29 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |     fp = {'query': query}  # pylint: disable=invalid-name | 
					
						
							| 
									
										
										
										
											2016-07-16 11:26:29 +02:00
										 |  |  |     if paging and search_url.find('{pageno}') >= 0: | 
					
						
							| 
									
										
										
										
											2016-08-14 13:46:54 +02:00
										 |  |  |         fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num | 
					
						
							| 
									
										
										
										
											2016-07-16 11:26:29 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-06-10 23:26:55 +02:00
										 |  |  |     params['cookies'].update(cookies) | 
					
						
							|  |  |  |     params['headers'].update(headers) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-07-16 11:26:29 +02:00
										 |  |  |     params['url'] = search_url.format(**fp) | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     params['query'] = query | 
					
						
							| 
									
										
										
										
											2016-07-16 11:26:29 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     return params | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-02-10 16:40:03 +01:00
										 |  |  | def identity(arg): | 
					
						
							|  |  |  |     return arg | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  | def response(resp): | 
					
						
							|  |  |  |     results = [] | 
					
						
							|  |  |  |     json = loads(resp.text) | 
					
						
							| 
									
										
										
										
											2021-02-10 16:40:03 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     title_filter = html_to_text if title_html_to_text else identity | 
					
						
							|  |  |  |     content_filter = html_to_text if content_html_to_text else identity | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-08-12 12:15:27 +02:00
										 |  |  |     if results_query: | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |         rs = query(json, results_query)  # pylint: disable=invalid-name | 
					
						
							|  |  |  |         if not rs: | 
					
						
							| 
									
										
										
										
											2017-11-02 00:43:29 +01:00
										 |  |  |             return results | 
					
						
							|  |  |  |         for result in rs[0]: | 
					
						
							| 
									
										
										
										
											2017-11-02 01:08:15 +01:00
										 |  |  |             try: | 
					
						
							|  |  |  |                 url = query(result, url_query)[0] | 
					
						
							|  |  |  |                 title = query(result, title_query)[0] | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |             except:  # pylint: disable=bare-except | 
					
						
							| 
									
										
										
										
											2017-11-02 01:08:15 +01:00
										 |  |  |                 continue | 
					
						
							| 
									
										
										
										
											2017-11-02 00:43:29 +01:00
										 |  |  |             try: | 
					
						
							|  |  |  |                 content = query(result, content_query)[0] | 
					
						
							| 
									
										
										
										
											2024-03-11 07:45:08 +01:00
										 |  |  |             except:  # pylint: disable=bare-except | 
					
						
							| 
									
										
										
										
											2017-11-02 00:43:29 +01:00
										 |  |  |                 content = "" | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |             results.append( | 
					
						
							|  |  |  |                 { | 
					
						
							| 
									
										
										
										
											2023-10-20 11:26:28 +02:00
										 |  |  |                     'url': url_prefix + to_string(url), | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |                     'title': title_filter(to_string(title)), | 
					
						
							|  |  |  |                     'content': content_filter(to_string(content)), | 
					
						
							|  |  |  |                 } | 
					
						
							|  |  |  |             ) | 
					
						
							| 
									
										
										
										
											2016-08-12 12:15:27 +02:00
										 |  |  |     else: | 
					
						
							| 
									
										
										
										
											2024-09-12 10:10:20 +02:00
										 |  |  |         for result in json: | 
					
						
							|  |  |  |             url = query(result, url_query)[0] | 
					
						
							|  |  |  |             title = query(result, title_query)[0] | 
					
						
							|  |  |  |             content = query(result, content_query)[0] | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |             results.append( | 
					
						
							|  |  |  |                 { | 
					
						
							| 
									
										
										
										
											2023-10-20 11:26:28 +02:00
										 |  |  |                     'url': url_prefix + to_string(url), | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |                     'title': title_filter(to_string(title)), | 
					
						
							|  |  |  |                     'content': content_filter(to_string(content)), | 
					
						
							|  |  |  |                 } | 
					
						
							|  |  |  |             ) | 
					
						
							| 
									
										
										
										
											2016-08-12 12:15:27 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     if not suggestion_query: | 
					
						
							|  |  |  |         return results | 
					
						
							|  |  |  |     for suggestion in query(json, suggestion_query): | 
					
						
							|  |  |  |         results.append({'suggestion': suggestion}) | 
					
						
							| 
									
										
										
										
											2013-11-19 15:49:52 +01:00
										 |  |  |     return results |