[refactor] typification of SearXNG / EngineResults
In [1] and [2] we discussed the need for a Result.results property and how we
can avoid unclear code.  This patch implements a class for the result lists of
engines::
    searx.result_types.EngineResults
A simple example of its use in engine development::
    from searx.result_types import EngineResults
    ...
    def response(resp) -> EngineResults:
        res = EngineResults()
        ...
        res.add( res.types.Answer(answer="lorem ipsum ..", url="https://example.org") )
        ...
        return res
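
A result of type Translations is added the same way.  A minimal sketch (the
Item fields shown here are the ones used by the translation engines touched
in this patch; the text and URL values are placeholders)::

    def response(resp) -> EngineResults:
        res = EngineResults()
        # build one translation item and wrap it in a Translations result
        item = res.types.Translations.Item(text="translated text")
        res.add(res.types.Translations(translations=[item], url="https://example.org"))
        return res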
[1] https://github.com/searxng/searxng/pull/4183#pullrequestreview-257400034
[2] https://github.com/searxng/searxng/pull/4183#issuecomment-2614301580
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
			
			
parent edfbf1e118
commit 36a1ef1239
@@ -19,6 +19,14 @@ Engine Implementations
    engine_overview


+ResultList and engines
+======================
+
+.. autoclass:: searx.result_types.ResultList
+
+.. autoclass:: searx.result_types.EngineResults
+
+
 Engine Types
 ============

@@ -139,7 +139,7 @@ from searx.utils import (
     get_embeded_stream_url,
 )
 from searx.enginelib.traits import EngineTraits
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 if TYPE_CHECKING:
     import logging
@@ -249,7 +249,7 @@ def _extract_published_date(published_date_raw):
         return None


-def response(resp):
+def response(resp) -> EngineResults:

     if brave_category in ('search', 'goggles'):
         return _parse_search(resp)
@@ -270,9 +270,9 @@ def response(resp):
     raise ValueError(f"Unsupported brave category: {brave_category}")


-def _parse_search(resp):
+def _parse_search(resp) -> EngineResults:
+    result_list = EngineResults()

-    result_list = []
     dom = html.fromstring(resp.text)

     # I doubt that Brave is still providing the "answer" class / I haven't seen
@@ -282,7 +282,7 @@ def _parse_search(resp):
         url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
         answer = extract_text(answer_tag)
         if answer is not None:
-            Answer(results=result_list, answer=answer, url=url)
+            result_list.add(result_list.types.Answer(answer=answer, url=url))

     # xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
     xpath_results = '//div[contains(@class, "snippet ")]'
@@ -339,8 +339,8 @@ def _parse_search(resp):
     return result_list


-def _parse_news(json_resp):
-    result_list = []
+def _parse_news(json_resp) -> EngineResults:
+    result_list = EngineResults()

     for result in json_resp["results"]:
         item = {
@@ -356,8 +356,8 @@ def _parse_news(json_resp):
     return result_list


-def _parse_images(json_resp):
-    result_list = []
+def _parse_images(json_resp) -> EngineResults:
+    result_list = EngineResults()

     for result in json_resp["results"]:
         item = {
@@ -375,8 +375,8 @@ def _parse_images(json_resp):
     return result_list


-def _parse_videos(json_resp):
-    result_list = []
+def _parse_videos(json_resp) -> EngineResults:
+    result_list = EngineResults()

     for result in json_resp["results"]:

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Deepl translation engine"""

-from searx.result_types import Translations
+from searx.result_types import EngineResults

 about = {
     "website": 'https://deepl.com',
@@ -39,15 +39,14 @@ def request(_query, params):
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:

-    result = resp.json()
+    res = EngineResults()
+    data = resp.json()
+    if not data.get('translations'):
+        return res

-    if not result.get('translations'):
-        return results
+    translations = [res.types.Translations.Item(text=t['text']) for t in data['translations']]
+    res.add(res.types.Translations(translations=translations))

-    translations = [Translations.Item(text=t['text']) for t in result['translations']]
-    Translations(results=results, translations=translations)
-
-    return results
+    return res
@@ -13,6 +13,7 @@ close to the implementation, its just a simple example.  To get in use of this
 """

 import json
+from searx.result_types import EngineResults

 engine_type = 'offline'
 categories = ['general']
@@ -48,14 +49,14 @@ def init(engine_settings=None):
     )


-def search(query, request_params):
+def search(query, request_params) -> EngineResults:
     """Query (offline) engine and return results.  Assemble the list of results from
     your local engine.  In this demo engine we ignore the 'query' term, usual
     you would pass the 'query' term to your local engine to filter out the
     results.

     """
-    ret_val = []
+    res = EngineResults()

     result_list = json.loads(_my_offline_engine)

@@ -67,6 +68,6 @@ def search(query, request_params):
             # choose a result template or comment out to use the *default*
             'template': 'key-value.html',
         }
-        ret_val.append(entry)
+        res.append(entry)

-    return ret_val
+    return res
@@ -17,6 +17,7 @@ list in ``settings.yml``:

 from json import loads
 from urllib.parse import urlencode
+from searx.result_types import EngineResults

 engine_type = 'online'
 send_accept_language_header = True
@@ -70,21 +71,28 @@ def request(query, params):
     return params


-def response(resp):
+def response(resp) -> EngineResults:
     """Parse out the result items from the response.  In this example we parse the
     response from `api.artic.edu <https://artic.edu>`__ and filter out all
     images.

     """
-    results = []
+    res = EngineResults()
     json_data = loads(resp.text)

+    res.add(
+        res.types.Answer(
+            answer="this is a dummy answer ..",
+            url="https://example.org",
+        )
+    )
+
     for result in json_data['data']:

         if not result['image_id']:
             continue

-        results.append(
+        res.append(
             {
                 'url': 'https://artic.edu/artworks/%(id)s' % result,
                 'title': result['title'] + " (%(date_display)s) // %(artist_display)s" % result,
@@ -95,4 +103,4 @@ def response(resp):
             }
         )

-    return results
+    return res
@@ -7,7 +7,7 @@ import urllib.parse
 from lxml import html

 from searx.utils import eval_xpath, extract_text
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 from searx.network import get as http_get  # https://github.com/searxng/searxng/issues/762

 # about
@@ -43,9 +43,9 @@ def _clean_up_node(node):
             n.getparent().remove(n)


-def response(resp):
+def response(resp) -> EngineResults:
+    results = EngineResults()

-    results = []
     item_list = []

     if not resp.ok:
@@ -85,7 +85,7 @@ def response(resp):

             synonyms.append(p_text)

-        item = Translations.Item(text=text, synonyms=synonyms)
+        item = results.types.Translations.Item(text=text, synonyms=synonyms)
         item_list.append(item)

     # the "autotranslate" of dictzone is loaded by the JS from URL:
@@ -98,7 +98,7 @@ def response(resp):
     # works only sometimes?
     autotranslate = http_get(f"{base_url}/trans/{query}/{from_lang}_{to_lang}", timeout=1.0)
     if autotranslate.ok and autotranslate.text:
-        item_list.insert(0, Translations.Item(text=autotranslate.text))
+        item_list.insert(0, results.types.Translations.Item(text=autotranslate.text))

-    Translations(results=results, translations=item_list, url=resp.search_params["url"])
+    results.add(results.types.Translations(translations=item_list, url=resp.search_params["url"]))
     return results
@@ -27,7 +27,7 @@ from searx.network import get  # see https://github.com/searxng/searxng/issues/7
 from searx import redisdb
 from searx.enginelib.traits import EngineTraits
 from searx.exceptions import SearxEngineCaptchaException
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 if TYPE_CHECKING:
     import logging
@@ -355,12 +355,12 @@ def is_ddg_captcha(dom):
     return bool(eval_xpath(dom, "//form[@id='challenge-form']"))


-def response(resp):
+def response(resp) -> EngineResults:
+    results = EngineResults()

     if resp.status_code == 303:
-        return []
+        return results

-    results = []
     doc = lxml.html.fromstring(resp.text)

     if is_ddg_captcha(doc):
@@ -398,8 +398,15 @@ def response(resp):
         and "URL Decoded:" not in zero_click
     ):
         current_query = resp.search_params["data"].get("q")
-
-        Answer(results=results, answer=zero_click, url="https://duckduckgo.com/?" + urlencode({"q": current_query}))
+        results.add(
+            results.types.Answer(
+                answer=zero_click,
+                url="https://duckduckgo.com/?"
+                + urlencode(
+                    {"q": current_query},
+                ),
+            )
+        )

     return results

@@ -21,7 +21,7 @@ from lxml import html
 from searx.data import WIKIDATA_UNITS
 from searx.utils import extract_text, html_to_text, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 if TYPE_CHECKING:
     import logging
@@ -76,9 +76,9 @@ def request(query, params):
     return params


-def response(resp):
+def response(resp) -> EngineResults:
     # pylint: disable=too-many-locals, too-many-branches, too-many-statements
-    results = []
+    results = EngineResults()

     search_res = resp.json()

@@ -103,7 +103,12 @@ def response(resp):
         answer_type = search_res.get('AnswerType')
         logger.debug('AnswerType="%s" Answer="%s"', answer_type, answer)
         if isinstance(answer, str) and answer_type not in ['calc', 'ip']:
-            Answer(results=results, answer=html_to_text(answer), url=search_res.get('AbstractURL', ''))
+            results.add(
+                results.types.Answer(
+                    answer=html_to_text(answer),
+                    url=search_res.get('AbstractURL', ''),
+                )
+            )

     # add infobox
     if 'Definition' in search_res:
@@ -25,7 +25,7 @@ from searx.locales import language_tag, region_tag, get_official_locales
 from searx.network import get  # see https://github.com/searxng/searxng/issues/762
 from searx.exceptions import SearxEngineCaptchaException
 from searx.enginelib.traits import EngineTraits
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 if TYPE_CHECKING:
     import logging
@@ -316,12 +316,12 @@ def _parse_data_images(dom):
     return data_image_map


-def response(resp):
+def response(resp) -> EngineResults:
     """Get response from google's search request"""
     # pylint: disable=too-many-branches, too-many-statements
     detect_google_sorry(resp)

-    results = []
+    results = EngineResults()

     # convert the text to dom
     dom = html.fromstring(resp.text)
@@ -332,7 +332,12 @@ def response(resp):
     for item in answer_list:
         for bubble in eval_xpath(item, './/div[@class="nnFGuf"]'):
             bubble.drop_tree()
-        Answer(results=results, answer=extract_text(item), url=(eval_xpath(item, '../..//a/@href') + [None])[0])
+        results.add(
+            results.types.Answer(
+                answer=extract_text(item),
+                url=(eval_xpath(item, '../..//a/@href') + [None])[0],
+            )
+        )

     # parse results

@@ -3,7 +3,7 @@

 import random
 import json
-from searx.result_types import Translations
+from searx.result_types import EngineResults

 about = {
     "website": 'https://libretranslate.com',
@@ -45,15 +45,15 @@ def request(_query, params):
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()

     json_resp = resp.json()
     text = json_resp.get('translatedText')
     if not text:
         return results

-    item = Translations.Item(text=text, examples=json_resp.get('alternatives', []))
-    Translations(results=results, translations=[item])
+    item = results.types.Translations.Item(text=text, examples=json_resp.get('alternatives', []))
+    results.add(results.types.Translations(translations=[item]))

     return results
@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Lingva (alternative Google Translate frontend)"""

-from searx.result_types import Translations
+from searx.result_types import EngineResults

 about = {
     "website": 'https://lingva.ml',
@@ -23,8 +23,8 @@ def request(_query, params):
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()

     result = resp.json()
     info = result["info"]
@@ -44,7 +44,7 @@ def response(resp):
     for definition in info['definitions']:
         for translation in definition['list']:
             data.append(
-                Translations.Item(
+                results.types.Translations.Item(
                     text=result['translation'],
                     definitions=[translation['definition']] if translation['definition'] else [],
                     examples=[translation['example']] if translation['example'] else [],
@@ -55,19 +55,20 @@ def response(resp):
     for translation in info["extraTranslations"]:
         for word in translation["list"]:
             data.append(
-                Translations.Item(
+                results.types.Translations.Item(
                     text=word['word'],
                     definitions=word['meanings'],
                 )
             )

     if not data and result['translation']:
-        data.append(Translations.Item(text=result['translation']))
+        data.append(results.types.Translations.Item(text=result['translation']))

     params = resp.search_params
-    Translations(
-        results=results,
-        translations=data,
-        url=f"{url}/{params['from_lang'][1]}/{params['to_lang'][1]}/{params['query']}",
+    results.add(
+        results.types.Translations(
+            translations=data,
+            url=f"{url}/{params['from_lang'][1]}/{params['to_lang'][1]}/{params['query']}",
+        )
     )
     return results
@@ -5,7 +5,7 @@ import random
 import re
 import urllib.parse

-from searx.result_types import Translations
+from searx.result_types import EngineResults

 about = {
     "website": 'https://codeberg.org/aryak/mozhi',
@@ -33,11 +33,11 @@ def request(_query, params):
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    res = EngineResults()
     translation = resp.json()

-    item = Translations.Item(text=translation['translated-text'])
+    item = res.types.Translations.Item(text=translation['translated-text'])

     if translation['target_transliteration'] and not re.match(
         re_transliteration_unsupported, translation['target_transliteration']
@@ -57,5 +57,5 @@ def response(resp):
     url = urllib.parse.urlparse(resp.search_params["url"])
     # remove the api path
     url = url._replace(path="", fragment="").geturl()
-    Translations(results=results, translations=[item], url=url)
-    return results
+    res.add(res.types.Translations(translations=[item], url=url))
+    return res
@@ -13,7 +13,7 @@ from flask_babel import gettext
 from searx.data import OSM_KEYS_TAGS, CURRENCIES
 from searx.external_urls import get_external_url
 from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 # about
 about = {
@@ -141,8 +141,8 @@ def request(query, params):
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()

     nominatim_json = resp.json()
     user_language = resp.search_params['language']
@@ -152,10 +152,12 @@ def response(resp):
         l = re.findall(r"\s*(.*)\s+to\s+(.+)", resp.search_params["query"])
     if l:
         point1, point2 = [urllib.parse.quote_plus(p) for p in l[0]]
-        Answer(
-            results=results,
-            answer=gettext('Show route in map ..'),
-            url=f"{route_url}/?point={point1}&point={point2}",
+
+        results.add(
+            results.types.Answer(
+                answer=gettext('Show route in map ..'),
+                url=f"{route_url}/?point={point1}&point={point2}",
+            )
         )

     # simplify the code below: make sure extratags is a dictionary
@@ -19,6 +19,8 @@ from urllib.parse import urlencode
 from datetime import datetime
 from flask_babel import gettext

+from searx.result_types import EngineResults
+
 if TYPE_CHECKING:
     import logging

@@ -154,9 +156,9 @@ def parse_tineye_match(match_json):
     }


-def response(resp):
+def response(resp) -> EngineResults:
     """Parse HTTP response from TinEye."""
-    results = []
+    results = EngineResults()

     # handle the 422 client side errors, and the possible 400 status code error
     if resp.status_code in (400, 422):
@@ -183,8 +185,7 @@ def response(resp):
                 message = ','.join(description)

         # see https://github.com/searxng/searxng/pull/1456#issuecomment-1193105023
-        # from searx.result_types import Answer
-        # Answer(results=results, answer=message)
+        # results.add(results.types.Answer(answer=message))
         logger.info(message)
         return results

@@ -5,7 +5,7 @@

 import urllib.parse

-from searx.result_types import Translations
+from searx.result_types import EngineResults

 # about
 about = {
@@ -37,8 +37,8 @@ def request(query, params):  # pylint: disable=unused-argument
     return params


-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()
     data = resp.json()

     args = {
@@ -53,7 +53,7 @@ def response(resp):

     examples = [f"{m['segment']} : {m['translation']}" for m in data['matches'] if m['translation'] != text]

-    item = Translations.Item(text=text, examples=examples)
-    Translations(results=results, translations=[item], url=link)
+    item = results.types.Translations.Item(text=text, examples=examples)
+    results.add(results.types.Translations(translations=[item], url=link))

     return results
@@ -74,6 +74,7 @@ from urllib.parse import urlencode
 from lxml import html
 from searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list
 from searx.network import raise_for_httperror
+from searx.result_types import EngineResults

 search_url = None
 """
@@ -261,15 +262,15 @@ def request(query, params):
     return params


-def response(resp):  # pylint: disable=too-many-branches
-    '''Scrap *results* from the response (see :ref:`result types`).'''
+def response(resp) -> EngineResults:  # pylint: disable=too-many-branches
+    """Scrap *results* from the response (see :ref:`result types`)."""
+    results = EngineResults()
+
     if no_result_for_http_status and resp.status_code in no_result_for_http_status:
-        return []
+        return results

     raise_for_httperror(resp)

-    results = []
-
     if not resp.text:
         return results

@@ -14,7 +14,7 @@ import babel
 import babel.numbers
 from flask_babel import gettext

-from searx.result_types import Answer
+from searx.result_types import EngineResults

 name = "Basic Calculator"
 description = gettext("Calculate mathematical expressions via the search bar")
@@ -94,8 +94,8 @@ def timeout_func(timeout, func, *args, **kwargs):
     return ret_val


-def post_search(request, search) -> list[Answer]:
-    results = []
+def post_search(request, search) -> EngineResults:
+    results = EngineResults()

     # only show the result of the expression on the first page
     if search.search_query.pageno > 1:
@@ -135,6 +135,6 @@ def post_search(request, search) -> list[Answer]:
         return results

     res = babel.numbers.format_decimal(res, locale=ui_locale)
-    Answer(results=results, answer=f"{search.search_query.query} = {res}")
+    results.add(results.types.Answer(answer=f"{search.search_query.query} = {res}"))

     return results
@@ -9,7 +9,7 @@ import hashlib
 from flask_babel import gettext

 from searx.plugins import Plugin, PluginInfo
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 if typing.TYPE_CHECKING:
     from searx.search import SearchWithPlugins
@@ -37,9 +37,9 @@ class SXNGPlugin(Plugin):
             preference_section="query",
         )

-    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> list[Answer]:
+    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
         """Returns a result list only for the first page."""
-        results = []
+        results = EngineResults()

         if search.search_query.pageno > 1:
             return results
@@ -61,6 +61,6 @@ class SXNGPlugin(Plugin):
         f.update(string.encode("utf-8").strip())
         answer = function + " " + gettext("hash digest") + ": " + f.hexdigest()

-        Answer(results=results, answer=answer)
+        results.add(results.types.Answer(answer=answer))

         return results
@@ -7,7 +7,7 @@ import re
 from flask_babel import gettext

 from searx.botdetection._helpers import get_real_ip
-from searx.result_types import Answer
+from searx.result_types import EngineResults

 from . import Plugin, PluginInfo

@@ -41,17 +41,17 @@ class SXNGPlugin(Plugin):
             preference_section="query",
         )

-    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> list[Answer]:
+    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
         """Returns a result list only for the first page."""
-        results = []
+        results = EngineResults()

         if search.search_query.pageno > 1:
             return results

         if self.ip_regex.search(search.search_query.query):
-            Answer(results=results, answer=gettext("Your IP is: ") + get_real_ip(request))
+            results.add(results.types.Answer(answer=gettext("Your IP is: ") + get_real_ip(request)))

         if self.ua_regex.match(search.search_query.query):
-            Answer(results=results, answer=gettext("Your user-agent is: ") + str(request.user_agent))
+            results.add(results.types.Answer(answer=gettext("Your user-agent is: ") + str(request.user_agent)))

         return results
@@ -9,10 +9,50 @@
    gradually.  For more, please read :ref:`result types`.

 """
+# pylint: disable=too-few-public-methods

 from __future__ import annotations

-__all__ = ["Result", "AnswerSet", "Answer", "Translations"]
+__all__ = ["Result", "EngineResults", "AnswerSet", "Answer", "Translations"]
+
+import abc
+
+from searx import enginelib

 from ._base import Result, LegacyResult
 from .answer import AnswerSet, Answer, Translations
+
+
+class ResultList(list, abc.ABC):
+    """Base class of all result lists (abstract)."""
+
+    class types:  # pylint: disable=invalid-name
+        """The collection of result types (which have already been implemented)."""
+
+        Answer = Answer
+        Translations = Translations
+
+    def __init__(self):
+        # pylint: disable=useless-parent-delegation
+        super().__init__()
+
+    def add(self, result: Result):
+        """Add a :py:`Result` item to the result list."""
+        self.append(result)
+
+
+class EngineResults(ResultList):
+    """Result list that should be used by engine developers.  For convenience,
+    engine developers don't need to import types / see :py:obj:`ResultList.types`.
+
+    .. code:: python
+
+       from searx.result_types import EngineResults
+       ...
+       def response(resp) -> EngineResults:
+           res = EngineResults()
+           ...
+           res.add( res.types.Answer(answer="lorem ipsum ..", url="https://example.org") )
+           ...
+           return res
+    """
@@ -53,27 +53,6 @@ class Result(msgspec.Struct, kw_only=True):
     The field is optional and is initialized from the context if necessary.
     """

-    results: list = []  # https://jcristharif.com/msgspec/structs.html#default-values
-    """Result list of an :origin:`engine <searx/engines>` response or a
-    :origin:`answerer <searx/answerers>` to which the answer should be added.
-
-    This field is only present for the sake of simplicity.  Typically, the
-    response function of an engine has a result list that is returned at the
-    end. By specifying the result list in the constructor of the result, this
-    result is then immediately added to the list (this parameter does not have
-    another function).
-
-    .. code:: python
-
-       def response(resp):
-           results = []
-           ...
-           Answer(results=results, answer=answer, url=url)
-           ...
-           return results
-
-    """
-
     def normalize_result_fields(self):
         """Normalize a result ..

@@ -92,9 +71,7 @@ class Result(msgspec.Struct, kw_only=True):
                 self.url = self.parsed_url.geturl()

     def __post_init__(self):
-        """Add *this* result to the result list."""
-
-        self.results.append(self)
+        pass

     def __hash__(self) -> int:
         """Generates a hash value that uniquely identifies the content of *this*
@@ -70,7 +70,7 @@ class TestXpathEngine(SearxTestCase):

         response = mock.Mock(text=self.html, status_code=200)
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['title'], 'Result 1')
         self.assertEqual(results[0]['url'], 'https://result1.com/')
@@ -82,7 +82,7 @@ class TestXpathEngine(SearxTestCase):
         # with cached urls, without results_xpath
         xpath.cached_xpath = '//div[@class="search_result"]//a[@class="cached"]/@href'
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
         self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
@@ -112,7 +112,7 @@ class TestXpathEngine(SearxTestCase):

         response = mock.Mock(text=self.html, status_code=200)
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['title'], 'Result 1')
         self.assertEqual(results[0]['url'], 'https://result1.com/')
@@ -124,7 +124,7 @@ class TestXpathEngine(SearxTestCase):
         # with cached urls, with results_xpath
         xpath.cached_xpath = './/a[@class="cached"]/@href'
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
         self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
@@ -38,7 +38,7 @@ class PluginCalculator(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             query = "1+1"
-            answer = Answer(results=[], answer=f"{query} = {eval(query)}")  # pylint: disable=eval-used
+            answer = Answer(answer=f"{query} = {eval(query)}")  # pylint: disable=eval-used

             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -81,7 +81,7 @@ class PluginCalculator(SearxTestCase):
         with self.app.test_request_context():
             self.pref.parse_dict({"locale": lang})
             sxng_request.preferences = self.pref
-            answer = Answer(results=[], answer=f"{query} = {res}")
+            answer = Answer(answer=f"{query} = {res}")

             search = do_post_search(query, self.storage)
             self.assertIn(answer, search.result_container.answers)
@@ -51,7 +51,7 @@ class PluginHashTest(SearxTestCase):
     def test_hash_digest_new(self, query: str, res: str):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
-            answer = Answer(results=[], answer=res)
+            answer = Answer(answer=res)

             search = do_post_search(query, self.storage)
             self.assertIn(answer, search.result_container.answers)
@@ -60,7 +60,7 @@ class PluginHashTest(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             query, res = query_res[0]
-            answer = Answer(results=[], answer=res)
+            answer = Answer(answer=res)

             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -39,7 +39,7 @@ class PluginIPSelfInfo(SearxTestCase):
             sxng_request.preferences = self.pref
             sxng_request.remote_addr = "127.0.0.1"
             sxng_request.headers = {"X-Forwarded-For": "1.2.3.4, 127.0.0.1", "X-Real-IP": "127.0.0.1"}  # type: ignore
-            answer = Answer(results=[], answer=gettext("Your IP is: ") + "127.0.0.1")
+            answer = Answer(answer=gettext("Your IP is: ") + "127.0.0.1")

             search = do_post_search("ip", self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -60,7 +60,7 @@ class PluginIPSelfInfo(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             sxng_request.user_agent = "Dummy agent"  # type: ignore
-            answer = Answer(results=[], answer=gettext("Your user-agent is: ") + "Dummy agent")
+            answer = Answer(answer=gettext("Your user-agent is: ") + "Dummy agent")

             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -101,6 +101,6 @@ class PluginStorage(SearxTestCase):
             ret = self.storage.on_result(
                 sxng_request,
                 get_search_mock("lorem ipsum", user_plugins=["plg001", "plg002"]),
-                Result(results=[]),
+                Result(),
             )
             self.assertFalse(ret)