| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | # SPDX-License-Identifier: AGPL-3.0-or-later | 
					
						
							| 
									
										
										
										
											2021-04-26 20:18:20 +02:00
										 |  |  | # lint: pylint | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | """This is the implementation of the Google WEB engine.  Some of this
 | 
					
						
							|  |  |  | implementations (manly the :py:obj:`get_google_info`) are shared by other | 
					
						
							|  |  |  | engines: | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-21 18:15:40 +02:00
										 |  |  | - :ref:`google images engine` | 
					
						
							|  |  |  | - :ref:`google news engine` | 
					
						
							|  |  |  | - :ref:`google videos engine` | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | - :ref:`google scholar engine` | 
					
						
							|  |  |  | - :ref:`google autocomplete` | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  | """
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | from typing import TYPE_CHECKING | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | import re | 
					
						
							| 
									
										
										
										
											2021-03-18 19:59:01 +01:00
										 |  |  | from urllib.parse import urlencode | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | from lxml import html | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | import babel | 
					
						
							|  |  |  | import babel.core | 
					
						
							|  |  |  | import babel.languages | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | from searx.utils import extract_text, eval_xpath, eval_xpath_list, eval_xpath_getindex | 
					
						
							| 
									
										
										
										
											2023-09-15 09:53:03 +02:00
										 |  |  | from searx.locales import language_tag, region_tag, get_official_locales | 
					
						
							| 
									
										
										
										
											2023-06-25 12:37:31 +02:00
										 |  |  | from searx.network import get  # see https://github.com/searxng/searxng/issues/762 | 
					
						
							| 
									
										
										
										
											2020-11-26 17:22:54 +01:00
										 |  |  | from searx.exceptions import SearxEngineCaptchaException | 
					
						
							| 
									
										
										
										
											2022-10-08 11:32:08 +02:00
										 |  |  | from searx.enginelib.traits import EngineTraits | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | if TYPE_CHECKING: | 
					
						
							|  |  |  |     import logging | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     logger: logging.Logger | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-10-08 11:32:08 +02:00
										 |  |  | traits: EngineTraits | 
					
						
							| 
									
										
										
										
											2020-11-26 17:22:54 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-01-13 11:31:25 +01:00
# about: engine metadata (website, Wikidata id, API info) -- NOTE(review):
# presumably rendered in the engine/preferences UI; verify against the
# project's engine loader.
about = {
    "website": 'https://www.google.com',
    "wikidata_id": 'Q9366',
    "official_api_documentation": 'https://developers.google.com/custom-search/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 15:10:05 +02:00
# engine dependent config
categories = ['general', 'web']
paging = True  # paging is implemented via the `start` offset in request()
time_range_support = True
safesearch = True

# Maps SearXNG's time range names to the code used in Google's `tbs=qdr:<x>`
# URL parameter (appended to the query URL in request()).
time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}

# Filter results. 0: None, 1: Moderate, 2: Strict
# Maps the safesearch level to the value of Google's `safe` URL parameter
# (appended to the query URL in request()).
filter_mapping = {0: 'off', 1: 'medium', 2: 'high'}
					
						
							| 
									
										
										
										
											2015-06-05 11:23:24 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-14 14:40:55 +02:00
# specific xpath variables
# ------------------------

# one result item in Google's HTML answer
results_xpath = './/div[contains(@jscontroller, "SC7lYd")]'
# title of a result item and the @href of the <a> element wrapping the <h3>
title_xpath = './/a/h3[1]'
href_xpath = './/a[h3]/@href'
# content (snippet) of a result item
content_xpath = './/div[@data-sncf]'

# Suggestions are links placed in a *card-section*, we extract only the text
# from the links not the links itself.
suggestion_xpath = '//div[contains(@class, "EIaa9b")]//a'

# UI_ASYNC = 'use_ac:true,_fmt:html' # returns a HTTP 500 when user search for
#                                    # celebrities like '!google natasha allegri'
#                                    # or '!google chris evans'
UI_ASYNC = 'use_ac:true,_fmt:prog'
"""Format of the response from UI's async request."""
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | def get_google_info(params, eng_traits): | 
					
						
							|  |  |  |     """Composing various (language) properties for the google engines (:ref:`google
 | 
					
						
							|  |  |  |     API`). | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-21 18:15:40 +02:00
										 |  |  |     This function is called by the various google engines (:ref:`google web | 
					
						
							|  |  |  |     engine`, :ref:`google images engine`, :ref:`google news engine` and | 
					
						
							|  |  |  |     :ref:`google videos engine`). | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     :param dict param: Request parameters of the engine.  At least | 
					
						
							|  |  |  |         a ``searxng_locale`` key should be in the dictionary. | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     :param eng_traits: Engine's traits fetched from google preferences | 
					
						
							|  |  |  |         (:py:obj:`searx.enginelib.traits.EngineTraits`) | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     :rtype: dict | 
					
						
							|  |  |  |     :returns: | 
					
						
							|  |  |  |         Py-Dictionary with the key/value pairs: | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         language: | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |             The language code that is used by google (e.g. ``lang_en`` or | 
					
						
							|  |  |  |             ``lang_zh-TW``) | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |         country: | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |             The country code that is used by google (e.g. ``US`` or ``TW``) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         locale: | 
					
						
							|  |  |  |             A instance of :py:obj:`babel.core.Locale` build from the | 
					
						
							|  |  |  |             ``searxng_locale`` value. | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |         subdomain: | 
					
						
							|  |  |  |             Google subdomain :py:obj:`google_domains` that fits to the country | 
					
						
							|  |  |  |             code. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         params: | 
					
						
							|  |  |  |             Py-Dictionary with additional request arguments (can be passed to | 
					
						
							|  |  |  |             :py:func:`urllib.parse.urlencode`). | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |             - ``hl`` parameter: specifies the interface language of user interface. | 
					
						
							|  |  |  |             - ``lr`` parameter: restricts search results to documents written in | 
					
						
							|  |  |  |               a particular language. | 
					
						
							|  |  |  |             - ``cr`` parameter: restricts search results to documents | 
					
						
							|  |  |  |               originating in a particular country. | 
					
						
							|  |  |  |             - ``ie`` parameter: sets the character encoding scheme that should | 
					
						
							|  |  |  |               be used to interpret the query string ('utf8'). | 
					
						
							|  |  |  |             - ``oe`` parameter: sets the character encoding scheme that should | 
					
						
							|  |  |  |               be used to decode the XML result ('utf8'). | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  |         headers: | 
					
						
							|  |  |  |             Py-Dictionary with additional HTTP headers (can be passed to | 
					
						
							|  |  |  |             request's headers) | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |             - ``Accept: '*/*`` | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  |     """
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  |     ret_val = { | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |         'language': None, | 
					
						
							|  |  |  |         'country': None, | 
					
						
							|  |  |  |         'subdomain': None, | 
					
						
							|  |  |  |         'params': {}, | 
					
						
							|  |  |  |         'headers': {}, | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |         'cookies': {}, | 
					
						
							|  |  |  |         'locale': None, | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  |     } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     sxng_locale = params.get('searxng_locale', 'all') | 
					
						
							|  |  |  |     try: | 
					
						
							|  |  |  |         locale = babel.Locale.parse(sxng_locale, sep='-') | 
					
						
							|  |  |  |     except babel.core.UnknownLocaleError: | 
					
						
							|  |  |  |         locale = None | 
					
						
							| 
									
										
										
										
											2021-01-26 11:49:27 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     eng_lang = eng_traits.get_language(sxng_locale, 'lang_en') | 
					
						
							|  |  |  |     lang_code = eng_lang.split('_')[-1]  # lang_zh-TW --> zh-TW / lang_en --> en | 
					
						
							|  |  |  |     country = eng_traits.get_region(sxng_locale, eng_traits.all_locale) | 
					
						
							| 
									
										
										
										
											2018-03-01 05:30:48 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # Test zh_hans & zh_hant --> in the topmost links in the result list of list | 
					
						
							|  |  |  |     # TW and HK you should a find wiktionary.org zh_hant link.  In the result | 
					
						
							|  |  |  |     # list of zh-CN should not be no hant link instead you should find | 
					
						
							|  |  |  |     # zh.m.wikipedia.org/zh somewhere in the top. | 
					
						
							| 
									
										
										
										
											2015-05-30 17:41:40 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # '!go æ—¥ :zh-TW' --> https://zh.m.wiktionary.org/zh-hant/%E6%97%A5 | 
					
						
							|  |  |  |     # '!go æ—¥ :zh-CN' --> https://zh.m.wikipedia.org/zh/%E6%97%A5 | 
					
						
							| 
									
										
										
										
											2021-01-26 11:49:27 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     ret_val['language'] = eng_lang | 
					
						
							|  |  |  |     ret_val['country'] = country | 
					
						
							|  |  |  |     ret_val['locale'] = locale | 
					
						
							|  |  |  |     ret_val['subdomain'] = eng_traits.custom['supported_domains'].get(country.upper(), 'www.google.com') | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # hl parameter: | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     #   The hl parameter specifies the interface language (host language) of | 
					
						
							|  |  |  |     #   your user interface. To improve the performance and the quality of your | 
					
						
							|  |  |  |     #   search results, you are strongly encouraged to set this parameter | 
					
						
							|  |  |  |     #   explicitly. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#hlsp | 
					
						
							|  |  |  |     # The Interface Language: | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  |     #   https://developers.google.com/custom-search/docs/xml_results_appendices#interfaceLanguages | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-06-26 13:33:12 +02:00
										 |  |  |     # https://github.com/searxng/searxng/issues/2515#issuecomment-1607150817 | 
					
						
							|  |  |  |     ret_val['params']['hl'] = f'{lang_code}-{country}' | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # lr parameter: | 
					
						
							|  |  |  |     #   The lr (language restrict) parameter restricts search results to | 
					
						
							|  |  |  |     #   documents written in a particular language. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#lrsp | 
					
						
							|  |  |  |     #   Language Collection Values: | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results_appendices#languageCollections | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # | 
					
						
							|  |  |  |     # To select 'all' languages an empty 'lr' value is used. | 
					
						
							|  |  |  |     # | 
					
						
							| 
									
										
										
										
											2023-09-15 09:53:03 +02:00
										 |  |  |     # Different to other google services, Google Scholar supports to select more | 
					
						
							|  |  |  |     # than one language. The languages are separated by a pipe '|' (logical OR). | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # By example: &lr=lang_zh-TW%7Clang_de selects articles written in | 
					
						
							|  |  |  |     # traditional chinese OR german language. | 
					
						
							| 
									
										
										
										
											2021-06-06 08:18:07 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     ret_val['params']['lr'] = eng_lang | 
					
						
							|  |  |  |     if sxng_locale == 'all': | 
					
						
							|  |  |  |         ret_val['params']['lr'] = '' | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # cr parameter: | 
					
						
							|  |  |  |     #   The cr parameter restricts search results to documents originating in a | 
					
						
							|  |  |  |     #   particular country. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#crsp | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-08-23 18:30:21 +02:00
										 |  |  |     # specify a region (country) only if a region is given in the selected | 
					
						
							|  |  |  |     # locale --> https://github.com/searxng/searxng/issues/2672 | 
					
						
							|  |  |  |     ret_val['params']['cr'] = '' | 
					
						
							|  |  |  |     if len(sxng_locale.split('-')) > 1: | 
					
						
							|  |  |  |         ret_val['params']['cr'] = 'country' + country | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-09-15 09:53:03 +02:00
										 |  |  |     # gl parameter: (mandatory by Google News) | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     #   The gl parameter value is a two-letter country code. For WebSearch | 
					
						
							|  |  |  |     #   results, the gl parameter boosts search results whose country of origin | 
					
						
							|  |  |  |     #   matches the parameter value. See the Country Codes section for a list of | 
					
						
							|  |  |  |     #   valid values. | 
					
						
							|  |  |  |     #   Specifying a gl parameter value in WebSearch requests should improve the | 
					
						
							|  |  |  |     #   relevance of results. This is particularly true for international | 
					
						
							|  |  |  |     #   customers and, even more specifically, for customers in English-speaking | 
					
						
							|  |  |  |     #   countries other than the United States. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#glsp | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-06-26 13:33:12 +02:00
										 |  |  |     # https://github.com/searxng/searxng/issues/2515#issuecomment-1606294635 | 
					
						
							|  |  |  |     # ret_val['params']['gl'] = country | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # ie parameter: | 
					
						
							|  |  |  |     #   The ie parameter sets the character encoding scheme that should be used | 
					
						
							|  |  |  |     #   to interpret the query string. The default ie value is latin1. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#iesp | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     ret_val['params']['ie'] = 'utf8' | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # oe parameter: | 
					
						
							|  |  |  |     #   The oe parameter sets the character encoding scheme that should be used | 
					
						
							|  |  |  |     #   to decode the XML result. The default oe value is latin1. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#oesp | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     ret_val['params']['oe'] = 'utf8' | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # num parameter: | 
					
						
							|  |  |  |     #   The num parameter identifies the number of search results to return. | 
					
						
							|  |  |  |     #   The default num value is 10, and the maximum value is 20. If you request | 
					
						
							|  |  |  |     #   more than 20 results, only 20 results will be returned. | 
					
						
							|  |  |  |     #   https://developers.google.com/custom-search/docs/xml_results#numsp | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # HINT: seems to have no effect (tested in google WEB & Images) | 
					
						
							|  |  |  |     # ret_val['params']['num'] = 20 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # HTTP headers | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     ret_val['headers']['Accept'] = '*/*' | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # Cookies | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # - https://github.com/searxng/searxng/pull/1679#issuecomment-1235432746 | 
					
						
							|  |  |  |     # - https://github.com/searxng/searxng/issues/1555 | 
					
						
							|  |  |  |     ret_val['cookies']['CONSENT'] = "YES+" | 
					
						
							| 
									
										
										
										
											2021-06-11 16:06:36 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-01-26 11:49:27 +01:00
										 |  |  |     return ret_val | 
					
						
							| 
									
										
										
										
											2014-09-14 14:40:55 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-01-22 18:49:45 +01:00
										 |  |  | def detect_google_sorry(resp): | 
					
						
							| 
									
										
										
										
											2021-03-18 19:59:01 +01:00
										 |  |  |     if resp.url.host == 'sorry.google.com' or resp.url.path.startswith('/sorry'): | 
					
						
							| 
									
										
										
										
											2021-01-22 18:49:45 +01:00
										 |  |  |         raise SearxEngineCaptchaException() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-08 00:46:03 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | def request(query, params): | 
					
						
							|  |  |  |     """Google search request""" | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     # pylint: disable=line-too-long | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  |     offset = (params['pageno'] - 1) * 10 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     google_info = get_google_info(params, traits) | 
					
						
							| 
									
										
										
										
											2021-06-21 12:18:28 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-01-26 11:49:27 +01:00
										 |  |  |     # https://www.google.de/search?q=corona&hl=de&lr=lang_de&start=0&tbs=qdr%3Ad&safe=medium | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |     query_url = ( | 
					
						
							|  |  |  |         'https://' | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |         + google_info['subdomain'] | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |         + '/search' | 
					
						
							|  |  |  |         + "?" | 
					
						
							|  |  |  |         + urlencode( | 
					
						
							|  |  |  |             { | 
					
						
							|  |  |  |                 'q': query, | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |                 **google_info['params'], | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |                 'filter': '0', | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |                 'start': offset, | 
					
						
							|  |  |  |                 # 'vet': '12ahUKEwik3ZbIzfn7AhXMX_EDHbUDBh0QxK8CegQIARAC..i', | 
					
						
							|  |  |  |                 # 'ved': '2ahUKEwik3ZbIzfn7AhXMX_EDHbUDBh0Q_skCegQIARAG', | 
					
						
							|  |  |  |                 # 'cs' : 1, | 
					
						
							|  |  |  |                 # 'sa': 'N', | 
					
						
							|  |  |  |                 # 'yv': 3, | 
					
						
							|  |  |  |                 # 'prmd': 'vin', | 
					
						
							|  |  |  |                 # 'ei': 'GASaY6TxOcy_xc8PtYeY6AE', | 
					
						
							|  |  |  |                 # 'sa': 'N', | 
					
						
							|  |  |  |                 # 'sstk': 'AcOHfVkD7sWCSAheZi-0tx_09XDO55gTWY0JNq3_V26cNN-c8lfD45aZYPI8s_Bqp8s57AHz5pxchDtAGCA_cikAWSjy9kw3kgg' | 
					
						
							|  |  |  |                 # formally known as use_mobile_ui | 
					
						
							|  |  |  |                 'asearch': 'arc', | 
					
						
							|  |  |  |                 'async': UI_ASYNC, | 
					
						
							| 
									
										
										
										
											2021-12-27 09:26:22 +01:00
										 |  |  |             } | 
					
						
							|  |  |  |         ) | 
					
						
							|  |  |  |     ) | 
					
						
							| 
									
										
										
										
											2014-09-01 15:10:05 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  |     if params['time_range'] in time_range_dict: | 
					
						
							|  |  |  |         query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]}) | 
					
						
							|  |  |  |     if params['safesearch']: | 
					
						
							|  |  |  |         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]}) | 
					
						
							| 
									
										
										
										
											2021-01-26 11:49:27 +01:00
										 |  |  |     params['url'] = query_url | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     params['cookies'] = google_info['cookies'] | 
					
						
							|  |  |  |     params['headers'].update(google_info['headers']) | 
					
						
							| 
									
										
										
										
											2020-07-07 21:50:59 +02:00
										 |  |  |     return params | 
					
						
							| 
									
										
										
										
											2014-01-29 19:28:38 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-08 00:46:03 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-03-28 14:39:16 +02:00
# Shortened example of the inline script section this pattern matches:
# =26;[3,"dimg_ZNMiZPCqE4apxc8P3a2tuAQ_137"]a87;data:image/jpeg;base64,/9j/4AAQSkZJRgABA
# ...6T+9Nl4cnD+gr9OK8I56/tX3l86nWYw//2Q==26;
# Group 1 captures the image element id (``dimg_...``), group 2 the
# ``data:image/...;base64,...`` URI up to the next ``;`` separator.
RE_DATA_IMAGE = re.compile(r'"(dimg_[^"]*)"[^;]*;(data:image[^;]*;[^;]*);')
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | def _parse_data_images(dom): | 
					
						
							|  |  |  |     data_image_map = {} | 
					
						
							| 
									
										
										
										
											2023-03-28 14:39:16 +02:00
										 |  |  |     for img_id, data_image in RE_DATA_IMAGE.findall(dom.text_content()): | 
					
						
							|  |  |  |         end_pos = data_image.rfind('=') | 
					
						
							|  |  |  |         if end_pos > 0: | 
					
						
							|  |  |  |             data_image = data_image[: end_pos + 1] | 
					
						
							|  |  |  |         data_image_map[img_id] = data_image | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
										 |  |  |     logger.debug('data:image objects --> %s', list(data_image_map.keys())) | 
					
						
							|  |  |  |     return data_image_map | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-01-29 19:28:38 +01:00
def response(resp):
    """Get response from google's search request.

    Parses the Google WEB result page into a list of result dicts:
    answer items (featured snippets), ordinary web results (url, title,
    content, optional ``img_src`` thumbnail) and query suggestions.
    """
    # pylint: disable=too-many-branches, too-many-statements
    # raises SearxEngineCaptchaException on Google's "sorry" page
    detect_google_sorry(resp)

    results = []

    # convert the text to dom
    dom = html.fromstring(resp.text)
    # map of image element id --> inline data:image URI, used below to
    # resolve thumbnails whose <img src> is only a data:image stub
    data_image_map = _parse_data_images(dom)

    # results --> answer
    answer_list = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]')
    for item in answer_list:
        results.append(
            {
                'answer': item.xpath("normalize-space()"),
                # first link of the surrounding container, or None if absent
                'url': (eval_xpath(item, '../..//a/@href') + [None])[0],
            }
        )

    # parse results

    for result in eval_xpath_list(dom, results_xpath):  # pylint: disable=too-many-nested-blocks

        try:
            title_tag = eval_xpath_getindex(result, title_xpath, 0, default=None)
            if title_tag is None:
                # this not one of the common google results *section*
                logger.debug('ignoring item from the result_xpath list: missing title')
                continue
            title = extract_text(title_tag)

            url = eval_xpath_getindex(result, href_xpath, 0, None)
            if url is None:
                logger.debug('ignoring item from the result_xpath list: missing url of title "%s"', title)
                continue

            content_nodes = eval_xpath(result, content_xpath)
            content = extract_text(content_nodes)

            # items without content are section headers etc., not results
            if not content:
                logger.debug('ignoring item from the result_xpath list: missing content of title "%s"', title)
                continue

            img_src = content_nodes[0].xpath('.//img/@src')
            if img_src:
                img_src = img_src[0]
                if img_src.startswith('data:image'):
                    # inline stub --> look up the full data:image URI by the
                    # element id; .get() yields None when the id is unknown
                    img_id = content_nodes[0].xpath('.//img/@id')
                    if img_id:
                        img_src = data_image_map.get(img_id[0])
            else:
                img_src = None

            results.append({'url': url, 'title': title, 'content': content, 'img_src': img_src})

        except Exception as e:  # pylint: disable=broad-except
            # a broken single result must not abort parsing of the others
            logger.error(e, exc_info=True)
            continue

    # parse suggestion
    for suggestion in eval_xpath_list(dom, suggestion_xpath):
        # append suggestion
        results.append({'suggestion': extract_text(suggestion)})

    # return results
    return results
					
						
							| 
									
										
										
										
											2014-09-14 14:40:55 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2020-07-08 00:46:03 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-11-06 03:51:38 +01:00
|  |  |  | # fetch supported languages, regions and domains from Google's preferences page | 
					
						
							| 
									
										
										
										
											2022-10-08 11:32:08 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
# Google regions that are skipped in fetch_traits(): their official
# language is not in Google's language list, so they cannot be mapped to a
# <language>-<country> locale.
skip_countries = [
    # official language of google-country not in google-languages
    'AL',  # Albania (sq)
    'AZ',  # Azerbaijan (az)
    'BD',  # Bangladesh (bn)
    'BN',  # Brunei Darussalam (ms)
    'BT',  # Bhutan (dz)
    'ET',  # Ethiopia (am)
    'GE',  # Georgia (ka, os)
    'GL',  # Greenland (kl)
    'KH',  # Cambodia (km)
    'LA',  # Laos (lo)
    'LK',  # Sri Lanka (si, ta)
    'ME',  # Montenegro (sr)
    'MK',  # North Macedonia (mk, sq)
    'MM',  # Myanmar (my)
    'MN',  # Mongolia (mn)
    'MV',  # Maldives (dv) // dv_MV is unknown by babel
    'MY',  # Malaysia (ms)
    'NP',  # Nepal (ne)
    'TJ',  # Tajikistan (tg)
    'TM',  # Turkmenistan (tk)
    'UZ',  # Uzbekistan (uz)
]
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-04 22:57:22 +01:00
def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
    """Fetch languages, regions and (optionally) the supported google.XX
    domains from Google's preferences page and fill *engine_traits*.

    :param engine_traits: the traits object to populate in place
    :param add_domains: also fetch https://www.google.com/supported_domains
        and store them in ``engine_traits.custom['supported_domains']``
    :raises RuntimeError: when one of the Google endpoints does not answer OK
    """
    # pylint: disable=import-outside-toplevel, too-many-branches

    engine_traits.custom['supported_domains'] = {}

    resp = get('https://www.google.com/preferences')
    if not resp.ok:  # type: ignore
        raise RuntimeError("Response from Google's preferences is not OK.")

    dom = html.fromstring(resp.text)  # type: ignore

    # supported language codes

    # Google's 'no' is mapped to babel's 'nb' (Norwegian Bokmål)
    lang_map = {'no': 'nb'}
    for x in eval_xpath_list(dom, '//*[@id="langSec"]//input[@name="lr"]'):

        # input values look like 'lang_<code>'; keep only the code part
        eng_lang = x.get("value").split('_')[-1]
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            print("ERROR: %s -> %s is unknown by babel" % (x.get("data-name"), eng_lang))
            continue
        sxng_lang = language_tag(locale)

        # keep the first mapping; report when a later google code collides
        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = 'lang_' + eng_lang

    # alias languages
    engine_traits.languages['zh'] = 'lang_zh-CN'

    # supported region codes

    for x in eval_xpath_list(dom, '//*[@name="region"]/..//input[@name="region"]'):
        eng_country = x.get("value")

        if eng_country in skip_countries:
            continue
        if eng_country == 'ZZ':
            # 'ZZ' is Google's "all regions" pseudo country
            engine_traits.all_locale = 'ZZ'
            continue

        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)

        if not sxng_locales:
            print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
            continue

        for sxng_locale in sxng_locales:
            engine_traits.regions[region_tag(sxng_locale)] = eng_country

    # alias regions
    engine_traits.regions['zh-CN'] = 'HK'

    # supported domains

    if add_domains:
        resp = get('https://www.google.com/supported_domains')
        if not resp.ok:  # type: ignore
            raise RuntimeError("Response from https://www.google.com/supported_domains is not OK.")

        for domain in resp.text.split():  # type: ignore
            domain = domain.strip()
            if not domain or domain in [
                '.google.com',
            ]:
                continue
            # domain is e.g. '.google.co.uk' --> region 'UK'
            region = domain.split('.')[-1].upper()
            engine_traits.custom['supported_domains'][region] = 'www' + domain  # type: ignore
            if region == 'HK':
                # There is no google.cn, we use .com.hk for zh-CN
                engine_traits.custom['supported_domains']['CN'] = 'www' + domain  # type: ignore