[enh] google engine: supports "default language"
Same behaviour as Whoogle [1].  Only the google engine with the
"Default language" choice "(all)" is changed by this patch.
When searching for a local place, the results are in the expected language,
without missing results [2]:
  > When a language is not specified, the language interpretation is left up to
  > Google to decide how the search results should be delivered.
The query parameters are copied from Whoogle.  With the ``all`` language:
- add parameter ``source=lnt``
- don't use parameter ``lr``
- don't add an ``Accept-Language`` HTTP header.
The new signature of function ``get_lang_info()`` is:
    lang_info = get_lang_info(params, lang_list, custom_aliases, supported_any_language)
Argument ``supported_any_language`` is True for google.py and False for the other
google engines.  With this patch the function now returns:
- query parameters: ``lang_info['params']``
- HTTP headers: ``lang_info['headers']``
- and as before this patch:
  - ``lang_info['subdomain']``
  - ``lang_info['country']``
  - ``lang_info['language']``
[1] https://github.com/benbusby/whoogle-search
[2] https://github.com/benbusby/whoogle-search/releases/tag/v0.5.4
			
			
This commit is contained in:
		
							parent
							
								
									73dbee45a6
								
							
						
					
					
						commit
						1c67b6aece
					
				| @ -133,11 +133,12 @@ suggestion_xpath = '//div[contains(@class, "card-section")]//a' | ||||
| spelling_suggestion_xpath = '//div[@class="med"]/p/a' | ||||
| 
 | ||||
| 
 | ||||
| def get_lang_info(params, lang_list, custom_aliases): | ||||
| def get_lang_info(params, lang_list, custom_aliases, supported_any_language): | ||||
|     ret_val = {} | ||||
| 
 | ||||
|     _lang = params['language'] | ||||
|     if _lang.lower() == 'all': | ||||
|     _any_language = _lang.lower() == 'all' | ||||
|     if _any_language: | ||||
|         _lang = 'en-US' | ||||
| 
 | ||||
|     language = match_language(_lang, lang_list, custom_aliases) | ||||
| @ -159,31 +160,36 @@ def get_lang_info(params, lang_list, custom_aliases): | ||||
|     # the combination (en-US, en-EN, de-DE, de-AU, fr-FR, fr-FR) | ||||
|     lang_country = '%s-%s' % (language, country) | ||||
| 
 | ||||
|     # Accept-Language: fr-CH, fr;q=0.8, en;q=0.6, *;q=0.5 | ||||
|     ret_val['Accept-Language'] = ','.join([ | ||||
|         lang_country, | ||||
|         language + ';q=0.8,', | ||||
|         'en;q=0.6', | ||||
|         '*;q=0.5', | ||||
|     ]) | ||||
| 
 | ||||
|     # subdomain | ||||
|     ret_val['subdomain']  = 'www.' + google_domains.get(country.upper(), 'google.com') | ||||
| 
 | ||||
|     ret_val['params'] = {} | ||||
|     ret_val['headers'] = {} | ||||
| 
 | ||||
|     if _any_language and supported_any_language: | ||||
|         # based on whoogle | ||||
|         ret_val['params']['source'] = 'lnt' | ||||
|     else: | ||||
|         # Accept-Language: fr-CH, fr;q=0.8, en;q=0.6, *;q=0.5 | ||||
|         ret_val['headers']['Accept-Language'] = ','.join([ | ||||
|             lang_country, | ||||
|             language + ';q=0.8,', | ||||
|             'en;q=0.6', | ||||
|             '*;q=0.5', | ||||
|         ]) | ||||
| 
 | ||||
|         # lr parameter: | ||||
|         #   https://developers.google.com/custom-search/docs/xml_results#lrsp | ||||
|         # Language Collection Values: | ||||
|         #   https://developers.google.com/custom-search/docs/xml_results_appendices#languageCollections | ||||
|         ret_val['params']['lr'] = "lang_" + lang_list.get(lang_country, language) | ||||
| 
 | ||||
|     ret_val['params']['hl'] = lang_list.get(lang_country, language) | ||||
| 
 | ||||
|     # hl parameter: | ||||
|     #   https://developers.google.com/custom-search/docs/xml_results#hlsp The | ||||
|     # Interface Language: | ||||
|     #   https://developers.google.com/custom-search/docs/xml_results_appendices#interfaceLanguages | ||||
| 
 | ||||
|     ret_val['hl'] = lang_list.get(lang_country, language) | ||||
| 
 | ||||
|     # lr parameter: | ||||
|     #   https://developers.google.com/custom-search/docs/xml_results#lrsp | ||||
|     # Language Collection Values: | ||||
|     #   https://developers.google.com/custom-search/docs/xml_results_appendices#languageCollections | ||||
| 
 | ||||
|     ret_val['lr'] = "lang_" + lang_list.get(lang_country, language) | ||||
| 
 | ||||
|     return ret_val | ||||
| 
 | ||||
| def detect_google_sorry(resp): | ||||
| @ -198,14 +204,13 @@ def request(query, params): | ||||
| 
 | ||||
|     lang_info = get_lang_info( | ||||
|         # pylint: disable=undefined-variable | ||||
|         params, supported_languages, language_aliases | ||||
|         params, supported_languages, language_aliases, True | ||||
|     ) | ||||
| 
 | ||||
|     # https://www.google.de/search?q=corona&hl=de&lr=lang_de&start=0&tbs=qdr%3Ad&safe=medium | ||||
|     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({ | ||||
|         'q': query, | ||||
|         'hl': lang_info['hl'], | ||||
|         'lr': lang_info['lr'], | ||||
|         **lang_info['params'], | ||||
|         'ie': "utf8", | ||||
|         'oe': "utf8", | ||||
|         'start': offset, | ||||
| @ -219,8 +224,8 @@ def request(query, params): | ||||
|     logger.debug("query_url --> %s", query_url) | ||||
|     params['url'] = query_url | ||||
| 
 | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language']) | ||||
|     params['headers']['Accept-Language'] = lang_info['Accept-Language'] | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language')) | ||||
|     params['headers'].update(lang_info['headers']) | ||||
|     params['headers']['Accept'] = ( | ||||
|         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' | ||||
|     ) | ||||
|  | ||||
| @ -101,14 +101,13 @@ def request(query, params): | ||||
| 
 | ||||
|     lang_info = get_lang_info( | ||||
|         # pylint: disable=undefined-variable | ||||
|         params, supported_languages, language_aliases | ||||
|         params, supported_languages, language_aliases, False | ||||
|     ) | ||||
| 
 | ||||
|     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({ | ||||
|         'q': query, | ||||
|         'tbm': "isch", | ||||
|         'hl': lang_info['hl'], | ||||
|         'lr': lang_info['lr'], | ||||
|         **lang_info['params'], | ||||
|         'ie': "utf8", | ||||
|         'oe': "utf8", | ||||
|         'num': 30, | ||||
| @ -122,8 +121,8 @@ def request(query, params): | ||||
|     logger.debug("query_url --> %s", query_url) | ||||
|     params['url'] = query_url | ||||
| 
 | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language']) | ||||
|     params['headers']['Accept-Language'] = lang_info['Accept-Language'] | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language')) | ||||
|     params['headers'].update(lang_info['headers']) | ||||
|     params['headers']['Accept'] = ( | ||||
|         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' | ||||
|     ) | ||||
|  | ||||
| @ -83,7 +83,7 @@ def request(query, params): | ||||
| 
 | ||||
|     lang_info = get_lang_info( | ||||
|         # pylint: disable=undefined-variable | ||||
|         params, supported_languages, language_aliases | ||||
|         params, supported_languages, language_aliases, False | ||||
|     ) | ||||
| 
 | ||||
|     # google news has only one domain | ||||
| @ -102,8 +102,7 @@ def request(query, params): | ||||
| 
 | ||||
|     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({ | ||||
|         'q': query, | ||||
|         'hl': lang_info['hl'], | ||||
|         'lr': lang_info['lr'], | ||||
|         **lang_info['params'], | ||||
|         'ie': "utf8", | ||||
|         'oe': "utf8", | ||||
|         'gl': lang_info['country'], | ||||
| @ -112,8 +111,8 @@ def request(query, params): | ||||
|     logger.debug("query_url --> %s", query_url) | ||||
|     params['url'] = query_url | ||||
| 
 | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language']) | ||||
|     params['headers']['Accept-Language'] = lang_info['Accept-Language'] | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language')) | ||||
|     params['headers'].update(lang_info['headers']) | ||||
|     params['headers']['Accept'] = ( | ||||
|         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' | ||||
|         ) | ||||
|  | ||||
| @ -81,15 +81,14 @@ def request(query, params): | ||||
| 
 | ||||
|         # params, {}, language_aliases | ||||
| 
 | ||||
|         params, supported_languages, language_aliases | ||||
|         params, supported_languages, language_aliases, False | ||||
|     ) | ||||
|     # subdomain is: scholar.google.xy | ||||
|     lang_info['subdomain'] = lang_info['subdomain'].replace("www.", "scholar.") | ||||
| 
 | ||||
|     query_url = 'https://'+ lang_info['subdomain'] + '/scholar' + "?" + urlencode({ | ||||
|         'q':  query, | ||||
|         'hl': lang_info['hl'], | ||||
|         'lr': lang_info['lr'], | ||||
|         **lang_info['params'], | ||||
|         'ie': "utf8", | ||||
|         'oe':  "utf8", | ||||
|         'start' : offset, | ||||
| @ -100,8 +99,8 @@ def request(query, params): | ||||
|     logger.debug("query_url --> %s", query_url) | ||||
|     params['url'] = query_url | ||||
| 
 | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language']) | ||||
|     params['headers']['Accept-Language'] = lang_info['Accept-Language'] | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language')) | ||||
|     params['headers'].update(lang_info['headers']) | ||||
|     params['headers']['Accept'] = ( | ||||
|         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' | ||||
|     ) | ||||
|  | ||||
| @ -119,14 +119,13 @@ def request(query, params): | ||||
| 
 | ||||
|     lang_info = get_lang_info( | ||||
|         # pylint: disable=undefined-variable | ||||
|         params, supported_languages, language_aliases | ||||
|         params, supported_languages, language_aliases, False | ||||
|     ) | ||||
| 
 | ||||
|     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({ | ||||
|         'q':   query, | ||||
|         'tbm': "vid", | ||||
|         'hl': lang_info['hl'], | ||||
|         'lr': lang_info['lr'], | ||||
|         **lang_info['params'], | ||||
|         'ie': "utf8", | ||||
|         'oe': "utf8", | ||||
|     }) | ||||
| @ -139,8 +138,8 @@ def request(query, params): | ||||
|     logger.debug("query_url --> %s", query_url) | ||||
|     params['url'] = query_url | ||||
| 
 | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language']) | ||||
|     params['headers']['Accept-Language'] = lang_info['Accept-Language'] | ||||
|     logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language')) | ||||
|     params['headers'].update(lang_info['headers']) | ||||
|     params['headers']['Accept'] = ( | ||||
|         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' | ||||
|         ) | ||||
|  | ||||
		Loading…
	
		Reference in New Issue
	
	Block a user