#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later

"""Fetch website descriptions from the websites themselves and from the
:origin:`searx/engines/wikidata.py` engine.

Output file: :origin:`searx/data/engine_descriptions.json`.

"""

# pylint: disable=invalid-name, global-statement
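# Overview of the update pipeline (see main() below):
#   1. initialize() loads the engines and maps wikidata IDs to engine names
#   2. fetch_wikidata_descriptions() pulls item descriptions from Wikidata via SPARQL
#   3. fetch_wikipedia_descriptions() pulls article summaries from Wikipedia
#   4. fetch_website_descriptions() falls back to scraping each engine's front page
#   5. get_output() deduplicates the collected descriptions and main() writes the JSON file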

import json
from urllib.parse import urlparse
from os.path import join

from lxml.html import fromstring

from searx.engines import wikidata, set_loggers
from searx.utils import extract_text, match_language
from searx.locales import LOCALE_NAMES, locales_initialize
from searx import searx_dir
from searx.utils import gen_useragent, detect_language
import searx.search
import searx.network

set_loggers(wikidata, 'wikidata')
locales_initialize()

SPARQL_WIKIPEDIA_ARTICLE = """
SELECT DISTINCT ?item ?name
WHERE {
  hint:Query hint:optimizer "None".
  VALUES ?item { %IDS% }
  ?article schema:about ?item ;
              schema:inLanguage ?lang ;
              schema:name ?name ;
              schema:isPartOf [ wikibase:wikiGroup "wikipedia" ] .
  FILTER(?lang in (%LANGUAGES_SPARQL%)) .
  FILTER (!CONTAINS(?name, ':')) .
}
"""

SPARQL_DESCRIPTION = """
SELECT DISTINCT ?item ?itemDescription
WHERE {
  VALUES ?item { %IDS% }
  ?item schema:description ?itemDescription .
  FILTER (lang(?itemDescription) in (%LANGUAGES_SPARQL%))
}
ORDER BY ?itemLang
"""
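# Both queries use plain string placeholders: initialize() below replaces %IDS%
# with a space separated list of "wd:<ID>" values (one per engine) and
# %LANGUAGES_SPARQL% with a comma separated list of quoted Wikipedia language codes.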

NOT_A_DESCRIPTION = [
    'web site',
    'site web',
    'komputa serĉilo',
    'interreta serĉilo',
    'bilaketa motor',
    'web search engine',
    'wikimedia täpsustuslehekülg',
]

SKIP_ENGINE_SOURCE = [
    # fmt: off
    ('gitlab', 'wikidata')
    # descriptions are about wikipedia disambiguation pages
    # fmt: on
]

LANGUAGES = LOCALE_NAMES.keys()
WIKIPEDIA_LANGUAGES = {'language': 'wikipedia_language'}
LANGUAGES_SPARQL = ''
IDS = None

descriptions = {}
wd_to_engine_name = {}


def normalize_description(description):
    for c in [chr(c) for c in range(0, 31)]:
        description = description.replace(c, ' ')
    description = ' '.join(description.strip().split())
    return description


def update_description(engine_name, lang, description, source, replace=True):
    if not isinstance(description, str):
        return
    description = normalize_description(description)
    if description.lower() == engine_name.lower():
        return
    if description.lower() in NOT_A_DESCRIPTION:
        return
    if (engine_name, source) in SKIP_ENGINE_SOURCE:
        return
    if ' ' not in description:
        # skip single-word descriptions (like "website")
        return
    if replace or lang not in descriptions[engine_name]:
        descriptions[engine_name][lang] = [description, source]


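# get_wikipedia_summary() builds the request URL by reusing the wikipedia
# engine's own request() hook, fetches it and returns the 'extract' field of
# the JSON response (or None on any error).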
def get_wikipedia_summary(lang, pageid):
    params = {'language': lang.replace('_', '-'), 'headers': {}}
    searx.engines.engines['wikipedia'].request(pageid, params)
    try:
        response = searx.network.get(params['url'], headers=params['headers'], timeout=10)
        response.raise_for_status()
        api_result = json.loads(response.text)
        return api_result.get('extract')
    except Exception:  # pylint: disable=broad-except
        return None


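# get_website_description() fetches an engine's front page (optionally with an
# Accept-Language preference) and extracts a description from the "description"
# meta tag, the "og:description" meta tag or the <title>, together with a
# best-effort guess of the page language.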
def get_website_description(url, lang1, lang2=None):
    headers = {
        'User-Agent': gen_useragent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'DNT': '1',
        'Upgrade-Insecure-Requests': '1',
        'Sec-GPC': '1',
        'Cache-Control': 'max-age=0',
    }
    if lang1 is not None:
        lang_list = [lang1]
        if lang2 is not None:
            lang_list.append(lang2)
        headers['Accept-Language'] = f'{",".join(lang_list)};q=0.8'
    try:
        response = searx.network.get(url, headers=headers, timeout=10)
        response.raise_for_status()
    except Exception:  # pylint: disable=broad-except
        return (None, None)

    try:
        html = fromstring(response.text)
    except ValueError:
        html = fromstring(response.content)

    description = extract_text(html.xpath('/html/head/meta[@name="description"]/@content'))
    if not description:
        description = extract_text(html.xpath('/html/head/meta[@property="og:description"]/@content'))
    if not description:
        description = extract_text(html.xpath('/html/head/title'))
    lang = extract_text(html.xpath('/html/@lang'))
    if lang is None and lang1:
        lang = lang1
    lang = detect_language(description) or lang or 'en'
    lang = lang.split('_')[0]
    lang = lang.split('-')[0]
    return (lang, description)


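# initialize() loads the engine list via searx.search.initialize(), maps every
# locale to its Wikipedia language code, collects the wikidata IDs declared in
# each engine's "about" metadata, and builds the IDS / LANGUAGES_SPARQL values
# substituted into the SPARQL queries above.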
def initialize():
    global IDS, WIKIPEDIA_LANGUAGES, LANGUAGES_SPARQL
    searx.search.initialize()
    wikipedia_engine = searx.engines.engines['wikipedia']
    WIKIPEDIA_LANGUAGES = {language: wikipedia_engine.url_lang(language.replace('_', '-')) for language in LANGUAGES}
    WIKIPEDIA_LANGUAGES['nb_NO'] = 'no'
    LANGUAGES_SPARQL = ', '.join(f"'{l}'" for l in set(WIKIPEDIA_LANGUAGES.values()))
    for engine_name, engine in searx.engines.engines.items():
        descriptions[engine_name] = {}
        wikidata_id = getattr(engine, "about", {}).get('wikidata_id')
        if wikidata_id is not None:
            wd_to_engine_name.setdefault(wikidata_id, set()).add(engine_name)

    IDS = ' '.join(list(map(lambda wd_id: 'wd:' + wd_id, wd_to_engine_name.keys())))


def fetch_wikidata_descriptions():
    searx.network.set_timeout_for_thread(60)
    result = wikidata.send_wikidata_query(
        SPARQL_DESCRIPTION.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
    )
    if result is not None:
        for binding in result['results']['bindings']:
            wikidata_id = binding['item']['value'].replace('http://www.wikidata.org/entity/', '')
            wikidata_lang = binding['itemDescription']['xml:lang']
            description = binding['itemDescription']['value']
            for engine_name in wd_to_engine_name[wikidata_id]:
                for lang in LANGUAGES:
                    if WIKIPEDIA_LANGUAGES[lang] == wikidata_lang:
                        update_description(engine_name, lang, description, 'wikidata')


def fetch_wikipedia_descriptions():
    result = wikidata.send_wikidata_query(
        SPARQL_WIKIPEDIA_ARTICLE.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
    )
    if result is not None:
        for binding in result['results']['bindings']:
            wikidata_id = binding['item']['value'].replace('http://www.wikidata.org/entity/', '')
            wikidata_lang = binding['name']['xml:lang']
            pageid = binding['name']['value']
            for engine_name in wd_to_engine_name[wikidata_id]:
                for lang in LANGUAGES:
                    if WIKIPEDIA_LANGUAGES[lang] == wikidata_lang:
                        description = get_wikipedia_summary(lang, pageid)
                        update_description(engine_name, lang, description, 'wikipedia')


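# normalize_url() reduces an engine's search_url / base_url template to the
# site's front page, e.g. (hypothetical URL)
# 'https://api.example.org/search?q=...&language={language}' -> 'https://example.org/'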
def normalize_url(url):
    url = url.replace('{language}', 'en')
    url = urlparse(url)._replace(path='/', params='', query='', fragment='').geturl()
    url = url.replace('https://api.', 'https://')
    return url


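# fetch_website_description() first fetches the engine's front page without a
# language preference, then retries with an Accept-Language header for each
# locale that still lacks a description; it stops early once the same language
# comes back for 6 different Accept-Language requests (the site ignores them).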
def fetch_website_description(engine_name, website):
    default_lang, default_description = get_website_description(website, None, None)
    if default_lang is None or default_description is None:
        # the front page can't be fetched: skip this engine
        return

    wikipedia_languages_r = {V: K for K, V in WIKIPEDIA_LANGUAGES.items()}
    languages = ['en', 'es', 'pt', 'ru', 'tr', 'fr']
    languages = languages + [l for l in LANGUAGES if l not in languages]

    previous_matched_lang = None
    previous_count = 0
    for lang in languages:
        if lang not in descriptions[engine_name]:
            fetched_lang, desc = get_website_description(website, lang, WIKIPEDIA_LANGUAGES[lang])
            if fetched_lang is None or desc is None:
                continue
            matched_lang = match_language(fetched_lang, LANGUAGES, fallback=None)
            if matched_lang is None:
                fetched_wikipedia_lang = match_language(fetched_lang, WIKIPEDIA_LANGUAGES.values(), fallback=None)
                matched_lang = wikipedia_languages_r.get(fetched_wikipedia_lang)
            if matched_lang is not None:
                update_description(engine_name, matched_lang, desc, website, replace=False)
            # check whether the description changes with different Accept-Language values
            if matched_lang == previous_matched_lang:
                previous_count += 1
                if previous_count == 6:
                    # the website has returned the same description for
                    # 6 different Accept-Language values: stop now
                    break
            else:
                previous_matched_lang = matched_lang
                previous_count = 0


def fetch_website_descriptions():
    for engine_name, engine in searx.engines.engines.items():
        website = getattr(engine, "about", {}).get('website')
        if website is None and hasattr(engine, "search_url"):
            website = normalize_url(getattr(engine, "search_url"))
        if website is None and hasattr(engine, "base_url"):
            website = normalize_url(getattr(engine, "base_url"))
        if website is not None:
            fetch_website_description(engine_name, website)


def get_engine_descriptions_filename():
    return join(join(searx_dir, "data"), "engine_descriptions.json")


def get_output():
    """
    Transform descriptions[engine][language] = [description, source]
    into

    * output[language][engine] = description_and_source
    * description_and_source can be:
       * [description, source]
       * description (if source = "wikipedia")
       * [f"engine:lang", "ref"] (reference to another existing description)
    """
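    # A hypothetical example of the resulting structure (values invented for
    # illustration only):
    #   output['en']['wikipedia'] == 'free multilingual online encyclopedia'   (string: source was 'wikipedia')
    #   output['en']['wikinews']  == ['free-content news wiki', 'https://www.wikinews.org/']
    #   output['de']['wikinews']  == ['wikinews:en', 'ref']   (same text as the English entry)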
    output = {locale: {} for locale in LOCALE_NAMES}

    seen_descriptions = {}

    for engine_name, lang_descriptions in descriptions.items():
        for language, description in lang_descriptions.items():
            if description[0] in seen_descriptions:
                ref = seen_descriptions[description[0]]
                description = [f'{ref[0]}:{ref[1]}', 'ref']
            else:
                seen_descriptions[description[0]] = (engine_name, language)
                if description[1] == 'wikipedia':
                    description = description[0]
            output.setdefault(language, {}).setdefault(engine_name, description)

    return output


def main():
    initialize()
    print('Fetching wikidata descriptions')
    fetch_wikidata_descriptions()
    print('Fetching wikipedia descriptions')
    fetch_wikipedia_descriptions()
    print('Fetching website descriptions')
    fetch_website_descriptions()

    output = get_output()
    with open(get_engine_descriptions_filename(), 'w', encoding='utf8') as f:
        f.write(json.dumps(output, indent=1, separators=(',', ':'), ensure_ascii=False))


if __name__ == "__main__":
    main()