# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

import re
from collections import defaultdict
from operator import itemgetter
from threading import RLock
from typing import List, NamedTuple, Set
from urllib.parse import urlparse, unquote

from searx import logger
from searx.engines import engines
from searx.metrics import histogram_observe, counter_add, count_error


# note: '-' is escaped so the class matches the literal characters '(', ')',
# '-' and '_' instead of an accidental ')-_' range
CONTENT_LEN_IGNORED_CHARS_REGEX = re.compile(r'[,;:!?\./\\\\ ()\-_]', re.M | re.U)
WHITESPACE_REGEX = re.compile('( |\t|\n)+', re.M | re.U)
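
# For illustration: WHITESPACE_REGEX.sub(' ', 'a \t\n b') returns 'a b'; any
# run of spaces, tabs and newlines collapses to a single space.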


# return the meaningful length of the content for a result
def result_content_len(content):
    if isinstance(content, str):
        return len(CONTENT_LEN_IGNORED_CHARS_REGEX.sub('', content))
    return 0
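
# For illustration: result_content_len('hello, world!') returns 10; the comma,
# the space and the exclamation mark are ignored, leaving 'helloworld'.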


def compare_urls(url_a, url_b):
    """Lazy comparison between two URLs.

    "www.example.com" and "example.com" are equal.
    "www.example.com/path/" and "www.example.com/path" are equal.
    "https://www.example.com/" and "http://www.example.com/" are equal.

    Args:
        url_a (ParseResult): first URL
        url_b (ParseResult): second URL

    Returns:
        bool: True if url_a and url_b are equal
    """
    # ignore www. in comparison
    if url_a.netloc.startswith('www.'):
        host_a = url_a.netloc.replace('www.', '', 1)
    else:
        host_a = url_a.netloc
    if url_b.netloc.startswith('www.'):
        host_b = url_b.netloc.replace('www.', '', 1)
    else:
        host_b = url_b.netloc

    if host_a != host_b or url_a.query != url_b.query or url_a.fragment != url_b.fragment:
        return False

    # remove / from the end of the url if required
    path_a = url_a.path[:-1] if url_a.path.endswith('/') else url_a.path
    path_b = url_b.path[:-1] if url_b.path.endswith('/') else url_b.path

    return unquote(path_a) == unquote(path_b)
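
# For illustration:
#   compare_urls(urlparse('https://www.example.com/path/'),
#                urlparse('http://example.com/path'))   # -> True
#   compare_urls(urlparse('https://example.com/?q=a'),
#                urlparse('https://example.com/?q=b'))  # -> False (query differs)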


def merge_two_infoboxes(infobox1, infobox2):  # pylint: disable=too-many-branches, too-many-statements
    # get engines weights
    if hasattr(engines[infobox1['engine']], 'weight'):
        weight1 = engines[infobox1['engine']].weight
    else:
        weight1 = 1
    if hasattr(engines[infobox2['engine']], 'weight'):
        weight2 = engines[infobox2['engine']].weight
    else:
        weight2 = 1

    if weight2 > weight1:
        infobox1['engine'] = infobox2['engine']

    infobox1['engines'] |= infobox2['engines']

    if 'urls' in infobox2:
        urls1 = infobox1.get('urls', None)
        if urls1 is None:
            urls1 = []

        for url2 in infobox2.get('urls', []):
            unique_url = True
            parsed_url2 = urlparse(url2.get('url', ''))
            entity_url2 = url2.get('entity')
            for url1 in urls1:
                if (entity_url2 is not None and url1.get('entity') == entity_url2) or compare_urls(
                    urlparse(url1.get('url', '')), parsed_url2
                ):
                    unique_url = False
                    break
            if unique_url:
                urls1.append(url2)

        infobox1['urls'] = urls1

    if 'img_src' in infobox2:
        img1 = infobox1.get('img_src', None)
        img2 = infobox2.get('img_src')
        if img1 is None:
            infobox1['img_src'] = img2
        elif weight2 > weight1:
            infobox1['img_src'] = img2

    if 'attributes' in infobox2:
        attributes1 = infobox1.get('attributes')
        if attributes1 is None:
            infobox1['attributes'] = attributes1 = []

        attributeSet = set()
        for attribute in attributes1:
            label = attribute.get('label')
            if label not in attributeSet:
                attributeSet.add(label)
            entity = attribute.get('entity')
            if entity not in attributeSet:
                attributeSet.add(entity)

        for attribute in infobox2.get('attributes', []):
            if attribute.get('label') not in attributeSet and attribute.get('entity') not in attributeSet:
                attributes1.append(attribute)

    if 'content' in infobox2:
        content1 = infobox1.get('content', None)
        content2 = infobox2.get('content', '')
        if content1 is not None:
            if result_content_len(content2) > result_content_len(content1):
                infobox1['content'] = content2
        else:
            infobox1['content'] = content2
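
# Merge semantics, for illustration: when two infoboxes describe the same id,
# the higher-weighted engine supplies the 'engine' label and, when both have
# one, the 'img_src'; 'urls' and 'attributes' are united without duplicates,
# and the longer 'content' (by result_content_len) wins.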


def result_score(result, priority):
    weight = 1.0

    for result_engine in result['engines']:
        if hasattr(engines[result_engine], 'weight'):
            weight *= float(engines[result_engine].weight)

    weight *= len(result['positions'])
    score = 0

    for position in result['positions']:
        if priority == 'low':
            continue
        if priority == 'high':
            score += weight
        else:
            score += weight / position

    return score
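
# Worked example, for illustration: a result found by two engines of weight 1.0
# at positions 1 and 3 has weight = 1.0 * 2 = 2.0 and, with the default
# priority, score = 2.0/1 + 2.0/3 ~= 2.67; with priority 'high' every position
# contributes the full weight, giving 4.0, and with 'low' the score stays 0.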


class Timing(NamedTuple):  # pylint: disable=missing-class-docstring
    engine: str
    total: float
    load: float


class UnresponsiveEngine(NamedTuple):  # pylint: disable=missing-class-docstring
    engine: str
    error_type: str
    suspended: bool


class ResultContainer:
    """In the result container, the results are collected, sorted and
    duplicates will be merged."""

    __slots__ = (
        '_merged_results',
        'infoboxes',
        'suggestions',
        'answers',
        'corrections',
        '_number_of_results',
        '_closed',
        'paging',
        'unresponsive_engines',
        'timings',
        'redirect_url',
        'engine_data',
        'on_result',
        '_lock',
    )

    def __init__(self):
        super().__init__()
        self._merged_results = []
        self.infoboxes = []
        self.suggestions = set()
        self.answers = {}
        self.corrections = set()
        self._number_of_results = []
        self.engine_data = defaultdict(dict)
        self._closed = False
        self.paging = False
        self.unresponsive_engines: Set[UnresponsiveEngine] = set()
        self.timings: List[Timing] = []
        self.redirect_url = None
        self.on_result = lambda _: True
        self._lock = RLock()

    def extend(self, engine_name, results):  # pylint: disable=too-many-branches
        if self._closed:
            return

        standard_result_count = 0
        error_msgs = set()
        for result in list(results):
            result['engine'] = engine_name
            if 'suggestion' in result and self.on_result(result):
                self.suggestions.add(result['suggestion'])
            elif 'answer' in result and self.on_result(result):
                self.answers[result['answer']] = result
            elif 'correction' in result and self.on_result(result):
                self.corrections.add(result['correction'])
            elif 'infobox' in result and self.on_result(result):
                self._merge_infobox(result)
            elif 'number_of_results' in result and self.on_result(result):
                self._number_of_results.append(result['number_of_results'])
            elif 'engine_data' in result and self.on_result(result):
                self.engine_data[engine_name][result['key']] = result['engine_data']
            elif 'url' in result:
                # standard result (url, title, content)
                if not self._is_valid_url_result(result, error_msgs):
                    continue
                # normalize the result
                self._normalize_url_result(result)
                # calling on_result calls searx.search.SearchWithPlugins._on_result,
                # which calls the plugins
                if not self.on_result(result):
                    continue
                self.__merge_url_result(result, standard_result_count + 1)
                standard_result_count += 1
            elif self.on_result(result):
                self.__merge_result_no_url(result, standard_result_count + 1)
                standard_result_count += 1

        if len(error_msgs) > 0:
            for msg in error_msgs:
                count_error(engine_name, 'some results are invalid: ' + msg, secondary=True)

        if engine_name in engines:
            histogram_observe(standard_result_count, 'engine', engine_name, 'result', 'count')

        if not self.paging and engine_name in engines and engines[engine_name].paging:
            self.paging = True
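
    # Usage sketch, for illustration (the engine name and result shapes are
    # assumptions, not from the original source):
    #   container.extend('wikipedia', [
    #       {'url': 'https://example.org/', 'title': 'Example', 'content': '...'},
    #       {'number_of_results': 1000},
    #   ])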

    def _merge_infobox(self, infobox):
        add_infobox = True
        infobox_id = infobox.get('id', None)
        infobox['engines'] = set([infobox['engine']])
        if infobox_id is not None:
            parsed_url_infobox_id = urlparse(infobox_id)
            with self._lock:
                for existingIndex in self.infoboxes:
                    if compare_urls(urlparse(existingIndex.get('id', '')), parsed_url_infobox_id):
                        merge_two_infoboxes(existingIndex, infobox)
                        add_infobox = False

        if add_infobox:
            self.infoboxes.append(infobox)

    def _is_valid_url_result(self, result, error_msgs):
        if 'url' in result:
            if not isinstance(result['url'], str):
                logger.debug('result: invalid URL: %s', str(result))
                error_msgs.add('invalid URL')
                return False

        if 'title' in result and not isinstance(result['title'], str):
            logger.debug('result: invalid title: %s', str(result))
            error_msgs.add('invalid title')
            return False

        if 'content' in result:
            if not isinstance(result['content'], str):
                logger.debug('result: invalid content: %s', str(result))
                error_msgs.add('invalid content')
                return False

        return True

    def _normalize_url_result(self, result):
        """Normalize the result in place: parse the URL, apply a default scheme
        and template, drop content that duplicates the title and collapse
        whitespace."""
        result['parsed_url'] = urlparse(result['url'])

        # if the result has no scheme, use http as default
        if not result['parsed_url'].scheme:
            result['parsed_url'] = result['parsed_url']._replace(scheme="http")
            result['url'] = result['parsed_url'].geturl()

        # avoid duplicate content between the content and title fields
        if result.get('content') == result.get('title'):
            del result['content']

        # make sure there is a template
        if 'template' not in result:
            result['template'] = 'default.html'

        # strip multiple spaces and carriage returns from content
        if result.get('content'):
            result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
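
    # For illustration: a result {'url': '//example.com/x', 'title': 'X',
    # 'content': 'X'} comes out with url 'http://example.com/x' (default
    # scheme), without 'content' (it duplicated the title) and with
    # 'template' set to 'default.html'.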

    def __merge_url_result(self, result, position):
        result['engines'] = set([result['engine']])
        with self._lock:
            duplicated = self.__find_duplicated_http_result(result)
            if duplicated:
                self.__merge_duplicated_http_result(duplicated, result, position)
                return

            # if there is no duplicate found, append result
            result['positions'] = [position]
            self._merged_results.append(result)

    def __find_duplicated_http_result(self, result):
        result_template = result.get('template')
        for merged_result in self._merged_results:
            if 'parsed_url' not in merged_result:
                continue
            if compare_urls(result['parsed_url'], merged_result['parsed_url']) and result_template == merged_result.get(
                'template'
            ):
                if result_template != 'images.html':
                    # not an image, same template, same url : it's a duplicate
                    return merged_result

                # it's an image
                # it's a duplicate if the parsed_url, template and img_src are the same
                if result.get('img_src', '') == merged_result.get('img_src', ''):
                    return merged_result
        return None

    def __merge_duplicated_http_result(self, duplicated, result, position):
        # using content with more text
        if result_content_len(result.get('content', '')) > result_content_len(duplicated.get('content', '')):
            duplicated['content'] = result['content']

        # merge all result's parameters not found in duplicate
        for key in result.keys():
            if not duplicated.get(key):
                duplicated[key] = result.get(key)

        # add the new position
        duplicated['positions'].append(position)

        # add engine to list of result-engines
        duplicated['engines'].add(result['engine'])

        # using https if possible
        if duplicated['parsed_url'].scheme != 'https' and result['parsed_url'].scheme == 'https':
            duplicated['url'] = result['parsed_url'].geturl()
            duplicated['parsed_url'] = result['parsed_url']

    def __merge_result_no_url(self, result, position):
        result['engines'] = set([result['engine']])
        result['positions'] = [position]
        with self._lock:
            self._merged_results.append(result)

    def close(self):
        self._closed = True

        for result in self._merged_results:
            result['score'] = result_score(result, result.get('priority'))
            # strip leading/trailing whitespace from content and collapse
            # whitespace duplications in titles
            if result.get('content'):
                result['content'] = result['content'].strip()
            if result.get('title'):
                result['title'] = ' '.join(result['title'].strip().split())

            for result_engine in result['engines']:
                counter_add(result['score'], 'engine', result_engine, 'score')

        results = sorted(self._merged_results, key=itemgetter('score'), reverse=True)

        # pass 2 : group results by category and template
        gresults = []
        categoryPositions = {}
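
        # Grouping, for illustration: results that share a category/template/
        # image key are pulled together; a group accepts up to eight follow-up
        # results, as long as its head is fewer than 20 positions behind the
        # current end of the list.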
        for res in results:
            # do we need to handle more than one category per engine?
            engine = engines[res['engine']]
            res['category'] = engine.categories[0] if len(engine.categories) > 0 else ''

            category = (
                res['category']
                + ':'
                + res.get('template', '')
                + ':'
                + ('img_src' if 'img_src' in res or 'thumbnail' in res else '')
            )

            current = None if category not in categoryPositions else categoryPositions[category]

            # group with previous results using the same category
            # if the group can accept more results and is not too far
            # from the current position
            if current is not None and (current['count'] > 0) and (len(gresults) - current['index'] < 20):
                # group with the previous results of the same category
                index = current['index']
                gresults.insert(index, res)

                # update every index after the current one
                # (including the current one)
                for k in categoryPositions:  # pylint: disable=consider-using-dict-items
                    v = categoryPositions[k]['index']
                    if v >= index:
                        categoryPositions[k]['index'] = v + 1

                # update this category
                current['count'] -= 1

            else:
                # start a new group for this category
                gresults.append(res)

                # update categoryIndex
                categoryPositions[category] = {'index': len(gresults), 'count': 8}

        # update _merged_results
        self._merged_results = gresults

    def get_ordered_results(self):
        if not self._closed:
            self.close()
        return self._merged_results

    def results_length(self):
        return len(self._merged_results)

    @property
    def number_of_results(self) -> int:
        """Returns the average of the "number_of_results" values reported by
        the engines, or zero if that average is smaller than the number of
        results actually collected."""
        with self._lock:
            if not self._closed:
                logger.error("call to ResultContainer.number_of_results before ResultContainer.close")
                return 0

            resultnum_sum = sum(self._number_of_results)
            if not resultnum_sum or not self._number_of_results:
                return 0

            average = int(resultnum_sum / len(self._number_of_results))
            if average < self.results_length():
                average = 0
            return average
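
    # For illustration: if engines reported totals [1000, 2000] the property
    # returns 1500; if the engines reported [2, 3] but 5 results were actually
    # merged, the average 2 is below results_length() and 0 is returned.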

    def add_unresponsive_engine(self, engine_name: str, error_type: str, suspended: bool = False):
        with self._lock:
            if self._closed:
                logger.error("call to ResultContainer.add_unresponsive_engine after ResultContainer.close")
                return
            if engines[engine_name].display_error_messages:
                self.unresponsive_engines.add(UnresponsiveEngine(engine_name, error_type, suspended))

    def add_timing(self, engine_name: str, engine_time: float, page_load_time: float):
        with self._lock:
            if self._closed:
                logger.error("call to ResultContainer.add_timing after ResultContainer.close")
                return
            self.timings.append(Timing(engine_name, total=engine_time, load=page_load_time))

    def get_timings(self):
        with self._lock:
            if not self._closed:
                logger.error("call to ResultContainer.get_timings before ResultContainer.close")
                return []
            return self.timings
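

# Lifecycle sketch, for illustration (the engine name is an assumption; it has
# to exist in searx.engines.engines):
#   container = ResultContainer()
#   container.extend('wikipedia', results_from_engine)
#   container.close()
#   for result in container.get_ordered_results():
#       print(result.get('url'), result['score'])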