| 
									
										
										
										
											2014-09-13 18:25:25 +02:00
										 |  |  | '''
 | 
					
						
							|  |  |  | searx is free software: you can redistribute it and/or modify | 
					
						
							|  |  |  | it under the terms of the GNU Affero General Public License as published by | 
					
						
							|  |  |  | the Free Software Foundation, either version 3 of the License, or | 
					
						
							|  |  |  | (at your option) any later version. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | searx is distributed in the hope that it will be useful, | 
					
						
							|  |  |  | but WITHOUT ANY WARRANTY; without even the implied warranty of | 
					
						
							|  |  |  | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | 
					
						
							|  |  |  | GNU Affero General Public License for more details. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | You should have received a copy of the GNU Affero General Public License | 
					
						
							|  |  |  | along with searx. If not, see < http://www.gnu.org/licenses/ >. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | (C) 2013- by Adam Tauber, <asciimoo@gmail.com> | 
					
						
							|  |  |  | '''
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-07-31 23:39:58 +02:00
										 |  |  | import gc | 
					
						
							| 
									
										
										
										
											2016-11-30 18:43:03 +01:00
										 |  |  | import sys | 
					
						
							| 
									
										
										
										
											2014-12-05 19:24:11 +01:00
										 |  |  | import threading | 
					
						
							| 
									
										
										
										
											2014-12-14 01:18:01 +01:00
										 |  |  | from time import time | 
					
						
							| 
									
										
										
										
											2016-09-06 00:36:33 +02:00
										 |  |  | from uuid import uuid4 | 
					
						
							| 
									
										
										
										
											2017-07-09 22:09:46 +02:00
										 |  |  | from flask_babel import gettext | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  | import requests.exceptions | 
					
						
							| 
									
										
										
										
											2016-07-31 23:39:58 +02:00
										 |  |  | import searx.poolrequests as requests_lib | 
					
						
							| 
									
										
										
										
											2014-02-07 01:19:07 +01:00
										 |  |  | from searx.engines import ( | 
					
						
							| 
									
										
										
										
											2019-01-25 20:54:23 +01:00
										 |  |  |     categories, engines, settings | 
					
						
							| 
									
										
										
										
											2014-02-07 01:19:07 +01:00
										 |  |  | ) | 
					
						
							| 
									
										
										
										
											2016-11-19 20:53:51 +01:00
										 |  |  | from searx.answerers import ask | 
					
						
							| 
									
										
										
										
											2016-04-08 16:38:05 +02:00
										 |  |  | from searx.utils import gen_useragent | 
					
						
							| 
									
										
										
										
											2017-03-02 00:11:51 +01:00
										 |  |  | from searx.query import RawTextQuery, SearchQuery, VALID_LANGUAGE_CODE | 
					
						
							| 
									
										
										
										
											2015-10-03 17:26:07 +02:00
										 |  |  | from searx.results import ResultContainer | 
					
						
							| 
									
										
										
										
											2015-01-09 04:13:05 +01:00
										 |  |  | from searx import logger | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | from searx.plugins import plugins | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  | from searx.exceptions import SearxParameterException | 
					
						
							| 
									
										
										
										
											2014-07-07 13:59:27 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-11-30 18:43:03 +01:00
										 |  |  | try: | 
					
						
							|  |  |  |     from thread import start_new_thread | 
					
						
							|  |  |  | except: | 
					
						
							|  |  |  |     from _thread import start_new_thread | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | if sys.version_info[0] == 3: | 
					
						
							|  |  |  |     unicode = str | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-01-09 04:13:05 +01:00
										 |  |  | logger = logger.getChild('search') | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-07-07 13:59:27 +02:00
										 |  |  | number_of_searches = 0 | 
					
						
							| 
									
										
										
										
											2019-08-02 13:50:51 +02:00
										 |  |  | max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout' or None) | 
					
						
							|  |  |  | if max_request_timeout is None: | 
					
						
							|  |  |  |     logger.info('max_request_timeout={0}'.format(max_request_timeout)) | 
					
						
							|  |  |  | else: | 
					
						
							|  |  |  |     if isinstance(max_request_timeout, float): | 
					
						
							|  |  |  |         logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout)) | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         logger.critical('outgoing.max_request_timeout if defined has to be float') | 
					
						
							|  |  |  |         from sys import exit | 
					
						
							|  |  |  |         exit(1) | 
					
						
							| 
									
										
										
										
											2014-07-07 13:59:27 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-07-23 11:56:57 +02:00
										 |  |  | def send_http_request(engine, request_params): | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  |     # create dictionary which contain all | 
					
						
							|  |  |  |     # informations about the request | 
					
						
							|  |  |  |     request_args = dict( | 
					
						
							|  |  |  |         headers=request_params['headers'], | 
					
						
							|  |  |  |         cookies=request_params['cookies'], | 
					
						
							|  |  |  |         verify=request_params['verify'] | 
					
						
							|  |  |  |     ) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # specific type of request (GET or POST) | 
					
						
							|  |  |  |     if request_params['method'] == 'GET': | 
					
						
							|  |  |  |         req = requests_lib.get | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         req = requests_lib.post | 
					
						
							|  |  |  |         request_args['data'] = request_params['data'] | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  |     # send the request | 
					
						
							| 
									
										
										
										
											2017-07-23 11:56:57 +02:00
										 |  |  |     return req(request_params['url'], **request_args) | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-09-23 17:14:32 +02:00
										 |  |  | def search_one_http_request(engine, query, request_params): | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  |     # update request parameters dependent on | 
					
						
							|  |  |  |     # search-engine (contained in engines folder) | 
					
						
							|  |  |  |     engine.request(query, request_params) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  |     # ignoring empty urls | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  |     if request_params['url'] is None: | 
					
						
							| 
									
										
										
										
											2019-07-17 10:38:45 +02:00
										 |  |  |         return None | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     if not request_params['url']: | 
					
						
							| 
									
										
										
										
											2019-07-17 10:38:45 +02:00
										 |  |  |         return None | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # send request | 
					
						
							| 
									
										
										
										
											2017-07-23 11:56:57 +02:00
										 |  |  |     response = send_http_request(engine, request_params) | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  |     # parse the response | 
					
						
							|  |  |  |     response.search_params = request_params | 
					
						
							|  |  |  |     return engine.response(response) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-09-23 17:14:32 +02:00
										 |  |  | def search_one_offline_request(engine, query, request_params): | 
					
						
							|  |  |  |     return engine.search(query, request_params) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-30 17:37:46 +01:00
										 |  |  | def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit): | 
					
						
							| 
									
										
										
										
											2019-09-23 17:14:32 +02:00
										 |  |  |     if engines[engine_name].offline: | 
					
						
							| 
									
										
										
										
											2019-09-30 14:27:13 +02:00
										 |  |  |         return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa | 
					
						
							| 
									
										
										
										
											2019-09-23 17:14:32 +02:00
										 |  |  |     return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit): | 
					
						
							|  |  |  |     engine = engines[engine_name] | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     try: | 
					
						
							|  |  |  |         search_results = search_one_offline_request(engine, query, request_params) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         if search_results: | 
					
						
							|  |  |  |             result_container.extend(engine_name, search_results) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |             engine_time = time() - start_time | 
					
						
							|  |  |  |             result_container.add_timing(engine_name, engine_time, engine_time) | 
					
						
							|  |  |  |             with threading.RLock(): | 
					
						
							|  |  |  |                 engine.stats['engine_time'] += engine_time | 
					
						
							|  |  |  |                 engine.stats['engine_time_count'] += 1 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     except ValueError as e: | 
					
						
							|  |  |  |         record_offline_engine_stats_on_error(engine, result_container, start_time) | 
					
						
							|  |  |  |         logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e)) | 
					
						
							|  |  |  |     except Exception as e: | 
					
						
							|  |  |  |         record_offline_engine_stats_on_error(engine, result_container, start_time) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         result_container.add_unresponsive_engine(( | 
					
						
							|  |  |  |             engine_name, | 
					
						
							|  |  |  |             u'{0}: {1}'.format(gettext('unexpected crash'), e), | 
					
						
							|  |  |  |         )) | 
					
						
							|  |  |  |         logger.exception('engine {0} : exception : {1}'.format(engine_name, e)) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | def record_offline_engine_stats_on_error(engine, result_container, start_time): | 
					
						
							|  |  |  |     engine_time = time() - start_time | 
					
						
							|  |  |  |     result_container.add_timing(engine.name, engine_time, engine_time) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     with threading.RLock(): | 
					
						
							|  |  |  |         engine.stats['errors'] += 1 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    """Run one HTTP engine search without letting any exception escape.

    On success, results are added to *result_container* and the engine's
    timing statistics are updated.  On failure, the error is classified
    (requests timeout / other requests exception / unexpected crash), the
    engine is reported as unresponsive, and -- for HTTP-level failures --
    the engine's continuous-error counter and suspend window are advanced.
    """
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    # resolve the engine module for this engine name
    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request (None means it declined)
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            # NOTE(review): threading.RLock() creates a *new* lock object on
            # each use, so this provides no actual mutual exclusion between
            # threads -- kept as-is for consistency with the rest of the file
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1

    except Exception as e:
        # Timing: record how long the failed attempt took anyway
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # Record the errors
        with threading.RLock():
            engine.stats['errors'] += 1

        if (issubclass(e.__class__, requests.exceptions.Timeout)):
            result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout"
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif (issubclass(e.__class__, requests.exceptions.RequestException)):
            result_container.add_unresponsive_engine((engine_name, gettext('request exception')))
            # other requests exception
            logger.exception("engine {0} : requests exception"
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine((
                engine_name,
                u'{0}: {1}'.format(gettext('unexpected crash'), e),
            ))
            # others errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend or not the engine if there are HTTP errors
    with threading.RLock():
        if requests_exception:
            # update continuous_errors / suspend_end_time: the ban grows
            # linearly with consecutive failures, capped by the settings
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-12-30 17:37:46 +01:00
										 |  |  | def search_multiple_requests(requests, result_container, start_time, timeout_limit): | 
					
						
							| 
									
										
										
										
											2016-09-06 00:36:33 +02:00
										 |  |  |     search_id = uuid4().__str__() | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |     for engine_name, query, request_params in requests: | 
					
						
							| 
									
										
										
										
											2014-12-05 19:24:11 +01:00
										 |  |  |         th = threading.Thread( | 
					
						
							| 
									
										
										
										
											2016-12-29 11:08:19 +01:00
										 |  |  |             target=search_one_request_safe, | 
					
						
							| 
									
										
										
										
											2016-12-30 17:37:46 +01:00
										 |  |  |             args=(engine_name, query, request_params, result_container, start_time, timeout_limit), | 
					
						
							| 
									
										
										
										
											2016-09-06 00:36:33 +02:00
										 |  |  |             name=search_id, | 
					
						
							| 
									
										
										
										
											2014-12-05 19:24:11 +01:00
										 |  |  |         ) | 
					
						
							| 
									
										
										
										
											2014-12-19 13:59:41 +01:00
										 |  |  |         th._engine_name = engine_name | 
					
						
							| 
									
										
										
										
											2014-12-05 19:24:11 +01:00
										 |  |  |         th.start() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     for th in threading.enumerate(): | 
					
						
							| 
									
										
										
										
											2016-09-06 00:36:33 +02:00
										 |  |  |         if th.name == search_id: | 
					
						
							| 
									
										
										
										
											2016-11-05 13:45:20 +01:00
										 |  |  |             remaining_time = max(0.0, timeout_limit - (time() - start_time)) | 
					
						
							| 
									
										
										
										
											2014-12-14 01:18:01 +01:00
										 |  |  |             th.join(remaining_time) | 
					
						
							|  |  |  |             if th.isAlive(): | 
					
						
							| 
									
										
										
										
											2017-07-19 17:24:50 +02:00
										 |  |  |                 result_container.add_unresponsive_engine((th._engine_name, gettext('timeout'))) | 
					
						
							| 
									
										
										
										
											2015-01-09 04:13:05 +01:00
										 |  |  |                 logger.warning('engine timeout: {0}'.format(th._engine_name)) | 
					
						
							| 
									
										
										
										
											2014-12-14 01:18:01 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-12-05 19:24:11 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-13 18:25:25 +02:00
										 |  |  | # get default reqest parameter | 
					
						
							| 
									
										
										
										
											2014-07-07 13:59:27 +02:00
										 |  |  | def default_request_params(): | 
					
						
							|  |  |  |     return { | 
					
						
							| 
									
										
										
										
											2014-12-29 21:31:04 +01:00
										 |  |  |         'method': 'GET', | 
					
						
							|  |  |  |         'headers': {}, | 
					
						
							|  |  |  |         'data': {}, | 
					
						
							|  |  |  |         'url': '', | 
					
						
							|  |  |  |         'cookies': {}, | 
					
						
							|  |  |  |         'verify': True | 
					
						
							|  |  |  |     } | 
					
						
							| 
									
										
										
										
											2014-07-07 13:59:27 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-07-26 08:51:04 +02:00
										 |  |  | # remove duplicate queries. | 
					
						
							|  |  |  | # FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music' | 
					
						
							|  |  |  | def deduplicate_query_engines(query_engines): | 
					
						
							|  |  |  |     uniq_query_engines = {q["category"] + '|' + q["name"]: q for q in query_engines} | 
					
						
							|  |  |  |     return uniq_query_engines.values() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-11-02 14:52:22 +01:00
										 |  |  | def get_search_query_from_webapp(preferences, form): | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |     # no text for the query ? | 
					
						
							|  |  |  |     if not form.get('q'): | 
					
						
							|  |  |  |         raise SearxParameterException('q', '') | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # set blocked engines | 
					
						
							|  |  |  |     disabled_engines = preferences.engines.get_disabled() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # parse query, if tags are set, which change | 
					
						
							|  |  |  |     # the serch engine or search-language | 
					
						
							| 
									
										
										
										
											2016-11-02 14:52:22 +01:00
										 |  |  |     raw_text_query = RawTextQuery(form['q'], disabled_engines) | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |     raw_text_query.parse_query() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # set query | 
					
						
							|  |  |  |     query = raw_text_query.getSearchQuery() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |     # get and check page number | 
					
						
							|  |  |  |     pageno_param = form.get('pageno', '1') | 
					
						
							|  |  |  |     if not pageno_param.isdigit() or int(pageno_param) < 1: | 
					
						
							|  |  |  |         raise SearxParameterException('pageno', pageno_param) | 
					
						
							|  |  |  |     query_pageno = int(pageno_param) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # get language | 
					
						
							| 
									
										
										
										
											2016-12-14 04:36:40 +01:00
										 |  |  |     # set specific language if set on request, query or preferences | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |     # TODO support search with multible languages | 
					
						
							|  |  |  |     if len(raw_text_query.languages): | 
					
						
							|  |  |  |         query_lang = raw_text_query.languages[-1] | 
					
						
							| 
									
										
										
										
											2016-12-14 04:36:40 +01:00
										 |  |  |     elif 'language' in form: | 
					
						
							|  |  |  |         query_lang = form.get('language') | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         query_lang = preferences.get_value('language') | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |     # check language | 
					
						
							| 
									
										
										
										
											2017-03-02 00:11:51 +01:00
										 |  |  |     if not VALID_LANGUAGE_CODE.match(query_lang): | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |         raise SearxParameterException('language', query_lang) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # get safesearch | 
					
						
							|  |  |  |     if 'safesearch' in form: | 
					
						
							|  |  |  |         query_safesearch = form.get('safesearch') | 
					
						
							|  |  |  |         # first check safesearch | 
					
						
							|  |  |  |         if not query_safesearch.isdigit(): | 
					
						
							|  |  |  |             raise SearxParameterException('safesearch', query_safesearch) | 
					
						
							|  |  |  |         query_safesearch = int(query_safesearch) | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         query_safesearch = preferences.get_value('safesearch') | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # safesearch : second check | 
					
						
							|  |  |  |     if query_safesearch < 0 or query_safesearch > 2: | 
					
						
							|  |  |  |         raise SearxParameterException('safesearch', query_safesearch) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # get time_range | 
					
						
							| 
									
										
										
										
											2016-11-02 14:52:22 +01:00
										 |  |  |     query_time_range = form.get('time_range') | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |     # check time_range | 
					
						
							| 
									
										
										
										
											2017-01-21 20:21:32 +01:00
										 |  |  |     if query_time_range not in ('None', None, '', 'day', 'week', 'month', 'year'): | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |         raise SearxParameterException('time_range', query_time_range) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # query_engines | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |     query_engines = raw_text_query.engines | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-08-02 13:50:51 +02:00
										 |  |  |     # timeout_limit | 
					
						
							|  |  |  |     query_timeout = raw_text_query.timeout_limit | 
					
						
							|  |  |  |     if query_timeout is None and 'timeout_limit' in form: | 
					
						
							|  |  |  |         raw_time_limit = form.get('timeout_limit') | 
					
						
							| 
									
										
										
										
											2019-08-02 20:04:37 +02:00
										 |  |  |         if raw_time_limit in ['None', '']: | 
					
						
							|  |  |  |             raw_time_limit = None | 
					
						
							|  |  |  |         else: | 
					
						
							|  |  |  |             try: | 
					
						
							|  |  |  |                 query_timeout = float(raw_time_limit) | 
					
						
							|  |  |  |             except ValueError: | 
					
						
							|  |  |  |                 raise SearxParameterException('timeout_limit', raw_time_limit) | 
					
						
							| 
									
										
										
										
											2019-08-02 13:50:51 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-20 18:52:47 +01:00
										 |  |  |     # query_categories | 
					
						
							|  |  |  |     query_categories = [] | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |     # if engines are calculated from query, | 
					
						
							|  |  |  |     # set categories by using that informations | 
					
						
							|  |  |  |     if query_engines and raw_text_query.specific: | 
					
						
							| 
									
										
										
										
											2018-03-22 11:02:24 +01:00
										 |  |  |         additional_categories = set() | 
					
						
							|  |  |  |         for engine in query_engines: | 
					
						
							|  |  |  |             if 'from_bang' in engine and engine['from_bang']: | 
					
						
							|  |  |  |                 additional_categories.add('none') | 
					
						
							|  |  |  |             else: | 
					
						
							|  |  |  |                 additional_categories.add(engine['category']) | 
					
						
							|  |  |  |         query_categories = list(additional_categories) | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # otherwise, using defined categories to | 
					
						
							|  |  |  |     # calculate which engines should be used | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         # set categories/engines | 
					
						
							|  |  |  |         load_default_categories = True | 
					
						
							| 
									
										
										
										
											2016-11-02 14:52:22 +01:00
										 |  |  |         for pd_name, pd in form.items(): | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |             if pd_name == 'categories': | 
					
						
							|  |  |  |                 query_categories.extend(categ for categ in map(unicode.strip, pd.split(',')) if categ in categories) | 
					
						
							|  |  |  |             elif pd_name == 'engines': | 
					
						
							|  |  |  |                 pd_engines = [{'category': engines[engine].categories[0], | 
					
						
							|  |  |  |                                'name': engine} | 
					
						
							|  |  |  |                               for engine in map(unicode.strip, pd.split(',')) if engine in engines] | 
					
						
							|  |  |  |                 if pd_engines: | 
					
						
							|  |  |  |                     query_engines.extend(pd_engines) | 
					
						
							|  |  |  |                     load_default_categories = False | 
					
						
							|  |  |  |             elif pd_name.startswith('category_'): | 
					
						
							|  |  |  |                 category = pd_name[9:] | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |                 # if category is not found in list, skip | 
					
						
							|  |  |  |                 if category not in categories: | 
					
						
							|  |  |  |                     continue | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |                 if pd != 'off': | 
					
						
							|  |  |  |                     # add category to list | 
					
						
							|  |  |  |                     query_categories.append(category) | 
					
						
							|  |  |  |                 elif category in query_categories: | 
					
						
							|  |  |  |                     # remove category from list if property is set to 'off' | 
					
						
							|  |  |  |                     query_categories.remove(category) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         if not load_default_categories: | 
					
						
							|  |  |  |             if not query_categories: | 
					
						
							|  |  |  |                 query_categories = list(set(engine['category'] | 
					
						
							| 
									
										
										
										
											2016-12-15 11:59:21 +01:00
										 |  |  |                                             for engine in query_engines)) | 
					
						
							| 
									
										
										
										
											2014-02-07 01:19:07 +01:00
										 |  |  |         else: | 
					
						
							| 
									
										
										
										
											2014-10-19 12:41:04 +02:00
										 |  |  |             # if no category is specified for this search, | 
					
						
							|  |  |  |             # using user-defined default-configuration which | 
					
						
							|  |  |  |             # (is stored in cookie) | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |             if not query_categories: | 
					
						
							|  |  |  |                 cookie_categories = preferences.get_value('categories') | 
					
						
							| 
									
										
										
										
											2014-02-07 01:19:07 +01:00
										 |  |  |                 for ccateg in cookie_categories: | 
					
						
							|  |  |  |                     if ccateg in categories: | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |                         query_categories.append(ccateg) | 
					
						
							| 
									
										
										
										
											2014-09-13 18:25:25 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-10-19 12:41:04 +02:00
										 |  |  |             # if still no category is specified, using general | 
					
						
							|  |  |  |             # as default-category | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |             if not query_categories: | 
					
						
							|  |  |  |                 query_categories = ['general'] | 
					
						
							| 
									
										
										
										
											2014-02-07 01:19:07 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-10-19 12:41:04 +02:00
										 |  |  |             # using all engines for that search, which are | 
					
						
							|  |  |  |             # declared under the specific categories | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |             for categ in query_categories: | 
					
						
							|  |  |  |                 query_engines.extend({'category': categ, | 
					
						
							|  |  |  |                                       'name': engine.name} | 
					
						
							|  |  |  |                                      for engine in categories[categ] | 
					
						
							|  |  |  |                                      if (engine.name, categ) not in disabled_engines) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-07-26 08:51:04 +02:00
										 |  |  |     query_engines = deduplicate_query_engines(query_engines) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-07-16 16:27:29 +02:00
										 |  |  |     return (SearchQuery(query, query_engines, query_categories, | 
					
						
							| 
									
										
										
										
											2019-08-02 13:50:51 +02:00
										 |  |  |                         query_lang, query_safesearch, query_pageno, | 
					
						
							| 
									
										
										
										
											2020-02-01 11:01:17 +01:00
										 |  |  |                         query_time_range, query_timeout, preferences), | 
					
						
							| 
									
										
										
										
											2019-07-16 16:27:29 +02:00
										 |  |  |             raw_text_query) | 
					
						
							| 
									
										
										
										
											2014-02-09 01:07:18 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  | 
 | 
					
						
class Search(object):

    """Run one search: ask the answerers first, then build and dispatch one
    request per selected engine, collecting everything into a
    ResultContainer.
    """

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        # search_query: SearchQuery with query text, engines, categories,
        # language, safesearch, pageno, time_range, timeout_limit, preferences
        self.search_query = search_query
        self.result_container = ResultContainer()
        # final timeout applied to the outgoing requests; computed in search()
        self.actual_timeout = None

    # do search-request
    def search(self):
        """Execute the search and return the filled ResultContainer.

        Side effects: increments the module-level ``number_of_searches``
        counter and triggers a background ``gc.collect`` after dispatch.
        """
        global number_of_searches

        # start time — passed to the request dispatcher so the timeout
        # budget includes the setup work done below
        start_time = time()

        # answerers: if any answerer matches the query, return its answers
        # immediately and skip the engines entirely
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines; each filter below
        # silently drops an engine that cannot serve this query
        for selected_engine in search_query.engines:
            # unknown engine name (e.g. stale form value), skip
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # token-protected engine the user has no access to, skip
            if not search_query.preferences.validate_token(engine):
                continue

            # skip engines currently suspended (e.g. after repeated errors)
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters; offline engines get no HTTP
            # related parameters, only category and pageno
            request_params = {}
            if not engine.offline:
                request_params = default_request_params()
                request_params['headers']['User-Agent'] = user_agent

                # an engine-pinned language overrides the query language
                if hasattr(engine, 'language') and engine.language:
                    request_params['language'] = engine.language
                else:
                    request_params['language'] = search_query.lang

                request_params['safesearch'] = search_query.safesearch
                request_params['time_range'] = search_query.time_range

            request_params['category'] = selected_engine['category']
            request_params['pageno'] = search_query.pageno

            # append request to list
            requests.append((selected_engine['name'], search_query.query, request_params))

            # update default_timeout: overall default is the slowest
            # selected engine's own timeout
            default_timeout = max(default_timeout, engine.timeout)

        # adjust timeout: combine the engines' default, the user-requested
        # timeout_limit and the instance-wide max_request_timeout
        self.actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            self.actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            self.actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            self.actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(self.actual_timeout, default_timeout, query_timeout, max_request_timeout))

        # send all search-request; dispatcher blocks until done or timeout,
        # then garbage collection runs in a background thread
        if requests:
            search_multiple_requests(requests, self.result_container, start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | class SearchWithPlugins(Search): | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-10-22 14:01:53 +02:00
										 |  |  |     """Similar to the Search class but call the plugins.""" | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-02 12:06:04 +01:00
										 |  |  |     def __init__(self, search_query, ordered_plugin_list, request): | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |         super(SearchWithPlugins, self).__init__(search_query) | 
					
						
							| 
									
										
										
										
											2017-01-02 12:06:04 +01:00
										 |  |  |         self.ordered_plugin_list = ordered_plugin_list | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |         self.request = request | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     def search(self): | 
					
						
							| 
									
										
										
										
											2017-01-02 12:06:04 +01:00
										 |  |  |         if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self): | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |             super(SearchWithPlugins, self).search() | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2017-01-02 12:06:04 +01:00
										 |  |  |         plugins.call(self.ordered_plugin_list, 'post_search', self.request, self) | 
					
						
							| 
									
										
										
										
											2016-10-22 14:01:53 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |         results = self.result_container.get_ordered_results() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         for result in results: | 
					
						
							| 
									
										
										
										
											2017-01-02 12:06:04 +01:00
										 |  |  |             plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result) | 
					
						
							| 
									
										
										
										
											2016-10-22 14:01:53 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-10-22 13:10:31 +02:00
										 |  |  |         return self.result_container |