diff --git a/searx/engines/__init__.py b/searx/engines/__init__.py index 95eda6dde..6c3ac7a42 100644 --- a/searx/engines/__init__.py +++ b/searx/engines/__init__.py @@ -21,7 +21,6 @@ import threading from os.path import realpath, dirname from babel.localedata import locale_identifiers from urllib.parse import urlparse -from flask_babel import gettext from operator import itemgetter from searx import settings from searx import logger @@ -51,8 +50,6 @@ engine_default_args = {'paging': False, 'shortcut': '-', 'disabled': False, 'enable_http': False, - 'suspend_end_time': 0, - 'continuous_errors': 0, 'time_range_support': False, 'engine_type': 'online', 'display_error_messages': True, @@ -138,22 +135,6 @@ def load_engine(engine_data): setattr(engine, 'fetch_supported_languages', lambda: engine._fetch_supported_languages(get(engine.supported_languages_url, headers=headers))) - engine.stats = { - 'sent_search_count': 0, # sent search - 'search_count': 0, # succesful search - 'result_count': 0, - 'engine_time': 0, - 'engine_time_count': 0, - 'score_count': 0, - 'errors': 0 - } - - engine_type = getattr(engine, 'engine_type', 'online') - - if engine_type != 'offline': - engine.stats['page_load_time'] = 0 - engine.stats['page_load_count'] = 0 - # tor related settings if settings['outgoing'].get('using_tor_proxy'): # use onion url if using tor. @@ -177,103 +158,6 @@ def load_engine(engine_data): return engine -def to_percentage(stats, maxvalue): - for engine_stat in stats: - if maxvalue: - engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100) - else: - engine_stat['percentage'] = 0 - return stats - - -def get_engines_stats(preferences): - # TODO refactor - pageloads = [] - engine_times = [] - results = [] - scores = [] - errors = [] - scores_per_result = [] - - max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0 # noqa - for engine in engines.values(): - if not preferences.validate_token(engine): - continue - - if engine.stats['search_count'] == 0: - continue - - results_num = \ - engine.stats['result_count'] / float(engine.stats['search_count']) - - if engine.stats['engine_time_count'] != 0: - this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count']) # noqa - else: - this_engine_time = 0 - - if results_num: - score = engine.stats['score_count'] / float(engine.stats['search_count']) # noqa - score_per_result = score / results_num - else: - score = score_per_result = 0.0 - - if engine.engine_type != 'offline': - load_times = 0 - if engine.stats['page_load_count'] != 0: - load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count']) # noqa - max_pageload = max(load_times, max_pageload) - pageloads.append({'avg': load_times, 'name': engine.name}) - - max_engine_times = max(this_engine_time, max_engine_times) - max_results = max(results_num, max_results) - max_score = max(score, max_score) - max_score_per_result = max(score_per_result, max_score_per_result) - max_errors = max(max_errors, engine.stats['errors']) - - engine_times.append({'avg': this_engine_time, 'name': engine.name}) - results.append({'avg': results_num, 'name': engine.name}) - scores.append({'avg': score, 'name': engine.name}) - errors.append({'avg': engine.stats['errors'], 'name': engine.name}) - scores_per_result.append({ - 'avg': score_per_result, - 'name': engine.name - }) - - pageloads = to_percentage(pageloads, max_pageload) - engine_times = to_percentage(engine_times, max_engine_times) - results = 
to_percentage(results, max_results) - scores = to_percentage(scores, max_score) - scores_per_result = to_percentage(scores_per_result, max_score_per_result) - errors = to_percentage(errors, max_errors) - - return [ - ( - gettext('Engine time (sec)'), - sorted(engine_times, key=itemgetter('avg')) - ), - ( - gettext('Page loads (sec)'), - sorted(pageloads, key=itemgetter('avg')) - ), - ( - gettext('Number of results'), - sorted(results, key=itemgetter('avg'), reverse=True) - ), - ( - gettext('Scores'), - sorted(scores, key=itemgetter('avg'), reverse=True) - ), - ( - gettext('Scores per result'), - sorted(scores_per_result, key=itemgetter('avg'), reverse=True) - ), - ( - gettext('Errors'), - sorted(errors, key=itemgetter('avg'), reverse=True) - ), - ] - - def load_engines(engine_list): global engines, engine_shortcuts engines.clear() diff --git a/searx/metrics/__init__.py b/searx/metrics/__init__.py new file mode 100644 index 000000000..bae62c915 --- /dev/null +++ b/searx/metrics/__init__.py @@ -0,0 +1,206 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later + +import typing +import math +import contextlib +from timeit import default_timer +from operator import itemgetter + +from searx.engines import engines +from .models import HistogramStorage, CounterStorage +from .error_recorder import count_error, count_exception, errors_per_engines + +__all__ = ["initialize", + "get_engines_stats", "get_engine_errors", + "histogram", "histogram_observe", "histogram_observe_time", + "counter", "counter_inc", "counter_add", + "count_error", "count_exception"] + + +ENDPOINTS = {'search'} + + +histogram_storage: typing.Optional[HistogramStorage] = None +counter_storage: typing.Optional[CounterStorage] = None + + +@contextlib.contextmanager +def histogram_observe_time(*args): + h = histogram_storage.get(*args) + before = default_timer() + yield before + duration = default_timer() - before + if h: + h.observe(duration) + else: + raise ValueError("histogram " + repr((*args,)) + " doesn't not exist") + + +def histogram_observe(duration, *args): + histogram_storage.get(*args).observe(duration) + + +def histogram(*args, raise_on_not_found=True): + h = histogram_storage.get(*args) + if raise_on_not_found and h is None: + raise ValueError("histogram " + repr((*args,)) + " doesn't not exist") + return h + + +def counter_inc(*args): + counter_storage.add(1, *args) + + +def counter_add(value, *args): + counter_storage.add(value, *args) + + +def counter(*args): + return counter_storage.get(*args) + + +def initialize(engine_names=None): + """ + Initialize metrics + """ + global counter_storage, histogram_storage + + counter_storage = CounterStorage() + histogram_storage = HistogramStorage() + + # max_timeout = max of all the engine.timeout + max_timeout = 2 + for engine_name in (engine_names or engines): + if engine_name in engines: + max_timeout = max(max_timeout, engines[engine_name].timeout) + + # histogram configuration + histogram_width = 0.1 + histogram_size = int(1.5 * max_timeout / histogram_width) + + # engines + for engine_name in (engine_names or engines): + # search count + counter_storage.configure('engine', engine_name, 'search', 'count', 'sent') + counter_storage.configure('engine', engine_name, 'search', 'count', 'successful') + # global counter of errors + counter_storage.configure('engine', engine_name, 'search', 'count', 'error') + # score of the engine + counter_storage.configure('engine', engine_name, 'score') + # result count per requests + histogram_storage.configure(1, 100, 'engine', engine_name, 
'result', 'count') + # time doing HTTP requests + histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'http') + # total time + # .time.request and ...response times may overlap .time.http time. + histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'total') + + +def get_engine_errors(engline_list): + result = {} + engine_names = list(errors_per_engines.keys()) + engine_names.sort() + for engine_name in engine_names: + if engine_name not in engline_list: + continue + + error_stats = errors_per_engines[engine_name] + sent_search_count = max(counter('engine', engine_name, 'search', 'count', 'sent'), 1) + sorted_context_count_list = sorted(error_stats.items(), key=lambda context_count: context_count[1]) + r = [] + for context, count in sorted_context_count_list: + percentage = round(20 * count / sent_search_count) * 5 + r.append({ + 'filename': context.filename, + 'function': context.function, + 'line_no': context.line_no, + 'code': context.code, + 'exception_classname': context.exception_classname, + 'log_message': context.log_message, + 'log_parameters': context.log_parameters, + 'secondary': context.secondary, + 'percentage': percentage, + }) + result[engine_name] = sorted(r, reverse=True, key=lambda d: d['percentage']) + return result + + +def to_percentage(stats, maxvalue): + for engine_stat in stats: + if maxvalue: + engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100) + else: + engine_stat['percentage'] = 0 + return stats + + +def get_engines_stats(engine_list): + global counter_storage, histogram_storage + + assert counter_storage is not None + assert histogram_storage is not None + + list_time = [] + list_time_http = [] + list_time_total = [] + list_result_count = [] + list_error_count = [] + list_scores = [] + list_scores_per_result = [] + + max_error_count = max_http_time = max_time_total = max_result_count = max_score = None # noqa + for engine_name in engine_list: + error_count = counter('engine', engine_name, 'search', 'count', 'error') + + if counter('engine', engine_name, 'search', 'count', 'sent') > 0: + list_error_count.append({'avg': error_count, 'name': engine_name}) + max_error_count = max(error_count, max_error_count or 0) + + successful_count = counter('engine', engine_name, 'search', 'count', 'successful') + if successful_count == 0: + continue + + result_count_sum = histogram('engine', engine_name, 'result', 'count').sum + time_total = histogram('engine', engine_name, 'time', 'total').percentage(50) + time_http = histogram('engine', engine_name, 'time', 'http').percentage(50) + result_count = result_count_sum / float(successful_count) + + if result_count: + score = counter('engine', engine_name, 'score') # noqa + score_per_result = score / float(result_count_sum) + else: + score = score_per_result = 0.0 + + max_time_total = max(time_total, max_time_total or 0) + max_http_time = max(time_http, max_http_time or 0) + max_result_count = max(result_count, max_result_count or 0) + max_score = max(score, max_score or 0) + + list_time.append({'total': round(time_total, 1), + 'http': round(time_http, 1), + 'name': engine_name, + 'processing': round(time_total - time_http, 1)}) + list_time_total.append({'avg': time_total, 'name': engine_name}) + list_time_http.append({'avg': time_http, 'name': engine_name}) + list_result_count.append({'avg': result_count, 'name': engine_name}) + list_scores.append({'avg': score, 'name': engine_name}) + list_scores_per_result.append({'avg': 
score_per_result, 'name': engine_name}) + + list_time = sorted(list_time, key=itemgetter('total')) + list_time_total = sorted(to_percentage(list_time_total, max_time_total), key=itemgetter('avg')) + list_time_http = sorted(to_percentage(list_time_http, max_http_time), key=itemgetter('avg')) + list_result_count = sorted(to_percentage(list_result_count, max_result_count), key=itemgetter('avg'), reverse=True) + list_scores = sorted(list_scores, key=itemgetter('avg'), reverse=True) + list_scores_per_result = sorted(list_scores_per_result, key=itemgetter('avg'), reverse=True) + list_error_count = sorted(to_percentage(list_error_count, max_error_count), key=itemgetter('avg'), reverse=True) + + return { + 'time': list_time, + 'max_time': math.ceil(max_time_total or 0), + 'time_total': list_time_total, + 'time_http': list_time_http, + 'result_count': list_result_count, + 'scores': list_scores, + 'scores_per_result': list_scores_per_result, + 'error_count': list_error_count, + } diff --git a/searx/metrology/error_recorder.py b/searx/metrics/error_recorder.py similarity index 84% rename from searx/metrology/error_recorder.py rename to searx/metrics/error_recorder.py index 167d1c8aa..2bf25fb0d 100644 --- a/searx/metrology/error_recorder.py +++ b/searx/metrics/error_recorder.py @@ -1,6 +1,5 @@ import typing import inspect -import logging from json import JSONDecodeError from urllib.parse import urlparse from httpx import HTTPError, HTTPStatusError @@ -9,16 +8,15 @@ from searx.exceptions import (SearxXPathSyntaxException, SearxEngineXPathExcepti from searx import logger -logging.basicConfig(level=logging.INFO) - errors_per_engines = {} class ErrorContext: - __slots__ = 'filename', 'function', 'line_no', 'code', 'exception_classname', 'log_message', 'log_parameters' + __slots__ = ('filename', 'function', 'line_no', 'code', 'exception_classname', + 'log_message', 'log_parameters', 'secondary') - def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters): + def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary): self.filename = filename self.function = function self.line_no = line_no @@ -26,22 +24,24 @@ class ErrorContext: self.exception_classname = exception_classname self.log_message = log_message self.log_parameters = log_parameters + self.secondary = secondary def __eq__(self, o) -> bool: if not isinstance(o, ErrorContext): return False return self.filename == o.filename and self.function == o.function and self.line_no == o.line_no\ and self.code == o.code and self.exception_classname == o.exception_classname\ - and self.log_message == o.log_message and self.log_parameters == o.log_parameters + and self.log_message == o.log_message and self.log_parameters == o.log_parameters \ + and self.secondary == o.secondary def __hash__(self): return hash((self.filename, self.function, self.line_no, self.code, self.exception_classname, self.log_message, - self.log_parameters)) + self.log_parameters, self.secondary)) def __repr__(self): - return "ErrorContext({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\ + return "ErrorContext({!r}, {!r}, {!r}, {!r}, {!r}, {!r}) {!r}".\ format(self.filename, self.line_no, self.code, self.exception_classname, self.log_message, - self.log_parameters) + self.log_parameters, self.secondary) def add_error_context(engine_name: str, error_context: ErrorContext) -> None: @@ -114,31 +114,32 @@ def get_exception_classname(exc: Exception) -> str: return exc_module + '.' 
+ exc_name -def get_error_context(framerecords, exception_classname, log_message, log_parameters) -> ErrorContext: +def get_error_context(framerecords, exception_classname, log_message, log_parameters, secondary) -> ErrorContext: searx_frame = get_trace(framerecords) filename = searx_frame.filename function = searx_frame.function line_no = searx_frame.lineno code = searx_frame.code_context[0].strip() del framerecords - return ErrorContext(filename, function, line_no, code, exception_classname, log_message, log_parameters) + return ErrorContext(filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary) -def record_exception(engine_name: str, exc: Exception) -> None: +def count_exception(engine_name: str, exc: Exception, secondary: bool = False) -> None: framerecords = inspect.trace() try: exception_classname = get_exception_classname(exc) log_parameters = get_messages(exc, framerecords[-1][1]) - error_context = get_error_context(framerecords, exception_classname, None, log_parameters) + error_context = get_error_context(framerecords, exception_classname, None, log_parameters, secondary) add_error_context(engine_name, error_context) finally: del framerecords -def record_error(engine_name: str, log_message: str, log_parameters: typing.Optional[typing.Tuple] = None) -> None: +def count_error(engine_name: str, log_message: str, log_parameters: typing.Optional[typing.Tuple] = None, + secondary: bool = False) -> None: framerecords = list(reversed(inspect.stack()[1:])) try: - error_context = get_error_context(framerecords, None, log_message, log_parameters or ()) + error_context = get_error_context(framerecords, None, log_message, log_parameters or (), secondary) add_error_context(engine_name, error_context) finally: del framerecords diff --git a/searx/metrics/models.py b/searx/metrics/models.py new file mode 100644 index 000000000..8936a51e3 --- /dev/null +++ b/searx/metrics/models.py @@ -0,0 +1,156 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later + +import decimal +import threading + +from searx import logger + + +__all__ = ["Histogram", "HistogramStorage", "CounterStorage"] + +logger = logger.getChild('searx.metrics') + + +class Histogram: + + _slots__ = '_lock', '_size', '_sum', '_quartiles', '_count', '_width' + + def __init__(self, width=10, size=200): + self._lock = threading.Lock() + self._width = width + self._size = size + self._quartiles = [0] * size + self._count = 0 + self._sum = 0 + + def observe(self, value): + q = int(value / self._width) + if q < 0: + """Value below zero is ignored""" + q = 0 + if q >= self._size: + """Value above the maximum is replaced by the maximum""" + q = self._size - 1 + with self._lock: + self._quartiles[q] += 1 + self._count += 1 + self._sum += value + + @property + def quartiles(self): + return list(self._quartiles) + + @property + def count(self): + return self._count + + @property + def sum(self): + return self._sum + + @property + def average(self): + with self._lock: + if self._count != 0: + return self._sum / self._count + else: + return 0 + + @property + def quartile_percentage(self): + ''' Quartile in percentage ''' + with self._lock: + if self._count > 0: + return [int(q * 100 / self._count) for q in self._quartiles] + else: + return self._quartiles + + @property + def quartile_percentage_map(self): + result = {} + # use Decimal to avoid rounding errors + x = decimal.Decimal(0) + width = decimal.Decimal(self._width) + width_exponent = -width.as_tuple().exponent + with self._lock: + if self._count > 0: + for y 
in self._quartiles: + yp = int(y * 100 / self._count) + if yp != 0: + result[round(float(x), width_exponent)] = yp + x += width + return result + + def percentage(self, percentage): + # use Decimal to avoid rounding errors + x = decimal.Decimal(0) + width = decimal.Decimal(self._width) + stop_at_value = decimal.Decimal(self._count) / 100 * percentage + sum_value = 0 + with self._lock: + if self._count > 0: + for y in self._quartiles: + sum_value += y + if sum_value >= stop_at_value: + return x + x += width + return None + + def __repr__(self): + return "Histogram" + + +class HistogramStorage: + + __slots__ = 'measures' + + def __init__(self): + self.clear() + + def clear(self): + self.measures = {} + + def configure(self, width, size, *args): + measure = Histogram(width, size) + self.measures[args] = measure + return measure + + def get(self, *args): + return self.measures.get(args, None) + + def dump(self): + logger.debug("Histograms:") + ks = sorted(self.measures.keys(), key='/'.join) + for k in ks: + logger.debug("- %-60s %s", '|'.join(k), self.measures[k]) + + +class CounterStorage: + + __slots__ = 'counters', 'lock' + + def __init__(self): + self.lock = threading.Lock() + self.clear() + + def clear(self): + with self.lock: + self.counters = {} + + def configure(self, *args): + with self.lock: + self.counters[args] = 0 + + def get(self, *args): + return self.counters[args] + + def add(self, value, *args): + with self.lock: + self.counters[args] += value + + def dump(self): + with self.lock: + ks = sorted(self.counters.keys(), key='/'.join) + logger.debug("Counters:") + for k in ks: + logger.debug("- %-60s %s", '|'.join(k), self.counters[k]) diff --git a/searx/metrology/__init__.py b/searx/metrology/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/searx/network/__init__.py b/searx/network/__init__.py index dbd31c781..40665f7d6 100644 --- a/searx/network/__init__.py +++ b/searx/network/__init__.py @@ -3,7 +3,7 @@ import asyncio import threading import concurrent.futures -from time import time +from timeit import default_timer import httpx import h2.exceptions @@ -65,7 +65,7 @@ def get_context_network(): def request(method, url, **kwargs): """same as requests/requests/api.py request(...)""" - time_before_request = time() + time_before_request = default_timer() # timeout (httpx) if 'timeout' in kwargs: @@ -82,7 +82,7 @@ def request(method, url, **kwargs): timeout += 0.2 # overhead start_time = getattr(THREADLOCAL, 'start_time', time_before_request) if start_time: - timeout -= time() - start_time + timeout -= default_timer() - start_time # raise_for_error check_for_httperror = True @@ -111,7 +111,7 @@ def request(method, url, **kwargs): # update total_time. 
# See get_time_for_thread() and reset_time_for_thread() if hasattr(THREADLOCAL, 'total_time'): - time_after_request = time() + time_after_request = default_timer() THREADLOCAL.total_time += time_after_request - time_before_request # raise an exception diff --git a/searx/network/network.py b/searx/network/network.py index f50acf595..15c23d193 100644 --- a/searx/network/network.py +++ b/searx/network/network.py @@ -199,7 +199,7 @@ class Network: def get_network(name=None): global NETWORKS - return NETWORKS[name or DEFAULT_NAME] + return NETWORKS.get(name or DEFAULT_NAME) def initialize(settings_engines=None, settings_outgoing=None): diff --git a/searx/raise_for_httperror/__init__.py b/searx/raise_for_httperror/__init__.py new file mode 100644 index 000000000..b133da507 --- /dev/null +++ b/searx/raise_for_httperror/__init__.py @@ -0,0 +1,2 @@ +# compatibility with searx/searx +from searx.network import raise_for_httperror diff --git a/searx/results.py b/searx/results.py index b3b874118..a1c1d8527 100644 --- a/searx/results.py +++ b/searx/results.py @@ -5,7 +5,7 @@ from threading import RLock from urllib.parse import urlparse, unquote from searx import logger from searx.engines import engines -from searx.metrology.error_recorder import record_error +from searx.metrics import histogram_observe, counter_add, count_error CONTENT_LEN_IGNORED_CHARS_REGEX = re.compile(r'[,;:!?\./\\\\ ()-_]', re.M | re.U) @@ -196,12 +196,10 @@ class ResultContainer: if len(error_msgs) > 0: for msg in error_msgs: - record_error(engine_name, 'some results are invalids: ' + msg) + count_error(engine_name, 'some results are invalids: ' + msg, secondary=True) if engine_name in engines: - with RLock(): - engines[engine_name].stats['search_count'] += 1 - engines[engine_name].stats['result_count'] += standard_result_count + histogram_observe(standard_result_count, 'engine', engine_name, 'result', 'count') if not self.paging and standard_result_count > 0 and engine_name in engines\ and engines[engine_name].paging: @@ -301,9 +299,8 @@ class ResultContainer: for result in self._merged_results: score = result_score(result) result['score'] = score - with RLock(): - for result_engine in result['engines']: - engines[result_engine].stats['score_count'] += score + for result_engine in result['engines']: + counter_add(score, 'engine', result_engine, 'score') results = sorted(self._merged_results, key=itemgetter('score'), reverse=True) @@ -369,9 +366,9 @@ class ResultContainer: return 0 return resultnum_sum / len(self._number_of_results) - def add_unresponsive_engine(self, engine_name, error_type, error_message=None): + def add_unresponsive_engine(self, engine_name, error_type, error_message=None, suspended=False): if engines[engine_name].display_error_messages: - self.unresponsive_engines.add((engine_name, error_type, error_message)) + self.unresponsive_engines.add((engine_name, error_type, error_message, suspended)) def add_timing(self, engine_name, engine_time, page_load_time): self.timings.append({ diff --git a/searx/search/__init__.py b/searx/search/__init__.py index f777e8595..9b26f38de 100644 --- a/searx/search/__init__.py +++ b/searx/search/__init__.py @@ -18,7 +18,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >. 
import typing import gc import threading -from time import time +from timeit import default_timer from uuid import uuid4 from _thread import start_new_thread @@ -31,6 +31,7 @@ from searx.plugins import plugins from searx.search.models import EngineRef, SearchQuery from searx.search.processors import processors, initialize as initialize_processors from searx.search.checker import initialize as initialize_checker +from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time logger = logger.getChild('search') @@ -50,6 +51,7 @@ else: def initialize(settings_engines=None, enable_checker=False): settings_engines = settings_engines or settings['engines'] initialize_processors(settings_engines) + initialize_metrics([engine['name'] for engine in settings_engines]) if enable_checker: initialize_checker() @@ -106,13 +108,16 @@ class Search: for engineref in self.search_query.engineref_list: processor = processors[engineref.name] + # stop the request now if the engine is suspend + if processor.extend_container_if_suspended(self.result_container): + continue + # set default request parameters request_params = processor.get_params(self.search_query, engineref.category) if request_params is None: continue - with threading.RLock(): - processor.engine.stats['sent_search_count'] += 1 + counter_inc('engine', engineref.name, 'search', 'count', 'sent') # append request to list requests.append((engineref.name, self.search_query.query, request_params)) @@ -157,7 +162,7 @@ class Search: for th in threading.enumerate(): if th.name == search_id: - remaining_time = max(0.0, self.actual_timeout - (time() - self.start_time)) + remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time)) th.join(remaining_time) if th.is_alive(): th._timeout = True @@ -180,12 +185,10 @@ class Search: # do search-request def search(self): - self.start_time = time() - + self.start_time = default_timer() if not self.search_external_bang(): if not self.search_answerers(): self.search_standard() - return self.result_container diff --git a/searx/search/checker/impl.py b/searx/search/checker/impl.py index e54b3f68d..dd090c513 100644 --- a/searx/search/checker/impl.py +++ b/searx/search/checker/impl.py @@ -4,8 +4,8 @@ import typing import types import functools import itertools -import threading from time import time +from timeit import default_timer from urllib.parse import urlparse import re @@ -17,6 +17,7 @@ from searx import network, logger from searx.results import ResultContainer from searx.search.models import SearchQuery, EngineRef from searx.search.processors import EngineProcessor +from searx.metrics import counter_inc logger = logger.getChild('searx.search.checker') @@ -385,9 +386,8 @@ class Checker: engineref_category = search_query.engineref_list[0].category params = self.processor.get_params(search_query, engineref_category) if params is not None: - with threading.RLock(): - self.processor.engine.stats['sent_search_count'] += 1 - self.processor.search(search_query.query, params, result_container, time(), 5) + counter_inc('engine', search_query.engineref_list[0].name, 'search', 'count', 'sent') + self.processor.search(search_query.query, params, result_container, default_timer(), 5) return result_container def get_result_container_tests(self, test_name: str, search_query: SearchQuery) -> ResultContainerTests: diff --git a/searx/search/processors/abstract.py b/searx/search/processors/abstract.py index 26dab069f..854f6df6a 100644 --- a/searx/search/processors/abstract.py +++ 
b/searx/search/processors/abstract.py @@ -1,17 +1,110 @@ # SPDX-License-Identifier: AGPL-3.0-or-later +import threading from abc import abstractmethod, ABC +from timeit import default_timer + from searx import logger +from searx.engines import settings +from searx.network import get_time_for_thread, get_network +from searx.metrics import histogram_observe, counter_inc, count_exception, count_error +from searx.exceptions import SearxEngineAccessDeniedException logger = logger.getChild('searx.search.processor') +SUSPENDED_STATUS = {} + + +class SuspendedStatus: + + __slots__ = 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock' + + def __init__(self): + self.lock = threading.Lock() + self.continuous_errors = 0 + self.suspend_end_time = 0 + self.suspend_reason = None + + @property + def is_suspended(self): + return self.suspend_end_time >= default_timer() + + def suspend(self, suspended_time, suspend_reason): + with self.lock: + # update continuous_errors / suspend_end_time + self.continuous_errors += 1 + if suspended_time is None: + suspended_time = min(settings['search']['max_ban_time_on_fail'], + self.continuous_errors * settings['search']['ban_time_on_fail']) + self.suspend_end_time = default_timer() + suspended_time + self.suspend_reason = suspend_reason + logger.debug('Suspend engine for %i seconds', suspended_time) + + def resume(self): + with self.lock: + # reset the suspend variables + self.continuous_errors = 0 + self.suspend_end_time = 0 + self.suspend_reason = None class EngineProcessor(ABC): + __slots__ = 'engine', 'engine_name', 'lock', 'suspended_status' + def __init__(self, engine, engine_name): self.engine = engine self.engine_name = engine_name + key = get_network(self.engine_name) + key = id(key) if key else self.engine_name + self.suspended_status = SUSPENDED_STATUS.setdefault(key, SuspendedStatus()) + + def handle_exception(self, result_container, reason, exception, suspend=False, display_exception=True): + # update result_container + error_message = str(exception) if display_exception and exception else None + result_container.add_unresponsive_engine(self.engine_name, reason, error_message) + # metrics + counter_inc('engine', self.engine_name, 'search', 'count', 'error') + if exception: + count_exception(self.engine_name, exception) + else: + count_error(self.engine_name, reason) + # suspend the engine ? 
+ if suspend: + suspended_time = None + if isinstance(exception, SearxEngineAccessDeniedException): + suspended_time = exception.suspended_time + self.suspended_status.suspend(suspended_time, reason) # pylint: disable=no-member + + def _extend_container_basic(self, result_container, start_time, search_results): + # update result_container + result_container.extend(self.engine_name, search_results) + engine_time = default_timer() - start_time + page_load_time = get_time_for_thread() + result_container.add_timing(self.engine_name, engine_time, page_load_time) + # metrics + counter_inc('engine', self.engine_name, 'search', 'count', 'successful') + histogram_observe(engine_time, 'engine', self.engine_name, 'time', 'total') + if page_load_time is not None: + histogram_observe(page_load_time, 'engine', self.engine_name, 'time', 'http') + + def extend_container(self, result_container, start_time, search_results): + if getattr(threading.current_thread(), '_timeout', False): + # the main thread is not waiting anymore + self.handle_exception(result_container, 'Timeout', None) + else: + # check if the engine accepted the request + if search_results is not None: + self._extend_container_basic(result_container, start_time, search_results) + self.suspended_status.resume() + + def extend_container_if_suspended(self, result_container): + if self.suspended_status.is_suspended: + result_container.add_unresponsive_engine(self.engine_name, + self.suspended_status.suspend_reason, + suspended=True) + return True + return False def get_params(self, search_query, engine_category): # if paging is not supported, skip diff --git a/searx/search/processors/offline.py b/searx/search/processors/offline.py index ede8eb5e1..5186b346a 100644 --- a/searx/search/processors/offline.py +++ b/searx/search/processors/offline.py @@ -1,51 +1,26 @@ # SPDX-License-Identifier: AGPL-3.0-or-later -import threading -from time import time from searx import logger -from searx.metrology.error_recorder import record_exception, record_error from searx.search.processors.abstract import EngineProcessor -logger = logger.getChild('search.processor.offline') +logger = logger.getChild('searx.search.processor.offline') class OfflineProcessor(EngineProcessor): engine_type = 'offline' - def _record_stats_on_error(self, result_container, start_time): - engine_time = time() - start_time - result_container.add_timing(self.engine_name, engine_time, engine_time) - - with threading.RLock(): - self.engine.stats['errors'] += 1 - def _search_basic(self, query, params): return self.engine.search(query, params) def search(self, query, params, result_container, start_time, timeout_limit): try: search_results = self._search_basic(query, params) - - if search_results: - result_container.extend(self.engine_name, search_results) - - engine_time = time() - start_time - result_container.add_timing(self.engine_name, engine_time, engine_time) - with threading.RLock(): - self.engine.stats['engine_time'] += engine_time - self.engine.stats['engine_time_count'] += 1 - + self.extend_container(result_container, start_time, search_results) except ValueError as e: - record_exception(self.engine_name, e) - self._record_stats_on_error(result_container, start_time) + # do not record the error logger.exception('engine {0} : invalid input : {1}'.format(self.engine_name, e)) except Exception as e: - record_exception(self.engine_name, e) - self._record_stats_on_error(result_container, start_time) - result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash', str(e)) 
+ self.handle_exception(result_container, 'unexpected crash', e) logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) - else: - if getattr(threading.current_thread(), '_timeout', False): - record_error(self.engine_name, 'Timeout') diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py index 66719ea9b..c39937023 100644 --- a/searx/search/processors/online.py +++ b/searx/search/processors/online.py @@ -1,23 +1,21 @@ # SPDX-License-Identifier: AGPL-3.0-or-later from time import time -import threading import asyncio import httpx import searx.network -from searx.engines import settings from searx import logger from searx.utils import gen_useragent from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException, SearxEngineTooManyRequestsException,) -from searx.metrology.error_recorder import record_exception, record_error +from searx.metrics.error_recorder import count_error from searx.search.processors.abstract import EngineProcessor -logger = logger.getChild('search.processor.online') +logger = logger.getChild('searx.search.processor.online') def default_request_params(): @@ -41,11 +39,6 @@ class OnlineProcessor(EngineProcessor): if params is None: return None - # skip suspended engines - if self.engine.suspend_end_time >= time(): - logger.debug('Engine currently suspended: %s', self.engine_name) - return None - # add default params params.update(default_request_params()) @@ -97,9 +90,10 @@ class OnlineProcessor(EngineProcessor): status_code = str(response.status_code or '') reason = response.reason_phrase or '' hostname = response.url.host - record_error(self.engine_name, - '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects), - (status_code, reason, hostname)) + count_error(self.engine_name, + '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects), + (status_code, reason, hostname), + secondary=True) return response @@ -130,89 +124,38 @@ class OnlineProcessor(EngineProcessor): # set the network searx.network.set_context_network_name(self.engine_name) - # suppose everything will be alright - http_exception = False - suspended_time = None - try: # send requests and parse the results search_results = self._search_basic(query, params) - - # check if the engine accepted the request - if search_results is not None: - # yes, so add results - result_container.extend(self.engine_name, search_results) - - # update engine time when there is no exception - engine_time = time() - start_time - page_load_time = searx.network.get_time_for_thread() - result_container.add_timing(self.engine_name, engine_time, page_load_time) - with threading.RLock(): - self.engine.stats['engine_time'] += engine_time - self.engine.stats['engine_time_count'] += 1 - # update stats with the total HTTP time - self.engine.stats['page_load_time'] += page_load_time - self.engine.stats['page_load_count'] += 1 - except Exception as e: - record_exception(self.engine_name, e) - - # Timing - engine_time = time() - start_time - page_load_time = searx.network.get_time_for_thread() - result_container.add_timing(self.engine_name, engine_time, page_load_time) - - # Record the errors - with threading.RLock(): - self.engine.stats['errors'] += 1 - - if (issubclass(e.__class__, (httpx.TimeoutException, asyncio.TimeoutError))): - result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout') - # requests timeout (connect or read) - logger.error("engine {0} : HTTP requests timeout" + 
self.extend_container(result_container, start_time, search_results) + except (httpx.TimeoutException, asyncio.TimeoutError) as e: + # requests timeout (connect or read) + self.handle_exception(result_container, 'HTTP timeout', e, suspend=True, display_exception=False) + logger.error("engine {0} : HTTP requests timeout" + "(search duration : {1} s, timeout: {2} s) : {3}" + .format(self.engine_name, time() - start_time, + timeout_limit, + e.__class__.__name__)) + except (httpx.HTTPError, httpx.StreamError) as e: + # other requests exception + self.handle_exception(result_container, 'HTTP error', e, suspend=True, display_exception=False) + logger.exception("engine {0} : requests exception" "(search duration : {1} s, timeout: {2} s) : {3}" - .format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__)) - http_exception = True - elif (issubclass(e.__class__, (httpx.HTTPError, httpx.StreamError))): - result_container.add_unresponsive_engine(self.engine_name, 'HTTP error') - # other requests exception - logger.exception("engine {0} : requests exception" - "(search duration : {1} s, timeout: {2} s) : {3}" - .format(self.engine_name, engine_time, timeout_limit, e)) - http_exception = True - elif (issubclass(e.__class__, SearxEngineCaptchaException)): - result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required') - logger.exception('engine {0} : CAPTCHA'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - elif (issubclass(e.__class__, SearxEngineTooManyRequestsException)): - result_container.add_unresponsive_engine(self.engine_name, 'too many requests') - logger.exception('engine {0} : Too many requests'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - elif (issubclass(e.__class__, SearxEngineAccessDeniedException)): - result_container.add_unresponsive_engine(self.engine_name, 'blocked') - logger.exception('engine {0} : Searx is blocked'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - else: - result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash') - # others errors - logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) - else: - if getattr(threading.current_thread(), '_timeout', False): - record_error(self.engine_name, 'Timeout') - - # suspend the engine if there is an HTTP error - # or suspended_time is defined - with threading.RLock(): - if http_exception or suspended_time: - # update continuous_errors / suspend_end_time - self.engine.continuous_errors += 1 - if suspended_time is None: - suspended_time = min(settings['search']['max_ban_time_on_fail'], - self.engine.continuous_errors * settings['search']['ban_time_on_fail']) - self.engine.suspend_end_time = time() + suspended_time - else: - # reset the suspend variables - self.engine.continuous_errors = 0 - self.engine.suspend_end_time = 0 + .format(self.engine_name, time() - start_time, + timeout_limit, + e)) + except SearxEngineCaptchaException as e: + self.handle_exception(result_container, 'CAPTCHA required', e, suspend=True, display_exception=False) + logger.exception('engine {0} : CAPTCHA'.format(self.engine_name)) + except SearxEngineTooManyRequestsException as e: + self.handle_exception(result_container, 'too many requests', e, suspend=True, display_exception=False) + logger.exception('engine {0} : Too many requests'.format(self.engine_name)) + except SearxEngineAccessDeniedException as e: + self.handle_exception(result_container, 
'blocked', e, suspend=True, display_exception=False) + logger.exception('engine {0} : Searx is blocked'.format(self.engine_name)) + except Exception as e: + self.handle_exception(result_container, 'unexpected crash', e, display_exception=False) + logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) def get_default_tests(self): tests = {} diff --git a/searx/static/themes/oscar/css/logicodev-dark.css b/searx/static/themes/oscar/css/logicodev-dark.css index 9bacb3c13..618de9327 100644 --- a/searx/static/themes/oscar/css/logicodev-dark.css +++ b/searx/static/themes/oscar/css/logicodev-dark.css @@ -923,12 +923,78 @@ input.cursor-text { padding: 0.5rem 1rem; margin: 0rem 0 0 2rem; border: 1px solid #ddd; + box-shadow: 2px 2px 2px 0px rgba(0, 0, 0, 0.1); background: white; font-size: 14px; font-weight: normal; z-index: 1000000; } +td:hover .engine-tooltip, th:hover .engine-tooltip, .engine-tooltip:hover { display: inline-block; } +/* stacked-bar-chart */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 3rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + flex-grow: 1; + align-items: center; + display: inline-flex; +} +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} +.stacked-bar-chart-base { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} +.stacked-bar-chart-median { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: #000000; + border: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate80 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border: 1px solid rgba(0, 0, 0, 0.3); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate95 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-bottom: 1px dotted rgba(0, 0, 0, 0.5); + padding: 0; +} +.stacked-bar-chart-rate100 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-left: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.4rem 0; + width: 1px; +} diff --git a/searx/static/themes/oscar/css/logicodev-dark.min.css b/searx/static/themes/oscar/css/logicodev-dark.min.css index a70a109f4..09aed0298 100644 Binary files a/searx/static/themes/oscar/css/logicodev-dark.min.css and b/searx/static/themes/oscar/css/logicodev-dark.min.css differ diff --git a/searx/static/themes/oscar/css/logicodev-dark.min.css.map b/searx/static/themes/oscar/css/logicodev-dark.min.css.map index 4cd2eb8c5..71062db2e 100644 Binary files a/searx/static/themes/oscar/css/logicodev-dark.min.css.map and b/searx/static/themes/oscar/css/logicodev-dark.min.css.map differ diff --git a/searx/static/themes/oscar/css/logicodev.css b/searx/static/themes/oscar/css/logicodev.css index 6e5bddce3..4f6b36b11 100644 --- a/searx/static/themes/oscar/css/logicodev.css +++ b/searx/static/themes/oscar/css/logicodev.css @@ -896,15 +896,81 @@ input.cursor-text { padding: 0.5rem 1rem; margin: 0rem 0 0 2rem; border: 1px solid #ddd; + box-shadow: 2px 2px 2px 0px rgba(0, 0, 0, 0.1); background: white; font-size: 14px; font-weight: normal; z-index: 1000000; } +td:hover .engine-tooltip, th:hover .engine-tooltip, .engine-tooltip:hover { display: inline-block; } +/* stacked-bar-chart */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 3rem; + width: 100%; + 
width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + flex-grow: 1; + align-items: center; + display: inline-flex; +} +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} +.stacked-bar-chart-base { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} +.stacked-bar-chart-median { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: #d5d8d7; + border: 1px solid rgba(213, 216, 215, 0.9); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate80 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border: 1px solid rgba(213, 216, 215, 0.3); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate95 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-bottom: 1px dotted rgba(213, 216, 215, 0.5); + padding: 0; +} +.stacked-bar-chart-rate100 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-left: 1px solid rgba(213, 216, 215, 0.9); + padding: 0.4rem 0; + width: 1px; +} /*Global*/ body { background: #1d1f21 none !important; diff --git a/searx/static/themes/oscar/css/logicodev.min.css b/searx/static/themes/oscar/css/logicodev.min.css index 12ddfe00e..db035aa75 100644 Binary files a/searx/static/themes/oscar/css/logicodev.min.css and b/searx/static/themes/oscar/css/logicodev.min.css differ diff --git a/searx/static/themes/oscar/css/logicodev.min.css.map b/searx/static/themes/oscar/css/logicodev.min.css.map index 3e15ed5ec..50598d2ef 100644 Binary files a/searx/static/themes/oscar/css/logicodev.min.css.map and b/searx/static/themes/oscar/css/logicodev.min.css.map differ diff --git a/searx/static/themes/oscar/css/pointhi.css b/searx/static/themes/oscar/css/pointhi.css index c648f2b60..64f612d79 100644 --- a/searx/static/themes/oscar/css/pointhi.css +++ b/searx/static/themes/oscar/css/pointhi.css @@ -688,6 +688,71 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not z-index: 1000000; } th:hover .engine-tooltip, +td:hover .engine-tooltip, .engine-tooltip:hover { display: inline-block; } +/* stacked-bar-chart */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 3rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + flex-grow: 1; + align-items: center; + display: inline-flex; +} +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} +.stacked-bar-chart-base { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} +.stacked-bar-chart-median { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: #000000; + border: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate80 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border: 1px solid rgba(0, 0, 0, 0.3); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate95 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-bottom: 1px dotted rgba(0, 0, 0, 0.5); + padding: 0; +} +.stacked-bar-chart-rate100 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-left: 1px solid rgba(0, 0, 0, 
0.9); + padding: 0.4rem 0; + width: 1px; +} diff --git a/searx/static/themes/oscar/css/pointhi.min.css b/searx/static/themes/oscar/css/pointhi.min.css index 02bee6ad7..4332e4767 100644 Binary files a/searx/static/themes/oscar/css/pointhi.min.css and b/searx/static/themes/oscar/css/pointhi.min.css differ diff --git a/searx/static/themes/oscar/css/pointhi.min.css.map b/searx/static/themes/oscar/css/pointhi.min.css.map index 1d18b1fd7..abb30817f 100644 Binary files a/searx/static/themes/oscar/css/pointhi.min.css.map and b/searx/static/themes/oscar/css/pointhi.min.css.map differ diff --git a/searx/static/themes/oscar/js/searx.min.js b/searx/static/themes/oscar/js/searx.min.js index 8b17d4f61..b31aad6f0 100644 Binary files a/searx/static/themes/oscar/js/searx.min.js and b/searx/static/themes/oscar/js/searx.min.js differ diff --git a/searx/static/themes/oscar/src/less/logicodev-dark/oscar.less b/searx/static/themes/oscar/src/less/logicodev-dark/oscar.less index ff37594c8..38a4424ac 100644 --- a/searx/static/themes/oscar/src/less/logicodev-dark/oscar.less +++ b/searx/static/themes/oscar/src/less/logicodev-dark/oscar.less @@ -1,4 +1,7 @@ @import "../logicodev/variables.less"; + +@stacked-bar-chart: rgb(213, 216, 215, 1); + @import "../logicodev/footer.less"; @import "../logicodev/checkbox.less"; @import "../logicodev/onoff.less"; diff --git a/searx/static/themes/oscar/src/less/logicodev/preferences.less b/searx/static/themes/oscar/src/less/logicodev/preferences.less index ccd0b0249..32e230413 100644 --- a/searx/static/themes/oscar/src/less/logicodev/preferences.less +++ b/searx/static/themes/oscar/src/less/logicodev/preferences.less @@ -20,12 +20,72 @@ input.cursor-text { padding: 0.5rem 1rem; margin: 0rem 0 0 2rem; border: 1px solid #ddd; + box-shadow: 2px 2px 2px 0px rgba(0,0,0,0.1); background: white; font-size: 14px; font-weight: normal; z-index: 1000000; } -th:hover .engine-tooltip, .engine-tooltip:hover { +td:hover .engine-tooltip, th:hover .engine-tooltip, .engine-tooltip:hover { display: inline-block; -} \ No newline at end of file +} + +/* stacked-bar-chart */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 3rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + flex-grow: 1; + align-items: center; + display: inline-flex; +} + +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} + +.stacked-bar-chart-base { + display:flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} + +.stacked-bar-chart-median { + .stacked-bar-chart-base(); + background: @stacked-bar-chart; + border: 1px solid fade(@stacked-bar-chart, 90%); + padding: 0.3rem 0; +} + +.stacked-bar-chart-rate80 { + .stacked-bar-chart-base(); + background: transparent; + border: 1px solid fade(@stacked-bar-chart, 30%); + padding: 0.3rem 0; +} + +.stacked-bar-chart-rate95 { + .stacked-bar-chart-base(); + background: transparent; + border-bottom: 1px dotted fade(@stacked-bar-chart, 50%); + padding: 0; +} + +.stacked-bar-chart-rate100 { + .stacked-bar-chart-base(); + background: transparent; + border-left: 1px solid fade(@stacked-bar-chart, 90%); + padding: 0.4rem 0; + width: 1px; +} diff --git a/searx/static/themes/oscar/src/less/logicodev/variables.less b/searx/static/themes/oscar/src/less/logicodev/variables.less index 4ee8df8e7..ae2516c05 100644 --- a/searx/static/themes/oscar/src/less/logicodev/variables.less +++ 
b/searx/static/themes/oscar/src/less/logicodev/variables.less @@ -14,3 +14,5 @@ @light-green: #01D7D4; @orange: #FFA92F; @dark-red: #c9432f; + +@stacked-bar-chart: rgb(0, 0, 0); diff --git a/searx/static/themes/oscar/src/less/pointhi/oscar.less b/searx/static/themes/oscar/src/less/pointhi/oscar.less index 037bfa59b..6f92a0791 100644 --- a/searx/static/themes/oscar/src/less/pointhi/oscar.less +++ b/searx/static/themes/oscar/src/less/pointhi/oscar.less @@ -1,3 +1,5 @@ +@import "variables.less"; + @import "footer.less"; @import "checkbox.less"; diff --git a/searx/static/themes/oscar/src/less/pointhi/preferences.less b/searx/static/themes/oscar/src/less/pointhi/preferences.less index f3a6bee22..cb63674ed 100644 --- a/searx/static/themes/oscar/src/less/pointhi/preferences.less +++ b/searx/static/themes/oscar/src/less/pointhi/preferences.less @@ -14,6 +14,66 @@ z-index: 1000000; } -th:hover .engine-tooltip, .engine-tooltip:hover { +th:hover .engine-tooltip, td:hover .engine-tooltip, .engine-tooltip:hover { display: inline-block; } + +/* stacked-bar-chart */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 3rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + flex-grow: 1; + align-items: center; + display: inline-flex; +} + +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} + +.stacked-bar-chart-base { + display:flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} + +.stacked-bar-chart-median { + .stacked-bar-chart-base(); + background: @stacked-bar-chart; + border: 1px solid fade(@stacked-bar-chart, 90%); + padding: 0.3rem 0; +} + +.stacked-bar-chart-rate80 { + .stacked-bar-chart-base(); + background: transparent; + border: 1px solid fade(@stacked-bar-chart, 30%); + padding: 0.3rem 0; +} + +.stacked-bar-chart-rate95 { + .stacked-bar-chart-base(); + background: transparent; + border-bottom: 1px dotted fade(@stacked-bar-chart, 50%); + padding: 0; +} + +.stacked-bar-chart-rate100 { + .stacked-bar-chart-base(); + background: transparent; + border-left: 1px solid fade(@stacked-bar-chart, 90%); + padding: 0.4rem 0; + width: 1px; +} + diff --git a/searx/static/themes/oscar/src/less/pointhi/variables.less b/searx/static/themes/oscar/src/less/pointhi/variables.less new file mode 100644 index 000000000..991f03098 --- /dev/null +++ b/searx/static/themes/oscar/src/less/pointhi/variables.less @@ -0,0 +1 @@ +@stacked-bar-chart: rgb(0, 0, 0); diff --git a/searx/static/themes/simple/css/searx-rtl.css b/searx/static/themes/simple/css/searx-rtl.css index 6b9b47d85..0da2850c5 100644 --- a/searx/static/themes/simple/css/searx-rtl.css +++ b/searx/static/themes/simple/css/searx-rtl.css @@ -1,4 +1,4 @@ -/*! searx | 23-03-2021 | */ +/*! 
searx | 21-04-2021 | */ /* * searx, A privacy-respecting, hackable metasearch engine * @@ -692,6 +692,12 @@ html.js .show_if_nojs { .danger { background-color: #fae1e1; } +.warning { + background: #faf5e1; +} +.success { + background: #e3fae1; +} .badge { display: inline-block; color: #fff; @@ -1147,6 +1153,69 @@ select:focus { transform: rotate(360deg); } } +/* -- stacked bar chart -- */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 4rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + align-items: center; + display: inline-flex; +} +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} +.stacked-bar-chart-base { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} +.stacked-bar-chart-median { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: #000000; + border: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate80 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border: 1px solid rgba(0, 0, 0, 0.3); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate95 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-bottom: 1px dotted rgba(0, 0, 0, 0.5); + padding: 0; +} +.stacked-bar-chart-rate100 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-left: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.4rem 0; + width: 1px; +} /*! Autocomplete.js v2.6.3 | license MIT | (c) 2017, Baptiste Donaux | http://autocomplete-js.com */ .autocomplete { position: absolute; @@ -1435,8 +1504,10 @@ select:focus { font-size: 14px; font-weight: normal; z-index: 1000000; + text-align: left; } #main_preferences th:hover .engine-tooltip, +#main_preferences td:hover .engine-tooltip, #main_preferences .engine-tooltip:hover { display: inline-block; } diff --git a/searx/static/themes/simple/css/searx-rtl.min.css b/searx/static/themes/simple/css/searx-rtl.min.css index 3b5d7f753..615b88ec5 100644 Binary files a/searx/static/themes/simple/css/searx-rtl.min.css and b/searx/static/themes/simple/css/searx-rtl.min.css differ diff --git a/searx/static/themes/simple/css/searx.css b/searx/static/themes/simple/css/searx.css index 484fdc82d..15b9f0853 100644 --- a/searx/static/themes/simple/css/searx.css +++ b/searx/static/themes/simple/css/searx.css @@ -1,4 +1,4 @@ -/*! searx | 23-03-2021 | */ +/*! 
searx | 21-04-2021 | */ /* * searx, A privacy-respecting, hackable metasearch engine * @@ -692,6 +692,12 @@ html.js .show_if_nojs { .danger { background-color: #fae1e1; } +.warning { + background: #faf5e1; +} +.success { + background: #e3fae1; +} .badge { display: inline-block; color: #fff; @@ -1147,6 +1153,69 @@ select:focus { transform: rotate(360deg); } } +/* -- stacked bar chart -- */ +.stacked-bar-chart { + margin: 0; + padding: 0 0.125rem 0 4rem; + width: 100%; + width: -moz-available; + width: -webkit-fill-available; + width: fill; + flex-direction: row; + flex-wrap: nowrap; + align-items: center; + display: inline-flex; +} +.stacked-bar-chart-value { + width: 3rem; + display: inline-block; + position: absolute; + padding: 0 0.5rem; + text-align: right; +} +.stacked-bar-chart-base { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; +} +.stacked-bar-chart-median { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: #000000; + border: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate80 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border: 1px solid rgba(0, 0, 0, 0.3); + padding: 0.3rem 0; +} +.stacked-bar-chart-rate95 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-bottom: 1px dotted rgba(0, 0, 0, 0.5); + padding: 0; +} +.stacked-bar-chart-rate100 { + display: flex; + flex-shrink: 0; + flex-grow: 0; + flex-basis: unset; + background: transparent; + border-left: 1px solid rgba(0, 0, 0, 0.9); + padding: 0.4rem 0; + width: 1px; +} /*! Autocomplete.js v2.6.3 | license MIT | (c) 2017, Baptiste Donaux | http://autocomplete-js.com */ .autocomplete { position: absolute; @@ -1435,8 +1504,10 @@ select:focus { font-size: 14px; font-weight: normal; z-index: 1000000; + text-align: left; } #main_preferences th:hover .engine-tooltip, +#main_preferences td:hover .engine-tooltip, #main_preferences .engine-tooltip:hover { display: inline-block; } diff --git a/searx/static/themes/simple/css/searx.min.css b/searx/static/themes/simple/css/searx.min.css index 2757ba434..52ad98ecd 100644 Binary files a/searx/static/themes/simple/css/searx.min.css and b/searx/static/themes/simple/css/searx.min.css differ diff --git a/searx/static/themes/simple/js/searx.head.min.js b/searx/static/themes/simple/js/searx.head.min.js index dd85086ee..043f25515 100644 Binary files a/searx/static/themes/simple/js/searx.head.min.js and b/searx/static/themes/simple/js/searx.head.min.js differ diff --git a/searx/static/themes/simple/js/searx.min.js b/searx/static/themes/simple/js/searx.min.js index 17daac2a4..8ae15bede 100644 Binary files a/searx/static/themes/simple/js/searx.min.js and b/searx/static/themes/simple/js/searx.min.js differ diff --git a/searx/static/themes/simple/less/definitions.less b/searx/static/themes/simple/less/definitions.less index 7a43720b8..c1465a579 100644 --- a/searx/static/themes/simple/less/definitions.less +++ b/searx/static/themes/simple/less/definitions.less @@ -19,6 +19,9 @@ @color-warning: #dbba34; @color-warning-background: lighten(@color-warning, 40%); +@color-success: #42db34; +@color-success-background: lighten(@color-success, 40%); + /// General @color-font: #444; diff --git a/searx/static/themes/simple/less/preferences.less b/searx/static/themes/simple/less/preferences.less index 26c4f6ddd..93bdaad27 100644 --- a/searx/static/themes/simple/less/preferences.less +++ 
@@ -105,9 +105,10 @@
       font-size: 14px;
       font-weight: normal;
       z-index: 1000000;
+      text-align: left;
     }
 
-    th:hover .engine-tooltip, .engine-tooltip:hover {
+    th:hover .engine-tooltip, td:hover .engine-tooltip, .engine-tooltip:hover {
       display: inline-block;
     }
diff --git a/searx/static/themes/simple/less/style.less b/searx/static/themes/simple/less/style.less
index e00b2deff..55fb721c9 100644
--- a/searx/static/themes/simple/less/style.less
+++ b/searx/static/themes/simple/less/style.less
@@ -4,6 +4,8 @@
  * To convert "style.less" to "style.css" run: $make styles
  */
 
+@stacked-bar-chart: rgb(0, 0, 0);
+
 @import "normalize.less";
 
 @import "definitions.less";
diff --git a/searx/static/themes/simple/less/toolkit.less b/searx/static/themes/simple/less/toolkit.less
index 46ea17b3a..b3dba9ea9 100644
--- a/searx/static/themes/simple/less/toolkit.less
+++ b/searx/static/themes/simple/less/toolkit.less
@@ -36,6 +36,14 @@ html.js .show_if_nojs {
   background-color: @color-error-background;
 }
 
+.warning {
+  background: @color-warning-background;
+}
+
+.success {
+  background: @color-success-background;
+}
+
 .badge {
   display: inline-block;
   color: #fff;
@@ -465,4 +473,62 @@ select {
     -webkit-transform: rotate(360deg);
     transform: rotate(360deg);
   }
-}
\ No newline at end of file
+}
+
+/* -- stacked bar chart -- */
+.stacked-bar-chart {
+  margin: 0;
+  padding: 0 0.125rem 0 4rem;
+  width: 100%;
+  width: -moz-available;
+  width: -webkit-fill-available;
+  width: fill;
+  flex-direction: row;
+  flex-wrap: nowrap;
+  align-items: center;
+  display: inline-flex;
+}
+
+.stacked-bar-chart-value {
+  width: 3rem;
+  display: inline-block;
+  position: absolute;
+  padding: 0 0.5rem;
+  text-align: right;
+}
+
+.stacked-bar-chart-base {
+  display:flex;
+  flex-shrink: 0;
+  flex-grow: 0;
+  flex-basis: unset;
+}
+
+.stacked-bar-chart-median {
+  .stacked-bar-chart-base();
+  background: @stacked-bar-chart;
+  border: 1px solid fade(@stacked-bar-chart, 90%);
+  padding: 0.3rem 0;
+}
+
+.stacked-bar-chart-rate80 {
+  .stacked-bar-chart-base();
+  background: transparent;
+  border: 1px solid fade(@stacked-bar-chart, 30%);
+  padding: 0.3rem 0;
+}
+
+.stacked-bar-chart-rate95 {
+  .stacked-bar-chart-base();
+  background: transparent;
+  border-bottom: 1px dotted fade(@stacked-bar-chart, 50%);
+  padding: 0;
+}
+
+.stacked-bar-chart-rate100 {
+  .stacked-bar-chart-base();
+  background: transparent;
+  border-left: 1px solid fade(@stacked-bar-chart, 90%);
+  padding: 0.4rem 0;
+  width: 1px;
+}
diff --git a/searx/templates/oscar/macros.html b/searx/templates/oscar/macros.html
index 1cf46074f..0e9dc227a 100644
--- a/searx/templates/oscar/macros.html
+++ b/searx/templates/oscar/macros.html
@@ -134,13 +134,11 @@ custom-select{% if rtl %}-rtl{% endif %}
 {%- endmacro %}
 
 {% macro support_toggle(supports) -%}
-    {%- if supports -%}
-
-    {{- _("supported") -}}
-
+    {%- if supports == '?' -%}
+    {{- "" -}}
+    {%- elif supports -%}
+    {{- "" -}}
     {%- else -%}
-
-    {{- _("not supported") -}}
-
+    {{- "" -}}
     {%- endif -%}
 {%- endmacro %}
diff --git a/searx/templates/oscar/preferences.html b/searx/templates/oscar/preferences.html
index 2602c19d9..e8cae1ca5 100644
--- a/searx/templates/oscar/preferences.html
+++ b/searx/templates/oscar/preferences.html
@@ -1,16 +1,74 @@
 {% from 'oscar/macros.html' import preferences_item_header, preferences_item_header_rtl, preferences_item_footer, preferences_item_footer_rtl, checkbox_toggle, support_toggle, custom_select_class %}
 
 {% extends "oscar/base.html" %}
 
-{% macro engine_about(search_engine, id) -%}
-{% if search_engine.about is defined %}
+{%- macro engine_about(search_engine, id) -%}
+{% if search_engine.about is defined or stats[search_engine.name]['result_count'] > 0 %}
 {% set about = search_engine.about %}
 {%- endif -%}
 {%- endmacro %}
 
-{% block title %}{{ _('preferences') }} - {% endblock %}
+
+{%- macro engine_time(engine_name, css_align_class) -%}
+{{- "" -}}
+    {%- if stats[engine_name].time != None -%}
+        {{- stats[engine_name].time -}}{{- "" -}}
+        {{- "" -}}
+
+    {%- endif -%}
+
+{%- endmacro -%}
+
+{%- macro engine_reliability(engine_name, css_align_class) -%}
+{% set r = reliabilities.get(engine_name, {}).get('reliablity', None) %}
+{% set checker_result = reliabilities.get(engine_name, {}).get('checker', []) %}
+{% set errors = reliabilities.get(engine_name, {}).get('errors', []) %}
+{% if r != None %}
+    {% if r <= 50 %}{% set label = 'danger' %}
+    {% elif r < 80 %}{% set label = 'warning' %}
+    {% elif r < 90 %}{% set label = 'default' %}
+    {% else %}{% set label = 'success' %}
+    {% endif %}
+{% else %}
+    {% set r = '' %}
+{% endif %}
+{% if checker_result or errors %}
+{{- "" -}}
+
+    {%- if reliabilities[engine_name].checker %}{{ icon('exclamation-sign', 'The checker fails on some tests') }}{% endif %} {{ r -}}
+    {{- "" -}}
+    {{- "" -}}
+
+{%- else -%}
+{{ r }}
+{%- endif -%}
+{%- endmacro -%}
+
+{%- block title %}{{ _('preferences') }} - {% endblock -%}
+
 {% block content %}
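Note: the new engine_reliability macro above hard-codes the reliability-to-label mapping in the template. The snippet below is only an illustrative Python sketch of that same branching, useful when reviewing the thresholds; the helper name reliability_label is hypothetical and is not part of this diff.

# Sketch only: mirrors the thresholds used by engine_reliability above.
def reliability_label(r):
    """Map a reliability percentage (0-100) to the CSS label used in the table."""
    if r is None:
        return ''           # no checker/error data recorded for this engine
    if r <= 50:
        return 'danger'     # red row (.danger)
    if r < 80:
        return 'warning'    # yellow row (.warning, added by this change)
    if r < 90:
        return 'default'    # neutral styling
    return 'success'        # green row (.success, added by this change)

print(reliability_label(75))   # -> 'warning'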
@@ -182,7 +240,6 @@
-
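Note: the stacked-bar-chart-{median,rate80,rate95,rate100} classes introduced above suggest that each engine's bar is assembled from response-time percentiles produced by the new metrics module; the exact wiring is in templates and Python code not shown in this excerpt. A minimal sketch, assuming the segments correspond to the median, 80th, 95th percentile and maximum timings (the function time_quantiles is hypothetical, not code from this change):

# Assumption: bar segments are derived from recorded per-engine timings.
def time_quantiles(durations):
    """Return (median, q80, q95, q100) for a non-empty list of timings in seconds."""
    values = sorted(durations)

    def quantile(q):
        # nearest-rank quantile; interpolation is unnecessary for a visual bar chart
        return values[min(len(values) - 1, int(q * len(values)))]

    return quantile(0.5), quantile(0.8), quantile(0.95), values[-1]

print(time_quantiles([0.2, 0.4, 0.5, 0.9, 1.3, 2.1]))   # -> (0.9, 1.3, 2.1, 2.1)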