# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
							|  |  |  | """Qwant (Web, News, Images, Videos)
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | This engine uses the Qwant API (https://api.qwant.com/v3). The API is | 
					
						
							|  |  |  | undocumented but can be reverse engineered by reading the network log of | 
					
						
							|  |  |  | https://www.qwant.com/ queries. | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | This implementation is used by different qwant engines in the settings.yml:: | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   - name: qwant | 
					
						
							|  |  |  |     categories: general | 
					
						
							|  |  |  |     ... | 
					
						
							|  |  |  |   - name: qwant news | 
					
						
							|  |  |  |     categories: news | 
					
						
							|  |  |  |     ... | 
					
						
							|  |  |  |   - name: qwant images | 
					
						
							|  |  |  |     categories: images | 
					
						
							|  |  |  |     ... | 
					
						
							|  |  |  |   - name: qwant videos | 
					
						
							|  |  |  |     categories: videos | 
					
						
							|  |  |  |     ... | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-06-01 00:00:32 +02:00
										 |  |  | """
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-07-13 18:16:09 +02:00
from datetime import (
    datetime,
    timedelta,
)
from json import loads
from urllib.parse import urlencode
from flask_babel import gettext

from searx.utils import match_language
from searx.exceptions import SearxEngineAPIException
from searx.network import raise_for_httperror


# about
about = {
    "website": 'https://www.qwant.com/',
    "wikidata_id": 'Q14657870',
    "official_api_documentation": None,
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

# engine dependent config
categories = []
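# 'categories' is filled from the engine's settings.yml entry; request() and
# response() map its first entry to a Qwant API keyword via
# category_to_keyword below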
					
						
paging = True
supported_languages_url = about['website']

category_to_keyword = {
    'general': 'web',
    'news': 'news',
    'images': 'images',
    'videos': 'videos',
}

# search-url
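# for example, a plain web query for "test" on the first page (locale en_US)
# is assumed to produce a request URL of the form:
#   https://api.qwant.com/v3/search/web?q=test&count=10&offset=0&locale=en_US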
url = 'https://api.qwant.com/v3/search/{keyword}?{query}&count={count}&offset={offset}'


def request(query, params):
    """Qwant search request"""
    keyword = category_to_keyword[categories[0]]
    count = 10  # web: count must be equal to 10

    if keyword == 'images':
        count = 50
        offset = (params['pageno'] - 1) * count
        # count + offset must be lower than 250
        offset = min(offset, 199)
    else:
        offset = (params['pageno'] - 1) * count
        # count + offset must be lower than 50
        offset = min(offset, 40)
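    # e.g. an images query for pageno 5 computes offset 4 * 50 = 200, which
    # the cap above lowers to 199; a web query for pageno 6 computes
    # 5 * 10 = 50, capped to 40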
					
						

    params['url'] = url.format(
        keyword=keyword,
        query=urlencode({'q': query}),
        offset=offset,
        count=count,
    )
									
										
										
										

    # add language tag
    if params['language'] == 'all':
        params['url'] += '&locale=en_US'
    else:
        language = match_language(
            params['language'],
            supported_languages,
            language_aliases,
        )
        params['url'] += '&locale=' + language.replace('-', '_')

    params['raise_for_httperror'] = False
    return params


def response(resp):
    """Get response from Qwant's search request"""
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements

    keyword = category_to_keyword[categories[0]]
    results = []

    # load JSON result
    search_results = loads(resp.text)
    data = search_results.get('data', {})

    # check for an API error
    if search_results.get('status') != 'success':
        msg = ",".join(data.get('message', ['unknown']))
        raise SearxEngineAPIException('API error::' + msg)

    # raise for other errors
    raise_for_httperror(resp)
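
    # the (undocumented) payload of a successful web response is assumed to
    # look roughly like this (reverse engineered from the parsing below):
    #
    #   {"status": "success",
    #    "data": {"result": {"items": {"mainline": [
    #        {"type": "web", "items": [{"title": ..., "url": ..., "desc": ...}]},
    #        ...]}}}}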
					
						
    if keyword == 'web':
        # The WEB query contains a list named 'mainline'.  This list can contain
        # different result types (e.g. mainline[0]['type'] returns the type of
        # the result items in mainline[0]['items']).
        mainline = data.get('result', {}).get('items', {}).get('mainline', {})
    else:
        # Queries on News, Images and Videos do not have a list named 'mainline'
        # in the response.  The result items are directly in the list
        # result['items'].
        mainline = data.get('result', {}).get('items', [])
        mainline = [
            {'type': keyword, 'items': mainline},
        ]
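
    # after the else branch above, both cases have the same shape: a list of
    # rows, each carrying a 'type' and a list of result 'items'
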
    # return empty array if there are no results
    if not mainline:
        return []

    for row in mainline:

        mainline_type = row.get('type', 'web')
        if mainline_type != keyword:
            continue

        if mainline_type == 'ads':
            # ignore ads
            continue

        mainline_items = row.get('items', [])
        for item in mainline_items:

            title = item.get('title', None)
            res_url = item.get('url', None)

            if mainline_type == 'web':
                content = item['desc']
                results.append({
                    'title': title,
                    'url': res_url,
                    'content': content,
                })

            elif mainline_type == 'news':

                pub_date = item['date']
                if pub_date is not None:
                    pub_date = datetime.fromtimestamp(pub_date)
                news_media = item.get('media', [])
                img_src = None
                if news_media:
                    img_src = news_media[0].get('pict', {}).get('url', None)
                results.append({
                    'title': title,
                    'url': res_url,
                    'publishedDate': pub_date,
                    'img_src': img_src,
                })

            elif mainline_type == 'images':
                thumbnail = item['thumbnail']
                img_src = item['media']
                results.append({
                    'title': title,
                    'url': res_url,
                    'template': 'images.html',
                    'thumbnail_src': thumbnail,
                    'img_src': img_src,
                })

            elif mainline_type == 'videos':
                # some videos do not have a description: while qwant-video
                # returns an empty string, such a video from a qwant-web query
                # misses the 'desc' key.
                d, s, c = item.get('desc'), item.get('source'), item.get('channel')
                content_parts = []
                if d:
                    content_parts.append(d)
                if s:
                    content_parts.append("%s: %s " % (gettext("Source"), s))
                if c:
                    content_parts.append("%s: %s " % (gettext("Channel"), c))
                content = ' // '.join(content_parts)
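                # the assembled content reads roughly like
                # '<desc> // Source: <source> // Channel: <channel>'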
					
						
                length = item['duration']
                if length is not None:
                    length = timedelta(milliseconds=length)
                pub_date = item['date']
                if pub_date is not None:
                    pub_date = datetime.fromtimestamp(pub_date)
                thumbnail = item['thumbnail']
                # from some locations (DE and others?) the s2 link responds
                # with a 'Please wait ..' page but does not deliver the
                # thumbnail
                thumbnail = thumbnail.replace(
                    'https://s2.qwant.com',
                    'https://s1.qwant.com', 1
                )
                results.append({
                    'title': title,
                    'url': res_url,
                    'content': content,
                    'publishedDate': pub_date,
                    'thumbnail': thumbnail,
                    'template': 'videos.html',
                    'length': length,
                })

    return results


# get supported languages from their site
def _fetch_supported_languages(resp):
    # the list of regions is embedded in the page as a js object
    response_text = resp.text
    response_text = response_text[response_text.find('INITIAL_PROPS'):]
    response_text = response_text[response_text.find('{'):response_text.find('</script>')]

    regions_json = loads(response_text)
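
    # regions_json is assumed to contain a 'locales' mapping from country code
    # to a dict with a 'langs' list, e.g.:
    #   {"locales": {"gb": {"langs": ["en", ...]}, ...}}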
					
						
    supported_languages = []
    for country, langs in regions_json['locales'].items():
        for lang in langs['langs']:
            lang_code = "{lang}-{country}".format(lang=lang, country=country)
            supported_languages.append(lang_code)

    return supported_languages