"""
 Swisscows (Web, Images)

 @website     https://swisscows.ch
 @provide-api no

 @using-api   no
 @results     HTML (using search portal)
 @stable      no (HTML can change)
 @parse       url, title, content
"""

from json import loads
import re
from lxml.html import fromstring
from searx.url_utils import unquote, urlencode

# engine dependent config
categories = ['general', 'images']
paging = True
language_support = True

# search-url
base_url = 'https://swisscows.ch/'
search_string = '?{query}&page={page}'

supported_languages_url = base_url

# regex
regex_json = re.compile(br'initialData: {"Request":(.|\n)*},\s*environment')
regex_json_remove_start = re.compile(br'^initialData:\s*')
regex_json_remove_end = re.compile(br',\s*environment$')
regex_img_url_remove_start = re.compile(br'^https?://i\.swisscows\.ch/\?link=')
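
# The regexes above carve the result data out of the HTML page: the portal is
# expected to embed a JavaScript object literal of the form
# `initialData: {"Request": ...}, environment`, whose middle part is plain JSON.
# regex_json grabs that whole span, the two remove_* regexes trim the prefix and
# the trailing `, environment`, and regex_img_url_remove_start strips Swisscows'
# image proxy prefix (https://i.swisscows.ch/?link=<percent-encoded URL>) so the
# original image URL can be recovered with unquote().  Rough sketch on a made-up
# fragment (not real Swisscows output):
#
#   page = b'... initialData: {"Request":{},"Results":{"items":[]}}, environment ...'
#   raw = regex_json.search(page).group()
#   raw = regex_json_remove_end.sub(b'', regex_json_remove_start.sub(b'', raw))
#   loads(raw.decode('utf-8'))  # -> {'Request': {}, 'Results': {'items': []}}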
					
						
# do search-request
def request(query, params):
    if params['language'] == 'all':
        ui_language = 'browser'
        region = 'browser'
    elif params['language'].split('-')[0] == 'no':
        region = 'nb-NO'
        ui_language = 'nb-NO'  # keep ui_language defined; Swisscows labels Norwegian nb-NO
    else:
        region = params['language']
        ui_language = params['language'].split('-')[0]

    search_path = search_string.format(
        query=urlencode({'query': query, 'uiLanguage': ui_language, 'region': region}),
        page=params['pageno']
    )

    # image search query is something like 'image?{query}&page={page}'
    if params['category'] == 'images':
        search_path = 'image' + search_path

    params['url'] = base_url + search_path

    return params
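
# Illustrative only: searx hands request() a mutable params dict and expects the
# engine to fill in params['url'].  With hypothetical values such as
#
#   params = {'language': 'de-CH', 'pageno': 1, 'category': 'general'}
#   request('searx', params)
#
# params['url'] ends up as something like
# 'https://swisscows.ch/?query=searx&uiLanguage=de&region=de-CH&page=1'
# (the exact query-string order depends on urlencode's dict iteration order).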
					
						
# get response from search-request
def response(resp):
    results = []

    json_regex = regex_json.search(resp.text)

    # check if results are returned
    if not json_regex:
        return []

    json_raw = regex_json_remove_end.sub(b'', regex_json_remove_start.sub(b'', json_regex.group()))
    json = loads(json_raw.decode('utf-8'))

    # parse results
    for result in json['Results'].get('items', []):
        # strip the private-use characters (apparently query-term highlight markers)
        result_title = result['Title'].replace(u'\uE000', '').replace(u'\uE001', '')

        # parse image results
        if result.get('ContentType', '').startswith('image'):
            img_url = unquote(regex_img_url_remove_start.sub(b'', result['Url'].encode('utf-8')).decode('utf-8'))

            # append result
            results.append({'url': result['SourceUrl'],
                            'title': result['Title'],
                            'content': '',
                            'img_src': img_url,
                            'template': 'images.html'})

        # parse general results
        else:
            result_url = result['Url'].replace(u'\uE000', '').replace(u'\uE001', '')
            result_content = result['Description'].replace(u'\uE000', '').replace(u'\uE001', '')

            # append result
            results.append({'url': result_url,
                            'title': result_title,
                            'content': result_content})

    # parse images
    for result in json.get('Images', []):
        # decode image url
        img_url = unquote(regex_img_url_remove_start.sub(b'', result['Url'].encode('utf-8')).decode('utf-8'))

        # append result
        results.append({'url': result['SourceUrl'],
                        'title': result['Title'],
                        'content': '',
                        'img_src': img_url,
                        'template': 'images.html'})

    # return results
    return results
					
						
# get supported languages from their site
def _fetch_supported_languages(resp):
    supported_languages = []
    dom = fromstring(resp.text)
    options = dom.xpath('//div[@id="regions-popup"]//ul/li/a')
    for option in options:
        code = option.xpath('./@data-search-language')[0]
        if code.startswith('nb-'):
            code = code.replace('nb', 'no', 1)
        supported_languages.append(code)

    return supported_languages
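
# Note: the codes gathered here are Swisscows' own region codes, read from the
# data-search-language attribute in its region picker.  Norwegian is the one code
# that gets renamed (nb-NO -> no-NO) so it matches the 'no' prefix that request()
# above maps back to the nb-NO region.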