| 
									
										
										
										
											2015-05-02 15:45:17 +02:00
										 |  |  | """
 | 
					
						
							|  |  |  |  Bing (News) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  @website     https://www.bing.com/news | 
					
						
							|  |  |  |  @provide-api yes (http://datamarket.azure.com/dataset/bing/search), | 
					
						
							|  |  |  |               max. 5000 query/month | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  @using-api   no (because of query limit) | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  |  @results     RSS (using search portal) | 
					
						
							|  |  |  |  @stable      yes (except perhaps for the images) | 
					
						
							|  |  |  |  @parse       url, title, content, publishedDate, thumbnail | 
					
						
							| 
									
										
										
										
											2015-05-02 15:45:17 +02:00
										 |  |  | """
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | from urllib import urlencode | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  | from urlparse import urlparse, parse_qsl | 
					
						
							|  |  |  | from datetime import datetime | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | from dateutil import parser | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  | from lxml import etree | 
					
						
							|  |  |  | from searx.utils import list_get | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
# engine dependent config
categories = ['news']    # results are shown in the "news" tab
paging = True            # offset is passed via the `first` URL parameter
language_support = True  # language is passed via the `setmkt` URL parameter

# search-url
base_url = 'https://www.bing.com/'
# {query}: urlencoded q/setmkt pair built in request(); {offset}: 1-based
# result offset; format=RSS selects the RSS output parsed by response()
search_string = 'news/search?{query}&first={offset}&format=RSS'
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | # remove click | 
					
						
							|  |  |  | def url_cleanup(url_string): | 
					
						
							|  |  |  |     parsed_url = urlparse(url_string) | 
					
						
							|  |  |  |     if parsed_url.netloc == 'www.bing.com' and parsed_url.path == '/news/apiclick.aspx': | 
					
						
							|  |  |  |         query = dict(parse_qsl(parsed_url.query)) | 
					
						
							|  |  |  |         return query.get('url', None) | 
					
						
							|  |  |  |     return url_string | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | # replace the http://*bing4.com/th?id=... by https://www.bing.com/th?id=... | 
					
						
							|  |  |  | def image_url_cleanup(url_string): | 
					
						
							|  |  |  |     parsed_url = urlparse(url_string) | 
					
						
							|  |  |  |     if parsed_url.netloc.endswith('bing4.com') and parsed_url.path == '/th': | 
					
						
							|  |  |  |         query = dict(parse_qsl(parsed_url.query)) | 
					
						
							|  |  |  |         return "https://www.bing.com/th?id=" + query.get('id') | 
					
						
							|  |  |  |     return url_string | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-02 17:13:44 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | # do search-request | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | def request(query, params): | 
					
						
							|  |  |  |     offset = (params['pageno'] - 1) * 10 + 1 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     if params['language'] == 'all': | 
					
						
							|  |  |  |         language = 'en-US' | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         language = params['language'].replace('_', '-') | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     search_path = search_string.format( | 
					
						
							|  |  |  |         query=urlencode({'q': query, 'setmkt': language}), | 
					
						
							|  |  |  |         offset=offset) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     params['url'] = base_url + search_path | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     return params | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | # get response from search-request | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | def response(resp): | 
					
						
							|  |  |  |     results = [] | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  |     rss = etree.fromstring(resp.content) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     ns = rss.nsmap | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # parse results | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  |     for item in rss.xpath('./channel/item'): | 
					
						
							|  |  |  |         # url / title / content | 
					
						
							|  |  |  |         url = url_cleanup(item.xpath('./link/text()')[0]) | 
					
						
							|  |  |  |         title = list_get(item.xpath('./title/text()'), 0, url) | 
					
						
							|  |  |  |         content = list_get(item.xpath('./description/text()'), 0, '') | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         # publishedDate | 
					
						
							|  |  |  |         publishedDate = list_get(item.xpath('./pubDate/text()'), 0) | 
					
						
							|  |  |  |         try: | 
					
						
							|  |  |  |             publishedDate = parser.parse(publishedDate, dayfirst=False) | 
					
						
							|  |  |  |         except TypeError: | 
					
						
							|  |  |  |             publishedDate = datetime.now() | 
					
						
							|  |  |  |         except ValueError: | 
					
						
							|  |  |  |             publishedDate = datetime.now() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         # thumbnail | 
					
						
							|  |  |  |         thumbnail = list_get(item.xpath('./News:Image/text()', namespaces=ns), 0) | 
					
						
							|  |  |  |         if thumbnail is not None: | 
					
						
							|  |  |  |             thumbnail = image_url_cleanup(thumbnail) | 
					
						
							| 
									
										
										
										
											2014-12-07 16:37:56 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         # append result | 
					
						
							| 
									
										
										
										
											2015-06-04 18:30:08 +02:00
										 |  |  |         if thumbnail is not None: | 
					
						
							|  |  |  |             results.append({'template': 'videos.html', | 
					
						
							|  |  |  |                             'url': url, | 
					
						
							|  |  |  |                             'title': title, | 
					
						
							|  |  |  |                             'publishedDate': publishedDate, | 
					
						
							|  |  |  |                             'content': content, | 
					
						
							|  |  |  |                             'thumbnail': thumbnail}) | 
					
						
							|  |  |  |         else: | 
					
						
							|  |  |  |             results.append({'url': url, | 
					
						
							|  |  |  |                             'title': title, | 
					
						
							|  |  |  |                             'publishedDate': publishedDate, | 
					
						
							|  |  |  |                             'content': content}) | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # return results | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     return results |