| 
									
										
										
										
											2015-05-02 15:45:17 +02:00
										 |  |  | """
 | 
					
						
							|  |  |  |  Bing (News) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  @website     https://www.bing.com/news | 
					
						
							|  |  |  |  @provide-api yes (http://datamarket.azure.com/dataset/bing/search), | 
					
						
							|  |  |  |               max. 5000 query/month | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  @using-api   no (because of query limit) | 
					
						
							|  |  |  |  @results     HTML (using search portal) | 
					
						
							|  |  |  |  @stable      no (HTML can change) | 
					
						
							|  |  |  |  @parse       url, title, content, publishedDate | 
					
						
							|  |  |  | """
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | from urllib import urlencode | 
					
						
							|  |  |  | from cgi import escape | 
					
						
							|  |  |  | from lxml import html | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | from datetime import datetime, timedelta | 
					
						
							|  |  |  | from dateutil import parser | 
					
						
							|  |  |  | import re | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  | from searx.engines.xpath import extract_text | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
# engine dependent config
categories = ['news']    # searx result category this engine answers
paging = True            # request() honours params['pageno'] via the `first` offset
language_support = True  # request() maps params['language'] to Bing's `setmkt` market

# search-url
base_url = 'https://www.bing.com/'
# `query` is the urlencoded q/setmkt pair; `offset` is the 1-based index
# of the first result on the requested page
search_string = 'news/search?{query}&first={offset}'
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-02 17:13:44 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | # do search-request | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | def request(query, params): | 
					
						
							|  |  |  |     offset = (params['pageno'] - 1) * 10 + 1 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     if params['language'] == 'all': | 
					
						
							|  |  |  |         language = 'en-US' | 
					
						
							|  |  |  |     else: | 
					
						
							|  |  |  |         language = params['language'].replace('_', '-') | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     search_path = search_string.format( | 
					
						
							|  |  |  |         query=urlencode({'q': query, 'setmkt': language}), | 
					
						
							|  |  |  |         offset=offset) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-01-22 22:46:34 +01:00
										 |  |  |     params['cookies']['_FP'] = "ui=en-US" | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     params['url'] = base_url + search_path | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     return params | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | # get response from search-request | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | def response(resp): | 
					
						
							|  |  |  |     results = [] | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     dom = html.fromstring(resp.content) | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  | 
 | 
					
						
							|  |  |  |     # parse results | 
					
						
							|  |  |  |     for result in dom.xpath('//div[@class="sn_r"]'): | 
					
						
							|  |  |  |         link = result.xpath('.//div[@class="newstitle"]/a')[0] | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |         url = link.attrib.get('href') | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  |         title = extract_text(link) | 
					
						
							|  |  |  |         contentXPath = result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]') | 
					
						
							| 
									
										
										
										
											2015-01-29 21:19:59 +01:00
										 |  |  |         content = escape(extract_text(contentXPath)) | 
					
						
							| 
									
										
										
										
											2014-12-07 16:37:56 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         # parse publishedDate | 
					
						
							| 
									
										
										
										
											2014-12-16 17:26:16 +01:00
										 |  |  |         publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div' | 
					
						
							|  |  |  |                                           '//span[contains(@class,"sn_ST")]' | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  |                                           '//span[contains(@class,"sn_tm")]') | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-01-29 21:19:59 +01:00
										 |  |  |         publishedDate = escape(extract_text(publishedDateXPath)) | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         if re.match("^[0-9]+ minute(s|) ago$", publishedDate): | 
					
						
							|  |  |  |             timeNumbers = re.findall(r'\d+', publishedDate) | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  |             publishedDate = datetime.now() - timedelta(minutes=int(timeNumbers[0])) | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         elif re.match("^[0-9]+ hour(s|) ago$", publishedDate): | 
					
						
							|  |  |  |             timeNumbers = re.findall(r'\d+', publishedDate) | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  |             publishedDate = datetime.now() - timedelta(hours=int(timeNumbers[0])) | 
					
						
							|  |  |  |         elif re.match("^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$", publishedDate): | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |             timeNumbers = re.findall(r'\d+', publishedDate) | 
					
						
							|  |  |  |             publishedDate = datetime.now()\ | 
					
						
							|  |  |  |                 - timedelta(hours=int(timeNumbers[0]))\ | 
					
						
							|  |  |  |                 - timedelta(minutes=int(timeNumbers[1])) | 
					
						
							| 
									
										
										
										
											2014-09-07 18:10:05 +02:00
										 |  |  |         elif re.match("^[0-9]+ day(s|) ago$", publishedDate): | 
					
						
							|  |  |  |             timeNumbers = re.findall(r'\d+', publishedDate) | 
					
						
							| 
									
										
										
										
											2015-01-29 20:56:57 +01:00
										 |  |  |             publishedDate = datetime.now() - timedelta(days=int(timeNumbers[0])) | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         else: | 
					
						
							| 
									
										
										
										
											2014-09-07 18:10:05 +02:00
										 |  |  |             try: | 
					
						
							|  |  |  |                 publishedDate = parser.parse(publishedDate, dayfirst=False) | 
					
						
							|  |  |  |             except TypeError: | 
					
						
							|  |  |  |                 publishedDate = datetime.now() | 
					
						
							| 
									
										
										
										
											2015-05-02 15:45:17 +02:00
										 |  |  |             except ValueError: | 
					
						
							|  |  |  |                 publishedDate = datetime.now() | 
					
						
							| 
									
										
										
										
											2014-12-07 16:37:56 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |         # append result | 
					
						
							| 
									
										
										
										
											2014-12-07 16:37:56 +01:00
										 |  |  |         results.append({'url': url, | 
					
						
							|  |  |  |                         'title': title, | 
					
						
							| 
									
										
										
										
											2014-09-01 14:38:59 +02:00
										 |  |  |                         'publishedDate': publishedDate, | 
					
						
							|  |  |  |                         'content': content}) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     # return results | 
					
						
							| 
									
										
										
										
											2014-03-04 13:10:04 +01:00
										 |  |  |     return results |