| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | # SPDX-License-Identifier: AGPL-3.0-or-later | 
					
						
							| 
									
										
										
										
											2021-04-26 20:18:20 +02:00
										 |  |  | # lint: pylint | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | """SolidTorrents
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | """
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | from datetime import datetime | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | from urllib.parse import urlencode | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | import random | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | from lxml import html | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-02-04 11:00:56 +01:00
										 |  |  | from searx.utils import ( | 
					
						
							|  |  |  |     extract_text, | 
					
						
							|  |  |  |     eval_xpath, | 
					
						
							|  |  |  |     eval_xpath_getindex, | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |     eval_xpath_list, | 
					
						
							| 
									
										
										
										
											2022-02-04 11:00:56 +01:00
										 |  |  |     get_torrent_size, | 
					
						
							|  |  |  | ) | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | 
 | 
					
						
# Engine metadata, as used by searx for the engine's "about" information.
about = {
    "website": 'https://www.solidtorrents.net/',
    "wikidata_id": None,
    "official_api_documentation": None,
    # Results are scraped from the site's HTML pages, not an API.
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# Engine configuration: torrent results belong to the 'files' category,
# and the site supports paging (see the offset calculation in request()).
categories = ['files']
paging = True

# base_url can be overwritten by a list of URLs in the settings.yml
base_url = 'https://solidtorrents.net'
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  | def request(query, params): | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     if isinstance(base_url, list): | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |         params['base_url'] = random.choice(base_url) | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     else: | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |         params['base_url'] = base_url | 
					
						
							|  |  |  |     search_url = params['base_url'] + '/search?{query}' | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     page = (params['pageno'] - 1) * 20 | 
					
						
							|  |  |  |     query = urlencode({'q': query, 'page': page}) | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  |     params['url'] = search_url.format(query=query) | 
					
						
							|  |  |  |     return params | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | def response(resp): | 
					
						
							|  |  |  |     results = [] | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     dom = html.fromstring(resp.text) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |     for result in eval_xpath(dom, '//div[contains(@class, "search-result")]'): | 
					
						
							|  |  |  |         a = eval_xpath_getindex(result, './div/h5/a', 0, None) | 
					
						
							|  |  |  |         if a is None: | 
					
						
							|  |  |  |             continue | 
					
						
							|  |  |  |         title = extract_text(a) | 
					
						
							|  |  |  |         url = eval_xpath_getindex(a, '@href', 0, None) | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |         categ = eval_xpath(result, './div//a[contains(@class, "category")]') | 
					
						
							|  |  |  |         metadata = extract_text(categ) | 
					
						
							|  |  |  |         stats = eval_xpath_list(result, './div//div[contains(@class, "stats")]/div', min_len=5) | 
					
						
							| 
									
										
										
										
											2022-02-04 11:00:56 +01:00
										 |  |  |         n, u = extract_text(stats[1]).split() | 
					
						
							|  |  |  |         filesize = get_torrent_size(n, u) | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |         leech = extract_text(stats[2]) | 
					
						
							|  |  |  |         seed = extract_text(stats[3]) | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |         torrentfile = eval_xpath_getindex(result, './div//a[contains(@class, "dl-torrent")]/@href', 0, None) | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |         magnet = eval_xpath_getindex(result, './div//a[contains(@class, "dl-magnet")]/@href', 0, None) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         params = { | 
					
						
							|  |  |  |             'seed': seed, | 
					
						
							|  |  |  |             'leech': leech, | 
					
						
							|  |  |  |             'title': title, | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |             'url': resp.search_params['base_url'] + url, | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |             'filesize': filesize, | 
					
						
							|  |  |  |             'magnetlink': magnet, | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |             'torrentfile': torrentfile, | 
					
						
							|  |  |  |             'metadata': metadata, | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |             'template': "torrent.html", | 
					
						
							|  |  |  |         } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         date_str = extract_text(stats[4]) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         try: | 
					
						
							|  |  |  |             params['publishedDate'] = datetime.strptime(date_str, '%b %d, %Y') | 
					
						
							|  |  |  |         except ValueError: | 
					
						
							|  |  |  |             pass | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         results.append(params) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  |     return results |