| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | # SPDX-License-Identifier: AGPL-3.0-or-later | 
					
						
							| 
									
										
										
										
											2021-04-26 20:18:20 +02:00
										 |  |  | # lint: pylint | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | """SolidTorrents
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | """
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | from datetime import datetime | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | from urllib.parse import urlencode | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | import random | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | from lxml import html | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-02-04 11:00:56 +01:00
										 |  |  | from searx.utils import ( | 
					
						
							|  |  |  |     extract_text, | 
					
						
							|  |  |  |     eval_xpath, | 
					
						
							|  |  |  |     eval_xpath_getindex, | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |     eval_xpath_list, | 
					
						
							| 
									
										
										
										
											2022-02-04 11:00:56 +01:00
										 |  |  |     get_torrent_size, | 
					
						
							|  |  |  | ) | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | 
 | 
					
						
# Engine metadata shown on the SearXNG engine/about pages.
about = {
    "website": 'https://www.solidtorrents.to/',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    # results are scraped from the HTML pages, not an API
    "require_api_key": False,
    "results": 'HTML',
}

# engine-dependent config
categories = ['files']
paging = True

# base_url can be overwritten by a list of URLs in the settings.yml
base_url = 'https://solidtorrents.to'
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  | def request(query, params): | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     if isinstance(base_url, list): | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |         params['base_url'] = random.choice(base_url) | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     else: | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |         params['base_url'] = base_url | 
					
						
							|  |  |  |     search_url = params['base_url'] + '/search?{query}' | 
					
						
							| 
									
										
										
										
											2023-09-14 09:33:05 +02:00
										 |  |  |     query = urlencode({'q': query, 'page': params['pageno']}) | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  |     params['url'] = search_url.format(query=query) | 
					
						
							|  |  |  |     return params | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | def response(resp): | 
					
						
							|  |  |  |     results = [] | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |     dom = html.fromstring(resp.text) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-09-14 09:32:17 +02:00
										 |  |  |     for result in eval_xpath(dom, '//li[contains(@class, "search-result")]'): | 
					
						
							|  |  |  |         torrentfile = eval_xpath_getindex(result, './/a[contains(@class, "dl-torrent")]/@href', 0, None) | 
					
						
							|  |  |  |         magnet = eval_xpath_getindex(result, './/a[contains(@class, "dl-magnet")]/@href', 0, None) | 
					
						
							|  |  |  |         if torrentfile is None or magnet is None: | 
					
						
							|  |  |  |             continue  # ignore anime results that which aren't actually torrents | 
					
						
							|  |  |  |         title = eval_xpath_getindex(result, './/h5[contains(@class, "title")]', 0, None) | 
					
						
							|  |  |  |         url = eval_xpath_getindex(result, './/h5[contains(@class, "title")]/a/@href', 0, None) | 
					
						
							|  |  |  |         categ = eval_xpath(result, './/a[contains(@class, "category")]') | 
					
						
							|  |  |  |         stats = eval_xpath_list(result, './/div[contains(@class, "stats")]/div', min_len=5) | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  | 
 | 
					
						
							|  |  |  |         params = { | 
					
						
							| 
									
										
										
										
											2023-09-14 09:32:17 +02:00
										 |  |  |             'seed': extract_text(stats[3]), | 
					
						
							|  |  |  |             'leech': extract_text(stats[2]), | 
					
						
							|  |  |  |             'title': extract_text(title), | 
					
						
							| 
									
										
										
										
											2022-02-04 14:41:53 +01:00
										 |  |  |             'url': resp.search_params['base_url'] + url, | 
					
						
							| 
									
										
										
										
											2023-09-14 09:32:17 +02:00
										 |  |  |             'filesize': get_torrent_size(*extract_text(stats[1]).split()), | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |             'magnetlink': magnet, | 
					
						
							| 
									
										
										
										
											2022-02-04 14:30:00 +01:00
										 |  |  |             'torrentfile': torrentfile, | 
					
						
							| 
									
										
										
										
											2023-09-14 09:32:17 +02:00
										 |  |  |             'metadata': extract_text(categ), | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |             'template': "torrent.html", | 
					
						
							|  |  |  |         } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         try: | 
					
						
							| 
									
										
										
										
											2023-09-14 09:32:17 +02:00
										 |  |  |             params['publishedDate'] = datetime.strptime(extract_text(stats[4]), '%b %d, %Y') | 
					
						
							| 
									
										
										
										
											2022-02-04 01:11:44 +01:00
										 |  |  |         except ValueError: | 
					
						
							|  |  |  |             pass | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |         results.append(params) | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2021-03-07 18:14:30 +01:00
										 |  |  |     return results |