# SPDX-License-Identifier: AGPL-3.0-or-later
"""
 DigBT (Videos, Music, Files)
"""

from urllib.parse import urljoin
from lxml import html
from searx.utils import extract_text

# about
about = {
    "website": 'https://digbt.org',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

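# engine dependent config (result categories and paging support)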
categories = ['videos', 'music', 'files']
paging = True

URL = 'https://digbt.org'
SEARCH_URL = URL + '/search/{query}-time-{pageno}'
FILESIZE = 3
FILESIZE_MULTIPLIER = 4
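# FILESIZE and FILESIZE_MULTIPLIER are the word positions of the size value and
# its unit in the whitespace-split text of a result's "tail" div (see response())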


def request(query, params):
    params['url'] = SEARCH_URL.format(query=query, pageno=params['pageno'])

    return params


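# parse the HTML result page: every torrent is rendered as a <td class="x-item">
# cell holding the title link, a "files" div and a "tail" div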
def response(resp):
    dom = html.fromstring(resp.text)
    search_res = dom.xpath('.//td[@class="x-item"]')

    if not search_res:
        return []

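    # the "files" div lists the torrent's content, the "tail" div carries the
    # file size and the magnet link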
    results = []
    for result in search_res:
        url = urljoin(URL, result.xpath('.//a[@title]/@href')[0])
        title = extract_text(result.xpath('.//a[@title]'))
        content = extract_text(result.xpath('.//div[@class="files"]'))
        files_data = extract_text(result.xpath('.//div[@class="tail"]')).split()
        filesize = f"{files_data[FILESIZE]} {files_data[FILESIZE_MULTIPLIER]}"
        magnetlink = result.xpath('.//div[@class="tail"]//a[@class="title"]/@href')[0]

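        # seeder/leecher counts are not scraped from the listing, so the torrent
        # template gets 'N/A' placeholders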
        results.append(
            {
                'url': url,
                'title': title,
                'content': content,
                'filesize': filesize,
                'magnetlink': magnetlink,
                'seed': 'N/A',
                'leech': 'N/A',
                'template': 'torrent.html',
            }
        )

    return results