# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
							|  |  |  | """Flickr (Images)
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2015-05-02 15:45:17 +02:00
										 |  |  | """
 | 
					
						
							| 
									
										
										
										
											2014-12-16 20:40:03 +01:00
										 |  |  | 
 | 
					
						
from typing import TYPE_CHECKING

import json
from time import time
import re
from urllib.parse import urlencode

from searx.utils import ecma_unescape, html_to_text

if TYPE_CHECKING:
    import logging

    logger: logging.Logger

# about
about = {
    "website": 'https://www.flickr.com',
    "wikidata_id": 'Q103204',
    "official_api_documentation": 'https://secure.flickr.com/services/api/flickr.photos.search.html',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# engine dependent config
categories = ['images']
paging = True
time_range_support = True
safesearch = False

time_range_dict = {
    'day': 60 * 60 * 24,
    'week': 60 * 60 * 24 * 7,
    'month': 60 * 60 * 24 * 7 * 4,
    'year': 60 * 60 * 24 * 7 * 52,
}
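# The values above are in seconds; 'month' and 'year' are approximated as 4 and 52 weeks.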
					
						
image_sizes = ('o', 'k', 'h', 'b', 'c', 'z', 'm', 'n', 't', 'q', 's')
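# The size suffixes above are ordered from the largest to the smallest format;
# response() uses the first one that is available for a photo.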
					
						

search_url = 'https://www.flickr.com/search?{query}&page={page}'
time_range_url = '&min_upload_date={start}&max_upload_date={end}'
photo_url = 'https://www.flickr.com/photos/{userid}/{photoid}'
modelexport_re = re.compile(r"^\s*modelExport:\s*({.*}),$", re.M)
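# The regex above captures the JSON object assigned to ``modelExport:`` in the
# HTML of the search page; response() parses that object with json.loads().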
					
						


def build_flickr_url(user_id, photo_id):
    return photo_url.format(userid=user_id, photoid=photo_id)


def _get_time_range_url(time_range):
    if time_range in time_range_dict:
        # min_upload_date is the lower bound of the range, max_upload_date the upper one
        now = int(time())
        return time_range_url.format(start=now - time_range_dict[time_range], end=now)
    return ''


def request(query, params):
    params['url'] = search_url.format(query=urlencode({'text': query}), page=params['pageno']) + _get_time_range_url(
        params['time_range']
    )
    return params
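
# Illustrative example of a generated URL (assuming query "cats", page 2 and
# time_range 'day'):
#   https://www.flickr.com/search?text=cats&page=2&min_upload_date=...&max_upload_date=...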
					
						


def response(resp):  # pylint: disable=too-many-branches
    results = []

    matches = modelexport_re.search(resp.text)
    if matches is None:
        return results

    match = matches.group(1)
    model_export = json.loads(match)

    if 'legend' not in model_export:
        return results
    legend = model_export['legend']

    # handle empty page
    if not legend or not legend[0]:
        return results

    for x, index in enumerate(legend):
        if len(index) != 8:
            logger.debug("skip legend entry %s : %s", x, index)
            continue

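        # Each legend entry is an 8-element path into model_export['main'];
        # index[1] and index[6] are numeric (note the int() casts below), the
        # remaining elements are dict keys.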
        photo = model_export['main'][index[0]][int(index[1])][index[2]][index[3]][index[4]][index[5]][int(index[6])][
            index[7]
        ]
        author = ecma_unescape(photo.get('realname', ''))
        source = ecma_unescape(photo.get('username', ''))
        if source:
            source += ' @ Flickr'
        title = ecma_unescape(photo.get('title', ''))
        content = html_to_text(ecma_unescape(photo.get('description', '')))
        img_src = None

        # From the biggest to the smallest format
        size_data = None
        for image_size in image_sizes:
            if image_size in photo['sizes']['data']:
                size_data = photo['sizes']['data'][image_size]['data']
                break

        if not size_data:
            logger.debug('cannot find valid image size: {0}'.format(repr(photo['sizes']['data'])))
            continue

        img_src = size_data['url']
        img_format = f"{size_data['width']} x {size_data['height']}"

        # Prefer the 'n' size for the thumbnail, fall back to 'z', then to the full image
        if 'n' in photo['sizes']['data']:
            thumbnail_src = photo['sizes']['data']['n']['data']['url']
        elif 'z' in photo['sizes']['data']:
            thumbnail_src = photo['sizes']['data']['z']['data']['url']
        else:
            thumbnail_src = img_src

        if 'ownerNsid' not in photo:
            # should not happen, disowned photo? Show it anyway
            url = img_src
        else:
            url = build_flickr_url(photo['ownerNsid'], photo['id'])

        result = {
            'url': url,
            'img_src': img_src,
            'thumbnail_src': thumbnail_src,
            'source': source,
            'img_format': img_format,
            'template': 'images.html',
        }
        result['author'] = author.encode(errors='ignore').decode()
        result['source'] = source.encode(errors='ignore').decode()
        result['title'] = title.encode(errors='ignore').decode()
        result['content'] = content.encode(errors='ignore').decode()
        results.append(result)

    return results