diff --git a/src/snek/static/base.css b/src/snek/static/base.css
index b072e8c..24b03cd 100644
--- a/src/snek/static/base.css
+++ b/src/snek/static/base.css
@@ -544,3 +544,44 @@ dialog .dialog-button.secondary:hover {
 }
 
 
+.embed-url-link {
+    display: flex;
+    flex-direction: column;
+}
+
+.embed-url-link img,
+.embed-url-link video,
+.embed-url-link iframe,
+.embed-url-link div {
+    width: auto;
+    height: auto;
+    max-width: 100%;
+    max-height: 400px;
+    object-fit: contain;
+    border-radius: 12px 12px 0 0;
+}
+
+.embed-url-link .page-site {
+    font-size: 0.9em;
+    color: #aaa;
+    margin-bottom: 5px;
+}
+
+.embed-url-link .page-name {
+    font-size: 1.2em;
+    color: #f05a28;
+    margin-bottom: 5px;
+}
+
+.embed-url-link .page-description {
+    font-size: 1em;
+    color: #e6e6e6;
+    margin-bottom: 10px;
+}
+
+.embed-url-link .page-link {
+    font-size: 0.9em;
+    color: #f05a28;
+    text-decoration: none;
+    margin-top: 10px;
+}
\ No newline at end of file
diff --git a/src/snek/system/template.py b/src/snek/system/template.py
index 4ded527..a234629 100644
--- a/src/snek/system/template.py
+++ b/src/snek/system/template.py
@@ -1,8 +1,14 @@
+import asyncio
 import re
+from functools import lru_cache
+
+import requests
 from urllib.parse import urlparse, parse_qs
 from types import SimpleNamespace
 import mimetypes
+
+import aiohttp
 import emoji
 from bs4 import BeautifulSoup
 from jinja2 import TemplateSyntaxError, nodes
 
@@ -231,6 +237,153 @@ def linkify_https(text):
     return set_link_target_blank(str(soup))
 
+
+@lru_cache(maxsize=128)
+def get_url_content(url):
+    try:
+        response = requests.get(url, timeout=5)
+        response.raise_for_status()
+        return response.text
+    except Exception as e:
+        print(f"Error fetching {url}: {e}")
+        return None
+
+
+def embed_url(text):
+    soup = BeautifulSoup(text, "html.parser")
+
+    attachments = {}
+
+    for element in soup.find_all("a"):
+        if "href" in element.attrs and element.attrs["href"].startswith("http"):
+            page_url = urlparse(element.attrs["href"])
+            page = get_url_content(element.attrs["href"])
+            if page:
+                parsed_page = BeautifulSoup(page, "html.parser")
+                head_info = parsed_page.find("head")
+                if head_info:
+
+                    def get_element_options(
+                        elem=None, meta=None, ograph=None, twitter=None
+                    ):
+                        if twitter:
+                            tw_tag = head_info.find(
+                                "meta", attrs={"name": "twitter:" + twitter}
+                            ) or head_info.find(
+                                "meta", attrs={"property": "twitter:" + twitter}
+                            )
+                            if tw_tag:
+                                return tw_tag.get("content", tw_tag.get("value", None))
+
+                        if ograph:
+                            og_tag = head_info.find(
+                                "meta", attrs={"property": "og:" + ograph}
+                            ) or head_info.find("meta", attrs={"name": "og:" + ograph})
+                            if og_tag:
+                                return og_tag.get("content", og_tag.get("value", None))
+
+                        if meta:
+                            meta_tag = head_info.find(
+                                "meta", attrs={"name": meta}
+                            ) or head_info.find("meta", attrs={"property": meta})
+                            if meta_tag:
+                                return meta_tag.get(
+                                    "content", meta_tag.get("value", None)
+                                )
+
+                        if elem:
+                            elem_tag = head_info.find(elem)
+                            if elem_tag:
+                                return elem_tag.text
+
+                        return None
+
+                    original_link_name = element.attrs["href"]
+
+                    if original_link_name in attachments:
+                        continue
+
+                    page_name = (
+                        get_element_options("title", "title", "title", "title")
+                        or page_url.netloc
+                    )
+                    page_site = (
+                        get_element_options(None, "site", "site", "site")
+                        or page_url.netloc
+                    )
+                    page_description = get_element_options(
+                        None, "description", "description", "description"
+                    )
+                    page_image = get_element_options(None, "image", "image", "image")
+                    page_image_alt = get_element_options(
+                        None, "image:alt", "image:alt", "image:alt"
+                    )
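+                    # Video and audio URLs resolve through the same
+                    # twitter -> og -> <meta> fallback as the image fields;
+                    # the preview size only has a twitter:card source.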
+                    page_video = get_element_options(None, "video", "video", "video")
+                    page_audio = get_element_options(None, "audio", "audio", "audio")
+
+                    preview_size = (
+                        get_element_options(None, None, None, "card")
+                        or "summary_large_image"
+                    )
+
+
+                    attachment_base = BeautifulSoup(str(element), "html.parser")
+                    attachments[original_link_name] = attachment_base
+
+                    attachment = next(attachment_base.children)
+
+                    attachment.clear()
+                    attachment.attrs["class"] = "embed-url-link"
+
+                    render_element = attachment
+
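+                    # Attach whatever media the page advertises; base.css caps
+                    # previews inside .embed-url-link at 400px and rounds the top corners.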
+                    if page_image:
+                        image_template = f'<img src="{page_image}" alt="{page_image_alt or page_name}" />'
+                        render_element.append(
+                            BeautifulSoup(image_template, "html.parser")
+                        )
+                    if page_video:
+                        video_template = f'<video src="{page_video}" controls></video>'
+                        render_element.append(
+                            BeautifulSoup(video_template, "html.parser")
+                        )
+                    if page_audio:
+                        audio_template = f'<audio src="{page_audio}" controls></audio>'
+                        render_element.append(
+                            BeautifulSoup(audio_template, "html.parser")
+                        )
+
+                    description_element_base = BeautifulSoup(
+                        "<div></div>", "html.parser"
+                    )
+                    description_element = next(description_element_base.children)
+                    description_element.append(
+                        BeautifulSoup(
+                            f'<div class="page-site">{page_site}</div>',
+                            "html.parser",
+                        )
+                    )
+
+                    description_element.append(
+                        BeautifulSoup(f'<span class="page-name">{page_name}</span>', "html.parser")
+                    )
+
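+                    # Description (with a placeholder when the page offers none) and a
+                    # link back to the source, styled by .page-description / .page-link.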
+                    description_element.append(
+                        BeautifulSoup(f"<div class='page-description'>{page_description or 'No description available.'}</div>", "html.parser")
+                    )
+
+                    description_element.append(
+                        BeautifulSoup(f"<a class='page-link' href='{original_link_name}' target='_blank'>{original_link_name}</a>", "html.parser")
+                    )
+
+                    render_element.append(description_element_base)
+
+
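+    # Collected previews are appended after the original message markup, so the
+    # inline links themselves stay untouched.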
+    for attachment in attachments.values():
+        soup.append(attachment)
+
+    return str(soup)
+
+
 class EmojiExtension(Extension):
     tags = {"emoji"}
 
 
@@ -276,6 +429,9 @@ class LinkifyExtension(Extension):
         result = embed_youtube(result)
         result = enrich_image_rendering(result)
+
+        result = embed_url(result)
+
         return result