Implemented client-side filters, mobile-first styling, and modals for articles

2025-08-02 01:33:49 +02:00
parent e1f51794af
commit ccc1a90cbe
14 changed files with 1248 additions and 131 deletions


@@ -126,7 +126,7 @@ class NewsFetcher:
def build_prompt(
url: str,
title: str = "",
-description: str = "",
+summary: str = "",
content: str = "") -> str:
"""
Generate a prompt for the LLM to summarize an article.
@@ -134,7 +134,7 @@ class NewsFetcher:
Args:
url: Public URL of the article to summarize
title: Article title from RSS feed (optional)
-description: Article description from RSS feed (optional)
+summary: Article summary from RSS feed (optional)
content: Extracted article content (optional)
Returns:
@@ -144,8 +144,8 @@ class NewsFetcher:
context_info = []
if title:
context_info.append(f"RSS-Titel: {title}")
-if description:
-context_info.append(f"RSS-Beschreibung: {description}")
+if summary:
+context_info.append(f"RSS-Beschreibung: {summary}")
if content:
content_preview = content[:500] + \
"..." if len(content) > 500 else content
@@ -164,9 +164,9 @@ class NewsFetcher:
"2. Falls kein Artikel-Inhalt verfügbar ist, nutze RSS-Titel und -Beschreibung\n"
"3. Falls keine ausreichenden Informationen vorliegen, erstelle eine plausible Zusammenfassung basierend auf der URL\n"
"4. Gib ausschließlich **gültiges minifiziertes JSON** zurück kein Markdown, keine Kommentare\n"
"5. Struktur: {\"title\":\"\",\"description\":\"\"}\n"
"5. Struktur: {\"title\":\"\",\"summary\":\"\"}\n"
"6. title: Aussagekräftiger deutscher Titel (max 100 Zeichen)\n"
"7. description: Deutsche Zusammenfassung (zwischen 100 und 160 Wörter)\n"
"7. summary: Deutsche Zusammenfassung (zwischen 100 und 160 Wörter)\n"
"8. Kein Text vor oder nach dem JSON\n\n"
"### Ausgabe\n"
"Jetzt antworte mit dem JSON:"
@@ -177,7 +177,7 @@ class NewsFetcher:
client: httpx.AsyncClient,
url: str,
title: str = "",
-description: str = ""
+summary: str = ""
) -> Optional[ArticleSummary]:
"""
Generate a summary of an article using the LLM.
@@ -187,7 +187,7 @@ class NewsFetcher:
client: An active httpx AsyncClient for making requests
url: URL of the article to summarize
title: Article title from RSS feed
-description: Article description from RSS feed
+summary: Article summary from RSS feed
Returns:
A dictionary containing the article title and summaries in German and English,
@@ -200,7 +200,7 @@ class NewsFetcher:
f"⚠️ Could not fetch article content, using RSS data only")
prompt = NewsFetcher.build_prompt(
-url, title, description, article_content)
+url, title, summary, article_content)
payload = {
"model": LLM_MODEL,
"prompt": prompt,
@@ -226,7 +226,7 @@ class NewsFetcher:
summary_data = llm_response
# Validate required fields
required_fields = ["title", "description"]
required_fields = ["title", "summary"]
missing_fields = [
field for field in required_fields if field not in summary_data]
@@ -237,12 +237,12 @@ class NewsFetcher:
return None
# Check summary quality metrics
-description = len(summary_data.get("description", "").split())
+summary_length = len(summary_data.get("summary", "").split())
-if description > 160 or description < 100:
+if summary_length > 160:
logger.warning(
f"⚠️ Summary exceeds word limit - "
f"Description: {description}/160"
f"Summary: {summary_length}/160"
)
return cast(ArticleSummary, summary_data)
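(A compact, self-contained sketch of the validation step in the hunk above; the function and logger setup are assumptions for illustration. Note that only the 160-word upper bound is still enforced; the previous 100-word lower bound is dropped by this change.)

import logging
from typing import Optional

logger = logging.getLogger(__name__)

def check_summary_fields(summary_data: dict) -> Optional[dict]:
    """Mirror of the diff's checks: require the renamed keys, warn on length."""
    required_fields = ["title", "summary"]
    missing_fields = [f for f in required_fields if f not in summary_data]
    if missing_fields:
        logger.warning("Missing fields in LLM response: %s", missing_fields)
        return None
    summary_length = len(summary_data.get("summary", "").split())
    if summary_length > 160:
        logger.warning("Summary exceeds word limit - Summary: %d/160", summary_length)
    return summary_data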
@@ -373,7 +373,7 @@ class NewsFetcher:
f"⚠️ Database check failed for article {i}, continuing: {db_error}")
rss_title = getattr(entry, 'title', '')
-rss_description = getattr(
+rss_summary = getattr(
entry, 'description', '') or getattr(
entry, 'summary', '')
@@ -381,9 +381,11 @@ class NewsFetcher:
client,
article_url,
title=rss_title,
-description=rss_description
+summary=rss_summary
)
+logger.info(summary)
if not summary:
logger.warning(
f"❌ Failed to get summary for article {i}: {article_url}")
@@ -398,12 +400,12 @@ class NewsFetcher:
"""
INSERT
OR IGNORE INTO news
-(title, description, url, published, country)
+(title, summary, url, published, country)
VALUES (?, ?, ?, ?, ?)
""",
(
summary["title"],
summary["description"],
summary["summary"],
article_url,
published_timestamp,
feed_row["country"],