[refactor] replaced info logging with debug for improved logging granularity and removed redundant log statements in backend services

This commit is contained in:
2025-08-07 22:48:35 +02:00
parent cf163082b2
commit c19813cbe2
2 changed files with 7 additions and 25 deletions

View File

@@ -150,8 +150,6 @@ async def get_news(
where_conditions.append("published BETWEEN ? AND ?") where_conditions.append("published BETWEEN ? AND ?")
params.extend([from_ts, to_ts]) params.extend([from_ts, to_ts])
logger.info(f"Date range: {from_date} to {to_date} (UTC timestamps: {from_ts} to {to_ts})")
# Build the complete SQL query # Build the complete SQL query
base_sql = """ base_sql = """
SELECT id, title, summary, url, published, country, created_at SELECT id, title, summary, url, published, country, created_at
@@ -163,27 +161,13 @@ async def get_news(
else: else:
sql = base_sql sql = base_sql
sql += " ORDER BY published DESC LIMIT 1000" sql += " ORDER BY published DESC"
# Log query info
if all_countries and all_dates:
logger.info("Querying ALL news articles (no filters)")
elif all_countries:
logger.info(f"Querying news from ALL countries with date filter")
elif all_dates:
logger.info(f"Querying ALL dates for countries: {country}")
else:
logger.info(f"Querying news: countries={country}, timezone={timezone_name}")
logger.info(f"SQL: {sql}")
logger.info(f"Parameters: {params}")
# Execute the query # Execute the query
db.execute(sql, params) db.execute(sql, params)
rows = db.fetchall() rows = db.fetchall()
result = [dict(row) for row in rows] result = [dict(row) for row in rows]
logger.info(f"Found {len(result)} news articles")
return result return result
except ValueError as e: except ValueError as e:

View File

@@ -206,7 +206,7 @@ class NewsFetcher:
A dictionary containing the article title and summaries in German and English, A dictionary containing the article title and summaries in German and English,
or None if summarization failed or None if summarization failed
""" """
logger.info("[AI] Fetching article content from: " + url) logger.debug("[AI] Fetching article content from: " + url)
article_content = await NewsFetcher.fetch_article_content(client, url) article_content = await NewsFetcher.fetch_article_content(client, url)
@@ -228,6 +228,9 @@ class NewsFetcher:
"title": { "title": {
"type": "string" "type": "string"
}, },
"location": {
"type": "string"
},
"summary": { "summary": {
"type": "string" "type": "string"
}, },
@@ -236,9 +239,6 @@ class NewsFetcher:
"items": { "items": {
"type": "string" "type": "string"
} }
},
"location": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -253,7 +253,7 @@ class NewsFetcher:
} }
} }
logger.info("[AI] Running summary generation...") logger.debug("[AI] Running summary generation...")
try: try:
response = await client.post( response = await client.post(
@@ -266,7 +266,7 @@ class NewsFetcher:
result = response.json() result = response.json()
llm_response = result["response"] llm_response = result["response"]
logger.info("[AI] " + llm_response) logger.debug("[AI] " + llm_response)
if isinstance(llm_response, str): if isinstance(llm_response, str):
summary_data = json.loads(llm_response) summary_data = json.loads(llm_response)
@@ -432,8 +432,6 @@ class NewsFetcher:
summary=rss_summary summary=rss_summary
) )
logger.info(summary)
if not summary: if not summary:
logger.warning( logger.warning(
f"❌ Failed to get summary for article {i}: {article_url}") f"❌ Failed to get summary for article {i}: {article_url}")