[refactor] Replace info-level logging with debug level for finer logging granularity; remove redundant log statements in backend services

This commit is contained in:
2025-08-07 22:48:35 +02:00
parent cf163082b2
commit c19813cbe2
2 changed files with 7 additions and 25 deletions

View File

@@ -150,8 +150,6 @@ async def get_news(
where_conditions.append("published BETWEEN ? AND ?")
params.extend([from_ts, to_ts])
logger.info(f"Date range: {from_date} to {to_date} (UTC timestamps: {from_ts} to {to_ts})")
# Build the complete SQL query
base_sql = """
SELECT id, title, summary, url, published, country, created_at
@@ -163,27 +161,13 @@ async def get_news(
else:
sql = base_sql
sql += " ORDER BY published DESC LIMIT 1000"
# Log query info
if all_countries and all_dates:
logger.info("Querying ALL news articles (no filters)")
elif all_countries:
logger.info(f"Querying news from ALL countries with date filter")
elif all_dates:
logger.info(f"Querying ALL dates for countries: {country}")
else:
logger.info(f"Querying news: countries={country}, timezone={timezone_name}")
logger.info(f"SQL: {sql}")
logger.info(f"Parameters: {params}")
sql += " ORDER BY published DESC"
# Execute the query
db.execute(sql, params)
rows = db.fetchall()
result = [dict(row) for row in rows]
logger.info(f"Found {len(result)} news articles")
return result
except ValueError as e:

View File

@@ -206,7 +206,7 @@ class NewsFetcher:
A dictionary containing the article title and summaries in German and English,
or None if summarization failed
"""
logger.info("[AI] Fetching article content from: " + url)
logger.debug("[AI] Fetching article content from: " + url)
article_content = await NewsFetcher.fetch_article_content(client, url)
@@ -228,6 +228,9 @@ class NewsFetcher:
"title": {
"type": "string"
},
"location": {
"type": "string"
},
"summary": {
"type": "string"
},
@@ -236,9 +239,6 @@ class NewsFetcher:
"items": {
"type": "string"
}
},
"location": {
"type": "string"
}
},
"required": [
@@ -253,7 +253,7 @@ class NewsFetcher:
}
}
logger.info("[AI] Running summary generation...")
logger.debug("[AI] Running summary generation...")
try:
response = await client.post(
@@ -266,7 +266,7 @@ class NewsFetcher:
result = response.json()
llm_response = result["response"]
logger.info("[AI] " + llm_response)
logger.debug("[AI] " + llm_response)
if isinstance(llm_response, str):
summary_data = json.loads(llm_response)
@@ -432,8 +432,6 @@ class NewsFetcher:
summary=rss_summary
)
logger.info(summary)
if not summary:
logger.warning(
f"❌ Failed to get summary for article {i}: {article_url}")