Updated backend imports to absolute backend.app paths, made the default DB path independent of the working directory, hardened the manual sync logic, and expanded the frontend dev-server proxy routes

2025-08-04 22:38:53 +02:00
parent c3eb540261
commit 26c3cc79d7
4 changed files with 25 additions and 9 deletions

View File

@@ -2,7 +2,7 @@ import logging
import os
from pathlib import Path
-DB_PATH = Path(os.getenv("DB_NAME", "owlynews.sqlite3"))
+DB_PATH = Path(os.getenv("DB_NAME", os.path.join(os.path.dirname(os.path.dirname(__file__)), "owlynews.sqlite3")))
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://localhost:11434")
MIN_CRON_HOURS = float(os.getenv("MIN_CRON_HOURS", 0.5))
DEFAULT_CRON_HOURS = float(os.getenv("CRON_HOURS", MIN_CRON_HOURS))
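
The new default resolves the SQLite file relative to the config module rather than the process working directory. A minimal sketch of the difference, assuming the module lives at something like backend/app/config.py (the diff does not show the file path):

import os
from pathlib import Path

# Old default: a bare filename, so the database lands wherever the server is started from.
old_default = Path("owlynews.sqlite3").resolve()  # depends on os.getcwd()

# New default: anchored one directory above this module's folder (backend/ in the assumed layout),
# so the location stays the same no matter where the process is launched.
new_default = Path(os.path.join(os.path.dirname(os.path.dirname(__file__)), "owlynews.sqlite3"))

print(old_default, new_default, sep="\n")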

View File

@@ -5,7 +5,7 @@ from contextlib import contextmanager
from pathlib import Path
from typing import Iterator
-from .config import (
+from backend.app.config import (
ARTICLE_FETCH_TIMEOUT,
CRON_HOURS,
DB_PATH,

View File

@@ -24,7 +24,7 @@ from fastapi import Depends, FastAPI, HTTPException, Response, status
from fastapi.staticfiles import StaticFiles
from starlette.responses import JSONResponse
-from .config import (
+from backend.app.config import (
CRON_HOURS,
LLM_MODEL,
MIN_CRON_HOURS,
@@ -34,8 +34,8 @@ from .config import (
frontend_path,
logger,
)
-from .database import get_db, get_db_write
-from .models import (
+from backend.app.database import get_db, get_db_write
+from backend.app.models import (
CronSettings,
ErrorResponse,
FeedData,
@@ -44,7 +44,7 @@ from .models import (
SuccessResponse,
TimestampResponse,
)
-from .services import NewsFetcher
+from backend.app.services import NewsFetcher
app = FastAPI(
title="Owly News Summariser",
@@ -290,7 +290,7 @@ async def get_model_status():
@app.post("/sync", response_model=None)
-async def manual_sync(db: sqlite3.Cursor = Depends(get_db)):
+async def manual_sync(db: sqlite3.Cursor = Depends(get_db_write)): # Note: changed to get_db_write
"""
Manually trigger a feed synchronization.
@@ -302,7 +302,14 @@ async def manual_sync(db: sqlite3.Cursor = Depends(get_db)):
"""
db.execute("SELECT val FROM meta WHERE key='last_sync'")
row = db.fetchone()
-last_sync_ts = int(row["val"])
+if row is None:
+    # Initialize the last_sync key if it doesn't exist
+    import time
+    last_sync_ts = int(time.time()) - (SYNC_COOLDOWN_MINUTES * 60 + 1) # Set to a time that allows sync
+    db.execute("INSERT INTO meta (key, val) VALUES ('last_sync', ?)", (str(last_sync_ts),))
+else:
+    last_sync_ts = int(row["val"])
now = datetime.now(timezone.utc)
last_sync_time = datetime.fromtimestamp(last_sync_ts, timezone.utc)
@@ -314,13 +321,18 @@ async def manual_sync(db: sqlite3.Cursor = Depends(get_db)):
try:
task = asyncio.create_task(NewsFetcher.harvest_feeds())
+# Update the last_sync timestamp after triggering the sync
+current_ts = int(time.time())
+db.execute("UPDATE meta SET val=? WHERE key='last_sync'", (str(current_ts),))
return {"status": "triggered", "task_id": id(task)}
except Exception as e:
logger.error(f"❌ Failed to trigger sync: {e}")
raise HTTPException(
500, f"Failed to trigger sync: {str(e)}"
)
@app.get("/meta/last-sync", response_model=TimestampResponse)
async def get_last_sync(db: sqlite3.Cursor = Depends(get_db)):
"""

View File

@@ -23,7 +23,11 @@ export default defineConfig({
server: {
proxy: {
'/news': 'http://localhost:8000',
-'/meta': 'http://localhost:8000'
+'/meta': 'http://localhost:8000',
+'/feeds': 'http://localhost:8000',
+'/model': 'http://localhost:8000',
+'/sync': 'http://localhost:8000',
+'/settings': 'http://localhost:8000'
}
},
});