feat(discovery): Pass 0 halbmonat buckets + konfidenz/status + link verification

Pass 0 splits every month into two halves (H1 = days 1-15, H2 = 16-EOM)
so each agent call fits within Mistral's 4096 max_tokens budget. The
response schema picks up richer per-market signals and dead agent URLs
get filtered before they land in the admin queue.

DB:
- 000015: add halbmonat char(2) to discovery_buckets, widen unique key,
  backfill existing rows as H1 + insert H2 siblings (624 → 1248 rows).
- 000016: rename discovered_markets.extraktion → konfidenz with
  best-effort value mapping (verbatim→hoch, abgeleitet→mittel); add
  agent_status column.

Backend:
- model: Bucket gains Halbmonat; Pass0Bucket same. Pass0Market renames
  Extraktion → Konfidenz and adds AgentStatus (JSON tag "status").
  DiscoveredMarket mirrors both fields; queue-lifecycle Status column
  stays distinct from agent-reported AgentStatus.
- repository: all SELECT/INSERT touched to use the new columns; picker
  orders by year_month, halbmonat so H1 runs before H2 in the same
  month.
- agent client: prompt now injects halbmonat and recherche_datum (today)
  so the agent has explicit date context.
- link verification: new LinkChecker does concurrent HEAD (GET fallback
  on 405) with a 5s timeout. FilterURLs runs before InsertDiscovered —
  markets whose quellen all fail are dropped and counted as
  link_check_failed in TickSummary. Failing website URLs are cleared
  but don't block insert.
- Service.linkChecker is a narrow interface so tests inject a noop
  stub instead of hitting the network.

Web:
- DiscoveredMarket type gains konfidenz + agent_status, drops extraktion.
- Queue column renames "Extraktion" → "Konfidenz" with three-level
  coloring (hoch=emerald, mittel=amber, niedrig=red, else neutral).
- A small pill next to markt_name surfaces agent_status when it's not
  "bestaetigt" — red for "abgesagt", amber for "unklar" and
  "vorjahr_unbestaetigt" — so risky entries are obvious before accept.
This commit is contained in:
2026-04-18 09:51:57 +02:00
parent 1af97bda21
commit cd836564f1
13 changed files with 232 additions and 38 deletions

View File

@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
"strings"
"time"
"marktvogt.de/backend/internal/pkg/ai"
)
@@ -31,8 +32,8 @@ func (c *AgentClient) Discover(ctx context.Context, b Bucket) (Pass0Response, er
return Pass0Response{}, fmt.Errorf("discovery agent not configured")
}
prompt := fmt.Sprintf(
"Bucket:\nland: %s\nregion: %s\njahr_monat: %s\n\nFinde alle Maerkte in diesem Bucket und antworte im vorgegebenen JSON-Format.",
b.Land, b.Region, b.YearMonth,
"bucket:\n land: %s\n region: %s\n jahr_monat: %s\n halbmonat: %s\nrecherche_datum: %s\n\nFinde alle Maerkte in diesem Bucket und antworte im vorgegebenen JSON-Schema.",
b.Land, b.Region, b.YearMonth, b.Halbmonat, time.Now().UTC().Format("2006-01-02"),
)
result, err := c.ai.Pass0(ctx, c.agentID, prompt)
if err != nil {

View File

@@ -0,0 +1,82 @@
package discovery
import (
	"context"
	"io"
	"net/http"
	"sync"
	"time"
)
// LinkChecker verifies that URLs returned by the discovery agent are actually
// reachable. Pass 0 sometimes returns dead kalender URLs or redirects that
// land on 404 pages; we want to filter those out before they land in the
// admin queue.
type LinkChecker struct {
client *http.Client
}
func NewLinkChecker() *LinkChecker {
return &LinkChecker{
client: &http.Client{
Timeout: 5 * time.Second,
CheckRedirect: func(req *http.Request, via []*http.Request) error {
if len(via) >= 5 {
return http.ErrUseLastResponse
}
return nil
},
},
}
}
// CheckURL returns true when the URL responds with a status < 400. Tries HEAD
// first and falls back to GET on 405 Method Not Allowed (some CDNs reject HEAD).
func (lc *LinkChecker) CheckURL(ctx context.Context, url string) bool {
if url == "" {
return false
}
for _, method := range []string{http.MethodHead, http.MethodGet} {
req, err := http.NewRequestWithContext(ctx, method, url, nil)
if err != nil {
return false
}
req.Header.Set("User-Agent", "Marktvogt-Discovery/1.0 (+https://marktvogt.de)")
resp, err := lc.client.Do(req)
if err != nil {
return false
}
_ = resp.Body.Close()
if resp.StatusCode >= 200 && resp.StatusCode < 400 {
return true
}
if resp.StatusCode != http.StatusMethodNotAllowed {
return false
}
}
return false
}
// FilterURLs returns only the URLs that pass CheckURL, preserving input order.
// Checks run concurrently — most agent responses have 1-3 URLs per market.
func (lc *LinkChecker) FilterURLs(ctx context.Context, urls []string) []string {
if len(urls) == 0 {
return urls
}
ok := make([]bool, len(urls))
var wg sync.WaitGroup
for i, u := range urls {
wg.Add(1)
go func(i int, u string) {
defer wg.Done()
ok[i] = lc.CheckURL(ctx, u)
}(i, u)
}
wg.Wait()
out := make([]string, 0, len(urls))
for i, u := range urls {
if ok[i] {
out = append(out, u)
}
}
return out
}

View File

@@ -86,3 +86,9 @@ func (m *mockRepo) Stats(ctx context.Context, forwardMonths, recentErrorsLimit i
// UpdatePending is a stub: the mock accepts any partial update and reports
// success without recording anything.
func (m *mockRepo) UpdatePending(ctx context.Context, id uuid.UUID, f UpdatePendingFields, nn *string) error {
return nil
}
// noopLinkVerifier passes every URL — used by tests to isolate from network.
type noopLinkVerifier struct{}
func (noopLinkVerifier) FilterURLs(_ context.Context, urls []string) []string { return urls }
func (noopLinkVerifier) CheckURL(_ context.Context, _ string) bool { return true }

View File

@@ -7,18 +7,24 @@ import (
"github.com/google/uuid"
)
// Bucket is a scheduler row: one (land, region, year_month) tuple.
// Bucket is a scheduler row: one (land, region, year_month, halbmonat) tuple.
// Halbmonat splits the month into H1 (days 1-15) and H2 (days 16-EOM) so each
// Pass 0 call covers a smaller window and fits within the 4096-token response.
type Bucket struct {
	ID            uuid.UUID  `json:"id"`
	Land          string     `json:"land"`
	Region        string     `json:"region"`
	YearMonth     string     `json:"year_month"` // 'YYYY-MM'
	Halbmonat     string     `json:"halbmonat"`  // 'H1' | 'H2'
	LastQueriedAt *time.Time `json:"last_queried_at"` // nil until the bucket's first Pass 0 run
	LastError     string     `json:"last_error"`      // "" when no error is recorded (column is nullable; repo coalesces)
	CreatedAt     time.Time  `json:"created_at"`
}
// DiscoveredMarket is a queue entry awaiting admin review.
// Status is the queue lifecycle (pending|accepted|rejected). AgentStatus is
// the separate confirmation signal from Pass 0 (bestaetigt|unklar|
// vorjahr_unbestaetigt|abgesagt).
type DiscoveredMarket struct {
ID uuid.UUID `json:"id"`
BucketID uuid.UUID `json:"bucket_id"`
@@ -30,11 +36,12 @@ type DiscoveredMarket struct {
EndDatum *time.Time `json:"end_datum"`
Website string `json:"website"`
Quellen []string `json:"quellen"`
Extraktion string `json:"extraktion"`
Konfidenz string `json:"konfidenz"` // 'hoch' | 'mittel' | 'niedrig'
AgentStatus string `json:"agent_status"` // 'bestaetigt' | 'unklar' | 'vorjahr_unbestaetigt' | 'abgesagt'
Hinweis string `json:"hinweis"`
NameNormalized string `json:"name_normalized"`
MatchedSeriesID *uuid.UUID `json:"matched_series_id"`
Status string `json:"status"` // 'pending' | 'accepted' | 'rejected'
Status string `json:"status"` // queue lifecycle
DiscoveredAt time.Time `json:"discovered_at"`
ReviewedAt *time.Time `json:"reviewed_at"`
ReviewedBy *uuid.UUID `json:"reviewed_by"`
@@ -64,18 +71,20 @@ type Pass0Bucket struct {
Land string `json:"land"`
Region string `json:"region"`
JahrMonat string `json:"jahr_monat"`
Halbmonat string `json:"halbmonat"`
}
type Pass0Market struct {
MarktName string `json:"markt_name"`
Stadt string `json:"stadt"`
Bundesland string `json:"bundesland"`
StartDatum string `json:"start_datum"` // 'YYYY-MM-DD' or ""
EndDatum string `json:"end_datum"`
Website string `json:"website"`
Quellen []string `json:"quellen"`
Extraktion string `json:"extraktion"`
Hinweis string `json:"hinweis"`
MarktName string `json:"markt_name"`
Stadt string `json:"stadt"`
Bundesland string `json:"bundesland"`
StartDatum string `json:"start_datum"` // 'YYYY-MM-DD' or ""
EndDatum string `json:"end_datum"`
Website string `json:"website"`
Quellen []string `json:"quellen"`
Konfidenz string `json:"konfidenz"` // 'hoch' | 'mittel' | 'niedrig'
AgentStatus string `json:"status"` // agent's status field; see DiscoveredMarket.AgentStatus for values
Hinweis string `json:"hinweis"`
}
// UpdatePendingFields is a partial update for a pending discovered_market row.

View File

@@ -46,12 +46,12 @@ func NewRepository(pool *pgxpool.Pool) Repository {
func (r *pgRepository) PickStaleBuckets(ctx context.Context, forwardMonths, limit int) ([]Bucket, error) {
q := `
SELECT id, land, region, year_month, last_queried_at, coalesce(last_error, ''), created_at
SELECT id, land, region, year_month, halbmonat, last_queried_at, coalesce(last_error, ''), created_at
FROM discovery_buckets
WHERE year_month >= to_char(date_trunc('month', now()), 'YYYY-MM')
AND year_month <= to_char(date_trunc('month', now()) + ($1 * interval '1 month'), 'YYYY-MM')
AND (last_queried_at IS NULL OR last_queried_at < now() - interval '7 days')
ORDER BY last_queried_at NULLS FIRST, year_month
ORDER BY last_queried_at NULLS FIRST, year_month, halbmonat
LIMIT $2`
rows, err := r.pool.Query(ctx, q, forwardMonths, limit)
if err != nil {
@@ -61,7 +61,7 @@ LIMIT $2`
out := make([]Bucket, 0)
for rows.Next() {
var b Bucket
if err := rows.Scan(&b.ID, &b.Land, &b.Region, &b.YearMonth, &b.LastQueriedAt, &b.LastError, &b.CreatedAt); err != nil {
if err := rows.Scan(&b.ID, &b.Land, &b.Region, &b.YearMonth, &b.Halbmonat, &b.LastQueriedAt, &b.LastError, &b.CreatedAt); err != nil {
return nil, err
}
out = append(out, b)
@@ -114,14 +114,22 @@ func (r *pgRepository) InsertDiscovered(ctx context.Context, d DiscoveredMarket)
err := r.pool.QueryRow(ctx, `
INSERT INTO discovered_markets
(bucket_id, markt_name, stadt, bundesland, land, start_datum, end_datum, website,
quellen, extraktion, hinweis, name_normalized, matched_series_id)
VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)
quellen, konfidenz, agent_status, hinweis, name_normalized, matched_series_id)
VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)
RETURNING id`,
d.BucketID, d.MarktName, d.Stadt, d.Bundesland, d.Land, d.StartDatum, d.EndDatum, d.Website,
d.Quellen, d.Extraktion, d.Hinweis, d.NameNormalized, d.MatchedSeriesID).Scan(&id)
d.Quellen, nilIfEmpty(d.Konfidenz), nilIfEmpty(d.AgentStatus), d.Hinweis, d.NameNormalized, d.MatchedSeriesID).Scan(&id)
return id, err
}
// nilIfEmpty maps empty strings to a typed-nil for nullable text columns, so
// the driver binds SQL NULL instead of an empty string.
func nilIfEmpty(s string) any {
	if s != "" {
		return s
	}
	return nil
}
func (r *pgRepository) IsRejected(ctx context.Context, nameNormalized, stadt string, year int) (bool, error) {
var exists bool
err := r.pool.QueryRow(ctx,
@@ -145,8 +153,8 @@ SELECT EXISTS(
func (r *pgRepository) ListQueue(ctx context.Context, status string, limit, offset int) ([]DiscoveredMarket, error) {
rows, err := r.pool.Query(ctx, `
SELECT id, bucket_id, markt_name, stadt, coalesce(bundesland,''), land,
start_datum, end_datum, coalesce(website,''), quellen, coalesce(extraktion,''),
coalesce(hinweis,''), name_normalized, matched_series_id, status,
start_datum, end_datum, coalesce(website,''), quellen, coalesce(konfidenz,''),
coalesce(agent_status,''), coalesce(hinweis,''), name_normalized, matched_series_id, status,
discovered_at, reviewed_at, reviewed_by, created_edition_id
FROM discovered_markets
WHERE status = $1
@@ -161,8 +169,8 @@ LIMIT $2 OFFSET $3`, status, limit, offset)
var d DiscoveredMarket
if err := rows.Scan(
&d.ID, &d.BucketID, &d.MarktName, &d.Stadt, &d.Bundesland, &d.Land,
&d.StartDatum, &d.EndDatum, &d.Website, &d.Quellen, &d.Extraktion,
&d.Hinweis, &d.NameNormalized, &d.MatchedSeriesID, &d.Status,
&d.StartDatum, &d.EndDatum, &d.Website, &d.Quellen, &d.Konfidenz,
&d.AgentStatus, &d.Hinweis, &d.NameNormalized, &d.MatchedSeriesID, &d.Status,
&d.DiscoveredAt, &d.ReviewedAt, &d.ReviewedBy, &d.CreatedEditionID,
); err != nil {
return nil, err
@@ -176,13 +184,13 @@ func (r *pgRepository) GetDiscovered(ctx context.Context, id uuid.UUID) (Discove
var d DiscoveredMarket
err := r.pool.QueryRow(ctx, `
SELECT id, bucket_id, markt_name, stadt, coalesce(bundesland,''), land,
start_datum, end_datum, coalesce(website,''), quellen, coalesce(extraktion,''),
coalesce(hinweis,''), name_normalized, matched_series_id, status,
start_datum, end_datum, coalesce(website,''), quellen, coalesce(konfidenz,''),
coalesce(agent_status,''), coalesce(hinweis,''), name_normalized, matched_series_id, status,
discovered_at, reviewed_at, reviewed_by, created_edition_id
FROM discovered_markets WHERE id = $1`, id).Scan(
&d.ID, &d.BucketID, &d.MarktName, &d.Stadt, &d.Bundesland, &d.Land,
&d.StartDatum, &d.EndDatum, &d.Website, &d.Quellen, &d.Extraktion,
&d.Hinweis, &d.NameNormalized, &d.MatchedSeriesID, &d.Status,
&d.StartDatum, &d.EndDatum, &d.Website, &d.Quellen, &d.Konfidenz,
&d.AgentStatus, &d.Hinweis, &d.NameNormalized, &d.MatchedSeriesID, &d.Status,
&d.DiscoveredAt, &d.ReviewedAt, &d.ReviewedBy, &d.CreatedEditionID,
)
return d, err

View File

@@ -19,10 +19,18 @@ type marketCreator interface {
}
// Service orchestrates bucket scheduling, agent invocation, and queue management.
// linkVerifier is the narrow interface Service depends on for URL validation.
// *LinkChecker is the real implementation; tests inject a noop stub.
type linkVerifier interface {
	// FilterURLs returns the subset of urls that pass verification, preserving input order.
	FilterURLs(ctx context.Context, urls []string) []string
	// CheckURL reports whether a single URL is reachable.
	CheckURL(ctx context.Context, url string) bool
}
type Service struct {
	repo          Repository
	agent         *AgentClient
	marketCreator marketCreator
	linkChecker   linkVerifier // *LinkChecker in production; tests swap in a noop stub
	batchSize     int          // buckets processed per tick
	forwardMonths int          // how far ahead buckets are scheduled
}
@@ -33,6 +41,7 @@ func NewService(repo Repository, agent *AgentClient, mc marketCreator, batchSize
repo: repo,
agent: agent,
marketCreator: mc,
linkChecker: NewLinkChecker(),
batchSize: batchSize,
forwardMonths: forwardMonths,
}
@@ -52,6 +61,7 @@ type TickSummary struct {
DedupedQueue int `json:"deduped_queue"`
Errors int `json:"errors"`
RateLimited int `json:"rate_limited"`
LinkCheckFailed int `json:"link_check_failed"`
}
// Tick picks N stale buckets and runs Pass 0 for each, writing net-new discoveries.
@@ -188,6 +198,22 @@ func (s *Service) processBucketResponse(ctx context.Context, b Bucket, resp Pass
continue
}
// Link verification — drop URLs the agent hallucinated or that are now
// 404/unreachable. If all quellen fail, skip the market entirely (we
// need at least one verifiable source). If the website fails, clear it
// but keep the market since quellen is the primary evidence.
verifiedQuellen := s.linkChecker.FilterURLs(ctx, m.Quellen)
if len(verifiedQuellen) == 0 {
slog.InfoContext(ctx, "link check dropped all quellen; skipping market",
"markt", m.MarktName, "stadt", m.Stadt)
summary.LinkCheckFailed++
continue
}
verifiedWebsite := m.Website
if verifiedWebsite != "" && !s.linkChecker.CheckURL(ctx, verifiedWebsite) {
verifiedWebsite = ""
}
dm := DiscoveredMarket{
BucketID: b.ID,
MarktName: m.MarktName,
@@ -196,9 +222,10 @@ func (s *Service) processBucketResponse(ctx context.Context, b Bucket, resp Pass
Land: b.Land,
StartDatum: startDatum,
EndDatum: endDatum,
Website: m.Website,
Quellen: m.Quellen,
Extraktion: m.Extraktion,
Website: verifiedWebsite,
Quellen: verifiedQuellen,
Konfidenz: m.Konfidenz,
AgentStatus: m.AgentStatus,
Hinweis: m.Hinweis,
NameNormalized: nameNorm,
MatchedSeriesID: matchedSeriesID,

View File

@@ -58,6 +58,7 @@ func TestPickBucketsPassesConfigToRepo(t *testing.T) {
},
}
svc := NewService(m, nil, nil, 4, 12)
svc.linkChecker = noopLinkVerifier{}
got, err := svc.PickBuckets(context.Background())
if err != nil {
t.Fatalf("err: %v", err)
@@ -92,6 +93,7 @@ func TestProcessBucket_DedupsExisting(t *testing.T) {
updateBucketFn: func(_ context.Context, _ uuid.UUID, _ string) error { return nil },
}
svc := NewService(m, nil, nil, 4, 12)
svc.linkChecker = noopLinkVerifier{}
resp := Pass0Response{
Bucket: Pass0Bucket{Land: "Deutschland", Region: "Bayern", JahrMonat: "2026-09"},
@@ -128,6 +130,7 @@ func TestProcessBucket_InsertsNetNew(t *testing.T) {
updateBucketFn: func(_ context.Context, _ uuid.UUID, _ string) error { return nil },
}
svc := NewService(m, nil, nil, 4, 12)
svc.linkChecker = noopLinkVerifier{}
resp := Pass0Response{
Bucket: Pass0Bucket{Land: "Deutschland", Region: "Bayern", JahrMonat: "2026-09"},
@@ -187,6 +190,7 @@ func TestAccept_NewSeries_CallsCreate(t *testing.T) {
}
mc := &stubCreator{}
svc := NewService(m, nil, mc, 4, 12)
svc.linkChecker = noopLinkVerifier{}
_, _, err := svc.Accept(context.Background(), qID, uuid.New())
if err != nil {
t.Fatalf("accept err: %v", err)
@@ -209,6 +213,7 @@ func TestAccept_ExistingSeries_CallsCreateEditionForSeries(t *testing.T) {
}
mc := &stubCreator{}
svc := NewService(m, nil, mc, 4, 12)
svc.linkChecker = noopLinkVerifier{}
_, _, err := svc.Accept(context.Background(), uuid.New(), uuid.New())
if err != nil {
t.Fatalf("accept err: %v", err)

View File

@@ -0,0 +1,9 @@
-- Revert the half-month split: collapse back to one bucket per month.
-- Drop the H2 siblings first so restoring the narrower unique key below
-- cannot collide (each (land, region, year_month) keeps only its H1 row).
DELETE FROM discovery_buckets WHERE halbmonat = 'H2';
ALTER TABLE discovery_buckets
DROP CONSTRAINT discovery_buckets_lrym_hm_key;
ALTER TABLE discovery_buckets
ADD CONSTRAINT discovery_buckets_land_region_year_month_key UNIQUE (land, region, year_month);
ALTER TABLE discovery_buckets DROP COLUMN halbmonat;

View File

@@ -0,0 +1,13 @@
-- Split every discovery bucket into month halves (H1 = days 1-15, H2 = 16-EOM).
-- Existing rows become H1 via the column DEFAULT; the DEFAULT is kept, so
-- future inserts without an explicit halbmonat land in H1.
ALTER TABLE discovery_buckets ADD COLUMN halbmonat char(2) NOT NULL DEFAULT 'H1';
-- Widen the unique key to include the new half-month discriminator.
ALTER TABLE discovery_buckets
DROP CONSTRAINT discovery_buckets_land_region_year_month_key;
ALTER TABLE discovery_buckets
ADD CONSTRAINT discovery_buckets_lrym_hm_key UNIQUE (land, region, year_month, halbmonat);
-- Backfill: give every H1 bucket an H2 sibling. ON CONFLICT makes the
-- migration safe to re-run.
INSERT INTO discovery_buckets (land, region, year_month, halbmonat)
SELECT land, region, year_month, 'H2'
FROM discovery_buckets
WHERE halbmonat = 'H1'
ON CONFLICT (land, region, year_month, halbmonat) DO NOTHING;

View File

@@ -0,0 +1,3 @@
-- Revert: drop the agent-reported status and restore the old column name.
-- NOTE: the verbatim/abgeleitet -> hoch/mittel value mapping from the up
-- migration is not reversed; the rename was best-effort by design.
ALTER TABLE discovered_markets DROP COLUMN agent_status;
ALTER TABLE discovered_markets RENAME COLUMN konfidenz TO extraktion;

View File

@@ -0,0 +1,6 @@
-- Rename extraktion -> konfidenz and remap legacy values best-effort
-- (verbatim -> hoch, abgeleitet -> mittel); any other value is left as-is.
ALTER TABLE discovered_markets RENAME COLUMN extraktion TO konfidenz;
UPDATE discovered_markets
SET konfidenz = CASE konfidenz WHEN 'verbatim' THEN 'hoch' ELSE 'mittel' END
WHERE konfidenz IN ('verbatim', 'abgeleitet');
-- Nullable: rows predating this migration carry no agent-reported status.
ALTER TABLE discovered_markets ADD COLUMN agent_status text;

View File

@@ -12,7 +12,8 @@ type DiscoveredMarket = {
end_datum: string | null;
website: string;
quellen: string[];
extraktion: string;
konfidenz: string; // 'hoch' | 'mittel' | 'niedrig'
agent_status: string; // 'bestaetigt' | 'unklar' | 'vorjahr_unbestaetigt' | 'abgesagt'
hinweis: string;
matched_series_id: string | null;
discovered_at: string;

View File

@@ -32,6 +32,19 @@
return s || e || '';
}
// Badge classes for the three konfidenz levels: hoch=emerald, mittel=amber,
// niedrig=red; anything else (including empty) gets the neutral stone style.
function konfidenzClass(k: string): string {
	if (k === 'hoch') return 'bg-emerald-100 text-emerald-700 dark:bg-emerald-900/50 dark:text-emerald-300';
	if (k === 'mittel') return 'bg-amber-100 text-amber-700 dark:bg-amber-900/50 dark:text-amber-300';
	if (k === 'niedrig') return 'bg-red-100 text-red-700 dark:bg-red-900/50 dark:text-red-300';
	return 'bg-stone-100 text-stone-600 dark:bg-stone-800 dark:text-stone-400';
}
const lastTickLabel = $derived.by(() => {
if (!data.stats.last_tick_at) return 'nie';
const ts = new Date(data.stats.last_tick_at).getTime();
@@ -139,7 +152,7 @@
<th class="py-2 pr-4 font-medium">Datum</th>
<th class="py-2 pr-4 font-medium">Website</th>
<th class="py-2 pr-4 font-medium">Quellen</th>
<th class="py-2 pr-4 font-medium">Extraktion</th>
<th class="py-2 pr-4 font-medium">Konfidenz</th>
<th class="py-2 pl-4 text-right font-medium">Aktion</th>
</tr>
</thead>
@@ -159,7 +172,20 @@
<td class="py-3 pr-4 whitespace-nowrap text-stone-600 dark:text-stone-400">
{row.bundesland || row.land}
</td>
<td class="py-3 pr-4 font-medium">{row.markt_name}</td>
<td class="py-3 pr-4 font-medium">
{row.markt_name}
{#if row.agent_status && row.agent_status !== 'bestaetigt'}
<span
class="ml-1 inline-block rounded px-1.5 py-0.5 align-middle text-[10px] {row.agent_status ===
'abgesagt'
? 'bg-red-100 text-red-700 dark:bg-red-900/50 dark:text-red-300'
: 'bg-amber-100 text-amber-700 dark:bg-amber-900/50 dark:text-amber-300'}"
title="Agent status"
>
{row.agent_status}
</span>
{/if}
</td>
<td class="py-3 pr-4">{row.stadt}</td>
<td class="py-3 pr-4 whitespace-nowrap">
{#if row.start_datum}
@@ -187,11 +213,9 @@
</td>
<td class="py-3 pr-4">
<span
class="inline-block rounded px-2 py-0.5 text-xs {row.extraktion === 'verbatim'
? 'bg-emerald-100 text-emerald-700 dark:bg-emerald-900/50 dark:text-emerald-300'
: 'bg-amber-100 text-amber-700 dark:bg-amber-900/50 dark:text-amber-300'}"
class="inline-block rounded px-2 py-0.5 text-xs {konfidenzClass(row.konfidenz)}"
>
{row.extraktion || '—'}
{row.konfidenz || '—'}
</span>
</td>
<td class="py-3 pl-4 text-right whitespace-nowrap">