Refactor: Implement proper structured logging with slog and replace fmt.Sprintf-formatted log messages with structured key/value fields

This commit is contained in:
Jeffrey Paul 2025-05-22 06:56:20 -07:00
parent b8f2ea7b7b
commit e64ec45054
4 changed files with 41 additions and 36 deletions

View File

@ -12,13 +12,16 @@ import (
// broadcaster runs on startup and every hour to select and broadcast the most important article
func broadcaster(shutdown chan struct{}, dryRun bool) {
logInfo("broadcaster", fmt.Sprintf("Starting broadcaster (waiting %d seconds before first broadcast)", int(STARTUP_DELAY.Seconds())), map[string]interface{}{
logInfo("broadcaster", "Starting broadcaster", map[string]interface{}{
"startupDelay": int(STARTUP_DELAY.Seconds()),
"interval": BROADCAST_INTERVAL.String(),
"dryRun": dryRun,
})
// Sleep on startup
logInfo("broadcaster", fmt.Sprintf("Sleeping for %d seconds before first broadcast", int(STARTUP_DELAY.Seconds())), nil)
logInfo("broadcaster", "Sleeping before first broadcast", map[string]interface{}{
"seconds": int(STARTUP_DELAY.Seconds()),
})
select {
case <-time.After(STARTUP_DELAY):
// Continue after sleep
@ -169,7 +172,8 @@ func broadcastArticle(chosen Article, dryRun bool) {
maxSummaryLen := MAX_MESSAGE_LENGTH - len(ts) - len(sourceAbbr) - len(": [] [AI/LLM] ") - 3 // 3 for "..."
truncatedSummary := strings.TrimSpace(chosen.Summary)[:maxSummaryLen] + "..."
msg = fmt.Sprintf("%s: [%s] [AI/LLM] %s", ts, sourceAbbr, truncatedSummary)
logInfo("broadcaster", fmt.Sprintf("Message truncated to fit %d character limit", MAX_MESSAGE_LENGTH), map[string]interface{}{
logInfo("broadcaster", "Message truncated to fit character limit", map[string]interface{}{
"limit": MAX_MESSAGE_LENGTH,
"originalLength": len(chosen.Summary),
"truncatedLength": maxSummaryLen,
})
@ -189,7 +193,9 @@ func broadcastArticle(chosen Article, dryRun bool) {
})
// Wait before broadcasting to allow time to see the message
logInfo("broadcaster", fmt.Sprintf("Waiting %d seconds before broadcasting...", int(BROADCAST_PREPARATION_DELAY.Seconds())), nil)
logInfo("broadcaster", "Waiting before broadcasting", map[string]interface{}{
"seconds": int(BROADCAST_PREPARATION_DELAY.Seconds()),
})
time.Sleep(BROADCAST_PREPARATION_DELAY)
// Update broadcast time and save to database

8
llm.go
View File

@ -25,7 +25,9 @@ type SummaryResult struct {
// articleSummarizer checks for articles without summaries every 10 seconds and processes them in batches
func articleSummarizer(shutdown chan struct{}, ollamaURL, ollamaModel string) {
logInfo("summarizer", fmt.Sprintf("Starting article summarizer (interval: %s)", SUMMARIZE_INTERVAL), nil)
logInfo("summarizer", "Starting article summarizer", map[string]interface{}{
"interval": SUMMARIZE_INTERVAL.String(),
})
ticker := time.NewTicker(SUMMARIZE_INTERVAL)
defer ticker.Stop()
@ -84,9 +86,11 @@ func summarizeArticles(ollamaURL, ollamaModel string) {
batchInfo = append(batchInfo, a.ID[:8])
}
logInfo("summarizer", fmt.Sprintf("Processing batch %d to %d", i+1, end), map[string]interface{}{
logInfo("summarizer", "Processing batch", map[string]interface{}{
"batchSize": len(batch),
"batchIds": strings.Join(batchInfo, ","),
"startItem": i + 1,
"endItem": end,
})
startTime := time.Now()

8
rss.go
View File

@ -1,7 +1,6 @@
package main
import (
"fmt"
"net/http"
"sync"
"time"
@ -52,7 +51,9 @@ var feeds = map[string]string{
// rssFeedChecker checks RSS feeds every 15 minutes and adds new articles to the database
func rssFeedChecker(shutdown chan struct{}, ollamaURL, ollamaModel string) {
logInfo("rss", fmt.Sprintf("Starting RSS feed checker (interval: %s)", RSS_CHECK_INTERVAL), nil)
logInfo("rss", "Starting RSS feed checker", map[string]interface{}{
"interval": RSS_CHECK_INTERVAL.String(),
})
// Run immediately on startup
checkRSSFeeds()
@ -99,8 +100,9 @@ func checkRSSFeeds() {
})
}
newCount++
logInfo("new", fmt.Sprintf("Found new article: %s", a.Title), map[string]interface{}{
logInfo("new", "Found new article", map[string]interface{}{
"id": a.ID,
"title": a.Title,
"source": a.Source,
"published": a.Published.Format(time.RFC3339),
})

View File

@ -5,10 +5,8 @@ import (
"database/sql"
"encoding/hex"
"encoding/json"
"fmt"
"log"
"log/slog"
"os"
"sort"
"time"
"github.com/oklog/ulid/v2"
@ -349,8 +347,15 @@ func setupLogging() {
var err error
logFile, err = os.Create(logPath)
if err != nil {
log.Fatalf("Could not create log file: %v", err)
slog.Error("Could not create log file", "error", err)
os.Exit(1)
}
// Set up structured logger
jsonHandler := slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{
Level: slog.LevelInfo,
})
slog.SetDefault(slog.New(jsonHandler))
}
func flushLog() {
@ -395,7 +400,7 @@ func logEvent(event string, details map[string]interface{}) {
// Store log in database
logBytes, err := json.Marshal(entry)
if err != nil {
log.Printf("Error marshaling log entry: %v", err)
slog.Error("Error marshaling log entry", "error", err)
return
}
@ -407,7 +412,7 @@ func logEvent(event string, details map[string]interface{}) {
_, err = db.Exec("INSERT INTO logs (id, timestamp, log) VALUES (?, ?, ?)",
id, timestamp, string(logBytes))
if err != nil {
log.Printf("Error storing log in database: %v", err)
slog.Error("Error storing log in database", "error", err)
}
}
@ -497,7 +502,7 @@ func logCleanupWorker(shutdown chan struct{}) {
}
// logInfo logs a structured message to both console and log file
func logInfo(component, message string, data map[string]interface{}) {
func logInfo(component string, message string, data map[string]interface{}) {
// Create a copy of the data map to avoid modifying the original
logData := make(map[string]interface{})
for k, v := range data {
@ -508,25 +513,13 @@ func logInfo(component, message string, data map[string]interface{}) {
logData["component"] = component
logData["message"] = message
// Format console output: timestamp component: message key1=val1 key2=val2
timestamp := time.Now().Format("15:04:05.000")
console := fmt.Sprintf("[%s] [%s] %s", timestamp, component, message)
// Add key-value pairs to console output
keys := make([]string, 0, len(data))
for k := range data {
keys = append(keys, k)
// Use slog for structured logging to console
attrs := []any{}
for k, v := range logData {
attrs = append(attrs, k, v)
}
sort.Strings(keys) // Sort keys for consistent output
slog.Info(message, attrs...)
for _, k := range keys {
v := data[k]
console += fmt.Sprintf(" %s=%v", k, v)
}
// Print to console
fmt.Fprintln(os.Stderr, console)
// Log to structured log file
// Log to structured log file and database
logEvent("info", logData)
}