Compare commits
8 Commits
68ce2c88d2
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 015ffef17c | |||
| 15d16938dc | |||
| be5792a0c7 | |||
| c13bf2bf35 | |||
| aec5cc4e3c | |||
| 7c1a930355 | |||
| 9c8cbfd8ff | |||
| ae93557d15 |
@@ -31,8 +31,17 @@ func broadcaster(shutdown chan struct{}, dryRun bool) {
|
||||
return
|
||||
}
|
||||
|
||||
// Track when the last broadcast happened
|
||||
var lastBroadcastTime time.Time
|
||||
// Initialize the last broadcast time from the database
|
||||
lastBroadcastTime := getLastBroadcastTime()
|
||||
|
||||
if !lastBroadcastTime.IsZero() {
|
||||
logInfo("broadcaster", "Initialized last broadcast time from database", map[string]interface{}{
|
||||
"lastBroadcastTime": lastBroadcastTime.Format(time.RFC3339),
|
||||
"timeSince": time.Since(lastBroadcastTime).String(),
|
||||
})
|
||||
} else {
|
||||
logInfo("broadcaster", "No previous broadcast time found in database", nil)
|
||||
}
|
||||
|
||||
// Run checks frequently
|
||||
ticker := time.NewTicker(BROADCAST_CHECK_INTERVAL)
|
||||
|
||||
119
constants.go
Normal file
119
constants.go
Normal file
@@ -0,0 +1,119 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// Application constants
|
||||
const (
|
||||
// Database
|
||||
dbPath = "articles.db"
|
||||
|
||||
// LLM system prompt
|
||||
SYSTEM_PROMPT = "You are a news analyst."
|
||||
|
||||
// LLM batch processing settings
|
||||
BATCH_SIZE = 5
|
||||
MAX_INDIVIDUAL_PROCESSING = 50
|
||||
|
||||
// Timing constants
|
||||
RSS_CHECK_INTERVAL = 15 * time.Minute
|
||||
SUMMARIZE_INTERVAL = 10 * time.Second
|
||||
BROADCAST_INTERVAL = 1 * time.Hour
|
||||
BROADCAST_CHECK_INTERVAL = 10 * time.Second // Interval to check if broadcasting is needed
|
||||
DEVICE_REBOOT_INTERVAL = 6 * time.Hour // Interval to reboot Meshtastic device
|
||||
STARTUP_DELAY = 60 * time.Second // Delay before first broadcast
|
||||
BROADCAST_PREPARATION_DELAY = 30 * time.Second // Delay before executing broadcast command
|
||||
ARTICLE_FRESHNESS_WINDOW = 24 * time.Hour // Time window for considering articles fresh
|
||||
|
||||
// Message limits
|
||||
MAX_MESSAGE_LENGTH = 200 // Maximum length of broadcast messages in characters
|
||||
MAX_LOG_ENTRIES = 1000 // Maximum number of log entries to keep in memory
|
||||
)
|
||||
|
||||
// ANSI terminal escape sequences used when emitting colorized log output.
const (
	colorReset  = "\033[0m"
	colorRed    = "\033[31m"
	colorGreen  = "\033[32m"
	colorYellow = "\033[33m"
	colorBlue   = "\033[34m"
	colorPurple = "\033[35m"
	colorCyan   = "\033[36m"
	colorGray   = "\033[37m"
	colorWhite  = "\033[97m"
	bold        = "\033[1m"
)
|
||||
|
||||
// Main LLM prompt for article summarization
|
||||
const ARTICLES_PROMPT = `Summarize each of these news items in under 165
|
||||
characters, optimizing for information density (common news headline
|
||||
abbreviations OK) and rate their importance from 1 to 100.
|
||||
|
||||
100 means most important; 1 means least important.
|
||||
|
||||
Never rate over 90 unless it is a massive event such as: war outbreak,
|
||||
revolution, death of a head of state, large-scale natural disaster, mass
|
||||
casualty terrorism, etc.
|
||||
|
||||
Rank any headlines primarily promoting commercial products or
|
||||
services as 1 (lowest importance).
|
||||
|
||||
Rank any article with a headline that poses a question without providing an
|
||||
answer (as an attempt to lure a reader into clicking a link) as 1 (lowest
|
||||
importance).
|
||||
|
||||
Boost the importance score by 10 points for breaking news that is less than 60
|
||||
minutes old based on its original publication date (which is provided for each
|
||||
article), but only for time-critical events that need to be reported in minutes,
|
||||
such as currently unfolding events. Don't boost the importance score simply because
|
||||
it was recently published, unless it is a time-critical event.
|
||||
|
||||
Do not editorialize or otherwise label the summary.
|
||||
|
||||
For each article, return a JSON object with "id", "summary", and "importance"
|
||||
fields. Return your response as a JSON array of objects like: [{"id":
|
||||
"article_id", "summary": "...", "importance": 42}, ...]
|
||||
|
||||
Here are the articles:
|
||||
`
|
||||
|
||||
// sourceAbbreviations maps each feed's display name to the short form
// used in space-constrained broadcast messages.
var sourceAbbreviations = map[string]string{
	"BBC":             "BBC",
	"CNN":             "CNN",
	"NYTimes":         "NYT",
	"Guardian":        "Grd",
	"Al Jazeera":      "AlJ",
	"NBC":             "NBC",
	"ABC":             "ABC",
	"CBS":             "CBS",
	"Sky News":        "Sky",
	"Time":            "Time",
	"NPR":             "NPR",
	"Deutsche Welle":  "DW",
	"France 24":       "F24",
	"The Independent": "Ind",
	"Washington Post": "WaPo",
	"WSJ":             "WSJ",
}
|
||||
|
||||
// feeds maps each news source's display name to its RSS/Atom feed URL.
// Keys must match the keys of sourceAbbreviations.
var feeds = map[string]string{
	"BBC":             "https://feeds.bbci.co.uk/news/world/rss.xml",
	"CNN":             "http://rss.cnn.com/rss/edition.rss",
	"NYTimes":         "https://rss.nytimes.com/services/xml/rss/nyt/World.xml",
	"Guardian":        "https://www.theguardian.com/world/rss",
	"Al Jazeera":      "https://www.aljazeera.com/xml/rss/all.xml",
	"NBC":             "http://feeds.nbcnews.com/nbcnews/public/news",
	"ABC":             "https://abcnews.go.com/abcnews/topstories",
	"CBS":             "https://www.cbsnews.com/latest/rss/world",
	"Sky News":        "https://feeds.skynews.com/feeds/rss/world.xml",
	"Time":            "https://time.com/feed/",
	"NPR":             "https://feeds.npr.org/1001/rss.xml",
	"Deutsche Welle":  "https://rss.dw.com/rdf/rss-en-world",
	"France 24":       "https://www.france24.com/en/rss",
	"The Independent": "https://www.independent.co.uk/news/world/rss",
	"Washington Post": "https://feeds.washingtonpost.com/rss/world",
	"WSJ":             "https://feeds.a.dj.com/rss/RSSWorldNews.xml",
}
|
||||
82
models.go
82
models.go
@@ -5,18 +5,20 @@ import (
|
||||
)
|
||||
|
||||
type Article struct {
|
||||
Title string `json:"title"`
|
||||
Description string `json:"description"`
|
||||
Link string `json:"link"`
|
||||
Published time.Time `json:"published"` // When we first saw the article
|
||||
OriginalDate time.Time `json:"originalDate"` // Original publication date from the feed
|
||||
Source string `json:"source"`
|
||||
FirstSeen time.Time `json:"firstseen"`
|
||||
Seen time.Time `json:"seen"`
|
||||
Summary string `json:"summary"`
|
||||
Importance int `json:"importance"`
|
||||
ID string `json:"id"`
|
||||
BroadcastTime time.Time `json:"broadcastTime,omitempty"`
|
||||
Title string `json:"title"`
|
||||
Description string `json:"description"`
|
||||
Link string `json:"link"`
|
||||
Published time.Time `json:"published"` // When we first saw the article
|
||||
OriginalDate time.Time `json:"originalDate"` // Original publication date from the feed
|
||||
Source string `json:"source"`
|
||||
FirstSeen time.Time `json:"firstseen"`
|
||||
Seen time.Time `json:"seen"`
|
||||
Summary string `json:"summary"`
|
||||
Importance int `json:"importance"`
|
||||
ID string `json:"id"`
|
||||
BroadcastTime time.Time `json:"broadcastTime,omitempty"`
|
||||
RelativeTime string `json:"-"` // Relative time for FirstSeen (calculated field, not stored)
|
||||
BroadcastRelativeTime string `json:"-"` // Relative time for BroadcastTime (calculated field, not stored)
|
||||
}
|
||||
|
||||
type LogEntry struct {
|
||||
@@ -32,61 +34,9 @@ type DashboardData struct {
|
||||
TotalBroadcast int
|
||||
NewInLastHour int
|
||||
UnsummarizedCount int
|
||||
NextBroadcastIn string // Time until the next broadcast attempt
|
||||
LastBroadcastTime time.Time // When the last broadcast occurred
|
||||
NextUp []Article
|
||||
History []Article
|
||||
RecentLogs []LogEntry
|
||||
}
|
||||
|
||||
const (
|
||||
dbPath = "articles.db"
|
||||
|
||||
// LLM prompts
|
||||
ARTICLES_PROMPT = `Summarize each of these news items in under 165
|
||||
characters, optimizing for information density (common news headline
|
||||
abbreviations OK) and rate their importance from 1 to 100.
|
||||
|
||||
100 means most important; 1 means least important.
|
||||
|
||||
Never rate over 90 unless it is a massive event such as: war outbreak,
|
||||
revolution, death of a head of state, large-scale natural disaster, mass
|
||||
casualty terrorism, etc.
|
||||
|
||||
Rank any headlines primarily promoting commercial products or
|
||||
services as 1 (lowest importance).
|
||||
|
||||
Rank any article with a headline that poses a question without providing an
|
||||
answer (as an attempt to lure a reader into clicking a link) as 1 (lowest
|
||||
importance).
|
||||
|
||||
Boost the importance score by 10 points for breaking news that is less than 60
|
||||
minutes old based on its original publication date (which is provided for each
|
||||
article), but only for events that need to be reported in minutes, such as
|
||||
emeregencies or other critical breaking news.
|
||||
|
||||
Do not editorialize or otherwise label the summary.
|
||||
|
||||
For each article, return a JSON object with "id", "summary", and "importance"
|
||||
fields. Return your response as a JSON array of objects like: [{"id":
|
||||
"article_id", "summary": "...", "importance": 42}, ...]
|
||||
|
||||
Here are the articles:
|
||||
`
|
||||
|
||||
SYSTEM_PROMPT = "You are a news analyst."
|
||||
BATCH_SIZE = 5
|
||||
MAX_INDIVIDUAL_PROCESSING = 50
|
||||
|
||||
// Timing constants
|
||||
RSS_CHECK_INTERVAL = 15 * time.Minute
|
||||
SUMMARIZE_INTERVAL = 10 * time.Second
|
||||
BROADCAST_INTERVAL = 1 * time.Hour
|
||||
BROADCAST_CHECK_INTERVAL = 10 * time.Second // Interval to check if broadcasting is needed
|
||||
DEVICE_REBOOT_INTERVAL = 6 * time.Hour // Interval to reboot Meshtastic device
|
||||
STARTUP_DELAY = 60 * time.Second // Delay before first broadcast
|
||||
BROADCAST_PREPARATION_DELAY = 30 * time.Second // Delay before executing broadcast command
|
||||
ARTICLE_FRESHNESS_WINDOW = 24 * time.Hour // Time window for considering articles fresh
|
||||
|
||||
// Other constants
|
||||
MAX_MESSAGE_LENGTH = 200 // Maximum length of broadcast messages in characters
|
||||
MAX_LOG_ENTRIES = 1000 // Maximum number of log entries to keep in memory
|
||||
)
|
||||
|
||||
39
rss.go
39
rss.go
@@ -8,45 +8,6 @@ import (
|
||||
"github.com/mmcdole/gofeed"
|
||||
)
|
||||
|
||||
// Map of source names to their abbreviations
|
||||
var sourceAbbreviations = map[string]string{
|
||||
"BBC": "BBC",
|
||||
"CNN": "CNN",
|
||||
"NYTimes": "NYT",
|
||||
"Guardian": "Grd",
|
||||
"Al Jazeera": "AlJ",
|
||||
"NBC": "NBC",
|
||||
"ABC": "ABC",
|
||||
"CBS": "CBS",
|
||||
"Sky News": "Sky",
|
||||
"Time": "Time",
|
||||
"NPR": "NPR",
|
||||
"Deutsche Welle": "DW",
|
||||
"France 24": "F24",
|
||||
"The Independent": "Ind",
|
||||
"Washington Post": "WaPo",
|
||||
"WSJ": "WSJ",
|
||||
}
|
||||
|
||||
var feeds = map[string]string{
|
||||
"BBC": "https://feeds.bbci.co.uk/news/world/rss.xml",
|
||||
"CNN": "http://rss.cnn.com/rss/edition.rss",
|
||||
"NYTimes": "https://rss.nytimes.com/services/xml/rss/nyt/World.xml",
|
||||
"Guardian": "https://www.theguardian.com/world/rss",
|
||||
"Al Jazeera": "https://www.aljazeera.com/xml/rss/all.xml",
|
||||
"NBC": "http://feeds.nbcnews.com/nbcnews/public/news",
|
||||
"ABC": "https://abcnews.go.com/abcnews/topstories",
|
||||
"CBS": "https://www.cbsnews.com/latest/rss/world",
|
||||
"Sky News": "https://feeds.skynews.com/feeds/rss/world.xml",
|
||||
"Time": "https://time.com/feed/",
|
||||
"NPR": "https://feeds.npr.org/1001/rss.xml",
|
||||
"Deutsche Welle": "https://rss.dw.com/rdf/rss-en-world",
|
||||
"France 24": "https://www.france24.com/en/rss",
|
||||
"The Independent": "https://www.independent.co.uk/news/world/rss",
|
||||
"Washington Post": "https://feeds.washingtonpost.com/rss/world",
|
||||
"WSJ": "https://feeds.a.dj.com/rss/RSSWorldNews.xml",
|
||||
}
|
||||
|
||||
// This function was unused and removed to satisfy linter
|
||||
|
||||
// rssFeedChecker checks RSS feeds every 15 minutes and adds new articles to the database
|
||||
|
||||
50
storage.go
50
storage.go
@@ -17,20 +17,6 @@ import (
|
||||
"github.com/oklog/ulid/v2"
|
||||
)
|
||||
|
||||
// ANSI color codes
|
||||
const (
|
||||
colorReset = "\033[0m"
|
||||
colorRed = "\033[31m"
|
||||
colorGreen = "\033[32m"
|
||||
colorYellow = "\033[33m"
|
||||
colorBlue = "\033[34m"
|
||||
colorPurple = "\033[35m"
|
||||
colorCyan = "\033[36m"
|
||||
colorGray = "\033[37m"
|
||||
colorWhite = "\033[97m"
|
||||
bold = "\033[1m"
|
||||
)
|
||||
|
||||
// ColorizedHandler is a custom slog.Handler that outputs colorized logs
|
||||
type ColorizedHandler struct {
|
||||
w io.Writer
|
||||
@@ -728,3 +714,39 @@ func logInfo(component string, message string, data map[string]interface{}) {
|
||||
// Log to structured log file and database
|
||||
logEvent("info", logData)
|
||||
}
|
||||
|
||||
// getLastBroadcastTime retrieves the most recent broadcast time from the database
|
||||
func getLastBroadcastTime() time.Time {
|
||||
var lastBroadcastTime time.Time
|
||||
|
||||
// Query for the most recent valid broadcast time
|
||||
row := db.QueryRow(`
|
||||
SELECT MAX(broadcastTime) FROM articles
|
||||
WHERE broadcastTime IS NOT NULL
|
||||
AND broadcastTime > 1
|
||||
AND broadcastTime != 0
|
||||
AND datetime(broadcastTime) != '1970-01-01 00:00:00'
|
||||
AND datetime(broadcastTime) != '0001-01-01 00:00:00'
|
||||
AND strftime('%Y', broadcastTime) > '2000' -- Ensure year is at least 2000
|
||||
`)
|
||||
|
||||
// Scan the result
|
||||
var lastTime sql.NullTime
|
||||
if err := row.Scan(&lastTime); err != nil {
|
||||
logInfo("db", "Error retrieving last broadcast time", map[string]interface{}{
|
||||
"error": err.Error(),
|
||||
})
|
||||
return time.Time{} // Return zero time on error
|
||||
}
|
||||
|
||||
if lastTime.Valid {
|
||||
lastBroadcastTime = lastTime.Time
|
||||
logInfo("db", "Retrieved last broadcast time", map[string]interface{}{
|
||||
"lastBroadcastTime": lastBroadcastTime.Format(time.RFC3339),
|
||||
})
|
||||
} else {
|
||||
logInfo("db", "No previous broadcasts found in database", nil)
|
||||
}
|
||||
|
||||
return lastBroadcastTime
|
||||
}
|
||||
|
||||
@@ -85,8 +85,13 @@
|
||||
.timestamp {
|
||||
font-size: 0.9em;
|
||||
color: #7f8c8d;
|
||||
min-width: 180px;
|
||||
min-width: 120px;
|
||||
font-family: monospace;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.timestamp[title] {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #7f8c8d;
|
||||
}
|
||||
.source {
|
||||
font-weight: bold;
|
||||
@@ -155,6 +160,23 @@
|
||||
margin: 0 2px;
|
||||
font-family: monospace;
|
||||
}
|
||||
.footer {
|
||||
background-color: #e0e0e0;
|
||||
padding: 15px;
|
||||
text-align: center;
|
||||
font-size: 12px;
|
||||
color: #777;
|
||||
margin-top: 40px;
|
||||
border-radius: 5px;
|
||||
box-shadow: 0 -2px 5px rgba(0,0,0,0.1);
|
||||
}
|
||||
.footer a {
|
||||
color: #555;
|
||||
text-decoration: none;
|
||||
}
|
||||
.footer a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -186,6 +208,10 @@
|
||||
<div class="stat-label">Awaiting Summary</div>
|
||||
<div class="stat-number">{{.UnsummarizedCount}}</div>
|
||||
</div>
|
||||
<div class="stat-box">
|
||||
<div class="stat-label">Next Broadcast In</div>
|
||||
<div class="stat-number">{{.NextBroadcastIn}}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="section">
|
||||
@@ -209,7 +235,7 @@
|
||||
<td class="source">{{.Source}}</td>
|
||||
<td class="title">{{.Title}}</td>
|
||||
<td><a href="{{.Link}}" class="article-link" target="_blank">{{.Summary}}</a></td>
|
||||
<td class="timestamp">{{.FirstSeen.Format "2006-01-02 15:04:05 MST"}}</td>
|
||||
<td class="timestamp" title="{{.FirstSeen.Format "2006-01-02 15:04:05 MST"}}">{{.RelativeTime}}</td>
|
||||
</tr>
|
||||
{{else}}
|
||||
<tr>
|
||||
@@ -237,7 +263,7 @@
|
||||
{{range .History}}
|
||||
<tr>
|
||||
<td class="id">{{.ID}}</td>
|
||||
<td class="timestamp">{{.BroadcastTime.Format "2006-01-02 15:04:05 MST"}}</td>
|
||||
<td class="timestamp" title="{{.BroadcastTime.Format "2006-01-02 15:04:05 MST"}}">{{.BroadcastRelativeTime}}</td>
|
||||
<td class="importance {{if ge .Importance 70}}high{{else if ge .Importance 40}}medium{{else}}low{{end}}">{{.Importance}}</td>
|
||||
<td class="source">{{.Source}}</td>
|
||||
<td class="title">{{.Title}}</td>
|
||||
@@ -265,7 +291,7 @@
|
||||
<tbody>
|
||||
{{range .RecentLogs}}
|
||||
<tr>
|
||||
<td class="timestamp">{{.Timestamp.Format "2006-01-02 15:04:05 MST"}}</td>
|
||||
<td class="timestamp" style="white-space: nowrap;">{{.Timestamp.Format "2006-01-02 15:04:05 MST"}}</td>
|
||||
<td class="log-event">{{.Event}}</td>
|
||||
<td class="log-details">
|
||||
{{range $key, $value := .Details}}
|
||||
@@ -286,5 +312,9 @@
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<a href="https://git.eeqj.de/sneak/gomeshalerter">gomeshalerter</a> is a project by <a href="https://sneak.berlin">@sneak</a> and released under the WTFPL (<a href="https://git.eeqj.de/sneak/gomeshalerter">source</a>)
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
127
webserver.go
127
webserver.go
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"strings"
|
||||
@@ -19,8 +20,30 @@ func webServer(shutdown chan struct{}) {
|
||||
return
|
||||
}
|
||||
|
||||
// Define HTTP handlers
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
// Create a custom request handler with security headers
|
||||
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Add security headers
|
||||
w.Header().Set("X-Content-Type-Options", "nosniff")
|
||||
w.Header().Set("X-Frame-Options", "DENY")
|
||||
w.Header().Set("Content-Security-Policy", "default-src 'self'; style-src 'self' 'unsafe-inline'")
|
||||
w.Header().Set("Referrer-Policy", "strict-origin-when-cross-origin")
|
||||
w.Header().Set("Strict-Transport-Security", "max-age=31536000; includeSubDomains")
|
||||
|
||||
// Enforce request method
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
// Limit request body size (1MB)
|
||||
r.Body = http.MaxBytesReader(w, r.Body, 1<<20)
|
||||
|
||||
// Only serve the index page
|
||||
if r.URL.Path != "/" {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
data, err := getDashboardData()
|
||||
if err != nil {
|
||||
http.Error(w, "Error fetching data: "+err.Error(), http.StatusInternalServerError)
|
||||
@@ -42,10 +65,14 @@ func webServer(shutdown chan struct{}) {
|
||||
}
|
||||
})
|
||||
|
||||
// Start the server
|
||||
// Configure server with appropriate timeouts
|
||||
server := &http.Server{
|
||||
Addr: ":8080",
|
||||
Handler: nil, // Use default mux
|
||||
Addr: ":8080",
|
||||
Handler: handler,
|
||||
ReadTimeout: 10 * time.Second, // Time to read the request
|
||||
WriteTimeout: 30 * time.Second, // Time to write the response
|
||||
IdleTimeout: 60 * time.Second, // Keep-alive connections timeout
|
||||
MaxHeaderBytes: 1 << 20, // 1MB max header size
|
||||
}
|
||||
|
||||
// Create a goroutine for the server
|
||||
@@ -91,9 +118,15 @@ func getDashboardData() (DashboardData, error) {
|
||||
}
|
||||
|
||||
// Count broadcast articles, recent articles, and unsummarized articles
|
||||
var lastBroadcastTime time.Time
|
||||
for _, a := range articles {
|
||||
if !a.BroadcastTime.IsZero() && a.BroadcastTime.Unix() > 1 {
|
||||
data.TotalBroadcast++
|
||||
|
||||
// Track the most recent broadcast time
|
||||
if a.BroadcastTime.After(lastBroadcastTime) {
|
||||
lastBroadcastTime = a.BroadcastTime
|
||||
}
|
||||
}
|
||||
|
||||
if a.FirstSeen.After(hourAgo) {
|
||||
@@ -105,11 +138,46 @@ func getDashboardData() (DashboardData, error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Set the last broadcast time
|
||||
data.LastBroadcastTime = lastBroadcastTime
|
||||
|
||||
// Calculate time until next broadcast
|
||||
if lastBroadcastTime.IsZero() {
|
||||
data.NextBroadcastIn = "As soon as articles are summarized"
|
||||
} else {
|
||||
nextBroadcastTime := lastBroadcastTime.Add(BROADCAST_INTERVAL)
|
||||
if now.After(nextBroadcastTime) {
|
||||
// If we're past the interval but haven't broadcast yet,
|
||||
// likely waiting for articles to be summarized
|
||||
if data.UnsummarizedCount > 0 {
|
||||
data.NextBroadcastIn = "Waiting for articles to be summarized"
|
||||
} else {
|
||||
data.NextBroadcastIn = "Pending (checking every " + BROADCAST_CHECK_INTERVAL.String() + ")"
|
||||
}
|
||||
} else {
|
||||
// We're still within the interval, calculate remaining time
|
||||
timeUntilNextBroadcast := nextBroadcastTime.Sub(now)
|
||||
|
||||
// Format as hours and minutes
|
||||
hours := int(timeUntilNextBroadcast.Hours())
|
||||
minutes := int(timeUntilNextBroadcast.Minutes()) % 60
|
||||
|
||||
if hours > 0 {
|
||||
data.NextBroadcastIn = fmt.Sprintf("%dh %dm", hours, minutes)
|
||||
} else {
|
||||
data.NextBroadcastIn = fmt.Sprintf("%dm", minutes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get broadcast history (last 100)
|
||||
history, err := getBroadcastHistory(100)
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
// Add relative time information to history articles
|
||||
history = addRelativeTimes(history)
|
||||
data.History = history
|
||||
|
||||
// Get next up articles (importance sorted, less than 24 hours old)
|
||||
@@ -117,6 +185,9 @@ func getDashboardData() (DashboardData, error) {
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
// Add relative time information to next up articles
|
||||
nextUp = addRelativeTimes(nextUp)
|
||||
data.NextUp = nextUp
|
||||
|
||||
// Get recent logs
|
||||
@@ -141,3 +212,49 @@ func isResponseHeaderWritten(err error) bool {
|
||||
strings.Contains(errStr, "write: connection reset by peer") ||
|
||||
strings.Contains(errStr, "http: superfluous response.WriteHeader")
|
||||
}
|
||||
|
||||
// formatRelativeTime returns a human-readable relative time string
|
||||
func formatRelativeTime(t time.Time) string {
|
||||
if t.IsZero() {
|
||||
return ""
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
diff := now.Sub(t)
|
||||
|
||||
// Less than a minute
|
||||
if diff < time.Minute {
|
||||
return "just now"
|
||||
}
|
||||
|
||||
// Less than an hour
|
||||
if diff < time.Hour {
|
||||
minutes := int(diff.Minutes())
|
||||
return fmt.Sprintf("%dm ago", minutes)
|
||||
}
|
||||
|
||||
// Less than a day
|
||||
if diff < 24*time.Hour {
|
||||
hours := int(diff.Hours())
|
||||
return fmt.Sprintf("%dh ago", hours)
|
||||
}
|
||||
|
||||
// Less than a week
|
||||
if diff < 7*24*time.Hour {
|
||||
days := int(diff.Hours() / 24)
|
||||
return fmt.Sprintf("%dd ago", days)
|
||||
}
|
||||
|
||||
// More than a week
|
||||
weeks := int(diff.Hours() / 24 / 7)
|
||||
return fmt.Sprintf("%dw ago", weeks)
|
||||
}
|
||||
|
||||
// Add relative time information to articles
|
||||
func addRelativeTimes(articles []Article) []Article {
|
||||
for i := range articles {
|
||||
articles[i].RelativeTime = formatRelativeTime(articles[i].FirstSeen)
|
||||
articles[i].BroadcastRelativeTime = formatRelativeTime(articles[i].BroadcastTime)
|
||||
}
|
||||
return articles
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user