Major schema refactoring: simplify ASN and prefix tracking
- Remove UUID primary keys from ASNs table, use ASN number as primary key
- Update announcements table to reference ASN numbers directly
- Rename asns.number column to asns.asn for consistency
- Add prefix tracking to PrefixHandler to populate prefixes_v4/v6 tables
- Add UpdatePrefixesBatch method for efficient batch updates
- Update all database methods and models to use new schema
- Fix all references in code to use ASN field instead of Number
- Update test mocks to match new interfaces
parent a165ecf759
commit c9da20e630
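For orientation, a minimal sketch of the calling pattern this refactor targets: ASNs keyed by their number and prefixes keyed by their string form, each written in one batched call. The two method signatures come from the Store interface changed in this commit; everything else (package, type, and function names) is illustrative only.

package example

import "time"

// batchStore is a hypothetical narrowing of the refactored Store interface;
// only the two batch methods exercised below are listed.
type batchStore interface {
	GetOrCreateASNBatch(asns map[int]time.Time) error
	UpdatePrefixesBatch(prefixes map[string]time.Time) error
}

// flushExample sketches the intended flow: collect ASNs keyed by number
// (no UUIDs anymore) and prefixes keyed by string, then hand each map to the
// store so it can be written in a single transaction.
func flushExample(db batchStore, originByPrefix map[string]int, seen time.Time) error {
	asns := make(map[int]time.Time)
	prefixes := make(map[string]time.Time)
	for prefix, origin := range originByPrefix {
		prefixes[prefix] = seen
		asns[origin] = seen
	}
	if err := db.GetOrCreateASNBatch(asns); err != nil {
		return err
	}
	return db.UpdatePrefixesBatch(prefixes)
}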
@@ -367,6 +367,100 @@ func (d *Database) DeleteLiveRouteBatch(deletions []LiveRouteDeletion) error {
 	return nil
 }
 
+// UpdatePrefixesBatch updates the last_seen time for multiple prefixes in a single transaction
+func (d *Database) UpdatePrefixesBatch(prefixes map[string]time.Time) error {
+	if len(prefixes) == 0 {
+		return nil
+	}
+
+	d.lock("UpdatePrefixesBatch")
+	defer d.unlock()
+
+	tx, err := d.beginTx()
+	if err != nil {
+		return fmt.Errorf("failed to begin transaction: %w", err)
+	}
+	defer func() {
+		if err := tx.Rollback(); err != nil && err != sql.ErrTxDone {
+			d.logger.Error("Failed to rollback transaction", "error", err)
+		}
+	}()
+
+	// Prepare statements for both IPv4 and IPv6 tables
+	selectV4Stmt, err := tx.Prepare("SELECT id FROM prefixes_v4 WHERE prefix = ?")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv4 select statement: %w", err)
+	}
+	defer func() { _ = selectV4Stmt.Close() }()
+
+	updateV4Stmt, err := tx.Prepare("UPDATE prefixes_v4 SET last_seen = ? WHERE prefix = ?")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv4 update statement: %w", err)
+	}
+	defer func() { _ = updateV4Stmt.Close() }()
+
+	insertV4Stmt, err := tx.Prepare("INSERT INTO prefixes_v4 (id, prefix, first_seen, last_seen) VALUES (?, ?, ?, ?)")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv4 insert statement: %w", err)
+	}
+	defer func() { _ = insertV4Stmt.Close() }()
+
+	selectV6Stmt, err := tx.Prepare("SELECT id FROM prefixes_v6 WHERE prefix = ?")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv6 select statement: %w", err)
+	}
+	defer func() { _ = selectV6Stmt.Close() }()
+
+	updateV6Stmt, err := tx.Prepare("UPDATE prefixes_v6 SET last_seen = ? WHERE prefix = ?")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv6 update statement: %w", err)
+	}
+	defer func() { _ = updateV6Stmt.Close() }()
+
+	insertV6Stmt, err := tx.Prepare("INSERT INTO prefixes_v6 (id, prefix, first_seen, last_seen) VALUES (?, ?, ?, ?)")
+	if err != nil {
+		return fmt.Errorf("failed to prepare IPv6 insert statement: %w", err)
+	}
+	defer func() { _ = insertV6Stmt.Close() }()
+
+	for prefix, timestamp := range prefixes {
+		ipVersion := detectIPVersion(prefix)
+
+		var selectStmt, updateStmt, insertStmt *sql.Stmt
+		if ipVersion == ipVersionV4 {
+			selectStmt, updateStmt, insertStmt = selectV4Stmt, updateV4Stmt, insertV4Stmt
+		} else {
+			selectStmt, updateStmt, insertStmt = selectV6Stmt, updateV6Stmt, insertV6Stmt
+		}
+
+		var id string
+		err = selectStmt.QueryRow(prefix).Scan(&id)
+
+		switch err {
+		case nil:
+			// Prefix exists, update last_seen
+			_, err = updateStmt.Exec(timestamp, prefix)
+			if err != nil {
+				return fmt.Errorf("failed to update prefix %s: %w", prefix, err)
+			}
+		case sql.ErrNoRows:
+			// Prefix doesn't exist, create it
+			_, err = insertStmt.Exec(generateUUID().String(), prefix, timestamp, timestamp)
+			if err != nil {
+				return fmt.Errorf("failed to insert prefix %s: %w", prefix, err)
+			}
+		default:
+			return fmt.Errorf("failed to query prefix %s: %w", prefix, err)
+		}
+	}
+
+	if err = tx.Commit(); err != nil {
+		return fmt.Errorf("failed to commit transaction: %w", err)
+	}
+
+	return nil
+}
+
 // GetOrCreateASNBatch creates or updates multiple ASNs in a single transaction
 func (d *Database) GetOrCreateASNBatch(asns map[int]time.Time) error {
 	if len(asns) == 0 {
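The new UpdatePrefixesBatch relies on a detectIPVersion helper and the ipVersionV4/ipVersionV6 constants, none of which appear in this diff. A minimal sketch of what such a helper might look like, assuming the same colon-based rule the mock store below uses; this is an assumption, not the commit's own code.

package database

import "strings"

const (
	ipVersionV4 = 4
	ipVersionV6 = 6
)

// detectIPVersion is a hypothetical sketch: prefixes containing a colon
// (e.g. "2001:db8::/32") are treated as IPv6, everything else
// (e.g. "192.0.2.0/24") as IPv4.
func detectIPVersion(prefix string) int {
	if strings.Contains(prefix, ":") {
		return ipVersionV6
	}
	return ipVersionV4
}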
@@ -388,20 +482,20 @@ func (d *Database) GetOrCreateASNBatch(asns map[int]time.Time) error {
 
 	// Prepare statements
 	selectStmt, err := tx.Prepare(
-		"SELECT id, number, handle, description, first_seen, last_seen FROM asns WHERE number = ?")
+		"SELECT asn, handle, description, first_seen, last_seen FROM asns WHERE asn = ?")
 	if err != nil {
 		return fmt.Errorf("failed to prepare select statement: %w", err)
 	}
 	defer func() { _ = selectStmt.Close() }()
 
-	updateStmt, err := tx.Prepare("UPDATE asns SET last_seen = ? WHERE id = ?")
+	updateStmt, err := tx.Prepare("UPDATE asns SET last_seen = ? WHERE asn = ?")
 	if err != nil {
 		return fmt.Errorf("failed to prepare update statement: %w", err)
 	}
 	defer func() { _ = updateStmt.Close() }()
 
 	insertStmt, err := tx.Prepare(
-		"INSERT INTO asns (id, number, handle, description, first_seen, last_seen) VALUES (?, ?, ?, ?, ?, ?)")
+		"INSERT INTO asns (asn, handle, description, first_seen, last_seen) VALUES (?, ?, ?, ?, ?)")
 	if err != nil {
 		return fmt.Errorf("failed to prepare insert statement: %w", err)
 	}
@@ -409,15 +503,13 @@ func (d *Database) GetOrCreateASNBatch(asns map[int]time.Time) error {
 
 	for number, timestamp := range asns {
 		var asn ASN
-		var idStr string
 		var handle, description sql.NullString
 
-		err = selectStmt.QueryRow(number).Scan(&idStr, &asn.Number, &handle, &description, &asn.FirstSeen, &asn.LastSeen)
+		err = selectStmt.QueryRow(number).Scan(&asn.ASN, &handle, &description, &asn.FirstSeen, &asn.LastSeen)
 
 		if err == nil {
 			// ASN exists, update last_seen
-			asn.ID, _ = uuid.Parse(idStr)
-			_, err = updateStmt.Exec(timestamp, asn.ID.String())
+			_, err = updateStmt.Exec(timestamp, number)
 			if err != nil {
 				return fmt.Errorf("failed to update ASN %d: %w", number, err)
 			}
@@ -428,8 +520,7 @@ func (d *Database) GetOrCreateASNBatch(asns map[int]time.Time) error {
 		if err == sql.ErrNoRows {
 			// ASN doesn't exist, create it
 			asn = ASN{
-				ID:        generateUUID(),
-				Number:    number,
+				ASN:       number,
 				FirstSeen: timestamp,
 				LastSeen:  timestamp,
 			}
@@ -440,7 +531,7 @@ func (d *Database) GetOrCreateASNBatch(asns map[int]time.Time) error {
 				asn.Description = info.Description
 			}
 
-			_, err = insertStmt.Exec(asn.ID.String(), asn.Number, asn.Handle, asn.Description, asn.FirstSeen, asn.LastSeen)
+			_, err = insertStmt.Exec(asn.ASN, asn.Handle, asn.Description, asn.FirstSeen, asn.LastSeen)
 			if err != nil {
 				return fmt.Errorf("failed to insert ASN %d: %w", number, err)
 			}
@@ -476,17 +567,15 @@ func (d *Database) GetOrCreateASN(number int, timestamp time.Time) (*ASN, error)
 	}()
 
 	var asn ASN
-	var idStr string
 	var handle, description sql.NullString
-	err = tx.QueryRow("SELECT id, number, handle, description, first_seen, last_seen FROM asns WHERE number = ?", number).
-		Scan(&idStr, &asn.Number, &handle, &description, &asn.FirstSeen, &asn.LastSeen)
+	err = tx.QueryRow("SELECT asn, handle, description, first_seen, last_seen FROM asns WHERE asn = ?", number).
+		Scan(&asn.ASN, &handle, &description, &asn.FirstSeen, &asn.LastSeen)
 
 	if err == nil {
 		// ASN exists, update last_seen
-		asn.ID, _ = uuid.Parse(idStr)
 		asn.Handle = handle.String
 		asn.Description = description.String
-		_, err = tx.Exec("UPDATE asns SET last_seen = ? WHERE id = ?", timestamp, asn.ID.String())
+		_, err = tx.Exec("UPDATE asns SET last_seen = ? WHERE asn = ?", timestamp, number)
 		if err != nil {
 			return nil, err
 		}
@@ -507,8 +596,7 @@ func (d *Database) GetOrCreateASN(number int, timestamp time.Time) (*ASN, error)
 
 	// ASN doesn't exist, create it with ASN info lookup
 	asn = ASN{
-		ID:        generateUUID(),
-		Number:    number,
+		ASN:       number,
 		FirstSeen: timestamp,
 		LastSeen:  timestamp,
 	}
@@ -519,8 +607,8 @@ func (d *Database) GetOrCreateASN(number int, timestamp time.Time) (*ASN, error)
 		asn.Description = info.Description
 	}
 
-	_, err = tx.Exec("INSERT INTO asns (id, number, handle, description, first_seen, last_seen) VALUES (?, ?, ?, ?, ?, ?)",
-		asn.ID.String(), asn.Number, asn.Handle, asn.Description, asn.FirstSeen, asn.LastSeen)
+	_, err = tx.Exec("INSERT INTO asns (asn, handle, description, first_seen, last_seen) VALUES (?, ?, ?, ?, ?)",
+		asn.ASN, asn.Handle, asn.Description, asn.FirstSeen, asn.LastSeen)
 	if err != nil {
 		return nil, err
 	}
@@ -615,10 +703,10 @@ func (d *Database) RecordAnnouncement(announcement *Announcement) error {
 	defer d.unlock()
 
 	err := d.exec(`
-		INSERT INTO announcements (id, prefix_id, asn_id, origin_asn_id, path, next_hop, timestamp, is_withdrawal)
+		INSERT INTO announcements (id, prefix_id, peer_asn, origin_asn, path, next_hop, timestamp, is_withdrawal)
 		VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
 		announcement.ID.String(), announcement.PrefixID.String(),
-		announcement.ASNID.String(), announcement.OriginASNID.String(),
+		announcement.PeerASN, announcement.OriginASN,
 		announcement.Path, announcement.NextHop, announcement.Timestamp, announcement.IsWithdrawal)
 
 	return err
@@ -815,13 +903,13 @@ func (d *Database) GetStatsContext(ctx context.Context) (Stats, error) {
 		return stats, err
 	}
 
-	// Count unique prefixes from live routes tables
-	err = d.db.QueryRowContext(ctx, "SELECT COUNT(DISTINCT prefix) FROM live_routes_v4").Scan(&stats.IPv4Prefixes)
+	// Count prefixes from both tables
+	err = d.db.QueryRowContext(ctx, "SELECT COUNT(*) FROM prefixes_v4").Scan(&stats.IPv4Prefixes)
 	if err != nil {
 		return stats, err
 	}
 
-	err = d.db.QueryRowContext(ctx, "SELECT COUNT(DISTINCT prefix) FROM live_routes_v6").Scan(&stats.IPv6Prefixes)
+	err = d.db.QueryRowContext(ctx, "SELECT COUNT(*) FROM prefixes_v6").Scan(&stats.IPv6Prefixes)
 	if err != nil {
 		return stats, err
 	}
@@ -1246,12 +1334,11 @@ func (d *Database) GetASDetails(asn int) (*ASN, []LiveRoute, error) {
 func (d *Database) GetASDetailsContext(ctx context.Context, asn int) (*ASN, []LiveRoute, error) {
 	// Get AS information
 	var asnInfo ASN
-	var idStr string
 	var handle, description sql.NullString
 	err := d.db.QueryRowContext(ctx,
-		"SELECT id, number, handle, description, first_seen, last_seen FROM asns WHERE number = ?",
+		"SELECT asn, handle, description, first_seen, last_seen FROM asns WHERE asn = ?",
 		asn,
-	).Scan(&idStr, &asnInfo.Number, &handle, &description, &asnInfo.FirstSeen, &asnInfo.LastSeen)
+	).Scan(&asnInfo.ASN, &handle, &description, &asnInfo.FirstSeen, &asnInfo.LastSeen)
 
 	if err != nil {
 		if err == sql.ErrNoRows {
@@ -1261,7 +1348,6 @@ func (d *Database) GetASDetailsContext(ctx context.Context, asn int) (*ASN, []Li
 		return nil, nil, fmt.Errorf("failed to query AS: %w", err)
 	}
 
-	asnInfo.ID, _ = uuid.Parse(idStr)
 	asnInfo.Handle = handle.String
 	asnInfo.Description = description.String
 
@@ -27,6 +27,7 @@ type Store interface {
 
 	// Prefix operations
 	GetOrCreatePrefix(prefix string, timestamp time.Time) (*Prefix, error)
+	UpdatePrefixesBatch(prefixes map[string]time.Time) error
 
 	// Announcement operations
 	RecordAnnouncement(announcement *Announcement) error
@@ -8,8 +8,7 @@ import (
 
 // ASN represents an Autonomous System Number
 type ASN struct {
-	ID          uuid.UUID `json:"id"`
-	Number      int       `json:"number"`
+	ASN         int       `json:"asn"`
 	Handle      string    `json:"handle"`
 	Description string    `json:"description"`
 	FirstSeen   time.Time `json:"first_seen"`
@@ -29,8 +28,8 @@ type Prefix struct {
 type Announcement struct {
 	ID           uuid.UUID `json:"id"`
 	PrefixID     uuid.UUID `json:"prefix_id"`
-	ASNID        uuid.UUID `json:"asn_id"`
-	OriginASNID  uuid.UUID `json:"origin_asn_id"`
+	PeerASN      int       `json:"peer_asn"`
+	OriginASN    int       `json:"origin_asn"`
 	Path         string    `json:"path"` // JSON-encoded AS path
 	NextHop      string    `json:"next_hop"`
 	Timestamp    time.Time `json:"timestamp"`
@@ -40,8 +39,8 @@ type Announcement struct {
 // ASNPeering represents a peering relationship between two ASNs
 type ASNPeering struct {
 	ID        uuid.UUID `json:"id"`
-	FromASNID uuid.UUID `json:"from_asn_id"`
-	ToASNID   uuid.UUID `json:"to_asn_id"`
+	ASA       int       `json:"as_a"`
+	ASB       int       `json:"as_b"`
 	FirstSeen time.Time `json:"first_seen"`
 	LastSeen  time.Time `json:"last_seen"`
 }
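The renamed fields also change the JSON these models serialize to. A small, self-contained sketch of the new ASN shape; the field set is abridged to what the hunk above shows, and the example values are invented.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// asnView mirrors the renamed fields of the ASN model shown above.
type asnView struct {
	ASN         int       `json:"asn"`
	Handle      string    `json:"handle"`
	Description string    `json:"description"`
	FirstSeen   time.Time `json:"first_seen"`
}

func main() {
	a := asnView{ASN: 64496, Handle: "EXAMPLE-AS", Description: "Documentation ASN", FirstSeen: time.Now()}
	out, _ := json.Marshal(a)
	// Prints {"asn":64496,...}; the old "id" and "number" keys are gone.
	fmt.Println(string(out))
}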
@@ -3,8 +3,7 @@
 -- DO NOT make schema changes anywhere else in the codebase.
 
 CREATE TABLE IF NOT EXISTS asns (
-    id TEXT PRIMARY KEY,
-    number INTEGER UNIQUE NOT NULL,
+    asn INTEGER PRIMARY KEY,
     handle TEXT,
     description TEXT,
     first_seen DATETIME NOT NULL,
@@ -30,15 +29,14 @@ CREATE TABLE IF NOT EXISTS prefixes_v6 (
 CREATE TABLE IF NOT EXISTS announcements (
     id TEXT PRIMARY KEY,
     prefix_id TEXT NOT NULL,
-    asn_id TEXT NOT NULL,
-    origin_asn_id TEXT NOT NULL,
+    peer_asn INTEGER NOT NULL,
+    origin_asn INTEGER NOT NULL,
     path TEXT NOT NULL,
     next_hop TEXT,
     timestamp DATETIME NOT NULL,
     is_withdrawal BOOLEAN NOT NULL DEFAULT 0,
-    FOREIGN KEY (prefix_id) REFERENCES prefixes(id),
-    FOREIGN KEY (asn_id) REFERENCES asns(id),
-    FOREIGN KEY (origin_asn_id) REFERENCES asns(id)
+    FOREIGN KEY (peer_asn) REFERENCES asns(asn),
+    FOREIGN KEY (origin_asn) REFERENCES asns(asn)
 );
 
 CREATE TABLE IF NOT EXISTS peerings (
@@ -67,13 +65,14 @@ CREATE INDEX IF NOT EXISTS idx_prefixes_v4_prefix ON prefixes_v4(prefix);
 CREATE INDEX IF NOT EXISTS idx_prefixes_v6_prefix ON prefixes_v6(prefix);
 CREATE INDEX IF NOT EXISTS idx_announcements_timestamp ON announcements(timestamp);
 CREATE INDEX IF NOT EXISTS idx_announcements_prefix_id ON announcements(prefix_id);
-CREATE INDEX IF NOT EXISTS idx_announcements_asn_id ON announcements(asn_id);
+CREATE INDEX IF NOT EXISTS idx_announcements_peer_asn ON announcements(peer_asn);
+CREATE INDEX IF NOT EXISTS idx_announcements_origin_asn ON announcements(origin_asn);
 CREATE INDEX IF NOT EXISTS idx_peerings_as_a ON peerings(as_a);
 CREATE INDEX IF NOT EXISTS idx_peerings_as_b ON peerings(as_b);
 CREATE INDEX IF NOT EXISTS idx_peerings_lookup ON peerings(as_a, as_b);
 
 -- Indexes for asns table
-CREATE INDEX IF NOT EXISTS idx_asns_number ON asns(number);
+CREATE INDEX IF NOT EXISTS idx_asns_asn ON asns(asn);
 
 -- Indexes for bgp_peers table
 CREATE INDEX IF NOT EXISTS idx_bgp_peers_asn ON bgp_peers(peer_asn);
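With announcements now carrying plain integer ASN columns, reads can join straight onto asns.asn instead of going through UUIDs. A hedged illustration of such a query against the schema above; the function, query text, and variable names here are illustrative, not part of the commit.

package example

import (
	"database/sql"
	"time"
)

// recentOriginHandles joins announcements.origin_asn directly to asns.asn
// (both plain integers under the new schema) and returns handle by ASN.
func recentOriginHandles(db *sql.DB, since time.Time) (map[int]string, error) {
	rows, err := db.Query(`
		SELECT a.origin_asn, COALESCE(s.handle, '')
		FROM announcements a
		JOIN asns s ON s.asn = a.origin_asn
		WHERE a.timestamp >= ?`, since)
	if err != nil {
		return nil, err
	}
	defer func() { _ = rows.Close() }()

	handles := make(map[int]string)
	for rows.Next() {
		var asn int
		var handle string
		if err := rows.Scan(&asn, &handle); err != nil {
			return nil, err
		}
		handles[asn] = handle
	}
	return handles, rows.Err()
}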
@@ -61,8 +61,7 @@ func (m *mockStore) GetOrCreateASN(number int, timestamp time.Time) (*database.A
 	}
 
 	asn := &database.ASN{
-		ID:        uuid.New(),
-		Number:    number,
+		ASN:       number,
 		FirstSeen: timestamp,
 		LastSeen:  timestamp,
 	}
@@ -72,6 +71,37 @@ func (m *mockStore) GetOrCreateASN(number int, timestamp time.Time) (*database.A
 	return asn, nil
 }
 
+// UpdatePrefixesBatch mock implementation
+func (m *mockStore) UpdatePrefixesBatch(prefixes map[string]time.Time) error {
+	m.mu.Lock()
+	defer m.mu.Unlock()
+
+	for prefix, timestamp := range prefixes {
+		if p, exists := m.Prefixes[prefix]; exists {
+			p.LastSeen = timestamp
+		} else {
+			const (
+				ipVersionV4 = 4
+				ipVersionV6 = 6
+			)
+
+			ipVersion := ipVersionV4
+			if strings.Contains(prefix, ":") {
+				ipVersion = ipVersionV6
+			}
+
+			m.Prefixes[prefix] = &database.Prefix{
+				ID:        uuid.New(),
+				Prefix:    prefix,
+				IPVersion: ipVersion,
+				FirstSeen: timestamp,
+				LastSeen:  timestamp,
+			}
+		}
+	}
+	return nil
+}
+
 // GetOrCreatePrefix mock implementation
 func (m *mockStore) GetOrCreatePrefix(prefix string, timestamp time.Time) (*database.Prefix, error) {
 	m.mu.Lock()
@@ -302,8 +332,7 @@ func (m *mockStore) GetOrCreateASNBatch(asns map[int]time.Time) error {
 	for number, timestamp := range asns {
 		if _, exists := m.ASNs[number]; !exists {
 			m.ASNs[number] = &database.ASN{
-				ID:        uuid.New(),
-				Number:    number,
+				ASN:       number,
 				FirstSeen: timestamp,
 				LastSeen:  timestamp,
 			}
@@ -182,9 +182,15 @@ func (h *PrefixHandler) flushBatchLocked() {
 	var routesToUpsert []*database.LiveRoute
 	var routesToDelete []database.LiveRouteDeletion
 
-	// Skip the prefix table updates entirely - just update live_routes
-	// The prefix table is not critical for routing lookups
+	// Collect unique prefixes to update
+	prefixesToUpdate := make(map[string]time.Time)
+
 	for _, update := range prefixMap {
+		// Track prefix for both announcements and withdrawals
+		if _, exists := prefixesToUpdate[update.prefix]; !exists || update.timestamp.After(prefixesToUpdate[update.prefix]) {
+			prefixesToUpdate[update.prefix] = update.timestamp
+		}
+
 		if update.messageType == "announcement" && update.originASN > 0 {
 			// Create live route for batch upsert
 			route := h.createLiveRoute(update)
@@ -228,6 +234,13 @@ func (h *PrefixHandler) flushBatchLocked() {
 		}
 	}
 
+	// Update prefix tables
+	if len(prefixesToUpdate) > 0 {
+		if err := h.db.UpdatePrefixesBatch(prefixesToUpdate); err != nil {
+			h.logger.Error("Failed to update prefix batch", "error", err, "count", len(prefixesToUpdate))
+		}
+	}
+
 	elapsed := time.Since(startTime)
 	h.logger.Debug("Flushed prefix batch",
 		"batch_size", batchSize,
@@ -605,7 +605,7 @@ func (s *Server) handlePrefixDetail() http.HandlerFunc {
 
 		// Group by origin AS and collect unique AS info
 		type ASNInfo struct {
-			Number      int
+			ASN         int
 			Handle      string
 			Description string
 			PeerCount   int
@@ -622,7 +622,7 @@ func (s *Server) handlePrefixDetail() http.HandlerFunc {
 				description = asInfo.Description
 			}
 			originMap[route.OriginASN] = &ASNInfo{
-				Number:      route.OriginASN,
+				ASN:         route.OriginASN,
 				Handle:      handle,
 				Description: description,
 				PeerCount:   0,
@@ -655,7 +655,7 @@ func (s *Server) handlePrefixDetail() http.HandlerFunc {
 
 		// Create enhanced routes with AS path handles
 		type ASPathEntry struct {
-			Number int
+			ASN    int
 			Handle string
 		}
 		type EnhancedRoute struct {
@@ -674,7 +674,7 @@ func (s *Server) handlePrefixDetail() http.HandlerFunc {
 		for j, asn := range route.ASPath {
 			handle := asinfo.GetHandle(asn)
 			enhancedRoute.ASPathWithHandle[j] = ASPathEntry{
-				Number: asn,
+				ASN:    asn,
 				Handle: handle,
 			}
 		}
log.txt (+21 lines)
@@ -113308,3 +113308,24 @@
 {"time":"2025-07-28T22:44:43.48873+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"GetOrCreateASNBatch","caller":"database.go:376"}
 2025/07/28 22:44:43 [akrotiri/2FBRup0aOv-000241] "GET http://127.0.0.1:8080/api/v1/stats HTTP/1.1" from 127.0.0.1:50416 - 200 2968B in 45.233084ms
 {"time":"2025-07-28T22:44:43.511196+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"GetOrCreateASNBatch (database.go:376)","duration_ms":22}
+2025/07/28 22:44:43 [akrotiri/2FBRup0aOv-000242] "GET http://127.0.0.1:8080/api/v1/stats HTTP/1.1" from 127.0.0.1:50416 - 200 2967B in 27.269375ms
+{"time":"2025-07-28T22:44:44.240499+02:00","level":"DEBUG","msg":"Acquiring database lock","source":"database.go:147","func":"database.(*Database).lock","operation":"UpsertLiveRouteBatch","caller":"database.go:184"}
+{"time":"2025-07-28T22:44:44.240515+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"UpsertLiveRouteBatch","caller":"database.go:184"}
+{"time":"2025-07-28T22:44:44.36068+02:00","level":"DEBUG","msg":"Acquiring database lock","source":"database.go:147","func":"database.(*Database).lock","operation":"UpdatePeerBatch","caller":"database.go:694"}
+{"time":"2025-07-28T22:44:44.361365+02:00","level":"DEBUG","msg":"Acquiring database lock","source":"database.go:147","func":"database.(*Database).lock","operation":"GetOrCreateASNBatch","caller":"database.go:376"}
+{"time":"2025-07-28T22:44:44.410976+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"UpsertLiveRouteBatch (database.go:184)","duration_ms":170}
+{"time":"2025-07-28T22:44:44.410986+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"UpdatePeerBatch","caller":"database.go:694"}
+{"time":"2025-07-28T22:44:44.410992+02:00","level":"DEBUG","msg":"Acquiring database lock","source":"database.go:147","func":"database.(*Database).lock","operation":"DeleteLiveRouteBatch","caller":"database.go:291"}
+{"time":"2025-07-28T22:44:44.414786+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"UpdatePeerBatch (database.go:694)","duration_ms":3}
+{"time":"2025-07-28T22:44:44.414796+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"GetOrCreateASNBatch","caller":"database.go:376"}
+{"time":"2025-07-28T22:44:44.42383+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"GetOrCreateASNBatch (database.go:376)","duration_ms":9}
+{"time":"2025-07-28T22:44:44.423855+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"DeleteLiveRouteBatch","caller":"database.go:291"}
+{"time":"2025-07-28T22:44:44.425152+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"DeleteLiveRouteBatch (database.go:291)","duration_ms":1}
+{"time":"2025-07-28T22:44:44.425159+02:00","level":"DEBUG","msg":"Flushed prefix batch","source":"prefixhandler.go:232","func":"routewatch.(*PrefixHandler).flushBatchLocked","batch_size":24228,"unique_prefixes":8580,"success":8580,"duration_ms":194}
+2025/07/28 22:44:44 [akrotiri/2FBRup0aOv-000243] "GET http://127.0.0.1:8080/api/v1/stats HTTP/1.1" from 127.0.0.1:50416 - 200 2969B in 18.925ms
+2025/07/28 22:44:44 [akrotiri/2FBRup0aOv-000244] "GET http://127.0.0.1:8080/api/v1/stats HTTP/1.1" from 127.0.0.1:50416 - 200 2969B in 20.872ms
+{"time":"2025-07-28T22:44:45.326817+02:00","level":"DEBUG","msg":"Acquiring database lock","source":"database.go:147","func":"database.(*Database).lock","operation":"GetOrCreateASNBatch","caller":"database.go:376"}
+{"time":"2025-07-28T22:44:45.326846+02:00","level":"DEBUG","msg":"Database lock acquired","source":"database.go:153","func":"database.(*Database).lock","operation":"GetOrCreateASNBatch","caller":"database.go:376"}
+{"time":"2025-07-28T22:44:45.353652+02:00","level":"DEBUG","msg":"Database lock released","source":"database.go:165","func":"database.(*Database).unlock","held_by":"GetOrCreateASNBatch (database.go:376)","duration_ms":26}
+{"time":"2025-07-28T22:44:45.408399+02:00","level":"WARN","msg":"BGP notification","source":"streamer.go:517","func":"streamer.(*Streamer).stream","peer":"198.32.160.113","peer_asn":"15547"}
+2025/07/28 22:44:45 [akrotiri/2FBRup0aOv-000245] "GET http://127.0.0.1:8080/api/v1/stats HTTP/1.1" from 127.0.0.1:50416 - 200 2969B in 31.485542ms