refactor: use pinned golangci-lint Docker image for linting
All checks were successful
check / check (push) Successful in 1m41s
Refactor Dockerfile to use a separate lint stage with a pinned golangci-lint v2.11.3 Docker image instead of installing golangci-lint via curl in the builder stage. This follows the pattern used by sneak/pixa.

Changes:

- Dockerfile: separate lint stage using golangci/golangci-lint:v2.11.3 (Debian-based, pinned by sha256) with COPY --from=lint dependency
- Bump Go from 1.24 to 1.26.1 (golang:1.26.1-bookworm, pinned)
- Bump golangci-lint from v1.64.8 to v2.11.3
- Migrate .golangci.yml from v1 to v2 format (same linters, format only)
- All Docker images pinned by sha256 digest
- Fix all lint issues from the v2 linter upgrade:
  - Add package comments to all packages
  - Add doc comments to all exported types, functions, and methods
  - Fix unchecked errors (errcheck)
  - Fix unused parameters (revive)
  - Fix gosec warnings (MaxBytesReader for form parsing)
  - Fix staticcheck suggestions (fmt.Fprintf instead of WriteString)
  - Rename DeliveryTask to Task to avoid stutter (delivery.Task)
  - Rename shadowed builtin 'max' parameter
- Update README.md version requirements
This commit is contained in:
@@ -34,7 +34,7 @@ func testMainDB(t *testing.T) *gorm.DB {
|
||||
|
||||
sqlDB, err := sql.Open("sqlite", dsn)
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() { sqlDB.Close() })
|
||||
t.Cleanup(func() { _ = sqlDB.Close() })
|
||||
|
||||
db, err := gorm.Open(sqlite.Dialector{Conn: sqlDB}, &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
@@ -80,8 +80,8 @@ func testEngineWithDB(t *testing.T, mainDB *gorm.DB, dbMgr *database.WebhookDBMa
|
||||
dbManager: dbMgr,
|
||||
log: slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug})),
|
||||
client: &http.Client{Timeout: 5 * time.Second},
|
||||
deliveryCh: make(chan DeliveryTask, deliveryChannelSize),
|
||||
retryCh: make(chan DeliveryTask, retryChannelSize),
|
||||
deliveryCh: make(chan Task, deliveryChannelSize),
|
||||
retryCh: make(chan Task, retryChannelSize),
|
||||
workers: 2,
|
||||
}
|
||||
}
|
||||
@@ -101,7 +101,7 @@ func TestProcessNewTask_InlineBody(t *testing.T) {
|
||||
received.Store(true)
|
||||
assert.Equal(t, "application/json", r.Header.Get("Content-Type"))
|
||||
w.WriteHeader(http.StatusOK)
|
||||
fmt.Fprint(w, `{"ok":true}`)
|
||||
_, _ = fmt.Fprint(w, `{"ok":true}`)
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
@@ -128,7 +128,7 @@ func TestProcessNewTask_InlineBody(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -196,7 +196,7 @@ func TestProcessNewTask_LargeBody_FetchFromDB(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
// Body is nil — engine should fetch from DB
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -231,7 +231,7 @@ func TestProcessNewTask_InvalidWebhookID(t *testing.T) {
|
||||
|
||||
// Use a webhook ID that has no database
|
||||
// GetDB will create it lazily in the real impl, but the event won't exist
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: uuid.New().String(),
|
||||
EventID: uuid.New().String(),
|
||||
WebhookID: uuid.New().String(),
|
||||
@@ -285,7 +285,7 @@ func TestProcessRetryTask_SuccessfulRetry(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -340,7 +340,7 @@ func TestProcessRetryTask_SkipsNonRetryingDelivery(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -399,7 +399,7 @@ func TestProcessRetryTask_LargeBody_FetchFromDB(t *testing.T) {
|
||||
}
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -456,7 +456,7 @@ func TestWorkerLifecycle_StartStop(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -472,7 +472,7 @@ func TestWorkerLifecycle_StartStop(t *testing.T) {
|
||||
AttemptNum: 1,
|
||||
}
|
||||
|
||||
e.Notify([]DeliveryTask{task})
|
||||
e.Notify([]Task{task})
|
||||
|
||||
// Wait for the worker to process the task
|
||||
require.Eventually(t, func() bool {
|
||||
@@ -526,7 +526,7 @@ func TestWorkerLifecycle_ProcessesRetryChannel(t *testing.T) {
|
||||
|
||||
// Send task directly to retry channel
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -597,7 +597,7 @@ func TestProcessDelivery_UnknownTargetType(t *testing.T) {
|
||||
}
|
||||
d.ID = delivery.ID
|
||||
|
||||
task := &DeliveryTask{
|
||||
task := &Task{
|
||||
DeliveryID: delivery.ID,
|
||||
TargetType: database.TargetType("unknown"),
|
||||
}
|
||||
@@ -867,7 +867,7 @@ func TestDeliverHTTP_CustomTargetHeaders(t *testing.T) {
|
||||
require.NoError(t, webhookDB.Create(&delivery).Error)
|
||||
|
||||
bodyStr := event.Body
|
||||
task := DeliveryTask{
|
||||
task := Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: webhookID,
|
||||
@@ -914,7 +914,7 @@ func TestDeliverHTTP_TargetTimeout(t *testing.T) {
|
||||
event := seedEvent(t, db, `{"timeout":"test"}`)
|
||||
delivery := seedDelivery(t, db, event.ID, targetID, database.DeliveryStatusPending)
|
||||
|
||||
task := &DeliveryTask{
|
||||
task := &Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: event.WebhookID,
|
||||
@@ -964,7 +964,7 @@ func TestDeliverHTTP_InvalidConfig(t *testing.T) {
|
||||
event := seedEvent(t, db, `{"config":"invalid"}`)
|
||||
delivery := seedDelivery(t, db, event.ID, targetID, database.DeliveryStatusPending)
|
||||
|
||||
task := &DeliveryTask{
|
||||
task := &Task{
|
||||
DeliveryID: delivery.ID,
|
||||
EventID: event.ID,
|
||||
WebhookID: event.WebhookID,
|
||||
@@ -1002,9 +1002,9 @@ func TestNotify_MultipleTasks(t *testing.T) {
|
||||
t.Parallel()
|
||||
e := testEngine(t, 1)
|
||||
|
||||
tasks := make([]DeliveryTask, 5)
|
||||
tasks := make([]Task, 5)
|
||||
for i := range tasks {
|
||||
tasks[i] = DeliveryTask{
|
||||
tasks[i] = Task{
|
||||
DeliveryID: fmt.Sprintf("task-%d", i),
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user