Compare commits
1 Commits
feature/gi
...
feature/ba
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bb91f314c5 |
@@ -1,16 +1,17 @@
|
|||||||
# µPaaS by [@sneak](https://sneak.berlin)
|
# µPaaS by [@sneak](https://sneak.berlin)
|
||||||
|
|
||||||
A simple self-hosted PaaS that auto-deploys Docker containers from Git repositories via webhooks from Gitea, GitHub, or GitLab.
|
A simple self-hosted PaaS that auto-deploys Docker containers from Git repositories via Gitea webhooks.
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- Single admin user with argon2id password hashing
|
- Single admin user with argon2id password hashing
|
||||||
- Per-app SSH keypairs for read-only deploy keys
|
- Per-app SSH keypairs for read-only deploy keys
|
||||||
- Per-app UUID-based webhook URLs with auto-detection of Gitea, GitHub, and GitLab
|
- Per-app UUID-based webhook URLs for Gitea integration
|
||||||
- Branch filtering - only deploy on configured branch changes
|
- Branch filtering - only deploy on configured branch changes
|
||||||
- Environment variables, labels, and volume mounts per app
|
- Environment variables, labels, and volume mounts per app
|
||||||
- Docker builds via socket access
|
- Docker builds via socket access
|
||||||
- Notifications via ntfy and Slack-compatible webhooks
|
- Notifications via ntfy and Slack-compatible webhooks
|
||||||
|
- Backup/restore of app configurations (JSON export/import via UI and API)
|
||||||
- Simple server-rendered UI with Tailwind CSS
|
- Simple server-rendered UI with Tailwind CSS
|
||||||
|
|
||||||
## Non-Goals
|
## Non-Goals
|
||||||
@@ -19,7 +20,7 @@ A simple self-hosted PaaS that auto-deploys Docker containers from Git repositor
|
|||||||
- Complex CI pipelines
|
- Complex CI pipelines
|
||||||
- Multiple container orchestration
|
- Multiple container orchestration
|
||||||
- SPA/API-first design
|
- SPA/API-first design
|
||||||
- Support for non-push webhook events (e.g. issues, merge requests)
|
- Support for non-Gitea webhooks
|
||||||
|
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
@@ -44,7 +45,7 @@ upaas/
|
|||||||
│ │ ├── auth/ # Authentication service
|
│ │ ├── auth/ # Authentication service
|
||||||
│ │ ├── deploy/ # Deployment orchestration
|
│ │ ├── deploy/ # Deployment orchestration
|
||||||
│ │ ├── notify/ # Notifications (ntfy, Slack)
|
│ │ ├── notify/ # Notifications (ntfy, Slack)
|
||||||
│ │ └── webhook/ # Webhook processing (Gitea, GitHub, GitLab)
|
│ │ └── webhook/ # Gitea webhook processing
|
||||||
│ └── ssh/ # SSH key generation
|
│ └── ssh/ # SSH key generation
|
||||||
├── static/ # Embedded CSS/JS assets
|
├── static/ # Embedded CSS/JS assets
|
||||||
└── templates/ # Embedded HTML templates
|
└── templates/ # Embedded HTML templates
|
||||||
|
|||||||
@@ -27,6 +27,11 @@ func apiRouter(tc *testContext) http.Handler {
|
|||||||
apiR.Get("/apps", tc.handlers.HandleAPIListApps())
|
apiR.Get("/apps", tc.handlers.HandleAPIListApps())
|
||||||
apiR.Get("/apps/{id}", tc.handlers.HandleAPIGetApp())
|
apiR.Get("/apps/{id}", tc.handlers.HandleAPIGetApp())
|
||||||
apiR.Get("/apps/{id}/deployments", tc.handlers.HandleAPIListDeployments())
|
apiR.Get("/apps/{id}/deployments", tc.handlers.HandleAPIListDeployments())
|
||||||
|
|
||||||
|
// Backup/Restore API
|
||||||
|
apiR.Get("/apps/{id}/export", tc.handlers.HandleAPIExportApp())
|
||||||
|
apiR.Get("/backup/export", tc.handlers.HandleAPIExportAllApps())
|
||||||
|
apiR.Post("/backup/import", tc.handlers.HandleAPIImportApps())
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
282
internal/handlers/backup.go
Normal file
282
internal/handlers/backup.go
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
|
||||||
|
"sneak.berlin/go/upaas/internal/models"
|
||||||
|
"sneak.berlin/go/upaas/internal/service/app"
|
||||||
|
"sneak.berlin/go/upaas/templates"
|
||||||
|
)
|
||||||
|
|
||||||
|
// importMaxBodyBytes is the maximum allowed request body size for backup import (10 MB).
|
||||||
|
const importMaxBodyBytes = 10 << 20
|
||||||
|
|
||||||
|
// HandleExportApp exports a single app's configuration as a JSON download.
|
||||||
|
func (h *Handlers) HandleExportApp() http.HandlerFunc {
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
appID := chi.URLParam(request, "id")
|
||||||
|
|
||||||
|
application, findErr := models.FindApp(request.Context(), h.db, appID)
|
||||||
|
if findErr != nil || application == nil {
|
||||||
|
http.NotFound(writer, request)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
bundle, exportErr := h.appService.ExportApp(request.Context(), application)
|
||||||
|
if exportErr != nil {
|
||||||
|
h.log.Error("failed to export app", "error", exportErr, "app", application.Name)
|
||||||
|
http.Error(writer, "Internal Server Error", http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
filename := fmt.Sprintf("upaas-backup-%s-%s.json",
|
||||||
|
application.Name,
|
||||||
|
time.Now().UTC().Format("20060102-150405"),
|
||||||
|
)
|
||||||
|
|
||||||
|
writer.Header().Set("Content-Type", "application/json")
|
||||||
|
writer.Header().Set("Content-Disposition",
|
||||||
|
`attachment; filename="`+filename+`"`)
|
||||||
|
|
||||||
|
encoder := json.NewEncoder(writer)
|
||||||
|
encoder.SetIndent("", " ")
|
||||||
|
|
||||||
|
err := encoder.Encode(bundle)
|
||||||
|
if err != nil {
|
||||||
|
h.log.Error("failed to encode backup", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleExportAllApps exports all app configurations as a JSON download.
|
||||||
|
func (h *Handlers) HandleExportAllApps() http.HandlerFunc {
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
bundle, exportErr := h.appService.ExportAllApps(request.Context())
|
||||||
|
if exportErr != nil {
|
||||||
|
h.log.Error("failed to export all apps", "error", exportErr)
|
||||||
|
http.Error(writer, "Internal Server Error", http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
filename := fmt.Sprintf("upaas-backup-all-%s.json",
|
||||||
|
time.Now().UTC().Format("20060102-150405"),
|
||||||
|
)
|
||||||
|
|
||||||
|
writer.Header().Set("Content-Type", "application/json")
|
||||||
|
writer.Header().Set("Content-Disposition",
|
||||||
|
`attachment; filename="`+filename+`"`)
|
||||||
|
|
||||||
|
encoder := json.NewEncoder(writer)
|
||||||
|
encoder.SetIndent("", " ")
|
||||||
|
|
||||||
|
err := encoder.Encode(bundle)
|
||||||
|
if err != nil {
|
||||||
|
h.log.Error("failed to encode backup", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleImportPage renders the import/restore page.
|
||||||
|
func (h *Handlers) HandleImportPage() http.HandlerFunc {
|
||||||
|
tmpl := templates.GetParsed()
|
||||||
|
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
data := h.addGlobals(map[string]any{
|
||||||
|
"Success": request.URL.Query().Get("success"),
|
||||||
|
}, request)
|
||||||
|
|
||||||
|
h.renderTemplate(writer, tmpl, "backup_import.html", data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleImportApps processes an uploaded backup JSON file and imports apps.
|
||||||
|
func (h *Handlers) HandleImportApps() http.HandlerFunc {
|
||||||
|
tmpl := templates.GetParsed()
|
||||||
|
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
bundle, parseErr := h.parseBackupUpload(request)
|
||||||
|
if parseErr != "" {
|
||||||
|
data := h.addGlobals(map[string]any{"Error": parseErr}, request)
|
||||||
|
h.renderTemplate(writer, tmpl, "backup_import.html", data)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
imported, skipped, importErr := h.appService.ImportApps(
|
||||||
|
request.Context(), bundle,
|
||||||
|
)
|
||||||
|
if importErr != nil {
|
||||||
|
h.log.Error("failed to import apps", "error", importErr)
|
||||||
|
|
||||||
|
data := h.addGlobals(map[string]any{
|
||||||
|
"Error": "Import failed: " + importErr.Error(),
|
||||||
|
}, request)
|
||||||
|
h.renderTemplate(writer, tmpl, "backup_import.html", data)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
successMsg := fmt.Sprintf("Imported %d app(s)", len(imported))
|
||||||
|
if len(skipped) > 0 {
|
||||||
|
successMsg += fmt.Sprintf(", skipped %d (name conflict)", len(skipped))
|
||||||
|
}
|
||||||
|
|
||||||
|
http.Redirect(writer, request,
|
||||||
|
"/backup/import?success="+successMsg,
|
||||||
|
http.StatusSeeOther,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseBackupUpload extracts and validates a BackupBundle from a multipart upload.
|
||||||
|
// Returns the bundle and an empty string on success, or nil and an error message.
|
||||||
|
func (h *Handlers) parseBackupUpload(
|
||||||
|
request *http.Request,
|
||||||
|
) (*app.BackupBundle, string) {
|
||||||
|
request.Body = http.MaxBytesReader(nil, request.Body, importMaxBodyBytes)
|
||||||
|
|
||||||
|
parseErr := request.ParseMultipartForm(importMaxBodyBytes)
|
||||||
|
if parseErr != nil {
|
||||||
|
return nil, "Failed to parse upload: " + parseErr.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
file, _, openErr := request.FormFile("backup_file")
|
||||||
|
if openErr != nil {
|
||||||
|
return nil, "Please select a backup file to import"
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
|
||||||
|
decodeErr := json.NewDecoder(file).Decode(&bundle)
|
||||||
|
if decodeErr != nil {
|
||||||
|
return nil, "Invalid backup file: " + decodeErr.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
if bundle.Version != 1 {
|
||||||
|
return nil, fmt.Sprintf(
|
||||||
|
"Unsupported backup version: %d (expected 1)", bundle.Version,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(bundle.Apps) == 0 {
|
||||||
|
return nil, "Backup file contains no apps"
|
||||||
|
}
|
||||||
|
|
||||||
|
return &bundle, ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleAPIExportApp exports a single app's configuration as JSON via API.
|
||||||
|
func (h *Handlers) HandleAPIExportApp() http.HandlerFunc {
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
appID := chi.URLParam(request, "id")
|
||||||
|
|
||||||
|
application, err := h.appService.GetApp(request.Context(), appID)
|
||||||
|
if err != nil {
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "internal server error"},
|
||||||
|
http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if application == nil {
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "app not found"},
|
||||||
|
http.StatusNotFound)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
bundle, exportErr := h.appService.ExportApp(request.Context(), application)
|
||||||
|
if exportErr != nil {
|
||||||
|
h.log.Error("failed to export app", "error", exportErr)
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "failed to export app"},
|
||||||
|
http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
h.respondJSON(writer, request, bundle, http.StatusOK)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleAPIExportAllApps exports all app configurations as JSON via API.
|
||||||
|
func (h *Handlers) HandleAPIExportAllApps() http.HandlerFunc {
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
bundle, exportErr := h.appService.ExportAllApps(request.Context())
|
||||||
|
if exportErr != nil {
|
||||||
|
h.log.Error("failed to export all apps", "error", exportErr)
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "failed to export apps"},
|
||||||
|
http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
h.respondJSON(writer, request, bundle, http.StatusOK)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleAPIImportApps imports app configurations from a JSON request body via API.
|
||||||
|
func (h *Handlers) HandleAPIImportApps() http.HandlerFunc {
|
||||||
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
|
request.Body = http.MaxBytesReader(writer, request.Body, importMaxBodyBytes)
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
|
||||||
|
decodeErr := json.NewDecoder(request.Body).Decode(&bundle)
|
||||||
|
if decodeErr != nil {
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "invalid request body"},
|
||||||
|
http.StatusBadRequest)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if bundle.Version != 1 {
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": fmt.Sprintf(
|
||||||
|
"unsupported backup version: %d", bundle.Version,
|
||||||
|
)},
|
||||||
|
http.StatusBadRequest)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(bundle.Apps) == 0 {
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "backup contains no apps"},
|
||||||
|
http.StatusBadRequest)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
imported, skipped, importErr := h.appService.ImportApps(
|
||||||
|
request.Context(), &bundle,
|
||||||
|
)
|
||||||
|
if importErr != nil {
|
||||||
|
h.log.Error("api: failed to import apps", "error", importErr)
|
||||||
|
h.respondJSON(writer, request,
|
||||||
|
map[string]string{"error": "import failed: " + importErr.Error()},
|
||||||
|
http.StatusInternalServerError)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
h.respondJSON(writer, request, map[string]any{
|
||||||
|
"imported": imported,
|
||||||
|
"skipped": skipped,
|
||||||
|
}, http.StatusOK)
|
||||||
|
}
|
||||||
|
}
|
||||||
582
internal/handlers/backup_test.go
Normal file
582
internal/handlers/backup_test.go
Normal file
@@ -0,0 +1,582 @@
|
|||||||
|
package handlers_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
|
"sneak.berlin/go/upaas/internal/models"
|
||||||
|
"sneak.berlin/go/upaas/internal/service/app"
|
||||||
|
)
|
||||||
|
|
||||||
|
// createTestAppWithConfig creates an app with env vars, labels, volumes, and ports.
|
||||||
|
func createTestAppWithConfig(
|
||||||
|
t *testing.T,
|
||||||
|
tc *testContext,
|
||||||
|
name string,
|
||||||
|
) *models.App {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
createdApp := createTestApp(t, tc, name)
|
||||||
|
|
||||||
|
// Add env vars
|
||||||
|
ev := models.NewEnvVar(tc.database)
|
||||||
|
ev.AppID = createdApp.ID
|
||||||
|
ev.Key = "DATABASE_URL"
|
||||||
|
ev.Value = "postgres://localhost/mydb"
|
||||||
|
require.NoError(t, ev.Save(context.Background()))
|
||||||
|
|
||||||
|
// Add label
|
||||||
|
label := models.NewLabel(tc.database)
|
||||||
|
label.AppID = createdApp.ID
|
||||||
|
label.Key = "traefik.enable"
|
||||||
|
label.Value = "true"
|
||||||
|
require.NoError(t, label.Save(context.Background()))
|
||||||
|
|
||||||
|
// Add volume
|
||||||
|
volume := models.NewVolume(tc.database)
|
||||||
|
volume.AppID = createdApp.ID
|
||||||
|
volume.HostPath = "/data/app"
|
||||||
|
volume.ContainerPath = "/app/data"
|
||||||
|
volume.ReadOnly = false
|
||||||
|
require.NoError(t, volume.Save(context.Background()))
|
||||||
|
|
||||||
|
// Add port
|
||||||
|
port := models.NewPort(tc.database)
|
||||||
|
port.AppID = createdApp.ID
|
||||||
|
port.HostPort = 8080
|
||||||
|
port.ContainerPort = 80
|
||||||
|
port.Protocol = models.PortProtocolTCP
|
||||||
|
require.NoError(t, port.Save(context.Background()))
|
||||||
|
|
||||||
|
return createdApp
|
||||||
|
}
|
||||||
|
|
||||||
|
// createTestAppWithConfigPort creates an app with a custom host port.
|
||||||
|
func createTestAppWithConfigPort(
|
||||||
|
t *testing.T,
|
||||||
|
tc *testContext,
|
||||||
|
name string,
|
||||||
|
hostPort int,
|
||||||
|
) *models.App {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
createdApp := createTestApp(t, tc, name)
|
||||||
|
|
||||||
|
ev := models.NewEnvVar(tc.database)
|
||||||
|
ev.AppID = createdApp.ID
|
||||||
|
ev.Key = "DATABASE_URL"
|
||||||
|
ev.Value = "postgres://localhost/mydb"
|
||||||
|
require.NoError(t, ev.Save(context.Background()))
|
||||||
|
|
||||||
|
port := models.NewPort(tc.database)
|
||||||
|
port.AppID = createdApp.ID
|
||||||
|
port.HostPort = hostPort
|
||||||
|
port.ContainerPort = 80
|
||||||
|
port.Protocol = models.PortProtocolTCP
|
||||||
|
require.NoError(t, port.Save(context.Background()))
|
||||||
|
|
||||||
|
return createdApp
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleExportApp(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
createdApp := createTestAppWithConfig(t, testCtx, "export-test-app")
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/apps/"+createdApp.ID+"/export", nil)
|
||||||
|
request = addChiURLParams(request, map[string]string{"id": createdApp.ID})
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleExportApp()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Header().Get("Content-Type"), "application/json")
|
||||||
|
assert.Contains(t, recorder.Header().Get("Content-Disposition"), "attachment")
|
||||||
|
assert.Contains(t, recorder.Header().Get("Content-Disposition"), "export-test-app")
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
require.NoError(t, json.Unmarshal(recorder.Body.Bytes(), &bundle))
|
||||||
|
|
||||||
|
assert.Equal(t, 1, bundle.Version)
|
||||||
|
assert.NotEmpty(t, bundle.ExportedAt)
|
||||||
|
require.Len(t, bundle.Apps, 1)
|
||||||
|
|
||||||
|
appBackup := bundle.Apps[0]
|
||||||
|
assert.Equal(t, "export-test-app", appBackup.Name)
|
||||||
|
assert.Equal(t, "main", appBackup.Branch)
|
||||||
|
assert.Len(t, appBackup.EnvVars, 1)
|
||||||
|
assert.Equal(t, "DATABASE_URL", appBackup.EnvVars[0].Key)
|
||||||
|
assert.Equal(t, "postgres://localhost/mydb", appBackup.EnvVars[0].Value)
|
||||||
|
assert.Len(t, appBackup.Labels, 1)
|
||||||
|
assert.Equal(t, "traefik.enable", appBackup.Labels[0].Key)
|
||||||
|
assert.Len(t, appBackup.Volumes, 1)
|
||||||
|
assert.Equal(t, "/data/app", appBackup.Volumes[0].HostPath)
|
||||||
|
assert.Len(t, appBackup.Ports, 1)
|
||||||
|
assert.Equal(t, 8080, appBackup.Ports[0].HostPort)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleExportAppNotFound(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/apps/nonexistent/export", nil)
|
||||||
|
request = addChiURLParams(request, map[string]string{"id": "nonexistent"})
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleExportApp()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusNotFound, recorder.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleExportAllApps(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
createTestAppWithConfig(t, testCtx, "export-all-app1")
|
||||||
|
createTestAppWithConfigPort(t, testCtx, "export-all-app2", 8081)
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/backup/export", nil)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleExportAllApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Header().Get("Content-Disposition"), "upaas-backup-all")
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
require.NoError(t, json.Unmarshal(recorder.Body.Bytes(), &bundle))
|
||||||
|
|
||||||
|
assert.Equal(t, 1, bundle.Version)
|
||||||
|
assert.Len(t, bundle.Apps, 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleExportAllAppsEmpty(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/backup/export", nil)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleExportAllApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
require.NoError(t, json.Unmarshal(recorder.Body.Bytes(), &bundle))
|
||||||
|
|
||||||
|
assert.Empty(t, bundle.Apps)
|
||||||
|
}
|
||||||
|
|
||||||
|
// createMultipartBackupRequest builds a multipart form request with backup JSON as a file upload.
|
||||||
|
func createMultipartBackupRequest(
|
||||||
|
t *testing.T,
|
||||||
|
backupJSON string,
|
||||||
|
) *http.Request {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
var body bytes.Buffer
|
||||||
|
|
||||||
|
writer := multipart.NewWriter(&body)
|
||||||
|
|
||||||
|
part, err := writer.CreateFormFile("backup_file", "backup.json")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = io.WriteString(part, backupJSON)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
require.NoError(t, writer.Close())
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/backup/import", &body)
|
||||||
|
request.Header.Set("Content-Type", writer.FormDataContentType())
|
||||||
|
|
||||||
|
return request
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportApps(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
backupJSON := `{
|
||||||
|
"version": 1,
|
||||||
|
"exportedAt": "2025-01-01T00:00:00Z",
|
||||||
|
"apps": [{
|
||||||
|
"name": "imported-app",
|
||||||
|
"repoUrl": "git@example.com:user/repo.git",
|
||||||
|
"branch": "main",
|
||||||
|
"dockerfilePath": "Dockerfile",
|
||||||
|
"envVars": [{"key": "FOO", "value": "bar"}],
|
||||||
|
"labels": [{"key": "app.name", "value": "test"}],
|
||||||
|
"volumes": [{"hostPath": "/data", "containerPath": "/app/data", "readOnly": true}],
|
||||||
|
"ports": [{"hostPort": 3000, "containerPort": 8080, "protocol": "tcp"}]
|
||||||
|
}]
|
||||||
|
}`
|
||||||
|
|
||||||
|
request := createMultipartBackupRequest(t, backupJSON)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
// Should redirect on success
|
||||||
|
assert.Equal(t, http.StatusSeeOther, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Header().Get("Location"), "success=")
|
||||||
|
|
||||||
|
// Verify the app was created
|
||||||
|
apps, err := models.AllApps(context.Background(), testCtx.database)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, apps, 1)
|
||||||
|
assert.Equal(t, "imported-app", apps[0].Name)
|
||||||
|
|
||||||
|
// Verify env vars
|
||||||
|
envVars, _ := apps[0].GetEnvVars(context.Background())
|
||||||
|
require.Len(t, envVars, 1)
|
||||||
|
assert.Equal(t, "FOO", envVars[0].Key)
|
||||||
|
assert.Equal(t, "bar", envVars[0].Value)
|
||||||
|
|
||||||
|
// Verify labels
|
||||||
|
labels, _ := apps[0].GetLabels(context.Background())
|
||||||
|
require.Len(t, labels, 1)
|
||||||
|
assert.Equal(t, "app.name", labels[0].Key)
|
||||||
|
|
||||||
|
// Verify volumes
|
||||||
|
volumes, _ := apps[0].GetVolumes(context.Background())
|
||||||
|
require.Len(t, volumes, 1)
|
||||||
|
assert.Equal(t, "/data", volumes[0].HostPath)
|
||||||
|
assert.True(t, volumes[0].ReadOnly)
|
||||||
|
|
||||||
|
// Verify ports
|
||||||
|
ports, _ := apps[0].GetPorts(context.Background())
|
||||||
|
require.Len(t, ports, 1)
|
||||||
|
assert.Equal(t, 3000, ports[0].HostPort)
|
||||||
|
assert.Equal(t, 8080, ports[0].ContainerPort)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportAppsSkipsDuplicateNames(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
// Create an existing app with same name
|
||||||
|
createTestApp(t, testCtx, "existing-app")
|
||||||
|
|
||||||
|
backupJSON := `{
|
||||||
|
"version": 1,
|
||||||
|
"exportedAt": "2025-01-01T00:00:00Z",
|
||||||
|
"apps": [
|
||||||
|
{
|
||||||
|
"name": "existing-app",
|
||||||
|
"repoUrl": "git@example.com:user/repo.git",
|
||||||
|
"branch": "main",
|
||||||
|
"dockerfilePath": "Dockerfile",
|
||||||
|
"envVars": [],
|
||||||
|
"labels": [],
|
||||||
|
"volumes": [],
|
||||||
|
"ports": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "new-app",
|
||||||
|
"repoUrl": "git@example.com:user/new.git",
|
||||||
|
"branch": "main",
|
||||||
|
"dockerfilePath": "Dockerfile",
|
||||||
|
"envVars": [],
|
||||||
|
"labels": [],
|
||||||
|
"volumes": [],
|
||||||
|
"ports": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}`
|
||||||
|
|
||||||
|
request := createMultipartBackupRequest(t, backupJSON)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusSeeOther, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Header().Get("Location"), "skipped")
|
||||||
|
|
||||||
|
// Should have 2 apps total (existing + new)
|
||||||
|
apps, err := models.AllApps(context.Background(), testCtx.database)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Len(t, apps, 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportAppsInvalidJSON(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
request := createMultipartBackupRequest(t, "not valid json")
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
// Should render the page with error, not redirect
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Body.String(), "Invalid backup file")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportAppsUnsupportedVersion(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
backupJSON := `{"version": 99, "exportedAt": "2025-01-01T00:00:00Z", "apps": [{"name": "test"}]}`
|
||||||
|
|
||||||
|
request := createMultipartBackupRequest(t, backupJSON)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Body.String(), "Unsupported backup version")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportAppsEmptyBundle(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
backupJSON := `{"version": 1, "exportedAt": "2025-01-01T00:00:00Z", "apps": []}`
|
||||||
|
|
||||||
|
request := createMultipartBackupRequest(t, backupJSON)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportApps()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Body.String(), "contains no apps")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleImportPage(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/backup/import", nil)
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
|
||||||
|
handler := testCtx.handlers.HandleImportPage()
|
||||||
|
handler.ServeHTTP(recorder, request)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
assert.Contains(t, recorder.Body.String(), "Import Backup")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExportImportRoundTrip(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testCtx := setupTestHandlers(t)
|
||||||
|
createTestAppWithConfig(t, testCtx, "roundtrip-app")
|
||||||
|
|
||||||
|
// Export
|
||||||
|
exportReq := httptest.NewRequest(http.MethodGet, "/backup/export", nil)
|
||||||
|
exportRec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
testCtx.handlers.HandleExportAllApps().ServeHTTP(exportRec, exportReq)
|
||||||
|
|
||||||
|
require.Equal(t, http.StatusOK, exportRec.Code)
|
||||||
|
|
||||||
|
exportedJSON := exportRec.Body.String()
|
||||||
|
|
||||||
|
// Delete the original app
|
||||||
|
apps, _ := models.AllApps(context.Background(), testCtx.database)
|
||||||
|
for _, a := range apps {
|
||||||
|
require.NoError(t, a.Delete(context.Background()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import
|
||||||
|
importReq := createMultipartBackupRequest(t, exportedJSON)
|
||||||
|
importRec := httptest.NewRecorder()
|
||||||
|
|
||||||
|
testCtx.handlers.HandleImportApps().ServeHTTP(importRec, importReq)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusSeeOther, importRec.Code)
|
||||||
|
|
||||||
|
// Verify the app was recreated with all config
|
||||||
|
restoredApps, _ := models.AllApps(context.Background(), testCtx.database)
|
||||||
|
require.Len(t, restoredApps, 1)
|
||||||
|
assert.Equal(t, "roundtrip-app", restoredApps[0].Name)
|
||||||
|
|
||||||
|
envVars, _ := restoredApps[0].GetEnvVars(context.Background())
|
||||||
|
assert.Len(t, envVars, 1)
|
||||||
|
|
||||||
|
labels, _ := restoredApps[0].GetLabels(context.Background())
|
||||||
|
assert.Len(t, labels, 1)
|
||||||
|
|
||||||
|
volumes, _ := restoredApps[0].GetVolumes(context.Background())
|
||||||
|
assert.Len(t, volumes, 1)
|
||||||
|
|
||||||
|
ports, _ := restoredApps[0].GetPorts(context.Background())
|
||||||
|
assert.Len(t, ports, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAPIExportApp tests the API endpoint for exporting a single app.
|
||||||
|
func TestAPIExportApp(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
tc, cookies := setupAPITest(t)
|
||||||
|
|
||||||
|
createdApp, err := tc.appSvc.CreateApp(t.Context(), app.CreateAppInput{
|
||||||
|
Name: "api-export-app",
|
||||||
|
RepoURL: "git@example.com:user/repo.git",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
rr := apiGet(t, tc, cookies, "/api/v1/apps/"+createdApp.ID+"/export")
|
||||||
|
assert.Equal(t, http.StatusOK, rr.Code)
|
||||||
|
|
||||||
|
var bundle app.BackupBundle
|
||||||
|
require.NoError(t, json.Unmarshal(rr.Body.Bytes(), &bundle))
|
||||||
|
|
||||||
|
assert.Equal(t, 1, bundle.Version)
|
||||||
|
require.Len(t, bundle.Apps, 1)
|
||||||
|
assert.Equal(t, "api-export-app", bundle.Apps[0].Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAPIExportAppNotFound tests the API endpoint for a nonexistent app.
|
||||||
|
func TestAPIExportAppNotFound(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
tc, cookies := setupAPITest(t)
|
||||||
|
|
||||||
|
rr := apiGet(t, tc, cookies, "/api/v1/apps/nonexistent/export")
|
||||||
|
assert.Equal(t, http.StatusNotFound, rr.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAPIExportAllApps tests the API endpoint for exporting all apps.
func TestAPIExportAllApps(t *testing.T) {
	t.Parallel()

	tc, cookies := setupAPITest(t)

	// Create two apps so the bundle has more than one entry.
	_, err := tc.appSvc.CreateApp(t.Context(), app.CreateAppInput{
		Name:    "api-export-all-1",
		RepoURL: "git@example.com:user/repo1.git",
	})
	require.NoError(t, err)

	_, err = tc.appSvc.CreateApp(t.Context(), app.CreateAppInput{
		Name:    "api-export-all-2",
		RepoURL: "git@example.com:user/repo2.git",
	})
	require.NoError(t, err)

	rr := apiGet(t, tc, cookies, "/api/v1/backup/export")
	assert.Equal(t, http.StatusOK, rr.Code)

	var bundle app.BackupBundle
	require.NoError(t, json.Unmarshal(rr.Body.Bytes(), &bundle))

	assert.Len(t, bundle.Apps, 2)
}
|
||||||
|
|
||||||
|
// TestAPIImportApps tests the API import endpoint.
func TestAPIImportApps(t *testing.T) {
	t.Parallel()

	tc, cookies := setupAPITest(t)

	// Minimal valid version-1 bundle containing one app with empty
	// sub-resource lists.
	backupJSON := `{
		"version": 1,
		"exportedAt": "2025-01-01T00:00:00Z",
		"apps": [{
			"name": "api-imported-app",
			"repoUrl": "git@example.com:user/repo.git",
			"branch": "main",
			"dockerfilePath": "Dockerfile",
			"envVars": [],
			"labels": [],
			"volumes": [],
			"ports": []
		}]
	}`

	r := apiRouter(tc)

	req := httptest.NewRequest(http.MethodPost, "/api/v1/backup/import", strings.NewReader(backupJSON))
	req.Header.Set("Content-Type", "application/json")

	// Authenticate the request with the session cookies from setup.
	for _, c := range cookies {
		req.AddCookie(c)
	}

	rr := httptest.NewRecorder()
	r.ServeHTTP(rr, req)

	assert.Equal(t, http.StatusOK, rr.Code)

	// The response reports the imported app names.
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rr.Body.Bytes(), &resp))

	imported, ok := resp["imported"].([]any)
	require.True(t, ok)
	assert.Len(t, imported, 1)
	assert.Equal(t, "api-imported-app", imported[0])
}
|
||||||
|
|
||||||
|
// TestAPIImportAppsInvalidBody tests that the API rejects bad JSON.
func TestAPIImportAppsInvalidBody(t *testing.T) {
	t.Parallel()

	tc, cookies := setupAPITest(t)

	r := apiRouter(tc)

	// Body is not JSON at all; the handler must reject it with 400.
	req := httptest.NewRequest(http.MethodPost, "/api/v1/backup/import", strings.NewReader("not json"))
	req.Header.Set("Content-Type", "application/json")

	for _, c := range cookies {
		req.AddCookie(c)
	}

	rr := httptest.NewRecorder()
	r.ServeHTTP(rr, req)

	assert.Equal(t, http.StatusBadRequest, rr.Code)
}
|
||||||
|
|
||||||
|
// TestAPIImportAppsUnsupportedVersion tests that the API rejects bad versions.
func TestAPIImportAppsUnsupportedVersion(t *testing.T) {
	t.Parallel()

	tc, cookies := setupAPITest(t)

	r := apiRouter(tc)

	// Version 42 is unknown (current format is version 1); expect 400.
	body := `{"version": 42, "apps": [{"name": "x"}]}`
	req := httptest.NewRequest(http.MethodPost, "/api/v1/backup/import", strings.NewReader(body))
	req.Header.Set("Content-Type", "application/json")

	for _, c := range cookies {
		req.AddCookie(c)
	}

	rr := httptest.NewRecorder()
	r.ServeHTTP(rr, req)

	assert.Equal(t, http.StatusBadRequest, rr.Code)
}
|
||||||
@@ -7,14 +7,12 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
|
|
||||||
"sneak.berlin/go/upaas/internal/models"
|
"sneak.berlin/go/upaas/internal/models"
|
||||||
"sneak.berlin/go/upaas/internal/service/webhook"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// maxWebhookBodySize is the maximum allowed size of a webhook request body (1MB).
|
// maxWebhookBodySize is the maximum allowed size of a webhook request body (1MB).
|
||||||
const maxWebhookBodySize = 1 << 20
|
const maxWebhookBodySize = 1 << 20
|
||||||
|
|
||||||
// HandleWebhook handles incoming webhooks from Gitea, GitHub, or GitLab.
|
// HandleWebhook handles incoming Gitea webhooks.
|
||||||
// The webhook source is auto-detected from HTTP headers.
|
|
||||||
func (h *Handlers) HandleWebhook() http.HandlerFunc {
|
func (h *Handlers) HandleWebhook() http.HandlerFunc {
|
||||||
return func(writer http.ResponseWriter, request *http.Request) {
|
return func(writer http.ResponseWriter, request *http.Request) {
|
||||||
secret := chi.URLParam(request, "secret")
|
secret := chi.URLParam(request, "secret")
|
||||||
@@ -52,17 +50,16 @@ func (h *Handlers) HandleWebhook() http.HandlerFunc {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Auto-detect webhook source from headers
|
// Get event type from header
|
||||||
source := webhook.DetectWebhookSource(request.Header)
|
eventType := request.Header.Get("X-Gitea-Event")
|
||||||
|
if eventType == "" {
|
||||||
// Extract event type based on detected source
|
eventType = "push"
|
||||||
eventType := webhook.DetectEventType(request.Header, source)
|
}
|
||||||
|
|
||||||
// Process webhook
|
// Process webhook
|
||||||
webhookErr := h.webhook.HandleWebhook(
|
webhookErr := h.webhook.HandleWebhook(
|
||||||
request.Context(),
|
request.Context(),
|
||||||
application,
|
application,
|
||||||
source,
|
|
||||||
eventType,
|
eventType,
|
||||||
body,
|
body,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -98,6 +98,12 @@ func (s *Server) SetupRoutes() {
|
|||||||
// Ports
|
// Ports
|
||||||
r.Post("/apps/{id}/ports", s.handlers.HandlePortAdd())
|
r.Post("/apps/{id}/ports", s.handlers.HandlePortAdd())
|
||||||
r.Post("/apps/{id}/ports/{portID}/delete", s.handlers.HandlePortDelete())
|
r.Post("/apps/{id}/ports/{portID}/delete", s.handlers.HandlePortDelete())
|
||||||
|
|
||||||
|
// Backup/Restore
|
||||||
|
r.Get("/apps/{id}/export", s.handlers.HandleExportApp())
|
||||||
|
r.Get("/backup/export", s.handlers.HandleExportAllApps())
|
||||||
|
r.Get("/backup/import", s.handlers.HandleImportPage())
|
||||||
|
r.Post("/backup/import", s.handlers.HandleImportApps())
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -115,6 +121,11 @@ func (s *Server) SetupRoutes() {
|
|||||||
r.Get("/apps", s.handlers.HandleAPIListApps())
|
r.Get("/apps", s.handlers.HandleAPIListApps())
|
||||||
r.Get("/apps/{id}", s.handlers.HandleAPIGetApp())
|
r.Get("/apps/{id}", s.handlers.HandleAPIGetApp())
|
||||||
r.Get("/apps/{id}/deployments", s.handlers.HandleAPIListDeployments())
|
r.Get("/apps/{id}/deployments", s.handlers.HandleAPIListDeployments())
|
||||||
|
|
||||||
|
// Backup/Restore API
|
||||||
|
r.Get("/apps/{id}/export", s.handlers.HandleAPIExportApp())
|
||||||
|
r.Get("/backup/export", s.handlers.HandleAPIExportAllApps())
|
||||||
|
r.Post("/backup/import", s.handlers.HandleAPIImportApps())
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
391
internal/service/app/backup.go
Normal file
391
internal/service/app/backup.go
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
package app
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"sneak.berlin/go/upaas/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// BackupEnvVar represents an environment variable in a backup.
type BackupEnvVar struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}

// BackupLabel represents a Docker label in a backup.
type BackupLabel struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}

// BackupVolume represents a volume mount in a backup.
type BackupVolume struct {
	HostPath      string `json:"hostPath"`
	ContainerPath string `json:"containerPath"`
	ReadOnly      bool   `json:"readOnly"`
}

// BackupPort represents a port mapping in a backup.
type BackupPort struct {
	HostPort      int    `json:"hostPort"`
	ContainerPort int    `json:"containerPort"`
	// Protocol is "tcp" or "udp"; importers default an empty value to tcp.
	Protocol string `json:"protocol"`
}

// Backup represents the exported configuration of a single app.
// Secrets (SSH keys, webhook secrets) are intentionally excluded;
// imports generate fresh ones.
type Backup struct {
	Name           string         `json:"name"`
	RepoURL        string         `json:"repoUrl"`
	Branch         string         `json:"branch"`
	DockerfilePath string         `json:"dockerfilePath"`
	DockerNetwork  string         `json:"dockerNetwork,omitempty"`
	NtfyTopic      string         `json:"ntfyTopic,omitempty"`
	SlackWebhook   string         `json:"slackWebhook,omitempty"`
	EnvVars        []BackupEnvVar `json:"envVars"`
	Labels         []BackupLabel  `json:"labels"`
	Volumes        []BackupVolume `json:"volumes"`
	Ports          []BackupPort   `json:"ports"`
}

// BackupBundle represents a complete backup of one or more apps.
type BackupBundle struct {
	// Version is the backup format version (see backupVersion).
	Version int `json:"version"`
	// ExportedAt is an RFC 3339 UTC timestamp of when the export ran.
	ExportedAt string `json:"exportedAt"`
	Apps       []Backup `json:"apps"`
}

// backupVersion is the current backup format version.
const backupVersion = 1
|
||||||
|
|
||||||
|
// ExportApp exports a single app's configuration as a BackupBundle.
|
||||||
|
func (svc *Service) ExportApp(
|
||||||
|
ctx context.Context,
|
||||||
|
application *models.App,
|
||||||
|
) (*BackupBundle, error) {
|
||||||
|
appBackup, err := svc.buildAppBackup(ctx, application)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &BackupBundle{
|
||||||
|
Version: backupVersion,
|
||||||
|
ExportedAt: time.Now().UTC().Format(time.RFC3339),
|
||||||
|
Apps: []Backup{appBackup},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExportAllApps exports all app configurations as a BackupBundle.
|
||||||
|
func (svc *Service) ExportAllApps(ctx context.Context) (*BackupBundle, error) {
|
||||||
|
apps, err := models.AllApps(ctx, svc.db)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("listing apps for export: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
backups := make([]Backup, 0, len(apps))
|
||||||
|
|
||||||
|
for _, application := range apps {
|
||||||
|
appBackup, buildErr := svc.buildAppBackup(ctx, application)
|
||||||
|
if buildErr != nil {
|
||||||
|
return nil, buildErr
|
||||||
|
}
|
||||||
|
|
||||||
|
backups = append(backups, appBackup)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &BackupBundle{
|
||||||
|
Version: backupVersion,
|
||||||
|
ExportedAt: time.Now().UTC().Format(time.RFC3339),
|
||||||
|
Apps: backups,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImportApps imports app configurations from a BackupBundle.
|
||||||
|
// It creates new apps (with fresh IDs, SSH keys, and webhook secrets)
|
||||||
|
// and populates their env vars, labels, volumes, and ports.
|
||||||
|
// Apps whose names conflict with existing apps are skipped and reported.
|
||||||
|
func (svc *Service) ImportApps(
|
||||||
|
ctx context.Context,
|
||||||
|
bundle *BackupBundle,
|
||||||
|
) ([]string, []string, error) {
|
||||||
|
// Build a set of existing app names for conflict detection
|
||||||
|
existingApps, listErr := models.AllApps(ctx, svc.db)
|
||||||
|
if listErr != nil {
|
||||||
|
return nil, nil, fmt.Errorf("listing existing apps: %w", listErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
existingNames := make(map[string]bool, len(existingApps))
|
||||||
|
for _, a := range existingApps {
|
||||||
|
existingNames[a.Name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
var imported, skipped []string
|
||||||
|
|
||||||
|
for _, ab := range bundle.Apps {
|
||||||
|
if existingNames[ab.Name] {
|
||||||
|
skipped = append(skipped, ab.Name)
|
||||||
|
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
importErr := svc.importSingleApp(ctx, ab)
|
||||||
|
if importErr != nil {
|
||||||
|
return imported, skipped, fmt.Errorf(
|
||||||
|
"importing app %q: %w", ab.Name, importErr,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
imported = append(imported, ab.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
return imported, skipped, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importSingleApp creates a single app from backup data.
|
||||||
|
func (svc *Service) importSingleApp(
|
||||||
|
ctx context.Context,
|
||||||
|
ab Backup,
|
||||||
|
) error {
|
||||||
|
createdApp, createErr := svc.CreateApp(ctx, CreateAppInput{
|
||||||
|
Name: ab.Name,
|
||||||
|
RepoURL: ab.RepoURL,
|
||||||
|
Branch: ab.Branch,
|
||||||
|
DockerfilePath: ab.DockerfilePath,
|
||||||
|
DockerNetwork: ab.DockerNetwork,
|
||||||
|
NtfyTopic: ab.NtfyTopic,
|
||||||
|
SlackWebhook: ab.SlackWebhook,
|
||||||
|
})
|
||||||
|
if createErr != nil {
|
||||||
|
return fmt.Errorf("creating app: %w", createErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
envErr := svc.importEnvVars(ctx, createdApp.ID, ab.EnvVars)
|
||||||
|
if envErr != nil {
|
||||||
|
return envErr
|
||||||
|
}
|
||||||
|
|
||||||
|
labelErr := svc.importLabels(ctx, createdApp.ID, ab.Labels)
|
||||||
|
if labelErr != nil {
|
||||||
|
return labelErr
|
||||||
|
}
|
||||||
|
|
||||||
|
volErr := svc.importVolumes(ctx, createdApp.ID, ab.Volumes)
|
||||||
|
if volErr != nil {
|
||||||
|
return volErr
|
||||||
|
}
|
||||||
|
|
||||||
|
portErr := svc.importPorts(ctx, createdApp.ID, ab.Ports)
|
||||||
|
if portErr != nil {
|
||||||
|
return portErr
|
||||||
|
}
|
||||||
|
|
||||||
|
svc.log.Info("app imported from backup",
|
||||||
|
"id", createdApp.ID, "name", createdApp.Name)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importEnvVars adds env vars from backup to an app.
|
||||||
|
func (svc *Service) importEnvVars(
|
||||||
|
ctx context.Context,
|
||||||
|
appID string,
|
||||||
|
envVars []BackupEnvVar,
|
||||||
|
) error {
|
||||||
|
for _, ev := range envVars {
|
||||||
|
addErr := svc.AddEnvVar(ctx, appID, ev.Key, ev.Value)
|
||||||
|
if addErr != nil {
|
||||||
|
return fmt.Errorf("adding env var %q: %w", ev.Key, addErr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importLabels adds labels from backup to an app.
|
||||||
|
func (svc *Service) importLabels(
|
||||||
|
ctx context.Context,
|
||||||
|
appID string,
|
||||||
|
labels []BackupLabel,
|
||||||
|
) error {
|
||||||
|
for _, l := range labels {
|
||||||
|
addErr := svc.AddLabel(ctx, appID, l.Key, l.Value)
|
||||||
|
if addErr != nil {
|
||||||
|
return fmt.Errorf("adding label %q: %w", l.Key, addErr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importVolumes adds volumes from backup to an app.
|
||||||
|
func (svc *Service) importVolumes(
|
||||||
|
ctx context.Context,
|
||||||
|
appID string,
|
||||||
|
volumes []BackupVolume,
|
||||||
|
) error {
|
||||||
|
for _, v := range volumes {
|
||||||
|
addErr := svc.AddVolume(ctx, appID, v.HostPath, v.ContainerPath, v.ReadOnly)
|
||||||
|
if addErr != nil {
|
||||||
|
return fmt.Errorf("adding volume %q: %w", v.ContainerPath, addErr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importPorts adds ports from backup to an app.
|
||||||
|
func (svc *Service) importPorts(
|
||||||
|
ctx context.Context,
|
||||||
|
appID string,
|
||||||
|
ports []BackupPort,
|
||||||
|
) error {
|
||||||
|
for _, p := range ports {
|
||||||
|
port := models.NewPort(svc.db)
|
||||||
|
port.AppID = appID
|
||||||
|
port.HostPort = p.HostPort
|
||||||
|
port.ContainerPort = p.ContainerPort
|
||||||
|
port.Protocol = models.PortProtocol(p.Protocol)
|
||||||
|
|
||||||
|
if port.Protocol == "" {
|
||||||
|
port.Protocol = models.PortProtocolTCP
|
||||||
|
}
|
||||||
|
|
||||||
|
saveErr := port.Save(ctx)
|
||||||
|
if saveErr != nil {
|
||||||
|
return fmt.Errorf("adding port %d: %w", p.HostPort, saveErr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildAppBackup collects all configuration for a single app into a Backup.
|
||||||
|
func (svc *Service) buildAppBackup(
|
||||||
|
ctx context.Context,
|
||||||
|
application *models.App,
|
||||||
|
) (Backup, error) {
|
||||||
|
envVars, labels, volumes, ports, err := svc.fetchAppResources(ctx, application)
|
||||||
|
if err != nil {
|
||||||
|
return Backup{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
backup := Backup{
|
||||||
|
Name: application.Name,
|
||||||
|
RepoURL: application.RepoURL,
|
||||||
|
Branch: application.Branch,
|
||||||
|
DockerfilePath: application.DockerfilePath,
|
||||||
|
EnvVars: convertEnvVars(envVars),
|
||||||
|
Labels: convertLabels(labels),
|
||||||
|
Volumes: convertVolumes(volumes),
|
||||||
|
Ports: convertPorts(ports),
|
||||||
|
}
|
||||||
|
|
||||||
|
if application.DockerNetwork.Valid {
|
||||||
|
backup.DockerNetwork = application.DockerNetwork.String
|
||||||
|
}
|
||||||
|
|
||||||
|
if application.NtfyTopic.Valid {
|
||||||
|
backup.NtfyTopic = application.NtfyTopic.String
|
||||||
|
}
|
||||||
|
|
||||||
|
if application.SlackWebhook.Valid {
|
||||||
|
backup.SlackWebhook = application.SlackWebhook.String
|
||||||
|
}
|
||||||
|
|
||||||
|
return backup, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchAppResources retrieves all sub-resources for an app.
|
||||||
|
func (svc *Service) fetchAppResources(
|
||||||
|
ctx context.Context,
|
||||||
|
application *models.App,
|
||||||
|
) ([]*models.EnvVar, []*models.Label, []*models.Volume, []*models.Port, error) {
|
||||||
|
envVars, err := application.GetEnvVars(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, fmt.Errorf(
|
||||||
|
"getting env vars for %q: %w", application.Name, err,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
labels, err := application.GetLabels(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, fmt.Errorf(
|
||||||
|
"getting labels for %q: %w", application.Name, err,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
volumes, err := application.GetVolumes(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, fmt.Errorf(
|
||||||
|
"getting volumes for %q: %w", application.Name, err,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
ports, err := application.GetPorts(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, nil, nil, fmt.Errorf(
|
||||||
|
"getting ports for %q: %w", application.Name, err,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return envVars, labels, volumes, ports, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// convertEnvVars converts model env vars to backup format.
|
||||||
|
func convertEnvVars(envVars []*models.EnvVar) []BackupEnvVar {
|
||||||
|
result := make([]BackupEnvVar, 0, len(envVars))
|
||||||
|
|
||||||
|
for _, ev := range envVars {
|
||||||
|
result = append(result, BackupEnvVar{
|
||||||
|
Key: ev.Key,
|
||||||
|
Value: ev.Value,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// convertLabels converts model labels to backup format.
|
||||||
|
func convertLabels(labels []*models.Label) []BackupLabel {
|
||||||
|
result := make([]BackupLabel, 0, len(labels))
|
||||||
|
|
||||||
|
for _, l := range labels {
|
||||||
|
result = append(result, BackupLabel{
|
||||||
|
Key: l.Key,
|
||||||
|
Value: l.Value,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// convertVolumes converts model volumes to backup format.
|
||||||
|
func convertVolumes(volumes []*models.Volume) []BackupVolume {
|
||||||
|
result := make([]BackupVolume, 0, len(volumes))
|
||||||
|
|
||||||
|
for _, v := range volumes {
|
||||||
|
result = append(result, BackupVolume{
|
||||||
|
HostPath: v.HostPath,
|
||||||
|
ContainerPath: v.ContainerPath,
|
||||||
|
ReadOnly: v.ReadOnly,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// convertPorts converts model ports to backup format.
|
||||||
|
func convertPorts(ports []*models.Port) []BackupPort {
|
||||||
|
result := make([]BackupPort, 0, len(ports))
|
||||||
|
|
||||||
|
for _, p := range ports {
|
||||||
|
result = append(result, BackupPort{
|
||||||
|
HostPort: p.HostPort,
|
||||||
|
ContainerPort: p.ContainerPort,
|
||||||
|
Protocol: string(p.Protocol),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
379
internal/service/app/backup_test.go
Normal file
379
internal/service/app/backup_test.go
Normal file
@@ -0,0 +1,379 @@
|
|||||||
|
package app_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"go.uber.org/fx"
|
||||||
|
|
||||||
|
"sneak.berlin/go/upaas/internal/config"
|
||||||
|
"sneak.berlin/go/upaas/internal/database"
|
||||||
|
"sneak.berlin/go/upaas/internal/globals"
|
||||||
|
"sneak.berlin/go/upaas/internal/logger"
|
||||||
|
"sneak.berlin/go/upaas/internal/models"
|
||||||
|
"sneak.berlin/go/upaas/internal/service/app"
|
||||||
|
)
|
||||||
|
|
||||||
|
// backupTestContext bundles test dependencies for backup tests.
type backupTestContext struct {
	// svc is the app service under test.
	svc *app.Service
	// db is the backing database, used to create model rows directly.
	db *database.Database
}
|
||||||
|
|
||||||
|
// setupBackupTest constructs an isolated app service backed by a fresh
// temp-dir database for a single test. Dependencies are built in the
// same order the fx wiring uses: globals, logger, config, database, service.
func setupBackupTest(t *testing.T) *backupTestContext {
	t.Helper()

	tmpDir := t.TempDir()

	globals.SetAppname("upaas-test")
	globals.SetVersion("test")

	globalsInst, err := globals.New(fx.Lifecycle(nil))
	require.NoError(t, err)

	loggerInst, err := logger.New(
		fx.Lifecycle(nil),
		logger.Params{Globals: globalsInst},
	)
	require.NoError(t, err)

	cfg := &config.Config{
		Port:          8080,
		DataDir:       tmpDir,
		SessionSecret: "test-secret-key-at-least-32-chars",
	}

	dbInst, err := database.New(fx.Lifecycle(nil), database.Params{
		Logger: loggerInst,
		Config: cfg,
	})
	require.NoError(t, err)

	svc, err := app.New(fx.Lifecycle(nil), app.ServiceParams{
		Logger:   loggerInst,
		Database: dbInst,
	})
	require.NoError(t, err)

	return &backupTestContext{svc: svc, db: dbInst}
}
|
||||||
|
|
||||||
|
// createAppWithFullConfig creates an app with env vars, labels, volumes, and ports.
func createAppWithFullConfig(
	t *testing.T,
	btc *backupTestContext,
	name string,
) *models.App {
	t.Helper()

	createdApp, err := btc.svc.CreateApp(context.Background(), app.CreateAppInput{
		Name:          name,
		RepoURL:       "git@example.com:user/" + name + ".git",
		Branch:        "develop",
		NtfyTopic:     "https://ntfy.sh/" + name,
		DockerNetwork: "test-network",
	})
	require.NoError(t, err)

	// Two env vars, one label, one volume — exercised by export assertions.
	require.NoError(t, btc.svc.AddEnvVar(
		context.Background(), createdApp.ID, "DB_HOST", "localhost",
	))
	require.NoError(t, btc.svc.AddEnvVar(
		context.Background(), createdApp.ID, "DB_PORT", "5432",
	))
	require.NoError(t, btc.svc.AddLabel(
		context.Background(), createdApp.ID, "traefik.enable", "true",
	))
	require.NoError(t, btc.svc.AddVolume(
		context.Background(), createdApp.ID, "/data", "/app/data", false,
	))

	// One TCP port mapping, created directly through the model layer.
	port := models.NewPort(btc.db)
	port.AppID = createdApp.ID
	port.HostPort = 9090
	port.ContainerPort = 8080
	port.Protocol = models.PortProtocolTCP
	require.NoError(t, port.Save(context.Background()))

	return createdApp
}
|
||||||
|
|
||||||
|
// createAppWithConfigPort creates an app like createAppWithFullConfig but with
// a custom host port to avoid UNIQUE constraint collisions.
func createAppWithConfigPort(
	t *testing.T,
	btc *backupTestContext,
	name string,
	hostPort int,
) *models.App {
	t.Helper()

	createdApp, err := btc.svc.CreateApp(context.Background(), app.CreateAppInput{
		Name:          name,
		RepoURL:       "git@example.com:user/" + name + ".git",
		Branch:        "develop",
		NtfyTopic:     "https://ntfy.sh/" + name,
		DockerNetwork: "test-network",
	})
	require.NoError(t, err)

	// One env var, label, and volume (fewer than createAppWithFullConfig,
	// and a distinct volume path to avoid collisions).
	require.NoError(t, btc.svc.AddEnvVar(
		context.Background(), createdApp.ID, "DB_HOST", "localhost",
	))
	require.NoError(t, btc.svc.AddLabel(
		context.Background(), createdApp.ID, "traefik.enable", "true",
	))
	require.NoError(t, btc.svc.AddVolume(
		context.Background(), createdApp.ID, "/data2", "/app/data2", false,
	))

	// Caller supplies the host port so parallel tests don't collide.
	port := models.NewPort(btc.db)
	port.AppID = createdApp.ID
	port.HostPort = hostPort
	port.ContainerPort = 8080
	port.Protocol = models.PortProtocolTCP
	require.NoError(t, port.Save(context.Background()))

	return createdApp
}
|
||||||
|
|
||||||
|
// TestExportApp verifies that a single-app export captures the app's
// settings and all sub-resources (env vars, labels, volumes, ports).
func TestExportApp(t *testing.T) {
	t.Parallel()

	btc := setupBackupTest(t)
	createdApp := createAppWithFullConfig(t, btc, "export-svc-test")

	bundle, err := btc.svc.ExportApp(context.Background(), createdApp)
	require.NoError(t, err)

	assert.Equal(t, 1, bundle.Version)
	assert.NotEmpty(t, bundle.ExportedAt)
	require.Len(t, bundle.Apps, 1)

	ab := bundle.Apps[0]
	assert.Equal(t, "export-svc-test", ab.Name)
	assert.Equal(t, "develop", ab.Branch)
	assert.Equal(t, "test-network", ab.DockerNetwork)
	assert.Equal(t, "https://ntfy.sh/export-svc-test", ab.NtfyTopic)
	assert.Len(t, ab.EnvVars, 2)
	assert.Len(t, ab.Labels, 1)
	assert.Len(t, ab.Volumes, 1)
	assert.Len(t, ab.Ports, 1)
	assert.Equal(t, 9090, ab.Ports[0].HostPort)
	assert.Equal(t, 8080, ab.Ports[0].ContainerPort)
	assert.Equal(t, "tcp", ab.Ports[0].Protocol)
}
|
||||||
|
|
||||||
|
// TestExportAllApps verifies that a full export includes every app.
func TestExportAllApps(t *testing.T) {
	t.Parallel()

	btc := setupBackupTest(t)
	createAppWithFullConfig(t, btc, "export-all-1")
	// Distinct host port (9091) to avoid a UNIQUE collision with the first app.
	createAppWithConfigPort(t, btc, "export-all-2", 9091)

	bundle, err := btc.svc.ExportAllApps(context.Background())
	require.NoError(t, err)

	assert.Equal(t, 1, bundle.Version)
	assert.Len(t, bundle.Apps, 2)
}
|
||||||
|
|
||||||
|
// TestExportAllAppsEmpty verifies that exporting with no apps yields an
// empty (but valid) bundle rather than an error.
func TestExportAllAppsEmpty(t *testing.T) {
	t.Parallel()

	btc := setupBackupTest(t)

	bundle, err := btc.svc.ExportAllApps(context.Background())
	require.NoError(t, err)

	assert.Empty(t, bundle.Apps)
}
|
||||||
|
|
||||||
|
// TestImportApps verifies that importing a bundle creates the app with
// fresh secrets and restores all sub-resources.
func TestImportApps(t *testing.T) {
	t.Parallel()

	btc := setupBackupTest(t)

	bundle := &app.BackupBundle{
		Version:    1,
		ExportedAt: "2025-01-01T00:00:00Z",
		Apps: []app.Backup{
			{
				Name:           "imported-test",
				RepoURL:        "git@example.com:user/imported.git",
				Branch:         "main",
				DockerfilePath: "Dockerfile",
				DockerNetwork:  "my-network",
				EnvVars: []app.BackupEnvVar{
					{Key: "FOO", Value: "bar"},
				},
				Labels: []app.BackupLabel{
					{Key: "app", Value: "test"},
				},
				Volumes: []app.BackupVolume{
					{HostPath: "/host", ContainerPath: "/container", ReadOnly: true},
				},
				Ports: []app.BackupPort{
					{HostPort: 3000, ContainerPort: 8080, Protocol: "tcp"},
				},
			},
		},
	}

	imported, skipped, err := btc.svc.ImportApps(context.Background(), bundle)
	require.NoError(t, err)

	assert.Equal(t, []string{"imported-test"}, imported)
	assert.Empty(t, skipped)

	// Verify the app was created
	apps, _ := btc.svc.ListApps(context.Background())
	require.Len(t, apps, 1)
	assert.Equal(t, "imported-test", apps[0].Name)
	assert.True(t, apps[0].DockerNetwork.Valid)
	assert.Equal(t, "my-network", apps[0].DockerNetwork.String)

	// Has fresh secrets (imports must not reuse exported credentials)
	assert.NotEmpty(t, apps[0].WebhookSecret)
	assert.NotEmpty(t, apps[0].SSHPublicKey)

	// Verify sub-resources
	envVars, _ := apps[0].GetEnvVars(context.Background())
	assert.Len(t, envVars, 1)

	labels, _ := apps[0].GetLabels(context.Background())
	assert.Len(t, labels, 1)

	volumes, _ := apps[0].GetVolumes(context.Background())
	assert.Len(t, volumes, 1)
	assert.True(t, volumes[0].ReadOnly)

	ports, _ := apps[0].GetPorts(context.Background())
	assert.Len(t, ports, 1)
	assert.Equal(t, 3000, ports[0].HostPort)
}
|
||||||
|
|
||||||
|
// TestImportAppsSkipsDuplicates verifies that apps whose names already
// exist are skipped (and reported) while new apps are still imported.
func TestImportAppsSkipsDuplicates(t *testing.T) {
	t.Parallel()

	btc := setupBackupTest(t)

	// Create existing app
	_, err := btc.svc.CreateApp(context.Background(), app.CreateAppInput{
		Name:    "existing",
		RepoURL: "git@example.com:user/existing.git",
	})
	require.NoError(t, err)

	// Bundle contains one conflicting name and one new name.
	bundle := &app.BackupBundle{
		Version:    1,
		ExportedAt: "2025-01-01T00:00:00Z",
		Apps: []app.Backup{
			{
				Name:           "existing",
				RepoURL:        "git@example.com:user/existing.git",
				Branch:         "main",
				DockerfilePath: "Dockerfile",
				EnvVars:        []app.BackupEnvVar{},
				Labels:         []app.BackupLabel{},
				Volumes:        []app.BackupVolume{},
				Ports:          []app.BackupPort{},
			},
			{
				Name:           "brand-new",
				RepoURL:        "git@example.com:user/new.git",
				Branch:         "main",
				DockerfilePath: "Dockerfile",
				EnvVars:        []app.BackupEnvVar{},
				Labels:         []app.BackupLabel{},
				Volumes:        []app.BackupVolume{},
				Ports:          []app.BackupPort{},
			},
		},
	}

	imported, skipped, err := btc.svc.ImportApps(context.Background(), bundle)
	require.NoError(t, err)

	assert.Equal(t, []string{"brand-new"}, imported)
	assert.Equal(t, []string{"existing"}, skipped)
}
|
||||||
|
|
||||||
|
func TestImportAppsPortDefaultProtocol(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
btc := setupBackupTest(t)
|
||||||
|
|
||||||
|
bundle := &app.BackupBundle{
|
||||||
|
Version: 1,
|
||||||
|
ExportedAt: "2025-01-01T00:00:00Z",
|
||||||
|
Apps: []app.Backup{
|
||||||
|
{
|
||||||
|
Name: "port-default-test",
|
||||||
|
RepoURL: "git@example.com:user/repo.git",
|
||||||
|
Branch: "main",
|
||||||
|
DockerfilePath: "Dockerfile",
|
||||||
|
EnvVars: []app.BackupEnvVar{},
|
||||||
|
Labels: []app.BackupLabel{},
|
||||||
|
Volumes: []app.BackupVolume{},
|
||||||
|
Ports: []app.BackupPort{
|
||||||
|
{HostPort: 80, ContainerPort: 80, Protocol: ""},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
imported, _, err := btc.svc.ImportApps(context.Background(), bundle)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Len(t, imported, 1)
|
||||||
|
|
||||||
|
apps, _ := btc.svc.ListApps(context.Background())
|
||||||
|
ports, _ := apps[0].GetPorts(context.Background())
|
||||||
|
require.Len(t, ports, 1)
|
||||||
|
assert.Equal(t, models.PortProtocolTCP, ports[0].Protocol)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExportImportRoundTripService(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
btc := setupBackupTest(t)
|
||||||
|
createAppWithFullConfig(t, btc, "roundtrip-svc")
|
||||||
|
|
||||||
|
// Export
|
||||||
|
bundle, err := btc.svc.ExportAllApps(context.Background())
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, bundle.Apps, 1)
|
||||||
|
|
||||||
|
// Delete original
|
||||||
|
apps, _ := btc.svc.ListApps(context.Background())
|
||||||
|
for _, a := range apps {
|
||||||
|
require.NoError(t, btc.svc.DeleteApp(context.Background(), a))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import
|
||||||
|
imported, skipped, err := btc.svc.ImportApps(context.Background(), bundle)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Len(t, imported, 1)
|
||||||
|
assert.Empty(t, skipped)
|
||||||
|
|
||||||
|
// Verify round-trip fidelity
|
||||||
|
restored, _ := btc.svc.ListApps(context.Background())
|
||||||
|
require.Len(t, restored, 1)
|
||||||
|
assert.Equal(t, "roundtrip-svc", restored[0].Name)
|
||||||
|
assert.Equal(t, "develop", restored[0].Branch)
|
||||||
|
assert.Equal(t, "test-network", restored[0].DockerNetwork.String)
|
||||||
|
|
||||||
|
envVars, _ := restored[0].GetEnvVars(context.Background())
|
||||||
|
assert.Len(t, envVars, 2)
|
||||||
|
|
||||||
|
labels, _ := restored[0].GetLabels(context.Background())
|
||||||
|
assert.Len(t, labels, 1)
|
||||||
|
|
||||||
|
volumes, _ := restored[0].GetVolumes(context.Background())
|
||||||
|
assert.Len(t, volumes, 1)
|
||||||
|
|
||||||
|
ports, _ := restored[0].GetPorts(context.Background())
|
||||||
|
assert.Len(t, ports, 1)
|
||||||
|
}
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
package webhook
|
|
||||||
|
|
||||||
import "encoding/json"
|
|
||||||
|
|
||||||
// GiteaPushPayload represents a Gitea push webhook payload.
|
|
||||||
//
|
|
||||||
//nolint:tagliatelle // Field names match Gitea API (snake_case)
|
|
||||||
type GiteaPushPayload struct {
|
|
||||||
Ref string `json:"ref"`
|
|
||||||
Before string `json:"before"`
|
|
||||||
After string `json:"after"`
|
|
||||||
CompareURL UnparsedURL `json:"compare_url"`
|
|
||||||
Repository struct {
|
|
||||||
FullName string `json:"full_name"`
|
|
||||||
CloneURL UnparsedURL `json:"clone_url"`
|
|
||||||
SSHURL string `json:"ssh_url"`
|
|
||||||
HTMLURL UnparsedURL `json:"html_url"`
|
|
||||||
} `json:"repository"`
|
|
||||||
Pusher struct {
|
|
||||||
Username string `json:"username"`
|
|
||||||
Email string `json:"email"`
|
|
||||||
} `json:"pusher"`
|
|
||||||
Commits []struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
URL UnparsedURL `json:"url"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
Author struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Email string `json:"email"`
|
|
||||||
} `json:"author"`
|
|
||||||
} `json:"commits"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// GitHubPushPayload represents a GitHub push webhook payload.
|
|
||||||
//
|
|
||||||
//nolint:tagliatelle // Field names match GitHub API (snake_case)
|
|
||||||
type GitHubPushPayload struct {
|
|
||||||
Ref string `json:"ref"`
|
|
||||||
Before string `json:"before"`
|
|
||||||
After string `json:"after"`
|
|
||||||
CompareURL string `json:"compare"`
|
|
||||||
Repository struct {
|
|
||||||
FullName string `json:"full_name"`
|
|
||||||
CloneURL UnparsedURL `json:"clone_url"`
|
|
||||||
SSHURL string `json:"ssh_url"`
|
|
||||||
HTMLURL UnparsedURL `json:"html_url"`
|
|
||||||
} `json:"repository"`
|
|
||||||
Pusher struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Email string `json:"email"`
|
|
||||||
} `json:"pusher"`
|
|
||||||
HeadCommit *struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
URL UnparsedURL `json:"url"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
} `json:"head_commit"`
|
|
||||||
Commits []struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
URL UnparsedURL `json:"url"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
Author struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Email string `json:"email"`
|
|
||||||
} `json:"author"`
|
|
||||||
} `json:"commits"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// GitLabPushPayload represents a GitLab push webhook payload.
|
|
||||||
//
|
|
||||||
//nolint:tagliatelle // Field names match GitLab API (snake_case)
|
|
||||||
type GitLabPushPayload struct {
|
|
||||||
Ref string `json:"ref"`
|
|
||||||
Before string `json:"before"`
|
|
||||||
After string `json:"after"`
|
|
||||||
UserName string `json:"user_name"`
|
|
||||||
UserEmail string `json:"user_email"`
|
|
||||||
Project struct {
|
|
||||||
PathWithNamespace string `json:"path_with_namespace"`
|
|
||||||
GitHTTPURL UnparsedURL `json:"git_http_url"`
|
|
||||||
GitSSHURL string `json:"git_ssh_url"`
|
|
||||||
WebURL UnparsedURL `json:"web_url"`
|
|
||||||
} `json:"project"`
|
|
||||||
Commits []struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
URL UnparsedURL `json:"url"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
Author struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Email string `json:"email"`
|
|
||||||
} `json:"author"`
|
|
||||||
} `json:"commits"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParsePushPayload parses a raw webhook payload into a normalized PushEvent
|
|
||||||
// based on the detected webhook source. Returns an error if JSON unmarshaling
|
|
||||||
// fails. For SourceUnknown, falls back to Gitea format for backward
|
|
||||||
// compatibility.
|
|
||||||
func ParsePushPayload(source Source, payload []byte) (*PushEvent, error) {
|
|
||||||
switch source {
|
|
||||||
case SourceGitHub:
|
|
||||||
return parseGitHubPush(payload)
|
|
||||||
case SourceGitLab:
|
|
||||||
return parseGitLabPush(payload)
|
|
||||||
case SourceGitea, SourceUnknown:
|
|
||||||
// Gitea and unknown both use Gitea format for backward compatibility.
|
|
||||||
return parseGiteaPush(payload)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unreachable for known source values, but satisfies exhaustive checker.
|
|
||||||
return parseGiteaPush(payload)
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseGiteaPush(payload []byte) (*PushEvent, error) {
|
|
||||||
var p GiteaPushPayload
|
|
||||||
|
|
||||||
unmarshalErr := json.Unmarshal(payload, &p)
|
|
||||||
if unmarshalErr != nil {
|
|
||||||
return nil, unmarshalErr
|
|
||||||
}
|
|
||||||
|
|
||||||
commitURL := extractGiteaCommitURL(p)
|
|
||||||
|
|
||||||
return &PushEvent{
|
|
||||||
Source: SourceGitea,
|
|
||||||
Ref: p.Ref,
|
|
||||||
Before: p.Before,
|
|
||||||
After: p.After,
|
|
||||||
Branch: extractBranch(p.Ref),
|
|
||||||
RepoName: p.Repository.FullName,
|
|
||||||
CloneURL: p.Repository.CloneURL,
|
|
||||||
HTMLURL: p.Repository.HTMLURL,
|
|
||||||
CommitURL: commitURL,
|
|
||||||
Pusher: p.Pusher.Username,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseGitHubPush(payload []byte) (*PushEvent, error) {
|
|
||||||
var p GitHubPushPayload
|
|
||||||
|
|
||||||
unmarshalErr := json.Unmarshal(payload, &p)
|
|
||||||
if unmarshalErr != nil {
|
|
||||||
return nil, unmarshalErr
|
|
||||||
}
|
|
||||||
|
|
||||||
commitURL := extractGitHubCommitURL(p)
|
|
||||||
|
|
||||||
return &PushEvent{
|
|
||||||
Source: SourceGitHub,
|
|
||||||
Ref: p.Ref,
|
|
||||||
Before: p.Before,
|
|
||||||
After: p.After,
|
|
||||||
Branch: extractBranch(p.Ref),
|
|
||||||
RepoName: p.Repository.FullName,
|
|
||||||
CloneURL: p.Repository.CloneURL,
|
|
||||||
HTMLURL: p.Repository.HTMLURL,
|
|
||||||
CommitURL: commitURL,
|
|
||||||
Pusher: p.Pusher.Name,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseGitLabPush(payload []byte) (*PushEvent, error) {
|
|
||||||
var p GitLabPushPayload
|
|
||||||
|
|
||||||
unmarshalErr := json.Unmarshal(payload, &p)
|
|
||||||
if unmarshalErr != nil {
|
|
||||||
return nil, unmarshalErr
|
|
||||||
}
|
|
||||||
|
|
||||||
commitURL := extractGitLabCommitURL(p)
|
|
||||||
|
|
||||||
return &PushEvent{
|
|
||||||
Source: SourceGitLab,
|
|
||||||
Ref: p.Ref,
|
|
||||||
Before: p.Before,
|
|
||||||
After: p.After,
|
|
||||||
Branch: extractBranch(p.Ref),
|
|
||||||
RepoName: p.Project.PathWithNamespace,
|
|
||||||
CloneURL: p.Project.GitHTTPURL,
|
|
||||||
HTMLURL: p.Project.WebURL,
|
|
||||||
CommitURL: commitURL,
|
|
||||||
Pusher: p.UserName,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractBranch extracts the branch name from a git ref.
|
|
||||||
func extractBranch(ref string) string {
|
|
||||||
// refs/heads/main -> main
|
|
||||||
const prefix = "refs/heads/"
|
|
||||||
|
|
||||||
if len(ref) >= len(prefix) && ref[:len(prefix)] == prefix {
|
|
||||||
return ref[len(prefix):]
|
|
||||||
}
|
|
||||||
|
|
||||||
return ref
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractGiteaCommitURL extracts the commit URL from a Gitea push payload.
|
|
||||||
// Prefers the URL from the head commit, falls back to constructing from repo URL.
|
|
||||||
func extractGiteaCommitURL(payload GiteaPushPayload) UnparsedURL {
|
|
||||||
for _, commit := range payload.Commits {
|
|
||||||
if commit.ID == payload.After && commit.URL != "" {
|
|
||||||
return commit.URL
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if payload.Repository.HTMLURL != "" && payload.After != "" {
|
|
||||||
return UnparsedURL(payload.Repository.HTMLURL.String() + "/commit/" + payload.After)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractGitHubCommitURL extracts the commit URL from a GitHub push payload.
|
|
||||||
// Prefers head_commit.url, then searches commits, then constructs from repo URL.
|
|
||||||
func extractGitHubCommitURL(payload GitHubPushPayload) UnparsedURL {
|
|
||||||
if payload.HeadCommit != nil && payload.HeadCommit.URL != "" {
|
|
||||||
return payload.HeadCommit.URL
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, commit := range payload.Commits {
|
|
||||||
if commit.ID == payload.After && commit.URL != "" {
|
|
||||||
return commit.URL
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if payload.Repository.HTMLURL != "" && payload.After != "" {
|
|
||||||
return UnparsedURL(payload.Repository.HTMLURL.String() + "/commit/" + payload.After)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractGitLabCommitURL extracts the commit URL from a GitLab push payload.
|
|
||||||
// Prefers commit URL from the commits list, falls back to constructing from
|
|
||||||
// project web URL.
|
|
||||||
func extractGitLabCommitURL(payload GitLabPushPayload) UnparsedURL {
|
|
||||||
for _, commit := range payload.Commits {
|
|
||||||
if commit.ID == payload.After && commit.URL != "" {
|
|
||||||
return commit.URL
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if payload.Project.WebURL != "" && payload.After != "" {
|
|
||||||
return UnparsedURL(payload.Project.WebURL.String() + "/-/commit/" + payload.After)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
package webhook
|
package webhook
|
||||||
|
|
||||||
import "net/http"
|
|
||||||
|
|
||||||
// UnparsedURL is a URL stored as a plain string without parsing.
|
// UnparsedURL is a URL stored as a plain string without parsing.
|
||||||
// Use this instead of string when the value is known to be a URL
|
// Use this instead of string when the value is known to be a URL
|
||||||
// but should not be parsed into a net/url.URL (e.g. webhook URLs,
|
// but should not be parsed into a net/url.URL (e.g. webhook URLs,
|
||||||
@@ -10,84 +8,3 @@ type UnparsedURL string
|
|||||||
|
|
||||||
// String implements the fmt.Stringer interface.
|
// String implements the fmt.Stringer interface.
|
||||||
func (u UnparsedURL) String() string { return string(u) }
|
func (u UnparsedURL) String() string { return string(u) }
|
||||||
|
|
||||||
// Source identifies which git hosting platform sent the webhook.
|
|
||||||
type Source string
|
|
||||||
|
|
||||||
const (
|
|
||||||
// SourceGitea indicates the webhook was sent by a Gitea instance.
|
|
||||||
SourceGitea Source = "gitea"
|
|
||||||
|
|
||||||
// SourceGitHub indicates the webhook was sent by GitHub.
|
|
||||||
SourceGitHub Source = "github"
|
|
||||||
|
|
||||||
// SourceGitLab indicates the webhook was sent by a GitLab instance.
|
|
||||||
SourceGitLab Source = "gitlab"
|
|
||||||
|
|
||||||
// SourceUnknown indicates the webhook source could not be determined.
|
|
||||||
SourceUnknown Source = "unknown"
|
|
||||||
)
|
|
||||||
|
|
||||||
// String implements the fmt.Stringer interface.
|
|
||||||
func (s Source) String() string { return string(s) }
|
|
||||||
|
|
||||||
// DetectWebhookSource determines the webhook source from HTTP headers.
|
|
||||||
// It checks for platform-specific event headers in this order:
|
|
||||||
// Gitea (X-Gitea-Event), GitHub (X-GitHub-Event), GitLab (X-Gitlab-Event).
|
|
||||||
// Returns SourceUnknown if no recognized header is found.
|
|
||||||
func DetectWebhookSource(headers http.Header) Source {
|
|
||||||
if headers.Get("X-Gitea-Event") != "" {
|
|
||||||
return SourceGitea
|
|
||||||
}
|
|
||||||
|
|
||||||
if headers.Get("X-Github-Event") != "" {
|
|
||||||
return SourceGitHub
|
|
||||||
}
|
|
||||||
|
|
||||||
if headers.Get("X-Gitlab-Event") != "" {
|
|
||||||
return SourceGitLab
|
|
||||||
}
|
|
||||||
|
|
||||||
return SourceUnknown
|
|
||||||
}
|
|
||||||
|
|
||||||
// DetectEventType extracts the event type string from HTTP headers
|
|
||||||
// based on the detected webhook source. Returns "push" as a fallback
|
|
||||||
// when no event header is found.
|
|
||||||
func DetectEventType(headers http.Header, source Source) string {
|
|
||||||
switch source {
|
|
||||||
case SourceGitea:
|
|
||||||
if v := headers.Get("X-Gitea-Event"); v != "" {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
case SourceGitHub:
|
|
||||||
if v := headers.Get("X-Github-Event"); v != "" {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
case SourceGitLab:
|
|
||||||
if v := headers.Get("X-Gitlab-Event"); v != "" {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
case SourceUnknown:
|
|
||||||
// Fall through to default
|
|
||||||
}
|
|
||||||
|
|
||||||
return "push"
|
|
||||||
}
|
|
||||||
|
|
||||||
// PushEvent is a normalized representation of a push webhook payload
|
|
||||||
// from any supported source (Gitea, GitHub, GitLab). The webhook
|
|
||||||
// service converts source-specific payloads into this format before
|
|
||||||
// processing.
|
|
||||||
type PushEvent struct {
|
|
||||||
Source Source
|
|
||||||
Ref string
|
|
||||||
Before string
|
|
||||||
After string
|
|
||||||
Branch string
|
|
||||||
RepoName string
|
|
||||||
CloneURL UnparsedURL
|
|
||||||
HTMLURL UnparsedURL
|
|
||||||
CommitURL UnparsedURL
|
|
||||||
Pusher string
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ package webhook
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
|
|
||||||
@@ -43,46 +44,68 @@ func New(_ fx.Lifecycle, params ServiceParams) (*Service, error) {
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// HandleWebhook processes a webhook request from any supported source
|
// GiteaPushPayload represents a Gitea push webhook payload.
|
||||||
// (Gitea, GitHub, or GitLab). The source parameter determines which
|
//
|
||||||
// payload format to use for parsing.
|
//nolint:tagliatelle // Field names match Gitea API (snake_case)
|
||||||
|
type GiteaPushPayload struct {
|
||||||
|
Ref string `json:"ref"`
|
||||||
|
Before string `json:"before"`
|
||||||
|
After string `json:"after"`
|
||||||
|
CompareURL UnparsedURL `json:"compare_url"`
|
||||||
|
Repository struct {
|
||||||
|
FullName string `json:"full_name"`
|
||||||
|
CloneURL UnparsedURL `json:"clone_url"`
|
||||||
|
SSHURL string `json:"ssh_url"`
|
||||||
|
HTMLURL UnparsedURL `json:"html_url"`
|
||||||
|
} `json:"repository"`
|
||||||
|
Pusher struct {
|
||||||
|
Username string `json:"username"`
|
||||||
|
Email string `json:"email"`
|
||||||
|
} `json:"pusher"`
|
||||||
|
Commits []struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
URL UnparsedURL `json:"url"`
|
||||||
|
Message string `json:"message"`
|
||||||
|
Author struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Email string `json:"email"`
|
||||||
|
} `json:"author"`
|
||||||
|
} `json:"commits"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleWebhook processes a webhook request.
|
||||||
func (svc *Service) HandleWebhook(
|
func (svc *Service) HandleWebhook(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
app *models.App,
|
app *models.App,
|
||||||
source Source,
|
|
||||||
eventType string,
|
eventType string,
|
||||||
payload []byte,
|
payload []byte,
|
||||||
) error {
|
) error {
|
||||||
svc.log.Info("processing webhook",
|
svc.log.Info("processing webhook", "app", app.Name, "event", eventType)
|
||||||
"app", app.Name,
|
|
||||||
"source", source.String(),
|
|
||||||
"event", eventType,
|
|
||||||
)
|
|
||||||
|
|
||||||
// Parse payload into normalized push event
|
// Parse payload
|
||||||
pushEvent, parseErr := ParsePushPayload(source, payload)
|
var pushPayload GiteaPushPayload
|
||||||
if parseErr != nil {
|
|
||||||
svc.log.Warn("failed to parse webhook payload",
|
unmarshalErr := json.Unmarshal(payload, &pushPayload)
|
||||||
"error", parseErr,
|
if unmarshalErr != nil {
|
||||||
"source", source.String(),
|
svc.log.Warn("failed to parse webhook payload", "error", unmarshalErr)
|
||||||
)
|
// Continue anyway to log the event
|
||||||
// Continue with empty push event to still log the webhook
|
|
||||||
pushEvent = &PushEvent{Source: source}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Extract branch from ref
|
||||||
|
branch := extractBranch(pushPayload.Ref)
|
||||||
|
commitSHA := pushPayload.After
|
||||||
|
commitURL := extractCommitURL(pushPayload)
|
||||||
|
|
||||||
// Check if branch matches
|
// Check if branch matches
|
||||||
matched := pushEvent.Branch == app.Branch
|
matched := branch == app.Branch
|
||||||
|
|
||||||
// Create webhook event record
|
// Create webhook event record
|
||||||
event := models.NewWebhookEvent(svc.db)
|
event := models.NewWebhookEvent(svc.db)
|
||||||
event.AppID = app.ID
|
event.AppID = app.ID
|
||||||
event.EventType = eventType
|
event.EventType = eventType
|
||||||
event.Branch = pushEvent.Branch
|
event.Branch = branch
|
||||||
event.CommitSHA = sql.NullString{String: pushEvent.After, Valid: pushEvent.After != ""}
|
event.CommitSHA = sql.NullString{String: commitSHA, Valid: commitSHA != ""}
|
||||||
event.CommitURL = sql.NullString{
|
event.CommitURL = sql.NullString{String: commitURL.String(), Valid: commitURL != ""}
|
||||||
String: pushEvent.CommitURL.String(),
|
|
||||||
Valid: pushEvent.CommitURL != "",
|
|
||||||
}
|
|
||||||
event.Payload = sql.NullString{String: string(payload), Valid: true}
|
event.Payload = sql.NullString{String: string(payload), Valid: true}
|
||||||
event.Matched = matched
|
event.Matched = matched
|
||||||
event.Processed = false
|
event.Processed = false
|
||||||
@@ -94,10 +117,9 @@ func (svc *Service) HandleWebhook(
|
|||||||
|
|
||||||
svc.log.Info("webhook event recorded",
|
svc.log.Info("webhook event recorded",
|
||||||
"app", app.Name,
|
"app", app.Name,
|
||||||
"source", source.String(),
|
"branch", branch,
|
||||||
"branch", pushEvent.Branch,
|
|
||||||
"matched", matched,
|
"matched", matched,
|
||||||
"commit", pushEvent.After,
|
"commit", commitSHA,
|
||||||
)
|
)
|
||||||
|
|
||||||
// If branch matches, trigger deployment
|
// If branch matches, trigger deployment
|
||||||
@@ -132,3 +154,33 @@ func (svc *Service) triggerDeployment(
|
|||||||
_ = event.Save(deployCtx)
|
_ = event.Save(deployCtx)
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// extractBranch extracts the branch name from a git ref.
|
||||||
|
func extractBranch(ref string) string {
|
||||||
|
// refs/heads/main -> main
|
||||||
|
const prefix = "refs/heads/"
|
||||||
|
|
||||||
|
if len(ref) >= len(prefix) && ref[:len(prefix)] == prefix {
|
||||||
|
return ref[len(prefix):]
|
||||||
|
}
|
||||||
|
|
||||||
|
return ref
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractCommitURL extracts the commit URL from the webhook payload.
|
||||||
|
// Prefers the URL from the head commit, falls back to constructing from repo URL.
|
||||||
|
func extractCommitURL(payload GiteaPushPayload) UnparsedURL {
|
||||||
|
// Try to find the URL from the head commit (matching After SHA)
|
||||||
|
for _, commit := range payload.Commits {
|
||||||
|
if commit.ID == payload.After && commit.URL != "" {
|
||||||
|
return commit.URL
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to constructing URL from repo HTML URL
|
||||||
|
if payload.Repository.HTMLURL != "" && payload.After != "" {
|
||||||
|
return UnparsedURL(payload.Repository.HTMLURL.String() + "/commit/" + payload.After)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ package webhook_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"net/http"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
@@ -103,114 +102,44 @@ func createTestApp(
|
|||||||
return app
|
return app
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestDetectWebhookSource tests auto-detection of webhook source from HTTP headers.
|
|
||||||
//
|
|
||||||
//nolint:funlen // table-driven test with comprehensive test cases
|
//nolint:funlen // table-driven test with comprehensive test cases
|
||||||
func TestDetectWebhookSource(testingT *testing.T) {
|
func TestExtractBranch(testingT *testing.T) {
|
||||||
testingT.Parallel()
|
testingT.Parallel()
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
headers map[string]string
|
ref string
|
||||||
expected webhook.Source
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "detects Gitea from X-Gitea-Event header",
|
|
||||||
headers: map[string]string{"X-Gitea-Event": "push"},
|
|
||||||
expected: webhook.SourceGitea,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "detects GitHub from X-GitHub-Event header",
|
|
||||||
headers: map[string]string{"X-GitHub-Event": "push"},
|
|
||||||
expected: webhook.SourceGitHub,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "detects GitLab from X-Gitlab-Event header",
|
|
||||||
headers: map[string]string{"X-Gitlab-Event": "Push Hook"},
|
|
||||||
expected: webhook.SourceGitLab,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "returns unknown when no recognized header",
|
|
||||||
headers: map[string]string{"Content-Type": "application/json"},
|
|
||||||
expected: webhook.SourceUnknown,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "returns unknown for empty headers",
|
|
||||||
headers: map[string]string{},
|
|
||||||
expected: webhook.SourceUnknown,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Gitea takes precedence over GitHub",
|
|
||||||
headers: map[string]string{
|
|
||||||
"X-Gitea-Event": "push",
|
|
||||||
"X-GitHub-Event": "push",
|
|
||||||
},
|
|
||||||
expected: webhook.SourceGitea,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "GitHub takes precedence over GitLab",
|
|
||||||
headers: map[string]string{
|
|
||||||
"X-GitHub-Event": "push",
|
|
||||||
"X-Gitlab-Event": "Push Hook",
|
|
||||||
},
|
|
||||||
expected: webhook.SourceGitHub,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, testCase := range tests {
|
|
||||||
testingT.Run(testCase.name, func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
headers := http.Header{}
|
|
||||||
for key, value := range testCase.headers {
|
|
||||||
headers.Set(key, value)
|
|
||||||
}
|
|
||||||
|
|
||||||
result := webhook.DetectWebhookSource(headers)
|
|
||||||
assert.Equal(t, testCase.expected, result)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestDetectEventType tests event type extraction from HTTP headers.
|
|
||||||
func TestDetectEventType(testingT *testing.T) {
|
|
||||||
testingT.Parallel()
|
|
||||||
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
headers map[string]string
|
|
||||||
source webhook.Source
|
|
||||||
expected string
|
expected string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "extracts Gitea event type",
|
name: "extracts main branch",
|
||||||
headers: map[string]string{"X-Gitea-Event": "push"},
|
ref: "refs/heads/main",
|
||||||
source: webhook.SourceGitea,
|
expected: "main",
|
||||||
expected: "push",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "extracts GitHub event type",
|
name: "extracts feature branch",
|
||||||
headers: map[string]string{"X-GitHub-Event": "push"},
|
ref: "refs/heads/feature/new-feature",
|
||||||
source: webhook.SourceGitHub,
|
expected: "feature/new-feature",
|
||||||
expected: "push",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "extracts GitLab event type",
|
name: "extracts develop branch",
|
||||||
headers: map[string]string{"X-Gitlab-Event": "Push Hook"},
|
ref: "refs/heads/develop",
|
||||||
source: webhook.SourceGitLab,
|
expected: "develop",
|
||||||
expected: "Push Hook",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "returns push for unknown source",
|
name: "returns raw ref if no prefix",
|
||||||
headers: map[string]string{},
|
ref: "main",
|
||||||
source: webhook.SourceUnknown,
|
expected: "main",
|
||||||
expected: "push",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "returns push when header missing for source",
|
name: "handles empty ref",
|
||||||
headers: map[string]string{},
|
ref: "",
|
||||||
source: webhook.SourceGitea,
|
expected: "",
|
||||||
expected: "push",
|
},
|
||||||
|
{
|
||||||
|
name: "handles partial prefix",
|
||||||
|
ref: "refs/heads/",
|
||||||
|
expected: "",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -218,318 +147,123 @@ func TestDetectEventType(testingT *testing.T) {
|
|||||||
testingT.Run(testCase.name, func(t *testing.T) {
|
testingT.Run(testCase.name, func(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
headers := http.Header{}
|
// We test via HandleWebhook since extractBranch is not exported.
|
||||||
for key, value := range testCase.headers {
|
// The test verifies behavior indirectly through the webhook event's branch.
|
||||||
headers.Set(key, value)
|
svc, dbInst, cleanup := setupTestService(t)
|
||||||
}
|
defer cleanup()
|
||||||
|
|
||||||
result := webhook.DetectEventType(headers, testCase.source)
|
app := createTestApp(t, dbInst, testCase.expected)
|
||||||
assert.Equal(t, testCase.expected, result)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestWebhookSourceString tests the String method on WebhookSource.
|
payload := []byte(`{"ref": "` + testCase.ref + `"}`)
|
||||||
func TestWebhookSourceString(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
assert.Equal(t, "gitea", webhook.SourceGitea.String())
|
err := svc.HandleWebhook(context.Background(), app, "push", payload)
|
||||||
assert.Equal(t, "github", webhook.SourceGitHub.String())
|
|
||||||
assert.Equal(t, "gitlab", webhook.SourceGitLab.String())
|
|
||||||
assert.Equal(t, "unknown", webhook.SourceUnknown.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestUnparsedURLString tests the String method on UnparsedURL.
|
|
||||||
func TestUnparsedURLString(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
u := webhook.UnparsedURL("https://example.com/test")
|
|
||||||
assert.Equal(t, "https://example.com/test", u.String())
|
|
||||||
|
|
||||||
empty := webhook.UnparsedURL("")
|
|
||||||
assert.Empty(t, empty.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadGitea tests parsing of Gitea push payloads.
|
|
||||||
func TestParsePushPayloadGitea(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"before": "0000000000000000000000000000000000000000",
|
|
||||||
"after": "abc123def456789",
|
|
||||||
"compare_url": "https://gitea.example.com/myorg/myrepo/compare/000...abc",
|
|
||||||
"repository": {
|
|
||||||
"full_name": "myorg/myrepo",
|
|
||||||
"clone_url": "https://gitea.example.com/myorg/myrepo.git",
|
|
||||||
"ssh_url": "git@gitea.example.com:myorg/myrepo.git",
|
|
||||||
"html_url": "https://gitea.example.com/myorg/myrepo"
|
|
||||||
},
|
|
||||||
"pusher": {"username": "developer", "email": "dev@example.com"},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "abc123def456789",
|
|
||||||
"url": "https://gitea.example.com/myorg/myrepo/commit/abc123def456789",
|
|
||||||
"message": "Fix bug",
|
|
||||||
"author": {"name": "Developer", "email": "dev@example.com"}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitea, payload)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, webhook.SourceGitea, event.Source)
|
|
||||||
assert.Equal(t, "refs/heads/main", event.Ref)
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.Equal(t, "abc123def456789", event.After)
|
|
||||||
assert.Equal(t, "myorg/myrepo", event.RepoName)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitea.example.com/myorg/myrepo.git"), event.CloneURL)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitea.example.com/myorg/myrepo"), event.HTMLURL)
|
|
||||||
assert.Equal(t,
|
|
||||||
webhook.UnparsedURL("https://gitea.example.com/myorg/myrepo/commit/abc123def456789"),
|
|
||||||
event.CommitURL,
|
|
||||||
)
|
|
||||||
assert.Equal(t, "developer", event.Pusher)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadGitHub tests parsing of GitHub push payloads.
|
|
||||||
func TestParsePushPayloadGitHub(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"before": "0000000000000000000000000000000000000000",
|
|
||||||
"after": "abc123def456789",
|
|
||||||
"compare": "https://github.com/myorg/myrepo/compare/000...abc",
|
|
||||||
"repository": {
|
|
||||||
"full_name": "myorg/myrepo",
|
|
||||||
"clone_url": "https://github.com/myorg/myrepo.git",
|
|
||||||
"ssh_url": "git@github.com:myorg/myrepo.git",
|
|
||||||
"html_url": "https://github.com/myorg/myrepo"
|
|
||||||
},
|
|
||||||
"pusher": {"name": "developer", "email": "dev@example.com"},
|
|
||||||
"head_commit": {
|
|
||||||
"id": "abc123def456789",
|
|
||||||
"url": "https://github.com/myorg/myrepo/commit/abc123def456789",
|
|
||||||
"message": "Fix bug"
|
|
||||||
},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "abc123def456789",
|
|
||||||
"url": "https://github.com/myorg/myrepo/commit/abc123def456789",
|
|
||||||
"message": "Fix bug",
|
|
||||||
"author": {"name": "Developer", "email": "dev@example.com"}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitHub, payload)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, webhook.SourceGitHub, event.Source)
|
|
||||||
assert.Equal(t, "refs/heads/main", event.Ref)
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.Equal(t, "abc123def456789", event.After)
|
|
||||||
assert.Equal(t, "myorg/myrepo", event.RepoName)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://github.com/myorg/myrepo.git"), event.CloneURL)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://github.com/myorg/myrepo"), event.HTMLURL)
|
|
||||||
assert.Equal(t,
|
|
||||||
webhook.UnparsedURL("https://github.com/myorg/myrepo/commit/abc123def456789"),
|
|
||||||
event.CommitURL,
|
|
||||||
)
|
|
||||||
assert.Equal(t, "developer", event.Pusher)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadGitLab tests parsing of GitLab push payloads.
|
|
||||||
func TestParsePushPayloadGitLab(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/develop",
|
|
||||||
"before": "0000000000000000000000000000000000000000",
|
|
||||||
"after": "abc123def456789",
|
|
||||||
"user_name": "developer",
|
|
||||||
"user_email": "dev@example.com",
|
|
||||||
"project": {
|
|
||||||
"path_with_namespace": "mygroup/myproject",
|
|
||||||
"git_http_url": "https://gitlab.com/mygroup/myproject.git",
|
|
||||||
"git_ssh_url": "git@gitlab.com:mygroup/myproject.git",
|
|
||||||
"web_url": "https://gitlab.com/mygroup/myproject"
|
|
||||||
},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "abc123def456789",
|
|
||||||
"url": "https://gitlab.com/mygroup/myproject/-/commit/abc123def456789",
|
|
||||||
"message": "Fix bug",
|
|
||||||
"author": {"name": "Developer", "email": "dev@example.com"}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitLab, payload)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, webhook.SourceGitLab, event.Source)
|
|
||||||
assert.Equal(t, "refs/heads/develop", event.Ref)
|
|
||||||
assert.Equal(t, "develop", event.Branch)
|
|
||||||
assert.Equal(t, "abc123def456789", event.After)
|
|
||||||
assert.Equal(t, "mygroup/myproject", event.RepoName)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitlab.com/mygroup/myproject.git"), event.CloneURL)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitlab.com/mygroup/myproject"), event.HTMLURL)
|
|
||||||
assert.Equal(t,
|
|
||||||
webhook.UnparsedURL("https://gitlab.com/mygroup/myproject/-/commit/abc123def456789"),
|
|
||||||
event.CommitURL,
|
|
||||||
)
|
|
||||||
assert.Equal(t, "developer", event.Pusher)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadUnknownFallsBackToGitea tests that unknown source uses Gitea parser.
|
|
||||||
func TestParsePushPayloadUnknownFallsBackToGitea(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"after": "abc123",
|
|
||||||
"repository": {"full_name": "user/repo"},
|
|
||||||
"pusher": {"username": "user"}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceUnknown, payload)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, webhook.SourceGitea, event.Source)
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.Equal(t, "abc123", event.After)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadInvalidJSON tests that invalid JSON returns an error.
|
|
||||||
func TestParsePushPayloadInvalidJSON(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
sources := []webhook.Source{
|
|
||||||
webhook.SourceGitea,
|
|
||||||
webhook.SourceGitHub,
|
|
||||||
webhook.SourceGitLab,
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, source := range sources {
|
|
||||||
t.Run(source.String(), func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
_, err := webhook.ParsePushPayload(source, []byte(`{invalid json}`))
|
|
||||||
require.Error(t, err)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestParsePushPayloadEmptyPayload tests parsing of empty JSON objects.
|
|
||||||
func TestParsePushPayloadEmptyPayload(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
sources := []webhook.Source{
|
|
||||||
webhook.SourceGitea,
|
|
||||||
webhook.SourceGitHub,
|
|
||||||
webhook.SourceGitLab,
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, source := range sources {
|
|
||||||
t.Run(source.String(), func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(source, []byte(`{}`))
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
assert.Empty(t, event.Branch)
|
// Allow async deployment goroutine to complete before test cleanup
|
||||||
assert.Empty(t, event.After)
|
time.Sleep(100 * time.Millisecond)
|
||||||
|
|
||||||
|
events, err := app.GetWebhookEvents(context.Background(), 10)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, events, 1)
|
||||||
|
|
||||||
|
assert.Equal(t, testCase.expected, events[0].Branch)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestGitHubCommitURLFallback tests commit URL extraction fallback paths for GitHub.
|
func TestHandleWebhookMatchingBranch(t *testing.T) {
|
||||||
func TestGitHubCommitURLFallback(t *testing.T) {
|
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
t.Run("uses head_commit URL when available", func(t *testing.T) {
|
svc, dbInst, cleanup := setupTestService(t)
|
||||||
t.Parallel()
|
defer cleanup()
|
||||||
|
|
||||||
payload := []byte(`{
|
app := createTestApp(t, dbInst, "main")
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"after": "abc123",
|
|
||||||
"head_commit": {"id": "abc123", "url": "https://github.com/u/r/commit/abc123"},
|
|
||||||
"repository": {"html_url": "https://github.com/u/r"}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitHub, payload)
|
payload := []byte(`{
|
||||||
require.NoError(t, err)
|
"ref": "refs/heads/main",
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://github.com/u/r/commit/abc123"), event.CommitURL)
|
"before": "0000000000000000000000000000000000000000",
|
||||||
})
|
"after": "abc123def456",
|
||||||
|
"repository": {
|
||||||
|
"full_name": "user/repo",
|
||||||
|
"clone_url": "https://gitea.example.com/user/repo.git",
|
||||||
|
"ssh_url": "git@gitea.example.com:user/repo.git"
|
||||||
|
},
|
||||||
|
"pusher": {"username": "testuser", "email": "test@example.com"},
|
||||||
|
"commits": [{"id": "abc123def456", "message": "Test commit",
|
||||||
|
"author": {"name": "Test User", "email": "test@example.com"}}]
|
||||||
|
}`)
|
||||||
|
|
||||||
t.Run("falls back to commits list", func(t *testing.T) {
|
err := svc.HandleWebhook(context.Background(), app, "push", payload)
|
||||||
t.Parallel()
|
require.NoError(t, err)
|
||||||
|
|
||||||
payload := []byte(`{
|
// Allow async deployment goroutine to complete before test cleanup
|
||||||
"ref": "refs/heads/main",
|
time.Sleep(100 * time.Millisecond)
|
||||||
"after": "abc123",
|
|
||||||
"commits": [{"id": "abc123", "url": "https://github.com/u/r/commit/abc123"}],
|
|
||||||
"repository": {"html_url": "https://github.com/u/r"}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitHub, payload)
|
events, err := app.GetWebhookEvents(context.Background(), 10)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://github.com/u/r/commit/abc123"), event.CommitURL)
|
require.Len(t, events, 1)
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("constructs URL from repo HTML URL", func(t *testing.T) {
|
event := events[0]
|
||||||
t.Parallel()
|
assert.Equal(t, "push", event.EventType)
|
||||||
|
assert.Equal(t, "main", event.Branch)
|
||||||
payload := []byte(`{
|
assert.True(t, event.Matched)
|
||||||
"ref": "refs/heads/main",
|
assert.Equal(t, "abc123def456", event.CommitSHA.String)
|
||||||
"after": "abc123",
|
|
||||||
"repository": {"html_url": "https://github.com/u/r"}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitHub, payload)
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://github.com/u/r/commit/abc123"), event.CommitURL)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestGitLabCommitURLFallback tests commit URL extraction fallback paths for GitLab.
|
func TestHandleWebhookNonMatchingBranch(t *testing.T) {
|
||||||
func TestGitLabCommitURLFallback(t *testing.T) {
|
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
t.Run("uses commit URL from list", func(t *testing.T) {
|
svc, dbInst, cleanup := setupTestService(t)
|
||||||
t.Parallel()
|
defer cleanup()
|
||||||
|
|
||||||
payload := []byte(`{
|
app := createTestApp(t, dbInst, "main")
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"after": "abc123",
|
|
||||||
"project": {"web_url": "https://gitlab.com/g/p"},
|
|
||||||
"commits": [{"id": "abc123", "url": "https://gitlab.com/g/p/-/commit/abc123"}]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitLab, payload)
|
payload := []byte(`{"ref": "refs/heads/develop", "after": "def789ghi012"}`)
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitlab.com/g/p/-/commit/abc123"), event.CommitURL)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("constructs URL from project web URL", func(t *testing.T) {
|
err := svc.HandleWebhook(context.Background(), app, "push", payload)
|
||||||
t.Parallel()
|
require.NoError(t, err)
|
||||||
|
|
||||||
payload := []byte(`{
|
events, err := app.GetWebhookEvents(context.Background(), 10)
|
||||||
"ref": "refs/heads/main",
|
require.NoError(t, err)
|
||||||
"after": "abc123",
|
require.Len(t, events, 1)
|
||||||
"project": {"web_url": "https://gitlab.com/g/p"}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
event, err := webhook.ParsePushPayload(webhook.SourceGitLab, payload)
|
assert.Equal(t, "develop", events[0].Branch)
|
||||||
require.NoError(t, err)
|
assert.False(t, events[0].Matched)
|
||||||
assert.Equal(t, webhook.UnparsedURL("https://gitlab.com/g/p/-/commit/abc123"), event.CommitURL)
|
}
|
||||||
})
|
|
||||||
|
func TestHandleWebhookInvalidJSON(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
svc, dbInst, cleanup := setupTestService(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
app := createTestApp(t, dbInst, "main")
|
||||||
|
|
||||||
|
err := svc.HandleWebhook(context.Background(), app, "push", []byte(`{invalid json}`))
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
events, err := app.GetWebhookEvents(context.Background(), 10)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, events, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleWebhookEmptyPayload(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
svc, dbInst, cleanup := setupTestService(t)
|
||||||
|
defer cleanup()
|
||||||
|
|
||||||
|
app := createTestApp(t, dbInst, "main")
|
||||||
|
|
||||||
|
err := svc.HandleWebhook(context.Background(), app, "push", []byte(`{}`))
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
events, err := app.GetWebhookEvents(context.Background(), 10)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, events, 1)
|
||||||
|
assert.False(t, events[0].Matched)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestGiteaPushPayloadParsing tests direct deserialization of the Gitea payload struct.
|
|
||||||
func TestGiteaPushPayloadParsing(testingT *testing.T) {
|
func TestGiteaPushPayloadParsing(testingT *testing.T) {
|
||||||
testingT.Parallel()
|
testingT.Parallel()
|
||||||
|
|
||||||
@@ -588,354 +322,6 @@ func TestGiteaPushPayloadParsing(testingT *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestGitHubPushPayloadParsing tests direct deserialization of the GitHub payload struct.
|
|
||||||
func TestGitHubPushPayloadParsing(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"before": "0000000000",
|
|
||||||
"after": "abc123",
|
|
||||||
"compare": "https://github.com/o/r/compare/000...abc",
|
|
||||||
"repository": {
|
|
||||||
"full_name": "o/r",
|
|
||||||
"clone_url": "https://github.com/o/r.git",
|
|
||||||
"ssh_url": "git@github.com:o/r.git",
|
|
||||||
"html_url": "https://github.com/o/r"
|
|
||||||
},
|
|
||||||
"pusher": {"name": "octocat", "email": "octocat@github.com"},
|
|
||||||
"head_commit": {
|
|
||||||
"id": "abc123",
|
|
||||||
"url": "https://github.com/o/r/commit/abc123",
|
|
||||||
"message": "Update README"
|
|
||||||
},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "abc123",
|
|
||||||
"url": "https://github.com/o/r/commit/abc123",
|
|
||||||
"message": "Update README",
|
|
||||||
"author": {"name": "Octocat", "email": "octocat@github.com"}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
var p webhook.GitHubPushPayload
|
|
||||||
|
|
||||||
err := json.Unmarshal(payload, &p)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, "refs/heads/main", p.Ref)
|
|
||||||
assert.Equal(t, "abc123", p.After)
|
|
||||||
assert.Equal(t, "o/r", p.Repository.FullName)
|
|
||||||
assert.Equal(t, "octocat", p.Pusher.Name)
|
|
||||||
assert.NotNil(t, p.HeadCommit)
|
|
||||||
assert.Equal(t, "abc123", p.HeadCommit.ID)
|
|
||||||
assert.Len(t, p.Commits, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestGitLabPushPayloadParsing tests direct deserialization of the GitLab payload struct.
|
|
||||||
func TestGitLabPushPayloadParsing(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"before": "0000000000",
|
|
||||||
"after": "abc123",
|
|
||||||
"user_name": "gitlab-user",
|
|
||||||
"user_email": "user@gitlab.com",
|
|
||||||
"project": {
|
|
||||||
"path_with_namespace": "group/project",
|
|
||||||
"git_http_url": "https://gitlab.com/group/project.git",
|
|
||||||
"git_ssh_url": "git@gitlab.com:group/project.git",
|
|
||||||
"web_url": "https://gitlab.com/group/project"
|
|
||||||
},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "abc123",
|
|
||||||
"url": "https://gitlab.com/group/project/-/commit/abc123",
|
|
||||||
"message": "Fix pipeline",
|
|
||||||
"author": {"name": "GitLab User", "email": "user@gitlab.com"}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
var p webhook.GitLabPushPayload
|
|
||||||
|
|
||||||
err := json.Unmarshal(payload, &p)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, "refs/heads/main", p.Ref)
|
|
||||||
assert.Equal(t, "abc123", p.After)
|
|
||||||
assert.Equal(t, "group/project", p.Project.PathWithNamespace)
|
|
||||||
assert.Equal(t, "gitlab-user", p.UserName)
|
|
||||||
assert.Len(t, p.Commits, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestExtractBranch tests branch extraction via HandleWebhook integration (extractBranch is unexported).
|
|
||||||
//
|
|
||||||
//nolint:funlen // table-driven test with comprehensive test cases
|
|
||||||
func TestExtractBranch(testingT *testing.T) {
|
|
||||||
testingT.Parallel()
|
|
||||||
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
ref string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "extracts main branch",
|
|
||||||
ref: "refs/heads/main",
|
|
||||||
expected: "main",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "extracts feature branch",
|
|
||||||
ref: "refs/heads/feature/new-feature",
|
|
||||||
expected: "feature/new-feature",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "extracts develop branch",
|
|
||||||
ref: "refs/heads/develop",
|
|
||||||
expected: "develop",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "returns raw ref if no prefix",
|
|
||||||
ref: "main",
|
|
||||||
expected: "main",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "handles empty ref",
|
|
||||||
ref: "",
|
|
||||||
expected: "",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "handles partial prefix",
|
|
||||||
ref: "refs/heads/",
|
|
||||||
expected: "",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, testCase := range tests {
|
|
||||||
testingT.Run(testCase.name, func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
// We test via HandleWebhook since extractBranch is not exported.
|
|
||||||
// The test verifies behavior indirectly through the webhook event's branch.
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, testCase.expected)
|
|
||||||
|
|
||||||
payload := []byte(`{"ref": "` + testCase.ref + `"}`)
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitea, "push", payload,
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
// Allow async deployment goroutine to complete before test cleanup
|
|
||||||
time.Sleep(100 * time.Millisecond)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
|
|
||||||
assert.Equal(t, testCase.expected, events[0].Branch)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHandleWebhookMatchingBranch(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"before": "0000000000000000000000000000000000000000",
|
|
||||||
"after": "abc123def456",
|
|
||||||
"repository": {
|
|
||||||
"full_name": "user/repo",
|
|
||||||
"clone_url": "https://gitea.example.com/user/repo.git",
|
|
||||||
"ssh_url": "git@gitea.example.com:user/repo.git"
|
|
||||||
},
|
|
||||||
"pusher": {"username": "testuser", "email": "test@example.com"},
|
|
||||||
"commits": [{"id": "abc123def456", "message": "Test commit",
|
|
||||||
"author": {"name": "Test User", "email": "test@example.com"}}]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitea, "push", payload,
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
// Allow async deployment goroutine to complete before test cleanup
|
|
||||||
time.Sleep(100 * time.Millisecond)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
|
|
||||||
event := events[0]
|
|
||||||
assert.Equal(t, "push", event.EventType)
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.True(t, event.Matched)
|
|
||||||
assert.Equal(t, "abc123def456", event.CommitSHA.String)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHandleWebhookNonMatchingBranch(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
payload := []byte(`{"ref": "refs/heads/develop", "after": "def789ghi012"}`)
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitea, "push", payload,
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
|
|
||||||
assert.Equal(t, "develop", events[0].Branch)
|
|
||||||
assert.False(t, events[0].Matched)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHandleWebhookInvalidJSON(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitea, "push", []byte(`{invalid json}`),
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHandleWebhookEmptyPayload(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitea, "push", []byte(`{}`),
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
assert.False(t, events[0].Matched)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestHandleWebhookGitHubSource tests HandleWebhook with a GitHub push payload.
|
|
||||||
func TestHandleWebhookGitHubSource(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"after": "github123",
|
|
||||||
"repository": {
|
|
||||||
"full_name": "org/repo",
|
|
||||||
"clone_url": "https://github.com/org/repo.git",
|
|
||||||
"html_url": "https://github.com/org/repo"
|
|
||||||
},
|
|
||||||
"pusher": {"name": "octocat", "email": "octocat@github.com"},
|
|
||||||
"head_commit": {
|
|
||||||
"id": "github123",
|
|
||||||
"url": "https://github.com/org/repo/commit/github123",
|
|
||||||
"message": "Update feature"
|
|
||||||
}
|
|
||||||
}`)
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitHub, "push", payload,
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
// Allow async deployment goroutine to complete before test cleanup
|
|
||||||
time.Sleep(100 * time.Millisecond)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
|
|
||||||
event := events[0]
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.True(t, event.Matched)
|
|
||||||
assert.Equal(t, "github123", event.CommitSHA.String)
|
|
||||||
assert.Equal(t, "https://github.com/org/repo/commit/github123", event.CommitURL.String)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestHandleWebhookGitLabSource tests HandleWebhook with a GitLab push payload.
|
|
||||||
func TestHandleWebhookGitLabSource(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
svc, dbInst, cleanup := setupTestService(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
app := createTestApp(t, dbInst, "main")
|
|
||||||
|
|
||||||
payload := []byte(`{
|
|
||||||
"ref": "refs/heads/main",
|
|
||||||
"after": "gitlab456",
|
|
||||||
"user_name": "gitlab-dev",
|
|
||||||
"user_email": "dev@gitlab.com",
|
|
||||||
"project": {
|
|
||||||
"path_with_namespace": "group/project",
|
|
||||||
"git_http_url": "https://gitlab.com/group/project.git",
|
|
||||||
"web_url": "https://gitlab.com/group/project"
|
|
||||||
},
|
|
||||||
"commits": [
|
|
||||||
{
|
|
||||||
"id": "gitlab456",
|
|
||||||
"url": "https://gitlab.com/group/project/-/commit/gitlab456",
|
|
||||||
"message": "Deploy fix"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`)
|
|
||||||
|
|
||||||
err := svc.HandleWebhook(
|
|
||||||
context.Background(), app, webhook.SourceGitLab, "push", payload,
|
|
||||||
)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
// Allow async deployment goroutine to complete before test cleanup
|
|
||||||
time.Sleep(100 * time.Millisecond)
|
|
||||||
|
|
||||||
events, err := app.GetWebhookEvents(context.Background(), 10)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
|
|
||||||
event := events[0]
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.True(t, event.Matched)
|
|
||||||
assert.Equal(t, "gitlab456", event.CommitSHA.String)
|
|
||||||
assert.Equal(t, "https://gitlab.com/group/project/-/commit/gitlab456", event.CommitURL.String)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestSetupTestService verifies the test helper creates a working test service.
|
// TestSetupTestService verifies the test helper creates a working test service.
|
||||||
func TestSetupTestService(testingT *testing.T) {
|
func TestSetupTestService(testingT *testing.T) {
|
||||||
testingT.Parallel()
|
testingT.Parallel()
|
||||||
@@ -955,25 +341,3 @@ func TestSetupTestService(testingT *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestPushEventConstruction tests that PushEvent can be constructed directly.
|
|
||||||
func TestPushEventConstruction(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
event := webhook.PushEvent{
|
|
||||||
Source: webhook.SourceGitHub,
|
|
||||||
Ref: "refs/heads/main",
|
|
||||||
Before: "000",
|
|
||||||
After: "abc",
|
|
||||||
Branch: "main",
|
|
||||||
RepoName: "org/repo",
|
|
||||||
CloneURL: webhook.UnparsedURL("https://github.com/org/repo.git"),
|
|
||||||
HTMLURL: webhook.UnparsedURL("https://github.com/org/repo"),
|
|
||||||
CommitURL: webhook.UnparsedURL("https://github.com/org/repo/commit/abc"),
|
|
||||||
Pusher: "user",
|
|
||||||
}
|
|
||||||
|
|
||||||
assert.Equal(t, "main", event.Branch)
|
|
||||||
assert.Equal(t, webhook.SourceGitHub, event.Source)
|
|
||||||
assert.Equal(t, "abc", event.After)
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -432,6 +432,18 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Backup -->
|
||||||
|
<div class="card p-6 mb-6">
|
||||||
|
<h2 class="section-title mb-4">Backup</h2>
|
||||||
|
<p class="text-sm text-gray-500 mb-3">Export this app's configuration (settings, env vars, labels, volumes, ports) as a JSON file for backup or migration.</p>
|
||||||
|
<a href="/apps/{{.App.ID}}/export" class="btn-secondary">
|
||||||
|
<svg class="w-4 h-4 mr-1 inline" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"/>
|
||||||
|
</svg>
|
||||||
|
Export Config
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- Danger Zone -->
|
<!-- Danger Zone -->
|
||||||
<div class="card border-2 border-error-500/20 bg-error-50/50 p-6">
|
<div class="card border-2 border-error-500/20 bg-error-50/50 p-6">
|
||||||
<h2 class="text-lg font-medium text-error-700 mb-4">Danger Zone</h2>
|
<h2 class="text-lg font-medium text-error-700 mb-4">Danger Zone</h2>
|
||||||
|
|||||||
62
templates/backup_import.html
Normal file
62
templates/backup_import.html
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
{{template "base" .}}
|
||||||
|
|
||||||
|
{{define "title"}}Import Backup - µPaaS{{end}}
|
||||||
|
|
||||||
|
{{define "content"}}
|
||||||
|
{{template "nav" .}}
|
||||||
|
|
||||||
|
<main class="max-w-4xl mx-auto px-4 py-8">
|
||||||
|
<div class="mb-6">
|
||||||
|
<a href="/" class="text-primary-600 hover:text-primary-800 inline-flex items-center">
|
||||||
|
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
|
||||||
|
</svg>
|
||||||
|
Back to Dashboard
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{{template "alert-success" .}}
|
||||||
|
{{template "alert-error" .}}
|
||||||
|
|
||||||
|
<h1 class="text-2xl font-medium text-gray-900 mb-6">Import Backup</h1>
|
||||||
|
|
||||||
|
<div class="card p-6 mb-6">
|
||||||
|
<h2 class="section-title mb-4">Restore from Backup File</h2>
|
||||||
|
<p class="text-sm text-gray-500 mb-4">
|
||||||
|
Upload a previously exported µPaaS backup file (JSON) to restore app configurations.
|
||||||
|
New apps will be created with fresh SSH keys and webhook secrets.
|
||||||
|
Apps whose names already exist will be skipped.
|
||||||
|
</p>
|
||||||
|
<form method="POST" action="/backup/import" enctype="multipart/form-data">
|
||||||
|
{{ .CSRFField }}
|
||||||
|
<div class="mb-4">
|
||||||
|
<label for="backup_file" class="form-label">Backup File</label>
|
||||||
|
<input type="file" id="backup_file" name="backup_file" accept=".json,application/json"
|
||||||
|
class="block w-full text-sm text-gray-500
|
||||||
|
file:mr-4 file:py-2 file:px-4
|
||||||
|
file:rounded file:border-0
|
||||||
|
file:text-sm file:font-medium
|
||||||
|
file:bg-primary-50 file:text-primary-700
|
||||||
|
hover:file:bg-primary-100
|
||||||
|
cursor-pointer">
|
||||||
|
</div>
|
||||||
|
<button type="submit" class="btn-primary">Import</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="card p-6">
|
||||||
|
<h2 class="section-title mb-4">Export All Apps</h2>
|
||||||
|
<p class="text-sm text-gray-500 mb-4">
|
||||||
|
Download a backup of all app configurations. This includes app settings,
|
||||||
|
environment variables, labels, volumes, and port mappings.
|
||||||
|
Secrets (SSH keys, webhook tokens) are not included — they are regenerated on import.
|
||||||
|
</p>
|
||||||
|
<a href="/backup/export" class="btn-secondary">
|
||||||
|
<svg class="w-4 h-4 mr-1 inline" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"/>
|
||||||
|
</svg>
|
||||||
|
Export All Apps
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
{{end}}
|
||||||
@@ -11,12 +11,20 @@
|
|||||||
|
|
||||||
<div class="section-header">
|
<div class="section-header">
|
||||||
<h1 class="text-2xl font-medium text-gray-900">Applications</h1>
|
<h1 class="text-2xl font-medium text-gray-900">Applications</h1>
|
||||||
<a href="/apps/new" class="btn-primary">
|
<div class="flex gap-3">
|
||||||
<svg class="w-5 h-5 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
<a href="/backup/import" class="btn-secondary">
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4v16m8-8H4"/>
|
<svg class="w-4 h-4 mr-1 inline" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
</svg>
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-8l-4-4m0 0L8 8m4-4v12"/>
|
||||||
New App
|
</svg>
|
||||||
</a>
|
Backup / Restore
|
||||||
|
</a>
|
||||||
|
<a href="/apps/new" class="btn-primary">
|
||||||
|
<svg class="w-5 h-5 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4v16m8-8H4"/>
|
||||||
|
</svg>
|
||||||
|
New App
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{{if .AppStats}}
|
{{if .AppStats}}
|
||||||
|
|||||||
@@ -45,6 +45,7 @@ func initTemplates() {
|
|||||||
"app_edit.html",
|
"app_edit.html",
|
||||||
"deployments.html",
|
"deployments.html",
|
||||||
"webhook_events.html",
|
"webhook_events.html",
|
||||||
|
"backup_import.html",
|
||||||
}
|
}
|
||||||
|
|
||||||
pageTemplates = make(map[string]*template.Template)
|
pageTemplates = make(map[string]*template.Template)
|
||||||
|
|||||||
Reference in New Issue
Block a user